/**
 * Creates decimal statistics where min and max values are re-created using given scale.
 *
 * @param statistics statistics that needs to be adjusted
 * @param scale adjustment scale
 * @return adjusted statistics
 */
@SuppressWarnings("unchecked")
private Statistics<C> adjustDecimalStatistics(Statistics<C> statistics, int scale) {
  // Rebuild the statistics around both boundary values re-scaled with the same rule.
  return (Statistics<C>) Statistics.getBuilderForReading(statistics.type())
      .withMin(rescaleUnscaledBytes(statistics.getMinBytes(), scale))
      .withMax(rescaleUnscaledBytes(statistics.getMaxBytes(), scale))
      .withNumNulls(statistics.getNumNulls())
      .build();
}

/**
 * Interprets the given big-endian two's-complement bytes as an unscaled integer,
 * applies the target scale with {@link RoundingMode#HALF_UP} rounding and returns
 * the unscaled value of the result as a byte array.
 *
 * @param unscaledBytes big-endian two's-complement representation of the value
 * @param scale target scale to apply
 * @return unscaled bytes of the re-scaled value
 */
private static byte[] rescaleUnscaledBytes(byte[] unscaledBytes, int scale) {
  BigDecimal rescaled = new BigDecimal(new BigInteger(unscaledBytes))
      .setScale(scale, RoundingMode.HALF_UP);
  return rescaled.unscaledValue().toByteArray();
}
/**
 * Builds binary decimal statistics for the given min / max values using the
 * precision and scale carried by the supplied decimal metadata.
 *
 * @param min minimum value of the statistics
 * @param max maximum value of the statistics
 * @param decimalMetadata source of decimal precision and scale
 * @return statistics with the given bounds and zero null count
 */
@SuppressWarnings("unchecked")
private Statistics<T> getStatistics(BigDecimal min, BigDecimal max, DecimalMetadata decimalMetadata) {
  // Synthetic optional BINARY DECIMAL type carrying the column's precision and scale.
  PrimitiveType decimalType = org.apache.parquet.schema.Types
      .optional(PrimitiveType.PrimitiveTypeName.BINARY)
      .as(OriginalType.DECIMAL)
      .precision(decimalMetadata.getPrecision())
      .scale(decimalMetadata.getScale())
      .named("decimal_type");

  byte[] minBytes = min.unscaledValue().toByteArray();
  byte[] maxBytes = max.unscaledValue().toByteArray();

  return (Statistics<T>) Statistics.getBuilderForReading(decimalType)
      .withMin(minBytes)
      .withMax(maxBytes)
      .withNumNulls(0)
      .build();
}
// NOTE(review): fragment of a larger statement — the type-builder chain starts before
// this excerpt and the statistics-builder chain continues after it; presumably
// finished with .withNumNulls(...)/.build() — confirm against the full file.
.named("decimal_type"); convertedStat = Statistics.getBuilderForReading(decimalType) .withMin(minBytes) .withMax(maxBytes)
// Obtains a statistics builder for the given primitive type; the surrounding
// assignment/usage context is not visible in this excerpt.
org.apache.parquet.column.statistics.Statistics.getBuilderForReading(type);
// Obtains a statistics builder for the given primitive type; the surrounding
// assignment/usage context is not visible in this excerpt.
org.apache.parquet.column.statistics.Statistics.getBuilderForReading(type);