/**
 * Caches the scale of the decimal input type and the resolved decimal
 * output type for later per-row use.
 */
private void init() {
  final DecimalTypeInfo inputDecimalTypeInfo = (DecimalTypeInfo) inputTypeInfo;
  inputScale = inputDecimalTypeInfo.getScale();
  outputDecimalTypeInfo = (DecimalTypeInfo) outputTypeInfo;
}
/**
 * Records the input decimal scale and keeps the output type as a
 * DecimalTypeInfo so precision/scale are available without re-casting.
 */
private void init() {
  outputDecimalTypeInfo = (DecimalTypeInfo) outputTypeInfo;
  inputScale = ((DecimalTypeInfo) inputTypeInfo).getScale();
}
/**
 * Caches the input decimal scale and resolves the output decimal type,
 * which is nested inside the struct output type at AVERAGE_SUM_FIELD_INDEX.
 */
private void init() {
  inputScale = ((DecimalTypeInfo) inputTypeInfo).getScale();
  final StructTypeInfo structTypeInfo = (StructTypeInfo) outputTypeInfo;
  final TypeInfo sumFieldTypeInfo =
      structTypeInfo.getAllStructFieldTypeInfos().get(AVERAGE_SUM_FIELD_INDEX);
  outputDecimalTypeInfo = (DecimalTypeInfo) sumFieldTypeInfo;
}
/**
 * Splits the closed decimal range [lowerBound, upperBound] into up to
 * {@code numPartitions} sub-intervals of (approximately) equal width,
 * rounded to the column's declared scale.
 *
 * @param lowerBound    inclusive lower bound as a decimal string
 * @param upperBound    upper bound as a decimal string
 * @param numPartitions requested number of splits
 * @param typeInfo      decimal type whose scale governs the rounding
 * @return the non-degenerate (lower &lt; upper) intervals as plain strings
 */
@Override
public List<MutablePair<String, String>> getIntervals(String lowerBound, String upperBound,
    int numPartitions, TypeInfo typeInfo) {
  final int scale = ((DecimalTypeInfo) typeInfo).getScale();
  final BigDecimal lower = new BigDecimal(lowerBound);
  final BigDecimal upper = new BigDecimal(upperBound);
  // Width of one partition; DECIMAL64 keeps ample precision for the split math.
  final BigDecimal step =
      upper.subtract(lower).divide(new BigDecimal(numPartitions), MathContext.DECIMAL64);
  final List<MutablePair<String, String>> intervals = new ArrayList<>();
  for (int i = 0; i < numPartitions; i++) {
    final BigDecimal from =
        lower.add(step.multiply(new BigDecimal(i))).setScale(scale, RoundingMode.HALF_EVEN);
    final BigDecimal to =
        lower.add(step.multiply(new BigDecimal(i + 1))).setScale(scale, RoundingMode.HALF_EVEN);
    // Drop intervals that collapse (or invert) after rounding to the column scale.
    // NOTE(review): HALF_EVEN rounding can make the last interval's upper edge differ
    // slightly from upperBound — confirm callers tolerate this.
    if (from.compareTo(to) < 0) {
      intervals.add(new MutablePair<String, String>(from.toPlainString(), to.toPlainString()));
    }
  }
  return intervals;
}
}
/**
 * Attempts to build a Decimal64 (long-backed decimal) BETWEEN / NOT BETWEEN
 * vector expression. Projection mode picks the value-producing classes,
 * otherwise the Filter variants are chosen.
 *
 * @param mode             projection vs. filter context of the expression
 * @param isNot            true for NOT BETWEEN
 * @param colExpr          the decimal column being tested; its scale seeds the expression
 * @param childrenAfterNot child expressions with any leading NOT stripped
 * @param returnTypeInfo   result type of the whole expression
 * @return the vector expression, or whatever createDecimal64VectorExpression
 *         returns when Decimal64 is not applicable
 */
private VectorExpression tryDecimal64Between(VectorExpressionDescriptor.Mode mode, boolean isNot,
    ExprNodeDesc colExpr, List<ExprNodeDesc> childrenAfterNot, TypeInfo returnTypeInfo)
    throws HiveException {
  final Class<?> cl;
  if (mode == VectorExpressionDescriptor.Mode.PROJECTION) {
    cl = (isNot ? Decimal64ColumnNotBetween.class : Decimal64ColumnBetween.class);
  } else {
    cl = (isNot ? FilterDecimal64ColumnNotBetween.class : FilterDecimal64ColumnBetween.class);
  }
  // NOTE(review): Mode.PROJECTION is passed here even when 'mode' selected a Filter
  // class above — verify this is intentional and not meant to forward 'mode'.
  return createDecimal64VectorExpression(
      cl, childrenAfterNot, VectorExpressionDescriptor.Mode.PROJECTION,
      /* isDecimal64ScaleEstablished */ true,
      /* decimal64ColumnScale */ ((DecimalTypeInfo) colExpr.getTypeInfo()).getScale(),
      returnTypeInfo, DataTypePhysicalVariation.NONE);
}
@Override public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException { // Intercept result ObjectInspector so we can extract the DECIMAL precision and scale. ObjectInspector resultOI = super.init(m, parameters); if (m == Mode.COMPLETE || m == Mode.FINAL) { DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(resultOI); resultPrecision = decimalTypeInfo.getPrecision(); resultScale = decimalTypeInfo.getScale(); } return resultOI; }
/**
 * Produces the final decimal sum, clamped to the output type's
 * precision/scale; returns null for empty input or on overflow.
 */
@Override
public Object terminate(AggregationBuffer agg) throws HiveException {
  final SumHiveDecimalWritableAgg sumAgg = (SumHiveDecimalWritableAgg) agg;
  final boolean noValue = sumAgg.empty || sumAgg.sum == null || !sumAgg.sum.isSet();
  if (noValue) {
    return null;
  }
  final DecimalTypeInfo resultTypeInfo = (DecimalTypeInfo) outputOI.getTypeInfo();
  // Clamp the accumulated sum to the declared type; overflow leaves it unset.
  sumAgg.sum.mutateEnforcePrecisionScale(
      resultTypeInfo.getPrecision(), resultTypeInfo.getScale());
  if (!sumAgg.sum.isSet()) {
    LOG.warn("The sum of a column with data type HiveDecimal is out of range");
    return null;
  }
  result.set(sumAgg.sum);
  return result;
}
@Override public HiveDecimal getPrimitiveJavaObject(Object o) { if (o == null) { return null; } DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; // We do not want to modify the writable provided by the object o since it is not a copy. HiveDecimalWritable decWritable = ((LazyHiveDecimal)o).getWritableObject(); HiveDecimalWritable result = HiveDecimalWritable.enforcePrecisionScale( decWritable, decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale()); return (result != null && result.isSet() ? result.getHiveDecimal() : null); }
/**
 * Produces the final decimal sum, clamped to the output type's
 * precision/scale; returns null for empty input or on overflow.
 *
 * @param agg the accumulated sum buffer
 * @return the reusable result writable, or null
 */
@Override
public Object terminate(AggregationBuffer agg) throws HiveException {
  SumHiveDecimalWritableAgg myagg = (SumHiveDecimalWritableAgg) agg;
  // No rows were aggregated, or the running sum already overflowed.
  if (myagg.empty || myagg.sum == null || !myagg.sum.isSet()) {
    return null;
  }
  DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) outputOI.getTypeInfo();
  // Clamp to the declared output type; on overflow the writable becomes unset.
  myagg.sum.mutateEnforcePrecisionScale(decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
  if (!myagg.sum.isSet()) {
    LOG.warn("The sum of a column with data type HiveDecimal is out of range");
    return null;
  }
  result.set(myagg.sum);
  return result;
}
/**
 * Writes the aggregated decimal sum into the output column vector at
 * batchIndex, clamping it to the output precision/scale; a null buffer or
 * an overflowed sum produces a NULL cell.
 */
@Override
public void assignRowColumn(VectorizedRowBatch batch, int batchIndex, int columnNum,
    AggregationBuffer agg) throws HiveException {
  final DecimalColumnVector outputColVector = (DecimalColumnVector) batch.cols[columnNum];
  final Aggregation myagg = (Aggregation) agg;
  boolean writeNull = myagg.isNull;
  if (!writeNull) {
    // Clamp to the declared output type; overflow leaves the writable unset.
    myagg.sum.mutateEnforcePrecisionScale(
        outputDecimalTypeInfo.getPrecision(), outputDecimalTypeInfo.getScale());
    writeNull = !myagg.sum.isSet();
  }
  if (writeNull) {
    outputColVector.noNulls = false;
    outputColVector.isNull[batchIndex] = true;
  } else {
    outputColVector.isNull[batchIndex] = false;
    outputColVector.set(batchIndex, myagg.sum);
  }
}
}
/**
 * Generates a random HiveDecimal that fits the given decimal type,
 * retrying until the random literal survives precision/scale enforcement.
 *
 * @param r               source of randomness
 * @param decimalTypeInfo target precision/scale to enforce
 * @return a valid random decimal (never null)
 */
public static HiveDecimal getRandHiveDecimal(Random r, DecimalTypeInfo decimalTypeInfo) {
  while (true) {
    final StringBuilder sb = new StringBuilder();
    // Random layout: total digits in [1,18], scale in [0,precision].
    final int precision = 1 + r.nextInt(18);
    final int scale = r.nextInt(precision + 1);
    final int integerDigits = precision - scale;
    if (r.nextBoolean()) {
      sb.append("-");
    }
    sb.append(integerDigits == 0
        ? "0"
        : RandomTypeUtil.getRandString(r, DECIMAL_CHARS, integerDigits));
    if (scale != 0) {
      sb.append(".").append(RandomTypeUtil.getRandString(r, DECIMAL_CHARS, scale));
    }
    final HiveDecimal candidate = HiveDecimal.enforcePrecisionScale(
        HiveDecimal.create(sb.toString()),
        decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale());
    // enforcePrecisionScale returns null when the value cannot fit; retry.
    if (candidate != null) {
      return candidate;
    }
  }
}
/**
 * Renders a constant scalar as its String cast, honoring decimal scale and
 * timestamp formatting.
 *
 * @param scalar the constant value, possibly null
 * @param type   the scalar's Hive type
 * @return the string form, or null for a null scalar
 * @throws HiveException for non-primitive or unsupported primitive types
 */
private String castConstantToString(Object scalar, TypeInfo type) throws HiveException {
  if (scalar == null) {
    return null;
  }
  final String typename = type.getTypeName();
  if (!(type instanceof PrimitiveTypeInfo)) {
    throw new HiveException("Unsupported type " + typename + " for cast to String");
  }
  final PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
  switch (ptinfo.getPrimitiveCategory()) {
    case FLOAT:
    case DOUBLE:
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
      // All plain numeric scalars use their natural toString form.
      return ((Number) scalar).toString();
    case DECIMAL: {
      final DecimalTypeInfo decType = (DecimalTypeInfo) type;
      // Format at the declared scale so trailing zeros match the column type.
      return ((HiveDecimal) scalar).toFormatString(decType.getScale());
    }
    case TIMESTAMP:
      return CastTimestampToString.getTimestampString((Timestamp) scalar);
    default:
      throw new HiveException("Unsupported type " + typename + " for cast to String");
  }
}
/**
 * The decimal precision and scale is filled into decimalColumnVector. If the data in
 * Parquet is in decimal, the precision and scale will come in from decimalMetadata. If parquet
 * is not in decimal, then this call is made because HMS shows the type as decimal. So, the
 * precision and scale are picked from hiveType.
 *
 * @param decimalMetadata    Parquet decimal metadata, or null when the Parquet type
 *                           is not decimal
 * @param decimalColumnVector vector whose precision/scale fields are set
 * @throws UnsupportedOperationException when neither source provides decimal info
 */
private void fillDecimalPrecisionScale(DecimalMetadata decimalMetadata,
    DecimalColumnVector decimalColumnVector) {
  if (decimalMetadata != null) {
    // Fix: read from the parameter as the contract above states. Previously the
    // parameter was only null-checked and the values were re-fetched from
    // type.asPrimitiveType().getDecimalMetadata().
    decimalColumnVector.precision = (short) decimalMetadata.getPrecision();
    decimalColumnVector.scale = (short) decimalMetadata.getScale();
  } else if (TypeInfoUtils.getBaseName(hiveType.getTypeName())
      .equalsIgnoreCase(serdeConstants.DECIMAL_TYPE_NAME)) {
    // Parquet lacks decimal metadata but HMS declares the column as decimal.
    final DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) hiveType;
    decimalColumnVector.precision = (short) decimalTypeInfo.getPrecision();
    decimalColumnVector.scale = (short) decimalTypeInfo.getScale();
  } else {
    throw new UnsupportedOperationException(
        "The underlying Parquet type cannot be converted to Hive Decimal type: " + type);
  }
}
}
// Fragment (braces are unbalanced in this excerpt): enforces that every decimal
// column participating in a candidate Decimal64 expression shares one scale;
// a mismatch disqualifies the Decimal64 form by returning null.
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
if (!isDecimal64ScaleEstablished) {
  // First decimal column seen: adopt its scale as the expression-wide scale.
  decimal64ColumnScale = decimalTypeInfo.getScale();
  isDecimal64ScaleEstablished = true;
} else if (decimalTypeInfo.getScale() != decimal64ColumnScale) {
  return null;
// Same check applied to the expression's return type.
DecimalTypeInfo returnDecimalTypeInfo = (DecimalTypeInfo) returnTypeInfo;
if (!isDecimal64ScaleEstablished) {
  decimal64ColumnScale = returnDecimalTypeInfo.getScale();
  isDecimal64ScaleEstablished = true;
} else if (returnDecimalTypeInfo.getScale() != decimal64ColumnScale) {
  return null;
// Fragment: tail of a mutateEnforcePrecisionScale(...) call clamping the
// regular (non-64-bit) decimal sum to the output type; if the value no longer
// fits, the writable becomes unset and the result is treated as NULL.
outputDecimalTypeInfo.getPrecision(), outputDecimalTypeInfo.getScale());
isNull = !myagg.regularDecimalSum.isSet();
} else {
// Fragment (likely from an AVG finalizer — confirm): divides the accumulated
// sum by the count, then clamps to the output precision/scale; an unset result
// after clamping marks the output cell NULL.
result.mutateDivide(temp);
result.mutateEnforcePrecisionScale(
    outputDecimalTypeInfo.getPrecision(), outputDecimalTypeInfo.getScale());
if (!result.isSet()) {
  outputColVector.noNulls = false;
// Fragment: precision/scale are meaningful only when the Hive type name is
// DECIMAL; all other primitive types fall back to 0 placeholders.
((DecimalTypeInfo) realHiveType).getPrecision() : 0;
int hiveScale = (typeName.equalsIgnoreCase(serdeConstants.DECIMAL_TYPE_NAME)) ?
    ((DecimalTypeInfo) realHiveType).getScale() : 0;
// Fragment of a primitive-category switch: the DECIMAL case copies precision
// and scale from the type info into the type builder.
case DECIMAL:
  DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
  typeBuilder.setPrecision(decimalTypeInfo.getPrecision()).setScale(decimalTypeInfo.getScale());
  break;
default:
// Fragment (duplicate of the same pattern elsewhere in this file): precision
// and scale are read only for DECIMAL-named Hive types; others default to 0.
((DecimalTypeInfo) realHiveType).getPrecision() : 0;
int hiveScale = (typeName.equalsIgnoreCase(serdeConstants.DECIMAL_TYPE_NAME)) ?
    ((DecimalTypeInfo) realHiveType).getScale() : 0;
// Fragment: maps a Hive DecimalTypeInfo to an ORC TypeDescription, carrying
// over both scale and precision.
DecimalTypeInfo dinfo = (DecimalTypeInfo) pinfo;
return TypeDescription.createDecimal()
    .withScale(dinfo.getScale())
    .withPrecision(dinfo.getPrecision());