/**
 * Caches the output decimal type's precision and scale and allocates the
 * scratch writable reused during aggregation.
 */
private void init() {
  outputDecimalTypeInfo = (DecimalTypeInfo) outputTypeInfo;
  sumPrecision = outputDecimalTypeInfo.precision();
  sumScale = outputDecimalTypeInfo.scale();
  tempDecWritable = new HiveDecimalWritable();
}
/** * Create DecimalTypeInfo from input precision/scale, adjusting if necessary to fit max precision * @param precision precision value before adjustment * @param scale scale value before adjustment * @return */ protected DecimalTypeInfo adjustPrecScale(int precision, int scale) { // Assumptions: // precision >= scale // scale >= 0 if (precision <= HiveDecimal.MAX_PRECISION) { // Adjustment only needed when we exceed max precision return new DecimalTypeInfo(precision, scale); } // Precision/scale exceed maximum precision. Result must be adjusted to HiveDecimal.MAX_PRECISION. // See https://blogs.msdn.microsoft.com/sqlprogrammability/2006/03/29/multiplication-and-division-with-numerics/ int intDigits = precision - scale; // If original scale less than 6, use original scale value; otherwise preserve at least 6 fractional digits int minScaleValue = Math.min(scale, MINIMUM_ADJUSTED_SCALE); int adjustedScale = HiveDecimal.MAX_PRECISION - intDigits; adjustedScale = Math.max(adjustedScale, minScaleValue); return new DecimalTypeInfo(HiveDecimal.MAX_PRECISION, adjustedScale); }
/**
 * Produces the final sum, clamped to the declared output precision/scale.
 * Returns null when no rows were aggregated or the sum overflowed.
 */
@Override
public Object terminate(AggregationBuffer agg) throws HiveException {
  SumHiveDecimalWritableAgg buffer = (SumHiveDecimalWritableAgg) agg;
  // Nothing aggregated, or the running sum already overflowed mid-stream.
  if (buffer.empty || buffer.sum == null || !buffer.sum.isSet()) {
    return null;
  }
  // Enforce the output type's precision/scale in place; this unsets the
  // writable when the value cannot be represented.
  DecimalTypeInfo outType = (DecimalTypeInfo) outputOI.getTypeInfo();
  buffer.sum.mutateEnforcePrecisionScale(outType.getPrecision(), outType.getScale());
  if (!buffer.sum.isSet()) {
    LOG.warn("The sum of a column with data type HiveDecimal is out of range");
    return null;
  }
  result.set(buffer.sum);
  return result;
}
private static DecimalTypeInfo getOutputTypeInfo(DecimalTypeInfo inputTypeInfo, int dec) { int prec = inputTypeInfo.precision(); int scale = inputTypeInfo.scale(); int intParts = prec - scale; // If we are rounding, we may introduce one more integer digit. int newIntParts = dec < scale ? intParts + 1 : intParts; int newScale = dec < 0 ? 0 : Math.min(dec, HiveDecimal.MAX_SCALE); int newPrec = Math.min(newIntParts + newScale, HiveDecimal.MAX_PRECISION); return TypeInfoFactory.getDecimalTypeInfo(newPrec, newScale); }
// NOTE(review): this fragment looks truncated by extraction -- the method body's opening
// brace is missing, the case label before "return createUnboundedVarcharType();" (likely
// STRING) is gone, and the closing braces are cut off. Restore from the original source
// before editing.
// Maps a Hive primitive type to the engine's Type, carrying length and
// precision/scale parameters through for the parameterized types.
public static Type getPrimitiveType(PrimitiveTypeInfo typeInfo)
  switch (typeInfo.getPrimitiveCategory()) {
    case BOOLEAN:
      return BOOLEAN;
      // NOTE(review): unreachable as written; a case label appears to be missing here.
      return createUnboundedVarcharType();
    case VARCHAR:
      return createVarcharType(((VarcharTypeInfo) typeInfo).getLength());
    case CHAR:
      return createCharType(((CharTypeInfo) typeInfo).getLength());
    case DATE:
      return DATE;
    case DECIMAL:
      DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
      return createDecimalType(decimalTypeInfo.precision(), decimalTypeInfo.scale());
    default:
      // Unmapped categories yield null; callers are expected to handle it.
      return null;
// NOTE(review): fragment starts mid-statement (the tail of an exception/message call) and
// the switch is cut off before it closes -- restore the full method before editing.
  "Unsupported non-primitive type " + typeString);
// Translates a Hive primitive category into a TypeDesc, preserving char/varchar
// length and decimal precision/scale.
switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) {
  case BOOLEAN:
    typeDesc = new TypeDesc(TypeDesc.Type.BOOLEAN);
    // NOTE(review): no break here, so BOOLEAN falls through into CHAR -- most likely a
    // lost line in extraction; verify against the original file.
  case CHAR:
    CharTypeInfo charTypeInfo = (CharTypeInfo) typeInfo;
    typeDesc = new TypeDesc(TypeDesc.Type.CHAR, charTypeInfo.getLength());
    break;
  case VARCHAR:
    VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) typeInfo;
    typeDesc = new TypeDesc(TypeDesc.Type.VARCHAR, varcharTypeInfo.getLength());
    break;
  case DATE:
    // NOTE(review): a DATE label guarding DECIMAL handling -- the DATE body and a
    // DECIMAL case label were presumably lost; this cast would fail for a real DATE.
    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
    typeDesc = new TypeDesc(TypeDesc.Type.DECIMAL, decimalTypeInfo.getPrecision(),
        decimalTypeInfo.getScale());
    break;
  default:
// NOTE(review): heavily truncated Teradata binary-serde deserialization fragment -- the
// if/else pairs are missing their opening halves, ctype/dtype/isNull are declared in lost
// lines, and several case labels (CHAR, DECIMAL) are gone. Recover the original method
// before making any changes here.
case PRIMITIVE:
  PrimitiveTypeInfo ptype = (PrimitiveTypeInfo) type;
  switch (ptype.getPrimitiveCategory()) {
    case VARCHAR: // Teradata Type: VARCHAR
      String st = in.readVarchar();
      } else {
        // Reuse the caller-provided writable when available to avoid allocation.
        HiveVarcharWritable r = reuse == null ?
            new HiveVarcharWritable() : (HiveVarcharWritable) reuse;
        r.set(st, ((VarcharTypeInfo) type).getLength());
        return r;
      // NOTE(review): ctype is not declared in this fragment -- presumably a CHAR case
      // label and its CharTypeInfo cast were lost above this line.
      int length = ctype.getLength();
      String c = in.readChar(length * getCharByteNum(charCharset));
      if (isNull) {
      // NOTE(review): dtype is also undeclared -- likely a DECIMAL case label with a
      // DecimalTypeInfo cast was dropped.
      int precision = dtype.precision();
      int scale = dtype.scale();
      HiveDecimal hd = in.readDecimal(scale, getDecimalByteNum(precision));
      if (isNull) {
        return null;
      } else {
        // Reuse the caller-provided writable when available to avoid allocation.
        HiveDecimalWritable r = (reuse == null ?
            new HiveDecimalWritable() : (HiveDecimalWritable) reuse);
        r.set(hd);
        return r;
  throw new SerDeException("Unrecognized type: " + ptype.getPrimitiveCategory());
/**
 * Renders this expression's parameters for explain output, showing the
 * decimal64 long alongside its decoded decimal form.
 */
@Override
public String vectorExpressionParameters() {
  // Expand the raw decimal64 long using the input column's scale so both
  // representations appear in the output.
  DecimalTypeInfo inputType = (DecimalTypeInfo) inputTypeInfos[0];
  HiveDecimalWritable decoded = new HiveDecimalWritable();
  decoded.deserialize64(value, inputType.scale());
  StringBuilder sb = new StringBuilder(getColumnParamString(0, colNum));
  sb.append(", decimal64Val ").append(value);
  sb.append(", decimalVal ").append(decoded.toString());
  return sb.toString();
}
// NOTE(review): truncated fragment -- BOOLEAN/BYTE labels fall straight into DECIMAL_64
// vector allocation and tInfo is undeclared here, so the intermediate case labels
// (including a DECIMAL one that defines tInfo) were evidently lost; the else branch is
// also missing its closing brace. Restore the original before editing.
switch(primitiveTypeInfo.getPrimitiveCategory()) {
  case BOOLEAN:
  case BYTE:
    // DECIMAL_64 columns use the compact long-backed vector; otherwise fall back
    // to the full HiveDecimal-backed vector.
    if (dataTypePhysicalVariation == DataTypePhysicalVariation.DECIMAL_64) {
      return new Decimal64ColumnVector(VectorizedRowBatch.DEFAULT_SIZE,
          tInfo.precision(), tInfo.scale());
    } else {
      return new DecimalColumnVector(VectorizedRowBatch.DEFAULT_SIZE,
          tInfo.precision(), tInfo.scale());
  default:
    // NOTE(review): "Vectorizaton" is misspelled in this runtime message; fix at the
    // source only after checking nothing matches on the exact string.
    throw new RuntimeException("Vectorizaton is not supported for datatype:"
        + primitiveTypeInfo.getPrimitiveCategory());
// NOTE(review): truncated fragment of a Hive-to-Calcite type conversion -- the VOID case
// is missing its break and a DECIMAL case label before the DecimalTypeInfo cast, the
// VARCHAR case has lost its body (it falls into what is probably the default's throw),
// and the method's tail is cut off. Recover the full method before editing.
public static RelDataType convert(PrimitiveTypeInfo type, RelDataTypeFactory dtFactory) {
  RelDataType convertedType = null;
  switch (type.getPrimitiveCategory()) {
    case VOID:
      // Hive VOID maps to the SQL NULL type.
      convertedType = dtFactory.createSqlType(SqlTypeName.NULL);
      DecimalTypeInfo dtInf = (DecimalTypeInfo) type;
      convertedType = dtFactory
          .createSqlType(SqlTypeName.DECIMAL, dtInf.precision(), dtInf.scale());
      break;
    case VARCHAR:
      throw new RuntimeException("Unsupported Type : " + type.getTypeName());
@Override public HiveDecimalWritable getWritableConstantValue() { // We need to enforce precision/scale here. DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) typeInfo; HiveDecimalWritable result = new HiveDecimalWritable(value); result.mutateEnforcePrecisionScale(decTypeInfo.precision(), decTypeInfo.scale()); if (!result.isSet()) { return null; } return result; }
// NOTE(review): truncated fragment -- each if is missing its else/closing brace, and
// varcharType/charType/decimalType/elementType are declared in lost surrounding lines
// (this looks like per-category branches of a larger type-mapping method). Restore the
// original before editing.
int varcharLength = varcharType.getLength();
if (varcharLength <= HiveVarchar.MAX_VARCHAR_LENGTH) {
  return getVarcharTypeInfo(varcharLength);
int charLength = charType.getLength();
if (charLength <= HiveChar.MAX_CHAR_LENGTH) {
  return getCharTypeInfo(charLength);
// Decimal carries precision/scale straight through.
return new DecimalTypeInfo(decimalType.getPrecision(), decimalType.getScale());
return getListTypeInfo(elementType);
// NOTE(review): truncated serializer fragment -- statements continue after an
// unconditional "return;", so intervening case labels/branches were lost, and
// hv/hc/dtype/poi/objectForField come from missing surrounding lines. Also note the
// mixed accessors on dtype (precision() vs getPrecision()); both exist on
// DecimalTypeInfo in Hive, but pick one consistently when restoring. Recover the
// original method before editing.
assert ((VarcharTypeInfo) ti).getLength() >= hv.getHiveVarchar().getCharacterLength();
assert ((CharTypeInfo) ti).getLength() >= hc.getHiveChar().getCharacterLength();
// Fixed-width char: bytes written = per-char byte width * declared length.
out.writeChar(hc, getCharByteNum(charCharset) * ((CharTypeInfo) ti).getLength());
return;
int precision = dtype.precision();
int scale = dtype.scale();
HiveDecimalObjectInspector hdoi = (HiveDecimalObjectInspector) poi;
HiveDecimalWritable hd = hdoi.getPrimitiveWritableObject(objectForField);
assert (dtype.getPrecision() >= hd.precision());
// NOTE(review): truncated fragment -- CHAR and VARCHAR labels fall directly into a
// DecimalTypeInfo cast, which would throw ClassCastException for real char/varchar
// inputs; their own bodies (setting the length parameter) were presumably lost in
// extraction. Restore the original before editing.
switch (primitiveTypeInfo.getPrimitiveCategory()) {
  case CHAR:
  case VARCHAR:
  case DECIMAL:
    // Decimal carries precision and scale into the serialized type descriptor.
    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
    typeBuilder.setPrecision(decimalTypeInfo.getPrecision()).setScale(decimalTypeInfo.getScale());
    break;
  default:
/**
 * Returns the display precision for a primitive type.
 * Need to keep consistent with JdbcColumn.columnPrecision
 */
public static int getPrecisionForType(PrimitiveTypeInfo typeInfo) {
  switch (typeInfo.getPrimitiveCategory()) {
    case DECIMAL:
      // Decimal reports its own declared precision.
      return ((DecimalTypeInfo) typeInfo).precision();
    // Integral widths: max decimal digits representable by each type.
    case BYTE:
      return 3;
    case SHORT:
      return 5;
    case INT:
      return 10;
    case LONG:
      return 19;
    // Floating point: significant decimal digits.
    case FLOAT:
      return 7;
    case DOUBLE:
      return 15;
    case VOID:
      return 1;
    default:
      return HiveDecimal.SYSTEM_DEFAULT_PRECISION;
  }
}
/**
 * Returns the display scale for a primitive type.
 * Need to keep consistent with JdbcColumn.columnScale()
 */
public static int getScaleForType(PrimitiveTypeInfo typeInfo) {
  switch (typeInfo.getPrimitiveCategory()) {
    case DECIMAL:
      // Decimal reports its own declared scale.
      return ((DecimalTypeInfo) typeInfo).scale();
    // Integral (and VOID) values have no fractional digits.
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
    case VOID:
      return 0;
    // Floating point: maximum fractional digits shown.
    case FLOAT:
      return 7;
    case DOUBLE:
      return 15;
    default:
      return HiveDecimal.SYSTEM_DEFAULT_SCALE;
  }
}
/**
 * Returns the shared DecimalTypeInfo instance for the given precision and scale.
 */
public static DecimalTypeInfo getDecimalTypeInfo(int precision, int scale) {
  // The qualified name ("decimal(p,s)") is the interning key for primitive type infos.
  return (DecimalTypeInfo) getPrimitiveTypeInfo(
      DecimalTypeInfo.getQualifiedName(precision, scale));
}
// NOTE(review): truncated fragment -- the switch's other cases and closing braces are
// missing, and INT/SHORT labels fall into decimal handling, so a DECIMAL case label is
// probably missing above the cast. Restore from the original file before editing.
rowVal = ((ObjectWritable)value).get();
switch (hiveColumnTypes[i].getPrimitiveCategory()) {
  case INT:
  case SHORT:
    int scale = ((DecimalTypeInfo)hiveColumnTypes[i]).getScale();
    rowVal = HiveDecimal.create(rowVal.toString());
    // NOTE(review): if HiveDecimal.setScale returns a new instance (HiveDecimal is
    // immutable in Hive), this result is discarded and the scale never applied --
    // verify and assign the return value when restoring.
    ((HiveDecimal)rowVal).setScale(scale, BigDecimal.ROUND_HALF_EVEN);
@Override public HiveDecimal getPrimitiveJavaObject(Object o) { if (o == null) { return null; } DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; // We do not want to modify the writable provided by the object o since it is not a copy. HiveDecimalWritable decWritable = ((LazyHiveDecimal)o).getWritableObject(); HiveDecimalWritable result = HiveDecimalWritable.enforcePrecisionScale( decWritable, decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale()); return (result != null && result.isSet() ? result.getHiveDecimal() : null); }