private boolean checkTypeInfoForDecimal64(TypeInfo typeInfo) {
  if (typeInfo instanceof DecimalTypeInfo) {
    DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
    return HiveDecimalWritable.isPrecisionDecimal64(decimalTypeInfo.precision());
  }
  return false;
}
@Override
public boolean accept(TypeInfo other) {
  if (other == null || getClass() != other.getClass()) {
    return false;
  }
  DecimalTypeInfo dti = (DecimalTypeInfo) other;
  // Make sure "this" has enough integer room to accommodate other's integer digits.
  return this.precision() - this.scale() >= dti.precision() - dti.scale();
}
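A minimal sketch (not from the Hive source) of how the integer-room check above plays out; the variable names are illustrative only:

// A decimal(12,2) target has 10 integer digits of room, a decimal(10,4) value has 6.
DecimalTypeInfo target = TypeInfoFactory.getDecimalTypeInfo(12, 2);
DecimalTypeInfo narrow = TypeInfoFactory.getDecimalTypeInfo(10, 4);
boolean ok = target.accept(narrow);   // true:  10 >= 6
boolean bad = narrow.accept(target);  // false:  6 < 10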
private static DecimalTypeInfo getOutputTypeInfo(DecimalTypeInfo inputTypeInfo, int dec) {
  int prec = inputTypeInfo.precision();
  int scale = inputTypeInfo.scale();
  int intParts = prec - scale;
  // If we are rounding, we may introduce one more integer digit.
  int newIntParts = dec < scale ? intParts + 1 : intParts;
  int newScale = dec < 0 ? 0 : Math.min(dec, HiveDecimal.MAX_SCALE);
  int newPrec = Math.min(newIntParts + newScale, HiveDecimal.MAX_PRECISION);
  return TypeInfoFactory.getDecimalTypeInfo(newPrec, newScale);
}
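A hedged worked trace of the derivation above (the inputs are illustrative, not taken from the source):

// For an input of decimal(10,4): prec = 10, scale = 4, intParts = 6.
//   round(x, 2)  -> dec(2) < scale(4), so newIntParts = 7;
//                   newScale = min(2, 38) = 2; newPrec = min(7 + 2, 38) = 9   => decimal(9,2)
//   round(x, -1) -> newScale = 0; newIntParts = 7;
//                   newPrec = min(7 + 0, 38) = 7                              => decimal(7,0)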
private void init() {
  StructTypeInfo structTypeInfo = (StructTypeInfo) outputTypeInfo;
  outputDecimalTypeInfo =
      (DecimalTypeInfo) structTypeInfo.getAllStructFieldTypeInfos().get(AVERAGE_SUM_FIELD_INDEX);
  sumScale = outputDecimalTypeInfo.scale();
  sumPrecision = outputDecimalTypeInfo.precision();
}
private static DecimalTypeInfo getOutputTypeInfo(DecimalTypeInfo inputTypeInfo, int dec) {
  int prec = inputTypeInfo.precision();
  int scale = inputTypeInfo.scale();
  int intParts = prec - scale;
  // If we are rounding, we may introduce one more integer digit.
  int newIntParts = dec < scale ? intParts + 1 : intParts;
  int newScale = dec < 0 ? 0 : Math.min(dec, HiveDecimal.MAX_SCALE);
  int newPrec = Math.min(newIntParts + newScale, HiveDecimal.MAX_PRECISION);
  return TypeInfoFactory.getDecimalTypeInfo(newPrec, newScale);
}
private void init() {
  outputDecimalTypeInfo = (DecimalTypeInfo) outputTypeInfo;
  sumScale = outputDecimalTypeInfo.scale();
  sumPrecision = outputDecimalTypeInfo.precision();
  tempDecWritable = new HiveDecimalWritable();
}
private void init() {
  outputDecimalTypeInfo = (DecimalTypeInfo) outputTypeInfo;
  sumScale = outputDecimalTypeInfo.scale();
  sumPrecision = outputDecimalTypeInfo.precision();
  tempDecWritable = new HiveDecimalWritable();
}
public static HiveDecimal enforcePrecisionScale(HiveDecimal dec, DecimalTypeInfo typeInfo) {
  return HiveDecimal.enforcePrecisionScale(dec, typeInfo.precision(), typeInfo.scale());
}
public LazyHiveDecimal(LazyHiveDecimalObjectInspector oi) {
  super(oi);
  DecimalTypeInfo typeInfo = (DecimalTypeInfo) oi.getTypeInfo();
  if (typeInfo == null) {
    throw new RuntimeException("Decimal type used without type params");
  }
  precision = typeInfo.precision();
  scale = typeInfo.scale();
  data = new HiveDecimalWritable();
}
private void initPartialResultInspector() {
  // The output type of the vectorized partial aggregate must match the
  // expected type for the row-mode aggregation.
  // For decimal, the type is "same number of integer digits and 4 more decimal digits".
  DecimalTypeInfo dtiSum = GenericUDAFAverage.deriveSumFieldTypeInfo(inputPrecision, inputScale);
  this.sumScale = (short) dtiSum.scale();
  this.sumPrecision = (short) dtiSum.precision();

  List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
  foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
  foi.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(dtiSum));

  List<String> fname = new ArrayList<String>();
  fname.add("count");
  fname.add("sum");

  soi = ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
}
private RelDataType getRelDataType(DecimalTypeInfo decimalTypeInfo) {
  return typeFactory.createSqlType(
      SqlTypeName.DECIMAL, decimalTypeInfo.precision(), decimalTypeInfo.scale());
}
LazyBinaryHiveDecimal(WritableHiveDecimalObjectInspector oi) {
  super(oi);
  DecimalTypeInfo typeInfo = (DecimalTypeInfo) oi.getTypeInfo();
  this.precision = typeInfo.precision();
  this.scale = typeInfo.scale();
  data = new HiveDecimalWritable();
}
@Override
public void init(AggregationDesc desc) throws HiveException {
  ExprNodeDesc inputExpr = desc.getParameters().get(0);
  DecimalTypeInfo tiInput = (DecimalTypeInfo) inputExpr.getTypeInfo();
  this.inputScale = (short) tiInput.scale();
  this.inputPrecision = (short) tiInput.precision();

  initPartialResultInspector();
}
public static long getDecimal64AbsMaxFromDecimalTypeString(String typeString) {
  TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(typeString);
  if (!(typeInfo instanceof DecimalTypeInfo)) {
    throw new RuntimeException(
        "Expected decimal type but found " + typeInfo.toString());
  }
  DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
  final int precision = decimalTypeInfo.precision();
  if (!HiveDecimalWritable.isPrecisionDecimal64(precision)) {
    throw new RuntimeException(
        "Expected decimal type " + typeInfo.toString() +
        " to have a decimal64 precision (i.e. <= " +
        HiveDecimalWritable.DECIMAL64_DECIMAL_DIGITS + ")");
  }
  return HiveDecimalWritable.getDecimal64AbsMax(precision);
}
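A hypothetical usage sketch of the method above (the exact returned value is an assumption about the decimal64 abs-max table):

// "decimal(10,2)" has precision 10 <= DECIMAL64_DECIMAL_DIGITS, so the call succeeds and
// returns the largest unscaled magnitude that fits in 10 digits (presumably 9_999_999_999L);
// a type such as "decimal(20,2)" exceeds the decimal64 limit and triggers the RuntimeException.
long absMax = getDecimal64AbsMaxFromDecimalTypeString("decimal(10,2)");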
@Override
public HiveDecimalWritable getWritableConstantValue() {
  // We need to enforce precision/scale here.
  DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) typeInfo;
  HiveDecimalWritable result = new HiveDecimalWritable(value);
  result.mutateEnforcePrecisionScale(decTypeInfo.precision(), decTypeInfo.scale());
  if (!result.isSet()) {
    return null;
  }
  return result;
}
public static HiveDecimalWritable enforcePrecisionScale(HiveDecimalWritable writable,
    DecimalTypeInfo typeInfo) {
  if (writable == null) {
    return null;
  }
  HiveDecimalWritable result = new HiveDecimalWritable(writable);
  result.mutateEnforcePrecisionScale(typeInfo.precision(), typeInfo.scale());
  return (result.isSet() ? result : null);
}
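A minimal sketch of the null-on-overflow contract above, assuming it is called from the same class (the values are illustrative only):

HiveDecimalWritable w = new HiveDecimalWritable(HiveDecimal.create("12345.678"));
DecimalTypeInfo narrow = TypeInfoFactory.getDecimalTypeInfo(5, 2);  // only 3 integer digits of room
HiveDecimalWritable fitted = enforcePrecisionScale(w, narrow);      // null: 12345 needs 5 integer digits
DecimalTypeInfo wide = TypeInfoFactory.getDecimalTypeInfo(9, 2);
HiveDecimalWritable rounded = enforcePrecisionScale(w, wide);       // 12345.68, rounded to scale 2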
private Binary decimalToBinary(final HiveDecimal hiveDecimal, final DecimalTypeInfo decimalTypeInfo) {
  int prec = decimalTypeInfo.precision();
  int scale = decimalTypeInfo.scale();
  byte[] decimalBytes = hiveDecimal.setScale(scale).unscaledValue().toByteArray();

  // Estimated number of bytes needed.
  int precToBytes = ParquetHiveSerDe.PRECISION_TO_BYTE_COUNT[prec - 1];
  if (precToBytes == decimalBytes.length) {
    // No padding needed.
    return Binary.fromByteArray(decimalBytes);
  }

  byte[] tgt = new byte[precToBytes];
  if (hiveDecimal.signum() == -1) {
    // For negative number, initializing bits to 1.
    for (int i = 0; i < precToBytes; i++) {
      tgt[i] |= 0xFF;
    }
  }
  System.arraycopy(decimalBytes, 0, tgt, precToBytes - decimalBytes.length, decimalBytes.length);

  // Padding leading zeroes/ones.
  return Binary.fromByteArray(tgt);
}
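A hedged trace of the sign-extension padding above (the two-byte width for precision 3 is an assumption about the byte-count table):

// decimal(3,0) maps to a 2-byte fixed-length binary, so:
//   value  5 -> unscaled bytes [0x05] -> padded to [0x00, 0x05]
//   value -1 -> unscaled bytes [0xFF] -> padded to [0xFF, 0xFF]
// The pad bytes repeat the sign bit, so the two's-complement value is preserved.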
case DECIMAL:
  DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo;
  return createDecimalType(decimalTypeInfo.precision(), decimalTypeInfo.scale());
default:
  return null;
public static TypeQualifiers fromTypeInfo(PrimitiveTypeInfo pti) {
  TypeQualifiers result = null;
  if (pti instanceof VarcharTypeInfo) {
    result = new TypeQualifiers();
    result.setCharacterMaximumLength(((VarcharTypeInfo) pti).getLength());
  } else if (pti instanceof CharTypeInfo) {
    result = new TypeQualifiers();
    result.setCharacterMaximumLength(((CharTypeInfo) pti).getLength());
  } else if (pti instanceof DecimalTypeInfo) {
    result = new TypeQualifiers();
    result.setPrecision(((DecimalTypeInfo) pti).precision());
    result.setScale(((DecimalTypeInfo) pti).scale());
  }
  return result;
}