private static Decimal convertToThriftDecimal(HiveDecimal d) {
  return DecimalUtils.getDecimal(ByteBuffer.wrap(d.unscaledValue().toByteArray()), (short) d.scale());
}
@Override
public int scale() {
  if (value == null) {
    return super.scale();
  }
  return value.getHiveDecimal().scale();
}
private Decimal convertToThriftDecimal(HiveDecimal d) {
  return new Decimal(ByteBuffer.wrap(d.unscaledValue().toByteArray()), (short) d.scale());
}
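Both helpers decompose a HiveDecimal into its unscaled BigInteger plus a scale and hand those two parts to the Thrift Decimal. A minimal JDK-only sketch of that decomposition and the round trip it relies on (class and variable names are illustrative):

import java.math.BigDecimal;
import java.math.BigInteger;

// Sketch: a decimal value is fully described by its unscaled value and
// scale, e.g. 123.45 becomes (unscaled=12345, scale=2). This is what
// wrapping unscaledValue().toByteArray() in a ByteBuffer preserves.
public class DecimalRoundTripSketch {
  public static void main(String[] args) {
    BigDecimal original = new BigDecimal("123.45");
    BigInteger unscaled = original.unscaledValue(); // 12345
    int scale = original.scale();                   // 2
    // Rebuilding from the two components restores the identical value.
    BigDecimal rebuilt = new BigDecimal(unscaled, scale);
    System.out.println(original.equals(rebuilt));   // true
  }
}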
private static DecimalTypeInfo adjustType(HiveDecimal hd) {
  // Note: the normalize() call with rounding in HiveDecimal will currently reduce the
  // precision and scale of the value by throwing away trailing zeroes. This may or may
  // not be desirable for the literals; however, this used to be the default behavior
  // for explicit decimal literals (e.g. 1.0BD), so we keep this behavior for now.
  int prec = 1;
  int scale = 0;
  if (hd != null) {
    prec = hd.precision();
    scale = hd.scale();
  }
  DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(prec, scale);
  return typeInfo;
}
private void parseDecimalColumn(int column) {
    loaded[column] = true;
    Object fieldData = rowInspector.getStructFieldData(rowData, structFields[column]);
    if (fieldData == null) {
        nulls[column] = true;
    }
    else {
        Object fieldValue = ((PrimitiveObjectInspector) fieldInspectors[column]).getPrimitiveJavaObject(fieldData);
        checkState(fieldValue != null, "fieldValue should not be null");
        HiveDecimal decimal = (HiveDecimal) fieldValue;
        DecimalType columnType = (DecimalType) types[column];
        BigInteger unscaledDecimal = rescale(decimal.unscaledValue(), decimal.scale(), columnType.getScale());
        if (columnType.isShort()) {
            longs[column] = unscaledDecimal.longValue();
        }
        else {
            slices[column] = Decimals.encodeUnscaledValue(unscaledDecimal);
        }
        nulls[column] = false;
    }
}
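The rescale call shifts the unscaled value from the decimal's own scale to the column type's scale before it is encoded. A hedged sketch of what such a rescale amounts to, assuming a plain power-of-ten shift; the real helper must also round when the scale is reduced, and the class and method names here are illustrative:

import java.math.BigInteger;

// Hypothetical sketch of unscaled-value rescaling: moving a value from
// fromScale to toScale multiplies or divides by a power of ten.
public final class RescaleSketch {
  static BigInteger rescale(BigInteger unscaled, int fromScale, int toScale) {
    if (toScale >= fromScale) {
      // Increasing scale appends zero digits: 12345 @ scale 2 -> 1234500 @ scale 4.
      return unscaled.multiply(BigInteger.TEN.pow(toScale - fromScale));
    }
    // Reducing scale drops digits; a real implementation must round here.
    return unscaled.divide(BigInteger.TEN.pow(fromScale - toScale));
  }

  public static void main(String[] args) {
    // 123.45 stored as unscaled 12345 at scale 2, rescaled to scale 4.
    System.out.println(rescale(BigInteger.valueOf(12345), 2, 4)); // 1234500
  }
}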
/**
 * Allocate scratchLongs with HiveDecimal.SCRATCH_LONGS_LEN longs.
 * And, allocate scratch buffer with HiveDecimal.SCRATCH_BUFFER_LEN_BIG_INTEGER_BYTES bytes.
 *
 * @param byteStream
 * @param dec
 * @param scratchLongs
 * @param scratchBytes
 */
public static void writeToByteStream(
    RandomAccessOutput byteStream,
    HiveDecimal dec,
    long[] scratchLongs,
    byte[] scratchBytes) {
  LazyBinaryUtils.writeVInt(byteStream, dec.scale());

  // Convert decimal into the scratch buffer without allocating a byte[] each time
  // for better performance.
  int byteLength = dec.bigIntegerBytes(scratchLongs, scratchBytes);
  if (byteLength == 0) {
    throw new RuntimeException("Decimal to binary conversion failed");
  }
  LazyBinaryUtils.writeVInt(byteStream, byteLength);
  byteStream.write(scratchBytes, 0, byteLength);
}
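Per the Javadoc, callers are expected to allocate the two scratch arrays once and reuse them across calls, which is what makes the no-allocation fast path work. A sketch of that calling pattern; that writeToByteStream lives in LazyBinarySerDe is an assumption (it uses LazyBinaryUtils), and the wrapper class and field names are illustrative:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;

// Hypothetical caller: the scratch arrays are sized once from the constants
// named in the Javadoc above and reused for every value, so no per-row
// byte[] allocation takes place.
public class DecimalStreamWriterSketch {
  private final long[] scratchLongs = new long[HiveDecimal.SCRATCH_LONGS_LEN];
  private final byte[] scratchBytes =
      new byte[HiveDecimal.SCRATCH_BUFFER_LEN_BIG_INTEGER_BYTES];

  void writeDecimal(RandomAccessOutput byteStream, HiveDecimal dec) {
    // Same scratch buffers on every call; writeToByteStream fills them.
    LazyBinarySerDe.writeToByteStream(byteStream, dec, scratchLongs, scratchBytes);
  }
}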
int hiveScale = writable.getHiveDecimal().scale();
BigInteger bigInteger = writable.getHiveDecimal().unscaledValue();
if (hiveScale < scale) {
private TypeInfo getDecimalScalarTypeInfo(Object scalarObject) {
  HiveDecimal dec = (HiveDecimal) scalarObject;
  int precision = dec.precision();
  int scale = dec.scale();
  return new DecimalTypeInfo(precision, scale);
}
public static ExprNodeConstantDesc createDecimal(String strVal, boolean notNull) {
  // Note: the normalize() call with rounding in HiveDecimal will currently reduce the
  // precision and scale of the value by throwing away trailing zeroes. This may or may
  // not be desirable for the literals; however, this used to be the default behavior
  // for explicit decimal literals (e.g. 1.0BD), so we keep this behavior for now.
  HiveDecimal hd = HiveDecimal.create(strVal);
  if (notNull && hd == null) {
    return null;
  }
  int prec = 1;
  int scale = 0;
  if (hd != null) {
    prec = hd.precision();
    scale = hd.scale();
  }
  DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(prec, scale);
  return new ExprNodeConstantDesc(typeInfo, hd);
}
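The trailing-zero trimming the comment describes can be observed directly from HiveDecimal.create. A small sketch, assuming a Hive version with the HIVE-8745 behavior referenced in the tests below:

import org.apache.hadoop.hive.common.type.HiveDecimal;

// On versions with the HIVE-8745 behavior, create() trims trailing zeroes,
// so the explicit literal "1.0" is typed as decimal(1,0).
public class DecimalLiteralSketch {
  public static void main(String[] args) {
    HiveDecimal hd = HiveDecimal.create("1.0");
    System.out.println(hd.precision() + "," + hd.scale()); // 1,0
    System.out.println(hd);                                // 1
  }
}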
private DecimalTypeInfo decimalTypeFromCastToDecimal(ExprNodeDesc exprNodeDesc,
    DecimalTypeInfo returnDecimalType) throws HiveException {
  if (exprNodeDesc instanceof ExprNodeConstantDesc) {
    // Return a constant vector expression
    Object constantValue = ((ExprNodeConstantDesc) exprNodeDesc).getValue();
    HiveDecimal decimalValue = castConstantToDecimal(constantValue, exprNodeDesc.getTypeInfo());
    if (decimalValue == null) {
      // Return something.
      return returnDecimalType;
    }
    return new DecimalTypeInfo(decimalValue.precision(), decimalValue.scale());
  }
  String inputType = exprNodeDesc.getTypeString();
  if (isIntFamily(inputType) || isFloatFamily(inputType)
      || decimalTypePattern.matcher(inputType).matches()
      || isStringFamily(inputType)
      || inputType.equals("timestamp")) {
    return returnDecimalType;
  }
  return null;
}
@Test
public void testSign() {
  LongColumnVector lcv = new LongColumnVector(4);
  HiveDecimal d1 = HiveDecimal.create("19.56778");
  DecimalUtil.sign(0, d1, lcv);
  Assert.assertEquals(1, lcv.vector[0]);
  HiveDecimal d2 = HiveDecimal.create("-25.34567");
  DecimalUtil.sign(0, d2, lcv);
  Assert.assertEquals(-1, lcv.vector[0]);
  HiveDecimal d3 = HiveDecimal.create("0.00000");
  Assert.assertEquals(0, d3.scale());
  DecimalUtil.sign(0, d3, lcv);
  Assert.assertEquals(0, lcv.vector[0]);
}
@Override
void write(Object obj) throws IOException {
  super.write(obj);
  if (obj != null) {
    HiveDecimal decimal = ((HiveDecimalObjectInspector) inspector).getPrimitiveJavaObject(obj);
    if (decimal == null) {
      return;
    }
    SerializationUtils.writeBigInteger(valueStream, decimal.unscaledValue());
    scaleStream.write(decimal.scale());
    indexStatistics.updateDecimal(decimal);
    if (createBloomFilter) {
      bloomFilter.addString(decimal.toString());
    }
  }
}
Assert.assertEquals(0, d2.scale()); HiveDecimal expected2 = HiveDecimal.create("23"); DecimalUtil.ceiling(0, d2, dcv); Assert.assertEquals(0, d4.scale()); HiveDecimal expected4 = HiveDecimal.create("-17"); DecimalUtil.ceiling(0, d4, dcv); Assert.assertEquals(1, d5.scale()); HiveDecimal expected5 = HiveDecimal.create("0"); DecimalUtil.ceiling(0, d5, dcv); Assert.assertEquals(1, d6.scale()); HiveDecimal expected6 = HiveDecimal.create("1"); DecimalUtil.ceiling(0, d6, dcv);
Assert.assertEquals(0, d2.scale()); HiveDecimal expected2 = HiveDecimal.create("23"); DecimalUtil.floor(0, d2, dcv); Assert.assertEquals(0, d4.scale()); HiveDecimal expected4 = HiveDecimal.create("-17"); DecimalUtil.floor(0, d4, dcv); Assert.assertEquals(1, d5.scale()); HiveDecimal expected5 = HiveDecimal.create("-1"); DecimalUtil.floor(0, d5, dcv); Assert.assertEquals(1, d6.scale()); HiveDecimal expected6 = HiveDecimal.create("0"); DecimalUtil.floor(0, d6, dcv);
  factor = 1;
} else {
  factor = dec.rawPrecision() - dec.scale();
@Test
public void testNegate() {
  DecimalColumnVector dcv = new DecimalColumnVector(4, 20, 13);
  HiveDecimal d1 = HiveDecimal.create("19.56778");
  HiveDecimal expected1 = HiveDecimal.create("-19.56778");
  DecimalUtil.negate(0, d1, dcv);
  Assert.assertEquals(0, expected1.compareTo(dcv.vector[0].getHiveDecimal()));
  HiveDecimal d2 = HiveDecimal.create("-25.34567");
  HiveDecimal expected2 = HiveDecimal.create("25.34567");
  DecimalUtil.negate(0, d2, dcv);
  Assert.assertEquals(0, expected2.compareTo(dcv.vector[0].getHiveDecimal()));

  // As of HIVE-8745, these decimal values should be trimmed of trailing zeros.
  HiveDecimal d3 = HiveDecimal.create("0.00000");
  Assert.assertEquals(0, d3.scale());
  HiveDecimal expected3 = HiveDecimal.create("0");
  DecimalUtil.negate(0, d3, dcv);
  Assert.assertEquals(0, expected3.compareTo(dcv.vector[0].getHiveDecimal()));
}
int scale = dec.scale();