return literal; } else if (literal instanceof BigDecimal) { return ((BigDecimal) literal).doubleValue(); } else { throw new IllegalArgumentException("A predicate on a FLOAT column requires a floating " + return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal)); } else { throw new IllegalArgumentException("A predicate on a DECIMAL column requires a BigDecimal literal.");
/**
 * Divide the target object by right, and scale the result to newScale.
 *
 * Delegates to HiveDecimal so the rounding behavior matches HiveDecimal
 * exactly; this round-trip through BigDecimal is expensive, and a native
 * implementation could be faster in the future.
 */
public void divideDestructive(Decimal128 right, short newScale) {
  final HiveDecimal divisor = HiveDecimal.create(right.toBigDecimal());
  final HiveDecimal dividend = HiveDecimal.create(this.toBigDecimal());
  final HiveDecimal quotient = dividend.divide(divisor);
  /* A null quotient is surfaced as an ArithmeticException so the vectorized
   * code path can catch it and turn it into a SQL NULL value. */
  if (quotient == null) {
    throw new ArithmeticException("null divide result");
  }
  this.update(quotient.bigDecimalValue().toPlainString(), newScale);
  this.unscaledValue.throwIfExceedsTenToThirtyEight();
}
/**
 * Computes the positive modulo of {@code left} by {@code right}.
 * Returns null (SQL NULL) for a zero divisor or a null intermediate result.
 */
@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
  // Division by zero yields SQL NULL rather than an exception.
  if (right.compareTo(HiveDecimal.ZERO) == 0) {
    return null;
  }
  // ((left % right) + right) % right shifts a negative remainder into
  // the non-negative range.
  final HiveDecimal posMod = left.remainder(right).add(right).remainder(right);
  if (posMod == null) {
    return null;
  }
  decimalWritable.set(posMod);
  return decimalWritable;
}
break; case BYTE: calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType); break; case SHORT: calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType); break; case INT: calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value)); break; case LONG: value = ((HiveDecimal) value).bigDecimalValue(); } else if (value instanceof Decimal128) { value = ((Decimal128) value).toBigDecimal(); case CHAR: if (value instanceof HiveChar) { value = ((HiveChar) value).getValue(); } else { c = Calendar.getInstance(); c.setTimeInMillis(((Timestamp)value).getTime());
/**
 * Builds a two-column batch (decimal, timestamp) of random timestamps.
 * Each decimal cell holds the double encoding of the corresponding random
 * timestamp, and {@code doubleValues} is filled in-place with those doubles
 * so the caller can verify the conversion.
 */
private VectorizedRowBatch getBatchDecimalTimestamp(double[] doubleValues) {
  final int n = doubleValues.length;
  VectorizedRowBatch batch = new VectorizedRowBatch(2);
  DecimalColumnVector decCol = new DecimalColumnVector(n,
      HiveDecimal.SYSTEM_DEFAULT_PRECISION, HiveDecimal.SYSTEM_DEFAULT_SCALE);
  batch.cols[0] = decCol;
  batch.cols[1] = new TimestampColumnVector(n);
  decCol.noNulls = true;
  // Fixed seed keeps the test deterministic.
  Random rand = new Random(94830);
  for (int i = 0; i < n; i++) {
    Timestamp ts = new Timestamp(RandomTypeUtil.randomMillis(rand));
    ts.setNanos(RandomTypeUtil.randomNanos(rand));
    TimestampWritableV2 writable = new TimestampWritableV2(
        org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
            ts.getTime(), ts.getNanos()));
    doubleValues[i] = writable.getDouble();
    // new BigDecimal(double) deliberately keeps the exact binary value of
    // the double so the decimal cell round-trips the encoding precisely.
    decCol.set(i, HiveDecimal.create(new BigDecimal(doubleValues[i])));
  }
  batch.size = n;
  return batch;
}
private static final long DATE_DAYS = TimeUnit.MILLISECONDS.toDays(DATE_MILLIS_UTC); private static final String DATE_STRING = DateTimeFormat.forPattern("yyyy-MM-dd").withZoneUTC().print(DATE_MILLIS_UTC); private static final Date SQL_DATE = new Date(UTC.getMillisKeepLocal(DateTimeZone.getDefault(), DATE_MILLIS_UTC)); private static final DecimalType DECIMAL_TYPE_PRECISION_38 = DecimalType.createDecimalType(38, 16); private static final HiveDecimal WRITE_DECIMAL_PRECISION_2 = HiveDecimal.create(new BigDecimal("-1.2")); private static final HiveDecimal WRITE_DECIMAL_PRECISION_4 = HiveDecimal.create(new BigDecimal("12.3")); private static final HiveDecimal WRITE_DECIMAL_PRECISION_8 = HiveDecimal.create(new BigDecimal("-1234.5678")); private static final HiveDecimal WRITE_DECIMAL_PRECISION_17 = HiveDecimal.create(new BigDecimal("123456789.1234")); private static final HiveDecimal WRITE_DECIMAL_PRECISION_18 = HiveDecimal.create(new BigDecimal("-1234567890.12345678")); private static final HiveDecimal WRITE_DECIMAL_PRECISION_38 = HiveDecimal.create(new BigDecimal("1234567890123456789012.12345678")); .add(new TestColumn("p_date", javaDateObjectInspector, DATE_STRING, DATE_DAYS, true)) .add(new TestColumn("p_timestamp", javaTimestampObjectInspector, TIMESTAMP_STRING, TIMESTAMP, true)) .add(new TestColumn("p_decimal_precision_2", DECIMAL_INSPECTOR_PRECISION_2, WRITE_DECIMAL_PRECISION_2.toString(), EXPECTED_DECIMAL_PRECISION_2, true)) .add(new TestColumn("p_decimal_precision_4", DECIMAL_INSPECTOR_PRECISION_4, WRITE_DECIMAL_PRECISION_4.toString(), EXPECTED_DECIMAL_PRECISION_4, true)) .add(new TestColumn("p_decimal_precision_8", DECIMAL_INSPECTOR_PRECISION_8, WRITE_DECIMAL_PRECISION_8.toString(), EXPECTED_DECIMAL_PRECISION_8, true)) .add(new TestColumn("p_decimal_precision_17", DECIMAL_INSPECTOR_PRECISION_17, WRITE_DECIMAL_PRECISION_17.toString(), EXPECTED_DECIMAL_PRECISION_17, true)) .add(new TestColumn("p_decimal_precision_18", DECIMAL_INSPECTOR_PRECISION_18, 
WRITE_DECIMAL_PRECISION_18.toString(), EXPECTED_DECIMAL_PRECISION_18, true)) .add(new TestColumn("p_decimal_precision_38", DECIMAL_INSPECTOR_PRECISION_38, WRITE_DECIMAL_PRECISION_38.toString() + "BD", EXPECTED_DECIMAL_PRECISION_38, true)) .add(new TestColumn("t_boolean_false", javaBooleanObjectInspector, false, false)) .add(new TestColumn("t_date", javaDateObjectInspector, SQL_DATE, DATE_DAYS)) .add(new TestColumn("t_timestamp", javaTimestampObjectInspector, new Timestamp(TIMESTAMP), TIMESTAMP)) .add(new TestColumn("t_decimal_precision_2", DECIMAL_INSPECTOR_PRECISION_2, WRITE_DECIMAL_PRECISION_2, EXPECTED_DECIMAL_PRECISION_2)) .add(new TestColumn("t_decimal_precision_4", DECIMAL_INSPECTOR_PRECISION_4, WRITE_DECIMAL_PRECISION_4, EXPECTED_DECIMAL_PRECISION_4))
case DATE: return new ExprNodeConstantDesc(TypeInfoFactory.dateTypeInfo, new Date(((Calendar)literal.getValue()).getTimeInMillis())); case TIME: case TIMESTAMP: { Object value = literal.getValue3(); if (value instanceof Long) { value = new Timestamp((Long)value); case DECIMAL: return new ExprNodeConstantDesc(TypeInfoFactory.getDecimalTypeInfo(lType.getPrecision(), lType.getScale()), HiveDecimal.create((BigDecimal)literal.getValue3())); case VARCHAR: case CHAR: { BigDecimal monthsBd = (BigDecimal) literal.getValue(); return new ExprNodeConstantDesc(TypeInfoFactory.intervalYearMonthTypeInfo, new HiveIntervalYearMonth(monthsBd.intValue())); BigDecimal millisBd = (BigDecimal) literal.getValue(); BigDecimal secsBd = millisBd.divide(BigDecimal.valueOf(1000)); return new ExprNodeConstantDesc(TypeInfoFactory.intervalDayTimeTypeInfo, new HiveIntervalDayTime(secsBd));
/**
 * Feeds decimals 1..10 through the windowed average and checks each
 * expected running-average value (all ratios are exact binary fractions,
 * so new BigDecimal(double) is exact here).
 */
@Test
public void testHiveDecimal_3_4() throws HiveException {
  HiveDecimal[] inputs = new HiveDecimal[10];
  for (int i = 0; i < inputs.length; i++) {
    inputs[i] = HiveDecimal.create((long) (i + 1));
  }
  final double[] expectedAverages = {
      15.0 / 5, 21.0 / 6, 28.0 / 7, 36.0 / 8, 44.0 / 8,
      52.0 / 8, 49.0 / 7, 45.0 / 6, 40.0 / 5, 34.0 / 4 };
  HiveDecimal[] expected = new HiveDecimal[expectedAverages.length];
  for (int i = 0; i < expected.length; i++) {
    expected[i] = HiveDecimal.create(new BigDecimal(expectedAverages[i]));
  }
  avgHiveDecimal(Arrays.asList(inputs).iterator(), 10, 3, 4,
      Arrays.asList(expected).iterator());
}
/**
 * Builds a record covering the primitive types introduced in Hive 0.13:
 * decimal, char, varchar, date and timestamp.
 */
private static HCatRecord getHCat13TypesRecord() {
  List<Object> rec_hcat13types = new ArrayList<Object>(5);
  rec_hcat13types.add(HiveDecimal.create(new BigDecimal("123.45")));//prec 5, scale 2
  rec_hcat13types.add(new HiveChar("hive_char", 10));
  rec_hcat13types.add(new HiveVarchar("hive_varchar", 20));
  rec_hcat13types.add(Date.valueOf("2014-01-06"));
  // NOTE(review): wall-clock time, so this field differs between runs —
  // presumably only the type round-trip is asserted; confirm against caller.
  rec_hcat13types.add(Timestamp.ofEpochMilli(System.currentTimeMillis()));
  return new DefaultHCatRecord(rec_hcat13types);
}

private static HCatRecord getHCat13TypesComplexRecord() {
HiveDecimal hd1 = HiveDecimal.create(new BigInteger("42949672")); d6.update(hd1.bigDecimalValue()); assertEquals(hd1.toString(), d6.getHiveDecimalString()); HiveDecimal hd2 = HiveDecimal.create(new BigDecimal("0.0")); d7.update(hd2.bigDecimalValue()); assertEquals(hd2.toString(), d7.getHiveDecimalString()); HiveDecimal hd3 = HiveDecimal.create(new BigDecimal("0.00023000")); d8.update(hd3.bigDecimalValue()); assertEquals(hd3.toString(), d8.getHiveDecimalString()); HiveDecimal hd4 = HiveDecimal.create(new BigDecimal("0.1")); d9.update(hd4.bigDecimalValue()); assertEquals(hd4.toString(), d9.getHiveDecimalString()); HiveDecimal hd5 = HiveDecimal.create(new BigDecimal("-00.100")); d10.update(hd5.bigDecimalValue()); assertEquals(hd5.toString(), d10.getHiveDecimalString()); HiveDecimal hd6 = HiveDecimal.create(new BigDecimal("00.1")); d11.update(hd6.bigDecimalValue()); assertEquals(hd6.toString(), d11.getHiveDecimalString()); HiveDecimal hd7 = HiveDecimal.create(new BigDecimal("27.000")); assertEquals(hd7.toString(), d12.getHiveDecimalString()); assertEquals("27", d12.getHiveDecimalString()); HiveDecimal hd8 = HiveDecimal.create(new BigDecimal("1234123000"));
DateTime ts = new DateTime(((Timestamp) val).getTime()).withZoneRetainFields(DateTimeZone.UTC); long value = ts.getMillis(); for (int i = start; i < end; i++) { final BigDecimal value = ((HiveDecimal)val).bigDecimalValue(); final NullableDecimal9Vector v = ((NullableDecimal9Vector) vector); final Decimal9Holder holder = new Decimal9Holder(); final BigDecimal value = ((HiveDecimal)val).bigDecimalValue(); final NullableDecimal18Vector v = ((NullableDecimal18Vector) vector); final Decimal18Holder holder = new Decimal18Holder(); needSpace, managedBuffer.capacity())); final BigDecimal value = ((HiveDecimal)val).bigDecimalValue(); final NullableDecimal28SparseVector v = ((NullableDecimal28SparseVector) vector); final Decimal28SparseHolder holder = new Decimal28SparseHolder(); String.format("Not sufficient space in given managed buffer. Need %d bytes, buffer has %d bytes", needSpace, managedBuffer.capacity())); final BigDecimal value = ((HiveDecimal)val).bigDecimalValue(); final NullableDecimal38SparseVector v = ((NullableDecimal38SparseVector) vector); final Decimal38SparseHolder holder = new Decimal38SparseHolder(); final BigDecimal value = ((HiveDecimal) val).bigDecimalValue() .setScale(vector.getField().getScale(), RoundingMode.HALF_UP); final NullableVarDecimalVector v = ((NullableVarDecimalVector) vector); for (int i = start; i < end; i++) {
/**
 * Writes 25,000 rows: five distinct sentinel rows at fixed offsets
 * (0, 5000, 10000, 15000, 20000) and an identical filler row everywhere
 * else, so later reads can seek to and verify the sentinels.
 */
private void writeData(Writer writer) throws IOException {
  for (int i = 0; i < 25000; i++) {
    if (i == 0) {
      writer.addRow(new AllTypesRow(2L, "foo", 0.8, HiveDecimal.create("1.2"), new Timestamp(0)));
    } else if (i == 5000) {
      writer.addRow(new AllTypesRow(13L, "bar", 80.0, HiveDecimal.create("2.2"), new Timestamp(
          5000)));
    } else if (i == 10000) {
      writer.addRow(new AllTypesRow(29L, "cat", 8.0, HiveDecimal.create("3.3"), new Timestamp(
          10000)));
    } else if (i == 15000) {
      writer.addRow(new AllTypesRow(70L, "dog", 1.8, HiveDecimal.create("4.4"), new Timestamp(
          15000)));
    } else if (i == 20000) {
      writer.addRow(new AllTypesRow(5L, "eat", 0.8, HiveDecimal.create("5.5"), new Timestamp(
          20000)));
    } else {
      // Filler row; note the timestamp (250000) is beyond the sentinel range.
      writer.addRow(new AllTypesRow(100L, "zebra", 8.0, HiveDecimal.create("0.0"), new Timestamp(
          250000)));
    }
  }
}
}
case DECIMAL: int scale = ((DecimalTypeInfo)hiveColumnTypes[i]).getScale(); rowVal = HiveDecimal.create(rowVal.toString()); ((HiveDecimal)rowVal).setScale(scale, BigDecimal.ROUND_HALF_EVEN); break; case BOOLEAN: if (rowVal instanceof java.sql.Date) { java.sql.Date dateRowVal = (java.sql.Date) rowVal; rowVal = Date.ofEpochMilli(dateRowVal.getTime()); } else { rowVal = Date.valueOf (rowVal.toString()); if (rowVal instanceof java.sql.Timestamp) { java.sql.Timestamp timestampRowVal = (java.sql.Timestamp) rowVal; rowVal = Timestamp.ofEpochMilli(timestampRowVal.getTime(), timestampRowVal.getNanos()); } else { rowVal = Timestamp.valueOf (rowVal.toString());
throw new IOException("JsonSerDe does not support BINARY type"); case DATE: return Date.valueOf(s); case TIMESTAMP: return Timestamp.valueOf(s); case DECIMAL: return HiveDecimal.create(s); case VARCHAR: return new HiveVarchar(s, ((BaseCharTypeInfo)mapKeyType).getLength()); case CHAR: return new HiveChar(s, ((BaseCharTypeInfo)mapKeyType).getLength());
/**
 * Evaluates {@code udf} on ({@code v} as a decimal, {@code scale}) and
 * asserts the decimal result equals {@code expV} within 1e-5.
 */
private void runDecimal(double v, IntWritable scale, Double expV, GenericUDF udf)
    throws HiveException {
  DeferredObject[] arguments = {
      new DeferredJavaObject(
          new HiveDecimalWritable(HiveDecimal.create(BigDecimal.valueOf(v)))),
      new DeferredJavaObject(scale)
  };
  HiveDecimalWritable result = (HiveDecimalWritable) udf.evaluate(arguments);
  Assert.assertEquals("bround() test ", expV.doubleValue(),
      result.getHiveDecimal().doubleValue(), 0.00001);
}
}
return result; switch (poi.getPrimitiveCategory()) { case FLOAT: case DOUBLE: case TIMESTAMP: java.sql.Timestamp origTimeStamp = (java.sql.Timestamp)poi.getPrimitiveJavaObject(obj); result = new DateTime(origTimeStamp.getTime()); break; case DATE: java.sql.Date origDate = (java.sql.Date)poi.getPrimitiveJavaObject(obj); result = new DateTime(origDate.getTime()); break; case DECIMAL: org.apache.hadoop.hive.common.type.HiveDecimal origDecimal = (org.apache.hadoop.hive.common.type.HiveDecimal)poi.getPrimitiveJavaObject(obj); result = origDecimal.bigDecimalValue(); break; default: