/**
 * Divides {@code left} by {@code right}, returning a SQL NULL (Java null)
 * when the divisor is zero or when the division cannot produce a
 * representable result.
 *
 * @param left  dividend
 * @param right divisor
 * @return the reusable {@code decimalWritable} holding the quotient, or
 *         null for divide-by-zero / unrepresentable results
 */
@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
  if (right.compareTo(HiveDecimal.ZERO) != 0) {
    HiveDecimal quotient = left.divide(right);
    // divide() yields null when the result cannot be represented; fall
    // through to the NULL return in that case.
    if (quotient != null) {
      decimalWritable.set(quotient);
      return decimalWritable;
    }
  }
  return null;
}
/**
 * Divides {@code left} by {@code right}, returning a SQL NULL (Java null)
 * when the divisor is zero or when the division cannot produce a
 * representable result.
 *
 * @param left  dividend
 * @param right divisor
 * @return the reusable {@code decimalWritable} holding the quotient, or
 *         null for divide-by-zero / unrepresentable results
 */
@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
  if (right.compareTo(HiveDecimal.ZERO) != 0) {
    HiveDecimal quotient = left.divide(right);
    // divide() yields null when the result cannot be represented; fall
    // through to the NULL return in that case.
    if (quotient != null) {
      decimalWritable.set(quotient);
      return decimalWritable;
    }
  }
  return null;
}
/**
 * Produces the final average for the group: {@code sum / count}.
 *
 * @param aggregation accumulated sum and row count for the group
 * @return a {@link HiveDecimalWritable} holding the average, or null when
 *         the group is empty or the sum overflowed to null
 */
@Override
protected Object doTerminate(AverageAggregationBuffer<HiveDecimal> aggregation) {
  // An empty group (or a sum that overflowed to null) averages to SQL NULL.
  if (aggregation.count == 0 || aggregation.sum == null) {
    return null;
  }
  HiveDecimalWritable result = new HiveDecimalWritable(HiveDecimal.ZERO);
  result.set(aggregation.sum.divide(HiveDecimal.create(aggregation.count)));
  // Enforce the declared output precision/scale on the quotient so the
  // returned value matches the result type metadata (keeps this overload
  // consistent with the sibling implementation that already enforces it).
  result.mutateEnforcePrecisionScale(resultPrecision, resultScale);
  return result;
}
/**
 * Produces the final average for the group: {@code sum / count}, enforced
 * to the declared result precision and scale.
 *
 * @param aggregation accumulated sum and row count for the group
 * @return a {@link HiveDecimalWritable} holding the average, or null when
 *         the group is empty or the sum overflowed to null
 */
@Override
protected Object doTerminate(AverageAggregationBuffer<HiveDecimal> aggregation) {
  // No rows, or a sum that overflowed to null, averages to SQL NULL.
  if (aggregation.count == 0 || aggregation.sum == null) {
    return null;
  }
  HiveDecimalWritable result = new HiveDecimalWritable(HiveDecimal.ZERO);
  result.set(aggregation.sum.divide(HiveDecimal.create(aggregation.count)));
  // Clamp the quotient to the declared output precision/scale.
  result.mutateEnforcePrecisionScale(resultPrecision, resultScale);
  return result;
}
/**
 * Divide the target object by right, and scale the result to newScale.
 *
 * This round-trips through HiveDecimal so the rounding behavior matches
 * HiveDecimal exactly, at the cost of extra object allocation; a native
 * implementation could be faster in the future.
 *
 * @param right    the divisor
 * @param newScale scale applied to the quotient before storing it back
 * @throws ArithmeticException when HiveDecimal cannot represent the
 *         quotient (callers in the vectorized path catch this and emit a
 *         SQL NULL), or when the result exceeds 10^38
 */
public void divideDestructive(Decimal128 right, short newScale) {
  HiveDecimal quotient =
      HiveDecimal.create(this.toBigDecimal()).divide(HiveDecimal.create(right.toBigDecimal()));
  // HiveDecimal signals an unrepresentable quotient with null; surface it
  // as an exception so the vectorized caller can translate it to NULL.
  if (quotient == null) {
    throw new ArithmeticException("null divide result");
  }
  this.update(quotient.bigDecimalValue().toPlainString(), newScale);
  this.unscaledValue.throwIfExceedsTenToThirtyEight();
}
/**
 * Computes width_bucket for {@code exprValue} over the range from
 * {@code minValue} to {@code maxValue} split into {@code numBuckets}
 * equal-width buckets. Values outside the range map to the underflow
 * bucket 0 or the overflow bucket {@code numBuckets + 1}.
 *
 * @param exprValue  the value being bucketed
 * @param minValue   lower bound of the range (upper bound when the range is descending)
 * @param maxValue   upper bound of the range (lower bound when the range is descending)
 * @param numBuckets number of buckets; must be positive
 * @return the reusable {@code output} writable holding the bucket number
 * @throws IllegalArgumentException when numBuckets is not positive or the bounds are equal
 */
private IntWritable evaluate(HiveDecimal exprValue, HiveDecimal minValue, HiveDecimal maxValue,
    int numBuckets) {
  Preconditions.checkArgument(numBuckets > 0,
      "numBuckets in width_bucket function must be above 0");
  Preconditions.checkArgument(!maxValue.equals(minValue),
      "maxValue cannot be equal to minValue in width_bucket function");
  if (maxValue.compareTo(minValue) > 0) {
    // Ascending range: below-range values land in bucket 0, at-or-above-range
    // values land in the overflow bucket numBuckets + 1.
    if (exprValue.compareTo(minValue) < 0) {
      output.set(0);
    } else if (exprValue.compareTo(maxValue) >= 0) {
      output.set(numBuckets + 1);
    } else {
      // bucket = floor(numBuckets * (expr - min) / (max - min)) + 1
      // NOTE(review): HiveDecimal.divide can return null on unrepresentable
      // results, which would NPE at add(); presumably the range arithmetic
      // here cannot trigger that — confirm.
      output.set(HiveDecimal.create(numBuckets).multiply(exprValue.subtract(minValue)).divide(
          maxValue.subtract(minValue)).add(HiveDecimal.ONE).intValue());
    }
  } else {
    // Descending range (minValue > maxValue): the comparisons and the
    // numerator/denominator are mirrored so buckets count downward from min.
    if (exprValue.compareTo(minValue) > 0) {
      output.set(0);
    } else if (exprValue.compareTo(maxValue) <= 0) {
      output.set(numBuckets + 1);
    } else {
      output.set(HiveDecimal.create(numBuckets).multiply(minValue.subtract(exprValue)).divide(
          minValue.subtract(maxValue)).add(HiveDecimal.ONE).intValue());
    }
  }
  return output;
}
/**
 * Validates an aggregation result against the expected average.
 *
 * The result is a one-element array whose single entry is either null (for
 * an expected NULL) or a three-field struct {count, sum, ...}; the average
 * is recomputed here as sum / count and compared to {@code expected}.
 *
 * @param key      label used in assertion messages
 * @param expected expected average (Double or HiveDecimal), or null
 * @param result   raw aggregation output to validate
 */
@Override
public void validate(String key, Object expected, Object result) {
  // The aggregation output is wrapped in a one-element array.
  Object[] arr = (Object[]) result;
  assertEquals(1, arr.length);
  if (expected == null) {
    assertEquals(key, null, arr[0]);
  } else {
    assertEquals(true, arr[0] instanceof Object[]);
    Object[] vals = (Object[]) arr[0];
    // Struct layout: vals[0] = count, vals[1] = sum.
    // NOTE(review): vals[2] (the third struct field) is never validated —
    // confirm that is intentional.
    assertEquals(3, vals.length);
    assertEquals(true, vals[0] instanceof LongWritable);
    LongWritable lw = (LongWritable) vals[0];
    if (vals[1] instanceof DoubleWritable) {
      DoubleWritable dw = (DoubleWritable) vals[1];
      if (lw.get() != 0L) {
        // average = sum / count (boxed double compared via Object equality)
        assertEquals(key, expected, dw.get() / lw.get());
      } else {
        // Zero count: this harness expects the average to be reported as 0.0.
        assertEquals(key, expected, 0.0);
      }
    } else if (vals[1] instanceof HiveDecimalWritable) {
      HiveDecimalWritable hdw = (HiveDecimalWritable) vals[1];
      if (lw.get() != 0L) {
        assertEquals(key, expected, hdw.getHiveDecimal().divide(HiveDecimal.create(lw.get())));
      } else {
        assertEquals(key, expected, HiveDecimal.ZERO);
      }
    }
  }
}
public static void divideChecked(int i, HiveDecimal left, HiveDecimal right, DecimalColumnVector outputColVector) { try { outputColVector.set(i, left.divide(right)); } catch (ArithmeticException e) { // catch on error outputColVector.noNulls = false; outputColVector.isNull[i] = true; } }
public static void divideChecked(int i, HiveDecimalWritable left, HiveDecimal right, DecimalColumnVector outputColVector) { try { outputColVector.set(i, left.getHiveDecimal().divide(right)); } catch (ArithmeticException e) { // catch on error outputColVector.noNulls = false; outputColVector.isNull[i] = true; } }
/**
 * Divides {@code left} by {@code right}, returning a SQL NULL (Java null)
 * when the divisor is zero or when the division cannot produce a
 * representable result.
 *
 * @param left  dividend
 * @param right divisor
 * @return the reusable {@code decimalWritable} holding the quotient, or
 *         null for divide-by-zero / unrepresentable results
 */
@Override
protected HiveDecimalWritable evaluate(HiveDecimal left, HiveDecimal right) {
  if (right.compareTo(HiveDecimal.ZERO) != 0) {
    HiveDecimal quotient = left.divide(right);
    // divide() yields null when the result cannot be represented; fall
    // through to the NULL return in that case.
    if (quotient != null) {
      decimalWritable.set(quotient);
      return decimalWritable;
    }
  }
  return null;
}
public static void divideChecked(int i, HiveDecimal left, HiveDecimalWritable right, DecimalColumnVector outputColVector) { try { outputColVector.set(i, left.divide(right.getHiveDecimal())); } catch (ArithmeticException e) { // catch on error outputColVector.noNulls = false; outputColVector.isNull[i] = true; } }
/**
 * Produces the final average for the group: {@code sum / count}.
 *
 * @param aggregation accumulated sum and row count for the group
 * @return a {@link HiveDecimalWritable} holding the average, or null when
 *         the group is empty or the sum overflowed to null
 */
@Override
protected Object doTerminate(AverageAggregationBuffer<HiveDecimal> aggregation) {
  // An empty group (or a sum that overflowed to null) averages to SQL NULL.
  if (aggregation.count == 0 || aggregation.sum == null) {
    return null;
  }
  HiveDecimalWritable result = new HiveDecimalWritable(HiveDecimal.ZERO);
  result.set(aggregation.sum.divide(HiveDecimal.create(aggregation.count)));
  // Enforce the declared output precision/scale on the quotient so the
  // returned value matches the result type metadata (keeps this overload
  // consistent with the sibling implementation that already enforces it).
  result.mutateEnforcePrecisionScale(resultPrecision, resultScale);
  return result;
}
public static void divideChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right, DecimalColumnVector outputColVector) { try { outputColVector.set(i, left.getHiveDecimal().divide(right.getHiveDecimal())); } catch (ArithmeticException e) { // catch on error outputColVector.noNulls = false; outputColVector.isNull[i] = true; } }
/**
 * Divide the target object by right, and scale the result to newScale.
 *
 * This round-trips through HiveDecimal so the rounding behavior matches
 * HiveDecimal exactly, at the cost of extra object allocation; a native
 * implementation could be faster in the future.
 *
 * @param right    the divisor
 * @param newScale scale applied to the quotient before storing it back
 * @throws ArithmeticException when HiveDecimal cannot represent the
 *         quotient (callers in the vectorized path catch this and emit a
 *         SQL NULL), or when the result exceeds 10^38
 */
public void divideDestructive(Decimal128 right, short newScale) {
  HiveDecimal quotient =
      HiveDecimal.create(this.toBigDecimal()).divide(HiveDecimal.create(right.toBigDecimal()));
  // HiveDecimal signals an unrepresentable quotient with null; surface it
  // as an exception so the vectorized caller can translate it to NULL.
  if (quotient == null) {
    throw new ArithmeticException("null divide result");
  }
  this.update(quotient.bigDecimalValue().toPlainString(), newScale);
  this.unscaledValue.throwIfExceedsTenToThirtyEight();
}
/**
 * Divide the target object by right, and scale the result to newScale.
 *
 * This round-trips through HiveDecimal so the rounding behavior matches
 * HiveDecimal exactly, at the cost of extra object allocation; a native
 * implementation could be faster in the future.
 *
 * @param right    the divisor
 * @param newScale scale applied to the quotient before storing it back
 * @throws ArithmeticException when HiveDecimal cannot represent the
 *         quotient (callers in the vectorized path catch this and emit a
 *         SQL NULL), or when the result exceeds 10^38
 */
public void divideDestructive(Decimal128 right, short newScale) {
  HiveDecimal quotient =
      HiveDecimal.create(this.toBigDecimal()).divide(HiveDecimal.create(right.toBigDecimal()));
  // HiveDecimal signals an unrepresentable quotient with null; surface it
  // as an exception so the vectorized caller can translate it to NULL.
  if (quotient == null) {
    throw new ArithmeticException("null divide result");
  }
  this.update(quotient.bigDecimalValue().toPlainString(), newScale);
  this.unscaledValue.throwIfExceedsTenToThirtyEight();
}