/**
 * Rounds {@code input} to {@code scale} fractional digits using banker's
 * rounding (half-even), matching Hive's BROUND semantics.
 *
 * @param input decimal value to round
 * @param scale target number of fractional digits
 * @return the rounded decimal
 */
public static HiveDecimal bround(HiveDecimal input, int scale) {
  final int roundingMode = HiveDecimal.ROUND_HALF_EVEN;
  return input.setScale(scale, roundingMode);
}
}
/**
 * Rounds {@code input} to {@code scale} fractional digits using half-up
 * rounding, matching Hive's ROUND semantics.
 *
 * @param input decimal value to round
 * @param scale target number of fractional digits
 * @return the rounded decimal
 */
public static HiveDecimal round(HiveDecimal input, int scale) {
  final int roundingMode = HiveDecimal.ROUND_HALF_UP;
  return input.setScale(scale, roundingMode);
}
/**
 * Applies banker's rounding (round-half-to-even) to {@code input} at the
 * requested scale; this is the BROUND UDF behavior.
 *
 * @param input decimal value to round
 * @param scale target number of fractional digits
 * @return the rounded decimal
 */
public static HiveDecimal bround(HiveDecimal input, int scale) {
  return input.setScale(scale, HiveDecimal.ROUND_HALF_EVEN);
}
}
/**
 * Applies half-up rounding to {@code input} at the requested scale; this is
 * the ROUND UDF behavior.
 *
 * @param input decimal value to round
 * @param scale target number of fractional digits
 * @return the rounded decimal
 */
public static HiveDecimal round(HiveDecimal input, int scale) {
  return input.setScale(scale, HiveDecimal.ROUND_HALF_UP);
}
int scale = ((DecimalTypeInfo)hiveColumnTypes[i]).getScale(); rowVal = HiveDecimal.create(rowVal.toString()); ((HiveDecimal)rowVal).setScale(scale, BigDecimal.ROUND_HALF_EVEN); break; case BOOLEAN:
private Binary decimalToBinary(final HiveDecimal hiveDecimal, final DecimalTypeInfo decimalTypeInfo) { int prec = decimalTypeInfo.precision(); int scale = decimalTypeInfo.scale(); byte[] decimalBytes = hiveDecimal.setScale(scale).unscaledValue().toByteArray(); // Estimated number of bytes needed. int precToBytes = ParquetHiveSerDe.PRECISION_TO_BYTE_COUNT[prec - 1]; if (precToBytes == decimalBytes.length) { // No padding needed. return Binary.fromByteArray(decimalBytes); } byte[] tgt = new byte[precToBytes]; if (hiveDecimal.signum() == -1) { // For negative number, initializing bits to 1 for (int i = 0; i < precToBytes; i++) { tgt[i] |= 0xFF; } } System.arraycopy(decimalBytes, 0, tgt, precToBytes - decimalBytes.length, decimalBytes.length); // Padding leading zeroes/ones. return Binary.fromByteArray(tgt); } }
/**
 * Builds a {@link DecimalColumnVector} of {@code size} rows filled with random
 * non-zero decimals at the type's scale, optionally repeating one value for
 * every row and/or marking rows null at a random frequency.
 *
 * @param typeInfo supplies precision and scale for the vector and values
 * @param nulls whether to inject null rows
 * @param repeating whether the vector repeats a single value
 * @param size number of rows
 * @param rand randomness source (consumed in a fixed call order)
 * @return the populated column vector
 */
public static DecimalColumnVector generateDecimalColumnVector(DecimalTypeInfo typeInfo, boolean nulls, boolean repeating, int size, Random rand) {
  DecimalColumnVector result = new DecimalColumnVector(size, typeInfo.precision(), typeInfo.scale());
  result.noNulls = !nulls;
  result.isRepeating = repeating;

  // Draw a non-zero value up front to reuse for every row in repeating mode.
  HiveDecimalWritable sharedValue = new HiveDecimalWritable();
  do {
    sharedValue.set(randomScaledDecimal(typeInfo, rand));
  } while (sharedValue.getHiveDecimal().doubleValue() == 0);

  int nullFrequency = generateNullFrequency(rand);
  for (int row = 0; row < size; row++) {
    boolean makeNull = nulls && (repeating || row % nullFrequency == 0);
    if (makeNull) {
      result.isNull[row] = true;
      result.vector[row] = null;
      continue;
    }
    result.isNull[row] = false;
    if (repeating) {
      result.vector[row].set(sharedValue);
    } else {
      result.vector[row].set(randomScaledDecimal(typeInfo, rand));
    }
    // Zero values are not allowed: back up and redraw this row.
    if (result.vector[row].getHiveDecimal().doubleValue() == 0) {
      row--;
    }
  }
  return result;
}

/** Draws one random double and rounds it half-up to the type's scale. */
private static HiveDecimal randomScaledDecimal(DecimalTypeInfo typeInfo, Random rand) {
  return HiveDecimal.create(((Double) rand.nextDouble()).toString())
      .setScale((short) typeInfo.scale(), HiveDecimal.ROUND_HALF_UP);
}
/**
 * Writes {@code elementNum} rows of list-typed test data through the given
 * Parquet writer. Each non-null row carries lists whose length cycles from 1
 * to 4; a fixed-length repeated binary field is appended to every row. The
 * writer is closed when all rows have been written.
 *
 * @param writer destination Parquet writer (closed by this method)
 * @param isDictionaryEncoding forwarded to the per-type value generators
 * @param elementNum number of rows to write
 * @throws IOException if writing or closing fails
 */
protected static void writeListData(ParquetWriter<Group> writer, boolean isDictionaryEncoding, int elementNum) throws IOException {
  SimpleGroupFactory factory = new SimpleGroupFactory(schema);
  final int maxListSize = 4;
  int elementIndex = 0;
  for (int row = 0; row < elementNum; row++) {
    boolean rowIsNull = isNull(row);
    Group group = factory.newGroup();
    int listSize = row % maxListSize + 1;
    if (!rowIsNull) {
      for (int k = 0; k < listSize; k++) {
        group.append("list_int32_field", getIntValue(isDictionaryEncoding, elementIndex));
        group.append("list_int64_field", getLongValue(isDictionaryEncoding, elementIndex));
        group.append("list_double_field", getDoubleValue(isDictionaryEncoding, elementIndex));
        group.append("list_float_field", getFloatValue(isDictionaryEncoding, elementIndex));
        group.append("list_boolean_field", getBooleanValue(elementIndex));
        group.append("list_binary_field", getBinaryValue(isDictionaryEncoding, elementIndex));
        // Decimals are rescaled to 2 and stored via the writable's raw bytes.
        HiveDecimal decimal = getDecimal(isDictionaryEncoding, elementIndex).setScale(2);
        HiveDecimalWritable decimalWritable = new HiveDecimalWritable(decimal);
        group.append("list_decimal_field", Binary.fromConstantByteArray(decimalWritable.getInternalStorage()));
        elementIndex++;
      }
    }
    // This field is always populated with maxListSize entries, even for null rows.
    for (int k = 0; k < maxListSize; k++) {
      group.append("list_binary_field_for_repeat_test", getBinaryValue(isDictionaryEncoding, row));
    }
    writer.write(group);
  }
  writer.close();
}
float floatValForMap = getFloatValue(isDictionaryEncoding, mapElementIndex); Binary binaryValForMap = getBinaryValue(isDictionaryEncoding, mapElementIndex); HiveDecimal hd = getDecimal(isDictionaryEncoding, mapElementIndex).setScale(2); HiveDecimalWritable hdw = new HiveDecimalWritable(hd); Binary decimalValForMap = Binary.fromConstantByteArray(hdw.getInternalStorage());
/**
 * Rounds {@code input} at the given scale with half-up rounding (Hive's
 * ROUND behavior).
 *
 * @param input decimal value to round
 * @param scale target number of fractional digits
 * @return the rounded decimal
 */
public static HiveDecimal round(HiveDecimal input, int scale) {
  final int halfUp = HiveDecimal.ROUND_HALF_UP;
  return input.setScale(scale, halfUp);
}
long longVal = getLongValue(isDictionaryEncoding, i); Binary timeStamp = getTimestamp(isDictionaryEncoding, i); HiveDecimal decimalVal = getDecimal(isDictionaryEncoding, i).setScale(2); double doubleVal = getDoubleValue(isDictionaryEncoding, i); float floatVal = getFloatValue(isDictionaryEncoding, i);
/**
 * Stores floor({@code input}) — scale 0, rounded toward negative infinity —
 * into slot {@code i} of the output vector. If the rescale overflows, the slot
 * is marked null instead of propagating the exception.
 *
 * @param i output row index
 * @param input value to floor
 * @param outputColVector destination vector (mutated)
 */
public static void floor(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
  try {
    HiveDecimal floored = input.setScale(0, HiveDecimal.ROUND_FLOOR);
    outputColVector.set(i, floored);
  } catch (ArithmeticException e) {
    // Overflow at the output precision: record a null for this row.
    outputColVector.noNulls = false;
    outputColVector.isNull[i] = true;
  }
}
/**
 * Stores ceiling({@code input}) — scale 0, rounded toward positive infinity —
 * into slot {@code i} of the output vector. If the rescale overflows, the slot
 * is marked null instead of propagating the exception.
 *
 * @param i output row index
 * @param input value to round up
 * @param outputColVector destination vector (mutated)
 */
public static void ceiling(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
  try {
    HiveDecimal ceiled = input.setScale(0, HiveDecimal.ROUND_CEILING);
    outputColVector.set(i, ceiled);
  } catch (ArithmeticException e) {
    // Overflow at the output precision: record a null for this row.
    outputColVector.noNulls = false;
    outputColVector.isNull[i] = true;
  }
}
/**
 * Converts a HiveDecimal to a buffer of its unscaled two's-complement bytes
 * after adjusting it to the given scale, as Avro's decimal logical type expects.
 *
 * @param dec decimal to convert; may be null
 * @param scale scale to normalize the decimal to before extracting bytes
 * @return the byte buffer, or null when {@code dec} is null
 */
public static Buffer getBufferFromDecimal(HiveDecimal dec, int scale) {
  if (dec == null) {
    return null;
  }
  HiveDecimal rescaled = dec.setScale(scale);
  byte[] unscaledBytes = rescaled.unscaledValue().toByteArray();
  return AvroSerdeUtils.getBufferFromBytes(unscaledBytes);
}
/**
 * Produces the Avro byte-buffer encoding of {@code dec}: the unscaled
 * two's-complement bytes of the value normalized to {@code scale}.
 *
 * @param dec decimal to convert; may be null
 * @param scale scale to normalize to before extracting bytes
 * @return the byte buffer, or null when {@code dec} is null
 */
public static Buffer getBufferFromDecimal(HiveDecimal dec, int scale) {
  if (dec == null) {
    return null;
  }
  byte[] unscaled = dec.setScale(scale).unscaledValue().toByteArray();
  return AvroSerdeUtils.getBufferFromBytes(unscaled);
}
/**
 * Encodes {@code dec} for Avro by normalizing it to {@code scale} and wrapping
 * its unscaled two's-complement bytes in a buffer.
 *
 * @param dec decimal to encode; may be null
 * @param scale scale to normalize to first
 * @return the byte buffer, or null when {@code dec} is null
 */
public static Buffer getBufferFromDecimal(HiveDecimal dec, int scale) {
  if (dec == null) {
    return null;
  }
  final HiveDecimal atScale = dec.setScale(scale);
  return AvroSerdeUtils.getBufferFromBytes(atScale.unscaledValue().toByteArray());
}
/**
 * Computes ceiling of the input decimal (scale 0, rounded toward positive
 * infinity) and returns it via the reused {@code decimalWritable} field.
 *
 * @param input wrapped decimal to round up
 * @return the shared writable holding the ceiling result
 */
@Override
protected HiveDecimalWritable evaluate(HiveDecimalWritable input) {
  HiveDecimal value = input.getHiveDecimal();
  HiveDecimal ceiled = value.setScale(0, HiveDecimal.ROUND_CEILING);
  decimalWritable.set(ceiled);
  return decimalWritable;
}
/**
 * Stores the floor (scale 0, ROUND_FLOOR) of the wrapped input into slot
 * {@code i} of the output vector; an overflowing rescale marks the slot null
 * instead of throwing.
 *
 * @param i output row index
 * @param input wrapped value to floor
 * @param outputColVector destination vector (mutated)
 */
public static void floor(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
  try {
    HiveDecimal floored = input.getHiveDecimal().setScale(0, HiveDecimal.ROUND_FLOOR);
    outputColVector.set(i, floored);
  } catch (ArithmeticException e) {
    // Overflow: this row becomes null rather than failing the batch.
    outputColVector.noNulls = false;
    outputColVector.isNull[i] = true;
  }
}
/**
 * Computes floor of the input decimal (scale 0, rounded toward negative
 * infinity) and returns it via the reused {@code decimalWritable} field.
 *
 * @param input wrapped decimal to round down
 * @return the shared writable holding the floor result
 */
@Override
protected HiveDecimalWritable evaluate(HiveDecimalWritable input) {
  HiveDecimal value = input.getHiveDecimal();
  HiveDecimal floored = value.setScale(0, HiveDecimal.ROUND_FLOOR);
  decimalWritable.set(floored);
  return decimalWritable;
}
/**
 * Stores the ceiling (scale 0, ROUND_CEILING) of the wrapped input into slot
 * {@code i} of the output vector; an overflowing rescale marks the slot null
 * instead of throwing.
 *
 * @param i output row index
 * @param input wrapped value to round up
 * @param outputColVector destination vector (mutated)
 */
public static void ceiling(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
  try {
    HiveDecimal ceiled = input.getHiveDecimal().setScale(0, HiveDecimal.ROUND_CEILING);
    outputColVector.set(i, ceiled);
  } catch (ArithmeticException e) {
    // Overflow: this row becomes null rather than failing the batch.
    outputColVector.noNulls = false;
    outputColVector.isNull[i] = true;
  }
}