/**
 * Builds a BETWEEN filter over a varchar column whose bounds are supplied at
 * run time as dynamic values rather than compile-time constants.
 *
 * @param colNum     index of the varchar column being filtered
 * @param leftValue  dynamic value for one bound (presumably the lower — confirm against caller)
 * @param rightValue dynamic value for the other bound
 */
public FilterVarCharColumnBetweenDynamicValue(int colNum, DynamicValue leftValue, DynamicValue rightValue) {
  // Placeholder bounds; the real bounds come from the dynamic values later.
  super(colNum, new HiveVarchar("", 1), new HiveVarchar("", 1));
  this.rightDynamicValue = rightValue;
  this.leftDynamicValue = leftValue;
}
/**
 * Wraps the current value in a {@link HiveVarchar} built with a negative
 * length (-1), i.e. no explicit length parameter is applied here.
 */
public HiveVarchar getHiveVarchar() {
  final String text = value.toString();
  return new HiveVarchar(text, -1);
}
// NOTE(review): fragment of an anonymous-class expression — the opening
// "new ...() {" and the receiver of init(...) are outside this view.
@Override
public Object initValue(Object ignored) {
  // Seed the field with an empty varchar; -1 length presumably means
  // "no length restriction" — confirm against HiveVarchar semantics.
  return ((SettableHiveVarcharObjectInspector) this.objectInspector)
      .create(new HiveVarchar(StringUtils.EMPTY, -1));
}
}.init(fieldObjInspector);
/**
 * Converter that coerces arbitrary primitive values into varchar.
 *
 * @param inputOI  inspector describing the incoming primitive value
 * @param outputOI settable inspector used to produce the varchar result
 */
public HiveVarcharConverter(PrimitiveObjectInspector inputOI, SettableHiveVarcharObjectInspector outputOI) {
  this.inputOI = inputOI;
  this.outputOI = outputOI;
  // Varchar object inspectors can arrive without type params when an
  // old-style UDF declares an evaluate() method with varchar arguments, so no
  // param validation is done here. If varchar were only allowed in
  // GenericUDFs, the presence of type params could be enforced properly.
  hc = outputOI.create(new HiveVarchar("", -1));
}
/**
 * Creates a new varchar object from {@code value}, bounded by this
 * inspector's declared maximum length.
 */
@Override
public Object create(HiveVarchar value) {
  final int maxLen = getMaxLength();
  return new HiveVarchar(value, maxLen);
}
/**
 * Produces a varchar holding {@code value} bounded by the inspector's max
 * length. Note {@code o} is ignored — a fresh instance is returned rather
 * than mutating the existing object.
 */
@Override
public Object set(Object o, String value) {
  final int maxLen = getMaxLength();
  return new HiveVarchar(value, maxLen);
}
/**
 * Decodes the UTF-8 bytes held by the {@link VarCharHolder} and materializes
 * them as a {@link HiveVarchar} capped at the maximum varchar length.
 */
@Override
public HiveVarchar getPrimitiveJavaObject(Object o) {
  final VarCharHolder holder = (VarCharHolder) o;
  final String decoded = StringFunctionHelpers.toStringFromUTF8(holder.start, holder.end, holder.buffer);
  return new HiveVarchar(decoded, HiveVarchar.MAX_VARCHAR_LENGTH);
}
} // closes the enclosing class (declared above this view)
/**
 * Nullable variant: returns {@code null} for a null input, otherwise decodes
 * the holder's UTF-8 bytes into a {@link HiveVarchar} capped at the maximum
 * varchar length.
 */
@Override
public HiveVarchar getPrimitiveJavaObject(Object o) {
  if (o == null) {
    return null;
  }
  final NullableVarCharHolder holder = (NullableVarCharHolder) o;
  final String decoded = StringFunctionHelpers.toStringFromUTF8(holder.start, holder.end, holder.buffer);
  return new HiveVarchar(decoded, HiveVarchar.MAX_VARCHAR_LENGTH);
}
} // closes the enclosing class (declared above this view)
// Copies val into a varchar bounded by this inspector's declared max length.
private HiveVarchar getPrimitiveWithParams(HiveVarchar val) {
  final int maxLen = getMaxLength();
  return new HiveVarchar(val, maxLen);
}
// NOTE(review): fragment of an anonymous-class expression — the opening
// "new ...() {" and the receiver of init(...) are outside this view.
@Override
public Object initValue(Object ignored) {
  // Seed the field with an empty varchar; -1 length presumably means
  // "no length restriction" — confirm against HiveVarchar semantics.
  return ((SettableHiveVarcharObjectInspector) this.objectInspector)
      .create(new HiveVarchar(StringUtils.EMPTY, -1));
}
}.init(fieldObjInspector);
@Override public Object set(Object o, HiveVarchar value) { if (BaseCharUtils.doesPrimitiveMatchTypeParams(value, (VarcharTypeInfo) typeInfo)) { return value; } else { // Otherwise value may be too long, convert to appropriate value based on params return new HiveVarchar(value, getMaxLength()); } }
/**
 * Converts {@code o} — either a {@code String} or a {@code HiveVarchar} —
 * into a writable that honors this inspector's length parameter.
 * {@code null} passes through unchanged.
 */
@Override
public HiveVarcharWritable getPrimitiveWritableObject(Object o) {
  if (o == null) {
    return null;
  }
  final HiveVarchar varchar = (o instanceof String)
      ? new HiveVarchar((String) o, getMaxLength())
      : (HiveVarchar) o;
  return getWritableWithParams(varchar);
}
// Convenience factory: wraps value (bounded by length) in a writable.
private static HiveVarcharWritable varcharW(String value, int length) {
  final HiveVarchar varchar = new HiveVarchar(value, length);
  return new HiveVarcharWritable(varchar);
}
/**
 * Converts {@code input} into the reusable varchar {@code hc}. Booleans map
 * to the literals "TRUE"/"FALSE"; every other primitive category goes through
 * the generic varchar coercion. Returns {@code null} for null input.
 */
@Override
public Object convert(Object input) {
  if (input == null) {
    return null;
  }
  switch (inputOI.getPrimitiveCategory()) {
    case BOOLEAN: {
      // Render booleans as upper-case SQL literals, not "true"/"false".
      final boolean flag = ((BooleanObjectInspector) inputOI).get(input);
      final HiveVarchar literal = flag ? new HiveVarchar("TRUE", -1) : new HiveVarchar("FALSE", -1);
      return outputOI.set(hc, literal);
    }
    default:
      return outputOI.set(hc, PrimitiveObjectInspectorUtils.getHiveVarchar(input, inputOI));
  }
}
/**
 * Unpacks the writable and re-applies this inspector's max length, returning
 * a fresh {@link HiveVarchar}.
 */
private HiveVarchar getPrimitiveWithParams(HiveVarcharWritable val) {
  final HiveVarchar result = new HiveVarchar();
  result.setValue(val.getHiveVarchar(), getMaxLength());
  return result;
}
/**
 * Wraps raw bytes in the writable matching {@code ti}: {@code Text} for
 * string, {@code HiveVarcharWritable} for varchar, {@code BytesWritable} for
 * binary. Any other type yields {@code null}.
 */
private Writable getWritableValue(TypeInfo ti, byte[] value) {
  if (ti.equals(TypeInfoFactory.stringTypeInfo)) {
    return new Text(value);
  }
  if (ti.equals(TypeInfoFactory.varcharTypeInfo)) {
    // Go through Text to interpret the bytes as UTF-8; the varchar is built
    // with length -1 (no explicit length parameter applied here).
    final String decoded = new Text(value).toString();
    return new HiveVarcharWritable(new HiveVarchar(decoded, -1));
  }
  if (ti.equals(TypeInfoFactory.binaryTypeInfo)) {
    return new BytesWritable(value);
  }
  return null;
}
/**
 * Extracts the varchar from a lazy value, rebuilding it under the declared
 * type length only when it violates the varchar type parameters.
 */
@Override
public HiveVarchar getPrimitiveJavaObject(Object o) {
  if (o == null) {
    return null;
  }
  final HiveVarchar raw = ((LazyHiveVarchar) o).getWritableObject().getHiveVarchar();
  final VarcharTypeInfo varcharInfo = (VarcharTypeInfo) typeInfo;
  if (BaseCharUtils.doesPrimitiveMatchTypeParams(raw, varcharInfo)) {
    return raw;
  }
  // Out of spec for the declared varchar(n): rebuild within the limit.
  return new HiveVarchar(raw, varcharInfo.getLength());
}
/**
 * Generates a random {@link HiveVarchar}: a random string (built with a
 * budget of 100 — presumably characters; confirm getRandString's contract)
 * bounded by a random max length in [1, declared length].
 *
 * @param r               randomness source
 * @param varcharTypeInfo supplies the upper bound for the varchar length
 * @param isUnicodeOk     whether non-ASCII characters may appear
 */
public static HiveVarchar getRandHiveVarchar(Random r, VarcharTypeInfo varcharTypeInfo, boolean isUnicodeOk) {
  // Pick a length in [1, declaredLength]; the HiveVarchar constructor applies
  // it to the generated string.
  final int boundedLength = 1 + r.nextInt(varcharTypeInfo.getLength());
  final String candidate = getRandString(r, 100, isUnicodeOk);
  return new HiveVarchar(candidate, boundedLength);
}
/**
 * Builds a single-row test record exercising the types added in Hive 0.13:
 * decimal, char, varchar, date, and timestamp.
 */
private static HCatRecord getHCat13TypesRecord() {
  List<Object> rec_hcat13types = new ArrayList<Object>(5);
  rec_hcat13types.add(HiveDecimal.create(new BigDecimal("123.45"))); //prec 5, scale 2
  rec_hcat13types.add(new HiveChar("hive_char", 10));
  rec_hcat13types.add(new HiveVarchar("hive_varchar", 20));
  rec_hcat13types.add(Date.valueOf("2014-01-06"));
  // Timestamp is taken from the wall clock, so the record is not reproducible
  // across runs.
  rec_hcat13types.add(Timestamp.ofEpochMilli(System.currentTimeMillis()));
  return new DefaultHCatRecord(rec_hcat13types);
}

// NOTE(review): the following definition continues past this view.
private static HCatRecord getHCat13TypesComplexRecord() {
/**
 * Computes the Hive bucket number for the given column values by hashing them
 * with {@link GenericUDFHash} and delegating to the default Hive partitioner.
 *
 * @param columnBindings object-inspector / value pairs, one per bucketing column
 * @param bucketCount    total number of buckets
 * @return the bucket index the bound values hash into
 * @throws HiveException if UDF initialization or evaluation fails
 */
public static int getHiveBucket(List<Entry<ObjectInspector, Object>> columnBindings, int bucketCount)
    throws HiveException {
  final int columns = columnBindings.size();
  final ObjectInspector[] inspectors = new ObjectInspector[columns];
  final GenericUDF.DeferredObject[] arguments = new GenericUDF.DeferredObject[columns];

  int index = 0;
  for (Entry<ObjectInspector, Object> binding : columnBindings) {
    final ObjectInspector oi = binding.getKey();
    final Object rawValue = binding.getValue();
    inspectors[index] = oi;
    // Varchar columns arrive as plain Strings; wrap them so the hash sees the
    // HiveVarchar representation (bounded by the inspector's max length).
    if (rawValue != null && oi instanceof JavaHiveVarcharObjectInspector) {
      final JavaHiveVarcharObjectInspector varcharOI = (JavaHiveVarcharObjectInspector) oi;
      final HiveVarchar wrapped = new HiveVarchar((String) rawValue, varcharOI.getMaxLength());
      arguments[index] = new GenericUDF.DeferredJavaObject(wrapped);
    } else {
      arguments[index] = new GenericUDF.DeferredJavaObject(rawValue);
    }
    index++;
  }

  final GenericUDFHash hashUdf = new GenericUDFHash();
  // initialize() must run before evaluate(); it also yields the inspector for
  // the (int) hash result.
  final IntObjectInspector resultInspector = (IntObjectInspector) hashUdf.initialize(inspectors);
  final Object hash = hashUdf.evaluate(arguments);

  final HiveKey key = new HiveKey();
  key.setHashCode(resultInspector.get(hash));
  return new DefaultHivePartitioner<>().getBucket(key, null, bucketCount);
}