return new LongWritable(0); case TIMESTAMP: return new TimestampWritable(new Timestamp(0)); case DATE: return new DateWritable(new Date(0)); case FLOAT: return new FloatWritable(0); return new BytesWritable(ArrayUtils.EMPTY_BYTE_ARRAY); case STRING: return new Text(ArrayUtils.EMPTY_BYTE_ARRAY); case VARCHAR: return new HiveVarcharWritable(new HiveVarchar(StringUtils.EMPTY, -1)); case CHAR: return new HiveCharWritable(new HiveChar(StringUtils.EMPTY, -1)); case DECIMAL: return new HiveDecimalWritable();
switch (primitiveCategory) { case FLOAT: rawDecimal = HiveDecimal.create(String.valueOf(scalar)); break; case DOUBLE: rawDecimal = HiveDecimal.create(String.valueOf(scalar)); break; case BYTE: rawDecimal = HiveDecimal.create((Byte) scalar); break; case SHORT: break; case CHAR: rawDecimal = HiveDecimal.create(((HiveChar) scalar).getStrippedValue()); break; case VARCHAR: rawDecimal = HiveDecimal.create(((HiveVarchar) scalar).getValue()); break; case DECIMAL:
static Object convertPrimitive(Object val, PrimitiveObjectInspector poi) { switch (poi.getPrimitiveCategory()) { // Save char/varchar as string case CHAR: return ((HiveChar) poi.getPrimitiveJavaObject(val)).getPaddedValue(); case VARCHAR: return ((HiveVarchar) poi.getPrimitiveJavaObject(val)).toString(); case DECIMAL: return ((HiveDecimal) poi.getPrimitiveJavaObject(val)).bigDecimalValue(); default: return poi.getPrimitiveJavaObject(val); } }
/**
 * Copies the writable's varchar value into a fresh {@code HiveVarchar},
 * applying this type's declared maximum length via {@code getMaxLength()}.
 */
private HiveVarchar getPrimitiveWithParams(HiveVarcharWritable writable) {
  HiveVarchar bounded = new HiveVarchar();
  // Re-bound the copied value with the max length declared for this type.
  bounded.setValue(writable.getHiveVarchar(), getMaxLength());
  return bounded;
}
public static HiveVarchar getHiveVarchar(Object o, PrimitiveObjectInspector oi) { if (o == null) { return null; } HiveVarchar result = null; switch (oi.getPrimitiveCategory()) { case VARCHAR: result = ((HiveVarcharObjectInspector)oi).getPrimitiveJavaObject(o); break; default: // Is there a way to provide char length here? // It might actually be ok as long as there is an object inspector (with char length) // receiving this value. result = new HiveVarchar(); result.setValue(getString(o, oi)); break; } return result; }
HiveChar hc = new HiveChar(str, maxLength); return hc; case VARCHAR: HiveVarchar hvc = new HiveVarchar(str, maxLength); return hvc; case DATE: return new Date(DateWritable.daysToMillis((Integer)datum)); case TIMESTAMP: if (recordSchema.getType() != Type.LONG) { "Unexpected Avro schema for Date TypeInfo: " + recordSchema.getType()); return new Timestamp((Long)datum); default: return datum;
private static final DecimalType DECIMAL_TYPE_PRECISION_38 = DecimalType.createDecimalType(38, 16); .add(new TestColumn("t_empty_string", javaStringObjectInspector, "", Slices.EMPTY_SLICE)) .add(new TestColumn("t_string", javaStringObjectInspector, "test", Slices.utf8Slice("test"))) .add(new TestColumn("t_empty_varchar", javaHiveVarcharObjectInspector, new HiveVarchar("", HiveVarchar.MAX_VARCHAR_LENGTH), Slices.EMPTY_SLICE)) .add(new TestColumn("t_varchar", javaHiveVarcharObjectInspector, new HiveVarchar("test", HiveVarchar.MAX_VARCHAR_LENGTH), Slices.utf8Slice("test"))) .add(new TestColumn("t_varchar_max_length", javaHiveVarcharObjectInspector, new HiveVarchar(VARCHAR_MAX_LENGTH_STRING, HiveVarchar.MAX_VARCHAR_LENGTH), Slices.utf8Slice(VARCHAR_MAX_LENGTH_STRING))) .add(new TestColumn("t_char", CHAR_INSPECTOR_LENGTH_10, "test", Slices.utf8Slice("test"), true)) .add(new TestColumn("t_tinyint", javaByteObjectInspector, (byte) 1, (byte) 1)) .add(new TestColumn("t_map_varchar", getStandardMapObjectInspector(javaHiveVarcharObjectInspector, javaHiveVarcharObjectInspector), ImmutableMap.of(new HiveVarchar("test", HiveVarchar.MAX_VARCHAR_LENGTH), new HiveVarchar("test", HiveVarchar.MAX_VARCHAR_LENGTH)), mapBlockOf(createVarcharType(HiveVarchar.MAX_VARCHAR_LENGTH), createVarcharType(HiveVarchar.MAX_VARCHAR_LENGTH), "test", "test"))) .add(new TestColumn("t_map_char", getStandardMapObjectInspector(CHAR_INSPECTOR_LENGTH_10, CHAR_INSPECTOR_LENGTH_10), ImmutableMap.of(new HiveChar("test", 10), new HiveChar("test", 10)), mapBlockOf(createCharType(10), createCharType(10), "test", "test"))) .add(new TestColumn("t_map_smallint", "t_array_varchar", getStandardListObjectInspector(javaHiveVarcharObjectInspector), ImmutableList.of(new HiveVarchar("test", HiveVarchar.MAX_VARCHAR_LENGTH)), arrayBlockOf(createVarcharType(HiveVarchar.MAX_VARCHAR_LENGTH), "test"))) .add(new TestColumn(
case BINARY: try { String t = Text.decode(s.getBytes(), 0, s.getBytes().length); return t.getBytes(); } catch (CharacterCodingException e) { return Date.valueOf(s); case TIMESTAMP: return Timestamp.valueOf(s); case DECIMAL: return HiveDecimal.create(s); case VARCHAR: return new HiveVarchar(s, ((BaseCharTypeInfo) mapKeyType).getLength()); case CHAR: return new HiveChar(s, ((BaseCharTypeInfo) mapKeyType).getLength()); default: throw new IOException(
value = ((HiveDecimal) value).bigDecimalValue(); } else if (value instanceof Decimal128) { value = ((Decimal128) value).toBigDecimal(); case CHAR: if (value instanceof HiveChar) { value = ((HiveChar) value).getValue(); case VARCHAR: if (value instanceof HiveVarchar) { value = ((HiveVarchar) value).getValue(); } else { c = Calendar.getInstance(); c.setTimeInMillis(((Timestamp)value).getTime());
sb.append('"'); sb.append(escapeString(((HiveCharObjectInspector) poi) .getPrimitiveJavaObject(o).toString())); sb.append('"'); break; sb.append('"'); sb.append(escapeString(((HiveVarcharObjectInspector) poi) .getPrimitiveJavaObject(o).toString())); sb.append('"'); break; Text txt = new Text(); txt.set(bw.getBytes(), 0, bw.getLength()); sb.append(txt.toString()); break;
Text txt = new Text(); txt.set(b, 0, b.length); appendWithQuotes(sb, SerDeUtils.escapeString(txt.toString())); break; case DATE: Date d = ((DateObjectInspector) poi).getPrimitiveJavaObject(obj); appendWithQuotes(sb, d.toString()); break; case TIMESTAMP: { Timestamp t = ((TimestampObjectInspector) poi).getPrimitiveJavaObject(obj); appendWithQuotes(sb, t.toString()); break; case VARCHAR: { String s = SerDeUtils.escapeString( ((HiveVarcharObjectInspector) poi).getPrimitiveJavaObject(obj).toString()); appendWithQuotes(sb, s); break; ((HiveCharObjectInspector) poi).getPrimitiveJavaObject(obj).toString()); appendWithQuotes(sb, s); break;
return new BytesWritable(ArrayUtils.EMPTY_BYTE_ARRAY); case STRING: return new Text(ArrayUtils.EMPTY_BYTE_ARRAY); case VARCHAR: return new HiveVarcharWritable(new HiveVarchar(StringUtils.EMPTY, -1)); case CHAR: return new HiveCharWritable(new HiveChar(StringUtils.EMPTY, -1)); case DECIMAL: return new HiveDecimalWritable();
case CHAR: HiveChar ch = (HiveChar)fieldOI.getPrimitiveJavaObject(structFieldData); return ch.getStrippedValue(); case VARCHAR: HiveVarchar vc = (HiveVarchar)fieldOI.getPrimitiveJavaObject(structFieldData); return vc.getValue(); case DATE: Date date = ((DateObjectInspector)fieldOI).getPrimitiveJavaObject(structFieldData); return DateWritable.dateToDays(date); case TIMESTAMP: Timestamp timestamp = ((TimestampObjectInspector) fieldOI).getPrimitiveJavaObject(structFieldData); return timestamp.getTime(); case UNKNOWN: throw new AvroSerdeException("Received UNKNOWN primitive category.");
/**
 * Sorts a list of (Company, Department) structs first by the "Company" field
 * and then by "Department", where the second sort-field name is supplied as a
 * varchar rather than a plain string.
 */
@Test
public void testSortPrimitiveTupleTwoField() throws HiveException {
  List<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
  fieldOIs.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  fieldOIs.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  ObjectInspector[] inputOIs = {
      ObjectInspectorFactory.getStandardListObjectInspector(
          ObjectInspectorFactory.getStandardStructObjectInspector(
              asList("Company", "Department"), fieldOIs)),
      PrimitiveObjectInspectorFactory.writableStringObjectInspector,
      PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector
  };
  udf.initialize(inputOIs);

  Object i1 = asList(new Text("Linkedin"), new Text("HR"));
  Object i2 = asList(new Text("Linkedin"), new Text("IT"));
  Object i3 = asList(new Text("Linkedin"), new Text("Finance"));
  Object i4 = asList(new Text("Facebook"), new Text("IT"));
  Object i5 = asList(new Text("Facebook"), new Text("Finance"));
  Object i6 = asList(new Text("Facebook"), new Text("HR"));
  Object i7 = asList(new Text("Google"), new Text("Logistics"));
  Object i8 = asList(new Text("Google"), new Text("Finance"));
  Object i9 = asList(new Text("Google"), new Text("HR"));

  // Second sort key is passed as a varchar to exercise the varchar code path.
  HiveVarchar secondKey = new HiveVarchar();
  secondKey.setValue("Department");
  GenericUDF.DeferredJavaObject[] evalArgs = {
      new GenericUDF.DeferredJavaObject(asList(i1, i2, i3, i4, i5, i6, i7, i8, i9)),
      new GenericUDF.DeferredJavaObject(new Text("Company")),
      new GenericUDF.DeferredJavaObject(new HiveVarcharWritable(secondKey))
  };

  // Expected: companies ascending, departments ascending within each company.
  runAndVerify(evalArgs, asList(i5, i6, i4, i8, i9, i7, i3, i1, i2));
}
/**
 * Wraps raw bytes in the {@code Writable} matching the given type.
 *
 * <p>Supported types are string, varchar and binary; any other type yields
 * {@code null}.
 */
private Writable getWritableValue(TypeInfo ti, byte[] value) {
  if (ti.equals(TypeInfoFactory.binaryTypeInfo)) {
    return new BytesWritable(value);
  }
  if (ti.equals(TypeInfoFactory.stringTypeInfo)) {
    return new Text(value);
  }
  if (ti.equals(TypeInfoFactory.varcharTypeInfo)) {
    // NOTE(review): -1 presumably means "no declared length" for the
    // varchar — confirm against HiveVarchar's constructor contract.
    String decoded = new Text(value).toString();
    return new HiveVarcharWritable(new HiveVarchar(decoded, -1));
  }
  return null;
}
break; case DECIMAL: result = ((HiveDecimal)o).bigDecimalValue(); break; case CHAR: result = ((HiveChar)o).getValue(); break; case VARCHAR: result = ((HiveVarchar)o).getValue(); break; case DATE: result = new DateTime(d.getYear() + 1900, d.getMonth() + 1, d.getDate(), 0, 0);//uses local TZ break; case TIMESTAMP: result = new DateTime(((Timestamp)o).getTime());//uses local TZ break; default:
.startAnd() .lessThan("x", PredicateLeaf.Type.DATE, Date.valueOf("1970-1-11")) .lessThanEquals("y", PredicateLeaf.Type.STRING, new HiveChar("hi", 10).toString()) .equals("z", PredicateLeaf.Type.DECIMAL, new HiveDecimalWritable("1.0")) .end() .in("z", PredicateLeaf.Type.LONG, 1L, 2L, 3L) .nullSafeEquals("a", PredicateLeaf.Type.STRING, new HiveVarchar("stinger", 100).toString()) .end() .end()
/**
 * Materializes this writable's text as a {@code HiveVarchar}.
 *
 * <p>NOTE(review): the length argument -1 presumably means "no declared
 * length limit" — confirm against HiveVarchar's constructor contract.
 */
public HiveVarchar getHiveVarchar() {
  String text = value.toString();
  return new HiveVarchar(text, -1);
}
Matcher m = inputPattern.matcher(rowText.toString()); break; case DECIMAL: HiveDecimal bd = HiveDecimal.create(t); row.set(c, bd); break; case CHAR: HiveChar hc = new HiveChar(t, ((CharTypeInfo) typeInfo).getLength()); row.set(c, hc); break; case VARCHAR: HiveVarchar hv = new HiveVarchar(t, ((VarcharTypeInfo)typeInfo).getLength()); row.set(c, hv); break;
dateWritable.set(date); ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] = dateWritable.getDays(); text.set(string); ((BytesColumnVector) batch.cols[projectionColumnNum]).setVal( batchIndex, text.getBytes(), 0, text.getLength()); byte[] bytes = hiveVarchar.getValue().getBytes(); ((BytesColumnVector) batch.cols[projectionColumnNum]).setVal( batchIndex, bytes, 0, bytes.length); byte[] bytes = hiveChar.getStrippedValue().getBytes(); ((BytesColumnVector) batch.cols[projectionColumnNum]).setVal( batchIndex, bytes, 0, bytes.length);