/**
 * Wires up the typed-bytes input/output streams and registers one writable
 * primitive object inspector per column type listed in the table properties.
 *
 * @param in   raw input stream the typed-bytes records are read from
 * @param conf job configuration (part of the initialize contract)
 * @param tbl  table properties; read for {@code LIST_COLUMN_TYPES}
 * @throws IOException declared by the initialize contract
 */
public void initialize(InputStream in, Configuration conf, Properties tbl) throws IOException {
  din = new DataInputStream(in);
  tbIn = new TypedBytesWritableInput(din);
  tbOut = new TypedBytesWritableOutput(barrStr);
  // NOTE(review): a plain comma split assumes no parameterized types such as
  // decimal(10,2) ever appear in this property — confirm with callers.
  final String typeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
  columnTypes = Arrays.asList(typeProperty.split(","));
  for (String typeName : columnTypes) {
    PrimitiveTypeInfo typeInfo = TypeInfoFactory.getPrimitiveTypeInfo(typeName);
    dstOIns.add(
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo));
  }
}
@Override public void initialize(Configuration conf, Properties tbl) throws SerDeException { this.conf = conf; final String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS); final String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES); final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA); // Create an object inspector final List<String> columnNames; if (columnNameProperty.length() == 0) { columnNames = new ArrayList<>(); } else { columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter)); } final List<TypeInfo> columnTypes; if (columnTypeProperty.length() == 0) { columnTypes = new ArrayList<>(); } else { columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty); } rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes); rowObjectInspector = (StructObjectInspector) getStandardWritableObjectInspectorFromTypeInfo(rowTypeInfo); final List<Field> fields = new ArrayList<>(); final int size = columnNames.size(); for (int i = 0; i < size; i++) { fields.add(toField(columnNames.get(i), columnTypes.get(i))); } }
return TypeInfoFactory.getListTypeInfo(getExtendedTypeInfoFromJavaType( pt.getActualTypeArguments()[0], m)); return TypeInfoFactory.getMapTypeInfo(getExtendedTypeInfoFromJavaType( pt.getActualTypeArguments()[0], m), getExtendedTypeInfoFromJavaType(pt.getActualTypeArguments()[1], m)); return TypeInfoUtils .getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory .getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils .getTypeEntryFromPrimitiveJavaType(c).primitiveCategory)); return TypeInfoUtils .getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory .getPrimitiveJavaObjectInspector(PrimitiveObjectInspectorUtils .getTypeEntryFromPrimitiveJavaClass(c).primitiveCategory)); return TypeInfoUtils .getTypeInfoFromObjectInspector(PrimitiveObjectInspectorFactory .getPrimitiveWritableObjectInspector(PrimitiveObjectInspectorUtils .getTypeEntryFromPrimitiveWritableClass(c).primitiveCategory)); field.getGenericType(), m)); return TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos);
/**
 * Builds the Hive TypeInfo for an Avro MAP schema. Avro restricts map keys
 * to strings, so only the value type needs to be resolved recursively.
 *
 * @param schema      the Avro map schema to convert
 * @param seenSchemas schemas already visited, used to detect recursive types
 * @return a map TypeInfo keyed by string with the converted value type
 * @throws AvroSerdeException if the value type cannot be converted
 */
private static TypeInfo generateMapTypeInfo(Schema schema, Set<Schema> seenSchemas)
    throws AvroSerdeException {
  assert schema.getType().equals(Schema.Type.MAP);
  final TypeInfo valueType = generateTypeInfo(schema.getValueType(), seenSchemas);
  final TypeInfo keyType = TypeInfoFactory.getPrimitiveTypeInfo("string");
  return TypeInfoFactory.getMapTypeInfo(keyType, valueType);
}
/**
 * Builds the struct type used by these tests:
 * (an_int int, a_long bigint, a_double double, a_string string).
 *
 * @return the four-field struct TypeInfo
 */
private TypeInfo getTypeInfo() throws Exception {
  final String[][] columns = {
      {"an_int", "int"},
      {"a_long", "bigint"},
      {"a_double", "double"},
      {"a_string", "string"},
  };
  final List<String> names = new ArrayList<String>(columns.length);
  final List<TypeInfo> types = new ArrayList<TypeInfo>(columns.length);
  for (String[] column : columns) {
    names.add(column[0]);
    types.add(TypeInfoFactory.getPrimitiveTypeInfo(column[1]));
  }
  return TypeInfoFactory.getStructTypeInfo(names, types);
}
case CHAR: maxLength = getCommonLength( TypeInfoUtils.getCharacterLengthForType(a), TypeInfoUtils.getCharacterLengthForType(b)); return TypeInfoFactory.getCharTypeInfo(maxLength); case VARCHAR: maxLength = getCommonLength( TypeInfoUtils.getCharacterLengthForType(a), TypeInfoUtils.getCharacterLengthForType(b)); return TypeInfoFactory.getVarcharTypeInfo(maxLength); case DECIMAL: return HiveDecimalUtils.getDecimalTypeForPrimitiveCategories(a, b); default: return TypeInfoFactory.getPrimitiveTypeInfo( PrimitiveObjectInspectorUtils.getTypeEntryFromPrimitiveCategory(typeCategory).typeName);
/**
 * Initializes the given SerDe with the supplied (name, type) schema and
 * returns the matching standard writable struct object inspector.
 *
 * @param serDe  the SerDe instance to initialize
 * @param schema pairs of {column name, column type string}
 * @return a struct object inspector over the schema's writable types
 * @throws SerDeException if SerDe initialization fails
 */
private StructObjectInspector initSerDe(AbstractSerDe serDe, String[][] schema)
    throws SerDeException {
  List<String> names = newArrayList();
  List<String> typeStrings = newArrayList();
  List<TypeInfo> typeInfos = newArrayList();
  for (String[] pair : schema) {
    names.add(pair[0]);
    typeStrings.add(pair[1]);
    typeInfos.add(TypeInfoUtils.getTypeInfoFromTypeString(pair[1]));
  }

  // The SerDe takes the schema as comma-joined property strings.
  Properties props = new Properties();
  props.setProperty(serdeConstants.LIST_COLUMNS, Joiner.on(',').join(names));
  props.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeStrings));
  SerDeUtils.initializeSerDe(serDe, conf, props, null);

  return (StructObjectInspector) TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
      TypeInfoFactory.getStructTypeInfo(names, typeInfos));
}
final String fieldNamesProperty = Preconditions.checkNotNull(properties.getProperty(Constants.DRUID_QUERY_FIELD_NAMES, null)); final String fieldTypesProperty = Preconditions.checkNotNull(properties.getProperty(Constants.DRUID_QUERY_FIELD_TYPES, null)); if (fieldNamesProperty.isEmpty()) { columnNames = Arrays.stream(fieldNamesProperty.trim().split(",")).collect(Collectors.toList()); columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(fieldTypesProperty) .stream() .map(e -> TypeInfoFactory.getPrimitiveTypeInfo(e.getTypeName())) .map(primitiveTypeInfo -> { if (primitiveTypeInfo instanceof TimestampLocalTZTypeInfo) { columns[i] = columnNames.get(i); types[i] = columnTypes.get(i); inspectors.add(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(types[i]));
this.serdeName = serdeName; String nullString = tbl.getProperty( serdeConstants.SERIALIZATION_NULL_FORMAT, "\\N"); nullSequence = new Text(nullString); .getProperty(serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST); lastColumnTakesRest = (lastColumnTakesRestString != null && lastColumnTakesRestString .equalsIgnoreCase("true")); rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes); String escapeProperty = tbl.getProperty(serdeConstants.ESCAPE_CHAR); escaped = (escapeProperty != null); if (escaped) {
/**
 * Verifies decimal subtraction: decimal(3,1) - decimal(5,2) widens the
 * result type to decimal(6,2), and 14.5 - 234.97 evaluates to -220.47.
 */
@Test
public void testDecimalMinusDecimal() throws HiveException {
  GenericUDFOPMinus udf = new GenericUDFOPMinus();

  ObjectInspector leftOI = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(3, 1));
  ObjectInspector rightOI = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(5, 2));

  PrimitiveObjectInspector resultOI =
      (PrimitiveObjectInspector) udf.initialize(new ObjectInspector[] {leftOI, rightOI});
  // The result type must widen enough to hold the difference exactly.
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(6, 2), resultOI.getTypeInfo());

  DeferredObject[] args = {
      new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("14.5"))),
      new DeferredJavaObject(new HiveDecimalWritable(HiveDecimal.create("234.97"))),
  };
  HiveDecimalWritable result = (HiveDecimalWritable) udf.evaluate(args);
  Assert.assertEquals(HiveDecimal.create("-220.47"), result.getHiveDecimal());
}
if (typeEntry.primitiveCategory == PrimitiveCategory.VARCHAR) { BaseCharUtils.validateVarcharParameter(length); return TypeInfoFactory.getVarcharTypeInfo(length); } else { BaseCharUtils.validateCharParameter(length); return TypeInfoFactory.getCharTypeInfo(length); params.length + " is seen"); return TypeInfoFactory.getDecimalTypeInfo(precision, scale); return TypeInfoFactory.getPrimitiveTypeInfo(typeEntry.typeName); TypeInfo listElementType = parseType(); expect(">"); return TypeInfoFactory.getListTypeInfo(listElementType); TypeInfo mapValueType = parseType(); expect(">"); return TypeInfoFactory.getMapTypeInfo(mapKeyType, mapValueType); } while (true); return TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos); } while (true); return TypeInfoFactory.getUnionTypeInfo(objectTypeInfos);
return orcFields.get(0); } else { return TypeInfoFactory.getUnionTypeInfo(orcFields); return TypeInfoFactory.getListTypeInfo(getOrcField(fieldSchema.getElementType())); return TypeInfoFactory.getMapTypeInfo( getPrimitiveOrcTypeFromPrimitiveAvroType(Schema.Type.STRING), getOrcField(fieldSchema.getValueType())); orcFields.add(getOrcField(avroField.schema())); }); return TypeInfoFactory.getStructTypeInfo(orcFieldNames, orcFields);
case PRIMITIVE: { PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi; result = poi.getTypeInfo(); break; ListObjectInspector loi = (ListObjectInspector) oi; result = TypeInfoFactory .getListTypeInfo(getTypeInfoFromObjectInspector(loi .getListElementObjectInspector())); break; result = TypeInfoFactory.getMapTypeInfo( getTypeInfoFromObjectInspector(moi.getMapKeyObjectInspector()), getTypeInfoFromObjectInspector(moi.getMapValueObjectInspector())); break; .getFieldObjectInspector())); result = TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos); break; objectTypeInfos.add(getTypeInfoFromObjectInspector(eoi)); result = TypeInfoFactory.getUnionTypeInfo(objectTypeInfos); break;
/**
 * Asserts that dividing decimal(prec1,scale1) by decimal(prec2,scale2)
 * yields a result typed decimal(prec3,scale3).
 */
private void testDecimalDivisionResultType(int prec1, int scale1, int prec2, int scale2,
    int prec3, int scale3) throws HiveException {
  GenericUDFOPDivide udf = new GenericUDFOPDivide();
  ObjectInspector numeratorOI = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(prec1, scale1));
  ObjectInspector denominatorOI = PrimitiveObjectInspectorFactory
      .getPrimitiveWritableObjectInspector(TypeInfoFactory.getDecimalTypeInfo(prec2, scale2));
  PrimitiveObjectInspector resultOI = (PrimitiveObjectInspector)
      udf.initialize(new ObjectInspector[] {numeratorOI, denominatorOI});
  Assert.assertEquals(TypeInfoFactory.getDecimalTypeInfo(prec3, scale3), resultOI.getTypeInfo());
}
/**
 * Verifies that unary plus on a char input coerces the value to double:
 * +'32300.004747' (char(12)) evaluates to 32300.004747 with a double OI.
 */
@Test
public void testChar() throws HiveException {
  GenericUDFOPPositive udf = new GenericUDFOPPositive();
  HiveChar vc = new HiveChar("32300.004747", 12);
  HiveCharWritable input = new HiveCharWritable(vc);
  CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
  };
  DeferredObject[] args = { new DeferredJavaObject(input) };

  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // Unary '+' promotes character types to double.
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());

  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  // Compare as primitives: avoids the deprecated new Double(..) boxing
  // constructor; a zero delta keeps the comparison exact, as before.
  Assert.assertEquals(32300.004747, res.get(), 0.0);
}
int varcharLength = varcharType.getLength(); if (varcharLength <= HiveVarchar.MAX_VARCHAR_LENGTH) { return getVarcharTypeInfo(varcharLength); int charLength = charType.getLength(); if (charLength <= HiveChar.MAX_CHAR_LENGTH) { return getCharTypeInfo(charLength); return getListTypeInfo(elementType); return getMapTypeInfo(keyType, valueType); return getStructTypeInfo( fieldNames.build(), type.getTypeParameters().stream()
col1.add(new Text("2")); col1.add(new Text("3")); col1Type = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo); cola = new ArrayList<Text>(); cola.add(new Text("a")); cola.add(new Text("b")); cola.add(new Text("c")); colaType = TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo); try { data = new ArrayList<Object>(); typeInfos.add(col1Type); typeInfos.add(colaType); dataType = TypeInfoFactory.getStructTypeInfo(names, typeInfos); .getStandardWritableObjectInspectorFromTypeInfo(dataType); } catch (Throwable e) { e.printStackTrace();
/**
 * Verifies that unary plus on a varchar input coerces the value to double:
 * +'32300.004747' (varchar(12)) evaluates to 32300.004747 with a double OI.
 */
@Test
public void testVarchar() throws HiveException {
  GenericUDFOPPositive udf = new GenericUDFOPPositive();
  HiveVarchar vc = new HiveVarchar("32300.004747", 12);
  HiveVarcharWritable input = new HiveVarcharWritable(vc);
  VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
  ObjectInspector[] inputOIs = {
      PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
  };
  DeferredObject[] args = { new DeferredJavaObject(input) };

  PrimitiveObjectInspector oi = (PrimitiveObjectInspector) udf.initialize(inputOIs);
  // Unary '+' promotes character types to double.
  Assert.assertEquals(TypeInfoFactory.doubleTypeInfo, oi.getTypeInfo());

  DoubleWritable res = (DoubleWritable) udf.evaluate(args);
  // Compare as primitives: avoids the deprecated new Double(..) boxing
  // constructor; a zero delta keeps the comparison exact, as before.
  Assert.assertEquals(32300.004747, res.get(), 0.0);
}
/**
 * Produces a copy of this struct type containing only the selected fields,
 * recursing into child entries that carry their own pruned sub-structures.
 *
 * @return the pruned struct type info
 */
StructTypeInfo prune() {
  List<String> keptNames = new ArrayList<>();
  List<TypeInfo> keptTypes = new ArrayList<>();
  List<String> allNames = typeInfo.getAllStructFieldNames();
  List<TypeInfo> allTypes = typeInfo.getAllStructFieldTypeInfos();
  for (int i = 0; i < allNames.size(); ++i) {
    if (!selected[i]) {
      continue;
    }
    String fieldName = allNames.get(i);
    keptNames.add(fieldName);
    // Children are keyed by lower-cased field name; a present child means
    // the field's subtree was itself pruned and replaces the original type.
    String key = fieldName.toLowerCase();
    keptTypes.add(children.containsKey(key)
        ? children.get(key).prune()
        : allTypes.get(i));
  }
  return (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(keptNames, keptTypes);
}
}
/**
 * Maps an Avro ENUM schema to its Hive representation: enum values are
 * surfaced as plain strings.
 *
 * @param schema the Avro enum schema
 * @return the primitive string TypeInfo
 */
private static TypeInfo generateEnumTypeInfo(Schema schema) {
  assert schema.getType().equals(Schema.Type.ENUM);
  return TypeInfoFactory.getPrimitiveTypeInfo("string");
}
}