private static Map<Schema.Type, TypeInfo> initTypeMap() {
  // Lookup table mapping each primitive Avro schema type to its Hive TypeInfo
  // equivalent. Both BYTES and FIXED map to Hive "binary".
  // NOTE(review): Hashtable is a legacy synchronized map; since the result is
  // frozen via unmodifiableMap before publication, a HashMap/EnumMap would do —
  // kept as-is to avoid touching imports.
  Map<Schema.Type, TypeInfo> avroToHive = new Hashtable<Schema.Type, TypeInfo>();
  avroToHive.put(NULL, TypeInfoFactory.getPrimitiveTypeInfo("void"));
  avroToHive.put(BOOLEAN, TypeInfoFactory.getPrimitiveTypeInfo("boolean"));
  avroToHive.put(INT, TypeInfoFactory.getPrimitiveTypeInfo("int"));
  avroToHive.put(LONG, TypeInfoFactory.getPrimitiveTypeInfo("bigint"));
  avroToHive.put(FLOAT, TypeInfoFactory.getPrimitiveTypeInfo("float"));
  avroToHive.put(DOUBLE, TypeInfoFactory.getPrimitiveTypeInfo("double"));
  avroToHive.put(BYTES, TypeInfoFactory.getPrimitiveTypeInfo("binary"));
  avroToHive.put(FIXED, TypeInfoFactory.getPrimitiveTypeInfo("binary"));
  avroToHive.put(STRING, TypeInfoFactory.getPrimitiveTypeInfo("string"));
  // Expose a read-only view so callers cannot mutate the shared table.
  return Collections.unmodifiableMap(avroToHive);
}
private static TypeInfo generateEnumTypeInfo(Schema schema) {
  // Hive has no enum type, so Avro enums are surfaced as plain strings.
  assert schema.getType().equals(Schema.Type.ENUM);
  return TypeInfoFactory.getPrimitiveTypeInfo("string");
}
}
public static TypeInfo getPrimitiveTypeInfoFromPrimitiveWritable(
    Class<?> clazz) {
  // Resolve the Writable class to its Hive type name, then go through the
  // shared TypeInfo cache. An unknown class is a programming error.
  String hiveTypeName =
      PrimitiveObjectInspectorUtils.getTypeNameFromPrimitiveWritable(clazz);
  if (hiveTypeName == null) {
    throw new RuntimeException("Internal error: Cannot get typeName for " + clazz);
  }
  return getPrimitiveTypeInfo(hiveTypeName);
}
public static VarcharTypeInfo getVarcharTypeInfo(int length) {
  // Build the qualified name, e.g. "varchar(10)", and resolve it through the
  // primitive TypeInfo cache; the cache returns a VarcharTypeInfo for this name.
  String qualifiedName =
      BaseCharTypeInfo.getQualifiedName(serdeConstants.VARCHAR_TYPE_NAME, length);
  return (VarcharTypeInfo) getPrimitiveTypeInfo(qualifiedName);
}
public static DecimalTypeInfo getDecimalTypeInfo(int precision, int scale) {
  // Build the qualified name, e.g. "decimal(10,2)", and resolve it through the
  // primitive TypeInfo cache.
  String qualifiedName = DecimalTypeInfo.getQualifiedName(precision, scale);
  return (DecimalTypeInfo) getPrimitiveTypeInfo(qualifiedName);
}
public static TimestampLocalTZTypeInfo getTimestampTZTypeInfo(ZoneId defaultTimeZone) {
  // Qualify the timestamp-with-local-timezone type with the given zone and
  // resolve it through the primitive TypeInfo cache.
  String qualifiedName = TimestampLocalTZTypeInfo.getQualifiedName(defaultTimeZone);
  return (TimestampLocalTZTypeInfo) getPrimitiveTypeInfo(qualifiedName);
}
public static CharTypeInfo getCharTypeInfo(int length) {
  // Build the qualified name, e.g. "char(10)", and resolve it through the
  // primitive TypeInfo cache; the cache returns a CharTypeInfo for this name.
  String qualifiedName =
      BaseCharTypeInfo.getQualifiedName(serdeConstants.CHAR_TYPE_NAME, length);
  return (CharTypeInfo) getPrimitiveTypeInfo(qualifiedName);
}
public static TypeInfo getPrimitiveTypeInfoFromJavaPrimitive(Class<?> clazz) {
  // Resolve the Java primitive class to its Hive type name, then go through the
  // shared TypeInfo cache.
  String typeName = PrimitiveObjectInspectorUtils.getTypeNameFromPrimitiveJava(clazz);
  // Guard against an unrecognized class: getTypeNameFromPrimitiveJava returns
  // null for unknown classes, which previously surfaced as an uninformative
  // NullPointerException inside getPrimitiveTypeInfo. Fail with the same
  // descriptive error the Writable variant of this method uses.
  if (typeName == null) {
    throw new RuntimeException("Internal error: Cannot get typeName for " + clazz);
  }
  return getPrimitiveTypeInfo(typeName);
}
private static List<PrimitiveTypeInfo> extractPartColTypes(Table tab) {
  // Collect one TypeInfo per partition column, preserving declaration order.
  List<FieldSchema> partCols = tab.getPartCols();
  List<PrimitiveTypeInfo> typeInfos =
      new ArrayList<PrimitiveTypeInfo>(partCols.size());
  for (FieldSchema partCol : partCols) {
    typeInfos.add(TypeInfoFactory.getPrimitiveTypeInfo(partCol.getType()));
  }
  return typeInfos;
}
public void initialize(InputStream in, Configuration conf, Properties tbl)
    throws IOException {
  // Wire the typed-bytes reader/writer around the raw input stream and the
  // shared output byte buffer.
  din = new DataInputStream(in);
  tbIn = new TypedBytesWritableInput(din);
  tbOut = new TypedBytesWritableOutput(barrStr);
  String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
  // Properties.getProperty returns null when the key is absent; previously this
  // surfaced as a NullPointerException on split(). Fail with a clear message.
  if (columnTypeProperty == null) {
    throw new IOException(
        "Missing required table property: " + serdeConstants.LIST_COLUMN_TYPES);
  }
  // NOTE(review): a plain comma split cannot handle parameterized types that
  // themselves contain commas (e.g. "decimal(10,2)") — confirm whether such
  // types can appear here or whether a type-string parser should be used.
  columnTypes = Arrays.asList(columnTypeProperty.split(","));
  // Build one writable object inspector per declared column type.
  for (String columnType : columnTypes) {
    PrimitiveTypeInfo dstTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(columnType);
    dstOIns.add(
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(dstTypeInfo));
  }
}
private static List<PrimitiveTypeInfo> extractPartColTypes(Table tab) {
  // Map each partition column's declared type string to its cached TypeInfo,
  // keeping the columns' declaration order.
  List<FieldSchema> cols = tab.getPartCols();
  List<PrimitiveTypeInfo> result = new ArrayList<PrimitiveTypeInfo>(cols.size());
  for (FieldSchema col : cols) {
    result.add(TypeInfoFactory.getPrimitiveTypeInfo(col.getType()));
  }
  return result;
}
@Override
protected void setUp() {
  // Resolve the varchar/char TypeInfo fixtures used by the tests, including a
  // varchar at the maximum supported length.
  maxVarchar = TypeInfoFactory.getPrimitiveTypeInfo(
      "varchar(" + HiveVarchar.MAX_VARCHAR_LENGTH + ")");
  varchar10 = TypeInfoFactory.getPrimitiveTypeInfo("varchar(10)");
  varchar5 = TypeInfoFactory.getPrimitiveTypeInfo("varchar(5)");
  char10 = TypeInfoFactory.getPrimitiveTypeInfo("char(10)");
  char5 = TypeInfoFactory.getPrimitiveTypeInfo("char(5)");
  // A live session is required by the code under test.
  SessionState.start(new HiveConf());
}
private StructObjectInspector getPartitionKeyOI(TableDesc tableDesc) throws Exception {
  // Partition column names are "/"-separated and their types ":"-separated in
  // the table properties; the two lists are parallel.
  String pcols = tableDesc.getProperties().getProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMNS);
  String pcolTypes = tableDesc.getProperties().getProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_PARTITION_COLUMN_TYPES);
  String[] keyNames = pcols.trim().split("/");
  String[] keyTypes = pcolTypes.trim().split(":");
  // One writable primitive inspector per partition key, by position.
  ObjectInspector[] keyInspectors = new ObjectInspector[keyNames.length];
  for (int i = 0; i < keyNames.length; i++) {
    PrimitiveTypeInfo keyTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(keyTypes[i]);
    keyInspectors[i] =
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(keyTypeInfo);
  }
  return ObjectInspectorFactory.getStandardStructObjectInspector(
      Arrays.asList(keyNames), Arrays.asList(keyInspectors));
}
public ExpressionBuilder(Table table, Map<String, String> partSpecs) {
  this.partSpecs = partSpecs;
  // Index each partition column's TypeInfo by its lower-cased name so later
  // lookups are case-insensitive.
  for (FieldSchema partKey : table.getPartitionKeys()) {
    String name = partKey.getName().toLowerCase();
    partColumnTypesMap.put(name, TypeInfoFactory.getPrimitiveTypeInfo(partKey.getType()));
  }
}
public AbstractPrimitiveWritableObjectInspector getObjectInspector() {
  // Round-trip through the type name to obtain the canonical cached
  // PrimitiveTypeInfo, then hand out the matching writable inspector.
  PrimitiveTypeInfo canonical =
      TypeInfoFactory.getPrimitiveTypeInfo(typeInfo.getTypeName());
  return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(canonical);
}
private TypeInfo getTypeInfo() throws Exception {
  // Builds struct<an_int:int, a_long:bigint, a_double:double, a_string:string>.
  List<String> fieldNames = new ArrayList<String>(4);
  List<TypeInfo> fieldTypes = new ArrayList<TypeInfo>(4);
  fieldNames.add("an_int");
  fieldTypes.add(TypeInfoFactory.getPrimitiveTypeInfo("int"));
  fieldNames.add("a_long");
  fieldTypes.add(TypeInfoFactory.getPrimitiveTypeInfo("bigint"));
  fieldNames.add("a_double");
  fieldTypes.add(TypeInfoFactory.getPrimitiveTypeInfo("double"));
  fieldNames.add("a_string");
  fieldTypes.add(TypeInfoFactory.getPrimitiveTypeInfo("string"));
  return TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypes);
}
/**
 * Generate a TypeInfo for an Avro Map. Avro restricts map keys to strings,
 * so only the value type needs to be converted recursively.
 */
private static TypeInfo generateMapTypeInfo(Schema schema, Set<Schema> seenSchemas)
    throws AvroSerdeException {
  assert schema.getType().equals(Schema.Type.MAP);
  TypeInfo valueTypeInfo = generateTypeInfo(schema.getValueType(), seenSchemas);
  TypeInfo keyTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo("string");
  return TypeInfoFactory.getMapTypeInfo(keyTypeInfo, valueTypeInfo);
}
public TypeDescriptor(String typeName) {
  this.type = Type.getType(typeName);
  if (this.type.isComplexType()) {
    // Complex types keep their full type string verbatim.
    this.typeName = typeName;
  } else if (this.type.isQualifiedType()) {
    // Qualified primitives (char/varchar/decimal) carry qualifiers such as
    // length or precision/scale, extracted from the parsed TypeInfo.
    setTypeQualifiers(TypeQualifiers.fromTypeInfo(
        TypeInfoFactory.getPrimitiveTypeInfo(typeName)));
  }
  // NOTE(review): plain unqualified primitives take neither branch, leaving
  // this.typeName unset here — presumably derived from this.type elsewhere;
  // confirm against the rest of the class.
}
@Test public void testUnaryMinusChecked() throws HiveException { VectorizedRowBatch vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(1024, 2, 23); LongColUnaryMinusChecked expr = new LongColUnaryMinusChecked(0, 1); expr.setOutputTypeInfo(TypeInfoFactory.getPrimitiveTypeInfo("bigint")); expr.evaluate(vrg); //verify long[] inVector = ((LongColumnVector) vrg.cols[0]).vector; long[] outVector = ((LongColumnVector) vrg.cols[1]).vector; for (int i = 0; i < outVector.length; i++) { assertEquals(0, inVector[i]+outVector[i]); } } }
@Test public void testUnaryMinusCheckedOverflow() throws HiveException { VectorizedRowBatch vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(1, 2, 0); //set value to MIN_VALUE so that -MIN_VALUE overflows and gets set to MIN_VALUE again ((LongColumnVector)vrg.cols[0]).vector[0] = Integer.MIN_VALUE; LongColUnaryMinusChecked expr = new LongColUnaryMinusChecked(0, 1); expr.setOutputTypeInfo(TypeInfoFactory.getPrimitiveTypeInfo("int")); expr.evaluate(vrg); //verify long[] inVector = ((LongColumnVector) vrg.cols[0]).vector; long[] outVector = ((LongColumnVector) vrg.cols[1]).vector; for (int i = 0; i < outVector.length; i++) { assertEquals(Integer.MIN_VALUE, outVector[i]); } }