/**
 * Parses a Hive type name (e.g. {@code "map<string,int>"}) into a {@link HiveType}.
 * Registered as the Jackson creator so serialized type names deserialize through here.
 *
 * @throws NullPointerException if {@code hiveTypeName} is null
 */
@JsonCreator
public static HiveType valueOf(String hiveTypeName)
{
    requireNonNull(hiveTypeName, "hiveTypeName is null");
    TypeInfo typeInfo = getTypeInfoFromTypeString(hiveTypeName);
    return toHiveType(typeInfo);
}
/**
 * Coerces values of one Hive map type into another, coercing keys and values
 * independently. A {@code null} key or value coercer means that side already has
 * the target type and is passed through unchanged.
 *
 * @throws ClassCastException if either Hive type is not a map type
 */
public MapCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType)
{
    // Fixed message typo: was "typeManage is null"
    requireNonNull(typeManager, "typeManager is null");
    requireNonNull(fromHiveType, "fromHiveType is null");
    this.toType = requireNonNull(toHiveType, "toHiveType is null").getType(typeManager);

    // Cast once per side instead of once per component
    MapTypeInfo fromTypeInfo = (MapTypeInfo) fromHiveType.getTypeInfo();
    MapTypeInfo toTypeInfo = (MapTypeInfo) toHiveType.getTypeInfo();
    HiveType fromKeyHiveType = HiveType.valueOf(fromTypeInfo.getMapKeyTypeInfo().getTypeName());
    HiveType fromValueHiveType = HiveType.valueOf(fromTypeInfo.getMapValueTypeInfo().getTypeName());
    HiveType toKeyHiveType = HiveType.valueOf(toTypeInfo.getMapKeyTypeInfo().getTypeName());
    HiveType toValueHiveType = HiveType.valueOf(toTypeInfo.getMapValueTypeInfo().getTypeName());

    // Only build coercers for the components whose types actually differ
    this.keyCoercer = fromKeyHiveType.equals(toKeyHiveType) ? null : createCoercer(typeManager, fromKeyHiveType, toKeyHiveType);
    this.valueCoercer = fromValueHiveType.equals(toValueHiveType) ? null : createCoercer(typeManager, fromValueHiveType, toValueHiveType);
}
/**
 * Round-trips a Presto type through its Hive representation, yielding the
 * canonical Presto type that this connector uses for the corresponding Hive type.
 */
private Type canonicalizeType(Type type)
{
    HiveType hiveType = HiveType.toHiveType(typeTranslator, type);
    TypeSignature canonicalSignature = hiveType.getTypeSignature();
    return TYPE_MANAGER.getType(canonicalSignature);
}
/**
 * Returns whether this Hive type can be represented by the connector,
 * delegating to the static check on the underlying {@code TypeInfo}.
 */
public boolean isSupportedType()
{
    TypeInfo typeInfo = getTypeInfo();
    return isSupportedType(typeInfo);
}
/**
 * Determines whether a value of {@code fromHiveType} can be coerced to
 * {@code toHiveType}. Supports varchar &lt;-&gt; integral conversions in both
 * directions, widening of integral and float types, and recursive coercion of
 * list, map, and struct types.
 */
@Override
public boolean canCoerce(HiveType fromHiveType, HiveType toHiveType)
{
    Type fromType = typeManager.getType(fromHiveType.getTypeSignature());
    Type toType = typeManager.getType(toHiveType.getTypeSignature());

    // varchar may be coerced to any integral type...
    if (fromType instanceof VarcharType) {
        return toHiveType.equals(HIVE_BYTE)
                || toHiveType.equals(HIVE_SHORT)
                || toHiveType.equals(HIVE_INT)
                || toHiveType.equals(HIVE_LONG);
    }
    // ...and any integral type may be coerced to varchar
    if (toType instanceof VarcharType) {
        return fromHiveType.equals(HIVE_BYTE)
                || fromHiveType.equals(HIVE_SHORT)
                || fromHiveType.equals(HIVE_INT)
                || fromHiveType.equals(HIVE_LONG);
    }

    // Integral widening: each type may widen to any strictly larger one
    if (fromHiveType.equals(HIVE_BYTE)) {
        return toHiveType.equals(HIVE_SHORT) || toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG);
    }
    if (fromHiveType.equals(HIVE_SHORT)) {
        return toHiveType.equals(HIVE_INT) || toHiveType.equals(HIVE_LONG);
    }
    if (fromHiveType.equals(HIVE_INT)) {
        return toHiveType.equals(HIVE_LONG);
    }

    // float widens to double
    if (fromHiveType.equals(HIVE_FLOAT)) {
        return toHiveType.equals(HIVE_DOUBLE);
    }

    // Fall through to the structural (container) coercion checks
    return canCoerceForList(fromHiveType, toHiveType)
            || canCoerceForMap(fromHiveType, toHiveType)
            || canCoerceForStruct(fromHiveType, toHiveType);
}
/**
 * Returns whether two map types are coercion-compatible: both must be maps, and
 * each component (key, value) must either match exactly or itself be coercible.
 */
private boolean canCoerceForMap(HiveType fromHiveType, HiveType toHiveType)
{
    if (!fromHiveType.getCategory().equals(Category.MAP) || !toHiveType.getCategory().equals(Category.MAP)) {
        return false;
    }

    MapTypeInfo fromMapInfo = (MapTypeInfo) fromHiveType.getTypeInfo();
    MapTypeInfo toMapInfo = (MapTypeInfo) toHiveType.getTypeInfo();
    HiveType fromKeyType = HiveType.valueOf(fromMapInfo.getMapKeyTypeInfo().getTypeName());
    HiveType fromValueType = HiveType.valueOf(fromMapInfo.getMapValueTypeInfo().getTypeName());
    HiveType toKeyType = HiveType.valueOf(toMapInfo.getMapKeyTypeInfo().getTypeName());
    HiveType toValueType = HiveType.valueOf(toMapInfo.getMapValueTypeInfo().getTypeName());

    // Keys incompatible: no need to examine values (preserves original short-circuit)
    if (!fromKeyType.equals(toKeyType) && !canCoerce(fromKeyType, toKeyType)) {
        return false;
    }
    return fromValueType.equals(toValueType) || canCoerce(fromValueType, toValueType);
}
/**
 * Builds column handles for the given test columns. Regular columns get
 * consecutive physical hive column indexes; partition keys get index -1 and the
 * PARTITION_KEY column type.
 */
protected List<HiveColumnHandle> getColumnHandles(List<TestColumn> testColumns)
{
    List<HiveColumnHandle> columns = new ArrayList<>();
    int nextHiveColumnIndex = 0;
    for (TestColumn testColumn : testColumns) {
        boolean partitionKey = testColumn.isPartitionKey();
        // Partition keys are not physical file columns, so they carry no index
        int columnIndex = partitionKey ? -1 : nextHiveColumnIndex++;
        HiveType hiveType = HiveType.valueOf(testColumn.getObjectInspector().getTypeName());
        columns.add(new HiveColumnHandle(
                testColumn.getName(),
                hiveType,
                hiveType.getTypeSignature(),
                columnIndex,
                partitionKey ? PARTITION_KEY : REGULAR,
                Optional.empty()));
    }
    return columns;
}
/**
 * Parses the stored Hive type name back into a {@link HiveType}.
 */
public HiveType toHiveType() { return HiveType.valueOf(value); }
public static List<HiveColumnHandle> hiveColumnHandles(String connectorId, Table table) { ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder(); // add the data fields first int hiveColumnIndex = 0; for (FieldSchema field : table.getSd().getCols()) { // ignore unsupported types rather than failing TypeInfo typeInfo = getTypeInfoFromTypeString(field.getType()); if (HiveType.isSupportedType(typeInfo)) { HiveType hiveType = toHiveType(typeInfo); columns.add(new HiveColumnHandle(connectorId, field.getName(), hiveType, hiveType.getTypeSignature(), hiveColumnIndex, false)); } hiveColumnIndex++; } // add the partition keys last (like Hive does) columns.addAll(getPartitionKeyColumnHandles(connectorId, table)); return columns.build(); }
/**
 * Coerces struct (row) values between two Hive struct types via a record cursor.
 * Fields are matched by position; a {@code null} entry in {@code coercers} means
 * that field's type is unchanged. Fields present only in the target struct
 * (beyond the source's field count) have no coercer and no source value.
 *
 * @throws ClassCastException if either Hive type is not a struct type
 */
public StructCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType, BridgingRecordCursor bridgingRecordCursor)
{
    // Fixed message typo: was "typeManage is null"
    requireNonNull(typeManager, "typeManager is null");
    requireNonNull(fromHiveType, "fromHiveType is null");
    requireNonNull(toHiveType, "toHiveType is null");
    this.bridgingRecordCursor = requireNonNull(bridgingRecordCursor, "bridgingRecordCursor is null");

    List<HiveType> fromFieldHiveTypes = extractStructFieldTypes(fromHiveType);
    List<HiveType> toFieldHiveTypes = extractStructFieldTypes(toHiveType);
    this.fromFieldTypes = fromHiveType.getType(typeManager).getTypeParameters();
    this.toType = toHiveType.getType(typeManager);
    this.toFieldTypes = toType.getTypeParameters();

    // One slot per target field; only positions whose Presto types differ get a coercer
    this.coercers = new Coercer[toFieldHiveTypes.size()];
    for (int i = 0; i < min(fromFieldHiveTypes.size(), toFieldHiveTypes.size()); i++) {
        if (!fromFieldTypes.get(i).equals(toFieldTypes.get(i))) {
            coercers[i] = createCoercer(typeManager, fromFieldHiveTypes.get(i), toFieldHiveTypes.get(i), bridgingRecordCursor);
        }
    }
    this.pageBuilder = new PageBuilder(ImmutableList.of(toType));
}
/**
 * Extracts the decimal type from a {@link HiveType}, if it represents one,
 * by delegating to the string-based overload.
 */
public static Optional<DecimalType> getDecimalType(HiveType hiveType)
{
    String hiveTypeName = hiveType.getHiveTypeName().toString();
    return getDecimalType(hiveTypeName);
}
// Resolve the file's declared column types (the serde property is a list of Hive
// type names; missing property defaults to "") into Presto Type objects.
List<Type> fileColumnTypes = toHiveTypes(schema.getProperty(META_TABLE_COLUMN_TYPES, "")).stream() .map(hiveType -> hiveType.getType(typeManager)) .collect(toList());
/**
 * Coerces array values between two Hive list types by coercing each element.
 * A {@code null} element coercer means the element type is unchanged and values
 * pass through as-is.
 *
 * @throws ClassCastException if either Hive type is not a list type
 */
public ListCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType)
{
    // Fixed message typo: was "typeManage is null"
    requireNonNull(typeManager, "typeManager is null");
    requireNonNull(fromHiveType, "fromHiveType is null");
    requireNonNull(toHiveType, "toHiveType is null");
    HiveType fromElementHiveType = HiveType.valueOf(((ListTypeInfo) fromHiveType.getTypeInfo()).getListElementTypeInfo().getTypeName());
    HiveType toElementHiveType = HiveType.valueOf(((ListTypeInfo) toHiveType.getTypeInfo()).getListElementTypeInfo().getTypeName());
    this.elementCoercer = fromElementHiveType.equals(toElementHiveType) ? null : createCoercer(typeManager, fromElementHiveType, toElementHiveType);
}
/**
 * Returns the Presto type signature corresponding to this Hive type's TypeInfo.
 */
public TypeSignature getTypeSignature() { return getTypeSignature(typeInfo); }
public static List<HiveColumnHandle> getRegularColumnHandles(Table table) { ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder(); int hiveColumnIndex = 0; for (Column field : table.getDataColumns()) { // ignore unsupported types rather than failing HiveType hiveType = field.getType(); if (hiveType.isSupportedType()) { columns.add(new HiveColumnHandle(field.getName(), hiveType, hiveType.getTypeSignature(), hiveColumnIndex, REGULAR, field.getComment())); } hiveColumnIndex++; } return columns.build(); }
/**
 * Coerces struct (row) values between two Hive struct types, matching fields by
 * position. Target fields beyond the source's field count are filled with nulls
 * (via precomputed single-null blocks); a {@code null} entry in {@code coercers}
 * means that field needs no conversion.
 *
 * @throws ClassCastException if either Hive type is not a struct type
 */
public StructCoercer(TypeManager typeManager, HiveType fromHiveType, HiveType toHiveType)
{
    // Fixed message typo: was "typeManage is null"
    requireNonNull(typeManager, "typeManager is null");
    requireNonNull(fromHiveType, "fromHiveType is null");
    requireNonNull(toHiveType, "toHiveType is null");
    List<HiveType> fromFieldTypes = extractStructFieldTypes(fromHiveType);
    List<HiveType> toFieldTypes = extractStructFieldTypes(toHiveType);
    this.coercers = new Function[toFieldTypes.size()];
    this.nullBlocks = new Block[toFieldTypes.size()];
    for (int i = 0; i < coercers.length; i++) {
        if (i >= fromFieldTypes.size()) {
            // Target has more fields than the source: fill with null
            nullBlocks[i] = toFieldTypes.get(i).getType(typeManager).createBlockBuilder(null, 1).appendNull().build();
        }
        else if (!fromFieldTypes.get(i).equals(toFieldTypes.get(i))) {
            coercers[i] = createCoercer(typeManager, fromFieldTypes.get(i), toFieldTypes.get(i));
        }
    }
}
Type primitiveType = getPrimitiveType((PrimitiveTypeInfo) typeInfo); if (primitiveType == null) { break; case MAP: MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo; TypeSignature keyType = getTypeSignature(mapTypeInfo.getMapKeyTypeInfo()); TypeSignature valueType = getTypeSignature(mapTypeInfo.getMapValueTypeInfo()); return new TypeSignature( StandardTypes.MAP, case LIST: ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo; TypeSignature elementType = getTypeSignature(listTypeInfo.getListElementTypeInfo()); return new TypeSignature( StandardTypes.ARRAY, TypeSignature typeSignature = getTypeSignature(structFieldTypeInfos.get(i));
/**
 * Appends a new column to an existing Hive table by extending the storage
 * descriptor's column list and altering the table in the metastore.
 *
 * @throws PrestoException if column addition is disabled for this catalog
 * @throws TableNotFoundException if the table does not exist in the metastore
 */
@Override
public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnMetadata column)
{
    if (!allowAddColumn) {
        throw new PrestoException(PERMISSION_DENIED, "Adding Columns is disabled in this Hive catalog");
    }

    HiveTableHandle handle = checkType(tableHandle, HiveTableHandle.class, "tableHandle");
    Optional<Table> tableMetadata = metastore.getTable(handle.getSchemaName(), handle.getTableName());
    if (!tableMetadata.isPresent()) {
        throw new TableNotFoundException(handle.getSchemaTableName());
    }

    // Rebuild the column list with the new column appended at the end
    Table table = tableMetadata.get();
    StorageDescriptor sd = table.getSd();
    ImmutableList.Builder<FieldSchema> columns = ImmutableList.builder();
    columns.addAll(sd.getCols());
    columns.add(new FieldSchema(column.getName(), toHiveType(column.getType()).getHiveTypeName(), column.getComment()));
    sd.setCols(columns.build());
    table.setSd(sd);

    metastore.alterTable(handle.getSchemaName(), handle.getTableName(), table);
}
/**
 * Returns the Hive types of all fields of the given struct type, in declaration
 * order.
 *
 * @throws ClassCastException if {@code hiveType} is not a struct type
 */
public static List<HiveType> extractStructFieldTypes(HiveType hiveType)
{
    StructTypeInfo structTypeInfo = (StructTypeInfo) hiveType.getTypeInfo();
    return structTypeInfo.getAllStructFieldTypeInfos().stream()
            .map(fieldTypeInfo -> HiveType.valueOf(fieldTypeInfo.getTypeName()))
            .collect(toImmutableList());
}