@JsonCreator
public static HiveType valueOf(String hiveTypeName)
{
    requireNonNull(hiveTypeName, "hiveTypeName is null");
    return toHiveType(getTypeInfoFromTypeString(hiveTypeName));
}
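A minimal usage sketch, assuming only the valueOf factory above; the type strings and variable names are illustrative and not taken from the surrounding snippets. It shows that a Hive type name is parsed via getTypeInfoFromTypeString into a HiveType that can then feed the column-handle and schema builders later in this section.

// Illustrative only: these Hive type names are arbitrary examples, not from the code above.
HiveType mapType = HiveType.valueOf("map<string,int>");
HiveType rowType = HiveType.valueOf("struct<id:bigint,name:string>");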
@Override
public Optional<ConnectorNewTableLayout> getNewTableLayout(ConnectorSession session, ConnectorTableMetadata tableMetadata)
{
    validatePartitionColumns(tableMetadata);
    validateBucketColumns(tableMetadata);
    Optional<HiveBucketProperty> bucketProperty = getBucketProperty(tableMetadata.getProperties());
    if (!bucketProperty.isPresent()) {
        return Optional.empty();
    }
    if (!bucketProperty.get().getSortedBy().isEmpty() && !isSortedWritingEnabled(session)) {
        throw new PrestoException(NOT_SUPPORTED, "Writing to bucketed sorted Hive tables is disabled");
    }
    List<String> bucketedBy = bucketProperty.get().getBucketedBy();
    Map<String, HiveType> hiveTypeMap = tableMetadata.getColumns().stream()
            .collect(toMap(ColumnMetadata::getName, column -> toHiveType(typeTranslator, column.getType())));
    return Optional.of(new ConnectorNewTableLayout(
            new HivePartitioningHandle(
                    bucketProperty.get().getBucketCount(),
                    bucketedBy.stream()
                            .map(hiveTypeMap::get)
                            .collect(toList()),
                    OptionalInt.of(bucketProperty.get().getBucketCount())),
            bucketedBy));
}
private static Properties createSchema(HiveStorageFormat format, List<String> columnNames, List<Type> columnTypes)
{
    Properties schema = new Properties();
    TypeTranslator typeTranslator = new HiveTypeTranslator();
    schema.setProperty(SERIALIZATION_LIB, format.getSerDe());
    schema.setProperty(FILE_INPUT_FORMAT, format.getInputFormat());
    schema.setProperty(META_TABLE_COLUMNS, columnNames.stream()
            .collect(joining(",")));
    schema.setProperty(META_TABLE_COLUMN_TYPES, columnTypes.stream()
            .map(type -> toHiveType(typeTranslator, type))
            .map(HiveType::getHiveTypeName)
            .map(HiveTypeName::toString)
            .collect(joining(":")));
    return schema;
}
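A rough illustration of calling createSchema, under the assumption that Presto's BIGINT maps to the Hive name bigint and unbounded VARCHAR maps to string; the column names and ORC format are made up for the example.

// Hypothetical call; "id"/"name" and ORC are illustrative, and BIGINT/VARCHAR are assumed
// static imports of the Presto SPI type constants.
Properties schema = createSchema(HiveStorageFormat.ORC, ImmutableList.of("id", "name"), ImmutableList.of(BIGINT, VARCHAR));
// Expected result (assumption): META_TABLE_COLUMNS -> "id,name", META_TABLE_COLUMN_TYPES -> "bigint:string"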
private void assertTypeTranslation(String typeName, HiveType hiveType)
{
    Type type = TYPE_MANAGER.getType(parseTypeSignature(typeName));
    assertEquals(HiveType.toHiveType(typeTranslator, type), hiveType);
}
private Type canonicalizeType(Type type)
{
    HiveType hiveType = HiveType.toHiveType(typeTranslator, type);
    return TYPE_MANAGER.getType(hiveType.getTypeSignature());
}
private static List<HiveColumnHandle> getColumnHandles(ConnectorTableMetadata tableMetadata, Set<String> partitionColumnNames, TypeTranslator typeTranslator)
{
    validatePartitionColumns(tableMetadata);
    validateBucketColumns(tableMetadata);
    ImmutableList.Builder<HiveColumnHandle> columnHandles = ImmutableList.builder();
    int ordinal = 0;
    for (ColumnMetadata column : tableMetadata.getColumns()) {
        HiveColumnHandle.ColumnType columnType;
        if (partitionColumnNames.contains(column.getName())) {
            columnType = PARTITION_KEY;
        }
        else if (column.isHidden()) {
            columnType = SYNTHESIZED;
        }
        else {
            columnType = REGULAR;
        }
        columnHandles.add(new HiveColumnHandle(
                column.getName(),
                toHiveType(typeTranslator, column.getType()),
                column.getType().getTypeSignature(),
                ordinal,
                columnType,
                Optional.ofNullable(column.getComment())));
        ordinal++;
    }
    return columnHandles.build();
}
@Override
public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnMetadata column)
{
    HiveTableHandle handle = (HiveTableHandle) tableHandle;
    failIfAvroSchemaIsSet(handle);
    metastore.addColumn(handle.getSchemaName(), handle.getTableName(), column.getName(), toHiveType(typeTranslator, column.getType()), column.getComment());
}
private static ConnectorPageSource createPageSource(
        HivePageSourceFactory pageSourceFactory,
        ConnectorSession session,
        File targetFile,
        List<String> columnNames,
        List<Type> columnTypes,
        HiveStorageFormat format)
{
    List<HiveColumnHandle> columnHandles = new ArrayList<>(columnNames.size());
    TypeTranslator typeTranslator = new HiveTypeTranslator();
    for (int i = 0; i < columnNames.size(); i++) {
        String columnName = columnNames.get(i);
        Type columnType = columnTypes.get(i);
        columnHandles.add(new HiveColumnHandle(columnName, toHiveType(typeTranslator, columnType), columnType.getTypeSignature(), i, REGULAR, Optional.empty()));
    }
    return pageSourceFactory
            .createPageSource(
                    conf,
                    session,
                    new Path(targetFile.getAbsolutePath()),
                    0,
                    targetFile.length(),
                    targetFile.length(),
                    createSchema(format, columnNames, columnTypes),
                    columnHandles,
                    TupleDomain.all(),
                    DateTimeZone.forID(session.getTimeZoneKey().getId()))
            .get();
}
private static ConnectorPageSource createPageSource(
        HiveRecordCursorProvider cursorProvider,
        ConnectorSession session,
        File targetFile,
        List<String> columnNames,
        List<Type> columnTypes,
        HiveStorageFormat format)
{
    List<HiveColumnHandle> columnHandles = new ArrayList<>(columnNames.size());
    TypeTranslator typeTranslator = new HiveTypeTranslator();
    for (int i = 0; i < columnNames.size(); i++) {
        String columnName = columnNames.get(i);
        Type columnType = columnTypes.get(i);
        columnHandles.add(new HiveColumnHandle(columnName, toHiveType(typeTranslator, columnType), columnType.getTypeSignature(), i, REGULAR, Optional.empty()));
    }
    RecordCursor recordCursor = cursorProvider
            .createRecordCursor(
                    conf,
                    session,
                    new Path(targetFile.getAbsolutePath()),
                    0,
                    targetFile.length(),
                    targetFile.length(),
                    createSchema(format, columnNames, columnTypes),
                    columnHandles,
                    TupleDomain.all(),
                    DateTimeZone.forID(session.getTimeZoneKey().getId()),
                    TYPE_MANAGER,
                    false)
            .get();
    return new RecordPageSource(columnTypes, recordCursor);
}
private void assertInvalidTypeTranslation(String typeName, ErrorCode errorCode, String message)
{
    Type type = TYPE_MANAGER.getType(parseTypeSignature(typeName));
    try {
        HiveType.toHiveType(typeTranslator, type);
        fail("expected exception");
    }
    catch (PrestoException e) {
        try {
            assertEquals(e.getErrorCode(), errorCode);
            assertContains(e.getMessage(), message);
        }
        catch (Throwable failure) {
            failure.addSuppressed(e);
            throw failure;
        }
    }
}
List<Column> dataColumns = tableAfter.stream()
        .filter(columnMetadata -> !columnMetadata.getName().equals("ds"))
        .map(columnMetadata -> new Column(columnMetadata.getName(), toHiveType(hiveTypeTranslator, columnMetadata.getType()), Optional.empty()))
        .collect(toList());
Table.Builder newTable = Table.builder(oldTable)
@JsonCreator
@Nonnull
public static HiveType valueOf(String hiveTypeName)
{
    requireNonNull(hiveTypeName, "hiveTypeName is null");
    return toHiveType(getTypeInfoFromTypeString(hiveTypeName));
}
public static List<HiveColumnHandle> hiveColumnHandles(String connectorId, Table table)
{
    ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();

    // add the data fields first
    int hiveColumnIndex = 0;
    for (FieldSchema field : table.getSd().getCols()) {
        // ignore unsupported types rather than failing
        TypeInfo typeInfo = getTypeInfoFromTypeString(field.getType());
        if (HiveType.isSupportedType(typeInfo)) {
            HiveType hiveType = toHiveType(typeInfo);
            columns.add(new HiveColumnHandle(connectorId, field.getName(), hiveType, hiveType.getTypeSignature(), hiveColumnIndex, false));
        }
        hiveColumnIndex++;
    }

    // add the partition keys last (like Hive does)
    columns.addAll(getPartitionKeyColumnHandles(connectorId, table));

    return columns.build();
}
@Override
public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnMetadata column)
{
    if (!allowAddColumn) {
        throw new PrestoException(PERMISSION_DENIED, "Adding Columns is disabled in this Hive catalog");
    }

    HiveTableHandle handle = checkType(tableHandle, HiveTableHandle.class, "tableHandle");
    Optional<Table> tableMetadata = metastore.getTable(handle.getSchemaName(), handle.getTableName());
    if (!tableMetadata.isPresent()) {
        throw new TableNotFoundException(handle.getSchemaTableName());
    }
    Table table = tableMetadata.get();
    StorageDescriptor sd = table.getSd();

    ImmutableList.Builder<FieldSchema> columns = ImmutableList.builder();
    columns.addAll(sd.getCols());
    columns.add(new FieldSchema(column.getName(), toHiveType(column.getType()).getHiveTypeName(), column.getComment()));
    sd.setCols(columns.build());

    table.setSd(sd);
    metastore.alterTable(handle.getSchemaName(), handle.getTableName(), table);
}
@Override
public Map<String, ColumnHandle> getColumnHandles(final ConnectorSession session, final ConnectorTableHandle tableHandle)
{
    final SchemaTableName schemaTableName = HiveUtil.schemaTableName(tableHandle);
    final Table table = tableDao.getBySourceDatabaseTableName(connectorId.toString(), schemaTableName.getSchemaName(), schemaTableName.getTableName());
    if (table == null) {
        throw new TableNotFoundException(schemaTableName);
    }
    final ImmutableMap.Builder<String, ColumnHandle> columnHandles = ImmutableMap.builder();
    for (Field field : getFields(table)) {
        final String type = field.getType();
        final Type prestoType = converterUtil.toType(type);
        final HiveType hiveType = HiveType.toHiveType(prestoType);
        columnHandles.put(field.getName(), new HiveColumnHandle(connectorId.toString(), field.getName(), field.getPos(), hiveType, prestoType.getTypeSignature(), field.getPos(), field.isPartitionKey()));
    }
    return columnHandles.build();
}
@Override
public ColumnHandle getSampleWeightColumnHandle(final ConnectorSession session, final ConnectorTableHandle tableHandle)
{
    final SchemaTableName schemaTableName = HiveUtil.schemaTableName(tableHandle);
    final Table table = tableDao.getBySourceDatabaseTableName(connectorId.toString(), schemaTableName.getSchemaName(), schemaTableName.getTableName());
    if (table == null) {
        throw new TableNotFoundException(schemaTableName);
    }
    for (Field field : getFields(table)) {
        if (HiveColumnHandle.SAMPLE_WEIGHT_COLUMN_NAME.equals(field.getName())) {
            final String type = field.getType();
            final Type prestoType = converterUtil.toType(type);
            final HiveType hiveType = HiveType.toHiveType(prestoType);
            return new HiveColumnHandle(connectorId.toString(), field.getName(), field.getPos(), hiveType, prestoType.getTypeSignature(), field.getPos(), field.isPartitionKey());
        }
    }
    return null;
}