@Override
public void renameColumn(String databaseName, String tableName, String oldColumnName, String newColumnName)
{
    // Load the current definition; getTableOrElseThrow fails if the table is missing.
    Table table = getTableOrElseThrow(databaseName, tableName);

    // Partition columns cannot be renamed in place.
    boolean renamesPartitionColumn = table.getPartitionColumns().stream()
            .map(Column::getName)
            .anyMatch(oldColumnName::equals);
    if (renamesPartitionColumn) {
        throw new PrestoException(NOT_SUPPORTED, "Renaming partition columns is not supported");
    }

    // Rebuild the data-column list, swapping in the new name where it matches.
    // NOTE(review): this silently no-ops when oldColumnName does not exist and does not
    // guard against newColumnName already being present — confirm callers validate both.
    ImmutableList.Builder<Column> renamedColumns = ImmutableList.builder();
    for (Column column : table.getDataColumns()) {
        renamedColumns.add(column.getName().equals(oldColumnName)
                ? new Column(newColumnName, column.getType(), column.getComment())
                : column);
    }

    Table renamedTable = Table.builder(table)
            .setDataColumns(renamedColumns.build())
            .build();
    // NOTE(review): null privileges argument — presumably means "keep existing";
    // verify against the replaceTable contract.
    replaceTable(databaseName, tableName, renamedTable, null);
}
/**
 * Converts a Presto column into the thrift metastore {@code FieldSchema} representation.
 * The Hive type is rendered as its string name; an absent comment becomes {@code null}.
 */
public static FieldSchema toMetastoreApiFieldSchema(Column column)
{
    String typeName = column.getType().getHiveTypeName().toString();
    String comment = column.getComment().orElse(null);
    return new FieldSchema(column.getName(), typeName, comment);
}
@Override
public synchronized void addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment)
{
    alterTable(databaseName, tableName, oldTable -> {
        // Reject duplicate names before touching the column list.
        if (oldTable.getColumn(columnName).isPresent()) {
            throw new PrestoException(ALREADY_EXISTS, "Column already exists: " + columnName);
        }
        // Append the new column after all existing data columns.
        ImmutableList.Builder<Column> columns = ImmutableList.builder();
        columns.addAll(oldTable.getDataColumns());
        columns.add(new Column(columnName, columnType, Optional.ofNullable(columnComment)));
        return oldTable.withDataColumns(columns.build());
    });
}
// NOTE(review): this fragment appears truncated — braces are unbalanced and both put()
// calls target the same key. ImmutableMap.Builder rejects duplicate keys at build(), so
// the second put() is presumably the else-branch (dropping "from deserializer" comments)
// that lost its "} else {" — confirm against the original file.
ImmutableMap.Builder<String, Optional<String>> builder = ImmutableMap.builder();
// Collect per-column comments across data and partition columns.
for (Column field : concat(tableColumns, table.getPartitionColumns())) {
    // "from deserializer" is a Hive placeholder comment, not user content.
    if (field.getComment().isPresent() && !field.getComment().get().equals("from deserializer")) {
        builder.put(field.getName(), field.getComment());
        builder.put(field.getName(), Optional.empty());
/**
 * Finds a column by name. Partition columns are searched before data columns;
 * the first name match wins.
 */
public Optional<Column> getColumn(String name)
{
    Optional<Column> partitionMatch = findByName(partitionColumns, name);
    if (partitionMatch.isPresent()) {
        return partitionMatch;
    }
    return findByName(dataColumns, name);
}

// Linear scan for the first column whose name equals the given name.
private static Optional<Column> findByName(Iterable<Column> columns, String name)
{
    for (Column column : columns) {
        if (column.getName().equals(name)) {
            return Optional.of(column);
        }
    }
    return Optional.empty();
}
private static boolean areColumnTypesSupported(List<Column> columns) { if (columns == null || columns.isEmpty()) { return false; } for (Column column : columns) { String type = column.getType().getHiveTypeName().toString(); if (column.getType().getTypeInfo() instanceof DecimalTypeInfo) { // skip precision and scale when check decimal type type = DECIMAL_TYPE_NAME; } if (!SUPPORTED_COLUMN_TYPES.contains(type)) { return false; } } return true; }
public static List<HiveColumnHandle> getRegularColumnHandles(Table table) { ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder(); int hiveColumnIndex = 0; for (Column field : table.getDataColumns()) { // ignore unsupported types rather than failing HiveType hiveType = field.getType(); if (hiveType.isSupportedType()) { columns.add(new HiveColumnHandle(field.getName(), hiveType, hiveType.getTypeSignature(), hiveColumnIndex, REGULAR, field.getComment())); } hiveColumnIndex++; } return columns.build(); }
/**
 * Returns true when the two tables have the same data-column schema: equal column
 * counts, and every existing column has a same-name, same-type counterpart in the
 * new table. Column order is ignored.
 */
private boolean hasTheSameSchema(Table newTable, Table existingTable)
{
    List<Column> newColumns = newTable.getDataColumns();
    List<Column> existingColumns = existingTable.getDataColumns();

    if (newColumns.size() != existingColumns.size()) {
        return false;
    }
    for (Column existing : existingColumns) {
        boolean found = false;
        for (Column candidate : newColumns) {
            if (candidate.getName().equals(existing.getName()) && candidate.getType().equals(existing.getType())) {
                found = true;
                break;
            }
        }
        if (!found) {
            return false;
        }
    }
    return true;
}
/**
 * Converts a Glue column to the Presto metastore model. The Glue type string is
 * lower-cased first — presumably HiveType.valueOf expects lower-case names; a
 * missing Glue comment maps to Optional.empty().
 */
private static Column convertColumn(com.amazonaws.services.glue.model.Column glueColumn)
{
    HiveType hiveType = HiveType.valueOf(glueColumn.getType().toLowerCase(Locale.ENGLISH));
    return new Column(glueColumn.getName(), hiveType, Optional.ofNullable(glueColumn.getComment()));
}
/**
 * Finds a column by name. Partition columns are checked before data columns;
 * the first name match wins.
 */
public Optional<Column> getColumn(String name)
{
    for (Column column : partitionColumns) {
        if (column.getName().equals(name)) {
            return Optional.of(column);
        }
    }
    for (Column column : dataColumns) {
        if (column.getName().equals(name)) {
            return Optional.of(column);
        }
    }
    return Optional.empty();
}
@Override
public synchronized void updateTableStatistics(String databaseName, String tableName, Function<PartitionStatistics, PartitionStatistics> update)
{
    // Apply the caller-supplied transformation to the current statistics.
    PartitionStatistics currentStatistics = getTableStatistics(databaseName, tableName);
    PartitionStatistics updatedStatistics = update.apply(currentStatistics);

    Table originalTable = getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));

    // Basic statistics are written into the table's parameters map, so persist them
    // via alterTable on a deep copy of the thrift table object (original left untouched).
    Table modifiedTable = originalTable.deepCopy();
    HiveBasicStatistics basicStatistics = updatedStatistics.getBasicStatistics();
    modifiedTable.setParameters(updateStatisticsParameters(modifiedTable.getParameters(), basicStatistics));
    alterTable(databaseName, tableName, modifiedTable);

    com.facebook.presto.hive.metastore.Table table = fromMetastoreApiTable(modifiedTable);

    // Column-level statistics go through the separate metastore statistics API.
    // NOTE(review): table.getColumn(...).get() is unchecked — it throws if the updated
    // statistics reference a column not on the table; confirm upstream validation.
    OptionalLong rowCount = basicStatistics.getRowCount();
    List<ColumnStatisticsObj> metastoreColumnStatistics = updatedStatistics.getColumnStatistics().entrySet().stream()
            .map(entry -> createMetastoreColumnStatistics(entry.getKey(), table.getColumn(entry.getKey()).get().getType(), entry.getValue(), rowCount))
            .collect(toImmutableList());
    if (!metastoreColumnStatistics.isEmpty()) {
        setTableColumnStatistics(databaseName, tableName, metastoreColumnStatistics);
    }

    // Drop stored statistics for any column the update removed from the statistics map.
    Set<String> removedColumnStatistics = difference(currentStatistics.getColumnStatistics().keySet(), updatedStatistics.getColumnStatistics().keySet());
    removedColumnStatistics.forEach(column -> deleteTableColumnStatistics(databaseName, tableName, column));
}
/**
 * Converts a Presto column to the Glue model. The Hive type is rendered via its
 * string form; an absent comment becomes null.
 */
private static com.amazonaws.services.glue.model.Column convertColumn(Column prestoColumn)
{
    com.amazonaws.services.glue.model.Column glueColumn = new com.amazonaws.services.glue.model.Column();
    glueColumn.setName(prestoColumn.getName());
    glueColumn.setType(prestoColumn.getType().toString());
    glueColumn.setComment(prestoColumn.getComment().orElse(null));
    return glueColumn;
}
}
@Override
public synchronized void renameColumn(String databaseName, String tableName, String oldColumnName, String newColumnName)
{
    alterTable(databaseName, tableName, oldTable -> {
        // The target name must be free and the source column must exist.
        if (oldTable.getColumn(newColumnName).isPresent()) {
            throw new PrestoException(ALREADY_EXISTS, "Column already exists: " + newColumnName);
        }
        if (!oldTable.getColumn(oldColumnName).isPresent()) {
            throw new ColumnNotFoundException(new SchemaTableName(databaseName, tableName), oldColumnName);
        }
        // Partition columns cannot be renamed.
        boolean renamesPartitionColumn = oldTable.getPartitionColumns().stream()
                .map(Column::getName)
                .anyMatch(oldColumnName::equals);
        if (renamesPartitionColumn) {
            throw new PrestoException(NOT_SUPPORTED, "Renaming partition columns is not supported");
        }
        // Rebuild the data-column list, swapping in the new name where it matches.
        ImmutableList.Builder<Column> renamedColumns = ImmutableList.builder();
        for (Column column : oldTable.getDataColumns()) {
            renamedColumns.add(column.getName().equals(oldColumnName)
                    ? new Column(newColumnName, column.getType(), column.getComment())
                    : column);
        }
        return oldTable.withDataColumns(renamedColumns.build());
    });
}
/**
 * Converts a thrift metastore {@code FieldSchema} to a Presto column.
 * An empty or null thrift comment is normalized to {@code Optional.empty()}.
 */
public static Column fromMetastoreApiFieldSchema(FieldSchema fieldSchema)
{
    Optional<String> comment = Optional.ofNullable(emptyToNull(fieldSchema.getComment()));
    return new Column(fieldSchema.getName(), HiveType.valueOf(fieldSchema.getType()), comment);
}
// Prefix of partition directory names for the first partition column, e.g. "ds=".
// Assumes partitionColumns is non-empty (get(0) throws otherwise); presumably used to
// match per-partition subdirectories — confirm with the surrounding code.
String directoryPrefix = partitionColumns.get(0).getName() + '=';
/**
 * Builds handles for the table's partition-key columns. An unsupported Hive type in
 * a partition key fails the query outright (throws NOT_SUPPORTED).
 */
public static List<HiveColumnHandle> getPartitionKeyColumnHandles(Table table)
{
    ImmutableList.Builder<HiveColumnHandle> handles = ImmutableList.builder();
    for (Column partitionKey : table.getPartitionColumns()) {
        HiveType hiveType = partitionKey.getType();
        if (!hiveType.isSupportedType()) {
            throw new PrestoException(NOT_SUPPORTED, format("Unsupported Hive type %s found in partition keys of table %s.%s", hiveType, table.getDatabaseName(), table.getTableName()));
        }
        // Index -1: partition keys carry no ordinal — presumably a sentinel meaning
        // "not a physical data column".
        handles.add(new HiveColumnHandle(partitionKey.getName(), hiveType, hiveType.getTypeSignature(), -1, PARTITION_KEY, partitionKey.getComment()));
    }
    return handles.build();
}
private static String toThriftDdl(String structName, List<Column> columns) { // Mimics function in Hive: // MetaStoreUtils.getDDLFromFieldSchema(String, List<FieldSchema>) StringBuilder ddl = new StringBuilder(); ddl.append("struct "); ddl.append(structName); ddl.append(" { "); boolean first = true; for (Column column : columns) { if (first) { first = false; } else { ddl.append(", "); } ddl.append(typeToThriftType(column.getType().getHiveTypeName().toString())); ddl.append(' '); ddl.append(column.getName()); } ddl.append("}"); return ddl.toString(); }