/**
 * Converts the columns of a storage descriptor into HCat field schemas.
 *
 * @param sd storage descriptor whose columns are converted
 * @return the converted columns, in descriptor order
 * @throws HCatException if a column cannot be converted
 */
private static List<HCatFieldSchema> getColumns(StorageDescriptor sd) throws HCatException {
    // Presize to the descriptor's column count to avoid resizing.
    ArrayList<HCatFieldSchema> converted = new ArrayList<HCatFieldSchema>(sd.getColsSize());
    for (FieldSchema field : sd.getCols()) {
        converted.add(HCatSchemaUtils.getHCatFieldSchema(field));
    }
    return converted;
}
/**
 * Returns the lower-cased names of the table's data columns.
 *
 * @param table table whose storage-descriptor columns are read
 * @return column names, lower-cased, in descriptor order
 */
private static ArrayList<String> getCols(Table table) {
    List<FieldSchema> cols = table.getSd().getCols();
    ArrayList<String> colNames = new ArrayList<String>(cols.size());
    for (FieldSchema col : cols) {
        // Use a fixed locale so the result does not depend on the JVM's default
        // locale (e.g. Turkish dotted/dotless 'i' would otherwise corrupt names
        // like "ID" -> "ıd"). Fully qualified to avoid touching the import block.
        colNames.add(col.getName().toLowerCase(java.util.Locale.ROOT));
    }
    return colNames;
}
/**
 * Builds the schema properties for a table, including its column metadata.
 *
 * @param sd storage descriptor used for the non-column schema properties
 * @param tblsd table-level storage descriptor supplying the column list
 * @param parameters table parameters
 * @param databaseName database name
 * @param tableName table name
 * @param partitionKeys partition key field schemas
 * @return the schema properties with column information added
 */
public static Properties getSchema(
        org.apache.hadoop.hive.metastore.api.StorageDescriptor sd,
        org.apache.hadoop.hive.metastore.api.StorageDescriptor tblsd,
        Map<String, String> parameters,
        String databaseName,
        String tableName,
        List<FieldSchema> partitionKeys) {
    // Build the column-less schema first, then fold in the table-level columns.
    Properties schema = getSchemaWithoutCols(sd, parameters, databaseName, tableName, partitionKeys);
    return addCols(schema, tblsd.getCols());
}
/**
 * Converts a metastore API table into the internal table representation,
 * using the columns from the table's own storage descriptor.
 *
 * @param table metastore API table; must carry a storage descriptor
 * @throws PrestoException with {@code HIVE_INVALID_METADATA} if the storage descriptor is absent
 */
public static Table fromMetastoreApiTable(org.apache.hadoop.hive.metastore.api.Table table) {
    StorageDescriptor sd = table.getSd();
    if (sd == null) {
        throw new PrestoException(HIVE_INVALID_METADATA, "Table is missing storage descriptor");
    }
    // Delegate to the two-argument overload with the descriptor's column list.
    return fromMetastoreApiTable(table, sd.getCols());
}
/**
 * Returns the field schemas of the named table's storage descriptor.
 *
 * @throws TableNotFoundException if the table does not exist
 * @throws PrestoException with {@code HIVE_INVALID_METADATA} if the table has no storage descriptor
 */
default Optional<List<FieldSchema>> getFields(String databaseName, String tableName) {
    Table table = getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    if (table.getSd() == null) {
        throw new PrestoException(HIVE_INVALID_METADATA, "Table is missing storage descriptor");
    }
    return Optional.of(table.getSd().getCols());
}
}
/**
 * Propagates the table's column names and types onto the scan operator so that
 * schema evolution can be applied during the scan.
 *
 * @param table source table whose storage-descriptor columns are used
 * @param tableScanOp scan operator to receive the schema strings
 */
public static void addSchemaEvolutionToTableScanOperator(Table table, TableScanOperator tableScanOp) {
    String names = MetaStoreUtils.getColumnNamesFromFieldSchema(table.getSd().getCols());
    String types = MetaStoreUtils.getColumnTypesFromFieldSchema(table.getSd().getCols());
    tableScanOp.setSchemaEvolution(names, types);
}
/**
 * Drops the named column from the table and persists the change.
 *
 * @throws TableNotFoundException if the table does not exist
 */
@Override
public void dropColumn(String databaseName, String tableName, String columnName) {
    // Reject drops that would violate invariants (e.g. partition columns).
    verifyCanDropColumn(this, databaseName, tableName, columnName);
    org.apache.hadoop.hive.metastore.api.Table table = delegate.getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    table.getSd().getCols().removeIf(col -> col.getName().equals(columnName));
    alterTable(databaseName, tableName, table);
}
/**
 * Key wrapping a storage descriptor plus an externally supplied base location.
 * The hash is precomputed here; it deliberately uses {@code baseLocation}
 * instead of {@code sd.getLocation()}.
 */
StorageDescriptorKey(String baseLocation, StorageDescriptor sd) {
    this.sd = sd;
    this.baseLocation = baseLocation;
    hashCode = (sd == null)
            ? Objects.hashCode(baseLocation)
            : Objects.hash(
                    sd.getSerdeInfo() == null ? null : sd.getSerdeInfo().getSerializationLib(),
                    sd.getInputFormat(),
                    sd.getOutputFormat(),
                    // use the baseLocation provided instead of sd.getLocation()
                    baseLocation,
                    sd.getCols());
}
/**
 * Computes table-level statistics: basic stats from the table parameters plus
 * per-column statistics for every data column.
 *
 * @throws TableNotFoundException if the table does not exist
 */
@Override
public PartitionStatistics getTableStatistics(String databaseName, String tableName) {
    Table table = getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    // Basic stats (row count etc.) come straight from the table parameters.
    HiveBasicStatistics basicStatistics = getHiveBasicStatistics(table.getParameters());
    List<String> dataColumns = table.getSd().getCols().stream()
            .map(FieldSchema::getName)
            .collect(toImmutableList());
    Map<String, HiveColumnStatistics> columnStatistics =
            getTableColumnStatistics(databaseName, tableName, dataColumns, basicStatistics.getRowCount());
    return new PartitionStatistics(basicStatistics, columnStatistics);
}
/**
 * Appends a new column to the table's storage descriptor and persists the change.
 *
 * @throws TableNotFoundException if the table does not exist
 */
@Override
public void addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment) {
    org.apache.hadoop.hive.metastore.api.Table table = delegate.getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    FieldSchema newColumn = new FieldSchema(columnName, columnType.getHiveTypeName().toString(), columnComment);
    table.getSd().getCols().add(newColumn);
    alterTable(databaseName, tableName, table);
}
/**
 * Renames a data column of the table and persists the change.
 *
 * @throws TableNotFoundException if the table does not exist
 * @throws PrestoException with {@code NOT_SUPPORTED} if the column is a partition key
 */
@Override
public void renameColumn(String databaseName, String tableName, String oldColumnName, String newColumnName) {
    org.apache.hadoop.hive.metastore.api.Table table = delegate.getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    // Partition columns cannot be renamed through this path.
    for (FieldSchema partitionKey : table.getPartitionKeys()) {
        if (partitionKey.getName().equals(oldColumnName)) {
            throw new PrestoException(NOT_SUPPORTED, "Renaming partition columns is not supported");
        }
    }
    for (FieldSchema column : table.getSd().getCols()) {
        if (column.getName().equals(oldColumnName)) {
            column.setName(newColumnName);
        }
    }
    alterTable(databaseName, tableName, table);
}
/**
 * Validates an alter-table round trip and additionally checks that the
 * storage-descriptor columns of both the old and new table match expectations.
 */
private void validateAlterTableColumns(Table expectedOldTable, Table expectedNewTable, Table actualOldTable, Table actualNewTable) {
    validateAlterTable(expectedOldTable, expectedNewTable, actualOldTable, actualNewTable);
    assertStorageColumnsEqual(expectedOldTable, actualOldTable);
    assertStorageColumnsEqual(expectedNewTable, actualNewTable);
}

// Asserts that two tables carry identical storage-descriptor column lists.
private void assertStorageColumnsEqual(Table expected, Table actual) {
    assertEquals(expected.getSd().getCols(), actual.getSd().getCols());
}
/**
 * Points this partition builder at the given table, copying the table's
 * identifying coordinates (catalog, database, name) and its column layout.
 *
 * @param table table the partition will belong to
 * @return this builder, for chaining
 */
public PartitionBuilder inTable(Table table) {
    this.catName = table.getCatName();
    this.dbName = table.getDbName();
    this.tableName = table.getTableName();
    setCols(table.getSd().getCols());
    return this;
}
/**
 * Converts a metastore API partition into the internal partition representation.
 *
 * @param partition metastore API partition; must carry a storage descriptor
 * @throws PrestoException with {@code HIVE_INVALID_METADATA} if the storage descriptor is absent
 */
public static Partition fromMetastoreApiPartition(org.apache.hadoop.hive.metastore.api.Partition partition) {
    StorageDescriptor sd = partition.getSd();
    if (sd == null) {
        throw new PrestoException(HIVE_INVALID_METADATA,
                "Partition does not contain a storage descriptor: " + partition);
    }
    Partition.Builder builder = Partition.builder()
            .setDatabaseName(partition.getDbName())
            .setTableName(partition.getTableName())
            .setValues(partition.getValues())
            .setColumns(sd.getCols().stream()
                    .map(ThriftMetastoreUtil::fromMetastoreApiFieldSchema)
                    .collect(toList()))
            .setParameters(partition.getParameters());
    // Populate the storage section from the descriptor; the label is used in error messages.
    fromMetastoreApiStorageDescriptor(sd, builder.getStorageBuilder(),
            format("%s.%s", partition.getTableName(), partition.getValues()));
    return builder.build();
}
@Test(expected = MetaException.class)
public void testAddPartitionsNullColNameInSd() throws Exception {
    createTable();
    Partition part = buildPartition(DB_NAME, TABLE_NAME, DEFAULT_YEAR_VALUE);
    // Corrupt the first storage-descriptor column's name; the metastore must
    // reject the batch add with MetaException.
    part.getSd().getCols().get(0).setName(null);
    client.add_partitions(Lists.newArrayList(part));
}
@Test(expected = MetaException.class)
public void testAddPartitionsNullColTypeInSd() throws Exception {
    createTable();
    Partition part = buildPartition(DB_NAME, TABLE_NAME, DEFAULT_YEAR_VALUE);
    // Corrupt the first storage-descriptor column's type; the metastore must
    // reject the batch add with MetaException.
    part.getSd().getCols().get(0).setType(null);
    client.add_partitions(Lists.newArrayList(part));
}
@Test(expected = MetaException.class)
public void testCreateTableInvalidStorageDescriptorNullColumnType() throws Exception {
    Table invalidTable = getNewTable();
    // A null column type in the storage descriptor must make table creation
    // fail with MetaException.
    invalidTable.getSd().getCols().get(0).setType(null);
    client.createTable(invalidTable);
}
@Test(expected = InvalidObjectException.class)
public void testCreateTableInvalidStorageDescriptorInvalidColumnType() throws Exception {
    Table invalidTable = getNewTable();
    // An unknown column type ("xyz") must make table creation fail with
    // InvalidObjectException (unlike the null-type case, which is MetaException).
    invalidTable.getSd().getCols().get(0).setType("xyz");
    client.createTable(invalidTable);
}
@Test(expected = MetaException.class)
public void testAddPartitionNullColNameInSd() throws Exception {
    createTable();
    Partition part = buildPartition(DB_NAME, TABLE_NAME, DEFAULT_YEAR_VALUE);
    // Corrupt the first storage-descriptor column's name; the single-partition
    // add must be rejected with MetaException.
    part.getSd().getCols().get(0).setName(null);
    client.add_partition(part);
}
@Test(expected = MetaException.class)
public void testAlterTableInvalidStorageDescriptorNullColumnType() throws Exception {
    Table original = testTables[0];
    // Work on a deep copy so the shared fixture table stays untouched.
    Table modified = original.deepCopy();
    modified.getSd().getCols().get(0).setType(null);
    // Altering to a descriptor with a null column type must fail with MetaException.
    client.alter_table(original.getDbName(), original.getTableName(), modified);
}