@Override
public HiveTableHandle getTableHandle(ConnectorSession session, SchemaTableName tableName)
{
    requireNonNull(tableName, "tableName is null");
    Optional<Table> table = metastore.getTable(tableName.getSchemaName(), tableName.getTableName());
    if (!table.isPresent()) {
        return null;
    }

    if (isPartitionsSystemTable(tableName)) {
        // We must not allow $partitions table due to how permissions are checked in PartitionsAwareAccessControl.checkCanSelectFromTable()
        throw new PrestoException(StandardErrorCode.NOT_SUPPORTED, format("Unexpected table %s present in Hive metastore", tableName));
    }

    verifyOnline(tableName, Optional.empty(), getProtectMode(table.get()), table.get().getParameters());
    return new HiveTableHandle(tableName.getSchemaName(), tableName.getTableName());
}
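
// isPartitionsSystemTable is not shown in this extract; a minimal sketch,
// assuming it simply tests for Presto's synthetic "$partitions" table-name
// suffix (the real helper may parse the name more carefully):
private static boolean isPartitionsSystemTable(SchemaTableName tableName)
{
    // hypothetical implementation: match the reserved $partitions suffix
    return tableName.getTableName().endsWith("$partitions");
}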
@Override
public void dropTable(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    HiveTableHandle handle = (HiveTableHandle) tableHandle;
    SchemaTableName tableName = schemaTableName(tableHandle);

    Optional<Table> target = metastore.getTable(handle.getSchemaName(), handle.getTableName());
    if (!target.isPresent()) {
        throw new TableNotFoundException(tableName);
    }
    metastore.dropTable(session, handle.getSchemaName(), handle.getTableName());
}
@Override
public OptionalLong metadataDelete(ConnectorSession session, ConnectorTableHandle tableHandle, ConnectorTableLayoutHandle tableLayoutHandle)
{
    HiveTableHandle handle = (HiveTableHandle) tableHandle;
    HiveTableLayoutHandle layoutHandle = (HiveTableLayoutHandle) tableLayoutHandle;

    Optional<Table> table = metastore.getTable(handle.getSchemaName(), handle.getTableName());
    if (!table.isPresent()) {
        throw new TableNotFoundException(handle.getSchemaTableName());
    }

    if (table.get().getPartitionColumns().isEmpty()) {
        metastore.truncateUnpartitionedTable(session, handle.getSchemaName(), handle.getTableName());
    }
    else {
        for (HivePartition hivePartition : getOrComputePartitions(layoutHandle, session, tableHandle)) {
            metastore.dropPartition(session, handle.getSchemaName(), handle.getTableName(), toPartitionValues(hivePartition.getPartitionId()));
        }
    }
    // it is too expensive to determine the exact number of deleted rows
    return OptionalLong.empty();
}
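
// getOrComputePartitions is not shown here; a plausible sketch, assuming the
// layout handle exposes getPartitions()/getEnforcedConstraint() getters for
// the values it is constructed with in getTableLayouts below (both getter
// names are assumptions):
private List<HivePartition> getOrComputePartitions(HiveTableLayoutHandle layoutHandle, ConnectorSession session, ConnectorTableHandle tableHandle)
{
    // reuse the pruned partition list if the layout already carries it
    List<HivePartition> partitions = layoutHandle.getPartitions();
    if (partitions != null) {
        return partitions;
    }
    // otherwise re-run partition pruning against the constraint the layout enforced
    return partitionManager.getPartitions(session, metastore, tableHandle, layoutHandle.getEnforcedConstraint()).getPartitions();
}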
@Test
public void testRoundTrip()
{
    HiveTableHandle expected = new HiveTableHandle("schema", "table");

    String json = codec.toJson(expected);
    HiveTableHandle actual = codec.fromJson(json);

    assertEquals(actual.getSchemaTableName(), expected.getSchemaTableName());
}
@Test
public void testTableHandleDeserialize()
        throws Exception
{
    String json = objectMapper.writeValueAsString(TABLE_HANDLE_AS_MAP);

    HiveTableHandle tableHandle = objectMapper.readValue(json, HiveTableHandle.class);

    assertEquals(tableHandle.getClientId(), "hive");
    assertEquals(tableHandle.getSchemaName(), "hive_schema");
    assertEquals(tableHandle.getTableName(), "hive_table");
    assertEquals(tableHandle.getSchemaTableName(), new SchemaTableName("hive_schema", "hive_table"));
}
@Test
public void testRoundTrip()
{
    HiveTableHandle expected = new HiveTableHandle("client", "schema", "table");

    String json = codec.toJson(expected);
    HiveTableHandle actual = codec.fromJson(json);

    assertEquals(actual.getClientId(), expected.getClientId());
    assertEquals(actual.getSchemaTableName(), expected.getSchemaTableName());
}
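
// The codec fixture used by both round-trip tests is not shown; a minimal
// sketch, assuming Airlift's JsonCodec, which is the usual choice for these
// serialization tests in Presto:
private final JsonCodec<HiveTableHandle> codec = JsonCodec.jsonCodec(HiveTableHandle.class);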
public static SchemaTableName schemaTableName(ConnectorTableHandle tableHandle)
{
    return ((HiveTableHandle) tableHandle).getSchemaTableName();
}
@Override
public List<ConnectorTableLayoutResult> getTableLayouts(ConnectorSession session, ConnectorTableHandle tableHandle, Constraint<ColumnHandle> constraint, Optional<Set<ColumnHandle>> desiredColumns)
{
    HiveTableHandle handle = checkType(tableHandle, HiveTableHandle.class, "tableHandle");
    HivePartitionResult hivePartitionResult = partitionManager.getPartitions(session, metastore, tableHandle, constraint.getSummary());

    return ImmutableList.of(new ConnectorTableLayoutResult(
            getTableLayout(session, new HiveTableLayoutHandle(
                    handle.getClientId(),
                    ImmutableList.copyOf(hivePartitionResult.getPartitionColumns()),
                    hivePartitionResult.getPartitions(),
                    hivePartitionResult.getEnforcedConstraint())),
            hivePartitionResult.getUnenforcedConstraint()));
}
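
// checkType (also used in addColumn below) is a small cast helper from
// Presto's utility code; a sketch of its usual shape, with Guava's
// checkArgument (the exact message wording is an assumption):
public static <A, B extends A> B checkType(A value, Class<B> target, String name)
{
    requireNonNull(value, format("%s is null", name));
    checkArgument(target.isInstance(value), "%s must be of type %s, not %s", name, target.getName(), value.getClass().getName());
    return target.cast(value);
}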
@Override
public TableStatistics getTableStatistics(ConnectorSession session, ConnectorTableHandle tableHandle, Constraint<ColumnHandle> constraint)
{
    if (!isStatisticsEnabled(session)) {
        return TableStatistics.empty();
    }
    Map<String, ColumnHandle> columns = getColumnHandles(session, tableHandle)
            .entrySet().stream()
            .filter(entry -> !((HiveColumnHandle) entry.getValue()).isHidden())
            .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue));
    Map<String, Type> columnTypes = columns.entrySet().stream()
            .collect(toImmutableMap(Map.Entry::getKey, entry -> getColumnMetadata(session, tableHandle, entry.getValue()).getType()));
    List<HivePartition> partitions = getPartitionsAsList(tableHandle, constraint);
    return hiveStatisticsProvider.getTableStatistics(session, ((HiveTableHandle) tableHandle).getSchemaTableName(), columns, columnTypes, partitions);
}
@Override
public void renameColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle source, String target)
{
    HiveTableHandle hiveTableHandle = (HiveTableHandle) tableHandle;
    failIfAvroSchemaIsSet(hiveTableHandle);
    HiveColumnHandle sourceHandle = (HiveColumnHandle) source;

    metastore.renameColumn(hiveTableHandle.getSchemaName(), hiveTableHandle.getTableName(), sourceHandle.getName(), target);
}
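
// failIfAvroSchemaIsSet (shared with dropColumn below) guards against column
// changes on tables whose Avro schema comes from an external URL; a sketch,
// assuming the standard "avro.schema.url" table-property key:
private void failIfAvroSchemaIsSet(HiveTableHandle handle)
{
    Table table = metastore.getTable(handle.getSchemaName(), handle.getTableName())
            .orElseThrow(() -> new TableNotFoundException(handle.getSchemaTableName()));
    if (table.getParameters().containsKey("avro.schema.url")) {
        throw new PrestoException(NOT_SUPPORTED, "ALTER TABLE not supported when Avro schema url is set");
    }
}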
@Test
public void testTableHandleDeserialize()
        throws Exception
{
    String json = objectMapper.writeValueAsString(TABLE_HANDLE_AS_MAP);

    HiveTableHandle tableHandle = objectMapper.readValue(json, HiveTableHandle.class);

    assertEquals(tableHandle.getSchemaName(), "hive_schema");
    assertEquals(tableHandle.getTableName(), "hive_table");
    assertEquals(tableHandle.getSchemaTableName(), new SchemaTableName("hive_schema", "hive_table"));
}
@Test
public void testTableHandleSerialize()
        throws Exception
{
    HiveTableHandle tableHandle = new HiveTableHandle("hive_schema", "hive_table");

    assertTrue(objectMapper.canSerialize(HiveTableHandle.class));
    String json = objectMapper.writeValueAsString(tableHandle);
    testJsonEquals(json, TABLE_HANDLE_AS_MAP);
}
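
// TABLE_HANDLE_AS_MAP is defined elsewhere in the test class; a plausible
// fixture for the two-field handle, assuming the Jackson property names
// mirror the getters exercised in the deserialize test:
private static final Map<String, Object> TABLE_HANDLE_AS_MAP = ImmutableMap.of(
        "schemaName", "hive_schema",
        "tableName", "hive_table");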
@Override
public Optional<ConnectorNewTableLayout> getInsertLayout(ConnectorSession session, ConnectorTableHandle tableHandle)
{
    HiveTableHandle hiveTableHandle = (HiveTableHandle) tableHandle;
    SchemaTableName tableName = hiveTableHandle.getSchemaTableName();
    Table table = metastore.getTable(tableName.getSchemaName(), tableName.getTableName())
            .orElseThrow(() -> new TableNotFoundException(tableName));

    Optional<HiveBucketHandle> hiveBucketHandle = getHiveBucketHandle(table);
    if (!hiveBucketHandle.isPresent()) {
        return Optional.empty();
    }
    HiveBucketProperty bucketProperty = table.getStorage().getBucketProperty()
            .orElseThrow(() -> new NoSuchElementException("Bucket property should be set"));
    if (!bucketProperty.getSortedBy().isEmpty() && !isSortedWritingEnabled(session)) {
        throw new PrestoException(NOT_SUPPORTED, "Writing to bucketed sorted Hive tables is disabled");
    }

    HivePartitioningHandle partitioningHandle = new HivePartitioningHandle(
            hiveBucketHandle.get().getTableBucketCount(),
            hiveBucketHandle.get().getColumns().stream()
                    .map(HiveColumnHandle::getHiveType)
                    .collect(Collectors.toList()),
            OptionalInt.of(hiveBucketHandle.get().getTableBucketCount()));
    List<String> partitionColumns = hiveBucketHandle.get().getColumns().stream()
            .map(HiveColumnHandle::getName)
            .collect(Collectors.toList());
    return Optional.of(new ConnectorNewTableLayout(partitioningHandle, partitionColumns));
}
@Override
public void renameTable(ConnectorSession session, ConnectorTableHandle tableHandle, SchemaTableName newTableName)
{
    HiveTableHandle handle = (HiveTableHandle) tableHandle;
    metastore.renameTable(handle.getSchemaName(), handle.getTableName(), newTableName.getSchemaName(), newTableName.getTableName());
}
HiveInsertTableHandle hiveInsertTableHandle = (HiveInsertTableHandle) hiveMetadata.beginInsert(
        session,
        new HiveTableHandle(schema, table));
TupleDomain<ColumnHandle> effectivePredicate = constraint.getSummary();

SchemaTableName tableName = hiveTableHandle.getSchemaTableName();
Table table = getTable(metastore, tableName);

Optional<HiveBucketHandle> hiveBucketHandle = getHiveBucketHandle(table);
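
// The getTable helper used above is assumed to wrap the Optional-returning
// metastore lookup and translate an absent table into TableNotFoundException;
// a minimal sketch (the metastore parameter type is an assumption):
private static Table getTable(SemiTransactionalHiveMetastore metastore, SchemaTableName tableName)
{
    return metastore.getTable(tableName.getSchemaName(), tableName.getTableName())
            .orElseThrow(() -> new TableNotFoundException(tableName));
}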
@Override
public void dropColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnHandle column)
{
    HiveTableHandle hiveTableHandle = (HiveTableHandle) tableHandle;
    failIfAvroSchemaIsSet(hiveTableHandle);
    HiveColumnHandle columnHandle = (HiveColumnHandle) column;

    metastore.dropColumn(hiveTableHandle.getSchemaName(), hiveTableHandle.getTableName(), columnHandle.getName());
}
@Override
public void addColumn(ConnectorSession session, ConnectorTableHandle tableHandle, ColumnMetadata column)
{
    if (!allowAddColumn) {
        throw new PrestoException(PERMISSION_DENIED, "Adding columns is disabled in this Hive catalog");
    }

    HiveTableHandle handle = checkType(tableHandle, HiveTableHandle.class, "tableHandle");
    Optional<Table> tableMetadata = metastore.getTable(handle.getSchemaName(), handle.getTableName());
    if (!tableMetadata.isPresent()) {
        throw new TableNotFoundException(handle.getSchemaTableName());
    }
    Table table = tableMetadata.get();
    StorageDescriptor sd = table.getSd();

    // append the new column to the existing storage descriptor columns
    ImmutableList.Builder<FieldSchema> columns = ImmutableList.builder();
    columns.addAll(sd.getCols());
    columns.add(new FieldSchema(column.getName(), toHiveType(column.getType()).getHiveTypeName(), column.getComment()));
    sd.setCols(columns.build());
    table.setSd(sd);
    metastore.alterTable(handle.getSchemaName(), handle.getTableName(), table);
}