/**
 * Resolves the {@link HiveStorageFormat} of a table by matching its output format
 * and SerDe against every known format.
 *
 * @throws PrestoException with {@code HIVE_UNSUPPORTED_FORMAT} when no known format matches
 */
private static HiveStorageFormat extractHiveStorageFormat(Table table)
{
    StorageFormat storageFormat = table.getStorage().getStorageFormat();
    String outputFormat = storageFormat.getOutputFormat();
    String serde = storageFormat.getSerDe();

    // A format is identified by the (output format, SerDe) pair; both must match.
    for (HiveStorageFormat candidate : HiveStorageFormat.values()) {
        if (candidate.getOutputFormat().equals(outputFormat) && candidate.getSerDe().equals(serde)) {
            return candidate;
        }
    }
    throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, format("Output format %s with SerDe %s is not supported", outputFormat, serde));
}
@Test
public void testTableCreation()
        throws Exception
{
    // Create and immediately drop a table once per supported storage format.
    for (HiveStorageFormat format : HiveStorageFormat.values()) {
        createTable(temporaryCreateTable, format);
        dropTable(temporaryCreateTable);
    }
}
/**
 * Captures a snapshot of a Hive table's metadata together with its column statistics.
 *
 * @param table the metastore table to copy metadata from
 * @param columnStatistics per-column statistics; must not be null
 */
public TableMetadata(Table table, Map<String, HiveColumnStatistics> columnStatistics)
{
    owner = table.getOwner();
    tableType = table.getTableType();
    dataColumns = table.getDataColumns();
    partitionColumns = table.getPartitionColumns();
    parameters = table.getParameters();

    // Map the raw storage format back to a known HiveStorageFormat, if any matches.
    StorageFormat actualFormat = table.getStorage().getStorageFormat();
    storageFormat = Arrays.stream(HiveStorageFormat.values())
            .filter(candidate -> actualFormat.equals(StorageFormat.fromHiveStorageFormat(candidate)))
            .findFirst();

    bucketProperty = table.getStorage().getBucketProperty();
    serdeParameters = table.getStorage().getSerdeParameters();

    // Only external tables record an explicit storage location.
    externalLocation = tableType.equals(TableType.EXTERNAL_TABLE.name())
            ? Optional.of(table.getStorage().getLocation())
            : Optional.empty();

    viewOriginalText = table.getViewOriginalText();
    viewExpandedText = table.getViewExpandedText();
    this.columnStatistics = ImmutableMap.copyOf(requireNonNull(columnStatistics, "columnStatistics is null"));
}
/**
 * Captures a snapshot of a Hive partition's metadata and statistics.
 *
 * @param table the owning table (used only to decide whether the partition is external)
 * @param partitionWithStatistics the partition plus its statistics
 */
public PartitionMetadata(Table table, PartitionWithStatistics partitionWithStatistics)
{
    Partition partition = partitionWithStatistics.getPartition();
    PartitionStatistics statistics = partitionWithStatistics.getStatistics();

    this.columns = partition.getColumns();
    // Fold the basic statistics into the partition parameters.
    this.parameters = updateStatisticsParameters(partition.getParameters(), statistics.getBasicStatistics());

    // Map the raw storage format back to a known HiveStorageFormat, if any matches.
    StorageFormat actualFormat = partition.getStorage().getStorageFormat();
    storageFormat = Arrays.stream(HiveStorageFormat.values())
            .filter(candidate -> actualFormat.equals(StorageFormat.fromHiveStorageFormat(candidate)))
            .findFirst();

    // Only partitions of external tables record an explicit storage location.
    externalLocation = table.getTableType().equals(TableType.EXTERNAL_TABLE.name())
            ? Optional.of(partition.getStorage().getLocation())
            : Optional.empty();

    bucketProperty = partition.getStorage().getBucketProperty();
    serdeParameters = partition.getStorage().getSerdeParameters();
    columnStatistics = ImmutableMap.copyOf(statistics.getColumnStatistics());
}
/**
 * Lists every Hive storage format to exercise in tests: each format with the default
 * session, plus ORC and DWRF with the optimized ORC writer enabled.
 *
 * @return immutable list of format/session pairs
 */
private List<TestingHiveStorageFormat> getAllTestingHiveStorageFormat()
{
    Session session = getSession();
    ImmutableList.Builder<TestingHiveStorageFormat> formats = ImmutableList.builder();
    for (HiveStorageFormat hiveStorageFormat : HiveStorageFormat.values()) {
        formats.add(new TestingHiveStorageFormat(session, hiveStorageFormat));
    }

    // Build the optimized-writer session once instead of duplicating the identical
    // builder chain for ORC and DWRF.
    Session optimizedWriterSession = Session.builder(session)
            .setCatalogSessionProperty(session.getCatalog().get(), "orc_optimized_writer_enabled", "true")
            .build();
    formats.add(new TestingHiveStorageFormat(optimizedWriterSession, HiveStorageFormat.ORC));
    formats.add(new TestingHiveStorageFormat(optimizedWriterSession, HiveStorageFormat.DWRF));
    return formats.build();
}
@Test
public void testAllFormats()
        throws Exception
{
    HiveClientConfig config = new HiveClientConfig();
    File tempDir = Files.createTempDir();
    try {
        ExtendedHiveMetastore metastore = createTestingFileHiveMetastore(new File(tempDir, "metastore"));
        for (HiveStorageFormat storageFormat : HiveStorageFormat.values()) {
            // Establish the uncompressed baseline for this format first.
            config.setHiveStorageFormat(storageFormat);
            config.setHiveCompressionCodec(NONE);
            long uncompressedLength = writeTestFile(config, metastore, makeFileName(tempDir, config));
            assertGreaterThan(uncompressedLength, 0L);

            // Every real codec must produce a strictly smaller file than the baseline.
            for (HiveCompressionCodec codec : HiveCompressionCodec.values()) {
                if (codec != NONE) {
                    config.setHiveCompressionCodec(codec);
                    long compressedLength = writeTestFile(config, metastore, makeFileName(tempDir, config));
                    assertTrue(uncompressedLength > compressedLength, format("%s with %s compressed to %s which is not less than %s", storageFormat, codec, compressedLength, uncompressedLength));
                }
            }
        }
    }
    finally {
        deleteRecursively(tempDir.toPath(), ALLOW_INSECURE);
    }
}
@Test
public void insertPartitionedTable()
        throws Exception
{
    // Exercise the partitioned-insert path once per supported storage format.
    for (HiveStorageFormat format : HiveStorageFormat.values()) {
        insertPartitionedTable(format);
    }
}
@Test
public void testMetadataDelete()
        throws Exception
{
    // Exercise metadata-only deletes once per supported storage format.
    for (HiveStorageFormat format : HiveStorageFormat.values()) {
        testMetadataDelete(format);
    }
}
@Test
public void createPartitionedTable()
        throws Exception
{
    // Exercise partitioned-table creation once per supported storage format.
    for (HiveStorageFormat format : HiveStorageFormat.values()) {
        createPartitionedTable(format);
    }
}
@Test
public void insertTable()
        throws Exception
{
    // Exercise the insert path once per supported storage format.
    for (HiveStorageFormat format : HiveStorageFormat.values()) {
        insertTable(format);
    }
}
@Test
public void createTableAs()
        throws Exception
{
    // Exercise CREATE TABLE AS once per supported storage format.
    for (HiveStorageFormat format : HiveStorageFormat.values()) {
        createTableAs(format);
    }
}
@Test
public void createPartitionedTableAs()
        throws Exception
{
    // Exercise partitioned CREATE TABLE AS once per supported storage format.
    for (HiveStorageFormat format : HiveStorageFormat.values()) {
        createPartitionedTableAs(format);
    }
}
@Test
public void testTableCreation()
        throws Exception
{
    // Create and immediately drop a table once per supported storage format.
    for (HiveStorageFormat format : HiveStorageFormat.values()) {
        createTable(temporaryCreateTable, format);
        dropTable(temporaryCreateTable);
    }
}
@Test
public void testTableCreation()
        throws Exception
{
    for (HiveStorageFormat format : HiveStorageFormat.values()) {
        try {
            doCreateTable(temporaryCreateTable, format, "presto_test");
        }
        finally {
            // Drop unconditionally so the next format starts from a clean slate.
            dropTable(temporaryCreateTable);
        }
    }
}
/**
 * Resolves the {@link HiveStorageFormat} of a Thrift metastore table by matching its
 * output format and serialization library against every known format.
 *
 * @throws PrestoException with {@code HIVE_INVALID_METADATA} when the storage descriptor
 *         or SerDe info is missing, or {@code HIVE_UNSUPPORTED_FORMAT} when no format matches
 */
private static HiveStorageFormat extractHiveStorageFormat(Table table)
{
    // Guard against malformed metastore entries before inspecting the format.
    StorageDescriptor storageDescriptor = table.getSd();
    if (storageDescriptor == null) {
        throw new PrestoException(HIVE_INVALID_METADATA, "Table is missing storage descriptor");
    }
    SerDeInfo serDeInfo = storageDescriptor.getSerdeInfo();
    if (serDeInfo == null) {
        throw new PrestoException(HIVE_INVALID_METADATA, "Table storage descriptor is missing SerDe info");
    }

    String outputFormat = storageDescriptor.getOutputFormat();
    String serializationLib = serDeInfo.getSerializationLib();

    // A format is identified by the (output format, serialization lib) pair; both must match.
    for (HiveStorageFormat candidate : HiveStorageFormat.values()) {
        if (candidate.getOutputFormat().equals(outputFormat) && candidate.getSerDe().equals(serializationLib)) {
            return candidate;
        }
    }
    throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, format("Output format %s with SerDe %s is not supported", outputFormat, serializationLib));
}
/**
 * Lists every Hive storage format to exercise in tests: each format with the default
 * session, plus ORC and DWRF with the optimized ORC writer enabled.
 *
 * @return immutable list of format/session pairs
 */
private List<TestingHiveStorageFormat> getAllTestingHiveStorageFormat()
{
    Session session = getSession();
    ImmutableList.Builder<TestingHiveStorageFormat> formats = ImmutableList.builder();
    for (HiveStorageFormat hiveStorageFormat : HiveStorageFormat.values()) {
        formats.add(new TestingHiveStorageFormat(session, hiveStorageFormat));
    }

    // Build the optimized-writer session once instead of duplicating the identical
    // builder chain for ORC and DWRF.
    Session optimizedWriterSession = Session.builder(session)
            .setCatalogSessionProperty(session.getCatalog().get(), "orc_optimized_writer_enabled", "true")
            .build();
    formats.add(new TestingHiveStorageFormat(optimizedWriterSession, HiveStorageFormat.ORC));
    formats.add(new TestingHiveStorageFormat(optimizedWriterSession, HiveStorageFormat.DWRF));
    return formats.build();
}
@Test
public void testAllFormats()
        throws Exception
{
    HiveClientConfig config = new HiveClientConfig();
    File tempDir = Files.createTempDir();
    try {
        HiveMetastore metastore = new InMemoryHiveMetastore(new File(tempDir, "metastore"));
        for (HiveStorageFormat storageFormat : HiveStorageFormat.values()) {
            // Establish the uncompressed baseline for this format first.
            config.setHiveStorageFormat(storageFormat);
            config.setHiveCompressionCodec(NONE);
            long uncompressedLength = writeTestFile(config, metastore, makeFileName(tempDir, config));
            assertGreaterThan(uncompressedLength, 0L);

            // Every real codec must produce a strictly smaller file than the baseline.
            for (HiveCompressionCodec codec : HiveCompressionCodec.values()) {
                if (codec != NONE) {
                    config.setHiveCompressionCodec(codec);
                    long compressedLength = writeTestFile(config, metastore, makeFileName(tempDir, config));
                    assertTrue(uncompressedLength > compressedLength, format("%s with %s compressed to %s which is not less than %s", storageFormat, codec, compressedLength, uncompressedLength));
                }
            }
        }
    }
    finally {
        FileUtils.deleteRecursively(tempDir);
    }
}
@Test
public void testAllFormats()
        throws Exception
{
    HiveClientConfig config = new HiveClientConfig();
    File tempDir = Files.createTempDir();
    try {
        ExtendedHiveMetastore metastore = createTestingFileHiveMetastore(new File(tempDir, "metastore"));
        for (HiveStorageFormat storageFormat : HiveStorageFormat.values()) {
            // Establish the uncompressed baseline for this format first.
            config.setHiveStorageFormat(storageFormat);
            config.setHiveCompressionCodec(NONE);
            long uncompressedLength = writeTestFile(config, metastore, makeFileName(tempDir, config));
            assertGreaterThan(uncompressedLength, 0L);

            // Every real codec must produce a strictly smaller file than the baseline.
            for (HiveCompressionCodec codec : HiveCompressionCodec.values()) {
                if (codec != NONE) {
                    config.setHiveCompressionCodec(codec);
                    long compressedLength = writeTestFile(config, metastore, makeFileName(tempDir, config));
                    assertTrue(uncompressedLength > compressedLength, format("%s with %s compressed to %s which is not less than %s", storageFormat, codec, compressedLength, uncompressedLength));
                }
            }
        }
    }
    finally {
        deleteRecursively(tempDir.toPath(), ALLOW_INSECURE);
    }
}
.build(); protected Set<HiveStorageFormat> createTableFormats = ImmutableSet.copyOf(HiveStorageFormat.values());