// Build a HiveTable for (dbName, tableName), wire in the configured SerDe manager,
// and point the table at the fully-qualified filesystem location.
HiveTable.Builder tableBuilder = new HiveTable.Builder().withDbName(dbName).withTableName(tableName);
// NOTE(review): "withSerdeManaager" (sic) appears to be the actual method name in the
// project's builder API — do not "fix" the spelling here without renaming the API.
tableBuilder = tableBuilder.withSerdeManaager(HiveSerDeManager.get(this.props));
HiveTable table = tableBuilder.build();
// makeQualified resolves the table path against this.fs's scheme/authority before storing it.
table.setLocation(this.fs.makeQualified(getTableLocation(path)).toString());
/**
 * Verifies that an Avro-backed {@link HiveTable} converts into a Thrift {@link Table} whose
 * storage descriptor carries the Avro input/output formats, the Avro SerDe, and the columns
 * derived from the {@code avro.schema.literal} serde property.
 */
@Test
public void testGetTableAvro() {
  final String db = "testdb";
  final String tbl = "testtable";

  // Serde props carry an inline Avro schema declaring a single int field named "a".
  State props = new State();
  props.setProp("avro.schema.literal",
      "{\"type\": \"record\", \"name\": \"TestEvent\","
          + " \"namespace\": \"test.namespace\", \"fields\": [{\"name\":\"a\","
          + " \"type\": \"int\"}]}");

  HiveTable.Builder tableBuilder = new HiveTable.Builder();
  tableBuilder.withDbName(db).withTableName(tbl);
  tableBuilder.withSerdeProps(props);

  HiveTable source = tableBuilder.build();
  source.setInputFormat(AvroContainerInputFormat.class.getName());
  source.setOutputFormat(AvroContainerOutputFormat.class.getName());
  source.setSerDeType(AvroSerDe.class.getName());

  Table converted = HiveMetaStoreUtils.getTable(source);

  Assert.assertEquals(converted.getDbName(), db);
  Assert.assertEquals(converted.getTableName(), tbl);

  StorageDescriptor descriptor = converted.getSd();
  Assert.assertEquals(descriptor.getInputFormat(), AvroContainerInputFormat.class.getName());
  Assert.assertEquals(descriptor.getOutputFormat(), AvroContainerOutputFormat.class.getName());
  Assert.assertNotNull(descriptor.getSerdeInfo());
  Assert.assertEquals(descriptor.getSerdeInfo().getSerializationLib(), AvroSerDe.class.getName());

  // The column list must mirror the single field declared in the schema literal.
  List<FieldSchema> columns = descriptor.getCols();
  Assert.assertTrue(columns != null && columns.size() == 1);
  FieldSchema first = columns.get(0);
  Assert.assertEquals(first.getName(), "a");
  Assert.assertEquals(first.getType(), "int");
}
// Build a HiveTable for (dbName, tableName), wire in the configured SerDe manager,
// and point the table at the fully-qualified filesystem location.
HiveTable.Builder tableBuilder = new HiveTable.Builder().withDbName(dbName).withTableName(tableName);
// NOTE(review): "withSerdeManaager" (sic) appears to be the actual method name in the
// project's builder API — do not "fix" the spelling here without renaming the API.
tableBuilder = tableBuilder.withSerdeManaager(HiveSerDeManager.get(this.props));
HiveTable table = tableBuilder.build();
// makeQualified resolves the table path against this.fs's scheme/authority before storing it.
table.setLocation(this.fs.makeQualified(getTableLocation(path)).toString());
/**
 * Convert a {@link Table} into a {@link HiveTable}.
 *
 * @param table the Thrift metastore table to convert
 * @return a {@link HiveTable} carrying the table's props, storage props, serde props,
 *         partition keys, and (when present) create time, columns, and bucket columns
 */
public static HiveTable getHiveTable(Table table) {
  StorageDescriptor sd = table.getSd();

  HiveTable hiveTable = new HiveTable.Builder()
      .withDbName(table.getDbName())
      .withTableName(table.getTableName())
      .withPartitionKeys(getColumns(table.getPartitionKeys()))
      .withProps(getTableProps(table))
      .withStorageProps(getStorageProps(sd))
      .withSerdeProps(getSerDeProps(sd.getSerdeInfo()))
      .build();

  // Thrift uses 0 to mean "no create time recorded"; only propagate real timestamps.
  if (table.getCreateTime() > 0) {
    hiveTable.setCreateTime(table.getCreateTime());
  }
  if (sd.getCols() != null) {
    hiveTable.setColumns(getColumns(sd.getCols()));
  }
  if (sd.getBucketCols() != null) {
    hiveTable.setBucketColumns(sd.getBucketCols());
  }
  return hiveTable;
}
/**
 * Verifies that conversion to a Thrift {@link Table} still succeeds when the
 * {@code avro.schema.literal} property holds an unparseable schema: formats and SerDe
 * must survive intact while the derived column list comes back empty.
 */
@Test
public void testGetTableAvroInvalidSchema() {
  final String db = "testdb";
  final String tbl = "testtable";

  // Deliberately broken schema literal: columns cannot be derived from it.
  State props = new State();
  props.setProp("avro.schema.literal", "invalid schema");

  HiveTable.Builder tableBuilder = new HiveTable.Builder();
  tableBuilder.withDbName(db).withTableName(tbl);
  tableBuilder.withSerdeProps(props);

  HiveTable source = tableBuilder.build();
  source.setInputFormat(AvroContainerInputFormat.class.getName());
  source.setOutputFormat(AvroContainerOutputFormat.class.getName());
  source.setSerDeType(AvroSerDe.class.getName());

  Table converted = HiveMetaStoreUtils.getTable(source);

  Assert.assertEquals(converted.getDbName(), db);
  Assert.assertEquals(converted.getTableName(), tbl);

  StorageDescriptor descriptor = converted.getSd();
  Assert.assertEquals(descriptor.getInputFormat(), AvroContainerInputFormat.class.getName());
  Assert.assertEquals(descriptor.getOutputFormat(), AvroContainerOutputFormat.class.getName());
  Assert.assertNotNull(descriptor.getSerdeInfo());
  Assert.assertEquals(descriptor.getSerdeInfo().getSerializationLib(), AvroSerDe.class.getName());

  // Invalid schema => no columns could be extracted.
  List<FieldSchema> columns = descriptor.getCols();
  Assert.assertTrue(columns != null && columns.size() == 0);
}
private void validateSchemaUrl(State state, String targetSchemaFileName, boolean createConflictingFile) throws IOException { HiveAvroSerDeManager manager = new HiveAvroSerDeManager(state); HiveRegistrationUnit registrationUnit = (new HiveTable.Builder()).withDbName(TEST_DB).withTableName(TEST_TABLE).build(); // Clean up existing file String targetPathStr = new Path(this.testBasePath, targetSchemaFileName).toString(); File targetFile = new File(targetPathStr); targetFile.delete(); // create a conflicting file if (createConflictingFile) { targetFile.createNewFile(); } manager.addSerDeProperties(this.testBasePath, registrationUnit); Assert.assertNull(registrationUnit.getSerDeProps().getProp(HiveAvroSerDeManager.SCHEMA_LITERAL)); String schemaUrl = registrationUnit.getSerDeProps().getProp(HiveAvroSerDeManager.SCHEMA_URL); Assert.assertEquals(schemaUrl, targetPathStr); Assert.assertTrue(IOUtils.contentEquals(this.getClass().getResourceAsStream("/test-hive-table/hive-test.avsc"), new FileInputStream(schemaUrl))); }
/**
 * Convert a {@link Table} into a {@link HiveTable}.
 *
 * @param table the Thrift metastore table to convert
 * @return the equivalent {@link HiveTable}
 */
public static HiveTable getHiveTable(Table table) {
  State tableProps = getTableProps(table);
  State storageProps = getStorageProps(table.getSd());
  State serDeProps = getSerDeProps(table.getSd().getSerdeInfo());

  HiveTable.Builder builder = new HiveTable.Builder();
  builder.withDbName(table.getDbName()).withTableName(table.getTableName());
  builder.withPartitionKeys(getColumns(table.getPartitionKeys()));
  builder.withProps(tableProps).withStorageProps(storageProps).withSerdeProps(serDeProps);
  HiveTable result = builder.build();

  // A create time of 0 means "unset" in Thrift; skip it rather than record a bogus epoch.
  if (table.getCreateTime() > 0) {
    result.setCreateTime(table.getCreateTime());
  }
  // Columns and bucket columns are optional on the storage descriptor.
  if (table.getSd().getCols() != null) {
    result.setColumns(getColumns(table.getSd().getCols()));
  }
  if (table.getSd().getBucketCols() != null) {
    result.setBucketColumns(table.getSd().getBucketCols());
  }
  return result;
}
/**
 * Test that the schema is written to the schema literal.
 */
@Test
public void testSchemaLiteral() throws IOException {
  HiveAvroSerDeManager serDeManager = new HiveAvroSerDeManager(new State());
  HiveRegistrationUnit unit =
      new HiveTable.Builder().withDbName(TEST_DB).withTableName(TEST_TABLE).build();

  serDeManager.addSerDeProperties(this.testBasePath, unit);

  // The registered schema literal should embed the test schema's namespace.
  String literal = unit.getSerDeProps().getProp(HiveAvroSerDeManager.SCHEMA_LITERAL);
  Assert.assertTrue(literal.contains("example.avro"));
}