@Deprecated // @deprecated in favour of {@link #Builder(HCatTable, boolean)}. To be removed in Hive 0.16. private Builder(String dbName, String tableName, List<HCatFieldSchema> columns) { hcatTable = new HCatTable(dbName, tableName).cols(columns); }
new HCatFieldSchema("grid", Type.STRING, "")); // final partition column; NOTE(review): the list literal begins outside this view
// Partitioned table: data columns plus the partition columns built above.
HCatTable table = new HCatTable(dbName, tableName).cols(columnSchema).partCols(partitionSchema);
// ifNotExists == false: creation fails if the table already exists.
client.createTable(HCatCreateTableDesc.create(table, false).build());
// Single STRING partition column, named by the 'partitionColumn' variable.
ArrayList<HCatFieldSchema> partitionColumns = new ArrayList<HCatFieldSchema>(
    Arrays.asList(new HCatFieldSchema(partitionColumn, Type.STRING, "")));
HCatTable table = new HCatTable(dbName, tableName).cols(columns).partCols(partitionColumns);
// ifNotExists == false: creation fails if the table already exists.
client.createTable(HCatCreateTableDesc.create(table, false).build());
new HCatFieldSchema("grid", Type.STRING, "")); // final partition column; NOTE(review): the list literal begins outside this view
// Partitioned table: data columns plus the partition columns built above.
HCatTable table = new HCatTable(dbName, tableName).cols(columnSchema).partCols(partitionSchema);
// ifNotExists == false: creation fails if the table already exists.
client.createTable(HCatCreateTableDesc.create(table, false).build());
ptnCols.add(new HCatFieldSchema("country", Type.STRING, "country column"));
// SequenceFile-backed table using the partition columns accumulated above.
HCatTable table = new HCatTable(dbName, tableName).cols(cols)
    .partCols(ptnCols)
    .fileFormat("sequenceFile");
/**
 * This test tests that a plain table instantiation matches what hive says an
 * empty table create should look like.
 * @throws Exception
 */
@Test
public void testEmptyTableInstantiation() throws Exception {
  HCatClient client = HCatClient.create(new Configuration(hcatConf));
  String dbName = "default";
  String tblName = "testEmptyCreate";
  ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
  cols.add(new HCatFieldSchema("id", Type.INT, "id comment"));
  cols.add(new HCatFieldSchema("value", Type.STRING, "value comment"));
  // Drop any leftover table from a prior run (ifExists == true, so absence is not an error).
  client.dropTable(dbName, tblName, true);
  // Create a minimalistic table
  client.createTable(HCatCreateTableDesc
      .create(new HCatTable(dbName, tblName).cols(cols), false)
      .build());
  HCatTable tCreated = client.getTable(dbName, tblName);
  // Hive's own notion of a freshly-created empty table serves as the reference.
  org.apache.hadoop.hive.metastore.api.Table emptyTable = Table.getEmptyTable(dbName, tblName);
  Map<String, String> createdProps = tCreated.getTblProps();
  Map<String, String> emptyProps = emptyTable.getParameters();
  // Every table property hive defines for an empty table must be retained verbatim.
  mapEqualsContainedIn(emptyProps, createdProps);
  // Test sd params - we check that all the parameters in an empty table
  // are retained as-is. We may add beyond it, but not change values for
  // any parameters that hive defines for an empty table.
  Map<String, String> createdSdParams = tCreated.getSerdeParams();
  Map<String, String> emptySdParams = emptyTable.getSd().getSerdeInfo().getParameters();
  mapEqualsContainedIn(emptySdParams, createdSdParams);
}
// Table definition combining user-supplied table properties, data columns and partition columns.
HCatTable table = (new HCatTable(dbName, tableName)).tblProps(tprops).cols(cols).partCols(pcols);
// ifNotExists == false: database creation fails if 'dbName' already exists.
client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
// Unpartitioned text-file backed table over the given columns.
HCatTable table = (new HCatTable(dbName, tableName)).cols(cols).fileFormat("textfile");
// ifNotExists == false: database creation fails if 'dbName' already exists.
client.createDatabase(HCatCreateDBDesc.create(dbName).ifNotExists(false).build());
// Unpartitioned text-file backed table over the given columns.
HCatTable table = (new HCatTable(dbName, tableName)).cols(cols).fileFormat("textfile");
// First table: unpartitioned, columns from 'cols1'.
HCatTable table1 = (new HCatTable(dbName, tblName1)).cols(cols1);
sourceMetastore.createTable(HCatCreateTableDesc.create(table1).build());
// Second table: data column a:int, partitioned on b:string (schemas parsed from type strings).
List<HCatFieldSchema> cols2 = HCatSchemaUtils.getHCatSchema("a:int").getFields();
List<HCatFieldSchema> pcols2 = HCatSchemaUtils.getHCatSchema("b:string").getFields();
HCatTable table2 = (new HCatTable(dbName, tblName2)).cols(cols2).partCols(pcols2);
sourceMetastore.createTable(HCatCreateTableDesc.create(table2).build());
cols(rhs.cols); // copy the column list from 'rhs'; NOTE(review): enclosing copy routine is outside this view — confirm a deep copy is not required
// Table definition combining user-supplied table properties, data columns and partition columns.
HCatTable table = (new HCatTable(dbName, tableName)).tblProps(props).cols(cols).partCols(pcols);
new HCatFieldSchema("grid", Type.STRING, "")); // final partition column; NOTE(review): the list literal begins outside this view
HCatTable sourceTable = new HCatTable(dbName, tableName).cols(columnSchema).partCols(partitionSchema);
sourceMetaStore().createTable(HCatCreateTableDesc.create(sourceTable).build());
// Now mutate the source-table definition: extra column, ORC storage, ZLIB compression.
Map<String, String> tableParams = new HashMap<String, String>(1);
tableParams.put("orc.compress", "ZLIB");
sourceTable.cols(newColumnSchema) // Add a column.
    .fileFormat("orcfile") // Change SerDe, File I/O formats.
    .tblProps(tableParams)
new HCatFieldSchema("grid", Type.STRING, "")); // final partition column; NOTE(review): the list literal begins outside this view
HCatTable sourceTable = new HCatTable(dbName, tableName).cols(columnSchema)
    .partCols(partitionSchema)
    .comment("Source table.");
// Mutate the source-table definition: extra column, ORC storage, ZLIB compression.
Map<String, String> tableParams = new HashMap<String, String>(1);
tableParams.put("orc.compress", "ZLIB");
sourceTable.cols(newColumnSchema) // Add a column.
    .fileFormat("orcfile") // Change SerDe, File I/O formats.
    .tblProps(tableParams)
new HCatFieldSchema("grid", Type.STRING, "")); // final partition column; NOTE(review): the list literal begins outside this view
HCatTable sourceTable = new HCatTable(dbName, tableName).cols(columnSchema)
    .partCols(partitionSchema)
    .comment("Source table.");
// Mutate the source-table definition: extra column, ORC storage, ZLIB compression.
Map<String, String> tableParams = new HashMap<String, String>(1);
tableParams.put("orc.compress", "ZLIB");
sourceTable.cols(newColumnSchema) // Add a column.
    .fileFormat("orcfile") // Change SerDe, File I/O formats.
    .tblProps(tableParams)
@Deprecated // @deprecated in favour of {@link #Builder(HCatTable, boolean)}. To be removed in Hive 0.16. private Builder(String dbName, String tableName, List<HCatFieldSchema> columns) { hcatTable = new HCatTable(dbName, tableName).cols(columns); }
@Deprecated // @deprecated in favour of {@link #Builder(HCatTable, boolean)}. To be removed in Hive 0.16. private Builder(String dbName, String tableName, List<HCatFieldSchema> columns) { hcatTable = new HCatTable(dbName, tableName).cols(columns); }
@Deprecated // @deprecated in favour of {@link #Builder(HCatTable, boolean)}. To be removed in Hive 0.16. private Builder(String dbName, String tableName, List<HCatFieldSchema> columns) { hcatTable = new HCatTable(dbName, tableName).cols(columns); }