List<FieldSchema> fields = new ArrayList<FieldSchema>();
fields.add(new FieldSchema("val", "int", null));
table.setFields(fields);
table.setDataLocation(Warehouse.getDnsPath(
    new Path(SessionState.get().getTempTableSpace(), tableName), conf));
private static class ThreadLocalHive extends ThreadLocal<Hive> {
  @Override
  protected Hive initialValue() {
    return null;
  }

  @Override
  public synchronized void set(Hive hiveObj) {
    Hive currentHive = this.get();
    if (currentHive != hiveObj) {
      // Remove/close current thread-local Hive object before overwriting with new Hive object.
      remove();
      super.set(hiveObj);
    }
  }

  @Override
  public synchronized void remove() {
    Hive currentHive = this.get();
    if (currentHive != null) {
      // Close the metastore connections before removing it from thread local hiveDB.
      currentHive.close(false);
      super.remove();
    }
  }
}
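A minimal usage sketch for the thread-local holder above; the static field and helper methods are illustrative assumptions, not part of the original snippet:

// Hypothetical consumer of ThreadLocalHive; names here are assumptions.
private static final ThreadLocalHive hiveDB = new ThreadLocalHive();

// Replace this thread's Hive object; set() closes the previous one if it differs.
static void switchTo(Hive newHive) {
  hiveDB.set(newHive);
}

// Drop this thread's Hive object; remove() closes its metastore connections.
static void shutdown() {
  hiveDB.remove();
}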
tempTableObj.setFields(table.getAllCols());
Table table = db.newTable(tableName);
table.setSerializationLib(format.getSerde());
table.setFields(fields);
table.setDataLocation(tablePath);
table.getTTable().setTemporary(true);
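Pieced together, the temp-table snippets above suggest a flow like the following sketch; `db`, `format`, `conf`, and `tableName` are assumed to be in scope, and the final `createTable` call is an assumption about the surrounding code:

// Hedged sketch of the full temporary-table flow implied by the snippets above.
List<FieldSchema> fields = new ArrayList<FieldSchema>();
fields.add(new FieldSchema("val", "int", null));

Table table = db.newTable(tableName);
table.setSerializationLib(format.getSerde());
table.setFields(fields);
table.setDataLocation(Warehouse.getDnsPath(
    new Path(SessionState.get().getTempTableSpace(), tableName), conf));
table.getTTable().setTemporary(true);
db.createTable(table);  // assumption: register the temp table with the session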
try {
  Deserializer oldSerde = MetaStoreUtils.getDeserializer(
      conf, tbl.getTTable(), false, oldSerdeName);
  tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getTableName(), oldSerde));
} catch (MetaException ex) {
  throw new HiveException(ex);
}
try {
  Deserializer oldSerde = HiveMetaStoreUtils.getDeserializer(
      conf, tbl.getTTable(), false, oldSerdeName);
  tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getTableName(), oldSerde));
} catch (MetaException ex) {
  throw new HiveException(ex);
}
tbl.setFields(oldtbl.getCols());
tbl.setPartCols(oldtbl.getPartCols());
tbl.setFields(MetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(), tbl.getDeserializer()));
tbl.setFields(getSchema());
if (getComment() != null) {
  tbl.setProperty("comment", getComment());
}
tbl.setFields(getCols());
fields.add(new FieldSchema("col2", serdeConstants.STRING_TYPE_NAME, "string -- second column")); fields.add(new FieldSchema("col3", serdeConstants.DOUBLE_TYPE_NAME, "double -- thrift column")); tbl.setFields(fields);
oldview.setFields(crtView.getSchema());
if (crtView.getComment() != null) {
  oldview.setProperty("comment", crtView.getComment());
}
@Override
public void alterSchema(String database, String tableName, Schema schema) {
  Table table = hiveMetaStore.getTable(database, tableName);
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  hiveMetaStore.alterTable(table);
}
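A hypothetical caller of alterSchema, assuming a Kafka Connect-style Schema built with SchemaBuilder; the schema, table names, and the metaStore instance are illustrative assumptions:

// Hypothetical usage; schema and identifiers are illustrative, not from the snippet.
Schema newSchema = SchemaBuilder.struct()
    .field("id", Schema.INT64_SCHEMA)
    .field("name", Schema.STRING_SCHEMA)
    .build();
metaStore.alterSchema("default", "users", newSchema);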
/**
 * Creates the hive table.
 *
 * @param db      the db
 * @param table   the table
 * @param columns the columns
 * @throws Exception the exception
 */
void createHiveTable(String db, String table, List<FieldSchema> columns) throws Exception {
  Table tbl1 = new Table(db, table);
  tbl1.setFields(columns);
  Hive.get().createTable(tbl1);
  System.out.println("Created table : " + table);
}
private Table constructParquetTable(String database, String tableName, Schema schema,
    Partitioner partitioner) throws HiveMetaStoreException {
  Table table = new Table(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(getHiveParquetSerde());
  try {
    table.setInputFormatClass(getHiveParquetInputFormat());
    table.setOutputFormatClass(getHiveParquetOutputFormat());
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  // Convert the Copycat schema to Hive columns.
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  return table;
}
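A short usage sketch for constructParquetTable; the createTable call on the metastore wrapper is an assumption about the enclosing class, mirroring the alterTable call shown earlier:

// Hedged usage sketch; hiveMetaStore.createTable(...) is assumed to exist
// on the enclosing class.
Table parquetTable = constructParquetTable(database, tableName, schema, partitioner);
hiveMetaStore.createTable(parquetTable);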