/**
 * Converts the columns of a storage descriptor into HCat field schemas.
 *
 * @param sd storage descriptor whose columns are translated
 * @return one {@code HCatFieldSchema} per column, in descriptor order
 * @throws HCatException if a column cannot be converted
 */
private static List<HCatFieldSchema> getColumns(StorageDescriptor sd) throws HCatException {
  // Pre-size the list with the known column count to avoid resizing.
  List<HCatFieldSchema> result = new ArrayList<HCatFieldSchema>(sd.getColsSize());
  for (FieldSchema col : sd.getCols()) {
    result.add(HCatSchemaUtils.getHCatFieldSchema(col));
  }
  return result;
}
private int getSerDeOverheadFactor() { final int projectedColumnCount; if (Utilities.isStarQuery(columns)) { Table hiveTable = hiveReadEntry.getTable(); projectedColumnCount = hiveTable.getSd().getColsSize() + hiveTable.getPartitionKeysSize(); } else { // In cost estimation, # of project columns should be >= 1, even for skipAll query. projectedColumnCount = Math.max(columns.size(), 1); } return projectedColumnCount * HIVE_SERDE_SCAN_OVERHEAD_FACTOR_PER_COLUMN; } }
// Pre-size with the descriptor's column count, then collect every column name.
// NOTE(review): fragment — the enclosing loop/scope closes outside this view.
List<String> allCols = new ArrayList<>(table.getSd().getColsSize()); for (FieldSchema fs : table.getSd().getCols()) { allCols.add(fs.getName());
// NOTE(review): mid-expression fragment — appears to verify the table's current
// serialization lib matches some class name, that at least one column exists, and
// that the first column's type is not a complex type (no '<' in the type string)
// before swapping the serde; confirm against the surrounding condition.
.getName().equals( tTable.getSd().getSerdeInfo().getSerializationLib()) && tTable.getSd().getColsSize() > 0 && tTable.getSd().getCols().get(0).getType().indexOf('<') == -1) { tTable.getSd().getSerdeInfo().setSerializationLib(
private static class ThreadLocalHive extends ThreadLocal<Hive> { @Override protected Hive initialValue() { return null; } @Override public synchronized void set(Hive hiveObj) { Hive currentHive = this.get(); if (currentHive != hiveObj) { // Remove/close current thread-local Hive object before overwriting with new Hive object. remove(); super.set(hiveObj); } } @Override public synchronized void remove() { Hive currentHive = this.get(); if (currentHive != null) { // Close the metastore connections before removing it from thread local hiveDB. currentHive.close(false); super.remove(); } } }
// NOTE(review): mid-expression fragment — seems to guard a serde swap on: the
// current serialization lib equals some class name, the table has at least one
// column, and the first column's type string contains no '<' (i.e. not a
// complex type). Verify against the surrounding condition.
.getName().equals( tTable.getSd().getSerdeInfo().getSerializationLib()) && tTable.getSd().getColsSize() > 0 && tTable.getSd().getCols().get(0).getType().indexOf('<') == -1) { tTable.getSd().getSerdeInfo().setSerializationLib(
// Default the table's database to the session's current database; if no column
// schema is present (neither table cols nor storage-descriptor cols), derive
// the fields from the table's deserializer.
// NOTE(review): fragment — the enclosing if-block closes outside this view.
tbl.setDbName(SessionState.get().getCurrentDatabase()); if (tbl.getCols().size() == 0 || tbl.getSd().getColsSize() == 0) { tbl.setFields(MetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(), tbl.getDeserializer()));
// NOTE(review): fragment of a call's argument list — presumably an assertion
// that the storage descriptor holds exactly 3 columns; confirm the enclosing
// assertEquals/expected-value context.
3, table.getSd().getColsSize());
/**
 * Builds the HCat field-schema list for the columns of {@code sd}.
 *
 * @param sd storage descriptor supplying the column definitions
 * @return converted field schemas, preserving column order
 * @throws HCatException if conversion of a column fails
 */
private static List<HCatFieldSchema> getColumns(StorageDescriptor sd) throws HCatException {
  List<FieldSchema> cols = sd.getCols();
  // Capacity hint from the descriptor avoids intermediate array growth.
  ArrayList<HCatFieldSchema> converted = new ArrayList<HCatFieldSchema>(sd.getColsSize());
  for (int i = 0; i < cols.size(); i++) {
    converted.add(HCatSchemaUtils.getHCatFieldSchema(cols.get(i)));
  }
  return converted;
}
/**
 * Translates every column of the given storage descriptor into its
 * {@code HCatFieldSchema} equivalent.
 *
 * @param sd storage descriptor to read columns from
 * @return field schemas in the same order as {@code sd.getCols()}
 * @throws HCatException when a column's schema cannot be translated
 */
private static List<HCatFieldSchema> getColumns(StorageDescriptor sd) throws HCatException {
  // Size the backing array up front using the descriptor's column count.
  ArrayList<HCatFieldSchema> out = new ArrayList<HCatFieldSchema>(sd.getColsSize());
  for (FieldSchema column : sd.getCols()) {
    HCatFieldSchema hcatColumn = HCatSchemaUtils.getHCatFieldSchema(column);
    out.add(hcatColumn);
  }
  return out;
}
/**
 * Maps the storage descriptor's column list to HCat field schemas.
 *
 * @param sd source storage descriptor
 * @return a new list with one converted schema per column
 * @throws HCatException if any column fails to convert
 */
private static List<HCatFieldSchema> getColumns(StorageDescriptor sd) throws HCatException {
  // Allocate exactly as many slots as there are columns.
  final ArrayList<HCatFieldSchema> fields = new ArrayList<HCatFieldSchema>(sd.getColsSize());
  for (final FieldSchema fs : sd.getCols()) {
    fields.add(HCatSchemaUtils.getHCatFieldSchema(fs));
  }
  return fields;
}
// NOTE(review): mid-expression fragment — condition appears to require a
// matching current serialization lib, a non-empty column list, and a first
// column whose type string has no '<' (not a complex type) before the serde is
// replaced. Confirm against the lines preceding this fragment.
.getName().equals( tTable.getSd().getSerdeInfo().getSerializationLib()) && tTable.getSd().getColsSize() > 0 && tTable.getSd().getCols().get(0).getType().indexOf('<') == -1) { tTable.getSd().getSerdeInfo().setSerializationLib(
// NOTE(review): mid-expression fragment — guards replacing the table's
// serialization lib on: current lib equals some class name, at least one column
// present, and first column type is not complex ('<' absent from the type
// string). Verify with the enclosing condition.
.getName().equals( tTable.getSd().getSerdeInfo().getSerializationLib()) && tTable.getSd().getColsSize() > 0 && tTable.getSd().getCols().get(0).getType().indexOf('<') == -1) { tTable.getSd().getSerdeInfo().setSerializationLib(
// Set the table's database from the current session; when no columns are
// recorded on either the table or its storage descriptor, recover the schema
// from the table's deserializer.
// NOTE(review): fragment — the if-block's closing brace lies outside this view.
tbl.setDbName(SessionState.get().getCurrentDatabase()); if (tbl.getCols().size() == 0 || tbl.getSd().getColsSize() == 0) { tbl.setFields(MetaStoreUtils.getFieldsFromDeserializer(tbl.getTableName(), tbl.getDeserializer()));