/**
 * Returns this partition's schema as {@link java.util.Properties} by delegating to
 * {@code MetaStoreUtils.getSchema} with the partition's Thrift object ({@code tPartition})
 * and the owning table's Thrift object.
 *
 * @return schema properties for this partition
 */
public Properties getSchema() { return MetaStoreUtils.getSchema(tPartition, table.getTTable()); }
/**
 * Wrapper around {@link MetaStoreUtils#getSchema(StorageDescriptor, StorageDescriptor, Map, String, String, List)}
 * that first restores the cached columns onto the given table and then builds the table's
 * metadata {@link Properties}.
 *
 * @param table Hive table with cached columns
 * @return Hive table metadata
 */
public static Properties getTableMetadata(HiveTableWithColumnCache table) {
  // Re-attach the columns held in the table's column cache before reading its descriptor.
  restoreColumns(table, null);
  // No partition is involved, so the table's storage descriptor fills both sd arguments.
  StorageDescriptor sd = table.getSd();
  return MetaStoreUtils.getSchema(sd, sd, table.getParameters(),
      table.getDbName(), table.getTableName(), table.getPartitionKeys());
}
/**
 * Returns the table schema as {@link java.util.Properties}, delegating to
 * {@code MetaStoreUtils.getSchema} with this table's Thrift object ({@code tTable}).
 *
 * <p>Fix: modifiers reordered from {@code final public} to the canonical
 * {@code public final} order (JLS §8.4.3 / Checkstyle ModifierOrder); behavior is
 * unchanged.
 *
 * @return table-level schema properties
 */
public final Properties getSchema() {
  return MetaStoreUtils.getSchema(tTable);
}
/**
 * Builds the schema {@link java.util.Properties} for this partition from its Thrift
 * object ({@code tPartition}) and the owning table's Thrift object, via
 * {@code MetaStoreUtils.getSchema}.
 *
 * @return schema properties for this partition
 */
public Properties getSchema() { return MetaStoreUtils.getSchema(tPartition, table.getTTable()); }
/**
 * Delegates to {@code MetaStoreUtils.getSchema} to produce this partition's schema as
 * {@link java.util.Properties}, combining the partition Thrift object with the owning
 * table's Thrift object.
 *
 * @return schema properties for this partition
 */
public Properties getSchema() { return MetaStoreUtils.getSchema(tPartition, table.getTTable()); }
/**
 * Resolves the schema {@link Properties} for the given partition, falling back to the
 * table-level metadata when the partition represents an unpartitioned table.
 *
 * @param table     owning Hive table
 * @param partition partition to inspect; may stand for "no partition"
 * @return partition schema properties, or table metadata when unpartitioned
 */
private static Properties getPartitionSchema(Table table, Partition partition) {
  return isUnpartitioned(partition)
      ? MetaStoreUtils.getTableMetadata(table)
      : MetaStoreUtils.getSchema(partition, table);
}
try { Deserializer deserializer = SerDeUtils.lookupDeserializer(lib); deserializer.initialize(conf, MetaStoreUtils.getSchema(part, table)); return deserializer; } catch (RuntimeException e) {
deserializer.initialize(conf, MetaStoreUtils.getSchema(table)); return deserializer; } catch (RuntimeException e) {
public static Properties getSchema( org.apache.hadoop.hive.metastore.api.StorageDescriptor sd, org.apache.hadoop.hive.metastore.api.StorageDescriptor tblsd, Map<String, String> parameters, String databaseName, String tableName, List<FieldSchema> partitionKeys) { Properties schema = new Properties(); String inputFormat = sd.getInputFormat(); if (inputFormat == null || inputFormat.length() == 0) { inputFormat = org.apache.hadoop.mapred.SequenceFileInputFormat.class .getName(); } schema.setProperty( org.apache.hadoop.hive.metastore.api.Constants.FILE_INPUT_FORMAT, inputFormat); String outputFormat = sd.getOutputFormat(); if (outputFormat == null || outputFormat.length() == 0) { outputFormat = org.apache.hadoop.mapred.SequenceFileOutputFormat.class .getName(); } schema.setProperty( org.apache.hadoop.hive.metastore.api.Constants.FILE_OUTPUT_FORMAT, outputFormat); schema.setProperty( org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME, databaseName + "." + tableName); if (sd.getLocation() != null) { schema.setProperty( org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_LOCATION,
/**
 * Builds the metadata {@link Properties} for a partition. The partition's own storage
 * descriptor is used for both descriptor arguments of
 * {@code MetaStoreUtils.getSchema}.
 *
 * @param partition partition whose metadata is requested
 * @param table     owning table; supplies db name, table name and partition keys
 * @return partition metadata properties
 */
public static Properties getPartitionMetadata(
    org.apache.hadoop.hive.metastore.api.Partition partition,
    org.apache.hadoop.hive.metastore.api.Table table) {
  org.apache.hadoop.hive.metastore.api.StorageDescriptor sd = partition.getSd();
  return MetaStoreUtils.getSchema(sd, sd, partition.getParameters(),
      table.getDbName(), table.getTableName(), table.getPartitionKeys());
}
/**
 * Produces the {@link Properties} describing a partition's metadata; the partition's
 * storage descriptor is passed twice because no separate table descriptor is needed here.
 *
 * @param partition partition whose metadata is requested
 * @param table     owning table; contributes db/table names and the partition keys
 * @return partition metadata properties
 */
public static Properties getPartitionMetadata(
    org.apache.hadoop.hive.metastore.api.Partition partition,
    org.apache.hadoop.hive.metastore.api.Table table) {
  org.apache.hadoop.hive.metastore.api.StorageDescriptor partitionSd = partition.getSd();
  return MetaStoreUtils.getSchema(partitionSd, partitionSd,
      partition.getParameters(), table.getDbName(), table.getTableName(),
      table.getPartitionKeys());
}
/**
 * Builds the metadata {@link Properties} for a table. With no partition involved, the
 * table's storage descriptor serves as both descriptor arguments of
 * {@code MetaStoreUtils.getSchema}.
 *
 * @param table table whose metadata is requested
 * @return table metadata properties
 */
public static Properties getTableMetadata(
    org.apache.hadoop.hive.metastore.api.Table table) {
  org.apache.hadoop.hive.metastore.api.StorageDescriptor sd = table.getSd();
  return MetaStoreUtils.getSchema(sd, sd, table.getParameters(),
      table.getDbName(), table.getTableName(), table.getPartitionKeys());
}
/**
 * Builds the schema {@link Properties} for a partition of a table: the column layout
 * comes from the partition's storage descriptor, while parameters, names and partition
 * keys come from the owning table.
 *
 * @param part  partition supplying the storage descriptor
 * @param table owning table supplying parameters, names and partition keys
 * @return partition schema properties
 */
public static Properties getSchema(
    org.apache.hadoop.hive.metastore.api.Partition part,
    org.apache.hadoop.hive.metastore.api.Table table) {
  org.apache.hadoop.hive.metastore.api.StorageDescriptor partSd = part.getSd();
  org.apache.hadoop.hive.metastore.api.StorageDescriptor tableSd = table.getSd();
  return MetaStoreUtils.getSchema(partSd, tableSd, table.getParameters(),
      table.getDbName(), table.getTableName(), table.getPartitionKeys());
}
/**
 * Computes the schema {@link Properties} for a partition, mixing the partition's
 * storage descriptor with the owning table's descriptor, parameters, names and
 * partition keys.
 *
 * @param part  partition supplying its storage descriptor
 * @param table owning table
 * @return partition schema properties
 */
public static Properties getSchema(
    org.apache.hadoop.hive.metastore.api.Partition part,
    org.apache.hadoop.hive.metastore.api.Table table) {
  // Partition descriptor first, table descriptor second — order matters to the callee.
  return MetaStoreUtils.getSchema(part.getSd(), table.getSd(),
      table.getParameters(), table.getDbName(), table.getTableName(),
      table.getPartitionKeys());
}
/**
 * Computes the schema {@link Properties} for an entire table (no partition); the table's
 * storage descriptor fills both descriptor arguments of {@code MetaStoreUtils.getSchema}.
 *
 * @param table table whose schema is requested
 * @return table schema properties
 */
public static Properties getSchema(
    org.apache.hadoop.hive.metastore.api.Table table) {
  org.apache.hadoop.hive.metastore.api.StorageDescriptor tableSd = table.getSd();
  return MetaStoreUtils.getSchema(tableSd, tableSd, table.getParameters(),
      table.getDbName(), table.getTableName(), table.getPartitionKeys());
}
/**
 * Returns the metadata {@link Properties} of a table; since there is no partition, the
 * table's own storage descriptor is supplied for both descriptor parameters.
 *
 * @param table table whose metadata is requested
 * @return table metadata properties
 */
public static Properties getTableMetadata(
    org.apache.hadoop.hive.metastore.api.Table table) {
  org.apache.hadoop.hive.metastore.api.StorageDescriptor tableSd = table.getSd();
  return MetaStoreUtils.getSchema(tableSd, tableSd, table.getParameters(),
      table.getDbName(), table.getTableName(), table.getPartitionKeys());
}
/**
 * Derives the schema {@link Properties} of a partition. Column layout is taken from the
 * partition's storage descriptor; serde parameters, db/table names and partition keys
 * are taken from the owning table.
 *
 * @param part  partition supplying the storage descriptor
 * @param table owning table
 * @return partition schema properties
 */
public static Properties getSchema(
    org.apache.hadoop.hive.metastore.api.Partition part,
    org.apache.hadoop.hive.metastore.api.Table table) {
  org.apache.hadoop.hive.metastore.api.StorageDescriptor partitionSd = part.getSd();
  return MetaStoreUtils.getSchema(partitionSd, table.getSd(),
      table.getParameters(), table.getDbName(), table.getTableName(),
      table.getPartitionKeys());
}
/**
 * Assembles the metadata {@link Properties} for a table by handing its storage
 * descriptor (twice — table and "partition" slots), parameters, names and partition
 * keys to {@code MetaStoreUtils.getSchema}.
 *
 * @param table table whose metadata is requested
 * @return table metadata properties
 */
public static Properties getTableMetadata(
    org.apache.hadoop.hive.metastore.api.Table table) {
  org.apache.hadoop.hive.metastore.api.StorageDescriptor descriptor = table.getSd();
  return MetaStoreUtils.getSchema(descriptor, descriptor,
      table.getParameters(), table.getDbName(), table.getTableName(),
      table.getPartitionKeys());
}
/**
 * Computes a partition's metadata {@link Properties}; the partition storage descriptor
 * is used in both descriptor positions, while the owning table contributes the db name,
 * table name and partition keys.
 *
 * @param partition partition whose metadata is requested
 * @param table     owning table
 * @return partition metadata properties
 */
public static Properties getPartitionMetadata(
    org.apache.hadoop.hive.metastore.api.Partition partition,
    org.apache.hadoop.hive.metastore.api.Table table) {
  org.apache.hadoop.hive.metastore.api.StorageDescriptor sd = partition.getSd();
  return MetaStoreUtils.getSchema(sd, sd, partition.getParameters(),
      table.getDbName(), table.getTableName(), table.getPartitionKeys());
}
// Derive the table-level schema Properties (format classes, location, column and
// partition-key metadata) from the Thrift table descriptor; the table's own storage
// descriptor is passed for both sd arguments since no partition is involved here.
tableProperties = MetaStoreUtils.getSchema(table.getSd(), table.getSd(), table.getParameters(), table.getDbName(), table.getTableName(), table.getPartitionKeys());