/**
 * Create an HCatTableInfo instance from the supplied Hive Table instance.
 * @param table the Table to create the instance from
 * @return HCatTableInfo
 * @throws IOException
 */
static HCatTableInfo valueOf(Table table) throws IOException {
  // Explicitly use {@link org.apache.hadoop.hive.ql.metadata.Table} when getting the schema,
  // but store {@link org.apache.hadoop.hive.metastore.api.Table}, as this class is serialized
  // into the job conf.
  org.apache.hadoop.hive.ql.metadata.Table mTable =
      new org.apache.hadoop.hive.ql.metadata.Table(table);
  HCatSchema schema = HCatUtil.extractSchema(mTable);
  StorerInfo storerInfo =
      InternalUtil.extractStorerInfo(table.getSd(), table.getParameters());
  HCatSchema partitionColumns = HCatUtil.getPartitionColumns(mTable);
  return new HCatTableInfo(table.getDbName(), table.getTableName(), schema,
      partitionColumns, storerInfo, table);
}
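For context, here is a minimal caller-side sketch of the same wrap-then-extract pattern the comment above describes. It is an illustration, not part of the original class: the database and table names are placeholders, the org.apache.hive.hcatalog package layout is assumed, and error handling is elided.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hive.hcatalog.common.HCatUtil;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class SchemaPeek {
  public static void main(String[] args) throws Exception {
    HiveConf hiveConf = new HiveConf();
    HiveMetaStoreClient client = new HiveMetaStoreClient(hiveConf);
    try {
      // Fetch the thrift-serializable metastore Table ("default"/"my_table" are placeholders).
      Table apiTable = client.getTable("default", "my_table");
      // Wrap it in the ql-layer Table so HCatUtil.extractSchema can derive the HCatSchema.
      org.apache.hadoop.hive.ql.metadata.Table qlTable =
          new org.apache.hadoop.hive.ql.metadata.Table(apiTable);
      HCatSchema schema = HCatUtil.extractSchema(qlTable);
      System.out.println(schema.getFieldNames());
    } finally {
      client.close();
    }
  }
}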
// Here `table` is the ql-layer Table, so the storage descriptor comes from the
// underlying thrift Table via getTTable().
HCatSchema tableSchema = HCatUtil.extractSchema(table);
StorerInfo storerInfo =
    InternalUtil.extractStorerInfo(table.getTTable().getSd(), table.getParameters());
// Partitioned case (runs once per partition `ptn`):
HCatSchema schema = HCatUtil.extractSchema(
    new org.apache.hadoop.hive.ql.metadata.Partition(table, ptn));
PartInfo partInfo = extractPartInfo(schema, ptn.getSd(),
    ptn.getParameters(), conf, inputJobInfo);

// Unpartitioned case:
HCatSchema schema = HCatUtil.extractSchema(table);
PartInfo partInfo = extractPartInfo(schema, table.getTTable().getSd(),
    table.getParameters(), conf, inputJobInfo);
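The two variants above are alternative branches, not sequential statements (each declares its own schema and partInfo). A minimal sketch of the assumed control flow; parts and partInfoList are hypothetical local names, and extractPartInfo is the same private helper called above:

List<PartInfo> partInfoList = new ArrayList<PartInfo>();
if (table.getPartitionKeys().size() != 0) {
  // Partitioned table: one PartInfo per metastore partition.
  for (Partition ptn : parts) {
    HCatSchema schema = HCatUtil.extractSchema(
        new org.apache.hadoop.hive.ql.metadata.Partition(table, ptn));
    partInfoList.add(extractPartInfo(schema, ptn.getSd(),
        ptn.getParameters(), conf, inputJobInfo));
  }
} else {
  // Unpartitioned table: a single PartInfo built from the table itself.
  HCatSchema schema = HCatUtil.extractSchema(table);
  partInfoList.add(extractPartInfo(schema, table.getTTable().getSd(),
      table.getParameters(), conf, inputJobInfo));
}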
/**
 * Extracts the {@link HCatSchema} from the specified {@code conf}.
 *
 * @param conf the conf containing the table schema
 * @return the HCatSchema
 * @throws TException if there was an issue communicating with the metastore
 * @throws IOException if there was an issue connecting to the metastore
 */
public HCatSchema getTableSchema(Configuration conf) throws TException, IOException {
  Table hiveTable = getHiveTable(conf);
  return HCatUtil.extractSchema(hiveTable);
}
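A short usage sketch for getTableSchema. The enclosing class is not shown in the snippet, so hcatSource below is a hypothetical instance of whatever type declares the method, and the Configuration is assumed to already carry the serialized table info:

Configuration conf = job.getConfiguration();
HCatSchema schema = hcatSource.getTableSchema(conf);
// Print each column name with its Hive type string.
for (HCatFieldSchema field : schema.getFields()) {
  System.out.println(field.getName() + ": " + field.getTypeString());
}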
@Override
public void configureForMapReduce(Job job, PType<?> ptype, Path outputPath, String name) {
  if (Strings.isNullOrEmpty(name)) {
    throw new AssertionError("Named output wasn't generated. This shouldn't happen");
  }
  CrunchOutputs.addNamedOutput(job, name, bundle, NullWritable.class, HCatRecord.class);
  try {
    CrunchHCatOutputFormat.setOutput(job, info);
    // Set the schema into the config; this is necessary if any downstream
    // tasks need the schema translated between a format (e.g. Avro) and
    // HCatRecord for the destination table.
    Table table = getHiveTable(job.getConfiguration());
    CrunchHCatOutputFormat.setSchema(job, HCatUtil.extractSchema(table));
  } catch (TException | IOException e) {
    throw new CrunchRuntimeException(e);
  }
}
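On the consuming side, the schema stored by setSchema can be read back through the standard HCatOutputFormat accessor. A minimal sketch, assuming CrunchHCatOutputFormat delegates to HCatOutputFormat (as the setOutput/setSchema calls suggest), a recent HCatalog version where getTableSchema takes a Configuration, and a placeholder bigint column named id:

// Inside a downstream task: recover the schema configureForMapReduce stored.
HCatSchema schema = HCatOutputFormat.getTableSchema(context.getConfiguration());
HCatRecord record = new DefaultHCatRecord(schema.size());
// Set-by-name resolves the column position through the schema ("id" is hypothetical).
record.set("id", schema, 42L);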