/**
 * Copies the storage handler properties configured for a table descriptor to a runtime job
 * configuration. This differs from
 * {@link #copyTableJobPropertiesToConf(org.apache.hadoop.hive.ql.plan.TableDesc, org.apache.hadoop.mapred.JobConf)}
 * in that it does not allow parameters already set in the job to override the values from the
 * table. This is important for setting the config up for reading, as the job may already have
 * values in it from another table.
 *
 * @param tbl
 *          table descriptor from which to read
 * @param job
 *          configuration which receives configured properties
 */
public static void copyTablePropertiesToConf(TableDesc tbl, JobConf job) throws HiveException {
  Properties tblProperties = tbl.getProperties();
  for (String name : tblProperties.stringPropertyNames()) {
    String val = (String) tblProperties.get(name);
    if (val != null) {
      job.set(name, StringEscapeUtils.escapeJava(val));
    }
  }
  Map<String, String> jobProperties = tbl.getJobProperties();
  if (jobProperties != null) {
    for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
      job.set(entry.getKey(), entry.getValue());
    }
  }
}
/**
 * Copies the storage handler properties configured for a table descriptor to a runtime job
 * configuration.
 *
 * @param tbl
 *          table descriptor from which to read
 * @param job
 *          configuration which receives configured properties
 */
public static void copyTableJobPropertiesToConf(TableDesc tbl, JobConf job) throws HiveException {
  Properties tblProperties = tbl.getProperties();
  for (String name : tblProperties.stringPropertyNames()) {
    if (job.get(name) == null) {
      String val = (String) tblProperties.get(name);
      if (val != null) {
        job.set(name, StringEscapeUtils.escapeJava(val));
      }
    }
  }
  Map<String, String> jobProperties = tbl.getJobProperties();
  if (jobProperties != null) {
    for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
      job.set(entry.getKey(), entry.getValue());
    }
  }
}
/**
 * Copies the storage handler properties configured for a table descriptor to a runtime job
 * configuration. This differs from
 * {@link #copyTableJobPropertiesToConf(org.apache.hadoop.hive.ql.plan.TableDesc, org.apache.hadoop.mapred.JobConf)}
 * in that it does not allow parameters already set in the job to override the values from the
 * table. This is important for setting the config up for reading, as the job may already have
 * values in it from another table.
 *
 * @param tbl
 *          table descriptor from which to read
 * @param job
 *          configuration which receives configured properties
 */
public static void copyTablePropertiesToConf(TableDesc tbl, JobConf job) {
  Properties tblProperties = tbl.getProperties();
  for (String name : tblProperties.stringPropertyNames()) {
    String val = (String) tblProperties.get(name);
    if (val != null) {
      job.set(name, StringEscapeUtils.escapeJava(val));
    }
  }
  Map<String, String> jobProperties = tbl.getJobProperties();
  if (jobProperties == null) {
    return;
  }
  for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
    job.set(entry.getKey(), entry.getValue());
  }
}
/**
 * Copies the storage handler properties configured for a table descriptor to a runtime job
 * configuration.
 *
 * @param tbl
 *          table descriptor from which to read
 * @param job
 *          configuration which receives configured properties
 */
public static void copyTableJobPropertiesToConf(TableDesc tbl, Configuration job) {
  Properties tblProperties = tbl.getProperties();
  for (String name : tblProperties.stringPropertyNames()) {
    if (job.get(name) == null) {
      String val = (String) tblProperties.get(name);
      if (val != null) {
        job.set(name, StringEscapeUtils.escapeJava(val));
      }
    }
  }
  Map<String, String> jobProperties = tbl.getJobProperties();
  if (jobProperties == null) {
    return;
  }
  for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
    job.set(entry.getKey(), entry.getValue());
  }
}
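A minimal usage sketch contrasting the two copy methods above. The TableDesc "tbl", the property key "my.prop", and its values are hypothetical, not from the source; the calls target the non-throwing variants.

// Assumes tbl's table properties map "my.prop" to "fromTable".
JobConf job = new JobConf();
job.set("my.prop", "fromJob");

// Keys already present in the job are preserved, so "my.prop" stays "fromJob".
copyTableJobPropertiesToConf(tbl, job);

// Copies unconditionally, so the table's value replaces "fromJob". This is the
// variant to use when setting the config up for reading a specific table,
// since the job may still carry values from another table.
copyTablePropertiesToConf(tbl, job);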
Map<String, String> tableProperties = tableDesc.getJobProperties();
TableDesc tableDesc = new TableDesc(storageHandler.getInputFormatClass(),
    IgnoreKeyTextOutputFormat.class, props);
if (tableDesc.getJobProperties() == null) {
  tableDesc.setJobProperties(new HashMap<String, String>());
}
for (Map.Entry<String, String> el : conf) {
  tableDesc.getJobProperties().put(el.getKey(), el.getValue());
}

Map<String, String> jobProperties = new HashMap<String, String>();
tableDesc.getJobProperties().put(
    HCatConstants.HCAT_KEY_OUTPUT_INFO,
    HCatUtil.serialize(outputJobInfo));
storageHandler.configureOutputJobProperties(tableDesc, jobProperties);

Map<String, String> tableJobProperties = tableDesc.getJobProperties();
if (tableJobProperties != null) {
  if (tableJobProperties.containsKey(HCatConstants.HCAT_KEY_OUTPUT_INFO)) {
    // ...
  }
}
public static Map<String, String> getInputJobProperties(HiveStorageHandler storageHandler,
    InputJobInfo inputJobInfo) {
  Properties props = inputJobInfo.getTableInfo().getStorerInfo().getProperties();
  props.put(serdeConstants.SERIALIZATION_LIB, storageHandler.getSerDeClass().getName());
  TableDesc tableDesc = new TableDesc(storageHandler.getInputFormatClass(),
      storageHandler.getOutputFormatClass(), props);
  if (tableDesc.getJobProperties() == null) {
    tableDesc.setJobProperties(new HashMap<String, String>());
  }

  Properties mytableProperties = tableDesc.getProperties();
  mytableProperties.setProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME,
      inputJobInfo.getDatabaseName() + "." + inputJobInfo.getTableName());

  Map<String, String> jobProperties = new HashMap<String, String>();
  try {
    Map<String, String> properties = tableDesc.getJobProperties();
    // Accumulate this table's InputJobInfo alongside any already registered,
    // so multi-table reads see every input.
    LinkedList<InputJobInfo> inputJobInfos = (LinkedList<InputJobInfo>) HCatUtil.deserialize(
        properties.get(HCatConstants.HCAT_KEY_JOB_INFO));
    if (inputJobInfos == null) {
      inputJobInfos = new LinkedList<>();
    }
    inputJobInfos.add(inputJobInfo);
    properties.put(HCatConstants.HCAT_KEY_JOB_INFO, HCatUtil.serialize(inputJobInfos));

    storageHandler.configureInputJobProperties(tableDesc, jobProperties);
  } catch (IOException e) {
    throw new IllegalStateException("Failed to configure StorageHandler", e);
  }
  return jobProperties;
}
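Because this variant serializes a growing LinkedList of InputJobInfo objects under HCAT_KEY_JOB_INFO, a storage handler can recover every registered input when asked to configure the job. The following is a sketch of such a consumer, assuming the serialized-list layout above; the handler body and the property name it writes per table are illustrative, not HCatalog API.

@Override
public void configureInputJobProperties(TableDesc tableDesc,
    Map<String, String> jobProperties) {
  try {
    LinkedList<InputJobInfo> inputJobInfos = (LinkedList<InputJobInfo>)
        HCatUtil.deserialize(
            tableDesc.getJobProperties().get(HCatConstants.HCAT_KEY_JOB_INFO));
    if (inputJobInfos != null) {
      // Configure the job for every table registered so far; the key below
      // is a placeholder, not an HCatalog constant.
      for (InputJobInfo info : inputJobInfos) {
        jobProperties.put("example.input." + info.getTableName(),
            info.getDatabaseName());
      }
    }
  } catch (IOException e) {
    throw new IllegalStateException("Failed to read HCatalog job info", e);
  }
}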
try {
  OutputJobInfo jobInfo = (OutputJobInfo) HCatUtil.deserialize(
      tableDesc.getJobProperties().get(HCatConstants.HCAT_KEY_OUTPUT_INFO));
  String parentPath = jobInfo.getTableInfo().getTableLocation();
  String dynHash = tableDesc.getJobProperties().get(
      HCatConstants.HCAT_DYNAMIC_PTN_JOBID);
  String idHash = tableDesc.getJobProperties().get(
      HCatConstants.HCAT_OUTPUT_ID_HASH);
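A hedged continuation of the fragment above: in HCatalog's output path the two hashes select a per-job staging directory under the table location. The directory-name prefixes below are illustrative placeholders, not the actual constants, and Path is org.apache.hadoop.fs.Path.

  // Sketch only: stage dynamic-partition writes (dynHash) or id-tagged writes
  // (idHash) under a job-scoped temp dir below the table location.
  if (dynHash != null) {
    parentPath = new Path(parentPath, "_DYN" + dynHash).toString();
  } else if (idHash != null) {
    parentPath = new Path(parentPath, "_SCRATCH" + idHash).toString();
  }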
/**
 * Copies the storage handler properties configured for a table descriptor to a runtime job
 * configuration.
 *
 * @param tbl
 *          table descriptor from which to read
 * @param job
 *          configuration which receives configured properties
 */
public static void copyTableJobPropertiesToConf(TableDesc tbl, JobConf job) {
  Map<String, String> jobProperties = tbl.getJobProperties();
  if (jobProperties == null) {
    return;
  }
  for (Map.Entry<String, String> entry : jobProperties.entrySet()) {
    job.set(entry.getKey(), entry.getValue());
  }
}
public static Map<String, String> getInputJobProperties(HiveStorageHandler storageHandler,
    InputJobInfo inputJobInfo) {
  Properties props = inputJobInfo.getTableInfo().getStorerInfo().getProperties();
  props.put(serdeConstants.SERIALIZATION_LIB, storageHandler.getSerDeClass().getName());
  TableDesc tableDesc = new TableDesc(storageHandler.getInputFormatClass(),
      storageHandler.getOutputFormatClass(), props);
  if (tableDesc.getJobProperties() == null) {
    tableDesc.setJobProperties(new HashMap<String, String>());
  }

  Properties mytableProperties = tableDesc.getProperties();
  mytableProperties.setProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_NAME,
      inputJobInfo.getDatabaseName() + "." + inputJobInfo.getTableName());

  Map<String, String> jobProperties = new HashMap<String, String>();
  try {
    // Single-InputJobInfo layout: serialize just this table's job info.
    tableDesc.getJobProperties().put(
        HCatConstants.HCAT_KEY_JOB_INFO,
        HCatUtil.serialize(inputJobInfo));

    storageHandler.configureInputJobProperties(tableDesc, jobProperties);
  } catch (IOException e) {
    throw new IllegalStateException("Failed to configure StorageHandler", e);
  }
  return jobProperties;
}
Map<String, String> tableJobProperties = tableDesc.getJobProperties();
String jobString = tableJobProperties.get(HCatConstants.HCAT_KEY_JOB_INFO);
try {
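  // Hedged continuation sketch, not verbatim source: the opened try block
  // would deserialize the job info the same way the OutputJobInfo fragment
  // above does, matching the single-InputJobInfo layout written by
  // getInputJobProperties, then configure the input side from it.
  InputJobInfo inputJobInfo = (InputJobInfo) HCatUtil.deserialize(jobString);
  // ... drive input-format configuration from inputJobInfo ...
} catch (IOException e) {
  throw new IllegalStateException("Failed to read HCatalog job info", e);
}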