public static TaskAttemptContext createTaskAttemptContext(JobConf conf, TaskAttemptID id,
    Progressable progressable) {
  return ShimLoader.getHadoopShims().getHCatShim()
      .createTaskAttemptContext(conf, id, (Reporter) progressable);
}

public static org.apache.hadoop.mapreduce.TaskAttemptID createTaskAttemptID(JobID jobId, boolean isMap,
    int taskId, int id) {
public static JobContext createJobContext(JobConf conf, org.apache.hadoop.mapreduce.JobID id,
    Progressable progressable) {
  return ShimLoader.getHadoopShims().getHCatShim()
      .createJobContext(conf, id, (Reporter) progressable);
}
}
public static org.apache.hadoop.mapreduce.TaskAttemptContext createTaskAttemptContext(
    Configuration conf, org.apache.hadoop.mapreduce.TaskAttemptID id) {
  return ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, id);
}
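These wrappers exist because TaskAttemptContext and JobContext are concrete classes in Hadoop 1 but interfaces in Hadoop 2, so version-independent code cannot construct them with new. A minimal caller sketch, with an illustrative Configuration that is not from the source:

// Sketch: create MR contexts portably through the shim instead of instantiating
// TaskAttemptContextImpl/JobContextImpl directly.
Configuration conf = new Configuration();
org.apache.hadoop.mapreduce.TaskAttemptContext taskCtx =
    ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
        conf, new org.apache.hadoop.mapreduce.TaskAttemptID());
org.apache.hadoop.mapreduce.JobContext jobCtx =
    ShimLoader.getHadoopShims().getHCatShim().createJobContext(conf, null);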
/**
 * Get the TaskAttemptContext with the related OutputFormat configuration populated given the alias
 * and the actual TaskAttemptContext
 * @param alias the name given to the OutputFormat configuration
 * @param context the Mapper or Reducer Context
 * @return a copy of the TaskAttemptContext with the alias configuration populated
 */
public static TaskAttemptContext getTaskAttemptContext(String alias, TaskAttemptContext context) {
  String aliasConf = context.getConfiguration().get(getAliasConfName(alias));
  TaskAttemptContext aliasContext = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
      context.getConfiguration(), context.getTaskAttemptID());
  addToConfig(aliasConf, aliasContext.getConfiguration());
  return aliasContext;
}
/**
 * Get the JobContext with the related OutputFormat configuration populated given the alias
 * and the actual JobContext
 * @param alias the name given to the OutputFormat configuration
 * @param context the JobContext
 * @return a copy of the JobContext with the alias configuration populated
 */
public static JobContext getJobContext(String alias, JobContext context) {
  String aliasConf = context.getConfiguration().get(getAliasConfName(alias));
  JobContext aliasContext = ShimLoader.getHadoopShims().getHCatShim().createJobContext(
      context.getConfiguration(), context.getJobID());
  addToConfig(aliasConf, aliasContext.getConfiguration());
  return aliasContext;
}
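As an illustration of how these helpers are typically consumed, a wrapping OutputFormat can resolve the per-alias context before delegating to the wrapped format. The variable baseOutputFormat, the alias name, and the surrounding generics below are hypothetical, not taken from the source:

// Hypothetical delegation: copy the alias-specific configuration into a fresh
// context, then hand that context to the wrapped OutputFormat.
JobContext aliasJobContext = getJobContext("text-out", jobContext);
baseOutputFormat.checkOutputSpecs(aliasJobContext);

TaskAttemptContext aliasTaskContext = getTaskAttemptContext("text-out", taskContext);
org.apache.hadoop.mapreduce.RecordWriter<?, ?> writer =
    baseOutputFormat.getRecordWriter(aliasTaskContext);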
@Override
public void cleanupOnFailure(String location, Job job) throws IOException {
  ShimLoader.getHadoopShims().getHCatShim().abortJob(getOutputFormat(), job);
}
}
@Override
public void checkOutputSpecs(final FileSystem ignored, final JobConf job) throws IOException {
  realOutputFormat.checkOutputSpecs(
      ShimLoader.getHadoopShims().getHCatShim().createJobContext(job, null));
}
@Override
public void storeSchema(ResourceSchema schema, String arg1, Job job) throws IOException {
  ShimLoader.getHadoopShims().getHCatShim().commitJob(getOutputFormat(), job);
}
@Override
public void checkOutputSpecs(final FileSystem ignored, final JobConf job) throws IOException {
  realOutputFormat.checkOutputSpecs(
      ShimLoader.getHadoopShims().getHCatShim().createJobContext(job, null));
}
    context.getJobID(), InternalUtil.createReporter(HCatMapRedUtil.createTaskAttemptContext(jobConf,
        ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID())));
HCatOutputFormat.configureOutputStorageHandler(currContext, jobInfo, fullPartSpec);
contextDiscoveredByPath.put(st.getPath().toString(), currContext);
@Override
public void abort(WriterContext context) throws HCatException {
  WriterContextImpl cntxtImpl = (WriterContextImpl) context;
  try {
    new HCatOutputFormat().getOutputCommitter(
        ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
            cntxtImpl.getConf(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
        .abortJob(ShimLoader.getHadoopShims().getHCatShim().createJobContext(
            cntxtImpl.getConf(), null), State.FAILED);
  } catch (IOException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  } catch (InterruptedException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
}
@Override
public void commit(WriterContext context) throws HCatException {
  WriterContextImpl cntxtImpl = (WriterContextImpl) context;
  try {
    new HCatOutputFormat().getOutputCommitter(
        ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
            cntxtImpl.getConf(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
        .commitJob(ShimLoader.getHadoopShims().getHCatShim().createJobContext(
            cntxtImpl.getConf(), null));
  } catch (IOException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  } catch (InterruptedException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
}
@Override
public ReaderContext prepareRead() throws HCatException {
  try {
    Job job = new Job(conf);
    HCatInputFormat hcif = HCatInputFormat.setInput(
        job, re.getDbName(), re.getTableName(), re.getFilterString());
    ReaderContextImpl cntxt = new ReaderContextImpl();
    cntxt.setInputSplits(hcif.getSplits(
        ShimLoader.getHadoopShims().getHCatShim().createJobContext(job.getConfiguration(), null)));
    cntxt.setConf(job.getConfiguration());
    return cntxt;
  } catch (IOException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  } catch (InterruptedException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
}
    new org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenSelector();
Token jtToken = jtTokenSelector.selectToken(org.apache.hadoop.security.SecurityUtil.buildTokenService(
    ShimLoader.getHadoopShims().getHCatShim().getResourceManagerAddress(conf)), ugi.getTokens());
if (jtToken == null) {
@Override
public WriterContext prepareWrite() throws HCatException {
  OutputJobInfo jobInfo = OutputJobInfo.create(we.getDbName(), we.getTableName(), we.getPartitionKVs());
  Job job;
  try {
    job = new Job(conf);
    HCatOutputFormat.setOutput(job, jobInfo);
    HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job.getConfiguration()));
    HCatOutputFormat outFormat = new HCatOutputFormat();
    outFormat.checkOutputSpecs(job);
    outFormat.getOutputCommitter(ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
        job.getConfiguration(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
        .setupJob(job);
  } catch (IOException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  } catch (InterruptedException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
  WriterContextImpl cntxt = new WriterContextImpl();
  cntxt.setConf(job.getConfiguration());
  return cntxt;
}
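Taken together with the commit and abort overrides above, the implied client-side lifecycle is: prepareWrite() sets up the job and captures a serializable WriterContext, tasks write records against that context, and the client finally commits or aborts. A hedged sketch of that flow, where writer and the write step are placeholders rather than names from the source:

// Sketch of the master-side lifecycle implied by prepareWrite()/commit()/abort().
// "writer" stands for an instance of the class these overrides belong to.
WriterContext writerContext = writer.prepareWrite();  // runs setupJob() via a shim-created context
try {
  // ... ship writerContext to the tasks, which write their Iterator<HCatRecord> ...
  writer.commit(writerContext);                        // commitJob() on the output committer
} catch (HCatException e) {
  writer.abort(writerContext);                         // abortJob(..., State.FAILED)
  throw e;
}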
@Override
public Iterator<HCatRecord> read() throws HCatException {
  HCatInputFormat inpFmt = new HCatInputFormat();
  RecordReader<WritableComparable, HCatRecord> rr;
  try {
    TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim()
        .createTaskAttemptContext(conf, new TaskAttemptID());
    rr = inpFmt.createRecordReader(split, cntxt);
    rr.initialize(split, cntxt);
  } catch (IOException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  } catch (InterruptedException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
  return new HCatRecordItr(rr);
}
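The read path mirrors the write path: prepareRead() above runs on the client to compute splits and capture the job configuration, and each slave then calls read() against its assigned split to obtain an Iterator of HCatRecord. A hedged sketch, where reader and slaveReader are placeholders for instances of the class these overrides belong to:

// Sketch of the read lifecycle implied by prepareRead()/read(); names are placeholders.
ReaderContext readerContext = reader.prepareRead();  // client side: splits + configuration
// ... ship readerContext (and one split per slave) to the slaves ...
Iterator<HCatRecord> records = slaveReader.read();   // slave side: records of the assigned split
while (records.hasNext()) {
  HCatRecord record = records.next();
  // process record
}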
int readCount = 0;
for (int i = 0; i < splits.size(); i++) {
  TaskAttemptContext tac = ShimLoader.getHadoopShims().getHCatShim()
      .createTaskAttemptContext(jonconf, new TaskAttemptID());
  RecordReader<LongWritable, BytesRefArrayWritable> rr =
      inputFormat.createRecordReader(splits.get(i), tac);
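For context, the tail of such a loop typically drives the reader through the standard mapreduce RecordReader protocol; the following is a sketch of that tail, not the original test body:

// Sketch: drain the RecordReader created above (placeholder continuation).
rr.initialize(splits.get(i), tac);
while (rr.nextKeyValue()) {
  BytesRefArrayWritable row = rr.getCurrentValue();
  readCount++;
}
rr.close();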
setVarsInConf(id);
HCatOutputFormat outFormat = new HCatOutputFormat();
TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
    conf, new TaskAttemptID(ShimLoader.getHadoopShims().getHCatShim().createTaskID(), id));
OutputCommitter committer = null;
RecordWriter<WritableComparable<?>, HCatRecord> writer;
private void publishTest(Job job) throws Exception {
  HCatOutputFormat hcof = new HCatOutputFormat();
  TaskAttemptContext tac = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
      job.getConfiguration(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID());
  OutputCommitter committer = hcof.getOutputCommitter(tac);
  committer.setupJob(job);
  committer.setupTask(tac);
  committer.commitTask(tac);
  committer.commitJob(job);
  Partition part = client.getPartition(dbName, tblName, Arrays.asList("p1"));
  assertNotNull(part);
  StorerInfo storer = InternalUtil.extractStorerInfo(part.getSd(), part.getParameters());
  assertEquals(storer.getProperties().get("hcat.testarg"), "testArgValue");
  assertTrue(part.getSd().getLocation().contains("p1"));
}
@Override
public void cleanupOnFailure(String location, Job job) throws IOException {
  ShimLoader.getHadoopShims().getHCatShim().abortJob(getOutputFormat(), job);
}
}