/**
 * Create a JobContext backed by a freshly constructed Configuration,
 * supporting many Hadoop versions.
 *
 * @return JobContext built from a new, default Configuration
 */
public static JobContext makeJobContext() {
  Configuration defaultConf = new Configuration();
  return makeJobContext(defaultConf);
}
}
/**
 * Create a JobContext, supporting many Hadoop versions, reusing the
 * JobID carried by an existing context.
 *
 * @param conf Configuration
 * @param jobContext Existing context whose JobID is reused
 * @return JobContext with the given Configuration and the borrowed JobID
 */
public static JobContext makeJobContext(Configuration conf, JobContext jobContext) {
  JobID existingJobId = jobContext.getJobID();
  return makeJobContext(conf, existingJobId);
}
/**
 * Create a JobContext, supporting many Hadoop versions, with a
 * freshly allocated (empty) JobID.
 *
 * @param conf Configuration
 * @return JobContext with the given Configuration and a new JobID
 */
public static JobContext makeJobContext(Configuration conf) {
  JobID freshJobId = new JobID();
  return makeJobContext(conf, freshJobId);
}
/**
 * Compute input splits by delegating to the wrapped input format,
 * handing it a version-compatible JobContext.
 *
 * @param context Job context supplied by the framework
 * @param minSplitCountHint Hint for the minimum number of splits
 * @return Splits produced by the wrapped input format
 * @throws IOException on I/O failure in the wrapped format
 * @throws InterruptedException if the computation is interrupted
 */
@Override
public List<InputSplit> getSplits(JobContext context, int minSplitCountHint)
    throws IOException, InterruptedException {
  JobContext compatibleContext = HadoopUtils.makeJobContext(getConf(), context);
  return originalInputFormat.getSplits(compatibleContext, minSplitCountHint);
}
/**
 * Abort the job via the wrapped committer, translating the context
 * into a version-compatible JobContext first.
 *
 * @param context Job context supplied by the framework
 * @param state Final state the job ended in
 * @throws IOException on I/O failure in the wrapped committer
 */
@Override
public void abortJob(JobContext context, JobStatus.State state) throws IOException {
  JobContext compatibleContext = HadoopUtils.makeJobContext(getConf(), context);
  outputCommitter.abortJob(compatibleContext, state);
}
/**
 * Set up the job via the wrapped committer, translating the context
 * into a version-compatible JobContext first.
 *
 * @param context Job context supplied by the framework
 * @throws IOException on I/O failure in the wrapped committer
 */
@Override
public void setupJob(JobContext context) throws IOException {
  JobContext compatibleContext = HadoopUtils.makeJobContext(getConf(), context);
  outputCommitter.setupJob(compatibleContext);
}
/**
 * Clean up the job via the wrapped committer, translating the context
 * into a version-compatible JobContext first.
 *
 * NOTE(review): cleanupJob is deprecated in newer Hadoop releases in
 * favor of commitJob/abortJob — TODO confirm the targeted versions.
 *
 * @param context Job context supplied by the framework
 * @throws IOException on I/O failure in the wrapped committer
 */
@Override
public void cleanupJob(JobContext context) throws IOException {
  JobContext compatibleContext = HadoopUtils.makeJobContext(getConf(), context);
  outputCommitter.cleanupJob(compatibleContext);
}
/**
 * Delegate job setup to the wrapped committer using a
 * version-compatible JobContext.
 *
 * @param context Job context supplied by the framework
 * @throws IOException on I/O failure in the wrapped committer
 */
@Override
public void setupJob(JobContext context) throws IOException {
  outputCommitter.setupJob(HadoopUtils.makeJobContext(getConf(), context));
}
/**
 * Delegate job cleanup to the wrapped committer using a
 * version-compatible JobContext.
 *
 * NOTE(review): cleanupJob is deprecated in newer Hadoop releases in
 * favor of commitJob/abortJob — TODO confirm the targeted versions.
 *
 * @param context Job context supplied by the framework
 * @throws IOException on I/O failure in the wrapped committer
 */
@Override
public void cleanupJob(JobContext context) throws IOException {
  outputCommitter.cleanupJob(HadoopUtils.makeJobContext(getConf(), context));
}
/**
 * Commit the job via the wrapped committer, translating the context
 * into a version-compatible JobContext first.
 *
 * @param context Job context supplied by the framework
 * @throws IOException on I/O failure in the wrapped committer
 */
@Override
public void commitJob(JobContext context) throws IOException {
  JobContext compatibleContext = HadoopUtils.makeJobContext(getConf(), context);
  outputCommitter.commitJob(compatibleContext);
}
/**
 * Validate output specs via the wrapped output format, translating the
 * context into a version-compatible JobContext first.
 *
 * @param context Job context supplied by the framework
 * @throws IOException on I/O failure in the wrapped format
 * @throws InterruptedException if the check is interrupted
 */
@Override
public void checkOutputSpecs(JobContext context)
    throws IOException, InterruptedException {
  JobContext compatibleContext = HadoopUtils.makeJobContext(getConf(), context);
  originalOutputFormat.checkOutputSpecs(compatibleContext);
}
/**
 * Delegate job abort to the wrapped committer using a
 * version-compatible JobContext.
 *
 * @param context Job context supplied by the framework
 * @param state Final state the job ended in
 * @throws IOException on I/O failure in the wrapped committer
 */
@Override
public void abortJob(JobContext context, JobStatus.State state) throws IOException {
  outputCommitter.abortJob(HadoopUtils.makeJobContext(getConf(), context), state);
}
/**
 * Delegate split computation to the wrapped input format using a
 * version-compatible JobContext.
 *
 * @param context Job context supplied by the framework
 * @param minSplitCountHint Hint for the minimum number of splits
 * @return Splits produced by the wrapped input format
 * @throws IOException on I/O failure in the wrapped format
 * @throws InterruptedException if the computation is interrupted
 */
@Override
public List<InputSplit> getSplits(JobContext context, int minSplitCountHint)
    throws IOException, InterruptedException {
  return originalInputFormat.getSplits(
      HadoopUtils.makeJobContext(getConf(), context), minSplitCountHint);
}
/**
 * Delegate output-spec validation to the wrapped output format using a
 * version-compatible JobContext.
 *
 * @param context Job context supplied by the framework
 * @throws IOException on I/O failure in the wrapped format
 * @throws InterruptedException if the check is interrupted
 */
@Override
public void checkOutputSpecs(JobContext context)
    throws IOException, InterruptedException {
  originalOutputFormat.checkOutputSpecs(
      HadoopUtils.makeJobContext(getConf(), context));
}
/**
 * Compute splits through the wrapped input format; the framework's
 * context is first rebuilt as a version-compatible JobContext.
 *
 * @param context Job context supplied by the framework
 * @param minSplitCountHint Hint for the minimum number of splits
 * @return Splits produced by the wrapped input format
 * @throws IOException on I/O failure in the wrapped format
 * @throws InterruptedException if the computation is interrupted
 */
@Override
public List<InputSplit> getSplits(JobContext context, int minSplitCountHint)
    throws IOException, InterruptedException {
  JobContext rebuiltContext = HadoopUtils.makeJobContext(getConf(), context);
  return originalInputFormat.getSplits(rebuiltContext, minSplitCountHint);
}
/**
 * Delegate job commit to the wrapped committer using a
 * version-compatible JobContext.
 *
 * @param context Job context supplied by the framework
 * @throws IOException on I/O failure in the wrapped committer
 */
@Override
public void commitJob(JobContext context) throws IOException {
  outputCommitter.commitJob(HadoopUtils.makeJobContext(getConf(), context));
}