/**
 * Create a JobContext, supporting many Hadoops.
 *
 * @return JobContext
 */
public static JobContext makeJobContext() {
  return makeJobContext(new Configuration());
}
/**
 * Create a TaskAttemptContext, supporting many Hadoops.
 *
 * @return TaskAttemptContext
 */
public static TaskAttemptContext makeTaskAttemptContext() {
  return makeTaskAttemptContext(new Configuration());
}
/**
 * Create a TaskAttemptContext, supporting many Hadoops.
 *
 * @param conf Configuration
 * @param taskAttemptContext Use the TaskAttemptID from this object
 * @return TaskAttemptContext
 */
public static TaskAttemptContext makeTaskAttemptContext(
    Configuration conf, TaskAttemptContext taskAttemptContext) {
  return makeTaskAttemptContext(conf, taskAttemptContext.getTaskAttemptID());
}
/**
 * Create a JobContext, supporting many Hadoops.
 *
 * @param conf Configuration
 * @param jobContext Use the JobID from this object
 * @return JobContext
 */
public static JobContext makeJobContext(
    Configuration conf, JobContext jobContext) {
  return makeJobContext(conf, jobContext.getJobID());
}
/**
 * Create a TaskAttemptContext, supporting many Hadoops.
 *
 * @param conf Configuration
 * @return TaskAttemptContext
 */
public static TaskAttemptContext makeTaskAttemptContext(Configuration conf) {
  return makeTaskAttemptContext(conf, new TaskAttemptID());
}
/**
 * Create a JobContext, supporting many Hadoops.
 *
 * @param conf Configuration
 * @return JobContext
 */
public static JobContext makeJobContext(Configuration conf) {
  return makeJobContext(conf, new JobID());
}
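The factory methods above compose: the no-argument overloads create a fresh Configuration, the single-argument overloads generate new IDs, and the context-taking overloads carry the JobID or TaskAttemptID across from an existing context. A minimal usage sketch, assuming HadoopUtils is importable from the same package; the class and variable names below are illustrative only, not part of the original source:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Illustrative only: build Hadoop-version-independent contexts from a
// Configuration, and re-wrap an existing context while keeping its IDs.
public class ContextFactoryExample {
  public static void example() {
    Configuration conf = new Configuration();

    // Fresh contexts with newly created JobID / TaskAttemptID.
    JobContext jobContext = HadoopUtils.makeJobContext(conf);
    TaskAttemptContext taskContext = HadoopUtils.makeTaskAttemptContext(conf);

    // Re-wrap existing contexts under a different Configuration; the
    // JobID (or TaskAttemptID) of the original context is preserved.
    JobContext rewrappedJob = HadoopUtils.makeJobContext(conf, jobContext);
    TaskAttemptContext rewrappedTask =
        HadoopUtils.makeTaskAttemptContext(conf, taskContext);
  }
}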
@Override
public void commitTask(TaskAttemptContext context) throws IOException {
  outputCommitter.commitTask(
      HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
@Override
public List<InputSplit> getSplits(JobContext context, int minSplitCountHint)
    throws IOException, InterruptedException {
  return originalInputFormat.getSplits(
      HadoopUtils.makeJobContext(getConf(), context), minSplitCountHint);
}
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
  outputCommitter.abortTask(
      HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
@Override
public void abortJob(JobContext context, JobStatus.State state)
    throws IOException {
  outputCommitter.abortJob(
      HadoopUtils.makeJobContext(getConf(), context), state);
}
@Override
public void close(TaskAttemptContext context)
    throws IOException, InterruptedException {
  edgeWriter.close(
      HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
@Override
public void setupJob(JobContext context) throws IOException {
  outputCommitter.setupJob(
      HadoopUtils.makeJobContext(getConf(), context));
}
@Override
public void setupTask(TaskAttemptContext context) throws IOException {
  outputCommitter.setupTask(
      HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
@Override
public void cleanupJob(JobContext context) throws IOException {
  outputCommitter.cleanupJob(
      HadoopUtils.makeJobContext(getConf(), context));
}
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
  outputCommitter.abortTask(
      HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
@Override
public void setupJob(JobContext context) throws IOException {
  outputCommitter.setupJob(
      HadoopUtils.makeJobContext(getConf(), context));
}
@Override
public void initialize(TaskAttemptContext context)
    throws IOException, InterruptedException {
  vertexWriter.initialize(
      HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
@Override
public void cleanupJob(JobContext context) throws IOException {
  outputCommitter.cleanupJob(
      HadoopUtils.makeJobContext(getConf(), context));
}
@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext context)
    throws IOException, InterruptedException {
  baseEdgeReader.initialize(inputSplit,
      HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
@Override
public void commitJob(JobContext context) throws IOException {
  outputCommitter.commitJob(
      HadoopUtils.makeJobContext(getConf(), context));
}
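All of the @Override methods above follow the same delegation pattern: re-wrap the incoming context with HadoopUtils against the wrapper's own Configuration, then forward the call to the wrapped delegate. A minimal sketch of what such an enclosing committer wrapper could look like; the class name WrappedOutputCommitter, its constructor, the getConf() helper, and the needsTaskCommit override (required for the class to be concrete) are assumptions for illustration, not the actual enclosing class from the source:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Hypothetical sketch: holds the delegate committer and the Configuration
// that getConf() returns; every callback re-wraps the incoming context via
// HadoopUtils before delegating, so the delegate always sees a context built
// for the Hadoop version on the classpath.
public class WrappedOutputCommitter extends OutputCommitter {
  private final OutputCommitter outputCommitter;
  private final Configuration conf;

  public WrappedOutputCommitter(OutputCommitter outputCommitter,
      Configuration conf) {
    this.outputCommitter = outputCommitter;
    this.conf = conf;
  }

  private Configuration getConf() {
    return conf;
  }

  @Override
  public void setupJob(JobContext context) throws IOException {
    outputCommitter.setupJob(HadoopUtils.makeJobContext(getConf(), context));
  }

  @Override
  public void setupTask(TaskAttemptContext context) throws IOException {
    outputCommitter.setupTask(
        HadoopUtils.makeTaskAttemptContext(getConf(), context));
  }

  @Override
  public boolean needsTaskCommit(TaskAttemptContext context)
      throws IOException {
    return outputCommitter.needsTaskCommit(
        HadoopUtils.makeTaskAttemptContext(getConf(), context));
  }

  @Override
  public void commitTask(TaskAttemptContext context) throws IOException {
    outputCommitter.commitTask(
        HadoopUtils.makeTaskAttemptContext(getConf(), context));
  }

  @Override
  public void abortTask(TaskAttemptContext context) throws IOException {
    outputCommitter.abortTask(
        HadoopUtils.makeTaskAttemptContext(getConf(), context));
  }
}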