/**
 * Create a TaskAttemptContext, supporting many Hadoops.
 *
 * @return TaskAttemptContext
 */
public static TaskAttemptContext makeTaskAttemptContext() {
  // No configuration supplied; fall back to a fresh default Configuration.
  Configuration defaultConf = new Configuration();
  return makeTaskAttemptContext(defaultConf);
}
/**
 * Create a TaskAttemptContext, supporting many Hadoops.
 *
 * @param conf Configuration
 * @param taskAttemptContext Use TaskAttemptID from this object
 * @return TaskAttemptContext
 */
public static TaskAttemptContext makeTaskAttemptContext(Configuration conf,
    TaskAttemptContext taskAttemptContext) {
  // Keep the existing attempt id, pairing it with the supplied conf.
  return makeTaskAttemptContext(conf, taskAttemptContext.getTaskAttemptID());
}
/**
 * Create a TaskAttemptContext, supporting many Hadoops.
 *
 * @param conf Configuration
 * @return TaskAttemptContext
 */
public static TaskAttemptContext makeTaskAttemptContext(Configuration conf) {
  // No prior context to copy an attempt id from, so fabricate a blank one.
  TaskAttemptID blankAttemptId = new TaskAttemptID();
  return makeTaskAttemptContext(conf, blankAttemptId);
}
@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext context)
    throws IOException, InterruptedException {
  // Rebuild the context around our conf before handing off to the delegate.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  baseMappingReader.initialize(inputSplit, wrappedContext);
}
@Override
public void commitTask(TaskAttemptContext context) throws IOException {
  // Delegate commit with a context rebuilt around our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  outputCommitter.commitTask(wrappedContext);
}
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
  // Delegate abort with a context rebuilt around our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  outputCommitter.abortTask(wrappedContext);
}
@Override
public void close(TaskAttemptContext context)
    throws IOException, InterruptedException {
  // Close the underlying writer using a context rebuilt around our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  edgeWriter.close(wrappedContext);
}
@Override
public void setupTask(TaskAttemptContext context) throws IOException {
  // Delegate setup with a context rebuilt around our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  outputCommitter.setupTask(wrappedContext);
}
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
  // Pass the delegate an attempt context carrying our conf.
  outputCommitter.abortTask(
      HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
@Override
public void initialize(TaskAttemptContext context)
    throws IOException, InterruptedException {
  // Initialize the underlying writer with a context carrying our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  vertexWriter.initialize(wrappedContext);
}
@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext context)
    throws IOException, InterruptedException {
  // Hand the delegate reader a context rebuilt around our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  baseEdgeReader.initialize(inputSplit, wrappedContext);
}
@Override
public void initialize(TaskAttemptContext context)
    throws IOException, InterruptedException {
  // Initialize the underlying writer with a context carrying our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  edgeWriter.initialize(wrappedContext);
}
@Override
public void initialize(InputSplit inputSplit, TaskAttemptContext context)
    throws IOException, InterruptedException {
  // Hand the delegate reader a context rebuilt around our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  baseVertexReader.initialize(inputSplit, wrappedContext);
}
@Override
public void close(TaskAttemptContext context)
    throws IOException, InterruptedException {
  // Close the underlying writer using a context rebuilt around our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  vertexWriter.close(wrappedContext);
}
@Override
public boolean needsTaskCommit(TaskAttemptContext context)
    throws IOException {
  // Ask the delegate, forwarding a context rebuilt around our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  return outputCommitter.needsTaskCommit(wrappedContext);
}
@Override
public void commitTask(TaskAttemptContext context) throws IOException {
  // Pass the delegate an attempt context carrying our conf.
  outputCommitter.commitTask(
      HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
@Override
public void setupTask(TaskAttemptContext context) throws IOException {
  // Pass the delegate an attempt context carrying our conf.
  outputCommitter.setupTask(
      HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
@Override
public MappingReader<I, V, E, B> createMappingReader(InputSplit split,
    TaskAttemptContext context) throws IOException {
  // Create the delegate reader under a context carrying our conf,
  // then wrap it so later calls also see our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  MappingReader<I, V, E, B> mappingReader =
      originalInputFormat.createMappingReader(split, wrappedContext);
  return new WrappedMappingReader<>(mappingReader, getConf());
}
@Override
public EdgeReader<I, E> createEdgeReader(InputSplit split,
    TaskAttemptContext context) throws IOException {
  // Create the delegate reader under a context carrying our conf,
  // then wrap it so later calls also see our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  EdgeReader<I, E> edgeReader =
      originalInputFormat.createEdgeReader(split, wrappedContext);
  return new WrappedEdgeReader<I, E>(edgeReader, getConf());
}
@Override
public VertexReader<I, V, E> createVertexReader(InputSplit split,
    TaskAttemptContext context) throws IOException {
  // Create the delegate reader under a context carrying our conf,
  // then wrap it so later calls also see our conf.
  TaskAttemptContext wrappedContext =
      HadoopUtils.makeTaskAttemptContext(getConf(), context);
  VertexReader<I, V, E> vertexReader =
      originalInputFormat.createVertexReader(split, wrappedContext);
  return new WrappedVertexReader<I, V, E>(vertexReader, getConf());
}