/** * commit the task by moving the output file out from the temporary directory. * @throws java.io.IOException */ @Override public void close() throws IOException { // enforce sequential close() calls synchronized (CLOSE_MUTEX) { this.recordWriter.close(new HadoopDummyReporter()); if (this.outputCommitter.needsTaskCommit(this.context)) { this.outputCommitter.commitTask(this.context); } } }
/**
 * Commits the task attempt through the job's output committer, if the
 * committer reports that a commit is required. A no-op when no writer
 * was ever opened.
 *
 * @throws IOException if the task commit fails.
 */
public void commit() throws IOException {
    // Nothing was written, so there is nothing to commit.
    if (writer == null) {
        return;
    }
    OutputCommitter committer = jobConf.getOutputCommitter();
    TaskAttemptContext attemptCtx = new TaskAttemptContextImpl(jobConf, attempt);
    if (committer.needsTaskCommit(attemptCtx)) {
        committer.commitTask(attemptCtx);
    }
}
/**
 * Bridges the new-API {@code needsTaskCommit} to the old-API overload.
 * The new and old APIs declare different context types, so the incoming
 * context is narrowed to the old type before delegating.
 */
@Override
public final boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  return needsTaskCommit(oldApiContext);
}
/**
 * Asks the wrapped base committer whether this task attempt has output
 * pending commit.
 */
@Override
public boolean needsTaskCommit(TaskAttemptContext taskContext) throws IOException {
  boolean commitNeeded = baseOutputCommitter.needsTaskCommit(taskContext);
  return commitNeeded;
}
/**
 * Bridges the new-API {@code needsTaskCommit} to the old-API overload.
 * The new and old APIs declare different context types, so the incoming
 * context is narrowed to the old type before delegating.
 */
@Override
public final boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  return needsTaskCommit(oldApiContext);
}
/**
 * Bridges the new-API {@code needsTaskCommit} to the old-API overload.
 * The new and old APIs declare different context types, so the incoming
 * context is narrowed to the old type before delegating.
 */
@Override
public final boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  return needsTaskCommit(oldApiContext);
}
/**
 * Bridges the new-API {@code needsTaskCommit} to the old-API overload.
 * The new and old APIs declare different context types, so the incoming
 * context is narrowed to the old type before delegating.
 */
@Override
public final boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  return needsTaskCommit(oldApiContext);
}
/**
 * Bridges the new-API {@code needsTaskCommit} to the old-API overload.
 * The new and old APIs declare different context types, so the incoming
 * context is narrowed to the old type before delegating.
 */
@Override
public final boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  return needsTaskCommit(oldApiContext);
}
/**
 * Bridges the new-API {@code needsTaskCommit} to the old-API overload.
 * The new and old APIs declare different context types, so the incoming
 * context is narrowed to the old type before delegating.
 */
@Override
public final boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  return needsTaskCommit(oldApiContext);
}
/**
 * New-API commit-required check: converts the incoming new-API context via
 * {@code hackTaskAttemptContext} and delegates to the wrapped base committer.
 *
 * @throws IOException if the base committer's check fails.
 */
@Override public boolean needsTaskCommit(org.apache.hadoop.mapreduce.TaskAttemptContext context) throws IOException { return baseCommitter.needsTaskCommit(hackTaskAttemptContext(context)); }
/**
 * Closes the record writer and, when the committer reports pending output,
 * commits this task attempt.
 *
 * @throws Exception if closing the writer or committing fails
 */
@Override
public void close() throws Exception {
    recordWriter.close(Reporter.NULL);
    boolean commitNeeded = outputCommitter.needsTaskCommit(taskAttemptContext);
    if (commitNeeded) {
        outputCommitter.commitTask(taskAttemptContext);
    }
}
/**
 * Closes the record writer and, when the committer reports pending output,
 * commits this task attempt.
 *
 * @throws Exception if closing the writer or committing fails
 */
@Override
public void close() throws Exception {
    recordWriter.close(Reporter.NULL);
    boolean commitNeeded = outputCommitter.needsTaskCommit(taskAttemptContext);
    if (commitNeeded) {
        outputCommitter.commitTask(taskAttemptContext);
    }
}
/** * commit the task by moving the output file out from the temporary directory. * @throws java.io.IOException */ @Override public void close() throws IOException { // enforce sequential close() calls synchronized (CLOSE_MUTEX) { this.recordWriter.close(new HadoopDummyReporter()); if (this.outputCommitter.needsTaskCommit(this.context)) { this.outputCommitter.commitTask(this.context); } } }
/** * commit the task by moving the output file out from the temporary directory. * @throws java.io.IOException */ @Override public void close() throws IOException { // enforce sequential close() calls synchronized (CLOSE_MUTEX) { this.recordWriter.close(new HadoopDummyReporter()); if (this.outputCommitter.needsTaskCommit(this.context)) { this.outputCommitter.commitTask(this.context); } } }
/** * commit the task by moving the output file out from the temporary directory. * @throws java.io.IOException */ @Override public void close() throws IOException { // enforce sequential close() calls synchronized (CLOSE_MUTEX) { this.recordWriter.close(new HadoopDummyReporter()); if (this.outputCommitter.needsTaskCommit(this.context)) { this.outputCommitter.commitTask(this.context); } } }
boolean commitRequired = outputCommitter.needsTaskCommit(taskContext); if (commitRequired) { int retries = MAX_RETRIES;