/**
 * Aborts the task attempt for every dynamic-partition output tracked by this
 * committer.
 *
 * <p>Abort is best-effort cleanup: a failure aborting one partition's output
 * must not prevent the remaining partitions from being cleaned up. Each
 * per-partition failure is therefore logged and the loop continues; the first
 * failure (if any) is rethrown once every committer has been given a chance
 * to abort.
 *
 * @param context the task attempt context (the stored per-partition contexts
 *                in {@code dynamicContexts} are used for the actual aborts)
 * @throws IOException the first per-partition abort failure encountered
 */
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
  IOException firstFailure = null;
  for (Map.Entry<String, OutputJobInfo> outputJobInfoEntry : dynamicOutputJobInfo.entrySet()) {
    String dynKey = outputJobInfoEntry.getKey();
    OutputJobInfo outputJobInfo = outputJobInfoEntry.getValue();
    LOG.info("Aborting task-attempt for " + outputJobInfo.getLocation());
    try {
      baseDynamicCommitters.get(dynKey).abortTask(dynamicContexts.get(dynKey));
    } catch (IOException e) {
      // Keep aborting the remaining outputs; surface the first failure afterwards.
      LOG.error("Failed to abort task-attempt for " + outputJobInfo.getLocation(), e);
      if (firstFailure == null) {
        firstFailure = e;
      }
    }
  }
  if (firstFailure != null) {
    throw firstFailure;
  }
}
/** * Abort task. */ public void abort() { try { if (writer != null) jobConf.getOutputCommitter().abortTask(new TaskAttemptContextImpl(jobConf, attempt)); } catch (IOException ignore) { // No-op. } } }
/**
 * Bridges the new {@code org.apache.hadoop.mapreduce} API to the old
 * {@code mapred} API: the incoming new-API context is narrowed to the
 * old-API type and handed to the old abort method.
 *
 * @param taskContext new-API task attempt context
 * @throws IOException if the delegated abort fails
 */
@Override
public final void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  abortTask(oldApiContext);
}
/**
 * Bridges the new {@code org.apache.hadoop.mapreduce} API to the old
 * {@code mapred} API: the incoming new-API context is narrowed to the
 * old-API type and handed to the old abort method.
 *
 * @param taskContext new-API task attempt context
 * @throws IOException if the delegated abort fails
 */
@Override
public final void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  abortTask(oldApiContext);
}
/**
 * Bridges the new {@code org.apache.hadoop.mapreduce} API to the old
 * {@code mapred} API: the incoming new-API context is narrowed to the
 * old-API type and handed to the old abort method.
 *
 * @param taskContext new-API task attempt context
 * @throws IOException if the delegated abort fails
 */
@Override
public final void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  abortTask(oldApiContext);
}
/**
 * Bridges the new {@code org.apache.hadoop.mapreduce} API to the old
 * {@code mapred} API: the incoming new-API context is narrowed to the
 * old-API type and handed to the old abort method.
 *
 * @param taskContext new-API task attempt context
 * @throws IOException if the delegated abort fails
 */
@Override
public final void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  abortTask(oldApiContext);
}
/**
 * Bridges the new {@code org.apache.hadoop.mapreduce} API to the old
 * {@code mapred} API: the incoming new-API context is narrowed to the
 * old-API type and handed to the old abort method.
 *
 * @param taskContext new-API task attempt context
 * @throws IOException if the delegated abort fails
 */
@Override
public final void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  abortTask(oldApiContext);
}
}
/**
 * Bridges the new {@code org.apache.hadoop.mapreduce} API to the old
 * {@code mapred} API: the incoming new-API context is narrowed to the
 * old-API type and handed to the old abort method.
 *
 * @param taskContext new-API task attempt context
 * @throws IOException if the delegated abort fails
 */
@Override
public final void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws IOException {
  TaskAttemptContext oldApiContext = (TaskAttemptContext) taskContext;
  abortTask(oldApiContext);
}
}
/**
 * Forwards task abort to the wrapped base output committer unchanged.
 *
 * @param taskContext context of the task attempt being aborted
 * @throws IOException if the underlying committer fails to abort
 */
@Override public void abortTask(TaskAttemptContext taskContext) throws IOException { baseOutputCommitter.abortTask(taskContext); }
/**
 * Aborts the task attempt for every dynamic-partition output tracked by this
 * committer.
 *
 * <p>Abort is best-effort cleanup: a failure aborting one partition's output
 * must not prevent the remaining partitions from being cleaned up. Each
 * per-partition failure is therefore logged and the loop continues; the first
 * failure (if any) is rethrown once every committer has been given a chance
 * to abort.
 *
 * @param context the task attempt context (the stored per-partition contexts
 *                in {@code dynamicContexts} are used for the actual aborts)
 * @throws IOException the first per-partition abort failure encountered
 */
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
  IOException firstFailure = null;
  for (Map.Entry<String, OutputJobInfo> outputJobInfoEntry : dynamicOutputJobInfo.entrySet()) {
    String dynKey = outputJobInfoEntry.getKey();
    OutputJobInfo outputJobInfo = outputJobInfoEntry.getValue();
    LOG.info("Aborting task-attempt for " + outputJobInfo.getLocation());
    try {
      baseDynamicCommitters.get(dynKey).abortTask(dynamicContexts.get(dynKey));
    } catch (IOException e) {
      // Keep aborting the remaining outputs; surface the first failure afterwards.
      LOG.error("Failed to abort task-attempt for " + outputJobInfo.getLocation(), e);
      if (firstFailure == null) {
        firstFailure = e;
      }
    }
  }
  if (firstFailure != null) {
    throw firstFailure;
  }
}
/**
 * Aborts the task attempt for every dynamic-partition output tracked by this
 * committer.
 *
 * <p>Abort is best-effort cleanup: a failure aborting one partition's output
 * must not prevent the remaining partitions from being cleaned up. Each
 * per-partition failure is therefore logged and the loop continues; the first
 * failure (if any) is rethrown once every committer has been given a chance
 * to abort.
 *
 * @param context the task attempt context (the stored per-partition contexts
 *                in {@code dynamicContexts} are used for the actual aborts)
 * @throws IOException the first per-partition abort failure encountered
 */
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
  IOException firstFailure = null;
  for (Map.Entry<String, OutputJobInfo> outputJobInfoEntry : dynamicOutputJobInfo.entrySet()) {
    String dynKey = outputJobInfoEntry.getKey();
    OutputJobInfo outputJobInfo = outputJobInfoEntry.getValue();
    LOG.info("Aborting task-attempt for " + outputJobInfo.getLocation());
    try {
      baseDynamicCommitters.get(dynKey).abortTask(dynamicContexts.get(dynKey));
    } catch (IOException e) {
      // Keep aborting the remaining outputs; surface the first failure afterwards.
      LOG.error("Failed to abort task-attempt for " + outputJobInfo.getLocation(), e);
      if (firstFailure == null) {
        firstFailure = e;
      }
    }
  }
  if (firstFailure != null) {
    throw firstFailure;
  }
}
/**
 * Aborts the task attempt for every dynamic-partition output tracked by this
 * committer.
 *
 * <p>Abort is best-effort cleanup: a failure aborting one partition's output
 * must not prevent the remaining partitions from being cleaned up. Each
 * per-partition failure is therefore logged and the loop continues; the first
 * failure (if any) is rethrown once every committer has been given a chance
 * to abort.
 *
 * @param context the task attempt context (the stored per-partition contexts
 *                in {@code dynamicContexts} are used for the actual aborts)
 * @throws IOException the first per-partition abort failure encountered
 */
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
  IOException firstFailure = null;
  for (Map.Entry<String, OutputJobInfo> outputJobInfoEntry : dynamicOutputJobInfo.entrySet()) {
    String dynKey = outputJobInfoEntry.getKey();
    OutputJobInfo outputJobInfo = outputJobInfoEntry.getValue();
    LOG.info("Aborting task-attempt for " + outputJobInfo.getLocation());
    try {
      baseDynamicCommitters.get(dynKey).abortTask(dynamicContexts.get(dynKey));
    } catch (IOException e) {
      // Keep aborting the remaining outputs; surface the first failure afterwards.
      LOG.error("Failed to abort task-attempt for " + outputJobInfo.getLocation(), e);
      if (firstFailure == null) {
        firstFailure = e;
      }
    }
  }
  if (firstFailure != null) {
    throw firstFailure;
  }
}
/**
 * Best-effort disposal of a task attempt's output: asks the given committer
 * to abort the attempt, and logs — rather than propagates — any I/O failure,
 * since discarding output is a cleanup step that must not mask the original
 * error.
 *
 * @param taskContext the task attempt whose output is being discarded
 * @param committer   committer responsible for that output
 */
private void discardOutput(TaskAttemptContext taskContext, OutputCommitter committer) {
  try {
    committer.abortTask(taskContext);
  } catch (IOException ioe) {
    String trace = StringUtils.stringifyException(ioe);
    LOG.warn("Failure cleaning up: " + trace);
  }
}
/**
 * New-API abort entry point: passes the incoming context through
 * {@code hackTaskAttemptContext} (presumably working around context API
 * incompatibilities — confirm against that helper) before delegating to the
 * base committer.
 *
 * @param context new-API task attempt context
 * @throws IOException if the base committer fails to abort
 */
@Override public void abortTask(org.apache.hadoop.mapreduce.TaskAttemptContext context) throws IOException { baseCommitter.abortTask(hackTaskAttemptContext(context)); }
/**
 * Aborts the task attempt for every dynamic-partition output tracked by this
 * committer.
 *
 * <p>Abort is best-effort cleanup: a failure aborting one partition's output
 * must not prevent the remaining partitions from being cleaned up. Each
 * per-partition failure is therefore logged and the loop continues; the first
 * failure (if any) is rethrown once every committer has been given a chance
 * to abort.
 *
 * @param context the task attempt context (the stored per-partition contexts
 *                in {@code dynamicContexts} are used for the actual aborts)
 * @throws IOException the first per-partition abort failure encountered
 */
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
  IOException firstFailure = null;
  for (Map.Entry<String, OutputJobInfo> outputJobInfoEntry : dynamicOutputJobInfo.entrySet()) {
    String dynKey = outputJobInfoEntry.getKey();
    OutputJobInfo outputJobInfo = outputJobInfoEntry.getValue();
    LOG.info("Aborting task-attempt for " + outputJobInfo.getLocation());
    try {
      baseDynamicCommitters.get(dynKey).abortTask(dynamicContexts.get(dynKey));
    } catch (IOException e) {
      // Keep aborting the remaining outputs; surface the first failure afterwards.
      LOG.error("Failed to abort task-attempt for " + outputJobInfo.getLocation(), e);
      if (firstFailure == null) {
        firstFailure = e;
      }
    }
  }
  if (firstFailure != null) {
    throw firstFailure;
  }
}