/**
 * Aborts the task attempt on every wrapped committer.
 *
 * @param taskContext context of the task attempt being aborted
 * @throws IOException if any delegate committer fails to abort
 */
@Override
public void abortTask(TaskAttemptContext taskContext) throws IOException {
    for (OutputCommitter delegate : committers) {
        delegate.abortTask(taskContext);
    }
}
}
/**
 * Aborts the task attempt on each composed committer (old mapred API variant).
 *
 * @param taskContext old-API context of the task attempt being aborted
 * @throws IOException if a delegate committer fails to abort
 */
@Override
public void abortTask(org.apache.hadoop.mapred.TaskAttemptContext taskContext) throws IOException {
    for (OutputCommitter oc : committers) {
        oc.abortTask(taskContext);
    }
}
/**
 * Aborts the task for every registered output alias, delegating to each
 * alias' base committer with the context stored for that alias.
 *
 * @param taskContext task attempt context (unused directly; each alias
 *                    carries its own stored context)
 * @throws IOException if an underlying committer fails to abort
 */
@Override
public void abortTask(TaskAttemptContext taskContext) throws IOException {
    for (String alias : outputCommitters.keySet()) {
        LOGGER.info("Calling abortTask for alias: " + alias);
        BaseOutputCommitterContainer container = outputCommitters.get(alias);
        container.getBaseCommitter().abortTask(container.getContext());
    }
}
/** * Abort task. * * @param outputFormat Output format. */ protected void abort(@Nullable OutputFormat outputFormat) { if (hadoopCtx.writer() != null) { assert outputFormat != null; try { outputFormat.getOutputCommitter(hadoopCtx).abortTask(hadoopCtx); } catch (IOException ignore) { // Ignore. } catch (InterruptedException ignore) { Thread.currentThread().interrupt(); } } }
/**
 * Aborts the task by converting the new-API context to the old (mapred)
 * form and delegating to the wrapped base committer.
 *
 * @param context task attempt context being aborted
 * @throws IOException if the base committer fails to abort
 */
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
    // The wrapped committer speaks the old mapred API, hence the conversion.
    getBaseOutputCommitter().abortTask(HCatMapRedUtil.createTaskAttemptContext(context));
}
/**
 * Aborts the task attempt.
 * <p>
 * With dynamic partitioning, abort is routed through the
 * {@code TaskCommitContextRegistry} and its per-task state is always
 * discarded afterwards; otherwise the base committer is invoked directly
 * after the work output path has been set.
 *
 * @param context task attempt context being aborted
 * @throws IOException if the underlying abort fails
 */
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
    if (dynamicPartitioningUsed) {
        try {
            TaskCommitContextRegistry.getInstance().abortTask(context);
        } finally {
            // Always drop the registered cleanup state, even if abort threw.
            TaskCommitContextRegistry.getInstance().discardCleanupFor(context);
        }
    } else {
        FileOutputFormatContainer.setWorkOutputPath(context);
        getBaseOutputCommitter().abortTask(HCatMapRedUtil.createTaskAttemptContext(context));
    }
}
// NOTE(review): this fragment appears truncated and duplicated (neither the
// `if` nor the `try` block is ever closed, the same abort-and-rethrow sequence
// repeats verbatim, and code follows a `throw` inside the catch) — this looks
// like an extraction artifact; confirm against the original source before use.
if (null != committer) { try { committer.abortTask(cntxt); } catch (IOException e1) { throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, e1); if (null != committer) { try { committer.abortTask(cntxt); } catch (IOException e1) { throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, e1);
/**
 * Fans the abort out to every underlying committer (old mapred API).
 *
 * @param taskContext old-API context of the task attempt being aborted
 * @throws IOException if any underlying committer fails to abort
 */
@Override
public void abortTask(org.apache.hadoop.mapred.TaskAttemptContext taskContext) throws IOException {
    for (OutputCommitter c : committers) {
        c.abortTask(taskContext);
    }
}
/**
 * Delegates the task abort straight to the underlying base committer.
 *
 * @param taskContext context of the task attempt being aborted
 * @throws IOException if the base committer fails to abort
 */
@Override
public void abortTask(TaskAttemptContext taskContext) throws IOException {
    baseCommitter.abortTask(taskContext);
}
}
/**
 * Propagates the task abort to all wrapped committers.
 *
 * @param taskContext context of the task attempt being aborted
 * @throws IOException if any wrapped committer fails to abort
 */
@Override
public void abortTask(TaskAttemptContext taskContext) throws IOException {
    for (OutputCommitter child : committers) {
        child.abortTask(taskContext);
    }
}
}
/**
 * Forwards the abort to each committer in this composite.
 *
 * @param taskContext context of the task attempt being aborted
 * @throws IOException if a committer fails while aborting
 */
@Override
public void abortTask(TaskAttemptContext taskContext) throws IOException {
    for (OutputCommitter wrapped : committers) {
        wrapped.abortTask(taskContext);
    }
}
}
/**
 * Aborts the root output first, then each named output using a context
 * scoped to that output's name.
 *
 * @param taskContext context of the task attempt being aborted
 * @throws IOException if the root or any named committer fails to abort
 */
@Override
public void abortTask(TaskAttemptContext taskContext) throws IOException {
    rootOutputcommitter.abortTask(taskContext);
    for (Map.Entry<String, OutputCommitter> entry : committers.entrySet()) {
        TaskAttemptContext namedContext =
            MultipleOutputs.getNamedTaskContext(taskContext, entry.getKey());
        entry.getValue().abortTask(namedContext);
    }
}
/**
 * Best-effort discard of the task's output via the committer; abort
 * failures are logged and swallowed rather than propagated.
 *
 * @param taskContext context of the task attempt whose output is discarded
 */
private void discardOutput(TaskAttemptContext taskContext) {
    try {
        committer.abortTask(taskContext);
    } catch (IOException e) {
        LOG.warn("Failure cleaning up: " + StringUtils.stringifyException(e));
    }
}
/**
 * Discards this task's output by aborting it on the committer. Intentionally
 * never throws: a failed cleanup is only logged as a warning.
 *
 * @param taskContext context of the task attempt whose output is discarded
 */
private void discardOutput(TaskAttemptContext taskContext) {
    try {
        committer.abortTask(taskContext);
    } catch (IOException cleanupFailure) {
        LOG.warn("Failure cleaning up: " + StringUtils.stringifyException(cleanupFailure));
    }
}
/**
 * Aborts the task on the wrapped committer, re-wrapping the incoming
 * context via {@code HadoopUtils.makeTaskAttemptContext} with this
 * instance's configuration.
 *
 * @param context task attempt context being aborted
 * @throws IOException if the wrapped committer fails to abort
 */
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
    outputCommitter.abortTask(
        HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
/**
 * Delegates the abort to the underlying committer, first rebuilding the
 * task attempt context against this object's configuration.
 *
 * @param context task attempt context being aborted
 * @throws IOException if the underlying committer fails to abort
 */
@Override
public void abortTask(TaskAttemptContext context) throws IOException {
    outputCommitter.abortTask(
        HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
/**
 * Walks every registered output alias and aborts it through the alias'
 * base committer, using the context captured for that alias.
 *
 * @param taskContext task attempt context (unused directly; each alias
 *                    supplies its own captured context)
 * @throws IOException if any base committer fails to abort
 */
@Override
public void abortTask(TaskAttemptContext taskContext) throws IOException {
    for (String alias : outputCommitters.keySet()) {
        LOGGER.info("Calling abortTask for alias: " + alias);
        BaseOutputCommitterContainer aliasOutput = outputCommitters.get(alias);
        aliasOutput.getBaseCommitter().abortTask(aliasOutput.getContext());
    }
}
void taskCleanup(TaskUmbilicalProtocol umbilical) throws IOException { // set phase for this task setPhase(TaskStatus.Phase.CLEANUP); getProgress().setStatus("cleanup"); statusUpdate(umbilical); LOG.info("Runnning cleanup for the task"); // do the cleanup committer.abortTask(taskContext); }
/**
 * Handles a committer task-abort event: best-effort aborts the attempt's
 * output, then always signals TA_CLEANUP_DONE so the attempt can proceed.
 *
 * @param event the abort event carrying the attempt id and context
 */
@SuppressWarnings("unchecked")
protected void handleTaskAbort(CommitterTaskAbortEvent event) {
    try {
        committer.abortTask(event.getAttemptContext());
    } catch (Exception e) {
        // Abort is best-effort; log and continue to the done notification.
        LOG.warn("Task cleanup failed for attempt " + event.getAttemptID(), e);
    }
    TaskAttemptEvent done =
        new TaskAttemptEvent(event.getAttemptID(), TaskAttemptEventType.TA_CLEANUP_DONE);
    context.getEventHandler().handle(done);
}
/**
 * Processes a task-abort request. The output abort may fail without
 * blocking progress: a warning is logged and TA_CLEANUP_DONE is still
 * dispatched to the event handler.
 *
 * @param event the abort event carrying the attempt id and context
 */
@SuppressWarnings("unchecked")
protected void handleTaskAbort(CommitterTaskAbortEvent event) {
    try {
        committer.abortTask(event.getAttemptContext());
    } catch (Exception abortFailure) {
        LOG.warn("Task cleanup failed for attempt " + event.getAttemptID(), abortFailure);
    }
    context.getEventHandler().handle(
        new TaskAttemptEvent(event.getAttemptID(), TaskAttemptEventType.TA_CLEANUP_DONE));
}