/** {@inheritDoc} */
@Override public void run(HadoopTaskContext taskCtx) throws IgniteCheckedException {
    // Narrow to the v2 context to reach the wrapped Hadoop job context.
    JobContext hadoopJobCtx = ((HadoopV2TaskContext)taskCtx).jobContext();

    try {
        OutputCommitter outputCommitter = hadoopJobCtx.getJobConf().getOutputCommitter();

        // Either roll the job back or finalize it, depending on the task's abort flag.
        if (abort)
            outputCommitter.abortJob(hadoopJobCtx, JobStatus.State.FAILED);
        else
            outputCommitter.commitJob(hadoopJobCtx);
    }
    catch (IOException e) {
        // Wrap the Hadoop I/O failure in Ignite's checked exception type.
        throw new IgniteCheckedException(e);
    }
} }
// Best-effort job abort: build a JobConf from the current context's configuration,
// look up its configured OutputCommitter and ask it to abort with the given state.
// NOTE(review): the catch body is outside this view — confirm the exception is
// logged/handled rather than silently swallowed.
try { new JobConf(currContext.getConfiguration()) .getOutputCommitter().abortJob(currContext, state); } catch (Exception e) {
/**
 * Aborts the job by delegating to the wrapped base committer, first passing the
 * incoming mapreduce job context through {@code hackJobContext} so the base
 * committer receives the context representation it expects.
 */
@Override
public void abortJob(org.apache.hadoop.mapreduce.JobContext context, JobStatus.State state)
    throws IOException {
    baseCommitter.abortJob(hackJobContext(context), state);
}
/**
 * Bridges the new mapreduce abort API onto the old mapred one: translates the new
 * {@code JobStatus.State} into the legacy int run-state and delegates to the
 * old-style {@code abortJob(JobContext, int)} overload.
 *
 * @throws IOException if the translated run state is neither FAILED nor KILLED.
 */
@Override
public final void abortJob(org.apache.hadoop.mapreduce.JobContext context,
                           org.apache.hadoop.mapreduce.JobStatus.State runState)
    throws IOException {
  int oldState = JobStatus.getOldNewJobRunState(runState);

  // Only terminal failure states are legal inputs for an abort.
  boolean terminal = oldState == JobStatus.FAILED || oldState == JobStatus.KILLED;
  if (!terminal) {
    throw new IOException("Invalid job run state : " + runState.name());
  }

  abortJob((JobContext) context, oldState);
}
/**
 * Bridges the new mapreduce abort API onto the old mapred one: translates the new
 * {@code JobStatus.State} into the legacy int run-state and delegates to the
 * old-style {@code abortJob(JobContext, int)} overload.
 *
 * @throws IOException if the translated run state is neither FAILED nor KILLED.
 */
@Override
public final void abortJob(org.apache.hadoop.mapreduce.JobContext context,
                           org.apache.hadoop.mapreduce.JobStatus.State runState)
    throws IOException {
  int oldState = JobStatus.getOldNewJobRunState(runState);

  // Only terminal failure states are legal inputs for an abort.
  boolean terminal = oldState == JobStatus.FAILED || oldState == JobStatus.KILLED;
  if (!terminal) {
    throw new IOException("Invalid job run state : " + runState.name());
  }

  abortJob((JobContext) context, oldState);
}
/**
 * Bridges the new mapreduce abort API onto the old mapred one: translates the new
 * {@code JobStatus.State} into the legacy int run-state and delegates to the
 * old-style {@code abortJob(JobContext, int)} overload.
 *
 * @throws IOException if the translated run state is neither FAILED nor KILLED.
 */
@Override
public final void abortJob(org.apache.hadoop.mapreduce.JobContext context,
                           org.apache.hadoop.mapreduce.JobStatus.State runState)
    throws IOException {
  int oldState = JobStatus.getOldNewJobRunState(runState);

  // Only terminal failure states are legal inputs for an abort.
  boolean terminal = oldState == JobStatus.FAILED || oldState == JobStatus.KILLED;
  if (!terminal) {
    throw new IOException("Invalid job run state : " + runState.name());
  }

  abortJob((JobContext) context, oldState);
}
/**
 * Aborts the job: runs the superclass abort logic first, then records the abort
 * by writing the abort marker file using the job's configuration.
 */
@Override
public void abortJob(JobContext jobContext, int status) throws IOException {
  super.abortJob(jobContext, status);

  JobConf conf = jobContext.getJobConf();
  writeFile(conf, JOB_ABORT_FILE_NAME);
}
/**
 * Bridges the new mapreduce abort API onto the old mapred one: translates the new
 * {@code JobStatus.State} into the legacy int run-state and delegates to the
 * old-style {@code abortJob(JobContext, int)} overload.
 *
 * @throws IOException if the translated run state is neither FAILED nor KILLED.
 */
@Override
public final void abortJob(org.apache.hadoop.mapreduce.JobContext context,
                           org.apache.hadoop.mapreduce.JobStatus.State runState)
    throws IOException {
  int oldState = JobStatus.getOldNewJobRunState(runState);

  // Only terminal failure states are legal inputs for an abort.
  boolean terminal = oldState == JobStatus.FAILED || oldState == JobStatus.KILLED;
  if (!terminal) {
    throw new IOException("Invalid job run state : " + runState.name());
  }

  abortJob((JobContext) context, oldState);
}
/**
 * Bridges the new mapreduce abort API onto the old mapred one: translates the new
 * {@code JobStatus.State} into the legacy int run-state and delegates to the
 * old-style {@code abortJob(JobContext, int)} overload.
 *
 * @throws IOException if the translated run state is neither FAILED nor KILLED.
 */
@Override
public final void abortJob(org.apache.hadoop.mapreduce.JobContext context,
                           org.apache.hadoop.mapreduce.JobStatus.State runState)
    throws IOException {
  int oldState = JobStatus.getOldNewJobRunState(runState);

  // Only terminal failure states are legal inputs for an abort.
  boolean terminal = oldState == JobStatus.FAILED || oldState == JobStatus.KILLED;
  if (!terminal) {
    throw new IOException("Invalid job run state : " + runState.name());
  }

  abortJob((JobContext) context, oldState);
}
/**
 * Aborts the job: runs the superclass abort logic first, then records the abort
 * by writing the abort marker file using the job's configuration.
 */
@Override
public void abortJob(JobContext jobContext, int status) throws IOException {
  super.abortJob(jobContext, status);

  JobConf conf = jobContext.getJobConf();
  writeFile(conf, JOB_ABORT_FILE_NAME);
}
/**
 * Bridges the new mapreduce abort API onto the old mapred one: translates the new
 * {@code JobStatus.State} into the legacy int run-state and delegates to the
 * old-style {@code abortJob(JobContext, int)} overload.
 *
 * @throws IOException if the translated run state is neither FAILED nor KILLED.
 */
@Override
public final void abortJob(org.apache.hadoop.mapreduce.JobContext context,
                           org.apache.hadoop.mapreduce.JobStatus.State runState)
    throws IOException {
  int oldState = JobStatus.getOldNewJobRunState(runState);

  // Only terminal failure states are legal inputs for an abort.
  boolean terminal = oldState == JobStatus.FAILED || oldState == JobStatus.KILLED;
  if (!terminal) {
    throw new IOException("Invalid job run state : " + runState.name());
  }

  abortJob((JobContext) context, oldState);
}
/**
 * Aborts the job: runs the superclass abort logic first, then records the abort
 * by writing the abort marker file using the job's configuration.
 */
@Override
public void abortJob(JobContext jobContext, int status) throws IOException {
  super.abortJob(jobContext, status);

  JobConf conf = jobContext.getJobConf();
  writeFile(conf, JOB_ABORT_FILE_NAME);
}
org.apache.hadoop.mapred.OutputCommitter oldCommitter = (org.apache.hadoop.mapred.OutputCommitter)committer; oldCommitter.abortJob(jobContext, jobRunStateForCleanup);
org.apache.hadoop.mapred.OutputCommitter oldCommitter = (org.apache.hadoop.mapred.OutputCommitter)committer; oldCommitter.abortJob(jobContext, jobRunStateForCleanup);
org.apache.hadoop.mapred.OutputCommitter oldCommitter = (org.apache.hadoop.mapred.OutputCommitter)committer; oldCommitter.abortJob(jobContext, jobRunStateForCleanup);
org.apache.hadoop.mapred.OutputCommitter oldCommitter = (org.apache.hadoop.mapred.OutputCommitter)committer; oldCommitter.abortJob(jobContext, jobRunStateForCleanup);
// Best-effort job abort: build a JobConf from the current context's configuration,
// look up its configured OutputCommitter and ask it to abort with the given state.
// NOTE(review): the catch body is outside this view — confirm the exception is
// logged/handled rather than silently swallowed.
try { new JobConf(currContext.getConfiguration()) .getOutputCommitter().abortJob(currContext, state); } catch (Exception e) {
/**
 * Aborts the job: runs the superclass abort logic, then rolls back the HBase write
 * transaction that was opened for this job. The revision manager is always closed,
 * even when the rollback fails.
 */
@Override
public void abortJob(JobContext jobContext, int status) throws IOException {
  super.abortJob(jobContext, status);

  RevisionManager revisionMgr = null;
  try {
    revisionMgr = HBaseRevisionManagerUtil
        .getOpenedRevisionManager(jobContext.getConfiguration());
    revisionMgr.abortWriteTransaction(HBaseRevisionManagerUtil
        .getWriteTransaction(jobContext.getConfiguration()));
  } finally {
    if (revisionMgr != null)
      revisionMgr.close();
  }
}
org.apache.hadoop.mapred.OutputCommitter oldCommitter = (org.apache.hadoop.mapred.OutputCommitter)committer; oldCommitter.abortJob(jobContext, jobRunStateForCleanup);
/**
 * Aborts the job: delegates to the wrapped base committer, then rolls back the HBase
 * write transaction opened for this job. Intermediate output is cleaned up and the
 * revision manager is closed unconditionally, even when the rollback fails.
 */
@Override
public void abortJob(JobContext jobContext, int status) throws IOException {
  baseOutputCommitter.abortJob(jobContext, status);

  RevisionManager revisionMgr = null;
  try {
    revisionMgr = HBaseRevisionManagerUtil
        .getOpenedRevisionManager(jobContext.getConfiguration());
    revisionMgr.abortWriteTransaction(HBaseRevisionManagerUtil
        .getWriteTransaction(jobContext.getConfiguration()));
  } finally {
    // Clean up intermediate data before releasing the revision manager.
    cleanIntermediate(jobContext);
    if (revisionMgr != null) {
      revisionMgr.close();
    }
  }
}