@Override
public void commitJob(JobContext jobContext) throws IOException {
    for (String alias : outputCommitters.keySet()) {
        LOGGER.info("Calling commitJob for alias: " + alias);
        BaseOutputCommitterContainer outputContainer = outputCommitters.get(alias);
        outputContainer.getBaseCommitter().commitJob(outputContainer.getContext());
    }
}
@Override
public void commitJob(JobContext jobContext) throws IOException {
    getBaseOutputCommitter().commitJob(HCatMapRedUtil.createJobContext(jobContext));
    cleanupJob(jobContext);
}
@Test
public void testFinalizedGlobal() throws Exception {
    HadoopOutputFormat<String, Long> hadoopOutputFormat = setupHadoopOutputFormat(
        new DummyOutputFormat(), Job.getInstance(), null, null, new Configuration());

    hadoopOutputFormat.finalizeGlobal(1);

    verify(hadoopOutputFormat.outputCommitter, times(1)).commitJob(any(JobContext.class));
}
@Override
public void finalizeGlobal(int parallelism) throws IOException {
    JobContext jobContext;
    TaskAttemptContext taskContext;
    try {
        TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_"
            + String.format("%" + (6 - Integer.toString(1).length()) + "s", " ").replace(" ", "0")
            + Integer.toString(1)
            + "_0");

        jobContext = new JobContextImpl(this.configuration, new JobID());
        taskContext = new TaskAttemptContextImpl(this.configuration, taskAttemptID);
        this.outputCommitter = this.mapreduceOutputFormat.getOutputCommitter(taskContext);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    jobContext.getCredentials().addAll(this.credentials);
    Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
    if (currentUserCreds != null) {
        jobContext.getCredentials().addAll(currentUserCreds);
    }

    // finalize HDFS output format
    if (this.outputCommitter != null) {
        this.outputCommitter.commitJob(jobContext);
    }
}
getBaseOutputCommitter().commitJob(
    HCatMapRedUtil.createJobContext(jobContext));
/** {@inheritDoc} */
@Override
public void run0(HadoopV2TaskContext taskCtx) throws IgniteCheckedException {
    JobContextImpl jobCtx = taskCtx.jobContext();

    try {
        OutputFormat outputFormat = getOutputFormat(jobCtx);

        OutputCommitter committer = outputFormat.getOutputCommitter(hadoopContext());

        if (committer != null) {
            if (abort)
                committer.abortJob(jobCtx, JobStatus.State.FAILED);
            else
                committer.commitJob(jobCtx);
        }
    }
    catch (ClassNotFoundException | IOException e) {
        throw new IgniteCheckedException(e);
    }
    catch (InterruptedException e) {
        Thread.currentThread().interrupt();

        throw new IgniteInterruptedCheckedException(e);
    }
}
hof.getOutputCommitter(context).commitJob(context);

FileStatus[] families = FSUtils.listStatus(fs, dir, new FSUtils.FamilyDirFilter(fs));
assertEquals(htd.getFamilies().size(), families.length);
@Override
public void commit(WriterContext context) throws HCatException {
    WriterContextImpl cntxtImpl = (WriterContextImpl) context;
    try {
        new HCatOutputFormat().getOutputCommitter(
                ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
                    cntxtImpl.getConf(),
                    ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
            .commitJob(ShimLoader.getHadoopShims().getHCatShim().createJobContext(
                cntxtImpl.getConf(), null));
    } catch (IOException e) {
        throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    } catch (InterruptedException e) {
        throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    }
}
private void publishTest(Job job) throws Exception {
    HCatOutputFormat hcof = new HCatOutputFormat();
    TaskAttemptContext tac = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
        job.getConfiguration(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID());
    OutputCommitter committer = hcof.getOutputCommitter(tac);
    committer.setupJob(job);
    committer.setupTask(tac);
    committer.commitTask(tac);
    committer.commitJob(job);

    Partition part = client.getPartition(dbName, tblName, Arrays.asList("p1"));
    assertNotNull(part);
    StorerInfo storer = InternalUtil.extractStorerInfo(part.getSd(), part.getParameters());
    assertEquals(storer.getProperties().get("hcat.testarg"), "testArgValue");
    assertTrue(part.getSd().getLocation().contains("p1"));
}
@Override
public void commitOutput() throws IOException {
    if (!initialized) {
        throw new RuntimeException("Committer not initialized");
    }
    committer.commitJob(jobContext);
}
@Override
public void commitJob(JobContext jobContext) throws IOException {
    rootOutputcommitter.commitJob(jobContext);
    for (Map.Entry<String, OutputCommitter> committer : committers.entrySet()) {
        JobContext namedJobContext = MultipleOutputs.getNamedJobContext(jobContext, committer.getKey());
        committer.getValue().commitJob(namedJobContext);
    }
}
@Override
public void commitJob(JobContext jobContext) throws IOException {
    committer.commitJob(jobContext);
}
@Override
public final void commitJob(JobContext jobContext) throws IOException {
    super.commitJob(jobContext);
    commit(jobContext);
}
@Override
public void commitJob(JobContext context) throws IOException {
    outputCommitter.commitJob(
        HadoopUtils.makeJobContext(getConf(), context));
}
@Override
public void commit() throws IOException {
    try {
        final TaskAttemptContext cleanupContext =
            HadoopUtils.createCleanupTaskContext(conf.get(), jobID.get());
        getOutputFormat(cleanupContext.getTaskAttemptID())
            .getOutputCommitter(cleanupContext)
            .commitJob(cleanupContext);
    } catch (Exception e) {
        throw new IOException("Unable to commit output", e);
    }
}