@Override
public void setupJob(JobContext jobContext) throws IOException {
  for (OutputCommitter committer : committers) {
    committer.setupJob(jobContext);
  }
}
@Override
public void setupJob(org.apache.hadoop.mapred.JobContext jobContext) throws IOException {
  for (OutputCommitter committer : committers) {
    committer.setupJob(jobContext);
  }
}
@Override
public void setupJob(JobContext jobContext) throws IOException {
  for (String alias : outputCommitters.keySet()) {
    LOGGER.info("Calling setupJob for alias: " + alias);
    BaseOutputCommitterContainer outputContainer = outputCommitters.get(alias);
    outputContainer.getBaseCommitter().setupJob(outputContainer.getContext());
  }
}
@Override
public OutputCommitter getOutputCommitter(TaskAttemptContext context)
    throws IOException, InterruptedException {
  final OutputCommitter outputCommitter = Mockito.mock(OutputCommitter.class);
  doNothing().when(outputCommitter).setupJob(any(JobContext.class));
  return outputCommitter;
}
@Override
public void setupJob(JobContext context) throws IOException {
  getBaseOutputCommitter().setupJob(HCatMapRedUtil.createJobContext(context));
}
@Override
public void setupJob(JobContext context) throws IOException {
  if (getBaseOutputCommitter() != null && !dynamicPartitioningUsed) {
    getBaseOutputCommitter().setupJob(HCatMapRedUtil.createJobContext(context));
  }
  // in the dynamic-partitioning use case, setup is invoked through FileRecordWriterContainer
}
try {
  this.context = new TaskAttemptContextImpl(this.configuration, taskAttemptID);
  this.outputCommitter = this.mapreduceOutputFormat.getOutputCommitter(this.context);
  this.outputCommitter.setupJob(new JobContextImpl(this.configuration, new JobID()));
} catch (Exception e) {
  throw new RuntimeException(e);
}
@Test
public void testOpen() throws Exception {
  OutputFormat<String, Long> dummyOutputFormat = mock(DummyOutputFormat.class);
  OutputCommitter outputCommitter = setupOutputCommitter(true);
  when(dummyOutputFormat.getOutputCommitter(any(TaskAttemptContext.class)))
      .thenReturn(outputCommitter);

  HadoopOutputFormat<String, Long> hadoopOutputFormat = setupHadoopOutputFormat(
      dummyOutputFormat, Job.getInstance(), new DummyRecordWriter(),
      setupOutputCommitter(true), new Configuration());

  hadoopOutputFormat.open(1, 4);

  verify(hadoopOutputFormat.outputCommitter, times(1)).setupJob(any(JobContext.class));
  verify(hadoopOutputFormat.mapreduceOutputFormat, times(1))
      .getRecordWriter(any(TaskAttemptContext.class));
}
/** {@inheritDoc} */
@Override
protected void run0(HadoopV2TaskContext taskCtx) throws IgniteCheckedException {
  try {
    JobContextImpl jobCtx = taskCtx.jobContext();
    OutputFormat outputFormat = getOutputFormat(jobCtx);

    outputFormat.checkOutputSpecs(jobCtx);

    OutputCommitter committer = outputFormat.getOutputCommitter(hadoopContext());

    if (committer != null)
      committer.setupJob(jobCtx);
  } catch (ClassNotFoundException | IOException e) {
    throw new IgniteCheckedException(e);
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    throw new IgniteInterruptedCheckedException(e);
  }
}
@Override
public WriterContext prepareWrite() throws HCatException {
  OutputJobInfo jobInfo = OutputJobInfo.create(we.getDbName(), we.getTableName(), we.getPartitionKVs());
  Job job;
  try {
    job = new Job(conf);
    HCatOutputFormat.setOutput(job, jobInfo);
    HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job.getConfiguration()));
    HCatOutputFormat outFormat = new HCatOutputFormat();
    outFormat.checkOutputSpecs(job);
    outFormat.getOutputCommitter(ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
        job.getConfiguration(),
        ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID())).setupJob(job);
  } catch (IOException | InterruptedException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
  WriterContextImpl cntxt = new WriterContextImpl();
  cntxt.setConf(job.getConfiguration());
  return cntxt;
}
private void publishTest(Job job) throws Exception {
  HCatOutputFormat hcof = new HCatOutputFormat();
  TaskAttemptContext tac = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
      job.getConfiguration(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID());
  OutputCommitter committer = hcof.getOutputCommitter(tac);
  committer.setupJob(job);
  committer.setupTask(tac);
  committer.commitTask(tac);
  committer.commitJob(job);

  Partition part = client.getPartition(dbName, tblName, Arrays.asList("p1"));
  assertNotNull(part);
  StorerInfo storer = InternalUtil.extractStorerInfo(part.getSd(), part.getParameters());
  assertEquals(storer.getProperties().get("hcat.testarg"), "testArgValue");
  assertTrue(part.getSd().getLocation().contains("p1"));
}
/** Calls the delegate's {@link OutputCommitter#setupJob(JobContext)}. */
@Override
public void setupJob(JobContext context) throws IOException {
  delegate.setupJob(context);
}
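The snippet above is the forwarding form of the pattern: each call is passed straight through to a wrapped committer. A minimal self-contained sketch of such a wrapper follows; the class name ForwardingOutputCommitter and its constructor are invented for illustration, but the overridden methods are the abstract ones every org.apache.hadoop.mapreduce.OutputCommitter must implement.

// Hypothetical forwarding committer (a sketch, not from any snippet above):
// every protocol method is delegated unchanged to a wrapped committer.
import java.io.IOException;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class ForwardingOutputCommitter extends OutputCommitter {
  private final OutputCommitter delegate;

  public ForwardingOutputCommitter(OutputCommitter delegate) {
    this.delegate = delegate;
  }

  @Override
  public void setupJob(JobContext context) throws IOException {
    delegate.setupJob(context);
  }

  @Override
  public void setupTask(TaskAttemptContext context) throws IOException {
    delegate.setupTask(context);
  }

  @Override
  public boolean needsTaskCommit(TaskAttemptContext context) throws IOException {
    return delegate.needsTaskCommit(context);
  }

  @Override
  public void commitTask(TaskAttemptContext context) throws IOException {
    delegate.commitTask(context);
  }

  @Override
  public void abortTask(TaskAttemptContext context) throws IOException {
    delegate.abortTask(context);
  }
}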
@Override
public void setupJob(JobContext context) throws IOException {
  outputCommitter.setupJob(HadoopUtils.makeJobContext(getConf(), context));
}
protected void runJobSetupTask(TaskUmbilicalProtocol umbilical,
                               TaskReporter reporter) throws IOException, InterruptedException {
  // do the setup
  getProgress().setStatus("setup");
  committer.setupJob(jobContext);
  done(umbilical, reporter);
}
@SuppressWarnings("unchecked") protected void handleJobSetup(CommitterJobSetupEvent event) { try { committer.setupJob(event.getJobContext()); context.getEventHandler().handle( new JobSetupCompletedEvent(event.getJobID())); } catch (Exception e) { LOG.warn("Job setup failed", e); context.getEventHandler().handle(new JobSetupFailedEvent( event.getJobID(), StringUtils.stringifyException(e))); } }