@Override
public void setupTask(TaskAttemptContext taskContext) throws IOException {
  for (OutputCommitter committer : committers) {
    committer.setupTask(taskContext);
  }
}
@Override
public void setupTask(org.apache.hadoop.mapred.TaskAttemptContext taskContext) throws IOException {
  for (OutputCommitter committer : committers) {
    committer.setupTask(taskContext);
  }
}
@Override
public void setupTask(TaskAttemptContext taskContext) throws IOException {
  for (String alias : outputCommitters.keySet()) {
    LOGGER.info("Calling setupTask for alias: " + alias);
    BaseOutputCommitterContainer outputContainer = outputCommitters.get(alias);
    outputContainer.getBaseCommitter().setupTask(outputContainer.getContext());
  }
}
/**
 * Setup task.
 *
 * @param outputFormat Output format.
 * @throws IOException In case of IO exception.
 * @throws InterruptedException In case of interrupt.
 */
protected void setup(@Nullable OutputFormat outputFormat) throws IOException, InterruptedException {
    if (hadoopCtx.writer() != null) {
        assert outputFormat != null;

        outputFormat.getOutputCommitter(hadoopCtx).setupTask(hadoopCtx);
    }
}
@Override
public void setupTask(TaskAttemptContext context) throws IOException {
  // In dynamic-partitioning mode there is no single base committer to set up
  // here; outputs are handled per partition at write time.
  if (!dynamicPartitioningUsed) {
    getBaseOutputCommitter().setupTask(HCatMapRedUtil.createTaskAttemptContext(context));
  }
}
@Override
public void setupTask(TaskAttemptContext context) throws IOException {
  getBaseOutputCommitter().setupTask(HCatMapRedUtil.createTaskAttemptContext(context));
}
/**
 * Put writer into Hadoop context and return associated output format instance.
 *
 * @param jobCtx Job context.
 * @return Output format.
 * @throws IgniteCheckedException In case of Grid exception.
 * @throws InterruptedException In case of interrupt.
 */
protected OutputFormat prepareWriter(JobContext jobCtx) throws IgniteCheckedException, InterruptedException {
    try {
        OutputFormat outputFormat = getOutputFormat(jobCtx);

        assert outputFormat != null;

        OutputCommitter outCommitter = outputFormat.getOutputCommitter(hadoopCtx);

        if (outCommitter != null)
            outCommitter.setupTask(hadoopCtx);

        RecordWriter writer = outputFormat.getRecordWriter(hadoopCtx);

        hadoopCtx.writer(writer);

        return outputFormat;
    }
    catch (IOException | ClassNotFoundException e) {
        throw new IgniteCheckedException(e);
    }
}
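The getOutputFormat(jobCtx) helper is not shown in this snippet. In Hadoop the configured format is typically resolved reflectively, which would also explain why ClassNotFoundException is caught above. A minimal sketch under that assumption (the class and method names here are illustrative, not the actual helper):

import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.util.ReflectionUtils;

class OutputFormatResolver {
  // Sketch: materialize the job's configured OutputFormat via reflection.
  // JobContext.getOutputFormatClass() throws ClassNotFoundException, which
  // matches the catch clause in prepareWriter above.
  static OutputFormat<?, ?> getOutputFormat(JobContext jobCtx) throws ClassNotFoundException {
    return ReflectionUtils.newInstance(jobCtx.getOutputFormatClass(), jobCtx.getConfiguration());
  }
}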
private void publishTest(Job job) throws Exception {
  HCatOutputFormat hcof = new HCatOutputFormat();
  TaskAttemptContext tac = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
      job.getConfiguration(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID());
  OutputCommitter committer = hcof.getOutputCommitter(tac);
  committer.setupJob(job);
  committer.setupTask(tac);
  committer.commitTask(tac);
  committer.commitJob(job);

  Partition part = client.getPartition(dbName, tblName, Arrays.asList("p1"));
  assertNotNull(part);

  StorerInfo storer = InternalUtil.extractStorerInfo(part.getSd(), part.getParameters());
  assertEquals(storer.getProperties().get("hcat.testarg"), "testArgValue");
  assertTrue(part.getSd().getLocation().contains("p1"));
}
try {
  committer = outFormat.getOutputCommitter(cntxt);
  committer.setupTask(cntxt);
  writer = outFormat.getRecordWriter(cntxt);
  while (recordItr.hasNext()) {
@Override
public Void call() throws IOException, InterruptedException {
  final OutputCommitter outputCommitter = tof[taskIdx].getOutputCommitter(taCtx[taskIdx]);
  outputCommitter.setupTask(taCtx[taskIdx]);
  final RecordWriter rw = tof[taskIdx].getRecordWriter(taCtx[taskIdx]);
  writeOutput(rw, taCtx[taskIdx]);
  outputCommitter.commitTask(taCtx[taskIdx]);
  return null;
  }
});
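The call() above runs the full per-task sequence: setupTask, write, commitTask. For reference, a minimal end-to-end sketch of the commit protocol with stock Hadoop classes; the output path and attempt ID are illustrative, and TaskAttemptContextImpl is framework-internal (audience-private), so this is a sketch rather than production code:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

public class CommitterLifecycleSketch {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration());
    FileOutputFormat.setOutputPath(job, new Path("/tmp/committer-demo"));

    TaskAttemptContext tac = new TaskAttemptContextImpl(
        job.getConfiguration(),
        TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"));

    TextOutputFormat<Text, Text> format = new TextOutputFormat<>();
    OutputCommitter committer = format.getOutputCommitter(tac);

    committer.setupJob(job);       // once per job
    committer.setupTask(tac);      // once per task attempt

    RecordWriter<Text, Text> writer = format.getRecordWriter(tac);
    writer.write(new Text("k"), new Text("v"));
    writer.close(tac);

    if (committer.needsTaskCommit(tac)) {
      committer.commitTask(tac);   // promote the attempt's output
    }
    committer.commitJob(job);      // finalize the job's output
  }
}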
@Override
public void setupTask(TaskAttemptContext taskContext) throws IOException {
  delegate.setupTask(taskContext);
}
@Override
public void setupTask(TaskAttemptContext taskContext) throws IOException {
  rootOutputcommitter.setupTask(taskContext);
  for (Map.Entry<String, OutputCommitter> committer : committers.entrySet()) {
    TaskAttemptContext namedTaskContext = MultipleOutputs.getNamedTaskContext(taskContext, committer.getKey());
    committer.getValue().setupTask(namedTaskContext);
  }
}
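The per-name fan-out above pairs with named-output registration: each named output carries its own OutputFormat and hence its own committer. A minimal registration sketch with the stock Hadoop MultipleOutputs API (output names and types are illustrative; getNamedTaskContext is a helper of the surrounding codebase, not stock Hadoop):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class NamedOutputsSetup {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration());
    // One OutputFormat per named output; a committer like the one above
    // then fans setupTask out across each registered name.
    MultipleOutputs.addNamedOutput(job, "text", TextOutputFormat.class,
        LongWritable.class, Text.class);
    MultipleOutputs.addNamedOutput(job, "seq", SequenceFileOutputFormat.class,
        LongWritable.class, Text.class);
  }
}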
@Override
public void setupTask(TaskAttemptContext context) throws IOException {
  outputCommitter.setupTask(
      HadoopUtils.makeTaskAttemptContext(getConf(), context));
}
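Helpers like HCatMapRedUtil.createTaskAttemptContext and HadoopUtils.makeTaskAttemptContext above rebuild the context in the API the wrapped committer expects. A minimal sketch of the mapreduce-to-mapred downgrade; org.apache.hadoop.mapred.TaskAttemptContextImpl is audience-private in Hadoop, so treat this as illustrative rather than a supported API:

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TaskAttemptContextImpl;
import org.apache.hadoop.mapred.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class ContextDowngrade {
  // Sketch: rebuild an old-API (mapred) context carrying the same
  // configuration and attempt id as the new-API (mapreduce) context.
  static org.apache.hadoop.mapred.TaskAttemptContext toMapred(TaskAttemptContext ctx) {
    return new TaskAttemptContextImpl(
        new JobConf(ctx.getConfiguration()),
        TaskAttemptID.downgrade(ctx.getTaskAttemptID()));
  }
}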