@Override
public HiveApiOutputCommitter getOutputCommitter(TaskAttemptContext taskAttemptContext)
    throws IOException, InterruptedException {
  HadoopUtils.setWorkOutputDir(taskAttemptContext);
  Configuration conf = taskAttemptContext.getConfiguration();
  JobConf jobConf = new JobConf(conf);
  OutputCommitter baseCommitter = jobConf.getOutputCommitter();
  LOG.info("Getting output committer with base output committer {}",
      baseCommitter.getClass().getSimpleName());
  return new HiveApiOutputCommitter(new HackOutputCommitter(baseCommitter, jobConf),
      myProfileId);
}
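HackOutputCommitter wraps the base committer pulled from the JobConf. As a rough illustration of that wrapping pattern, here is a minimal sketch of a committer that forwards every lifecycle call to a base committer; it uses the newer mapreduce API for brevity, whereas the real HackOutputCommitter also has to bridge the old mapred interface:

import java.io.IOException;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Sketch only: a pass-through committer; a real wrapper would add its own
// behavior (logging, path fixups, etc.) around these delegated calls.
class DelegatingOutputCommitter extends OutputCommitter {
  private final OutputCommitter base;

  DelegatingOutputCommitter(OutputCommitter base) {
    this.base = base;
  }

  @Override public void setupJob(JobContext ctx) throws IOException {
    base.setupJob(ctx);
  }
  @Override public void commitJob(JobContext ctx) throws IOException {
    base.commitJob(ctx);
  }
  @Override public void setupTask(TaskAttemptContext ctx) throws IOException {
    base.setupTask(ctx);
  }
  @Override public boolean needsTaskCommit(TaskAttemptContext ctx) throws IOException {
    return base.needsTaskCommit(ctx);
  }
  @Override public void commitTask(TaskAttemptContext ctx) throws IOException {
    base.commitTask(ctx);
  }
  @Override public void abortTask(TaskAttemptContext ctx) throws IOException {
    base.abortTask(ctx);
  }
}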
@Override
public void commitJob(JobContext jobContext) throws IOException {
  baseCommitter.commitJob(jobContext);
  Configuration conf = jobContext.getConfiguration();
  OutputConf outputConf = new OutputConf(conf, profileId);
  HiveOutputDescription outputDesc = outputConf.readOutputDescription();
  OutputInfo outputInfo = outputConf.readOutputTableInfo();
  if (outputInfo.hasPartitionInfo()) {
    registerPartitions(conf, outputDesc, outputInfo);
  } else {
    noPartitionsCopyData(conf, outputInfo);
  }
  writeSuccessFile(conf);
}
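The writeSuccessFile(conf) call at the end marks the output as complete; its body is not shown above. A minimal sketch of what such a helper could look like, assuming the output directory is stored under the standard mapreduce.output.fileoutputformat.outputdir key and reusing Hadoop's standard _SUCCESS marker name:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;

class SuccessFileWriter {
  // Creates an empty _SUCCESS marker in the job's output directory so
  // downstream readers can tell the job finished cleanly.
  static void writeSuccessFile(Configuration conf) throws IOException {
    Path outputDir =
        new Path(conf.get("mapreduce.output.fileoutputformat.outputdir"));
    FileSystem fs = outputDir.getFileSystem(conf);
    fs.create(new Path(outputDir, FileOutputCommitter.SUCCEEDED_FILE_NAME), true).close();
  }
}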
committer.setupJob(jobContext);
committer.setupTask(taskContext);
for (HiveWritableRecord record : records) {
  recordWriter.write(NullWritable.get(), record);
}
// Commit once after all records are written, not once per record.
committer.commitTask(taskContext);
committer.commitJob(jobContext);
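To see that same setup/write/commit ordering end to end against stock Hadoop classes, here is a self-contained sketch using FileOutputCommitter; the job ID, task ID, and output path are made up for the example:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

public class CommitOrderDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    JobContext jobContext = new JobContextImpl(conf, new JobID("demo", 0));
    TaskAttemptContext taskContext = new TaskAttemptContextImpl(conf,
        new TaskAttemptID("demo", 0, TaskType.MAP, 0, 0));
    OutputCommitter committer =
        new FileOutputCommitter(new Path("/tmp/commit-order-demo"), taskContext);

    committer.setupJob(jobContext);   // once per job
    committer.setupTask(taskContext); // once per task attempt
    // ... the task writes all of its records here ...
    if (committer.needsTaskCommit(taskContext)) {
      committer.commitTask(taskContext); // promote this attempt's output
    }
    committer.commitJob(jobContext);     // finalize job output, write _SUCCESS
  }
}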
OutputCommitter outputCommitter =
    context.outputFormat.getOutputCommitter(threadLocal.taskContext());
outputCommitter.setupTask(threadLocal.taskContext());
if (outputCommitter.needsTaskCommit(threadLocal.taskContext())) {
  outputCommitter.commitTask(threadLocal.taskContext());
}
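The threadLocal.taskContext() calls suggest each writer thread carries its own TaskAttemptContext, so parallel task commits never collide. A hypothetical sketch of such a holder, with the class name and ID scheme invented for illustration:

import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

class PerThreadTaskContext {
  private final Configuration conf;
  private final AtomicInteger nextTaskId = new AtomicInteger();
  // Each thread lazily gets its own context with a distinct task ID, so
  // work paths and commits from different threads never overlap.
  private final ThreadLocal<TaskAttemptContext> contexts =
      ThreadLocal.withInitial(() -> new TaskAttemptContextImpl(conf,
          new TaskAttemptID("multi-threaded", 0, TaskType.MAP,
              nextTaskId.getAndIncrement(), 0)));

  PerThreadTaskContext(Configuration conf) {
    this.conf = conf;
  }

  TaskAttemptContext taskContext() {
    return contexts.get();
  }
}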