// Group keys for the combiner using raw byte-order comparison of the serialized key.
job.setCombinerKeyGroupingComparatorClass(BytesWritable.Comparator.class);
/**
 * Runs the grouping scenario either through a map-side combiner or through reducers.
 *
 * @param combiner With combiner.
 * @throws Exception If failed.
 */
public void doTestGrouping(boolean combiner) throws Exception {
    HadoopGroupingTestState.values().clear();

    Job mrJob = Job.getInstance();

    mrJob.setInputFormatClass(InFormat.class);
    mrJob.setOutputFormatClass(OutFormat.class);

    mrJob.setOutputKeyClass(YearTemperature.class);
    mrJob.setOutputValueClass(Text.class);

    mrJob.setMapperClass(Mapper.class);

    if (combiner) {
        // Combiner path: grouping happens on the map side, no reduce phase at all.
        mrJob.setCombinerClass(MyReducer.class);
        mrJob.setNumReduceTasks(0);
        mrJob.setCombinerKeyGroupingComparatorClass(YearComparator.class);
    }
    else {
        // Reducer path: grouping happens on the reduce side across 4 reducers.
        mrJob.setReducerClass(MyReducer.class);
        mrJob.setNumReduceTasks(4);
        mrJob.setGroupingComparatorClass(YearComparator.class);
    }

    // Submit to the first grid node and wait up to 30 seconds for completion.
    grid(0).hadoop().submit(new HadoopJobId(UUID.randomUUID(), 2),
        createJobInfo(mrJob.getConfiguration(), null)).get(30000);

    // All accumulated state must have been consumed if grouping worked correctly.
    assertTrue(HadoopGroupingTestState.values().isEmpty());
}
/**
 * Wraps the combiner group comparator defined in the job with this
 * {@link CombinerGroupComparatorWrapper} if it is defined.
 *
 * @param job The MapReduce job
 */
static void wrap(Job job) {
    // Only install the wrapper when the job configuration actually declares a
    // combiner group comparator; setIfDefined records the original under ATTR_CLASS.
    boolean comparatorDefined =
        WrapperUtil.setIfDefined(job, MRJobConfig.COMBINER_GROUP_COMPARATOR_CLASS, ATTR_CLASS);

    if (comparatorDefined)
        job.setCombinerKeyGroupingComparatorClass(CombinerGroupComparatorWrapper.class);
}
/**
 * Wraps the combiner group comparator defined in the job with this
 * {@link CombinerGroupComparatorWrapper} if it is defined.
 *
 * @param job The MapReduce job
 */
static void wrap(Job job) {
    // Nothing to wrap when the job configuration declares no combiner group comparator.
    if (!WrapperUtil.setIfDefined(job, MRJobConfig.COMBINER_GROUP_COMPARATOR_CLASS, ATTR_CLASS))
        return;

    job.setCombinerKeyGroupingComparatorClass(CombinerGroupComparatorWrapper.class);
}
// Use the same comparator for reduce-side and combiner-side grouping so the
// combiner forms the same key groups the reducer will later see.
job.setGroupingComparatorClass(GroupComparator.class); job.setCombinerKeyGroupingComparatorClass(GroupComparator.class); job.setCombinerClass(Combiner.class);
// NOTE(review): setting min.num.spills.for.combine to 0 presumably makes the
// combiner run during merge even with a single spill — confirm against MapTask.
job.getConfiguration().setInt("min.num.spills.for.combine", 0);
// Group keys for the combiner using raw byte-order comparison of the serialized key.
job.setCombinerKeyGroupingComparatorClass(BytesWritable.Comparator.class);
// Same setting repeated (duplicate fragment from another occurrence).
job.setCombinerKeyGroupingComparatorClass(BytesWritable.Comparator.class);
/**
 * Reads the input/output paths from the table and configures the WordCount
 * Hadoop job with custom partitioning, sorting and grouping.
 *
 * @throws Exception If configuration fails.
 */
@Override public void initialize() throws Exception {
    // Paths are stored in the table under fixed keys; decode them back to strings.
    String in = Bytes.toString(table.read(Bytes.toBytes("inputPath")));
    String out = Bytes.toString(table.read(Bytes.toBytes("outputPath")));

    Job hadoopJob = getContext().getHadoopJob();

    WordCount.configureJob(hadoopJob, in, out);

    hadoopJob.setPartitionerClass(SimplePartitioner.class);
    hadoopJob.setNumReduceTasks(2);

    // One comparator drives sort order plus reduce-side and combiner-side grouping.
    hadoopJob.setSortComparatorClass(SimpleComparator.class);
    hadoopJob.setGroupingComparatorClass(SimpleComparator.class);
    hadoopJob.setCombinerKeyGroupingComparatorClass(SimpleComparator.class);
} }