// NOTE(review): fragment of a larger if/else chain inside generate() — the enclosing
// method is not fully visible in this chunk, so the code is left byte-identical.
// ReduceWork branch: wraps the Kryo-serialized JobConf in a HiveReduceFunction and
// attaches it to a ReduceTran built with the `caching` flag.
} else if (work instanceof ReduceWork) { ReduceTran reduceTran = new ReduceTran(caching); HiveReduceFunction reduceFunc = new HiveReduceFunction(confBytes, sparkReporter); reduceTran.setReduceFunction(reduceFunc); return reduceTran;
/**
 * Processes one partition of shuffled (key, value) pairs through Hive's reduce-side
 * record handler and returns the resulting (HiveKey, BytesWritable) iterator.
 *
 * @param it the partition's grouped input tuples
 * @return a lazily-evaluated iterator over the reducer's output records
 * @throws Exception if handler initialization or record processing fails
 */
@SuppressWarnings("unchecked")
@Override
public Iterator<Tuple2<HiveKey, BytesWritable>> call(Iterator<Tuple2<HiveKey, V>> it) throws Exception {
  initJobConf();
  // A fresh handler per partition; the result list pulls input rows lazily from `it`
  // as the caller iterates, so no materialization happens here.
  SparkReduceRecordHandler handler = new SparkReduceRecordHandler();
  HiveReduceFunctionResultList<V> output = new HiveReduceFunctionResultList<V>(it, handler);
  handler.init(jobConf, output, sparkReporter);
  return output;
}
/**
 * Runs Hive's reduce logic over a single partition of shuffled tuples.
 *
 * @param it iterator over the partition's (HiveKey, value) pairs
 * @return iterator of reduced (HiveKey, BytesWritable) output pairs, produced lazily
 * @throws Exception if the record handler cannot be initialized or a record fails
 */
@SuppressWarnings("unchecked")
@Override
public Iterator<Tuple2<HiveKey, BytesWritable>> call(Iterator<Tuple2<HiveKey, V>> it) throws Exception {
  initJobConf();
  // Wire the record handler and its result list together before any input is consumed;
  // records are processed on demand as the returned iterator is advanced.
  SparkReduceRecordHandler recordHandler = new SparkReduceRecordHandler();
  HiveReduceFunctionResultList<V> results = new HiveReduceFunctionResultList<V>(it, recordHandler);
  recordHandler.init(jobConf, results, sparkReporter);
  return results;
}
// NOTE(review): fragment of a larger if/else chain inside generate() — the enclosing
// method is not fully visible in this chunk, so the code is left byte-identical.
// ReduceWork branch: builds a ReduceTran tagged with the work's name and the work
// itself, then attaches a HiveReduceFunction built from the serialized JobConf.
} else if (work instanceof ReduceWork) { ReduceTran reduceTran = new ReduceTran(caching, work.getName(), work); HiveReduceFunction reduceFunc = new HiveReduceFunction(confBytes, sparkReporter); reduceTran.setReduceFunction(reduceFunc); return reduceTran;
/**
 * Reduce-side entry point for the grouped-values Spark API: consumes an iterator of
 * (HiveKey, Iterable&lt;BytesWritable&gt;) groups and returns an Iterable of reduced
 * (HiveKey, BytesWritable) pairs.
 *
 * NOTE(review): HiveReduceFunctionResultList is used as a raw type here, matching the
 * surrounding code; the @SuppressWarnings covers the resulting unchecked operations.
 *
 * @param it iterator over key-grouped input values for this partition
 * @return lazily-evaluated reduced output records
 * @throws Exception if handler initialization or record processing fails
 */
@SuppressWarnings("unchecked")
@Override
public Iterable<Tuple2<HiveKey, BytesWritable>> call(Iterator<Tuple2<HiveKey, Iterable<BytesWritable>>> it) throws Exception {
  initJobConf();
  // Handler and result list are mutually linked: the list feeds input groups to the
  // handler, and the handler writes its output back into the list.
  SparkReduceRecordHandler handler = new SparkReduceRecordHandler();
  HiveReduceFunctionResultList reduced = new HiveReduceFunctionResultList(it, handler);
  handler.init(jobConf, reduced, sparkReporter);
  return reduced;
}
/**
 * Translates a single {@link BaseWork} node into its Spark-side transformation.
 *
 * The work's JobConf is cloned, validated, and Kryo-serialized so it can be shipped
 * to executors inside the map/reduce function. Only MapWork and ReduceWork are
 * supported; anything else is a planner bug.
 *
 * @param work the Hive work unit to translate (MapWork or ReduceWork)
 * @return a MapTran or ReduceTran wrapping the corresponding Hive function
 * @throws Exception if conf cloning, spec checking, or serialization fails
 * @throws IllegalStateException if {@code work} is neither MapWork nor ReduceWork
 */
private SparkTran generate(BaseWork work) throws Exception {
  initStatsPublisher(work);
  JobConf workConf = cloneJobConf(work);
  checkSpecs(work, workConf);
  // Serialize once up front; the bytes are captured by the Hive function below.
  byte[] serializedConf = KryoSerializer.serializeJobConf(workConf);
  if (work instanceof MapWork) {
    MapTran tran = new MapTran();
    tran.setMapFunction(new HiveMapFunction(serializedConf, sparkReporter));
    return tran;
  }
  if (work instanceof ReduceWork) {
    ReduceTran tran = new ReduceTran();
    tran.setReduceFunction(new HiveReduceFunction(serializedConf, sparkReporter));
    return tran;
  }
  throw new IllegalStateException("AssertionError: expected either MapWork or ReduceWork, "
      + "but found " + work.getClass().getName());
}