/** Returns whether this task's plan includes a reduce phase (i.e. a reduce-side operator tree). */
@Override
public boolean hasReduce() {
  // A reduce phase exists exactly when the plan carries a reduce operator.
  return getWork().getReducer() != null;
}
/**
 * Checks that the configured number of reduce tasks is consistent with the
 * presence (or absence) of a reduce-side operator tree.
 *
 * @return a human-readable description of the inconsistency, or {@code null}
 *         when the plan is valid
 */
@SuppressWarnings("nls")
public String isInvalid() {
  if (getNumReduceTasks() >= 1 && getReducer() == null) {
    return "Reducers > 0 but no reduce operator";
  }
  if (getNumReduceTasks() == 0 && getReducer() != null) {
    return "Reducers == 0 but reduce operator specified";
  }
  // Reducer count and reduce operator agree.
  return null;
}
/**
 * Extracts the reduce-side join operator from the given task, if any.
 *
 * @param task the map-reduce task to inspect
 * @return the task's reducer cast to {@code JoinOperator}, or {@code null}
 *         when the task has no work or its reducer is not a join
 * @throws SemanticException declared for caller compatibility
 */
private JoinOperator getJoinOp(MapRedTask task) throws SemanticException {
  if (task.getWork() == null) {
    return null;
  }
  Operator<? extends Serializable> reducer = task.getWork().getReducer();
  // Only a reduce-side join qualifies; anything else yields null.
  return (reducer instanceof JoinOperator) ? (JoinOperator) reducer : null;
}
}
/** * Update counters relevant to this task. */ private void updateCounters(Counters ctrs, RunningJob rj) throws IOException { mapProgress = Math.round(rj.mapProgress() * 100); reduceProgress = Math.round(rj.reduceProgress() * 100); taskCounters.put("CNTR_NAME_" + getId() + "_MAP_PROGRESS", Long.valueOf(mapProgress)); taskCounters.put("CNTR_NAME_" + getId() + "_REDUCE_PROGRESS", Long.valueOf(reduceProgress)); if (ctrs == null) { // hadoop might return null if it cannot locate the job. // we may still be able to retrieve the job status - so ignore return; } for (Operator<? extends Serializable> op : work.getAliasToWork().values()) { op.updateCounters(ctrs); } if (work.getReducer() != null) { work.getReducer().updateCounters(ctrs); } }
/**
 * Make a best guess at trying to find the number of reducers.
 *
 * @param mrwork the map-reduce plan to inspect
 * @param conf   Hive configuration supplying the cluster-wide default
 * @return 0 for a map-only plan, the plan's explicit count when one is set
 *         (non-negative), otherwise the configured Hadoop default
 */
private static int getNumberOfReducers(MapredWork mrwork, HiveConf conf) {
  if (mrwork.getReducer() == null) {
    // No reduce operator: the job is map-only.
    return 0;
  }
  int planned = mrwork.getNumReduceTasks();
  // A negative count means "not set"; fall back to the configured default.
  return (planned >= 0) ? planned
      : conf.getIntVar(HiveConf.ConfVars.HADOOPNUMREDUCERS);
}
}
// Delegate fatal-error detection to the reduce-side operator tree, if any;
// a fatal error reported there aborts the enclosing check. (fragment — the
// closing braces are outside this view)
if (work.getReducer() != null) { if (work.getReducer().checkFatalErrors(ctrs, errMsg)) { return true;
@Override public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs) throws SemanticException { Task<? extends Serializable> task = (Task<? extends Serializable>) nd; if (!task.isMapRedTask() || task instanceof ConditionalTask || ((MapredWork) task.getWork()).getReducer() == null) { return null; } SkewJoinProcCtx skewJoinProcContext = new SkewJoinProcCtx(task, physicalContext.getParseContext()); Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); opRules.put(new RuleRegExp("R1", "JOIN%"), SkewJoinProcFactory .getJoinProc()); // The dispatcher fires the processor corresponding to the closest // matching rule and passes the context along Dispatcher disp = new DefaultRuleDispatcher(SkewJoinProcFactory .getDefaultProc(), opRules, skewJoinProcContext); GraphWalker ogw = new DefaultGraphWalker(disp); // iterator the reducer operator tree ArrayList<Node> topNodes = new ArrayList<Node>(); topNodes.add(((MapredWork) task.getWork()).getReducer()); ogw.startWalking(topNodes, null); return null; }
// Pick up the reduce-side operator tree root from the deserialized plan.
// (fragment — surrounding method not visible here)
reducer = gWork.getReducer();
// Wrap the reduce-side operator tree root in a collection so its counters
// can be extracted under a "<taskId>_REDUCE" prefix, mirroring the map side.
// (fragment — the reduceStarted() branch body is outside this view)
Collection<Operator<? extends Serializable>> reducerTopOps = new ArrayList<Operator<? extends Serializable>>(); reducerTopOps.add(mrTask.getWork().getReducer()); extractOperatorCounters(reducerTopOps, task.getId() + "_REDUCE"); if (mrTask.reduceStarted()) {
// When the plan is still map-only, initialize it as a map-join plan; the
// assert guards that initialization did not attach this reducer before the
// plans are joined. (fragment — enclosing control flow not visible here)
if (plan.getReducer() == null) { GenMapRedUtils.initMapJoinPlan(op, ctx, true, false, true, -1); assert plan.getReducer() != reducer; GenMapRedUtils.joinPlan(op, currTask, opMapTask, ctx, -1, false, true, false);
.getReducer(); if (reducer != null) { LOG.info("Generating counters for operator " + reducer);
// NOTE(review): fragment — completes a chained call fetching the reduce-side
// operator; counters are only generated when a reducer exists.
// A plan without a reducer yet is initialized in place; otherwise fall
// through to the merge path. (fragment — the else body is outside this view)
if (currPlan.getReducer() == null) { GenMapRedUtils.initPlan(op, ctx); } else {
// Map-only plan under a union: initialize the union plan and split it.
// When this operator's reducer is already the plan's reducer, the branch
// below handles it instead. (fragment — that branch body is not visible)
if (plan.getReducer() == null) { GenMapRedUtils.initUnionPlan(op, ctx); GenMapRedUtils.splitPlan(op, ctx); } else if (plan.getReducer() == reducer) {
// The current plan is expected to be map-only before attaching the map-join.
// (fragment — the else branch is outside this view)
assert currPlan.getReducer() == null; GenMapRedUtils.initMapJoinPlan(mapJoin, ctx, false, false, false, pos); } else {
// The cloned plan must still be map-only; record which task owns this
// map-join and clear the current map-join context. (fragment)
assert cplan.getReducer() == null; opTaskMap.put(mapJoin, currTask); opProcCtx.setCurrMapJoinOp(null);
// The current plan is expected to be map-only before initializing the
// map-join plan here. (fragment — the else branch is outside this view)
assert currPlan.getReducer() == null; GenMapRedUtils.initMapJoinPlan(mapJoin, ctx, true, false, false, pos); } else {
// Small-file merging is considered only when the matching flag is set AND
// the job shape matches: map-only merge for reducer-less plans, map-reduce
// merge when a reducer exists. Either case redirects output to a temporary
// dir (chDir) so a merge job can consume it. (fragment — the rest of the
// branch is outside this view)
boolean mergeMapOnly = hconf.getBoolVar(HiveConf.ConfVars.HIVEMERGEMAPFILES) && currWork.getReducer() == null; boolean mergeMapRed = hconf.getBoolVar(HiveConf.ConfVars.HIVEMERGEMAPREDFILES) && currWork.getReducer() != null; if (mergeMapOnly || mergeMapRed) { chDir = true;
// Wrap the reduce-side operator tree root in a collection so the reduce
// stage's operator graph is populated the same way as the map stage's.
// (fragment — surrounding method not visible here)
Collection<Operator<? extends Serializable>> reducerTopOps = new ArrayList<Operator<? extends Serializable>>(); reducerTopOps.add(mrTask.getWork().getReducer()); populateOperatorGraph(reduceTask, reducerTopOps);
// The current plan must be map-only here; record the map-join as current
// before initializing its plan. (fragment — enclosing branch not visible)
assert currPlan.getReducer() == null; ctx.setCurrMapJoinOp(mapJoin); GenMapRedUtils.initMapJoinPlan(mapJoin, ctx, true, true, false, pos);
// No reduce operator means a map-only job; tell the user why the reducer
// count ends up as zero. (fragment — closing braces are outside this view)
if (work.getReducer() == null) { console .printInfo("Number of reduce tasks is set to 0 since there's no reduce operator");