/**
 * Returns the parent operator in the walk path to the current operator.
 *
 * @param stack the stack encoding the path to the current operator
 * @return the operator one level up in the current walk path
 */
@SuppressWarnings("unchecked")
protected static Operator<? extends OperatorDesc> getParent(Stack<Node> stack) {
  // The ancestor at distance 1 is, by definition, the immediate parent.
  Object ancestor = Utils.getNthAncestor(stack, 1);
  return (Operator<? extends OperatorDesc>) ancestor;
}
/**
 * Dispatches the current operator, discarding the dispatch result.
 *
 * @param nd node being walked
 * @param ndStack stack of nodes encountered
 * @throws SemanticException if the dispatcher fails
 */
public void dispatch(Node nd, Stack<Node> ndStack) throws SemanticException {
  // Delegate to dispatchAndReturn; the returned value is intentionally ignored here.
  dispatchAndReturn(nd, ndStack);
}
/**
 * Runs every chained processor against the current node, in registration order.
 * Individual processor results are discarded; this composite always yields null.
 */
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  for (NodeProcessor delegate : procs) {
    delegate.process(nd, stack, procCtx, nodeOutputs);
  }
  return null;
}
}
private static void collectDynamicValuePredicates(ExprNodeDesc pred, NodeProcessorCtx ctx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining // the operator stack. The dispatcher // generates the plan from the operator tree Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new RuleRegExp("R1", ExprNodeDynamicValueDesc.class.getName() + "%"), new DynamicValuePredicateProc()); Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, ctx); GraphWalker egw = new DefaultGraphWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(pred); egw.startWalking(startNodes, null); }
@Override public ParseContext transform(ParseContext pctx) throws SemanticException { // 1. Trigger transformation Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"), new FilterTransformer()); Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null); GraphWalker ogw = new ForwardWalker(disp); List<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pctx.getTopOps().values()); ogw.startWalking(topNodes, null); return pctx; }
/**
 * Collects the URIs of all file sinks reachable from the given root nodes.
 *
 * @param topNodes root operators to start the walk from
 * @param uris output set; the processor accumulates file sink URIs into it
 */
private void collectFileSinkUris(List<Node> topNodes, Set<URI> uris) {
  CollectFileSinkUrisNodeProcessor processor = new CollectFileSinkUrisNodeProcessor(uris);
  Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
  addCollectFileSinkUrisRules(rules, processor);
  GraphWalker walker = new DefaultGraphWalker(new DefaultRuleDispatcher(processor, rules, null));
  try {
    walker.startWalking(topNodes, null);
  } catch (SemanticException e) {
    // This helper does not declare SemanticException; surface it as unchecked.
    throw new RuntimeException(e);
  }
}
private ExprNodeDesc generateInClauses(ExprNodeDesc predicate) throws SemanticException { Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new TypeRule(ExprNodeGenericFuncDesc.class), new StructInExprProcessor()); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, null); GraphWalker egw = new PreOrderOnceWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(predicate); HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); egw.startWalking(startNodes, outputMap); return (ExprNodeDesc) outputMap.get(predicate); } }
/**
 * Walks all root tasks with the skew-join dispatcher, which rewrites the
 * physical plan in place.
 *
 * @param pctx physical context holding the root tasks
 * @return the same physical context, resolved in place
 * @throws SemanticException if the walk fails
 */
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
  GraphWalker walker = new DefaultGraphWalker(new SkewJoinTaskDispatcher(pctx));
  ArrayList<Node> roots = new ArrayList<Node>(pctx.getRootTasks());
  walker.startWalking(roots, null);
  return pctx;
}
/**
 * Dispatches the current node and returns the dispatch result.
 *
 * Outputs previously recorded for the node's children in {@code retMap} are
 * gathered (in child order) and handed to the dispatcher; the dispatcher's
 * result is cached in {@code retMap} so parents walked later can see it.
 *
 * @param nd node being walked
 * @param ndStack stack of nodes encountered
 * @param <T> expected type of the dispatch result
 * @return the value produced by the dispatcher for this node
 * @throws SemanticException if the dispatcher fails
 */
@SuppressWarnings("unchecked") // caller-chosen T; cast mirrors getParent() above
public <T> T dispatchAndReturn(Node nd, Stack<Node> ndStack) throws SemanticException {
  Object[] nodeOutputs = null;
  // Hoist getChildren(): the original called it three times per dispatch.
  List<? extends Node> children = nd.getChildren();
  if (children != null) {
    nodeOutputs = new Object[children.size()];
    int i = 0;
    for (Node child : children) {
      // A child not yet dispatched simply contributes null here.
      nodeOutputs[i++] = retMap.get(child);
    }
  }
  Object retVal = dispatcher.dispatch(nd, ndStack, nodeOutputs);
  retMap.put(nd, retVal);
  return (T) retVal;
}
/**
 * Constructor.
 *
 * @param disp dispatcher invoked for each task node encountered during the walk
 */
public TaskGraphWalker(Dispatcher disp) {
  this.dispatcher = disp;
  this.opStack = new Stack<Node>();
  // The walker context shares retMap so processors can read prior dispatch results.
  this.walkerCtx = new TaskGraphWalkerContext(retMap);
}
/**
 * Walks the work's operator tree, firing this analyzer on every
 * MapJoinOperator; this object doubles as the processor context.
 *
 * @param work the work whose operator tree is analyzed
 * @return the warnings accumulated during the walk
 * @throws SemanticException if the walk fails
 */
List<String> analyze(BaseWork work) throws SemanticException {
  Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
  rules.put(new RuleRegExp("R1", MapJoinOperator.getOperatorName() + "%"), this);
  // Non-matching nodes fall through to the no-op default processor.
  GraphWalker walker =
      new DefaultGraphWalker(new DefaultRuleDispatcher(new NoopProcessor(), rules, this));
  ArrayList<Node> roots = new ArrayList<Node>(work.getAllRootOperators());
  walker.startWalking(roots, null);
  return warnings;
}
@Override public ParseContext transform(ParseContext pctx) throws SemanticException { // 1. Trigger transformation Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"), new StructInTransformer()); Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null); GraphWalker ogw = new ForwardWalker(disp); List<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pctx.getTopOps().values()); ogw.startWalking(topNodes, null); return pctx; }
/**
 * Returns the parent operator in the walk path to the current operator.
 *
 * @param stack the stack encoding the path to the current operator
 * @return the operator one level up in the current walk path
 */
@SuppressWarnings("unchecked")
protected static Operator<? extends OperatorDesc> getParent(Stack<Node> stack) {
  // Ancestor at distance 1 is the immediate parent in the walk path.
  Object parent = Utils.getNthAncestor(stack, 1);
  return (Operator<? extends OperatorDesc>) parent;
}
/**
 * Runs every chained processor against the current node, in registration order.
 * Individual processor results are discarded; this composite always yields null.
 */
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  for (NodeProcessor member : procs) {
    member.process(nd, stack, procCtx, nodeOutputs);
  }
  return null;
}
}
/**
 * Dispatches the current operator, discarding the dispatch result.
 *
 * @param nd node being walked
 * @param ndStack stack of nodes encountered
 * @throws SemanticException if the dispatcher fails
 */
public void dispatch(Node nd, Stack<Node> ndStack) throws SemanticException {
  // Delegate to dispatchAndReturn and drop the result.
  dispatchAndReturn(nd, ndStack);
}
/**
 * Walks the work's operator tree, firing this analyzer on every
 * ReduceSinkOperator; this object doubles as the processor context.
 *
 * @param work the work whose operator tree is analyzed
 * @return the reduce-sink info accumulated during the walk, keyed by tag
 * @throws SemanticException if the walk fails
 */
Map<Integer, Info> analyze(BaseWork work) throws SemanticException {
  Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
  rules.put(new RuleRegExp("R1", ReduceSinkOperator.getOperatorName() + "%"), this);
  // Non-matching nodes fall through to the no-op default processor.
  GraphWalker walker =
      new DefaultGraphWalker(new DefaultRuleDispatcher(new NoopProcessor(), rules, this));
  ArrayList<Node> roots = new ArrayList<Node>(work.getAllRootOperators());
  walker.startWalking(roots, null);
  return reduceSinkInfo;
}
/**
 * Returns the parent operator in the walk path to the current operator.
 *
 * @param stack the stack encoding the path to the current operator
 * @return the operator one level up in the current walk path
 */
@SuppressWarnings("unchecked")
protected static Operator<? extends OperatorDesc> getParent(Stack<Node> stack) {
  // The immediate parent is the ancestor at distance 1.
  Object ancestor = Utils.getNthAncestor(stack, 1);
  return (Operator<? extends OperatorDesc>) ancestor;
}
/**
 * Checks whether any GroupByOperator is reachable from the children of the
 * given map-join operator.
 *
 * @param mapjoinOp map-join operator whose subtree is scanned
 * @param context processor context handed to the fired processor
 * @return true if a group-by follows the map-join, false otherwise
 * @throws SemanticException if the walk fails
 */
private boolean hasGroupBy(Operator<? extends OperatorDesc> mapjoinOp,
    GenSparkProcContext context) throws SemanticException {
  SparkMapJoinFollowedByGroupByProcessor processor =
      new SparkMapJoinFollowedByGroupByProcessor();
  Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
  rules.put(new RuleRegExp("GBY", GroupByOperator.getOperatorName() + "%"), processor);
  GraphWalker walker = new DefaultGraphWalker(new DefaultRuleDispatcher(null, rules, context));
  // Start the walk from the map-join's children, not from the map-join itself.
  ArrayList<Node> roots = new ArrayList<Node>(mapjoinOp.getChildOperators());
  walker.startWalking(roots, null);
  return processor.getHasGroupBy();
}
/**
 * Returns the parent operator in the walk path to the current operator.
 *
 * @param stack the stack encoding the path to the current operator
 * @return the operator one level up in the current walk path
 */
@SuppressWarnings("unchecked")
protected static Operator<? extends OperatorDesc> getParent(Stack<Node> stack) {
  // Distance 1 from the top of the walk stack is the immediate parent.
  Object parent = Utils.getNthAncestor(stack, 1);
  return (Operator<? extends OperatorDesc>) parent;
}
/**
 * Walks the work's operator tree, firing this analyzer on every
 * MapJoinOperator; this object doubles as the processor context.
 *
 * @param work the work whose operator tree is analyzed
 * @return the warnings accumulated during the walk
 * @throws SemanticException if the walk fails
 */
List<String> analyze(BaseWork work) throws SemanticException {
  Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
  rules.put(new RuleRegExp("R1", MapJoinOperator.getOperatorName() + "%"), this);
  // Nodes that match no rule are handled by the no-op default processor.
  GraphWalker walker =
      new DefaultGraphWalker(new DefaultRuleDispatcher(new NoopProcessor(), rules, this));
  ArrayList<Node> roots = new ArrayList<Node>(work.getAllRootOperators());
  walker.startWalking(roots, null);
  return warnings;
}