private static void collectDynamicValuePredicates(ExprNodeDesc pred, NodeProcessorCtx ctx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining // the operator stack. The dispatcher // generates the plan from the operator tree Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new RuleRegExp("R1", ExprNodeDynamicValueDesc.class.getName() + "%"), new DynamicValuePredicateProc()); Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, ctx); GraphWalker egw = new DefaultGraphWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(pred); egw.startWalking(startNodes, null); }
/**
 * Collects the output URIs of all FileSink operators reachable from the given
 * root nodes into the supplied set.
 *
 * @param topNodes roots of the operator trees to walk
 * @param uris     set that accumulates the discovered file-sink URIs
 */
private void collectFileSinkUris(List<Node> topNodes, Set<URI> uris) {
  // The processor writes each discovered URI into 'uris' as a side effect.
  CollectFileSinkUrisNodeProcessor processor = new CollectFileSinkUrisNodeProcessor(uris);
  Map<Rule, NodeProcessor> ruleMap = new LinkedHashMap<Rule, NodeProcessor>();
  addCollectFileSinkUrisRules(ruleMap, processor);
  GraphWalker walker =
      new DefaultGraphWalker(new DefaultRuleDispatcher(processor, ruleMap, null));
  try {
    walker.startWalking(topNodes, null);
  } catch (SemanticException e) {
    // This method's signature does not declare SemanticException, so surface
    // the failure as unchecked while preserving the cause.
    throw new RuntimeException(e);
  }
}
/**
 * Walks all root tasks with {@link AnnotateRunTimeStatsDispatcher} and returns
 * the (possibly annotated) physical context.
 *
 * @param pctx physical context whose root tasks are walked
 * @return the same context instance, after annotation
 * @throws SemanticException if the walk fails
 */
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
  // No rule-specific processors are registered; the dispatcher does the work.
  Dispatcher dispatcher =
      new AnnotateRunTimeStatsDispatcher(pctx, new LinkedHashMap<Rule, NodeProcessor>());
  GraphWalker walker = new DefaultGraphWalker(dispatcher);
  List<Node> roots = new ArrayList<Node>(pctx.getRootTasks());
  walker.startWalking(roots, null);
  return pctx;
}
private static void collectDynamicValuePredicates(ExprNodeDesc pred, NodeProcessorCtx ctx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining // the operator stack. The dispatcher // generates the plan from the operator tree Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new RuleRegExp("R1", ExprNodeDynamicValueDesc.class.getName() + "%"), new DynamicValuePredicateProc()); Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, ctx); GraphWalker egw = new DefaultGraphWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(pred); egw.startWalking(startNodes, null); } }
private boolean evaluateOperators(BaseWork work) throws SemanticException { // lets take a look at the operators. we're checking for user // code in those. we will not run that in llap. Dispatcher disp = new DefaultRuleDispatcher(null, rules, null); GraphWalker ogw = new DefaultGraphWalker(disp); ArrayList<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(work.getAllRootOperators()); HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>(); ogw.startWalking(topNodes, nodeOutput); for (Node n : nodeOutput.keySet()) { if (nodeOutput.get(n) != null) { if (!((Boolean)nodeOutput.get(n))) { return false; } } } return true; }
/**
 * Walks all root tasks with the {@link SkewJoinTaskDispatcher}.
 *
 * @param pctx physical context whose root tasks are walked
 * @return the same context instance
 * @throws SemanticException if the walk fails
 */
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
  GraphWalker walker = new DefaultGraphWalker(new SkewJoinTaskDispatcher(pctx));
  List<Node> rootNodes = new ArrayList<Node>(pctx.getRootTasks());
  walker.startWalking(rootNodes, null);
  return pctx;
}
public static Map<Node, Object> collectDynamicPruningConditions(ExprNodeDesc pred, NodeProcessorCtx ctx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining // the operator stack. The dispatcher // generates the plan from the operator tree Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new RuleRegExp("R1", ExprNodeDynamicListDesc.class.getName() + "%"), new DynamicPartitionPrunerProc()); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, ctx); GraphWalker egw = new DefaultGraphWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(pred); HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); egw.startWalking(startNodes, outputMap); return outputMap; } }
private boolean evaluateOperators(BaseWork work) throws SemanticException { // lets take a look at the operators. we're checking for user // code in those. we will not run that in llap. Dispatcher disp = new DefaultRuleDispatcher(null, rules, null); GraphWalker ogw = new DefaultGraphWalker(disp); ArrayList<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(work.getAllRootOperators()); HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>(); ogw.startWalking(topNodes, nodeOutput); for (Node n : nodeOutput.keySet()) { if (nodeOutput.get(n) != null) { if (!((Boolean)nodeOutput.get(n))) { return false; } } } return true; }
/**
 * Runs the {@link AnnotateRunTimeStatsDispatcher} over all root tasks.
 *
 * @param pctx physical context whose root tasks are walked
 * @return the same context instance, after annotation
 * @throws SemanticException if the walk fails
 */
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
  // An empty rule map: all work happens inside the custom dispatcher.
  Map<Rule, NodeProcessor> emptyRules = new LinkedHashMap<Rule, NodeProcessor>();
  GraphWalker walker =
      new DefaultGraphWalker(new AnnotateRunTimeStatsDispatcher(pctx, emptyRules));
  List<Node> roots = new ArrayList<Node>(pctx.getRootTasks());
  walker.startWalking(roots, null);
  return pctx;
}
@Override public ParseContext transform(ParseContext pCtx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining the // operator stack. The dispatcher generates the plan from the operator tree Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); String FS = FileSinkOperator.getOperatorName() + "%"; opRules.put(new RuleRegExp("Sorted Dynamic Partition", FS), getSortDynPartProc(pCtx)); Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null); GraphWalker ogw = new DefaultGraphWalker(disp); ArrayList<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pCtx.getTopOps().values()); ogw.startWalking(topNodes, null); return pCtx; }
/**
 * Clears vectorization descriptors from every operator in the given map work's
 * alias-to-work operator trees.
 *
 * @param mapWork the map work to scrub
 * @throws SemanticException if the walk fails
 */
private void clearMapWorkVectorDescs(MapWork mapWork) throws SemanticException {
  ClearVectorDescsNodeProcessor processor = new ClearVectorDescsNodeProcessor();
  Map<Rule, NodeProcessor> ruleMap = new LinkedHashMap<Rule, NodeProcessor>();
  addMapWorkRules(ruleMap, processor);
  GraphWalker walker =
      new DefaultGraphWalker(new DefaultRuleDispatcher(processor, ruleMap, null));
  List<Node> roots = new ArrayList<Node>(mapWork.getAliasToWork().values());
  walker.startWalking(roots, null);
}
/**
 * Runs the {@link SkewJoinTaskDispatcher} over all root tasks.
 *
 * @param pctx physical context whose root tasks are walked
 * @return the same context instance
 * @throws SemanticException if the walk fails
 */
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
  Dispatcher skewJoinDispatcher = new SkewJoinTaskDispatcher(pctx);
  GraphWalker walker = new DefaultGraphWalker(skewJoinDispatcher);
  List<Node> roots = new ArrayList<Node>(pctx.getRootTasks());
  walker.startWalking(roots, null);
  return pctx;
}
private Map<Node, Object> collectDynamicPruningConditions(ExprNodeDesc pred, NodeProcessorCtx ctx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining // the operator stack. The dispatcher // generates the plan from the operator tree Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new RuleRegExp("R1", ExprNodeDynamicListDesc.class.getName() + "%"), new DynamicPartitionPrunerProc()); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, ctx); GraphWalker egw = new DefaultGraphWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(pred); HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); egw.startWalking(startNodes, outputMap); return outputMap; }
/**
 * Clears vectorization descriptors from every operator under the given reduce
 * work's reducer.
 *
 * @param reduceWork the reduce work to scrub
 * @throws SemanticException if the walk fails
 */
private void clearReduceWorkVectorDescs(ReduceWork reduceWork) throws SemanticException {
  ClearVectorDescsNodeProcessor processor = new ClearVectorDescsNodeProcessor();
  Map<Rule, NodeProcessor> ruleMap = new LinkedHashMap<Rule, NodeProcessor>();
  addReduceWorkRules(ruleMap, processor);
  GraphWalker walker =
      new DefaultGraphWalker(new DefaultRuleDispatcher(processor, ruleMap, null));
  List<Node> roots = new ArrayList<Node>();
  roots.add(reduceWork.getReducer());
  walker.startWalking(roots, null);
}
}
/**
 * Runs the {@link IndexWhereTaskDispatcher} over all root tasks.
 *
 * @param physicalContext context whose root tasks are walked
 * @return the same context instance
 * @throws SemanticException if the walk fails
 */
@Override
public PhysicalContext resolve(PhysicalContext physicalContext) throws SemanticException {
  GraphWalker taskWalker =
      new DefaultGraphWalker(new IndexWhereTaskDispatcher(physicalContext));
  List<Node> rootNodes = new ArrayList<Node>(physicalContext.getRootTasks());
  taskWalker.startWalking(rootNodes, null);
  return physicalContext;
}
}
@Override public ParseContext transform(ParseContext pCtx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining the // operator stack. The dispatcher generates the plan from the operator tree Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); String FS = FileSinkOperator.getOperatorName() + "%"; opRules.put(new RuleRegExp("Sorted Dynamic Partition Time Granularity", FS), getSortDynPartProc(pCtx)); Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null); GraphWalker ogw = new DefaultGraphWalker(disp); ArrayList<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pCtx.getTopOps().values()); ogw.startWalking(topNodes, null); return pCtx; }
// Fragment of a larger method (the enclosing signature is not visible in this
// chunk): walks the expression nodes in 'startNodes' and returns the lineage
// Dependency computed for 'expr'.
// NOTE(review): 'exprRules', 'exprCtx', 'startNodes', 'outputMap', and 'expr'
// are defined earlier in the enclosing method — confirm against the full file.
Dispatcher disp = new DefaultRuleDispatcher(getDefaultExprProcessor(), exprRules, exprCtx);
GraphWalker egw = new DefaultGraphWalker(disp);
// The walk populates 'outputMap' with each node's processor result.
egw.startWalking(startNodes, outputMap);
return (Dependency)outputMap.get(expr);
/**
 * Runs the LLAP pre-vectorization pass over all root tasks, unless LLAP
 * execution mode is {@code none}, in which case the context is returned
 * untouched.
 *
 * @param pctx physical context whose root tasks are walked
 * @return the same context instance
 * @throws SemanticException if the walk fails
 */
@Override
public PhysicalContext resolve(PhysicalContext pctx) throws SemanticException {
  HiveConf conf = pctx.getConf();
  LlapMode mode = LlapMode.valueOf(HiveConf.getVar(conf, HiveConf.ConfVars.LLAP_EXECUTION_MODE));
  // Nothing to do when LLAP is turned off for this query.
  if (mode == none) {
    LOG.info("LLAP disabled.");
    return pctx;
  }
  GraphWalker walker =
      new DefaultGraphWalker(new LlapPreVectorizationPassDispatcher(pctx));
  List<Node> roots = new ArrayList<Node>(pctx.getRootTasks());
  walker.startWalking(roots, null);
  return pctx;
}
@Override public ParseContext transform(ParseContext pctx) throws SemanticException { // 1. We apply the transformation Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); opRules.put(new RuleRegExp("R1", "(" + ReduceSinkOperator.getOperatorName() + "%)"), new ReduceSinkOutputOperatorAnnotator()); GraphWalker ogw = new DefaultGraphWalker(new DefaultRuleDispatcher(null, opRules, null)); ArrayList<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pctx.getTopOps().values()); ogw.startWalking(topNodes, null); return pctx; }
// Fragment of a larger method (the enclosing signature is not visible in this
// chunk): walks all operators reachable from 'rootOps' with UniqueOpIdChecker
// as the default processor (empty rule map, so it fires for every node).
// NOTE(review): 'rootOps' and 'nodeOutput' are defined earlier in the
// enclosing method — confirm against the full file.
Dispatcher disp = new DefaultRuleDispatcher(new UniqueOpIdChecker(), new HashMap<>(), null);
GraphWalker ogw = new DefaultGraphWalker(disp);
ogw.startWalking(rootOps, nodeOutput);