@Override public ParseContext transform(ParseContext pctx) throws SemanticException { // 1. We apply the transformation Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); opRules.put(new RuleRegExp("R1", "(" + ReduceSinkOperator.getOperatorName() + "%)"), new ReduceSinkOutputOperatorAnnotator()); GraphWalker ogw = new DefaultGraphWalker(new DefaultRuleDispatcher(null, opRules, null)); ArrayList<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pctx.getTopOps().values()); ogw.startWalking(topNodes, null); return pctx; }
private static void collectDynamicValuePredicates(ExprNodeDesc pred, NodeProcessorCtx ctx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining // the operator stack. The dispatcher // generates the plan from the operator tree Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new RuleRegExp("R1", ExprNodeDynamicValueDesc.class.getName() + "%"), new DynamicValuePredicateProc()); Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, ctx); GraphWalker egw = new DefaultGraphWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(pred); egw.startWalking(startNodes, null); }
/**
 * Collects the output URIs of every file sink reachable from the given roots
 * into {@code uris}.
 *
 * @param topNodes root operators to start walking from
 * @param uris     output set the processor adds file-sink URIs to
 */
private void collectFileSinkUris(List<Node> topNodes, Set<URI> uris) {
  CollectFileSinkUrisNodeProcessor processor = new CollectFileSinkUrisNodeProcessor(uris);
  Map<Rule, NodeProcessor> ruleMap = new LinkedHashMap<Rule, NodeProcessor>();
  addCollectFileSinkUrisRules(ruleMap, processor);
  GraphWalker walker =
      new DefaultGraphWalker(new DefaultRuleDispatcher(processor, ruleMap, null));
  try {
    walker.startWalking(topNodes, null);
  } catch (SemanticException e) {
    // Caller's signature has no checked exception; rethrow unchecked, cause preserved.
    throw new RuntimeException(e);
  }
}
private ExprNodeDesc generateInClauses(ExprNodeDesc predicate) throws SemanticException { Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new TypeRule(ExprNodeGenericFuncDesc.class), new StructInExprProcessor()); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, null); GraphWalker egw = new PreOrderOnceWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(predicate); HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); egw.startWalking(startNodes, outputMap); return (ExprNodeDesc) outputMap.get(predicate); } }
@Override public ParseContext transform(ParseContext pCtx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining the // operator stack. The dispatcher generates the plan from the operator tree Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); String FS = FileSinkOperator.getOperatorName() + "%"; opRules.put(new RuleRegExp("Sorted Dynamic Partition", FS), getSortDynPartProc(pCtx)); Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null); GraphWalker ogw = new DefaultGraphWalker(disp); ArrayList<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pCtx.getTopOps().values()); ogw.startWalking(topNodes, null); return pCtx; }
private static void collectDynamicValuePredicates(ExprNodeDesc pred, NodeProcessorCtx ctx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining // the operator stack. The dispatcher // generates the plan from the operator tree Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new RuleRegExp("R1", ExprNodeDynamicValueDesc.class.getName() + "%"), new DynamicValuePredicateProc()); Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, ctx); GraphWalker egw = new DefaultGraphWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(pred); egw.startWalking(startNodes, null); } }
private ExprNodeDesc generateInClause(ExprNodeDesc predicate) throws SemanticException { Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new TypeRule(ExprNodeGenericFuncDesc.class), new OrExprProcessor()); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, null); GraphWalker egw = new PreOrderOnceWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(predicate); HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); egw.startWalking(startNodes, outputMap); return (ExprNodeDesc) outputMap.get(predicate); } }
private boolean evaluateOperators(BaseWork work) throws SemanticException { // lets take a look at the operators. we're checking for user // code in those. we will not run that in llap. Dispatcher disp = new DefaultRuleDispatcher(null, rules, null); GraphWalker ogw = new DefaultGraphWalker(disp); ArrayList<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(work.getAllRootOperators()); HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>(); ogw.startWalking(topNodes, nodeOutput); for (Node n : nodeOutput.keySet()) { if (nodeOutput.get(n) != null) { if (!((Boolean)nodeOutput.get(n))) { return false; } } } return true; }
@Override public ParseContext transform(ParseContext pCtx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining the // operator stack. The dispatcher generates the plan from the operator tree Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); String FS = FileSinkOperator.getOperatorName() + "%"; opRules.put(new RuleRegExp("Sorted Dynamic Partition Time Granularity", FS), getSortDynPartProc(pCtx)); Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null); GraphWalker ogw = new DefaultGraphWalker(disp); ArrayList<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pCtx.getTopOps().values()); ogw.startWalking(topNodes, null); return pCtx; }
public static Map<Node, Object> collectDynamicPruningConditions(ExprNodeDesc pred, NodeProcessorCtx ctx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining // the operator stack. The dispatcher // generates the plan from the operator tree Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new RuleRegExp("R1", ExprNodeDynamicListDesc.class.getName() + "%"), new DynamicPartitionPrunerProc()); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, ctx); GraphWalker egw = new DefaultGraphWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(pred); HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); egw.startWalking(startNodes, outputMap); return outputMap; } }
private ExprNodeDesc generateInClauses(ExprNodeDesc predicate) throws SemanticException { Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new TypeRule(ExprNodeGenericFuncDesc.class), new StructInExprProcessor()); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, null); GraphWalker egw = new PreOrderOnceWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(predicate); HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); egw.startWalking(startNodes, outputMap); return (ExprNodeDesc) outputMap.get(predicate); } }
private boolean evaluateOperators(BaseWork work) throws SemanticException { // lets take a look at the operators. we're checking for user // code in those. we will not run that in llap. Dispatcher disp = new DefaultRuleDispatcher(null, rules, null); GraphWalker ogw = new DefaultGraphWalker(disp); ArrayList<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(work.getAllRootOperators()); HashMap<Node, Object> nodeOutput = new HashMap<Node, Object>(); ogw.startWalking(topNodes, nodeOutput); for (Node n : nodeOutput.keySet()) { if (nodeOutput.get(n) != null) { if (!((Boolean)nodeOutput.get(n))) { return false; } } } return true; }
@Override public ParseContext transform(ParseContext pctx) throws SemanticException { // 1. Trigger transformation Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"), new FilterTransformer()); Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null); GraphWalker ogw = new ForwardWalker(disp); List<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pctx.getTopOps().values()); ogw.startWalking(topNodes, null); return pctx; }
private Map<Node, Object> collectDynamicPruningConditions(ExprNodeDesc pred, NodeProcessorCtx ctx) throws SemanticException { // create a walker which walks the tree in a DFS manner while maintaining // the operator stack. The dispatcher // generates the plan from the operator tree Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new RuleRegExp("R1", ExprNodeDynamicListDesc.class.getName() + "%"), new DynamicPartitionPrunerProc()); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, ctx); GraphWalker egw = new DefaultGraphWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(pred); HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); egw.startWalking(startNodes, outputMap); return outputMap; }
private ExprNodeDesc generateInClause(ExprNodeDesc predicate) throws SemanticException { Map<Rule, NodeProcessor> exprRules = new LinkedHashMap<Rule, NodeProcessor>(); exprRules.put(new TypeRule(ExprNodeGenericFuncDesc.class), new OrExprProcessor()); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along Dispatcher disp = new DefaultRuleDispatcher(null, exprRules, null); GraphWalker egw = new PreOrderOnceWalker(disp); List<Node> startNodes = new ArrayList<Node>(); startNodes.add(predicate); HashMap<Node, Object> outputMap = new HashMap<Node, Object>(); egw.startWalking(startNodes, outputMap); return (ExprNodeDesc) outputMap.get(predicate); } }
/**
 * Clears vectorization descriptors on every operator reachable from the map
 * work's alias roots.
 *
 * @param mapWork the map work whose operator trees are cleared
 * @throws SemanticException if the graph walk fails
 */
private void clearMapWorkVectorDescs(MapWork mapWork) throws SemanticException {
  ClearVectorDescsNodeProcessor clearProc = new ClearVectorDescsNodeProcessor();
  Map<Rule, NodeProcessor> ruleMap = new LinkedHashMap<Rule, NodeProcessor>();
  addMapWorkRules(ruleMap, clearProc);
  GraphWalker walker =
      new DefaultGraphWalker(new DefaultRuleDispatcher(clearProc, ruleMap, null));
  List<Node> roots = new ArrayList<Node>(mapWork.getAliasToWork().values());
  walker.startWalking(roots, null);
}
@Override public ParseContext transform(ParseContext pctx) throws SemanticException { Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); opRules.put(new RuleRegExp("R1", "TS%.*RS%JOIN%"), getSkewJoinProc(pctx)); SkewJoinOptProcCtx skewJoinOptProcCtx = new SkewJoinOptProcCtx(pctx); // The dispatcher fires the processor corresponding to the closest matching // rule and passes the context along Dispatcher disp = new DefaultRuleDispatcher( null, opRules, skewJoinOptProcCtx); GraphWalker ogw = new DefaultGraphWalker(disp); // Create a list of topop nodes List<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pctx.getTopOps().values()); ogw.startWalking(topNodes, null); return pctx; }
/**
 * Walks the work's operator trees, running this analyzer on every MapJoin
 * operator (a no-op processor handles everything else), and returns the
 * accumulated warnings.
 *
 * @param work the work whose root operators seed the walk
 * @return the warnings collected during the walk
 * @throws SemanticException if the graph walk fails
 */
List<String> analyze(BaseWork work) throws SemanticException {
  Map<Rule, NodeProcessor> ruleMap = new LinkedHashMap<Rule, NodeProcessor>();
  ruleMap.put(new RuleRegExp("R1", MapJoinOperator.getOperatorName() + "%"), this);
  // This object doubles as the processor context; NoopProcessor is the default.
  Dispatcher dispatcher = new DefaultRuleDispatcher(new NoopProcessor(), ruleMap, this);
  GraphWalker walker = new DefaultGraphWalker(dispatcher);
  List<Node> roots = new ArrayList<Node>(work.getAllRootOperators());
  walker.startWalking(roots, null);
  return warnings;
}
/**
 * Clears vectorization descriptors on every operator reachable from the
 * reduce work's reducer.
 *
 * @param reduceWork the reduce work whose operator tree is cleared
 * @throws SemanticException if the graph walk fails
 */
private void clearReduceWorkVectorDescs(ReduceWork reduceWork) throws SemanticException {
  ClearVectorDescsNodeProcessor clearProc = new ClearVectorDescsNodeProcessor();
  Map<Rule, NodeProcessor> ruleMap = new LinkedHashMap<Rule, NodeProcessor>();
  addReduceWorkRules(ruleMap, clearProc);
  List<Node> roots = new ArrayList<Node>();
  roots.add(reduceWork.getReducer());
  GraphWalker walker =
      new DefaultGraphWalker(new DefaultRuleDispatcher(clearProc, ruleMap, null));
  walker.startWalking(roots, null);
}
}
@Override public ParseContext transform(ParseContext pctx) throws SemanticException { // 1. Trigger transformation Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>(); opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"), new StructInTransformer()); Dispatcher disp = new DefaultRuleDispatcher(null, opRules, null); GraphWalker ogw = new ForwardWalker(disp); List<Node> topNodes = new ArrayList<Node>(); topNodes.addAll(pctx.getTopOps().values()); ogw.startWalking(topNodes, null); return pctx; }