/**
 * Splices {@code node} into the expression tree at the position currently occupied by
 * {@code ctx.parent}. If there is no grandparent, the parent was the root expression,
 * so the filter's whole predicate is replaced on {@code desc} instead.
 */
private void replaceExprNode(DynamicListContext ctx, FilterDesc desc, ExprNodeDesc node) {
  if (ctx.grandParent == null) {
    // Parent was the root of the predicate tree; install the replacement at the top.
    desc.setPredicate(node);
    return;
  }
  // Replace the parent in-place among the grandparent's children, keeping its slot.
  int pos = ctx.grandParent.getChildren().indexOf(ctx.parent);
  ctx.grandParent.getChildren().remove(pos);
  ctx.grandParent.getChildren().add(pos, node);
}
/**
 * Swaps {@code node} into the predicate tree where {@code ctx.parent} currently sits.
 * With no grandparent the parent is the root, so the FilterDesc predicate itself is
 * overwritten; otherwise the parent's slot in the grandparent's child list is reused.
 */
private void replaceExprNode(DynamicListContext ctx, FilterDesc desc, ExprNodeDesc node) {
  if (ctx.grandParent != null) {
    // Preserve the child's position: remove the old parent, insert the new node at the
    // same index.
    int idx = ctx.grandParent.getChildren().indexOf(ctx.parent);
    ctx.grandParent.getChildren().remove(idx);
    ctx.grandParent.getChildren().add(idx, node);
  } else {
    // No grandparent: the node being replaced was the entire predicate.
    desc.setPredicate(node);
  }
}
@Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator filterOp = (FilterOperator) nd; ExprNodeDesc predicate = filterOp.getConf().getPredicate(); // Generate the list bucketing pruning predicate ExprNodeDesc newPredicate = generateInClause(predicate); if (newPredicate != null) { // Replace filter in current FIL with new FIL if (LOG.isDebugEnabled()) { LOG.debug("Generated new predicate with IN clause: " + newPredicate); } filterOp.getConf().setPredicate(newPredicate); } return null; }
@Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator filterOp = (FilterOperator) nd; ExprNodeDesc predicate = filterOp.getConf().getPredicate(); // Generate the list bucketing pruning predicate ExprNodeDesc newPredicate = generateInClause(predicate); if (newPredicate != null) { // Replace filter in current FIL with new FIL if (LOG.isDebugEnabled()) { LOG.debug("Generated new predicate with IN clause: " + newPredicate); } filterOp.getConf().setPredicate(newPredicate); } return null; }
@Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator filterOp = (FilterOperator) nd; ExprNodeDesc predicate = filterOp.getConf().getPredicate(); // Generate the list bucketing pruning predicate as 2 separate IN clauses // containing the partitioning and non-partitioning columns. ExprNodeDesc newPredicate = generateInClauses(predicate); if (newPredicate != null) { // Replace filter in current FIL with new FIL if (LOG.isDebugEnabled()) { LOG.debug("Generated new predicate with IN clause: " + newPredicate); } final List<ExprNodeDesc> subExpr = new ArrayList<ExprNodeDesc>(2); subExpr.add(predicate); subExpr.add(newPredicate); ExprNodeGenericFuncDesc newFilterPredicate = new ExprNodeGenericFuncDesc( TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getFunctionInfo(AND_UDF).getGenericUDF(), subExpr); filterOp.getConf().setPredicate(newFilterPredicate); } return null; }
@Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator filterOp = (FilterOperator) nd; ExprNodeDesc predicate = filterOp.getConf().getPredicate(); // Generate the list bucketing pruning predicate as 2 separate IN clauses // containing the partitioning and non-partitioning columns. ExprNodeDesc newPredicate = generateInClauses(predicate); if (newPredicate != null) { // Replace filter in current FIL with new FIL if (LOG.isDebugEnabled()) { LOG.debug("Generated new predicate with IN clause: " + newPredicate); } final List<ExprNodeDesc> subExpr = new ArrayList<ExprNodeDesc>(2); subExpr.add(predicate); subExpr.add(newPredicate); ExprNodeGenericFuncDesc newFilterPredicate = new ExprNodeGenericFuncDesc( TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getFunctionInfo(AND_UDF).getGenericUDF(), subExpr); filterOp.getConf().setPredicate(newFilterPredicate); } return null; }
// NOTE(review): fragment of a larger processor method — surrounding context not visible here.
// Install the rewritten condition as the filter's predicate, then run foldOperator on
// it; returns null, presumably the no-output contract of this node processor — confirm
// against the enclosing NodeProcessor implementation.
op.getConf().setPredicate(newCondn); foldOperator(op, cppCtx); return null;
// NOTE(review): fragment — the enclosing method is outside this view.
// Set the new condition on the operator's descriptor and invoke foldOperator with the
// constant-propagation context before returning null (no node output).
op.getConf().setPredicate(newCondn); foldOperator(op, cppCtx); return null;
// NOTE(review): fragment — the else-branch continuation is outside this view.
// No parent means the node being replaced was the root of the predicate tree, so the
// constant becomes the whole predicate on the FilterDesc; otherwise locate the node's
// slot among its parent's children (replacement presumably continues below — verify).
if (nodeParent == null) { filterDesc.setPredicate(constNode); } else { int i = nodeParent.getChildren().indexOf(nodeToRemove);
// NOTE(review): fragment — the enclosing if/else is only partially visible.
// When the parent operator is itself a FilterOperator, fold its existing predicate
// into the collected expression list, merge everything into a single conjunction, and
// install the merged predicate back on that parent filter. The else path merges the
// collected expressions on their own (continuation not visible here).
exprs = ExprNodeDescUtils.split(((FilterOperator)parent).getConf().getPredicate(), exprs); ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs); ((FilterOperator)parent).getConf().setPredicate(merged); } else { ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs);
// NOTE(review): fragment cut mid-branch — else body continues past this view.
// Root-level case: with no parent, replace the FilterDesc's entire predicate with the
// constant node; non-root case: find the index of the node to remove within its
// parent's child list so it can be swapped out in place.
if (nodeParent == null) { filterDesc.setPredicate(constNode); } else { int i = nodeParent.getChildren().indexOf(nodeToRemove);
// NOTE(review): fragment of a larger method.
// Conjoin the filter's current predicate with filterPred and set the merged result
// back on the filter's descriptor.
preds.add(f.getConf().getPredicate()); preds.add(filterPred); f.getConf().setPredicate(ExprNodeDescUtils.mergePredicates(preds));
// NOTE(review): fragment — surrounding if/else not fully visible.
// If the parent is already a FilterOperator, split its predicate into the running
// expression list, merge the lot into one predicate, and push the result onto that
// parent; otherwise merge the collected expressions alone (else body truncated here).
exprs = ExprNodeDescUtils.split(((FilterOperator)parent).getConf().getPredicate(), exprs); ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs); ((FilterOperator)parent).getConf().setPredicate(merged); } else { ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs);
// NOTE(review): fragment starting mid-if and ending mid-else — enclosing branches are
// outside this view.
// Then-branch tail: the constant replaces the whole predicate (the node had no parent
// in the expression tree). Else-branch head: locate the child's index within its
// parent's children for an in-place swap.
desc.setPredicate(constNode); } else { int i = parent.getChildren().indexOf(child);
// NOTE(review): fragment — both enclosing branches are truncated in this view.
// Root replacement: set the constant as the FilterDesc's predicate; otherwise find
// where the child sits in its parent's child list (replacement continues below).
desc.setPredicate(constNode); } else { int i = parent.getChildren().indexOf(child);
/**
 * Builds a vectorized filter operator whose predicate is a bare reference to a single
 * long column {@code "col1"} of table {@code "table"} — a minimal fixture for filter
 * vectorization tests.
 *
 * @return the vectorized form of the constructed FilterOperator
 * @throws HiveException if vectorization fails
 */
private VectorFilterOperator getAVectorFilterOperator() throws HiveException {
  // Single-column schema used by the vectorization context.
  List<String> columns = new ArrayList<String>();
  columns.add("col1");
  // Predicate = just the column expression itself.
  ExprNodeColumnDesc columnExpr = new ExprNodeColumnDesc(Long.class, "col1", "table", false);
  FilterDesc filterDesc = new FilterDesc();
  filterDesc.setPredicate(columnExpr);
  Operator<? extends OperatorDesc> filterOp =
      OperatorFactory.get(new CompilationOpContext(), filterDesc);
  VectorizationContext vContext = new VectorizationContext("name", columns);
  VectorFilterDesc vectorDesc = new VectorFilterDesc();
  return (VectorFilterOperator) Vectorizer.vectorizeFilterOperator(filterOp, vContext, vectorDesc);
}
// NOTE(review): fragment cut at both ends — then-branch tail and else-branch head.
// With no grandparent the parent was the root predicate, so the constant replaces it
// wholesale; otherwise find the parent's index among the grandparent's children so the
// replacement can be inserted at the same position (continuation not visible).
filterDesc.setPredicate(constNode); } else { int i = ctx.grandParent.getChildren().indexOf(ctx.parent);
@Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator cFIL = (FilterOperator) nd; FilterOperator pFIL = (FilterOperator) stack.get(stack.size() - 2); // Sampling predicates can be merged with predicates from children because PPD/PPR is // already applied. But to clarify the intention of sampling, just skips merging. if (pFIL.getConf().getIsSamplingPred()) { return null; } List<ExprNodeDesc> splits = new ArrayList<ExprNodeDesc>(); ExprNodeDescUtils.split(cFIL.getConf().getPredicate(), splits); ExprNodeDescUtils.split(pFIL.getConf().getPredicate(), splits); pFIL.getConf().setPredicate(ExprNodeDescUtils.mergePredicates(splits)); // if any of filter is sorted filter, it's sorted filter boolean sortedFilter = pFIL.getConf().isSortedFilter() || cFIL.getConf().isSortedFilter(); pFIL.getConf().setSortedFilter(sortedFilter); pFIL.removeChildAndAdoptItsChildren(cFIL); cFIL.setParentOperators(null); cFIL.setChildOperators(null); cFIL = null; return null; } }
@Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator cFIL = (FilterOperator) nd; FilterOperator pFIL = (FilterOperator) stack.get(stack.size() - 2); // Sampling predicates can be merged with predicates from children because PPD/PPR is // already applied. But to clarify the intention of sampling, just skips merging. if (pFIL.getConf().getIsSamplingPred()) { return null; } List<ExprNodeDesc> splits = new ArrayList<ExprNodeDesc>(); ExprNodeDescUtils.split(cFIL.getConf().getPredicate(), splits); ExprNodeDescUtils.split(pFIL.getConf().getPredicate(), splits); pFIL.getConf().setPredicate(ExprNodeDescUtils.mergePredicates(splits)); // if any of filter is sorted filter, it's sorted filter boolean sortedFilter = pFIL.getConf().isSortedFilter() || cFIL.getConf().isSortedFilter(); pFIL.getConf().setSortedFilter(sortedFilter); pFIL.removeChildAndAdoptItsChildren(cFIL); cFIL.setParentOperators(null); cFIL.setChildOperators(null); cFIL = null; return null; } }
// NOTE(review): fragment — the constructor call opens before this view and the
// else-branch continues past it.
// Tail of building an AND predicate from a clone of the table scan's expression and
// the filter's own expression, installed on the existing filter operator; the else
// path instead creates a brand-new FilterOperator (continuation not visible).
new GenericUDFOPAnd(), Arrays.<ExprNodeDesc>asList(tableScanExprNode.clone(), filterExprNode)); filterOp.getConf().setPredicate(newPred); } else { Operator<FilterDesc> newOp = OperatorFactory.get(tsOp.getCompilationOpContext(),