@Override
public Object clone() {
  // Deep-copy the predicate; the sampling flag travels with the copy, and the
  // sample descriptor is only carried over when this is a sampling predicate.
  final FilterDesc copy = new FilterDesc(getPredicate().clone(), getIsSamplingPred());
  if (getIsSamplingPred()) {
    copy.setSampleDescr(getSampleDescr());
  }
  copy.setSortedFilter(isSortedFilter());
  return copy;
}
@Override public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs) throws SemanticException { FilterOperator filterOp = (FilterOperator) nd; ExprNodeDesc predicate = filterOp.getConf().getPredicate(); // Generate the list bucketing pruning predicate ExprNodeDesc newPredicate = generateInClause(predicate); if (newPredicate != null) { // Replace filter in current FIL with new FIL if (LOG.isDebugEnabled()) { LOG.debug("Generated new predicate with IN clause: " + newPredicate); } filterOp.getConf().setPredicate(newPredicate); } return null; }
@Override
public boolean isSame(OperatorDesc other) {
  // Guard against null: the original dereferenced other.getClass() directly and
  // would throw an NPE; a missing descriptor can never be "the same".
  if (other == null || !getClass().getName().equals(other.getClass().getName())) {
    return false;
  }
  FilterDesc otherDesc = (FilterDesc) other;
  // Two filter descriptors match when predicate text, sample expression and the
  // sampling flag all agree. Objects.equals handles null-safe comparison.
  return Objects.equals(getPredicateString(), otherDesc.getPredicateString())
      && Objects.equals(getSampleDescExpr(), otherDesc.getSampleDescExpr())
      && getIsSamplingPred() == otherDesc.getIsSamplingPred();
}
// Merges a child FilterOperator (nd) into its parent FilterOperator by AND-ing
// their predicates, then removes the child from the operator tree.
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  FilterOperator cFIL = (FilterOperator) nd;
  // The parent filter sits one below the current node on the walker stack.
  FilterOperator pFIL = (FilterOperator) stack.get(stack.size() - 2);
  // Sampling predicates can be merged with predicates from children because PPD/PPR is
  // already applied. But to clarify the intention of sampling, just skips merging.
  if (pFIL.getConf().getIsSamplingPred()) {
    return null;
  }
  // Split both predicates into conjuncts, then merge into one predicate on the parent.
  List<ExprNodeDesc> splits = new ArrayList<ExprNodeDesc>();
  ExprNodeDescUtils.split(cFIL.getConf().getPredicate(), splits);
  ExprNodeDescUtils.split(pFIL.getConf().getPredicate(), splits);
  pFIL.getConf().setPredicate(ExprNodeDescUtils.mergePredicates(splits));
  // if any of filter is sorted filter, it's sorted filter
  boolean sortedFilter = pFIL.getConf().isSortedFilter() || cFIL.getConf().isSortedFilter();
  pFIL.getConf().setSortedFilter(sortedFilter);
  // Detach the child: its children are re-parented onto pFIL, then its links are
  // cleared so the removed operator holds no references into the tree.
  pFIL.removeChildAndAdoptItsChildren(cFIL);
  cFIL.setParentOperators(null);
  cFIL.setChildOperators(null);
  cFIL = null;
  return null;
}
}
if (fop2 != null && !fop2.getConf().getIsSamplingPred()) { return null; if (fop.getConf().getIsSamplingPred()) { return null; ExprNodeDesc predicate = fop.getConf().getPredicate(); String alias = top.getConf().getAlias(); owc.getOpToRemove().add(new PcrOpWalkerCtx.OpToDeleteInfo(pop, fop)); } else { fop.getConf().setPredicate(wrapper.outExpr); fop.getConf().setPredicate(wrapper.outExpr); } else { LOG.warn("Filter passes no row"); fop.getConf().setPredicate(wrapper.outExpr);
ExprNodeDescUtils.getExprNodeColumnDesc(Arrays.asList(((FilterDesc)input.getConf()).getPredicate()), hashes); FilterOperator f = (FilterOperator) input; List<ExprNodeDesc> preds = new ArrayList<ExprNodeDesc>(); preds.add(f.getConf().getPredicate()); preds.add(filterPred); f.getConf().setPredicate(ExprNodeDescUtils.mergePredicates(preds)); FilterDesc filterDesc = new FilterDesc(filterPred, false); filterDesc.setGenerated(true); Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(filterDesc, new RowSchema(inputRR.getColumnInfos()), input), inputRR);
/**
 * Splices a new (non-sampling) FilterOperator between parent and target,
 * using the parent's row schema for the filter's output.
 *
 * @param target     operator the filter will feed into
 * @param parent     operator the filter will read from
 * @param parentRS   row schema copied onto the new filter
 * @param filterExpr predicate for the new filter
 * @return the newly inserted filter operator
 */
private Operator<FilterDesc> createFilter(Operator<?> target, Operator<?> parent,
    RowSchema parentRS, ExprNodeDesc filterExpr) {
  final FilterDesc desc = new FilterDesc(filterExpr, false);
  final Operator<FilterDesc> filter = OperatorFactory.get(
      parent.getCompilationOpContext(), desc, new RowSchema(parentRS.getSignature()));
  // Wire the filter in on both sides of the parent -> target edge.
  filter.getParentOperators().add(parent);
  filter.getChildOperators().add(target);
  parent.replaceChild(target, filter);
  target.replaceParent(parent, filter);
  return filter;
}
if (op instanceof FilterOperator) { FilterOperator filterOp = (FilterOperator) op; ExprNodeDesc filterExprNode = filterOp.getConf().getPredicate(); if (tableScanExprNode.isSame(filterExprNode)) { new GenericUDFOPAnd(), Arrays.<ExprNodeDesc>asList(tableScanExprNode.clone(), filterExprNode)); filterOp.getConf().setPredicate(newPred); } else { Operator<FilterDesc> newOp = OperatorFactory.get(tsOp.getCompilationOpContext(), new FilterDesc(tableScanExprNode.clone(), false), new RowSchema(tsOp.getSchema().getSignature())); tsOp.replaceChild(op, newOp);
@Override
public Object process(Node n, Stack<Node> s, NodeProcessorCtx c, Object... os) {
  // Decide whether this filter's predicate can be evaluated in LLAP mode.
  ExprNodeDesc expr = ((FilterOperator) n).getConf().getPredicate();
  boolean retval = checkExpression(expr);
  if (!retval) {
    LOG.info("Cannot run filter operator [" + n + "] in llap mode");
  }
  // Autoboxing (Boolean.valueOf) replaces the deprecated `new Boolean(...)`
  // constructor the original used, which also double-boxed the result.
  return retval;
}
});
if (ewi == null && !((FilterOperator)op).getConf().getIsSamplingPred() && (!onlySyntheticJoinPredicate || ((FilterOperator)op).getConf().isSyntheticJoinPredicate())) { ExprNodeDesc predicate = (((FilterOperator) nd).getConf()).getPredicate(); ewi = ExprWalkerProcFactory.extractPushdownPreds(owi, op, predicate); if (!ewi.isDeterministic()) {
OpParseContext inputCtx = opParseCtx.get(input); RowResolver inputRR = inputCtx.getRowResolver(); FilterDesc orFilterDesc = new FilterDesc(previous, false); orFilterDesc.setGenerated(true);
FilterDesc op2Conf = ((FilterOperator) currentOp2).getConf(); if (op1Conf.getIsSamplingPred() == op2Conf.getIsSamplingPred() && StringUtils.equals(op1Conf.getSampleDescExpr(), op2Conf.getSampleDescExpr())) { Multiset<String> conjsOp1String = extractConjsIgnoringDPPPreds(op1Conf.getPredicate()); Multiset<String> conjsOp2String = extractConjsIgnoringDPPPreds(op2Conf.getPredicate()); if (conjsOp1String.equals(conjsOp2String)) { equalFilters = true;
/**
 * Builds a vectorized filter operator over a single long column "col1",
 * whose predicate is the column reference itself.
 *
 * @return the vectorized form of the filter operator
 * @throws HiveException if vectorization fails
 */
private VectorFilterOperator getAVectorFilterOperator() throws HiveException {
  final ExprNodeColumnDesc colExpr = new ExprNodeColumnDesc(Long.class, "col1", "table", false);
  final List<String> columnNames = new ArrayList<String>();
  columnNames.add("col1");
  final FilterDesc filterDesc = new FilterDesc();
  filterDesc.setPredicate(colExpr);
  final VectorFilterDesc vectorDesc = new VectorFilterDesc();
  final Operator<? extends OperatorDesc> filterOp =
      OperatorFactory.get(new CompilationOpContext(), filterDesc);
  final VectorizationContext vContext = new VectorizationContext("name", columnNames);
  return (VectorFilterOperator) Vectorizer.vectorizeFilterOperator(filterOp, vContext, vectorDesc);
}
LOG.debug("Filter: " + desc.getPredicateString()); LOG.debug("TableScan: " + ts); walkExprTree(desc.getPredicate(), removerContext); desc.setPredicate(constNode); } else { int i = ctx.grandParent.getChildren().indexOf(ctx.parent);
if (fop2 != null && !fop2.getConf().getIsSamplingPred()) { return null; if (fop.getConf().getIsSamplingPred()) { return null; ExprNodeDesc predicate = fop.getConf().getPredicate(); String alias = top.getConf().getAlias();
/**
 * Inserts a synthetic-join-predicate FilterOperator between parent and target.
 * The filter is marked synthetic so later phases can recognize and treat it
 * differently from user-written filters.
 *
 * @param target     operator the filter will feed into
 * @param parent     operator the filter will read from
 * @param parentRS   row schema copied onto the new filter
 * @param filterExpr predicate for the new filter
 * @return the newly inserted filter operator
 */
private static Operator<FilterDesc> createFilter(Operator<?> target, Operator<?> parent,
    RowSchema parentRS, ExprNodeDesc filterExpr) {
  final FilterDesc desc = new FilterDesc(filterExpr, false);
  desc.setSyntheticJoinPredicate(true);
  final Operator<FilterDesc> filter = OperatorFactory.get(
      parent.getCompilationOpContext(), desc, new RowSchema(parentRS.getSignature()));
  // Rewire parent -> target into parent -> filter -> target.
  filter.getParentOperators().add(parent);
  filter.getChildOperators().add(target);
  parent.replaceChild(target, filter);
  target.replaceParent(parent, filter);
  return filter;
}
/** * Does a depth first search on the operator tree looking for a filter operator whose predicate * has one child which is a column which is not in the partition * @param operators * @return whether or not it has found its target */ private boolean findIndexColumnFilter( Collection<Operator<? extends OperatorDesc>> operators) { for (Operator<? extends OperatorDesc> op : operators) { if (op instanceof FilterOperator && ((FilterOperator)op).getConf().getPredicate().getChildren() != null) { // Is this the target if (findIndexColumnExprNodeDesc(((FilterOperator)op).getConf().getPredicate())) { ((FilterOperator)op).getConf().setSortedFilter(true); return true; } } // If the target has been found, no need to continue if (findIndexColumnFilter(op.getChildOperators())) { return true; } } return false; }
LOG.debug("Filter: " + desc.getPredicateString()); LOG.debug("TableScan: " + ts); GenTezUtils.collectDynamicPruningConditions(desc.getPredicate(), removerContext); table.getStorageHandler().addDynamicSplitPruningEdge(desc.getPredicate())){ generateEventOperatorPlan(ctx, parseContext, ts, column, table.getCols().stream().filter(e -> e.getName().equals(column)).
if (fop2 != null && !fop2.getConf().getIsSamplingPred()) { return null; if (fop.getConf().getIsSamplingPred()) { return null;
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  final FilterOperator filterOp = (FilterOperator) nd;
  final FilterDesc conf = filterOp.getConf();
  final SampleDesc sample = conf.getSampleDescr();
  // Only filters carrying an input-pruning sample descriptor are of interest.
  if (sample == null || !sample.getInputPruning()) {
    return null;
  }
  // Expected walker stacks: TS -> FIL -> FIL (this) or TS -> FIL (this).
  assert (stack.size() == 3 && stack.get(1) instanceof FilterOperator) || stack.size() == 2;
  final TableScanOperator tsOp = (TableScanOperator) stack.get(0);
  // Record the sample descriptor against the table scan for later pruning.
  ((SamplePrunerCtx) procCtx).getOpToSamplePruner().put(tsOp, sample);
  return null;
}
}