/**
 * Validates that the operator chain below the given table scan is a simple
 * single-child pipeline eligible for fetch-task conversion.
 *
 * Walks down from {@code ts} through any SelectOperator (each must pass
 * {@link #checkExpressions}), LimitOperator, and — only when
 * {@code bypassFilter} is true — FilterOperator nodes. Any other operator
 * type ends the walk. The chain qualifies only if it terminates in a
 * FileSinkOperator.
 *
 * @param fetch        accumulator for fetch conversion state; its
 *                     {@code filtered} flag is set if a FilterOperator is
 *                     traversed, and {@code fileSink} is set on success
 * @param ts           the table scan at the top of the chain
 * @param bypassFilter whether FilterOperators may be skipped over
 * @return {@code fetch} (with {@code fileSink} populated) if the chain is
 *         eligible, or {@code null} otherwise
 */
private FetchData checkOperators(FetchData fetch, TableScanOperator ts, boolean bypassFilter) {
  // The scan itself must have exactly one child to form a linear chain.
  if (ts.getChildOperators().size() != 1) {
    return null;
  }
  Operator<?> op = ts.getChildOperators().get(0);
  // Descend one child at a time; loop exits via break (unexpected operator
  // type) or return (invalid chain).
  for (; ; op = op.getChildOperators().get(0)) {
    if (op instanceof SelectOperator) {
      // Select is allowed only if its expressions are simple enough.
      if (!checkExpressions((SelectOperator) op)) {
        return null;
      }
      continue;
    }
    // Limit is always traversable; Filter only when bypassFilter is set.
    // Anything else stops the walk here.
    if (!(op instanceof LimitOperator || (op instanceof FilterOperator && bypassFilter))) {
      break;
    }
    // Limit/Filter must themselves have exactly one child to keep the
    // chain linear; otherwise conversion is not possible.
    if (op.getChildOperators() == null || op.getChildOperators().size() != 1) {
      return null;
    }
    if (op instanceof FilterOperator) {
      // Record that at least one filter was bypassed.
      fetch.setFiltered(true);
    }
  }
  // Eligible only if the walk stopped at a file sink.
  if (op instanceof FileSinkOperator) {
    fetch.fileSink = op;
    return fetch;
  }
  return null;
}
/**
 * Validates that the operator chain below the given table scan is a simple
 * single-child pipeline eligible for fetch-task conversion.
 *
 * Walks down from {@code ts} through any SelectOperator (each must pass
 * {@link #checkExpressions}), LimitOperator, and — only when
 * {@code bypassFilter} is true — FilterOperator nodes. Any other operator
 * type ends the walk. The chain qualifies only if it terminates in a
 * FileSinkOperator.
 *
 * @param fetch        accumulator for fetch conversion state; its
 *                     {@code filtered} flag is set if a FilterOperator is
 *                     traversed, and {@code fileSink} is set on success
 * @param ts           the table scan at the top of the chain
 * @param bypassFilter whether FilterOperators may be skipped over
 * @return {@code fetch} (with {@code fileSink} populated) if the chain is
 *         eligible, or {@code null} otherwise
 */
private FetchData checkOperators(FetchData fetch, TableScanOperator ts, boolean bypassFilter) {
  // The scan itself must have exactly one child to form a linear chain.
  if (ts.getChildOperators().size() != 1) {
    return null;
  }
  Operator<?> op = ts.getChildOperators().get(0);
  // Descend one child at a time; loop exits via break (unexpected operator
  // type) or return (invalid chain).
  for (; ; op = op.getChildOperators().get(0)) {
    if (op instanceof SelectOperator) {
      // Select is allowed only if its expressions are simple enough.
      if (!checkExpressions((SelectOperator) op)) {
        return null;
      }
      continue;
    }
    // Limit is always traversable; Filter only when bypassFilter is set.
    // Anything else stops the walk here.
    if (!(op instanceof LimitOperator || (op instanceof FilterOperator && bypassFilter))) {
      break;
    }
    // Limit/Filter must themselves have exactly one child to keep the
    // chain linear; otherwise conversion is not possible.
    if (op.getChildOperators() == null || op.getChildOperators().size() != 1) {
      return null;
    }
    if (op instanceof FilterOperator) {
      // Record that at least one filter was bypassed.
      fetch.setFiltered(true);
    }
  }
  // Eligible only if the walk stopped at a file sink.
  if (op instanceof FileSinkOperator) {
    fetch.fileSink = op;
    return fetch;
  }
  return null;
}
Preconditions.checkArgument(tableScan.getChildOperators().size() == 1 && tableScan.getChildOperators().get(0) instanceof MapJoinOperator); HashTableDummyDesc desc = new HashTableDummyDesc(); HashTableDummyOperator dummyOp = (HashTableDummyOperator) OperatorFactory.get( tableScan.getCompilationOpContext(), desc); dummyOp.getConf().setTbl(tableScan.getTableDescSkewJoin()); MapJoinOperator mapJoinOp = (MapJoinOperator) tableScan.getChildOperators().get(0); mapJoinOp.replaceParent(tableScan, dummyOp); List<Operator<? extends OperatorDesc>> mapJoinChildren =
Preconditions.checkArgument(tableScan.getChildOperators().size() == 1 && tableScan.getChildOperators().get(0) instanceof MapJoinOperator); HashTableDummyDesc desc = new HashTableDummyDesc(); HashTableDummyOperator dummyOp = (HashTableDummyOperator) OperatorFactory.get( tableScan.getCompilationOpContext(), desc); dummyOp.getConf().setTbl(tableScan.getTableDesc()); MapJoinOperator mapJoinOp = (MapJoinOperator) tableScan.getChildOperators().get(0); mapJoinOp.replaceParent(tableScan, dummyOp); List<Operator<? extends OperatorDesc>> mapJoinChildren =
dataSize, maxDataSize); Operator<?> currentOp1 = retainableTsOp.getChildOperators().get(0); Operator<?> currentOp2 = discardableTsOp.getChildOperators().get(0);
List<Operator<? extends OperatorDesc>> children = tableScanOperator.getChildOperators(); while (children.size() > 0) { children = dosetVectorDesc(children);
private boolean checkThreshold(FetchData data, int limit, ParseContext pctx) throws Exception { if (limit > 0) { if (data.hasOnlyPruningFilter()) { /* partitioned table + query has only pruning filters */ return true; } else if (data.isPartitioned() == false && data.isFiltered() == false) { /* unpartitioned table + no filters */ return true; } /* fall through */ } long threshold = HiveConf.getLongVar(pctx.getConf(), HiveConf.ConfVars.HIVEFETCHTASKCONVERSIONTHRESHOLD); if (threshold < 0) { return true; } Operator child = data.scanOp.getChildOperators().get(0); if(child instanceof SelectOperator) { // select *, constant and casts can be allowed without a threshold check if (checkExpressions((SelectOperator)child)) { return true; } } return data.isDataLengthWithInThreshold(pctx, threshold); }
(FilterOperator)ts.getChildOperators().get(0), tsExpr); if (reductionFactor < semijoinReductionThreshold) {
DynamicValuePredicateContext filterDynamicValuePredicatesCollection = new DynamicValuePredicateContext(); FilterDesc filterDesc = ((FilterOperator)(ts.getChildOperators().get(0))).getConf(); collectDynamicValuePredicates(filterDesc.getPredicate(), filterDynamicValuePredicatesCollection);
private boolean checkThreshold(FetchData data, int limit, ParseContext pctx) throws Exception { if (limit > 0) { if (data.hasOnlyPruningFilter()) { /* partitioned table + query has only pruning filters */ return true; } else if (data.isPartitioned() == false && data.isFiltered() == false) { /* unpartitioned table + no filters */ return true; } /* fall through */ } long threshold = HiveConf.getLongVar(pctx.getConf(), HiveConf.ConfVars.HIVEFETCHTASKCONVERSIONTHRESHOLD); if (threshold < 0) { return true; } Operator child = data.scanOp.getChildOperators().get(0); if(child instanceof SelectOperator) { // select *, constant and casts can be allowed without a threshold check if (checkExpressions((SelectOperator)child)) { return true; } } return data.isDataLengthWithInThreshold(pctx, threshold); }
/** * Inserts a filter below the table scan operator. Construct the filter * from the filter expression provided. * @param tableScanOp the table scan operators * @param filterExpr the filter expression */ private void insertFilterOnTop( TableScanOperator tableScanOp, ExprNodeDesc filterExpr) { // Get the top operator and it's child, all operators have a single parent Operator<? extends OperatorDesc> currChild = tableScanOp.getChildOperators().get(0); // Create the filter Operator and update the parents and children appropriately tableScanOp.setChildOperators(null); currChild.setParentOperators(null); Operator<FilterDesc> filter = OperatorFactory.getAndMakeChild( new FilterDesc(filterExpr, false), new RowSchema(tableScanOp.getSchema().getSignature()), tableScanOp); OperatorFactory.makeChild(filter, currChild); }
for (Operator<?> op : ts.getChildOperators()) { if (!(op instanceof FilterOperator)) { continue;
try { ts.initialize(job, new ObjectInspector[]{fetcher.getOutputObjectInspector()}); OperatorUtils.setChildrenCollector(ts.getChildOperators(), sampler); while (fetcher.pushRow()) { } } finally {
/** * Inserts a filter below the table scan operator. Construct the filter * from the filter expression provided. * @param tableScanOp the table scan operators * @param filterExpr the filter expression */ private void insertFilterOnTop( TableScanOperator tableScanOp, ExprNodeDesc filterExpr) { // Get the top operator and it's child, all operators have a single parent Operator<? extends OperatorDesc> currChild = tableScanOp.getChildOperators().get(0); // Create the filter Operator and update the parents and children appropriately tableScanOp.setChildOperators(null); currChild.setParentOperators(null); Operator<FilterDesc> filter = OperatorFactory.getAndMakeChild( new FilterDesc(filterExpr, false), new RowSchema(tableScanOp.getSchema().getSignature()), tableScanOp); OperatorFactory.makeChild(filter, currChild); }
try { ts.initialize(job, new ObjectInspector[]{fetcher.getOutputObjectInspector()}); OperatorUtils.setChildrenCollector(ts.getChildOperators(), sampler); while (fetcher.pushRow()) { } } finally {
for (Operator<?> op : ts.getChildOperators()) { if (!(op instanceof FilterOperator)) { continue;
Preconditions.checkArgument(tableScan.getChildOperators() == null || tableScan.getChildOperators().size() == 0, "AssertionError: expected tableScan.getChildOperators() to be null, " + "or tableScan.getChildOperators().size() to be 0");
assert tableScan.getChildOperators() == null || tableScan.getChildOperators().size() == 0;
ExprNodeGenericFuncDesc tableScanExprNode = tsOp.getConf().getFilterExpr(); List<Operator<? extends OperatorDesc>> allChildren = Lists.newArrayList(tsOp.getChildOperators()); for (Operator<? extends OperatorDesc> op : allChildren) { if (op instanceof FilterOperator) {
ts = new TableScanOperator(cCtx); ts.setConf(new TableScanDesc(null)); ts.getChildOperators().add(rs); rs.getParentOperators().add(ts); rs.getChildOperators().add(fs);