/**
 * Marks the walked table scan as metadata-only when the query is LIMIT 0.
 * Only LIMIT 0 can be answered without reading table data; any other limit
 * is left untouched.
 *
 * @return always {@code null}; the result is communicated via the WalkerCtx.
 */
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  // Direct != comparison instead of the double-negated !(x == 0).
  if (((LimitOperator) nd).getConf().getLimit() != 0) {
    return null;
  }
  // Parameterized logging: no string concatenation when INFO is disabled.
  LOG.info("Found Limit 0 TableScan. {}", nd);
  ((WalkerCtx) procCtx).convertMetadataOnly();
  return null;
}
/**
 * Handles a LIMIT 0 operator: prunes candidate metadata-only table scans that
 * are not covered by a null-op on every branch, then flags the context so the
 * remaining scans can be converted to metadata-only access.
 *
 * @return always {@code null}; results are communicated via the WalkerCtx.
 */
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  LimitOperator limitOp = (LimitOperator) nd;
  if (limitOp.getConf().getLimit() != 0) {
    // Only LIMIT 0 qualifies for the metadata-only optimization.
    return null;
  }
  Set<TableScanOperator> candidates = ((WalkerCtx) procCtx).getMayBeMetadataOnlyTableScans();
  if (candidates != null) {
    // Iterator.remove keeps the traversal safe while dropping non-qualifying scans.
    Iterator<TableScanOperator> it = candidates.iterator();
    while (it.hasNext()) {
      TableScanOperator candidate = it.next();
      if (!isNullOpPresentInAllBranches(candidate, limitOp)) {
        it.remove();
      }
    }
  }
  LOG.debug("Found Limit 0 TableScan. {}", nd);
  ((WalkerCtx) procCtx).convertMetadataOnly();
  return null;
}
}
// Caches the LIMIT/OFFSET configuration from the descriptor and resets the
// emitted-row counter before any rows are processed.
@Override protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);
  limit = conf.getLimit();
  leastRow = conf.getLeastRows();
  // A null offset in the descriptor means "no OFFSET clause"; treat it as 0.
  offset = (conf.getOffset() == null) ? 0 : conf.getOffset();
  currCount = 0;
  // NOTE(review): defaults to map-side (true) when the property is absent — confirm intended.
  isMap = hconf.getBoolean("mapred.task.is.map", true);
}
/**
 * Prepares this operator: pulls LIMIT/OFFSET settings from the descriptor,
 * zeroes the emitted-row counter, and records whether we run on the map side.
 */
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);
  currCount = 0;
  limit = conf.getLimit();
  leastRow = conf.getLeastRows();
  // A null offset means the query has no OFFSET clause.
  Integer configuredOffset = conf.getOffset();
  if (configuredOffset == null) {
    offset = 0;
  } else {
    offset = configuredOffset;
  }
  isMap = hconf.getBoolean("mapred.task.is.map", true);
}
// If the plan root is itself a LIMIT, record its row count as the outer query
// limit on the parse info.
private void handleTopLimit(Operator<?> rootOp) {
  if (rootOp instanceof LimitOperator) {
    // this can happen only on top most limit, not while visiting Limit Operator
    // since that can be within subquery.
    this.semanticAnalyzer.getQB().getParseInfo().setOuterQueryLimit(((LimitOperator) rootOp).getConf().getLimit());
  }
}
private void handleTopLimit(Operator<?> rootOp) { if (rootOp instanceof LimitOperator) { // this can happen only on top most limit, not while visiting Limit Operator // since that can be within subquery. this.semanticAnalyzer.getQB().getParseInfo().setOuterQueryLimit(((LimitOperator) rootOp).getConf().getLimit()); } }
// Fragment: push the TopN size (limit + offset) into the parent ReduceSink so
// it can pre-filter rows, bounded by the configured memory threshold.
LimitDesc limitDesc = limit.getConf();
Integer offset = limitDesc.getOffset();
// A null offset means no OFFSET clause; it contributes 0 to the TopN size.
rs.getConf().setTopN(limitDesc.getLimit() + ((offset == null) ? 0 : offset));
rs.getConf().setTopNMemoryUsage(((LimitPushdownContext) procCtx).threshold);
// NOTE(review): the branch handling a single GroupBy child continues beyond this view.
if (rs.getNumChild() == 1 && rs.getChildren().get(0) instanceof GroupByOperator) {
// Fragment: configure TopN pushdown on the parent ReduceSink — the TopN size
// is limit plus offset, and memory usage is capped by the pushdown threshold.
LimitDesc limitDesc = limit.getConf();
Integer offset = limitDesc.getOffset();
// Null offset == no OFFSET clause; contribute 0.
rs.getConf().setTopN(limitDesc.getLimit() + ((offset == null) ? 0 : offset));
rs.getConf().setTopNMemoryUsage(((LimitPushdownContext) procCtx).threshold);
// NOTE(review): the single-GroupBy-child case continues beyond this view.
if (rs.getNumChild() == 1 && rs.getChildren().get(0) instanceof GroupByOperator) {
// Fragment: fetch the parent operator's statistics and the configured LIMIT.
Statistics parentStats = parent.getStatistics();
long limit = -1; // sentinel; immediately overwritten on the next line
limit = lop.getConf().getLimit();
// Read the configured LIMIT row count from the operator's descriptor.
limit = lop.getConf().getLimit();
/**
 * Initializes this operator: caches the LIMIT row count from the descriptor
 * and resets the emitted-row counter.
 */
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);
  currCount = 0;
  limit = conf.getLimit();
}
/**
 * Initializes this operator: caches limit settings, resets the emitted-row
 * counter, and records whether this task runs on the map side.
 *
 * @return any asynchronous initialization work scheduled by the superclass.
 */
@Override
protected Collection<Future<?>> initializeOp(Configuration hconf) throws HiveException {
  final Collection<Future<?>> pending = super.initializeOp(hconf);
  currCount = 0;
  limit = conf.getLimit();
  leastRow = conf.getLeastRows();
  isMap = hconf.getBoolean("mapred.task.is.map", true);
  return pending;
}
/**
 * Marks the walked table scan as metadata-only when the query is LIMIT 0.
 * Only LIMIT 0 can be answered without reading table data; any other limit
 * is left untouched.
 *
 * @return always {@code null}; the result is communicated via the WalkerCtx.
 */
@Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
    Object... nodeOutputs) throws SemanticException {
  // Direct != comparison instead of the double-negated !(x == 0).
  if (((LimitOperator) nd).getConf().getLimit() != 0) {
    return null;
  }
  // Parameterized logging: no string concatenation when INFO is disabled.
  LOG.info("Found Limit 0 TableScan. {}", nd);
  ((WalkerCtx) procCtx).convertMetadataOnly();
  return null;
}
// Fragment: push the LIMIT value into the parent ReduceSink as its TopN size,
// bounded by the configured pushdown memory threshold.
rs.getConf().setTopN(limit.getConf().getLimit());
rs.getConf().setTopNMemoryUsage(((LimitPushdownContext) procCtx).threshold);
// NOTE(review): the single-GroupBy-child branch continues beyond this view.
if (rs.getNumChild() == 1 && rs.getChildren().get(0) instanceof GroupByOperator) {
// Return the LIMIT value of the first LimitOperator in the set.
// NOTE(review): iterator().next() assumes limitOps is non-empty — confirm at the caller.
return ((LimitOperator) limitOps.iterator().next()).getConf().getLimit();
// Read the configured LIMIT row count from the operator's descriptor.
limit = lop.getConf().getLimit();