/**
 * Structural equality for table-scan descriptors: two descriptors are the
 * same only when they are of the exact same class and agree on qualified
 * table, filter expression, row limit and stats-gathering flag.
 */
@Override
public boolean isSame(OperatorDesc other) {
  // Guard clause: different runtime classes can never be the same descriptor.
  if (!getClass().getName().equals(other.getClass().getName())) {
    return false;
  }
  TableScanDesc that = (TableScanDesc) other;
  return Objects.equals(getQualifiedTable(), that.getQualifiedTable())
      && ExprNodeDescUtils.isSame(getFilterExpr(), that.getFilterExpr())
      && getRowLimit() == that.getRowLimit()
      && isGatherStats() == that.isGatherStats();
}
@Override protected void initializeOp(Configuration hconf) throws HiveException { super.initializeOp(hconf); inputFileChanged = false; if (conf == null) { return; } rowLimit = conf.getRowLimit(); if (hconf instanceof JobConf) { jc = (JobConf) hconf; } else { // test code path jc = new JobConf(hconf); } defaultPartitionName = HiveConf.getVar(hconf, HiveConf.ConfVars.DEFAULTPARTITIONNAME); currentStat = null; stats = new HashMap<String, Stat>(); /* * This TableScanDesc flag is strictly set by the Vectorizer class for vectorized MapWork * vertices. */ vectorized = conf.isVectorized(); }
@Override protected void initializeOp(Configuration hconf) throws HiveException { super.initializeOp(hconf); inputFileChanged = false; if (conf == null) { return; } rowLimit = conf.getRowLimit(); if (!conf.isGatherStats()) { return; } if (hconf instanceof JobConf) { jc = (JobConf) hconf; } else { // test code path jc = new JobConf(hconf); } defaultPartitionName = HiveConf.getVar(hconf, HiveConf.ConfVars.DEFAULTPARTITIONNAME); currentStat = null; stats = new HashMap<String, Stat>(); }
if (tsOp1.getConf().getRowLimit() != tsOp2.getConf().getRowLimit()) { return false;
&& pctx.getPrunedPartitions(tsOp1).getPartitions().equals( pctx.getPrunedPartitions(tsOp2).getPartitions()) && op1Conf.getRowLimit() == op2Conf.getRowLimit()) { return true; } else {
@Override public boolean equals(TableScanOperator op1, TableScanOperator op2) { Preconditions.checkNotNull(op1); Preconditions.checkNotNull(op2); TableScanDesc op1Conf = op1.getConf(); TableScanDesc op2Conf = op2.getConf(); if (compareString(op1Conf.getAlias(), op2Conf.getAlias()) && compareExprNodeDesc(op1Conf.getFilterExpr(), op2Conf.getFilterExpr()) && op1Conf.getRowLimit() == op2Conf.getRowLimit() && op1Conf.isGatherStats() == op2Conf.isGatherStats()) { return true; } else { return false; } } }
if (tsOp.getConf().getRowLimit() != -1) {
if (tsOp.getConf().getRowLimit() != -1) {
// Initializes this table-scan operator, returning the parent's async-init
// futures. Stops early when there is no descriptor or stats gathering is off;
// otherwise prepares the JobConf, default partition name and stats map.
@Override
protected Collection<Future<?>> initializeOp(Configuration hconf) throws HiveException {
  Collection<Future<?>> result = super.initializeOp(hconf);
  inputFileChanged = false;
  if (conf == null) {
    return result;
  }
  rowLimit = conf.getRowLimit();
  if (!conf.isGatherStats()) {
    // Stats gathering disabled: none of the setup below is needed.
    return result;
  }
  this.hconf = hconf;
  if (hconf instanceof JobConf) {
    jc = (JobConf) hconf;
  } else {
    // test code path
    jc = new JobConf(hconf);
  }
  defaultPartitionName = HiveConf.getVar(hconf, HiveConf.ConfVars.DEFAULTPARTITIONNAME);
  currentStat = null;
  stats = new HashMap<String, Stat>();
  if (conf.getPartColumns() == null || conf.getPartColumns().size() == 0) {
    // NON PARTITIONED table
    // NOTE(review): both this branch and the fall-through below return
    // result, so the check is currently a no-op — presumably partitioned-table
    // setup once followed (or should follow) this guard; confirm.
    return result;
  }
  return result;
}