/**
 * Convenience overload: produces SplitWork units for every split of the given dataset.
 *
 * @param dataset      table whose splits are to be transformed
 * @param nodeMap      map of available execution nodes used for work assignment
 * @param affinityType distribution affinity to honor when assigning splits
 * @return iterator over the resulting SplitWork units
 */
public static Iterator<SplitWork> transform(final TableMetadata dataset, ExecutionNodeMap nodeMap, DistributionAffinity affinityType) {
  // Delegate to the iterator-based overload with the dataset's own splits.
  final Iterator<DatasetSplit> datasetSplits = dataset.getSplits();
  return transform(datasetSplits, nodeMap, affinityType);
}
// Pruning-loop state: total splits seen vs. splits passing the filter, the iterator over the
// table's splits, and the materialized filter expression (starts null — presumably built lazily
// before first evaluation; NOTE(review): confirm against the surrounding method, not visible here).
int recordCount = 0; int qualifiedCount = 0; Iterator<DatasetSplit> splitIter = tableMetadata.getSplits(); LogicalExpression materializedExpr = null;
/**
 * Returns the minimum fragment parallelization width for this scan.
 *
 * <p>With soft affinity there is no lower bound beyond a single fragment. With hard affinity,
 * every distinct host referenced by any split's affinity list must run a fragment, so the
 * width is the number of such hosts.
 */
@Override
public final int getMinParallelizationWidth() {
  // Soft affinity: a single fragment is always acceptable.
  if (affinity != DistributionAffinity.HARD) {
    return 1;
  }
  // Hard affinity: count the distinct hosts named across all split affinities.
  final Set<String> hosts = new HashSet<>();
  final Iterator<DatasetSplit> splits = dataset.getSplits();
  while (splits.hasNext()) {
    for (Affinity splitAffinity : splits.next().getAffinitiesList()) {
      hosts.add(splitAffinity.getHost());
    }
  }
  return hosts.size();
}
/**
 * Returns the minimum fragment parallelization width for this scan.
 *
 * <p>Under hard distribution affinity the scan must place a fragment on every distinct host
 * that appears in any split's affinity list, so the width equals the number of such hosts.
 * Any other affinity imposes no lower bound beyond one.
 */
@Override
public int getMinParallelizationWidth() {
  if (getDistributionAffinity() == DistributionAffinity.HARD) {
    // Collect every distinct host referenced by the table's split affinities.
    final Set<String> distinctHosts = new HashSet<>();
    final Iterator<DatasetSplit> splitIterator = tableMetadata.getSplits();
    while (splitIterator.hasNext()) {
      final DatasetSplit currentSplit = splitIterator.next();
      for (Affinity hostAffinity : currentSplit.getAffinitiesList()) {
        distinctHosts.add(hostAffinity.getHost());
      }
    }
    return distinctHosts.size();
  }
  // Soft (or no) affinity: one fragment suffices.
  return 1;
}
/**
 * Transforms this scan's dataset splits into SplitWork units, assigning work across the
 * given execution nodes while honoring the scan's distribution affinity.
 *
 * @param nodeMap map of available execution nodes
 * @return iterator over the resulting SplitWork units
 */
@Override
public Iterator<SplitWork> getSplits(ExecutionNodeMap nodeMap) {
  final DistributionAffinity scanAffinity = getDistributionAffinity();
  return SplitWork.transform(dataset.getSplits(), nodeMap, scanAffinity);
}
// NOTE(review): mid-method fragment — the debug line closes the expression-evaluation pruning
// branch (logging its elapsed time); the else branch keeps an immutable snapshot of ALL splits
// (no pruning applied) and records that expression-based pruning was not performed.
logger.debug("Partition pruning using expression evaluation took {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS)); }else { finalNewSplits = ImmutableList.copyOf(dataset.value.getSplits()); evalPruned = false;
// NOTE(review): mid-method fragment — chooses a fast count for an aggregate call: a single
// non-nullable argument behaves like COUNT(*), so the exact table row count from split metadata
// is used; otherwise a single (nullable) column falls back to the per-column count, which may
// report GroupScan.NO_COLUMN_STATS when statistics are unavailable (handled just below, not visible here).
(aggCall.getArgList().size() == 1 && ! agg.getInput().getRowType().getFieldList().get(aggCall.getArgList().get(0).intValue()).getType().isNullable())) { cnt = getAccurateRowCount(scan.getTableMetadata().getSplits()); } else if (aggCall.getArgList().size() == 1) { cnt = getAccurateColumnCount(columnName, scan.getTableMetadata().getSplits()); if (cnt == GroupScan.NO_COLUMN_STATS) {