/**
 * Returns the record-count estimate of the wrapped source template, scaled by the
 * replication factor. A negative (unknown) estimate is passed through unchanged.
 */
@Override
public long getEstimatedNumRecords() {
    final long baseEstimate = this.source.template.getEstimatedNumRecords();
    if (baseEstimate < 0) {
        // unknown cardinality: propagate the sentinel as-is, never scale it
        return baseEstimate;
    }
    return baseEstimate * this.replicationFactor;
}
/**
 * Delegates the record-count estimate to the underlying source node.
 */
@Override public long getEstimatedNumRecords() { return this.source.getEstimatedNumRecords(); }
/**
 * Computes the estimates for the Map operator.
 * We assume that by default, Map takes one value and transforms it into another value.
 * The cardinality consequently stays the same.
 * Note: the output size is deliberately not copied here, since a map may change
 * the record width arbitrarily.
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    // one output record per input record
    this.estimatedNumRecords = getPredecessorNode().getEstimatedNumRecords();
}
}
/**
 * Computes the estimates for the FlatMap operator. Since it un-nests, we assume a cardinality
 * increase. To give the system a hint at data increase, we take a default magic number of a
 * 5 times increase.
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    final long inputCardinality = getPredecessorNode().getEstimatedNumRecords();
    // Propagate the "unknown" sentinel (-1) instead of multiplying it into a
    // meaningless negative value (-5), matching how the other nodes guard
    // unknown input estimates.
    this.estimatedNumRecords = inputCardinality < 0 ? -1 : inputCardinality * 5;
}
}
/**
 * Default estimates for a record-preserving operator: both the cardinality and the
 * output size are inherited unchanged from the single predecessor.
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    // pure pass-through of the input estimates
    this.estimatedNumRecords = getPredecessorNode().getEstimatedNumRecords();
    this.estimatedOutputSize = getPredecessorNode().getEstimatedOutputSize();
}
/**
 * Estimates for a binary union: the result contains every record of both inputs,
 * so cardinality and output size are the sums of the respective input values.
 * If either side is unknown (negative), the result is marked unknown (-1).
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    final long numRecordsFirst = getFirstPredecessorNode().getEstimatedNumRecords();
    final long numRecordsSecond = getSecondPredecessorNode().getEstimatedNumRecords();
    if (numRecordsFirst < 0 || numRecordsSecond < 0) {
        this.estimatedNumRecords = -1;
    } else {
        this.estimatedNumRecords = numRecordsFirst + numRecordsSecond;
    }

    final long outputSizeFirst = getFirstPredecessorNode().getEstimatedOutputSize();
    final long outputSizeSecond = getSecondPredecessorNode().getEstimatedOutputSize();
    if (outputSizeFirst < 0 || outputSizeSecond < 0) {
        this.estimatedOutputSize = -1;
    } else {
        this.estimatedOutputSize = outputSizeFirst + outputSizeSecond;
    }
}
/**
 * Default estimates: the operator forwards the estimates of its first input
 * unchanged (cardinality and output size).
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    // inherit both estimates from the first predecessor as-is
    this.estimatedNumRecords = getFirstPredecessorNode().getEstimatedNumRecords();
    this.estimatedOutputSize = getFirstPredecessorNode().getEstimatedOutputSize();
}
/**
 * Default estimates for a record-preserving operator: cardinality and output
 * size are copied unchanged from the predecessor.
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    // pass-through of the input estimates
    this.estimatedOutputSize = getPredecessorNode().getEstimatedOutputSize();
    this.estimatedNumRecords = getPredecessorNode().getEstimatedNumRecords();
}
}
/**
 * Computes the estimates for the Filter operator. Since it applies a filter on the data we
 * assume a cardinality decrease. To give the system a hint at data decrease, we use a default
 * magic number to indicate a 0.5 decrease.
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    final long inputCardinality = getPredecessorNode().getEstimatedNumRecords();
    final long inputSize = getPredecessorNode().getEstimatedOutputSize();
    // Keep the "unknown" sentinel (-1): halving it would truncate to 0,
    // which wrongly claims a known, empty result instead of an unknown one.
    this.estimatedNumRecords = inputCardinality < 0 ? -1 : (long) (inputCardinality * 0.5);
    this.estimatedOutputSize = inputSize < 0 ? -1 : (long) (inputSize * 0.5);
}
}
@Override protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) { // sorting does not change the number of records this.estimatedNumRecords = getPredecessorNode().getEstimatedNumRecords(); this.estimatedOutputSize = getPredecessorNode().getEstimatedOutputSize(); }
@Override protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) { // partitioning does not change the number of records this.estimatedNumRecords = getPredecessorNode().getEstimatedNumRecords(); this.estimatedOutputSize = getPredecessorNode().getEstimatedOutputSize(); }
/**
 * Computes the estimated outputs for the data sink. The sink does not modify
 * the data in any way, so it simply mirrors the estimates of its direct
 * predecessor.
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    // sink = identity with respect to estimates
    this.estimatedOutputSize = getPredecessorNode().getEstimatedOutputSize();
    this.estimatedNumRecords = getPredecessorNode().getEstimatedNumRecords();
}
/**
 * Default estimates based on the inclusion principle: every record of the larger
 * input is assumed to find a partner, so the result cardinality is the maximum of
 * the two input cardinalities. The output size is derived from the combined
 * average record widths when both are known and the cardinality is known.
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    final long leftCardinality = getFirstPredecessorNode().getEstimatedNumRecords();
    final long rightCardinality = getSecondPredecessorNode().getEstimatedNumRecords();
    this.estimatedNumRecords = (leftCardinality < 0 || rightCardinality < 0)
            ? -1 : Math.max(leftCardinality, rightCardinality);

    if (this.estimatedNumRecords >= 0) {
        final float leftWidth = getFirstPredecessorNode().getEstimatedAvgWidthPerOutputRecord();
        final float rightWidth = getSecondPredecessorNode().getEstimatedAvgWidthPerOutputRecord();
        if (leftWidth > 0 && rightWidth > 0) {
            // one output record is a concatenation of one record from each side
            this.estimatedOutputSize = (long) ((leftWidth + rightWidth) * this.estimatedNumRecords);
        }
    }
}
}
/**
 * The default estimates build on the principle of inclusion: the smaller input key
 * domain is included in the larger input key domain, and every key of the larger
 * input is assumed to have one join partner in the smaller input. The result
 * cardinality is hence the larger of the two input cardinalities.
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    final long leftCardinality = getFirstPredecessorNode().getEstimatedNumRecords();
    final long rightCardinality = getSecondPredecessorNode().getEstimatedNumRecords();
    if (leftCardinality < 0 || rightCardinality < 0) {
        // at least one input unknown -> result unknown
        this.estimatedNumRecords = -1;
    } else {
        this.estimatedNumRecords = Math.max(leftCardinality, rightCardinality);
    }

    if (this.estimatedNumRecords >= 0) {
        final float leftWidth = getFirstPredecessorNode().getEstimatedAvgWidthPerOutputRecord();
        final float rightWidth = getSecondPredecessorNode().getEstimatedAvgWidthPerOutputRecord();
        if (leftWidth > 0 && rightWidth > 0) {
            // joined record = one record from each side, widths add up
            this.estimatedOutputSize = (long) ((leftWidth + rightWidth) * this.estimatedNumRecords);
        }
    }
}
public ReduceNode getCombinerUtilityNode() { if (this.preReduceUtilityNode == null) { this.preReduceUtilityNode = new ReduceNode(this); // we conservatively assume the combiner returns the same data size as it consumes this.preReduceUtilityNode.estimatedOutputSize = getPredecessorNode().getEstimatedOutputSize(); this.preReduceUtilityNode.estimatedNumRecords = getPredecessorNode().getEstimatedNumRecords(); } return this.preReduceUtilityNode; } }
public GroupReduceNode getCombinerUtilityNode() { if (this.combinerUtilityNode == null) { this.combinerUtilityNode = new GroupReduceNode(this); // we conservatively assume the combiner returns the same data size as it consumes this.combinerUtilityNode.estimatedOutputSize = getPredecessorNode().getEstimatedOutputSize(); this.combinerUtilityNode.estimatedNumRecords = getPredecessorNode().getEstimatedNumRecords(); } return this.combinerUtilityNode; } }
/**
 * We assume that the cardinality is the product of the input cardinalities
 * and that the result width is the sum of the input widths.
 *
 * @param statistics The statistics object to optionally access.
 */
@Override
protected void computeOperatorSpecificDefaultEstimates(DataStatistics statistics) {
    final long card1 = getFirstPredecessorNode().getEstimatedNumRecords();
    final long card2 = getSecondPredecessorNode().getEstimatedNumRecords();
    if (card1 < 0 || card2 < 0) {
        // at least one input unknown -> result unknown
        this.estimatedNumRecords = -1;
    } else if (card2 != 0 && card1 > Long.MAX_VALUE / card2) {
        // The Cartesian product overflows a long: saturate at Long.MAX_VALUE
        // instead of wrapping into a misleading negative or small positive value.
        this.estimatedNumRecords = Long.MAX_VALUE;
    } else {
        this.estimatedNumRecords = card1 * card2;
    }

    if (this.estimatedNumRecords >= 0) {
        final float width1 = getFirstPredecessorNode().getEstimatedAvgWidthPerOutputRecord();
        final float width2 = getSecondPredecessorNode().getEstimatedAvgWidthPerOutputRecord();
        if (width1 > 0 && width2 > 0) {
            // output record = concatenation of one record from each input
            this.estimatedOutputSize = (long) ((width1 + width2) * this.estimatedNumRecords);
        }
    }
}
}
// Emit the cardinality estimate; -1 is the sentinel for "no estimate available".
addProperty(writer, "Est. Cardinality", n.getEstimatedNumRecords() == -1 ? "(unknown)" : formatNumber(n.getEstimatedNumRecords()), false);
// NOTE(review): fragment — the enclosing method and the closing brace of this
// block lie outside the visible source. Only scale the estimate when the
// predecessor's cardinality is known (>= 0); the unknown sentinel (-1) is left untouched.
if (pred != null && pred.getEstimatedNumRecords() >= 0) { this.estimatedNumRecords = (long) (pred.getEstimatedNumRecords() * hints.getFilterFactor());
// NOTE(review): fragment — this line continues a ternary expression begun on a
// previous (unseen) line. -1 encodes "unknown" for both size and cardinality estimates.
: formatNumber(optNode.getEstimatedOutputSize(), "B")); addProperty(gen, "Est. Cardinality", optNode.getEstimatedNumRecords() == -1 ? "(unknown)" : formatNumber(optNode.getEstimatedNumRecords())); gen.writeEndArray();