// Applies the given visitor to the DAG of this iteration's step function, starting from its single root node.
public void acceptForStepFunction(Visitor<OptimizerNode> visitor) { this.singleRoot.accept(visitor); } }
for (DagConnection c : getIncomingConnections()) { if (c.getSource() == null) { throw new CompilerException("Bug: Estimate computation called before inputs have been set."); computeOperatorSpecificDefaultEstimates(statistics); if (getOperator() == null || getOperator().getCompilerHints() == null) { return ; CompilerHints hints = getOperator().getCompilerHints(); if (hints.getOutputSize() >= 0) { this.estimatedOutputSize = hints.getOutputSize(); if (pred != null && pred.getEstimatedNumRecords() >= 0) { this.estimatedNumRecords = (long) (pred.getEstimatedNumRecords() * hints.getFilterFactor());
/** * This function connects the operators that produce the broadcast inputs to this operator. * * @param operatorToNode The map from program operators to optimizer nodes. * @param defaultExchangeMode The data exchange mode to use, if the operator does not * specify one. * * @throws CompilerException */ public void setBroadcastInputs(Map<Operator<?>, OptimizerNode> operatorToNode, ExecutionMode defaultExchangeMode) { // skip for Operators that don't support broadcast variables if (!(getOperator() instanceof AbstractUdfOperator<?, ?>)) { return; } // get all broadcast inputs AbstractUdfOperator<?, ?> operator = ((AbstractUdfOperator<?, ?>) getOperator()); // create connections and add them for (Map.Entry<String, Operator<?>> input : operator.getBroadcastInputs().entrySet()) { OptimizerNode predecessor = operatorToNode.get(input.getValue()); DagConnection connection = new DagConnection(predecessor, this, ShipStrategyType.BROADCAST, defaultExchangeMode); addBroadcastConnection(input.getKey(), connection); predecessor.addOutgoingConnection(connection); } }
@Override public boolean preVisit(OptimizerNode node) { // The interesting properties must be computed on the descend. In case a node has multiple outputs, // that computation must happen during the last descend. if (node.getInterestingProperties() == null && node.haveAllOutputConnectionInterestingProperties()) { node.computeUnionOfInterestingPropertiesFromSuccessors(); node.computeInterestingPropertiesForInputs(this.estimator); return true; } else { return false; } }
@Override
public boolean preVisit(OptimizerNode visitable) {
	// descend only into nodes that still carry interesting properties to clear;
	// a node without properties means this subtree was already processed
	if (visitable.getInterestingProperties() == null) {
		return false;
	}
	visitable.clearInterestingProperties();
	return true;
}
protected List<UnclosedBranchDescriptor> computeUnclosedBranchStackForBroadcastInputs( List<UnclosedBranchDescriptor> branchesSoFar) { // handle the data flow branching for the broadcast inputs for (DagConnection broadcastInput : getBroadcastConnections()) { OptimizerNode bcSource = broadcastInput.getSource(); addClosedBranches(bcSource.closedBranchingNodes); List<UnclosedBranchDescriptor> bcBranches = bcSource.getBranchesForParent(broadcastInput); ArrayList<UnclosedBranchDescriptor> mergedBranches = new ArrayList<UnclosedBranchDescriptor>(); mergeLists(branchesSoFar, bcBranches, mergedBranches, true); branchesSoFar = mergedBranches.isEmpty() ? Collections.<UnclosedBranchDescriptor>emptyList() : mergedBranches; } return branchesSoFar; }
removeClosedBranches(child1open); removeClosedBranches(child2open); int id2 = index2 >= 0 ? child2open.get(index2).getBranchingNode().getId() : -1; while (index1 >= 0 && (id1 = child1open.get(index1).getBranchingNode().getId()) > id2) { result.add(child1open.get(index1)); index1--; while (index2 >= 0 && (id2 = child2open.get(index2).getBranchingNode().getId()) > id1) { result.add(child2open.get(index2)); index2--; currBanchingNode.markAllOutgoingConnectionsAsPipelineBreaking(); long allInputs = (0x1L << currBanchingNode.getOutgoingConnections().size()) - 1; addClosedBranch(currBanchingNode); } else {
@Override public void postVisit(OptimizerNode visitable) { // the node ids visitable.initId(this.id++); // connections need to figure out their maximum path depths for (DagConnection conn : visitable.getIncomingConnections()) { conn.initMaxDepth(); } for (DagConnection conn : visitable.getBroadcastConnections()) { conn.initMaxDepth(); } // the estimates visitable.computeOutputEstimates(this.statistics); // if required, recurse into the step function if (visitable instanceof IterationNode) { ((IterationNode) visitable).acceptForStepFunction(this); } } }
if (n instanceof DataSinkNode) { type = "sink"; contents = n.getOperator().toString(); } else if (n instanceof DataSourceNode) { type = "source"; contents = n.getOperator().toString(); contents = n.getOperator().getName(); contents = n.getOperator().getName(); contents = n.getOperator().getName(); String name = n.getOperatorName(); if (name.equals("Reduce") && (node instanceof SingleInputPlanNode) && ((SingleInputPlanNode) node).getDriverStrategy() == DriverStrategy.SORTED_GROUP_COMBINE) { + (n.getParallelism() >= 1 ? n.getParallelism() : "default") + "\""); if (inputNum == 0) { child1name += child1name.length() > 0 ? ", " : ""; child1name += source.getOptimizerNode().getOperator().getName() + " (id: " + this.nodeIds.get(source) + ")"; } else if (inputNum == 1) { child2name += child2name.length() > 0 ? ", " : ""; child2name += source.getOptimizerNode().getOperator().getName() + " (id: " + this.nodeIds.get(source) + ")"; if (n.getUniqueFields() == null || n.getUniqueFields().size() == 0) {
@Override
public String toString() {
	// renders as "(operator) [joinedPathsVector]"
	StringBuilder sb = new StringBuilder();
	sb.append('(').append(this.branchingNode.getOperator()).append(") [");
	sb.append(this.joinedPathsVector).append(']');
	return sb.toString();
} }
@Override
public String toString() {
	StringBuilder result = new StringBuilder();
	result.append(getOperatorName());
	result.append(" (").append(getOperator().getName()).append(") ");

	// list each incoming connection as "(index:shipStrategy)"; the strategy may
	// still be undecided, which prints as "null"
	int inputNum = 1;
	for (DagConnection in : getIncomingConnections()) {
		String strategyName = in.getShipStrategy() == null ? "null" : in.getShipStrategy().name();
		result.append('(').append(inputNum++).append(':').append(strategyName).append(')');
	}

	return result.toString();
}
addProperty(gen, "Partitioning Order", "(none)"); if (optNode.getUniqueFields() == null || optNode.getUniqueFields().size() == 0) { addProperty(gen, "Uniqueness", "not unique"); addProperty(gen, "Uniqueness", optNode.getUniqueFields().toString()); addProperty(gen, "Grouping", "not grouped"); if (optNode.getUniqueFields() == null || optNode.getUniqueFields().size() == 0) { addProperty(gen, "Uniqueness", "not unique"); addProperty(gen, "Uniqueness", optNode.getUniqueFields().toString()); addProperty(gen, "Est. Output Size", optNode.getEstimatedOutputSize() == -1 ? "(unknown)" : formatNumber(optNode.getEstimatedOutputSize(), "B")); addProperty(gen, "Est. Cardinality", optNode.getEstimatedNumRecords() == -1 ? "(unknown)" : formatNumber(optNode.getEstimatedNumRecords())); gen.writeEndArray(); if (optNode.getOperator().getCompilerHints() != null) { CompilerHints hints = optNode.getOperator().getCompilerHints(); CompilerHints defaults = new CompilerHints();
if (nextPartialSolution.getParallelism() != getParallelism() || nextPartialSolution == partialSolution || nextPartialSolution instanceof BinaryUnionNode) nextPartialSolution.addOutgoingConnection(noOpConn); terminationCriterion.addOutgoingConnection(terminationCriterionRootConnection); nextPartialSolution.addOutgoingConnection(rootConnection);
@Override public void setInput(Map<Operator<?>, OptimizerNode> contractToNode, ExecutionMode defaultExchangeMode) { Operator<?> children = getOperator().getInput(); final OptimizerNode pred; final DagConnection conn; pred = contractToNode.get(children); conn = new DagConnection(pred, this, defaultExchangeMode); // create the connection and add it this.input = conn; pred.addOutgoingConnection(conn); }
rootNode.accept(new IdAndEstimatesVisitor(this.statistics)); rootNode.accept(unionEnforcer); rootNode.accept(branchingVisitor); rootNode.accept(propsVisitor); if (rootNode.getOpenBranches() != null && rootNode.getOpenBranches().size() > 0) { throw new CompilerException("Bug: Logic for branching plans (non-tree plans) has an error, and does not " + "track the re-joining of branches correctly."); List<PlanNode> bestPlan = rootNode.getAlternativePlans(this.costEstimator);
this.nextPartialSolution.accept(PlanCacheCleaner.INSTANCE); if (this.terminationCriterion != null) { this.terminationCriterion.accept(PlanCacheCleaner.INSTANCE); List<PlanNode> candidates = this.nextPartialSolution.getAlternativePlans(estimator); List<PlanNode> terminationCriterionCandidates = this.terminationCriterion.getAlternativePlans(estimator);
// Renders the connection as "Connection: Source(name) -> [STRATEGY] Target(name)",
// printing "null" for a missing endpoint and omitting the strategy if undecided.
public String toString() {
	StringBuilder sb = new StringBuilder(50);
	sb.append("Connection: ");

	if (this.source == null) {
		sb.append("null");
	} else {
		sb.append(this.source.getOperator().getName());
		sb.append('(').append(this.source.getOperatorName()).append(')');
	}

	sb.append(" -> ");

	if (this.shipStrategy != null) {
		sb.append('[').append(this.shipStrategy.name()).append("] ");
	}

	if (this.target == null) {
		sb.append("null");
	} else {
		sb.append(this.target.getOperator().getName());
		sb.append('(').append(this.target.getOperatorName()).append(')');
	}

	return sb.toString();
} }
removeClosedBranches(child1open); removeClosedBranches(child2open); int id2 = index2 >= 0 ? child2open.get(index2).getBranchingNode().getId() : -1; while (index1 >= 0 && (id1 = child1open.get(index1).getBranchingNode().getId()) > id2) { result.add(child1open.get(index1)); index1--; while (index2 >= 0 && (id2 = child2open.get(index2).getBranchingNode().getId()) > id1) { result.add(child2open.get(index2)); index2--; currBanchingNode.markAllOutgoingConnectionsAsPipelineBreaking(); long allInputs = (0x1L << currBanchingNode.getOutgoingConnections().size()) - 1; addClosedBranch(currBanchingNode); } else {
@Override public boolean preVisit(OptimizerNode node) { // The interesting properties must be computed on the descend. In case a node has multiple outputs, // that computation must happen during the last descend. if (node.getInterestingProperties() == null && node.haveAllOutputConnectionInterestingProperties()) { node.computeUnionOfInterestingPropertiesFromSuccessors(); node.computeInterestingPropertiesForInputs(this.estimator); return true; } else { return false; } }
protected List<UnclosedBranchDescriptor> computeUnclosedBranchStackForBroadcastInputs( List<UnclosedBranchDescriptor> branchesSoFar) { // handle the data flow branching for the broadcast inputs for (DagConnection broadcastInput : getBroadcastConnections()) { OptimizerNode bcSource = broadcastInput.getSource(); addClosedBranches(bcSource.closedBranchingNodes); List<UnclosedBranchDescriptor> bcBranches = bcSource.getBranchesForParent(broadcastInput); ArrayList<UnclosedBranchDescriptor> mergedBranches = new ArrayList<UnclosedBranchDescriptor>(); mergeLists(branchesSoFar, bcBranches, mergedBranches, true); branchesSoFar = mergedBranches.isEmpty() ? Collections.<UnclosedBranchDescriptor>emptyList() : mergedBranches; } return branchesSoFar; }