continue; candidatesGrow(currentLevelOpRefs, candidates); prune(context); prune(context);
/**
 * Post-processing pass run after equivalent branches have been merged: for every plan
 * root, re-discovers the replicate/split operators below it and repairs the
 * parent/child links so both sides of each edge agree.
 */
private void cleanupPlan() {
    roots.forEach(planRoot -> {
        // Reset the per-root scratch state before walking this root.
        replicateToOutputs.clear();
        newOutputs.clear();
        findReplicateOp(planRoot, replicateToOutputs);
        cleanup(replicateToOutputs, newOutputs);
    });
}
/**
 * Decides, for each candidate in {@code group}, whether sharing its common input
 * requires materialization (to break a blocking dependency between clusters), and
 * prunes candidates whose materialization would not pay off.
 * Side effects: reassigns cluster ids for the whole plan, and may remove elements
 * from (or entirely clear) {@code group}.
 * NOTE(review): the method name carries a typo ("Materilization") — kept because
 * callers reference it by this name.
 *
 * @param group candidates rooted at equivalent subplans; mutated in place
 * @return per-candidate materialization flags, aligned with the ORIGINAL indexes of
 *         {@code group}; all-false when materialization is not worth it
 */
private boolean[] computeMaterilizationFlags(List<Mutable<ILogicalOperator>> group) {
    // (Re)compute the cluster assignment from scratch; each root seeds a fresh cluster id.
    for (Mutable<ILogicalOperator> root : roots) {
        computeClusters(null, root, new MutableInt(++lastUsedClusterId));
    }
    boolean[] materializationFlags = new boolean[group.size()];
    // Worth-it check uses the first candidate as representative for the whole group.
    boolean worthMaterialization = worthMaterialization(group.get(0));
    boolean requiresMaterialization;
    // get clusterIds for each candidate in the group
    List<Integer> groupClusterIds = new ArrayList<Integer>(group.size());
    for (int i = 0; i < group.size(); i++) {
        groupClusterIds.add(clusterMap.get(group.get(i)).getValue());
    }
    // Walk backwards so removals do not shift the indexes still to be visited.
    for (int i = group.size() - 1; i >= 0; i--) {
        requiresMaterialization = requiresMaterialization(groupClusterIds, i);
        if (requiresMaterialization && !worthMaterialization) {
            // Needs materialization but it is not worth the cost: drop this candidate.
            group.remove(i);
            groupClusterIds.remove(i);
        }
        materializationFlags[i] = requiresMaterialization;
    }
    if (group.size() < 2) {
        // Fewer than two candidates left: there is nothing to share any more.
        group.clear();
    }
    // if does not worth materialization, the flags for the remaining candidates should be false
    return worthMaterialization ? materializationFlags : new boolean[group.size()];
}
changed = false; topDownMaterialization(roots); genCandidates(context); removeTrivialShare(); if (!equivalenceClasses.isEmpty()) { changed = rewrite(context);
public final static List<IAlgebraicRewriteRule> prepareForJobGenRuleCollection() { List<IAlgebraicRewriteRule> prepareForJobGenRewrites = new LinkedList<IAlgebraicRewriteRule>(); prepareForJobGenRewrites.add(new ConvertFromAlgebricksExpressionsRule()); prepareForJobGenRewrites .add(new IsolateHyracksOperatorsRule(HeuristicOptimizer.hyraxOperatorsBelowWhichJobGenIsDisabled)); prepareForJobGenRewrites.add(new ExtractCommonOperatorsRule()); // Re-infer all types, so that, e.g., the effect of not-is-null is // propagated. prepareForJobGenRewrites.add(new PushProjectIntoDataSourceScanRule()); prepareForJobGenRewrites.add(new ReinferAllTypesRule()); prepareForJobGenRewrites.add(new SetExecutionModeRule()); return prepareForJobGenRewrites; } }
boolean[] materializationFlags = computeMaterilizationFlags(group); if (group.isEmpty()) { continue; cleanupPlan(); rewritten = true;
/**
 * Transitively collects into {@code blockingClusters} every cluster id that
 * {@code clusterId} waits for, directly or indirectly, per {@code clusterWaitForMap}.
 * NOTE(review): the recursion assumes the wait-for relation is acyclic — a cycle
 * would recurse without bound; confirm upstream guarantees this.
 *
 * @param clusterId        cluster whose blockers are being gathered
 * @param blockingClusters accumulator; set bits are the blocking cluster ids
 */
private void getAllBlockingClusterIds(int clusterId, BitSet blockingClusters) {
    BitSet directBlockers = clusterWaitForMap.get(clusterId);
    if (directBlockers == null) {
        // This cluster waits for nothing; nothing to add.
        return;
    }
    // Recurse into each direct blocker first, then fold the direct set in.
    for (int bit = directBlockers.nextSetBit(0); bit >= 0; bit = directBlockers.nextSetBit(bit + 1)) {
        getAllBlockingClusterIds(bit, blockingClusters);
    }
    blockingClusters.or(directBlockers);
}
/**
 * Walks the subtree under {@param parent} collecting every replicate/split operator
 * into {@code replicateToOutputs}, recording for each one which of its declared
 * outputs actually point back at a real parent (those indexes are set in the BitSet).
 *
 * @param parent             current operator whose children are inspected
 * @param replicateToOutputs out-param: replicate/split op -> bit set of valid output indexes
 */
private void findReplicateOp(Mutable<ILogicalOperator> parent,
        Map<Mutable<ILogicalOperator>, BitSet> replicateToOutputs) {
    for (Mutable<ILogicalOperator> inputRef : parent.getValue().getInputs()) {
        AbstractLogicalOperator inputOp = (AbstractLogicalOperator) inputRef.getValue();
        LogicalOperatorTag tag = inputOp.getOperatorTag();
        if (tag == LogicalOperatorTag.REPLICATE || tag == LogicalOperatorTag.SPLIT) {
            int outputIndex = ((AbstractReplicateOperator) inputOp).getOutputs().indexOf(parent);
            // A negative index means this replicate op does not list `parent`
            // among its outputs; only genuine parents get their bit set.
            if (outputIndex >= 0) {
                replicateToOutputs.computeIfAbsent(inputRef, k -> new BitSet()).set(outputIndex);
            }
        }
        findReplicateOp(inputRef, replicateToOutputs);
    }
}
computeClusters(opRef, inputRef, newClusterId); BitSet waitForList = clusterWaitForMap.get(currentClusterId.getValue()); if (waitForList == null) { if (prevClusterId == null || prevClusterId.getValue().equals(currentClusterId.getValue())) { clusterMap.put(opRef, currentClusterId); computeClusters(opRef, inputRef, currentClusterId); } else {
/**
 * Builds the ordered list of rewrite rules applied just before job generation.
 * Order matters: physical operators are assigned and Hyracks operators isolated
 * before common-operator extraction; types are re-inferred afterwards.
 *
 * @return a fresh, mutable list of the prepare-for-jobgen rewrite rules
 */
public static final List<IAlgebraicRewriteRule> prepareForJobGenRuleCollection() {
    List<IAlgebraicRewriteRule> prepareForJobGenRewrites = new LinkedList<>();
    prepareForJobGenRewrites.add(new InsertProjectBeforeUnionRule());
    prepareForJobGenRewrites.add(new SetAlgebricksPhysicalOperatorsRule());
    prepareForJobGenRewrites
            .add(new IsolateHyracksOperatorsRule(HeuristicOptimizer.hyraxOperatorsBelowWhichJobGenIsDisabled));
    prepareForJobGenRewrites.add(new FixReplicateOperatorOutputsRule());
    prepareForJobGenRewrites.add(new ExtractCommonOperatorsRule());
    // Re-infer all types, so that, e.g., the effect of not-is-null is
    // propagated.
    prepareForJobGenRewrites.add(new ReinferAllTypesRule());
    prepareForJobGenRewrites.add(new PushGroupByIntoSortRule());
    prepareForJobGenRewrites.add(new SetExecutionModeRule());
    prepareForJobGenRewrites.add(new SweepIllegalNonfunctionalFunctions());
    // NOTE(review): FixReplicateOperatorOutputsRule appears a second time here —
    // presumably intentional, to repair replicate outputs that the intervening
    // rules may have invalidated; confirm before deduplicating.
    prepareForJobGenRewrites.add(new FixReplicateOperatorOutputsRule());
    return prepareForJobGenRewrites;
}
}
/**
 * Reports whether the candidate at {@code index} must be materialized: true when
 * any OTHER candidate in the group lives in a cluster that (transitively) blocks
 * this candidate's cluster.
 *
 * @param groupClusterIds cluster id of each candidate, aligned with the group
 * @param index           position of the candidate under test
 * @return true if a sibling candidate's cluster blocks this one
 */
private boolean requiresMaterialization(List<Integer> groupClusterIds, int index) {
    BitSet blockers = new BitSet();
    getAllBlockingClusterIds(groupClusterIds.get(index), blockers);
    if (blockers.isEmpty()) {
        // Nothing blocks this cluster, so no sibling can force materialization.
        return false;
    }
    for (int other = 0; other < groupClusterIds.size(); other++) {
        if (other != index && blockers.get(groupClusterIds.get(other))) {
            return true;
        }
    }
    return false;
}