public void adjustGlobalPropertiesForFullParallelismChange() { if (this.shipStrategy == null || this.shipStrategy == ShipStrategyType.NONE) { throw new IllegalStateException("Cannot adjust channel for parallelism " + "change before the ship strategy is set."); } // make sure the properties are acquired if (this.globalProps == null) { getGlobalProperties(); } // some strategies globally reestablish properties switch (this.shipStrategy) { case FORWARD: throw new CompilerException("Cannot use FORWARD strategy between operations " + "with different number of parallel instances."); case NONE: // excluded by sanity check. left here for verification check completion case BROADCAST: case PARTITION_HASH: case PARTITION_RANGE: case PARTITION_RANDOM: case PARTITION_FORCED_REBALANCE: case PARTITION_CUSTOM: return; } throw new CompilerException("Unrecognized Ship Strategy Type: " + this.shipStrategy); }
/**
 * Constructs a new sink candidate node that uses <i>NONE</i> as its local strategy. Note that
 * local sorting and range partitioning are handled by the incoming channel already.
 *
 * @param template The template optimizer node that this candidate is created for.
 * @param nodeName The display name of this plan node.
 * @param input The channel delivering the sink's input, whose global and local
 *              properties are adopted (cloned) by this candidate.
 */
public SinkPlanNode(DataSinkNode template, String nodeName, Channel input) {
	super(template, nodeName, input, DriverStrategy.NONE);

	// snapshot the input channel's properties; the clones are owned by this candidate
	this.localProps = input.getLocalProperties().clone();
	this.globalProps = input.getGlobalProperties().clone();
}
c.getGlobalProperties().reset(); if (rgps.isMetBy(c.getGlobalProperties())) { c.setRequiredGlobalProps(rgps); addLocalCandidates(c, broadcastPlanChannels, igps, outputPlans, estimator); if (rgps.isMetBy(c.getGlobalProperties())) { addLocalCandidates(c, broadcastPlanChannels, rgps, outputPlans, estimator); break;
GlobalProperties gp1 = in1.getGlobalProperties().clone() .filterBySemanticProperties(semPropsGlobalPropFiltering, 0); GlobalProperties gp2 = in2.getGlobalProperties().clone() .filterBySemanticProperties(semPropsGlobalPropFiltering, 1); GlobalProperties combined = operator.computeGlobalProperties(gp1, gp2);
this.partialSolution.setCandidateProperties(in.getGlobalProperties(), in.getLocalProperties(), in); final BulkPartialSolutionPlanNode pspn = this.partialSolution.getCurrentPartialSolutionPlanNode(); rebuildPropertiesPlanNode.initProperties(toNoOp.getGlobalProperties(), toNoOp.getLocalProperties()); estimator.costOperator(rebuildPropertiesPlanNode);
c1.getGlobalProperties().reset(); c2.getGlobalProperties().reset(); if (gpp.getProperties1().isMetBy(c1.getGlobalProperties()) && gpp.getProperties2().isMetBy(c2.getGlobalProperties()) ) c1.getGlobalProperties(), c2.getGlobalProperties()))
GlobalProperties gProps = in.getGlobalProperties().clone(); LocalProperties lProps = in.getLocalProperties().clone(); gProps = dps.computeGlobalProperties(gProps);
this.worksetNode.setCandidateProperties(worksetIn.getGlobalProperties(), worksetIn.getLocalProperties(), worksetIn); this.solutionSetNode.setCandidateProperties(this.partitionedProperties, new LocalProperties(), solutionSetIn); rebuildWorksetPropertiesNode, "Rebuild Workset Properties", toNoOp, DriverStrategy.UNARY_NO_OP); rebuildWorksetPropertiesPlanNode.initProperties(toNoOp.getGlobalProperties(), toNoOp.getLocalProperties()); estimator.costOperator(rebuildWorksetPropertiesPlanNode);
@Override public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) { Channel toReducer = in; if (in.getShipStrategy() == ShipStrategyType.FORWARD || (node.getBroadcastConnections() != null && !node.getBroadcastConnections().isEmpty())) { if (in.getSource().getOptimizerNode() instanceof PartitionNode) { LOG.warn("Cannot automatically inject combiner for ReduceFunction. Please add an explicit combiner with combineGroup() in front of the partition operator."); } } else if (combinerStrategy != DriverStrategy.NONE) { // non forward case. all local properties are killed anyways, so we can safely plug in a combiner Channel toCombiner = new Channel(in.getSource()); toCombiner.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED); // create an input node for combine with same parallelism as input node ReduceNode combinerNode = ((ReduceNode) node).getCombinerUtilityNode(); combinerNode.setParallelism(in.getSource().getParallelism()); SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode, "Combine ("+node.getOperator().getName()+")", toCombiner, this.combinerStrategy, this.keyList); combiner.setCosts(new Costs(0, 0)); combiner.initProperties(toCombiner.getGlobalProperties(), toCombiner.getLocalProperties()); toReducer = new Channel(combiner); toReducer.setShipStrategy(in.getShipStrategy(), in.getShipStrategyKeys(), in.getShipStrategySortOrder(), in.getDataExchangeMode()); toReducer.setLocalStrategy(LocalStrategy.SORT, in.getLocalStrategyKeys(), in.getLocalStrategySortOrder()); } return new SingleInputPlanNode(node, "Reduce (" + node.getOperator().getName() + ")", toReducer, DriverStrategy.SORTED_REDUCE, this.keyList); }
private void checkValidCoGroupInputProperties(DualInputPlanNode coGroup) { GlobalProperties inProps1 = coGroup.getInput1().getGlobalProperties(); GlobalProperties inProps2 = coGroup.getInput2().getGlobalProperties();
c1.getGlobalProperties().reset(); c2.getGlobalProperties().reset(); GlobalProperties p1 = c1.getGlobalProperties(); GlobalProperties p2 = c2.getGlobalProperties(); p1.clearUniqueFieldCombinations(); p2.clearUniqueFieldCombinations();
private void checkValidJoinInputProperties(DualInputPlanNode join) { GlobalProperties inProps1 = join.getInput1().getGlobalProperties(); GlobalProperties inProps2 = join.getInput2().getGlobalProperties();
PartitioningProperty.HASH_PARTITIONED, sink.getInput().getGlobalProperties().getPartitioning()); assertEquals("Sink input should be hash partitioned on 1.", new FieldList(1), sink.getInput().getGlobalProperties().getPartitioningFields()); PartitioningProperty.HASH_PARTITIONED, partitioner.getInput().getGlobalProperties().getPartitioning()); assertEquals("Partitioner input should be hash partitioned on 1.", new FieldList(1), partitioner.getInput().getGlobalProperties().getPartitioningFields()); assertEquals("Partitioner input channel should be forwarding", ShipStrategyType.FORWARD, partitioner.getInput().getShipStrategy()); PartitioningProperty.HASH_PARTITIONED, c.getGlobalProperties().getPartitioning()); assertEquals("Union input channel should be hash partitioning", ShipStrategyType.PARTITION_HASH, c.getShipStrategy());
.getName()+")", toCombiner, DriverStrategy.SORTED_GROUP_COMBINE); combiner.setCosts(new Costs(0, 0)); combiner.initProperties(toCombiner.getGlobalProperties(), toCombiner.getLocalProperties());
PartitioningProperty.RANGE_PARTITIONED, sink.getInput().getGlobalProperties().getPartitioning()); assertEquals("Sink input should be range partitioned on 1", new Ordering(1, null, Order.ASCENDING), sink.getInput().getGlobalProperties().getPartitioningOrdering()); PartitioningProperty.RANGE_PARTITIONED, partitioner.getInput().getGlobalProperties().getPartitioning()); assertEquals("Partitioner input should be range partitioned on 1", new Ordering(1, null, Order.ASCENDING), partitioner.getInput().getGlobalProperties().getPartitioningOrdering()); assertEquals("Partitioner input channel should be forwarding", ShipStrategyType.FORWARD, partitioner.getInput().getShipStrategy()); PartitioningProperty.RANGE_PARTITIONED, c.getGlobalProperties().getPartitioning()); assertEquals("Union input channel should be forwarded", ShipStrategyType.FORWARD, c.getShipStrategy());
@Override public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) { if (in.getShipStrategy() == ShipStrategyType.FORWARD) { // locally connected, directly instantiate return new SingleInputPlanNode(node, "GroupReduce ("+node.getOperator().getName()+")", in, DriverStrategy.ALL_GROUP_REDUCE); } else { // non forward case.plug in a combiner Channel toCombiner = new Channel(in.getSource()); toCombiner.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED); // create an input node for combine with same parallelism as input node GroupReduceNode combinerNode = ((GroupReduceNode) node).getCombinerUtilityNode(); combinerNode.setParallelism(in.getSource().getParallelism()); SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode, "Combine ("+node.getOperator().getName()+")", toCombiner, DriverStrategy.ALL_GROUP_REDUCE_COMBINE); combiner.setCosts(new Costs(0, 0)); combiner.initProperties(toCombiner.getGlobalProperties(), toCombiner.getLocalProperties()); Channel toReducer = new Channel(combiner); toReducer.setShipStrategy(in.getShipStrategy(), in.getShipStrategyKeys(), in.getShipStrategySortOrder(), in.getDataExchangeMode()); toReducer.setLocalStrategy(in.getLocalStrategy(), in.getLocalStrategyKeys(), in.getLocalStrategySortOrder()); return new SingleInputPlanNode(node, "GroupReduce ("+node.getOperator().getName()+")", toReducer, DriverStrategy.ALL_GROUP_REDUCE); } }
@Override public SingleInputPlanNode instantiate(Channel in, SingleInputNode node) { if (in.getShipStrategy() == ShipStrategyType.FORWARD) { // locally connected, directly instantiate return new SingleInputPlanNode(node, "Reduce ("+node.getOperator().getName()+")", in, DriverStrategy.ALL_REDUCE); } else { // non forward case.plug in a combiner Channel toCombiner = new Channel(in.getSource()); toCombiner.setShipStrategy(ShipStrategyType.FORWARD, DataExchangeMode.PIPELINED); // create an input node for combine with same parallelism as input node ReduceNode combinerNode = ((ReduceNode) node).getCombinerUtilityNode(); combinerNode.setParallelism(in.getSource().getParallelism()); SingleInputPlanNode combiner = new SingleInputPlanNode(combinerNode, "Combine ("+node.getOperator().getName()+")", toCombiner, DriverStrategy.ALL_REDUCE); combiner.setCosts(new Costs(0, 0)); combiner.initProperties(toCombiner.getGlobalProperties(), toCombiner.getLocalProperties()); Channel toReducer = new Channel(combiner); toReducer.setShipStrategy(in.getShipStrategy(), in.getShipStrategyKeys(), in.getShipStrategySortOrder(), in.getDataExchangeMode()); toReducer.setLocalStrategy(in.getLocalStrategy(), in.getLocalStrategyKeys(), in.getLocalStrategySortOrder()); return new SingleInputPlanNode(node, "Reduce ("+node.getOperator().getName()+")", toReducer, DriverStrategy.ALL_REDUCE); } }
PartitioningProperty.FORCED_REBALANCED, sink.getInput().getGlobalProperties().getPartitioning()); PartitioningProperty.FORCED_REBALANCED, partitioner.getInput().getGlobalProperties().getPartitioning()); assertEquals("Partitioner input channel should be forwarding", ShipStrategyType.FORWARD, partitioner.getInput().getShipStrategy()); PartitioningProperty.FORCED_REBALANCED, c.getGlobalProperties().getPartitioning()); assertEquals("Union input channel should be rebalancing", ShipStrategyType.PARTITION_FORCED_REBALANCE, c.getShipStrategy());
PartitioningProperty.FULL_REPLICATION, join.getInput1().getGlobalProperties().getPartitioning()); PartitioningProperty.FULL_REPLICATION, c.getGlobalProperties().getPartitioning()); assertEquals("Union input channel should be broadcasting", ShipStrategyType.BROADCAST, c.getShipStrategy());
groupRed1.getInput().getGlobalProperties().getPartitioningFields().isExactMatch(new FieldList(0))); assertTrue("Reduce input should be partitioned on 1.", groupRed2.getInput().getGlobalProperties().getPartitioningFields().isExactMatch(new FieldList(1)));