/**
 * Registers a directed edge src -> dest in the operator graph, recording it in
 * both adjacency maps: dest is stored at position srcOutputIndex in src's output
 * list, and src at position destInputIndex in dest's input list.
 */
@Override
public void contributeGraphEdge(ILogicalOperator src, int srcOutputIndex, ILogicalOperator dest,
        int destInputIndex) {
    // Forward direction: src's output port srcOutputIndex feeds dest.
    addAtPos(outEdges.computeIfAbsent(src, op -> new ArrayList<>()), dest, srcOutputIndex);
    // Reverse direction: dest's input port destInputIndex is fed by src.
    addAtPos(inEdges.computeIfAbsent(dest, op -> new ArrayList<>()), src, destInputIndex);
}
/**
 * Assembles the meta (AlgebricksMeta) operators: every registered micro operator is
 * first folded into the skeleton of its enclosing meta operator, then each completed
 * skeleton is turned into an AlgebricksMetaOperatorDescriptor.
 */
private void buildAsterixComponents() {
    // Pass 1: attach each micro operator to its meta-operator skeleton.
    microOps.keySet().forEach(this::addMicroOpToMetaRuntimeOp);
    // Pass 2: materialize one descriptor per assembled skeleton.
    metaAsterixOpSkeletons.forEach(
            (opId, contents) -> metaAsterixOps.put(opId, buildMetaAsterixOpDesc(contents)));
}
/**
 * Finalizes the Hyracks job specification: builds the meta operators, wires up all
 * connectors, marks the descriptor of every plan root as a job root, and finally
 * propagates partition constraints across the whole job.
 *
 * @param roots the root operators of the compiled logical plan
 * @throws AlgebricksException if connector setup or constraint assignment fails
 */
@Override
public void buildSpec(List<ILogicalOperator> roots) throws AlgebricksException {
    buildAsterixComponents();
    Map<IConnectorDescriptor, TargetConstraint> constraintsPerConnector = setupConnectors();
    for (ILogicalOperator root : roots) {
        jobSpec.addRoot(findOpDescForAlgebraicOp(root));
    }
    setAllPartitionConstraints(constraintsPerConnector);
}
private JobSpecification compilePlanImpl(ILogicalPlan plan, boolean isNestedPlan, IOperatorSchema outerPlanSchema, IJobletEventListenerFactory jobEventListenerFactory) throws AlgebricksException { JobSpecification spec = new JobSpecification(context.getFrameSize()); if (jobEventListenerFactory != null) { spec.setJobletEventListenerFactory(jobEventListenerFactory); } List<ILogicalOperator> rootOps = new ArrayList<>(); JobBuilder builder = new JobBuilder(spec, context.getClusterLocations()); for (Mutable<ILogicalOperator> opRef : plan.getRoots()) { compileOpRef(opRef, spec, builder, outerPlanSchema); rootOps.add(opRef.getValue()); } reviseEdges(builder); operatorVisitedToParents.clear(); builder.buildSpec(rootOps); spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy()); // Do not do activity cluster planning because it is slow on large clusters spec.setUseConnectorPolicyForScheduling(false); if (isNestedPlan) { spec.setMetaOps(builder.getGeneratedMetaOps()); } return spec; }
IOperatorDescriptor src = p.getLeft().getLeft(); setPartitionConstraintsBottomup(src.getOperatorId(), tgtConstraints, opDesc, finalPass); TargetConstraint constraint = tgtConstraints.get(conn); if (constraint != null) { switch (constraint) { case ONE: opConstraint = composePartitionConstraints(opConstraint, countOneLocation); break; case SAME_COUNT: opConstraint = composePartitionConstraints(opConstraint, partitionConstraintMap.get(src)); break;
/**
 * Registers a micro operator with no partition constraint; convenience overload that
 * delegates to the four-argument variant with a null constraint.
 */
@Override
public void contributeMicroOperator(ILogicalOperator op, IPushRuntimeFactory runtime,
        RecordDescriptor recDesc) {
    contributeMicroOperator(op, runtime, recDesc, null);
}
/**
 * Wires every registered exchange into the job spec: for each exchange operator the
 * upstream (producer) and downstream (consumer) operator descriptors are connected on
 * the ports corresponding to the exchange's position in their edge lists.
 *
 * @return the target constraint recorded for each connector that declared one
 * @throws AlgebricksException if an operator descriptor cannot be resolved
 */
private Map<IConnectorDescriptor, TargetConstraint> setupConnectors() throws AlgebricksException {
    Map<IConnectorDescriptor, TargetConstraint> targetConstraints = new HashMap<>();
    for (ILogicalOperator exchange : connectors.keySet()) {
        Pair<IConnectorDescriptor, TargetConstraint> connAndConstraint = connectors.get(exchange);
        // The exchange's single input edge is the producer, its single output edge the consumer.
        ILogicalOperator producer = inEdges.get(exchange).get(0);
        ILogicalOperator consumer = outEdges.get(exchange).get(0);
        // Port numbers are the exchange's position among its neighbors' edges.
        int producerPort = outEdges.get(producer).indexOf(exchange);
        int consumerPort = inEdges.get(consumer).indexOf(exchange);
        jobSpec.connect(connAndConstraint.first, findOpDescForAlgebraicOp(producer), producerPort,
                findOpDescForAlgebraicOp(consumer), consumerPort);
        if (connAndConstraint.second != null) {
            targetConstraints.put(connAndConstraint.first, connAndConstraint.second);
        }
    }
    return targetConstraints;
}
private void addMicroOpToMetaRuntimeOp(ILogicalOperator aop) { Integer k = algebraicOpBelongingToMetaAsterixOp.get(aop); if (k == null) { k = createNewMetaOpInfo(aop);