/** Registers the given operator's id as a root of this job specification. */
public void addRoot(IOperatorDescriptor op) {
    final OperatorDescriptorId rootId = op.getOperatorId();
    roots.add(rootId);
}
/**
 * Builds the tuple-processor operator used during LSM secondary index creation,
 * attaches the current source location, and pins it to the primary index's
 * partition constraint.
 */
protected IOperatorDescriptor createTupleProcessorOp(JobSpecification spec,
        RecordDescriptor taggedSecondaryRecDesc, int numSecondaryKeyFields, int numPrimaryKeyFields,
        boolean hasBuddyBTree) {
    IOperatorDescriptor tupleProcessor = new LSMSecondaryIndexCreationTupleProcessorOperatorDescriptor(spec,
            taggedSecondaryRecDesc, MissingWriterFactory.INSTANCE, NUM_TAG_FIELDS, numSecondaryKeyFields,
            numPrimaryKeyFields, hasBuddyBTree);
    tupleProcessor.setSourceLocation(sourceLoc);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, tupleProcessor,
            primaryPartitionConstraint);
    return tupleProcessor;
}
public FeedMetaOperatorDescriptor(final JobSpecification spec, final FeedConnectionId feedConnectionId, final IOperatorDescriptor coreOperatorDescriptor, final Map<String, String> feedPolicyProperties, final FeedRuntimeType runtimeType) { super(spec, coreOperatorDescriptor.getInputArity(), coreOperatorDescriptor.getOutputArity()); this.feedConnectionId = feedConnectionId; this.feedPolicyProperties = feedPolicyProperties; if (coreOperatorDescriptor.getOutputRecordDescriptors().length == 1) { outRecDescs[0] = coreOperatorDescriptor.getOutputRecordDescriptors()[0]; } this.coreOperator = coreOperatorDescriptor; this.runtimeType = runtimeType; }
// NOTE(review): this span appears to be corrupted -- it interleaves fragments of several
// distinct methods (meta-operator wrapping for STORE/COMPUTE feed runtimes, connector
// rewiring around a FeedCollect/replicate operator, and location-constraint collection
// keyed by operator id) and is not syntactically valid as written. Recover the original
// statements from version control before editing; do not attempt to compile this line.
OperatorDescriptorId oldId = opDesc.getOperatorId(); OperatorDescriptorId opId = null; if (opDesc instanceof LSMTreeInsertDeleteOperatorDescriptor feedPolicyEntity.getProperties(), FeedRuntimeType.STORE); opId = metaOp.getOperatorId(); opDesc.setOperatorId(opId); } else { if (opDesc instanceof AlgebricksMetaOperatorDescriptor) { subJob.getOperatorInputMap().get(opDesc.getOperatorId()).get(0); feedPolicyEntity.getProperties(), FeedRuntimeType.COMPUTE); opId = metaOp.getOperatorId(); opDesc.setOperatorId(opId); Pair<IOperatorDescriptor, Integer> leftOp = entry.getValue().getLeft(); Pair<IOperatorDescriptor, Integer> rightOp = entry.getValue().getRight(); IOperatorDescriptor leftOpDesc = jobSpec.getOperatorMap().get(leftOp.getLeft().getOperatorId()); IOperatorDescriptor rightOpDesc = jobSpec.getOperatorMap().get(rightOp.getLeft().getOperatorId()); if (leftOp.getLeft() instanceof FeedCollectOperatorDescriptor) { jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), replicateOp, iter1, leftOpDesc, opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId(); IOperatorDescriptor opDesc = jobSpec.getOperatorMap().get(operatorIdMapping.get(opId)); List<LocationConstraint> locations = operatorLocations.get(opDesc.getOperatorId()); if (locations == null) { locations = new ArrayList<>(); operatorLocations.put(opDesc.getOperatorId(), locations);
// NOTE(review): orphaned mid-method fragment (unbalanced braces; `connection`, `conn`,
// and `key` are declared outside this view). It serializes a connector's endpoints --
// producer ("in-operator-id"/"in-operator-port") and consumer
// ("out-operator-id"/"out-operator-port") -- into a JSON node. Not compilable as-is.
ObjectNode op = value.toJSON(); if (!userConstraints.isEmpty()) { connectorOpMap.get(key); if (connection != null) { conn.put("in-operator-id", connection.getLeft().getLeft().getOperatorId().toString()); conn.put("in-operator-port", connection.getLeft().getRight().intValue()); conn.put("out-operator-id", connection.getRight().getLeft().getOperatorId().toString()); conn.put("out-operator-port", connection.getRight().getRight().intValue());
/**
 * Returns the record descriptor flowing over the given connector, i.e. the output
 * record descriptor of the connector's producer operator at the producing port.
 */
public RecordDescriptor getConnectorRecordDescriptor(IConnectorDescriptor conn) {
    Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>> connInfo =
            connectorOpMap.get(conn.getConnectorId());
    Pair<IOperatorDescriptor, Integer> producerEnd = connInfo.getLeft();
    IOperatorDescriptor producerOp = producerEnd.getLeft();
    int producerPort = producerEnd.getRight();
    return producerOp.getOutputRecordDescriptors()[producerPort];
}
/**
 * Assigns the next sequential descriptor id to the given operator, registers the
 * operator in the id-to-operator map, and returns the new id.
 */
@Override
public OperatorDescriptorId createOperatorDescriptorId(IOperatorDescriptor op) {
    final OperatorDescriptorId newId = new OperatorDescriptorId(operatorIdCounter++);
    op.setOperatorId(newId);
    opMap.put(newId, op);
    return newId;
}
// Visitor callback: each operator contributes its activities to the job builder.
// NOTE(review): the enclosing anonymous-class construct begins before this chunk;
// the trailing "});" closes that construct, not this method.
@Override public void visit(IOperatorDescriptor op) { op.contributeActivities(builder); } });
// Visitor callback: each operator contributes its scheduling constraints via the acceptor.
// NOTE(review): the enclosing anonymous-class construct begins before this chunk;
// the trailing "});" closes that construct, not this method.
@Override public void visit(IOperatorDescriptor op) { op.contributeSchedulingConstraints(acceptor, ccServiceCtx); } });
/**
 * Generates the Hyracks runtime for a WRITE_RESULT logical operator: asks the metadata
 * provider for the write-result operator descriptor and its partition constraint,
 * registers both with the job builder, and wires the single input edge.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    WriteResultOperator writeResultOp = (WriteResultOperator) op;
    IMetadataProvider mp = context.getMetadataProvider();
    JobSpecification jobSpec = builder.getJobSpec();
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtime = mp.getWriteResultRuntime(dataSource,
            propagatedSchema, keys, payload, additionalFilteringKeys, context, jobSpec);
    IOperatorDescriptor descriptor = runtime.first;
    descriptor.setSourceLocation(writeResultOp.getSourceLocation());
    builder.contributeHyracksOperator(writeResultOp, descriptor);
    builder.contributeAlgebricksPartitionConstraint(descriptor, runtime.second);
    // Connect the operator's single upstream input as edge 0 -> 0.
    ILogicalOperator upstream = writeResultOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(upstream, 0, writeResultOp, 0);
}
/**
 * Invokes the visitor on the operator at most once: operators whose id is already in
 * {@code seen} are skipped, and the id is recorded so later calls are no-ops.
 *
 * @param visitor the visitor to apply
 * @param seen    ids of operators already visited; updated in place
 * @param op      the operator to (possibly) visit
 * @throws HyracksException if the visitor fails
 */
private static void visitOperator(IOperatorDescriptorVisitor visitor, Set<OperatorDescriptorId> seen,
        IOperatorDescriptor op) throws HyracksException {
    // Set.add returns true only on first insertion, collapsing the original
    // contains-then-add pair into a single lookup with identical semantics.
    if (seen.add(op.getOperatorId())) {
        visitor.visit(op);
    }
}
/**
 * Generates the Hyracks runtime for a bulk-load secondary-index INSERT: builds the
 * input record descriptor, obtains the index-insert operator and its partition
 * constraint from the metadata provider, registers both, and wires the input edge.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    IndexInsertDeleteUpsertOperator indexOp = (IndexInsertDeleteUpsertOperator) op;
    // This physical operator covers only the bulk-load INSERT case.
    assert indexOp.getOperation() == Kind.INSERT;
    assert indexOp.isBulkload();
    IMetadataProvider mp = context.getMetadataProvider();
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    JobSpecification jobSpec = builder.getJobSpec();
    ILogicalOperator inputOp = op.getInputs().get(0).getValue();
    RecordDescriptor inputDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(inputOp), inputSchemas[0], context);
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtime =
            mp.getIndexInsertRuntime(dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys,
                    secondaryKeys, additionalFilteringKeys, filterExpr, inputDesc, context, jobSpec, true);
    IOperatorDescriptor descriptor = runtime.first;
    descriptor.setSourceLocation(indexOp.getSourceLocation());
    builder.contributeHyracksOperator(indexOp, descriptor);
    builder.contributeAlgebricksPartitionConstraint(descriptor, runtime.second);
    ILogicalOperator src = indexOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, indexOp, 0);
}
/**
 * Wires a connector between a producer operator port and a consumer operator port,
 * recording it in the per-operator input/output maps and in the
 * connector-to-endpoints map.
 */
public void connect(IConnectorDescriptor conn, IOperatorDescriptor producerOp, int producerPort,
        IOperatorDescriptor consumerOp, int consumerPort) {
    insertIntoIndexedMap(opInputMap, consumerOp.getOperatorId(), consumerPort, conn);
    insertIntoIndexedMap(opOutputMap, producerOp.getOperatorId(), producerPort, conn);
    Pair<IOperatorDescriptor, Integer> producerEnd = Pair.of(producerOp, producerPort);
    Pair<IOperatorDescriptor, Integer> consumerEnd = Pair.of(consumerOp, consumerPort);
    connectorOpMap.put(conn.getConnectorId(), Pair.of(producerEnd, consumerEnd));
}
/**
 * Generates the Hyracks runtime for a bulk-load primary INSERT: builds the input
 * record descriptor, obtains the insert operator and its partition constraint from
 * the metadata provider, registers both, and wires the input edge.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    InsertDeleteUpsertOperator insertOp = (InsertDeleteUpsertOperator) op;
    // This physical operator covers only the bulk-load INSERT case.
    assert insertOp.getOperation() == Kind.INSERT;
    assert insertOp.isBulkload();
    IMetadataProvider mp = context.getMetadataProvider();
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    JobSpecification jobSpec = builder.getJobSpec();
    ILogicalOperator inputOp = op.getInputs().get(0).getValue();
    RecordDescriptor inputDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(inputOp), inputSchemas[0], context);
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtime =
            mp.getInsertRuntime(dataSource, propagatedSchema, typeEnv, primaryKeys, payload,
                    additionalFilteringKeys, additionalNonFilterVars, inputDesc, context, jobSpec, true);
    IOperatorDescriptor descriptor = runtime.first;
    descriptor.setSourceLocation(insertOp.getSourceLocation());
    builder.contributeHyracksOperator(insertOp, descriptor);
    builder.contributeAlgebricksPartitionConstraint(descriptor, runtime.second);
    ILogicalOperator src = insertOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, insertOp, 0);
}
/** Returns the connector feeding the given operator at the given input index. */
public IConnectorDescriptor getInputConnectorDescriptor(IOperatorDescriptor op, int inputIndex) {
    OperatorDescriptorId opId = op.getOperatorId();
    return getInputConnectorDescriptor(opId, inputIndex);
}
/**
 * Generates the Hyracks tokenizer runtime for a TOKENIZE logical operator. Only
 * bulk-load INSERT pipelines are supported; anything else is rejected.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    TokenizeOperator tokenizeOp = (TokenizeOperator) op;
    boolean bulkLoadInsert = tokenizeOp.getOperation() == Kind.INSERT && tokenizeOp.isBulkload();
    if (!bulkLoadInsert) {
        throw new AlgebricksException("Tokenize Operator only works when bulk-loading data.");
    }
    IMetadataProvider mp = context.getMetadataProvider();
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    JobSpecification jobSpec = builder.getJobSpec();
    ILogicalOperator inputOp = op.getInputs().get(0).getValue();
    RecordDescriptor inputDesc =
            JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(inputOp), inputSchemas[0], context);
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtime =
            mp.getTokenizerRuntime(dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys,
                    secondaryKeys, null, inputDesc, context, jobSpec, true);
    IOperatorDescriptor descriptor = runtime.first;
    descriptor.setSourceLocation(tokenizeOp.getSourceLocation());
    builder.contributeHyracksOperator(tokenizeOp, descriptor);
    builder.contributeAlgebricksPartitionConstraint(descriptor, runtime.second);
    ILogicalOperator src = tokenizeOp.getInputs().get(0).getValue();
    builder.contributeGraphEdge(src, 0, tokenizeOp, 0);
}
/** Returns the connector leaving the given operator at the given output index. */
public IConnectorDescriptor getOutputConnectorDescriptor(IOperatorDescriptor op, int outputIndex) {
    OperatorDescriptorId opId = op.getOperatorId();
    return getOutputConnectorDescriptor(opId, outputIndex);
}
/**
 * Generates the runtime for an unnest-map that performs an external-data lookup.
 * Calls to functions other than EXTERNAL_LOOKUP are ignored; a non-function-call
 * expression indicates a planner bug.
 */
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
        IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
        throws AlgebricksException {
    UnnestMapOperator unnestMap = (UnnestMapOperator) op;
    ILogicalExpression unnestExpr = unnestMap.getExpressionRef().getValue();
    if (unnestExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
        throw new IllegalStateException();
    }
    AbstractFunctionCallExpression callExpr = (AbstractFunctionCallExpression) unnestExpr;
    if (!callExpr.getFunctionIdentifier().equals(BuiltinFunctions.EXTERNAL_LOOKUP)) {
        // Only EXTERNAL_LOOKUP calls are handled here.
        return;
    }
    int[] ridIndexes = getKeyIndexes(ridVarList, inputSchemas);
    IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op);
    MetadataProvider metadataProvider = (MetadataProvider) context.getMetadataProvider();
    Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> lookupRuntime =
            metadataProvider.buildExternalDataLookupRuntime(builder.getJobSpec(), dataset, ridIndexes,
                    retainInput, typeEnv, opSchema, context, metadataProvider, retainMissing);
    IOperatorDescriptor lookupOpDesc = lookupRuntime.first;
    lookupOpDesc.setSourceLocation(unnestMap.getSourceLocation());
    builder.contributeHyracksOperator(unnestMap, lookupOpDesc);
    builder.contributeAlgebricksPartitionConstraint(lookupOpDesc, lookupRuntime.second);
    ILogicalOperator srcExchange = unnestMap.getInputs().get(0).getValue();
    builder.contributeGraphEdge(srcExchange, 0, unnestMap, 0);
}
private void setPartitionConstraintsTopdown(OperatorDescriptorId opId, Map<IConnectorDescriptor, TargetConstraint> tgtConstraints, IOperatorDescriptor parentOp) { List<IConnectorDescriptor> opInputs = jobSpec.getOperatorInputMap().get(opId); AlgebricksPartitionConstraint opConstraint; IOperatorDescriptor opDesc = jobSpec.getOperatorMap().get(opId); if (opInputs != null) { for (IConnectorDescriptor conn : opInputs) { ConnectorDescriptorId cid = conn.getConnectorId(); org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> p = jobSpec.getConnectorOperatorMap().get(cid); IOperatorDescriptor src = p.getLeft().getLeft(); TargetConstraint constraint = tgtConstraints.get(conn); if (constraint != null) { if (constraint == TargetConstraint.SAME_COUNT) { opConstraint = partitionConstraintMap.get(opDesc); if (partitionConstraintMap.get(src) == null) { if (opConstraint != null) { partitionConstraintMap.put(src, opConstraint); AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, src, opConstraint); } } } } // Post Order DFS setPartitionConstraintsTopdown(src.getOperatorId(), tgtConstraints, opDesc); } } }
// NOTE(review): orphaned fragment -- these statements duplicate the tail of the
// InsertDeleteUpsert contributeRuntimeOperator above and lack an enclosing method;
// likely a chunking artifact rather than compilable code. Remove or re-home after
// checking version control.
opDesc.setSourceLocation(insertDeleteOp.getSourceLocation()); builder.contributeHyracksOperator(insertDeleteOp, opDesc); builder.contributeAlgebricksPartitionConstraint(opDesc, runtimeAndConstraints.second);