@Override /** * Used to construct a Job of empty lists at the appropriate depth in the * event of a completion hitting the crystalizer before it sees a child * node, i.e. the result of iterating over an empty collection structure of * some kind. */ public Job getEmptyJob(String owningProcess, int[] index, InvocationContext context) { int wrappingDepth = parent.resultWrappingDepth; if (wrappingDepth < 0) throw new RuntimeException( "Processor hasn't been configured, cannot emit empty job"); // The wrapping depth is the length of index array that would be used if // a single item of the output port type were returned. We can examine // the index array for the node we're trying to create and use this to // work out how much we need to add to the output port depth to create // empty lists of the right type given the index array. int depth = wrappingDepth - index.length; // TODO - why was this incrementing? // depth++; DataManager dManager = context.getDataManager(); Map<String, EntityIdentifier> emptyJobMap = new HashMap<String, EntityIdentifier>(); for (OutputPort op : parent.getOutputPorts()) { emptyJobMap.put(op.getName(), dManager.registerEmptyList(depth + op.getDepth())); } return new Job(owningProcess, index, emptyJobMap, context); }
// NOTE(review): fragment of a larger method - the enclosing scope is not
// visible here. For each output name, the accumulated identifier list is
// registered with the data manager and the resulting list identifier is
// recorded in newDataMap. Presumably this builds the data map for an
// outgoing result event - confirm against the full file.
for (String outputName : listItems.keySet()) {
	List<EntityIdentifier> idlist = listItems.get(outputName);
	newDataMap.put(outputName, context.getDataManager().registerList(
			idlist.toArray(new EntityIdentifier[0])));
// NOTE(review): fragment - start of a Runnable (or Thread) body that wraps
// the callback's data manager in a DataFacade; the remainder of run() is
// not visible here.
public void run() {
	DataFacade dataFacade = new DataFacade(callback.getContext()
			.getDataManager());
/**
 * Construct and send a new result message with error documents in place of
 * all outputs at the appropriate depth.
 *
 * @param e
 *            the event whose owning process, index and context are reused
 *            for the error-bearing result event
 */
private void sendErrorOutput(Event<?> e) {
	DataManager manager = e.getContext().getDataManager();
	// Bind every output port of the processor to a registered error
	// document at that port's declared depth.
	Map<String, EntityIdentifier> errorDataMap = new HashMap<String, EntityIdentifier>();
	for (OutputPort port : dispatchStack.getProcessor().getOutputPorts()) {
		EntityIdentifier errorRef = manager.registerError(port.getDepth(),
				0, "No message...");
		errorDataMap.put(port.getName(), errorRef);
	}
	// Propagate the synthetic (non-streaming) result up the dispatch stack.
	getAbove().receiveResult(
			new DispatchResultEvent(e.getOwningProcess(), e.getIndex(), e
					.getContext(), errorDataMap, false));
}
// NOTE(review): fragment - the rest of this loop body is not visible here.
// For each port named in the job's data map, the corresponding data
// reference, the data manager, the named input port node and its declared
// cardinality (the depth the port expects) are captured; presumably the
// missing tail compares depths and routes the data accordingly - confirm
// against the full file.
for (String portName : j.getData().keySet()) {
	EntityIdentifier dataRef = j.getData().get(portName);
	DataManager manager = e.getContext().getDataManager();
	NamedInputPortNode ipn = nodeForName(portName);
	int desiredDepth = ipn.getCardinality();
// NOTE(review): fragment - start of a Runnable body; same DataFacade
// construction pattern as the other run() fragment in this SOURCE, but the
// remainder of the method is not visible here.
public void run() {
	final DataFacade dataFacade = new DataFacade(callback
			.getContext().getDataManager());
// NOTE(review): fragment, apparently duplicated test-harness code. The same
// sequence (register two empty data documents, wrap them in a list, feed
// the list to input "a" of is1) appears twice, and the second copy
// redeclares idsInList - which would not compile in the same scope. The
// closing braces of both for loops are also missing from this view. Likely
// a copy/paste or extraction artifact - confirm against the full file
// before changing anything.
List<EntityIdentifier> idsInList = new ArrayList<EntityIdentifier>();
for (int j = 0; j < 2; j++) {
	DataDocumentIdentifier ddocIdentifier = context.getDataManager()
			.registerDocument(Collections
					.<ReferenceScheme> emptySet());
	idsInList.add(ddocIdentifier);
	EntityListIdentifier dataReference = context.getDataManager()
			.registerList(idsInList.toArray(new EntityIdentifier[0]));
	is1.receiveData("a", owningProcess, new int[] { i }, dataReference,
			context);
	List<EntityIdentifier> idsInList = new ArrayList<EntityIdentifier>();
	for (int j = 0; j < 2; j++) {
		DataDocumentIdentifier ddocIdentifier = context.getDataManager()
				.registerDocument(Collections
						.<ReferenceScheme> emptySet());
		idsInList.add(ddocIdentifier);
		EntityListIdentifier dataReference = context.getDataManager()
				.registerList(idsInList.toArray(new EntityIdentifier[0]));
public void pushToken(WorkflowDataToken dt, String owningProcess, int desiredDepth) { if (dt.getData().getDepth() == desiredDepth) { // System.out.println("** Job : "+dt.getData()); pushData(getName(), owningProcess, dt.getIndex(), dt.getData(), dt.getContext()); } else { DataManager dManager = dt.getContext().getDataManager(); Iterator<ContextualizedIdentifier> children = dManager.traverse(dt .getData(), dt.getData().getDepth() - 1); while (children.hasNext()) { ContextualizedIdentifier ci = children.next(); int[] newIndex = new int[dt.getIndex().length + ci.getIndex().length]; int i = 0; for (int indx : dt.getIndex()) { newIndex[i++] = indx; } for (int indx : ci.getIndex()) { newIndex[i++] = indx; } pushToken(new WorkflowDataToken(owningProcess, newIndex, ci .getDataRef(), dt.getContext()), owningProcess, desiredDepth); } // System.out.println("** Completion : "+dt.getData()); pushCompletion(getName(), owningProcess, dt.getIndex(), dt.getContext()); } }
// NOTE(review): fragment - the surrounding method is not visible here.
// Captures the data manager, the token's data reference and a starting
// depth (tokenDepth, defined elsewhere) - presumably the setup for a
// depth-walking loop over the reference; confirm against the full file.
DataManager dManager = token.getContext().getDataManager();
EntityIdentifier ref = token.getData();
int currentDepth = tokenDepth;