public void fire(String owningProcess, InvocationContext context) { String newOwningProcess = owningProcess + ":" + getLocalName(); if (tokenReceived(newOwningProcess, context)) { // This is not good - should ideally handle it as it means the // workflow has been fired when in a state where this wasn't // sensible, i.e. already having been fired on this process // identifier. For now we'll ignore it (ho hum, release deadline // etc!) } // The code below now happens in the tokenReceived method, we need to // fire any processors which don't have dependencies when a new token // arrives and we weren't doing that anywhere. /** * for (Processor p : getEntities(Processor.class)) { if * (p.getInputPorts().isEmpty()) { p.fire(newOwningProcess, context); } } */ }
/**
 * Creates a new, empty dataflow instance.
 *
 * @return a freshly constructed implementation exposed via the
 *         {@code Dataflow} interface
 */
public Dataflow createDataflow() {
    final Dataflow created = new DataflowImpl();
    return created;
}
@Override protected void undoEditAction(DataflowImpl dataflow) { if (merge instanceof MergeImpl) { try { dataflow.addMerge((MergeImpl) merge); } catch (NamingException e) { //a merge with this name has already been removed } } }
/**
 * Returns a human-readable description combining this dataflow's local
 * name and its internal identifier.
 */
@Override
public String toString() {
    StringBuilder description = new StringBuilder("Dataflow ");
    description.append(getLocalName());
    description.append('[');
    description.append(getIdentifier());
    description.append(']');
    return description.toString();
}
for (Datalink dl : getLinks()) { if (dl instanceof DatalinkImpl) { DatalinkImpl dli = (DatalinkImpl) dl; for (DataflowInputPort dip : getInputPorts()) { for (Datalink dl : dip.getInternalOutputPort().getOutgoingLinks()) { if (dl instanceof DatalinkImpl) { getEntities(TokenProcessingEntity.class)); boolean dataflowIsIncomplete = getProcessors().isEmpty() && getOutputPorts().isEmpty();
/** * For each processor input, merge input and workflow output get the * incoming link and, if non null, add to a list and return the entire list. */ public synchronized List<? extends Datalink> getLinks() { List<Datalink> result = new ArrayList<Datalink>(); // All processors have a set of input ports each of which has at most // one incoming data link for (TokenProcessingEntity p : getEntities(TokenProcessingEntity.class)) { for (EventHandlingInputPort pip : p.getInputPorts()) { Datalink dl = pip.getIncomingLink(); if (dl != null) { result.add(dl); } } } // Workflow outputs have zero or one incoming data link to their // internal input port for (DataflowOutputPort dop : getOutputPorts()) { Datalink dl = dop.getInternalInputPort().getIncomingLink(); if (dl != null) { result.add(dl); } } return result; }
for (Datalink dl : getLinks()) { if (dl instanceof DatalinkImpl) { DatalinkImpl dli = (DatalinkImpl) dl; for (DataflowInputPort dip : getInputPorts()) { for (Datalink dl : dip.getInternalOutputPort().getOutgoingLinks()) { if (dl instanceof DatalinkImpl) { getEntities(TokenProcessingEntity.class));
/**
 * Integration test: translates a SCUFL workflow file and checks that the
 * resulting dataflow's name was carried over and that validation reports
 * no unsatisfied entities, failed entities or unresolved outputs.
 */
@Ignore("Integration test")
@Test
public void translateAndValidateTest() throws Exception {
    DataflowImpl dataflow = (DataflowImpl) translateScuflFile("ModifiedBiomartAndEMBOSSAnalysis.xml");
    assertEquals("the name should have been transferred during the translation",
            "BiomartAndEMBOSSAnalysis", dataflow.getLocalName());
    DataflowValidationReport report = dataflow.checkValidity();
    for (TokenProcessingEntity entity : report.getUnsatisfiedEntities()) {
        System.out.println(entity.getLocalName());
    }
    assertTrue(report.getUnsatisfiedEntities().isEmpty());
    for (TokenProcessingEntity entity : report.getFailedEntities()) {
        System.out.println(entity.getLocalName());
    }
    assertTrue(report.getFailedEntities().isEmpty());
    for (DataflowOutputPort output : report.getUnresolvedOutputs()) {
        System.out.println(output.getName());
    }
    assertTrue(report.getUnresolvedOutputs().isEmpty());
}
if (alreadyReceivedPortNames.size() == getOutputPorts().size()) {
@Override protected void undoEditAction(DataflowImpl dataflow) { if (processor instanceof ProcessorImpl) { try { dataflow.addProcessor((ProcessorImpl) processor); } catch (NamingException e) { //a processor with this name has already been removed } } }
/**
 * Verifies that undoing a create-input-port edit removes the port the
 * edit added, leaving the dataflow with no input ports again.
 */
@Test
public void testUndoEditAction() throws EditException {
    CreateDataflowInputPortEdit createEdit = new CreateDataflowInputPortEdit(
            dataflow, portName, portDepth, portGranularDepth);
    assertEquals(0, dataflow.getInputPorts().size());
    createEdit.doEditAction(dataflow);
    createEdit.undoEditAction(dataflow);
    assertEquals(0, dataflow.getInputPorts().size());
}
/**
 * Gathers all named workflow entities assignable to the given type from
 * both the processor and merge collections.
 *
 * @param entityType the class of entity to select
 * @return an unmodifiable list of the matching entities
 */
public <T extends NamedWorkflowEntity> List<? extends T> getEntities(
        Class<T> entityType) {
    List<T> matches = new ArrayList<T>();
    filterAndAdd(processors, matches, entityType);
    filterAndAdd(merges, matches, entityType);
    return Collections.unmodifiableList(matches);
}
/**
 * Applies this edit by creating the named output port on the dataflow.
 *
 * @throws EditException if the port cannot be created, e.g. because a
 *         port with this name already exists
 */
@Override protected void doEditAction(DataflowImpl dataflow) throws EditException { dataflow.createOutputPort(newPortName); }
/**
 * Applies this edit by creating the named input port on the dataflow with
 * the configured depth and granular depth.
 *
 * @throws EditException if the port cannot be created, e.g. because a
 *         port with this name already exists
 */
@Override protected void doEditAction(DataflowImpl dataflow) throws EditException { dataflow.createInputPort(newPortName, newPortDepth, newPortGranularDepth); }
/**
 * Translates a SCUFL workflow file and checks that validation reports no
 * unsatisfied entities, failed entities or unresolved outputs.
 *
 * NOTE(review): sibling copies of this test are annotated
 * {@code @Ignore("Integration test")} - consider whether this copy should
 * be marked the same way.
 */
@Test
public void translateAndValidateTest() throws Exception {
    DataflowImpl dataflow = (DataflowImpl) translateScuflFile("ModifiedBiomartAndEMBOSSAnalysis.xml");
    DataflowValidationReport report = dataflow.checkValidity();
    for (TokenProcessingEntity entity : report.getUnsatisfiedEntities()) {
        System.out.println(entity.getLocalName());
    }
    assertTrue(report.getUnsatisfiedEntities().isEmpty());
    for (TokenProcessingEntity entity : report.getFailedEntities()) {
        System.out.println(entity.getLocalName());
    }
    assertTrue(report.getFailedEntities().isEmpty());
    for (DataflowOutputPort output : report.getUnresolvedOutputs()) {
        System.out.println(output.getName());
    }
    assertTrue(report.getUnresolvedOutputs().isEmpty());
}
/** * For each processor input, merge input and workflow output get the * incoming link and, if non null, add to a list and return the entire list. */ public synchronized List<? extends Datalink> getLinks() { List<Datalink> result = new ArrayList<Datalink>(); // All processors have a set of input ports each of which has at most // one incoming data link for (TokenProcessingEntity p : getEntities(TokenProcessingEntity.class)) { for (EventHandlingInputPort pip : p.getInputPorts()) { Datalink dl = pip.getIncomingLink(); if (dl != null) { result.add(dl); } } } // Workflow outputs have zero or one incoming data link to their // internal input port for (DataflowOutputPort dop : getOutputPorts()) { Datalink dl = dop.getInternalInputPort().getIncomingLink(); if (dl != null) { result.add(dl); } } return result; }
for (Datalink dl : getLinks()) { if (dl instanceof DatalinkImpl) { DatalinkImpl dli = (DatalinkImpl) dl; for (DataflowInputPort dip : getInputPorts()) { for (Datalink dl : dip.getInternalOutputPort().getOutgoingLinks()) { if (dl instanceof DatalinkImpl) { getEntities(TokenProcessingEntity.class));
/**
 * Integration test covering SCUFL translation followed by validation:
 * the translated dataflow must keep its original name and the validity
 * report must be completely clean.
 */
@Ignore("Integration test")
@Test
public void translateAndValidateTest() throws Exception {
    DataflowImpl dataflow = (DataflowImpl) translateScuflFile("ModifiedBiomartAndEMBOSSAnalysis.xml");
    assertEquals("the name should have been transferred during the translation",
            "BiomartAndEMBOSSAnalysis", dataflow.getLocalName());
    DataflowValidationReport report = dataflow.checkValidity();
    for (TokenProcessingEntity unsatisfied : report.getUnsatisfiedEntities()) {
        System.out.println(unsatisfied.getLocalName());
    }
    assertTrue(report.getUnsatisfiedEntities().isEmpty());
    for (TokenProcessingEntity failed : report.getFailedEntities()) {
        System.out.println(failed.getLocalName());
    }
    assertTrue(report.getFailedEntities().isEmpty());
    for (DataflowOutputPort unresolved : report.getUnresolvedOutputs()) {
        System.out.println(unresolved.getName());
    }
    assertTrue(report.getUnresolvedOutputs().isEmpty());
}
/**
 * Verifies that undoing a create-output-port edit removes the port the
 * edit added, leaving the dataflow with no output ports again.
 */
@Test
public void testUndoEditAction() throws EditException {
    CreateDataflowOutputPortEdit createEdit =
            new CreateDataflowOutputPortEdit(dataflow, portName);
    assertEquals(0, dataflow.getOutputPorts().size());
    createEdit.doEditAction(dataflow);
    createEdit.undoEditAction(dataflow);
    assertEquals(0, dataflow.getOutputPorts().size());
}
@Override protected void undoEditAction(DataflowImpl dataflow) { if (processor instanceof ProcessorImpl) { try { dataflow.addProcessor((ProcessorImpl) processor); } catch (NamingException e) { //a processor with this name has already been removed } } }