/**
 * Fail over to the next activity in the job's activity list. Returns
 * false when every activity has already been attempted, true when the
 * job was re-dispatched against the next candidate activity.
 */
@Override
public boolean handleError() {
	currentActivityIndex++;
	if (currentActivityIndex == jobEvent.getActivities().size()) {
		// No more activities left to try; propagate the failure
		return false;
	}
	// Re-dispatch the same job, restricted to the single next activity
	List<Activity<?>> nextActivityOnly = new ArrayList<Activity<?>>();
	nextActivityOnly.add(jobEvent.getActivities().get(currentActivityIndex));
	getBelow().receiveJob(
			new DispatchJobEvent(jobEvent.getOwningProcess(), jobEvent
					.getIndex(), jobEvent.getContext(), jobEvent.getData(),
					nextActivityOnly));
	return true;
}
}
/**
 * Returns a copy of this event whose owning-process identifier has had
 * its last local process name removed; index, context, data and
 * activities are shared with this event.
 *
 * @throws ProcessIdentifierException
 *             if the identifier cannot be popped
 */
@Override
public DispatchJobEvent popOwningProcess()
		throws ProcessIdentifierException {
	String poppedOwner = popOwner();
	return new DispatchJobEvent(poppedOwner, index, context, dataMap,
			activities);
}
/**
 * Returns a copy of this event whose owning-process identifier has the
 * given local process name appended; index, context, data and
 * activities are shared with this event.
 *
 * @param localProcessName
 *            the name to append to the owning process identifier
 * @throws ProcessIdentifierException
 *             if the identifier cannot be pushed
 */
@Override
public DispatchJobEvent pushOwningProcess(String localProcessName)
		throws ProcessIdentifierException {
	String pushedOwner = pushOwner(localProcessName);
	return new DispatchJobEvent(pushedOwner, index, context, dataMap,
			activities);
}
/**
 * Report a failed invocation: log a warning, deregister this
 * invocation's monitor node, then push a DispatchErrorEvent up to the
 * layer above.
 *
 * @param message
 *            human-readable description of the failure
 * @param t
 *            cause of the failure (may be null)
 * @param errorType
 *            dispatch-level classification of the error
 */
public void fail(String message, Throwable t, DispatchErrorType errorType) {
	logger.warn("Failed (" + errorType + ") invoking " + asyncActivity
			+ " for job " + jobEvent + ": " + message, t);
	MonitorManager.getInstance().deregisterNode(invocationProcessIdentifier);
	DispatchErrorEvent errorEvent = new DispatchErrorEvent(jobEvent
			.getOwningProcess(), jobEvent.getIndex(),
			jobEvent.getContext(), message, t, errorType, asyncActivity);
	getAbove().receiveError(errorEvent);
}
// NOTE(review): this span appears to be a truncated extraction of an
// activity-invocation loop — the receiver of .pushOwningProcess() and
// several surrounding statements/closing braces are missing, so it does
// not compile as-is. It seems to register a monitor node per activity and
// remap job inputs through the activity's input-port mapping — TODO
// confirm against the full original source before editing.
for (Activity<?> activity : jobEvent.getActivities()) { .pushOwningProcess(getNextProcessID()) .getOwningProcess(); MonitorManager.getInstance().registerNode(activity, invocationProcessIdentifier.split(":"), final ReferenceService refService = jobEvent.getContext() .getReferenceService(); for (String inputName : jobEvent.getData().keySet()) { String activityInputName = asyncActivity .getInputPortMapping().get(inputName); if (activityInputName != null) { inputData.put(activityInputName, jobEvent.getData() .get(inputName));
private DispatchJobEvent prepareNewJobEvent( Map<String, T2Reference> data, AbstractDispatchEvent dispatchEvent) { DispatchJobEvent dispatchJobEvent = (DispatchJobEvent) dispatchEvent; Map<String, T2Reference> newInputs = new HashMap<String, T2Reference>( dispatchJobEvent.getData()); newInputs.putAll(data); DispatchJobEvent newJobEvent = new DispatchJobEvent(dispatchEvent .getOwningProcess(), dispatchEvent.getIndex(), dispatchEvent.getContext(), newInputs, ((DispatchJobEvent) dispatchEvent).getActivities()); // TODO: Should this be registered as an incomingJobs? If so the // conditional // could even feed to itself, and we should also keep a list of // originalJobs. return newJobEvent; }
/** * If the job contains errors, or collections which contain errors * themselves then bounce a result message with error documents in back up * to the layer above */ @Override public void receiveJob(DispatchJobEvent jobEvent) { Set<T2Reference> errorReferences = new HashSet<T2Reference>(); for (T2Reference ei : jobEvent.getData().values()) { if (ei.containsErrors()) { errorReferences.add(ei); } } if (errorReferences.isEmpty()) { // relay the message down... getBelow().receiveJob(jobEvent); } else { getState(jobEvent.getOwningProcess()) .incrementErrorsReflected(); sendErrorOutput(jobEvent, null, errorReferences); } }
/**
 * Remove the specified pending retry job from the cache. A no-op when no
 * jobs are recorded for the given owning process.
 *
 * @param owningProcess
 *            identifier of the owning process
 * @param index
 *            index of the job to remove
 */
private void forget(String owningProcess, int[] index) {
	List<JobState> activeJobs = stateMap.get(owningProcess);
	if (activeJobs == null) {
		// No jobs recorded for this process - nothing to forget.
		// (Previously this dereferenced null and threw an NPE.)
		return;
	}
	// Iterate over a snapshot so removal doesn't invalidate the iterator
	for (JobState rs : new ArrayList<JobState>(activeJobs)) {
		if (identicalIndex(rs.jobEvent.getIndex(), index)) {
			activeJobs.remove(rs);
		}
	}
}
/**
 * Record the given job in the per-process state list, creating the list
 * on demand via getJobsDefault.
 */
protected void addJobToStateList(DispatchJobEvent jobEvent) {
	List<JobState> jobs = getJobsDefault(jobEvent.getOwningProcess());
	// Guard the add - other threads may touch this list concurrently
	synchronized (jobs) {
		jobs.add(getStateObject(jobEvent));
	}
}
// NOTE(review): truncated fragment - the DispatchJobEvent constructor
// call is cut off mid-argument-list (data map and activity list missing)
// and does not compile as-is; recover the remainder from the original
// source before editing.
getBelow() .receiveJob( new DispatchJobEvent(e .getOwningProcess(), e .getIndex(), e.getContext(),
/**
 * Expose the invocation context of the job event being processed.
 *
 * @return the job event's invocation context
 */
public InvocationContext getContext() {
	InvocationContext jobContext = jobEvent.getContext();
	return jobContext;
}
/**
 * Look up the input data for a previously received job. Returns the
 * job's data map when the cached event is a DispatchJobEvent, otherwise
 * an empty map.
 *
 * @param jobIdentifier
 *            key under which the incoming event was cached
 */
private Map<String, T2Reference> getInData(String jobIdentifier) {
	AbstractDispatchEvent cachedEvent;
	// incomingJobs is shared; read it under its own lock
	synchronized (incomingJobs) {
		cachedEvent = incomingJobs.get(jobIdentifier);
	}
	if (cachedEvent instanceof DispatchJobEvent)
		return ((DispatchJobEvent) cachedEvent).getData();
	return new HashMap<String, T2Reference>();
}
/**
 * Report a failed invocation: deregister this invocation's monitor node
 * and push a DispatchErrorEvent up to the layer above.
 *
 * @param message
 *            human-readable description of the failure
 * @param t
 *            cause of the failure (may be null)
 * @param errorType
 *            dispatch-level classification of the error
 */
public void fail(String message, Throwable t, DispatchErrorType errorType) {
	MonitorManager.getInstance().deregisterNode(invocationProcessIdentifier);
	DispatchErrorEvent errorEvent = new DispatchErrorEvent(jobEvent
			.getOwningProcess(), jobEvent.getIndex(),
			jobEvent.getContext(), message, t, errorType, as);
	getAbove().receiveError(errorEvent);
}
// NOTE(review): truncated fragment - the receiver of
// .pushOwningProcess() and several surrounding statements/closing braces
// are missing, so this does not compile as-is. It appears to register a
// monitor node per activity and remap job inputs through the activity's
// input-port mapping — TODO confirm against the full original source
// before editing.
public void receiveJob(final DispatchJobEvent jobEvent) { for (Activity<?> a : jobEvent.getActivities()) { .pushOwningProcess(getNextProcessID()) .getOwningProcess(); MonitorManager.getInstance().registerNode(a, invocationProcessIdentifier.split(":"), final DataManager dManager = jobEvent.getContext() .getDataManager(); for (String inputName : jobEvent.getData().keySet()) { String activityInputName = as.getInputPortMapping().get( inputName); if (activityInputName != null) { inputData.put(activityInputName, jobEvent.getData() .get(inputName));
private DispatchJobEvent prepareNewJobEvent( Map<String, T2Reference> data, AbstractDispatchEvent dispatchEvent) { DispatchJobEvent dispatchJobEvent = (DispatchJobEvent) dispatchEvent; Map<String, T2Reference> newInputs = new HashMap<String, T2Reference>( dispatchJobEvent.getData()); newInputs.putAll(data); DispatchJobEvent newJobEvent = new DispatchJobEvent(dispatchEvent .getOwningProcess(), dispatchEvent.getIndex(), dispatchEvent.getContext(), newInputs, ((DispatchJobEvent) dispatchEvent).getActivities()); // TODO: Should this be registered as an incomingJobs? If so the // conditional // could even feed to itself, and we should also keep a list of // originalJobs. return newJobEvent; }
/** * If the job contains errors, or collections which contain errors * themselves then bounce a result message with error documents in back up * to the layer above */ @Override public void receiveJob(DispatchJobEvent jobEvent) { Set<T2Reference> errorReferences = new HashSet<T2Reference>(); for (T2Reference ei : jobEvent.getData().values()) { if (ei.containsErrors()) { errorReferences.add(ei); } } if (errorReferences.isEmpty()) { // relay the message down... getBelow().receiveJob(jobEvent); } else { getState(jobEvent.getOwningProcess()) .incrementErrorsReflected(); sendErrorOutput(jobEvent, null, errorReferences); } }
/** * Remove the specified pending retry job from the cache * * @param owningProcess * Owning process identifier as returned by * {@link DispatchJobEvent#getOwningProcess()} * @param index * Index of the job as returned by * {@link DispatchJobEvent#getIndex()} */ protected void forget(String owningProcess, int[] index) { for (JobState jobState : getJobsCopy(owningProcess)) { if (identicalIndex(jobState.jobEvent.getIndex(), index)) { removeJob(owningProcess, jobState); return; } } // It could be due to pipelining activities like BioMart logger.debug("Could not forget " + owningProcess + " " + Arrays.toString(index)); }
/**
 * Receive a job from the layer above, cache its state for potential
 * retries and pass it down to the next layer.
 */
@Override
public void receiveJob(DispatchJobEvent jobEvent) {
	List<JobState> stateList;
	// Get-or-create the per-process state list under the map's lock
	synchronized (stateMap) {
		stateList = stateMap.get(jobEvent.getOwningProcess());
		if (stateList == null) {
			stateList = new ArrayList<JobState>();
			stateMap.put(jobEvent.getOwningProcess(), stateList);
		}
	}
	// Synchronize the add: the previous unsynchronized add raced with
	// other threads snapshotting/iterating this list (e.g. when a job is
	// forgotten), risking a ConcurrentModificationException
	synchronized (stateList) {
		stateList.add(getStateObject(jobEvent));
	}
	getBelow().receiveJob(jobEvent);
}
// NOTE(review): truncated fragment - the DispatchJobEvent constructor
// call is cut off mid-argument-list (data map and activity list missing)
// and does not compile as-is; recover the remainder from the original
// source before editing.
getBelow() .receiveJob( new DispatchJobEvent(e .getOwningProcess(), e .getIndex(), e.getContext(),
/**
 * Expose the invocation context of the job event being processed.
 *
 * @return the job event's invocation context
 */
public InvocationContext getContext() {
	InvocationContext jobContext = jobEvent.getContext();
	return jobContext;
}