/**
 * Loads a fork workflow from /test.json and verifies the decider schedules
 * five tasks and flags one task for update without completing the workflow.
 */
@Test
public void testFork() throws IOException {
    InputStream input = TestDeciderService.class.getResourceAsStream("/test.json");
    Workflow forkWorkflow = objectMapper.readValue(input, Workflow.class);

    DeciderOutcome decision = deciderService.decide(forkWorkflow);

    assertFalse(decision.isComplete);
    assertEquals(5, decision.tasksToBeScheduled.size());
    assertEquals(1, decision.tasksToBeUpdated.size());
}
/**
 * Convenience overload of
 * {@link #getTasksToBeScheduled(Workflow, WorkflowTask, int, String)} for the
 * common case where no prior task attempt is being retried.
 *
 * @param workflow       the workflow being decided
 * @param taskToSchedule the workflow-task definition to map into runnable tasks
 * @param retryCount     the retry attempt number for the scheduled task(s)
 * @return the tasks produced for scheduling
 */
public List<Task> getTasksToBeScheduled(Workflow workflow, WorkflowTask taskToSchedule, int retryCount) {
    // No retried-task id in this code path.
    final String retriedTaskId = null;
    return getTasksToBeScheduled(workflow, taskToSchedule, retryCount, retriedTaskId);
}
// NOTE(review): fragment — the enclosing method (and its closing braces) is not
// visible here, so comments below are limited to what these statements show.
// Enforce the task definition's timeout policy on the in-flight task.
checkForTimeout(taskDefinition.get(), pendingTask);
// Worker stopped responding: time the task out but allow the workflow to
// proceed by marking it COMPLETED_WITH_ERRORS rather than failing hard.
if (isResponseTimedOut(taskDefinition.get(), pendingTask)) {
    timeoutTask(taskDefinition.get(), pendingTask);
    pendingTask.setStatus(COMPLETED_WITH_ERRORS);
} else {
    // Otherwise schedule a retry and make its reference name runnable again.
    Task retryTask = retry(taskDefinition.orElse(null), workflowTask, pendingTask, workflow);
    tasksToBeScheduled.put(retryTask.getReferenceTaskName(), retryTask);
    executedTaskRefNames.remove(retryTask.getReferenceTaskName());
    // Queue successors of the pending task without clobbering entries already present.
    List<Task> nextTasks = getNextTask(workflow, pendingTask);
    nextTasks.forEach(nextTask -> tasksToBeScheduled.putIfAbsent(nextTask.getReferenceTaskName(), nextTask));
    outcome.tasksToBeUpdated.add(pendingTask);
    outcome.tasksToBeScheduled.addAll(unScheduledTasks);
    // Nothing left to schedule and the completion check passes: mark complete.
    if (outcome.tasksToBeScheduled.isEmpty() && checkForWorkflowCompletion(workflow)) {
        LOGGER.debug("Marking workflow as complete. workflow=" + workflow.getWorkflowId() + ", tasks=" + workflow.getTasks());
        outcome.isComplete = true;
public DeciderOutcome decide(Workflow workflow) throws TerminateWorkflowException { //In case of a new workflow the list of tasks will be empty final List<Task> tasks = workflow.getTasks(); //In case of a new workflow the list of executedTasks will also be empty List<Task> executedTasks = tasks.stream() .filter(t -> !t.getStatus().equals(SKIPPED) && !t.getStatus().equals(READY_FOR_RERUN) && !t.isExecuted()) .collect(Collectors.toList()); List<Task> tasksToBeScheduled = new LinkedList<>(); if (executedTasks.isEmpty()) { //this is the flow that the new workflow will go through tasksToBeScheduled = startWorkflow(workflow); if (tasksToBeScheduled == null) { tasksToBeScheduled = new LinkedList<>(); } } return decide(workflow, tasksToBeScheduled); }
@VisibleForTesting List<Task> getNextTask(Workflow workflow, Task task) { final WorkflowDef workflowDef = workflow.getWorkflowDefinition(); // Get the following task after the last completed task if (SystemTaskType.is(task.getTaskType()) && SystemTaskType.DECISION.name().equals(task.getTaskType())) { if (task.getInputData().get("hasChildren") != null) { return Collections.emptyList(); } } String taskReferenceName = task.getReferenceTaskName(); WorkflowTask taskToSchedule = workflowDef.getNextTask(taskReferenceName); while (isTaskSkipped(taskToSchedule, workflow)) { taskToSchedule = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName()); } if (taskToSchedule != null) { return getTasksToBeScheduled(workflow, taskToSchedule, 0); } return Collections.emptyList(); }
// NOTE(review): disjoint fragment — these statements appear to come from at
// least two different methods (a terminate path followed by a retry-input
// rebuild); the enclosing context is not visible, so annotate lightly.
// Persist the workflow output before terminating.
updateWorkflowOutput(workflow, task);
throw new TerminateWorkflowException(task.getReasonForIncompletion(), status, task);
// Rebuild the rescheduled task's input from the workflow-task's input
// parameters against a fully populated workflow instance.
Workflow workflowInstance = populateWorkflowAndTaskData(workflow);
Map<String, Object> taskInput = parametersUtils.getTaskInputV2(workflowTask.getInputParameters(), workflowInstance, rescheduled.getTaskId(), taskDefinition);
rescheduled.getInputData().putAll(taskInput);
// NOTE(review): single-statement fragment — builds a throwaway DeciderService
// and immediately invokes decide; the surrounding method is not visible here.
new DeciderService(parametersUtils, queueDAO, metadataDAO, externalPayloadStorageUtils, taskMappers).decide(workflow);
/**
 * updateWorkflowOutput: an empty workflow yields an empty (non-null) output
 * map; once a task with output exists and the definition declares no output
 * parameters, the last task's output is copied to the workflow output.
 */
@SuppressWarnings("unchecked")
@Test
public void testUpdateWorkflowOutput() {
    Workflow workflow = new Workflow();
    workflow.setWorkflowDefinition(new WorkflowDef());

    // No tasks yet: output must exist but stay empty.
    deciderService.updateWorkflowOutput(workflow, null);
    assertNotNull(workflow.getOutput());
    assertTrue(workflow.getOutput().isEmpty());

    // Add a single task whose output should flow through to the workflow.
    Map<String, Object> taskOutput = new HashMap<>();
    taskOutput.put("taskKey", "taskValue");
    Task task = new Task();
    task.setOutputData(taskOutput);
    workflow.getTasks().add(task);

    when(metadataDAO.get(anyString(), anyInt())).thenReturn(Optional.of(new WorkflowDef()));
    deciderService.updateWorkflowOutput(workflow, null);

    assertNotNull(workflow.getOutput());
    assertEquals("taskValue", workflow.getOutput().get("taskKey"));
}
/**
 * Updates the workflow output.
 *
 * @param workflow the workflow instance
 * @param task if not null, the output of this task will be copied to the workflow output
 *             if no output parameters are specified in the workflow definition;
 *             if null, the output of the last task in the workflow will be copied to the
 *             workflow output if no output parameters are specified in the workflow definition
 */
void updateWorkflowOutput(final Workflow workflow, @Nullable Task task) {
    List<Task> allTasks = workflow.getTasks();
    // No tasks yet: nothing to derive an output from.
    if (allTasks.isEmpty()) {
        return;
    }
    // Source task for the output: the explicit one, else the last task in the workflow.
    Task last = Optional.ofNullable(task).orElse(allTasks.get(allTasks.size() - 1));
    WorkflowDef workflowDef = workflow.getWorkflowDefinition();
    Map<String, Object> output;
    if (workflowDef.getOutputParameters() != null && !workflowDef.getOutputParameters().isEmpty()) {
        // Explicit output parameters win: evaluate them against the populated workflow.
        Workflow workflowInstance = populateWorkflowAndTaskData(workflow);
        output = parametersUtils.getTaskInput(workflowDef.getOutputParameters(), workflowInstance, null, null);
    } else if (StringUtils.isNotBlank(last.getExternalOutputPayloadStoragePath())) {
        // Task output was offloaded to external storage: fetch it and record the read.
        output = externalPayloadStorageUtils.downloadPayload(last.getExternalOutputPayloadStoragePath());
        Monitors.recordExternalPayloadStorageUsage(last.getTaskDefName(), ExternalPayloadStorage.Operation.READ.toString(), ExternalPayloadStorage.PayloadType.TASK_OUTPUT.toString());
    } else {
        // Default: the source task's inline output becomes the workflow output.
        output = last.getOutputData();
    }
    workflow.setOutput(output);
    // Offload the workflow output to external storage if it exceeds the configured threshold.
    externalPayloadStorageUtils.verifyAndUpload(workflow, ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT);
}
// NOTE(review): test fragment — the enclosing @Test method is not visible here.
workflowTask.getInputParameters().put("env", env);
// Retrying should produce a new task instance with rebuilt input data.
Task task2 = deciderService.retry(taskDef, workflowTask, task, workflow);
System.out.println(task.getTaskId() + ":\n" + task.getInputData());
System.out.println(task2.getTaskId() + ":\n" + task2.getInputData());
when(metadataDAO.get(anyString(), anyInt())).thenReturn(Optional.of(new WorkflowDef()));
// Retrying task3 is expected to terminate the workflow — presumably its
// retry budget is exhausted; confirm against the full test body.
exception.expect(TerminateWorkflowException.class);
deciderService.retry(taskDef, workflowTask, task3, workflow);
/**
 * isResponseTimedOut: an IN_PROGRESS task whose last update is older than the
 * task definition's responseTimeoutSeconds must be reported as timed out.
 */
@Test
public void testIsResponsedTimeOut() {
    TaskDef taskDef = new TaskDef();
    taskDef.setName("test_rt");
    taskDef.setResponseTimeoutSeconds(10);

    Task task = new Task();
    task.setTaskDefName("test_rt");
    task.setStatus(Status.IN_PROGRESS);
    task.setTaskId("aa");
    // Last update 11s ago — one second past the 10s response timeout.
    task.setUpdateTime(System.currentTimeMillis() - TimeUnit.SECONDS.toMillis(11));

    // FIX: dropped the redundant assertNotNull(task) — the task is constructed
    // locally a few lines above and can never be null at that point.
    assertTrue(deciderService.isResponseTimedOut(taskDef, task));
}
/**
 * Builds the DeciderService under test over mocked DAOs before each test.
 */
@Before
public void setup() {
    // FIX: the original created a LOCAL "MetadataDAO metadataDAO" mock that
    // shadowed the field of the same name — the stubbing and the DeciderService
    // used the local mock while the field held a second, unstubbed mock.
    // Use the single field mock throughout so field-based assertions and the
    // service under test observe the same stubbed instance.
    metadataDAO = mock(MetadataDAO.class);
    externalPayloadStorageUtils = mock(ExternalPayloadStorageUtils.class);
    QueueDAO queueDAO = mock(QueueDAO.class);

    TaskDef taskDef = new TaskDef();
    WorkflowDef workflowDef = new WorkflowDef();
    workflowDef.setName("TestDeciderService");
    workflowDef.setVersion(1);
    when(metadataDAO.getTaskDef(any())).thenReturn(taskDef);
    when(metadataDAO.getLatest(any())).thenReturn(Optional.of(workflowDef));

    parametersUtils = new ParametersUtils();
    // One mapper per supported task type.
    Map<String, TaskMapper> taskMappers = new HashMap<>();
    taskMappers.put("DECISION", new DecisionTaskMapper());
    taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils, metadataDAO));
    taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper());
    taskMappers.put("JOIN", new JoinTaskMapper());
    taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper, metadataDAO));
    taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils, metadataDAO));
    taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils));
    taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO));
    taskMappers.put("EVENT", new EventTaskMapper(parametersUtils));
    taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils));
    taskMappers.put("HTTP", new HTTPTaskMapper(parametersUtils, metadataDAO));

    deciderService = new DeciderService(parametersUtils, queueDAO, metadataDAO, externalPayloadStorageUtils, taskMappers);
}
// NOTE(review): fragment — the enclosing method and its closing braces are not
// visible. Unlike the full getNextTask implementation (where the return follows
// the loop), here the return sits INSIDE the skip-loop, so the first successor
// after a skipped task would be scheduled immediately. Confirm against the
// canonical version; this may be a garbled excerpt rather than real control flow.
while (isTaskSkipped(taskToSchedule, workflow)) {
    taskToSchedule = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName());
    return getTasksToBeScheduled(workflow, taskToSchedule, 0);
public DeciderOutcome decide(Workflow workflow) throws TerminateWorkflowException { //In case of a new workflow the list of tasks will be empty final List<Task> tasks = workflow.getTasks(); //In case of a new workflow the list of executedTasks will also be empty List<Task> executedTasks = tasks.stream() .filter(t -> !t.getStatus().equals(SKIPPED) && !t.getStatus().equals(READY_FOR_RERUN) && !t.isExecuted()) .collect(Collectors.toList()); List<Task> tasksToBeScheduled = new LinkedList<>(); if (executedTasks.isEmpty()) { //this is the flow that the new workflow will go through tasksToBeScheduled = startWorkflow(workflow); if (tasksToBeScheduled == null) { tasksToBeScheduled = new LinkedList<>(); } } return decide(workflow, tasksToBeScheduled); }
// NOTE(review): disjoint fragment (duplicate of an earlier excerpt) — the
// statements span at least two different methods; context not visible.
// Persist the workflow output before terminating.
updateWorkflowOutput(workflow, task);
throw new TerminateWorkflowException(task.getReasonForIncompletion(), status, task);
// Rebuild the rescheduled task's input against a fully populated workflow.
Workflow workflowInstance = populateWorkflowAndTaskData(workflow);
Map<String, Object> taskInput = parametersUtils.getTaskInputV2(workflowTask.getInputParameters(), workflowInstance, rescheduled.getTaskId(), taskDefinition);
rescheduled.getInputData().putAll(taskInput);
// NOTE(review): two-statement fragment — enclosing method not visible.
// Attach task/workflow definitions, then recompute the workflow output.
workflow = metadataMapperService.populateWorkflowWithDefinitions(workflow);
deciderService.updateWorkflowOutput(workflow, null);
// NOTE(review): truncated — only the head of this method is visible in this chunk.
public List<Task> getTasksToBeScheduled(Workflow workflow, WorkflowTask taskToSchedule, int retryCount, String retriedTaskId) {
    // Work on a workflow with task data fully populated before computing inputs.
    workflow = populateWorkflowAndTaskData(workflow);
    Map<String, Object> input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), workflow, null, null);
/**
 * Wires a DeciderService and a WorkflowExecutor over mocked collaborators
 * before each test.
 */
@Before
public void init() {
    TestConfiguration config = new TestConfiguration();

    // Mocked persistence / queue / listener collaborators.
    executionDAOFacade = mock(ExecutionDAOFacade.class);
    metadataDAO = mock(MetadataDAO.class);
    queueDAO = mock(QueueDAO.class);
    workflowStatusListener = mock(WorkflowStatusListener.class);
    ExternalPayloadStorageUtils payloadStorageUtils = mock(ExternalPayloadStorageUtils.class);

    ObjectMapper mapper = new ObjectMapper();
    ParametersUtils paramUtils = new ParametersUtils();

    // One mapper per supported task type.
    Map<String, TaskMapper> mappers = new HashMap<>();
    mappers.put("DECISION", new DecisionTaskMapper());
    mappers.put("DYNAMIC", new DynamicTaskMapper(paramUtils, metadataDAO));
    mappers.put("FORK_JOIN", new ForkJoinTaskMapper());
    mappers.put("JOIN", new JoinTaskMapper());
    mappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(paramUtils, mapper, metadataDAO));
    mappers.put("USER_DEFINED", new UserDefinedTaskMapper(paramUtils, metadataDAO));
    mappers.put("SIMPLE", new SimpleTaskMapper(paramUtils));
    mappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(paramUtils, metadataDAO));
    mappers.put("EVENT", new EventTaskMapper(paramUtils));
    mappers.put("WAIT", new WaitTaskMapper(paramUtils));
    mappers.put("HTTP", new HTTPTaskMapper(paramUtils, metadataDAO));

    deciderService = new DeciderService(paramUtils, queueDAO, metadataDAO, payloadStorageUtils, mappers);
    MetadataMapperService metadataMapperService = new MetadataMapperService(metadataDAO);
    workflowExecutor = new WorkflowExecutor(deciderService, metadataDAO, queueDAO,
            metadataMapperService, workflowStatusListener, executionDAOFacade, config);
}
// NOTE(review): fragment (duplicate of an earlier excerpt) — the enclosing
// method and its closing braces are not visible here.
// Enforce the task definition's timeout policy on the in-flight task.
checkForTimeout(taskDefinition.get(), pendingTask);
// Worker stopped responding: time out softly via COMPLETED_WITH_ERRORS.
if (isResponseTimedOut(taskDefinition.get(), pendingTask)) {
    timeoutTask(taskDefinition.get(), pendingTask);
    pendingTask.setStatus(COMPLETED_WITH_ERRORS);
} else {
    // Otherwise schedule a retry and make its reference name runnable again.
    Task retryTask = retry(taskDefinition.orElse(null), workflowTask, pendingTask, workflow);
    tasksToBeScheduled.put(retryTask.getReferenceTaskName(), retryTask);
    executedTaskRefNames.remove(retryTask.getReferenceTaskName());
    // Queue successors of the pending task without clobbering existing entries.
    List<Task> nextTasks = getNextTask(workflow, pendingTask);
    nextTasks.forEach(nextTask -> tasksToBeScheduled.putIfAbsent(nextTask.getReferenceTaskName(), nextTask));
    outcome.tasksToBeUpdated.add(pendingTask);
    outcome.tasksToBeScheduled.addAll(unScheduledTasks);
    // Nothing left to schedule and the completion check passes: mark complete.
    if (outcome.tasksToBeScheduled.isEmpty() && checkForWorkflowCompletion(workflow)) {
        LOGGER.debug("Marking workflow as complete. workflow=" + workflow.getWorkflowId() + ", tasks=" + workflow.getTasks());
        outcome.isComplete = true;
/**
 * Drives a conditional-flow workflow through two decide passes: the first
 * schedules three tasks; after completing them, the second schedules exactly
 * junit_task_3 and updates the three finished tasks.
 */
@Test
public void testWorkflowWithNoTasks() throws Exception {
    InputStream stream = TestDeciderOutcomes.class.getResourceAsStream("/conditional_flow.json");
    WorkflowDef def = objectMapper.readValue(stream, WorkflowDef.class);
    assertNotNull(def);

    Workflow workflow = new Workflow();
    workflow.setWorkflowDefinition(def);
    workflow.setStartTime(0);
    workflow.getInput().put("param1", "nested");
    workflow.getInput().put("param2", "one");

    // First pass: nothing completed yet, three tasks get scheduled.
    DeciderOutcome outcome = deciderService.decide(workflow);
    assertNotNull(outcome);
    assertFalse(outcome.isComplete);
    assertTrue(outcome.tasksToBeUpdated.isEmpty());
    assertEquals(3, outcome.tasksToBeScheduled.size());

    // Complete the scheduled tasks and decide again.
    outcome.tasksToBeScheduled.forEach(t -> t.setStatus(Status.COMPLETED));
    workflow.getTasks().addAll(outcome.tasksToBeScheduled);
    outcome = deciderService.decide(workflow);

    // FIX: removed the System.out.println debug noise — assertions already
    // carry the diagnostic message where it matters.
    assertFalse(outcome.isComplete);
    assertEquals(outcome.tasksToBeUpdated.toString(), 3, outcome.tasksToBeUpdated.size());
    assertEquals(1, outcome.tasksToBeScheduled.size());
    assertEquals("junit_task_3", outcome.tasksToBeScheduled.get(0).getTaskDefName());
}