@Override
public Collection<StepExecution> call() throws Exception {
    // Poll the repository for the latest state of each partition step and
    // collect the ones that have stopped running.
    for (Iterator<StepExecution> stepExecutionIterator = split.iterator(); stepExecutionIterator.hasNext(); ) {
        StepExecution curStepExecution = stepExecutionIterator.next();
        if (!result.contains(curStepExecution)) {
            // Re-fetch the execution so we see its current status rather than
            // the stale snapshot held in 'split'.
            StepExecution partitionStepExecution =
                    jobExplorer.getStepExecution(masterStepExecution.getJobExecutionId(), curStepExecution.getId());
            if (!partitionStepExecution.getStatus().isRunning()) {
                result.add(partitionStepExecution);
            }
        }
    }
    if (logger.isDebugEnabled()) {
        // Fix: report the number of partitions still outstanding, not the
        // total partition count, which made the log misleading mid-run.
        logger.debug(String.format("Currently waiting on %s partitions to finish",
                split.size() - result.size()));
    }
    // Returning null tells the poller to keep polling; a non-null collection
    // signals that every partition has finished.
    if (result.size() == split.size()) {
        return result;
    }
    else {
        return null;
    }
}
}; // end of anonymous Callable
if (stepExecution.getStatus().isRunning()) { try {
if (stepExecution.getStatus().isRunning()) { try {
if (status.isRunning() || status == BatchStatus.STOPPING) { throw new JobExecutionAlreadyRunningException("A job execution for this job is already running: " + lastExecution);
/**
 * Starts the named job with the supplied parameters, adding a unique
 * execution timestamp so each invocation creates a distinct job instance.
 * If the latest execution of the job is still running, the request is
 * skipped and logged at INFO. Launch failures are logged, not rethrown.
 *
 * @param jobName       registry name of the job to start
 * @param jobParameters base parameters; an execution timestamp is appended
 */
@Override
public void startJob(String jobName, JobParameters jobParameters) {
    JobExecution jobExecution = getLatestJobExecution(jobName);
    if (jobExecution != null && jobExecution.getStatus().isRunning()) {
        LOG.info("the job {} is already running", jobName);
        return;
    }
    JobParametersBuilder builder = new JobParametersBuilder(jobParameters);
    // A fresh timestamp makes the parameter set unique, allowing re-runs of
    // previously completed instances. (new Date() replaces the heavier
    // Calendar.getInstance().getTime() — identical value.)
    Date executionTimestamp = new Date();
    builder.addDate(EXECUTION_TIMESTAMP, executionTimestamp);
    try {
        jobLauncher.run(jobRegistry.getJob(jobName), builder.toJobParameters());
    } catch (NoSuchJobException e) {
        // Short reason at ERROR, full stack trace at DEBUG only. The original
        // passed (jobName, e, "") so the empty string filled nothing and the
        // exception filled the second placeholder — a fragile SLF4J trick;
        // e.toString() states the intent explicitly with the same output.
        LOG.error("the job {} does not exist: {}", jobName, e.toString());
        LOG.debug("the job {} does not exist", jobName, e);
    } catch (JobParametersInvalidException e) {
        LOG.error("the job {} does have invalid parameters: {}", jobName, e.toString());
        LOG.debug("the job {} does have invalid parameters", jobName, e);
    } catch (JobExecutionAlreadyRunningException e) {
        LOG.error("the job {} is already running: {}", jobName, e.toString());
        LOG.debug("the job {} is already running", jobName, e);
    } catch (JobRestartException e) {
        LOG.error("the job {} could not be restarted: {}", jobName, e.toString());
        LOG.debug("the job {} could not be restarted", jobName, e);
    } catch (JobInstanceAlreadyCompleteException e) {
        LOG.error("the job {} could not restart an already successful instance: {}", jobName, e.toString());
        LOG.debug("the job {} could not restart an already successful instance", jobName, e);
    }
}
/**
 * Routes a finished job execution to the success or error channel, blocking
 * (polling every {@code pollingInterval} ms) until it is no longer running.
 *
 * @return {@code successChannel} when the exit status is COMPLETED,
 *         otherwise {@code errorChannel} (including on interruption)
 */
@Router
public String route(JobExecution jobExecution) {
    // NOTE(review): this polls the in-memory JobExecution object; it assumes
    // that instance is updated as the job progresses — confirm with callers.
    while (jobExecution.getStatus().isRunning()) {
        LOGGER.debug("Still running job {} execution end ...", jobExecution.getJobId());
        try {
            Thread.sleep(pollingInterval);
        } catch (InterruptedException e) {
            // Fix: restore the interrupt flag so code further up the stack can
            // observe the interruption (it was silently swallowed before).
            Thread.currentThread().interrupt();
            LOGGER.warn("Router has been interrupted before job execution ending", e);
            return errorChannel;
        }
    }
    LOGGER.debug("Job {} exit status: {}", jobExecution.getJobId(), jobExecution.getExitStatus());
    if (jobExecution.getExitStatus().equals(ExitStatus.COMPLETED)) {
        return successChannel;
    }
    return errorChannel;
}
/**
 * Polls the job explorer until every tracked job execution has left the
 * running state, then raises a failure if any execution ended FAILED.
 * The poll interval comes from the task batch properties.
 */
private void monitorJobExecutions() {
    RepeatTemplate template = new RepeatTemplate();
    template.iterate(context -> {
        List<JobExecution> failedJobExecutions = new ArrayList<>();
        RepeatStatus repeatStatus = RepeatStatus.FINISHED;
        for (JobExecution jobExecution : jobExecutionList) {
            // Re-fetch so we observe the current status, not a stale snapshot.
            JobExecution currentJobExecution = taskJobExplorer.getJobExecution(jobExecution.getId());
            BatchStatus batchStatus = currentJobExecution.getStatus();
            if (batchStatus.isRunning()) {
                repeatStatus = RepeatStatus.CONTINUABLE;
            }
            if (batchStatus.equals(BatchStatus.FAILED)) {
                failedJobExecutions.add(jobExecution);
            }
        }
        if (repeatStatus.equals(RepeatStatus.FINISHED)) {
            if (!failedJobExecutions.isEmpty()) {
                throwJobFailedException(failedJobExecutions);
            }
            return repeatStatus;
        }
        // Fix: only sleep when another polling round is needed; the original
        // slept unconditionally, delaying completion by one extra interval.
        // (Unused local 'startDate' also removed.)
        Thread.sleep(taskBatchProperties.getFailOnJobFailurePollInterval());
        return repeatStatus;
    });
}
/**
 * Waits for all tracked job executions to finish, polling the job explorer
 * at the configured interval, and fails if any execution ended FAILED.
 */
private void monitorJobExecutions() {
    RepeatTemplate template = new RepeatTemplate();
    template.iterate(context -> {
        List<JobExecution> failedJobExecutions = new ArrayList<>();
        RepeatStatus repeatStatus = RepeatStatus.FINISHED;
        for (JobExecution jobExecution : jobExecutionList) {
            // Re-fetch each execution to get its up-to-date batch status.
            JobExecution currentJobExecution = taskJobExplorer.getJobExecution(jobExecution.getId());
            BatchStatus batchStatus = currentJobExecution.getStatus();
            if (batchStatus.isRunning()) {
                repeatStatus = RepeatStatus.CONTINUABLE;
            }
            if (batchStatus.equals(BatchStatus.FAILED)) {
                failedJobExecutions.add(jobExecution);
            }
        }
        if (repeatStatus.equals(RepeatStatus.FINISHED)) {
            if (!failedJobExecutions.isEmpty()) {
                throwJobFailedException(failedJobExecutions);
            }
            return repeatStatus;
        }
        // Fix: sleep only between polling rounds — the original also slept
        // once after the last round. Unused local 'startDate' removed.
        Thread.sleep(taskBatchProperties.getFailOnJobFailurePollInterval());
        return repeatStatus;
    });
}
while(batchStatus.isRunning()){ log.info("*********** Still running.... **************"); Thread.sleep(1000);
public JobExecution launch(Job job, JobParameters jobParameters) throws JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException, JobParametersInvalidException, Exception { //JobParameters jobParameters = new JobParametersBuilder() //.addString(ConfigSparqlExportJob.JOBPARAM_SERVICE_URI, workflowDesc, true) //.toJobParameters(); JobExecution result = config.jobRepository().getLastJobExecution(job.getName(), jobParameters); // If there was a prior job, return its execution context BatchStatus status = result == null ? null : result.getStatus(); if(status == null || !(status.isRunning() || status.equals(BatchStatus.COMPLETED))) { result = config.jobLauncher().run(job, jobParameters); } return result; }
if (stepExecution.getStatus().isRunning()) { stepExecution.setEndTime(new Date()); stepExecution.setStatus(BatchStatus.FAILED);
if(status.isRunning() || status.equals(BatchStatus.COMPLETED)) { return result;
/**
 * Finds all currently running executions of the named job by searching the
 * MarkLogic index on jobName and filtering each stored execution on status.
 *
 * @param jobName the job name to constrain the search to
 * @return executions whose status is running and which have no end time
 */
@Override
public Set<JobExecution> findRunningJobExecutions(String jobName) {
    StructuredQueryBuilder qb = new StructuredQueryBuilder(SEARCH_OPTIONS_NAME);
    StructuredQueryDefinition querydef = qb.and(qb.valueConstraint("jobName", jobName));
    // Fix: the serialized query is diagnostic detail — log it at DEBUG behind
    // a guard instead of emitting it at INFO on every call.
    if (logger.isDebugEnabled()) {
        logger.debug(querydef.serialize());
    }
    QueryManager queryMgr = databaseClient.newQueryManager();
    SearchHandle results = queryMgr.search(querydef, new SearchHandle());
    Set<JobExecution> jobExecutions = new HashSet<>();
    for (MatchDocumentSummary summary : results.getMatchResults()) {
        // Unmarshal the matched document back into a job instance via JAXB.
        JAXBHandle<MarkLogicJobInstance> handle = new JAXBHandle<>(jaxbContext());
        summary.getFirstSnippet(handle);
        MarkLogicJobInstance mji = handle.get();
        for (JobExecution je : mji.getJobExecutions()) {
            // "Running" = status flag set AND no recorded end time.
            if (je.getStatus().isRunning() && je.getEndTime() == null) {
                jobExecutions.add(je);
            }
        }
    }
    return jobExecutions;
}
public JobExecution launchJob(String workflow) throws Exception { Map<String, Object> data = gson.fromJson(workflow, Map.class); //String normalized = gson.toJson(data); JobRepository jobRepository = config.jobRepository(); JobLauncher jobLauncher = config.jobLauncher(); Job job = null; JobParameters jobParameters = new JobParametersBuilder() .addString(JobLauncherWorkflow.JOB_CONTEXT, workflow, true) .toJobParameters(); JobExecution result = jobRepository.getLastJobExecution(job.getName(), jobParameters); // If there was a prior job, return its execution context BatchStatus status = result == null ? null : result.getStatus(); if(status != null) { if(status.isRunning() || status.equals(BatchStatus.COMPLETED)) { return result; } } result = jobLauncher.run(job, jobParameters); return result; } /*
if (stepExecution.getStatus().isRunning()) { try {
if (stepExecution.getStatus().isRunning()) { try {
if (stepExecution.getStatus().isRunning()) { try {
if (stepExecution.getStatus().isRunning()) { try {
/**
 * Populates (or, when {@code job} is null, creates) a {@code Job} view
 * object from the given {@code JobExecution}, copying timing, status,
 * exit description, parameters and converted step executions.
 *
 * @param source the execution to read from
 * @param job    an existing target to fill, or null to create a fresh one
 * @return the populated target
 */
@Override
public Job convert(JobExecution source, Job job) {
    Job target;
    if (job == null) {
        target = new Job(source.getJobConfigurationName());
    } else {
        target = job;
    }
    ArrayList<StepExecution> stepExecutions = new ArrayList<>(source.getStepExecutions());
    StepExecutionToSimpleStepConverter stepConverter = new StepExecutionToSimpleStepConverter();
    target.startTime(source.getStartTime())
            .endTime(source.getEndTime())
            .status(source.getStatus().toString())
            .running(source.getStatus().isRunning())
            .exitStatus(source.getExitStatus().getExitDescription())
            .jobParameters(getJobParameters(source.getJobParameters()))
            .steps(stepConverter.convertAll(stepExecutions));
    return target;
}