/**
 * Assembles the JobData payload for an S3 export job.
 *
 * @param payload    export parameters forwarded to the job under "exportInfo"
 * @param exportUUID id of the Export entity tracking this job
 * @param s3Export   the S3 export strategy the job will invoke
 * @return a JobData instance carrying all properties the export job reads
 */
public JobData jobDataCreator( HashMap<String, Object> payload, UUID exportUUID, S3Export s3Export ) {
    final JobData data = new JobData();

    data.setProperty( "jobName", "exportJob" );
    data.setProperty( "exportInfo", payload );
    data.setProperty( "exportId", exportUUID );
    data.setProperty( "s3Export", s3Export );

    return data;
}
logger.info( "Job {} is complete id: {}", data.getJobName(), bulkJobExecution.getTransactionId() ); getQm().deleteTransaction( jobQueueName, bulkJobExecution.getTransactionId(), null ); if (logger.isDebugEnabled()) { logger.debug("delete job data {}", data.getUuid()); logger.warn( "Job {} is dead. Removing", data.getJobName() ); getQm().deleteTransaction( jobQueueName, bulkJobExecution.getTransactionId(), null ); getEm().update( data ); logger.debug( "Updating stats for job {}", data.getJobName() ); throw new JobRuntimeException( String.format( "Unable to delete job data with id %s", data.getUuid() ), e );
// Build the per-file import job payload: the source file, the FileImport entity id,
// and the caller-supplied config map merged in via addProperties.
// NOTE(review): fragment — the enclosing method is not visible in this view.
JobData jobData = new JobData(); jobData.setProperty("File", file); jobData.setProperty(FILE_IMPORT_ID, fileImport.getUuid()); jobData.addProperties(config);
/**
 * Persists a new job and its stats entity, then schedules it for execution.
 *
 * @param jobName  name of the job to run; must not be null
 * @param fireTime epoch millis at which the job should fire
 * @param jobData  job payload; must not be null
 * @return the persisted JobData entity
 * @throws JobRuntimeException wrapping any persistence or scheduling failure
 */
@Override
public JobData createJob( String jobName, long fireTime, JobData jobData ) {
    Assert.notNull( jobName, "jobName is required" );
    Assert.notNull( jobData, "jobData is required" );

    try {
        jobData.setJobName( jobName );

        // Persist the job payload first, then its stats, so both UUIDs exist
        // before the job is placed on the queue.
        final JobData persisted = getEm().create( jobData );
        final JobStat stats = getEm().create( new JobStat( jobName, persisted.getUuid() ) );

        scheduleJob( jobName, fireTime, persisted.getUuid(), stats.getUuid() );
        return persisted;
    }
    catch ( Exception e ) {
        throw new JobRuntimeException( e );
    }
}
// Schedule the "delayHeartbeat" job to fire immediately, then fetch its stats
// so the test can assert on run/failure counts.
// NOTE(review): fragment — the enclosing test method is not visible in this view.
JobData returned = scheduler.createJob( "delayHeartbeat", System.currentTimeMillis(), new JobData() ); JobStat stat = scheduler.getStatsForJob( returned.getJobName(), returned.getUuid() );
@Test public void testExportEmptyJobData() throws Exception { JobData jobData = new JobData(); JobExecution jobExecution = mock( JobExecution.class ); when( jobExecution.getJobData() ).thenReturn( jobData ); when( jobExecution.getJobId() ).thenReturn( UUID.randomUUID() ); ExportJob job = new ExportJob(); S3Export s3Export = mock( S3Export.class ); //setup.getExportService().setS3Export( s3Export ); job.setExportService( setup.getExportService() ); try { job.doJob( jobExecution ); } catch ( Exception e ) { assert ( false ); } assert ( true ); }
// Build a JobData with a string property and the notification id, then assert the
// persisted entity round-trips and clean up by deleting the saved job.
// NOTE(review): fragment — `saved` and `r` come from code outside this view; the
// duplicated assertEquals looks copy-pasted but is preserved byte-for-byte here.
JobData test = new JobData(); test.setProperty( "stringprop", "test" ); test.setProperty( "notificationId", notificationId ); assertEquals( saved.getUuid(), r.getEntity().getUuid() ); assertEquals( saved.getUuid(), r.getEntity().getUuid() ); scheduler.deleteJob( saved.getUuid() );
Map<String, Object> config = (Map<String, Object>) jobExecution.getJobData().getProperty("importInfo"); if (config == null) { logger.error("doImport(): Import Information passed through is null"); UUID importId = (UUID) jobExecution.getJobData().getProperty(IMPORT_ID); Import importEntity = rootEM.get(importId, Import.class); Object s3PlaceHolder = jobExecution.getJobData().getProperty("s3Import"); try { if (s3PlaceHolder != null) { fileJobID.put("FileName", scheduled.getProperty("File")); fileJobID.put("JobID", scheduled.getUuid()); value.add(fileJobID);
@Override public void delayRetry( JobExecution execution, long delay ) { JobData data = execution.getJobData(); JobStat stat = execution.getJobStats(); try { // if it's a dead status, it's failed too many times, just kill the job if ( execution.getStatus() == Status.DEAD ) { getQm().deleteTransaction( jobQueueName, execution.getTransactionId(), null ); getEm().update( data ); getEm().update( stat ); return; } // re-schedule the job to run again in the future scheduleJob( execution.getJobName(), System.currentTimeMillis() + delay, data.getUuid(), stat.getUuid() ); // delete the pending transaction getQm().deleteTransaction( jobQueueName, execution.getTransactionId(), null ); // update the data for the next run getEm().update( data ); getEm().update( stat ); } catch ( Exception e ) { // should never happen throw new JobRuntimeException( String.format( "Unable to delete job data with id %s", data.getUuid() ), e ); } }
UUID applicationId = ( UUID ) jobData.getProperty( "applicationId" ); ServiceManager sm = smf.getServiceManager( applicationId ); NotificationsService notificationsService = ( NotificationsService ) sm.getService( "notifications" ); return; UUID notificationId = ( UUID ) jobData.getProperty( "notificationId" ); Notification notification = em.get( notificationId, Notification.class ); if ( notification == null ) {
// Schedule the "delayExecution" job to fire immediately, then fetch its stats
// so the test can assert on run/delay counts.
// NOTE(review): fragment — the enclosing test method is not visible in this view.
JobData returned = scheduler.createJob( "delayExecution", System.currentTimeMillis(), new JobData() ); JobStat stat = scheduler.getStatsForJob( returned.getJobName(), returned.getUuid() );
@Test public void testNullJobExecution() { JobData jobData = new JobData(); JobExecution jobExecution = mock( JobExecution.class ); when( jobExecution.getJobData() ).thenReturn( jobData ); when( jobExecution.getJobId() ).thenReturn( UUID.randomUUID() ); ExportJob job = new ExportJob(); S3Export s3Export = mock( S3Export.class ); // setup.getExportService().setS3Export( s3Export ); job.setExportService( setup.getExportService() ); try { job.doJob( jobExecution ); } catch ( Exception e ) { assert ( false ); } assert ( true ); }
/**
 * Loads the Export entity referenced by this job's data.
 *
 * @param jobExecution the running export job, whose JobData holds the export id
 * @return the Export entity looked up in the management application
 */
public Export getExportEntity( final JobExecution jobExecution ) throws Exception {
    final UUID exportId = ( UUID ) jobExecution.getJobData().getProperty( EXPORT_ID );
    final EntityManager managementEm = emf.getEntityManager( emf.getManagementAppId() );
    return managementEm.get( exportId, Export.class );
}
// Schedule the only-once/unlock-on-fail job, assert exactly one success was
// observed, then fetch the job's stats for further assertions.
// NOTE(review): fragment — `returned` is defined outside this view, and the
// createJob result on this line is discarded; preserved byte-for-byte.
scheduler.createJob( "onlyOnceUnlockOnFailExceution", System.currentTimeMillis(), new JobData() ); assertTrue( "No Job succeeded", getJobListener().getSuccessCount() == 1 ); JobStat stat = scheduler.getStatsForJob( returned.getJobName(), returned.getUuid() );
/**
 * Assembles the JobData payload for an S3 import job.
 *
 * @param payload    import parameters forwarded to the job under "importInfo"
 * @param importUUID id of the Import entity tracking this job
 * @param s3Import   the S3 import strategy the job will invoke
 * @return a JobData instance carrying all properties the import job reads
 */
public JobData jobImportDataCreator( HashMap<String, Object> payload, UUID importUUID, S3Import s3Import ) {
    final JobData data = new JobData();

    data.setProperty( "jobName", "importJob" );
    data.setProperty( "importInfo", payload );
    data.setProperty( "importId", importUUID );
    data.setProperty( "s3Import", s3Import );

    return data;
}
/** Walks the happy-path state machine: NOT_STARTED -> IN_PROGRESS -> COMPLETED. */
@Test
public void transitionsOk() {
    final JobData jobData = new JobData();
    final JobStat jobStat = new JobStat();
    final JobDescriptor descriptor =
            new JobDescriptor( "", UUID.randomUUID(), UUID.randomUUID(), jobData, jobStat, null );

    final JobExecution execution = new JobExecutionImpl( descriptor );
    assertEquals( JobExecution.Status.NOT_STARTED, execution.getStatus() );

    execution.start( 1 );
    assertEquals( JobExecution.Status.IN_PROGRESS, execution.getStatus() );

    execution.completed();
    assertEquals( JobExecution.Status.COMPLETED, execution.getStatus() );
}
/**
 * Returns the Import entity that stores all meta-data for this import job.
 *
 * @param jobExecution the import job details, whose JobData holds the import id
 * @return the Import entity looked up in the management application
 */
@Override
public Import getImportEntity( final JobExecution jobExecution ) throws Exception {
    final UUID importId = (UUID) jobExecution.getJobData().getProperty( IMPORT_ID );
    final EntityManager managementEm = emf.getEntityManager( emf.getManagementAppId() );
    return managementEm.get( importId, Import.class );
}
"failureJobExceuction", System.currentTimeMillis(), new JobData() ); assertTrue( 1 + " success resulted", getJobListener().getSuccessCount() == 1 ); JobStat stat = scheduler.getStatsForJob( returned.getJobName(), returned.getUuid() );
/**
 * Assembles the JobData payload for an S3 export job.
 *
 * @param payload    export parameters forwarded to the job under "exportInfo"
 * @param exportUUID id of the Export entity tracking this job
 * @param s3Export   the S3 export strategy the job will invoke
 * @return a JobData instance carrying all properties the export job reads
 */
public JobData jobExportDataCreator( HashMap<String, Object> payload, UUID exportUUID, S3Export s3Export ) {
    final JobData data = new JobData();

    data.setProperty( "jobName", "exportJob" );
    data.setProperty( "exportInfo", payload );
    data.setProperty( "exportId", exportUUID );
    data.setProperty( "s3Export", s3Export );

    return data;
}
/** Walks the failure path of the state machine: NOT_STARTED -> IN_PROGRESS -> DEAD. */
@Test
public void transitionsDead() {
    final JobData jobData = new JobData();
    final JobStat jobStat = new JobStat();
    final JobDescriptor descriptor =
            new JobDescriptor( "", UUID.randomUUID(), UUID.randomUUID(), jobData, jobStat, null );

    final JobExecution execution = new JobExecutionImpl( descriptor );
    assertEquals( JobExecution.Status.NOT_STARTED, execution.getStatus() );

    execution.start( 1 );
    assertEquals( JobExecution.Status.IN_PROGRESS, execution.getStatus() );

    execution.killed();
    assertEquals( JobExecution.Status.DEAD, execution.getStatus() );
}