public void triggerFired(Trigger trigger, JobExecutionContext context) {
    // Call scheduleJobInterruptMonitor and capture the ScheduledFuture in context
    try {
        // Schedule the monitor only if the job wants the auto-interruptible functionality
        if (context.getJobDetail().getJobDataMap().getBoolean(AUTO_INTERRUPTIBLE)) {
            JobInterruptMonitorPlugin monitorPlugin = (JobInterruptMonitorPlugin) context.getScheduler()
                    .getContext().get(JOB_INTERRUPT_MONITOR_KEY);
            // Get the max runtime from the job data; if not set, use DEFAULT_MAX_RUNTIME from the plugin configuration
            long jobDataDelay = DEFAULT_MAX_RUNTIME;
            if (context.getJobDetail().getJobDataMap().get(MAX_RUN_TIME) != null) {
                jobDataDelay = context.getJobDetail().getJobDataMap().getLong(MAX_RUN_TIME);
            }
            future = monitorPlugin.scheduleJobInterruptMonitor(context.getJobDetail().getKey(), jobDataDelay);
            getLog().debug("Job's interrupt monitor has been scheduled to interrupt with the delay: " + jobDataDelay);
        }
    } catch (SchedulerException e) {
        getLog().info("Error scheduling interrupt monitor " + e.getMessage(), e);
    }
}
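The lookup of JOB_INTERRUPT_MONITOR_KEY above only works if the plugin registered itself in the SchedulerContext beforehand. A minimal sketch of that producer side, assuming the plugin's start() hook and its getScheduler()/getLog() helpers (not shown in the snippet):

// Sketch only: register the plugin instance so triggerFired() can look it up later.
public void start() {
    try {
        getScheduler().getContext().put(JOB_INTERRUPT_MONITOR_KEY, this);
    } catch (SchedulerException e) {
        getLog().error("Unable to put plugin into the scheduler context", e);
    }
}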
if (startDelay <= 0) {
    // Start the scheduler now
    scheduler.start();
    log.info("Scheduler has been started...");
} else {
    // Start the scheduler after the configured delay
    scheduler.startDelayed(startDelay);
    log.info("Scheduler will start in " + startDelay + " seconds.");
}

log.info("Storing the ServletContext in the scheduler context at key: " + servletCtxtKey);
scheduler.getContext().put(servletCtxtKey, servletContext);
/**
 * Expose the specified context attributes and/or the current
 * ApplicationContext in the Quartz SchedulerContext.
 */
private void populateSchedulerContext(Scheduler scheduler) throws SchedulerException {
    // Put specified objects into Scheduler context.
    if (this.schedulerContextMap != null) {
        scheduler.getContext().putAll(this.schedulerContextMap);
    }

    // Register ApplicationContext in Scheduler context.
    if (this.applicationContextSchedulerContextKey != null) {
        if (this.applicationContext == null) {
            throw new IllegalStateException(
                    "SchedulerFactoryBean needs to be set up in an ApplicationContext " +
                    "to be able to handle an 'applicationContextSchedulerContextKey'");
        }
        scheduler.getContext().put(this.applicationContextSchedulerContextKey, this.applicationContext);
    }
}
SchedulerContext schedulerContext = new SchedulerContext();
given(scheduler.getContext()).willReturn(schedulerContext);

schedulerFactoryBean.start();

Scheduler returnedScheduler = schedulerFactoryBean.getObject();
assertEquals(tb, returnedScheduler.getContext().get("testBean"));
assertEquals(ac, returnedScheduler.getContext().get("appCtx"));
scheduler.deleteJob(JobKey.jobKey(key, key));
JiveGlobals.deleteProperty("ofmeet.cron." + bookmark.getBookmarkID());

JobDetail jobDetail = newJob(MeetingPlanner.class).withIdentity(key, key).build();
// cronExpression is assumed to hold the cron string configured for this bookmark
CronTrigger cronTrigger = newTrigger().withIdentity(key, key).withSchedule(cronSchedule(cronExpression)).build();

scheduler.getContext().put(jobDetail.getKey().toString(), bookmark);
scheduler.scheduleJob(jobDetail, cronTrigger);
public void execute(JobExecutionContext context) throws JobExecutionException {
    String camelContextName = (String) context.getJobDetail().getJobDataMap().get(QuartzConstants.QUARTZ_CAMEL_CONTEXT_NAME);
    String endpointUri = (String) context.getJobDetail().getJobDataMap().get(QuartzConstants.QUARTZ_ENDPOINT_URI);

    SchedulerContext schedulerContext;
    try {
        schedulerContext = context.getScheduler().getContext();
    } catch (SchedulerException e) {
        throw new JobExecutionException("Failed to obtain scheduler context for job " + context.getJobDetail().getName(), e);
    }

    CamelContext camelContext = (CamelContext) schedulerContext.get(QuartzConstants.QUARTZ_CAMEL_CONTEXT + "-" + camelContextName);
    if (camelContext == null) {
        throw new JobExecutionException("No CamelContext could be found with name: " + camelContextName);
    }

    Trigger trigger = context.getTrigger();
    QuartzEndpoint endpoint = lookupQuartzEndpoint(camelContext, endpointUri, trigger);
    if (endpoint == null) {
        throw new JobExecutionException("No QuartzEndpoint could be found with endpointUri: " + endpointUri);
    }

    endpoint.onJobExecute(context);
}
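The counterpart to this lookup is the component storing the CamelContext in the scheduler context under the same composed key when it starts. A minimal sketch, assuming a scheduler and camelContext are already in scope:

// Sketch only: store the CamelContext so the job can resolve it by name at fire time.
String key = QuartzConstants.QUARTZ_CAMEL_CONTEXT + "-" + camelContext.getName();
scheduler.getContext().put(key, camelContext);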
getScheduler().getContext().put(JOB_INITIALIZATION_PLUGIN_NAME + '_' + getName(), this);

trig.setVolatility(true);

JobDetail job = new JobDetail(
        jobTriggerName,
        JOB_INITIALIZATION_PLUGIN_NAME,
        FileScanJob.class);
job.setVolatility(true);
job.getJobDataMap().put(FileScanJob.FILE_NAME, jobFile.getFileName());
job.getJobDataMap().put(FileScanJob.FILE_SCAN_LISTENER_NAME, JOB_INITIALIZATION_PLUGIN_NAME + '_' + getName());

getScheduler().scheduleJob(job, trig);
getScheduler().getContext().put(JOB_INITIALIZATION_PLUGIN_NAME + '_' + getName(), this);

getScheduler().unscheduleJob(tKey);

        // (tail of an elided TriggerBuilder chain)
        .build();

getScheduler().scheduleJob(job, trig);

getLog().debug("Scheduled file scan job for data file: {}, at interval: {}", jobFile.getFileName(), scanInterval);
Trigger oldTrigger = scheduler.getTrigger(triggerKey);
boolean triggerExisted = oldTrigger != null;
if (triggerExisted && !isRecoverableJob()) {

scheduler.rescheduleJob(triggerKey, trigger);

scheduler.scheduleJob(jobDetail, trigger);
} catch (ObjectAlreadyExistsException ex) {

LOG.info("Job {} (triggerType={}, jobClass={}) is scheduled. Next fire date is {}",
        new Object[] {trigger.getKey(), trigger.getClass().getSimpleName(),
                jobDetail.getJobClass().getSimpleName(), trigger.getNextFireTime()});

AtomicInteger number = (AtomicInteger) scheduler.getContext().get(QuartzConstants.QUARTZ_CAMEL_JOBS_COUNT);
if (number != null) {
    number.incrementAndGet();
}
public void init() throws SchedulerException {
    if (isEnabled) { // Only shrink if specified
        JobDetail workerJob = JobBuilder.newJob(ShrinkerJob.class)
                .withIdentity("shrinkingJob", "group1").build();

        Trigger workerTrigger = TriggerBuilder
                .newTrigger()
                .withIdentity("shrinkingTrigger", "group1")
                .withSchedule(CronScheduleBuilder.cronSchedule(schedule)) // execute this every day at midnight
                .build();

        // Schedule it
        org.quartz.Scheduler scheduler = new StdSchedulerFactory().getScheduler();
        scheduler.start();
        scheduler.getContext().put("period", shrinkPeriod);
        scheduler.getContext().put("thothIndexUrl", thothIndexURL);
        scheduler.getContext().put("threadPoolSize", threadPoolSize);
        scheduler.scheduleJob(workerJob, workerTrigger);
    }
}
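A consuming ShrinkerJob would read those three entries back out of the scheduler context. A minimal sketch, with the value types left as Object because the actual field types are not shown in the snippet:

public class ShrinkerJob implements Job {
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        try {
            SchedulerContext schedulerContext = context.getScheduler().getContext();
            // Types are assumed; they must match whatever init() actually stored.
            Object period = schedulerContext.get("period");
            Object thothIndexUrl = schedulerContext.get("thothIndexUrl");
            Object threadPoolSize = schedulerContext.get("threadPoolSize");
            // ... run the shrinker using these settings
        } catch (SchedulerException e) {
            throw new JobExecutionException("Could not read shrinker settings from the scheduler context", e);
        }
    }
}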
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    try {
        BimServer bimServer = (BimServer) (jobExecutionContext.getScheduler().getContext().get("bimserver"));
        bimServer.getCompareCache().cleanup();
    } catch (SchedulerException e) {
        LOGGER.error("", e);
    }
}
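For this lookup to succeed, something at startup must have placed the server instance in the scheduler context under "bimserver". A minimal sketch of that wiring, assuming the BimServer instance is at hand (the actual startup code is not shown here):

// Sketch only: make the BimServer instance visible to Quartz jobs via the scheduler context.
Scheduler scheduler = new StdSchedulerFactory().getScheduler();
scheduler.getContext().put("bimserver", bimServer);
scheduler.start();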
@Override
protected void executeInternal(JobExecutionContext context) {
    logger.info("Job " + context.getJobDetail().getDescription() + " started.");

    JobDataMap dataMap = context.getMergedJobDataMap();
    initDependencies(context.getScheduler(), dataMap);

    String permissionTagsString = (String) context.getScheduler().getContext().get("permissionTagsString");
    String[] permissionTagsStringArray = (String[]) context.getScheduler().getContext().get("permissionTagsStringArray");
    List<String> permissionTagsList = (List<String>) context.getScheduler().getContext().get("permissionTagsList");

    Set<Integer> edcSet = new HashSet<>();

    logger.debug("found " + fId + " and " + ODMXMLFileName);
    logger.info("Finished ODM generation of job " + context.getJobDetail().getDescription());
    resetArchiveDataset(datasetBean.getId());
    logger.info("Job " + context.getJobDetail().getDescription() + " finished.");
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    LOG.debug("Running ScheduledJob: jobExecutionContext={}", jobExecutionContext);

    SchedulerContext schedulerContext = getSchedulerContext(jobExecutionContext);
    ScheduledJobState state = (ScheduledJobState) schedulerContext.get(jobExecutionContext.getJobDetail().getKey().toString());
    Action storedAction = state.getAction();
    Route storedRoute = state.getRoute();

    List<RoutePolicy> policyList = storedRoute.getRouteContext().getRoutePolicyList();
    for (RoutePolicy policy : policyList) {
        try {
            if (policy instanceof ScheduledRoutePolicy) {
                ((ScheduledRoutePolicy) policy).onJobExecute(storedAction, storedRoute);
            }
        } catch (Exception e) {
            throw new JobExecutionException("Failed to execute Scheduled Job for route " + storedRoute.getId()
                    + " with trigger name: " + jobExecutionContext.getTrigger().getKey(), e);
        }
    }
}
/**
 * Utility method to schedule the job at a fixed interval.
 */
public static void schedule(MailSender mailSender, Configuration mailConfiguration, String group) throws SchedulerException {
    Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();

    JobDetail job = JobBuilder
            .newJob(MailSenderJob.class)
            .withIdentity("mail.sender", group)
            .build();

    int pollInterval = mailConfiguration.getInt(
            MailProperties.MAIL_SENDER_POLL_INTERVAL, MailScheduler.DEFAULT_POLL_INTERVAL);

    Trigger trigger = TriggerBuilder.newTrigger()
            .withIdentity("mail.sender.trigger", group)
            .startNow()
            .withSchedule(SimpleScheduleBuilder.simpleSchedule()
                    .withIntervalInMilliseconds(pollInterval)
                    .repeatForever())
            .build();

    scheduler.getContext().put(MailSenderJob.MAIL_SENDER_KEY, mailSender);
    scheduler.scheduleJob(job, trigger);
}
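On the job side, MailSenderJob would pull the MailSender back out of the scheduler context under MAIL_SENDER_KEY. A minimal sketch of what that execute() could look like; the key's value and the sendPendingMail() call are my own placeholders, not the project's actual code:

public class MailSenderJob implements Job {

    public static final String MAIL_SENDER_KEY = "mail.sender.instance"; // placeholder value

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        try {
            MailSender mailSender = (MailSender) context.getScheduler().getContext().get(MAIL_SENDER_KEY);
            mailSender.sendPendingMail(); // hypothetical method; stands in for the real send logic
        } catch (SchedulerException e) {
            throw new JobExecutionException("Could not obtain MailSender from the scheduler context", e);
        }
    }
}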
@Override
protected void doStop() throws Exception {
    super.doStop();

    if (scheduler != null) {
        AtomicInteger number = (AtomicInteger) scheduler.getContext().get("CamelJobs");
        if (number != null && number.get() > 0) {
            LOG.info("Cannot shutdown Quartz scheduler: " + scheduler.getSchedulerName()
                    + " as there are still " + number.get() + " jobs registered.");
        } else {
            // no more jobs, so shut down the scheduler
            LOG.info("There are no more jobs registered, so shutting down Quartz scheduler: {}", scheduler.getSchedulerName());
            scheduler.shutdown();
            scheduler = null;
        }
    }
}
private void initialize() throws SchedulerException {
    scheduler.getContext().put(EVENT_BUS_KEY, eventBus);
    scheduler.getContext().put(TRANSACTION_MANAGER_KEY, transactionManager);
    scheduler.getContext().put(EVENT_JOB_DATA_BINDER_KEY, jobDataBinder);
    initialized = true;
}
@Service
public class StartJob extends QuartzJobBean {

    // the DAO
    private NoaJobInstancesDAO njiDAO;

    public void executeInternal(JobExecutionContext context) throws JobExecutionException {
        try {
            init(context.getJobDetail().getJobDataMap(), context.getScheduler().getContext());
        } catch (SchedulerException e) {
            throw new JobExecutionException("Could not access the scheduler context", e);
        }

        // some logic here
        njiDAO.create(params);
    }

    private void init(JobDataMap jobContextMap, SchedulerContext schedulerContext) {
        // some initialization using the job data map, not interesting for DAOs

        // this line injects the correct DAO
        this.njiDAO = (NoaJobInstancesDAO) schedulerContext.get("noaJobInstancesDAO");
    }
}
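What makes schedulerContext.get("noaJobInstancesDAO") resolve is that the Spring-managed DAO was exposed in the Quartz SchedulerContext when the scheduler was built, typically through SchedulerFactoryBean (compare populateSchedulerContext above). A minimal sketch; the bean method name and the use of a singleton map are my own:

@Bean
public SchedulerFactoryBean schedulerFactoryBean(NoaJobInstancesDAO noaJobInstancesDAO) {
    SchedulerFactoryBean factory = new SchedulerFactoryBean();
    // Entries of this map end up in the Quartz SchedulerContext (see populateSchedulerContext above)
    factory.setSchedulerContextAsMap(Collections.singletonMap("noaJobInstancesDAO", noaJobInstancesDAO));
    return factory;
}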
public void updateExistingJob(Monitor monitor) {
    JobDetail job = JobBuilder
            .newJob(MonitorEmailNotificationTask.class)
            .withIdentity(getIdentity(monitor.getId()), getJobGroupName())
            .storeDurably(true).build();
    try {
        springScheduler.getScheduler().getContext().put(job.getKey().getName(), monitor);
        springScheduler.getScheduler().addJob(job, true);
    } catch (SchedulerException e) {
        LOGGER.error("Can't schedule job!", e);
    }
}
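On the receiving end, MonitorEmailNotificationTask can fetch the Monitor back out of the scheduler context using its own job key name, since that is the key used in the put above. A minimal sketch; the real task class is not shown in the snippet:

public class MonitorEmailNotificationTask implements Job {
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        try {
            String key = context.getJobDetail().getKey().getName();
            Monitor monitor = (Monitor) context.getScheduler().getContext().get(key);
            // ... build and send the notification e-mail for this monitor
        } catch (SchedulerException e) {
            throw new JobExecutionException("Monitor not available in the scheduler context", e);
        }
    }
}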
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    try {
        Scheduler scheduler = context.getScheduler();
        String pluginKey = context.getMergedJobDataMap().getString(PLUGIN_KEY_NAME);
        JdbcSchedulerHistoryPlugin plugin = (JdbcSchedulerHistoryPlugin) scheduler.getContext().get(pluginKey);

        long deleteIntervalInSecs = plugin.getDeleteIntervalInSecs();
        Date olderThanDate = new Date(System.currentTimeMillis() - (deleteIntervalInSecs * 1000));
        int result = plugin.deleteJobHistory(olderThanDate);
        logger.info("{} job history records were deleted with date older than {}.", result, olderThanDate);
    } catch (SchedulerException e) {
        throw new JobExecutionException("Failed to run JobHistoryRemovalJob.", e);
    }
}
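This job only works because the plugin stored itself in the scheduler context under its plugin key and passed that key along in the job data. A minimal sketch of that setup side, reusing names from the snippet but with the wiring itself assumed rather than taken from the library:

// Sketch only, inside the plugin's initialization: register the plugin and tell the
// removal job which key to look it up under.
scheduler.getContext().put(pluginKey, this);
JobDetail jobHistoryRemovalJob = JobBuilder.newJob(JobHistoryRemovalJob.class)
        .withIdentity("JobHistoryRemovalJob")
        .usingJobData(PLUGIN_KEY_NAME, pluginKey)
        .build();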
protected void loadCallbackDataIntoSchedulerContext(JobDetail jobDetail, Action action, Route route) throws SchedulerException {
    getScheduler().getContext().put(jobDetail.getKey().toString(), new ScheduledJobState(action, route));
}