/**
 * Builds a {@link JobDetail} for the given job class and attaches the supplied data map.
 * Durability and recovery-requests are always disabled; volatility is caller-controlled.
 *
 * @param name       job name
 * @param groupName  job group
 * @param jobClass   class executed when the job fires
 * @param isVolatile whether the job survives scheduler restarts
 * @param jobData    data map handed to the job at execution time
 * @return the configured job detail
 */
private JobDetail createJobDetail(String name, String groupName, Class<? extends Job> jobClass, boolean isVolatile, JobDataMap jobData) {
    final JobDetail detail = new JobDetail(name, groupName, jobClass, isVolatile, false, false);
    detail.setJobDataMap(jobData);
    return detail;
}
/**
 * Registers a monitoring job for {@code server} on the shared scheduler.
 * The job name combines host, port, nanotime and a random numeric suffix so
 * repeated registrations for the same server never collide.
 *
 * @param server the server to monitor; stored in the job's data map under its class name
 * @throws ParseException     propagated from trigger creation
 * @throws SchedulerException if the scheduler rejects the job
 */
private void scheduleJob(Server server) throws ParseException, SchedulerException {
    final String jobName = server.getHost() + ":" + server.getPort()
            + "-" + System.nanoTime()
            + "-" + RandomStringUtils.randomNumeric(10);

    final JobDetail detail = new JobDetail(jobName, "ServerJob", ServerJob.class);
    final JobDataMap data = new JobDataMap();
    data.put(Server.class.getName(), server);
    detail.setJobDataMap(data);

    serverScheduler.scheduleJob(detail, createTrigger(server));

    if (log.isDebugEnabled()) {
        log.debug("Scheduled job: " + detail.getName() + " for server: " + server);
    }
}
/**
 * Quartz entry point for the Hades job: records the run start time, restores the
 * monitor's state from the execution context, runs the monitor, then writes the
 * monitor's refreshed data map back onto the JobDetail so the next fire sees it.
 * A RuntimeException is logged and rethrown wrapped in JobExecutionException;
 * an InterruptedException is logged and the thread's interrupt flag is restored
 * (per the standard InterruptedException contract). The log prefix is derived
 * from the trigger's previous fire time. (The final brace closes the enclosing class.)
 */
public void execute(JobExecutionContext ctx) throws JobExecutionException { SqlTimeBasedQuartzMonitor monitor = extractMonitor(ctx); monitor.saveRunMethodStartTime(); MonitorRunLogPrefix logPrefix = monitor.createLogPrefix(ctx.getTrigger().getPreviousFireTime(), false); logger.info(logPrefix + "HadesJob started"); try { monitor.restoreStateFromJobExecutionContext(logPrefix.indent(), ctx); monitor.run(logPrefix.indent(), ctx); ctx.getJobDetail().setJobDataMap(monitor.createHadesJobDataMap()); logger.info(logPrefix + "HadesJob ended"); } catch(RuntimeException e) { logger.info(logPrefix + "HadesJob ended with exception", e); throw new JobExecutionException(e); } catch (InterruptedException e) { logger.info(logPrefix + "HadesJob interrupted", e); Thread.currentThread().interrupt(); } } }
public ReplicationScheduler(String schedule, ReplicationService service) { try { Properties props = new Properties(); props.setProperty("org.quartz.scheduler.skipUpdateCheck", "true"); props.setProperty("org.quartz.threadPool.class", "org.quartz.simpl.SimpleThreadPool"); props.setProperty("org.quartz.threadPool.threadCount", "1"); props.setProperty("org.quartz.threadPool.threadPriority", Integer.toString(Thread.NORM_PRIORITY)); SDFSLogger.getLog().info("Scheduling Replication Job for SDFS"); SchedulerFactory schedFact = new StdSchedulerFactory(props); sched = schedFact.getScheduler(); sched.start(); JobDataMap dataMap = new JobDataMap(); dataMap.put("service", service); JobDetail ccjobDetail = new JobDetail("replication", null, ReplicationJob.class); ccjobDetail.setJobDataMap(dataMap); CronTrigger cctrigger = new CronTrigger("replicationTrigger", "group1", schedule); sched.scheduleJob(ccjobDetail, cctrigger); SDFSLogger.getLog().info("Replication Job Scheduled"); } catch (Exception e) { SDFSLogger.getLog().fatal("Unable to schedule Replication Job", e); } }
/**
 * Registers the Hades Quartz job and its cron trigger, replacing any previously
 * registered job of the same name/group.
 *
 * @param firstFireTime one-element out-parameter; index 0 receives the trigger's
 *                      first scheduled fire time
 * @return the scheduled trigger
 * @throws RuntimeException wrapping any scheduling failure (also logged)
 */
private Trigger schedule(Date[] firstFireTime) {
    // FIX: added the missing space before "with cron" — the message previously
    // rendered as "...<scheduler info>with cron '...'".
    String s = hadesQuartzJob + " with " + hadesQuartzTrigger + " on " + getSchedulerInfo() + " with cron '" + cron + "'";
    try {
        // Drop any stale registration so addJob starts from a clean slate.
        scheduler.deleteJob(hadesQuartzJob, hadesQuartzGroup);
        // volatile=false, durable=true, requestsRecovery=false
        JobDetail jobDetail = new JobDetail(hadesQuartzJob, hadesQuartzGroup, HadesJob.class, false, true, false);
        jobDetail.setJobDataMap(createHadesJobDataMap());
        storeMonitor();
        scheduler.addJob(jobDetail, false);
        // First fire delayed by startDelayMillis; no end time.
        Trigger t = new CronTrigger(hadesQuartzTrigger, hadesQuartzGroup, hadesQuartzJob, hadesQuartzGroup,
                new Date(System.currentTimeMillis() + startDelayMillis), null, cron);
        firstFireTime[0] = scheduler.scheduleJob(t);
        logger.info("scheduled with first fire time " + Utils.format(firstFireTime[0]) + ": " + s);
        return t;
    } catch (Exception e) {
        logger.error("failed to schedule " + s, e);
        throw new RuntimeException(e);
    }
}
/**
 * Builds the JobDetail used to check the status of a bundle deployment.
 * The job is named after the deployment id and grouped by the job class name;
 * it is non-volatile, non-durable, and does not request recovery.
 *
 * @param bundleDeploymentId id of the deployment to check; stored as a string in the data map
 * @return the configured job detail
 */
public static JobDetail getJobDetail(int bundleDeploymentId) {
    final String group = BundleDeploymentStatusCheckJob.class.getName();
    final JobDetail detail = new JobDetail(String.valueOf(bundleDeploymentId), group, BundleDeploymentStatusCheckJob.class);
    detail.setVolatility(false);
    detail.setDurability(false);
    detail.setRequestsRecovery(false);

    final JobDataMap data = new JobDataMap();
    data.putAsString(BUNDLE_DEPLOYMENT_ID_KEY, bundleDeploymentId);
    detail.setJobDataMap(data);
    return detail;
}
/**
 * Builds the blocking Quartz job detail for the given job key, seeding its data
 * map from {@code jobParams} plus the reserved action-user entry.
 *
 * FIX: the reserved key is now added to the copied JobDataMap, not to the
 * caller's {@code jobParams}, removing a surprising side effect on the input
 * map. NOTE(review): if any caller relied on jobParams being mutated here,
 * confirm before merging.
 *
 * @param jobId     key providing the job name (toString) and group (user name)
 * @param jobParams parameters copied into the job's data map; no longer mutated
 * @return the configured job detail
 */
private JobDetail createJobDetails( QuartzJobKey jobId, Map<String, Serializable> jobParams ) {
    JobDetail jobDetail = new JobDetail( jobId.toString(), jobId.getUserName(), BlockingQuartzJob.class );
    // Copy first, then add the reserved entry, so the caller's map is untouched.
    JobDataMap jobDataMap = new JobDataMap( jobParams );
    jobDataMap.put( RESERVEDMAPKEY_ACTIONUSER, jobId.getUserName() );
    jobDetail.setJobDataMap( jobDataMap );
    return jobDetail;
}
/**
 * Schedules a monitoring job for {@code server}: a cron trigger when the server
 * supplies a valid cron expression, otherwise a fixed-interval secondly trigger.
 * The data map carries the server and the shared JMX connection-factory pool.
 *
 * @param server server to monitor
 * @throws SchedulerException if the scheduler rejects the job
 * @throws ParseException     propagated from cron-expression parsing
 */
private void scheduleJob(Server server) throws SchedulerException, ParseException {
    Scheduler scheduler = getScheduler();

    String jobName = server.getHost() + ":" + server.getPort() + "-" + System.currentTimeMillis();
    JobDetail detail = new JobDetail(jobName, "ServerJob", ServerJob.class);

    JobDataMap data = new JobDataMap();
    data.put(Server.class.getName(), server);
    data.put(Server.JMX_CONNECTION_FACTORY_POOL, this.getObjectPoolMap().get(Server.JMX_CONNECTION_FACTORY_POOL));
    detail.setJobDataMap(data);

    String triggerName = server.getHost() + ":" + server.getPort() + "-" + Long.valueOf(System.currentTimeMillis()).toString();

    Trigger trigger;
    boolean hasValidCron = (server.getCronExpression() != null)
            && CronExpression.isValidExpression(server.getCronExpression());
    if (hasValidCron) {
        CronTrigger cronTrigger = new CronTrigger();
        cronTrigger.setCronExpression(server.getCronExpression());
        cronTrigger.setName(triggerName);
        cronTrigger.setStartTime(new Date());
        trigger = cronTrigger;
    } else {
        // No usable cron expression: fall back to a fixed polling interval.
        Trigger periodic = TriggerUtils.makeSecondlyTrigger(SECONDS_BETWEEN_SERVER_JOB_RUNS);
        periodic.setName(triggerName);
        periodic.setStartTime(new Date());
        trigger = periodic;
    }

    scheduler.scheduleJob(detail, trigger);

    if (LOG.isDebugEnabled()) {
        LOG.debug("Scheduled job: " + detail.getName() + " for server: " + server);
    }
}
// Hand the populated data map to the database job definition.
databaseJob.setJobDataMap( dataMap );
/**
 * Adds a notification cron entry to the scheduler: builds a cron trigger from
 * the cron's start/end dates and expression, a NotifierJob keyed by the cron's
 * label, stores the cron's topia id in the job data map, and schedules them.
 *
 * @param cron the cron definition to register
 * @throws Exception propagated from trigger construction or scheduling
 */
protected void addCron(Cron cron) throws Exception {
    CronTrigger trigger = new CronTrigger(
            cron.getLabel(), Scheduler.DEFAULT_GROUP,
            cron.getLabel(), Scheduler.DEFAULT_GROUP,
            cron.getDateStart(), cron.getDateEnd(), cron.getExpression());

    JobDetail job = new JobDetail(cron.getLabel(), Scheduler.DEFAULT_GROUP, NotifierJob.class);
    JobDataMap data = new JobDataMap();
    data.put("cronTopiaId", cron.getTopiaId());
    job.setJobDataMap(data);

    scheduler.scheduleJob(job, trigger);
    log.info("Added cron : " + cron.getLabel());
}
/** Adds a notification cron entry to the scheduler: builds a cron trigger from the cron's start/end dates and expression, a NotifierJob keyed by the cron's label (with the cron's topia id in the job data map), and schedules them together. */ protected void addCron(Cron cron) throws Exception { CronTrigger cronTrigger = new CronTrigger(cron.getLabel(), Scheduler.DEFAULT_GROUP, cron.getLabel(), Scheduler.DEFAULT_GROUP, cron.getDateStart(), cron.getDateEnd(), cron.getExpression()); JobDetail jobDetail = new JobDetail(cron.getLabel(), Scheduler.DEFAULT_GROUP, NotifierJob.class); JobDataMap jobDataMap = new JobDataMap(); jobDataMap.put("cronTopiaId", cron.getTopiaId()); jobDetail.setJobDataMap(jobDataMap); scheduler.scheduleJob(jobDetail, cronTrigger); log.info("Added cron : " + cron.getLabel()); }
/**
 * Schedules a one-shot batch job to fire at {@code jobDate}, unless a job with
 * the same derived name already exists in its group. No-op when jobDate is null.
 * The job data map receives the fire date under "date" and is merged with the
 * group-level map before being attached to the job.
 *
 * @param jobClass        class executed when the job fires
 * @param jobDate         fire time; null skips scheduling entirely
 * @param jobGroupDataMap parameters shared by the job group
 * @param jobDataMap      per-job parameters; mutated to carry the "date" entry
 * @throws SchedulerException if the scheduler rejects the job
 */
@SuppressWarnings("unchecked")
private void scheduleJob(Class<?> jobClass, DateTime jobDate, Map<String, String> jobGroupDataMap, Map<String, String> jobDataMap) throws SchedulerException {
    if (jobDate == null) {
        return;
    }
    String groupName = BatchJobUtil.getJobGroupName(jobClass, jobGroupDataMap);
    String name = BatchJobUtil.getJobName(jobClass, jobDataMap);

    // Already registered? Then there is nothing to do.
    if (ArrayUtils.contains(getScheduler().getJobNames(groupName), name)) {
        return;
    }

    jobDataMap.put("date", jobDate.toString());
    Map<String, String> merged = MapUtils.merge(jobGroupDataMap, jobDataMap);
    // volatile=false, durable=true, requestsRecovery=false
    JobDetail detail = new JobDetail(name, groupName, jobClass, false, true, false);
    detail.setJobDataMap(new JobDataMap(merged));

    String triggerGroup = BatchJobUtil.getTriggerGroupName(jobClass, MapUtils.merge(jobGroupDataMap, jobDataMap));
    String triggerName = BatchJobUtil.getTriggerName(jobClass, jobDate);
    Trigger trigger = new SimpleTrigger(triggerName, triggerGroup, jobDate.toDate());
    trigger.setJobGroup(groupName);
    trigger.setJobName(name);

    LOG.info("Scheduling " + detail.getFullName() + " to be run on " + jobDate);
    this.updateStatus(detail, BatchJobService.SCHEDULED_JOB_STATUS_CODE);
    getScheduler().scheduleJob(detail, trigger);
}
// Record the shared scanning queue and the target repository id, then hand the map to the job.
dataMap.put( DefaultArchivaTaskScheduler.TASK_QUEUE, repositoryScanningQueue ); dataMap.put( DefaultArchivaTaskScheduler.TASK_REPOSITORY, repoConfig.getId() ); repositoryJob.setJobDataMap( dataMap );
jobDetail.setJobDataMap( dataMap ); // hand the populated data map to the job definition
private void scheduleJob(Server server) throws ParseException, SchedulerException { String name = server.getHost() + ":" + server.getPort() + "-" + System.currentTimeMillis() + "-" + RandomStringUtils.randomNumeric(10); JobDetail jd = new JobDetail(name, "ServerJob", ServerJob.class); JobDataMap map = new JobDataMap(); map.put(Server.class.getName(), server); jd.setJobDataMap(map); Trigger trigger; if ((server.getCronExpression() != null) && CronExpression.isValidExpression(server.getCronExpression())) { trigger = new CronTrigger(); ((CronTrigger) trigger).setCronExpression(server.getCronExpression()); trigger.setName(server.getHost() + ":" + server.getPort() + "-" + Long.toString(System.currentTimeMillis())); trigger.setStartTime(computeSpreadStartDate(configuration.getRunPeriod())); } else { int runPeriod = configuration.getRunPeriod(); if (server.getRunPeriodSeconds() != null) runPeriod = server.getRunPeriodSeconds(); Trigger minuteTrigger = TriggerUtils.makeSecondlyTrigger(runPeriod); minuteTrigger.setName(server.getHost() + ":" + server.getPort() + "-" + Long.toString(System.currentTimeMillis())); minuteTrigger.setStartTime(computeSpreadStartDate(runPeriod)); trigger = minuteTrigger; // TODO replace Quartz with a ScheduledExecutorService } serverScheduler.scheduleJob(jd, trigger); if (log.isDebugEnabled()) { log.debug("Scheduled job: " + jd.getName() + " for server: " + server); } }
protected static JobDetail getJobDetail(ResourceGroup group, Subject subject, JobDataMap jobDataMap, Class jobClass, String jobNamePrefix) { JobDetail jobDetail = new JobDetail(); jobDetail.setName(createUniqueJobName(group, subject, jobNamePrefix)); jobDetail.setGroup(createJobGroupName(group, jobNamePrefix)); jobDetail.setVolatility(false); // we want it persisted jobDetail.setDurability(false); jobDetail.setRequestsRecovery(false); jobDetail.setJobClass(jobClass); jobDetail.setJobDataMap(jobDataMap); return jobDetail; }
private synchronized void scheduleDatabaseJobs() throws SchedulerException { String cronString = archivaConfiguration.getConfiguration().getDatabaseScanning().getCronExpression(); // setup the unprocessed artifact job JobDetail databaseJob = new JobDetail( DATABASE_JOB, DATABASE_SCAN_GROUP, DatabaseTaskJob.class ); JobDataMap dataMap = new JobDataMap(); dataMap.put( DatabaseTaskJob.TASK_QUEUE, databaseUpdateQueue ); databaseJob.setJobDataMap( dataMap ); CronExpressionValidator cronValidator = new CronExpressionValidator(); if ( !cronValidator.validate( cronString ) ) { getLogger().warn( "Cron expression [" + cronString + "] for database update is invalid. Defaulting to hourly." ); cronString = CRON_HOURLY; } try { CronTrigger trigger = new CronTrigger( DATABASE_JOB_TRIGGER, DATABASE_SCAN_GROUP, cronString ); scheduler.scheduleJob( databaseJob, trigger ); } catch ( ParseException e ) { getLogger().error( "ParseException in database scanning cron expression, disabling database scanning: " + e.getMessage() ); } }
// NOTE(review): QUEUE_POLICY_WAIT presumably queues duplicate tasks instead of skipping them — confirm against ArchivaTask.
dataMap.put( RepositoryTaskJob.TASK_QUEUE_POLICY, ArchivaTask.QUEUE_POLICY_WAIT ); dataMap.put( RepositoryTaskJob.TASK_REPOSITORY, repoConfig.getId() ); repositoryJob.setJobDataMap( dataMap );
// Non-recovering job that runs GroupOperationJob with the supplied data map.
jobDetail.setRequestsRecovery(false); jobDetail.setJobClass(GroupOperationJob.class); jobDetail.setJobDataMap(jobDataMap);
new JobDetail( schedule.getName(), org.quartz.Scheduler.DEFAULT_GROUP, ContinuumBuildJob.class ); jobDetail.setJobDataMap( dataMap ); // job named after the schedule, in Quartz's default group; data map attached