/**
 * Extension of the MethodInvokingJob, implementing the StatefulJob interface.
 * Quartz checks whether or not jobs are stateful and if so,
 * won't let jobs interfere with each other.
 */
@PersistJobDataAfterExecution
@DisallowConcurrentExecution
public static class StatefulMethodInvokingJob extends MethodInvokingJob {

    // No implementation, just an addition of the tag interface StatefulJob
    // in order to allow stateful method invoking jobs.
}
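// Hedged usage sketch: Spring's MethodInvokingJobDetailFactoryBean chooses this
// stateful variant when concurrent execution is switched off. The target bean and
// method names below are illustrative assumptions, not taken from the source.
MethodInvokingJobDetailFactoryBean jobDetail = new MethodInvokingJobDetailFactoryBean();
jobDetail.setName("reportJob");
jobDetail.setTargetObject(reportService);          // assumed Spring-managed bean
jobDetail.setTargetMethod("generateDailyReport");  // assumed no-arg method on that bean
jobDetail.setConcurrent(false);                    // selects StatefulMethodInvokingJob internally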
/**
 * Register objects in the JobDataMap via a given Map.
 * <p>These objects will be available to this Trigger only,
 * in contrast to objects in the JobDetail's data map.
 * @param jobDataAsMap a Map with String keys and any objects as values
 * (for example Spring-managed beans)
 */
public void setJobDataAsMap(Map<String, ?> jobDataAsMap) {
    this.jobDataMap.putAll(jobDataAsMap);
}
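// Hedged usage sketch, assuming this setter sits on a Spring trigger factory bean
// such as CronTriggerFactoryBean; the job data then rides along with the trigger only.
// The JobDetail reference, cron expression, and map entry are illustrative.
CronTriggerFactoryBean trigger = new CronTriggerFactoryBean();
trigger.setJobDetail(reportJobDetail);                        // assumed JobDetail bean
trigger.setCronExpression("0 0 2 * * ?");                     // every day at 02:00
trigger.setJobDataAsMap(Map.of("reportName", "daily-sales"));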
@Override
public boolean isRunning() throws SchedulingException {
    if (this.scheduler != null) {
        try {
            return !this.scheduler.isInStandbyMode();
        }
        catch (SchedulerException ex) {
            return false;
        }
    }
    return false;
}
private CronTrigger createTrigger(final String cron) {
    return TriggerBuilder.newTrigger()
            .withIdentity(triggerIdentity)
            .withSchedule(CronScheduleBuilder.cronSchedule(cron)
                    .withMisfireHandlingInstructionDoNothing())
            .build();
}
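// Illustrative usage sketch (job class and identities are assumptions): schedule a job
// with a trigger built this way. The DO_NOTHING misfire instruction means a missed fire
// time is simply skipped and the trigger waits for the next matching cron slot.
Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
JobDetail job = JobBuilder.newJob(SomeJob.class)              // SomeJob is a placeholder Job implementation
        .withIdentity("someJob", "exampleGroup")
        .build();
scheduler.scheduleJob(job, createTrigger("0 0/30 * * * ?"));  // fire every 30 minutes
scheduler.start();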
@Override
public void execute(JobExecutionContext context) {
    JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
    LOG.info("Performing timer based backup");
    BackupScheduler backupScheduler = (BackupScheduler) jobDataMap.get(BACKUP_SCHEDULER_KEY);
    backupScheduler.performBackup();
}
private void unscheduleJob(String pipelineName) {
    try {
        JobKey jobKey = jobKey(pipelineName, PIPELINE_TRIGGGER_TIMER_GROUP);
        if (quartzScheduler.getJobDetail(jobKey) != null) {
            quartzScheduler.unscheduleJob(triggerKey(pipelineName, PIPELINE_TRIGGGER_TIMER_GROUP));
            quartzScheduler.deleteJob(jobKey);
        }
    } catch (SchedulerException e) {
        LOG.error("Could not unschedule quartz jobs", e);
    }
}
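// The bare jobKey(...) and triggerKey(...) calls above presumably rely on static imports
// of the Quartz key factory methods, along these lines:
import static org.quartz.JobKey.jobKey;
import static org.quartz.TriggerKey.triggerKey;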
private JobDataMap jobDataMapFor(PipelineConfig pipelineConfig) {
    JobDataMap map = new JobDataMap();
    map.put(BUILD_CAUSE_PRODUCER_SERVICE, buildCauseProducerService);
    map.put(MAINTENANCE_MODE_SERVICE, maintenanceModeService);
    map.put(PIPELINE_CONFIG, pipelineConfig);
    return map;
}
/**
 * This implementation applies the passed-in job data map as bean property
 * values, and delegates to {@code executeInternal} afterwards.
 * @see #executeInternal
 */
@Override
public final void execute(JobExecutionContext context) throws JobExecutionException {
    try {
        BeanWrapper bw = PropertyAccessorFactory.forBeanPropertyAccess(this);
        MutablePropertyValues pvs = new MutablePropertyValues();
        pvs.addPropertyValues(context.getScheduler().getContext());
        pvs.addPropertyValues(context.getMergedJobDataMap());
        bw.setPropertyValues(pvs, true);
    }
    catch (SchedulerException ex) {
        throw new JobExecutionException(ex);
    }
    executeInternal(context);
}
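// Sketch of how this bean-property injection plays out, assuming the enclosing class is
// Spring's QuartzJobBean: a "message" entry in the merged JobDataMap (or SchedulerContext)
// is applied through the matching setter before executeInternal runs. Names are illustrative.
public class GreetingJob extends QuartzJobBean {

    private String message;   // populated from the JobDataMap entry "message"

    public void setMessage(String message) {
        this.message = message;
    }

    @Override
    protected void executeInternal(JobExecutionContext context) {
        System.out.println("Job says: " + message);
    }
}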
/**
 * Add the given job to the Scheduler, if it doesn't already exist.
 * Overwrites the job in any case if "overwriteExistingJobs" is set.
 * @param jobDetail the job to add
 * @return {@code true} if the job was actually added,
 * {@code false} if it already existed before
 * @see #setOverwriteExistingJobs
 */
private boolean addJobToScheduler(JobDetail jobDetail) throws SchedulerException {
    if (this.overwriteExistingJobs || getScheduler().getJobDetail(jobDetail.getKey()) == null) {
        getScheduler().addJob(jobDetail, true);
        return true;
    }
    else {
        return false;
    }
}
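// Hedged configuration sketch: this method mirrors the behaviour controlled by the
// "overwriteExistingJobs" flag on Spring's scheduler beans; reportJobDetail is an
// assumed JobDetail, not taken from the source.
SchedulerFactoryBean factory = new SchedulerFactoryBean();
factory.setOverwriteExistingJobs(true);   // re-register jobs even if they already exist
factory.setJobDetails(reportJobDetail);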
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    // load jobId
    JobKey jobKey = context.getTrigger().getJobKey();
    Integer jobId = Integer.valueOf(jobKey.getName());

    // trigger
    JobTriggerPoolHelper.trigger(jobId, TriggerTypeEnum.CRON, -1, null, null);
}
/**
 * Shut down the Quartz scheduler on bean factory shutdown,
 * stopping all scheduled jobs.
 */
@Override
public void destroy() throws SchedulerException {
    if (this.scheduler != null) {
        logger.info("Shutting down Quartz Scheduler");
        this.scheduler.shutdown(this.waitForJobsToCompleteOnShutdown);
    }
}
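// Hedged sketch: the waitForJobsToCompleteOnShutdown flag used above is typically
// enabled through the corresponding setter on SchedulerFactoryBean.
SchedulerFactoryBean factory = new SchedulerFactoryBean();
factory.setWaitForJobsToCompleteOnShutdown(true);  // block shutdown until running jobs finish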
/**
 * Determine whether the job is paused.
 *
 * @return whether the job is paused
 */
public synchronized boolean isPaused() {
    try {
        return !scheduler.isShutdown()
                && Trigger.TriggerState.PAUSED == scheduler.getTriggerState(new TriggerKey(triggerIdentity));
    } catch (final SchedulerException ex) {
        throw new JobSystemException(ex);
    }
}
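// Illustrative counterpart (scheduler and triggerIdentity are assumed from the snippet):
// pausing the trigger is what moves it into TriggerState.PAUSED, which the check above detects.
scheduler.pauseTrigger(new TriggerKey(triggerIdentity));    // isPaused() should now return true
scheduler.resumeTrigger(new TriggerKey(triggerIdentity));   // and false again after resuming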
/**
 * Register all specified listeners with the Scheduler.
 */
protected void registerListeners() throws SchedulerException {
    ListenerManager listenerManager = getScheduler().getListenerManager();
    if (this.schedulerListeners != null) {
        for (SchedulerListener listener : this.schedulerListeners) {
            listenerManager.addSchedulerListener(listener);
        }
    }
    if (this.globalJobListeners != null) {
        for (JobListener listener : this.globalJobListeners) {
            listenerManager.addJobListener(listener);
        }
    }
    if (this.globalTriggerListeners != null) {
        for (TriggerListener listener : this.globalTriggerListeners) {
            listenerManager.addTriggerListener(listener);
        }
    }
}
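// Minimal sketch of a job listener that could be registered through this path, assuming
// the enclosing class exposes Spring's usual setters such as setGlobalJobListeners(...).
public class LoggingJobListener extends JobListenerSupport {

    @Override
    public String getName() {
        return "loggingJobListener";
    }

    @Override
    public void jobWasExecuted(JobExecutionContext context, JobExecutionException jobException) {
        getLog().info("Finished job {}", context.getJobDetail().getKey());
    }
}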
/**
 * Expose the specified context attributes and/or the current
 * ApplicationContext in the Quartz SchedulerContext.
 */
private void populateSchedulerContext(Scheduler scheduler) throws SchedulerException {
    // Put specified objects into Scheduler context.
    if (this.schedulerContextMap != null) {
        scheduler.getContext().putAll(this.schedulerContextMap);
    }

    // Register ApplicationContext in Scheduler context.
    if (this.applicationContextSchedulerContextKey != null) {
        if (this.applicationContext == null) {
            throw new IllegalStateException(
                    "SchedulerFactoryBean needs to be set up in an ApplicationContext " +
                    "to be able to handle an 'applicationContextSchedulerContextKey'");
        }
        scheduler.getContext().put(this.applicationContextSchedulerContextKey, this.applicationContext);
    }
}
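// Hedged usage sketch: a job can read the exposed ApplicationContext back out of the
// SchedulerContext under the configured key; "applicationContext" is an assumed key name.
public class ContextAwareJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        try {
            ApplicationContext applicationContext =
                    (ApplicationContext) context.getScheduler().getContext().get("applicationContext");
            // look up Spring-managed beans from applicationContext as needed
        } catch (SchedulerException ex) {
            throw new JobExecutionException(ex);
        }
    }
}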
/**
 * TODO: complete this Javadoc.
 *
 * @author zhouhao
 */
@DisallowConcurrentExecution
public class DynamicJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
    }
}
@Override
public Job newJob(TriggerFiredBundle bundle, Scheduler scheduler) throws SchedulerException {
    try {
        Object jobObject = createJobInstance(bundle);
        return adaptJob(jobObject);
    }
    catch (Throwable ex) {
        throw new SchedulerException("Job instantiation failed", ex);
    }
}
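// Hedged configuration sketch: a JobFactory of this kind (for example Spring's
// SpringBeanJobFactory, which extends AdaptableJobFactory) is typically wired into
// the scheduler so that job instances are created and adapted through it.
SchedulerFactoryBean factory = new SchedulerFactoryBean();
factory.setJobFactory(new SpringBeanJobFactory());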
@Override
public void stop() throws SchedulingException {
    if (this.scheduler != null) {
        try {
            this.scheduler.standby();
        }
        catch (SchedulerException ex) {
            throw new SchedulingException("Could not stop Quartz Scheduler", ex);
        }
    }
}
@Override
public void initialize() throws SchedulerConfigException {
    // Absolutely needs thread-bound Executor to initialize.
    this.taskExecutor = SchedulerFactoryBean.getConfigTimeTaskExecutor();
    if (this.taskExecutor == null) {
        throw new SchedulerConfigException("No local Executor found for configuration - " +
                "'taskExecutor' property must be set on SchedulerFactoryBean");
    }
}
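// Hedged sketch: the thread-bound Executor picked up here is the one handed to the
// SchedulerFactoryBean at configuration time; the pool size below is illustrative.
SchedulerFactoryBean factory = new SchedulerFactoryBean();
factory.setTaskExecutor(Executors.newFixedThreadPool(10));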
/**
 * Register objects in the JobDataMap via a given Map.
 * <p>These objects will be available to this Job only,
 * in contrast to objects in the SchedulerContext.
 * <p>Note: When using persistent Jobs whose JobDetail will be kept in the
 * database, do not put Spring-managed beans or an ApplicationContext
 * reference into the JobDataMap but rather into the SchedulerContext.
 * @param jobDataAsMap a Map with String keys and any objects as values
 * (for example Spring-managed beans)
 * @see org.springframework.scheduling.quartz.SchedulerFactoryBean#setSchedulerContextAsMap
 */
public void setJobDataAsMap(Map<String, ?> jobDataAsMap) {
    getJobDataMap().putAll(jobDataAsMap);
}
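// Hedged usage sketch, assuming the enclosing class is Spring's JobDetailFactoryBean:
// job data set here travels with the JobDetail itself rather than with a particular trigger.
// GreetingJob and the map entry are illustrative (see the QuartzJobBean sketch above).
JobDetailFactoryBean jobDetail = new JobDetailFactoryBean();
jobDetail.setJobClass(GreetingJob.class);
jobDetail.setJobDataAsMap(Map.of("message", "hello"));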