/**
 * Quartz entry point: retrieves the {@link JobSpec} and the {@link Runnable}
 * that were stored in the JobDataMap at scheduling time, and runs the runnable.
 *
 * @param context the Quartz execution context carrying the job's data map
 * @throws JobExecutionException wrapping any throwable the runnable raises
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    JobDataMap dataMap = context.getJobDetail().getJobDataMap();
    JobSpec jobSpec = (JobSpec) dataMap.get(JOB_SPEC_KEY);
    Runnable jobRunnable = (Runnable) dataMap.get(JOB_RUNNABLE_KEY);
    try {
        jobRunnable.run();
    } catch (Throwable t) {
        // Fix: error message typo "runable" -> "runnable".
        throw new JobExecutionException("Job runnable for " + jobSpec + " failed.", t);
    }
}
}
/**
 * Listener callback: logs an INFO message describing the fired trigger,
 * formatted with the configured trigger-fired message pattern.
 */
public void triggerFired(Trigger trigger, JobExecutionContext context) {
    // Skip building the argument array entirely when INFO is disabled.
    if (!getLog().isInfoEnabled()) {
        return;
    }
    Object[] msgArgs = {
            trigger.getKey().getName(),
            trigger.getKey().getGroup(),
            trigger.getPreviousFireTime(),
            trigger.getNextFireTime(),
            new java.util.Date(),
            context.getJobDetail().getKey().getName(),
            context.getJobDetail().getKey().getGroup(),
            Integer.valueOf(context.getRefireCount())
    };
    getLog().info(MessageFormat.format(getTriggerFiredMessage(), msgArgs));
}
/**
 * Converts a {@link JobDetail} into a JMX open-type row.
 *
 * @param jobDetail the job to convert
 * @return CompositeData holding the job's name, group, description, class,
 *         data map, durability and recovery flags
 */
public static CompositeData toCompositeData(JobDetail jobDetail) {
    Object[] itemValues = {
            jobDetail.getKey().getName(),
            jobDetail.getKey().getGroup(),
            jobDetail.getDescription(),
            jobDetail.getJobClass().getName(),
            JobDataMapSupport.toTabularData(jobDetail.getJobDataMap()),
            jobDetail.isDurable(),
            jobDetail.requestsRecovery(),
    };
    try {
        return new CompositeDataSupport(COMPOSITE_TYPE, ITEM_NAMES, itemValues);
    } catch (OpenDataException e) {
        // COMPOSITE_TYPE/ITEM_NAMES are fixed, so a failure here is a programming error.
        throw new RuntimeException(e);
    }
}
// Returns all triggers associated with the given job, wrapped as a fixed-size List.
// NOTE(review): uses the legacy (name, group) lookup — Quartz 2.x takes a JobKey and
// already returns a List; confirm which Quartz version this file targets.
List<Trigger> getTriggersOfJob(JobDetail jobDetail, Scheduler scheduler) throws SchedulerException { return Arrays.asList(scheduler.getTriggersOfJob(jobDetail.getName(), jobDetail.getGroup())); }
// NOTE(review): this line is a garbled/truncated fragment of XMLSchedulingDataProcessor-style
// job-loading logic: it contains an orphan `} else {`, statements after `throw e;` that would be
// unreachable, and unclosed braces. It cannot compile as-is — recover the original source before
// editing. Left byte-identical below.
dupeJ = sched.getJobDetail(detail.getKey()); } catch (JobPersistenceException e) { if (e.getCause() instanceof ClassNotFoundException && isOverWriteExistingData()) { log.info("Removing job: " + detail.getKey()); sched.deleteJob(detail.getKey()); } else { throw e; log.info("Not overwriting existing job: " + dupeJ.getKey()); continue; // just ignore the entry log.info("Replacing job: " + detail.getKey()); } else { log.info("Adding job: " + detail.getKey()); List<MutableTrigger> triggersOfJob = triggersByFQJobName.get(detail.getKey()); if (!detail.isDurable() && (triggersOfJob == null || triggersOfJob.size() == 0)) { if (dupeJ == null) { throw new SchedulerException( "A new job defined without any triggers must be durable: " + detail.getKey()); if ((dupeJ.isDurable() && (sched.getTriggersOfJob( detail.getKey()).size() == 0))) { throw new SchedulerException( "Can't change existing durable job without triggers to non-durable: " +
/**
 * Listener callback: logs an INFO message describing the fired trigger,
 * formatted with the configured trigger-fired message pattern.
 * Uses the legacy name/group accessors (pre-JobKey Quartz API).
 *
 * @param trigger the trigger that fired
 * @param context execution context of the associated job
 */
public void triggerFired(Trigger trigger, JobExecutionContext context) {
    // Avoid building the argument array when INFO logging is disabled.
    if (!getLog().isInfoEnabled()) {
        return;
    }
    Object[] args = {
            trigger.getName(),
            trigger.getGroup(),
            trigger.getPreviousFireTime(),
            trigger.getNextFireTime(),
            new java.util.Date(),
            context.getJobDetail().getName(),
            context.getJobDetail().getGroup(),
            // Fix: Integer.valueOf over the deprecated `new Integer(...)` constructor
            // (benefits from the small-value cache; consistent with the sibling listener).
            Integer.valueOf(context.getRefireCount())
    };
    getLog().info(MessageFormat.format(getTriggerFiredMessage(), args));
}
// NOTE(review): truncated fragment of QuartzScheduler.scheduleJob validation — every `if`
// is missing its closing brace, `trig` appears without a visible declaration, and the tail
// (`cal = ...`, notify calls) belongs to logic outside this view. `notifySchedulerListenersSchduled`
// is the historical (misspelled) Quartz method name — do not "fix" it. Left byte-identical below.
if (jobDetail.getKey() == null) { throw new SchedulerException("Job's key cannot be null"); if (jobDetail.getJobClass() == null) { throw new SchedulerException("Job's class cannot be null"); if (trigger.getJobKey() == null) { trig.setJobKey(jobDetail.getKey()); } else if (!trigger.getJobKey().equals(jobDetail.getKey())) { throw new SchedulerException( "Trigger does not reference given job!"); if (trigger.getCalendarName() != null) { cal = resources.getJobStore().retrieveCalendar(trigger.getCalendarName()); notifySchedulerThread(trigger.getNextFireTime().getTime()); notifySchedulerListenersSchduled(trigger);
// NOTE(review): truncated fragment of FileScanJob.execute — the two `throw` statements have
// lost their guarding `if (fileName == null)` / `if (listenerName == null)` conditions,
// `lastDate`/`newDate` are declared elsewhere, and braces are unbalanced. Cannot compile as-is;
// recover the original source before editing. Left byte-identical below.
JobDataMap mergedJobDataMap = context.getMergedJobDataMap(); SchedulerContext schedCtxt = null; try { schedCtxt = context.getScheduler().getContext(); } catch (SchedulerException e) { throw new JobExecutionException("Error obtaining scheduler context.", e, false); String fileName = mergedJobDataMap.getString(FILE_NAME); String listenerName = mergedJobDataMap.getString(FILE_SCAN_LISTENER_NAME); throw new JobExecutionException("Required parameter '" + FILE_NAME + "' not found in merged JobDataMap"); throw new JobExecutionException("Required parameter '" + FILE_SCAN_LISTENER_NAME + "' not found in merged JobDataMap"); if(mergedJobDataMap.containsKey(LAST_MODIFIED_TIME)) { lastDate = mergedJobDataMap.getLong(LAST_MODIFIED_TIME); context.getJobDetail().getJobDataMap().put(LAST_MODIFIED_TIME, newDate);
/**
 * Schedules a fire-now batch job carrying the given keys, after waiting for
 * the outstanding job count to drop below the configured parallel limit.
 * A SchedulerException raised while the scheduler is shutting down is
 * swallowed; otherwise it propagates.
 */
private void process(JobExecutionContext context, Ehcache underlyingCache, ScheduledRefreshConfiguration config,
        List<Serializable> batch) throws SchedulerException {
    // Copy the parent job's data map and attach the keys this batch must process.
    JobDataMap batchData = new JobDataMap(context.getJobDetail().getJobDataMap());
    batchData.put(ScheduledRefreshCacheExtension.PROP_KEYS_TO_PROCESS, batch);

    Scheduler quartz = context.getScheduler();
    String jobGroup = context.getTrigger().getJobKey().getGroup();
    JobDetail batchJob = JobBuilder.newJob(RefreshBatchJob.class)
            .withIdentity("RefreshBatch-" + INSTANCE_ID_GENERATOR.incrementAndGet(), jobGroup)
            .usingJobData(batchData)
            .build();

    try {
        // Throttle: block until we are under the configured parallel job count.
        waitForOutstandingJobCount(context, config, quartz, config.getParallelJobCount());
        if (!quartz.isShutdown()) {
            Trigger fireNow = TriggerBuilder.newTrigger().startNow().forJob(batchJob).build();
            quartz.scheduleJob(batchJob, fireNow);
        }
    } catch (SchedulerException e) {
        // Expected during shutdown; rethrow in any other situation.
        if (!quartz.isShutdown()) {
            throw e;
        }
    }
}
public void triggerFired(Trigger trigger, JobExecutionContext context) { // Call the scheduleJobInterruptMonitor and capture the ScheduledFuture in context try { // Schedule Monitor only if the job wants AutoInterruptable functionality if (context.getJobDetail().getJobDataMap().getBoolean(AUTO_INTERRUPTIBLE)) { JobInterruptMonitorPlugin monitorPlugin = (JobInterruptMonitorPlugin) context.getScheduler() .getContext().get(JOB_INTERRUPT_MONITOR_KEY); // Get the MaxRuntime from Job Data if NOT available use DEFAULT_MAX_RUNTIME from Plugin Configuration long jobDataDelay = DEFAULT_MAX_RUNTIME; if (context.getJobDetail().getJobDataMap().get(MAX_RUN_TIME) != null){ jobDataDelay = context.getJobDetail().getJobDataMap().getLong(MAX_RUN_TIME); } future = monitorPlugin.scheduleJobInterruptMonitor(context.getJobDetail().getKey(), jobDataDelay); getLog().debug("Job's Interrupt Monitor has been scheduled to interrupt with the delay :" + DEFAULT_MAX_RUNTIME); } } catch (SchedulerException e) { getLog().info("Error scheduling interrupt monitor " + e.getMessage(), e); } }
/**
 * Converts a {@link JobExecutionContext} into a JMX open-type row containing
 * scheduler name, trigger/job identity, merged data map, calendar, recovery
 * flag, refire count, fire timing and the fire instance id.
 *
 * @return composite data describing the execution context
 * @throws SchedulerException if the scheduler name cannot be obtained
 */
public static CompositeData toCompositeData(JobExecutionContext jec) throws SchedulerException {
    Object[] itemValues = {
            jec.getScheduler().getSchedulerName(),
            jec.getTrigger().getKey().getName(),
            jec.getTrigger().getKey().getGroup(),
            jec.getJobDetail().getKey().getName(),
            jec.getJobDetail().getKey().getGroup(),
            JobDataMapSupport.toTabularData(jec.getMergedJobDataMap()),
            jec.getTrigger().getCalendarName(),
            jec.isRecovering(),
            jec.getRefireCount(),
            jec.getFireTime(),
            jec.getScheduledFireTime(),
            jec.getPreviousFireTime(),
            jec.getNextFireTime(),
            jec.getJobRunTime(),
            jec.getFireInstanceId()
    };
    try {
        return new CompositeDataSupport(COMPOSITE_TYPE, ITEM_NAMES, itemValues);
    } catch (OpenDataException e) {
        // COMPOSITE_TYPE/ITEM_NAMES are fixed, so a failure here is a programming error.
        throw new RuntimeException(e);
    }
}
// NOTE(review): truncated fragment of a Quartz-based session-validation scheduler — it starts
// mid-expression (`sessionValidationInterval);` closes a call begun outside this view) and the
// trailing `if` blocks are unclosed. Also uses the Quartz 1.x `new JobDetail(...)` constructor.
// Cannot compile as-is; recover the original source before editing. Left byte-identical below.
sessionValidationInterval); JobDetail detail = new JobDetail(JOB_NAME, Scheduler.DEFAULT_GROUP, QuartzSessionValidationJob.class); detail.getJobDataMap().put(QuartzSessionValidationJob.SESSION_MANAGER_KEY, sessionManager); scheduler.scheduleJob(detail, trigger); if (schedulerImplicitlyCreated) { scheduler.start(); if (log.isDebugEnabled()) { log.debug("Successfully started implicitly created Quartz Scheduler instance.");
/**
 * Notifies registered JobListeners that a job finished executing.
 *
 * @param jobExCtxt execution context of the completed job
 * @param jobExEx   the exception the job raised, or null on success
 * @return true when every listener was notified; false when notification
 *         failed — the failure is reported to scheduler listeners and ignored
 */
private boolean notifyJobListenersComplete(JobExecutionContext jobExCtxt, JobExecutionException jobExEx) {
    try {
        qs.notifyJobListenersWasExecuted(jobExCtxt, jobExEx);
        return true;
    } catch (SchedulerException se) {
        qs.notifySchedulerListenersError(
                "Unable to notify JobListener(s) of Job that was executed: "
                        + "(error will be ignored). trigger= "
                        + jobExCtxt.getTrigger().getKey()
                        + " job= "
                        + jobExCtxt.getJobDetail().getKey(), se);
        return false;
    }
}
// NOTE(review): truncated fragment of Spring's SchedulerAccessor.addTriggerToScheduler — the
// first inner `if` condition is cut short before `rescheduleJob`, and several braces never
// close. Cannot compile as-is; recover the original source before editing. Left byte-identical below.
boolean triggerExists = (getScheduler().getTrigger(trigger.getKey()) != null); if (triggerExists && !this.overwriteExistingJobs) { return false; JobDetail jobDetail = (JobDetail) trigger.getJobDataMap().remove("jobDetail"); if (triggerExists) { if (jobDetail != null && this.jobDetails != null && getScheduler().rescheduleJob(trigger.getKey(), trigger); try { if (jobDetail != null && this.jobDetails != null && !this.jobDetails.contains(jobDetail) && (this.overwriteExistingJobs || getScheduler().getJobDetail(jobDetail.getKey()) == null)) { getScheduler().scheduleJob(jobDetail, trigger); this.jobDetails.add(jobDetail);
// Starts the scheduler and registers UpdaterJob to fire every second via a cron trigger.
// NOTE(review): uses the Quartz 1.x constructors (`new JobDetail(...)`, `new CronTrigger(...)`);
// Quartz 2.x replaced these with JobBuilder/TriggerBuilder. `CronTrigger`'s constructor throws
// ParseException in 1.x — presumably the enclosing method declares it; confirm against the caller.
Scheduler s = sf.getScheduler(); s.start(); JobDetail jd = new JobDetail("MyJobq", "Job_Group", UpdaterJob.class); CronTrigger ct = new CronTrigger("MyTrigger", "MyTrigger_Group", "0/1 * * * * ?"); s.scheduleJob(jd, ct);
/**
 * Quartz entry point for publishing a scheduled event: reconstructs the event
 * from the job's data map via the configured EventJobDataBinder, then publishes
 * it on the EventBus inside a Unit of Work (transactional when a
 * TransactionManager is registered in the scheduler context).
 *
 * @param context the Quartz execution context
 * @throws JobExecutionException wrapping any failure during rebuild/publish
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    logger.debug("Starting job to publish a scheduled event");
    JobDetail jobDetail = context.getJobDetail();
    JobDataMap jobData = jobDetail.getJobDataMap();
    try {
        // Fix: fetch the scheduler context once and reuse it — the original
        // re-fetched it for both the event bus and the transaction manager.
        SchedulerContext schedulerContext = context.getScheduler().getContext();
        EventJobDataBinder jobDataBinder = (EventJobDataBinder) schedulerContext.get(EVENT_JOB_DATA_BINDER_KEY);
        Object event = jobDataBinder.fromJobData(jobData);
        EventMessage<?> eventMessage = createMessage(event);
        EventBus eventBus = (EventBus) schedulerContext.get(EVENT_BUS_KEY);
        TransactionManager txManager = (TransactionManager) schedulerContext.get(TRANSACTION_MANAGER_KEY);
        UnitOfWork<EventMessage<?>> unitOfWork = DefaultUnitOfWork.startAndGet(null);
        if (txManager != null) {
            unitOfWork.attachTransaction(txManager);
        }
        unitOfWork.execute(() -> eventBus.publish(eventMessage));
        if (logger.isInfoEnabled()) {
            logger.info("Job successfully executed. Scheduled Event [{}] has been published.",
                        eventMessage.getPayloadType().getSimpleName());
        }
    } catch (Exception e) {
        logger.error("Exception occurred while publishing scheduled event [{}]", jobDetail.getDescription(), e);
        throw new JobExecutionException(e);
    }
}
/** {@inheritDoc} */ @Override protected JobSpecSchedule doScheduleJob(JobSpec jobSpec, Runnable jobRunnable) { // Build a data map that gets passed to the job JobDataMap jobDataMap = new JobDataMap(); jobDataMap.put(JOB_SPEC_KEY, jobSpec); jobDataMap.put(JOB_RUNNABLE_KEY, jobRunnable); // Build a Quartz job JobDetail job = JobBuilder.newJob(QuartzJob.class) .withIdentity(jobSpec.getUri().toString()) .withDescription(Strings.nullToEmpty(jobSpec.getDescription())) .usingJobData(jobDataMap) .build(); Trigger jobTrigger = createTrigger(job.getKey(), jobSpec); QuartzJobSchedule jobSchedule = new QuartzJobSchedule(jobSpec, jobRunnable, jobTrigger); try { _scheduler.getScheduler().scheduleJob(job, jobTrigger); getLog().info(String.format("Scheduled job %s next two fire times: %s , %s.", jobSpec, jobTrigger.getNextFireTime(), jobTrigger.getFireTimeAfter(jobTrigger.getNextFireTime()))); } catch (SchedulerException e) { throw new RuntimeException("Scheduling failed for " + jobSpec + ":" + e, e); } return jobSchedule; }