/**
 * Listener callback invoked after a job finishes. If the completed job has a
 * successor registered in {@code chainLinks}, the successor is triggered
 * immediately; scheduling failures are logged, not propagated.
 */
@Override
public void jobWasExecuted(JobExecutionContext context, JobExecutionException jobException) {
    JobKey completedKey = context.getJobDetail().getKey();
    JobKey nextJob = chainLinks.get(completedKey);
    if (nextJob != null) {
        getLog().info("Job '" + completedKey + "' will now chain to Job '" + nextJob + "'");
        try {
            context.getScheduler().triggerJob(nextJob);
        } catch (SchedulerException se) {
            getLog().error("Error encountered during chaining to Job '" + nextJob + "'", se);
        }
    }
}
}
/**
 * Listener callback invoked after a job finishes. If the completed job has a
 * successor registered in {@code chainLinks}, the successor is triggered
 * immediately; scheduling failures are logged, not propagated.
 */
@Override
public void jobWasExecuted(JobExecutionContext context, JobExecutionException jobException) {
    JobKey completedKey = context.getJobDetail().getKey();
    JobKey nextJob = chainLinks.get(completedKey);
    if (nextJob != null) {
        getLog().info("Job '" + completedKey + "' will now chain to Job '" + nextJob + "'");
        try {
            context.getScheduler().triggerJob(nextJob);
        } catch (SchedulerException se) {
            getLog().error("Error encountered during chaining to Job '" + nextJob + "'", se);
        }
    }
}
}
/**
 * Quartz job that retrieves an {@code ExternalInstance} from the scheduler
 * context (stored under the key "ExternalInstance") and invokes its
 * average-calculation routine.
 */
public class SimpleJob implements Job {

    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        SchedulerContext schedulerContext;
        try {
            schedulerContext = context.getScheduler().getContext();
        } catch (SchedulerException e) {
            // BUG FIX: the original printed the stack trace and continued with a
            // null schedulerContext, guaranteeing a NullPointerException below.
            // Fail the job explicitly and preserve the cause instead.
            throw new JobExecutionException("Unable to obtain scheduler context", e);
        }
        ExternalInstance externalInstance = (ExternalInstance) schedulerContext.get("ExternalInstance");
        // Result was assigned to an unused local in the original; the call's
        // side effects (if any) are preserved.
        externalInstance.calculateAvg();
    }
}
public void triggerFired(Trigger trigger, JobExecutionContext context) { // Call the scheduleJobInterruptMonitor and capture the ScheduledFuture in context try { // Schedule Monitor only if the job wants AutoInterruptable functionality if (context.getJobDetail().getJobDataMap().getBoolean(AUTO_INTERRUPTIBLE)) { JobInterruptMonitorPlugin monitorPlugin = (JobInterruptMonitorPlugin) context.getScheduler() .getContext().get(JOB_INTERRUPT_MONITOR_KEY); // Get the MaxRuntime from Job Data if NOT available use DEFAULT_MAX_RUNTIME from Plugin Configuration long jobDataDelay = DEFAULT_MAX_RUNTIME; if (context.getJobDetail().getJobDataMap().get(MAX_RUN_TIME) != null){ jobDataDelay = context.getJobDetail().getJobDataMap().getLong(MAX_RUN_TIME); } future = monitorPlugin.scheduleJobInterruptMonitor(context.getJobDetail().getKey(), jobDataDelay); getLog().debug("Job's Interrupt Monitor has been scheduled to interrupt with the delay :" + DEFAULT_MAX_RUNTIME); } } catch (SchedulerException e) { getLog().info("Error scheduling interrupt monitor " + e.getMessage(), e); } }
public void triggerFired(Trigger trigger, JobExecutionContext context) { // Call the scheduleJobInterruptMonitor and capture the ScheduledFuture in context try { // Schedule Monitor only if the job wants AutoInterruptable functionality if (context.getJobDetail().getJobDataMap().getBoolean(AUTO_INTERRUPTIBLE)) { JobInterruptMonitorPlugin monitorPlugin = (JobInterruptMonitorPlugin) context.getScheduler() .getContext().get(JOB_INTERRUPT_MONITOR_KEY); // Get the MaxRuntime from Job Data if NOT available use DEFAULT_MAX_RUNTIME from Plugin Configuration long jobDataDelay = DEFAULT_MAX_RUNTIME; if (context.getJobDetail().getJobDataMap().get(MAX_RUN_TIME) != null){ jobDataDelay = context.getJobDetail().getJobDataMap().getLong(MAX_RUN_TIME); } future = monitorPlugin.scheduleJobInterruptMonitor(context.getJobDetail().getKey(), jobDataDelay); getLog().debug("Job's Interrupt Monitor has been scheduled to interrupt with the delay :" + DEFAULT_MAX_RUNTIME); } } catch (SchedulerException e) { getLog().info("Error scheduling interrupt monitor " + e.getMessage(), e); } }
/**
 * Publishes the scheduled event carried in this job's data map onto the
 * event bus, inside a unit of work (with a transaction attached when a
 * transaction manager is configured in the scheduler context).
 *
 * @throws JobExecutionException wrapping any failure during binding or publication
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    logger.debug("Starting job to publish a scheduled event");
    JobDetail jobDetail = context.getJobDetail();
    JobDataMap jobData = jobDetail.getJobDataMap();
    try {
        // IMPROVEMENT: fetch the scheduler context once and reuse it — the
        // original called context.getScheduler().getContext() three times.
        SchedulerContext schedulerContext = context.getScheduler().getContext();
        EventJobDataBinder jobDataBinder = (EventJobDataBinder) schedulerContext.get(EVENT_JOB_DATA_BINDER_KEY);
        Object event = jobDataBinder.fromJobData(jobData);
        EventMessage<?> eventMessage = createMessage(event);
        EventBus eventBus = (EventBus) schedulerContext.get(EVENT_BUS_KEY);
        TransactionManager txManager = (TransactionManager) schedulerContext.get(TRANSACTION_MANAGER_KEY);
        UnitOfWork<EventMessage<?>> unitOfWork = DefaultUnitOfWork.startAndGet(null);
        if (txManager != null) {
            unitOfWork.attachTransaction(txManager);
        }
        unitOfWork.execute(() -> eventBus.publish(eventMessage));
        if (logger.isInfoEnabled()) {
            logger.info("Job successfully executed. Scheduled Event [{}] has been published.",
                        eventMessage.getPayloadType().getSimpleName());
        }
    } catch (Exception e) {
        logger.error("Exception occurred while publishing scheduled event [{}]", jobDetail.getDescription(), e);
        throw new JobExecutionException(e);
    }
}
/**
 * Applies entries from the scheduler context and the merged job data map as
 * bean property values on this instance, then delegates to
 * {@code executeInternal}.
 * @see #executeInternal
 */
@Override
public final void execute(JobExecutionContext context) throws JobExecutionException {
    try {
        MutablePropertyValues propertyValues = new MutablePropertyValues();
        // Scheduler context entries first so the merged job data map (added
        // second) overrides any matching property names.
        propertyValues.addPropertyValues(context.getScheduler().getContext());
        propertyValues.addPropertyValues(context.getMergedJobDataMap());
        BeanWrapper wrapper = PropertyAccessorFactory.forBeanPropertyAccess(this);
        // ignoreUnknown=true: entries without a matching bean property are skipped.
        wrapper.setPropertyValues(propertyValues, true);
    } catch (SchedulerException ex) {
        throw new JobExecutionException(ex);
    }
    executeInternal(context);
}
SchedulerContext schedCtxt = null; try { schedCtxt = context.getScheduler().getContext(); } catch (SchedulerException e) { throw new JobExecutionException("Error obtaining scheduler context.", e, false);
SchedulerContext schedCtxt = null; try { schedCtxt = context.getScheduler().getContext(); } catch (SchedulerException e) { throw new JobExecutionException("Error obtaining scheduler context.", e, false);
schedulerContext = context.getScheduler().getContext(); } catch (Exception e) { logger.error("Exception occurred during processing a deadline job [{}]", jobDetail.getDescription(), e);
/**
 * This implementation applies the passed-in job data map as bean property
 * values, and delegates to {@code executeInternal} afterwards.
 * @see #executeInternal
 */
@Override
public final void execute(JobExecutionContext context) throws JobExecutionException {
    try {
        BeanWrapper bw = PropertyAccessorFactory.forBeanPropertyAccess(this);
        MutablePropertyValues pvs = new MutablePropertyValues();
        // Scheduler context entries are added first so that the merged job
        // data map (added second) overrides any matching property names.
        pvs.addPropertyValues(context.getScheduler().getContext());
        pvs.addPropertyValues(context.getMergedJobDataMap());
        // ignoreUnknown=true: entries with no matching bean property are skipped.
        bw.setPropertyValues(pvs, true);
    } catch (SchedulerException ex) {
        throw new JobExecutionException(ex);
    }
    executeInternal(context);
}
/**
 * Converts a {@code JobExecutionContext} into a JMX {@code CompositeData}
 * row for exposure via the scheduler's MBean.
 *
 * NOTE: the order of the Object[] elements below MUST match ITEM_NAMES
 * exactly — do not reorder.
 *
 * @return composite data
 */
public static CompositeData toCompositeData(JobExecutionContext jec) throws SchedulerException {
    try {
        return new CompositeDataSupport(COMPOSITE_TYPE, ITEM_NAMES,
                new Object[] {
                        jec.getScheduler().getSchedulerName(),
                        jec.getTrigger().getKey().getName(),
                        jec.getTrigger().getKey().getGroup(),
                        jec.getJobDetail().getKey().getName(),
                        jec.getJobDetail().getKey().getGroup(),
                        // Job data map is flattened into JMX tabular data.
                        JobDataMapSupport.toTabularData(jec
                                .getMergedJobDataMap()),
                        jec.getTrigger().getCalendarName(),
                        jec.isRecovering(),
                        jec.getRefireCount(),
                        jec.getFireTime(), jec.getScheduledFireTime(),
                        jec.getPreviousFireTime(), jec.getNextFireTime(),
                        jec.getJobRunTime(),
                        jec.getFireInstanceId() });
    } catch (OpenDataException e) {
        // OpenDataException indicates a type-mapping bug, not a runtime
        // condition callers can handle — surface it as unchecked.
        throw new RuntimeException(e);
    }
}
/**
 * Maintenance job that evicts stale entries from the BimServer compare
 * cache. The {@code BimServer} instance is looked up from the scheduler
 * context under the key "bimserver".
 */
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    try {
        BimServer bimServer = (BimServer) (jobExecutionContext.getScheduler().getContext().get("bimserver"));
        bimServer.getCompareCache().cleanup();
    } catch (SchedulerException e) {
        // BUG FIX: the original logged an empty message string, producing an
        // uninformative log entry; give the failure a meaningful description.
        LOGGER.error("Error cleaning up the compare cache", e);
    }
}
}
/**
 * Maintenance job that cleans up finished entries in the BimServer long
 * action manager. The {@code BimServer} instance is looked up from the
 * scheduler context under the key "bimserver".
 */
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
    try {
        BimServer bimServer = (BimServer) (jobExecutionContext.getScheduler().getContext().get("bimserver"));
        bimServer.getLongActionManager().cleanup();
    } catch (SchedulerException e) {
        // BUG FIX: the original logged an empty message string, producing an
        // uninformative log entry; give the failure a meaningful description.
        LOGGER.error("Error cleaning up the long action manager", e);
    }
}
}
/**
 * Schedules one batch of cache keys as a new RefreshBatchJob, carrying the
 * parent job's data map plus this batch's keys. Throttles so no more than
 * the configured number of batch jobs run in parallel, and tolerates
 * scheduler shutdown mid-flight.
 *
 * NOTE(review): underlyingCache is accepted but not used in this method —
 * presumably kept for signature symmetry with callers; confirm.
 */
private void process(JobExecutionContext context, Ehcache underlyingCache, ScheduledRefreshConfiguration config,
        List<Serializable> batch) throws SchedulerException {
    // Copy the parent job's data map and attach this batch's keys to it.
    JobDataMap map = new JobDataMap(context.getJobDetail().getJobDataMap());
    map.put(ScheduledRefreshCacheExtension.PROP_KEYS_TO_PROCESS, batch);
    Scheduler scheduler = context.getScheduler();
    // Unique per-batch identity; grouped with the parent trigger's job group.
    JobDetail job = JobBuilder
            .newJob(RefreshBatchJob.class)
            .withIdentity("RefreshBatch-" + INSTANCE_ID_GENERATOR.incrementAndGet(),
                    context.getTrigger().getJobKey().getGroup()).usingJobData(map).build();
    try {
        // Block until the number of outstanding batch jobs drops below the
        // configured parallelism limit.
        waitForOutstandingJobCount(context, config, scheduler, config.getParallelJobCount());
        if (!scheduler.isShutdown()) {
            Trigger trigger = TriggerBuilder.newTrigger().startNow().forJob(job).build();
            scheduler.scheduleJob(job, trigger);
        }
    } catch (SchedulerException e) {
        // Deliberate: a SchedulerException raised because the scheduler is
        // shutting down is suppressed; any other scheduling failure is rethrown.
        if (!scheduler.isShutdown()) {
            throw e;
        }
    }
}
Scheduler scheduler = context.getScheduler(); if (!context.getScheduler().isShutdown()) { LOG.warn("Unable to process Scheduled Refresh batch " + context.getJobDetail().getKey(), e);
/**
 * MDB-style Quartz watchdog job, fired once a minute by the cron trigger
 * "0 * * * * ?" configured on the quartz-ra resource adapter.
 */
@ResourceAdapter("quartz-ra.rar")
@MessageDriven(activationConfig = {
        @ActivationConfigProperty(propertyName = "cronTrigger", propertyValue = "0 * * * * ?")})
public class MyCronWatchDog implements Job {

    public void execute(JobExecutionContext jex) throws JobExecutionException {
        System.out.println("Quartz job executed!!");
        // IMPROVEMENT: removed the unused local Scheduler variable; the
        // scheduler remains reachable via jex.getScheduler() when needed.
    }
}
/**
 * Determines whether another instance of the same Spring-bean-backed job is
 * currently executing. The executing-jobs list includes this job itself, so
 * "another instance" means a match count greater than one.
 *
 * @param context the context of the currently executing job
 * @return true if more than one running job shares this job's Spring bean id
 * @throws SchedulerException if the list of executing jobs cannot be obtained
 */
private boolean isJobCurrentlyRunning(JobExecutionContext context) throws SchedulerException {
    String beanId = context.getJobDetail().getJobDataMap().getString(SpringJobBeanWrapper.SPRING_BEAN_NAME);
    List<JobExecutionContext> jobsRunning = context.getScheduler().getCurrentlyExecutingJobs();
    int jobsCount = 0;
    // IMPROVEMENT: braces added to the loop/if, and the verbose
    // "if (...) return true; return false;" collapsed to a direct boolean return.
    for (JobExecutionContext running : jobsRunning) {
        if (beanId.equals(running.getJobDetail().getJobDataMap().getString(SpringJobBeanWrapper.SPRING_BEAN_NAME))) {
            jobsCount++;
        }
    }
    return jobsCount > 1;
}
/**
 * Obtains the {@code SchedulerContext} for the currently executing job.
 *
 * @param context the executing job's context
 * @return the scheduler context
 * @throws JobExecutionException if the scheduler context cannot be obtained
 */
protected SchedulerContext getSchedulerContext(JobExecutionContext context) throws JobExecutionException {
    try {
        return context.getScheduler().getContext();
    } catch (SchedulerException e) {
        // BUG FIX: the original dropped the underlying SchedulerException,
        // losing the root cause; pass it to the JobExecutionException.
        throw new JobExecutionException(
                "Failed to obtain scheduler context for job " + context.getJobDetail().getKey(), e);
    }
}
/**
 * Looks up the Spring {@code ApplicationContext} stored in the scheduler
 * context (key "applicationContext"), resolves the processor bean named by
 * {@code getJobProcessor()}, and delegates job processing to it.
 */
@Override
protected void executeInternal(JobExecutionContext context) throws JobExecutionException {
    try {
        SchedulerContext schedulerCtx = context.getScheduler().getContext();
        ApplicationContext springCtx = (ApplicationContext) schedulerCtx.get("applicationContext");
        AbstractProcessor jobProcessor = springCtx.getBean(this.getJobProcessor());
        jobProcessor.process(context);
    } catch (SchedulerException e) {
        throw new JobExecutionException(e);
    }
}