/**
 * Runs the {@link Runnable} stored in the Quartz job data map under
 * {@code JOB_RUNNABLE_KEY}. The associated {@code JobSpec} (under
 * {@code JOB_SPEC_KEY}) is used only to give failures a descriptive message.
 *
 * @throws JobExecutionException wrapping any {@link Throwable} the runnable raises
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
  JobDataMap dataMap = context.getJobDetail().getJobDataMap();
  JobSpec jobSpec = (JobSpec) dataMap.get(JOB_SPEC_KEY);
  Runnable jobRunnable = (Runnable) dataMap.get(JOB_RUNNABLE_KEY);
  try {
    jobRunnable.run();
  } catch (Throwable t) {
    // Fixed typo in the failure message: "runable" -> "runnable".
    throw new JobExecutionException("Job runnable for " + jobSpec + " failed.", t);
  }
}
}
class MyJob implements Job { public MyJob() { } public void execute(JobExecutionContext context) throws JobExecutionException { try{ //connect to other application etc } catch(Exception e){ Thread.sleep(600000); //sleep for 10 mins JobExecutionException e2 = new JobExecutionException(e); //fire it again e2.setRefireImmediately(true); throw e2; } } }
/**
 * Entry point invoked by the scheduling framework: pulls the job scheduler,
 * job properties and listener out of the Quartz data map and runs the job,
 * wrapping any failure in a {@link JobExecutionException}.
 */
@Override
public void executeImpl(JobExecutionContext context) throws JobExecutionException {
  LOG.info("Starting job " + context.getJobDetail().getKey());
  final JobDataMap data = context.getJobDetail().getJobDataMap();
  final JobScheduler scheduler = (JobScheduler) data.get(JOB_SCHEDULER_KEY);
  final Properties props = (Properties) data.get(PROPERTIES_KEY);
  final JobListener listener = (JobListener) data.get(JOB_LISTENER_KEY);
  try {
    scheduler.runJob(props, listener);
  } catch (Throwable t) {
    throw new JobExecutionException(t);
  }
}
/**
 * Runs the flow described by this trigger's Quartz data map: the scheduler,
 * the job properties and the listener are looked up under their well-known
 * keys and any failure is rethrown as a {@link JobExecutionException}.
 */
@Override
public void executeImpl(JobExecutionContext context) throws JobExecutionException {
  _log.info("Starting FlowSpec " + context.getJobDetail().getKey());
  JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
  JobScheduler flowScheduler = (JobScheduler) jobDataMap.get(JOB_SCHEDULER_KEY);
  Properties flowProps = (Properties) jobDataMap.get(PROPERTIES_KEY);
  JobListener flowListener = (JobListener) jobDataMap.get(JOB_LISTENER_KEY);
  try {
    flowScheduler.runJob(flowProps, flowListener);
  } catch (Throwable t) {
    throw new JobExecutionException(t);
  }
}
@Override public void executeImpl(JobExecutionContext context) throws JobExecutionException { JobDataMap dataMap = context.getJobDetail().getJobDataMap(); final JobScheduler jobScheduler = (JobScheduler) dataMap.get(JobScheduler.JOB_SCHEDULER_KEY); // the properties may get mutated during job execution and the scheduler reuses it for the next round of scheduling, // so clone it final Properties jobProps = (Properties)((Properties) dataMap.get(JobScheduler.PROPERTIES_KEY)).clone(); final JobListener jobListener = (JobListener) dataMap.get(JobScheduler.JOB_LISTENER_KEY); try { if (Boolean.valueOf(jobProps.getProperty(GobblinClusterConfigurationKeys.JOB_EXECUTE_IN_SCHEDULING_THREAD, Boolean.toString(GobblinClusterConfigurationKeys.JOB_EXECUTE_IN_SCHEDULING_THREAD_DEFAULT)))) { jobScheduler.runJob(jobProps, jobListener); } else { cancellable = jobScheduler.scheduleJobImmediately(jobProps, jobListener); } } catch (Throwable t) { throw new JobExecutionException(t); } }
/**
 * Sends the datastore's data to the remote side, optionally after a random
 * start delay (up to {@code m_delay} seconds) so multiple nodes do not all
 * transmit at the same instant. The last failure is kept in
 * {@code m_currentException}; it is cleared on success.
 */
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
  if (m_delay != 0) {
    int delay = m_rand.nextInt(m_delay);
    try {
      Thread.sleep(delay * 1000L);
    } catch (InterruptedException e) {
      logger.warn("Sleep delay interrupted", e);
      // Restore the interrupt flag instead of swallowing it.
      Thread.currentThread().interrupt();
    }
  }
  try {
    logger.debug("Sending remote data");
    m_datastore.sendData();
    m_currentException = null;
    logger.debug("Finished sending remote data");
  } catch (Exception e) {
    logger.error("Unable to send remote data", e);
    m_currentException = e;
    // Pass the cause along instead of only its message so the scheduler
    // logs the full stack trace.
    throw new JobExecutionException("Unable to send remote data: " + e.getMessage(), e);
  }
}
@SuppressWarnings("unchecked") @Override public void execute(JobExecutionContext quartzContext) throws JobExecutionException { TimerJobInstance timerJobInstance = (TimerJobInstance) quartzContext.getJobDetail().getJobDataMap().get("timerJobInstance"); try { ((Callable<Void>)timerJobInstance).call(); } catch (Exception e) { boolean reschedule = true; Integer failedCount = (Integer) quartzContext.getJobDetail().getJobDataMap().get("failedCount"); if (failedCount == null) { failedCount = new Integer(0); } failedCount++; quartzContext.getJobDetail().getJobDataMap().put("failedCount", failedCount); if (failedCount > FAILED_JOB_RETRIES) { logger.error("Timer execution failed {} times in a roll, unscheduling ({})", FAILED_JOB_RETRIES, quartzContext.getJobDetail().getKey()); reschedule = false; } // let's give it a bit of time before failing/retrying try { Thread.sleep(failedCount * FAILED_JOB_DELAY); } catch (InterruptedException e1) { logger.debug("Got interrupted", e1); } throw new JobExecutionException("Exception when executing scheduled job", e, reschedule); } }
// NOTE(review): incomplete fragment — these throws/catches belong to an EJB-invoking
// job whose surrounding method body is not visible here; braces do not balance.
// Presumably it looks up an EJB via JNDI and reflectively invokes a method — TODO confirm
// against the full source before editing.
throw new JobExecutionException("must specify EJB_JNDI_NAME_KEY"); throw new JobExecutionException("must specify EJB_METHOD_KEY"); value = jndiContext.lookup(ejb); } catch (NamingException ne) { throw new JobExecutionException(ne); context.setResult(returnValue); } catch (IllegalAccessException iae) { throw new JobExecutionException(iae); } catch (InvocationTargetException ite) { throw new JobExecutionException(ite.getTargetException()); } catch (NoSuchMethodException nsme) { throw new JobExecutionException(nsme);
/**
 * Reflectively dispatches a job: the Quartz job key encodes the target as
 * group = package, name = "SimpleClassName.methodName". The class is loaded,
 * an instance is obtained from the injector, and the named method is invoked
 * (with the context when it declares one parameter). A non-void result is
 * stored on the context.
 *
 * @throws JobExecutionException if the class/method cannot be resolved or the
 *         invocation fails (the invocation target's own cause is unwrapped)
 */
@Override
public void execute(final JobExecutionContext context) throws JobExecutionException {
  JobDetail detail = context.getJobDetail();
  JobKey key = detail.getKey();
  try {
    String[] names = key.getName().split("\\.");
    String classname = key.getGroup() + "." + names[0];
    Class<?> loadedClass = getClass().getClassLoader().loadClass(classname);
    String methodname = names[1];
    Object job = this.injector.getInstance(loadedClass);
    Method method = Arrays.stream(loadedClass.getDeclaredMethods())
        .filter(m -> m.getName().equals(methodname))
        .findFirst()
        // Fail with a descriptive exception instead of the opaque
        // NoSuchElementException thrown by an unchecked Optional.get().
        .orElseThrow(() -> new NoSuchMethodException(
            "No method named '" + methodname + "' declared on " + classname));
    final Object result;
    if (method.getParameterCount() == 1) {
      result = method.invoke(job, context);
    } else {
      result = method.invoke(job);
    }
    if (method.getReturnType() != void.class) {
      context.setResult(result);
    }
  } catch (InvocationTargetException ex) {
    // Unwrap to report the job's real failure, not the reflection wrapper.
    throw new JobExecutionException("Job execution resulted in error: " + key, ex.getCause());
  } catch (Exception ex) {
    throw new JobExecutionException("Job execution resulted in error: " + key, ex);
  }
}
// NOTE(review): incomplete fragment — retry bookkeeping lifted out of a larger
// method. Presumably `count` is read from `dataMap` earlier and `e2` is thrown
// with refire semantics elsewhere — TODO confirm against the full source.
JobExecutionException e = new JobExecutionException("Retries exceeded"); count++; dataMap.putAsString("count", count); JobExecutionException e2 = new JobExecutionException(e);
// NOTE(review): incomplete fragment — a single rethrow; the `false` argument marks
// the job as not-to-be-refired. The surrounding method is not visible here.
throw new JobExecutionException("Error launching native command: ", x, false);
/**
 * Builds a mail message from the merged job data map and sends it.
 *
 * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
 */
public void execute(JobExecutionContext context) throws JobExecutionException {
  JobDataMap mergedData = context.getMergedJobDataMap();
  MailInfo mailInfo = populateMailInfo(mergedData, createMailInfo());
  getLog().info("Sending message " + mailInfo);
  try {
    MimeMessage message = prepareMimeMessage(mailInfo);
    Transport.send(message);
  } catch (MessagingException e) {
    // refireImmediately=false: an undeliverable message would just fail again.
    throw new JobExecutionException("Unable to send mail: " + mailInfo, e, false);
  }
}
// NOTE(review): incomplete fragment of a directory-scan job — braces do not balance
// and the guards around each throw are not visible. Presumably each throw sits in an
// if-block validating the merged JobDataMap / SchedulerContext — TODO confirm.
schedCtxt = context.getScheduler().getContext(); } catch (SchedulerException e) { throw new JobExecutionException("Error obtaining scheduler context.", e, false); throw new JobExecutionException("Required parameter '" + DIRECTORY_NAME + "' not found in merged JobDataMap"); throw new JobExecutionException("Required parameter '" + DIRECTORY_SCAN_LISTENER_NAME + "' not found in merged JobDataMap"); throw new JobExecutionException("DirectoryScanListener named '" + listenerName + "' not found in SchedulerContext");
// NOTE(review): incomplete fragment of a file-scan job, parallel to the directory-scan
// fragment above it in this collection — validation guards are not visible here.
// Presumably each throw is conditional on a missing data-map entry — TODO confirm.
schedCtxt = context.getScheduler().getContext(); } catch (SchedulerException e) { throw new JobExecutionException("Error obtaining scheduler context.", e, false); throw new JobExecutionException("Required parameter '" + FILE_NAME + "' not found in merged JobDataMap"); throw new JobExecutionException("Required parameter '" + FILE_SCAN_LISTENER_NAME + "' not found in merged JobDataMap"); throw new JobExecutionException("FileScanListener named '" + listenerName + "' not found in SchedulerContext");
/**
 * Applies the scheduler-context entries and the merged job data map as bean
 * property values on this instance, then delegates to {@code executeInternal}.
 * @see #executeInternal
 */
@Override
public final void execute(JobExecutionContext context) throws JobExecutionException {
  try {
    MutablePropertyValues propertyValues = new MutablePropertyValues();
    propertyValues.addPropertyValues(context.getScheduler().getContext());
    propertyValues.addPropertyValues(context.getMergedJobDataMap());
    BeanWrapper wrapper = PropertyAccessorFactory.forBeanPropertyAccess(this);
    wrapper.setPropertyValues(propertyValues, true);
  } catch (SchedulerException ex) {
    throw new JobExecutionException(ex);
  }
  executeInternal(context);
}
// NOTE(review): incomplete fragment — tail of a JMS send method; the try block and
// the `producer` declaration are not visible. The finally correctly closes the
// producer on both success and failure paths.
throw new JobExecutionException(e); } finally { JmsHelper.closeResource(producer);
/**
 * Publishes a scheduled event: rebuilds the event from this job's data map
 * via the {@code EventJobDataBinder} held in the scheduler context, wraps it
 * in an {@code EventMessage} and publishes it on the {@code EventBus} inside
 * a unit of work (attached to the {@code TransactionManager} when one is
 * configured).
 *
 * @throws JobExecutionException wrapping any failure during binding or publication
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
  logger.debug("Starting job to publish a scheduled event");
  JobDetail jobDetail = context.getJobDetail();
  JobDataMap jobData = jobDetail.getJobDataMap();
  try {
    // Fetch the scheduler context once and reuse it for every lookup instead
    // of calling context.getScheduler().getContext() three times.
    SchedulerContext schedulerContext = context.getScheduler().getContext();
    EventJobDataBinder jobDataBinder = (EventJobDataBinder) schedulerContext.get(EVENT_JOB_DATA_BINDER_KEY);
    Object event = jobDataBinder.fromJobData(jobData);
    EventMessage<?> eventMessage = createMessage(event);
    EventBus eventBus = (EventBus) schedulerContext.get(EVENT_BUS_KEY);
    TransactionManager txManager = (TransactionManager) schedulerContext.get(TRANSACTION_MANAGER_KEY);
    UnitOfWork<EventMessage<?>> unitOfWork = DefaultUnitOfWork.startAndGet(null);
    if (txManager != null) {
      unitOfWork.attachTransaction(txManager);
    }
    unitOfWork.execute(() -> eventBus.publish(eventMessage));
    if (logger.isInfoEnabled()) {
      logger.info("Job successfully executed. Scheduled Event [{}] has been published.",
          eventMessage.getPayloadType().getSimpleName());
    }
  } catch (Exception e) {
    logger.error("Exception occurred while publishing scheduled event [{}]",
        jobDetail.getDescription(), e);
    throw new JobExecutionException(e);
  }
}
/**
 * Populates this job's bean properties from the scheduler context and the
 * merged job data map, then hands control to {@code executeInternal}.
 * @see #executeInternal
 */
@Override
public final void execute(JobExecutionContext context) throws JobExecutionException {
  try {
    BeanWrapper beanWrapper = PropertyAccessorFactory.forBeanPropertyAccess(this);
    MutablePropertyValues values = new MutablePropertyValues();
    values.addPropertyValues(context.getScheduler().getContext());
    values.addPropertyValues(context.getMergedJobDataMap());
    // ignoreUnknown=true: entries without a matching bean property are skipped.
    beanWrapper.setPropertyValues(values, true);
  } catch (SchedulerException ex) {
    throw new JobExecutionException(ex);
  }
  executeInternal(context);
}
// NOTE(review): incomplete fragment — tail of a JMS send method. The rethrow keeps
// only e.getMessage() and drops the original exception as cause; consider
// `new JobExecutionException(e)` to preserve the stack trace. Not changed here
// because the enclosing method is not visible.
throw new JobExecutionException(e.getMessage()); } finally { JmsHelper.closeResource(sender);
// NOTE(review): incomplete fragment — tail of a JMS publish method; the finally
// correctly releases the publisher regardless of outcome. Enclosing try block is
// not visible here.
throw new JobExecutionException(e); } finally { JmsHelper.closeResource(publisher);