PointbaseDelegate

How to use PointbaseDelegate in org.quartz.impl.jdbcjobstore

Best Java code snippets using org.quartz.impl.jdbcjobstore.PointbaseDelegate (Showing top 20 results out of 315)

origin: quartz-scheduler/quartz

// PointbaseDelegate#getJobDataFromBlob: Pointbase exposes the column via getBytes() rather than getBlob()
protected Object getJobDataFromBlob(ResultSet rs, String colName)
    throws ClassNotFoundException, IOException, SQLException {
  if (canUseProperties()) {
    byte[] data = rs.getBytes(colName);
    // a null column means no job data map was stored
    return (data == null) ? null : new ByteArrayInputStream(data);
  }
  return getObjectFromBlob(rs, colName);
}
origin: quartz-scheduler/quartz

// PointbaseDelegate#insertJobDetail: the serialized JobDataMap goes in through setBinaryStream()
public int insertJobDetail(Connection conn, JobDetail job)
    throws IOException, SQLException {
  ByteArrayOutputStream baos = serializeJobData(job.getJobDataMap());
  int len = baos.toByteArray().length;
  ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
  PreparedStatement ps = null;
  try {
    ps = conn.prepareStatement(rtp(INSERT_JOB_DETAIL));
    ps.setString(1, job.getKey().getName());
    ps.setString(2, job.getKey().getGroup());
    ps.setString(3, job.getDescription());
    ps.setString(4, job.getJobClass().getName());
    setBoolean(ps, 5, job.isDurable());
    setBoolean(ps, 6, job.isConcurrentExectionDisallowed());
    setBoolean(ps, 7, job.isPersistJobDataAfterExecution());
    setBoolean(ps, 8, job.requestsRecovery());
    ps.setBinaryStream(9, bais, len);
    return ps.executeUpdate();
  } finally {
    closeStatement(ps);
  }
}
origin: quartz-scheduler/quartz

// PointbaseDelegate#updateCalendar: the serialized org.quartz.Calendar is written as a binary stream
public int updateCalendar(Connection conn, String calendarName,
    Calendar calendar) throws IOException, SQLException {
  ByteArrayOutputStream baos = serializeObject(calendar);
  byte[] buf = baos.toByteArray();
  ByteArrayInputStream bais = new ByteArrayInputStream(buf);
  PreparedStatement ps = null;
  try {
    ps = conn.prepareStatement(rtp(UPDATE_CALENDAR));
    ps.setBinaryStream(1, bais, buf.length);
    ps.setString(2, calendarName);
    return ps.executeUpdate();
  } finally {
    closeStatement(ps);
  }
}
origin: quartz-scheduler/quartz

/**
 * <p>
 * Update the job data map for the given job.
 * </p>
 * 
 * @param conn
 *          the DB Connection
 * @param job
 *          the job to update
 * @return the number of rows updated
 */
@Override           
public int updateJobData(Connection conn, JobDetail job)
  throws IOException, SQLException {
  //log.debug( "Updating Job Data for Job " + job );
  ByteArrayOutputStream baos = serializeJobData(job.getJobDataMap());
  int len = baos.toByteArray().length;
  ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
  PreparedStatement ps = null;
  try {
    ps = conn.prepareStatement(rtp(UPDATE_JOB_DATA));
    ps.setBinaryStream(1, bais, len);
    ps.setString(2, job.getKey().getName());
    ps.setString(3, job.getKey().getGroup());
    return ps.executeUpdate();
  } finally {
    closeStatement(ps);
  }
}
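Application code does not call updateJobData directly; the JDBC job store invokes it, for example when a job marked with @PersistJobDataAfterExecution finishes and its JobDataMap has to be re-persisted, or when an existing job is re-registered. A rough sketch of one public-API path that leads to this kind of delegate call (NoopJob is illustrative, and the delegate is only involved when a JDBC job store is configured):

import org.quartz.*;
import org.quartz.impl.StdSchedulerFactory;
import static org.quartz.JobBuilder.newJob;

public class UpdateJobDataExample {
    public static final class NoopJob implements Job {
        public void execute(JobExecutionContext ctx) { /* nothing to do */ }
    }

    public static void main(String[] args) throws Exception {
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        scheduler.start();

        // store a durable job together with an initial data map
        JobDetail job = newJob(NoopJob.class)
                .withIdentity("report", "nightly")
                .usingJobData("lastRun", "never")
                .storeDurably()
                .build();
        scheduler.addJob(job, false);

        // re-registering with replace=true rewrites the stored job, including its data map;
        // with a JDBC job store this is persisted through delegate methods such as updateJobDetail
        JobDetail updated = job.getJobBuilder().usingJobData("lastRun", "2024-01-01").build();
        scheduler.addJob(updated, true);

        scheduler.shutdown(true);
    }
}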
origin: quartz-scheduler/quartz

// PointbaseDelegate#insertTrigger (several column assignments are elided in this snippet)
public int insertTrigger(Connection conn, OperableTrigger trigger, String state,
    JobDetail jobDetail) throws SQLException, IOException {
  ByteArrayOutputStream baos = serializeJobData(trigger.getJobDataMap());
  int len = baos.toByteArray().length;
  ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
  PreparedStatement ps = null;
  int insertResult = 0;
  try {
    ps = conn.prepareStatement(rtp(INSERT_TRIGGER));
    ps.setString(1, trigger.getKey().getName());
    ps.setString(2, trigger.getKey().getGroup());
    // ... columns 3-7 are elided in this snippet ...
    ps.setString(8, state);
    // ... remaining columns, including setBinaryStream(bais, len) for the job data map, elided ...
    insertResult = ps.executeUpdate();
  } finally {
    closeStatement(ps);
  }
  TriggerPersistenceDelegate tDel = findTriggerPersistenceDelegate(trigger);
  if (tDel == null)
    insertBlobTrigger(conn, trigger);
  else
    tDel.insertExtendedTriggerProperties(conn, trigger, state, jobDetail);
  return insertResult;
}
origin: quartz-scheduler/quartz

// PointbaseDelegate#updateTrigger (most column assignments are elided in this snippet)
public int updateTrigger(Connection conn, OperableTrigger trigger, String state,
    JobDetail jobDetail) throws SQLException, IOException {
  ByteArrayOutputStream baos = serializeJobData(trigger.getJobDataMap());
  int len = baos.toByteArray().length;
  ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
  PreparedStatement ps = null;
  int updateResult = 0;
  try {
    ps = conn.prepareStatement(rtp(UPDATE_TRIGGER));
    // ... columns 1-5 and setBinaryStream(bais, len) for the job data map are elided ...
    ps.setString(6, state);
    updateResult = ps.executeUpdate();
  } finally {
    closeStatement(ps);
  }
  TriggerPersistenceDelegate tDel = findTriggerPersistenceDelegate(trigger);
  if (tDel == null)
    updateBlobTrigger(conn, trigger);
  else
    tDel.updateExtendedTriggerProperties(conn, trigger, state, jobDetail);
  return updateResult;
}
origin: com.opensymphony.quartz/com.springsource.org.quartz

/**
 * <p>
 * Update the job data map for the given job.
 * </p>
 * 
 * @param conn
 *          the DB Connection
 * @param job
 *          the job to update
 * @return the number of rows updated
 */
public int updateJobData(Connection conn, JobDetail job)
  throws IOException, SQLException {
  //log.debug( "Updating Job Data for Job " + job );
  ByteArrayOutputStream baos = serializeJobData(job.getJobDataMap());
  int len = baos.toByteArray().length;
  ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
  PreparedStatement ps = null;
  try {
    ps = conn.prepareStatement(rtp(UPDATE_JOB_DATA));
    ps.setBinaryStream(1, bais, len);
    ps.setString(2, job.getName());
    ps.setString(3, job.getGroup());
    return ps.executeUpdate();
  } finally {
    closeStatement(ps);
  }
}
origin: quartz-scheduler/quartz

// PointbaseDelegate#updateJobDetail: rewrites the job row, including the serialized JobDataMap
public int updateJobDetail(Connection conn, JobDetail job)
    throws IOException, SQLException {
  ByteArrayOutputStream baos = serializeJobData(job.getJobDataMap());
  int len = baos.toByteArray().length;
  ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
  PreparedStatement ps = null;
  try {
    ps = conn.prepareStatement(rtp(UPDATE_JOB_DETAIL));
    ps.setString(1, job.getDescription());
    ps.setString(2, job.getJobClass().getName());
    setBoolean(ps, 3, job.isDurable());
    setBoolean(ps, 4, job.isConcurrentExectionDisallowed());
    setBoolean(ps, 5, job.isPersistJobDataAfterExecution());
    setBoolean(ps, 6, job.requestsRecovery());
    ps.setBinaryStream(7, bais, len);
    ps.setString(8, job.getKey().getName());
    ps.setString(9, job.getKey().getGroup());
    return ps.executeUpdate();
  } finally {
    closeStatement(ps);
  }
}
origin: quartz-scheduler/quartz

public int insertCalendar(Connection conn, String calendarName,
    Calendar calendar) throws IOException, SQLException {
  ByteArrayOutputStream baos = serializeObject(calendar);
  byte[] buf = baos.toByteArray();
  ByteArrayInputStream bais = new ByteArrayInputStream(buf);
  PreparedStatement ps = null;
  try {
    ps = conn.prepareStatement(rtp(INSERT_CALENDAR));
    ps.setString(1, calendarName);
    ps.setBinaryStream(2, bais, buf.length);
    return ps.executeUpdate();
  } finally {
    closeStatement(ps);
  }
}
org.quartz.impl.jdbcjobstore.PointbaseDelegate

Javadoc

This is a driver delegate for the Pointbase JDBC driver.
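In a Quartz deployment this class is not instantiated directly; it is selected through the JDBC job store configuration. A minimal sketch of wiring it up programmatically, assuming a JobStoreTX job store and a data source named myDS (the driver class, URL and credentials below are placeholders for an actual Pointbase installation):

import java.util.Properties;
import org.quartz.Scheduler;
import org.quartz.impl.StdSchedulerFactory;

public class PointbaseSchedulerConfig {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.setProperty("org.quartz.scheduler.instanceName", "PointbaseScheduler");
        props.setProperty("org.quartz.threadPool.class", "org.quartz.simpl.SimpleThreadPool");
        props.setProperty("org.quartz.threadPool.threadCount", "3");

        // JDBC job store, told to talk to the database through PointbaseDelegate
        props.setProperty("org.quartz.jobStore.class", "org.quartz.impl.jdbcjobstore.JobStoreTX");
        props.setProperty("org.quartz.jobStore.driverDelegateClass",
                "org.quartz.impl.jdbcjobstore.PointbaseDelegate");
        props.setProperty("org.quartz.jobStore.dataSource", "myDS");

        // placeholder connection settings for the Pointbase database
        props.setProperty("org.quartz.dataSource.myDS.driver", "com.pointbase.jdbc.jdbcUniversalDriver");
        props.setProperty("org.quartz.dataSource.myDS.URL", "jdbc:pointbase:server://localhost/quartz");
        props.setProperty("org.quartz.dataSource.myDS.user", "PBPUBLIC");
        props.setProperty("org.quartz.dataSource.myDS.password", "PBPUBLIC");

        Scheduler scheduler = new StdSchedulerFactory(props).getScheduler();
        scheduler.start();
        // ... schedule jobs ...
        scheduler.shutdown(true);
    }
}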

Most used methods

  • canUseProperties
  • closeStatement
  • getObjectFromBlob
    This method should be overridden by any delegate subclasses that need special handling for BLOBs; the base implementation uses standard JDBC java.sql.Blob operations (see the serialization sketch after this list).
  • rtp
  • serializeJobData
  • serializeObject
  • setBoolean
  • deleteJobListeners
  • deleteTriggerListeners
  • findTriggerPersistenceDelegate
  • insertBlobTrigger
  • insertJobListener
  • insertTriggerListener
  • updateBlobTrigger
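The serializeJobData/serializeObject and getObjectFromBlob helpers that the snippets above rely on boil down to plain Java object serialization over an in-memory byte buffer. A standalone sketch of that round trip, using a HashMap in place of a JobDataMap (this illustrates the pattern, not the delegate's exact implementation):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.Map;

public class SerializationRoundTrip {

    // roughly what serializeObject/serializeJobData do: write the object into an in-memory buffer
    static ByteArrayOutputStream serialize(Object obj) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(baos)) {
            out.writeObject(obj);
        }
        return baos;
    }

    // roughly what getObjectFromBlob does once the column bytes have been read back
    static Object deserialize(byte[] data) throws IOException, ClassNotFoundException {
        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(data))) {
            return in.readObject();
        }
    }

    public static void main(String[] args) throws Exception {
        Map<String, String> jobData = new HashMap<>();
        jobData.put("reportName", "nightly");

        byte[] stored = serialize(jobData).toByteArray();  // what setBinaryStream() would write to JOB_DATA
        Object restored = deserialize(stored);             // what the job store gets back when it loads the job
        System.out.println(restored);                      // prints {reportName=nightly}
    }
}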
