StdJDBCDelegate

How to use StdJDBCDelegate in org.quartz.impl.jdbcjobstore

Best Java code snippets using org.quartz.impl.jdbcjobstore.StdJDBCDelegate (Showing top 20 results out of 315)

origin: quartz-scheduler/quartz

// Excerpt: loading a trigger's JobDataMap (selectTriggerJobDataMap)
ps = conn.prepareStatement(rtp(SELECT_TRIGGER_DATA));
ps.setString(1, triggerName);
ps.setString(2, groupName);
rs = ps.executeQuery();
if (rs.next()) {
  if (canUseProperties()) {
    map = getMapFromProperties(rs);
  } else {
    map = (Map<?, ?>) getObjectFromBlob(rs, COL_JOB_DATAMAP);
  }
}
closeResultSet(rs);
closeStatement(ps);
origin: quartz-scheduler/quartz

/**
 * build Map from java.util.Properties encoding.
 */
private Map<?, ?> getMapFromProperties(ResultSet rs)
  throws ClassNotFoundException, IOException, SQLException {
  Map<?, ?> map;
  InputStream is = (InputStream) getJobDataFromBlob(rs, COL_JOB_DATAMAP);
  if(is == null) {
    return null;
  }
  Properties properties = new Properties();
  if (is != null) {
    try {
      properties.load(is);
    } finally {
      is.close();
    }
  }
  map = convertFromProperty(properties);
  return map;
}
origin: quartz-scheduler/quartz

/**
 * serialize the java.util.Properties
 */
private ByteArrayOutputStream serializeProperties(JobDataMap data)
  throws IOException {
  ByteArrayOutputStream ba = new ByteArrayOutputStream();
  if (null != data) {
    Properties properties = convertToProperty(data.getWrappedMap());
    properties.store(ba, "");
  }
  return ba;
}
origin: quartz-scheduler/quartz

public int deleteAllPausedTriggerGroups(Connection conn)
  throws SQLException {
  PreparedStatement ps = null;
  try {
    ps = conn.prepareStatement(rtp(DELETE_PAUSED_TRIGGER_GROUPS));
    int rows = ps.executeUpdate();
    return rows;
  } finally {
    closeStatement(ps);
  }
}
origin: quartz-scheduler/quartz

public int deletePausedTriggerGroup(Connection conn, GroupMatcher<TriggerKey> matcher)
  throws SQLException {
  PreparedStatement ps = null;
  try {
    ps = conn.prepareStatement(rtp(DELETE_PAUSED_TRIGGER_GROUP));
    ps.setString(1, toSqlLikeClause(matcher));
    int rows = ps.executeUpdate();
    return rows;
  } finally {
    closeStatement(ps);
  }
}
origin: quartz-scheduler/quartz

public boolean isTriggerGroupPaused(Connection conn, String groupName)
  throws SQLException {
  PreparedStatement ps = null;
  ResultSet rs = null;
  try {
    ps = conn.prepareStatement(rtp(SELECT_PAUSED_TRIGGER_GROUP));
    ps.setString(1, groupName);
    rs = ps.executeQuery();
    return rs.next();
  } finally {
    closeResultSet(rs);
    closeStatement(ps);
  }
}
origin: quartz-scheduler/quartz

/**
 * <p>
 * Check whether or not the given job is stateful.
 * </p>
 * 
 * @param conn
 *          the DB Connection
 * @return true if the job exists and is stateful, false otherwise
 */
public boolean isJobNonConcurrent(Connection conn, JobKey jobKey) throws SQLException {
  PreparedStatement ps = null;
  ResultSet rs = null;
  try {
    ps = conn.prepareStatement(rtp(SELECT_JOB_NONCONCURRENT));
    ps.setString(1, jobKey.getName());
    ps.setString(2, jobKey.getGroup());
    rs = ps.executeQuery();
    if (!rs.next()) { return false; }
    return getBoolean(rs, COL_IS_NONCONCURRENT);
  } finally {
    closeResultSet(rs);
    closeStatement(ps);
  }
}
origin: quartz-scheduler/quartz

// Excerpt: loading a trigger and its JobDataMap (selectTrigger)
ps = conn.prepareStatement(rtp(SELECT_TRIGGER));
ps.setString(1, triggerKey.getName());
ps.setString(2, triggerKey.getGroup());
rs = ps.executeQuery();
if (rs.next()) {
  // ... read the trigger's columns, including its type ...
  if (canUseProperties()) {
    map = getMapFromProperties(rs);
  } else {
    map = (Map<?, ?>) getObjectFromBlob(rs, COL_JOB_DATAMAP);
  }
  if (triggerType.equals(TTYPE_BLOB)) {
    // blob triggers are stored whole, serialized in their own table
    ps = conn.prepareStatement(rtp(SELECT_BLOB_TRIGGER));
    ps.setString(1, triggerKey.getName());
    ps.setString(2, triggerKey.getGroup());
    rs = ps.executeQuery();
    if (rs.next()) {
      trigger = (OperableTrigger) getObjectFromBlob(rs, COL_BLOB);
    }
  } else {
    TriggerPersistenceDelegate tDel = findTriggerPersistenceDelegate(triggerType);
    try {
      triggerProps = tDel.loadExtendedTriggerProperties(conn, triggerKey);
    } catch (IllegalStateException isex) {
      if (isTriggerStillPresent(ps)) {
        throw isex;
      } else {
        return null; // the trigger row was deleted concurrently
      }
    }
    // ... build the trigger from the loaded properties ...
    setTriggerStateProperties(trigger, triggerProps);
  }
}
closeResultSet(rs);
closeStatement(ps);
origin: quartz-scheduler/quartz

/**
 * <p>
 * Remove the transient data from and then create a serialized <code>java.util.ByteArrayOutputStream</code>
 * version of a <code>{@link org.quartz.JobDataMap}</code>.
 * </p>
 * 
 * @param data
 *          the JobDataMap to serialize
 * @return the serialized ByteArrayOutputStream
 * @throws IOException
 *           if serialization causes an error
 */
protected ByteArrayOutputStream serializeJobData(JobDataMap data)
  throws IOException {
  if (canUseProperties()) {
    return serializeProperties(data);
  }
  try {
    return serializeObject(data);
  } catch (NotSerializableException e) {
    throw new NotSerializableException(
      "Unable to serialize JobDataMap for insertion into " + 
      "database because the value of property '" + 
      getKeyOfNonSerializableValue(data) + 
      "' is not serializable: " + e.getMessage());
  }
}
origin: quartz-scheduler/quartz

// Excerpt: loading a Calendar by name (selectCalendar)
ResultSet rs = null;
try {
  String selCal = rtp(SELECT_CALENDAR);
  ps = conn.prepareStatement(selCal);
  ps.setString(1, calendarName);
  rs = ps.executeQuery();
  if (rs.next()) {
    cal = (Calendar) getObjectFromBlob(rs, COL_CALENDAR);
  }
} finally {
  closeResultSet(rs);
  closeStatement(ps);
}
origin: quartz-scheduler/quartz

public List<OperableTrigger> selectTriggersForCalendar(Connection conn, String calName)
  throws SQLException, ClassNotFoundException, IOException, JobPersistenceException {
  LinkedList<OperableTrigger> trigList = new LinkedList<OperableTrigger>();
  PreparedStatement ps = null;
  ResultSet rs = null;
  try {
    ps = conn.prepareStatement(rtp(SELECT_TRIGGERS_FOR_CALENDAR));
    ps.setString(1, calName);
    rs = ps.executeQuery();
    while (rs.next()) {
      trigList.add(selectTrigger(conn, triggerKey(rs.getString(COL_TRIGGER_NAME), rs.getString(COL_TRIGGER_GROUP))));
    }
  } finally {
    closeResultSet(rs);
    closeStatement(ps);
  }
  return trigList;
}
origin: quartz-scheduler/quartz

public List<String> selectTriggerGroups(Connection conn, GroupMatcher<TriggerKey> matcher) throws SQLException {
  PreparedStatement ps = null;
  ResultSet rs = null;
  try {
    ps = conn.prepareStatement(rtp(SELECT_TRIGGER_GROUPS_FILTERED));
    ps.setString(1, toSqlLikeClause(matcher));
    rs = ps.executeQuery();
    LinkedList<String> list = new LinkedList<String>();
    while (rs.next()) {
      list.add(rs.getString(1));
    }
    return list;
  } finally {
    closeResultSet(rs);
    closeStatement(ps);
  }
}
origin: quartz-scheduler/quartz

/**
 * <p>
 * This method should be overridden by any delegate subclasses that need
 * special handling for BLOBs for job details. The default implementation
 * uses standard JDBC <code>java.sql.Blob</code> operations.
 * </p>
 * 
 * @param rs
 *          the result set, already queued to the correct row
 * @param colName
 *          the column name for the BLOB
 * @return the deserialized Object from the ResultSet BLOB
 * @throws ClassNotFoundException
 *           if a class found during deserialization cannot be found
 * @throws IOException
 *           if deserialization causes an error
 */
protected Object getJobDataFromBlob(ResultSet rs, String colName)
  throws ClassNotFoundException, IOException, SQLException {
  if (canUseProperties()) {
    Blob blobLocator = rs.getBlob(colName);
    if (blobLocator != null) {
      InputStream binaryInput = blobLocator.getBinaryStream();
      return binaryInput;
    } else {
      return null;
    }
  }
  return getObjectFromBlob(rs, colName);
}
origin: com.opensymphony.quartz/com.springsource.org.quartz

/**
 * build Map from java.util.Properties encoding.
 */
private Map getMapFromProperties(ResultSet rs)
  throws ClassNotFoundException, IOException, SQLException {
  Map map;
  InputStream is = (InputStream) getJobDetailFromBlob(rs, COL_JOB_DATAMAP);
  if(is == null) {
    return null;
  }
  Properties properties = new Properties();
  if (is != null) {
    try {
      properties.load(is);
    } finally {
      is.close();
    }
  }
  map = convertFromProperty(properties);
  return map;
}
origin: quartz-scheduler/quartz

private boolean isTriggerStillPresent(PreparedStatement ps) throws SQLException {
  ResultSet rs = null;
  try {
    rs = ps.executeQuery();
    return rs.next();
  } finally {
    closeResultSet(rs);
  }
}
org.quartz.impl.jdbcjobstore.StdJDBCDelegate

Javadoc

This is meant to be an abstract base class for most, if not all, org.quartz.impl.jdbcjobstore.DriverDelegate implementations. Subclasses should override only those methods that need special handling for the DBMS driver in question.
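For example, a delegate for a database whose JDBC driver returns BLOB columns as byte arrays (the stock PostgreSQLDelegate works this way) can extend StdJDBCDelegate and override only getObjectFromBlob. The following is a minimal sketch, not the library's implementation; the class name is illustrative and error handling is abbreviated:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.quartz.impl.jdbcjobstore.StdJDBCDelegate;

// Illustrative subclass: reads serialized objects from a BLOB column via
// ResultSet.getBytes() instead of the java.sql.Blob API used by the base class.
public class ByteArrayBlobDelegate extends StdJDBCDelegate {

    @Override
    protected Object getObjectFromBlob(ResultSet rs, String colName)
            throws ClassNotFoundException, IOException, SQLException {
        byte[] bytes = rs.getBytes(colName);
        if (bytes == null || bytes.length == 0) {
            return null;
        }
        try (ObjectInputStream in =
                 new ObjectInputStream(new ByteArrayInputStream(bytes))) {
            return in.readObject();
        }
    }
}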

Most used methods

  • canUseProperties
  • closeResultSet
    Cleanup helper method that closes the given ResultSet while ignoring any errors.
  • closeStatement
    Cleanup helper method that closes the given Statement while ignoring any errors.
  • convertFromProperty
    Convert a java.util.Properties encoding back into a Map.
  • convertToProperty
    Convert the JobDataMap into a list of properties.
  • getBoolean
    Retrieves the value of the designated column in the current row as a boolean. This just wraps ResultSet.getBoolean(String).
  • getKeyOfNonSerializableValue
    Find the key of the first non-serializable value in the given Map.
  • getMapFromProperties
    Build a Map from the java.util.Properties encoding.
  • getObjectFromBlob
    This method should be overridden by any delegate subclasses that need special handling for BLOBs. The default implementation uses standard JDBC java.sql.Blob operations.
  • rtp
    Replace the table prefix in a query by replacing any occurrences of "{0}" with the table prefix (see the sketch after this list).
  • selectTrigger
    Select a trigger.
  • selectTriggerJobDataMap
    Select a trigger's JobDataMap.
  • serializeJobData
  • serializeObject
  • serializeProperties
  • setBoolean
  • setBytes
  • addDefaultTriggerPersistenceDelegates
  • addTriggerPersistenceDelegate
  • deleteBlobTrigger
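The rtp helper is protected, so it is only called from delegate code, but its effect is easy to show in isolation. The sketch below is not the library's implementation; the query template and helper are stand-ins that illustrate how the "{0}" placeholder in the SQL constants becomes the configured table prefix (QRTZ_ by default):

import java.text.MessageFormat;

public class TablePrefixDemo {

    // Stand-in for one of the SQL constants the delegate feeds to rtp();
    // "{0}" marks where the configured table prefix belongs.
    static final String SELECT_PAUSED_TRIGGER_GROUP =
            "SELECT TRIGGER_GROUP FROM {0}PAUSED_TRIGGER_GRPS WHERE TRIGGER_GROUP = ?";

    // Stand-in for what rtp(query) effectively does with the table prefix.
    static String rtp(String query, String tablePrefix) {
        return MessageFormat.format(query, tablePrefix);
    }

    public static void main(String[] args) {
        // Prints: SELECT TRIGGER_GROUP FROM QRTZ_PAUSED_TRIGGER_GRPS WHERE TRIGGER_GROUP = ?
        System.out.println(rtp(SELECT_PAUSED_TRIGGER_GROUP, "QRTZ_"));
    }
}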
