// NOTE(review): fragment of a larger method (presumably selectTriggerJobDataMap) —
// the enclosing try/finally, the rs = ps.executeQuery() call, and the closing braces
// are not visible in this chunk, so the code is left byte-identical. The prepared
// statement selects the trigger's job-data BLOB by name and group, then decodes it
// either as a java.util.Properties encoding or as a serialized object.
ps = conn.prepareStatement(rtp(SELECT_TRIGGER_DATA)); ps.setString(1, triggerName); ps.setString(2, groupName); if (canUseProperties()) { map = getMapFromProperties(rs); } else { map = (Map<?, ?>) getObjectFromBlob(rs, COL_JOB_DATAMAP); closeResultSet(rs); closeStatement(ps);
/**
 * Builds a Map from the java.util.Properties encoding stored in the
 * job-data BLOB column of the current result-set row.
 *
 * <p>Fix: removed the redundant {@code if (is != null)} check that followed
 * the early {@code return null} guard — the stream is provably non-null at
 * that point.</p>
 *
 * @param rs the ResultSet, already positioned on the row to read
 * @return the decoded map, or {@code null} if the BLOB column is null
 * @throws IOException if the properties stream cannot be read
 * @throws SQLException if the BLOB column cannot be accessed
 * @throws ClassNotFoundException propagated from the BLOB accessor
 */
private Map<?, ?> getMapFromProperties(ResultSet rs) throws ClassNotFoundException, IOException, SQLException {
    InputStream is = (InputStream) getJobDataFromBlob(rs, COL_JOB_DATAMAP);
    if (is == null) {
        return null;
    }
    Properties properties = new Properties();
    try {
        properties.load(is);
    } finally {
        is.close(); // always release the BLOB stream, even on a parse failure
    }
    return convertFromProperty(properties);
}
/**
 * Serializes the given JobDataMap to a stream using the
 * java.util.Properties text encoding. A null map yields an empty stream.
 *
 * @param data the JobDataMap to encode; may be null
 * @return a ByteArrayOutputStream holding the properties encoding
 * @throws IOException if writing the properties fails
 */
private ByteArrayOutputStream serializeProperties(JobDataMap data) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    if (data != null) {
        convertToProperty(data.getWrappedMap()).store(out, "");
    }
    return out;
}
/**
 * Deletes every row from the paused-trigger-groups table.
 *
 * @param conn the DB Connection
 * @return the number of rows deleted
 * @throws SQLException if the delete fails
 */
public int deleteAllPausedTriggerGroups(Connection conn) throws SQLException {
    PreparedStatement stmt = null;
    try {
        stmt = conn.prepareStatement(rtp(DELETE_PAUSED_TRIGGER_GROUPS));
        return stmt.executeUpdate();
    } finally {
        closeStatement(stmt);
    }
}
/**
 * Deletes the paused-trigger-group rows matching the given group matcher.
 *
 * @param conn the DB Connection
 * @param matcher the group matcher, translated to a SQL LIKE clause
 * @return the number of rows deleted
 * @throws SQLException if the delete fails
 */
public int deletePausedTriggerGroup(Connection conn, GroupMatcher<TriggerKey> matcher) throws SQLException {
    PreparedStatement stmt = null;
    try {
        stmt = conn.prepareStatement(rtp(DELETE_PAUSED_TRIGGER_GROUP));
        stmt.setString(1, toSqlLikeClause(matcher));
        return stmt.executeUpdate();
    } finally {
        closeStatement(stmt);
    }
}
/**
 * Reports whether the named trigger group is present in the
 * paused-trigger-groups table.
 *
 * @param conn the DB Connection
 * @param groupName the trigger group to look up
 * @return true if a matching row exists
 * @throws SQLException if the query fails
 */
public boolean isTriggerGroupPaused(Connection conn, String groupName) throws SQLException {
    PreparedStatement stmt = null;
    ResultSet rows = null;
    try {
        stmt = conn.prepareStatement(rtp(SELECT_PAUSED_TRIGGER_GROUP));
        stmt.setString(1, groupName);
        rows = stmt.executeQuery();
        return rows.next();
    } finally {
        closeResultSet(rows);
        closeStatement(stmt);
    }
}
/** * <p> * Check whether or not the given job is marked non-concurrent (the legacy docs called this "stateful"). * </p> * * @param conn * the DB Connection * @param jobKey * the key identifying the job to check * @return true if the job exists and its IS_NONCONCURRENT flag is set, false if the job row is absent * @throws SQLException if the lookup fails */ public boolean isJobNonConcurrent(Connection conn, JobKey jobKey) throws SQLException { PreparedStatement ps = null; ResultSet rs = null; try { ps = conn.prepareStatement(rtp(SELECT_JOB_NONCONCURRENT)); ps.setString(1, jobKey.getName()); ps.setString(2, jobKey.getGroup()); rs = ps.executeQuery(); if (!rs.next()) { return false; } return getBoolean(rs, COL_IS_NONCONCURRENT); } finally { closeResultSet(rs); closeStatement(ps); } }
// NOTE(review): fragment of a larger selectTrigger method — the enclosing try block,
// the executeQuery() calls between statement preparations, and the matching close
// braces are outside this chunk, so the code is left byte-identical. It selects the
// trigger row, decodes its job-data map (properties or serialized object), falls
// back to a BLOB-stored trigger, and loads extended properties via the trigger's
// persistence delegate; on IllegalStateException it rethrows only if the trigger
// row still exists.
ps = conn.prepareStatement(rtp(SELECT_TRIGGER)); ps.setString(1, triggerKey.getName()); ps.setString(2, triggerKey.getGroup()); if (canUseProperties()) { map = getMapFromProperties(rs); } else { map = (Map<?, ?>) getObjectFromBlob(rs, COL_JOB_DATAMAP); ps = conn.prepareStatement(rtp(SELECT_BLOB_TRIGGER)); ps.setString(1, triggerKey.getName()); ps.setString(2, triggerKey.getGroup()); trigger = (OperableTrigger) getObjectFromBlob(rs, COL_BLOB); TriggerPersistenceDelegate tDel = findTriggerPersistenceDelegate(triggerType); triggerProps = tDel.loadExtendedTriggerProperties(conn, triggerKey); } catch (IllegalStateException isex) { if (isTriggerStillPresent(ps)) { throw isex; } else { setTriggerStateProperties(trigger, triggerProps); closeResultSet(rs); closeStatement(ps);
/**
 * <p>
 * Remove the transient data from and then create a serialized <code>java.util.ByteArrayOutputStream</code>
 * version of a <code>{@link org.quartz.JobDataMap}</code>.
 * </p>
 *
 * @param data
 *          the JobDataMap to serialize
 * @return the serialized ByteArrayOutputStream
 * @throws IOException
 *           if serialization causes an error
 */
protected ByteArrayOutputStream serializeJobData(JobDataMap data) throws IOException {
    if (canUseProperties()) {
        return serializeProperties(data);
    }
    try {
        return serializeObject(data);
    } catch (NotSerializableException e) {
        NotSerializableException nse = new NotSerializableException(
            "Unable to serialize JobDataMap for insertion into "
                + "database because the value of property '"
                + getKeyOfNonSerializableValue(data)
                + "' is not serializable: " + e.getMessage());
        // Fix: preserve the original exception as the cause instead of
        // discarding it and keeping only its message text.
        nse.initCause(e);
        throw nse;
    }
}
// NOTE(review): fragment of a larger calendar-selection method — the rs =
// ps.executeQuery() call and the enclosing method signature/braces are not
// visible in this chunk, so the code is left byte-identical. It prepares the
// SELECT_CALENDAR query by name and deserializes the calendar from its BLOB.
ResultSet rs = null; try { String selCal = rtp(SELECT_CALENDAR); ps = conn.prepareStatement(selCal); ps.setString(1, calendarName); cal = (Calendar) getObjectFromBlob(rs, COL_CALENDAR); closeResultSet(rs); closeStatement(ps);
/**
 * Loads every trigger that references the named calendar.
 *
 * @param conn the DB Connection
 * @param calName the calendar name to match
 * @return the triggers associated with the calendar (possibly empty)
 * @throws SQLException if the query fails
 * @throws ClassNotFoundException propagated from trigger deserialization
 * @throws IOException propagated from trigger deserialization
 * @throws JobPersistenceException propagated from selectTrigger
 */
public List<OperableTrigger> selectTriggersForCalendar(Connection conn, String calName)
        throws SQLException, ClassNotFoundException, IOException, JobPersistenceException {
    List<OperableTrigger> triggers = new LinkedList<OperableTrigger>();
    PreparedStatement stmt = null;
    ResultSet rows = null;
    try {
        stmt = conn.prepareStatement(rtp(SELECT_TRIGGERS_FOR_CALENDAR));
        stmt.setString(1, calName);
        rows = stmt.executeQuery();
        while (rows.next()) {
            TriggerKey key = triggerKey(rows.getString(COL_TRIGGER_NAME), rows.getString(COL_TRIGGER_GROUP));
            triggers.add(selectTrigger(conn, key));
        }
        return triggers;
    } finally {
        closeResultSet(rows);
        closeStatement(stmt);
    }
}
/**
 * Selects the distinct trigger group names matching the given matcher.
 *
 * @param conn the DB Connection
 * @param matcher the group matcher, translated to a SQL LIKE clause
 * @return the matching group names (possibly empty)
 * @throws SQLException if the query fails
 */
public List<String> selectTriggerGroups(Connection conn, GroupMatcher<TriggerKey> matcher) throws SQLException {
    PreparedStatement stmt = null;
    ResultSet rows = null;
    try {
        stmt = conn.prepareStatement(rtp(SELECT_TRIGGER_GROUPS_FILTERED));
        stmt.setString(1, toSqlLikeClause(matcher));
        rows = stmt.executeQuery();
        LinkedList<String> groups = new LinkedList<String>();
        while (rows.next()) {
            groups.add(rows.getString(1));
        }
        return groups;
    } finally {
        closeResultSet(rows);
        closeStatement(stmt);
    }
}
/**
 * <p>
 * This method should be overridden by any delegate subclasses that need
 * special handling for BLOBs for job details. The default implementation
 * uses standard JDBC <code>java.sql.Blob</code> operations.
 * </p>
 *
 * <p>When the delegate is in properties mode, the raw binary stream of the
 * BLOB is returned (or null for a null BLOB); otherwise the BLOB is
 * deserialized as an object.</p>
 *
 * @param rs
 *          the result set, already queued to the correct row
 * @param colName
 *          the column name for the BLOB
 * @return the deserialized Object from the ResultSet BLOB, or an
 *         InputStream / null in properties mode
 * @throws ClassNotFoundException
 *           if a class found during deserialization cannot be found
 * @throws IOException
 *           if deserialization causes an error
 */
protected Object getJobDataFromBlob(ResultSet rs, String colName) throws ClassNotFoundException, IOException, SQLException {
    if (!canUseProperties()) {
        return getObjectFromBlob(rs, colName);
    }
    Blob blob = rs.getBlob(colName);
    return (blob == null) ? null : blob.getBinaryStream();
}
/**
 * Builds a Map from the java.util.Properties encoding stored in the
 * job-data BLOB column of the current result-set row.
 *
 * <p>Fixes: replaced raw {@code Map} types with {@code Map<?, ?>} (source-
 * compatible for raw-typed callers) and removed the redundant
 * {@code if (is != null)} check after the early null return.</p>
 *
 * @param rs the ResultSet, already positioned on the row to read
 * @return the decoded map, or {@code null} if the BLOB column is null
 * @throws IOException if the properties stream cannot be read
 * @throws SQLException if the BLOB column cannot be accessed
 * @throws ClassNotFoundException propagated from the BLOB accessor
 */
private Map<?, ?> getMapFromProperties(ResultSet rs) throws ClassNotFoundException, IOException, SQLException {
    // NOTE(review): this variant reads via getJobDetailFromBlob while a sibling
    // variant uses getJobDataFromBlob — presumably equivalent accessors from
    // different file revisions; confirm which one this class actually declares.
    InputStream is = (InputStream) getJobDetailFromBlob(rs, COL_JOB_DATAMAP);
    if (is == null) {
        return null;
    }
    Properties properties = new Properties();
    try {
        properties.load(is);
    } finally {
        is.close(); // always release the BLOB stream
    }
    return convertFromProperty(properties);
}
/**
 * Re-executes the given (already parameterized) trigger query and reports
 * whether it still returns at least one row. The statement itself is not
 * closed here; only the result set is released.
 *
 * @param ps a prepared, parameterized trigger-selection statement
 * @return true if the query yields a row
 * @throws SQLException if the query fails
 */
private boolean isTriggerStillPresent(PreparedStatement ps) throws SQLException {
    ResultSet result = null;
    try {
        result = ps.executeQuery();
        return result.next();
    } finally {
        closeResultSet(result);
    }
}
/**
 * <p>
 * Check whether or not the given job is marked non-concurrent (the legacy
 * docs called this "stateful").
 * </p>
 *
 * @param conn
 *          the DB Connection
 * @param jobKey
 *          the key identifying the job to check
 * @return true if the job exists and its IS_NONCONCURRENT flag is set,
 *         false if the job row is absent
 * @throws SQLException if the lookup fails
 */
public boolean isJobNonConcurrent(Connection conn, JobKey jobKey) throws SQLException {
    PreparedStatement stmt = null;
    ResultSet rows = null;
    try {
        stmt = conn.prepareStatement(rtp(SELECT_JOB_NONCONCURRENT));
        stmt.setString(1, jobKey.getName());
        stmt.setString(2, jobKey.getGroup());
        rows = stmt.executeQuery();
        if (rows.next()) {
            return getBoolean(rows, COL_IS_NONCONCURRENT);
        }
        return false;
    } finally {
        closeResultSet(rows);
        closeStatement(stmt);
    }
}
/**
 * Reports whether a row exists for the named group in the
 * paused-trigger-groups table.
 *
 * @param conn the DB Connection
 * @param groupName the trigger group to look up
 * @return true if the group is paused
 * @throws SQLException if the query fails
 */
public boolean isTriggerGroupPaused(Connection conn, String groupName) throws SQLException {
    PreparedStatement query = null;
    ResultSet result = null;
    try {
        query = conn.prepareStatement(rtp(SELECT_PAUSED_TRIGGER_GROUP));
        query.setString(1, groupName);
        result = query.executeQuery();
        return result.next();
    } finally {
        closeResultSet(result);
        closeStatement(query);
    }
}
// NOTE(review): duplicate fragment of a larger selectTrigger method — the enclosing
// try block, the executeQuery() calls between statement preparations, and the
// matching close braces are outside this chunk, so the code is left byte-identical.
// See the structure: trigger row select, job-data decode, BLOB-trigger fallback,
// persistence-delegate extended properties, and a rethrow-only-if-still-present
// guard for IllegalStateException.
ps = conn.prepareStatement(rtp(SELECT_TRIGGER)); ps.setString(1, triggerKey.getName()); ps.setString(2, triggerKey.getGroup()); if (canUseProperties()) { map = getMapFromProperties(rs); } else { map = (Map<?, ?>) getObjectFromBlob(rs, COL_JOB_DATAMAP); ps = conn.prepareStatement(rtp(SELECT_BLOB_TRIGGER)); ps.setString(1, triggerKey.getName()); ps.setString(2, triggerKey.getGroup()); trigger = (OperableTrigger) getObjectFromBlob(rs, COL_BLOB); TriggerPersistenceDelegate tDel = findTriggerPersistenceDelegate(triggerType); triggerProps = tDel.loadExtendedTriggerProperties(conn, triggerKey); } catch (IllegalStateException isex) { if (isTriggerStillPresent(ps)) { throw isex; } else { setTriggerStateProperties(trigger, triggerProps); closeResultSet(rs); closeStatement(ps);
/**
 * Removes all rows from the paused-trigger-groups table.
 *
 * @param conn the DB Connection
 * @return how many rows were deleted
 * @throws SQLException if the delete fails
 */
public int deleteAllPausedTriggerGroups(Connection conn) throws SQLException {
    PreparedStatement delete = null;
    try {
        delete = conn.prepareStatement(rtp(DELETE_PAUSED_TRIGGER_GROUPS));
        int deleted = delete.executeUpdate();
        return deleted;
    } finally {
        closeStatement(delete);
    }
}
/**
 * <p>
 * Remove the transient data from and then create a serialized <code>java.util.ByteArrayOutputStream</code>
 * version of a <code>{@link org.quartz.JobDataMap}</code>.
 * </p>
 *
 * @param data
 *          the JobDataMap to serialize
 * @return the serialized ByteArrayOutputStream
 * @throws IOException
 *           if serialization causes an error
 */
protected ByteArrayOutputStream serializeJobData(JobDataMap data) throws IOException {
    if (canUseProperties()) {
        return serializeProperties(data);
    }
    try {
        return serializeObject(data);
    } catch (NotSerializableException e) {
        NotSerializableException nse = new NotSerializableException(
            "Unable to serialize JobDataMap for insertion into "
                + "database because the value of property '"
                + getKeyOfNonSerializableValue(data)
                + "' is not serializable: " + e.getMessage());
        // Fix: attach the original exception as the cause rather than
        // discarding everything but its message.
        nse.initCause(e);
        throw nse;
    }
}