ResultSet.getTimestamp
Code IndexAdd Codota to your IDE (free)

Best code snippets using java.sql.ResultSet.getTimestamp (Showing top 20 results out of 2,700)

Refine search

  • ResultSet.getString
  • ResultSet.next
  • ResultSet.getInt
  • Connection.prepareStatement
  • PreparedStatement.executeQuery
  • Timestamp.getTime
  • ResultSet.getLong
  • Common ways to obtain ResultSet
private void myMethod () {
ResultSet r =
  • PreparedStatement ps;ps.executeQuery()
  • Statement stmt;stmt.executeQuery(sql)
  • Statement statement;statement.getResultSet()
  • AI code suggestions by Codota
}
origin: prestodb/presto

/**
 * Builds a {@link ReadMapping} that surfaces a SQL TIMESTAMP column as
 * epoch milliseconds for the engine's long-based TIMESTAMP representation.
 */
public static ReadMapping timestampReadMapping()
{
  // NOTE(review): getTimestamp returns null for SQL NULL; presumably the
  // framework checks wasNull() before consuming the long — confirm upstream.
  return longReadMapping(TIMESTAMP,
      (resultSet, columnIndex) -> resultSet.getTimestamp(columnIndex).getTime());
}
origin: dropwizard/dropwizard

  /**
   * Maps a SQL TIMESTAMP column, addressed by label, to a Joda-Time {@code DateTime}.
   * When a calendar is configured, it is cloned per call and used for the
   * driver-side time-zone conversion; SQL NULL maps to {@code null}.
   */
  @Override
  @Nullable
  public DateTime mapColumn(ResultSet r, String columnLabel, StatementContext ctx) throws SQLException {
    final Timestamp ts;
    if (calendar.isPresent()) {
      ts = r.getTimestamp(columnLabel, cloneCalendar());
    } else {
      ts = r.getTimestamp(columnLabel);
    }
    // Propagate database NULL as a null DateTime rather than throwing.
    return ts == null ? null : new DateTime(ts.getTime());
  }
}
origin: elasticjob/elastic-job

/**
 * Loads job status trace events matching the given query condition.
 * Returns an empty list (after logging) if the query fails.
 */
private List<JobStatusTraceEvent> getJobStatusTraceEvents(final Condition condition) {
  List<JobStatusTraceEvent> events = new LinkedList<>();
  try (
      Connection connection = dataSource.getConnection();
      PreparedStatement statement = createDataPreparedStatement(connection, TABLE_JOB_STATUS_TRACE_LOG, FIELDS_JOB_STATUS_TRACE_LOG, condition);
      ResultSet rs = statement.executeQuery()
      ) {
    while (rs.next()) {
      // One event per row; the constructor consumes columns 1-11 in order.
      events.add(new JobStatusTraceEvent(
          rs.getString(1), rs.getString(2), rs.getString(3), rs.getString(4), rs.getString(5),
          Source.valueOf(rs.getString(6)), ExecutionType.valueOf(rs.getString(7)), rs.getString(8),
          State.valueOf(rs.getString(9)), rs.getString(10),
          new Date(rs.getTimestamp(11).getTime())));
    }
  } catch (final SQLException ex) {
    // TODO On failure we only log; consider making error handling configurable.
    log.error("Fetch JobStatusTraceEvent from DB error:", ex);
  }
  return events;
}

origin: elasticjob/elastic-job-lite

/**
 * Fetches running-task statistics recorded at or after the given start time.
 *
 * @param from start of the statistics window (inclusive)
 * @return matching rows in ascending id order; empty list if the query fails
 */
public List<TaskRunningStatistics> findTaskRunningStatistics(final Date from) {
  List<TaskRunningStatistics> result = new LinkedList<>();
  // Bind the start time as a JDBC parameter instead of formatting it into the
  // SQL text: avoids locale/format pitfalls, keeps sub-second precision, and
  // rules out SQL injection. The table name is a compile-time constant.
  String sql = String.format("SELECT id, running_count, statistics_time, creation_time FROM %s WHERE statistics_time >= ? order by id ASC", 
      TABLE_TASK_RUNNING_STATISTICS);
  try (
      Connection conn = dataSource.getConnection();
      PreparedStatement preparedStatement = conn.prepareStatement(sql)
      ) {
    preparedStatement.setTimestamp(1, new java.sql.Timestamp(from.getTime()));
    try (ResultSet resultSet = preparedStatement.executeQuery()) {
      while (resultSet.next()) {
        TaskRunningStatistics taskRunningStatistics = new TaskRunningStatistics(resultSet.getLong(1), resultSet.getInt(2), 
            new Date(resultSet.getTimestamp(3).getTime()), new Date(resultSet.getTimestamp(4).getTime()));
        result.add(taskRunningStatistics);
      }
    }
  } catch (final SQLException ex) {
    // TODO On failure we only log and return what was read; consider making this configurable.
    log.error("Fetch taskRunningStatistics from DB error:", ex);
  }
  return result;
}

origin: elasticjob/elastic-job-lite

/**
 * Fetches the most recently recorded running-task statistics row.
 *
 * @return the latest row wrapped in an Optional; absent if none exists or the query fails
 */
public Optional<TaskRunningStatistics> findLatestTaskRunningStatistics() {
  TaskRunningStatistics latest = null;
  String sql = String.format(
      "SELECT id, running_count, statistics_time, creation_time FROM %s order by id DESC LIMIT 1", 
      TABLE_TASK_RUNNING_STATISTICS);
  try (
      Connection connection = dataSource.getConnection();
      PreparedStatement statement = connection.prepareStatement(sql);
      ResultSet rs = statement.executeQuery()
      ) {
    // LIMIT 1 yields at most one row; the loop simply captures it if present.
    while (rs.next()) {
      latest = new TaskRunningStatistics(rs.getLong(1), rs.getInt(2), 
          new Date(rs.getTimestamp(3).getTime()), new Date(rs.getTimestamp(4).getTime()));
    }
  } catch (final SQLException ex) {
    // TODO On failure we only log and fall through to an absent result; consider making this configurable.
    log.error("Fetch latest taskRunningStatistics from DB error:", ex);
  }
  return Optional.fromNullable(latest);
}

origin: org.apache.logging.log4j/log4j-core

  final ResultSet resultSet = statement.executeQuery("SELECT * FROM dsLogEntry ORDER BY id")) {
assertTrue("There should be at least one row.", resultSet.next());
final long date = resultSet.getTimestamp("eventDate").getTime();
assertTrue("The date should be later than pre-logging (1).", date >= millis);
assertTrue("The date should be earlier than now (1).", date <= System.currentTimeMillis());
assertEquals("The literal column is not correct (1).", "Literal Value of Data Source",
    resultSet.getString("literalColumn"));
assertEquals("The level column is not correct (1).", "FATAL", resultSet.getNString("level"));
assertEquals("The logger column is not correct (1).", logger.getName(), resultSet.getNString("logger"));
assertEquals("The message column is not correct (1).", "Error from data source 02.",
    resultSet.getString("message"));
assertEquals("The exception column is not correct (1).", stackTrace,
    IOUtils.readStringAndClose(resultSet.getNClob("exception").getCharacterStream(), -1));
assertFalse("There should not be two rows.", resultSet.next());
origin: apache/storm

try {
  connection = connectionProvider.getConnection();
  try (PreparedStatement preparedStatement = connection.prepareStatement(sqlQuery)) {
    if (queryTimeoutSecs > 0) {
      preparedStatement.setQueryTimeout(queryTimeoutSecs);
    try (ResultSet resultSet = preparedStatement.executeQuery()) {
      List<List<Column>> rows = Lists.newArrayList();
      while (resultSet.next()) {
        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();
            row.add(new Column<byte[]>(columnLabel, resultSet.getBytes(columnLabel), columnType));
          } else if (columnJavaType.equals(Long.class)) {
            row.add(new Column<Long>(columnLabel, resultSet.getLong(columnLabel), columnType));
          } else if (columnJavaType.equals(Date.class)) {
            row.add(new Column<Date>(columnLabel, resultSet.getDate(columnLabel), columnType));
            row.add(new Column<Time>(columnLabel, resultSet.getTime(columnLabel), columnType));
          } else if (columnJavaType.equals(Timestamp.class)) {
            row.add(new Column<Timestamp>(columnLabel, resultSet.getTimestamp(columnLabel), columnType));
          } else {
            throw new RuntimeException("type =  " + columnType + " for column " + columnLabel + " not supported.");
origin: shardingjdbc/sharding-jdbc

  result = getCurrentResultSet().getShort(columnLabel);
} else if (int.class == type) {
  result = getCurrentResultSet().getInt(columnLabel);
} else if (long.class == type) {
  result = getCurrentResultSet().getLong(columnLabel);
} else if (float.class == type) {
  result = getCurrentResultSet().getFloat(columnLabel);
  result = getCurrentResultSet().getDouble(columnLabel);
} else if (String.class == type) {
  result = getCurrentResultSet().getString(columnLabel);
} else if (BigDecimal.class == type) {
  result = getCurrentResultSet().getBigDecimal(columnLabel);
  result = getCurrentResultSet().getTime(columnLabel);
} else if (Timestamp.class == type) {
  result = getCurrentResultSet().getTimestamp(columnLabel);
} else if (URL.class == type) {
  result = getCurrentResultSet().getURL(columnLabel);
origin: elasticjob/elastic-job-lite

/**
 * Fetches the most recently recorded running-job statistics row.
 *
 * @return the latest row wrapped in an Optional; absent if none exists or the query fails
 */
public Optional<JobRunningStatistics> findLatestJobRunningStatistics() {
  JobRunningStatistics latest = null;
  String sql = String.format(
      "SELECT id, running_count, statistics_time, creation_time FROM %s order by id DESC LIMIT 1", 
      TABLE_JOB_RUNNING_STATISTICS);
  try (
      Connection connection = dataSource.getConnection();
      PreparedStatement statement = connection.prepareStatement(sql);
      ResultSet rs = statement.executeQuery()
      ) {
    // LIMIT 1 yields at most one row; the loop simply captures it if present.
    while (rs.next()) {
      latest = new JobRunningStatistics(rs.getLong(1), rs.getInt(2), 
          new Date(rs.getTimestamp(3).getTime()), new Date(rs.getTimestamp(4).getTime()));
    }
  } catch (final SQLException ex) {
    // TODO On failure we only log and fall through to an absent result; consider making this configurable.
    log.error("Fetch latest jobRunningStatistics from DB error:", ex);
  }
  return Optional.fromNullable(latest);
}

origin: org.apache.logging.log4j/log4j-core

  final ResultSet resultSet = statement.executeQuery("SELECT * FROM fmLogEntry ORDER BY id")) {
assertTrue("There should be at least one row.", resultSet.next());
long date = resultSet.getTimestamp("eventDate").getTime();
long anotherDate = resultSet.getTimestamp("anotherDate").getTime();
assertEquals(date, anotherDate);
assertTrue("The date should be later than pre-logging (1).", date >= millis);
assertTrue("The date should be earlier than now (1).", date <= System.currentTimeMillis());
assertEquals("The literal column is not correct (1).", "Some Other Literal Value",
    resultSet.getString("literalColumn"));
assertEquals("The level column is not correct (1).", "DEBUG", resultSet.getNString("level"));
assertEquals("The logger column is not correct (1).", logger.getName(), resultSet.getNString("logger"));
assertEquals("The message column is not correct (1).", "Factory logged message 01.",
    resultSet.getString("message"));
assertEquals("The exception column is not correct (1).", Strings.EMPTY,
    IOUtils.readStringAndClose(resultSet.getNClob("exception").getCharacterStream(), -1));
date = resultSet.getTimestamp("eventDate").getTime();
anotherDate = resultSet.getTimestamp("anotherDate").getTime();
assertEquals(date, anotherDate);
assertTrue("The date should be later than pre-logging (2).", date >= millis);
assertTrue("The date should be earlier than now (2).", date <= System.currentTimeMillis());
assertEquals("The literal column is not correct (2).", "Some Other Literal Value",
    resultSet.getString("literalColumn"));
assertEquals("The level column is not correct (2).", "ERROR", resultSet.getNString("level"));
assertEquals("The logger column is not correct (2).", logger.getName(), resultSet.getNString("logger"));
origin: com.h2database/h2

  writeRow(row);
while (rs.next()) {
  for (int i = 0; i < columnCount; i++) {
    Object o;
      break;
    case Types.TIMESTAMP:
      o = rs.getTimestamp(i + 1);
      break;
    default:
      o = rs.getString(i + 1);
origin: elasticjob/elastic-job

/**
 * Fetches running-job statistics recorded at or after the given start time.
 *
 * @param from start of the statistics window (inclusive)
 * @return matching rows in ascending id order; empty list if the query fails
 */
public List<JobRunningStatistics> findJobRunningStatistics(final Date from) {
  List<JobRunningStatistics> result = new LinkedList<>();
  // Bind the start time as a JDBC parameter instead of formatting it into the
  // SQL text: avoids locale/format pitfalls, keeps sub-second precision, and
  // rules out SQL injection. The table name is a compile-time constant.
  String sql = String.format("SELECT id, running_count, statistics_time, creation_time FROM %s WHERE statistics_time >= ? order by id ASC", 
      TABLE_JOB_RUNNING_STATISTICS);
  try (
      Connection conn = dataSource.getConnection();
      PreparedStatement preparedStatement = conn.prepareStatement(sql)
      ) {
    preparedStatement.setTimestamp(1, new java.sql.Timestamp(from.getTime()));
    try (ResultSet resultSet = preparedStatement.executeQuery()) {
      while (resultSet.next()) {
        JobRunningStatistics jobRunningStatistics = new JobRunningStatistics(resultSet.getLong(1), resultSet.getInt(2), 
            new Date(resultSet.getTimestamp(3).getTime()), new Date(resultSet.getTimestamp(4).getTime()));
        result.add(jobRunningStatistics);
      }
    }
  } catch (final SQLException ex) {
    // TODO On failure we only log and return what was read; consider making this configurable.
    log.error("Fetch jobRunningStatistics from DB error:", ex);
  }
  return result;
}

origin: prestodb/presto

int intValue = resultSet.getInt(i);
if (resultSet.wasNull()) {
  row.add(null);
long longValue = resultSet.getLong(i);
if (resultSet.wasNull()) {
  row.add(null);
String stringValue = resultSet.getString(i);
if (resultSet.wasNull()) {
  row.add(null);
String stringValue = resultSet.getString(i);
if (resultSet.wasNull()) {
  row.add(null);
Timestamp timestampValue = resultSet.getTimestamp(i);
if (resultSet.wasNull()) {
  row.add(null);
origin: com.h2database/h2

Timestamp value = rs.getTimestamp(columnIndex);
v = value == null ? (Value) ValueNull.INSTANCE :
  ValueTimestamp.get(value);
int value = rs.getInt(columnIndex);
v = rs.wasNull() ? (Value) ValueNull.INSTANCE :
  ValueInt.get(value);
long value = rs.getLong(columnIndex);
v = rs.wasNull() ? (Value) ValueNull.INSTANCE :
  ValueLong.get(value);
String s = rs.getString(columnIndex);
v = (s == null) ? (Value) ValueNull.INSTANCE :
  ValueStringIgnoreCase.get(s);
String s = rs.getString(columnIndex);
v = (s == null) ? (Value) ValueNull.INSTANCE :
  ValueStringFixed.get(s);
String s = rs.getString(columnIndex);
v = (s == null) ? (Value) ValueNull.INSTANCE :
  ValueString.get(s);
int value = rs.getInt(columnIndex);
v = rs.wasNull() ? (Value) ValueNull.INSTANCE :
  ValueInt.get(value);
origin: elasticjob/elastic-job-lite

/**
 * Loads job execution events matching the given query condition.
 *
 * <p>Each row is mapped positionally (columns 1-11) onto a JobExecutionEvent.
 * Column 9 (completion time) may be SQL NULL and is mapped to null explicitly;
 * column 8 is assumed non-null — a NULL there would throw NPE inside the loop
 * and be swallowed only if wrapped in SQLException, so presumably the schema
 * guarantees it. TODO confirm against the table definition.
 *
 * @param condition query condition used to build the prepared statement
 * @return matching events; empty list (after logging) if the query fails
 */
private List<JobExecutionEvent> getJobExecutionEvents(final Condition condition) {
  List<JobExecutionEvent> result = new LinkedList<>();
  try (
      Connection conn = dataSource.getConnection();
      PreparedStatement preparedStatement = createDataPreparedStatement(conn, TABLE_JOB_EXECUTION_LOG, FIELDS_JOB_EXECUTION_LOG, condition);
      ResultSet resultSet = preparedStatement.executeQuery()
      ) {
    while (resultSet.next()) {
      // Column 7 is stored as text and parsed to Integer; column 11 carries
      // only the failure cause message (no Throwable instance is recoverable).
      JobExecutionEvent jobExecutionEvent = new JobExecutionEvent(resultSet.getString(1), resultSet.getString(2), resultSet.getString(3), resultSet.getString(4),
          resultSet.getString(5), JobExecutionEvent.ExecutionSource.valueOf(resultSet.getString(6)), Integer.valueOf(resultSet.getString(7)), 
          new Date(resultSet.getTimestamp(8).getTime()), resultSet.getTimestamp(9) == null ? null : new Date(resultSet.getTimestamp(9).getTime()), 
          resultSet.getBoolean(10), new JobExecutionEventThrowable(null, resultSet.getString(11)) 
          );
      result.add(jobExecutionEvent);
    }
  } catch (final SQLException ex) {
    // TODO On failure we only log; consider making error handling configurable.
    log.error("Fetch JobExecutionEvent from DB error:", ex);
  }
  return result;
}

origin: dropwizard/dropwizard

/**
 * Maps a SQL TIMESTAMP column, addressed by position, to a Joda-Time {@code DateTime}.
 * When a calendar is configured, it is cloned per call and used for the
 * driver-side time-zone conversion; SQL NULL maps to {@code null}.
 */
@Override
@Nullable
public DateTime mapColumn(ResultSet r, int columnNumber, StatementContext ctx) throws SQLException {
  final Timestamp ts;
  if (calendar.isPresent()) {
    ts = r.getTimestamp(columnNumber, cloneCalendar());
  } else {
    ts = r.getTimestamp(columnNumber);
  }
  // Propagate database NULL as a null DateTime rather than throwing.
  return ts == null ? null : new DateTime(ts.getTime());
}
origin: shardingjdbc/sharding-jdbc

  result = getCurrentResultSet().getShort(columnIndex);
} else if (int.class == type) {
  result = getCurrentResultSet().getInt(columnIndex);
} else if (long.class == type) {
  result = getCurrentResultSet().getLong(columnIndex);
} else if (float.class == type) {
  result = getCurrentResultSet().getFloat(columnIndex);
  result = getCurrentResultSet().getDouble(columnIndex);
} else if (String.class == type) {
  result = getCurrentResultSet().getString(columnIndex);
} else if (BigDecimal.class == type) {
  result = getCurrentResultSet().getBigDecimal(columnIndex);
  result = getCurrentResultSet().getTime(columnIndex);
} else if (Timestamp.class == type) {
  result = getCurrentResultSet().getTimestamp(columnIndex);
} else if (URL.class == type) {
  result = getCurrentResultSet().getURL(columnIndex);
origin: elasticjob/elastic-job-lite

/**
 * Fetches task result statistics recorded at or after the given start time,
 * for the table partition matching the given statistic interval.
 *
 * @param from start of the statistics window (inclusive)
 * @param statisticInterval interval suffix selecting the statistics table
 * @return matching rows in ascending id order; empty list if the query fails
 */
public List<TaskResultStatistics> findTaskResultStatistics(final Date from, final StatisticInterval statisticInterval) {
  List<TaskResultStatistics> result = new LinkedList<>();
  // Bind the start time as a JDBC parameter instead of formatting it into the
  // SQL text: avoids locale/format pitfalls, keeps sub-second precision, and
  // rules out SQL injection. The table name comes from a constant plus an enum.
  String sql = String.format("SELECT id, success_count, failed_count, statistics_time, creation_time FROM %s WHERE statistics_time >= ? order by id ASC", 
      TABLE_TASK_RESULT_STATISTICS + "_" + statisticInterval);
  try (
      Connection conn = dataSource.getConnection();
      PreparedStatement preparedStatement = conn.prepareStatement(sql)
      ) {
    preparedStatement.setTimestamp(1, new java.sql.Timestamp(from.getTime()));
    try (ResultSet resultSet = preparedStatement.executeQuery()) {
      while (resultSet.next()) {
        TaskResultStatistics taskResultStatistics = new TaskResultStatistics(resultSet.getLong(1), resultSet.getInt(2), resultSet.getInt(3), 
            statisticInterval, new Date(resultSet.getTimestamp(4).getTime()), new Date(resultSet.getTimestamp(5).getTime()));
        result.add(taskResultStatistics);
      }
    }
  } catch (final SQLException ex) {
    // TODO On failure we only log and return what was read; consider making this configurable.
    log.error("Fetch taskResultStatistics from DB error:", ex);
  }
  return result;
}

origin: elasticjob/elastic-job-lite

/**
 * Fetches the most recently recorded task result statistics row for the
 * table partition matching the given statistic interval.
 *
 * @param statisticInterval interval suffix selecting the statistics table
 * @return the latest row wrapped in an Optional; absent if none exists or the query fails
 */
public Optional<TaskResultStatistics> findLatestTaskResultStatistics(final StatisticInterval statisticInterval) {
  TaskResultStatistics latest = null;
  String sql = String.format(
      "SELECT id, success_count, failed_count, statistics_time, creation_time FROM %s order by id DESC LIMIT 1", 
      TABLE_TASK_RESULT_STATISTICS + "_" + statisticInterval);
  try (
      Connection connection = dataSource.getConnection();
      PreparedStatement statement = connection.prepareStatement(sql);
      ResultSet rs = statement.executeQuery()
      ) {
    // LIMIT 1 yields at most one row; the loop simply captures it if present.
    while (rs.next()) {
      latest = new TaskResultStatistics(rs.getLong(1), rs.getInt(2), rs.getInt(3), 
          statisticInterval, new Date(rs.getTimestamp(4).getTime()), new Date(rs.getTimestamp(5).getTime()));
    }
  } catch (final SQLException ex) {
    // TODO On failure we only log and fall through to an absent result; consider making this configurable.
    log.error("Fetch latest taskResultStatistics from DB error:", ex);
  }
  return Optional.fromNullable(latest);
}

origin: elasticjob/elastic-job

  /**
   * Fetches the most recently recorded job registration statistics row.
   *
   * @return the latest row wrapped in an Optional; absent if none exists or the query fails
   */
  public Optional<JobRegisterStatistics> findLatestJobRegisterStatistics() {
    JobRegisterStatistics latest = null;
    String sql = String.format(
        "SELECT id, registered_count, statistics_time, creation_time FROM %s order by id DESC LIMIT 1", 
        TABLE_JOB_REGISTER_STATISTICS);
    try (
        Connection connection = dataSource.getConnection();
        PreparedStatement statement = connection.prepareStatement(sql);
        ResultSet rs = statement.executeQuery()
        ) {
      // LIMIT 1 yields at most one row; the loop simply captures it if present.
      while (rs.next()) {
        latest = new JobRegisterStatistics(rs.getLong(1), rs.getInt(2), 
            new Date(rs.getTimestamp(3).getTime()), new Date(rs.getTimestamp(4).getTime()));
      }
    } catch (final SQLException ex) {
      // TODO On failure we only log and fall through to an absent result; consider making this configurable.
      log.error("Fetch latest jobRegisterStatistics from DB error:", ex);
    }
    return Optional.fromNullable(latest);
  }
}
java.sql.ResultSet.getTimestamp

Javadoc

Retrieves the value of the designated column in the current row of this ResultSet object as a java.sql.Timestamp object in the Java programming language.

Popular methods of ResultSet

  • next
    Shifts the cursor position down one row in this ResultSet object. Any input streams associated with
  • getString
    Gets the value of a column specified by column name, as a String.
  • close
    Releases this ResultSet's database and JDBC resources. You are strongly advised to use this method r
  • getInt
    Gets the value of a column specified by column name, as an intvalue.
  • getLong
    Gets the value of a column specified by column name, as a longvalue.
  • getMetaData
    Gets the metadata for this ResultSet. This defines the number, types and properties of the columns i
  • getObject
    Gets the value of a column specified by column name as a Java Object. The type of the Java object wi
  • getBoolean
    Gets the value of a column specified by column name, as a boolean.
  • getBytes
    Gets the value of a column specified by column name as a byte array.
  • getDouble
    Gets the value of a column specified by column name as a doublevalue.
  • wasNull
    Determines whether the last column read from this ResultSetcontained SQL NULL.
  • getDate
    Gets the value of a column specified by column name, as a java.sql.Date object.
  • wasNull,
  • getDate,
  • getFloat,
  • getShort,
  • getBlob,
  • getTime,
  • getBigDecimal,
  • getBinaryStream,
  • getByte

Popular classes and methods

  • scheduleAtFixedRate (Timer)
    Schedules the specified task for repeated fixed-rate execution, beginning after the specified delay.
  • setRequestProperty (URLConnection)
    Sets the value of the specified request header field. The value will only be used by the current URL
  • setScale (BigDecimal)
    Returns a new BigDecimal instance with the specified scale. If the new scale is greater than the old
  • InputStreamReader (java.io)
    A class for turning a byte stream into a character stream. Data read from the source input stream is
  • MalformedURLException (java.net)
    Thrown to indicate that a malformed URL has occurred. Either no legal protocol could be found in a s
  • Arrays (java.util)
    This class contains various methods for manipulating arrays (such as sorting and searching). This cl
  • TreeSet (java.util)
    TreeSet is an implementation of SortedSet. All optional operations (adding and removing) are support
  • SSLHandshakeException (javax.net.ssl)
    The exception that is thrown when a handshake could not be completed successfully.
  • JComboBox (javax.swing)
  • Option (scala)

For IntelliJ IDEA and
Android Studio

  • Codota IntelliJ IDEA pluginCodota Android Studio pluginCode IndexSign in
  • EnterpriseFAQAboutContact Us
  • Terms of usePrivacy policyCodeboxFind Usages
Add Codota to your IDE (free)