/**
 * Reads column {@code i} as a Java {@code long}, converting from the
 * column's Cassandra type where a lossless mapping exists.
 *
 * @throws IllegalStateException if the column's type has no long mapping
 */
@Override
public long getLong(int i)
{
    switch (getCassandraType(i)) {
        case BIGINT:
        case COUNTER:
            return currentRow.getLong(i);
        case INT:
            return currentRow.getInt(i);
        case TIMESTAMP:
            // timestamps are surfaced as epoch milliseconds
            return currentRow.getTimestamp(i).getTime();
        case FLOAT:
            // carry the raw IEEE-754 bit pattern, not the numeric value
            return floatToRawIntBits(currentRow.getFloat(i));
        default:
            throw new IllegalStateException("Cannot retrieve long for " + getCassandraType(i));
    }
}
@Override public Session get(final Builder builder) { ResultSet rs = session .execute(new BoundStatement(selectSQL.apply(tableName)).bind(builder.sessionId())); return Optional.ofNullable(rs.one()) .map(row -> { long createdAt = row.getTimestamp(CREATED_AT).getTime(); long accessedAt = row.getTimestamp(ACCESSED_AT).getTime(); long savedAt = row.getTimestamp(SAVED_AT).getTime(); Map<String, String> attributes = row.getMap(ATTRIBUTES, String.class, String.class); Session session = builder .accessedAt(accessedAt) .createdAt(createdAt) .savedAt(savedAt) .set(attributes) .build(); // touch ttl if (timeout > 0) { save(session); } return session; }) .orElse(null); }
return row.getUUID(i).toString(); case TIMESTAMP: return Long.toString(row.getTimestamp(i).getTime()); case INET: return CassandraCqlUtils.quoteStringLiteral(toAddrString(row.getInet(i)));
private List<Long> createQueryKeyList(ClusterConnection cluster, String metricName, long startTime, long endTime) { List<Long> ret = new ArrayList<>(); if (cluster.psRowKeyTimeQuery != null) //cluster may be old { BoundStatement statement = new BoundStatement(cluster.psRowKeyTimeQuery); statement.setString(0, metricName); statement.setTimestamp(1, new Date(CassandraDatastore.calculateRowTime(startTime))); statement.setTimestamp(2, new Date(endTime)); statement.setConsistencyLevel(cluster.getReadConsistencyLevel()); //printHosts(m_loadBalancingPolicy.newQueryPlan(m_keyspace, statement)); ResultSet rows = cluster.execute(statement); while (!rows.isExhausted()) { ret.add(rows.one().getTimestamp(0).getTime()); } } return ret; }
continue; //empty row rowKey = new DataPointsRowKey(m_metricName, m_clusterName, record.getTimestamp(0).getTime(), record.getString(1), new TreeMap<String, String>(record.getMap(2, String.class, String.class)));
return row.getTimestamp(col);
return row.getTimestamp(i);
return NullableValue.of(nativeType, utf8Slice(row.getUUID(i).toString())); case TIMESTAMP: return NullableValue.of(nativeType, row.getTimestamp(i).getTime()); case INET: return NullableValue.of(nativeType, utf8Slice(toAddrString(row.getInet(i))));
/**
 * Verifies that a {@link java.util.Date} inserted through the query builder
 * round-trips through Cassandra's timestamp type unchanged.
 */
@Test(groups = "short")
public void dateHandlingTest() throws Exception
{
    Date d = new Date();
    session().execute(insertInto("dateTest").value("t", d));

    String query = select().from("dateTest").where(eq(token("t"), fcall("token", d))).toString();
    List<Row> rows = session().execute(query).all();

    // TestNG's assertEquals signature is (actual, expected); the original
    // passed the arguments reversed, which yields misleading failure output.
    assertEquals(rows.size(), 1);

    Row r1 = rows.get(0);
    assertEquals(r1.getTimestamp("t"), d);
}
retVal = row.getTimestamp(columnName); if (retVal != null && member != null) retVal = CassandraDataTranslator.decompose(member.getType(),
/**
 * Delegates to the wrapped driver row.
 *
 * @param s the column name
 * @return the timestamp column as a {@link Date}, or {@code null} if unset
 */
@Override
public Date getTimestamp(String s)
{
    return this.row.getTimestamp(s);
}
/**
 * Delegates to the wrapped driver row.
 *
 * @param i the zero-based column index
 * @return the timestamp column as a {@link Date}, or {@code null} if unset
 */
@Override
public Date getTimestamp(int i)
{
    return this.row.getTimestamp(i);
}
Collections.unmodifiableMap( sessRow.getMap("parameters", String.class, String.class)); startedAt = sessRow.getTimestamp("started_at").getTime();
/**
 * Delegates to the wrapped driver row.
 *
 * @param i the zero-based column index
 * @return the timestamp column as a {@link Date}, or {@code null} if unset
 */
@Override
public Date getTimestamp(int i)
{
    return this.row.getTimestamp(i);
}
/**
 * Delegates to the wrapped driver row.
 *
 * @param i the zero-based column index
 * @return the timestamp column as a {@link Date}, or {@code null} if unset
 */
@Override
public Date getTimestamp(int i)
{
    return this.row.getTimestamp(i);
}
/**
 * Emits, in sorted order, every persisted time slice strictly earlier than
 * {@code currentTime}, then emits {@code currentTime} itself as the final
 * element.
 *
 * @param currentTime upper bound (exclusive) for persisted slices
 * @param scheduler   scheduler the query results are fetched on
 */
public Observable<Date> findActiveTimeSlices(Date currentTime, rx.Scheduler scheduler) {
    return session.executeAndFetch(findTimeSlices.bind(), scheduler)
            .map(row -> row.getTimestamp(0))
            .filter(timeSlice -> timeSlice.compareTo(currentTime) < 0)
            .toSortedList()
            .doOnNext(slices -> logger.debugf("Active time slices %s", slices))
            .flatMap(Observable::from)
            .concatWith(Observable.just(currentTime));
}
/**
 * Builds a {@link CFMetaData.DroppedColumn} from a schema-table row.
 *
 * <p>The {@code dropped_time} column stores milliseconds, while
 * {@code DroppedColumn} expects microseconds, hence the conversion.
 */
private static CFMetaData.DroppedColumn createDroppedColumnFromRow(Row row, String keyspace)
{
    String name = row.getString("column_name");
    AbstractType<?> type = CQLTypeParser.parse(keyspace, row.getString("type"), Types.none());
    long droppedMillis = row.getTimestamp("dropped_time").getTime();
    long droppedMicros = TimeUnit.MILLISECONDS.toMicros(droppedMillis);
    return new CFMetaData.DroppedColumn(name, type, droppedMicros);
}
}
/**
 * Deletes the scheduled job identified by {@code jobId} from every known
 * time slice.
 *
 * @param jobId     id of the job to remove
 * @param scheduler scheduler the queries are executed on
 */
public Completable deleteJob(UUID jobId, rx.Scheduler scheduler) {
    return session.executeAndFetch(findTimeSlices.bind(), scheduler)
            .map(row -> row.getTimestamp(0))
            .flatMap(slice -> session.execute(deleteScheduled.bind(slice, jobId)))
            .toCompletable();
}
/**
 * Looks up the user's currently active script, if any, with its activation
 * date expressed in UTC.
 *
 * <p>NOTE(review): the method name contains a typo ("Sctipt"); it is kept
 * as-is to preserve the public interface for existing callers.
 */
public CompletableFuture<Optional<ActiveScriptInfo>> getActiveSctiptInfo(User user) {
    BoundStatement statement = selectActiveName.bind()
            .setString(USER_NAME, user.asString());
    return cassandraAsyncExecutor.executeSingleRow(statement)
            .thenApply(rowOptional -> rowOptional.map(row -> {
                ScriptName scriptName = new ScriptName(row.getString(SCRIPT_NAME));
                ZonedDateTime activationDate =
                        ZonedDateTime.ofInstant(row.getTimestamp(DATE).toInstant(), ZoneOffset.UTC);
                return new ActiveScriptInfo(scriptName, activationDate);
            }));
}
/**
 * Converts a counter row into a {@code DataPoint<Long>} keyed by the row's
 * timestamp in epoch milliseconds, carrying its value and tag map.
 */
public static DataPoint<Long> getTempCounterDataPoint(Row row) {
    long timestamp = row.getTimestamp(COUNTER_COLS.TIME.ordinal()).toInstant().toEpochMilli();
    long value = row.getLong(COUNTER_COLS.VALUE.ordinal());
    return new DataPoint<>(timestamp, value,
            row.getMap(COUNTER_COLS.TAGS.ordinal(), String.class, String.class));
}