/**
 * Checks whether the given consumer still has at least one pending transaction
 * column recorded for the given queue.
 *
 * @param queueId the queue being consumed
 * @param consumerId the consumer to check
 * @return true if any transaction-timeout column exists for this queue/consumer pair
 */
public boolean hasOutstandingTransactions( UUID queueId, UUID consumerId ) {
    SliceQuery<ByteBuffer, UUID, UUID> query = createSliceQuery( ko, be, ue, ue );
    query.setColumnFamily( CONSUMER_QUEUE_TIMEOUTS.getColumnFamily() );
    query.setKey( getQueueClientTransactionKey( queueId, consumerId ) );
    // A single column is enough to answer the existence question.
    query.setRange( null, null, false, 1 );
    return !query.execute().get().getColumns().isEmpty();
}
/**
 * Fetches the named columns of a single row.
 *
 * @param ko the keyspace to read from
 * @param columnFamily the column family (its toString() is used as the CF name)
 * @param key the row key (serialized via bytebuffer())
 * @param columns the explicit column names to fetch
 * @param nameSerializer serializer for column names
 * @param valueSerializer serializer for column values
 * @return the column slice for the row; may be empty, null only if the query returned null
 * @throws Exception on query failure
 */
public <N, V> ColumnSlice<N, V> getColumns( Keyspace ko, Object columnFamily, Object key, N[] columns,
        Serializer<N> nameSerializer, Serializer<V> valueSerializer ) throws Exception {
    if ( db_logger.isTraceEnabled() ) {
        db_logger.trace( "getColumn cf={} key={} column={}", columnFamily, key, columns );
    }
    SliceQuery<ByteBuffer, N, V> q = HFactory.createSliceQuery( ko, be, nameSerializer, valueSerializer );
    QueryResult<ColumnSlice<N, V>> r = q.setKey( bytebuffer( key ) )
            .setColumnNames( columns )
            .setColumnFamily( columnFamily.toString() )
            .execute();
    ColumnSlice<N, V> result = r.get();
    if ( db_logger.isTraceEnabled() && result == null ) {
        db_logger.trace( "getColumn returned null" );
    }
    return result;
}
/**
 * Gets the oldest/newest message ID bounds for the queue.
 *
 * @param queueId the queue to inspect
 * @return the bounds for the queue, or null if either bound is missing or the read fails
 */
public QueueBounds getQueueBounds( UUID queueId ) {
    try {
        ColumnSlice<String, UUID> result = HFactory.createSliceQuery( ko, ue, se, ue ).setKey( queueId )
                .setColumnNames( QUEUE_NEWEST, QUEUE_OLDEST )
                .setColumnFamily( QUEUE_PROPERTIES.getColumnFamily() ).execute()
                .get();
        // Both bounds must be present for the answer to be meaningful.
        if ( result != null && result.getColumnByName( QUEUE_OLDEST ) != null
                && result.getColumnByName( QUEUE_NEWEST ) != null ) {
            return new QueueBounds( result.getColumnByName( QUEUE_OLDEST ).getValue(),
                    result.getColumnByName( QUEUE_NEWEST ).getValue() );
        }
    }
    catch ( Exception e ) {
        // Boundary catch: callers treat null as "bounds unavailable".
        // Message fixed: this method reads both bounds, not just the oldest ID.
        logger.error( "Error getting queue bounds", e );
    }
    return null;
}
// Fragment (review): statements from a larger transaction-scan method; the
// enclosing signature, the loop producing `column`, and the start of the first
// logger call are outside this view.
SliceQuery<ByteBuffer, UUID, UUID> q = createSliceQuery( ko, be, ue, ue );
// Pending-transaction columns live in the per-consumer timeout row.
q.setColumnFamily( CONSUMER_QUEUE_TIMEOUTS.getColumnFamily() );
q.setKey( getQueueClientTransactionKey( queueId, consumerId ) );
// limit + 1 — presumably fetches one extra column to detect a further page; TODO confirm
q.setRange( params.startId, startTimeUUID, false, params.limit + 1 );
// NOTE(review): dangling argument list — the call it belongs to begins before
// this fragment.
column.getName(), column.getValue(), queueId, consumerId );
logger.trace( "Max timeuuid : '{}', Current timeuuid : '{}', comparison '{}'", startTimeUUID, column.getName(), UUIDUtils.compare( startTimeUUID, column.getName() ) );
// Fragment (review): this test method is truncated — the slice query creation,
// the rowKey/start/end composite setup, assertions, and the closing brace are
// outside this view.
public void testCompositeOrdering() {
Mutator<ByteBuffer> mutator = HFactory.createMutator( CassandraTestBase.keyspace, ByteBufferSerializer.get());
// The component's comparator name is taken from the serializer's comparator type.
composite.addComponent("jeans", StringSerializer.get(), StringSerializer .get().getComparatorType().getTypeName());
// NOTE(review): dangling argument list — belongs to a createSliceQuery call
// starting before this fragment.
DynamicCompositeSerializer.get(), ByteBufferSerializer.get());
sliceQuery.setColumnFamily(AbstractIndexOperation.CF_NAME);
sliceQuery.setKey(rowKey);
// NOTE(review): dangling argument — part of a truncated component/equality call.
ComponentEquality.GREATER_THAN_EQUAL);
sliceQuery.setRange(start, end, false, 1000);
System.out.println(ByteBufferUtil.bytesToHex(end.serialize()));
List<HColumn<DynamicComposite, ByteBuffer>> cols = sliceQuery.execute() .get().getColumns();
System.out.println(ByteBufferUtil.bytesToHex(cols.get(0).getNameBytes()));
composite = cols.get(0).getName();
composite = cols.get(1).getName();
// Fragment (review): tutorial main() is truncated — the query that produces
// `result`, the loop close, and the surrounding try/finally are outside view.
public static void main(String[] args) throws Exception {
Cluster cluster = HFactory.getOrCreateCluster("TestCluster", "localhost:9160");
Keyspace keyspaceOperator = HFactory.createKeyspace("Keyspace1", cluster);
try {
Mutator<String> mutator = HFactory.createMutator(keyspaceOperator, stringSerializer);
OrderedRows<String, String, String> orderedRows = result.get();
for (Row<String, String, String> row : orderedRows) {
// Keys presumably have a 9-char prefix followed by a number — TODO confirm.
int keyNum = Integer.valueOf(row.getKey().substring(9));
SliceQuery<String, String, String> q = HFactory.createSliceQuery(keyspaceOperator, stringSerializer, stringSerializer, stringSerializer);
q.setColumnFamily("Standard1");
// Empty start/finish = first 3 columns in comparator order.
q.setRange("", "", false, 3);
q.setKey(row.getKey());
QueryResult<ColumnSlice<String, String>> r = q.execute();
System.out.println("|-- called directly via get_slice, the value is: " +r);
// Fragment (review): body of a generic named-columns fetch helper; the method
// signature, the `columnNames` parameter, and the return statement are outside
// this view.
SliceQuery<ByteBuffer, N, V> q = createSliceQuery( ko, be, nameSerializer, valueSerializer );
q.setColumnFamily( columnFamily.toString() );
q.setKey( bytebuffer( key ) );
// Converts the requested String names into the target name type N by
// round-tripping through bytes; the unchecked cast matches the N[] varargs API.
q.setColumnNames( ( N[] ) nameSerializer.fromBytesSet( se.toBytesSet( new ArrayList<String>( columnNames ) ) ) .toArray() );
QueryResult<ColumnSlice<N, V>> r = q.execute();
ColumnSlice<N, V> slice = r.get();
List<HColumn<N, V>> results = slice.getColumns();
private void refresh() { query.setRange(start, finish.function(), reversed, count); columns = 0; List<HColumn<N, V>> list = query.execute().get().getColumns(); iterator = Iterators.peekingIterator(list.iterator()); if (iterator.hasNext()) { // The lower bound column may have been removed prior to the query executing, // so check to see if the first column returned by the current query is the same // as the lower bound column. If both columns are the same, skip the column N first = list.get(0).getName(); if (first.equals(start)) { next(); } } }
// Fragment (review): branch structure is truncated — the `if` matching the
// `} else {` below and several closing braces are outside this view.
SliceQuery<byte[], String, byte[]> q = HFactory.createSliceQuery(keyspace, BytesArraySerializer.get(), StringSerializer.get(), BytesArraySerializer.get());
q.setColumnFamily(colFamName);
q.setKey(colFamKey);
// Use the explicit column list when the mapping defines one...
q.setColumnNames(cfMapDef.getSliceColumnNameArr());
} else {
// ...otherwise range-scan up to maxNumColumns columns of the row.
q.setRange("", "", false, maxNumColumns);
QueryResult<ColumnSlice<String, byte[]>> result = q.execute();
// No result at all: nothing to hydrate into an object.
if (null == result || null == result.get()) {
return null;
T obj = createObject(cfMapDef, pkObj, result.get());
return obj;
@Override public Void call() throws Exception { log.debug("Starting VerifyLastInsertCommand"); String key = "test"; sliceQuery.setColumnFamily(commandArgs.workingColumnFamily); log.info("StartKey: {} for thread {}", key, Thread.currentThread().getId()); String colValue; for (int col = 0; col < commandArgs.columnCount; col++) { colValue = String.format(COLUMN_VAL_FORMAT, col); mutator.addInsertion(key, commandArgs.workingColumnFamily, HFactory.createStringColumn(String.format(COLUMN_NAME_FORMAT, col), colValue)); executeMutator(col); // Let's verify sliceQuery.setKey(key); sliceQuery.setRange(null, null, true, 1); QueryResult<ColumnSlice<String,String>> result = sliceQuery.execute(); String actualValue = result.get().getColumns().get(0).getValue(); if (!actualValue.equals(colValue)) { log.error("Column values don't match. Expected: " + colValue + " - Actual: " + actualValue); break; } } commandRunner.doneSignal.countDown(); log.debug("VerifyLastInsertCommand complete"); return null; }
/** * This method intentionally swallows ordered execution issues. For some reason, our Time UUID ordering does * not agree with the cassandra comparator as our micros get very close * @param query * @param <K> * @param <UUID> * @param <V> * @return */ protected static <K, UUID, V> List<HColumn<UUID, V>> swallowOrderedExecution( final SliceQuery<K, UUID, V> query ) { try { return query.execute().get().getColumns(); } catch ( HInvalidRequestException e ) { //invalid request. Occasionally we get order issues when there shouldn't be, disregard them. final Throwable invalidRequestException = e.getCause(); if ( invalidRequestException instanceof InvalidRequestException //we had a range error && ( ( InvalidRequestException ) invalidRequestException ).getWhy().contains( "range finish must come after start in the order of traversal" )) { return Collections.emptyList(); } throw e; } }
// Fragment (review): setup for a ColumnSliceIterator filter test; the for-loop
// close, the mutator execution, the SliceFilter body, and the while-loop close
// are outside this view.
cluster.truncate(keyspace.getKeyspaceName(), CF);
Mutator<String> m = createMutator(keyspace, se);
// Seed 500 columns under each of three name prefixes (a, b, c).
for (int i = 0; i < 500; i++) {
m.addInsertion(KEY, CF, createColumn("a" + i, String.valueOf(i), se, se));
m.addInsertion(KEY, CF, createColumn("b" + i, String.valueOf(i), se, se));
m.addInsertion(KEY, CF, createColumn("c" + i, String.valueOf(i), se, se));
SliceQuery<String, String, String> query = HFactory.createSliceQuery(keyspace, se, se, se)
.setKey(KEY)
.setColumnFamily(CF);
// Iterate names in ["a", "d") in pages of 2, applying a filter (body truncated).
ColumnSliceIterator<String, String, String> it = new ColumnSliceIterator<String, String, String>(query, "a", "d", false, 2).
setFilter(new SliceFilter<HColumn<String, String>>() {
while (it.hasNext()) {
HColumn<String, String> c = it.next();
String name = c.getName();
@Override public String getStoredType(ByteBuffer rowKey, String cfName, Keyspace keyspace) { SliceQuery<ByteBuffer, String, ByteBuffer> query = MappingUtils .buildSliceQuery(rowKey, columns, cfName, keyspace); QueryResult<ColumnSlice<String, ByteBuffer>> result = query.execute(); // only need to check > 0. If the entity wasn't tombstoned then we would // have loaded the static jpa marker column HColumn<String, ByteBuffer> descrimValue = result.get().getColumnByName( DISCRIMINAATOR_COL); if (descrimValue == null) { return null; } return StringSerializer.get().fromByteBuffer(descrimValue.getValue()); }
/** * Loads a lazy property's value * * @param metadata the entity metadata * @param self the entity instance * @param proceed the method being intercepted * @param m * @param args the method arguments */ @Override protected <T> void loadLazyPropertyIfNecessary(ClassMetadata<T> metadata, Object self, Method proceed, Method m, Object[] args) throws Exception { Object value = proceed.invoke(self, args); String key = getKey(self); if (key != null) { //key may be null if this is just a regular access to the property before the entity has been persisted and no key has been assigned SliceQuery<String, String, Object> query = getSliceQuery(metadata); query.setColumnFamily(metadata.getColumnFamily()); query.setKey(key); String column = metadata.getLazyProperty(m); query.setColumnNames(column); List<HColumn<String, Object>> columns = query.execute().get().getColumns(); HColumn<String, Object> mappedColumnValue = columns.size() == 1 ? columns.get(0) : null; if (mappedColumnValue != null && isEmptyContainerValue(value)) { //todo once a load attempt has been made we should not attempt again but we have no sessions...perhaps a weakreference map? Object propertyValue = loadProperty(metadata, column, mappedColumnValue); PropertyUtils.setProperty(self, column, propertyValue); } } }
/**
 * Builds a forward column iterator over the dynamic-composite range
 * [start, end] for the given row key.
 *
 * @param key the row key to iterate
 * @param start the first composite column name (inclusive)
 * @param end the last composite column name
 * @return an iterator over the matching columns, in comparator order
 */
private ColumnSliceIterator<String, DynamicComposite, String> getIterator(String key, DynamicComposite start, DynamicComposite end) {
    SliceQuery<String, DynamicComposite, String> sliceQuery = HFactory.createSliceQuery(keyspace, ss, ds, ss);
    sliceQuery.setColumnFamily(columnFamily);
    sliceQuery.setKey(key);
    return new ColumnSliceIterator<String, DynamicComposite, String>(sliceQuery, start, end, false);
}
// Fragment (review): prefix-scan setup; the enclosing method and the query
// execution are outside this view.
StringSerializer ss = StringSerializer.get();
// NOTE(review): raw SliceQuery — parameterizing would need rowSerializer's type.
SliceQuery sliceQuery = HFactory.createSliceQuery(keySpace, rowSerializer, ss, ss);
sliceQuery.setColumnFamily(MY_COLUMN_FAMILY);
sliceQuery.setKey(rowKey);
// Scans all columns whose names fall in ["prefix", "prefix|"] — '|' sorts after
// the usual prefix-payload characters, bounding the prefix range.
// Fix: Integer.MAX_INT does not exist in Java; the constant is Integer.MAX_VALUE.
sliceQuery.setRange("prefix", "prefix|", false, Integer.MAX_VALUE);
/**
 * Constructor.
 *
 * @param query Base SliceQuery to execute
 * @param start Starting point of the range
 * @param finish Finish point of the range. Allows for a dynamically determined point
 * @param reversed Whether or not the columns should be reversed
 * @param count the amount of columns to retrieve per batch
 */
public ColumnSliceIterator(SliceQuery<K, N, V> query, N start, ColumnSliceFinish<N> finish, boolean reversed, int count) {
    this.query = query;
    this.start = start;
    this.finish = finish;
    this.reversed = reversed;
    this.count = count;
    // Prime the query with the initial batch range.
    query.setRange(start, finish.function(), reversed, count);
}
@Override public Void call() throws Exception { int rows = 0; Random random = new Random(); sliceQuery.setColumnFamily(commandArgs.workingColumnFamily); log.debug("Starting SliceCommand"); try { while (rows < commandArgs.getKeysPerThread()) { long nanos = System.nanoTime(); columnFamily.addKey(String.format("%010d", startKey + random.nextInt(commandArgs.getKeysPerThread()))); //sliceQuery.setKey(String.format("%010d", startKey + random.nextInt(commandArgs.getKeysPerThread()))); //sliceQuery.setRange(null, null, false, commandArgs.columnCount); //QueryResult<ColumnSlice<String,String>> result = sliceQuery.execute(); columnFamily.getColumns(); //LatencyTracker readCount = commandRunner.latencies.get(new CassandraHost("localhost:9160")); ///readCount.addMicro((System.nanoTime() - nanos) / 1000); columnFamily.removeKeys().clear(); rows++; } } catch (Exception e) { log.error("Problem: ", e); } commandRunner.doneSignal.countDown(); log.debug("SliceCommand complete"); return null; }
// Fragment (review): inbox-scan statements from a larger method; the result
// loop that defines `i` and `cassResults` is outside this view.
SliceQuery<ByteBuffer, UUID, ByteBuffer> q = createSliceQuery( ko, be, ue, be );
q.setColumnFamily( QUEUE_INBOX.getColumnFamily() );
// Row key combines the queue id with the current time-shard bucket.
q.setKey( getQueueShardRowKey( queueId, current_ts_shard ) );
// limit + 1 — presumably fetches one extra column to detect a further page; TODO confirm
q.setRange( lastValue, finish_uuid, params.reversed, params.limit + 1 );
HColumn<UUID, ByteBuffer> column = cassResults.get( i );
final UUID columnName = column.getName();
// Fragment (review): this test method is truncated — the slice query creation,
// the rowKey/start/end composite setup, assertions, and the closing brace are
// outside this view.
public void testCompositeOrderingPass() {
Mutator<ByteBuffer> mutator = HFactory.createMutator( CassandraTestBase.keyspace, ByteBufferSerializer.get());
// The component's comparator name is taken from the serializer's comparator type.
composite.addComponent("jeans", StringSerializer.get(), StringSerializer .get().getComparatorType().getTypeName());
// NOTE(review): dangling argument list — belongs to a createSliceQuery call
// starting before this fragment.
DynamicCompositeSerializer.get(), ByteBufferSerializer.get());
sliceQuery.setColumnFamily(AbstractIndexOperation.CF_NAME);
sliceQuery.setKey(rowKey);
sliceQuery.setRange(start, end, false, 1000);
System.out.println(ByteBufferUtil.bytesToHex(end.serialize()));
List<HColumn<DynamicComposite, ByteBuffer>> cols = sliceQuery.execute() .get().getColumns();
System.out.println(ByteBufferUtil.bytesToHex(cols.get(0).getNameBytes()));
composite = cols.get(0).getName();
composite = cols.get(1).getName();