/**
 * Execute the query again and set the results.
 *
 * <p>Replaces {@code sourceIterator} with a fresh iterator over the next page of the
 * row query. Any {@link ConnectionException} is wrapped in a {@link RuntimeException}
 * with the original exception preserved as the cause.</p>
 */
private void advanceIterator() {
    //run producing the values within a hystrix command. This way we'll time out if the read takes too long
    try {
        sourceIterator = rowQuery.execute().getResult().iterator();
    }
    catch ( ConnectionException e ) {
        throw new RuntimeException( "Unable to get next page", e );
    }
}
}
/**
 * Asserts that {@code columns} contains exactly the columns named in {@code expected},
 * in order, where each column's integer value is the 1-based alphabet position of the
 * first letter of its name (e.g. "a..." -> 1, "b..." -> 2).
 */
private void testRangeColumnsForRow(ColumnList<String> columns, List<String> expected) {
    Iterator<Column<String>> actualIter = columns.iterator();
    for (String expectedName : expected) {
        Column<String> actual = actualIter.next();
        Assert.assertEquals(expectedName, actual.getName());
        // Values were written as the 1-based alphabet position of the name's first letter.
        int expectedValue = expectedName.charAt(0) - 'a' + 1;
        Assert.assertEquals(expectedValue, actual.getIntegerValue());
    }
}
@Override
protected Iterator<Column<C>> computeNext() {
    // Fetch the next page of columns; an empty page signals end of iteration.
    // NOTE(review): assumes execute(query) advances the paging state between calls — confirm.
    ColumnList<C> page = execute(query);
    return !page.isEmpty() ? page.iterator() : endOfData();
}
});
@Override
protected Iterator<Column<C>> computeNext() {
    // Fetch the next page of columns; an empty page signals end of iteration.
    // NOTE(review): assumes execute(query) advances the paging state between calls — confirm.
    ColumnList<C> page = execute(query);
    return !page.isEmpty() ? page.iterator() : endOfData();
}
});
@Override
protected Iterator<Column<C>> computeNext() {
    // Fetch the next page of columns; an empty page signals end of iteration.
    // NOTE(review): assumes execute(query) advances the paging state between calls — confirm.
    ColumnList<C> page = execute(query);
    return !page.isEmpty() ? page.iterator() : endOfData();
}
});
@Override
protected Iterator<Column<C>> computeNext() {
    // Fetch the next page of columns; an empty page signals end of iteration.
    // NOTE(review): assumes execute(query) advances the paging state between calls — confirm.
    ColumnList<C> page = execute(query);
    return !page.isEmpty() ? page.iterator() : endOfData();
}
});
// Iterate over every column of the row.
// NOTE(review): the loop body is truncated at the end of this chunk — remainder not visible here.
for(Iterator<Column<String>> i = cols.iterator(); i.hasNext(); ) {
    Column<String> c = i.next();
    Object v = null;
/**
 * Copies a row from CF_STANDARD1 to CF_STANDARD2 under the same key and verifies the
 * destination row matches the source column-for-column, with no extra columns.
 */
@Test
public void testCopy() throws ConnectionException {
    String keyName = "A";

    // Copy the row into the second column family under the same key.
    keyspace.prepareQuery(CF_STANDARD1).getKey(keyName).copyTo(CF_STANDARD2, keyName).execute();

    // Read both rows back for comparison.
    ColumnList<String> source = keyspace.prepareQuery(CF_STANDARD1).getKey(keyName).execute().getResult();
    ColumnList<String> copy = keyspace.prepareQuery(CF_STANDARD2).getKey(keyName).execute().getResult();

    Iterator<Column<String>> sourceIter = source.iterator();
    Iterator<Column<String>> copyIter = copy.iterator();
    while (sourceIter.hasNext()) {
        Column<String> expected = sourceIter.next();
        Column<String> actual = copyIter.next();
        Assert.assertEquals(expected.getName(), actual.getName());
        Assert.assertEquals(expected.getByteBufferValue(), actual.getByteBufferValue());
    }
    // The copy must not contain any columns beyond those in the source.
    Assert.assertFalse(copyIter.hasNext());
}
/**
 * Verifies every expected column of the row for index {@code i}: typed values written by
 * the test fixture (names, age variants, flags, date, bytes, uuid, empty), then walks the
 * iterator interface asserting every column exposes a non-null name.
 *
 * @param resultColumns the row's columns as returned by the query
 * @param i             the row index used to derive the expected values
 */
public void testAllColumnsForRow(ColumnList<String> resultColumns, int i) throws Exception {
    Date date = OriginalDate.plusMinutes(i).toDate();
    testColumnValue(resultColumns, "firstname", columnNames, "john_" + i);
    testColumnValue(resultColumns, "lastname", columnNames, "smith_" + i);
    testColumnValue(resultColumns, "address", columnNames, "john smith address " + i);
    testColumnValue(resultColumns, "age", columnNames, 30 + i);
    // Primitive casts instead of the deprecated new Integer(...) boxing constructor;
    // autoboxing yields the same Short/Long values.
    testColumnValue(resultColumns, "ageShort", columnNames, (short) (30 + i));
    testColumnValue(resultColumns, "ageLong", columnNames, (long) (30 + i));
    testColumnValue(resultColumns, "percentile", columnNames, 30.1);
    testColumnValue(resultColumns, "married", columnNames, true);
    testColumnValue(resultColumns, "single", columnNames, false);
    testColumnValue(resultColumns, "birthdate", columnNames, date);
    testColumnValue(resultColumns, "bytes", columnNames, TestBytes);
    testColumnValue(resultColumns, "uuid", columnNames, TestUUID);
    testColumnValue(resultColumns, "empty", columnNames, null);

    /** TEST THE ITERATOR INTERFACE */
    Iterator<Column<String>> iter = resultColumns.iterator();
    while (iter.hasNext()) {
        Column<String> col = iter.next();
        Assert.assertNotNull(col.getName());
    }
}
/**
 * Builds a mutation batch that re-writes every column of the row backing {@code rowQuery}
 * into the same row of {@code cf}, preserving each column's generic value.
 *
 * <p>If {@code useOriginalTimestamp} is set, the batch's timestamp is taken from the first
 * column returned by the query, so the re-written columns keep that original write time.</p>
 *
 * @return a mutation batch ready to execute; the caller is responsible for executing it
 * @throws ConnectionException if the row read fails
 */
private MutationBatch getMutationBatch() throws ConnectionException {
    // Read the full row first; its columns are replayed into the mutation below.
    ColumnList<C> columnList = rowQuery.execute().getResult();

    CqlKeyspaceImpl ksImpl = new CqlKeyspaceImpl(ksContext);
    MutationBatch mBatch = ksImpl.prepareMutationBatch();
    CqlColumnListMutationImpl<K,C> colListMutation = (CqlColumnListMutationImpl<K, C>)mBatch.withRow(cf, rowKey);

    Iterator<Column<C>> iter = columnList.iterator();
    boolean first = true;
    while(iter.hasNext()) {
        CqlColumnImpl<C> col = (CqlColumnImpl<C>) iter.next();
        // Copy the original write timestamp from the first column only.
        if (first && useOriginalTimestamp) {
            colListMutation.setTimestamp(col.getTimestamp());
            first = false;
        }
        colListMutation.putColumnWithGenericValue(col.getName(), col.getGenericValue(), null);
    }
    return mBatch;
}
}
/**
 * Assembles a {@link Record} from an initially-fetched set of columns, lazily extending the
 * scan when the row is large enough that the first query likely did not return everything.
 *
 * @param key               the record key
 * @param rowKey            the raw Cassandra row key
 * @param columns           the columns returned by the initial query
 * @param largeRowThreshold column count at or above which extra lazy scans are chained on
 * @param consistency       read consistency for the follow-up scans
 * @param cutoffTime        if non-null, columns after this instant are filtered out
 */
private Record newRecord(Key key, ByteBuffer rowKey, ColumnList<UUID> columns, int largeRowThreshold,
                         ReadConsistency consistency, @Nullable final Instant cutoffTime) {

    // Three independent decoded views over the same fetched columns, each filtered by cutoffTime.
    Iterator<Map.Entry<UUID, Change>> changeIter = decodeChanges(getFilteredColumnIter(columns.iterator(), cutoffTime));
    Iterator<Map.Entry<UUID, Compaction>> compactionIter = decodeCompactions(getFilteredColumnIter(columns.iterator(), cutoffTime));
    Iterator<RecordEntryRawMetadata> rawMetadataIter = rawMetadata(getFilteredColumnIter(columns.iterator(), cutoffTime));

    if (columns.size() >= largeRowThreshold) {
        // A large row such that the first query likely returned only a subset of all the columns. Lazily fetch
        // the rest while ensuring we never load all columns into memory at the same time. The current
        // Compactor+Resolver implementation must scan the row twice: once to find compaction records and once to
        // find deltas. So we must call columnScan() twice, once for each.
        UUID lastColumn = columns.getColumnByIndex(columns.size() - 1).getName();

        AstyanaxTable table = (AstyanaxTable) key.getTable();
        AstyanaxStorage storage = table.getReadStorage();
        DeltaPlacement placement = (DeltaPlacement) storage.getPlacement();
        ColumnFamily<ByteBuffer, UUID> columnFamily = placement.getDeltaColumnFamily();

        // Execute the same scan 3 times, returning 3 iterators that process the results in different ways. In
        // practice at most two of the iterators are actually consumed (one or more is ignored) so the columnScan
        // should avoid actually doing any work until the first item is fetched from the iterator.
        changeIter = Iterators.concat(changeIter, decodeChanges(
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, Long.MAX_VALUE, 1, consistency), cutoffTime)));
        compactionIter = Iterators.concat(compactionIter, decodeCompactions(
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, Long.MAX_VALUE, 1, consistency), cutoffTime)));
        rawMetadataIter = Iterators.concat(rawMetadataIter, rawMetadata(
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, Long.MAX_VALUE, 1, consistency), cutoffTime)));
    }

    return new RecordImpl(key, compactionIter, changeIter, rawMetadataIter);
}
/**
 * Assembles a {@link Record} from an initially-fetched set of columns, lazily extending the
 * scan when the row is large enough that the first query likely did not return everything.
 *
 * @param key               the record key
 * @param rowKey            the raw Cassandra row key
 * @param columns           the columns returned by the initial query
 * @param largeRowThreshold column count at or above which extra lazy scans are chained on
 * @param consistency       read consistency for the follow-up scans
 * @param cutoffTime        if non-null, columns after this instant are filtered out
 */
private Record newRecord(Key key, ByteBuffer rowKey, ColumnList<UUID> columns, int largeRowThreshold,
                         ReadConsistency consistency, @Nullable final Instant cutoffTime) {

    // Three independent decoded views over the same fetched columns, each filtered by cutoffTime.
    Iterator<Map.Entry<UUID, Change>> changeIter = decodeChanges(getFilteredColumnIter(columns.iterator(), cutoffTime));
    Iterator<Map.Entry<UUID, Compaction>> compactionIter = decodeCompactions(getFilteredColumnIter(columns.iterator(), cutoffTime));
    Iterator<RecordEntryRawMetadata> rawMetadataIter = rawMetadata(getFilteredColumnIter(columns.iterator(), cutoffTime));

    if (columns.size() >= largeRowThreshold) {
        // A large row such that the first query likely returned only a subset of all the columns. Lazily fetch
        // the rest while ensuring we never load all columns into memory at the same time. The current
        // Compactor+Resolver implementation must scan the row twice: once to find compaction records and once to
        // find deltas. So we must call columnScan() twice, once for each.
        UUID lastColumn = columns.getColumnByIndex(columns.size() - 1).getName();

        AstyanaxTable table = (AstyanaxTable) key.getTable();
        AstyanaxStorage storage = table.getReadStorage();
        DeltaPlacement placement = (DeltaPlacement) storage.getPlacement();
        ColumnFamily<ByteBuffer, UUID> columnFamily = placement.getDeltaColumnFamily();

        // Execute the same scan 3 times, returning 3 iterators that process the results in different ways. In
        // practice at most two of the iterators are actually consumed (one or more is ignored) so the columnScan
        // should avoid actually doing any work until the first item is fetched from the iterator.
        changeIter = Iterators.concat(changeIter, decodeChanges(
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, Long.MAX_VALUE, 1, consistency), cutoffTime)));
        compactionIter = Iterators.concat(compactionIter, decodeCompactions(
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, Long.MAX_VALUE, 1, consistency), cutoffTime)));
        rawMetadataIter = Iterators.concat(rawMetadataIter, rawMetadata(
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, Long.MAX_VALUE, 1, consistency), cutoffTime)));
    }

    return new RecordImpl(key, compactionIter, changeIter, rawMetadataIter);
}
/**
 * Reads the entire row addressed by the given tuple and returns its columns as a
 * column-name to value map.
 */
@SuppressWarnings("unchecked")
public Map<C, V> lookup(TridentTupleMapper<K, C, V> tupleMapper, TridentTuple input) throws Exception {
    // Derive keyspace, column family, and row key from the tuple.
    String cf = tupleMapper.mapToColumnFamily(input);
    String keyspace = tupleMapper.mapToKeyspace(input);
    K rowKey = tupleMapper.mapToRowKey(input);
    Class<K> keyClass = tupleMapper.getKeyClass();
    Class<C> colClass = tupleMapper.getColumnNameClass();
    ColumnFamily<K, C> columnFamily = new ColumnFamily<K, C>(cf,
            (Serializer<K>) serializerFor(keyClass),
            (Serializer<C>) serializerFor(colClass));

    // Fetch the whole row in one query.
    OperationResult<ColumnList<C>> result =
            this.getKeyspace(keyspace).prepareQuery(columnFamily).getKey(rowKey).execute();
    ColumnList<C> columns = (ColumnList<C>) result.getResult();

    // Materialize the columns as a name -> value map.
    HashMap<C, V> values = new HashMap<C, V>();
    Iterator<Column<C>> it = columns.iterator();
    while (it.hasNext()) {
        Column<C> col = it.next();
        values.put(col.getName(), col.getValue((Serializer<V>) serializerFor(tupleMapper.getColumnValueClass())));
    }
    return values;
}
/**
 * Reads the entire row addressed by the given tuple and returns its columns as a
 * column-name to value map.
 */
@SuppressWarnings("unchecked")
public Map<C, V> lookup(TupleMapper<K, C, V> tupleMapper, Tuple input) throws Exception {
    // Derive keyspace, column family, and row key from the tuple.
    String cf = tupleMapper.mapToColumnFamily(input);
    String keyspace = tupleMapper.mapToKeyspace(input);
    K rowKey = tupleMapper.mapToRowKey(input);
    Class<K> keyClass = tupleMapper.getKeyClass();
    Class<C> colClass = tupleMapper.getColumnNameClass();
    ColumnFamily<K, C> columnFamily = new ColumnFamily<K, C>(cf,
            (Serializer<K>) serializerFor(keyClass),
            (Serializer<C>) serializerFor(colClass));

    // Fetch the whole row in one query.
    OperationResult<ColumnList<C>> result =
            this.getKeyspace(keyspace).prepareQuery(columnFamily).getKey(rowKey).execute();
    ColumnList<C> columns = (ColumnList<C>) result.getResult();

    // Materialize the columns as a name -> value map.
    HashMap<C, V> values = new HashMap<C, V>();
    Iterator<Column<C>> it = columns.iterator();
    while (it.hasNext()) {
        Column<C> col = it.next();
        values.put(col.getName(), col.getValue((Serializer<V>) serializerFor(tupleMapper.getColumnValueClass())));
    }
    return values;
}
/**
 * Assembles a {@link Record} from an initially-fetched set of blocked-delta columns, lazily
 * extending the scan when the row is large enough that the first query likely did not return
 * everything. The raw column iterators are wrapped in {@link AstyanaxDeltaIterator}s (which
 * strip the {@code _deltaPrefixLength} prefix) before decoding.
 *
 * @param key               the record key
 * @param rowKey            the raw Cassandra row key
 * @param columns           the columns returned by the initial query
 * @param largeRowThreshold column count at or above which extra lazy scans are chained on
 * @param consistency       read consistency for the follow-up scans
 * @param cutoffTime        if non-null, columns after this instant are filtered out
 */
private Record newRecord(Key key, ByteBuffer rowKey, ColumnList<DeltaKey> columns, int largeRowThreshold,
                         ReadConsistency consistency, @Nullable final Instant cutoffTime) {

    // Three independent filtered views over the same fetched columns.
    Iterator<Column<DeltaKey>> changeIter = getFilteredColumnIter(columns.iterator(), cutoffTime);
    Iterator<Column<DeltaKey>> compactionIter = getFilteredColumnIter(columns.iterator(), cutoffTime);
    Iterator<Column<DeltaKey>> rawMetadataIter = getFilteredColumnIter(columns.iterator(), cutoffTime);

    if (columns.size() >= largeRowThreshold) {
        // A large row such that the first query likely returned only a subset of all the columns. Lazily fetch
        // the rest while ensuring we never load all columns into memory at the same time. The current
        // Compactor+Resolver implementation must scan the row twice: once to find compaction records and once to
        // find deltas. So we must call columnScan() twice, once for each.
        DeltaKey lastColumn = columns.getColumnByIndex(columns.size() - 1).getName();

        AstyanaxTable table = (AstyanaxTable) key.getTable();
        AstyanaxStorage storage = table.getReadStorage();
        DeltaPlacement placement = (DeltaPlacement) storage.getPlacement();
        ColumnFamily<ByteBuffer, DeltaKey> columnFamily = placement.getBlockedDeltaColumnFamily();

        // Execute the same scan 3 times, returning 3 iterators that process the results in different ways. In
        // practice at most two of the iterators are actually consumed (one or more is ignored) so the columnScan
        // should avoid actually doing any work until the first item is fetched from the iterator.
        changeIter = Iterators.concat(changeIter,
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, _deltaKeyInc, Long.MAX_VALUE, 1, consistency), cutoffTime));
        compactionIter = Iterators.concat(compactionIter,
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, _deltaKeyInc, Long.MAX_VALUE, 1, consistency), cutoffTime));
        rawMetadataIter = Iterators.concat(rawMetadataIter,
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, _deltaKeyInc, Long.MAX_VALUE, 1, consistency), cutoffTime));
    }

    // Wrap each raw column stream in a delta iterator before decoding.
    Iterator<Map.Entry<UUID, Change>> deltaChangeIter =
            decodeChanges(new AstyanaxDeltaIterator(changeIter, false, _deltaPrefixLength, ByteBufferUtil.bytesToHex((rowKey))));
    Iterator<Map.Entry<UUID, Compaction>> deltaCompactionIter =
            decodeCompactions(new AstyanaxDeltaIterator(compactionIter, false, _deltaPrefixLength, ByteBufferUtil.bytesToHex((rowKey))));
    Iterator<RecordEntryRawMetadata> deltaRawMetadataIter =
            rawMetadata(new AstyanaxDeltaIterator(rawMetadataIter, false, _deltaPrefixLength, ByteBufferUtil.bytesToHex((rowKey))));

    return new RecordImpl(key, deltaCompactionIter, deltaChangeIter, deltaRawMetadataIter);
}
/**
 * Fetches the requested columns ("slice") of a single row, issuing one single-column
 * range query per column name, and returns them as a column-name to value map.
 *
 * @param tupleMapper maps the tuple to keyspace, column family, row key, and types
 * @param input       the tuple being looked up
 * @param slice       the column names to fetch
 * @return a map of the fetched column names to their deserialized values
 */
@SuppressWarnings("unchecked")
public Map<C, V> lookup(TridentTupleMapper<K, C, V> tupleMapper, TridentTuple input, List<C> slice) throws Exception {
    String cf = tupleMapper.mapToColumnFamily(input);
    String keyspace = tupleMapper.mapToKeyspace(input);
    K rowKey = tupleMapper.mapToRowKey(input);
    Class<K> keyClass = tupleMapper.getKeyClass();
    Class<C> colClass = tupleMapper.getColumnNameClass();
    ColumnFamily<K, C> columnFamily = new ColumnFamily<K, C>(cf,
            (Serializer<K>) serializerFor(keyClass),
            (Serializer<C>) serializerFor(colClass));
    HashMap<C, V> retval = new HashMap<C, V>();
    for (C c : slice) {
        RowQuery<K, C> query = this.getKeyspace(keyspace).prepareQuery(columnFamily).getKey(rowKey);
        query = query.withColumnRange(getRangeBuilder(c, c, null, (Serializer<C>) serializerFor(colClass)));
        OperationResult<ColumnList<C>> result = query.execute();
        // Guard debug logging so the string concatenation is skipped when debug is disabled
        // (this runs once per sliced column, so the cost adds up).
        if (LOG.isDebugEnabled()) {
            LOG.debug("Selecting [" + c.toString() + "] returned [" + result.getResult().size() + "] results.");
        }
        Iterator<Column<C>> it = result.getResult().iterator();
        while (it.hasNext()) {
            Column<C> col = it.next();
            if (LOG.isDebugEnabled()) {
                LOG.debug("Adding [" + col.getName() + "]=>[" + col.getStringValue() + "]");
            }
            retval.put(col.getName(), col.getValue((Serializer<V>) serializerFor(tupleMapper.getColumnValueClass())));
        }
    }
    return retval;
}
/**
 * Assembles a {@link Record} from an initially-fetched set of blocked-delta columns, lazily
 * extending the scan when the row is large enough that the first query likely did not return
 * everything. The raw column iterators are wrapped in {@link AstyanaxDeltaIterator}s (which
 * strip the {@code _deltaPrefixLength} prefix) before decoding.
 *
 * @param key               the record key
 * @param rowKey            the raw Cassandra row key
 * @param columns           the columns returned by the initial query
 * @param largeRowThreshold column count at or above which extra lazy scans are chained on
 * @param consistency       read consistency for the follow-up scans
 * @param cutoffTime        if non-null, columns after this instant are filtered out
 */
private Record newRecord(Key key, ByteBuffer rowKey, ColumnList<DeltaKey> columns, int largeRowThreshold,
                         ReadConsistency consistency, @Nullable final Instant cutoffTime) {

    // Three independent filtered views over the same fetched columns.
    Iterator<Column<DeltaKey>> changeIter = getFilteredColumnIter(columns.iterator(), cutoffTime);
    Iterator<Column<DeltaKey>> compactionIter = getFilteredColumnIter(columns.iterator(), cutoffTime);
    Iterator<Column<DeltaKey>> rawMetadataIter = getFilteredColumnIter(columns.iterator(), cutoffTime);

    if (columns.size() >= largeRowThreshold) {
        // A large row such that the first query likely returned only a subset of all the columns. Lazily fetch
        // the rest while ensuring we never load all columns into memory at the same time. The current
        // Compactor+Resolver implementation must scan the row twice: once to find compaction records and once to
        // find deltas. So we must call columnScan() twice, once for each.
        DeltaKey lastColumn = columns.getColumnByIndex(columns.size() - 1).getName();

        AstyanaxTable table = (AstyanaxTable) key.getTable();
        AstyanaxStorage storage = table.getReadStorage();
        DeltaPlacement placement = (DeltaPlacement) storage.getPlacement();
        ColumnFamily<ByteBuffer, DeltaKey> columnFamily = placement.getBlockedDeltaColumnFamily();

        // Execute the same scan 3 times, returning 3 iterators that process the results in different ways. In
        // practice at most two of the iterators are actually consumed (one or more is ignored) so the columnScan
        // should avoid actually doing any work until the first item is fetched from the iterator.
        changeIter = Iterators.concat(changeIter,
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, _deltaKeyInc, Long.MAX_VALUE, 1, consistency), cutoffTime));
        compactionIter = Iterators.concat(compactionIter,
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, _deltaKeyInc, Long.MAX_VALUE, 1, consistency), cutoffTime));
        rawMetadataIter = Iterators.concat(rawMetadataIter,
                getFilteredColumnIter(columnScan(rowKey, placement, columnFamily, lastColumn, null, false, _deltaKeyInc, Long.MAX_VALUE, 1, consistency), cutoffTime));
    }

    // Wrap each raw column stream in a delta iterator before decoding.
    Iterator<Map.Entry<UUID, Change>> deltaChangeIter =
            decodeChanges(new AstyanaxDeltaIterator(changeIter, false, _deltaPrefixLength, ByteBufferUtil.bytesToHex((rowKey))));
    Iterator<Map.Entry<UUID, Compaction>> deltaCompactionIter =
            decodeCompactions(new AstyanaxDeltaIterator(compactionIter, false, _deltaPrefixLength, ByteBufferUtil.bytesToHex((rowKey))));
    Iterator<RecordEntryRawMetadata> deltaRawMetadataIter =
            rawMetadata(new AstyanaxDeltaIterator(rawMetadataIter, false, _deltaPrefixLength, ByteBufferUtil.bytesToHex((rowKey))));

    return new RecordImpl(key, deltaCompactionIter, deltaChangeIter, deltaRawMetadataIter);
}
/**
 * Fetches the requested columns ("slice") of a single row, issuing one single-column
 * range query per column name, and returns them as a column-name to value map.
 */
@SuppressWarnings("unchecked")
public Map<C, V> lookup(TupleMapper<K, C, V> tupleMapper, Tuple input, List<C> slice) throws Exception {
    // Derive keyspace, column family, and row key from the tuple.
    String cf = tupleMapper.mapToColumnFamily(input);
    String keyspace = tupleMapper.mapToKeyspace(input);
    K rowKey = tupleMapper.mapToRowKey(input);
    Class<K> keyClass = tupleMapper.getKeyClass();
    Class<C> colClass = tupleMapper.getColumnNameClass();
    ColumnFamily<K, C> columnFamily = new ColumnFamily<K, C>(cf,
            (Serializer<K>) serializerFor(keyClass),
            (Serializer<C>) serializerFor(colClass));

    HashMap<C, V> values = new HashMap<C, V>();
    // One single-column range query per requested column name.
    for (C columnName : slice) {
        RowQuery<K, C> rangeQuery = this.getKeyspace(keyspace).prepareQuery(columnFamily).getKey(rowKey)
                .withColumnRange(getRangeBuilder(columnName, columnName, null, (Serializer<C>) serializerFor(colClass)));
        OperationResult<ColumnList<C>> result = rangeQuery.execute();
        Iterator<Column<C>> columnIter = result.getResult().iterator();
        while (columnIter.hasNext()) {
            Column<C> col = columnIter.next();
            values.put(col.getName(), col.getValue((Serializer<V>) serializerFor(tupleMapper.getColumnValueClass())));
        }
    }
    return values;
}
/**
 * Fetches the columns of a single row within the range [start, end] (subject to
 * {@code equality}) and returns them as a column-name to value map. Returns {@code null}
 * when either endpoint is missing.
 */
@SuppressWarnings("unchecked")
public Map<C, V> lookup(TridentTupleMapper<K, C, V> tupleMapper, TridentTuple input, C start, C end, Equality equality) throws Exception {
    // A range lookup is meaningless without both endpoints.
    if (start == null || end == null) {
        return null;
    }
    String cf = tupleMapper.mapToColumnFamily(input);
    String keyspace = tupleMapper.mapToKeyspace(input);
    K rowKey = tupleMapper.mapToRowKey(input);
    Class<K> keyClass = tupleMapper.getKeyClass();
    Class<C> colClass = tupleMapper.getColumnNameClass();
    ColumnFamily<K, C> columnFamily = new ColumnFamily<K, C>(cf,
            (Serializer<K>) serializerFor(keyClass),
            (Serializer<C>) serializerFor(colClass));

    // Build and run the range-limited row query.
    RowQuery<K, C> rangeQuery = this.getKeyspace(keyspace).prepareQuery(columnFamily).getKey(rowKey)
            .withColumnRange(getRangeBuilder(start, end, equality, (Serializer<C>) serializerFor(colClass)));
    OperationResult<ColumnList<C>> result = rangeQuery.execute();
    ColumnList<C> columns = (ColumnList<C>) result.getResult();

    HashMap<C, V> values = new HashMap<C, V>();
    Iterator<Column<C>> columnIter = columns.iterator();
    while (columnIter.hasNext()) {
        Column<C> col = columnIter.next();
        values.put(col.getName(), col.getValue((Serializer<V>) serializerFor(tupleMapper.getColumnValueClass())));
    }
    return values;
}
/**
 * Fetches the columns of a single row within the range [start, end] (subject to
 * {@code equality}) and returns them as a column-name to value map. Returns {@code null}
 * when either endpoint is missing.
 */
@SuppressWarnings("unchecked")
public Map<C, V> lookup(TupleMapper<K, C, V> tupleMapper, Tuple input, C start, C end, Equality equality) throws Exception {
    // A range lookup is meaningless without both endpoints.
    if (start == null || end == null) {
        return null;
    }
    String cf = tupleMapper.mapToColumnFamily(input);
    String keyspace = tupleMapper.mapToKeyspace(input);
    K rowKey = tupleMapper.mapToRowKey(input);
    Class<K> keyClass = tupleMapper.getKeyClass();
    Class<C> colClass = tupleMapper.getColumnNameClass();
    ColumnFamily<K, C> columnFamily = new ColumnFamily<K, C>(cf,
            (Serializer<K>) serializerFor(keyClass),
            (Serializer<C>) serializerFor(colClass));

    // Build and run the range-limited row query.
    RowQuery<K, C> rangeQuery = this.getKeyspace(keyspace).prepareQuery(columnFamily).getKey(rowKey);
    rangeQuery = rangeQuery.withColumnRange(getRangeBuilder(start, end, equality, (Serializer<C>) serializerFor(colClass)));
    OperationResult<ColumnList<C>> result = rangeQuery.execute();
    ColumnList<C> columns = (ColumnList<C>) result.getResult();

    HashMap<C, V> values = new HashMap<C, V>();
    Iterator<Column<C>> columnIter = columns.iterator();
    while (columnIter.hasNext()) {
        Column<C> col = columnIter.next();
        values.put(col.getName(), col.getValue((Serializer<V>) serializerFor(tupleMapper.getColumnValueClass())));
    }
    return values;
}