/**
 * Translates a CMB serializer wrapper into the underlying Hector serializer.
 *
 * @param s the CMB-level serializer marker to translate
 * @return the matching Hector {@code Serializer} singleton
 * @throws PersistenceException if {@code s} is not one of the known serializer types
 */
private static Serializer getSerializer(CmbSerializer s) throws PersistenceException {
    // Checks are kept in the original order; fall through to an error for
    // anything unrecognized rather than returning a default.
    if (s instanceof CmbStringSerializer) {
        return StringSerializer.get();
    }
    if (s instanceof CmbCompositeSerializer) {
        return CompositeSerializer.get();
    }
    if (s instanceof CmbLongSerializer) {
        return LongSerializer.get();
    }
    throw new PersistenceException(CMBErrorCodes.InternalError, "Unknown serializer " + s);
}
/**
 * Reconstructs a {@link Composite} from its serialized byte form.
 *
 * @param byteBuffer the raw serialized composite bytes
 * @return a new {@code Composite} populated from the buffer
 */
@Override
public Composite fromByteBuffer(ByteBuffer byteBuffer) {
    final Composite result = new Composite();
    // Comparators must be attached before deserializing so each component
    // is decoded with the type expected at its position.
    result.setComparatorsByPosition(getComparators());
    result.deserialize(byteBuffer);
    return result;
}
serializer = DateSerializer.get(); } else if (valueClass.equals(Composite.class)) { serializer = CompositeSerializer.get();
/**
 * Writes each value under its corresponding composite row key in a single batch mutation.
 * Entry {@code i} of {@code values} is stored in the row keyed by entry {@code i} of
 * {@code keys}, under the configured column name, honoring the configured TTL if one is set.
 *
 * @param keys   the per-row key components; each inner list is converted to one composite row key
 * @param values the values to store, parallel to {@code keys}
 * @throws IllegalArgumentException if {@code keys} and {@code values} differ in size
 * @throws RuntimeException if executing the batch mutation fails
 */
@Override
public void multiPut(List<List<Object>> keys, List<T> values) {
    // Fail fast on mismatched inputs: previously a shorter values list caused an
    // IndexOutOfBoundsException mid-batch and a longer one silently dropped entries.
    if (keys.size() != values.size()) {
        throw new IllegalArgumentException(
                "keys and values must be the same size: " + keys.size() + " keys, " + values.size() + " values");
    }
    MutationBatch mutation = this.keyspace.prepareMutationBatch();
    ColumnFamily<Composite, String> cf = new ColumnFamily<Composite, String>(
            this.options.columnFamily, CompositeSerializer.get(), StringSerializer.get());
    for (int i = 0; i < keys.size(); i++) {
        Composite keyName = toKeyName(keys.get(i));
        byte[] bytes = serializer.serialize(values.get(i));
        // A positive TTL makes the column expire; otherwise write it permanently.
        if (options.ttl != null && options.ttl > 0) {
            mutation.withRow(cf, keyName).putColumn(this.options.columnName, bytes, options.ttl);
        } else {
            mutation.withRow(cf, keyName).putColumn(this.options.columnName, bytes);
        }
    }
    try {
        mutation.execute();
    } catch (ConnectionException e) {
        throw new RuntimeException("Batch mutation for state failed.", e);
    }
}
Collection<Composite> keyNames = toKeyNames(keys); ColumnFamily<Composite, String> cf = new ColumnFamily<Composite, String>(this.options.columnFamily, CompositeSerializer.get(), StringSerializer.get()); RowSliceQuery<Composite, String> query = this.keyspace.prepareQuery(cf).getKeySlice(keyNames);
@ParameterizedTimed(type="AstyanaxStorageProvider") @Override public Iterator<Map.Entry<String, StorageSummary>> scanMetadata(Table tbl, @Nullable String fromBlobIdExclusive, final LimitCounter limit) { checkNotNull(tbl, "table"); checkArgument(limit.remaining() > 0, "Limit must be >0"); final AstyanaxTable table = (AstyanaxTable) tbl; AstyanaxStorage storage = table.getReadStorage(); final BlobPlacement placement = (BlobPlacement) storage.getPlacement(); // Do a column range query on all the A and B columns. Don't get the Z columns with the binary data. CompositeSerializer colSerializer = CompositeSerializer.get(); final ByteBufferRange columnRange = new RangeBuilder() .setStart(getColumnPrefix(ColumnGroup.A, Composite.ComponentEquality.LESS_THAN_EQUAL), colSerializer) .setEnd(getColumnPrefix(ColumnGroup.B, Composite.ComponentEquality.GREATER_THAN_EQUAL), colSerializer) .build(); // Loop over all the range prefixes (256 of them) and, for each, execute Cassandra queries to page through the // records with that prefix. final Iterator<ByteBufferRange> scanIter = storage.scanIterator(fromBlobIdExclusive); return touch(Iterators.concat(new AbstractIterator<Iterator<Map.Entry<String, StorageSummary>>>() { @Override protected Iterator<Map.Entry<String, StorageSummary>> computeNext() { if (scanIter.hasNext()) { ByteBufferRange keyRange = scanIter.next(); return decodeMetadataRows(scanInternal(placement, keyRange, columnRange, limit), table); } return endOfData(); } })); }
@ParameterizedTimed(type="AstyanaxStorageProvider") @Override public long count(Table tbl) { checkNotNull(tbl, "table"); AstyanaxTable table = (AstyanaxTable) tbl; AstyanaxStorage storage = table.getReadStorage(); BlobPlacement placement = (BlobPlacement) storage.getPlacement(); // Limit the # of columns to retrieve since we just want to count rows, but we need one column to ignore range // ghosts. Constrain the search to just small columns. Especially, exclude column group Z with all the bytes. CompositeSerializer colSerializer = CompositeSerializer.get(); ByteBufferRange columnRange = new RangeBuilder() .setStart(getColumnPrefix(ColumnGroup.B, Composite.ComponentEquality.LESS_THAN_EQUAL), colSerializer) .setEnd(getColumnPrefix(ColumnGroup.B, Composite.ComponentEquality.GREATER_THAN_EQUAL), colSerializer) .setLimit(1) .build(); LimitCounter unlimited = LimitCounter.max(); // Range query all the shards and count the number of rows in each. long count = 0; Iterator<ByteBufferRange> scanIter = storage.scanIterator(null); while (scanIter.hasNext()) { ByteBufferRange keyRange = scanIter.next(); Iterator<Row<ByteBuffer, Composite>> rowIter = scanInternal(placement, keyRange, columnRange, unlimited); while (rowIter.hasNext()) { if (!rowIter.next().getColumns().isEmpty()) { count++; } } } return count; }
CompositeSerializer colSerializer = CompositeSerializer.get(); ByteBufferRange metadataColumnRange = new RangeBuilder() .setStart(getColumnPrefix(ColumnGroup.A, Composite.ComponentEquality.LESS_THAN_EQUAL), colSerializer)