/**
 * Deserializes the selection portion of a single-partition read command:
 * the partition key bytes (decoded via the table's key validator, then
 * decorated) followed by the clustering index filter.
 *
 * NOTE(review): assumes the caller already decoded the shared header fields
 * (digest flag, filters, limits, index) and passes them straight through.
 */
public ReadCommand deserialize(DataInputPlus in, int version, boolean isDigest, int digestVersion, boolean isForThrift, CFMetaData metadata, int nowInSec, ColumnFilter columnFilter, RowFilter rowFilter, DataLimits limits, IndexMetadata index) throws IOException
{
    DecoratedKey partitionKey = metadata.decorateKey(metadata.getKeyValidator().readValue(in, DatabaseDescriptor.getMaxValueSize()));
    ClusteringIndexFilter clusteringFilter = ClusteringIndexFilter.serializer.deserialize(in, version, metadata);
    return new SinglePartitionReadCommand(isDigest, digestVersion, isForThrift, metadata, nowInSec,
                                          columnFilter, rowFilter, limits, partitionKey, clusteringFilter, index);
}
}
/**
 * Writes a DataRange: partition key range, clustering filter, then a boolean
 * paging marker; when the range carries paging state, the last returned
 * clustering and its inclusiveness flag follow the marker.
 */
public void serialize(DataRange range, DataOutputPlus out, int version, CFMetaData metadata) throws IOException
{
    AbstractBounds.rowPositionSerializer.serialize(range.keyRange, out, version);
    ClusteringIndexFilter.serializer.serialize(range.clusteringIndexFilter, out, version);
    if (range instanceof Paging)
    {
        Paging paging = (Paging) range;
        out.writeBoolean(true); // paging marker
        Clustering.serializer.serialize(paging.lastReturned, out, version, metadata.comparator.subtypes());
        out.writeBoolean(paging.inclusive);
    }
    else
    {
        out.writeBoolean(false); // no paging tail follows
    }
}
public long serializedSize(DataRange range, int version, CFMetaData metadata) { long size = AbstractBounds.rowPositionSerializer.serializedSize(range.keyRange, version) + ClusteringIndexFilter.serializer.serializedSize(range.clusteringIndexFilter, version) + 1; // isPaging boolean if (range instanceof Paging) { size += Clustering.serializer.serializedSize(((Paging)range).lastReturned, version, metadata.comparator.subtypes()); size += 1; // inclusive boolean } return size; } }
/**
 * Deserializes the selection portion of a single-partition read command:
 * the partition key bytes (decoded via the table's key validator, then
 * decorated) followed by the clustering index filter.
 *
 * NOTE(review): assumes the caller already decoded the shared header fields
 * (digest flag, filters, limits, index) and passes them straight through.
 */
public ReadCommand deserialize(DataInputPlus in, int version, boolean isDigest, int digestVersion, boolean isForThrift, CFMetaData metadata, int nowInSec, ColumnFilter columnFilter, RowFilter rowFilter, DataLimits limits, IndexMetadata index) throws IOException
{
    DecoratedKey partitionKey = metadata.decorateKey(metadata.getKeyValidator().readValue(in, DatabaseDescriptor.getMaxValueSize()));
    ClusteringIndexFilter clusteringFilter = ClusteringIndexFilter.serializer.deserialize(in, version, metadata);
    return new SinglePartitionReadCommand(isDigest, digestVersion, isForThrift, metadata, nowInSec,
                                          columnFilter, rowFilter, limits, partitionKey, clusteringFilter, index);
}
}
/**
 * Deserializes the selection portion of a single-partition read command:
 * the partition key bytes (decoded via the table's key validator, then
 * decorated) followed by the clustering index filter.
 *
 * NOTE(review): assumes the caller already decoded the shared header fields
 * (digest flag, filters, limits, index) and passes them straight through.
 */
public ReadCommand deserialize(DataInputPlus in, int version, boolean isDigest, int digestVersion, boolean isForThrift, CFMetaData metadata, int nowInSec, ColumnFilter columnFilter, RowFilter rowFilter, DataLimits limits, IndexMetadata index) throws IOException
{
    DecoratedKey partitionKey = metadata.decorateKey(metadata.getKeyValidator().readValue(in, DatabaseDescriptor.getMaxValueSize()));
    ClusteringIndexFilter clusteringFilter = ClusteringIndexFilter.serializer.deserialize(in, version, metadata);
    return new SinglePartitionReadCommand(isDigest, digestVersion, isForThrift, metadata, nowInSec,
                                          columnFilter, rowFilter, limits, partitionKey, clusteringFilter, index);
}
}
/**
 * Reads a DataRange written by serialize(): key range, clustering filter,
 * then an optional paging tail (last returned clustering + inclusive flag)
 * guarded by a boolean marker.
 */
public DataRange deserialize(DataInputPlus in, int version, CFMetaData metadata) throws IOException
{
    AbstractBounds<PartitionPosition> keyRange = AbstractBounds.rowPositionSerializer.deserialize(in, metadata.partitioner, version);
    ClusteringIndexFilter clusteringFilter = ClusteringIndexFilter.serializer.deserialize(in, version, metadata);
    boolean isPaging = in.readBoolean();
    if (!isPaging)
        return new DataRange(keyRange, clusteringFilter);

    // Paging tail: the clustering we stopped at, and whether it is inclusive.
    ClusteringComparator comparator = metadata.comparator;
    Clustering lastReturned = Clustering.serializer.deserialize(in, version, comparator.subtypes());
    boolean inclusive = in.readBoolean();
    return new Paging(keyRange, clusteringFilter, comparator, lastReturned, inclusive);
}
/**
 * Reads a DataRange written by serialize(): key range, clustering filter,
 * then an optional paging tail (last returned clustering + inclusive flag)
 * guarded by a boolean marker.
 */
public DataRange deserialize(DataInputPlus in, int version, CFMetaData metadata) throws IOException
{
    AbstractBounds<PartitionPosition> keyRange = AbstractBounds.rowPositionSerializer.deserialize(in, metadata.partitioner, version);
    ClusteringIndexFilter clusteringFilter = ClusteringIndexFilter.serializer.deserialize(in, version, metadata);
    boolean isPaging = in.readBoolean();
    if (!isPaging)
        return new DataRange(keyRange, clusteringFilter);

    // Paging tail: the clustering we stopped at, and whether it is inclusive.
    ClusteringComparator comparator = metadata.comparator;
    Clustering lastReturned = Clustering.serializer.deserialize(in, version, comparator.subtypes());
    boolean inclusive = in.readBoolean();
    return new Paging(keyRange, clusteringFilter, comparator, lastReturned, inclusive);
}
/**
 * Reads a DataRange written by serialize(): key range, clustering filter,
 * then an optional paging tail (last returned clustering + inclusive flag)
 * guarded by a boolean marker.
 */
public DataRange deserialize(DataInputPlus in, int version, CFMetaData metadata) throws IOException
{
    AbstractBounds<PartitionPosition> keyRange = AbstractBounds.rowPositionSerializer.deserialize(in, metadata.partitioner, version);
    ClusteringIndexFilter clusteringFilter = ClusteringIndexFilter.serializer.deserialize(in, version, metadata);
    boolean isPaging = in.readBoolean();
    if (!isPaging)
        return new DataRange(keyRange, clusteringFilter);

    // Paging tail: the clustering we stopped at, and whether it is inclusive.
    ClusteringComparator comparator = metadata.comparator;
    Clustering lastReturned = Clustering.serializer.deserialize(in, version, comparator.subtypes());
    boolean inclusive = in.readBoolean();
    return new Paging(keyRange, clusteringFilter, comparator, lastReturned, inclusive);
}
/**
 * Reads a DataRange written by serialize(): key range, clustering filter,
 * then an optional paging tail (last returned clustering + inclusive flag)
 * guarded by a boolean marker.
 */
public DataRange deserialize(DataInputPlus in, int version, CFMetaData metadata) throws IOException
{
    AbstractBounds<PartitionPosition> keyRange = AbstractBounds.rowPositionSerializer.deserialize(in, metadata.partitioner, version);
    ClusteringIndexFilter clusteringFilter = ClusteringIndexFilter.serializer.deserialize(in, version, metadata);
    boolean isPaging = in.readBoolean();
    if (!isPaging)
        return new DataRange(keyRange, clusteringFilter);

    // Paging tail: the clustering we stopped at, and whether it is inclusive.
    ClusteringComparator comparator = metadata.comparator;
    Clustering lastReturned = Clustering.serializer.deserialize(in, version, comparator.subtypes());
    boolean inclusive = in.readBoolean();
    return new Paging(keyRange, clusteringFilter, comparator, lastReturned, inclusive);
}
public long serializedSize(DataRange range, int version, CFMetaData metadata) { long size = AbstractBounds.rowPositionSerializer.serializedSize(range.keyRange, version) + ClusteringIndexFilter.serializer.serializedSize(range.clusteringIndexFilter, version) + 1; // isPaging boolean if (range instanceof Paging) { size += Clustering.serializer.serializedSize(((Paging)range).lastReturned, version, metadata.comparator.subtypes()); size += 1; // inclusive boolean } return size; } }
public long serializedSize(DataRange range, int version, CFMetaData metadata) { long size = AbstractBounds.rowPositionSerializer.serializedSize(range.keyRange, version) + ClusteringIndexFilter.serializer.serializedSize(range.clusteringIndexFilter, version) + 1; // isPaging boolean if (range instanceof Paging) { size += Clustering.serializer.serializedSize(((Paging)range).lastReturned, version, metadata.comparator.subtypes()); size += 1; // inclusive boolean } return size; } }
public long serializedSize(DataRange range, int version, CFMetaData metadata) { long size = AbstractBounds.rowPositionSerializer.serializedSize(range.keyRange, version) + ClusteringIndexFilter.serializer.serializedSize(range.clusteringIndexFilter, version) + 1; // isPaging boolean if (range instanceof Paging) { size += Clustering.serializer.serializedSize(((Paging)range).lastReturned, version, metadata.comparator.subtypes()); size += 1; // inclusive boolean } return size; } }
/**
 * Writes a DataRange: partition key range, clustering filter, then a boolean
 * paging marker; when the range carries paging state, the last returned
 * clustering and its inclusiveness flag follow the marker.
 */
public void serialize(DataRange range, DataOutputPlus out, int version, CFMetaData metadata) throws IOException
{
    AbstractBounds.rowPositionSerializer.serialize(range.keyRange, out, version);
    ClusteringIndexFilter.serializer.serialize(range.clusteringIndexFilter, out, version);
    if (range instanceof Paging)
    {
        Paging paging = (Paging) range;
        out.writeBoolean(true); // paging marker
        Clustering.serializer.serialize(paging.lastReturned, out, version, metadata.comparator.subtypes());
        out.writeBoolean(paging.inclusive);
    }
    else
    {
        out.writeBoolean(false); // no paging tail follows
    }
}
/**
 * Writes this command's selection: the raw partition key bytes (via the
 * table's key validator) followed by the clustering index filter.
 */
protected void serializeSelection(DataOutputPlus out, int version) throws IOException
{
    metadata().getKeyValidator().writeValue(partitionKey().getKey(), out);
    ClusteringIndexFilter.serializer.serialize(clusteringIndexFilter(), out, version);
}
/**
 * Writes a DataRange: partition key range, clustering filter, then a boolean
 * paging marker; when the range carries paging state, the last returned
 * clustering and its inclusiveness flag follow the marker.
 */
public void serialize(DataRange range, DataOutputPlus out, int version, CFMetaData metadata) throws IOException
{
    AbstractBounds.rowPositionSerializer.serialize(range.keyRange, out, version);
    ClusteringIndexFilter.serializer.serialize(range.clusteringIndexFilter, out, version);
    if (range instanceof Paging)
    {
        Paging paging = (Paging) range;
        out.writeBoolean(true); // paging marker
        Clustering.serializer.serialize(paging.lastReturned, out, version, metadata.comparator.subtypes());
        out.writeBoolean(paging.inclusive);
    }
    else
    {
        out.writeBoolean(false); // no paging tail follows
    }
}
/**
 * Size in bytes of the selection written by serializeSelection():
 * partition key bytes plus the clustering index filter.
 */
protected long selectionSerializedSize(int version)
{
    long keySize = metadata().getKeyValidator().writtenLength(partitionKey().getKey());
    long filterSize = ClusteringIndexFilter.serializer.serializedSize(clusteringIndexFilter(), version);
    return keySize + filterSize;
}
/**
 * Size in bytes of the selection written by serializeSelection():
 * partition key bytes plus the clustering index filter.
 */
protected long selectionSerializedSize(int version)
{
    long keySize = metadata().getKeyValidator().writtenLength(partitionKey().getKey());
    long filterSize = ClusteringIndexFilter.serializer.serializedSize(clusteringIndexFilter(), version);
    return keySize + filterSize;
}
/**
 * Writes this command's selection: the raw partition key bytes (via the
 * table's key validator) followed by the clustering index filter.
 */
protected void serializeSelection(DataOutputPlus out, int version) throws IOException
{
    metadata().getKeyValidator().writeValue(partitionKey().getKey(), out);
    ClusteringIndexFilter.serializer.serialize(clusteringIndexFilter(), out, version);
}
/**
 * Writes a DataRange: partition key range, clustering filter, then a boolean
 * paging marker; when the range carries paging state, the last returned
 * clustering and its inclusiveness flag follow the marker.
 */
public void serialize(DataRange range, DataOutputPlus out, int version, CFMetaData metadata) throws IOException
{
    AbstractBounds.rowPositionSerializer.serialize(range.keyRange, out, version);
    ClusteringIndexFilter.serializer.serialize(range.clusteringIndexFilter, out, version);
    if (range instanceof Paging)
    {
        Paging paging = (Paging) range;
        out.writeBoolean(true); // paging marker
        Clustering.serializer.serialize(paging.lastReturned, out, version, metadata.comparator.subtypes());
        out.writeBoolean(paging.inclusive);
    }
    else
    {
        out.writeBoolean(false); // no paging tail follows
    }
}
/**
 * Size in bytes of the selection written by serializeSelection():
 * partition key bytes plus the clustering index filter.
 */
protected long selectionSerializedSize(int version)
{
    long keySize = metadata().getKeyValidator().writtenLength(partitionKey().getKey());
    long filterSize = ClusteringIndexFilter.serializer.serializedSize(clusteringIndexFilter(), version);
    return keySize + filterSize;
}