/**
 * Writes the remaining bytes of {@code buffer} to {@code out}, prefixed by
 * their count encoded as a 2-byte unsigned short.
 *
 * @param buffer source bytes; must have at most FBUtilities.MAX_UNSIGNED_SHORT remaining
 * @param out    destination stream
 * @throws IOException on write failure
 */
public static void writeWithShortLength(ByteBuffer buffer, DataOutputPlus out) throws IOException
{
    final int len = buffer.remaining();
    // The length prefix is an unsigned 16-bit value, so reject anything larger.
    assert len >= 0 && len <= FBUtilities.MAX_UNSIGNED_SHORT
        : String.format("Attempted serializing to buffer exceeded maximum of %s bytes: %s", FBUtilities.MAX_UNSIGNED_SHORT, len);
    out.writeShort(len);
    out.write(buffer);
}
/**
 * Serializes a StreamingHistogram as: max bin size (int), bin count (int),
 * then one (point, count) pair per bin — the point as a double, the count
 * as a long taken from slot 0 of the bin's value array.
 */
public void serialize(StreamingHistogram histogram, DataOutputPlus out) throws IOException
{
    out.writeInt(histogram.maxBinSize);

    Map<Number, long[]> bins = histogram.getAsMap();
    out.writeInt(bins.size());

    for (Map.Entry<Number, long[]> bin : bins.entrySet())
    {
        out.writeDouble(bin.getKey().doubleValue());
        out.writeLong(bin.getValue()[0]);
    }
}
/**
 * Serializes a stream-session init message: sender endpoint, session index,
 * plan id, description, then three session flags.
 *
 * NOTE(review): the plan id is written with MessagingService.current_version
 * rather than the {@code version} parameter — presumably because the init
 * message is exchanged before/independently of version negotiation; confirm
 * against the matching deserializer before changing.
 */
public void serialize(StreamInitMessage message, DataOutputPlus out, int version) throws IOException
{
    CompactEndpointSerializationHelper.serialize(message.from, out);
    out.writeInt(message.sessionIndex);
    UUIDSerializer.serializer.serialize(message.planId, out, MessagingService.current_version);
    out.writeUTF(message.description);
    out.writeBoolean(message.isForOutgoing);
    out.writeBoolean(message.keepSSTableLevel);
    out.writeBoolean(message.isIncremental);
}
/**
 * Serializes a snapshot command: keyspace, column family and snapshot name
 * as modified-UTF strings, then the clear flag (presumably true when the
 * request is to remove an existing snapshot rather than take one — confirm
 * against SnapshotCommand's definition).
 *
 * The {@code version} parameter is unused; the format is apparently fixed
 * across messaging versions.
 */
public void serialize(SnapshotCommand snapshot_command, DataOutputPlus out, int version) throws IOException
{
    out.writeUTF(snapshot_command.keyspace);
    out.writeUTF(snapshot_command.column_family);
    out.writeUTF(snapshot_command.snapshot_name);
    out.writeBoolean(snapshot_command.clear_snapshot);
}
/**
 * Serializes sstable validation metadata: the partitioner class name as a
 * modified-UTF string followed by the bloom filter false-positive chance.
 *
 * The {@code version} parameter is unused here; both fields are written the
 * same way for every sstable version this serializer handles.
 */
public void serialize(Version version, ValidationMetadata component, DataOutputPlus out) throws IOException
{
    out.writeUTF(component.partitioner);
    out.writeDouble(component.bloomFilterFPChance);
}
/**
 * Cache-format serialization: writes this entry's {@code position} as an
 * unsigned vint followed by the CACHE_NOT_INDEXED tag byte, marking an
 * entry that carries no index samples (the deserializer presumably
 * dispatches on this tag — confirm against the cache read path).
 */
public void serializeForCache(DataOutputPlus out) throws IOException
{
    out.writeUnsignedVInt(position);
    out.writeByte(CACHE_NOT_INDEXED);
}
/**
 * Writes the remaining bytes of {@code bytes} to {@code out}, prefixed by
 * their count encoded as an unsigned vint.
 *
 * @param bytes source buffer; its remaining bytes are written
 * @param out   destination stream
 * @throws IOException on write failure
 */
public static void writeWithVIntLength(ByteBuffer bytes, DataOutputPlus out) throws IOException
{
    final int size = bytes.remaining();
    out.writeUnsignedVInt(size);
    out.write(bytes);
}
/**
 * Serializes one index entry: first and last clustering names, the offset
 * (unsigned vint), the width delta-encoded against WIDTH_BASE (signed vint),
 * then a presence flag and, if present, the open-range deletion marker.
 */
public void serialize(IndexInfo info, DataOutputPlus out) throws IOException
{
    // New-format files only; legacy index files are read-only territory.
    assert version.storeRows() : "We read old index files but we should never write them";

    clusteringSerializer.serialize(info.firstName, out);
    clusteringSerializer.serialize(info.lastName, out);
    out.writeUnsignedVInt(info.offset);
    // Delta against WIDTH_BASE keeps typical widths small for the vint encoding.
    out.writeVInt(info.width - WIDTH_BASE);

    boolean hasEndOpenMarker = info.endOpenMarker != null;
    out.writeBoolean(hasEndOpenMarker);
    if (hasEndOpenMarker)
        DeletionTime.serializer.serialize(info.endOpenMarker, out);
}
/**
 * Serializes a MerkleTree: hash depth (byte), max size and current size
 * (longs), the partitioner's canonical class name, the tree's full token
 * range (left then right), and finally the hash tree rooted at {@code root}.
 * Field order is the wire contract — do not reorder.
 */
public void serialize(MerkleTree mt, DataOutputPlus out, int version) throws IOException
{
    out.writeByte(mt.hashdepth);
    out.writeLong(mt.maxsize);
    out.writeLong(mt.size);
    // Canonical class name lets the receiver reinstantiate the partitioner.
    out.writeUTF(mt.partitioner.getClass().getCanonicalName());
    // full range
    Token.serializer.serialize(mt.fullRange.left, out, version);
    Token.serializer.serialize(mt.fullRange.right, out, version);
    Hashable.serializer.serialize(mt.root, out, version);
}
/**
 * Serializes a clustering bound/boundary: the kind as a single byte (its
 * enum ordinal — the deserializer must use the same enum ordering), the
 * number of clustering values as a short, then the values themselves
 * without per-value sizes (types supply the framing).
 */
public void serialize(ClusteringBoundOrBoundary bound, DataOutputPlus out, int version, List<AbstractType<?>> types) throws IOException
{
    out.writeByte(bound.kind().ordinal());
    out.writeShort(bound.size());
    ClusteringPrefix.serializer.serializeValuesWithoutSize(bound, out, version, types);
}
/**
 * Serializes a read response. Layout: digest length (int, 0 for data
 * responses), the digest bytes (empty for data responses), the digest-query
 * flag, and — for data responses only — the row payload.
 */
public void serialize(ReadResponse response, DataOutputPlus out, int version) throws IOException
{
    boolean isDigest = response.isDigestQuery();
    // Data responses write a zero-length digest so the layout stays fixed.
    ByteBuffer digest = isDigest ? response.digest() : ByteBufferUtil.EMPTY_BYTE_BUFFER;

    out.writeInt(digest.remaining());
    out.write(digest);
    out.writeBoolean(isDigest);

    if (!isDigest)
        Row.serializer.serialize(response.row(), out, version);
}
/**
 * Serializes a slice read command: digest flag, keyspace, partition key
 * (short-length-prefixed), column family, timestamp, then the slice filter
 * via the table comparator's filter serializer.
 *
 * The command is assumed to be a SliceFromReadCommand; the cast fails fast
 * otherwise.
 */
public void serialize(ReadCommand rm, DataOutputPlus out, int version) throws IOException
{
    SliceFromReadCommand sliceCommand = (SliceFromReadCommand) rm;

    out.writeBoolean(sliceCommand.isDigestQuery());
    out.writeUTF(sliceCommand.ksName);
    ByteBufferUtil.writeWithShortLength(sliceCommand.key, out);
    out.writeUTF(sliceCommand.cfName);
    out.writeLong(sliceCommand.timestamp);

    // The filter encoding depends on the table's comparator, so look it up.
    CFMetaData cfm = Schema.instance.getCFMetaData(sliceCommand.ksName, sliceCommand.cfName);
    cfm.comparator.sliceQueryFilterSerializer().serialize(sliceCommand.filter, out, version);
}
/**
 * Serializes a hint: creation time (long), gc grace seconds (unsigned
 * vint), then the wrapped mutation. Order is the wire contract.
 */
public void serialize(Hint hint, DataOutputPlus out, int version) throws IOException
{
    out.writeLong(hint.creationTime);
    out.writeUnsignedVInt(hint.gcgs);
    Mutation.serializer.serialize(hint.mutation, out, version);
}
public void serialize(IRowCacheEntry entry, DataOutputPlus out) throws IOException { assert entry != null; // unlike CFS we don't support nulls, since there is no need for that in the cache boolean isSentinel = entry instanceof RowCacheSentinel; out.writeBoolean(isSentinel); if (isSentinel) out.writeLong(((RowCacheSentinel) entry).sentinelId); else CachedPartition.cacheSerializer.serialize((CachedPartition)entry, out); }
/**
 * Serializes a validation request: the repair job descriptor followed by
 * the gcBefore timestamp (seconds; tombstones older than this are
 * presumably purgeable during validation — confirm against the consumer).
 */
public void serialize(ValidationRequest message, DataOutputPlus out, int version) throws IOException
{
    RepairJobDesc.serializer.serialize(message.desc, out, version);
    out.writeInt(message.gcBefore);
}
/**
 * Serializes a row filter: a flag marking Thrift-style filters, the
 * expression count as an unsigned vint, then each expression in order.
 */
public void serialize(RowFilter filter, DataOutputPlus out, int version) throws IOException
{
    // The deserializer needs to know up front which filter subclass to build.
    out.writeBoolean(filter instanceof ThriftFilter);
    out.writeUnsignedVInt(filter.expressions.size());
    for (Expression expression : filter.expressions)
        Expression.serializer.serialize(expression, out, version);
}
/**
 * Serializes a repair message: the message-type tag (written via
 * write(int), which emits only the low-order byte) followed by the
 * type-specific payload produced by that type's own serializer. The tag
 * must precede the payload so the receiver can dispatch.
 */
public void serialize(RepairMessage message, DataOutputPlus out, int version) throws IOException
{
    out.write(message.messageType.type);
    message.messageType.serializer.serialize(message, out, version);
}