/**
 * Reads a position encoded as an unsigned vint, then skips the variable-length
 * blob (vint length prefix + bytes) that follows it.
 *
 * @param in source to read from
 * @return the position value that was read
 * @throws IOException if reading or skipping fails
 */
static long deserializePositionAndSkip(DataInputPlus in) throws IOException
{
    long pos = in.readUnsignedVInt();
    int skippable = (int) in.readUnsignedVInt();
    if (skippable > 0)
        in.skipBytesFully(skippable);
    return pos;
}
/**
 * Deserializes a StreamInitMessage: endpoint, session index, plan id,
 * description and three boolean flags, in that wire order.
 *
 * @param in      source to read from
 * @param version protocol version (unused here; part of the serializer contract)
 * @return the reconstructed message
 * @throws IOException on any read failure
 */
public StreamInitMessage deserialize(DataInputPlus in, int version) throws IOException
{
    InetAddress sender = CompactEndpointSerializationHelper.deserialize(in);
    int sessionIdx = in.readInt();
    // NOTE: the plan id is always serialized with the current messaging version,
    // independent of the `version` argument — preserved from the original.
    UUID plan = UUIDSerializer.serializer.deserialize(in, MessagingService.current_version);
    String desc = in.readUTF();
    boolean fromInitiator = in.readBoolean();
    boolean keepLevel = in.readBoolean();
    boolean incremental = in.readBoolean();
    return new StreamInitMessage(sender, sessionIdx, plan, desc, fromInitiator, keepLevel, incremental);
}
/**
 * Reads a single char; pure delegation to the wrapped reader.
 *
 * @return the char read from the underlying input
 * @throws IOException if the underlying read fails
 */
public char readChar() throws IOException
{
    return dataReader.readChar();
}
/**
 * Reads a fixed-width position (long) followed by an int length prefix,
 * and skips that many trailing bytes.
 *
 * @param in source to read from
 * @return the position value that was read
 * @throws IOException if reading or skipping fails
 */
static long deserializePositionAndSkip(DataInputPlus in) throws IOException
{
    long position = in.readLong();
    int trailing = in.readInt();
    if (trailing > 0)
        in.skipBytesFully(trailing);
    return position;
}
}
/**
 * Deserializes a SnapshotCommand: keyspace, column family, snapshot name,
 * and the clear-snapshot flag, in that wire order.
 *
 * @param in      source to read from
 * @param version protocol version (unused here; part of the serializer contract)
 * @return the reconstructed command
 * @throws IOException on any read failure
 */
public SnapshotCommand deserialize(DataInputPlus in, int version) throws IOException
{
    String ks = in.readUTF();
    String cf = in.readUTF();
    String snapshotName = in.readUTF();
    boolean clearSnapshot = in.readBoolean();
    return new SnapshotCommand(ks, cf, snapshotName, clearSnapshot);
}
// NOTE(review): this is an excerpt of a verb/parameter deserialization routine and is
// syntactically incomplete as shown: the brace-less `if (parameterCount == 0)` directly
// guards the `for` loop (which could never iterate under that condition), the loop's
// sole statement is a declaration (illegal Java), `builder` is used but never declared,
// and `parameters` is declared but never assigned. The original presumably had an
// if/else with an empty-map branch and a map-builder branch that was lost when this
// line was extracted — TODO: restore from the original source; left byte-identical here.
// Reads: verb ordinal -> enum, parameter count, then key/length-prefixed-value pairs,
// then a payload size; callback-determined serializers have their payload skipped.
MessagingService.Verb verb = MessagingService.verbValues[in.readInt()]; int parameterCount = in.readInt(); Map<String, byte[]> parameters; if (parameterCount == 0) for (int i = 0; i < parameterCount; i++) String key = in.readUTF(); byte[] value = new byte[in.readInt()]; in.readFully(value); builder.put(key, value); int payloadSize = in.readInt(); IVersionedSerializer<T2> serializer = (IVersionedSerializer<T2>) MessagingService.instance().verbSerializers.get(verb); if (serializer instanceof MessagingService.CallbackDeterminedSerializer) in.skipBytesFully(payloadSize); return null;
/**
 * Deserializes a StreamingHistogram: max bin count, then `size` bins of
 * (double point, long count) pairs.
 *
 * @param in source to read from
 * @return the reconstructed histogram
 * @throws IOException on any read failure
 */
public StreamingHistogram deserialize(DataInputPlus in) throws IOException
{
    int maxBinSize = in.readInt();
    int binCount = in.readInt();
    Map<Number, long[]> bins = new HashMap<>(binCount);
    for (int i = 0; i < binCount; i++)
    {
        // Read order matters: the point precedes its count on the wire.
        double point = in.readDouble();
        long count = in.readLong();
        bins.put(point, new long[]{ count });
    }
    return new StreamingHistogram(maxBinSize, bins);
}
// NOTE(review): incomplete excerpt — `digest` is declared outside this fragment, and
// the trailing `if (isDigest)` has no body here; the statements it guarded were lost
// when this line was extracted. TODO: restore from the original source; left byte-identical.
// Visible behavior: reads a digest length, fills the first `digestSize` bytes of the
// pre-existing `digest` buffer when positive, then reads an isDigest flag that is
// asserted to agree with digestSize > 0.
int digestSize = in.readInt(); if (digestSize > 0) in.readFully(digest, 0, digestSize); boolean isDigest = in.readBoolean(); assert isDigest == digestSize > 0; if (isDigest)
/**
 * Skips over the promoted index: its size is an unsigned vint for row-storing
 * versions and a plain int for older ones; a non-positive size means nothing
 * to skip.
 *
 * @param in      source to read from
 * @param version sstable format version, decides the size encoding
 * @throws IOException if reading or skipping fails
 */
private static void skipPromotedIndex(DataInputPlus in, Version version) throws IOException
{
    int promotedSize = version.storeRows() ? (int) in.readUnsignedVInt() : in.readInt();
    if (promotedSize > 0)
        in.skipBytesFully(promotedSize);
}
/**
 * Deserializes ValidationMetadata: a partitioner name followed by a double
 * (bloom filter FP chance in the serialized layout).
 *
 * @param version sstable format version (unused here; part of the serializer contract)
 * @param in      source to read from
 * @return the reconstructed metadata
 * @throws IOException on any read failure
 */
public ValidationMetadata deserialize(Version version, DataInputPlus in) throws IOException
{
    String partitionerName = in.readUTF();
    double fpChance = in.readDouble();
    return new ValidationMetadata(partitionerName, fpChance);
}
}
public PingMessage deserialize(DataInputPlus in, int version) throws IOException { // throw away the one byte of the payload in.readByte(); return new PingMessage(); }
/**
 * Deserializes a RowFilter: a thrift flag, an expression count (unsigned vint),
 * then that many expressions. The flag selects the concrete filter flavor.
 *
 * @param in       source to read from
 * @param version  serialization version forwarded to the expression serializer
 * @param metadata table metadata forwarded to the expression serializer
 * @return a ThriftFilter or CQLFilter over the deserialized expressions
 * @throws IOException on any read failure
 */
public RowFilter deserialize(DataInputPlus in, int version, CFMetaData metadata) throws IOException
{
    boolean forThrift = in.readBoolean();
    int numExpressions = (int) in.readUnsignedVInt();
    List<Expression> expressions = new ArrayList<>(numExpressions);
    while (expressions.size() < numExpressions)
        expressions.add(Expression.serializer.deserialize(in, version, metadata));
    if (forThrift)
        return new ThriftFilter(expressions);
    return new CQLFilter(expressions);
}
public CompactionMetadata deserialize(Version version, DataInputPlus in) throws IOException { if (version.hasCompactionAncestors()) { // skip ancestors int nbAncestors = in.readInt(); in.skipBytes(nbAncestors * TypeSizes.sizeof(nbAncestors)); } ICardinality cardinality = HyperLogLogPlus.Builder.build(ByteBufferUtil.readBytes(in, in.readInt())); return new CompactionMetadata(cardinality); } }
/**
 * Deserializes an EstimatedHistogram from `size` (offset, bucket) long pairs.
 * The offsets array is one shorter than the buckets array, so the first pair's
 * offset lands in offsets[0] and is overwritten by the second pair's —
 * preserving the original on-disk layout handling.
 *
 * @param in source to read from
 * @return the reconstructed histogram
 * @throws IOException on any read failure
 */
public EstimatedHistogram deserialize(DataInputPlus in) throws IOException
{
    int size = in.readInt();
    long[] offsets = new long[size - 1];
    long[] buckets = new long[size];
    for (int i = 0; i < size; i++)
    {
        offsets[Math.max(i - 1, 0)] = in.readLong();
        buckets[i] = in.readLong();
    }
    return new EstimatedHistogram(offsets, buckets);
}
/**
 * Deserializes a row cache entry. A leading boolean distinguishes a sentinel
 * (followed by its long id) from a fully cached partition.
 *
 * @param in source to read from
 * @return a RowCacheSentinel or a deserialized CachedPartition
 * @throws IOException on any read failure
 */
public IRowCacheEntry deserialize(DataInputPlus in) throws IOException
{
    if (in.readBoolean())
        return new RowCacheSentinel(in.readLong());
    return CachedPartition.cacheSerializer.deserialize(in);
}
/**
 * Fills the given buffer completely; pure delegation to the wrapped reader.
 *
 * @param b buffer to fill
 * @throws IOException if the underlying read fails or the stream ends early
 */
public void readFully(byte[] b) throws IOException
{
    dataReader.readFully(b);
}
/**
 * Advances past a serialized index entry without materializing it, in exact
 * wire order: position, header length, deletion time, columns index count,
 * indexed part size.
 *
 * @param in source to advance
 * @throws IOException if any read fails
 */
static void skipForCache(DataInputPlus in) throws IOException
{
    in.readUnsignedVInt();            // index file position
    in.readUnsignedVInt();            // header length
    DeletionTime.serializer.skip(in); // deletion time
    in.readUnsignedVInt();            // columns index count
    in.readUnsignedVInt();            // indexed part size
}
}
/**
 * Deserializes a ClusteringIndexFilter: a kind ordinal byte and a reversed
 * flag, then dispatch to the kind-specific deserializer.
 *
 * @param in       source to read from
 * @param version  serialization version forwarded to the kind deserializer
 * @param metadata table metadata forwarded to the kind deserializer
 * @return the filter produced by the kind's deserializer
 * @throws IOException on any read failure
 */
public ClusteringIndexFilter deserialize(DataInputPlus in, int version, CFMetaData metadata) throws IOException
{
    int kindOrdinal = in.readUnsignedByte();
    Kind kind = Kind.values()[kindOrdinal];
    boolean reversed = in.readBoolean();
    return kind.deserializer.deserialize(in, version, metadata, reversed);
}
/**
 * Deserializes a GroupingState. A leading flag marks the empty state; otherwise
 * a vint-length-prefixed partition key follows, then an optional clustering
 * guarded by its own flag.
 *
 * @param in         source to read from
 * @param version    serialization version forwarded to the clustering serializer
 * @param comparator supplies the clustering subtypes for deserialization
 * @return EMPTY_STATE or a state with the key and (possibly null) clustering
 * @throws IOException on any read failure
 */
public GroupingState deserialize(DataInputPlus in, int version, ClusteringComparator comparator) throws IOException
{
    boolean hasState = in.readBoolean();
    if (!hasState)
        return GroupingState.EMPTY_STATE;
    ByteBuffer partitionKey = ByteBufferUtil.readWithVIntLength(in);
    Clustering clustering = in.readBoolean()
                          ? Clustering.serializer.deserialize(in, version, comparator.subtypes())
                          : null;
    return new GroupingState(partitionKey, clustering);
}