/**
 * Creates a new instance of the TableKeyTooLongException class.
 *
 * @param key           The Key.
 * @param maximumLength The maximum allowed key length.
 */
public TableKeyTooLongException(TableKey key, int maximumLength) {
    super(String.format("Table Key too long. Maximum length: %s, given: %s.",
            maximumLength, key.getKey().getLength()));
}
/**
 * Calculates the number of bytes required to serialize the given {@link TableKey} removal.
 *
 * @param key The {@link TableKey} to serialize for removal.
 * @return The number of bytes required to serialize.
 */
int getRemovalLength(@NonNull TableKey key) {
    return HEADER_LENGTH + key.getKey().getLength();
}
/**
 * Creates a new instance of the TableValueTooLongException class.
 *
 * @param entry         The TableEntry.
 * @param maximumLength The maximum allowed value length.
 */
public TableValueTooLongException(TableEntry entry, int maximumLength) {
    super(String.format("Table Value too long. Maximum length: %s, given: %s.",
            maximumLength, entry.getValue().getLength()));
}
/**
 * Calculates the number of bytes required to serialize the given {@link TableEntry} update.
 *
 * @param entry The {@link TableEntry} to serialize.
 * @return The number of bytes required to serialize.
 */
int getUpdateLength(@NonNull TableEntry entry) {
    return HEADER_LENGTH + entry.getKey().getKey().getLength() + entry.getValue().getLength();
}
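For a concrete sense of what getRemovalLength and getUpdateLength return, here is a minimal sketch, assuming HEADER_LENGTH is 9 bytes (one version byte plus two 4-byte integers, matching the header written by serializeRemoval further below); the method name and sample values are hypothetical.

// Hypothetical sketch, not part of the codebase.
static void serializedLengthSketch() {
    int headerLength = 9; // assumption: 1 version byte + 4-byte key length + 4-byte value length
    byte[] keyBytes = "account-1234".getBytes(java.nio.charset.StandardCharsets.UTF_8); // 12 bytes
    byte[] valueBytes = new byte[128];
    int removalLength = headerLength + keyBytes.length;                     // 9 + 12 = 21 bytes
    int updateLength = headerLength + keyBytes.length + valueBytes.length;  // 9 + 12 + 128 = 149 bytes
}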
/**
 * Adds a new Write to the end of the queue.
 *
 * @param write The write to add.
 */
synchronized void add(Write write) {
    Exceptions.checkNotClosed(this.closed, this);
    this.writes.addLast(write);
    this.totalLength += write.data.getLength();
    write.setQueueAddedTimestamp(this.timeSupplier.get());
}
@Override
public String toString() {
    return String.format("LedgerId = %s, Length = %s, Attempts = %s, InProgress = %s, Done = %s, Failed = %s",
            this.writeLedger.get().metadata.getLedgerId(), this.data.getLength(), this.attemptCount,
            isInProgress(), isDone(), this.failureCause.get() != null);
}
/**
 * Creates a new instance of the Entry class, copying the given data into a new buffer.
 *
 * @param inputData The data to copy.
 */
Entry(ArrayView inputData) {
    this.data = new byte[inputData.getLength()];
    System.arraycopy(inputData.array(), inputData.arrayOffset(), this.data, 0, this.data.length);
}
/**
 * Serializes the given {@link TableKey} for removal into the given array.
 *
 * @param tableKey     The {@link TableKey} to serialize.
 * @param target       The byte array to serialize to.
 * @param targetOffset The first offset within the byte array to serialize at.
 * @return The first offset in the given byte array after the serialization.
 */
private int serializeRemoval(@NonNull TableKey tableKey, byte[] target, int targetOffset) {
    val key = tableKey.getKey();
    Preconditions.checkArgument(key.getLength() <= MAX_KEY_LENGTH, "Key too large.");
    int serializationLength = getRemovalLength(tableKey);
    Preconditions.checkElementIndex(targetOffset + serializationLength - 1, target.length,
            "serialization does not fit in target buffer");

    // Serialize Header.
    target[targetOffset] = CURRENT_SERIALIZATION_VERSION;
    targetOffset++;
    targetOffset += BitConverter.writeInt(target, targetOffset, key.getLength());
    targetOffset += BitConverter.writeInt(target, targetOffset, NO_VALUE);

    // Key.
    System.arraycopy(key.array(), key.arrayOffset(), target, targetOffset, key.getLength());
    return targetOffset + key.getLength();
}
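To make the resulting byte layout easier to visualize, here is a self-contained approximation using java.nio.ByteBuffer. The concrete values are assumptions: BitConverter.writeInt is taken to write a 4-byte big-endian integer, CURRENT_SERIALIZATION_VERSION is stood in for by 0, and NO_VALUE by -1.

// Hypothetical sketch of the removal layout: [version:1][keyLength:4][NO_VALUE:4][key:N].
static byte[] removalLayoutSketch(byte[] keyBytes) {
    final byte version = 0; // stand-in for CURRENT_SERIALIZATION_VERSION
    final int noValue = -1; // stand-in for NO_VALUE
    java.nio.ByteBuffer buf = java.nio.ByteBuffer.allocate(1 + 4 + 4 + keyBytes.length);
    buf.put(version)             // header: serialization version
       .putInt(keyBytes.length)  // header: key length (ByteBuffer is big-endian by default)
       .putInt(noValue)          // header: value-length slot; the sentinel marks a removal
       .put(keyBytes);           // the key itself
    return buf.array();
}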
@Override
public UUID hash(@NonNull ArrayView key) {
    byte[] rawHash = new byte[HASH_SIZE_BYTES];
    int c = HASH.hashBytes(key.array(), key.arrayOffset(), key.getLength()).writeBytesTo(rawHash, 0, rawHash.length);
    assert c == rawHash.length;
    return toUUID(rawHash);
}
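The pattern above (hash the bytes, copy the first HASH_SIZE_BYTES of the digest, convert to a UUID) can be sketched with Guava as follows; SHA-256 and the UUID construction are assumptions, since HASH and toUUID are defined elsewhere.

import com.google.common.hash.Hashing;
import java.nio.ByteBuffer;
import java.util.UUID;

// Hypothetical sketch: derive a 128-bit UUID from a byte array.
static UUID keyHashSketch(byte[] key) {
    byte[] rawHash = new byte[16]; // 16 bytes = 128 bits, the size of a UUID
    Hashing.sha256().hashBytes(key, 0, key.length).writeBytesTo(rawHash, 0, rawHash.length);
    ByteBuffer bb = ByteBuffer.wrap(rawHash);
    return new UUID(bb.getLong(), bb.getLong()); // a plausible toUUID(rawHash)
}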
/**
 * Creates a new instance of the Event class.
 *
 * @param ownerId    Owner Id (Stream Id, Segment Id, etc.).
 * @param routingKey Routing Key to use.
 * @param sequence   Event Sequence Number.
 * @param startTime  Start (creation) time, in nanos.
 * @param length     Desired length of the append.
 */
Event(int ownerId, int routingKey, int sequence, long startTime, int length) {
    this.ownerId = ownerId;
    this.routingKey = routingKey;
    this.sequence = sequence;
    this.startTime = startTime;
    this.serialization = new ByteArraySegment(serialize(length));
    this.contentLength = this.serialization.getLength() - PREFIX_LENGTH;
}
/**
 * Hashes the given {@link ArrayView} to a bucket in the range [0, numBuckets).
 *
 * @param array      The {@link ArrayView} to hash.
 * @param numBuckets The total number of buckets.
 * @return The bucket number.
 */
public int hashToBucket(ArrayView array, int numBuckets) {
    return Hashing.consistentHash(hash.hashBytes(array.array(), array.arrayOffset(), array.getLength()), numBuckets);
}
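Guava's Hashing.consistentHash maps a hash code into [0, numBuckets) so that increasing the bucket count remaps only about 1/numBuckets of the keys, instead of nearly all of them as a plain hash % numBuckets would. A hypothetical usage sketch, with Murmur3 standing in for the hash field above:

import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;

// Hypothetical sketch: stable bucket assignment for a byte-array key.
static int bucketSketch(byte[] keyBytes, int numBuckets) {
    HashFunction hf = Hashing.murmur3_128(); // assumption: the actual `hash` field may differ
    return Hashing.consistentHash(hf.hashBytes(keyBytes), numBuckets);
}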
/**
 * Calculates a Hash Code for the given {@link ArrayView}.
 *
 * @param array The {@link ArrayView} to calculate the hash for.
 * @return The hash code.
 */
public static int hashCode(ArrayView array) {
    return HASH.hash(array.array(), array.arrayOffset(), array.getLength());
}
/**
 * Creates a new instance of the HashedArray class.
 *
 * @param array An {@link ArrayView} to wrap.
 */
public HashedArray(ArrayView array) {
    super(array.array(), array.arrayOffset(), array.getLength());
    this.hashCode = hashCode(array);
}
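Pre-computing the hash lets the wrapped bytes serve as an efficient map key: lookups do not re-scan the underlying array. A hypothetical usage sketch, assuming HashedArray also overrides equals() to compare contents (required to honor the hashCode contract):

import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch: using HashedArray as a content-addressed map key.
static boolean indexSketch(ArrayView key, ArrayView value) {
    Map<HashedArray, ArrayView> index = new HashMap<>();
    index.put(new HashedArray(key), value);
    return index.containsKey(new HashedArray(key)); // true: same contents, same hash
}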
/**
 * Completes the given Write and makes any necessary internal updates.
 *
 * @param write The write to complete.
 */
private void completeWrite(Write write) {
    Timer t = write.complete();
    if (t != null) {
        this.metrics.bookKeeperWriteCompleted(write.data.getLength(), t.getElapsed());
    }
}
@Override
public CompletableFuture<Void> append(String streamName, Event event, Duration timeout) {
    ensureRunning();
    ArrayView s = event.getSerialization();
    // If the view starts at offset 0 we use the backing array directly; otherwise copy out the
    // viewed range. Arrays.copyOfRange takes an exclusive end index, so the copy must end at
    // arrayOffset() + getLength(), not at getLength().
    byte[] payload = s.arrayOffset() == 0
            ? s.array()
            : Arrays.copyOfRange(s.array(), s.arrayOffset(), s.arrayOffset() + s.getLength());
    return this.streamSegmentStore.append(streamName, payload, null, timeout)
            .exceptionally(ex -> attemptReconcile(ex, streamName, timeout));
}
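The fix above hinges on Arrays.copyOfRange taking (from, to) indices with an exclusive to, rather than (offset, length); a short illustration with made-up values:

byte[] backing = {0, 1, 2, 3, 4, 5, 6, 7};
// A view with arrayOffset() = 2 and getLength() = 3 covers bytes {2, 3, 4}:
byte[] viewed = java.util.Arrays.copyOfRange(backing, 2, 2 + 3); // to = offset + length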
@SafeVarargs
private final TableKeyBatch toUpdateBatch(KeyHasher hasher, List<TableKey>... keyLists) {
    val batch = TableKeyBatch.update();
    for (val keyList : keyLists) {
        for (val key : keyList) {
            batch.add(key, hasher.hash(key.getKey()), key.getKey().getLength());
        }
    }
    return batch;
}
private TableKeyBatch toRemoveBatch(List<TableKey> keyList) {
    val batch = TableKeyBatch.removal();
    for (val key : keyList) {
        batch.add(key, HASHER.hash(key.getKey()), key.getKey().getLength());
    }
    return batch;
}
private ArrayList<LogItem> toDataFrames(ArrayList<TestItem> items) throws Exception {
    val result = new ArrayList<LogItem>(RECORD_COUNT);
    Consumer<DataFrame> addCallback = df -> result.add(
            new LogItem(df.getData().getReader(), df.getData().getLength(), new TestLogAddress(result.size())));
    try (val dos = new DataFrameOutputStream(FRAME_SIZE, addCallback)) {
        for (TestItem item : items) {
            dos.startNewRecord();
            long beginSequence = result.size();
            dos.write(item.data);
            dos.endRecord();
            item.address = new TestLogAddress(result.size());
            for (long s = beginSequence; s <= item.address.getSequence(); s++) {
                item.dataFrames.add(s);
            }
        }
        dos.flush();
    }
    return result;
}
private long batchInsert(long insertOffset, ContainerKeyCache keyCache, HashMap<TestKey, CacheBucketOffset> expectedResult, Random rnd) {
    val insertBatches = new HashMap<Long, TableKeyBatch>();
    long highestOffset = 0L;
    for (int i = 0; i < KEYS_PER_SEGMENT; i++) {
        // We reuse the same key hash across multiple "segments", to make sure that segmentId does
        // indeed partition the cache.
        val key = newTableKey(rnd);
        val keyHash = KEY_HASHER.hash(key.getKey());
        for (long segmentId = 0; segmentId < SEGMENT_COUNT; segmentId++) {
            keyCache.updateSegmentIndexOffsetIfMissing(segmentId, () -> 0L);
            val insertBatch = insertBatches.computeIfAbsent(segmentId, ignored -> TableKeyBatch.update());
            val itemOffset = insertOffset + insertBatch.getLength();
            insertBatch.add(key, keyHash, key.getKey().getLength());
            expectedResult.put(new TestKey(segmentId, keyHash), new CacheBucketOffset(itemOffset, false));
            highestOffset = Math.max(highestOffset, itemOffset + key.getKey().getLength());
        }
    }

    applyBatches(insertBatches, insertOffset, keyCache);
    return highestOffset;
}
/**
 * Tests the ability to append a set of records to a DataFrame, serialize it, deserialize it, and then read those
 * records back.
 */
@Test
public void testSerialization() throws Exception {
    int maxFrameSize = 2 * 1024 * 1024;
    int maxRecordCount = 4500;
    int minRecordSize = 0;
    int maxRecordSize = 1024;
    List<ByteArraySegment> allRecords = DataFrameTestHelpers.generateRecords(maxRecordCount, minRecordSize, maxRecordSize, ByteArraySegment::new);

    // Append some records.
    DataFrame writeFrame = DataFrame.ofSize(maxFrameSize);
    int recordsAppended = appendRecords(allRecords, writeFrame);
    AssertExtensions.assertGreaterThan("Did not append enough records. Test may not be valid.",
            allRecords.size() / 2, recordsAppended);
    writeFrame.seal();
    val frameData = writeFrame.getData();
    Assert.assertEquals("Unexpected length from getData().", writeFrame.getLength(), frameData.getLength());

    // Read the records back by deserializing the frame.
    val contents = DataFrame.read(frameData.getReader(), frameData.getLength(), writeFrame.getAddress());
    DataFrameTestHelpers.checkReadRecords(contents, allRecords, b -> b);
}