/** * Serializes an entire RollingSegmentHandle into a new ByteArraySegment. * * @param handle The RollingSegmentHandle to serialize. * @return A ByteArraySegment with the serialization. */ @SneakyThrows(IOException.class) static ByteArraySegment serialize(RollingSegmentHandle handle) { try (EnhancedByteArrayOutputStream os = new EnhancedByteArrayOutputStream()) { //1. Policy Max Size. os.write(combine(KEY_POLICY_MAX_SIZE, Long.toString(handle.getRollingPolicy().getMaxLength()))); //2. Chunks. handle.chunks().forEach(chunk -> os.write(serializeChunk(chunk))); return os.getData(); } }
/**
 * Gets the current length of the stored data, computed asynchronously on this instance's executor.
 *
 * @param timeout Timeout for the operation (not used by this in-memory implementation).
 * @return A CompletableFuture that completes with the current data size.
 */
CompletableFuture<Long> getLength(Duration timeout) {
    return CompletableFuture.supplyAsync(() -> {
        long size;
        synchronized (this.data) {
            size = this.data.size();
        }

        return size;
    }, executorService());
}
/**
 * Serializes the given object to an in-memory buffer (RandomAccessOutputStream) and returns a view of it.
 *
 * @param object The object to serialize.
 * @return An ArrayView which represents the serialized data. This provides a view (offset+length) into a Java byte
 * array and has APIs to extract or copy the data out of there.
 * @throws IOException If an IO Exception occurred.
 */
public ByteArraySegment serialize(T object) throws IOException {
    EnhancedByteArrayOutputStream buffer = new EnhancedByteArrayOutputStream();
    serialize(buffer, object);
    return buffer.getData();
}
/**
 * Creates a new EnhancedByteArrayOutputStream pre-populated with the given data.
 *
 * @param initialData The bytes to seed the new stream with.
 * @return The newly created stream.
 */
@Override
protected EnhancedByteArrayOutputStream createInstance(byte[] initialData) {
    EnhancedByteArrayOutputStream stream = new EnhancedByteArrayOutputStream();
    stream.write(initialData);
    return stream;
}
/**
 * Accepts one ReadResultEntry, appends its (already-fetched) contents to the internal read buffer, and attempts to
 * parse a header and then the full entry from what has accumulated so far.
 *
 * @param entry The entry to process; its content future must already be complete.
 * @return True if more entries are needed, false if processing is finished (result done or an error occurred).
 */
@Override
public boolean processEntry(ReadResultEntry entry) {
    if (this.result.isDone()) {
        // We are done. Nothing else to do.
        return false;
    }

    try {
        // The caller is responsible for fetching contents before handing the entry to us.
        Preconditions.checkArgument(entry.getContent().isDone(), "Entry Contents is not yet fetched.");
        ReadResultEntryContents contents = entry.getContent().join();
        // TODO: most of these transfers are from memory to memory. It's a pity that we need an extra buffer to do the copy.
        // TODO: https://github.com/pravega/pravega/issues/2924
        this.readData.write(StreamHelpers.readAll(contents.getData(), contents.getLength()));
        if (this.header == null && this.readData.size() >= EntrySerializer.HEADER_LENGTH) {
            // We now have enough to read the header.
            this.header = this.serializer.readHeader(this.readData.getData());
        }

        if (this.header != null) {
            // Header known: try to consume the accumulated data. processReadData returns true when done,
            // so we invert it to signal whether more entries are still required.
            return !processReadData(this.readData.getData());
        }

        return true; // Not done yet.
    } catch (Throwable ex) {
        // Route any failure (including precondition violations) into the result future and stop reading.
        processError(ex);
        return false;
    }
}
final int writeSize = 1000; final String segmentName = "Segment"; val writtenData = new EnhancedByteArrayOutputStream(); final Random rnd = new Random(0); int currentEpoch = 1; () -> { rnd.nextBytes(writeBuffer); return oldStorage.write(handle, writtenData.size(), new ByteArrayInputStream(writeBuffer), writeBuffer.length, TIMEOUT) .thenRun(() -> writtenData.write(writeBuffer)); }, executorService()); byte[] expectedData = writtenData.toByteArray(); byte[] readData = new byte[expectedData.length]; @Cleanup
private <T> void testEncodeDecode(BiConsumerWithException<RevisionDataOutputStream, T> write, FunctionWithException<RevisionDataInputStream, T> read, BiFunction<RevisionDataOutputStream, T, Integer> getLength, T value, BiPredicate<T, T> equalityTester) throws Exception { @Cleanup val os = new EnhancedByteArrayOutputStream(); @Cleanup val rdos = RevisionDataOutputStream.wrap(os); write.accept(rdos, value); rdos.close(); os.close(); val actualLength = os.size() - Integer.BYTES; // Subtract 4 because this is the Length being encoded. Assert.assertEquals("Unexpected length for value " + value, (int) getLength.apply(rdos, value), actualLength); @Cleanup val rdis = RevisionDataInputStream.wrap(os.getData().getReader()); val actualValue = read.apply(rdis); Assert.assertTrue(String.format("Encoding/decoding failed for %s (decoded %s).", value, actualValue), equalityTester.test(value, actualValue)); }
@Override public CompletableFuture<Long> append(byte[] data, Collection<AttributeUpdate> attributeUpdates, Duration timeout) { return CompletableFuture.supplyAsync(() -> { // Note that this append is not atomic (data & attributes) - but for testing purposes it does not matter as // this method should only be used for constructing the test data. long offset; synchronized (this) { offset = this.contents.size(); this.contents.write(data); if (attributeUpdates != null) { val updatedValues = new HashMap<UUID, Long>(); attributeUpdates.forEach(update -> collectAttributeValue(update, updatedValues)); this.metadata.updateAttributes(updatedValues); } this.metadata.setLength(this.contents.size()); } return offset; }, this.executor); }
CompletableFuture<ByteArraySegment> read(long offset, int length, Duration timeout) { return CompletableFuture.supplyAsync(() -> { synchronized (this.data) { if (this.checkOffsets.get()) { // We want to make sure that we actually read pages that we wrote, and not from arbitrary locations // in the data source. Preconditions.checkArgument(this.offsets.isEmpty() || this.offsets.getOrDefault(offset, false), "Offset not registered or already obsolete: " + offset); } return new ByteArraySegment(this.data.getData().subSegment((int) offset, length).getCopy()); } }, executorService()); }
/**
 * Creates a new instance of the AsyncTableEntryReader class.
 *
 * @param serializer The EntrySerializer used to interpret the bytes accumulated during the read.
 * @param timer      Timer for the whole operation.
 */
private AsyncTableEntryReader(@NonNull EntrySerializer serializer, @NonNull TimeoutTimer timer) {
    this.serializer = serializer;
    this.timer = timer;
    this.readData = new EnhancedByteArrayOutputStream(); // Accumulates raw bytes as entries are read.
    this.result = new CompletableFuture<>(); // Completed once the read finishes (or fails).
}
/**
 * Writes the entire contents of the given array to this stream.
 *
 * @param array The array to write.
 */
@Override
public void write(byte[] array) {
    // Delegate to the ranged overload, covering the whole array.
    final int count = array.length;
    this.write(array, 0, count);
}
/**
 * Creates a new EnhancedByteArrayOutputStream seeded with the given initial contents.
 *
 * @param initialData The bytes to write into the new stream.
 * @return The newly created stream.
 */
@Override
protected EnhancedByteArrayOutputStream createInstance(byte[] initialData) {
    EnhancedByteArrayOutputStream result = new EnhancedByteArrayOutputStream();
    result.write(initialData);
    return result;
}
/**
 * Verifies that getLength reports exactly the number of bytes an encoder writes for a value.
 *
 * @param write     Encodes the value onto a RevisionDataOutputStream.
 * @param getLength Reports the expected encoded length for the value.
 * @param value     The value under test.
 */
private <T> void testLength(BiConsumerWithException<RevisionDataOutputStream, T> write,
                            BiFunction<RevisionDataOutputStream, T, Integer> getLength,
                            T value) throws Exception {
    @Cleanup
    val os = new EnhancedByteArrayOutputStream();
    @Cleanup
    val rdos = RevisionDataOutputStream.wrap(os);
    int lengthBefore = os.getData().getLength();
    write.accept(rdos, value);
    rdos.flush();

    // The difference in stream length is what the encoder actually produced.
    int bytesWritten = os.getData().getLength() - lengthBefore;
    int reportedLength = getLength.apply(rdos, value);
    Assert.assertEquals(String.format("Unexpected length for '%s'.", value), bytesWritten, reportedLength);
}
/**
 * Extracts the stream's current contents as a standalone byte array.
 *
 * @param stream The stream to read from.
 * @return A byte array with a copy of the stream's contents.
 */
@Override
@SneakyThrows(IOException.class)
protected byte[] getData(EnhancedByteArrayOutputStream stream) {
    val segment = stream.getData();
    int length = segment.getLength();
    return StreamHelpers.readAll(segment.getReader(), length);
}
}
/**
 * Creates a new DataSource with empty contents and no registered offsets.
 */
DataSource() {
    this.offsets = new HashMap<>();
    this.data = new EnhancedByteArrayOutputStream();
}
/**
 * Writes a range of the given buffer at an arbitrary position within this stream, overwriting existing bytes
 * and appending (via the two-argument write) any portion that extends past the current end.
 *
 * @param buffer         The source array to copy from.
 * @param bufferOffset   The offset within buffer to start copying at.
 * @param length         The number of bytes to copy.
 * @param streamPosition The position within this stream to start writing at.
 */
@Override
public void write(byte[] buffer, int bufferOffset, int length, int streamPosition) {
    // Validate that [bufferOffset, bufferOffset + length) lies within buffer.
    // NOTE(review): bufferOffset + length could overflow int for extreme values and slip past this
    // check; System.arraycopy would still reject it, so the net effect is only a different exception type.
    if (bufferOffset < 0 || length < 0 || (length > 0 && bufferOffset + length > buffer.length)) {
        throw new ArrayIndexOutOfBoundsException("bufferOffset and length must refer to a range within buffer.");
    }

    // NOTE(review): checkElementIndex requires streamPosition < buf.length, so writing at exactly the current
    // end (a pure append) is rejected here even when length == 0 — presumably intentional; confirm with callers.
    Preconditions.checkElementIndex(streamPosition, this.buf.length, "streamPosition");
    if (streamPosition + length <= this.buf.length) {
        // This fits entirely within our buffer.
        System.arraycopy(buffer, bufferOffset, this.buf, streamPosition, length);
    } else {
        // This fits partially within our buffer; as such this will result in an increase.
        // Overwrite up to the current end, then append the remainder through the growing write overload.
        int splitPos = this.buf.length - streamPosition;
        System.arraycopy(buffer, bufferOffset, this.buf, streamPosition, splitPos);
        write(buffer, bufferOffset + splitPos, length - splitPos);
    }
}
/**
 * Reads the full contents of the given stream into a plain byte array.
 *
 * @param stream The stream whose contents to extract.
 * @return A byte array copy of the stream's data.
 */
@Override
@SneakyThrows(IOException.class)
protected byte[] getData(EnhancedByteArrayOutputStream stream) {
    val contents = stream.getData();
    return StreamHelpers.readAll(contents.getReader(), contents.getLength());
}
}
/**
 * Tests the ability to serialize and deserialize objects sharing a common base class using a
 * RandomAccessOutputStream-backed OutputStream.
 */
@Test
public void testMultiTypeRandomOutput() throws IOException {
    EnhancedByteArrayOutputStream stream = new EnhancedByteArrayOutputStream();
    testMultiType(stream, EnhancedByteArrayOutputStream::getData);
}
synchronized (this.data) { if (toWrite.isEmpty()) { return (long) this.data.size(); long originalOffset = this.data.size(); long expectedOffset = this.data.size(); for (val e : toWrite) { Preconditions.checkArgument(expectedOffset == e.getKey(), "Bad Offset. Expected %s, given %s.", assert expectedOffset == this.data.size() : "unexpected number of bytes copied"; .map(Map.Entry::getKey) .min(Long::compare) .orElse((long) this.data.size()); Assert.assertEquals("Unexpected truncation offset.", expectedTruncationOffset, truncateOffset); .collect(Collectors.toList()); toRemove.forEach(this.offsets::remove); return (long) this.data.size();
@Override public ReadResult read(long offset, int maxLength, Duration timeout) { // We actually get a view of the data frozen in time, as any changes to the contents field after exiting from the // synchronized block may create a new buffer, but we don't care as the data we already have won't change. ByteArraySegment dataView; synchronized (this) { dataView = this.contents.getData(); } // We get a slice of the data view, and return a ReadResultMock with entry lengths of 3. return new ReadResultMock(offset, dataView.subSegment((int) offset, dataView.getLength() - (int) offset), maxLength, 3); }