@Override
public void write(byte[] array) {
    this.write(array, 0, array.length);
}
@Override
public void write(byte[] buffer, int bufferOffset, int length, int streamPosition) {
    if (bufferOffset < 0 || length < 0 || (length > 0 && bufferOffset + length > buffer.length)) {
        throw new ArrayIndexOutOfBoundsException("bufferOffset and length must refer to a range within buffer.");
    }

    Preconditions.checkElementIndex(streamPosition, this.buf.length, "streamPosition");
    if (streamPosition + length <= this.buf.length) {
        // The write fits entirely within our existing buffer.
        System.arraycopy(buffer, bufferOffset, this.buf, streamPosition, length);
    } else {
        // The write fits only partially within our existing buffer; the remainder is appended,
        // which increases the stream's size.
        int splitPos = this.buf.length - streamPosition; // Number of bytes that fit in the existing buffer.
        System.arraycopy(buffer, bufferOffset, this.buf, streamPosition, splitPos);
        write(buffer, bufferOffset + splitPos, length - splitPos);
    }
}
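A minimal usage sketch of the positional overload above; `EnhancedByteArrayOutputStream` and the two `write` overloads come from the snippets, while the concrete byte values are illustrative:

// Illustrative usage of the positional write; values are made up.
EnhancedByteArrayOutputStream os = new EnhancedByteArrayOutputStream();
os.write(new byte[]{0, 1, 2, 3, 4});   // Stream contents: {0, 1, 2, 3, 4}.

// Overwrite two bytes in place, starting at stream position 1.
os.write(new byte[]{9, 9}, 0, 2, 1);   // Stream contents: {0, 9, 9, 3, 4}.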
/**
 * Serializes an entire RollingSegmentHandle into a new ByteArraySegment.
 *
 * @param handle The RollingSegmentHandle to serialize.
 * @return A ByteArraySegment with the serialization.
 */
@SneakyThrows(IOException.class)
static ByteArraySegment serialize(RollingSegmentHandle handle) {
    try (EnhancedByteArrayOutputStream os = new EnhancedByteArrayOutputStream()) {
        // 1. Policy Max Size.
        os.write(combine(KEY_POLICY_MAX_SIZE, Long.toString(handle.getRollingPolicy().getMaxLength())));

        // 2. Chunks.
        handle.chunks().forEach(chunk -> os.write(serializeChunk(chunk)));
        return os.getData();
    }
}
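The `combine` and `serializeChunk` helpers are outside this snippet. As a loudly-labeled assumption, here is a sketch of what `combine` might look like if the handle is serialized as textual key=value lines; the separator characters and charset are guesses, not confirmed by the source:

// HYPOTHETICAL sketch: assumes a "key=value\n" text encoding; the actual
// separators and charset used by the real serializer may differ.
private static byte[] combine(String key, String value) {
    return (key + "=" + value + "\n").getBytes(java.nio.charset.StandardCharsets.UTF_8);
}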
@Override
protected EnhancedByteArrayOutputStream createInstance(byte[] initialData) {
    val s = new EnhancedByteArrayOutputStream();
    s.write(initialData);
    return s;
}
@Override
public boolean processEntry(ReadResultEntry entry) {
    if (this.result.isDone()) {
        // We are done. Nothing else to do.
        return false;
    }

    try {
        Preconditions.checkArgument(entry.getContent().isDone(), "Entry Contents is not yet fetched.");
        ReadResultEntryContents contents = entry.getContent().join();

        // TODO: most of these transfers are from memory to memory. It's a pity that we need an extra buffer to do the copy.
        // TODO: https://github.com/pravega/pravega/issues/2924
        this.readData.write(StreamHelpers.readAll(contents.getData(), contents.getLength()));
        if (this.header == null && this.readData.size() >= EntrySerializer.HEADER_LENGTH) {
            // We now have enough to read the header.
            this.header = this.serializer.readHeader(this.readData.getData());
        }

        if (this.header != null) {
            return !processReadData(this.readData.getData());
        }

        return true; // Not done yet.
    } catch (Throwable ex) {
        processError(ex);
        return false;
    }
}
@Override
public CompletableFuture<Long> append(byte[] data, Collection<AttributeUpdate> attributeUpdates, Duration timeout) {
    return CompletableFuture.supplyAsync(() -> {
        // Note that this append is not atomic (data & attributes) - but for testing purposes it does not matter as
        // this method should only be used for constructing the test data.
        long offset;
        synchronized (this) {
            offset = this.contents.size();
            this.contents.write(data);
            if (attributeUpdates != null) {
                val updatedValues = new HashMap<UUID, Long>();
                attributeUpdates.forEach(update -> collectAttributeValue(update, updatedValues));
                this.metadata.updateAttributes(updatedValues);
            }

            this.metadata.setLength(this.contents.size());
        }

        return offset;
    }, this.executor);
}
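A hedged usage sketch of the test append above; the `segment` variable, attribute id, payload, and timeout are illustrative assumptions, and `AttributeUpdate`/`AttributeUpdateType` are assumed to follow a (UUID, type, value) constructor:

// HYPOTHETICAL usage; 'segment' stands for an instance of the test class above.
byte[] payload = "hello".getBytes(java.nio.charset.StandardCharsets.UTF_8);
val updates = Collections.singletonList(
        new AttributeUpdate(UUID.randomUUID(), AttributeUpdateType.Replace, 1L));
long offset = segment.append(payload, updates, Duration.ofSeconds(10)).join();
// 'offset' is the position at which 'payload' was appended.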
rnd.nextBytes(writeBuffer);
return oldStorage.write(handle, writtenData.size(), new ByteArrayInputStream(writeBuffer), writeBuffer.length, TIMEOUT)
        .thenRun(() -> writtenData.write(writeBuffer));
}, executorService());