/**
 * Attempts to read the contents of the InputStream and load it into this ByteArraySegment. Up to getLength() bytes
 * will be read from the InputStream, but no guarantees are made that the entire ByteArraySegment will be populated.
 * <p>
 * Only attempts to read the data, and does not expect any other header/footer information in the InputStream. This
 * method is the exact reverse of writeTo().
 *
 * @param stream The InputStream to read from.
 * @return The number of bytes read. This will be less than or equal to getLength().
 * @throws IOException If the InputStream threw one.
 */
public int readFrom(InputStream stream) throws IOException {
    // Delegate to StreamHelpers, targeting exactly the sub-range of the backing array this segment covers.
    final int bytesRead = StreamHelpers.readAll(stream, this.array, this.startOffset, this.length);
    return bytesRead;
}
/**
 * Reads a number of bytes from the given InputStream and returns it as the given byte array.
 *
 * @param source The InputStream to read.
 * @param length The number of bytes to read.
 * @return A byte array containing the contents of the Stream.
 * @throws IOException If unable to read from the given InputStream.
 */
public static byte[] readAll(InputStream source, int length) throws IOException {
    final byte[] result = new byte[length];
    final int bytesRead = readAll(source, result, 0, length);

    // The stream ended before we could fill the array; the caller asked for more than was available.
    Preconditions.checkArgument(bytesRead == length,
            "Invalid value for length (%s). Was only able to read %s bytes from the given InputStream.",
            length, bytesRead);
    return result;
}
}
/**
 * Copy all of the contents provided into a byteBuffer and return it.
 */
@SneakyThrows(IOException.class)
private ByteBuffer copyData(List<ReadResultEntryContents> contents) {
    // Pre-compute the total size so a single buffer of exactly the right capacity can be allocated.
    int totalSize = 0;
    for (ReadResultEntryContents entry : contents) {
        totalSize += entry.getLength();
    }

    final ByteBuffer data = ByteBuffer.allocate(totalSize);
    int offset = 0;
    for (ReadResultEntryContents entry : contents) {
        int copied = StreamHelpers.readAll(entry.getData(), data.array(), offset, totalSize - offset);
        Preconditions.checkState(copied == entry.getLength(), "Read fewer bytes than available.");
        offset += copied;
    }

    return data;
}
// Copy exactly 'toCopy' bytes from dataStream into insertArray starting at insertOffset.
// StreamHelpers.readAll returns the number of bytes actually read, which is only less than
// 'toCopy' if the stream ended early.
int bytesCopied = StreamHelpers.readAll(dataStream, insertArray, insertOffset, toCopy); // NOTE(review): assert-only check below is a no-op unless the JVM runs with -ea; a short read would go unnoticed in production — TODO confirm the stream always holds 'toCopy' bytes.
assert bytesCopied == toCopy : "unable to read the requested number of bytes"; } catch (IOException ex) {
/**
 * Deserializes a TestLogItem from the given InputStream: an 8-byte sequence number,
 * followed by a 4-byte payload length, followed by the payload bytes themselves.
 *
 * @param input The InputStream to read from.
 * @throws IOException If the InputStream threw one.
 */
TestLogItem(InputStream input) throws IOException {
    DataInputStream in = new DataInputStream(input);
    this.sequenceNumber = in.readLong();
    int payloadLength = in.readInt();
    this.data = new byte[payloadLength];
    int bytesRead = StreamHelpers.readAll(in, this.data, 0, payloadLength);
    assert bytesRead == this.data.length
            : "SeqNo " + this.sequenceNumber + ": expected to read " + this.data.length + " bytes, but read " + bytesRead;

    // Deserialized items never simulate a mid-write failure.
    this.failAfterCompleteRatio = -1;
}
/** * Reads the remaining contents of the ReadResult into the given array. This will stop when the given target has been * filled or when the current end of the Segment has been reached. * * @param target A byte array where the ReadResult will be read into. * @param fetchTimeout A timeout to use when needing to fetch the contents of an entry that is not in the Cache. * @return The number of bytes read. */ @VisibleForTesting @SneakyThrows(IOException.class) default int readRemaining(byte[] target, Duration fetchTimeout) { int bytesRead = 0; while (hasNext() && bytesRead < target.length) { ReadResultEntry entry = next(); if (entry.getType() == ReadResultEntryType.EndOfStreamSegment || entry.getType() == ReadResultEntryType.Future) { // Reached the end. break; } else if (!entry.getContent().isDone()) { entry.requestContent(fetchTimeout); } ReadResultEntryContents contents = entry.getContent().join(); StreamHelpers.readAll(contents.getData(), target, bytesRead, Math.min(contents.getLength(), target.length - bytesRead)); bytesRead += contents.getLength(); } return bytesRead; }
@Override
@SneakyThrows(IOException.class)
protected byte[] getData(FixedByteArrayOutputStream stream) {
    // Snapshot the stream's current contents into a standalone byte array.
    val segment = stream.getData();
    return StreamHelpers.readAll(segment.getReader(), segment.getLength());
}
}
/**
 * Serializes the given handle into a standalone byte array.
 */
@SneakyThrows(IOException.class)
private byte[] serialize(RollingSegmentHandle handle) {
    val serialized = HandleSerializer.serialize(handle);
    return StreamHelpers.readAll(serialized.getReader(), serialized.getLength());
}
@Override
@SneakyThrows(IOException.class)
protected byte[] getData(FixedByteArrayOutputStream stream) {
    // Copy the stream's backing data out into a fresh byte array.
    val contents = stream.getData();
    return StreamHelpers.readAll(contents.getReader(), contents.getLength());
}
}
@Override
@SneakyThrows(IOException.class)
protected byte[] getData(EnhancedByteArrayOutputStream stream) {
    // Materialize the accumulated stream contents as an independent byte array.
    val buffer = stream.getData();
    return StreamHelpers.readAll(buffer.getReader(), buffer.getLength());
}
}
@Override
@SneakyThrows(IOException.class)
protected byte[] getData(EnhancedByteArrayOutputStream stream) {
    // Drain the stream's data into a new, exactly-sized byte array.
    val written = stream.getData();
    return StreamHelpers.readAll(written.getReader(), written.getLength());
}
}
/**
 * Copies the (already-fetched) contents of the given entry into readContents.
 *
 * @param e The ReadResultEntry whose content to process; its content future must be complete.
 * @return true to continue processing, false if an error occurred (reported via processError).
 */
@Override
public boolean processEntry(ReadResultEntry e) {
    ReadResultEntryContents c = e.getContent().join();
    byte[] data = new byte[c.getLength()];
    try {
        // FIX: verify the read filled the whole array. The return value was previously ignored,
        // so a short read (stream ending early) would silently append truncated data.
        int bytesRead = StreamHelpers.readAll(c.getData(), data, 0, data.length);
        if (bytesRead != data.length) {
            throw new IllegalStateException(
                    "Expected to read " + data.length + " bytes but only read " + bytesRead + ".");
        }
        readContents.write(data);
        return true;
    } catch (Exception ex) {
        processError(ex);
        return false;
    }
}
/**
 * Accumulates the given entry's contents into this.readData, parsing the entry header as soon as
 * enough bytes (EntrySerializer.HEADER_LENGTH) have arrived, and then attempting to process the
 * accumulated payload. Stateful: this.readData and this.header persist across invocations.
 *
 * @return false when done (result completed, processReadData finished, or an error occurred); true otherwise.
 */
@Override
public boolean processEntry(ReadResultEntry entry) {
    if (this.result.isDone()) {
        // We are done. Nothing else to do.
        return false;
    }

    try {
        // Contents must already be fetched by the caller; this processor never blocks on retrieval.
        Preconditions.checkArgument(entry.getContent().isDone(), "Entry Contents is not yet fetched.");
        ReadResultEntryContents contents = entry.getContent().join();
        // TODO: most of these transfers are from memory to memory. It's a pity that we need an extra buffer to do the copy.
        // TODO: https://github.com/pravega/pravega/issues/2924
        this.readData.write(StreamHelpers.readAll(contents.getData(), contents.getLength()));
        if (this.header == null && this.readData.size() >= EntrySerializer.HEADER_LENGTH) {
            // We now have enough to read the header.
            this.header = this.serializer.readHeader(this.readData.getData());
        }

        if (this.header != null) {
            // Header known: hand the accumulated bytes to the payload processor; stop iterating once it reports done.
            return !processReadData(this.readData.getData());
        }

        return true; // Not done yet.
    } catch (Throwable ex) {
        // Catch Throwable (not just Exception) so any failure is surfaced through processError
        // rather than escaping the iteration callback.
        processError(ex);
        return false;
    }
}
/**
 * Tests the readAll overload that allocates and returns a new array of the requested length
 * (not the overload that copies into an existing array — the original comment was misleading).
 * Also verifies that requesting more bytes than the stream contains fails with IllegalArgumentException.
 */
@Test
public void testReadAllNewArray() throws IOException {
    final int itemCount = 100;
    final byte[] buffer = new byte[itemCount];
    for (int i = 0; i < itemCount; i++) {
        buffer[i] = (byte) i;
    }

    byte[] readFullyData = StreamHelpers.readAll(new TestInputStream(buffer), buffer.length);
    Assert.assertArrayEquals(buffer, readFullyData);

    AssertExtensions.assertThrows(
            "readAll accepted a length higher than the given input stream length.",
            () -> StreamHelpers.readAll(new TestInputStream(buffer), buffer.length + 1),
            ex -> ex instanceof IllegalArgumentException);
}
/**
 * Reads the given byte range of the object identified by (bucketName, key) from the local
 * file-system backing store and returns it as an in-memory stream.
 *
 * @param bucketName The bucket containing the object.
 * @param key        The object's key (relative path under the bucket).
 * @param range      The inclusive byte range [first, last] to read.
 * @return A ByteArrayInputStream over the requested range.
 * @throws S3Exception (NoSuchKey) if the file cannot be opened or read.
 */
@Override
public InputStream readObjectStream(String bucketName, String key, Range range) {
    byte[] bytes = new byte[Math.toIntExact(range.getLast() + 1 - range.getFirst())];
    Path path = Paths.get(this.baseDir, bucketName, key);
    FileInputStream returnStream;
    try {
        returnStream = new FileInputStream(path.toFile());
        if (range.getFirst() != 0) {
            // BUG FIX: skip() may skip fewer bytes than requested; the old loop re-requested the
            // full range.getFirst() on every iteration, over-skipping after a partial skip and
            // returning data from the wrong offset. Only request the bytes still remaining.
            long bytesSkipped = 0;
            while (bytesSkipped < range.getFirst()) {
                bytesSkipped += returnStream.skip(range.getFirst() - bytesSkipped);
            }
        }

        StreamHelpers.readAll(returnStream, bytes, 0, bytes.length);
        return new ByteArrayInputStream(bytes);
    } catch (IOException e) {
        throw new S3Exception("NoSuchKey", HttpStatus.SC_NOT_FOUND, "NoSuchKey", "");
    }
}
/**
 * Validates that the given entry's contents are ready and match the expected entry at the
 * current read position, then advances the read counters.
 *
 * @return true to keep processing; false if a validation/processing error occurred.
 */
@Override
public boolean processEntry(ReadResultEntry e) {
    try {
        Assert.assertTrue("Received Entry that is not ready to serve data yet.", Futures.isSuccessful(e.getContent()));
        ReadResultEntryContents c = e.getContent().join();
        byte[] actualData = new byte[c.getLength()];
        StreamHelpers.readAll(c.getData(), actualData, 0, actualData.length);

        // Claim the next expected-entry slot and verify we have not read past the end.
        int entryIndex = readEntryCount.getAndIncrement();
        AssertExtensions.assertLessThan("Read too many entries.", entries.size(), entryIndex);
        byte[] expected = entries.get(entryIndex);
        Assert.assertArrayEquals(
                String.format("Unexpected read contents after reading %d entries.", entryIndex + 1),
                expected, actualData);
        readCount.incrementAndGet();
    } catch (Exception ex) {
        processError(ex);
        return false;
    }

    return true;
}
// Re-read the full serialized form (header included, hence backing up by HEADER_LENGTH) from the
// source and wrap it in a ByteArraySegment.
// NOTE(review): assumes the source holds at least getTotalLength() bytes at that offset — readAll
// throws IllegalArgumentException otherwise; TODO confirm with the caller.
this.serialization = new ByteArraySegment(StreamHelpers.readAll( source.getReader(sourceOffset - HEADER_LENGTH, getTotalLength()), getTotalLength())); } else {
protected void verifyReads(DurableDataLog log, TreeMap<LogAddress, byte[]> writeData) throws Exception { @Cleanup CloseableIterator<DurableDataLog.ReadItem, DurableDataLogException> reader = log.getReader(); Iterator<Map.Entry<LogAddress, byte[]>> expectedIterator = writeData.entrySet().iterator(); while (true) { DurableDataLog.ReadItem nextItem = reader.getNext(); if (nextItem == null) { Assert.assertFalse("Reader reached the end but there were still items to be read.", expectedIterator.hasNext()); break; } Assert.assertTrue("Reader has more items but there should not be any more items to be read.", expectedIterator.hasNext()); // Verify sequence number, as well as payload. val expected = expectedIterator.next(); Assert.assertEquals("Unexpected sequence number.", expected.getKey().getSequence(), nextItem.getAddress().getSequence()); val actualPayload = StreamHelpers.readAll(nextItem.getPayload(), nextItem.getLength()); Assert.assertArrayEquals("Unexpected payload for sequence number " + expected.getKey(), expected.getValue(), actualPayload); } }
/**
 * Verifies readDirect() for every segment: a direct read covering [startOffset, length) must
 * return exactly the recorded contents, and reads into a truncated region must be rejected.
 */
private void checkReadIndexDirect(HashMap<Long, ByteArrayOutputStream> segmentContents, TestContext context) throws Exception {
    for (Map.Entry<Long, ByteArrayOutputStream> e : segmentContents.entrySet()) {
        long segmentId = e.getKey();
        val sm = context.metadata.getStreamSegmentMetadata(segmentId);
        long segmentLength = sm.getLength();
        long startOffset = Math.min(sm.getStartOffset(), sm.getStorageLength());
        byte[] expectedData = e.getValue().toByteArray();

        if (startOffset > 0) {
            // The segment has been truncated; offset 0 is no longer readable.
            AssertExtensions.assertThrows(
                    "Read request for a truncated offset was not rejected.",
                    () -> context.readIndex.readDirect(segmentId, 0, 1),
                    ex -> ex instanceof IllegalArgumentException);
        }

        int readLength = (int) (segmentLength - startOffset);
        InputStream readData = context.readIndex.readDirect(segmentId, startOffset, readLength);
        byte[] actualData = StreamHelpers.readAll(readData, readLength);
        AssertExtensions.assertArrayEquals("Unexpected data read.", expectedData, (int) startOffset, actualData, 0, actualData.length);
    }
}
/**
 * Reads up to 'length' bytes from the given segment starting at 'offset', placing them into
 * 'buffer' at 'bufferOffset'.
 *
 * @return The number of bytes actually read.
 * @throws ArrayIndexOutOfBoundsException     If offset, bufferOffset or length is negative.
 * @throws StreamSegmentNotExistsException    If the backing object could not be found.
 */
private int doRead(SegmentHandle handle, long offset, byte[] buffer, int bufferOffset, int length) throws Exception {
    long traceId = LoggerHelpers.traceEnter(log, "read", handle.getSegmentName(), offset, bufferOffset, length);
    boolean invalidArgs = offset < 0 || bufferOffset < 0 || length < 0;
    if (invalidArgs) {
        throw new ArrayIndexOutOfBoundsException();
    }

    try (InputStream reader = client.readObjectStream(config.getBucket(),
            config.getRoot() + handle.getSegmentName(), Range.fromOffsetLength(offset, length))) {
        /*
         * TODO: This implementation assumes that if S3Client.readObjectStream returns null, then
         * the object does not exist and we throw StreamNotExistsException. The javadoc, however,
         * says that this call returns null in case of 304 and 412 responses. We need to
         * investigate what these responses mean precisely and react accordingly.
         *
         * See https://github.com/pravega/pravega/issues/1549
         */
        if (reader == null) {
            throw new StreamSegmentNotExistsException(handle.getSegmentName());
        }

        int bytesRead = StreamHelpers.readAll(reader, buffer, bufferOffset, length);
        LoggerHelpers.traceLeave(log, "read", traceId, bytesRead);
        return bytesRead;
    }
}