/**
 * Copy from a FS input stream to an output stream. Thread-safe: the copy
 * buffer comes from the thread-local {@code COPY_BUFFER}, so concurrent
 * callers never share a scratch array.
 *
 * @param from a {@link SeekableInputStream}
 * @param to any {@link PositionOutputStream}
 * @param start where in the from stream to start copying
 * @param length the number of bytes to copy
 * @throws IOException if there is an error while reading or writing
 * @throws IllegalArgumentException if the input ends before {@code length}
 *         bytes could be copied
 */
private static void copy(SeekableInputStream from, PositionOutputStream to,
    long start, long length) throws IOException {
  LOG.debug("Copying {} bytes at {} to {}", length, start, to.getPos());
  from.seek(start);
  long bytesCopied = 0;
  byte[] buffer = COPY_BUFFER.get();
  while (bytesCopied < length) {
    long bytesLeft = length - bytesCopied;
    // Read at most one buffer's worth; the cast is safe because the
    // min is bounded by buffer.length, an int.
    int bytesRead = from.read(buffer, 0, (int) Math.min(buffer.length, bytesLeft));
    if (bytesRead < 0) {
      // Parenthesize the sum: the original "+ start + bytesCopied"
      // string-concatenated the two numbers (e.g. 100 and 5 -> "1005")
      // instead of reporting the absolute offset 105.
      throw new IllegalArgumentException(
          "Unexpected end of input file at " + (start + bytesCopied));
    }
    to.write(buffer, 0, bytesRead);
    bytesCopied += bytesRead;
  }
}
/**
 * Copy from a FS input stream to an output stream. Thread-safe: each thread
 * obtains its own scratch array from the thread-local {@code COPY_BUFFER}.
 *
 * @param from a {@link SeekableInputStream}
 * @param to any {@link PositionOutputStream}
 * @param start where in the from stream to start copying
 * @param length the number of bytes to copy
 * @throws IOException if there is an error while reading or writing
 * @throws IllegalArgumentException if the input ends before {@code length}
 *         bytes could be copied
 */
private static void copy(SeekableInputStream from, PositionOutputStream to,
    long start, long length) throws IOException {
  LOG.debug("Copying {} bytes at {} to {}", length, start, to.getPos());
  from.seek(start);
  long bytesCopied = 0;
  byte[] buffer = COPY_BUFFER.get();
  while (bytesCopied < length) {
    long bytesLeft = length - bytesCopied;
    // Cap each read at the buffer size; the cast is safe because the
    // min never exceeds buffer.length (an int).
    int bytesRead = from.read(buffer, 0, (int) Math.min(buffer.length, bytesLeft));
    if (bytesRead < 0) {
      // Bug fix: the sum must be parenthesized. "... at " + start + bytesCopied
      // concatenated the two longs as text instead of adding them, producing
      // a misleading offset in the message.
      throw new IllegalArgumentException(
          "Unexpected end of input file at " + (start + bytesCopied));
    }
    to.write(buffer, 0, bytesRead);
    bytesCopied += bytesRead;
  }
}
/**
 * Loads exactly {@code pageLength} bytes from the input stream into
 * {@code target}, then positions the writer index at the end of the data.
 *
 * @param target destination buffer; cleared before loading
 * @param pageLength number of bytes to load
 * @throws IOException if the stream cannot be read, including EOF before
 *         {@code pageLength} bytes were available
 */
public void loadPage(DrillBuf target, int pageLength) throws IOException {
  target.clear();
  // Use readFully rather than a single read(ByteBuffer): read() may legally
  // return after a partial transfer, which would leave uninitialized bytes
  // inside [0, pageLength) while writerIndex below claims they are valid.
  // readFully loops until the buffer is filled or throws EOFException.
  HadoopStreams.wrap(input).readFully(target.nioBuffer(0, pageLength));
  target.writerIndex(pageLength);
}
/**
 * Reads {@code len} bytes from the wrapped input stream into {@code buf},
 * looping until the full length has been read or the stream signals EOF.
 * Synchronized so only one thread reads from the underlying stream at a time.
 *
 * @param buf destination buffer; cleared before reading, so data is always
 *            written starting at position 0
 * @param off NOTE(review): this parameter is never used -- reading always
 *            starts at offset 0 of {@code buf}; confirm callers expect that
 * @param len number of bytes to read
 * @return {@code len} on success, or the negative value returned by the
 *         stream if EOF was reached
 * @throws IOException if the underlying stream read fails
 */
public synchronized int read(DrillBuf buf, int off, int len) throws IOException {
  buf.clear();
  // Direct ByteBuffer view over the first len bytes of the Drill buffer;
  // each stream read advances its position, so the loop fills it in place.
  ByteBuffer directBuffer = buf.nioBuffer(0, len);
  int lengthLeftToRead = len;
  SeekableInputStream seekableInputStream = HadoopStreams.wrap(getInputStream());
  while (lengthLeftToRead > 0) {
    if(logger.isTraceEnabled()) {
      logger.trace("PERF: Disk read start. {}, StartOffset: {}, TotalByteSize: {}", this.streamId, this.startOffset, this.totalByteSize);
    }
    Stopwatch timer = Stopwatch.createStarted();
    int bytesRead = seekableInputStream.read(directBuffer);
    if (bytesRead < 0) {
      // NOTE(review): if EOF arrives after a partial read, the bytes already
      // copied into buf are silently discarded (writerIndex is never set) and
      // the raw negative value is returned -- confirm callers treat any
      // negative return as "no data delivered".
      return bytesRead;
    }
    lengthLeftToRead -= bytesRead;
    if(logger.isTraceEnabled()) {
      // Elapsed time is captured in microseconds and converted to fractional
      // milliseconds for the log message.
      logger.trace(
        "PERF: Disk read complete. {}, StartOffset: {}, TotalByteSize: {}, BytesRead: {}, Time: {} ms",
        this.streamId, this.startOffset, this.totalByteSize, bytesRead,
        ((double) timer.elapsed(TimeUnit.MICROSECONDS)) / 1000);
    }
  }
  // Full len bytes are now in buf; expose them to readers of the DrillBuf.
  buf.writerIndex(len);
  return len;
}
if (bytesToRead > 0) { try { nBytes = HadoopStreams.wrap(getInputStream()).read(directBuffer); } catch (Exception e) { logger.error("Error reading from stream {}. Error was : {}", this.streamId, e.getMessage());