/** Exposes this buffer's readable bytes as a one-element array of NIO buffers. */
@Override
public ByteBuffer[] nioBuffers() {
  final ByteBuffer single = nioBuffer();
  return new ByteBuffer[] {single};
}
/** Exposes the given region of this buffer as a one-element array of NIO buffers. */
@Override
public ByteBuffer[] nioBuffers(int index, int length) {
  final ByteBuffer single = nioBuffer(index, length);
  return new ByteBuffer[] {single};
}
/** Wraps the [offset, offset + length) region of an ArrowBuf as a Parquet BytesInput without copying. */
public static BytesInput asBytesInput(ArrowBuf buf, int offset, int length) throws IOException {
  final ByteBuffer view = buf.nioBuffer(offset, length);
  return BytesInput.from(view, 0, length);
}
/**
 * Reads exactly {@code bytesToRead} bytes from the stream into the start of the output
 * buffer, then positions the buffer's writer index past the bytes read.
 *
 * @param input stream to read from
 * @param outputBuffer destination buffer; bytes are written at offset 0
 * @param bytesToRead exact number of bytes required
 * @throws EOFException if the stream ends before all bytes are read
 * @throws IOException if the underlying read fails
 */
public static void readFromStream(FSDataInputStream input, final ArrowBuf outputBuffer, final int bytesToRead) throws IOException {
  final ByteBuffer directBuffer = outputBuffer.nioBuffer(0, bytesToRead);
  int lengthLeftToRead = bytesToRead;
  while (lengthLeftToRead > 0) {
    // Stray ";;" removed; getBuf signals end-of-stream with -1.
    final int bytesRead = CompatibilityUtil.getBuf(input, directBuffer, lengthLeftToRead);
    if (bytesRead == -1) {
      // The former "&& lengthLeftToRead > 0" was redundant — the loop condition guarantees it.
      throw new EOFException("Unexpected end of stream while reading.");
    }
    lengthLeftToRead -= bytesRead;
  }
  outputBuffer.writerIndex(bytesToRead);
}
}
/** Returns the currently readable region of this buffer as a single NIO ByteBuffer. */
@Override
public ByteBuffer nioBuffer() {
  final int start = readerIndex();
  final int len = readableBytes();
  return nioBuffer(start, len);
}
/**
 * Returns a buffer of the requested length: heap-backed when {@code direct} is false,
 * otherwise a direct NIO view over a freshly allocated ArrowBuf.
 */
@Override
public ByteBuffer getBuffer(boolean direct, int length) {
  if (direct) {
    final ArrowBuf arrowBuf = allocator.buffer(length);
    final ByteBuffer nio = arrowBuf.nioBuffer(0, length);
    // Track the backing ArrowBuf keyed by its NIO view so it can be found and released later.
    directBufMap.put(new ByteBufferWrapper(nio), arrowBuf);
    return nio;
  }
  return heapAllocator.getBuffer(false, length);
}
/**
 * Computes the size of the serialized body for this recordBatch: each buffer is placed at
 * its declared layout offset and the running total is padded out to 8-byte alignment.
 */
@Override
public int computeBodyLength() {
  final List<ArrowBuf> buffers = getBuffers();
  final List<ArrowBuffer> buffersLayout = getBuffersLayout();
  if (buffers.size() != buffersLayout.size()) {
    throw new IllegalStateException("the layout does not match: " + buffers.size() + " != " + buffersLayout.size());
  }
  int size = 0;
  for (int i = 0; i < buffers.size(); i++) {
    final ArrowBuffer layout = buffersLayout.get(i);
    // Advance to this buffer's declared offset (gap bytes count toward the body length).
    size += layout.getOffset() - size;
    final ArrowBuf buffer = buffers.get(i);
    final ByteBuffer nioBuffer = buffer.nioBuffer(buffer.readerIndex(), buffer.readableBytes());
    size += nioBuffer.remaining();
    // Round up to the next 8-byte boundary.
    final int rem = size % 8;
    if (rem != 0) {
      size += 8 - rem;
    }
  }
  return size;
}
/**
 * Feeds the bytes in [start, end) of the given buffer into the sketch.
 *
 * @param buf source buffer
 * @param start inclusive start offset
 * @param end exclusive end offset
 */
public void addBytes(final ArrowBuf buf, final int start, final int end) {
  final int len = end - start;
  final ByteBuffer buffer = buf.nioBuffer(start, len);
  // Wrap once and reuse: the original called Memory.wrap(buffer) twice,
  // discarding the first result as a dead statement.
  final Memory mem = Memory.wrap(buffer);
  sketch.update(mem, 0, len);
}
/** Writes the readable bytes of the given ArrowBuf to this output without copying. */
public void write(ArrowBuf buffer) throws IOException {
  final int readerIndex = buffer.readerIndex();
  final int readable = buffer.readableBytes();
  write(buffer.nioBuffer(readerIndex, readable));
}
/**
 * Reads the current value into the holder and writes its [start, end) byte range to the
 * Parquet consumer as a binary value (zero-copy wrap of the backing buffer).
 */
@Override
public void writeValue() throws IOException {
  reader.read(holder);
  // Use the local instead of re-reading holder.buffer (was declared but unused).
  final ArrowBuf buf = holder.buffer;
  consumer.addBinary(Binary.fromByteBuffer(buf.nioBuffer(holder.start, holder.end - holder.start)));
}
/**
 * Reads the current value into the holder and writes its [start, end) byte range to the
 * Parquet consumer as a binary value (zero-copy wrap of the backing buffer).
 */
@Override
public void writeValue() throws IOException {
  reader.read(holder);
  // Use the local instead of re-reading holder.buffer (was declared but unused).
  final ArrowBuf buf = holder.buffer;
  consumer.addBinary(Binary.fromByteBuffer(buf.nioBuffer(holder.start, holder.end - holder.start)));
}
/** Merges the HLL sketch serialized in [start, end) of the buffer into the running union. */
public void addHll(ArrowBuf buf, int start, int end) {
  final int len = end - start;
  final Memory mem = Memory.wrap(buf.nioBuffer(start, len));
  final HllSketch sketch = HllSketch.wrap(mem);
  union.update(sketch);
}
/**
 * Reads exactly {@code pageLength} bytes from the input into {@code target} (starting at
 * offset 0) and positions the target's writer index past the page.
 *
 * @param target destination buffer; cleared before reading
 * @param pageLength exact number of bytes to read
 * @throws EOFException if the stream ends before the full page is read
 * @throws IOException if the underlying read fails
 */
public void loadPage(ArrowBuf target, int pageLength) throws IOException {
  target.clear();
  final ByteBuffer directBuffer = target.nioBuffer(0, pageLength);
  int lengthLeftToRead = pageLength;
  while (lengthLeftToRead > 0) {
    final int bytesRead = CompatibilityUtil.getBuf(input, directBuffer, lengthLeftToRead);
    if (bytesRead == -1) {
      // getBuf signals end-of-stream with -1 (see the sibling readFromStream helper).
      // Without this check, "lengthLeftToRead -= -1" grows the remainder and the loop
      // spins forever on a truncated stream.
      throw new EOFException("Unexpected end of stream while reading page.");
    }
    lengthLeftToRead -= bytesRead;
  }
  target.writerIndex(pageLength);
}
/**
 * Reads up to {@code l} bytes into the buffer, starting at its current writer index.
 *
 * @param buffer the buffer to read into
 * @param l the maximum number of bytes to read
 * @return the number of bytes read
 * @throws IOException if not enough bytes are left to read
 */
// NOTE(review): the writer index is SET to n (bytes read) rather than advanced by n,
// which is only correct when the buffer's writer index was 0 before this call even
// though the NIO view starts at writerIndex() — confirm against callers.
public int readFully(ArrowBuf buffer, int l) throws IOException { int n = readFully(buffer.nioBuffer(buffer.writerIndex(), l)); buffer.writerIndex(n); return n; }
/** Returns the cardinality estimate of the HLL sketch serialized in [start, end) of the buffer. */
public static long getEstimate(ArrowBuf buf, int start, int end) {
  final Memory mem = Memory.wrap(buf.nioBuffer(start, end - start));
  final HllSketch sketch = HllSketch.wrap(mem);
  return (long) sketch.getEstimate();
}
/** Allocates a zeroed, manager-owned buffer and builds an HLL union backed directly by it. */
private UnionAccum(BufferManager manager, int lgConfigK) {
  final int size = HllSketch.getMaxUpdatableSerializationBytes(lgConfigK, TgtHllType.HLL_8);
  this.buf = manager.getManagedBuffer(size);
  buf.setZero(0, size);
  final WritableMemory mem = WritableMemory.wrap(buf.nioBuffer(0, size));
  this.union = new Union(lgConfigK, mem);
}
/** Allocates a zeroed, manager-owned buffer and builds an HLL_8 sketch backed directly by it. */
private HLLAccum(BufferManager manager, int lgConfigK) {
  final int size = HllSketch.getMaxUpdatableSerializationBytes(lgConfigK, TgtHllType.HLL_8);
  this.buf = manager.getManagedBuffer(size);
  buf.setZero(0, size);
  final WritableMemory mem = WritableMemory.wrap(buf.nioBuffer(0, size));
  this.sketch = new HllSketch(lgConfigK, TgtHllType.HLL_8, mem);
}
/**
 * Reads one Parquet page into {@code dest}, decompressing it first when the column's
 * codec is not UNCOMPRESSED, and records timing stats for the read (and decompress) steps.
 */
public void readPage(PageHeader pageHeader, int compressedSize, int uncompressedSize, ArrowBuf dest) throws IOException {
  Stopwatch timer = Stopwatch.createUnstarted();
  long timeToRead;
  long start = inputStream.getPos();
  if (parentColumnReader.columnChunkMetaData.getCodec() == CompressionCodecName.UNCOMPRESSED) {
    // Uncompressed: read straight into the destination buffer.
    timer.start();
    dataReader.loadPage(dest, compressedSize);
    timeToRead = timer.elapsed(TimeUnit.MICROSECONDS);
    this.updateStats(pageHeader, "Page Read", start, timeToRead, compressedSize, uncompressedSize);
  } else {
    // Compressed: stage into a temporary buffer, then decompress into dest.
    final ArrowBuf compressedData = allocateTemporaryBuffer(compressedSize);
    try {
      timer.start();
      dataReader.loadPage(compressedData, compressedSize);
      timeToRead = timer.elapsed(TimeUnit.MICROSECONDS);
      timer.reset();
      // For the raw read both sizes reported are the compressed size.
      this.updateStats(pageHeader, "Page Read", start, timeToRead, compressedSize, compressedSize);
      start = inputStream.getPos();
      timer.start();
      codecFactory.getDecompressor(parentColumnReader.columnChunkMetaData
        .getCodec()).decompress(compressedData.nioBuffer(0, compressedSize), compressedSize,
          dest.nioBuffer(0, uncompressedSize), uncompressedSize);
      timeToRead = timer.elapsed(TimeUnit.MICROSECONDS);
      this.updateStats(pageHeader, "Decompress", start, timeToRead, compressedSize, uncompressedSize);
    } finally {
      // Always release the staging buffer, even if decompression fails.
      compressedData.release();
    }
  }
}
/** * Deserializes an ArrowRecordBatch knowing the size of the entire message up front. This * minimizes the number of reads to the underlying stream. * * @param in the channel to deserialize from * @param block the object to deserialize to * @param alloc to allocate buffers * @return the deserialized ArrowRecordBatch * @throws IOException if something went wrong */ public static ArrowRecordBatch deserializeRecordBatch(ReadChannel in, ArrowBlock block, BufferAllocator alloc) throws IOException { // Metadata length contains integer prefix plus byte padding long totalLen = block.getMetadataLength() + block.getBodyLength(); if (totalLen > Integer.MAX_VALUE) { throw new IOException("Cannot currently deserialize record batches over 2GB"); } ArrowBuf buffer = alloc.buffer((int) totalLen); if (in.readFully(buffer, (int) totalLen) != totalLen) { throw new IOException("Unexpected end of input trying to read batch."); } ArrowBuf metadataBuffer = buffer.slice(4, block.getMetadataLength() - 4); Message messageFB = Message.getRootAsMessage(metadataBuffer.nioBuffer().asReadOnlyBuffer()); RecordBatch recordBatchFB = (RecordBatch) messageFB.header(new RecordBatch()); // Now read the body final ArrowBuf body = buffer.slice(block.getMetadataLength(), (int) totalLen - block.getMetadataLength()); return deserializeRecordBatch(recordBatchFB, body); }