/**
 * Slices the next {@code length} bytes out of the wrapped stream.
 *
 * @param length the number of bytes to slice off
 * @return buffers covering exactly {@code length} bytes, backed by the delegate
 * @throws EOFException if fewer than {@code length} bytes remain
 */
public List<ByteBuffer> sliceBuffers(long length) throws EOFException {
  // Pure pass-through: the wrapped delegate owns the underlying data.
  return delegate.sliceBuffers(length);
}
/**
 * Reads the next page of the given size out of the chunk's stream.
 *
 * @param size the size of the page
 * @return the page
 * @throws IOException if there is an error while reading from the file stream
 */
public BytesInput readAsBytesInput(int size) throws IOException {
  List<ByteBuffer> pageBuffers = stream.sliceBuffers(size);
  return BytesInput.from(pageBuffers);
}
/**
 * Creates a new input stream over the next {@code length} bytes of this one.
 *
 * @param length the number of bytes the new stream exposes
 * @return a stream wrapping the sliced buffers
 * @throws EOFException if fewer than {@code length} bytes remain
 */
public ByteBufferInputStream sliceStream(long length) throws EOFException {
  List<ByteBuffer> sliced = sliceBuffers(length);
  return ByteBufferInputStream.wrap(sliced);
}
/**
 * Returns the next page of the requested size, sliced from the chunk stream.
 *
 * @param size the size of the page
 * @return the page
 * @throws IOException if there is an error while reading from the file stream
 */
public BytesInput readAsBytesInput(int size) throws IOException {
  // Slice the page's bytes off the stream, then wrap them as BytesInput.
  return BytesInput.from(stream.sliceBuffers(size));
}
/**
 * Wraps the next {@code length} bytes of this stream as an independent stream.
 *
 * @param length the number of bytes to cover
 * @return a stream over the sliced buffers
 * @throws EOFException if fewer than {@code length} bytes remain
 */
public ByteBufferInputStream sliceStream(long length) throws EOFException {
  // Delegate the byte accounting to sliceBuffers, then re-wrap the result.
  List<ByteBuffer> buffers = sliceBuffers(length);
  return ByteBufferInputStream.wrap(buffers);
}
public BytesInput readAsBytesInput(int size) throws IOException { int available = stream.available(); if (size > available) { // this is to workaround a bug where the compressedLength // of the chunk is missing the size of the header of the dictionary // to allow reading older files (using dictionary) we need this. // usually 13 to 19 bytes are missing int missingBytes = size - available; LOG.info("completed the column chunk with {} bytes", missingBytes); List<ByteBuffer> buffers = new ArrayList<>(); buffers.addAll(stream.sliceBuffers(available)); ByteBuffer lastBuffer = ByteBuffer.allocate(missingBytes); f.readFully(lastBuffer); buffers.add(lastBuffer); return BytesInput.from(buffers); } return super.readAsBytesInput(size); }
public BytesInput readAsBytesInput(int size) throws IOException { int available = stream.available(); if (size > available) { // this is to workaround a bug where the compressedLength // of the chunk is missing the size of the header of the dictionary // to allow reading older files (using dictionary) we need this. // usually 13 to 19 bytes are missing int missingBytes = size - available; LOG.info("completed the column chunk with {} bytes", missingBytes); List<ByteBuffer> buffers = new ArrayList<>(); buffers.addAll(stream.sliceBuffers(available)); ByteBuffer lastBuffer = ByteBuffer.allocate(missingBytes); f.readFully(lastBuffer); buffers.add(lastBuffer); return BytesInput.from(buffers); } return super.readAsBytesInput(size); }
/**
 * Reads one column chunk from the file and decodes all of its pages.
 *
 * @param f the file stream to read the chunk's bytes from
 * @param descriptor the chunk's location (file offset) and size
 * @return a page reader over every page in the chunk
 */
private ColumnChunkPageReader readChunk(SeekableInputStream f, ChunkDescriptor descriptor) {
  try {
    List<ByteBuffer> chunkData = readBlocks(f, descriptor.fileOffset, descriptor.size);
    ByteBufferInputStream chunkStream = ByteBufferInputStream.wrap(chunkData);
    List<ByteBuffer> sliced = chunkStream.sliceBuffers(descriptor.size);
    // WorkaroundChunk tolerates chunks whose recorded length is short.
    Chunk chunk = new WorkaroundChunk(descriptor, sliced, f, null);
    return chunk.readAllPages();
  } catch (IOException e) {
    // surface I/O failures as unchecked, preserving the cause
    throw new RuntimeException(e);
  }
}
/** * @param f file to read the chunks from * @param builder used to build chunk list to read the pages for the different columns * @throws IOException if there is an error while reading from the stream */ public void readAll(SeekableInputStream f, ChunkListBuilder builder) throws IOException { List<ByteBuffer> buffers = readBlocks(f, offset, length); // report in a counter the data we just scanned BenchmarkCounter.incrementBytesRead(length); ByteBufferInputStream stream = ByteBufferInputStream.wrap(buffers); for (int i = 0; i < chunks.size(); i++) { ChunkDescriptor descriptor = chunks.get(i); builder.add(descriptor, stream.sliceBuffers(descriptor.size), f); } }
ChunkDescriptor descriptor = chunks.get(i); if (i < chunks.size() - 1) { result.add(new Chunk(descriptor, stream.sliceBuffers(descriptor.size))); } else { result.add(new WorkaroundChunk(descriptor, stream.sliceBuffers(descriptor.size), f));