/**
 * Slices the next {@code length} bytes from the input stream and returns them
 * as a little-endian buffer.
 *
 * @param length number of bytes to slice from the stream
 * @return a little-endian view of the next {@code length} bytes
 * @throws ParquetDecodingException if the underlying stream cannot supply the bytes
 */
private ByteBuffer getBuffer(int length) {
  try {
    ByteBuffer sliced = in.slice(length);
    return sliced.order(ByteOrder.LITTLE_ENDIAN);
  } catch (IOException e) {
    throw new ParquetDecodingException("Failed to read " + length + " bytes", e);
  }
}
/**
 * Slices the next {@code length} bytes from the input stream and returns them
 * as a little-endian buffer.
 *
 * @param length number of bytes to slice from the stream
 * @return a little-endian view of the next {@code length} bytes
 * @throws ParquetDecodingException if the underlying stream cannot supply the bytes
 */
private ByteBuffer getBuffer(int length) {
  try {
    ByteBuffer sliced = in.slice(length);
    return sliced.order(ByteOrder.LITTLE_ENDIAN);
  } catch (IOException e) {
    throw new ParquetDecodingException("Failed to read " + length + " bytes", e);
  }
}
// Unpack the packed values in groups of 8: each group of 8 values occupies
// exactly bitWidth bytes of input.
// NOTE(review): fragment — the loop/method close is outside this view.
while (valueIndex < this.currentCount) {
  // slice exactly one group's worth of bytes; most of the time this avoids a copy
  ByteBuffer buffer = in.slice(bitWidth);
  this.packer.unpack8Values(buffer, buffer.position(), this.currentBuffer, valueIndex);
  valueIndex += 8;
// Unpack the packed values in groups of 8: each group of 8 values occupies
// exactly bitWidth bytes of input.
// NOTE(review): fragment — the loop/method close is outside this view.
while (valueIndex < this.currentCount) {
  // slice exactly one group's worth of bytes; most of the time this avoids a copy
  ByteBuffer buffer = in.slice(bitWidth);
  this.packer.unpack8Values(buffer, buffer.position(), this.currentBuffer, valueIndex);
  valueIndex += 8;
/**
 * Returns a view of the next {@code length} bytes of this stream.
 * Delegates directly to the wrapped stream.
 *
 * @param length number of bytes to slice
 * @return the sliced buffer
 * @throws EOFException if fewer than {@code length} bytes remain in the delegate
 */
public ByteBuffer slice(int length) throws EOFException { return delegate.slice(length); }
private void unpack8Values(BytePackerForLong packer) throws IOException { // get a single buffer of 8 values. most of the time, this won't require a copy // TODO: update the packer to consume from an InputStream ByteBuffer buffer = in.slice(packer.getBitWidth()); packer.unpack8Values(buffer, buffer.position(), valuesBuffer, valuesBuffered); this.valuesBuffered += 8; }
/**
 * Returns the remaining content of this stream as a single buffer.
 *
 * @return the slice of the byte buffer inside this stream
 * @deprecated Will be removed in 2.0.0; Use {@link #slice(int)} instead
 */
@Deprecated
public ByteBuffer toByteBuffer() {
  try {
    // slicing exactly the remaining byte count can never run past EOF
    return slice(available());
  } catch (EOFException e) {
    throw new ShouldNeverHappenException(e);
  }
}
private void unpack8Values(BytePackerForLong packer) throws IOException { // get a single buffer of 8 values. most of the time, this won't require a copy // TODO: update the packer to consume from an InputStream ByteBuffer buffer = in.slice(packer.getBitWidth()); packer.unpack8Values(buffer, buffer.position(), valuesBuffer, valuesBuffered); this.valuesBuffered += 8; }
/**
 * Called to initialize the column reader from a part of a page.
 *
 * Implementations must consume all bytes from the input stream, leaving the
 * stream ready to read the next section of data. The underlying
 * implementation knows how much data to read, so a length is not provided.
 *
 * Each page may contain several sections:
 * <ul>
 * <li> repetition levels column
 * <li> definition levels column
 * <li> data column
 * </ul>
 *
 * @param valueCount count of values in this page
 * @param in an input stream containing the page data at the correct offset
 *
 * @throws IOException if there is an exception while reading from the input stream
 */
public void initFromPage(int valueCount, ByteBufferInputStream in) throws IOException {
  if (actualOffset != -1) {
    throw new UnsupportedOperationException(
        "Either initFromPage(int, ByteBuffer, int) or initFromPage(int, ByteBufferInputStream) must be implemented in "
            + getClass().getName());
  }
  // BUG FIX: slice the remaining bytes of the page, not `valueCount` bytes.
  // valueCount is a number of VALUES, not a byte length, so slicing by it
  // hands the legacy overload a truncated (or over-long) buffer.
  ByteBuffer buffer = in.slice(in.available());
  // Pass the slice's own position rather than assuming it starts at 0.
  initFromPage(valueCount, buffer, buffer.position());
}
/**
 * Reads a binary value whose byte length is provided by the length reader.
 *
 * @return the next binary value, backed by a slice of the page buffer
 * @throws ParquetDecodingException if the bytes cannot be read
 */
@Override
public Binary readBytes() {
  int length = lengthReader.readInteger();
  try {
    return Binary.fromConstantByteBuffer(in.slice(length));
  } catch (IOException e) {
    // BUG FIX: preserve the cause — previously the IOException was dropped,
    // hiding the underlying I/O failure from the stack trace.
    throw new ParquetDecodingException("Failed to read " + length + " bytes", e);
  }
}
/**
 * Reads the next fixed-length binary value as a slice of the page buffer.
 *
 * @return the next binary value
 * @throws ParquetDecodingException if the bytes cannot be read or sliced
 */
@Override
public Binary readBytes() {
  try {
    ByteBuffer bytes = in.slice(length);
    return Binary.fromConstantByteBuffer(bytes);
  } catch (IOException | RuntimeException e) {
    throw new ParquetDecodingException("could not read bytes at offset " + in.position(), e);
  }
}
/**
 * Reads a binary value whose byte length is provided by the length reader.
 *
 * @return the next binary value, backed by a slice of the page buffer
 * @throws ParquetDecodingException if the bytes cannot be read
 */
@Override
public Binary readBytes() {
  int length = lengthReader.readInteger();
  try {
    return Binary.fromConstantByteBuffer(in.slice(length));
  } catch (IOException e) {
    // BUG FIX: preserve the cause — previously the IOException was dropped,
    // hiding the underlying I/O failure from the stack trace.
    throw new ParquetDecodingException("Failed to read " + length + " bytes", e);
  }
}
/**
 * Reads the next fixed-length binary value as a slice of the page buffer.
 *
 * @return the next binary value
 * @throws ParquetDecodingException if the bytes cannot be read or sliced
 */
@Override
public Binary readBytes() {
  try {
    ByteBuffer bytes = in.slice(length);
    return Binary.fromConstantByteBuffer(bytes);
  } catch (IOException | RuntimeException e) {
    throw new ParquetDecodingException("could not read bytes at offset " + in.position(), e);
  }
}
@Override public int readInteger() { ++ decodedPosition; if (decodedPosition == decoded.length) { try { if (in.available() < bitWidth) { // unpack8Values needs at least bitWidth bytes to read from, // We have to fill in 0 byte at the end of encoded bytes. byte[] tempEncode = new byte[bitWidth]; in.read(tempEncode, 0, in.available()); packer.unpack8Values(tempEncode, 0, decoded, 0); } else { ByteBuffer encoded = in.slice(bitWidth); packer.unpack8Values(encoded, encoded.position(), decoded, 0); } } catch (IOException e) { throw new ParquetDecodingException("Failed to read packed values", e); } decodedPosition = 0; } return decoded[decodedPosition]; }
@Override public int readInteger() { ++ decodedPosition; if (decodedPosition == decoded.length) { try { if (in.available() < bitWidth) { // unpack8Values needs at least bitWidth bytes to read from, // We have to fill in 0 byte at the end of encoded bytes. byte[] tempEncode = new byte[bitWidth]; in.read(tempEncode, 0, in.available()); packer.unpack8Values(tempEncode, 0, decoded, 0); } else { ByteBuffer encoded = in.slice(bitWidth); packer.unpack8Values(encoded, encoded.position(), decoded, 0); } } catch (IOException e) { throw new ParquetDecodingException("Failed to read packed values", e); } decodedPosition = 0; } return decoded[decodedPosition]; }
/**
 * Reads a length-prefixed binary value: a 4-byte little-endian length
 * followed by that many bytes, returned as a slice of the page buffer.
 *
 * @return the next binary value
 * @throws ParquetDecodingException if the length or bytes cannot be read
 */
@Override
public Binary readBytes() {
  try {
    int length = BytesUtils.readIntLittleEndian(in);
    ByteBuffer bytes = in.slice(length);
    return Binary.fromConstantByteBuffer(bytes);
  } catch (IOException | RuntimeException e) {
    // multi-catch replaces the two identical catch blocks
    throw new ParquetDecodingException("could not read bytes at offset " + in.position(), e);
  }
}
/**
 * Reads a length-prefixed binary value: a 4-byte little-endian length
 * followed by that many bytes, returned as a slice of the page buffer.
 *
 * @return the next binary value
 * @throws ParquetDecodingException if the length or bytes cannot be read
 */
@Override
public Binary readBytes() {
  try {
    int length = BytesUtils.readIntLittleEndian(in);
    ByteBuffer bytes = in.slice(length);
    return Binary.fromConstantByteBuffer(bytes);
  } catch (IOException | RuntimeException e) {
    // multi-catch replaces the two identical catch blocks
    throw new ParquetDecodingException("could not read bytes at offset " + in.position(), e);
  }
}