/**
 * Decompresses {@code inputLength} bytes of {@code input} starting at {@code inputOffset}
 * into {@code output} starting at {@code outputOffset}.
 *
 * @return the number of bytes written to {@code output}
 */
private static int decompress(Decompressor decompressor, Slice input, int inputOffset, int inputLength, byte[] output, int outputOffset)
{
    // NOTE(review): the cast assumes the slice is backed by a heap byte[] — verify callers
    // never pass an off-heap slice here.
    byte[] byteArray = (byte[]) input.getBase();
    // Translate the slice's absolute address into an index within the backing array.
    int byteArrayOffset = inputOffset + (int) (input.getAddress() - ARRAY_BYTE_BASE_OFFSET);
    // Redundant local removed: return the decompressed size directly.
    return decompressor.decompress(byteArray, byteArrayOffset, inputLength, output, outputOffset, output.length - outputOffset);
}
}
public SerializedPage serialize(Page page) { SliceOutput serializationBuffer = new DynamicSliceOutput(toIntExact((page.getSizeInBytes() + Integer.BYTES))); // block length is an int writeRawPage(page, serializationBuffer, blockEncodingSerde); if (!compressor.isPresent()) { return new SerializedPage(serializationBuffer.slice(), UNCOMPRESSED, page.getPositionCount(), serializationBuffer.size()); } int maxCompressedLength = maxCompressedLength(serializationBuffer.size()); byte[] compressionBuffer = new byte[maxCompressedLength]; int actualCompressedLength = compressor.get().compress(serializationBuffer.slice().getBytes(), 0, serializationBuffer.size(), compressionBuffer, 0, maxCompressedLength); if (((1.0 * actualCompressedLength) / serializationBuffer.size()) > MINIMUM_COMPRESSION_RATIO) { return new SerializedPage(serializationBuffer.slice(), UNCOMPRESSED, page.getPositionCount(), serializationBuffer.size()); } return new SerializedPage( Slices.copyOf(Slices.wrappedBuffer(compressionBuffer, 0, actualCompressedLength)), COMPRESSED, page.getPositionCount(), serializationBuffer.size()); }
/**
 * Always throws a {@link MalformedInputException}; declared to return one so callers
 * can write {@code throw fail(...)} to satisfy the compiler's flow analysis.
 */
public static MalformedInputException fail(long offset, String reason)
{
    MalformedInputException exception = new MalformedInputException(offset, reason);
    throw exception;
}
}
// Round-trips the compressed bytes through the verify decompressor and asserts
// the result matches the original input exactly.
private void verifyCompressedData(byte[] originalUncompressed, byte[] compressed, int compressedLength)
{
    byte[] decompressed = new byte[originalUncompressed.length];
    int decompressedSize = getVerifyDecompressor().decompress(compressed, 0, compressedLength, decompressed, 0, decompressed.length);
    assertByteArraysEqual(decompressed, 0, decompressedSize, originalUncompressed, 0, originalUncompressed.length);
}
@Override
protected Decompressor getVerifyDecompressor()
{
    // Cross-check results against the Hadoop codec implementation.
    Decompressor verifier = new HadoopCodecDecompressor(verifyCodec);
    return verifier;
}
}
@Override public int maxCompressedLength(int uncompressedSize) { // assume hadoop stream encoder won't increase size by more than 10% over the block encoder return (int) ((blockCompressorForSizeCalculation.maxCompressedLength(uncompressedSize) * 1.1) + 8); }
/**
 * Decompresses the given input range into the output buffer.
 *
 * @return the number of decompressed bytes written
 * @throws OrcCorruptionException if the compressed stream is malformed
 */
@Override
public int decompress(byte[] input, int offset, int length, OutputBuffer output)
        throws OrcCorruptionException
{
    try {
        byte[] destination = output.initialize(maxBufferSize);
        return decompressor.decompress(input, offset, length, destination, 0, destination.length);
    }
    catch (MalformedInputException e) {
        // Translate the low-level decompression failure into an ORC corruption error.
        throw new OrcCorruptionException(e, orcDataSourceId, "Invalid compressed stream");
    }
}
@Override
protected Decompressor getVerifyDecompressor()
{
    // Cross-check results against the Hadoop codec implementation.
    Decompressor verifier = new HadoopCodecDecompressor(verifyCodec);
    return verifier;
}
}
/**
 * Throws a {@link MalformedInputException} at the given offset when the condition is false.
 */
public static void verify(boolean condition, long offset, String reason)
{
    if (condition) {
        return;
    }
    throw new MalformedInputException(offset, reason);
}
/**
 * Deserializes a page, decompressing it first when it was stored compressed.
 *
 * @throws IllegalArgumentException if {@code serializedPage} is null
 * @throws IllegalStateException if the stream does not decompress to the recorded size
 */
public Page deserialize(SerializedPage serializedPage)
{
    checkArgument(serializedPage != null, "serializedPage is null");

    if (!decompressor.isPresent() || serializedPage.getCompression() == UNCOMPRESSED) {
        return readRawPage(serializedPage.getPositionCount(), serializedPage.getSlice().getInput(), blockEncodingSerde);
    }

    int uncompressedSize = serializedPage.getUncompressedSizeInBytes();
    byte[] decompressed = new byte[uncompressedSize];
    int actualUncompressedSize = decompressor.get().decompress(serializedPage.getSlice().getBytes(), 0, serializedPage.getSlice().length(), decompressed, 0, uncompressedSize);
    // Fail with a diagnostic message (the bare checkState gave no sizes on mismatch)
    checkState(uncompressedSize == actualUncompressedSize, "expected uncompressed size %s, but got %s", uncompressedSize, actualUncompressedSize);

    return readRawPage(serializedPage.getPositionCount(), Slices.wrappedBuffer(decompressed, 0, uncompressedSize).getInput(), blockEncodingSerde);
}
}
@Override
protected Decompressor getVerifyDecompressor()
{
    // Cross-check results against the Hadoop codec implementation.
    Decompressor verifier = new HadoopCodecDecompressor(verifyCodec);
    return verifier;
}
}
/**
 * Throws a {@link MalformedInputException} at the given offset when the condition is false.
 */
public static void verify(boolean condition, long offset, String reason)
{
    if (condition) {
        return;
    }
    throw new MalformedInputException(offset, reason);
}
@Override
protected Decompressor getVerifyDecompressor()
{
    // Cross-check results against the Hadoop codec implementation.
    Decompressor verifier = new HadoopCodecDecompressor(verifyCodec);
    return verifier;
}
}
/**
 * Always throws a {@link MalformedInputException}; declared to return one so callers
 * can write {@code throw fail(...)} to satisfy the compiler's flow analysis.
 */
public static MalformedInputException fail(long offset, String reason)
{
    MalformedInputException exception = new MalformedInputException(offset, reason);
    throw exception;
}
}
@Override
protected Decompressor getVerifyDecompressor()
{
    // Cross-check results against the Hadoop codec implementation.
    Decompressor verifier = new HadoopCodecDecompressor(verifyCodec);
    return verifier;
}
}
@Override
protected Decompressor getVerifyDecompressor()
{
    // Cross-check results against the Hadoop codec implementation.
    Decompressor verifier = new HadoopCodecDecompressor(verifyCodec);
    return verifier;
}
}
@Override
protected Decompressor getVerifyDecompressor()
{
    // Cross-check results against the Hadoop codec implementation.
    Decompressor verifier = new HadoopCodecDecompressor(verifyCodec);
    return verifier;
}
}
@Override
protected Decompressor getVerifyDecompressor()
{
    // Cross-check results against the Hadoop codec implementation.
    Decompressor verifier = new HadoopCodecDecompressor(verifyCodec);
    return verifier;
}
}
@Override
protected Decompressor getVerifyDecompressor()
{
    // Cross-check results against the Hadoop codec implementation.
    Decompressor verifier = new HadoopCodecDecompressor(verifyCodec);
    return verifier;
}
}
@Override
protected Decompressor getVerifyDecompressor()
{
    // Cross-check results against the Hadoop codec implementation.
    Decompressor verifier = new HadoopCodecDecompressor(verifyCodec);
    return verifier;
}
}