/**
 * Decodes an LZ4-compressed value. Payload layout: a 4-byte big-endian int
 * giving the decompressed size, followed by the LZ4 block data.
 *
 * @param buf   compressed payload (readerIndex positioned after any framing)
 * @param state codec decode state, passed through to the inner codec
 * @return the value decoded by {@code innerCodec} from the decompressed bytes
 * @throws IOException if the inner codec fails to decode
 */
@Override public Object decode(ByteBuf buf, State state) throws IOException {
    int decompressSize = buf.readInt();
    ByteBuf out = ByteBufAllocator.DEFAULT.buffer(decompressSize);
    try {
        LZ4SafeDecompressor decompressor = factory.safeDecompressor();
        ByteBuffer outBuffer = out.internalNioBuffer(out.writerIndex(), out.writableBytes());
        int pos = outBuffer.position();
        // NOTE(review): internalNioBuffer does not advance buf's readerIndex;
        // assumes the caller does not reuse buf afterwards — confirm.
        decompressor.decompress(buf.internalNioBuffer(buf.readerIndex(), buf.readableBytes()), outBuffer);
        // Renamed from "compressedLength": this is the number of bytes the
        // decompressor WROTE, i.e. the decompressed length.
        int decompressedLength = outBuffer.position() - pos;
        out.writerIndex(decompressedLength);
        return innerCodec.getValueDecoder().decode(out, state);
    } finally {
        // Always release the scratch buffer to avoid a Netty buffer leak.
        out.release();
    }
} };
/**
 * Decodes an LZ4-compressed value. Payload layout: a 4-byte big-endian int
 * giving the decompressed size, followed by the LZ4 block data.
 *
 * @param buf   compressed payload (readerIndex positioned after any framing)
 * @param state codec decode state, passed through to the inner codec
 * @return the value decoded by {@code innerCodec} from the decompressed bytes
 * @throws IOException if the inner codec fails to decode
 */
@Override public Object decode(ByteBuf buf, State state) throws IOException {
    int decompressSize = buf.readInt();
    ByteBuf out = ByteBufAllocator.DEFAULT.buffer(decompressSize);
    try {
        LZ4SafeDecompressor decompressor = factory.safeDecompressor();
        ByteBuffer outBuffer = out.internalNioBuffer(out.writerIndex(), out.writableBytes());
        int pos = outBuffer.position();
        // NOTE(review): internalNioBuffer does not advance buf's readerIndex;
        // assumes the caller does not reuse buf afterwards — confirm.
        decompressor.decompress(buf.internalNioBuffer(buf.readerIndex(), buf.readableBytes()), outBuffer);
        // Renamed from "compressedLength": this is the number of bytes the
        // decompressor WROTE, i.e. the decompressed length.
        int decompressedLength = outBuffer.position() - pos;
        out.writerIndex(decompressedLength);
        return innerCodec.getValueDecoder().decode(out, state);
    } finally {
        // Always release the scratch buffer to avoid a Netty buffer leak.
        out.release();
    }
} };
/**
 * Creates a codec backed by the fastest LZ4 implementation available
 * (native JNI, sun.misc.Unsafe, or pure Java).
 */
public LZ4CompressionCodec() {
    // Resolve the factory once instead of three times.
    LZ4Factory factory = LZ4Factory.fastestInstance();
    this.compressor = factory.fastCompressor();
    this.fastDecompressor = factory.fastDecompressor();
    this.safeDecompressor = factory.safeDecompressor();
}
/**
 * Return a {@link LZ4UnknownSizeDecompressor} instance.
 *
 * @deprecated use {@link #safeDecompressor()}
 */
@Deprecated // annotation was missing; Javadoc @deprecated alone gives callers no compiler warning
public LZ4UnknownSizeDecompressor unknownSizeDecompressor() {
    return safeDecompressor();
}
/**
 * Return a {@link LZ4UnknownSizeDecompressor} instance.
 *
 * @deprecated use {@link #safeDecompressor()}
 */
@Deprecated // annotation was missing; Javadoc @deprecated alone gives callers no compiler warning
public LZ4UnknownSizeDecompressor unknownSizeDecompressor() {
    return safeDecompressor();
}
/**
 * Decompresses the given LZ4 payload; null-safe (a null input yields null).
 */
@Override
public byte[] decompress(final byte[] data) {
    if (data != null) {
        // NOTE(review): data.length (the COMPRESSED length) is passed as the
        // maximum decompressed size — this fails whenever the payload actually
        // shrank during compression. Confirm upstream guarantees.
        return lz4Factory.safeDecompressor().decompress(data, data.length);
    }
    return null;
} }
/**
 * Decompresses {@code src} when the exact decompressed size is unknown.
 * The decompressed data cannot be larger than {@code maxDecompressedSize}.
 *
 * @param src                 compressed input, must not be null
 * @param maxDecompressedSize upper bound on the decompressed size, must be positive
 * @return the decompressed bytes
 * @throws IllegalArgumentException if {@code src} is null or the bound is non-positive
 */
public static byte[] decompressSafe(final byte[] src, int maxDecompressedSize) {
    if (src == null) {
        throw new IllegalArgumentException("src must not be null.");
    }
    if (maxDecompressedSize <= 0) {
        throw new IllegalArgumentException("maxDecompressedSize must be larger than 0 but " + maxDecompressedSize);
    }
    return factory.safeDecompressor().decompress(src, maxDecompressedSize);
}
/**
 * Decompresses {@code src} when the exact decompressed size is unknown.
 * The decompressed data cannot be larger than {@code maxDecompressedSize}.
 *
 * @param src                 compressed input, must not be null
 * @param maxDecompressedSize upper bound on the decompressed size, must be positive
 * @return the decompressed bytes
 * @throws IllegalArgumentException if {@code src} is null or the bound is non-positive
 */
public static byte[] decompressSafe(final byte[] src, int maxDecompressedSize) {
    if (src == null) {
        throw new IllegalArgumentException("src must not be null.");
    }
    if (maxDecompressedSize <= 0) {
        throw new IllegalArgumentException("maxDecompressedSize must be larger than 0 but " + maxDecompressedSize);
    }
    return factory.safeDecompressor().decompress(src, maxDecompressedSize);
}
/**
 * Decompresses the LZ4-compressed part content stored in {@code partDbObject}.
 * Tries a buffer sized by MIN_BUFFER_SIZE_MULTIPLIER first; on LZ4Exception
 * (buffer too small) retries once with MAX_BUFFER_SIZE_MULTIPLIER.
 */
private byte[] decompress(DBObject partDbObject) {
    byte[] compressedContent = (byte[]) partDbObject.get(BatchConstants.partContent);
    long partSize = (long) partDbObject.get(BatchConstants.partLengthKey);
    // First attempt: optimistic (smaller) output-buffer estimate.
    // NOTE(review): the (int) cast can overflow for very large parts — confirm partSize bounds.
    int outputSize = (int) (MIN_BUFFER_SIZE_MULTIPLIER * partSize);
    try {
        return lz4Factory.safeDecompressor().decompress(compressedContent, outputSize);
    } catch (LZ4Exception e) {
        // Estimate was too small — retry once with the larger multiplier.
        // A second failure propagates to the caller.
        outputSize = (int) (MAX_BUFFER_SIZE_MULTIPLIER * partSize);
        return lz4Factory.safeDecompressor().decompress(compressedContent, outputSize);
    }
}
/**
 * Decompresses the LZ4-compressed part content stored in {@code partDbObject}.
 * Tries a buffer sized by MIN_BUFFER_SIZE_MULTIPLIER first; on LZ4Exception
 * (buffer too small) logs a warning with the record id and retries once with
 * MAX_BUFFER_SIZE_MULTIPLIER.
 */
private byte[] decompress(DBObject partDbObject, RecordId yId) {
    byte[] compressedContent = (byte[]) partDbObject.get(BatchConstants.partContent);
    long partSize = (long) partDbObject.get(BatchConstants.partLengthKey);
    // First attempt: optimistic (smaller) output-buffer estimate.
    // NOTE(review): the (int) cast can overflow for very large parts — confirm partSize bounds.
    int outputSize = (int) (MIN_BUFFER_SIZE_MULTIPLIER * partSize);
    try {
        return lz4Factory.safeDecompressor().decompress(compressedContent, outputSize);
    } catch (LZ4Exception e) {
        // Estimate was too small — log and retry once with the larger multiplier.
        // A second failure propagates to the caller.
        LOGGER.warn("Error while decompressing text part for record with id: " + yId.getUid()+", trying with bigger buffer",e);
        outputSize = (int) (MAX_BUFFER_SIZE_MULTIPLIER * partSize);
        return lz4Factory.safeDecompressor().decompress(compressedContent, outputSize);
    }
}
/**
 * Decodes an LZ4-compressed value. Payload layout: a 4-byte big-endian int
 * giving the decompressed size, followed by the LZ4 block data.
 *
 * @param buf   compressed payload (readerIndex positioned after any framing)
 * @param state codec decode state, passed through to the inner codec
 * @return the value decoded by {@code innerCodec} from the decompressed bytes
 * @throws IOException if the inner codec fails to decode
 */
@Override public Object decode(ByteBuf buf, State state) throws IOException {
    int decompressSize = buf.readInt();
    ByteBuf out = ByteBufAllocator.DEFAULT.buffer(decompressSize);
    try {
        LZ4SafeDecompressor decompressor = factory.safeDecompressor();
        ByteBuffer outBuffer = out.internalNioBuffer(out.writerIndex(), out.writableBytes());
        int pos = outBuffer.position();
        // NOTE(review): internalNioBuffer does not advance buf's readerIndex;
        // assumes the caller does not reuse buf afterwards — confirm.
        decompressor.decompress(buf.internalNioBuffer(buf.readerIndex(), buf.readableBytes()), outBuffer);
        // Renamed from "compressedLength": this is the number of bytes the
        // decompressor WROTE, i.e. the decompressed length.
        int decompressedLength = outBuffer.position() - pos;
        out.writerIndex(decompressedLength);
        return innerCodec.getValueDecoder().decode(out, state);
    } finally {
        // Always release the scratch buffer to avoid a Netty buffer leak.
        out.release();
    }
} };
/**
 * Create a new {@link InputStream} that will decompress data using the LZ4 algorithm.
 * Delegates to the main constructor with the fastest available safe decompressor
 * and a 32-bit xxHash for frame checksum verification.
 *
 * @param in The stream to decompress
 * @throws IOException if reading the LZ4 frame header from {@code in} fails
 */
public LZ4FrameInputStream(InputStream in) throws IOException {
    this(in, LZ4Factory.fastestInstance().safeDecompressor(), XXHashFactory.fastestInstance().hash32());
}
// Lazily resolve the pure-Java ("safe") LZ4 decompressor on first use and
// cache it in SAFE_INSTANCE. NOTE(review): fragment is cut off here; if
// SAFE_INSTANCE is shared across threads this is a benign race only when
// LZ4Factory.safeInstance() is idempotent — confirm in the full file.
LZ4SafeDecompressor safeInstance = SAFE_INSTANCE; if (safeInstance == null) { safeInstance = SAFE_INSTANCE = LZ4Factory.safeInstance().safeDecompressor();
// Lazily resolve the pure-Java ("safe") LZ4 decompressor on first use and
// cache it in SAFE_INSTANCE. NOTE(review): fragment is cut off here; if
// SAFE_INSTANCE is shared across threads this is a benign race only when
// LZ4Factory.safeInstance() is idempotent — confirm in the full file.
LZ4SafeDecompressor safeInstance = SAFE_INSTANCE; if (safeInstance == null) { safeInstance = SAFE_INSTANCE = LZ4Factory.safeInstance().safeDecompressor();
/**
 * Creates a codec backed by the fastest LZ4 implementation available
 * (native JNI, sun.misc.Unsafe, or pure Java).
 */
public LZ4CompressionCodec() {
    // Resolve the factory once instead of three times.
    LZ4Factory factory = LZ4Factory.fastestInstance();
    this.compressor = factory.fastCompressor();
    this.fastDecompressor = factory.fastDecompressor();
    this.safeDecompressor = factory.safeDecompressor();
}
// Demonstrates an LZ4 compress/decompress round trip using both decompressor styles.
LZ4Factory factory = LZ4Factory.fastestInstance();
byte[] data = "12345345234572".getBytes("UTF-8");
final int decompressedLength = data.length;

// compress data
LZ4Compressor compressor = factory.fastCompressor();
int maxCompressedLength = compressor.maxCompressedLength(decompressedLength);
byte[] compressed = new byte[maxCompressedLength];
int compressedLength = compressor.compress(data, 0, decompressedLength, compressed, 0, maxCompressedLength);

// decompress data
// - method 1: when the decompressed length is known
LZ4FastDecompressor decompressor = factory.fastDecompressor();
byte[] restored = new byte[decompressedLength];
// fast decompressor returns how many COMPRESSED bytes it consumed
int compressedLength2 = decompressor.decompress(compressed, 0, restored, 0, decompressedLength);
// compressedLength == compressedLength2

// - method 2: when the compressed length is known (a little slower)
// the destination buffer needs to be over-sized
LZ4SafeDecompressor decompressor2 = factory.safeDecompressor();
// safe decompressor returns how many DECOMPRESSED bytes it produced
int decompressedLength2 = decompressor2.decompress(compressed, 0, compressedLength, restored, 0);
// decompressedLength == decompressedLength2
/**
 * Create a new {@link InputStream} that will decompress data using the LZ4 algorithm.
 *
 * @param in The stream to decompress
 * @throws IOException if reading the LZ4 frame header fails
 */
public KafkaLZ4BlockInputStream(InputStream in) throws IOException {
    super(in);
    // Safe decompressor: per-block decompressed size is bounded but not known exactly.
    decompressor = LZ4Factory.fastestInstance().safeDecompressor();
    checksum = XXHashFactory.fastestInstance().hash32();
    // Header must be read BEFORE sizing buffers: it sets bd (block descriptor).
    readHeader();
    maxBlockSize = bd.getBlockMaximumSize();
    buffer = new byte[maxBlockSize];
    compressedBuffer = new byte[maxBlockSize];
    bufferOffset = 0;
    bufferSize = 0;
    finished = false;
}