Refine search
/**
 * Encodes {@code in} with the inner codec, then LZ4-compresses the result into a new
 * buffer prefixed with the uncompressed length (DECOMPRESSION_HEADER_SIZE header) so the
 * decoder can size its output.
 *
 * @param in the value to encode and compress
 * @return a newly allocated ByteBuf owned by the caller
 * @throws IOException if the inner codec fails to encode the value
 */
@Override
public ByteBuf encode(Object in) throws IOException {
    ByteBuf bytes = null;
    try {
        LZ4Compressor compressor = factory.fastCompressor();
        bytes = innerCodec.getValueEncoder().encode(in);
        ByteBuffer srcBuf = bytes.internalNioBuffer(bytes.readerIndex(), bytes.readableBytes());
        int outMaxLength = compressor.maxCompressedLength(bytes.readableBytes());
        ByteBuf out = ByteBufAllocator.DEFAULT.buffer(outMaxLength + DECOMPRESSION_HEADER_SIZE);
        try {
            // Header: original (uncompressed) length.
            out.writeInt(bytes.readableBytes());
            ByteBuffer outBuf = out.internalNioBuffer(out.writerIndex(), out.writableBytes());
            int pos = outBuf.position();
            compressor.compress(srcBuf, outBuf);
            int compressedLength = outBuf.position() - pos;
            out.writerIndex(out.writerIndex() + compressedLength);
            return out;
        } catch (Throwable t) {
            // FIX: the original leaked `out` when compress() threw (e.g. LZ4Exception);
            // release it before rethrowing so the allocator does not lose the buffer.
            out.release();
            throw t;
        }
    } finally {
        if (bytes != null) {
            bytes.release();
        }
    }
}
};
/**
 * LZ4-compresses {@code value} and frames it as
 * {@code [int uncompressedLength][compressed payload]}.
 */
private byte[] serialize(byte[] value) {
    // Worst-case compressed size for this input.
    final int bound = LZ4_COMPRESSOR.maxCompressedLength(value.length);
    final byte[] scratch = new byte[bound];
    final int written = LZ4_COMPRESSOR.compress(value, 0, value.length, scratch, 0);
    // Build the length-prefixed frame in an exactly-sized buffer.
    final ByteBuffer frame = ByteBuffer.allocate(written + Integer.BYTES);
    frame.putInt(value.length);
    frame.put(scratch, 0, written);
    return frame.array();
}
}
/**
 * LZ4-compresses the readable bytes of {@code source} into a freshly allocated pooled
 * buffer sized to the worst-case compressed length. The source buffer's indices are not
 * modified ({@code nioBuffer} does not consume readable bytes).
 *
 * @param source buffer whose readable bytes are compressed
 * @return a pooled ByteBuf containing only the compressed bytes; caller must release it
 */
@Override
public ByteBuf encode(ByteBuf source) {
    int uncompressedLength = source.readableBytes();
    int maxLength = compressor.maxCompressedLength(uncompressedLength);
    ByteBuffer sourceNio = source.nioBuffer(source.readerIndex(), source.readableBytes());
    ByteBuf target = PooledByteBufAllocator.DEFAULT.buffer(maxLength, maxLength);
    try {
        ByteBuffer targetNio = target.nioBuffer(0, maxLength);
        int compressedLength =
                compressor.compress(sourceNio, 0, uncompressedLength, targetNio, 0, maxLength);
        target.writerIndex(compressedLength);
        return target;
    } catch (Throwable t) {
        // FIX: the original leaked the pooled `target` buffer when compress() threw.
        target.release();
        throw t;
    }
}
/**
 * LZ4-compresses {@code in} and returns a frame of
 * {@code [int originalLength][compressed payload]}.
 *
 * @throws NullPointerException if {@code in} is null
 */
@Override
protected byte[] compress(byte[] in) {
    if (in == null) {
        throw new NullPointerException("Can't compress null");
    }
    LZ4Compressor compressor = lz4Factory.fastCompressor();
    final byte[] scratch = new byte[compressor.maxCompressedLength(in.length)];
    final int written = compressor.compress(in, 0, in.length, scratch, 0);
    getLogger().debug("Compressed %d bytes to %d", in.length, written);
    // Prefix with the uncompressed length so decompression can size its output.
    final ByteBuffer framed = ByteBuffer.allocate(Integer.BYTES + written);
    framed.putInt(in.length);
    framed.put(scratch, 0, written);
    return framed.array();
}
/**
 * Compresses a heap-backed (array-accessible) input buffer into a new heap buffer framed
 * as {@code [int uncompressedLength][compressed bytes]}. Consumes the input: its reader
 * index is advanced to the writer index.
 *
 * @param input heap ByteBuf to compress; assumed array-backed — TODO confirm callers
 *              route direct buffers to compressDirect instead
 * @return the framed output buffer; caller owns it and must release
 * @throws IOException wrapping any failure during compression
 */
private ByteBuf compressHeap(ByteBuf input) throws IOException {
    int maxCompressedLength = compressor.maxCompressedLength(input.readableBytes());

    // Not a direct buffer so use byte arrays...
    int inOffset = input.arrayOffset() + input.readerIndex();
    byte[] in = input.array();
    int len = input.readableBytes();
    // Increase reader index.
    input.readerIndex(input.writerIndex());

    // Allocate a heap buffer from the ByteBufAllocator as we may use a PooledByteBufAllocator and
    // so
    // can eliminate the overhead of allocate a new byte[].
    ByteBuf output = input.alloc().heapBuffer(INTEGER_BYTES + maxCompressedLength);
    try {
        output.writeInt(len);
        // calculate the correct offset.
        int offset = output.arrayOffset() + output.writerIndex();
        byte[] out = output.array();
        int written = compressor.compress(in, inOffset, len, out, offset);

        // Set the writer index so the amount of written bytes is reflected
        output.writerIndex(output.writerIndex() + written);
    } catch (Exception e) {
        // release output buffer so we not leak and rethrow exception.
        output.release();
        throw new IOException(e);
    }
    return output;
}
/**
 * Compresses a direct input buffer into a new direct buffer framed as
 * {@code [int uncompressedLength][compressed bytes]}. Consumes the input: its reader
 * index is advanced to the writer index.
 *
 * @param input direct ByteBuf to compress
 * @return the framed direct output buffer; caller owns it and must release
 * @throws IOException wrapping any failure during compression
 */
private ByteBuf compressDirect(ByteBuf input) throws IOException {
    int maxCompressedLength = compressor.maxCompressedLength(input.readableBytes());
    // If the input is direct we will allocate a direct output buffer as well as this will allow us
    // to use
    // LZ4Compressor.compress and so eliminate memory copies.
    ByteBuf output = input.alloc().directBuffer(INTEGER_BYTES + maxCompressedLength);
    try {
        ByteBuffer in = inputNioBuffer(input);
        // Increase reader index.
        input.readerIndex(input.writerIndex());

        output.writeInt(in.remaining());

        ByteBuffer out = outputNioBuffer(output);
        int written = compressor.compress(
                in, in.position(), in.remaining(), out, out.position(), out.remaining());
        // Set the writer index so the amount of written bytes is reflected
        output.writerIndex(output.writerIndex() + written);
    } catch (Exception e) {
        // release output buffer so we not leak and rethrow exception.
        output.release();
        throw new IOException(e);
    }
    return output;
}
// NOTE(review): this snippet appears truncated — the `if (flushableBytes == 0)` block is
// never closed, `outNioBuffer` is referenced without a visible declaration, and the
// trailing `catch` has no matching `try`. Code is left byte-identical; do not apply as-is.
// Presumably this is the body of an LZ4 frame encoder's block-flush routine — confirm
// against the original source before editing.
private void flushBufferedData(ByteBuf out) {
    int flushableBytes = buffer.readableBytes();
    if (flushableBytes == 0) {
        return;
    // Checksum is computed over exactly the bytes about to be compressed.
    checksum.update(buffer, buffer.readerIndex(), flushableBytes);
    final int check = (int) checksum.getValue();
    // Worst-case compressed size for this block plus the per-block frame header.
    final int bufSize = compressor.maxCompressedLength(flushableBytes) + HEADER_LENGTH;
    out.ensureWritable(bufSize);
    final int idx = out.writerIndex();
    int compressedLength;
    int pos = outNioBuffer.position();
    compressor.compress(buffer.internalNioBuffer(buffer.readerIndex(), flushableBytes), outNioBuffer);
    // Bytes actually produced = advance of the NIO buffer position.
    compressedLength = outNioBuffer.position() - pos;
    } catch (LZ4Exception e) {
/**
 * Finishes the LZ4 stream: flushes any buffered data and writes the end-of-stream
 * trailer — an empty non-compressed block (zero compressed/decompressed lengths and
 * checksum). Idempotent: once {@code finished} is set, later calls just succeed the
 * promise.
 */
private ChannelFuture finishEncode(final ChannelHandlerContext ctx, ChannelPromise promise) {
    if (finished) {
        promise.setSuccess();
        return promise;
    }
    finished = true;

    // Sized to hold both the flushed remainder and the fixed-size trailer header.
    final ByteBuf footer = ctx.alloc().heapBuffer(
            compressor.maxCompressedLength(buffer.readableBytes()) + HEADER_LENGTH);
    flushBufferedData(footer);

    // Write the trailer header fields at fixed offsets from the current writer index.
    final int idx = footer.writerIndex();
    footer.setLong(idx, MAGIC_NUMBER);
    footer.setByte(idx + TOKEN_OFFSET, (byte) (BLOCK_TYPE_NON_COMPRESSED | compressionLevel));
    footer.setInt(idx + COMPRESSED_LENGTH_OFFSET, 0);
    footer.setInt(idx + DECOMPRESSED_LENGTH_OFFSET, 0);
    footer.setInt(idx + CHECKSUM_OFFSET, 0);

    // set*() does not move the writer index; advance it past the trailer explicitly.
    footer.writerIndex(idx + HEADER_LENGTH);

    return ctx.writeAndFlush(footer, promise);
}
/**
 * Allocates an output buffer large enough to hold the worst-case compressed size of the
 * pending data (already-buffered bytes plus the incoming message), block by block.
 *
 * @param allowEmptyReturn when true and less than one full block is pending, returns
 *                         {@link Unpooled#EMPTY_BUFFER} so the caller can defer flushing
 * @throws EncoderException if the total would overflow int or exceed maxEncodeSize
 */
private ByteBuf allocateBuffer(ChannelHandlerContext ctx, ByteBuf msg, boolean preferDirect,
                               boolean allowEmptyReturn) {
    int targetBufSize = 0;
    int remaining = msg.readableBytes() + buffer.readableBytes();

    // quick overflow check
    if (remaining < 0) {
        throw new EncoderException("too much data to allocate a buffer for compression");
    }

    // Sum the worst-case compressed size of each block (plus its header).
    while (remaining > 0) {
        int curSize = Math.min(blockSize, remaining);
        remaining -= curSize;
        // calculate the total compressed size of the current block (including header) and add to the total
        targetBufSize += compressor.maxCompressedLength(curSize) + HEADER_LENGTH;
    }

    // in addition to just the raw byte count, the headers (HEADER_LENGTH) per block (configured via
    // #blockSize) will also add to the targetBufSize, and the combination of those would never wrap around
    // again to be >= 0, this is a good check for the overflow case.
    if (targetBufSize > maxEncodeSize || 0 > targetBufSize) {
        throw new EncoderException(String.format("requested encode buffer size (%d bytes) exceeds the maximum " +
                "allowable size (%d bytes)", targetBufSize, maxEncodeSize));
    }

    // Less than one full block pending: let the caller skip allocation entirely.
    if (allowEmptyReturn && targetBufSize < blockSize) {
        return Unpooled.EMPTY_BUFFER;
    }

    if (preferDirect) {
        return ctx.alloc().ioBuffer(targetBufSize, targetBufSize);
    } else {
        return ctx.alloc().heapBuffer(targetBufSize, targetBufSize);
    }
}
/**
 * Compresses a frame body with LZ4, prefixing the payload with the uncompressed length
 * as a big-endian int written byte-by-byte. Always releases the input frame; releases
 * the output buffer only if compression fails.
 *
 * Assumes the allocator returns an array-backed heap buffer (output.array() is used
 * directly) — TODO confirm CBUtil.allocator guarantees this.
 */
public Frame compress(Frame frame) throws IOException {
    byte[] input = CBUtil.readRawBytes(frame.body);

    int maxCompressedLength = compressor.maxCompressedLength(input.length);
    ByteBuf outputBuf = CBUtil.allocator.heapBuffer(INTEGER_BYTES + maxCompressedLength);

    byte[] output = outputBuf.array();
    int outputOffset = outputBuf.arrayOffset();

    // Big-endian uncompressed-length header, written manually into the backing array.
    output[outputOffset + 0] = (byte) (input.length >>> 24);
    output[outputOffset + 1] = (byte) (input.length >>> 16);
    output[outputOffset + 2] = (byte) (input.length >>> 8);
    output[outputOffset + 3] = (byte) (input.length);

    try {
        int written = compressor.compress(input, 0, input.length, output, outputOffset + INTEGER_BYTES,
                maxCompressedLength);
        // Reflect header + payload in the buffer's readable region.
        outputBuf.writerIndex(INTEGER_BYTES + written);

        return frame.with(outputBuf);
    } catch (final Throwable e) {
        outputBuf.release();
        throw e;
    } finally {
        //release the old frame
        frame.release();
    }
}
public static byte[] compressToLhLz4Chunk(byte[] data) throws LogException { final int rawSize = data.length; LZ4Factory factory = LZ4Factory.fastestInstance(); // compress data LZ4Compressor compressor = factory.fastCompressor(); int maxCompressedLength = compressor.maxCompressedLength(rawSize); int encodingSize = 0; byte[] rawCompressed = new byte[maxCompressedLength]; try { encodingSize = compressor.compress(data, 0, rawSize, rawCompressed, 0, maxCompressedLength); } catch (LZ4Exception e) { throw new LogException("CompressException", e.getMessage(), ""); } if (encodingSize <= 0) { throw new LogException("CompressException", "Invalid enconding size", ""); } byte[] ret = new byte[encodingSize]; System.arraycopy(rawCompressed, 0, ret, 0, encodingSize); return ret; }
// Round-trip example for lz4-java: compress a small byte string, then decompress it two
// ways. NOTE(review): getBytes("UTF-8") throws the checked UnsupportedEncodingException,
// so this snippet presumably lives in a context that declares/handles it — confirm.
LZ4Factory factory = LZ4Factory.fastestInstance();

byte[] data = "12345345234572".getBytes("UTF-8");
final int decompressedLength = data.length;

// compress data
LZ4Compressor compressor = factory.fastCompressor();
int maxCompressedLength = compressor.maxCompressedLength(decompressedLength);
byte[] compressed = new byte[maxCompressedLength];
int compressedLength = compressor.compress(data, 0, decompressedLength, compressed, 0, maxCompressedLength);

// decompress data
// - method 1: when the decompressed length is known
LZ4FastDecompressor decompressor = factory.fastDecompressor();
byte[] restored = new byte[decompressedLength];
// Fast decompressor returns the number of COMPRESSED bytes it consumed.
int compressedLength2 = decompressor.decompress(compressed, 0, restored, 0, decompressedLength);
// compressedLength == compressedLength2

// - method 2: when the compressed length is known (a little slower)
// the destination buffer needs to be over-sized
LZ4SafeDecompressor decompressor2 = factory.safeDecompressor();
// Safe decompressor returns the number of DECOMPRESSED bytes it produced.
int decompressedLength2 = decompressor2.decompress(compressed, 0, compressedLength, restored, 0);
// decompressedLength == decompressedLength2
/**
 * Compresses the remaining bytes of {@code in} into {@code out} using the
 * high-compression codec. The input's position is restored afterwards, so {@code in}
 * can be re-read; {@code out} is cleared first and flipped for reading on return.
 */
@Override
public ByteBuffer compress(ByteBuffer in, ByteBuffer out) {
    out.clear();
    // Remember where the source started so we can leave it untouched for the caller.
    final int mark = in.position();
    lz4High.compress(in, out);
    in.position(mark);
    // Expose the compressed bytes for reading.
    out.flip();
    return out;
}
}
/**
 * Create a new {@link OutputStream} that will compress data using the LZ4 algorithm.
 *
 * @param out The output stream to compress
 * @param blockSize Default: 4. The block size used during compression. 4=64kb, 5=256kb, 6=1mb, 7=4mb. All other
 *            values will generate an exception
 * @param blockChecksum Default: false. When true, a XXHash32 checksum is computed and appended to the stream for
 *            every block of data
 * @param useBrokenFlagDescriptorChecksum Default: false. When true, writes an incorrect FrameDescriptor checksum
 *            compatible with older kafka clients.
 * @throws IOException if writing the frame header to {@code out} fails
 */
public KafkaLZ4BlockOutputStream(OutputStream out, int blockSize, boolean blockChecksum,
                                 boolean useBrokenFlagDescriptorChecksum) throws IOException {
    this.out = out;
    compressor = LZ4Factory.fastestInstance().fastCompressor();
    checksum = XXHashFactory.fastestInstance().hash32();
    this.useBrokenFlagDescriptorChecksum = useBrokenFlagDescriptorChecksum;
    // BD validates the block size code; FLG records the per-block checksum option.
    bd = new BD(blockSize);
    flg = new FLG(blockChecksum);
    bufferOffset = 0;
    maxBlockSize = bd.getBlockMaximumSize();
    buffer = new byte[maxBlockSize];
    // Scratch buffer sized to the worst-case compressed size of one full block.
    compressedBuffer = new byte[compressor.maxCompressedLength(maxBlockSize)];
    finished = false;
    // Side effect: writes the LZ4 frame header to the underlying stream immediately.
    writeHeader();
}
/**
 * LZ4-compresses {@code src} using the fast compressor.
 *
 * @param src bytes to compress; must not be null
 * @return a newly allocated array holding the compressed bytes
 * @throws IllegalArgumentException if {@code src} is null
 */
public static byte[] compress(final byte[] src) {
    if (src == null) {
        throw new IllegalArgumentException("src must not be null.");
    }
    // One-shot convenience overload: the library sizes the output array itself.
    return factory.fastCompressor().compress(src);
}
/**
 * Encodes the message body into {@code buffer} and, when it exceeds the configured
 * compression limit, replaces the encoded body in place with an LZ4-compressed version
 * (only if compression actually saves space after its 4-byte overhead).
 *
 * Wire layout when compressed: [int totalLength][int compressionCode][int uncompressedLength][compressed bytes].
 * NOTE(review): relies on {@code setEncodedBody} populating {@code encodedBody} from the
 * buffer — confirm that side effect in the enclosing class.
 */
protected void encodeAndCompressBody(ByteBuffer buffer, int startPosition) {
    int startOfBody = buffer.position();
    encodeBody(buffer);
    // Capture the just-encoded body bytes (side effect: fills `encodedBody`).
    setEncodedBody(buffer, startOfBody, buffer.position() - startOfBody);
    length = buffer.position() - startPosition;
    if (compressionLimit != 0 && length-4 > compressionLimit) {
        byte[] compressedBody;
        compressionType = CompressionType.LZ4;
        LZ4Factory factory = LZ4Factory.fastestInstance();
        LZ4Compressor compressor = factory.fastCompressor();
        compressedBody = compressor.compress(encodedBody);
        log.log(LogLevel.DEBUG, "Uncompressed size: " + encodedBody.length +
                ", Compressed size: " + compressedBody.length);
        // Only use the compressed form if it wins even after the 4-byte length prefix.
        if (compressedBody.length + 4 < encodedBody.length) {
            // Rewind and rewrite the header, then overwrite the body in place.
            buffer.position(startPosition);
            buffer.putInt(compressedBody.length + startOfBody - startPosition + 4 - 4); // +4 for compressed size
            buffer.putInt(getCompressedCode(compressionType));
            buffer.position(startOfBody);
            buffer.putInt(encodedBody.length);
            buffer.put(compressedBody);
            buffer.limit(buffer.position());
            return;
        }
    }
    buffer.putInt(startPosition, length - 4); // Encoded length 4 less than actual length
    buffer.limit(buffer.position());
}
/**
 * Compresses {@code buf} with the LZ4 high-compression (HC) codec, which trades CPU
 * time for a better compression ratio than the fast codec.
 */
@Override
public byte[] compress(byte[] buf) {
    LZ4Compressor hc = LZ4Factory.fastestInstance().highCompressor();
    return hc.compress(buf);
}
// NOTE(review): truncated snippet — the `if` block and the enclosing method are not
// closed here; code left byte-identical. This looks like lazy initialization of a
// shared SAFE_INSTANCE field with racy publication (benign only if the field is
// volatile or duplicate construction is acceptable) — confirm against the full source.
LZ4Compressor safeInstance = SAFE_INSTANCE;
if (safeInstance == null) {
    safeInstance = SAFE_INSTANCE = LZ4Factory.safeInstance().fastCompressor();
return safeInstance.compress(src, srcOff, srcLen, dest, destOff, maxDestLen);
/**
 * Reflectively loads one LZ4 implementation family ("JNI", "JavaSafe", or similar
 * {@code impl} suffix): fast/high compressors and fast/safe decompressors, plus one HC
 * compressor per compression level. Ends with a compress/decompress self-test so a
 * broken native binding fails fast at construction time.
 *
 * @param impl implementation name infix used to build the concrete class names
 */
private LZ4Factory(String impl) throws ClassNotFoundException, NoSuchFieldException, SecurityException,
        IllegalArgumentException, IllegalAccessException, NoSuchMethodException, InstantiationException,
        InvocationTargetException {
    this.impl = impl;
    // Resolve the four concrete classes for this implementation by naming convention.
    fastCompressor = classInstance("net.jpountz.lz4.LZ4" + impl + "Compressor");
    highCompressor = classInstance("net.jpountz.lz4.LZ4HC" + impl + "Compressor");
    fastDecompressor = classInstance("net.jpountz.lz4.LZ4" + impl + "FastDecompressor");
    safeDecompressor = classInstance("net.jpountz.lz4.LZ4" + impl + "SafeDecompressor");
    // HC compressors take a compression level; instantiate one per supported level,
    // reusing the default instance for DEFAULT_COMPRESSION_LEVEL.
    Constructor<? extends LZ4Compressor> highConstructor =
            highCompressor.getClass().getDeclaredConstructor(int.class);
    highCompressors[DEFAULT_COMPRESSION_LEVEL] = highCompressor;
    for(int level = 1; level <= MAX_COMPRESSION_LEVEL; level++) {
        if(level == DEFAULT_COMPRESSION_LEVEL) continue;
        highCompressors[level] = highConstructor.newInstance(level);
    }

    // quickly test that everything works as expected
    final byte[] original = new byte[] {'a','b','c','d',' ',' ',' ',' ',' ',' ','a','b','c','d','e','f','g','h','i','j'};
    for (LZ4Compressor compressor : Arrays.asList(fastCompressor, highCompressor)) {
        final int maxCompressedLength = compressor.maxCompressedLength(original.length);
        final byte[] compressed = new byte[maxCompressedLength];
        final int compressedLength = compressor.compress(original, 0, original.length, compressed, 0, maxCompressedLength);
        final byte[] restored = new byte[original.length];
        // Fast decompressor needs the known uncompressed length...
        fastDecompressor.decompress(compressed, 0, restored, 0, original.length);
        if (!Arrays.equals(original, restored)) {
            throw new AssertionError();
        }
        Arrays.fill(restored, (byte) 0);
        // ...while the safe decompressor works from the compressed length.
        final int decompressedLength = safeDecompressor.decompress(compressed, 0, compressedLength, restored, 0);
        if (decompressedLength != original.length || !Arrays.equals(original, restored)) {
            throw new AssertionError();
        }
    }
}
/**
 * Compresses some data
 *
 * @param requestedCompression the desired compression type, which will be used if the data is deemed suitable.
 *                             Not all the existing types are actually supported.
 * @param data the data to compress. This array is only read by this method.
 * @param uncompressedSize uncompressedSize the size in bytes of the data array. If this is not present, it is
 *                         assumed that the size is the same as the data array size, i.e that it is completely
 *                         filled with uncompressed data.
 * @return the compression result
 * @throws IllegalArgumentException if the compression type is not supported
 */
public Compression compress(CompressionType requestedCompression, byte[] data, Optional<Integer> uncompressedSize) {
    if (requestedCompression == CompressionType.NONE) {
        // Trim the array to the declared size when one is given; otherwise pass it through.
        byte[] exact = uncompressedSize.map(size -> Arrays.copyOf(data, size)).orElse(data);
        return new Compression(CompressionType.NONE, exact.length, exact);
    }
    if (requestedCompression == CompressionType.LZ4) {
        int dataSize = uncompressedSize.orElse(data.length);
        // Too small to be worth compressing at all.
        if (dataSize < compressMinSizeBytes) {
            return new Compression(CompressionType.INCOMPRESSIBLE, dataSize, data);
        }
        LZ4Compressor compressor = level < 7 ? factory.fastCompressor() : factory.highCompressor();
        byte[] compressedData = compressor.compress(data, 0, dataSize);
        // Reject the compressed form if it does not beat the threshold (8-byte overhead included).
        if (compressedData.length + 8 >= dataSize * compressionThresholdFactor) {
            return new Compression(CompressionType.INCOMPRESSIBLE, dataSize, data);
        }
        return new Compression(CompressionType.LZ4, dataSize, compressedData);
    }
    throw new IllegalArgumentException(requestedCompression + " is not supported");
}

/** Compresses some data using the compression type of this compressor */