Refine search
// Decompresses `encoded` into a newly-allocated pooled buffer of exactly
// `uncompressedLength` bytes. The caller owns (and must release) the returned buffer.
@Override
public ByteBuf decode(ByteBuf encoded, int uncompressedLength) throws IOException {
    // Fixed-capacity buffer: initial capacity == max capacity, so it can never grow.
    ByteBuf uncompressed = PooledByteBufAllocator.DEFAULT.buffer(uncompressedLength, uncompressedLength);
    // NOTE(review): assumes nioBuffer() returns a view sharing memory with the
    // ByteBuf (true for plain pooled buffers), so decompressing into
    // uncompressedNio fills `uncompressed` directly — verify for composite buffers.
    ByteBuffer uncompressedNio = uncompressed.nioBuffer(0, uncompressedLength);
    ByteBuffer encodedNio = encoded.nioBuffer(encoded.readerIndex(), encoded.readableBytes());
    decompressor.decompress(encodedNio, encodedNio.position(), uncompressedNio, uncompressedNio.position(),
            uncompressedNio.remaining());
    // Writing through the NIO view does not advance the ByteBuf's writer index;
    // set it explicitly so readers see the decompressed bytes.
    uncompressed.writerIndex(uncompressedLength);
    return uncompressed;
}
}
/**
 * Supplies the data-buffer factories under test: every combination of
 * {unpooled, pooled} allocator x {preferDirect = true, false}.
 */
@Parameterized.Parameters(name = "{0}")
public static Object[][] buffers() {
    Object[][] factories = new Object[4][];
    factories[0] = new Object[]{new NettyDataBufferFactory(new UnpooledByteBufAllocator(true))};
    factories[1] = new Object[]{new NettyDataBufferFactory(new UnpooledByteBufAllocator(false))};
    factories[2] = new Object[]{new NettyDataBufferFactory(new PooledByteBufAllocator(true))};
    factories[3] = new Object[]{new NettyDataBufferFactory(new PooledByteBufAllocator(false))};
    return factories;
}
// Zstd-decompresses `encoded` into a fresh pooled direct buffer of exactly
// `uncompressedLength` bytes. The caller owns (and must release) the result.
@Override
public ByteBuf decode(ByteBuf encoded, int uncompressedLength) throws IOException {
    ByteBuf uncompressed = PooledByteBufAllocator.DEFAULT.directBuffer(uncompressedLength, uncompressedLength);
    if (encoded.hasMemoryAddress()) {
        // Fast path: the source buffer exposes a stable native address, and the
        // destination is direct, so we hand raw pointers to zstd and skip the
        // NIO wrapper objects entirely.
        Zstd.decompressUnsafe(uncompressed.memoryAddress(), uncompressedLength,
                encoded.memoryAddress() + encoded.readerIndex(), encoded.readableBytes());
    } else {
        // Slow path: go through ByteBuffer views of the readable/writable regions.
        ByteBuffer uncompressedNio = uncompressed.nioBuffer(0, uncompressedLength);
        ByteBuffer encodedNio = encoded.nioBuffer(encoded.readerIndex(), encoded.readableBytes());
        Zstd.decompress(uncompressedNio, encodedNio);
    }
    // Writes went through native pointers / NIO views, which do not move the
    // writer index; set it explicitly so the decompressed bytes are readable.
    uncompressed.writerIndex(uncompressedLength);
    return uncompressed;
}
}
// NOTE(review): this fragment is truncated mid-statement — the rest of the
// checksum/metadata framing is not visible here; do not treat it as complete.
int payloadSize = payload.readableBytes();
// Crc32c framing adds a 2-byte magic number plus a 4-byte checksum field.
int magicAndChecksumLength = ChecksumType.Crc32c.equals(checksumType) ? (2 + 4 /* magic + checksumLength*/) : 0;
boolean includeChecksum = magicAndChecksumLength > 0;
// NOTE(review): totalSize does not add magicAndChecksumLength here —
// presumably headerContentSize already accounts for it; confirm against caller.
int totalSize = headerContentSize + payloadSize;
// Fixed-capacity buffer sized for header content + payload.
ByteBuf metadataAndPayload = PooledByteBufAllocator.DEFAULT.buffer(totalSize, totalSize);
try {
    ByteBufCodedOutputStream outStream = ByteBufCodedOutputStream.get(metadataAndPayload);
    metadataAndPayload.writeShort(magicCrc32c);
    // Remember where the checksum will be written so it can be patched in later.
    checksumReaderIndex = metadataAndPayload.writerIndex();
    metadataAndPayload.writerIndex(metadataAndPayload.writerIndex()
// Decompresses `encoded` into a new pooled heap buffer of exactly
// `uncompressedLength` bytes.
// NOTE(review): this fragment is truncated — the non-array branch and the
// remainder of the method are not visible here.
@Override
public ByteBuf decode(ByteBuf encoded, int uncompressedLength) throws IOException {
    ByteBuf uncompressed = PooledByteBufAllocator.DEFAULT.heapBuffer(uncompressedLength, uncompressedLength);
    int len = encoded.readableBytes();
    if (encoded.hasArray()) {
        // Backing array is directly accessible: reference it without copying.
        array = encoded.array();
        offset = encoded.arrayOffset() + encoded.readerIndex();
    } else {
// Copies `entryBuf` into a freshly-allocated direct cache buffer, restoring the
// source buffer's reader index afterwards so the caller's view is unchanged.
// NOTE(review): fragment is truncated/garbled — the copy statements appear
// inside the catch block here, where `cachedData` would still be null if
// allocation threw (NPE on writeBytes). The original presumably returns early
// from the catch and performs the copy after the try; confirm upstream.
ByteBuf cachedData = null;
try {
    cachedData = ALLOCATOR.directBuffer(size, size);
} catch (Throwable t) {
    log.warn("[{}] Failed to allocate buffer for entry cache: {}", ml.getName(), t.getMessage(), t);
    int readerIdx = entryBuf.readerIndex();
    cachedData.writeBytes(entryBuf);
    entryBuf.readerIndex(readerIdx);
/**
 * Serializes the compressed form once into a scratch buffer and caches the
 * resulting bytes in {@code precompressed} for reuse. The scratch buffer is
 * always released, even if compression throws.
 */
public void precompress() {
    final ByteBuf scratch = PooledByteBufAllocator.DEFAULT.directBuffer();
    try {
        compress(scratch);
        final byte[] snapshot = new byte[scratch.readableBytes()];
        scratch.readBytes(snapshot);
        precompressed = snapshot;
    } finally {
        scratch.release();
    }
}
/**
 * Prefixes {@code valBuf}'s readable bytes with an 8-byte revision number.
 * Returned layout: [revision:int64][value bytes].
 */
static ByteBuf serialize(ByteBuf valBuf, long revision) {
    final int size = Long.BYTES + valBuf.readableBytes();
    final ByteBuf out = PooledByteBufAllocator.DEFAULT.heapBuffer(size);
    out.writeLong(revision);
    out.writeBytes(valBuf);
    return out;
}
private static ByteBufPair serializeCommandMessageWithSize(BaseCommand cmd, ByteBuf metadataAndPayload) { // / Wire format // [TOTAL_SIZE] [CMD_SIZE][CMD] [MAGIC_NUMBER][CHECKSUM] [METADATA_SIZE][METADATA] [PAYLOAD] // // metadataAndPayload contains from magic-number to the payload included int cmdSize = cmd.getSerializedSize(); int totalSize = 4 + cmdSize + metadataAndPayload.readableBytes(); int headersSize = 4 + 4 + cmdSize; ByteBuf headers = PooledByteBufAllocator.DEFAULT.buffer(headersSize); headers.writeInt(totalSize); // External frame try { // Write cmd headers.writeInt(cmdSize); ByteBufCodedOutputStream outStream = ByteBufCodedOutputStream.get(headers); cmd.writeTo(outStream); outStream.recycle(); } catch (IOException e) { // This is in-memory serialization, should not fail throw new RuntimeException(e); } return (ByteBufPair) ByteBufPair.get(headers, metadataAndPayload); }
/**
 * Re-encodes the Proton message into a temporary heap buffer, then swaps
 * {@code data} to wrap a defensive copy of those bytes and marks the encoded
 * cache valid. The scratch buffer is always released.
 */
private void encodeProtonMessage() {
    final int sizeHint = (data == null) ? 1500 : Math.max(1500, data.capacity() + 1000);
    final ByteBuf scratch = PooledByteBufAllocator.DEFAULT.heapBuffer(sizeHint);
    try {
        getProtonMessage().encode(new NettyWritable(scratch));
        final byte[] encoded = new byte[scratch.writerIndex()];
        scratch.readBytes(encoded);
        data = ReadableBuffer.ByteBufferReader.wrap(ByteBuffer.wrap(encoded));
        bufferValid = true;
    } finally {
        scratch.release();
    }
}
/**
 * Returns a copy of the encoded message with the delivery-annotations section
 * removed, by stitching together [0, headerEnds) and [messagePaylodStart, end)
 * from the original encoding.
 */
private ReadableBuffer createCopyWithoutDeliveryAnnotations() {
    // Only meaningful when an annotations section actually sits between the
    // header and the payload.
    assert headerEnds != messagePaylodStart;
    // The original message had delivery annotations and so we must copy into a new
    // buffer skipping the delivery annotations section as that is not meant to survive
    // beyond this hop.
    ReadableBuffer duplicate = data.duplicate();
    final ByteBuf result = PooledByteBufAllocator.DEFAULT.heapBuffer(getEncodeSize());
    // Copy everything up to the end of the header...
    result.writeBytes(duplicate.limit(headerEnds).byteBuffer());
    // ...then reset the duplicate's limit/position and copy from the start of
    // the payload onward, skipping the annotations in between.
    duplicate.clear();
    duplicate.position(messagePaylodStart);
    result.writeBytes(duplicate.byteBuffer());
    return new NettyReadable(result);
}
/**
 * Concatenates the given byte arrays and returns their SHA-256 digest, or
 * {@code null} when no SHA-256 implementation is available.
 *
 * Fix: the scratch buffer is now released in a finally block, so it is no
 * longer leaked when {@code writeBytes} or {@code digest.update} throws — the
 * original only released on the success path.
 */
private byte[] hashSHA256( byte[]... message ) {
    Hash digest = getSHA256();
    if ( digest == null ) {
        return null;
    }
    ByteBuf buf = PooledByteBufAllocator.DEFAULT.directBuffer();
    try {
        for ( byte[] bytes : message ) {
            buf.writeBytes( bytes );
        }
        digest.update( buf );
    } finally {
        buf.release();
    }
    return digest.digest();
}
// Appends one message to the current batch: chains its send callback onto the
// previous one and serializes its payload into the shared batch buffer.
void add(MessageImpl<?> msg, SendCallback callback) {
    if (log.isDebugEnabled()) {
        log.debug("[{}] [{}] add message to batch, num messages in batch so far {}", topicName, producerName,
                numMessagesInBatch);
    }
    if (++numMessagesInBatch == 1) {
        // some properties are common amongst the different messages in the batch, hence we just pick it up from
        // the first message
        sequenceId = Commands.initBatchMessageMetadata(messageMetadata, msg.getMessageBuilder());
        this.firstCallback = callback;
        // Lazily allocate the shared buffer on the first message; the max
        // capacity is PulsarDecoder.MaxMessageSize so it can grow up to the
        // protocol limit.
        batchedMessageMetadataAndPayload = PooledByteBufAllocator.DEFAULT
                .buffer(Math.min(maxBatchSize, MAX_MESSAGE_BATCH_SIZE_BYTES), PulsarDecoder.MaxMessageSize);
    }
    // Chain callbacks so that completing the batch send notifies every caller.
    if (previousCallback != null) {
        previousCallback.addCallback(msg, callback);
    }
    previousCallback = callback;
    currentBatchSizeBytes += msg.getDataBuffer().readableBytes();
    PulsarApi.MessageMetadata.Builder msgBuilder = msg.getMessageBuilder();
    // NOTE(review): the field is reassigned from the return value — presumably
    // serialization can hand back a different (grown/replacement) buffer; confirm.
    batchedMessageMetadataAndPayload = Commands.serializeSingleMessageInBatchWithPayload(msgBuilder,
            msg.getDataBuffer(), batchedMessageMetadataAndPayload);
    messages.add(msg);
    msgBuilder.recycle();
}
/**
 * Serializes {@code value} prefixed by an 8-byte revision number.
 * Returned layout: [revision:int64][value bytes].
 */
static ByteBuf serialize(byte[] value, long revision) {
    final int total = Long.BYTES + value.length;
    final ByteBuf out = PooledByteBufAllocator.DEFAULT.heapBuffer(total);
    out.writeLong(revision);
    out.writeBytes(value);
    return out;
}
/**
 * Reads up to {@code len} bytes of the requested block into a fixed-capacity
 * pooled buffer and wraps it as a data buffer. The buffer is released if
 * anything fails before ownership is handed off.
 */
@Override
protected DataBuffer getDataBuffer(BlockReadRequestContext context, StreamObserver<ReadResponse> response,
        long offset, int len) throws Exception {
    openBlock(context, response);
    BlockReader blockReader = context.getBlockReader();
    Preconditions.checkState(blockReader != null);
    ByteBuf buf = PooledByteBufAllocator.DEFAULT.buffer(len, len);
    try {
        // Keep pulling from the reader until the buffer is full or EOF (-1).
        boolean eof = false;
        while (!eof && buf.writableBytes() > 0) {
            eof = blockReader.transferTo(buf) == -1;
        }
        return new NettyDataBuffer(buf);
    } catch (Throwable e) {
        buf.release();
        throw e;
    }
}
/**
 * Copies {@code data} into a newly-allocated pooled direct buffer sized to
 * fit exactly. The caller is responsible for releasing the returned buffer.
 */
public static ByteBuf allocate( byte[] data ) {
    final ByteBuf copy = PooledByteBufAllocator.DEFAULT.directBuffer( data.length );
    copy.writeBytes( data );
    return copy;
}
/**
 * Runs {@code function} with a temporary fixed-capacity direct buffer of
 * {@code maxSerializedEventSize} bytes, guaranteeing the buffer is released
 * afterwards — even when the function (or the allocation itself) throws.
 */
private <U> U withBuffer(final Function<ByteBuf, U> function) {
    ByteBuf scratch = null;
    try {
        scratch = bufferAllocator.directBuffer(maxSerializedEventSize, maxSerializedEventSize);
        return function.apply(scratch);
    } finally {
        // scratch stays null if allocation failed; nothing to release then.
        if (scratch != null) {
            scratch.release();
        }
    }
}
@VisibleForTesting public static ByteBuf serializeWithSize(BaseCommand.Builder cmdBuilder) { // / Wire format // [TOTAL_SIZE] [CMD_SIZE][CMD] BaseCommand cmd = cmdBuilder.build(); int cmdSize = cmd.getSerializedSize(); int totalSize = cmdSize + 4; int frameSize = totalSize + 4; ByteBuf buf = PooledByteBufAllocator.DEFAULT.buffer(frameSize, frameSize); // Prepend 2 lengths to the buffer buf.writeInt(totalSize); buf.writeInt(cmdSize); ByteBufCodedOutputStream outStream = ByteBufCodedOutputStream.get(buf); try { cmd.writeTo(outStream); } catch (IOException e) { // This is in-memory serialization, should not fail throw new RuntimeException(e); } finally { cmd.recycle(); cmdBuilder.recycle(); outStream.recycle(); } return buf; }
@Override public void onMessage(Delivery delivery) throws ActiveMQAMQPException { ByteBuf buffer = PooledByteBufAllocator.DEFAULT.heapBuffer(1024); try { synchronized (connection.getLock()) { readDelivery(receiver, buffer); MessageImpl clientMessage = decodeMessageImpl(buffer); // This second method could be better // clientMessage.decode(buffer.nioBuffer()); receiver.advance(); delivery.disposition(Accepted.getInstance()); queues.add(clientMessage); } } finally { buffer.release(); } }