private ByteBuf tryDecodeChallenge(ByteBuf in, int offset, int readableBytes) throws IOException { if (readableBytes < 4) { return null; } int len = in.getInt(offset); if (len <= 0) { // fall back to simple in.readerIndex(offset + 4); return in.retainedSlice(offset, 4); } if (len > MAX_CHALLENGE_SIZE) { throw new IOException( "Sasl challenge too large(" + len + "), max allowed is " + MAX_CHALLENGE_SIZE); } int totalLen = 4 + len; if (readableBytes < totalLen) { return null; } in.readerIndex(offset + totalLen); return in.retainedSlice(offset, totalLen); }
// Consume the entire error frame before surfacing it; presumably className/msg were
// parsed from the frame just consumed — TODO confirm against the enclosing method.
in.readerIndex(offset + totalLen); throw new RemoteException(className, msg);
// Decodes one SASL status word plus its payload. Leaves the buffer untouched
// (returns early) until a complete frame is available.
@Override protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
  // Need at least the 4-byte status word.
  int readableBytes = in.readableBytes(); if (readableBytes < 4) { return; }
  int offset = in.readerIndex(); int status = in.getInt(offset);
  if (status == SaslStatus.SUCCESS.state) {
    // Success status: the remainder is a challenge frame; null means "incomplete, retry later".
    ByteBuf challenge = tryDecodeChallenge(in, offset + 4, readableBytes - 4);
    if (challenge != null) { out.add(challenge); }
  } else {
    // Non-success: the payload carries error details — NOTE(review): presumably this
    // throws once the full error frame has arrived; confirm against tryDecodeError.
    tryDecodeError(in, offset + 4, readableBytes - 4);
  } } }
/**
 * Reads the next {@code headerSize} bytes from {@code in} and parses them as a
 * protobuf {@link RPCProtos.RequestHeader}.
 *
 * @param in inbound buffer positioned at the start of the serialized header
 * @param headerSize number of bytes the serialized header occupies
 * @return the parsed request header
 * @throws IOException if the bytes do not form a valid header message
 */
private RPCProtos.RequestHeader getHeader(ByteBuf in, int headerSize) throws IOException {
  ByteBuf headerBuf = in.readRetainedSlice(headerSize);
  try {
    final int len = headerBuf.readableBytes();
    final byte[] bytes;
    final int off;
    if (headerBuf.hasArray()) {
      // Heap-backed buffer: parse directly from the backing array, no copy.
      bytes = headerBuf.array();
      off = headerBuf.arrayOffset() + headerBuf.readerIndex();
    } else {
      // Direct or composite buffer: copy the readable region to a temporary array.
      bytes = new byte[len];
      headerBuf.getBytes(headerBuf.readerIndex(), bytes, 0, len);
      off = 0;
    }
    RPCProtos.RequestHeader.Builder builder = RPCProtos.RequestHeader.newBuilder();
    ProtobufUtil.mergeFrom(builder, bytes, off, len);
    return builder.build();
  } finally {
    // The retained slice must always be released, parse success or not.
    headerBuf.release();
  }
}
// Peek the frame length without moving the reader index; read as *unsigned* so a
// length with the high bit set is not misinterpreted as negative.
long frameLength = in.getUnsignedInt(in.readerIndex());
// Handles a request whose declared size exceeds the allowed maximum.
// (Fragment — the remainder of the method is outside this view.)
private void handleTooBigRequest(ByteBuf in) throws IOException {
  // Mark so the stream can be rewound if the varint header is incomplete.
  in.markReaderIndex();
  int preIndex = in.readerIndex();
  int headerSize = readRawVarint32(in);
  // If the reader index did not advance, readRawVarint32 saw an incomplete
  // varint — return and wait for more bytes.
  if (preIndex == in.readerIndex()) { return;
// Ensure the destination can hold the trailing partial chunk, then copy it across.
// NOTE(review): assumes the partial chunk occupies the last trailingPartialChunkLength
// bytes of the dataLen-byte region — confirm against the surrounding method.
.ensureWritable(trailingPartialChunkLength); if (trailingPartialChunkLength != 0) { buf.readerIndex(dataLen - trailingPartialChunkLength).readBytes(newBuf, trailingPartialChunkLength);
private ByteBuf tryDecodeChallenge(ByteBuf in, int offset, int readableBytes) throws IOException { if (readableBytes < 4) { return null; } int len = in.getInt(offset); if (len <= 0) { // fall back to simple in.readerIndex(offset + 4); return in.retainedSlice(offset, 4); } if (len > MAX_CHALLENGE_SIZE) { throw new IOException( "Sasl challenge too large(" + len + "), max allowed is " + MAX_CHALLENGE_SIZE); } int totalLen = 4 + len; if (readableBytes < totalLen) { return null; } in.readerIndex(offset + totalLen); return in.retainedSlice(offset, totalLen); }
// Decodes one SASL status word plus its payload, returning early (buffer untouched)
// until a full frame is buffered.
@Override protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
  // At minimum the 4-byte status int must be present.
  int readableBytes = in.readableBytes(); if (readableBytes < 4) { return; }
  int offset = in.readerIndex(); int status = in.getInt(offset);
  if (status == SaslStatus.SUCCESS.state) {
    // Challenge follows the status word; null signals an incomplete frame.
    ByteBuf challenge = tryDecodeChallenge(in, offset + 4, readableBytes - 4);
    if (challenge != null) { out.add(challenge); }
  } else {
    // Error payload — NOTE(review): presumably throws once the complete error
    // frame is available; confirm against tryDecodeError's definition.
    tryDecodeError(in, offset + 4, readableBytes - 4);
  } } }
// Consume the full error frame, then report it to the caller as a RemoteException;
// className/msg presumably come from the frame just consumed — TODO confirm.
in.readerIndex(offset + totalLen); throw new RemoteException(className, msg);
/**
 * Repositions the reader index of the wrapped buffer.
 *
 * @return this wrapper (not the delegate), so call chains stay on the wrapper
 */
@Override
public final ByteBuf readerIndex(int readerIndex) {
  // Delegate the index move, but preserve fluent chaining on this instance.
  buf.readerIndex(readerIndex);
  return this;
}
/**
 * Reports how many bytes may still be read.
 *
 * <p>Before termination the stream is treated as unbounded, so the count is capped
 * only by how far the reader index can still advance toward {@code Integer.MAX_VALUE};
 * after termination the wrapped buffer's real readable count is returned.
 */
@Override
public int readableBytes() {
  return terminated ? buffer.readableBytes() : Integer.MAX_VALUE - buffer.readerIndex();
}
/**
 * Base64-decodes the entire readable region of the inbound buffer and passes the
 * decoded buffer downstream.
 */
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
  final int start = msg.readerIndex();
  final int length = msg.readableBytes();
  out.add(Base64.decode(msg, start, length, dialect));
}
}
/**
 * Base64-encodes the entire readable region of the outbound buffer and passes the
 * encoded buffer along, honoring the configured line-breaking and dialect settings.
 */
@Override
protected void encode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
  final int start = msg.readerIndex();
  final int length = msg.readableBytes();
  out.add(Base64.encode(msg, start, length, breakLines, dialect));
}
}
/**
 * Builds a close-frame payload: a 2-byte status code followed by the reason text
 * encoded as UTF-8 (omitted entirely when the reason is empty).
 *
 * @param statusCode close status code written as an unsigned short
 * @param reasonText optional reason; {@code null} is treated as empty
 * @return a fresh buffer with its reader index rewound to the start
 */
private static ByteBuf newBinaryData(int statusCode, String reasonText) {
  // Normalize null to the shared empty string rather than mutating the parameter.
  final String reason = (reasonText == null) ? StringUtil.EMPTY_STRING : reasonText;
  // Initial capacity is a char-count estimate; the buffer grows if UTF-8 needs more.
  ByteBuf binaryData = Unpooled.buffer(2 + reason.length());
  binaryData.writeShort(statusCode);
  if (!reason.isEmpty()) {
    binaryData.writeCharSequence(reason, CharsetUtil.UTF_8);
  }
  binaryData.readerIndex(0);
  return binaryData;
}
/**
 * Appends a human-readable, multi-line hexadecimal dump of the readable region of
 * {@code buf} to the given {@link StringBuilder}.
 */
public static void appendPrettyHexDump(StringBuilder dump, ByteBuf buf) {
  // Delegate to the ranged overload covering exactly the readable bytes.
  appendPrettyHexDump(dump, buf, buf.readerIndex(), buf.readableBytes());
}
/**
 * Returns a copy of the buffer's readable bytes as a {@code byte[]}.
 *
 * <p>Uses an absolute {@code getBytes}, so the buffer's reader index is left untouched.
 * When no buffer is present, the shared empty array is returned.
 */
@Override
public byte[] get() {
  if (byteBuf == null) {
    return EMPTY_BUFFER.array();
  }
  final int length = byteBuf.readableBytes();
  final byte[] copy = new byte[length];
  byteBuf.getBytes(byteBuf.readerIndex(), copy);
  return copy;
}
/**
 * Recomputes the checksum over the uncompressed buffer's readable bytes and verifies
 * it against the checksum carried in the stream.
 *
 * @throws DecompressionException if the computed and expected checksums differ
 */
static void checkChecksum(ByteBufChecksum checksum, ByteBuf uncompressed, int currentChecksum) {
  checksum.reset();
  checksum.update(uncompressed, uncompressed.readerIndex(), uncompressed.readableBytes());
  final int computed = (int) checksum.getValue();
  if (computed == currentChecksum) {
    return; // checksum matches — stream intact
  }
  throw new DecompressionException(String.format(
    "stream corrupted: mismatching checksum: %d (expected: %d)", computed, currentChecksum));
}
public ReadOnlyByteBuf(ByteBuf buffer) { super(buffer.maxCapacity()); if (buffer instanceof ReadOnlyByteBuf || buffer instanceof DuplicatedByteBuf) { this.buffer = buffer.unwrap(); } else { this.buffer = buffer; } setIndex(buffer.readerIndex(), buffer.writerIndex()); }
/**
 * Encodes a raw DNS record: the common record header first, then the payload length
 * as an unsigned short, then the payload bytes themselves.
 *
 * <p>Uses a ranged {@code writeBytes}, so the record's content buffer keeps its
 * reader index unchanged.
 */
private void encodeRawRecord(DnsRawRecord record, ByteBuf out) throws Exception {
  encodeRecord0(record, out);
  final ByteBuf payload = record.content();
  final int payloadLength = payload.readableBytes();
  out.writeShort(payloadLength);
  out.writeBytes(payload, payload.readerIndex(), payloadLength);
}