/**
 * Reads the next length-prefixed MessagePack binary value into a freshly
 * allocated {@link ByteBuffer}.
 *
 * @return a buffer positioned at 0 with limit == payload length, ready for reading
 * @throws IOException if the underlying unpacker fails to read the header or payload
 */
public ByteBuffer readPayload() throws IOException {
    int length = unpacker.unpackBinaryHeader();
    ByteBuffer buffer = ByteBuffer.allocate(length);
    unpacker.readPayload(buffer);
    // BUG FIX: readPayload(ByteBuffer) advances the buffer's position to its limit,
    // so the caller previously received a buffer with zero remaining bytes.
    // Flip it so position=0 / limit=length and the payload is actually readable.
    buffer.flip();
    return buffer;
}
}
/**
 * Reads the next length-prefixed MessagePack binary value and returns its raw bytes.
 *
 * @return a new array containing exactly the payload bytes of the binary value
 * @throws IOException if the header or payload cannot be read from the unpacker
 */
public byte[] parsePayload() throws IOException {
    final int payloadSize = unpacker.unpackBinaryHeader();
    return unpacker.readPayload(payloadSize);
}
/**
 * Reads the next record from the underlying sequence-file reader and decodes it
 * into a {@code KeyValue}.
 *
 * <p>The record key is a MessagePack map of int keys: {@code KAFKA_MESSAGE_OFFSET}
 * and {@code KAFKA_MESSAGE_TIMESTAMP} map to longs, {@code KAFKA_HASH_KEY} maps to
 * a length-prefixed binary blob. The record value is copied out verbatim.
 *
 * @return the decoded record, or {@code null} when the reader is exhausted
 * @throws IOException if the reader or the MessagePack decoding fails
 */
@Override
public KeyValue next() throws IOException {
    if (!mReader.next(mKey, mValue)) {
        return null;
    }
    long offset = 0;
    long timestamp = -1; // -1 signals "no timestamp present in the key map"
    byte[] keyBytes = EMPTY_BYTES;
    // FIX: try-with-resources — the original leaked the unpacker when any
    // unpack* call threw before reaching the explicit close().
    try (MessageUnpacker unpacker = MessagePack.newDefaultUnpacker(mKey.getBytes())) {
        int mapSize = unpacker.unpackMapHeader();
        for (int i = 0; i < mapSize; i++) {
            int key = unpacker.unpackInt();
            switch (key) {
                case KAFKA_MESSAGE_OFFSET:
                    offset = unpacker.unpackLong();
                    break;
                case KAFKA_MESSAGE_TIMESTAMP:
                    timestamp = unpacker.unpackLong();
                    break;
                case KAFKA_HASH_KEY:
                    int keySize = unpacker.unpackBinaryHeader();
                    keyBytes = new byte[keySize];
                    unpacker.readPayload(keyBytes);
                    break;
                default:
                    // FIX: an unknown key previously left its value unconsumed,
                    // desynchronizing every subsequent key/value read in the map.
                    unpacker.skipValue();
                    break;
            }
        }
    }
    // mValue's backing array may be longer than the logical record; copy only
    // the first getLength() bytes.
    return new KeyValue(offset, keyBytes,
            Arrays.copyOfRange(mValue.getBytes(), 0, mValue.getLength()), timestamp);
}
// Decode one MessagePack binary value: read the length prefix, size the
// destination array to match, then fill it with the payload bytes.
// NOTE(review): `length` and `dst` are declared in the enclosing scope, which
// is not visible in this fragment.
length = unpacker.unpackBinaryHeader(); dst = new byte[length]; unpacker.readPayload(dst);
// Decode one MessagePack binary value: read the length prefix, size the
// destination array to match, then fill it with the payload bytes.
// NOTE(review): `length` and `dst` are declared in the enclosing scope, which
// is not visible in this fragment.
length = unpacker.unpackBinaryHeader(); dst = new byte[length]; unpacker.readPayload(dst);
// BINARY branch of a value-type switch: mark the logical type as BYTES, then
// materialize the raw payload (readPayload(len) returns a new byte[] of exactly
// `len` bytes). The trailing `if` — in-object context while the current token is
// not yet a FIELD_NAME — continues beyond this fragment; presumably it emits the
// field name before the value, but the body is not visible here.
case BINARY: type = Type.BYTES; int len = messageUnpacker.unpackBinaryHeader(); bytesValue = messageUnpacker.readPayload(len); if (parsingContext.inObject() && _currToken != JsonToken.FIELD_NAME) {
// Nested-message pattern: the outer binary payload is itself MessagePack data,
// so re-point `messageUnpacker` at the extracted bytes and iterate the inner values.
// NOTE(review): the previous unpacker instance is replaced without close(); if it
// wraps a stream this may leak a resource — confirm in the enclosing method.
byte[] payload = messageUnpacker.readPayload(messageUnpacker.unpackBinaryHeader()); messageUnpacker = MessagePack.newDefaultUnpacker(payload); while (messageUnpacker.hasNext()) {
// Read a length-prefixed binary payload and store it on the result object.
// NOTE: `var` here is an ordinary identifier (pre-Java-10 style), not the keyword;
// unpackBinaryHeader()/readPayload() are presumably inherited or local helpers —
// their declarations are outside this fragment.
int length = unpackBinaryHeader(); var.setBinaryValue(readPayload(length)); return var;
// Dispatch on the MessagePack value type of the current format byte; in the
// BINARY case, copy the length-prefixed raw bytes into the value holder.
// (Other cases of this switch lie outside the visible fragment.)
switch (format.getValueType()) { case BINARY: int len = unpacker.unpackBinaryHeader(); value.setData(unpacker.readPayload(len)); break;
// Read a length-prefixed binary payload and wrap it as an immutable msgpack Value.
// NOTE(review): the `true` flag appears to be ValueFactory.newBinary's omitCopy
// parameter (take ownership of the array without copying) — confirm against the
// msgpack-java ValueFactory javadoc.
int length = unpackBinaryHeader(); return ValueFactory.newBinary(readPayload(length), true);