/**
 * Creates a handler for the metadata-id serdes protocol, delegating the actual
 * Avro payload (de)serialization to a {@link DefaultAvroSerDesHandler}.
 */
public SchemaMetadataIdProtocolHandler() {
    super(SerDesProtocolHandlerRegistry.METADATA_ID_VERSION_PROTOCOL,
          new DefaultAvroSerDesHandler());
}
/**
 * Selects the Avro {@link DatumReader} to use for decoding a payload.
 *
 * @param writerSchema          schema the payload was written with (never null)
 * @param readerSchema          schema to project the payload onto; may be null,
 *                              in which case a reader schema is derived
 * @param useSpecificAvroReader when true, returns a {@link SpecificDatumReader}
 *                              (generated SpecificRecord classes); otherwise a
 *                              {@link GenericDatumReader}
 * @return a datum reader configured with the writer/reader schema pair
 */
private DatumReader getDatumReader(Schema writerSchema, Schema readerSchema, boolean useSpecificAvroReader) {
    if (useSpecificAvroReader) {
        // Avoid reassigning the parameter: resolve the effective reader schema locally.
        Schema effectiveReaderSchema =
                (readerSchema != null) ? readerSchema : this.getReaderSchema(writerSchema);
        return new SpecificDatumReader(writerSchema, effectiveReaderSchema);
    }
    // Generic path: a null reader schema means "read with the writer schema as-is".
    return readerSchema == null
            ? new GenericDatumReader(writerSchema)
            : new GenericDatumReader(writerSchema, readerSchema);
}
@Override public Object handlePayloadDeserialization(InputStream payloadInputStream, Schema writerSchema, Schema readerSchema, boolean useSpecificAvroReader) { Object deserializedObj; Schema.Type writerSchemaType = writerSchema.getType(); try { if (Schema.Type.BYTES.equals(writerSchemaType)) { // serializer writes byte array directly without going through avro encoder layers. deserializedObj = IOUtils.toByteArray(payloadInputStream); } else if (Schema.Type.STRING.equals(writerSchemaType)) { // generate UTF-8 string object from the received bytes. deserializedObj = new String(IOUtils.toByteArray(payloadInputStream), AvroUtils.UTF_8); } else { DatumReader datumReader = getDatumReader(writerSchema, readerSchema, useSpecificAvroReader); deserializedObj = datumReader.read(null, DecoderFactory.get().binaryDecoder(payloadInputStream, null)); } } catch (IOException e) { throw new AvroRetryableException(e); } catch (Exception e) { throw new AvroException(e); } return deserializedObj; }
/**
 * Creates a handler for the protocol that encodes the schema version id as a
 * long, delegating payload (de)serialization to a {@link DefaultAvroSerDesHandler}.
 */
public SchemaVersionIdAsLongProtocolHandler() {
    super(SerDesProtocolHandlerRegistry.VERSION_ID_AS_LONG_PROTOCOL,
          new DefaultAvroSerDesHandler());
}
/**
 * Creates a handler for the protocol that encodes the schema version id as an
 * int. Keeps a long-based handler as a delegate for the cases it forwards to.
 */
public SchemaVersionIdAsIntProtocolHandler() {
    super(SerDesProtocolHandlerRegistry.VERSION_ID_AS_INT_PROTOCOL,
          new DefaultAvroSerDesHandler());
    delegate = new SchemaVersionIdAsLongProtocolHandler();
}
/**
 * Round-trips a record through the Kafka Avro serde with custom header names
 * for the key/value schema version id, for both the key and value positions,
 * and verifies: the serialized bytes match a plain Avro serialization, only
 * the header matching the current position (key vs value) is set, and the
 * record deserializes back to an equal object.
 *
 * @param customKeySchemaHeaderName   header name to use for the key schema id
 * @param customValueSchemaHeaderName header name to use for the value schema id
 */
private void testSchemaHeaderNames(String customKeySchemaHeaderName,
                                   String customValueSchemaHeaderName) {
    TestRecord payload = new TestRecord();
    payload.setField1("Hello");
    payload.setField2("World");

    Map<String, Object> serdeConfig = new HashMap<>();
    serdeConfig.put(KafkaAvroSerde.KEY_SCHEMA_VERSION_ID_HEADER_NAME, customKeySchemaHeaderName);
    serdeConfig.put(KafkaAvroSerde.VALUE_SCHEMA_VERSION_ID_HEADER_NAME, customValueSchemaHeaderName);
    serdeConfig.put(KafkaAvroSerializer.STORE_SCHEMA_VERSION_ID_IN_HEADER, "true");
    serdeConfig.put(AbstractAvroSnapshotDeserializer.SPECIFIC_AVRO_READER, true);

    // Reference serialization produced without the serde, used to check that
    // storing the schema id in headers leaves the payload bytes untouched.
    ByteArrayOutputStream expectedPayload = new ByteArrayOutputStream();
    AvroSerDesHandler plainHandler = new DefaultAvroSerDesHandler();
    plainHandler.handlePayloadSerialization(expectedPayload, payload);

    for (boolean isKey : new boolean[]{true, false}) {
        KafkaAvroSerde serde = new KafkaAvroSerde(schemaRegistryClient);

        final ExtendedSerializer<Object> serializer = serde.extendedSerializer();
        serializer.configure(serdeConfig, isKey);
        Headers recordHeaders = new RecordHeaders();
        final byte[] serialized = serializer.serialize(topic, recordHeaders, payload);
        Assert.assertArrayEquals(expectedPayload.toByteArray(), serialized);

        // Exactly one of the two headers must be present, matching the position.
        Assert.assertEquals(isKey, recordHeaders.lastHeader(customKeySchemaHeaderName) != null);
        Assert.assertEquals(!isKey, recordHeaders.lastHeader(customValueSchemaHeaderName) != null);

        final ExtendedDeserializer<Object> deserializer = serde.extendedDeserializer();
        deserializer.configure(serdeConfig, isKey);
        final TestRecord roundTripped = (TestRecord) deserializer.deserialize(topic, recordHeaders, serialized);
        Assert.assertEquals(payload, roundTripped);
    }
}