/**
 * Initializes the mock schema registry and the Avro serializer/deserializer
 * shared by the tests.
 */
private void initKafka() {
    schemaRegistryClient = new MockSchemaRegistryClient();
    kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    // NOTE(review): the original built a local Properties with
    // KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG = "bogus" but never
    // passed it to any (de)serializer; the dead local has been removed. If the
    // intent was to configure kafkaAvroDeserializer, that still needs doing.
    avroSerializer = new KafkaAvroSerializer(schemaRegistryClient);
}
/**
 * Serializes the given Avro record for topic {@code "topic"} with a
 * serializer backed by the shared {@code schemaRegistryClient}.
 */
private byte[] serializeAvroRecord(final GenericData.Record avroRecord) {
    final Map<String, String> serializerConfig = new HashMap<>();
    serializerConfig.put("schema.registry.url", "localhost:9092");
    final KafkaAvroSerializer serializer =
        new KafkaAvroSerializer(schemaRegistryClient, serializerConfig);
    return serializer.serialize("topic", avroRecord);
}
/** Round-trips a generic record through the Confluent serializer and deserializer. */
@Test
public void testConfluentSerDes() throws Exception {
    final org.apache.avro.Schema schema =
        new org.apache.avro.Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
    final GenericRecord record = new GenericRecordBuilder(schema)
        .set("field1", "some value")
        .set("field2", "some other value")
        .build();

    // Both ends share the same config, pointing at the test registry endpoint.
    final Map<String, Object> serdeConfig = new HashMap<>();
    serdeConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
        rootTarget.getUri().toString());

    final KafkaAvroSerializer serializer = new KafkaAvroSerializer();
    serializer.configure(serdeConfig, false);
    final byte[] bytes = serializer.serialize("topic", record);

    final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer();
    deserializer.configure(serdeConfig, false);
    final GenericRecord result = (GenericRecord) deserializer.deserialize("topic", bytes);
    LOG.info(result.toString());
}
.set("timestamp", 1467176316L) .build(); GenericRecord output = secorSchemaRegistryClient.decodeMessage("test-avr-topic", avroSerializer.serialize("test-avr-topic", record1)); assertEquals(secorSchemaRegistryClient.getSchema("test-avr-topic"), schemaV1); assertEquals(output.get("data_field_1"), 1); assertEquals(output.get("timestamp"), 1467176315L); output = secorSchemaRegistryClient.decodeMessage("test-avr-topic", avroSerializer.serialize("test-avr-topic", record2)); assertEquals(secorSchemaRegistryClient.getSchema("test-avr-topic"), schemaV2); assertEquals(output.get("data_field_1"), 1);
/** Forwards the Kafka (de)serializer configuration straight to the wrapped {@code inner} instance. */
@Override public void configure(Map<String, ?> configs, boolean isKey) { inner.configure(configs, isKey); }
/** Releases the wrapped instance's resources by delegating to {@code inner}. */
@Override public void close() { inner.close(); } }
io.confluent.kafka.serializers.KafkaAvroSerializer kafkaAvroSerializer = new io.confluent.kafka.serializers.KafkaAvroSerializer(); kafkaAvroSerializer.configure(confluentConfig, false); byte[] bytes = kafkaAvroSerializer.serialize("topic", record);
/** Serializes {@code record} for {@code topic} by delegating to the wrapped {@code inner} serializer. */
@Override public byte[] serialize(String topic, T record) { return inner.serialize(topic, record); }
/**
 * Configures the wrapped deserializer, forcing specific-record (as opposed to
 * generic) Avro decoding on top of whatever the caller supplied.
 */
@Override
@SuppressWarnings("unchecked")
public void configure(Map<String, ?> configs, boolean isKey) {
    // Copy so the caller's map is never mutated.
    final Map<String, Object> withSpecificReader = new HashMap<>(configs);
    withSpecificReader.put(SPECIFIC_AVRO_READER_CONFIG, true);
    inner.configure(withSpecificReader, isKey);
}
/** Releases the wrapped instance's resources by delegating to {@code inner}. */
@Override public void close() { inner.close(); } }
/**
 * Encodes a record with the Confluent serializer (backed by a mocked registry)
 * and verifies the extractor decodes it back to an equal record.
 */
@Test
public void testConfluentAvroDeserializer() throws IOException, RestClientException {
    WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L);
    mockWorkUnitState.setProp("schema.registry.url", TEST_URL);

    Schema schema = SchemaBuilder.record(TEST_RECORD_NAME)
        .namespace(TEST_NAMESPACE).fields()
        .name(TEST_FIELD_NAME).type().stringType().noDefault()
        .endRecord();
    GenericRecord record =
        new GenericRecordBuilder(schema).set(TEST_FIELD_NAME, "testValue").build();

    // Registry mock resolves every schema id to the single test schema.
    SchemaRegistryClient registryClient = mock(SchemaRegistryClient.class);
    when(registryClient.getByID(any(Integer.class))).thenReturn(schema);
    Serializer<Object> encoder = new KafkaAvroSerializer(registryClient);
    Deserializer<Object> decoder = new KafkaAvroDeserializer(registryClient);

    ByteBuffer payload = ByteBuffer.wrap(encoder.serialize(TEST_TOPIC_NAME, record));

    KafkaSchemaRegistry<Integer, Schema> schemaRegistry = mock(KafkaSchemaRegistry.class);
    KafkaDeserializerExtractor extractor = new KafkaDeserializerExtractor(mockWorkUnitState,
        Optional.fromNullable(Deserializers.CONFLUENT_AVRO), decoder, schemaRegistry);

    ByteArrayBasedKafkaRecord messageAndOffset = getMockMessageAndOffset(payload);
    Assert.assertEquals(extractor.decodeRecord(messageAndOffset), record);
}
/**
 * Serializes {@code avroRecord} with the Confluent serializer and decodes the
 * bytes back into a {@link GenericRow} via the KSQL Avro serde.
 */
private GenericRow serializeDeserializeAvroRecord(final Schema schema,
    final String topicName,
    final SchemaRegistryClient schemaRegistryClient,
    final GenericRecord avroRecord) {
    final Map<String, Object> serializerConfig = new HashMap<>();
    serializerConfig.put(AbstractKafkaAvroSerDeConfig.AUTO_REGISTER_SCHEMAS, true);
    serializerConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "");

    final byte[] serialized =
        new KafkaAvroSerializer(schemaRegistryClient, serializerConfig)
            .serialize(topicName, avroRecord);

    final Deserializer<GenericRow> rowDeserializer =
        new KsqlAvroTopicSerDe(KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME)
            .getGenericRowSerde(schema, ksqlConfig, false, () -> schemaRegistryClient, "loggerName")
            .deserializer();
    return rowDeserializer.deserialize(topicName, serialized);
}
/** Serializes the generic Avro {@code record} for {@code topic} by delegating to the wrapped {@code inner} serializer. */
@Override public byte[] serialize(String topic, GenericRecord record) { return inner.serialize(topic, record); }
/**
 * Sets up the two producers the tests exercise: the Confluent serializer and
 * the legacy {@code KafkaAvroEncoder}, both pointed at the same registry URL.
 */
public AvroMessageDecoderTest() {
    avroSerializer.configure(
        Collections.singletonMap(
            KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081"),
        false);

    Properties encoderProps = new Properties();
    encoderProps.setProperty("schema.registry.url", "http://localhost:8081");
    avroEncoder = new KafkaAvroEncoder(schemaRegistry, new VerifiableProperties(encoderProps));
}
/**
 * Verifies schema evolution: a record written with schema V1 is read back under
 * schema V2 (which adds an optional string field), and the new field comes out null.
 */
@Test public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException { WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L); mockWorkUnitState.setProp("schema.registry.url", TEST_URL); // Writer schema: a single required string field. Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME) .namespace(TEST_NAMESPACE).fields() .name(TEST_FIELD_NAME).type().stringType().noDefault() .endRecord(); // Reader schema: same field plus an optional second string field. Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME) .namespace(TEST_NAMESPACE).fields() .name(TEST_FIELD_NAME).type().stringType().noDefault() .optionalString(TEST_FIELD_NAME2).endRecord(); GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build(); // The mocked registry always resolves ids to the writer schema (V1). SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class); when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1); Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient); Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient); ByteBuffer testGenericRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord)); KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class); when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2); KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry); // NOTE(review): when(...) is applied to a real (non-mock) extractor here; Mockito's // when() normally requires a mock/spy method invocation — confirm this works // (e.g. via the stubbed mockKafkaSchemaRegistry) or wrap the extractor in spy(). when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2); ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer); GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset); // Field added in V2 is absent in the payload, so it must decode as null. Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}"); }
/**
 * Verifies the decoder rejects a payload written with a schema version newer
 * than the one it was created against, with the exact expected error message.
 */
@Test public void testAvroDecoderFailure() { String topic = "testAvro"; Object avroRecordV1 = createAvroRecordVersion1(); byte[] payloadV1 = avroSerializer.serialize(topic, avroRecordV1); AvroMessageDecoder decoder = createAvroDecoder(topic, true, schemaRegistry); // Decoding the V1 payload first pins the decoder to the V1 schema. decoder.decode(payloadV1); Object avroRecordV2 = createAvroRecordVersion2(); byte[] payloadV2 = avroSerializer.serialize(topic, avroRecordV2); try { decoder.decode(payloadV2); fail("AvroMessageDecoder should not be able to decode Avro record with new schema version"); } catch (MessageDecoderException e) { // Message text is part of the contract being asserted here. assertEquals(e.getMessage(), "Producer schema is newer than the schema known to Camus"); } } }
/**
 * Constructor used by Kafka Streams; creates the wrapped Confluent serializer
 * unconfigured — {@code configure} must be called before use.
 */
public SpecificAvroSerializer() { inner = new KafkaAvroSerializer(); }
/**
 * Verifies a backward-compatible decoder handles payloads from both the old
 * (V1) and the new (V2) schema versions.
 */
@Test
public void testAvroDecoderCompatible() {
    String topic = "testAvro";
    byte[] oldPayload = avroSerializer.serialize(topic, createAvroRecordVersion1());
    Object newRecord = createAvroRecordVersion2();
    byte[] newPayload = avroSerializer.serialize(topic, newRecord);

    AvroMessageDecoder decoder = createAvroDecoder(topic, true, schemaRegistry);
    try {
        decoder.decode(oldPayload).getRecord();
    } catch (MessageDecoderException e) {
        fail("Backward compatible schema should be able to decode Avro records with old schema");
    }
    assertEquals(newRecord, decoder.decode(newPayload).getRecord());
}
/**
 * Constructor used by Kafka Streams; creates the wrapped Confluent serializer
 * unconfigured — {@code configure} must be called before use.
 */
public GenericAvroSerializer() { inner = new KafkaAvroSerializer(); }
/**
 * Round-trips one record through both producer paths: the Confluent
 * serializer and the legacy {@code avroEncoder}, each against a decoder
 * created with the matching flag.
 */
@Test
public void testAvroDecoder() {
    String topic = "testAvro";
    Object expected = createAvroRecordVersion1();

    // Path 1: Confluent serializer + decoder created with flag=true.
    byte[] serializedPayload = avroSerializer.serialize(topic, expected);
    AvroMessageDecoder decoder = createAvroDecoder(topic, true, schemaRegistry);
    assertEquals(expected, decoder.decode(serializedPayload).getRecord());

    // Path 2: legacy encoder + decoder created with flag=false.
    byte[] encodedPayload = avroEncoder.toBytes(expected);
    AvroMessageDecoder legacyDecoder = createAvroDecoder(topic, false, schemaRegistry);
    assertEquals(expected, legacyDecoder.decode(encodedPayload).getRecord());
}