/**
 * Initializes the Avro decoder backed by the shared schema registry client.
 *
 * @param config Secor configuration (not consulted by this initializer)
 */
protected void init(SecorConfig config) {
    deserializer = new KafkaAvroDeserializer(schemaRegistryClient);
}
/**
 * Builds a JSON serializer configured for either record keys or values.
 *
 * @param isKey whether the serializer will handle keys ({@code true}) or values
 * @param <T>   payload type handled by the serializer
 * @return a configured {@code KafkaJsonSerializer}
 */
public static <T> Serializer<T> getJsonSerializer(final boolean isKey) {
    final Serializer<T> serializer = new KafkaJsonSerializer<>();
    // No extra properties are required for JSON serialization; only the
    // key/value role needs to be supplied at configuration time.
    serializer.configure(Collections.emptyMap(), isKey);
    return serializer;
}
/**
 * Decodes a Confluent-framed Avro message and records the writer schema seen
 * for the topic.
 *
 * @param topic   topic the message was consumed from
 * @param message raw Confluent wire-format bytes
 * @return the decoded Avro record
 */
public GenericRecord decodeMessage(String topic, byte[] message) {
    final GenericRecord decoded = (GenericRecord) deserializer.deserialize(topic, message);
    // Cache the schema actually used for this message so later per-topic
    // lookups reflect the most recently observed writer schema.
    schemas.put(topic, decoded.getSchema());
    return decoded;
}
// NOTE(review): this excerpt is truncated — the assertThat(...) call is cut off after
// `equalTo`; confirm the remainder of the assertion against the full source.
// Round-trips `serializedRow` through a registry-backed Avro deserializer and checks
// the decoded record is non-null. The field name is upper-cased, presumably because
// the generated Avro schema uses upper-case field names — TODO confirm.
final KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient); final GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow); Assert.assertNotNull(genericRecord); assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo
/**
 * Sets up in-memory Kafka serde fixtures: a mock schema registry plus an Avro
 * serializer/deserializer pair that share the same registry instance, so
 * serialized records can be decoded without a live registry.
 */
private void initKafka() {
    schemaRegistryClient = new MockSchemaRegistryClient();
    kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    avroSerializer = new KafkaAvroSerializer(schemaRegistryClient);
    // NOTE(review): the original also built a local Properties object with
    // KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG = "bogus", but it was
    // never passed to any serde — dead code, removed here.
}
/**
 * Serializes an Avro record with a registry-backed serializer.
 *
 * @param avroRecord record to serialize under topic "topic"
 * @return Confluent wire-format bytes for the record
 */
private byte[] serializeAvroRecord(final GenericData.Record avroRecord) {
    final Map<String, String> serializerProps = new HashMap<>();
    // A registry URL must be supplied even though the injected client is used directly.
    serializerProps.put("schema.registry.url", "localhost:9092");
    return new KafkaAvroSerializer(schemaRegistryClient, serializerProps)
        .serialize("topic", avroRecord);
}
// Verifies that the CONFLUENT_JSON path round-trips a POJO: the record is encoded
// with KafkaJsonSerializer, then decoded by a KafkaDeserializerExtractor wrapping a
// KafkaJsonDeserializer whose target type is set via the json.value.type property.
// The property must be set on the WorkUnitState before either serde is configure()d,
// so the statement order here is significant and left untouched.
@Test public void testConfluentJsonDeserializer() throws IOException { WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L); mockWorkUnitState.setProp("json.value.type", KafkaRecord.class.getName()); KafkaRecord testKafkaRecord = new KafkaRecord("Hello World"); Serializer<KafkaRecord> kafkaEncoder = new KafkaJsonSerializer<>(); kafkaEncoder.configure(PropertiesUtils.propsToStringKeyMap(mockWorkUnitState.getProperties()), false); Deserializer<KafkaRecord> kafkaDecoder = new KafkaJsonDeserializer<>(); kafkaDecoder.configure(PropertiesUtils.propsToStringKeyMap(mockWorkUnitState.getProperties()), false); ByteBuffer testKafkaRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testKafkaRecord)); KafkaSchemaRegistry<?, ?> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class); KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_JSON), kafkaDecoder, mockKafkaSchemaRegistry); ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testKafkaRecordByteBuffer); Assert.assertEquals(kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset), testKafkaRecord); }
// NOTE(review): fragment — the record-builder chain closed by .build() below starts
// before this excerpt. Decodes two serialized records against the same topic and
// asserts that getSchema(topic) tracks the writer schema (schemaV1, then schemaV2)
// observed by the most recent decodeMessage call. The first decode presumably uses
// record1 (timestamp 1467176315L) built outside this excerpt — TODO confirm.
.set("timestamp", 1467176316L) .build(); GenericRecord output = secorSchemaRegistryClient.decodeMessage("test-avr-topic", avroSerializer.serialize("test-avr-topic", record1)); assertEquals(secorSchemaRegistryClient.getSchema("test-avr-topic"), schemaV1); assertEquals(output.get("data_field_1"), 1); assertEquals(output.get("timestamp"), 1467176315L); output = secorSchemaRegistryClient.decodeMessage("test-avr-topic", avroSerializer.serialize("test-avr-topic", record2)); assertEquals(secorSchemaRegistryClient.getSchema("test-avr-topic"), schemaV2); assertEquals(output.get("data_field_1"), 1);
/**
 * Builds a JSON deserializer bound to a concrete target type.
 *
 * @param classs target class instances should be deserialized into
 * @param isKey  whether the deserializer handles record keys or values
 * @param <T>    deserialized payload type
 * @return a configured {@code KafkaJsonDeserializer}
 */
public static <T> Deserializer<T> getJsonDeserializer(
        final Class<T> classs, final boolean isKey) {
    // Key and value deserializers advertise their target type under different
    // configuration properties.
    final String typeProperty = isKey
        ? KafkaJsonDeserializerConfig.JSON_KEY_TYPE
        : KafkaJsonDeserializerConfig.JSON_VALUE_TYPE;
    final Deserializer<T> deserializer = new KafkaJsonDeserializer<>();
    deserializer.configure(Collections.singletonMap(typeProperty, classs), isKey);
    return deserializer;
}
// NOTE(review): truncated excerpt (identical to an earlier fragment) — the
// assertThat(...) call is cut off after `equalTo`; confirm the full assertion in the
// complete source. Decodes `serializedRow` via a registry-backed Avro deserializer;
// the field name is upper-cased, presumably matching an upper-case Avro schema — TODO confirm.
final KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient); final GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow); Assert.assertNotNull(genericRecord); assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo
// Verifies the CONFLUENT_AVRO path: a GenericRecord is serialized with
// KafkaAvroSerializer against a mocked SchemaRegistryClient (getByID stubbed to
// return the writer schema), then decoded by the extractor and compared to the
// original record. The getByID stub must be installed before the serializer/
// deserializer are used, so statement order is significant and left untouched.
@Test public void testConfluentAvroDeserializer() throws IOException, RestClientException { WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L,10L); mockWorkUnitState.setProp("schema.registry.url", TEST_URL); Schema schema = SchemaBuilder.record(TEST_RECORD_NAME) .namespace(TEST_NAMESPACE).fields() .name(TEST_FIELD_NAME).type().stringType().noDefault() .endRecord(); GenericRecord testGenericRecord = new GenericRecordBuilder(schema).set(TEST_FIELD_NAME, "testValue").build(); SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class); when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schema); Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient); Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient); ByteBuffer testGenericRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord)); KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class); KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry); ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer); Assert.assertEquals(kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset), testGenericRecord); }
/**
 * Round-trips an Avro record through the KSQL serde stack: serializes it with a
 * registry-backed Avro serializer, then decodes the bytes into a GenericRow.
 *
 * @param schema               KSQL (Connect) schema for the row
 * @param topicName            topic used for both serialize and deserialize
 * @param schemaRegistryClient registry shared by serializer and deserializer
 * @param avroRecord           record to round-trip
 * @return the row decoded by the KSQL Avro deserializer
 */
private GenericRow serializeDeserializeAvroRecord(
        final Schema schema,
        final String topicName,
        final SchemaRegistryClient schemaRegistryClient,
        final GenericRecord avroRecord) {
    // Auto-register the writer schema; the URL is unused because the client is injected.
    final Map<String, Object> serdeConfig = new HashMap<>();
    serdeConfig.put(AbstractKafkaAvroSerDeConfig.AUTO_REGISTER_SCHEMAS, true);
    serdeConfig.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "");
    final byte[] serialized =
        new KafkaAvroSerializer(schemaRegistryClient, serdeConfig)
            .serialize(topicName, avroRecord);
    final Deserializer<GenericRow> rowDeserializer =
        new KsqlAvroTopicSerDe(KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME)
            .getGenericRowSerde(schema, ksqlConfig, false, () -> schemaRegistryClient, "loggerName")
            .deserializer();
    return rowDeserializer.deserialize(topicName, serialized);
}
/**
 * Creates a formatter that decodes Avro-encoded values via the given schema registry.
 *
 * @param schemaRegistryClient client used to resolve writer schemas; must not be null
 * @param topicName            topic whose records this formatter renders; must not be null
 */
public RecordFormatter(final SchemaRegistryClient schemaRegistryClient, final String topicName) {
    this.topicName = Objects.requireNonNull(topicName, "topicName");
    // Fail fast on a null client here rather than deep inside the deserializer,
    // matching the null-check style already applied to topicName.
    this.avroDeserializer = new KafkaAvroDeserializer(
        Objects.requireNonNull(schemaRegistryClient, "schemaRegistryClient"));
}
/**
 * Probes whether the record's value deserializes as Confluent-framed Avro and,
 * if so, returns an Avro formatter for the topic.
 *
 * @param topicName        topic the record came from
 * @param record           consumer record whose value is probed
 * @param avroDeserializer deserializer used for the trial decode
 * @param dateFormat       format passed through to the created formatter
 * @return a formatter when the value decodes as Avro, otherwise empty
 */
@Override
public Optional<Formatter> maybeGetFormatter(
        final String topicName,
        final ConsumerRecord<String, Bytes> record,
        final KafkaAvroDeserializer avroDeserializer,
        final DateFormat dateFormat) {
    try {
        // Trial decode only — the result is discarded; success means "this is Avro".
        avroDeserializer.deserialize(topicName, record.value().get());
        return Optional.of(createFormatter(topicName, avroDeserializer, dateFormat));
    } catch (final Throwable t) {
        // Deliberately broad catch: any failure on arbitrary input bytes simply
        // means this formatter does not apply.
        return Optional.empty();
    }
}
/**
 * For internal topics (useNamedFields = true), the serde must keep the source
 * prefix but replace the '.' delimiter with '_' so the name is Avro-legal, and
 * the round trip back to a GenericRow must be lossless.
 */
@Test
public void shouldTransformSourceNameDelimiterForInternal() {
    final Schema rowSchema = SchemaBuilder.struct()
        .field("source.field0", Schema.OPTIONAL_INT32_SCHEMA)
        .build();
    final GenericRow inputRow = new GenericRow(ImmutableList.of(123));
    final Serde<GenericRow> rowSerde =
        new KsqlAvroTopicSerDe(KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME).getGenericRowSerde(
            rowSchema, new KsqlConfig(Collections.emptyMap()), true,
            () -> schemaRegistryClient, "loggerName");

    final byte[] serialized = rowSerde.serializer().serialize("topic", inputRow);

    // Inspect the raw Avro view: the '.' in "source.field0" becomes '_'.
    final KafkaAvroDeserializer avroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    final GenericRecord decodedAvro = (GenericRecord) avroDeserializer.deserialize("topic", serialized);
    assertThat(decodedAvro.getSchema().getFields().size(), equalTo(1));
    assertThat(decodedAvro.getSchema().getFields().get(0).name(), equalTo("source_field0"));
    assertThat(decodedAvro.get("source_field0"), equalTo(123));

    // The KSQL-side round trip restores the original row exactly.
    final GenericRow roundTripped = rowSerde.deserializer().deserialize("topic", serialized);
    assertThat(roundTripped, equalTo(inputRow));
}
// Verifies schema evolution through the CONFLUENT_AVRO path: a record written with
// schemaV1 is decoded while the registry reports schemaV2 (adds an optional string
// field), and the decoded record carries the new field defaulted to null.
// NOTE(review): `when(kafkaDecoderExtractor.getSchema())...` is called on a real
// object, not a Mockito mock/spy — it only works because getSchema() happens to hit
// the inner mockKafkaSchemaRegistry, and it duplicates the getLatestSchemaByTopic
// stub above. Fragile; consider removing or converting the extractor to a spy.
// Statement order (stubs before use) is significant and left untouched.
@Test public void testConfluentAvroDeserializerForSchemaEvolution() throws IOException, RestClientException, SchemaRegistryException { WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 10L); mockWorkUnitState.setProp("schema.registry.url", TEST_URL); Schema schemaV1 = SchemaBuilder.record(TEST_RECORD_NAME) .namespace(TEST_NAMESPACE).fields() .name(TEST_FIELD_NAME).type().stringType().noDefault() .endRecord(); Schema schemaV2 = SchemaBuilder.record(TEST_RECORD_NAME) .namespace(TEST_NAMESPACE).fields() .name(TEST_FIELD_NAME).type().stringType().noDefault() .optionalString(TEST_FIELD_NAME2).endRecord(); GenericRecord testGenericRecord = new GenericRecordBuilder(schemaV1).set(TEST_FIELD_NAME, "testValue").build(); SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class); when(mockSchemaRegistryClient.getByID(any(Integer.class))).thenReturn(schemaV1); Serializer<Object> kafkaEncoder = new KafkaAvroSerializer(mockSchemaRegistryClient); Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient); ByteBuffer testGenericRecordByteBuffer = ByteBuffer.wrap(kafkaEncoder.serialize(TEST_TOPIC_NAME, testGenericRecord)); KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class); when(mockKafkaSchemaRegistry.getLatestSchemaByTopic(TEST_TOPIC_NAME)).thenReturn(schemaV2); KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry); when(kafkaDecoderExtractor.getSchema()).thenReturn(schemaV2); ByteArrayBasedKafkaRecord mockMessageAndOffset = getMockMessageAndOffset(testGenericRecordByteBuffer); GenericRecord received = (GenericRecord) kafkaDecoderExtractor.decodeRecord(mockMessageAndOffset); Assert.assertEquals(received.toString(), "{\"testField\": \"testValue\", \"testField2\": null}"); }
// When the low/high watermark gap is zero there is nothing to consume, so the
// extractor should not query the schema registry.
// NOTE(review): the verify(...) runs BEFORE kafkaDecoderExtractor.getSchema(), so it
// only proves construction didn't hit the registry; nothing asserts that the
// subsequent getSchema() call avoided it. A second verify after getSchema() (or
// moving this one below it) would match the test name — confirm intent.
@Test public void testConfluentShouldNotQuerySchemaRegistryWhenTheGapIsZero() throws IOException, RestClientException, SchemaRegistryException { WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 0L); mockWorkUnitState.setProp("schema.registry.url", TEST_URL); SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class); Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient); KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class); KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(mockWorkUnitState, Optional.fromNullable(Deserializers.CONFLUENT_AVRO), kafkaDecoder, mockKafkaSchemaRegistry); verify(mockKafkaSchemaRegistry, never()).getLatestSchemaByTopic(any()); kafkaDecoderExtractor.getSchema(); }
/**
 * For external topics (useNamedFields = false), the serde must strip the source
 * prefix from "source.field0", leaving plain "field0" in the Avro schema, and the
 * round trip back to a GenericRow must be lossless.
 */
@Test
public void shouldRemoveSourceName() {
    final Schema rowSchema = SchemaBuilder.struct()
        .field("source.field0", Schema.OPTIONAL_INT32_SCHEMA)
        .build();
    final GenericRow inputRow = new GenericRow(ImmutableList.of(123));
    final Serde<GenericRow> rowSerde =
        new KsqlAvroTopicSerDe(KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME).getGenericRowSerde(
            rowSchema, new KsqlConfig(Collections.emptyMap()), false,
            () -> schemaRegistryClient, "loggerName");

    final byte[] serialized = rowSerde.serializer().serialize("topic", inputRow);

    // Inspect the raw Avro view: the "source." prefix is gone, only "field0" remains.
    final KafkaAvroDeserializer avroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    final GenericRecord decodedAvro = (GenericRecord) avroDeserializer.deserialize("topic", serialized);
    assertThat(decodedAvro.getSchema().getFields().size(), equalTo(1));
    assertThat(decodedAvro.get("field0"), equalTo(123));

    // The KSQL-side round trip restores the original row exactly.
    final GenericRow roundTripped = rowSerde.deserializer().deserialize("topic", serialized);
    assertThat(roundTripped, equalTo(inputRow));
}
// Fragment: `bytes` and `schemaRegistryClient` are defined outside this excerpt.
// Decodes the serialized payload back into a generic Avro record for inspection.
final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(schemaRegistryClient); final GenericRecord avroRecord = (GenericRecord) deserializer.deserialize("topic", bytes);
// Fragment (duplicate of the previous excerpt): `bytes` and `schemaRegistryClient`
// come from outside this view. Decodes the payload into a generic Avro record.
final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(schemaRegistryClient); final GenericRecord avroRecord = (GenericRecord) deserializer.deserialize("topic", bytes);