/**
 * Initializes this decoder with an Avro deserializer backed by the
 * pre-configured {@code schemaRegistryClient}.
 *
 * @param config Secor configuration (not consulted here; the deserializer is
 *               built purely from the schema-registry client field)
 */
protected void init(SecorConfig config) {
    final KafkaAvroDeserializer registryBackedDeserializer =
        new KafkaAvroDeserializer(schemaRegistryClient);
    deserializer = registryBackedDeserializer;
}
/**
 * Decodes a Confluent-framed Avro message into a {@link GenericRecord} and
 * caches the record's writer schema for the topic.
 *
 * @param topic   Kafka topic the message came from
 * @param message raw message bytes in Confluent wire format
 * @return the decoded record, or {@code null} for a null/tombstone payload
 */
public GenericRecord decodeMessage(String topic, byte[] message) {
    GenericRecord record = (GenericRecord) deserializer.deserialize(topic, message);
    if (record == null) {
        // Fix: the deserializer returns null for null/tombstone payloads; the
        // previous version called record.getSchema() unconditionally and threw NPE.
        return null;
    }
    // Cache the writer schema so later per-topic lookups avoid the registry.
    schemas.put(topic, record.getSchema());
    return record;
}
// Decode the serialized row with a registry-backed Avro deserializer, then assert
// on the upper-cased "ordertime" field.
// NOTE(review): this snippet is truncated — the assertThat(...) call breaks off after
// `equalTo` with no expected value; the closing argument and parentheses are missing.
final KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient); final GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow); Assert.assertNotNull(genericRecord); assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo
/**
 * Round-trips a generic Avro record through the Confluent serializer and
 * deserializer, both pointed at the embedded schema-registry endpoint, and
 * verifies the decoded record equals the original.
 */
@Test
public void testConfluentSerDes() throws Exception {
    org.apache.avro.Schema schema =
        new org.apache.avro.Schema.Parser().parse(GENERIC_TEST_RECORD_SCHEMA);
    GenericRecord record = new GenericRecordBuilder(schema)
        .set("field1", "some value")
        .set("field2", "some other value")
        .build();

    // Serializer and deserializer share the same registry configuration.
    Map<String, Object> config = new HashMap<>();
    config.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
        rootTarget.getUri().toString());

    KafkaAvroSerializer kafkaAvroSerializer = new KafkaAvroSerializer();
    kafkaAvroSerializer.configure(config, false);
    byte[] bytes = kafkaAvroSerializer.serialize("topic", record);

    KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer();
    kafkaAvroDeserializer.configure(config, false);
    GenericRecord result = (GenericRecord) kafkaAvroDeserializer.deserialize("topic", bytes);
    LOG.info(result.toString());

    // Fix: the test previously only logged the result and asserted nothing.
    if (!record.equals(result)) {
        throw new AssertionError(
            "Round-trip mismatch: expected " + record + " but got " + result);
    }
}
/**
 * Builds a Kafka Avro deserializer wired to the configured schema-registry URL.
 *
 * @return a deserializer configured in value mode (isKey = false)
 */
private KafkaAvroDeserializer getDeserializer() {
    final Map<String, Object> deserializerProps = new HashMap<>();
    deserializerProps.put(
        AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);

    final KafkaAvroDeserializer result = new KafkaAvroDeserializer();
    result.configure(deserializerProps, false);
    return result;
}
// Pure delegation: hand the caller's configuration straight to the wrapped deserializer.
@Override public void configure(Map<String, ?> configs, boolean isKey) { inner.configure(configs, isKey); }
// Releases resources held by the wrapped deserializer; the final brace closes the enclosing class.
@Override public void close() { inner.close(); } }
// Duplicate of the earlier decode-and-assert snippet for topic "t1".
// NOTE(review): truncated mid-expression — the assertThat(...) call ends at `equalTo`
// with no expected-value argument; the statement is syntactically incomplete.
final KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient); final GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow); Assert.assertNotNull(genericRecord); assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo
// Build a fully-qualified Confluent deserializer, configure it in value mode, decode
// the bytes, and log the decoded record.
// NOTE(review): relies on `confluentConfig`, `bytes`, and `LOG` defined earlier in the
// enclosing method, which is outside this view.
io.confluent.kafka.serializers.KafkaAvroDeserializer confluentKafkaAvroDeserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer(); confluentKafkaAvroDeserializer.configure(confluentConfig, false); GenericRecord confluentResult = (GenericRecord) confluentKafkaAvroDeserializer.deserialize("topic", bytes); LOG.info(confluentResult.toString());
/**
 * Configures the wrapped deserializer, forcing specific-record decoding on top
 * of whatever configuration the caller supplied.
 */
@Override
@SuppressWarnings("unchecked")
public void configure(Map<String, ?> configs, boolean isKey) {
    // Copy the caller's config so the override below never mutates their map.
    final Map<String, Object> augmentedConfigs = new HashMap<>();
    augmentedConfigs.putAll(configs);
    augmentedConfigs.put(SPECIFIC_AVRO_READER_CONFIG, true);
    inner.configure(augmentedConfigs, isKey);
}
// Releases resources held by the wrapped deserializer; the final brace closes the enclosing class.
@Override public void close() { inner.close(); } }
/**
 * Verifies that a '.'-delimited source field name is rewritten with '_' in the
 * Avro wire schema when the internal flag is set, and that the serde still
 * round-trips the row unchanged.
 */
@Test
public void shouldTransformSourceNameDelimiterForInternal() {
    final Schema rowSchema = SchemaBuilder.struct()
        .field("source.field0", Schema.OPTIONAL_INT32_SCHEMA)
        .build();
    final GenericRow row = new GenericRow(ImmutableList.of(123));

    final Serde<GenericRow> rowSerde =
        new KsqlAvroTopicSerDe(KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME).getGenericRowSerde(
            rowSchema,
            new KsqlConfig(Collections.emptyMap()),
            true,
            () -> schemaRegistryClient,
            "loggerName");

    final byte[] serialized = rowSerde.serializer().serialize("topic", row);

    // Decode with a raw Avro deserializer to inspect the wire schema directly.
    final KafkaAvroDeserializer rawDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    final GenericRecord wireRecord = (GenericRecord) rawDeserializer.deserialize("topic", serialized);

    assertThat(wireRecord.getSchema().getFields().size(), equalTo(1));
    assertThat(wireRecord.getSchema().getFields().get(0).name(), equalTo("source_field0"));
    assertThat(wireRecord.get("source_field0"), equalTo(123));

    // The serde itself must still reproduce the original row.
    final GenericRow roundTripped = rowSerde.deserializer().deserialize("topic", serialized);
    assertThat(roundTripped, equalTo(row));
}
// Serialize the record, then decode it back with a fully-qualified Confluent
// deserializer (value mode) and log the result.
// NOTE(review): relies on `kafkaAvroSerializer`, `record`, `confluentConfig`, and `LOG`
// defined earlier in the enclosing method, which is outside this view.
byte[] bytes = kafkaAvroSerializer.serialize("topic", record); io.confluent.kafka.serializers.KafkaAvroDeserializer confluentKafkaAvroDeserializer = new io.confluent.kafka.serializers.KafkaAvroDeserializer(); confluentKafkaAvroDeserializer.configure(confluentConfig, false); GenericRecord confluentResult = (GenericRecord) confluentKafkaAvroDeserializer.deserialize("topic", bytes); LOG.info(confluentResult.toString());
/**
 * Creates a formatter for records on the given topic, decoding Avro payloads
 * via the supplied schema-registry client.
 *
 * @param schemaRegistryClient client used to resolve writer schemas; must not be null
 * @param topicName            topic whose records will be formatted; must not be null
 */
public RecordFormatter(final SchemaRegistryClient schemaRegistryClient, final String topicName) {
    this.topicName = Objects.requireNonNull(topicName, "topicName");
    // Consistency fix: the original null-checked topicName but not the client.
    // Validate both so a null client fails fast with a clear message.
    this.avroDeserializer = new KafkaAvroDeserializer(
        Objects.requireNonNull(schemaRegistryClient, "schemaRegistryClient"));
}
/**
 * Probes whether the record's value deserializes as Confluent Avro; if so,
 * returns a formatter for the topic, otherwise {@link Optional#empty()}.
 *
 * <p>The deliberately broad {@code Throwable} catch treats any failure of the
 * probe (including a missing value) as "this formatter does not apply".
 */
@Override
public Optional<Formatter> maybeGetFormatter(
    final String topicName,
    final ConsumerRecord<String, Bytes> record,
    final KafkaAvroDeserializer avroDeserializer,
    final DateFormat dateFormat) {
  try {
    final byte[] candidatePayload = record.value().get();
    avroDeserializer.deserialize(topicName, candidatePayload);
    return Optional.of(createFormatter(topicName, avroDeserializer, dateFormat));
  } catch (final Throwable probeFailure) {
    // Not decodable as Avro (or formatter construction failed) — opt out.
    return Optional.empty();
  }
}
/**
 * Verifies that the source-name prefix is stripped from field names in the
 * Avro wire schema when the internal flag is false, and that the serde still
 * round-trips the row unchanged.
 */
@Test
public void shouldRemoveSourceName() {
    final Schema rowSchema = SchemaBuilder.struct()
        .field("source.field0", Schema.OPTIONAL_INT32_SCHEMA)
        .build();
    final GenericRow row = new GenericRow(ImmutableList.of(123));

    final Serde<GenericRow> rowSerde =
        new KsqlAvroTopicSerDe(KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME).getGenericRowSerde(
            rowSchema,
            new KsqlConfig(Collections.emptyMap()),
            false,
            () -> schemaRegistryClient,
            "loggerName");

    final byte[] serialized = rowSerde.serializer().serialize("topic", row);

    // Decode with a raw Avro deserializer to inspect the wire schema directly.
    final KafkaAvroDeserializer rawDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    final GenericRecord wireRecord = (GenericRecord) rawDeserializer.deserialize("topic", serialized);

    assertThat(wireRecord.getSchema().getFields().size(), equalTo(1));
    assertThat(wireRecord.get("field0"), equalTo(123));

    // The serde itself must still reproduce the original row.
    final GenericRow roundTripped = rowSerde.deserializer().deserialize("topic", serialized);
    assertThat(roundTripped, equalTo(row));
}
/**
 * Sets up an in-memory mock schema registry and an Avro serializer/deserializer
 * pair that share the same mock registry client.
 */
private void initKafka() {
    schemaRegistryClient = new MockSchemaRegistryClient();
    kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
    // NOTE(review): the previous version built a local Properties object with a
    // bogus schema.registry.url but never passed it to configure() or anything
    // else — dead code, removed. If explicit configuration was intended, call
    // kafkaAvroDeserializer.configure(...) here instead.
    avroSerializer = new KafkaAvroSerializer(schemaRegistryClient);
}
// Delegates to the wrapped deserializer and casts the result to the expected record
// type; the cast is unchecked because the inner deserializer returns Object.
@SuppressWarnings("unchecked") @Override public T deserialize(String s, byte[] bytes) { return (T) inner.deserialize(s, bytes); }
// Decode the serialized bytes with a registry-backed Avro deserializer.
// NOTE(review): relies on `schemaRegistryClient` and `bytes` defined earlier in the
// enclosing method, which is outside this view.
final KafkaAvroDeserializer deserializer = new KafkaAvroDeserializer(schemaRegistryClient); final GenericRecord avroRecord = (GenericRecord) deserializer.deserialize("topic", bytes);
/**
 * When the low/high watermark gap is zero, the extractor's getSchema() must not
 * query the schema registry for the latest topic schema.
 */
@Test
public void testConfluentShouldNotQuerySchemaRegistryWhenTheGapIsZero()
    throws IOException, RestClientException, SchemaRegistryException {
    WorkUnitState mockWorkUnitState = getMockWorkUnitState(0L, 0L);
    mockWorkUnitState.setProp("schema.registry.url", TEST_URL);

    SchemaRegistryClient mockSchemaRegistryClient = mock(SchemaRegistryClient.class);
    Deserializer<Object> kafkaDecoder = new KafkaAvroDeserializer(mockSchemaRegistryClient);

    KafkaSchemaRegistry<Integer, Schema> mockKafkaSchemaRegistry = mock(KafkaSchemaRegistry.class);
    KafkaDeserializerExtractor kafkaDecoderExtractor = new KafkaDeserializerExtractor(
        mockWorkUnitState,
        Optional.fromNullable(Deserializers.CONFLUENT_AVRO),
        kafkaDecoder,
        mockKafkaSchemaRegistry);

    kafkaDecoderExtractor.getSchema();

    // Fix: verify AFTER exercising getSchema(). The original verified before the
    // call, so it only checked construction and never the behavior under test.
    verify(mockKafkaSchemaRegistry, never()).getLatestSchemaByTopic(any());
}