/**
 * Initializes the shared mock Schema Registry client and the Avro
 * serializer/deserializer fields used by the tests.
 *
 * <p>NOTE(review): the original method also built a {@code Properties} object with
 * {@code SCHEMA_REGISTRY_URL_CONFIG} set to {@code "bogus"} but never passed it to
 * either (de)serializer, so it was dead code and has been removed. If it was
 * intended for the deserializer, restore it via the two-argument
 * {@code KafkaAvroDeserializer(client, config)} constructor — confirm against the
 * original author's intent.
 */
private void initKafka() {
  schemaRegistryClient = new MockSchemaRegistryClient();
  kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  avroSerializer = new KafkaAvroSerializer(schemaRegistryClient);
}
/**
 * Round-trips a full six-column row (long, long, string, double, list, map)
 * through Avro serialization/deserialization and verifies every scalar value
 * plus the sizes of the array and map columns survive intact.
 */
@Test
@SuppressWarnings("unchecked") // cast of deserialized List/Map columns is safe for this fixture
public void shouldDeserializeCorrectly() {
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  // Row layout: [orderTime, orderId, itemId, orderUnits, arrayCol, mapCol]
  final List<Object> columns = Arrays.asList(
      1511897796092L, 1L, "item_1", 10.0,
      Collections.singletonList(100.0),
      Collections.singletonMap("key1", 100.0));
  final GenericRow genericRow = new GenericRow(columns);
  final GenericRow row = serializeDeserializeRow(
      schema, "t1", schemaRegistryClient, avroSchema, genericRow);
  Assert.assertNotNull(row);
  // Fixed typo in assertion messages: "deserializarion" -> "deserialization".
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(6));
  assertThat("Incorrect deserialization", row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", row.getColumns().get(2), equalTo("item_1"));
  assertThat("Incorrect deserialization", row.getColumns().get(3), equalTo(10.0));
  assertThat("Incorrect deserialization",
      ((List<Double>) row.getColumns().get(4)).size(), equalTo(1));
  assertThat("Incorrect deserialization",
      ((Map) row.getColumns().get(5)).size(), equalTo(1));
}
/**
 * Deserializes a record written with a reduced Avro schema (only the first four
 * fields) against the full KSQL schema, and verifies the four present columns
 * round-trip while the two schema-only columns come back as {@code null}.
 */
@Test
public void shouldDeserializeWithMissingFields() {
  // Writer schema deliberately omits the array and map columns of the full schema.
  final String schemaStr1 = "{"
      + "\"namespace\": \"kql\","
      + " \"name\": \"orders\","
      + " \"type\": \"record\","
      + " \"fields\": ["
      + "     {\"name\": \"orderTime\", \"type\": \"long\"},"
      + "     {\"name\": \"orderId\",  \"type\": \"long\"},"
      + "     {\"name\": \"itemId\", \"type\": \"string\"},"
      + "     {\"name\": \"orderUnits\", \"type\": \"double\"}"
      + " ]"
      + "}";
  final org.apache.avro.Schema.Parser parser = new org.apache.avro.Schema.Parser();
  final org.apache.avro.Schema avroSchema1 = parser.parse(schemaStr1);
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  final List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0);
  final GenericRow genericRow = new GenericRow(columns);
  final GenericRow row = serializeDeserializeRow(
      schema, "t1", schemaRegistryClient, avroSchema1, genericRow);
  // Fixed typo in assertion messages: "deserializarion" -> "deserialization".
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(6));
  assertThat("Incorrect deserialization", row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", row.getColumns().get(2), equalTo("item_1"));
  // Columns absent from the writer schema must be filled with nulls.
  Assert.assertNull(row.getColumns().get(4));
  Assert.assertNull(row.getColumns().get(5));
}
/**
 * Deserializes a six-column record against a narrower four-field KSQL schema and
 * verifies the extra (redundant) array/map columns are dropped while the four
 * requested columns round-trip correctly.
 */
@Test
public void shouldDeserializeIfThereAreRedundantFields() {
  // Target schema requests only the four scalar columns.
  final Schema newSchema = SchemaBuilder.struct()
      .field("ordertime".toUpperCase(), Schema.OPTIONAL_INT64_SCHEMA)
      .field("orderid".toUpperCase(), Schema.OPTIONAL_INT64_SCHEMA)
      .field("itemid".toUpperCase(), Schema.OPTIONAL_STRING_SCHEMA)
      .field("orderunits".toUpperCase(), Schema.OPTIONAL_FLOAT64_SCHEMA)
      .build();
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  final List<Object> columns = Arrays.asList(
      1511897796092L, 1L, "item_1", 10.0,
      Collections.emptyList(),
      Collections.emptyMap());
  final GenericRow genericRow = new GenericRow(columns);
  final GenericRow row = serializeDeserializeRow(
      newSchema, "t1", schemaRegistryClient, avroSchema, genericRow);
  Assert.assertNotNull(row);
  // Fixed typo in assertion messages: "deserializarion" -> "deserialization".
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(4));
  assertThat("Incorrect deserialization", row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", row.getColumns().get(2), equalTo("item_1"));
}
/**
 * Registers a single Avro record schema and verifies it can be fetched back by
 * the id returned from registration.
 *
 * @throws SchemaRegistryException if registration or lookup fails
 */
@Test
public void testRegisterAndGetByKey() throws SchemaRegistryException {
  final Properties props = new Properties();
  props.setProperty(KafkaSchemaRegistry.KAFKA_SCHEMA_REGISTRY_URL, TEST_URL);
  final SchemaRegistryClient client = new MockSchemaRegistryClient();
  final KafkaSchemaRegistry<Integer, Schema> registry =
      new ConfluentKafkaSchemaRegistry(props, client);
  final Schema recordSchema = SchemaBuilder.record(TEST_RECORD_NAME)
      .namespace(TEST_NAMESPACE)
      .fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();
  final Integer schemaId = registry.register(recordSchema);
  Assert.assertEquals(recordSchema, registry.getSchemaByKey(schemaId));
}
/**
 * Helper: wraps a single Avro value in a one-field record ("field0"),
 * round-trips it through serialize/deserialize against the matching one-field
 * KSQL struct schema, and asserts the resulting row holds exactly the expected
 * KSQL value.
 *
 * @param avroSchema Avro schema of the single test field
 * @param avroValue  value to write into the Avro record
 * @param ksqlSchema KSQL (Connect) schema of the single test field
 * @param ksqlValue  value expected back after deserialization
 */
private void shouldDeserializeTypeCorrectly(final org.apache.avro.Schema avroSchema,
                                            final Object avroValue,
                                            final Schema ksqlSchema,
                                            final Object ksqlValue) {
  final SchemaRegistryClient registryClient = new MockSchemaRegistryClient();

  final org.apache.avro.Schema recordSchema =
      org.apache.avro.SchemaBuilder.record("test_row")
          .fields()
          .name("field0").type(avroSchema).noDefault()
          .endRecord();
  final Schema structSchema = SchemaBuilder.struct().field("field0", ksqlSchema).build();

  final GenericRecord record = new GenericData.Record(recordSchema);
  record.put("field0", avroValue);

  final GenericRow deserialized = serializeDeserializeAvroRecord(
      structSchema, "test-topic", registryClient, record);

  assertThat(deserialized.getColumns().size(), equalTo(1));
  assertThat(deserialized.getColumns().get(0), equalTo(ksqlValue));
}
final Schema ksqlSchema, final Object ksqlValue) { final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
/**
 * Helper: registers two distinct record schemas under the same topic and
 * verifies that only the second (most recently registered) one is returned as
 * the topic's latest schema.
 *
 * @param properties registry configuration to construct the client under test
 * @throws SchemaRegistryException if registration or lookup fails
 */
private void doTestRegisterAndGetLatest(Properties properties) throws SchemaRegistryException {
  final SchemaRegistryClient client = new MockSchemaRegistryClient();
  final KafkaSchemaRegistry<Integer, Schema> registry =
      new ConfluentKafkaSchemaRegistry(properties, client);

  final Schema firstSchema = SchemaBuilder.record(TEST_RECORD_NAME + "1")
      .namespace(TEST_NAMESPACE)
      .fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();
  final Schema secondSchema = SchemaBuilder.record(TEST_RECORD_NAME + "2")
      .namespace(TEST_NAMESPACE)
      .fields()
      .name(TEST_FIELD_NAME).type().stringType().noDefault()
      .endRecord();

  registry.register(firstSchema, TEST_TOPIC_NAME);
  registry.register(secondSchema, TEST_TOPIC_NAME);

  // The latest schema must reflect the most recent registration only.
  Assert.assertNotEquals(firstSchema, registry.getLatestSchemaByTopic(TEST_TOPIC_NAME));
  Assert.assertEquals(secondSchema, registry.getLatestSchemaByTopic(TEST_TOPIC_NAME));
}
}