private static AvroConverter getAvroConverter(
        final SchemaRegistryClient schemaRegistryClient,
        final KsqlConfig ksqlConfig) {
    final AvroConverter avroConverter = new AvroConverter(schemaRegistryClient);
    avroConverter.configure(
        ImmutableMap.of(
            AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG,
            ksqlConfig.getString(KsqlConfig.SCHEMA_REGISTRY_URL_PROPERTY),
            AvroDataConfig.CONNECT_META_DATA_CONFIG,
            false
        ),
        false);
    return avroConverter;
}
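// A minimal usage sketch, not from the original source: serialize a Connect row
// with the converter built above. The MockSchemaRegistryClient, the in-scope
// ksqlConfig, and the topic/field names are assumptions for illustration.
final AvroConverter converter = getAvroConverter(new MockSchemaRegistryClient(), ksqlConfig);
final org.apache.kafka.connect.data.Schema rowSchema = SchemaBuilder.struct()
    .field("ORDERID", org.apache.kafka.connect.data.Schema.OPTIONAL_INT64_SCHEMA)
    .build();
final Struct row = new Struct(rowSchema).put("ORDERID", 1L);
// fromConnectData derives and registers the Avro schema, then returns Avro-encoded bytes.
final byte[] serialized = converter.fromConnectData("orders-topic", rowSchema, row);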
/**
 * When consuming a SinkRecord generated by debezium-connector-mysql, this must not
 * throw "org.apache.avro.SchemaParseException: Illegal character in: server-id".
 */
@Test
public void shouldValidateSourceInfoSchema() {
    org.apache.kafka.connect.data.Schema kafkaSchema = SourceInfo.SCHEMA;
    Schema avroSchema = avroData.fromConnectSchema(kafkaSchema);
    assertNotNull(avroSchema);
}
if (value instanceof Record) {
    final Record record = (Record) value;
    final Object ksqlValue = avroData.toConnectData(record.getSchema(), record).value();
    genericRowValues.add(
        SchemaUtil.getOptionalValue(ksqlSchema.field(field.name()).schema(), ksqlValue));
}

final String keyString = avroData.toConnectData(
    randomAvroMessage.getSchema().getField(key).schema(),
    randomAvroMessage.get(key)).value().toString();
@Before
public void setup() throws Exception {
    record = getFile("complex_objects.json");
    val = BsonDocument.parse(record);
    builder = SchemaBuilder.struct().name("complex");
    avroData = new AvroData(100);
    converter = new MongoDataConverter(ArrayEncoding.ARRAY);
}
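// The complex_objects.json fixture is not shown; judging from the assertions in
// the tests below, a hypothetical inline equivalent would look like this:
String json = "{\"_id\": 1, "
    + "\"s1\": {\"s1f1\": \"field1s1\", \"s1f2\": \"field2s1\"}, "
    + "\"s2\": {\"s2f1\": \"field1s2\", \"s2f2\": {\"in1\": 1}}}";
BsonDocument doc = BsonDocument.parse(json);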
final AvroData avroData = new AvroData(1);
final org.apache.kafka.connect.data.Schema ksqlSchema =
    SchemaUtil.getOptionalSchema(avroData.toConnectSchema(avroSchema));
connectRecord.put("field0", connectValue);

final AvroConverter converter = new AvroConverter(schemaRegistryClient);
converter.configure(
    ImmutableMap.of(
        AbstractKafkaAvroSerDeConfig.AUTO_REGISTER_SCHEMAS, true,
        // The URL config is required by the converter even though the client is
        // injected above; the value here is an assumed placeholder.
        AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "fake-schema-registry-url"
    ),
    false);

final byte[] bytes = converter.fromConnectData("topic", connectRecordSchema, connectRecord);
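// Hedged follow-up sketch: round-trip the bytes back to Connect data to verify
// the serialization (assumes the same schemaRegistryClient backs both directions).
final SchemaAndValue deserialized = converter.toConnectData("topic", bytes);
final Struct roundTripped = (Struct) deserialized.value();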
@Test
public void shouldCreateStructWithNestedObject() {
    for (Entry<String, BsonValue> entry : val.entrySet()) {
        converter.addFieldSchema(entry, builder);
    }
    Schema finalSchema = builder.build();
    Struct struct = new Struct(finalSchema);
    for (Entry<String, BsonValue> entry : val.entrySet()) {
        converter.convertRecord(entry, finalSchema, struct);
    }
    final GenericData.Record avro = (GenericData.Record) avroData.fromConnectData(finalSchema, struct);
    assertThat(avro.toString()).isEqualTo(
        "{\"_id\": 1, "
            + "\"s1\": {\"s1f1\": \"field1s1\", \"s1f2\": \"field2s1\"}, "
            + "\"s2\": {\"s2f1\": \"field1s2\", \"s2f2\": {\"in1\": 1}}}");
}
byte[] avroKeyBytes = avroValueConverter.fromConnectData(record.topic(), record.keySchema(), record.key());
msg = "deserializing key using Avro converter";
avroKeyWithSchema = avroValueConverter.toConnectData(record.topic(), avroKeyBytes);
msg = "comparing key schema to that serialized/deserialized with Avro converter";
assertEquals(avroKeyWithSchema.schema(), record.keySchema());

byte[] avroValueBytes = avroValueConverter.fromConnectData(record.topic(), record.valueSchema(), record.value());
msg = "deserializing value using Avro converter";
avroValueWithSchema = avroValueConverter.toConnectData(record.topic(), avroValueBytes);
msg = "comparing value schema to that serialized/deserialized with Avro converter";
assertEquals(avroValueWithSchema.schema(), record.valueSchema());
@Override
public Schema getSchema(Configuration conf, Path path) throws IOException {
    SeekableInput input = new FsInput(path, conf);
    DatumReader<Object> reader = new GenericDatumReader<>();
    // try-with-resources so the reader (and underlying input) is closed even if
    // reading the schema throws.
    try (FileReader<Object> fileReader = DataFileReader.openReader(input, reader)) {
        org.apache.avro.Schema schema = fileReader.getSchema();
        return avroData.toConnectSchema(schema);
    }
}
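// Hypothetical call site (the instance name and file path are assumptions):
// derive the Connect schema from an Avro data file already written to HDFS.
Configuration conf = new Configuration();
Schema connectSchema = schemaFileReader.getSchema(conf, new Path("/topics/orders/partition=0/orders+0.avro"));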
generator,
new AvroData(1),
generator.schema(),
ordersSchema,
new SessionManager(),
"orderid");
@Test
@FixFor("DBZ-650")
public void shouldCreateSchemaWithNestedObject() {
    for (Entry<String, BsonValue> entry : val.entrySet()) {
        converter.addFieldSchema(entry, builder);
    }
    Schema finalSchema = builder.build();
    final org.apache.avro.Schema avroSchema = avroData.fromConnectSchema(finalSchema);
    assertThat(avroSchema.toString()).isEqualTo(
        "{\"type\":\"record\",\"name\":\"complex\",\"fields\":["
            + "{\"name\":\"_id\",\"type\":[\"null\",\"int\"],\"default\":null},"
            + "{\"name\":\"s1\",\"type\":[\"null\",{\"type\":\"record\",\"name\":\"s1\",\"namespace\":\"complex\",\"fields\":["
            + "{\"name\":\"s1f1\",\"type\":[\"null\",\"string\"],\"default\":null},"
            + "{\"name\":\"s1f2\",\"type\":[\"null\",\"string\"],\"default\":null}],"
            + "\"connect.name\":\"complex.s1\"}],\"default\":null},"
            + "{\"name\":\"s2\",\"type\":[\"null\",{\"type\":\"record\",\"name\":\"s2\",\"namespace\":\"complex\",\"fields\":["
            + "{\"name\":\"s2f1\",\"type\":[\"null\",\"string\"],\"default\":null},"
            + "{\"name\":\"s2f2\",\"type\":[\"null\",{\"type\":\"record\",\"name\":\"s2f2\",\"namespace\":\"complex.s2\",\"fields\":["
            + "{\"name\":\"in1\",\"type\":[\"null\",\"int\"],\"default\":null}],"
            + "\"connect.name\":\"complex.s2.s2f2\"}],\"default\":null}],"
            + "\"connect.name\":\"complex.s2\"}],\"default\":null}],"
            + "\"connect.name\":\"complex\"}");
}
@Override
public void write(SinkRecord record) throws IOException {
    log.trace("Sink record: {}", record.toString());
    Object value = avroData.fromConnectData(schema, record.value());
    writer.append(value);
}
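// A sketch of how the writer used above might be constructed; it is assumed to
// be an Avro DataFileWriter, and the output File is a placeholder:
org.apache.avro.Schema avroSchema = avroData.fromConnectSchema(schema);
DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>(avroSchema));
writer.create(avroSchema, new File("/tmp/sink-output.avro"));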
public GenericRecordToStruct() {
    this.avroData = new AvroData(CACHE_SIZE);
}

@Override
public Struct apply(GenericRecord record) {
    return (Struct) avroData.toConnectData(record.getSchema(), record).value();
}
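// Hypothetical usage of the mapper above: turn a deserialized Avro GenericRecord
// into a Connect Struct and read a field (the field name is an assumption):
GenericRecordToStruct toStruct = new GenericRecordToStruct();
Struct struct = toStruct.apply(genericRecord);
Object id = struct.get("id");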
@Override
public void alterSchema(String database, String tableName, Schema schema) throws HiveMetaStoreException {
    Table table = hiveMetaStore.getTable(database, tableName);
    table.getParameters().put(AVRO_SCHEMA_LITERAL, avroData.fromConnectSchema(schema).toString());
    hiveMetaStore.alterTable(table);
}
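// Hypothetical invocation (instance, database, and table names are placeholders).
// AVRO_SCHEMA_LITERAL presumably maps to Hive's "avro.schema.literal" table
// property, so this rewrites the stored Avro schema JSON to match the latest
// Connect schema:
hiveUtil.alterSchema("default", "orders", latestConnectSchema);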
@Override
public void write(SinkRecord record) throws IOException {
    Object value = avroData.fromConnectData(record.valueSchema(), record.value());
    writer.write((GenericRecord) value);
}
@Override
public void configure(Map<String, String> properties) {
    SchemaRegistrySchemaRetrieverConfig config = new SchemaRegistrySchemaRetrieverConfig(properties);
    schemaRegistryClient = new CachedSchemaRegistryClient(config.getString(config.LOCATION_CONFIG), 0);
    avroData = new AvroData(config.getInt(config.AVRO_DATA_CACHE_SIZE_CONFIG));
}