@SuppressWarnings("unchecked")
private GenericRow getGenericRow(final byte[] rowJsonBytes) {
  // Convert the raw JSON bytes into a Connect schema/value pair.
  // The topic name is not used by the JSON converter here, so a placeholder is passed.
  final SchemaAndValue schemaAndValue = jsonConverter.toConnectData("topic", rowJsonBytes);
  // Parameterized cast instead of a raw (Map) cast — the unchecked warning is
  // already suppressed on the method, and this keeps the generics intact downstream.
  final Map<String, Object> valueMap = (Map<String, Object>) schemaAndValue.value();
  if (valueMap == null) {
    return null;
  }
  // Maps a case-normalized field name back to the actual JSON key, so schema field
  // lookups succeed regardless of the casing used in the payload.
  final Map<String, String> caseInsensitiveFieldNameMap =
      getCaseInsensitiveFieldNameMap(valueMap, true);
  // Presized, diamond-typed list (was a raw `new ArrayList(...)`): one column per schema field.
  final List<Object> columns = new ArrayList<>(schema.fields().size());
  for (final Field field : schema.fields()) {
    // A field absent from the JSON yields null here; enforceFieldType decides how
    // that (and any type coercion) is handled.
    final Object columnVal = valueMap.get(caseInsensitiveFieldNameMap.get(field.name()));
    columns.add(enforceFieldType(field.schema(), columnVal));
  }
  return new GenericRow(columns);
}
@SuppressWarnings("unchecked")
@Override
public GenericRow deserialize(final String topic, final byte[] bytes) {
  try {
    // Decode the payload into Connect form, then translate it to a KSQL row.
    final SchemaAndValue connectData = converter.toConnectData(topic, bytes);
    return connectToKsqlTranslator.toKsqlRow(connectData.schema(), connectData.value());
  } catch (final Exception e) {
    // Log the failure (with the offending payload, if any) before propagating it unchanged.
    recordLogger.error(
        ProcessingLogMessageFactory.deserializationErrorMsg(e, Optional.ofNullable(bytes)));
    throw e;
  }
}
if (value instanceof Record) { final Record record = (Record) value; final Object ksqlValue = avroData.toConnectData(record.getSchema(), record).value(); genericRowValues.add( SchemaUtil.getOptionalValue(ksqlSchema.field(field.name()).schema(), ksqlValue)); randomAvroMessage.get(key)).value().toString();
@Test public void shouldSetNullRecordToNull() { // When: final SchemaAndValue msg = ProcessingLogMessageFactory.deserializationErrorMsg( error, Optional.empty() ).get(); // Then: final Struct struct = (Struct) msg.value(); final Struct deserializationError = struct.getStruct(DESERIALIZATION_ERROR); assertThat(deserializationError.get(DESERIALIZATION_ERROR_FIELD_RECORD), is(nullValue())); }
final Struct struct = (Struct) msg.value(); assertThat( struct.get(ProcessingLogMessageFactory.TYPE),
/** * Assert that the supplied {@link Struct} is {@link Struct#validate() valid} and its {@link Struct#schema() schema} * matches that of the supplied {@code schema}. * * @param schemaAndValue the value with a schema; may not be null */ public static void schemaMatchesStruct(SchemaAndValue schemaAndValue) { Object value = schemaAndValue.value(); if (value == null) { // The schema should also be null ... assertThat(schemaAndValue.schema()).isNull(); } else { // Both value and schema should exist and be valid ... assertThat(value).isInstanceOf(Struct.class); fieldsInSchema((Struct) value, schemaAndValue.schema()); } }
/**
 * Rebuilds a {@link SourceRecord} from its document-serialized form, using the supplied
 * converters to restore the key and value together with their schemas.
 */
private SourceRecord rehydrateSourceRecord(Document record, SchemaAndValueConverter keyConverter,
        SchemaAndValueConverter valueConverter) throws IOException {
    // Pull the scalar pieces and the partition/offset maps out of the document.
    String topic = record.getString("topic");
    Integer kafkaPartition = record.getInteger("kafkaPartition");
    Map<String, ?> sourcePartition = toMap(record.getDocument("sourcePartition"));
    Map<String, ?> sourceOffset = toMap(record.getDocument("sourceOffset"));

    // Re-pair each schema with its payload in the envelope shape the converters expect.
    Document keyEnvelope = Document.create(
            "schema", record.getDocument("keySchema"), "payload", record.getDocument("key"));
    Document valueEnvelope = Document.create(
            "schema", record.getDocument("valueSchema"), "payload", record.getDocument("value"));

    // Deserialize the key first, then the value, matching the original ordering.
    SchemaAndValue keyWithSchema = keyConverter.deserialize(topic, keyEnvelope);
    SchemaAndValue valueWithSchema = valueConverter.deserialize(topic, valueEnvelope);

    return new SourceRecord(sourcePartition, sourceOffset, topic, kafkaPartition,
            keyWithSchema.schema(), keyWithSchema.value(),
            valueWithSchema.schema(), valueWithSchema.value());
}
assertThat(keyWithSchema.schema()).isEqualTo(record.keySchema()); msg = "comparing key to that serialized/deserialized with JSON converter"; assertThat(keyWithSchema.value()).isEqualTo(record.key()); msg = "comparing key to its schema"; schemaMatchesStruct(keyWithSchema); assertEquals(valueWithSchema.schema(), record.valueSchema()); msg = "comparing value to that serialized/deserialized with JSON converter"; assertEquals(valueWithSchema.value(), record.value()); msg = "comparing value to its schema"; schemaMatchesStruct(valueWithSchema); assertEquals(keyWithSchema.schema(), record.keySchema()); msg = "comparing key to that serialized/deserialized with Avro converter"; assertEquals(keyWithSchema.value(), record.key()); msg = "comparing key to its schema"; schemaMatchesStruct(keyWithSchema); assertEquals(valueWithSchema.schema(), record.valueSchema()); msg = "comparing value to that serialized/deserialized with Avro converter"; assertEquals(valueWithSchema.value(), record.value()); msg = "comparing value to its schema"; schemaMatchesStruct(valueWithSchema); Testing.print(" key deserialized from JSON: " + prettyJson(keyJson)); if (keyWithSchema != null) { Testing.print(" key to/from JSON: " + SchemaUtil.asString(keyWithSchema.value())); Testing.print(" key to/from Avro: " + SchemaUtil.asString(avroKeyWithSchema.value()));
/** Delegates to the wrapped {@link SchemaAndValue}'s value. */
@Override
public Object value() {
  return this.schemaAndValue.value();
}
/**
 * Dispatches processing based on the runtime type of the payload: {@link Struct}
 * payloads go to {@code processStruct}, {@link Map} payloads to {@code processMap}.
 * Any other payload type is rejected.
 */
protected long process(SchemaAndValue schemaAndValue) {
  final Object payload = schemaAndValue.value();
  if (payload instanceof Struct) {
    return processStruct(schemaAndValue);
  }
  if (payload instanceof Map) {
    return processMap(schemaAndValue);
  }
  // No handler exists for this payload type.
  throw new UnsupportedOperationException();
}
/** Converts the Avro {@link GenericRecord} into its Connect {@link Struct} representation. */
@Override
public Struct apply(GenericRecord record) {
  final SchemaAndValue connectData = avroData.toConnectData(record.getSchema(), record);
  return (Struct) connectData.value();
}
}
/** Translates an Avro {@link GenericRecord} to the equivalent Connect {@link Struct}. */
@Override
public Struct apply(GenericRecord record) {
  final SchemaAndValue converted = avroData.toConnectData(record.getSchema(), record);
  return (Struct) converted.value();
}
}
/**
 * Parses the record's operating value (a JSON string) into {@code clazz}, re-encodes it
 * as Connect data against the Greeting Avro schema, and returns a record carrying the result.
 *
 * @throws DataException if the JSON cannot be parsed into {@code clazz}
 */
@Override
public R apply(R record) {
  try {
    Object updatedValue = mapper.readValue((String) operatingValue(record), clazz);
    SchemaAndValue s = avroData.toConnectData(Greeting.getClassSchema(), updatedValue);
    return newRecord(record, s.schema(), s.value());
  } catch (IOException e) {
    // Was `new DataException("", e)` — an empty message hides what failed.
    // Keep the cause and say what was being attempted.
    throw new DataException("Failed to deserialize JSON value as " + clazz.getName(), e);
  }
}
/** Applies the transformation to the record key; the value passes through untouched. */
@Override
public R apply(R r) {
  final SchemaAndValue newKey = process(r, r.keySchema(), r.key());
  return r.newRecord(
      r.topic(), r.kafkaPartition(),
      newKey.schema(), newKey.value(),
      r.valueSchema(), r.value(),
      r.timestamp());
}
}
/** Applies the transformation to the record value; the key passes through untouched. */
@Override
public R apply(R r) {
  final SchemaAndValue newValue = process(r, r.valueSchema(), r.value());
  return r.newRecord(
      r.topic(), r.kafkaPartition(),
      r.keySchema(), r.key(),
      newValue.schema(), newValue.value(),
      r.timestamp());
}
}
/** Transforms the key of the record, leaving every other part of it unchanged. */
@Override
public R apply(R r) {
  final SchemaAndValue processedKey = process(r, r.keySchema(), r.key());
  return r.newRecord(
      r.topic(), r.kafkaPartition(),
      processedKey.schema(), processedKey.value(),
      r.valueSchema(), r.value(),
      r.timestamp());
}
}
/** Transforms the value of the record, leaving every other part of it unchanged. */
@Override
public R apply(R r) {
  final SchemaAndValue processedValue = process(r, r.valueSchema(), r.value());
  return r.newRecord(
      r.topic(), r.kafkaPartition(),
      r.keySchema(), r.key(),
      processedValue.schema(), processedValue.value(),
      r.timestamp());
}
}
/** Rewrites the record's value with the processed schema/value pair; key is untouched. */
@Override
public R apply(R r) {
  final SchemaAndValue updated = process(r, r.valueSchema(), r.value());
  return r.newRecord(
      r.topic(), r.kafkaPartition(),
      r.keySchema(), r.key(),
      updated.schema(), updated.value(),
      r.timestamp());
}
}
/** Replaces the record's value (and value schema) with the processed result. */
@Override
public R apply(R r) {
  final SchemaAndValue result = process(r, r.valueSchema(), r.value());
  return r.newRecord(
      r.topic(), r.kafkaPartition(),
      r.keySchema(), r.key(),
      result.schema(), result.value(),
      r.timestamp());
}
}
/** Replaces the record's key (and key schema) with the processed result. */
@Override
public R apply(R r) {
  final SchemaAndValue result = process(r, r.keySchema(), r.key());
  return r.newRecord(
      r.topic(), r.kafkaPartition(),
      result.schema(), result.value(),
      r.valueSchema(), r.value(),
      r.timestamp());
}
}