/**
 * Flips the {@code LAST_SNAPSHOT_RECORD_KEY} flag to {@code true} in the record's
 * {@code source} block, but only when the field currently carries a non-null value.
 */
private void changeSourceToLastSnapshotRecord(SourceRecord currentRecord) {
    final Struct envelope = (Struct) currentRecord.value();
    final Struct sourceBlock = (Struct) envelope.get("source");
    // Leave the source untouched when the flag is absent (null) rather than forcing it in.
    if (sourceBlock.getBoolean(SourceInfo.LAST_SNAPSHOT_RECORD_KEY) != null) {
        sourceBlock.put(SourceInfo.LAST_SNAPSHOT_RECORD_KEY, true);
    }
}
/**
 * Builds a predicate that matches the record whose primary-key field equals the
 * given value; used to decide when to stop consuming.
 */
private Predicate<SourceRecord> stopOnPKPredicate(int pkValue) {
    return candidate -> {
        final Struct keyStruct = (Struct) candidate.key();
        final Integer pk = (Integer) keyStruct.get(PK_FIELD);
        // Unboxing comparison: NPE on a null key value, same as the original contract.
        return pk == pkValue;
    };
}
@Test public void shouldSetNullRecordToNull() { // When: final SchemaAndValue msg = ProcessingLogMessageFactory.deserializationErrorMsg( error, Optional.empty() ).get(); // Then: final Struct struct = (Struct) msg.value(); final Struct deserializationError = struct.getStruct(DESERIALIZATION_ERROR); assertThat(deserializationError.get(DESERIALIZATION_ERROR_FIELD_RECORD), is(nullValue())); }
/**
 * Asserts that the named field in the record's {@code after} image has no value
 * (ignoring any schema default).
 */
private void assertEmptyFieldValue(SourceRecord record, String fieldName) {
    final Struct envelope = (Struct) record.value();
    final Struct afterImage = (Struct) envelope.get("after");
    assertThat(afterImage.getWithoutDefault(fieldName)).isNull();
}
@Override public GenericRow toKsqlRow(final Schema connectSchema, final Object connectData) { if (!schema.type().equals(Schema.Type.STRUCT)) { throw new KsqlException("Schema for a KSQL row should be a struct"); } final Struct rowStruct = (Struct) toKsqlValue(schema, connectSchema, connectData, ""); if (rowStruct == null) { return null; } // streams are expensive, so we don't use them from serdes. build the row using forEach final List<Object> fields = new ArrayList<>(schema.fields().size()); schema.fields().forEach(field -> fields.add(rowStruct.get(field))); return new GenericRow(fields); }
/**
 * Copies every field of {@code oldValue} into a fresh struct built on
 * {@code newValueSchema}, matching fields by name.
 */
private Struct updateValue(Schema newValueSchema, Struct oldValue) {
    final Struct rebuilt = new Struct(newValueSchema);
    oldValue.schema().fields().forEach(field -> rebuilt.put(field.name(), oldValue.get(field)));
    return rebuilt;
}
/**
 * Rebuilds an envelope struct on {@code newEnvelopeSchema}, migrating the non-null
 * {@code before}/{@code after} row images onto the new value schema and copying all
 * other envelope fields through unchanged.
 */
private Struct updateEnvelope(Schema newEnvelopeSchema, Struct oldEnvelope) {
    final Struct rebuilt = new Struct(newEnvelopeSchema);
    // BEFORE and AFTER share the same row schema in the new envelope.
    final Schema rowSchema = newEnvelopeSchema.field(Envelope.FieldName.BEFORE).schema();
    for (org.apache.kafka.connect.data.Field field : oldEnvelope.schema().fields()) {
        final String name = field.name();
        Object value = oldEnvelope.get(field);
        final boolean isRowImage = Objects.equals(name, Envelope.FieldName.BEFORE)
                || Objects.equals(name, Envelope.FieldName.AFTER);
        if (isRowImage && value != null) {
            value = updateValue(rowSchema, requireStruct(value, "Updating schema"));
        }
        rebuilt.put(name, value);
    }
    return rebuilt;
}
/**
 * Extracts the values of all fields of the struct, in schema field order.
 */
private static Object[] valuesFor(Struct struct) {
    final Object[] values = new Object[struct.schema().fields().size()];
    for (int i = 0; i < values.length; i++) {
        values[i] = struct.get(struct.schema().fields().get(i));
    }
    return values;
}
SourceRecord getRecordByOperation(Envelope.Operation operation) throws InterruptedException { final SourceRecord candidateRecord = getNextRecord(); if (!((Struct) candidateRecord.value()).get("op").equals(operation.code())) { // MongoDB is not providing really consistent snapshot, so the initial insert // can arrive both in initial sync snapshot and in oplog return getRecordByOperation(operation); } return candidateRecord; }
/**
 * Verify that the given {@link SourceRecord} carries a non-null struct key whose
 * primary-key field equals the expected integer value.
 *
 * @param record the source record; may not be null
 * @param pkField the single field defining the primary key of the struct; may not be null
 * @param pk the expected integer value of the primary key in the struct
 */
public static void hasValidKey(SourceRecord record, String pkField, int pk) {
    final Struct keyStruct = (Struct) record.key();
    assertThat(keyStruct.get(pkField)).isEqualTo(pk);
}
/**
 * Asserts that the record's {@code after} image equals the expected struct;
 * returns {@code this} for fluent chaining.
 */
public SourceRecordAssert valueAfterFieldIsEqualTo(Struct expectedValue) {
    final Struct envelope = (Struct) record.value();
    final Struct actualAfter = (Struct) envelope.get("after");
    Assertions.assertThat(actualAfter).isEqualTo(expectedValue);
    return this;
}
@Test public void intTypes() throws Exception { Testing.debug("Inserted"); final SourceRecords records = consumeRecordsByTopic(EXPECTED_RECORD_COUNT); List<SourceRecord> testTableRecords = records.recordsForTopic("server1.dbo.type_int"); assertThat(testTableRecords).hasSize(1); // insert VerifyRecord.isValidRead(testTableRecords.get(0)); Struct after = (Struct) ((Struct)testTableRecords.get(0).value()).get("after"); assertRecord(after, EXPECTED_INT); }
@Test public void stringTypes() throws Exception { Testing.debug("Inserted"); final SourceRecords records = consumeRecordsByTopic(EXPECTED_RECORD_COUNT); List<SourceRecord> testTableRecords = records.recordsForTopic("server1.dbo.type_string"); assertThat(testTableRecords).hasSize(1); // insert VerifyRecord.isValidRead(testTableRecords.get(0)); Struct after = (Struct) ((Struct)testTableRecords.get(0).value()).get("after"); assertRecord(after, EXPECTED_STRING); }
@Test public void otherTypes() throws Exception { Testing.debug("Inserted"); final SourceRecords records = consumeRecordsByTopic(EXPECTED_RECORD_COUNT); List<SourceRecord> testTableRecords = records.recordsForTopic("server1.dbo.type_xml"); assertThat(testTableRecords).hasSize(1); // insert VerifyRecord.isValidRead(testTableRecords.get(0)); Struct after = (Struct) ((Struct)testTableRecords.get(0).value()).get("after"); assertRecord(after, EXPECTED_XML); }
/**
 * Consumes the next record and asserts it is a heartbeat for the test server:
 * correct heartbeat topic and a key whose {@code serverName} matches.
 */
private void assertHeartBeatRecordInserted() {
    assertFalse("records not generated", consumer.isEmpty());
    final SourceRecord heartbeat = consumer.remove();
    assertEquals("__debezium-heartbeat." + TestHelper.TEST_SERVER, heartbeat.topic());
    final Struct heartbeatKey = (Struct) heartbeat.key();
    assertThat(heartbeatKey.get("serverName")).isEqualTo(TestHelper.TEST_SERVER);
}
@Test public void fpTypes() throws Exception { Testing.debug("Inserted"); final SourceRecords records = consumeRecordsByTopic(EXPECTED_RECORD_COUNT); List<SourceRecord> testTableRecords = records.recordsForTopic("server1.dbo.type_fp"); assertThat(testTableRecords).hasSize(1); // insert VerifyRecord.isValidRead(testTableRecords.get(0)); Struct after = (Struct) ((Struct)testTableRecords.get(0).value()).get("after"); assertRecord(after, EXPECTED_FP); }
@Test public void dateTimeTypes() throws Exception { Testing.debug("Inserted"); final SourceRecords records = consumeRecordsByTopic(EXPECTED_RECORD_COUNT); List<SourceRecord> testTableRecords = records.recordsForTopic("server1.dbo.type_time"); assertThat(testTableRecords).hasSize(1); // insert VerifyRecord.isValidRead(testTableRecords.get(0)); Struct after = (Struct) ((Struct)testTableRecords.get(0).value()).get("after"); assertRecord(after, EXPECTED_DATE_TIME); }
/**
 * Round-trips a primitive (INT64) column through the Avro translator and checks
 * both the Connect struct and the reconstructed KSQL row are unchanged.
 */
@Test
public void shouldReplacePrimitivesCorrectly() {
    // Fixed typo in the method name ("shoudl" -> "should"); JUnit discovers tests
    // by annotation, so the rename is backward-compatible.
    final Schema schema = SchemaBuilder.struct()
        .field("COLUMN_NAME", Schema.OPTIONAL_INT64_SCHEMA)
        .optional()
        .build();

    final AvroDataTranslator dataTranslator =
        new AvroDataTranslator(schema, KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME);
    final GenericRow ksqlRow = new GenericRow(Collections.singletonList(123L));

    final Struct struct = dataTranslator.toConnectRow(ksqlRow);
    assertThat(struct.get("COLUMN_NAME"), equalTo(123L));

    final GenericRow translatedRow = dataTranslator.toKsqlRow(struct.schema(), struct);
    assertThat(translatedRow, equalTo(ksqlRow));
}
/**
 * Round-trips a null array column through the Avro translator and checks the null
 * survives in both the Connect struct and the reconstructed KSQL row.
 */
@Test
public void shouldReplaceNullWithNull() {
    final Schema schema = SchemaBuilder.struct()
        .field(
            "COLUMN_NAME",
            SchemaBuilder.array(Schema.OPTIONAL_INT64_SCHEMA).optional().build())
        .optional()
        .build();

    final AvroDataTranslator dataTranslator =
        new AvroDataTranslator(schema, KsqlConstants.DEFAULT_AVRO_SCHEMA_FULL_NAME);
    final GenericRow ksqlRow = new GenericRow(Collections.singletonList(null));

    final Struct struct = dataTranslator.toConnectRow(ksqlRow);
    assertThat(struct.get("COLUMN_NAME"), nullValue());

    final GenericRow translatedRow = dataTranslator.toKsqlRow(struct.schema(), struct);
    assertThat(translatedRow, equalTo(ksqlRow));
}