/**
 * Returns the event time of the wrapped source record, if one is set.
 *
 * @return the record timestamp (epoch millis) or {@link Optional#empty()} when the
 *         source record carries no timestamp
 */
@Override
public Optional<Long> getEventTime() {
    // SourceRecord.timestamp() returns a nullable Long (timestamps are optional for
    // Connect records); Optional.of(null) would throw NullPointerException, so use
    // ofNullable to map a missing timestamp to Optional.empty().
    return Optional.ofNullable(srcRecord.timestamp());
}
/**
 * Serializes a {@link SourceRecord} by copying each of its fields into an
 * intermediate {@code Storage} holder and writing that holder with the generator.
 * Headers, when present, are snapshotted into a plain list before serialization.
 */
@Override
public void serialize(SourceRecord record, JsonGenerator jsonGenerator,
                      SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
    final Storage holder = new Storage();
    holder.sourcePartition = record.sourcePartition();
    holder.sourceOffset = record.sourceOffset();
    holder.topic = record.topic();
    holder.kafkaPartition = record.kafkaPartition();
    holder.keySchema = record.keySchema();
    holder.key = record.key();
    holder.valueSchema = record.valueSchema();
    holder.value = record.value();
    holder.timestamp = record.timestamp();
    if (record.headers() != null) {
        // Copy into a plain List so the holder serializes independently of the
        // record's own Headers implementation.
        final List<Header> copiedHeaders = new ArrayList<>();
        for (Header recordHeader : record.headers()) {
            copiedHeaders.add(recordHeader);
        }
        holder.headers = copiedHeaders;
    }
    jsonGenerator.writeObject(holder);
}
}
/**
 * Convert the source record into a producer record.
 *
 * @param record the transformed record
 * @return the producer record which can be sent over to Kafka. A null is returned if the input is null or
 *         if an error was encountered during any of the converter stages.
 */
private ProducerRecord<byte[], byte[]> convertTransformedRecord(SourceRecord record) {
    if (record == null) {
        return null;
    }

    // Each conversion runs under the tolerance operator so that per-stage failures are
    // recorded rather than thrown; the failed() check below inspects that recorded state,
    // so the three execute() calls must all run before it.
    RecordHeaders convertedHeaders = retryWithToleranceOperator.execute(
            () -> convertHeaderFor(record),
            Stage.HEADER_CONVERTER, headerConverter.getClass());

    byte[] serializedKey = retryWithToleranceOperator.execute(
            () -> keyConverter.fromConnectData(record.topic(), record.keySchema(), record.key()),
            Stage.KEY_CONVERTER, keyConverter.getClass());

    byte[] serializedValue = retryWithToleranceOperator.execute(
            () -> valueConverter.fromConnectData(record.topic(), record.valueSchema(), record.value()),
            Stage.VALUE_CONVERTER, valueConverter.getClass());

    if (retryWithToleranceOperator.failed()) {
        return null;
    }

    return new ProducerRecord<>(record.topic(), record.kafkaPartition(),
            ConnectUtils.checkAndConvertTimestamp(record.timestamp()),
            serializedKey, serializedValue, convertedHeaders);
}