/**
 * Creates a synthetic BinlogFileOffset whose filename suffix and offset are both
 * derived from the current wall-clock time, so successive calls produce distinct,
 * monotonically increasing offsets.
 */
public BinlogFileOffset generateBinlogFileOffset() {
  long timestamp = System.currentTimeMillis();
  String syntheticFilename = "binlog.filename." + timestamp;
  return new BinlogFileOffset(syntheticFilename, timestamp);
}
/**
 * Returns true when this offset is at the same position as, or later than, the given one.
 * Equal offsets count as "same"; within the same binlog file a strictly larger offset wins;
 * across files the lexicographically later filename wins.
 */
public boolean isSameOrAfter(BinlogFileOffset binlogFileOffset) {
  if (this.equals(binlogFileOffset)) {
    return true;
  }
  boolean sameFile = this.getBinlogFilename().equals(binlogFileOffset.getBinlogFilename());
  if (sameFile) {
    // Same file: strictly greater position means "after" (equality was handled above).
    return this.getOffset() > binlogFileOffset.getOffset();
  }
  // Different files: binlog filenames sort lexicographically in rotation order.
  return this.getBinlogFilename().compareTo(binlogFileOffset.getBinlogFilename()) > 0;
}
// Event callback: while duplicates may still be read (e.g. after a restart), drop every
// event at or before the recorded starting binlog offset. The first event past that
// offset disables duplicate filtering for the rest of the stream.
// NOTE(review): trailing "});" closes an enclosing anonymous class that starts outside this view.
@Override public void accept(EVENT publishedEvent) {
  if (couldReadDuplicateEntries) {
    // orElse(false): with no starting offset recorded, nothing is treated as a duplicate.
    if (startingBinlogFileOffset.map(s -> s.isSameOrAfter(publishedEvent.getBinlogFileOffset())).orElse(false)) {
      return;
    } else {
      couldReadDuplicateEntries = false;
    }
  }
  eventConsumer.accept(publishedEvent);
} });
// Configure the binlog client and resume from the saved offset.
client.setKeepAliveInterval(5 * 1000); // keep-alive every 5 seconds
// Fallback when no offset was saved: empty filename, position 4 — presumably just past
// the 4-byte binlog magic header, i.e. the first real event; TODO confirm.
BinlogFileOffset bfo = binlogFileOffset.orElse(new BinlogFileOffset("", 4L));
logger.debug("Starting with {}", bfo);
client.setBinlogFilename(bfo.getBinlogFilename());
client.setBinlogPosition(bfo.getOffset());
/**
 * Verifies that an offset saved via saveTestOffset() is read back unchanged
 * (same binlog filename and position) through getDebeziumOffset().
 */
@Test
public void test() throws NoSuchFieldException, IllegalAccessException, InterruptedException, ExecutionException {
  saveTestOffset();
  BinlogFileOffset readBack = getDebeziumOffset();
  Assert.assertEquals(file, readBack.getBinlogFilename());
  Assert.assertEquals(offset, readBack.getOffset());
}
// Resolve the WAL position to resume from: use the stored offset's LSN when present,
// otherwise fall back to "0/0" (the very beginning of the WAL).
.flatMap(offset -> Optional.ofNullable(offset.getOffset()).map(LogSequenceNumber::valueOf))
.orElse(LogSequenceNumber.valueOf("0/0"));
/**
 * Extracts a BinlogFileOffset from a Debezium offset-storage record.
 * Returns null (record skipped) when the record belongs to a different
 * connector/server or when the key/value cannot be parsed.
 */
@Override
protected BinlogFileOffset handleRecord(ConsumerRecord<String, String> record) {
  try {
    Map<String, Object> key = JSonMapper.fromJson(record.key(), Map.class);
    List<Object> payload = (List<Object>) key.get("payload");
    String connectorName = (String) payload.get(0);
    String serverName = ((Map<String, String>) payload.get(1)).get("server");
    boolean matchesOurConnector =
        "my-sql-connector".equals(connectorName) && "my-app-connector".equals(serverName);
    if (!matchesOurConnector) {
      return null;
    }
    Map<String, Object> value = JSonMapper.fromJson(record.value(), Map.class);
    // "pos" deserializes as Integer for small values and Long for large ones.
    Object pos = value.get("pos");
    long offset;
    if (pos instanceof Long) {
      offset = (Long) pos;
    } else {
      offset = (Integer) pos;
    }
    return new BinlogFileOffset((String) value.get("file"), offset);
  } catch (Exception e) {
    // Malformed records are logged and skipped rather than failing the scan.
    logger.error(e.getMessage(), e);
    return null;
  }
}
// Event callback: while duplicates may still be read (e.g. after a restart), drop every
// event at or before the recorded starting binlog offset. The first event past that
// offset disables duplicate filtering for the rest of the stream.
// NOTE(review): trailing "});" closes an enclosing anonymous class that starts outside this view.
@Override public void accept(EVENT publishedEvent) {
  if (couldReadDuplicateEntries) {
    // orElse(false): with no starting offset recorded, nothing is treated as a duplicate.
    if (startingBinlogFileOffset.map(s -> s.isSameOrAfter(publishedEvent.getBinlogFileOffset())).orElse(false)) {
      return;
    } else {
      couldReadDuplicateEntries = false;
    }
  }
  eventConsumer.accept(publishedEvent);
} });
/**
 * Converts a Postgres WAL message into PublishedEvents.
 * Only "insert" changes on the "events" table are mapped; each column is located
 * by name in the change's column list. The replication slot name plus the LSN
 * serve as this event's deduplication offset.
 */
@Override
public List<PublishedEvent> parse(PostgresWalMessage message, long lastSequenceNumber, String slotName) {
  return Arrays.stream(message.getChange())
      .filter(change -> change.getKind().equals("insert") && change.getTable().equals("events"))
      .map(insert -> {
        List<String> names = Arrays.asList(insert.getColumnnames());
        List<String> values = Arrays.asList(insert.getColumnvalues());
        BinlogFileOffset offset = new BinlogFileOffset(slotName, lastSequenceNumber);
        return new PublishedEvent(
            values.get(names.indexOf("event_id")),
            values.get(names.indexOf("entity_id")),
            values.get(names.indexOf("entity_type")),
            values.get(names.indexOf("event_data")),
            values.get(names.indexOf("event_type")),
            offset,
            // metadata column may be absent/null
            Optional.ofNullable(values.get(names.indexOf("metadata"))));
      })
      .collect(Collectors.toList());
} }
// Map each record to its binlog offset, drop nulls, and keep the greatest offset seen.
// NOTE(review): the comparator never returns 0, which violates the Comparator contract in
// general, but is adequate for max() given isSameOrAfter defines a total order.
logger.info(String.format("got record: %s %s %s", record.partition(), record.offset(), record.value()));
return JSonMapper.fromJson(record.value(), PublishedEvent.class).getBinlogFileOffset();
}).filter(binlogFileOffset -> binlogFileOffset!=null).max((blfo1, blfo2) -> blfo1.isSameOrAfter(blfo2) ? 1 : -1);
consumer.close();
return max;
/**
 * Converts a Postgres WAL message into messages ready for publishing.
 * Only "insert" changes on the "message" table are mapped; columns are located by
 * name, and the slot name plus the LSN serve as the deduplication offset.
 */
@Override
public List<MessageWithDestination> parse(PostgresWalMessage message, long lastSequenceNumber, String slotName) {
  return Arrays.stream(message.getChange())
      .filter(change -> change.getKind().equals("insert") && change.getTable().equals("message"))
      .map(insert -> {
        List<String> names = Arrays.asList(insert.getColumnnames());
        List<String> values = Arrays.asList(insert.getColumnvalues());
        String destination = values.get(names.indexOf("destination"));
        String payload = values.get(names.indexOf("payload"));
        // headers column holds a JSON object
        Map headers = JSonMapper.fromJson(values.get(names.indexOf("headers")), Map.class);
        return new MessageWithDestination(destination,
            new MessageImpl(payload, headers),
            new BinlogFileOffset(slotName, lastSequenceNumber));
      })
      .collect(Collectors.toList());
} }
/**
 * Converts a Postgres WAL message into messages ready for publishing.
 * Only "insert" changes on the "message" table are mapped; columns are located by
 * name, and the slot name plus the LSN serve as the deduplication offset.
 */
@Override
public List<MessageWithDestination> parse(PostgresWalMessage message, long lastSequenceNumber, String slotName) {
  return Arrays.stream(message.getChange())
      .filter(change -> change.getKind().equals("insert") && change.getTable().equals("message"))
      .map(insert -> {
        List<String> names = Arrays.asList(insert.getColumnnames());
        List<String> values = Arrays.asList(insert.getColumnvalues());
        String destination = values.get(names.indexOf("destination"));
        String payload = values.get(names.indexOf("payload"));
        // headers column holds a JSON object
        Map headers = JSonMapper.fromJson(values.get(names.indexOf("headers")), Map.class);
        return new MessageWithDestination(destination,
            new MessageImpl(payload, headers),
            new BinlogFileOffset(slotName, lastSequenceNumber));
      })
      .collect(Collectors.toList());
} }
/**
 * Maps a binlog WriteRows event from the message table to a MessageWithDestination.
 * Column-order metadata is loaded lazily on first use; the binlog file and position
 * are recorded both in the message headers and as the deduplication offset.
 *
 * @throws RuntimeException when column metadata cannot be loaded (wraps SQLException)
 */
@Override
public MessageWithDestination parseEventData(WriteRowsEventData eventData, String binlogFilename, long position) throws IOException {
  if (columnOrders.isEmpty()) {
    try {
      getColumnOrders();
    } catch (SQLException e) {
      logger.error("Error getting metadata", e);
      throw new RuntimeException(e);
    }
  }
  String messageId = (String) getValue(eventData, ID);
  String destination = (String) getValue(eventData, DESTINATION);
  String payload = (String) getValue(eventData, PAYLOAD);
  Map<String, String> headers = JSonMapper.fromJson((String) getValue(eventData, HEADERS), Map.class);
  // Augment the stored headers with the id and the source binlog coordinates.
  headers.put(Message.ID, messageId);
  headers.put("binlogfile", binlogFilename);
  headers.put("binlogposition", Long.toString(position));
  BinlogFileOffset offset = new BinlogFileOffset(binlogFilename, position);
  return new MessageWithDestination(destination, new MessageImpl(payload, headers), offset);
}
/**
 * Maps a binlog WriteRows event from the message table to a MessageWithDestination.
 * Column-order metadata is loaded lazily on first use; the binlog file and position
 * are recorded both in the message headers and as the deduplication offset.
 *
 * @throws RuntimeException when column metadata cannot be loaded (wraps SQLException)
 */
@Override
public MessageWithDestination parseEventData(WriteRowsEventData eventData, String binlogFilename, long position) throws IOException {
  if (columnOrders.isEmpty()) {
    try {
      getColumnOrders();
    } catch (SQLException e) {
      logger.error("Error getting metadata", e);
      throw new RuntimeException(e);
    }
  }
  String messageId = (String) getValue(eventData, ID);
  String destination = (String) getValue(eventData, DESTINATION);
  String payload = (String) getValue(eventData, PAYLOAD);
  Map<String, String> headers = JSonMapper.fromJson((String) getValue(eventData, HEADERS), Map.class);
  // Augment the stored headers with the id and the source binlog coordinates.
  headers.put(Message.ID, messageId);
  headers.put("binlogfile", binlogFilename);
  headers.put("binlogposition", Long.toString(position));
  BinlogFileOffset offset = new BinlogFileOffset(binlogFilename, position);
  return new MessageWithDestination(destination, new MessageImpl(payload, headers), offset);
}
/**
 * Maps a binlog WriteRows event from the events table to a PublishedEvent.
 * Column-order metadata is loaded lazily on first use. The event payload may arrive
 * either as a plain String or as MySQL binary JSON, which is decoded via JsonBinary.
 *
 * @throws RuntimeException when column metadata cannot be loaded (wraps SQLException)
 */
@Override
public PublishedEvent parseEventData(WriteRowsEventData eventData, String binlogFilename, long position) throws IOException {
  if (columnOrders.isEmpty()) {
    try {
      getColumnOrders();
    } catch (SQLException e) {
      throw new RuntimeException(e);
    }
  }
  Object rawEventData = getValue(eventData, EVENT_DATA_FIELDNAME);
  String eventDataValue = rawEventData instanceof String
      ? (String) rawEventData
      : JsonBinary.parseAsString((byte[]) rawEventData);
  return new PublishedEvent(
      (String) getValue(eventData, EVENT_ID_FIELDNAME),
      (String) getValue(eventData, ENTITY_ID_FIELDNAME),
      (String) getValue(eventData, ENTITY_TYPE_FIELDNAME),
      eventDataValue,
      (String) getValue(eventData, EVENT_TYPE_FIELDNAME),
      new BinlogFileOffset(binlogFilename, position),
      // metadata column may be null
      Optional.ofNullable((String) getValue(eventData, EVENT_METADATA_FIELDNAME)));
}
/** Publishes ten PublishedEvents with offsets 0..9 on the given binlog file to the topic. */
private void floodTopic(Producer<String, String> producer, String binlogFilename, String topicName) {
  for (int offset = 0; offset < 10; offset++) {
    PublishedEvent event = new PublishedEvent();
    event.setEntityId(UUID.randomUUID().toString());
    event.setBinlogFileOffset(new BinlogFileOffset(binlogFilename, (long) offset));
    String json = JSonMapper.toJson(event);
    // entity id doubles as the record key
    producer.send(new ProducerRecord<>(topicName, event.getEntityId(), json));
  }
}
/**
 * After flooding a topic with offsets 0..9, an offset at or before the last published
 * one (1) must be suppressed as a duplicate, while the next unseen offset (10) must
 * be allowed through.
 */
@Test
public void shouldBePublishedTest() {
  String topicName = generateUniqueTopicName();
  String binlogFilename = "binlog.file." + System.currentTimeMillis();
  DuplicatePublishingDetector detector = new DuplicatePublishingDetector(
      eventuateKafkaConfigurationProperties.getBootstrapServers(),
      EventuateKafkaConsumerConfigurationProperties.empty());
  Producer<String, String> producer =
      createProducer(eventuateKafkaConfigurationProperties.getBootstrapServers());
  floodTopic(producer, binlogFilename, topicName);
  producer.close();
  assertFalse(detector.shouldBePublished(new BinlogFileOffset(binlogFilename, 1L), topicName));
  assertTrue(detector.shouldBePublished(new BinlogFileOffset(binlogFilename, 10L), topicName));
}
/**
 * A stale event appended after newer ones must not confuse the detector:
 * an offset beyond the newest published entry (10) is still publishable.
 * (Method name typo "Entires" is kept intentionally — renaming would change the
 * test's public identifier.)
 */
@Test
public void shouldHandlePublishCheckForOldEntires() {
  String topicName = generateUniqueTopicName();
  String binlogFilename = "binlog.file." + System.currentTimeMillis();
  DuplicatePublishingDetector detector = new DuplicatePublishingDetector(
      eventuateKafkaConfigurationProperties.getBootstrapServers(),
      EventuateKafkaConsumerConfigurationProperties.empty());
  Producer<String, String> producer =
      createProducer(eventuateKafkaConfigurationProperties.getBootstrapServers());
  floodTopic(producer, binlogFilename, topicName);
  sendOldPublishedEvent(producer, topicName);
  producer.close();
  assertTrue(detector.shouldBePublished(new BinlogFileOffset(binlogFilename, 10L), topicName));
}