/**
 * Decodes a Kafka record carrying a JSON-serialized PublishedEvent and wraps it,
 * together with its topic position, into a SerializedEvent.
 */
private SerializedEvent toSerializedEvent(ConsumerRecord<String, String> record) {
  // The record value is the JSON form of the event as it was published.
  PublishedEvent event = JSonMapper.fromJson(record.value(), PublishedEvent.class);
  return new SerializedEvent(
      Int128.fromString(event.getId()),
      event.getEntityId(),
      event.getEntityType(),
      event.getEventData(),
      event.getEventType(),
      record.partition(),
      record.offset(),
      // Context ties the event back to its exact topic/partition/offset origin.
      EtopEventContext.make(event.getId(), record.topic(), record.partition(), record.offset()),
      event.getMetadata());
}
/**
 * Polls the queue until an event with the given id and event data arrives,
 * or the deadline passes.
 *
 * @param publishedEvents queue the consumer feeds events into
 * @param eventId id the awaited event must carry
 * @param deadline wall-clock instant after which we give up
 * @param eventData payload the awaited event must carry
 * @return the matching event
 * @throws InterruptedException if interrupted while waiting
 * @throws RuntimeException if the deadline passes without a match
 */
public PublishedEvent waitForEvent(BlockingQueue<PublishedEvent> publishedEvents, Int128 eventId,
                                   LocalDateTime deadline, String eventData) throws InterruptedException {
  while (LocalDateTime.now().isBefore(deadline)) {
    // Wait at most the time remaining until the deadline. The original computed
    // between(deadline, now), which is negative while the deadline is still in
    // the future, so poll() returned immediately and the loop busy-spun.
    long millis = ChronoUnit.MILLIS.between(LocalDateTime.now(), deadline);
    PublishedEvent event = publishedEvents.poll(millis, TimeUnit.MILLISECONDS);
    if (event != null
        && event.getId().equals(eventId.asString())
        && eventData.equals(event.getEventData())) {
      return event;
    }
  }
  throw new RuntimeException("event not found: " + eventId);
}
/**
 * Publishes ten filler events to the topic, each tagged with a monotonically
 * increasing offset into the given binlog file.
 */
private void floodTopic(Producer<String, String> producer, String binlogFilename, String topicName) {
  for (int offset = 0; offset < 10; offset++) {
    PublishedEvent event = new PublishedEvent();
    event.setEntityId(UUID.randomUUID().toString());
    event.setBinlogFileOffset(new BinlogFileOffset(binlogFilename, (long) offset));
    // Key by entity id so related events land on the same partition.
    producer.send(new ProducerRecord<>(topicName, event.getEntityId(), JSonMapper.toJson(event)));
  }
}
/**
 * Publishes ten events in the "old" format: no binlog file offset is set,
 * mimicking events produced before offsets were recorded.
 */
private void sendOldPublishedEvent(Producer<String, String> producer, String topicName) {
  for (int count = 0; count < 10; count++) {
    PublishedEvent event = new PublishedEvent();
    event.setEntityId(UUID.randomUUID().toString());
    // Key by entity id so related events land on the same partition.
    producer.send(new ProducerRecord<>(topicName, event.getEntityId(), JSonMapper.toJson(event)));
  }
}
/** Returns the unique identifier of the given published event. */
@Override
public String getId(PublishedEvent data) {
  String eventId = data.getId();
  return eventId;
}
/**
 * Converts a Postgres WAL message into the PublishedEvents it contains.
 *
 * <p>Only {@code insert} changes on the {@code events} table are considered.
 * Each such row is mapped column-by-column (columns are located by name, so
 * their order in the WAL message does not matter) into a PublishedEvent whose
 * binlog offset is the replication slot name plus the WAL sequence number.
 *
 * @param message decoded WAL message holding zero or more changes
 * @param lastSequenceNumber WAL sequence number used as the event's offset
 * @param slotName replication slot name used as the event's "file" name
 * @return events parsed from the message, possibly empty
 */
@Override
public List<PublishedEvent> parse(PostgresWalMessage message, long lastSequenceNumber, String slotName) {
  List<PostgresWalChange> changes = Arrays.asList(message.getChange());
  return changes
          .stream()
          .filter(change -> change.getKind().equals("insert") && change.getTable().equals("events"))
          .map(insertedEvent -> {
            List<String> columns = Arrays.asList(insertedEvent.getColumnnames());
            int id = columns.indexOf("event_id");
            int entityId = columns.indexOf("entity_id");
            int entityType = columns.indexOf("entity_type");
            int eventData = columns.indexOf("event_data"); // was misnamed "eventDate"
            int eventType = columns.indexOf("event_type");
            int metadata = columns.indexOf("metadata");
            List<String> values = Arrays.asList(insertedEvent.getColumnvalues());
            // The metadata column may be absent from the change; indexOf() then
            // returns -1 and values.get(-1) would throw, so treat a missing
            // column the same as a NULL metadata value.
            String metadataValue = metadata >= 0 ? values.get(metadata) : null;
            return new PublishedEvent(values.get(id),
                    values.get(entityId),
                    values.get(entityType),
                    values.get(eventData),
                    values.get(eventType),
                    new BinlogFileOffset(slotName, lastSequenceNumber),
                    Optional.ofNullable(metadataValue));
          })
          .collect(Collectors.toList());
}
}
// Persist the binlog position of the event that was just sent, so that after a
// restart publishing resumes from the last acknowledged offset.
@Override public void onEventSent(PublishedEvent publishedEvent) { offsetStore.save(publishedEvent.getBinlogFileOffset()); } }
/**
 * Uses the entity id as the Kafka partition key, so all events for the same
 * entity are delivered in order on one partition.
 */
@Override
public String partitionKeyFor(PublishedEvent publishedEvent) {
  String entityId = publishedEvent.getEntityId();
  return entityId;
}
/** Resolves the destination topic from the event's entity (aggregate) type. */
@Override
public String topicFor(PublishedEvent publishedEvent) {
  String aggregateType = publishedEvent.getEntityType();
  return AggregateTopicMapping.aggregateTypeToTopic(aggregateType);
}
/**
 * Polls the queue until an event with the given id and event data arrives, or
 * the deadline passes.
 *
 * @param publishedEvents queue the consumer feeds events into
 * @param eventId id the awaited event must carry
 * @param deadline wall-clock instant after which we give up
 * @param eventData payload the awaited event must carry
 * @param excludedIds ids that must never appear; seeing one fails immediately
 * @return the matching event
 * @throws InterruptedException if interrupted while waiting
 * @throws RuntimeException if an excluded id is seen, or the deadline passes
 */
private PublishedEvent waitForEventExcluding(BlockingQueue<PublishedEvent> publishedEvents, Int128 eventId,
                                             LocalDateTime deadline, String eventData,
                                             List<String> excludedIds) throws InterruptedException {
  while (LocalDateTime.now().isBefore(deadline)) {
    // Wait at most the time remaining until the deadline. The original computed
    // between(deadline, now), which is negative while the deadline is still in
    // the future, so poll() returned immediately and the loop busy-spun.
    long millis = ChronoUnit.MILLIS.between(LocalDateTime.now(), deadline);
    PublishedEvent event = publishedEvents.poll(millis, TimeUnit.MILLISECONDS);
    if (event != null) {
      if (event.getId().equals(eventId.asString()) && eventData.equals(event.getEventData())) {
        return event;
      }
      if (excludedIds.contains(event.getId())) {
        throw new RuntimeException("Event with excluded id found in the queue");
      }
    }
  }
  throw new RuntimeException("event not found: " + eventId);
}
}
/**
 * Derives the event's creation time from its id: the high 64 bits of the
 * Int128 id carry the timestamp component.
 */
@Override
public Optional<Long> getCreateTime(PublishedEvent publishedEvent) {
  Int128 id = Int128.fromString(publishedEvent.getId());
  return Optional.of(id.getHi());
}
/**
 * Maps a database-backed event bean onto a PublishedEvent.
 * The binlog file offset is deliberately null: events read from the event
 * table have no binlog position.
 */
@Override
public PublishedEvent transformEventBeanToEvent(PublishedEventBean bean) {
  return new PublishedEvent(
          bean.getEventId(),
          bean.getEntityId(),
          bean.getEntityType(),
          bean.getEventData(),
          bean.getEventType(),
          null,
          bean.getMetadataOptional());
}
}
// Persist the binlog position of the event that was just sent, so that after a
// restart publishing resumes from the last acknowledged offset.
@Override public void onEventSent(PublishedEvent publishedEvent) { offsetStore.save(publishedEvent.getBinlogFileOffset()); } }
/**
 * Decodes a Kafka record carrying a JSON-serialized PublishedEvent and wraps it,
 * together with its topic position, into a SerializedEvent.
 */
private SerializedEvent toSerializedEvent(ConsumerRecord<String, String> record) {
  // The record value is the JSON form of the event as it was published.
  PublishedEvent event = JSonMapper.fromJson(record.value(), PublishedEvent.class);
  return new SerializedEvent(
      Int128.fromString(event.getId()),
      event.getEntityId(),
      event.getEntityType(),
      event.getEventData(),
      event.getEventType(),
      record.partition(),
      record.offset(),
      // Context ties the event back to its exact topic/partition/offset origin.
      EtopEventContext.make(event.getId(), record.topic(), record.partition(), record.offset()),
      event.getMetadata());
}
// NOTE(review): mid-method fragment of a test's polling loop (braces are
// unbalanced in this view). It marks success when the dequeued event matches
// the awaited id+data, and fails fast if an event belonging to the OTHER
// schema's save result appears — i.e. events leaked across schemas.
if (event != null) { System.out.println("Got: " + event); if (event.getId().equals(eventId.asString()) && eventData.equals(event.getEventData())) { foundEvent = true; } else if (event.getId().equals(otherSaveResult.getEntityIdVersionAndEventIds().getEventIds().get(0).asString())) { fail("Found event inserted into other schema");
/**
 * Maps a MySQL binlog WriteRows event to a PublishedEvent.
 *
 * <p>Column order is resolved lazily on first use (it requires a database
 * round trip); values are then pulled from the row by field name. The
 * {@code event_data} column arrives either as a plain String or, for native
 * MySQL JSON columns, as a binary-encoded payload that must be decoded.
 *
 * @param eventData row image from the binlog
 * @param binlogFilename binlog file the row was read from
 * @param position offset of the row within the binlog file
 * @return the reconstructed event, positioned at (binlogFilename, position)
 * @throws IOException if decoding the binary JSON payload fails
 */
@Override
public PublishedEvent parseEventData(WriteRowsEventData eventData, String binlogFilename, long position) throws IOException {
  if (columnOrders.isEmpty()) {
    try {
      getColumnOrders();
    } catch (SQLException e) {
      throw new RuntimeException(e);
    }
  }
  // Fetch the raw event_data value once (the original called getValue() for it
  // up to three times).
  Object rawEventData = getValue(eventData, EVENT_DATA_FIELDNAME);
  String eventDataValue;
  if (rawEventData instanceof String) {
    eventDataValue = (String) rawEventData;
  } else {
    // Native JSON column: decode MySQL's binary JSON representation.
    eventDataValue = JsonBinary.parseAsString((byte[]) rawEventData);
  }
  return new PublishedEvent(
          (String) getValue(eventData, EVENT_ID_FIELDNAME),
          (String) getValue(eventData, ENTITY_ID_FIELDNAME),
          (String) getValue(eventData, ENTITY_TYPE_FIELDNAME),
          eventDataValue,
          (String) getValue(eventData, EVENT_TYPE_FIELDNAME),
          new BinlogFileOffset(binlogFilename, position),
          Optional.ofNullable((String) getValue(eventData, EVENT_METADATA_FIELDNAME))
  );
}
// Persist the binlog position of the event that was just sent, so that after a
// restart publishing resumes from the last acknowledged offset.
@Override public void onEventSent(PublishedEvent publishedEvent) { offsetStore.save(publishedEvent.getBinlogFileOffset()); } }
// Scan every fetched record, deserialize its PublishedEvent, and keep the
// greatest non-null binlog offset seen; then release the consumer.
// NOTE(review): the comparator returns only 1 or -1, never 0 for equal
// offsets — acceptable for max(), but confirm isSameOrAfter induces a
// consistent ordering here. ('records' and 'consumer' are defined outside
// this view.)
Optional<BinlogFileOffset> max = StreamSupport.stream(records.spliterator(), false).map(record -> { logger.info(String.format("got record: %s %s %s", record.partition(), record.offset(), record.value())); return JSonMapper.fromJson(record.value(), PublishedEvent.class).getBinlogFileOffset(); }).filter(binlogFileOffset -> binlogFileOffset!=null).max((blfo1, blfo2) -> blfo1.isSameOrAfter(blfo2) ? 1 : -1); consumer.close();
// Persist the binlog position of the event that was just sent.
// NOTE(review): this builds a fresh binary-log client and offset store on
// EVERY event — looks expensive; confirm whether the store should be created
// once and reused.
@Override public void onEventSent(PublishedEvent publishedEvent) { createDatabaseOffsetKafkaStore(createMySqlBinaryLogClient()).save(publishedEvent.getBinlogFileOffset()); }