/**
 * Creates a simulator that replays a fixed number of events to a Kafka topic.
 *
 * @param kafkaUrl              broker URL handed to the Kafka producer
 * @param totalNumberOfEvents   number of events to emit before finishing
 * @param waitTimeBetweenEvents delay between events (consumed by the run loop)
 * @param threadId              identifier reported to the status notifier
 * @param statusNotifier        callback informed when the replay completes
 */
public DataSimulator(String kafkaUrl, Long totalNumberOfEvents, Long waitTimeBetweenEvents,
                     String threadId, DataReplayStatusNotifier statusNotifier) {
  this.statusNotifier = statusNotifier;
  this.threadId = threadId;
  this.waitTimeBetweenEvents = waitTimeBetweenEvents;
  this.totalNumberOfEvents = totalNumberOfEvents;
  // NOTE(review): 'topic' is a field set elsewhere — confirm it is initialized before construction
  this.kafkaProducer = new SpKafkaProducer(kafkaUrl, topic);
  this.random = new Random();
}
/**
 * Publishes a string message by delegating to the byte-array overload.
 *
 * Fix: the previous {@code message.getBytes()} used the platform-default
 * charset, so the produced bytes varied by machine; the payload is now
 * always encoded as UTF-8.
 *
 * @param message payload to publish to the configured topic
 */
public void publish(String message) {
  publish(message.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}
/**
 * Creates a producer bound to the given broker and topic.
 *
 * @param url   broker URL to connect to
 * @param topic topic this producer writes to
 */
public SpKafkaProducer(String url, String topic) {
  this.topic = topic;
  this.brokerUrl = url;
  // Producer configuration is assembled by getProperties()
  this.producer = new KafkaProducer<>(getProperties());
}
/**
 * Replays events until the configured total is reached, then disconnects the
 * producer and notifies the status listener with this thread's id.
 *
 * Fixes: the counter is now a primitive {@code long} (the boxed {@code Long}
 * was re-boxed on every increment), and the pre-checked while loop no longer
 * publishes a spurious event when {@code totalNumberOfEvents} is 0 (the former
 * do-while always ran at least once).
 */
// NOTE(review): waitTimeBetweenEvents is not consulted here — confirm whether a delay was intended
@Override
public void run() {
  long eventCount = 0L;
  while (eventCount < totalNumberOfEvents) {
    kafkaProducer.publish(nextEvent());
    eventCount++;
  }
  this.kafkaProducer.disconnect();
  statusNotifier.onFinished(threadId);
}
@Deprecated // TODO remove public SendToPipeline(Format format, String brokerUrl, String topic) { this.format = format; producer = new SpKafkaProducer(brokerUrl, topic); objectMapper = new ObjectMapper(); }
/**
 * Publishes a string message by delegating to the byte-array overload.
 *
 * Fix: the previous {@code message.getBytes()} used the platform-default
 * charset, so the produced bytes varied by machine; the payload is now
 * always encoded as UTF-8.
 *
 * @param message payload to publish to the configured topic
 */
public void publish(String message) {
  publish(message.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}
/**
 * Creates a producer bound to the given broker and topic.
 *
 * @param url   broker URL to connect to
 * @param topic topic this producer writes to
 */
public SpKafkaProducer(String url, String topic) {
  this.topic = topic;
  this.brokerUrl = url;
  // Producer configuration is assembled by getProperties()
  this.producer = new KafkaProducer<>(getProperties());
}
@Deprecated // TODO remove public SendToPipeline(Format format, String brokerUrl, String topic) { this.format = format; producer = new SpKafkaProducer(brokerUrl, topic); objectMapper = new ObjectMapper(); }
/**
 * Serializes the incoming event to JSON and publishes it to Kafka.
 * Null events are ignored; the method always returns null.
 *
 * @param event runtime event to forward, may be null
 * @return always null
 */
@Override
public Map<String, Object> process(Map<String, Object> event) {
  if (event == null) {
    return null;
  }
  try {
    byte[] payload = objectMapper.writeValueAsBytes(event);
    producer.publish(payload);
    System.out.println("send to kafka: " + event);
  } catch (JsonProcessingException e) {
    // NOTE(review): failure is only printed, then swallowed — consider an SLF4J logger
    e.printStackTrace();
  }
  return null;
}
}
/**
 * Opens a Kafka producer for the topic described by the protocol settings
 * and marks this instance as connected.
 *
 * @param protocolSettings transport settings carrying broker host, port and topic
 */
@Override
public void connect(KafkaTransportProtocol protocolSettings) {
  String actualTopic = protocolSettings.getTopicDefinition().getActualTopicName();
  LOG.info("Kafka producer: Connecting to " + actualTopic);
  this.topic = actualTopic;
  this.brokerUrl = protocolSettings.getBrokerHostname() + ":" + protocolSettings.getKafkaPort();
  this.producer = new KafkaProducer<>(getProperties());
  this.connected = true;
}
/**
 * Default constructor wiring up the Kafka producer and consumer used by this protocol.
 */
public SpKafkaProtocol() {
  this.kafkaProducer = new SpKafkaProducer();
  this.kafkaConsumer = new SpKafkaConsumer();
}
/**
 * Serializes the incoming event to JSON and publishes it to Kafka.
 * Null events are ignored; the method always returns null.
 *
 * @param event runtime event to forward, may be null
 * @return always null
 */
@Override
public Map<String, Object> process(Map<String, Object> event) {
  if (event == null) {
    return null;
  }
  try {
    byte[] payload = objectMapper.writeValueAsBytes(event);
    producer.publish(payload);
    System.out.println("send to kafka: " + event);
  } catch (JsonProcessingException e) {
    // NOTE(review): failure is only printed, then swallowed — consider an SLF4J logger
    e.printStackTrace();
  }
  return null;
}
}
/**
 * Opens a Kafka producer for the topic described by the protocol settings
 * and marks this instance as connected.
 *
 * @param protocolSettings transport settings carrying broker host, port and topic
 */
@Override
public void connect(KafkaTransportProtocol protocolSettings) {
  String actualTopic = protocolSettings.getTopicDefinition().getActualTopicName();
  LOG.info("Kafka producer: Connecting to " + actualTopic);
  this.topic = actualTopic;
  this.brokerUrl = protocolSettings.getBrokerHostname() + ":" + protocolSettings.getKafkaPort();
  this.producer = new KafkaProducer<>(getProperties());
  this.connected = true;
}
/**
 * Default constructor wiring up the Kafka producer and consumer used by this protocol.
 */
public SpKafkaProtocol() {
  this.kafkaProducer = new SpKafkaProducer();
  this.kafkaConsumer = new SpKafkaConsumer();
}
/**
 * Creates an adapter sink that publishes to the broker and topic declared
 * in the adapter description's grounding.
 *
 * @param adapterDescription adapter whose grounding supplies broker URL and topic
 */
public SendToKafkaAdapterSink(AdapterDescription adapterDescription) {
  producer = new SpKafkaProducer(
      GroundingService.extractBroker(adapterDescription),
      GroundingService.extractTopic(adapterDescription));
  objectMapper = new ObjectMapper();
}
/**
 * Creates an adapter sink that publishes to the broker and topic declared
 * in the adapter description's grounding.
 *
 * @param adapterDescription adapter whose grounding supplies broker URL and topic
 */
public SendToKafkaAdapterSink(AdapterDescription adapterDescription) {
  producer = new SpKafkaProducer(
      GroundingService.extractBroker(adapterDescription),
      GroundingService.extractTopic(adapterDescription));
  objectMapper = new ObjectMapper();
}
public static <I extends InvocableStreamPipesEntity> PipelineElementStatusSender getStatusSender(I graph) { SpKafkaProducer kafkaProducer = new SpKafkaProducer(); // TODO refactor return new PipelineElementStatusSender(kafkaProducer, graph.getStatusInfoSettings().getErrorTopic(), graph.getStatusInfoSettings().getStatsTopic()); }