/**
 * Creates a protocol definition backed by a default Kafka consumer/producer pair.
 */
public SpKafkaProtocol() {
  this.kafkaProducer = new SpKafkaProducer();
  this.kafkaConsumer = new SpKafkaConsumer();
}
/**
 * Factory hook: builds a fresh, independent protocol instance per invocation.
 *
 * @return a new {@link SpKafkaProtocol}
 */
@Override
public SpProtocolDefinition<KafkaTransportProtocol> createInstance() {
  SpKafkaProtocol freshInstance = new SpKafkaProtocol();
  return freshInstance;
}
}
/**
 * Connects a producer to the given broker for the given topic.
 *
 * @param url   Kafka broker URL
 * @param topic topic this producer publishes to
 */
public SpKafkaProducer(String url, String topic) {
  this.topic = topic;
  this.brokerUrl = url;
  // Producer configuration comes from getProperties(), which may read the
  // fields assigned above — keep the assignments before this call.
  this.producer = new KafkaProducer<>(getProperties());
}
/**
 * Prepares a replay run that publishes a fixed number of events to Kafka.
 *
 * @param kafkaUrl              broker URL handed to the internal producer
 * @param totalNumberOfEvents   number of events to emit before finishing
 * @param waitTimeBetweenEvents delay between events (units not visible here — TODO confirm)
 * @param threadId              identifier reported back through the status notifier
 * @param statusNotifier        callback informed when the replay finishes
 */
// NOTE(review): the producer is constructed with the field `topic`, which is NOT a
// constructor parameter — if `topic` has no initializer it is still null at this
// point; verify it is assigned before this constructor runs.
public DataSimulator(String kafkaUrl, Long totalNumberOfEvents, Long waitTimeBetweenEvents, String threadId, DataReplayStatusNotifier statusNotifier) { this.kafkaProducer = new SpKafkaProducer(kafkaUrl, topic); this.threadId = threadId; this.totalNumberOfEvents = totalNumberOfEvents; this.waitTimeBetweenEvents = waitTimeBetweenEvents; this.statusNotifier = statusNotifier; this.random = new Random(); }
/**
 * Publishes a string message by delegating to the byte-array overload.
 *
 * <p>Encodes explicitly as UTF-8: the no-arg {@code String.getBytes()} uses the
 * platform-default charset, which makes the wire format machine-dependent.
 *
 * @param message payload to publish
 */
public void publish(String message) {
  publish(message.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}
/**
 * Replays events until the configured total is reached, then disconnects the
 * producer and reports completion for this thread.
 *
 * <p>Rewritten from do-while to while so that a configured total of zero (or a
 * negative value) publishes no events instead of always publishing one.
 */
// NOTE(review): waitTimeBetweenEvents is never consulted in this loop — possibly
// the pacing happens inside nextEvent(); confirm against that implementation.
@Override
public void run() {
  long eventCount = 0L; // primitive long avoids per-iteration autoboxing
  while (eventCount < totalNumberOfEvents) {
    kafkaProducer.publish(nextEvent());
    eventCount++;
  }
  this.kafkaProducer.disconnect();
  statusNotifier.onFinished(threadId);
}
/**
 * Starts a background Kafka consumer that listens on the internal
 * "stopped pipeline" topic and dispatches records via {@code KafkaCallback}.
 * Also (re)initializes the two tracking maps used by that callback.
 */
// NOTE(review): "streamepipes" in the topic name looks like a typo for
// "streampipes" — but producers must publish to the identical string, so do not
// change it here without also fixing the publishing side. Confirm before touching.
@Override public void run() { streamToObserver = new HashMap<>(); streamToStoppedMonitoringPipeline = new HashMap<>(); String topic = "internal.streamepipes.sec.stopped"; kafkaConsumerGroup = new SpKafkaConsumer(BackendConfig.INSTANCE.getKafkaUrl(), topic, new KafkaCallback()); Thread thread = new Thread(kafkaConsumerGroup); thread.start(); }
/** Default protocol wiring: one plain Kafka consumer and one plain producer. */
public SpKafkaProtocol() {
  this.kafkaProducer = new SpKafkaProducer();
  this.kafkaConsumer = new SpKafkaConsumer();
}
@Deprecated // TODO remove public SendToPipeline(Format format, String brokerUrl, String topic) { this.format = format; producer = new SpKafkaProducer(brokerUrl, topic); objectMapper = new ObjectMapper(); }
/**
 * Publishes a string message by delegating to the byte-array overload.
 *
 * <p>Encodes explicitly as UTF-8: the no-arg {@code String.getBytes()} uses the
 * platform-default charset, which makes the wire format machine-dependent.
 *
 * @param message payload to publish
 */
public void publish(String message) {
  publish(message.getBytes(java.nio.charset.StandardCharsets.UTF_8));
}
/**
 * Launches a Kafka consumer thread that feeds every consumed record through the
 * given adapter pipeline.
 *
 * @param adapterPipeline pipeline the consumed events are pushed into
 */
@Override
public void run(AdapterPipeline adapterPipeline) {
  SendToPipeline pipelineSink = new SendToPipeline(format, adapterPipeline);
  EventProcessor processor = new EventProcessor(pipelineSink);
  this.kafkaConsumer = new SpKafkaConsumer(this.brokerUrl, this.topic, processor);
  thread = new Thread(this.kafkaConsumer);
  thread.start();
}
/**
 * Factory hook required by the protocol SPI — each call yields a new instance.
 *
 * @return a freshly constructed {@link SpKafkaProtocol}
 */
@Override
public SpProtocolDefinition<KafkaTransportProtocol> createInstance() {
  SpKafkaProtocol protocol = new SpKafkaProtocol();
  return protocol;
}
}
/**
 * Creates a producer bound to one broker and one destination topic.
 *
 * @param url   Kafka broker URL
 * @param topic topic this producer publishes to
 */
public SpKafkaProducer(String url, String topic) {
  this.topic = topic;
  this.brokerUrl = url;
  // getProperties() may depend on the fields set above, so construct last.
  this.producer = new KafkaProducer<>(getProperties());
}
@Deprecated // TODO remove public SendToPipeline(Format format, String brokerUrl, String topic) { this.format = format; producer = new SpKafkaProducer(brokerUrl, topic); objectMapper = new ObjectMapper(); }
/**
 * Serializes the event to JSON and publishes it to Kafka.
 *
 * <p>Removed the per-event {@code System.out.println} debug line: it flooded
 * stdout on every processed event.
 *
 * @param event key/value event payload; silently skipped when {@code null}
 * @return always {@code null} — this stage terminates the processing chain
 */
@Override
public Map<String, Object> process(Map<String, Object> event) {
  if (event != null) {
    try {
      producer.publish(objectMapper.writeValueAsBytes(event));
    } catch (JsonProcessingException e) {
      // Keep the original best-effort contract (drop the event), but say what failed.
      System.err.println("Could not serialize event for Kafka: " + e.getMessage());
      e.printStackTrace();
    }
  }
  return null;
}
}
/**
 * Spins up the consumer thread; every record it receives is handed to an
 * {@code EventProcessor} that forwards into the adapter pipeline.
 *
 * @param adapterPipeline pipeline the consumed events are pushed into
 */
@Override
public void run(AdapterPipeline adapterPipeline) {
  SendToPipeline sink = new SendToPipeline(format, adapterPipeline);
  this.kafkaConsumer = new SpKafkaConsumer(this.brokerUrl, this.topic, new EventProcessor(sink));
  thread = new Thread(this.kafkaConsumer);
  thread.start();
}
/**
 * Builds a Kafka-backed sink from the adapter's grounding information.
 *
 * @param adapterDescription description whose grounding supplies broker and topic
 */
public SendToKafkaAdapterSink(AdapterDescription adapterDescription) {
  objectMapper = new ObjectMapper();
  String broker = GroundingService.extractBroker(adapterDescription);
  String destinationTopic = GroundingService.extractTopic(adapterDescription);
  producer = new SpKafkaProducer(broker, destinationTopic);
}
/**
 * Serializes the event to JSON bytes and hands them to the Kafka producer.
 *
 * <p>Removed the per-event {@code System.out.println} debug line: it flooded
 * stdout on every processed event.
 *
 * @param event key/value event payload; silently skipped when {@code null}
 * @return always {@code null} — this stage terminates the processing chain
 */
@Override
public Map<String, Object> process(Map<String, Object> event) {
  if (event != null) {
    try {
      producer.publish(objectMapper.writeValueAsBytes(event));
    } catch (JsonProcessingException e) {
      // Preserve the original best-effort contract (drop the event) with context.
      System.err.println("Could not serialize event for Kafka: " + e.getMessage());
      e.printStackTrace();
    }
  }
  return null;
}
}
/**
 * Creates the sink, resolving broker and topic from the adapter's grounding.
 *
 * @param adapterDescription description whose grounding supplies broker and topic
 */
public SendToKafkaAdapterSink(AdapterDescription adapterDescription) {
  String resolvedBroker = GroundingService.extractBroker(adapterDescription);
  String resolvedTopic = GroundingService.extractTopic(adapterDescription);
  producer = new SpKafkaProducer(resolvedBroker, resolvedTopic);
  objectMapper = new ObjectMapper();
}
public static <I extends InvocableStreamPipesEntity> PipelineElementStatusSender getStatusSender(I graph) { SpKafkaProducer kafkaProducer = new SpKafkaProducer(); // TODO refactor return new PipelineElementStatusSender(kafkaProducer, graph.getStatusInfoSettings().getErrorTopic(), graph.getStatusInfoSettings().getStatsTopic()); }