public SpKafkaProtocol() {
  // Wire up the default producer/consumer pair this protocol delegates to.
  kafkaProducer = new SpKafkaProducer();
  kafkaConsumer = new SpKafkaConsumer();
}
@Override
public void stop() {
  // Disconnect from the broker first so no further events are delivered
  // while the adapter is shutting down.
  try {
    kafkaConsumer.disconnect();
  } catch (SpRuntimeException e) {
    // Log with the cause instead of printStackTrace() so the failure is
    // visible in the adapter's log output.
    logger.error("Could not disconnect the Kafka consumer", e);
  }
  // Grace period so in-flight records can drain before the worker thread
  // is interrupted. NOTE(review): 5s is a magic number — confirm whether a
  // shorter/configurable timeout is acceptable.
  try {
    Thread.sleep(5000);
  } catch (InterruptedException e) {
    // Restore the interrupt flag so callers can observe the interruption.
    Thread.currentThread().interrupt();
  }
  thread.interrupt();
  // Typo fixed ("sucessfully" -> "successfully"); logged after the thread
  // is actually interrupted so the message reflects reality.
  logger.info("Kafka Adapter was successfully stopped");
}
@Override public void run() { KafkaConsumer<String, byte[]> kafkaConsumer = new KafkaConsumer<>(getProperties()); if (!patternTopic) { kafkaConsumer.subscribe(Collections.singletonList(topic)); } else { topic = replaceWildcardWithPatternFormat(topic); kafkaConsumer.subscribe(Pattern.compile(topic), new ConsumerRebalanceListener() { @Override public void onPartitionsRevoked(Collection<TopicPartition> partitions) { // TODO } @Override public void onPartitionsAssigned(Collection<TopicPartition> partitions) { // TODO } }); } while (isRunning) { ConsumerRecords<String, byte[]> records = kafkaConsumer.poll(100); for (ConsumerRecord<String, byte[]> record : records) { eventProcessor.onEvent(record.value()); } } LOG.info("Closing Kafka Consumer."); kafkaConsumer.close(); }
public SpKafkaConsumer(String kafkaUrl, String topic, InternalEventProcessor<byte[]> callback) { KafkaTransportProtocol protocol = new KafkaTransportProtocol(); protocol.setKafkaPort(Integer.parseInt(kafkaUrl.split(":")[1])); protocol.setBrokerHostname(kafkaUrl.split(":")[0]); protocol.setTopicDefinition(new SimpleTopicDefinition(topic)); try { this.connect(protocol, callback); } catch (SpRuntimeException e) { e.printStackTrace(); } }
@Override public void run() { KafkaConsumer<String, byte[]> kafkaConsumer = new KafkaConsumer<>(getProperties()); if (!patternTopic) { kafkaConsumer.subscribe(Collections.singletonList(topic)); } else { topic = replaceWildcardWithPatternFormat(topic); kafkaConsumer.subscribe(Pattern.compile(topic), new ConsumerRebalanceListener() { @Override public void onPartitionsRevoked(Collection<TopicPartition> partitions) { // TODO } @Override public void onPartitionsAssigned(Collection<TopicPartition> partitions) { // TODO } }); } while (isRunning) { ConsumerRecords<String, byte[]> records = kafkaConsumer.poll(100); for (ConsumerRecord<String, byte[]> record : records) { eventProcessor.onEvent(record.value()); } } LOG.info("Closing Kafka Consumer."); kafkaConsumer.close(); }
public SpKafkaConsumer(String kafkaUrl, String topic, InternalEventProcessor<byte[]> callback) { KafkaTransportProtocol protocol = new KafkaTransportProtocol(); protocol.setKafkaPort(Integer.parseInt(kafkaUrl.split(":")[1])); protocol.setBrokerHostname(kafkaUrl.split(":")[0]); protocol.setTopicDefinition(new SimpleTopicDefinition(topic)); try { this.connect(protocol, callback); } catch (SpRuntimeException e) { e.printStackTrace(); } }
public SpKafkaProtocol() {
  // Wire up the default producer/consumer pair this protocol delegates to.
  kafkaProducer = new SpKafkaProducer();
  kafkaConsumer = new SpKafkaConsumer();
}
@Override
public void stop() {
  // Disconnect from the broker first so no further events are delivered
  // while the adapter is shutting down.
  try {
    kafkaConsumer.disconnect();
  } catch (SpRuntimeException e) {
    // Log with the cause instead of printStackTrace() so the failure is
    // visible in the adapter's log output.
    logger.error("Could not disconnect the Kafka consumer", e);
  }
  // Grace period so in-flight records can drain before the worker thread
  // is interrupted. NOTE(review): 5s is a magic number — confirm whether a
  // shorter/configurable timeout is acceptable.
  try {
    Thread.sleep(5000);
  } catch (InterruptedException e) {
    // Restore the interrupt flag so callers can observe the interruption.
    Thread.currentThread().interrupt();
  }
  thread.interrupt();
  // Typo fixed ("sucessfully" -> "successfully"); logged after the thread
  // is actually interrupted so the message reflects reality.
  logger.info("Kafka Adapter was successfully stopped");
}
@Override
public void run() {
  streamToObserver = new HashMap<>();
  streamToStoppedMonitoringPipeline = new HashMap<>();
  // Listen on the internal topic that announces stopped monitoring pipelines.
  final String stoppedPipelinesTopic = "internal.streamepipes.sec.stopped";
  kafkaConsumerGroup = new SpKafkaConsumer(
      BackendConfig.INSTANCE.getKafkaUrl(), stoppedPipelinesTopic, new KafkaCallback());
  // Run the consumer on its own daemon-style worker thread.
  new Thread(kafkaConsumerGroup).start();
}
@Override
public void run() {
  streamToObserver = new HashMap<>();
  streamToStoppedMonitoringPipeline = new HashMap<>();
  // Listen on the internal topic that announces stopped monitoring pipelines.
  final String stoppedPipelinesTopic = "internal.streamepipes.sec.stopped";
  kafkaConsumerGroup = new SpKafkaConsumer(
      BackendConfig.INSTANCE.getKafkaUrl(), stoppedPipelinesTopic, new KafkaCallback());
  // Run the consumer on its own daemon-style worker thread.
  new Thread(kafkaConsumerGroup).start();
}
@Override
public void run(AdapterPipeline adapterPipeline) {
  // Every incoming Kafka record is parsed by "format" and forwarded into
  // the adapter pipeline.
  final SendToPipeline pipelineSink = new SendToPipeline(format, adapterPipeline);
  kafkaConsumer = new SpKafkaConsumer(brokerUrl, topic, new EventProcessor(pipelineSink));
  thread = new Thread(kafkaConsumer);
  thread.start();
}
@Override
public void run(AdapterPipeline adapterPipeline) {
  // Every incoming Kafka record is parsed by "format" and forwarded into
  // the adapter pipeline.
  final SendToPipeline pipelineSink = new SendToPipeline(format, adapterPipeline);
  kafkaConsumer = new SpKafkaConsumer(brokerUrl, topic, new EventProcessor(pipelineSink));
  thread = new Thread(kafkaConsumer);
  thread.start();
}