// NOTE(review): fragment — the enclosing EasyMock test method is not visible here; code kept byte-identical.
// Record phase: the mocked KafkaStreams instance is expected to be started,
// to receive an uncaught-exception handler, and to have cleanUp() invoked.
mockKafkaStreams.start();
expectLastCall();
mockKafkaStreams.setUncaughtExceptionHandler(anyObject(Thread.UncaughtExceptionHandler.class));
expectLastCall();
mockKafkaStreams.cleanUp();
// NOTE(review): fragment — the enclosing method and the closing braces of this `if`
// lie outside the visible source; code kept byte-identical.
this.kafkaStreams.setUncaughtExceptionHandler(this.uncaughtExceptionHandler);
// Apply the optional customizer hook to the freshly built streams instance.
if (this.kafkaStreamsCustomizer != null) {
this.kafkaStreamsCustomizer.customize(this.kafkaStreams);
/** * This method initializes the stream client, but doesn't actually start it until * an alarm is forwarded by the producer. * * @throws IOException when an error occurs in loading/parsing the Kafka client/stream configuration */ public void init() throws IOException { if (!isEnabled()) { LOG.info("Alarm synchronization disabled. Skipping initialization."); return; } final Properties streamProperties = loadStreamsProperties(); final StreamsBuilder builder = new StreamsBuilder(); final GlobalKTable<String, byte[]> alarmBytesKtable = builder.globalTable(alarmTopic, Consumed.with(Serdes.String(), Serdes.ByteArray()), Materialized.as(ALARM_STORE_NAME)); final Topology topology = builder.build(); // Use the class-loader for the KStream class, since the kafka-client bundle // does not import the required classes from the kafka-streams bundle streams = Utils.runWithGivenClassLoader(() -> new KafkaStreams(topology, streamProperties), KStream.class.getClassLoader()); streams.setUncaughtExceptionHandler((t, e) -> LOG.error( String.format("Stream error on thread: %s", t.getName()), e)); // Defer startup to another thread scheduler = Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder() .setNameFormat("kafka-producer-alarm-datasync-%d") .build() ); closed.set(false); scheduler.execute(this); }
/** * This method initializes the stream client, but doesn't actually start it until * an alarm is forwarded by the producer. * * @throws IOException when an error occurs in loading/parsing the Kafka client/stream configuration */ public void init() throws IOException { if (!isEnabled()) { LOG.info("Alarm synchronization disabled. Skipping initialization."); return; } final Properties streamProperties = loadStreamsProperties(); final StreamsBuilder builder = new StreamsBuilder(); final GlobalKTable<String, byte[]> alarmBytesKtable = builder.globalTable(alarmTopic, Consumed.with(Serdes.String(), Serdes.ByteArray()), Materialized.as(ALARM_STORE_NAME)); final Topology topology = builder.build(); // Use the class-loader for the KStream class, since the kafka-client bundle // does not import the required classes from the kafka-streams bundle streams = Utils.runWithGivenClassLoader(() -> new KafkaStreams(topology, streamProperties), KStream.class.getClassLoader()); streams.setUncaughtExceptionHandler((t, e) -> LOG.error( String.format("Stream error on thread: %s", t.getName()), e)); // Defer startup to another thread scheduler = Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder() .setNameFormat("kafka-producer-alarm-datasync-%d") .build() ); closed.set(false); scheduler.execute(this); }
// NOTE(review): fragment — opens an uncaught-exception-handler lambda whose body and
// closing brace lie outside the visible source; code kept byte-identical.
streams.setUncaughtExceptionHandler((t, e) -> {
public ManagedKStreams(Properties streamProperties, TopicsConfig topicsConfig, KStreamsProcessorListener testListener) { this.streamProperties = streamProperties; this.topicsConfig = topicsConfig; stateStoreName = topicsConfig.getStateStoreName(); KStreamBuilder kStreamBuilder= new KStreamBuilder(); kStreamBuilder.globalTable(topicsConfig.getProducerTopic(), stateStoreName); streams = new KafkaStreams(kStreamBuilder, streamProperties); // [ #132 ] - Improve build times by notifying test listener that we are running streams.setStateListener((newState, oldState) -> { if (!isRunning && newState == KafkaStreams.State.RUNNING) { isRunning = true; if( testListener != null) { testListener.stateStoreInitialized(); } } }); streams.setUncaughtExceptionHandler((t, e) -> log.error("KafkaStreams job failed", e)); }