/**
 * Entry point for the PurchaseProcessor example.
 *
 * <p>Builds a processor-API topology that reads {@code Purchase} records from
 * {@code src-topic}, anonymizes credit-card data ({@code PROCESS}), derives purchase
 * patterns ({@code PROCESS2}) and customer rewards ({@code PROCESS3}), and writes each
 * result to its own sink topic ({@code patterns}, {@code rewards}, {@code purchases}).
 *
 * @param args unused command-line arguments
 * @throws Exception if topology construction or startup fails
 */
public static void main(String[] args) throws Exception {
    StreamsConfig streamingConfig = new StreamsConfig(getProperties());

    // Serdes for the JSON payloads flowing through the topology; keys are plain strings.
    JsonDeserializer<Purchase> purchaseJsonDeserializer = new JsonDeserializer<>(Purchase.class);
    JsonSerializer<Purchase> purchaseJsonSerializer = new JsonSerializer<>();
    JsonSerializer<RewardAccumulator> rewardAccumulatorJsonSerializer = new JsonSerializer<>();
    JsonSerializer<PurchasePattern> purchasePatternJsonSerializer = new JsonSerializer<>();
    StringDeserializer stringDeserializer = new StringDeserializer();
    StringSerializer stringSerializer = new StringSerializer();

    TopologyBuilder topologyBuilder = new TopologyBuilder();
    // SOURCE -> PROCESS (anonymize); PROCESS fans out to the pattern and reward
    // processors, and each of the three results is written to its own topic.
    topologyBuilder.addSource("SOURCE", stringDeserializer, purchaseJsonDeserializer, "src-topic")
            .addProcessor("PROCESS", CreditCardAnonymizer::new, "SOURCE")
            .addProcessor("PROCESS2", PurchasePatterns::new, "PROCESS")
            .addProcessor("PROCESS3", CustomerRewards::new, "PROCESS")
            .addSink("SINK", "patterns", stringSerializer, purchasePatternJsonSerializer, "PROCESS2")
            .addSink("SINK2", "rewards", stringSerializer, rewardAccumulatorJsonSerializer, "PROCESS3")
            .addSink("SINK3", "purchases", stringSerializer, purchaseJsonSerializer, "PROCESS");

    System.out.println("Starting PurchaseProcessor Example");
    KafkaStreams streaming = new KafkaStreams(topologyBuilder, streamingConfig);
    // The original never closed the streams instance; close it on JVM shutdown so
    // its internal clients release their resources cleanly.
    Runtime.getRuntime().addShutdownHook(new Thread(streaming::close));
    streaming.start();
    System.out.println("Now started PurchaseProcessor Example");
}
public static void main(String[] args) throws IOException { Properties props = new Properties(); props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-wordcount-processor"); props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka0:19092"); props.put(StreamsConfig.ZOOKEEPER_CONNECT_CONFIG, "zookeeper0:12181/kafka"); props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass()); props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.Integer().getClass()); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); TopologyBuilder builder = new TopologyBuilder(); builder.addSource("SOURCE", new StringDeserializer(), new StringDeserializer(), "words") .addProcessor("WordCountProcessor", WordCountProcessor::new, "SOURCE") .addStateStore(Stores.create("Counts").withStringKeys().withIntegerValues().inMemory().build(), "WordCountProcessor") // .connectProcessorAndStateStores("WordCountProcessor", "Counts") .addSink("SINK", "count", new StringSerializer(), new IntegerSerializer(), "WordCountProcessor"); KafkaStreams stream = new KafkaStreams(builder, props); stream.start(); System.in.read(); stream.close(); stream.cleanUp(); }
Serdes.Double().deserializer(), "messages-instrumented") .addProcessor("FAST-processor", () -> new MovingAverageProcessor(0.1), "messages-source")
new MessageSerde().deserializer(), "messages") .addProcessor("selector-processor", () -> new SelectorProcessor(), "messages-source") .addProcessor("FAST-processor", () -> new MovingAverageProcessor("FAST", 0.1), "selector-processor") .addProcessor("MEDIUM-processor", () -> new MovingAverageProcessor("MEDIUM", 0.05), "selector-processor") .addProcessor("SLOW-processor", () -> new MovingAverageProcessor("SLOW", 0.01), "selector-processor")
builder.addProcessor(entry.getID(), entry.getSupplier(), SOURCE); } else { final List<TupleExpr> parents = entry.getUpstreamNodes(); parentIDs[id] = idMap.get(parents.get(id)); builder.addProcessor(entry.getID(), entry.getSupplier(), parentIDs); builder.addProcessor("OUTPUT_FORMATTER", sinkEntry.getFormatterSupplier(), entry.getID());
/**
 * Entry point for the StockSummaryStatefulProcessor example.
 *
 * <p>Reads {@code StockTransaction} records from the {@code stocks} topic, aggregates
 * them into {@code StockTransactionSummary} values held in an in-memory state store
 * capped at 100 entries, and writes the raw transactions to {@code stocks-out} and the
 * summaries to {@code transaction-summary}.
 *
 * @param args unused command-line arguments
 */
public static void main(String[] args) {
    StreamsConfig streamingConfig = new StreamsConfig(getProperties());
    TopologyBuilder builder = new TopologyBuilder();

    // Serdes for the JSON payloads; keys are plain strings.
    JsonSerializer<StockTransactionSummary> stockTxnSummarySerializer = new JsonSerializer<>();
    JsonDeserializer<StockTransactionSummary> stockTxnSummaryDeserializer = new JsonDeserializer<>(StockTransactionSummary.class);
    JsonDeserializer<StockTransaction> stockTxnDeserializer = new JsonDeserializer<>(StockTransaction.class);
    JsonSerializer<StockTransaction> stockTxnJsonSerializer = new JsonSerializer<>();
    StringSerializer stringSerializer = new StringSerializer();
    StringDeserializer stringDeserializer = new StringDeserializer();
    Serde<StockTransactionSummary> stockTransactionSummarySerde =
            Serdes.serdeFrom(stockTxnSummarySerializer, stockTxnSummaryDeserializer);

    // NOTE(review): "sink" is parented on "stocks-source", not on "summary" — it
    // passes the raw transactions straight through. Verify this is intentional.
    builder.addSource("stocks-source", stringDeserializer, stockTxnDeserializer, "stocks")
            .addProcessor("summary", StockSummaryProcessor::new, "stocks-source")
            .addStateStore(Stores.create("stock-transactions").withStringKeys()
                    .withValues(stockTransactionSummarySerde).inMemory().maxEntries(100).build(), "summary")
            .addSink("sink", "stocks-out", stringSerializer, stockTxnJsonSerializer, "stocks-source")
            .addSink("sink-2", "transaction-summary", stringSerializer, stockTxnSummarySerializer, "summary");

    System.out.println("Starting StockSummaryStatefulProcessor Example");
    KafkaStreams streaming = new KafkaStreams(builder, streamingConfig);
    // The original never closed the streams instance; close it on JVM shutdown so
    // its internal clients release their resources cleanly.
    Runtime.getRuntime().addShutdownHook(new Thread(streaming::close));
    streaming.start();
    System.out.println("StockSummaryStatefulProcessor Example now started");
}