/**
 * Builds a pipeline that counts trades per ticker over a sliding window.
 * Trade events are read from the map journal starting at the current
 * position, timestamped by trade time (allowing 3000 ms of lag), and each
 * finished window is formatted and written to the log.
 */
private static Pipeline buildPipeline() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Trade, Integer, Trade>mapJournal(
                TRADES_MAP_NAME,
                DistributedPredicate.alwaysTrue(),
                EventJournalMapEvent::getNewValue,
                START_FROM_CURRENT))
            .addTimestamps(Trade::getTime, 3000)
            .groupingKey(Trade::getTicker)
            .window(WindowDefinition.sliding(SLIDING_WINDOW_LENGTH_MILLIS, SLIDE_STEP_MILLIS))
            .aggregate(counting(),
                    // format: window end time, ticker, trade count
                    (winStart, winEnd, ticker, count) ->
                            String.format("%s %5s %4d", toLocalTime(winEnd), ticker, count))
            .drainTo(Sinks.logger());
    return pipeline;
}
private static Pipeline buildPipeline() { Pipeline p = Pipeline.create(); p.drawFrom(Sources.jmsTopic(() -> new ActiveMQConnectionFactory(ActiveMQBroker.BROKER_URL), INPUT_TOPIC)) .filter(message -> message.getJMSPriority() > 3) .map(message -> (TextMessage) message) // print the message text to the log .peek(TextMessage::getText) .drainTo(Sinks.<TextMessage>jmsTopicBuilder(() -> new ActiveMQConnectionFactory(ActiveMQBroker.BROKER_URL)) .destinationName(OUTPUT_TOPIC) .messageFn((session, message) -> { TextMessage textMessage = session.createTextMessage(message.getText()); textMessage.setBooleanProperty("isActive", true); textMessage.setJMSPriority(8); return textMessage; }) .build()); return p; }
@SuppressWarnings("Convert2MethodRef") // https://bugs.openjdk.java.net/browse/JDK-8154236 private static Pipeline aggregate() { Pipeline p = Pipeline.create(); p.drawFrom(Sources.<PageVisit, Integer, PageVisit>mapJournal(PAGE_VISIT, mapPutEvents(), mapEventNewValue(), START_FROM_OLDEST)) .addTimestamps(pv -> pv.timestamp(), 100) .window(sliding(10, 1)) .aggregate(counting()) .drainTo(Sinks.logger()); return p; }
public static Pipeline build() { Pipeline p = Pipeline.create(); // Palladium and Platinum only p.drawFrom(Sources.<String, Object>mapJournal( Constants.IMAP_NAME_PRECIOUS, JournalInitialPosition.START_FROM_OLDEST) ).map(e -> e.getKey() + "==" + e.getValue()) .filter(str -> str.toLowerCase().startsWith("p")) .drainTo(Sinks.logger()) ; return p; }
/**
 * Builds a pipeline that copies records from the precious-metals Kafka topic
 * into the corresponding IMap.
 *
 * @param bootstrapServers Kafka bootstrap servers, e.g. "localhost:9092"
 * @return the pipeline, ready to be submitted as a job
 */
public static Pipeline build(String bootstrapServers) {
    Properties kafkaProps = new Properties();
    // random group id so each run re-reads the topic from the beginning
    kafkaProps.put(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString());
    kafkaProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    kafkaProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
            StringDeserializer.class.getCanonicalName());
    kafkaProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
            StringDeserializer.class.getCanonicalName());
    kafkaProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(KafkaSources.kafka(kafkaProps, Constants.TOPIC_NAME_PRECIOUS))
            .drainTo(Sinks.map(Constants.IMAP_NAME_PRECIOUS));
    return pipeline;
}
/**
 * Demo entry point: starts a two-member Jet cluster, runs a streaming job
 * that copies map-journal values into a list, writes 1000 entries into the
 * source map, waits briefly, and reports how many entries reached the sink.
 */
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");
    JetConfig jetConfig = getJetConfig();
    JetInstance instance = Jet.newJetInstance(jetConfig);
    // second member, so the journal is distributed across a real cluster
    Jet.newJetInstance(jetConfig);
    try {
        Pipeline pipeline = Pipeline.create();
        pipeline.drawFrom(Sources.<Integer, Integer>mapJournal(MAP_NAME, START_FROM_OLDEST))
                .map(Entry::getValue)
                .drainTo(Sinks.list(SINK_NAME));
        instance.newJob(pipeline);

        IMapJet<Integer, Integer> sourceMap = instance.getMap(MAP_NAME);
        for (int i = 0; i < 1000; i++) {
            sourceMap.set(i, i);
        }

        // give the streaming job a moment to drain the journal
        TimeUnit.SECONDS.sleep(3);
        System.out.println("Read " + instance.getList(SINK_NAME).size()
                + " entries from map journal.");
    } finally {
        Jet.shutdownAll();
    }
}
.groupingKey(CarCount::getLocation) .window(sliding(MINUTES.toMillis(120), MINUTES.toMillis(15))) .aggregate(linearTrend(CarCount::getTime, CarCount::getCount)) .map((TimestampedEntry<String, Double> e) -> entry(new TrendKey(e.getKey(), e.getTimestamp()), e.getValue())) .drainTo(Sinks.map("trends")); .mapUsingContext(ContextFactories.<TrendKey, Double>iMapContext("trends"), (trendMap, cc) -> { int[] counts = new int[NUM_PREDICTIONS]; .drainTo(Sinks.files(targetDirectory)); return pipeline;
/**
 * Builds a pipeline that counts price-update events per ticker over a
 * sliding window. Raw journal entries from the "prices" map are unpacked
 * into {@code PriceUpdateEvent}s, timestamped with LAG_SECONDS of allowed
 * lag, and the windowed counts are written to the log.
 */
private static Pipeline buildPipeline() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<PriceUpdateEvent, String, Tuple2<Integer, Long>>mapJournal(
                "prices",
                mapPutEvents(),
                entry -> new PriceUpdateEvent(
                        entry.getKey(), entry.getNewValue().f0(), entry.getNewValue().f1()),
                START_FROM_CURRENT))
            .addTimestamps(PriceUpdateEvent::timestamp, LAG_SECONDS * 1000)
            // single-threaded timestamping keeps event order per partition
            .setLocalParallelism(1)
            .groupingKey(PriceUpdateEvent::ticker)
            .window(WindowDefinition.sliding(WINDOW_SIZE_SECONDS * 1000, 1000))
            .aggregate(AggregateOperations.counting())
            .drainTo(Sinks.logger());
    return pipeline;
}
/**
 * Builds a pipeline that maintains a rolling per-key sum of entry values.
 * Journal events from the trades map (from the current position) are
 * grouped by entry key and the running totals are written to the volume map.
 */
private static Pipeline buildPipeline() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Entry<String, Integer>, Integer, Entry<String, Integer>>mapJournal(
                TRADES_MAP_NAME,
                DistributedPredicate.alwaysTrue(),
                EventJournalMapEvent::getNewValue,
                START_FROM_CURRENT))
            .groupingKey(Entry::getKey)
            .rollingAggregate(summingLong(Entry::getValue))
            .drainTo(Sinks.map(VOLUME_MAP_NAME));
    return pipeline;
}
/**
 * This code is the main point of the sample: use the source builder to
 * create an HTTP source connector, then create a Jet pipeline that
 * performs windowed aggregation over its data.
 */
private static Pipeline buildPipeline() {
    // custom timestamped source backed by the PollHttp helper
    StreamSource<TimestampedItem<Long>> memorySource = SourceBuilder
            .timestampedStream("used-memory", x -> new PollHttp())
            .fillBufferFn(PollHttp::fillBuffer)
            .destroyFn(PollHttp::close)
            .build();

    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(memorySource)
            .window(sliding(100, 20))
            // slope of memory usage over each window
            .aggregate(linearTrend(TimestampedItem::timestamp, TimestampedItem::item))
            .map(trend -> entry(trend.timestamp(), trend.item()))
            .drainTo(Sinks.map(MAP_NAME));
    return pipeline;
}
.map(entryValue()); .mapUsingIMap(productMap, (map, t) -> { Product product = map.get(t.productId()); return tuple2(t, product.name()); }) .mapUsingIMap(brokerMap, (map, t) -> { Broker broker = map.get(t.f0().brokerId()); return tuple3(t.f0(), t.f1(), broker.name()); }) .drainTo(Sinks.logger());
.map(entryValue()); .mapUsingReplicatedMap(productMap, (map, t) -> { Product product = map.get(t.productId()); return tuple2(t, product.name()); }) .mapUsingReplicatedMap(brokerMap, (map, t) -> { Broker broker = map.get(t.f0().brokerId()); return tuple3(t.f0(), t.f1(), broker.name()); }) .drainTo(Sinks.logger()); return p;
/**
 * Builds and returns the Pipeline which represents the actual computation:
 * webcam frames are classified with the model at {@code modelPath}, the
 * best-scoring classification per one-second tumbling window is selected,
 * and the result is rendered by the GUI sink.
 *
 * @param modelPath filesystem path of the classification model
 */
private static Pipeline buildPipeline(String modelPath) {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(WebcamSource.webcam(500))
            .mapUsingContext(classifierContext(modelPath), (ctx, frame) -> {
                Entry<String, Double> result = classifyWithModel(ctx, frame);
                // keep the frame together with its label and score
                return tuple3(frame, result.getKey(), result.getValue());
            })
            .window(tumbling(1000))
            // highest-confidence classification wins the window
            .aggregate(maxBy(comparingDouble(Tuple3::f2)))
            .drainTo(buildGUISink());
    return pipeline;
}
JournalInitialPosition.START_FROM_OLDEST)) .filter(eventJournalMapEvent -> eventJournalMapEvent.getType().equals(EntryEventType.UPDATED)) .customTransform("beforeAndAfterBaskets", Processors.mapP(SequenceAnalysis::beforeAndAfterBaskets)) .filter(tuple2 -> ((Tuple2<Map<String, Integer>, Map<String, Integer>>) tuple2).f0().size() < ((Tuple2<Map<String, Integer>, Map<String, Integer>>) tuple2).f1().size() .customTransform("lastItem", Processors.mapP(SequenceAnalysis::lastItem)) .drainTo( Sinks.mapWithEntryProcessor( Constants.IMAP_NAME_SEQUENCE,
/**
 * Builds a pipeline that copies records from Kafka topics "t1" and "t2"
 * into the sink IMap.
 */
private Pipeline buildPipeline() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(KafkaSources.kafka(brokerProperties(), "t1", "t2"))
            .drainTo(Sinks.map(SINK_NAME));
    return pipeline;
}
/**
 * Demo entry point: starts a two-member Jet cluster, runs a streaming job
 * that copies cache-journal values into a list, writes 1000 entries into
 * the source cache, waits briefly, and reports how many entries arrived.
 */
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");
    JetConfig jetConfig = getJetConfig();
    JetInstance instance = Jet.newJetInstance(jetConfig);
    // second member, so the journal is distributed across a real cluster
    Jet.newJetInstance(jetConfig);
    try {
        Pipeline pipeline = Pipeline.create();
        pipeline.drawFrom(Sources.<Integer, Integer>cacheJournal(CACHE_NAME, START_FROM_OLDEST))
                .map(Entry::getValue)
                .drainTo(Sinks.list(SINK_NAME));
        instance.newJob(pipeline);

        ICache<Integer, Integer> sourceCache = instance.getCacheManager().getCache(CACHE_NAME);
        for (int i = 0; i < 1000; i++) {
            sourceCache.put(i, i);
        }

        // give the streaming job a moment to drain the journal
        TimeUnit.SECONDS.sleep(3);
        System.out.println("Read " + instance.getList(SINK_NAME).size()
                + " entries from cache journal.");
    } finally {
        Jet.shutdownAll();
    }
}
/**
 * Builds a pipeline that keeps a rolling per-ticker sum of trade prices.
 * Trade events from the map journal (current position onward) are grouped
 * by ticker and the running sums are written to the volume map.
 */
private static Pipeline buildPipeline() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(Sources.<Trade, Integer, Trade>mapJournal(
                TRADES_MAP_NAME,
                DistributedPredicate.alwaysTrue(),
                EventJournalMapEvent::getNewValue,
                START_FROM_CURRENT))
            .groupingKey(Trade::getTicker)
            .rollingAggregate(summingLong(Trade::getPrice))
            .drainTo(Sinks.map(VOLUME_MAP_NAME));
    return pipeline;
}
@SuppressWarnings("Convert2MethodRef") // https://bugs.openjdk.java.net/browse/JDK-8154236 private static Pipeline coGroup() { Pipeline p = Pipeline.create(); StreamStageWithKey<PageVisit, Integer> pageVisits = p .drawFrom(Sources.<PageVisit, Integer, PageVisit>mapJournal(PAGE_VISIT, mapPutEvents(), mapEventNewValue(), START_FROM_OLDEST)) .addTimestamps(pv -> pv.timestamp(), 100) .groupingKey(pv -> pv.userId()); StreamStageWithKey<Payment, Integer> payments = p .drawFrom(Sources.<Payment, Integer, Payment>mapJournal(PAYMENT, mapPutEvents(), mapEventNewValue(), START_FROM_OLDEST)) .addTimestamps(pm -> pm.timestamp(), 100) .groupingKey(pm -> pm.userId()); StreamStageWithKey<AddToCart, Integer> addToCarts = p .drawFrom(Sources.<AddToCart, Integer, AddToCart>mapJournal(ADD_TO_CART, mapPutEvents(), mapEventNewValue(), START_FROM_OLDEST)) .addTimestamps(atc -> atc.timestamp(), 100) .groupingKey(atc -> atc.userId()); StageWithKeyAndWindow<PageVisit, Integer> windowStage = pageVisits.window(sliding(10, 1)); StreamStage<TimestampedEntry<Integer, Tuple3<List<PageVisit>, List<AddToCart>, List<Payment>>>> coGrouped = windowStage.aggregate3(toList(), addToCarts, toList(), payments, toList()); coGrouped.drainTo(Sinks.logger()); return p; }
/**
 * Builds a pipeline that reads records from the Kafka topic and writes
 * them to the log.
 */
private Pipeline buildPipeline() {
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(KafkaSources.kafka(brokerProperties(), TOPIC))
            .drainTo(Sinks.logger());
    return pipeline;
}