public static Pipeline build() { Pipeline p = Pipeline.create(); // Palladium and Platinum only p.drawFrom(Sources.<String, Object>mapJournal( Constants.IMAP_NAME_PRECIOUS, JournalInitialPosition.START_FROM_OLDEST) ).map(e -> e.getKey() + "==" + e.getValue()) .filter(str -> str.toLowerCase().startsWith("p")) .drainTo(Sinks.logger()) ; return p; }
// NOTE(review): fragment of a chained pipeline — the upstream stage it continues from is outside
// this view. It aggregates a 120-minute sliding window (advancing every 15 minutes) with a
// linear-trend fit over (CarCount time, count), re-keys each result as (original key, window
// timestamp), and writes the trend values to the "trends" IMap.
.window(sliding(MINUTES.toMillis(120), MINUTES.toMillis(15))) .aggregate(linearTrend(CarCount::getTime, CarCount::getCount)) .map((TimestampedEntry<String, Double> e) -> entry(new TrendKey(e.getKey(), e.getTimestamp()), e.getValue())) .drainTo(Sinks.map("trends"));
// NOTE(review): chained-pipeline fragment — presumably projects each journal entry to its
// value via the entryValue() helper defined elsewhere in the project; confirm at call site.
.map(entryValue());
// NOTE(review): chained-pipeline fragment — presumably projects each journal entry to its
// value via the entryValue() helper defined elsewhere in the project; confirm at call site.
.map(entryValue());
/**
 * Demonstrates reading an IMap's event journal: starts a two-member embedded
 * Jet cluster, runs a job that copies journaled map values into an IList,
 * writes 1000 entries into the map, then reports how many events the sink saw.
 */
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");
    JetConfig jetConfig = getJetConfig();
    JetInstance instance = Jet.newJetInstance(jetConfig);
    // second member so the sample runs on a real (two-node) cluster
    Jet.newJetInstance(jetConfig);
    try {
        Pipeline pipeline = Pipeline.create();
        pipeline.drawFrom(Sources.<Integer, Integer>mapJournal(MAP_NAME, START_FROM_OLDEST))
                .map(Entry::getValue)
                .drainTo(Sinks.list(SINK_NAME));
        instance.newJob(pipeline);
        IMapJet<Integer, Integer> sourceMap = instance.getMap(MAP_NAME);
        for (int key = 0; key < 1000; key++) {
            sourceMap.set(key, key);
        }
        // give the streaming job a moment to drain the journal into the sink
        TimeUnit.SECONDS.sleep(3);
        System.out.println("Read " + instance.getList(SINK_NAME).size() + " entries from map journal.");
    } finally {
        Jet.shutdownAll();
    }
}
/**
 * Demonstrates reading an ICache's event journal: starts a two-member embedded
 * Jet cluster, runs a job that copies journaled cache values into an IList,
 * puts 1000 entries into the cache, then reports how many events the sink saw.
 */
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");
    JetConfig jetConfig = getJetConfig();
    JetInstance instance = Jet.newJetInstance(jetConfig);
    // second member so the sample runs on a real (two-node) cluster
    Jet.newJetInstance(jetConfig);
    try {
        Pipeline pipeline = Pipeline.create();
        pipeline.drawFrom(Sources.<Integer, Integer>cacheJournal(CACHE_NAME, START_FROM_OLDEST))
                .map(Entry::getValue)
                .drainTo(Sinks.list(SINK_NAME));
        instance.newJob(pipeline);
        ICache<Integer, Integer> sourceCache = instance.getCacheManager().getCache(CACHE_NAME);
        for (int key = 0; key < 1000; key++) {
            sourceCache.put(key, key);
        }
        // give the streaming job a moment to drain the journal into the sink
        TimeUnit.SECONDS.sleep(3);
        System.out.println("Read " + instance.getList(SINK_NAME).size() + " entries from cache journal.");
    } finally {
        Jet.shutdownAll();
    }
}
private static Pipeline buildPipeline() { Pipeline p = Pipeline.create(); p.drawFrom(Sources.jmsTopic(() -> new ActiveMQConnectionFactory(ActiveMQBroker.BROKER_URL), INPUT_TOPIC)) .filter(message -> message.getJMSPriority() > 3) .map(message -> (TextMessage) message) // print the message text to the log .peek(TextMessage::getText) .drainTo(Sinks.<TextMessage>jmsTopicBuilder(() -> new ActiveMQConnectionFactory(ActiveMQBroker.BROKER_URL)) .destinationName(OUTPUT_TOPIC) .messageFn((session, message) -> { TextMessage textMessage = session.createTextMessage(message.getText()); textMessage.setBooleanProperty("isActive", true); textMessage.setJMSPriority(8); return textMessage; }) .build()); return p; }
private static Pipeline buildPipeline() { Pipeline p = Pipeline.create(); p.drawFrom(Sources.jmsQueue(() -> new ActiveMQConnectionFactory(ActiveMQBroker.BROKER_URL), INPUT_QUEUE)) .filter(message -> message.getJMSPriority() > 3) .map(message -> (TextMessage) message) // print the message text to the log .peek(TextMessage::getText) .drainTo(Sinks.<TextMessage>jmsQueueBuilder(() -> new ActiveMQConnectionFactory(ActiveMQBroker.BROKER_URL)) .destinationName(OUTPUT_QUEUE) .messageFn((session, message) -> { // create new text message with the same text and few additional properties TextMessage textMessage = session.createTextMessage(message.getText()); textMessage.setBooleanProperty("isActive", true); textMessage.setJMSPriority(8); return textMessage; } ) .build()); return p; }
/**
 * Demonstrates reading the event journal of an IMap that lives on a separate
 * (remote) Hazelcast IMDG cluster: a local Jet cluster connects to it via a
 * client config, streams journaled map values into a local IList, and reports
 * how many entries arrived after 1000 puts on the remote map.
 */
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");
    Config hzConfig = getConfig();
    HazelcastInstance remoteCluster = startRemoteHzCluster(hzConfig);
    JetInstance jet = startLocalJetCluster();
    try {
        // client config pointing the Jet source at the remote IMDG cluster
        ClientConfig remoteClientConfig = new ClientConfig();
        remoteClientConfig.getNetworkConfig().addAddress(getAddress(remoteCluster));
        remoteClientConfig.setGroupConfig(hzConfig.getGroupConfig());
        Pipeline pipeline = Pipeline.create();
        pipeline.drawFrom(Sources.<Integer, Integer>remoteMapJournal(
                    MAP_NAME, remoteClientConfig, START_FROM_OLDEST))
                .map(Entry::getValue)
                .drainTo(Sinks.list(SINK_NAME));
        jet.newJob(pipeline);
        IMap<Integer, Integer> remoteMap = remoteCluster.getMap(MAP_NAME);
        for (int key = 0; key < 1000; key++) {
            remoteMap.set(key, key);
        }
        // give the streaming job a moment to drain the journal into the sink
        TimeUnit.SECONDS.sleep(3);
        System.out.println("Read " + jet.getList(SINK_NAME).size() + " entries from remote map journal.");
    } finally {
        Hazelcast.shutdownAll();
        Jet.shutdownAll();
    }
}
// NOTE(review): chained-pipeline fragment — presumably projects each journal entry to its
// value via the entryValue() helper defined elsewhere in the project; confirm at call site.
.map(entryValue());
/**
 * Demonstrates reading the event journal of an ICache that lives on a separate
 * (remote) Hazelcast IMDG cluster: a local Jet cluster connects to it via a
 * client config, streams journaled cache values into a local IList, and reports
 * how many entries arrived after 1000 puts on the remote cache.
 */
public static void main(String[] args) throws Exception {
    System.setProperty("hazelcast.logging.type", "log4j");
    Config hzConfig = getConfig();
    HazelcastInstance remoteCluster = startRemoteHzCluster(hzConfig);
    JetInstance jet = startLocalJetCluster();
    try {
        // client config pointing the Jet source at the remote IMDG cluster
        ClientConfig remoteClientConfig = new ClientConfig();
        remoteClientConfig.getNetworkConfig().addAddress(getAddress(remoteCluster));
        remoteClientConfig.setGroupConfig(hzConfig.getGroupConfig());
        Pipeline pipeline = Pipeline.create();
        pipeline.drawFrom(Sources.<Integer, Integer>remoteCacheJournal(
                    CACHE_NAME, remoteClientConfig, START_FROM_OLDEST))
                .map(Entry::getValue)
                .drainTo(Sinks.list(SINK_NAME));
        jet.newJob(pipeline);
        ICache<Integer, Integer> remoteCache = remoteCluster.getCacheManager().getCache(CACHE_NAME);
        for (int key = 0; key < 1000; key++) {
            remoteCache.put(key, key);
        }
        // give the streaming job a moment to drain the journal into the sink
        TimeUnit.SECONDS.sleep(3);
        System.out.println("Read " + jet.getList(SINK_NAME).size() + " entries from remote cache journal.");
    } finally {
        Hazelcast.shutdownAll();
        Jet.shutdownAll();
    }
}
/**
 * This code is the main point of the sample: use the source builder to
 * create an HTTP source connector, then create a Jet pipeline that
 * performs windowed aggregation over its data.
 * <p>
 * The pipeline fits a linear trend over a sliding window (size 100,
 * sliding by 20) of timestamped used-memory samples and publishes each
 * (timestamp, trend) pair to an IMap.
 */
private static Pipeline buildPipeline() {
    StreamSource<TimestampedItem<Long>> memorySource = SourceBuilder
            .timestampedStream("used-memory", ctx -> new PollHttp())
            .fillBufferFn(PollHttp::fillBuffer)
            .destroyFn(PollHttp::close)
            .build();
    Pipeline pipeline = Pipeline.create();
    pipeline.drawFrom(memorySource)
            .window(sliding(100, 20))
            .aggregate(linearTrend(TimestampedItem::timestamp, TimestampedItem::item))
            .map(trend -> entry(trend.timestamp(), trend.item()))
            .drainTo(Sinks.map(MAP_NAME));
    return pipeline;
}