/**
 * Configure a poller for this endpoint through a {@link PollerFactory} callback.
 * @param pollers the pollers
 * @return the endpoint spec.
 * @see AbstractPollingEndpoint
 * @see PollerFactory
 */
public S poller(Function<PollerFactory, PollerSpec> pollers) {
	PollerSpec pollerSpec = pollers.apply(new PollerFactory());
	return poller(pollerSpec);
}
/**
 * Kafka (high-level consumer) inbound flow: polls the configured topic every
 * 100 ms and logs each received {@code topic=messages} entry.
 */
@Bean
IntegrationFlow consumer() {
	log.info("starting consumer..");

	// Inbound channel adapter against the configured Zookeeper ensemble.
	KafkaHighLevelConsumerMessageSourceSpec messageSourceSpec =
			Kafka.inboundChannelAdapter(new ZookeeperConnect(this.kafkaConfig.getZookeeperAddress()))
					.consumerProperties(properties ->
							properties.put("auto.offset.reset", "smallest")
									.put("auto.commit.interval.ms", "100"))
					.addConsumer("myGroup", metadata ->
							metadata.consumerTimeout(100)
									.topicStreamMap(streams -> streams.put(this.kafkaConfig.getTopic(), 1))
									.maxMessages(10)
									.valueDecoder(String::new));

	// Poll the message source every 100 ms.
	Consumer<SourcePollingChannelAdapterSpec> endpointConfigurer =
			endpoint -> endpoint.poller(poller -> poller.fixedDelay(100));

	return IntegrationFlows
			.from(messageSourceSpec, endpointConfigurer)
			.<Map<String, List<String>>>handle((payload, headers) -> {
				payload.forEach((topic, messages) -> log.info(topic + '=' + messages));
				return null;
			})
			.get();
}

}
/**
 * Exposes a polled message source as a Reactive Streams {@link Publisher}.
 * <p>
 * The one-shot trigger fires immediately on the first poll (and returns
 * {@code null} thereafter, stopping further polls); up to
 * {@code this.elements} messages are emitted in that single poll.
 * <p>
 * Fix: the original payload was {@code Math.random()} (a {@code Double}),
 * which contradicted the declared {@code Publisher<Message<String>>} return
 * type; the random value is now rendered as a {@code String}.
 */
@Bean
public Publisher<Message<String>> publisher() {
	return IntegrationFlows
			.from(() -> new GenericMessage<>(String.valueOf(Math.random())),
					e -> e.poller(p -> p
							// Fire once, then never again: null next-execution time ends polling.
							.trigger(ctx -> this.invoked.getAndSet(true) ? null : new Date())
							.maxMessagesPerPoll(this.elements)))
			.channel(reactiveChannel())
			.toReactivePublisher();
}
/**
 * Emits a constant {@code "foo"} payload on a 1 ms fixed-delay poll and
 * exposes the flow as a Reactive Streams {@link Publisher}.
 */
@Bean
public Publisher<Message<String>> vanillaSource() {
	return IntegrationFlows
			.from(() -> new GenericMessage<>("foo"),
					endpoint -> endpoint.poller(poller -> poller.fixedDelay(1)))
			.toReactivePublisher();
}
/**
 * Apply a poller built by the supplied factory function to this endpoint.
 * @param pollers the pollers
 * @return the endpoint spec.
 * @see AbstractPollingEndpoint
 * @see PollerFactory
 */
public S poller(Function<PollerFactory, PollerSpec> pollers) {
	PollerFactory factory = new PollerFactory();
	return poller(pollers.apply(factory));
}
/**
 * Splits a comma-separated payload into individual messages and exposes the
 * result as a Reactive Streams Publisher. The endpoint does not auto-start.
 */
@Bean
public Publisher<Message<String>> reactiveFlow() {
	return IntegrationFlows
			// One-shot trigger: fires immediately the first time, then returns
			// null so the poller never schedules another execution.
			.from(() -> new GenericMessage<>("a,b,c,d,e,f"),
					e -> e.poller(p -> p.trigger(ctx -> this.invoked.getAndSet(true) ? null : new Date()))
							.autoStartup(false)
							// NOTE(review): "reactiveSteamsMessageSource" looks like a typo for
							// "reactiveStreamsMessageSource" — confirm before renaming, since
							// other code may look this endpoint up by id.
							.id("reactiveSteamsMessageSource"))
			.split(String.class, p -> p.split(","))
			.toReactivePublisher();
}
/**
 * Polls the {@code releaseChannel} queue every 100 ms and invokes the
 * supplied trigger action to release the waiting barrier.
 * @param barrierTriggerAction the action that releases the barrier.
 * @return the releasing flow.
 */
@Bean
@DependsOn("barrierFlow")
public IntegrationFlow releaseBarrierFlow(MessageTriggerAction barrierTriggerAction) {
	return IntegrationFlows.from((Channels channels) -> channels.queue("releaseChannel"))
			.trigger(barrierTriggerAction,
					endpoint -> endpoint.poller(poller -> poller.fixedDelay(100)))
			.get();
}
/**
 * Builds a flow whose source emits {@code "flowAdapterMessage"} under a
 * trigger that consumes (getAndSet null) the stored next-execution time,
 * routing results to the {@code flowAdapterOutput} queue.
 */
@Override
protected IntegrationFlowDefinition<?> buildFlow() {
	return from(() -> new GenericMessage<>("flowAdapterMessage"),
			endpoint -> endpoint.poller(poller -> poller
					.trigger(triggerContext -> this.nextExecutionTime.getAndSet(null))))
			.channel(channels -> channels.queue("flowAdapterOutput"));
}
/**
 * Polls a scripted message source every 100 ms and queues its output on
 * {@code messageSourceChannel}.
 */
@Bean
public IntegrationFlow scriptPollingAdapter() {
	return IntegrationFlows
			.from((MessageSources sources) ->
							sources.script("org/springframework/integration/dsl/test/scripts/TestMessageSourceScript.ruby"),
					endpoint -> endpoint.poller(poller -> poller.fixedDelay(100)))
			.channel(channels -> channels.queue("messageSourceChannel"))
			.get();
}
/**
 * JPA inbound adapter flow: fetches a single {@link StudentDomain} entity
 * (maxResults 1, single-result expected) under a one-shot trigger and queues
 * it on {@code pollingResults}.
 */
@Bean
public IntegrationFlow pollingAdapterFlow() {
	return IntegrationFlows
			.from(Jpa.inboundAdapter(this.entityManagerFactory)
							.entityClass(StudentDomain.class)
							.maxResults(1)
							.expectSingleResult(true),
					endpoint -> endpoint.poller(poller -> poller.trigger(new OnlyOnceTrigger())))
			.channel(channels -> channels.queue("pollingResults"))
			.get();
}
/**
 * Dynamically registers one file-polling flow per directory: each flow scans
 * its directory every second, transforms files to strings, and publishes to
 * the shared result channel. The per-directory transformer and adapter
 * endpoints are initialized and started explicitly.
 * @param directories the directories to watch.
 */
public void pollDirectories(File... directories) {
	for (File dir : directories) {
		String name = dir.getName();
		StandardIntegrationFlow flow = IntegrationFlows
				.from(source -> source.file(dir)
								.scanner(new DefaultDirectoryScanner()),
						endpoint -> endpoint.poller(poller -> poller.fixedDelay(1000))
								.id(name + ".adapter"))
				.transform(Transformers.fileToString(),
						endpoint -> endpoint.id(name + ".transformer"))
				.channel(this.dynamicAdaptersResult)
				.get();
		this.beanFactory.initializeBean(flow, name);
		this.beanFactory.getBean(name + ".transformer", Lifecycle.class).start();
		this.beanFactory.getBean(name + ".adapter", Lifecycle.class).start();
	}
}
/**
 * Builds a flow in which the enclosing adapter instance ('this') serves as
 * the message source, splitter, transformer, aggregation processor, filter
 * and handler — presumably via annotated methods on this class; verify
 * against the enclosing class definition.
 */
@Override
protected IntegrationFlowDefinition<?> buildFlow() {
	// Poll "messageSource" on this instance; the trigger delegates to
	// nextExecutionTime(ctx) on this same object.
	return from(this, "messageSource", e -> e.poller(p -> p.trigger(this::nextExecutionTime)))
			.split(this, null, e -> e.applySequence(false))
			.transform(this)
			.aggregate(a -> a.processor(this, null))
			// Constant header enrichment: foo=FOO on every message.
			.enrichHeaders(Collections.singletonMap("foo", "FOO"))
			.filter(this)
			.handle(this)
			.channel(c -> c.queue("myFlowAdapterOutput"));
}
/**
 * Polls an RSS/Atom feed every 100 ms (file-URL fetcher, backed by a
 * metadata store for duplicate tracking) and queues entries on
 * {@code entries}.
 */
@Bean
public IntegrationFlow feedFlow() {
	return IntegrationFlows
			.from(sources -> sources.feed(this.feedUrl, "feedTest")
							.feedFetcher(new FileUrlFeedFetcher())
							.metadataStore(metadataStore()),
					endpoint -> endpoint.poller(poller -> poller.fixedDelay(100)))
			.channel(channels -> channels.queue("entries"))
			.get();
}
/**
 * IMAP inbound flow: polls the local test IMAP server's INBOX every second,
 * using a custom search-term strategy and user flag, and queues the mail on
 * {@code imapChannel}.
 */
@Bean
public IntegrationFlow imapMailFlow() {
	String storeUrl = "imap://user:pw@localhost:" + imapServer.getPort() + "/INBOX";
	return IntegrationFlows
			.from(Mail.imapInboundAdapter(storeUrl)
							.searchTermStrategy(this::fromAndNotSeenTerm)
							.userFlag("testSIUserFlag")
							.javaMailProperties(props -> props.put("mail.debug", "false")),
					endpoint -> endpoint.autoStartup(true)
							.poller(poller -> poller.fixedDelay(1000)))
			.channel(MessageChannels.queue("imapChannel"))
			.get();
}
/**
 * POP3 inbound flow: polls the local test POP3 server every second, maps
 * mail headers, enriches the subject and sender headers from the payload,
 * and queues the result on {@code pop3Channel}.
 */
@Bean
public IntegrationFlow pop3MailFlow() {
	return IntegrationFlows
			.from(Mail.pop3InboundAdapter("localhost", pop3Server.getPort(), "user", "pw")
							.javaMailProperties(props -> props.put("mail.debug", "false"))
							.headerMapper(mailHeaderMapper()),
					endpoint -> endpoint.autoStartup(true)
							.poller(poller -> poller.fixedDelay(1000)))
			.enrichHeaders(spec -> spec.headerExpressions(expressions ->
					expressions.put(MailHeaders.SUBJECT, "payload.subject")
							.put(MailHeaders.FROM, "payload.from[0].toString()")))
			.channel(MessageChannels.queue("pop3Channel"))
			.get();
}
/**
 * Watches the temp directory for {@code foo.tmp} (100 ms fixed delay) and
 * splits matched files line-by-line (US-ASCII, with start/end markers and
 * sequence headers) onto {@code fileSplittingResultChannel}.
 */
@Bean
public IntegrationFlow fileSplitterFlow() {
	return IntegrationFlows
			.from(source -> source.file(tmpDir.getRoot())
							.patternFilter("foo.tmp"),
					endpoint -> endpoint.poller(poller -> poller.fixedDelay(100)))
			.split(Files.splitter()
							.markers()
							.charset(StandardCharsets.US_ASCII)
							.applySequence(true),
					endpoint -> endpoint.id("fileSplitter"))
			.channel(channels -> channels.queue("fileSplittingResultChannel"))
			.get();
}
// Fragment of a fluent flow definition: prepends "Hello, " to each String
// payload. The endpoint polls every 10 ms, handles at most one message per
// poll, and waits up to 10 ms when receiving.
.transform("Hello, "::concat, e -> e
		.poller(p -> p
				.fixedDelay(10)
				.maxMessagesPerPoll(1)
				.receiveTimeout(10)))