@Bean
public IntegrationFlow lambdasFlow() {
    return IntegrationFlows.from("lambdasInput")
            .filter(String.class, "World"::equals)
            .transform(String.class, "Hello "::concat)
            .get();
}

@Bean
public IntegrationFlow gatewayRequestFlow() {
    return IntegrationFlows.from("gatewayRequest")
            .filter("foo"::equals, f -> f.throwExceptionOnRejection(true))
            .<String, String>transform(String::toUpperCase)
            .get();
}

@Override
protected IntegrationFlowDefinition<?> buildFlow() {
    return from(this, "messageSource", e -> e.poller(p -> p.trigger(this::nextExecutionTime)))
            .split(this, null, e -> e.applySequence(false))
            .transform(this)
            .aggregate(a -> a.processor(this, null))
            .enrichHeaders(Collections.singletonMap("foo", "FOO"))
            .filter(this)
            .handle(this)
            .channel(MessageChannels.queue("myFlowAdapterOutput"))
            .log();
}

@Bean
public IntegrationFlow fileReadingFlow() {
    return IntegrationFlows
            .from(Files.inboundAdapter(tmpDir.getRoot())
                            .patternFilter("*.sitest")
                            .useWatchService(true)
                            .watchEvents(FileReadingMessageSource.WatchEventType.CREATE,
                                    FileReadingMessageSource.WatchEventType.MODIFY),
                    e -> e.poller(Pollers.fixedDelay(100)
                            .errorChannel("filePollingErrorChannel")))
            .filter(File.class, p -> !p.getName().startsWith("a"),
                    e -> e.throwExceptionOnRejection(true))
            .transform(Files.toStringTransformer())
            .aggregate(a -> a.correlationExpression("1")
                    .releaseStrategy(g -> g.size() == 25))
            .channel(MessageChannels.queue("fileReadingResultChannel"))
            .get();
}

@Bean
public IntegrationFlow flow2() {
    return IntegrationFlows.from(this.inputChannel)
            .filter(p -> p instanceof String, e -> e
                    .id("filter")
                    .discardFlow(df -> df
                            .transform(String.class, "Discarded: "::concat)
                            .channel(MessageChannels.queue("discardChannel"))))
            .channel("foo")
            .fixedSubscriberChannel()
            .<String, Integer>transform(Integer::parseInt)
            .transform(Foo::new)
            .transform(new PayloadSerializingTransformer(),
                    c -> c.autoStartup(false).id("payloadSerializingTransformer"))
            .channel(MessageChannels.queue(new SimpleMessageStore(), "fooQueue"))
            .transform(Transformers.deserializer(Foo.class.getName()))
            .<Foo, Integer>transform(f -> f.value)
            .filter("true", e -> e.id("expressionFilter"))
            .channel(publishSubscribeChannel())
            .transform((Integer p) -> p * 2, c -> c.advice(this.expressionAdvice()))
            .get();
}

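The flow above refers to two supporting beans, publishSubscribeChannel() and expressionAdvice(), that are defined elsewhere in the original configuration. A minimal sketch of what they might look like, assuming a plain PublishSubscribeChannel and an ExpressionEvaluatingRequestHandlerAdvice; the success expression is illustrative only:

@Bean
public MessageChannel publishSubscribeChannel() {
    return new PublishSubscribeChannel();
}

@Bean
public Advice expressionAdvice() {
    // Illustrative advice configuration; the original expressionAdvice() bean is not shown in the source.
    ExpressionEvaluatingRequestHandlerAdvice advice = new ExpressionEvaluatingRequestHandlerAdvice();
    advice.setOnSuccessExpressionString("payload"); // placeholder expression
    return advice;
}
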
@Bean
public IntegrationFlow lambdasFlow() {
    return IntegrationFlows.from("lambdasInput")
            .filter("World"::equals)
            .transform("Hello "::concat)
            .get();
}

// Truncated snippet (Spring Cloud Contract stub runner, Spring Integration flavor): the statement
// ending in "+ entries.getValue().hashCode();" and the body of the anonymous Consumer are cut off
// in the source. The empty accept(...) body below only completes the syntax and is not the original code.
IntegrationFlowBuilder builder = IntegrationFlows.from(entries.getKey())
        .filter(new StubRunnerIntegrationMessageSelector(entries.getValue()),
                new Consumer<FilterEndpointSpec>() {
                    @Override
                    public void accept(FilterEndpointSpec spec) {
                        // endpoint customization elided in the source
                    }
                });

@Bean
public IntegrationFlow xpathFlow(MessageChannel wrongMessagesChannel) {
    return IntegrationFlows.from("inputChannel")
            .filter(new StringValueTestXPathMessageSelector("namespace-uri(/*)", "my:namespace"),
                    e -> e.discardChannel(wrongMessagesChannel))
            .log(LoggingHandler.Level.ERROR, "test.category", m -> m.getHeaders().getId())
            .route(xpathRouter(wrongMessagesChannel))
            .get();
}

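The xpathRouter(wrongMessagesChannel) bean used at the end of this flow is defined elsewhere in the original configuration. A minimal sketch, assuming the standard XPathRouter from spring-integration-xml and an illustrative XPath expression:

@Bean
public XPathRouter xpathRouter(MessageChannel wrongMessagesChannel) {
    // Route by the local name of the root element; the expression is an assumption for illustration.
    XPathRouter router = new XPathRouter("local-name(/*)");
    router.setEvaluateAsString(true);
    router.setResolutionRequired(false);
    router.setDefaultOutputChannel(wrongMessagesChannel);
    return router;
}
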
// Truncated snippet (Spring Cloud Contract stub runner, stream flavor): as in the previous fragment,
// the statement ending in "+ entries.getValue().hashCode();" and the Consumer body are cut off in the
// source. The empty accept(...) body below only completes the syntax and is not the original code.
IntegrationFlowBuilder builder = IntegrationFlows.from(entries.getKey())
        .filter(new StubRunnerStreamMessageSelector(entries.getValue()),
                new Consumer<FilterEndpointSpec>() {
                    @Override
                    public void accept(FilterEndpointSpec spec) {
                        // endpoint customization elided in the source
                    }
                });

@Bean
public IntegrationFlow topic2ListenerFromKafkaFlow() {
    return IntegrationFlows
            .from(Kafka
                    .messageDrivenChannelAdapter(kafkaListenerContainerFactory().createContainer(TEST_TOPIC2),
                            KafkaMessageDrivenChannelAdapter.ListenerMode.record)
                    .filterInRetry(true))
            .filter(Message.class,
                    m -> m.getHeaders().get(KafkaHeaders.RECEIVED_MESSAGE_KEY, Integer.class) < 101,
                    f -> f.throwExceptionOnRejection(true))
            .<String, String>transform(String::toUpperCase)
            .channel(c -> c.queue("listeningFromKafkaResults2"))
            .get();
}

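The kafkaListenerContainerFactory() bean used to create the container above is not part of this snippet. A minimal sketch, assuming Spring for Apache Kafka's ConcurrentKafkaListenerContainerFactory and the consumerFactory() bean sketched after the next flow:

@Bean
public ConcurrentKafkaListenerContainerFactory<Integer, String> kafkaListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<Integer, String> factory =
            new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(consumerFactory());
    return factory;
}
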
@Bean
public IntegrationFlow listeningFromKafkaFlow() {
    return IntegrationFlows
            .from(Kafka.messageDrivenChannelAdapter(consumerFactory(),
                            KafkaMessageDrivenChannelAdapter.ListenerMode.record, TEST_TOPIC)
                    .configureListenerContainer(c -> c.ackMode(AbstractMessageListenerContainer.AckMode.MANUAL))
                    .errorChannel("errorChannel"))
            .filter(Message.class,
                    m -> m.getHeaders().get(KafkaHeaders.RECEIVED_MESSAGE_KEY, Integer.class) < 101,
                    f -> f.throwExceptionOnRejection(true))
            .<String, String>transform(String::toUpperCase)
            .channel(c -> c.queue("listeningFromKafkaResults"))
            .get();
}

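The Kafka flows above all rely on a consumerFactory() bean that the snippets do not show. A minimal sketch, assuming a local broker, Integer keys, and String values; the bootstrap address and group id are placeholders:

@Bean
public ConsumerFactory<Integer, String> consumerFactory() {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker address
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "si-dsl-group");            // placeholder group id
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    return new DefaultKafkaConsumerFactory<>(props);
}
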
@Override
protected IntegrationFlowDefinition<?> buildFlow() {
    return from(this, "messageSource", e -> e.poller(p -> p.trigger(this::nextExecutionTime)))
            .split(this, null, e -> e.applySequence(false))
            .transform(this)
            .aggregate(a -> a.processor(this, null))
            .enrichHeaders(Collections.singletonMap("foo", "FOO"))
            .filter(this)
            .handle(this)
            .channel(c -> c.queue("myFlowAdapterOutput"));
}

@Bean
public IntegrationFlow fileReadingFlow() {
    return IntegrationFlows
            .from(s -> s.file(tmpDir.getRoot())
                            .patternFilter("*.sitest")
                            .useWatchService(true)
                            .watchEvents(FileReadingMessageSource.WatchEventType.CREATE,
                                    FileReadingMessageSource.WatchEventType.MODIFY),
                    e -> e.poller(Pollers.fixedDelay(100)
                            .errorChannel("filePollingErrorChannel")))
            .filter(File.class, p -> !p.getName().startsWith("a"),
                    e -> e.throwExceptionOnRejection(true))
            .transform(Transformers.fileToString())
            .aggregate(a -> a.correlationExpression("1")
                    .releaseStrategy(g -> g.size() == 25))
            .channel(MessageChannels.queue("fileReadingResultChannel"))
            .get();
}

@Bean
public IntegrationFlow fileSplitterAggregator() {
    return IntegrationFlows
            .from(Files.inboundAdapter(new File("/tmp/in"))
                            .autoCreateDirectory(true)
                            .patternFilter("*.txt"),
                    e -> e.poller(Pollers.fixedDelay(5000)))
            .split(Files.splitter()
                    .markers()
                    .applySequence(true))
            .filter(p -> !(p instanceof FileSplitter.FileMarker),
                    e -> e.discardChannel("aggregatorChannel"))
            .<String, Name>transform(Name::new)
            .<Name>filter(p -> !p.getValue().startsWith("X"))
            .channel("aggregatorChannel")
            .aggregate(a -> a.processor(new FileMarkerAggregator()))
            .<List<Name>, Names>transform(Names::new)
            .transform(Transformers.marshaller(jaxbMarshaller(),
                    new ResultToStringTransformer()))
            .handle(Files.outboundAdapter("'/tmp/out'")
                    .fileNameGenerator(m -> m
                            .getHeaders()
                            .get(FileHeaders.FILENAME, String.class)
                            .replace(".txt", ".xml"))
                    .autoCreateDirectory(true))
            .get();
}

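The jaxbMarshaller() bean passed to Transformers.marshaller(...) above is defined elsewhere in the original example. A minimal sketch, assuming Spring OXM's Jaxb2Marshaller bound to the Name and Names classes used by the flow:

@Bean
public Jaxb2Marshaller jaxbMarshaller() {
    Jaxb2Marshaller marshaller = new Jaxb2Marshaller();
    marshaller.setClassesToBeBound(Names.class, Name.class);
    return marshaller;
}
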
@Bean
public IntegrationFlow topic1ListenerFromKafkaFlow() {
    return IntegrationFlows
            .from(Kafka.messageDrivenChannelAdapter(consumerFactory(),
                            KafkaMessageDrivenChannelAdapter.ListenerMode.record, TEST_TOPIC1)
                    .configureListenerContainer(c ->
                            c.ackMode(ContainerProperties.AckMode.MANUAL)
                                    .idleEventInterval(100L)
                                    .id("topic1ListenerContainer"))
                    .recoveryCallback(new ErrorMessageSendingRecoverer(errorChannel(),
                            new RawRecordHeaderErrorMessageStrategy()))
                    .retryTemplate(new RetryTemplate())
                    .filterInRetry(true)
                    .onPartitionsAssignedSeekCallback((map, callback) ->
                            ContextConfiguration.this.onPartitionsAssignedCalledLatch.countDown()))
            .filter(Message.class,
                    m -> m.getHeaders().get(KafkaHeaders.RECEIVED_MESSAGE_KEY, Integer.class) < 101,
                    f -> f.throwExceptionOnRejection(true))
            .<String, String>transform(String::toUpperCase)
            .channel(c -> c.queue("listeningFromKafkaResults1"))
            .get();
}

@Bean
public IntegrationFlow flow2() {
    return IntegrationFlows.from(this.inputChannel)
            .filter(p -> p instanceof String, e -> e
                    .id("filter")
                    .discardFlow(df -> df
                            .transform(String.class, "Discarded: "::concat)
                            .channel(c -> c.queue("discardChannel"))))
            .channel("foo")
            .fixedSubscriberChannel()
            .<String, Integer>transform(Integer::parseInt)
            .transform(new PayloadSerializingTransformer(),
                    c -> c.autoStartup(false).id("payloadSerializingTransformer"))
            .channel(MessageChannels.queue(new SimpleMessageStore(), "fooQueue"))
            .transform(new PayloadDeserializingTransformer())
            .filter("true", e -> e.id("expressionFilter"))
            .channel(publishSubscribeChannel())
            .transform((Integer p) -> p * 2, c -> c.advice(this.expressionAdvice()))
            .get();
}
