@Bean
public IntegrationFlow gatewayRequestFlow() {
    // Flow fed by the "gatewayRequest" channel (typically the request channel
    // of a messaging-gateway proxy).
    return IntegrationFlows.from("gatewayRequest")
            // Pass only payloads equal to "foo"; throwExceptionOnRejection(true)
            // raises a MessageRejectedException instead of silently discarding
            // non-matching messages.
            .filter("foo"::equals, f -> f.throwExceptionOnRejection(true))
            // Upper-case with an explicit locale: the no-arg overload uses the
            // default platform locale, which is locale-sensitive (e.g. Turkish
            // dotless-i) and can make this flow behave differently per machine.
            .<String, String>transform(p -> p.toUpperCase(java.util.Locale.ROOT))
            .get();
}
@Bean
public IntegrationFlow fileReadingFlow() {
    // Watches tmpDir for *.sitest files, reads them as strings, batches 25 at a
    // time, and exposes the batches on a pollable result channel.
    return IntegrationFlows
            .from(Files.inboundAdapter(tmpDir.getRoot())
                            .patternFilter("*.sitest")
                            // Use a NIO WatchService instead of directory polling scans.
                            .useWatchService(true)
                            .watchEvents(FileReadingMessageSource.WatchEventType.CREATE,
                                    FileReadingMessageSource.WatchEventType.MODIFY),
                    // Poll every 100 ms; polling errors go to a dedicated channel.
                    endpoint -> endpoint.poller(Pollers.fixedDelay(100)
                            .errorChannel("filePollingErrorChannel")))
            // Drop files whose names start with "a" — rejected messages throw
            // rather than vanish silently.
            .filter(File.class, file -> !file.getName().startsWith("a"),
                    endpoint -> endpoint.throwExceptionOnRejection(true))
            // File -> String payload.
            .transform(Files.toStringTransformer())
            // Single correlation group ("1"); release once 25 payloads arrive.
            .aggregate(aggregator -> aggregator.correlationExpression("1")
                    .releaseStrategy(group -> group.size() == 25))
            // Pollable queue the test (or caller) drains for results.
            .channel(MessageChannels.queue("fileReadingResultChannel"))
            .get();
}
@Bean
public IntegrationFlow gatewayRequestFlow() {
    // Consumes from the "gatewayRequest" channel.
    return IntegrationFlows.from("gatewayRequest")
            // Only "foo" payloads pass; rejection throws a
            // MessageRejectedException rather than dropping the message.
            .filter("foo"::equals, f -> f.throwExceptionOnRejection(true))
            // Locale-independent upper-casing: the default-locale overload of
            // toUpperCase() is locale-sensitive (Turkish-i problem), so pin the
            // conversion to the root locale.
            .<String, String>transform(p -> p.toUpperCase(java.util.Locale.ROOT))
            .get();
}
@Bean
public IntegrationFlow topic2ListenerFromKafkaFlow() {
    // Message-driven consumer of TEST_TOPIC2 built from a container obtained via
    // the shared listener-container factory; emits one message per record.
    return IntegrationFlows
            .from(Kafka
                    .messageDrivenChannelAdapter(
                            kafkaListenerContainerFactory().createContainer(TEST_TOPIC2),
                            KafkaMessageDrivenChannelAdapter.ListenerMode.record)
                    // Include the downstream filter within retry processing.
                    .filterInRetry(true))
            // Accept only records whose key header is < 101; rejected messages
            // throw instead of being silently discarded.
            .filter(Message.class,
                    message -> message.getHeaders()
                            .get(KafkaHeaders.RECEIVED_MESSAGE_KEY, Integer.class) < 101,
                    filterSpec -> filterSpec.throwExceptionOnRejection(true))
            .<String, String>transform(String::toUpperCase)
            // Pollable queue for draining the results.
            .channel(channelSpec -> channelSpec.queue("listeningFromKafkaResults2"))
            .get();
}
@Bean
public IntegrationFlow listeningFromKafkaFlow() {
    // Record-mode consumer of TEST_TOPIC using the shared consumer factory.
    return IntegrationFlows
            .from(Kafka.messageDrivenChannelAdapter(consumerFactory(),
                            KafkaMessageDrivenChannelAdapter.ListenerMode.record, TEST_TOPIC)
                    // MANUAL ack mode: offset commits are driven by the consumer
                    // of this flow, not by the container.
                    .configureListenerContainer(container ->
                            container.ackMode(AbstractMessageListenerContainer.AckMode.MANUAL))
                    // Listener errors are routed to the global error channel.
                    .errorChannel("errorChannel"))
            // Keep only records whose key header is < 101; rejections throw
            // rather than vanish.
            .filter(Message.class,
                    message -> message.getHeaders()
                            .get(KafkaHeaders.RECEIVED_MESSAGE_KEY, Integer.class) < 101,
                    filterSpec -> filterSpec.throwExceptionOnRejection(true))
            .<String, String>transform(String::toUpperCase)
            // Pollable queue holding the transformed results.
            .channel(channelSpec -> channelSpec.queue("listeningFromKafkaResults"))
            .get();
}
@Bean
public IntegrationFlow fileReadingFlow() {
    // Watches tmpDir for *.sitest files via a WatchService, converts each file to
    // a String, aggregates 25 payloads per batch, and queues the batches.
    return IntegrationFlows
            .from(source -> source.file(tmpDir.getRoot())
                            .patternFilter("*.sitest")
                            .useWatchService(true)
                            .watchEvents(FileReadingMessageSource.WatchEventType.CREATE,
                                    FileReadingMessageSource.WatchEventType.MODIFY),
                    // 100 ms fixed-delay poller; poll failures go to a dedicated channel.
                    endpoint -> endpoint.poller(Pollers.fixedDelay(100)
                            .errorChannel("filePollingErrorChannel")))
            // Skip files whose names begin with "a"; rejection throws so the
            // message is not silently lost.
            .filter(File.class, file -> !file.getName().startsWith("a"),
                    endpoint -> endpoint.throwExceptionOnRejection(true))
            // File -> String payload.
            .transform(Transformers.fileToString())
            // One fixed group ("1"), released at 25 members.
            .aggregate(aggregator -> aggregator.correlationExpression("1")
                    .releaseStrategy(group -> group.size() == 25))
            // Pollable result channel.
            .channel(MessageChannels.queue("fileReadingResultChannel"))
            .get();
}
@Bean
public IntegrationFlow topic1ListenerFromKafkaFlow() {
    // Record-mode consumer of TEST_TOPIC1 with manual acks, retry + recovery
    // wiring, and a seek callback used to signal partition assignment.
    return IntegrationFlows
            .from(Kafka.messageDrivenChannelAdapter(consumerFactory(),
                            KafkaMessageDrivenChannelAdapter.ListenerMode.record, TEST_TOPIC1)
                    // Manual ack mode; emit idle events every 100 ms; fixed
                    // container id so it can be looked up by name.
                    .configureListenerContainer(container ->
                            container.ackMode(ContainerProperties.AckMode.MANUAL)
                                    .idleEventInterval(100L)
                                    .id("topic1ListenerContainer"))
                    // When retries are exhausted, publish an ErrorMessage (with
                    // the raw record header strategy) to the error channel.
                    .recoveryCallback(new ErrorMessageSendingRecoverer(errorChannel(),
                            new RawRecordHeaderErrorMessageStrategy()))
                    .retryTemplate(new RetryTemplate())
                    // Include the downstream filter within retry processing.
                    .filterInRetry(true)
                    // Count down the outer config's latch once partitions are assigned.
                    .onPartitionsAssignedSeekCallback((assignments, seekCallback) ->
                            ContextConfiguration.this.onPartitionsAssignedCalledLatch.countDown()))
            // Accept only records whose key header is < 101; rejections throw.
            .filter(Message.class,
                    message -> message.getHeaders()
                            .get(KafkaHeaders.RECEIVED_MESSAGE_KEY, Integer.class) < 101,
                    filterSpec -> filterSpec.throwExceptionOnRejection(true))
            .<String, String>transform(String::toUpperCase)
            // Pollable queue for the results.
            .channel(channelSpec -> channelSpec.queue("listeningFromKafkaResults1"))
            .get();
}