/**
 * Obtain a {@link ReadableByteChannel} from the given supplier, and read it into a
 * {@code Flux} of {@code DataBuffer}s. Closes the channel when the flux is terminated.
 * @param channelSupplier the supplier for the channel to read from
 * @param dataBufferFactory the factory to create data buffers with
 * @param bufferSize the maximum size of the data buffers
 * @return a flux of data buffers read from the given channel
 */
public static Flux<DataBuffer> readByteChannel(
		Callable<ReadableByteChannel> channelSupplier, DataBufferFactory dataBufferFactory, int bufferSize) {

	Assert.notNull(channelSupplier, "'channelSupplier' must not be null");
	Assert.notNull(dataBufferFactory, "'dataBufferFactory' must not be null");
	Assert.isTrue(bufferSize > 0, "'bufferSize' must be > 0");

	// Tie the channel's lifecycle to the flux: opened on subscribe, closed on
	// termination or cancellation. Buffers discarded downstream are released.
	return Flux.using(channelSupplier,
			channel -> Flux.generate(
					new ReadableByteChannelGenerator(channel, dataBufferFactory, bufferSize)),
			DataBufferUtils::closeChannel)
			.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
}
@Override
public Flux<String> decode(Publisher<DataBuffer> inputStream, ResolvableType elementType,
		@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {

	List<byte[]> delimiterBytes = getDelimiterBytes(mimeType);

	// Split incoming buffers at delimiter boundaries, then re-join the
	// pieces belonging to each frame into a single buffer per frame.
	Flux<DataBuffer> frames = Flux.from(inputStream)
			.flatMapIterable(buffer -> splitOnDelimiter(buffer, delimiterBytes));

	// Release pooled buffers that Reactor discards (e.g. on cancellation)
	// so they are not leaked.
	Flux<DataBuffer> joined = frames
			.bufferUntil(StringDecoder::isEndFrame)
			.map(StringDecoder::joinUntilEndFrame)
			.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);

	return super.decode(joined, elementType, mimeType, hints);
}
}).doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
/**
 * Obtain a {@link ReadableByteChannel} from the given supplier, and read it into a
 * {@code Flux} of {@code DataBuffer}s. Closes the channel when the flux is terminated.
 * @param channelSupplier the supplier for the channel to read from
 * @param dataBufferFactory the factory to create data buffers with
 * @param bufferSize the maximum size of the data buffers
 * @return a flux of data buffers read from the given channel
 */
public static Flux<DataBuffer> readByteChannel(
		Callable<ReadableByteChannel> channelSupplier, DataBufferFactory dataBufferFactory, int bufferSize) {

	Assert.notNull(channelSupplier, "'channelSupplier' must not be null");
	Assert.notNull(dataBufferFactory, "'dataBufferFactory' must not be null");
	Assert.isTrue(bufferSize > 0, "'bufferSize' must be > 0");

	// Tie the channel's lifecycle to the flux: opened on subscribe, closed on
	// termination or cancellation. Buffers discarded downstream are released.
	return Flux.using(channelSupplier,
			channel -> Flux.generate(
					new ReadableByteChannelGenerator(channel, dataBufferFactory, bufferSize)),
			DataBufferUtils::closeChannel)
			.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
}
/**
 * Obtain a {@code AsynchronousFileChannel} from the given supplier, and read it into a
 * {@code Flux} of {@code DataBuffer}s, starting at the given position. Closes the
 * channel when the flux is terminated.
 * @param channelSupplier the supplier for the channel to read from
 * @param position the position to start reading from
 * @param dataBufferFactory the factory to create data buffers with
 * @param bufferSize the maximum size of the data buffers
 * @return a flux of data buffers read from the given channel
 */
public static Flux<DataBuffer> readAsynchronousFileChannel(Callable<AsynchronousFileChannel> channelSupplier,
		long position, DataBufferFactory dataBufferFactory, int bufferSize) {

	Assert.notNull(channelSupplier, "'channelSupplier' must not be null");
	Assert.notNull(dataBufferFactory, "'dataBufferFactory' must not be null");
	Assert.isTrue(position >= 0, "'position' must be >= 0");
	Assert.isTrue(bufferSize > 0, "'bufferSize' must be > 0");

	Flux<DataBuffer> result = Flux.using(channelSupplier,
			channel -> Flux.create(sink -> {
				AsynchronousFileChannelReadCompletionHandler completionHandler =
						new AsynchronousFileChannelReadCompletionHandler(
								channel, sink, position, dataBufferFactory, bufferSize);
				// Allocate the first buffer lazily, per subscription. Allocating it
				// before Flux.using (as previously done) leaked the pooled buffer when
				// the channel supplier threw or the flux was never subscribed to, and
				// incorrectly shared one buffer across multiple subscriptions.
				DataBuffer dataBuffer = dataBufferFactory.allocateBuffer(bufferSize);
				ByteBuffer byteBuffer = dataBuffer.asByteBuffer(0, bufferSize);
				channel.read(byteBuffer, position, dataBuffer, completionHandler);
				sink.onDispose(completionHandler::dispose);
			}),
			DataBufferUtils::closeChannel);

	// Release pooled buffers that are discarded downstream (e.g. on cancel).
	return result.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
}
@Override
public Flux<String> decode(Publisher<DataBuffer> inputStream, ResolvableType elementType,
		@Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {

	List<byte[]> delimiterBytes = getDelimiterBytes(mimeType);

	// Split incoming buffers at delimiter boundaries, then re-join the
	// pieces belonging to each frame into a single buffer per frame.
	Flux<DataBuffer> frames = Flux.from(inputStream)
			.flatMapIterable(buffer -> splitOnDelimiter(buffer, delimiterBytes));

	// Release pooled buffers that Reactor discards (e.g. on cancellation)
	// so they are not leaked.
	Flux<DataBuffer> joined = frames
			.bufferUntil(StringDecoder::isEndFrame)
			.map(StringDecoder::joinUntilEndFrame)
			.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);

	return super.decode(joined, elementType, mimeType, hints);
}
@Override
public Mono<Void> writeAndFlushWith(Publisher<? extends Publisher<? extends DataBuffer>> body) {
	// Flatten the nested publishers into a single buffer stream; release any
	// pooled buffers Reactor discards (e.g. on cancellation) before mapping
	// each buffer to a Jetty content chunk.
	Flux<ContentChunk> chunks = Flux.from(body)
			.flatMap(publisher -> publisher)
			.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release)
			.map(this::toContentChunk);

	ReactiveRequest.Content content = ReactiveRequest.Content.fromPublisher(chunks, getContentType());
	this.reactiveRequest = ReactiveRequest.newBuilder(this.jettyRequest).content(content).build();

	return doCommit(this::completes);
}
}).doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
@Override
public Mono<Void> writeAndFlushWith(Publisher<? extends Publisher<? extends DataBuffer>> body) {
	// Flatten the nested publishers into a single buffer stream; release any
	// pooled buffers Reactor discards (e.g. on cancellation) before mapping
	// each buffer to a Jetty content chunk.
	Flux<ContentChunk> chunks = Flux.from(body)
			.flatMap(publisher -> publisher)
			.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release)
			.map(this::toContentChunk);

	ReactiveRequest.Content content = ReactiveRequest.Content.fromPublisher(chunks, getContentType());
	this.reactiveRequest = ReactiveRequest.newBuilder(this.jettyRequest).content(content).build();

	return doCommit(this::completes);
}
/**
 * Obtain a {@code AsynchronousFileChannel} from the given supplier, and read it into a
 * {@code Flux} of {@code DataBuffer}s, starting at the given position. Closes the
 * channel when the flux is terminated.
 * @param channelSupplier the supplier for the channel to read from
 * @param position the position to start reading from
 * @param dataBufferFactory the factory to create data buffers with
 * @param bufferSize the maximum size of the data buffers
 * @return a flux of data buffers read from the given channel
 */
public static Flux<DataBuffer> readAsynchronousFileChannel(Callable<AsynchronousFileChannel> channelSupplier,
		long position, DataBufferFactory dataBufferFactory, int bufferSize) {

	Assert.notNull(channelSupplier, "'channelSupplier' must not be null");
	Assert.notNull(dataBufferFactory, "'dataBufferFactory' must not be null");
	Assert.isTrue(position >= 0, "'position' must be >= 0");
	Assert.isTrue(bufferSize > 0, "'bufferSize' must be > 0");

	Flux<DataBuffer> result = Flux.using(channelSupplier,
			channel -> Flux.create(sink -> {
				AsynchronousFileChannelReadCompletionHandler completionHandler =
						new AsynchronousFileChannelReadCompletionHandler(
								channel, sink, position, dataBufferFactory, bufferSize);
				// Allocate the first buffer lazily, per subscription. Allocating it
				// before Flux.using (as previously done) leaked the pooled buffer when
				// the channel supplier threw or the flux was never subscribed to, and
				// incorrectly shared one buffer across multiple subscriptions.
				DataBuffer dataBuffer = dataBufferFactory.allocateBuffer(bufferSize);
				ByteBuffer byteBuffer = dataBuffer.asByteBuffer(0, bufferSize);
				channel.read(byteBuffer, position, dataBuffer, completionHandler);
				sink.onDispose(completionHandler::dispose);
			}),
			DataBufferUtils::closeChannel);

	// Release pooled buffers that are discarded downstream (e.g. on cancel).
	return result.doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release);
}
encodeData(data, valueType, mediaType, factory, hints), encodeText("\n", mediaType, factory)) .doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release); });
@Test public void discardLocalOrder() { List<String> discardOrder = Collections.synchronizedList(new ArrayList<>(2)); StepVerifier.create(Flux.range(1, 2) .hide() //hide both avoid the fuseable AND tryOnNext usage .filter(i -> i % 2 == 0) .doOnDiscard(Number.class, i -> discardOrder.add("FIRST")) .doOnDiscard(Integer.class, i -> discardOrder.add("SECOND")) ) .expectNext(2) .expectComplete() .verify(); Assertions.assertThat(discardOrder).containsExactly("FIRST", "SECOND"); }
encodeData(data, valueType, mediaType, factory, hints), encodeText("\n", mediaType, factory)) .doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release); });
@Test public void discardLocalMultipleFilters() { AtomicInteger discardNumberCount = new AtomicInteger(); AtomicInteger discardStringCount = new AtomicInteger(); StepVerifier.create(Flux.range(1, 12) .hide() //hide both avoid the fuseable AND tryOnNext usage .filter(i -> i % 2 == 0) .map(String::valueOf) .filter(s -> s.length() < 2) .doOnDiscard(Number.class, i -> discardNumberCount.incrementAndGet()) .doOnDiscard(String.class, i -> discardStringCount.incrementAndGet()) ) .expectNext("2", "4", "6", "8") .expectComplete() .verify(); Assertions.assertThat(discardNumberCount).hasValue(6); //1 3 5 7 9 11 Assertions.assertThat(discardStringCount).hasValue(2); //10 12 }
@Test
public void shouldBeAbleToCatchDiscardedElement() {
	TestPublisher<Integer> publisher = TestPublisher.createCold();
	// One-slot holder so the lambda can record the discarded element.
	Integer[] dropped = new Integer[1];

	Flux<String> transformed = publisher.flux()
			.switchOnFirst((first, innerFlux) -> innerFlux.map(String::valueOf))
			.doOnDiscard(Integer.class, value -> dropped[0] = value);

	publisher.next(1);

	// Subscribe with zero demand, then cancel: the buffered first element
	// must be routed to the discard hook.
	StepVerifier.create(transformed, 0)
			.thenCancel()
			.verify(Duration.ofSeconds(10));

	publisher.assertCancelled();
	publisher.assertWasRequested();

	Assertions.assertThat(dropped).containsExactly(1);
}
@Test
public void shouldBeAbleToCatchDiscardedElementInCaseOfConditional() {
	TestPublisher<Integer> publisher = TestPublisher.createCold();
	// One-slot holder so the lambda can record the discarded element.
	Integer[] dropped = new Integer[1];

	// The trailing filter forces the conditional-subscriber code path.
	Flux<String> transformed = publisher.flux()
			.switchOnFirst((first, innerFlux) -> innerFlux.map(String::valueOf))
			.filter(t -> true)
			.doOnDiscard(Integer.class, value -> dropped[0] = value);

	publisher.next(1);

	StepVerifier.create(transformed, 0)
			.thenCancel()
			.verify(Duration.ofSeconds(10));

	publisher.assertCancelled();
	publisher.assertWasRequested();

	Assertions.assertThat(dropped).contains(1);
}
@Test
public void shouldBeAbleToCatchDiscardedElement() {
	TestPublisher<Integer> publisher = TestPublisher.createCold();
	// One-slot holder so the lambda can record the discarded element.
	Integer[] dropped = new Integer[1];

	Flux<String> transformed = publisher.flux()
			.transform(flux -> new SwitchTransformFlux<>(
					flux, (first, innerFlux) -> innerFlux.map(String::valueOf)))
			.doOnDiscard(Integer.class, value -> dropped[0] = value);

	publisher.next(1);

	// Subscribe with zero demand, then cancel: the buffered first element
	// must be routed to the discard hook.
	StepVerifier.create(transformed, 0)
			.thenCancel()
			.verify(Duration.ofSeconds(10));

	publisher.assertCancelled();
	publisher.assertWasRequested();

	Assert.assertArrayEquals(new Integer[] {1}, dropped);
}
@Test
public void shouldBeAbleToCatchDiscardedElementInCaseOfConditional() {
	TestPublisher<Integer> publisher = TestPublisher.createCold();
	// One-slot holder so the lambda can record the discarded element.
	Integer[] dropped = new Integer[1];

	// The trailing filter forces the conditional-subscriber code path.
	Flux<String> transformed = publisher.flux()
			.transform(flux -> new SwitchTransformFlux<>(
					flux, (first, innerFlux) -> innerFlux.map(String::valueOf)))
			.filter(t -> true)
			.doOnDiscard(Integer.class, value -> dropped[0] = value);

	publisher.next(1);

	StepVerifier.create(transformed, 0)
			.thenCancel()
			.verify(Duration.ofSeconds(10));

	publisher.assertCancelled();
	publisher.assertWasRequested();

	Assert.assertArrayEquals(new Integer[] {1}, dropped);
}
}
@Override public Flux<String> decode(Publisher<DataBuffer> inputStream, ResolvableType elementType, @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) { List<byte[]> delimiterBytes = getDelimiterBytes(mimeType); Flux<DataBuffer> inputFlux = Flux.from(inputStream) .flatMapIterable(dataBuffer -> splitOnDelimiter(dataBuffer, delimiterBytes)) .bufferUntil(StringDecoder::isEndFrame) .map(StringDecoder::joinUntilEndFrame) .doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release); return super.decode(inputFlux, elementType, mimeType, hints); }
@Override public Mono<Void> writeAndFlushWith(Publisher<? extends Publisher<? extends DataBuffer>> body) { Flux<ContentChunk> chunks = Flux.from(body) .flatMap(Function.identity()) .doOnDiscard(PooledDataBuffer.class, DataBufferUtils::release) .map(this::toContentChunk); ReactiveRequest.Content content = ReactiveRequest.Content.fromPublisher(chunks, getContentType()); this.reactiveRequest = ReactiveRequest.newBuilder(this.jettyRequest).content(content).build(); return doCommit(this::completes); }