/**
 * Creates the subscriber driving breadth-first ("breath") expansion.
 *
 * @param actual the downstream subscriber to forward signals to
 * @param expander maps each emitted value to a {@link Publisher} of further values to expand
 * @param capacityHint initial sizing hint for the internal unbounded queue
 */
ExpandBreathSubscriber(CoreSubscriber<? super T> actual,
		Function<? super T, ? extends Publisher<? extends T>> expander,
		int capacityHint) {
	super(actual);
	this.expander = expander;
	// Unbounded queue of pending inner publishers still awaiting expansion;
	// capacityHint only pre-sizes the backing chunks, it is not a bound.
	this.queue = Queues.<Publisher<? extends T>>unbounded(capacityHint).get();
}
/**
 * Wraps a {@code BaseSink} so that concurrent emissions are serialized.
 *
 * @param sink the delegate sink that receives the serialized signals
 */
SerializedSink(BaseSink<T> sink) {
	this.sink = sink;
	// Multi-producer queue: several threads may emit concurrently, while a
	// single drain consumes and forwards to the delegate sink.
	this.mpscQueue = Queues.<T>unboundedMultiproducer().get();
}
@Test public void capacityReactorBoundedQueue() { //the bounded queue floors at 8 and rounds to the next power of 2 assertThat(Queues.capacity(Queues.get(2).get())) .isEqualTo(8); assertThat(Queues.capacity(Queues.get(8).get())) .isEqualTo(8); assertThat(Queues.capacity(Queues.get(9).get())) .isEqualTo(16); }
@Test
public void scanOperator() {
	// Parameterize the operator type instead of using a raw FluxCombineLatest,
	// which triggered an unchecked-conversion warning.
	FluxCombineLatest<Object, Object> s = new FluxCombineLatest<>(
			Collections.emptyList(), v -> v, Queues.small(), 123);

	// The PREFETCH attribute must reflect the prefetch passed at construction.
	assertThat(s.scan(Scannable.Attr.PREFETCH)).isEqualTo(123);
}
@Override
public int getBufferSize() {
	// Delegates to Queues.capacity, which reports the capacity of the backing
	// queue (implementation-dependent for unbounded queues).
	return Queues.capacity(this.queue);
}
@Test
public void scanMainLargeBuffered() {
	CoreSubscriber<Integer> actual = new LambdaSubscriber<>(null, e -> {}, null, null);
	FluxFlatMap.FlatMapMain<Integer, Integer> test = new FluxFlatMap.FlatMapMain<>(actual,
			i -> Mono.just(i), true, 5, Queues.<Integer>unbounded(), 789,
			Queues.<Integer>get(789));

	// Three scalar values buffered on top of an already-saturated size counter.
	ConcurrentLinkedQueue<Integer> scalars = new ConcurrentLinkedQueue<>();
	scalars.add(1);
	scalars.add(2);
	scalars.add(3);
	test.scalarQueue = scalars;
	test.size = Integer.MAX_VALUE;

	// The int-valued BUFFERED attribute cannot represent the true total here.
	assertThat(test.scan(Scannable.Attr.BUFFERED)).isEqualTo(Integer.MIN_VALUE);
	// LARGE_BUFFERED widens to long and reports the exact count.
	assertThat(test.scan(Scannable.Attr.LARGE_BUFFERED)).isEqualTo(Integer.MAX_VALUE + 3L);
}
/**
 * @return a fresh, empty single-element queue obtained from {@link Queues#one()}
 */
private Queue<Integer> emptyOneQueue() {
	return Queues.<Integer>one().get();
}
/**
 * Creates the operator with a default queue supplier sized to accommodate
 * both the per-inner prefetch and the number of concurrent inner sources.
 *
 * @param source the upstream source
 * @param mapper maps each value to an inner {@link Publisher}
 * @param maxConcurrency maximum number of inner sources subscribed to concurrently
 * @param prefetch number of items to request from each inner source
 * @param errorMode whether errors interrupt or are delayed until the end
 */
FluxMergeSequential(Flux<? extends T> source,
		Function<? super T, ? extends Publisher<? extends R>> mapper,
		int maxConcurrency, int prefetch, ErrorMode errorMode) {
	// The queue must be able to hold the larger of the two demands.
	this(source, mapper, maxConcurrency, prefetch, errorMode,
			Queues.get(Math.max(prefetch, maxConcurrency)));
}
/**
 * Configures buffer size for this builder. Default value is {@link Queues#SMALL_BUFFER_SIZE}.
 * @param bufferSize the internal buffer size to hold signals, must be a power of 2.
 * @return builder with provided buffer size
 * @throws IllegalArgumentException if {@code bufferSize} is not strictly positive
 * or not a power of two
 */
public Builder<T> bufferSize(int bufferSize) {
	// Check positivity first: previously a zero or negative value failed the
	// power-of-two check and produced the wrong (power-of-2) error message,
	// leaving the strictly-positive branch effectively unreachable.
	if (bufferSize < 1) {
		throw new IllegalArgumentException("bufferSize must be strictly positive, " +
				"was: " + bufferSize);
	}
	if (!Queues.isPowerOfTwo(bufferSize)) {
		throw new IllegalArgumentException("bufferSize must be a power of 2 : " + bufferSize);
	}
	this.bufferSize = bufferSize;
	return this;
}
/**
 * Zip two sources together, that is to say wait for all the sources to emit one
 * element and combine these elements once into an output value (constructed by the provided
 * combinator). The operator will continue doing so until any of the sources completes.
 * Errors will immediately be forwarded.
 * This "Step-Merge" processing is especially useful in Scatter-Gather scenarios.
 * <p>
 * <img class="marble" src="doc-files/marbles/zipTwoSourcesWithZipperForFlux.svg" alt="">
 *
 * @param source1 The first {@link Publisher} source to zip.
 * @param source2 The second {@link Publisher} source to zip.
 * @param combinator The aggregate function that will receive a unique value from each upstream and return the
 * value to signal downstream
 * @param <T1> type of the value from source1
 * @param <T2> type of the value from source2
 * @param <O> The produced output after transformation by the combinator
 *
 * @return a zipped {@link Flux}
 */
public static <T1, T2, O> Flux<O> zip(Publisher<? extends T1> source1,
		Publisher<? extends T2> source2,
		final BiFunction<? super T1, ? super T2, ? extends O> combinator) {
	// Uses the extra-small (XS) queue/prefetch since only one element per
	// source is held at a time between combinations.
	return onAssembly(new FluxZip<T1, O>(source1, source2, combinator, Queues.xs(),
			Queues.XS_BUFFER_SIZE));
}
/**
 * Creates a single-producer single-consumer array-backed queue.
 *
 * @param capacity the requested capacity; rounded up to the next power of two
 * as required by the mask-based indexing of the backing array
 */
SpscArrayQueue(int capacity) {
	super(Queues.ceilingNextPowerOfTwo(capacity));
}
@Test @SuppressWarnings("unchecked") //safe varargs public void fusedThrowsInDrainLoopDelayed() { new FluxMergeOrdered<>(2, Queues.small(), Comparator.naturalOrder(), Flux.just(1).map(v -> { throw new IllegalArgumentException("boom"); }), Flux.just(2, 3)) .as(StepVerifier::create) .expectNext(2, 3) .verifyErrorMessage("boom"); }
@Test
public void capacityBoundedBlockingQueue() {
	// Parameterize the queue instead of using a raw type.
	Queue<Object> q = new LinkedBlockingQueue<>(10);
	// For a bounded BlockingQueue, capacity() must report the configured bound.
	assertThat(Queues.capacity(q)).isEqualTo(10);
}
/**
 * @param element the value to pre-populate the queue with
 * @return a single-element queue already containing {@code element}
 */
private Queue<Integer> oneQueueWithTestElement(int element) {
	Queue<Integer> q = Queues.<Integer>one().get();
	q.add(element);
	return q;
}
}
/**
 * Creates one side of the sequence-equality comparison.
 *
 * @param parent the coordinator that compares values across both subscribers
 * @param prefetch number of items to request and buffer from this side
 */
EqualSubscriber(EqualCoordinator<T> parent, int prefetch) {
	this.parent = parent;
	this.prefetch = prefetch;
	// Bounded queue sized to the prefetch so this side never buffers more
	// than it has requested.
	this.queue = Queues.<T>get(prefetch).get();
}
/**
 * Configures buffer size for this builder. Default value is {@link Queues#SMALL_BUFFER_SIZE}.
 * @param bufferSize the internal buffer size to hold signals, must be a power of 2
 * @return builder with provided buffer size
 * @throws IllegalArgumentException if {@code bufferSize} is not strictly positive
 * or not a power of two
 */
public Builder<T> bufferSize(int bufferSize) {
	// Check positivity first: previously a zero or negative value failed the
	// power-of-two check and produced the wrong (power-of-2) error message,
	// leaving the strictly-positive branch effectively unreachable.
	if (bufferSize < 1) {
		throw new IllegalArgumentException("bufferSize must be strictly positive, " +
				"was: " + bufferSize);
	}
	if (!Queues.isPowerOfTwo(bufferSize)) {
		throw new IllegalArgumentException("bufferSize must be a power of 2 : " + bufferSize);
	}
	this.bufferSize = bufferSize;
	return this;
}
@Test
public void spscArrayQueuesAPI() {
	// The extra-small factory is expected to hand out SPSC array queues.
	final Object queue = Queues.xs().get();
	assertThat(queue).isInstanceOf(SpscArrayQueue.class);
}