Refine search
/**
 * Returns a new list containing the elements of {@code list} in reverse
 * (tail-to-head) order. The deque itself is not modified.
 *
 * @param list the deque to reverse; must not be {@code null}
 * @param <T>  the element type
 * @return a new {@link List} holding the elements in reverse order
 * @throws IllegalArgumentException if {@code list} is {@code null}
 */
public static <T> List<T> reverse_customStream(ArrayDeque<T> list) {
    if (list == null) {
        throw new IllegalArgumentException("list can't be null");
    }
    // descendingIterator() already walks tail-to-head, so no explicit reversal is needed.
    Spliterator<T> reversed =
            Spliterators.spliteratorUnknownSize(list.descendingIterator(), Spliterator.ORDERED);
    return StreamSupport.stream(reversed, false).collect(toList());
}
/**
 * Streams the elements produced by {@code createIterator()}; closing the
 * returned stream closes the underlying iterator.
 *
 * @return a sequential stream over the iterator's elements
 */
@Override
public Stream<E> stream() {
    final CloseableIterator<E> iterator = createIterator();
    // When maxSize is known, advertise it so downstream operations can presize.
    final Spliterator<E> spliterator = (maxSize == null)
            ? Spliterators.spliteratorUnknownSize(iterator, 0)
            : Spliterators.spliterator(iterator, maxSize, 0);
    return StreamSupport.stream(spliterator, false).onClose(iterator::close);
}
// Exposes no elements: callers iterating this object always see an empty sequence.
public Spliterator<String> spliterator() { return Spliterators.emptySpliterator(); }
/**
 * Ensures the {@link ClusteringCacheManagerServiceConfigurationParser} is locatable as a
 * {@link CacheManagerServiceConfigurationParser} instance.
 */
@Test
public void testServiceLocator() throws Exception {
    // Drain the ServiceLoader-backed iterator into a stream of implementation
    // classes and check the clustering parser is among the discovered services.
    // (Long.MAX_VALUE is the conventional "size unknown" estimate here.)
    assertThat(stream(spliterator(ClassLoading.servicesOfType(CacheManagerServiceConfigurationParser.class).iterator(), Long.MAX_VALUE, 0), false).map(Object::getClass).collect(Collectors.toList()), hasItem(ClusteringCacheManagerServiceConfigurationParser.class));
}
/**
 * Verifies that every {@code ServiceFactory} discoverable through plain JDK
 * {@link ServiceLoader} lookup (across all core-bundle class loaders) is also
 * visible through the OSGi-aware loader.
 */
public static void testAllServicesAreAvailable() {
    Set<String> osgiAvailableClasses =
            factoryClassNames(OsgiServiceLoader.load(ServiceFactory.class).iterator())
                    .collect(toSet());
    Set<String> jdkAvailableClasses = of(EhcacheActivator.getCoreBundle().getBundles())
            .map(b -> b.adapt(BundleWiring.class).getClassLoader())
            .flatMap(cl -> factoryClassNames(ServiceLoader.load(ServiceFactory.class, cl).iterator()))
            .collect(toSet());
    assertThat(osgiAvailableClasses, hasItems(jdkAvailableClasses.toArray(new String[0])));
}

/**
 * Streams the concrete class names of the service instances produced by
 * {@code factories}. Extracted to avoid duplicating the iterator-to-stream
 * plumbing for the OSGi and plain-JDK lookups above.
 */
private static Stream<String> factoryClassNames(Iterator<?> factories) {
    return stream(spliterator(factories, Long.MAX_VALUE, 0), false)
            .map(f -> f.getClass().getName());
}
// Wrap the JSON object's field iterator so it can be processed as a stream.
// NOTE(review): `properties` is presumably a Jackson ObjectNode/JsonNode — confirm in the enclosing method.
final Spliterator<Map.Entry<String, JsonNode>> fieldSpliterator = Spliterators.spliteratorUnknownSize(properties.fields(), Spliterator.IMMUTABLE);
// For each field, read its "type" sub-property as text; fields whose type text is
// empty are dropped, the rest become FieldTypeDTO(name, type) entries in a set.
final Set<FieldTypeDTO> fieldsMap = StreamSupport.stream(fieldSpliterator, false) .map(field -> Maps.immutableEntry(field.getKey(), field.getValue().path("type").asText())) .filter(field -> !field.getValue().isEmpty()) .map(field -> FieldTypeDTO.create(field.getKey(), field.getValue())) .collect(Collectors.toSet());
/**
 * Bundle activator entry point: records the core bundle context (first activation
 * wins) and selects between OSGi-based and plain JDK service loading, depending on
 * the {@code OSGI_LOADING} framework/system property.
 *
 * @param context this bundle's context
 * @throws IllegalStateException if another bundle instance already registered a context
 */
@Override
public void start(BundleContext context) throws Exception {
    BundleContext currentContext = CORE_BUNDLE.getAndUpdate(current -> current == null ? context : current);
    if (currentContext != null) {
        throw new IllegalStateException("Multiple bundle instances running against the same core classes: existing bundle: " + currentContext.getBundle() + " new bundle: " + context.getBundle());
    }
    String greeting = "Detected OSGi Environment (core is in bundle: " + context.getBundle() + ")";
    if ("false".equalsIgnoreCase(context.getProperty(OSGI_LOADING))) {
        SafeOsgi.disableOSGiServiceLoading();
        LOGGER.info(greeting + ": OSGi Based Service Loading Disabled Via System/Framework Property - Extensions Outside This Bundle Will Not Be Detected");
        // Building this listing walks the whole ServiceLoader chain; only pay that
        // cost when debug logging is actually enabled (the old code computed the
        // joined string unconditionally before handing it to LOGGER.debug).
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("JDK Service Loading Sees:\n\t" + stream(spliterator(ClassLoading.servicesOfType(ServiceFactory.class).iterator(), Long.MAX_VALUE, 0), false)
                    .map(sf -> sf.getServiceType().getName()).collect(joining("\n\t")));
        }
    } else {
        SafeOsgi.enableOSGiServiceLoading();
        LOGGER.info(greeting + ": Using OSGi Based Service Loading");
    }
}
/**
 * Renders each HTTP/2 header as {@code "<arrow> name: value"}, one per line,
 * with the arrow chosen from the traffic direction.
 *
 * @param direction traffic direction used to pick the indent arrow
 * @param headers   headers to render, in iteration order
 * @return newline-joined header listing
 */
private String formatHeaders(Direction direction, Http2Headers headers) {
    return StreamSupport
            .stream(Spliterators.spliteratorUnknownSize(headers.iterator(), Spliterator.ORDERED), false)
            .map(header -> String.format("%s %s: %s", indentArrow(direction), header.getKey(), header.getValue()))
            .collect(Collectors.joining("\n"));
}
private Set<String> collectSecuritySchemeNames(ArrayNode... securityRequirements) { return Stream.of(securityRequirements) // .flatMap(e -> stream(spliterator(e.elements(), e.size(), Spliterator.ORDERED), false)) // .flatMap(e -> stream(spliteratorUnknownSize(e.fieldNames(), Spliterator.ORDERED), false)) // .collect(Collectors.toSet()); }
/**
 * Accumulates the remaining elements of {@code stream} into a new Set.
 * The stream is consumed by this call.
 *
 * @param stream the {@code java.util.stream.BaseStream} to drain
 * @param <T> the type of element
 * @return a new {@code java.util.Set} instance
 */
// The second type argument was previously the raw type `? extends BaseStream`;
// parameterizing it keeps every standard stream type (Stream, IntStream, ...)
// accepted while eliminating the unchecked raw-type usage.
public static <T> Set<T> toSet(BaseStream<T, ? extends BaseStream<T, ?>> stream) {
    return StreamSupport.stream(
            Spliterators.spliteratorUnknownSize(stream.iterator(), Spliterator.ORDERED),
            false).collect(Collectors.<T>toSet());
}
/** {@inheritDoc} */
// Adapts the backing collection's entries into (name, descriptor) tuples lazily:
// the mapping runs only as the returned iterator is advanced.
// NOTE(review): `models` presumably iterates map-entry-like elements exposing
// getKey()/getValue() — confirm against the enclosing class's field declaration.
@Override public Iterator<IgniteBiTuple<String, ModelDescriptor>> iterator() { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(models.iterator(), Spliterator.ORDERED), false) .map(e -> new IgniteBiTuple<>(e.getKey(), e.getValue())) .iterator(); } }
// Flattens a HeaderMap into header-name -> first-value pairs.
// NOTE(review): Collectors.toMap without a merge function throws IllegalStateException
// on duplicate keys; HeaderMap iteration presumably yields one entry per header
// name (multi-values grouped under it) — confirm against the HeaderMap contract.
private Map<String, String> toHeadersMap(HeaderMap headerMap) { return stream(spliteratorUnknownSize(headerMap.iterator(), 0), false) .collect(toMap( h -> h.getHeaderName().toString(), h -> h.getFirst())); } }
checkNotNull(streamB); checkNotNull(function); boolean isParallel = streamA.isParallel() || streamB.isParallel(); // same as Stream.concat Spliterator<A> splitrA = streamA.spliterator(); Spliterator<B> splitrB = streamB.spliterator(); int characteristics = splitrA.characteristics() & splitrB.characteristics() & (Spliterator.SIZED | Spliterator.ORDERED); Iterator<A> itrA = Spliterators.iterator(splitrA); Iterator<B> itrB = Spliterators.iterator(splitrB); return StreamSupport.stream( new AbstractSpliterator<R>( Math.min(splitrA.estimateSize(), splitrB.estimateSize()), characteristics) { @Override public boolean tryAdvance(Consumer<? super R> action) {
/**
 * Returns a sequential {@link Stream} of the remaining contents of {@code iterator}.
 * Do not use {@code iterator} directly after passing it to this method: the
 * returned stream consumes it lazily as it is traversed.
 *
 * @param iterator source of elements
 * @param <T> element type
 * @return a sequential stream backed by {@code iterator}
 */
public static <T> Stream<T> stream(Iterator<T> iterator) {
    Spliterator<T> spliterator = Spliterators.spliteratorUnknownSize(iterator, 0);
    return StreamSupport.stream(spliterator, false);
}
/**
 * Ensures the sub-spliterator provided by the List is compatible with ours,
 * i.e. is {@code SIZED | SUBSIZED}. Standard List implementations already
 * qualify, so the cost of dumping into an intermediate array in the fallback
 * case is irrelevant.
 */
private static <E> Spliterator<E> spliterator(List<E> list) {
    final Spliterator<E> sp = list.spliterator();
    final int required = SIZED | SUBSIZED;
    if ((sp.characteristics() & required) == required) {
        return sp;
    }
    // Fallback: materialize the elements so the resulting spliterator's size is exact.
    return Spliterators.spliterator(StreamSupport.stream(sp, false).toArray(), IMMUTABLE | ORDERED);
}
/**
 * Adapts this iterator to a sequential {@link Stream}. Closing the returned
 * stream also closes this iterator.
 *
 * @return this iterator as a {@link Stream}
 */
default Stream<T> stream() {
    final Spliterator<T> spliterator = spliteratorUnknownSize( this, 0 );
    return StreamSupport.stream( spliterator, false ).onClose( this::close );
}
// Streams every key registered under `state` whose namespace matches `namespace`,
// by scanning the state's RocksDB column family from the first entry.
// The caller MUST close the returned stream: closing releases the native RocksDB iterator.
@SuppressWarnings("unchecked") @Override public <N> Stream<K> getKeys(String state, N namespace) {
    // Unknown state name, or state of a non key/value kind: nothing to enumerate.
    Tuple2<ColumnFamilyHandle, RegisteredStateMetaInfoBase> columnInfo = kvStateInformation.get(state);
    if (columnInfo == null || !(columnInfo.f1 instanceof RegisteredKeyValueStateBackendMetaInfo)) { return Stream.empty(); }
    RegisteredKeyValueStateBackendMetaInfo<N, ?> registeredKeyValueStateBackendMetaInfo = (RegisteredKeyValueStateBackendMetaInfo<N, ?>) columnInfo.f1;
    final TypeSerializer<N> namespaceSerializer = registeredKeyValueStateBackendMetaInfo.getNamespaceSerializer();
    final DataOutputSerializer namespaceOutputView = new DataOutputSerializer(8);
    // "Ambiguous key" = serialized key/namespace boundaries can't be told apart
    // without extra length information; the flag controls how both are written/read.
    boolean ambiguousKeyPossible = RocksDBKeySerializationUtils.isAmbiguousKeyPossible(getKeySerializer(), namespaceSerializer);
    // Serialize the requested namespace once; the iterator below filters DB entries against these bytes.
    final byte[] nameSpaceBytes;
    try { RocksDBKeySerializationUtils.writeNameSpace( namespace, namespaceSerializer, namespaceOutputView, ambiguousKeyPossible); nameSpaceBytes = namespaceOutputView.getCopyOfBuffer(); } catch (IOException ex) { throw new FlinkRuntimeException("Failed to get keys from RocksDB state backend.", ex); }
    RocksIteratorWrapper iterator = getRocksIterator(db, columnInfo.f0);
    iterator.seekToFirst();
    // Wrapper deserializes each matching DB key back into a user key of type K.
    final RocksStateKeysIterator<K> iteratorWrapper = new RocksStateKeysIterator<>(iterator, state, getKeySerializer(), keyGroupPrefixBytes, ambiguousKeyPossible, nameSpaceBytes);
    Stream<K> targetStream = StreamSupport.stream(Spliterators.spliteratorUnknownSize(iteratorWrapper, Spliterator.ORDERED), false);
    // Tie the native iterator's lifetime to the stream: closing the stream closes it.
    return targetStream.onClose(iteratorWrapper::close);
}
private Stream<Arguments> toStream(CsvParser csvParser) { CsvParserIterator iterator = new CsvParserIterator(csvParser, this.annotation); return stream(spliteratorUnknownSize(iterator, Spliterator.ORDERED), false) // .skip(this.numLinesToSkip) // .onClose(() -> { try { csvParser.stopParsing(); } catch (Throwable throwable) { handleCsvException(throwable, this.annotation); } }); }
/**
 * Returns the query results as a forward-only {@link Stream} backed by a
 * scrollable cursor. The caller must close the stream to release the cursor.
 *
 * @return a sequential stream of results; empty when max-results is zero
 */
@Override @SuppressWarnings("unchecked") public Stream<R> stream() {
    if ( getMaxResults() == 0 ) {
        // Nothing can be returned; avoid opening a database cursor at all.
        final Spliterator<R> spliterator = Spliterators.emptySpliterator();
        return StreamSupport.stream( spliterator, false );
    }
    final ScrollableResultsImplementor scrollableResults = scroll( ScrollMode.FORWARD_ONLY );
    final ScrollableResultsIterator<R> iterator = new ScrollableResultsIterator<>( scrollableResults );
    final Spliterator<R> spliterator = Spliterators.spliteratorUnknownSize( iterator, Spliterator.NONNULL );
    // BUG FIX: the previous code discarded the result of onClose() and returned the
    // original stream. Per the BaseStream.onClose spec the *returned* stream carries
    // the close handler; relying on the current JDK's mutate-and-return-this
    // implementation detail risks the cursor never being closed.
    return StreamSupport.stream( spliterator, false ).onClose( scrollableResults::close );
}