@Override
public Void call() throws Exception {
   try {
      // Snapshot every entry of this directory in one shot; the marshaller is
      // needed by the loader to materialise stored values.
      // Declared as the Set interface (not concrete HashSet) for consistency
      // with publishEntries() and "program to interfaces".
      final Set<MarshalledEntry> allInternalEntries = new HashSet<>();
      dir.loadAllEntries(allInternalEntries, Integer.MAX_VALUE, ctx.getMarshaller());
      for (MarshalledEntry me : allInternalEntries) {
         // Honour cooperative cancellation requested through the task context.
         if (taskContext.isStopped()) break;
         // A null filter means "accept every key".
         if (filter == null || filter.accept(me.getKey())) {
            task.processEntry(me, taskContext);
         }
      }
      return null;
   } catch (Exception e) {
      // Log with store context before rethrowing so failures inside the
      // parallel executor remain traceable; the cause is preserved.
      log.errorExecutingParallelStoreTask(e);
      throw e;
   }
}
});
@Override
public Void call() throws Exception {
   try {
      // Snapshot every entry of this directory in one shot; the marshaller is
      // needed by the loader to materialise stored values.
      // Uses the diamond operator and the Set interface for the local, matching
      // the sibling overloads in this file.
      final Set<MarshalledEntry> allInternalEntries = new HashSet<>();
      dir.loadAllEntries(allInternalEntries, Integer.MAX_VALUE, ctx.getMarshaller());
      for (MarshalledEntry me : allInternalEntries) {
         // Honour cooperative cancellation requested through the task context.
         if (taskContext.isStopped()) break;
         // A null filter means "load every key".
         if (filter == null || filter.shouldLoadKey(me.getKey())) {
            task.processEntry(me, taskContext);
         }
      }
      return null;
   } catch (Exception e) {
      // Log with store context before rethrowing so failures inside the
      // parallel executor remain traceable; the cause is preserved.
      log.errorExecutingParallelStoreTask(e);
      throw e;
   }
}
});
@Override public Publisher<MarshalledEntry<K, V>> publishEntries(Predicate<? super K> filter, boolean fetchValue, boolean fetchMetadata) { return Flowable.defer(() -> { // Make sure that we update directories before we start iterating upon directories scanForUnknownDirectories(); return Flowable.fromIterable(openDirectories.values()); }) // We parallelize this since the loading below is blocking .parallel() .runOn(Schedulers.from(ctx.getExecutor())) .flatMap(dir -> { final Set<MarshalledEntry<K, V>> allInternalEntries = new HashSet<>(); dir.loadAllEntries(allInternalEntries, Integer.MAX_VALUE, ctx.getMarshaller()); return Flowable.fromIterable(allInternalEntries); }) .filter(me -> filter == null || filter.test(me.getKey())) .sequential(); }