/**
 * Registers a handler on the given address whose work completes without a
 * result value. The {@link Completable} produced by {@code mapper} is
 * converted to a {@code Single} emitting a dummy {@code 0} so it fits the
 * signature expected by {@link #register}.
 * NOTE(review): the original declared a type parameter {@code <T>} that was
 * never referenced; it has been removed.
 * @param address the address to register on
 * @param mapper maps the incoming JSON payload to a {@link Completable}
 */
private void registerCompletable(String address, Function<JsonObject, Completable> mapper) {
  register(address, obj -> mapper.apply(obj).toSingleDefault(0));
}
/**
 * Sends all given events and re-persists those that could not be delivered.
 * The events are first removed from persistence, then sent individually;
 * any events whose send fails are collected and saved back.
 * NOTE(review): assumes {@code persistence.remove}/{@code save} and
 * {@code service.send} follow standard Rx contracts — confirm with their docs.
 */
@NonNull private Completable sendEvents(List<Event> events) {
  return persistence.remove(events)
      // re-emit the original list once removal has completed
      .toSingleDefault(events)
      .toObservable()
      .flatMapIterable(__ -> events)
      // build, per event, an Observable that is empty on success and
      // emits the event itself if service.send errors
      .map(event -> service.send(event)
          .toObservable()
          .flatMap(o -> Observable.empty())
          .cast(Event.class)
          .onErrorResumeNext(throwable -> Observable.just(event)))
      .toList()
      // merge all per-event Observables; the merged stream contains
      // exactly the events that failed to send
      .flatMap(observables -> Observable.merge(observables))
      .toList()
      // if nothing failed, skip the save step entirely
      .filter(failedEvents -> !failedEvents.isEmpty())
      // persist the failed events again — presumably for a later retry
      .flatMapCompletable(failedEvents -> persistence.save(failedEvents))
      .toCompletable();
}
}
/**
 * Executes multiple tasks in order.
 * Each task is run against the shared context; progress is reset on
 * subscription, ticked after each task, and finished on termination.
 * @param context the context shared by all tasks
 * @param tasks the tasks to execute
 * @param <C> the concrete context type
 * @return a Single emitting the context after all tasks have completed
 */
static <C extends FlowContext> Single<C> series(C context, Collection<? extends Task<C>> tasks) {
  return Observable.from(tasks)
      // trailing args presumably delayErrors=false, maxConcurrency=1
      // (i.e. strictly sequential execution) — TODO confirm against the
      // flatMapSingle overload used here
      .flatMapSingle(task -> task.call(context).toSingleDefault(context), false, 1)
      .doOnSubscribe(() -> context.progress.reset(tasks.size()))
      .doOnNext(c -> c.progress.tick())
      .doOnTerminate(context.progress::finish)
      // emit the context exactly once, even when 'tasks' is empty
      .lastOrDefault(context).toSingle();
}
}
.flatMapSingle(p -> store.rxGetOne(p.getRight()) .flatMap(crs -> merger.merge(crs, p.getLeft(), out) .toSingleDefault(Pair.of(1L, 0L)) // left: count, right: not_accepted .onErrorResumeNext(t -> { if (t instanceof IllegalStateException) {
/**
 * Imports a JSON file from the given input stream into the store.
 * The stream is passed through a UTF-8 BOM filter, parsed and split into
 * chunks; each chunk is added to the store while the read stream is paused
 * during processing.
 * @param f the JSON file to read
 * @param correlationId a unique identifier for this import process
 * @param filename the name of the file currently being imported
 * @param timestamp denotes when the import process has started
 * @param layer the layer where the file should be stored (may be null)
 * @param tags the list of tags to attach to the file (may be null)
 * @param properties the map of properties to attach to the file (may be null)
 * @return an observable that will emit the number 1 when a chunk has been imported
 */
protected Observable<Integer> importJSON(ReadStream<Buffer> f, String correlationId,
    String filename, long timestamp, String layer, List<String> tags,
    Map<String, Object> properties) {
  UTF8BomFilter filter = new UTF8BomFilter();
  StringWindow win = new StringWindow();
  GeoJsonSplitter splitter = new GeoJsonSplitter(win);
  AtomicInteger inFlight = new AtomicInteger(0);

  return f.toObservable()
      .map(Buffer::getDelegate)
      .map(filter::filter)
      .doOnNext(win::append)
      .compose(new JsonParserTransformer())
      .flatMap(splitter::onEventObservable)
      .flatMapSingle(chunk -> {
        IndexMeta meta = new IndexMeta(correlationId, filename, timestamp,
            tags, properties, null);
        return addToStoreWithPause(chunk, layer, meta, f, inFlight)
            .toSingleDefault(1);
      });
}
file.close(); }) .toSingleDefault(archivePath); });
timestamp, tags, properties, crsString); return addToStoreWithPause(result, layer, indexMeta, f, processing) .toSingleDefault(1); });
/**
 * Merges the given chunks with their metadata using a {@link MultiMerger}
 * and asserts that the merged output equals {@code jsonContents}.
 * The merger is initialized with every meta object before merging starts.
 * @param context the current test context
 * @param chunks the chunks to merge
 * @param metas the metadata describing each chunk (same order as chunks)
 * @param jsonContents the expected merged result
 */
private void doMerge(TestContext context, Observable<Buffer> chunks,
    Observable<ChunkMeta> metas, String jsonContents) {
  MultiMerger m = new MultiMerger(false);
  BufferWriteStream bws = new BufferWriteStream();
  Async async = context.async();
  metas
      // feed every meta into the merger first; re-emit it afterwards
      .flatMapSingle(meta -> m.init(meta).toSingleDefault(meta))
      .toList()
      // pair each chunk with its corresponding meta (positional zip)
      .flatMap(l -> chunks.map(DelegateChunkReadStream::new)
          .<ChunkMeta, Pair<ChunkReadStream, ChunkMeta>>zipWith(l, Pair::of))
      .flatMapCompletable(p -> m.merge(p.getLeft(), p.getRight(), bws))
      .toCompletable()
      .subscribe(() -> {
        // finalize the merged document and compare it to the expectation
        m.finish(bws);
        context.assertEquals(jsonContents, bws.getBuffer().toString("utf-8"));
        async.complete();
      }, context::fail);
}
s = metas; } else { s = metas.flatMapSingle(meta -> m.init(meta).toSingleDefault(meta));
/**
 * Merges the given chunks with their metadata using a {@link GeoJsonMerger}
 * and asserts that the merged output equals {@code jsonContents}.
 * @param context the current test context
 * @param chunks the chunks to merge
 * @param metas the metadata describing each chunk (same order as chunks)
 * @param jsonContents the expected merged result
 * @param optimistic whether the merger should run in optimistic mode
 */
private void doMerge(TestContext context, Observable<Buffer> chunks,
    Observable<GeoJsonChunkMeta> metas, String jsonContents, boolean optimistic) {
  GeoJsonMerger merger = new GeoJsonMerger(optimistic);
  BufferWriteStream out = new BufferWriteStream();
  Async done = context.async();

  // In optimistic mode the merger needs no initialization pass
  Observable<GeoJsonChunkMeta> prepared = optimistic
      ? metas
      : metas.flatMapSingle(meta -> merger.init(meta).toSingleDefault(meta));

  prepared.toList()
      // pair each chunk with its corresponding meta (positional zip)
      .flatMap(metaList -> chunks.map(DelegateChunkReadStream::new)
          .<GeoJsonChunkMeta, Pair<ChunkReadStream, GeoJsonChunkMeta>>zipWith(metaList, Pair::of))
      .flatMapCompletable(pair -> merger.merge(pair.getLeft(), pair.getRight(), out))
      .toCompletable()
      .subscribe(() -> {
        merger.finish(out);
        context.assertEquals(jsonContents, out.getBuffer().toString("utf-8"));
        done.complete();
      }, context::fail);
}
/**
 * Test if a service can be published and unpublished again
 * @param context the test context
 */
@Test
public void unpublish(TestContext context) {
  Vertx vertx = new Vertx(rule.vertx());
  Async async = context.async();
  ServiceDiscovery discovery = ServiceDiscovery.create(vertx);
  Service.publishOnce("A", "a", discovery, vertx)
      .andThen(Service.discover("A", discovery, vertx))
      .count()
      // after publishing, exactly one "A" service must be discoverable
      .doOnNext(count -> context.assertEquals(1, count))
      // discover again to obtain the service instance, then unpublish it
      .flatMap(v -> Service.discover("A", discovery, vertx))
      .flatMapSingle(service -> service.unpublish(discovery).toSingleDefault(0))
      // after unpublishing, discovery must yield no results
      .flatMap(v -> Service.discover("A", discovery, vertx))
      .count()
      .doOnTerminate(discovery::close)
      .subscribe(count -> {
        context.assertEquals(0, count);
        async.complete();
      }, context::fail);
}
}