/**
 * Invokes tasks in a thread pool, then unwraps the resulting {@link Future Futures}.
 *
 * <p>Any task exception is wrapped in {@link IOException}.
 *
 * @param tasks the tasks to run concurrently on {@code executorService}
 * @return the task results, in the iteration order of {@code tasks}
 * @throws IOException if any task fails or the calling thread is interrupted while waiting
 */
private <T> List<T> callTasks(Collection<Callable<T>> tasks) throws IOException {
  try {
    List<CompletionStage<T>> futures = new ArrayList<>(tasks.size());
    for (Callable<T> task : tasks) {
      futures.add(MoreFutures.supplyAsync(task::call, executorService));
    }
    return MoreFutures.get(MoreFutures.allAsList(futures));
  } catch (ExecutionException e) {
    // Unwrap so callers see the task's own exception rather than the executor wrapper.
    if (e.getCause() != null) {
      if (e.getCause() instanceof IOException) {
        throw (IOException) e.getCause();
      }
      throw new IOException(e.getCause());
    }
    throw new IOException(e);
  } catch (InterruptedException e) {
    // Restore the interrupt flag for callers up the stack.
    Thread.currentThread().interrupt();
    // Preserve the InterruptedException as the cause (it was previously dropped).
    throw new IOException("executor service was interrupted", e);
  }
}
}
/**
 * Executes the given batch requests concurrently on a bounded thread pool and blocks until
 * all of them complete.
 *
 * @param batches the batch GCS requests to execute
 * @throws FileNotFoundException if any batch fails with a missing-file error
 * @throws IOException if any other batch fails, or the calling thread is interrupted
 */
private static void executeBatches(List<BatchRequest> batches) throws IOException {
  ExecutorService executor =
      MoreExecutors.listeningDecorator(
          new ThreadPoolExecutor(
              MAX_CONCURRENT_BATCHES,
              MAX_CONCURRENT_BATCHES,
              0L,
              TimeUnit.MILLISECONDS,
              new LinkedBlockingQueue<>()));
  try {
    // Submit inside the try block so the executor is shut down even if submission itself
    // fails (e.g. RejectedExecutionException); previously that path leaked the pool.
    List<CompletionStage<Void>> futures = new ArrayList<>(batches.size());
    for (final BatchRequest batch : batches) {
      futures.add(MoreFutures.runAsync(batch::execute, executor));
    }
    MoreFutures.get(MoreFutures.allAsList(futures));
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
    throw new IOException("Interrupted while executing batch GCS request", e);
  } catch (ExecutionException e) {
    // Surface missing files as FileNotFoundException so callers can distinguish them.
    if (e.getCause() instanceof FileNotFoundException) {
      throw (FileNotFoundException) e.getCause();
    }
    throw new IOException("Error executing batch GCS request", e);
  } finally {
    executor.shutdown();
  }
}
/** Like {@link CompletableFuture#allOf} but returning the result of constituent futures. */ public static <T> CompletionStage<List<T>> allAsList( Collection<? extends CompletionStage<? extends T>> futures) { // CompletableFuture.allOf completes exceptionally if any of the futures do. // We have to gather the results separately. CompletionStage<Void> blockAndDiscard = CompletableFuture.allOf(futuresToCompletableFutures(futures)); return blockAndDiscard.thenApply( nothing -> futures .stream() .map(future -> future.toCompletableFuture().join()) .collect(Collectors.toList())); }
/**
 * Stages the given artifact files to the staging service.
 *
 * @return The artifact staging token returned by the service
 */
public String stage(String stagingSessionToken, Collection<StagedFile> files)
    throws IOException, InterruptedException {
  LOG.info("Staging {} files (token: {})", files.size(), stagingSessionToken);
  // Kick off one asynchronous upload per file, keyed by the file it stages.
  final Map<StagedFile, CompletionStage<ArtifactMetadata>> pendingUploads = new HashMap<>();
  for (StagedFile file : files) {
    CompletionStage<ArtifactMetadata> upload =
        MoreFutures.supplyAsync(new StagingCallable(stagingSessionToken, file), executorService);
    pendingUploads.put(file, upload);
  }
  // Once every upload settles, fold the per-file outcomes into a single StagingResult.
  CompletionStage<StagingResult> stagingResult =
      MoreFutures.allAsList(pendingUploads.values())
          .thenApply(ignored -> new ExtractStagingResultsCallable(pendingUploads).call());
  return stageManifest(stagingSessionToken, stagingResult);
}
@Test
public void supplyAsyncSuccess() throws Exception {
  // A supplier that completes normally should surface its value through get().
  CompletionStage<Integer> result = MoreFutures.supplyAsync(() -> 42);
  assertThat(MoreFutures.get(result), equalTo(42));
}
MoreFutures.allAsList(destinationPackages); boolean finished = false; do { try { MoreFutures.get(stagingFutures, 3L, TimeUnit.MINUTES); finished = true; } catch (TimeoutException e) { List<DataflowPackage> stagedPackages = MoreFutures.get(stagingFutures); LOG.info( "Staging files complete: {} files cached, {} files newly uploaded",
/**
 * Shorthand for {@link #supplyAsync(ThrowingSupplier, ExecutorService)} using {@link
 * ForkJoinPool#commonPool()}.
 */
public static <T> CompletionStage<T> supplyAsync(ThrowingSupplier<T> supplier) {
  // Delegate to the explicit-executor overload with the JVM-wide common pool.
  ExecutorService commonPool = ForkJoinPool.commonPool();
  return supplyAsync(supplier, commonPool);
}
@Override
public StagingResult call() {
  // All futures are already complete when this runs; partition them into
  // successfully-staged metadata and per-file failures.
  Set<ArtifactMetadata> metadata = new HashSet<>();
  Map<StagedFile, Throwable> failures = new HashMap<>();
  for (Entry<StagedFile, CompletionStage<ArtifactMetadata>> entry : futures.entrySet()) {
    try {
      metadata.add(MoreFutures.get(entry.getValue()));
    } catch (ExecutionException e) {
      // Record the underlying failure, not the executor wrapper.
      failures.put(entry.getKey(), e.getCause());
    } catch (InterruptedException e) {
      throw new AssertionError(
          "This should never happen. " + "All of the futures are complete by construction", e);
    }
  }
  return failures.isEmpty() ? StagingResult.success(metadata) : StagingResult.failure(failures);
}
}
/**
 * Shorthand for {@link #runAsync(ThrowingRunnable, ExecutorService)} using {@link
 * ForkJoinPool#commonPool()}.
 */
public static CompletionStage<Void> runAsync(ThrowingRunnable runnable) {
  // Delegate to the explicit-executor overload with the JVM-wide common pool.
  ExecutorService commonPool = ForkJoinPool.commonPool();
  return runAsync(runnable, commonPool);
}
@Test
public void supplyAsyncFailure() throws Exception {
  final String testMessage = "this is just a test";
  // A supplier that throws should surface as ExecutionException from get(),
  // with the original exception as its cause.
  CompletionStage<Long> failing =
      MoreFutures.supplyAsync(
          () -> {
            throw new IllegalStateException(testMessage);
          });
  thrown.expect(ExecutionException.class);
  thrown.expectCause(isA(IllegalStateException.class));
  thrown.expectMessage(testMessage);
  MoreFutures.get(failing);
}
@Test
public void runAsyncFailure() throws Exception {
  final String testMessage = "this is just a test";
  // A runnable that throws should surface as ExecutionException from get(),
  // with the original exception as its cause.
  CompletionStage<Void> failing =
      MoreFutures.runAsync(
          () -> {
            throw new IllegalStateException(testMessage);
          });
  thrown.expect(ExecutionException.class);
  thrown.expectCause(isA(IllegalStateException.class));
  thrown.expectMessage(testMessage);
  MoreFutures.get(failing);
}
}
/** Stages one file ("package") if necessary. */
public CompletionStage<StagingResult> stagePackage(
    final PackageAttributes attributes,
    final Sleeper retrySleeper,
    final CreateOptions createOptions) {
  // Run the blocking staging work on the configured executor and expose it as a future.
  return MoreFutures.supplyAsync(
      () -> {
        return stagePackageSynchronously(attributes, retrySleeper, createOptions);
      },
      executorService);
}
/**
 * Stages the given bytes as a file at the target location, blocking until staging completes.
 *
 * @return the staged package's destination
 */
public DataflowPackage stageToFile(
    byte[] bytes, String target, String stagingPath, CreateOptions createOptions) {
  PackageAttributes attributes = PackageAttributes.forBytesToStage(bytes, target, stagingPath);
  try {
    // Block on the asynchronous staging and unwrap the resulting destination.
    return MoreFutures.get(stagePackage(attributes, DEFAULT_SLEEPER, createOptions))
        .getPackageAttributes()
        .getDestination();
  } catch (InterruptedException e) {
    // Restore the interrupt flag before converting to an unchecked exception.
    Thread.currentThread().interrupt();
    throw new RuntimeException("Interrupted while staging pipeline", e);
  } catch (ExecutionException e) {
    // Rethrow the task's own failure rather than the executor wrapper.
    throw new RuntimeException("Error while staging pipeline", e.getCause());
  }
}
/** Asynchronously computes {@link PackageAttributes} for a single staged file. */
private CompletionStage<PackageAttributes> computePackageAttributes(
    final DataflowPackage source, final String stagingPath) {
  return MoreFutures.supplyAsync(
      () -> {
        File file = new File(source.getLocation());
        // Fail eagerly with a clear message rather than at upload time.
        if (!file.exists()) {
          throw new FileNotFoundException(
              String.format("Non-existent file to stage: %s", file.getAbsolutePath()));
        }
        PackageAttributes attributes = PackageAttributes.forFileToStage(file, stagingPath);
        // An explicit name on the source package overrides the derived one.
        return source.getName() == null
            ? attributes
            : attributes.withPackageName(source.getName());
      },
      executorService);
}
throws InterruptedException { try { StagingResult stagingResult = MoreFutures.get(stagingFuture); if (stagingResult.isSuccess()) { LOG.info(
/** Like {@link #allAsList} but return a list . */ public static <T> CompletionStage<List<ExceptionOrResult<T>>> allAsListWithExceptions( Collection<? extends CompletionStage<? extends T>> futures) { // CompletableFuture.allOf completes exceptionally if any of the futures do. // We have to gather the results separately. CompletionStage<Void> blockAndDiscard = CompletableFuture.allOf(futuresToCompletableFutures(futures)) .whenComplete((ignoredValues, arbitraryException) -> {}); return blockAndDiscard.thenApply( nothing -> futures .stream() .map( future -> { // The limited scope of the exceptions wrapped allows CancellationException // to still be thrown. try { return ExceptionOrResult.<T>result(future.toCompletableFuture().join()); } catch (CompletionException exc) { return ExceptionOrResult.<T>exception(exc); } }) .collect(Collectors.toList())); }