Refine search
// Fire pushBigRedButton() on a background pool and react as soon as it completes.
ListeningExecutorService service =
    MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(10));
ListenableFuture<Explosion> explosion = service.submit(new Callable<Explosion>() {
  public Explosion call() {
    return pushBigRedButton();
  }
});
Futures.addCallback(explosion, new FutureCallback<Explosion>() {
  // we want this handler to run immediately after we push the big red button!
  public void onSuccess(Explosion explosion) {
    walkAwayFrom(explosion);
  }
  public void onFailure(Throwable thrown) {
    battleArchNemesis(); // escaped the explosion!
  }
  // was: two-arg addCallback — deprecated in Guava 19 and removed in 26+.
  // directExecutor() preserves the original "run immediately on the completing
  // thread" semantics; use a real executor for heavyweight callbacks.
}, MoreExecutors.directExecutor());
/**
 * Forwards the forced-shutdown request to the wrapped executor and returns
 * the tasks that were queued but never started.
 */
@Override
public List<Runnable> shutdownNow() {
  final List<Runnable> neverStarted = delegate.shutdownNow();
  return neverStarted;
}
/**
 * Stops every in-flight send task, then shuts down the executor that ran them.
 */
public void destroy() {
  // Iterate over a snapshot so task.shutdown() cannot race with map mutation.
  for (final SendMessageTask task : Lists.newArrayList(sendMessageTasks.values())) {
    task.shutdown();
  }
  sendMessageExecutor.shutdown();
}
}
@Override
public void run() {
  try {
    // Request an orderly shutdown first, then escalate to shutdownNow()
    // every 5 seconds until the pool actually terminates.
    executorService.shutdown();
    while (!executorService.awaitTermination(5, TimeUnit.SECONDS)) {
      executorService.shutdownNow();
    }
  } catch (final InterruptedException ex) {
    // Restore the interrupt flag for callers further up the stack.
    Thread.currentThread().interrupt();
  }
}
});
Lists.newArrayList( new IntervalLoadRule( Intervals.of("2012-01-01T00:00:00.000Z/2012-01-01T12:00:00.000Z"), ListeningExecutorService exec = MoreExecutors.listeningDecorator( Executors.newFixedThreadPool(1)); BalancerStrategy balancerStrategy = new CostBalancerStrategyFactory().createBalancerStrategy(exec); exec.shutdown(); EasyMock.verify(emitter); EasyMock.verify(mockPeon);
/**
 * Runs the same filter check concurrently on EXECUTOR_NUM_TASKS tasks to
 * shake out thread-safety problems, failing the test on any task error.
 *
 * @param filter       filter under test
 * @param expectedRows rows each runner is expected to produce
 */
private void testWithExecutor(final DimFilter filter, final List<String> expectedRows) {
  ListeningExecutorService executor =
      MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(EXECUTOR_NUM_THREADS));
  try {
    List<ListenableFuture<?>> futures = new ArrayList<>();
    for (int i = 0; i < EXECUTOR_NUM_TASKS; i++) {
      Runnable runnable = makeFilterRunner(filter, expectedRows);
      // was: raw ListenableFuture — wildcard avoids unchecked warnings
      ListenableFuture<?> fut = executor.submit(runnable);
      futures.add(fut);
    }
    try {
      Futures.allAsList(futures).get(60, TimeUnit.SECONDS);
    } catch (Exception ex) {
      Assert.fail(ex.getMessage());
    }
  } finally {
    // was: shutdown() outside any finally — the pool leaked if a future failed
    executor.shutdown();
  }
}
}
/**
 * Fetches S3 log metadata for every service/prefix pair in parallel.
 *
 * Sizes a dedicated thread pool at min(total prefixes, configured max),
 * delegates the actual fetching to getS3LogsWithExecutorService, sorts the
 * combined results, and always tears the pool down.
 *
 * @param s3Configuration     S3 client/thread-pool configuration
 * @param servicesToPrefixes  prefixes to scan, grouped by service
 * @param search              the search request (pagination hints, filters)
 * @param paginated           whether continuation-token pagination is in effect
 * @return sorted logs plus per-prefix continuation tokens
 */
private SingularityS3SearchResult getS3Logs(S3Configuration s3Configuration,
    Map<SingularityS3Service, Set<String>> servicesToPrefixes,
    final SingularityS3SearchRequest search,
    final boolean paginated) throws InterruptedException, ExecutionException, TimeoutException {
  // Count prefixes up front so we can size the pool and short-circuit empty searches.
  int totalPrefixCount = 0;
  for (Map.Entry<SingularityS3Service, Set<String>> entry : servicesToPrefixes.entrySet()) {
    totalPrefixCount += entry.getValue().size();
  }
  if (totalPrefixCount == 0) {
    return SingularityS3SearchResult.empty();
  }
  // One thread per prefix, capped by configuration; named for debuggability.
  ListeningExecutorService executorService = MoreExecutors.listeningDecorator(
      Executors.newFixedThreadPool(Math.min(totalPrefixCount, s3Configuration.getMaxS3Threads()),
          new ThreadFactoryBuilder().setNameFormat("S3LogFetcher-%d").build()));
  try {
    // Written concurrently by the fetch tasks, hence the concurrent map.
    final ConcurrentHashMap<String, ContinuationToken> continuationTokens = new ConcurrentHashMap<>();
    List<SingularityS3LogMetadata> logs = Lists.newArrayList(
        getS3LogsWithExecutorService(s3Configuration, executorService, servicesToPrefixes,
            totalPrefixCount, search, continuationTokens, paginated));
    Collections.sort(logs, LOG_COMPARATOR);
    return new SingularityS3SearchResult(continuationTokens,
        isFinalPageForAllPrefixes(continuationTokens.values()), logs);
  } finally {
    // Always tear the pool down, even on timeout or failure.
    executorService.shutdownNow();
  }
}
/**
 * Round-trips EntitySpec serialization from 100 concurrent tasks to
 * surface any thread-safety problems.
 */
@Test
public void testEntitySpecManyConcurrently() throws Exception {
  ListeningExecutorService executor =
      MoreExecutors.listeningDecorator(Executors.newCachedThreadPool());
  try {
    List<ListenableFuture<Void>> futures = Lists.newArrayList();
    for (int i = 0; i < 100; i++) {
      futures.add(executor.submit(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
          EntitySpec<?> spec = EntitySpec.create(TestEntity.class);
          assertSerializeAndDeserialize(spec);
          return null;
        }
      }));
    }
    // Propagates the first task failure, if any.
    Futures.allAsList(futures).get();
  } finally {
    executor.shutdownNow();
  }
}
/**
 * Attempt to consume the specified number of messages
 *
 * @param topic Topic to consume
 * @param consumer Consumer to use
 * @param numMessagesToConsume Number of messages to consume
 * @param <K> Type of Key
 * @param <V> Type of Value
 * @return ListenableFuture
 */
public <K, V> ListenableFuture<List<ConsumerRecord<K, V>>> consume(String topic, KafkaConsumer<K, V> consumer, int numMessagesToConsume) {
  consumer.subscribe(Lists.newArrayList(topic));
  ListeningExecutorService executor =
      MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());
  try {
    return executor.submit(new RecordConsumer<>(numMessagesToConsume, consumer));
  } finally {
    // was: executor never shut down — its non-daemon worker thread leaked on
    // every call. shutdown() still lets the already-submitted task run to
    // completion before the thread exits.
    executor.shutdown();
  }
}
MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(testThreadsCount)); List<ListenableFuture<?>> futures = Lists.newArrayListWithExpectedSize(testThreadsCount); for (int i = 0; i < testThreadsCount; i++) { futures.add( executor.submit( new Runnable() { @Override executor.shutdown(); executor.awaitTermination(1, TimeUnit.SECONDS); Futures.allAsList(futures).get(); } catch (ExecutionException e) { Throwable cause = e.getCause();
return future; } else { List<Pair<SettableFuture, Object>> tasks = Lists.newArrayList(taskQueue.iterator()); Collections.shuffle(tasks, new Random(0)); delegate.submit((Callable) pair.rhs) : delegate.submit((Runnable) pair.rhs); Futures.addCallback( future, new FutureCallback() delegate.submit((Callable) task) : (ListenableFuture<T>) delegate.submit((Runnable) task);
throw new ISE("No sink for identifier: %s", identifier); final List<FireHydrant> hydrants = Lists.newArrayList(sink); currentHydrants.put(identifier.toString(), hydrants.size()); numPersistedRows += sink.getNumRowsInMemory(); final Stopwatch runExecStopwatch = Stopwatch.createStarted(); final Stopwatch persistStopwatch = Stopwatch.createStarted(); final ListenableFuture<Object> future = persistExecutor.submit( new ThreadRenamingCallable<Object>(threadName)
this.executorService = MoreExecutors.listeningDecorator(threadPoolExecutor); this.preemptionQueue = new PriorityBlockingQueue<>(numExecutors, new PreemptionQueueComparator()); ExecutorService wes = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder() .setDaemon(true).setNameFormat(WAIT_QUEUE_SCHEDULER_THREAD_NAME_FORMAT).build()); this.waitQueueExecutorService = MoreExecutors.listeningDecorator(wes); ExecutorService executionCompletionExecutorServiceRaw = Executors.newFixedThreadPool(1, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("ExecutionCompletionThread #%d") .build()); executionCompletionExecutorService = MoreExecutors.listeningDecorator( executionCompletionExecutorServiceRaw); ListenableFuture<?> future = waitQueueExecutorService.submit(new WaitQueueWorker()); Futures.addCallback(future, new WaitQueueWorkerCallback());
@Test public void interruptTest() throws InterruptedException { ListeningExecutorService executor = MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor()); final ListenableFuture<Object> future = executor.submit(() -> { // try { System.out.println("Waiting"); Thread.sleep(10000); System.out.println("Complete"); return null; // } catch (Exception e) { // e.printStackTrace(); // throw e; // } }); future.addListener(() -> { System.out.println("Listener: " + future.isCancelled() + ", " + future.isDone()); try { future.get(); } catch (InterruptedException e) { throw new RuntimeException(e); } catch (ExecutionException e) { throw new RuntimeException(e); } }, MoreExecutors.directExecutor()); Thread.sleep(1000); future.cancel(true); }
/**
 * Dev-only scratch test: submits a no-op task and attaches a no-op listener
 * that runs on the same pool.
 */
@Test
@Ignore("convenient trial tool for dev")
public void test1() throws Exception {
  ExecutorService executorService = Executors.newFixedThreadPool(10);
  // was: raw ListenableFuture — wildcard type avoids unchecked warnings
  ListenableFuture<?> futureTask =
      MoreExecutors.listeningDecorator(executorService).submit(new Runnable() {
        @Override
        public void run() {
        }
      });
  // NOTE(review): the pool is intentionally left running — shutting it down
  // here could reject the listener submission if the future completes late.
  futureTask.addListener(new Runnable() {
    @Override
    public void run() {
    }
  }, executorService);
}
/**
 * Creates a fresh executor and two temporary spill directories per test;
 * all three are registered with the closer for teardown (in reverse order).
 */
@BeforeMethod
public void setUp() {
  executor = MoreExecutors.listeningDecorator(Executors.newCachedThreadPool());
  closer.register(executor::shutdownNow);
  spillPath1 = Files.createTempDir();
  closer.register(() -> deleteRecursively(spillPath1.toPath(), ALLOW_INSECURE));
  spillPath2 = Files.createTempDir();
  closer.register(() -> deleteRecursively(spillPath2.toPath(), ALLOW_INSECURE));
}
persistExecutor = MoreExecutors.listeningDecorator(executorService); } else { persistExecutor = Execs.directExecutor(); final IncrementalIndex persistIndex = index; persistFutures.add( persistExecutor.submit( new ThreadRenamingRunnable(StringUtils.format("%s-persist", file.getName())) List<QueryableIndex> indexes = Lists.newArrayListWithCapacity(indexCount); final File mergedBase; Futures.allAsList(persistFutures).get(1, TimeUnit.HOURS); persistExecutor.shutdown(); index.close(); if (persistExecutor != null) { persistExecutor.shutdownNow();
/**
 * Verifies and merges checkpoints for every task group in parallel on
 * workerExec, waiting at most futureTimeoutInSeconds for all to finish.
 *
 * @param taskGroupsToVerify groups to process
 * @throws RuntimeException wrapping any interruption, task failure, or timeout
 */
private void verifyAndMergeCheckpoints(final Collection<TaskGroup> taskGroupsToVerify) {
  final List<ListenableFuture<?>> futures = new ArrayList<>();
  for (TaskGroup taskGroup : taskGroupsToVerify) {
    futures.add(workerExec.submit(() -> verifyAndMergeCheckpoints(taskGroup)));
  }
  try {
    Futures.allAsList(futures).get(futureTimeoutInSeconds, TimeUnit.SECONDS);
  } catch (InterruptedException e) {
    // was: interrupt status swallowed — restore it before propagating
    Thread.currentThread().interrupt();
    throw new RuntimeException(e);
  } catch (ExecutionException | TimeoutException e) {
    throw new RuntimeException(e);
  }
}
if (completedSuccessfully.get()) { return immediateFuture(null); return immediateFailedFuture(new PrestoException(TRANSACTION_ALREADY_ABORTED, "Current transaction has already been aborted")); ListenableFuture<?> future = Futures.allAsList(connectorIdToMetadata.values().stream() .map(transactionMetadata -> finishingExecutor.submit(transactionMetadata::commit)) .collect(toList())); addExceptionCallback(future, throwable -> { .filter(entry -> !entry.getKey().equals(writeConnectorId)) .map(Entry::getValue) .map(transactionMetadata -> finishingExecutor.submit(transactionMetadata::commit)) .collect(toList())); addExceptionCallback(future, throwable -> log.error(throwable, "Read-only connector should not throw exception on commit")); ListenableFuture<?> commitFuture = finishingExecutor.submit(writeConnector::commit); ListenableFuture<?> readOnlyCommitFuture = Futures.transformAsync(commitFuture, ignored -> commitReadOnlyConnectors.get(), directExecutor()); addExceptionCallback(readOnlyCommitFuture, this::abortInternal); return nonCancellationPropagating(readOnlyCommitFuture);
private List<CloseableIterator<Entry<KeyType>>> parallelSortAndGetGroupersIterator() final ListenableFuture<List<CloseableIterator<Entry<KeyType>>>> future = Futures.allAsList( groupers.stream() .map(grouper -> executor.submit( new AbstractPrioritizedCallable<CloseableIterator<Entry<KeyType>>>(priority) return hasQueryTimeout ? future.get(timeout, TimeUnit.MILLISECONDS) : future.get(); future.cancel(true); throw new QueryInterruptedException(e);