/**
 * Parallel execution. Modifies the mutation ConcurrentHashMap in parallel based on a Callback.
 * <p>
 * Note: This could be replaced with Java 8 parallelStream and lambdas instead of the
 * Callback interface.
 * </p>
 *
 * @param objList  the elements to hand to the callback, one task each
 * @param callback invoked once per element, potentially from multiple threads concurrently
 */
private void executeInParallel(Collection<Object> objList, final Callback callback) {
    final SimultaneousExecutor pool = new SimultaneousExecutor(threadsPerCpu);
    for (final Object element : objList) {
        pool.execute(() -> callback.call(element));
    }
    try {
        // Block until every task finished; rethrow the first task failure, wrapped.
        pool.awaitSuccessfulCompletion();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Await successful completion of all submitted tasks. Throws the exception of the first
 * failed task (in submission order) if 1 or more tasks failed.
 *
 * After this call completes, the thread pool will be shut down.
 *
 * @throws ExecutionException   if a computation threw an exception
 * @throws InterruptedException if the current thread was interrupted while waiting
 */
public void awaitSuccessfulCompletion() throws InterruptedException, ExecutionException {
    awaitUninterruptibly();
    for (final Future<?> future : futures) {
        future.get(); // propagates the task's ExecutionException, if any
    }
}
/**
 * Awaits completion of all submitted tasks, refusing to be interrupted until they finish.
 *
 * After this call completes, the thread pool will be shut down. If the calling thread was
 * interrupted while waiting, the interrupt is not lost: its interrupt status is restored
 * before returning (the standard uninterruptible-wait idiom, cf. Guava's Uninterruptibles),
 * so callers/upstream code can still observe it.
 */
public void awaitUninterruptibly() {
    shutdown();
    boolean interrupted = false;
    while (!isTerminated()) {
        try {
            awaitTermination(1, TimeUnit.DAYS);
        } catch (final InterruptedException e) {
            // Keep waiting, but remember that an interrupt was requested.
            interrupted = true;
        }
    }
    if (interrupted) {
        // Restore the interrupt status swallowed above.
        Thread.currentThread().interrupt();
    }
}
// NOTE(review): truncated fragment — the Runnable body, the enclosing try, and the
// closing braces are missing (the `catch` below has no visible matching `try`).
// Visible pattern: build a HollowIndexerValueTraverser per worker on a
// SimultaneousExecutor sized to its core pool, then awaitSuccessfulCompletion()
// and wrap any failure in a RuntimeException. Do not edit without the full method.
SimultaneousExecutor executor = new SimultaneousExecutor(); final int numThreads = executor.getCorePoolSize(); executor.execute(new Runnable() { public void run() { HollowIndexerValueTraverser fromTraverser = new HollowIndexerValueTraverser(from, type, elementPaths); executor.awaitSuccessfulCompletion(); } catch (Exception ex) { throw new RuntimeException(ex);
/**
 * Verifies the executor can be reused: a first batch of jobs is drained with
 * awaitSuccessfulCompletionOfCurrentTasks(), then a second batch is submitted to the
 * same instance and drained with awaitSuccessfulCompletion().
 */
@Test
public void canBeReused() throws Exception {
    final SimultaneousExecutor executor = new SimultaneousExecutor();
    for (int i = 0; i < 4; i++) {
        executor.execute(new Job(false));
    }
    executor.awaitSuccessfulCompletionOfCurrentTasks();
    for (int i = 0; i < 4; i++) {
        executor.execute(new Job(false));
    }
    executor.awaitSuccessfulCompletion();
}
// NOTE(review): truncated fragment — the run() body, its enclosing try, and closing
// braces are missing (`catch(Throwable th)` has no visible matching `try`). Visible
// pattern: submit work to a SimultaneousExecutor, wait via
// awaitSuccessfulCompletionOfCurrentTasks(), wrap failures in RuntimeException,
// finally shutdown(). Do not edit without the full method.
SimultaneousExecutor executor = new SimultaneousExecutor(); final int numThreads = executor.getCorePoolSize(); executor.execute(new Runnable() { @Override public void run() { executor.awaitSuccessfulCompletionOfCurrentTasks(); } catch(Throwable th) { throw new RuntimeException(th); executor.shutdown();
// Streams a JSON array from `r` one element at a time via JsonArrayChunker and parses
// each element on the shared executor, up to maxSample records.
// Back-pressure: busy-waits (5 ms sleeps) while the executor's queue exceeds
// maxWorkQueue, so the chunker never runs far ahead of the parsers.
// NOTE(review): jsonObj.close() runs right after the task is submitted, while a worker
// may still be reading `currentObject` — presumably the chunker's chunk readers are
// memory-backed so close() is safe here; TODO confirm against JsonArrayChunker.
// NOTE(review): statement order (submit -> throttle -> close -> nextChunk) looks
// deliberate; left byte-identical.
protected void processFile(Reader r, int maxSample) throws Exception { JsonArrayChunker chunker = new JsonArrayChunker(r, executor); chunker.initialize(); int counter = 0; Reader jsonObj = chunker.nextChunk(); while(jsonObj != null && counter < maxSample) { final Reader currentObject = jsonObj; executor.execute(new Runnable() { public void run() { try { JsonFactory factory = new JsonFactory(); JsonParser parser = factory.createParser(currentObject); processRecord(parser); } catch(Exception e){ throw new RuntimeException(e); } } }); while(executor.getQueue().size() > maxWorkQueue) { Thread.sleep(5); } counter++; jsonObj.close(); jsonObj = chunker.nextChunk(); } executor.awaitSuccessfulCompletion(); }
/**
 * Verifies that awaitSuccessfulCompletion() rethrows when any submitted callable fails:
 * two failing tasks are submitted and the await call must not return normally.
 */
@Test
public void failsWhenAnyCallableThrowsException() throws Exception {
    final SimultaneousExecutor executor = new SimultaneousExecutor();
    final StatusEnsuringCallable firstTask = new StatusEnsuringCallable(false);
    final StatusEnsuringCallable secondTask = new StatusEnsuringCallable(false);
    executor.submit(firstTask);
    executor.submit(secondTask);
    try {
        executor.awaitSuccessfulCompletion();
        fail("Should fail");
    } catch (final Exception expected) {
        // expected: the first task failure is propagated to the awaiting thread
    }
}
/**
 * Concurrency stress test for SparseBitSet: partitions [0, maxValue] into contiguous
 * ranges, one Task per pool thread setting its range (presumably the even values, per
 * the test name — Task's definition is elsewhere), then compacts and checks the final
 * cardinality. Repeated 10 times to increase the chance of exposing races.
 */
@Test
public void testEvenNumbersMultipleThread() {
    for (int j = 0; j < 10; j++) {
        int maxValue = 500000;
        sparseBitSet = new HollowSparseIntegerSet.SparseBitSet(maxValue);
        SimultaneousExecutor executor = new SimultaneousExecutor();
        int parallelism = executor.getMaximumPoolSize();
        int taskSize = maxValue / parallelism;
        for (int i = 0; i < parallelism; i++) {
            int from = i * taskSize;
            int to = (from + taskSize) - 1;
            // The last worker absorbs the division remainder so the whole range is covered.
            if (i == (parallelism - 1))
                to = maxValue;
            executor.submit(new Task(sparseBitSet, from, to));
        }
        executor.awaitUninterruptibly();
        HollowSparseIntegerSet.SparseBitSet.compact(sparseBitSet);
        // assertEquals reports the actual cardinality on failure, unlike assertTrue(x == y).
        Assert.assertEquals(250001, sparseBitSet.cardinality());
    }
}
// NOTE(review): truncated fragment — the run() body, its enclosing try, and closing
// braces are missing (the `catch` has no visible matching `try`). Visible pattern:
// size work by the executor's core pool, wait via
// awaitSuccessfulCompletionOfCurrentTasks(), wrap failures in RuntimeException.
// Do not edit without the full method.
final int numThreads = executor.getCorePoolSize(); executor.execute(new Runnable() { public void run() { executor.awaitSuccessfulCompletionOfCurrentTasks(); } catch(Exception e) { throw new RuntimeException(e);
/**
 * Builds, per schema, the set of ordinals that must be excluded from the copy because
 * they are populated with different records between the two states, then widens each
 * exclusion set to everything referencing it from outside the closure.
 *
 * @return map from type name to the ordinals excluded from the copy
 */
private Map<String, BitSet> discoverChangedOrdinalsBetweenStates() {
    final SimultaneousExecutor executor = new SimultaneousExecutor();
    final Map<String, BitSet> excludeOrdinalsFromCopy = new HashMap<String, BitSet>();
    for (final HollowSchema schema : schemas) {
        final BitSet changedOrdinals = findOrdinalsPopulatedWithDifferentRecords(schema.getName(), executor);
        excludeOrdinalsFromCopy.put(schema.getName(), changedOrdinals);
    }
    TransitiveSetTraverser.addReferencingOutsideClosure(from, excludeOrdinalsFromCopy);
    return excludeOrdinalsFromCopy;
}
/**
 * Remaps the ordinals of every historical state in parallel. Work is striped across the
 * pool: thread k handles historical states k, k + numThreads, k + 2*numThreads, ...
 * Results are written into the caller-supplied arrays, indexed by state position.
 *
 * @param remapper                  ordinal remapper applied to each state
 * @param remappedDataAccesses      output: remapped data access per historical state
 * @param remappedKeyOrdinalMappings output: remapped key-ordinal mapping per historical state
 */
private void remapHistoricalStateOrdinals(final DiffEqualityMappingOrdinalRemapper remapper, final HollowHistoricalStateDataAccess[] remappedDataAccesses, final HollowHistoricalStateKeyOrdinalMapping[] remappedKeyOrdinalMappings) {
    final SimultaneousExecutor executor = new SimultaneousExecutor();
    final int threadCount = executor.getCorePoolSize();
    for (int threadIdx = 0; threadIdx < threadCount; threadIdx++) {
        final int stripe = threadIdx;
        executor.execute(() -> {
            for (int stateIdx = stripe; stateIdx < historicalStates.size(); stateIdx += threadCount) {
                final HollowHistoricalState stateToRemap = historicalStates.get(stateIdx);
                remappedDataAccesses[stateIdx] = creator.copyButRemapOrdinals(stateToRemap.getDataAccess(), remapper);
                remappedKeyOrdinalMappings[stateIdx] = stateToRemap.getKeyOrdinalMapping().remap(remapper);
            }
        });
    }
    try {
        executor.awaitSuccessfulCompletion();
    } catch (InterruptedException | ExecutionException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Checks that JsonArrayChunker splits a top-level JSON array into its element objects,
 * undisturbed by escaped quotes and unbalanced brace characters inside string values.
 */
@Test
public void test() throws Exception {
    final String jsonArray = "[ { \"f1\\\"\" : \"value1\", \"f2\" : { \"f1.1\" : \"hel}}{{{{lo \\\"w{orld\\\"\" } } , { \"obj2\" : \"f2.1\" } ]";
    final JsonArrayChunker chunker = new JsonArrayChunker(new StringReader(jsonArray), new SimultaneousExecutor(), 4);
    chunker.initialize();
    final String firstChunk = IOUtils.toString(chunker.nextChunk());
    final String secondChunk = IOUtils.toString(chunker.nextChunk());
    Assert.assertEquals("{ \"f1\\\"\" : \"value1\", \"f2\" : { \"f1.1\" : \"hel}}{{{{lo \\\"w{orld\\\"\" } }", firstChunk);
    Assert.assertEquals("{ \"obj2\" : \"f2.1\" }", secondChunk);
}
/**
 * Rehashes the record keys of every type key index, one parallel task per type.
 * Blocks until all rehash tasks complete; the first task failure is rethrown wrapped
 * in a RuntimeException.
 */
private void rehashKeys() {
    final SimultaneousExecutor executor = new SimultaneousExecutor();
    for (final HollowHistoryTypeKeyIndex keyIndex : typeKeyIndexes.values()) {
        executor.execute(keyIndex::hashRecordKeys);
    }
    try {
        executor.awaitSuccessfulCompletion();
    } catch (InterruptedException | ExecutionException e) {
        throw new RuntimeException(e);
    }
}
// NOTE(review): truncated fragment — the lambda body after nextMutation.getAndIncrement()
// and the enclosing try/braces are missing (the `catch` has no visible matching `try`).
// Visible pattern: one worker per core-pool thread pulling mutation indices from an
// AtomicInteger work queue, each with its own FlatRecordDumper; failures wrapped in
// RuntimeException after awaitSuccessfulCompletion(). Do not edit without the full method.
SimultaneousExecutor executor = new SimultaneousExecutor(threadsPerCpu); for(int i=0;i<executor.getCorePoolSize();i++) { executor.execute(() -> { FlatRecordDumper flatRecordDumper = null; int currentMutationIdx = nextMutation.getAndIncrement(); executor.awaitSuccessfulCompletion(); } catch(Exception e) { throw new RuntimeException(e);
// NOTE(review): truncated fragment — the per-schema Runnable body after getTypeState()
// and the closing braces are missing (the `catch` has no visible matching `try`).
// Visible pattern: one task per schema copying unchanged data from the `from` read
// state; failures wrapped in RuntimeException after awaitSuccessfulCompletion().
// Do not edit without the full method.
private void copyUnchangedDataToIntermediateState() { SimultaneousExecutor executor = new SimultaneousExecutor(); for(final HollowSchema schema : schemas) { executor.execute(new Runnable() { public void run() { HollowTypeReadState fromTypeState = from.getTypeState(schema.getName()); executor.awaitSuccessfulCompletion(); } catch (Exception e) { throw new RuntimeException(e);
// NOTE(review): truncated fragment — the loop that declares `i`, the run() body, and
// the closing braces are missing (`threadId = i` references an undeclared variable and
// the `catch` has no visible matching `try`). Visible pattern: per-thread diff
// traversal rooted at `type`, striped by thread id over the core pool; failures
// wrapped in RuntimeException. Do not edit without the full method.
final HollowDiffNodeIdentifier rootId = new HollowDiffNodeIdentifier(type); SimultaneousExecutor executor = new SimultaneousExecutor(); final int numThreads = executor.getCorePoolSize(); final int threadId = i; executor.execute(new Runnable() { @Override public void run() { executor.awaitSuccessfulCompletion(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e);
/**
 * Refreshes every type key index against the latest read state, one parallel task per
 * type. Blocks until all updates finish; the first failure is rethrown wrapped in a
 * RuntimeException.
 *
 * @param latestStateEngine source of the per-type read states
 * @param isDelta           whether this update applies a delta (passed through to each index)
 */
private void updateTypeIndexes(final HollowReadStateEngine latestStateEngine, final boolean isDelta) {
    final SimultaneousExecutor executor = new SimultaneousExecutor();
    for (final Map.Entry<String, HollowHistoryTypeKeyIndex> indexEntry : typeKeyIndexes.entrySet()) {
        executor.execute(() -> {
            final HollowObjectTypeReadState latestTypeState = (HollowObjectTypeReadState) latestStateEngine.getTypeState(indexEntry.getKey());
            indexEntry.getValue().update(latestTypeState, isDelta);
        });
    }
    try {
        executor.awaitSuccessfulCompletion();
    } catch (InterruptedException | ExecutionException e) {
        throw new RuntimeException(e);
    }
}
// NOTE(review): truncated fragment — the surrounding loops, `threadNumber` declaration,
// the lambda body, and the enclosing try are all missing (the `catch` has no visible
// matching `try`). Visible pattern: clear hash slots to -1, then stripe the ordinal
// space across a pool sized at 1.5x cores; failures wrapped in RuntimeException.
// Do not edit without the full method.
hashedToOrdinals.set(i, -1); SimultaneousExecutor executor = new SimultaneousExecutor(1.5d); final int numThreads = executor.getCorePoolSize(); executor.execute(() -> { for(int t=threadNumber;t<ordinalSpaceLength;t+=numThreads) { if(toPopulatedOrdinals.get(t)) { executor.awaitSuccessfulCompletion(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e);
/**
 * Prepares the diff in parallel on a dedicated "hollow-diff-prepare" pool sized
 * 1 + typeDiffs.size(): one task warms the equality-ordinal map for every type while
 * one task per type calculates its matches. Blocks until all tasks succeed (first
 * failure rethrown wrapped in a RuntimeException), then marks the equality mapping
 * prepared.
 */
private void prepareForDiffCalculation() {
    final SimultaneousExecutor executor = new SimultaneousExecutor(1 + typeDiffs.size(), "hollow-diff-prepare");
    // Warm every type's equal-ordinal map on its own task, concurrently with the match
    // calculations below.
    executor.execute(() -> {
        for (final HollowTypeDiff diff : typeDiffs.values()) {
            equalityMapping.getEqualOrdinalMap(diff.getTypeName());
        }
    });
    for (final HollowTypeDiff diff : typeDiffs.values()) {
        executor.execute(diff::calculateMatches);
    }
    try {
        executor.awaitSuccessfulCompletion();
    } catch (InterruptedException | ExecutionException e) {
        throw new RuntimeException(e);
    }
    equalityMapping.markPrepared();
}