/** Verifies that a StopWatch obtained from the static factory is already running. */
@Test
public void testStopWatchStatic() {
    final StopWatch stopWatch = StopWatch.createStarted();
    assertTrue(stopWatch.isStarted());
}
final StopWatch watch = StopWatch.createStarted(); watch.suspend(); try {
/**
 * Collects every annotation produced by {@code annotationIterator} into a single
 * QueryResult, recording the elapsed wall-clock time in milliseconds.
 */
@Override
public QueryResult<VariantAnnotation> getAnnotation(String name, Query query, QueryOptions options) {
    StopWatch timer = StopWatch.createStarted();
    Iterator<VariantAnnotation> it = annotationIterator(name, query, options);
    List<VariantAnnotation> results = new ArrayList<>();
    while (it.hasNext()) {
        results.add(it.next());
    }
    return new QueryResult<>("getAnnotation", (int) timer.getTime(TimeUnit.MILLISECONDS),
            results.size(), -1, "", "", results);
}
public Query next(int batchSize) { Query newQuery; if (query == null) { newQuery = new Query(); } else { newQuery = new Query(query); } StopWatch stopWatch = StopWatch.createStarted(); List<Object> variants = new ArrayList<>(batchSize); do { // Always execute "next" over variantsIterator, to fail if empty variants.add(variantsIterator.next()); } while (variantsIterator.hasNext() && variants.size() < batchSize); newQuery.append(VariantQueryParam.ID.key(), variants); logger.info("Get next query: " + stopWatch.getTime(TimeUnit.MILLISECONDS) / 1000.0); return newQuery; }
/**
 * Reads up to {@code batchSize} variants from the wrapped iterator, logging the batch
 * duration plus the incremental fetch/convert time split reported by the iterator.
 */
@Override
public List<Variant> read(int batchSize) {
    StopWatch timer = StopWatch.createStarted();
    List<Variant> batch = new ArrayList<>(batchSize);
    while (batch.size() < batchSize && iterator.hasNext()) {
        batch.add(iterator.next());
    }
    long fetchMs = iterator.getTimeFetching(TimeUnit.MILLISECONDS);
    long convertMs = iterator.getTimeConverting(TimeUnit.MILLISECONDS);
    logger.debug("another batch of {} elements read. time: {}ms", batch.size(), timer.getTime());
    // Deltas against the totals seen on the previous read() call
    logger.debug("time splitted: fetch = {}ms, convert = {}ms",
            fetchMs - this.timeFetching, convertMs - this.timeConverting);
    this.timeFetching = fetchMs;
    this.timeConverting = convertMs;
    return batch;
}
/**
 * Creates a variant iterator backed by a persistent (resumable) Mongo cursor, charging the
 * cursor construction time to the iterator's fetch-time counter (kept in nanoseconds).
 */
static VariantMongoDBIterator persistentIterator(MongoDBCollection collection, Bson query,
        Bson projection, QueryOptions options, DocumentToVariantConverter converter) {
    StopWatch setupTimer = StopWatch.createStarted();
    MongoPersistentCursor cursor = new MongoPersistentCursor(collection, query, projection, options);
    VariantMongoDBIterator it = new VariantMongoDBIterator(cursor, converter);
    it.timeFetching += setupTimer.getNanoTime();
    return it;
}
@Override public QueryResult<AlignmentGlobalStats> stats(Path path, Path workspace) throws Exception { StopWatch watch = StopWatch.createStarted(); FileUtils.checkFile(path); FileUtils.checkDirectory(workspace); Path statsPath = workspace.resolve(path.getFileName() + ".stats"); AlignmentGlobalStats alignmentGlobalStats; if (statsPath.toFile().exists()) { // Read the file of stats ObjectMapper objectMapper = new ObjectMapper(); alignmentGlobalStats = objectMapper.readValue(statsPath.toFile(), AlignmentGlobalStats.class); } else { BamManager alignmentManager = new BamManager(path); alignmentGlobalStats = alignmentManager.stats(); ObjectMapper objectMapper = new ObjectMapper(); ObjectWriter objectWriter = objectMapper.typedWriter(AlignmentGlobalStats.class); objectWriter.writeValue(statsPath.toFile(), alignmentGlobalStats); } watch.stop(); return new QueryResult<>("Get stats", (int) watch.getTime(), 1, 1, "", "", Collections.singletonList(alignmentGlobalStats)); }
/**
 * Finds sub-regions whose coverage falls below {@code minCoverage}. Only ".bam" inputs are
 * supported; other file types yield a null region list.
 */
@Override
public QueryResult<RegionCoverage> getLowCoverageRegions(Path path, Region region, int minCoverage) throws Exception {
    FileUtils.checkFile(path);
    StopWatch timer = StopWatch.createStarted();
    List<RegionCoverage> uncovered = null;
    boolean isBam = path.toFile().getName().endsWith(".bam");
    if (isBam) {
        BamManager bamManager = new BamManager(path);
        uncovered = bamManager.getUncoveredRegions(region, minCoverage);
        bamManager.close();
    }
    timer.stop();
    return new QueryResult<>(region.toString(), (int) timer.getTime(), 1, 1, null, null, uncovered);
}
@Override public QueryResult<RegionCoverage> coverage(Path path, Region region, int windowSize) throws Exception { // QueryOptions options = new QueryOptions(); // options.put(QueryParams.WINDOW_SIZE.key(), DEFAULT_WINDOW_SIZE); // options.put(QueryParams.CONTAINED.key(), false); // return coverage(path, workspace, new Query(), options); FileUtils.checkFile(path); StopWatch watch = StopWatch.createStarted(); RegionCoverage regionCoverage; if (path.toFile().getName().endsWith(".bam")) { BamManager bamManager = new BamManager(path); regionCoverage = bamManager.coverage(region, windowSize); bamManager.close(); } else { regionCoverage = BamUtils.getCoverageFromBigWig(region, windowSize, path); } watch.stop(); return new QueryResult<>(region.toString(), ((int) watch.getTime()), 1, 1, null, null, Collections.singletonList(regionCoverage)); }
/**
 * Counts the alignments matching the query by fully draining the iterator.
 */
@Override
public QueryResult<Long> count(Path path, Query query, QueryOptions options) {
    StopWatch timer = StopWatch.createStarted();
    ProtoAlignmentIterator it = iterator(path, query, options);
    long total = 0;
    while (it.hasNext()) {
        it.next();
        total++;
    }
    timer.stop();
    return new QueryResult<>("Get count", (int) timer.getTime(), 1, 1, "", "",
            Collections.singletonList(total));
}
/**
 * Fetches the variant metadata visible to the session, delegating access control to
 * {@code secure} and timing the metadata construction.
 */
public QueryResult<VariantMetadata> getMetadata(Query query, QueryOptions queryOptions, String sessionId)
        throws CatalogException, IOException, StorageEngineException {
    return secure(query, queryOptions, sessionId, engine -> {
        StopWatch timer = StopWatch.createStarted();
        VariantMetadataFactory factory =
                new CatalogVariantMetadataFactory(catalogManager, engine.getDBAdaptor(), sessionId);
        VariantMetadata metadata = factory.makeVariantMetadata(query, queryOptions);
        int elapsedMs = (int) timer.getTime();
        return new QueryResult<>("getMetadata", elapsedMs, 1, 1, "", "",
                Collections.singletonList(metadata));
    });
}
/**
 * Stops the transmitter under test (waiting up to 10 seconds) and reports how long the
 * teardown took.
 */
@After
public void tearDown() {
    StopWatch timer = StopWatch.createStarted();
    underTest.stop(10, TimeUnit.SECONDS);
    underTest = null;
    double seconds = timer.getTime(TimeUnit.MILLISECONDS) / 1000.0;
    System.out.printf("%s.tearDown() took %.3f seconds%n",
            LocalForwarderTelemetryTransmitterTest.class.getSimpleName(), seconds);
}
/**
 * Dispatches a GitHub pull-request event to every registered handler, publishing a
 * monitoring event that records the repo, the PR number and the total processing time.
 * Events whose action type is not in the accepted set are skipped.
 */
public void onGitHubPullRequestEvent(PullRequestEvent pullRequestEvent) {
    boolean accepted = acceptedPullrequestEventType.contains(pullRequestEvent.getAction());
    if (!accepted) {
        log.debug("not processing pullRequest event of type {}", pullRequestEvent.getAction());
        return;
    }
    Event techEvent = Event.technical(MonitoringEvents.PULL_REQUEST_EVENT_TO_PROCESS);
    techEvent.addAttribute(REPO, pullRequestEvent.getRepository().getFullName());
    techEvent.addAttribute(PR_NUMBER, String.valueOf(pullRequestEvent.getPrNumber()));
    StopWatch timer = StopWatch.createStarted();
    for (PullRequestEventHandler handler : actionHandlers) {
        handler.handle(pullRequestEvent);
    }
    timer.stop();
    techEvent.addAttribute("processTime", String.valueOf(timer.getTime()));
    techEvent.publish();
}
/**
 * Counts variants matching the query, routing to the search engine (Solr) when the query
 * qualifies for it, otherwise to the underlying DB adaptor.
 */
public QueryResult<Long> count(Query query) throws StorageEngineException {
    query = preProcessQuery(query, null);
    boolean useSearchManager = doQuerySearchManager(query, new QueryOptions(QueryOptions.COUNT, true));
    if (!useSearchManager) {
        return getDBAdaptor().count(query);
    }
    try {
        StopWatch timer = StopWatch.createStarted();
        long total = getVariantSearchManager().count(dbName, query);
        return new QueryResult<>("count", (int) timer.getTime(TimeUnit.MILLISECONDS), 1, 1, "", "",
                Collections.singletonList(total));
    } catch (IOException | VariantSearchException e) {
        throw new VariantQueryException("Error querying Solr", e);
    }
}
/**
 * Retrieves the variant-file metadata for (studyId, fileId), expecting at most one match.
 * Returns an empty QueryResult when the file is unknown.
 */
default QueryResult<VariantFileMetadata> get(int studyId, int fileId, QueryOptions options)
        throws StorageEngineException {
    StopWatch timer = StopWatch.createStarted();
    Query query = new Query(VariantFileMetadataQueryParam.FILE_ID.key(), fileId)
            .append(VariantFileMetadataQueryParam.STUDY_ID.key(), studyId);
    Iterator<VariantFileMetadata> it;
    try {
        it = iterator(query, options);
    } catch (IOException e) {
        throw new StorageEngineException("Error reading from VariantFileMetadataDBAdaptor", e);
    }
    // getOnlyElement throws if more than one row matches; null marks "not found"
    VariantFileMetadata metadata = Iterators.getOnlyElement(it, null);
    int elapsedMs = (int) timer.getTime(TimeUnit.MILLISECONDS);
    if (metadata == null) {
        return new QueryResult<>("", elapsedMs, 0, 0, null, null, Collections.emptyList());
    }
    return new QueryResult<>("", elapsedMs, 1, 1, null, null, Collections.singletonList(metadata));
}
// Returns read alignments matching the query; when the query contains a region the result
// id is the region string, otherwise the whole file is queried under id "Get alignments".
// NOTE(review): the catch-all swallows every failure into an empty QueryResult and prints
// the stack trace to stderr — callers cannot tell an error from an empty result; consider
// logging and setting an error message on the QueryResult instead.
@Override public QueryResult<ReadAlignment> get(Path path, Query query, QueryOptions options) {
    try {
        FileUtils.checkFile(path);
        StopWatch watch = StopWatch.createStarted();
        BamManager bamManager = new BamManager(path);
        Region region = parseRegion(query);
        AlignmentFilters<SAMRecord> alignmentFilters = parseQuery(query);
        AlignmentOptions alignmentOptions = parseQueryOptions(options);
        String queryResultId;
        List<ReadAlignment> readAlignmentList;
        if (region != null) {
            // Region-scoped query: result id is the region itself
            readAlignmentList = bamManager.query(region, alignmentFilters, alignmentOptions, ReadAlignment.class);
            queryResultId = region.toString();
        } else {
            // Whole-file query
            readAlignmentList = bamManager.query(alignmentFilters, alignmentOptions, ReadAlignment.class);
            queryResultId = "Get alignments";
        }
        bamManager.close();
        watch.stop();
        return new QueryResult<>(queryResultId, ((int) watch.getTime()), readAlignmentList.size(), readAlignmentList.size(), null, null, readAlignmentList);
    } catch (Exception e) {
        e.printStackTrace();
        return new QueryResult<>();
    }
}
/**
 * Handles a GitHub push event: determines the mergeability status of every open PR on the
 * repository, then hands the event to each registered handler, timing the whole process.
 * A RuntimeException from one handler is logged and does not stop the remaining handlers.
 */
public void onGitHubPushEvent(PushEvent pushEvent) {
    if (shouldNotProcess(pushEvent)) {
        return;
    }
    Event techEvent = Event.technical(PUSH_EVENT_TO_PROCESS);
    techEvent.addAttribute(REPO, pushEvent.getRepository().getFullName());
    StopWatch timer = StopWatch.createStarted();
    List<PullRequest> openPRs = retrieveOpenPrs(pushEvent.getRepository().getFullName());
    List<PullRequest> prsWithStatus = figureOutMergeableStatusFor(openPRs, 0);
    logPrMergeabilityStatus(prsWithStatus);
    for (PushEventOnDefaultBranchHandler handler : actionHandlers) {
        try {
            handler.handle(pushEvent, prsWithStatus);
        } catch (RuntimeException e) {
            log.warn("exception thrown during event handling by " + handler.getClass(), e);
        }
    }
    timer.stop();
    techEvent.addAttribute("processTime", String.valueOf(timer.getTime()));
    techEvent.publish();
}
/**
 * Compares the fast sample-index count against the full variant count for the same query —
 * once without and once with a region filter — and asserts the two numbers agree.
 */
@Test
public void testSampleIndexDBAdaptor() throws StorageEngineException {
    List<List<Region>> regionAlternatives = Arrays.asList(null, Arrays.asList(new Region("1", 1000, 300000)));
    for (List<Region> regions : regionAlternatives) {
        StopWatch timer = StopWatch.createStarted();
        long actualCount = ((HadoopVariantStorageEngine) variantStorageEngine).getSampleIndexDBAdaptor()
                .count(regions, "S_1", "NA12877", Arrays.asList("0/1", "1/1"));
        Query query = new Query(VariantQueryParam.STUDY.key(), "S_1")
                .append(VariantQueryParam.SAMPLE.key(), "NA12877");
        if (regions != null) {
            query.append(VariantQueryParam.REGION.key(), regions);
        }
        System.out.println("Count indexTable " + timer.getTime(TimeUnit.MILLISECONDS) / 1000.0);
        System.out.println("Count = " + actualCount);
        timer = StopWatch.createStarted();
        long expectedCount = dbAdaptor.count(query).first();
        System.out.println("Count variants " + timer.getTime(TimeUnit.MILLISECONDS) / 1000.0);
        System.out.println("Count = " + expectedCount);
        System.out.println("-----------------------------------");
        assertEquals(expectedCount, actualCount);
    }
}
/**
 * Computes alignment stats restricted by the given query/options; when no filters are
 * supplied at all it delegates to the cached whole-file {@code stats(Path, Path)} variant.
 *
 * @param path      alignment file to analyse (must exist)
 * @param workspace directory used by the delegated whole-file stats for caching
 * @param query     filter query; treated as empty when null
 * @param options   query options; treated as empty when null
 * @return a single-element QueryResult with the stats and the elapsed time in ms
 * @throws Exception on I/O or validation failure
 */
@Override
public QueryResult<AlignmentGlobalStats> stats(Path path, Path workspace, Query query, QueryOptions options)
        throws Exception {
    FileUtils.checkFile(path);
    StopWatch watch = StopWatch.createStarted();
    if (options == null) {
        options = new QueryOptions();
    }
    if (query == null) {
        // FIX: query was dereferenced below without the null guard that options already had
        query = new Query();
    }
    if (options.size() == 0 && query.size() == 0) {
        // No filters at all: reuse the cached whole-file stats
        return stats(path, workspace);
    }
    Region region = parseRegion(query);
    AlignmentFilters alignmentFilters = parseQuery(query);
    AlignmentOptions alignmentOptions = parseQueryOptions(options);
    BamManager alignmentManager = new BamManager(path);
    AlignmentGlobalStats alignmentGlobalStats;
    try {
        alignmentGlobalStats = alignmentManager.stats(region, alignmentFilters, alignmentOptions);
    } finally {
        // FIX: the BamManager was never closed (sibling methods close theirs) — resource leak
        alignmentManager.close();
    }
    watch.stop();
    return new QueryResult<>("Get stats", (int) watch.getTime(), 1, 1, "", "",
            Arrays.asList(alignmentGlobalStats));
}
// Throughput check: iterates cohorts through the Mongo adaptor with flattened annotations
// and a narrow field projection, printing the elapsed time for the first 10k documents.
// NOTE(review): the method returns after the first 10,000 documents, so only one batch is
// ever timed — presumably intentional to keep the test fast; confirm before extending.
@Test public void testCohortIterator() throws CatalogException, SolrServerException, IOException {
    // Flatten annotations and project only the fields the iterator actually needs
    QueryOptions queryOptions = new QueryOptions(FLATTENED_ANNOTATIONS, "true");
    queryOptions.put(QueryOptions.INCLUDE, Arrays.asList(CohortDBAdaptor.QueryParams.ID.key(),
            CohortDBAdaptor.QueryParams.NAME.key(), CohortDBAdaptor.QueryParams.STUDY_UID.key(),
            CohortDBAdaptor.QueryParams.TYPE.key(), CohortDBAdaptor.QueryParams.CREATION_DATE.key(),
            CohortDBAdaptor.QueryParams.STATUS.key(), CohortDBAdaptor.QueryParams.RELEASE.key(),
            CohortDBAdaptor.QueryParams.ANNOTATION_SETS.key(), CohortDBAdaptor.QueryParams.SAMPLE_UIDS.key()));
    MongoDBAdaptorFactory factory = new MongoDBAdaptorFactory(catalogManager.getConfiguration());
    CohortMongoDBAdaptor cohortMongoDBAdaptor = factory.getCatalogCohortDBAdaptor();
    DBIterator<Cohort> cohortIterator = cohortMongoDBAdaptor.iterator(new Query(), queryOptions);
    int i = 0;
    StopWatch stopWatch = StopWatch.createStarted();
    while (cohortIterator.hasNext()) {
        cohortIterator.next();
        i++;
        if (i % 10000 == 0) {
            System.out.println("i: " + i + "; time: " + stopWatch.getTime(TimeUnit.MILLISECONDS));
            // reset()+start() re-arms the watch for the next batch (commons-lang "restart")
            stopWatch.reset();
            stopWatch.start();
            return;
        }
    }
}