/**
 * Returns the total size of the "master list" that this page is a subset of.
 * <p/>
 * <b>Note:</b> This method merely returns the size of this list if it is {@link #isUnbounded() unbounded}.
 *
 * @return the total size
 */
public int getTotalSize() {
    // The recorded total may lag behind the page contents, so never report a
    // total smaller than the number of elements actually held in this list.
    int currentCount = this.size();
    return (currentCount > this.totalSize) ? currentCount : this.totalSize;
}
/**
 * Retrieves the package version metadata for the given resource and logs how long the
 * lookup took: lookups slower than 30 seconds are logged at INFO, otherwise the timing
 * is logged at DEBUG (when enabled).
 *
 * @param resourceId identifies the resource whose package version metadata is wanted
 * @param pc paging/sorting control applied to the returned list
 * @return the page of package version metadata composites for the resource
 */
public PageList<PackageVersionMetadataComposite> getPackageVersionMetadata(int resourceId, PageControl pc) {
    ContentSourceManagerLocal manager = LookupUtil.getContentSourceManager();

    long start = System.currentTimeMillis();
    PageList<PackageVersionMetadataComposite> metadataMap = manager.getPackageVersionMetadata(resourceId, pc);
    long elapsed = System.currentTimeMillis() - start;

    // Build the message once — the INFO and DEBUG branches previously duplicated it verbatim.
    boolean slow = elapsed > 30000L;
    if (slow || log.isDebugEnabled()) {
        String msg = "Performance: metadata for resource [" + resourceId + "] has [" + metadataMap.size()
            + "] packages in (" + elapsed + ")ms";
        if (slow) {
            log.info(msg);
        } else {
            log.debug(msg);
        }
    }

    return metadataMap;
}
/**
 * Fetches the page of data described by the given page control and rebuilds the
 * primary-key index for that page.
 */
private void loadDataPage(PageControl pageControl) {
    this.currentPage = getDataPage(pageControl);

    // LinkedHashMap preserves the page's original ordering while giving O(1) key lookups.
    LinkedHashMap<Object, T> byKey = new LinkedHashMap<Object, T>(this.currentPage.size());
    for (T item : this.currentPage) {
        byKey.put(getPrimaryKey(item), item);
    }
    this.currentPageDataByKey = byKey;
}
/**
 * Creates an exception describing repeated failed attempts to read a consistent page of
 * data (the page contents and the reported total count disagreed on every try).
 *
 * @param numberOfAttempts how many times the read was retried before giving up
 * @param list the last (still inconsistent) page that was read; its size and total size
 *             are included in the exception message
 * @param millisecondsSpentTrying total wall-clock time spent retrying
 */
public PhantomReadMaxAttemptsExceededException(int numberOfAttempts, PageList<?> list, long millisecondsSpentTrying) {
    super(initMessage(numberOfAttempts, list.size(), list.getTotalSize(), millisecondsSpentTrying));
    this.numberOfAttempts = numberOfAttempts;
    this.list = list;
    this.millisecondsSpentTrying = millisecondsSpentTrying;
}
/**
 * Creates an exception describing repeated failed attempts to read a consistent page of
 * data, preserving the underlying cause of the final failure.
 *
 * @param numberOfAttempts how many times the read was retried before giving up
 * @param list the last (still inconsistent) page that was read; its size and total size
 *             are included in the exception message
 * @param millisecondsSpentTrying total wall-clock time spent retrying
 * @param cause the underlying failure, preserved for the exception chain
 */
public PhantomReadMaxAttemptsExceededException(int numberOfAttempts, PageList<?> list, long millisecondsSpentTrying, Throwable cause) {
    super(initMessage(numberOfAttempts, list.size(), list.getTotalSize(), millisecondsSpentTrying), cause);
    this.numberOfAttempts = numberOfAttempts;
    this.list = list;
    this.millisecondsSpentTrying = millisecondsSpentTrying;
}
/**
 * Runs the given event criteria query and prepares a ResponseBuilder whose entity is
 * the list of matching events in REST form. XML and JSON responses carry paging data
 * in headers; any other requested media type gets the paging data embedded in a
 * wrapper entity instead.
 */
private Response.ResponseBuilder getEventsAsBuilderForCriteria(HttpHeaders headers, EventCriteria criteria, UriInfo uriInfo) {
    PageList<Event> eventList = eventManager.findEventsByCriteria(caller, criteria);

    List<EventRest> restEvents = new ArrayList<EventRest>(eventList.size());
    for (Event event : eventList) {
        restEvents.add(convertEvent(event));
    }

    MediaType mediaType = headers.getAcceptableMediaTypes().get(0);
    Response.ResponseBuilder builder = Response.ok();
    builder.type(mediaType);

    if (mediaType.equals(MediaType.APPLICATION_XML_TYPE)) {
        // JAXB cannot marshal a raw List; the anonymous GenericEntity keeps the
        // element type through erasure.
        builder.entity(new GenericEntity<List<EventRest>>(restEvents) {});
        createPagingHeader(builder, uriInfo, eventList);
    } else if (mediaType.equals(MediaType.APPLICATION_JSON_TYPE)) {
        builder.entity(restEvents);
        createPagingHeader(builder, uriInfo, eventList);
    } else {
        // Other media types: paging info goes into the entity itself.
        wrapForPaging(builder, uriInfo, eventList, restEvents);
    }

    return builder;
}
"Could not get consistent results of the paged data and a total count for " + CriteriaUtil.toString(criteria) + ". After " + e.getNumberOfAttempts() + " attempts, the collection size" + " is " + e.getList().size() + ", while the count query reports " + e.getList().getTotalSize() + " for " + pageControl + ". The discrepancy has not cleared up in " + e.getMillisecondsSpentTrying() LOG.debug("restriction=" + criteriaRestriction + ", resultSize=" + results.size() + ", resultCount=" + results.getTotalSize()); results = new PageList<T>(getCollection(), pageControl); if (LOG.isDebugEnabled()) { LOG.debug("restriction=" + criteriaRestriction + ", resultSize=" + results.size());
List<ResourceWithType> rwtList = new ArrayList<ResourceWithType>(resources.size()); for (Resource r : resources) { putToCache(r.getId(), Resource.class, r);
rowsProcessed += alertConditions.size(); if (rowsProcessed >= alertConditions.getTotalSize()) { break; // we've processed all data, we can stop now
@GZIP @AddLinks @GET @Path(("/{id}/alerts")) @ApiError(code = 404, reason = NO_RESOURCE_FOR_ID) @ApiOperation("Get a list of links to the alerts for the passed resource") public List<Link> getAlertsForResource(@ApiParam("Id of the resource to query") @PathParam("id") int resourceId) { AlertCriteria criteria = new AlertCriteria(); // Check for resource existence fetchResource(resourceId); criteria.addFilterResourceIds(resourceId); PageList<Alert> alerts = alertManager.findAlertsByCriteria(caller, criteria); List<Link> links = new ArrayList<Link>(alerts.size()); for (Alert al : alerts) { Link link = new Link(); link.setRel("alert"); link.setHref("/alert/" + al.getId()); links.add(link); } return links; }
private void abortResourceManualAddIfExistingSingleton(Resource parentResource, ResourceType resourceType) { if (resourceType.isSingleton()) { ResourceCriteria resourceCriteria = new ResourceCriteria(); resourceCriteria.addFilterParentResourceId(parentResource.getId()); resourceCriteria.addFilterResourceTypeId(resourceType.getId()); resourceCriteria.clearPaging();//Doc: disable paging as the code assumes all the results will be returned. PageList<Resource> childResourcesOfType = resourceManager.findResourcesByCriteria( subjectManager.getOverlord(), resourceCriteria); if (childResourcesOfType.size() >= 1) { throw new RuntimeException("Cannot manually add " + resourceType + " child Resource under parent " + parentResource + ", since " + resourceType + " is a singleton type, and there is already a child Resource of that type. " + "If the existing child Resource corresponds to a managed Resource which no longer exists, " + "uninventory it and then try again."); } } }
List<Integer> resourceIds = new ArrayList<Integer>(result.size()); for (BundleResourceDeployment brd : result) { int resourceId = brd.getResource().getId();
private void abortResourceCreationIfExistingSingleton(Resource parentResource, ResourceType resourceType) { if (resourceType.isSingleton()) { ResourceCriteria resourceCriteria = new ResourceCriteria(); resourceCriteria.addFilterParentResourceId(parentResource.getId()); resourceCriteria.addFilterResourceTypeId(resourceType.getId()); resourceCriteria.clearPaging();//disable paging as the code assumes all the results will be returned. PageList<Resource> childResourcesOfType = resourceManager.findResourcesByCriteria( subjectManager.getOverlord(), resourceCriteria); if (childResourcesOfType.size() >= 1) { throw new RuntimeException("Cannot create " + resourceType + " child Resource under parent " + parentResource + ", since " + resourceType + " is a singleton type, and there is already a child Resource of that type. " + "If the existing child Resource corresponds to a managed Resource which no longer exists, " + "uninventory it and then try again."); } } }
@Override public InstalledPackage getBackingPackageForResource(Subject subject, int resourceId) { InstalledPackage result = null; // check if the resource is content backed if not, return null Resource res = resourceManager.getResourceById(subject, resourceId); ResourceType type = res.getResourceType(); if (!ResourceCreationDataType.CONTENT.equals(type.getCreationDataType())) { return null; } InstalledPackageCriteria criteria = new InstalledPackageCriteria(); criteria.addFilterResourceId(resourceId); PageList<InstalledPackage> ips = findInstalledPackagesByCriteria(subject, criteria); // should not be more than 1 if ((null != ips) && (ips.size() > 0)) { int mostRecentPackageIndex = 0; if (ips.size() > 1) { for (int index = 1; index < ips.size(); index++) { if (ips.get(index).getInstallationDate() > ips.get(mostRecentPackageIndex).getInstallationDate()) { mostRecentPackageIndex = index; } } } result = ips.get(mostRecentPackageIndex); // fetch these result.getPackageVersion().getGeneralPackage().getId(); result.getPackageVersion().getGeneralPackage().getPackageType().getId(); result.getPackageVersion().getArchitecture().getId(); } return result; }
/**
 * Finds drift definitions matching the criteria and pairs each with its most recent
 * change set (null when the definition has no change sets yet).
 *
 * @param subject the user performing the query
 * @param criteria selects which drift definitions to return
 * @return composites of each matching definition plus its newest change set
 */
@Override
public PageList<DriftDefinitionComposite> findDriftDefinitionCompositesByCriteria(Subject subject,
    DriftDefinitionCriteria criteria) {
    PageList<DriftDefinition> defs = findDriftDefinitionsByCriteria(subject, criteria);
    PageList<DriftDefinitionComposite> result = new PageList<DriftDefinitionComposite>(defs.getPageControl());
    List<DriftDefinitionComposite> composites = new ArrayList<DriftDefinitionComposite>(defs.size());
    // NOTE(review): a single criteria instance is mutated and reused for every iteration;
    // this assumes addFilterDriftDefinitionId() replaces (not accumulates) the filter — TODO confirm.
    GenericDriftChangeSetCriteria csCriteria = new GenericDriftChangeSetCriteria();
    for (DriftDefinition def : defs) {
        DriftDefinitionComposite composite = new DriftDefinitionComposite(def, null);
        // Ask for only the newest change set of this definition (sorted DESC, single row).
        csCriteria.addFilterDriftDefinitionId(def.getId());
        csCriteria.addSortVersion(PageOrdering.DESC);
        csCriteria.setPageControl(PageControl.getSingleRowInstance());
        PageList<? extends DriftChangeSet<?>> changeSets = findDriftChangeSetsByCriteria(subject, csCriteria);
        if (!changeSets.isEmpty()) {
            composite.setMostRecentChangeset(changeSets.get(0));
        }
        composites.add(composite);
    }
    result.addAll(composites);
    return result;
}
subject, criteria); if (results != null && results.size() > 0) { successResultFound = true;
getCriteriaFromContext(context)); if (deployments.size() > 0) { BundleDeployment bundleDeployment = deployments.get(0); SchedulerLocal scheduler = LookupUtil.getSchedulerBean();
/**
 * Polls the RHQ server until the operation started by the given schedule reaches a
 * terminal status, or until the maximum number of polling intervals has elapsed.
 *
 * @param schedule the schedule whose operation is being awaited
 * @param intervalDuration milliseconds to sleep between polls
 * @param maxIntervals maximum number of polls before giving up
 * @return the completed operation history, or null if it did not finish in time
 * @throws InterruptedException if the polling sleep is interrupted
 */
public ResourceOperationHistory waitForScheduledOperationToComplete(ResourceOperationSchedule schedule,
    long intervalDuration, int maxIntervals) throws InterruptedException {
    if (remoteClient == null) {
        throw new IllegalStateException("The waitForScheduledOperationToComplete() method requires a connection to the RHQ server.");
    }

    // Match the newest history row for this job on this resource, fetching the pieces we return.
    ResourceOperationHistoryCriteria criteria = new ResourceOperationHistoryCriteria();
    criteria.addFilterJobId(schedule.getJobId());
    criteria.addFilterResourceIds(schedule.getResource().getId());
    criteria.addSortStartTime(PageOrdering.DESC);
    criteria.setPaging(0, 1);
    criteria.fetchOperationDefinition(true);
    criteria.fetchParameters(true);
    criteria.fetchResults(true);

    for (int attempt = 0; attempt < maxIntervals; attempt++) {
        Thread.sleep(intervalDuration);
        PageList<ResourceOperationHistory> histories = remoteClient.getProxy(OperationManagerRemote.class)
            .findResourceOperationHistoriesByCriteria(remoteClient.getSubject(), criteria);
        if (!histories.isEmpty() && histories.get(0).getStatus() != OperationRequestStatus.INPROGRESS) {
            return histories.get(0);
        }
    }

    // Operation still in progress (or never reported) after maxIntervals polls.
    return null;
}
/**
 * Finds subjects matching the criteria. Callers without any user-viewing permission
 * (superuser, MANAGE_SECURITY, or VIEW_USERS) have the result restricted to at most
 * their own subject, with the total size adjusted to match.
 */
public PageList<Subject> findSubjectsByCriteria(Subject subject, SubjectCriteria criteria) {
    CriteriaQueryGenerator generator = new CriteriaQueryGenerator(subject, criteria);
    CriteriaQueryRunner<Subject> queryRunner = new CriteriaQueryRunner<Subject>(criteria, generator, entityManager);
    PageList<Subject> subjects = queryRunner.execute();

    boolean canViewUsers = authorizationManager.isSystemSuperuser(subject)
        || authorizationManager.hasGlobalPermission(subject, Permission.MANAGE_SECURITY)
        || authorizationManager.hasGlobalPermission(subject, Permission.VIEW_USERS);

    if (canViewUsers) {
        return subjects;
    }

    // No permission to view other users: keep only the caller's own (attached) entity, if present.
    int selfIndex = subjects.indexOf(subject);
    Subject attachedSelf = (selfIndex >= 0) ? subjects.get(selfIndex) : null;
    subjects.clear();
    if (attachedSelf != null) {
        subjects.add(attachedSelf);
    }
    subjects.setTotalSize(subjects.size());

    return subjects;
}
PageList<ResourceOperationHistory> histories = operationManager .findResourceOperationHistoriesByCriteria(remoteClient.getSubject(), criteria); if (histories.size() > 0 && histories.get(0).getStatus() != OperationRequestStatus.INPROGRESS) { history = histories.get(0);