How to use Slice in org.springframework.data.domain

Best Java code snippets using org.springframework.data.domain.Slice (Showing top 20 results out of 315)

origin: jamesagnew/hapi-fhir

int count = toDelete.getContent().size();
if (count > 0) {
  long total = tt.execute(t -> mySearchDao.count());
  // ... (rest of this pass is elided in the snippet)
}
origin: jamesagnew/hapi-fhir

private void deleteSearch(final Long theSearchPid) {
  mySearchDao.findById(theSearchPid).ifPresent(searchToDelete -> {
    mySearchIncludeDao.deleteForSearch(searchToDelete.getId());
    /*
     * Note, we're only deleting up to 500 results in an individual search here. This
     * is to prevent really long running transactions in cases where there are
     * huge searches with tons of results in them. By the time we've gotten here
     * we have marked the parent Search entity as deleted, so it's not such a
     * huge deal to be only partially deleting search results. They'll get deleted
     * eventually
     */
    int max = ourMaximumResultsToDeleteInOnePass;
    Slice<Long> resultPids = mySearchResultDao.findForSearch(PageRequest.of(0, max), searchToDelete.getId());
    if (resultPids.hasContent()) {
      List<List<Long>> partitions = Lists.partition(resultPids.getContent(), ourMaximumResultsToDeleteInOneStatement);
      for (List<Long> nextPartition : partitions) {
        mySearchResultDao.deleteByIds(nextPartition);
      }
    }
    // Only delete if we don't have results left in this search
    if (resultPids.getNumberOfElements() < max) {
      ourLog.info("Deleting search {}/{} - Created[{}] -- Last returned[{}]", searchToDelete.getId(), searchToDelete.getUuid(), new InstantType(searchToDelete.getCreated()), new InstantType(searchToDelete.getSearchLastReturned()));
      mySearchDao.deleteByPid(searchToDelete.getId());
    } else {
      ourLog.info("Purged {} search results for deleted search {}/{}", resultPids.getSize(), searchToDelete.getId(), searchToDelete.getUuid());
    }
  });
}
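
The comment inside deleteSearch explains the key idea: cap how many result rows a single pass loads (one Slice of at most ourMaximumResultsToDeleteInOnePass IDs) and delete them in smaller chunks, so no single transaction grows unbounded. Below is a minimal sketch of that pattern; the repository names, entity, and sizes are illustrative assumptions, not the HAPI FHIR API. It assumes Guava's Lists.partition, as in the snippet above.

// interface SearchResultRepository extends JpaRepository<SearchResult, Long> {
//   Slice<SearchResult> findBySearchId(Long searchId, Pageable page);   // hypothetical derived finder
// }
void purgeOnePass(Long searchId) {
  int maxPerPass = 500;    // cap on rows loaded in a single pass
  int deleteChunk = 100;   // cap on rows per bulk DELETE statement
  Slice<SearchResult> results =
      searchResultRepository.findBySearchId(searchId, PageRequest.of(0, maxPerPass));
  if (results.hasContent()) {
    for (List<SearchResult> chunk : Lists.partition(results.getContent(), deleteChunk)) {
      searchResultRepository.deleteInBatch(chunk);   // one bulk DELETE per chunk
    }
  }
  if (results.getNumberOfElements() < maxPerPass) {
    // fewer rows than the cap means nothing is left; the parent record can go too
    searchRepository.deleteById(searchId);
  }
}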
origin: apache/servicemix-bundles

/**
 * Returns the {@link Pageable} that has been used to request the current {@link Slice}.
 *
 * @return the {@link Pageable} used to request the current {@link Slice}; never {@literal null}
 * @since 2.0
 */
default Pageable getPageable() {
  return PageRequest.of(getNumber(), getSize(), getSort());
}
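
This default method simply reassembles the original request from the slice's own number, size and sort, which is also what nextPageable() builds on when a further slice exists. A short hedged sketch of how the two compose (the repository, entity and finder are made up for illustration):

// assumed: customerRepository.findByActiveTrue(Pageable) is declared to return Slice<Customer>
Slice<Customer> slice = customerRepository.findByActiveTrue(PageRequest.of(0, 100));
Pageable current = slice.getPageable();   // equivalent to PageRequest.of(slice.getNumber(), slice.getSize(), slice.getSort())
if (slice.hasNext()) {
  // when another slice exists, nextPageable() describes the follow-up request
  Slice<Customer> next = customerRepository.findByActiveTrue(slice.nextPageable());
}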
origin: jamesagnew/hapi-fhir

private <T> void doDelete(String theDescriptor, Supplier<Slice<T>> theLoader, Supplier<Integer> theCounter, JpaRepository<T, ?> theDao) {
  int count;
  ourLog.info(" * Deleting {}", theDescriptor);
  int totalCount = theCounter.get();
  StopWatch sw = new StopWatch();
  count = 0;
  while (true) {
    Slice<T> link = theLoader.get();
    if (!link.hasContent()) {
      break;
    }
    TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
    txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
    txTemplate.execute(t -> {
      theDao.deleteInBatch(link);
      return null;
    });
    count += link.getNumberOfElements();
    ourLog.info(" * {} {} deleted - {}/sec - ETA: {}", count, theDescriptor, sw.formatThroughput(count, TimeUnit.SECONDS), sw.getEstimatedTimeRemaining(count, totalCount));
  }
  theDao.flush();
}
origin: jamesagnew/hapi-fhir

int count = range.getNumberOfElements();
ourLog.info("Loaded {} resources for reindexing in {}", count, pageSw.toString());
// Slice is Streamable, so the loaded IDs can be submitted straight to the executor.
// The receiver and the variable name below are inferred; the original snippet truncated them.
List<Future<?>> futures = range
  .stream()
  .map(t -> myTaskExecutor.submit(new ResourceReindexingTask(t, counter)))
  .collect(Collectors.toList());
origin: com.netflix.genie/genie-core

do {
  idProjections = this.jobRepository.findByCreatedBefore(date, page);
  if (idProjections.hasContent()) {
    final List<Long> ids = idProjections
      .getContent()
      .stream()
      .map(IdProjection::getId)
      .collect(Collectors.toList());
    // ... delete the jobs behind these ids and update totalAttemptedDeletions (elided in the snippet)
  }
} while (idProjections.hasNext() && totalAttemptedDeletions < maxDeleted);
origin: mtdhb/api

Slice<CookieUseCountView> cookieUseCountViews = cookieUseCountRepository.findCookieUseCountView(upper,
    application, daily, today, PageRequest.of(0, CHUNK_SIZE));
int numberOfElements = cookieUseCountViews.getNumberOfElements();
log.info("cookieUseCountViews#size={}", numberOfElements);
if (numberOfElements < 1) {
  // the slice is empty, so there is nothing left to process (body elided in the snippet)
  return;
}
CookieUseCountView last = cookieUseCountViews.getContent().get(numberOfElements - 1);
long newUpper = last.getId();
log.info("newUpper={}", newUpper);
cookieUseCountViews.forEach(cookieUseCountView -> {
  if (usage.get(cookieUseCountView.getOpenId()) == null) {
    Cookie cookie = new Cookie();
    // ... (elided)
  }
});
origin: jamesagnew/hapi-fhir

  if (theResourceId != null) {
    Slice<Long> ids = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
    ourLog.info("Expunging {} deleted resources of type[{}] and ID[{}]", ids.getNumberOfElements(), theResourceName, theResourceId);
    return ids;
  } else {
    if (theResourceName != null) {
      Slice<Long> ids = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
      ourLog.info("Expunging {} deleted resources of type[{}]", ids.getNumberOfElements(), theResourceName);
      return ids;
    } else {
      Slice<Long> ids = myResourceTableDao.findIdsOfDeletedResources(page);
      ourLog.info("Expunging {} deleted resources (all types)", ids.getNumberOfElements());
      return ids;
    }
  }

// a separate fragment of the same snippet: the collected IDs are deleted in bounded partitions
List<List<Long>> partitions = Lists.partition(resourceIds.getContent(), 800);
for (List<Long> nextPartition : partitions) {
  ourLog.info("Expunging any search results pointing to {} resources", nextPartition.size());
  // ... (elided)
}
origin: org.eclipse.hawkbit/hawkbit-repository-jpa

private void setRolloutStatusDetails(final Slice<JpaRollout> rollouts) {
  final List<Long> rolloutIds = rollouts.getContent().stream().map(Rollout::getId).collect(Collectors.toList());
  final Map<Long, List<TotalTargetCountActionStatus>> allStatesForRollout = getStatusCountItemForRollout(
      rolloutIds);
  if (allStatesForRollout != null) {
    rollouts.forEach(rollout -> {
      final TotalTargetCountStatus totalTargetCountStatus = new TotalTargetCountStatus(
          allStatesForRollout.get(rollout.getId()), rollout.getTotalTargets());
      rollout.setTotalTargetCountStatus(totalTargetCountStatus);
    });
  }
}
origin: at.researchstudio.sat/won-node

private NeedInformationService.PagedResource<Dataset, URI> toContainerPage(String containerUri, Slice<URI> slice) {
  List<URI> uris = slice.getContent();
  URI resumeBefore = null;
  URI resumeAfter = null;
  if (slice.getSort() != null && !uris.isEmpty()) {
    Iterator<Sort.Order> sortOrders = slice.getSort().iterator();
    if (sortOrders.hasNext()) {
      Sort.Order sortOrder = sortOrders.next();
origin: mtdhb/api

@Cacheable(cacheNames = CacheNames.RECEIVING_CAROUSEL)
@Override
public List<ReceivingCarouselDTO> listReceivingCarousel() {
  Slice<ReceivingCarouselView> slice = receivingRepository
      .findReceivingCarouselView(PageRequest.of(0, 10, new Sort(Sort.Direction.DESC, "gmtModified")));
  List<ReceivingCarouselDTO> carouselReceivingDTOs = slice.map(receivingCarouselView -> {
    ReceivingCarouselDTO receivingCarouselDTO = new ReceivingCarouselDTO();
    String mail = receivingCarouselView.getMail();
    int index = mail.indexOf("@");
    String prefix = mail.substring(0, index);
    prefix = prefix.length() > 4 ? prefix.substring(0, 4) : prefix;
    mail = prefix + "****";
    BeanUtils.copyProperties(receivingCarouselView, receivingCarouselDTO);
    receivingCarouselDTO.setMail(mail);
    return receivingCarouselDTO;
  }).getContent();
  return carouselReceivingDTOs;
}
origin: at.researchstudio.sat/won-node

Slice<MessageEventPlaceholder> latestEvents =
    messageEventRepository.findByParentURIFetchDatasetEagerly(connectionUri,
        new PageRequest(0, 1, new Sort(Sort.Direction.DESC, "creationDate")));
if (latestEvents.hasContent()) {
  MessageEventPlaceholder event = latestEvents.getContent().get(0);
  // ... (elided)
}
origin: at.researchstudio.sat/won-node

do {
  needsToWarn.forEach(need -> {
    try {
      if (cancelled.get()) {
        return;
      }
      // ... warn about the inactive need (elided in the snippet)
    } catch (Exception e) {
      // ... (elided)
    }
  });
  if (needsToWarn.hasNext()) {
    Pageable pageable = needsToWarn.nextPageable();
    needsToWarn = needRepository.findNeedsInactiveBetweenAndNotConnected(startWarningThreshold, stopWarningThreshold, pageable);
  } else {
    needsToWarn = null;
  }
} while (needsToWarn != null && needsToWarn.hasContent());

do {
  needsToDeactivate.forEach(need -> {
    try {
      if (cancelled.get()) {
        return;
      }
      // ... deactivate the need (elided in the snippet)
    } catch (Exception e) {
      // ... (elided)
    }
  });
  if (needsToDeactivate.hasNext()) {
    Pageable pageable = needsToDeactivate.nextPageable();
    needsToDeactivate = needRepository.findNeedsInactiveSinceAndNotConnected(deactivateThreshold, pageable);
  } else {
    needsToDeactivate = null;
  }
} while (needsToDeactivate != null && needsToDeactivate.hasContent());
origin: jamesagnew/hapi-fhir

protected void expungeHistoricalVersionsOfId(Long theResourceId, AtomicInteger theRemainingCount) {
  ResourceTable resource = myResourceTableDao.findById(theResourceId).orElseThrow(IllegalArgumentException::new);
  Pageable page = PageRequest.of(0, theRemainingCount.get());
  Slice<Long> versionIds = myResourceHistoryTableDao.findForResourceId(page, resource.getId(), resource.getVersion());
  ourLog.debug("Found {} versions of resource {} to expunge", versionIds.getNumberOfElements(), resource.getIdDt().getValue());
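  // Slice is Iterable (via Streamable), so the returned version IDs can be iterated directly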
  for (Long nextVersionId : versionIds) {
    expungeHistoricalVersion(nextVersionId);
    if (theRemainingCount.decrementAndGet() <= 0) {
      return;
    }
  }
}
origin: knes1/todo

@RequestMapping(value = "/todos2.csv", method = RequestMethod.GET)
public void exportTodosCSVSlicing(HttpServletResponse response) {
  final int PAGE_SIZE = 1000;
  response.addHeader("Content-Type", "application/csv");
  response.addHeader("Content-Disposition", "attachment; filename=todos.csv");
  response.setCharacterEncoding("UTF-8");
  try {
    PrintWriter out = response.getWriter();
    int page = 0;
    Slice<Todo> todoPage;
    do {
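      // findAllBy returns a Slice, so Spring Data skips the count query a Page would need;
      // hasNext() at the bottom of the loop is enough to decide whether to keep going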
      todoPage = todoRepository.findAllBy(new PageRequest(page, PAGE_SIZE));
      for (Todo todo : todoPage) {
        String line = todoToCSV(todo);
        out.write(line);
        out.write("\n");
      }
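      // detach the entities already written so the persistence context does not keep growing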
      entityManager.clear();
      page++;
    } while (todoPage.hasNext());
    out.flush();
  } catch (IOException e) {
    log.info("Exception occurred " + e.getMessage(), e);
    throw new RuntimeException("Exception occurred while exporting results", e);
  }
}
origin: at.researchstudio.sat/won-node

slice, boolean deep) {
  List<MessageEventPlaceholder> events = slice.getContent();
  URI resumeBefore = null;
  URI resumeAfter = null;
  if (slice.getSort() != null && !events.isEmpty()) {
    Iterator<Sort.Order> sortOrders = slice.getSort().iterator();
    if (sortOrders.hasNext()) {
      Sort.Order sortOrder = sortOrders.next();
origin: eclipse/hawkbit

private void deleteScheduledActions(final JpaRollout rollout, final Slice<JpaAction> scheduledActions) {
  final boolean hasScheduledActions = scheduledActions.getNumberOfElements() > 0;
  if (hasScheduledActions) {
    try {
      final Iterable<JpaAction> iterable = scheduledActions::iterator;
      final List<Long> actionIds = StreamSupport.stream(iterable.spliterator(), false).map(Action::getId)
          .collect(Collectors.toList());
      actionRepository.deleteByIdIn(actionIds);
      afterCommit.afterCommit(() -> eventPublisher.publishEvent(
          new RolloutUpdatedEvent(rollout, EventPublisherHolder.getInstance().getApplicationId())));
    } catch (final RuntimeException e) {
      LOGGER.error("Exception during deletion of actions of rollout {}", rollout, e);
    }
  }
}
org.springframework.data.domain.Slice

Javadoc

A slice of data that indicates whether there is a next or previous slice available. It also allows obtaining a Pageable to request the previous or next Slice.
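
A common reason to return a Slice instead of a Page is to walk a large result set without the extra count query a Page triggers: declare a repository method that returns Slice and keep requesting nextPageable() while hasNext() is true. A minimal sketch follows; the CustomerRepository, Customer entity and handle(...) callback are hypothetical stand-ins, not part of Spring Data.

// Hypothetical repository; Spring Data derives the query from the method name and,
// because the return type is Slice, typically fetches just pageSize + 1 rows to
// decide whether another slice exists instead of running a count query.
public interface CustomerRepository extends Repository<Customer, Long> {
  Slice<Customer> findByLastName(String lastName, Pageable pageable);
}

Slice<Customer> slice = customerRepository.findByLastName("Smith", PageRequest.of(0, 200));
do {
  slice.forEach(customer -> handle(customer));   // process the current chunk
  slice = slice.hasNext()
      ? customerRepository.findByLastName("Smith", slice.nextPageable())   // ask for the next chunk
      : null;                                     // stop once the last slice has been handled
} while (slice != null);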

Most used methods

  • getContent
  • forEach
  • getNumberOfElements
  • hasContent
  • hasNext
  • getSize
  • getSort
  • map
  • getNumber
    Returns the number of the current Slice. Is always non-negative.
  • nextPageable
  • stream
  • stream
