/**
 * Sets the number of documents to return per batch.
 *
 * @param batchSize the batch size to apply to the underlying iterable
 * @return this iterable, for method chaining
 */
@Override
public GridFSFindIterable batchSize(final int batchSize) {
    underlying.batchSize(batchSize);
    return this;
}
/**
 * Opens a cursor over this file's chunk documents, starting at the given
 * chunk index and sorted by ascending chunk number {@code n}.
 *
 * @param startChunkIndex index of the first chunk to fetch (inclusive)
 * @return a cursor over the matching chunk documents
 */
private MongoCursor<Document> getCursor(final int startChunkIndex) {
    // Select all chunks of this file whose index is >= startChunkIndex.
    Document filter = new Document("files_id", fileId)
            .append("n", new Document("$gte", startChunkIndex));
    FindIterable<Document> findIterable = clientSession != null
            ? chunksCollection.find(clientSession, filter)
            : chunksCollection.find(filter);
    return findIterable
            .batchSize(batchSize)
            .sort(new Document("n", 1))
            .iterator();
}
/**
 * Executes the query described by the split, projecting only the requested
 * columns.
 *
 * @param split   the split identifying the collection and predicate
 * @param columns the columns to include in the projection
 * @return a cursor over the matching documents
 */
public MongoCursor<Document> execute(MongoSplit split, List<MongoColumnHandle> columns) {
    // Build an inclusion projection containing exactly the requested columns.
    Document projection = new Document();
    for (MongoColumnHandle column : columns) {
        projection.append(column.getName(), 1);
    }
    MongoCollection<Document> collection = getCollection(split.getSchemaTableName());
    FindIterable<Document> iterable = collection
            .find(buildQuery(split.getTupleDomain()))
            .projection(projection);
    // 0 means "not configured": leave the driver's default batch size in place.
    if (cursorBatchSize != 0) {
        iterable.batchSize(cursorBatchSize);
    }
    return iterable.iterator();
}
// Runs the find query and materializes the result as an immutable list.
// NOTE(review): the trailing "} });" closes an enclosing anonymous class /
// call site that is not fully visible in this chunk.
@SuppressWarnings("resource") @Override public List<T> call() throws Exception {
    // criteria is optional; a null query means "match all documents".
    @Nullable Bson query = criteria != null ? convertToBson(criteria) : null;
    FindIterable<T> cursor = collection().find(query);
    if (!exclusion.isNil()) { cursor.projection(convertToBson(exclusion)); }
    if (!ordering.isNil()) { cursor.sort(convertToBson(ordering)); }
    cursor.skip(skip);
    if (limit != 0) {
        cursor.limit(limit);
        if (limit <= LARGE_BATCH_SIZE) {
            // if limit specified and is smaller than reasonable large batch size
            // then we force batch size to be the same as limit,
            // but negative, this force cursor to close right after result is sent
            cursor.batchSize(-limit);
        }
    }
    // close properly
    try (MongoCursor<T> iterator = cursor.iterator()) {
        return ImmutableList.copyOf(iterator);
    }
} });
// Apply the cursor batch size configured in the step metadata.
// NOTE(review): presumably batchSize(...) returns the (possibly same) cursor
// instance, hence the reassignment — confirm against the cursor type's API.
cursorToUse = cursorToUse.batchSize(meta.getCursorBatchSize());
// Batch size comes from the processor property, with expression language
// evaluated against the incoming flow file's attributes.
// NOTE(review): assumes the property always evaluates to a non-null integer —
// confirm the property descriptor enforces a default/validator.
it.batchSize(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions(input).asInteger());
/**
 * Applies the given batch size to the configured find iterable.
 *
 * @param batchSize the number of documents per batch
 * @return this configurator, for chaining
 */
@Override
public FindOperationConfigurator batchSize(int batchSize) {
    iterable.batchSize(batchSize);
    return this;
}
/**
 * Sets the number of documents to return per batch.
 *
 * @param batchSize the batch size to apply to the underlying iterable
 * @return this iterable, for method chaining
 */
@Override
public GridFSFindIterable batchSize(final int batchSize) {
    underlying.batchSize(batchSize);
    return this;
}
/**
 * Sets the number of documents to return per batch.
 *
 * @param batchSize the batch size to apply to the underlying iterable
 * @return this iterable, for method chaining
 */
@Override
public GridFSFindIterable batchSize(final int batchSize) {
    underlying.batchSize(batchSize);
    return this;
}
// Apply the cursor batch size configured in the step metadata.
// NOTE(review): presumably batchSize(...) returns the (possibly same) cursor
// instance, hence the reassignment — confirm against the cursor type's API.
cursorToUse = cursorToUse.batchSize(meta.getCursorBatchSize());
/**
 * Creates an iterator over variant documents.
 *
 * @param dbCursor                   the find iterable to iterate
 * @param documentToVariantConverter converter from BSON documents to variants
 * @param batchSize                  cursor batch size; values <= 0 keep the driver default
 */
VariantMongoDBIterator(FindIterable<Document> dbCursor, DocumentToVariantConverter documentToVariantConverter, int batchSize) {
    // Only override the driver's batch size when a positive value was given.
    if (batchSize > 0) {
        dbCursor.batchSize(batchSize);
    }
    this.documentToVariantConverter = documentToVariantConverter;
    // Open the cursor through fetch(); its exact role (timing/wrapping) is
    // defined elsewhere in this class.
    this.dbCursor = fetch(dbCursor::iterator);
}
/**
 * Opens a cursor over this file's chunk documents, starting at the given
 * chunk index and sorted by ascending chunk number {@code n}.
 *
 * @param startChunkIndex index of the first chunk to fetch (inclusive)
 * @return a cursor over the matching chunk documents
 */
private MongoCursor<Document> getCursor(final int startChunkIndex) {
    // Select all chunks of this file whose index is >= startChunkIndex.
    Document filter = new Document("files_id", fileId)
            .append("n", new Document("$gte", startChunkIndex));
    FindIterable<Document> findIterable = clientSession != null
            ? chunksCollection.find(clientSession, filter)
            : chunksCollection.find(filter);
    return findIterable
            .batchSize(batchSize)
            .sort(new Document("n", 1))
            .iterator();
}
/**
 * Opens a cursor over this file's chunk documents, starting at the given
 * chunk index and sorted by ascending chunk number {@code n}.
 *
 * @param startChunkIndex index of the first chunk to fetch (inclusive)
 * @return a cursor over the matching chunk documents
 */
private MongoCursor<Document> getCursor(final int startChunkIndex) {
    // Select all chunks of this file whose index is >= startChunkIndex.
    Document filter = new Document("files_id", fileId)
            .append("n", new Document("$gte", startChunkIndex));
    FindIterable<Document> findIterable = clientSession != null
            ? chunksCollection.find(clientSession, filter)
            : chunksCollection.find(filter);
    return findIterable
            .batchSize(batchSize)
            .sort(new Document("n", 1))
            .iterator();
}
/**
 * Fetches the stored events of a single aggregate, ordered by ascending
 * sequence number.
 *
 * @param collection          the events collection to query
 * @param aggregateIdentifier identifier of the aggregate to load
 * @param firstSequenceNumber lowest sequence number to include (inclusive)
 * @param batchSize           cursor batch size for the query
 * @return the matching events, in ascending sequence-number order
 */
@Override
public List<? extends DomainEventData<?>> findDomainEvents(MongoCollection<Document> collection,
                                                           String aggregateIdentifier,
                                                           long firstSequenceNumber,
                                                           int batchSize) {
    FindIterable<Document> cursor = collection
            .find(and(eq(eventConfiguration.aggregateIdentifierProperty(), aggregateIdentifier),
                      gte(eventConfiguration.sequenceNumberProperty(), firstSequenceNumber)))
            .sort(new BasicDBObject(eventConfiguration().sequenceNumberProperty(), ORDER_ASC))
            .batchSize(batchSize);
    // Re-check the lower bound after extraction: extractEvents may yield
    // events whose sequence number is below the requested start.
    return stream(cursor.spliterator(), false)
            .flatMap(this::extractEvents)
            .filter(event -> event.getSequenceNumber() >= firstSequenceNumber)
            .collect(Collectors.toList());
}
/**
 * Fetches the stored events of a single aggregate, ordered by ascending
 * sequence number.
 *
 * @param collection          the events collection to query
 * @param aggregateIdentifier identifier of the aggregate to load
 * @param firstSequenceNumber lowest sequence number to include (inclusive)
 * @param batchSize           cursor batch size for the query
 * @return the matching events, in ascending sequence-number order
 */
@Override
public List<? extends DomainEventData<?>> findDomainEvents(MongoCollection<Document> collection,
                                                           String aggregateIdentifier,
                                                           long firstSequenceNumber,
                                                           int batchSize) {
    FindIterable<Document> cursor = collection
            .find(and(eq(eventConfiguration.aggregateIdentifierProperty(), aggregateIdentifier),
                      gte(eventConfiguration.sequenceNumberProperty(), firstSequenceNumber)))
            .sort(new BasicDBObject(eventConfiguration().sequenceNumberProperty(), ORDER_ASC))
            .batchSize(batchSize);
    // Re-check the lower bound after extraction: extractEvents may yield
    // events whose sequence number is below the requested start.
    return stream(cursor.spliterator(), false)
            .flatMap(this::extractEvents)
            .filter(event -> event.getSequenceNumber() >= firstSequenceNumber)
            .collect(Collectors.toList());
}
/**
 * Executes the query described by the split, projecting only the requested
 * columns.
 *
 * @param split   the split identifying the collection and predicate
 * @param columns the columns to include in the projection
 * @return a cursor over the matching documents
 */
public MongoCursor<Document> execute(MongoSplit split, List<MongoColumnHandle> columns) {
    // Build an inclusion projection containing exactly the requested columns.
    Document projection = new Document();
    for (MongoColumnHandle column : columns) {
        projection.append(column.getName(), 1);
    }
    MongoCollection<Document> collection = getCollection(split.getSchemaTableName());
    FindIterable<Document> iterable = collection
            .find(buildQuery(split.getTupleDomain()))
            .projection(projection);
    // 0 means "not configured": leave the driver's default batch size in place.
    if (cursorBatchSize != 0) {
        iterable.batchSize(cursorBatchSize);
    }
    return iterable.iterator();
}
@Override public CloseableIterable<NodeDocument> getPossiblyDeletedDocs(final long fromModified, final long toModified) { //_deletedOnce == true && _modified >= fromModified && _modified < toModified Bson query = Filters.and( Filters.eq(DELETED_ONCE, true), Filters.gte(MODIFIED_IN_SECS, getModifiedInSecs(fromModified)), Filters.lt(MODIFIED_IN_SECS, getModifiedInSecs(toModified)) ); FindIterable<BasicDBObject> cursor = getNodeCollection() .find(query).batchSize(batchSize); return CloseableIterable.wrap(transform(cursor, input -> store.convertFromDBObject(NODES, input))); }
@Override public CloseableIterable<NodeDocument> getPossiblyDeletedDocs(final long fromModified, final long toModified) { //_deletedOnce == true && _modified >= fromModified && _modified < toModified Bson query = Filters.and( Filters.eq(DELETED_ONCE, true), Filters.gte(MODIFIED_IN_SECS, getModifiedInSecs(fromModified)), Filters.lt(MODIFIED_IN_SECS, getModifiedInSecs(toModified)) ); FindIterable<BasicDBObject> cursor = getNodeCollection() .find(query).batchSize(batchSize); return CloseableIterable.wrap(transform(cursor, input -> store.convertFromDBObject(NODES, input))); }
/**
 * Re-opens the cursor, resuming after the given document id when provided.
 *
 * @param lastObjectId the {@code _id} of the last document already consumed,
 *                     or {@code null} to start from the beginning
 * @return this cursor, repositioned
 */
protected MongoPersistentCursor resume(Object lastObjectId) {
    // Restrict to documents after the last seen _id, if any.
    Bson resumeQuery = lastObjectId == null
            ? this.query
            : Filters.and(Filters.gt("_id", lastObjectId), this.query);
    FindIterable<Document> iterable = newFindIterable(resumeQuery, this.projection, this.options);
    // Apply natural order when the caller did not request an explicit sort.
    if (!options.containsKey(QueryOptions.SORT)) {
        iterable.sort(Sorts.ascending("$natural"));
    }
    mongoCursor = iterable.batchSize(batchSize).limit(limit).skip(skip).iterator();
    return this;
}
/**
 * Starts a scroll over the documents matching the evaluator's query and
 * returns the first page of results.
 *
 * @param evaluator        the query evaluator describing filter and projection
 * @param batchSize        number of documents fetched per batch
 * @param keepAliveSeconds how long the server-side cursor stays registered
 * @return the first scroll page, carrying the scroll id for continuation
 */
@Override
public ScrollResult<String> scroll(DBSExpressionEvaluator evaluator, int batchSize, int keepAliveSeconds) {
    // Reap any cursors whose keep-alive has expired before opening a new one.
    cursorService.checkForTimedOutScroll();
    MongoDBRepositoryQueryBuilder queryBuilder = new MongoDBRepositoryQueryBuilder(this,
            evaluator.getExpression(), evaluator.getSelectClause(), null, evaluator.pathResolver,
            evaluator.fulltextSearchDisabled);
    queryBuilder.walk();
    if (queryBuilder.hasFulltext && isFulltextSearchDisabled()) {
        throw new QueryParseException("Fulltext search disabled by configuration");
    }
    Bson filter = queryBuilder.getQuery();
    // Security filtering: restrict the query to the evaluator's principals.
    addPrincipals((Document) filter, evaluator.principals);
    Bson keys = queryBuilder.getProjection();
    if (log.isTraceEnabled()) {
        logQuery(filter, keys, null, 0, 0);
    }
    MongoCursor<Document> cursor = coll.find(filter).projection(keys).batchSize(batchSize).iterator();
    String scrollId = cursorService.registerCursor(cursor, batchSize, keepAliveSeconds);
    return scroll(scrollId);
}