Refine search
/**
 * <p>Creates a builder for an unordered bulk operation, consisting of an unordered collection of write requests, which can be any
 * combination of inserts, updates, replaces, or removes. Write requests included in the bulk operation will be executed in an undefined
 * order, and all requests will be executed even if some fail.</p>
 *
 * <p>Note: While this bulk write operation will execute on MongoDB 2.4 servers and below, the writes will be performed one at a time,
 * as that is the only way to preserve the semantics of the value returned from execution or the exception thrown.</p>
 *
 * @return the builder
 * @since 2.12
 * @mongodb.driver.manual reference/method/db.collection.initializeUnorderedBulkOp/ initializeUnorderedBulkOp()
 */
public BulkWriteOperation initializeUnorderedBulkOperation() {
    // 'false' selects unordered semantics; 'this' is the target collection the operation writes to
    return new BulkWriteOperation(false, this);
}
/**
 * Adds a request to update one document in the collection that matches the query with which this builder was created.
 *
 * @param update the modifications to apply to the first matching document (an update-operator document,
 *               e.g. {@code $set}); note this is the update document, not a match criteria
 */
public void updateOne(final DBObject update) {
    // multi=false => at most one matching document is updated; query/upsert/codec/collation/arrayFilters
    // were all captured when this builder was created
    bulkWriteOperation.addRequest(new UpdateRequest(query, update, false, upsert, queryCodec, collation, arrayFilters));
}
}
try (DBCursor cursor = collection.find(mongoQuery, null)) { BulkWriteOperation bw = collection.initializeUnorderedBulkOperation(); bw.find(new BasicDBObject("_id", id)).remove(); if (writeConcern == null) { LOGGER.debug("Bulk deleting docs"); result = bw.execute(); } else { LOGGER.debug("Bulk deleting docs with writeConcern={} from execution", writeConcern); result = bw.execute(writeConcern); try { if(writeConcern==null) { numDeleted=collection.remove(mongoQuery).getN(); } else { numDeleted=collection.remove(mongoQuery,writeConcern).getN();
DB db = m.getDB( "test" ); DBCollection coll = db.getCollection( "bulk" ); coll.drop(); coll.createIndex(new BasicDBObject("i", 1), new BasicDBObject("unique", true)); BulkWriteOperation bulkWrite = coll.initializeUnorderedBulkOperation(); bulkWrite.insert(new BasicDBObject("i", i)); bulkWrite.insert(new BasicDBObject("i", i)); List<BulkWriteError> errors = null; try { result = bulkWrite.execute(); } catch (BulkWriteException bwe) { bwe.printStackTrace();
DBObject updatedDoc=collection.findOne(findQuery); if(updatedDoc!=null) { BulkWriteOperation nestedBwo=collection.initializeUnorderedBulkOperation(); nestedBwo.find(replaceQuery).replaceOne(newDoc); try { if(nestedBwo.execute().getMatchedCount()==1) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Successfully retried to update a doc: replaceQuery={} newDoc={}", replaceQuery, newDoc);
if (isOrderedBulkOperation()) builder = collection.initializeOrderedBulkOperation(); builder = collection.initializeUnorderedBulkOperation(); bulkWriteOperationMap.get(tableName).insert(documents.get(tableName)); bulkWriteOperationMap.get(tableName).find(new BasicDBObject("_id", node.getEntityId())) .upsert().replaceOne(documents.get(tableName));
DB db = mongoClient.getDB("yourDB"); DBCollection coll = db.getCollection("yourCollection"); BulkWriteOperation builder = coll.initializeUnorderedBulkOperation(); for(DBObject doc :yourList) { builder.insert(doc); } BulkWriteResult result = builder.execute(); return result.isAcknowledged();
DBCollection coll = db.getCollection("user"); BulkWriteOperation bulk = coll.initializeUnorderedBulkOperation(); bulk.find(new BasicDBObject("z", 1)).upsert().update(new BasicDBObject("$inc", new BasicDBObject("y", -1))); bulk.find(new BasicDBObject("z", 1)).upsert().update(new BasicDBObject("$inc", new BasicDBObject("y", -1))); bulk.execute();
DBCollection collection = mongoOperation.getCollection("collection"); BulkWriteOperation bulkWriteOperation = collection.initializeOrderedBulkOperation(); DBObject content = new BasicDBObject("field","value"); bulkWriteOperation.insert(content); BulkWriteResult writeResult = bulkWriteOperation.execute(); System.out.println(writeResult);
MongoClient mongo = new MongoClient("localhost", port_number); DB db = mongo.getDB("db_name"); ArrayList<DBObject> objectList; // Fill this list with your objects to insert BulkWriteOperation operation = col.initializeOrderedBulkOperation(); for (int i = 0; i < objectList.size(); i++) { operation.insert(objectList.get(i)); } BulkWriteResult result = operation.execute();
/**
 * Executes every pending bulk write operation in the supplied map using the configured
 * write concern, flushing the current batch to MongoDB.
 *
 * @param bulkWriteOperationMap the bulk write operation map, keyed by table/collection name
 * @throws KunderaException if any bulk write fails, wrapping the underlying Mongo exception
 */
private void onFlushBatch(Map<String, BulkWriteOperation> bulkWriteOperationMap)
{
    // Nothing queued: nothing to flush.
    if (bulkWriteOperationMap.isEmpty())
    {
        return;
    }
    for (BulkWriteOperation operation : bulkWriteOperationMap.values())
    {
        try
        {
            operation.execute(getWriteConcern());
        }
        catch (BulkWriteException bwex)
        {
            // Server reported per-write errors for this batch; surface as a persistence failure.
            log.error("Batch insertion is not performed due to error in write command. Caused By: ", bwex);
            throw new KunderaException(
                    "Batch insertion is not performed due to error in write command. Caused By: ", bwex);
        }
        catch (MongoException mex)
        {
            // Any other driver-level failure (connectivity, etc.).
            log.error("Batch insertion is not performed. Caused By: ", mex);
            throw new KunderaException("Batch insertion is not performed. Caused By: ", mex);
        }
    }
}
/**
 * Persists the given domain objects in a single unordered bulk write, upserting each one
 * by its {@code _id} so existing documents are replaced and missing ones are inserted.
 *
 * @param objects the domain objects to save
 */
public void save(Collection<MyDomainObject> objects) {
    final BulkWriteOperation bulk = dbCollection.initializeUnorderedBulkOperation();
    for (final MyDomainObject domainObject : objects) {
        final DBObject document = convert(domainObject);
        final BasicDBObject idFilter = new BasicDBObject("_id", new ObjectId(domainObject.getId()));
        bulk.find(idFilter).upsert().replaceOne(document);
    }
    // One round trip for the whole batch, honoring the configured write concern.
    bulk.execute(writeConcern);
}
if(collection.equals(currentColl) == false){ executeBatchWrite(currentOp, sample); currentColl = collection; currentOp = collection.initializeUnorderedBulkOperation(); currentOpOffset = sampleIdx; currentOp.insert(doc);
List<Future<?>> runningTasks = new ArrayList<Future<?>>(maxRunningTasks); BulkWriteOperation bulkOperation = collection.initializeOrderedBulkOperation(); try { while (objects.hasNext()) { RevObject object = objects.next(); bulkOperation.insert(toDocument(object)); bulkOperation = collection.initializeOrderedBulkOperation(); ids = Lists.newArrayListWithCapacity(bulkSize);
if(collection.equals(currentColl) == false){ executeBatchUpdate(currentOp, sample); currentColl = collection; currentOp = collection.initializeUnorderedBulkOperation(); currentOpOffset = sampleIdx; currentOp.find(query).upsert().updateOne(update);
/**
 * Adds a document to the current batch. The document should
 * contain the original docver as read from the db; that original docver is used to build
 * the replace query before the document's docver is rewritten to the batch's docver.
 */
@Override
public void addDoc(DBObject doc) {
    // Build the replace criteria from the doc's original version, so the replace only
    // matches if nobody else updated the document since it was read.
    DBObject q=writeReplaceQuery(doc);
    // Rewrite the document's version markers to this batch's docver before writing it back.
    DocVerUtil.cleanupOldDocVer(doc,docVer);
    DocVerUtil.setDocVer(doc,docVer);
    LOGGER.debug("replaceQuery={}",q);
    // Queue the replace on the bulk operation and remember the doc for post-execute bookkeeping.
    bwo.find(q).replaceOne(doc);
    batch.add(new BatchDoc(doc));
}
/**
 * Add an insert request to the bulk operation.
 *
 * <p>If the document has no {@code _id} field, a new {@link ObjectId} is generated and set on it —
 * note this mutates the caller's document instance.</p>
 *
 * @param document the document to insert
 */
public void insert(final DBObject document) {
    // Reject further requests once the bulk operation has been executed.
    isTrue("already executed", !closed);
    if (document.get(ID_FIELD_NAME) == null) {
        document.put(ID_FIELD_NAME, new ObjectId());
    }
    addRequest(new InsertRequest(document, collection.getObjectCodec()));
}
.initializeUnorderedBulkOperation(); .createIndex(new BasicDBObject(TIMESTAMP_FIELD, -1)); bulkInsertOperation.insert(object); stratioStreamingMessage.getValue().execute();
if (isOrderedBulkOperation()) builder = collection.initializeOrderedBulkOperation(); builder = collection.initializeUnorderedBulkOperation(); bulkWriteOperationMap.get(tableName).insert(documents.get(tableName)); bulkWriteOperationMap.get(tableName).find(new BasicDBObject("_id", node.getEntityId())) .upsert().replaceOne(documents.get(tableName));
DBCollection collection = database.getCollection("myCollection"); BulkWriteOperation bulkWriteOperation= collection.initializeUnorderedBulkOperation(); for (int i = 0; i <= 1000000; i++) { BasicDBObject newDataObject = new BasicDBObject(); IncrementalStat incrementalStat = new IncrementalStat(); double randomCount = (10 + (Math.random() * 300)); for (int j = 0; j < randomCount; j++) { number = i + randomCount; } newDataObject.put("_id", i); newDataObject.put("myNumber", number); bulkWriteOperation.insert(newDataObject); } //write all data using bulk execute BulkWriteResult result=bulkWriteOperation.execute();