// Batches two upsert-with-$inc operations against the "user" collection into a
// single unordered bulk round trip, each decrementing field y where z == 1.
DBCollection coll = db.getCollection("user");
BulkWriteOperation bulk = coll.initializeUnorderedBulkOperation();
bulk.find(new BasicDBObject("z", 1))
        .upsert()
        .update(new BasicDBObject("$inc", new BasicDBObject("y", -1)));
bulk.find(new BasicDBObject("z", 1))
        .upsert()
        .update(new BasicDBObject("$inc", new BasicDBObject("y", -1)));
bulk.execute();
/**
 * Executes every pending bulk write operation in the map, translating driver
 * failures into {@link KunderaException}s after logging them.
 *
 * @param bulkWriteOperationMap the bulk write operation map (keyed by collection)
 */
private void onFlushBatch(Map<String, BulkWriteOperation> bulkWriteOperationMap)
{
    // Guard clause instead of wrapping the whole loop in the non-empty check.
    if (bulkWriteOperationMap.isEmpty())
    {
        return;
    }
    for (BulkWriteOperation bulkOp : bulkWriteOperationMap.values())
    {
        try
        {
            bulkOp.execute(getWriteConcern());
        }
        catch (BulkWriteException bwex)
        {
            log.error("Batch insertion is not performed due to error in write command. Caused By: ", bwex);
            throw new KunderaException(
                    "Batch insertion is not performed due to error in write command. Caused By: ", bwex);
        }
        catch (MongoException mex)
        {
            log.error("Batch insertion is not performed. Caused By: ", mex);
            throw new KunderaException("Batch insertion is not performed. Caused By: ", mex);
        }
    }
}
// Bulk-inserts every document in yourList in one unordered batch and reports
// whether the server acknowledged the write.
DB db = mongoClient.getDB("yourDB");
DBCollection coll = db.getCollection("yourCollection");
BulkWriteOperation builder = coll.initializeUnorderedBulkOperation();
for (DBObject doc : yourList) {
    builder.insert(doc);
}
return builder.execute().isAcknowledged();
template.execute(new CollectionCallback<Void> { Void doInCollection(DBCollection collection) { BulkWriteOperation operation = collection.initialize(Uno|O)rderedBulkOperation(); // bulk code goes here operation.execute(); return null; } }
/**
 * Upserts each domain object by its _id using a single unordered bulk write,
 * replacing the stored document wholesale with the converted form.
 * NOTE(review): assumes getId() returns a valid 24-char hex ObjectId string — confirm.
 */
public void save(Collection<MyDomainObject> objects) {
    BulkWriteOperation bulkWriter = dbCollection.initializeUnorderedBulkOperation();
    for (MyDomainObject domainObject : objects) {
        ObjectId id = new ObjectId(domainObject.getId());
        bulkWriter.find(new BasicDBObject("_id", id)).upsert().replaceOne(convert(domainObject));
    }
    bulkWriter.execute(writeConcern);
}
// Queues a single one-field document on an ordered bulk operation, executes it,
// and prints the driver's BulkWriteResult.
DBCollection collection = mongoOperation.getCollection("collection");
BulkWriteOperation bulkWriteOperation = collection.initializeOrderedBulkOperation();
bulkWriteOperation.insert(new BasicDBObject("field", "value"));
System.out.println(bulkWriteOperation.execute());
/**
 * Executes a prepared bulk insert, logging documents-sent vs. documents-inserted.
 *
 * @param batchOp   the bulk operation to run; the call is a no-op when null
 * @param fullBatch the documents that were queued (used only for the log line)
 */
private void executeBatchWrite(BulkWriteOperation batchOp, BasicDBList fullBatch) {
    if (batchOp == null) {
        return;
    }
    BulkWriteResult result = batchOp.execute();
    logger.debug("Wrote sample batch - sent {} : inserted {}",
            fullBatch.size(), result.getInsertedCount());
}
}
MongoClient mongo = new MongoClient("localhost", port_number); DB db = mongo.getDB("db_name"); ArrayList<DBObject> objectList; // Fill this list with your objects to insert BulkWriteOperation operation = col.initializeOrderedBulkOperation(); for (int i = 0; i < objectList.size(); i++) { operation.insert(objectList.get(i)); } BulkWriteResult result = operation.execute();
/**
 * Executes a prepared bulk update, logging documents-sent vs. documents-modified.
 *
 * @param batchOp   the bulk operation to run; the call is a no-op when null
 * @param fullBatch the documents that were queued (used only for the log line)
 */
private void executeBatchUpdate(BulkWriteOperation batchOp, BasicDBList fullBatch) {
    if (batchOp == null) {
        return;
    }
    BulkWriteResult result = batchOp.execute();
    logger.debug("Wrote sample batch - sent {} : updated {}",
            fullBatch.size(), result.getModifiedCount());
}
// Upserts the document with _id=1001, replacing it wholesale with a new version.
// FIX(review): dropped the redundant (DB) cast — MongoClient.getDB() already returns DB.
MongoClient mongo = new MongoClient("localhost", 27017);
DB db = mongo.getDB("test1");
DBCollection collection = db.getCollection("collection");
BulkWriteOperation builder = collection.initializeUnorderedBulkOperation();
builder.find(new BasicDBObject("_id", 1001)).upsert()
        .replaceOne(new BasicDBObject("_id", 1001).append("author", "newName"));
// append all other documents
builder.execute();
// Copies every projectA document to projectB, flushing the ordered bulk every
// 1000 records and once more at the end for any remainder.
// FIX(review): the re-initialization inside the loop referenced an undeclared
// variable 'collection'; it now reuses 'coll' like the rest of the snippet.
BulkWriteOperation bulk = coll.initializeOrderedBulkOperation();
Integer count = 0;
DBCursor cursor = coll.find(new BasicDBObject("projectid", "projectA"));
while (cursor.hasNext()) {
    DBObject curRecord = cursor.next();
    curRecord.removeField("_id"); // drop _id so the insert gets a fresh one (no clone needed)
    curRecord.put("projectid", "projectB"); // retarget the copy
    bulk.insert(curRecord);
    count++;
    if (count % 1000 == 0) {
        bulk.execute();
        bulk = coll.initializeOrderedBulkOperation();
    }
}
if (count % 1000 != 0) {
    bulk.execute();
}
/**
 * Executes every pending bulk write operation in the map, translating driver
 * failures into {@link KunderaException}s after logging them.
 *
 * @param bulkWriteOperationMap
 *            the bulk write operation map
 */
private void onFlushBatch(Map<String, BulkWriteOperation> bulkWriteOperationMap) {
    if (!bulkWriteOperationMap.isEmpty()) {
        for (BulkWriteOperation builder : bulkWriteOperationMap.values()) {
            try {
                builder.execute(getWriteConcern());
            }
            // NOTE(review): BulkWriteException is presumably a subtype of
            // MongoException, so this more specific catch must stay first — confirm
            // against the driver version in use.
            catch (BulkWriteException bwex) {
                log.error("Batch insertion is not performed due to error in write command. Caused By: ", bwex);
                throw new KunderaException(
                        "Batch insertion is not performed due to error in write command. Caused By: ", bwex);
            } catch (MongoException mex) {
                log.error("Batch insertion is not performed. Caused By: ", mex);
                throw new KunderaException("Batch insertion is not performed. Caused By: ", mex);
            }
        }
    }
}
/**
 * Executes the pending bulk write and notifies the listener about each id:
 * ids whose index appears in the result's upsert list are reported via
 * {@code listener.found(...)}, the remaining non-null ids via
 * {@code listener.inserted(...)}. Checks cancelCondition between steps so a
 * cancel aborts promptly without further callbacks.
 */
@Override
public void run() {
    if (cancelCondition.get()) {
        return;
    }
    BulkWriteResult bulkResult = bulkOperation.execute(WriteConcern.ACKNOWLEDGED);
    List<BulkWriteUpsert> upserts = bulkResult.getUpserts();
    for (BulkWriteUpsert upsert : upserts) {
        if (cancelCondition.get()) {
            return;
        }
        // Null out the slot for each upserted entry and report whatever id was
        // there. NOTE(review): assumes 'ids' is index-aligned with the queued
        // bulk operations — confirm against the code that builds both.
        int index = upsert.getIndex();
        ObjectId existing = ids.set(index, null);
        listener.found(existing, null);
    }
    // Slots not cleared above still hold their id and are reported as inserts.
    for (ObjectId inserted : ids) {
        if (cancelCondition.get()) {
            return;
        }
        if (inserted != null) {
            listener.inserted(inserted, null);
        }
    }
    ids.clear();
}
// Generates ~1,000,001 documents (_id = i, myNumber = i plus a random offset)
// and inserts them all in one unordered bulk execute.
// FIX(review): 'number' was never declared, the unused IncrementalStat instance
// is removed, and the inner loop — which assigned the identical value on every
// iteration — is collapsed to a single assignment.
// NOTE(review): a single bulk op holding a million inserts may exceed driver
// batch limits; consider flushing in chunks — confirm against the driver docs.
DBCollection collection = database.getCollection("myCollection");
BulkWriteOperation bulkWriteOperation = collection.initializeUnorderedBulkOperation();
for (int i = 0; i <= 1000000; i++) {
    BasicDBObject newDataObject = new BasicDBObject();
    double randomCount = 10 + (Math.random() * 300);
    double number = i + randomCount;
    newDataObject.put("_id", i);
    newDataObject.put("myNumber", number);
    bulkWriteOperation.insert(newDataObject);
}
// write all data using bulk execute
BulkWriteResult result = bulkWriteOperation.execute();
// Upserts every document matched by the empty filter, $set-ing my_field to [1, 2, 3].
// FIX(review): 'dbList' was referenced inside the $set document before it was
// declared (a compile error); the list is now built first. Also dropped the
// redundant (DB) cast — MongoClient.getDB() already returns DB.
MongoClient mongo = new MongoClient("localhost", 27017);
DB db = mongo.getDB("testDB");
DBCollection collection = db.getCollection("collection");
BasicDBList dbList = new BasicDBList();
dbList.add(1);
dbList.add(2);
dbList.add(3);
BasicDBObject obj = new BasicDBObject();
obj.append("$set", new BasicDBObject("my_field", dbList));
BulkWriteOperation bwo = collection.initializeUnorderedBulkOperation();
bwo.find(new BasicDBObject()).upsert().update(obj);
bwo.execute();
// Retry path: queue a replaceOne for the doc matching replaceQuery, execute the
// bulk op, and log success when exactly one document matched.
// NOTE(review): this fragment is truncated — the try block, both if blocks, and
// the debug call are never closed in this excerpt; do not edit in isolation.
nestedBwo.find(replaceQuery).replaceOne(newDoc); try { if(nestedBwo.execute().getMatchedCount()==1) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Successfully retried to update a doc: replaceQuery={} newDoc={}", replaceQuery, newDoc);
// Executes the prepared bulk write, intending to capture per-operation errors on
// failure (the 'errors' list is declared but not yet populated in this excerpt).
// NOTE(review): truncated fragment — the try/catch is never closed here, and
// printStackTrace() should become a proper logger call once completed.
List<BulkWriteError> errors = null; try { result = bulkWrite.execute(); } catch (BulkWriteException bwe) { bwe.printStackTrace();
// Executes whatever operation the message's value carries.
// NOTE(review): can't tell from this excerpt what getValue() returns or what
// its execute() does — confirm against the StratioStreamingMessage type.
stratioStreamingMessage.getValue().execute();