Refine search
// NOTE(review): incomplete/garbled fragment of a metrics-history REST handler — `obj` is
// referenced before any visible declaration and the braces/parentheses do not balance in
// this chunk, so the code is left byte-identical. Observation for whoever owns the full
// file: the range criterion is nested inside itself, new BasicDBObject("$gt",
// new BasicDBObject("$gt", new Date(after))), which has no field name and is almost
// certainly wrong — a timestamp range filter would look like
// new BasicDBObject("timestamp", new BasicDBObject("$gt", new Date(after))).
// Confirm against the original source before editing.
checkPermission(RestPermissions.METRICS_READHISTORY, metricName); BasicDBObject andQuery = new BasicDBObject(); obj.add(new BasicDBObject("name", metricName)); if (after != -1) { obj.add(new BasicDBObject("$gt", new BasicDBObject("$gt", new Date(after)))); andQuery.put("$and", obj); final DBCollection dbCollection = mongoConnection.getDatabase().getCollection("graylog2_metrics"); try(DBCursor cursor = dbCollection.find(andQuery).sort(new BasicDBObject("timestamp", 1))) { final Map<String, Object> metricsData = Maps.newHashMap(); metricsData.put("name", metricName); metricsData.put("values", values); while (cursor.hasNext()) { final DBObject value = cursor.next(); metricsData.put("node", value.get("node")); final MetricType metricType = MetricType.valueOf(((String) value.get("type")).toUpperCase(Locale.ENGLISH)); Map<String, Object> dataPoint = Maps.newHashMap(); values.add(dataPoint); dataPoint.put("timestamp", value.get("timestamp")); metricsData.put("type", metricType.toString().toLowerCase(Locale.ENGLISH));
/**
 * Inserts the given documents into this collection. The collection is created on
 * the server if it does not yet exist, and any document lacking an {@code _id}
 * field has one added.
 *
 * @param writeConcern the {@code WriteConcern} to apply to this insert
 * @param documents    the {@code DBObject}s to insert
 * @return the result of the insert operation
 * @throws com.mongodb.DuplicateKeyException if the write duplicated a unique key
 * @throws com.mongodb.WriteConcernException if the write failed for some other
 *                                           insert-specific reason
 * @throws MongoException                    if the operation failed for any other reason
 * @mongodb.driver.manual tutorial/insert-documents/ Insert Documents
 */
public WriteResult insert(final WriteConcern writeConcern, final DBObject... documents) {
    // Argument-order convenience overload: delegate to (documents, writeConcern).
    return insert(documents, writeConcern);
}
/**
 * Looks up the stored list value for the given key.
 *
 * @param key the value of the {@code "key"} field to search for
 * @return the {@code "value"} field of the matching document as a
 *         {@code BasicDBList}, or {@code null} if no document matches
 */
public BasicDBList getList(String key) {
    DBCollection coll = getCollection();
    DBObject query = new BasicDBObject();
    query.put("key", key);
    DBObject result = coll.findOne(query);
    // Guard against a missing document: the original dereferenced `result`
    // unconditionally and threw a NullPointerException when no match existed.
    if (result == null) {
        return null;
    }
    return (BasicDBList) result.get("value");
}
/**
 * Inserts the given list of documents into this collection using the collection's
 * default write concern. The collection is created on the server if it does not
 * yet exist, and any document lacking an {@code _id} field has one added.
 *
 * @param documents the list of {@code DBObject}s to insert
 * @return the result of the insert operation
 * @throws com.mongodb.DuplicateKeyException if the write duplicated a unique key
 * @throws com.mongodb.WriteConcernException if the write failed for some other
 *                                           insert-specific reason
 * @throws MongoException                    if the operation failed for any other reason
 * @mongodb.driver.manual tutorial/insert-documents/ Insert Documents
 */
public WriteResult insert(final List<? extends DBObject> documents) {
    // Delegate to the overload that takes an explicit write concern.
    return insert(documents, getWriteConcern());
}
/**
 * Prepares the backing collection: fetches it by name, ensures a unique ascending
 * index named "unique_type" on the {@code type} field, and switches the collection
 * to journaled writes.
 */
@VisibleForTesting
static DBCollection prepareCollection(final MongoConnection mongoConnection) {
    final DBCollection collection = mongoConnection.getDatabase().getCollection(COLLECTION_NAME);
    // Enforce at most one document per type.
    collection.createIndex(DBSort.asc("type"), "unique_type", true);
    collection.setWriteConcern(WriteConcern.JOURNALED);
    return collection;
}
/**
 * Maps the entity to a {@code DBObject}, inserts it with the write concern enforced
 * for its type, and runs the post-save lifecycle, returning the resulting key.
 */
protected <T> Key<T> insert(final DBCollection dbColl, final T entity, final InsertOptions options) {
    // Collects every object touched while mapping, for the post-save callbacks.
    final LinkedHashMap<Object, DBObject> involvedObjects = new LinkedHashMap<Object, DBObject>();
    final DBObject mapped = entityToDBObj(entity, involvedObjects);
    dbColl.insert(singletonList(mapped),
                  enforceWriteConcern(options, entity.getClass()).getOptions());
    // Single entity in, single key out.
    return postSaveOperations(singletonList(entity), involvedObjects, dbColl.getName()).get(0);
}
// NOTE(review): fragment of a sharding setup routine (addShard / listShards /
// enableSharding, then shardCollection on shardDatabase.shardCollection keyed by
// shardKey, then a dump of config.shards). The closing braces of the trailing
// while-loop lie outside this chunk, so the code is left byte-identical. The
// DBCursor opened on config.shards is never closed in this chunk — confirm the
// full file closes it, or wrap it in try-with-resources there.
cr = mongoAdminDB.command(new BasicDBObject("addShard", command)); logger.info(cr.toString()); cr = mongoAdminDB.command(new BasicDBObject("listShards", 1)); logger.info(cr.toString()); cr = mongoAdminDB.command(new BasicDBObject("enableSharding", this.shardDatabase)); logger.info(cr.toString()); DB db = mongo.getDB(this.shardDatabase); db.getCollection(this.shardCollection).createIndex(this.shardKey); DBObject cmd = new BasicDBObject(); cmd.put("shardCollection", this.shardDatabase + "." + this.shardCollection); cmd.put("key", new BasicDBObject(this.shardKey, 1)); cr = mongoAdminDB.command(cmd); logger.info(cr.toString()); DBCursor cursor = mongo.getDB("config").getCollection("shards").find(); while (cursor.hasNext()) { DBObject item = cursor.next(); logger.info(item.toString());
/**
 * Reads the newest entry of the given shard's oplog and returns its timestamp.
 * Sorting on {@code $natural} descending with a limit of 1 yields the most
 * recently written entry.
 */
private Timestamp<?> getCurrentOplogTimestamp(MongoClient shardClient) {
    final DBCollection oplog = shardClient
            .getDB(MongoDBRiver.MONGODB_LOCAL_DATABASE)
            .getCollection(MongoDBRiver.OPLOG_COLLECTION);
    final DBObject newestFirst = new BasicDBObject("$natural", -1);
    try (DBCursor latest = oplog.find().sort(newestFirst).limit(1)) {
        // NOTE(review): assumes the oplog is non-empty; next() throws otherwise — confirm.
        return Timestamp.on(latest.next());
    }
}
// Connects to the local MongoDB instance and prints every document in
// test.testCollection whose _id equals the given literal.
//
// NOTE(review): the _id is queried as a plain String; if these documents were
// stored with ObjectId _ids the query will never match — confirm, and use
// new ObjectId("51a29f6413dc992c24e0283e") if so.
MongoClient mongoClient = new MongoClient();
try {
    DB db = mongoClient.getDB("test");
    DBCollection coll = db.getCollection("testCollection");
    BasicDBObject query = new BasicDBObject("_id", "51a29f6413dc992c24e0283e");
    // try-with-resources replaces the original manual try/finally and still
    // guarantees the cursor is closed if iteration throws.
    try (DBCursor cursor = coll.find(query)) {
        while (cursor.hasNext()) {
            System.out.println(cursor.next());
        }
    }
} finally {
    // The original leaked the client connection; close it explicitly.
    mongoClient.close();
}
// NOTE(review): garbled fragment of MongoDBRiver's initial-import loop — the cursor
// declaration that the dangling `.find(...).sort(new BasicDBObject("_id", 1))` chain
// belongs to is missing, and the if/else nesting does not balance in this chunk (two
// `else` branches, unmatched braces). Left byte-identical; recover the original from
// the full file before editing. What is visible: documents are streamed in _id order,
// every entry but the last is queued without a timestamp, and the final entry carries
// `timestamp` to mark the end of the import; the second branch does the same for
// GridFS files resolved via grid.findOne(ObjectId).
logger.info("MongoDBRiver is beginning initial import of " + collection.getFullName()); boolean inProgress = true; String lastId = null; logger.trace("Collection {} - count: {}", collection.getName(), safeCount(collection, timestamp.getClass())); .find(getFilterForInitialImport(definition.getMongoCollectionFilter(), lastId)) .sort(new BasicDBObject("_id", 1)); while (cursor.hasNext() && context.getStatus() == Status.RUNNING) { DBObject object = cursor.next(); count++; if (cursor.hasNext()) { lastId = addInsertToStream(null, applyFieldFilter(object), collection.getName()); } else { logger.debug("Last entry for initial import of {} - add timestamp: {}", collection.getFullName(), timestamp); lastId = addInsertToStream(timestamp, applyFieldFilter(object), collection.getName()); logger.info("Number of documents indexed in initial import of {}: {}", collection.getFullName(), count); } else { DBObject object = cursor.next(); if (object instanceof GridFSDBFile) { GridFSDBFile file = grid.findOne(new ObjectId(object.get(MongoDBRiver.MONGODB_ID_FIELD).toString())); if (cursor.hasNext()) { lastId = addInsertToStream(null, file); } else { logger.debug("Last entry for initial import of {} - add timestamp: {}", collection.getFullName(), timestamp); lastId = addInsertToStream(timestamp, file);
// Removes the userId field from the document with the given _id in
// testDB.collection via a $unset update.
MongoClient mongo = new MongoClient("localhost", 27017);
try {
    // The original cast mongo.getDB(...) to DB, which is already its static type.
    DB db = mongo.getDB("testDB");
    DBCollection collection = db.getCollection("collection");
    DBObject query = new BasicDBObject("_id", "10-100-5675234");
    DBObject update = new BasicDBObject();
    update.put("$unset", new BasicDBObject("userId", ""));
    WriteResult result = collection.update(query, update);
} finally {
    // Close the client even when the update throws; the original leaked the
    // connection on any failure.
    mongo.close();
}
/**
 * Counts the stored outputs grouped by their type field.
 *
 * @return map from output type to the number of outputs of that type; outputs
 *         without a type field are skipped
 */
@Override
public Map<String, Long> countByType() {
    final Map<String, Long> countsByType = new HashMap<>();
    // The projection fetches only the "type" field of each output document.
    try (DBCursor outputs = dbCollection.find(null, new BasicDBObject(OutputImpl.FIELD_TYPE, 1))) {
        for (DBObject output : outputs) {
            final String type = (String) output.get(OutputImpl.FIELD_TYPE);
            if (type != null) {
                // merge() replaces the original's explicit null-check-and-increment.
                countsByType.merge(type, 1L, Long::sum);
            }
        }
    }
    return countsByType;
}
/**
 * Verifies that a query comment set via the deprecated API ends up recorded in
 * the profiler's system.profile collection.
 */
@Test
@SuppressWarnings("deprecation")
public void testCommentsShowUpInLogsOld() {
    getDs().save(asList(new Pic("pic1"), new Pic("pic2"), new Pic("pic3"), new Pic("pic4")));
    // Profiling level 2 records every operation in system.profile.
    getDb().command(new BasicDBObject("profile", 2));

    String expectedComment = "test comment";
    toList(getDs().find(Pic.class).comment(expectedComment).find());

    DBCollection profile = getDb().getCollection("system.profile");
    assertNotEquals(0, profile.count());

    DBObject record = profile.findOne(new BasicDBObject("op", "query")
        .append("ns", getDs().getCollection(Pic.class).getFullName()));
    assertEquals(record.toString(), expectedComment, getCommentFromProfileRecord(record));

    turnOffProfilingAndDropProfileCollection();
}
// NOTE(review): garbled test fragment — the assignments for `parentLoaded` (and the
// corresponding re-load near the end) were truncated, leaving bare
// `stuff.findOne(...)), new DefaultEntityCache());` expressions that cannot parse,
// so the code is left byte-identical. Restore the missing
// `final RecursiveParent parentLoaded = getMorphia().fromDBObject(getDs(),
// RecursiveParent.class, ...)` lines from the original source before editing.
@Test public void testRecursiveReference() { final DBCollection stuff = getDb().getCollection("stuff"); stuff.save(parentDbObj); stuff.save(childDbObj); stuff.findOne(new BasicDBObject("_id", parentDbObj.get("_id"))), new DefaultEntityCache()); final RecursiveChild childLoaded = getMorphia().fromDBObject(getDs(), RecursiveChild.class, stuff.findOne(new BasicDBObject("_id", childDbObj.get("_id"))), new DefaultEntityCache()); childLoaded.setParent(parentLoaded); stuff.save(getMorphia().toDBObject(parentLoaded)); stuff.save(getMorphia().toDBObject(childLoaded)); stuff.findOne(new BasicDBObject("_id", parentDbObj.get("_id"))), new DefaultEntityCache()); final RecursiveChild finalChildLoaded = getMorphia().fromDBObject(getDs(), RecursiveChild.class, stuff.findOne(new BasicDBObject("_id", childDbObj.get("_id"))), new DefaultEntityCache());
/**
 * Counts the documents in the configured collection that match the given query.
 *
 * @param query the filter to apply
 * @return the number of matching documents
 * @throws CompatibilityDataException if the count could not be performed
 */
public int count(DBObject query) throws CompatibilityDataException {
    MongoClient client = null;
    try {
        client = getService().createClient();
        // Use a server-side count instead of the original find(query).sort(...).size(),
        // which transferred (and sorted!) every matching document to the client just
        // to measure the result set.
        return client.getDB(database).getCollection(collection).find(query).count();
    } catch (Exception ex) {
        throw new CompatibilityDataException(String.format("(Unknown Host) Failed to fetch object with query %s", query ), ex);
    } finally {
        if (client != null) {
            client.close();
        }
    }
}
/**
 * Fetches the timestamp of the most recent oplog entry, ordered by the river's
 * insertion-order key descending.
 */
private Timestamp<?> getCurrentOplogTimestamp() {
    final BasicDBObject newestFirst = new BasicDBObject(MongoDBRiver.INSERTION_ORDER_KEY, -1);
    try (DBCursor latest = oplogCollection.find().sort(newestFirst).limit(1)) {
        // NOTE(review): assumes the oplog has at least one entry; next() throws otherwise.
        return Timestamp.on(latest.next());
    }
}
/**
 * Opens the schema store described by the URI, falling back to the default
 * database and collection names when the URI omits them, and ensures a unique
 * index on the feature type name.
 *
 * @param uri the MongoDB connection URI
 * @throws IOException if the client cannot be created
 */
public MongoSchemaDBStore(MongoClientURI uri) throws IOException {
    client = new MongoClient(uri);

    String databaseName = uri.getDatabase();
    if (databaseName == null) {
        databaseName = DEFAULT_databaseName;
    }
    DB database = client.getDB(databaseName);

    String collectionName = uri.getCollection();
    if (collectionName == null) {
        collectionName = DEFAULT_collectionName;
    }
    collection = database.getCollection(collectionName);

    // One schema document per feature type name.
    collection.createIndex(
            new BasicDBObject(FeatureTypeDBObject.KEY_typeName, 1),
            new BasicDBObject("unique", true));
}
/**
 * Builds the list of shards to tail. On a mongos deployment each replica-backed
 * entry of config.shards becomes one {@code Shard}; otherwise the whole cluster is
 * verified to be a replica set and treated as a single pseudo-shard named
 * "unsharded". Each shard carries the latest oplog timestamp observed on it.
 */
private List<Shard> getShards(boolean isMongos) {
    List<Shard> shards = new ArrayList<>();
    if (!isMongos) {
        // Guard clause: plain replica set — one pseudo-shard covering every server.
        ensureIsReplicaSet(clusterClient);
        List<ServerAddress> servers = clusterClient.getServerAddressList();
        shards.add(new Shard("unsharded", servers, getCurrentOplogTimestamp(clusterClient)));
        return shards;
    }
    try (DBCursor cursor = getConfigDb().getCollection("shards").find()) {
        while (cursor.hasNext()) {
            DBObject shardDoc = cursor.next();
            List<ServerAddress> shardServers = getServerAddressForReplica(shardDoc);
            if (shardServers == null) {
                // Skip shards whose replica servers could not be resolved.
                continue;
            }
            String shardName = shardDoc.get(MongoDBRiver.MONGODB_ID_FIELD).toString();
            MongoClient shardClient = mongoClientService.getMongoShardClient(definition, shardServers);
            ensureIsReplicaSet(shardClient);
            shards.add(new Shard(shardName, shardServers, getCurrentOplogTimestamp(shardClient)));
        }
    }
    return shards;
}
@Override public void removeOutputFromAllStreams(Output output) { ObjectId outputId = new ObjectId(output.getId()); DBObject match = new BasicDBObject(StreamImpl.FIELD_OUTPUTS, outputId); DBObject modify = new BasicDBObject("$pull", new BasicDBObject(StreamImpl.FIELD_OUTPUTS, outputId)); // Collect streams that will change before updating them because we don't get the list of changed streams // from the upsert call. final ImmutableSet<String> updatedStreams; try (final DBCursor cursor = collection(StreamImpl.class).find(match)) { updatedStreams = StreamSupport.stream(cursor.spliterator(), false) .map(stream -> stream.get("_id")) .filter(Objects::nonNull) .map(id -> ((ObjectId) id).toHexString()) .collect(ImmutableSet.toImmutableSet()); } collection(StreamImpl.class).update( match, modify, false, true ); clusterEventBus.post(StreamsChangedEvent.create(updatedStreams)); }
protected <T extends Persisted> void removeEmbedded(T model, String key, String searchId) { BasicDBObject aryQry = new BasicDBObject("id", searchId); BasicDBObject qry = new BasicDBObject("_id", new ObjectId(model.getId())); BasicDBObject update = new BasicDBObject("$pull", new BasicDBObject(key, aryQry)); // http://docs.mongodb.org/manual/reference/operator/pull/ collection(model).update(qry, update); }