// Periodically drains pending cluster events from MongoDB and republishes
// them on the local server event bus, marking this node as a consumer.
@Override
public void doRun() {
    LOG.debug("Opening MongoDB cursor on \"{}\"", COLLECTION_NAME);
    try (DBCursor<ClusterEvent> cursor = eventCursor(nodeId)) {
        if (LOG.isTraceEnabled()) {
            LOG.trace("MongoDB query plan: {}", cursor.explain());
        }
        while (cursor.hasNext()) {
            final ClusterEvent event = cursor.next();
            LOG.trace("Processing cluster event: {}", event);
            final Object payload = extractPayload(event.payload(), event.eventClass());
            if (payload == null) {
                // Deserialization failed; log and skip, but still mark as consumed below.
                LOG.warn("Couldn't extract payload of cluster event with ID <{}>", event.id());
                LOG.debug("Invalid payload in cluster event: {}", event);
            } else {
                serverEventBus.post(payload);
            }
            updateConsumers(event.id(), nodeId);
        }
    } catch (Exception e) {
        // Swallow and retry on the next scheduled run.
        LOG.warn("Error while reading cluster events from MongoDB, retrying.", e);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public List<IndexSetConfig> findPaginated(Set<String> indexSetIds, int limit, int skip) {
    // Match any of the requested index set IDs via an OR of per-ID equality queries.
    final DBQuery.Query[] idQueries = indexSetIds.stream()
            .map(id -> DBQuery.is("_id", id))
            .toArray(DBQuery.Query[]::new);
    // Results are ordered by title; skip/limit implement the pagination window.
    return ImmutableList.copyOf(collection.find(DBQuery.or(idQueries))
            .sort(DBSort.asc("title"))
            .skip(skip)
            .limit(limit)
            .toArray());
}
/**
 * Fetches one page of data adapters matching the given query.
 *
 * @param query   the filter to apply
 * @param sort    the sort order for the results
 * @param page    1-based page number (values below 1 are treated as page 1)
 * @param perPage number of entries per page
 * @return the requested page plus the total match count
 */
public PaginatedList<DataAdapterDto> findPaginated(DBQuery.Query query, DBSort.SortBuilder sort, int page, int perPage) {
    // Convert the 1-based page number into a document offset.
    final int offset = perPage * Math.max(0, page - 1);
    try (DBCursor<DataAdapterDto> cursor = db.find(query)
            .sort(sort)
            .limit(perPage)
            .skip(offset)) {
        return new PaginatedList<>(asImmutableList(cursor), cursor.count(), page, perPage);
    }
}
// Returns up to {@code limit} alerts for the given streams triggered at or after
// {@code since}, newest first. A null {@code since} means "since the epoch".
@Override
public List<Alert> loadRecentOfStreams(List<String> streamIds, DateTime since, int limit) {
    if (streamIds == null || streamIds.isEmpty()) {
        return Collections.emptyList();
    }
    final DateTime effectiveSince;
    if (since == null) {
        effectiveSince = new DateTime(0L, DateTimeZone.UTC);
    } else {
        effectiveSince = since;
    }
    final DBQuery.Query query = DBQuery.and(
            getFindAnyStreamQuery(streamIds),
            DBQuery.greaterThanEquals(AlertImpl.FIELD_TRIGGERED_AT, effectiveSince));
    final List<? extends Alert> recent = this.coll.find(query)
            .limit(limit)
            .sort(DBSort.desc(AlertImpl.FIELD_TRIGGERED_AT))
            .toArray();
    return Collections.unmodifiableList(recent);
}
// Loads every decorator document from the collection and converts the concrete
// documents into the Decorator interface type via toInterfaceList.
@Override public List<Decorator> findAll() { return toInterfaceList(coll.find().toArray()); }
/**
 * Returns a single object from this collection matching the query.
 *
 * @param query
 *            the query object
 * @param fields
 *            fields to return
 * @param readPref
 *            The read preference
 * @return the object found, or <code>null</code> if no such object exists
 */
public T findOne(DBObject query, DBObject fields, ReadPreference readPref) {
    org.mongojack.DBCursor<T> cursor = find(query, fields)
            .setReadPreference(readPref);
    // Close the cursor after use so server-side resources are released
    // (the previous version leaked it).
    try {
        return cursor.hasNext() ? cursor.next() : null;
    } finally {
        cursor.close();
    }
}
/**
 * @see KnowledgeBase#getCauseNames()
 * Can throw MongoException if unknown fields exist in the database.
 * @return the full list of the names and ids of the causes.
 */
@Override
public Collection<FailureCause> getCauseNames() {
    final List<FailureCause> list = new LinkedList<FailureCause>();
    // Projection: only the "name" field (plus the implicit _id) is fetched.
    final DBObject keys = new BasicDBObject();
    keys.put("name", 1);
    final DBCursor<FailureCause> dbCauses = getJacksonCollection().find(NOT_REMOVED_QUERY, keys);
    try {
        while (dbCauses.hasNext()) {
            list.add(dbCauses.next());
        }
    } finally {
        // Release server-side cursor resources (previously leaked).
        dbCauses.close();
    }
    return list;
}
/**
 * Loads all non-removed causes with a shallow projection (no full indications),
 * sorted by name.
 *
 * @return the shallow failure causes
 */
@Override
public Collection<FailureCause> getShallowCauses() {
    final List<FailureCause> list = new LinkedList<>();
    // Projection: only the summary fields needed for listing views.
    final DBObject keys = new BasicDBObject();
    keys.put("name", 1);
    keys.put("description", 1);
    keys.put("categories", 1);
    keys.put("comment", 1);
    keys.put("modifications", 1);
    keys.put("lastOccurred", 1);
    final BasicDBObject orderBy = new BasicDBObject("name", 1);
    DBCursor<FailureCause> dbCauses = getJacksonCollection().find(NOT_REMOVED_QUERY, keys);
    dbCauses = dbCauses.sort(orderBy);
    try {
        while (dbCauses.hasNext()) {
            list.add(dbCauses.next());
        }
    } finally {
        // Release server-side cursor resources (previously leaked).
        dbCauses.close();
    }
    return list;
}
/**
 * Reads all objects matching the query from the backing collection.
 *
 * @param query the Mongo query to run
 * @param clazz the class to deserialize each document into
 * @param <T>   the element type
 * @return the matching objects, possibly empty
 * @throws CompatibilityDataException if the fetch fails for any reason
 */
public <T> List<T> readMany(BasicDBObject query, Class<T> clazz) throws CompatibilityDataException {
    final List<T> list = new ArrayList<T>();
    org.mongojack.DBCursor<T> cursor = null;
    MongoClient client = null;
    try {
        client = getService().createClient();
        // Typed wrap removes the raw types and the unchecked per-element cast
        // the previous version needed.
        final JacksonDBCollection<T, Object> coll =
                JacksonDBCollection.wrap(client.getDB(database).getCollection(collection), clazz);
        cursor = coll.find(query);
        while (cursor.hasNext()) {
            list.add(cursor.next());
        }
    } catch (Exception ex) {
        throw new CompatibilityDataException(
                String.format("Failed to fetch object with query %s", query), ex);
    } finally {
        // Always release the cursor and client, even on failure.
        if (cursor != null) {
            cursor.close();
        }
        if (client != null) {
            client.close();
        }
    }
    return list;
}
/**
 * Loads every filter description from the collection.
 *
 * @return all filter descriptions, possibly empty
 * @throws NotFoundException declared by the interface; not thrown here
 */
@Override
public Set<FilterDescription> loadAll() throws NotFoundException {
    final Set<FilterDescription> filters = Sets.newHashSet();
    // try-with-resources closes the cursor (previously leaked); the old
    // hasNext() guard was redundant — addAll is a no-op on an empty iterator.
    try (DBCursor<FilterDescription> cursor = dbCollection.find()) {
        Iterators.addAll(filters, cursor);
    }
    return filters;
}
// Loads all processing rules sorted by title. Database failures are logged
// and degrade to an empty set instead of propagating.
@Override
public Collection<RuleDao> loadAll() {
    try (DBCursor<RuleDao> cursor = dbCollection.find().sort(DBSort.asc("title"))) {
        return ImmutableSet.copyOf((Iterable<RuleDao>) cursor);
    } catch (MongoException e) {
        log.error("Unable to load processing rules", e);
        return Collections.emptySet();
    }
}
/**
 * Queries for all objects in this collection.
 *
 * @return a cursor which will iterate over every object
 * @throws MongoException
 *             If an error occurred
 */
public org.mongojack.DBCursor<T> find() throws MongoException {
    // Wrap the raw driver cursor so documents are deserialized to T.
    final com.mongodb.DBCursor underlying = dbCollection.find();
    return new org.mongojack.DBCursor<T>(this, underlying);
}
// Adds a serialized query condition to the underlying cursor's query.
// Must be called before the cursor has been executed.
@Override
protected DBCursor<T> put(String op, QueryCondition value) {
    checkExecuted();
    final Object serialized = jacksonDBCollection.serializeQueryCondition(op, value);
    cursor.getQuery().put(op, serialized);
    return this;
}
/**
 * Finds the most recently triggered alert for the given stream/condition pair.
 *
 * @param streamId    the stream the alert belongs to
 * @param conditionId the alert condition that triggered it
 * @return the newest matching alert, or empty if none exists
 */
@Override
public Optional<Alert> getLastTriggeredAlert(String streamId, String conditionId) {
    // Newest first, capped at one result.
    final List<AlertImpl> alerts = this.coll.find(
            DBQuery.and(
                    DBQuery.is(AlertImpl.FIELD_STREAM_ID, streamId),
                    DBQuery.is(AlertImpl.FIELD_CONDITION_ID, conditionId)))
            .sort(DBSort.desc(AlertImpl.FIELD_TRIGGERED_AT))
            .limit(1)
            .toArray();
    // isEmpty() replaces the non-idiomatic size() == 0; null check kept defensively.
    if (alerts == null || alerts.isEmpty()) {
        return Optional.empty();
    }
    return Optional.of(alerts.get(0));
}
/**
 * Fetches one page of lookup tables matching the given query.
 *
 * @param query   the filter to apply
 * @param sort    the sort order for the results
 * @param page    1-based page number (values below 1 are treated as page 1)
 * @param perPage number of entries per page
 * @return the requested page plus the total match count
 */
public PaginatedList<LookupTableDto> findPaginated(DBQuery.Query query, DBSort.SortBuilder sort, int page, int perPage) {
    final int firstPageOrLater = Math.max(0, page - 1);
    try (DBCursor<LookupTableDto> cursor = db.find(query)
            .sort(sort)
            .limit(perPage)
            .skip(perPage * firstPageOrLater)) {
        return new PaginatedList<>(asImmutableList(cursor), cursor.count(), page, perPage);
    }
}
// Loads all decorators attached to the given stream.
// NOTE(review): the stream field appears to be persisted wrapped in an Optional,
// hence the Optional.of(...) in the query — confirm against the persistence layer.
@Override
public List<Decorator> findForStream(String streamId) {
    final DBQuery.Query byStream = DBQuery.is(DecoratorImpl.FIELD_STREAM, Optional.of(streamId));
    return toInterfaceList(coll.find(byStream).toArray());
}
/**
 * Returns a single object from this collection matching the query.
 *
 * @param query
 *            the query object
 * @param fields
 *            an object for which every non null field will be returned
 * @param readPref
 *            The read preferences
 * @return the object found, or <code>null</code> if no such object exists
 */
public T findOne(DBQuery.Query query, DBObject fields, ReadPreference readPref) {
    org.mongojack.DBCursor<T> cursor = find(query, fields)
            .setReadPreference(readPref);
    // Close the cursor after use so server-side resources are released
    // (the previous version leaked it).
    try {
        return cursor.hasNext() ? cursor.next() : null;
    } finally {
        cursor.close();
    }
}
/**
 * Cache updater loop: blocks on the {@code shouldUpdate} semaphore, then
 * reloads the non-removed failure causes and category list from MongoDB.
 * Terminates when {@code stop} is set.
 */
@Override
public void run() {
    while (!stop) {
        try {
            // Block until someone requests a cache refresh.
            shouldUpdate.acquire();
            if (stop) {
                break;
            }
            List<FailureCause> list = new LinkedList<FailureCause>();
            DBCursor<FailureCause> dbCauses = jacksonCollection.find(NOT_REMOVED_QUERY);
            try {
                while (dbCauses.hasNext()) {
                    list.add(dbCauses.next());
                }
            } finally {
                // Release server-side cursor resources (previously leaked).
                dbCauses.close();
            }
            cachedFailureCauses = list;
            categories = jacksonCollection.distinct("categories");
        } catch (MongoException e) {
            logger.log(Level.SEVERE, "MongoException caught when updating cache: ", e);
        } catch (InterruptedException e) {
            // Interrupt is deliberately swallowed: re-interrupting would make the next
            // acquire() throw immediately; shutdown is driven by the stop flag instead.
            logger.log(Level.WARNING, "Updater thread interrupted", e);
        }
    }
}
/**
/**
 * Loads every configuration bundle from the collection.
 *
 * @return all configuration bundles, possibly empty
 */
public Set<ConfigurationBundle> loadAll() {
    final Set<ConfigurationBundle> bundles = new HashSet<>();
    // try-with-resources closes the cursor (previously leaked); the old
    // hasNext() guard was redundant — addAll is a no-op on an empty iterator.
    // Local renamed from UpperCamelCase "ConfigurationBundles" to follow convention.
    try (DBCursor<ConfigurationBundle> cursor = dbCollection.find()) {
        Iterators.addAll(bundles, cursor);
    }
    return bundles;
}