final String streamId = document.getObjectId(FIELD_ID).toHexString();
if (!document.containsKey(FIELD_ALERT_CONDITIONS)) {
    continue;
}
final List<Document> alertConditions = (List<Document>) document.get(FIELD_ALERT_CONDITIONS);

final UpdateResult result = collection.updateOne(
        eq(FIELD_ALERT_CONDITIONS_ID, alertConditionId),
        set(ALERT_CONDITIONS_PARAMETERS_PREFIX + field, intValue));
if (result.getMatchedCount() > 0) {
    modifiedStreams.add(streamId);
    modifiedAlertConditions.add(alertConditionId);
}
@Override
public void save(SessionData sessionData) {
    String sessionId = sessionData.getId();
    this.sessions.updateOne(
            eq(SESSION_ID, sessionId),
            combine(
                    set(SESSION_ID, sessionId),
                    set(SESSION_TTL, new Date()),
                    set(SESSION_DATA, transcoder.encode(sessionData))),
            new UpdateOptions().upsert(true));
}
@Override
public void upgrade() {
    if (clusterConfigService.get(MigrationCompleted.class) != null) {
        LOG.debug("Migration already done.");
        return;
    }

    // Do not overwrite an existing default index config
    boolean defaultDone = clusterConfigService.get(DefaultIndexSetConfig.class) != null;

    final ImmutableSet.Builder<String> builder = ImmutableSet.builder();
    final FindIterable<Document> documents = collection.find(exists(FIELD_DEFAULT)).sort(ascending(FIELD_CREATION_DATE));
    for (final Document document : documents) {
        final ObjectId id = document.getObjectId(FIELD_ID);
        final String idString = id.toHexString();
        final boolean isDefault = firstNonNull(document.getBoolean(FIELD_DEFAULT), false);

        if (!defaultDone && isDefault) {
            defaultDone = true;
            clusterConfigService.write(DefaultIndexSetConfig.create(idString));
        }

        final long modifiedCount = collection.updateOne(eq(FIELD_ID, id), unset(FIELD_DEFAULT)).getMatchedCount();
        if (modifiedCount > 0) {
            LOG.info("Removed <default> field from index set <{}> ({})", document.getString(FIELD_TITLE), idString);
            builder.add(idString);
        } else {
            LOG.error("Couldn't remove <default> field from index set <{}> ({})", document.getString(FIELD_TITLE), idString);
        }
    }

    clusterConfigService.write(MigrationCompleted.create(builder.build()));
}
updates.add(push(STUDIES_FIELD, studyDocument));
updates.add(addToSet(RELEASE_FIELD, release));
if (newVariant) {
    Document variantDocument = variantConverter.convertToStorageType(emptyVar);
    updates.add(addEachToSet(IDS_FIELD, ids));
    // Inside a loop over the variantDocument entries (loop header elided in this excerpt)
    Object value = entry.getValue();
    if (value instanceof List) {
        updates.add(setOnInsert(entry.getKey(), new BsonArray(((List) value))));
    } else {
        updates.add(setOnInsert(entry.getKey(), value));
    }
}
mongoDBOps.getNewStudy().getQueries().add(eq("_id", id));
mongoDBOps.getNewStudy().getUpdates().add(combine(updates));
} else {
    id = null;
    List<Bson> mergeUpdates = new LinkedList<>();
    if (!ids.isEmpty()) {
        mergeUpdates.add(addEachToSet(IDS_FIELD, ids));
    }
    // Inside a loop over the genotypes (gt) of the study (loop header elided in this excerpt)
    List sampleIds = getListFromDocument(gts, gt);
    if (resume) {
        mergeUpdates.add(addEachToSet(STUDIES_FIELD + ".$." + GENOTYPES_FIELD + '.' + gt, sampleIds));
    } else {
        mergeUpdates.add(pushEach(STUDIES_FIELD + ".$." + GENOTYPES_FIELD + '.' + gt, sampleIds));
    }
    mergeUpdates.add(addEachToSet(STUDIES_FIELD + ".$." + ALTERNATES_FIELD, secondaryAlternates));
// Partial excerpt: the enclosing find/update calls are truncated in this snippet.
and(runningExploratoryAndComputationalCondition(user, exploratoryName, computationalName),
        eq(COMPUTATIONAL_LIBS + "." + computationalName + "." + LIB_GROUP, library.getGroup()),
        eq(COMPUTATIONAL_LIBS + "." + computationalName + "." + LIB_NAME, library.getName())));

    push(COMPUTATIONAL_LIBS + "." + computationalName, convertToBson(library)));
    return true;
} else {
    Document values = updateComputationalLibraryFields(computationalName, library, null);
    if (reinstall) {
        values.append(computationalLibraryFieldFilter(computationalName, LIB_INSTALL_DATE), null);
        values.append(computationalLibraryFieldFilter(computationalName, LIB_ERROR_MESSAGE), null);
    }

    eq(COMPUTATIONAL_LIBS + "." + computationalName + "." + LIB_NAME, library.getName())),
            new Document(SET, values));
    MongoDBVariantStageReader.addChromosomeFilter(chrFilters, chromosome);
    chrFilter = or(chrFilters);
} else {
    chrFilter = new Document();
}

updates.add(set(studyId + "." + fileId, null));
filters.add(
        or(
                ne(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyFile),
                and(/* nested conditions truncated in this excerpt */)));
updates.add(pullAll(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyFiles));
Bson filter = and(in(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyFiles), chrFilter, and(filters));
LOGGER.info("Clean studies from stage where all the files were duplicated");
modifiedCount += stageCollection.update(
        filter, combine(updates),
        new QueryOptions(MongoDBCollection.MULTI, true)).first().getModifiedCount();

// Clean files from the stage collection
List<Bson> updates = new LinkedList<>();
for (Integer fileId : fileIds) {
    updates.add(unset(studyId + "." + fileId));
}
updates.add(set(studyId + "." + NEW_STUDY_FIELD, false));
updates.add(pullAll(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyFiles));
LOGGER.info("Cleaning files {} from stage collection", fileIds);
modifiedCount += stageCollection.update(and(filters), combine(updates),
        new QueryOptions(MongoDBCollection.MULTI, true)).first().getModifiedCount();
private void writeTopicAssignmentsToMongo(
        InstanceList instances, TopicWords topicWords, ParallelTopicModel model) {
    IntStream.range(0, instances.size())
            .forEach(
                    document -> {
                        double[] topicDistribution = model.getTopicProbabilities(document);
                        int maxAt = new MaximumIndex(topicDistribution).find();
                        Instance instance = instances.get(document);
                        List<String> keywords = topicWords.forTopic(maxAt);
                        documentsCollection.findOneAndUpdate(
                                Filters.eq(new ObjectId((String) instance.getName())),
                                Updates.set(
                                        TOPIC_FIELD,
                                        new Document()
                                                .append(KEYWORDS_FIELD, keywords.toString())
                                                .append(TOPIC_NUMBER_FIELD, maxAt)));
                    });
}
private void addPrivateVariableMap(long entryId, Map<String, String> variableMap, boolean isVersioned)
        throws CatalogDBException {
    Document queryDocument = new Document(PRIVATE_UID, entryId);
    if (isVersioned) {
        queryDocument.append(LAST_OF_VERSION, true);
    }

    List<Bson> setMap = new ArrayList<>(variableMap.size());
    for (Map.Entry<String, String> entry : variableMap.entrySet()) {
        setMap.add(Updates.set(AnnotationSetParams.PRIVATE_VARIABLE_SET_MAP.key() + "." + entry.getKey(), entry.getValue()));
    }

    QueryResult<UpdateResult> update = getCollection().update(queryDocument, Updates.combine(setMap),
            new QueryOptions("multi", true));
    if (update.first().getModifiedCount() < 1 && update.first().getMatchedCount() == 0) {
        throw new CatalogDBException("Could not add new private map information");
    }
}
try {
    // Ensure the lock document exists, creating it if necessary
    collection.update(new Document("_id", id), set("_id", id), new QueryOptions(MongoDBCollection.UPSERT, true));
} catch (MongoWriteException e) {
    // Exception handling truncated in this excerpt
}

Date now = Calendar.getInstance().getTime();
Bson query = and(eq("_id", id), or(eq(lockWriteField, null), lt(lockWriteField, now)));
Bson update = combine(set(lockWriteField, date));
modifiedCount = collection.update(query, update, null).first().getModifiedCount();
@Override
public void nativeInsert(Map<String, Object> project, String userId) throws CatalogDBException {
    Bson query = Filters.and(Filters.eq(UserDBAdaptor.QueryParams.ID.key(), userId),
            Filters.ne(UserDBAdaptor.QueryParams.PROJECTS_ID.key(), project.get(QueryParams.ID.key())));
    Bson update = Updates.push("projects", getMongoDBDocument(project, "project"));

    // Update object
    QueryResult<UpdateResult> queryResult = userCollection.update(query, update, null);

    if (queryResult.getResult().get(0).getModifiedCount() == 0) { // Check if the project has been inserted
        throw new CatalogDBException("Project {\"" + project.get(QueryParams.ID.key()) + "\"} already exists for this user");
    }
}
private void removePrivateVariableMap(long entryId, Map<String, String> privateVariableMapToSet, boolean isVersioned)
        throws CatalogDBException {
    Document queryDocument = new Document(PRIVATE_UID, entryId);
    if (isVersioned) {
        queryDocument.append(LAST_OF_VERSION, true);
    }

    for (Map.Entry<String, String> entry : privateVariableMapToSet.entrySet()) {
        // We only want to remove the private variable map if it is not currently in use by any annotation set
        queryDocument.append(AnnotationSetParams.VARIABLE_SET_ID.key(),
                new Document("$ne", Long.parseLong(entry.getKey())));

        Bson unset = Updates.unset(AnnotationSetParams.PRIVATE_VARIABLE_SET_MAP.key() + "." + entry.getKey());
        QueryResult<UpdateResult> update = getCollection().update(queryDocument, unset, new QueryOptions());
        if (update.first().getModifiedCount() < 1 && update.first().getMatchedCount() == 1) {
            throw new CatalogDBException("Could not remove private map information");
        }
    }
}
private int generateId(String idType, boolean retry) throws StorageEngineException {
    String field = COUNTERS_FIELD + '.' + idType;
    Document projection = new Document(field, true);
    Bson inc = Updates.inc(field, 1);
    QueryOptions queryOptions = new QueryOptions("returnNew", true);
    QueryResult<Document> result = collection.findAndUpdate(QUERY, projection, null, inc, queryOptions);
    if (result.first() == null) {
        if (retry) {
            ensureProjectMetadataExists();
            return generateId(idType, false);
        } else {
            throw new StorageEngineException("Error creating new ID. Project Metadata not found");
        }
    } else {
        Document document = result.getResult().get(0);
        Document counters = document.get(COUNTERS_FIELD, Document.class);
        Integer id = counters.getInteger(idType);
        // System.out.println("New ID " + idType + " : " + id);
        return id;
    }
}
List<Bson> updates = new ArrayList<>(retryIds != null ? retryIds.size() : values.size());
for (Document id : values.keySet()) {
    String mongoId = id.getString(StageDocumentToVariantConverter.ID_FIELD);
    if (retryIds == null || retryIds.contains(mongoId)) {
        ids.add(mongoId);
        List<Binary> binaryList = values.get(id);
        queries.add(eq(StageDocumentToVariantConverter.ID_FIELD, mongoId));

        List<Bson> bsons = new ArrayList<>(6);
        if (directLoad) {
            bsons.add(set(fieldName, null));
            bsons.add(set(studyIdStr + '.' + NEW_STUDY_FIELD, false));
        } else if (binaryList.size() == 1) {
            bsons.add(resumeStageLoad ? addToSet(fieldName, binaryList.get(0)) : push(fieldName, binaryList.get(0)));
        } else {
            bsons.add(resumeStageLoad ? addEachToSet(fieldName, binaryList) : pushEach(fieldName, binaryList));
        }
        bsons.add(addEachToSet(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyFileValue));
        bsons.add(setOnInsert(StageDocumentToVariantConverter.END_FIELD, id.get(StageDocumentToVariantConverter.END_FIELD)));
        bsons.add(setOnInsert(StageDocumentToVariantConverter.REF_FIELD, id.get(StageDocumentToVariantConverter.REF_FIELD)));
        bsons.add(setOnInsert(StageDocumentToVariantConverter.ALT_FIELD, id.get(StageDocumentToVariantConverter.ALT_FIELD)));
        updates.add(combine(bsons));
    }
}
@Deprecated
static long getNewAutoIncrementId(String field, MongoDBCollection metaCollection) {
    Bson query = Filters.eq(PRIVATE_ID, MongoDBAdaptorFactory.METADATA_OBJECT_ID);
    Document projection = new Document(field, true);
    Bson inc = Updates.inc(field, 1);
    QueryOptions queryOptions = new QueryOptions("returnNew", true);
    QueryResult<Document> result = metaCollection.findAndUpdate(query, projection, null, inc, queryOptions);
    // return (int) Float.parseFloat(result.getResult().get(0).get(field).toString());
    return result.getResult().get(0).getInteger(field);
}
/**
 * Updates the EDGE cost in MongoDB.
 *
 * @param user the name of the user.
 */
private void updateEdgeCost(String user) {
    List<? extends Bson> pipeline = Arrays.asList(
            match(and(eq(FIELD_USER, user), eq(FIELD_EXPLORATORY_NAME, null))),
            group(getGrouppingFields(ReportLine.FIELD_CURRENCY_CODE),
                    sum(ReportLine.FIELD_COST, "$" + ReportLine.FIELD_COST))
    );
    AggregateIterable<Document> docs = connection.getCollection(COLLECTION_BILLING_TOTAL)
            .aggregate(pipeline);
    MongoCollection<Document> cEdge = connection.getCollection(COLLECTION_USER_EDGE);
    for (Document d : docs) {
        Document id = (Document) d.get(FIELD_ID);
        Bson values = Updates.combine(
                Updates.set(ReportLine.FIELD_COST,
                        BillingCalculationUtils.round(d.getDouble(ReportLine.FIELD_COST), 2)),
                Updates.set(FIELD_CURRENCY_CODE, id.get(ReportLine.FIELD_CURRENCY_CODE)));
        cEdge.updateOne(eq(FIELD_ID, user), values);
    }
}
@Override
public void removeContainer(Id id) {
    checkNotNull(id, "id");
    final String ownerId = idConverter.convert(id);
    final Bson filter = and(eq(ModelDocument.F_ID, ownerId), exists(ModelDocument.F_CONTAINER));
    final Bson update = unset(ModelDocument.F_CONTAINER);
    documents.updateOne(filter, update);
}
/**
 * {@inheritDoc}
 */
@Override
public void releaseClaim(String processorName, int segment) {
    UpdateResult updateResult = mongoTemplate.trackingTokensCollection()
            .updateOne(and(
                    eq("processorName", processorName),
                    eq("segment", segment),
                    eq("owner", nodeId)
            ), set("owner", null));

    if (updateResult.getMatchedCount() == 0) {
        logger.warn("Releasing claim of token {}/{} failed. It was owned by another node.", processorName, segment);
    }
}
@Override
public boolean createValue(final String jobType, final String key, final String value) {
    final Bson filter = and(
            eq(ID, jobType),
            exists(key, false));
    final Bson update = set(key, value);
    try {
        final Document previous = collection.findOneAndUpdate(filter, update, UPSERT);
        return previous == null || previous.getString(key) == null;
    } catch (final Exception e) {
        return false;
    }
}
@Test
public void testUpsertWithoutId() {
    UpdateResult result = collection.updateOne(eq("a", 1), set("a", 2), new UpdateOptions().upsert(true));
    assertThat(result.getModifiedCount()).isEqualTo(0);
    assertThat(result.getUpsertedId()).isNotNull();
    assertThat(collection.find().first().get("_id")).isInstanceOf(ObjectId.class);
    assertThat(collection.find().first().get("a")).isEqualTo(2);
}
@Override
public QueryResult updateProjectMetadata(ProjectMetadata projectMetadata, boolean updateCounters) {
    Document mongo = new GenericDocumentComplexConverter<>(ProjectMetadata.class).convertToStorageType(projectMetadata);

    // Update field by field, instead of replacing the whole object, to preserve existing fields like "_lock"
    List<Bson> updates = new ArrayList<>(mongo.size());
    mongo.forEach((s, o) -> {
        // Do not update the counters field unless explicitly requested
        if (updateCounters || !s.equals(COUNTERS_FIELD)) {
            updates.add(new Document("$set", new Document(s, o)));
        }
    });

    return collection.update(QUERY, Updates.combine(updates), new QueryOptions(UPSERT, true));
}