public void putErrorTransaction(TransactionXid transactionXid, Transaction transaction) { try { TransactionArchive archive = (TransactionArchive) transaction.getTransactionArchive(); byte[] global = transactionXid.getGlobalTransactionId(); String identifier = ByteUtils.byteArrayToString(global); int status = archive.getCompensableStatus(); String databaseName = CommonUtils.getApplication(this.endpoint).replaceAll("\\W", "_"); MongoDatabase mdb = this.mongoClient.getDatabase(databaseName); MongoCollection<Document> collection = mdb.getCollection(CONSTANTS_TB_TRANSACTIONS); Document target = new Document(); target.append("modified", this.endpoint); target.append("status", status); target.append("error", true); target.append("recovered_at", archive.getRecoveredAt() == 0 ? null : new Date(archive.getRecoveredAt())); target.append("recovered_times", archive.getRecoveredTimes()); Document document = new Document(); document.append("$set", target); // document.append("$inc", new BasicDBObject("modified_time", 1)); UpdateResult result = collection.updateOne(Filters.eq(CONSTANTS_FD_GLOBAL, identifier), document); if (result.getMatchedCount() != 1) { throw new IllegalStateException( String.format("Error occurred while updating transaction(matched= %s, modified= %s).", result.getMatchedCount(), result.getModifiedCount())); } } catch (RuntimeException error) { logger.error("Error occurred while setting the error flag.", error); } }
/**
 * Backfills missing {@code id} / {@code rev} fields on content-pack documents.
 *
 * <p>Documents lacking either meta field get the ObjectId hex string as id and
 * revision 0 as defaults, then are written back with replaceOne.
 */
@Override
public void upgrade() {
    // Select documents lacking either the "id" or the "rev" meta field.
    final FindIterable<Document> documentsWithMissingFields = collection.find(
            or(not(exists(ContentPack.FIELD_META_ID)), not(exists(ContentPack.FIELD_META_REVISION))));

    for (Document document : documentsWithMissingFields) {
        final ObjectId objectId = document.getObjectId("_id");
        LOG.debug("Found document with missing \"id\" or \"rev\" field with ID <{}>", objectId);

        // Keep existing values when present; otherwise fall back to defaults.
        final String id = document.get("id", objectId.toHexString());
        final int rev = document.get("rev", 0);
        document.put("id", id);
        document.put("rev", rev);

        final UpdateResult updateResult = collection.replaceOne(eq("_id", objectId), document);
        if (updateResult.wasAcknowledged()) {
            LOG.debug("Successfully updated document with ID <{}>", objectId);
        } else {
            LOG.error("Failed to update document with ID <{}>", objectId);
        }
    }
}
}
/**
 * Adapts a driver-level {@code com.mongodb.bulk.BulkWriteResult} into an
 * {@code UpdateResult}.
 *
 * <p>Unacknowledged writes map to an unacknowledged result; otherwise matched
 * count, modified count (null when unavailable) and the first upserted id, if
 * any, are carried over.
 */
private UpdateResult toUpdateResult(final com.mongodb.bulk.BulkWriteResult result) {
    if (!result.wasAcknowledged()) {
        return UpdateResult.unacknowledged();
    }
    // The modified count may be unavailable on legacy servers; map that to null.
    final Long modified = result.isModifiedCountAvailable() ? (long) result.getModifiedCount() : null;
    // Only the first upsert (if present) is reported, matching the original behavior.
    final BsonValue upsertedId = result.getUpserts().isEmpty() ? null : result.getUpserts().get(0).getId();
    return UpdateResult.acknowledged(result.getMatchedCount(), modified, upsertedId);
}
// NOTE(review): this fragment is truncated — the for-loop is missing its closing
// brace, `update` is rebuilt every iteration but never visibly used, and `result`
// is read without a visible updateOne/updateMany call producing it. Recover the
// full method before relying on this snippet.
MongoCollection<Document> collection = database.getCollection(table);
// Presumably `key` is the document primary key — TODO confirm against caller.
Document query = new Document("_id", key);
Document fieldsToSet = new Document();
for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
    fieldsToSet.put(entry.getKey(), entry.getValue().toArray());
    Document update = new Document("$set", fieldsToSet);
    if (result.wasAcknowledged() && result.getMatchedCount() == 0) {
        // Acknowledged but nothing matched: the key does not exist.
        System.err.println("Nothing updated for key " + key);
        return Status.NOT_FOUND;
/**
 * Renames the legacy {@code GeneralScriptFunction} type identifier to its new
 * package on every matching document in the {@code functions} collection.
 */
private void migrateGeneralScriptFunction(GlobalContext context) {
    logger.info("Searching for keywords of type 'Script' to be migrated...");
    com.mongodb.client.MongoCollection<Document> functions =
            context.getMongoClientSession().getMongoDatabase().getCollection("functions");

    // Match every document still carrying the pre-migration type name.
    Document filter = new Document("type", "step.plugins.functions.types.GeneralScriptFunction");
    Document replacement = new Document("$set",
            new Document("type", "step.plugins.java.GeneralScriptFunction"));

    UpdateResult result = functions.updateMany(filter, replacement);
    logger.info("Migrated " + result.getModifiedCount()
            + " artefacts of type 'step.plugins.functions.types.GeneralScriptFunction'");
}
/**
 * Writes one {@code $set} per variable onto the entry's private variable-set map.
 *
 * @param entryId     uid of the entry whose documents are updated
 * @param variableMap variable name -&gt; value pairs to store
 * @param isVersioned when true, only the latest version of the document is touched
 * @throws CatalogDBException when nothing matched and nothing was modified
 */
private void addPrivateVariableMap(long entryId, Map<String, String> variableMap, boolean isVersioned)
        throws CatalogDBException {
    Document query = new Document(PRIVATE_UID, entryId);
    if (isVersioned) {
        // Restrict the update to the current document version.
        query.append(LAST_OF_VERSION, true);
    }

    List<Bson> setOperations = new ArrayList<>(variableMap.size());
    for (Map.Entry<String, String> entry : variableMap.entrySet()) {
        setOperations.add(Updates.set(
                AnnotationSetParams.PRIVATE_VARIABLE_SET_MAP.key() + "." + entry.getKey(),
                entry.getValue()));
    }

    QueryResult<UpdateResult> update = getCollection()
            .update(query, Updates.combine(setOperations), new QueryOptions("multi", true));
    // Fail only when nothing was modified AND nothing matched at all.
    if (update.first().getModifiedCount() < 1 && update.first().getMatchedCount() == 0) {
        throw new CatalogDBException("Could not add new private map information");
    }
}
// NOTE(review): this fragment is truncated — an enclosing if/else opens before
// this excerpt, and the or(/and( builder calls below are missing arguments and
// closing parentheses. Do not compile as-is; recover the full method first.
MongoDBVariantStageReader.addChromosomeFilter(chrFilters, chromosome);
chrFilter = or(chrFilters);
} else {
// No chromosome restriction: use an empty filter document.
chrFilter = new Document();
filters.add(exists(studyId + "." + NEW_STUDY_FIELD, false));
List<Bson> updates = new ArrayList<>(fileIds.size() + 1);
for (Integer fileId : fileIds) {
    String studyFile = studyId + "_" + fileId;
    // Null out the per-file entry under the study key.
    updates.add(set(studyId + "." + fileId, null));
    filters.add(
            or(
                    ne(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyFile),
                    and(
updates.add(pullAll(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyFiles));
Bson filter = and(in(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyFiles), chrFilter, and(filters));
LOGGER.info("Clean studies from stage where all the files where duplicated");
// Multi-document update; accumulate how many stage documents were touched.
modifiedCount += stageCollection.update(
        filter, combine(updates),
        new QueryOptions(MongoDBCollection.MULTI, true)).first().getModifiedCount();
LOGGER.info("Cleaning files {} from stage collection", fileIds);
modifiedCount += stageCollection.update(and(filters), combine(updates),
        new QueryOptions(MongoDBCollection.MULTI, true)).first().getModifiedCount();
@Override public void nativeInsert(Map<String, Object> project, String userId) throws CatalogDBException { Bson query = Filters.and(Filters.eq(UserDBAdaptor.QueryParams.ID.key(), userId), Filters.ne(UserDBAdaptor.QueryParams.PROJECTS_ID.key(), project.get(QueryParams.ID.key()))); Bson update = Updates.push("projects", getMongoDBDocument(project, "project")); //Update object QueryResult<UpdateResult> queryResult = userCollection.update(query, update, null); if (queryResult.getResult().get(0).getModifiedCount() == 0) { // Check if the project has been inserted throw new CatalogDBException("Project {" + project.get(QueryParams.ID.key()) + "\"} already exists for this user"); } }
// NOTE(review): this fragment is truncated — the enclosing loop is not visible,
// the `continue` guard and the trailing if are missing closing braces, and
// alertConditionId/field/intValue come from code outside this excerpt
// (presumably from iterating `alertConditions`) — TODO confirm.
final String streamId = document.getObjectId(FIELD_ID).toHexString();
if (!document.containsKey(FIELD_ALERT_CONDITIONS)) {
    continue;
final List<Document> alertConditions = (List<Document>) document.get(FIELD_ALERT_CONDITIONS);
final UpdateResult result = collection.updateOne(
        eq(FIELD_ALERT_CONDITIONS_ID, alertConditionId),
        set(ALERT_CONDITIONS_PARAMETERS_PREFIX + field, intValue));
// Record which streams/conditions were actually touched by the migration.
if (result.getMatchedCount() > 0) {
    modifiedStreams.add(streamId);
    modifiedAlertConditions.add(alertConditionId);
// NOTE(review): this fragment is truncated — the opening `try` block is missing,
// and `id`, `date`, `lockWriteField`, `modifiedCount` are defined outside this
// excerpt. Upsert a placeholder document; a MongoWriteException here presumably
// signals a duplicate key, i.e. the document already exists — TODO confirm.
collection.update(new Document("_id", id), set("_id", id), new QueryOptions(MongoDBCollection.UPSERT, true));
} catch (MongoWriteException e) {
    Date now = Calendar.getInstance().getTime();
    // Take the lock only when it is free (null) or already expired (before `now`).
    Bson query = and(eq("_id", id), or(eq(lockWriteField, null), lt(lockWriteField, now)));
    Bson update = combine(set(lockWriteField, date));
    modifiedCount = collection.update(query, update, null).first().getModifiedCount();
/**
 * Attempts to transfer ownership of a transaction lock record from
 * {@code source} to {@code target}.
 *
 * @return {@code true} when exactly one lock record matched (and was re-owned);
 *         {@code false} otherwise, including on any runtime error (logged)
 */
private boolean takeOverTransactionInMongoDB(TransactionXid transactionXid, String source, String target) {
    String instanceId = ByteUtils.byteArrayToString(transactionXid.getGlobalTransactionId());
    try {
        // Mongo database names may not contain non-word characters; normalize them.
        String databaseName = CommonUtils.getApplication(this.endpoint).replaceAll("\\W", "_");
        MongoDatabase mdb = this.mongoClient.getDatabase(databaseName);
        MongoCollection<Document> locks = mdb.getCollection(CONSTANTS_TB_LOCKS);

        // Re-own the lock only if it is still held by the expected source instance.
        Bson filter = Filters.and(
                Filters.eq(CONSTANTS_FD_GLOBAL, instanceId),
                Filters.eq("identifier", source));
        UpdateResult result = locks.updateOne(filter, new Document("$set", new Document("identifier", target)));
        return result.getMatchedCount() == 1;
    } catch (RuntimeException rex) {
        logger.error("Error occurred while locking transaction(gxid= {}).", instanceId, rex);
        return false;
    }
}
/**
 * {@inheritDoc}
 */
@Override
public void releaseClaim(String processorName, int segment) {
    // Clear ownership only when this node still holds the claim; the owner
    // condition makes the release a no-op if another node took over.
    UpdateResult updateResult = mongoTemplate.trackingTokensCollection().updateOne(
            and(eq("processorName", processorName),
                eq("segment", segment),
                eq("owner", nodeId)),
            set("owner", null));

    if (updateResult.getMatchedCount() == 0) {
        logger.warn("Releasing claim of token {}/{} failed. It was owned by another node.",
                processorName, segment);
    }
}
private void removePrivateVariableMap(long entryId, Map<String, String> privateVariableMapToSet, boolean isVersioned) throws CatalogDBException { Document queryDocument = new Document(PRIVATE_UID, entryId); if (isVersioned) { queryDocument.append(LAST_OF_VERSION, true); } for (Map.Entry<String, String> entry : privateVariableMapToSet.entrySet()) { // We only want to remove the private variable map if it is not currently in use by any annotation set queryDocument.append(AnnotationSetParams.VARIABLE_SET_ID.key(), new Document("$ne", Long.parseLong(entry.getKey()))); Bson unset = Updates.unset(AnnotationSetParams.PRIVATE_VARIABLE_SET_MAP.key() + "." + entry.getKey()); QueryResult<UpdateResult> update = getCollection().update(queryDocument, unset, new QueryOptions()); if (update.first().getModifiedCount() < 1 && update.first().getMatchedCount() == 1) { throw new CatalogDBException("Could not remove private map information"); } } }
/**
 * Executes a {@code replaceOne} described by the query descriptor, honoring
 * optional {@code upsert}, {@code writeConcern} and {@code collation} options.
 *
 * @return the number of modified documents, or {@code -1} when the write was
 *         unacknowledged
 */
private static int doReplaceOne(final MongoDBQueryDescriptor queryDescriptor,
        final MongoCollection<Document> collection) {
    final Document query = queryDescriptor.getCriteria();
    final Document update = queryDescriptor.getUpdateOrInsertOne();
    final Document options = queryDescriptor.getOptions();

    Boolean upsert = FALSE;
    Collation collation = null;
    WriteConcern writeConcern = null;
    if (options != null) {
        // Missing "upsert" is treated as false.
        final Boolean requestedUpsert = (Boolean) options.get("upsert");
        upsert = (requestedUpsert != null) ? requestedUpsert : FALSE;
        final Document wc = (Document) options.get("writeConcern");
        writeConcern = (wc != null) ? getWriteConcern(wc) : null;
        final Document col = (Document) options.get("collation");
        collation = (col != null) ? getCollation(col) : null;
    }

    final ReplaceOptions replaceOptions = new ReplaceOptions().upsert(upsert).collation(collation);
    // Fall back to the collection's own write concern when none was requested.
    final UpdateResult result = collection
            .withWriteConcern(writeConcern != null ? writeConcern : collection.getWriteConcern())
            .replaceOne(query, update, replaceOptions);
    return result.wasAcknowledged() ? (int) result.getModifiedCount() : -1;
}
@Override public void upgrade() { if (clusterConfigService.get(MigrationCompleted.class) != null) { LOG.debug("Migration already done."); return; } // Do not overwrite an existing default index config boolean defaultDone = clusterConfigService.get(DefaultIndexSetConfig.class) != null; final ImmutableSet.Builder<String> builder = ImmutableSet.builder(); final FindIterable<Document> documents = collection.find(exists(FIELD_DEFAULT)).sort(ascending(FIELD_CREATION_DATE)); for (final Document document : documents) { final ObjectId id = document.getObjectId(FIELD_ID); final String idString = id.toHexString(); final boolean isDefault = firstNonNull(document.getBoolean(FIELD_DEFAULT), false); if (!defaultDone && isDefault) { defaultDone = true; clusterConfigService.write(DefaultIndexSetConfig.create(idString)); } final long modifiedCount = collection.updateOne(eq(FIELD_ID, id), unset(FIELD_DEFAULT)).getMatchedCount(); if (modifiedCount > 0) { LOG.info("Removed <default> field from index set <{}> ({})", document.getString(FIELD_TITLE), idString); builder.add(idString); } else { LOG.error("Couldn't remove <default> field from index set <{}> ({})", document.getString(FIELD_TITLE), idString); } } clusterConfigService.write(MigrationCompleted.create(builder.build())); }
@Override public QueryResult deleteFilter(String userId, String name) throws CatalogDBException { long startTime = startQuery(); // Delete the filter Bson bsonQuery = Filters.and( Filters.eq(QueryParams.ID.key(), userId), Filters.eq(QueryParams.CONFIGS_FILTERS_NAME.key(), name) ); Bson update = Updates.pull(QueryParams.CONFIGS_FILTERS.key(), new Document(FilterParams.NAME.key(), name)); QueryResult<UpdateResult> queryResult = userCollection.update(bsonQuery, update, null); if (queryResult.first().getModifiedCount() == 0) { throw new CatalogDBException("Internal error: Filter " + name + " could not be removed"); } return endQuery("Delete filter", startTime, Arrays.asList(queryResult.first().getModifiedCount())); }
/**
 * Inserts or refreshes the lock document, failing when the lock is held by
 * another owner.
 *
 * @param onlyIfSameOwner refresh mode: must match an existing document owned by
 *                        us (no upsert); otherwise a fresh acquisition upserts
 * @throws LockPersistenceException when the lock is held elsewhere
 */
private void insertUpdate(LockEntry newLock, boolean onlyIfSameOwner) {
    boolean lockHeld;
    try {
        final Bson acquireLockQuery =
                getAcquireLockQuery(newLock.getKey(), newLock.getOwner(), onlyIfSameOwner);
        final UpdateResult updateResult = collection.updateMany(
                acquireLockQuery,
                new Document().append("$set", newLock.buildFullDBObject()),
                new UpdateOptions().upsert(!onlyIfSameOwner));
        // Neither modified nor upserted -> someone else holds the lock.
        lockHeld = updateResult.getModifiedCount() <= 0 && updateResult.getUpsertedId() == null;
    } catch (MongoWriteException ex) {
        // A duplicate-key error on upsert means another process inserted the lock first.
        lockHeld = ex.getError().getCategory() == ErrorCategory.DUPLICATE_KEY;
        if (!lockHeld) {
            throw ex;
        }
    } catch (DuplicateKeyException ex) {
        lockHeld = true;
    }
    if (lockHeld) {
        throw new LockPersistenceException("Lock is held");
    }
}
/**
 * Appends the given annotation documents to the entry's annotation-set array.
 *
 * @param isVersioned when true, only the latest version of the document is touched
 * @throws CatalogDBException when no document was modified
 */
private void addNewAnnotations(long entryId, List<Document> annotationDocumentList, boolean isVersioned)
        throws CatalogDBException {
    Document query = new Document(PRIVATE_UID, entryId);
    if (isVersioned) {
        query.append(LAST_OF_VERSION, true);
    }
    // addEachToSet avoids inserting documents already present in the array.
    Bson push = Updates.addEachToSet(AnnotationSetParams.ANNOTATION_SETS.key(), annotationDocumentList);
    QueryResult<UpdateResult> updateResult = getCollection().update(query, push, new QueryOptions("multi", true));
    if (updateResult.first().getModifiedCount() < 1) {
        throw new CatalogDBException("Could not add new annotations");
    }
}
/**
 * Pulls the annotation set with the given id from the entry's annotation-set array.
 *
 * @param isVersioned when true, only the latest version of the document is touched
 * @throws CatalogDBException when no document was modified
 */
private void removeAnnotationSet(long entryId, String annotationSetId, boolean isVersioned)
        throws CatalogDBException {
    Document query = new Document(PRIVATE_UID, entryId);
    if (isVersioned) {
        query.append(LAST_OF_VERSION, true);
    }
    Bson pull = Updates.pull(AnnotationSetParams.ANNOTATION_SETS.key(),
            new Document(AnnotationSetParams.ANNOTATION_SET_NAME.key(), annotationSetId));
    QueryResult<UpdateResult> updateResult = getCollection().update(query, pull, new QueryOptions("multi", true));
    if (updateResult.first().getModifiedCount() < 1) {
        throw new CatalogDBException("Could not delete the annotation set");
    }
}
@Override public void removeUsersFromAllGroups(long studyId, List<String> users) throws CatalogDBException { if (users == null || users.size() == 0) { throw new CatalogDBException("Unable to remove users from groups. List of users is empty"); } Document query = new Document() .append(PRIVATE_UID, studyId) .append(QueryParams.GROUP_USER_IDS.key(), new Document("$in", users)) .append("$isolated", 1); Bson pull = Updates.pullAll("groups.$.userIds", users); // Pull those users while they are still there QueryResult<UpdateResult> update; do { update = studyCollection.update(query, pull, null); } while (update.first().getModifiedCount() > 0); }