/**
 * Returns the names of all tables that have a metadata document in the
 * schema collection of the given schema (database).
 *
 * @param schemaName the MongoDB database name to scan
 * @return the set of table names found in the schema collection
 * @throws TableNotFoundException declared for interface consistency; not thrown here
 */
private Set<String> getTableMetadataNames(String schemaName)
        throws TableNotFoundException
{
    MongoDatabase db = client.getDatabase(schemaName);
    HashSet<String> names = new HashSet<>();
    // MongoCursor holds a server-side cursor; close it deterministically via
    // try-with-resources instead of leaking it until GC (the original never
    // closed it).
    try (MongoCursor<Document> cursor = db.getCollection(schemaCollection)
            .find().projection(new Document(TABLE_NAME_KEY, true)).iterator()) {
        while (cursor.hasNext()) {
            names.add(cursor.next().getString(TABLE_NAME_KEY));
        }
    }
    return names;
}
/**
 * Loads the metadata document for the given table, lazily creating one by
 * inspecting the backing collection when no metadata exists yet.
 *
 * @param schemaTableName the schema (database) and table to look up
 * @return the table's metadata document (existing or freshly created)
 * @throws TableNotFoundException if neither metadata nor a backing collection exists
 */
private Document getTableMetadata(SchemaTableName schemaTableName)
        throws TableNotFoundException
{
    String schemaName = schemaTableName.getSchemaName();
    String tableName = schemaTableName.getTableName();
    MongoDatabase db = client.getDatabase(schemaName);
    MongoCollection<Document> schema = db.getCollection(schemaCollection);

    Document doc = schema.find(new Document(TABLE_NAME_KEY, tableName)).first();
    if (doc != null) {
        return doc;
    }

    // No metadata yet: a table is only valid if a backing collection exists.
    if (!collectionExists(db, tableName)) {
        throw new TableNotFoundException(schemaTableName);
    }

    // Lazily register metadata by inferring the fields from the collection.
    Document metadata = new Document(TABLE_NAME_KEY, tableName)
            .append(FIELDS_KEY, guessTableFields(schemaTableName));
    schema.createIndex(new Document(TABLE_NAME_KEY, 1), new IndexOptions().unique(true));
    schema.insertOne(metadata);
    return metadata;
}
/**
 * Removes the metadata document for the given table.
 *
 * @param schemaTableName the schema (database) and table whose metadata to delete
 * @return {@code true} if exactly one metadata document was deleted;
 *         {@code false} if the backing collection does not exist or nothing matched
 */
private boolean deleteTableMetadata(SchemaTableName schemaTableName)
{
    MongoDatabase db = client.getDatabase(schemaTableName.getSchemaName());
    String tableName = schemaTableName.getTableName();

    // Only tables backed by an actual collection have metadata to remove.
    if (!collectionExists(db, tableName)) {
        return false;
    }

    DeleteResult result = db.getCollection(schemaCollection)
            .deleteOne(new Document(TABLE_NAME_KEY, tableName));
    return result.getDeletedCount() == 1;
}
/**
 * Migration step: strips the obsolete "configuration_path" field from every
 * document that still carries it, replacing each document in place.
 */
private void removeConfigPath() {
    for (final Document document : collection.find(exists("configuration_path"))) {
        final ObjectId objectId = document.getObjectId("_id");
        document.remove("configuration_path");
        final UpdateResult updateResult = collection.replaceOne(eq("_id", objectId), document);
        // wasAcknowledged only tells us the server accepted the write.
        if (updateResult.wasAcknowledged()) {
            LOG.debug("Successfully updated document with ID <{}>", objectId);
        } else {
            LOG.error("Failed to update document with ID <{}>", objectId);
        }
    }
}
/**
 * Collection callback that upserts {@code dbDoc}: replaces the stored
 * document when an id was resolved earlier ({@code dbId != null}), otherwise
 * inserts it as a new document.
 *
 * NOTE(review): the trailing "});" closes an enclosing anonymous-class
 * instantiation that is outside this chunk.
 *
 * @param collection the collection to write to
 * @return always {@code null} (callers ignore the result)
 */
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
    if (dbId != null) {
        // Existing id resolved earlier: replace the stored document wholesale.
        collection.replaceOne(Filters.eq("_id", dbId.get("_id")), dbDoc);
    } else {
        // Strip an explicit null _id so the driver generates one on insert.
        if (dbDoc.containsKey("_id") && dbDoc.get("_id") == null) {
            dbDoc.remove("_id");
        }
        collection.insertOne(dbDoc);
    }
    return null;
}
});
/**
 * Migration: backfills missing "id"/"rev" meta fields on content-pack
 * documents. "id" defaults to the document's ObjectId hex string, "rev"
 * defaults to 0.
 *
 * NOTE(review): the final "}" closes the enclosing class, which is outside
 * this chunk.
 */
@Override
public void upgrade() {
    // Select documents lacking either meta field.
    final FindIterable<Document> documentsWithMissingFields = collection.find(or(not(exists(ContentPack.FIELD_META_ID)), not(exists(ContentPack.FIELD_META_REVISION))));
    for (Document document : documentsWithMissingFields) {
        final ObjectId objectId = document.getObjectId("_id");
        LOG.debug("Found document with missing \"id\" or \"rev\" field with ID <{}>", objectId);
        // get(key, default) keeps an existing value when only one field is missing.
        final String id = document.get("id", objectId.toHexString());
        final int rev = document.get("rev", 0);
        document.put("id", id);
        document.put("rev", rev);
        final UpdateResult updateResult = collection.replaceOne(eq("_id", objectId), document);
        if (updateResult.wasAcknowledged()) {
            LOG.debug("Successfully updated document with ID <{}>", objectId);
        } else {
            LOG.error("Failed to update document with ID <{}>", objectId);
        }
    }
}
}
/**
 * Looks up the MD5 checksum stored with a file artifact.
 *
 * @param dbKey identifies the company/project database
 * @param objectID hex string of the artifact's ObjectId
 * @return the MD5 string, or {@code null} when the artifact (or its MD5
 *         field) is not found
 */
@Override
public String getArtifactMD5(DBKey dbKey, String objectID) {
    final String dbName = MongoDBClient.getDbName(dbKey.getCompany(), dbKey.getProject());
    // Parameterized FindIterable<Document> replaces the raw type and the
    // unchecked cast on first() from the original.
    final FindIterable<Document> findIterable = client.getDatabase(dbName)
            .getCollection(ARTIFACTS_COLLECTION_NAME + FILES_COLLECTION_SUFFIX)
            .find(new Document(ID_FIELD_NAME, new ObjectId(objectID)));
    final Document fileMetadata = findIterable.first();
    if (fileMetadata == null) {
        LOGGER.error("Unable to find file artifact with ObjectID: {}", objectID);
        return null;
    }
    // Guard against a document without an MD5 field — the original would
    // have thrown an NPE on .toString() here.
    final Object md5 = fileMetadata.get(MD5_FIELD_NAME);
    return md5 != null ? md5.toString() : null;
}
/**
 * Smoke test: inserts one document into the containerized MongoDB and reads
 * it back by name.
 */
@Test
public void simpleMongoDbTest() {
    // try-with-resources: the original leaked the MongoClient's connection
    // pool (MongoClient implements Closeable).
    try (MongoClient mongoClient = new MongoClient(mongo.getContainerIpAddress(),
            mongo.getMappedPort(MONGO_PORT))) {
        MongoDatabase database = mongoClient.getDatabase("test");
        MongoCollection<Document> collection = database.getCollection("testCollection");

        Document doc = new Document("name", "foo")
                .append("value", 1);
        collection.insertOne(doc);

        Document doc2 = collection.find(new Document("name", "foo")).first();
        assertEquals("A record can be inserted into and retrieved from MongoDB", 1, doc2.get("value"));
    }
}
/**
 * Flags the given transaction as MARKED_ROLLBACK in the transaction table,
 * but only while it is still in the ACTIVE state. Failures are logged, not
 * propagated.
 */
private void markTransactionRollback(TransactionXid transactionXid) {
    try {
        String globalId = ByteUtils.byteArrayToString(transactionXid.getGlobalTransactionId());
        String databaseName = CommonUtils.getApplication(this.endpoint).replaceAll("\\W", "_");
        MongoCollection<Document> transactions = this.mongoClient
                .getDatabase(databaseName)
                .getCollection(CONSTANTS_TB_TRANSACTIONS);

        // Only flip the status if the transaction is still ACTIVE.
        Bson selector = Filters.and(
                Filters.eq(CONSTANTS_FD_GLOBAL, globalId),
                Filters.eq("status", Status.STATUS_ACTIVE));
        Document update = new Document("$set", new Document("status", Status.STATUS_MARKED_ROLLBACK));
        transactions.updateOne(selector, update);
    } catch (RuntimeException error) {
        logger.error("Error occurred while setting the error flag.", error);
    }
}
// NOTE(review): this span looks like a mangled merge of two unrelated test
// fragments — `db` and `filter` are each declared twice, and an oplog-tailing
// snippet is spliced into the middle of a movie-collection assertion block.
// It will not compile as-is; the comments below annotate the apparent intent
// of each piece. TODO: confirm against the original files.
MongoDatabase db = mongo.getDatabase("db" + dbName);
db.getCollection("movies" + dbName); // return value discarded — presumably just resolves the collection handle
MongoDatabase db = mongo.getDatabase("db" + dbName); // NOTE(review): duplicate declaration of `db`
MongoCollection<Document> collection = db.getCollection("movies" + dbName);
MongoCollection<Document> movies = collection;
// bypassDocumentValidation: insert even if the collection has a validator
InsertOneOptions insertOptions = new InsertOneOptions().bypassDocumentValidation(true);
movies.insertOne(Document.parse("{ \"name\":\"Starter Wars\"}"), insertOptions);
assertThat(collection.countDocuments()).isEqualTo(1);
Bson filter = Filters.eq("name", "Starter Wars");
FindIterable<Document> movieResults = collection.find(filter);
try (MongoCursor<Document> cursor = movieResults.iterator();) {
    assertThat(cursor.tryNext().getString("name")).isEqualTo("Starter Wars");
    assertThat(cursor.tryNext()).isNull();
    // NOTE(review): from here on the content belongs to a different test —
    // an oplog tail; `oplogStart` is not defined in this fragment and
    // `filter` is redeclared.
    Bson filter = Filters.and(Filters.gt("ts", oplogStart), // start just after our last position
            Filters.exists("fromMigrate", false)); // skip internal movements across shards
    FindIterable<Document> results = mongo.getDatabase("local")
            .getCollection("oplog.rs")
            .find(filter)
            .sort(new Document("$natural", 1))
            .oplogReplay(true) // tells Mongo to not rely on indexes
            .noCursorTimeout(true) // don't timeout waiting for events
            .cursorType(CursorType.TailableAwait);
private List<Document> guessTableFields(SchemaTableName schemaTableName) { String schemaName = schemaTableName.getSchemaName(); String tableName = schemaTableName.getTableName(); MongoDatabase db = client.getDatabase(schemaName); Document doc = db.getCollection(tableName).find().first(); if (doc == null) { // no records at the collection return ImmutableList.of(); } ImmutableList.Builder<Document> builder = ImmutableList.builder(); for (String key : doc.keySet()) { Object value = doc.get(key); Optional<TypeSignature> fieldType = guessFieldType(value); if (fieldType.isPresent()) { Document metadata = new Document(); metadata.append(FIELDS_NAME_KEY, key); metadata.append(FIELDS_TYPE_KEY, fieldType.get().toString()); metadata.append(FIELDS_HIDDEN_KEY, key.equals("_id") && fieldType.get().equals(OBJECT_ID.getTypeSignature())); builder.add(metadata); } else { log.debug("Unable to guess field type from %s : %s", value == null ? "null" : value.getClass().getName(), value); } } return builder.build(); }
/** * Use the given primary to read the oplog. * * @param primary the connection to the replica set's primary node; may not be null */ protected void readOplog(MongoClient primary) { BsonTimestamp oplogStart = source.lastOffsetTimestamp(replicaSet.replicaSetName()); logger.info("Reading oplog for '{}' primary {} starting at {}", replicaSet, primary.getAddress(), oplogStart); // Include none of the cluster-internal operations and only those events since the previous timestamp ... MongoCollection<Document> oplog = primary.getDatabase("local").getCollection("oplog.rs"); Bson filter = Filters.and(Filters.gt("ts", oplogStart), // start just after our last position Filters.exists("fromMigrate", false)); // skip internal movements across shards FindIterable<Document> results = oplog.find(filter) .sort(new Document("$natural", 1)) // force forwards collection scan .oplogReplay(true) // tells Mongo to not rely on indexes .cursorType(CursorType.TailableAwait); // tail and await new data // Read as much of the oplog as we can ... ServerAddress primaryAddress = primary.getAddress(); try (MongoCursor<Document> cursor = results.iterator()) { while (running.get() && cursor.hasNext()) { if (!handleOplogEvent(primaryAddress, cursor.next())) { // Something happened, and we're supposed to stop reading return; } } } }
/**
 * Queries the lock table for the identifier of the instance that currently
 * owns the given transaction's lock.
 *
 * @param transactionXid the transaction whose lock owner to look up
 * @return the owner identifier, or {@code null} when no lock exists or the
 *         query fails
 */
private String getTransactionOwnerInMongoDB(TransactionXid transactionXid) {
    byte[] global = transactionXid.getGlobalTransactionId();
    String instanceId = ByteUtils.byteArrayToString(global);
    try {
        String application = CommonUtils.getApplication(this.endpoint);
        String databaseName = application.replaceAll("\\W", "_");
        MongoDatabase mdb = this.mongoClient.getDatabase(databaseName);
        MongoCollection<Document> collection = mdb.getCollection(CONSTANTS_TB_LOCKS);
        // first() retrieves at most one document and releases the server-side
        // cursor; the original opened an iterator() it never closed.
        Document document = collection.find(Filters.eq(CONSTANTS_FD_GLOBAL, instanceId)).first();
        return document == null ? null : document.getString("identifier");
    } catch (RuntimeException rex) {
        logger.error("Error occurred while querying the lock-owner of transaction(gxid= {}).", instanceId, rex);
        return null;
    }
}
/**
 * Seeds the "people" collection in the local test MongoDB with a fixed set
 * of documents before each test.
 *
 * NOTE(review): this block appears truncated in the visible chunk — the
 * insertMany(...) argument list is cut off mid-document, so the remaining
 * documents and closing parentheses are not shown here.
 */
@Before
public void setUp() {
    // NOTE(review): the MongoClient created here is never closed in this
    // method; confirm whether a teardown elsewhere releases it.
    MongoClient mongo = new MongoClient("localhost:27017");
    MongoDatabase database = mongo.getDatabase("test");
    peopleCollection = database.getCollection("people");
    // Start from a clean collection so the fixture is deterministic.
    peopleCollection.deleteMany(new Document());
    peopleCollection.insertMany(Arrays.asList(
            new Document()
                    .append("_id", new ObjectId("582ecee28feed271b9f54e58"))
                    .append("name", "Paul")
                    .append("position", "developer")
                    .append("age", 25),
            new Document()
                    .append("_id", new ObjectId("582ecee28feed271b9f54e59"))
                    .append("name", "Melissa")
                    .append("position", "developer")
                    .append("age", 26),
            new Document()
                    .append("_id", new ObjectId("582ecee28feed271b9f54e60"))
                    .append("name", "Roger")
                    .append("position", "manager")
/**
 * Starts a MongoDB test container (if possible), seeds a "test" collection
 * with one document, and boots an impermanent graph database with the
 * MongoDB procedures registered. Skips the suite when the container cannot
 * be started.
 */
@BeforeClass
public static void setUp() throws Exception {
    TestUtil.ignoreException(() -> {
        mongo = new GenericContainer("mongo:3")
                .withNetworkAliases("mongo-" + Base58.randomString(6))
                .withExposedPorts(MONGO_DEFAULT_PORT)
                .waitingFor(new HttpWaitStrategy()
                        .forPort(MONGO_DEFAULT_PORT)
                        .forStatusCodeMatching(response -> response == HTTP_OK || response == HTTP_UNAUTHORIZED)
                        .withStartupTimeout(Duration.ofMinutes(2)));
        mongo.start();
    }, Exception.class);
    assumeNotNull(mongo);
    assumeTrue("Mongo DB must be running", mongo.isRunning());

    // Resolve the mapped address once and reuse it for both client and URI.
    String mongoHost = mongo.getContainerIpAddress();
    Integer mongoPort = mongo.getMappedPort(MONGO_DEFAULT_PORT);
    MongoClient mongoClient = new MongoClient(mongoHost, mongoPort);
    HOST = String.format("mongodb://%s:%s", mongoHost, mongoPort);
    params = map("host", HOST, "db", "test", "collection", "test");

    MongoDatabase database = mongoClient.getDatabase("test");
    collection = database.getCollection("test");
    collection.deleteMany(new Document());
    collection.insertOne(new Document(map("name", "testDocument", "date", currentTime, "longValue", longValue)));

    db = new TestGraphDatabaseFactory()
            .newImpermanentDatabaseBuilder()
            .newGraphDatabase();
    TestUtil.registerProcedure(db, MongoDB.class);
    mongoClient.close();
}
/** * Obtain the current position of the oplog, and record it in the source. */ protected void recordCurrentOplogPosition() { primaryClient.execute("get oplog position", primary -> { MongoCollection<Document> oplog = primary.getDatabase("local").getCollection("oplog.rs"); Document last = oplog.find().sort(new Document("$natural", -1)).limit(1).first(); // may be null source.offsetStructForEvent(replicaSet.replicaSetName(), last); }); }
public void recover(TransactionRecoveryCallback callback) { MongoCursor<Document> transactionCursor = null; try { String application = CommonUtils.getApplication(this.endpoint); String databaseName = application.replaceAll("\\W", "_"); MongoDatabase mdb = this.mongoClient.getDatabase(databaseName); MongoCollection<Document> transactions = mdb.getCollection(CONSTANTS_TB_TRANSACTIONS); FindIterable<Document> transactionItr = transactions.find(Filters.eq("coordinator", true)); for (transactionCursor = transactionItr.iterator(); transactionCursor.hasNext();) { Document document = transactionCursor.next(); boolean error = document.getBoolean("error"); String targetApplication = document.getString("system"); long expectVersion = document.getLong("version"); long actualVersion = this.versionManager.getInstanceVersion(targetApplication); if (error == false && actualVersion > 0 && actualVersion <= expectVersion) { continue; // ignore } callback.recover(this.reconstructTransactionArchive(document)); } } catch (RuntimeException error) { logger.error("Error occurred while recovering transaction.", error); } catch (Exception error) { logger.error("Error occurred while recovering transaction.", error); } finally { IOUtils.closeQuietly(transactionCursor); } }
/**
 * Creates the metadata document for a new table in the schema collection.
 * An implicit hidden "_id" ObjectId column is prepended unless the caller
 * already declared one.
 *
 * @param schemaTableName the schema (database) and table to register
 * @param columns the user-declared columns of the table
 * @throws TableNotFoundException declared for interface consistency; not thrown here
 */
private void createTableMetadata(SchemaTableName schemaTableName, List<MongoColumnHandle> columns)
        throws TableNotFoundException
{
    String schemaName = schemaTableName.getSchemaName();
    String tableName = schemaTableName.getTableName();
    MongoDatabase db = client.getDatabase(schemaName);

    ArrayList<Document> fields = new ArrayList<>();
    // noneMatch reads directly; the original used the double-negative !anyMatch.
    if (columns.stream().noneMatch(c -> c.getName().equals("_id"))) {
        fields.add(new MongoColumnHandle("_id", OBJECT_ID, true).getDocument());
    }
    fields.addAll(columns.stream()
            .map(MongoColumnHandle::getDocument)
            .collect(toList()));

    Document metadata = new Document(TABLE_NAME_KEY, tableName)
            .append(FIELDS_KEY, fields);

    MongoCollection<Document> schema = db.getCollection(schemaCollection);
    // Unique index guarantees at most one metadata document per table name.
    schema.createIndex(new Document(TABLE_NAME_KEY, 1), new IndexOptions().unique(true));
    schema.insertOne(metadata);
}
/**
 * Processes up to {@code batchSize} rows of the removed-resources table.
 *
 * NOTE(review): this fragment is garbled/truncated in the visible chunk:
 * `length` is incremented but never declared here, `globalXid` and
 * `transactionXid` are not in scope, `branch` is declared twice, and the
 * loop/try/method braces are never closed. The comments below describe
 * apparent intent only — confirm against the original file.
 */
public int timingExecution(int batchSize) {
    String databaseName = CommonUtils.getApplication(this.endpoint).replaceAll("\\W", "_");
    MongoDatabase mdb = this.mongoClient.getDatabase(databaseName);
    MongoCollection<Document> collection = mdb.getCollection(CONSTANTS_TB_REMOVEDRESES);
    MongoCursor<Document> cursor = null;
    try {
        cursor = collection.find().limit(batchSize).iterator();
        for (; cursor.hasNext(); length++) { // NOTE(review): `length` undeclared in this fragment
            Document document = cursor.next();
            String globalValue = document.getString(CONSTANTS_FD_GLOBAL);
            String branchValue = document.getString(CONSTANTS_FD_BRANCH);
            byte[] global = ByteUtils.stringToByteArray(globalValue);
            byte[] branch = ByteUtils.stringToByteArray(branchValue);
            // NOTE(review): `globalXid` is not defined here — TODO confirm its source
            TransactionXid branchXid = xidFactory.createBranchXid(globalXid, branch);
            String resourceId = document.getString("resource_id");
            if (StringUtils.isBlank(resourceId)) {
                continue;
                // NOTE(review): everything below the `continue` is unreachable
                // as written and redeclares `branch`; it likely belongs after
                // the if-block in the original.
                byte[] branch = transactionXid.getBranchQualifier();
                Bson globalFilter = Filters.eq(CONSTANTS_FD_GLOBAL, ByteUtils.byteArrayToString(global));
                Bson branchFilter = Filters.eq(CONSTANTS_FD_BRANCH, ByteUtils.byteArrayToString(branch));
                collection.deleteOne(Filters.and(globalFilter, branchFilter));
// NOTE(review): two lambda-body fragments from an integration test; each ends
// with "});" that closes an enclosing call outside this chunk, so the
// surrounding method cannot be restructured from here.

// Fragment 1: insert a document (bypassing collection validation) and record
// its generated ObjectId for later use.
MongoDatabase db1 = mongo.getDatabase("dbit");
MongoCollection<Document> coll = db1.getCollection("arbitrary");
coll.drop(); // start from an empty collection
Document doc = Document.parse("{\"a\": 1, \"b\": 2}");
// bypassDocumentValidation: insert even if a validator is configured
InsertOneOptions insertOptions = new InsertOneOptions().bypassDocumentValidation(true);
coll.insertOne(doc, insertOptions);
doc = coll.find().first();
Testing.debug("Document: " + doc);
id.set(doc.getObjectId("_id").toString());
Testing.debug("Document ID: " + id.get());
});
// Fragment 2: update field "b" of the previously inserted document via $set.
MongoDatabase db1 = mongo.getDatabase("dbit"); // NOTE(review): redeclared — separate lambda scope
MongoCollection<Document> coll = db1.getCollection("arbitrary");
Document doc = coll.find().first();
Testing.debug("Document: " + doc);
Document filter = Document.parse("{\"a\": 1}");
Document operation = Document.parse("{ \"$set\": { \"b\": 10 } }");
coll.updateOne(filter, operation);
doc = coll.find().first();
Testing.debug("Document: " + doc);
});