private List<Document> guessTableFields(SchemaTableName schemaTableName) { String schemaName = schemaTableName.getSchemaName(); String tableName = schemaTableName.getTableName(); MongoDatabase db = client.getDatabase(schemaName); Document doc = db.getCollection(tableName).find().first(); if (doc == null) { // no records at the collection return ImmutableList.of(); } ImmutableList.Builder<Document> builder = ImmutableList.builder(); for (String key : doc.keySet()) { Object value = doc.get(key); Optional<TypeSignature> fieldType = guessFieldType(value); if (fieldType.isPresent()) { Document metadata = new Document(); metadata.append(FIELDS_NAME_KEY, key); metadata.append(FIELDS_TYPE_KEY, fieldType.get().toString()); metadata.append(FIELDS_HIDDEN_KEY, key.equals("_id") && fieldType.get().equals(OBJECT_ID.getTypeSignature())); builder.add(metadata); } else { log.debug("Unable to guess field type from %s : %s", value == null ? "null" : value.getClass().getName(), value); } } return builder.build(); }
/**
 * Looks up the owner identifier of the lock record for the given transaction.
 *
 * @param transactionXid the global transaction whose lock record is queried
 * @return the lock owner's identifier, or {@code null} if no lock record exists
 *         or the query fails
 */
private String getTransactionOwnerInMongoDB(TransactionXid transactionXid) {
    byte[] global = transactionXid.getGlobalTransactionId();
    String instanceId = ByteUtils.byteArrayToString(global);
    try {
        String application = CommonUtils.getApplication(this.endpoint);
        // Database name is derived from the application id; sanitize non-word chars
        String databaseName = application.replaceAll("\\W", "_");
        MongoDatabase mdb = this.mongoClient.getDatabase(databaseName);
        MongoCollection<Document> collection = mdb.getCollection(CONSTANTS_TB_LOCKS);
        FindIterable<Document> findIterable = collection.find(Filters.eq(CONSTANTS_FD_GLOBAL, instanceId));
        // FIX: close the cursor (try-with-resources) — the original leaked the
        // server-side cursor on every call
        try (MongoCursor<Document> cursor = findIterable.iterator()) {
            if (cursor.hasNext()) {
                Document document = cursor.next();
                return document.getString("identifier");
            }
            return null;
        }
    } catch (RuntimeException rex) {
        logger.error("Error occurred while querying the lock-owner of transaction(gxid= {}).", instanceId, rex);
        return null;
    }
}
MongoCursor<Document> cursor = null; try { MongoCollection<Document> collection = database.getCollection(table); Document scanRange = new Document("$gte", startkey); Document query = new Document("_id", scanRange); Document sort = new Document("_id", INCLUDE); collection.find(query).sort(sort).limit(recordcount); projection.put(fieldName, INCLUDE); findIterable.projection(projection); cursor = findIterable.iterator(); if (!cursor.hasNext()) { System.err.println("Nothing found in scan for key " + startkey); return Status.ERROR; while (cursor.hasNext()) { HashMap<String, ByteIterator> resultMap = new HashMap<String, ByteIterator>(); Document obj = cursor.next(); fillMap(resultMap, obj);
/**
 * Reads the names of all tables registered in the schema collection of the
 * given database.
 *
 * @param schemaName the Mongo database holding the schema collection
 * @return the set of registered table names (possibly empty)
 */
private Set<String> getTableMetadataNames(String schemaName)
        throws TableNotFoundException
{
    MongoDatabase db = client.getDatabase(schemaName);
    HashSet<String> names = new HashSet<>();
    // FIX: close the cursor (try-with-resources) — the original leaked the
    // server-side cursor on every call
    try (MongoCursor<Document> cursor = db.getCollection(schemaCollection)
            .find().projection(new Document(TABLE_NAME_KEY, true)).iterator()) {
        while (cursor.hasNext()) {
            names.add(cursor.next().getString(TABLE_NAME_KEY));
        }
    }
    return names;
}
/**
 * Tests whether the collection contains no documents, optionally within the
 * given client session. Projects only {@code _id} to minimize transferred data.
 */
private <T> boolean collectionIsEmpty(@Nullable final ClientSession clientSession,
                                      final MongoCollection<T> collection) {
    FindIterable<T> iterable = (clientSession == null)
            ? collection.find()
            : collection.find(clientSession);
    // Empty collection <=> no first document
    return iterable.projection(new Document("_id", 1)).first() == null;
}
/**
 * Opens a cursor over this file's chunks starting at the given chunk index,
 * ordered by chunk number ("n").
 */
private MongoCursor<Document> getCursor(final int startChunkIndex) {
    // Select all chunks of this file from startChunkIndex onward
    final Document filter = new Document("files_id", fileId)
            .append("n", new Document("$gte", startChunkIndex));
    final FindIterable<Document> chunks = (clientSession == null)
            ? chunksCollection.find(filter)
            : chunksCollection.find(clientSession, filter);
    return chunks.batchSize(batchSize).sort(new Document("n", 1)).iterator();
}
db.getCollection("movies" + dbName); Bson filter = Filters.eq("name", "Starter Wars"); FindIterable<Document> movieResults = collection.find(filter); try (MongoCursor<Document> cursor = movieResults.iterator();) { assertThat(cursor.tryNext().getString("name")).isEqualTo("Starter Wars"); assertThat(cursor.tryNext()).isNull(); Bson filter = Filters.and(Filters.gt("ts", oplogStart), // start just after our last position Filters.exists("fromMigrate", false)); // skip internal movements across shards FindIterable<Document> results = mongo.getDatabase("local") .getCollection("oplog.rs") .find(filter) .sort(new Document("$natural", 1)) .oplogReplay(true) // tells Mongo to not rely on indexes .noCursorTimeout(true) // don't timeout waiting for events .cursorType(CursorType.TailableAwait); try (MongoCursor<Document> cursor = results.iterator();) { Document event = null; long stopTime = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(maxSeconds); while (System.currentTimeMillis() < stopTime && eventQueue.size() < minimumEventsExpected) { while ((event = cursor.tryNext()) != null) { eventQueue.add(event);
/**
 * Smoke test: inserts a document into the containerized MongoDB and reads it back.
 */
@Test
public void simpleMongoDbTest() {
    MongoClient mongoClient = new MongoClient(mongo.getContainerIpAddress(), mongo.getMappedPort(MONGO_PORT));
    try {
        MongoDatabase database = mongoClient.getDatabase("test");
        MongoCollection<Document> collection = database.getCollection("testCollection");

        Document doc = new Document("name", "foo")
                .append("value", 1);
        collection.insertOne(doc);
        Document doc2 = collection.find(new Document("name", "foo")).first();
        assertEquals("A record can be inserted into and retrieved from MongoDB", 1, doc2.get("value"));
    } finally {
        // FIX: the original never closed the client, leaking its connection pool
        // across tests
        mongoClient.close();
    }
}
/**
 * Loads the schema-collection entry for the given table, creating one by
 * inferring field types from the data when the table exists but has no entry.
 *
 * @throws TableNotFoundException if neither a schema entry nor the backing
 *         collection exists
 */
private Document getTableMetadata(SchemaTableName schemaTableName)
        throws TableNotFoundException
{
    String schemaName = schemaTableName.getSchemaName();
    String tableName = schemaTableName.getTableName();
    MongoDatabase db = client.getDatabase(schemaName);
    MongoCollection<Document> schema = db.getCollection(schemaCollection);

    Document existing = schema.find(new Document(TABLE_NAME_KEY, tableName)).first();
    if (existing != null) {
        return existing;
    }
    if (!collectionExists(db, tableName)) {
        throw new TableNotFoundException(schemaTableName);
    }

    // No schema entry yet: infer one from the data and persist it
    Document metadata = new Document(TABLE_NAME_KEY, tableName);
    metadata.append(FIELDS_KEY, guessTableFields(schemaTableName));
    schema.createIndex(new Document(TABLE_NAME_KEY, 1), new IndexOptions().unique(true));
    schema.insertOne(metadata);
    return metadata;
}
public void recover(TransactionRecoveryCallback callback) { MongoCursor<Document> transactionCursor = null; try { String application = CommonUtils.getApplication(this.endpoint); String databaseName = application.replaceAll("\\W", "_"); MongoDatabase mdb = this.mongoClient.getDatabase(databaseName); MongoCollection<Document> transactions = mdb.getCollection(CONSTANTS_TB_TRANSACTIONS); FindIterable<Document> transactionItr = transactions.find(Filters.eq("coordinator", true)); for (transactionCursor = transactionItr.iterator(); transactionCursor.hasNext();) { Document document = transactionCursor.next(); boolean error = document.getBoolean("error"); String targetApplication = document.getString("system"); long expectVersion = document.getLong("version"); long actualVersion = this.versionManager.getInstanceVersion(targetApplication); if (error == false && actualVersion > 0 && actualVersion <= expectVersion) { continue; // ignore } callback.recover(this.reconstructTransactionArchive(document)); } } catch (RuntimeException error) { logger.error("Error occurred while recovering transaction.", error); } catch (Exception error) { logger.error("Error occurred while recovering transaction.", error); } finally { IOUtils.closeQuietly(transactionCursor); } }
/** * Use the given primary to read the oplog. * * @param primary the connection to the replica set's primary node; may not be null */ protected void readOplog(MongoClient primary) { BsonTimestamp oplogStart = source.lastOffsetTimestamp(replicaSet.replicaSetName()); logger.info("Reading oplog for '{}' primary {} starting at {}", replicaSet, primary.getAddress(), oplogStart); // Include none of the cluster-internal operations and only those events since the previous timestamp ... MongoCollection<Document> oplog = primary.getDatabase("local").getCollection("oplog.rs"); Bson filter = Filters.and(Filters.gt("ts", oplogStart), // start just after our last position Filters.exists("fromMigrate", false)); // skip internal movements across shards FindIterable<Document> results = oplog.find(filter) .sort(new Document("$natural", 1)) // force forwards collection scan .oplogReplay(true) // tells Mongo to not rely on indexes .cursorType(CursorType.TailableAwait); // tail and await new data // Read as much of the oplog as we can ... ServerAddress primaryAddress = primary.getAddress(); try (MongoCursor<Document> cursor = results.iterator()) { while (running.get() && cursor.hasNext()) { if (!handleOplogEvent(primaryAddress, cursor.next())) { // Something happened, and we're supposed to stop reading return; } } } }
/** * Obtain the current position of the oplog, and record it in the source. */ protected void recordCurrentOplogPosition() { primaryClient.execute("get oplog position", primary -> { MongoCollection<Document> oplog = primary.getDatabase("local").getCollection("oplog.rs"); Document last = oplog.find().sort(new Document("$natural", -1)).limit(1).first(); // may be null source.offsetStructForEvent(replicaSet.replicaSetName(), last); }); }
final Document query = getQuery(context, session, input); MongoCollection mongoCollection = clientService.getDatabase(database).getCollection(collection); FindIterable<Document> find = mongoCollection.find(query); if (context.getProperty(SORT).isSet()) { find = find.sort(Document.parse(context.getProperty(SORT).evaluateAttributeExpressions(input).getValue())); find = find.projection(Document.parse(context.getProperty(PROJECTION).evaluateAttributeExpressions(input).getValue())); find = find.limit(context.getProperty(LIMIT).evaluateAttributeExpressions(input).asInteger()); MongoCursor<Document> cursor = find.iterator(); long count = 0L; writer.beginRecordSet(); while (cursor.hasNext()) { Document next = cursor.next(); if (next.get("_id") instanceof ObjectId) { next.put("_id", next.get("_id").toString());
public int timingExecution(int batchSize) { String databaseName = CommonUtils.getApplication(this.endpoint).replaceAll("\\W", "_"); MongoDatabase mdb = this.mongoClient.getDatabase(databaseName); MongoCollection<Document> collection = mdb.getCollection(CONSTANTS_TB_REMOVEDRESES); MongoCursor<Document> cursor = null; try { cursor = collection.find().limit(batchSize).iterator(); for (; cursor.hasNext(); length++) { Document document = cursor.next(); String globalValue = document.getString(CONSTANTS_FD_GLOBAL); String branchValue = document.getString(CONSTANTS_FD_BRANCH); byte[] global = ByteUtils.stringToByteArray(globalValue); byte[] branch = ByteUtils.stringToByteArray(branchValue); TransactionXid branchXid = xidFactory.createBranchXid(globalXid, branch); String resourceId = document.getString("resource_id"); if (StringUtils.isBlank(resourceId)) { continue; byte[] branch = transactionXid.getBranchQualifier(); Bson globalFilter = Filters.eq(CONSTANTS_FD_GLOBAL, ByteUtils.byteArrayToString(global)); Bson branchFilter = Filters.eq(CONSTANTS_FD_BRANCH, ByteUtils.byteArrayToString(branch)); collection.deleteOne(Filters.and(globalFilter, branchFilter));
MongoCollection<Document> contacts = db.getCollection("contacts"); InsertOneOptions insertOptions = new InsertOneOptions().bypassDocumentValidation(true); contacts.insertOne(Document.parse("{ \"name\":\"Jon Snow\"}"), insertOptions); assertThat(db.getCollection("contacts").countDocuments()).isEqualTo(1); Bson filter = Filters.eq("name", "Jon Snow"); FindIterable<Document> movieResults = db.getCollection("contacts").find(filter); try (MongoCursor<Document> cursor = movieResults.iterator();) { assertThat(cursor.tryNext().getString("name")).isEqualTo("Jon Snow"); assertThat(cursor.tryNext()).isNull(); FindIterable<Document> movieResults = db.getCollection("contacts").find(); Set<String> foundNames = new HashSet<>(); try (MongoCursor<Document> cursor = movieResults.iterator();) { while (cursor.hasNext()) { String name = cursor.next().getString("name"); foundNames.add(name); Bson filter = Filters.eq("name", "Jon Snow"); FindIterable<Document> movieResults = db.getCollection("contacts").find(filter); try (MongoCursor<Document> cursor = movieResults.iterator();) { Document doc = cursor.tryNext(); assertThat(doc.getString("name")).isEqualTo("Jon Snow"); contacts.deleteOne(Filters.eq("name", "Jon Snow")); Testing.debug("Removed the Jon Snow document from 'dbA.contacts' collection"); });
/**
 * Reads only the conversation-state field of the stored conversation.
 *
 * @return the stored state, or {@code null} if the conversation does not exist
 *         or has no state field
 */
@Override
public ConversationState getConversationState(String conversationId) {
    // Project only the state field; exclude the _id from the result
    Document projection = new Document(CONVERSATION_STATE_FIELD, 1).append(OBJECT_ID, 0);
    Document found = conversationCollection
            .find(new Document(OBJECT_ID, new ObjectId(conversationId)))
            .projection(projection)
            .first();
    if (found == null || !found.containsKey(CONVERSATION_STATE_FIELD)) {
        return null;
    }
    return ConversationState.valueOf(found.get(CONVERSATION_STATE_FIELD).toString());
}
Map<String, ByteIterator> result) { try { MongoCollection<Document> collection = database.getCollection(table); Document query = new Document("_id", key); FindIterable<Document> findIterable = collection.find(query); Document projection = new Document(); for (String field : fields) { projection.put(field, INCLUDE); findIterable.projection(projection); Document queryResult = findIterable.first();
/**
 * Loads up to {@code eventReadLimit} events for one aggregate, in journal-id
 * order, optionally resuming strictly after a previous journal id.
 */
@Override
public EventBatch loadEventsForAggregateId(final String aggregateType, String aggregateId, String fromJournalId) {
    final Document query = new Document("rid", aggregateId);
    if (fromJournalId != null) {
        // Resume strictly after the given journal position
        query.append("jid", new Document("$gt", Long.parseLong(fromJournalId)));
    }

    final FindIterable<Document> dbObjects = MongoDbOperations.doDbOperation(
            () -> db.getCollection(aggregateType).find(query).sort(new Document("jid", 1)).limit(eventReadLimit));

    final List<Event> events = StreamSupport.stream(dbObjects.spliterator(), false)
            .map(document -> ((Binary) document.get("d")).getData())
            .map(data -> deSerialize(data))
            .collect(Collectors.toList());

    // A page shorter than the read limit means the journal is drained
    return new EventBatch(aggregateType, aggregateId, events, events.size() != eventReadLimit);
}
/**
 * Runs the split's query against its collection, projecting exactly the
 * requested columns, and returns the resulting cursor.
 */
public MongoCursor<Document> execute(MongoSplit split, List<MongoColumnHandle> columns) {
    // Build the projection: include each requested column
    Document projection = new Document();
    columns.forEach(column -> projection.append(column.getName(), 1));

    MongoCollection<Document> collection = getCollection(split.getSchemaTableName());
    FindIterable<Document> iterable = collection
            .find(buildQuery(split.getTupleDomain()))
            .projection(projection);
    if (cursorBatchSize != 0) {
        iterable.batchSize(cursorBatchSize);
    }
    return iterable.iterator();
}