/**
 * Persists the metadata document ({table name, field list}) for a new table
 * into the schema collection of the table's database.
 * <p>
 * Every table is given an {@code _id} column: if the caller did not supply
 * one, a hidden ObjectId column is prepended to the stored field list.
 *
 * @param schemaTableName database ("schema") and collection ("table") names
 * @param columns         column handles describing the table's fields
 */
private void createTableMetadata(SchemaTableName schemaTableName, List<MongoColumnHandle> columns)
        throws TableNotFoundException
{
    String schemaName = schemaTableName.getSchemaName();
    String tableName = schemaTableName.getTableName();
    MongoDatabase db = client.getDatabase(schemaName);
    Document metadata = new Document(TABLE_NAME_KEY, tableName);

    // Program to the List interface; noneMatch() reads more directly than
    // the original !anyMatch().
    List<Document> fields = new ArrayList<>();
    if (columns.stream().noneMatch(c -> c.getName().equals("_id"))) {
        // Synthesize a hidden _id column so the table always exposes one.
        fields.add(new MongoColumnHandle("_id", OBJECT_ID, true).getDocument());
    }
    fields.addAll(columns.stream()
            .map(MongoColumnHandle::getDocument)
            .collect(toList()));
    metadata.append(FIELDS_KEY, fields);

    MongoCollection<Document> schema = db.getCollection(schemaCollection);
    // createIndex is a no-op when the same unique index already exists, so
    // repeated table creations in one database are safe.
    schema.createIndex(new Document(TABLE_NAME_KEY, 1), new IndexOptions().unique(true));
    schema.insertOne(metadata);
}
@Override
public void close() {
    // Idempotent close: only the first caller to flip the flag proceeds;
    // concurrent or repeated close() calls return immediately.
    synchronized (closeLock) {
        if (closed) {
            return;
        }
        closed = true;
    }
    // Flush whatever is still buffered as a final (possibly partial) chunk.
    writeChunk();
    // Write the GridFS "files" entry describing the completed upload.
    // NOTE(review): java.util.Date and MD5 are required by the GridFS file
    // document schema here — not candidates for modernization.
    GridFSFile gridFSFile = new GridFSFile(fileId, filename, lengthInBytes, chunkSizeBytes, new Date(), getMD5Digest(), metadata);
    if (clientSession != null) {
        filesCollection.insertOne(clientSession, gridFSFile);
    } else {
        filesCollection.insertOne(gridFSFile);
    }
    // Drop the chunk buffer so its memory can be reclaimed.
    buffer = null;
}
// Inserts the given document into the wrapped collection and returns the
// same instance for chaining. NOTE(review): presumably the driver assigns a
// generated "_id" to the document in place when one is absent — confirm
// against the wrapped collection's driver version.
@Signature
public Document insertOne(Document doc) {
    getWrappedObject().insertOne(doc);
    return doc;
}
// Collection callback for an INSERT: resolves a per-action WriteConcern,
// inserts the document with it (or with the collection default when none is
// resolved), and returns the entity's id.
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
    // Describe the operation so prepareWriteConcern(...) can choose a
    // concern for it; null legitimately means "use the default".
    MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, entityClass, document, null);
    WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
    if (writeConcernToUse == null) {
        collection.insertOne(document);
    } else {
        // withWriteConcern returns a new collection view; the original
        // collection instance is left untouched.
        collection.withWriteConcern(writeConcernToUse).insertOne(document);
    }
    // Id may have been populated by the driver during the insert.
    return operations.forEntity(document).getId();
} });
/**
 * Flushes the buffered bytes, if any, to the chunks collection as a single
 * GridFS chunk document ({files_id, n, data}); then folds the chunk into the
 * running MD5 digest, advances the chunk counter, and resets the buffer
 * offset. A no-op when nothing is buffered.
 */
private void writeChunk() {
    if (bufferOffset <= 0) {
        return;
    }
    Document chunk = new Document("files_id", fileId)
            .append("n", chunkIndex)
            .append("data", getData());
    if (clientSession == null) {
        chunksCollection.insertOne(chunk);
    } else {
        chunksCollection.insertOne(clientSession, chunk);
    }
    updateMD5();
    chunkIndex++;
    bufferOffset = 0;
}
toInsert, UPDATE_WITH_UPSERT); } else { collection.insertOne(toInsert);
/**
 * Fetches the stored metadata document for the given table from the schema
 * collection. When no metadata is stored yet but the backing collection does
 * exist, a metadata document is synthesized from sampled fields, persisted
 * (under a unique index on the table name), and returned.
 *
 * @throws TableNotFoundException if neither metadata nor the collection exists
 */
private Document getTableMetadata(SchemaTableName schemaTableName)
        throws TableNotFoundException
{
    MongoDatabase db = client.getDatabase(schemaTableName.getSchemaName());
    MongoCollection<Document> schema = db.getCollection(schemaCollection);
    String tableName = schemaTableName.getTableName();

    Document doc = schema.find(new Document(TABLE_NAME_KEY, tableName)).first();
    if (doc != null) {
        return doc;
    }
    if (!collectionExists(db, tableName)) {
        throw new TableNotFoundException(schemaTableName);
    }

    // Collection exists without registered metadata: infer and persist it.
    Document metadata = new Document(TABLE_NAME_KEY, tableName)
            .append(FIELDS_KEY, guessTableFields(schemaTableName));
    schema.createIndex(new Document(TABLE_NAME_KEY, 1), new IndexOptions().unique(true));
    schema.insertOne(metadata);
    return metadata;
}
/**
 * Attempts to acquire a cluster-wide lock on the given transaction by
 * inserting a lock document keyed by the global transaction id.
 *
 * @return {@code true} on success; {@code false} when the lock is already
 *         held (duplicate-key write error) or any other failure occurs
 */
private boolean lockTransactionInMongoDB(TransactionXid transactionXid, String identifier) {
    String instanceId = ByteUtils.byteArrayToString(transactionXid.getGlobalTransactionId());
    try {
        String databaseName = CommonUtils.getApplication(this.endpoint).replaceAll("\\W", "_");
        MongoCollection<Document> collection =
                this.mongoClient.getDatabase(databaseName).getCollection(CONSTANTS_TB_LOCKS);
        Document lock = new Document()
                .append(CONSTANTS_FD_GLOBAL, instanceId)
                .append("identifier", identifier);
        collection.insertOne(lock);
        return true;
    } catch (com.mongodb.MongoWriteException error) {
        // A duplicate key just means another node holds the lock; anything
        // else is an unexpected failure worth logging.
        if (MONGODB_ERROR_DUPLICATE_KEY != error.getError().getCode()) {
            logger.error("Error occurred while locking transaction(gxid= {}).", instanceId, error);
        }
        return false;
    } catch (RuntimeException rex) {
        logger.error("Error occurred while locking transaction(gxid= {}).", instanceId, rex);
        return false;
    }
}
/**
 * Records that the given resource branch of transaction {@code xid} has been
 * forgotten by appending a document to the removed-resources collection.
 * Failures are logged and swallowed — forgetting is best-effort.
 */
public void forget(Xid xid, String resourceId) throws RuntimeException {
    try {
        String databaseName = CommonUtils.getApplication(this.endpoint).replaceAll("\\W", "_");
        MongoCollection<Document> collection =
                this.mongoClient.getDatabase(databaseName).getCollection(CONSTANTS_TB_REMOVEDRESES);
        Document entry = new Document()
                .append(CONSTANTS_FD_GLOBAL, ByteUtils.byteArrayToString(xid.getGlobalTransactionId()))
                .append(CONSTANTS_FD_BRANCH, ByteUtils.byteArrayToString(xid.getBranchQualifier()))
                .append("resource_id", resourceId)
                .append("created", this.endpoint);
        collection.insertOne(entry);
    } catch (RuntimeException error) {
        logger.error("Error occurred while forgetting resource({}).", resourceId, error);
    }
}
/**
 * Builds a consumer that inserts the standard test fixture document, with the
 * supplied {@code _id}, into this test's collection. The fixture payload
 * (including the nested sub-document) is identical to the original.
 */
private Consumer<MongoClient> createInsertItemDefault(int id) {
    final String fixtureJson = "{"
            + "'_id': " + id + ","
            + "'dataStr': 'hello',"
            + "'dataInt': 123,"
            + "'dataLong': 80000000000,"
            + "'dataBoolean': true,"
            + "'dataByte': -1,"
            + "'dataArrayOfStr': ['a','c','e'],"
            + "'nested': {"
            + "'dataStr': 'hello',"
            + "'dataInt': 123,"
            + "'dataLong': 80000000000,"
            + "'dataBoolean': true,"
            + "'dataByte': -1"
            + "}}";
    return client -> client.getDatabase(DB_NAME)
            .getCollection(this.getCollectionName())
            .insertOne(Document.parse(fixtureJson));
}
mongoQueryRunner.getMongoClient().getDatabase("test").getCollection("tmp_map8").insertOne(new Document( ImmutableMap.of("col", new Document(ImmutableMap.of("key1", "value1", "key2", "value2"))))); assertQuery("SELECT col['key1'] FROM test.tmp_map8", "SELECT 'value1'"); mongoQueryRunner.getMongoClient().getDatabase("test").getCollection("tmp_map9").insertOne(new Document( ImmutableMap.of("col", new Document(ImmutableMap.of("key1", "value1", "key2", "value2"))))); assertQuery("SELECT col FROM test.tmp_map9", "SELECT '{ \"key1\" : \"value1\", \"key2\" : \"value2\" }'"); mongoQueryRunner.getMongoClient().getDatabase("test").getCollection("tmp_map10").insertOne(new Document( ImmutableMap.of("col", ImmutableList.of(new Document(ImmutableMap.of("key1", "value1", "key2", "value2")), new Document(ImmutableMap.of("key3", "value3", "key4", "value4")))))); mongoQueryRunner.getMongoClient().getDatabase("test").getCollection("tmp_map11").insertOne(new Document( ImmutableMap.of("col", 10))); assertQuery("SELECT col FROM test.tmp_map11", "SELECT '10'"); mongoQueryRunner.getMongoClient().getDatabase("test").getCollection("tmp_map12").insertOne(new Document( ImmutableMap.of("col", Arrays.asList(10, null, 11)))); assertQuery("SELECT col FROM test.tmp_map12", "SELECT '[10, null, 11]'");
// Collection callback that persists dbDoc: when a previously loaded version
// (dbId) is known, replace that document in place; otherwise insert as new.
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
    if (dbId != null) {
        // Update path: match on the stored document's _id.
        collection.replaceOne(Filters.eq("_id", dbId.get("_id")), dbDoc);
    } else {
        // Insert path: strip an explicit null _id so the driver/server can
        // generate one instead of persisting a literal null key.
        if (dbDoc.containsKey("_id") && dbDoc.get("_id") == null) {
            dbDoc.remove("_id");
        }
        collection.insertOne(dbDoc);
    }
    return null;
} });
/**
 * Round-trips a single document through the containerized MongoDB instance:
 * insert one record, read it back by name, and check the stored value.
 */
@Test
public void simpleMongoDbTest() {
    MongoClient mongoClient =
            new MongoClient(mongo.getContainerIpAddress(), mongo.getMappedPort(MONGO_PORT));
    MongoCollection<Document> collection =
            mongoClient.getDatabase("test").getCollection("testCollection");

    collection.insertOne(new Document("name", "foo").append("value", 1));
    Document fetched = collection.find(new Document("name", "foo")).first();

    assertEquals("A record can be inserted into and retrieved from MongoDB", 1, fetched.get("value"));
}
// Save-style callback: inserts when the mapped document carries no id yet,
// otherwise upserts (replaceOne with upsert=true) by id — each path applying
// the per-action write concern when one is resolved.
public Object doInCollection(MongoCollection<Document> collection) throws MongoException, DataAccessException {
    MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass, dbDoc, null);
    WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction);
    MappedDocument mapped = MappedDocument.of(dbDoc);
    if (!mapped.hasId()) {
        // No id yet: a plain insert lets the driver assign one.
        if (writeConcernToUse == null) {
            collection.insertOne(dbDoc);
        } else {
            collection.withWriteConcern(writeConcernToUse).insertOne(dbDoc);
        }
    } else if (writeConcernToUse == null) {
        // Id present: replace the existing document, creating it if absent.
        collection.replaceOne(mapped.getIdFilter(), dbDoc, new ReplaceOptions().upsert(true));
    } else {
        collection.withWriteConcern(writeConcernToUse).replaceOne(mapped.getIdFilter(), dbDoc, new ReplaceOptions().upsert(true));
    }
    return mapped.getId();
} });
// Verifies that mapping an entity annotated with @Validation overwrites a
// collection validator that was installed by hand.
@Test
public void overwriteValidation() {
    Document validator = Document.parse("{ \"jelly\" : { \"$ne\" : \"rhubarb\" } }");
    MongoDatabase database = addValidation(validator, "validation");
    assertEquals(validator, getValidator());
    Document rhubarb = new Document("jelly", "rhubarb").append("number", 20);
    database.getCollection("validation").insertOne(new Document("jelly", "grape"));
    try {
        // This document violates the hand-installed validator.
        database.getCollection("validation").insertOne(rhubarb);
        fail("Document should have failed validation");
    } catch (MongoWriteException e) {
        assertTrue(e.getMessage().contains("Document failed validation"));
    }
    // Mapping the annotated entity should replace the collection's validator
    // with the rule declared on the @Validation annotation.
    getMorphia().map(DocumentValidation.class);
    getDs().enableDocumentValidation();
    assertEquals(Document.parse(DocumentValidation.class.getAnnotation(Validation.class).value()), getValidator());
    try {
        // Same document no longer trips a validation error; any write error
        // that does occur must not be a validation failure.
        database.getCollection("validation").insertOne(rhubarb);
    } catch (MongoWriteException e) {
        assertFalse(e.getMessage().contains("Document failed validation"));
    }
    try {
        // But the annotation's own rule must reject this entity on save.
        getDs().save(new DocumentValidation("John", 1, new Date()));
        fail("Document should have failed validation");
    } catch (WriteConcernException e) {
        assertTrue(e.getMessage().contains("Document failed validation"));
    }
}
/**
 * Inserts every test document found at the given classpath location into the
 * collection, bypassing server-side document validation so that deliberately
 * invalid fixtures can still be stored.
 */
protected void storeDocuments(MongoCollection<Document> collection, String pathOnClasspath) {
    InsertOneOptions bypassValidation = new InsertOneOptions().bypassDocumentValidation(true);
    loadTestDocuments(pathOnClasspath).forEach(document -> {
        // Guard against broken fixtures before touching the database.
        assertThat(document).isNotNull();
        assertThat(document.size()).isGreaterThan(0);
        collection.insertOne(document, bypassValidation);
    });
}
document.append("recovered_times", archive.getRecoveredTimes()); collection.insertOne(document); } catch (IOException error) { logger.error("Error occurred while creating transaction.", error);
collection.insertOne((Document)doc); logger.info("inserted {} into MongoDB", new Object[] { flowFile }); } else {
// Verifies that, by default, the transformation drops both the rewritten
// delete record and the tombstone that follows it, so downstream consumers
// see neither.
@Test
@FixFor("DBZ-563")
public void shouldDropTombstoneByDefault() throws InterruptedException {
    // First insert
    primary().execute("insert", client -> {
        client.getDatabase(DB_NAME).getCollection(this.getCollectionName())
                .insertOne(Document.parse("{'_id': 1, 'dataStr': 'hello', 'dataInt': 123, 'dataLong': 80000000000}"));
    });
    // Exactly one change record should result from the insert.
    SourceRecords records = consumeRecordsByTopic(1);
    assertThat(records.recordsForTopic(this.topicName()).size()).isEqualTo(1);
    // Test Delete
    primary().execute("delete", client -> {
        client.getDatabase(DB_NAME).getCollection(this.getCollectionName()).deleteOne(RawBsonDocument.parse("{'_id' : 1}"));
    });
    // First delete record to arrive is coming from the oplog
    SourceRecord firstRecord = getRecordByOperation(Envelope.Operation.DELETE);
    final SourceRecord transformedDelete = transformation.apply(firstRecord);
    assertThat(transformedDelete).isNull();
    // Second record is the tombstone
    SourceRecord tombstoneRecord = getNextRecord();
    assertThat(tombstoneRecord).isNotNull();
    // Test tombstone record is dropped
    final SourceRecord transformedTombstone = transformation.apply(tombstoneRecord);
    assertThat(transformedTombstone).isNull();
}
coll.insertOne(doc, insertOptions); });