public static void setup() throws Exception { StringBuffer failures = new StringBuffer(); WriteBatcher writeBatcher = moveMgr.newWriteBatcher() .withBatchSize(10) .onBatchFailure((event, throwable) -> { throwable.printStackTrace(); failures.append("ERORR:[" + throwable.toString() + "]"); }); moveMgr.startJob(writeBatcher); // a collection so we're only looking at docs related to this test run DocumentMetadataHandle meta = new DocumentMetadataHandle().withCollections(collection); for ( int i=1; i <= numDocs; i++ ) { writeBatcher.addAs(collection + "/doc_" + i + ".txt", meta, "test contents"); } writeBatcher.flushAsync(); writeBatcher.awaitCompletion(); if ( failures.length() > 0 ) fail(failures.toString()); logger.info("Successfully wrote {} docs to collection {}", numDocs, collection); }
/**
 * Regression test for issue #793: a job that is started and then immediately
 * stopped after queueing a single document must complete without hanging.
 */
@Test
public void testIssue793() {
  WriteBatcher wb = moveMgr.newWriteBatcher();
  wb.addAs("test.txt", "test");
  moveMgr.startJob(wb);
  moveMgr.stopJob(wb);
}
}
/**
 * Regression test for issue #646 (currently disabled): writes 21 string
 * documents, each carrying a "docMeta-1" property, then flushes synchronously.
 *
 * @throws Exception if the flush fails
 */
@Ignore
public void testIssue646() throws Exception {
  WriteBatcher batcher = moveMgr.newWriteBatcher()
    .withBatchSize(10);
  batcher.onBatchFailure( (failedBatch, error) -> error.printStackTrace() );
  for (int docNum = 0; docNum < 21; docNum++) {
    // a fresh metadata handle is created per document, as in the original
    DocumentMetadataHandle propertyMeta =
      new DocumentMetadataHandle().withProperty("docMeta-1", "true");
    batcher.addAs("/local/string-" + docNum, propertyMeta, "test");
  }
  batcher.flushAndWait();
}
public static void setup() throws Exception { WriteBatcher writeBatcher = moveMgr.newWriteBatcher(); moveMgr.startJob(writeBatcher); // a collection so we're only looking at docs related to this test DocumentMetadataHandle meta = new DocumentMetadataHandle() .withCollections(collection, qhbTestCollection); // all the docs are one-word text docs writeBatcher.addAs(uri1, meta, new StringHandle("{name:\"John Doe\", department:\"HR\"}").withFormat(JSON)); writeBatcher.addAs(uri2, meta, new StringHandle("{name:\"Jane Doe\", department:\"HR\"}").withFormat(JSON)); writeBatcher.addAs(uri3, meta, new StringHandle("{name:\"John Smith\", department:\"HR\"}").withFormat(JSON)); writeBatcher.addAs(uri4, meta, new StringHandle("{name:\"John Lennon\",department:\"HR\"}").withFormat(JSON)); writeBatcher.addAs(uri5, meta, new StringHandle("{name:\"John Man\", department:\"Engineering\"}").withFormat(JSON)); writeBatcher.flushAsync(); writeBatcher.awaitCompletion(); moveMgr.stopJob(writeBatcher); StringHandle options = new StringHandle( "<options xmlns='http://marklogic.com/appservices/search'>" + "<constraint name='dept'>" + "<value>" + "<json-property>department</json-property>" + "</value>" + "</constraint>" + "</options>") .withFormat(XML); QueryOptionsManager queryOptionsMgr = Common.connectAdmin().newServerConfigManager().newQueryOptionsManager(); queryOptionsMgr.writeOptions("employees", options); }
// Fragment of a larger method (loop body is not closed in this view):
// queues ten docs at "/testAgainstRealHosts/<j>" and records each URI in
// `sentUris` for later verification. `writeBatcher` and `meta6` are
// declared outside this view — presumably a batcher and shared metadata;
// confirm against the enclosing method.
for (int j =0 ;j < 10; j++){ String uri ="/testAgainstRealHosts/"+ j; writeBatcher.addAs(uri, meta6, "test"); sentUris.add(uri);
// Fragment (loop body is not closed in this view): starts the write job,
// then queues one document per entry in `uris`, using the URI string itself
// as the document contents. `wb`, `uris`, and `meta` come from the
// enclosing, not-visible scope.
moveMgr.startJob(wb); for ( String uri : uris ) { wb.addAs(uri, meta, uri);
// Fragment of a larger method: queues three documents, creating a fresh
// metadata handle (tagged with the test collections) before each write.
// NOTE(review): doc1/doc2 go through addAs() while doc3 goes through add()
// — presumably deliberate to exercise both overloads; confirm. doc4 is
// parsed here but written elsewhere in the (not visible) method.
JsonNode doc4 = new ObjectMapper().readTree("{ \"testProperty4\": \"test4\" }"); DocumentMetadataHandle meta = new DocumentMetadataHandle().withCollections(collection, whbTestCollection); batcher.addAs(uri1, meta, doc1); meta = new DocumentMetadataHandle().withCollections(collection, whbTestCollection); batcher.addAs(uri2, meta, doc2); meta = new DocumentMetadataHandle().withCollections(collection, whbTestCollection); batcher.add(uri3, meta, doc3);
// Fragment (loop body is not closed in this view): queues 100 text docs
// under "/<collection>/doc<i>.txt", capturing each URI in the `uris` array
// for later assertions. `batcher`, `meta`, `docContents`, and `uris` are
// declared in the enclosing, not-visible scope.
for ( int i=0; i < 100; i++ ) { uris[i] = "/" + collection + "/doc" + i + ".txt"; batcher.addAs(uris[i], meta, docContents);
// Fragment (loop body is not closed in this view): starts batcher1's job,
// keeping the ticket for later job control, then queues `numDocs` JSON docs
// into the test collection. The body is a JSON string literal; `batcher1`,
// `collection`, `numDocs`, and `meta` come from the enclosing scope.
JobTicket ticket1 = moveMgr.startJob( batcher1 ); for ( int i=0; i < numDocs; i++) { batcher1.addAs(collection + "/test_doc_" + i + ".json", meta, "{ \"testProperty\": \"test3\" }");
public static void setup() throws Exception { assertEquals( "Since the doc doesn't exist, documentManager.exists() should return null", null, client.newDocumentManager().exists(collection + "/doc_1.json") ); WriteBatcher writeBatcher = moveMgr.newWriteBatcher() .withBatchSize(100); moveMgr.startJob(writeBatcher); // a collection so we're only looking at docs related to this test DocumentMetadataHandle meta = new DocumentMetadataHandle() .withCollections(collection, qhbTestCollection); for ( int i=1; i <= numDocs; i++ ) { writeBatcher.addAs(collection + "/doc_" + i + ".json", meta, new StringHandle("{name:\"John Doe\",dept:\"HR\"}").withFormat(JSON)); } writeBatcher.flushAsync(); writeBatcher.awaitCompletion(); }
/**
 * End-to-end smoke test for WriteBatcher: verifies two docs are absent,
 * writes them via add()/addAs(), flushes, stops the job, and verifies both
 * now exist. NOTE(review): the code between the begin/end markers is an
 * intentional verbatim copy of the "Using WriteBatcher" example in
 * src/main/java/com/marklogic/datamovement/package-info.java — keep the two
 * in sync rather than restyling this block.
 */
@Test public void testWriteBatcher() { assertEquals(null, client.newDocumentManager().exists("doc1.txt")); assertEquals(null, client.newDocumentManager().exists("doc2.txt")); // begin copy from "Using WriteBatcher" in src/main/java/com/marklogic/datamovement/package-info.java WriteBatcher whb = dataMovementManager.newWriteBatcher() .withBatchSize(100) .withThreadCount(20) .onBatchSuccess(batch -> { logger.debug("batch # {}, so far: {}", batch.getJobBatchNumber(), batch.getJobWritesSoFar()); }) .onBatchFailure((batch,throwable) -> throwable.printStackTrace() ); JobTicket ticket = dataMovementManager.startJob(whb); // the add or addAs methods could be called in separate threads on the // single whb instance whb.add ("doc1.txt", new StringHandle("doc1 contents")); whb.addAs("doc2.txt", "doc2 contents"); whb.flushAndWait(); // send the two docs even though they're not a full batch dataMovementManager.stopJob(ticket); // end copy from "Using WriteBatcher" in src/main/java/com/marklogic/datamovement/package-info.java assertTrue(null != client.newDocumentManager().exists("doc1.txt")); assertTrue(null != client.newDocumentManager().exists("doc2.txt")); } }
// Fragment (loop body is not closed in this view): queues 100 text docs at
// root-level URIs "doc<i>.txt" — note: unlike the similar collection-prefixed
// loop elsewhere in this file — and records each URI in `uris`.
// `writeBatcher`, `meta`, `docContents`, and `uris` come from the enclosing
// scope.
for ( int i=0; i < 100; i++ ) { uris[i] = "doc" + i + ".txt"; writeBatcher.addAs(uris[i], meta, docContents);