/**
 * Can be overridden by the subclass to prepare the QueryBatcher before the job is started.
 * Applies the base batcher preparation, the configured snapshot setting, and any listeners
 * collected prior to job start.
 *
 * @param queryBatcher the batcher to configure before the job starts
 */
protected void prepareQueryBatcher(QueryBatcher queryBatcher) {
    super.prepareBatcher(queryBatcher);
    if (consistentSnapshot) {
        queryBatcher.withConsistentSnapshot();
    }
    // A null listener collection means "none configured" — skip registration entirely.
    if (urisReadyListeners != null) {
        for (QueryBatchListener urisReadyListener : urisReadyListeners) {
            queryBatcher.onUrisReady(urisReadyListener);
        }
    }
    if (queryFailureListeners != null) {
        for (QueryFailureListener queryFailureListener : queryFailureListeners) {
            queryBatcher.onQueryFailure(queryFailureListener);
        }
    }
}
public List<String> testQueryExceptions(QueryDefinition query, int expectedSuccesses, int expectedFailures) { QueryBatcher queryBatcher = newQueryBatcher(query) .onUrisReady( batch -> { throw new InternalError(errorMessage); } ) .onQueryFailure( queryThrowable -> { throw new InternalError(errorMessage); } ); testExceptions(queryBatcher, expectedSuccesses, expectedFailures); // collect the uris this time List<String> matchingUris = Collections.synchronizedList(new ArrayList<>()); queryBatcher = newQueryBatcher(query) .onUrisReady( batch -> matchingUris.addAll(Arrays.asList(batch.getItems())) ) .onUrisReady( batch -> { throw new RuntimeException(errorMessage); } ) .onQueryFailure( queryThrowable -> { throw new RuntimeException(errorMessage); } ); testExceptions(queryBatcher, expectedSuccesses, expectedFailures); return matchingUris; }
/**
 * Same scenario as testQueryExceptions but driven from an in-memory uri iterator rather
 * than a query: listeners that throw an {@code Error} subtype and listeners that throw a
 * {@link RuntimeException} must both yield the expected success/failure batch counts.
 *
 * @param uris the uris to feed to the batcher
 * @param expectedSuccesses expected number of successful batches per run
 * @param expectedFailures expected number of failed batches per run
 */
public void testIteratorExceptions(List<String> uris, int expectedSuccesses, int expectedFailures) {
    // Pass 1: listeners throw an Error subtype.
    QueryBatcher errorBatcher = moveMgr.newQueryBatcher(uris.iterator())
        .onUrisReady(batch -> { throw new InternalError(errorMessage); })
        .onQueryFailure(queryThrowable -> { throw new InternalError(errorMessage); });
    testExceptions(errorBatcher, expectedSuccesses, expectedFailures);

    // Pass 2: listeners throw a RuntimeException.
    QueryBatcher runtimeBatcher = moveMgr.newQueryBatcher(uris.iterator())
        .onUrisReady(batch -> { throw new RuntimeException(errorMessage); })
        .onQueryFailure(queryThrowable -> { throw new RuntimeException(errorMessage); });
    testExceptions(runtimeBatcher, expectedSuccesses, expectedFailures);
}
private void testListenerException(QueryBatchListener listener) { final AtomicInteger failureBatchCount = new AtomicInteger(); Iterator<String> iterator = Arrays.asList(new String[] {uri1}).iterator(); QueryBatcher queryBatcher = moveMgr.newQueryBatcher(iterator) .onUrisReady( batch -> logger.debug("uri={}", batch.getItems()[0]) ) .onUrisReady(listener) .onQueryFailure( queryThrowable -> failureBatchCount.incrementAndGet() ); moveMgr.startJob(queryBatcher); queryBatcher.awaitCompletion(); moveMgr.stopJob(queryBatcher); // there should be no failure sent to the QueryBatcher onQueryFailure listeners assertEquals(0, failureBatchCount.get()); }
/**
 * Runs the given batcher with one-uri batches while counting successful and failed batch
 * callbacks, then asserts the counts match expectations. Listeners previously attached to
 * the batcher (e.g. the throwing listeners from the caller) remain registered alongside the
 * counting listeners added here.
 *
 * @param queryBatcher the batcher under test
 * @param expectedSuccesses expected number of onUrisReady callbacks
 * @param expectedFailures expected number of onQueryFailure callbacks
 */
public void testExceptions(QueryBatcher queryBatcher, int expectedSuccesses, int expectedFailures) {
    final AtomicInteger successCount = new AtomicInteger();
    final AtomicInteger failureCount = new AtomicInteger();
    queryBatcher.withBatchSize(1)
        .onUrisReady(batch -> successCount.incrementAndGet())
        .onQueryFailure(queryThrowable -> failureCount.incrementAndGet());
    moveMgr.startJob(queryBatcher);
    queryBatcher.awaitCompletion();
    moveMgr.stopJob(queryBatcher);
    assertEquals(expectedSuccesses, successCount.get());
    assertEquals(expectedFailures, failureCount.get());
}
// NOTE(review): fragment — two partial QueryBatcher builder chains; the enclosing method,
// the newQueryBatcher calls, and the lambdas' closing braces are outside this view.
// Chain 1: stream matched uris to the writer and tally items per batch.
.withThreadCount(5)
.withBatchSize(100)
.onUrisReady(new UrisToWriterListener(writer))
.onUrisReady(batch -> successDocs1.addAndGet(batch.getItems().length))
.onQueryFailure(throwable -> {
    throwable.printStackTrace();
// Chain 2: same tally with a different thread count and batch size.
.withThreadCount(6)
.withBatchSize(19)
.onUrisReady(batch -> successDocs2.addAndGet(batch.getItems().length))
.onQueryFailure(throwable -> {
    throwable.printStackTrace();
// NOTE(review): fragment — registers a listener that records every delivered uri and
// counts batches; the lambda's closing brace is outside this view.
batcher.onUrisReady(batch -> {
    uris.addAll(Arrays.asList(batch.getItems()));
    batchCount.incrementAndGet();
// NOTE(review): fragment — two partial builder chains; enclosing method and lambda
// closers are outside this view.
// Chain 1: collect all matched uris.
.withThreadCount(6)
.withBatchSize(5000)
.onUrisReady(batch -> uris.addAll(Arrays.asList(batch.getItems())))
.onQueryFailure(throwable -> {
    throwable.printStackTrace();
// Chain 2: delete matches while tallying deleted counts and recording uris.
.withThreadCount(2)
.withBatchSize(99)
.onUrisReady(new DeleteListener())
.onUrisReady(batch -> successDocs.addAndGet(batch.getItems().length))
.onUrisReady(batch -> uris2.addAll(Arrays.asList(batch.getItems())))
.onQueryFailure(throwable -> {
    throwable.printStackTrace();
// NOTE(review): fragment — builds a one-uri-per-batch batcher that applies a server
// transform to each matched document; the listener configuration continues past this view.
QueryBatcher batcher = moveMgr.newQueryBatcher(uris.iterator())
    .withBatchSize(1)
    .onUrisReady(new ApplyTransformListener()
        .withTransform(transform)
// NOTE(review): fragment — export-then-delete chain: reads each matched document's
// content, deletes the batch, and tallies deleted uris. Uses withConsistentSnapshot(),
// presumably so deletes don't shift the query's result pages mid-job — confirm.
.withBatchSize(99)
.withConsistentSnapshot()
.onUrisReady(new ExportListener()
    .onDocumentReady(doc -> {
        String contents = doc.getContent(new StringHandle()).get();
// NOTE(review): the export lambda's closing braces are outside this view.
.onUrisReady(new DeleteListener())
.onUrisReady(batch -> deletedCount.addAndGet(batch.getItems().length))
.onQueryFailure(exception -> exception.printStackTrace());
moveMgr.startJob(qb);
// NOTE(review): fragment — locals dmm, sqb, jobIds, zipConsumer are declared elsewhere.
// Job 1: query by the JSON property "jobId" and stream each matched document into the
// zip consumer.
batcher = dmm.newQueryBatcher(sqb.value(sqb.jsonProperty("jobId"), jobIds));
batcher.onUrisReady(new ExportListener().onDocumentReady(zipConsumer));
JobTicket jobTicket = dmm.startJob(batcher);
// Job 2: same export but matching an XML element named jobId instead.
batcher = dmm.newQueryBatcher(sqb.value(sqb.element(new QName("jobId")), jobIds));
batcher.onUrisReady(new ExportListener().onDocumentReady(zipConsumer));
jobTicket = dmm.startJob(batcher);
// NOTE(review): fragment — scopes the query to the test collection and wires listeners
// that exercise batch/failure close handling; the startJob call is outside this view.
query.setCollections(qhbTestCollection);
QueryBatcher queryBatcher = moveMgr.newQueryBatcher(query)
    .onUrisReady(new CloseBatchListener())
    .onQueryFailure(new CloseFailureListener());
// NOTE(review): fragment — continuation of a builder chain; the newQueryBatcher call that
// starts the chain (assigning getUris) is outside this view.
.withBatchSize(2)
.withThreadCount(5)
.onUrisReady(batch -> retrievedUris.addAll(Arrays.asList(batch.getItems())))
.onQueryFailure(exception -> exception.printStackTrace());
moveMgr.startJob(getUris);
// NOTE(review): fragment — writer/reader are declared elsewhere; presumably the first job
// completes and the writer is flushed before the second job reads the uris back — confirm
// against the surrounding code.
// Job 1: write every matched uri to the writer.
QueryBatcher getUris = dataMovementManager.newQueryBatcher(query)
    .withBatchSize(5000)
    .onUrisReady(new UrisToWriterListener(writer))
    .onQueryFailure(exception -> exception.printStackTrace());
JobTicket getUrisTicket = dataMovementManager.startJob(getUris);
// Job 2: read the uris back line-by-line and delete each one.
QueryBatcher performDelete = dataMovementManager.newQueryBatcher(reader.lines().iterator())
    .onUrisReady(new DeleteListener())
    .onQueryFailure(exception -> exception.printStackTrace());
JobTicket ticket = dataMovementManager.startJob(performDelete);
// NOTE(review): fragment — builder-chain continuation; attaches a pre-built export
// listener and starts the job. The chain's start (assigning queryJob) is outside this view.
.withThreadCount(5)
.withBatchSize(10)
.onUrisReady(exportListener)
.onQueryFailure(throwable -> throwable.printStackTrace());
moveMgr.startJob(queryJob);
// NOTE(review): fragment — the leading onBatchFailure call is the tail of a builder chain
// that begins outside this view.
.onBatchFailure((batch, throwable) -> throwable.printStackTrace());
// Job 2: run the listener under a consistent snapshot.
QueryBatcher batcher = moveMgr.newQueryBatcher(query2)
    .onUrisReady(listener)
    .withConsistentSnapshot();
JobTicket ticket2 = moveMgr.startJob(batcher);
// Job 3: count matched uris in batches of 100.
QueryBatcher batcher3 = moveMgr.newQueryBatcher(query3)
    .withBatchSize(100)
    .onUrisReady(batch -> count3.addAndGet(batch.getItems().length))
    .onQueryFailure((throwable) -> throwable.printStackTrace());
JobTicket ticket3 = moveMgr.startJob(batcher3);
@Test public void testResultReplace() throws Exception { DocumentMetadataHandle meta = new DocumentMetadataHandle().withCollections(collection); // write the document client.newDocumentManager().writeAs(collection + "/test1.json", meta, "{ \"testProperty\": \"test1\" }"); StructuredQueryDefinition query = sqb.value(sqb.jsonProperty("testProperty"), "test1"); ServerTransform transform = new ServerTransform(transformName1) .addParameter("newValue", "test1a"); ApplyTransformListener listener = new ApplyTransformListener() .withTransform(transform) .withApplyResult(ApplyResult.REPLACE); QueryBatcher batcher = moveMgr.newQueryBatcher(query) .onUrisReady(listener); JobTicket ticket = moveMgr.startJob( batcher ); batcher.awaitCompletion(); moveMgr.stopJob(ticket); JsonNode docContents = docMgr.readAs(collection + "/test1.json", JsonNode.class); assertEquals( "the transform should have changed testProperty to 'test1a'", "test1a", docContents.get("testProperty").textValue() ); }
@Test public void testResultIgnore() throws Exception { DocumentMetadataHandle meta = new DocumentMetadataHandle().withCollections(collection); // write the document client.newDocumentManager().writeAs(collection + "/test2.json", meta, "{ \"testProperty\": \"test2\" }"); StructuredQueryDefinition query = sqb.value(sqb.jsonProperty("testProperty"), "test2"); ServerTransform transform = new ServerTransform(transformName2) .addParameter("newValue", "test2a"); ApplyTransformListener listener = new ApplyTransformListener() .withTransform(transform) .withApplyResult(ApplyResult.IGNORE); QueryBatcher batcher = moveMgr.newQueryBatcher(query) .onUrisReady(listener); JobTicket ticket = moveMgr.startJob( batcher ); batcher.awaitCompletion(); moveMgr.stopJob(ticket); JsonNode docContents = docMgr.readAs(collection + "/test2.json", JsonNode.class); assertEquals( "the transform should have changed testProperty to 'test2a'", "test2a", docContents.get("testProperty").textValue() ); }
/**
 * Deletes every .txt document matched by the collection query and verifies the other two
 * documents survive. The marked region below is kept verbatim in sync with the "Using
 * QueryBatcher" example in package-info.java, so its code must not be restyled.
 */
@Test
public void testQueryBatcher() {
    // Seed the collection with one document per format (json, xml, txt).
    client.newDocumentManager().writeAs(collection + "/test1.json", meta, "[true]");
    client.newDocumentManager().writeAs(collection + "/test1.xml", meta, "<xml/>");
    client.newDocumentManager().writeAs(collection + "/test1.txt", meta, "text");
    assertEquals(3, client.newQueryManager().search(collectionQuery, new SearchHandle()).getTotalResults());
    StructuredQueryDefinition query = collectionQuery;
    // begin copy from "Using QueryBatcher" in src/main/java/com/marklogic/datamovement/package-info.java
    QueryBatcher qhb = dataMovementManager.newQueryBatcher(query)
        .withBatchSize(1000)
        .withThreadCount(20)
        .withConsistentSnapshot()
        .onUrisReady(batch -> {
            for ( String uri : batch.getItems() ) {
                if ( uri.endsWith(".txt") ) {
                    client.newDocumentManager().delete(uri);
                }
            }
        })
        .onQueryFailure(queryBatchException -> queryBatchException.printStackTrace());
    JobTicket ticket = dataMovementManager.startJob(qhb);
    qhb.awaitCompletion();
    dataMovementManager.stopJob(ticket);
    // end copy from "Using QueryBatcher" in src/main/java/com/marklogic/datamovement/package-info.java
    // Only the .json and .xml documents should remain after the delete job.
    SearchHandle results = client.newQueryManager().search(collectionQuery, new SearchHandle());
    assertEquals(2, results.getTotalResults());
    for ( MatchDocumentSummary match : results.getMatchResults() ) {
        assertTrue(match.getUri().matches(".*/test1.(json|xml)"));
    }
}
// NOTE(review): fragment — tail of a builder chain: deletes each matched batch under a
// consistent snapshot (presumably so deletes don't shift the query's result pages — confirm).
.onUrisReady(new DeleteListener())
.withConsistentSnapshot();
JobTicket ticket = moveMgr.startJob(queryBatcher);