/**
 * Creates a batch writer bound to the given MarkLogic client.
 *
 * <p>A {@code DataMovementManager} is obtained from the client eagerly so that
 * later batcher jobs can be created without re-acquiring one.
 *
 * @param client the database client used for all data-movement operations
 */
public DataMovementBatchWriter(DatabaseClient client) {
    this.client = client;
    this.dataMovementManager = this.client.newDataMovementManager();
}
/**
 * Constructs a template around the given client, eagerly creating the
 * {@code DataMovementManager} that query-batcher jobs will be started on.
 *
 * @param databaseClient the client whose data-movement facilities are used
 */
public QueryBatcherTemplate(DatabaseClient databaseClient) {
    this.databaseClient = databaseClient;
    this.dataMovementManager = databaseClient.newDataMovementManager();
}
/**
 * Builds, prepares, and starts a query-batcher job against the given client.
 *
 * <p>If a {@code DataMovementManager} was injected on this instance it is
 * reused; otherwise a fresh one is created from the client. When
 * {@code awaitCompletion} is set, the method blocks until the batcher drains,
 * optionally stopping the job afterwards ({@code stopJobAfterCompletion}).
 *
 * @param databaseClient the client to run the job against
 * @return a ticket bundling the manager, the batcher, and the started job
 */
@Override
public QueryBatcherJobTicket run(DatabaseClient databaseClient) {
    // Prefer the preconfigured manager; fall back to one from the client.
    DataMovementManager dmm = (this.dataMovementManager == null)
            ? databaseClient.newDataMovementManager()
            : this.dataMovementManager;

    String jobDescription = getJobDescription();
    if (jobDescription != null && logger.isInfoEnabled()) {
        logger.info(jobDescription);
    }

    QueryBatcher queryBatcher = newQueryBatcherBuilder().buildQueryBatcher(databaseClient, dmm);
    prepareQueryBatcher(queryBatcher);

    JobTicket jobTicket = dmm.startJob(queryBatcher);
    if (awaitCompletion) {
        queryBatcher.awaitCompletion();
        if (stopJobAfterCompletion) {
            dmm.stopJob(queryBatcher);
        }
        // Re-check the log level: it may have changed while the job ran.
        if (jobDescription != null && logger.isInfoEnabled()) {
            logger.info("Completed: " + jobDescription);
        }
    }
    return new QueryBatcherJobTicket(dmm, queryBatcher, jobTicket);
}
Enumeration<? extends ZipEntry> entries = importZip.entries(); DataMovementManager dmm = jobClient.newDataMovementManager(); WriteBatcher writer = dmm .newWriteBatcher() dmm = this.jobClient.newDataMovementManager(); writer = dmm .newWriteBatcher()
DataMovementManager dataMovementManager = stagingClient.newDataMovementManager();
/**
 * Wires up and immediately starts a write-batcher job for this plugin.
 *
 * <p>On success each batch logs a summary report; on failure the target URIs
 * of the failed batch are collected and logged with the causing throwable.
 *
 * @param client the database client providing the data-movement manager
 */
public OurJbossESBPlugin(DatabaseClient client) {
    moveMgr = client.newDataMovementManager();
    batcher = moveMgr.newWriteBatcher()
            .withJobName("OurJbossESBPlugin")
            .withBatchSize(BATCH_SIZE)
            // every time a batch is full, write it to the database via mlcp
            // this is the default, only included here to make it obvious
            //.onBatchFull( new MlcpBatchFullListener() )
            // log a summary report after each successful batch
            .onBatchSuccess(batch -> logger.info(getSummaryReport()))
            .onBatchFailure((batch, throwable) -> {
                List<String> failedUris = new ArrayList<>();
                for (WriteEvent event : batch.getItems()) {
                    failedUris.add(event.getTargetUri());
                }
                logger.warn("FAILURE on batch:" + failedUris + "\n", throwable);
            });
    ticket = moveMgr.startJob(batcher);
}
runFlow.addParameter("job-id", UUID.randomUUID().toString()); DataMovementManager stagingDataMovementManager = flowRunnerClient.newDataMovementManager();
createFlow("extranodes", CodeFormat.JAVASCRIPT, DataFormat.XML, FlowType.HARMONIZE, true, "validPath1-threeProp", 1, (CreateFlowListener)null); installUserModules(getHubAdminConfig(), true); stagingDataMovementManager = flowRunnerClient.newDataMovementManager();
DataMovementManager dmm = jobClient.newDataMovementManager(); QueryBatcher batcher = null; StructuredQueryDefinition query = null; dmm = this.jobClient.newDataMovementManager(); if (jobIds == null) { batcher = dmm.newQueryBatcher(emptyQuery);
flowRunnerDataMovementManager = flowRunnerClient.newDataMovementManager();
installModule("/entities/" + ENTITY + "/harmonize/testharmonize/content.xqy", "stream-collector-test/content.xqy"); DataMovementManager stagingDataMovementManager = stagingClient.newDataMovementManager();
private void installDocs(String flowName, DataFormat dataFormat, String collection, DatabaseClient srcClient) { DataMovementManager mgr; mgr = srcClient.newDataMovementManager();
DataMovementManager moveMgr = evalClient.newDataMovementManager(); StringBuilder anyFailure = new StringBuilder(); Hashtable<String,AtomicInteger> urisDeleted = new Hashtable<>();
runFlow.addParameter("job-id", UUID.randomUUID().toString()); DataMovementManager dataMovementManager = stagingClient.newDataMovementManager();
private void installDocs(DataFormat dataFormat, String collection, DatabaseClient srcClient, boolean useEs, int testSize) { DataMovementManager mgr = srcClient.newDataMovementManager();