}, null, ParallelTaskRunner.Config.builder().setAbortOnFail(true) .setBatchSize(options.getInt(Options.LOAD_BATCH_SIZE.key(), Options.LOAD_BATCH_SIZE.defaultValue())) .setNumTasks(options.getInt(Options.LOAD_THREADS.key(), Options.LOAD_THREADS.defaultValue())).build() batch -> batch, dbWriter, ParallelTaskRunner.Config.builder().setAbortOnFail(true) .setBatchSize(options.getInt(Options.LOAD_BATCH_SIZE.key(), Options.LOAD_BATCH_SIZE.defaultValue())) .setNumTasks(options.getInt(Options.LOAD_THREADS.key(), Options.LOAD_THREADS.defaultValue())).build()
.setNumTasks(numThreads) .setBatchSize(batchSize) .setAbortOnFail(true) .setSorted(false).build(); ParallelTaskRunner<Variant, VariantAnnotation> parallelTaskRunner =
.setNumTasks(options.getInt(Options.TRANSFORM_THREADS.key(), 1)) .setBatchSize(1) .setAbortOnFail(true) .setSorted(false) .setCapacity(1)
.setNumTasks(numConsumers) .setBatchSize(batchSize) .setAbortOnFail(true) .setSorted(false) .build();
.setNumTasks(loadThreads) .setBatchSize(batchSize) .setAbortOnFail(true).build(); if (isStageParallelWrite(options)) { logger.info("Multi thread stage load... [{} readerThreads, {} writerThreads]", numReaders, loadThreads);
.setNumTasks(loadThreads) .setBatchSize(batchSize) .setAbortOnFail(true).build(); if (isDirectLoadParallelWrite(options)) { logger.info("Multi thread direct load... [{} readerThreads, {} writerThreads]", numReaders, loadThreads);
.setNumTasks(loadThreads) .setBatchSize(batchSize) .setAbortOnFail(true).build(); try { if (isMergeParallelWrite(options)) {
/**
 * Loads variant annotations from an specified file into the selected Variant DataBase.
 *
 * @param uri    URI of the annotation file
 * @param params Specific params.
 * @throws IOException            IOException thrown
 * @throws StorageEngineException if there is a problem creating or running the {@link ParallelTaskRunner}
 */
public void loadVariantAnnotation(URI uri, ObjectMap params) throws IOException, StorageEngineException {
    final int annotationBatchSize = params.getInt(DefaultVariantAnnotationManager.BATCH_SIZE, 100);
    final int writerThreads = params.getInt(DefaultVariantAnnotationManager.NUM_WRITERS, 6);

    // Unsorted parallel execution: the order in which annotations are written does not matter.
    ParallelTaskRunner.Config runnerConfig = ParallelTaskRunner.Config.builder()
            .setNumTasks(writerThreads)
            .setBatchSize(annotationBatchSize)
            .setAbortOnFail(true)
            .setSorted(false)
            .build();

    DataReader<VariantAnnotation> annotationReader = newVariantAnnotationDataReader(uri);
    try {
        ProgressLogger progressLogger = new ProgressLogger("Loaded annotations: ", numAnnotationsToLoad.get());
        // Build the runner and execute it in one step; failures surface as ExecutionException.
        buildLoadAnnotationParallelTaskRunner(annotationReader, runnerConfig, progressLogger, params).run();
    } catch (ExecutionException e) {
        throw new StorageEngineException("Error loading variant annotation", e);
    }
}
/**
 * Builds a {@link ParallelTaskRunner} that reads {@code size} variants, transforms them into
 * VCF slices and hands the slices to the given collector.
 *
 * @param size      number of variants the backing test reader will produce
 * @param collector writer that receives the produced {@link VcfSliceProtos.VcfSlice} objects
 * @return a configured runner, ready to be executed
 * @throws Exception if the reader or archive helper cannot be created
 */
public ParallelTaskRunner<Variant, VcfSliceProtos.VcfSlice> createParallelRunner(int size, DataWriter<VcfSliceProtos.VcfSlice> collector) throws Exception {
    VcfVariantReader variantReader = VcfVariantReaderTest.createReader(size);
    ArchiveTableHelper archiveHelper = new ArchiveTableHelper(new Configuration(), 1, new VariantFileMetadata("1", "1"));
    // Effectively-final so the task supplier lambda below may capture it.
    ParallelTaskRunner.Task<Variant, VcfSliceProtos.VcfSlice> transformTask = new VariantHbaseTransformTask(archiveHelper);

    // A single unsorted task with small batches is sufficient for test-sized inputs.
    ParallelTaskRunner.Config runnerConfig = ParallelTaskRunner.Config.builder()
            .setNumTasks(1)
            .setBatchSize(10)
            .setAbortOnFail(true)
            .setSorted(false)
            .build();

    return new ParallelTaskRunner<>(variantReader, () -> transformTask, collector, runnerConfig);
}