public PipelineExecutor(Pipeline pipeline, boolean dryRun) {
    super();
    this.pipeline = pipeline;
    if (!dryRun) {
        addObserver(new PersistantObserver());
    }
    addObserver(new NotifyWhenDoneObserver());
}
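// Hedged usage sketch, not from the source: it assumes PipelineExecutor exposes the run()-style
// entry point shown further below and that the caller already has a fully wired Pipeline instance.
// The dryRun flag only controls whether PersistantObserver is attached, so a dry run skips
// persistence but still sends the completion notification.
public static void runDry(Pipeline pipeline) {
    PipelineExecutor executor = new PipelineExecutor(pipeline, true); // dry run: nothing written back
    executor.run(); // assumption: run() drives init/validate/preRun/call as shown below
}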
public static CondorJob createJob(int count, Class<?> moduleClass, WorkflowPlan workflowPlan,
        HTSFSample htsfSample) {
    return createJob(count, moduleClass, workflowPlan, htsfSample, true);
}
public void setWorkflowStatus(WorkflowRunStatusType workflowStatus) {
    setChanged();
    notifyObservers(workflowStatus);
}
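// Hedged sketch, not from the source: because setWorkflowStatus() relies on java.util.Observable's
// setChanged()/notifyObservers(), every registered observer receives the WorkflowRunStatusType as
// the update argument, which is how PersistantObserver and NotifyWhenDoneObserver consume it below.
// LoggingStatusObserver is a hypothetical name used only for illustration.
public class LoggingStatusObserver implements java.util.Observer {
    @Override
    public void update(java.util.Observable o, Object arg) {
        PipelineExecutor pipelineExecutor = (PipelineExecutor) o;
        WorkflowRunStatusType status = (WorkflowRunStatusType) arg;
        System.out.printf("%s is now %s%n", pipelineExecutor.getPipeline().getName(), status);
    }
}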
if (getWorkflowPlan().getSequencerRun() == null && getWorkflowPlan().getHTSFSamples() == null) {
    logger.error("Don't have either sequencerRun and htsfSample");
    throw new PipelineException("Don't have either sequencerRun and htsfSample");
}

if (getWorkflowPlan().getSequencerRun() != null) {
    logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString());
    try {
        htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO()
                .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId()));
    } catch (MaPSeqDAOException e) {
        e.printStackTrace();
    }
}

if (getWorkflowPlan().getHTSFSamples() != null) {
    htsfSampleSet.addAll(getWorkflowPlan().getHTSFSamples());
}

String fastaDatabase = getPipelineBeanService().getAttributes().get("fastaDatabase");
String quantificationDatabaseKey = getPipelineBeanService().getAttributes().get("quantificationDatabaseKey");
String primerSequence1 = getPipelineBeanService().getAttributes().get("primerSequence1");
String primerSequence2 = getPipelineBeanService().getAttributes().get("primerSequence2");

// per-sample section; the surrounding loop over htsfSampleSet is elided in the source
File outputDirectory = createOutputDirectory(sequencerRun.getName(), htsfSample, getName());
List<File> readPairList = PipelineUtil.getReadPairList(htsfSample.getFileDatas(), sequencerRun.getName(),
        htsfSample.getLaneIndex());
logger.info("fileList = {}", readPairList.size());

String fastqRootName = PipelineUtil.getRootFastqName(fastqFile.getName());
CondorJob fastxQualityStatsJob = PipelineJobFactory.createJob(++count, FastxQualityStatsCLI.class,
        getWorkflowPlan(), htsfSample); // remaining arguments and graph wiring elided in the source
File workDir = IOUtils.createWorkDirectory(getSubmitDirectory(), getName());
logger.info("workDir = {}", workDir);

MaPSeqConfigurationService configService = getPipelineBeanService().getMaPSeqConfigurationService();
if (configService != null && configService.getRunMode().equals(RunModeType.DEV)) {
    includeGlideinRequirements = false;
}

SecureCondorSubmitScriptExporter exporter = new SecureCondorSubmitScriptExporter();
jobNode = exporter.export(getName(), workDir, getGraph(), includeGlideinRequirements);

if (!jobNode.getSubmitFile().exists()) {
    logger.info("jobNode.getSubmitFile().getAbsolutePath() = {}", jobNode.getSubmitFile().getAbsolutePath());
    throw new PipelineException("jobNode.getSubmitFile() doesn't exist");
}

// submission retry loop elided in the source; the back-off guard below sits inside it
if (backOffCount > getBackOffMultiplier()) {
    break;
}
// reached only when the submission never succeeded
throw new PipelineException(String.format("Backed off %d times & still could not submit to condor",
        getBackOffMultiplier()));

CondorDOTExporter<CondorJob, CondorJobEdge> dotExporter = new CondorDOTExporter<CondorJob, CondorJobEdge>(
        new CondorJobVertexNameProvider(), new CondorJobVertexNameProvider(), null, null, null, props);
File dotFile = new File(workDir, getName() + ".dag.dot");
FileWriter fw = new FileWriter(dotFile);
dotExporter.export(fw, graph);

WorkflowRun workflowRun = getPipelineBeanService().getMaPSeqDAOBean().getWorkflowRunDAO()
        .findById(getWorkflowPlan().getWorkflowRun().getId());
workflowRun.setStartDate(new Date());
workflowRun.setCondorDAGClusterId(jobNode.getCluster());
String siteName = getPipelineBeanService().getAttributes().get("siteName");

if (getWorkflowPlan().getSequencerRun() == null && getWorkflowPlan().getHTSFSamples() == null) {
    throw new PipelineException("Don't have either sequencerRun and htsfSample");
}

try {
    htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO()
            .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId()));
} catch (MaPSeqDAOException e) {
    // handling elided in the source
}

try {
    CondorJob helloJob = PipelineJobFactory.createJob(++count, EchoCLI.class, getWorkflowPlan(), null, false);
    CondorJob worldJob = PipelineJobFactory.createJob(++count, EchoCLI.class, getWorkflowPlan(), null, false);
    CondorJob catJob = PipelineJobFactory.createJob(++count, CatCLI.class, getWorkflowPlan(), null, false);
    graph.addEdge(worldJob, catJob); // only this edge is shown in the source
} catch (Exception e) {
    throw new PipelineException(e);
}
throw new PipelineException("Don't have either sequencerRun and htsfSample"); logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString()); try { htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO() .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId())); } catch (MaPSeqDAOException e) { List<File> readPairList = PipelineUtil.getReadPairList(htsfSample.getFileDatas(), sequencerRun.getName(), htsfSample.getLaneIndex()); logger.info("fileList = {}", readPairList.size()); throw new PipelineException("ReadPairList is not 2"); String r1FastqRootName = PipelineUtil.getRootFastqName(r1FastqFile.getName()); String r2FastqRootName = PipelineUtil.getRootFastqName(r2FastqFile.getName());
public Set<HTSFSample> getAggregateHTSFSampleSet() throws PipelineException {
    Set<HTSFSample> htsfSampleSet = new HashSet<HTSFSample>();
    if (getWorkflowPlan().getSequencerRun() == null && getWorkflowPlan().getHTSFSamples() == null) {
        logger.error("Don't have either sequencerRun and htsfSample");
        throw new PipelineException("Don't have either sequencerRun and htsfSample");
    }
    if (getWorkflowPlan().getSequencerRun() != null) {
        logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString());
        try {
            htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO()
                    .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId()));
        } catch (MaPSeqDAOException e) {
            logger.error("problem getting HTSFSamples");
        }
    }
    if (getWorkflowPlan().getHTSFSamples() != null) {
        htsfSampleSet.addAll(getWorkflowPlan().getHTSFSamples());
    }
    return htsfSampleSet;
}
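// Hedged sketch, not from the source: a minimal createGraph()-style loop that consumes
// getAggregateHTSFSampleSet() the way the pipeline-specific fragments in this section do.
// FastxQualityStatsCLI is taken from the source; using it here for every sample is only an
// illustration, and any job arguments and edges would still need to be added.
int count = 0;
for (HTSFSample htsfSample : getAggregateHTSFSampleSet()) {
    CondorJob job = PipelineJobFactory.createJob(++count, FastxQualityStatsCLI.class, getWorkflowPlan(), htsfSample);
    graph.addVertex(job);
}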
switch (runMode) { // switch header inferred; the source elides it
    case DEV:
    case STAGING:
        baseDir = new File(getOutputDirectory(), runMode.toString().toLowerCase());
        break;
    case PROD:
    default:
        baseDir = getOutputDirectory();
        break;
}

try { // try header inferred; the source elides it
    getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO().save(htsfSample);
} catch (MaPSeqDAOException e1) {
    logger.error("Could not persist HTSFSample");
    throw new PipelineException("Could not persist HTSFSample");
}
try {
    pipeline.init();
} catch (PipelineException e) {
    setWorkflowStatus(WorkflowRunStatusType.FAILED);
    logger.error("Problem with init: ", e);
} catch (Exception e) {
    // handling elided in the source
}

try {
    pipeline.validate();
} catch (PipelineException e) {
    setWorkflowStatus(WorkflowRunStatusType.FAILED);
    logger.error("Problem with validate: ", e);
} catch (Exception e) {
    // handling elided in the source
}

try {
    pipeline.preRun();
} catch (PipelineException e) {
    setWorkflowStatus(WorkflowRunStatusType.FAILED);
    logger.error("Problem with preRun: ", e);
} catch (Exception e) {
    // handling elided in the source
}

try {
    CondorJob condorJob = pipeline.call();
    ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(2);
    StartCondorMonitor startCondorMonitor = new StartCondorMonitor(this, condorJob);
    ScheduledFuture<?> startCondorMonitorFuture = scheduler.scheduleWithFixedDelay(startCondorMonitor, 1, 2,
            TimeUnit.MINUTES);
    Runnable stopCondorMonitor = new StopCondorMonitor(scheduler, startCondorMonitor, startCondorMonitorFuture);
    scheduler.scheduleAtFixedRate(stopCondorMonitor, 5, 3, TimeUnit.MINUTES);
    scheduler.awaitTermination(5, TimeUnit.DAYS);
} catch (PipelineException | InterruptedException e) {
    setWorkflowStatus(WorkflowRunStatusType.FAILED);
    logger.error("Problem with run: ", e);
}
PipelineExecutor pipelineExecutor = (PipelineExecutor) o;
WorkflowRunStatusType status = (WorkflowRunStatusType) arg;
WorkflowRunDAO workflowRunDAO = pipelineExecutor.getPipeline().getPipelineBeanService().getMaPSeqDAOBean()
        .getWorkflowRunDAO();
try {
    WorkflowRun workflowRun = workflowRunDAO.findById(pipelineExecutor.getPipeline().getWorkflowPlan()
            .getWorkflowRun().getId());
    pipelineExecutor.getPipeline().getWorkflowPlan().setWorkflowRun(workflowRun);
    // status-specific persistence elided in the source
} catch (MaPSeqDAOException e) {
    // handling elided in the source
}
private int addRandomSampleQCRoutineToDAX(Graph<CondorJob, CondorJobEdge> graph, int count, HTSFSample htsfSample,
        File fastqFile, CondorJob trimCountAdapterJob, File trimmedFastq, String outputPath) throws IOException {

    String sequenceDatabaseList = getPipelineBeanService().getAttributes().get("sequenceDatabaseList");

    CondorJob randomSampleFastqJob = PipelineJobFactory.createJob(++count, RandomSampleFastqCLI.class,
            getWorkflowPlan(), htsfSample);
    randomSampleFastqJob.addArgument(RandomSampleFastqCLI.FASTQ, trimmedFastq.getAbsolutePath());
    File randomSampleFastqOutFile = new File(outputPath, key + ".fastq");

    CondorJob bwaAlignJob = PipelineJobFactory.createJob(++count, BWAAlignCLI.class, getWorkflowPlan(), htsfSample);
    bwaAlignJob.setNumberOfProcessors(8);

    CondorJob bwaSAMSingleEndJob = PipelineJobFactory.createJob(++count, BWASAMSingleEndCLI.class,
            getWorkflowPlan(), htsfSample);
    bwaSAMSingleEndJob.addArgument(BWASAMSingleEndCLI.FASTADB, sequenceFastaDB);
    bwaSAMSingleEndJob.addArgument(BWASAMSingleEndCLI.FASTQ, randomSampleFastqOutFile.getAbsolutePath());

    CondorJob genomeSampleQCJob = PipelineJobFactory.createJob(++count, GenomeSampleQCCLI.class,
            getWorkflowPlan(), htsfSample);
    genomeSampleQCJob.addArgument(GenomeSampleQCCLI.KEY, key);
    genomeSampleQCJob.addArgument(GenomeSampleQCCLI.SAMFILE, bwaSAMSingleEndOutFile.getAbsolutePath());

    CondorJob rnaSampleQCJob = PipelineJobFactory.createJob(++count, RNASampleQCCLI.class, getWorkflowPlan(),
            htsfSample);
    rnaSampleQCJob.addArgument(RNASampleQCCLI.KEY, key);
    rnaSampleQCJob.addArgument(RNASampleQCCLI.FASTQ, randomSampleFastqOutFile.getAbsolutePath());

    // declarations of key, sequenceFastaDB, and bwaSAMSingleEndOutFile, the remaining job arguments,
    // the graph.addVertex()/addEdge() wiring, and the return of the updated count are elided in the source
}
@Override
public void validate() throws PipelineException {
    logger.info("ENTERING validate()");
    try {
        this.graph = createGraph();
    } catch (PipelineException e) {
        logger.error("Problem running before start command", e);
        throw new PipelineException(e);
    }
    if (graph == null || graph.vertexSet().size() == 0) {
        logger.error("graph is null");
        throw new PipelineException("graph is null");
    }
    Set<CondorJob> condorJobSet = graph.vertexSet();
    for (CondorJob condorJob : condorJobSet) {
        if (StringUtils.isEmpty(condorJob.getSiteName())
                && (condorJob.getTransferInputList().size() == 0 && condorJob.getTransferOutputList().size() == 0)) {
            throw new PipelineException("can't have a job where both siteName & list of inputs/outputs are empty");
        }
    }
}
if (this.workflowPlan == null) {
    logger.error("workflowPlan is null");
    throw new PipelineException("workflowPlan is null");
}

if (StringUtils.isEmpty(mapseqHome)) {
    logger.error("MAPSEQ_HOME not set in env: {}", mapseqHome);
    throw new PipelineException("MAPSEQ_HOME not set in env");
}
File mapseqHomeDirectory = new File(mapseqHome); // inferred; the source elides this declaration and guard
if (!mapseqHomeDirectory.exists()) {
    throw new PipelineException("MAPSEQ_HOME does not exist");
}

if (StringUtils.isEmpty(outputDir)) {
    logger.error("MAPSEQ_OUTPUT_DIRECTORY not set in env: {}", outputDir);
    throw new PipelineException("MAPSEQ_OUTPUT_DIRECTORY not set in env");
}
File outputDirectory = new File(outputDir); // inferred; the source elides this declaration
if (!outputDirectory.exists()) {
    logger.error("MAPSEQ_OUTPUT_DIRECTORY does not exist: {}", outputDir);
    throw new PipelineException("MAPSEQ_OUTPUT_DIRECTORY does not exist");
}
WorkflowRunStatusType status = (WorkflowRunStatusType) arg;
if (WorkflowRunStatusType.DONE.equals(status) || WorkflowRunStatusType.FAILED.equals(status)) {
    WorkflowPlan workflowPlan = pipelineExecutor.getPipeline().getWorkflowPlan();
    if (workflowPlan != null) {
        WorkflowRun workflowRun = workflowPlan.getWorkflowRun();
        // construction of the JavaMail message and session is elided in the source
        message.addRecipient(Message.RecipientType.TO, new InternetAddress(toEmailAddress));
        message.setSubject(String.format("The %s pipeline has finished.", pipelineExecutor.getPipeline().getName()));
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("WorkflowRun Name: %s%n", workflowRun.getName()));
        // remainder of the message body and the send call are elided in the source
    }
}
@Override
public void run() {
    if (startCondorMonitor.isJobFinished()) {
        startCondorMonitorFuture.cancel(true);
        scheduler.shutdown();
    }
}
switch (statusType.getCode()) {
    case 1:
        pipelineExecutor.setWorkflowStatus(WorkflowRunStatusType.PENDING);
        jobFinished = false;
        break;
    case 2:
        pipelineExecutor.setWorkflowStatus(WorkflowRunStatusType.RUNNING);
        jobFinished = false;
        break;
    case 3:
        pipelineExecutor.setWorkflowStatus(WorkflowRunStatusType.FAILED);
        jobFinished = true;
        break;
    case 4:
        pipelineExecutor.setWorkflowStatus(WorkflowRunStatusType.DONE);
        jobFinished = true;
        break;
    case 5:
        pipelineExecutor.setWorkflowStatus(WorkflowRunStatusType.HELD);
        jobFinished = true;
        break;
}
public static CondorJob createJob(int count, Class<?> moduleClass, WorkflowPlan workflowPlan,
        HTSFSample htsfSample, boolean persistFileData) {
    return createJob(count, moduleClass, workflowPlan, htsfSample, persistFileData, 3);
}
public PipelineExecutor(Pipeline pipeline) {
    super();
    this.pipeline = pipeline;
    addObserver(new PersistantObserver());
    addObserver(new NotifyWhenDoneObserver());
}
public static CondorJob createJob(int count, Class<?> moduleClass, WorkflowPlan workflowPlan) {
    return createJob(count, moduleClass, workflowPlan, null);
}
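// Hedged usage sketch, not from the source: calling the shortest overload resolves through the
// chain above to htsfSample = null and persistFileData = true, with the five-argument overload
// supplying a final default of 3. EchoCLI (from the HelloWorld fragment above) stands in for any
// module class.
CondorJob job = PipelineJobFactory.createJob(++count, EchoCLI.class, getWorkflowPlan());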