// Choose the base output directory according to the run mode.
switch (runMode) {
    case DEV:
    case STAGING:
        baseDir = new File(getOutputDirectory(), runMode.toString().toLowerCase());
        break;
    case PROD:
    default:
        baseDir = getOutputDirectory();
        break;
}

// Persist the sample; DAO failures are logged but not rethrown.
try {
    getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO().save(htsfSample);
} catch (MaPSeqDAOException e1) {
    logger.error("Could not persist HTSFSample", e1);
}
public Set<HTSFSample> getAggregateHTSFSampleSet() throws PipelineException {
    // Aggregate samples looked up by sequencer run with any samples attached directly to the plan.
    Set<HTSFSample> htsfSampleSet = new HashSet<HTSFSample>();
    if (getWorkflowPlan().getSequencerRun() == null && getWorkflowPlan().getHTSFSamples() == null) {
        logger.error("Neither a sequencerRun nor any htsfSamples are set on the WorkflowPlan");
        throw new PipelineException("Neither a sequencerRun nor any htsfSamples are set on the WorkflowPlan");
    }
    if (getWorkflowPlan().getSequencerRun() != null) {
        logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString());
        try {
            htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO()
                    .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId()));
        } catch (MaPSeqDAOException e) {
            logger.error("Problem getting HTSFSamples by sequencerRun id", e);
        }
    }
    if (getWorkflowPlan().getHTSFSamples() != null) {
        htsfSampleSet.addAll(getWorkflowPlan().getHTSFSamples());
    }
    return htsfSampleSet;
}
File workDir = IOUtils.createWorkDirectory(getSubmitDirectory(), getName());
logger.info("workDir = {}", workDir);

// Glide-in requirements are only included when running outside of DEV.
MaPSeqConfigurationService configService = getPipelineBeanService().getMaPSeqConfigurationService();
if (configService != null && configService.getRunMode().equals(RunModeType.DEV)) {
    includeGlideinRequirements = false;
}

jobNode = exporter.export(getName(), workDir, getGraph(), includeGlideinRequirements);

// Wait for the DAG submit file to appear, backing off a bounded number of times.
// The original back-off call was truncated in the source; the sleep below is an assumption.
int backOffCount = 0;
while (!jobNode.getSubmitFile().exists()) {
    logger.info("jobNode.getSubmitFile().getAbsolutePath() = {}", jobNode.getSubmitFile().getAbsolutePath());
    if (backOffCount > getBackOffMultiplier()) {
        break;
    }
    try {
        Thread.sleep(backOffCount * getBackOffMultiplier());
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        break;
    }
    ++backOffCount;
}

// Write a Graphviz DOT rendering of the job graph into the work directory.
CondorDOTExporter<CondorJob, CondorJobEdge> dotExporter = new CondorDOTExporter<CondorJob, CondorJobEdge>(
        new CondorJobVertexNameProvider(), new CondorJobVertexNameProvider(), null, null, null, props);
File dotFile = new File(workDir, getName() + ".dag.dot");
FileWriter fw = new FileWriter(dotFile);
dotExporter.export(fw, graph);
fw.close();

// Record the start date, DAG cluster id, and submit directory on the WorkflowRun.
WorkflowRun workflowRun = getPipelineBeanService().getMaPSeqDAOBean().getWorkflowRunDAO()
        .findById(getWorkflowPlan().getWorkflowRun().getId());
workflowRun.setStartDate(new Date());
workflowRun.setCondorDAGClusterId(jobNode.getCluster());
workflowRun.setSubmitDirectory(jobNode.getSubmitFile().getParentFile().getAbsolutePath());
WorkflowRunDAO workflowRunDAO = getPipelineBeanService().getMaPSeqDAOBean().getWorkflowRunDAO();
workflowRunDAO.save(workflowRun);
@Override
public void validate() throws PipelineException {
    logger.info("ENTERING validate()");
    try {
        this.graph = createGraph();
    } catch (PipelineException e) {
        logger.error("Problem creating the job graph", e);
        throw new PipelineException(e);
    }
    if (graph == null || graph.vertexSet().isEmpty()) {
        logger.error("graph is null or empty");
        throw new PipelineException("graph is null or empty");
    }
    // Every job must either name a site or declare at least one transfer input/output.
    Set<CondorJob> condorJobSet = graph.vertexSet();
    for (CondorJob condorJob : condorJobSet) {
        if (StringUtils.isEmpty(condorJob.getSiteName()) && condorJob.getTransferInputList().size() == 0
                && condorJob.getTransferOutputList().size() == 0) {
            throw new PipelineException("can't have a job where both siteName & list of inputs/outputs are empty");
        }
    }
}