// NOTE(review): torn fragment — tail of a try block persisting an HTSFSample via the DAO chain,
// followed by its catch. The catch logs only a message; the cause `e1` is discarded, so the stack
// trace is lost — should be logger.error("Could not persist HTSFSample", e1). Cannot fix safely
// without the surrounding method.
getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO().save(htsfSample); } catch (MaPSeqDAOException e1) { logger.error("Could not persist HTSFSample");
// NOTE(review): signature plus first statement only — the method body is cut off in this view.
// Presumably wires a per-sample QC job (adapter-trim output -> random QC) into the Condor DAG and
// returns an updated job count — TODO confirm against the full method. The attribute key
// "sequenceDatabaseList" is read from the pipeline bean's attribute map.
private int addRandomSampleQCRoutineToDAX(Graph<CondorJob, CondorJobEdge> graph, int count, HTSFSample htsfSample, File fastqFile, CondorJob trimCountAdapterJob, File trimmedFastq, String outputPath) throws IOException { String sequenceDatabaseList = getPipelineBeanService().getAttributes().get("sequenceDatabaseList");
// NOTE(review): this line looks like two unrelated fragments spliced together — a sequencer-run
// sample lookup (try/catch against the HTSFSample DAO) whose catch body has been replaced by a run
// of attribute reads (fastaDatabase, quantificationDatabaseKey, primerSequence1/2). As written the
// attribute reads would execute ONLY on MaPSeqDAOException, which is almost certainly not the
// original intent — verify against the source repository before relying on this code.
logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString()); try { htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO() .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId())); } catch (MaPSeqDAOException e) { String fastaDatabase = getPipelineBeanService().getAttributes().get("fastaDatabase"); String quantificationDatabaseKey = getPipelineBeanService().getAttributes().get("quantificationDatabaseKey"); String primerSequence1 = getPipelineBeanService().getAttributes().get("primerSequence1"); String primerSequence2 = getPipelineBeanService().getAttributes().get("primerSequence2");
// NOTE(review): torn fragment with unbalanced braces — a DEV run-mode check (disables glide-in
// requirements) spliced into WorkflowRun bookkeeping: re-fetches the run by id, stamps start date,
// DAG cluster id, and submit directory, then saves. The trailing `} catch (MaPSeqDAOException e) {`
// has no visible matching try; the enclosing method is outside this view. Also note the
// WorkflowRunDAO is fetched twice from the bean chain — the first findById call's DAO could be
// reused. Do not edit without the full method.
MaPSeqConfigurationService configService = getPipelineBeanService().getMaPSeqConfigurationService(); if (configService != null && configService.getRunMode().equals(RunModeType.DEV)) { includeGlideinRequirements = false; WorkflowRun workflowRun = getPipelineBeanService().getMaPSeqDAOBean().getWorkflowRunDAO() .findById(getWorkflowPlan().getWorkflowRun().getId()); workflowRun.setStartDate(new Date()); workflowRun.setCondorDAGClusterId(jobNode.getCluster()); workflowRun.setSubmitDirectory(jobNode.getSubmitFile().getParentFile().getAbsolutePath()); WorkflowRunDAO workflowRunDAO = getPipelineBeanService().getMaPSeqDAOBean().getWorkflowRunDAO(); workflowRunDAO.save(workflowRun); } catch (MaPSeqDAOException e) {
// NOTE(review): fragment — reads the "siteName" pipeline attribute, then bulk-adds samples found
// by the plan's SequencerRun id into htsfSampleSet (both declared outside this view). The DAO call
// can throw MaPSeqDAOException; the enclosing try/catch is not visible here.
String siteName = getPipelineBeanService().getAttributes().get("siteName"); htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO() .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId()));
/**
 * Aggregates every HTSFSample this workflow plan covers: samples resolved through the plan's
 * SequencerRun id (when a run is present) plus any samples attached directly to the plan.
 * Using a HashSet de-duplicates samples that appear via both routes.
 *
 * @return the aggregated sample set; may be empty if the DAO lookup fails and the plan carries
 *         no directly attached samples
 * @throws PipelineException when the plan has neither a SequencerRun nor directly attached
 *         HTSFSamples, so there is nothing to aggregate from
 */
public Set<HTSFSample> getAggregateHTSFSampleSet() throws PipelineException {
    Set<HTSFSample> htsfSampleSet = new HashSet<HTSFSample>();
    if (getWorkflowPlan().getSequencerRun() == null && getWorkflowPlan().getHTSFSamples() == null) {
        logger.error("Don't have either sequencerRun and htsfSample");
        throw new PipelineException("Don't have either sequencerRun and htsfSample");
    }
    if (getWorkflowPlan().getSequencerRun() != null) {
        logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString());
        try {
            htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO()
                    .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId()));
        } catch (MaPSeqDAOException e) {
            // Fix: the original logged only a bare message, discarding the exception and its
            // stack trace; pass the cause so DAO failures are diagnosable. Best-effort semantics
            // are preserved — a failed lookup still falls through to the directly attached samples.
            logger.error("problem getting HTSFSamples", e);
        }
    }
    if (getWorkflowPlan().getHTSFSamples() != null) {
        htsfSampleSet.addAll(getWorkflowPlan().getHTSFSamples());
    }
    return htsfSampleSet;
}
// NOTE(review): torn fragment — logs the SequencerRun, then adds its samples (looked up by run id)
// to htsfSampleSet inside a try; the catch body and the rest of the enclosing method are cut off
// in this view. Matches the lookup pattern used in getAggregateHTSFSampleSet.
logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString()); try { htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO() .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId())); } catch (MaPSeqDAOException e) {
// NOTE(review): fragment of what appears to be a java.util.Observer-style update callback — casts
// the observable to PipelineExecutor and the argument to WorkflowRunStatusType, then resolves the
// WorkflowRunDAO; the try body and the rest of the method are outside this view. The unchecked
// casts assume the caller only ever notifies with these types — TODO confirm at the call site.
PipelineExecutor pipelineExecutor = (PipelineExecutor) o; WorkflowRunStatusType status = (WorkflowRunStatusType) arg; WorkflowRunDAO workflowRunDAO = pipelineExecutor.getPipeline().getPipelineBeanService().getMaPSeqDAOBean() .getWorkflowRunDAO(); try {