// NOTE(review): incomplete fragment — the enclosing method and the rest of this catch body are
// outside this view. Persists htsfSample through the HTSFSample DAO; on MaPSeqDAOException the
// failure is only logged WITHOUT the cause — consider logger.error("Could not persist HTSFSample", e1)
// so the stack trace is not lost. Verify against the full method before changing.
getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO().save(htsfSample); } catch (MaPSeqDAOException e1) { logger.error("Could not persist HTSFSample");
/**
 * Aggregates the HTSFSamples targeted by this pipeline's WorkflowPlan.
 * <p>
 * Samples come from two sources, both optional but not both absent:
 * <ul>
 *   <li>all samples persisted for the plan's SequencerRun (looked up via the HTSFSample DAO);</li>
 *   <li>any samples attached directly to the WorkflowPlan itself.</li>
 * </ul>
 * A DAO lookup failure for the sequencer run is treated as best-effort: it is logged (with
 * cause) and the method continues with whatever samples remain.
 *
 * @return the union of sequencer-run samples and plan-level samples; never {@code null},
 *         possibly empty if the DAO lookup fails and the plan carries no samples
 * @throws PipelineException if the WorkflowPlan has neither a SequencerRun nor any HTSFSamples
 */
public Set<HTSFSample> getAggregateHTSFSampleSet() throws PipelineException {
    Set<HTSFSample> htsfSampleSet = new HashSet<HTSFSample>();
    if (getWorkflowPlan().getSequencerRun() == null && getWorkflowPlan().getHTSFSamples() == null) {
        logger.error("Don't have either sequencerRun and htsfSample");
        throw new PipelineException("Don't have either sequencerRun and htsfSample");
    }
    if (getWorkflowPlan().getSequencerRun() != null) {
        logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString());
        try {
            htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO()
                    .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId()));
        } catch (MaPSeqDAOException e) {
            // Fix: include the exception so the stack trace is preserved instead of being
            // silently swallowed; the lookup remains best-effort.
            logger.error("problem getting HTSFSamples", e);
        }
    }
    if (getWorkflowPlan().getHTSFSamples() != null) {
        htsfSampleSet.addAll(getWorkflowPlan().getHTSFSamples());
    }
    return htsfSampleSet;
}
// NOTE(review): incomplete fragment — the enclosing method signature and the catch body are
// outside this view. In DEV run mode this disables glidein requirements, re-fetches the current
// WorkflowRun by id, stamps it with a start date, the condor DAG cluster id, and the DAG submit
// directory, then saves it. Two things to confirm against the full method: (1) the trailing
// catch (MaPSeqDAOException e) — make sure the cause is logged, not swallowed; (2) new Date()
// is legacy java.time territory, but changing it may affect persisted values — verify first.
MaPSeqConfigurationService configService = getPipelineBeanService().getMaPSeqConfigurationService(); if (configService != null && configService.getRunMode().equals(RunModeType.DEV)) { includeGlideinRequirements = false; WorkflowRun workflowRun = getPipelineBeanService().getMaPSeqDAOBean().getWorkflowRunDAO() .findById(getWorkflowPlan().getWorkflowRun().getId()); workflowRun.setStartDate(new Date()); workflowRun.setCondorDAGClusterId(jobNode.getCluster()); workflowRun.setSubmitDirectory(jobNode.getSubmitFile().getParentFile().getAbsolutePath()); WorkflowRunDAO workflowRunDAO = getPipelineBeanService().getMaPSeqDAOBean().getWorkflowRunDAO(); workflowRunDAO.save(workflowRun); } catch (MaPSeqDAOException e) {