if (this.workflowPlan == null) {
    logger.error("workflowPlan is null");
    throw new PipelineException("workflowPlan is null");
}
if (StringUtils.isEmpty(mapseqHome)) {
    logger.error("MAPSEQ_HOME not set in env: {}", mapseqHome);
    throw new PipelineException("MAPSEQ_HOME not set in env");
}
File mapseqHomeDirectory = new File(mapseqHome);
if (!mapseqHomeDirectory.exists()) {
    logger.error("MAPSEQ_HOME does not exist: {}", mapseqHome);
    throw new PipelineException("MAPSEQ_HOME does not exist");
}
if (StringUtils.isEmpty(outputDir)) {
    logger.error("MAPSEQ_OUTPUT_DIRECTORY not set in env: {}", outputDir);
    throw new PipelineException("MAPSEQ_OUTPUT_DIRECTORY not set in env");
}
File outputDirectory = new File(outputDir);
if (!outputDirectory.exists()) {
    logger.error("MAPSEQ_OUTPUT_DIRECTORY does not exist: {}", outputDir);
    throw new PipelineException("MAPSEQ_OUTPUT_DIRECTORY does not exist");
}
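// A minimal sketch of how the values guarded above are presumably sourced;
// that they come from the process environment is inferred from the log
// messages ("not set in env"), and the exact field wiring is an assumption.
String mapseqHome = System.getenv("MAPSEQ_HOME");
String outputDir = System.getenv("MAPSEQ_OUTPUT_DIRECTORY");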
@Override
public void validate() throws PipelineException {
    logger.info("ENTERING validate()");
    try {
        this.graph = createGraph();
    } catch (PipelineException e) {
        logger.error("Problem running before start command", e);
        throw new PipelineException(e);
    }
    if (graph == null || graph.vertexSet().isEmpty()) {
        logger.error("graph is null or empty");
        throw new PipelineException("graph is null or empty");
    }
    Set<CondorJob> condorJobSet = graph.vertexSet();
    for (CondorJob condorJob : condorJobSet) {
        if (StringUtils.isEmpty(condorJob.getSiteName()) && condorJob.getTransferInputList().isEmpty()
                && condorJob.getTransferOutputList().isEmpty()) {
            throw new PipelineException("can't have a job where both siteName & list of inputs/outputs are empty");
        }
    }
}
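// Hedged sketch of a PipelineException consistent with the two constructors
// exercised in this section (String and Throwable); the real class lives in
// the MaPSeq codebase and may carry additional constructors or state.
public class PipelineException extends Exception {

    public PipelineException(String message) {
        super(message);
    }

    public PipelineException(Throwable cause) {
        super(cause);
    }
}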
} catch (MaPSeqDAOException e1) {
    logger.error("Could not persist HTSFSample", e1);
    throw new PipelineException("Could not persist HTSFSample");
}
public Set<HTSFSample> getAggregateHTSFSampleSet() throws PipelineException {
    Set<HTSFSample> htsfSampleSet = new HashSet<HTSFSample>();
    if (getWorkflowPlan().getSequencerRun() == null && getWorkflowPlan().getHTSFSamples() == null) {
        logger.error("Neither sequencerRun nor htsfSamples is set");
        throw new PipelineException("Neither sequencerRun nor htsfSamples is set");
    }
    if (getWorkflowPlan().getSequencerRun() != null) {
        logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString());
        try {
            htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO()
                    .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId()));
        } catch (MaPSeqDAOException e) {
            logger.error("problem getting HTSFSamples", e);
        }
    }
    if (getWorkflowPlan().getHTSFSamples() != null) {
        htsfSampleSet.addAll(getWorkflowPlan().getHTSFSamples());
    }
    return htsfSampleSet;
}
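// Hedged usage sketch: callers can consume the aggregate set uniformly,
// whether the WorkflowPlan carried a SequencerRun, explicit HTSFSamples,
// or both; the loop body here is illustrative only.
Set<HTSFSample> samples = getAggregateHTSFSampleSet();
for (HTSFSample sample : samples) {
    logger.info("sample: {}", sample.toString());
}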
if (!jobNode.getSubmitFile().exists()) {
    logger.error("jobNode.getSubmitFile().getAbsolutePath() = {}", jobNode.getSubmitFile().getAbsolutePath());
    throw new PipelineException("jobNode.getSubmitFile() doesn't exist");
}
// ...
throw new PipelineException(String.format("Backed off %d times & still could not submit to condor",
        getBackOffMultiplier()));
// ...
} catch (MaPSeqDAOException e) {
    logger.error("Problem saving WorkflowRun: ", e);
    throw new PipelineException(e);
}
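// Hedged sketch of the retry shape implied by the back-off message above;
// submitToCondor() is a hypothetical stand-in for the real submission call,
// and the linear sleep interval is an assumption.
int attempts = 0;
boolean submitted = false;
while (!submitted && attempts < getBackOffMultiplier()) {
    submitted = submitToCondor(jobNode); // hypothetical helper
    if (!submitted) {
        attempts++;
        try {
            Thread.sleep(attempts * 2000L); // back off a little longer each round
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            break;
        }
    }
}
if (!submitted) {
    throw new PipelineException(String.format("Backed off %d times & still could not submit to condor",
            getBackOffMultiplier()));
}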
throw new PipelineException("Don't have either sequencerRun and htsfSample"); throw new PipelineException("ReadPairList is not 2");
throw new PipelineException("Don't have either sequencerRun and htsfSample"); throw new PipelineException(e);
throw new PipelineException("Don't have either sequencerRun and htsfSample"); graph.addEdge(worldJob, catJob); } catch (Exception e) { throw new PipelineException(e);}