/**
 * Looks up a {@link Platform} by the numeric "guid" key of the given JSON payload.
 *
 * @param mapseqDAOBean DAO aggregate used to reach the Platform DAO
 * @param jsonObject payload that may carry a "guid" key holding the Platform id
 * @return the matching Platform, or {@code null} when "guid" is absent, the lookup
 *         fails, or no entity matches
 * @throws JSONException if "guid" is present but cannot be read as a long
 */
public static Platform getPlatform(MaPSeqDAOBean mapseqDAOBean, JSONObject jsonObject) throws JSONException {
    logger.info("ENTERING getPlatform(MaPSeqDAOBean, JSONObject)");
    Platform platform = null;
    if (jsonObject.has("guid")) {
        Long guid = jsonObject.getLong("guid");
        try {
            platform = mapseqDAOBean.getPlatformDAO().findById(guid);
        } catch (MaPSeqDAOException e) {
            // was e.printStackTrace(); route the failure through the class logger
            // (consistent with the logger.error("ERROR", e) style used elsewhere in this file)
            logger.error("Failed to find Platform by id: " + guid, e);
        }
    }
    if (platform == null) {
        logger.warn("No Platform found");
        return null;
    }
    logger.debug("Found Platform: {}", platform.toString());
    return platform;
}
/**
 * Builds a new PENDING {@link WorkflowRun} from the "name" key of the given JSON
 * payload, attached to the {@link Workflow} resolved by {@code pipelineName}.
 * The run is NOT persisted here; it is only constructed.
 *
 * @param mapseqDAOBean DAO aggregate used to resolve the Workflow by name
 * @param pipelineName name of the Workflow the run should belong to
 * @param jsonObject payload expected to carry a "name" key for the run
 * @param account creator recorded on the new run
 * @return the new WorkflowRun, or {@code null} when "name" is absent or no
 *         Workflow matches {@code pipelineName}
 * @throws JSONException if "name" is present but cannot be read as a string
 */
public static WorkflowRun getWorkflowRun(MaPSeqDAOBean mapseqDAOBean, String pipelineName, JSONObject jsonObject, Account account) throws JSONException {
    // Fixed: the previous ENTERING message described a (Pipeline<?>, JSONObject, Account)
    // signature that does not match this method's actual parameters.
    logger.info("ENTERING getWorkflowRun(MaPSeqDAOBean, String, JSONObject, Account)");
    WorkflowRun workflowRun = null;
    logger.debug("jsonObject.has(\"name\"): {}", jsonObject.has("name"));
    if (jsonObject.has("name")) {
        String workflowRunName = jsonObject.getString("name");
        Workflow workflow = null;
        try {
            workflow = mapseqDAOBean.getWorkflowDAO().findByName(pipelineName);
        } catch (MaPSeqDAOException e) {
            logger.error("ERROR", e);
        }
        if (workflow == null) {
            logger.warn("No Workflow Found: {}", pipelineName);
            return null;
        }
        workflowRun = new WorkflowRun();
        workflowRun.setStatus(WorkflowRunStatusType.PENDING);
        workflowRun.setCreator(account);
        workflowRun.setName(workflowRunName);
        workflowRun.setWorkflow(workflow);
    }
    if (workflowRun != null) {
        logger.debug("WorkflowRun: {}", workflowRun.toString());
    }
    return workflowRun;
}
if (fileData.getMimeType().equals(mimeType)) { List<Job> jobList = null; try { jobList = mapseqDAOBean.getJobDAO().findFileDataByIdAndWorkflowId(fileData.getId(), clazz.getName(), workflowId); } catch (MaPSeqDAOException e) { e.printStackTrace(); if (job.getName().contains(clazz.getSimpleName())) { logger.debug("using FileData: {}", fileData.toString()); logger.debug("from Job: {}", job.toString()); ret.add(new File(fileData.getPath(), fileData.getName())); break;
if (getWorkflowPlan().getSequencerRun() == null && getWorkflowPlan().getHTSFSamples() == null) { logger.error("Don't have either sequencerRun and htsfSample"); throw new PipelineException("Don't have either sequencerRun and htsfSample"); if (getWorkflowPlan().getSequencerRun() != null) { logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString()); try { htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO() .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId())); } catch (MaPSeqDAOException e) { e.printStackTrace(); if (getWorkflowPlan().getHTSFSamples() != null) { htsfSampleSet.addAll(getWorkflowPlan().getHTSFSamples()); if ("Undetermined".equals(htsfSample.getBarcode())) { continue; SequencerRun sequencerRun = htsfSample.getSequencerRun(); File outputDirectory = createOutputDirectory(sequencerRun.getName(), htsfSample, getName()); Integer laneIndex = htsfSample.getLaneIndex(); logger.debug("laneIndex = {}", laneIndex); List<File> readPairList = PipelineUtil.getReadPairList(htsfSample.getFileDatas(), sequencerRun.getName(), htsfSample.getLaneIndex()); logger.info("fileList = {}", readPairList.size()); int idx = htsfSample.getName().lastIndexOf("-"); String participantId = idx != -1 ? htsfSample.getName().substring(0, idx) : htsfSample.getName();
variantCallingWorkflow = getWorkflowBeanService().getMaPSeqDAOBean().getWorkflowDAO() .findByName("NIDAUCSFVariantCalling"); } catch (MaPSeqDAOException e1) { e1.printStackTrace(); File outputDirectory = new File(htsfSample.getOutputDirectory()); Set<FileData> fileDataSet = htsfSample.getFileDatas(); variantCallingWorkflow.getId()); if (possibleVCFFileList != null && possibleVCFFileList.size() > 0) { vcfFile = possibleVCFFileList.get(0); logger.warn("vcf file to process was not found: {}", htsfSample.toString()); return; Set<EntityAttribute> attributeSet = htsfSample.getAttributes(); entityAttributeNameSet.add(attribute.getName()); if (synchSet.contains("best_match")) { for (EntityAttribute attribute : attributeSet) { if (attribute.getName().equals("best_match")) { attribute.setValue(line.split("\\t")[1]); break; attributeSet.add(new EntityAttribute("best_match", line.split("\\t")[1])); htsfSample.setAttributes(attributeSet);
for (Long id : sequencerRunIdList) { try { sequencerRunList.add(mapseqDAOBean.getSequencerRunDAO().findById(id)); } catch (MaPSeqDAOException e1) { File flowcellDir = new File(sr.getBaseDirectory(), sr.getName()); File dataDir = new File(flowcellDir, "Data"); File reportsDir = new File(dataDir, "reports"); List<HTSFSample> htsfSampleList = mapseqDAOBean.getHTSFSampleDAO().findBySequencerRunId(sr.getId()); Map<Integer, List<Double>> laneClusterDensityTotalMap = new HashMap<Integer, List<Double>>(); if (!laneClusterDensityTotalMap.containsKey(sample.getLaneIndex())) { laneClusterDensityTotalMap.put(sample.getLaneIndex(), new ArrayList<Double>()); List<Double> laneClusterDensityTotalList = laneClusterDensityTotalMap.get(sample.getLaneIndex()); long clusterDensityTotal = 0; for (Double clusterDensity : laneClusterDensityTotalList) { Set<EntityAttribute> attributeSet = sample.getAttributes(); entityAttributeNameSet.add(attribute.getName()); if (synchSet.contains("observedClusterDensity")) { for (EntityAttribute attribute : attributeSet) { if (attribute.getName().equals("observedClusterDensity")) { attribute.setValue(value); break;
variantCallingWorkflow = getWorkflowBeanService().getMaPSeqDAOBean().getWorkflowDAO() .findByName("NIDAUCSFVariantCalling"); } catch (MaPSeqDAOException e1) { e1.printStackTrace(); SequencerRun sequencerRun = htsfSample.getSequencerRun(); File outputDirectory = createOutputDirectory(sequencerRun.getName(), htsfSample, getName().replace("IDCheck", ""), getVersion()); Set<FileData> fileDataSet = htsfSample.getFileDatas(); variantCallingWorkflow.getId()); if (fileData.getMimeType().equals(MimeType.TEXT_VCF)) { possibleVCFFileList.add(new File(fileData.getPath(), fileData.getName())); logger.warn("vcf file to process was not found: {}", htsfSample.toString()); throw new WorkflowException("vcf file to process was not found");
variantCallingWorkflow = getWorkflowBeanService().getMaPSeqDAOBean().getWorkflowDAO() .findByName("NIDAUCSFVariantCalling"); } catch (MaPSeqDAOException e1) { e1.printStackTrace(); SequencerRun sequencerRun = htsfSample.getSequencerRun(); File outputDirectory = createOutputDirectory(sequencerRun.getName(), htsfSample, getName().replace("DOC", ""), getVersion()); Set<FileData> fileDataSet = htsfSample.getFileDatas(); List<File> possibleFileList = WorkflowUtil.lookupFileByJobAndMimeTypeAndWorkflowId(fileDataSet, getWorkflowBeanService().getMaPSeqDAOBean(), PicardMarkDuplicates.class, MimeType.APPLICATION_BAM, variantCallingWorkflow.getId()); logger.warn("bam file to process was not found: {}", htsfSample.toString()); throw new WorkflowException("bam file to process was not found"); logger.warn("bam index file to process was not found: {}", htsfSample.toString()); throw new WorkflowException("bam index file to process was not found");
htsfSample = mapseqDAOBean.getHTSFSampleDAO().findById(guid); } catch (MaPSeqDAOException e) { e.printStackTrace(); Set<EntityAttribute> attributeSet = htsfSample.getAttributes(); attributeNameSet.add(attribute.getName()); EntityAttribute attribute = new EntityAttribute(); attribute.setName(attributeName); attribute.setValue(attributeValue); attributeSet.add(attribute); } else { for (EntityAttribute attribute : attributeSet) { if (attributeName.equals(attribute.getName())) { attribute.setValue(attributeValue); htsfSample.setAttributes(attributeSet); try { mapseqDAOBean.getHTSFSampleDAO().save(htsfSample); } catch (MaPSeqDAOException e) { e.printStackTrace(); logger.debug("Found HTSFSample: {}", htsfSample.toString()); return htsfSample;
sequencerRun = mapseqDAOBean.getSequencerRunDAO().findById(guid); } catch (MaPSeqDAOException e) { e.printStackTrace(); Set<EntityAttribute> attributeSet = sequencerRun.getAttributes(); attributeNameSet.add(attribute.getName()); EntityAttribute attribute = new EntityAttribute(); attribute.setName(attributeName); attribute.setValue(attributeValue); attributeSet.add(attribute); } else { for (EntityAttribute attribute : attributeSet) { if (attributeName.equals(attribute.getName())) { attribute.setValue(attributeValue); sequencerRun.setAttributes(attributeSet); try { mapseqDAOBean.getSequencerRunDAO().save(sequencerRun); } catch (MaPSeqDAOException e) { e.printStackTrace(); logger.debug("Found SequencerRun: {}", sequencerRun.toString()); return sequencerRun;
/**
 * Post-workflow hook: kicks off two background tasks that persist demultiplexed
 * stats and observed-cluster-density attributes for the sequencer run of the
 * current workflow plan. Returns early (with a warning) if the run's samples
 * cannot be loaded.
 *
 * @throws WorkflowException declared by the interface; not thrown here
 */
@Override
public void postRun() throws WorkflowException {
    List<HTSFSample> htsfSampleList = null;
    try {
        htsfSampleList = getWorkflowBeanService().getMaPSeqDAOBean().getHTSFSampleDAO()
                .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId());
    } catch (MaPSeqDAOException e) {
        // was e.printStackTrace(); log with the cause so the DAO failure is visible
        logger.error("Failed to find HTSFSamples for SequencerRun", e);
    }
    if (htsfSampleList == null) {
        logger.warn("htsfSampleList was null");
        return;
    }
    List<Long> sequencerRunIdList = new ArrayList<Long>();
    sequencerRunIdList.add(getWorkflowPlan().getSequencerRun().getId());

    SaveDemultiplexedStatsAttributesRunnable saveDemultiplexedStatsAttributesRunnable = new SaveDemultiplexedStatsAttributesRunnable();
    saveDemultiplexedStatsAttributesRunnable.setMapseqDAOBean(getWorkflowBeanService().getMaPSeqDAOBean());
    saveDemultiplexedStatsAttributesRunnable.setSequencerRunIdList(sequencerRunIdList);
    // shutdown() after execute(): the task still runs to completion, but the
    // executor's worker thread is released afterwards instead of leaking
    // (a bare newSingleThreadExecutor keeps a non-daemon thread alive forever)
    ExecutorService demultiplexedStatsExecutor = Executors.newSingleThreadExecutor();
    demultiplexedStatsExecutor.execute(saveDemultiplexedStatsAttributesRunnable);
    demultiplexedStatsExecutor.shutdown();

    SaveObservedClusterDensityAttributesRunnable saveObservedClusterDensityAttributesRunnable = new SaveObservedClusterDensityAttributesRunnable();
    saveObservedClusterDensityAttributesRunnable.setMapseqDAOBean(getWorkflowBeanService().getMaPSeqDAOBean());
    saveObservedClusterDensityAttributesRunnable.setMapseqConfigurationService(getWorkflowBeanService()
            .getMaPSeqConfigurationService());
    saveObservedClusterDensityAttributesRunnable.setSequencerRunIdList(sequencerRunIdList);
    ExecutorService clusterDensityExecutor = Executors.newSingleThreadExecutor();
    clusterDensityExecutor.execute(saveObservedClusterDensityAttributesRunnable);
    clusterDensityExecutor.shutdown();
}
/**
 * Collects the HTSFSamples relevant to this workflow plan: all samples of the
 * plan's sequencer run (when one is set) plus any samples attached directly to
 * the plan. At least one of the two sources must be present.
 *
 * @return the aggregated (de-duplicated) sample set; may be empty if the DAO
 *         lookup fails and the plan carries no direct samples
 * @throws PipelineException when the plan has neither a sequencer run nor samples
 */
public Set<HTSFSample> getAggregateHTSFSampleSet() throws PipelineException {
    Set<HTSFSample> htsfSampleSet = new HashSet<HTSFSample>();
    if (getWorkflowPlan().getSequencerRun() == null && getWorkflowPlan().getHTSFSamples() == null) {
        logger.error("Don't have either sequencerRun and htsfSample");
        throw new PipelineException("Don't have either sequencerRun and htsfSample");
    }
    if (getWorkflowPlan().getSequencerRun() != null) {
        logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString());
        try {
            htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO()
                    .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId()));
        } catch (MaPSeqDAOException e) {
            // include the cause; previously only a bare message was logged
            logger.error("problem getting HTSFSamples", e);
        }
    }
    if (getWorkflowPlan().getHTSFSamples() != null) {
        htsfSampleSet.addAll(getWorkflowPlan().getHTSFSamples());
    }
    return htsfSampleSet;
}
File htsfSampleOutputDir = new File(workflowDir, htsfSample.getName()); File tmpDir = new File(htsfSampleOutputDir, "tmp"); tmpDir.mkdirs(); htsfSample.setOutputDirectory(htsfSampleOutputDir.getAbsolutePath()); getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO().save(htsfSample); } catch (MaPSeqDAOException e1) { logger.error("Could not persist HTSFSample");
WorkflowRunStatusType status = (WorkflowRunStatusType) arg; WorkflowRunDAO workflowRunDAO = pipelineExecutor.getPipeline().getPipelineBeanService().getMaPSeqDAOBean() .getWorkflowRunDAO(); try { WorkflowRun workflowRun = workflowRunDAO.findById(pipelineExecutor.getPipeline().getWorkflowPlan() .getWorkflowRun().getId()); logger.debug(workflowRun.toString()); logger.info("changing status from : {} to {}", workflowRun.getStatus().getState(), status.getState()); workflowRun.setStatus(status); Date date = new Date(); case DONE: case FAILED: if (workflowRun.getStartDate() == null) { workflowRun.setStartDate(date); workflowRun.setEndDate(date); break; case RUNNING: if (workflowRun.getStartDate() == null) { workflowRun.setStartDate(date); workflowRunDAO.save(workflowRun); logger.debug(workflowRun.toString()); pipelineExecutor.getPipeline().getWorkflowPlan().setWorkflowRun(workflowRun);
if (getWorkflowPlan().getSequencerRun() == null && getWorkflowPlan().getHTSFSamples() == null) if (getWorkflowPlan().getSequencerRun() != null) logger.info("sequencerRun: {}", getWorkflowPlan().getSequencerRun().toString()); htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO() .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId())); catch (MaPSeqDAOException e){ e.printStackTrace();} if (getWorkflowPlan().getHTSFSamples() != null) htsfSampleSet.addAll(getWorkflowPlan().getHTSFSamples()); if ("Undetermined".equals(htsfSample.getBarcode())) {continue;} SequencerRun sequencerRun = htsfSample.getSequencerRun(); logger.debug("sequencerRun: {}", sequencerRun.toString()); Integer laneIndex = htsfSample.getLaneIndex(); logger.debug("laneIndex = {}", laneIndex); File outputDirectory = createOutputDirectory(sequencerRun.getName(), htsfSample, getName());
HTSFSampleDAO htsfSampleDAO = mapseqDAOBean.getHTSFSampleDAO(); try { if (sequencerRunId != null) { htsfSampleSet.addAll(htsfSampleDAO.findBySequencerRunId(sequencerRunId)); HTSFSample htsfSample = htsfSampleDAO.findById(htsfSampleId); if (htsfSample == null) { logger.error("HTSFSample was not found"); WorkflowDAO workflowDAO = mapseqDAOBean.getWorkflowDAO(); ncgenesWorkflow = workflowDAO.findByName("NCGenes"); } catch (MaPSeqDAOException e2) { e2.printStackTrace(); Set<EntityAttribute> attributeSet = htsfSample.getAttributes(); if (attributeSet == null) { attributeSet = new HashSet<EntityAttribute>(); htsfSample.getFileDatas(), this.mapseqDAOBean, SAMToolsFlagstat.class, MimeType.TEXT_STAT_SUMMARY, ncgenesWorkflow.getId()); if (StringUtils.isNotEmpty(htsfSample.getOutputDirectory())) { File sampleOutputDirectory = new File(htsfSample.getOutputDirectory()); for (File file : sampleOutputDirectory.listFiles()) { if (file.getName().endsWith("fix.pr.flagstat")) { logger.info("htsfSample: {}", htsfSample.toString()); continue;
htsfSample = mapseqDAOBean.getHTSFSampleDAO().findById(guid); } catch (MaPSeqDAOException e) { e.printStackTrace(); Set<EntityAttribute> attributeSet = htsfSample.getAttributes(); attributeNameSet.add(attribute.getName()); EntityAttribute attribute = new EntityAttribute(); attribute.setName(attributeName); attribute.setValue(attributeValue); attributeSet.add(attribute); } else { for (EntityAttribute attribute : attributeSet) { if (attributeName.equals(attribute.getName())) { attribute.setValue(attributeValue); htsfSample.setAttributes(attributeSet); try { mapseqDAOBean.getHTSFSampleDAO().save(htsfSample); } catch (MaPSeqDAOException e) { e.printStackTrace(); logger.debug("Found HTSFSample: {}", htsfSample.toString()); return htsfSample;
sequencerRun = mapseqDAOBean.getSequencerRunDAO().findById(guid); } catch (MaPSeqDAOException e) { e.printStackTrace(); Set<EntityAttribute> attributeSet = sequencerRun.getAttributes(); attributeNameSet.add(attribute.getName()); EntityAttribute attribute = new EntityAttribute(); attribute.setName(attributeName); attribute.setValue(attributeValue); attributeSet.add(attribute); } else { for (EntityAttribute attribute : attributeSet) { if (attributeName.equals(attribute.getName())) { attribute.setValue(attributeValue); sequencerRun.setAttributes(attributeSet); try { mapseqDAOBean.getSequencerRunDAO().save(sequencerRun); } catch (MaPSeqDAOException e) { e.printStackTrace(); logger.debug("Found SequencerRun: {}", sequencerRun.toString()); return sequencerRun;
workflow = mapseqDAOBean.getWorkflowDAO().findByName(pipelineName); } catch (MaPSeqDAOException e) { logger.error("ERROR", e); workflowRun = new WorkflowRun(); workflowRun.setStatus(WorkflowRunStatusType.PENDING); workflowRun.setCreator(account); workflowRun.setCreationDate(date); workflowRun.setModificationDate(date); workflowRun.setName(workflowRunName); workflowRun.setWorkflow(workflow); logger.debug("WorkflowRun: {}", workflowRun.toString());
/**
 * Looks up a {@link Platform} by the numeric "guid" key of the given JSON payload.
 *
 * @param mapseqDAOBean DAO aggregate used to reach the Platform DAO
 * @param jsonObject payload that may carry a "guid" key holding the Platform id
 * @return the matching Platform, or {@code null} when "guid" is absent, the lookup
 *         fails, or no entity matches
 * @throws JSONException if "guid" is present but cannot be read as a long
 */
public static Platform getPlatform(MaPSeqDAOBean mapseqDAOBean, JSONObject jsonObject) throws JSONException {
    logger.info("ENTERING getPlatform(MaPSeqDAOBean, JSONObject)");
    Platform platform = null;
    if (jsonObject.has("guid")) {
        Long guid = jsonObject.getLong("guid");
        try {
            platform = mapseqDAOBean.getPlatformDAO().findById(guid);
        } catch (MaPSeqDAOException e) {
            // was e.printStackTrace(); route the failure through the class logger
            // (consistent with the logger.error("ERROR", e) style used elsewhere in this file)
            logger.error("Failed to find Platform by id: " + guid, e);
        }
    }
    if (platform == null) {
        logger.warn("No Platform found");
        return null;
    }
    logger.debug("Found Platform: {}", platform.toString());
    return platform;
}