// Fragment of an Observer-style callback (likely update(Observable o, Object arg)):
// when a WorkflowRun reaches DONE or FAILED, it builds an email to the run's creator
// via SMTP (smtp.unc.edu:465) summarizing the run, sequencer run, and samples.
// NOTE(review): this chunk is truncated/mangled — the orphan token
// ".getPipeline().getName()));" mid-line indicates missing statements (presumably the
// email subject construction), and the braces opened here are never closed in view.
// NOTE(review): fromEmailAddress is assigned creator.getEmailAddress(), i.e. identical
// to toEmailAddress — confirm whether a system "from" address was intended.
// NOTE(review): mutating System.getProperties() directly alters global JVM state;
// a dedicated Properties instance for the mail Session would be safer — verify callers.
PipelineExecutor pipelineExecutor = (PipelineExecutor) o; WorkflowRunStatusType status = (WorkflowRunStatusType) arg; if (WorkflowRunStatusType.DONE.equals(status) || WorkflowRunStatusType.FAILED.equals(status)) { WorkflowPlan workflowPlan = pipelineExecutor.getPipeline().getWorkflowPlan(); if (workflowPlan != null) { WorkflowRun workflowRun = workflowPlan.getWorkflowRun(); if (workflowRun != null) { Account creator = workflowRun.getCreator(); if (creator != null) { String toEmailAddress = creator.getEmailAddress(); if (StringUtils.isNotEmpty(toEmailAddress)) { String fromEmailAddress = creator.getEmailAddress(); Properties properties = System.getProperties(); properties.setProperty("mail.user", creator.getName()); properties.setProperty("mail.smtps.host", "smtp.unc.edu"); properties.setProperty("mail.smtps.port", "465"); .getPipeline().getName())); StringBuilder sb = new StringBuilder(); sb.append(String.format("WorkflowRun Name: %s%n", workflowRun.getName())); if (workflowPlan.getSequencerRun() != null) { sb.append(String.format("SequencerRun Name: %s%n", workflowPlan .getSequencerRun().getName())); if (workflowPlan.getHTSFSamples() != null) { for (HTSFSample sample : workflowPlan.getHTSFSamples()) { sb.append(String.format("HTSFSample Name: %s%n", sample.getName()));
// Fragment of a file-resolution helper: for a FileData whose mime type matches,
// it looks up the Jobs associated with that FileData and workflow, and when a job's
// name contains the given class's simple name, adds the file to the result list and
// stops scanning.
// NOTE(review): truncated — the enclosing loops over fileData/job and the catch body
// for MaPSeqDAOException are not visible here; as shown, the DAO exception appears to
// be swallowed silently — confirm against the full source and log the cause if so.
// NOTE(review): the DAO method name findFileDataByIdAndWorkflowId returning List<Job>
// reads oddly (presumably "find Jobs by FileData id and Workflow id") — verify.
if (fileData.getMimeType().equals(mimeType)) { List<Job> jobList = null; try { jobList = mapseqDAOBean.getJobDAO().findFileDataByIdAndWorkflowId(fileData.getId(), clazz.getName(), workflowId); } catch (MaPSeqDAOException e) { if (job.getName().contains(clazz.getSimpleName())) { logger.debug("using FileData: {}", fileData.toString()); logger.debug("from Job: {}", job.toString()); ret.add(new File(fileData.getPath(), fileData.getName())); break;
// Fragment of DAG/job construction: configures Picard AddOrReplaceReadGroups CLI
// arguments from the SequencerRun and HTSFSample — read group id as
// "<runName>-<barcode>_L<lane,3 digits>", platform from the run's instrument,
// platform unit from the barcode, and library/sample name as "L<lane,3 digits>_<barcode>".
// NOTE(review): truncated at the start — the ".toLowerCase());" belongs to a statement
// whose beginning is not visible in this chunk.
.toLowerCase()); job.addArgument(PicardAddOrReplaceReadGroupsCLI.READGROUPID, String.format("%s-%s_L%03d", sequencerRun.getName(), htsfSample.getBarcode(), htsfSample.getLaneIndex())); job.addArgument(PicardAddOrReplaceReadGroupsCLI.READGROUPPLATFORM, sequencerRun.getPlatform().getInstrument()); job.addArgument(PicardAddOrReplaceReadGroupsCLI.READGROUPPLATFORMUNIT, htsfSample.getBarcode()); job.addArgument(PicardAddOrReplaceReadGroupsCLI.READGROUPLIBRARY, String.format("L%03d_%s", htsfSample.getLaneIndex(), htsfSample.getBarcode())); job.addArgument(PicardAddOrReplaceReadGroupsCLI.READGROUPSAMPLENAME, String.format("L%03d_%s", htsfSample.getLaneIndex(), htsfSample.getBarcode())); job.addArgument(PicardAddOrReplaceReadGroupsCLI.READGROUPCENTERNAME, "UNC");
// Fragment of an IDCheck workflow: creates the sample's output directory, then scans
// the sample's FileData set for a TEXT_VCF produced by the variant calling workflow;
// throws WorkflowException when no VCF is found.
// NOTE(review): truncated — the orphan "variantCallingWorkflow.getId());" indicates a
// missing statement (presumably a DAO lookup filtered by that workflow id), and the
// loop/if structure around the mime-type check is not visible here.
SequencerRun sequencerRun = htsfSample.getSequencerRun(); File outputDirectory = createOutputDirectory(sequencerRun.getName(), htsfSample, getName().replace("IDCheck", ""), getVersion()); Set<FileData> fileDataSet = htsfSample.getFileDatas(); variantCallingWorkflow.getId()); if (fileData.getMimeType().equals(MimeType.TEXT_VCF)) { possibleVCFFileList.add(new File(fileData.getPath(), fileData.getName())); logger.warn("vcf file to process was not found: {}", htsfSample.toString()); throw new WorkflowException("vcf file to process was not found");
if ("Undetermined".equals(htsfSample.getBarcode())) { continue; SequencerRun sequencerRun = htsfSample.getSequencerRun(); File outputDirectory = createOutputDirectory(sequencerRun.getName(), htsfSample, getName(), getVersion());
// Fragment of an attribute-upsert helper for HTSFSample: if attributeName is not
// already present in the sample's attribute set a new EntityAttribute is added,
// otherwise the existing attribute's value is overwritten; the sample is then saved
// through the DAO and returned.
// NOTE(review): truncated — the "if not present" branch header, the loop that fills
// attributeNameSet, and the catch for the save's MaPSeqDAOException are not visible.
// NOTE(review): the debug message "Found HTSFSample" is logged after a save — it may
// be left over from a lookup helper; confirm the intended message.
Set<EntityAttribute> attributeSet = htsfSample.getAttributes(); attributeNameSet.add(attribute.getName()); EntityAttribute attribute = new EntityAttribute(); attribute.setName(attributeName); attribute.setValue(attributeValue); attributeSet.add(attribute); } else { for (EntityAttribute attribute : attributeSet) { if (attributeName.equals(attribute.getName())) { attribute.setValue(attributeValue); htsfSample.setAttributes(attributeSet); try { mapseqDAOBean.getHTSFSampleDAO().save(htsfSample); logger.debug("Found HTSFSample: {}", htsfSample.toString()); return htsfSample;
// Fragment of job-argument wiring: passes the WorkflowRun id, the creator's account id
// (when present), and a sequencerRunId to a CLI job — preferring the plan's
// SequencerRun and falling back to the HTSFSample's SequencerRun; also passes the
// htsfSampleId.
// NOTE(review): truncated — several opened if-blocks are not closed in this chunk, so
// it is unclear which arguments are conditional on htsfSample != null.
WorkflowRun workflowRun = workflowPlan.getWorkflowRun(); job.addArgument("--workflowRunId", workflowRun.getId().toString()); if (workflowRun.getCreator() != null) { job.addArgument("--accountId", workflowRun.getCreator().getId().toString()); SequencerRun sequencerRun = workflowPlan.getSequencerRun(); if (sequencerRun != null) { logger.debug("sequencerRun.getId().toString(): {}", sequencerRun.getId().toString()); job.addArgument("--sequencerRunId", sequencerRun.getId().toString()); } else if (sequencerRun == null && htsfSample != null) { logger.debug("htsfSample.getSequencerRun().getId().toString(): {}", htsfSample.getSequencerRun() .getId().toString()); job.addArgument("--sequencerRunId", htsfSample.getSequencerRun().getId().toString()); logger.debug("htsfSample.getId().toString(): {}", htsfSample.getId().toString()); job.addArgument("--htsfSampleId", htsfSample.getId().toString());
// Fragment of an attribute-upsert helper for SequencerRun (parallel to the HTSFSample
// variant elsewhere in this file): adds a new EntityAttribute when attributeName is
// absent, otherwise overwrites the existing value; then saves the run and returns it.
// NOTE(review): truncated — the branch header, the attributeNameSet fill loop, and the
// catch for the DAO save are outside this chunk.
// NOTE(review): "Found SequencerRun" is logged after a save — possibly a copy/paste
// from a lookup helper; confirm the intended message.
Set<EntityAttribute> attributeSet = sequencerRun.getAttributes(); attributeNameSet.add(attribute.getName()); EntityAttribute attribute = new EntityAttribute(); attribute.setName(attributeName); attribute.setValue(attributeValue); attributeSet.add(attribute); } else { for (EntityAttribute attribute : attributeSet) { if (attributeName.equals(attribute.getName())) { attribute.setValue(attributeValue); sequencerRun.setAttributes(attributeSet); try { mapseqDAOBean.getSequencerRunDAO().save(sequencerRun); logger.debug("Found SequencerRun: {}", sequencerRun.toString()); return sequencerRun;
/**
 * Aggregates the HTSFSamples addressed by this pipeline's WorkflowPlan: samples
 * resolved from the plan's SequencerRun (via the DAO) plus any samples listed
 * directly on the plan. The two sources are merged in a Set, so duplicates collapse.
 *
 * @return the (possibly empty) set of samples to process
 * @throws PipelineException when the plan names neither a SequencerRun nor samples
 */
public Set<HTSFSample> getAggregateHTSFSampleSet() throws PipelineException {
    Set<HTSFSample> htsfSampleSet = new HashSet<HTSFSample>();
    // hoist the repeated accessor chain; behavior is unchanged
    WorkflowPlan workflowPlan = getWorkflowPlan();
    if (workflowPlan.getSequencerRun() == null && workflowPlan.getHTSFSamples() == null) {
        logger.error("Don't have either sequencerRun and htsfSample");
        throw new PipelineException("Don't have either sequencerRun and htsfSample");
    }
    if (workflowPlan.getSequencerRun() != null) {
        logger.info("sequencerRun: {}", workflowPlan.getSequencerRun().toString());
        try {
            htsfSampleSet.addAll(getPipelineBeanService().getMaPSeqDAOBean().getHTSFSampleDAO()
                    .findBySequencerRunId(workflowPlan.getSequencerRun().getId()));
        } catch (MaPSeqDAOException e) {
            // BUG FIX: the exception was swallowed without its stack trace; keep the
            // best-effort behavior (directly-listed samples can still be returned)
            // but record the cause for diagnosis
            logger.error("problem getting HTSFSamples", e);
        }
    }
    if (workflowPlan.getHTSFSamples() != null) {
        htsfSampleSet.addAll(workflowPlan.getHTSFSamples());
    }
    return htsfSampleSet;
}
/**
 * Copies {@code source} to {@code destination} (deleting any pre-existing file at the
 * destination and creating parent directories as needed) and, when a mime type is
 * configured, registers a FileData record named after the destination file.
 *
 * @return a DefaultModuleOutput whose exit code is 0 on success and -1 on failure,
 *         with the failure message captured in the error buffer
 */
@Override
public ModuleOutput call() throws ModuleException {
    DefaultModuleOutput moduleOutput = new DefaultModuleOutput();
    int exitCode = 0;
    try {
        // replace any stale copy at the destination
        if (destination.exists()) {
            FileUtils.forceDelete(destination);
        }
        if (!destination.getParentFile().exists()) {
            destination.getParentFile().mkdirs();
        }
        FileUtils.copyFile(source, destination);
        if (mimeType != null) {
            FileData fileData = new FileData();
            fileData.setName(destination.getName());
            fileData.setMimeType(mimeType);
            addFileData(fileData);
        }
    } catch (Exception e) {
        e.printStackTrace();
        // BUG FIX: exitCode was left at 0 on failure, so callers saw success even
        // when the copy failed; also guard against a null exception message, since
        // new StringBuilder((String) null) throws NullPointerException
        exitCode = -1;
        moduleOutput.setError(new StringBuilder(String.valueOf(e.getMessage())));
    }
    moduleOutput.setExitCode(exitCode);
    return moduleOutput;
}
// Fragment (statement run inside a larger method): populates a fresh WorkflowRun in
// PENDING state with creator, creation/modification timestamps, name, and workflow.
// The variables workflowRun, account, date, workflowRunName and workflow are declared
// earlier in the enclosing (not visible) method.
workflowRun = new WorkflowRun(); workflowRun.setStatus(WorkflowRunStatusType.PENDING); workflowRun.setCreator(account); workflowRun.setCreationDate(date); workflowRun.setModificationDate(date); workflowRun.setName(workflowRunName); workflowRun.setWorkflow(workflow); logger.debug("WorkflowRun: {}", workflowRun.toString());
// Fragment of a status-transition handler: reloads the WorkflowRun by id, logs the
// old/new state, applies the new status, and (per the visible case labels) stamps
// startDate on first transition and endDate for DONE/FAILED, before writing the run
// back onto the pipeline's WorkflowPlan.
// NOTE(review): truncated — the try has no visible catch, and the "case DONE:" /
// "case FAILED:" / "case RUNNING:" labels have no visible switch header or closing
// braces, so the exact brace placement (e.g. whether setEndDate is inside the
// startDate null-check) cannot be determined from this chunk — verify in full source.
try { WorkflowRun workflowRun = workflowRunDAO.findById(pipelineExecutor.getPipeline().getWorkflowPlan() .getWorkflowRun().getId()); logger.debug(workflowRun.toString()); logger.info("changing status from : {} to {}", workflowRun.getStatus().getState(), status.getState()); workflowRun.setStatus(status); Date date = new Date(); case DONE: case FAILED: if (workflowRun.getStartDate() == null) { workflowRun.setStartDate(date); workflowRun.setEndDate(date); break; case RUNNING: if (workflowRun.getStartDate() == null) { workflowRun.setStartDate(date); logger.debug(workflowRun.toString()); pipelineExecutor.getPipeline().getWorkflowPlan().setWorkflowRun(workflowRun);
/**
 * Builds a new PENDING WorkflowRun from a JSON request.
 *
 * <p>The run is created only when the JSON object carries a "name" property and a
 * Workflow matching {@code pipelineName} exists; otherwise {@code null} is returned.
 *
 * @param mapseqDAOBean DAO facade used to resolve the Workflow by name
 * @param pipelineName  name of the pipeline/workflow to attach the run to
 * @param jsonObject    request payload; must contain "name" for a run to be built
 * @param account       creator recorded on the new run
 * @return a populated (unsaved) WorkflowRun, or null when the name is missing or the
 *         workflow cannot be found
 * @throws JSONException if the "name" property cannot be read as a string
 */
public static WorkflowRun getWorkflowRun(MaPSeqDAOBean mapseqDAOBean, String pipelineName, JSONObject jsonObject,
        Account account) throws JSONException {
    logger.info("ENTERING getWorkflowRun(Pipeline<?>, JSONObject, Account)");
    logger.debug("jsonObject.has(\"name\"): {}", jsonObject.has("name"));
    // guard clause: without a run name there is nothing to build
    if (!jsonObject.has("name")) {
        return null;
    }
    String runName = jsonObject.getString("name");
    Workflow workflow = null;
    try {
        workflow = mapseqDAOBean.getWorkflowDAO().findByName(pipelineName);
    } catch (MaPSeqDAOException e) {
        logger.error("ERROR", e);
    }
    if (workflow == null) {
        logger.warn("No Workflow Found: {}", pipelineName);
        return null;
    }
    WorkflowRun run = new WorkflowRun();
    run.setStatus(WorkflowRunStatusType.PENDING);
    run.setCreator(account);
    run.setName(runName);
    run.setWorkflow(workflow);
    logger.debug("WorkflowRun: {}", run.toString());
    return run;
}
/**
 * Post-run hook: launches background tasks that persist demultiplexed-stats and
 * observed-cluster-density attributes for the SequencerRun this workflow ran against.
 * Returns without doing anything when no samples can be resolved for the run.
 */
@Override
public void postRun() throws WorkflowException {
    // guard: the original dereferenced getSequencerRun() unconditionally and would
    // NPE on a plan without one — NOTE(review): confirm a SequencerRun is always
    // present for this workflow; the guard only makes the failure mode explicit
    if (getWorkflowPlan().getSequencerRun() == null) {
        logger.warn("WorkflowPlan has no SequencerRun; skipping postRun()");
        return;
    }
    List<HTSFSample> htsfSampleList = null;
    try {
        htsfSampleList = getWorkflowBeanService().getMaPSeqDAOBean().getHTSFSampleDAO()
                .findBySequencerRunId(getWorkflowPlan().getSequencerRun().getId());
    } catch (MaPSeqDAOException e) {
        // was e.printStackTrace(); route through SLF4J like the rest of the class
        logger.error("ERROR", e);
    }
    if (htsfSampleList == null) {
        logger.warn("htsfSampleList was null");
        return;
    }
    List<Long> sequencerRunIdList = new ArrayList<Long>();
    sequencerRunIdList.add(getWorkflowPlan().getSequencerRun().getId());

    SaveDemultiplexedStatsAttributesRunnable saveDemultiplexedStatsAttributesRunnable = new SaveDemultiplexedStatsAttributesRunnable();
    saveDemultiplexedStatsAttributesRunnable.setMapseqDAOBean(getWorkflowBeanService().getMaPSeqDAOBean());
    saveDemultiplexedStatsAttributesRunnable.setSequencerRunIdList(sequencerRunIdList);
    // BUG FIX: executors were never shut down, leaking a non-daemon worker thread
    // per invocation; shutdown() still lets the already-submitted task complete
    java.util.concurrent.ExecutorService demuxExecutor = Executors.newSingleThreadExecutor();
    demuxExecutor.execute(saveDemultiplexedStatsAttributesRunnable);
    demuxExecutor.shutdown();

    SaveObservedClusterDensityAttributesRunnable saveObservedClusterDensityAttributesRunnable = new SaveObservedClusterDensityAttributesRunnable();
    saveObservedClusterDensityAttributesRunnable.setMapseqDAOBean(getWorkflowBeanService().getMaPSeqDAOBean());
    saveObservedClusterDensityAttributesRunnable.setMapseqConfigurationService(getWorkflowBeanService()
            .getMaPSeqConfigurationService());
    saveObservedClusterDensityAttributesRunnable.setSequencerRunIdList(sequencerRunIdList);
    java.util.concurrent.ExecutorService densityExecutor = Executors.newSingleThreadExecutor();
    densityExecutor.execute(saveObservedClusterDensityAttributesRunnable);
    densityExecutor.shutdown();
}
/**
 * Post-run hook: for every aggregated HTSFSample, launches a background task that
 * persists the sample's best-match attribute.
 */
@Override
public void postRun() throws WorkflowException {
    logger.info("ENTERING postRun()");
    Set<HTSFSample> htsfSampleSet = getAggregateHTSFSampleSet();
    logger.info("htsfSampleSet.size(): {}", htsfSampleSet.size());
    for (HTSFSample htsfSample : htsfSampleSet) {
        SaveBestMatchAttributeRunnable runnable = new SaveBestMatchAttributeRunnable();
        runnable.setMapseqDAOBean(getWorkflowBeanService().getMaPSeqDAOBean());
        runnable.setHtsfSampleId(htsfSample.getId());
        // BUG FIX: each executor was created and never shut down, leaking one
        // non-daemon worker thread per sample; shutdown() preserves the original
        // one-thread-per-sample concurrency while letting the queued task finish
        // and then releasing its thread
        java.util.concurrent.ExecutorService executorService = Executors.newSingleThreadExecutor();
        executorService.execute(runnable);
        executorService.shutdown();
    }
}
// Fragment of FASTQ discovery: for FASTQ FileData, matches the file name against an
// R1 pattern built from the sequencer run name and lane index, then against an R2
// pattern, logging each hit.
// NOTE(review): truncated — patternR2 is used but not declared in this chunk, and the
// code that collects/returns the matched files is not visible.
// NOTE(review): the format "_L00%d_" only yields Illumina's three-digit lane field
// (e.g. _L001_) for laneIndex 1-9; "_L%03d_" would be correct for all lanes — confirm
// whether laneIndex can reach 10+ on the supported instruments.
MimeType mimeType = fileData.getMimeType(); if (mimeType != null && mimeType.equals(MimeType.FASTQ)) { Pattern patternR1 = Pattern.compile(String.format("^%s.*_L00%d_R1\\.fastq\\.gz$", sequencerRunName, laneIndex)); Matcher matcherR1 = patternR1.matcher(fileData.getName()); File file = new File(fileData.getPath(), fileData.getName()); if (matcherR1.matches()) { logger.debug("found file: {}", file.getAbsolutePath()); Matcher matcherR2 = patternR2.matcher(fileData.getName()); if (matcherR2.matches()) { logger.debug("found file: {}", file.getAbsolutePath());
/**
 * Resolves a Platform from the "guid" property of the supplied JSON object.
 *
 * @param mapseqDAOBean DAO facade used for the Platform lookup
 * @param jsonObject    request payload; the "guid" property, when present, is the
 *                      Platform's primary key
 * @return the matching Platform, or null when "guid" is absent or no Platform matches
 * @throws JSONException if "guid" cannot be read as a long
 */
public static Platform getPlatform(MaPSeqDAOBean mapseqDAOBean, JSONObject jsonObject) throws JSONException {
    logger.info("ENTERING getPlatform(MaPSeqDAOBean, JSONObject)");
    Platform platform = null;
    if (jsonObject.has("guid")) {
        Long guid = jsonObject.getLong("guid");
        try {
            platform = mapseqDAOBean.getPlatformDAO().findById(guid);
        } catch (MaPSeqDAOException e) {
            // was e.printStackTrace(); log through SLF4J with the cause attached,
            // consistent with the error handling elsewhere in this file
            logger.error("ERROR", e);
        }
    }
    if (platform == null) {
        logger.warn("No Platform found");
        return null;
    }
    logger.debug("Found Platform: {}", platform.toString());
    return platform;
}
if ("Undetermined".equals(htsfSample.getBarcode())) { continue; SequencerRun sequencerRun = htsfSample.getSequencerRun(); File outputDirectory = createOutputDirectory(sequencerRun.getName(), htsfSample, getName().replace("Alignment", ""), getVersion());
// Fragment of an attribute-upsert helper for HTSFSample: adds a new EntityAttribute
// when attributeName is absent from the sample's attribute set, otherwise overwrites
// the existing attribute's value; the sample is then saved via the DAO and returned.
// NOTE(review): truncated — the branch header, the attributeNameSet fill loop, and the
// catch for the DAO save are not visible in this chunk.
// NOTE(review): this chunk appears to be a byte-identical duplicate of another
// fragment earlier in this file — consider consolidating into one shared helper.
Set<EntityAttribute> attributeSet = htsfSample.getAttributes(); attributeNameSet.add(attribute.getName()); EntityAttribute attribute = new EntityAttribute(); attribute.setName(attributeName); attribute.setValue(attributeValue); attributeSet.add(attribute); } else { for (EntityAttribute attribute : attributeSet) { if (attributeName.equals(attribute.getName())) { attribute.setValue(attributeValue); htsfSample.setAttributes(attributeSet); try { mapseqDAOBean.getHTSFSampleDAO().save(htsfSample); logger.debug("Found HTSFSample: {}", htsfSample.toString()); return htsfSample;
/**
 * Resolves a Platform from the "guid" property of the supplied JSON object.
 *
 * <p>NOTE(review): an identical copy of this helper appears elsewhere in this file —
 * consider consolidating into a single shared utility method.
 *
 * @param mapseqDAOBean DAO facade used for the Platform lookup
 * @param jsonObject    request payload; the "guid" property, when present, is the
 *                      Platform's primary key
 * @return the matching Platform, or null when "guid" is absent or no Platform matches
 * @throws JSONException if "guid" cannot be read as a long
 */
public static Platform getPlatform(MaPSeqDAOBean mapseqDAOBean, JSONObject jsonObject) throws JSONException {
    logger.info("ENTERING getPlatform(MaPSeqDAOBean, JSONObject)");
    Platform platform = null;
    if (jsonObject.has("guid")) {
        Long guid = jsonObject.getLong("guid");
        try {
            platform = mapseqDAOBean.getPlatformDAO().findById(guid);
        } catch (MaPSeqDAOException e) {
            // was e.printStackTrace(); log through SLF4J with the cause attached,
            // consistent with the error handling elsewhere in this file
            logger.error("ERROR", e);
        }
    }
    if (platform == null) {
        logger.warn("No Platform found");
        return null;
    }
    logger.debug("Found Platform: {}", platform.toString());
    return platform;
}