/** Renders the iterator state — read name, read/genome offsets, and current cigar element — for debugging. */
@Override
public String toString() {
    final String readName = read.getReadName();
    return String.format("%s ro=%d go=%d cec=%d %s",
            readName, readOffset, genomeOffset, offsetIntoCurrentCigarElement, currentElement);
}
/**
 * Two wrappers are equal exactly when their wrapped reads share both the read name
 * and the SAM flags (so mates of a pair, which share a name, remain distinct).
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (!(o instanceof GATKSAMRecordHashWrapper)) {
        return false;
    }
    final GATKSAMRecord that = ((GATKSAMRecordHashWrapper) o).samRec;
    if (!samRec.getReadName().equals(that.getReadName())) {
        return false;
    }
    return samRec.getFlags() == that.getFlags();
}
/**
 * Equality is defined purely by read name; any non-GATKSAMRecord operand (including null)
 * compares unequal. NOTE(review): mates of a read pair share a read name and would compare
 * equal here — confirm callers rely on name-only equality.
 */
@Override
public boolean equals(Object o) {
    if (!(o instanceof GATKSAMRecord)) {
        return false;
    }
    final GATKSAMRecord that = (GATKSAMRecord) o;
    return getReadName().equals(that.getReadName());
}
/**
 * Logs an informational message for the given read iff its name matches {@code readName}.
 * Does nothing when either the name or the record is null, or when the names differ.
 */
public static void logReadInfo(final String readName, final GATKSAMRecord rec, final String message) {
    if (readName == null || rec == null) {
        return;
    }
    if (readName.equals(rec.getReadName())) {
        logger.info("Found " + rec + " - " + message);
    }
}
/**
 * Debug method to dump contents of object into string for display.
 * Lists the alleles tracked in the map, then one line per (read, allele) pair
 * giving that pair's likelihood.
 */
public String toString() {
    final StringBuilder sb = new StringBuilder();
    sb.append("Alleles in map:");  // fixed typo: was "Alelles in map:"
    for (final Allele a : alleles) {
        // chained appends avoid building throwaway concatenated strings
        sb.append(a.getDisplayString()).append(',');
    }
    sb.append('\n');
    for (final Map.Entry<GATKSAMRecord, Map<Allele, Double>> el : getLikelihoodReadMap().entrySet()) {
        for (final Map.Entry<Allele, Double> eli : el.getValue().entrySet()) {
            sb.append("Read ").append(el.getKey().getReadName())
              .append(". Allele:").append(eli.getKey().getDisplayString())
              .append(" has likelihood=").append(eli.getValue())  // append(Double) == Double.toString
              .append('\n');
        }
    }
    return sb.toString();
}
/**
 * Orders ReadCosts lexicographically by read name, with a "/1" or "/2" suffix
 * distinguishing the first and second mates of a paired read.
 */
@Override
public int compare(final ReadCost o1, final ReadCost o2) {
    final String suffix1 = !o1.read.getReadPairedFlag() ? "" : (o1.read.getFirstOfPairFlag() ? "/1" : "/2");
    final String suffix2 = !o2.read.getReadPairedFlag() ? "" : (o2.read.getFirstOfPairFlag() ? "/1" : "/2");
    return (o1.read.getReadName() + suffix1).compareTo(o2.read.getReadName() + suffix2);
} };
/**
 * Writes the read to the output in FASTQ format: "@name", bases, "+", qualities.
 * Negative-strand reads are emitted reverse-complemented, with their qualities run
 * through invertQuals (presumably reversing the array to match — confirm).
 */
public void addAlignment(GATKSAMRecord read) {
    output.println("@" + read.getReadName());
    final boolean negativeStrand = read.getReadNegativeStrandFlag();
    if (negativeStrand) {
        output.println(ReadUtils.getBasesReverseComplement(read));
    } else {
        output.println(ReadUtils.convertReadBasesToString(read));
    }
    output.println("+");
    if (negativeStrand) {
        output.println(ReadUtils.convertReadQualToString(invertQuals(read.getBaseQualities())));
    } else {
        output.println(ReadUtils.convertReadQualToString(read));
    }
}
public int hashCode() { // Use a murmur hash of the flags to distribute the values more uniformly over the full range of int return samRec.getReadName().hashCode() + (31 * murmurHasher.hashLong(samRec.getFlags()).asInt()); } }
/**
 * Orders pileup elements by the alignment start of their reads, breaking ties
 * by read name so the ordering is fully deterministic.
 */
@Override
public int compare(PileupElement element1, PileupElement element2) {
    // Integer.compare avoids the overflow risk of subtracting the two starts directly.
    final int byStart = Integer.compare(element1.getRead().getAlignmentStart(),
                                        element2.getRead().getAlignmentStart());
    return byStart != 0
            ? byStart
            : element1.getRead().getReadName().compareTo(element2.getRead().getReadName());
} });
/**
 * Formats a pileup as "&lt;numDeletions&gt; entry,entry,..." where each entry is
 * readName, offset, readLength, and mappingQuality joined by verboseDelimiter.
 */
private static String createVerboseOutput(final ReadBackedPileup pileup) {
    final StringBuilder out = new StringBuilder();
    out.append(pileup.getNumberOfDeletions()).append(" ");
    String separator = "";
    for (final PileupElement p : pileup) {
        out.append(separator);
        separator = ",";  // comma before every entry after the first
        out.append(p.getRead().getReadName()).append(verboseDelimiter)
           .append(p.getOffset()).append(verboseDelimiter)
           .append(p.getRead().getReadLength()).append(verboseDelimiter)
           .append(p.getRead().getMappingQuality());
    }
    return out.toString();
}
/**
 * Name-only equality: equal iff the other object is a GATKSAMRecord with the same
 * read name. NOTE(review): paired-end mates share a name — verify this is intended.
 */
@Override
public boolean equals(Object o) {
    if (o instanceof GATKSAMRecord) {
        final GATKSAMRecord other = (GATKSAMRecord) o;
        return getReadName().equals(other.getReadName());
    }
    return false;
}
/**
 * Validates the read's cigar and replaces it with the result of refactorNDNtoN
 * (which, per its name, collapses N-D-N cigar runs — see that helper).
 *
 * @param read the read to transform; must be non-null with a valid cigar
 * @return the same read instance, with its cigar rewritten
 * @throws UserException.BadInput if the read is null or its cigar is invalid
 */
@Override
public GATKSAMRecord apply(final GATKSAMRecord read) {
    if (read == null) {
        throw new UserException.BadInput("try to transform a null GATKSAMRecord");
    }
    final Cigar originalCigar = read.getCigar();
    // Cigar.isValid returns null when the cigar is well-formed
    if (originalCigar.isValid(read.getReadName(), -1) != null) {
        throw new UserException.BadInput("try to transform a read with non-valid cigar string: readName: "
                + read.getReadName() + " Cigar String: " + originalCigar);
    }
    read.setCigar(refactorNDNtoN(originalCigar));
    return read;
}
/** Debug rendering of this element: "readName @ offset = base Qqual". */
@Ensures("result != null")
public String toString() {
    final char baseChar = (char) getBase();
    return String.format("%s @ %d = %c Q%d", getRead().getReadName(), getOffset(), baseChar, getQual());
}
/** Converts a pileup element into a ReadBase (name, base, mapping qual, base qual) and stores it. */
public void putReadBase(PileupElement pue) {
    final ReadBase readBase = new ReadBase(pue.getRead().getReadName(),
                                           pue.getBase(),
                                           pue.getMappingQual(),
                                           pue.getQual());
    bases.add(readBase);
}
/** Tests that map() returns the very read instance that we passed into it. */
@Test
public void testReturnRead() {
    final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(3, 1, 1000);
    final GATKSAMRecord record = ArtificialSAMUtils.createArtificialRead(header, "FakeRead", 1, 1, 50);
    final SAMRecord returned = walker.map(bases, record, null);
    // identity, not just equality: map must hand back the same object
    assertTrue(returned == record);
    assertTrue(returned.getReadName().equals(record.getReadName()));
} }
/** * The reads map function. * * * @param ref the reference bases that correspond to our read, if a reference was provided * @param read the read itself, as a GATKSAMRecord * @return the ReadClipper object describing what should be done to clip this read */ public ReadClipperWithData map(ReferenceContext ref, GATKSAMRecord read, RefMetaDataTracker metaDataTracker) { if ( onlyDoRead == null || read.getReadName().equals(onlyDoRead) ) { if ( clippingRepresentation == ClippingRepresentation.HARDCLIP_BASES || clippingRepresentation == ClippingRepresentation.REVERT_SOFTCLIPPED_BASES ) read = ReadClipper.revertSoftClippedBases(read); ReadClipperWithData clipper = new ReadClipperWithData(read, sequencesToClip); // // run all three clipping modules // clipBadQualityScores(clipper); clipCycles(clipper); clipSequences(clipper); return clipper; } return null; }
public void setCigar(Cigar cigar, boolean fixClippedCigar) { if ( cigar == null ) { newCigar = null; return; } if ( fixClippedCigar && getReadBases().length < read.getReadLength() ) cigar = reclipCigar(cigar); // no change? if ( read.getCigar().equals(cigar) ) { newCigar = null; return; } // no indel? String str = cigar.toString(); if ( !str.contains("D") && !str.contains("I") ) { logger.debug("Modifying a read with no associated indel; although this is possible, it is highly unlikely. Perhaps this region should be double-checked: " + read.getReadName() + " near " + read.getReferenceName() + ":" + read.getAlignmentStart()); // newCigar = null; // return; } newCigar = cigar; }
/**
 * Helper function that returns the phred-scaled base quality score we should use for
 * calculating likelihoods for a pileup element. May return 0 to indicate that the
 * observation is bad, and may cap the quality score by the mapping quality of the
 * read itself.
 *
 * @param p                        Pileup element
 * @param ignoreBadBases           Flag to ignore bad (non-regular) bases, returning 0 for them
 * @param capBaseQualsAtMappingQual Whether to cap base Q at the read's mapping quality
 * @param minBaseQual              Minimum quality; anything below maps to 0
 * @return New phred-scaled base quality
 * @throws UserException.MalformedBAM when the base quality exceeds SAMUtils.MAX_PHRED_SCORE
 */
private static byte qualToUse(PileupElement p, boolean ignoreBadBases, boolean capBaseQualsAtMappingQual, int minBaseQual) {
    if (ignoreBadBases && !BaseUtils.isRegularBase(p.getBase())) {
        return 0;
    }
    byte qual = p.getQual();
    if (qual > SAMUtils.MAX_PHRED_SCORE) {
        // Qualities above the phred ceiling usually indicate a mis-encoded BAM.
        throw new UserException.MalformedBAM(p.getRead(),
                String.format("the maximum allowed quality score is %d, but a quality of %d was observed in read %s. Perhaps your BAM incorrectly encodes the quality scores in Sanger format; see http://en.wikipedia.org/wiki/FASTQ_format for more details",
                        SAMUtils.MAX_PHRED_SCORE, qual, p.getRead().getReadName()));
    }
    if (capBaseQualsAtMappingQual) {
        qual = (byte) Math.min((int) qual, p.getMappingQual());
    }
    // Below-threshold qualities are zeroed rather than clamped to the minimum.
    return (int) qual < minBaseQual ? (byte) 0 : qual;
}
/**
 * Ensure that splitting read groups still works when dealing with null read groups.
 */
@Test
public void testSplitByNullReadGroups() {
    final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(1, 1, 1000);
    final GATKSAMRecord read1 = ArtificialSAMUtils.createArtificialRead(header, "read1", 0, 1, 10);
    final GATKSAMRecord read2 = ArtificialSAMUtils.createArtificialRead(header, "read2", 0, 1, 10);
    final GATKSAMRecord read3 = ArtificialSAMUtils.createArtificialRead(header, "read3", 0, 1, 10);
    final ReadBackedPileup pileup =
            new ReadBackedPileupImpl(null, Arrays.asList(read1, read2, read3), Arrays.asList(1, 1, 1));

    // All three artificial reads lack a read group, so the null-group pileup holds them all, in order.
    final ReadBackedPileup nullRgPileup = pileup.getPileupForReadGroup(null);
    final List<GATKSAMRecord> nullRgReads = nullRgPileup.getReads();
    Assert.assertEquals(nullRgPileup.getNumberOfElements(), 3, "Wrong number of reads in null read group");
    Assert.assertEquals(nullRgReads.get(0), read1, "Read " + read1.getReadName() + " should be in null rg but isn't");
    Assert.assertEquals(nullRgReads.get(1), read2, "Read " + read2.getReadName() + " should be in null rg but isn't");
    Assert.assertEquals(nullRgReads.get(2), read3, "Read " + read3.getReadName() + " should be in null rg but isn't");

    // A read group absent from the pileup yields null, not an empty pileup.
    final ReadBackedPileup rg1Pileup = pileup.getPileupForReadGroup("rg1");
    Assert.assertNull(rg1Pileup, "Pileup for non-existent read group should return null");
}
/**
 * Checks AlignmentUtils.createReadAlignedToRef against expectations: when expectedReadCigar
 * is null the realignment must fail (return null); otherwise the realigned read must keep
 * the original name/bases/quals, get the expected alignment start and cigar, and carry an
 * "HC" attribute. Finally verifies the input read itself was not mutated.
 */
@Test(dataProvider = "ReadAlignedToRefData", enabled = true)
public void testReadAlignedToRef(final GATKSAMRecord read, final Haplotype haplotype, final int refStart, final int expectedReadStart, final String expectedReadCigar) throws Exception {
    // NOTE(review): writer appears unused; kept in case the constructor has needed side effects.
    final HaplotypeBAMWriter writer = new CalledHaplotypeBAMWriter(new MockDestination());
    final GATKSAMRecord originalReadCopy = (GATKSAMRecord) read.clone();

    if (expectedReadCigar == null) {
        Assert.assertNull(AlignmentUtils.createReadAlignedToRef(read, haplotype, haplotype, refStart, true));
    } else {
        final Cigar expectedCigar = TextCigarCodec.decode(expectedReadCigar);
        final GATKSAMRecord alignedRead = AlignmentUtils.createReadAlignedToRef(read, haplotype, haplotype, refStart, true);
        Assert.assertEquals(alignedRead.getReadName(), originalReadCopy.getReadName());
        // the alignment-start assertion previously appeared twice; once is sufficient
        Assert.assertEquals(alignedRead.getAlignmentStart(), expectedReadStart);
        Assert.assertEquals(alignedRead.getReadBases(), originalReadCopy.getReadBases());
        Assert.assertEquals(alignedRead.getBaseQualities(), originalReadCopy.getBaseQualities());
        Assert.assertEquals(alignedRead.getCigar(), expectedCigar);
        Assert.assertNotNull(alignedRead.getAttribute("HC"));
    }

    // The input read must come through the realignment unmodified.
    Assert.assertEquals(read, originalReadCopy, "createReadAlignedToRef seems be modifying the original read!");
}