public boolean isLogNormalized() { //return false; return getDataMin() < 0; }
/**
 * Returns the summary scores for one tile, computing and memoizing them on
 * first access.  The cache key combines sequence, zoom, tile number, and the
 * current window function, so switching window functions never serves stale
 * scores.
 *
 * @param querySeq   chromosome / sequence name being queried
 * @param zoom       zoom level the tile belongs to
 * @param tileNumber index of the tile at this zoom level
 * @param tileWidth  width of a tile in base pairs
 * @return the cached or freshly computed scores for the tile
 */
private List<LocusScore> getCachedSummaryScores(String querySeq, int zoom, int tileNumber, double tileWidth) {
    final String key = querySeq + "_" + zoom + "_" + tileNumber + "_" + windowFunction;
    List<LocusScore> cached = summaryScoreCache.get(key);
    if (cached != null) {
        return cached;
    }
    // Cache miss: derive the tile's genomic interval and compute its scores.
    final int tileStart = (int) (tileNumber * tileWidth);
    final int tileEnd = (int) ((tileNumber + 1) * tileWidth);
    final List<LocusScore> computed = getSummaryScores(querySeq, tileStart, tileEnd, zoom);
    summaryScoreCache.put(key, computed);
    return computed;
}
/**
 * Enables or disables count normalization, using the default per-million
 * scaling factor (1e6).
 *
 * @param normalizeCounts true to normalize counts, false for raw values
 */
public void setNormalize(boolean normalizeCounts) {
    final float countsPerMillion = 1.0e6f;
    setNormalizeCounts(normalizeCounts, countsPerMillion);
}
private List<LocusScore> getLocusScoresForChr(String chr, int startLocation, int endLocation, int zoom) { List<LocusScore> scores; int chrLength = getChrLength(chr); if (chrLength == 0) { scores = Collections.emptyList(); } else { endLocation = Math.min(endLocation, chrLength); // By definition there are 2^z tiles per chromosome, and 700 bins per tile, where z is the zoom level. // By definition there are 2^z tiles per chromosome, and 700 bins per tile, where z is the zoom level. //int maxZoom = (int) (Math.log(chrLength / 700) / Globals.log2) + 1; //int z = Math.min(zReq, maxZoom); int nTiles = (int) Math.pow(2, zoom); double binSize = Math.max(1, (((double) chrLength) / nTiles) / 700); scores = computeSummaryScores(chr, startLocation, endLocation, binSize); } return scores; }
TDFDataSource tdfSource = new TDFDataSource(tdfReader, 0, tdfPath, genome); List<LocusScore> tdfScores = tdfSource.getSummaryScoresForRange(Globals.CHR_ALL, -1, -1, 0); assertEquals(expHaveChrAll, tdfSource.isChrOrderValid());
public List<LocusScore> getSummaryScores(String querySeq, int startLocation, int endLocation, int zoom) { init(); if (querySeq.equals(Globals.CHR_ALL) && !isChrOrderValid()) { TDFTile wgTile = reader.getWholeGenomeTile(genome, windowFunction); tiles = Arrays.asList(wgTile); scores = getWGRawScores(); } else { scores = getLocusScoresForChr(querySeq, startLocation, endLocation, zoom);
/**
 * Test helper: returns the score of the first summary interval covering
 * {@code pos} in the given TDF file, using a 2 bp window around the position.
 *
 * @param filename path to the TDF file
 * @param chr      chromosome name
 * @param zoom     zoom level to query
 * @param pos      1-based position of interest
 * @param genome   genome used to resolve chromosome coordinates
 * @return score of the first returned interval; throws IndexOutOfBoundsException
 *         if no score covers the range
 */
private float getCount(String filename, String chr, int zoom, int pos, Genome genome) {
    TDFReader reader = TDFReader.getReader(filename);
    // NOTE(review): the original fetched reader.getDataset(chr, zoom, WindowFunction.mean)
    // into an unused local; removed on the assumption the lookup has no required
    // side effect — confirm against TDFReader.
    TDFDataSource dataSource = new TDFDataSource(reader, 0, "test", genome);
    List<LocusScore> scores = dataSource.getSummaryScoresForRange(chr, pos - 1, pos + 1, zoom);
    return scores.get(0).getScore();
}
TDFDataSource ds = new TDFDataSource(reader, 0, "", genome); List<LocusScore> wgScores = ds.getSummaryScores(Globals.CHR_ALL, 0, Integer.MAX_VALUE, 0); for (LocusScore score : wgScores) {
/**
 * Constructs an extended-wig track backed by a TDF file.
 *
 * @param locator resource locator for the TDF file
 * @param genome  genome used to resolve coordinates
 */
public EWigTrack(ResourceLocator locator, Genome genome) {
    super(locator);
    TDFReader reader = TDFReader.getReader(locator.getPath());
    // Track index 4 carries the "Pi" (aggregate) data source.
    tdfSource = new TDFDataSource(reader, 4, "Pi", genome);
    setDataRange(new DataRange(0, 0, 10));
    // One data source per nucleotide, at track indices 0-3.
    // Diamond operator replaces the raw-type "new HashMap()".
    baseSources = new HashMap<>();
    for (int i = 0; i < 4; i++) {
        TDFDataSource src = new TDFDataSource(reader, i, Character.toString(nucleotides[i]), genome);
        baseSources.put(nucleotides[i], src);
    }
}
@Override public void load(ReferenceFrame context) { final String chr = context.getChrName(); int start = (int) context.getOrigin(); int end = (int) context.getEnd(); final int zoom = context.getZoom(); // Expand start and end a bit for panning int w = end - start; start -= w / 2; end += w / 2; List<LocusScore> scores = tdfSource.getSummaryScoresForRange(chr, (int) start, end, zoom); Map<Character, List<LocusScore>> nScores = new HashMap(); for (Character c : nucleotides) { nScores.put(c, baseSources.get(c).getSummaryScoresForRange(chr, (int) start, end, zoom)); } loadedIntervalCache.put(context.getName(), new LoadedInterval(chr, start, end, zoom, scores, nScores)); }
List<LocusScore> cachedScores = getCachedSummaryScores(querySeq, zoom, t, tileWidth); if (cachedScores != null) { for (LocusScore s : cachedScores) {
return false; return checkChromoNameOrder(fileChromos, genome.getLongChromosomeNames()); } else if (genome != null) {
/**
 * Prompts the user for a coverage file and attaches it to this track.
 * Accepts .tdf (TDF data source) and .counts (Goby count archive) files;
 * anything else produces an error message.
 */
public void actionPerformed(ActionEvent e) {
    final IGVPreferences prefs = PreferencesManager.getPreferences();
    File initDirectory = prefs.getLastTrackDirectory();
    File file = FileDialogUtils.chooseFile("Select coverage file", initDirectory, FileDialog.LOAD);
    if (file != null) {
        prefs.setLastTrackDirectory(file.getParentFile());
        String path = file.getAbsolutePath();
        // Original tested path.endsWith(".tdf") twice; duplicate clause removed.
        if (path.endsWith(".tdf")) {
            TDFReader reader = TDFReader.getReader(path);
            TDFDataSource ds = new TDFDataSource(reader, 0, getName() + " coverage", genome);
            setDataSource(ds);
            IGV.getInstance().revalidateTrackPanels();
        } else if (path.endsWith(".counts")) {
            CoverageDataSource ds = new GobyCountArchiveDataSource(file);
            setDataSource(ds);
            IGV.getInstance().revalidateTrackPanels();
        } else {
            MessageUtils.showMessage("Coverage data must be in .tdf format");
        }
    }
}
});
final DataSource dataSource = locator.getPath().endsWith(".counts") ? new GobyCountArchiveDataSource(locator) : new TDFDataSource(reader, trackNumber, heading, genome); DataSourceTrack track = new DataSourceTrack(locator, trackId, trackName, dataSource);
public boolean isLogNormalized() { //return false; return getDataMin() < 0; }
/**
 * Turns count normalization on or off with the default scaling factor of
 * 1e6 (i.e. counts per million).
 *
 * @param normalizeCounts whether counts should be normalized
 */
public void setNormalize(boolean normalizeCounts) {
    final float defaultScaleFactor = 1.0e6f;
    setNormalizeCounts(normalizeCounts, defaultScaleFactor);
}
try { TDFReader reader = TDFReader.getReader(covPath); TDFDataSource ds = new TDFDataSource(reader, 0, dsName + " coverage", genome); covTrack.setDataSource(ds); } catch (Exception e) {