/**
 * Treats the first line of the input as the header: caches it on this codec
 * and returns it verbatim.
 *
 * @param lineIterator iterator positioned at the start of the input
 * @return the first line of the input (also stored in {@code headerLine})
 */
@Override
public Object readActualHeader(final LineIterator lineIterator) {
    headerLine = lineIterator.next();
    return this.headerLine;
}
/**
 * Reads up to {@code size} lines from the underlying iterator into a new list.
 * Returns fewer than {@code size} elements when the iterator is exhausted first.
 *
 * @param size maximum number of lines to read (also used to presize the list)
 * @return a freshly allocated batch of at most {@code size} lines
 */
@Override
public List<CharSequence> read(int size) {
    final List<CharSequence> batch = new ArrayList<>(size);
    while (batch.size() < size && iter.hasNext()) {
        batch.add(iter.next());
    }
    return batch;
}
/**
 * Decodes a single line of input into an object of type {@code T}.
 *
 * @param s one line of text to parse
 * @return the decoded object
 * @see AsciiFeatureCodec#decode(htsjdk.tribble.readers.LineIterator)
 */
public abstract T decode(String s);
/**
 * Decodes a single line of input into an object of type {@code T}.
 *
 * @param s one line of text to parse
 * @return the decoded object
 * @see AsciiFeatureCodec#decode(htsjdk.tribble.readers.LineIterator)
 */
public abstract T decode(String s);
/**
 * Decodes a single line of input into an object of type {@code T}.
 *
 * @param s one line of text to parse
 * @return the decoded object
 * @see AsciiFeatureCodec#decode(htsjdk.tribble.readers.LineIterator)
 */
public abstract T decode(String s);
/**
 * Reads the single header line from the input, splits it on tabs, and
 * delegates to {@code readHeader(String[])}.
 *
 * @param reader iterator positioned at the header line
 * @return whatever {@code readHeader(tokens)} produces
 * @throws DataLoadException if any error occurs while reading or parsing the header
 */
@Override
public Object readActualHeader(LineIterator reader) {
    try {
        final String headerLine = reader.next();
        final String[] tokens = ParsingUtils.TAB_PATTERN.split(headerLine);
        return readHeader(tokens);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        throw new DataLoadException("Error reading header: " + e.getMessage(), this.path);
    }
}
}
public List<CharBuffer> next(long blockSize) { long cnt = 0L; List<CharBuffer> next = new LinkedList<>(); // linked list faster at creation time while (iter.hasNext() && cnt < blockSize) { String line = iter.next(); CharBuffer buff = CharBuffer.wrap(line.toCharArray()); //FIXME! Avoid char array copy next.add(buff); cnt += buff.length(); } return next; }
/**
 * Scans forward to the first non-blank line, splits it on {@code delimiterRegex},
 * and returns the resulting tokens as the header.
 *
 * @param source      line iterator positioned at the start of the input
 * @param lineCounter optional 1-element out-parameter; if non-null, slot 0 receives
 *                    the number of lines consumed (including the header line itself)
 * @return the header tokens
 * @throws IOException declared for callers; not thrown directly in this body
 * @throws IllegalArgumentException if the input contains no non-blank line
 */
private static String[] readHeader(final LineIterator source, int[] lineCounter) throws IOException {
    String[] header = null;
    int numLines = 0;
    // find the first non-blank line and treat it as the header.
    // NOTE(review): despite the original comment ("non-empty and not a comment"),
    // comment lines are NOT skipped here -- a leading comment line would be parsed
    // as the header. Confirm whether comment-skipping was intended.
    while(source.hasNext()) {
        final String line = source.next();
        numLines++;
        if ( line.trim().isEmpty() ) {
            continue;
        }
        // parse the header
        header = line.split(delimiterRegex);
        break;
    }
    // check that we found the header
    if ( header == null ) {
        throw new IllegalArgumentException("No header in " + source);
    }
    if(lineCounter != null) {
        lineCounter[0] = numLines;
    }
    return header;
}
/**
 * Decodes and returns the next variant from the line iterator, or null when
 * the input is exhausted.
 *
 * @return the next decoded VariantContext, or null at end of input
 */
@Override
protected VariantContext advance() {
    if (!this.lineIterator.hasNext()) {
        return null;
    }
    return this.codec.decode(this.lineIterator.next());
}
/**
 * Decodes and returns the next variant from the line iterator, or null when
 * the input is exhausted.
 *
 * @return the next decoded VariantContext, or null at end of input
 */
@Override
protected VariantContext advance() {
    final boolean hasMore = this.lineIterator.hasNext();
    return hasMore ? this.codec.decode(this.lineIterator.next()) : null;
}
/**
 * Decodes only the genomic location of a RefSeq record from the next line.
 * Comment lines (starting with '#') yield null; malformed locations are
 * reported as a warning and also yield null.
 *
 * @param lineIterator iterator positioned at the line to decode
 * @return a RefSeqFeature carrying only location info, or null for comments /
 *         negative-length records
 * @throws TribbleException if the line has fewer than 3 tab-separated columns
 * @throws UserException.MalformedFile if the coordinates are not parseable integers
 */
@Override
public Feature decodeLoc(final LineIterator lineIterator) {
    final String line = lineIterator.next();
    if (line.startsWith("#")) {
        return null;
    }
    final String[] fields = line.split("\t");
    if (fields.length < 3) {
        throw new TribbleException("RefSeq (decodeLoc) : Unable to parse line -> " + line + ", we expected at least 3 columns, we saw " + fields.length);
    }
    final String contig_name = fields[2];
    try {
        // NOTE(review): columns 4/5 are accessed even though only >= 3 columns were
        // verified above -- confirm inputs always carry at least 6 columns.
        final int start = Integer.parseInt(fields[4]) + 1;
        final int stop = Integer.parseInt(fields[5]);
        return new RefSeqFeature(genomeLocParser.createGenomeLoc(contig_name, start, stop));
    } catch (UserException.MalformedGenomeLoc e) {
        Utils.warnUser("RefSeq file is potentially incorrect, as some transcripts or exons have a negative length (" + fields[2] + ")");
        return null;
    } catch (NumberFormatException e) {
        throw new UserException.MalformedFile("Could not parse location from line: " + line);
    }
}
/**
 * Consumes leading comment/header lines, parsing any "#track" / "##track" line
 * into {@code trackProperties}, and stops at the first data line.
 *
 * Bug fix: the original tested {@code startsWith("#")} BEFORE the track-line
 * tests, making the track branch unreachable (every "#track" line also starts
 * with "#"); it also never consumed the track line, which would have looped
 * forever had the branch been reached. The track tests now run first and the
 * line is consumed after parsing.
 *
 * @param reader iterator positioned at the start of the input
 * @return the parsed track properties (may be null if no track line was seen)
 * @throws CodecLineParsingException if any error occurs while parsing the header
 */
@Override
public Object readActualHeader(LineIterator reader) {
    String line;
    try {
        while (reader.hasNext()) {
            line = reader.peek();
            if (line.startsWith("#track") || line.startsWith("##track")) {
                trackProperties = new TrackProperties();
                ParsingUtils.parseTrackLine(line, trackProperties);
                reader.next(); // consume the track line so the loop advances
            } else if (line.startsWith("#")) {
                reader.next(); // skip other comment lines
            } else {
                break; // first data line: header is done
            }
        }
        return trackProperties;
    } catch (Exception e) {
        throw new CodecLineParsingException("Error parsing header: " + e.getMessage(), e);
    }
}
}
/**
 * Verifies that a LineIteratorImpl built over a PositionalBufferedStream
 * yields exactly the same lines as a plain BufferedReader over the same file.
 * Fix: the two FileInputStreams were previously leaked; both sides are now
 * closed via try-with-resources.
 */
@Test
public void testLineReaderIterator_streamConstructor() throws Exception {
    final File filePath = new File(TestUtils.DATA_DIR + "gwas/smallp.gwas");
    try (final PositionalBufferedStream stream = new PositionalBufferedStream(new FileInputStream(filePath));
         final BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath)))) {
        final LineIterator lineIterator = new LineIteratorImpl(new SynchronousLineReader(stream));
        while (lineIterator.hasNext()) {
            Assert.assertEquals(lineIterator.next(), br.readLine());
        }
        // the reader must be exhausted too, i.e. the iterator saw every line
        Assert.assertNull(br.readLine());
    }
}
/**
 * Verifies that a LineIteratorImpl built over an InputStreamReader wrapping a
 * PositionalBufferedStream yields exactly the same lines as a plain
 * BufferedReader over the same file. Fix: the two FileInputStreams were
 * previously leaked; both sides are now closed via try-with-resources.
 */
@Test
public void testLineReaderIterator_readerConstructor() throws Exception {
    final File filePath = new File(TestUtils.DATA_DIR + "gwas/smallp.gwas");
    try (final InputStreamReader isr = new InputStreamReader(new PositionalBufferedStream(new FileInputStream(filePath)));
         final BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(filePath)))) {
        final LineIterator lineIterator = new LineIteratorImpl(new SynchronousLineReader(isr));
        while (lineIterator.hasNext()) {
            Assert.assertEquals(lineIterator.next(), br.readLine());
        }
        // the reader must be exhausted too, i.e. the iterator saw every line
        Assert.assertNull(br.readLine());
    }
}
}
/**
 * Consumes leading '#'-prefixed header lines, feeding each to
 * {@code readHeaderLine}, then attaches the accumulated track properties to
 * the (lazily created) header object and returns it.
 *
 * @param reader iterator positioned at the start of the input
 * @return the populated FeatureFileHeader
 * @throws CodecLineParsingException if any error occurs while parsing the header
 */
public Object readActualHeader(LineIterator reader) {
    if (header == null) {
        header = new FeatureFileHeader();
    }
    int nLines = 0;
    try {
        while (reader.hasNext()) {
            final String line = reader.peek();
            if (!line.startsWith("#")) {
                break; // first data line: header is done
            }
            nLines++;
            readHeaderLine(line);
            reader.next();
        }
        header.setTrackProperties(trackProperties);
        return header;
    } catch (Exception e) {
        throw new CodecLineParsingException("Error parsing header: " + e.getMessage(), e);
    }
}
/**
 * test reading the header off of the file. We take in the file, read off the first line,
 * close the reader, and then ask the HapMap decoder for the header with a new reader. These should
 * be equal.
 *
 * Fixes: the second LineIterator was created inline and never closed (leak);
 * assertTrue(equals) replaced with assertEquals for informative failure messages.
 */
@Test
public void testReadHeader() {
    RawHapMapCodec codec = new RawHapMapCodec();
    final LineIterator reader = getLineIterator();
    final LineIterator headerReader = getLineIterator();
    try {
        String header = reader.next();
        Assert.assertEquals(codec.readActualHeader(headerReader), header);
    } finally {
        codec.close(reader);
        codec.close(headerReader);
    }
}
@Test public void testGetSampleNames() { // setup the record for reading our 500 line file (499 records, 1 header line) RawHapMapCodec codec = new RawHapMapCodec(); final LineIterator reader = getLineIterator(); String line; try { codec.readHeader(reader); line = reader.next(); RawHapMapFeature feature = (RawHapMapFeature) codec.decode(line); Assert.assertEquals(feature.getSampleIDs().length, 87); } catch (IOException e) { Assert.fail("IOException " + e.getMessage()); } finally { codec.close(reader); } }
@Test public void testReadCorrectNumberOfRecords() { // setup the record for reading our 500 line file (499 records, 1 header line) RawHapMapCodec codec = new RawHapMapCodec(); final LineIterator reader = getLineIterator(); int count = 0; try { codec.readHeader(reader); while (reader.hasNext()) { codec.decode(reader.next()); ++count; } } catch (IOException e) { Assert.fail("IOException " + e.getMessage()); } finally { codec.close(reader); } Assert.assertEquals(count,499); }
/**
 * Returns a list of VariantContext records from a VCF file.
 *
 * Fixes: the FileInputStream was previously never closed (leak) -- now managed
 * with try-with-resources; assertFalse(line == null) replaced with the
 * idiomatic assertNotNull.
 *
 * @param vcfFile VCF file
 * @throws IOException if the file does not exist or can not be opened
 * @return list of VariantContext records
 */
private static List<VariantContext> getVariantContexts(final File vcfFile) throws IOException {
    final VCFCodec codec = new VCFCodec();
    try (final FileInputStream s = new FileInputStream(vcfFile)) {
        final LineIterator lineIteratorVCF = codec.makeSourceFromStream(new PositionalBufferedStream(s));
        codec.readHeader(lineIteratorVCF);
        final List<VariantContext> VCs = new ArrayList<>();
        while (lineIteratorVCF.hasNext()) {
            final String line = lineIteratorVCF.next();
            Assert.assertNotNull(line);
            VCs.add(codec.decode(line));
        }
        return VCs;
    }
}
@Test public void HCTestDanglingTailMergingForDeletions() throws IOException { final String base = String.format("-T HaplotypeCaller --disableDithering --pcr_indel_model NONE -R %s -I %s", REF, NA12878_BAM) + " --no_cmdline_in_header -o %s -L 20:10130740-10130800 --allowNonUniqueKmersInRef"; final WalkerTestSpec spec = new WalkerTestSpec(base, 1, Arrays.asList("")); final File outputVCF = executeTest("HCTestDanglingTailMergingForDeletions", spec).getFirst().get(0); // confirm that the call is the correct one final VCFCodec codec = new VCFCodec(); final FileInputStream s = new FileInputStream(outputVCF); final LineIterator lineIterator = codec.makeSourceFromStream(new PositionalBufferedStream(s)); codec.readHeader(lineIterator); final String line = lineIterator.next(); Assert.assertFalse(line == null); final VariantContext vc = codec.decode(line); Assert.assertTrue(vc.isBiallelic()); Assert.assertTrue(vc.getReference().basesMatch("ATGTATG")); Assert.assertTrue(vc.getAlternateAllele(0).basesMatch("A")); }