/**
 * Verifies that {@code resetCount()} returns the number of bytes counted so far
 * and restarts the count from zero for subsequent reads.
 */
@Test
public void testResetting() throws Exception {
    final String sample = "A piece of text";
    final byte[] data = sample.getBytes();
    final ByteArrayInputStream source = new ByteArrayInputStream(data);
    final CountingInputStream counter = new CountingInputStream(source);
    final byte[] buffer = new byte[data.length];

    // First read: the count must match the number of bytes actually read.
    int bytesRead = counter.read(buffer, 0, 5);
    assertEquals(bytesRead, counter.getCount());

    // Resetting hands back the accumulated count and zeroes the counter,
    // so the next read's return value equals the value reset() reported.
    final int previousCount = counter.resetCount();
    bytesRead = counter.read(buffer, 6, 5);
    assertEquals(bytesRead, previousCount);

    counter.close();
}
/**
 * Returns the number of bytes that have passed through this stream, as an
 * {@code int}.
 * <p>
 * NOTE: From v1.3 this method throws an ArithmeticException if the count
 * exceeds {@link Integer#MAX_VALUE}. See {@link #getByteCount()} for a
 * variant that returns a <code>long</code>.
 *
 * @return the number of bytes accumulated
 * @throws ArithmeticException if the byte count is too large for an int
 */
public int getCount() {
    final long byteCount = getByteCount();
    if (byteCount <= Integer.MAX_VALUE) {
        return (int) byteCount;
    }
    throw new ArithmeticException("The byte count " + byteCount
            + " is too large to be converted to an int");
}
@Test public void testLargeFiles_IO84() throws Exception { final long size = (long)Integer.MAX_VALUE + (long)1; final NullInputStream mock = new NullInputStream(size); final CountingInputStream cis = new CountingInputStream(mock); final OutputStream out = new NullOutputStream(); // Test integer methods IOUtils.copyLarge(cis, out); try { cis.getCount(); fail("Expected getCount() to throw an ArithmeticException"); } catch (final ArithmeticException ae) { // expected result } try { cis.resetCount(); fail("Expected resetCount() to throw an ArithmeticException"); } catch (final ArithmeticException ae) { // expected result } mock.close(); // Test long methods IOUtils.copyLarge(cis, out); assertEquals("getByteCount()", size, cis.getByteCount()); assertEquals("resetByteCount()", size, cis.resetByteCount()); }
/**
 * Downloads and unpacks {@code archive} into {@code dir}, dispatching on the
 * URL's extension (.zip vs. gzipped tar).
 * <p>
 * The stream is wrapped in a {@link CountingInputStream} so that, on failure,
 * the exception message can report how many bytes were consumed before the
 * error — useful for diagnosing truncated downloads.
 *
 * @throws IOException wrapping the original failure with byte-count context
 */
@Override
public Void invoke(File dir, VirtualChannel channel) throws IOException, InterruptedException {
    // Manage the counting wrapper with try-with-resources as well, so it is
    // closed deterministically even if unzip/readFromTar throws.
    try (InputStream in = archive.openStream();
         CountingInputStream cis = new CountingInputStream(in)) {
        try {
            if (archive.toExternalForm().endsWith(".zip")) {
                unzip(dir, cis);
            } else {
                readFromTar("input stream", dir, GZIP.extract(cis));
            }
        } catch (IOException x) {
            throw new IOException(String.format("Failed to unpack %s (%d bytes read)", archive, cis.getByteCount()), x);
        }
    }
    return null;
}
}
sha512 != null ? new DigestOutputStream(_out, sha512) : _out, sha256) : _out, sha1) : _out; InputStream in = con.getInputStream(); CountingInputStream cin = new CountingInputStream(in)) { while ((len = cin.read(buf)) >= 0) { out.write(buf,0,len); job.status = job.new Installing(total == -1 ? -1 : cin.getCount() * 100 / total);
CountingInputStream instream = null; try { instream = new CountingInputStream(conn.getInputStream()); if (gzipped) { in = new BufferedInputStream(new GZIPInputStream(instream)); res.setBodySize(instream.getByteCount()); instream.close();
/**
 * Wires up the stream stack shared by both sides of the connection.
 *
 * @param is raw input stream from the peer
 * @param os raw output stream to the peer
 */
protected EitherSide(InputStream is, OutputStream os) {
    // Count every byte read from the peer.
    cis = new CountingInputStream(is);
    // NOTE(review): presumably records recent input for post-mortem
    // diagnostics — confirm against FlightRecorderInputStream.
    flightRecorder = new FlightRecorderInputStream(cis);
    // Typed read/write views over the wrapped streams.
    dis = new DataInputStream(flightRecorder);
    dos = new DataOutputStream(os);
}
/**
 * Stores the document's length (in bytes) into {@code field} of the metadata.
 * <p>
 * A {@link CachedInputStream} already knows its length; any other stream is
 * drained through a counting wrapper to measure it. Note that draining
 * consumes the supplied stream.
 *
 * @throws ImporterHandlerException if draining the stream fails
 * @throws IllegalArgumentException if {@code field} is blank
 */
@Override
protected void tagApplicableDocument(String reference, InputStream document,
        ImporterMetadata metadata, boolean parsed) throws ImporterHandlerException {
    if (StringUtils.isBlank(field)) {
        throw new IllegalArgumentException("\"field\" cannot be empty.");
    }

    final int length;
    if (document instanceof CachedInputStream) {
        // Cached streams report their size without being consumed.
        length = ((CachedInputStream) document).length();
    } else {
        // Measure by copying everything into a bit bucket.
        final CountingInputStream counting = new CountingInputStream(document);
        try {
            IOUtils.copy(counting, new NullOutputStream());
        } catch (IOException ex) {
            throw new ImporterHandlerException(ex);
        }
        length = counting.getCount();
    }

    if (overwrite) {
        metadata.setInt(field, length);
    } else {
        metadata.addInt(field, length);
    }
}
long contentLength = httpUrlConn.getContentLength(); long bytesRead = (cin != null ? cin.getByteCount() : 0); cin.close(); } catch (IOException e) { httpUrlConn.disconnect();
private InputStream dataInputStream(long offset, long size) throws IOException { InputStream is; // use LZMA stream if the bundle is compressed if (bundle.header().compressed()) { // create initial input stream if required if (lzma == null) { lzma = lzmaInputStream(); } // recreate stream if the offset is behind long lzmaOffset = lzma.getByteCount(); if (lzmaOffset > offset) { lzma.close(); lzma = lzmaInputStream(); } // skip forward if required if (lzmaOffset < offset) { lzma.skip(offset - lzmaOffset); } is = lzma; } else { in.position(bundle.header().headerSize() + offset); is = in.stream(); } return new BoundedInputStream(is, size); }
/**
 * Reads the next chunk header from {@code in}, recording the stream position
 * (taken from {@code countIn}) at which the header starts.
 * <p>
 * A clean EOF before any bytes of the header are read yields a synthetic
 * {@code TYPE_NONE} header rather than an exception.
 *
 * @throws IOException on any read failure other than EOF at the first field
 */
public static Header read(ExtDataInput in, CountingInputStream countIn) throws IOException {
    final int start = countIn.getCount();

    final short type;
    try {
        type = in.readShort();
    } catch (EOFException ex) {
        // End of input: report a terminator header at the current position.
        return new Header(TYPE_NONE, 0, 0, countIn.getCount());
    }

    // NOTE(review): field names follow the apparent chunk-header layout
    // (size fields after the type) — confirm against the Header class.
    final short headerSize = in.readShort();
    final int chunkSize = in.readInt();
    return new Header(type, headerSize, chunkSize, start);
}
private File download(URL src) throws IOException { URLConnection con; if ((proxyConfig != null) && (proxyConfig.name != null)) { con = proxyConfig.openUrl(src); } else { con = src.openConnection(); } int total = con.getContentLength(); CountingInputStream in = new CountingInputStream(con.getInputStream()); byte[] buf = new byte[8192]; int len; File dst = getDestination(); File tmp = new File(dst.getPath() + ".tmp"); OutputStream out = new FileOutputStream(tmp); try { while ((len = in.read(buf)) >= 0) { out.write(buf, 0, len); //job.status = job.new Installing(total == -1 ? -1 : in.getCount() * 100 / total); } } catch (IOException e) { throw new IOException("Failed to load " + src + " to " + tmp, e); } finally { IOUtils.closeQuietly(out); IOUtils.closeQuietly(in); } if (total != -1 && total != tmp.length()) { throw new IOException("Inconsistent file length: expected " + total + " but only got " + tmp.length()); } return tmp; }
/**
 * Closes the optional LZMA stream and the underlying channel/stream.
 * <p>
 * Uses try/finally so that {@code in} is closed even if closing the LZMA
 * stream throws — the original code leaked {@code in} in that case.
 *
 * @throws IOException from either close; the LZMA failure takes precedence
 */
@Override
public void close() throws IOException {
    closed = true;
    try {
        if (lzma != null) {
            lzma.close();
        }
    } finally {
        in.close();
    }
}
}
public LeipzigRecordReader(FileSplit split, JobConf jobConf) throws IOException { start = split.getStart(); end = start + split.getLength(); posInByteStream = start; posInCharStream = 0; // Open the file and seek to the start of the split Path file = split.getPath(); FileSystem fs = file.getFileSystem(jobConf); InputStream is = fs.open(split.getPath()); countingIs = new CountingInputStream(is); countingIs.skip(start); recordScanner = new Scanner(countingIs); recordScanner.useDelimiter(RECORD_DELIMITER); reader = new BufferedReader(new InputStreamReader(countingIs, FILE_ENCODING)); // Start with the first valid record after offset "start" while (!nextRecordIsValid && hasNext()) skipToNextRecord(reader); }
/**
 * Resets the byte count to 0 and returns the value it held, as an
 * {@code int}.
 * <p>
 * NOTE: From v1.3 this method throws an ArithmeticException if the count
 * exceeds {@link Integer#MAX_VALUE}. See {@link #resetByteCount()} for a
 * variant that returns a <code>long</code>.
 *
 * @return the count previous to resetting
 * @throws ArithmeticException if the byte count is too large for an int
 */
public int resetCount() {
    final long previous = resetByteCount();
    if (previous <= Integer.MAX_VALUE) {
        return (int) previous;
    }
    throw new ArithmeticException("The byte count " + previous
            + " is too large to be converted to an int");
}
/**
 * Locates the {@code .rsrc} section among the PE section headers, positions
 * the stream at its raw data, and reads the full section.
 * <p>
 * FIXES: both {@code skip()} and {@code read(byte[])} may process fewer
 * bytes than requested; the original single calls could silently truncate
 * the resource section. Both are now looped to completion.
 *
 * @return the parsed resource section, or {@code null} if none exists
 * @throws IOException if the stream ends before the section is fully read
 */
private RsrcSection readResourceSection(CountingInputStream executableInputStream, SectionHeader[] sectionHeaders) throws IOException {
    SectionHeader rsrcSectionHeader = null;
    for (SectionHeader sectionHeader : sectionHeaders) {
        // NOTE(review): new String(byte[]) uses the platform default charset;
        // section names are ASCII so this normally works, but an explicit
        // charset would be safer — confirm and fix with a matching import.
        if (".rsrc\u0000\u0000\u0000".equals(new String(sectionHeader.name))) {
            rsrcSectionHeader = sectionHeader;
        }
    }
    if (rsrcSectionHeader == null) {
        return null;
    }

    // Advance from the current position to the section's raw-data offset.
    long numberToSkip = rsrcSectionHeader.pointerToRawData.getUnsignedValue() - executableInputStream.getCount();
    while (numberToSkip > 0) {
        long skipped = executableInputStream.skip(numberToSkip);
        if (skipped <= 0) {
            throw new IOException("Unexpected end of stream while seeking resource section");
        }
        numberToSkip -= skipped;
    }

    // Read the section in full; a single read() call may return early.
    byte[] rsrcSection = new byte[(int) rsrcSectionHeader.sizeOfRawData.getUnsignedValue()];
    int filled = 0;
    while (filled < rsrcSection.length) {
        int n = executableInputStream.read(rsrcSection, filled, rsrcSection.length - filled);
        if (n < 0) {
            throw new IOException("Unexpected end of stream while reading resource section");
        }
        filled += n;
    }
    return new RsrcSection(rsrcSection);
}
/**
 * Consumes the sequence of type-spec chunks and their following type chunks
 * from the current position, registering each with the resource table.
 * <p>
 * NOTE(review): relies on nextChunk()/mCountIn/mHeader state maintained by
 * sibling methods not visible here — the chunk-type loop structure must not
 * be reordered.
 */
private void readTableTypeSpec() throws AndrolibException, IOException {
    mTypeSpec = readSingleTableTypeSpec();
    addTypeSpec(mTypeSpec);
    int type = nextChunk().type;
    ResTypeSpec resTypeSpec;

    // Additional consecutive TYPE_SPEC chunks before any TYPE chunk imply
    // the sparse-resource layout.
    while (type == Header.TYPE_SPEC_TYPE) {
        resTypeSpec = readSingleTableTypeSpec();
        addTypeSpec(resTypeSpec);
        type = nextChunk().type;

        // We've detected sparse resources, lets record this so we can rebuild in that same format (sparse/not)
        // with aapt2. aapt1 will ignore this.
        if (! mResTable.getSparseResources()) {
            mResTable.setSparseResources(true);
        }
    }

    while (type == Header.TYPE_TYPE) {
        readTableType();

        // skip "TYPE 8 chunks" and/or padding data at the end of this chunk
        if (mCountIn.getCount() < mHeader.endPosition) {
            LOGGER.warning("Unknown data detected. Skipping: " + (mHeader.endPosition - mCountIn.getCount()) + " byte(s)");
            mCountIn.skip(mHeader.endPosition - mCountIn.getCount());
        }

        type = nextChunk().type;
        // Fill in spec entries that had no corresponding TYPE entry.
        addMissingResSpecs();
    }
}
/**
 * Reads a single byte via the superclass, then enforces the configured
 * size limit through {@code checkSize()}.
 *
 * @return the byte read, or -1 at end of stream
 * @throws IOException propagated from the underlying read or the size check
 */
@Override
public int read() throws IOException {
    final int b = super.read();
    checkSize();
    return b;
}
/**
 * Skips up to {@code n} bytes by delegating to the wrapped stream.
 * <p>
 * NOTE(review): any byte counting for skipped bytes must happen elsewhere
 * (e.g. inside the wrapped stream) — this delegate does not count; confirm.
 *
 * @param n maximum number of bytes to skip
 * @return the number of bytes actually skipped (may be less than {@code n})
 * @throws IOException propagated from the underlying stream
 */
public long skip(long n) throws IOException {
    return is.skip(n);
}
try { digest.reset(); is = new CountingInputStream(resource.streamContent()); os = new DigestOutputStream(new NullOutputStream(), digest); IOUtils.copy(is, os); long readLength = is.getByteCount(); if (is != null) { try { is.close(); } catch (IOException e) { log.warn("Failed to close inputstream for: "+ resource.getId());