/** Returns the wrapped bucket's raw (unbuffered) input stream; pure delegation. */
@Override
public InputStream getInputStreamUnbuffered() throws IOException {
    return proxy.getInputStreamUnbuffered();
}
/**
 * Opens an unbuffered stream over the underlying bucket.
 * The state flags are checked under the monitor, then the open happens
 * outside the lock — same ordering as before.
 *
 * @throws IOException if this bucket was already migrated or already freed.
 */
@Override
public InputStream getInputStreamUnbuffered() throws IOException {
    synchronized (this) {
        if (migrated) {
            throw new IOException("Already migrated to a RandomAccessBucket");
        }
        if (freed) {
            throw new IOException("Already freed");
        }
    }
    return bucket.getInputStreamUnbuffered();
}
/** Wraps the underlying bucket's raw stream in the padded-encrypted decoding view. */
@Override
public InputStream getInputStreamUnbuffered() throws IOException {
    InputStream raw = bucket.getInputStreamUnbuffered();
    return new PaddedEphemerallyEncryptedInputStream(raw);
}
/** Wraps the underlying bucket's raw stream in this class's stream adapter. */
@Override
public InputStream getInputStreamUnbuffered() throws IOException {
    InputStream raw = underlying.getInputStreamUnbuffered();
    return new MyInputStream(raw);
}
/** Opens the underlying raw stream and layers AES AEAD decryption on top of it. */
@Override
public InputStream getInputStreamUnbuffered() throws IOException {
    return AEADInputStream.createAES(underlying.getInputStreamUnbuffered(), key);
}
/**
 * Opens a stream over the enclosing bucket.
 *
 * @param buffer if true, use the buffered stream; otherwise the raw one.
 * @throws IOException if the bucket's stream cannot be opened.
 */
ReaderBucketInputStream(boolean buffer) throws IOException {
    if (buffer) {
        is = bucket.getInputStream();
    } else {
        is = bucket.getInputStreamUnbuffered();
    }
}
/**
 * Copies the bucket's contents into the caller-supplied buffer.
 * Stops early (returning the shorter count) if the stream ends before
 * {@code bucket.size()} bytes have been delivered.
 *
 * @param bucket the source bucket.
 * @param output the destination buffer; must hold at least {@code bucket.size()} bytes.
 * @return the number of bytes actually copied.
 * @throws IllegalArgumentException if the bucket is larger than the buffer.
 * @throws IOException on read failure.
 */
public static int toByteArray(Bucket bucket, byte[] output) throws IOException {
    long size = bucket.size();
    if (size > output.length)
        throw new IllegalArgumentException("Data does not fit in provided buffer");
    InputStream in = null;
    try {
        in = bucket.getInputStreamUnbuffered();
        int copied = 0;
        while (copied < size) {
            int n = in.read(output, copied, (int) (size - copied));
            if (n == -1)
                break; // premature EOF: report what we got, as before
            copied += n;
        }
        return copied;
    } finally {
        if (in != null)
            in.close();
    }
}
// NOTE(review): fragment — the enclosing method is not fully visible here.
// A zero truncateLength short-circuits (nothing to copy); a negative one is
// normalized to Long.MAX_VALUE, i.e. presumably "no limit" — confirm against
// the caller's contract. The rest of the method (not shown) reads from
// decodedData's unbuffered stream.
if(truncateLength == 0) return 0; if(truncateLength < 0) truncateLength = Long.MAX_VALUE; InputStream is = decodedData.getInputStreamUnbuffered(); try { int bufferSize = BUFFER_SIZE;
/** * Copy from the input stream of <code>src</code> to the output stream of * <code>dest</code>. * * @param src * @param dst * @throws IOException */ public static void copy(Bucket src, Bucket dst) throws IOException { OutputStream out = dst.getOutputStreamUnbuffered(); InputStream in = src.getInputStreamUnbuffered(); ReadableByteChannel readChannel = Channels.newChannel(in); WritableByteChannel writeChannel = Channels.newChannel(out); try { // No benefit to allocateDirect() as we're wrapping streams anyway, and worse, it'd be a memory leak. ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE); while (readChannel.read(buffer) != -1) { buffer.flip(); while(buffer.hasRemaining()) writeChannel.write(buffer); buffer.clear(); } } finally { writeChannel.close(); readChannel.close(); } }
/**
 * Returns true iff the two buckets report the same size and contain
 * byte-for-byte identical data.
 *
 * Fixes the original cleanup: it called close() on possibly-null streams
 * (an NPE in the finally would mask the real IOException when opening a
 * stream failed), and if the first close() threw, the second stream leaked.
 *
 * @param a first bucket.
 * @param b second bucket.
 * @throws IOException if either stream cannot be opened or read.
 */
public static boolean equalBuckets(Bucket a, Bucket b) throws IOException {
    if (a.size() != b.size())
        return false;
    long size = a.size();
    InputStream aIn = null, bIn = null;
    try {
        aIn = a.getInputStreamUnbuffered();
        bIn = b.getInputStreamUnbuffered();
        return FileUtil.equalStreams(aIn, bIn, size);
    } finally {
        try {
            if (aIn != null)
                aIn.close();
        } finally {
            if (bIn != null)
                bIn.close();
        }
    }
}
// NOTE(review): fragment — the enclosing method is not fully visible here.
// Same preamble pattern as the decodedData variant: 0 means nothing to copy,
// negative is normalized to Long.MAX_VALUE (presumably "unlimited" — confirm),
// then the bucket's unbuffered stream is opened for the copy that follows.
if(truncateLength == 0) return 0; if(truncateLength < 0) truncateLength = Long.MAX_VALUE; InputStream is = bucket.getInputStreamUnbuffered(); try { int bufferSize = BUFFER_SIZE;
// NOTE(review): fragment — opens the source bucket's raw stream; the
// surrounding try/finally that should close `in` is not visible here.
in = from.getInputStreamUnbuffered();
/**
 * Computes the SHA-256 digest of the bucket's entire contents.
 *
 * The digest instance is borrowed from the SHA256 pool and always returned,
 * and the stream is always closed, even on error.
 *
 * @param data the bucket to hash.
 * @return the raw digest bytes.
 * @throws EOFException if the stream ends before {@code data.size()} bytes were read.
 * @throws IOException if reading fails or more bytes than expected were delivered.
 */
public static byte[] hash(Bucket data) throws IOException {
    InputStream is = data.getInputStreamUnbuffered();
    try {
        MessageDigest md = SHA256.getMessageDigest();
        try {
            long bucketLength = data.size();
            long bytesRead = 0;
            byte[] buf = new byte[BUFFER_SIZE];
            // Loop until size() bytes have been digested.
            // NOTE(review): the `bucketLength == -1` branch presumably handles
            // an "unknown length" bucket (read until EOF) — confirm size()'s contract.
            while ((bytesRead < bucketLength) || (bucketLength == -1)) {
                int readBytes = is.read(buf);
                if (readBytes < 0)
                    break;
                bytesRead += readBytes;
                if (readBytes > 0)
                    md.update(buf, 0, readBytes);
            }
            // Short read: the stream ended before delivering size() bytes.
            if ((bytesRead < bucketLength) && (bucketLength > 0))
                throw new EOFException();
            // Over-read (should be impossible given the loop bound, but checked anyway).
            if ((bytesRead != bucketLength) && (bucketLength > 0))
                throw new IOException("Read " + bytesRead + " but bucket length " + bucketLength + " on " + data + '!');
            byte[] retval = md.digest();
            return retval;
        } finally {
            // Always return the pooled digest, even on exception.
            SHA256.returnMessageDigest(md);
        }
    } finally {
        if (is != null)
            is.close();
    }
}
/**
 * Read the entire bucket in as a byte array.
 * Not a good idea unless it is very small!
 * Don't call if concurrent writes may be happening.
 *
 * @throws IOException If there was an error reading from the bucket.
 * @throws OutOfMemoryError If it was not possible to allocate enough
 * memory to contain the entire bucket.
 */
public static byte[] toByteArray(Bucket bucket) throws IOException {
    long size = bucket.size();
    if (size > Integer.MAX_VALUE)
        throw new OutOfMemoryError();
    byte[] bytes = new byte[(int) size];
    InputStream raw = bucket.getInputStreamUnbuffered();
    DataInputStream din = null;
    try {
        din = new DataInputStream(raw);
        // readFully either fills the buffer completely or throws EOFException.
        din.readFully(bytes);
    } finally {
        // Closer.close() is null-safe and swallows close() failures.
        Closer.close(din);
        Closer.close(raw);
    }
    return bytes;
}
// NOTE(review): fragment — the enclosing split method is not fully visible.
// Logs the split parameters, allocates the destination bucket array, and opens
// the source stream; the loop that actually fills the buckets follows outside
// this view, as does the cleanup for `is`/`dis`.
Logger.minor(BucketTools.class, "Splitting bucket "+origData+" of size "+length+" into "+bucketCount+" buckets"); Bucket[] buckets = new Bucket[bucketCount]; InputStream is = origData.getInputStreamUnbuffered(); DataInputStream dis = null; try {