/** Opens a write stream by delegating directly to the wrapped proxy bucket. */
@Override
public OutputStream getOutputStream() throws IOException {
    return proxy.getOutputStream();
}
/**
 * Opens a write stream on the backing bucket, refusing once this wrapper
 * has been migrated or freed.
 *
 * @return a fresh OutputStream from the underlying bucket
 * @throws IOException if the data was already migrated to a
 *         RandomAccessBucket, or the bucket was already freed
 */
@Override
public OutputStream getOutputStream() throws IOException {
    // The state flags are checked under the lock; the stream itself is
    // opened outside it, same as the original ordering.
    synchronized (this) {
        if (migrated)
            throw new IOException("Already migrated to a RandomAccessBucket");
        if (freed)
            throw new IOException("Already freed");
    }
    return bucket.getOutputStream();
}
/**
 * Opens the single permitted write stream, resetting the recorded size.
 * Only one OutputStream may be open at a time.
 *
 * @throws IOException if an OutputStream is already open for this bucket
 */
@Override
public OutputStream getOutputStream() throws IOException {
    final OutputStream raw;
    synchronized (this) {
        if (outputStreamOpen)
            throw new IOException("Already have an OutputStream for "+this);
        // Open and flag atomically so a racing caller sees the stream as taken.
        raw = underlying.getOutputStream();
        outputStreamOpen = true;
        size = 0;
    }
    return new MyOutputStream(raw);
}
public InputStream checksumReaderWithLength(InputStream dis, BucketFactory bf, long maxLength) throws IOException, ChecksumFailedException { // IMHO it is better to implement this with copying, because then we don't start // constructing objects from bad data... long length = new DataInputStream(dis).readLong(); if(length < 0 || length > maxLength) throw new IOException("Bad length"); final Bucket bucket = bf.makeBucket(-1); OutputStream os = bucket.getOutputStream(); copyAndStripChecksum(dis, os, length); os.close(); return ReadBucketAndFreeInputStream.create(bucket); }
/**
 * Returns the cached blob output stream, lazily creating it on first use and
 * writing the binary blob header exactly once.
 *
 * @throws BinaryBlobAlreadyClosedException if the blob has been finalized
 * @throws IOException on bucket or stream failure
 */
private DataOutputStream getOutputStream() throws IOException, BinaryBlobAlreadyClosedException {
    if (_finalized)
        throw new BinaryBlobAlreadyClosedException("Already finalized (getting final data) on "+this);
    if (_stream_cache == null) {
        // Lazily pick the destination: either the single fixed bucket, or a
        // freshly allocated one that is tracked in _buckets.
        final OutputStream target;
        if (_isSingleBucket) {
            target = _out.getOutputStream();
        } else {
            Bucket fresh = _bf.makeBucket(-1);
            _buckets.add(fresh);
            target = fresh.getOutputStream();
        }
        _stream_cache = new DataOutputStream(target);
    }
    if (!_started) {
        // Header goes out exactly once, the first time anyone writes.
        BinaryBlob.writeBinaryBlobHeader(_stream_cache);
        _started = true;
    }
    return _stream_cache;
}
/**
 * Create a stream which buffers writes in temporary space; a non-aborted
 * close() then emits the length (minus the offset) followed by the buffered
 * data to {@code out}.
 *
 * @param out final destination stream
 * @param bf factory for the temporary buffer bucket
 * @param offset amount subtracted from the written length
 * @param closeUnderlying whether close() also closes {@code out}
 */
public static PrependLengthOutputStream create(OutputStream out, BucketFactory bf, int offset, boolean closeUnderlying) throws IOException {
    Bucket scratch = bf.makeBucket(-1);
    OutputStream scratchStream = scratch.getOutputStream();
    return new PrependLengthOutputStream(scratchStream, scratch, out, offset, closeUnderlying);
}
/**
 * Copies the accumulated blob buckets into {@code bucket}, optionally
 * appending the end-of-blob marker. No-op if nothing has been written.
 *
 * @throws BinaryBlobAlreadyClosedException if the blob is already finalized
 * @throws IOException on copy or write failure
 */
private void getSnapshot(Bucket bucket, boolean addEndmarker) throws IOException, BinaryBlobAlreadyClosedException {
    if (_buckets.isEmpty())
        return;
    if (_finalized)
        throw new BinaryBlobAlreadyClosedException("Already closed (getting final data snapshot)");
    OutputStream out = bucket.getOutputStream();
    try {
        for (Bucket chunk : _buckets)
            BucketTools.copyTo(chunk, out, -1);
        if (addEndmarker) {
            DataOutputStream dout = new DataOutputStream(out);
            BinaryBlob.writeEndBlob(dout);
            dout.flush();
        }
    } finally {
        // Always close the snapshot stream, even on a partial copy.
        out.close();
    }
}
/**
 * Serializes {@code pluginStore} (as SFS) into the bucket for the given
 * identifier. A null store still creates/truncates the bucket, writing nothing.
 *
 * @param storeIdentifier plugin store name used to locate the bucket
 * @param isEncrypted whether the backing bucket is encrypted
 * @param backup whether to write to the backup bucket
 * @throws IOException on bucket or write failure
 */
private void writePluginStoreInner(String storeIdentifier, PluginStore pluginStore, boolean isEncrypted, boolean backup) throws IOException {
    Bucket bucket = makePluginStoreBucket(storeIdentifier, isEncrypted, backup);
    // try-with-resources: unlike the previous explicit finally block, a failure
    // in writeTo() is no longer masked by a secondary exception from close().
    try (OutputStream os = bucket.getOutputStream()) {
        if(pluginStore != null) {
            pluginStore.exportStoreAsSFS().writeTo(os);
        }
    }
}
/**
 * This one could actually be rather large, since it includes the listing of
 * which blocks go in which cross-segments ...
 *
 * @param bf factory for the settings bucket
 * @return a bucket holding the checksummed cross-segment settings, or a
 *         NullBucket when there are no cross-segments
 * @throws IOException on bucket or write failure
 */
private Bucket encodeCrossSegmentSettings(BucketFactory bf) throws IOException {
    if (crossSegments == null)
        return new NullBucket();
    Bucket bucket = bf.makeBucket(-1);
    // FIX: previously the streams leaked if writeFixedSettings() threw.
    // try-with-resources closes dos then os — the same order as before.
    try (OutputStream os = bucket.getOutputStream();
         DataOutputStream dos = new DataOutputStream(
             checker.checksumWriterWithLength(os, new ArrayBucketFactory()))) {
        for (SplitFileInserterCrossSegmentStorage segment : crossSegments) {
            segment.writeFixedSettings(dos);
        }
    }
    return bucket;
}
@Override public Bucket compress(Bucket data, BucketFactory bf, long maxReadLength, long maxWriteLength) throws IOException, CompressionOutputSizeException { Bucket output = bf.makeBucket(maxWriteLength); InputStream is = null; OutputStream os = null; try { is = data.getInputStream(); os = output.getOutputStream(); compress(is, os, maxReadLength, maxWriteLength); // It is essential that the close()'s throw if there is any problem. is.close(); is = null; os.close(); os = null; } finally { Closer.close(is); Closer.close(os); } return output; }
@Override public Bucket compress(Bucket data, BucketFactory bf, long maxReadLength, long maxWriteLength) throws IOException, CompressionOutputSizeException { Bucket output = bf.makeBucket(maxWriteLength); InputStream is = null; OutputStream os = null; try { is = data.getInputStream(); os = output.getOutputStream(); compress(is, os, maxReadLength, maxWriteLength); // It is essential that the close()'s throw if there is any problem. is.close(); is = null; os.close(); os = null; } finally { Closer.close(is); Closer.close(os); } return output; }
/**
 * Bridges the bucket-based filter entry point onto the stream-based one,
 * closing both streams best-effort afterwards.
 *
 * @return the filter status reported by the stream-level filter
 * @throws UnsafeContentTypeException if the content is rejected
 * @throws IOException on stream failure
 */
private FilterStatus applyFilter(Bucket input, Bucket output, String mimeType, FilterOperation operation, NodeClientCore core) throws UnsafeContentTypeException, IOException {
    InputStream in = null;
    OutputStream out = null;
    try {
        in = input.getInputStream();
        out = output.getOutputStream();
        return applyFilter(in, out, mimeType, operation, core);
    } finally {
        // Closer swallows close() errors so the filter's own result/exception wins.
        Closer.close(in);
        Closer.close(out);
    }
}
@Override public Bucket compress(Bucket data, BucketFactory bf, long maxReadLength, long maxWriteLength) throws IOException, CompressionOutputSizeException { Bucket output; InputStream is = null; OutputStream os = null; try { output = bf.makeBucket(maxWriteLength); is = data.getInputStream(); os = output.getOutputStream(); if(logMINOR) Logger.minor(this, "Compressing "+data+" size "+data.size()+" to new bucket "+output); compress(is, os, maxReadLength, maxWriteLength); // It is essential that the close()'s throw if there is any problem. is.close(); is = null; os.close(); os = null; } finally { Closer.close(is); Closer.close(os); } return output; }
/**
 * Streams the fetched data out of {@code storage} into a scratch bucket and
 * asserts it is byte-identical to the original test data.
 */
public void verifyOutput(SplitFileFetcherStorage storage) throws IOException {
    StreamGenerator generator = storage.streamGenerator();
    Bucket written = bf.makeBucket(-1);
    OutputStream sink = written.getOutputStream();
    generator.writeTo(sink, null);
    sink.close();
    assertTrue(BucketTools.equalBuckets(originalData, written));
    written.free();
}
/**
 * Round-trips a tiny CSS snippet through ContentFilter.filter() with the
 * given maybeCharset hint and checks the detected charset, MIME type, and
 * that the content survives unchanged.
 */
private void testUseMaybeCharset(String charset) throws URISyntaxException, UnsafeContentTypeException, IOException {
    final String original = "h2 { color: red;}";
    byte[] encoded = original.getBytes(charset);
    SimpleReadOnlyArrayBucket source = new SimpleReadOnlyArrayBucket(encoded);
    Bucket sink = new ArrayBucket();
    InputStream in = source.getInputStream();
    OutputStream out = sink.getOutputStream();
    FilterStatus status = ContentFilter.filter(in, out, "text/css",
            new URI("/CHK@OR904t6ylZOwoobMJRmSn7HsPGefHSP7zAjoLyenSPw,x2EzszO4Kqot8akqmKYXJbkD-fSj6noOVGB-K2YisZ4,AAIC--8/1-works.html"),
            null, null, charset);
    in.close();
    out.close();
    assertEquals(charset, status.charset);
    assertEquals("text/css", status.mimeType);
    String roundTripped = new String(BucketTools.toByteArray(sink), charset);
    assertTrue("ContentFilter.filter() returns \""+roundTripped+"\" not original \""+original+"\" with maybeCharset \""+charset+"\"", original.equals(roundTripped));
}
public void testReadEmpty() throws IOException { Bucket bucket = makeBucket(3); try { assertEquals("Size-0", 0, bucket.size()); OutputStream os = bucket.getOutputStream(); os.close(); // Read byte[] InputStream is = bucket.getInputStream(); byte[] data = new byte[10]; int read = is.read(data, 0, 10); is.close(); assertEquals("Read-Empty", -1, read); } finally { freeBucket(bucket); } }
/**
 * Writes the storage's decoded stream into a scratch bucket and asserts it
 * matches {@code originalData} byte-for-byte; frees the scratch bucket after.
 */
private void verifyOutput(SplitFileFetcherStorage storage, Bucket originalData) throws IOException {
    StreamGenerator generator = storage.streamGenerator();
    Bucket produced = smallBucketFactory.makeBucket(-1);
    OutputStream sink = produced.getOutputStream();
    generator.writeTo(sink, null);
    sink.close();
    assertTrue(BucketTools.equalBuckets(originalData, produced));
    produced.free();
}
/**
 * Runs a Bucket through the GIF content filter.
 *
 * @param input bucket holding the candidate GIF data
 * @return a new bucket holding the filtered output
 * @throws AssertionError on failure
 */
private static Bucket filterGIF(Bucket input) throws IOException {
    ContentDataFilter gifFilter = new GIFFilter();
    Bucket filtered = new ArrayBucket();
    // Both streams are closed automatically, even if the filter rejects the data.
    try (InputStream source = input.getInputStream();
         OutputStream sink = filtered.getOutputStream()) {
        gifFilter.readFilter(source, sink, "", null, null);
    }
    return filtered;
}
/**
 * Encrypts {@code input} into {@code output} with a random AES key, checks
 * the ciphertext grew (header/MAC overhead), then decrypts into
 * {@code decoded} and checks it matches the plaintext exactly.
 *
 * @param keysize AES key length in bytes
 */
public void checkSuccessfulRoundTrip(int keysize, Random random, Bucket input, Bucket output, Bucket decoded) throws IOException {
    byte[] key = new byte[keysize];
    random.nextBytes(key);
    OutputStream os = output.getOutputStream();
    AEADOutputStream cos = AEADOutputStream.innerCreateAES(os, key, random);
    BucketTools.copyTo(input, cos, -1);
    cos.close();
    assertTrue(output.size() > input.size());
    InputStream is = output.getInputStream();
    AEADInputStream cis = AEADInputStream.createAES(is, key);
    BucketTools.copyFrom(decoded, cis, -1);
    // FIX: the decrypting stream (and the underlying bucket stream) was
    // never closed, leaking the handle for the rest of the test run.
    cis.close();
    assertEquals(decoded.size(), input.size());
    assertTrue(BucketTools.equalBuckets(decoded, input));
}
/**
 * Same round trip as {@code checkSuccessfulRoundTrip}, but writes and reads
 * through wrappers that randomly split the transfers into short chunks, to
 * exercise partial-read/partial-write handling in the AEAD streams.
 *
 * @param keysize AES key length in bytes
 */
public void checkSuccessfulRoundTripRandomSplits(int keysize, Random random, Bucket input, Bucket output, Bucket decoded) throws IOException {
    byte[] key = new byte[keysize];
    random.nextBytes(key);
    OutputStream os = output.getOutputStream();
    AEADOutputStream cos = AEADOutputStream.innerCreateAES(os, key, random);
    BucketTools.copyTo(input, new RandomShortWriteOutputStream(cos, random), -1);
    cos.close();
    assertTrue(output.size() > input.size());
    InputStream is = output.getInputStream();
    AEADInputStream cis = AEADInputStream.createAES(is, key);
    BucketTools.copyFrom(decoded, new RandomShortReadInputStream(cis, random), -1);
    // FIX: the decrypting stream was never closed, leaking the underlying
    // bucket stream; close it once the copy is complete.
    cis.close();
    assertEquals(decoded.size(), input.size());
    assertTrue(BucketTools.equalBuckets(decoded, input));
}