Refine search
/**
 * Downloads this S3 object's content and copies it into the given output stream.
 *
 * @param output the stream the object content is written to; not closed by this method
 * @param requestMetricCollector metric collector attached to the GetObject request
 * @return the metadata of the downloaded object
 * @throws SdkClientException if the transfer fails mid-stream
 */
private ObjectMetadata downloadTo0(final OutputStream output, RequestMetricCollector requestMetricCollector) {
    GetObjectRequest req = new GetObjectRequest(getBucketName(), getKey())
            .withRequestMetricCollector(requestMetricCollector);
    S3Object s3Object = getAmazonS3Client().getObject(req);
    S3ObjectInputStream objectContent = s3Object.getObjectContent();
    try {
        byte[] buffer = new byte[1024 * 10];
        int bytesRead;
        while ((bytesRead = objectContent.read(buffer)) > -1) {
            output.write(buffer, 0, bytesRead);
        }
    } catch (IOException ioe) {
        // Abort (rather than close) so the HTTP connection is released
        // without draining the remaining unread bytes.
        objectContent.abort();
        throw new SdkClientException("Unable to transfer content from Amazon S3 to the output stream", ioe);
    } finally {
        try {
            objectContent.close();
        } catch (IOException ignored) {
            // Best-effort close; a close failure does not affect the completed transfer.
        }
    }
    return s3Object.getObjectMetadata();
}
S3ObjectInputStream objectContent = s3object.getObjectContent(); InputStream adjustedRangeContents = new AdjustedRangeInputStream(objectContent, range[0], range[1]); s3object.setObjectContent(new S3ObjectInputStream(adjustedRangeContents, objectContent.getHttpRequest())); return s3object; } catch (IOException e) { throw new SdkClientException("Error adjusting output to desired byte range: " + e.getMessage());
/**
 * Creates an S3ObjectInputStream wrapping the given content stream.
 *
 * @param in the raw object-content stream from the service
 * @param httpRequest the originating HTTP request, retained so the
 *        underlying connection can be aborted later
 * @param collectMetrics when {@code true}, the stream is wrapped in a
 *        MetricFilterInputStream that reports
 *        S3ServiceMetric.S3DownloadThroughput; when {@code false}, the
 *        stream is used as-is
 */
public S3ObjectInputStream(
        InputStream in,
        HttpRequestBase httpRequest,
        boolean collectMetrics) {
    super(collectMetrics
            ? new MetricFilterInputStream(S3ServiceMetric.S3DownloadThroughput, in)
            : in);
    this.httpRequest = httpRequest;
}
/**
 * Sets the input stream containing this object's contents. The stream is
 * wrapped in an {@link S3ObjectInputStream} that carries over the current
 * content's HTTP request (when present) so the connection can still be
 * aborted.
 *
 * @param objectContent
 *            The input stream containing this object's contents.
 * @see S3Object#getObjectContent()
 */
public void setObjectContent(InputStream objectContent) {
    HttpRequestBase request =
            (this.objectContent == null) ? null : this.objectContent.getHttpRequest();
    setObjectContent(new S3ObjectInputStream(objectContent, request));
}
try { S3Object o = s3.getObject(bucket_name, key_name); S3ObjectInputStream s3is = o.getObjectContent(); FileOutputStream fos = new FileOutputStream(new File(key_name)); byte[] read_buf = new byte[1024]; int read_len = 0; while ((read_len = s3is.read(read_buf)) > 0) { fos.write(read_buf, 0, read_len); s3is.close(); fos.close(); } catch (AmazonServiceException e) {
throw new SdkClientException("testing"); throw ace; } else { LOG.info("Retry the download of object " + s3Object.getKey() + " (bucket " + s3Object.getBucketName() + ")", ace); hasRetried = true; s3Object.getObjectContent().abort();
try { long start_ns = System.nanoTime(); // Blocking i/o call timing - without counting repeats s = getObjectForKey(k, skip, v._max).getObjectContent(); } finally { try { if( s != null ) s.close(); } catch( IOException e ) {}
/**
 * Reads a range of an asset's content from S3.
 *
 * <p>When the whole standard-sized chunk is requested (offset 0, length FIVE_MB)
 * the raw S3 content stream is returned directly. Otherwise the requested range
 * is buffered into memory and returned as a ByteArrayInputStream.
 *
 * @param appId application that owns the entity
 * @param entity entity whose asset is read
 * @param offset byte offset into the object at which to start reading
 * @param length number of bytes to read
 * @return a stream over the requested byte range
 */
@Override
public InputStream read( UUID appId, Entity entity, long offset, long length ) throws Exception {
    S3Object object = getS3Client().getObject( bucketName, AssetUtils.buildAssetKey( appId, entity ) );

    if ( offset == 0 && length == FIVE_MB ) {
        return object.getObjectContent();
    }

    // BUG FIX: the previous code passed a null, never-allocated array to
    // read(byte[], int, int) (guaranteed NPE) and misused the stream offset
    // as an array offset. Skip to the offset and read the range fully instead.
    int rangeLength = Ints.checkedCast( length );
    byte[] data = new byte[rangeLength];
    S3ObjectInputStream content = object.getObjectContent();
    try {
        long remainingToSkip = offset;
        while ( remainingToSkip > 0 ) {
            long skipped = content.skip( remainingToSkip );
            if ( skipped <= 0 ) {
                break; // stream shorter than requested offset
            }
            remainingToSkip -= skipped;
        }
        // A single read() may return fewer bytes than requested; loop until
        // the buffer is full or the stream ends.
        int filled = 0;
        while ( filled < rangeLength ) {
            int n = content.read( data, filled, rangeLength - filled );
            if ( n == -1 ) {
                break;
            }
            filled += n;
        }
    } finally {
        content.close();
    }
    return new ByteArrayInputStream( data );
}
/**
 * Cancels this download: the worker future is cancelled, any in-flight
 * object content is aborted, and the transfer state moves to Canceled.
 *
 * @throws IOException
 */
public synchronized void abort() throws IOException {
    monitor.getFuture().cancel(true);
    final S3Object current = s3Object;
    if (current != null) {
        current.getObjectContent().abort();
    }
    setState(TransferState.Canceled);
}
s3Object.setBucketName(getObjectRequest.getBucketName()); s3Object.setKey(getObjectRequest.getKey()); InputStream is = s3Object.getObjectContent(); HttpRequestBase httpRequest = s3Object.getObjectContent().getHttpRequest(); new S3AbortableInputStream(is, httpRequest, s3Object.getObjectMetadata().getContentLength()); s3Object.setObjectContent(new S3ObjectInputStream(abortableInputStream, httpRequest, false)); return s3Object; } catch (AmazonS3Exception ase) {
/**
 * Returns an updated object where the object content input stream contains
 * the decrypted contents.
 *
 * @param object The object whose contents are to be decrypted.
 * @param instruction The instruction that will be used to decrypt the
 *            object data.
 * @return The updated object where the object content input stream contains
 *         the decrypted contents.
 */
public static S3Object decryptObjectUsingInstruction(S3Object object, EncryptionInstruction instruction) {
    final S3ObjectInputStream cipherText = object.getObjectContent();
    final InputStream plainText =
            new RepeatableCipherInputStream(cipherText, instruction.getCipherFactory());
    // Carry the original HTTP request forward so the wrapped stream remains abortable.
    object.setObjectContent(new S3ObjectInputStream(plainText, cipherText.getHttpRequest()));
    return object;
}
appendData); } catch (SdkClientException ace) { if (!ace.isRetryable()) { s3Object.getObjectContent().abort(); throw ace; if (ace.getCause() instanceof SocketException || ace.getCause() instanceof SSLProtocolException) { throw ace; } else { needRetry = true; if ( hasRetried ) { s3Object.getObjectContent().abort(); throw ace; } else { LOG.info("Retry the download of object " + s3Object.getKey() + " (bucket " + s3Object.getBucketName() + ")", ace); hasRetried = true;
final byte[] buffer = new byte[DEFAULT_BYTE_SIZE]; int bytesRead; while ((bytesRead = s3Object.getObjectContent().read(buffer)) > -1) { outputStream.write(buffer, 0, bytesRead); } finally { closeQuietly(outputStream, log); closeQuietly(s3Object.getObjectContent(), log); return s3Object.getObjectMetadata();
/** Verifies that S3Link.downloadTo(OutputStream) copies the object bytes verbatim. */
@Test
public void testDownloadToOutstream() {
    EasyMock.reset(mockS3);
    EasyMock.expect(mockS3.getRegion()).andReturn(Region.US_Standard);

    byte[] expectedBytes = "MyData".getBytes(StringUtils.UTF8);
    S3Object mockObject = EasyMock.createMock(S3Object.class);
    S3ObjectInputStream contentStream =
            new S3ObjectInputStream(new ByteArrayInputStream(expectedBytes), null);

    EasyMock.expect(mockS3.getObject(anyObject(GetObjectRequest.class))).andReturn(mockObject);
    EasyMock.expect(mockObject.getObjectContent()).andReturn(contentStream);
    EasyMock.expect(mockObject.getObjectMetadata()).andReturn(new ObjectMetadata());
    EasyMock.replay(mockS3, mockObject);

    mapper.getS3ClientCache().useClient(mockS3);
    S3Link link = mapper.createS3Link(bucket, key);
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    link.downloadTo(sink);

    EasyMock.verify(mockS3, mockObject);
    assertArrayEquals(expectedBytes, sink.toByteArray());
}
/**
 * Builds a mock S3Object whose key and content come from the file named
 * {@code key} under {@code dir}.
 */
S3Object getFile(String key, Path dir) throws FileNotFoundException {
    File backingFile = new File(dir.toString(), key);
    S3Object mockObject = mock(S3Object.class);
    when(mockObject.getKey()).thenReturn(backingFile.getName());
    when(mockObject.getObjectContent())
            .thenReturn(new S3ObjectInputStream(new FileInputStream(backingFile), null));
    return mockObject;
}
/**
 * Verifies that S3ObjectResponseHandler maps the redirect-location and
 * requester-charged headers onto the result and exposes the response body
 * through the object content stream.
 */
@Test
public void testHandle() throws Exception {
    ByteArrayInputStream bais = new ByteArrayInputStream("content".getBytes(StringUtils.UTF8));
    HttpResponse response = new HttpResponse.Builder().content(bais)
            .header(Headers.REDIRECT_LOCATION, "redirect")
            .header(Headers.REQUESTER_CHARGED_HEADER, "true").build();

    S3ObjectResponseHandler handler = new S3ObjectResponseHandler();
    AmazonWebServiceResponse<S3Object> object = handler.handle(response);
    S3Object content = object.getResult();
    // FIX: JUnit asserts take (expected, actual); the arguments were swapped,
    // which produces misleading failure messages.
    assertEquals("redirect", content.getRedirectLocation());
    assertTrue(content.isRequesterCharged());

    S3ObjectInputStream is = content.getObjectContent();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    int curr;
    while ((curr = is.read()) != -1) {
        baos.write(curr);
    }
    assertArrayEquals("content".getBytes(StringUtils.UTF8), baos.toByteArray());
}
/** * @see com.amazonaws.http.HttpResponseHandler#handle(com.amazonaws.http.HttpResponse) */ public AmazonWebServiceResponse<S3Object> handle(HttpResponse response) throws Exception { /* * TODO: It'd be nice to set the bucket name and key here, but the information isn't easy to * pull out of the response/request currently. */ S3Object object = new S3Object(); AmazonWebServiceResponse<S3Object> awsResponse = parseResponseMetadata(response); if (response.getHeaders().get(Headers.REDIRECT_LOCATION) != null) { object.setRedirectLocation(response.getHeaders().get(Headers.REDIRECT_LOCATION)); } // If the requester is charged when downloading a object from an // Requester Pays bucket, then this header is set. if (response.getHeaders().get(Headers.REQUESTER_CHARGED_HEADER) != null) { object.setRequesterCharged(true); } if (response.getHeaders().get(Headers.S3_TAGGING_COUNT) != null) { object.setTaggingCount(Integer.parseInt(response.getHeaders().get(Headers.S3_TAGGING_COUNT))); } ObjectMetadata metadata = object.getObjectMetadata(); populateObjectMetadata(response, metadata); object.setObjectContent(new S3ObjectInputStream(response.getContent(), response.getHttpRequest())); awsResponse.setResult(object); return awsResponse; }
public boolean readBufferFromFile() throws IOException { int n = s3ObjectInputStream.read( bb ); if ( n == -1 ) { return false; } else { // adjust the highest used position... // bufferSize = endBuffer + n; // Store the data in our byte array // for ( int i = 0; i < n; i++ ) { byteBuffer[endBuffer + i] = bb[i]; } return true; } }
/**
 * ensureEverythingIsReleased as part of close process.
 *
 * Idempotent teardown: closes the S3 object, aborts the content stream, then
 * closes both safely. Each step is isolated in its own catch (Throwable) so a
 * failure in one release step never prevents the others from running.
 * NOTE(review): close() is attempted before abort() here — the ordering looks
 * deliberate (abort as fallback), but confirm against the SDK version in use.
 */
public void ensureEverythingIsReleased() {
    // Already torn down — nothing to do (makes repeated close() calls safe).
    if (this.isClosed) {
        return;
    }
    try {
        // ensure that the S3 Object is closed properly.
        this.s3Object.close();
    } catch (Throwable ex) {
        LOGGER.warn("Problem Closing the S3Object[{}]: {}", s3Object.getKey(), ex.getMessage());
    }
    try {
        // Abort the stream
        this.is.abort();
    } catch (Throwable ex) {
        LOGGER.warn("Problem Aborting S3Object[{}]: {}", s3Object.getKey(), ex.getMessage());
    }
    // close the input Stream Safely
    closeSafely(this.is);
    // This corrects the issue with Open HTTP connections
    closeSafely(this.s3Object);
    this.isClosed = true;
}
/** * Closes the current stream. */ // TODO(calvin): Investigate if close instead of abort will bring performance benefits. private void closeStream() { if (mIn == null) { return; } mIn.abort(); mIn = null; } }