/**
 * Closes the current local buffer file and hands it off for asynchronous
 * upload as the next part of the multipart upload. No-op when no buffer
 * file is open. The buffer fields are detached before the upload is
 * submitted so a fresh buffer can be created for subsequent writes.
 *
 * @throws IOException if closing the local output stream fails
 */
private void uploadPart() throws IOException {
  if (mFile == null) {
    return;
  }
  // Flush all buffered bytes to disk before the uploader reads the file.
  mLocalOutputStream.close();
  int nextPartNumber = mPartNumber.getAndIncrement();
  File partFile = new File(mFile.getPath());
  // Detach the buffer; a new one is created lazily for further writes.
  mFile = null;
  mLocalOutputStream = null;
  UploadPartRequest request = new UploadPartRequest()
      .withBucketName(mBucketName)
      .withKey(mKey)
      .withUploadId(mUploadId)
      .withPartNumber(nextPartNumber)
      .withFile(partFile)
      .withPartSize(partFile.length());
  execUpload(request);
}
int partNumber = mPartNumber.getAndIncrement(); final UploadPartRequest uploadRequest = new UploadPartRequest() .withBucketName(mBucketName) .withKey(mKey) .withUploadId(mUploadId)
.withBucketName(bucketName) .withKey(uploadFileName) .withInputStream(chunk)
/**
 * Creates and returns an upload-part request corresponding to a ciphertext
 * file upon a part-creation event.
 *
 * @param event the part-creation event of the ciphertext file
 * @param part the created ciphertext file corresponding to the upload-part
 * @return the upload-part request for the given ciphertext part
 */
protected UploadPartRequest newUploadPartRequest(PartCreationEvent event,
        final File part) {
    final UploadPartRequest reqUploadPart = new UploadPartRequest()
        .withBucketName(req.getBucketName())
        .withFile(part)
        .withKey(req.getKey())
        .withPartNumber(event.getPartNumber())
        .withPartSize(part.length())
        .withLastPart(event.isLastPart())
        .withUploadId(uploadId)
        .withObjectMetadata(req.getUploadPartMetadata());
    return reqUploadPart;
}
(currentState.getContentLength() - currentState.getFilePosition())); UploadPartRequest uploadRequest = new UploadPartRequest() .withBucketName(bucket) .withKey(key) .withUploadId(currentState.getUploadId())
if (wrappedStream != null) { req = new UploadPartRequest() .withBucketName(bucketName) .withKey(key) .withUploadId(uploadId) } else { req = new UploadPartRequest() .withBucketName(bucketName) .withKey(key) .withUploadId(uploadId)
c.getInt(c .getColumnIndexOrThrow(TransferTable.COLUMN_MAIN_UPLOAD_ID))) .withBucketName( c.getString(c .getColumnIndexOrThrow(TransferTable.COLUMN_BUCKET_NAME)))
partSize = Math.min(partSize, (contentLength - filePosition)); UploadPartRequest uploadRequest = new UploadPartRequest() .withBucketName(bucketName) .withKey(key) .withUploadId(initResponse.getUploadId())
/**
 * Builds the {@link UploadPartRequest} for one ciphertext part produced by
 * a part-creation event, carrying over bucket, key, metadata and upload id
 * from the originating request.
 *
 * @param event the part-creation event of the ciphertext file
 * @param part the created ciphertext file corresponding to the upload-part
 */
protected UploadPartRequest newUploadPartRequest(PartCreationEvent event,
        final File part) {
    return new UploadPartRequest()
        .withBucketName(req.getBucketName())
        .withFile(part)
        .withKey(req.getKey())
        .withPartNumber(event.getPartNumber())
        .withPartSize(part.length())
        .withLastPart(event.isLastPart())
        .withUploadId(uploadId)
        .withObjectMetadata(req.getUploadPartMetadata());
}
if (putObjectRequest.getInputStream() != null) { request = new UploadPartRequest() .withBucketName(bucketName) .withKey(key) .withUploadId(uploadId) } else { request = new UploadPartRequest() .withBucketName(bucketName) .withKey(key) .withUploadId(uploadId)
upr.withUploadId(uploadId).withPartNumber(1) .withPartSize("multipartContent".getBytes(StringUtils.UTF8).length) .withBucketName("test-bucket123456") .withKey("multi-key") .withInputStream(multipartContent);
/**
 * Builds a {@link UploadPartRequest} for the next part of this upload,
 * atomically advancing the shared part counter (each call consumes one
 * part number).
 */
public UploadPartRequest getUploadPartRequest() {
    final int nextPartNumber = this.partCount.incrementAndGet();
    return new UploadPartRequest()
        .withBucketName(this.bucketName)
        .withKey(this.key)
        .withPartNumber(nextPartNumber)
        .withUploadId(this.uploadId);
}
/**
 * Creates the {@link UploadPartRequest} for the next part of this multipart
 * upload. Note the side effect: the shared part counter is incremented on
 * every call, so each invocation consumes one part number.
 */
public UploadPartRequest getUploadPartRequest() { return new UploadPartRequest().withBucketName(this.bucketName).withKey(this.key) .withPartNumber(this.partCount.incrementAndGet()).withUploadId(this.uploadId); }
/**
 * Uploads one part from the given buffer and records the returned ETag.
 * The part number is derived from how many parts have completed so far
 * (one ETag is collected per successful part).
 *
 * @param inputStream the part's data
 * @param partSize number of bytes to read from the stream for this part
 */
public void uploadPart(ByteArrayInputStream inputStream, int partSize) {
    final int partNumber = partETags.size() + 1;
    final UploadPartRequest uploadRequest = new UploadPartRequest()
        .withBucketName(bucket)
        .withKey(key)
        .withUploadId(uploadId)
        .withInputStream(inputStream)
        .withPartNumber(partNumber)
        .withPartSize(partSize)
        .withGeneralProgressListener(progressListener);
    log.debug("Uploading part {} for id '{}'", partNumber, uploadId);
    final UploadPartResult result = s3.uploadPart(uploadRequest);
    partETags.add(result.getPartETag());
}
/**
 * Uploads a single part of the multipart upload and appends the returned
 * part ETag to {@code partETags}. The next part number is inferred from the
 * number of ETags collected so far, so parts must be uploaded sequentially.
 */
public void uploadPart(ByteArrayInputStream inputStream, int partSize) { int currentPartNumber = partETags.size() + 1; UploadPartRequest request = new UploadPartRequest() .withBucketName(bucket) .withKey(key) .withUploadId(uploadId) .withInputStream(inputStream) .withPartNumber(currentPartNumber) .withPartSize(partSize) .withGeneralProgressListener(progressListener); log.debug("Uploading part {} for id '{}'", currentPartNumber, uploadId); partETags.add(s3.uploadPart(request).getPartETag()); }
/**
 * Synchronously uploads the supplied stream part and collects its ETag.
 * The request is passed through {@code customiseUploadPartRequest} before
 * being sent, so subclasses can adjust it.
 *
 * @param part the buffered part to send
 */
private void uploadStreamPart(StreamPart part) {
    log.debug("{}: Uploading {}", this, part);
    final UploadPartRequest request = new UploadPartRequest()
        .withBucketName(bucketName)
        .withKey(putKey)
        .withUploadId(uploadId)
        .withPartNumber(part.getPartNumber())
        .withInputStream(part.getInputStream())
        .withPartSize(part.size());
    customiseUploadPartRequest(request);
    partETags.add(s3Client.uploadPart(request).getPartETag());
    log.info("{}: Finished uploading {}", this, part);
}
@Override public UploadPartResult call() throws Exception { try { return this.amazonS3.uploadPart(new UploadPartRequest().withBucketName(this.bucketName). withKey(this.key). withUploadId(this.uploadId). withInputStream(new ByteArrayInputStream(this.content)). withPartNumber(this.partNumber). withLastPart(this.last). withPartSize(this.contentLength)); } finally { //Release the memory, as the callable may still live inside the CompletionService which would cause // an exhaustive memory usage this.content = null; } } }
/**
 * Uploads the buffered {@code content} as a single multipart-upload part and
 * returns the result. The buffer is nulled out afterwards — even on failure —
 * so a completed callable does not pin the part's bytes in memory.
 */
@Override public UploadPartResult call() throws Exception { try { return this.amazonS3.uploadPart(new UploadPartRequest().withBucketName(this.bucketName). withKey(this.key). withUploadId(this.uploadId). withInputStream(new ByteArrayInputStream(this.content)). withPartNumber(this.partNumber). withLastPart(this.last). withPartSize(this.contentLength)); } finally { //Release the memory, as the callable may still live inside the CompletionService which would cause // an exhaustive memory usage this.content = null; } } }
/**
 * Uploads one part and returns its ETag, adding the part's size to the
 * running byte counter on success.
 *
 * @param uploadId the multipart upload this part belongs to
 * @param part supplies the part's payload, number, size and MD5 digest
 * @return the ETag S3 returned for the uploaded part
 */
PartETag upload(String uploadId, S3Part part) {
    final Object[] logParams = {part.getSize(), part.getNumber(), bucket, key};
    log.info("Uploading {} bytes for part {} to s3://{}/{}.", logParams);
    final UploadPartRequest request = new UploadPartRequest()
        .withUploadId(uploadId)
        .withBucketName(bucket)
        .withKey(key)
        .withPartNumber(part.getNumber())
        .withPartSize(part.getSize())
        .withMD5Digest(part.getMd5())
        .withInputStream(part.getInputStream());
    final PartETag partETag = s3.uploadPart(request).getPartETag();
    log.info("Uploaded {} bytes for part {} to s3://{}/{}.", logParams);
    bytes += part.getSize();
    return partETag;
}
/**
 * Uploads a single chunk of a blob as one part of an in-progress multipart
 * upload.
 *
 * @param blobStore provides the S3 client used for the upload
 * @param bucketName the target bucket
 * @param blobName the object key being assembled
 * @param uploadId the id of the multipart upload in progress
 * @param is the chunk's data
 * @param length number of bytes in this chunk
 * @param lastPart whether this chunk is the final part of the upload
 * @return the ETag returned by S3 for this part
 * @throws AmazonS3Exception if the part upload fails
 */
protected PartETag doUploadMultipart(S3BlobStore blobStore, String bucketName, String blobName,
        String uploadId, InputStream is, int length, boolean lastPart) throws AmazonS3Exception {
    final UploadPartRequest request = new UploadPartRequest()
        .withBucketName(bucketName)
        .withKey(blobName)
        .withUploadId(uploadId)
        .withPartNumber(multipartChunks)
        .withInputStream(is)
        .withPartSize(length)
        .withLastPart(lastPart);
    return blobStore.client().uploadPart(request).getPartETag();
}