/**
 * Human-readable summary of this recoverable upload state: object key,
 * multipart upload id, committed byte count, the per-part etags, and the
 * trailing (not-yet-uploaded) part object and its length.
 */
@Override
public String toString() {
    StringBuilder buf = new StringBuilder(128);
    buf.append("S3Recoverable: ");
    buf.append("key=").append(objectName);
    buf.append(", uploadId=").append(uploadId);
    buf.append(", bytesInParts=").append(numBytesInParts);
    buf.append(", parts=[");
    int num = 0;
    for (PartETag part : parts) {
        if (0 != num++) {
            buf.append(", ");
        }
        buf.append(part.getPartNumber()).append('=').append(part.getETag());
    }
    buf.append("], trailingPart=").append(lastPartObject);
    // BUG FIX: the original appended "trailingPartLen=" with no ", "
    // separator, fusing the label onto the trailingPart value.
    buf.append(", trailingPartLen=").append(lastPartObjectLength);
    return buf.toString();
}
}
/**
 * Serializes this upload state as SEPARATOR-delimited fields:
 * uploadId, filePosition, comma-joined "partNumber/eTag" pairs,
 * partSize, storageClass, contentLength, timestamp.
 */
@Override
public String toString() {
    StringBuilder out = new StringBuilder();
    out.append(_uploadId).append(SEPARATOR);
    out.append(_filePosition.toString()).append(SEPARATOR);
    // Join the recorded part etags with commas; an empty list contributes nothing.
    String delim = "";
    for (PartETag tag : _partETags) {
        out.append(delim);
        delim = ",";
        out.append(String.format("%d/%s", tag.getPartNumber(), tag.getETag()));
    }
    out.append(SEPARATOR);
    out.append(_partSize.toString()).append(SEPARATOR);
    out.append(_storageClass.toString()).append(SEPARATOR);
    out.append(_contentLength.toString()).append(SEPARATOR);
    out.append(_timestamp.toString());
    return out.toString();
}
}
// Encode every part etag with CHARSET and tally the serialized footprint:
// each entry costs its byte length plus two ints — presumably a length
// prefix and the part number; NOTE(review): confirm against the code that
// writes these bytes (not visible in this fragment).
int partEtagBytes = 0; for (int i = 0; i < parts.length; i++) { etags[i] = parts[i].getETag().getBytes(CHARSET); partEtagBytes += etags[i].length + 2 * Integer.BYTES;
// Emit one <Part> element (PartNumber then ETag) into the XML document
// being built in `xml` — the per-part entry of a CompleteMultipartUpload body.
xml.start("Part"); xml.start("PartNumber").value(Integer.toString(partEtag.getPartNumber())).end(); xml.start("ETag").value(partEtag.getETag()).end(); xml.end();
/**
 * Uploads one buffered chunk as part {@code index} of an in-progress
 * multipart upload and verifies the ETag S3 returned against the locally
 * computed MD5 digest of the chunk.
 *
 * @param buffer       bytes to upload; only the first {@code bytesRead} are sent
 * @param bytesRead    number of valid bytes in {@code buffer}
 * @param initResponse result of the InitiateMultipartUpload call (bucket, key, uploadId)
 * @param index        part number assigned to this chunk
 * @return the PartETag returned by S3 for this part
 * @throws Exception if the returned ETag does not match the chunk's MD5
 */
private PartETag uploadChunk(byte[] buffer, int bytesRead, InitiateMultipartUploadResult initResponse, int index) throws Exception {
    byte[] digest = S3Utils.md5(buffer, bytesRead);

    UploadPartRequest uploadRequest = new UploadPartRequest();
    uploadRequest.setBucketName(initResponse.getBucketName());
    uploadRequest.setKey(initResponse.getKey());
    uploadRequest.setUploadId(initResponse.getUploadId());
    uploadRequest.setPartNumber(index);
    uploadRequest.setPartSize(bytesRead);
    uploadRequest.setMd5Digest(S3Utils.toBase64(digest));
    uploadRequest.setInputStream(new ByteArrayInputStream(buffer, 0, bytesRead));

    UploadPartResult result = s3Client.uploadPart(uploadRequest);
    PartETag partETag = result.getPartETag();
    // S3 echoes the part's MD5 as its ETag; mismatch means corruption in transit.
    if (!partETag.getETag().equals(S3Utils.toHex(digest))) {
        throw new Exception("Unable to match MD5 for part " + index);
    }
    return partETag;
}
/**
 * Uploads the single part described by {@code dataPart}, verifies the ETag
 * S3 returned against the part's locally computed MD5, records the resulting
 * PartETag, and increments the uploaded-parts counter when one is attached.
 *
 * @throws BackupRestoreException if the returned ETag does not match the MD5
 */
private Void uploadPart() throws AmazonClientException, BackupRestoreException {
    UploadPartRequest request = new UploadPartRequest();
    request.setBucketName(dataPart.getBucketName());
    request.setKey(dataPart.getS3key());
    request.setUploadId(dataPart.getUploadID());
    request.setPartNumber(dataPart.getPartNo());
    request.setPartSize(dataPart.getPartData().length);
    request.setMd5Digest(SystemUtils.toBase64(dataPart.getMd5()));
    request.setInputStream(new ByteArrayInputStream(dataPart.getPartData()));

    PartETag uploadedTag = client.uploadPart(request).getPartETag();
    // S3 echoes the part's MD5 as its ETag; mismatch means corruption in transit.
    boolean md5Matches = uploadedTag.getETag().equals(SystemUtils.toHex(dataPart.getMd5()));
    if (!md5Matches) {
        throw new BackupRestoreException("Unable to match MD5 for part " + dataPart.getPartNo());
    }
    partETags.add(uploadedTag);
    if (this.partsUploaded != null) {
        this.partsUploaded.incrementAndGet();
    }
    return null;
}
// Trailing log arguments: total recorded parts plus the last part's
// number/etag; the else-branch below logs a resume message instead —
// enclosing method and log template are outside this fragment.
currentState.getPartETags().size(), Integer.toString(lastETag.getPartNumber()), lastETag.getETag()}); } else { getLogger().info("Resuming upload for flowfile='{}' bucket='{}' key='{}' " +
// Emit one <Part> element (PartNumber then ETag) into the XML document
// being built in `xml` — the per-part entry of a CompleteMultipartUpload body.
xml.start("Part"); xml.start("PartNumber").value(Integer.toString(partEtag.getPartNumber())).end(); xml.start("ETag").value(partEtag.getETag()).end(); xml.end();
/**
 * Renders this upload state as a SEPARATOR-delimited string in field order:
 * uploadId, filePosition, the "partNumber/eTag" pairs (comma-separated),
 * partSize, storageClass, contentLength, timestamp.
 */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder();
    sb.append(_uploadId).append(SEPARATOR)
        .append(_filePosition.toString()).append(SEPARATOR);
    boolean needComma = false;
    for (PartETag tag : _partETags) {
        if (needComma) {
            sb.append(",");
        }
        needComma = true;
        sb.append(String.format("%d/%s", tag.getPartNumber(), tag.getETag()));
    }
    sb.append(SEPARATOR)
        .append(_partSize.toString()).append(SEPARATOR)
        .append(_storageClass.toString()).append(SEPARATOR)
        .append(_contentLength.toString()).append(SEPARATOR)
        .append(_timestamp.toString());
    return sb.toString();
}
}
/**
 * Set the commit data.
 *
 * Validates that the supplied parts are numbered consecutively starting at 1
 * and stores their etags in order.
 *
 * @param parts ordered list of etags.
 * @throws ValidationFailure if the data is invalid
 */
public void bindCommitData(List<PartETag> parts) throws ValidationFailure {
    etags = new ArrayList<>(parts.size());
    for (int i = 0; i < parts.size(); i++) {
        PartETag part = parts.get(i);
        int expected = i + 1;
        verify(part.getPartNumber() == expected,
            "Expected part number %s but got %s", expected, part.getPartNumber());
        etags.add(part.getETag());
    }
}
// Queued Callable for one part upload: performs fs.uploadPart(request),
// logs the resulting etag, and — in the finally — always releases the
// buffered block via closeAll, even when the upload throws. The trailing
// "});" closes the enclosing anonymous class / submit call.
@Override public PartETag call() throws Exception { // this is the queued upload operation LOG.debug("Uploading part {} for id '{}'", currentPartNumber, uploadId); // do the upload PartETag partETag; try { partETag = fs.uploadPart(request).getPartETag(); LOG.debug("Completed upload of {} to part {}", block, partETag.getETag()); } finally { // close the stream and block closeAll(LOG, uploadData, block); } return partETag; } });
// Queued Callable for one part upload: performs fs.uploadPart(request),
// logs the resulting etag, and — in the finally — always releases the
// buffered block via closeAll, even when the upload throws. The trailing
// "});" closes the enclosing anonymous class / submit call.
@Override public PartETag call() throws Exception { // this is the queued upload operation LOG.debug("Uploading part {} for id '{}'", currentPartNumber, uploadId); // do the upload PartETag partETag; try { partETag = fs.uploadPart(request).getPartETag(); LOG.debug("Completed upload of {} to part {}", block, partETag.getETag()); } finally { // close the stream and block closeAll(LOG, uploadData, block); } return partETag; } });
// Continuation of a part upload (the call it chains from is outside this
// fragment): take the PartETag from the UploadPartResult, log it together
// with the stream statistics, and count the uploaded part.
.getPartETag(); LOG.debug("Completed upload of {} to part {}", block, partETag.getETag()); LOG.debug("Stream statistics of {}", statistics); partsUploaded++;
// Emit one <Part> element (PartNumber then ETag) into the XML document
// being built in `xml` — the per-part entry of a CompleteMultipartUpload body.
xml.start("Part"); xml.start("PartNumber").value(Integer.toString(partEtag.getPartNumber())).end(); xml.start("ETag").value(partEtag.getETag()).end(); xml.end();
// Emit one <Part> element (PartNumber then ETag) into the XML document
// being built in `xml` — the per-part entry of a CompleteMultipartUpload body.
xml.start("Part"); xml.start("PartNumber").value(Integer.toString(partEtag.getPartNumber())).end(); xml.start("ETag").value(partEtag.getETag()).end(); xml.end();
// Trailing log arguments: total recorded parts plus the last part's
// number/etag; the else-branch below logs a resume message instead —
// enclosing method and log template are outside this fragment.
currentState.getPartETags().size(), Integer.toString(lastETag.getPartNumber()), lastETag.getETag()}); } else { getLogger().info("Resuming upload for flowfile='{}' bucket='{}' key='{}' " +