// Fragment: renders an in-memory BufferedImage to PNG bytes and uploads them to S3.
// NOTE(review): "..." is an elided initializer in the original snippet — not valid Java.
BufferedImage image = ... // image produced elsewhere; placeholder in the original
// Encode the image as PNG into an in-memory buffer.
ByteArrayOutputStream os = new ByteArrayOutputStream();
ImageIO.write(image, "png", os);
byte[] buffer = os.toByteArray();
InputStream is = new ByteArrayInputStream(buffer);
// Client built from credentials loaded out of a properties source.
AmazonS3 s3 = new AmazonS3Client(new PropertiesCredentials(cred));
// Content-Length must be set explicitly for stream uploads; otherwise the SDK
// has to buffer the whole stream to size the request.
ObjectMetadata meta = new ObjectMetadata();
meta.setContentLength(buffer.length);
// NOTE(review): "folder" looks like a hard-coded bucket name — confirm against caller.
s3.putObject(new PutObjectRequest("folder", key, is, meta));
/**
 * Retrieves the object stored under the given bucket/key and returns its
 * content decoded as a string.
 *
 * @param bucketName name of the bucket holding the object; must not be null
 * @param key key of the object to read; must not be null
 * @return the object's content as a string
 * @throws AmazonServiceException if S3 rejects the request
 * @throws SdkClientException if the object content cannot be streamed
 */
@Override
public String getObjectAsString(String bucketName, String key)
        throws AmazonServiceException, SdkClientException {
    rejectNull(bucketName, "Bucket name must be provided");
    rejectNull(key, "Object key must be provided");

    S3Object object = getObject(bucketName, key);
    try {
        return IOUtils.toString(object.getObjectContent());
    } catch (IOException e) {
        // Preserve the underlying cause — the original dropped it, hiding the
        // actual stream failure from callers and logs.
        throw new SdkClientException("Error streaming content from S3 during download", e);
    } finally {
        // Always release the HTTP connection backing the object stream.
        IOUtils.closeQuietly(object, log);
    }
}
/**
 * Uploads the given string as a UTF-8 encoded, text/plain object.
 *
 * @param bucketName target bucket; must not be null
 * @param key target object key; must not be null
 * @param content string payload to store; must not be null
 * @return result of the underlying put-object call
 */
@Override
public PutObjectResult putObject(String bucketName, String key, String content)
        throws AmazonServiceException, SdkClientException {
    rejectNull(bucketName, "Bucket name must be provided");
    rejectNull(key, "Object key must be provided");
    rejectNull(content, "String content must be provided");

    final byte[] payload = content.getBytes(StringUtils.UTF8);

    // Advertise type and exact length so the SDK can stream the upload.
    final ObjectMetadata objectMetadata = new ObjectMetadata();
    objectMetadata.setContentType("text/plain");
    objectMetadata.setContentLength(payload.length);

    return putObject(new PutObjectRequest(bucketName, key,
            new ByteArrayInputStream(payload), objectMetadata));
}
/**
 * Instantiates an {@code AmazonS3Client}, honoring the optional client
 * configuration and preferring explicit credentials over the provider.
 *
 * @return Client instance to use to connect to AWS.
 */
AmazonS3Client createAmazonS3Client() {
    final AmazonS3Client cln;

    if (cfg != null) {
        cln = cred != null
            ? new AmazonS3Client(cred, cfg)
            : new AmazonS3Client(credProvider, cfg);
    }
    else {
        cln = cred != null
            ? new AmazonS3Client(cred)
            : new AmazonS3Client(credProvider);
    }

    // Point the client at a custom endpoint when one is configured.
    if (!F.isEmpty(bucketEndpoint))
        cln.setEndpoint(bucketEndpoint);

    return cln;
}
// Factory callback: builds a region-scoped AmazonS3 client using the cluster
// security manager's credentials provider.
// NOTE(review): the trailing "});" closes an anonymous-class expression whose
// opening lies outside this fragment.
@Override public AmazonS3 get() {
    AmazonS3Client amazonS3 = new AmazonS3Client(awsClusterSecurityManager.getCredentialsProvider());
    amazonS3.setRegion(region); // 'region' is captured from the enclosing scope
    return amazonS3;
} });
/**
 * Stores the supplied string under the given bucket/key as a UTF-8 encoded,
 * text/plain object.
 *
 * @param bucketName target bucket; must not be null
 * @param key target key; must not be null
 * @param content string payload; must not be null
 * @return result of the delegated put-object call
 */
@Override
public PutObjectResult putObject(String bucketName, String key, String content)
        throws AmazonServiceException, AmazonClientException {
    assertParameterNotNull(bucketName, "Bucket name must be provided");
    assertParameterNotNull(key, "Object key must be provided");
    assertParameterNotNull(content, "String content must be provided");

    final byte[] encoded = content.getBytes(StringUtils.UTF8);

    final ObjectMetadata meta = new ObjectMetadata();
    meta.setContentType("text/plain");
    meta.setContentLength(encoded.length);

    return putObject(new PutObjectRequest(bucketName, key,
            new ByteArrayInputStream(encoded), meta));
}
/** * Generates manifest file and writes it to Amazon S3 * * @param fileName Name of manifest file (Amazon S3 key) * @param records Used to generate the manifest file * @throws IOException */ private String writeManifestToS3(String fileName, List<String> records) throws IOException { String fileContents = generateManifestFile(records); // upload generated manifest file PutObjectRequest putObjectRequest = new PutObjectRequest(s3Bucket, fileName, new ByteArrayInputStream(fileContents.getBytes()), null); s3Client.putObject(putObjectRequest); return fileName; }
/**
 * Creates a zero-byte "directory marker" object under the given key.
 *
 * @param key the object key to create
 * @return true on success, false if the client call failed (error is logged)
 */
@Override
protected boolean createEmptyObject(String key) {
    try {
        ObjectMetadata dirMeta = new ObjectMetadata();
        dirMeta.setContentLength(0);
        dirMeta.setContentMD5(DIR_HASH);
        dirMeta.setContentType(Mimetypes.MIMETYPE_OCTET_STREAM);

        PutObjectRequest request = new PutObjectRequest(
                mBucketName, key, new ByteArrayInputStream(new byte[0]), dirMeta);
        mClient.putObject(request);
        return true;
    } catch (AmazonClientException e) {
        LOG.error("Failed to create object: {}", key, e);
        return false;
    }
}
// Fragment (truncated mid-block): reads an object's size, then attempts a re-upload.
try {
    S3Object object = getS3ObjectAndMetadata(bucket, key, ssecLocal);
    // NOTE(review): cast to int truncates object sizes above 2 GiB — confirm inputs are small.
    int sizeOfFile = (int)object.getObjectMetadata().getContentLength();
    fieldCount = sizeOfFile/sizeArray;
    totalSize = sizeOfFile;
    offset += sizeArray;
    try (InputStream input = new ByteArrayInputStream(destinationArray)) {
        ObjectMetadata metadata = new ObjectMetadata();
        metadata.setContentLength(totalSize);
        // BUG(review): putObjectRequest is still null when passed to putObject() below;
        // the request construction appears to be missing from this fragment.
        PutObjectRequest putObjectRequest = null;
        if (sseLocal.equals("true")) {
            s3Client.putObject(putObjectRequest);
            // NOTE(review): 'res' is not defined in this fragment — presumably the
            // PutObjectResult of the call above; verify against the full source.
            if(res.getETag() == null) {
                return Status.ERROR;
/**
 * Same as {@link #uploadFrom(byte[])} but allows specifying a request
 * metric collector.
 *
 * @param buffer the byte buffer.
 * @param requestMetricCollector the request metrics collector.
 * @return an instance of {@link PutObjectResult}
 */
public PutObjectResult uploadFrom(final byte[] buffer,
        RequestMetricCollector requestMetricCollector) {
    // Declare the payload size up front so the SDK streams rather than buffers.
    final ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(buffer.length);

    final PutObjectRequest request = new PutObjectRequest(
            getBucketName(), getKey(), new ByteArrayInputStream(buffer), metadata)
        .withRequestMetricCollector(requestMetricCollector);

    return getAmazonS3Client().putObject(request);
}
/**
 * Verifies that S3Link.downloadTo(OutputStream) copies the stored object's
 * bytes into the supplied output stream.
 */
@Test
public void testDownloadToOutstream() {
    EasyMock.reset(mockS3);
    EasyMock.expect(mockS3.getRegion()).andReturn(Region.US_Standard);

    byte[] expectedBytes = "MyData".getBytes(StringUtils.UTF8);
    ByteArrayOutputStream sink = new ByteArrayOutputStream();

    // Stub an S3Object whose content stream yields the expected bytes.
    S3Object stubObject = EasyMock.createMock(S3Object.class);
    ByteArrayInputStream backing = new ByteArrayInputStream(expectedBytes);
    S3ObjectInputStream objectContent = new S3ObjectInputStream(backing, null);
    EasyMock.expect(mockS3.getObject(anyObject(GetObjectRequest.class))).andReturn(stubObject);
    EasyMock.expect(stubObject.getObjectContent()).andReturn(objectContent);
    EasyMock.expect(stubObject.getObjectMetadata()).andReturn(new ObjectMetadata());
    EasyMock.replay(mockS3, stubObject);

    mapper.getS3ClientCache().useClient(mockS3);
    S3Link link = mapper.createS3Link(bucket, key);
    link.downloadTo(sink);

    EasyMock.verify(mockS3, stubObject);
    assertArrayEquals(expectedBytes, sink.toByteArray());
}
/**
 * Uploads the node's schema to S3 under {@code <prefix>/<nodeId>/<SCHEMA_FILE>}.
 *
 * @param ctx backup/restore context identifying the node and target bucket
 * @param schema schema text to store (written UTF-8 encoded)
 * @throws Exception if the client cannot be created or the upload fails
 */
@Override
public void uploadSchema(BackupRestoreContext ctx, String schema) throws Exception {
    final String nodeId = ctx.getNodeId();
    final AmazonS3Client amazonS3Client = getAmazonS3Client(ctx);
    final String key = getPrefixKey(ctx) + "/" + nodeId + "/" + StorageUtil.SCHEMA_FILE;

    final byte[] schemaBytes = schema.getBytes(StandardCharsets.UTF_8);

    // Set Content-Length explicitly: with empty metadata the SDK must buffer
    // the entire stream in memory to size the request (and logs a warning).
    final ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(schemaBytes.length);

    amazonS3Client.putObject(getBucketName(ctx), key, new ByteArrayInputStream(schemaBytes), metadata);
}
// Factory function: adapts wrapped client params into a concrete AmazonS3Client.
// NOTE(review): the trailing "};" closes an anonymous-class expression whose
// opening lies outside this fragment.
@Override public AmazonS3 apply(AmazonS3ClientParamsWrapper params) {
    return new AmazonS3Client(params);
} };
// Fragment (heavily truncated): appears to be a NiFi-style stream callback that
// performs either a single PutObject or a multipart upload to S3. Large parts of
// the original source are missing, so braces do not balance and several
// referenced variables (request, initiateRequest, initiateResult, uploadRequest,
// completeRequest, thisPartSize) are not defined in this view.
@Override
public void process(final InputStream rawIn) throws IOException {
    try (final InputStream in = new BufferedInputStream(rawIn)) {
        final ObjectMetadata objectMetadata = new ObjectMetadata();
        objectMetadata.setContentDisposition(ff.getAttribute(CoreAttributes.FILENAME.key()));
        objectMetadata.setContentLength(ff.getSize());
        // Single-request upload path.
        final PutObjectResult result = s3.putObject(request);
        if (result.getVersionId() != null) {
            attributes.put(S3_VERSION_ATTR_KEY, result.getVersionId());
            // Multipart upload path (intervening code elided in this fragment).
            s3.initiateMultipartUpload(initiateRequest);
            currentState.setUploadId(initiateResult.getUploadId());
            currentState.getPartETags().clear();
            // NOTE(review): dangling builder call — its receiver was lost in truncation.
            .withPartSize(thisPartSize);
            try {
                UploadPartResult uploadPartResult = s3.uploadPart(uploadRequest);
                currentState.addPartETag(uploadPartResult.getPartETag());
                currentState.setFilePosition(currentState.getFilePosition() + thisPartSize);
                try {
                    CompleteMultipartUploadResult completeResult = s3.completeMultipartUpload(completeRequest);
                    getLogger().info("Success completing upload flowfile={} etag={} uploadId={}",
                        new Object[]{ffFilename, completeResult.getETag(), currentState.getUploadId()});
// Fragment (truncated): uploads a buffered bulk payload to the event-store S3
// bucket; on failure deletes the partially written object. The PutObjectRequest
// constructor call below is cut off mid-argument-list, so this does not parse.
ObjectMetadata objectMetadata = new ObjectMetadata();
int bulkSize = buffer.size();
// Declare the payload size so the SDK can stream without re-buffering.
objectMetadata.setContentLength(bulkSize);
PutObjectRequest putObjectRequest = new PutObjectRequest(config.getEventStoreBulkS3Bucket(), key,
// Raise the mark/reset read limit to the payload size so retries can rewind the stream.
putObjectRequest.getRequestClientOptions().setReadLimit(bulkSize);
s3Client.putObject(putObjectRequest);
// NOTE(review): dangling builder call — its receiver (likely a CloudWatch
// metric datum) was lost in truncation.
.withDimensions(new Dimension().withName("project").withValue(project))));
} catch (IOException | AmazonClientException e) {
    // Best-effort cleanup of the partially uploaded object.
    s3Client.deleteObject(config.getEventStoreBulkS3Bucket(), key);
// Fragment (truncated): builds an AmazonS3Client with a bounded connection pool.
clientConfig.setMaxConnections(Integer.parseInt(maxConnections));
s3Client = new AmazonS3Client(s3Credentials, clientConfig);
s3Client.setRegion(Region.getRegion(Regions.fromName(region)));
// NOTE(review): setEndpoint() after setRegion() overrides the region-derived
// endpoint; confirm both are intentionally configured.
s3Client.setEndpoint(endPoint);
System.out.println("Connection successfully initialized");
} catch (Exception e){
/**
 * Uploads the given file to the specified bucket/key, attaching empty
 * (default) object metadata.
 *
 * @param bucketName target bucket
 * @param key target object key
 * @param file file whose contents will be uploaded
 * @return result of the delegated put-object call
 */
@Override
public PutObjectResult putObject(String bucketName, String key, File file)
        throws SdkClientException, AmazonServiceException {
    PutObjectRequest request = new PutObjectRequest(bucketName, key, file)
        .withMetadata(new ObjectMetadata());
    return putObject(request);
}
/**
 * Verifies that getStoredMimeType() returns the Content-Type recorded in the
 * object's S3 metadata when the user-set MIME flag is present.
 */
@Test
public void testGetStoredMimeType() throws BinaryStoreException {
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentType(TEST_MIME);
    metadata.addUserMetadata(S3BinaryStore.USER_MIME_TYPE_KEY, String.valueOf(true));
    expect(s3Client.getObjectMetadata(BUCKET, TEST_KEY)).andReturn(metadata);
    replayAll();

    BinaryValue binaryValue = createBinaryValue(TEST_KEY, TEST_CONTENT);
    String storedMimeType = s3BinaryStore.getStoredMimeType(binaryValue);
    assertEquals(TEST_MIME, storedMimeType);
}
// Fragment: two statements from a larger method. The client is created with
// per-bucket credentials and a shared ClientConfiguration.
final AmazonS3 s3Client = new AmazonS3Client(getCredentialsForBucket(s3Artifact.getS3Bucket()), clientConfiguration);
// NOTE(review): 'details' and 'length' are defined outside this fragment —
// presumably an object-metadata lookup feeding a size field. Verify in full source.
length = details.getObjectMetadata().getContentLength();
// Fragment: configures an AmazonS3Client for an under-file-system (UFS) S3 store.
AmazonS3Client amazonS3Client = new AmazonS3Client(credentials, clientConf);
// Explicit endpoint read from configuration — used for non-AWS / private
// S3-compatible storage.
amazonS3Client.setEndpoint(conf.get(PropertyKey.UNDERFS_S3_ENDPOINT));
amazonS3Client.setS3ClientOptions(clientOptions);