/** * Cleans up the local temp directory and S3 test path that we are using. */ @After public void cleanEnv() throws IOException { // Clean up the local directory. FileUtils.deleteDirectory(localTempPath.toFile()); // Clean up the destination S3 folder. S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); s3FileTransferRequestParamsDto.setS3KeyPrefix(testS3KeyPrefix); s3Dao.deleteDirectory(s3FileTransferRequestParamsDto); }
@Test public void testRestoreObjectsEmptyList() { // Initiate a restore request for an empty list of S3 files. S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); s3FileTransferRequestParamsDto.setFiles(new ArrayList<>()); s3Dao.restoreObjects(s3FileTransferRequestParamsDto, 0); }
/** * Cleans up the local temp directory and S3 test path that we are using. */ @After public void cleanEnv() throws IOException { // Clean up the local directory. FileUtils.deleteDirectory(localTempPath.toFile()); // Clean up the destination S3 folders. S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); for (String keyPrefix : Arrays.asList(testS3KeyPrefix, TEST_S3_KEY_PREFIX)) { // Since the key prefix represents a directory, we add a trailing '/' character to it. s3FileTransferRequestParamsDto.setS3KeyPrefix(keyPrefix + "/"); s3Dao.deleteDirectory(s3FileTransferRequestParamsDto); } }
/** * Cleans up the local temp directory and S3 test path that we are using. */ @After public void cleanEnv() throws IOException { try { // Clean up the local directory. FileUtils.deleteDirectory(localTempPath.toFile()); // Clean up the destination S3 folder. S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); s3FileTransferRequestParamsDto.setS3KeyPrefix(testS3KeyPrefix); s3Dao.deleteDirectory(s3FileTransferRequestParamsDto); } catch (Exception ex) { // If an exception is thrown by one of the @Test methods, some cleanup operations could also fail. This is why we are just logging a warning here. LOGGER.warn("Unable to cleanup environment.", ex); } }
/**
 * Test that we are able to make a call to deleteFileList S3Dao method with an empty file list.
 */
@Test
public void testDeleteFileListEmptyList()
{
    S3FileTransferRequestParamsDto params = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto();
    params.setFiles(new ArrayList<File>());
    s3Dao.deleteFileList(params);
}
@Test public void testValidateGlacierS3FilesRestoredEmptyList() { // Make a call to validate Glacier S3 files being restored for an empty list of S3 files. S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); s3FileTransferRequestParamsDto.setFiles(new ArrayList<>()); s3Dao.validateGlacierS3FilesRestored(s3FileTransferRequestParamsDto); }
@Test(expected = ObjectNotFoundException.class) public void testValidateS3FileObjectNotFoundException() throws IOException, InterruptedException { // Try to validate a non-existing S3 file. S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); s3FileTransferRequestParamsDto.setS3KeyPrefix(TARGET_S3_KEY); s3Dao.validateS3File(s3FileTransferRequestParamsDto, FILE_SIZE_1_KB); }
/** * Cleans up the S3 test path that we are using. */ @After public void cleanEnv() throws IOException { // Clean up the destination S3 folder. s3Dao.deleteDirectory(s3DaoTestHelper.getTestS3FileTransferRequestParamsDto()); s3Operations.rollback(); }
@Test(expected = IllegalArgumentException.class) public void testValidateS3FileRuntimeExceptionFileSizeDoesNotMatch() throws IOException, InterruptedException { // Put a 1 KB file in S3. PutObjectRequest putObjectRequest = new PutObjectRequest(storageDaoTestHelper.getS3ManagedBucketName(), TARGET_S3_KEY, new ByteArrayInputStream(new byte[(int) FILE_SIZE_1_KB]), null); s3Operations.putObject(putObjectRequest, null); // Try to validate an S3 file by specifying an incorrect file size. S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); s3FileTransferRequestParamsDto.setS3KeyPrefix(TARGET_S3_KEY); s3Dao.validateS3File(s3FileTransferRequestParamsDto, FILE_SIZE_1_KB + 999); }
@Test public void testValidateS3File() throws IOException, InterruptedException { // Put a 1 KB file in S3. PutObjectRequest putObjectRequest = new PutObjectRequest(storageDaoTestHelper.getS3ManagedBucketName(), TARGET_S3_KEY, new ByteArrayInputStream(new byte[(int) FILE_SIZE_1_KB]), null); s3Operations.putObject(putObjectRequest, null); // Validate the S3 file. S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); s3FileTransferRequestParamsDto.setS3KeyPrefix(TARGET_S3_KEY); s3Dao.validateS3File(s3FileTransferRequestParamsDto, FILE_SIZE_1_KB); }
/**
 * Throws the exception as-is without wrapping if the exception is of type AmazonServiceException or children.
 */
@Test
public void testGetPropertiesThrowsAsIsWhenGenericAmazonError()
{
    try
    {
        s3Dao.getProperties(MockS3OperationsImpl.MOCK_S3_BUCKET_NAME_INTERNAL_ERROR, TARGET_S3_KEY,
            s3DaoTestHelper.getTestS3FileTransferRequestParamsDto());
        Assert.fail("expected AmazonServiceException to be thrown, but no exceptions were thrown");
    }
    catch (Exception e)
    {
        // The original AmazonServiceException must reach the caller unwrapped.
        Assert.assertEquals("thrown exception type", AmazonServiceException.class, e.getClass());
    }
}
/**
 * Throws an ObjectNotFoundException when S3 object key does not exist. This should result as a 404 to clients.
 */
@Test
public void testGetPropertiesThrowsWhenKeyDoesNotExist()
{
    try
    {
        s3Dao.getProperties(S3_BUCKET_NAME, TARGET_S3_KEY, s3DaoTestHelper.getTestS3FileTransferRequestParamsDto());
        Assert.fail("expected ObjectNotFoundException to be thrown, but no exceptions were thrown");
    }
    catch (Exception e)
    {
        // Both the exception type and its user-facing message are part of the contract.
        Assert.assertEquals("thrown exception type", ObjectNotFoundException.class, e.getClass());
        Assert.assertEquals("thrown exception message", "Specified S3 object key '" + TARGET_S3_KEY + "' does not exist.", e.getMessage());
    }
}
/**
 * The method is successful when both bucket and key exists.
 */
@Test
public void testGetProperties()
{
    String expectedKey = "foo";
    String expectedValue = "bar";

    // Stage a properties-format object ("key=value") in S3.
    ByteArrayInputStream inputStream = new ByteArrayInputStream((expectedKey + "=" + expectedValue).getBytes());
    PutObjectRequest putObjectRequest = new PutObjectRequest(S3_BUCKET_NAME, TARGET_S3_KEY, inputStream, new ObjectMetadata());
    s3Operations.putObject(putObjectRequest, null);

    // Read it back through the DAO and verify the parsed property.
    Properties properties = s3Dao.getProperties(S3_BUCKET_NAME, TARGET_S3_KEY, s3DaoTestHelper.getTestS3FileTransferRequestParamsDto());

    Assert.assertEquals("properties key '" + expectedKey + "'", expectedValue, properties.get(expectedKey));
}
/** * Test "access denied" scenario for the getObjectMetadata S3Dao operation. */ @Test public void testGetObjectMetadataAccessDenied() { S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); // Try to retrieve S3 object metadata when S3 access is denied. try { s3FileTransferRequestParamsDto.setS3BucketName(MockS3OperationsImpl.MOCK_S3_BUCKET_NAME_ACCESS_DENIED); s3FileTransferRequestParamsDto.setS3KeyPrefix(TARGET_S3_KEY); s3Dao.getObjectMetadata(s3FileTransferRequestParamsDto); fail("Should throw an ObjectNotFoundException when S3 access is denied."); } catch (IllegalStateException e) { assertEquals(String.format("Failed to get S3 metadata for object key \"%s\" from bucket \"%s\". " + "Reason: AccessDenied (Service: null; Status Code: 403; Error Code: AccessDenied; Request ID: null)", TARGET_S3_KEY, MockS3OperationsImpl.MOCK_S3_BUCKET_NAME_ACCESS_DENIED), e.getMessage()); } }
/** * Test that we are able to perform the uploadFile S3Dao operation utilizing Reduced Redundancy Storage (RRS) storage option. */ @Test public void testUploadFileUseRrs() throws IOException, InterruptedException { // Create local test file. File targetFile = createLocalFile(localTempPath.toString(), LOCAL_FILE, FILE_SIZE_1_KB); Assert.assertTrue(targetFile.isFile()); Assert.assertTrue(targetFile.length() == FILE_SIZE_1_KB); // Upload test file to s3Dao. S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); s3FileTransferRequestParamsDto.setS3KeyPrefix(TARGET_S3_KEY); s3FileTransferRequestParamsDto.setLocalPath(targetFile.getPath()); s3FileTransferRequestParamsDto.setUseRrs(true); S3FileTransferResultsDto results = s3Dao.uploadFile(s3FileTransferRequestParamsDto); // Validate results. Assert.assertTrue(results.getTotalFilesTransferred() == 1L); // Validate the file upload. s3DaoTestHelper.validateS3FileUpload(s3FileTransferRequestParamsDto, Arrays.asList(TARGET_S3_KEY)); // TODO: Validate Reduced Redundancy Storage (RRS) storage option. }
@Test
public void testS3FileExistsKeyNoExists()
{
    // An existence check against a key that was never uploaded must return false.
    S3FileTransferRequestParamsDto requestParams = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto();
    requestParams.setS3BucketName(S3_BUCKET_NAME);
    requestParams.setS3KeyPrefix(TARGET_S3_KEY);

    Assert.assertFalse(s3Dao.s3FileExists(requestParams));
}
/** * Test "key not found" scenario for the getObjectMetadata S3Dao operation. */ @Test public void testGetObjectMetadataS3KeyNoExists() { // Try to retrieve S3 object metadata for a non-existing S3 key. S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); s3FileTransferRequestParamsDto.setS3BucketName(S3_BUCKET_NAME); s3FileTransferRequestParamsDto.setS3KeyPrefix(TARGET_S3_KEY); assertNull(s3Dao.getObjectMetadata(s3FileTransferRequestParamsDto)); }
/** * Test "bucket not found" scenario for the getObjectMetadata S3Dao operation. */ @Test public void testGetObjectMetadataS3BucketNoExists() { S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); // Try to retrieve S3 object metadata when S3 bucket does not exist. s3FileTransferRequestParamsDto.setS3BucketName(MockS3OperationsImpl.MOCK_S3_BUCKET_NAME_NO_SUCH_BUCKET_EXCEPTION); s3FileTransferRequestParamsDto.setS3KeyPrefix(TARGET_S3_KEY); assertNull(s3Dao.getObjectMetadata(s3FileTransferRequestParamsDto)); }
/** * Test that we are able to perform the deleteDirectory S3Dao operation on S3 using our DAO tier. */ @Test public void testDeleteDirectory() throws IOException, InterruptedException { // Upload local directory to s3Dao. testUploadDirectory(); // Validate that S3 directory is not empty. S3FileTransferRequestParamsDto s3FileTransferRequestParamsDto = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto(); s3FileTransferRequestParamsDto.setS3KeyPrefix(TEST_S3_KEY_PREFIX + "/"); Assert.assertTrue(s3Dao.listDirectory(s3FileTransferRequestParamsDto).size() > 0); // Delete directory from S3 using s3Dao. s3Dao.deleteDirectory(s3FileTransferRequestParamsDto); // Validate that S3 directory got deleted. Assert.assertEquals(0, s3Dao.listDirectory(s3FileTransferRequestParamsDto).size()); }
/**
 * The method is successful when both bucket and key exists.
 */
@Test
public void testS3FileExists()
{
    String expectedKey = "foo";
    String expectedValue = "bar";

    // Stage an object at the target key so the existence check has something to find.
    ByteArrayInputStream inputStream = new ByteArrayInputStream((expectedKey + "=" + expectedValue).getBytes());
    PutObjectRequest putObjectRequest = new PutObjectRequest(S3_BUCKET_NAME, TARGET_S3_KEY, inputStream, new ObjectMetadata());
    s3Operations.putObject(putObjectRequest, null);

    S3FileTransferRequestParamsDto requestParams = s3DaoTestHelper.getTestS3FileTransferRequestParamsDto();
    requestParams.setS3BucketName(S3_BUCKET_NAME);
    requestParams.setS3KeyPrefix(TARGET_S3_KEY);

    Assert.assertTrue(s3Dao.s3FileExists(requestParams));
}