/**
 * Builds a new {@link CloudFilesApi} from the given account configuration.
 * Registers the US and UK Cloud Files provider metadata before constructing
 * the context so the provider id in {@code config} can be resolved.
 */
@Provides @LazySingleton public CloudFilesApi getCloudFilesApi(final CloudFilesAccountConfig config) {
    log.info("Building Cloud Files Api...");
    final Iterable<com.google.inject.Module> guiceModules;
    if (config.getUseServiceNet()) {
        // ServiceNet routes traffic over the provider's internal network.
        log.info("Configuring Cloud Files Api to use the internal service network...");
        guiceModules = ImmutableSet.of(new SLF4JLoggingModule(), new InternalUrlModule());
    } else {
        log.info("Configuring Cloud Files Api to use the public network...");
        guiceModules = ImmutableSet.of(new SLF4JLoggingModule());
    }
    ProviderRegistry.registerProvider(CloudFilesUSProviderMetadata.builder().build());
    ProviderRegistry.registerProvider(CloudFilesUKProviderMetadata.builder().build());
    final CloudFilesApi api = ContextBuilder.newBuilder(config.getProvider())
            .credentials(config.getUserName(), config.getApiKey())
            .modules(guiceModules)
            .buildApi(CloudFilesApi.class);
    log.info("Cloud Files Api built.");
    return api;
}
// NOTE(review): fragment — opens mid-argument-list and ends inside an if; the
// enclosing method (and the declarations of MODULES/pageSets/pageSet) are
// outside this view.
        new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(), new NettyPayloadModule());
// Build an S3-backed blob store view with the credentials/overrides above.
BlobStoreContext context = ContextBuilder.newBuilder("s3")
        .credentials(accessId, secretKey)
        .modules(MODULES)
        .overrides(overrides)
        .buildView(BlobStoreContext.class);
BlobStore blobStore = context.getBlobStore();
// NOTE(review): the result of this recursive listing is discarded here —
// presumably `pageSets` (used below) was meant to receive it; confirm against
// the full method.
blobStore.list(bucketName, new ListContainerOptions().recursive());
logger.trace(" Found {} files in bucket {}", pageSets.size(), bucketName);
// Collect names of blobs whose name matches the requested suffix.
String blobFileName = ((MutableBlobMetadata)pageSet).getName();
if ( blobFileName.endsWith( endsWith )) {
    blobFileNames.add(blobFileName);
// NOTE(review): fragment — downloads a single blob into a temp file; the
// enclosing method signature, the RuntimeException message, and the code that
// closes `is`/`fop` are all outside this view.
        new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(), new NettyPayloadModule());
BlobStoreContext context = ContextBuilder.newBuilder("s3")
        .credentials(accessId, secretKey)
        .modules(MODULES)
        .overrides(overrides)
        .buildView(BlobStoreContext.class);
BlobStore blobStore = context.getBlobStore();
Blob blob = blobStore.getBlob(bucketName, blobFileName);
if ( blob == null) {
    // A missing blob is unrecoverable here (message truncated in this view).
    throw new RuntimeException(
// Temp file is removed on JVM exit, not when the caller is done with it.
tempFile.deleteOnExit();
// NOTE(review): `is` and `fop` are not visibly closed in this fragment —
// check the full method for try/finally handling.
fop = new FileOutputStream(tempFile);
InputStream is = blob.getPayload().openStream();
IOUtils.copyLarge(is, fop);
return tempFile;
/**
 * Deletes the configured S3 bucket (container) named by {@code bucketName}.
 * AWS credentials are read from the access/secret key system properties.
 */
public void deleteBucket() {
    String accessId = System.getProperty( SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR );
    String secretKey = System.getProperty( SDKGlobalConfiguration.SECRET_KEY_ENV_VAR );
    Properties overrides = new Properties();
    overrides.setProperty( "s3" + ".identity", accessId );
    overrides.setProperty( "s3" + ".credential", secretKey );
    final Iterable<? extends Module> MODULES = ImmutableSet
            .of(new JavaUrlHttpCommandExecutorServiceModule(),
                new Log4JLoggingModule(),
                new NettyPayloadModule());
    BlobStoreContext context = ContextBuilder.newBuilder("s3").credentials( accessId, secretKey ).modules( MODULES )
            .overrides( overrides ).buildView( BlobStoreContext.class );
    try {
        context.getBlobStore().deleteContainer( bucketName );
    } finally {
        // Fix: the context was previously never closed, leaking its underlying
        // resources; the unused `Blob bo` local is also removed.
        context.close();
    }
}
// NOTE(review): fragment — the ImmutableSet receiver and the enclosing method
// are outside this view; the re-declared `blobStore` below implies the first
// if-block is closed somewhere not visible here.
        .of( new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(), new NettyPayloadModule() );
BlobStoreContext context = ContextBuilder.newBuilder( "s3" )
        .credentials(accessId, secretKey)
        .modules(MODULES)
        .overrides(overrides)
        .buildView(BlobStoreContext.class);
BlobStore blobStore = context.getBlobStore();
// createContainerInLocation returns true only when the bucket was newly created.
if ( blobStore.createContainerInLocation(null, bucketName) ) {
    logger.info( "Created bucket {}", bucketName );
BlobStore blobStore = context.getBlobStore();
// Upload `ephemeral` as JSON with an MD5 checksum, using a multipart PUT.
BlobBuilder blobBuilder = blobStore.blobBuilder( filename )
        .payload( ephemeral )
        .contentMD5(Files.hash( ephemeral, Hashing.md5() ))
        .contentType("application/json");
Blob blob = blobBuilder.build();
final String uploadedFile = blobStore.putBlob( bucketName, blob, PutOptions.Builder.multipart() );
// NOTE(review): fragment — deletes every bucket whose name starts with
// `bucketPrefix`; the ImmutableSet receiver and the enclosing method are not
// visible here.
        .of(new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(), new NettyPayloadModule());
// NOTE(review): the built BlobStoreContext is discarded here, yet `context` is
// used just below — presumably the original assigns this result to `context`;
// confirm against the full file.
ContextBuilder.newBuilder("s3").credentials(accessId, secretKey).modules(MODULES)
        .overrides(overrides).buildView(BlobStoreContext.class);
BlobStore blobStore = context.getBlobStore();
final PageSet<? extends StorageMetadata> blobStoreList = blobStore.list();
for ( Object o : blobStoreList.toArray() ) {
    StorageMetadata s = (StorageMetadata)o;
    if ( s.getName().startsWith( bucketPrefix )) {
        try {
            blobStore.deleteContainer(s.getName());
        } catch ( ContainerNotFoundException cnfe ) {
            // NOTE(review): the `{}` placeholder is filled by the exception, not
            // a bucket name — likely intended s.getName() plus the throwable.
            logger.warn("Attempted to delete bucket {} but it is already deleted", cnfe );
logger.debug("Deleted bucket {}", s.getName());
public synchronized boolean hasNewFiles() { // see if there are any files since lastMarker. BlobStoreContext ctx = ContextBuilder.newBuilder(provider) .credentials(user, key) .overrides(new Properties() {{ setProperty(LocationConstants.PROPERTY_ZONE, zone); }}) .buildView(BlobStoreContext.class); BlobStore store = ctx.getBlobStore(); ListContainerOptions options = new ListContainerOptions().maxResults(batchSize).afterMarker(lastMarker); PageSet<? extends StorageMetadata> pages = store.list(container, options); log.debug("Saw {} new files since {}", pages.size() == batchSize ? "many" : Integer.toString(pages.size()), lastMarker); boolean emptiness = getBlobsWithinRange(pages).isEmpty(); if(emptiness) { log.warn("No file found within range {}", new Range(START_TIME, STOP_TIME)); } else { log.debug("New files found within range {}", new Range(START_TIME, STOP_TIME)); } return !emptiness; }
// NOTE(review): fragment — builds an s3proxy user-agent string; the statement
// receiving this String.format(...) and the receiver of the orphaned
// .newBuilder(...) chain (presumably `ContextBuilder builder = ContextBuilder`)
// are outside this view.
String.format("s3proxy/%s jclouds/%s java/%s",
        Main.class.getPackage().getImplementationVersion(), JcloudsVersion.get(),
        System.getProperty("java.version")));
        .newBuilder(provider)
        .credentials(identity, credential)
        .modules(ImmutableList.<Module>of(
                new SLF4JLoggingModule(),
                new ExecutorServiceModule(executorService)))
        .overrides(properties);
// Optional custom endpoint (e.g. a non-AWS S3-compatible service).
if (!Strings.isNullOrEmpty(endpoint)) {
    builder = builder.endpoint(endpoint);
BlobStoreContext context = builder.build(BlobStoreContext.class);
BlobStore blobStore = context.getBlobStore();
// Region-scoped providers expose per-region blob stores; narrow when configured.
if (context instanceof RegionScopedBlobStoreContext && region != null) {
    blobStore = ((RegionScopedBlobStoreContext) context)
            .getBlobStore(region);
// Builds a jclouds ComputeService for `provider` using the given
// identity/credential, restricted to the requested zones.
ContextBuilder ctxBuilder = ContextBuilder.newBuilder(provider);
ctxBuilder.credentials(identity, credential);
// NOTE(review): assumes keysSetToStr renders the zone set in the
// comma-separated form PROPERTY_ZONES expects — confirm against its definition.
properties.setProperty(LocationConstants.PROPERTY_ZONES, keysSetToStr(zones));
ctxBuilder.overrides(properties);
computeService = ctxBuilder.buildView(ComputeServiceContext.class).getComputeService();
public CloudFilesPublisher() { Properties overrides = new Properties(); overrides.setProperty(LocationConstants.PROPERTY_ZONE, ZONE); BlobStoreContext context = ContextBuilder.newBuilder(PROVIDER) .credentials(USERNAME, API_KEY) .overrides(overrides) .buildView(BlobStoreContext.class); blobStore = context.getBlobStore(); }
// NOTE(review): fragment — the second, orphaned chain below
// (.blobStore(...)...) belongs to a builder whose receiver is outside this view.
blobStoreContext = ContextBuilder.newBuilder( builder.blobStoreProvider)
        .credentials(accessKey, secretKey)
        .overrides(properties).build(BlobStoreContext.class);
        .blobStore(blobStoreContext.getBlobStore())
        .awsAuthentication(builder.authType, accessKey, secretKey)
        .ignoreUnknownHeaders(builder.ignoreUnknownHeaders);
/** Returns the provider id (e.g. "s3") of the context backing the given blob store. */
private static String getBlobStoreType(BlobStore blobStore) {
    return blobStore.getContext()
            .unwrap()
            .getProviderMetadata()
            .getId();
}
/**
 * Fallback: maps 404 / not-found failures to the EMPTY collection; any other
 * throwable is propagated to the caller.
 */
@Override public PaginatedCollection<Object> createOrPropagate(Throwable t) throws Exception {
    return valOnNotFoundOr404(EMPTY, t);
}
// Closes the enclosing (anonymous) class, whose declaration is outside this view.
}
/**
 * Delete the configured s3 bucket.
 * AWS credentials are read from the access/secret key system properties.
 */
public void deleteBucket() {
    logger.debug("\n\nDelete bucket\n");
    String accessId = System.getProperty(SDKGlobalConfiguration.ACCESS_KEY_ENV_VAR);
    String secretKey = System.getProperty(SDKGlobalConfiguration.SECRET_KEY_ENV_VAR);
    Properties overrides = new Properties();
    overrides.setProperty("s3" + ".identity", accessId);
    overrides.setProperty("s3" + ".credential", secretKey);
    final Iterable<? extends Module> MODULES = ImmutableSet
            .of(new JavaUrlHttpCommandExecutorServiceModule(),
                new Log4JLoggingModule(),
                new NettyPayloadModule());
    BlobStoreContext context = ContextBuilder.newBuilder("s3").credentials(accessId, secretKey).modules(MODULES)
            .overrides(overrides).buildView(BlobStoreContext.class);
    try {
        context.getBlobStore().deleteContainer( bucketName );
    } finally {
        // Fix: the context was previously never closed, leaking its underlying
        // connection resources.
        context.close();
    }
}
// NOTE(review): fragment — creates the bucket if absent and uploads a file; the
// ImmutableSet receiver and the enclosing method are outside this view, and the
// re-declared `blobStore` implies the if-block is closed somewhere not visible.
        new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(), new NettyPayloadModule() );
BlobStoreContext context = ContextBuilder.newBuilder( "s3" )
        .credentials( accessKey, secretKey )
        .modules( MODULES )
        .overrides( overrides )
        .buildView( BlobStoreContext.class );
BlobStore blobStore = context.getBlobStore();
// createContainerInLocation returns true only when the bucket was newly created.
if ( blobStore.createContainerInLocation( null, bucketName ) ) {
    logger.info( "Created bucket " + bucketName );
BlobStore blobStore = context.getBlobStore();
// Upload `uploadFile` as JSON with an MD5 checksum, using a multipart PUT.
BlobBuilder blobBuilder = blobStore.blobBuilder( filename )
        .payload( uploadFile )
        .contentMD5(Files.hash( uploadFile, Hashing.md5()))
        .contentType( "application/json" );
Blob blob = blobBuilder.build();
final String uploadedFile = blobStore.putBlob( bucketName, blob, PutOptions.Builder.multipart() );
public synchronized void downloadNewFiles(File downloadDir) { log.info("Downloading new files since {}", lastMarker); BlobStoreContext ctx = ContextBuilder.newBuilder(provider) .credentials(user, key) .overrides(new Properties() {{ setProperty(LocationConstants.PROPERTY_ZONE, zone); }}) .buildView(BlobStoreContext.class); // threadsafe according to https://jclouds.apache.org/documentation/userguide/blobstore-guide/ BlobStore store = ctx.getBlobStore(); ListContainerOptions options = new ListContainerOptions().maxResults(batchSize).afterMarker(lastMarker); PageSet<? extends StorageMetadata> pages = store.list(container, options); //Gets key within the time range specified NavigableMap<Long, String> mapWithinRange = getBlobsWithinRange(pages); //Download only for keys within that range for(Map.Entry<Long, String> blobMeta : mapWithinRange.entrySet()) { log.info("Downloading file: " + blobMeta.getValue()); downloadWorkers.submit(new BlobDownload(downloadDir, store, container, blobMeta.getValue())); lastMarker = blobMeta.getValue(); synchronized (CloudFilesManager.this) { // this is where we resume from. MarkerUtils.writeLastMarker(blobMeta.getValue()); } } log.info("Updated the last marker value as " + lastMarker); }
/**
 * Fallback: maps 404 / not-found failures to the EMPTY collection; any other
 * throwable is propagated to the caller.
 */
@Override public PaginatedCollection<Object> createOrPropagate(Throwable t) throws Exception {
    return valOnNotFoundOr404(EMPTY, t);
}
// Closes the enclosing (anonymous) class, whose declaration is outside this view.
}
// NOTE(review): test fragment — verifies exactly one expected blob exists, then
// cleans up; the enclosing method and several closing braces are outside this view.
        new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(), new NettyPayloadModule());
BlobStoreContext context = ContextBuilder.newBuilder("s3")
        .credentials(accessId, secretKey)
        .modules(MODULES)
        .overrides(overrides)
        .buildView(BlobStoreContext.class);
blobStore = context.getBlobStore();
if (!blobStore.blobExists(bucketName, expectedFileName)) {
    // Clean up the bucket before failing so reruns start fresh.
    blobStore.deleteContainer(bucketName);
    Assert.fail("Blob does not exist: " + expectedFileName);
Blob bo = blobStore.getBlob(bucketName, expectedFileName);
Long numOfFiles = blobStore.countBlobs(bucketName);
Long numWeWant = 1L;
// NOTE(review): deleteContainer appears here and again at the end — likely
// duplicated across branches; confirm intent against the full method.
blobStore.deleteContainer(bucketName);
assertEquals(numOfFiles, numWeWant);
assertNotNull(bo);
blobStore.deleteContainer(bucketName);
// NOTE(review): test fragment — expects five blobs in the bucket; the
// assertions consuming `numOfFiles`/`numWeWant`/`bo` are presumably outside
// this view, and deleteContainer is called twice in a row (confirm intent
// against the full file).
        new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(), new NettyPayloadModule() );
BlobStoreContext context = ContextBuilder.newBuilder( "s3" )
        .credentials(accessId, secretKey )
        .modules(MODULES )
        .overrides(overrides )
        .buildView(BlobStoreContext.class );
blobStore = context.getBlobStore();
Long numOfFiles = blobStore.countBlobs( bucketName );
Blob bo = blobStore.getBlob(bucketName, expectedFileName);
Long numWeWant = 5L;
blobStore.deleteContainer( bucketName );
blobStore.deleteContainer( bucketName );
// NOTE(review): test fragment — asserts exactly one expected blob exists, then
// deletes the bucket; the if-block's closing brace and the `bo` declaration are
// outside this view.
final Iterable<? extends Module> MODULES = ImmutableSet.of(
        new JavaUrlHttpCommandExecutorServiceModule(), new Log4JLoggingModule(), new NettyPayloadModule() );
BlobStoreContext context = ContextBuilder.newBuilder( "s3" )
        .credentials( accessId, secretKey )
        .modules( MODULES )
        .overrides( overrides )
        .buildView( BlobStoreContext.class );
blobStore = context.getBlobStore();
if ( !blobStore.blobExists( bucketName, expectedFileName ) ) {
    // A missing blob fails the test immediately (requires -ea to trip).
    assert ( false );
Long numOfFiles = blobStore.countBlobs( bucketName );
Long numWeWant = Long.valueOf( 1 );
assertEquals( numOfFiles, numWeWant );
bo = blobStore.getBlob( bucketName, expectedFileName );
blobStore.deleteContainer( bucketName );