/**
 * Returns the default GridFS bucket of the configured database.
 *
 * @return the default-named GridFS bucket backed by {@code mongoDb}
 */
private GridFSBucket getBucket() {
    return GridFSBuckets.create(mongoDb);
}
/**
 * Opens the GridFS bucket used for associated files.
 *
 * @return the {@code ASSOCIATED_FILES} bucket on the configured database
 */
private GridFSBucket createGridFSConnection() {
    MongoDatabase mongoDatabase = mongoClient.getDatabase(database);
    return GridFSBuckets.create(mongoDatabase, ASSOCIATED_FILES);
}
/**
 * Opens the GridFS bucket that stores media content.
 *
 * @return the {@code "media"} bucket on the factory-provided database
 */
private GridFSBucket getGridFs() {
    // All media content lives in a single, fixed-name bucket.
    return GridFSBuckets.create(mongoFactory.getDb(), "media");
}
}
public MongoDbProjection(ActorRef eventStore, MongoClient client) { super(eventStore); mongodatabase = client.getDatabase(nodename + "_SnapshotRepository"); gridFS = GridFSBuckets.create(mongodatabase, nodename + "_snapshot_data"); mongodatabase.getCollection("snapshot").createIndex(new BasicDBObject("dataVersion", 1).append("projectionId", 1)); }
public MongoDbProtobufProjection(ActorRef eventStore, MongoClient client) { super(eventStore); mongodatabase = client.getDatabase(nodename + "_SnapshotRepository"); gridFS = GridFSBuckets.create(mongodatabase, nodename + "_snapshot_data"); mongodatabase.getCollection("snapshot").createIndex(new BasicDBObject("dataVersion", 1).append("projectionId", 1)); }
/**
 * Creates a container (a GridFS bucket) for the given directory by uploading a
 * small marker file into it; the bucket is created implicitly on first write.
 *
 * @param bucketDirectory directory whose container name determines the bucket
 * @throws BaseException if the marker file cannot be uploaded
 */
@Override
public void createContainer(BucketDirectory bucketDirectory) {
    LOGGER.debug("createContainer:" + bucketDirectory);
    GridFSBucket bucket = GridFSBuckets.create(database, bucketDirectory.getObjectHandle().getContainer());
    InputStream is = new ByteArrayInputStream(new Date().toString().getBytes());
    try {
        ObjectId objectId = bucket.uploadFromStream(BUCKET_ID_FILENAME, is);
        LOGGER.debug(" container file has been created " + BUCKET_ID_FILENAME + " with mongo id " + objectId.toString());
    } catch (MongoCommandException e) {
        if (e.getErrorMessage().contains("Too many open files")) {
            LOGGER.error("****************************************************");
            LOGGER.error("Due to the following \"Too many open files exception\"");
            LOGGER.error("PLEASE READ https://jira.adorsys.de/browse/DOC-22");
            LOGGER.error("****************************************************");
        }
        throw new BaseException("exception creating container for " + bucketDirectory, e);
    } finally {
        // BUGFIX: the original closed the stream only on the success path; the
        // throw inside the catch skipped IOUtils.closeQuietly. Close it always.
        IOUtils.closeQuietly(is);
    }
}
/**
 * Resolves the GridFS bucket that backs the container of the given path.
 *
 * @param bucketPath path whose container name selects the bucket
 * @return the corresponding GridFS bucket
 */
private GridFSBucket getGridFSBucket(BucketPath bucketPath) {
    String containerName = bucketPath.getObjectHandle().getContainer();
    return GridFSBuckets.create(database, containerName);
}
/**
 * Resolves the GridFS bucket that backs the given container directory.
 *
 * @param bucketDirectory directory whose container name selects the bucket
 * @return the corresponding GridFS bucket
 */
private GridFSBucket getGridFSBucket(BucketDirectory bucketDirectory) {
    String containerName = bucketDirectory.getObjectHandle().getContainer();
    return GridFSBuckets.create(database, containerName);
}
/**
 * Initializes the GridFS binary manager from the blob-provider configuration.
 * Rejects the removed legacy connection properties, derives the bucket name
 * (defaulting to {@code <providerId>.fs}), and wires up the GridFS bucket,
 * the backing {@code .files} collection, and the garbage collector.
 *
 * @param blobProviderId id of the blob provider being initialized
 * @param properties     provider configuration properties
 * @throws IOException propagated from the superclass initialization
 */
@Override
public void initialize(String blobProviderId, Map<String, String> properties) throws IOException {
    super.initialize(blobProviderId, properties);
    this.properties = properties;
    // The legacy server/dbname properties were removed; fail fast if present.
    boolean legacyConfig = !StringUtils.isBlank(properties.get(SERVER_PROPERTY))
            || !StringUtils.isBlank(properties.get(DBNAME_PROPERTY));
    if (legacyConfig) {
        throw new NuxeoException("Unable to initialize GridFS Binary Manager, properties " + SERVER_PROPERTY + " and " + DBNAME_PROPERTY + " has been removed. Please configure a connection!");
    }
    String bucketName = properties.get(BUCKET_PROPERTY);
    if (StringUtils.isBlank(bucketName)) {
        bucketName = blobProviderId + ".fs";
    }
    MongoDBConnectionService connectionService = Framework.getService(MongoDBConnectionService.class);
    MongoDatabase database = connectionService.getDatabase(BLOB_PROVIDER_CONNECTION_PREFIX + blobProviderId);
    gridFSBucket = GridFSBuckets.create(database, bucketName);
    filesColl = database.getCollection(bucketName + ".files");
    garbageCollector = new GridFSBinaryGarbageCollector(bucketName);
}
/**
 * Checks whether the container backing the given directory exists.
 *
 * @param bucketDirectory directory whose container name selects the bucket
 * @return {@code true} if the corresponding GridFS bucket exists
 */
@Override
public boolean containerExists(BucketDirectory bucketDirectory) {
    String containerName = bucketDirectory.getObjectHandle().getContainer();
    return containerExists(GridFSBuckets.create(database, containerName));
}
/**
 * Connects to MongoDB and reads the GridFS chunk size in bytes.
 *
 * @param vertx   the Vert.x instance used to run the blocking lookup
 * @param handler invoked with the chunk size once the lookup completes
 */
private void getChunkSize(Vertx vertx, Handler<AsyncResult<Integer>> handler) {
    vertx.<Integer>executeBlocking(promise -> {
        // Blocking driver call; the client is closed via try-with-resources.
        try (MongoClient client = new MongoClient(mongoConnector.serverAddress)) {
            MongoDatabase database = client.getDatabase(MongoDBTestConnector.MONGODB_DBNAME);
            promise.complete(GridFSBuckets.create(database).getChunkSizeBytes());
        }
    }, handler);
}
/**
 * Seeds the test store by uploading {@code CHUNK_CONTENT} into GridFS under a
 * filename derived from the given path.
 *
 * @param context the current test context (unused here)
 * @param vertx   the Vert.x instance used to run the blocking upload
 * @param path    directory part of the target filename
 * @param handler invoked with the stored filename once the upload completes
 */
@Override
protected void prepareData(TestContext context, Vertx vertx, String path, Handler<AsyncResult<String>> handler) {
    String filename = PathUtils.join(path, ID);
    vertx.<String>executeBlocking(promise -> {
        try (MongoClient client = new MongoClient(mongoConnector.serverAddress)) {
            MongoDatabase database = client.getDatabase(MongoDBTestConnector.MONGODB_DBNAME);
            byte[] payload = CHUNK_CONTENT.getBytes(StandardCharsets.UTF_8);
            GridFSBuckets.create(database).uploadFromStream(filename, new ByteArrayInputStream(payload));
            promise.complete(filename);
        }
    }, handler);
}
/**
 * Verifies that GridFS contains no files after a store delete.
 *
 * @param context the current test context used for assertions
 * @param vertx   the Vert.x instance used to run the blocking check
 * @param path    unused; the whole bucket is expected to be empty
 * @param handler invoked once validation completes
 */
@Override
protected void validateAfterStoreDelete(TestContext context, Vertx vertx, String path, Handler<AsyncResult<Void>> handler) {
    vertx.executeBlocking(promise -> {
        try (MongoClient client = new MongoClient(mongoConnector.serverAddress)) {
            MongoDatabase database = client.getDatabase(MongoDBTestConnector.MONGODB_DBNAME);
            GridFSFindIterable remaining = GridFSBuckets.create(database).find();
            context.assertTrue(Iterables.isEmpty(remaining));
        }
        promise.complete();
    }, handler);
}
}
/**
 * Deletes the container backing the given directory by dropping its GridFS
 * bucket (both the files and chunks collections).
 *
 * @param bucketDirectory directory whose container name selects the bucket
 */
@Override
public void deleteContainer(BucketDirectory bucketDirectory) {
    String containerName = bucketDirectory.getObjectHandle().getContainer();
    BucketPathUtil.checkContainerName(containerName);
    GridFSBuckets.create(database, containerName).drop();
}
// Open (or lazily create) the named GridFS bucket on the target database.
// NOTE(review): fragment — the enclosing method is not visible in this chunk,
// so the origins of db, dbName, and bucket cannot be confirmed here.
GridFSBucket gridFSBucket = GridFSBuckets.create( db.getDatabase(dbName), bucket);
/**
 * Verifies that a store add wrote exactly the expected chunk content: fetches
 * the first GridFS file, downloads it, and compares it to {@code CHUNK_CONTENT}.
 *
 * @param context the current test context used for assertions
 * @param vertx   the Vert.x instance used to run the blocking check
 * @param path    unused; the first stored file is validated
 * @param handler invoked once validation completes
 */
@Override
protected void validateAfterStoreAdd(TestContext context, Vertx vertx, String path, Handler<AsyncResult<Void>> handler) {
    vertx.executeBlocking(f -> {
        try (MongoClient client = new MongoClient(mongoConnector.serverAddress)) {
            MongoDatabase db = client.getDatabase(MongoDBTestConnector.MONGODB_DBNAME);
            GridFSBucket gridFS = GridFSBuckets.create(db);
            GridFSFindIterable files = gridFS.find();
            GridFSFile file = files.first();
            // BUGFIX: find().first() returns null when nothing was stored;
            // fail with a clear assertion instead of an opaque NPE below.
            context.assertNotNull(file);
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            gridFS.downloadToStream(file.getFilename(), baos);
            String contents = new String(baos.toByteArray(), StandardCharsets.UTF_8);
            context.assertEquals(CHUNK_CONTENT, contents);
        }
        f.complete();
    }, handler);
}