/**
 * Finds GridFS files matching the given query, applying its sort order.
 *
 * @param query lookup criteria; must not be {@literal null}
 * @return iterable over the matching GridFS files
 */
public GridFSFindIterable find(Query query) {
    Assert.notNull(query, "Query must not be null!");
    // Map both the criteria and the sort document through the field mapper
    // before handing them to the driver.
    Document mappedCriteria = getMappedQuery(query.getQueryObject());
    Document mappedSort = getMappedQuery(query.getSortObject());
    return getGridFs().find(mappedCriteria).sort(mappedSort);
}
/**
 * Finds GridFS files matching the given query, applying its sort order.
 *
 * @param query lookup criteria; must not be {@literal null}
 * @return iterable over the matching GridFS files
 */
public GridFSFindIterable find(Query query) {
    Assert.notNull(query, "Query must not be null!");
    // Both the criteria and the sort document are run through the field
    // mapper before being handed to the driver.
    Document queryObject = getMappedQuery(query.getQueryObject());
    Document sortObject = getMappedQuery(query.getSortObject());
    return getGridFs().find(queryObject).sort(sortObject);
}
/**
 * Collects the filenames of all GridFS files whose associated-metadata
 * unique-id field matches the given id.
 *
 * @param uniqueId id to match against {@code ASSOCIATED_METADATA.DOCUMENT_UNIQUE_ID_KEY}
 * @return filenames of every matching file (possibly empty)
 */
@Override
public List<String> getAssociatedFilenames(String uniqueId) throws Exception {
    GridFSBucket gridFS = createGridFSConnection();
    ArrayList<String> fileNames = new ArrayList<>();
    Document criteria = new Document(ASSOCIATED_METADATA + "." + DOCUMENT_UNIQUE_ID_KEY, uniqueId);
    for (com.mongodb.client.gridfs.model.GridFSFile gridFSFile : gridFS.find(criteria)) {
        fileNames.add(gridFSFile.getFilename());
    }
    return fileNames;
}
/**
 * Deletes every GridFS file stored under {@code filename} except the one
 * identified by {@code objectID}.
 *
 * @param bucket   bucket to operate on
 * @param filename filename whose duplicates are pruned
 * @param objectID the single file id to keep
 */
private void deleteAllExcept(GridFSBucket bucket, String filename, ObjectId objectID) {
    // Collect ids first so nothing is deleted while the find cursor is open.
    List<ObjectId> candidates = new ArrayList<>();
    bucket.find(Filters.eq(FILENAME_TAG, filename))
            .forEach((Consumer<GridFSFile>) file -> candidates.add(file.getObjectId()));
    LOGGER.debug("**** number of files to delete:" + candidates.size());
    for (ObjectId candidate : candidates) {
        if (candidate.equals(objectID)) {
            continue; // this is the copy we keep
        }
        LOGGER.debug("**** delete:" + candidate);
        bucket.delete(candidate);
    }
}
/**
 * Loads all associated documents stored for the given unique id.
 *
 * @param uniqueId  id to match against {@code ASSOCIATED_METADATA.DOCUMENT_UNIQUE_ID_KEY}
 * @param fetchType how much of each document to load; {@code NONE} short-circuits
 *                  to an empty list without querying
 * @return the loaded documents (possibly empty)
 */
@Override
public List<AssociatedDocument> getAssociatedDocuments(String uniqueId, FetchType fetchType) throws Exception {
    GridFSBucket gridFS = createGridFSConnection();
    List<AssociatedDocument> assocDocs = new ArrayList<>();
    if (FetchType.NONE.equals(fetchType)) {
        return assocDocs;
    }
    Document criteria = new Document(ASSOCIATED_METADATA + "." + DOCUMENT_UNIQUE_ID_KEY, uniqueId);
    for (GridFSFile file : gridFS.find(criteria)) {
        assocDocs.add(loadGridFSToAssociatedDocument(gridFS, file, fetchType));
    }
    return assocDocs;
}
/**
 * Returns whether a blob exists at the given path.
 *
 * @param bucketPath path whose container and filename are checked
 * @return {@code true} if the container exists and holds at least one file
 *         with the path's name
 */
@Override
public boolean blobExists(BucketPath bucketPath) {
    LOGGER.debug("start blob Exists for " + bucketPath);
    GridFSBucket bucket = getGridFSBucket(bucketPath);
    if (!containerExists(bucket)) {
        return false;
    }
    String filename = bucketPath.getObjectHandle().getName();
    // first() is enough to answer existence; the original collected every
    // matching ObjectId into a list only to test isEmpty().
    boolean exists = bucket.find(Filters.eq(FILENAME_TAG, filename)).first() != null;
    LOGGER.debug("finished blob Exists for " + bucketPath);
    return exists;
}
/**
 * Resolves a binary by its digest.
 *
 * @param digest digest stored as the GridFS filename
 * @return a {@link GridFSBinary} handle if a file with that digest exists,
 *         otherwise {@code null}
 */
@Override
public Binary getBinary(String digest) {
    GridFSFile stored = gridFSBucket.find(Filters.eq(METADATA_PROPERTY_FILENAME, digest)).first();
    return stored == null ? null : new GridFSBinary(digest, blobProviderId);
}
/**
 * Removes every GridFS file stored under the path's filename.
 *
 * @param bucketPath path identifying the bucket and the filename to delete
 */
@Override
public void removeBlob(BucketPath bucketPath) {
    LOGGER.debug("start removeBlob for " + bucketPath);
    GridFSBucket bucket = getGridFSBucket(bucketPath);
    checkBucketExists(bucket);
    String filename = bucketPath.getObjectHandle().getName();
    // Collect ids first so no document is deleted while the find cursor is open.
    List<ObjectId> matchedIds = new ArrayList<>();
    bucket.find(Filters.eq(FILENAME_TAG, filename))
            .forEach((Consumer<GridFSFile>) file -> matchedIds.add(file.getObjectId()));
    for (ObjectId matchedId : matchedIds) {
        bucket.delete(matchedId);
    }
    LOGGER.debug("finished removeBlob for " + bucketPath);
}
/**
 * Deletes every GridFS file associated with the given unique id.
 *
 * @param uniqueId id matched against {@code ASSOCIATED_METADATA.DOCUMENT_UNIQUE_ID_KEY}
 */
@Override
public void deleteAssociatedDocuments(String uniqueId) {
    GridFSBucket gridFS = createGridFSConnection();
    // Cast to java.util.function.Consumer instead of the deprecated
    // com.mongodb.Block (removed in driver 4.x); fully qualified so no new
    // import is required. Matches the Consumer style used elsewhere here.
    gridFS.find(new Document(ASSOCIATED_METADATA + "." + DOCUMENT_UNIQUE_ID_KEY, uniqueId))
            .forEach((java.util.function.Consumer<com.mongodb.client.gridfs.model.GridFSFile>) gridFSFile ->
                    gridFS.delete(gridFSFile.getObjectId()));
}
/**
 * Returns whether the bucket's marker file ({@code BUCKET_ID_FILENAME})
 * exists, i.e. whether the container was ever created.
 */
private boolean containerExists(GridFSBucket bucket) {
    // first() != null is equivalent to iterator().hasNext() here.
    return bucket.find(Filters.eq(FILENAME_TAG, BUCKET_ID_FILENAME)).first() != null;
}
/**
 * Replaces previously stored GridFS content for this object: best-effort
 * deletes every file whose name starts with {@code getId()}, then uploads
 * the new payload under {@code getId() + fileid}.
 *
 * @param data   payload to store
 * @param fileid suffix appended to the object id to form the filename
 */
protected void saveDataToGridFS(byte[] data, String fileid) {
    // Quote the id so regex metacharacters in it cannot widen or break the
    // match; the original interpolated getId() into the pattern verbatim.
    Bson fileQuery = Filters.regex("filename", java.util.regex.Pattern.quote(getId()) + ".*");
    try {
        // Snapshot the matches before deleting so the cursor is not
        // invalidated mid-iteration.
        final ArrayList<GridFSFile> existing = new ArrayList<>();
        gridFS.find(fileQuery).into(existing);
        for (GridFSFile file : existing) {
            gridFS.delete(file.getObjectId());
        }
    } catch (Exception e) {
        // Best effort: stale copies are tolerated and the upload proceeds.
        log.error("failed to delete old gridfsfile", e);
    }
    gridFS.uploadFromStream(getId() + fileid, new ByteArrayInputStream(data));
}
/**
 * Replaces previously stored GridFS content for this object: best-effort
 * deletes every file whose name starts with {@code getId()}, then uploads
 * the new payload under {@code getId() + fileid}.
 *
 * @param data   payload to store
 * @param fileid suffix appended to the object id to form the filename
 */
protected void saveDataToGridFS(byte[] data, String fileid) {
    // Quote the id so regex metacharacters in it cannot widen or break the
    // match; the original interpolated getId() into the pattern verbatim.
    Bson fileQuery = Filters.regex("filename", java.util.regex.Pattern.quote(getId()) + ".*");
    try {
        // Snapshot the matches before deleting so the cursor is not
        // invalidated mid-iteration.
        final ArrayList<GridFSFile> existing = new ArrayList<>();
        gridFS.find(fileQuery).into(existing);
        for (GridFSFile file : existing) {
            gridFS.delete(file.getObjectId());
        }
    } catch (Exception e) {
        // Best effort: stale copies are tolerated and the upload proceeds.
        log.error("failed to delete old gridfsfile", e);
    }
    gridFS.uploadFromStream(getId() + fileid, new ByteArrayInputStream(data));
}
/**
 * Looks up a single GridFS file by its {@code _id}.
 *
 * @param gridFSBucket bucket to search
 * @param fileId       the file's {@code _id} value
 * @return the matching file, or {@code null} if none exists
 */
private GridFSFile getFileForId(GridFSBucket gridFSBucket, BsonValue fileId) {
    GridFSFindIterable match = gridFSBucket.find(eq("_id", fileId)).limit(1);
    // tryNext() yields null on an empty result instead of throwing.
    return match.iterator().tryNext();
}
/**
 * Loads one associated document identified by unique id and file name.
 *
 * @param uniqueId  owning document id
 * @param fileName  associated file name
 * @param fetchType how much to load; {@code NONE} short-circuits to {@code null}
 * @return the loaded document, or {@code null} if absent or fetch is NONE
 */
@Override
public AssociatedDocument getAssociatedDocument(String uniqueId, String fileName, FetchType fetchType) throws Exception {
    GridFSBucket gridFS = createGridFSConnection();
    if (FetchType.NONE.equals(fetchType)) {
        return null;
    }
    Document criteria = new Document(ASSOCIATED_METADATA + "." + FILE_UNIQUE_ID_KEY, getGridFsId(uniqueId, fileName));
    GridFSFile file = gridFS.find(criteria).first();
    return file == null ? null : loadGridFSToAssociatedDocument(gridFS, file, fetchType);
}
/**
 * Finishes the sweep: visits every GridFS file whose metadata lacks the
 * {@code msKey} marker, accumulates count/size statistics into
 * {@code status}, and optionally deletes each one.
 *
 * @param delete whether unmarked files are actually deleted (vs. only counted)
 */
@Override
public void stop(boolean delete) {
    // NOTE(review): files missing METADATA_PROPERTY_METADATA.msKey appear to
    // be those not marked during the current GC pass — confirm against the
    // mark phase. The trailing "//" guards the chain from formatter joining.
    gridFSBucket.find(Filters.exists(String.format("%s.%s", METADATA_PROPERTY_METADATA, msKey), false)) //
            .forEach((Block<GridFSFile>) file -> {
                status.numBinariesGC += 1;
                status.sizeBinariesGC += file.getLength();
                if (delete) {
                    gridFSBucket.delete(file.getId());
                }
            });
    // Reset so a subsequent run starts with a fresh timestamp.
    startTime = 0;
}
}
/**
 * Deletes the GridFS file(s) stored for the given unique id / file name pair.
 *
 * @param uniqueId owning document id
 * @param fileName associated file name
 */
@Override
public void deleteAssociatedDocument(String uniqueId, String fileName) {
    GridFSBucket gridFS = createGridFSConnection();
    // Cast to java.util.function.Consumer instead of the deprecated
    // com.mongodb.Block (removed in driver 4.x); fully qualified so no new
    // import is required. Matches the Consumer style used elsewhere here.
    gridFS.find(new Document(ASSOCIATED_METADATA + "." + FILE_UNIQUE_ID_KEY, getGridFsId(uniqueId, fileName)))
            .forEach((java.util.function.Consumer<com.mongodb.client.gridfs.model.GridFSFile>) gridFSFile ->
                    gridFS.delete(gridFSFile.getObjectId()));
}
/**
 * Verifies (on a worker thread) that the GridFS bucket is empty after a
 * store delete, then completes the handler.
 *
 * @param context test context used for the assertion
 * @param vertx   vertx instance providing the blocking executor
 * @param path    unused here; part of the template-method signature
 * @param handler completion callback for the blocking check
 */
@Override
protected void validateAfterStoreDelete(TestContext context, Vertx vertx, String path, Handler<AsyncResult<Void>> handler) {
    vertx.executeBlocking(f -> {
        try (MongoClient client = new MongoClient(mongoConnector.serverAddress)) {
            MongoDatabase db = client.getDatabase(MongoDBTestConnector.MONGODB_DBNAME);
            GridFSFindIterable remaining = GridFSBuckets.create(db).find();
            context.assertTrue(Iterables.isEmpty(remaining));
        }
        f.complete();
    }, handler);
}
}
/**
 * Deletes every GridFS file whose name starts with the directory prefix
 * (case-insensitive match).
 *
 * @param bucketDirectory directory to clear; must have a non-null name
 * @throws StorageConnectionException if the directory has no name
 */
@Override
public void removeBlobFolder(BucketDirectory bucketDirectory) {
    LOGGER.debug("start removeBlobFolder for " + bucketDirectory);
    if (bucketDirectory.getObjectHandle().getName() == null) {
        throw new StorageConnectionException("not a valid bucket directory " + bucketDirectory);
    }
    GridFSBucket bucket = getGridFSBucket(bucketDirectory);
    String directoryname = bucketDirectory.getObjectHandle().getName() + BucketPath.BUCKET_SEPARATOR;
    String pattern = "^" + directoryname + ".*";
    for (GridFSFile file : bucket.find(regex(FILENAME_TAG, pattern, "i"))) {
        bucket.delete(file.getObjectId());
    }
    LOGGER.debug("finished removeBlobFolder for " + bucketDirectory);
}
/**
 * Deletes every GridFS file previously stored for the given field of the
 * given document.
 *
 * @param fieldName         field whose content is being replaced
 * @param documentId        owning document id
 * @param gridFSFilesBucket bucket holding the files
 */
private void deleteExistingContent(String fieldName, Object documentId, GridFSBucket gridFSFilesBucket) {
    // Filters.and(...) around a single predicate was redundant; the bare eq
    // produces the same matches.
    GridFSFindIterable results = gridFSFilesBucket.find(
            Filters.eq( "filename", fileName( fieldName, documentId ) ) );
    // try-with-resources guarantees the server-side cursor is released.
    try ( MongoCursor<GridFSFile> iterator = results.iterator() ) {
        while ( iterator.hasNext() ) {
            gridFSFilesBucket.delete( iterator.next().getId() );
        }
    }
}
/**
 * Deletes every GridFS file previously stored for the given field of the
 * given document.
 *
 * @param fieldName         field whose content is being replaced
 * @param documentId        owning document id
 * @param gridFSFilesBucket bucket holding the files
 */
private void deleteExistingContent(String fieldName, Object documentId, GridFSBucket gridFSFilesBucket) {
    // try-with-resources guarantees the server-side cursor is released.
    try ( MongoCursor<GridFSFile> cursor = gridFSFilesBucket.find(
            Filters.and( Filters.eq( "filename", fileName( fieldName, documentId ) ) ) ).iterator() ) {
        while ( cursor.hasNext() ) {
            GridFSFile match = cursor.next();
            gridFSFilesBucket.delete( match.getId() );
        }
    }
}