/**
 * Executes the query and returns the matching documents without conversion.
 */
@Override
public QueryResult nativeGet(Query query, QueryOptions options) {
    return mongoDBCollection.find(parseQuery(query), options);
}
/**
 * Aggregation shortcut that applies no document converter.
 */
public QueryResult<Document> aggregate(List<? extends Bson> operations, QueryOptions options) {
    // Delegate to the converter-aware overload with a null converter.
    return aggregate(operations, null, options);
}
/**
 * Counts the documents matching the given query.
 */
@Override
public QueryResult<Long> count(Query query) {
    Bson mongoQuery = parseQuery(query);
    return mongoDBCollection.count(mongoQuery);
}
public MongoDBStudyConfigurationDBAdaptor(MongoDataStore db, String collectionName) { collection = db.getCollection(collectionName) .withReadPreference(ReadPreference.primary()) .withWriteConcern(WriteConcern.ACKNOWLEDGED); collection.createIndex(new Document("studyName", 1), new ObjectMap(MongoDBCollection.UNIQUE, true)); mongoLock = new MongoLock(collection, "_lock"); }
/**
 * Iterates over the raw documents matching the query, bypassing any converter.
 */
@Override
public Iterator nativeIterator(Query query, QueryOptions options) {
    Bson mongoQuery = parseQuery(query);
    return mongoDBCollection.nativeQuery().find(mongoQuery, options).iterator();
}
public long cleanTrash(long timeStamp) { MongoDBCollection collection = getTrashCollection(); // Try to get one variant beyond the ts. If exists, remove by query. Otherwise, remove the whole collection. QueryOptions queryOptions = new QueryOptions(QueryOptions.LIMIT, 1).append(QueryOptions.SKIP_COUNT, true); int results = collection.find(gt(DocumentToTrashVariantConverter.TIMESTAMP_FIELD, timeStamp), queryOptions).getNumResults(); if (results > 0) { return collection.remove(lte(DocumentToTrashVariantConverter.TIMESTAMP_FIELD, timeStamp), null).first().getDeletedCount(); } else { long numElements = collection.count().first(); db.dropCollection(configuration.getString(COLLECTION_TRASH.key(), COLLECTION_TRASH.defaultValue())); return numElements; } }
/**
 * Returns the distinct values of {@code field} among the documents matching the query.
 */
@Override
public QueryResult distinct(Query query, String field) {
    return mongoDBCollection.distinct(field, parseQuery(query));
}
/**
 * Pulls the given samples (matched by their private uid) out of the sample array
 * of every file matching the query.
 *
 * @param query     Query selecting the files to update
 * @param sampleIds Sample uids to remove from the files
 * @return Number of modified files
 * @throws CatalogDBException on query parsing errors
 */
@Override
public QueryResult<Long> extractSampleFromFiles(Query query, List<Long> sampleIds) throws CatalogDBException {
    long startTime = startQuery();
    Document pullSamples = new Document("$pull",
            new Document(QueryParams.SAMPLES.key(), new Document(PRIVATE_UID, new Document("$in", sampleIds))));
    QueryResult<UpdateResult> updateResult = fileCollection.update(
            parseQuery(query, true), pullSamples, new QueryOptions(MongoDBCollection.MULTI, true));
    return endQuery("Extract samples from files", startTime,
            Collections.singletonList(updateResult.first().getModifiedCount()));
}
// NOTE(review): fragment of a larger purge routine — enclosing method not visible here.
// Ensure the trash collection is indexed by timestamp before writing into it.
trashCollection.createIndex(new Document(DocumentToTrashVariantConverter.TIMESTAMP_FIELD, 1), new ObjectMap());
// Upsert-with-replace: write each document into the trash collection, matched by its query.
trashCollection.update(queries, documentsToInsert, new QueryOptions(UPSERT, true).append(REPLACE, true));
// Remove the trashed ids from the live variants collection and record how many were deleted.
long deletedCount = variantsCollection.remove(and(purgeQuery, in("_id", documentsToDelete)), new QueryOptions(MULTI, true)) .first().getDeletedCount();
/**
 * Inserts the raw individual map as a MongoDB document.
 * Note: {@code userId} is not recorded by this operation.
 */
@Override
public void nativeInsert(Map<String, Object> individual, String userId) throws CatalogDBException {
    individualCollection.insert(getMongoDBDocument(individual, "individual"), null);
}
/**
 * Remove all the variants from the database resulting of executing the query.
 *
 * @param query   Query to be executed in the database
 * @param options Query modifiers, accepted values are: include, exclude, limit, skip, sort and count
 * @return A QueryResult with the number of deleted variants
 */
public QueryResult remove(Query query, QueryOptions options) {
    Bson mongoQuery = queryParser.parseQuery(query);
    // Pass the object itself so SLF4J formats lazily: the (potentially expensive)
    // toString() is only evaluated when debug logging is actually enabled.
    logger.debug("Delete to be executed: '{}'", mongoQuery);
    return variantsCollection.remove(mongoQuery, options);
}
private void createIndexes(MongoDBCollection mongoCollection, List<Map<String, ObjectMap>> indexes) { QueryResult<Document> index = mongoCollection.getIndex(); // We store the existing indexes Set<String> existingIndexes = index.getResult() .stream() .map(document -> (String) document.get("name")) .collect(Collectors.toSet()); if (index.getNumResults() != indexes.size() + 1) { // It is + 1 because mongo always create the _id index by default for (Map<String, ObjectMap> userIndex : indexes) { String indexName = ""; Document keys = new Document(); Iterator fieldsIterator = userIndex.get("fields").entrySet().iterator(); while (fieldsIterator.hasNext()) { Map.Entry pair = (Map.Entry) fieldsIterator.next(); keys.append((String) pair.getKey(), pair.getValue()); if (!indexName.isEmpty()) { indexName += "_"; } indexName += pair.getKey() + "_" + pair.getValue(); } if (!existingIndexes.contains(indexName)) { mongoCollection.createIndex(keys, new ObjectMap(userIndex.get("options"))); } } } }
public long getNewAutoIncrementId(String field) { //, MongoDBCollection metaCollection Bson query = METADATA_QUERY; Document projection = new Document(field, true); Bson inc = Updates.inc(field, 1L); QueryOptions queryOptions = new QueryOptions("returnNew", true); QueryResult<Document> result = metaCollection.findAndUpdate(query, projection, null, inc, queryOptions); return result.getResult().get(0).getLong(field); }
@Override public boolean pre() { Document index = new Document(StageDocumentToVariantConverter.STUDY_FILE_FIELD, 1); // index.put(ID_FIELD, 1); collection.createIndex(index, new ObjectMap(MongoDBCollection.BACKGROUND, true)); return true; }
@Override public Iterator nativeIterator(Query query, QueryOptions options) { List<Bson> aggregateList = unwindAndMatchTranscripts(query, options); return mongoDBCollection.nativeQuery().aggregate(aggregateList, options).iterator(); // return mongoDBCollection.nativeQuery().find(bson, options).iterator(); }
/**
 * Distinct values of {@code field} among the documents matching the query.
 */
@Override
public QueryResult distinct(Query query, String field) {
    Bson mongoQuery = parseQuery(query);
    return mongoDBCollection.distinct(field, mongoQuery);
}
/**
 * Pulls the given file ids out of the files array of every dataset matching the query.
 *
 * @param query   Query selecting the datasets to update
 * @param fileIds File ids to remove from the datasets
 * @return Number of modified datasets
 * @throws CatalogDBException on query parsing errors
 */
@Override
public QueryResult<Long> extractFilesFromDatasets(Query query, List<Long> fileIds) throws CatalogDBException {
    long startTime = startQuery();
    Document pullFiles = new Document("$pull",
            new Document(QueryParams.FILES.key(), new Document("$in", fileIds)));
    QueryResult<UpdateResult> updateResult = datasetCollection.update(
            parseQuery(query, false), pullFiles, new QueryOptions(MongoDBCollection.MULTI, true));
    return endQuery("Extract files from datasets", startTime,
            Collections.singletonList(updateResult.first().getModifiedCount()));
}
/**
 * Inserts the raw file map as a MongoDB document.
 * Note: {@code userId} is not recorded by this operation.
 */
@Override
public void nativeInsert(Map<String, Object> file, String userId) throws CatalogDBException {
    // Fix: the entity label passed to getMongoDBDocument was "sample" (copy-paste from the
    // sample adaptor); it must name the entity actually inserted so validation/error
    // messages refer to "file".
    Document fileDocument = getMongoDBDocument(file, "file");
    fileCollection.insert(fileDocument, null);
}
/**
 * Deletes the variant file metadata identified by the (study, file) pair.
 *
 * @throws IllegalArgumentException if no document with the composed id was deleted
 */
@Override
public void delete(int study, int file) {
    String id = DocumentToVariantFileMetadataConverter.buildId(study, file);
    DeleteResult result = db.getCollection(collectionName)
            .remove(Filters.eq("_id", id), null)
            .first();
    // Exactly one document must have matched the composed id.
    if (result.getDeletedCount() != 1) {
        throw new IllegalArgumentException("Unable to delete VariantSource " + id);
    }
}
@Deprecated static long getNewAutoIncrementId(String field, MongoDBCollection metaCollection) { Bson query = Filters.eq(PRIVATE_ID, MongoDBAdaptorFactory.METADATA_OBJECT_ID); Document projection = new Document(field, true); Bson inc = Updates.inc(field, 1); QueryOptions queryOptions = new QueryOptions("returnNew", true); QueryResult<Document> result = metaCollection.findAndUpdate(query, projection, null, inc, queryOptions); // return (int) Float.parseFloat(result.getResult().get(0).get(field).toString()); return result.getResult().get(0).getInteger(field); }