/**
 * Returns all job types having state information.
 *
 * @return set containing job types.
 */
@Override
public Set<String> findAllJobTypes() {
    return stream(collection.find().maxTime(500, TimeUnit.MILLISECONDS).spliterator(), false)
            .map(doc -> doc.getString(ID))
            .collect(toSet());
}
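
// All of the snippets in this section build on the same idiom: FindIterable (via
// MongoIterable) implements Iterable, so its spliterator() can be wrapped with
// StreamSupport.stream(spliterator, false) to obtain a sequential Stream over the
// cursor. Several snippets also rely on static imports such as
// java.util.stream.StreamSupport.stream and java.util.stream.Collectors.toSet.
// The class below is a minimal, self-contained sketch of the idiom; the class and
// field names are illustrative and not taken from the projects above.
import com.mongodb.client.MongoCollection;
import org.bson.Document;

import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

class JobTypeRepositorySketch {

    private static final String ID = "_id";

    private final MongoCollection<Document> collection;

    JobTypeRepositorySketch(MongoCollection<Document> collection) {
        this.collection = collection;
    }

    // Collects the _id of every document into a set, capping server-side execution
    // time at 500 ms, mirroring findAllJobTypes() above.
    Set<String> findAllJobTypes() {
        return StreamSupport.stream(
                        collection.find().maxTime(500, TimeUnit.MILLISECONDS).spliterator(), false)
                .map(doc -> doc.getString(ID))
                .collect(Collectors.toSet());
    }
}
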
@Nonnull
public List<Tag> findTags() {
    readLock.lock();
    try {
        Document filter = new Document(Tag.PROJECT_ID, projectId.getId());
        FindIterable<Document> documents = getCollection().find(filter);
        Stream<Document> docs = stream(documents.spliterator(), false);
        return docs.map(doc -> objectMapper.convertValue(doc, Tag.class))
                .collect(toImmutableList());
    } finally {
        readLock.unlock();
    }
}
@Override
public Stream<Reader> entityStates() {
    return StreamSupport
        .stream( db.getCollection( collectionName ).find().spliterator(), false )
        .map( eachEntity -> {
            Document bsonState = (Document) eachEntity.get( STATE_COLUMN );
            String jsonState = JSON.serialize( bsonState );
            return new StringReader( jsonState );
        } );
}
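
// Note: JSON.serialize above comes from the legacy com.mongodb.util.JSON helper,
// which is deprecated in recent driver versions. A hedged sketch of the same
// mapping using Document#toJson() instead (the output is MongoDB Extended JSON,
// so it is not guaranteed to be byte-for-byte identical to JSON.serialize):
@Override
public Stream<Reader> entityStates() {
    return StreamSupport
        .stream( db.getCollection( collectionName ).find().spliterator(), false )
        .map( eachEntity -> {
            Document bsonState = (Document) eachEntity.get( STATE_COLUMN );
            return new StringReader( bsonState.toJson() );
        } );
}
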
private List<String> getIdsFor(String queryField, String value, String resultField, MongoDBSession session) {
    FindIterable<Document> docs = getCollection(session)
            .find(MongoDBSerializationHelper.fieldMapToBson(queryField, value));
    return StreamSupport.stream(docs.spliterator(), false)
            .map(doc -> doc.getString(resultField))
            .collect(Collectors.toList());
}
@Override
public EventBatch loadEventsForAggregateId(final String aggregateType, String aggregateId, String fromJournalId) {
    final Document query = new Document("rid", aggregateId);
    if (fromJournalId != null) {
        query.append("jid", new Document("$gt", Long.parseLong(fromJournalId)));
    }
    final FindIterable<Document> dbObjects = MongoDbOperations.doDbOperation(
            () -> db.getCollection(aggregateType).find(query).sort(new Document("jid", 1)).limit(eventReadLimit));
    final List<Event> events = StreamSupport.stream(dbObjects.spliterator(), false)
            .map(document -> deSerialize(((Binary) document.get("d")).getData()))
            .collect(Collectors.toList());
    return new EventBatch(aggregateType, aggregateId, events, events.size() != eventReadLimit);
}
@Override
public Optional<? extends DomainEventData<?>> findLastSnapshot(MongoCollection<Document> snapshotCollection,
                                                               String aggregateIdentifier) {
    FindIterable<Document> cursor = snapshotCollection
            .find(eq(eventConfiguration.aggregateIdentifierProperty(), aggregateIdentifier))
            .sort(new BasicDBObject(eventConfiguration.sequenceNumberProperty(), ORDER_DESC))
            .limit(1);
    return stream(cursor.spliterator(), false).findFirst().map(this::extractSnapshot);
}
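
// Since the query above is already limited to a single document, the same result
// can be obtained without streaming at all. A sketch of an equivalent form using
// FindIterable#first(); this is an alternative, not the library's actual code:
@Override
public Optional<? extends DomainEventData<?>> findLastSnapshot(MongoCollection<Document> snapshotCollection,
                                                               String aggregateIdentifier) {
    Document latest = snapshotCollection
            .find(eq(eventConfiguration.aggregateIdentifierProperty(), aggregateIdentifier))
            .sort(new BasicDBObject(eventConfiguration.sequenceNumberProperty(), ORDER_DESC))
            .first();
    return Optional.ofNullable(latest).map(this::extractSnapshot);
}
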
@Override
public List<Suite> listSuites(DBKey dbKey) throws StorageException {
    MongoCollection<Document> metadata = getMetadataCollection(dbKey);
    LOGGER.debug("Fetching all suites for company: `{}`, project: `{}`.",
            dbKey.getCompany(), dbKey.getProject());
    final FindIterable<Document> found = metadata
            .find()
            .sort(Sorts.descending(SUITE_VERSION_PARAM_NAME));
    return StreamSupport.stream(found.spliterator(), false)
            .map(document -> new DocumentConverter(document).toSuite())
            .collect(Collectors.toList());
}
@Override
public List<? extends DomainEventData<?>> findDomainEvents(MongoCollection<Document> collection,
                                                           String aggregateIdentifier,
                                                           long firstSequenceNumber, int batchSize) {
    FindIterable<Document> cursor = collection
            .find(and(eq(eventConfiguration.aggregateIdentifierProperty(), aggregateIdentifier),
                    gte(eventConfiguration.sequenceNumberProperty(), firstSequenceNumber)))
            .sort(new BasicDBObject(eventConfiguration().sequenceNumberProperty(), ORDER_ASC));
    cursor = cursor.batchSize(batchSize);
    return stream(cursor.spliterator(), false)
            .flatMap(this::extractEvents)
            .filter(event -> event.getSequenceNumber() >= firstSequenceNumber)
            .collect(Collectors.toList());
}
@Override
public Messages.EventWrapperBatch loadEventWrappersForAggregateId(String aggregateType, String aggregateRootId,
                                                                  long fromJournalId) {
    final Document query = new Document("rid", aggregateRootId);
    query.append("jid", new Document("$gt", fromJournalId));
    final FindIterable<Document> dbObjects = MongoDbOperations.doDbOperation(
            () -> db.getCollection(aggregateType).find(query).sort(new Document("jid", 1)).limit(eventReadLimit));
    final List<Messages.EventWrapper> events = StreamSupport.stream(dbObjects.spliterator(), false)
            .map(document -> deSerialize(document, aggregateType))
            .collect(Collectors.toList());
    return Messages.EventWrapperBatch.newBuilder()
            .setAggregateRootId(aggregateRootId)
            .setAggregateType(aggregateType)
            .setReadAllEvents(events.size() != eventReadLimit)
            .addAllEvents(events)
            .build();
}
public List<SuiteVersion> listSuiteVersions(DBKey dbKey, String name) throws StorageException {
    MongoCollection<Document> metadata = getMetadataCollection(dbKey);
    LOGGER.debug("Fetching all versions of suite: `{}`, company: `{}`, project: `{}`.",
            name, dbKey.getCompany(), dbKey.getProject());
    final FindIterable<Document> found = metadata
            .find(Filters.eq(SUITE_PARAM_NAME, name))
            .sort(Sorts.descending(SUITE_VERSION_PARAM_NAME));
    return StreamSupport.stream(found.spliterator(), false)
            .map(document -> new SuiteVersion(
                    document.getString(CORRELATION_ID_PARAM_NAME),
                    document.getInteger(SUITE_VERSION_PARAM_NAME)))
            .collect(Collectors.toList());
}
@Override
public Stream<? extends DomainEventData<?>> findSnapshots(MongoCollection<Document> snapshotCollection,
                                                          String aggregateIdentifier) {
    FindIterable<Document> cursor = snapshotCollection
            .find(eq(eventConfiguration.aggregateIdentifierProperty(), aggregateIdentifier))
            .sort(orderBy(descending(eventConfiguration.sequenceNumberProperty())));
    return stream(cursor.spliterator(), false).map(this::extractSnapshot);
}
/**
 * Perform a <em>find</em> operation on the given collection using the given {@link BsonQuery}.
 * @param <R> Operation result type
 * @param context Resolution context
 * @param collection The collection to use
 * @param resultType Expected query result type
 * @param query Query definition
 * @return The operation result
 */
private static <R> Stream<R> find(MongoResolutionContext<ClientSession> context,
        MongoCollection<Document> collection, Class<? extends R> resultType, BsonQuery query) {
    // converter
    final DocumentConverter<R> documentConverter = MongoOperations.getAndCheckConverter(query, resultType);
    // iterable
    final FindIterable<Document> fi = context.getClientSession()
            .map(cs -> collection.find(cs))
            .orElse(collection.find());
    // configure
    Optional<Bson> projection = MongoOperations.configure(query, new SyncFindOperationConfigurator(fi));
    // trace
    context.trace("FIND query", () -> MongoOperations.traceQuery(context, query, projection.orElse(null)));
    // stream with converter mapper
    return StreamSupport.stream(fi.spliterator(), false)
            .map(document -> documentConverter.convert(context, document));
}