@Override
public Source<NotUsed, NotUsed> updateLastSuccessfulStreamEnd(final Instant timestamp) {
    // MongoDB stores java.util.Date, not java.time.Instant, so convert first.
    final Document document = new Document().append(FIELD_TIMESTAMP, Date.from(timestamp));
    // NOTE(review): every call inserts a new document; presumably the collection is capped
    // or pruned elsewhere — verify, since retrieval reads back only a single document.
    return Source.fromPublisher(lastSuccessfulSearchSyncCollection.insertOne(document))
            .map(insertResult -> {
                LOGGER.debug("Successfully inserted timestamp for search synchronization: <{}>.", timestamp);
                return NotUsed.getInstance();
            });
}
@Override
public Source<NotUsed, NotUsed> updateLastSuccessfulStreamEnd(final Instant timestamp) {
    // MongoDB stores java.util.Date, not java.time.Instant, so convert first.
    final Document document = new Document().append(FIELD_TIMESTAMP, Date.from(timestamp));
    // NOTE(review): every call inserts a new document; presumably the collection is capped
    // or pruned elsewhere — verify, since retrieval reads back only a single document.
    return Source.fromPublisher(lastSuccessfulSearchSyncCollection.insertOne(document))
            .map(insertResult -> {
                LOGGER.debug("Successfully inserted timestamp for search synchronization: <{}>.", timestamp);
                return NotUsed.getInstance();
            });
}
/**
 * Reads the most recently stored synchronization timestamp from the collection.
 *
 * @return a source emitting the stored timestamp, or an empty Optional if the collection is empty.
 */
private Source<Optional<Instant>, NotUsed> retrieveLastSuccessfulStreamEndAsync() {
    return Source.fromPublisher(lastSuccessfulSearchSyncCollection.find())
            .limit(1)
            .flatMapConcat(document -> {
                final Instant lastTimestamp = document.getDate(FIELD_TIMESTAMP).toInstant();
                LOGGER.debug("Returning last timestamp of search synchronization: <{}>.", lastTimestamp);
                return Source.single(Optional.of(lastTimestamp));
            })
            // no document found -> emit an empty Optional instead of completing empty
            .orElse(Source.single(Optional.empty()));
}
/**
 * Reads the most recently stored synchronization timestamp from the collection.
 *
 * @return a source emitting the stored timestamp, or an empty Optional if the collection is empty.
 */
private Source<Optional<Instant>, NotUsed> retrieveLastSuccessfulStreamEndAsync() {
    return Source.fromPublisher(lastSuccessfulSearchSyncCollection.find())
            .limit(1)
            .flatMapConcat(document -> {
                final Instant lastTimestamp = document.getDate(FIELD_TIMESTAMP).toInstant();
                LOGGER.debug("Returning last timestamp of search synchronization: <{}>.", lastTimestamp);
                return Source.single(Optional.of(lastTimestamp));
            })
            // no document found -> emit an empty Optional instead of completing empty
            .orElse(Source.single(Optional.empty()));
}
private BiFunction<Accumulator<E, A>, Throwable, Void> handler(final Subscription sub) { return (acc, error) -> { if (acc != null) { Source.fromPublisher(publisher(sub)).runWith(acc.toSink().mapMaterializedValue(this::completeResultWith), materializer); } else { // On error sub.cancel(); result.completeExceptionally(error); } return null; }; }
private BiFunction<Accumulator<E, A>, Throwable, Void> handler(final Subscription sub) { return (acc, error) -> { if (acc != null) { Source.fromPublisher(publisher(sub)).runWith(acc.toSink().mapMaterializedValue(this::completeResultWith), materializer); } else { // On error sub.cancel(); result.completeExceptionally(error); } return null; }; }
@Override public Source<Document, NotUsed> execute(final MongoCollection<Document> collection, final Duration maxTime) { checkNotNull(collection, "collection to be aggregated"); final Publisher<Document> publisher = collection.aggregate(aggregationPipeline) .maxTime(maxTime.getSeconds(), TimeUnit.SECONDS) .allowDiskUse(true) // query is faster with cursor disabled .useCursor(false); return Source.<Document>fromPublisher(publisher); }
@Override public Source<Document, NotUsed> execute(final MongoCollection<Document> collection, final Duration maxTime) { checkNotNull(collection, "collection to be aggregated"); final Publisher<Document> publisher = collection.aggregate(aggregationPipeline) .maxTime(maxTime.getSeconds(), TimeUnit.SECONDS) .allowDiskUse(true) // query is faster with cursor disabled .useCursor(false); return Source.<Document>fromPublisher(publisher); }
/**
 * Drops the given index from the given collection. A non-existing index is
 * <strong>not</strong> treated as an error.
 *
 * @param collectionName name of the collection holding the index.
 * @param indexName name of the index to drop.
 * @return a source emitting {@link Success}.
 */
public Source<Success, NotUsed> dropIndex(final String collectionName, final String indexName) {
    // buildDropIndexRecovery swallows the "index not found" error
    return Source.fromPublisher(getCollection(collectionName).dropIndex(indexName))
            .recoverWith(buildDropIndexRecovery(indexName));
}
/**
 * Applies the given delete-update to the thing document and, if it matched, removes the
 * thing's entries from the policy index as well.
 *
 * @param thingId id of the thing being deleted.
 * @param filter filter selecting the thing document.
 * @param document the update to apply.
 * @return a source emitting {@code true} if the thing was matched and deleted, {@code false} otherwise.
 */
private Source<Boolean, NotUsed> delete(final String thingId, final Bson filter, final Bson document) {
    return Source.fromPublisher(collection.updateOne(filter, document))
            .flatMapConcat(updateResult -> {
                if (updateResult.getMatchedCount() <= 0) {
                    // nothing matched the filter, so there was nothing to delete
                    return Source.single(Boolean.FALSE);
                }
                // also remove the policy-index entries belonging to the deleted thing
                final Bson policyIndexRemoveFilter = PolicyUpdateFactory.createDeleteThingUpdate(thingId)
                        .getPolicyIndexRemoveFilter();
                return Source.fromPublisher(policiesCollection.deleteMany(policyIndexRemoveFilter))
                        .map(deleteManyResult -> Boolean.TRUE);
            });
}
/**
 * Applies the given delete-update to the thing document and, if it matched, removes the
 * thing's entries from the policy index as well.
 *
 * @param thingId id of the thing being deleted.
 * @param filter filter selecting the thing document.
 * @param document the update to apply.
 * @return a source emitting {@code true} if the thing was matched and deleted, {@code false} otherwise.
 */
private Source<Boolean, NotUsed> delete(final String thingId, final Bson filter, final Bson document) {
    return Source.fromPublisher(collection.updateOne(filter, document))
            .flatMapConcat(updateResult -> {
                if (updateResult.getMatchedCount() <= 0) {
                    // nothing matched the filter, so there was nothing to delete
                    return Source.single(Boolean.FALSE);
                }
                // also remove the policy-index entries belonging to the deleted thing
                final Bson policyIndexRemoveFilter = PolicyUpdateFactory.createDeleteThingUpdate(thingId)
                        .getPolicyIndexRemoveFilter();
                return Source.fromPublisher(policiesCollection.deleteMany(policyIndexRemoveFilter))
                        .map(deleteManyResult -> Boolean.TRUE);
            });
}
/**
 * Drops the given index from the given collection. A non-existing index is
 * <strong>not</strong> treated as an error.
 *
 * @param collectionName name of the collection holding the index.
 * @param indexName name of the index to drop.
 * @return a source emitting {@link Success}.
 */
public Source<Success, NotUsed> dropIndex(final String collectionName, final String indexName) {
    // buildDropIndexRecovery swallows the "index not found" error
    return Source.fromPublisher(getCollection(collectionName).dropIndex(indexName))
            .recoverWith(buildDropIndexRecovery(indexName));
}
/**
 * Retrieves the ids of all Things that use the given policy.
 *
 * @param policyId the policy id to look up.
 * @return a source emitting a single set of matching Thing ids (empty if none match).
 */
@Override
public final Source<Set<String>, NotUsed> getThingIdsForPolicy(final String policyId) {
    log.debug("Retrieving Thing ids for policy: <{}>", policyId);
    final Bson filter = eq(FIELD_POLICY_ID, policyId);
    return Source.fromPublisher(collection.find(filter)
            .projection(new BsonDocument(FIELD_ID, new BsonInt32(1))))
            .map(doc -> doc.getString(FIELD_ID))
            // The fold zero is created once when the stream blueprint is built; mutating it
            // would share (and keep accumulating into) the same HashSet across
            // materializations of this Source. Accumulate immutably instead.
            .fold(new HashSet<>(), (set, id) -> {
                final Set<String> extended = new HashSet<>(set);
                extended.add(id);
                return extended;
            });
}
/**
 * Retrieves the ids of all Things that use the given policy.
 *
 * @param policyId the policy id to look up.
 * @return a source emitting a single set of matching Thing ids (empty if none match).
 */
@Override
public final Source<Set<String>, NotUsed> getThingIdsForPolicy(final String policyId) {
    log.debug("Retrieving Thing ids for policy: <{}>", policyId);
    final Bson filter = eq(FIELD_POLICY_ID, policyId);
    return Source.fromPublisher(collection.find(filter)
            .projection(new BsonDocument(FIELD_ID, new BsonInt32(1))))
            .map(doc -> doc.getString(FIELD_ID))
            // The fold zero is created once when the stream blueprint is built; mutating it
            // would share (and keep accumulating into) the same HashSet across
            // materializations of this Source. Accumulate immutably instead.
            .fold(new HashSet<>(), (set, id) -> {
                final Set<String> extended = new HashSet<>(set);
                extended.add(id);
                return extended;
            });
}
/**
 * {@inheritDoc}
 */
@Override
public final Source<Boolean, NotUsed> updatePolicy(final Thing thing, final Enforcer policyEnforcer) {
    log.debug("Updating policy for Thing: <{}>", thing);
    final PolicyUpdate indexUpdate = PolicyUpdateFactory.createPolicyIndexUpdate(thing, policyEnforcer);
    final Source<?, NotUsed> updateSource = Source.fromPublisher(updatePolicy(thing, indexUpdate));
    // on failure retry once via the error-recovery source for this thing
    return updateSource
            .flatMapConcat(mapPolicyUpdateResult(indexUpdate))
            .recoverWithRetries(1, errorRecovery(getThingId(thing)));
}
/**
 * {@inheritDoc}
 */
@Override
public final Source<ThingMetadata, NotUsed> getThingMetadata(final String thingId) {
    log.debug("Retrieving Thing Metadata for Thing: <{}>", thingId);
    // only the revision and policy fields are needed to build the metadata
    final Bson projection = Projections.include(FIELD_REVISION, FIELD_POLICY_ID, FIELD_POLICY_REVISION);
    return Source.fromPublisher(collection.find(eq(FIELD_ID, thingId)).projection(projection))
            .map(mapThingMetadataToModel())
            // no document found -> fall back to the default metadata
            .orElse(defaultThingMetadata());
}
/**
 * {@inheritDoc}
 */
@Override
public final Source<ThingMetadata, NotUsed> getThingMetadata(final String thingId) {
    log.debug("Retrieving Thing Metadata for Thing: <{}>", thingId);
    // only the revision and policy fields are needed to build the metadata
    final Bson projection = Projections.include(FIELD_REVISION, FIELD_POLICY_ID, FIELD_POLICY_REVISION);
    return Source.fromPublisher(collection.find(eq(FIELD_ID, thingId)).projection(projection))
            .map(mapThingMetadataToModel())
            // no document found -> fall back to the default metadata
            .orElse(defaultThingMetadata());
}
/**
 * {@inheritDoc}
 */
@Override
protected final Source<Boolean, NotUsed> save(final Thing thing, final long revision, final long policyRevision) {
    log.debug("Saving Thing with revision <{}> and policy revision <{}>: <{}>", revision, policyRevision, thing);
    // only overwrite documents whose thing revision or policy revision is lower
    final Bson filter =
            filterWithLowerThingRevisionOrLowerPolicyRevision(getThingId(thing), revision, policyRevision);
    final Document update = toUpdate(ThingDocumentMapper.toDocument(thing), revision, policyRevision);
    final UpdateOptions upsertOptions = new UpdateOptions().upsert(true);
    return Source.fromPublisher(collection.updateOne(filter, update, upsertOptions))
            // saved if an existing document was matched or a new one was upserted
            .map(result -> null != result.getUpsertedId() || result.getMatchedCount() > 0);
}
/**
 * {@inheritDoc}
 */
@Override
protected final Source<Boolean, NotUsed> save(final Thing thing, final long revision, final long policyRevision) {
    log.debug("Saving Thing with revision <{}> and policy revision <{}>: <{}>", revision, policyRevision, thing);
    // only overwrite documents whose thing revision or policy revision is lower
    final Bson filter =
            filterWithLowerThingRevisionOrLowerPolicyRevision(getThingId(thing), revision, policyRevision);
    final Document update = toUpdate(ThingDocumentMapper.toDocument(thing), revision, policyRevision);
    final UpdateOptions upsertOptions = new UpdateOptions().upsert(true);
    return Source.fromPublisher(collection.updateOne(filter, update, upsertOptions))
            // saved if an existing document was matched or a new one was upserted
            .map(result -> null != result.getUpsertedId() || result.getMatchedCount() > 0);
}
/**
 * Retrieves the ids of Things whose indexed policy revision is older than the revision of
 * the given policy tag.
 *
 * @param policyTag the policy tag carrying the policy id and the current revision.
 * @return a source emitting the ids of outdated Things.
 */
@Override
public Source<String, NotUsed> getOutdatedThingIds(final PolicyTag policyTag) {
    log.debug("Retrieving outdated Thing ids with policy tag: <{}>", policyTag);
    final Bson filter = and(
            eq(FIELD_POLICY_ID, policyTag.getId()),
            lt(FIELD_POLICY_REVISION, policyTag.getRevision()));
    return Source.fromPublisher(collection.find(filter)
            .projection(new BsonDocument(FIELD_ID, new BsonInt32(1))))
            .map(document -> document.getString(FIELD_ID));
}