/**
 * Builds the underlying change stream read operation for the given result codec,
 * carrying over this publisher's configured options (batch size, collation,
 * maxAwaitTime, resume token, and operation start time).
 */
private <S> ReadOperation<BatchCursor<S>> createChangeStreamOperation(final Codec<S> codec) {
    ChangeStreamOperation<S> operation =
            new ChangeStreamOperation<S>(namespace, fullDocument, createBsonDocumentList(pipeline), codec, changeStreamLevel);
    // Each setter mutates and returns the operation; plain statements are equivalent to the fluent chain.
    operation.batchSize(getBatchSize());
    operation.collation(collation);
    operation.maxAwaitTime(maxAwaitTimeMS, MILLISECONDS);
    operation.resumeAfter(resumeToken);
    operation.startAtOperationTime(startAtOperationTime);
    return operation;
}
/**
 * Applies {@code function} to the wrapped cursor, transparently re-establishing the change
 * stream and retrying on retryable errors. Loops until the function either succeeds or fails
 * with a non-retryable error, which is rethrown as a {@link MongoException}.
 *
 * @param function the cursor operation to perform (e.g. next/tryNext)
 * @param <R>      the operation's result type
 * @return the result of {@code function} once it completes without a retryable error
 */
<R> R resumeableOperation(final Function<BatchCursor<RawBsonDocument>, R> function) {
    while (true) {
        try {
            return function.apply(wrapped);
        } catch (Throwable t) {
            if (!isRetryableError(t)) {
                // Non-retryable: surface to the caller, wrapped as a MongoException if needed.
                throw MongoException.fromThrowableNonNull(t);
            }
        }
        // Retryable failure: discard the dead cursor before resuming.
        wrapped.close();
        if (resumeToken != null) {
            // A resume token takes precedence over startAtOperationTime when resuming.
            changeStreamOperation.startOperationTimeForResume(null);
            changeStreamOperation.resumeAfter(resumeToken);
        }
        wrapped = ((ChangeStreamBatchCursor<T>) changeStreamOperation.execute(binding)).getWrapped();
        binding.release(); // release the new change stream batch cursor's reference to the binding
    }
}
/**
 * Re-establishes the change stream asynchronously after a retryable error, then re-runs the
 * original cursor operation. On failure to re-create the stream, the error is passed straight
 * to {@code callback}.
 *
 * @param asyncBlock the cursor operation to retry once the stream is re-established
 * @param callback   receives either the retried operation's result or the resume failure
 */
private void retryOperation(final AsyncBlock asyncBlock, final SingleResultCallback<List<RawBsonDocument>> callback) {
    if (resumeToken != null) {
        // A resume token takes precedence over startAtOperationTime when resuming.
        changeStreamOperation.startOperationTimeForResume(null);
        changeStreamOperation.resumeAfter(resumeToken);
    }
    changeStreamOperation.executeAsync(binding, new SingleResultCallback<AsyncBatchCursor<T>>() {
        @Override
        public void onResult(final AsyncBatchCursor<T> result, final Throwable t) {
            if (t != null) {
                callback.onResult(null, t);
            } else {
                // Swap in the fresh inner cursor, then retry the original operation.
                wrapped = ((AsyncChangeStreamBatchCursor<T>) result).getWrapped();
                binding.release(); // release the new change stream batch cursor's reference to the binding
                resumeableOperation(asyncBlock, callback);
            }
        }
    });
}
/**
 * Construct a new instance.
 *
 * @param namespace the database and collection namespace for the operation.
 * @param fullDocument the fullDocument value
 * @param pipeline the aggregation pipeline.
 * @param decoder the decoder for the result documents.
 * @param changeStreamLevel the level at which the change stream is observing
 *
 * @since 3.8
 */
public ChangeStreamOperation(final MongoNamespace namespace, final FullDocument fullDocument, final List<BsonDocument> pipeline,
                             final Decoder<T> decoder, final ChangeStreamLevel changeStreamLevel) {
    // Validate namespace and pipeline eagerly, consistent with the notNull checks on the other parameters.
    this.wrapped = new AggregateOperationImpl<RawBsonDocument>(notNull("namespace", namespace), notNull("pipeline", pipeline),
            RAW_BSON_DOCUMENT_CODEC, getAggregateTarget(), getPipelineCreator());
    this.fullDocument = notNull("fullDocument", fullDocument);
    this.decoder = notNull("decoder", decoder);
    this.changeStreamLevel = notNull("changeStreamLevel", changeStreamLevel);
}
/**
 * Decodes a batch of raw change stream documents into user results, recording the resume
 * token ({@code _id}) of each document as it goes so the stream can be resumed later.
 *
 * @param rawDocuments the raw batch from the server; may be null (e.g. an empty getMore)
 * @return the decoded results, or null if {@code rawDocuments} was null
 * @throws MongoChangeStreamException if any document lacks an {@code _id} resume token
 */
private List<T> convertResults(final List<RawBsonDocument> rawDocuments) {
    List<T> results = null;
    if (rawDocuments != null) {
        // Presize: exactly one decoded result per raw document.
        results = new ArrayList<T>(rawDocuments.size());
        for (RawBsonDocument rawDocument : rawDocuments) {
            if (!rawDocument.containsKey("_id")) {
                throw new MongoChangeStreamException("Cannot provide resume functionality when the resume token is missing.");
            }
            // Track the latest token so a resume picks up after this document.
            resumeToken = rawDocument.getDocument("_id");
            results.add(rawDocument.decode(changeStreamOperation.getDecoder()));
        }
    }
    return results;
}
/**
 * Construct a new instance.
 *
 * @param namespace the database and collection namespace for the operation.
 * @param fullDocument the fullDocument value
 * @param pipeline the aggregation pipeline.
 * @param decoder the decoder for the result documents.
 * @param changeStreamLevel the level at which the change stream is observing
 *
 * @since 3.8
 */
public ChangeStreamOperation(final MongoNamespace namespace, final FullDocument fullDocument, final List<BsonDocument> pipeline,
                             final Decoder<T> decoder, final ChangeStreamLevel changeStreamLevel) {
    // Validate namespace and pipeline eagerly, consistent with the notNull checks on the other parameters.
    this.wrapped = new AggregateOperationImpl<RawBsonDocument>(notNull("namespace", namespace), notNull("pipeline", pipeline),
            RAW_BSON_DOCUMENT_CODEC, getAggregateTarget(), getPipelineCreator());
    this.fullDocument = notNull("fullDocument", fullDocument);
    this.decoder = notNull("decoder", decoder);
    this.changeStreamLevel = notNull("changeStreamLevel", changeStreamLevel);
}
// NOTE(review): fragment — tail of an anonymous SingleResultCallback whose enclosing
// statement starts outside this view (it is passed to a method along with LOGGER).
@Override
public void onResult(final List<RawBsonDocument> rawDocuments, final Throwable t) {
    if (t != null) {
        callback.onResult(null, t);
    } else if (rawDocuments != null) {
        // Decode each raw document, tracking its _id as the resume token.
        List<T> results = new ArrayList<T>();
        for (RawBsonDocument rawDocument : rawDocuments) {
            if (!rawDocument.containsKey("_id")) {
                // Without an _id there is no resume token, so resuming is impossible.
                callback.onResult(null,
                        new MongoChangeStreamException("Cannot provide resume functionality when the resume token is missing.")
                );
                return;
            }
            resumeToken = rawDocument.getDocument("_id");
            results.add(rawDocument.decode(changeStreamOperation.getDecoder()));
        }
        callback.onResult(results, null);
    } else {
        // Null batch: propagate as-is (e.g. cursor exhausted).
        callback.onResult(null, null);
    }
}
}, LOGGER);
/**
 * Builds the underlying change stream read operation for the given result codec,
 * carrying over this publisher's configured options (batch size, collation,
 * maxAwaitTime, resume token, and operation start time).
 */
private <S> ReadOperation<BatchCursor<S>> createChangeStreamOperation(final Codec<S> codec) {
    ChangeStreamOperation<S> operation =
            new ChangeStreamOperation<S>(namespace, fullDocument, createBsonDocumentList(pipeline), codec, changeStreamLevel);
    // Each setter mutates and returns the operation; plain statements are equivalent to the fluent chain.
    operation.batchSize(getBatchSize());
    operation.collation(collation);
    operation.maxAwaitTime(maxAwaitTimeMS, MILLISECONDS);
    operation.resumeAfter(resumeToken);
    operation.startAtOperationTime(startAtOperationTime);
    return operation;
}
/**
 * Re-establishes the change stream asynchronously after a retryable error, then re-runs the
 * original cursor operation. On failure to re-create the stream, the error is passed straight
 * to {@code callback}.
 *
 * @param asyncBlock the cursor operation to retry once the stream is re-established
 * @param callback   receives either the retried operation's result or the resume failure
 */
private void retryOperation(final AsyncBlock asyncBlock, final SingleResultCallback<List<RawBsonDocument>> callback) {
    if (resumeToken != null) {
        // A resume token takes precedence over startAtOperationTime when resuming.
        changeStreamOperation.startOperationTimeForResume(null);
        changeStreamOperation.resumeAfter(resumeToken);
    }
    changeStreamOperation.executeAsync(binding, new SingleResultCallback<AsyncBatchCursor<T>>() {
        @Override
        public void onResult(final AsyncBatchCursor<T> result, final Throwable t) {
            if (t != null) {
                callback.onResult(null, t);
            } else {
                // Swap in the fresh inner cursor, then retry the original operation.
                wrapped = ((AsyncChangeStreamBatchCursor<T>) result).getWrapped();
                binding.release(); // release the new change stream batch cursor's reference to the binding
                resumeableOperation(asyncBlock, callback);
            }
        }
    });
}
/**
 * Applies {@code function} to the wrapped cursor, transparently re-establishing the change
 * stream and retrying on retryable errors. Loops until the function either succeeds or fails
 * with a non-retryable error, which is rethrown as a {@link MongoException}.
 *
 * @param function the cursor operation to perform (e.g. next/tryNext)
 * @param <R>      the operation's result type
 * @return the result of {@code function} once it completes without a retryable error
 */
<R> R resumeableOperation(final Function<BatchCursor<RawBsonDocument>, R> function) {
    while (true) {
        try {
            return function.apply(wrapped);
        } catch (Throwable t) {
            if (!isRetryableError(t)) {
                // Non-retryable: surface to the caller, wrapped as a MongoException if needed.
                throw MongoException.fromThrowableNonNull(t);
            }
        }
        // Retryable failure: discard the dead cursor before resuming.
        wrapped.close();
        if (resumeToken != null) {
            // A resume token takes precedence over startAtOperationTime when resuming.
            changeStreamOperation.startOperationTimeForResume(null);
            changeStreamOperation.resumeAfter(resumeToken);
        }
        wrapped = ((ChangeStreamBatchCursor<T>) changeStreamOperation.execute(binding)).getWrapped();
        binding.release(); // release the new change stream batch cursor's reference to the binding
    }
}
/**
 * Decodes a batch of raw change stream documents into user results, recording the resume
 * token ({@code _id}) of each document as it goes so the stream can be resumed later.
 *
 * @param rawDocuments the raw batch from the server; may be null (e.g. an empty getMore)
 * @return the decoded results, or null if {@code rawDocuments} was null
 * @throws MongoChangeStreamException if any document lacks an {@code _id} resume token
 */
private List<T> convertResults(final List<RawBsonDocument> rawDocuments) {
    List<T> results = null;
    if (rawDocuments != null) {
        // Presize: exactly one decoded result per raw document.
        results = new ArrayList<T>(rawDocuments.size());
        for (RawBsonDocument rawDocument : rawDocuments) {
            if (!rawDocument.containsKey("_id")) {
                throw new MongoChangeStreamException("Cannot provide resume functionality when the resume token is missing.");
            }
            // Track the latest token so a resume picks up after this document.
            resumeToken = rawDocument.getDocument("_id");
            results.add(rawDocument.decode(changeStreamOperation.getDecoder()));
        }
    }
    return results;
}
/**
 * Builds the underlying change stream read operation for the given result codec,
 * carrying over this publisher's configured options (batch size, collation,
 * maxAwaitTime, resume token, and operation start time).
 */
private <S> ReadOperation<BatchCursor<S>> createChangeStreamOperation(final Codec<S> codec) {
    ChangeStreamOperation<S> operation =
            new ChangeStreamOperation<S>(namespace, fullDocument, createBsonDocumentList(pipeline), codec, changeStreamLevel);
    // Each setter mutates and returns the operation; plain statements are equivalent to the fluent chain.
    operation.batchSize(getBatchSize());
    operation.collation(collation);
    operation.maxAwaitTime(maxAwaitTimeMS, MILLISECONDS);
    operation.resumeAfter(resumeToken);
    operation.startAtOperationTime(startAtOperationTime);
    return operation;
}
// NOTE(review): fragment — tail of an anonymous SingleResultCallback whose enclosing
// statement starts outside this view (it is passed to a method along with LOGGER).
@Override
public void onResult(final List<RawBsonDocument> rawDocuments, final Throwable t) {
    if (t != null) {
        callback.onResult(null, t);
    } else if (rawDocuments != null) {
        // Decode each raw document, tracking its _id as the resume token.
        List<T> results = new ArrayList<T>();
        for (RawBsonDocument rawDocument : rawDocuments) {
            if (!rawDocument.containsKey("_id")) {
                // Without an _id there is no resume token, so resuming is impossible.
                callback.onResult(null,
                        new MongoChangeStreamException("Cannot provide resume functionality when the resume token is missing.")
                );
                return;
            }
            resumeToken = rawDocument.getDocument("_id");
            results.add(rawDocument.decode(changeStreamOperation.getDecoder()));
        }
        callback.onResult(results, null);
    } else {
        // Null batch: propagate as-is (e.g. cursor exhausted).
        callback.onResult(null, null);
    }
}
}, LOGGER);