/**
 * Puts {@code documentValue} into {@code command} under {@code key}, unless the
 * value is {@code null} or an empty document (in which case this is a no-op).
 */
static void putIfNotNullOrEmpty(final BsonDocument command, final String key, final BsonDocument documentValue) {
    final boolean hasContent = documentValue != null && !documentValue.isEmpty();
    if (hasContent) {
        command.put(key, documentValue);
    }
}
// Delegates emptiness to the wrapped document returned by getUnwrapped().
@Override public boolean isEmpty() { return getUnwrapped().isEmpty(); }
/**
 * Appends a {@code readConcern} element to {@code extraElements} when the session
 * context yields a non-empty read concern document; otherwise adds nothing.
 */
private void addReadConcernDocument(final List<BsonElement> extraElements, final SessionContext sessionContext) {
    final BsonDocument readConcern = getReadConcernDocument(sessionContext);
    if (readConcern.isEmpty()) {
        return;
    }
    extraElements.add(new BsonElement("readConcern", readConcern));
}
/**
 * Appends a {@code readConcern} field to {@code commandDocument} derived from the
 * session context. Skipped entirely inside an active transaction, and skipped when
 * the derived read concern document is empty.
 *
 * @throws IllegalArgumentException if either argument is null (via {@code notNull})
 */
static void appendReadConcernToCommand(final SessionContext sessionContext, final BsonDocument commandDocument) {
    notNull("commandDocument", commandDocument);
    notNull("sessionContext", sessionContext);
    if (!sessionContext.hasActiveTransaction()) {
        final BsonDocument readConcern = getReadConcernDocument(sessionContext);
        if (!readConcern.isEmpty()) {
            commandDocument.append("readConcern", readConcern);
        }
    }
}
/**
 * Decodes {@code bsonDocument} into a {@link Document}, or returns {@code null}
 * when the document has no fields.
 */
@Nullable
private Document asDocumentOrNull(final BsonDocument bsonDocument) {
    if (!bsonDocument.isEmpty()) {
        final BsonDocumentReader reader = new BsonDocumentReader(bsonDocument);
        return documentCodec.decode(reader, DecoderContext.builder().build());
    }
    return null;
}
}
/**
 * Renders the conjunction of {@code filters} as a single document, merging each
 * rendered filter's top-level fields via {@code addClause}. An empty conjunction
 * renders as {@code {"$and": []}}.
 */
@Override
public <TDocument> BsonDocument toBsonDocument(final Class<TDocument> documentClass, final CodecRegistry codecRegistry) {
    final BsonDocument combined = new BsonDocument();
    for (final Bson current : filters) {
        final BsonDocument rendered = current.toBsonDocument(documentClass, codecRegistry);
        for (final Map.Entry<String, BsonValue> clause : rendered.entrySet()) {
            addClause(combined, clause);
        }
    }
    if (combined.isEmpty()) {
        combined.append("$and", new BsonArray());
    }
    return combined;
}
if (!resumeToken.isEmpty()) { iterable = iterable.resumeAfter(resumeToken);
if (document.isEmpty()) { document = filter != null ? filter : new BsonDocument(); } else if (filter != null) {
writer.writeName("u"); if (update.getType() == WriteRequest.Type.UPDATE && update.getUpdate().isEmpty()) { throw new IllegalArgumentException("Invalid BSON document for an update");
if (!resumeToken.isEmpty()) { iterable = iterable.resumeAfter(resumeToken);
/**
 * Stores {@code documentValue} in {@code command} under {@code key}; null or empty
 * documents are silently ignored.
 */
static void putIfNotNullOrEmpty(final BsonDocument command, final String key, final BsonDocument documentValue) {
    if (documentValue == null || documentValue.isEmpty()) {
        return;
    }
    command.put(key, documentValue);
}
/**
 * Adds a {@code readConcern} element derived from the session context to
 * {@code extraElements}, unless the derived document is empty.
 */
private void addReadConcernDocument(final List<BsonElement> extraElements, final SessionContext sessionContext) {
    final BsonDocument readConcernDocument = getReadConcernDocument(sessionContext);
    final boolean hasReadConcern = !readConcernDocument.isEmpty();
    if (hasReadConcern) {
        final BsonElement element = new BsonElement("readConcern", readConcernDocument);
        extraElements.add(element);
    }
}
/**
 * Materializes this resource as a {@link BsonDocument}, inserting or removing the
 * embedded and links sections depending on whether they have content.
 *
 * @return the backing {@code properties} document (mutated in place)
 */
public BsonDocument asBsonDocument() {
    if (embedded == null || embedded.isEmpty()) {
        properties.remove(EMBEDDED);
    } else {
        properties.append(EMBEDDED, embedded);
    }
    if (links == null || links.isEmpty()) {
        properties.remove(LINKS);
    } else {
        properties.append(LINKS, links);
    }
    // Removed a redundant trailing `if (links != null && !links.isEmpty()) append(LINKS, links)`
    // block: the if/else above already handles LINKS, and re-appending the same key/value
    // was a no-op duplicate.
    return properties;
}
/**
 * Builds a write model for a CDC sink record. Records whose value document is
 * empty are treated as Debezium tombstones and skipped.
 */
@Override
public Optional<WriteModel<BsonDocument>> handle(SinkDocument doc) {
    final BsonDocument keyDoc = doc.getKeyDoc().orElseGet(BsonDocument::new);
    final BsonDocument valueDoc = doc.getValueDoc().orElseGet(BsonDocument::new);
    if (!valueDoc.isEmpty()) {
        final WriteModel<BsonDocument> model =
                getCdcOperation(valueDoc).perform(new SinkDocument(keyDoc, valueDoc));
        return Optional.of(model);
    }
    logger.debug("skipping debezium tombstone event for kafka topic compaction");
    return Optional.empty();
}
/**
 * Converts a CDC sink record into a write model. An empty value document is a
 * Debezium tombstone and yields no write model.
 */
@Override
public Optional<WriteModel<BsonDocument>> handle(SinkDocument doc) {
    final BsonDocument key = doc.getKeyDoc().orElseGet(BsonDocument::new);
    final BsonDocument value = doc.getValueDoc().orElseGet(BsonDocument::new);
    if (value.isEmpty()) {
        logger.debug("skipping debezium tombstone event for kafka topic compaction");
        return Optional.empty();
    }
    return Optional.of(getCdcOperation(value).perform(new SinkDocument(key, value)));
}
/**
 * Builds the query used to match a document by {@code _id}, AND-ed with the shard
 * key fields (when present) and a caller-supplied filter (when non-null and non-empty).
 */
private Bson getIdFilter(Object documentId, BsonDocument filter, BsonDocument shardedKeys) {
    Bson query = eq("_id", documentId);
    if (shardedKeys != null) {
        query = and(query, shardedKeys);
    }
    final boolean hasExtraFilter = filter != null && !filter.isEmpty();
    return hasExtraFilter ? and(query, filter) : query;
}
/**
 * Builds a write model for a CDC sink record. A record whose key carries an id but
 * whose value document is empty is a Debezium tombstone and is skipped.
 *
 * @throws DataException if the key document is missing (required in CDC mode)
 */
@Override
public Optional<WriteModel<BsonDocument>> handle(SinkDocument doc) {
    BsonDocument keyDoc = doc.getKeyDoc().orElseThrow(
            () -> new DataException("error: key document must not be missing for CDC mode")
    );
    BsonDocument valueDoc = doc.getValueDoc()
            .orElseGet(BsonDocument::new);
    if (keyDoc.containsKey(JSON_ID_FIELD_PATH) && valueDoc.isEmpty()) {
        logger.debug("skipping debezium tombstone event for kafka topic compaction");
        return Optional.empty();
    }
    // Parameterized SLF4J logging: avoids eagerly building "key: " + toString()
    // strings on every record when DEBUG is disabled.
    logger.debug("key: {}", keyDoc);
    logger.debug("value: {}", valueDoc);
    return Optional.of(getCdcOperation(valueDoc).perform(doc));
}
/**
 * Adds the session's read concern to {@code commandDocument} under the
 * {@code readConcern} key. No-op when a transaction is active or when the read
 * concern document is empty.
 *
 * @throws IllegalArgumentException if either argument is null (via {@code notNull})
 */
static void appendReadConcernToCommand(final SessionContext sessionContext, final BsonDocument commandDocument) {
    notNull("commandDocument", commandDocument);
    notNull("sessionContext", sessionContext);
    if (sessionContext.hasActiveTransaction()) {
        return;
    }
    final BsonDocument readConcernDocument = getReadConcernDocument(sessionContext);
    final boolean shouldAppend = !readConcernDocument.isEmpty();
    if (shouldAppend) {
        commandDocument.append("readConcern", readConcernDocument);
    }
}
/**
 * Converts a {@link BsonDocument} to a {@link Document} using {@code documentCodec},
 * mapping an empty document to {@code null}.
 */
@Nullable
private Document asDocumentOrNull(final BsonDocument bsonDocument) {
    if (bsonDocument.isEmpty()) {
        return null;
    }
    final BsonDocumentReader reader = new BsonDocumentReader(bsonDocument);
    final DecoderContext context = DecoderContext.builder().build();
    return documentCodec.decode(reader, context);
}
}
/**
 * Merges every rendered filter's top-level clauses into one document via
 * {@code addClause}; when no clauses were produced, falls back to an explicit
 * empty {@code $and} array.
 */
@Override
public <TDocument> BsonDocument toBsonDocument(final Class<TDocument> documentClass, final CodecRegistry codecRegistry) {
    final BsonDocument result = new BsonDocument();
    for (final Bson filterElement : filters) {
        final BsonDocument renderedDoc = filterElement.toBsonDocument(documentClass, codecRegistry);
        renderedDoc.entrySet().forEach(entry -> addClause(result, entry));
    }
    return result.isEmpty() ? result.append("$and", new BsonArray()) : result;
}