Refine search
/**
 * Perform the given operation on all of the documents inside the named collection in the named
 * database, if the database and collection both exist. The operation is called once for each
 * document, so if the collection exists but is empty then the function will not be called.
 *
 * @param client the MongoDB client; may not be null
 * @param dbName the name of the database; may not be null
 * @param collectionName the name of the collection; may not be null
 * @param documentOperation the operation to perform; may not be null
 */
public static void onCollectionDocuments(MongoClient client, String dbName, String collectionName,
                                         BlockingConsumer<Document> documentOperation) {
    onCollection(client, dbName, collectionName, collection -> {
        try (MongoCursor<Document> cursor = collection.find().iterator()) {
            while (cursor.hasNext()) {
                try {
                    documentOperation.accept(cursor.next());
                } catch (InterruptedException e) {
                    // Restore the interrupt status so callers further up the stack can observe it.
                    // The original called Thread.interrupted(), which CLEARS the flag and
                    // silently swallows the interruption signal.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }
    });
}
// NOTE(review): this fragment is truncated and does not compile as-is — the enclosing method
// signature and several closing braces are missing, and `findIterable` is read but never
// assigned. Presumably it should hold the collection.find(query).sort(sort).limit(recordcount)
// chain below — TODO confirm against the full source. Tokens left untouched.
MongoCursor<Document> cursor = null;
try {
    MongoCollection<Document> collection = database.getCollection(table);
    // The result of this chain is discarded as written — likely meant to be assigned to findIterable.
    collection.find(query).sort(sort).limit(recordcount);
    // Restrict returned fields to the requested one (INCLUDE presumably == 1 — verify).
    projection.put(fieldName, INCLUDE);
    findIterable.projection(projection);
    cursor = findIterable.iterator();
    if (!cursor.hasNext()) {
        System.err.println("Nothing found in scan for key " + startkey);
        return Status.ERROR;
        // NOTE(review): the if-block above is never closed in this fragment, so the loop
        // below is unreachable as written.
        while (cursor.hasNext()) {
            HashMap<String, ByteIterator> resultMap = new HashMap<String, ByteIterator>();
            Document obj = cursor.next();
            // Copy the document's fields into the result map.
            fillMap(resultMap, obj);
        }
        finally {
            if (cursor != null) {
                cursor.close();
/**
 * Returns the names recorded under {@code TABLE_NAME_KEY} in the schema collection
 * of the given database.
 *
 * @param schemaName the database whose schema collection is read
 * @return the set of table names found (may be empty)
 * @throws TableNotFoundException declared by the interface; not thrown here
 */
private Set<String> getTableMetadataNames(String schemaName) throws TableNotFoundException {
    MongoDatabase db = client.getDatabase(schemaName);
    HashSet<String> names = new HashSet<>();
    // Close the cursor deterministically; the original never closed it (resource leak,
    // MongoCursor extends Closeable).
    try (MongoCursor<Document> cursor = db.getCollection(schemaCollection)
            .find().projection(new Document(TABLE_NAME_KEY, true)).iterator()) {
        while (cursor.hasNext()) {
            names.add(cursor.next().getString(TABLE_NAME_KEY));
        }
    }
    return names;
}
/**
 * Finds the first document matching {@code query}, optionally restricted to the
 * given projection.
 *
 * @param query the filter to apply; may not be null
 * @param projection fields to include; may be null for the full document
 * @return the first matching document, or null when nothing matches
 */
private Document findOne(Document query, Document projection) {
    // Parameterize the collection type (the original used a raw MongoCollection).
    MongoCollection<Document> col = controllerService.getDatabase(databaseName).getCollection(collection);
    FindIterable<Document> find = projection != null ? col.find(query).projection(projection) : col.find(query);
    // try-with-resources: the original closed the cursor manually and would have
    // leaked it if next() threw.
    try (MongoCursor<Document> it = find.iterator()) {
        return it.hasNext() ? it.next() : null;
    }
}
}
/**
 * Ensures the locks collection has a single-field index on the global transaction id,
 * creating a unique index when none exists.
 *
 * @throws IllegalStateException if an index on the global field already exists but is not unique
 */
private void createLocksIndexIfNecessary() {
    String databaseName = CommonUtils.getApplication(this.endpoint).replaceAll("\\W", "_");
    MongoDatabase database = this.mongoClient.getDatabase(databaseName);
    MongoCollection<Document> locks = database.getCollection(CONSTANTS_TB_LOCKS);

    boolean transactionIndexExists = false;
    // try-with-resources replaces the original's manual null-check + IOUtils.closeQuietly.
    try (MongoCursor<Document> lockCursor = locks.listIndexes().iterator()) {
        while (!transactionIndexExists && lockCursor.hasNext()) {
            Document document = lockCursor.next();
            Boolean unique = document.getBoolean("unique");
            Document key = (Document) document.get("key");
            // The target index covers exactly one field: the global transaction id.
            transactionIndexExists = key.size() == 1 && key.containsKey(CONSTANTS_FD_GLOBAL);
            if (transactionIndexExists && (unique == null || !unique)) {
                // An existing non-unique index would defeat the locking guarantee.
                throw new IllegalStateException("Index on the global field exists but is not unique.");
            }
        }
    }

    if (!transactionIndexExists) {
        Document index = new Document(CONSTANTS_FD_GLOBAL, 1);
        locks.createIndex(index, new IndexOptions().unique(true));
    }
}
// NOTE(review): this fragment appears to splice two unrelated snippets together and is
// truncated — braces do not balance, `filter`, `mongo`, `maxSeconds`, `eventQueue`,
// `minimumEventsExpected`, `event` and the tailing `cursor` are not declared here.
// Tokens left untouched.
// First call's return value is discarded — likely a leftover duplicate of the next line.
db.getCollection("movies" + dbName);
MongoCollection<Document> collection = db.getCollection("movies" + dbName);
MongoCollection<Document> movies = collection;
// Bypass schema validation so the test document is always accepted.
InsertOneOptions insertOptions = new InsertOneOptions().bypassDocumentValidation(true);
movies.insertOne(Document.parse("{ \"name\":\"Starter Wars\"}"), insertOptions);
assertThat(collection.countDocuments()).isEqualTo(1);
FindIterable<Document> movieResults = collection.find(filter);
try (MongoCursor<Document> cursor = movieResults.iterator();) {
    assertThat(cursor.tryNext().getString("name")).isEqualTo("Starter Wars");
    assertThat(cursor.tryNext()).isNull();
    // NOTE(review): a second, unrelated oplog-tailing snippet begins here mid-expression.
    Filters.exists("fromMigrate", false)); // skip internal movements across shards
    FindIterable<Document> results = mongo.getDatabase("local")
            .getCollection("oplog.rs")
            .find(filter)
            .sort(new Document("$natural", 1))
            .oplogReplay(true) // tells Mongo to not rely on indexes
            .noCursorTimeout(true) // don't timeout waiting for events
            .cursorType(CursorType.TailableAwait);
    long stopTime = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(maxSeconds);
    // Poll until enough events arrive or the deadline passes.
    while (System.currentTimeMillis() < stopTime && eventQueue.size() < minimumEventsExpected) {
        while ((event = cursor.tryNext()) != null) {
            eventQueue.add(event);
// NOTE(review): truncated fragment — the surrounding method, the sort/projection/limit
// if-blocks' closing braces, and the declaration of `cursor` are missing; `mongoCollection`
// is also a raw type. Tokens left untouched.
final Document query = getQuery(context, session, input);
MongoCollection mongoCollection = clientService.getDatabase(database).getCollection(collection);
FindIterable<Document> find = mongoCollection.find(query);
if (context.getProperty(SORT).isSet()) {
    find = find.sort(Document.parse(context.getProperty(SORT).evaluateAttributeExpressions(input).getValue()));
    // NOTE(review): the projection/limit lines were presumably guarded by their own
    // isSet() checks in the full source — TODO confirm.
    find = find.projection(Document.parse(context.getProperty(PROJECTION).evaluateAttributeExpressions(input).getValue()));
    find = find.limit(context.getProperty(LIMIT).evaluateAttributeExpressions(input).asInteger());
    long count = 0L;
    writer.beginRecordSet();
    while (cursor.hasNext()) {
        Document next = cursor.next();
        // Normalize ObjectId _id values to their string form for serialization.
        if (next.get("_id") instanceof ObjectId) {
            next.put("_id", next.get("_id").toString());
/** * Use the given primary to read the oplog. * * @param primary the connection to the replica set's primary node; may not be null */ protected void readOplog(MongoClient primary) { BsonTimestamp oplogStart = source.lastOffsetTimestamp(replicaSet.replicaSetName()); logger.info("Reading oplog for '{}' primary {} starting at {}", replicaSet, primary.getAddress(), oplogStart); // Include none of the cluster-internal operations and only those events since the previous timestamp ... MongoCollection<Document> oplog = primary.getDatabase("local").getCollection("oplog.rs"); Bson filter = Filters.and(Filters.gt("ts", oplogStart), // start just after our last position Filters.exists("fromMigrate", false)); // skip internal movements across shards FindIterable<Document> results = oplog.find(filter) .sort(new Document("$natural", 1)) // force forwards collection scan .oplogReplay(true) // tells Mongo to not rely on indexes .cursorType(CursorType.TailableAwait); // tail and await new data // Read as much of the oplog as we can ... ServerAddress primaryAddress = primary.getAddress(); try (MongoCursor<Document> cursor = results.iterator()) { while (running.get() && cursor.hasNext()) { if (!handleOplogEvent(primaryAddress, cursor.next())) { // Something happened, and we're supposed to stop reading return; } } } }
public void recover(TransactionRecoveryCallback callback) { MongoCursor<Document> transactionCursor = null; try { String application = CommonUtils.getApplication(this.endpoint); String databaseName = application.replaceAll("\\W", "_"); MongoDatabase mdb = this.mongoClient.getDatabase(databaseName); MongoCollection<Document> transactions = mdb.getCollection(CONSTANTS_TB_TRANSACTIONS); FindIterable<Document> transactionItr = transactions.find(Filters.eq("coordinator", true)); for (transactionCursor = transactionItr.iterator(); transactionCursor.hasNext();) { Document document = transactionCursor.next(); boolean error = document.getBoolean("error"); String targetApplication = document.getString("system"); long expectVersion = document.getLong("version"); long actualVersion = this.versionManager.getInstanceVersion(targetApplication); if (error == false && actualVersion > 0 && actualVersion <= expectVersion) { continue; // ignore } callback.recover(this.reconstructTransactionArchive(document)); } } catch (RuntimeException error) { logger.error("Error occurred while recovering transaction.", error); } catch (Exception error) { logger.error("Error occurred while recovering transaction.", error); } finally { IOUtils.closeQuietly(transactionCursor); } }
// NOTE(review): truncated test fragment splicing three separate find/assert snippets —
// `movieResults` is declared three times, `filter` and `jonSnowId` are not declared here,
// and the try-blocks are never closed. Tokens left untouched.
MongoCollection<Document> contacts = db.getCollection("contacts");
// Bypass schema validation so the test document is always accepted.
InsertOneOptions insertOptions = new InsertOneOptions().bypassDocumentValidation(true);
contacts.insertOne(Document.parse("{ \"name\":\"Jon Snow\"}"), insertOptions);
assertThat(db.getCollection("contacts").countDocuments()).isEqualTo(1);
// Snippet 1: filtered find expecting exactly one match.
FindIterable<Document> movieResults = db.getCollection("contacts").find(filter);
try (MongoCursor<Document> cursor = movieResults.iterator();) {
    assertThat(cursor.tryNext().getString("name")).isEqualTo("Jon Snow");
    assertThat(cursor.tryNext()).isNull();
    // Snippet 2: unfiltered find collecting all names.
    FindIterable<Document> movieResults = db.getCollection("contacts").find();
    Set<String> foundNames = new HashSet<>();
    try (MongoCursor<Document> cursor = movieResults.iterator();) {
        while (cursor.hasNext()) {
            String name = cursor.next().getString("name");
            foundNames.add(name);
            // Snippet 3: filtered find capturing the matched document's ObjectId.
            FindIterable<Document> movieResults = db.getCollection("contacts").find(filter);
            try (MongoCursor<Document> cursor = movieResults.iterator();) {
                Document doc = cursor.tryNext();
                assertThat(doc.getString("name")).isEqualTo("Jon Snow");
                assertThat(cursor.tryNext()).isNull();
                jonSnowId.set(doc.getObjectId("_id"));
                assertThat(jonSnowId.get()).isNotNull();
// NOTE(review): truncated fragment — `length`, `globalXid`, `globalFilter`, the loop/try
// closing braces and the `return` are missing from view; cursor cleanup is presumably in an
// elided finally block. Tokens left untouched.
public int timingExecution(int batchSize) {
    String databaseName = CommonUtils.getApplication(this.endpoint).replaceAll("\\W", "_");
    MongoDatabase mdb = this.mongoClient.getDatabase(databaseName);
    MongoCollection<Document> collection = mdb.getCollection(CONSTANTS_TB_REMOVEDRESES);
    MongoCursor<Document> cursor = null;
    try {
        // Process at most batchSize removed-resource records per invocation.
        cursor = collection.find().limit(batchSize).iterator();
        for (; cursor.hasNext(); length++) {
            Document document = cursor.next();
            String globalValue = document.getString(CONSTANTS_FD_GLOBAL);
            String branchValue = document.getString(CONSTANTS_FD_BRANCH);
            byte[] global = ByteUtils.stringToByteArray(globalValue);
            byte[] branch = ByteUtils.stringToByteArray(branchValue);
            TransactionXid branchXid = xidFactory.createBranchXid(globalXid, branch);
            String resourceId = document.getString("resource_id");
            // Skip records that carry no resource id.
            if (StringUtils.isBlank(resourceId)) {
                continue;
                // NOTE(review): the if-block above is never closed in this fragment.
                Bson branchFilter = Filters.eq(CONSTANTS_FD_BRANCH, ByteUtils.byteArrayToString(branch));
                collection.deleteOne(Filters.and(globalFilter, branchFilter));
// NOTE(review): fragment starts mid-expression (receiver of .prepare is out of view) and
// `cursor`, `ids`, `multi`, `options`, `writeConcernToUse`, `collectionToUse` are not
// declared here. Tokens left untouched.
// Collect the _id of every document that the remove query would delete.
.prepare(collection.find(removeQuery).projection(MappedDocument.getIdOnlyProjection())) //
.iterator();
while (cursor.hasNext()) {
    ids.add(MappedDocument.of(cursor.next()).getId());
    // NOTE(review): another mid-expression splice — the condition for this ternary is elided.
    ? collection.withWriteConcern(writeConcernToUse)
    : collection;
    // Multi-delete uses deleteMany; single delete uses deleteOne — note the latter goes
    // through `collection`, not `collectionToUse` (possibly dropping the write concern;
    // verify against the full source).
    DeleteResult result = multi
        ? collectionToUse.deleteMany(removeQuery, options)
        : collection.deleteOne(removeQuery, options);
/**
 * Finds the identifiers of all sagas of the given type associated with the given
 * association value.
 *
 * @param sagaType the saga type to match
 * @param associationValue the association value to match
 * @return the (sorted) set of matching saga identifiers; may be empty
 */
@Override
public Set<String> findSagas(Class<?> sagaType, AssociationValue associationValue) {
    final BasicDBObject value = associationValueQuery(sagaType, associationValue);
    Set<String> found = new TreeSet<>();
    // Close the cursor when done; the original never closed it (resource leak).
    try (MongoCursor<Document> dbCursor = mongoTemplate.sagaCollection()
            .find(value).projection(include("sagaIdentifier")).iterator()) {
        while (dbCursor.hasNext()) {
            found.add((String) dbCursor.next().get("sagaIdentifier"));
        }
    }
    return found;
}
/**
 * Loads the serialized state of the entity with the given reference.
 *
 * @param entityReference the entity to load; may not be null
 * @return a reader over the entity's JSON state
 * @throws EntityNotFoundException if no document exists for the reference
 */
@Override
public Reader get( EntityReference entityReference )
    throws EntityStoreException
{
    // try-with-resources: the original never closed the cursor, leaking it on both the
    // found and not-found paths.
    try( MongoCursor<Document> cursor = db.getCollection( collectionName )
                                          .find( byIdentity( entityReference ) )
                                          .limit( 1 ).iterator() )
    {
        if( !cursor.hasNext() )
        {
            throw new EntityNotFoundException( entityReference );
        }
        Document bsonState = (Document) cursor.next().get( STATE_COLUMN );
        String jsonState = JSON.serialize( bsonState );
        return new StringReader( jsonState );
    }
}
/**
 * Returns the first document matching {@code query}, optionally projected and sorted.
 *
 * @param query  the filter; a null query matches all documents
 * @param fields the projection to apply; may be null
 * @param order  the sort order to apply; may be null
 * @return the first matching document, or null when nothing matches
 */
public Document FindOne(Document query, Document fields, Document order) {
    if (query == null) {
        query = new Document();
    }
    FindIterable<Document> fi = collection.find(query, Document.class);
    if (fields != null) {
        logger.info("project:" + fields.toJson()); // stray duplicate ';' removed
        fi.projection(fields);
    }
    if (order != null) {
        fi.sort(order);
    }
    // Close the cursor; the original never closed it (resource leak).
    try (MongoCursor<Document> c = fi.iterator()) {
        return c.hasNext() ? c.next() : null;
    }
}
// NOTE(review): fragment starts mid-ternary (the PROJECTION isSet() condition is out of
// view) and is truncated — `cursor`, `jsonTypeSetting`, `objectMapper`, `usePrettyPrint`,
// `charset` are not declared here and the while-loops are never closed. Tokens left untouched.
? Document.parse(context.getProperty(PROJECTION).evaluateAttributeExpressions(input).getValue())
: null;
final Document sort = context.getProperty(SORT).isSet()
    ? Document.parse(context.getProperty(SORT).evaluateAttributeExpressions(input).getValue())
    : null;
final FindIterable<Document> it = collection.find(query);
final Map<String, String> attributes = getAttributes(context, input, query, collection);
// Null projection/sort are presumably tolerated by the driver here — verify.
it.projection(projection);
it.sort(sort);
it.limit(context.getProperty(LIMIT).evaluateAttributeExpressions(input).asInteger());
List<Document> batch = new ArrayList<>();
while (cursor.hasNext()) {
    batch.add(cursor.next());
    FlowFile outgoingFlowFile;
    // One FlowFile per document, inheriting attributes from the input when present.
    while (cursor.hasNext()) {
        outgoingFlowFile = (input == null) ? session.create() : session.create(input);
        outgoingFlowFile = session.write(outgoingFlowFile, out -> {
            // NOTE(review): cursor.next() is called inside the write callback, after the
            // hasNext() check above — looks prone to double-advancing; confirm in full source.
            if (jsonTypeSetting.equals(JSON_TYPE_STANDARD)) {
                out.write(getObjectWriter(objectMapper, usePrettyPrint).writeValueAsString(cursor.next()).getBytes(charset));
            } else {
                out.write(cursor.next().toJson().getBytes(charset));
/**
 * Groups documents by the given field, counts occurrences per group, keeps only groups
 * whose count is at least {@code minCount}, and returns them sorted by count descending.
 *
 * @param collectionName the collection to aggregate over
 * @param match the $match filter applied before grouping
 * @param field the field to group and count by
 * @param minCount the minimum count (inclusive, $gte) for a group to be kept
 * @return insertion-ordered map of field value to count, in descending count order
 */
public LinkedHashMap<String, Integer> sortMap(String collectionName, Document match, String field, int minCount) {
    AggregateIterable<Document> aggregate = getDB().getCollection(collectionName).aggregate(
            Arrays.asList(
                    match(match)
                    , group("$" + field, Accumulators.sum("_count", 1))
                    , match(new Document("_count", new Document("$gte", minCount)))
                    , sort(new Document("_count", -1))
            )
    );
    LinkedHashMap<String, Integer> map = new LinkedHashMap<String, Integer>();
    // Close the aggregation cursor; the original never closed it (resource leak).
    try (MongoCursor<Document> iterator = aggregate.iterator()) {
        while (iterator.hasNext()) {
            Document next = iterator.next();
            map.put(next.getString("_id"), next.getInteger("_count"));
        }
    }
    return map;
}
/**
 * Reads only the {@code _etag} field of the document with the given id.
 *
 * @param dbName the database name
 * @param collName the collection name
 * @param documentId the value matched against {@code _id}
 * @return a document containing the _etag (and _id), or null when no document matches
 */
@Override
public Document getDocumentEtag(
        final String dbName,
        final String collName,
        final Object documentId) {
    MongoDatabase mdb = client.getDatabase(dbName);
    MongoCollection<Document> mcoll = mdb.getCollection(collName);
    FindIterable<Document> documents = mcoll
            .find(eq("_id", documentId))
            .projection(new Document("_etag", 1));
    // Close the cursor; the original called documents.iterator().tryNext() and leaked it.
    try (MongoCursor<Document> cursor = documents.iterator()) {
        return cursor.tryNext();
    }
}