// Fragment (method interior): assembles the Bson projection that carries the
// VALUES/HASHES/TYPES subdocuments plus derivation metadata through a pipeline
// stage. NOTE(review): the if-block's closing brace and the enclosing method
// signature are outside this excerpt — braces are unbalanced as shown.
fields.add(Projections.computed(VALUES, values));
fields.add(Projections.computed(HASHES, hashes));
// TYPES is only projected when at least one type expression was collected.
if (!types.isEmpty()) {
fields.add(Projections.computed(TYPES, types));
// LEVEL/TIMESTAMP: take the max of this document's value, the joined document's
// value (via getFieldExpr), and 0 — presumably so absent fields default to 0;
// TODO confirm against the full method.
fields.add(Projections.computed(LEVEL, new Document("$max", Arrays.asList("$" + LEVEL, getFieldExpr.apply(LEVEL), 0))));
fields.add(Projections.computed(TIMESTAMP, new Document("$max", Arrays.asList("$" + TIMESTAMP, getFieldExpr.apply(TIMESTAMP), 0))));
return Projections.fields(fields);
// (Byte-identical duplicate of the fragment above — this excerpt contains it twice.)
fields.add(Projections.computed(VALUES, values));
fields.add(Projections.computed(HASHES, hashes));
if (!types.isEmpty()) {
fields.add(Projections.computed(TYPES, types));
fields.add(Projections.computed(LEVEL, new Document("$max", Arrays.asList("$" + LEVEL, getFieldExpr.apply(LEVEL), 0))));
fields.add(Projections.computed(TIMESTAMP, new Document("$max", Arrays.asList("$" + TIMESTAMP, getFieldExpr.apply(TIMESTAMP), 0))));
return Projections.fields(fields);
// Fragment (method interior): builds the projection that materializes a derived
// triple (subject/predicate/object plus hashes and metadata) and appends the
// construction stages to triplePipeline. Enclosing method and closing braces
// are outside this excerpt.
fields.add(Projections.computed(SUBJECT, valueFieldExpr(SUBJECT)));
fields.add(Projections.computed(SUBJECT_HASH, hashFieldExpr(SUBJECT)));
fields.add(Projections.computed(PREDICATE, valueFieldExpr(PREDICATE)));
fields.add(Projections.computed(PREDICATE_HASH, hashFieldExpr(PREDICATE)));
fields.add(Projections.computed(OBJECT, valueFieldExpr(OBJECT)));
fields.add(Projections.computed(OBJECT_HASH, hashFieldExpr(OBJECT)));
// Object type falls back to DEFAULT_TYPE when no type expression is present.
fields.add(Projections.computed(OBJECT_TYPE, ConditionalOperators.ifNull(typeFieldExpr(OBJECT), DEFAULT_TYPE)));
// NOTE(review): OBJECT_LANGUAGE is computed from hashFieldExpr(OBJECT) — the same
// expression as OBJECT_HASH above. This looks like a copy/paste mistake (a
// language-tag expression would be expected here); no language helper is visible
// in this excerpt, so verify against the full class before changing.
fields.add(Projections.computed(OBJECT_LANGUAGE, hashFieldExpr(OBJECT)));
fields.add(Projections.computed(CONTEXT, DEFAULT_CONTEXT));
fields.add(Projections.computed(STATEMENT_METADATA, DEFAULT_METADATA));
fields.add(DEFAULT_DV);
// $literal prevents the timestamp value from being interpreted as an expression.
fields.add(Projections.computed(TIMESTAMP, new Document("$literal", timestamp)));
// Derived triples sit one derivation level above their source document.
fields.add(Projections.computed(LEVEL, new Document("$add", Arrays.asList("$" + LEVEL, 1))));
triplePipeline.add(Aggregates.project(Projections.fields(fields)));
// Optionally drop triples that already exist: count matches of redundantFilter
// into a temporary field, keep only documents with zero matches, then strip the
// temporary field again.
if (requireNew) {
    final String numRedundant = "REDUNDANT";
    triplePipeline.add(Aggregates.project(Projections.fields(includeAll, Projections.computed(numRedundant, new Document("$size", redundantFilter)))));
    triplePipeline.add(Aggregates.match(Filters.eq(numRedundant, 0)));
    triplePipeline.add(Aggregates.project(Projections.fields(includeAll)));
// Fragment (method interior): variant of the triple-construction fragment above,
// without the OBJECT_LANGUAGE projection and with a non-final numRedundant.
// Enclosing method and closing braces are outside this excerpt.
fields.add(Projections.computed(SUBJECT, valueFieldExpr(SUBJECT)));
fields.add(Projections.computed(SUBJECT_HASH, hashFieldExpr(SUBJECT)));
fields.add(Projections.computed(PREDICATE, valueFieldExpr(PREDICATE)));
fields.add(Projections.computed(PREDICATE_HASH, hashFieldExpr(PREDICATE)));
fields.add(Projections.computed(OBJECT, valueFieldExpr(OBJECT)));
fields.add(Projections.computed(OBJECT_HASH, hashFieldExpr(OBJECT)));
// Object type falls back to DEFAULT_TYPE when no type expression is present.
fields.add(Projections.computed(OBJECT_TYPE, ConditionalOperators.ifNull(typeFieldExpr(OBJECT), DEFAULT_TYPE)));
fields.add(Projections.computed(CONTEXT, DEFAULT_CONTEXT));
fields.add(Projections.computed(STATEMENT_METADATA, DEFAULT_METADATA));
fields.add(DEFAULT_DV);
// $literal prevents the timestamp value from being interpreted as an expression.
fields.add(Projections.computed(TIMESTAMP, new Document("$literal", timestamp)));
// Derived triples sit one derivation level above their source document.
fields.add(Projections.computed(LEVEL, new Document("$add", Arrays.asList("$" + LEVEL, 1))));
triplePipeline.add(Aggregates.project(Projections.fields(fields)));
// Optionally drop triples that already exist: count matches of redundantFilter
// into a temporary field, keep only documents with zero matches, then strip the
// temporary field again.
if (requireNew) {
    String numRedundant = "REDUNDANT";
    triplePipeline.add(Aggregates.project(Projections.fields(includeAll, Projections.computed(numRedundant, new Document("$size", redundantFilter)))));
    triplePipeline.add(Aggregates.match(Filters.eq(numRedundant, 0)));
    triplePipeline.add(Aggregates.project(Projections.fields(includeAll)));
// Fragment (method interior): accumulates per-binding value/hash/type expressions
// under a shared binding name, records the new variable names, and builds the
// combined projection. NOTE(review): the if-block's closing brace and the loop/
// method wrapping this code are outside this excerpt — braces are unbalanced.
valueFields.add(Projections.computed(name, valueField));
hashFields.add(Projections.computed(name, hashField));
// A type expression is optional; only project it when present.
if (typeField != null) {
typeFields.add(Projections.computed(name, typeField));
bindingNames.addAll(newVarNames);
// LEVEL and TIMESTAMP pass through unchanged alongside the rebuilt subdocuments.
final Bson projectOpts = Projections.fields(
    Projections.computed(VALUES, Projections.fields(valueFields)),
    Projections.computed(HASHES, Projections.fields(hashFields)),
    Projections.computed(TYPES, Projections.fields(typeFields)),
    Projections.include(LEVEL),
    Projections.include(TIMESTAMP));
// (Duplicate of the fragment above, differing only in the non-final projectOpts.)
valueFields.add(Projections.computed(name, valueField));
hashFields.add(Projections.computed(name, hashField));
if (typeField != null) {
typeFields.add(Projections.computed(name, typeField));
bindingNames.addAll(newVarNames);
Bson projectOpts = Projections.fields(
    Projections.computed(VALUES, Projections.fields(valueFields)),
    Projections.computed(HASHES, Projections.fields(hashFields)),
    Projections.computed(TYPES, Projections.fields(typeFields)),
    Projections.include(LEVEL),
    Projections.include(TIMESTAMP));
// Fragment (method interior): wraps each document's projection set in a list
// field, unwinds that list into one document per projection, then flattens the
// unwound entries back to top-level VALUES/HASHES/TYPES. Enclosing method is
// outside this excerpt.
String listKey = "PROJECTIONS";
Bson projectIndividual = Projections.fields(
    Projections.computed(VALUES, "$" + listKey + "." + VALUES),
    Projections.computed(HASHES, "$" + listKey + "." + HASHES),
    Projections.computed(TYPES, "$" + listKey + "." + TYPES),
    Projections.include(LEVEL),
    Projections.include(TIMESTAMP));
// Stage 1: nest projectOpts under the list key; stage 2: one doc per element;
// stage 3: hoist the element's subfields back to the top level.
pipeline.add(Aggregates.project(Projections.computed(listKey, projectOpts)));
pipeline.add(Aggregates.unwind("$" + listKey));
pipeline.add(Aggregates.project(projectIndividual));
// (Duplicate of the fragment above, differing only in the final modifiers.)
final String listKey = "PROJECTIONS";
final Bson projectIndividual = Projections.fields(
    Projections.computed(VALUES, "$" + listKey + "." + VALUES),
    Projections.computed(HASHES, "$" + listKey + "." + HASHES),
    Projections.computed(TYPES, "$" + listKey + "." + TYPES),
    Projections.include(LEVEL),
    Projections.include(TIMESTAMP));
pipeline.add(Aggregates.project(Projections.computed(listKey, projectOpts)));
pipeline.add(Aggregates.unwind("$" + listKey));
pipeline.add(Aggregates.project(projectIndividual));
@Nonnull @Nonnegative @Override public Optional<Integer> sizeOfValue(SingleFeatureBean feature) { checkNotNull(feature, "feature"); final String ownerId = idConverter.convert(feature.owner()); final String fieldName = ModelDocument.F_MANY_FEATURE; final String fieldSize = "size"; final Bson filter = and(eq(ModelDocument.F_ID, ownerId), exists(fieldName)); final Bson projection = computed(fieldSize, new Document(QueryOperators.SIZE, concat('$' + fieldName, Integer.toString(feature.id())))); final List<Bson> pipeline = Arrays.asList( match(filter), limit(1), project(projection) ); try { final AggregateIterable<BasicDBObject> aggregate = documents.aggregate(pipeline, BasicDBObject.class); return MoreIterables.onlyElement(aggregate).map(o -> o.getInt(fieldSize)); } catch (MongoCommandException e) { // FIXME Don't use an exception to determine the presence of an index if (e.getErrorCode() != 17124) { // "the $size operator requires an list" when index does not exist in collection throw e; } return Optional.empty(); } }
// Fragment (mid-expression): tail of a Projections.fields(...) call that adds a
// boolean "FILTER" field from compareDoc while passing the standard fields
// through, followed by a match keeping only documents where the comparison held.
// The opening of the surrounding project stage is outside this excerpt, so the
// parentheses are unbalanced as shown.
Projections.computed("FILTER", compareDoc),
Projections.include(VALUES, HASHES, TYPES, LEVEL, TIMESTAMP))));
pipeline.add(Aggregates.match(new Document("FILTER", true)));
// (Byte-identical duplicate of the fragment above.)
Projections.computed("FILTER", compareDoc),
Projections.include(VALUES, HASHES, TYPES, LEVEL, TIMESTAMP))));
pipeline.add(Aggregates.match(new Document("FILTER", true)));
// Fragment (mid-expression): tail of the projection options for a join-equality
// check — FIELDS_MATCH holds the conjunction of the per-field equality tests,
// while the joined triple and standard fields pass through. The opening of the
// surrounding Projections.fields(...) call is outside this excerpt, so the
// parentheses are unbalanced as shown.
Projections.computed(FIELDS_MATCH, Filters.and(eqTests)),
Projections.include(JOINED_TRIPLE, VALUES, HASHES, TYPES, LEVEL, TIMESTAMP));
pipeline.add(Aggregates.project(eqProjectOpts));
// (Byte-identical duplicate of the fragment above.)
Projections.computed(FIELDS_MATCH, Filters.and(eqTests)),
Projections.include(JOINED_TRIPLE, VALUES, HASHES, TYPES, LEVEL, TIMESTAMP));
pipeline.add(Aggregates.project(eqProjectOpts));