Refine search
/**
 * Advances past the current block to the first document of the next non-empty
 * block, updating {@code block}, {@code sub} and {@code doc} as side effects.
 *
 * @return the new global doc id ({@code (block << 16) | firstInBlock}), or
 *         {@link #NO_MORE_DOCS} when every remaining block is empty
 * @throws IOException if reading the underlying doc-id set fails
 */
private int firstDocFromNextBlock() throws IOException {
  for (;;) {
    ++block;
    if (block >= docIdSets.length) {
      // Ran off the end: no further blocks, mark this iterator exhausted.
      sub = null;
      doc = NO_MORE_DOCS;
      return doc;
    }
    if (docIdSets[block] == null) {
      continue; // empty block, keep scanning forward
    }
    sub = docIdSets[block].iterator();
    final int firstInBlock = sub.nextDoc();
    // A non-null set is guaranteed to contain at least one doc.
    assert firstInBlock != NO_MORE_DOCS;
    doc = (block << 16) | firstInBlock;
    return doc;
  }
}
/** Returns the score of the current document matching the query. * Initially invalid, until the {@link #iterator()} is advanced the first time. * @return The score of the required scorer, eventually increased by the score * of the optional scorer when it also matches the current document. */ @Override public float score() throws IOException { // TODO: sum into a double and cast to float if we ever send required clauses to BS1 int curDoc = reqScorer.docID(); float score = reqScorer.score(); int optScorerDoc = optIterator.docID(); if (optScorerDoc < curDoc) { optScorerDoc = optIterator.advance(curDoc); } if (optScorerDoc == curDoc) { score += optScorer.score(); } return score; }
// NOTE(review): truncated excerpt — braces do not balance and the enclosing
// method signature is not visible; documented as-is, code left untouched.
// Applies a deletion query against a single segment, counting deleted docs.
limit = Integer.MAX_VALUE;
// Private searcher over just this segment's reader; query caching is disabled
// because the query runs exactly once here.
final IndexSearcher searcher = new IndexSearcher(readerContext.reader());
searcher.setQueryCache(null);
query = searcher.rewrite(query);
// needsScores=false: only matching doc ids are needed, not scores.
final Weight weight = searcher.createWeight(query, false, 1);
final Scorer scorer = weight.scorer(readerContext);
if (scorer != null) { // null scorer means nothing matches in this segment
    final DocIdSetIterator it = scorer.iterator();
    // NOTE(review): `limit != Integer.MAX_VALUE` can never hold right after the
    // assignment above, and the doubled nextDoc() loop below would skip every
    // other matching doc — presumably artifacts of this snippet's truncation;
    // verify against the full source before relying on this logic.
    if (segState.rld.sortMap != null && limit != Integer.MAX_VALUE) {
        assert privateSegment != null;
        while ((docID = it.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
            while ((docID = it.nextDoc()) < limit) {
                // delete() returns true only when the doc was not already deleted.
                if (segState.rld.delete(docID)) {
                    delCount++;
// NOTE(review): truncated excerpt — the enclosing loop/method is not visible;
// documented as-is.
// Move to the next leaf reader and acquire a scorer for it.
readerUpto++;
readerContext = leaves.get(readerUpto);
// First doc id beyond this leaf, in global (index-wide) numbering.
endDoc = readerContext.docBase + readerContext.reader().maxDoc();
scorer = weight.scorer(readerContext);
// NOTE(review): scorer is dereferenced without a null check, yet Weight.scorer
// may return null when nothing matches the leaf — confirm the caller
// guarantees a match in this segment.
int actualDoc = scorer.docID();
if (actualDoc < targetDoc) {
    // Position the scorer on (or past) the target, then fold in its score.
    actualDoc = scorer.iterator().advance(targetDoc);
    hit.score = combine(hit.score, true, scorer.score());
} else {
// NOTE(review): truncated excerpt — braces do not balance; documented as-is.
// Replays a cached per-leaf DocIdSet into a sub-collector, keeping a scorer
// positioned alongside when scores are needed.
DocIdSetIterator docIdSetIterator = entry.docIdSet.iterator();
if (docIdSetIterator == null) {
    continue; // empty cached set for this leaf: nothing to replay
DocIdSetIterator scorerIt = null;
if (needsScores) {
    Scorer scorer = weight.scorer(entry.context);
    if (scorer != null) { // null when the query matches nothing in this leaf
        scorerIt = scorer.iterator();
        subCollector.setScorer(scorer);
// Walk every cached doc; the scorer iterator is kept in lock-step so the
// sub-collector can read a valid score for each doc.
while ((docID = docIdSetIterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
    if (needsScores) {
        // Cached docs are a subset of the query's matches, so the scorer must
        // be strictly behind and advance() must land exactly on docID.
        assert scorerIt != null && scorerIt.docID() < docID;
        scorerIt.advance(docID);
        assert scorerIt.docID() == docID;
// NOTE(review): truncated excerpt — braces do not balance; documented as-is.
// Lazily acquire a scorer only when scores are needed and this leaf recorded
// at least one doc delta.
DocIdSetIterator docIt = null;
if (needsScores && entry.docDeltas.size() > 0) {
    Scorer scorer = weight.scorer(entry.context);
    // NOTE(review): scorer is used without a null check — presumably the
    // recorded doc deltas guarantee at least one match in this leaf; confirm.
    docIt = scorer.iterator();
    leafCollector.setScorer(scorer);
if (rebasedBucket != -1) {
    if (needsScores) {
        // Keep the scorer positioned exactly on the replayed doc so that
        // score() is valid inside the collector.
        if (docIt.docID() < doc) {
            docIt.advance(doc);
        assert docIt.docID() == doc;
// NOTE(review): truncated excerpt — the enclosing method and loop close are
// not visible; documented as-is.
// Clamp the iterator's cost to int range to size the builder's bulk adder.
int remainingBits = (int) Math.min(iterator.cost(), Integer.MAX_VALUE);
DocIdSetBuilder.BulkAdder adder = builder == null ? null : builder.grow(remainingBits);
final Bits liveDocs = context.reader().getLiveDocs();
final LeafBucketCollector collector = queue.getLeafCollector(leadSourceBucket, context, queueCollector);
// Feed every live matching doc to the bucket collector.
while (iterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
    // liveDocs == null means the segment has no deletions.
    if (liveDocs == null || liveDocs.get(iterator.docID())) {
        collector.collect(iterator.docID());
// NOTE(review): truncated excerpt — braces do not balance (the loop over
// filterWeights[i] is not visible); documented as-is.
/** Explains the function-score for {@code doc}, starting from the sub-query's explanation. */
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
    Explanation expl = subQueryWeight.explain(context, doc);
    if (!expl.isMatch()) {
        return expl; // doc doesn't match the underlying query: nothing to score
    if (filterWeights[i] != null) {
        // Random-access view over this filter's matches for the current leaf.
        final Bits docSet = Lucene.asSequentialAccessBits(
            context.reader().maxDoc(), filterWeights[i].scorerSupplier(context));
        if (docSet.get(doc) == false) {
            continue; // filter doesn't cover this doc: skip its function
        } else {
            FunctionFactorScorer scorer = functionScorer(context);
            // Position the scorer exactly on doc before computing its factor.
            int actualDoc = scorer.iterator().advance(doc);
            assert (actualDoc == doc);
            double score = scorer.computeScore(doc, expl.getValue());
// NOTE(review): truncated excerpt — `innerHitQueryScorer`, `scorer` and
// `leafCollector` appear without visible declarations and braces do not
// balance; documented as-is.
/** Collects docs matching both the main weight and the inner-hit query within one leaf. */
public static void intersect(Weight weight, Weight innerHitQueryWeight, Collector collector, LeafReaderContext ctx) throws IOException {
    ScorerSupplier scorerSupplier = weight.scorerSupplier(ctx);
    if (scorerSupplier == null) {
        return; // main query matches nothing in this leaf
    ScorerSupplier innerHitQueryScorerSupplier = innerHitQueryWeight.scorerSupplier(ctx);
    if (innerHitQueryScorerSupplier == null) {
        return; // inner-hit query matches nothing in this leaf
    Bits acceptDocs = ctx.reader().getLiveDocs();
    // Conjunction of the two iterators: only docs present in both survive.
    DocIdSetIterator iterator = ConjunctionDISI.intersectIterators(Arrays.asList(innerHitQueryScorer.iterator(), scorer.iterator()));
    // `< NO_MORE_DOCS` is equivalent to `!= NO_MORE_DOCS` here because
    // NO_MORE_DOCS is Integer.MAX_VALUE and real doc ids are smaller.
    for (int docId = iterator.nextDoc(); docId < DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) {
        if (acceptDocs == null || acceptDocs.get(docId)) { // skip deleted docs
            leafCollector.collect(docId);
// NOTE(review): truncated excerpt — braces do not balance and the enclosing
// loop is not visible; documented as-is.
// Counts child docs between parent boundaries to find a nested object's
// ordinal position (offset) under its parent.
Scorer childScorer = childWeight.scorer(subReaderContext);
if (childScorer == null) {
    // No children of this type in the segment: move up and continue.
    current = nestedParentObjectMapper;
    continue;
DocIdSetIterator childIter = childScorer.iterator();
// Children strictly after previousParent and before the hit's own sub doc id.
for (int docId = childIter.advance(previousParent + 1); docId < nestedSubDocId && docId != DocIdSetIterator.NO_MORE_DOCS; docId = childIter.nextDoc()) {
    offset++;
// Children strictly after currentParent and before the next parent doc.
for (int docId = childIter.advance(currentParent + 1); docId < nextParent && docId != DocIdSetIterator.NO_MORE_DOCS; docId = childIter.nextDoc()) {
    offset++;
/**
 * Verifies that an iterator built from {@code testDataFrom0to9} starts on the
 * first stored doc, advances exactly onto the last one, and then terminates.
 */
@Test
public void testIteratorMatchesTestArray() throws IOException {
    final DocIdSet set = arrayToDocIdSet( testDataFrom0to9 );
    final DocIdSetIterator it = set.iterator();
    // First nextDoc() must position the iterator on doc 0.
    assertTrue( it.nextDoc() != DocIdSetIterator.NO_MORE_DOCS );
    assertEquals( 0, it.docID() );
    // advance(9) lands exactly on the last stored doc...
    assertEquals( 9, it.advance( 9 ) );
    // ...and advancing past it exhausts the iterator.
    assertEquals( DocIdSetIterator.NO_MORE_DOCS, it.advance( 10 ) );
}
/**
 * Refines the superclass's delete count for {@code info} when the segment has
 * soft deletes: soft-deleted docs that the retention query still matches must
 * be kept, so they are subtracted from the deletable count. Falls back to the
 * superclass's answer when no soft deletes exist or nothing matches.
 */
@Override
public int numDeletesToMerge(SegmentCommitInfo info, int delCount, IOSupplier<CodecReader> readerSupplier) throws IOException {
    final int numDeletesToMerge = super.numDeletesToMerge(info, delCount, readerSupplier);
    if (numDeletesToMerge != 0 && info.getSoftDelCount() > 0) {
        final CodecReader reader = readerSupplier.get();
        if (reader.getLiveDocs() != null) {
            // Retention query restricted to docs that actually carry the
            // soft-deletes field.
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.add(new DocValuesFieldExistsQuery(field), BooleanClause.Occur.FILTER);
            builder.add(retentionQuerySupplier.get(), BooleanClause.Occur.FILTER);
            // Run over the reader with live docs stripped (null) so that
            // soft-deleted docs are visible to the query.
            Scorer scorer = getScorer(builder.build(), FilterCodecReader.wrapLiveDocs(reader, null, reader.maxDoc()));
            if (scorer != null) {
                DocIdSetIterator iterator = scorer.iterator();
                Bits liveDocs = reader.getLiveDocs();
                int numDeletedDocs = reader.numDeletedDocs();
                while (iterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
                    // A currently-deleted doc that the retention query matches
                    // is retained — remove it from the deletable count.
                    if (liveDocs.get(iterator.docID()) == false) {
                        numDeletedDocs--;
                    }
                }
                return numDeletedDocs;
            }
        }
    }
    assert numDeletesToMerge >= 0 : "numDeletesToMerge: " + numDeletesToMerge;
    assert numDeletesToMerge <= info.info.maxDoc() : "numDeletesToMerge: " + numDeletesToMerge + " maxDoc:" + info.info.maxDoc();
    return numDeletesToMerge;
}
} // NOTE(review): closes an enclosing class whose declaration is outside this excerpt
/**
 * Returns {@link Matches} for a specific document, or {@code null} if the
 * document does not match the parent query.
 *
 * A query match that contains no position information (for example, a Point
 * or DocValues query) will return {@link MatchesUtils#MATCH_WITH_NO_TERMS}.
 *
 * @param context the reader's context to create the {@link Matches} for
 * @param doc the document's id relative to the given context's reader
 * @lucene.experimental
 */
public Matches matches(LeafReaderContext context, int doc) throws IOException {
    final Scorer scorer = scorer(context);
    if (scorer == null) {
        return null; // nothing in this segment matches at all
    }
    final TwoPhaseIterator twoPhase = scorer.twoPhaseIterator();
    final boolean matched;
    if (twoPhase != null) {
        // Two-phase: advance the cheap approximation first, then confirm
        // with the (possibly expensive) matches() check.
        matched = twoPhase.approximation().advance(doc) == doc && twoPhase.matches();
    } else {
        matched = scorer.iterator().advance(doc) == doc;
    }
    return matched ? MatchesUtils.MATCH_WITH_NO_TERMS : null;
}
/**
 * Fetches the next matching document from {@code idIterator}, or returns
 * {@code null} once the iterator is exhausted.
 */
@Override
protected Document fetchNextOrNull() {
    try {
        int doc = idIterator.nextDoc();
        if ( doc == DocIdSetIterator.NO_MORE_DOCS ) {
            return null; // iteration finished
        }
        return getDocument( doc );
    } catch ( IOException e ) {
        // Wrap the checked IOException in the project's runtime exception,
        // preserving the original cause.
        throw new LuceneDocumentRetrievalException( "Can't fetch document id from lucene index.", e );
    }
}
}; // NOTE(review): closes an anonymous class whose declaration is outside this excerpt
/**
 * Explains the boosted score for {@code doc}: the inner weight's explanation
 * combined with the value-source factor, multiplied by {@code boost}.
 *
 * @return a no-match explanation when the inner query does not match the doc
 */
@Override
public Explanation explain(LeafReaderContext context, int doc) throws IOException {
    Scorer scorer = inner.scorer(context);
    // FIX: Weight.scorer() returns null when no document in this segment
    // matches; the original dereferenced it unconditionally and could throw
    // a NullPointerException instead of reporting "no match".
    if (scorer == null || scorer.iterator().advance(doc) != doc) {
        return Explanation.noMatch("No match");
    }
    Explanation scoreExplanation = inner.explain(context, doc);
    Explanation expl = valueSource.explain(context, doc, scoreExplanation);
    return Explanation.match(expl.getValue() * boost, "product of:",
        Explanation.match(boost, "boost"), expl);
}
/**
 * Returns the doc id the underlying conjunction iterator is currently
 * positioned on.
 */
@Override
public int docID() {
    return conjunction.docID();
}