/** Returns the wrapped reader's live-document bits, unchanged. */
@Override
public Bits getLiveDocs() {
  final Bits delegated = reader.getLiveDocs();
  return delegated;
}
/**
 * Returns live docs from the first parallel reader when any deletions exist;
 * {@code null} means every document is live.
 */
@Override
public Bits getLiveDocs() {
  ensureOpen();
  if (!hasDeletions) {
    return null;
  }
  return parallelReaders[0].getLiveDocs();
}
/**
 * Returns the delegate's live docs remapped through {@code docMap}, or
 * {@code null} when the delegate reports no deletions.
 */
@Override
public Bits getLiveDocs() {
  final Bits delegateLive = in.getLiveDocs();
  return delegateLive == null ? null : new SortingBits(delegateLive, docMap);
}
/** Delegates live docs to the wrapped reader, checking this reader is still open first. */
@Override
public Bits getLiveDocs() {
  // Fail fast if this reader has already been closed.
  ensureOpen();
  return in.getLiveDocs();
}
/**
 * Builds one {@link DocMap} per reader that translates a reader-local doc id
 * into its position in the concatenated, deletion-free doc id space. Deleted
 * documents map to {@code -1}. Each reader's base is the running total of the
 * previous readers' live doc counts.
 */
private DocMap[] buildDeletionDocMaps(List<CodecReader> readers) {
  final DocMap[] maps = new DocMap[readers.size()];
  int nextDocBase = 0;
  for (int ord = 0; ord < maps.length; ord++) {
    final LeafReader leaf = readers.get(ord);
    final Bits live = leaf.getLiveDocs();
    // Compacted mapping over deletions; null when this leaf has no deletions.
    final PackedLongValues compacted = live == null ? null : removeDeletes(leaf.maxDoc(), live);
    final int base = nextDocBase;
    maps[ord] =
        new DocMap() {
          @Override
          public int get(int docID) {
            if (live == null) {
              return base + docID; // no deletions: identity shift by base
            }
            if (live.get(docID)) {
              return base + (int) compacted.get(docID);
            }
            return -1; // deleted doc has no target slot
          }
        };
    nextDocBase += leaf.numDocs();
  }
  return maps;
}
// NOTE(review): this fragment appears spliced from two call sites — the early
// return is immediately followed by unreachable array-filling statements;
// verify against the original source. The assert message's "leave" looks like
// a typo for "leaf" (string literal, so not changed here).
assert size > 0 : "A reader with deletions must have at least one leave"; if (size == 1) { return leaves.get(0).reader().getLiveDocs(); liveDocs[i] = ctx.reader().getLiveDocs(); starts[i] = ctx.docBase;
/**
 * Expert: low-level implementation method.
 *
 * <p>Returns an Explanation that describes how {@code doc} scored against
 * {@code weight}. Intended for developing Similarity implementations; computing
 * an explanation is as expensive as executing the query over the entire index,
 * so it should not be produced for every hit.
 *
 * <p>Applications should call {@link IndexSearcher#explain(Query, int)}.
 *
 * @throws BooleanQuery.TooManyClauses If a query would exceed
 *     {@link BooleanQuery#getMaxClauseCount()} clauses.
 */
protected Explanation explain(Weight weight, int doc) throws IOException {
  final int leafOrd = ReaderUtil.subIndex(doc, leafContexts);
  final LeafReaderContext leaf = leafContexts.get(leafOrd);
  final int localDoc = doc - leaf.docBase;
  final Bits live = leaf.reader().getLiveDocs();
  // A deleted document cannot match, so short-circuit with a no-match explanation.
  if (live != null && !live.get(localDoc)) {
    return Explanation.noMatch("Document " + doc + " is deleted");
  }
  return weight.explain(leaf, localDoc);
}
// Fragment (try/catch not closed in this excerpt): bulk-scores all live docs
// of this leaf into the collector; CollectionTerminatedException signals an
// intentional early termination by the collector.
if (scorer != null) { try { scorer.score(leafCollector, ctx.reader().getLiveDocs()); } catch (CollectionTerminatedException e) {
/**
 * Wraps {@code reader} so that documents soft-deleted via {@code field} are
 * reported as deleted. Returns the reader unchanged when the field yields no
 * doc-values iterator. Existing hard deletes are preserved by copying the
 * current live docs before applying the soft deletes on top.
 */
static LeafReader wrap(LeafReader reader, String field) throws IOException {
  final DocIdSetIterator softDeletesIt =
      DocValuesFieldExistsQuery.getDocValuesDocIdSetIterator(field, reader);
  if (softDeletesIt == null) {
    return reader; // nothing soft-deleted for this field
  }
  final Bits hardLive = reader.getLiveDocs();
  final FixedBitSet live;
  if (hardLive == null) {
    // No hard deletes yet: start with every document live.
    live = new FixedBitSet(reader.maxDoc());
    live.set(0, reader.maxDoc());
  } else {
    live = FixedBitSet.copyOf(hardLive);
  }
  final int numSoftDeletes = PendingSoftDeletes.applySoftDeletes(softDeletesIt, live);
  final int numDocs = reader.maxDoc() - (reader.numDeletedDocs() + numSoftDeletes);
  assert assertDocCounts(numDocs, numSoftDeletes, reader);
  if (reader instanceof CodecReader) {
    return new SoftDeletesFilterCodecReader((CodecReader) reader, live, numDocs);
  }
  return new SoftDeletesFilterLeafReader(reader, live, numDocs);
}
// Fragment (surrounding method not visible): when the leaf has no deletions,
// the segment-wide minimum packed value can be converted directly — presumably
// the else-branch filters per live doc; verify against the full method.
return null; final Bits liveDocs = reader.getLiveDocs(); if (liveDocs == null) { return converter.apply(pointValues.getMinPackedValue());
// Fragment (surrounding method not visible): when the leaf has no deletions,
// the segment-wide maximum packed value can be converted directly — presumably
// the else-branch filters per live doc; verify against the full method.
return null; final Bits liveDocs = reader.getLiveDocs(); if (liveDocs == null) { return converter.apply(pointValues.getMaxPackedValue());
// Fragment (anonymous class body not visible): when the leaf has deletions,
// wraps the iterator so deleted documents are filtered out of the result set.
final Bits liveDocs = context.reader().getLiveDocs(); if (liveDocs != null) { docs = new FilteredDocIdSetIterator(docs) {
private static void suggest(IndexSearcher searcher, CompletionQuery query, TopSuggestDocsCollector collector) throws IOException { query = (CompletionQuery) query.rewrite(searcher.getIndexReader()); Weight weight = query.createWeight(searcher, collector.needsScores(), 1f); for (LeafReaderContext context : searcher.getIndexReader().leaves()) { BulkScorer scorer = weight.bulkScorer(context); if (scorer != null) { try { scorer.score(collector.getLeafCollector(context), context.reader().getLiveDocs()); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf } } } } }
/** * Check whether there is one or more documents matching the provided query. */ public static boolean exists(IndexSearcher searcher, Query query) throws IOException { final Weight weight = searcher.createNormalizedWeight(query, false); // the scorer API should be more efficient at stopping after the first // match than the bulk scorer API for (LeafReaderContext context : searcher.getIndexReader().leaves()) { final Scorer scorer = weight.scorer(context); if (scorer == null) { continue; } final Bits liveDocs = context.reader().getLiveDocs(); final DocIdSetIterator iterator = scorer.iterator(); for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) { if (liveDocs == null || liveDocs.get(doc)) { return true; } } } return false; }
// Fragment: the assert guards against being handed a context from a different
// reader than the one this class was initialized on; the live docs are passed
// so getDocID can skip deleted documents (its implementation is not visible here).
assert context.reader().getCoreCacheHelper().getKey().equals(readerKey) : "context's reader is not the same as the reader class was initialized on."; int docID = getDocID(id, context.reader().getLiveDocs());
// Fragment (loop body not visible): fetches this leaf's live docs and a
// per-leaf bucket collector, then iterates every matching doc of `iterator`.
final Bits liveDocs = context.reader().getLiveDocs(); final LeafBucketCollector collector = queue.getLeafCollector(leadSourceBucket, context, queueCollector); while (iterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
// Fragment: intersects the inner-hit query's iterator with the main scorer's
// iterator; acceptDocs is presumably consulted later to skip deleted docs —
// the consuming code is not visible in this excerpt.
Bits acceptDocs = ctx.reader().getLiveDocs(); DocIdSetIterator iterator = ConjunctionDISI.intersectIterators(Arrays.asList(innerHitQueryScorer.iterator(), scorer.iterator()));
// Fragment (loop not visible): initializes a postings walk over docsEnum;
// liveDocs presumably lets the following loop skip deleted documents while
// building the DocIdAndSeqNo result — verify against the full method.
final Bits liveDocs = context.reader().getLiveDocs(); DocIdAndSeqNo result = null; int docID = docsEnum.nextDoc();
/** Live docs come from the first parallel reader; {@code null} when nothing is deleted. */
@Override
public Bits getLiveDocs() {
  ensureOpen();
  return hasDeletions ? parallelReaders[0].getLiveDocs() : null;
}
/** Returns the delegate's live docs after verifying this reader is still open. */
@Override
public Bits getLiveDocs() {
  ensureOpen();
  final Bits fromDelegate = in.getLiveDocs();
  return fromDelegate;
}