public BitSet bits(IndexReader reader) throws IOException { if (cache == null) { cache = new WeakHashMap(); } synchronized (cache) { // check cache BitSet cached = (BitSet) cache.get(reader); if (cached != null) { return cached; } } final BitSet bits = filter.bits(reader); synchronized (cache) { // update cache cache.put(reader, bits); } return bits; }
/**
 * Exposes the cached filter bits through the DocIdSet API.
 *
 * @return a DocIdSet that provides the documents which should be
 *         permitted or prohibited in search results.
 * @see DocIdBitSet
 */
public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
  final BitSet cachedBits = bits(reader);
  return new DocIdBitSet(cachedBits);
}
}
/**
 * Adapts this filter's bit set to the DocIdSet API.
 *
 * @return a DocIdSet that provides the documents which should be
 *         permitted or prohibited in search results.
 * @see DocIdBitSet
 */
public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
  // Wrap the legacy BitSet representation in a DocIdBitSet adapter.
  return new DocIdBitSet(this.bits(reader));
}
}
/** * @deprecated Use {@link #getDocIdSet(IndexReader)} instead. */ public BitSet bits(IndexReader reader) throws IOException { if (cache == null) { cache = new WeakHashMap(); } Object cached = null; synchronized (cache) { // check cache cached = cache.get(reader); } if (cached != null) { if (cached instanceof BitSet) { return (BitSet) cached; } else if (cached instanceof DocIdBitSet) return ((DocIdBitSet) cached).getBitSet(); // It would be nice to handle the DocIdSet case, but that's not really possible } final BitSet bits = filter.bits(reader); synchronized (cache) { // update cache cache.put(reader, bits); } return bits; }
/**
 * Converts a filter into a DocSet.
 * This method is not cache-aware and no caches are checked.
 */
public DocSet convertFilter(Filter lfilter) throws IOException {
  final BitSet filterBits = lfilter.bits(this.reader);
  final OpenBitSet result = new OpenBitSet(filterBits.size());
  // Copy every set bit from the java.util.BitSet into the OpenBitSet.
  int docId = filterBits.nextSetBit(0);
  while (docId >= 0) {
    result.fastSet(docId);
    docId = filterBits.nextSetBit(docId + 1);
  }
  return new BitDocSet(result);
}
/** * @deprecated Use {@link #getDocIdSet(IndexReader)} instead. */ public BitSet bits(IndexReader reader) throws IOException { if (cache == null) { cache = new WeakHashMap(); } Object cached = null; synchronized (cache) { // check cache cached = cache.get(reader); } if (cached != null) { if (cached instanceof BitSet) { return (BitSet) cached; } else if (cached instanceof DocIdBitSet) return ((DocIdBitSet) cached).getBitSet(); // It would be nice to handle the DocIdSet case, but that's not really possible } final BitSet bits = filter.bits(reader); synchronized (cache) { // update cache cache.put(reader, bits); } return bits; }
/**
 * Uses the {@link FilterManager} to keep the cache for a filter on the
 * searcher side of a remote connection.
 * @param reader the index reader for the Filter
 * @return the bitset
 * @deprecated Use {@link #getDocIdSet(IndexReader)} instead.
 */
public BitSet bits(IndexReader reader) throws IOException {
  // Delegate to the server-side cached instance of this filter.
  final FilterManager manager = FilterManager.getInstance();
  return manager.getFilter(filter).bits(reader);
}
/**
 * Uses the {@link FilterManager} to keep the cache for a filter on the
 * searcher side of a remote connection.
 * @param reader the index reader for the Filter
 * @return the bitset
 * @deprecated Use {@link #getDocIdSet(IndexReader)} instead.
 */
public BitSet bits(IndexReader reader) throws IOException {
  // Look up (or register) the searcher-side cached copy of this filter,
  // then answer from that copy.
  final Filter managed = FilterManager.getInstance().getFilter(filter);
  return managed.bits(reader);
}
public Scorer scorer (IndexReader indexReader) throws IOException { final Scorer scorer = weight.scorer (indexReader); final BitSet bitset = filter.bits (indexReader); return new Scorer (query.getSimilarity (searcher)) { // pass these methods through to the enclosed scorer public boolean next() throws IOException { return scorer.next(); } public int doc() { return scorer.doc(); } public boolean skipTo (int i) throws IOException { return scorer.skipTo(i); } // if the document has been filtered out, set score to 0.0 public float score() throws IOException { return (bitset.get(scorer.doc())) ? scorer.score() : 0.0f; } // add an explanation about whether the document was filtered public Explanation explain (int i) throws IOException { Explanation exp = scorer.explain (i); if (bitset.get(i)) exp.setDescription ("allowed by filter: "+exp.getDescription()); else exp.setDescription ("removed by filter: "+exp.getDescription()); return exp; } }; } };
public void search(Query query, Filter filter, final HitCollector results) throws IOException { HitCollector collector = results; if (filter != null) { final BitSet bits = filter.bits(reader); collector = new HitCollector() { public final void collect(int doc, float score) { if (bits.get(doc)) { // skip docs not in bits results.collect(doc, score); } } }; } Scorer scorer = query.weight(this).scorer(reader); if (scorer == null) return; scorer.score(collector); }
public TopDocs search(Query query, Filter filter, final int nDocs) throws IOException { Scorer scorer = query.weight(this).scorer(reader); if (scorer == null) return new TopDocs(0, new ScoreDoc[0]); final BitSet bits = filter != null ? filter.bits(reader) : null; final HitQueue hq = new HitQueue(nDocs); final int[] totalHits = new int[1]; scorer.score(new HitCollector() { private float minScore = 0.0f; public final void collect(int doc, float score) { if (score > 0.0f && // ignore zeroed buckets (bits==null || bits.get(doc))) { // skip docs not in bits totalHits[0]++; if (hq.size() < nDocs || score >= minScore) { hq.insert(new ScoreDoc(doc, score)); minScore = ((ScoreDoc)hq.top()).score; // maintain minScore } } } }); ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()]; for (int i = hq.size()-1; i >= 0; i--) // put docs in array scoreDocs[i] = (ScoreDoc)hq.pop(); return new TopDocs(totalHits[0], scoreDocs); }
public TopFieldDocs search(Query query, Filter filter, final int nDocs, Sort sort) throws IOException { Scorer scorer = query.weight(this).scorer(reader); if (scorer == null) return new TopFieldDocs(0, new ScoreDoc[0], sort.fields); final BitSet bits = filter != null ? filter.bits(reader) : null; final FieldSortedHitQueue hq = new FieldSortedHitQueue(reader, sort.fields, nDocs); final int[] totalHits = new int[1]; scorer.score(new HitCollector() { public final void collect(int doc, float score) { if (score > 0.0f && // ignore zeroed buckets (bits==null || bits.get(doc))) { // skip docs not in bits totalHits[0]++; hq.insert(new FieldDoc(doc, score)); } } }); ScoreDoc[] scoreDocs = new ScoreDoc[hq.size()]; for (int i = hq.size()-1; i >= 0; i--) // put docs in array scoreDocs[i] = hq.fillFields ((FieldDoc) hq.pop()); return new TopFieldDocs(totalHits[0], scoreDocs, hq.getFields()); }