/**
 * Completes collection for the leaf currently in progress, if any: freezes the
 * buffered doc ids into a {@code DocIdSet}, stores it as a new {@code Entry},
 * and clears the per-leaf state so the next leaf starts fresh. No-op when no
 * leaf is in progress.
 */
private void finishLeaf() {
    if (currentLeaf == null) {
        return;
    }
    entries.add(new Entry(currentLeaf, docIdSetBuilder.build()));
    currentLeaf = null;
    docIdSetBuilder = null;
}
// A new block starts: flush whatever was buffered for the previous block
// before switching. NOTE(review): fragment — the closing brace is not visible here.
if (block != currentBlock) { flush(); currentBlock = block;
// When doc-id sets were requested, remember this leaf and start a RoaringDocIdSet
// builder sized to the segment's maxDoc. NOTE(review): fragment — the closing
// brace is not visible here.
if (fillDocIdSet) { currentLeaf = ctx; docIdSetBuilder = new RoaringDocIdSet.Builder(ctx.reader().maxDoc());
// When doc-id sets were requested, remember this leaf and start a RoaringDocIdSet
// builder sized to the segment's maxDoc. NOTE(review): fragment — the closing
// brace is not visible here.
if (fillDocIdSet) { currentLeaf = ctx; docIdSetBuilder = new RoaringDocIdSet.Builder(ctx.reader().maxDoc());
// A new block starts: flush whatever was buffered for the previous block
// before switching. NOTE(review): fragment — the closing brace is not visible here.
if (block != currentBlock) { flush(); currentBlock = block;
// A new block starts: flush whatever was buffered for the previous block
// before switching. NOTE(review): fragment — the closing brace is not visible here.
if (block != currentBlock) { flush(); currentBlock = block;
// A new block starts: flush whatever was buffered for the previous block
// before switching. NOTE(review): fragment — the closing brace is not visible here.
if (block != currentBlock) { flush(); currentBlock = block;
/**
 * Merges the per-leaf collapse results into a single query result.
 * A bounded priority queue keeps only the top {@code maxRows} group leaders;
 * leader doc ids are then regrouped per segment (sorted, as RoaringDocIdSet
 * requires ascending adds) and turned into one bitset per leaf. Leaves that
 * produced no leader still get an empty bitset so every segment is covered.
 */
@Override
public CollapseCollector.Query getResult() {
    // Fill the priority queue with the results of each segment
    final GroupQueue groupQueue = new GroupQueue(maxRows);
    leafCollectors.forEach(leaf -> leaf.reduce(groupQueue));
    // Stores for each doc the number of collapsed documents
    final Int2IntLinkedOpenHashMap collapsedMap = new Int2IntLinkedOpenHashMap(groupQueue.groupLeaders.size());
    // The DocID must be sorted and grouped by segment
    final Map<LeafReaderContext, IntSortedSet> sortedInts = new HashMap<>();
    long collapsedCount = 0;
    for (final GroupLeader groupLeader : groupQueue.groupLeaders.values()) {
        sortedInts.computeIfAbsent(groupLeader.context, ctx -> new IntAVLTreeSet()).add(groupLeader.doc);
        // Keyed by the global doc id (segment base + local doc).
        collapsedMap.addTo(groupLeader.context.docBase + groupLeader.doc, groupLeader.collapsedCount);
        collapsedCount += groupLeader.collapsedCount;
    }
    // Now we can build the bitsets
    final Map<LeafReaderContext, RoaringDocIdSet> docIdMaps = new HashMap<>();
    sortedInts.forEach((ctx, sortedInt) -> {
        final RoaringDocIdSet.Builder builder = new RoaringDocIdSet.Builder(ctx.reader().maxDoc());
        sortedInt.forEach((IntConsumer) builder::add);
        docIdMaps.put(ctx, builder.build());
    });
    // Add empty bitset for unassigned leaf
    leafCollectors.forEach(leaf -> docIdMaps.putIfAbsent(leaf.context, new RoaringDocIdSet.Builder(leaf.context.reader().maxDoc()).build()));
    return new Query(new FilteredQuery(docIdMaps), collapsedMap, collapsedCount);
}
/**
 * Default cache implementation: uses {@link RoaringDocIdSet}.
 * Runs the bulk scorer over the whole segment (no live-docs filter) and
 * records every collected doc id into the builder.
 */
protected DocIdSet cacheImpl(BulkScorer scorer, int maxDoc) throws IOException {
    final RoaringDocIdSet.Builder builder = new RoaringDocIdSet.Builder(maxDoc);
    final LeafCollector sink = new LeafCollector() {

        @Override
        public void setScorer(Scorer scorer) throws IOException {
            // Scores are irrelevant for caching; only doc ids are kept.
        }

        @Override
        public void collect(int doc) throws IOException {
            builder.add(doc);
        }
    };
    scorer.score(sink, null);
    return builder.build();
}
/**
 * Default cache implementation: uses {@link RoaringDocIdSet}.
 * Runs the bulk scorer over the whole segment (the {@code null} second argument
 * means no live-docs filter) and records each collected doc id into the builder.
 */
protected DocIdSet cacheImpl(BulkScorer scorer, int maxDoc) throws IOException {
    final RoaringDocIdSet.Builder builder = new RoaringDocIdSet.Builder(maxDoc);
    scorer.score(new LeafCollector() {

        @Override
        public void setScorer(Scorer scorer) throws IOException {
            // Scores are irrelevant for caching; only doc ids are kept.
        }

        @Override
        public void collect(int doc) throws IOException {
            builder.add(doc);
        }
    }, null);
    return builder.build();
}
/**
 * Registers a RoaringDocIdSet builder for the given segment and returns a
 * collector that records every matching doc id into it. Scores are ignored.
 */
@Override
public final LeafCollector getLeafCollector(final LeafReaderContext context) throws IOException {
    final RoaringDocIdSet.Builder docIds = new RoaringDocIdSet.Builder(context.reader().maxDoc());
    docIdSetMapBuilders.put(context, docIds);
    return new LeafCollector() {

        @Override
        public final void setScorer(final Scorer scorer) throws IOException {
            // No scoring required; only matching doc ids are recorded.
        }

        @Override
        public final void collect(final int doc) throws IOException {
            docIds.add(doc);
        }
    };
}
/**
 * Build an instance.
 * {@code flush()} presumably finalizes any still-buffered block into
 * {@code sets} before the set is assembled — NOTE(review): flush() is defined
 * elsewhere; confirm it is safe to call build() only once.
 */
public RoaringDocIdSet build() {
    flush();
    return new RoaringDocIdSet(sets, cardinality);
}
/**
 * Default cache implementation: uses {@link RoaringDocIdSet}.
 * Drains the iterator into a builder sized to the reader's maxDoc.
 */
protected DocIdSet cacheImpl(DocIdSetIterator iterator, LeafReader reader) throws IOException {
    final RoaringDocIdSet.Builder builder = new RoaringDocIdSet.Builder(reader.maxDoc());
    builder.add(iterator);
    return builder.build();
}
/** Records the matching doc id into the enclosing doc-id-set builder. */
@Override
public void collect(int doc) throws IOException {
    builder.add(doc);
}
/** Records the matching doc id into the enclosing doc-id-set builder. */
@Override
final public void collect(final int doc) throws IOException {
    builder.add(doc);
}
};
/**
 * Collects one (doc, bucket) hit: the hit is offered to the priority queue
 * and, when competitive, the doc id is recorded at most once into the
 * optional builder ({@code lastDoc} de-duplicates consecutive same-doc hits).
 */
@Override
public void collect(int doc, long bucket) throws IOException {
    final int slot = queue.addIfCompetitive();
    if (slot == -1) {
        return; // not competitive — nothing to record
    }
    if (builder != null && lastDoc != doc) {
        builder.add(doc);
        lastDoc = doc;
    }
}
};
/**
 * Completes collection for the leaf currently in progress, if any: the
 * buffered doc ids are frozen into a DocIdSet and stored as a new Entry,
 * then the per-leaf state is cleared so the next leaf starts fresh.
 */
private void finishLeaf() {
    if (currentLeaf != null) {
        DocIdSet docIdSet = docIdSetBuilder.build();
        entries.add(new Entry(currentLeaf, docIdSet));
        // Reset per-leaf state; a new builder is created when the next leaf begins.
        currentLeaf = null;
        docIdSetBuilder = null;
    }
}
/**
 * Add the content of the provided {@link DocIdSetIterator}.
 * Consumes the iterator to exhaustion, adding each doc id in turn.
 *
 * @param disi the iterator to drain
 * @return this builder, for chaining
 * @throws IOException if advancing the iterator fails
 */
public Builder add(DocIdSetIterator disi) throws IOException {
    int doc = disi.nextDoc();
    while (doc != DocIdSetIterator.NO_MORE_DOCS) {
        add(doc);
        doc = disi.nextDoc();
    }
    return this;
}
/** Records the matching doc id into the enclosing doc-id-set builder. */
@Override
public void collect(int doc) throws IOException {
    builder.add(doc);
}
/**
 * Default cache implementation: uses {@link RoaringDocIdSet}.
 * Drains the iterator into a builder sized to the reader's maxDoc and
 * returns the resulting cached set.
 */
protected DocIdSet cacheImpl(DocIdSetIterator iterator, LeafReader reader) throws IOException {
    return new RoaringDocIdSet.Builder(reader.maxDoc()).add(iterator).build();
}