@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
    // Delegate to the wrapped weight; no inner supplier means no matches in this segment.
    final ScorerSupplier inner = innerWeight.scorerSupplier(context);
    if (inner == null) {
        return null;
    }
    return new ScorerSupplier() {
        @Override
        public Scorer get(long leadCost) throws IOException {
            final Scorer innerScorer = inner.get(leadCost);
            // Capture the constant score once; every matching doc reports the same value.
            final float constantScore = score();
            return new FilterScorer(innerScorer) {
                @Override
                public float score() throws IOException {
                    return constantScore;
                }

                @Override
                public Collection<ChildScorer> getChildren() {
                    // Expose the wrapped scorer for explain/profiling purposes.
                    return Collections.singleton(new ChildScorer(innerScorer, "constant"));
                }
            };
        }

        @Override
        public long cost() {
            return inner.cost();
        }
    };
}
@Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final ScorerSupplier indexScorerSupplier = indexWeight.scorerSupplier(context); final ScorerSupplier dvScorerSupplier = dvWeight.scorerSupplier(context); if (indexScorerSupplier == null || dvScorerSupplier == null) { return null; } return new ScorerSupplier() { @Override public Scorer get(long leadCost) throws IOException { // At equal costs, doc values tend to be worse than points since they // still need to perform one comparison per document while points can // do much better than that given how values are organized. So we give // an arbitrary 8x penalty to doc values. final long threshold = cost() >>> 3; if (threshold <= leadCost) { return indexScorerSupplier.get(leadCost); } else { return dvScorerSupplier.get(leadCost); } } @Override public long cost() { return indexScorerSupplier.cost(); } }; }
// NOTE(review): fragment of a boolean-weight scorer-supplier loop — the enclosing
// method and the loop's continuation (what happens for a missing required vs.
// optional clause) lie outside this view. `cIter` runs in lockstep with `weights`.
for (Weight w : weights) { BooleanClause c = cIter.next(); ScorerSupplier subScorer = w.scorerSupplier(context); if (subScorer == null) { if (c.isRequired()) {
// NOTE(review): this line looks like a corrupted merge of several query-cache
// scorerSupplier branches — multiple statements follow a `return` and are
// unreachable, so this cannot compile as-is. TODO: restore the original
// branching (cacheHelper null-check, putIfAbsent, fallthrough delegation)
// from the source this was extracted from.
return in.scorerSupplier(context); return in.scorerSupplier(context); if (cacheHelper == null) { return in.scorerSupplier(context); return in.scorerSupplier(context); putIfAbsent(in.getQuery(), context, docIdSet, cacheHelper); } else { return in.scorerSupplier(context);
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
    // Register this segment's reader before delegating so its hits can later be
    // mapped back to the owning shard.
    shardKeyMap.add(context.reader());
    return in.scorerSupplier(context);
}
@Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { // no need to provide deleted docs to the filter final Bits bits = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filter.get().scorerSupplier(ctx)); return new LeafBucketCollectorBase(sub, null) { @Override public void collect(int doc, long bucket) throws IOException { if (bits.get(doc)) { collectBucket(sub, doc, bucket); } } }; }
@Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { // no need to provide deleted docs to the filter Weight[] filters = this.filters.get(); final Bits[] bits = new Bits[filters.length]; for (int i = 0; i < filters.length; ++i) { bits[i] = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorerSupplier(ctx)); } return new LeafBucketCollectorBase(sub, null) { @Override public void collect(int doc, long bucket) throws IOException { boolean matched = false; for (int i = 0; i < bits.length; i++) { if (bits[i].get(doc)) { collectBucket(sub, doc, bucketOrd(bucket, i)); matched = true; } } if (showOtherBucket && !matched) { collectBucket(sub, doc, bucketOrd(bucket, bits.length)); } } }; }
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
    // Resolve the filter for this segment, then wrap the delegate collector so
    // only docs accepted by the filter are forwarded.
    final ScorerSupplier filterSupplier = filter.scorerSupplier(context);
    final LeafCollector delegate = collector.getLeafCollector(context);
    final Bits filterBits = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterSupplier);
    return new FilterLeafCollector(delegate) {
        @Override
        public void collect(int doc) throws IOException {
            if (filterBits.get(doc)) {
                delegate.collect(doc);
            }
        }
    };
}
// NOTE(review): fragment — a timing wrapper around sub-query scorer-supplier
// creation; the enclosing method and the code following the finally block are
// outside this view. The finally guarantees the timer stops even if
// scorerSupplier(context) throws.
final ScorerSupplier subQueryScorerSupplier; try { subQueryScorerSupplier = subQueryWeight.scorerSupplier(context); } finally { timer.stop();
@Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { // no need to provide deleted docs to the filter final Bits[] bits = new Bits[filters.length + totalNumIntersections]; for (int i = 0; i < filters.length; ++i) { bits[i] = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filters[i].scorerSupplier(ctx)); } // Add extra Bits for intersections int pos = filters.length; for (int i = 0; i < filters.length; i++) { for (int j = i + 1; j < filters.length; j++) { bits[pos++] = new BitsIntersector(bits[i], bits[j]); } } assert pos == bits.length; return new LeafBucketCollectorBase(sub, null) { @Override public void collect(int doc, long bucket) throws IOException { for (int i = 0; i < bits.length; i++) { if (bits[i].get(doc)) { collectBucket(sub, doc, bucketOrd(bucket, i)); } } } }; }
// NOTE(review): fragment of a larger per-segment setup routine — the enclosing
// definition starts before this view. Records the segment's doc base and turns
// the weight's scorer supplier into lazily materialized sequential-access bits.
docBase = ctx.docBase; ScorerSupplier scorerSupplier = weight.scorerSupplier(ctx); matchingDocs = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), scorerSupplier);
// NOTE(review): fragment — braces opened here are never closed in this view;
// the collapsed extraction appears to have lost the closing braces and the
// actual intersection loop. As shown, it short-circuits when either the outer
// weight or the inner-hit query produces no scorer for this segment.
public static void intersect(Weight weight, Weight innerHitQueryWeight, Collector collector, LeafReaderContext ctx) throws IOException { ScorerSupplier scorerSupplier = weight.scorerSupplier(ctx); if (scorerSupplier == null) { return; ScorerSupplier innerHitQueryScorerSupplier = innerHitQueryWeight.scorerSupplier(ctx); if (innerHitQueryScorerSupplier == null) { return;
// NOTE(review): fragment — the opening of a per-function filter check inside a
// loop; the enclosing method and the loop continuation are outside this view.
// Skips the current function when its filter does not match `doc`.
if (filterWeights[i] != null) { final Bits docSet = Lucene.asSequentialAccessBits( context.reader().maxDoc(), filterWeights[i].scorerSupplier(context)); if (docSet.get(doc) == false) { continue;
private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException {
    final Scorer subQueryScorer = subQueryWeight.scorer(context);
    if (subQueryScorer == null) {
        // No sub-query matches in this segment, so there is nothing to score.
        return null;
    }
    final int maxDoc = context.reader().maxDoc();
    final LeafScoreFunction[] leafFunctions = new LeafScoreFunction[functions.length];
    final Bits[] docSets = new Bits[functions.length];
    for (int i = 0; i < functions.length; i++) {
        leafFunctions[i] = functions[i].getLeafScoreFunction(context);
        // A function without a filter applies to every document in the segment.
        docSets[i] = filterWeights[i] == null
                ? new Bits.MatchAllBits(maxDoc)
                : Lucene.asSequentialAccessBits(maxDoc, filterWeights[i].scorerSupplier(context));
    }
    return new FunctionFactorScorer(this, subQueryScorer, scoreMode, functions, maxBoost, leafFunctions, docSets, combineFunction, needsScores);
}
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
    // Remember which segment reader was searched so shard-level lookups of its
    // hits succeed later, then hand off to the wrapped weight.
    shardKeyMap.add(context.reader());
    return in.scorerSupplier(context);
}
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
    // Track the segment reader for shard-key resolution, then delegate.
    shardKeyMap.add(context.reader());
    return in.scorerSupplier(context);
}
@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
    // Record the reader in the shard-key map before delegating to the inner weight.
    shardKeyMap.add(context.reader());
    return in.scorerSupplier(context);
}
@Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { // no need to provide deleted docs to the filter final Bits bits = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), filter.get().scorerSupplier(ctx)); return new LeafBucketCollectorBase(sub, null) { @Override public void collect(int doc, long bucket) throws IOException { if (bits.get(doc)) { collectBucket(sub, doc, bucket); } } }; }
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
    // Build the segment-local filter bits and gate the wrapped collector with them.
    final ScorerSupplier supplier = filter.scorerSupplier(context);
    final LeafCollector inner = collector.getLeafCollector(context);
    final Bits accepted = Lucene.asSequentialAccessBits(context.reader().maxDoc(), supplier);
    return new FilterLeafCollector(inner) {
        @Override
        public void collect(int doc) throws IOException {
            if (accepted.get(doc)) {
                inner.collect(doc);
            }
        }
    };
}
@Override
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
    // Per-segment setup: filter bits plus the delegate leaf collector.
    final ScorerSupplier filterScorers = filter.scorerSupplier(context);
    final LeafCollector downstream = collector.getLeafCollector(context);
    final Bits allowed = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorers);
    return new FilterLeafCollector(downstream) {
        @Override
        public void collect(int doc) throws IOException {
            // Drop docs the filter rejects; pass the rest through unchanged.
            if (allowed.get(doc)) {
                downstream.collect(doc);
            }
        }
    };
}