@Override public Explanation explain(Query query, int doc) throws IOException { if (aggregatedDfs != null) { // dfs data is needed to explain the score return super.explain(createNormalizedWeight(query, true), doc); } return in.explain(query, doc); }
protected Weight createInnerHitQueryWeight() throws IOException {
    // Scores are only required when hits will actually be fetched (size != 0)
    // and the sort (if any) itself depends on scores.
    boolean scoresRequired;
    if (size() == 0) {
        scoresRequired = false;
    } else if (sort() != null && sort().sort.needsScores() == false) {
        scoresRequired = false;
    } else {
        scoresRequired = true;
    }
    return context.searcher().createNormalizedWeight(query(), scoresRequired);
}
// NOTE(review): fragment of a larger method — the enclosing scope is not visible here.
int readerIndex = -1; // index of the current leaf reader; -1 = none visited yet
int docBase = -1; // doc-id base of the current leaf; -1 = not initialized
// Scoring is not needed for this traversal, so the weight is built with needsScores=false.
Weight weight = context.searcher().createNormalizedWeight(query, false);
Bits matchingDocs = null; // lazily populated per-leaf match bits (presumably — TODO confirm against full method)
final IndexReader indexReader = context.searcher().getIndexReader();
// NOTE(review): fragment — the matching close brace lies outside this view.
Weight weight = null; // stays null when scores are not needed
if (needsScores) {
    // Scoring requested: build a normalized weight for the context's top-level query.
    weight = searchContext.searcher().createNormalizedWeight(searchContext.query(), true);
// NOTE(review): fragment — enclosing method and close brace not visible here.
if (needsScores) {
    // A "global" scope scores against all documents; otherwise use the context query.
    Query query = isGlobal ? new MatchAllDocsQuery() : searchContext.query();
    weight = searchContext.searcher().createNormalizedWeight(query, true);
// NOTE(review): fragment — enclosing method and close brace not visible here.
if (needsScores) {
    // Scoring requested: normalize a weight for the context's top-level query.
    Query query = context.query();
    weight = context.searcher().createNormalizedWeight(query, true);
// NOTE(review): fragment — the loop this `continue` belongs to is outside this view.
continue;
// Child filter is a pure filter, so the weight is created without scores.
final Weight childWeight = context.searcher().createNormalizedWeight(childFilter, false);
Scorer childScorer = childWeight.scorer(subReaderContext);
// A null scorer means no child documents match in this segment.
if (childScorer == null) {
@Override public Explanation explain(Query query, int doc) throws IOException { if (aggregatedDfs != null) { // dfs data is needed to explain the score return super.explain(createNormalizedWeight(query, true), doc); } return in.explain(query, doc); }
@Override public Explanation explain(Query query, int doc) throws IOException { if (aggregatedDfs != null) { // dfs data is needed to explain the score return super.explain(createNormalizedWeight(query, true), doc); } return in.explain(query, doc); }
@Override public Explanation explain(Query query, int doc) throws IOException { if (aggregatedDfs != null) { // dfs data is needed to explain the score return super.explain(createNormalizedWeight(query, true), doc); } return in.explain(query, doc); }
@Override
public void extractTerms(SearchContext context, RescoreSearchContext rescoreContext, Set<Term> termsSet) {
    // The rescore context is always a query rescore here; extract the terms of its query.
    QueryRescoreContext queryRescoreContext = (QueryRescoreContext) rescoreContext;
    try {
        context.searcher()
                .createNormalizedWeight(queryRescoreContext.query(), false)
                .extractTerms(termsSet);
    } catch (IOException e) {
        throw new IllegalStateException("Failed to extract terms", e);
    }
}
// NOTE(review): fragment — enclosing method and the `if` body's close brace are outside this view.
BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter);
// Join parent doc to its block of children; the weight is filter-only (needsScores=false).
Query q = new ParentChildrenBlockJoinQuery(parentFilter, childFilter, parentDocId);
Weight weight = context.searcher().createNormalizedWeight(q, false);
// size() == 0 means only the hit count is wanted, not the hits themselves.
if (size() == 0) {
    TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
@Override
public void extractTerms(SearchContext context, RescoreSearchContext rescoreContext, Set<Term> termsSet) {
    try {
        // Normalize a non-scoring weight for the rescore query, then collect its terms.
        Weight rescoreWeight =
                context.searcher().createNormalizedWeight(((QueryRescoreContext) rescoreContext).query(), false);
        rescoreWeight.extractTerms(termsSet);
    } catch (IOException e) {
        throw new IllegalStateException("Failed to extract terms", e);
    }
}
// NOTE(review): fragment — both try blocks are left open; their handlers are outside this view.
final ObjectHashSet<Term> termsSet = new ObjectHashSet<>();
try {
    // Collect the terms of the main query (with scores) into the shared set.
    context.searcher().createNormalizedWeight(context.query(), true).extractTerms(new DelegateSet(termsSet));
    // Also gather terms from every configured rescore query.
    for (RescoreContext rescoreContext : context.rescore()) {
        try {
/**
 * Builds an aggregator that joins parent-bucket ordinals to their child documents.
 *
 * @param childFilter  query matching the child documents; pre-normalized here as a non-scoring weight
 * @param parentFilter query matching the parent documents; likewise non-scoring
 * @param maxOrd       upper bound on parent global ordinals, sizing the ord-to-bucket array
 * @throws IOException if weight creation against the searcher fails
 */
public ParentToChildrenAggregator(String name, AggregatorFactories factories, SearchContext context,
        Aggregator parent, Query childFilter, Query parentFilter,
        ValuesSource.Bytes.WithOrdinals valuesSource,
        long maxOrd, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
        throws IOException {
    super(name, factories, context, parent, pipelineAggregators, metaData);
    // these two filters are cached in the parser
    this.childFilter = context.searcher().createNormalizedWeight(childFilter, false);
    this.parentFilter = context.searcher().createNormalizedWeight(parentFilter, false);
    // Dense mapping from parent ordinal -> bucket; -1 marks "no bucket yet".
    this.parentOrdToBuckets = context.bigArrays().newLongArray(maxOrd, false);
    this.parentOrdToBuckets.fill(0, maxOrd, -1);
    // Overflow map for parents that map to more than one bucket.
    this.parentOrdToOtherBuckets = new LongObjectPagedHashMap<>(context.bigArrays());
    this.valuesSource = valuesSource;
}
/**
 * Builds an aggregator that joins parent-bucket ordinals to their child documents
 * (legacy variant taking an {@code AggregationContext} and explicit parent type).
 *
 * @param parentType   name of the parent document type this aggregation joins against
 * @param childFilter  query matching the child documents; pre-normalized here as a non-scoring weight
 * @param parentFilter query matching the parent documents; likewise non-scoring
 * @param maxOrd       upper bound on parent global ordinals, sizing the ord-to-bucket array
 * @throws IOException if weight creation against the searcher fails
 */
public ParentToChildrenAggregator(String name, AggregatorFactories factories,
        AggregationContext aggregationContext, Aggregator parent, String parentType, Query childFilter,
        Query parentFilter, ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource,
        long maxOrd, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
        throws IOException {
    super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
    this.parentType = parentType;
    // these two filters are cached in the parser
    this.childFilter = aggregationContext.searchContext().searcher().createNormalizedWeight(childFilter, false);
    this.parentFilter = aggregationContext.searchContext().searcher().createNormalizedWeight(parentFilter, false);
    // Dense mapping from parent ordinal -> bucket; -1 marks "no bucket yet".
    this.parentOrdToBuckets = aggregationContext.bigArrays().newLongArray(maxOrd, false);
    this.parentOrdToBuckets.fill(0, maxOrd, -1);
    // Overflow map for parents that map to more than one bucket.
    this.parentOrdToOtherBuckets = new LongObjectPagedHashMap<>(aggregationContext.bigArrays());
    this.valuesSource = valuesSource;
}
protected Weight createInnerHitQueryWeight() throws IOException {
    // Hits are fetched iff size != 0; scoring then matters unless an explicit
    // sort is present that does not itself need scores.
    final boolean fetchesHits = size() != 0;
    final boolean sortNeedsScores = sort() == null || sort().sort.needsScores();
    return context.searcher().createNormalizedWeight(query(), fetchesHits && sortNeedsScores);
}
protected Weight createInnerHitQueryWeight() throws IOException {
    // Skip scoring entirely when no hits are returned.
    if (size() == 0) {
        return context.searcher().createNormalizedWeight(query(), false);
    }
    // With hits requested, scores are needed unless the sort replaces them.
    final boolean needsScores = sort() == null || sort().sort.needsScores();
    return context.searcher().createNormalizedWeight(query(), needsScores);
}
protected Weight createInnerHitQueryWeight() throws IOException {
    boolean wantScores;
    if (size() == 0) {
        // No hits fetched, so scores are irrelevant.
        wantScores = false;
    } else {
        // Score unless a sort is defined that does not depend on scores.
        wantScores = sort() == null || sort().sort.needsScores();
    }
    return context.searcher().createNormalizedWeight(query(), wantScores);
}
// NOTE(review): fragment — enclosing method and close brace not visible here.
if (needsScores) {
    // Scoring requested: normalize a scoring weight for the context's top-level query.
    Query query = context.query();
    weight = context.searcher().createNormalizedWeight(query, true);