// Delegates to the wrapped context (`in` is the inner SearchContext of this filter/decorator).
@Override public Query query() { return in.query(); }
// NOTE(review): fragment — enclosing method not visible here.
// Propagate the pagination window from the context into the shard-level query result.
queryResult.from(searchContext.from());
queryResult.size(searchContext.size());
// The context query must already be in rewritten form at this point; rewriting an
// already-rewritten query returns the same instance, which the assert relies on.
Query query = searchContext.query();
assert query == searcher.rewrite(query); // already rewritten
/**
 * Human-readable description of this search context: shard target, then the
 * non-default search type, scroll keep-alive (if any), and the query.
 */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder().append(shardTarget());
    if (searchType() != SearchType.DEFAULT) {
        sb.append("searchType=[").append(searchType()).append("]");
    }
    if (scrollContext() != null) {
        if (scrollContext().scroll == null) {
            sb.append("scroll=[null]");
        } else {
            sb.append("scroll=[").append(scrollContext().scroll.keepAlive()).append("]");
        }
    }
    return sb.append(" query=[").append(query()).append("]").toString();
}
}
/**
 * Builds the composite aggregator: one {@code SingleDimensionValuesSource} per
 * configured source, a shared priority queue sized to {@code size} for the
 * competitive buckets, and (when possible) a sorted-docs producer derived from
 * the first source to visit documents in bucket order.
 *
 * @param size         maximum number of composite buckets to return
 * @param rawAfterKey  the {@code after} key to resume from, or null for the first page
 */
CompositeAggregator(String name, AggregatorFactories factories, SearchContext context, Aggregator parent,
                    List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData, int size,
                    CompositeValuesSourceConfig[] sourceConfigs, CompositeKey rawAfterKey) throws IOException {
    super(name, factories, context, parent, pipelineAggregators, metaData);
    this.size = size;
    // Per-source metadata kept in config order: names, sort direction multipliers, formats.
    this.sourceNames = Arrays.stream(sourceConfigs).map(CompositeValuesSourceConfig::name).collect(Collectors.toList());
    this.reverseMuls = Arrays.stream(sourceConfigs).mapToInt(CompositeValuesSourceConfig::reverseMul).toArray();
    this.formats = Arrays.stream(sourceConfigs).map(CompositeValuesSourceConfig::format).collect(Collectors.toList());
    this.sources = new SingleDimensionValuesSource[sourceConfigs.length];
    for (int i = 0; i < sourceConfigs.length; i++) {
        this.sources[i] = createValuesSource(context.bigArrays(), context.searcher().getIndexReader(),
            context.query(), sourceConfigs[i], size, i);
    }
    this.queue = new CompositeValuesCollectorQueue(context.bigArrays(), sources, size, rawAfterKey);
    // Only the leading source can drive sorted-doc production; may be null if unsupported.
    this.sortedDocsProducer = sources[0].createSortedDocsProducerOrNull(context.searcher().getIndexReader(), context.query());
}
/**
 * Renders the query for diagnostic output. If rewriting reduced the query to a
 * {@code MatchNoDocsQuery}, fall back to the original parsed (pre-rewrite) query,
 * which is more meaningful to the user.
 */
private String explain(SearchContext context, boolean rewritten) {
    final Query query = context.query();
    return rewritten && query instanceof MatchNoDocsQuery
        ? context.parsedQuery().query().toString()
        : query.toString();
}
}
// NOTE(review): fragment — enclosing method and the if-body's close are not visible here.
// Let the sorted-docs producer collect competitive composite buckets for this segment,
// optionally materializing the matching doc ids for a later replay pass.
DocIdSet docIdSet = sortedDocsProducer.processLeaf(context.query(), queue, ctx, fillDocIdSet);
if (fillDocIdSet) {
    entries.add(new Entry(ctx, docIdSet));
// NOTE(review): fragment — this is the tail of a method signature whose start is not visible.
IndexReader reader, boolean hasFilterCollector) throws IOException {
    final Query query = searchContext.query();
// NOTE(review): fragment — enclosing method not visible here.
// A Weight is only needed when scores are requested; otherwise stay null and skip scoring.
Weight weight = null;
if (needsScores) {
    weight = searchContext.searcher().createNormalizedWeight(searchContext.query(), true);
// NOTE(review): fragment — enclosing method not visible here.
// Global aggregations score against all documents (match-all); otherwise use the context query.
Weight weight = null;
if (needsScores) {
    Query query = isGlobal ? new MatchAllDocsQuery() : searchContext.query();
    weight = searchContext.searcher().createNormalizedWeight(query, true);
@Override public void hitExecute(SearchContext context, HitContext hitContext) { if (context.explain() == false) { return; } try { final int topLevelDocId = hitContext.hit().docId(); Explanation explanation = context.searcher().explain(context.query(), topLevelDocId); for (RescoreContext rescore : context.rescore()) { explanation = rescore.rescorer().explain(topLevelDocId, context.searcher(), rescore, explanation); } // we use the top level doc id, since we work with the top level searcher hitContext.hit().explanation(explanation); } catch (IOException e) { throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().getType() + "#" + hitContext.hit().getId() + "]", e); } finally { context.clearReleasables(SearchContext.Lifetime.COLLECTION); } } }
// NOTE(review): fragment — enclosing method not visible here.
// Build a normalized weight for the context query only when scoring is required.
Weight weight = null;
if (needsScores) {
    Query query = context.query();
    weight = context.searcher().createNormalizedWeight(query, true);
// Delegates batch hit processing to the inner-hits helper using the context's query and searcher.
@Override public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException { innerHitsExecute(context.query(), context.searcher(), hits); }
// Delegates batch hit processing to the inner-hits helper using the context's query and searcher.
@Override public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException { innerHitsExecute(context.query(), context.searcher(), hits); }
// NOTE(review): fragment — the try blocks opened here are closed outside this view.
// Collect every term referenced by the main query (and, below, by each rescorer)
// into a single set, using a delegate wrapper around the hash set.
final ObjectHashSet<Term> termsSet = new ObjectHashSet<>();
try {
    context.searcher().createNormalizedWeight(context.query(), true).extractTerms(new DelegateSet(termsSet));
    for (RescoreContext rescoreContext : context.rescore()) {
        try {
/**
 * Creates the hit stream for the terms-by-query phase, capped at
 * {@code maxTermsPerShard} hits per shard.
 *
 * @throws ElasticsearchParseException if {@code maxTermsPerShard} was never set
 */
@Override
protected HitStream getHitStream(SearchContext context) throws IOException {
    if (maxTermsPerShard != null) {
        return new TopHitStream(maxTermsPerShard, context.query(), context.searcher());
    }
    throw new ElasticsearchParseException("[termsByQuery] maxTermsPerShard parameter is null");
}
}
/**
 * Renders the query for diagnostic output. If rewriting reduced the query to a
 * {@code MatchNoDocsQuery}, fall back to the original parsed (pre-rewrite) query,
 * which is more meaningful to the user.
 */
private String explain(SearchContext context, boolean rewritten) throws IOException {
    final Query query = context.query();
    return rewritten && query instanceof MatchNoDocsQuery
        ? context.parsedQuery().query().toString()
        : query.toString();
}
}
/**
 * Renders the query for diagnostic output. If rewriting reduced the query to a
 * {@code MatchNoDocsQuery}, fall back to the original parsed (pre-rewrite) query,
 * which is more meaningful to the user.
 */
private String explain(SearchContext context, boolean rewritten) {
    final Query query = context.query();
    return rewritten && query instanceof MatchNoDocsQuery
        ? context.parsedQuery().query().toString()
        : query.toString();
}
}
/**
 * Renders the query for diagnostic output. If rewriting reduced the query to a
 * {@code MatchNoDocsQuery}, fall back to the original parsed (pre-rewrite) query,
 * which is more meaningful to the user.
 */
private String explain(SearchContext context, boolean rewritten) throws IOException {
    final Query query = context.query();
    return rewritten && query instanceof MatchNoDocsQuery
        ? context.parsedQuery().query().toString()
        : query.toString();
}
}
// NOTE(review): fragment — the rescore loop opened here continues outside this view.
// Prepare the context, then explain the matched doc against the top-level searcher;
// the doc id is rebased from segment-local to top-level via the doc base.
context.preProcess(true);
int topLevelDocId = result.docIdAndVersion().docId + result.docIdAndVersion().docBase;
Explanation explanation = context.searcher().explain(context.query(), topLevelDocId);
for (RescoreContext ctx : context.rescore()) {
    Rescorer rescorer = ctx.rescorer();