// Test fixture (fragment): hand-builds a SearchResponse containing exactly one hit.
// NOTE(review): this is the interior of an unseen test method; names such as
// StringText/BytesArray come from imports outside this view.
SearchShardTarget shardTarget = new SearchShardTarget("1", "monitoring", 1);
ShardSearchFailure[] shardFailures = new ShardSearchFailure[0]; // no shard failures
float score = 0.2345f;
// Raw JSON used verbatim as the hit's _source.
BytesReference source = new BytesArray("{\"@timestamp\":\"2014-08-20T15:43:20.762Z\",\"category_name\"" + ":\"cat1111\",\"alert_message\":\"the new cpu threshold has been reached 80%\",\"alert_type\":" + "\"Critical\",\"view_mode\":\"unread\"}");
// Single hit: internal doc id 1, _id "5YmRf-...", _type "quota-management", no fields map.
InternalSearchHit hit = new InternalSearchHit(1, "5YmRf-6OTvelt29V5dphmw", new StringText("quota-management"), null);
hit.shardTarget(shardTarget);
hit.sourceRef(source);
hit.score(score);
InternalSearchHit[] hits = new InternalSearchHit[]{hit};
// totalHits = 28, maxScore reuses the single hit's score.
InternalSearchHits internalSearchHits = new InternalSearchHits(hits, 28, score);
// No aggregations/suggest/profile; timedOut = false.
InternalSearchResponse internalSearchResponse = new InternalSearchResponse(internalSearchHits, null, null, null, false);
// 1 total / 1 successful shard, took 1000ms.
SearchResponse searchResponse = new SearchResponse(internalSearchResponse, "scrollId", 1, 1, 1000, shardFailures);
/**
 * Deserializes a single {@link InternalSearchHit} from the given stream.
 *
 * @param in      the stream to read the hit from
 * @param context shared stream context used across hits of one response
 * @return the freshly read hit
 * @throws IOException if the stream cannot be read
 */
public static InternalSearchHit readSearchHit(StreamInput in, InternalSearchHits.StreamContext context) throws IOException {
    final InternalSearchHit result = new InternalSearchHit();
    result.readFrom(in, context);
    return result;
}
@Override
public Explanation getExplanation() {
    // Getter-style alias for the internal explanation() accessor.
    return this.explanation();
}
// Fragment: assembles an InternalSearchHit from previously parsed pieces
// (fields/highlights/sorts builders are populated outside this view).
InternalSearchHit internalSearchHit = new InternalSearchHit(docId, id, type, fields.build());
internalSearchHit.score(score);
internalSearchHit.sourceRef(source);
internalSearchHit.shardTarget(searchShardTarget);
// Register the last parsed highlight field before sealing the map.
highlights.put(highlightField.getName(), highlightField);
internalSearchHit.highlightFields(highlights.build());
internalSearchHit.sortValues(sorts.toArray());
internalSearchHit.matchedQueries(matched_filters.toArray(new String[matched_filters.size()]));
// NOTE(review): getExplanation(explanation) presumably converts a parsed
// representation into a Lucene Explanation — defined outside this view.
internalSearchHit.explanation(getExplanation(explanation));
// Fragment: wraps a fetched document as an InternalSearchHit and hands it to
// the collector; _source is attached only when the request asked for it.
InternalSearchHit searchHit = new InternalSearchHit(doc, fieldsVisitor.uid().id(), typeText, sourceRequested ? fieldsVisitor.source() : null, searchFields);
searchHit.shardTarget(context.shardTarget());
collectHit(searchHit);
// Counter lives in the enclosing (unseen) scope.
numExported++;
// Fragment (closing braces are outside this view): copies per-shard ranking
// info onto the fetched hit during reduce/merge.
if (index < fetchResult.hits().internalHits().length) {
    InternalSearchHit searchHit = fetchResult.hits().internalHits()[index];
    searchHit.score(shardDoc.score);
    searchHit.shard(fetchResult.shardTarget());
    searchHit.sortValues(fieldDoc.fields);
    // When one of the sort fields is _score, prefer that value over shardDoc.score.
    if (sortScoreIndex != -1) {
        searchHit.score(((Number) fieldDoc.fields[sortScoreIndex]).floatValue());
@Override
public BytesReference getSourceRef() {
    // Getter-style alias for the internal sourceRef() accessor.
    return this.sourceRef();
}
// Fragment (brace balance completed outside this view): attaches shard, score
// and sort values to the i-th inner hit, then installs the inner-hits map on
// the parent hit.
ScoreDoc scoreDoc = topDocs.scoreDocs[i];
InternalSearchHit searchHitFields = internalHits[i];
searchHitFields.shard(innerHits.shardTarget());
searchHitFields.score(scoreDoc.score);
// Sorted queries carry their sort values on a FieldDoc subclass.
if (scoreDoc instanceof FieldDoc) {
    FieldDoc fieldDoc = (FieldDoc) scoreDoc;
    searchHitFields.sortValues(fieldDoc.fields);
hitContext.hit().setInnerHits(results);
// Fragment (closing braces outside this view): builds a hit and lazily creates
// the named field entry from raw _source values.
typeText = documentMapper.typeText();
InternalSearchHit searchHit = new InternalSearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields);
// Pull raw values for extractFieldName straight out of the source lookup.
List<Object> values = context.lookup().source().extractRawValues(extractFieldName);
if (!values.isEmpty()) {
    // Lazily allocate the fields map on first use.
    if (searchHit.fieldsOrNull() == null) {
        searchHit.fields(new HashMap<String, SearchHitField>(2));
    SearchHitField hitField = searchHit.fields().get(extractFieldName);
    if (hitField == null) {
        hitField = new InternalSearchHitField(extractFieldName, new ArrayList<>(2));
        searchHit.fields().put(extractFieldName, hitField);
// Fragment (heavily truncated — the loop bodies and the surrounding while/if
// structure for the dangling '&&' clauses are outside this view): indexes hits
// by _id, then folds an external score into each matching hit, tracking maxScore.
Map<String, InternalSearchHit> hitMap = new HashMap<>();
for (InternalSearchHit hit : searchHits) {
    hitMap.put(hit.getId(), hit);
ExternalResult remoteResult = remoteScore.get(item.getKey());
if (remoteResult != null) {
    // Combine local and remote scores; composeScore() is defined elsewhere.
    float newScore = composeScore(item.getValue().score(), remoteResult.getScore());
    if (maxScore < newScore) {
        maxScore = newScore;
    item.getValue().score(newScore);
// NOTE(review): the following conditions belong to an elided loop header —
// they advance k past already-placed hits with a higher score.
&& k < newSearchHits.size() && newSearchHits.get(k) != null && newSearchHits.get(k).score() > item.getValue().score() && k < totalSize) { k++;
@Override public void hitExecute(SearchContext context, HitContext hitContext) { try { final int topLevelDocId = hitContext.hit().docId(); Explanation explanation = context.searcher().explain(context.query(), topLevelDocId); for (RescoreSearchContext rescore : context.rescore()) { explanation = rescore.rescorer().explain(topLevelDocId, context, rescore, explanation); } // we use the top level doc id, since we work with the top level searcher hitContext.hit().explanation(explanation); } catch (IOException e) { throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().type() + "#" + hitContext.hit().id() + "]", e); } finally { context.clearReleasables(SearchContext.Lifetime.COLLECTION); } } }
@Override
public float getScore() {
    // Getter-style alias for the internal score() accessor.
    return this.score();
}
// Fragment (closing braces and the per-segment scorer setup are outside this
// view): records, per hit, which named queries matched it.
for (int i = 0; i < hits.length; ++i) {
    InternalSearchHit hit = hits[i];
    // Map the hit's global doc id to the leaf that contains it.
    int hitReaderIndex = ReaderUtil.subIndex(hit.docId(), context.searcher().getIndexReader().leaves());
    // Only rebuild the matching-docs bits when we cross into a new segment.
    if (readerIndex != hitReaderIndex) {
        readerIndex = hitReaderIndex;
    matchingDocs = Lucene.asSequentialAccessBits(ctx.reader().maxDoc(), scorer);
    // Segment-local doc id = global id minus the segment's doc base.
    if (matchingDocs.get(hit.docId() - docBase)) {
        matchedQueries[i].add(name);
hits[i].matchedQueries(matchedQueries[i].toArray(new String[0]));
// Fragment (closing braces outside this view): when an explanation exists,
// emit the shard/node that produced it, then the explanation tree itself.
if (explanation() != null && shard != null) {
    builder.field("_shard", shard.shardId());
    builder.field("_node", shard.nodeIdText());
if (explanation() != null) {
    builder.field(Fields._EXPLANATION);
    buildExplanation(builder, explanation());
@Override
public Map<String, HighlightField> getHighlightFields() {
    // Getter-style alias for the internal highlightFields() accessor.
    return this.highlightFields();
}
@Override
public Object[] getSortValues() {
    // Getter-style alias for the internal sortValues() accessor.
    return this.sortValues();
}
/**
 * Binds this context to a single in-memory document: points the search lookup
 * at doc 0 of the searcher's first leaf, mirrors the document's indexed fields
 * into SearchHitField stubs, and resets the hit context with a placeholder hit.
 *
 * NOTE(review): assumes the reader has at least one leaf and the document of
 * interest is doc 0 of leaf 0 — confirm the searcher wraps a single-doc index.
 */
public void initialize(Engine.Searcher docSearcher, ParsedDocument parsedDocument) {
    this.docSearcher = docSearcher;

    IndexReader reader = docSearcher.reader();
    LeafReaderContext leafContext = reader.leaves().get(0);
    LeafSearchLookup leafLookup = lookup().getLeafSearchLookup(leafContext);
    leafLookup.setDocument(0);
    leafLookup.source().setSource(parsedDocument.source());

    // One empty-valued SearchHitField per indexed field name.
    Map<String, SearchHitField> fields = new HashMap<>();
    for (IndexableField field : parsedDocument.rootDoc().getFields()) {
        fields.put(field.name(), new InternalSearchHitField(field.name(), Collections.emptyList()));
    }

    // Placeholder hit: doc id 0, unknown _id, the parsed document's type.
    hitContext().reset(
            new InternalSearchHit(0, "unknown", new Text(parsedDocument.type()), fields),
            leafContext, 0, docSearcher.searcher()
    );
}
// Fragment: resolves the set of field names to highlight, expanding wildcard
// patterns via the type's mappers.
// NOTE(review): the else-branch assignment of fieldNamesToHighlight and the
// force-source condition guarding the throw are elided from this view — the
// apparent use-before-assignment is an artifact of the cut, confirm upstream.
Collection<String> fieldNamesToHighlight;
if (Regex.isSimpleMatchPattern(field.field())) {
    // Wildcard pattern: expand to all matching full field names.
    DocumentMapper documentMapper = context.mapperService().documentMapper(hitContext.hit().type());
    fieldNamesToHighlight = documentMapper.mappers().simpleMatchToFullName(field.field());
} else {
    SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).sourceMapper();
    // Highlighting from _source is impossible when the type disabled it.
    if (!sourceFieldMapper.enabled()) {
        throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight + " but type [" + hitContext.hit().type() + "] has disabled _source");
hitContext.hit().highlightFields(highlightFields);
// Serializes only the hit's _id field. The trailing "} };" closes the
// enclosing anonymous class, whose declaration is outside this view.
@Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return builder.field(Fields._ID, hit.getId()); } };