/** Records an int-valued stored field under its field name. */
@Override
public void intField(FieldInfo fieldInfo, int value) throws IOException {
    final String fieldName = fieldInfo.name;
    addValue(fieldName, value);
}
/**
 * Prepares this visitor to collect a single stored field on the next
 * document load, clearing any previously collected state.
 *
 * @param field the stored field name to collect
 */
public void reset(String field) {
    this.field = field;
    super.reset();
}
/**
 * Exposes the metadata captured while visiting stored fields as regular
 * field values: {@code _id}, {@code _type}, and — when both are known —
 * the legacy {@code _uid}.
 */
@Override
public void postProcess(MapperService mapperService) {
    super.postProcess(mapperService);
    if (id != null) {
        addValue(IdFieldMapper.NAME, id);
    }
    if (type != null) {
        addValue(TypeFieldMapper.NAME, type);
        if (id != null) {
            // The legacy _uid is only meaningful when both type and id are present.
            addValue(UidFieldMapper.NAME, Uid.createUid(type, id));
        }
    }
}
}
// NOTE(review): this line is a truncated/corrupted fragment of a larger method
// (it reads like LuceneChangesSnapshot's doc-to-operation conversion). The final
// statement — "source.toBytesRef().bytes, fields.routing(), null,
// autoGeneratedIdTimestamp);" — is a dangling argument list with no enclosing
// call, and it sits inside an "if (source == null)" branch that would dereference
// a null source. The Translog.Index construction and the missing-source handling
// appear to have been lost. Do not rely on this code; restore it from upstream.
final String sourceField = parallelArray.hasRecoverySource[docIndex] ? SourceFieldMapper.RECOVERY_SOURCE_NAME : SourceFieldMapper.NAME; final FieldsVisitor fields = new FieldsVisitor(true, sourceField); leaf.reader().document(segmentDocID, fields); fields.postProcess(mapperService); if (isTombstone && fields.uid() == null) { op = new Translog.NoOp(seqNo, primaryTerm, fields.source().utf8ToString()); assert version == 1L : "Noop tombstone should have version 1L; actual version [" + version + "]"; assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Noop but soft_deletes field is not set [" + op + "]"; } else { final String id = fields.uid().id(); final String type = fields.uid().type(); final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); if (isTombstone) { assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + op + "]"; } else { final BytesReference source = fields.source(); if (source == null) { source.toBytesRef().bytes, fields.routing(), null, autoGeneratedIdTimestamp);
/**
 * Returns the {@link FieldLookup} for {@code name}, creating and caching it on
 * first access and loading its stored values from the current document if they
 * have not been loaded yet.
 *
 * @throws IllegalArgumentException        if the field is not in the mappings
 * @throws ElasticsearchParseException     if reading the stored field fails
 */
private FieldLookup loadFieldData(String name) {
    FieldLookup lookup = cachedFieldData.get(name);
    if (lookup == null) {
        final MappedFieldType fieldType = mapperService.fullName(name);
        if (fieldType == null) {
            throw new IllegalArgumentException("No field found for [" + name + "] in mapping with types "
                + Arrays.toString(types));
        }
        lookup = new FieldLookup(fieldType);
        cachedFieldData.put(name, lookup);
    }
    if (lookup.fields() == null) {
        final String fieldName = lookup.fieldType().name();
        // On single-type indices _uid is not stored; its value is read from _id instead,
        // but the result is still keyed under the original field name.
        final String storedFieldToLoad = (singleType && UidFieldMapper.NAME.equals(fieldName))
            ? IdFieldMapper.NAME
            : fieldName;
        fieldVisitor.reset(storedFieldToLoad);
        try {
            reader.document(docId, fieldVisitor);
            fieldVisitor.postProcess(mapperService);
            lookup.fields(singletonMap(fieldName, fieldVisitor.fields().get(fieldName)));
        } catch (IOException e) {
            throw new ElasticsearchParseException("failed to load field [{}]", e, name);
        }
    }
    return lookup;
}
/**
 * Builds the stored-fields visitor for a fetch: a custom visitor when explicit
 * fields are requested, a plain source-loading visitor when only {@code _source}
 * is wanted, or {@code null} when nothing needs to be loaded at all.
 */
private static FieldsVisitor buildFieldsVisitors(String[] fields, FetchSourceContext fetchSourceContext) {
    final boolean loadSource = fetchSourceContext.fetchSource();
    if (fields != null && fields.length > 0) {
        return new CustomFieldsVisitor(Sets.newHashSet(fields), loadSource);
    }
    return loadSource ? new FieldsVisitor(true) : null;
}
}
private SearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, Map<String, Set<String>> storedToRequestedFields, LeafReaderContext subReaderContext) { if (fieldsVisitor == null) { return new SearchHit(docId); } Map<String, DocumentField> searchFields = getSearchFields(context, fieldsVisitor, subDocId, storedToRequestedFields, subReaderContext); DocumentMapper documentMapper = context.mapperService().documentMapper(fieldsVisitor.uid().type()); Text typeText; if (documentMapper == null) { typeText = new Text(fieldsVisitor.uid().type()); } else { typeText = documentMapper.typeText(); } SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields); // Set _source if requested. SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(subReaderContext, subDocId); if (fieldsVisitor.source() != null) { sourceLookup.setSource(fieldsVisitor.source()); } return searchHit; }
/**
 * Loads the stored fields for {@code subDocId} and maps each stored field to
 * the request-level field name(s) it was asked for under; fields not present
 * in {@code storedToRequestedFields} keep their own name. Returns {@code null}
 * when the document has no loaded fields.
 */
private Map<String, DocumentField> getSearchFields(SearchContext context,
                                                   FieldsVisitor fieldsVisitor,
                                                   int subDocId,
                                                   Map<String, Set<String>> storedToRequestedFields,
                                                   LeafReaderContext subReaderContext) {
    loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId);
    fieldsVisitor.postProcess(context.mapperService());

    final Map<String, List<Object>> loaded = fieldsVisitor.fields();
    if (loaded.isEmpty()) {
        return null;
    }
    final Map<String, DocumentField> searchFields = new HashMap<>(loaded.size());
    for (Map.Entry<String, List<Object>> entry : loaded.entrySet()) {
        final String storedField = entry.getKey();
        final List<Object> storedValues = entry.getValue();
        final Set<String> requested = storedToRequestedFields.get(storedField);
        if (requested == null) {
            // Not explicitly mapped to a requested field: keep it under its own name.
            searchFields.put(storedField, new DocumentField(storedField, storedValues));
        } else {
            for (String requestedField : requested) {
                searchFields.put(requestedField, new DocumentField(requestedField, storedValues));
            }
        }
    }
    return searchFields;
}
/**
 * Lazily loads and parses the document {@code _source} into a map, caching the
 * result (and its content type) on this instance. Resolution order: an already
 * parsed {@code source}, then raw {@code sourceAsBytes}, then the stored
 * {@code _source} field read from the reader. A document without a stored
 * source yields an empty map with a {@code null} content type.
 *
 * @throws ElasticsearchParseException if reading or parsing the source fails
 */
private Map<String, Object> loadSourceIfNeeded() {
    if (source != null) {
        return source;
    }
    if (sourceAsBytes != null) {
        Tuple<XContentType, Map<String, Object>> tuple = sourceAsMapAndType(sourceAsBytes);
        sourceContentType = tuple.v1();
        source = tuple.v2();
        return source;
    }
    try {
        FieldsVisitor sourceFieldVisitor = new FieldsVisitor(true);
        reader.document(docId, sourceFieldVisitor);
        // Named distinctly so it does not shadow the "source" field this method manages.
        BytesReference loadedSource = sourceFieldVisitor.source();
        if (loadedSource == null) {
            this.source = emptyMap();
            this.sourceContentType = null;
        } else {
            Tuple<XContentType, Map<String, Object>> tuple = sourceAsMapAndType(loadedSource);
            this.sourceContentType = tuple.v1();
            this.source = tuple.v2();
        }
    } catch (Exception e) {
        throw new ElasticsearchParseException("failed to parse / load source", e);
    }
    return this.source;
}
/** * Load field values for highlighting. */ public static List<Object> loadFieldValues(SearchContextHighlight.Field field, MappedFieldType fieldType, SearchContext searchContext, FetchSubPhase.HitContext hitContext) throws IOException { //percolator needs to always load from source, thus it sets the global force source to true boolean forceSource = searchContext.highlight().forceSource(field); List<Object> textsToHighlight; if (!forceSource && fieldType.stored()) { CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(fieldType.name()), false); hitContext.reader().document(hitContext.docId(), fieldVisitor); textsToHighlight = fieldVisitor.fields().get(fieldType.name()); if (textsToHighlight == null) { // Can happen if the document doesn't have the field to highlight textsToHighlight = Collections.emptyList(); } } else { SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId()); textsToHighlight = sourceLookup.extractRawValues(fieldType.name()); } assert textsToHighlight != null; return textsToHighlight; }
/**
 * Loads a field if the base visitor wants it or if it was explicitly
 * requested in {@code fields}.
 */
@Override
public Status needsField(FieldInfo fieldInfo) throws IOException {
    final boolean wanted =
        super.needsField(fieldInfo) == Status.YES || fields.contains(fieldInfo.name);
    return wanted ? Status.YES : Status.NO;
}
}
/**
 * Resolves the (single) document type on single-type indices and converts
 * every loaded stored value in place to its display representation via the
 * field's mapper.
 *
 * @throws IllegalStateException if a loaded field has no mapping
 */
public void postProcess(MapperService mapperService) {
    if (mapperService.getIndexSettings().isSingleType()) {
        final Collection<String> types = mapperService.types();
        assert types.size() <= 1 : types;
        if (!types.isEmpty()) {
            type = types.iterator().next();
        }
    }
    for (Map.Entry<String, List<Object>> entry : fields().entrySet()) {
        final String fieldName = entry.getKey();
        final MappedFieldType fieldType = mapperService.fullName(fieldName);
        if (fieldType == null) {
            throw new IllegalStateException("Field [" + fieldName + "] exists in the index but not in mappings");
        }
        // Rewrite each raw stored value to its display form, in place.
        entry.getValue().replaceAll(fieldType::valueForDisplay);
    }
}
/**
 * Creates a per-leaf fields lookup bound to the given reader.
 *
 * @param mapperService mapping lookup used to resolve field types
 * @param types         the types to search, or {@code null} for all
 * @param reader        the leaf reader documents are loaded from
 */
LeafFieldsLookup(MapperService mapperService, @Nullable String[] types, LeafReader reader) {
    this.mapperService = mapperService;
    this.types = types;
    this.reader = reader;
    // Cached once; loadFieldData uses it to rewrite _uid lookups to _id on single-type indices.
    this.singleType = mapperService.getIndexSettings().isSingleType();
    // The visitor is reset with a concrete field name before each use.
    this.fieldVisitor = new SingleFieldsVisitor(null);
}
/** Records a float-valued stored field under its field name. */
@Override
public void floatField(FieldInfo fieldInfo, float value) throws IOException {
    final String fieldName = fieldInfo.name;
    addValue(fieldName, value);
}
/**
 * Creates a visitor with the given source-loading flag and source field name,
 * initializing the required-fields set and resetting all per-document state.
 */
public FieldsVisitor(boolean loadSource, String sourceFieldName) {
    this.loadSource = loadSource;
    this.sourceFieldName = sourceFieldName;
    this.requiredFields = new HashSet<>();
    reset();
}
/** Records a long-valued stored field under its field name. */
@Override
public void longField(FieldInfo fieldInfo, long value) throws IOException {
    final String fieldName = fieldInfo.name;
    addValue(fieldName, value);
}
/**
 * Resets the visitor and loads the stored fields of {@code docId} from the
 * given segment, wrapping any I/O failure in a fetch-phase exception.
 */
private void loadStoredFields(SearchContext searchContext, LeafReaderContext readerContext,
                              FieldsVisitor fieldVisitor, int docId) {
    fieldVisitor.reset();
    try {
        readerContext.reader().document(docId, fieldVisitor);
    } catch (IOException e) {
        throw new FetchPhaseExecutionException(searchContext, "Failed to fetch doc id [" + docId + "]", e);
    }
}
}
/** Records a double-valued stored field under its field name. */
@Override
public void doubleField(FieldInfo fieldInfo, double value) throws IOException {
    final String fieldName = fieldInfo.name;
    addValue(fieldName, value);
}
@Override public void stringField(FieldInfo fieldInfo, byte[] bytes) throws IOException { final String value = new String(bytes, StandardCharsets.UTF_8); if (UidFieldMapper.NAME.equals(fieldInfo.name)) { // 5.x-only // TODO: Remove when we are on 7.x Uid uid = Uid.createUid(value); type = uid.type(); id = uid.id(); } else if (IdFieldMapper.NAME.equals(fieldInfo.name)) { // only applies to 5.x indices that have single_type = true // TODO: Remove when we are on 7.x id = value; } else { addValue(fieldInfo.name, value); } }
/**
 * Records a binary stored field, special-casing the source field (kept as raw
 * bytes for later parsing) and the binary-encoded _id.
 */
@Override
public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException {
    final String fieldName = fieldInfo.name;
    if (sourceFieldName.equals(fieldName)) {
        source = new BytesArray(value);
        return;
    }
    if (IdFieldMapper.NAME.equals(fieldName)) {
        id = Uid.decodeId(value);
        return;
    }
    addValue(fieldName, new BytesRef(value));
}