// NOTE(review): truncated fragment — begins mid-catch (rethrowing a failed get of
// [type]/[id] as ElasticsearchException, cause preserved) and ends mid-loop while
// copying each loaded stored-field entry into a DocumentField keyed by field name.
// Presumably part of a ShardGetService-style document fetch — confirm against the
// full file before editing; the braces here are unbalanced by construction.
throw new ElasticsearchException("Failed to get type [" + type + "] and id [" + id + "]", e); source = fieldVisitor.source(); if (!fieldVisitor.fields().isEmpty()) { fieldVisitor.postProcess(mapperService); fields = new HashMap<>(fieldVisitor.fields().size()); for (Map.Entry<String, List<Object>> entry : fieldVisitor.fields().entrySet()) { fields.put(entry.getKey(), new DocumentField(entry.getKey(), entry.getValue()));
/**
 * Re-arms this visitor to capture a single stored field on the next document load.
 * The target field name is recorded before delegating to {@code super.reset()},
 * which clears any state accumulated from a previous document.
 *
 * @param field the stored field name to collect
 */
public void reset(String field) {
    this.field = field;
    super.reset();
}
// NOTE(review): truncated fragment from a changes-snapshot / translog-operation reader.
// It picks _recovery_source vs _source per-doc, loads stored fields, then builds either
// a Translog.NoOp (tombstone with no uid) or an indexed/delete op. The tail span
// "if (source == null) { source.toBytesRef().bytes, ..." is visibly garbled by
// extraction — dereferencing source inside a null check is not valid; the original
// almost certainly threw/branched on null before building the Index op. Do not edit
// from this fragment alone; recover the full method first.
final String sourceField = parallelArray.hasRecoverySource[docIndex] ? SourceFieldMapper.RECOVERY_SOURCE_NAME : SourceFieldMapper.NAME; final FieldsVisitor fields = new FieldsVisitor(true, sourceField); leaf.reader().document(segmentDocID, fields); fields.postProcess(mapperService); if (isTombstone && fields.uid() == null) { op = new Translog.NoOp(seqNo, primaryTerm, fields.source().utf8ToString()); assert version == 1L : "Noop tombstone should have version 1L; actual version [" + version + "]"; assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Noop but soft_deletes field is not set [" + op + "]"; } else { final String id = fields.uid().id(); final String type = fields.uid().type(); final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id)); if (isTombstone) { assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + op + "]"; } else { final BytesReference source = fields.source(); if (source == null) { source.toBytesRef().bytes, fields.routing(), null, autoGeneratedIdTimestamp);
/**
 * Loads the stored fields of one (sub-)document and remaps them onto the names the
 * user actually requested.
 *
 * <p>A stored field that appears as a key in {@code storedToRequestedFields} is fanned
 * out under every requested alias; any other loaded field is kept under its stored name.
 *
 * @return the requested-name to field mapping, or {@code null} when the document
 *         carries no stored fields at all
 */
private Map<String, DocumentField> getSearchFields(SearchContext context, FieldsVisitor fieldsVisitor, int subDocId,
                                                   Map<String, Set<String>> storedToRequestedFields,
                                                   LeafReaderContext subReaderContext) {
    loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId);
    fieldsVisitor.postProcess(context.mapperService());

    Map<String, List<Object>> loadedFields = fieldsVisitor.fields();
    if (loadedFields.isEmpty()) {
        // callers interpret null as "no fields for this hit"
        return null;
    }

    Map<String, DocumentField> searchFields = new HashMap<>(loadedFields.size());
    for (Map.Entry<String, List<Object>> loadedField : loadedFields.entrySet()) {
        String storedName = loadedField.getKey();
        List<Object> storedValues = loadedField.getValue();
        if (storedToRequestedFields.containsKey(storedName)) {
            for (String requestedName : storedToRequestedFields.get(storedName)) {
                searchFields.put(requestedName, new DocumentField(requestedName, storedValues));
            }
        } else {
            searchFields.put(storedName, new DocumentField(storedName, storedValues));
        }
    }
    return searchFields;
}
private SearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, Map<String, Set<String>> storedToRequestedFields, LeafReaderContext subReaderContext) { if (fieldsVisitor == null) { return new SearchHit(docId); } Map<String, DocumentField> searchFields = getSearchFields(context, fieldsVisitor, subDocId, storedToRequestedFields, subReaderContext); DocumentMapper documentMapper = context.mapperService().documentMapper(fieldsVisitor.uid().type()); Text typeText; if (documentMapper == null) { typeText = new Text(fieldsVisitor.uid().type()); } else { typeText = documentMapper.typeText(); } SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields); // Set _source if requested. SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(subReaderContext, subDocId); if (fieldsVisitor.source() != null) { sourceLookup.setSource(fieldsVisitor.source()); } return searchHit; }
private SearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, LeafReaderContext subReaderContext) { if (fieldsVisitor == null) { return new SearchHit(docId); } loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId); fieldsVisitor.postProcess(context.mapperService()); Map<String, SearchHitField> searchFields = null; if (!fieldsVisitor.fields().isEmpty()) { searchFields = new HashMap<>(fieldsVisitor.fields().size()); for (Map.Entry<String, List<Object>> entry : fieldsVisitor.fields().entrySet()) { searchFields.put(entry.getKey(), new SearchHitField(entry.getKey(), entry.getValue())); } } DocumentMapper documentMapper = context.mapperService().documentMapper(fieldsVisitor.uid().type()); Text typeText; if (documentMapper == null) { typeText = new Text(fieldsVisitor.uid().type()); } else { typeText = documentMapper.typeText(); } SearchHit searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields); // Set _source if requested. SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(subReaderContext, subDocId); if (fieldsVisitor.source() != null) { sourceLookup.setSource(fieldsVisitor.source()); } return searchHit; }
// NOTE(review): truncated fragment — a very old scan/search collector that resets a
// shared FieldsVisitor per doc, copies loaded fields into InternalSearchHitField
// entries, resolves the type text via the document mapper, and constructs a hit with
// optional _source. The spans between "entry.getValue()));" and
// "fieldsVisitor.uid().type());" (and again before "fieldsVisitor.uid().id()") lost
// intervening statements during extraction; braces are unbalanced. Recover the full
// method before editing.
@Override public void collect(int doc) throws IOException { fieldsVisitor.reset(); currentReader.document(doc, fieldsVisitor); if (fieldsVisitor.fields() != null) { searchFields = new HashMap<String, SearchHitField>( fieldsVisitor.fields().size()); for (Map.Entry<String, List<Object>> entry : fieldsVisitor .fields().entrySet()) { searchFields.put(entry.getKey(), new InternalSearchHitField( entry.getKey(), entry.getValue())); fieldsVisitor.uid().type()); Text typeText; if (documentMapper == null) { typeText = new StringAndBytesText(fieldsVisitor.uid().type()); } else { typeText = documentMapper.typeText(); fieldsVisitor.uid().id(), typeText, sourceRequested ? fieldsVisitor.source() : null, searchFields);
// NOTE(review): truncated fragment — an Elassandra-style loadStoredFields that resolves
// the Cassandra primary key from the uid, computes the required CQL columns, fetches the
// row, and maps it back onto requested fields (optionally injecting _node). Several
// statements are missing between "requiredColumns.add(SourceFieldMapper.NAME);" and
// "indexService, fieldVisitor.uid().type(), ..." (the fetchRow call itself is gone) and
// the method is cut off mid-body; braces are unbalanced. Also NOTE(review): the Logger
// obtained via Loggers.getLogger(FetchPhase.class) per call should be a static field in
// the full file — confirm there. Do not edit from this fragment alone.
private void loadStoredFields(SearchContext searchContext, LeafReaderContext readerContext, FieldsVisitor fieldVisitor, int docId) { fieldVisitor.reset(); try { readerContext.reader().document(docId, fieldVisitor); DocPrimaryKey docPk = clusterService.parseElasticId(indexService, fieldVisitor.uid().type(), fieldVisitor.uid().id()); String typeKey = fieldVisitor.uid().type(); if (docPk.isStaticDocument) typeKey += "_static"; NavigableSet<String> requiredColumns = fieldVisitor.requiredColumns(clusterService, searchContext); if (requiredColumns.size() > 0) { IndexMetaData indexMetaData = clusterService.state().metaData().index(searchContext.request().shardId().getIndexName()); DocumentMapper docMapper = searchContext.mapperService().documentMapper(fieldVisitor.uid().type()); if (fieldVisitor.loadSource() && docMapper.sourceMapper().enabled()) { requiredColumns.add(SourceFieldMapper.NAME); indexService, fieldVisitor.uid().type(), requiredColumns.toArray(new String[requiredColumns.size()]), docPk.isStaticDocument, docMapper.getColumnDefinitions()); Logger logger = Loggers.getLogger(FetchPhase.class); UntypedResultSet rs = UntypedResultSet.create(((ResultMessage.Rows)result).result); if (!rs.isEmpty()) { Map<String, Object> mapObject = clusterService.rowAsMap(indexService, fieldVisitor.uid().type(), rs.one()); if (searchContext.includeNode()) { mapObject.put(NodeFieldMapper.NAME, clusterService.state().nodes().getLocalNodeId()); if (fieldVisitor.requestedFields() != null && fieldVisitor.requestedFields().size() > 0) { Map<String, List<Object>> flatMap = new HashMap<String, List<Object>>();
/**
 * Records an int-valued stored field from the current document under its field name.
 */
@Override
public void intField(FieldInfo fieldInfo, int value) throws IOException {
    addValue(fieldInfo.name, value);
}
/**
 * Returns the document's {@code _source} as a map, loading and parsing it lazily.
 *
 * <p>Resolution order: an already-parsed map, then the raw {@code sourceAsBytes}
 * buffer, and finally a stored-fields read of {@code _source} from the reader.
 * A document without a stored source yields an empty map with a null content type.
 * The parsed map and its content type are cached on the instance for reuse.
 *
 * @throws ElasticsearchParseException if reading or parsing the source fails
 */
private Map<String, Object> loadSourceIfNeeded() {
    if (source != null) {
        return source;
    }
    if (sourceAsBytes != null) {
        Tuple<XContentType, Map<String, Object>> parsed = sourceAsMapAndType(sourceAsBytes);
        sourceContentType = parsed.v1();
        source = parsed.v2();
        return source;
    }
    try {
        FieldsVisitor sourceFieldVisitor = new FieldsVisitor(true);
        reader.document(docId, sourceFieldVisitor);
        // renamed local to avoid shadowing the instance field of the same name
        BytesReference loadedSource = sourceFieldVisitor.source();
        if (loadedSource == null) {
            this.source = emptyMap();
            this.sourceContentType = null;
        } else {
            Tuple<XContentType, Map<String, Object>> parsed = sourceAsMapAndType(loadedSource);
            this.sourceContentType = parsed.v1();
            this.source = parsed.v2();
        }
    } catch (Exception e) {
        throw new ElasticsearchParseException("failed to parse / load source", e);
    }
    return this.source;
}
/**
 * Collects one expired document: loads its uid and routing from stored fields,
 * resolves its version (when a versions source is available), and queues it for
 * purging. Failures are deliberately best-effort — the doc is skipped and the
 * error only trace-logged.
 */
@Override
public void collect(int doc) {
    try {
        FieldsVisitor visitor = new FieldsVisitor(false);
        context.reader().document(doc, visitor);
        Uid docUid = visitor.uid();
        final long docVersion = versions != null ? versions.get(doc) : Versions.NOT_FOUND;
        docsToPurge.add(new DocToPurge(docUid.type(), docUid.id(), docVersion, visitor.routing()));
    } catch (Exception e) {
        // best-effort: a doc we cannot read is simply not purged this round
        logger.trace("failed to collect doc", e);
    }
}
@Override public void collect(int doc) throws IOException { // the _source is the query uidValues.setDocument(doc); if (uidValues.count() > 0) { assert uidValues.count() == 1; final BytesRef uid = uidValues.valueAt(0); final BytesRef id = Uid.splitUidIntoTypeAndId(uid)[1]; fieldsVisitor.reset(); reader.document(doc, fieldsVisitor); try { // id is only used for logging, if we fail we log the id in the catch statement final Query parseQuery = percolator.parsePercolatorDocument(null, fieldsVisitor.source()); if (parseQuery != null) { queries.put(BytesRef.deepCopyOf(id), parseQuery); } else { logger.warn("failed to add query [{}] - parser returned null", id); } } catch (Exception e) { logger.warn("failed to add query [{}]", e, id.utf8ToString()); } } }
/**
 * Chooses the stored-fields visitor for a fetch request.
 *
 * <p>Explicit field names produce a {@link CustomFieldsVisitor} limited to those
 * fields. Without explicit fields, a plain {@link FieldsVisitor} is returned when
 * {@code _source} is wanted, and {@code null} (no stored-field loading at all)
 * otherwise.
 */
private static FieldsVisitor buildFieldsVisitors(String[] fields, FetchSourceContext fetchSourceContext) {
    if (fields != null && fields.length > 0) {
        return new CustomFieldsVisitor(Sets.newHashSet(fields), fetchSourceContext.fetchSource());
    }
    if (fetchSourceContext.fetchSource()) {
        return new FieldsVisitor(true);
    }
    return null;
}
}
/**
 * Resolves loaded stored-field values into their display form.
 *
 * <p>On a single-type index the (at most one) mapped type is adopted as this
 * visitor's type. Every loaded field is then looked up in the mappings and its
 * raw stored values are converted in place via
 * {@code MappedFieldType#valueForDisplay}.
 *
 * @throws IllegalStateException if a loaded field has no mapping
 */
public void postProcess(MapperService mapperService) {
    if (mapperService.getIndexSettings().isSingleType()) {
        final Collection<String> types = mapperService.types();
        assert types.size() <= 1 : types;
        if (!types.isEmpty()) {
            type = types.iterator().next();
        }
    }
    for (Map.Entry<String, List<Object>> entry : fields().entrySet()) {
        String fieldName = entry.getKey();
        MappedFieldType fieldType = mapperService.fullName(fieldName);
        if (fieldType == null) {
            throw new IllegalStateException("Field [" + fieldName + "] exists in the index but not in mappings");
        }
        // in-place conversion of each stored value to its display representation
        entry.getValue().replaceAll(fieldType::valueForDisplay);
    }
}
/**
 * Accepts a stored field if the base visitor wants it, or if it is one of the
 * explicitly requested field names. The base check runs first, preserving any
 * side effects it may have.
 */
@Override
public Status needsField(FieldInfo fieldInfo) throws IOException {
    boolean wanted = super.needsField(fieldInfo) == Status.YES || fields.contains(fieldInfo.name);
    return wanted ? Status.YES : Status.NO;
}
}
/**
 * After the base post-processing, mirrors the captured identity back into the
 * loaded-fields map: {@code _id} and {@code _type} individually when present,
 * plus a reconstructed {@code _uid} when both are known.
 */
@Override
public void postProcess(MapperService mapperService) {
    super.postProcess(mapperService);
    final boolean hasId = id != null;
    final boolean hasType = type != null;
    if (hasId) {
        addValue(IdFieldMapper.NAME, id);
    }
    if (hasType) {
        addValue(TypeFieldMapper.NAME, type);
    }
    if (hasType && hasId) {
        addValue(UidFieldMapper.NAME, Uid.createUid(type, id));
    }
}
}
// NOTE(review): truncated fragment — an InternalSearchHit-era createSearchHit that
// loads and post-processes stored fields, builds InternalSearchHitField entries,
// resolves type text via the document mapper, and primes the source lookup. Statements
// are missing after "entry.getValue()));" and after "typeText = documentMapper.typeText();"
// (loop/if closers and the sourceLookup setup are gone) and the method is cut off at the
// end; braces are unbalanced. Recover the full method before editing.
private InternalSearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, List<String> extractFieldNames, LeafReaderContext subReaderContext) { loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId); fieldsVisitor.postProcess(context.mapperService()); if (!fieldsVisitor.fields().isEmpty()) { searchFields = new HashMap<>(fieldsVisitor.fields().size()); for (Map.Entry<String, List<Object>> entry : fieldsVisitor.fields().entrySet()) { searchFields.put(entry.getKey(), new InternalSearchHitField(entry.getKey(), entry.getValue())); DocumentMapper documentMapper = context.mapperService().documentMapper(fieldsVisitor.uid().type()); Text typeText; if (documentMapper == null) { typeText = new Text(fieldsVisitor.uid().type()); } else { typeText = documentMapper.typeText(); InternalSearchHit searchHit = new InternalSearchHit(docId, fieldsVisitor.uid().id(), typeText, searchFields); if (fieldsVisitor.source() != null) { sourceLookup.setSource(fieldsVisitor.source());
// NOTE(review): truncated fragment — near-duplicate of the collector on L7: per-doc
// FieldsVisitor reset/load, stored fields copied to InternalSearchHitField entries,
// type text resolved via the document mapper, hit constructed with optional _source.
// Statements are missing before ".documentMapper(" (the mapperService receiver is gone)
// and around the hit construction; braces are unbalanced. Recover the full method
// before editing.
@Override public void collect(int doc) throws IOException { fieldsVisitor.reset(); currentReader.document(doc, fieldsVisitor); if (fieldsVisitor.fields() != null) { searchFields = new HashMap<String, SearchHitField>(fieldsVisitor.fields().size()); for (Map.Entry<String, List<Object>> entry : fieldsVisitor.fields().entrySet()) { searchFields.put(entry.getKey(), new InternalSearchHitField(entry.getKey(), entry.getValue())); .documentMapper(fieldsVisitor.uid().type()); Text typeText; if (documentMapper == null) { typeText = new StringAndBytesText(fieldsVisitor.uid().type()); } else { typeText = documentMapper.typeText(); fieldsVisitor.uid().id(), typeText, sourceRequested ? fieldsVisitor.source() : null, searchFields);
/**
 * Loads a (sub-)document's stored fields and exposes them under the user-requested
 * names. Stored names present in {@code storedToRequestedFields} are duplicated
 * under each of their requested aliases; unmapped stored names pass through as-is.
 *
 * @return the resulting field map, or {@code null} when no stored fields were loaded
 */
private Map<String, DocumentField> getSearchFields(SearchContext context,
                                                   FieldsVisitor fieldsVisitor,
                                                   int subDocId,
                                                   Map<String, Set<String>> storedToRequestedFields,
                                                   LeafReaderContext subReaderContext) {
    loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId);
    fieldsVisitor.postProcess(context.mapperService());
    if (fieldsVisitor.fields().isEmpty()) {
        return null;
    }

    Map<String, DocumentField> searchFields = new HashMap<>(fieldsVisitor.fields().size());
    fieldsVisitor.fields().forEach((storedName, storedValues) -> {
        Set<String> requestedNames = storedToRequestedFields.containsKey(storedName)
                ? storedToRequestedFields.get(storedName)
                : null;
        if (requestedNames == null) {
            searchFields.put(storedName, new DocumentField(storedName, storedValues));
        } else {
            for (String requestedName : requestedNames) {
                searchFields.put(requestedName, new DocumentField(requestedName, storedValues));
            }
        }
    });
    return searchFields;
}
/**
 * Records a float-valued stored field from the current document under its field name.
 */
@Override
public void floatField(FieldInfo fieldInfo, float value) throws IOException {
    addValue(fieldInfo.name, value);
}