/**
 * Returns the first value of this field, loading it lazily on first access.
 * The result is cached; {@code null} is returned (and cached) when the field
 * is absent from the loaded fields map.
 */
public Object getValue() {
    if (valueLoaded == false) {
        valueLoaded = true;
        value = null;
        List<Object> loaded = fields.get(fieldType.name());
        if (loaded != null) {
            // NOTE(review): assumes a present field always has at least one value — confirm
            value = loaded.get(0);
        }
    }
    return value;
}
/**
 * Creates a top-docs collector that collapses hits on the configured field.
 * Only keyword and numeric field types support collapsing.
 *
 * @throws IllegalStateException when the collapse field is neither a keyword
 *         nor a numeric field type
 */
public CollapsingTopDocsCollector<?> createTopDocs(Sort sort, int topN, boolean trackMaxScore) {
    String field = fieldType.name();
    if (fieldType instanceof KeywordFieldMapper.KeywordFieldType) {
        return CollapsingTopDocsCollector.createKeyword(field, sort, topN, trackMaxScore);
    }
    if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
        return CollapsingTopDocsCollector.createNumeric(field, sort, topN, trackMaxScore);
    }
    throw new IllegalStateException("unknown type for collapse field " + field
        + ", only keywords and numbers are accepted");
}
}
/**
 * Returns all values of this field, loading them lazily on first access.
 * The loaded list is cached for subsequent calls.
 *
 * NOTE(review): on first load the old {@code values} list is cleared and the
 * field is then re-pointed at whatever {@code fields()} returns — which may be
 * {@code null} when the field is absent, so later calls would return
 * {@code null} rather than an empty list. Confirm callers tolerate this.
 */
public List<Object> getValues() { if (valuesLoaded) { return values; } valuesLoaded = true; values.clear(); return values = fields().get(fieldType.name()); } }
/**
 * Builds constant fielddata for this field; the constant value is resolved
 * from the mapper service at build time via {@code valueFunction}.
 */
@Override
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType,
                               IndexFieldDataCache cache, CircuitBreakerService breakerService,
                               MapperService mapperService) {
    final String fieldName = fieldType.name();
    return new ConstantIndexFieldData(indexSettings, fieldName, valueFunction.apply(mapperService));
}
/**
 * Returns a fielddata builder for this field.
 * <p>
 * This base implementation always rejects the request: fielddata is not
 * supported unless a subclass overrides this method.
 *
 * @param fullyQualifiedIndexName the name of the index this fielddata is built for
 * @throws IllegalArgumentException always, so the failure maps to an HTTP 400
 *         rather than a 500; see {@link org.elasticsearch.ExceptionsHelper#status}
 */
public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
    final String message = "Fielddata is not supported on field [" + name() + "] of type [" + typeName() + "]";
    throw new IllegalArgumentException(message);
}
/**
 * Builds the legacy exists query: a term query on the {@code _field_names}
 * meta field for the given field's full name, falling back to the raw field
 * string when the field is unmapped.
 */
private static Query newLegacyExistsQuery(QueryShardContext context, String field) {
    String fieldName = field;
    MappedFieldType fieldType = context.fieldMapper(field);
    if (fieldType != null) {
        fieldName = fieldType.name();
    }
    return new TermQuery(new Term(FieldNamesFieldMapper.NAME, fieldName));
}
/**
 * Returns the {@link DocValueFormat} used to display and parse values as
 * returned by the fielddata API. This base implementation supports neither
 * custom formats nor custom time zones and always returns
 * {@link DocValueFormat#RAW}.
 *
 * @throws IllegalArgumentException if a format or a time zone is supplied
 */
public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) {
    if (format != null) {
        throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
            + "] does not support custom formats");
    }
    if (timeZone != null) {
        throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName()
            + "] does not support custom time zones");
    }
    return DocValueFormat.RAW;
}
/**
 * Builds a blended term query from the raw term bytes. The bytes are used as
 * the term's string value directly — this path runs after analysis, so no
 * further analyzer phase is applied here.
 */
@Override
public Query termQuery(MappedFieldType fieldType, BytesRef value) {
    Term term = new Term(fieldType.name(), value.utf8ToString());
    return blendTerm(term, fieldType);
}
/**
 * Fails when this field has no doc values.
 *
 * @throws IllegalArgumentException if doc values are disabled — an IAE (rather
 *         than an ISE) is used so the error maps to an HTTP 400; see
 *         {@link org.elasticsearch.ExceptionsHelper#status}
 */
protected final void failIfNoDocValues() {
    if (hasDocValues()) {
        return;
    }
    throw new IllegalArgumentException("Can't load fielddata on [" + name()
        + "] because fielddata is unsupported on fields of type [" + typeName()
        + "]. Use doc values instead.");
}
/**
 * Returns a terms-based sorted-docs producer when the index sort makes it
 * applicable, the field is a string type, and the query (if any) matches all
 * documents; otherwise returns {@code null}.
 */
@Override
SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query query) {
    if (checkIfSortedDocsIsApplicable(reader, fieldType) == false) {
        return null;
    }
    if ((fieldType instanceof StringFieldType) == false) {
        return null;
    }
    if (query != null && query.getClass() != MatchAllDocsQuery.class) {
        return null;
    }
    return new TermsSortedDocsProducer(fieldType.name());
}
protected final void failIfNotIndexed() { if (indexOptions() == IndexOptions.NONE && pointDataDimensionCount() == 0) { // we throw an IAE rather than an ISE so that it translates to a 4xx code rather than 5xx code on the http layer throw new IllegalArgumentException("Cannot search on field [" + name() + "] since it is not indexed."); } }
@Override protected Field[] getFields(IndexReader reader, int docId, String fieldName) throws IOException { // we know its low level reader, and matching docId, since that's how we call the highlighter with SourceLookup sourceLookup = searchContext.lookup().source(); sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId); List<Object> values = sourceLookup.extractRawValues(fieldType.name()); Field[] fields = new Field[values.size()]; for (int i = 0; i < values.size(); i++) { fields[i] = new Field(fieldType.name(), values.get(i).toString(), TextField.TYPE_NOT_STORED); } return fields; }
/**
 * Delegates term-query construction to the field type. In lenient mode any
 * runtime failure is converted via {@code newLenientFieldQuery} instead of
 * being propagated to the caller.
 */
protected final Query termQuery(MappedFieldType fieldType, BytesRef value, boolean lenient) {
    try {
        return fieldType.termQuery(value, context);
    } catch (RuntimeException e) {
        if (lenient == false) {
            throw e;
        }
        return newLenientFieldQuery(fieldType.name(), e);
    }
}
/**
 * Builds the field-masking span query: the inner span query is exposed under
 * the (possibly resolved) field name. When the field is mapped, its full name
 * is used; otherwise the raw field name from the request is kept.
 */
@Override
protected SpanQuery doToQuery(QueryShardContext context) throws IOException {
    MappedFieldType fieldType = context.fieldMapper(fieldName);
    String fieldInQuery = fieldType == null ? fieldName : fieldType.name();
    Query innerQuery = queryBuilder.toQuery(context);
    assert innerQuery instanceof SpanQuery;
    return new FieldMaskingSpanQuery((SpanQuery) innerQuery, fieldInQuery);
}
/**
 * Adds entries to the {@code _field_names} meta field for this mapper's field,
 * but only when that meta field exists and is enabled in the mapping.
 */
protected void createFieldNamesField(ParseContext context, List<IndexableField> fields) {
    FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType) context.docMapper()
        .metadataMapper(FieldNamesFieldMapper.class).fieldType();
    if (fieldNamesFieldType == null || fieldNamesFieldType.isEnabled() == false) {
        return;
    }
    for (String fieldName : FieldNamesFieldMapper.extractFieldNames(fieldType().name())) {
        fields.add(new Field(FieldNamesFieldMapper.NAME, fieldName, fieldNamesFieldType));
    }
}
/**
 * Merges an incoming {@code _all} mapper into this one. The enabled flag may
 * not change across a merge unless the incoming state is still the default.
 *
 * @throws IllegalArgumentException when the enabled flag conflicts
 */
@Override
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
    AllFieldMapper other = (AllFieldMapper) mergeWith;
    if (other.enabled() != this.enabled() && other.enabledState != Defaults.ENABLED) {
        throw new IllegalArgumentException("mapper [" + fieldType().name()
            + "] enabled is " + this.enabled() + " now encountering " + other.enabled());
    }
    super.doMerge(mergeWith, updateAllTypes);
}
/**
 * Emits one {@code AllField} per collected entry, carrying the entry's value
 * and boost. Does nothing when the {@code _all} field is disabled.
 */
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
    if (enabledState.enabled == false) {
        return;
    }
    for (AllEntries.Entry entry : context.allEntries().entries()) {
        fields.add(new AllField(fieldType().name(), entry.value(), entry.boost(), fieldType()));
    }
}
/**
 * Indexes the document type into this meta field: an inverted/stored field
 * when indexing or storing is enabled, plus a sorted-set doc-values entry
 * when doc values are enabled. Skips entirely when neither indexed nor stored.
 */
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
    boolean indexed = fieldType().indexOptions() != IndexOptions.NONE;
    if (indexed == false && fieldType().stored() == false) {
        return;
    }
    fields.add(new Field(fieldType().name(), context.sourceToParse().type(), fieldType()));
    if (fieldType().hasDocValues()) {
        fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(context.sourceToParse().type())));
    }
}
/**
 * Indexes the routing value of the document, when one is present and this
 * field is either indexed or stored; also registers the field in
 * {@code _field_names}.
 */
@Override
protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
    String routing = context.sourceToParse().routing();
    if (routing == null) {
        return;
    }
    boolean indexedOrStored = fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored();
    if (indexedOrStored) {
        fields.add(new Field(fieldType().name(), routing, fieldType()));
        createFieldNamesField(context, fields);
    }
}
/**
 * Parses a geopoint from the current string token. When a malformed point is
 * encountered and {@code ignore_malformed} is set, the failure is swallowed
 * and the field is recorded as ignored; otherwise the parse exception
 * propagates.
 */
private void parseGeoPointStringIgnoringMalformed(ParseContext context, GeoPoint sparse) throws IOException {
    try {
        parse(context, sparse.resetFromString(context.parser().text(), ignoreZValue.value()));
    } catch (ElasticsearchParseException e) {
        if (ignoreMalformed.value()) {
            context.addIgnoredField(fieldType.name());
        } else {
            throw e;
        }
    }
}