/**
 * Picks the analyzer for the given field type: a keyword field's configured
 * normalizer when one is present, otherwise the document mapper's default
 * index analyzer.
 */
static Analyzer getAnalyzer(DocumentMapper docMapper, MappedFieldType type) {
    if (type instanceof KeywordFieldMapper.KeywordFieldType) {
        final KeywordFieldMapper.KeywordFieldType keywordType = (KeywordFieldMapper.KeywordFieldType) type;
        if (keywordType.normalizer() != null) {
            // keyword fields may carry a normalizer that takes precedence over the index analyzer
            return keywordType.normalizer();
        }
    }
    return docMapper.mappers().indexAnalyzer();
}
}
/**
 * Builds the root object mapper in a fresh builder context and assembles the
 * metadata mappers for the type: a metadata mapper already present on an
 * existing mapping is reused, otherwise the registered parser's default is used.
 */
public Builder(RootObjectMapper.Builder builder, MapperService mapperService) {
    final Settings indexSettings = mapperService.getIndexSettings().getSettings();
    this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1));
    this.rootObjectMapper = builder.build(builderContext);

    final String type = rootObjectMapper.name();
    final DocumentMapper existingMapper = mapperService.documentMapper(type);
    final Map<String, TypeParser> metadataMapperParsers =
            mapperService.mapperRegistry.getMetadataMapperParsers();
    for (Map.Entry<String, MetadataFieldMapper.TypeParser> entry : metadataMapperParsers.entrySet()) {
        final String name = entry.getKey();
        // prefer the metadata mapper already defined on the existing mapping, if any
        final MetadataFieldMapper existing = existingMapper == null
                ? null
                : (MetadataFieldMapper) existingMapper.mappers().getMapper(name);
        final MetadataFieldMapper metadataMapper = existing != null
                ? existing
                : entry.getValue().getDefault(mapperService.fullName(name),
                        mapperService.documentMapperParser().parserContext(builder.name()));
        metadataMappers.put(metadataMapper.getClass(), metadataMapper);
    }
}
/** Creates an copy of the current field with given field name and boost */ private static void parseCopy(String field, ParseContext context) throws IOException { Mapper mapper = context.docMapper().mappers().getMapper(field); if (mapper != null) { if (mapper instanceof FieldMapper) { ((FieldMapper) mapper).parse(context); } else if (mapper instanceof FieldAliasMapper) { throw new IllegalArgumentException("Cannot copy to a field alias [" + mapper.name() + "]."); } else { throw new IllegalStateException("The provided mapper [" + mapper.name() + "] has an unrecognized type [" + mapper.getClass().getSimpleName() + "]."); } } else { // The path of the dest field might be completely different from the current one so we need to reset it context = context.overridePath(new ContentPath(0)); final String[] paths = splitAndValidatePath(field); final String fieldName = paths[paths.length-1]; Tuple<Integer, ObjectMapper> parentMapperTuple = getDynamicParentMapper(context, paths, null); ObjectMapper objectMapper = parentMapperTuple.v2(); parseDynamicValue(context, objectMapper, fieldName, context.parser().currentToken()); for (int i = 0; i < parentMapperTuple.v1(); i++) { context.path().remove(); } } }
private static Map<String, FieldMappingMetaData> findFieldMappingsByType(Predicate<String> fieldPredicate, DocumentMapper documentMapper, GetFieldMappingsIndexRequest request) { Map<String, FieldMappingMetaData> fieldMappings = new HashMap<>(); final DocumentFieldMappers allFieldMappers = documentMapper.mappers(); for (String field : request.fields()) { if (Regex.isMatchAllPattern(field)) { for (Mapper fieldMapper : allFieldMappers) { addFieldMapper(fieldPredicate, fieldMapper.name(), fieldMapper, fieldMappings, request.includeDefaults()); } } else if (Regex.isSimpleMatchPattern(field)) { for (Mapper fieldMapper : allFieldMappers) { if (Regex.simpleMatch(field, fieldMapper.name())) { addFieldMapper(fieldPredicate, fieldMapper.name(), fieldMapper, fieldMappings, request.includeDefaults()); } } } else { // not a pattern Mapper fieldMapper = allFieldMappers.getMapper(field); if (fieldMapper != null) { addFieldMapper(fieldPredicate, field, fieldMapper, fieldMappings, request.includeDefaults()); } else if (request.probablySingleFieldRequest()) { fieldMappings.put(field, FieldMappingMetaData.NULL); } } } return Collections.unmodifiableMap(fieldMappings); }
Mapper fieldMapper = docMapper.mappers().getMapper(field); if (fieldMapper == null) { if (docMapper.objectMappers().get(field) != null) {
for (int i = 0; i < paths.length-1; i++) { String currentPath = context.path().pathAsText(paths[i]); Mapper existingFieldMapper = context.docMapper().mappers().getMapper(currentPath); if (existingFieldMapper != null) { throw new MapperParsingException(
/**
 * Maps every field name of the document type to the string name of its data
 * type, reading the content type reflectively for AbstractFieldMapper
 * instances and via the field data type otherwise.
 */
private Map<String, String> getFieldTypes(DocumentMapper documentMapper) {
    final Map<String, String> typesByField = new HashMap<>();
    for (FieldMapper<?> fieldMapper : documentMapper.mappers()) {
        final String type;
        if (fieldMapper instanceof AbstractFieldMapper) {
            try {
                // the content type accessor is not visible here, so invoke it reflectively
                type = (String) abstractFieldMapperContentTypeMethod.invoke(fieldMapper);
            } catch (Exception ex) {
                throw new RuntimeException("Could not find content type", ex);
            }
        } else {
            type = fieldMapper.fieldDataType().getType();
        }
        typesByField.put(fieldMapper.name(), type);
    }
    return typesByField;
}
}
/** Creates a copy of the current field under the given destination field name. */
private static void parseCopy(final String field, final ParseContext context) throws IOException {
    final Mapper mapper = context.docMapper().mappers().getMapper(field);
    if (mapper == null) {
        // destination is not mapped: nothing to copy into
        return;
    }
    if (mapper instanceof FieldMapper) {
        ((FieldMapper) mapper).parse(context);
    } else if (mapper instanceof FieldAliasMapper) {
        throw new IllegalArgumentException("Cannot copy to a field alias [" + mapper.name() + "].");
    } else {
        throw new IllegalStateException("The provided mapper [" + mapper.name() + "] has an unrecognized type ["
                + mapper.getClass().getSimpleName() + "].");
    }
}
/**
 * Resolves the analyzer for the given field type. A keyword field's
 * normalizer wins when configured; everything else falls back to the
 * document mapper's index analyzer.
 */
static Analyzer getAnalyzer(DocumentMapper docMapper, MappedFieldType type) {
    if (type instanceof KeywordFieldMapper.KeywordFieldType) {
        final Analyzer normalizer = ((KeywordFieldMapper.KeywordFieldType) type).normalizer();
        if (normalizer != null) {
            return normalizer;
        }
    }
    return docMapper.mappers().indexAnalyzer();
}
}
private Analyzer findAnalyzer(ParseContext context) { Analyzer analyzer = fieldType().indexAnalyzer(); if (analyzer == null) { analyzer = context.docMapper().mappers().indexAnalyzer(); if (analyzer == null) { // This should not happen, should we log warn it? analyzer = Lucene.STANDARD_ANALYZER; } } return analyzer; }
private FieldMapper getMapperForField(String fieldName, SearchContext searchContext, HitContext hitContext) { DocumentMapper documentMapper = searchContext.mapperService().documentMapper(hitContext.hit().type()); // TODO: no need to lookup the doc mapper with unambiguous field names? just look at the mapper service return documentMapper.mappers().smartNameFieldMapper(fieldName); } }
private FieldMapper getMapperForField(String fieldName, SearchContext searchContext, HitContext hitContext) { DocumentMapper documentMapper = searchContext.mapperService().documentMapper(hitContext.hit().type()); // TODO: no need to lookup the doc mapper with unambiguous field names? just look at the mapper service return documentMapper.mappers().smartNameFieldMapper(fieldName); } }
/**
 * Returns the name of the first regular column mapped for the given type —
 * one that is not a metadata field, not part of the primary key, not a static
 * column, and a singleton collection — or {@code null} when none exists.
 *
 * @param indexService the index to inspect; may be {@code null}
 * @param type         the document type to look up
 * @return the first regular column name, or {@code null}
 * @throws IOException declared for callers; not thrown directly here
 */
private String regularColumn(final IndexService indexService, final String type) throws IOException {
    if (indexService != null) {
        DocumentMapper docMapper = indexService.mapperService().documentMapper(type);
        if (docMapper != null) {
            for (FieldMapper fieldMapper : docMapper.mappers()) {
                if (fieldMapper instanceof MetadataFieldMapper) {
                    continue;
                }
                if (fieldMapper.cqlPrimaryKeyOrder() == -1
                        && !fieldMapper.cqlStaticColumn()
                        && fieldMapper.cqlCollection() == Mapper.CqlCollection.SINGLETON) {
                    return fieldMapper.name();
                }
            }
        }
    }
    if (logger.isDebugEnabled()) {
        // BUGFIX: indexService may be null here (see guard above); the original
        // debug statement dereferenced it unconditionally and could NPE.
        logger.debug("no regular columns for index=[{}] type=[{}]",
                indexService == null ? null : indexService.index().getName(), type);
    }
    return null;
}
/**
 * Builds the root object mapper and assembles the metadata mappers for the
 * type: an existing mapping's metadata mapper is reused when present,
 * otherwise the registered parser supplies its default.
 */
public Builder(RootObjectMapper.Builder builder, MapperService mapperService) {
    final Settings indexSettings = mapperService.getIndexSettings().getSettings();
    this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1));
    this.rootObjectMapper = builder.build(builderContext);

    final String type = rootObjectMapper.name();
    final DocumentMapper existingMapper = mapperService.documentMapper(type);
    for (Map.Entry<String, MetadataFieldMapper.TypeParser> entry
            : mapperService.mapperRegistry.getMetadataMapperParsers().entrySet()) {
        final String name = entry.getKey();
        final MetadataFieldMapper existing = existingMapper == null
                ? null
                : (MetadataFieldMapper) existingMapper.mappers().getMapper(name);
        final MetadataFieldMapper metadataMapper = existing != null
                ? existing
                : entry.getValue().getDefault(mapperService.fullName(name),
                        mapperService.documentMapperParser().parserContext(builder.name()));
        metadataMappers.put(metadataMapper.getClass(), metadataMapper);
    }
}
/**
 * Constructs the builder context, builds the root object mapper, and
 * initializes the metadata mappers — carrying over those already defined on
 * an existing mapping for the same type and defaulting the rest.
 */
public Builder(RootObjectMapper.Builder builder, MapperService mapperService) {
    final Settings settings = mapperService.getIndexSettings().getSettings();
    this.builderContext = new Mapper.BuilderContext(settings, new ContentPath(1));
    this.rootObjectMapper = builder.build(builderContext);

    final DocumentMapper previousMapper = mapperService.documentMapper(rootObjectMapper.name());
    final Map<String, MetadataFieldMapper.TypeParser> parsers =
            mapperService.mapperRegistry.getMetadataMapperParsers();
    for (Map.Entry<String, MetadataFieldMapper.TypeParser> entry : parsers.entrySet()) {
        final String mapperName = entry.getKey();
        MetadataFieldMapper metadataMapper = previousMapper == null
                ? null
                : (MetadataFieldMapper) previousMapper.mappers().getMapper(mapperName);
        if (metadataMapper == null) {
            // no existing metadata mapper with this name: fall back to the parser's default
            metadataMapper = entry.getValue().getDefault(mapperService.fullName(mapperName),
                    mapperService.documentMapperParser().parserContext(builder.name()));
        }
        metadataMappers.put(metadataMapper.getClass(), metadataMapper);
    }
}
/**
 * Initializes the builder: builds the root object mapper in a fresh builder
 * context and populates the metadata mappers, preferring those of an already
 * registered mapping over freshly created defaults.
 */
public Builder(RootObjectMapper.Builder builder, MapperService mapperService) {
    final Settings indexSettings = mapperService.getIndexSettings().getSettings();
    this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1));
    this.rootObjectMapper = builder.build(builderContext);

    final String type = rootObjectMapper.name();
    final DocumentMapper currentMapper = mapperService.documentMapper(type);
    for (Map.Entry<String, MetadataFieldMapper.TypeParser> parserEntry
            : mapperService.mapperRegistry.getMetadataMapperParsers().entrySet()) {
        final String fieldName = parserEntry.getKey();
        MetadataFieldMapper resolved = null;
        if (currentMapper != null) {
            // reuse the metadata mapper from the existing mapping when present
            resolved = (MetadataFieldMapper) currentMapper.mappers().getMapper(fieldName);
        }
        if (resolved == null) {
            final MetadataFieldMapper.TypeParser parser = parserEntry.getValue();
            resolved = parser.getDefault(mapperService.fullName(fieldName),
                    mapperService.documentMapperParser().parserContext(builder.name()));
        }
        metadataMappers.put(resolved.getClass(), resolved);
    }
}
/**
 * Parses the geo context configuration: validates that the referenced field,
 * when configured, is mapped as geo_point, then reads either a null value or
 * a single point / list of points.
 */
@Override
public ContextConfig parseContext(ParseContext parseContext, XContentParser parser)
        throws IOException, ElasticsearchParseException {
    if (fieldName != null) {
        final FieldMapper mapper = parseContext.docMapper().mappers().getMapper(fieldName);
        if ((mapper instanceof GeoPointFieldMapper) == false) {
            // also rejects an unmapped (null) reference field
            throw new ElasticsearchParseException("referenced field must be mapped to geo_point");
        }
    }
    final Collection<String> locations = parser.currentToken() == Token.VALUE_NULL
            ? null
            : parseSinglePointOrList(parser);
    return new GeoConfig(this, locations);
}
/**
 * Reads the geo context from the parser. If a reference field is configured
 * it must be mapped as geo_point; the value itself is either null or a single
 * point / list of points.
 */
@Override
public ContextConfig parseContext(ParseContext parseContext, XContentParser parser)
        throws IOException, ElasticsearchParseException {
    if (fieldName != null) {
        final FieldMapper referenced = parseContext.docMapper().mappers().getMapper(fieldName);
        // a null (unmapped) reference field fails this check as well
        if (!(referenced instanceof GeoPointFieldMapper)) {
            throw new ElasticsearchParseException("referenced field must be mapped to geo_point");
        }
    }
    final Collection<String> locations;
    if (parser.currentToken() == Token.VALUE_NULL) {
        locations = null;
    } else {
        locations = parseSinglePointOrList(parser);
    }
    return new GeoConfig(this, locations);
}
/**
 * Computes, for every field of every mapping on this node, the field-data
 * cache size and returns the non-empty entries as node-level stats.
 *
 * @param nodeStatsRequest the per-node stats request (unused beyond dispatch)
 * @return the cache-size-per-field stats for the local node
 */
@Override
protected CacheStatsPerFieldStats nodeOperation(CacheStatsPerFieldStatsRequest nodeStatsRequest)
        throws ElasticSearchException {
    List<CacheStatsPerFieldStats.FieldEntry> entries = Lists.newArrayList();
    for (IndexService indexService : indicesService) {
        logger.debug("Starting to analyze index {}", indexService.settingsService().index().name());
        FieldDataCache fieldData = indexService.cache().fieldData();
        for (DocumentMapper mapper : indexService.mapperService()) {
            for (FieldMapper fieldMapper : mapper.mappers()) {
                String field = fieldMapper.names().indexName();
                logger.debug("Calculating size for field {}", field);
                long size = fieldData.sizeInBytes(field);
                if (size > 0) {
                    logger.debug("Size for field {}: {}", field, size);
                    entries.add(new CacheStatsPerFieldStats.FieldEntry(field, size));
                } else {
                    // fixed log message grammar ("is has" -> "has"); braces added to the else branch
                    logger.debug("Field {} has no cache. Skipping.", field);
                }
            }
        }
    }
    return new CacheStatsPerFieldStats(clusterService.localNode(), hostname,
            System.currentTimeMillis(), entries);
}
/** Creates an copy of the current field with given field name and boost */ private static void parseCopy(String field, ParseContext context) throws IOException { FieldMapper fieldMapper = context.docMapper().mappers().getMapper(field); if (fieldMapper != null) { fieldMapper.parse(context); } else { // The path of the dest field might be completely different from the current one so we need to reset it context = context.overridePath(new ContentPath(0)); final String[] paths = splitAndValidatePath(field); final String fieldName = paths[paths.length-1]; Tuple<Integer, ObjectMapper> parentMapperTuple = getDynamicParentMapper(context, paths, null); ObjectMapper mapper = parentMapperTuple.v2(); parseDynamicValue(context, mapper, fieldName, context.parser().currentToken()); for (int i = 0; i < parentMapperTuple.v1(); i++) { context.path().remove(); } } }