tokenizer = @TokenizerDef(factory = KeywordTokenizerFactory.class), filters = { @TokenFilterDef(factory = ClassicFilterFactory.class), }), @AnalyzerDef(name = LuceneAnalyzers.EXACT_ANALYZER, tokenizer = @TokenizerDef(factory = WhitespaceTokenizerFactory.class), filters = { @TokenFilterDef(factory = ClassicFilterFactory.class), }), @AnalyzerDef(name = LuceneAnalyzers.START_ANALYZER, tokenizer = @TokenizerDef(factory = WhitespaceTokenizerFactory.class), filters = { @TokenFilterDef(factory = ClassicFilterFactory.class), }), @AnalyzerDef(name = LuceneAnalyzers.ANYWHERE_ANALYZER, tokenizer = @TokenizerDef(factory = WhitespaceTokenizerFactory.class), filters = { @TokenFilterDef(factory = ClassicFilterFactory.class),
@Override
public TokenizerDefinition translate(TokenizerDef hibernateSearchDef) {
	// Look up the Elasticsearch-side definition factory registered under the
	// Lucene tokenizer factory's fully-qualified class name.
	Class<? extends TokenizerFactory> factoryType = hibernateSearchDef.factory();
	AnalysisDefinitionFactory<TokenizerDefinition> definitionFactory =
			luceneTokenizers.get( factoryType.getName() );
	if ( definitionFactory == null ) {
		// No translation known for this tokenizer factory type.
		throw log.unsupportedTokenizerFactory( factoryType );
	}
	// Copy the annotation parameters into a mutable map before handing them over.
	Map<String, String> parameters =
			ParameterAnnotationsReader.toNewMutableMap( hibernateSearchDef.params() );
	return definitionFactory.create( parameters );
}
private String registerTokenizerDef(String analyzerDefinitionName, TokenizerDef hibernateSearchDef) { String remoteName = hibernateSearchDef.name(); TokenizerDefinition elasticsearchDefinition = translator.translate( hibernateSearchDef ); if ( remoteName.isEmpty() && !hasParameters( elasticsearchDefinition ) ) { // No parameters, and no specific name was provided => Use the builtin, default definition remoteName = elasticsearchDefinition.getType(); } else { if ( remoteName.isEmpty() ) { remoteName = analyzerDefinitionName + "_" + hibernateSearchDef.factory().getSimpleName(); } registry.register( remoteName, elasticsearchDefinition ); } return remoteName; }
// Analyzer "ConceptNameAnalyzer": standard tokenization, standard token filtering,
// then lower-casing so lookups are case-insensitive. The @Analyzer annotation binds
// this definition to the annotated element (declaration continues outside this view).
@AnalyzerDef(name = "ConceptNameAnalyzer", tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class), filters = { @TokenFilterDef(factory = StandardFilterFactory.class), @TokenFilterDef(factory = LowerCaseFilterFactory.class) }) @Analyzer(definition = "ConceptNameAnalyzer")
@Override
public TokenizerDefinition translate(TokenizerDef hibernateSearchDef) {
	// Resolve a definition factory keyed by the tokenizer factory class name;
	// an unknown type is a hard error reported through the project logger.
	Class<? extends TokenizerFactory> tokenizerFactoryType = hibernateSearchDef.factory();
	AnalysisDefinitionFactory<TokenizerDefinition> matched =
			luceneTokenizers.get( tokenizerFactoryType.getName() );
	if ( matched == null ) {
		throw log.unsupportedTokenizerFactory( tokenizerFactoryType );
	}
	// The factory receives a fresh mutable copy of the annotation parameters.
	return matched.create(
			ParameterAnnotationsReader.toNewMutableMap( hibernateSearchDef.params() ) );
}
private String registerTokenizerDef(String analyzerDefinitionName, TokenizerDef hibernateSearchDef) { String remoteName = hibernateSearchDef.name(); TokenizerDefinition elasticsearchDefinition = translator.translate( hibernateSearchDef ); if ( remoteName.isEmpty() && !hasParameters( elasticsearchDefinition ) ) { // No parameters, and no specific name was provided => Use the builtin, default definition remoteName = elasticsearchDefinition.getType(); } else { if ( remoteName.isEmpty() ) { remoteName = analyzerDefinitionName + "_" + hibernateSearchDef.factory().getSimpleName(); } registry.register( remoteName, elasticsearchDefinition ); } return remoteName; }
// Entity mapped to table "cms_content". The "cms" analyzer uses the custom
// MultiTokenizerFactory; the @Analyzer annotation applies it for local Lucene
// indexing (original note: comment that line out to use Elasticsearch instead).
@Table(name = "cms_content") @DynamicUpdate @AnalyzerDef(name = "cms", tokenizer = @TokenizerDef(factory = MultiTokenizerFactory.class)) @Analyzer(definition = "cms") // Comment this line to enable elasticsearch
/**
 * Builds a Lucene analyzer from the given {@code @AnalyzerDef}: instantiates
 * the declared tokenizer factory, then delegates assembly of char filters and
 * token filters to the factory-based overload.
 */
private Analyzer buildAnalyzer(AnalyzerDef analyzerDef) throws IOException {
	TokenizerDef tokenizerDef = analyzerDef.tokenizer();
	TokenizerFactory factory = buildAnalysisComponent(
			TokenizerFactory.class,
			tokenizerDef.factory(),
			tokenizerDef.params() );
	return buildAnalyzer( factory, analyzerDef.charFilters(), analyzerDef.filters() );
}
// Entity stored in "cms_content"; @DynamicUpdate limits SQL updates to changed
// columns. Defines and applies the "cms" analyzer (custom MultiTokenizerFactory);
// the original note says to disable @Analyzer when switching to Elasticsearch.
@Table(name = "cms_content") @DynamicUpdate @AnalyzerDef(name = "cms", tokenizer = @TokenizerDef(factory = MultiTokenizerFactory.class)) @Analyzer(definition = "cms") // Comment this line to enable elasticsearch
/**
 * Turns an {@code @AnalyzerDef} annotation into a concrete Lucene analyzer.
 * The heavy lifting — combining tokenizer, char filters and token filters —
 * happens in the overload taking a {@link TokenizerFactory}.
 */
private Analyzer buildAnalyzer(AnalyzerDef analyzerDef) throws IOException {
	TokenizerDef tokenizer = analyzerDef.tokenizer();
	return buildAnalyzer(
			buildAnalysisComponent( TokenizerFactory.class, tokenizer.factory(), tokenizer.params() ),
			analyzerDef.charFilters(),
			analyzerDef.filters() );
}
@Entity
@Indexed
@Inheritance(strategy = TABLE_PER_CLASS)
// "customanalyzer": standard tokenization followed by lower-casing.
@AnalyzerDef(
		name = "customanalyzer",
		tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class),
		filters = @TokenFilterDef(factory = LowerCaseFilterFactory.class))
public class SolrMember extends Member {

	/** Default value included to remove warning. Remove or modify at will. **/
	private static final long serialVersionUID = 1L;
}
@AnalyzerDef( name = "cleaned_keyword", tokenizer = @TokenizerDef( factory = KeywordTokenizerFactory.class ),
@AnalyzerDef(name = "exacttokenanalyzer", tokenizer = @TokenizerDef(factory = ExactTokenizerFactory.class)) @Indexed @Table(indexes = { @Index(columnList = "parentId"),
@AnalyzerDef(name = "textAnalyzer", tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class) , filters = { @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = SnowballPorterFilterFactory.class, params = {
@Indexed
@ClassBridge(impl = FooBridge.class)
@AnalyzerDiscriminator(impl = FooBridge.class)
// Three analyzer definitions, each backed by a distinct test tokenizer, selected
// at runtime by the discriminator above.
@AnalyzerDefs({
		@AnalyzerDef(name = "analyzer1", tokenizer = @TokenizerDef(factory = TestTokenizer.TestTokenizer1.class)),
		@AnalyzerDef(name = "analyzer2", tokenizer = @TokenizerDef(factory = TestTokenizer.TestTokenizer2.class)),
		@AnalyzerDef(name = "analyzer3", tokenizer = @TokenizerDef(factory = TestTokenizer.TestTokenizer3.class))
})
public static class Foo {

	/** Identifier indexed as the document id. */
	@DocumentId
	private Integer id;

	public Foo(Integer id) {
		this.id = id;
	}
}
tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class), filters = { @TokenFilterDef(factory = StopFilterFactory.class) tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class)
@AnalyzerDef(name = Analyzers.DEFAULT, tokenizer = @TokenizerDef( factory = StandardTokenizerFactory.class), filters = { @TokenFilterDef(factory = StandardFilterFactory.class), @AnalyzerDef(name = Analyzers.UNIGRAM, tokenizer = @TokenizerDef( factory = NGramTokenizerFactory.class, params = { @Parameter(name = "minGramSize", value = "1"),
@Indexed
// Two class-scoped analyzer definitions, both using the standard tokenizer.
@AnalyzerDefs({
		@AnalyzerDef(name = "class-analyzer-1", tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class)),
		@AnalyzerDef(name = "class-analyzer-2", tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class))
})
static class Sample {

	/** Document identifier. */
	@DocumentId
	long id;

	/** Indexed free-text field. */
	@Field
	String description;
}
@Indexed @AnalyzerDef(name = "textAnalyzer", tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class) , filters = { @TokenFilterDef(factory = LowerCaseFilterFactory.class), @TokenFilterDef(factory = SnowballPorterFilterFactory.class, params = {
@Indexed
// One package-scoped and one class-scoped analyzer name, both on the standard tokenizer.
@AnalyzerDefs({
		@AnalyzerDef(name = "package-analyzer-1", tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class)),
		@AnalyzerDef(name = "class-analyzer-unique", tokenizer = @TokenizerDef(factory = StandardTokenizerFactory.class))
})
static class SampleWithError {

	/** Fixed document identifier. */
	@DocumentId
	final long id = 1;

	/** Indexed field, intentionally empty. */
	@Field
	final String description = "";
}
}