@Override
public TokenStream create(TokenStream tokenStream) {
    // A null FST means the synonym map holds no rules; filtering would be a
    // no-op, so hand the incoming stream back untouched.
    if (synonyms.fst == null) {
        return tokenStream;
    }
    // ignoreCase is hard-wired to false here: matching is case-sensitive.
    return new SynonymFilter(tokenStream, synonyms, false);
}
@Override public TokenStream create(TokenStream input) { // if the fst is null, it means there's actually no synonyms... just return the original stream // as there is nothing to do here. return map.fst == null ? input : new SynonymFilter(input, map, ignoreCase); }
@Override public TokenStream create(TokenStream input) { // if the fst is null, it means there's actually no synonyms... just return the original stream // as there is nothing to do here. return map.fst == null ? input : new SynonymFilter(input, map, ignoreCase); }
@Override
public TokenStream create(TokenStream tokenStream) {
    // When the FST was built (i.e. at least one synonym rule exists), wrap
    // the stream; ignoreCase is fixed to false, so matching is case-sensitive.
    if (synonyms.fst != null) {
        return new SynonymFilter(tokenStream, synonyms, false);
    }
    // Empty synonym map: pass the original stream through unchanged.
    return tokenStream;
}
};
@Override public TokenStream create(TokenStream tokenStream) { // fst is null means no synonyms return synonymMap.fst == null ? tokenStream : new SynonymFilter(tokenStream, synonymMap, ignoreCase); } }
@Override public TokenStream create(TokenStream tokenStream) { // fst is null means no synonyms return synonymMap.fst == null ? tokenStream : new SynonymFilter(tokenStream, synonymMap, ignoreCase); }
@Override
public TokenStream create(TokenStream input) {
    // Skip wrapping when the synonym FST was never built (no rules loaded).
    if (map.fst == null) {
        return input;
    }
    return new SynonymFilter(input, map, ignoreCase);
}
@Override protected TokenStreamComponents createComponents(String fieldName) { Tokenizer tokenizer = new WhitespaceTokenizer(); TokenStream result = new PatternReplaceFilter(tokenizer, Pattern.compile("^([\\.!\\?,:;\"'\\(\\)]*)(.*?)([\\.!\\?,:;\"'\\(\\)]*)$"), "$2", true); result = new PatternReplaceFilter(result, Pattern.compile("'s"), "s", true); result = new BolEolFilter(result); result = new SynonymFilter(result, map, true); // result = new StopFilter(result, LuceneUtils.caseSensitiveStopSet); result = new Lucene43StopFilter(false, result, LuceneUtils.caseSensitiveStopSet); result = new LowerCaseFilter(result); result = new ASCIIFoldingFilter(result); return new TokenStreamComponents(tokenizer, result); }