@Override
protected TokenStreamComponents createComponents(final String fieldName) {
  final StandardTokenizer src = new StandardTokenizer();
  src.setMaxTokenLength(maxTokenLength);
  TokenStream tok = new LowerCaseFilter(src);
  tok = new StopFilter(tok, stopwords);
  return new TokenStreamComponents(src, tok) {
    @Override
    protected void setReader(final Reader reader) {
      // So that if maxTokenLength was changed, the change takes
      // effect next time tokenStream is called:
      src.setMaxTokenLength(StandardAnalyzer.this.maxTokenLength);
      super.setReader(reader);
    }
  };
}
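
// Hedged usage sketch (not from the snippets above): how an analyzer built with
// a createComponents like the one above is typically consumed. The field name
// "body" and the demo class/method names are illustrative assumptions; the
// tokenStream/reset/incrementToken/end sequence is the standard Lucene idiom.
import java.io.IOException;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class TokenStreamDemo {
  static void printTokens(Analyzer analyzer, String text) throws IOException {
    // tokenStream() reuses cached TokenStreamComponents; the setReader()
    // override above is what re-applies maxTokenLength on each reuse.
    try (TokenStream ts = analyzer.tokenStream("body", text)) {
      CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
      ts.reset();                 // required before the first incrementToken()
      while (ts.incrementToken()) {
        System.out.println(term.toString());
      }
      ts.end();                   // records final offset state
    }
  }
}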

@Override
public Tokenizer create() {
  StandardTokenizer tokenizer = new StandardTokenizer();
  tokenizer.setMaxTokenLength(maxTokenLength);
  return tokenizer;
}

@Override
public StandardTokenizer create(AttributeFactory factory) {
  StandardTokenizer tokenizer = new StandardTokenizer(factory);
  tokenizer.setMaxTokenLength(maxTokenLength);
  return tokenizer;
}

@Override
protected void setReader(final Reader reader) throws IOException {
  src.setMaxTokenLength(StandardAnalyzer.this.maxTokenLength);
  super.setReader(reader);
}

@Override
protected void setReader(final Reader reader) {
  ((StandardTokenizer) src).setMaxTokenLength(255);
  try {
    super.setReader(reader);
  } catch (IOException e) {
    // TODO: what to do here?
  }
}

@Override
protected TokenStreamComponents createComponents(final String fieldName, final Reader reader) {
  final StandardTokenizer src = new StandardTokenizer(reader);
  src.setMaxTokenLength(maxTokenLength);
  return new TokenStreamComponents(src) {
    @Override
    protected void setReader(final Reader reader) throws IOException {
      src.setMaxTokenLength(LiteStandardAnalyzer.this.maxTokenLength);
      super.setReader(reader);
    }
  };
}

@Override
protected Reader initReader(String fieldName, Reader reader) {
  return mStripHTML ? new HTMLStripCharFilter(reader) : reader;
}

@Override
protected TokenStreamComponents createComponents(String fieldName) {
  StandardTokenizer source = new StandardTokenizer();
  source.setMaxTokenLength(maxTokenLength);
  TokenStream result = new StandardFilter(source);
  result = new LowerCaseFilter(result);
  return new TokenStreamComponents(source, result);
}
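
// Hedged sketch of what the initReader override above does in isolation: an
// HTMLStripCharFilter wraps the raw Reader so markup never reaches the
// tokenizer. The demo class and method names here are illustrative assumptions.
import java.io.Reader;
import java.io.StringReader;
import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;

public class StripDemo {
  static Reader stripped(String html) {
    // For input "<b>Hello</b> World" the filtered stream reads as "Hello World",
    // so the downstream tokens become "hello" and "world" after LowerCaseFilter.
    return new HTMLStripCharFilter(new StringReader(html));
  }
}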

@Override
protected void setReader(final Reader reader) {
  int m = StandardAnalyzer.this.maxTokenLength;
  if (src instanceof StandardTokenizer) {
    ((StandardTokenizer) src).setMaxTokenLength(m);
  } else {
    ((StandardTokenizer40) src).setMaxTokenLength(m);
  }
  super.setReader(reader);
}

/** Constructs a {@link StandardTokenizer} filtered by a {@link StandardFilter},
 *  a {@link LowerCaseFilter} and a {@link StopFilter}. */
public TokenStream tokenStream(String fieldName, Reader reader) {
  StandardTokenizer tokenStream = new StandardTokenizer(reader, replaceInvalidAcronym);
  tokenStream.setMaxTokenLength(maxTokenLength);
  TokenStream result = new StandardFilter(tokenStream);
  result = new LowerCaseFilter(result);
  result = new StopFilter(result, stopSet);
  return result;
}

@Override
public Tokenizer create(AttributeFactory factory) {
  if (luceneMatchVersion.onOrAfter(Version.LUCENE_4_7_0)) {
    StandardTokenizer tokenizer = new StandardTokenizer(factory);
    tokenizer.setMaxTokenLength(maxTokenLength);
    return tokenizer;
  } else {
    StandardTokenizer40 tokenizer40 = new StandardTokenizer40(factory);
    tokenizer40.setMaxTokenLength(maxTokenLength);
    return tokenizer40;
  }
}
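
// Hedged sketch: driving a Tokenizer obtained from a factory like the one
// above. It follows the post-4.x Tokenizer API used in these snippets
// (setReader is declared to throw IOException in some versions, so the helper
// simply propagates it); the demo class itself is an assumption.
import java.io.IOException;
import java.io.StringReader;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class TokenizerDemo {
  static void dump(Tokenizer tokenizer, String text) throws IOException {
    tokenizer.setReader(new StringReader(text)); // input must be set before reset()
    CharTermAttribute term = tokenizer.addAttribute(CharTermAttribute.class);
    tokenizer.reset();
    while (tokenizer.incrementToken()) {
      System.out.println(term.toString());
    }
    tokenizer.end();
    tokenizer.close();
  }
}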

@Override
protected TokenStreamComponents createComponents(final String fieldName, final Reader reader) {
  final StandardTokenizer src = new StandardTokenizer(matchVersion, reader);
  src.setMaxTokenLength(maxTokenLength);
  TokenStream tok = new StandardFilter(matchVersion, src);
  tok = new LowerCaseFilter(matchVersion, tok);
  tok = new StopFilter(matchVersion, tok, stopwords);
  return new TokenStreamComponents(src, tok) {
    @Override
    protected void setReader(final Reader reader) throws IOException {
      src.setMaxTokenLength(ReaderStandardAnalyzer.this.maxTokenLength);
      super.setReader(reader);
    }
  };
}

public TokenStream reusableTokenStream(String fieldName, Reader reader) throws IOException {
  SavedStreams streams = (SavedStreams) getPreviousTokenStream();
  if (streams == null) {
    streams = new SavedStreams();
    setPreviousTokenStream(streams);
    streams.tokenStream = new StandardTokenizer(reader);
    streams.filteredTokenStream = new StandardFilter(streams.tokenStream);
    streams.filteredTokenStream = new LowerCaseFilter(streams.filteredTokenStream);
    streams.filteredTokenStream = new StopFilter(streams.filteredTokenStream, stopSet);
  } else {
    streams.tokenStream.reset(reader);
  }
  streams.tokenStream.setMaxTokenLength(maxTokenLength);
  streams.tokenStream.setReplaceInvalidAcronym(replaceInvalidAcronym);
  return streams.filteredTokenStream;
}
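
// Hedged sketch of the SavedStreams holder that the legacy reusableTokenStream
// above relies on: a plain per-thread cache of the tokenizer plus the end of
// the filter chain. The field names match the snippet; the exact class shape
// is an assumption.
private static final class SavedStreams {
  StandardTokenizer tokenStream;
  TokenStream filteredTokenStream;
}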