/**
 * Creates a new {@link ThaiTokenizer} whose attributes are allocated by the
 * given {@link AttributeFactory}.
 *
 * @param factory the attribute factory to use for the tokenizer's attributes
 * @return a freshly constructed {@link ThaiTokenizer}
 */
@Override
public Tokenizer create(AttributeFactory factory) {
  return new ThaiTokenizer(factory);
}
}
/**
 * Creates {@link org.apache.lucene.analysis.Analyzer.TokenStreamComponents}
 * used to tokenize all the text in the provided {@link Reader}.
 *
 * @return {@link org.apache.lucene.analysis.Analyzer.TokenStreamComponents}
 *         built from a {@link ThaiTokenizer} filtered with
 *         {@link LowerCaseFilter}, {@link DecimalDigitFilter} and {@link StopFilter}
 */
@Override
protected TokenStreamComponents createComponents(String fieldName) {
  // Tokenize first, then apply the filters in the documented order:
  // lower-casing, digit folding, and finally stop-word removal.
  final Tokenizer tokenizer = new ThaiTokenizer();
  TokenStream chain = new LowerCaseFilter(tokenizer);
  chain = new DecimalDigitFilter(chain);
  chain = new StopFilter(chain, stopwords);
  return new TokenStreamComponents(tokenizer, chain);
}
/**
 * Builds a {@link ThaiTokenizer}; the requested {@code version} does not
 * affect construction and is ignored.
 *
 * @param version the Lucene match version (unused)
 * @return a new {@link ThaiTokenizer}
 */
@Override
protected Tokenizer create(Version version) {
  return new ThaiTokenizer();
}
}
/**
 * Returns a new {@link ThaiTokenizer} built with its default attribute
 * factory.
 *
 * @return a fresh {@link ThaiTokenizer}
 */
@Override
public Tokenizer create() {
  return new ThaiTokenizer();
}
}
/**
 * Instantiates a {@link ThaiTokenizer} backed by the caller-supplied
 * {@link AttributeFactory}.
 *
 * @param factory factory used to create the tokenizer's attribute instances
 * @return the new tokenizer
 */
@Override
public Tokenizer create(AttributeFactory factory) {
  return new ThaiTokenizer(factory);
}
}
/**
 * Factory method producing a default-configured {@link ThaiTokenizer}.
 *
 * @return a newly allocated {@link ThaiTokenizer}
 */
@Override
public Tokenizer create() {
  return new ThaiTokenizer();
}
}
/**
 * Creates the tokenizer this factory is responsible for: a plain
 * {@link ThaiTokenizer}.
 *
 * @return a new {@link ThaiTokenizer} instance
 */
@Override
public Tokenizer create() {
  return new ThaiTokenizer();
}
}
/**
 * Builds the analysis chain for a field: a {@link ThaiTokenizer} over the
 * given {@link Reader}, wrapped in a {@link StandardFilter}.
 *
 * @param fieldName the field being analyzed (unused here)
 * @param reader    the character source to tokenize
 * @return the assembled {@link TokenStreamComponents}
 */
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
  final ThaiTokenizer tokenizer = new ThaiTokenizer(reader);
  final TokenStream filtered = new StandardFilter(tokenizer);
  return new TokenStreamComponents(tokenizer, filtered);
}
}
/**
 * Re-initializes this wrapper over a new character source: constructs a fresh
 * {@link ThaiTokenizer}, points it at {@code input}, grabs its
 * {@link CharTermAttribute}, and resets the stream so iteration can begin.
 *
 * @param input the reader to tokenize; must be non-null (checked only via
 *              {@code assert}, so enforced only when assertions are enabled)
 * @throws IOException declared, though failures are re-thrown wrapped as
 *                     runtime exceptions by the catch block below
 */
// NOTE(review): the previously held tokenizer (if any) is replaced without
// being closed — presumably acceptable for this usage, but verify no resource
// leak against Tokenizer's close() contract.
public void reset(Reader input) throws IOException { assert input != null; try { this.tokenizer = new ThaiTokenizer(); tokenizer.setReader(input); this.term = tokenizer.addAttribute(CharTermAttribute.class); this.tokenizer.reset(); // broad catch: any setup failure (not just IOException) is rethrown unchecked
 } catch (Exception e) { throw ExceptionUtils.wrapAsRuntimeException(e); } }
protected TokenStreamComponents createComponents(String fieldName) { if (getVersion().onOrAfter(Version.LUCENE_4_8_0)) { final Tokenizer source = new ThaiTokenizer(); TokenStream result = new LowerCaseFilter(source); if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {