public TokenStreamInfo getStream(String fieldName, Reader reader) {
  Tokenizer tokenizer = new Tokenizer(reader) {
    final TermAttribute termAtt = (TermAttribute) addAttribute(TermAttribute.class);
    boolean done = false;

    @Override
    public void reset(Reader input) throws IOException {
      done = false;
      super.reset(input);
    }

    @Override
    public boolean incrementToken() throws IOException {
      clearAttributes();
      if (done) return false;
      done = true;
      int ch = input.read();
      if (ch == -1) return false;
      // Emit exactly one single-character token: TRUE_TOKEN if the first
      // character looks true-ish ('t', 'T', or '1'), FALSE_TOKEN otherwise.
      termAtt.setTermBuffer(
          (ch == 't' || ch == 'T' || ch == '1') ? TRUE_TOKEN : FALSE_TOKEN,
          0, 1);
      return true;
    }
  };
  // The tokenizer is both the source and the end of the stream here.
  return new TokenStreamInfo(tokenizer, tokenizer);
}
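The TRUE_TOKEN and FALSE_TOKEN constants referenced above are single-character buffers declared in Solr 1.4's BoolField:

protected final static char[] TRUE_TOKEN  = {'T'};
protected final static char[] FALSE_TOKEN = {'F'};

so every boolean field value is indexed as a single "T" or "F" token.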
public TokenStreamInfo getStream(String fieldName, Reader reader) {
  Tokenizer ts = new Tokenizer(reader) {
    final char[] cbuf = new char[maxChars];
    final TermAttribute termAtt = (TermAttribute) addAttribute(TermAttribute.class);
    final OffsetAttribute offsetAtt = (OffsetAttribute) addAttribute(OffsetAttribute.class);

    @Override
    public boolean incrementToken() throws IOException {
      clearAttributes();
      // Read the next chunk of up to maxChars characters and emit it as
      // one token, normalized through the field type's toInternal().
      int n = input.read(cbuf, 0, maxChars);
      if (n <= 0) return false;
      String s = toInternal(new String(cbuf, 0, n));
      termAtt.setTermBuffer(s);
      offsetAtt.setOffset(correctOffset(0), correctOffset(n));
      return true;
    }
  };
  return new TokenStreamInfo(ts, ts);
}
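Note that this tokenizer is not limited to one token per value: each incrementToken() call reads the next chunk of up to maxChars characters and emits it as a separate token. Also, the offsets are computed as 0..n on every call, so when a value is longer than maxChars, only the first chunk's offsets line up with the original input.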
@Override
public TokenStreamInfo getStream(String fieldName, Reader reader) {
  // Create the tokenizer from its factory (wrapping the reader in any
  // configured char stream first), then wrap it with each filter in order.
  Tokenizer tk = (Tokenizer) tokenizer.create(charStream(reader));
  TokenStream ts = tk;
  for (int i = 0; i < filters.length; i++) {
    ts = filters[i].create(ts);
  }
  // Return both ends of the pipeline: the tokenizer (the source) and the
  // outermost filter (what consumers actually pull tokens from).
  return new TokenStreamInfo(tk, ts);
}
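For context, here is a minimal sketch of how such a chain might be assembled and consumed by hand. It assumes the Solr 1.4 factory classes in org.apache.solr.analysis, TokenizerChain's two-argument constructor (tokenizer factory plus filter factories), and Lucene 2.9's TermAttribute; the field name "title" and the sample text are illustrative only, not part of the original code.

import java.io.IOException;
import java.io.StringReader;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.solr.analysis.LowerCaseFilterFactory;
import org.apache.solr.analysis.SolrAnalyzer.TokenStreamInfo;
import org.apache.solr.analysis.TokenFilterFactory;
import org.apache.solr.analysis.TokenizerChain;
import org.apache.solr.analysis.WhitespaceTokenizerFactory;

public class TokenizerChainDemo {
  public static void main(String[] args) throws IOException {
    // Whitespace tokenizer followed by a lowercase filter.
    TokenizerChain chain = new TokenizerChain(
        new WhitespaceTokenizerFactory(),
        new TokenFilterFactory[] { new LowerCaseFilterFactory() });

    // getStream() hands back both ends of the pipeline; tokens are
    // pulled from the outermost TokenStream.
    TokenStreamInfo info = chain.getStream("title", new StringReader("Hello World"));
    TokenStream ts = info.getTokenStream();
    TermAttribute termAtt = (TermAttribute) ts.addAttribute(TermAttribute.class);
    while (ts.incrementToken()) {
      System.out.println(termAtt.term());   // "hello", then "world"
    }
  }
}

Keeping the tokenizer and the outermost stream separate in TokenStreamInfo is what lets the caller reset and reuse the underlying tokenizer without rebuilding the filter chain.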