@Override
public TokenStream create(TokenStream tokenStream) {
    // When no FST was built the synonym map is empty; hand the stream back untouched.
    if (synonyms.fst == null) {
        return tokenStream;
    }
    // Note: this variant is always case-sensitive (ignoreCase == false).
    return new SynonymFilter(tokenStream, synonyms, false);
}
private void capture() { captureCount++; //System.out.println(" capture slot=" + nextWrite); final PendingInput input = futureInputs[nextWrite]; input.state = captureState(); input.consumed = false; input.term.copyChars(termAtt.buffer(), 0, termAtt.length()); nextWrite = rollIncr(nextWrite); // Buffer head should never catch up to tail: assert nextWrite != nextRead; }
restoreState(input.state); } else { outputs.posIncr = 0; } else { nextRead = rollIncr(nextRead); inputSkipCount--; final int posIncr = outputs.posIncr; final CharsRef output = outputs.pullNext(); clearAttributes(); termAtt.copyBuffer(output.chars, output.offset, output.length); typeAtt.setType(TYPE_SYNONYM); nextRead = rollIncr(nextRead); inputSkipCount--; nextRead = rollIncr(nextRead); inputSkipCount--; futureInputs[nextRead].reset(); if (outputs.count == 0) { nextWrite = nextRead = rollIncr(nextRead); clearAttributes(); parse();
restoreState(input.state); } else { outputs.posIncr = 0; } else { nextRead = rollIncr(nextRead); inputSkipCount--; final int posIncr = outputs.posIncr; final CharsRef output = outputs.pullNext(); clearAttributes(); termAtt.copyBuffer(output.chars, output.offset, output.length); typeAtt.setType(TYPE_SYNONYM); nextRead = rollIncr(nextRead); inputSkipCount--; nextRead = rollIncr(nextRead); inputSkipCount--; futureInputs[nextRead].reset(); if (outputs.count == 0) { nextWrite = nextRead = rollIncr(nextRead); clearAttributes(); parse();
@Override public TokenStream create(TokenStream input) { // if the fst is null, it means there's actually no synonyms... just return the original stream // as there is nothing to do here. return map.fst == null ? input : new SynonymFilter(input, map, ignoreCase); }
restoreState(input.state); } else { outputs.posIncr = 0; } else { nextRead = rollIncr(nextRead); inputSkipCount--; final int posIncr = outputs.posIncr; final CharsRef output = outputs.pullNext(); clearAttributes(); termAtt.copyBuffer(output.chars, output.offset, output.length); typeAtt.setType(TYPE_SYNONYM); nextRead = rollIncr(nextRead); inputSkipCount--; nextRead = rollIncr(nextRead); inputSkipCount--; futureInputs[nextRead].reset(); if (outputs.count == 0) { nextWrite = nextRead = rollIncr(nextRead); clearAttributes(); parse();
private void capture() { captureCount++; //System.out.println(" capture slot=" + nextWrite); final PendingInput input = futureInputs[nextWrite]; input.state = captureState(); input.consumed = false; input.term.copyChars(termAtt.buffer(), 0, termAtt.length()); nextWrite = rollIncr(nextWrite); // Buffer head should never catch up to tail: assert nextWrite != nextRead; }
@Override public TokenStream create(TokenStream tokenStream) { // fst is null means no synonyms return synonymMap.fst == null ? tokenStream : new SynonymFilter(tokenStream, synonymMap, ignoreCase); } }
private void capture() { captureCount++; //System.out.println(" capture slot=" + nextWrite); final PendingInput input = futureInputs[nextWrite]; input.state = captureState(); input.consumed = false; input.term.copyChars(termAtt.buffer(), 0, termAtt.length()); nextWrite = rollIncr(nextWrite); // Buffer head should never catch up to tail: assert nextWrite != nextRead; }
@Override
public TokenStream create(TokenStream tokenStream) {
    // No compiled FST == empty synonym map: pass the stream through as-is.
    // Otherwise wrap it, always case-sensitively (ignoreCase == false).
    if (synonyms.fst != null) {
        return new SynonymFilter(tokenStream, synonyms, false);
    }
    return tokenStream;
}
};
@Override public TokenStream create(TokenStream tokenStream) { // fst is null means no synonyms return synonymMap.fst == null ? tokenStream : new SynonymFilter(tokenStream, synonymMap, ignoreCase); }
@Override public TokenStream create(TokenStream input) { // if the fst is null, it means there's actually no synonyms... just return the original stream // as there is nothing to do here. return map.fst == null ? input : new SynonymFilter(input, map, ignoreCase); }
@Override
public TokenStream create(TokenStream input) {
    // Skip wrapping entirely when the synonym map compiled to no FST (no rules).
    if (input != null && map.fst == null) {
        return input;
    }
    return map.fst == null ? input : new SynonymFilter(input, map, ignoreCase);
}
@Override protected TokenStreamComponents createComponents(String fieldName) { Tokenizer tokenizer = new WhitespaceTokenizer(); TokenStream result = new PatternReplaceFilter(tokenizer, Pattern.compile("^([\\.!\\?,:;\"'\\(\\)]*)(.*?)([\\.!\\?,:;\"'\\(\\)]*)$"), "$2", true); result = new PatternReplaceFilter(result, Pattern.compile("'s"), "s", true); result = new BolEolFilter(result); result = new SynonymFilter(result, map, true); // result = new StopFilter(result, LuceneUtils.caseSensitiveStopSet); result = new Lucene43StopFilter(false, result, LuceneUtils.caseSensitiveStopSet); result = new LowerCaseFilter(result); result = new ASCIIFoldingFilter(result); return new TokenStreamComponents(tokenizer, result); }