// Applies ICUFoldingFilter: Unicode folding (case/accent/width) per UTR#30-style rules.
// NOTE(review): trailing "} }));" closes the enclosing anonymous factory and its registration call, both outside this view.
@Override public TokenStream create(TokenStream tokenStream) { return new ICUFoldingFilter(tokenStream); } }));
// Applies ICUTransformFilter using `transliterator` — presumably a field of the enclosing factory; its script/rules are not visible here.
// NOTE(review): trailing "} }" closes the enclosing anonymous class, which starts outside this view.
@Override public TokenStream create(TokenStream tokenStream) { return new ICUTransformFilter(tokenStream, transliterator); } }
// Applies ICUNormalizer2Filter with `normalizer` — presumably a Normalizer2 field of the enclosing factory; which form (NFC/NFKC/...) is not visible here.
@Override public TokenStream create(TokenStream input) { return new ICUNormalizer2Filter(input, normalizer); }
// Character-level normalization: wraps the Reader in ICUNormalizer2CharFilter so offsets are corrected before tokenization.
// NOTE(review): trailing "} }" closes the enclosing anonymous class, which starts outside this view.
@Override public Reader create(Reader reader) { return new ICUNormalizer2CharFilter(reader, normalizer); } }
/**
 * Records the offset correction produced by normalizing {@code inputLength}
 * input chars into {@code outputLength} output chars, then advances
 * {@code charCount} by the output length.
 *
 * <p>When output is longer than input (negative delta) one correction entry is
 * added per extra output char; when input is longer, a single correction entry
 * is added at the end of the output span. Corrections are cumulative on top of
 * {@code getLastCumulativeDiff()}.
 *
 * <p>NOTE(review): {@code charCount}, {@code getLastCumulativeDiff()} and
 * {@code addOffCorrectMap()} are inherited state/helpers (presumably from
 * Lucene's BaseCharFilter) — not visible in this chunk.
 */
private void recordOffsetDiff(int inputLength, int outputLength) {
  if (inputLength != outputLength) {
    final int lengthDelta = inputLength - outputLength;
    final int lastCumulative = getLastCumulativeDiff();
    if (lengthDelta < 0) {
      // Output grew: one correction per extra output character.
      for (int extra = 1; extra <= -lengthDelta; ++extra) {
        addOffCorrectMap(charCount + extra, lastCumulative - extra);
      }
    } else {
      // Output shrank: single correction at the end of the output span.
      addOffCorrectMap(charCount + outputLength, lastCumulative + lengthDelta);
    }
  }
  charCount += outputLength;
}
// Applies ICUTransformFilter using `transliterator` — presumably a field of the enclosing factory; its script/rules are not visible here.
// NOTE(review): trailing "} }" closes the enclosing anonymous class, which starts outside this view.
@Override public TokenStream create(TokenStream tokenStream) { return new ICUTransformFilter(tokenStream, transliterator); } }
// Normalizes with ICU "nfkc_cf" (NFKC + case folding) in COMPOSE mode — the default-folding normalizer.
// Normalizer2.getInstance caches instances internally, so fetching it per create() call is acceptable.
// NOTE(review): trailing "} }));" closes the enclosing anonymous factory and its registration call, both outside this view.
@Override public TokenStream create(TokenStream tokenStream) { return new org.apache.lucene.analysis.icu.ICUNormalizer2Filter(tokenStream, Normalizer2.getInstance(null, "nfkc_cf", Normalizer2.Mode.COMPOSE)); } }));
// Single-arg ICUNormalizer2CharFilter constructor — uses the filter's built-in default normalizer (nfkc_cf per Lucene docs; confirm against the Lucene version in use).
// NOTE(review): trailing "} }));" closes the enclosing anonymous factory and its registration call, both outside this view.
@Override public Reader create(Reader reader) { return new ICUNormalizer2CharFilter(reader); } }));
// Applies ICUFoldingFilter: Unicode folding (case/accent/width) per UTR#30-style rules.
// NOTE(review): trailing "} }));" closes the enclosing anonymous factory and its registration call, both outside this view.
@Override public TokenStream create(TokenStream tokenStream) { return new ICUFoldingFilter(tokenStream); } }));
// Applies ICUTransformFilter using `transliterator` — presumably a field of the enclosing factory; its script/rules are not visible here.
@Override public TokenStream create(TokenStream input) { return new ICUTransformFilter(input, transliterator); }
// Normalizes with ICU "nfkc_cf" (NFKC + case folding) in COMPOSE mode — the default-folding normalizer.
// Normalizer2.getInstance caches instances internally, so fetching it per create() call is acceptable.
// NOTE(review): trailing "} }));" closes the enclosing anonymous factory and its registration call, both outside this view.
@Override public TokenStream create(TokenStream tokenStream) { return new org.apache.lucene.analysis.icu.ICUNormalizer2Filter(tokenStream, Normalizer2.getInstance(null, "nfkc_cf", Normalizer2.Mode.COMPOSE)); } }));
// Character-level normalization: wraps the Reader in ICUNormalizer2CharFilter so offsets are corrected before tokenization.
// NOTE(review): trailing "} }" closes the enclosing anonymous class, which starts outside this view.
@Override public Reader create(Reader reader) { return new ICUNormalizer2CharFilter(reader, normalizer); } }
// Two-arg ICUFoldingFilter: folds using the supplied `normalizer` — presumably a field of the enclosing factory (a filtered/custom Normalizer2); not visible here.
@Override public TokenStream create(TokenStream input) { return new ICUFoldingFilter(input, normalizer); }
// Uses the ICU "Null" transliterator (identity transform) — presumably a fallback/default when no transform id is configured; verify against the enclosing factory.
// NOTE(review): trailing "} }));" closes the enclosing anonymous factory and its registration call, both outside this view.
@Override public TokenStream create(TokenStream tokenStream) { return new ICUTransformFilter(tokenStream, Transliterator.getInstance("Null", Transliterator.FORWARD)); } }));
// Normalizes with the Normalizer2 instance named by `name` (COMPOSE mode) — `name` is presumably a configured field of the enclosing factory (e.g. "nfc"/"nfkc"); not visible here.
// NOTE(review): trailing "} }" closes the enclosing anonymous class, which starts outside this view.
@Override public TokenStream create(TokenStream tokenStream) { return new org.apache.lucene.analysis.icu.ICUNormalizer2Filter(tokenStream, Normalizer2.getInstance(null, name, Normalizer2.Mode.COMPOSE)); } }
// Character-level normalization: wraps the Reader in ICUNormalizer2CharFilter using `normalizer` — presumably a field of the enclosing factory.
@Override public Reader create(Reader input) { return new ICUNormalizer2CharFilter(input, normalizer); }
// Uses the ICU "Null" transliterator (identity transform) — presumably a fallback/default when no transform id is configured; verify against the enclosing factory.
// NOTE(review): trailing "} }));" closes the enclosing anonymous factory and its registration call, both outside this view.
@Override public TokenStream create(TokenStream tokenStream) { return new ICUTransformFilter(tokenStream, Transliterator.getInstance("Null", Transliterator.FORWARD)); } }));
// Normalizes with the Normalizer2 instance named by `name` (COMPOSE mode) — `name` is presumably a configured field of the enclosing factory (e.g. "nfc"/"nfkc"); not visible here.
// NOTE(review): trailing "} }" closes the enclosing anonymous class, which starts outside this view.
@Override public TokenStream create(TokenStream tokenStream) { return new org.apache.lucene.analysis.icu.ICUNormalizer2Filter(tokenStream, Normalizer2.getInstance(null, name, Normalizer2.Mode.COMPOSE)); } }