/**
 * Factory hook: decorates {@code input} with a {@link PositionAttributeFilter}.
 *
 * @param input the upstream token stream to wrap
 * @return the wrapped stream
 */
@Override
public TokenStream create(final TokenStream input) {
  final PositionAttributeFilter wrapped = new PositionAttributeFilter(input);
  return wrapped;
}
/**
 * Creates the filter over {@code input} and registers, on the shared attribute
 * source, the three attributes this filter works with: node, position and
 * position-increment.
 *
 * <p>NOTE(review): registration order is kept as-is — AttributeSource state
 * ordering can be observable, so it is not reordered here.
 *
 * @param input the upstream token stream to wrap
 */
public PositionAttributeFilter(final TokenStream input) {
  super(input);
  nodeAtt = addAttribute(NodeAttribute.class);
  posAtt = addAttribute(PositionAttribute.class);
  posIncrAtt = addAttribute(PositionIncrementAttribute.class);
}
@Override protected TokenStreamComponents createComponents(final String fieldName, final Reader reader) { final Version matchVersion = Version.LUCENE_4_9; final ConciseJsonTokenizer src = new ConciseJsonTokenizer(reader); TokenStream tok = new DatatypeAnalyzerFilter(src, new StandardAnalyzer(matchVersion), new StandardAnalyzer(matchVersion)); // The PathEncodingFilter is mandatory only for the ConciseJsonTokenizer PathEncodingFilter pathFilter = new PathEncodingFilter(tok); // here we tell the path filter to preserve the original tokens, // it will index the value with and without prepending the path pathFilter.setPreserveOriginal(true); // The PositionAttributeFilter and SirenPayloadFilter are mandatory // and must be always the last filters in your token stream tok = new PositionAttributeFilter(pathFilter); tok = new SirenPayloadFilter(tok); return new TokenStreamComponents(src, tok); }
/**
 * Assembles the analysis chain for extended JSON input, applying the
 * configured field/value analyzers plus any per-datatype analyzers that were
 * registered.
 *
 * @param fieldName the field being analyzed (unused by this chain)
 * @param reader the character source to tokenize
 * @return the tokenizer/sink pair forming the full chain
 */
@Override
protected TokenStreamComponents createComponents(final String fieldName, final Reader reader) {
  final ExtendedJsonTokenizer tokenizer = new ExtendedJsonTokenizer(reader);
  final DatatypeAnalyzerFilter datatypeFilter =
      new DatatypeAnalyzerFilter(tokenizer, fieldAnalyzer, valueAnalyzer);
  // Attach every registered datatype-specific analyzer.
  for (final Entry<Object, Analyzer> entry : regAnalyzers.entrySet()) {
    datatypeFilter.register((char[]) entry.getKey(), entry.getValue());
  }
  // Mandatory trailing filters, always in this order.
  TokenStream stream = new PositionAttributeFilter(datatypeFilter);
  stream = new SirenPayloadFilter(stream);
  return new TokenStreamComponents(tokenizer, stream);
}
/**
 * Assembles the analysis chain for extended JSON input using the datatype
 * analyzers from {@code indexAnalyzers}.
 *
 * @param field the field being analyzed (unused by this chain)
 * @param reader the character source to tokenize
 * @return the tokenizer/sink pair forming the full chain
 */
@Override
protected TokenStreamComponents createComponents(String field, Reader reader) {
  final ExtendedJsonTokenizer tokenizer = new ExtendedJsonTokenizer(reader);
  final DatatypeAnalyzerFilter datatypeFilter = new DatatypeAnalyzerFilter(tokenizer);
  // Attach every configured per-datatype analyzer.
  for (final Entry<Object, Analyzer> entry : indexAnalyzers.entrySet()) {
    datatypeFilter.register((char[]) entry.getKey(), entry.getValue());
  }
  // Mandatory trailing filters, always in this order.
  TokenStream stream = new PositionAttributeFilter(datatypeFilter);
  stream = new SirenPayloadFilter(stream);
  return new TokenStreamComponents(tokenizer, stream);
}
/**
 * Assembles the analysis chain for tuple input: BNODE and DOT token types are
 * dropped, then datatype analyzers (URI, string, plus any registered literal
 * analyzers) are applied before the mandatory trailing filters.
 *
 * @param fieldName the field being analyzed (unused by this chain)
 * @param reader the character source to tokenize
 * @return the tokenizer/sink pair forming the full chain
 */
@Override
protected TokenStreamComponents createComponents(final String fieldName, final Reader reader) {
  final TupleTokenizer tokenizer = new TupleTokenizer(reader);
  // Filter out blank-node and dot tokens — they carry no indexable content.
  TokenStream stream = new TokenTypeFilter(matchVersion, tokenizer,
      new int[] {TupleTokenizer.BNODE, TupleTokenizer.DOT});
  final DatatypeAnalyzerFilter datatypeFilter =
      new DatatypeAnalyzerFilter(stream, anyURIAnalyzer, stringAnalyzer);
  // Attach every registered literal-datatype analyzer.
  for (final Entry<Object, Analyzer> entry : regLitAnalyzers.entrySet()) {
    datatypeFilter.register((char[]) entry.getKey(), entry.getValue());
  }
  // Mandatory trailing filters, always in this order.
  stream = new PositionAttributeFilter(datatypeFilter);
  stream = new SirenPayloadFilter(stream);
  return new TokenStreamComponents(tokenizer, stream);
}
/**
 * Assembles the analysis chain for concise JSON input with the configured
 * field/value analyzers, registered datatype analyzers, and path encoding.
 *
 * @param fieldName the field being analyzed (unused by this chain)
 * @param reader the character source to tokenize
 * @return the tokenizer/sink pair forming the full chain
 */
@Override
protected TokenStreamComponents createComponents(final String fieldName, final Reader reader) {
  final ConciseJsonTokenizer tokenizer = new ConciseJsonTokenizer(reader);
  final DatatypeAnalyzerFilter datatypeFilter =
      new DatatypeAnalyzerFilter(tokenizer, fieldAnalyzer, valueAnalyzer);
  // Attach every registered datatype-specific analyzer.
  for (final Entry<Object, Analyzer> entry : regAnalyzers.entrySet()) {
    datatypeFilter.register((char[]) entry.getKey(), entry.getValue());
  }
  final PathEncodingFilter paths = new PathEncodingFilter(datatypeFilter);
  // Whether values are also indexed without their path prefix is configurable.
  paths.setPreserveOriginal(this.generateTokensWithoutPath);
  // Mandatory trailing filters, always in this order.
  TokenStream stream = new PositionAttributeFilter(paths);
  stream = new SirenPayloadFilter(stream);
  return new TokenStreamComponents(tokenizer, stream);
}
/**
 * Assembles the analysis chain for concise JSON input using the datatype
 * analyzers from {@code indexAnalyzers}, with path encoding.
 *
 * @param field the field being analyzed (unused by this chain)
 * @param reader the character source to tokenize
 * @return the tokenizer/sink pair forming the full chain
 */
@Override
protected TokenStreamComponents createComponents(String field, Reader reader) {
  final ConciseJsonTokenizer tokenizer = new ConciseJsonTokenizer(reader);
  final DatatypeAnalyzerFilter datatypeFilter = new DatatypeAnalyzerFilter(tokenizer);
  // Attach every configured per-datatype analyzer.
  for (final Entry<Object, Analyzer> entry : indexAnalyzers.entrySet()) {
    datatypeFilter.register((char[]) entry.getKey(), entry.getValue());
  }
  final PathEncodingFilter paths = new PathEncodingFilter(datatypeFilter);
  // Whether values are also indexed without their path prefix is configurable.
  paths.setPreserveOriginal(this.generateTokensWithoutPath);
  // Mandatory trailing filters, always in this order.
  TokenStream stream = new PositionAttributeFilter(paths);
  stream = new SirenPayloadFilter(stream);
  return new TokenStreamComponents(tokenizer, stream);
}
/**
 * Assembles a minimal test analysis chain over a mock reader: mock tokenizer
 * followed by the two mandatory trailing filters.
 *
 * <p>NOTE(review): {@code reader} is cast unchecked to {@link MockSirenReader};
 * callers are expected to always supply one.
 *
 * @param fieldName the field being analyzed (unused by this chain)
 * @param reader must be a {@link MockSirenReader}
 * @return the tokenizer/sink pair forming the chain
 */
@Override
protected TokenStreamComponents createComponents(final String fieldName, final Reader reader) {
  final MockSirenTokenizer tokenizer = new MockSirenTokenizer((MockSirenReader) reader);
  // Mandatory trailing filters, always in this order.
  TokenStream stream = new PositionAttributeFilter(tokenizer);
  stream = new SirenPayloadFilter(stream);
  return new TokenStreamComponents(tokenizer, stream);
}