/** Returns the shared position iterator, rewound to the first position of the current doc. */
@Override
public Iterator<TermPosition> iterator() {
    // A single iterator instance is reused across calls; reset() rewinds it.
    final Iterator<TermPosition> rewound = iterator.reset();
    return rewound;
}
// Advances the underlying iterator to the next document, grows the backing
// arrays to hold `freq` entries, then eagerly caches all positions for it
// (see record()) so the document's positions can be iterated more than once.
@Override public void nextDoc() throws IOException { super.nextDoc(); ensureSize(freq); record(); }
// Drains the one-shot superclass position stream and stores each of the
// `freq` occurrences into the parallel growable arrays: position, payload,
// start offset and end offset, indexed 0..freq-1.
// NOTE(review): `ensureSize` opens here but its body lies outside this chunk;
// presumably it grows the four arrays to at least `freq` — confirm at its definition.
private void record() throws IOException { TermPosition termPosition; for (int i = 0; i < freq; i++) { termPosition = super.next(); positions.setIntAt(i, termPosition.position); addPayload(i, termPosition.payload); startOffsets.setIntAt(i, termPosition.startOffset); endOffsets.setIntAt(i, termPosition.endOffset); } } private void ensureSize(int freq) {
/**
 * Creates a lookup for {@code term} within {@code fieldName}, choosing a caching
 * position iterator when {@code IndexLookup.FLAG_CACHE} is set in {@code flags}.
 *
 * @param term        the term text; must not be {@code null}
 * @param fieldName   the field to look the term up in; must not be {@code null}
 * @param indexLookup supplies the reader, document id and searcher; must not be {@code null}
 * @param flags       bit set of {@code IndexLookup.FLAG_*} options
 * @throws ElasticsearchException if term statistics cannot be read from the index
 */
public IndexFieldTerm(String term, String fieldName, LeafIndexLookup indexLookup, int flags) {
    assert fieldName != null;
    assert term != null;
    assert indexLookup != null;
    this.fieldName = fieldName;
    this.term = term;
    // `term` is already a String; the former `(String) term` cast was redundant.
    identifier = new Term(fieldName, term);
    this.flags = flags;
    // Cache positions only when explicitly requested; the plain iterator is one-shot.
    boolean doRecord = (flags & IndexLookup.FLAG_CACHE) > 0;
    if (doRecord) {
        iterator = new CachedPositionIterator(this);
    } else {
        iterator = new PositionIterator(this);
    }
    setReader(indexLookup.getReader());
    // NOTE(review): calling a non-final method from a constructor is fragile if
    // this class is ever subclassed — confirm setDocument/setReader are not overridden.
    setDocument(indexLookup.getDocId());
    try {
        termStats = indexLookup.getIndexSearcher().termStatistics(identifier,
                TermContext.build(indexLookup.getReaderContext(), identifier));
    } catch (IOException e) {
        throw new ElasticsearchException("Cannot get term statistics: ", e);
    }
}
// Advances the underlying iterator to the next document, grows the backing
// arrays to hold `freq` entries, then eagerly caches all positions for it
// (see record()) so the document's positions can be iterated more than once.
@Override public void nextDoc() throws IOException { super.nextDoc(); ensureSize(freq); record(); }
// Drains the one-shot superclass position stream and stores each of the
// `freq` occurrences into the parallel growable arrays: position, payload,
// start offset and end offset, indexed 0..freq-1.
// NOTE(review): `ensureSize` opens here but its body lies outside this chunk;
// presumably it grows the four arrays to at least `freq` — confirm at its definition.
private void record() throws IOException { TermPosition termPosition; for (int i = 0; i < freq; i++) { termPosition = super.next(); positions.setIntAt(i, termPosition.position); addPayload(i, termPosition.payload); startOffsets.setIntAt(i, termPosition.startOffset); endOffsets.setIntAt(i, termPosition.endOffset); } } private void ensureSize(int freq) {
/**
 * Creates a lookup for {@code term} within {@code fieldName}, choosing a caching
 * position iterator when {@code IndexLookup.FLAG_CACHE} is set in {@code flags}.
 *
 * @param term        the term text; must not be {@code null}
 * @param fieldName   the field to look the term up in; must not be {@code null}
 * @param indexLookup supplies the reader, document id and searcher; must not be {@code null}
 * @param flags       bit set of {@code IndexLookup.FLAG_*} options
 * @throws ElasticsearchException if term statistics cannot be read from the index
 */
public IndexFieldTerm(String term, String fieldName, LeafIndexLookup indexLookup, int flags) {
    assert fieldName != null;
    assert term != null;
    assert indexLookup != null;
    this.fieldName = fieldName;
    this.term = term;
    // `term` is already a String; the former `(String) term` cast was redundant.
    identifier = new Term(fieldName, term);
    this.flags = flags;
    // Cache positions only when explicitly requested; the plain iterator is one-shot.
    boolean doRecord = (flags & IndexLookup.FLAG_CACHE) > 0;
    if (doRecord) {
        iterator = new CachedPositionIterator(this);
    } else {
        iterator = new PositionIterator(this);
    }
    setReader(indexLookup.getReader());
    // NOTE(review): calling a non-final method from a constructor is fragile if
    // this class is ever subclassed — confirm setDocument/setReader are not overridden.
    setDocument(indexLookup.getDocId());
    try {
        termStats = indexLookup.getIndexSearcher().termStatistics(identifier,
                TermContext.build(indexLookup.getReaderContext(), identifier));
    } catch (IOException e) {
        throw new ElasticsearchException("Cannot get term statistics: ", e);
    }
}
public void setDocument(int docId) { assert (postings != null); try { // we try to advance to the current document. int currentDocPos = postings.docID(); if (currentDocPos < docId) { currentDocPos = postings.advance(docId); } if (currentDocPos == docId) { freq = postings.freq(); } else { freq = 0; } iterator.nextDoc(); } catch (IOException e) { throw new ElasticsearchException("While trying to initialize term positions in IndexFieldTerm.setNextDoc() ", e); } }
/** Returns the shared position iterator, rewound to the first position of the current doc. */
@Override
public Iterator<TermPosition> iterator() {
    // A single iterator instance is reused across calls; reset() rewinds it.
    final Iterator<TermPosition> rewound = iterator.reset();
    return rewound;
}
public void setDocument(int docId) { assert (postings != null); try { // we try to advance to the current document. int currentDocPos = postings.docID(); if (currentDocPos < docId) { currentDocPos = postings.advance(docId); } if (currentDocPos == docId) { freq = postings.freq(); } else { freq = 0; } iterator.nextDoc(); } catch (IOException e) { throw new ElasticsearchException("While trying to initialize term positions in IndexFieldTerm.setNextDoc() ", e); } }