/**
 * Creates a fresh {@link LeafIndexLookup} bound to the given segment context.
 */
public LeafIndexLookup getLeafIndexLookup(LeafReaderContext context) {
    LeafIndexLookup leafLookup = new LeafIndexLookup(context);
    return leafLookup;
}
// Returns the context of the top-level (parent) reader, recording a deprecation
// warning for this access. Delegates to getParentReader(), which falls back to
// the segment reader when no parent is set.
public IndexReaderContext getReaderContext() { logDeprecation(); return getParentReader().getContext(); } }
public void setDocument(int docId) { if (this.docId == docId) { // if we are called with the same docId, // nothing to do return; } // We assume that docs are processed in ascending order of id. If this // is not the case, we would have to re initialize all posting lists in // IndexFieldTerm. TODO: Instead of assert we could also call // setReaderInFields(); here? if (this.docId > docId) { // This might happen if the same SearchLookup is used in different // phases, such as score and fetch phase. // In this case we do not want to re initialize posting list etc. // because we do not even know if term and field statistics will be // needed in this new phase. // Therefore we just remove all IndexFieldTerms. indexFields.clear(); } this.docId = docId; setNextDocIdInFields(); }
/**
 * Creates an accessor for {@code term} within {@code fieldName}, wiring it to
 * the current reader and document of {@code indexLookup} and eagerly fetching
 * the term statistics.
 *
 * @param term        the term text; must not be null
 * @param fieldName   the field the term belongs to; must not be null
 * @param indexLookup per-segment lookup providing reader, doc id and searcher;
 *                    must not be null
 * @param flags       bit set of IndexLookup.FLAG_* options; FLAG_CACHE selects
 *                    a position iterator that records positions for replay
 * @throws ElasticsearchException if the term statistics cannot be read
 */
public IndexFieldTerm(String term, String fieldName, LeafIndexLookup indexLookup, int flags) {
    assert fieldName != null;
    this.fieldName = fieldName;
    assert term != null;
    this.term = term;
    assert indexLookup != null;
    // term is already declared as String; the original (String) cast was redundant.
    identifier = new Term(fieldName, term);
    this.flags = flags;
    boolean doRecord = (flags & IndexLookup.FLAG_CACHE) > 0;
    // With caching requested, positions are recorded so they can be iterated more than once.
    iterator = doRecord ? new CachedPositionIterator(this) : new PositionIterator(this);
    setReader(indexLookup.getReader());
    setDocument(indexLookup.getDocId());
    try {
        termStats = indexLookup.getIndexSearcher().termStatistics(identifier,
                TermContext.build(indexLookup.getReaderContext(), identifier));
    } catch (IOException e) {
        // Preserve the cause so callers can see the underlying I/O failure.
        throw new ElasticsearchException("Cannot get term statistics: ", e);
    }
}
/**
 * Returns the top-level reader, falling back to the segment reader when no
 * parent reader has been set. Records a deprecation warning for the access.
 */
public IndexReader getParentReader() {
    logDeprecation();
    return parentReader != null ? parentReader : reader;
}
// Moves every sub-lookup (doc values, _source, stored fields, index statistics)
// to the given document within the current segment.
public void setDocument(int docId) { docMap.setDocument(docId); sourceLookup.setSegmentAndDocument(ctx, docId); fieldsLookup.setDocument(docId); indexLookup.setDocument(docId); } }
// Creates a per-field accessor and eagerly fetches the field's collection
// statistics from the searcher held by the lookup.
// Throws IOException if the statistics cannot be read.
public IndexField(String fieldName, LeafIndexLookup indexLookup) throws IOException { assert fieldName != null; this.fieldName = fieldName; assert indexLookup != null; this.indexLookup = indexLookup; fieldStats = this.indexLookup.getIndexSearcher().collectionStatistics(fieldName); }
// Returns the context of the top-level (parent) reader.
// NOTE(review): unlike the otherwise-identical accessor that calls
// logDeprecation(), this variant does not record a deprecation warning —
// confirm whether that is intentional for this class.
public IndexReaderContext getReaderContext() { return getParentReader().getContext(); } }
float score = 0; IndexField indexField = this.indexLookup().get(field); double docVectorNorm = 0.0f; for (int i = 0; i < index.size(); i++) {
/**
 * Creates an accessor for {@code term} within {@code fieldName}, wiring it to
 * the current reader and document of {@code indexLookup} and eagerly fetching
 * the term statistics.
 *
 * @param term        the term text; must not be null
 * @param fieldName   the field the term belongs to; must not be null
 * @param indexLookup per-segment lookup providing reader, doc id and searcher;
 *                    must not be null
 * @param flags       bit set of IndexLookup.FLAG_* options; FLAG_CACHE selects
 *                    a position iterator that records positions for replay
 * @throws ElasticsearchException if the term statistics cannot be read
 */
public IndexFieldTerm(String term, String fieldName, LeafIndexLookup indexLookup, int flags) {
    assert fieldName != null;
    this.fieldName = fieldName;
    assert term != null;
    this.term = term;
    assert indexLookup != null;
    // term is already declared as String; the original (String) cast was redundant.
    identifier = new Term(fieldName, term);
    this.flags = flags;
    boolean doRecord = (flags & IndexLookup.FLAG_CACHE) > 0;
    // With caching requested, positions are recorded so they can be iterated more than once.
    iterator = doRecord ? new CachedPositionIterator(this) : new PositionIterator(this);
    setReader(indexLookup.getReader());
    setDocument(indexLookup.getDocId());
    try {
        termStats = indexLookup.getIndexSearcher().termStatistics(identifier,
                TermContext.build(indexLookup.getReaderContext(), identifier));
    } catch (IOException e) {
        // Preserve the cause so callers can see the underlying I/O failure.
        throw new ElasticsearchException("Cannot get term statistics: ", e);
    }
}
/**
 * Returns the searcher this lookup operates on, recording a deprecation
 * warning for the access.
 */
public IndexSearcher getIndexSearcher() {
    logDeprecation();
    return this.indexSearcher;
}
// Moves every sub-lookup (doc values, _source, stored fields, index statistics)
// to the given document within the current segment.
public void setDocument(int docId) { docMap.setDocument(docId); sourceLookup.setSegmentAndDocument(ctx, docId); fieldsLookup.setDocument(docId); indexLookup.setDocument(docId); } }
// Creates a per-field accessor and eagerly fetches the field's collection
// statistics from the searcher held by the lookup.
// Throws IOException if the statistics cannot be read.
public IndexField(String fieldName, LeafIndexLookup indexLookup) throws IOException { assert fieldName != null; this.fieldName = fieldName; assert indexLookup != null; this.indexLookup = indexLookup; fieldStats = this.indexLookup.getIndexSearcher().collectionStatistics(fieldName); }
/**
 * Returns the segment reader backing this lookup; records a deprecation
 * warning for the access. Package-private on purpose.
 */
LeafReader getReader() {
    logDeprecation();
    return this.reader;
}
/**
 * Static factory: builds a {@link LeafIndexLookup} over the supplied segment
 * context.
 */
public static LeafIndexLookup getLeafIndexLookup(LeafReaderContext context) {
    LeafIndexLookup leafLookup = new LeafIndexLookup(context);
    return leafLookup;
}
public void setDocument(int docId) { if (this.docId == docId) { // if we are called with the same docId, // nothing to do return; } // We assume that docs are processed in ascending order of id. If this // is not the case, we would have to re initialize all posting lists in // IndexFieldTerm. TODO: Instead of assert we could also call // setReaderInFields(); here? if (this.docId > docId) { // This might happen if the same SearchLookup is used in different // phases, such as score and fetch phase. // In this case we do not want to re initialize posting list etc. // because we do not even know if term and field statistics will be // needed in this new phase. // Therefore we just remove all IndexFieldTerms. indexFields.clear(); } this.docId = docId; setNextDocIdInFields(); }
/**
 * Returns the id of the document this lookup is currently positioned on;
 * records a deprecation warning for the access.
 */
public int getDocId() {
    logDeprecation();
    return this.docId;
}
/**
 * Returns the term vectors stored for the current document. A reader must
 * already have been set. Records a deprecation warning for the access.
 *
 * @throws IOException if the term vectors cannot be read
 */
public Fields termVectors() throws IOException {
    logDeprecation();
    assert reader != null;
    return reader.getTermVectors(docId);
}
/**
 * Returns the parent reader's maxDoc, computing it lazily on first use
 * (-1 is the "not yet computed" sentinel) and caching it afterwards.
 * Records a deprecation warning for the access.
 */
public int maxDoc() {
    logDeprecation();
    int cached = maxDoc;
    if (cached == -1) {
        cached = parentReader.maxDoc();
        maxDoc = cached;
    }
    return cached;
}
/**
 * Returns the parent reader's live document count, computing it lazily on
 * first use (-1 is the "not yet computed" sentinel) and caching it afterwards.
 * Records a deprecation warning for the access.
 */
public int numDocs() {
    logDeprecation();
    int cached = numDocs;
    if (cached == -1) {
        cached = parentReader.numDocs();
        numDocs = cached;
    }
    return cached;
}