Refine search
// NOTE(review): search-result fragment — lines appear elided between the null
// checks and the DocIdSetBuilder construction. As shown, the builder is created
// from `values` inside the `values == null` branch, which would NPE; the full
// source presumably returns null for a missing field first — confirm upstream.
@Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
    // Per-segment point values for this query's field; null when the segment
    // holds no points for the field.
    LeafReader reader = context.reader();
    PointValues values = reader.getPointValues(field);
    if (values == null) {
        FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
        if (fieldInfo == null) {
            // Builder collects matching doc ids; the visitor feeds it during tree intersection.
            DocIdSetBuilder result = new DocIdSetBuilder(reader.maxDoc(), values, field);
            final IntersectVisitor visitor = getIntersectVisitor(result);
// NOTE(review): truncated fragment — at least one closing brace after
// `continue;` has been elided, and the loop/method close is not visible here.
// Iterates field infos, skipping fields without doc values; gen == -1 appears
// to mean "written with the segment" (shared base producer) — TODO confirm
// against the full SegmentReader source.
DocValuesProducer baseProducer = null;
for (FieldInfo fi : allInfos) {
    if (fi.getDocValuesType() == DocValuesType.NONE) {
        continue;
    long docValuesGen = fi.getDocValuesGen();
    if (docValuesGen == -1) {
        if (baseProducer == null) {
            // Guard: this generation must not already have a registered producer.
            assert !dvGens.contains(docValuesGen);
            final DocValuesProducer dvp = segDocValues.getDocValuesProducer(docValuesGen, si, dir, new FieldInfos(new FieldInfo[]{fi}));
            dvGens.add(docValuesGen);
            dvProducers.add(dvp);
/**
 * Serializes all {@code FieldInfo}s of a segment into the field-infos file
 * (segment name + suffix + EXTENSION) in the Lucene50 on-disk format:
 * index header, field count, then per field: name, number, a packed flags
 * byte (term vectors / norms omitted / payloads), index options byte,
 * doc-values type byte, doc-values generation, and the attribute map,
 * followed by a checksum footer.
 * The write sequence below IS the wire format — do not reorder calls.
 */
@Override public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
    try (IndexOutput output = directory.createOutput(fileName, context)) {
        CodecUtil.writeIndexHeader(output, Lucene50FieldInfosFormat.CODEC_NAME, Lucene50FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
        output.writeVInt(infos.size());
        for (FieldInfo fi : infos) {
            fi.checkConsistency(); // fail fast on an internally inconsistent FieldInfo
            output.writeString(fi.name);
            output.writeVInt(fi.number);
            // Pack per-field boolean flags into a single byte.
            byte bits = 0x0;
            if (fi.hasVectors()) bits |= STORE_TERMVECTOR;
            if (fi.omitsNorms()) bits |= OMIT_NORMS;
            if (fi.hasPayloads()) bits |= STORE_PAYLOADS;
            output.writeByte(bits);
            output.writeByte(indexOptionsByte(fi.getIndexOptions()));
            // pack the DV type and hasNorms in one byte
            output.writeByte(docValuesByte(fi.getDocValuesType()));
            output.writeLong(fi.getDocValuesGen());
            output.writeMapOfStrings(fi.attributes());
        }
        CodecUtil.writeFooter(output); // checksum footer, validated at read time
    }
}
// NOTE(review): truncated postings-writer constructor fragment. The floating
// "state.segmentInfo.getId(), state.segmentSuffix);" is the tail of an elided
// CodecUtil.writeIndexHeader(docOut, ...) call — confirm against full source.
// Closing braces for the nested hasPayloads/hasOffsets blocks are also elided.
docOut = state.directory.createOutput(docFileName, state.context);
IndexOutput posOut = null;
IndexOutput payOut = null;
state.segmentInfo.getId(), state.segmentSuffix);
forUtil = new ForUtil(acceptableOverheadRatio, docOut);
if (state.fieldInfos.hasProx()) {
    // A .pos file is created only when some field indexes positions.
    posDeltaBuffer = new int[MAX_DATA_SIZE];
    String posFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.POS_EXTENSION);
    posOut = state.directory.createOutput(posFileName, state.context);
    CodecUtil.writeIndexHeader(posOut, POS_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
    if (state.fieldInfos.hasPayloads()) {
        payloadBytes = new byte[128];
        payloadLengthBuffer = new int[MAX_DATA_SIZE];
    if (state.fieldInfos.hasOffsets()) {
        offsetStartDeltaBuffer = new int[MAX_DATA_SIZE];
        offsetLengthBuffer = new int[MAX_DATA_SIZE];
    if (state.fieldInfos.hasPayloads() || state.fieldInfos.hasOffsets()) {
        // A .pay file exists only when payloads or offsets are present.
        String payFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.PAY_EXTENSION);
        payOut = state.directory.createOutput(payFileName, state.context);
        CodecUtil.writeIndexHeader(payOut, PAY_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
/**
 * Looks up the {@code NumericDocValues} for the given field on this reader.
 *
 * @param field the name of the field to read
 * @return the {@code NumericDocValues} for the given field, never {@code null}
 * @throws IllegalStateException if the field is not indexed with numeric doc
 *         values; the message reports the actual {@code DocValuesType}, or
 *         {@code null} when the field does not exist at all
 * @throws java.io.UncheckedIOException if reading the doc values fails
 */
public NumericDocValues readDocValues( String field )
{
    try
    {
        NumericDocValues dv = context.reader().getNumericDocValues( field );
        if ( dv == null )
        {
            // Report what was actually indexed; stays null when the field is unknown.
            FieldInfo fi = context.reader().getFieldInfos().fieldInfo( field );
            DocValuesType actual = null;
            if ( fi != null )
            {
                actual = fi.getDocValuesType();
            }
            throw new IllegalStateException( "The field '" + field + "' is not indexed properly, expected NumericDV, but got '" + actual + "'" );
        }
        return dv;
    }
    catch ( IOException e )
    {
        // UncheckedIOException is a RuntimeException, so callers catching the
        // previous bare RuntimeException wrapper are unaffected; the IO cause
        // is preserved and typed.
        throw new java.io.UncheckedIOException( e );
    }
}
}
/**
 * Builds a constant-score scorer over this field's norm values, or returns
 * {@code null} when the field is unknown or carries no norms in this segment.
 */
@Override
public Scorer scorer(LeafReaderContext context) throws IOException {
    final FieldInfo info = context.reader().getFieldInfos().fieldInfo(field);
    if (info == null || !info.hasNorms()) {
        return null; // nothing to iterate for this segment
    }
    final DocIdSetIterator norms = context.reader().getNormValues(field);
    return new ConstantScoreScorer(this, score(), norms);
}
// Fragment: dispatch on the field's declared doc-values type to obtain the
// matching per-segment iterator; NONE yields no iterator at all.
// (The switch is truncated after SORTED — remaining cases are not visible.)
FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
final DocIdSetIterator iterator;
if (fieldInfo != null) {
    switch (fieldInfo.getDocValuesType()) {
        case NONE:
            iterator = null;
            break;
        case NUMERIC:
            iterator = reader.getNumericDocValues(field);
            break;
        case BINARY:
            iterator = reader.getBinaryDocValues(field);
            break;
        case SORTED:
// NOTE(review): non-contiguous fragment — lines are elided between the
// single-leaf fast path and the per-leaf loop that binds `leaf`; do not read
// this as sequential code.
return null;
} else if (size == 1) {
    // Exactly one segment: return its doc values directly, no multi-reader wrapper.
    return leaves.get(0).reader().getBinaryDocValues(field);
FieldInfo fieldInfo = leaf.reader().getFieldInfos().fieldInfo(field);
if (fieldInfo != null) {
    DocValuesType dvType = fieldInfo.getDocValuesType();
    if (dvType == DocValuesType.BINARY) {
        // At least one segment genuinely has BINARY doc values for this field.
        anyReal = true;
// NOTE(review): truncated fragment of the Lucene60 field-infos writer — the
// declaration of `bits` and everything after the flag packing are elided here.
@Override public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, EXTENSION);
    try (IndexOutput output = directory.createOutput(fileName, context)) {
        CodecUtil.writeIndexHeader(output, Lucene60FieldInfosFormat.CODEC_NAME, Lucene60FieldInfosFormat.FORMAT_CURRENT, segmentInfo.getId(), segmentSuffix);
        output.writeVInt(infos.size());
        for (FieldInfo fi : infos) {
            fi.checkConsistency(); // fail fast on an internally inconsistent FieldInfo
            // Pack per-field boolean flags into one byte (`bits` declared in elided code).
            if (fi.hasVectors()) bits |= STORE_TERMVECTOR;
            if (fi.omitsNorms()) bits |= OMIT_NORMS;
            if (fi.hasPayloads()) bits |= STORE_PAYLOADS;
            if (fi.isSoftDeletesField()) bits |= SOFT_DELETES_FIELD;
/**
 * Returns {@code true} if the specified docvalues fields have not been updated,
 * i.e. none of them carries a doc-values generation greater than -1.
 */
public static boolean isCacheable(LeafReaderContext ctx, String... fields) {
    for (String name : fields) {
        final FieldInfo info = ctx.reader().getFieldInfos().fieldInfo(name);
        if (info == null) {
            continue; // unknown field: nothing that could have been updated
        }
        if (info.getDocValuesGen() > -1) {
            return false; // this field has received a doc-values update
        }
    }
    return true;
}
}
/**
 * Prepares this comparator for the given segment: validates the field's
 * encoding when it exists, then binds the segment's sorted-numeric doc values
 * and resets the cached doc id before returning itself as the leaf comparator.
 */
@Override
public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException {
    final LeafReader leafReader = context.reader();
    final FieldInfo fieldInfo = leafReader.getFieldInfos().fieldInfo(field);
    if (fieldInfo != null) {
        // Reject fields indexed with an incompatible encoding up front.
        LatLonDocValuesField.checkCompatible(fieldInfo);
    }
    currentDocs = DocValues.getSortedNumeric(leafReader, field);
    valuesDocID = -1;
    return this;
}
// NOTE(review): truncated fragment — the closing brace of the type-mismatch
// branch is elided before the getSortedDocValues call.
// Gathers per-segment SORTED doc values across all leaves.
for (int i = 0; i < size; i++) {
    LeafReaderContext context = in.leaves().get(i);
    final LeafReader reader = context.reader();
    final FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
    if (fieldInfo != null && fieldInfo.getDocValuesType() != DocValuesType.SORTED) {
        // Field exists with a different DV type: a SORTED view cannot be built.
        return null;
    SortedDocValues v = reader.getSortedDocValues(field);
    if (v == null) {
        // Segment has no values for this field: substitute an empty instance.
        v = DocValues.emptySorted();
// NOTE(review): truncated fragment — the closing brace of the type-mismatch
// branch is elided before the getSortedSetDocValues call.
// Gathers per-segment SORTED_SET doc values across all leaves.
for (int i = 0; i < size; i++) {
    LeafReaderContext context = in.leaves().get(i);
    final LeafReader reader = context.reader();
    final FieldInfo fieldInfo = reader.getFieldInfos().fieldInfo(field);
    if(fieldInfo != null && fieldInfo.getDocValuesType() != DocValuesType.SORTED_SET){
        // Field exists with a different DV type: a SORTED_SET view cannot be built.
        return null;
    SortedSetDocValues v = reader.getSortedSetDocValues(field);
    if (v == null) {
        // Segment has no values for this field: substitute an empty instance.
        v = DocValues.emptySortedSet();
/**
 * Raises a descriptive error for a doc-values type mismatch. Intended to be
 * called after a doc-values lookup came back empty: when the field exists at
 * all, its actual type necessarily differs from what the caller expected, so
 * this always throws in that case; an unknown field is silently ignored.
 */
private static void checkField(LeafReader in, String field, DocValuesType... expected) {
    final FieldInfo fi = in.getFieldInfos().fieldInfo(field);
    if (fi == null) {
        return; // field not present in this reader — nothing to report
    }
    final DocValuesType actual = fi.getDocValuesType();
    final String expectation =
            expected.length == 1
                    ? "(expected=" + expected[0]
                    : "(expected one of " + Arrays.toString(expected);
    throw new IllegalStateException("unexpected docvalues type " + actual + " for field '" + field + "' "
            + expectation + "). " + "Re-index with correct docvalues type.");
}
// Fragment of the Lucene50 postings reader constructor: opens the .doc stream,
// then .pos when any field indexes positions, then .pay when payloads or
// offsets exist. Closing braces are elided at the end of this fragment.
docIn = state.directory.openInput(docName, state.context);
// The version read from .doc governs the exact version required of the others.
version = CodecUtil.checkIndexHeader(docIn, DOC_CODEC, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
forUtil = new ForUtil(docIn);
// NOTE(review): retrieveChecksum presumably just reads the stored footer
// checksum without verifying the file body — confirm against CodecUtil docs.
CodecUtil.retrieveChecksum(docIn);
if (state.fieldInfos.hasProx()) {
    String proxName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.POS_EXTENSION);
    posIn = state.directory.openInput(proxName, state.context);
    CodecUtil.checkIndexHeader(posIn, POS_CODEC, version, version, state.segmentInfo.getId(), state.segmentSuffix);
    CodecUtil.retrieveChecksum(posIn);
    if (state.fieldInfos.hasPayloads() || state.fieldInfos.hasOffsets()) {
        String payName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, Lucene50PostingsFormat.PAY_EXTENSION);
        payIn = state.directory.openInput(payName, state.context);
        CodecUtil.checkIndexHeader(payIn, PAY_CODEC, version, version, state.segmentInfo.getId(), state.segmentSuffix);
        CodecUtil.retrieveChecksum(payIn);
@Override public SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException { ensureOpen(); FieldInfo fi = getFieldInfos().fieldInfo(field); if (fi == null) { // Field does not exist return null; } if (fi.getDocValuesType() != DocValuesType.SORTED_NUMERIC) { // Field was not indexed with doc values return null; } return docValues.getSortedNumeric(fi); }
/**
 * Wraps the delegate's terms for {@code field} in a sorting view driven by
 * {@code docMap}; returns {@code null} when the delegate has no terms.
 */
@Override
public Terms terms(String field) throws IOException {
    final Terms delegate = super.terms(field);
    if (delegate == null) {
        return null;
    }
    // NOTE(review): assumes fieldInfo(field) is non-null whenever terms exist —
    // holds for a consistent index; confirm before hardening against null.
    return new SortingTerms(delegate, in.getFieldInfos().fieldInfo(field).getIndexOptions(), docMap);
}
// NOTE(review): truncated fragment — the closing brace of the
// CorruptIndexException branch is elided, and `termsName`/`indexName`/`field`
// are bound outside this view.
termsIn = state.directory.openInput(termsName, state.context);
// The version read from the terms stream pins the exact version required of the index stream.
version = CodecUtil.checkIndexHeader(termsIn, TERMS_CODEC_NAME, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix);
indexIn = state.directory.openInput(indexName, state.context);
CodecUtil.checkIndexHeader(indexIn, TERMS_INDEX_CODEC_NAME, version, version, state.segmentInfo.getId(), state.segmentSuffix);
CodecUtil.checksumEntireFile(indexIn);
final FieldInfo fieldInfo = state.fieldInfos.fieldInfo(field);
if (fieldInfo == null) {
    throw new CorruptIndexException("invalid field number: " + field, termsIn);
// DOCS-only fields store no total term frequency; -1 marks it absent.
final long sumTotalTermFreq = fieldInfo.getIndexOptions() == IndexOptions.DOCS ? -1 : termsIn.readVLong();
final long sumDocFreq = termsIn.readVLong();
final int docCount = termsIn.readVInt();
// NOTE(review): very old (pre-generics) Lucene fragment — raw ArrayList,
// public `fi.isIndexed` field, and an iterator-based deleteFile whose loop
// header is elided. Several closing braces are missing from this view.
new ArrayList(COMPOUND_EXTENSIONS.length + fieldInfos.size());
for (int i = 0; i < fieldInfos.size(); i++) {
    FieldInfo fi = fieldInfos.fieldInfo(i);
    if (fi.isIndexed) {
        // One per-field norms file (".f" + field number) per indexed field.
        files.add(segment + ".f" + i);
if (fieldInfos.hasVectors()) {
    for (int i = 0; i < VECTOR_EXTENSIONS.length; i++) {
        files.add(segment + "." + VECTOR_EXTENSIONS[i]);
directory.deleteFile((String) it.next());