/**
 * Returns the address reader for variable-length binary values of {@code field},
 * loading it from the data file and caching it on first access.
 * Synchronized so concurrent callers see a single shared instance.
 */
private synchronized MonotonicBlockPackedReader getAddressInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  MonotonicBlockPackedReader cached = addressInstances.get(field.name);
  if (cached != null) {
    return cached;
  }
  // First access for this field: read the monotonic address blocks from disk.
  data.seek(bytes.addressesOffset);
  final MonotonicBlockPackedReader loaded =
      MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, bytes.count + 1, false);
  if (!merging) {
    // Cache and account for RAM only outside of merges.
    addressInstances.put(field.name, loaded);
    ramBytesUsed.addAndGet(loaded.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
  }
  return loaded;
}
/**
 * Returns the ordinal-index reader for a sortedset field's ordinal lists,
 * loading it from the data file and caching it on first access.
 */
private synchronized MonotonicBlockPackedReader getOrdIndexInstance(FieldInfo field, NumericEntry entry) throws IOException {
  MonotonicBlockPackedReader cached = ordIndexInstances.get(field.name);
  if (cached != null) {
    return cached;
  }
  // Not loaded yet: position at the entry and read the monotonic blocks.
  data.seek(entry.offset);
  final MonotonicBlockPackedReader loaded =
      MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, entry.count + 1, false);
  if (!merging) {
    // Cache and account for RAM only outside of merges.
    ordIndexInstances.put(field.name, loaded);
    ramBytesUsed.addAndGet(loaded.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
  }
  return loaded;
}
/**
 * Returns the address reader for variable-length binary values of {@code field}.
 * The reader is loaded lazily and cached per field name; synchronized so only
 * one instance is ever created per field.
 */
private synchronized MonotonicBlockPackedReader getAddressInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  MonotonicBlockPackedReader reader = addressInstances.get(field.name);
  if (reader == null) {
    data.seek(bytes.addressesOffset);
    // bytes.count+1 addresses: one per value plus the trailing end offset.
    reader = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, bytes.count + 1, false);
    if (!merging) {
      // Skip caching/RAM accounting during merges.
      addressInstances.put(field.name, reader);
      ramBytesUsed.addAndGet(reader.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
    }
  }
  return reader;
}
/**
 * Returns the interval address reader for prefix-compressed binary values of
 * {@code field}, loading it from the data file and caching it on first access.
 */
private synchronized MonotonicBlockPackedReader getIntervalInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  MonotonicBlockPackedReader cached = addressInstances.get(field.name);
  if (cached != null) {
    return cached;
  }
  data.seek(bytes.addressesOffset);
  // One address per interval; round the value count up to whole intervals.
  final long numIntervals = (bytes.count + INTERVAL_MASK) >>> INTERVAL_SHIFT;
  final MonotonicBlockPackedReader loaded =
      MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
  if (!merging) {
    // Cache and account for RAM only outside of merges.
    addressInstances.put(field.name, loaded);
    ramBytesUsed.addAndGet(loaded.ramBytesUsed() + Integer.BYTES);
  }
  return loaded;
}
/**
 * Returns the ordinal-index reader for a sortedset field's ordinal lists.
 * Lazily loaded and cached per field name; synchronized so a single shared
 * instance is created.
 */
private synchronized MonotonicBlockPackedReader getOrdIndexInstance(FieldInfo field, NumericEntry entry) throws IOException {
  MonotonicBlockPackedReader reader = ordIndexInstances.get(field.name);
  if (reader == null) {
    data.seek(entry.offset);
    // entry.count+1 entries: one per document plus the trailing end offset.
    reader = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, entry.count + 1, false);
    if (!merging) {
      // Skip caching/RAM accounting during merges.
      ordIndexInstances.put(field.name, reader);
      ramBytesUsed.addAndGet(reader.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
    }
  }
  return reader;
}
/**
 * Returns the interval address reader for prefix-compressed binary values of
 * {@code field}, loading and caching it on first access.
 */
private synchronized MonotonicBlockPackedReader getIntervalInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  MonotonicBlockPackedReader cached = addressInstances.get(field.name);
  if (cached != null) {
    return cached;
  }
  data.seek(bytes.addressesOffset);
  // One address per interval; round the value count up to whole intervals.
  final long numIntervals = (bytes.count + INTERVAL_MASK) >>> INTERVAL_SHIFT;
  final MonotonicBlockPackedReader loaded =
      MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
  if (!merging) {
    // Cache and account for RAM only outside of merges.
    addressInstances.put(field.name, loaded);
    ramBytesUsed.addAndGet(loaded.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
  }
  return loaded;
}
/**
 * Returns the interval address reader for prefix-compressed binary values.
 * Lazily loaded and cached per field name; synchronized so only one instance
 * is ever created per field.
 */
private synchronized MonotonicBlockPackedReader getIntervalInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  MonotonicBlockPackedReader reader = addressInstances.get(field.name);
  if (reader == null) {
    data.seek(bytes.addressesOffset);
    // Round the value count up to a whole number of intervals.
    final long numIntervals = (bytes.count + INTERVAL_MASK) >>> INTERVAL_SHIFT;
    reader = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
    if (!merging) {
      // Skip caching/RAM accounting during merges.
      addressInstances.put(field.name, reader);
      ramBytesUsed.addAndGet(reader.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
    }
  }
  return reader;
}
/**
 * Returns the interval address reader for prefix-compressed binary values of
 * {@code field}. The first call reads it from the data file; later calls
 * return the cached instance.
 */
private synchronized MonotonicBlockPackedReader getIntervalInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  MonotonicBlockPackedReader cached = addressInstances.get(field.name);
  if (cached != null) {
    return cached;
  }
  data.seek(bytes.addressesOffset);
  // One address per interval of values; round the count up.
  final long numIntervals = (bytes.count + INTERVAL_MASK) >>> INTERVAL_SHIFT;
  final MonotonicBlockPackedReader loaded =
      MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
  if (!merging) {
    // Cache and account for RAM only outside of merges.
    addressInstances.put(field.name, loaded);
    ramBytesUsed.addAndGet(loaded.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
  }
  return loaded;
}
/**
 * Returns the address reader for variable-length binary values of {@code field},
 * reading it from {@code data} and caching it on first access.
 */
private synchronized MonotonicBlockPackedReader getAddressInstance(IndexInput data, FieldInfo field, BinaryEntry bytes) throws IOException {
  MonotonicBlockPackedReader reader = addressInstances.get(field.name);
  if (reader == null) {
    data.seek(bytes.addressesOffset);
    // bytes.count+1 addresses: one per value plus the trailing end offset.
    reader = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, bytes.count + 1, false);
    if (!merging) {
      // Skip caching/RAM accounting during merges.
      addressInstances.put(field.name, reader);
      ramBytesUsed.addAndGet(reader.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
    }
  }
  return reader;
}
/**
 * Returns the interval address reader for prefix-compressed binary values,
 * loading it lazily from the data file and caching it per field name.
 */
private synchronized MonotonicBlockPackedReader getIntervalInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  MonotonicBlockPackedReader reader = addressInstances.get(field.name);
  if (reader == null) {
    data.seek(bytes.addressesOffset);
    // Round the value count up to a whole number of intervals.
    final long numIntervals = (bytes.count + INTERVAL_MASK) >>> INTERVAL_SHIFT;
    reader = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
    if (!merging) {
      // Skip caching/RAM accounting during merges.
      addressInstances.put(field.name, reader);
      ramBytesUsed.addAndGet(reader.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
    }
  }
  return reader;
}
/**
 * Returns the address reader for variable-length binary values of {@code field},
 * reading it from {@code data} and caching it (keyed by field number) on first
 * access.
 * @lucene.internal
 */
protected synchronized MonotonicBlockPackedReader getAddressInstance(IndexInput data, FieldInfo field, BinaryEntry bytes) throws IOException {
  MonotonicBlockPackedReader reader = addressInstances.get(field.number);
  if (reader == null) {
    data.seek(bytes.addressesOffset);
    reader = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, bytes.count, false);
    if (!merging) {
      // Skip caching/RAM accounting during merges.
      addressInstances.put(field.number, reader);
      ramBytesUsed.addAndGet(reader.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
    }
  }
  return reader;
}
/**
 * Returns the ordinal-index reader for a sortedset field's ordinal lists,
 * reading it from {@code data} and caching it on first access.
 */
private synchronized MonotonicBlockPackedReader getOrdIndexInstance(IndexInput data, FieldInfo field, NumericEntry entry) throws IOException {
  MonotonicBlockPackedReader reader = ordIndexInstances.get(field.name);
  if (reader == null) {
    data.seek(entry.offset);
    // entry.count+1 entries: one per document plus the trailing end offset.
    reader = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, entry.count + 1, false);
    if (!merging) {
      // Skip caching/RAM accounting during merges.
      ordIndexInstances.put(field.name, reader);
      ramBytesUsed.addAndGet(reader.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
    }
  }
  return reader;
}
/**
 * Returns the ordinal-index reader for a sortedset field's ordinal lists,
 * reading it from {@code data} and caching it (keyed by field number) on
 * first access.
 * @lucene.internal
 */
protected synchronized MonotonicBlockPackedReader getOrdIndexInstance(IndexInput data, FieldInfo field, NumericEntry entry) throws IOException {
  MonotonicBlockPackedReader reader = ordIndexInstances.get(field.number);
  if (reader == null) {
    data.seek(entry.offset);
    reader = MonotonicBlockPackedReader.of(data, entry.packedIntsVersion, entry.blockSize, entry.count, false);
    if (!merging) {
      // Skip caching/RAM accounting during merges.
      ordIndexInstances.put(field.number, reader);
      ramBytesUsed.addAndGet(reader.ramBytesUsed() + RamUsageEstimator.NUM_BYTES_INT);
    }
  }
  return reader;
}
/**
 * Loads one field's terms index: copies the packed index-term bytes into
 * {@code termBytes} and reads the two monotonic offset readers (offsets into
 * the main terms dictionary, and offsets into the term byte images).
 *
 * @param in               terms-index input; a clone is used and closed here
 * @param termBytes        destination for the raw index-term byte images
 * @param indexStart       file offset where this field's term bytes begin
 * @param termsStart       offset of this field's terms in the terms dict
 * @param packedIndexStart offset where the packed terms-dict offsets begin
 * @param packedOffsetsStart offset of the packed term offsets (unused here,
 *                           kept for signature compatibility)
 * @param numIndexTerms    number of index terms; must be &gt; 0
 * @throws IOException if reading the index fails
 */
public FieldIndexData(IndexInput in, PagedBytes termBytes, long indexStart, long termsStart, long packedIndexStart, long packedOffsetsStart, long numIndexTerms) throws IOException {
  this.termsStart = termsStart;
  termBytesStart = termBytes.getPointer();
  this.numIndexTerms = numIndexTerms;
  assert this.numIndexTerms > 0 : "numIndexTerms=" + numIndexTerms;
  // try-with-resources: the original closed the clone in a finally block but
  // created and seeked it BEFORE the try, leaking it if seek() threw.
  try (IndexInput clone = in.clone()) {
    clone.seek(indexStart);
    // slurp in the images from disk:
    final long numTermBytes = packedIndexStart - indexStart;
    termBytes.copy(clone, numTermBytes);
    // records offsets into main terms dict file
    termsDictOffsets = MonotonicBlockPackedReader.of(clone, packedIntsVersion, blocksize, numIndexTerms, false);
    // records offsets into byte[] term data (numIndexTerms+1 for the end offset)
    termOffsets = MonotonicBlockPackedReader.of(clone, packedIntsVersion, blocksize, 1 + numIndexTerms, false);
  }
}
/**
 * Returns the reverse-lookup index for a prefix-compressed binary field,
 * loading it from the data file and caching it on first access.
 */
private synchronized ReverseTermsIndex getReverseIndexInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  ReverseTermsIndex cached = reverseIndexInstances.get(field.name);
  if (cached != null) {
    return cached;
  }
  final ReverseTermsIndex index = new ReverseTermsIndex();
  data.seek(bytes.reverseIndexOffset);
  // One sampled address per reverse interval; round the count up.
  final long numIntervals = (bytes.count + REVERSE_INTERVAL_MASK) >>> REVERSE_INTERVAL_SHIFT;
  index.termAddresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
  // The term byte images follow, prefixed by their total length.
  final long dataSize = data.readVLong();
  final PagedBytes pagedBytes = new PagedBytes(15);
  pagedBytes.copy(data, dataSize);
  index.terms = pagedBytes.freeze(true);
  if (!merging) {
    // Cache and account for RAM only outside of merges.
    reverseIndexInstances.put(field.name, index);
    ramBytesUsed.addAndGet(index.ramBytesUsed());
  }
  return index;
}
/**
 * Returns the reverse-lookup index for a prefix-compressed binary field.
 * Lazily loaded and cached per field name; synchronized so only one instance
 * is ever created per field.
 */
private synchronized ReverseTermsIndex getReverseIndexInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  ReverseTermsIndex index = reverseIndexInstances.get(field.name);
  if (index == null) {
    index = new ReverseTermsIndex();
    data.seek(bytes.reverseIndexOffset);
    // One sampled address per reverse interval; round the count up.
    final long numIntervals = (bytes.count + REVERSE_INTERVAL_MASK) >>> REVERSE_INTERVAL_SHIFT;
    index.termAddresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
    // The term byte images follow, prefixed by their total length.
    final long termsBytesLength = data.readVLong();
    final PagedBytes termsBytes = new PagedBytes(15);
    termsBytes.copy(data, termsBytesLength);
    index.terms = termsBytes.freeze(true);
    if (!merging) {
      // Skip caching/RAM accounting during merges.
      reverseIndexInstances.put(field.name, index);
      ramBytesUsed.addAndGet(index.ramBytesUsed());
    }
  }
  return index;
}
/**
 * Returns the reverse-lookup index for a prefix-compressed binary field.
 * The first call reads it from the data file; later calls return the cached
 * instance.
 */
private synchronized ReverseTermsIndex getReverseIndexInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  ReverseTermsIndex cached = reverseIndexInstances.get(field.name);
  if (cached != null) {
    return cached;
  }
  final ReverseTermsIndex index = new ReverseTermsIndex();
  data.seek(bytes.reverseIndexOffset);
  // Sampled term addresses: one per reverse interval, count rounded up.
  final long numIntervals = (bytes.count + REVERSE_INTERVAL_MASK) >>> REVERSE_INTERVAL_SHIFT;
  index.termAddresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
  // Term byte images follow, prefixed by their total length.
  final long dataSize = data.readVLong();
  final PagedBytes pagedBytes = new PagedBytes(15);
  pagedBytes.copy(data, dataSize);
  index.terms = pagedBytes.freeze(true);
  if (!merging) {
    // Cache and account for RAM only outside of merges.
    reverseIndexInstances.put(field.name, index);
    ramBytesUsed.addAndGet(index.ramBytesUsed());
  }
  return index;
}
/**
 * Returns the reverse-lookup index for a prefix-compressed binary field,
 * loading and caching it per field name on first access.
 */
private synchronized ReverseTermsIndex getReverseIndexInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  ReverseTermsIndex index = reverseIndexInstances.get(field.name);
  if (index == null) {
    index = new ReverseTermsIndex();
    data.seek(bytes.reverseIndexOffset);
    // One sampled address per reverse interval; round the count up.
    final long numIntervals = (bytes.count + REVERSE_INTERVAL_MASK) >>> REVERSE_INTERVAL_SHIFT;
    index.termAddresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
    // Term byte images follow, prefixed by their total length.
    final long imageBytes = data.readVLong();
    final PagedBytes paged = new PagedBytes(15);
    paged.copy(data, imageBytes);
    index.terms = paged.freeze(true);
    if (!merging) {
      // Skip caching/RAM accounting during merges.
      reverseIndexInstances.put(field.name, index);
      ramBytesUsed.addAndGet(index.ramBytesUsed());
    }
  }
  return index;
}
/**
 * Returns the reverse-lookup index for a prefix-compressed binary field,
 * reading it from the data file the first time it is requested and serving
 * the cached copy thereafter.
 */
private synchronized ReverseTermsIndex getReverseIndexInstance(FieldInfo field, BinaryEntry bytes) throws IOException {
  ReverseTermsIndex cached = reverseIndexInstances.get(field.name);
  if (cached != null) {
    return cached;
  }
  final ReverseTermsIndex index = new ReverseTermsIndex();
  data.seek(bytes.reverseIndexOffset);
  // One sampled address per reverse interval; round the term count up.
  final long numIntervals = (bytes.count + REVERSE_INTERVAL_MASK) >>> REVERSE_INTERVAL_SHIFT;
  index.termAddresses = MonotonicBlockPackedReader.of(data, bytes.packedIntsVersion, bytes.blockSize, numIntervals, false);
  // Term byte images follow, prefixed by their total length.
  final long imageLength = data.readVLong();
  final PagedBytes images = new PagedBytes(15);
  images.copy(data, imageLength);
  index.terms = images.freeze(true);
  if (!merging) {
    // Cache and account for RAM only outside of merges.
    reverseIndexInstances.put(field.name, index);
    ramBytesUsed.addAndGet(index.ramBytesUsed());
  }
  return index;
}
/**
 * Loads the binary doc values for {@code field}: the raw bytes, plus (for
 * variable-length fields) the per-document address reader.
 */
private BytesAndAddresses loadBinary(FieldInfo field) throws IOException {
  final BytesAndAddresses result = new BytesAndAddresses();
  final BinaryEntry entry = binaries.get(field.name);
  final IndexInput input = this.data.clone();
  input.seek(entry.offset);
  // Copy the raw value bytes into paged storage and freeze them read-only.
  final PagedBytes pagedBytes = new PagedBytes(16);
  pagedBytes.copy(input, entry.numBytes);
  result.reader = pagedBytes.freeze(true);
  if (!merging) {
    ramBytesUsed.addAndGet(result.reader.ramBytesUsed());
  }
  if (entry.minLength != entry.maxLength) {
    // Variable-length values: skip entry.missingBytes (presumably the
    // missing-value data -- confirm against the writer), then read one
    // monotonic address per document.
    input.seek(input.getFilePointer() + entry.missingBytes);
    result.addresses = MonotonicBlockPackedReader.of(input, entry.packedIntsVersion, entry.blockSize, maxDoc, false);
    if (!merging) {
      ramBytesUsed.addAndGet(result.addresses.ramBytesUsed());
    }
  }
  return result;
}