/**
 * Compute the number of bits required to serialize any of the ints in
 * <code>data</code>.
 */
private static int bitsRequired(final int[] data) {
  long or = 0;
  for (int i = 0; i < BLOCK_SIZE; ++i) {
    assert data[i] >= 0;
    or |= data[i];
  }
  return PackedInts.bitsRequired(or);
}
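// A standalone sketch of why the OR trick above works (hypothetical values; not
// part of the original class): for non-negative ints, the bit width of a | b is
// the width of the wider operand, so a single OR pass over the block finds the
// number of bits needed by the largest value.
public class OrTrickDemo {
  public static void main(String[] args) {
    int[] data = {3, 17, 250, 9};
    long or = 0;
    for (int v : data) {
      or |= v;
    }
    // or == 251 here; like 250, it needs 8 bits, which is what
    // PackedInts.bitsRequired(or) would return.
    System.out.println(Math.max(1, 64 - Long.numberOfLeadingZeros(or))); // 8
  }
}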
@Override
public void encode(int[] values, int valuesOffset, byte[] blocks, int blocksOffset, int iterations) {
  int nextBlock = 0;
  int bitsLeft = 8;
  for (int i = 0; i < byteValueCount * iterations; ++i) {
    final int v = values[valuesOffset++];
    assert PackedInts.bitsRequired(v & 0xFFFFFFFFL) <= bitsPerValue;
    if (bitsPerValue < bitsLeft) {
      // just buffer
      nextBlock |= v << (bitsLeft - bitsPerValue);
      bitsLeft -= bitsPerValue;
    } else {
      // flush as many blocks as possible
      int bits = bitsPerValue - bitsLeft;
      blocks[blocksOffset++] = (byte) (nextBlock | (v >>> bits));
      while (bits >= 8) {
        bits -= 8;
        blocks[blocksOffset++] = (byte) (v >>> bits);
      }
      // then buffer
      bitsLeft = 8 - bits;
      nextBlock = (v & ((1 << bits) - 1)) << bitsLeft;
    }
  }
  assert bitsLeft == 8;
}
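// A self-contained sketch of the big-endian bit packing performed above, with
// bitsPerValue fixed at 3 and hand-picked values (both assumptions for
// illustration). Values are written MSB-first into successive bytes; a partially
// filled byte is flushed only once full, which is why the encoder above can
// assert bitsLeft == 8 at block boundaries.
public class BytePackingDemo {
  public static void main(String[] args) {
    final int bitsPerValue = 3;
    int[] values = {5, 1, 7, 2, 6, 3, 0, 4}; // 8 * 3 bits = exactly 3 bytes
    byte[] blocks = new byte[3];
    int blocksOffset = 0;
    int nextBlock = 0;
    int bitsLeft = 8;
    for (int v : values) {
      if (bitsPerValue < bitsLeft) {
        nextBlock |= v << (bitsLeft - bitsPerValue); // value fits in the current byte
        bitsLeft -= bitsPerValue;
      } else {
        int bits = bitsPerValue - bitsLeft; // bits that spill into later bytes
        blocks[blocksOffset++] = (byte) (nextBlock | (v >>> bits));
        while (bits >= 8) {
          bits -= 8;
          blocks[blocksOffset++] = (byte) (v >>> bits);
        }
        bitsLeft = 8 - bits;
        nextBlock = (v & ((1 << bits) - 1)) << bitsLeft; // keep the leftover bits
      }
    }
    for (byte b : blocks) {
      System.out.printf("%02x ", b); // prints: a7 ac c4
    }
  }
}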
protected DocValuesFieldUpdates(int maxDoc, long delGen, String field, DocValuesType type) {
  this.maxDoc = maxDoc;
  this.delGen = delGen;
  this.field = field;
  if (type == null) {
    throw new NullPointerException("DocValuesType must not be null");
  }
  this.type = type;
  bitsPerValue = PackedInts.bitsRequired(maxDoc - 1) + SHIFT;
  docs = new PagedMutable(1, PAGE_SIZE, bitsPerValue, PackedInts.COMPACT);
}
/**
 * Returns how many bits are required to hold values up
 * to and including maxValue.
 *
 * @param maxValue the maximum value that should be representable.
 * @return the amount of bits needed to represent values from 0 to maxValue.
 * @see PackedInts#bitsRequired(long)
 */
public static int bitsRequired(long maxValue) {
  return roundBits(PackedInts.bitsRequired(maxValue));
}
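// roundBits is not shown in this excerpt. In DirectWriter-style code it rounds
// the exact bit count up to the nearest width the on-disk format supports. A
// hypothetical sketch, assuming the supported widths below (an assumption based
// on common packed-ints layouts, not taken from the original file):
static final int[] SUPPORTED_BITS_PER_VALUE =
    {1, 2, 4, 8, 12, 16, 20, 24, 28, 32, 40, 48, 56, 64};

static int roundBits(int bitsRequired) {
  int index = java.util.Arrays.binarySearch(SUPPORTED_BITS_PER_VALUE, bitsRequired);
  // binarySearch returns -(insertionPoint) - 1 when the exact width is absent,
  // so -index - 1 points at the next larger supported width
  return index < 0 ? SUPPORTED_BITS_PER_VALUE[-index - 1] : bitsRequired;
}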
final int bitsPerDocId = PackedInts.bitsRequired(maxDoc - 1);
new MSBRadixSorter(packedBytesLength + (bitsPerDocId + 7) / 8) {
final int offset = splitDim * bytesPerDim + commonPrefixLen;
final int cmpBytes = bytesPerDim - commonPrefixLen;
final int bitsPerDocId = PackedInts.bitsRequired(maxDoc - 1);
new RadixSelector(cmpBytes + (bitsPerDocId + 7) / 8) {
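// In both radix fragments above, (bitsPerDocId + 7) / 8 is integer ceiling
// division: the number of whole bytes needed to append the doc ID to the sort
// key. A quick check (hypothetical maxDoc, not from the original code):
public class DocIdBytesDemo {
  public static void main(String[] args) {
    int maxDoc = 1_000_000;
    int bitsPerDocId = 64 - Long.numberOfLeadingZeros(maxDoc - 1); // 20 bits
    System.out.println((bitsPerDocId + 7) / 8); // 3 bytes per doc ID
  }
}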
private void flushFields(int totalFields, int[] fieldNums) throws IOException {
  final PackedInts.Writer writer = PackedInts.getWriterNoHeader(vectorsStream,
      PackedInts.Format.PACKED, totalFields, PackedInts.bitsRequired(fieldNums.length - 1), 1);
  for (DocData dd : pendingDocs) {
    for (FieldData fd : dd.fields) {
      final int fieldNumIndex = Arrays.binarySearch(fieldNums, fd.fieldNum);
      assert fieldNumIndex >= 0;
      writer.add(fieldNumIndex);
    }
  }
  writer.finish();
}
void pack(long[] values, int numValues, int block, float acceptableOverheadRatio) {
  assert numValues > 0;
  // compute min and max values
  long minValue = values[0];
  long maxValue = values[0];
  for (int i = 1; i < numValues; ++i) {
    minValue = Math.min(minValue, values[i]);
    maxValue = Math.max(maxValue, values[i]);
  }

  // build a new packed reader
  if (minValue == 0 && maxValue == 0) {
    this.values[block] = new PackedInts.NullReader(numValues);
  } else {
    final int bitsRequired = minValue < 0 ? 64 : PackedInts.bitsRequired(maxValue);
    final PackedInts.Mutable mutable = PackedInts.getMutable(numValues, bitsRequired, acceptableOverheadRatio);
    for (int i = 0; i < numValues; ) {
      i += mutable.set(i, values, i, numValues - i);
    }
    this.values[block] = mutable;
  }
}
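// A minimal usage sketch of the overhead trade-off passed through above
// (assumes Lucene's packed-ints API on the classpath; the value count and bit
// width are arbitrary): COMPACT picks the smallest storage format at the cost
// of slower reads, while larger ratios allow padding bits in exchange for speed.
import org.apache.lucene.util.packed.PackedInts;

public class PackDemo {
  public static void main(String[] args) {
    PackedInts.Mutable m = PackedInts.getMutable(128, 13, PackedInts.COMPACT);
    m.set(0, 4242);
    System.out.println(m.get(0) + " stored at " + m.getBitsPerValue() + " bits/value");
  }
}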
void reset(int len) {
  final int bitsPerOffset = PackedInts.bitsRequired(len - LAST_LITERALS);
  final int bitsPerOffsetLog = 32 - Integer.numberOfLeadingZeros(bitsPerOffset - 1);
  hashLog = MEMORY_USAGE + 3 - bitsPerOffsetLog;
  if (hashTable == null || hashTable.size() < 1 << hashLog || hashTable.getBitsPerValue() < bitsPerOffset) {
    hashTable = PackedInts.getMutable(1 << hashLog, bitsPerOffset, PackedInts.DEFAULT);
  } else {
    hashTable.clear();
  }
}
/** Returns a sorted array containing unique field numbers */
private int[] flushFieldNums() throws IOException {
  SortedSet<Integer> fieldNums = new TreeSet<>();
  for (DocData dd : pendingDocs) {
    for (FieldData fd : dd.fields) {
      fieldNums.add(fd.fieldNum);
    }
  }

  final int numDistinctFields = fieldNums.size();
  assert numDistinctFields > 0;
  final int bitsRequired = PackedInts.bitsRequired(fieldNums.last());
  final int token = (Math.min(numDistinctFields - 1, 0x07) << 5) | bitsRequired;
  vectorsStream.writeByte((byte) token);
  if (numDistinctFields - 1 >= 0x07) {
    vectorsStream.writeVInt(numDistinctFields - 1 - 0x07);
  }
  final PackedInts.Writer writer = PackedInts.getWriterNoHeader(vectorsStream,
      PackedInts.Format.PACKED, fieldNums.size(), bitsRequired, 1);
  for (Integer fieldNum : fieldNums) {
    writer.add(fieldNum);
  }
  writer.finish();

  int[] fns = new int[fieldNums.size()];
  int i = 0;
  for (Integer key : fieldNums) {
    fns[i++] = key;
  }
  return fns;
}
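// A minimal decode sketch for the token byte written above (an illustration,
// not the matching reader from the original codec): the high 3 bits carry
// min(numDistinctFields - 1, 7) and the low 5 bits carry bitsRequired, which
// always fits because a non-negative int needs at most 31 bits.
public class TokenDemo {
  public static void main(String[] args) {
    int numDistinctFields = 12;
    int bitsRequired = 9;
    int token = (Math.min(numDistinctFields - 1, 0x07) << 5) | bitsRequired;
    int fieldsMinusOne = (token >>> 5) & 0x07; // 7 means "a VInt with the rest follows"
    int bits = token & 0x1F;
    System.out.println(fieldsMinusOne + " " + bits); // prints: 7 9
  }
}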
protected void flush() throws IOException {
  assert off > 0;

  final float avg = off == 1 ? 0f : (float) (values[off - 1] - values[0]) / (off - 1);
  long min = values[0];
  // adjust min so that all deltas will be positive
  for (int i = 1; i < off; ++i) {
    final long actual = values[i];
    final long expected = expected(min, avg, i);
    if (expected > actual) {
      min -= (expected - actual);
    }
  }

  long maxDelta = 0;
  for (int i = 0; i < off; ++i) {
    values[i] = values[i] - expected(min, avg, i);
    maxDelta = Math.max(maxDelta, values[i]);
  }

  out.writeZLong(min);
  out.writeInt(Float.floatToIntBits(avg));
  if (maxDelta == 0) {
    out.writeVInt(0);
  } else {
    final int bitsRequired = PackedInts.bitsRequired(maxDelta);
    out.writeVInt(bitsRequired);
    writeValues(bitsRequired);
  }

  off = 0;
}
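// expected() is not shown in this excerpt; in monotonic block packing it is
// typically the linear prediction below (a sketch, assuming Lucene's usual
// formula), so only the small residuals around the fitted line get bit-packed:
static long expected(long origin, float average, int index) {
  return origin + (long) (average * (long) index);
}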
private void rehash() throws IOException {
  final PagedGrowableWriter oldTable = table;

  table = new PagedGrowableWriter(2 * oldTable.size(), 1 << 30, PackedInts.bitsRequired(count), PackedInts.COMPACT);
  mask = table.size() - 1;
  for (long idx = 0; idx < oldTable.size(); idx++) {
    final long address = oldTable.get(idx);
    if (address != 0) {
      addNew(address);
    }
  }
}
private void flushNumTerms(int totalFields) throws IOException {
  int maxNumTerms = 0;
  for (DocData dd : pendingDocs) {
    for (FieldData fd : dd.fields) {
      maxNumTerms |= fd.numTerms;
    }
  }
  final int bitsRequired = PackedInts.bitsRequired(maxNumTerms);
  vectorsStream.writeVInt(bitsRequired);
  final PackedInts.Writer writer = PackedInts.getWriterNoHeader(
      vectorsStream, PackedInts.Format.PACKED, totalFields, bitsRequired, 1);
  for (DocData dd : pendingDocs) {
    for (FieldData fd : dd.fields) {
      writer.add(fd.numTerms);
    }
  }
  assert writer.ord() == totalFields - 1;
  writer.finish();
}
/**
 * Build a {@link DocIdSet} from the accumulated doc IDs.
 */
public DocIdSet build() {
  try {
    if (bitSet != null) {
      assert counter >= 0;
      final long cost = Math.round(counter / numValuesPerDoc);
      return new BitDocIdSet(bitSet, cost);
    } else {
      Buffer concatenated = concat(buffers);
      LSBRadixSorter sorter = new LSBRadixSorter();
      sorter.sort(PackedInts.bitsRequired(maxDoc - 1), concatenated.array, concatenated.length);
      final int l;
      if (multivalued) {
        l = dedup(concatenated.array, concatenated.length);
      } else {
        assert noDups(concatenated.array, concatenated.length);
        l = concatenated.length;
      }
      assert l <= concatenated.length;
      concatenated.array[l] = DocIdSetIterator.NO_MORE_DOCS;
      return new IntArrayDocIdSet(concatenated.array, l);
    }
  } finally {
    this.buffers = null;
    this.bitSet = null;
  }
}
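// Why bitsRequired(maxDoc - 1) is passed to the sorter above: an LSB radix sort
// only needs enough passes to cover the highest possible doc ID. A compact
// sketch of the idea (a hypothetical helper using 8 bits per pass, not the
// original LSBRadixSorter implementation):
import java.util.Arrays;

public class LsbRadixDemo {
  static void sort(int numBits, int[] a, int len) {
    int[] buf = new int[len];
    for (int shift = 0; shift < numBits; shift += 8) {
      int[] counts = new int[257];
      for (int i = 0; i < len; ++i) {
        counts[((a[i] >>> shift) & 0xFF) + 1]++; // histogram of the current byte
      }
      for (int i = 1; i < counts.length; ++i) {
        counts[i] += counts[i - 1]; // prefix sums give output positions
      }
      for (int i = 0; i < len; ++i) {
        buf[counts[(a[i] >>> shift) & 0xFF]++] = a[i]; // stable scatter
      }
      System.arraycopy(buf, 0, a, 0, len);
    }
  }

  public static void main(String[] args) {
    int[] docs = {42, 7, 999, 0, 512};
    sort(10, docs, docs.length); // maxDoc 1000 -> bitsRequired(999) == 10
    System.out.println(Arrays.toString(docs)); // [0, 7, 42, 512, 999]
  }
}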
final int bitsPerDocBase = PackedInts.bitsRequired(maxDelta);
fieldsIndexOut.writeVInt(bitsPerDocBase);
PackedInts.Writer writer = PackedInts.getWriterNoHeader(fieldsIndexOut,
    PackedInts.Format.PACKED, blockChunks, bitsPerDocBase, 1);
for (int i = 0; i < blockChunks; ++i) {
  final long delta = docBase - avgChunkDocs * i;
  assert PackedInts.bitsRequired(zigZagEncode(delta)) <= writer.bitsPerValue();
  writer.add(zigZagEncode(delta));
  docBase += docBaseDeltas[i];
}
writer.finish();

final int bitsPerStartPointer = PackedInts.bitsRequired(maxDelta);
fieldsIndexOut.writeVInt(bitsPerStartPointer);
writer = PackedInts.getWriterNoHeader(fieldsIndexOut, PackedInts.Format.PACKED,
    blockChunks, bitsPerStartPointer, 1);
for (int i = 0; i < blockChunks; ++i) {
  startPointer += startPointerDeltas[i];
  final long delta = startPointer - avgChunkSize * i;
  assert PackedInts.bitsRequired(zigZagEncode(delta)) <= writer.bitsPerValue();
  writer.add(zigZagEncode(delta));
}
writer.finish();
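// zigZagEncode maps signed deltas to unsigned values so that small negative
// deltas stay small once packed: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ... The
// excerpt calls its own helper; this is the standard formula it corresponds to
// (a self-contained sketch, not the original class):
public class ZigZagDemo {
  static long zigZagEncode(long l) {
    return (l >> 63) ^ (l << 1);
  }

  static long zigZagDecode(long l) {
    return (l >>> 1) ^ -(l & 1);
  }

  public static void main(String[] args) {
    for (long v : new long[] {0, -1, 1, -2, 2}) {
      System.out.println(v + " -> " + zigZagEncode(v) + " -> " + zigZagDecode(zigZagEncode(v)));
    }
  }
}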
private static void saveInts(int[] values, int length, DataOutput out) throws IOException {
  assert length > 0;
  if (length == 1) {
    out.writeVInt(values[0]);
  } else {
    boolean allEqual = true;
    for (int i = 1; i < length; ++i) {
      if (values[i] != values[0]) {
        allEqual = false;
        break;
      }
    }
    if (allEqual) {
      out.writeVInt(0);
      out.writeVInt(values[0]);
    } else {
      long max = 0;
      for (int i = 0; i < length; ++i) {
        max |= values[i];
      }
      final int bitsRequired = PackedInts.bitsRequired(max);
      out.writeVInt(bitsRequired);
      final PackedInts.Writer w = PackedInts.getWriterNoHeader(out, PackedInts.Format.PACKED, length, bitsRequired, 1);
      for (int i = 0; i < length; ++i) {
        w.add(values[i]);
      }
      w.finish();
    }
  }
}
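// Note on the writeVInt(0) marker above: it cannot collide with a real bit
// count, because PackedInts.bitsRequired returns at least 1 even when every
// value is 0. A reader that sees 0 therefore knows to read a single repeated
// value instead of a packed block. (Observation about the API contract, not
// code from the original file.)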
final int h = hash(v, hashLog);
ref = base + (int) hashTable.get(h);
assert PackedInts.bitsRequired(off - base) <= hashTable.getBitsPerValue();
hashTable.set(h, off - base);
if (off - ref < MAX_DISTANCE && readInt(bytes, ref) == v) {
public SinglePackedOrdinals(OrdinalsBuilder builder, float acceptableOverheadRatio) {
  assert builder.getNumMultiValuesDocs() == 0;
  this.valueCount = (int) builder.getValueCount();
  // We don't reuse the builder as-is because it might have been built with a higher overhead ratio
  final PackedInts.Mutable reader = PackedInts.getMutable(builder.maxDoc(),
      PackedInts.bitsRequired(valueCount), acceptableOverheadRatio);
  PackedInts.copy(builder.getFirstOrdinals(), 0, reader, 0, builder.maxDoc(), 8 * 1024);
  this.reader = reader;
}
final int bitsRequired = ordDeltaBits[i] < 0 ? 64 : PackedInts.bitsRequired(ordDeltaBits[i]);
final long monotonicBits = deltas.ramBytesUsed() * 8;
final long packedBits = bitsRequired * deltas.size();
final PackedInts.Reader flags;
final int bitsPerOff = PackedInts.bitsRequired(fieldNums.length - 1);
final PackedInts.Reader allFieldNumOffs = PackedInts.getReaderNoHeader(vectorsStream,
    PackedInts.Format.PACKED, packedIntsVersion, totalFields, bitsPerOff);
switch (vectorsStream.readVInt()) {