private void rehash() throws IOException {
  // Double the table and re-insert every occupied slot; entry width is sized
  // for the largest value we may store (node addresses bounded by count here).
  final PagedGrowableWriter oldTable = table;
  table = new PagedGrowableWriter(2 * oldTable.size(), 1 << 30, PackedInts.bitsRequired(count), PackedInts.COMPACT);
  mask = table.size() - 1;
  for (long slot = 0; slot < oldTable.size(); slot++) {
    final long address = oldTable.get(slot);
    if (address != 0) {
      // 0 marks an empty slot, so only non-zero addresses are re-hashed.
      addNew(address);
    }
  }
}
}
@Override
protected PagedGrowableWriter newUnfilledCopy(long newSize) {
  // New writer of the requested size, carrying over page size, bits-per-value
  // and overhead ratio; pages are left unfilled (false).
  final PagedGrowableWriter copy =
      new PagedGrowableWriter(newSize, pageSize(), bitsPerValue, acceptableOverheadRatio, false);
  return copy;
}
@Override public void merge(DocValuesFieldUpdates other) { BinaryDocValuesFieldUpdates otherUpdates = (BinaryDocValuesFieldUpdates) other; if (otherUpdates.size > Integer.MAX_VALUE - size) { throw new IllegalStateException( "cannot support more than Integer.MAX_VALUE doc/value entries; size=" + size + " other.size=" + otherUpdates.size); } final int newSize = size + otherUpdates.size; docs = docs.grow(newSize); offsets = offsets.grow(newSize); lengths = lengths.grow(newSize); for (int i = 0; i < otherUpdates.size; i++) { int doc = (int) otherUpdates.docs.get(i); docs.set(size, doc); offsets.set(size, values.length() + otherUpdates.offsets.get(i)); // correct relative offset lengths.set(size, otherUpdates.lengths.get(i)); ++size; } values.append(otherUpdates.values); }
private int firstLevel(int docID, long ordinal) { // 0 or 1 ordinal if (firstOrdinals.get(docID) == 0L) { firstOrdinals.set(docID, ordinal + 1); return 1; } else { final long newSlice = newSlice(1); if (firstNextLevelSlices == null) { firstNextLevelSlices = new PagedGrowableWriter(firstOrdinals.size(), PAGE_SIZE, 3, acceptableOverheadRatio); } firstNextLevelSlices.set(docID, newSlice); final long offset = startOffset(1, newSlice); ordinals[1].set(offset, ordinal + 1); positions.set(docID, position(1, offset)); // current position is on the 1st level and not allocated yet return 2; } }
public long add(Builder<T> builder, Builder.UnCompiledNode<T> nodeIn) throws IOException { //System.out.println("hash: add count=" + count + " vs " + table.size() + " mask=" + mask); final long h = hash(nodeIn); long pos = h & mask; int c = 0; while(true) { final long v = table.get(pos); if (v == 0) { // freeze & add final long node = fst.addNode(builder, nodeIn); //System.out.println(" now freeze node=" + node); assert hash(node) == h : "frozenHash=" + hash(node) + " vs h=" + h; count++; table.set(pos, node); // Rehash at 2/3 occupancy: if (count > 2*table.size()/3) { rehash(); } return node; } else if (nodesEqual(nodeIn, v)) { // same node is already here return v; } // quadratic probe pos = (pos + (++c)) & mask; } }
/** * Allocate a new slice and return its ID. */ private long newSlice(int level) { final long newSlice = sizes[level]++; // Lazily allocate ordinals if (ordinals[level] == null) { ordinals[level] = new PagedGrowableWriter(8L * numSlots(level), PAGE_SIZE, startBitsPerValue, acceptableOverheadRatio); } else { ordinals[level] = ordinals[level].grow(sizes[level] * numSlots(level)); if (nextLevelSlices[level] != null) { nextLevelSlices[level] = nextLevelSlices[level].grow(sizes[level]); } } return newSlice; }
public int addOrdinal(int docID, long ordinal) { final long position = positions.get(docID); final long newSlice = newSlice(1); if (firstNextLevelSlices == null) { firstNextLevelSlices = new PagedGrowableWriter(firstOrdinals.size(), PAGE_SIZE, 3, acceptableOverheadRatio); firstNextLevelSlices.set(docID, newSlice); final long offset = startOffset(1, newSlice); ordinals[1].set(offset, ordinal + 1); positions.set(docID, position(1, offset)); // current position is on the 1st level and not allocated yet return 2; nextLevelSlices[level] = new PagedGrowableWriter(sizes[level], PAGE_SIZE, 1, acceptableOverheadRatio); nextLevelSlices[level].set(sliceID(level, offset), newSlice); ++level; offset = startOffset(level, newSlice); ordinals[level].set(offset, ordinal + 1); final long newPosition = position(level, offset); positions.set(docID, newPosition); return numOrdinals(level, offset);
public NodeHash(FST<T> fst, FST.BytesReader in) {
  this.fst = fst;
  this.in = in;
  // Start with a 16-slot table; mask = size - 1 relies on a power-of-two size.
  table = new PagedGrowableWriter(16, 1 << 27, 8, PackedInts.COMPACT);
  mask = 15;
}
@Override public void add(int doc, Object value) { // TODO: if the Sorter interface changes to take long indexes, we can remove that limitation if (size == Integer.MAX_VALUE) { throw new IllegalStateException("cannot support more than Integer.MAX_VALUE doc/value entries"); } BytesRef val = (BytesRef) value; // grow the structures to have room for more elements if (docs.size() == size) { docs = docs.grow(size + 1); offsets = offsets.grow(size + 1); lengths = lengths.grow(size + 1); } docs.set(size, doc); offsets.set(size, values.length()); lengths.set(size, val.length); values.append(val); ++size; }
@Override
synchronized void add(int doc, long value) {
  // Reserve a slot for this doc, then store the value there.
  final int slot = add(doc);
  values.set(slot, value);
}
@Override
public long ramBytesUsed() {
  // Parent accounting plus the values buffer, one long field and one object reference.
  long bytes = super.ramBytesUsed();
  bytes += values.ramBytesUsed();
  bytes += Long.BYTES;
  bytes += RamUsageEstimator.NUM_BYTES_OBJECT_REF;
  return bytes;
}
}
// Package-private constructor: lets callers skip page allocation (fillPages=false),
// e.g. when the pages are about to be overwritten anyway.
PagedGrowableWriter(long size, int pageSize, int startBitsPerValue, float acceptableOverheadRatio, boolean fillPages) {
  super(startBitsPerValue, size, pageSize);
  this.acceptableOverheadRatio = acceptableOverheadRatio;
  if (fillPages) {
    fillPages();
  }
}
@Override
public void merge(DocValuesFieldUpdates other) {
  // Append the other instance's (doc, value) pairs after ours.
  assert other instanceof NumericDocValuesFieldUpdates;
  NumericDocValuesFieldUpdates otherUpdates = (NumericDocValuesFieldUpdates) other;
  if (otherUpdates.size > Integer.MAX_VALUE - size) {
    throw new IllegalStateException(
        "cannot support more than Integer.MAX_VALUE doc/value entries; size=" + size + " other.size=" + otherUpdates.size);
  }
  final int combined = size + otherUpdates.size;
  docs = docs.grow(combined);
  values = values.grow(combined);
  for (int i = 0; i < otherUpdates.size; i++, ++size) {
    docs.set(size, (int) otherUpdates.docs.get(i));
    values.set(size, otherUpdates.values.get(i));
  }
}
private int nonFirstLevel(int docID, long ordinal, long position) { int level = level(position); long offset = offset(position, level); assert offset != 0L; if (((offset + 1) & slotsMask(level)) == 0L) { // reached the end of the slice, allocate a new one on the next level final long newSlice = newSlice(level + 1); if (nextLevelSlices[level] == null) { nextLevelSlices[level] = new PagedGrowableWriter(sizes[level], PAGE_SIZE, 1, acceptableOverheadRatio); } nextLevelSlices[level].set(sliceID(level, offset), newSlice); ++level; offset = startOffset(level, newSlice); assert (offset & slotsMask(level)) == 0L; } else { // just go to the next slot ++offset; } ordinals[level].set(offset, ordinal + 1); final long newPosition = position(level, offset); positions.set(docID, newPosition); return numOrdinals(level, offset); }
public long add(Builder<T> builder, Builder.UnCompiledNode<T> nodeIn) throws IOException { //System.out.println("hash: add count=" + count + " vs " + table.size() + " mask=" + mask); final long h = hash(nodeIn); long pos = h & mask; int c = 0; while(true) { final long v = table.get(pos); if (v == 0) { // freeze & add final long node = fst.addNode(builder, nodeIn); //System.out.println(" now freeze node=" + node); assert hash(node) == h : "frozenHash=" + hash(node) + " vs h=" + h; count++; table.set(pos, node); // Rehash at 2/3 occupancy: if (count > 2*table.size()/3) { rehash(); } return node; } else if (nodesEqual(nodeIn, v)) { // same node is already here return v; } // quadratic probe pos = (pos + (++c)) & mask; } }
/** * Allocate a new slice and return its ID. */ private long newSlice(int level) { final long newSlice = sizes[level]++; // Lazily allocate ordinals if (ordinals[level] == null) { ordinals[level] = new PagedGrowableWriter(8L * numSlots(level), PAGE_SIZE, startBitsPerValue, acceptableOverheadRatio); } else { ordinals[level] = ordinals[level].grow(sizes[level] * numSlots(level)); if (nextLevelSlices[level] != null) { nextLevelSlices[level] = nextLevelSlices[level].grow(sizes[level]); } } return newSlice; }
@Override
protected void set(long idx) {
  // Capture the value stored at idx into the pivot field.
  value = values.get(idx);
}
}