/**
 * Returns a copy of this writer with the given value count, carrying over as
 * many of the existing values as fit.
 *
 * @param newSize the value count of the returned writer
 * @return a new {@link GrowableWriter} containing the first
 *         {@code min(size(), newSize)} values of this writer
 */
public GrowableWriter resize(int newSize) {
  final GrowableWriter resized =
      new GrowableWriter(getBitsPerValue(), newSize, acceptableOverheadRatio);
  // Only copy the values that still fit into the resized writer.
  final int valuesToCopy = Math.min(size(), newSize);
  PackedInts.copy(current, 0, resized, 0, valuesToCopy, PackedInts.DEFAULT_BUFFER_SIZE);
  return resized;
}
/**
 * Stores {@code value} at {@code index}, first widening the backing storage
 * if the value needs more bits per value than currently allocated.
 */
@Override
public void set(int index, long value) {
  // May replace 'current' with a wider mutable before writing.
  ensureCapacity(value);
  current.set(index, value);
}
/**
 * Widens the backing storage so that {@code value} can be stored without
 * truncation. No-op when the value already fits within the current bits per
 * value (checked cheaply via {@code currentMask}).
 */
private void ensureCapacity(long value) {
  if ((value & currentMask) == value) {
    // Value fits in the current bits per value — nothing to do.
    return;
  }
  final int bitsRequired = PackedInts.unsignedBitsRequired(value);
  assert bitsRequired > current.getBitsPerValue();
  final int valueCount = size();
  // Allocate a wider mutable and migrate every existing value into it.
  final PackedInts.Mutable wider =
      PackedInts.getMutable(valueCount, bitsRequired, acceptableOverheadRatio);
  PackedInts.copy(current, 0, wider, 0, valueCount, PackedInts.DEFAULT_BUFFER_SIZE);
  current = wider;
  currentMask = mask(current.getBitsPerValue());
}
private int firstLevel(int docID, long ordinal) { // 0 or 1 ordinal if (firstOrdinals.get(docID) == 0L) { firstOrdinals.set(docID, ordinal + 1); return 1; } else { final long newSlice = newSlice(1); if (firstNextLevelSlices == null) { firstNextLevelSlices = new PagedGrowableWriter(firstOrdinals.size(), PAGE_SIZE, 3, acceptableOverheadRatio); } firstNextLevelSlices.set(docID, newSlice); final long offset = startOffset(1, newSlice); ordinals[1].set(offset, ordinal + 1); positions.set(docID, position(1, offset)); // current position is on the 1st level and not allocated yet return 2; } }
// NOTE(review): this fragment has unbalanced braces in this view (it opens
// blocks it never closes and ends mid-statement at "if (delta < 0) {"), and
// appears to interleave two unrelated pieces of FST-building code: top-N
// in-count node selection via a priority queue, and node-address rewriting
// with error correction. Presumably cut/garbled from the original FST
// builder — verify against the full source before editing.
final int topN = Math.min(maxDerefNodes, inCounts.size()); for(int node=0; node<inCounts.size(); node++) { if (inCounts.get(node) >= minInCountDeref) { if (bottom == null) { q.add(new NodeAndInCount(node, (int) inCounts.get(node))); if (q.size() == topN) { bottom = q.top(); } else if (inCounts.get(node) > bottom.count) { q.insertWithOverflow(new NodeAndInCount(node, (int) inCounts.get(node))); final GrowableWriter newNodeAddress = new GrowableWriter( PackedInts.bitsRequired(builder.bytes.getPosition()), (int) (1 + builder.nodeCount), acceptableOverheadRatio); newNodeAddress.set(node, 1 + builder.bytes.getPosition() - nodeAddress.get(node)); if (address != newNodeAddress.get(node)) { addressError = address - newNodeAddress.get(node); newNodeAddress.set(node, address); changedCount++; absPtr = ptr; } else { absPtr = topNodeMap.size() + newNodeAddress.get((int) arc.target) + addressError; long delta = newNodeAddress.get((int) arc.target) + addressError - writer.getPosition() - 2; if (delta < 0) {
// NOTE(review): fragment starting and ending mid-method; the leading '}'
// closes a block opened outside this view. It mixes arc-flag handling with
// in-count tracking and growable-array resizing — presumably FST node
// freezing code; confirm against the full source. The resize uses
// ArrayUtil.oversize to amortize growth when nodeCount reaches capacity.
flags += BIT_STOP_NODE; } else if (inCounts != null) { inCounts.set((int) target.node, inCounts.get((int) target.node) + 1); if ((int) builder.nodeCount == nodeAddress.size()) { nodeAddress = nodeAddress.resize(ArrayUtil.oversize(nodeAddress.size() + 1, nodeAddress.getBitsPerValue())); inCounts = inCounts.resize(ArrayUtil.oversize(inCounts.size() + 1, inCounts.getBitsPerValue())); nodeAddress.set((int) builder.nodeCount, thisNodeAddress);
// NOTE(review): fragment cut from a larger uninversion method; docID,
// termOrd, maxDoc, startTermsBPV, bytes and termOrdToBytesOffset are defined
// outside this view. Ordinals are stored as termOrd + 1 so that 0 can denote
// a document with no value.
final GrowableWriter docToTermOrd = new GrowableWriter(startTermsBPV, maxDoc, acceptableOverheadRatio); docToTermOrd.set(docID, 1+termOrd); return new SortedDocValuesImpl(bytes.freeze(true), termOrdToBytesOffset.build(), docToTermOrd.getMutable(), termOrd);
/**
 * Creates a page backed by a {@link GrowableWriter} so that each page can
 * independently grow its bits per value on demand.
 */
@Override
protected Mutable newMutable(int valueCount, int bitsPerValue) {
  return new GrowableWriter(bitsPerValue, valueCount, acceptableOverheadRatio);
}
@Override public void visitTerm(BytesRef term) { currentValue = parser.parseValue(term); if (values == null) { // Lazy alloc so for the numeric field case // (which will hit a NumberFormatException // when we first try the DEFAULT_INT_PARSER), // we don't double-alloc: int startBitsPerValue; // Make sure than missing values (0) can be stored without resizing if (currentValue < 0) { minValue = currentValue; startBitsPerValue = minValue == Long.MIN_VALUE ? 64 : PackedInts.bitsRequired(-minValue); } else { minValue = 0; startBitsPerValue = PackedInts.bitsRequired(currentValue); } values = new GrowableWriter(startBitsPerValue, reader.maxDoc(), PackedInts.FAST); if (minValue != 0) { values.fill(0, values.size(), -minValue); // default value must be 0 } valuesRef.set(new GrowableWriterAndMinValue(values, minValue)); } }
/**
 * For the version field, uninverts per-document versions (carried in the
 * posting payloads of the uid field) into packed ints and exposes them as
 * numeric doc values; any other field is delegated to the wrapped reader.
 */
@Override public NumericDocValues getNumeric(FieldInfo field) throws IOException {
  if (VersionFieldMapper.NAME.equals(field.name)) {
    // uninvert into a packed ints and expose as docvalues
    final Terms terms = reader.terms(UidFieldMapper.NAME);
    final TermsEnum uids = terms.iterator();
    final GrowableWriter versions = new GrowableWriter(2, reader.maxDoc(), PackedInts.COMPACT);
    // Reuse the postings enum across terms to avoid re-allocation.
    PostingsEnum dpe = null;
    for (BytesRef uid = uids.next(); uid != null; uid = uids.next()) {
      dpe = uids.postings(dpe, PostingsEnum.PAYLOADS);
      assert terms.hasPayloads() : "field has payloads";
      final Bits liveDocs = reader.getLiveDocs();
      for (int doc = dpe.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = dpe.nextDoc()) {
        // Skip deleted documents.
        if (liveDocs != null && liveDocs.get(doc) == false) {
          continue;
        }
        dpe.nextPosition();
        final BytesRef payload = dpe.getPayload();
        if (payload != null && payload.length == 8) {
          // An 8-byte payload holds the version; record it and stop
          // scanning postings for this uid term.
          // NOTE(review): presumably each uid term has at most one live
          // doc, so breaking after the first match is safe — confirm.
          final long version = Numbers.bytesToLong(payload);
          versions.set(doc, version);
          break;
        }
      }
    }
    return versions;
  } else {
    return in.getNumeric(field);
  }
}
private long getNodeAddress(long node) { if (nodeAddress != null) { // Deref return nodeAddress.get((int) node); } else { // Straight return node; } }
/**
 * Creates a writer that starts at {@code startBitsPerValue} bits per value
 * and widens automatically as larger values are stored.
 *
 * @param startBitsPerValue the initial number of bits per value, may grow depending on the data
 * @param valueCount the number of values
 * @param acceptableOverheadRatio an acceptable overhead ratio
 */
public GrowableWriter(int startBitsPerValue, int valueCount, float acceptableOverheadRatio) {
  this.acceptableOverheadRatio = acceptableOverheadRatio;
  current = PackedInts.getMutable(valueCount, startBitsPerValue, acceptableOverheadRatio);
  currentMask = mask(current.getBitsPerValue());
}
/**
 * Records the most recently parsed value for {@code docID}, shifted by
 * {@code minValue} so that unset slots (0) represent the default.
 */
@Override
public void visitDoc(int docID) {
  values.set(docID, currentValue - minValue);
}
private int firstLevel(int docID, long ordinal) { // 0 or 1 ordinal if (firstOrdinals.get(docID) == 0L) { firstOrdinals.set(docID, ordinal + 1); return 1; } else { final long newSlice = newSlice(1); if (firstNextLevelSlices == null) { firstNextLevelSlices = new PagedGrowableWriter(firstOrdinals.size(), PAGE_SIZE, 3, acceptableOverheadRatio); } firstNextLevelSlices.set(docID, newSlice); final long offset = startOffset(1, newSlice); ordinals[1].set(offset, ordinal + 1); positions.set(docID, position(1, offset)); // current position is on the 1st level and not allocated yet return 2; } }
// NOTE(review): this fragment has unbalanced braces in this view (it opens
// blocks it never closes and ends mid-statement at "if (delta < 0) {"), and
// appears to interleave two unrelated pieces of FST-building code: top-N
// in-count node selection via a priority queue, and node-address rewriting
// with error correction. Presumably cut/garbled from the original FST
// builder — verify against the full source before editing.
final int topN = Math.min(maxDerefNodes, inCounts.size()); for(int node=0; node<inCounts.size(); node++) { if (inCounts.get(node) >= minInCountDeref) { if (bottom == null) { q.add(new NodeAndInCount(node, (int) inCounts.get(node))); if (q.size() == topN) { bottom = q.top(); } else if (inCounts.get(node) > bottom.count) { q.insertWithOverflow(new NodeAndInCount(node, (int) inCounts.get(node))); final GrowableWriter newNodeAddress = new GrowableWriter( PackedInts.bitsRequired(builder.bytes.getPosition()), (int) (1 + builder.nodeCount), acceptableOverheadRatio); newNodeAddress.set(node, 1 + builder.bytes.getPosition() - nodeAddress.get(node)); if (address != newNodeAddress.get(node)) { addressError = address - newNodeAddress.get(node); newNodeAddress.set(node, address); changedCount++; absPtr = ptr; } else { absPtr = topNodeMap.size() + newNodeAddress.get((int) arc.target) + addressError; long delta = newNodeAddress.get((int) arc.target) + addressError - writer.getPosition() - 2; if (delta < 0) {
// NOTE(review): fragment starting and ending mid-method; the leading '}'
// closes a block opened outside this view. It mixes arc-flag handling with
// in-count tracking and growable-array resizing — presumably FST node
// freezing code; confirm against the full source. The resize uses
// ArrayUtil.oversize to amortize growth when nodeCount reaches capacity.
flags += BIT_STOP_NODE; } else if (inCounts != null) { inCounts.set((int) target.node, inCounts.get((int) target.node) + 1); if ((int) builder.nodeCount == nodeAddress.size()) { nodeAddress = nodeAddress.resize(ArrayUtil.oversize(nodeAddress.size() + 1, nodeAddress.getBitsPerValue())); inCounts = inCounts.resize(ArrayUtil.oversize(inCounts.size() + 1, inCounts.getBitsPerValue())); nodeAddress.set((int) builder.nodeCount, thisNodeAddress);
// NOTE(review): fragment cut from a larger uninversion method; docID,
// termOrd, maxDoc, startTermsBPV, bytes and termOrdToBytesOffset are defined
// outside this view. Ordinals are stored as termOrd + 1 so that 0 can denote
// a document with no value.
final GrowableWriter docToTermOrd = new GrowableWriter(startTermsBPV, maxDoc, acceptableOverheadRatio); docToTermOrd.set(docID, 1+termOrd); return new SortedDocValuesImpl(bytes.freeze(true), termOrdToBytesOffset.build(), docToTermOrd.getMutable(), termOrd);
/**
 * Allocates per-document ordinal storage.
 *
 * @param maxDoc number of documents
 * @param startBitsPerValue initial bits per value for the packed writers
 * @param acceptableOverheadRatio packed-ints space/time trade-off
 */
OrdinalsStore(int maxDoc, int startBitsPerValue, float acceptableOverheadRatio) {
  this.startBitsPerValue = startBitsPerValue;
  this.acceptableOverheadRatio = acceptableOverheadRatio;
  positions = new PagedGrowableWriter(maxDoc, PAGE_SIZE, startBitsPerValue, acceptableOverheadRatio);
  firstOrdinals = new GrowableWriter(startBitsPerValue, maxDoc, acceptableOverheadRatio);
  // Over-allocate so the level arrays never need resizing; 24 entries would
  // allow storing several millions of ordinals per doc.
  ordinals = new PagedGrowableWriter[24];
  nextLevelSlices = new PagedGrowableWriter[24];
  sizes = new int[24];
  Arrays.fill(sizes, 1); // reserve the 1st slice on every level
}
@Override public void visitTerm(BytesRef term) { currentValue = parser.parseValue(term); if (values == null) { // Lazy alloc so for the numeric field case // (which will hit a NumberFormatException // when we first try the DEFAULT_INT_PARSER), // we don't double-alloc: int startBitsPerValue; // Make sure than missing values (0) can be stored without resizing if (currentValue < 0) { minValue = currentValue; startBitsPerValue = minValue == Long.MIN_VALUE ? 64 : PackedInts.bitsRequired(-minValue); } else { minValue = 0; startBitsPerValue = PackedInts.bitsRequired(currentValue); } values = new GrowableWriter(startBitsPerValue, reader.maxDoc(), PackedInts.FAST); if (minValue != 0) { values.fill(0, values.size(), -minValue); // default value must be 0 } valuesRef.set(new GrowableWriterAndMinValue(values, minValue)); } }
private long getNodeAddress(long node) { if (nodeAddress != null) { // Deref return nodeAddress.get((int) node); } else { // Straight return node; } }