// Collector callback: counts each collected doc into cost[0] and records the doc id in the
// shared bit set. NOTE(review): cost/bitSet are captured from the enclosing scope, which is
// not visible here — presumably cost[0] tracks how many docs were collected; confirm at the caller.
@Override public void collect(int doc) throws IOException { cost[0]++; bitSet.set(doc); }
// Bulk-adder hook: records the doc id in the enclosing bit set.
// (The trailing brace closes the enclosing class, whose declaration is outside this view.)
@Override public void add(int doc) { bitSet.set(doc); } }
/**
 * Adds a doc id; ids must be added in strictly increasing order.
 * The set starts in a dense encoding (the first {@code cost} ids 0..cost-1 are implicit,
 * no bit set allocated); the first gap in the sequence triggers a migration to a
 * {@link FixedBitSet} sparse encoding, replaying the dense prefix.
 *
 * @param docID the next doc id, strictly greater than the last one added
 * @throws IllegalArgumentException if {@code docID} is not greater than the last added id
 */
void add(int docID) {
  if (docID <= lastDocId) {
    throw new IllegalArgumentException("Out of order doc ids: last=" + lastDocId + ", next=" + docID);
  }
  if (set != null) {
    // already on the bit-set encoding: grow if needed and mark the doc
    set = FixedBitSet.ensureCapacity(set, docID);
    set.set(docID);
  } else if (docID != cost) { // migrate to a sparse encoding using a bit set
    set = new FixedBitSet(docID + 1);
    set.set(0, cost); // replay the dense prefix 0..cost-1
    set.set(docID);
  }
  lastDocId = docID;
  cost++; // cost doubles as the count of docs added so far
}
/**
 * Builds one bit set per repeating PhrasePositions: for each of its repeating terms,
 * the bit at that term's ordinal (per {@code tord}) is set.
 *
 * @param rpp the repeating phrase positions
 * @param tord maps each repeating term to its ordinal
 * @return a list with one term-ordinal bit set per entry of {@code rpp}, in the same order
 */
private ArrayList<FixedBitSet> ppTermsBitSets(PhrasePositions[] rpp, HashMap<Term,Integer> tord) {
  ArrayList<FixedBitSet> result = new ArrayList<>(rpp.length);
  for (PhrasePositions phrasePos : rpp) {
    FixedBitSet termBits = new FixedBitSet(tord.size());
    for (Term term : phrasePos.terms) {
      Integer ordinal = tord.get(term);
      if (ordinal != null) { // non-repeating terms have no ordinal
        termBits.set(ordinal);
      }
    }
    result.add(termBits);
  }
  return result;
}
/**
 * For every repeating PhrasePositions entry, computes a bit set over term ordinals:
 * each repeating term of the entry turns on the bit at its ordinal from {@code tord}.
 *
 * @param rpp repeating phrase positions to process
 * @param tord term-to-ordinal mapping for the repeating terms
 * @return per-entry ordinal bit sets, parallel to {@code rpp}
 */
private ArrayList<FixedBitSet> ppTermsBitSets(PhrasePositions[] rpp, HashMap<Term,Integer> tord) {
  final ArrayList<FixedBitSet> bitSets = new ArrayList<>(rpp.length);
  for (PhrasePositions pp : rpp) {
    final FixedBitSet bits = new FixedBitSet(tord.size());
    for (Term term : pp.terms) {
      final Integer ord = tord.get(term);
      if (ord != null) { // only repeating terms are present in tord
        bits.set(ord);
      }
    }
    bitSets.add(bits);
  }
  return bitSets;
}
void add(byte[] packedValue, int docID) throws IOException { assert valueInOrder(valueCount + leafCount, 0, lastPackedValue, packedValue, 0, docID, lastDocID); System.arraycopy(packedValue, 0, leafValues, leafCount * packedBytesLength, packedBytesLength); leafDocs[leafCount] = docID; docsSeen.set(docID); leafCount++; if (valueCount > totalPointCount) { throw new IllegalStateException("totalPointCount=" + totalPointCount + " was passed when we were created, but we just hit " + pointCount + " values"); } if (leafCount == maxPointsInLeafNode) { // We write a block once we hit exactly the max count ... this is different from // when we write N > 1 dimensional points where we write between max/2 and max per leaf block writeLeafBlock(); leafCount = 0; } assert (lastDocID = docID) >= 0; // only assign when asserts are enabled }
/**
 * Migrates the buffered doc ids from the int-array buffers to a {@link FixedBitSet}
 * encoding, replays every buffered id into it, and installs a bit-set-backed adder.
 * After this call {@code buffers} is released.
 */
private void upgradeToBitSet() {
  assert bitSet == null;
  // FIX: use a distinct local name — the original local was named `bitSet`, shadowing the
  // field that the assert above checks, which invited accidental field/local mix-ups.
  FixedBitSet newBitSet = new FixedBitSet(maxDoc);
  long newCounter = 0;
  for (Buffer buffer : buffers) {
    int[] array = buffer.array;
    int length = buffer.length;
    newCounter += length;
    for (int i = 0; i < length; ++i) {
      newBitSet.set(array[i]);
    }
  }
  this.bitSet = newBitSet;
  this.counter = newCounter;
  this.buffers = null; // release the int buffers; the bit set is now authoritative
  this.adder = new FixedBitSetAdder(newBitSet);
}
/**
 * Re-orders numeric doc values according to {@code sortMap}, materializing them into
 * dense arrays indexed by the new doc ids.
 *
 * @param maxDoc number of docs in the segment
 * @param sortMap old-to-new doc id mapping
 * @param oldDocValues values in the old doc order; fully consumed by this call
 * @return the values plus a docs-with-field bit set, both in new doc order
 */
static SortingLeafReader.CachedNumericDVs sortDocValues(int maxDoc, Sorter.DocMap sortMap, NumericDocValues oldDocValues) throws IOException {
  FixedBitSet docsWithField = new FixedBitSet(maxDoc);
  long[] values = new long[maxDoc];
  for (int docID = oldDocValues.nextDoc(); docID != NO_MORE_DOCS; docID = oldDocValues.nextDoc()) {
    int newDocID = sortMap.oldToNew(docID);
    docsWithField.set(newDocID);
    values[newDocID] = oldDocValues.longValue();
  }
  return new SortingLeafReader.CachedNumericDVs(values, docsWithField);
}
/**
 * Returns a mutable live-docs bit set, creating a private copy-on-write clone on first use.
 * Subsequent calls return the same writeable instance until it is published/reset elsewhere.
 *
 * @return the writeable live-docs bit set (1 = live, 0 = deleted)
 */
protected FixedBitSet getMutableBits() {
  // if we pull mutable bits but we haven't been initialized something is completely off.
  // this means we receive deletes without having the bitset that is on-disk ready to be cloned
  assert liveDocsInitialized : "can't delete if liveDocs are not initialized";
  if (writeableLiveDocs == null) {
    // Copy on write: this means we've cloned a
    // SegmentReader sharing the current liveDocs
    // instance; must now make a private clone so we can
    // change it:
    if (liveDocs != null) {
      writeableLiveDocs = FixedBitSet.copyOf(liveDocs);
    } else {
      // no deletes yet: start from an all-live bit set
      writeableLiveDocs = new FixedBitSet(info.info.maxDoc());
      writeableLiveDocs.set(0, info.info.maxDoc());
    }
    // expose only a read-only view publicly; mutations go through writeableLiveDocs
    liveDocs = writeableLiveDocs.asReadOnlyBits();
  }
  return writeableLiveDocs;
}
/**
 * Re-orders binary doc values according to {@code sortMap}, deep-copying each value into
 * a dense array indexed by the new doc ids.
 *
 * @param maxDoc number of docs in the segment
 * @param sortMap old-to-new doc id mapping
 * @param oldValues values in the old doc order; fully consumed by this call
 * @return the copied values plus a docs-with-field bit set, both in new doc order
 */
private SortingLeafReader.CachedBinaryDVs sortDocValues(int maxDoc, Sorter.DocMap sortMap, BinaryDocValues oldValues) throws IOException {
  FixedBitSet docsWithField = new FixedBitSet(maxDoc);
  BytesRef[] values = new BytesRef[maxDoc];
  for (int docID = oldValues.nextDoc(); docID != NO_MORE_DOCS; docID = oldValues.nextDoc()) {
    int newDocID = sortMap.oldToNew(docID);
    docsWithField.set(newDocID);
    // deep copy: the iterator may reuse its BytesRef on the next advance
    values[newDocID] = BytesRef.deepCopyOf(oldValues.binaryValue());
  }
  return new SortingLeafReader.CachedBinaryDVs(values, docsWithField);
}
/**
 * Produces a live-docs bit set in the new (sorted) doc order: starts fully live, then
 * clears the mapped position of every doc that was deleted in the old order.
 *
 * @param liveDocs live docs in old order (non-null)
 * @param sortMap old-to-new doc id mapping (non-null)
 * @return live docs re-ordered per {@code sortMap}
 */
private FixedBitSet sortLiveDocs(Bits liveDocs, Sorter.DocMap sortMap) throws IOException {
  assert liveDocs != null && sortMap != null;
  final int numDocs = liveDocs.length();
  FixedBitSet sortedLiveDocs = new FixedBitSet(numDocs);
  sortedLiveDocs.set(0, numDocs);
  for (int oldDoc = 0; oldDoc < numDocs; oldDoc++) {
    if (!liveDocs.get(oldDoc)) {
      sortedLiveDocs.clear(sortMap.oldToNew(oldDoc));
    }
  }
  return sortedLiveDocs;
}
/**
 * Serializes the iterator's doc ids as a sequence of 2^16-doc blocks: the high 16 bits of a
 * doc id select its block, the low 16 bits its offset within the block's bit set. Each
 * completed block is handed to {@code flush} along with its cardinality. A final block
 * containing NO_MORE_DOCS is always written as an explicit terminator.
 *
 * @param it source of doc ids, consumed to exhaustion
 * @param out destination output
 */
static void writeBitSet(DocIdSetIterator it, IndexOutput out) throws IOException {
  int i = 0; // number of docs accumulated in the current block
  final FixedBitSet buffer = new FixedBitSet(1<<16);
  int prevBlock = -1; // -1 means no block started yet
  for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
    final int block = doc >>> 16; // high 16 bits: block index
    if (prevBlock != -1 && block != prevBlock) {
      // crossed a block boundary: flush the finished block and reset the buffer
      flush(prevBlock, buffer, i, out);
      buffer.clear(0, buffer.length());
      prevBlock = block;
      i = 0;
    }
    buffer.set(doc & 0xFFFF); // low 16 bits: offset within the block
    i++;
    prevBlock = block;
  }
  if (i > 0) {
    // flush the trailing, partially-filled block
    flush(prevBlock, buffer, i, out);
    buffer.clear(0, buffer.length());
  }
  // NO_MORE_DOCS is stored explicitly
  buffer.set(DocIdSetIterator.NO_MORE_DOCS & 0xFFFF);
  flush(DocIdSetIterator.NO_MORE_DOCS >>> 16, buffer, 1, out);
}
/**
 * Returns norms for {@code field} re-ordered by the doc map, materializing and caching
 * them on first access (cache synchronized on {@code cachedNorms}).
 *
 * @param field the field whose norms are requested
 * @return sorted norms, or null if the field has no norms
 */
@Override
public NumericDocValues getNormValues(String field) throws IOException {
  final NumericDocValues oldNorms = in.getNormValues(field);
  if (oldNorms == null) {
    return null;
  }
  CachedNumericDVs norms;
  synchronized (cachedNorms) {
    norms = cachedNorms.get(field);
    if (norms == null) {
      // first access for this field: materialize the norms in new doc order
      FixedBitSet docsWithField = new FixedBitSet(maxDoc());
      long[] values = new long[maxDoc()];
      for (int docID = oldNorms.nextDoc(); docID != NO_MORE_DOCS; docID = oldNorms.nextDoc()) {
        int newDocID = docMap.oldToNew(docID);
        docsWithField.set(newDocID);
        values[newDocID] = oldNorms.longValue();
      }
      norms = new CachedNumericDVs(values, docsWithField);
      cachedNorms.put(field, norms);
    }
  }
  return new SortingNumericDocValues(norms);
}
/**
 * Returns numeric doc values for {@code field} re-ordered by the doc map, materializing
 * and caching them on first access (cache synchronized on {@code cachedNumericDVs}).
 *
 * @param field the field whose values are requested
 * @return sorted numeric doc values, or null if the field has none
 */
@Override
public NumericDocValues getNumericDocValues(String field) throws IOException {
  final NumericDocValues oldDocValues = in.getNumericDocValues(field);
  if (oldDocValues == null) {
    return null;
  }
  CachedNumericDVs dvs;
  synchronized (cachedNumericDVs) {
    dvs = cachedNumericDVs.get(field);
    if (dvs == null) {
      // first access for this field: materialize the values in new doc order
      FixedBitSet docsWithField = new FixedBitSet(maxDoc());
      long[] values = new long[maxDoc()];
      for (int docID = oldDocValues.nextDoc(); docID != NO_MORE_DOCS; docID = oldDocValues.nextDoc()) {
        int newDocID = docMap.oldToNew(docID);
        docsWithField.set(newDocID);
        values[newDocID] = oldDocValues.longValue();
      }
      dvs = new CachedNumericDVs(values, docsWithField);
      cachedNumericDVs.put(field, dvs);
    }
  }
  return new SortingNumericDocValues(dvs);
}
/**
 * Clears all bits in the given bitset that are set and are also in the given DocIdSetIterator.
 *
 * <p>When the iterator is a {@link DocValuesFieldUpdates.Iterator}, a doc without a value
 * means the soft-delete was reverted: such docs are re-marked live (bit set), decrementing
 * the returned delta.
 *
 * @param iterator the doc ID set iterator for apply
 * @param bits the bit set to apply the deletes to
 * @return the number of bits changed by this function (may be negative when deletes are reverted)
 */
static int applySoftDeletes(DocIdSetIterator iterator, FixedBitSet bits) throws IOException {
  assert iterator != null;
  int newDeletes = 0;
  int docID;
  // only a DocValuesFieldUpdates.Iterator can report "no value" (i.e. un-delete)
  DocValuesFieldUpdates.Iterator hasValue = iterator instanceof DocValuesFieldUpdates.Iterator ? (DocValuesFieldUpdates.Iterator) iterator : null;
  while ((docID = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
    if (hasValue == null || hasValue.hasValue()) {
      if (bits.get(docID)) { // doc is live - clear it
        bits.clear(docID);
        newDeletes++;
        // now that we know we deleted it and we fully control the hard deletes we can do correct accounting
        // below.
      }
    } else {
      // no value for this doc: the soft-delete was undone — restore the doc to live
      if (bits.get(docID) == false) {
        bits.set(docID);
        newDeletes--;
      }
    }
  }
  return newDeletes;
}
/**
 * Returns binary doc values for {@code field} re-ordered by the doc map, materializing
 * and caching deep copies on first access (cache synchronized on {@code cachedBinaryDVs}).
 *
 * @param field the field whose values are requested
 * @return sorted binary doc values, or null if the field has none
 */
@Override
public BinaryDocValues getBinaryDocValues(String field) throws IOException {
  final BinaryDocValues oldDocValues = in.getBinaryDocValues(field);
  if (oldDocValues == null) {
    return null;
  }
  CachedBinaryDVs dvs;
  synchronized (cachedBinaryDVs) {
    dvs = cachedBinaryDVs.get(field);
    if (dvs == null) {
      // first access for this field: materialize deep copies in new doc order
      FixedBitSet docsWithField = new FixedBitSet(maxDoc());
      BytesRef[] values = new BytesRef[maxDoc()];
      for (int docID = oldDocValues.nextDoc(); docID != NO_MORE_DOCS; docID = oldDocValues.nextDoc()) {
        int newDocID = docMap.oldToNew(docID);
        docsWithField.set(newDocID);
        // deep copy: the iterator may reuse its BytesRef on the next advance
        values[newDocID] = BytesRef.deepCopyOf(oldDocValues.binaryValue());
      }
      dvs = new CachedBinaryDVs(values, docsWithField);
      cachedBinaryDVs.put(field, dvs);
    }
  }
  return new SortingBinaryDocValues(dvs);
}
/** Sliced reference to points in an OfflineSorter.ByteSequencesWriter file. */
private static final class PathSlice {
  final PointWriter writer; // backing writer the slice refers to
  final long start;         // first point of the slice
  final long count;         // number of points in the slice

  public PathSlice(PointWriter writer, long start, long count) {
    this.writer = writer;
    this.start = start;
    this.count = count;
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("PathSlice(start=");
    sb.append(start);
    sb.append(" count=").append(count);
    sb.append(" writer=").append(writer);
    sb.append(')');
    return sb.toString();
  }
}
/** * Make a copy of the given bits. */ public static FixedBitSet copyOf(Bits bits) { if (bits instanceof FixedBits) { // restore the original FixedBitSet FixedBits fixedBits = (FixedBits) bits; bits = new FixedBitSet(fixedBits.bits, fixedBits.length); } if (bits instanceof FixedBitSet) { return ((FixedBitSet)bits).clone(); } else { int length = bits.length(); FixedBitSet bitSet = new FixedBitSet(length); bitSet.set(0, length); for (int i = 0; i < length; ++i) { if (bits.get(i) == false) { bitSet.clear(i); } } return bitSet; } }
/**
 * Wraps the reader so that docs soft-deleted via {@code field} are hidden as if they were
 * hard-deleted. Returns the reader unchanged when no doc carries the field.
 *
 * @param reader the reader to wrap
 * @param field the soft-deletes field
 * @return a filtered reader, or {@code reader} itself if there is nothing to hide
 */
static LeafReader wrap(LeafReader reader, String field) throws IOException {
  DocIdSetIterator iterator = DocValuesFieldExistsQuery.getDocValuesDocIdSetIterator(field, reader);
  if (iterator == null) {
    return reader; // no doc has the soft-deletes field: nothing to filter
  }
  Bits liveDocs = reader.getLiveDocs();
  final FixedBitSet bits;
  if (liveDocs == null) {
    // no hard deletes yet: start from an all-live bit set
    bits = new FixedBitSet(reader.maxDoc());
    bits.set(0, reader.maxDoc());
  } else {
    bits = FixedBitSet.copyOf(liveDocs);
  }
  int numSoftDeletes = PendingSoftDeletes.applySoftDeletes(iterator, bits);
  int numDeletes = reader.numDeletedDocs() + numSoftDeletes;
  int numDocs = reader.maxDoc() - numDeletes;
  assert assertDocCounts(numDocs, numSoftDeletes, reader);
  if (reader instanceof CodecReader) {
    return new SoftDeletesFilterCodecReader((CodecReader) reader, bits, numDocs);
  }
  return new SoftDeletesFilterLeafReader(reader, bits, numDocs);
}
bits.set(k); // mark that pp2 needs to be re-queued