static SortingLeafReader.CachedNumericDVs sortDocValues(int maxDoc, Sorter.DocMap sortMap,
    NumericDocValues oldDocValues) throws IOException {
  FixedBitSet docsWithField = new FixedBitSet(maxDoc);
  long[] values = new long[maxDoc];
  while (true) {
    int docID = oldDocValues.nextDoc();
    if (docID == NO_MORE_DOCS) {
      break;
    }
    int newDocID = sortMap.oldToNew(docID);
    docsWithField.set(newDocID);
    values[newDocID] = oldDocValues.longValue();
  }
  return new SortingLeafReader.CachedNumericDVs(values, docsWithField);
}
/** this = this OR other */
public void or(FixedBitSet other) {
  or(other.bits, other.numWords);
}
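As a quick illustration of the in-place union semantics above, a minimal sketch (the class name OrDemo is illustrative, not part of Lucene):

import org.apache.lucene.util.FixedBitSet;

public class OrDemo {
  public static void main(String[] args) {
    FixedBitSet a = new FixedBitSet(8);
    FixedBitSet b = new FixedBitSet(8);
    a.set(1);
    b.set(3);
    a.or(b); // a is modified in place: a = a OR b
    System.out.println(a.get(1) + " " + a.get(3)); // true true
  }
}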
// resolve collisions among repeating PhrasePositions, marking which queue
// entries must be re-queued afterwards
FixedBitSet bits = new FixedBitSet(rg.length); // for re-queuing after collisions are resolved
int k0 = pp.rptInd;
int k;
while ((k = collide(pp)) >= 0) {
  pp = lesser(pp, rg[k]); // always advance the lesser of the two colliding pps
  if (!advancePP(pp)) {
    return false; // exhausted
  }
  if (k != k0) { // careful: mark only those currently in the queue
    bits = FixedBitSet.ensureCapacity(bits, k);
    bits.set(k); // mark that pp2 needs to be re-queued
  }
}
// collisions resolved, now re-queue the marked entries
int n = 0;
int numBits = bits.length(); // largest bit we set
while (bits.cardinality() > 0) {
  PhrasePositions pp2 = pq.pop();
  rptStack[n++] = pp2;
  if (pp2.rptGroup >= 0 && pp2.rptInd < numBits && bits.get(pp2.rptInd)) {
    bits.clear(pp2.rptInd);
  }
}
void add(int docID) {
  if (docID <= lastDocId) {
    throw new IllegalArgumentException("Out of order doc ids: last=" + lastDocId + ", next=" + docID);
  }
  if (set != null) {
    set = FixedBitSet.ensureCapacity(set, docID);
    set.set(docID);
  } else if (docID != cost) {
    // migrate to a sparse encoding using a bit set
    set = new FixedBitSet(docID + 1);
    set.set(0, cost);
    set.set(docID);
  }
  lastDocId = docID;
  cost++;
}
/** map each term to the single group that contains it */
private HashMap<Term,Integer> termGroups(LinkedHashMap<Term,Integer> tord, ArrayList<FixedBitSet> bb)
    throws IOException {
  HashMap<Term,Integer> tg = new HashMap<>();
  Term[] t = tord.keySet().toArray(new Term[0]);
  for (int i = 0; i < bb.size(); i++) { // i is the group no.
    FixedBitSet bits = bb.get(i);
    for (int ord = bits.nextSetBit(0);
        ord != DocIdSetIterator.NO_MORE_DOCS;
        ord = ord + 1 >= bits.length() ? DocIdSetIterator.NO_MORE_DOCS : bits.nextSetBit(ord + 1)) {
      tg.put(t[ord], i);
    }
  }
  return tg;
}
/** bit-sets - for each repeating pp, for each of its repeating terms, the term's ordinal is set */
private ArrayList<FixedBitSet> ppTermsBitSets(PhrasePositions[] rpp, HashMap<Term,Integer> tord) {
  ArrayList<FixedBitSet> bb = new ArrayList<>(rpp.length);
  for (PhrasePositions pp : rpp) {
    FixedBitSet b = new FixedBitSet(tord.size());
    Integer ord;
    for (Term t : pp.terms) {
      if ((ord = tord.get(t)) != null) {
        b.set(ord);
      }
    }
    bb.add(b);
  }
  return bb;
}
@Override
public void postCollect() throws IOException {
  final FixedBitSet allVisitedOrds = new FixedBitSet(maxOrd);
  for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
    final FixedBitSet bits = visitedOrds.get(bucket);
    if (bits != null) {
      allVisitedOrds.or(bits);
    }
  }

  final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash =
      new org.elasticsearch.common.hash.MurmurHash3.Hash128();
  try (LongArray hashes = bigArrays.newLongArray(maxOrd, false)) {
    for (int ord = allVisitedOrds.nextSetBit(0);
        ord < DocIdSetIterator.NO_MORE_DOCS;
        ord = ord + 1 < maxOrd ? allVisitedOrds.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) {
      final BytesRef value = values.lookupOrd(ord);
      org.elasticsearch.common.hash.MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash);
      hashes.set(ord, hash.h1);
    }
    for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) {
      final FixedBitSet bits = visitedOrds.get(bucket);
      if (bits != null) {
        for (int ord = bits.nextSetBit(0);
            ord < DocIdSetIterator.NO_MORE_DOCS;
            ord = ord + 1 < maxOrd ? bits.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) {
          counts.collect(bucket, hashes.get(ord));
        }
      }
    }
  }
}
// verify that the ords are dense and that the reported maxOrd is correct
FixedBitSet seenOrds = new FixedBitSet(dv.getValueCount());
int maxOrd2 = -1;
int docID;
while ((docID = dv.nextDoc()) != NO_MORE_DOCS) {
  int ord = dv.ordValue();
  if (ord < 0 || ord > maxOrd) {
    throw new RuntimeException("ord out of bounds: " + ord);
  } else {
    maxOrd2 = Math.max(maxOrd2, ord);
    seenOrds.set(ord);
  }
}
if (maxOrd != maxOrd2) {
  throw new RuntimeException("dv for field: " + fieldName + " reports wrong maxOrd=" + maxOrd
      + " but this is not the case: " + maxOrd2);
}
if (seenOrds.cardinality() != dv.getValueCount()) {
  throw new RuntimeException("dv for field: " + fieldName + " has holes in its ords, valueCount="
      + dv.getValueCount() + " but only used: " + seenOrds.cardinality());
}
private FixedBitSet sortLiveDocs(Bits liveDocs, Sorter.DocMap sortMap) throws IOException {
  assert liveDocs != null && sortMap != null;
  FixedBitSet sortedLiveDocs = new FixedBitSet(liveDocs.length());
  sortedLiveDocs.set(0, liveDocs.length());
  for (int i = 0; i < liveDocs.length(); i++) {
    if (liveDocs.get(i) == false) {
      sortedLiveDocs.clear(sortMap.oldToNew(i));
    }
  }
  return sortedLiveDocs;
}
public FakeDeleteIndexReader(IndexReader in) {
  super(in);
  dels = new FixedBitSet(in.maxDoc());
  if (in.hasDeletions()) {
    oldDels = new FixedBitSet(in.maxDoc());
    for (int i = 0; i < in.maxDoc(); i++) {
      if (in.isDeleted(i)) {
        oldDels.set(i);
      }
    }
    dels.or(oldDels);
  }
}
static void writeBitSet(DocIdSetIterator it, IndexOutput out) throws IOException {
  int i = 0;
  final FixedBitSet buffer = new FixedBitSet(1 << 16);
  int prevBlock = -1;
  for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
    final int block = doc >>> 16;
    if (prevBlock != -1 && block != prevBlock) {
      flush(prevBlock, buffer, i, out);
      buffer.clear(0, buffer.length());
      i = 0;
    }
    buffer.set(doc & 0xFFFF);
    i++;
    prevBlock = block;
  }
  if (i > 0) {
    flush(prevBlock, buffer, i, out);
    buffer.clear(0, buffer.length());
  }
  // NO_MORE_DOCS is stored explicitly
  buffer.set(DocIdSetIterator.NO_MORE_DOCS & 0xFFFF);
  flush(DocIdSetIterator.NO_MORE_DOCS >>> 16, buffer, 1, out);
}
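To make the block arithmetic above concrete, a small worked example (standalone, not part of the codec): each block covers 2^16 doc IDs, the high bits select the block and the low 16 bits select the position inside the block's FixedBitSet.

public class BlockMath {
  public static void main(String[] args) {
    int doc = 150_000;
    int block = doc >>> 16;    // 2: the third 65536-doc block
    int offset = doc & 0xFFFF; // 18928: position within that block
    System.out.println(block + " " + offset);
  }
}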
/** Make a copy of the given bits. */
public static FixedBitSet copyOf(Bits bits) {
  if (bits instanceof FixedBits) {
    // restore the original FixedBitSet
    FixedBits fixedBits = (FixedBits) bits;
    bits = new FixedBitSet(fixedBits.bits, fixedBits.length);
  }
  if (bits instanceof FixedBitSet) {
    return ((FixedBitSet) bits).clone();
  } else {
    int length = bits.length();
    FixedBitSet bitSet = new FixedBitSet(length);
    bitSet.set(0, length);
    for (int i = 0; i < length; ++i) {
      if (bits.get(i) == false) {
        bitSet.clear(i);
      }
    }
    return bitSet;
  }
}
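A minimal usage sketch of the bit-by-bit fallback above, using Lucene's Bits.MatchAllBits (the class name CopyOfDemo is illustrative):

import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet;

public class CopyOfDemo {
  public static void main(String[] args) {
    Bits all = new Bits.MatchAllBits(5);        // not a FixedBitSet, so the slow path runs
    FixedBitSet copy = FixedBitSet.copyOf(all); // independent, mutable copy
    copy.clear(2);
    System.out.println(copy.cardinality());     // 4
  }
}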
if (doc < delDocLimit) {
  if (state.liveDocs == null) {
    state.liveDocs = new FixedBitSet(state.segmentInfo.maxDoc());
    state.liveDocs.set(0, state.segmentInfo.maxDoc());
  }
  if (state.liveDocs.get(doc)) {
    state.delCountOnFlush++;
    state.liveDocs.clear(doc);
  }
}
protected synchronized FixedBitSet concludeVote(int target) {
  int target2 = (int) Math.ceil((double) target / 2.0);
  target2 = target2 - minimum; // unlikely other than in testing: minimum is more than half the votes
  if (target2 < 0) {
    FixedBitSet ans = new FixedBitSet(dimension);
    ans.set(0, dimension);
    return ans;
  }
  boolean even = (target % 2 == 0);
  FixedBitSet result = concludeVote(target2, votingRecord.size() - 1);
  if (even) {
    setTempSetToExactMatches(target2);
    boolean switcher = true; // each exact match has a 50% chance of surviving a split vote
    int q = tempSet.nextSetBit(0);
    while (q != DocIdSetIterator.NO_MORE_DOCS) {
      switcher = !switcher;
      if (switcher) {
        tempSet.clear(q);
      }
      // advance past q: nextSetBit(q) would return q again when the bit was kept
      q = q + 1 >= tempSet.length() ? DocIdSetIterator.NO_MORE_DOCS : tempSet.nextSetBit(q + 1);
    }
    result.andNot(tempSet);
  }
  return result;
}
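For reference, a self-contained sketch of that alternate-clearing pattern over a FixedBitSet (the values and class name are illustrative):

import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.FixedBitSet;

public class SplitVoteDemo {
  public static void main(String[] args) {
    FixedBitSet s = new FixedBitSet(8);
    s.set(0); s.set(2); s.set(4); s.set(6);
    boolean switcher = true;
    for (int q = s.nextSetBit(0); q != DocIdSetIterator.NO_MORE_DOCS;
        q = q + 1 >= s.length() ? DocIdSetIterator.NO_MORE_DOCS : s.nextSetBit(q + 1)) {
      switcher = !switcher;
      if (switcher) {
        s.clear(q); // drop every other set bit
      }
    }
    System.out.println(s.cardinality()); // 2: bits {0, 4} survive
  }
}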
assert denseBuffer.cardinality() == currentBlockCardinality;
if (denseBuffer.length() == BLOCK_SIZE && BLOCK_SIZE - currentBlockCardinality < MAX_ARRAY_LENGTH) {
  // doc IDs are very dense: invert the encoding and store the few excluded docs instead
  final short[] excludedDocs = new short[BLOCK_SIZE - currentBlockCardinality];
  denseBuffer.flip(0, denseBuffer.length());
  int excludedDoc = -1;
  for (int i = 0; i < excludedDocs.length; ++i) {
    excludedDoc = denseBuffer.nextSetBit(excludedDoc + 1);
    assert excludedDoc != DocIdSetIterator.NO_MORE_DOCS;
    excludedDocs[i] = (short) excludedDoc;
  }
  assert excludedDoc + 1 == denseBuffer.length()
      || denseBuffer.nextSetBit(excludedDoc + 1) == DocIdSetIterator.NO_MORE_DOCS;
  sets[currentBlock] = new NotDocIdSet(BLOCK_SIZE, new ShortArrayDocIdSet(excludedDocs));
} else {
  // neither sparse nor super dense: keep the bit set as-is
  sets[currentBlock] = new BitDocIdSet(denseBuffer, currentBlockCardinality);
}
int numBitsSet = filter.cardinality();
FixedBitSet rightSizedBitSet = filter;
int rightSizedBitSetSize = bloomSize;
// (a smaller rightSizedBitSetSize is chosen here based on the target saturation)
// re-project the accumulated bits into the smaller bit set
rightSizedBitSet = new FixedBitSet(rightSizedBitSetSize + 1);
int bitIndex = 0;
do {
  bitIndex = filter.nextSetBit(bitIndex);
  if (bitIndex != DocIdSetIterator.NO_MORE_DOCS) {
    // project the larger bit index into the smaller space, effectively
    // a modulo, by masking with the target bit set size
    int downSizedBitIndex = bitIndex & rightSizedBitSetSize;
    rightSizedBitSet.set(downSizedBitIndex);
    bitIndex++;
  }
} while (bitIndex >= 0 && bitIndex <= bloomSize);
protected FixedBitSet getMutableBits() {
  // if we pull mutable bits but we haven't been initialized something is completely off.
  // this means we receive deletes without having the bitset that is on-disk ready to be cloned
  assert liveDocsInitialized : "can't delete if liveDocs are not initialized";
  if (writeableLiveDocs == null) {
    // Copy on write: this means we've cloned a SegmentReader sharing the current
    // liveDocs instance; must now make a private clone so we can change it:
    if (liveDocs != null) {
      writeableLiveDocs = FixedBitSet.copyOf(liveDocs);
    } else {
      writeableLiveDocs = new FixedBitSet(info.info.maxDoc());
      writeableLiveDocs.set(0, info.info.maxDoc());
    }
    liveDocs = writeableLiveDocs.asReadOnlyBits();
  }
  return writeableLiveDocs;
}
@Override
public Bits readLiveDocs(Directory dir, SegmentCommitInfo info, IOContext context) throws IOException {
  long gen = info.getDelGen();
  String name = IndexFileNames.fileNameFromGeneration(info.info.name, EXTENSION, gen);
  final int length = info.info.maxDoc();
  try (ChecksumIndexInput input = dir.openChecksumInput(name, context)) {
    Throwable priorE = null;
    try {
      CodecUtil.checkIndexHeader(input, CODEC_NAME, VERSION_START, VERSION_CURRENT,
          info.info.getId(), Long.toString(gen, Character.MAX_RADIX));
      long[] data = new long[FixedBitSet.bits2words(length)];
      for (int i = 0; i < data.length; i++) {
        data[i] = input.readLong();
      }
      FixedBitSet fbs = new FixedBitSet(data, length);
      if (fbs.length() - fbs.cardinality() != info.getDelCount()) {
        throw new CorruptIndexException("bits.deleted=" + (fbs.length() - fbs.cardinality())
            + " info.delcount=" + info.getDelCount(), input);
      }
      return fbs.asReadOnlyBits();
    } catch (Throwable exception) {
      priorE = exception;
    } finally {
      CodecUtil.checkFooter(input, priorE);
    }
  }
  throw new AssertionError();
}
/**
 * Clears all bits in the given bit set that are set and are also present in the given
 * DocIdSetIterator.
 *
 * @param iterator the doc ID set iterator to apply
 * @param bits the bit set to apply the deletes to
 * @return the number of bits changed by this function
 */
static int applySoftDeletes(DocIdSetIterator iterator, FixedBitSet bits) throws IOException {
  assert iterator != null;
  int newDeletes = 0;
  int docID;
  DocValuesFieldUpdates.Iterator hasValue =
      iterator instanceof DocValuesFieldUpdates.Iterator ? (DocValuesFieldUpdates.Iterator) iterator : null;
  while ((docID = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
    if (hasValue == null || hasValue.hasValue()) {
      if (bits.get(docID)) {
        // doc is live - clear it
        bits.clear(docID);
        newDeletes++;
        // now that we know we deleted it and we fully control the hard deletes
        // we can do correct accounting below.
      }
    } else {
      if (bits.get(docID) == false) {
        bits.set(docID);
        newDeletes--;
      }
    }
  }
  return newDeletes;
}
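A hedged sketch of the accounting above for the plain-iterator case (no DocValuesFieldUpdates); the stand-in method below mirrors only that path, and the class name is illustrative:

import java.io.IOException;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.FixedBitSet;

public class SoftDeleteDemo {
  public static void main(String[] args) throws IOException {
    FixedBitSet live = new FixedBitSet(4);
    live.set(0, 4); // all four docs start out live
    // docs 0 and 1 carry a soft-deletes value, so they get cleared
    int changed = clearLive(DocIdSetIterator.all(2), live);
    System.out.println(changed + " " + live.cardinality()); // 2 2
  }

  // stand-in for the plain-iterator path of applySoftDeletes
  static int clearLive(DocIdSetIterator iterator, FixedBitSet bits) throws IOException {
    int newDeletes = 0;
    int docID;
    while ((docID = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
      if (bits.get(docID)) {
        bits.clear(docID);
        newDeletes++;
      }
    }
    return newDeletes;
  }
}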
static LeafReader wrap(LeafReader reader, String field) throws IOException {
  DocIdSetIterator iterator = DocValuesFieldExistsQuery.getDocValuesDocIdSetIterator(field, reader);
  if (iterator == null) {
    return reader;
  }
  Bits liveDocs = reader.getLiveDocs();
  final FixedBitSet bits;
  if (liveDocs != null) {
    bits = FixedBitSet.copyOf(liveDocs);
  } else {
    bits = new FixedBitSet(reader.maxDoc());
    bits.set(0, reader.maxDoc());
  }
  int numSoftDeletes = PendingSoftDeletes.applySoftDeletes(iterator, bits);
  int numDeletes = reader.numDeletedDocs() + numSoftDeletes;
  int numDocs = reader.maxDoc() - numDeletes;
  assert assertDocCounts(numDocs, numSoftDeletes, reader);
  return reader instanceof CodecReader
      ? new SoftDeletesFilterCodecReader((CodecReader) reader, bits, numDocs)
      : new SoftDeletesFilterLeafReader(reader, bits, numDocs);
}