/**
 * Static factory method, analogous to the factories exposed by other
 * (generic) collection classes.
 *
 * @return a fresh, empty instance.
 */
public static BitSet newInstance() {
    final BitSet fresh = new BitSet();
    return fresh;
}
/**
 * Checks whether this set has any bits set at all.
 *
 * @return {@code true} when no bit is set.
 */
public boolean isEmpty() {
    final long setBits = cardinality();
    return setBits == 0;
}
/**
 * Checks whether the given index corresponds to a set bit.
 *
 * @param index bit index to test; negative indices are never contained.
 * @return {@code true} iff {@code index} is non-negative and the bit at
 *         {@code index} is set.
 */
@Override
public boolean contains(int index) {
    // Fix: a bit set cannot contain negative indices. The previous
    // short-circuit (index < 0 || get(index)) wrongly reported every
    // negative index as present.
    return index >= 0 && BitSet.this.get(index);
}
/**
 * Makes sure the backing long[] is large enough to hold {@code numBits},
 * growing it when required. {@code getNumWords()} is unchanged by this call.
 *
 * @param numBits the number of bits the storage must accommodate.
 */
public void ensureCapacity(long numBits) {
    final int requiredWords = bits2words(numBits);
    ensureCapacityWords(requiredWords);
}
/**
 * Builds a segmentation definition covering the single combination of all
 * {@code wordsetSize} elements.
 */
public SegmentationDefinition getSubsetDefinition(int wordsetSize) {
    /*
     * Combinations of elements are encoded as bit masks rather than ids:
     * 01 is the first element alone, 10 the second alone, and 11 the
     * combination of both.
     */
    final int allElementsMask = (1 << wordsetSize) - 1;
    final int[][] conditions = { { allElementsMask } };
    final int[] segments = { allElementsMask };

    final BitSet neededCounts = new BitSet(1 << wordsetSize);
    neededCounts.set(allElementsMask);

    return new SegmentationDefinition(segments, conditions, neededCounts);
}
/**
 * Applies the consumer exactly once to every distinct community reachable
 * over the relationships of the given node.
 *
 * @param node     node id whose relationships are scanned.
 * @param consumer receives each connected community id at most once.
 */
private void forEachConnectedCommunity(int node, IntConsumer consumer) {
    final BitSet seen = new BitSet(nodeCount);
    graph.forEachRelationship(node, D, (source, target, relation) -> {
        final int community = localCommunities[target];
        // Skip unassigned targets and communities already reported.
        if (community != NONE && !seen.get(community)) {
            seen.set(community);
            consumer.accept(community);
        }
        return true;
    });
}
/**
 * Decides whether two clusters overlap enough to be linked, based on the
 * ratio of their document intersection to the larger cluster's size.
 */
public boolean isArcPresent(int clusterA, int clusterB) {
    temp.clear();
    int size;
    BitSet setA = clusterDocuments[clusterA];
    BitSet setB = clusterDocuments[clusterB];
    // Suitable for flat clustering: a small subgroup contained within a
    // bigger group will give a small overlap ratio. Big ratios will be
    // produced only for balanced group sizes.
    if (setA.cardinality() < setB.cardinality()) {
        // addAll == or
        // retainAll == and | intersect
        temp.or(setA);
        temp.intersect(setB);
        size = (int) setB.cardinality();
    } else {
        temp.or(setB);
        temp.intersect(setA);
        size = (int) setA.cardinality();
    }
    // |A ∩ B| / max(|A|, |B|) compared against the merging threshold.
    return temp.cardinality() / (double) size >= clusterMergingThreshold;
} }, true);
/**
 * Computes, for every subset of the input bit sets, the cardinality of the
 * intersection of that subset's members.
 *
 * <p>Slot {@code s} of the result (for {@code s > 0}) holds the cardinality
 * of the intersection of all {@code bitsets[i]} whose bit {@code i} is set
 * in {@code s}; slot 0 is left at 0.
 *
 * @param bitsets      the base bit sets; one per element.
 * @param neededCounts currently unused (see TODO below).
 * @return cardinalities indexed by subset mask; length {@code 1 << bitsets.length}.
 */
private int[] createCounts(BitSet bitsets[], BitSet neededCounts) {
    // TODO use the neededCounts bit set to avoid the creation of bit sets which are not needed
    // TODO Check the minimum frequency at this stage --> all BitSets with a lower cardinality can be set to null
    // and all following don't have to be created.
    // combinations[mask] = intersection of the bitsets selected by mask.
    BitSet[] combinations = new BitSet[(1 << bitsets.length)];
    int pos, pos2;
    for (int i = 0; i < bitsets.length; ++i) {
        // pos is the singleton mask {i}; seed it with the raw bit set.
        pos = (1 << i);
        combinations[pos] = bitsets[i];
        pos2 = pos + 1;
        // Masks pos+1 .. pos+pos-1 are {i} united with every smaller
        // non-empty mask j; their intersections build on combinations[j].
        for (int j = 1; j < pos; ++j) {
            combinations[pos2] = ((BitSet) bitsets[i].clone());
            combinations[pos2].intersect(combinations[j]);
            ++pos2;
        }
    }
    // Reduce each combination to its cardinality (slot 0 stays 0).
    int cardinalities[] = new int[combinations.length];
    for (int i = 1; i < combinations.length; ++i) {
        cardinalities[i] = (int) combinations[i].cardinality();
    }
    return cardinalities;
} }
/**
 * Verifies that the tree is in a valid shape: no node may appear more than
 * once across the child and sibling links.
 *
 * @throws IllegalStateException for invalid trees.
 */
public void verifyTreeIntegrity() {
    final BitSet seen = new BitSet(capacity);
    for (final int child : children) {
        if (child == INVALID_NODE) {
            continue;
        }
        if (seen.getAndSet(child)) {
            throw new IllegalStateException("node (" + child + ") has multiple parents");
        }
    }
    for (final int sibling : siblings) {
        if (sibling == INVALID_NODE) {
            continue;
        }
        if (seen.getAndSet(sibling)) {
            throw new IllegalStateException("node (" + sibling + ") has multiple parents");
        }
    }
}
// Collect the map's keys into a bit set so they can be visited in
// ascending order below (assumes keys are non-negative — TODO confirm).
final BitSet bset = new BitSet(map.size());
for (IntIntCursor c : map) bset.set(c.key);
// Standard nextSetBit iteration idiom: visits every set bit in order.
for (int key = bset.nextSetBit(0); key >= 0; key = bset.nextSetBit(key + 1))
// Per-label document index accumulation (fragment — enclosing loop not visible).
final BitSet documentIndices = new BitSet(documentCount);
final BitSet temp = new BitSet(documentCount);
// Mark in temp the documents in which this word's stem occurs.
addTfByDocumentToBitSet(temp, stemsTfByDocument[wordsStemIndex[wordIndex]]);
// NOTE(review): and() against a freshly created (empty) documentIndices
// yields an empty set — verify against the surrounding loop that
// documentIndices is populated before this point.
documentIndices.and(temp);
// Keep only labels whose document set is large enough to form a cluster.
if (labelsDocumentIndices[i].cardinality() >= minClusterSize)
// Suffix-tree document counting step (fragment — enclosing method not visible).
me.set(documentIndex);
child.clear();
// Descend into the child along this edge, then pop the pushed edge bounds.
edges.push(stree.getStartIndex(edge), stree.getEndIndex(edge));
countDocs(level + 1, childState);
edges.discard(2);
// Fold the child's document set into this node's set.
me.or(child);
final int card = (int) me.cardinality();
// Only nodes covering enough documents are of further interest.
if (card >= minCardinality)
/**
 * Copies all set bit indices of the enclosing bit set into a new array,
 * in ascending order.
 *
 * @return array of set bit indices; length equals the current cardinality.
 */
@Override
public long [] toArray() {
    final BitSet owner = BitSet.this;
    final long [] result = new long [getCurrentCardinality()];
    int cursor = 0;
    long bit = owner.nextSetBit((long) 0);
    while (bit >= 0) {
        result[cursor++] = bit;
        bit = owner.nextSetBit(bit + 1);
    }
    return result;
}
/**
 * Collects the documents whose indices are set in {@code bitset} into the
 * given list, allocating a new list when {@code null} is passed.
 *
 * @param l      target list, or {@code null} to allocate one.
 * @param bitset indices of documents to collect.
 * @return the list the documents were added to.
 */
private List<Document> collectDocuments(List<Document> l, BitSet bitset) {
    if (l == null) {
        // Presize to the number of set bits to avoid growth on add.
        l = Lists.newArrayListWithCapacity((int) bitset.cardinality());
    }
    final BitSetIterator iterator = bitset.iterator();
    int doc = iterator.nextSetBit();
    while (doc >= 0) {
        l.add(documents.get(doc));
        doc = iterator.nextSetBit();
    }
    return l;
}
// Fragment — population of requiredStemIndices is not visible here.
final BitSet requiredStemIndices = new BitSet(labelsFeatureIndex.length);
// Expose the set bits as a sorted int array via the lookup-container view.
return requiredStemIndices.asIntLookupContainer().toArray();
/**
 * Expands nodes from the priority queue until the goal node is popped, the
 * queue drains, or the computation is cancelled. Looks like a shortest-path
 * relaxation loop (Dijkstra-style) — confirm against the enclosing class.
 *
 * @param goal      node id at which the search stops.
 * @param direction relationship direction to traverse.
 */
private void run(int goal, Direction direction) {
    while (!queue.isEmpty() && running()) {
        final int node = queue.pop();
        if (node == goal) {
            return;
        }
        visited.set(node);
        // Known cost to reach this node; MAX_VALUE when not yet reached.
        final double nodeCost = this.costs.getOrDefault(node, Double.MAX_VALUE);
        graph.forEachRelationship(
                node,
                direction,
                (source, target, relId, weight) -> {
                    final boolean improved = updateCosts(source, target, weight + nodeCost);
                    if (!visited.get(target)) {
                        if (improved) {
                            queue.update(target);
                        } else {
                            queue.add(target, 0);
                        }
                    }
                    return true;
                });
        progressLogger.logProgress((double) node / (nodeCount - 1));
    }
}
/**
 * Applies the predicate to each set bit in ascending order, stopping early
 * as soon as the predicate returns {@code false}.
 *
 * @param predicate callback invoked with each set bit index.
 * @return the predicate passed in, for chaining.
 */
@Override
public <T extends IntPredicate> T forEach(T predicate) {
    final BitSetIterator iterator = BitSet.this.iterator();
    int bit = iterator.nextSetBit();
    while (bit >= 0 && predicate.apply(bit)) {
        bit = iterator.nextSetBit();
    }
    return predicate;
}
// Fragment — enclosing merge loop not visible.
// Fold cc's documents into the equivalent group's document set.
equivalent.documents.or(cc.documents);
// NOTE(review): documents are merged INTO 'equivalent', yet the refreshed
// cardinality is read from cc.documents — verify this is intentional.
cc.cardinality = (int) cc.documents.cardinality();
// Point the scratch sequence at cc's first phrase buffer (no copy made).
scratch.buffer = cc.phrases.get(0);
scratch.elementsCount = scratch.buffer.length;