/**
 * Get the resulting dimensionality.
 *
 * @return dimensionality, i.e. the number of bits set in the selected
 *         attributes bitmask
 */
public int getDimensionality() {
  // Count the set bits of the attribute selection mask.
  final int selected = BitsUtil.cardinality(selectedAttributes);
  return selected;
}
/**
 * Compute the vector cardinality (uncached!)
 *
 * @return Vector cardinality, i.e. the number of set bits in {@code bits}
 */
public int cardinality() {
  // Recomputed on every call; the result is deliberately not cached.
  final long[] mask = bits;
  return BitsUtil.cardinality(mask);
}
/**
 * Creates a new k-dimensional subspace of the original data space.
 *
 * @param dimensions the dimensions building this subspace
 */
public Subspace(long[] dimensions) {
  // Cache the cardinality up front so it need not be recomputed later.
  this.dimensionality = BitsUtil.cardinality(dimensions);
  this.dimensions = dimensions;
}
/**
 * Convert a bitset into integer column ids.
 *
 * @param cols column selection, as a packed {@code long[]} bitset
 * @return integer column ids of the set bits, in ascending order
 */
protected int[] colsBitsetToIDs(long[] cols) {
  // Exactly one slot per set bit.
  final int[] colIDs = new int[BitsUtil.cardinality(cols)];
  int out = 0;
  for (int w = 0; w < cols.length; ++w) {
    long word = cols[w];
    final int base = w * Long.SIZE;
    // Report set bits from lowest to highest; clear each after use.
    while (word != 0L) {
      colIDs[out++] = base + Long.numberOfTrailingZeros(word);
      word &= word - 1L; // Clear the lowest set bit.
    }
  }
  return colIDs;
}
@Override
public int size() {
  // Number of selected bits; in inverse mode, the selection is the
  // complement of the bitmask within the data set.
  final int card = BitsUtil.cardinality(bits);
  return inverse ? data.size() - card : card;
}
/** * Compute SOD score. * * @param queryObject Query object * @param center Center vector * @param weightVector Weight vector * @return sod score */ private double subspaceOutlierDegree(V queryObject, Vector center, long[] weightVector) { final int card = BitsUtil.cardinality(weightVector); if(card == 0) { return 0; } final SubspaceEuclideanDistanceFunction df = new SubspaceEuclideanDistanceFunction(weightVector); double distance = df.distance(queryObject, center); distance /= card; // FIXME: defined and published as card, should be // sqrt(card), unfortunately return distance; }
while(BitsUtil.cardinality(bitset) < cardinality) { BitsUtil.setI(bitset, random.nextInt(capacity)); while(BitsUtil.cardinality(bitset) > cardinality) { BitsUtil.clearI(bitset, random.nextInt(capacity));
/** * Performs outlier detection by testing the Mahalanobis distance of each * point in a cluster against the critical value of the ChiSquared * distribution with as many degrees of freedom as the cluster has relevant * attributes. * * @param relation Data relation * @param models Cluster models * @param clusterCandidates the list of clusters to check. * @param noise the set to which to add points deemed outliers. */ private void findOutliers(Relation<V> relation, List<MultivariateGaussianModel> models, ArrayList<ClusterCandidate> clusterCandidates, ModifiableDBIDs noise) { Iterator<MultivariateGaussianModel> it = models.iterator(); for(int c = 0; it.hasNext(); c++) { MultivariateGaussianModel model = it.next(); final ClusterCandidate candidate = clusterCandidates.get(c); final int dof = BitsUtil.cardinality(candidate.dimensions); final double threshold = ChiSquaredDistribution.quantile(1 - alpha, dof); for(DBIDMIter iter = candidate.ids.iter(); iter.valid(); iter.advance()) { final double distance = model.mahalanobisDistance(relation.get(iter)); if(distance >= threshold) { // Outlier, remove it and add it to the outlier set. noise.add(iter); iter.remove(); } } } }
int childCardinality = BitsUtil.cardinality(childPV); for(long[] parentPV : clustersMap.keySet()) { int parentCardinality = BitsUtil.cardinality(parentPV); if(parentCardinality >= childCardinality) { continue;
/** * Compute the common subspace dimensionality of two vectors. * * @param v1 First vector * @param v2 Second vector * @param pv1 First preference * @param pv2 Second preference * @param commonPreferenceVector Common preference * @return Usually, v1.dim - commonPreference.cardinality, unless either pv1 * and pv2 are a subset of the other. */ private int subspaceDimensionality(NumberVector v1, NumberVector v2, long[] pv1, long[] pv2, long[] commonPreferenceVector) { // number of zero values in commonPreferenceVector int subspaceDim = v1.getDimensionality() - BitsUtil.cardinality(commonPreferenceVector); // special case: v1 and v2 are in parallel subspaces if(BitsUtil.equal(commonPreferenceVector, pv1) || BitsUtil.equal(commonPreferenceVector, pv2)) { double d = weightedDistance(v1, v2, commonPreferenceVector); if(d > 2 * epsilon) { subspaceDim++; } } return subspaceDim; }
if(BitsUtil.cardinality(scratch.items) != 2) { break prefix; // No prefix match; since sorted, no more can follow!
/** * Convert a bitset into integer row ids. * * @param rows * @return integer row ids */ protected ArrayDBIDs rowsBitsetToIDs(long[] rows) { ArrayModifiableDBIDs rowIDs = DBIDUtil.newArray(BitsUtil.cardinality(rows)); DBIDArrayIter iter = this.rowIDs.iter(); outer: for (int rlpos = 0; rlpos < rows.length; ++rlpos) { long rlong = rows[rlpos]; // Fast skip blocks of 64 masked values. if (rlong == 0L) { iter.advance(Long.SIZE); continue; } for (int i = 0; i < Long.SIZE; ++i, rlong >>>= 1, iter.advance()) { if (!iter.valid()) { break outer; } if ((rlong & 1L) == 1L) { rowIDs.add(iter); } } } return rowIDs; }
for(long[] pv : clustersMap.keySet()) { if(BitsUtil.cardinality(pv) == 0) { List<ArrayModifiableDBIDs> parallelClusters = clustersMap.get(pv); for(ArrayModifiableDBIDs c : parallelClusters) {
if(D == null || BitsUtil.cardinality(nD) > BitsUtil.cardinality(D)) { D = nD; dV.set(iter); if(BitsUtil.cardinality(D) >= d_zero) { if(iprogress != null) { iprogress.setProcessed(iprogress.getTotal(), LOG); if(D == null || BitsUtil.cardinality(D) == 0) { return null;
maxdim = Math.max(maxdim, BitsUtil.cardinality(cluster.getModel().getDimensions())); for(Cluster<? extends SubspaceModel> cluster : clustering.getAllClusters()) { double relsize = cluster.size() / (double) maxsize; double reldim = BitsUtil.cardinality(cluster.getModel().getDimensions()) / (double) maxdim;
for(int d = 0; d < dim; d++) { long[] cand = closer[d]; int card = BitsUtil.cardinality(cand); card = Math.max(card, num - card); if(card == num) {
int card = BitsUtil.cardinality(selectedAttributes); if(factory instanceof SparseNumberVector.Factory) { final SparseNumberVector.Factory<?> sfactory = (SparseNumberVector.Factory<?>) factory;
if(BitsUtil.cardinality(nD) > 0) { LOG.finer("Testing a cluster candidate, |C| = " + nC.size() + ", |D| = " + BitsUtil.cardinality(nD)); double nQuality = computeClusterQuality(nC.size(), BitsUtil.cardinality(nD)); if(nQuality > quality) { if(LOG.isDebuggingFiner()) {
setupCanvas(); final StyleLibrary style = context.getStyleLibrary(); int projdim = BitsUtil.cardinality(proj.getVisibleDimensions2D()); ColorLibrary colors = style.getColorSet(StyleLibrary.PLOT);
@Override public void fullRedraw() { setupCanvas(); final StyleLibrary style = context.getStyleLibrary(); int projdim = BitsUtil.cardinality(proj.getVisibleDimensions2D()); ColorLibrary colors = style.getColorSet(StyleLibrary.PLOT); if(tree != null) { E root = tree.getRootEntry(); for(int i = 0; i < tree.getHeight(); i++) { CSSClass cls = new CSSClass(this, INDEX + i); // Relative depth of this level. 1.0 = toplevel final double relDepth = 1. - (((double) i) / tree.getHeight()); if(settings.fill) { cls.setStatement(SVGConstants.CSS_STROKE_PROPERTY, colors.getColor(i)); cls.setStatement(SVGConstants.CSS_STROKE_WIDTH_PROPERTY, relDepth * style.getLineWidth(StyleLibrary.PLOT)); cls.setStatement(SVGConstants.CSS_FILL_PROPERTY, colors.getColor(i)); cls.setStatement(SVGConstants.CSS_FILL_OPACITY_PROPERTY, 0.1 / (projdim - 1)); } else { cls.setStatement(SVGConstants.CSS_STROKE_PROPERTY, colors.getColor(i)); cls.setStatement(SVGConstants.CSS_STROKE_WIDTH_PROPERTY, relDepth * style.getLineWidth(StyleLibrary.PLOT)); cls.setStatement(SVGConstants.CSS_FILL_PROPERTY, SVGConstants.CSS_NONE_VALUE); } cls.setStatement(SVGConstants.CSS_STROKE_LINECAP_PROPERTY, SVGConstants.CSS_ROUND_VALUE); cls.setStatement(SVGConstants.CSS_STROKE_LINEJOIN_PROPERTY, SVGConstants.CSS_ROUND_VALUE); svgp.addCSSClassOrLogError(cls); } visualizeRTreeEntry(svgp, layer, proj, tree, root, 0); } }