/** Delegates element-wise division by the scalar {@code x} to the wrapped vector. */
@Override
public Vector divide(double x) {
    return delegate.divide(x);
}
/** Forwards scalar division straight to the underlying delegate vector. */
@Override
public Vector divide(double x) {
    return delegate.divide(x);
}
// Rescale u by 1/beta. NOTE(review): assumes Vector.divide returns a new
// vector (as its use elsewhere suggests), so u is rebound, not mutated —
// the enclosing definition is not visible here; confirm against context.
u = u.divide(beta);
/**
 * divide(double) must return a new vector of the same size whose entries are
 * the fixture's values scaled by 1/3.
 */
@Test
public void testDivideDouble() {
    Vector val = test.divide(3);
    // Assert against the fixture's actual size rather than a hard-coded 3,
    // matching the loop bound below (and the sibling sparse-vector test).
    assertEquals("size", test.size(), val.size());
    for (int i = 0; i < test.size(); i++) {
        assertEquals("get [" + i + ']', values[OFFSET + i] / 3, val.get(i), EPSILON);
    }
}
/**
 * divide(double) on the sparse fixture: even-indexed slots stay zero, while
 * odd-indexed slots hold the corresponding stored value scaled by 1/3.
 */
@Test
public void testDivideDouble() {
    Vector quotient = test.divide(3);
    assertEquals("size", test.size(), quotient.size());
    for (int index = 0; index < test.size(); index++) {
        String label = "get [" + index + ']';
        if (index % 2 != 0) {
            // Odd slots map back to the dense backing array at index/2.
            assertEquals(label, values[index / 2] / 3.0, quotient.get(index), EPSILON);
        } else {
            assertEquals(label, 0.0, quotient.get(index), EPSILON);
        }
    }
}
/**
 * Exercises Centroid.update: the centroid maintains a running weighted mean
 * of the samples folded into it, and a Centroid built over a DenseVector
 * shares that vector's storage.
 */
@Test
public void testUpdate() {
    MultiNormal f = new MultiNormal(20);
    Vector a = f.sample();
    Vector b = f.sample();
    Vector c = f.sample();
    DenseVector x = new DenseVector(a);
    Centroid x1 = new Centroid(1, x);
    // Fold in a second centroid; x1 should now average a and b.
    x1.update(new Centroid(2, new DenseVector(b)));
    // Snapshot x1 after two samples, then fold a third sample into x1 only.
    Centroid x2 = new Centroid(x1);
    x1.update(c);
    // check for correct value
    Vector mean = a.plus(b).plus(c).assign(Functions.div(3));
    assertEquals(0, x1.getVector().minus(mean).norm(1), 1.0e-8);
    assertEquals(3, x1.getWeight(), 0);
    // The snapshot must be unaffected by the later update of x1.
    assertEquals(0, x2.minus(a.plus(b).divide(2)).norm(1), 1.0e-8);
    assertEquals(2, x2.getWeight(), 0);
    // Copy constructor with explicit index/weight reproduces x1 exactly.
    assertEquals(0, new Centroid(x1.getIndex(), x1, x1.getWeight()).minus(x1).norm(1), 1.0e-8);
    // and verify shared storage
    assertEquals(0, x.minus(x1).norm(1), 0);
    assertEquals(3, x1.getWeight(), 1.0e-8);
    assertEquals(1, x1.getIndex());
}
/** Scalar division: the call is handed off verbatim to {@code delegate}. */
@Override
public Vector divide(double x) {
    return delegate.divide(x);
}
/** Pass-through implementation: division by {@code x} is performed by the delegate. */
@Override
public Vector divide(double x) {
    return delegate.divide(x);
}
// Build the expected result by dividing vec1 by cube.
// NOTE(review): cube's declaration is not visible here — elsewhere in this
// code divide(double) takes a scalar; confirm cube is a scalar divisor.
expected = vec1.divide(cube);
/**
 * Recomputes the stored gradient as (predictedCounts - empiricalCounts)
 * plus the Gaussian-prior penalty term weights / gaussianPriorVariance.
 */
private void updateGradient() {
    Vector allWeights = this.mlLogisticRegression.getWeights().getAllWeights();
    Vector priorTerm = allWeights.divide(gaussianPriorVariance);
    this.gradient = this.predictedCounts.minus(empiricalCounts).plus(priorTerm);
}
/**
 * Compute the centroid by averaging the pointTotals.
 *
 * @return the current center when no points have been observed (S0 == 0),
 *         otherwise S1 / S0
 */
public Vector computeCentroid() {
    if (getS0() == 0) {
        // No mass accumulated yet — fall back to the existing center.
        return getCenter();
    }
    return getS1().divide(getS0());
}
/**
 * Compute the centroid by averaging the pointTotals.
 *
 * @return S1 scaled by 1/S0, or the unmodified center when S0 is zero
 */
public Vector computeCentroid() {
    // Guard against division by zero when the cluster is empty.
    if (getS0() == 0) {
        return getCenter();
    }
    return getS1().divide(getS0());
}
@Override public Vector classify(Vector instance) { Vector result = classifyNoLink(instance); // Convert to probabilities by exponentiation. double max = result.maxValue(); result.assign(Functions.minus(max)).assign(Functions.EXP); result = result.divide(result.norm(1)); return result.viewPart(1, result.size() - 1); }
/**
 * Gradient of the Gaussian prior penalty: weights / priorGaussianVariance,
 * with the entries at bias positions zeroed (biases are not penalized).
 *
 * @return a fresh vector holding the penalty gradient
 */
private Vector penaltyGradient() {
    Vector weightsVector = this.logisticRegression.getWeights().getAllWeights();
    // divide() is used elsewhere in this codebase as a non-mutating operation
    // returning a new vector, so zeroing entries below does not touch the
    // model's weights. The original zero-DenseVector + plus() round trip
    // allocated an extra vector for no effect and is dropped here.
    Vector penalty = weightsVector.divide(priorGaussianVariance);
    for (int j : logisticRegression.getWeights().getAllBiasPositions()) {
        penalty.set(j, 0);
    }
    return penalty;
}
@Override public Vector classify(Vector instance) { Vector result = classifyNoLink(instance); // Convert to probabilities by exponentiation. double max = result.maxValue(); result.assign(Functions.minus(max)).assign(Functions.EXP); result = result.divide(result.norm(1)); return result.viewPart(1, result.size() - 1); }
/**
 * Averages the weight vectors of one label's binary classifiers across all
 * mixture components of the CBM.
 *
 * @param bmm   the mixture model whose per-component classifiers are read
 * @param label the label whose classifiers are averaged
 * @return a Weights object (2 classes) holding the mean weight vector
 */
public static Weights getMean(CBM bmm, int label) {
    int numClusters = bmm.getNumComponents();
    // Hoist the repeated cast-and-index expression into a local (DRY);
    // the [0][0] classifier is only used to read sizing information.
    LogisticRegression prototype = (LogisticRegression) bmm.getBinaryClassifiers()[0][0];
    int length = prototype.getWeights().getAllWeights().size();
    int numFeatures = prototype.getNumFeatures();
    Vector mean = new DenseVector(length);
    for (int k = 0; k < numClusters; k++) {
        LogisticRegression classifier =
                (LogisticRegression) bmm.getBinaryClassifiers()[k][label];
        mean = mean.plus(classifier.getWeights().getAllWeights());
    }
    mean = mean.divide(numClusters);
    return new Weights(2, numFeatures, mean);
}