/**
 * Learn the basis by delegating to the wrapped PCA implementation, then
 * copy its basis, eigenvalues and mean into this object's fields so they
 * stay in sync with the inner PCA.
 *
 * @param data the data; one observation per row
 */
@Override
public void learnBasis(double[][] data) {
    inner.learnBasis(data);

    basis = inner.getBasis();
    eigenvalues = inner.getEigenValues();
    mean = inner.getMean();
}
/**
 * Learn the principal components of the given data matrix. Each row
 * corresponds to an observation; the number of columns gives the number
 * of dimensions.
 *
 * @param data
 *            the data
 */
public void learnBasis(Matrix data) {
    // Normalise the observations first (see buildNormalisedDataMatrix),
    // then learn the basis from the normalised matrix.
    learnBasisNorm(buildNormalisedDataMatrix(data));
}
/**
 * Select a subset of the principal components such that all remaining
 * components have a cumulative energy less than the given value.
 *
 * Calling this method throws away any extra basis vectors and
 * eigenvalues.
 *
 * @param threshold
 *            threshold on the cumulative energy.
 */
public void selectSubsetEnergyThreshold(double threshold) {
    final double[] cumulative = getCumulativeEnergies();

    // Scan from the second entry; the first index whose cumulative energy
    // falls below the threshold determines the number of retained
    // components (i - 1). If no entry is below the threshold, everything
    // is kept unchanged.
    // NOTE(review): the i - 1 retention count looks like it could be off
    // by one for monotonically increasing cumulative energies — confirm
    // against selectSubset's contract before changing.
    int i = 1;
    while (i < cumulative.length) {
        if (cumulative[i] < threshold) {
            selectSubset(i - 1);
            return;
        }
        i++;
    }
}
/**
 * Set the number of components of the PDM.
 *
 * @param n
 *            number of components
 */
public void setNumComponents(int n) {
    pc.selectSubset(n);
    // Record the number of components actually retained after the subset
    // selection, read back from the PCA's eigenvalue vector.
    numComponents = pc.getEigenValues().length;
}
/**
 * Train the model: extract a feature vector from every image, learn a PCA
 * basis over all vectors, project each class's vectors into the PCA
 * space, learn an LDA basis on the projected per-class data, and compose
 * the two bases into a single projection. The mean of the PCA is kept as
 * the model mean.
 *
 * Fix: the original mixed {@code secondObject()} and
 * {@code getSecondObject()} accessors; this version uses
 * {@code secondObject()} consistently.
 *
 * @param data
 *            pairs of (class label, image); image dimensions are taken
 *            from the first item — presumably all images share them
 *            (TODO confirm)
 */
@Override
public void train(List<? extends IndependentPair<?, FImage>> data) {
    width = data.get(0).secondObject().width;
    height = data.get(0).secondObject().height;

    // Group feature vectors by class label, and keep a flat list of all
    // vectors for learning the PCA basis.
    final Map<Object, List<double[]>> mapData = new HashMap<Object, List<double[]>>();
    final List<double[]> listData = new ArrayList<double[]>();
    for (final IndependentPair<?, FImage> item : data) {
        List<double[]> fvs = mapData.get(item.firstObject());
        if (fvs == null) {
            fvs = new ArrayList<double[]>();
            mapData.put(item.firstObject(), fvs);
        }

        final double[] fv = FImage2DoubleFV.INSTANCE.extractFeature(item.secondObject()).values;
        fvs.add(fv);
        listData.add(fv);
    }

    final PrincipalComponentAnalysis pca = new ThinSvdPrincipalComponentAnalysis(numComponents);
    pca.learnBasis(listData);

    // Project each class's vectors into the PCA space for the LDA step.
    final List<double[][]> ldaData = new ArrayList<double[][]>(mapData.size());
    for (final Entry<?, List<double[]>> e : mapData.entrySet()) {
        ldaData.add(projectAll(pca, e.getValue()));
    }

    final LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(numComponents);
    lda.learnBasis(ldaData);

    // Compose PCA then LDA into a single basis.
    basis = pca.getBasis().times(lda.getBasis());
    mean = pca.getMean();
}

/** Project every vector in the list with the given PCA. */
private static double[][] projectAll(PrincipalComponentAnalysis pca, List<double[]> vecs) {
    final double[][] projected = new double[vecs.size()][];
    for (int i = 0; i < projected.length; i++) {
        projected[i] = pca.project(vecs.get(i));
    }
    return projected;
}
// Project the observation y into the model's parameter space:
// subtract the PCA mean (as a 1-row matrix), multiply by the PCA basis,
// take the single resulting row, then constrain the parameter vector
// with the model's Constraint.
// NOTE(review): assumes y is a 1xN row-vector Matrix matching the mean's
// dimensionality — confirm with the enclosing method's caller.
final Matrix xbar = new Matrix(new double[][] { pc.getMean() }); double[] newModel = (y.minus(xbar)).times(pc.getBasis()).getArray()[0]; newModel = constraint.apply(newModel, pc.getEigenValues());
/** * Construct a {@link PointDistributionModel} from the given data and * {@link Constraint}. * * @param constraint * @param data */ public PointDistributionModel(Constraint constraint, List<PointList> data) { this.constraint = constraint; // align mean = GeneralisedProcrustesAnalysis.alignPoints(data, 5, 10); // build data matrix final Matrix m = buildDataMatrix(data); // perform pca this.pc = new SvdPrincipalComponentAnalysis(); pc.learnBasis(m); numComponents = this.pc.getEigenValues().length; }
/**
 * Learn the basis from pre-normalised data by delegating directly to the
 * wrapped PCA implementation.
 */
@Override protected void learnBasisNorm(Matrix norm) { inner.learnBasis(norm); }
/**
 * Project each keypoint's descriptor with the given PCA and append the
 * normalised, zero-centred (x, y) location as two extra dimensions.
 * Equivalent to {@code projectPCA(loadDSIFT, pca, 125f, 160f)} — the
 * historical hard-coded divisors (presumably the source image width and
 * height — TODO confirm).
 *
 * @param loadDSIFT
 *            the keypoints; modified in place
 * @param pca
 *            the learned PCA basis
 */
private static void projectPCA(
        MemoryLocalFeatureList<FloatDSIFTKeypoint> loadDSIFT,
        PrincipalComponentAnalysis pca)
{
    projectPCA(loadDSIFT, pca, 125f, 160f);
}

/**
 * Project each keypoint's descriptor with the given PCA and append the
 * (x, y) location — divided by the given width/height and shifted by
 * -0.5 — as two extra dimensions. Generalisation of the fixed 125x160
 * version above.
 *
 * @param loadDSIFT
 *            the keypoints; modified in place
 * @param pca
 *            the learned PCA basis
 * @param normWidth
 *            divisor for the x coordinate
 * @param normHeight
 *            divisor for the y coordinate
 */
private static void projectPCA(
        MemoryLocalFeatureList<FloatDSIFTKeypoint> loadDSIFT,
        PrincipalComponentAnalysis pca, float normWidth, float normHeight)
{
    for (final FloatDSIFTKeypoint kp : loadDSIFT) {
        kp.descriptor = ArrayUtils.convertToFloat(pca.project(ArrayUtils.convertToDouble(kp.descriptor)));

        // Grow the descriptor by two slots and append the normalised location.
        final int nf = kp.descriptor.length;
        kp.descriptor = Arrays.copyOf(kp.descriptor, nf + 2);
        kp.descriptor[nf] = (kp.x / normWidth) - 0.5f;
        kp.descriptor[nf + 1] = (kp.y / normHeight) - 0.5f;
    }

    // Descriptor lengths changed; refresh the list's cached vector length.
    loadDSIFT.resetVecLength();
}
/**
 * Generate a plausible new shape from the scaling vector. The scaling
 * vector is constrained by the underlying {@link Constraint} before being
 * used to generate the model.
 *
 * @param scaling
 *            scaling vector.
 * @return a new shape
 */
public PointList generateNewShape(double[] scaling) {
    // Constrain the parameters, then reconstruct the flattened
    // (x0, y0, x1, y1, ...) coordinate vector from the PCA model.
    final double[] constrained = constraint.apply(scaling, pc.getEigenValues());
    final double[] pts = pc.generate(constrained);

    final PointList shape = new PointList();
    for (int i = 0; i < pts.length; i += 2) {
        shape.points.add(new Point2dImpl((float) pts[i], (float) pts[i + 1]));
    }
    return shape;
}
/** * Get the standard deviations (sqrt of eigenvalues) of the principal * components. * * @return vector of standard deviations */ public double[] getStandardDeviations() { return getStandardDeviations(eigenvalues.length); } // delegates with the full component count
/**
 * Train the model: extract a feature vector from each image, learn a PCA
 * basis over all of them, project each class's vectors into PCA space,
 * learn an LDA basis on the per-class projections, and compose the two
 * bases. The PCA mean becomes the model mean.
 *
 * Fix: accessors are now consistently {@code secondObject()} (the
 * original mixed in {@code getSecondObject()}).
 *
 * @param data
 *            pairs of (class label, image); the first image's dimensions
 *            are recorded — presumably all images match (TODO confirm)
 */
@Override
public void train(List<? extends IndependentPair<?, FImage>> data) {
    width = data.get(0).secondObject().width;
    height = data.get(0).secondObject().height;

    // Collect feature vectors: grouped per class for LDA, and flat for PCA.
    final Map<Object, List<double[]>> mapData = new HashMap<Object, List<double[]>>();
    final List<double[]> listData = new ArrayList<double[]>();
    for (final IndependentPair<?, FImage> item : data) {
        final Object label = item.firstObject();

        List<double[]> fvs = mapData.get(label);
        if (fvs == null) {
            fvs = new ArrayList<double[]>();
            mapData.put(label, fvs);
        }

        final double[] fv = FImage2DoubleFV.INSTANCE.extractFeature(item.secondObject()).values;
        fvs.add(fv);
        listData.add(fv);
    }

    final PrincipalComponentAnalysis pca = new ThinSvdPrincipalComponentAnalysis(numComponents);
    pca.learnBasis(listData);

    // Project each class's vectors into PCA space for the LDA step.
    final List<double[][]> ldaData = new ArrayList<double[][]>(mapData.size());
    for (final Entry<?, List<double[]>> e : mapData.entrySet()) {
        ldaData.add(projectClassData(pca, e.getValue()));
    }

    final LinearDiscriminantAnalysis lda = new LinearDiscriminantAnalysis(numComponents);
    lda.learnBasis(ldaData);

    // Compose the PCA and LDA bases into a single projection.
    basis = pca.getBasis().times(lda.getBasis());
    mean = pca.getMean();
}

/** Project all vectors of a single class with the given PCA. */
private static double[][] projectClassData(PrincipalComponentAnalysis pca, List<double[]> vecs) {
    final double[][] out = new double[vecs.size()][];
    for (int i = 0; i < out.length; i++) {
        out[i] = pca.project(vecs.get(i));
    }
    return out;
}
/**
 * Learn the principal components of the given data list. Each item
 * corresponds to an observation; the number of dimensions equals the
 * length of the array.
 *
 * @param data
 *            the data
 */
public void learnBasis(List<double[]> data) {
    // Convert to a 2D array and delegate to the array-based overload.
    final double[][] rows = data.toArray(new double[data.size()][]);
    learnBasis(rows);
}
/**
 * Set the number of components of the PDM using a
 * {@link ComponentSelector}.
 *
 * @param selector
 *            the {@link ComponentSelector} to apply.
 */
public void setNumComponents(ComponentSelector selector) {
    pc.selectSubset(selector);
    // Read back the number of components the selector actually retained.
    numComponents = pc.getEigenValues().length;
}
/**
 * Project each keypoint's descriptor with the given PCA, then append the
 * keypoint's (x, y) position — divided by 125/160 and shifted by -0.5 —
 * as two extra descriptor dimensions. The keypoints are modified in
 * place; the list's cached vector length is refreshed afterwards.
 *
 * @param loadDSIFT
 *            the keypoints to transform
 * @param pca
 *            the learned PCA basis
 */
private static void projectPCA(
        MemoryLocalFeatureList<FloatDSIFTKeypoint> loadDSIFT,
        PrincipalComponentAnalysis pca)
{
    for (final FloatDSIFTKeypoint keypoint : loadDSIFT) {
        final double[] asDouble = ArrayUtils.convertToDouble(keypoint.descriptor);
        final float[] projected = ArrayUtils.convertToFloat(pca.project(asDouble));

        // Two extra slots for the spatial components.
        final int len = projected.length;
        keypoint.descriptor = Arrays.copyOf(projected, len + 2);
        keypoint.descriptor[len] = (keypoint.x / 125f) - 0.5f;
        keypoint.descriptor[len + 1] = (keypoint.y / 160f) - 0.5f;
    }

    loadDSIFT.resetVecLength();
}
/**
 * Compute the standard deviations of the shape components, multiplied by
 * the given value.
 *
 * @param multiplier
 *            the multiplier
 * @return the multiplied standard deviations
 */
public double[] getStandardDeviations(double multiplier) {
    final double[] sds = pc.getStandardDeviations();

    for (int i = 0; i < sds.length; i++) {
        sds[i] *= multiplier;
    }

    return sds;
}
/**
 * Learn the principal components of the given data array. Each row
 * corresponds to an observation; the number of columns gives the number
 * of dimensions.
 *
 * @param data
 *            the data
 */
public void learnBasis(double[][] data) {
    // Wrap in a Matrix and delegate to the Matrix-based overload.
    final Matrix wrapped = new Matrix(data);
    learnBasis(wrapped);
}
// Replace the descriptor with its PCA projection (float -> double ->
// project -> float round-trip). Part of an enclosing method not visible
// in this chunk.
descriptor = ArrayUtils.convertToFloat(pca.project(ArrayUtils.convertToDouble(descriptor)));
/**
 * Select a subset of the principal components such that all remaining
 * components have a certain percentage cumulative energy of the total.
 * The percentage is calculated relative to the total energy of the
 * eigenvalues. Bear in mind that if not all the eigenvalues were
 * calculated, or if some have previously been removed through
 * {@link #selectSubset(int)}, {@link #selectSubsetEnergyThreshold(double)}
 * or {@link #selectSubsetPercentageEnergy(double)}, then the percentage
 * calculation only factors in the remaining eigenvalues that are
 * available to it.
 *
 * Calling this method throws away any extra basis vectors and
 * eigenvalues.
 *
 * @param percentage
 *            percentage of the total cumulative energy to retain [0..1].
 */
public void selectSubsetPercentageEnergy(double percentage) {
    final double[] energy = getCumulativeEnergies();
    // Total energy is the last cumulative entry.
    final double total = energy[energy.length - 1];

    // Keep i - 1 components at the first index whose cumulative fraction
    // exceeds the requested percentage; if none does, keep everything.
    for (int i = 1; i < energy.length; i++) {
        final double fraction = energy[i] / total;
        if (fraction > percentage) {
            selectSubset(i - 1);
            return;
        }
    }
}
// Replace the descriptor with its PCA projection (float -> double ->
// project -> float round-trip). Part of an enclosing method not visible
// in this chunk.
descriptor = ArrayUtils.convertToFloat(pca.project(ArrayUtils.convertToDouble(descriptor)));