/**
 * Smoke-test for {@code aggregate}: builds one estimator over 100 random
 * values and two estimators over the two halves of the same stream, prints
 * each, then aggregates the halves and prints the combined result so the
 * output can be compared against the full estimator by eye.
 */
public static void testAggregation() {
  KernelEstimator full = new KernelEstimator(0.01);
  KernelEstimator firstHalf = new KernelEstimator(0.01);
  KernelEstimator secondHalf = new KernelEstimator(0.01);

  // Fixed seed so successive runs print identical output.
  java.util.Random rng = new java.util.Random(1);
  for (int i = 0; i < 100; i++) {
    double sample = rng.nextDouble();
    full.addValue(sample, 1);
    // First 50 samples go to one partial estimator, the rest to the other.
    KernelEstimator target = (i < 50) ? firstHalf : secondHalf;
    target.addValue(sample, 1);
  }

  try {
    System.out.println("\n\nFull\n");
    System.out.println(full.toString());
    System.out.println("Prob (0): " + full.getProbability(0));
    System.out.println("\nOne\n" + firstHalf.toString());
    System.out.println("Prob (0): " + firstHalf.getProbability(0));
    System.out.println("\nTwo\n" + secondHalf.toString());
    System.out.println("Prob (0): " + secondHalf.getProbability(0));

    // aggregate() mutates and returns the receiver.
    firstHalf = firstHalf.aggregate(secondHalf);
    System.out.println("Aggregated\n");
    System.out.println(firstHalf.toString());
    System.out.println("Prob (0): " + firstHalf.getProbability(0));
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
containsKernel = true; KernelEstimator ke = (KernelEstimator) m_Distribution[j]; int numK = ke.getNumKernels(); String temps = "K" + numK + ": mean (weight)"; if (maxAttWidth < temps.length()) { if (ke.getNumKernels() > 0) { double[] means = ke.getMeans(); double[] weights = ke.getWeights(); for (int k = 0; k < ke.getNumKernels(); k++) { String m = Utils.doubleToString(means[k], maxWidth, 4).trim(); m += " (" for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String nk = "" + ke.getNumKernels(); temp.append(pad(nk, " ", maxWidth + 1 - nk.length(), true)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String stdD = Utils.doubleToString(ke.getStdDev(), maxWidth, 4) .trim(); temp.append(pad(stdD, " ", maxWidth + 1 - stdD.length(), true)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String prec = Utils.doubleToString(ke.getPrecision(), maxWidth, 4) .trim(); temp.append(pad(prec, " ", maxWidth + 1 - prec.length(), true));
return; KernelEstimator newEst = new KernelEstimator(0.01); for (int i = 0; i < argv.length - 3; i += 2) { newEst.addValue(Double.valueOf(argv[i]).doubleValue(), Double.valueOf(argv[i + 1]).doubleValue()); for (double current = start; current < finish; current += (finish - start) / 50) { System.out.println("Data: " + current + " " + newEst.getProbability(current)); KernelEstimator.testAggregation(); } catch (Exception e) { System.out.println(e.getMessage());
/** * Constructor * * @param numCondSymbols the number of conditioning symbols * @param precision the precision to which numeric values are given. For * example, if the precision is stated to be 0.1, the values in the * interval (0.25,0.35] are all treated as 0.3. */ public KDConditionalEstimator(int numCondSymbols, double precision) { m_Estimators = new KernelEstimator [numCondSymbols]; for(int i = 0; i < numCondSymbols; i++) { m_Estimators[i] = new KernelEstimator(precision); } }
return; data = round(data); int insertIndex = findNearestValue(data); if ((m_NumValues <= insertIndex) || (m_Values[insertIndex] != data)) { if (m_NumValues < m_Values.length) {
/**
 * Merges another kernel estimator into this one by re-adding each of its
 * stored values with its associated weight.
 *
 * @param toAggregate the estimator whose values are folded into this one
 * @return this estimator, after aggregation
 * @throws Exception declared by the aggregation contract
 */
@Override
public KernelEstimator aggregate(KernelEstimator toAggregate) throws Exception {
  int count = toAggregate.m_NumValues;
  for (int idx = 0; idx < count; idx++) {
    // Replaying (value, weight) pairs reproduces the other estimator's mass.
    addValue(toAggregate.m_Values[idx], toAggregate.m_Weights[idx]);
  }
  return this;
}
/** * Get a probability estimator for a value * * @param given the new value that data is conditional upon * @return the estimator for the supplied value given the condition */ public Estimator getEstimator(double given) { Estimator result = new DiscreteEstimator(m_Estimators.length,false); for(int i = 0; i < m_Estimators.length; i++) { //System.out.println("Val " + i // + " Weight:" + m_Weights.getProbability(i) // +" EstProb(" + given + ")=" // + m_Estimators[i].getProbability(given)); result.addValue(i, m_Weights.getProbability(i) * m_Estimators[i].getProbability(given)); } return result; }
int start = findNearestValue(data); for (int i = start; i < m_NumValues; i++) { delta = m_Values[i] - data;
containsKernel = true; KernelEstimator ke = (KernelEstimator) m_Distribution[j]; int numK = ke.getNumKernels(); String temps = "K" + numK + ": mean (weight)"; if (maxAttWidth < temps.length()) { if (ke.getNumKernels() > 0) { double[] means = ke.getMeans(); double[] weights = ke.getWeights(); for (int k = 0; k < ke.getNumKernels(); k++) { String m = Utils.doubleToString(means[k], maxWidth, 4).trim(); m += " (" for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String nk = "" + ke.getNumKernels(); temp.append(pad(nk, " ", maxWidth + 1 - nk.length(), true)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String stdD = Utils.doubleToString(ke.getStdDev(), maxWidth, 4) .trim(); temp.append(pad(stdD, " ", maxWidth + 1 - stdD.length(), true)); for (int k = 0; k < m_Instances.numClasses(); k++) { KernelEstimator ke = (KernelEstimator) m_Distributions[counter][k]; String prec = Utils.doubleToString(ke.getPrecision(), maxWidth, 4) .trim(); temp.append(pad(prec, " ", maxWidth + 1 - prec.length(), true));
return; KernelEstimator newEst = new KernelEstimator(0.01); for (int i = 0; i < argv.length - 3; i += 2) { newEst.addValue(Double.valueOf(argv[i]).doubleValue(), Double.valueOf(argv[i + 1]).doubleValue()); for (double current = start; current < finish; current += (finish - start) / 50) { System.out.println("Data: " + current + " " + newEst.getProbability(current)); KernelEstimator.testAggregation(); } catch (Exception e) { System.out.println(e.getMessage());
/**
 * Constructor.
 *
 * @param numCondSymbols the number of conditioning symbols
 * @param precision the precision to which numeric values are given. For
 *          example, if the precision is stated to be 0.1, the values in the
 *          interval (0.25,0.35] are all treated as 0.3.
 */
public KDConditionalEstimator(int numCondSymbols, double precision) {
  m_Estimators = new KernelEstimator[numCondSymbols];
  // Every conditioning symbol gets its own kernel estimator at the
  // requested precision.
  for (int idx = numCondSymbols - 1; idx >= 0; idx--) {
    m_Estimators[idx] = new KernelEstimator(precision);
  }
}
return; data = round(data); int insertIndex = findNearestValue(data); if ((m_NumValues <= insertIndex) || (m_Values[insertIndex] != data)) { if (m_NumValues < m_Values.length) {
/**
 * Folds the contents of another estimator into this one: every stored
 * value/weight pair of {@code toAggregate} is added to this estimator.
 *
 * @param toAggregate the estimator to merge in
 * @return this estimator
 * @throws Exception declared by the aggregation contract
 */
@Override
public KernelEstimator aggregate(KernelEstimator toAggregate) throws Exception {
  for (int pos = 0, n = toAggregate.m_NumValues; pos < n; pos++) {
    addValue(toAggregate.m_Values[pos], toAggregate.m_Weights[pos]);
  }
  return this;
}
/** * Get a probability estimator for a value * * @param given the new value that data is conditional upon * @return the estimator for the supplied value given the condition */ public Estimator getEstimator(double given) { Estimator result = new DiscreteEstimator(m_Estimators.length,false); for(int i = 0; i < m_Estimators.length; i++) { //System.out.println("Val " + i // + " Weight:" + m_Weights.getProbability(i) // +" EstProb(" + given + ")=" // + m_Estimators[i].getProbability(given)); result.addValue(i, m_Weights.getProbability(i) * m_Estimators[i].getProbability(given)); } return result; }
int start = findNearestValue(data); for (int i = start; i < m_NumValues; i++) { delta = m_Values[i] - data;
/**
 * Demo of estimator aggregation: a single estimator trained on the whole
 * random stream is printed alongside two estimators trained on each half;
 * the halves are then aggregated and printed for visual comparison.
 */
public static void testAggregation() {
  KernelEstimator whole = new KernelEstimator(0.01);
  KernelEstimator partA = new KernelEstimator(0.01);
  KernelEstimator partB = new KernelEstimator(0.01);

  // Deterministic seed keeps the printed output reproducible.
  java.util.Random random = new java.util.Random(1);
  for (int count = 0; count < 100; count++) {
    double value = random.nextDouble();
    whole.addValue(value, 1);
    if (count < 50) {
      partA.addValue(value, 1);
    } else {
      partB.addValue(value, 1);
    }
  }

  try {
    System.out.println("\n\nFull\n");
    System.out.println(whole.toString());
    System.out.println("Prob (0): " + whole.getProbability(0));
    System.out.println("\nOne\n" + partA.toString());
    System.out.println("Prob (0): " + partA.getProbability(0));
    System.out.println("\nTwo\n" + partB.toString());
    System.out.println("Prob (0): " + partB.getProbability(0));

    // aggregate() returns the receiver after merging partB into it.
    partA = partA.aggregate(partB);
    System.out.println("Aggregated\n");
    System.out.println(partA.toString());
    System.out.println("Prob (0): " + partA.getProbability(0));
  } catch (Exception ex) {
    ex.printStackTrace();
  }
}
/** * Constructor * * @param numSymbols the number of symbols * @param precision the precision to which numeric values are given. For * example, if the precision is stated to be 0.1, the values in the * interval (0.25,0.35] are all treated as 0.3. */ public DKConditionalEstimator(int numSymbols, double precision) { m_Estimators = new KernelEstimator [numSymbols]; for(int i = 0; i < numSymbols; i++) { m_Estimators[i] = new KernelEstimator(precision); } m_Weights = new DiscreteEstimator(numSymbols, true); }
/**
 * Add a new data value to the current estimator.
 *
 * @param data the new data value
 * @param given the new value that data is conditional upon
 * @param weight the weight assigned to the data value
 */
public void addValue(double data, double given, double weight) {
  // The conditioning value indexes the per-symbol estimator array.
  int conditionIndex = (int) given;
  m_Estimators[conditionIndex].addValue(data, weight);
}
/**
 * Constructor.
 *
 * @param numSymbols the number of symbols
 * @param precision the precision to which numeric values are given. For
 *          example, if the precision is stated to be 0.1, the values in the
 *          interval (0.25,0.35] are all treated as 0.3.
 */
public DKConditionalEstimator(int numSymbols, double precision) {
  m_Estimators = new KernelEstimator[numSymbols];
  for (int s = numSymbols - 1; s >= 0; s--) {
    // Each symbol receives an independent kernel estimator.
    m_Estimators[s] = new KernelEstimator(precision);
  }
  // Discrete prior over the symbols, with Laplace correction enabled.
  m_Weights = new DiscreteEstimator(numSymbols, true);
}
/**
 * Add a new data value to the current estimator.
 *
 * @param data the new data value
 * @param given the new value that data is conditional upon
 * @param weight the weight assigned to the data value
 */
public void addValue(double data, double given, double weight) {
  // Route the observation to the estimator selected by the condition.
  m_Estimators[(int) given].addValue(data, weight);
}