@Override public State<T, U> call() {
  // Evaluate the objective function at this state's mapped coordinates and
  // record the resulting score on the state before handing it back.
  double score = fn.apply(state.getPayload(), state.getMappedParams());
  state.setValue(score);
  return state;
} });
/**
 * Deep copies a state, useful in mutation.
 *
 * Arrays are cloned so the copy can be perturbed independently; the payload is
 * duplicated through its own copy() hook. The random generator is shared with
 * the original.
 *
 * @return A fresh State with identical contents.
 */
public State<T, U> copy() {
  State<T, U> result = new State<T, U>();
  result.params = Arrays.copyOf(this.params, this.params.length);
  result.step = Arrays.copyOf(this.step, this.step.length);
  result.maps = Arrays.copyOf(this.maps, this.maps.length);
  result.omni = this.omni;
  result.gen = this.gen;
  if (this.payload != null) {
    result.payload = (T) this.payload.copy();
  }
  return result;
}
// NOTE(review): this is pseudocode, not compilable Java — "Abstract Class" is
// capitalized and "current_state + dt" is not a valid operation on a State.
// It sketches the classic fixed-timestep game loop: keep the previous and
// current simulation states and render an interpolated state so drawing can
// run at a different cadence than updates.
Abstract Class GameObject {
  private State previous_state;  // state before the most recent update
  private State current_state;   // state after the most recent update
  private State draw_state;      // interpolated state actually rendered
  void Update(float dt) {
    previous_state = current_state.copy();
    current_state = current_state + dt;  // presumably "advance simulation by dt" — TODO confirm
  }
  void Draw(float dt) {
    draw_state = interpolate(previous_state, current_state, dt);
    //Do opengl commands to draw draw_state
  }
}
public static void freeze(State<Wrapper, CrossFoldLearner> s) { // radically decrease learning rate double[] params = s.getParams(); params[1] -= 10; // and cause evolution to hold (almost) s.setOmni(s.getOmni() / 20); double[] step = s.getStep(); for (int i = 0; i < step.length; i++) { step[i] /= 20; } }
@Test
public void testConverges() throws Exception {
  // Seed the population with a zeroed 5-dimensional state.
  State<Foo, Double> seed = new State<Foo, Double>(new double[5], 1);
  seed.setPayload(new Foo());
  EvolutionaryProcess<Foo, Double> ep = new EvolutionaryProcess<Foo, Double>(10, 100, seed);
  State<Foo, Double> best = null;
  for (int generation = 0; generation < 20; generation++) {
    // Fitness is the negated weighted squared distance from (1, 2, 3, 4, 5),
    // so the optimum fitness value is exactly 0.
    best = ep.parallelDo(new EvolutionaryProcess.Function<Payload<Double>>() {
      @Override
      public double apply(Payload<Double> payload, double[] params) {
        int i = 1;
        double sum = 0;
        for (double x : params) {
          sum += i * (x - i) * (x - i);
          i++;
        }
        return -sum;
      }
    });
    ep.mutatePopulation(3);
    System.out.printf("%10.3f %.3f\n", best.getValue(), best.getOmni());
  }
  ep.close();
  assertNotNull(best);
  assertEquals(0.0, best.getValue(), 0.02);
}
/**
 * Returns the number of target categories, delegating to the seed state's
 * underlying learner.
 */
public int getNumCategories() {
  return seed.getPayload().getLearner().numCategories();
}
/**
 * Clones this state with a random change in position. Copies the payload and
 * lets it know about the change.
 *
 * @return A new state.
 */
public State<T, U> mutate() {
  // Euclidean norm of the current step vector; keeps new mutations on a scale
  // comparable to recent moves.
  double sum = 0;
  for (double v : step) {
    sum += v * v;
  }
  sum = Math.sqrt(sum);
  // Random per-mutation scaling factor applied to the inherited step.
  double lambda = 1 + gen.nextGaussian();
  State<T, U> r = this.copy();
  // Blend the previous mutation scale (omni) with the step magnitude, then
  // draw the new omni from an exponential distribution with that mean:
  // -log1p(-U) is a unit-exponential sample.
  double magnitude = 0.9 * omni + sum / 10;
  r.omni = magnitude * -Math.log1p(-gen.nextDouble());
  for (int i = 0; i < step.length; i++) {
    // New step = scaled old step plus Gaussian noise sized by the new omni.
    r.step[i] = lambda * step[i] + r.omni * gen.nextGaussian();
    r.params[i] += r.step[i];
  }
  // Tell the copied payload about its new (mapped) coordinates.
  if (this.payload != null) {
    r.payload.update(r.getMappedParams());
  }
  return r;
}
/**
 * @param numCategories The number of categories (labels) to train on
 * @param numFeatures The number of features used in creating the vectors (i.e. the cardinality of the vector)
 * @param prior The {@link org.apache.mahout.classifier.sgd.PriorFunction} to use
 * @param threadCount The number of threads to use for training
 * @param poolSize The number of {@link org.apache.mahout.classifier.sgd.CrossFoldLearner} to use.
 */
public AdaptiveLogisticRegression(int numCategories, int numFeatures, PriorFunction prior, int threadCount, int poolSize) {
  this.numFeatures = numFeatures;
  this.threadCount = threadCount;
  this.poolSize = poolSize;
  // Two evolvable parameters, initial omni (mutation scale) of 10.
  seed = new State<Wrapper, CrossFoldLearner>(new double[2], 10);
  Wrapper w = new Wrapper(numCategories, numFeatures, prior);
  seed.setPayload(w);
  Wrapper.setMappings(seed);
  // FIX: the original called seed.setPayload(w) a second time here; the
  // repeated call was redundant and has been removed.
  setPoolSize(this.poolSize);
}
// Fragment of a larger method (enclosing braces lie outside this view):
// gather diagnostics from the current best learner, or persist a model when
// no diagnostics are available.
CrossFoldLearner state = best.getPayload().getLearner();
averageCorrect = state.percentCorrect();
averageLL = state.logLikelihood();
norm = beta.aggregate(Functions.PLUS, Functions.ABS);
lambda = learningAlgorithm.getBest().getMappedParams()[0];
mu = learningAlgorithm.getBest().getMappedParams()[1];
} else {
  maxBeta = 0;
  if (learningAlgorithm.getBest() != null) {
    // Persist the first fold's model for later inspection.
    // FIX: the original text repeated the argument expression
    // "learningAlgorithm.getBest().getPayload().getLearner().getModels().get(0));"
    // after the closing call, leaving an unbalanced, uncompilable stray line;
    // the duplicate has been removed.
    ModelSerializer.writeBinary("/tmp/news-group-" + k + ".model",
        learningAlgorithm.getBest().getPayload().getLearner().getModels().get(0));
// Deserialization fragment: restore the evolutionary process, then the best
// state so far, then the seed state, all from the same input stream.
// The read order must match the corresponding write order — TODO confirm
// against the matching write() method, which is outside this view.
ep.readFields(in);
best = new State<Wrapper, CrossFoldLearner>();
best.readFields(in);
seed = new State<Wrapper, CrossFoldLearner>();
seed.readFields(in);
// Fragment of a reduction over completed evaluation futures: track the
// maximum non-NaN fitness value seen. The loop body is truncated in this
// view (presumably the best state is also recorded here — TODO confirm).
for (Future<State<T, U>> future : r) {
  State<T, U> s = future.get();
  double value = s.getValue();
  // NaN fitness (failed/undefined evaluation) is skipped rather than compared.
  if (!Double.isNaN(value) && value >= max) {
    max = value;
// Progress trace: iteration index, AUC of the best learner, and the best
// state's two mapped parameters (the first shown on a log10 scale).
System.out.printf("%10d %10.4f %10.8f %.3f\n", i, adaptiveLogisticRegression.auc(), Math.log10(adaptiveLogisticRegression.getBest().getMappedParams()[0]), adaptiveLogisticRegression.getBest().getMappedParams()[1]);
/**
 * Returns all the parameters in mapped form.
 *
 * @return A newly allocated array containing get(i) for each parameter index.
 */
public double[] getMappedParams() {
  // Allocate directly: the original used Arrays.copyOf(params, ...) and then
  // overwrote every element below, making the copy redundant.
  double[] r = new double[params.length];
  for (int i = 0; i < params.length; i++) {
    r[i] = get(i);
  }
  return r;
}
public static void freeze(State<Wrapper, CrossFoldLearner> s) { // radically decrease learning rate double[] params = s.getParams(); params[1] -= 10; // and cause evolution to hold (almost) s.setOmni(s.getOmni() / 20); double[] step = s.getStep(); for (int i = 0; i < step.length; i++) { step[i] /= 20; } }
/**
 * Reports the AUC for the current best member of the population.  When there
 * is no best member — usually because no training has happened yet — NaN is
 * returned instead.
 *
 * @return The best member's AUC, or NaN if it cannot be determined.
 */
public double auc() {
  if (best != null) {
    return best.getPayload().getLearner().auc();
  }
  return Double.NaN;
}
/**
 * Produces a randomly perturbed clone of this state.  The clone's payload is
 * copied and then notified of the new (mapped) parameter values.
 *
 * @return A new, mutated state.
 */
public State<T, U> mutate() {
  // Euclidean norm of the current step vector.
  double stepNorm = 0;
  for (double component : step) {
    stepNorm += component * component;
  }
  stepNorm = Math.sqrt(stepNorm);
  // Random scaling applied to the inherited step direction.
  double inheritance = 1 + gen.nextGaussian();
  State<T, U> mutant = this.copy();
  // The mean mutation scale blends the old omni with the step magnitude; the
  // new omni is an exponential draw with that mean (-log1p(-U) is a
  // unit-exponential sample).
  double scale = 0.9 * omni + stepNorm / 10;
  mutant.omni = scale * -Math.log1p(-gen.nextDouble());
  for (int i = 0; i < step.length; i++) {
    mutant.step[i] = inheritance * step[i] + mutant.omni * gen.nextGaussian();
    mutant.params[i] += mutant.step[i];
  }
  if (this.payload != null) {
    mutant.payload.update(mutant.getMappedParams());
  }
  return mutant;
}
/**
 * @param numCategories The number of categories (labels) to train on
 * @param numFeatures The number of features used in creating the vectors (i.e. the cardinality of the vector)
 * @param prior The {@link org.apache.mahout.classifier.sgd.PriorFunction} to use
 * @param threadCount The number of threads to use for training
 * @param poolSize The number of {@link org.apache.mahout.classifier.sgd.CrossFoldLearner} to use.
 */
public AdaptiveLogisticRegression(int numCategories, int numFeatures, PriorFunction prior, int threadCount, int poolSize) {
  this.numFeatures = numFeatures;
  this.threadCount = threadCount;
  this.poolSize = poolSize;
  // Seed state carries two evolvable parameters with an initial omni of 10.
  seed = new State<Wrapper, CrossFoldLearner>(new double[2], 10);
  Wrapper w = new Wrapper(numCategories, numFeatures, prior);
  seed.setPayload(w);
  Wrapper.setMappings(seed);
  // FIX: dropped a second, redundant seed.setPayload(w) call that the
  // original made after setMappings — the payload was already installed.
  setPoolSize(this.poolSize);
}
// Fragment of a deserialization routine: rebuild the evolutionary process,
// the best-so-far state, and the seed state from the same DataInput, in the
// order they were written (the matching writer is not visible here — the
// ordering assumption should be verified against it).
ep.readFields(in);
best = new State<Wrapper, CrossFoldLearner>();
best.readFields(in);
seed = new State<Wrapper, CrossFoldLearner>();
seed.readFields(in);
// Truncated fragment: scan the finished futures for the highest finite
// fitness value. Whatever else the if-body did (likely remembering the best
// state) is cut off in this view — TODO confirm against the full source.
for (Future<State<T, U>> future : r) {
  State<T, U> s = future.get();
  double value = s.getValue();
  // A NaN value never updates the running maximum.
  if (!Double.isNaN(value) && value >= max) {
    max = value;
/**
 * Returns all the parameters in mapped form.
 *
 * @return A fresh array whose i-th element is get(i).
 */
public double[] getMappedParams() {
  // The original first cloned params via Arrays.copyOf and then replaced
  // every element in the loop below; the clone was wasted work, so the array
  // is now allocated directly.
  double[] mapped = new double[params.length];
  for (int i = 0; i < params.length; i++) {
    mapped[i] = get(i);
  }
  return mapped;
}