/**
 * Builds an elastic-band prior mixing an L1 prior with a unit-variance L2 prior.
 *
 * @param alphaByLambda Relative weight of the L1 component versus the L2 component.
 */
public ElasticBandPrior(double alphaByLambda) {
  this.alphaByLambda = alphaByLambda;
  this.l1 = new L1();
  this.l2 = new L2(1);
}
/**
 * Builds an elastic-band prior mixing an L1 prior with a unit-variance L2 prior.
 *
 * @param alphaByLambda Relative weight of the L1 component versus the L2 component.
 */
public ElasticBandPrior(double alphaByLambda) {
  this.alphaByLambda = alphaByLambda;
  this.l1 = new L1();
  this.l2 = new L2(1);
}
/**
 * Builds an elastic-band prior mixing an L1 prior with a unit-variance L2 prior.
 *
 * @param alphaByLambda Relative weight of the L1 component versus the L2 component.
 */
public ElasticBandPrior(double alphaByLambda) {
  this.alphaByLambda = alphaByLambda;
  this.l1 = new L1();
  this.l2 = new L2(1);
}
// Hadoop Writable deserialization: restores the L1/L2 mixing weight, then delegates to
// freshly created nested L1 and L2 priors so each can read its own serialized state.
// NOTE(review): read order presumably mirrors the corresponding write() — that method is
// outside this chunk; verify there. The trailing brace closes the enclosing class.
@Override public void readFields(DataInput in) throws IOException { alphaByLambda = in.readDouble(); l1 = new L1(); l1.readFields(in); l2 = new L2(); l2.readFields(in); } }
// Hadoop Writable deserialization: restores the L1/L2 mixing weight, then delegates to
// freshly created nested L1 and L2 priors so each can read its own serialized state.
// NOTE(review): read order presumably mirrors the corresponding write() — that method is
// outside this chunk; verify there. The trailing brace closes the enclosing class.
@Override public void readFields(DataInput in) throws IOException { alphaByLambda = in.readDouble(); l1 = new L1(); l1.readFields(in); l2 = new L2(); l2.readFields(in); } }
// Hadoop Writable deserialization: restores the L1/L2 mixing weight, then delegates to
// freshly created nested L1 and L2 priors so each can read its own serialized state.
// NOTE(review): read order presumably mirrors the corresponding write() — that method is
// outside this chunk; verify there. The trailing brace closes the enclosing class.
@Override public void readFields(DataInput in) throws IOException { alphaByLambda = in.readDouble(); l1 = new L1(); l1.readFields(in); l2 = new L2(); l2.readFields(in); } }
// NOTE(review): fragment — the guard condition for this throw sits above this chunk
// (presumably a check that fewer than two categories were configured; confirm upstream).
// On the happy path: builds an OnlineLogisticRegression sized by the category count and
// the "features" option, with an L1 prior, then removes the two options now consumed so
// any later generic option processing does not see them again.
throw new BadClassifierSpecException("Must have more than one target category. Remember that categories is a space separated list"); model = new OnlineLogisticRegression(categories.size(), Integer.parseInt(options.get("features")), new L1()); options.remove("categories"); options.remove("features");
// Fragment: configures a small fixed-shape learner — 2 categories, 3 features, L1 prior —
// with lambda 0.1 and learning rate 10. NOTE(review): enclosing method/field context is
// outside this chunk; the unusually large learning rate is presumably intentional for a test.
learningAlgo = new OnlineLogisticRegression(2, 3, new L1()); learningAlgo.lambda(0.1); learningAlgo.learningRate(10);
/**
 * Maps the configured prior selector onto a concrete {@link PriorFunction} instance.
 * Custom-parameterized variants are honored for L2, elastic band, and T priors; the
 * T prior requires an explicit parameter and refuses to guess a default.
 */
private PriorFunction getPrior() {
  switch (prior) {
    case uniform:
      return new UniformPrior();
    case L1:
      return new L1();
    case L2:
      return prior.custom ? new L2(prior.param) : new L2();
    case elastic:
      return prior.custom ? new ElasticBandPrior(prior.param) : new ElasticBandPrior();
    case t:
      if (prior.custom) {
        return new TPrior(prior.param);
      }
      throw new IllegalArgumentException("Must specify df with 'priorParam' in config to use T prior function.");
    case unknown:
    default:
      // Fall back to a unit-variance L2 prior for unrecognized selectors.
      return new L2(1);
  }
}
// Fragment: this fluent builder chain continues past this chunk (no terminating ';').
// Configures a 2-category, 8-feature learner with an L1 prior, lambda 1e-3, step offset 11.
OnlineLogisticRegression lr = new OnlineLogisticRegression(2, 8, new L1()) .lambda(1 * 1.0e-3) .stepOffset(11)
@Test public void testTrain() throws Exception { Vector target = readStandardData(); // lambda here needs to be relatively small to avoid swamping the actual signal, but can be // larger than usual because the data are dense. The learning rate doesn't matter too much // for this example, but should generally be < 1 // --passes 1 --rate 50 --lambda 0.001 --input sgd-y.csv --features 21 --output model --noBias // --target y --categories 2 --predictors V2 V3 V4 V5 V6 V7 --types n OnlineLogisticRegression lr = new OnlineLogisticRegression(2, 8, new L1()) .lambda(1 * 1.0e-3) .learningRate(50); train(getInput(), target, lr); test(getInput(), target, lr, 0.05, 0.3); }
/**
 * Exercises the CrossFoldLearner, probably the best learner to use for new applications.
 *
 * @throws IOException If test resources aren't readable.
 */
@Test
public void crossValidation() throws IOException {
  Vector target = readStandardData();

  // 5 folds, 2 categories, 8 features, L1 prior — same hyper-parameters as testTrain.
  CrossFoldLearner lr = new CrossFoldLearner(5, 2, 8, new L1());
  lr.lambda(1.0e-3);
  lr.learningRate(50);

  train(getInput(), target, lr);
  System.out.printf("%.2f %.5f\n", lr.auc(), lr.logLikelihood());
  test(getInput(), target, lr, 0.05, 0.3);
}
/** With a fixed evaluation interval, every step in [15000, 20000) maps to the same boundary. */
@Test
@ThreadLeakLingering(linger = 1000)
public void constantStep() {
  AdaptiveLogisticRegression lr = new AdaptiveLogisticRegression(2, 1000, new L1());
  lr.setInterval(5000);
  for (int step : new int[] {15000, 15001, 16500, 19999}) {
    assertEquals(20000, lr.nextStep(step));
  }
  lr.close();
}
@Test @ThreadLeakLingering(linger = 1000) public void growingStep() { AdaptiveLogisticRegression lr = new AdaptiveLogisticRegression(2, 1000, new L1()); lr.setInterval(2000, 10000); // start with minimum step size for (int i = 2000; i < 20000; i+=2000) { assertEquals(i + 2000, lr.nextStep(i)); } // then level up a bit for (int i = 20000; i < 50000; i += 5000) { assertEquals(i + 5000, lr.nextStep(i)); } // and more, but we top out with this step size for (int i = 50000; i < 500000; i += 10000) { assertEquals(i + 10000, lr.nextStep(i)); } lr.close(); } }
/**
 * Serializes a trained OnlineLogisticRegression and checks that the restored copy has the
 * same coefficients, and that both copies continue to train identically afterwards.
 */
@Test
public void onlineLogisticRegressionRoundTrip() throws IOException {
  OnlineLogisticRegression original = new OnlineLogisticRegression(2, 5, new L1());
  train(original, 100);

  OnlineLogisticRegression restored = roundTrip(original, OnlineLogisticRegression.class);
  double maxDelta = original.getBeta().minus(restored.getBeta()).aggregate(Functions.MAX, Functions.IDENTITY);
  assertEquals(0, maxDelta, 1.0e-6);

  // Further identical training must keep the two models in lock step.
  train(original, 100);
  train(restored, 100);
  maxDelta = original.getBeta().minus(restored.getBeta()).aggregate(Functions.MAX, Functions.IDENTITY);
  assertEquals(0, maxDelta, 1.0e-6);

  original.close();
  restored.close();
}
/**
 * Trains a CrossFoldLearner on the cancer data for many epochs and checks that the
 * cross-validated AUC converges toward 1. Uses the fixed test seed for reproducibility.
 */
@Test
public void crossValidatedAuc() throws IOException {
  RandomUtils.useTestSeed();
  Random rand = RandomUtils.getRandom();
  Matrix data = readCsv("cancer.csv");

  CrossFoldLearner lr = new CrossFoldLearner(5, 2, 10, new L1());
  lr.stepOffset(10);
  lr.decayExponent(0.7);
  lr.lambda(1.0e-3);
  lr.learningRate(5);

  int sample = 0;
  int[] ordering = permute(rand, data.numRows());
  for (int epoch = 0; epoch < 100; epoch++) {
    for (int row : ordering) {
      // Column 9 holds the target label; the whole row is the feature vector.
      lr.train(row, (int) data.get(row, 9), data.viewRow(row));
      System.out.printf("%d,%d,%.3f\n", epoch, sample++, lr.auc());
    }
    assertEquals(1, lr.auc(), 0.2);
  }
  assertEquals(1, lr.auc(), 0.1);
}
// Round-trips an AdaptiveLogisticRegression through serialization: the restored copy must
// report the same AUC, and both copies must keep improving with further training.
// NOTE(review): assertEquals(learner.auc(), learner.auc(), 0.005) compares a value with
// itself and can never fail — presumably one side was meant to be auc1 or a pre-train
// snapshot (the next line compares against olr3 correctly); verify the intent upstream.
@ThreadLeakLingering(linger = 1000) @Test public void adaptiveLogisticRegressionRoundTrip() throws IOException { AdaptiveLogisticRegression learner = new AdaptiveLogisticRegression(2, 5, new L1()); learner.setInterval(200); train(learner, 400); AdaptiveLogisticRegression olr3 = roundTrip(learner, AdaptiveLogisticRegression.class); double auc1 = learner.auc(); assertTrue(auc1 > 0.85); assertEquals(auc1, learner.auc(), 1.0e-6); assertEquals(auc1, olr3.auc(), 1.0e-6); train(learner, 1000); train(learner, 1000); train(olr3, 1000); assertEquals(learner.auc(), learner.auc(), 0.005); assertEquals(learner.auc(), olr3.auc(), 0.005); double auc2 = learner.auc(); assertTrue(String.format("%.3f > %.3f", auc2, auc1), auc2 > auc1); learner.close(); olr3.close(); }
// Round-trips a CrossFoldLearner through serialization: the restored copy must report the
// same AUC, and both copies must keep improving with further training.
// NOTE(review): assertEquals(learner.auc(), learner.auc(), 0.02) compares a value with
// itself and can never fail — same suspicious pattern as the adaptive round-trip test;
// presumably one side was meant to be a different model or snapshot. Verify the intent.
@Test public void crossFoldLearnerRoundTrip() throws IOException { CrossFoldLearner learner = new CrossFoldLearner(5, 2, 5, new L1()); train(learner, 100); CrossFoldLearner olr3 = roundTrip(learner, CrossFoldLearner.class); double auc1 = learner.auc(); assertTrue(auc1 > 0.85); assertEquals(auc1, learner.auc(), 1.0e-6); assertEquals(auc1, olr3.auc(), 1.0e-6); train(learner, 100); train(learner, 100); train(olr3, 100); assertEquals(learner.auc(), learner.auc(), 0.02); assertEquals(learner.auc(), olr3.auc(), 0.02); double auc2 = learner.auc(); assertTrue(auc2 > auc1); learner.close(); olr3.close(); }
// Fragment: the loop body is cut off past this chunk. Builds an AdaptiveLogisticRegression
// wrapper (presumably 2 categories and 200 features per the usual ctor shape — confirm
// against Wrapper's declaration) and iterates 3000 synthetic examples drawn from gen/beta.
AdaptiveLogisticRegression.Wrapper w = new AdaptiveLogisticRegression.Wrapper(2, 200, new L1()); for (int i = 0; i < 3000; i++) { AdaptiveLogisticRegression.TrainingExample r = getExample(i, gen, beta);