learningAlgorithm.close(); dissect(newsGroups, learningAlgorithm, files); System.out.println("exiting main");
@ThreadLeakLingering(linger = 1000)
@Test
public void adaptiveLogisticRegressionRoundTrip() throws IOException {
  // Train a model, serialize/deserialize it via roundTrip(), then check that
  // the restored copy scores identically and continues to learn.
  AdaptiveLogisticRegression trained = new AdaptiveLogisticRegression(2, 5, new L1());
  trained.setInterval(200);
  train(trained, 400);

  AdaptiveLogisticRegression restored = roundTrip(trained, AdaptiveLogisticRegression.class);

  // Baseline AUC must be reasonably good, repeatable across calls, and
  // preserved exactly by the round trip.
  double baselineAuc = trained.auc();
  assertTrue(baselineAuc > 0.85);
  assertEquals(baselineAuc, trained.auc(), 1.0e-6);
  assertEquals(baselineAuc, restored.auc(), 1.0e-6);

  // Keep training both copies (the original gets twice as much data).
  train(trained, 1000);
  train(trained, 1000);
  train(restored, 1000);
  // NOTE(review): comparing trained.auc() against itself presumably asserts
  // that repeated auc() calls are stable — confirm this was the intent.
  assertEquals(trained.auc(), trained.auc(), 0.005);
  assertEquals(trained.auc(), restored.auc(), 0.005);

  // Extra training should have improved on the baseline AUC.
  double improvedAuc = trained.auc();
  assertTrue(String.format("%.3f > %.3f", improvedAuc, baselineAuc), improvedAuc > baselineAuc);

  trained.close();
  restored.close();
}
@Test
@ThreadLeakLingering(linger = 1000)
public void constantStep() {
  // With a single fixed interval of 5000, every probe in [15000, 20000)
  // must snap to the next interval boundary, i.e. 20000.
  AdaptiveLogisticRegression regression = new AdaptiveLogisticRegression(2, 1000, new L1());
  regression.setInterval(5000);
  for (int probe : new int[] {15000, 15001, 16500, 19999}) {
    assertEquals(20000, regression.nextStep(probe));
  }
  regression.close();
}
@Test @ThreadLeakLingering(linger = 1000) public void growingStep() { AdaptiveLogisticRegression lr = new AdaptiveLogisticRegression(2, 1000, new L1()); lr.setInterval(2000, 10000); // start with minimum step size for (int i = 2000; i < 20000; i+=2000) { assertEquals(i + 2000, lr.nextStep(i)); } // then level up a bit for (int i = 20000; i < 50000; i += 5000) { assertEquals(i + 5000, lr.nextStep(i)); } // and more, but we top out with this step size for (int i = 50000; i < 500000; i += 10000) { assertEquals(i + 10000, lr.nextStep(i)); } lr.close(); } }
adaptiveLogisticRegression.close();