// Fits a multilayer perceptron classifier and extracts the underlying feed-forward
// topology model from the fitted weights.
// NOTE(review): the "()...;" after the constructor indicates elided setter calls
// (e.g. setLayers/setMaxIter) — this snippet is truncated and not compilable as-is;
// confirm against the original example source.
// The `true` argument to multiLayerPerceptron presumably selects a softmax output
// layer — TODO confirm against FeedForwardTopology's API.
MultilayerPerceptronClassifier trainer = new MultilayerPerceptronClassifier()...; MultilayerPerceptronClassificationModel model = trainer.fit(trainingData); TopologyModel topoModel = FeedForwardTopology.multiLayerPerceptron(model.layers(), true).getInstance(model.weights());
/** Prints a human-readable summary of the fitted logistic regression stage. */
public void printModel() {
  // Stage index 2 of the fitted pipeline is the logistic regression model.
  LogisticRegressionModel fitted = (LogisticRegressionModel) model.stages()[2];
  System.out.println("intercept = " + fitted.intercept());
  System.out.println("number of features = " + fitted.numFeatures());
  System.out.println("regularization parameter = " + fitted.getRegParam());
  System.out.println(fitted.explainParams());
}
/** The training summary should record one objective-history entry per iteration. */
@Test
public void logisticRegressionTrainingSummary() {
  LogisticRegressionModel fitted = new LogisticRegression().fit(dataset);
  LogisticRegressionTrainingSummary trainingSummary = fitted.summary();
  Assert.assertEquals(trainingSummary.totalIterations(), trainingSummary.objectiveHistory().length);
}
}
@Test public void logisticRegressionDefaultParams() { LogisticRegression lr = new LogisticRegression(); Assert.assertEquals(lr.getLabelCol(), "label"); LogisticRegressionModel model = lr.fit(dataset); model.transform(dataset).createOrReplaceTempView("prediction"); Dataset<Row> predictions = spark.sql("SELECT label, probability, prediction FROM prediction"); predictions.collectAsList(); // Check defaults Assert.assertEquals(0.5, model.getThreshold(), eps); Assert.assertEquals("features", model.getFeaturesCol()); Assert.assertEquals("prediction", model.getPredictionCol()); Assert.assertEquals("probability", model.getProbabilityCol()); }
/**
 * Verifies the default column names of {@link OneVsRest} and that they are
 * propagated unchanged to the fitted {@link OneVsRestModel}.
 */
@Test
public void oneVsRestDefaultParams() {
  OneVsRest ova = new OneVsRest();
  ova.setClassifier(new LogisticRegression());
  // Fixed: JUnit assertEquals takes (expected, actual) — original had them reversed.
  Assert.assertEquals("label", ova.getLabelCol());
  Assert.assertEquals("prediction", ova.getPredictionCol());
  OneVsRestModel ovaModel = ova.fit(dataset);
  Dataset<Row> predictions = ovaModel.transform(dataset).select("label", "prediction");
  // Materialize to force evaluation of the whole pipeline.
  predictions.collectAsList();
  Assert.assertEquals("label", ovaModel.getLabelCol());
  Assert.assertEquals("prediction", ovaModel.getPredictionCol());
}
}
/** Verifies the default parameter values of a freshly constructed {@link NaiveBayes}. */
@Test
public void naiveBayesDefaultParams() {
  NaiveBayes classifier = new NaiveBayes();
  assertEquals("label", classifier.getLabelCol());
  assertEquals("features", classifier.getFeaturesCol());
  assertEquals("prediction", classifier.getPredictionCol());
  // Smoothing is a double, so compare within a small tolerance.
  assertEquals(1.0, classifier.getSmoothing(), 1E-5);
  assertEquals("multinomial", classifier.getModelType());
}
@Override
public void setUp() throws IOException {
  super.setUp();
  // Synthetic logistic input: intercept 1.0, weight 1.0, 100 rows, seed 42,
  // distributed across 2 partitions.
  dataset = spark.createDataFrame(
      jsc.parallelize(generateLogisticInputAsList(1.0, 1.0, 100, 42), 2),
      LabeledPoint.class);
}
@Test public void logisticRegressionDefaultParams() { LogisticRegression lr = new LogisticRegression(); Assert.assertEquals(lr.getLabelCol(), "label"); LogisticRegressionModel model = lr.fit(dataset); model.transform(dataset).createOrReplaceTempView("prediction"); Dataset<Row> predictions = spark.sql("SELECT label, probability, prediction FROM prediction"); predictions.collectAsList(); // Check defaults Assert.assertEquals(0.5, model.getThreshold(), eps); Assert.assertEquals("features", model.getFeaturesCol()); Assert.assertEquals("prediction", model.getPredictionCol()); Assert.assertEquals("probability", model.getProbabilityCol()); }
/**
 * Verifies the default column names of {@link OneVsRest} and that they are
 * propagated unchanged to the fitted {@link OneVsRestModel}.
 */
@Test
public void oneVsRestDefaultParams() {
  OneVsRest ova = new OneVsRest();
  ova.setClassifier(new LogisticRegression());
  // Fixed: JUnit assertEquals takes (expected, actual) — original had them reversed.
  Assert.assertEquals("label", ova.getLabelCol());
  Assert.assertEquals("prediction", ova.getPredictionCol());
  OneVsRestModel ovaModel = ova.fit(dataset);
  Dataset<Row> predictions = ovaModel.transform(dataset).select("label", "prediction");
  // Materialize to force evaluation of the whole pipeline.
  predictions.collectAsList();
  Assert.assertEquals("label", ovaModel.getLabelCol());
  Assert.assertEquals("prediction", ovaModel.getPredictionCol());
}
}
/** The training summary should record one objective-history entry per iteration. */
@Test
public void logisticRegressionTrainingSummary() {
  LogisticRegressionModel fitted = new LogisticRegression().fit(dataset);
  LogisticRegressionTrainingSummary trainingSummary = fitted.summary();
  Assert.assertEquals(trainingSummary.totalIterations(), trainingSummary.objectiveHistory().length);
}
}
/** Verifies the default parameter values of a freshly constructed {@link NaiveBayes}. */
@Test
public void naiveBayesDefaultParams() {
  NaiveBayes classifier = new NaiveBayes();
  assertEquals("label", classifier.getLabelCol());
  assertEquals("features", classifier.getFeaturesCol());
  assertEquals("prediction", classifier.getPredictionCol());
  // Smoothing is a double, so compare within a small tolerance.
  assertEquals(1.0, classifier.getSmoothing(), 1E-5);
  assertEquals("multinomial", classifier.getModelType());
}
@Override
public void setUp() throws IOException {
  super.setUp();
  // Synthetic logistic input: intercept 1.0, weight 1.0, 100 rows, seed 42,
  // distributed across 2 partitions.
  dataset = spark.createDataFrame(
      jsc.parallelize(generateLogisticInputAsList(1.0, 1.0, 100, 42), 2),
      LabeledPoint.class);
}
@Test public void logisticRegressionDefaultParams() { LogisticRegression lr = new LogisticRegression(); Assert.assertEquals(lr.getLabelCol(), "label"); LogisticRegressionModel model = lr.fit(dataset); model.transform(dataset).createOrReplaceTempView("prediction"); Dataset<Row> predictions = spark.sql("SELECT label, probability, prediction FROM prediction"); predictions.collectAsList(); // Check defaults Assert.assertEquals(0.5, model.getThreshold(), eps); Assert.assertEquals("features", model.getFeaturesCol()); Assert.assertEquals("prediction", model.getPredictionCol()); Assert.assertEquals("probability", model.getProbabilityCol()); }
/**
 * Verifies the default column names of {@link OneVsRest} and that they are
 * propagated unchanged to the fitted {@link OneVsRestModel}.
 */
@Test
public void oneVsRestDefaultParams() {
  OneVsRest ova = new OneVsRest();
  ova.setClassifier(new LogisticRegression());
  // Fixed: JUnit assertEquals takes (expected, actual) — original had them reversed.
  Assert.assertEquals("label", ova.getLabelCol());
  Assert.assertEquals("prediction", ova.getPredictionCol());
  OneVsRestModel ovaModel = ova.fit(dataset);
  Dataset<Row> predictions = ovaModel.transform(dataset).select("label", "prediction");
  // Materialize to force evaluation of the whole pipeline.
  predictions.collectAsList();
  Assert.assertEquals("label", ovaModel.getLabelCol());
  Assert.assertEquals("prediction", ovaModel.getPredictionCol());
}
}
/** The training summary should record one objective-history entry per iteration. */
@Test
public void logisticRegressionTrainingSummary() {
  LogisticRegressionModel fitted = new LogisticRegression().fit(dataset);
  LogisticRegressionTrainingSummary trainingSummary = fitted.summary();
  Assert.assertEquals(trainingSummary.totalIterations(), trainingSummary.objectiveHistory().length);
}
}
/** Verifies the default parameter values of a freshly constructed {@link NaiveBayes}. */
@Test
public void naiveBayesDefaultParams() {
  NaiveBayes classifier = new NaiveBayes();
  assertEquals("label", classifier.getLabelCol());
  assertEquals("features", classifier.getFeaturesCol());
  assertEquals("prediction", classifier.getPredictionCol());
  // Smoothing is a double, so compare within a small tolerance.
  assertEquals(1.0, classifier.getSmoothing(), 1E-5);
  assertEquals("multinomial", classifier.getModelType());
}
@Override
public void setUp() throws IOException {
  super.setUp();
  // Synthetic logistic input: intercept 1.0, weight 1.0, 100 rows, seed 42.
  List<LabeledPoint> input = generateLogisticInputAsList(1.0, 1.0, 100, 42);
  // Distribute over 2 partitions before building the DataFrame.
  dataset = spark.createDataFrame(jsc.parallelize(input, 2), LabeledPoint.class);
}
@Override
public void setUp() throws IOException {
  super.setUp();
  // Synthetic logistic input: intercept 1.0, weight 1.0, 100 rows, seed 42.
  List<LabeledPoint> input = generateLogisticInputAsList(1.0, 1.0, 100, 42);
  // Distribute over 2 partitions before building the DataFrame.
  dataset = spark.createDataFrame(jsc.parallelize(input, 2), LabeledPoint.class);
}
@Override
public void setUp() throws IOException {
  super.setUp();
  // Synthetic logistic input: intercept 1.0, weight 1.0, 100 rows, seed 42.
  List<LabeledPoint> input = generateLogisticInputAsList(1.0, 1.0, 100, 42);
  // Distribute over 2 partitions before building the DataFrame.
  dataset = spark.createDataFrame(jsc.parallelize(input, 2), LabeledPoint.class);
}
@Override
public void setUp() throws IOException {
  super.setUp();
  // Synthetic logistic input: intercept 1.0, weight 1.0, 100 rows, seed 42,
  // distributed across 2 partitions.
  dataset = spark.createDataFrame(
      jsc.parallelize(generateLogisticInputAsList(1.0, 1.0, 100, 42), 2),
      LabeledPoint.class);
}