@Override
public MiningModel encodeModel(Schema schema){
	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	// Average the member trees' class probabilities, and expose them as probability output fields
	MiningModel miningModel = ForestUtil.encodeBaseForest(this, Segmentation.MultipleModelMethod.AVERAGE, MiningFunction.CLASSIFICATION, schema);
	miningModel.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));

	return miningModel;
}
@Override
public TreeModel encodeModel(Schema schema){
	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	// Encode the decision tree, attach probability output fields, then apply the final transformation
	TreeModel treeModel = TreeModelUtil.encodeTreeModel(this, MiningFunction.CLASSIFICATION, schema);
	treeModel.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));

	return TreeModelUtil.transform(this, treeModel);
}
.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));
.setNormalizationMethod(normalizationMethod) .setMathContext(ModelUtil.simplifyMathContext(mathContext)) .setOutput(hasProbabilityDistribution ? ModelUtil.createProbabilityOutput(mathContext, categoricalLabel) : null);
@Override
public NeuralNetwork encodeModel(Schema schema){
	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	// Translate the fitted MLP (activation + per-layer weights/biases) into a PMML neural network
	NeuralNetwork neuralNetwork = MultilayerPerceptronUtil.encodeNeuralNetwork(MiningFunction.CLASSIFICATION, getActivation(), getCoefs(), getIntercepts(), schema);
	neuralNetwork.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));

	return neuralNetwork;
}
/**
 * Encodes a voting ensemble as a PMML mining model.
 *
 * <p>Each member classifier is encoded against the same schema; the voting
 * strategy (possibly weighted) determines the segmentation combination method.</p>
 */
@Override
public Model encodeModel(Schema schema){
	List<? extends Classifier> estimators = getEstimators();
	List<? extends Number> weights = getWeights();

	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	// Pre-size: exactly one member model per estimator
	List<Model> models = new ArrayList<>(estimators.size());

	for(Classifier estimator : estimators){
		models.add(estimator.encodeModel(schema));
	}

	String voting = getVoting();

	// Weighted voting only when a non-empty weight vector was supplied
	Segmentation.MultipleModelMethod multipleModelMethod = parseVoting(voting, (weights != null && !weights.isEmpty()));

	MiningModel miningModel = new MiningModel(MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(categoricalLabel))
		.setSegmentation(MiningModelUtil.createSegmentation(multipleModelMethod, models, weights))
		.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));

	return miningModel;
}
.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));
@Override
public MiningModel encodeModel(Schema schema){
	List<? extends Classifier> estimators = getEstimators();
	List<List<Integer>> estimatorsFeatures = getEstimatorsFeatures();

	// Probability averaging is only possible when every member can emit a probability distribution
	boolean averageable = true;

	for(Classifier estimator : estimators){

		if(!estimator.hasProbabilityDistribution()){
			averageable = false;

			break;
		}
	}

	Segmentation.MultipleModelMethod multipleModelMethod = (averageable ? Segmentation.MultipleModelMethod.AVERAGE : Segmentation.MultipleModelMethod.MAJORITY_VOTE);

	MiningModel miningModel = BaggingUtil.encodeBagging(estimators, estimatorsFeatures, multipleModelMethod, MiningFunction.CLASSIFICATION, schema);
	miningModel.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, (CategoricalLabel)schema.getLabel()));

	return miningModel;
}
.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));
@Override
public TreeModel encodeModel(Schema schema){
	S4Object binaryTree = getObject();

	RGenericVector tree = (RGenericVector)binaryTree.getAttributeValue("tree");

	Output output;

	// Regression gets a bare output; classification gets per-class probability fields
	if(MiningFunction.REGRESSION.equals(this.miningFunction)){
		output = new Output();
	} else

	if(MiningFunction.CLASSIFICATION.equals(this.miningFunction)){
		CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

		output = ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel);
	} else

	{
		throw new IllegalArgumentException();
	}

	// Both cases expose the id of the winning tree node
	output.addOutputFields(ModelUtil.createEntityIdField(FieldName.create("nodeId")));

	TreeModel treeModel = encodeTreeModel(tree, schema);
	treeModel.setOutput(output);

	return treeModel;
}
.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));
@Override
public Model encodeModel(Schema schema){
	RGenericVector bagging = getObject();

	RGenericVector trees = (RGenericVector)bagging.getValue("trees");

	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	List<TreeModel> treeModels = encodeTreeModels(trees);

	// Unweighted bagging: combine member trees by simple majority vote
	MiningModel miningModel = new MiningModel(MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(categoricalLabel));
	miningModel.setSegmentation(MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.MAJORITY_VOTE, treeModels));
	miningModel.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));

	return miningModel;
}
}
.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel)); .setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));
@Override
public Model encodeModel(Schema schema){
	RGenericVector boosting = getObject();

	RGenericVector trees = (RGenericVector)boosting.getValue("trees");
	RDoubleVector weights = (RDoubleVector)boosting.getValue("weights");

	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	List<TreeModel> treeModels = encodeTreeModels(trees);

	// Boosting: member trees vote with their per-tree boosting weights
	MiningModel miningModel = new MiningModel(MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(categoricalLabel));
	miningModel.setSegmentation(MiningModelUtil.createSegmentation(Segmentation.MultipleModelMethod.WEIGHTED_MAJORITY_VOTE, treeModels, weights.getValues()));
	miningModel.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));

	return miningModel;
}
}
RegressionModel regressionModel = new RegressionModel(MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(categoricalLabel), null) .setNormalizationMethod(RegressionModel.NormalizationMethod.SOFTMAX) .setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));
.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));
@Override
public Model encodeModel(Schema schema){
	RGenericVector party = getObject();

	RGenericVector partyNode = (RGenericVector)party.getValue("node");

	RGenericVector predicted = (RGenericVector)DecorationUtil.getValue(party, "predicted");

	RVector<?> response = (RVector<?>)predicted.getValue("(response)");
	RDoubleVector prob = (RDoubleVector)predicted.getValue("(prob)", true);

	Node root = encodeNode(new True(), partyNode, response, prob, schema);

	// A factor response means classification (with class probabilities); anything else is regression
	if(RExpUtil.isFactor(response)){
		CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

		TreeModel treeModel = new TreeModel(MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(categoricalLabel), root);
		treeModel.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));

		return treeModel;
	}

	return new TreeModel(MiningFunction.REGRESSION, ModelUtil.createMiningSchema(schema.getLabel()), root);
}
generalRegressionModel.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, (CategoricalLabel)label)); break; default:
.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));
@Override
public NaiveBayesModel encodeModel(Schema schema){
	int[] shape = getThetaShape();

	int numberOfClasses = shape[0];
	int numberOfFeatures = shape[1];

	List<? extends Number> theta = getTheta();
	List<? extends Number> sigma = getSigma();

	CategoricalLabel categoricalLabel = (CategoricalLabel)schema.getLabel();

	BayesInputs bayesInputs = new BayesInputs();

	// One BayesInput per feature column, carrying per-class (mean, variance) statistics
	for(int col = 0; col < numberOfFeatures; col++){
		Feature feature = schema.getFeature(col);

		List<? extends Number> means = CMatrixUtil.getColumn(theta, numberOfClasses, numberOfFeatures, col);
		List<? extends Number> variances = CMatrixUtil.getColumn(sigma, numberOfClasses, numberOfFeatures, col);

		ContinuousFeature continuousFeature = feature.toContinuousFeature();

		BayesInput bayesInput = new BayesInput(continuousFeature.getName());
		bayesInput.setTargetValueStats(encodeTargetValueStats(categoricalLabel.getValues(), means, variances));

		bayesInputs.addBayesInputs(bayesInput);
	}

	List<Integer> classCount = getClassCount();

	// Class priors are derived from the per-class training sample counts
	BayesOutput bayesOutput = new BayesOutput(categoricalLabel.getName(), null);
	bayesOutput.setTargetValueCounts(encodeTargetValueCounts(categoricalLabel.getValues(), classCount));

	NaiveBayesModel naiveBayesModel = new NaiveBayesModel(0d, MiningFunction.CLASSIFICATION, ModelUtil.createMiningSchema(categoricalLabel), bayesInputs, bayesOutput);
	naiveBayesModel.setOutput(ModelUtil.createProbabilityOutput(DataType.DOUBLE, categoricalLabel));

	return naiveBayesModel;
}