/**
 * Builds an {@link MCMaxEntTrainer} configured from command-line style
 * option/value pairs (e.g. {@code --numIterations 50 --gaussianPriorVariance 1.0}).
 *
 * @param args alternating option names and values; may be {@code null} or empty,
 *             in which case a trainer with default settings is returned
 * @return the configured trainer
 * @throws IllegalArgumentException if an option is missing its value or is not recognized
 */
public ClassifierTrainer<MCMaxEnt> createTrainer(String... args) {
    MCMaxEntTrainer trainer = new MCMaxEntTrainer();
    if (args == null) {
        return trainer;
    }
    // Options come strictly in (name, value) pairs.
    if (args.length % 2 != 0) {
        throw new IllegalArgumentException(
            "each argument must be supplied with a value: " + getUsageMessage());
    }
    for (int i = 0; i < args.length; i += 2) {
        String option = args[i];
        String value = args[i + 1];
        switch (option) {
            case "--useHyperbolicPrior":
                trainer.setUseHyperbolicPrior(Boolean.parseBoolean(value));
                break;
            case "--gaussianPriorVariance":
                trainer.setGaussianPriorVariance(Double.parseDouble(value));
                break;
            case "--hyperbolicPriorSlope":
                trainer.setHyperbolicPriorSlope(Double.parseDouble(value));
                break;
            case "--hyperbolicPriorSharpness":
                trainer.setHyperbolicPriorSharpness(Double.parseDouble(value));
                break;
            case "--numIterations":
                trainer.setNumIterations(Integer.parseInt(value));
                break;
            default:
                throw new IllegalArgumentException(
                    String.format("the argument %1$s is invalid. ", option) + getUsageMessage());
        }
    }
    return trainer;
}
public MCMaxEnt train (InstanceList trainingSet) { logger.fine ("trainingSet.size() = "+trainingSet.size()); mt = new MaximizableTrainer (trainingSet, (MCMaxEnt)initialClassifier); Optimizer maximizer = new LimitedMemoryBFGS(mt); // CPAL - change the tolerance for large vocab experiments ((LimitedMemoryBFGS)maximizer).setTolerance(.00001); // std is .0001; maximizer.optimize (); // XXX given the loop below, this seems wrong. logger.info("MCMaxEnt ngetValueCalls:"+getValueCalls()+"\nMCMaxEnt ngetValueGradientCalls:"+getValueGradientCalls()); // boolean converged; // // for (int i = 0; i < numIterations; i++) { // converged = maximizer.maximize (mt, 1); // if (converged) // break; // else if (evaluator != null) // if (!evaluator.evaluate (mt.getClassifier(), converged, i, mt.getValue(), // trainingSet, validationSet, testSet)) // break; // } // TestMaximizable.testValueAndGradient (mt); progressLogger.info("\n"); // progess messages are on one line; move on. return mt.getClassifier (); }
public MCMaxEnt train (InstanceList trainingSet) { logger.fine ("trainingSet.size() = "+trainingSet.size()); mt = new MaximizableTrainer (trainingSet, (MCMaxEnt)initialClassifier); Optimizer maximizer = new LimitedMemoryBFGS(mt); // CPAL - change the tolerance for large vocab experiments ((LimitedMemoryBFGS)maximizer).setTolerance(.00001); // std is .0001; maximizer.optimize (); // XXX given the loop below, this seems wrong. logger.info("MCMaxEnt ngetValueCalls:"+getValueCalls()+"\nMCMaxEnt ngetValueGradientCalls:"+getValueGradientCalls()); // boolean converged; // // for (int i = 0; i < numIterations; i++) { // converged = maximizer.maximize (mt, 1); // if (converged) // break; // else if (evaluator != null) // if (!evaluator.evaluate (mt.getClassifier(), converged, i, mt.getValue(), // trainingSet, validationSet, testSet)) // break; // } // TestMaximizable.testValueAndGradient (mt); progressLogger.info("\n"); // progess messages are on one line; move on. return mt.getClassifier (); }
public MCMaxEnt train (InstanceList trainingSet) { logger.fine ("trainingSet.size() = "+trainingSet.size()); mt = new MaximizableTrainer (trainingSet, (MCMaxEnt)initialClassifier); Optimizer maximizer = new LimitedMemoryBFGS(mt); // CPAL - change the tolerance for large vocab experiments ((LimitedMemoryBFGS)maximizer).setTolerance(.00001); // std is .0001; maximizer.optimize (); // XXX given the loop below, this seems wrong. logger.info("MCMaxEnt ngetValueCalls:"+getValueCalls()+"\nMCMaxEnt ngetValueGradientCalls:"+getValueGradientCalls()); // boolean converged; // // for (int i = 0; i < numIterations; i++) { // converged = maximizer.maximize (mt, 1); // if (converged) // break; // else if (evaluator != null) // if (!evaluator.evaluate (mt.getClassifier(), converged, i, mt.getValue(), // trainingSet, validationSet, testSet)) // break; // } // TestMaximizable.testValueAndGradient (mt); progressLogger.info("\n"); // progess messages are on one line; move on. return mt.getClassifier (); }
/**
 * Builds an {@link MCMaxEntTrainer} configured from command-line style
 * option/value pairs (e.g. {@code --numIterations 50 --gaussianPriorVariance 1.0}).
 *
 * @param args alternating option names and values; may be {@code null} or empty,
 *             in which case a trainer with default settings is returned
 * @return the configured trainer
 * @throws IllegalArgumentException if an option is missing its value or is not recognized
 */
public ClassifierTrainer<MCMaxEnt> createTrainer(String... args) {
    MCMaxEntTrainer trainer = new MCMaxEntTrainer();
    if (args == null) {
        return trainer;
    }
    // Options come strictly in (name, value) pairs.
    if (args.length % 2 != 0) {
        throw new IllegalArgumentException(
            "each argument must be supplied with a value: " + getUsageMessage());
    }
    for (int i = 0; i < args.length; i += 2) {
        String option = args[i];
        String value = args[i + 1];
        if (option.equals("--useHyperbolicPrior")) {
            trainer.setUseHyperbolicPrior(Boolean.parseBoolean(value));
        } else if (option.equals("--gaussianPriorVariance")) {
            trainer.setGaussianPriorVariance(Double.parseDouble(value));
        } else if (option.equals("--hyperbolicPriorSlope")) {
            trainer.setHyperbolicPriorSlope(Double.parseDouble(value));
        } else if (option.equals("--hyperbolicPriorSharpness")) {
            trainer.setHyperbolicPriorSharpness(Double.parseDouble(value));
        } else if (option.equals("--numIterations")) {
            trainer.setNumIterations(Integer.parseInt(value));
        } else {
            throw new IllegalArgumentException(
                String.format("the argument %1$s is invalid. ", option) + getUsageMessage());
        }
    }
    return trainer;
}