/**
 * Wraps libsvm training in a {@link Callable} so it can be scheduled for
 * deferred or asynchronous execution (e.g. on an executor).
 *
 * @param problem the encoded training problem
 * @param param   the libsvm training parameters
 * @return a callable that performs the training when invoked
 */
private static Callable<svm_model> getTrainingFunction(svm_problem problem, svm_parameter param) {
    return new Callable<svm_model>() {
        @Override
        public svm_model call() {
            return svm.svm_train(problem, param);
        }
    };
}
// Train the SVM model on the prepared problem with the configured parameters.
svm_model model = svm.svm_train(prob, params);
// Wraps svm.svm_train in a Callable so the (potentially long-running) training
// step can be handed to an executor instead of running inline.
private static Callable<svm_model> getTrainingFunction(svm_problem problem, svm_parameter param) { return () -> svm.svm_train(problem, param); }
/**
 * Trains the underlying libsvm model on the supplied training elements.
 * Feature names are captured from the first element on the first call,
 * feature limits are recorded for scaling, and the dataset is encoded
 * into an svm_problem before training.
 *
 * @param trainingElements non-empty list of labelled training elements
 */
public void buildClassifier(List<TrainingElement<BxZoneLabel>> trainingElements) {
    assert trainingElements.size() > 0;
    if (features == null) {
        // toArray(new String[0]) sizes the array exactly to the collection;
        // the previous new String[1] left a trailing null entry whenever the
        // feature-name collection was empty.
        features = (String[]) trainingElements.get(0).getObservation().getFeatureNames().toArray(new String[0]);
    }
    scaler.setFeatureLimits(trainingElements);
    problem = buildDatasetForTraining(trainingElements);
    model = libsvm.svm.svm_train(problem, param);
}
/**
 * Trains a libsvm classifier in-process and returns it wrapped as a {@link Classifier}.
 * The trained model is serialized to a temp file, hashed together with the feature
 * dictionary, and then reloaded from the serialized text so the returned classifier
 * corresponds exactly to the hashed representation.
 *
 * @param parameters  the training configuration to encode into libsvm parameters
 * @param trainingSet the labelled vectors to train on
 * @return the trained, hash-stamped classifier
 * @throws IOException if writing or reading the temporary model file fails
 */
private static Classifier trainJava(final Parameters parameters, final Iterable<LabelledVector> trainingSet) throws IOException {
    // Prepare the svm_parameter object based on supplied parameters
    final svm_parameter parameter = encodeParameters(parameters);
    // Encode the training set as an svm_problem object, filling a dictionary meanwhile
    final Dictionary<String> dictionary = Dictionary.create();
    final svm_problem problem = encodeProblem(dictionary, trainingSet);
    // Perform training
    final svm_model model = svm.svm_train(problem, parameter);
    // Compute model hash, by saving and reloading SVM model
    final File tmpFile = File.createTempFile("svm", ".bin");
    tmpFile.deleteOnExit();
    svm.svm_save_model(tmpFile.getAbsolutePath(), model);
    // NOTE(review): Charset.defaultCharset() makes the serialized text (and thus the
    // hash) platform-dependent — confirm UTF-8 is not required for reproducible hashes.
    final String modelString = com.google.common.io.Files.toString(tmpFile, Charset.defaultCharset());
    final String modelHash = computeHash(dictionary, modelString);
    // Reload from the exact serialized text so the returned model matches what was hashed.
    final svm_model reloadedModel = svm.svm_load_model(new BufferedReader(new StringReader(modelString)));
    tmpFile.delete();
    // Build and return the SVM object
    return new LibSvmClassifier(parameters, modelHash, dictionary, reloadedModel);
}
/**
 * Trains the SVM on the given feature vectors and labels.
 *
 * @param vx         feature vectors, one svm_node array per example
 * @param vy         target labels, parallel to {@code vx}
 * @param parameters wrapper holding the libsvm svm_parameter to train with
 */
public void train(Vector<svm_node[]> vx, Vector<Double> vy, Parameters parameters){
    this.parameters = parameters;
    this.param = parameters.getParam();
    // Populate this.prob from the raw vectors before training.
    load(vx, vy);
    model = svm.svm_train(prob,param);
    // Write the param object back; libsvm may have adjusted fields during training.
    parameters.setParam( param );
}
/**
 * Command-line entry point: parses arguments, loads the problem, validates
 * the parameter/problem combination, trains, and saves the model to disk.
 *
 * @param argv raw command-line arguments
 * @throws Exception if the libsvm parameter check reports an error
 */
public void run(String argv[]) throws Exception {
    parse_command_line(argv);
    read_problem();
    final String validationError = svm.svm_check_parameter(prob, param);
    error_msg = validationError;
    if (validationError != null) {
        throw new Exception(validationError);
    }
    model = svm.svm_train(prob, param);
    svm.svm_save_model(model_file_name, model);
}
/**
 * Command-line entry point: parses arguments, loads the problem, validates
 * the parameter/problem combination, trains, and saves the model to disk.
 *
 * @param argv raw command-line arguments
 * @throws Exception if the libsvm parameter check reports an error
 */
public void run(String argv[]) throws Exception {
    parse_command_line(argv);
    read_problem();
    // Validate prob/param before training; a non-null message is fatal.
    error_msg = svm.svm_check_parameter(prob, param);
    if (error_msg != null) {
        throw new Exception(error_msg);
    }
    model = svm.svm_train(prob, param);
    svm.svm_save_model(model_file_name, model);
}
/**
 * Trains an SVM on {@code input_file} and writes the model to {@code model_file}.
 *
 * @param input_file file containing the training data (libsvm format, per read_problem)
 * @param model_file destination for the trained model
 * @param c          the C (cost) parameter
 * @param mem        memory setting forwarded to set_param — presumably cache size; verify
 * @param weight     per-class weights forwarded to set_param
 * @throws IOException if reading the problem or saving the model fails
 */
public void run(File input_file, File model_file, double c, int mem, double[] weight) throws IOException {
    input_file_name = input_file.getAbsolutePath();
    model_file_name = model_file.getAbsolutePath();
    //System.out.println("input_file_name: " + input_file_name);
    //System.out.println("model_file_name: " + model_file_name);
    //System.out.println("mem: " + mem);
    set_param(c, mem, weight);
    read_problem();
    error_msg = svm.svm_check_parameter(prob,param);
    if(error_msg != null) {
        // NOTE(review): terminates the whole JVM on a bad parameter set — callers cannot recover.
        System.err.print("Error: "+error_msg+"\n");
        System.exit(1);
    }
    if(cross_validation != 0) {
        // Cross-validation branch is currently disabled; no model is produced in this mode.
        //do_cross_validation();
    } else {
        model = svm.svm_train(prob,param);
        svm.svm_save_model(model_file_name, model);
    }
}
/**
 * Runs the learning phase: reads the document-vector file at {@code vector_location},
 * validates the libsvm configuration, and either performs n-fold cross-validation
 * or trains a model and saves it to {@code model_file_name}.
 *
 * @throws Exception if n-fold is below 2, or the libsvm parameter check fails
 */
public void internal_learn() throws Exception {
    // dumps a file with the vectors for the documents
    File learningFile = new File(this.vector_location);
    // make space
    parse_command_line();
    if (cross_validation && nfold < 2)
        throw new Exception("n-fold cross validation: n must >= 2\n");
    read_problem(learningFile);
    // Validate prob/param before training; a non-null message is fatal.
    error_msg = svm.svm_check_parameter(prob, param);
    if (error_msg != null) {
        System.err.print("Error: " + error_msg + "\n");
        throw new Exception(error_msg);
    }
    if (cross_validation) {
        // Cross-validation mode only reports accuracy; no model file is written.
        do_cross_validation();
    } else {
        model = svm.svm_train(prob, param);
        svm.svm_save_model(model_file_name, model);
    }
}
/**
 * Parses the command line, loads the problem, validates the configuration,
 * then either cross-validates or trains and saves a model.
 *
 * @param argv raw command-line arguments
 * @throws IOException if reading the problem or saving the model fails
 */
private void run(String argv[]) throws IOException {
    parse_command_line(argv);
    read_problem();
    error_msg = svm.svm_check_parameter(prob, param);
    if (error_msg != null) {
        // Fatal configuration error: report and terminate the JVM.
        System.err.print("ERROR: " + error_msg + "\n");
        System.exit(1);
    }
    if (cross_validation != 0) {
        // Cross-validation mode produces no saved model.
        do_cross_validation();
        return;
    }
    model = svm.svm_train(prob, param);
    svm.svm_save_model(model_file_name, model);
}
/**
 * Parses the command line, loads the problem, validates the configuration,
 * then either cross-validates or trains and saves a model.
 *
 * @param argv raw command-line arguments
 * @throws IOException if reading the problem or saving the model fails
 */
private void run(String argv[]) throws IOException {
    parse_command_line(argv);
    read_problem();
    error_msg = svm.svm_check_parameter(prob, param);
    if (error_msg != null) {
        // NOTE(review): terminates the whole JVM on a bad parameter set — callers cannot recover.
        System.err.print("ERROR: " + error_msg + "\n");
        System.exit(1);
    }
    if (cross_validation != 0) {
        // Cross-validation mode produces no saved model.
        do_cross_validation();
    } else {
        model = svm.svm_train(prob, param);
        svm.svm_save_model(model_file_name, model);
    }
}
/**
 * Trains a linear C-SVC (with probability estimates) on the given dataset.
 * Each observation's dense feature list is encoded as a libsvm row with
 * 1-based feature indices.
 *
 * @param dataset the labelled observations to train on
 * @return the trained libsvm model
 */
public svm_model trainModel(Dataset dataset) {
    final List<Observation> observations = dataset.getObservations();
    final int exampleCount = observations.size();

    // Encode every observation as a dense libsvm row.
    final svm_problem problem = new svm_problem();
    problem.l = exampleCount;
    problem.y = new double[exampleCount];
    problem.x = new svm_node[exampleCount][];
    for (int row = 0; row < exampleCount; row++) {
        final Observation observation = observations.get(row);
        final List<Double> features = observation.getFeatures();
        final svm_node[] encoded = new svm_node[features.size()];
        for (int col = 0; col < encoded.length; col++) {
            final svm_node node = new svm_node();
            node.index = col + 1; // libsvm feature indices start at 1
            node.value = features.get(col);
            encoded[col] = node;
        }
        problem.x[row] = encoded;
        problem.y[row] = dataset.getClassCode(observation);
    }

    // Fixed training configuration: linear C-SVC with probability estimates.
    final svm_parameter param = new svm_parameter();
    param.probability = 1;
    param.gamma = 0.5;
    param.nu = 0.5;
    param.C = 1;
    param.svm_type = svm_parameter.C_SVC;
    param.kernel_type = svm_parameter.LINEAR;
    param.cache_size = 20000;
    param.eps = 0.0001;

    return svm.svm_train(problem, param);
}
/**
 * {@inheritDoc}
 * @see org.openimaj.ml.training.BatchTrainer#train(java.util.List)
 */
@Override
public void train( final List<? extends Annotated<OBJECT, ANNOTATION>> data )
{
    // Check the data has 2 classes and update the class map.
    if( this.checkInputDataOK( data ) )
    {
        // Setup the SVM problem
        final svm_parameter param = SVMAnnotator.getDefaultSVMParameters();
        final svm_problem prob = this.getSVMProblem( data, param, this.extractor );

        // Train the SVM
        this.model = libsvm.svm.svm_train( prob, param );

        // Save the model if we're going to do that.
        if( this.saveModel != null )
            try
            {
                svm.svm_save_model( this.saveModel.getAbsolutePath(), this.model );
            }
            catch( final IOException e )
            {
                // NOTE(review): a failed save is only logged to stderr; training still
                // succeeds in memory but the model file may be missing or stale.
                e.printStackTrace();
            }
    }
}
/**
 * Trains the classifier on the given tuples. Falls back to default parameters
 * when none were configured, indexes the labels, encodes the data as an
 * svm_problem, and trains the underlying libsvm model.
 *
 * @param trainingData non-empty list of labelled feature vectors
 * @return this classifier, now holding the trained model
 */
@Override
public IClassifier train(List<Tuple> trainingData) {
    if (para == null) {
        LOG.warn("Parameter is null. Use the default parameter.");
        this.para = getDefaultPara();
    }
    labelIndexer = new LabelIndexer(trainingData);

    svm_problem prob = new svm_problem();
    // Feature width is taken from the first tuple; rows with fewer features
    // leave trailing null nodes (unchanged from the original behavior).
    int featSize = trainingData.iterator().next().vector.getVector().length;
    prob.l = trainingData.size();
    prob.y = new double[prob.l];
    // Row arrays are allocated per-tuple inside the loop; the previous
    // new svm_node[prob.l][featSize] eagerly allocated every row only to
    // overwrite each one immediately — a redundant O(l*featSize) allocation.
    prob.x = new svm_node[prob.l][];
    for (int i = 0; i < trainingData.size(); i++) {
        Tuple tuple = trainingData.get(i);
        double[] values = tuple.vector.getVector(); // hoisted: avoid repeated getVector() calls
        prob.x[i] = new svm_node[featSize];
        for (int j = 0; j < values.length; j++) {
            svm_node node = new svm_node();
            // NOTE(review): libsvm feature indices are conventionally 1-based (0 is
            // reserved for precomputed kernels) — confirm prediction uses the same
            // 0-based scheme as here.
            node.index = j;
            node.value = values[j];
            prob.x[i][j] = node;
        }
        prob.y[i] = labelIndexer.getIndex(tuple.label);
    }
    model = svm.svm_train(prob, para);
    return this;
}
// Silence libsvm's console chatter during training, then restore both streams.
System.setOut(NoPrintStream.NO_PRINTSTREAM);
System.setErr(NoPrintStream.NO_PRINTSTREAM);
svm_model model = svm.svm_train(prob, param);
// BUG FIX: the original called System.setOut(err) — stdout was briefly pointed
// at the saved stderr stream and stderr was never restored, staying suppressed
// for the rest of the process.
System.setErr(err);
System.setOut(out);
// Silence libsvm's console chatter during training, then restore both streams.
System.setOut(NoPrintStream.NO_PRINTSTREAM);
System.setErr(NoPrintStream.NO_PRINTSTREAM);
svm_model model = svm.svm_train(prob, param);
// BUG FIX: the original called System.setOut(err) — stdout was briefly pointed
// at the saved stderr stream and stderr was never restored, staying suppressed
// for the rest of the process.
System.setErr(err);
System.setOut(out);
@Override public SupportVectorMachine train(TrainingSet<DefaultObservation, DoubleCategory> trainingSet, ParameterSet params) { if(!params.containsParameter(C_PARAMETER) || !params.containsParameter(GAMMA_PARAMETER)) throw new IllegalArgumentException("Parameters missing."); svm_parameter param = new svm_parameter(); //TODO the following properties should be parameterized as well // Type of SVM param.svm_type = svm_parameter.C_SVC; // Kernel type (leave it at RBF for now) param.kernel_type = svm_parameter.RBF; // stopping criteria param.eps = 0.001; // cache size of kernel param.cache_size = 256; // do not set penalties for specific classes param.nr_weight = 0; // Given parameters // gamma parameter of RBF kernel param.gamma = params.getParameter(GAMMA_PARAMETER).getValue(); // C parameter of RBF kernel param.C = params.getParameter(C_PARAMETER).getValue(); return new SupportVectorMachine(svm.svm_train(trainingSet.toLibsvmProblem(), param)); }
problem.x[i][1].value=i-6; svm_model model=svm.svm_train(problem,parameter); svm_node[] unknown=new svm_node[]{ new svm_node(),