/**
 * Resets the Loader ready to read a new data set or the same data set again.
 *
 * @throws IOException if something goes wrong
 */
@Override
public void reset() throws IOException {
  m_structure = null;
  m_ArffReader = null;
  setRetrieval(NONE);

  // Re-initialize from whichever source (file or URL) is currently configured.
  boolean haveUsableFile = (m_File != null) && !new File(m_File).isDirectory();
  if (haveUsableFile) {
    setFile(new File(m_File));
  } else if ((m_URL != null) && !m_URL.equals("http://")) {
    setURL(m_URL);
  }
}
// Build a loader for the ARFF file and feed its header to the sampler.
ArffLoader loader = new ArffLoader();
File arffFile = new File("some.arff");
loader.setFile(arffFile);
// you missed this
Instances data = loader.getStructure();
sampler.setInputFormat(data);
public class Main {
  // Path of the ARFF data set to load (replace with a real path).
  private static final String ARFF_FILE_PATH = "YOUR_ARFF_FILE_PATH";

  /**
   * Loads the ARFF file and prints every instance it contains.
   *
   * @param args unused command-line arguments
   * @throws IOException if the ARFF file cannot be read
   */
  public static void main(String[] args) throws IOException {
    ArffLoader loader = new ArffLoader();
    loader.setFile(new File(ARFF_FILE_PATH));
    Instances instances = loader.getDataSet();
    for (Instance instance : instances) {
      System.out.println("Instance:" + instance);
    }
  }
}
import java.io.*; import weka.core.Instance; import weka.core.Instances; import weka.core.converters.ArffLoader; import weka.core.converters.ArffLoader.ArffReader; public class assign3 { public static void main(String args[]) throws IOException { ArffLoader arffloader=new ArffLoader(); File filedata = new File("/home/cse611/Downloads/iris.arff"); arffloader.setFile(filedata); Instances data = arffloader.getDataSet();`enter code here` for(Instance inst : data){ System.out.println("Instance:" + inst); } } }
/**
 * Resets the Loader ready to read a new data set or the same data set again.
 *
 * @throws IOException if something goes wrong
 */
@Override
public void reset() throws IOException {
  m_structure = null;
  m_ArffReader = null;
  setRetrieval(NONE);

  if (m_File != null) {
    File source = new File(m_File);
    // Only a plain file can be re-opened directly.
    if (!source.isDirectory()) {
      setFile(source);
      return;
    }
  }
  // Fall back to the URL source when it has been set to something real.
  if (m_URL != null && !m_URL.equals("http://")) {
    setURL(m_URL);
  }
}
// Load the full data set from disk.
ArffLoader loader = new ArffLoader();
File dataFile = new File("data.arff");
loader.setFile(dataFile);
Instances structure = loader.getDataSet();
// The attribute at position 1 serves as the class attribute.
structure.setClassIndex(1);
// Stream the test file one instance at a time and print each prediction.
ArffLoader testingData = new ArffLoader();
testingData.setFile(new File("sample2.txt"));
Instances testingStructure = testingData.getStructure();
// Class attribute position mirrors the training data's layout.
testingStructure.setClassIndex(structure.numAttributes() - 1);
for (Instance test = testingData.getNextInstance(testingStructure);
     test != null;
     test = testingData.getNextInstance(testingStructure)) {
  System.out.println(nb.classifyInstance(test));
}
private static void build_model() { // TODO Auto-generated method stub try{ // load data ArffLoader loader = new ArffLoader(); loader.setFile(new File("D:\\MAIN PROJECT\\data.arff")); Instances structure = loader.getDataSet(); structure.setClassIndex(structure.numAttributes() - 1); System.out.println("Attributes : "+structure.numAttributes()); System.out.println("Instances : "+structure.numInstances()); // train SMO System.out.println("Before creating smo object"); SMO smo = new SMO(); System.out.println("SMO object created"); smo.buildClassifier(structure); System.out.println("Classifier build"); System.out.println(smo); System.out.println("\nModel build successfully"); } catch(Exception e){ System.out.println("\nstack trace : " + e); } }
// Load the ARFF file named by inputString and hand the parsed data set to
// setInstances(). Any failure (missing file, malformed ARFF, ...) is routed
// to the catch block.
// NOTE(review): the catch body lies outside this excerpt — confirm the
// exception is not silently swallowed there.
try {
  File input = new File(inputString);
  loader.setFile(input);
  setInstances(loader.getDataSet());
} catch (Exception ex) {
// Read the full data set from the file named by inputString and store it via
// setInstances(); errors fall through to the catch clause.
// NOTE(review): catch body is truncated here — verify at the call site how
// the exception is handled.
try {
  File input = new File(inputString);
  loader.setFile(input);
  setInstances(loader.getDataSet());
} catch (Exception ex) {
// Incremental training of a multinomial naive Bayes text classifier.
ArffLoader loader = new ArffLoader();
loader.setFile(new File(""));//file is valid
// getStructure() reads only the ARFF header, so `structure` holds attribute
// definitions but no instances yet.
Instances structure = loader.getStructure();
structure.setClassIndex(0);
// train NaiveBayes
NaiveBayesMultinomialUpdateable n = new NaiveBayesMultinomialUpdateable();
FilteredClassifier f = new FilteredClassifier();
StringToWordVector s = new StringToWordVector();
f.setFilter(s);
f.setClassifier(n);
f.buildClassifier(structure);
// NOTE(review): the loop below updates `n` (the inner classifier) with RAW
// string instances, bypassing the StringToWordVector filter that `f` was
// configured with — presumably this should go through a
// FilteredClassifierUpdateable or apply the filter manually; confirm against
// the Weka incremental-learning documentation.
Instance current;
while ((current = loader.getNextInstance(structure)) != null)
  n.updateClassifier(current);
// output generated model
System.out.println(n);
// Read the complete ARFF data set from the file named by inputString.
try {
  File input = new File(inputString);
  loader.setFile(input);
  Instances inst = loader.getDataSet();
  // index == -1 appears to mean "no class attribute selected yet"; the branch
  // body lies outside this excerpt — confirm at the call site.
  if (index == -1) {
// Load the data set from inputString's file; errors propagate to the
// (truncated) catch clause further down.
try {
  File input = new File(inputString);
  loader.setFile(input);
  Instances inst = loader.getDataSet();
  // Sentinel -1 presumably marks "no index chosen" — TODO confirm.
  if (index == -1) {
// A non-empty filename selects the ARFF training source; an empty one is a
// configuration error.
if (filename.length() != 0) {
  loader = new weka.core.converters.ArffLoader();
  loader.setFile(new java.io.File(filename));
} else {
  // NOTE(review): throws the raw Exception type — a more specific exception
  // (e.g. IllegalArgumentException) would be preferable, but check the
  // enclosing method's throws clause before changing it.
  throw new Exception("No training file specified!");
// Construct the loader only when a training file name was actually given.
if (filename.length() != 0) {
  loader = new weka.core.converters.ArffLoader();
  loader.setFile(new java.io.File(filename));
} else {
  // NOTE(review): raw Exception is thrown here; consider a narrower type if
  // the enclosing signature allows it.
  throw new Exception("No training file specified!");
// Point the loader at the selected input file.
loader.setFile(input);
// Read the header from the temp file, then pull the first data row
// incrementally (getNextInstance returns null when no rows remain).
loader.setFile(tmpFile);
Instances tempStructure = loader.getStructure();
Instance tempLoaded = loader.getNextInstance(tempStructure);
// Configure a loader for the iris data set and attach it to the flow step.
weka.core.converters.ArffLoader arffL = new weka.core.converters.ArffLoader();
java.io.File irisFile = new java.io.File("${user.home}/datasets/UCI/iris.arff");
arffL.setFile(irisFile);
step.setLoader(arffL);
// Build the ArffLoader pointing at the iris ARFF file, then hand it to the
// step as its data source.
weka.core.converters.ArffLoader arffL = new weka.core.converters.ArffLoader();
arffL.setFile(
    new java.io.File("${user.home}/datasets/UCI/iris.arff"));
step.setLoader(arffL);