/**
 * Builds a CRF4 over the given input/output alphabets.
 * The input alphabet is frozen so the feature dictionary cannot grow
 * after construction; the output alphabet is intentionally left growable.
 */
public CRF4 (Alphabet inputAlphabet, Alphabet outputAlphabet) {
	this.inputAlphabet = inputAlphabet;
	this.outputAlphabet = outputAlphabet;
	// Freeze features before any training touches the dictionary.
	inputAlphabet.stopGrowth();
	logger.info ("CRF input dictionary size = "+inputAlphabet.size());
}
/**
 * Builds a Dirichlet from explicit per-dimension alphas, optionally tied
 * to a dictionary. A non-null dictionary must have exactly one entry per
 * alpha, and is frozen so the correspondence cannot be broken later.
 *
 * @throws IllegalArgumentException if dict is non-null and its size
 *         differs from alphas.length
 */
public Dirichlet (double[] alphas, Alphabet dict) {
	if (dict != null) {
		if (alphas.length != dict.size()) {
			throw new IllegalArgumentException ("alphas and dict sizes do not match.");
		}
		dict.stopGrowth();
	}
	this.alphas = alphas;
	this.dict = dict;
}
/**
 * Builds an HMM over the given input/output alphabets.
 * The input alphabet is frozen so the observation dictionary stays fixed
 * after construction.
 */
public HMM (Alphabet inputAlphabet, Alphabet outputAlphabet) {
	this.inputAlphabet = inputAlphabet;
	this.outputAlphabet = outputAlphabet;
	inputAlphabet.stopGrowth();
	logger.info ("HMM input dictionary size = "+inputAlphabet.size());
}
/**
 * Builds a CRF2 over the given input/output alphabets.
 * The input alphabet is frozen so the feature dictionary cannot grow
 * after construction; the output alphabet is intentionally left growable.
 */
public CRF2 (Alphabet inputAlphabet, Alphabet outputAlphabet) {
	this.inputAlphabet = inputAlphabet;
	this.outputAlphabet = outputAlphabet;
	// Freeze features before any training touches the dictionary.
	inputAlphabet.stopGrowth();
	logger.info ("CRF input dictionary size = "+inputAlphabet.size());
}
/**
 * Builds a CRF3 over the given input/output alphabets.
 * The input alphabet is frozen so the feature dictionary cannot grow
 * after construction; the output alphabet is intentionally left growable.
 */
public CRF3 (Alphabet inputAlphabet, Alphabet outputAlphabet) {
	this.inputAlphabet = inputAlphabet;
	this.outputAlphabet = outputAlphabet;
	// Freeze features before any training touches the dictionary.
	inputAlphabet.stopGrowth();
	logger.info ("CRF input dictionary size = "+inputAlphabet.size());
}
/**
 * Builds a uniform Dirichlet with one dimension per dictionary entry,
 * all sharing the same concentration value. The dictionary is frozen so
 * its size stays consistent with the alpha vector.
 */
public Dirichlet (Alphabet dict, double alpha) {
	// Delegate to the size-based constructor for the uniform alphas.
	this(dict.size(), alpha);
	this.dict = dict;
	dict.stopGrowth();
}
/**
 * Builds a CRFByGISUpdate over the given input/output alphabets.
 * The input alphabet is frozen so the feature dictionary cannot grow
 * after construction; the output alphabet is intentionally left growable.
 */
public CRFByGISUpdate (Alphabet inputAlphabet, Alphabet outputAlphabet) {
	this.inputAlphabet = inputAlphabet;
	this.outputAlphabet = outputAlphabet;
	// Freeze features before any training touches the dictionary.
	inputAlphabet.stopGrowth();
	logger.info ("CRF input dictionary size = "+inputAlphabet.size());
}
/**
 * Builds a CRF over the given input/output alphabets.
 * The input alphabet is frozen so the feature dictionary cannot grow
 * after construction; the default-feature index is placed just past the
 * last real feature index.
 */
public CRF (Alphabet inputAlphabet, Alphabet outputAlphabet) {
	this.inputAlphabet = inputAlphabet;
	this.outputAlphabet = outputAlphabet;
	inputAlphabet.stopGrowth();
	logger.info ("CRF input dictionary size = "+inputAlphabet.size());
	// One slot past the frozen dictionary is reserved for the default feature.
	this.defaultFeatureIndex = inputAlphabet.size();
}
public MaxEnt trainClassifier (InstanceList ilist) { // just to plain MaxEnt training for now System.out.println("Training NOW: "); MaxEnt me = (MaxEnt)(new MaxEntTrainer().train (ilist, null, null, null, null)); Alphabet alpha = ilist.getDataAlphabet(); alpha.stopGrowth(); // hack to prevent alphabet from growing Trial t = new Trial(me, ilist); System.out.println("CorefClusterAdv -> Training F1 on \"yes\" is: " + t.labelF1("yes")); //me.write(new File("/tmp/MaxEnt_Output")); return me; }
/**
 * Builds a TreeModel by piping one (nodes, pubs) collection into an
 * instance list, freezing the feature dictionary, and training a MaxEnt
 * tree model on the result.
 */
public TreeModel (Pipe instancePipe, ArrayList nodes, ArrayList pubs) {
	this.instancePipe = instancePipe;
	ilist = new InstanceList (instancePipe);
	ilist.add (new PubCitIterator (nodes, pubs) );
	// Freeze features now that all instances have passed through the pipe.
	instancePipe.getDataAlphabet().stopGrowth();
	System.out.println(" >>>> Training Tree Model <<<< ");
	treeModel = (MaxEnt) (new MaxEntTrainer().train (ilist, null, null, null, null));
}
/**
 * Trains a pairwise MaxEnt classifier over citation pairs drawn from each
 * node group, then reports training F1 for both labels.
 *
 * @param nodes one group of nodes per training document
 * @param p     the pipe that converts node pairs into instances
 * @return the trained pairwise classifier
 */
public Classifier trainPairwiseClassifier (ArrayList[] nodes, Pipe p) {
	InstanceList pairList = new InstanceList (p);
	for (ArrayList nodeGroup : nodes) {
		pairList.add (CitationUtils.makePairs (p, nodeGroup));
	}
	System.err.println ("Training size: " + pairList.size() + "\tNum features: " + pairList.getDataAlphabet().size());
	MaxEnt maxent = (MaxEnt) (new MaxEntTrainer().train(pairList, null, null, null, null));
	// Freeze features once training is done.
	pairList.getDataAlphabet().stopGrowth();
	Trial trial = new Trial(maxent, pairList);
	System.out.println("Pairwise classifier: -> Training F1 on \"yes\" is: " + trial.labelF1("yes"));
	System.out.println("Pairwise classifier: -> Training F1 on \"no\" is: " + trial.labelF1("no"));
	return maxent;
}
/**
 * Trains a pairwise MaxEnt classifier over citation pairs drawn from each
 * node group, then reports training F1 for the "yes" label.
 *
 * @param nodes one group of nodes per training document
 * @param p     the pipe that converts node pairs into instances
 * @return the trained pairwise classifier
 */
private static Classifier trainPairwiseClassifier (ArrayList[] nodes, Pipe p) {
	InstanceList pairList = new InstanceList (p);
	for (ArrayList nodeGroup : nodes) {
		pairList.add (CitationUtils.makePairs (p, nodeGroup));
	}
	MaxEnt maxent = (MaxEnt) (new MaxEntTrainer().train(pairList, null, null, null, null));
	// Freeze features once training is done.
	pairList.getDataAlphabet().stopGrowth();
	Trial trial = new Trial(maxent, pairList);
	System.out.println("Pairwise classifier: -> Training F1 on \"yes\" is: " + trial.labelF1("yes"));
	return maxent;
}
/**
 * Trains a pairwise MaxEnt classifier over citation pairs drawn from each
 * node group, then reports training F1 for the "yes" label.
 *
 * @param nodes one group of nodes per training document
 * @param p     the pipe that converts node pairs into instances
 * @return the trained pairwise classifier
 */
private static Classifier trainPairwiseClassifier (ArrayList[] nodes, Pipe p) {
	InstanceList pairList = new InstanceList (p);
	for (ArrayList nodeGroup : nodes) {
		pairList.add (CitationUtils.makePairs (p, nodeGroup));
	}
	MaxEnt maxent = (MaxEnt) (new MaxEntTrainer().train(pairList, null, null, null, null));
	// Freeze features once training is done.
	pairList.getDataAlphabet().stopGrowth();
	Trial trial = new Trial(maxent, pairList);
	System.out.println("Pairwise classifier: -> Training F1 on \"yes\" is: " + trial.labelF1("yes"));
	return maxent;
}
// NOTE(review): truncated fragment — the enclosing method is not fully visible
// here and the final statement is cut off mid-declaration (`ObjectOutputStream s =`).
// Presumably the missing tail serializes the trained classifier to `save`;
// verify against the full file before editing.
logger.info("The training accuracy is "+ classifier.getAccuracy (trainingList)); features.stopGrowth(); if (save != null) { ObjectOutputStream s =
/**
 * Builds a TreeModel from three (nodes, pubs) collections: all three are
 * piped into a single instance list, the feature dictionary is frozen,
 * and a MaxEnt tree model is trained on the combined data.
 */
public TreeModel (Pipe instancePipe, ArrayList nodes1, ArrayList nodes2, ArrayList nodes3,
                  ArrayList pubs1, ArrayList pubs2, ArrayList pubs3) {
	this.instancePipe = instancePipe;
	ilist = new InstanceList (instancePipe);
	ilist.add (new PubCitIterator (nodes1, pubs1) );
	ilist.add (new PubCitIterator (nodes2, pubs2) );
	ilist.add (new PubCitIterator (nodes3, pubs3) );
	// Freeze features now that all instances have passed through the pipe.
	instancePipe.getDataAlphabet().stopGrowth();
	treeModel = (MaxEnt) (new MaxEntTrainer().train (ilist, null, null, null, null));
}
/**
 * Trains the underlying classifier for a ConditionalClusterer.
 *
 * Instances are pulled from the iterator into a training list; per-feature
 * information gain is logged, optional feature induction is run, and the
 * configured trainer produces the classifier. The data alphabet is frozen
 * afterwards so clustering cannot grow the feature dictionary.
 *
 * @param instanceIterator    source of training instances
 * @param useFeatureInduction whether to run FeatureInducer before training
 * @return a ConditionalClusterer wrapping the trained classifier
 */
public ConditionalClusterer train (AbstractPipeInputIterator instanceIterator, boolean useFeatureInduction) {
	InstanceList trainingList = new InstanceList (p);
	trainingList.add (instanceIterator);
	// Fixed: dropped a stray empty statement (";;") after this println.
	System.err.println ("Training on " + trainingList.size() + " instances with distribution "
											+ trainingList.targetLabelDistribution() + " and "
											+ trainingList.getPipe().getDataAlphabet().size() + " features");
	// Log per-feature information gain, best features first.
	InfoGain ig = new InfoGain (trainingList);
	for (int i=0; i < ig.numLocations(); i++)
		System.err.println ("InfoGain["+ig.getObjectAtRank(i)+"]="+ig.getValueAtRank(i));
	if (useFeatureInduction) {
		System.err.println ("Beginning Feature Induction");
		RankedFeatureVector.Factory gainFactory = new InfoGain.Factory();
		FeatureInducer fi = new FeatureInducer (gainFactory, trainingList, 20);
		fi.induceFeaturesFor(trainingList, false, false);
	}
	classifier = classifierTrainer.train (trainingList);
	// Freeze features so clustering on new data cannot grow the alphabet.
	classifier.getInstancePipe().getDataAlphabet().stopGrowth();
	return new ConditionalClusterer(p, classifier, threshold);
}
// NOTE(review): mid-method fragment — the enclosing method is not visible here,
// and the locals declared below (dataPipe, dict) are presumably used by code
// past this excerpt; do not rename or reorder without the full file.
// Freezes both the data and target alphabets, then grabs the pipe and data
// dictionary for later use.
trainingList.getDataAlphabet().stopGrowth(); trainingList.getTargetAlphabet().stopGrowth(); Pipe dataPipe = trainingList.getPipe (); Alphabet dict = (Alphabet) trainingList.getDataAlphabet ();