Lexicon labelLexicon = learner.getLabelLexicon(); && learner.getLabelLexicon().size() == 0) learner.setLabelLexicon(labelLexicon);
Arrays.sort(rounds); final int totalRounds = rounds[rounds.length - 1]; Lexicon labelLexicon = learner.getLabelLexicon(); && learner.getLabelLexicon().size() == 0) learner.setLabelLexicon(labelLexicon);
public static void main(String[] args) { String exFileName = null; String lexFileName = null; String lcFileName = null; try { exFileName = args[0]; lexFileName = args[1]; lcFileName = args[2]; if (args.length > 3) throw new Exception(); } catch (Exception e) { System.err .println("usage: java edu.illinois.cs.cogcomp.lbjava.parse.ArrayFileParser <example file> <lexicon file> <lc file>"); System.exit(1); } ArrayFileParser parser = new ArrayFileParser(exFileName); Learner learner = Learner.readLearner(lcFileName); learner.readLexicon(lexFileName); for (Object e = parser.next(); e != null; e = parser.next()) { FeatureVector v = new FeatureVector((Object[]) e, learner.getLexicon(), learner.getLabelLexicon()); v.sort(); System.out.println(v); } } }
preExtractLearner.setLabelLexicon(learner.getLabelLexicon()); Lexicon lexicon = learner.getLexicon(); preExtractLearner.setLexicon(lexicon); parser = new ArrayFileParser(baos.toByteArray(), zip); learner.setLabelLexicon(preExtractLearner.getLabelLexicon()); return preExtractLearner;
if (example instanceof Object[] && ((Object[]) example)[0] instanceof int[]) { preExtraction = true; labelLexicon = ((Learner) classifier).getLabelLexicon(); gold = ((Feature) labelLexicon.lookupKey(((int[]) ((Object[]) example)[2])[0])) if (example instanceof Object[] && ((Object[]) example)[0] instanceof int[]) { preExtraction = true; labelLexicon = ((Learner) classifier).getLabelLexicon(); gold = ((Feature) labelLexicon.lookupKey(((int[]) ((Object[]) example)[2])[0]))
/**
 * Produces a loss-augmented copy of the given score set, nudging each label's score by a
 * fixed offset of {@code 1 / candidates}. The gold label (as reported by the labeler) has
 * the offset subtracted from its original score; every other label has it added. With a
 * {@code SparseNetworkLearner} there is one LTU per label, and this corresponds to the
 * binary target being 1 for the gold label and 0 otherwise.
 *
 * @param example The object to make decisions about.
 * @param resultS The original scores (see {@link #scores(Object)}).
 * @return The augmented set of scores.
 */
public ScoreSet scoresAugmented(Object example, ScoreSet resultS) {
    ScoreSet augmented = new ScoreSet();
    Lexicon labels = getLabelLexicon();
    String goldLabel = getLabeler().discreteValue(example);
    // Loop-invariant: the per-label loss offset depends only on the candidate count.
    double offset = 1 / (double) (candidates);

    for (int index = 0; index < labels.size(); index++) {
        String label = labels.lookupKey(index).getStringValue();
        double original = resultS.getScore(label).score;
        // Gold label is rewarded (score lowered for the loss term); all others penalized.
        double adjusted = label.equals(goldLabel) ? original - offset : original + offset;
        augmented.put(label, adjusted);
    }

    return augmented;
}
/**
 * Reads the binary representation of any type of learner (including the label lexicon, but not
 * including the feature lexicon), with the option of cutting off the reading process after the
 * label lexicon and before any learned parameters. When <code>whole</code> is
 * <code>false</code>, the reading process is cut off in this way.
 *
 * <p>
 * This method is appropriate for reading learners as written by
 * {@link #write(ExceptionlessOutputStream)}.
 *
 * @param in The input stream.
 * @param whole Whether or not to read the whole model.
 * @return The learner read from the stream, or <code>null</code> if the stream held no
 *         learner class name.
 **/
public static Learner readLearner(ExceptionlessInputStream in, boolean whole) {
    // The stream begins with the learner's fully qualified class name; a null name
    // indicates there is nothing to read.
    String name = in.readString();
    if (name == null)
        return null;
    Learner result = ClassUtils.getLearner(name);
    result.unclone();

    if (whole)
        result.read(in); // Overridden by descendants
    else {
        result.readLabelLexicon(in); // Should not be overridden by descendants
        // forget() clears learned parameters and also resets the label lexicon, so the
        // lexicon just read must be saved beforehand and restored afterwards.
        Lexicon labelLexicon = result.getLabelLexicon();
        result.forget();
        result.setLabelLexicon(labelLexicon);
    }

    return result;
}