/**
 * Marks in {@code wp} the bit for every feature index that occurs in {@code fv}.
 * Feature values are ignored; only presence is recorded.
 */
private void addPresentFeatures (BitSet wp, FeatureVector fv) {
  int numLocations = fv.numLocations ();
  for (int loc = 0; loc < numLocations; loc++) {
    wp.set (fv.indexAtLocation (loc));
  }
}
/**
 * Sets, in the given bit set, one bit per feature index present in the vector.
 * Values attached to the features are not consulted.
 */
private void addPresentFeatures (BitSet wp, FeatureVector fv) {
  int loc = 0;
  while (loc < fv.numLocations ()) {
    int featureIndex = fv.indexAtLocation (loc);
    wp.set (featureIndex);
    loc++;
  }
}
/**
 * Renders each field as {@code field=value1,value2,} (note the trailing comma),
 * one field per line unless {@code oneLine} is true, in which case fields are
 * concatenated with no separator. Field order follows {@code field2values.keys()},
 * which is presumably unordered — TODO confirm if callers rely on ordering.
 *
 * @param oneLine if true, omit the newline after each field's values
 * @return the concatenated rendering of all fields and their values
 */
public String toString (boolean oneLine) {
  // StringBuilder instead of StringBuffer: no shared mutation here, so the
  // per-call synchronization of StringBuffer buys nothing. Chained appends
  // avoid building a throwaway intermediate String per feature.
  StringBuilder b = new StringBuilder();
  int[] keys = field2values.keys();
  for (int i = 0; i < keys.length; i++) {
    b.append(fieldAlph.lookupObject(keys[i])).append('=');
    FeatureVector v = (FeatureVector) field2values.get(keys[i]);
    for (int j = 0; j < v.numLocations(); j++) {
      b.append(valueAlph.lookupObject(v.indexAtLocation(j))).append(',');
    }
    if (!oneLine) b.append('\n');
  }
  return b.toString();
}
/**
 * Builds a textual dump of every field: {@code field=v1,v2,} with a trailing
 * comma after the last value. A newline separates fields unless {@code oneLine}
 * is set (then fields run together with no separator). Iteration order is
 * whatever {@code field2values.keys()} yields — assumed unordered; verify if
 * a stable order matters to callers.
 *
 * @param oneLine suppress per-field newlines when true
 * @return the assembled string
 */
public String toString (boolean oneLine) {
  // Single-threaded, local buffer: StringBuilder is the idiomatic choice over
  // StringBuffer, and explicit append() calls avoid intermediate String garbage.
  StringBuilder b = new StringBuilder();
  int[] keys = field2values.keys();
  for (int i = 0; i < keys.length; i++) {
    b.append(fieldAlph.lookupObject(keys[i])).append('=');
    FeatureVector v = (FeatureVector) field2values.get(keys[i]);
    for (int j = 0; j < v.numLocations(); j++) {
      b.append(valueAlph.lookupObject(v.indexAtLocation(j))).append(',');
    }
    if (!oneLine) b.append('\n');
  }
  return b.toString();
}
/**
 * Formats every field/value pair held in {@code field2values} as
 * {@code field=v1,v2,} (trailing comma intentional, matching prior output),
 * with each field on its own line unless {@code oneLine} is true.
 *
 * @param oneLine when true, fields are emitted back-to-back with no newline
 * @return string representation of all fields and their values
 */
public String toString (boolean oneLine) {
  // Replaced StringBuffer with StringBuilder (no concurrent access here) and
  // concatenation-inside-append with chained appends — identical output,
  // less temporary-String churn.
  StringBuilder b = new StringBuilder();
  int[] keys = field2values.keys();
  for (int i = 0; i < keys.length; i++) {
    b.append(fieldAlph.lookupObject(keys[i])).append('=');
    FeatureVector v = (FeatureVector) field2values.get(keys[i]);
    for (int j = 0; j < v.numLocations(); j++) {
      b.append(valueAlph.lookupObject(v.indexAtLocation(j))).append(',');
    }
    if (!oneLine) b.append('\n');
  }
  return b.toString();
}
/**
 * Computes the linear prediction for an instance: the intercept weight plus the
 * dot product of the parameter vector with the instance's (sparse) predictors.
 *
 * @param instance carrier whose data is expected to be a FeatureVector
 * @return the raw (unthresholded) linear prediction
 */
public double predict(Instance instance) {
  FeatureVector predictors = (FeatureVector) instance.getData();
  double prediction = parameters[interceptIndex];
  int numLocations = predictors.numLocations();
  for (int loc = 0; loc < numLocations; loc++) {
    int featureIndex = predictors.indexAtLocation(loc);
    prediction += parameters[featureIndex] * predictors.valueAtLocation(loc);
  }
  return prediction;
}
public void increment (FeatureVector fv, double scale) { if (fv.getAlphabet() != dictionary) throw new IllegalArgumentException ("Vocabularies don't match."); for (int fvi = 0; fvi < fv.numLocations(); fvi++) // Originally, the value of the feature was not being taken into account here, // so words were only counted once per document! - gdruck // increment (fv.indexAtLocation(fvi), scale); increment(fv.indexAtLocation(fvi), scale * fv.valueAtLocation(fvi)); }
/**
 * Accumulates the features of {@code fv} into {@code counts}: adds 1 per
 * occurrence when {@code countInstances} is true, otherwise adds the
 * feature's stored value.
 *
 * @param counts destination array, indexed by feature index (mutated in place)
 * @param fv sparse vector supplying feature indices and values
 * @param countInstances true to count presence, false to sum values
 */
private static void countVector(double[] counts, FeatureVector fv, boolean countInstances) {
  int numLocations = fv.numLocations();
  for (int loc = 0; loc < numLocations; loc++) {
    double delta = countInstances ? 1 : fv.valueAtLocation(loc);
    counts[fv.indexAtLocation(loc)] += delta;
  }
}
/**
 * Returns the linear score of {@code instance}: intercept plus the sparse
 * dot product between the learned parameters and the instance's features.
 *
 * @param instance its data is cast to FeatureVector
 * @return the linear prediction value
 */
public double predict(Instance instance) {
  double score = parameters[interceptIndex];
  FeatureVector predictors = (FeatureVector) instance.getData();
  for (int loc = 0, n = predictors.numLocations(); loc < n; loc++) {
    score += parameters[predictors.indexAtLocation(loc)] * predictors.valueAtLocation(loc);
  }
  return score;
}
public void increment (FeatureVector fv, double scale) { if (fv.getAlphabet() != dictionary) throw new IllegalArgumentException ("Vocabularies don't match."); for (int fvi = 0; fvi < fv.numLocations(); fvi++) // Originally, the value of the feature was not being taken into account here, // so words were only counted once per document! - gdruck // increment (fv.indexAtLocation(fvi), scale); increment(fv.indexAtLocation(fvi), scale * fv.valueAtLocation(fvi)); }
public void preProcess(FeatureVector fv) { cache.resetQuick(); int fi; // cache constrained input features for (int loc = 0; loc < fv.numLocations(); loc++) { fi = fv.indexAtLocation(loc); if (constraints.containsKey(fi)) { cache.add(fi); } } }
public void preProcess(FeatureVector fv) { cache.resetQuick(); int fi; // cache constrained input features for (int loc = 0; loc < fv.numLocations(); loc++) { fi = fv.indexAtLocation(loc); if (constraints.containsKey(fi)) { cache.add(fi); } } }
/**
 * Computes the log-probability of the instance's features under label
 * {@code labelIndex}: the sum over features of value * logProbability(index).
 *
 * @param instance its data is cast to FeatureVector
 * @param labelIndex selects the per-label distribution in {@code p}
 * @return the accumulated log probability of the data given the label
 */
private double dataLogProbability (Instance instance, int labelIndex) {
  FeatureVector fv = (FeatureVector) instance.getData ();
  double logProb = 0;
  for (int loc = 0, n = fv.numLocations(); loc < n; loc++) {
    logProb += fv.valueAtLocation(loc) * p[labelIndex].logProbability(fv.indexAtLocation(loc));
  }
  return logProb;
}
public void preProcess(FeatureVector fv) { cache.resetQuick(); int fi; // cache constrained input features for (int loc = 0; loc < fv.numLocations(); loc++) { fi = fv.indexAtLocation(loc); if (constraints.containsKey(fi)) { cache.add(fi); } } }
/**
 * Sums, over every feature of the instance, the feature value weighted by
 * the log probability of that feature under the distribution for the label.
 *
 * @param instance carrier whose data is a FeatureVector
 * @param labelIndex index into the per-label distributions {@code p}
 * @return log P(data | label)
 */
private double dataLogProbability (Instance instance, int labelIndex) {
  FeatureVector fv = (FeatureVector) instance.getData ();
  int numLocations = fv.numLocations();
  double total = 0;
  for (int fvi = 0; fvi < numLocations; fvi++) {
    int featureIndex = fv.indexAtLocation(fvi);
    total += fv.valueAtLocation(fvi) * p[labelIndex].logProbability(featureIndex);
  }
  return total;
}
public void preProcess(FeatureVector fv) { cache.resetQuick(); int fi; // cache constrained input features for (int loc = 0; loc < fv.numLocations(); loc++) { fi = fv.indexAtLocation(loc); if (constraints.containsKey(fi)) { cache.add(fi); } } }
public void preProcess(FeatureVector fv) { cache.resetQuick(); int fi; // cache constrained input features for (int loc = 0; loc < fv.numLocations(); loc++) { fi = fv.indexAtLocation(loc); if (constraints.containsKey(fi)) { cache.add(fi); } } }
/**
 * Resets the cache, then for each feature of {@code fv} present as a key in
 * {@code constraintsMap}, caches the mapped value (not the raw feature index).
 */
public void preProcess(FeatureVector fv) {
  cache.resetQuick();
  int numLocations = fv.numLocations();
  for (int loc = 0; loc < numLocations; loc++) {
    int featureIndex = fv.indexAtLocation(loc);
    if (constraintsMap.containsKey(featureIndex)) {
      cache.add(constraintsMap.get(featureIndex));
    }
  }
}
/**
 * Clears and repopulates the cache with the constraintsMap values keyed by
 * the feature indices occurring in {@code fv}.
 */
public void preProcess(FeatureVector fv) {
  cache.resetQuick();
  for (int loc = 0, n = fv.numLocations(); loc < n; loc++) {
    int idx = fv.indexAtLocation(loc);
    if (constraintsMap.containsKey(idx)) cache.add(constraintsMap.get(idx));
  }
}
/**
 * Prepares the cache for this vector: empties it, then adds the mapped
 * constraint value for every feature index that constraintsMap contains.
 */
public void preProcess(FeatureVector fv) {
  cache.resetQuick();
  final int count = fv.numLocations();
  int position = 0;
  while (position < count) {
    final int feature = fv.indexAtLocation(position);
    if (constraintsMap.containsKey(feature)) {
      cache.add(constraintsMap.get(feature));
    }
    position++;
  }
}