/** {@inheritDoc} */
@Override
public double decrementCount(E key, double count) {
  // Decrementing is just incrementing by the negated amount.
  final double delta = -count;
  return incrementCount(key, delta);
}
/** {@inheritDoc} */
@Override
public double decrementCount(E key) {
  // A bare decrement lowers the key's count by exactly one.
  return incrementCount(key, -1.0);
}
/** {@inheritDoc} */
@Override
public final double incrementCount(E key) {
  // A bare increment bumps the key's count by exactly one.
  return incrementCount(key, 1.0);
}
/**
 * Constructs a new Counter backed by a HashMap, counting the elements
 * in the given Collection.
 *
 * @param collection Each item in the Collection becomes a key in the
 *                   Counter whose count is its multiplicity in the
 *                   Collection.
 */
public ClassicCounter(Collection<E> collection) {
  this();
  for (E item : collection) {
    this.incrementCount(item);
  }
}
/**
 * Divides this template into partial templates, and updates the counts of these
 * partial templates in the {@link CliqueTemplates} object.
 *
 * @param ct the partial templates counter object
 * @param score increment counts by this much
 */
public void unpackToCliqueTemplates(CliqueTemplates ct, double score) {
  // values[0..3] feed the date template.  NOTE(review): the meaning of each
  // slot is assumed from the order the values array is filled elsewhere --
  // confirm against the code that populates it.
  ct.dateCliqueCounter.incrementCount(new DateTemplate(values[0], values[1], values[2], values[3]), score);
  // values[4] is an optional location slot; only counted when present.
  if (values[4] != null) {
    ct.locationCliqueCounter.incrementCount(values[4], score);
  }
  // Remaining slots (5-10) feed the workshop info template.  Note the
  // deliberately non-sequential argument order (6,5,7,9,8,10) expected by
  // the InfoTemplate constructor.
  ct.workshopInfoCliqueCounter.incrementCount(new InfoTemplate(values[6], values[5], values[7], values[9], values[8], values[10], ct), score);
}
/**
 * Creates a Dirichlet process over the given base measure with concentration
 * parameter {@code alpha}.
 *
 * @param baseMeasure the distribution that brand-new values are drawn from
 * @param alpha concentration parameter; stored as pseudo-count mass on the
 *              {@code null} key, which later signals "draw a fresh value"
 */
public DirichletProcess(ProbabilityDistribution<E> baseMeasure, double alpha) {
  this.alpha = alpha;
  this.baseMeasure = baseMeasure;
  this.sampled = new ClassicCounter<>();
  // Reserve alpha mass under the null key: sampling null means we should
  // fall back to the base measure for a new value.
  this.sampled.incrementCount(null, alpha);
}
@Override protected void tallyRoot(Tree lt, double weight) { // this list is in full (not reduced) tag space List<IntDependency> deps = MLEDependencyGrammar.treeToDependencyList(lt, wordIndex, tagIndex); for (IntDependency dependency : deps) { dependencyCounter.incrementCount(dependency, weight); } }
/**
 * Counts {@code rewrite} once for each side sister of {@code label},
 * creating a per-sister counter on first encounter.
 *
 * @param label parent label whose side-sister table is updated; assumed to
 *              already have an entry in {@code sideRules}
 * @param rewrite the rewrite (rule right-hand side) being counted
 * @param sideSisters sister labels on this side; each element is cast to String
 * @param sideRules map from label to (map from sister label to ClassicCounter)
 */
protected void sideCounters(String label, List rewrite, List sideSisters, Map sideRules) {
  // Fetch the inner map once instead of re-looking it up on every access.
  // (The original also cast the same map inconsistently to Map and HashMap.)
  Map sistersForLabel = (Map) sideRules.get(label);
  for (Object sideSister : sideSisters) {
    String sis = (String) sideSister;
    if (!sistersForLabel.containsKey(sis)) {
      sistersForLabel.put(sis, new ClassicCounter());
    }
    ((ClassicCounter) sistersForLabel.get(sis)).incrementCount(rewrite);
  }
}
/**
 * Increments the count for the pair (o1, o2) by {@code count}, keeping the
 * running grand total in sync.
 */
@Override
public void incrementCount(K1 o1, K2 o2, double count) {
  // Delegate to the inner counter for o1, then update the cached total.
  getCounter(o1).incrementCount(o2, count);
  total += count;
}
/**
 * Adds the tagging with count to the data structures in this Lexicon.
 * This unknown-word model only accumulates unseen taggings; a call with a
 * seen word is logged as a misuse and otherwise ignored.
 */
@Override
public void addTagging(boolean seen, IntTaggedWord itw, double count) {
  if (seen) {
    // Intentional no-op apart from the log: seen words are handled elsewhere.
    log.info("UWM.addTagging: Shouldn't call with seen word!");
  } else {
    unSeenCounter.incrementCount(itw, count);
    // if (itw.tag() == nullTag) {
    //   sigs.add(itw);
    // }
  }
}
public void train(TaggedWord tw, double weight) { tokens = tokens + weight; String word = tw.word(); String tag = tw.tag(); // TaggedWord has crummy equality conditions Pair<String,String> wt = new Pair<>(word, tag); wtCount.incrementCount(wt, weight); tagCount.incrementCount(tag, weight); seenWords.add(word); }
public static RVFDatum<String, String> svmLightLineToRVFDatum(String l) { l = l.replaceFirst("#.*$", ""); // remove any trailing comments String[] line = l.split("\\s+"); ClassicCounter<String> features = new ClassicCounter<>(); for (int i = 1; i < line.length; i++) { String[] f = line[i].split(":"); if (f.length != 2) { throw new IllegalArgumentException("Bad data format: " + l); } double val = Double.parseDouble(f[1]); features.incrementCount(f[0], val); } return new RVFDatum<>(features, line[0]); }
/**
 * Adds every feature of {@code addend} into {@code accumulator}, first
 * prefixing each feature name with {@code prefix} (when non-empty) and
 * interning the resulting string if the global flags request it.
 */
private void addAllInterningAndPrefixingRVF(ClassicCounter<String> accumulator, ClassicCounter<String> addend, String prefix) {
  assert prefix != null;
  for (String rawFeat : addend.keySet()) {
    double weight = addend.getCount(rawFeat);
    String feat = prefix.isEmpty() ? rawFeat : prefix + rawFeat;
    if (globalFlags.intern) {
      // Interning keeps one shared String per distinct feature name.
      feat = feat.intern();
    }
    accumulator.incrementCount(feat, weight);
  }
}
/**
 * Estimates the distribution over word lengths by drawing 10,000 samples
 * from this model, printing a progress dot every 1,000 samples.
 */
private Distribution<Integer> getWordLengthDistribution() {
  ClassicCounter<Integer> lengthCounts = new ClassicCounter<>();
  for (int sample = 1; sample <= 10000; sample++) {
    lengthCounts.incrementCount(sampleFrom().length());
    if (sample % 1000 == 0) {
      System.out.print(".");
    }
  }
  System.out.println();
  return Distribution.getDistribution(lengthCounts);
}
/**
 * Draws one sample from the process.  Sampling is proportional to observed
 * counts, with the {@code null} key carrying the alpha pseudo-count for
 * "new value" (see the constructor); drawing null backs off to the base
 * measure.  The drawn value's count is then incremented, biasing future
 * draws toward previously seen values.
 */
public E drawSample(Random random) {
  E drawn = Counters.sample(sampled);
  if (drawn == null) {
    // null is the reserved "fresh draw" outcome: sample the base measure.
    drawn = baseMeasure.drawSample(random);
  }
  sampled.incrementCount(drawn);
  return drawn;
}
/**
 * Builds a TransducerGraph from the given paths by tallying each path and
 * delegating to the counter-based overload.
 *
 * If markovOrder is zero, we always transition back to the start state.
 * If markovOrder is negative, we assume that it is infinite.
 */
public static TransducerGraph createGraphFromPaths(List paths, int markovOrder) {
  ClassicCounter tally = new ClassicCounter();
  for (Object path : paths) {
    tally.incrementCount(path);
  }
  return createGraphFromPaths(tally, markovOrder);
}
/**
 * Computes a Laplace-smoothed prior over input symbols seen anywhere in the
 * training paths, smoothing with 0.5 over twice as many buckets as there
 * are distinct symbols.
 */
protected static Distribution<String> computeInputPrior(Map<String, List<List<String>>> allTrainPaths) {
  ClassicCounter<String> inputCounts = new ClassicCounter<>();
  for (List<List<String>> paths : allTrainPaths.values()) {
    for (List<String> path : paths) {
      for (String input : path) {
        inputCounts.incrementCount(input);
      }
    }
  }
  return Distribution.laplaceSmoothedDistribution(inputCounts, inputCounts.size() * 2, 0.5);
}
/**
 * Counts how many datums in this dataset carry each label.
 *
 * @return a counter mapping each label to its number of datums
 */
public ClassicCounter<L> numDatumsPerLabel() {
  labels = trimToSize(labels);
  ClassicCounter<L> perLabel = new ClassicCounter<>();
  for (int labelId : labels) {
    perLabel.incrementCount(labelIndex.get(labelId));
  }
  return perLabel;
}
/**
 * Builds a real-valued view of the index-ed (binary-featured) datum: every
 * present feature gets count 1.0.
 *
 * @return the index-ed datum as a fresh RVFDatum
 */
@Override
public RVFDatum<L, F> getRVFDatum(int index) {
  ClassicCounter<F> featureCounts = new ClassicCounter<>();
  for (F feature : featureIndex.objects(data[index])) {
    featureCounts.incrementCount(feature);
  }
  return new RVFDatum<>(featureCounts, labelIndex.get(labels[index]));
}
/**
 * @return the index-ed datum, with each stored feature id resolved through
 *         the feature index and paired with its stored real value
 *
 * Note, this returns a new RVFDatum object, not the original RVFDatum
 * that was added to the dataset.
 */
@Override
public RVFDatum<L, F> getRVFDatum(int index) {
  ClassicCounter<F> featureCounts = new ClassicCounter<>();
  int[] feats = data[index];
  double[] vals = values[index];
  for (int j = 0; j < feats.length; j++) {
    featureCounts.incrementCount(featureIndex.get(feats[j]), vals[j]);
  }
  return new RVFDatum<>(featureCounts, labelIndex.get(labels[index]));
}