/**
 * Adds features for the token at {@code index} to {@code features}, serving them
 * from a per-sentence cache when possible.
 *
 * <p>The cache is keyed by token index and is only valid for a single token array:
 * an identity comparison ({@code tokens == prevTokens}) detects a new sentence, at
 * which point the whole cache is discarded. Hit/miss counters are updated on every
 * call.
 *
 * @param features list that receives the generated features
 * @param tokens tokens of the current sentence
 * @param index index of the token to generate features for
 * @param previousOutcomes outcomes predicted for the preceding tokens
 */
public void createFeatures(List<String> features, String[] tokens, int index, String[] previousOutcomes) {
  // NOTE: identity (==) comparison is intentional — cached entries are reused only
  // while the caller keeps passing the exact same token array.
  if (tokens != prevTokens) {
    // New sentence: invalidate everything and remember the new array.
    contextsCache.clear();
    prevTokens = tokens;
  } else {
    List<String> hit = contextsCache.get(index);
    if (hit != null) {
      numberOfCacheHits++;
      features.addAll(hit);
      return;
    }
  }
  // Cache miss: delegate to the wrapped generator and remember the result.
  numberOfCacheMisses++;
  List<String> generated = new ArrayList<>();
  generator.createFeatures(generated, tokens, index, previousOutcomes);
  contextsCache.put(index, generated);
  features.addAll(generated);
}
/**
 * Initializes the current instance.
 *
 * @param cacheSize capacity of the contexts cache; the cache is only created when
 *        this value is positive, otherwise no cache is allocated here
 * @param dict tag dictionary stored for later use — NOTE(review): only assigned in
 *        this constructor; confirm how it is consumed elsewhere in the class
 */
public DefaultPOSContextGenerator(int cacheSize, Dictionary dict) {
  this.dict = dict;
  if (cacheSize > 0) {
    contextsCache = new Cache<>(cacheSize);
  }
}
/**
 * Size-bounded eviction hook invoked by {@code LinkedHashMap} after each insertion:
 * returning {@code true} removes the eldest entry once the map grows beyond
 * {@code capacity}, keeping the cache at a fixed maximum size.
 * (This is LRU eviction only if the map was constructed in access order — the
 * constructor is not visible in this chunk; confirm.)
 */
@Override
protected boolean removeEldestEntry(Map.Entry<K,V> eldest) {
  return this.size() > this.capacity;
}
}
if (cache.containsKey(tagString)) { return ((MorphologicalTag) cache.get(tagString)).clone(); if (!cache.containsKey(tagString)) { cache.put(tagString, m.clone());
/**
 * Looks up the tags for the given word/POS-tag key, consulting the cache first and
 * otherwise querying the morphological dictionary.
 *
 * <p>Fix: the computed tag array was never written back to the cache, so the cache
 * read above could never hit for results produced here; the result is now stored
 * before returning.
 *
 * @param key the word + POS tag to look up; may be {@code null}
 * @return the matching tags (text after the first {@code '#'} of each complete tag),
 *         or {@code null} if the key is {@code null} or the dictionary has no entry
 *         for the word
 */
private String[] lookup(WordTag key) {
  if (key == null) {
    return null;
  }
  String[] arr = (String[]) cache.get(key);
  if (arr != null) {
    return arr;
  }
  // Access to dictLookup is serialized — presumably it is not thread-safe (confirm).
  synchronized (dictLookup) {
    List<WordData> data = dictLookup.lookup(key.getWord());
    if (!data.isEmpty()) {
      final String prefix = key.getPostag() + "#";
      List<String> tags = new ArrayList<String>(data.size());
      for (int i = 0; i < data.size(); i++) {
        String completeTag = data.get(i).getTag().toString();
        // Keep entries whose tag matches the requested POS tag; a null POS tag
        // matches every entry (checked second, so prefix is harmlessly "null#").
        if (completeTag.startsWith(prefix) || key.getPostag() == null) {
          tags.add(completeTag.substring(completeTag.indexOf("#") + 1));
        }
      }
      String[] result = tags.toArray(new String[tags.size()]);
      // BUG FIX: populate the cache so subsequent lookups for this key are hits.
      cache.put(key, result);
      return result;
    }
  }
  return null;
}
double[] scores; if (contextsCache != null) { scores = contextsCache.computeIfAbsent(contexts, c -> model.eval(c, probs)); } else { scores = model.eval(contexts, probs);
if (cache.containsKey(tagString)) { return ((MorphologicalTag) cache.get(tagString)).clone(); if (!cache.containsKey(tagString)) { cache.put(tagString, m.clone());
/**
 * Looks up the tags for the given word/POS-tag key, consulting the cache first and
 * otherwise querying the morphological dictionary.
 *
 * <p>Fix: the computed tag array was never written back to the cache, so the cache
 * read above could never hit for results produced here; the result is now stored
 * before returning.
 *
 * @param key the word + POS tag to look up; may be {@code null}
 * @return the matching tags (text after the first {@code '#'} of each complete tag),
 *         or {@code null} if the key is {@code null} or the dictionary has no entry
 *         for the word
 */
private String[] lookup(WordTag key) {
  if (key == null) {
    return null;
  }
  String[] arr = (String[]) cache.get(key);
  if (arr != null) {
    return arr;
  }
  // Access to dictLookup is serialized — presumably it is not thread-safe (confirm).
  synchronized (dictLookup) {
    List<WordData> data = dictLookup.lookup(key.getWord());
    if (!data.isEmpty()) {
      final String prefix = key.getPostag() + "#";
      List<String> tags = new ArrayList<String>(data.size());
      for (int i = 0; i < data.size(); i++) {
        String completeTag = data.get(i).getTag().toString();
        // Keep entries whose tag matches the requested POS tag; a null POS tag
        // matches every entry (checked second, so prefix is harmlessly "null#").
        if (completeTag.startsWith(prefix) || key.getPostag() == null) {
          tags.add(completeTag.substring(completeTag.indexOf("#") + 1));
        }
      }
      String[] result = tags.toArray(new String[tags.size()]);
      // BUG FIX: populate the cache so subsequent lookups for this key are hits.
      cache.put(key, result);
      return result;
    }
  }
  return null;
}
double[] scores; if (contextsCache != null) { scores = contextsCache.computeIfAbsent(contexts, c -> model.eval(c, probs)); } else { scores = model.eval(contexts, probs);
if (cache.containsKey(tagString)) { return ((MorphologicalTag) cache.get(tagString)).clone(); if (!cache.containsKey(tagString)) { cache.put(tagString, m.clone());
/**
 * Creates a caching wrapper around the given feature generator.
 *
 * <p>The cache capacity is hard-coded to 100 entries; callers cannot configure it
 * through this constructor.
 *
 * @param generator the feature generator whose results are cached
 */
public CachedFeatureGenerator(AdaptiveFeatureGenerator generator) {
  this.generator = generator;
  contextsCache = new Cache<>(100); // fixed default capacity
}
/**
 * Size-bounded eviction hook invoked by {@code LinkedHashMap} after each insertion:
 * returning {@code true} removes the eldest entry once the map grows beyond
 * {@code capacity}, keeping the cache at a fixed maximum size.
 * (This is LRU eviction only if the map was constructed in access order — the
 * constructor is not visible in this chunk; confirm.)
 */
@Override
protected boolean removeEldestEntry(Map.Entry<K,V> eldest) {
  return this.size() > this.capacity;
}
}
double[] scores; if (contextsCache != null) { scores = contextsCache.computeIfAbsent(contexts, c -> model.eval(c, probs)); } else { scores = model.eval(contexts, probs);
if (cache.containsKey(tagString)) { return ((MorphologicalTag) cache.get(tagString)).clone(); if (!cache.containsKey(tagString)) { cache.put(tagString, m.clone());
/**
 * Creates a new context generator, optionally backed by a bounded cache.
 *
 * @param cacheSize capacity of the contexts cache; when zero or negative, no cache
 *        is created here
 */
public ChunkContextGenerator(int cacheSize) {
  // Implicit super() suffices; the explicit call was redundant.
  if (cacheSize > 0) {
    contextsCache = new Cache<>(cacheSize);
  }
}