/**
 * Stores a chunk and maintains the running byte/entry statistics.
 *
 * @param key the cache key
 * @param value the chunk to store; its payload size is added to the byte counter
 * @return the chunk previously mapped to {@code key}, or {@code null} if none
 */
@Override
public Chunk put(Key key, Chunk value) {
    curSizeBytes += value.getData().length;
    Chunk ret = super.put(key, value);
    // Bug fix: when an existing entry is replaced, its bytes are no longer
    // held by the cache. Without this adjustment every overwrite inflated
    // curSizeBytes permanently.
    if (ret != null) {
        curSizeBytes -= ret.getData().length;
    }
    statCurSize.add(curSizeBytes);
    statCurChunks.add(size());
    return ret;
}
/**
 * {@inheritDoc}
 */
@SuppressWarnings("unchecked")
@Override
public IValueConnector getModuleConnector(Module module) {
    if (module == null) {
        return null;
    }
    // Rehash all entries in case module hash codes have changed since they were
    // inserted, while still preserving the LRU iteration order of the map.
    // NOTE(review): assumes moduleConnectors is an ordered map (e.g. LinkedHashMap
    // based) whose iteration order encodes LRU — confirm against its declaration.
    Map<Module, IValueConnector> buff = new LinkedHashMap<>(moduleConnectors);
    moduleConnectors.clear();
    moduleConnectors.putAll(buff);
    IValueConnector moduleConnector = moduleConnectors.get(module);
    if (moduleConnector == null) {
        // Lazily create and cache a connector the first time a module is seen.
        moduleConnector = createModelConnector(module.getName(), ModuleDescriptor.MODULE_DESCRIPTOR);
        moduleConnectors.put(module, moduleConnector);
    }
    moduleConnector.setConnectorValue(module);
    return moduleConnector;
}
/**
 * Adds TransactionInfo to the store.
 * If entries for this transaction already exist the method adds new entry to the list
 * if no entry for the same block exists
 *
 * @param tx the transaction info to add
 * @return true if TransactionInfo was added, false if already exist
 */
public boolean put(TransactionInfo tx) {
    byte[] txHash = tx.getReceipt().getTransaction().getHash();
    List<TransactionInfo> existingInfos = null;
    synchronized (lastSavedTxHash) {
        // NOTE(review): 'object' is presumably a shared dummy value used because
        // lastSavedTxHash acts as an LRU *set* of recently saved hashes — confirm
        // it is a field declared elsewhere in this class.
        // A non-null previous mapping means this hash was saved within the LRU
        // window; a not-yet-full window means we cannot rule out an earlier save.
        // Either way we must consult the persisted entries for duplicates.
        if (lastSavedTxHash.put(new ByteArrayWrapper(txHash), object) != null || !lastSavedTxHash.isFull()) {
            existingInfos = get(txHash);
        }
    }
    // else it is highly unlikely that the transaction was included into another block
    // earlier than 5000 transactions before with regard to regular block import process
    if (existingInfos == null) {
        existingInfos = new ArrayList<>();
    } else {
        for (TransactionInfo info : existingInfos) {
            // Same transaction already recorded for this very block: reject.
            if (FastByteComparisons.equal(info.getBlockHash(), tx.getBlockHash())) {
                return false;
            }
        }
    }
    existingInfos.add(tx);
    put(txHash, existingInfos);
    return true;
}
/**
 * Stores the mapping and keeps the running entry counter in sync.
 *
 * @param key the key to map
 * @param value the value to associate with {@code key}
 * @return the previous value for {@code key}, or {@code null} if the key was absent
 */
@Override
public V put(final K key, final V value) {
    final V previous = super.put(key, value);
    final boolean newEntry = (previous == null);
    if (newEntry) {
        // Only a brand-new key grows the map.
        size.incrementAndGet();
    }
    return previous;
}
/**
 * Stores a cache decision for the given key, if a thread-local cache exists.
 *
 * @param key
 *            The key to set the value for.
 * @param value
 *            The value to set.
 */
private void cachePut(final CacheKey key, final boolean value) {
    final LRUMap<CacheKey, Boolean> cache = SecuredItemImpl.CACHE.get();
    if (cache == null) {
        // No thread-local cache installed; nothing to record.
        return;
    }
    cache.put(key, value);
    SecuredItemImpl.CACHE.set(cache);
}
/**
 * Method for keeping pending writes manageable by auto-flushing once it
 * reaches a certain size.
 */
public void putToState(ByteArray key, ByteArraySet value) {
    // Only sets above the threshold earn a slot in the LRU cache.
    if (value.size() > LRU_CACHE_THRESHOLD) {
        entryCache.put(key, value);
    }
    pendingWrites.put(key, value);
    if (pendingWrites.size() <= indexWriteBatchSize) {
        return;
    }
    // Batch limit exceeded: persist every pending entry and start fresh.
    for (Map.Entry<ByteArray, ByteArraySet> pending : pendingWrites.entrySet()) {
        writeToState(pending.getKey(), pending.getValue());
    }
    pendingWrites.clear();
    state.flush(indexName);
}
/**
 * Method for keeping RI pending writes manageable by auto-flushing once it
 * reaches a certain size.
 */
public void putRIToState(ByteArray key, ByteArraySet value) {
    // Only sets above the threshold earn a slot in the LRU cache.
    if (value.size() > LRU_CACHE_THRESHOLD) {
        entryRICache.put(key, value);
    }
    pendingRIWrites.put(key, value);
    if (pendingRIWrites.size() <= indexWriteBatchSize) {
        return;
    }
    // Batch limit exceeded: persist every pending entry and start fresh.
    for (Map.Entry<ByteArray, ByteArraySet> pending : pendingRIWrites.entrySet()) {
        writeRIToState(pending.getKey(), pending.getValue());
    }
    pendingRIWrites.clear();
    state.flush(reverseIndexName);
}
public BigInteger freq(String aUnigram) { BigInteger f = (BigInteger) unigramCache.get(aUnigram); if (f != null) { return f; } // System.out.printf("Frequency for [%s]... ", aUnigram); try { f = BigInteger.valueOf(web1tSearcher.getFrequency(aUnigram)); // System.out.printf("%d%n", f.longValue()); unigramCache.put(aUnigram, f); return f; } catch (IOException e) { throw new IllegalStateException(e); } }
/**
 * Resolves a type from its descriptor, negative-caching descriptors that
 * previously failed so repeated misses short-circuit.
 */
@Override
protected TypeDefinition resolveType(final String descriptor, final boolean mightBePrimitive) {
    if (failedTypes.containsKey(descriptor)) {
        return null;
    }
    final TypeDefinition resolved = resolveTypeInternal(descriptor, mightBePrimitive);
    if (resolved == null) {
        // Remember the failure so the next lookup is a cheap map hit.
        failedTypes.put(descriptor, true);
    }
    return resolved;
}
/**
 * Registers a resolved type under its internal name.
 *
 * @param type the type to register; must not be {@code null}
 */
public void addTypeDefinition(final TypeDefinition type) {
    VerifyArgument.notNull(type, "type");
    final String internalName = type.getInternalName();
    resolvedTypes.put(internalName, type);
}
private ColumnVisibility getCV(Key k) { Text expr = k.getColumnVisibility(cvHolder); ColumnVisibility vis = (ColumnVisibility) cvCache.get(expr); if (vis == null) { // the column visibility needs to take ownership of the expression vis = new ColumnVisibility(new Text(expr)); cvCache.put(expr, vis); } return vis; }
/**
 * Resolves the type named by {@code descriptor}, skipping descriptors that
 * are already known to be unresolvable.
 */
@Override
protected TypeDefinition resolveType(final String descriptor, final boolean mightBePrimitive) {
    // Fast path: this descriptor already failed once.
    if (failedTypes.containsKey(descriptor)) {
        return null;
    }
    final TypeDefinition definition = resolveTypeInternal(descriptor, mightBePrimitive);
    if (definition != null) {
        return definition;
    }
    // Record the miss so future attempts return immediately.
    failedTypes.put(descriptor, true);
    return null;
}
/**
 * Adds a type to the resolved-type registry, keyed by its internal name.
 *
 * @param type the non-null type definition to record
 */
public void addTypeDefinition(final TypeDefinition type) {
    VerifyArgument.notNull(type, "type");
    resolvedTypes.put(type.getInternalName(), type);
}
/**
 * Reads a chapter's text and splits it into pages.
 *
 * @param chapter the chapter id
 * @return the list of pages for the chapter content, or {@code null} when the
 *         chapter has no local cache or the encoding is unsupported
 */
public synchronized ArrayList<ChapterPage> getChapterContent(int chapter) {
    ArrayList<ChapterPage> pages = chapters.get(bookId + "-" + chapter);
    if (pages != null && pages.size() > 0) {
        // The chapter is already in the in-memory cache; return it directly.
        return pages;
    }
    // Not in the memory cache: read the file cache, then add the result to the LRU cache.
    String temp = readChapterFile(chapter);
    if (temp == null) {
        // No local cache for this chapter.
        return null;
    }
    try {
        // Paginate the chapter content.
        // NOTE(review): mLineWordCount * 2 presumably converts a per-line word
        // count into a byte budget for the GBK double-byte encoding — confirm.
        pages = split(chapter, temp, mLineWordCount * 2, "GBK");
        chapters.put(bookId + "-" + chapter, pages);
        return pages;
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    }
    // Encoding unsupported: fall through and report failure with null.
    return null;
}
/**
 * Looks up the index entry for a foreign key, consulting the in-memory
 * caches before falling back to persisted state.
 */
@Override
public ByteArraySet getIndexEntry(ByteArray foreignKey) {
    Preconditions.checkNotNull(foreignKey);
    if (entryCache.containsKey(foreignKey)) {
        return entryCache.get(foreignKey);
    }
    if (pendingWrites.containsKey(foreignKey)) {
        return pendingWrites.get(foreignKey);
    }
    byte[] bytes = state.get(indexName, foreignKey.getBytes());
    if (bytes == null) {
        return null;
    }
    ByteArraySet set = ByteArraySet.deserialize(bytes);
    // Only sets above the threshold earn a slot in the LRU cache.
    if (set.size() > LRU_CACHE_THRESHOLD) {
        entryCache.put(foreignKey, set);
    }
    return set;
}
/**
 * Looks up the foreign keys for a primary key, consulting the in-memory
 * caches before falling back to persisted state.
 */
@Override
public ByteArraySet getForeignKeys(ByteArray primaryKey) {
    // Consistency fix: the sibling getIndexEntry rejects null keys up front;
    // do the same here instead of failing with an NPE deeper in the lookup.
    if (primaryKey == null) {
        throw new NullPointerException("primaryKey");
    }
    if (entryRICache.containsKey(primaryKey)) {
        return entryRICache.get(primaryKey);
    } else if (pendingRIWrites.containsKey(primaryKey)) {
        return pendingRIWrites.get(primaryKey);
    } else {
        byte[] bytes = state.get(reverseIndexName, primaryKey.getBytes());
        if (bytes == null) {
            return null;
        } else {
            ByteArraySet set = ByteArraySet.deserialize(bytes);
            // Only sets above the threshold earn a slot in the LRU cache.
            if (set.size() > LRU_CACHE_THRESHOLD) {
                entryRICache.put(primaryKey, set);
            }
            return set;
        }
    }
}
// Register (or replace) the definition under its descriptor, capturing any
// previously cached definition for the surrounding code to inspect.
cachedDefinition = resolvedTypes.put(descriptor, typeDefinition);
// NOTE(review): the stored definition adopts this resolver's type loader —
// confirm intended when a definition is being replaced.
typeDefinition.setTypeLoader(this.typeLoader);
// Fetch the n-grams produced by the collector and cache them under the
// current key so later lookups can skip recollection.
List<NGramModel> ngrams = collector.getNgrams();
ngramCache.put(cacheKey, ngrams);
/**
 * Parses the given user agent, consulting the parse cache first.
 * Synchronized: the cache and the inherited parser state are shared.
 *
 * @param userAgent the user agent to parse (mutated in place); may be null
 * @return the same {@code userAgent} instance populated with the parse
 *         result, or {@code null} when {@code userAgent} is null
 */
@Override
public synchronized UserAgent parse(UserAgent userAgent) {
    if (userAgent == null) {
        return null;
    }
    userAgent.reset();
    if (parseCache == null) {
        // Caching disabled: delegate straight to the uncached parser.
        return super.parse(userAgent);
    }
    String userAgentString = userAgent.getUserAgentString();
    UserAgent cachedValue = parseCache.get(userAgentString);
    if (cachedValue != null) {
        // Cache hit: copy the cached result into the caller's instance.
        userAgent.clone(cachedValue);
    } else {
        // Cache miss: parse, then store a separate UserAgent instance —
        // presumably a copy, so callers mutating 'userAgent' later cannot
        // corrupt the cached entry (confirm UserAgent(UserAgent) copies).
        cachedValue = new UserAgent(super.parse(userAgent));
        parseCache.put(userAgentString, cachedValue);
    }
    // We have our answer.
    return userAgent;
}
private ExtensionRepository getRepository(ExtensionRepositoryDescriptor repositoryDescriptor) throws ExtensionRepositoryException { // Try in the cache ExtensionRepository repository = this.repositoriesCache.get(repositoryDescriptor); if (repository == null) { // Try in the registered repositories if (repositoryDescriptor.getId() != null) { repository = getRepository(repositoryDescriptor.getId()); } if (repository == null || !repository.getDescriptor().equals(repositoryDescriptor)) { // Create one ExtensionRepositoryFactory repositoryFactory; try { repositoryFactory = this.componentManager.getInstance(ExtensionRepositoryFactory.class, repositoryDescriptor.getType()); } catch (ComponentLookupException e) { throw new ExtensionRepositoryException( "Unsupported extension repository type [{" + repositoryDescriptor.getType() + "}]", e); } repository = repositoryFactory.createRepository(repositoryDescriptor); } this.repositoriesCache.put(repositoryDescriptor, repository); } return repository; }