/**
 * Evicts the block for the given key, flagging the eviction as initiated by the
 * cache itself (the {@code true} second argument) rather than by a forced free.
 * Delegates to the two-argument overload.
 *
 * @param cacheKey key of the block to evict
 * @return whatever the two-argument overload returns — presumably {@code true}
 *         when the block was present and evicted; confirm against the overload
 */
@Override public boolean evictBlock(BlockCacheKey cacheKey) { return evictBlock(cacheKey, true); }
// Runnable body: evict the captured cacheKey from the captured cache when this
// task is executed (used to schedule/parallelize evictions).
@Override public void run() { cache.evictBlock(cacheKey); }
public long free(long toFree) { Map.Entry<BlockCacheKey, BucketEntry> entry; long freedBytes = 0; // TODO avoid a cycling siutation. We find no block which is not in use and so no way to free // What to do then? Caching attempt fail? Need some changes in cacheBlock API? while ((entry = queue.pollLast()) != null) { if (evictBlock(entry.getKey(), false)) { freedBytes += entry.getValue().getLength(); } if (freedBytes >= toFree) { return freedBytes; } } return freedBytes; }
/** * This method will find the buckets that are minimally occupied * and are not reference counted and will free them completely * without any constraint on the access times of the elements, * and as a process will completely free at most the number of buckets * passed, sometimes it might not due to changing refCounts * * @param completelyFreeBucketsNeeded number of buckets to free **/ private void freeEntireBuckets(int completelyFreeBucketsNeeded) { if (completelyFreeBucketsNeeded != 0) { // First we will build a set where the offsets are reference counted, usually // this set is small around O(Handler Count) unless something else is wrong Set<Integer> inUseBuckets = new HashSet<Integer>(); for (BucketEntry entry : backingMap.values()) { if (entry.getRefCount() != 0) { inUseBuckets.add(bucketAllocator.getBucketIndex(entry.offset())); } } Set<Integer> candidateBuckets = bucketAllocator.getLeastFilledBuckets( inUseBuckets, completelyFreeBucketsNeeded); for (Map.Entry<BlockCacheKey, BucketEntry> entry : backingMap.entrySet()) { if (candidateBuckets.contains(bucketAllocator .getBucketIndex(entry.getValue().offset()))) { evictBlock(entry.getKey(), false); } } } }
/**
 * Evicts all blocks for a specific HFile.
 * <p>
 * This is used for evict-on-close to remove all blocks of a specific HFile.
 * Keys for a single file form a contiguous range in {@code blocksByHFile},
 * bounded here by offsets {@link Long#MIN_VALUE} and {@link Long#MAX_VALUE}.
 * <p>
 * NOTE(review): this iterates a live subSet view while evictBlock removes keys
 * from the index — safe only if blocksByHFile is a concurrent set with weakly
 * consistent iterators; confirm against the field's declaration.
 *
 * @return the number of blocks evicted
 */
@Override
public int evictBlocksByHfileName(String hfileName) {
  BlockCacheKey lowerBound = new BlockCacheKey(hfileName, Long.MIN_VALUE);
  BlockCacheKey upperBound = new BlockCacheKey(hfileName, Long.MAX_VALUE);
  int evictedCount = 0;
  for (BlockCacheKey blockKey : blocksByHFile.subSet(lowerBound, true, upperBound, true)) {
    if (evictBlock(blockKey)) {
      evictedCount++;
    }
  }
  return evictedCount;
}
cache.evictBlock(key); assertNull(cache.getBlock(key, false, false, false)); CacheTestUtils.getBlockAndAssertEquals(cache, key, blockWithoutNextBlockMetadata, actualBuffer,
// Runnable body: evict the captured cacheKey from the captured cache when this
// task is executed (used to schedule/parallelize evictions).
@Override public void run() { cache.evictBlock(cacheKey); }
/**
 * Frees cached blocks from this bucket, polling from the tail of the queue,
 * until at least {@code toFree} bytes have been reclaimed or the queue is empty.
 *
 * @param toFree number of bytes requested to be freed
 * @return the number of bytes actually freed, which may be less than
 *         {@code toFree} if the queue runs out of evictable entries
 */
public long free(long toFree) {
  Map.Entry<BlockCacheKey, BucketEntry> entry;
  long freedBytes = 0;
  while ((entry = queue.pollLast()) != null) {
    // Only count the entry's bytes when the eviction actually succeeded;
    // the previous code added the length unconditionally, so a failed eviction
    // (e.g. block in use) could make us report enough bytes freed and return
    // without actually reclaiming them.
    if (evictBlock(entry.getKey())) {
      freedBytes += entry.getValue().getLength();
    }
    if (freedBytes >= toFree) {
      return freedBytes;
    }
  }
  return freedBytes;
}
/** * Evicts all blocks for a specific HFile. * <p> * This is used for evict-on-close to remove all blocks of a specific HFile. * * @return the number of blocks evicted */ @Override public int evictBlocksByHfileName(String hfileName) { // Copy the list to avoid ConcurrentModificationException // as evictBlockKey removes the key from the index Set<BlockCacheKey> keySet = blocksByHFile.values(hfileName); if (keySet == null) { return 0; } int numEvicted = 0; List<BlockCacheKey> keysForHFile = ImmutableList.copyOf(keySet); for (BlockCacheKey key : keysForHFile) { if (evictBlock(key)) { ++numEvicted; } } return numEvicted; }
cache.evictBlock(key); assertNull(cache.getBlock(key, false, false, false)); CacheTestUtils.getBlockAndAssertEquals(cache, key, blockWithoutNextBlockMetadata, actualBuffer,