/**
 * Computes a map capacity from a percentage of the JVM's current max heap
 * (as reported by {@code Runtime.getRuntime().maxMemory()}).
 *
 * Let t = percentage of max memory.
 * Let e = round(log_2 t).
 * Then, we choose capacity = 2^e/(size of reference),
 * unless it is outside the closed interval [1, 2^30].
 *
 * @param percentage percentage of max heap memory to size the map for
 * @param mapName name of the map, used for diagnostics/logging
 * @return the computed capacity
 */
public static int computeCapacity(double percentage, String mapName) {
  return computeCapacity(Runtime.getRuntime().maxMemory(), percentage, mapName);
}
/** * Constructor * @param cacheName name to identify the cache by * @param percentage percentage of total java heap space used by this cache * @param expirationTime time for an entry to expire in nanoseconds */ public RetryCache(String cacheName, double percentage, long expirationTime) { int capacity = LightWeightGSet.computeCapacity(percentage, cacheName); capacity = capacity > MAX_CAPACITY ? capacity : MAX_CAPACITY; this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity, expirationTime, 0); this.expirationTime = expirationTime; this.cacheName = cacheName; this.retryCacheMetrics = RetryCacheMetrics.create(this); }
CacheManager(FSNamesystem namesystem, Configuration conf, BlockManager blockManager) { this.namesystem = namesystem; this.blockManager = blockManager; this.nextDirectiveId = 1; this.enabled = conf.getBoolean(DFS_NAMENODE_CACHING_ENABLED_KEY, DFS_NAMENODE_CACHING_ENABLED_DEFAULT); this.maxListCachePoolsResponses = conf.getInt( DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES, DFS_NAMENODE_LIST_CACHE_POOLS_NUM_RESPONSES_DEFAULT); this.maxListCacheDirectivesNumResponses = conf.getInt( DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES, DFS_NAMENODE_LIST_CACHE_DIRECTIVES_NUM_RESPONSES_DEFAULT); scanIntervalMs = conf.getLong( DFS_NAMENODE_PATH_BASED_CACHE_REFRESH_INTERVAL_MS, DFS_NAMENODE_PATH_BASED_CACHE_REFRESH_INTERVAL_MS_DEFAULT); float cachedBlocksPercent = conf.getFloat( DFS_NAMENODE_PATH_BASED_CACHE_BLOCK_MAP_ALLOCATION_PERCENT, DFS_NAMENODE_PATH_BASED_CACHE_BLOCK_MAP_ALLOCATION_PERCENT_DEFAULT); if (cachedBlocksPercent < MIN_CACHED_BLOCKS_PERCENT) { LOG.info("Using minimum value {} for {}", MIN_CACHED_BLOCKS_PERCENT, DFS_NAMENODE_PATH_BASED_CACHE_BLOCK_MAP_ALLOCATION_PERCENT); cachedBlocksPercent = MIN_CACHED_BLOCKS_PERCENT; } this.cachedBlocks = enabled ? new LightWeightGSet<CachedBlock, CachedBlock>( LightWeightGSet.computeCapacity(cachedBlocksPercent, "cachedBlocks")) : new LightWeightGSet<>(0); }
static INodeMap newInstance(INodeDirectory rootDir) { // Compute the map capacity by allocating 1% of total memory int capacity = LightWeightGSet.computeCapacity(1, "INodeMap"); GSet<INode, INodeWithAdditionalFields> map = new LightWeightGSet<>(capacity); map.put(rootDir); return new INodeMap(map); }
LightWeightGSet.computeCapacity(2.0, "BlocksMap")); placementPolicies = new BlockPlacementPolicies( conf, datanodeManager.getFSClusterStats(),
/**
 * Computes a map capacity from a percentage of the JVM's current max heap
 * (as reported by {@code Runtime.getRuntime().maxMemory()}).
 *
 * Let t = percentage of max memory.
 * Let e = round(log_2 t).
 * Then, we choose capacity = 2^e/(size of reference),
 * unless it is outside the closed interval [1, 2^30].
 *
 * @param percentage percentage of max heap memory to size the map for
 * @param mapName name of the map, used for diagnostics/logging
 * @return the computed capacity
 */
public static int computeCapacity(double percentage, String mapName) {
  return computeCapacity(Runtime.getRuntime().maxMemory(), percentage, mapName);
}
/**
 * Computes a map capacity from a percentage of the JVM's current max heap
 * (as reported by {@code Runtime.getRuntime().maxMemory()}).
 *
 * Let t = percentage of max memory.
 * Let e = round(log_2 t).
 * Then, we choose capacity = 2^e/(size of reference),
 * unless it is outside the closed interval [1, 2^30].
 *
 * @param percentage percentage of max heap memory to size the map for
 * @param mapName name of the map, used for diagnostics/logging
 * @return the computed capacity
 */
public static int computeCapacity(double percentage, String mapName) {
  return computeCapacity(Runtime.getRuntime().maxMemory(), percentage, mapName);
}
/**
 * Computes a map capacity from a percentage of the JVM's current max heap
 * (as reported by {@code Runtime.getRuntime().maxMemory()}).
 *
 * Let t = percentage of max memory.
 * Let e = round(log_2 t).
 * Then, we choose capacity = 2^e/(size of reference),
 * unless it is outside the closed interval [1, 2^30].
 *
 * @param percentage percentage of max heap memory to size the map for
 * @param mapName name of the map, used for diagnostics/logging
 * @return the computed capacity
 */
public static int computeCapacity(double percentage, String mapName) {
  return computeCapacity(Runtime.getRuntime().maxMemory(), percentage, mapName);
}
/**
 * Computes a map capacity from a percentage of the JVM's current max heap
 * (as reported by {@code Runtime.getRuntime().maxMemory()}).
 *
 * Let t = percentage of max memory.
 * Let e = round(log_2 t).
 * Then, we choose capacity = 2^e/(size of reference),
 * unless it is outside the closed interval [1, 2^30].
 *
 * @param percentage percentage of max heap memory to size the map for
 * @param mapName name of the map, used for diagnostics/logging
 * @return the computed capacity
 */
public static int computeCapacity(double percentage, String mapName) {
  return computeCapacity(Runtime.getRuntime().maxMemory(), percentage, mapName);
}
/**
 * Constructor
 * @param cacheName name to identify the cache by
 * @param percentage percentage of total java heap space used by this cache
 * @param expirationTime time for an entry to expire in nanoseconds
 */
public RetryCacheDistributed(String cacheName, double percentage,
    long expirationTime) {
  super(cacheName, percentage, expirationTime);
  // Recompute the capacity with the same MAX_CAPACITY floor the parent uses,
  // then swap in the distributed cache implementation.
  final int capacity = Math.max(
      LightWeightGSet.computeCapacity(percentage, cacheName), MAX_CAPACITY);
  this.set = new LightWeightCacheDistributed(capacity, capacity,
      expirationTime, 0);
}
/** * Constructor * @param cacheName name to identify the cache by * @param percentage percentage of total java heap space used by this cache * @param expirationTime time for an entry to expire in nanoseconds */ public RetryCache(String cacheName, double percentage, long expirationTime) { int capacity = LightWeightGSet.computeCapacity(percentage, cacheName); capacity = capacity > MAX_CAPACITY ? capacity : MAX_CAPACITY; this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity, expirationTime, 0); this.expirationTime = expirationTime; this.cacheName = cacheName; this.retryCacheMetrics = RetryCacheMetrics.create(this); }
/** * Constructor * @param cacheName name to identify the cache by * @param percentage percentage of total java heap space used by this cache * @param expirationTime time for an entry to expire in nanoseconds */ public RetryCache(String cacheName, double percentage, long expirationTime) { int capacity = LightWeightGSet.computeCapacity(percentage, cacheName); capacity = capacity > 16 ? capacity : 16; this.set = new LightWeightCache<CacheEntry, CacheEntry>(capacity, capacity, expirationTime, 0); this.expirationTime = expirationTime; this.cacheName = cacheName; this.retryCacheMetrics = RetryCacheMetrics.create(this); }
/**
 * Test for {@link LightWeightGSet#computeCapacity(long, double, String)}
 * with invalid negative max memory
 */
@Test(expected=HadoopIllegalArgumentException.class)
public void testComputeCapacityInvalidMemory() {
  LightWeightGSet.computeCapacity(-1, 50.0, "testMap");
}
static INodeMap newInstance(INodeDirectory rootDir) { // Compute the map capacity by allocating 1% of total memory int capacity = LightWeightGSet.computeCapacity(1, "INodeMap"); GSet<INode, INodeWithAdditionalFields> map = new LightWeightGSet<INode, INodeWithAdditionalFields>(capacity); map.put(rootDir); return new INodeMap(map); }
/**
 * Test for {@link LightWeightGSet#computeCapacity(long, double, String)}
 * with invalid percent less than 0.
 */
@Test(expected=HadoopIllegalArgumentException.class)
public void testComputeCapacityNegativePercent() {
  LightWeightGSet.computeCapacity(1024, -1.0, "testMap");
}
/**
 * Test for {@link LightWeightGSet#computeCapacity(long, double, String)}
 * with invalid percent less than 0.
 */
@Test(expected=HadoopIllegalArgumentException.class)
public void testComputeCapacityNegativePercent() {
  LightWeightGSet.computeCapacity(1024, -1.0, "testMap");
}
/**
 * Test for {@link LightWeightGSet#computeCapacity(long, double, String)}
 * with invalid percent greater than 100.
 */
@Test(expected=HadoopIllegalArgumentException.class)
public void testComputeCapacityInvalidPercent() {
  LightWeightGSet.computeCapacity(1024, 101.0, "testMap");
}
/**
 * Test for {@link LightWeightGSet#computeCapacity(long, double, String)}
 * with invalid percent greater than 100.
 */
@Test(expected=HadoopIllegalArgumentException.class)
public void testComputeCapacityInvalidPercent() {
  LightWeightGSet.computeCapacity(1024, 101.0, "testMap");
}
/**
 * Test for {@link LightWeightGSet#computeCapacity(long, double, String)}
 * with invalid negative max memory
 */
@Test(expected=HadoopIllegalArgumentException.class)
public void testComputeCapacityInvalidMemory() {
  LightWeightGSet.computeCapacity(-1, 50.0, "testMap");
}
static INodeMap newInstance(INodeDirectory rootDir) { // Compute the map capacity by allocating 1% of total memory int capacity = LightWeightGSet.computeCapacity(1, "INodeMap"); GSet<INode, INodeWithAdditionalFields> map = new LightWeightGSet<INode, INodeWithAdditionalFields>(capacity); map.put(rootDir); return new INodeMap(map); }