@Override public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat, boolean updateCacheMetrics) { // TODO: is there a hole here, or just awkwardness since in the lruCache getBlock // we end up calling l2Cache.getBlock. // We are not in a position to exactly look at LRU cache or BC as BlockType may not be getting // passed always. return onHeapCache.containsBlock(cacheKey)? onHeapCache.getBlock(cacheKey, caching, repeat, updateCacheMetrics): l2Cache.getBlock(cacheKey, caching, repeat, updateCacheMetrics); }
// NOTE(review): fragment — the enclosing method and the matching catch/finally for this
// try block are outside the visible span.
// Probe the cache with all side-effect flags off (no caching, no repeat, no metrics
// update) so the lookup itself does not perturb cache state before validation.
Cacheable existingBlock = blockCache.getBlock(cacheKey, false, false, false);
try {
  // Validate the candidate block against whatever is already cached under this key
  // (existingBlock may be null); presumably returns a comparison/validation code —
  // TODO confirm against BlockCacheUtil.validateBlockAddition's contract.
  int comparison = BlockCacheUtil.validateBlockAddition(existingBlock, newBlock, cacheKey);
@Override
public void doAnAction() throws Exception {
  // One reader iteration: fetch the block under test; if present, verify its payload
  // matches the expected bytes, otherwise back off briefly so a concurrent writer can
  // populate the cache.
  ByteArrayCacheable returned =
      (ByteArrayCacheable) toBeTested.getBlock(key, false, false, true);
  if (returned != null) {
    assertArrayEquals(buf, returned.buf);
  } else {
    Thread.sleep(10);
  }
  totalQueries.incrementAndGet();
}
};
@Override
public void doAnAction() throws Exception {
  // One worker iteration: take a unique block from the shared queue, cache it,
  // read it back, verify it, then evict it and confirm the eviction took effect.
  if (!blocksToTest.isEmpty()) {
    HFileBlockPair ourBlock = blocksToTest.poll();
    // if we run out of blocks to test, then we should stop the tests.
    if (ourBlock == null) {
      ctx.setStopFlag(true);
      return;
    }
    toBeTested.cacheBlock(ourBlock.blockName, ourBlock.block);
    Cacheable retrievedBlock = toBeTested.getBlock(ourBlock.blockName, false, false, true);
    if (retrievedBlock != null) {
      // Cache returned our block: it must compare equal, and after eviction the
      // same key must miss.
      assertEquals(ourBlock.block, retrievedBlock);
      toBeTested.evictBlock(ourBlock.blockName);
      hits.incrementAndGet();
      assertNull(toBeTested.getBlock(ourBlock.blockName, false, false, true));
    } else {
      // The block may have been evicted under pressure before we read it back.
      miss.incrementAndGet();
    }
    totalQueries.incrementAndGet();
  }
}
};
/**
 * Caches {@code blockToCache} under {@code key}, reads it back out of the cache,
 * serializes the retrieved block into {@code destBuffer}, asserts the serialized bytes
 * equal {@code expectedBuffer}, and finally returns the block reference to the cache.
 */
public static void getBlockAndAssertEquals(BlockCache cache, BlockCacheKey key,
    Cacheable blockToCache, ByteBuffer destBuffer, ByteBuffer expectedBuffer) {
  destBuffer.clear();
  cache.cacheBlock(key, blockToCache);
  Cacheable fetched = cache.getBlock(key, false, false, false);
  fetched.serialize(destBuffer, true);
  assertEquals(expectedBuffer, destBuffer);
  // Release our reference so the cache can manage the block's lifecycle.
  cache.returnBlock(key, fetched);
}
}
@Override
public void doAnAction() throws Exception {
  // One writer/reader iteration over 100 keys derived from this thread's index:
  // if the key is already cached verify its payload, otherwise insert it.
  for (int j = 0; j < 100; j++) {
    BlockCacheKey key = new BlockCacheKey("key_" + finalI + "_" + j, 0);
    // Payload is deterministic per (thread, key) so a later read can be verified.
    Arrays.fill(buf, (byte) (finalI * j));
    final ByteArrayCacheable bac = new ByteArrayCacheable(buf);
    ByteArrayCacheable gotBack =
        (ByteArrayCacheable) toBeTested.getBlock(key, true, false, true);
    if (gotBack != null) {
      assertArrayEquals(gotBack.buf, bac.buf);
    } else {
      toBeTested.cacheBlock(key, bac);
    }
  }
  totalQueries.incrementAndGet();
}
};
// NOTE(review): fragment — the braces opened here close outside the visible span,
// and the surrounding loop/method context is not visible.
// Key should not be cached yet at this point.
assertNull(toBeTested.getBlock(block.blockName, true, false, true));
HFileBlock buf = (HFileBlock) toBeTested.getBlock(block.blockName, true, false, true);
if (buf != null) {
  assertEquals(block.block, buf);
  if (toBeTested.getBlock(block.blockName, true, false, true) != null) {
    toBeTested.cacheBlock(block.blockName, block.block);
    // BucketCache is excluded from whatever follows — TODO confirm why from context.
    if (!(toBeTested instanceof BucketCache)) {
// NOTE(review): fragment — enclosing method not visible.
// Miss in the primary tier: delegate the lookup to the victim handler, forwarding the
// caller's caching/repeat/metrics flags unchanged.
Cacheable result = victimHandler.getBlock(cacheKey, caching, repeat, updateCacheMetrics);
/**
 * Loads {@code count} data blocks and {@code count} index blocks into the cache,
 * generating exactly two cache hits per iteration (one data, one index), asserts the
 * resulting hit count, and then evicts one block of each kind.
 */
private void addDataAndHits(final BlockCache bc, final int count) {
  Cacheable dataEntry = new DataCacheEntry();
  Cacheable indexEntry = new IndexCacheEntry();
  for (int i = 0; i < count; i++) {
    BlockCacheKey dataKey = new BlockCacheKey("f", i);
    BlockCacheKey indexKey = new BlockCacheKey("f", i + count);
    // First lookup happens before the block is cached, so it is always a miss.
    bc.getBlock(dataKey, true, false, true);
    bc.cacheBlock(dataKey, dataEntry);
    bc.cacheBlock(indexKey, indexEntry);
    // Both lookups now hit: one data block, one index block.
    bc.getBlock(dataKey, true, false, true);
    bc.getBlock(indexKey, true, false, true);
  }
  assertEquals(2 * count /*Data and Index blocks*/, bc.getStats().getHitCount());
  BlockCacheKey dataKey = new BlockCacheKey("f", 0);
  BlockCacheKey indexKey = new BlockCacheKey("f", 0 + count);
  bc.evictBlock(dataKey);
  bc.evictBlock(indexKey);
  // NOTE(review): return value discarded — presumably just exercising the stats
  // accessor; confirm intent with the original author.
  bc.getStats().getEvictedCount();
}
// NOTE(review): fragment — the enclosing method and the matching closing braces are
// outside the visible span.
if (cache != null) {
  // Look the block up in the configured block cache; cacheBlock/useLock/updateCacheMetrics
  // are forwarded from the caller's read context.
  HFileBlock cachedBlock =
      (HFileBlock) cache.getBlock(cacheKey, cacheBlock, useLock, updateCacheMetrics);
  if (cachedBlock != null) {
    // Only relevant when compressed-block caching is enabled for this block's category.
    if (cacheConf.shouldCacheCompressed(cachedBlock.getBlockType().getCategory())) {
private void readStoreFile(Path storeFilePath) throws Exception { // Open the file HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf); while (!reader.prefetchComplete()) { // Sleep for a bit Thread.sleep(1000); } // Check that all of the data blocks were preloaded BlockCache blockCache = cacheConf.getBlockCache().get(); long offset = 0; while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) { HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, null); BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset); boolean isCached = blockCache.getBlock(blockCacheKey, true, false, true) != null; if (block.getBlockType() == BlockType.DATA || block.getBlockType() == BlockType.ROOT_INDEX || block.getBlockType() == BlockType.INTERMEDIATE_INDEX) { assertTrue(isCached); } offset += block.getOnDiskSizeWithHeader(); } }
// NOTE(review): fragment — the enclosing loop/method and the body of this if are
// outside the visible span.
BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);
// Present-in-cache check only; flags request caching semantics with metrics update.
boolean isCached = cache.getBlock(blockCacheKey, true, false, true) != null;
// Expected cachedness is driven by the cache-on-write type for this block type.
boolean shouldBeCached = cowType.shouldBeCached(block.getBlockType());
if (shouldBeCached != isCached) {
// NOTE(review): fragment — the enclosing loop/method is outside the visible span.
BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);
// Fetch the block itself (not just a presence check) so its contents can be
// inspected by code following this fragment.
HFileBlock fromCache = (HFileBlock) blockCache.getBlock(blockCacheKey, true, false, true);
boolean isCached = fromCache != null;
// Track which offsets were found cached.
cachedBlocksOffset.add(offset);
@Override public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat, boolean updateCacheMetrics) { // TODO: is there a hole here, or just awkwardness since in the lruCache getBlock // we end up calling l2Cache.getBlock. if (lruCache.containsBlock(cacheKey)) { return lruCache.getBlock(cacheKey, caching, repeat, updateCacheMetrics); } Cacheable result = l2Cache.getBlock(cacheKey, caching, repeat, updateCacheMetrics); return result; }
@Override
public void doAnAction() throws Exception {
  // One reader iteration: fetch the block under test; if present, verify its payload,
  // otherwise sleep briefly so a concurrent writer can populate the cache.
  ByteArrayCacheable returned =
      (ByteArrayCacheable) toBeTested.getBlock(key, false, false, true);
  if (returned != null) {
    assertArrayEquals(buf, returned.buf);
  } else {
    Thread.sleep(10);
  }
  totalQueries.incrementAndGet();
}
};
@Override
public void doAnAction() throws Exception {
  // One worker iteration: take a unique block from the shared queue, cache it,
  // read it back, verify it, then evict it and confirm the eviction.
  if (!blocksToTest.isEmpty()) {
    HFileBlockPair ourBlock = blocksToTest.poll();
    // if we run out of blocks to test, then we should stop the tests.
    if (ourBlock == null) {
      ctx.setStopFlag(true);
      return;
    }
    toBeTested.cacheBlock(ourBlock.blockName, ourBlock.block);
    Cacheable retrievedBlock = toBeTested.getBlock(ourBlock.blockName, false, false, true);
    if (retrievedBlock != null) {
      // Cache returned our block: it must compare equal, and after eviction the
      // same key must miss.
      assertEquals(ourBlock.block, retrievedBlock);
      toBeTested.evictBlock(ourBlock.blockName);
      hits.incrementAndGet();
      assertNull(toBeTested.getBlock(ourBlock.blockName, false, false, true));
    } else {
      // The block may have been evicted under pressure before we read it back.
      miss.incrementAndGet();
    }
    totalQueries.incrementAndGet();
  }
}
};
/**
 * Caches the given block, reads it back, serializes the retrieved block into
 * {@code destBuffer}, asserts the serialized bytes equal {@code expectedBuffer},
 * and returns the block reference to the cache.
 */
public static void getBlockAndAssertEquals(BlockCache cache, BlockCacheKey key,
    Cacheable blockToCache, ByteBuffer destBuffer, ByteBuffer expectedBuffer) {
  destBuffer.clear();
  cache.cacheBlock(key, blockToCache);
  Cacheable actualBlock = cache.getBlock(key, false, false, false);
  actualBlock.serialize(destBuffer, true);
  assertEquals(expectedBuffer, destBuffer);
  // Release the reference so the cache can manage the block's lifecycle.
  cache.returnBlock(key, actualBlock);
}
}
@Override
public void doAnAction() throws Exception {
  // One writer/reader iteration over 100 keys derived from this thread's index:
  // if the key is already cached verify its payload, otherwise insert it.
  for (int j = 0; j < 100; j++) {
    BlockCacheKey key = new BlockCacheKey("key_" + finalI + "_" + j, 0);
    // Payload is deterministic per (thread, key) so a later read can be verified.
    Arrays.fill(buf, (byte) (finalI * j));
    final ByteArrayCacheable bac = new ByteArrayCacheable(buf);
    ByteArrayCacheable gotBack =
        (ByteArrayCacheable) toBeTested.getBlock(key, true, false, true);
    if (gotBack != null) {
      assertArrayEquals(gotBack.buf, bac.buf);
    } else {
      toBeTested.cacheBlock(key, bac);
    }
  }
  totalQueries.incrementAndGet();
}
};
/**
 * Loads {@code count} data blocks and {@code count} index blocks into the cache,
 * generating exactly two cache hits per iteration (one data, one index), asserts the
 * resulting hit count, then evicts one block of each kind.
 */
private void addDataAndHits(final BlockCache bc, final int count) {
  Cacheable dce = new DataCacheEntry();
  Cacheable ice = new IndexCacheEntry();
  for (int i = 0; i < count; i++) {
    BlockCacheKey bckd = new BlockCacheKey("f", i);
    BlockCacheKey bcki = new BlockCacheKey("f", i + count);
    // First lookup happens before the block is cached, so it is always a miss.
    bc.getBlock(bckd, true, false, true);
    bc.cacheBlock(bckd, dce);
    bc.cacheBlock(bcki, ice);
    // Both lookups now hit: one data block, one index block.
    bc.getBlock(bckd, true, false, true);
    bc.getBlock(bcki, true, false, true);
  }
  assertEquals(2 * count /*Data and Index blocks*/, bc.getStats().getHitCount());
  BlockCacheKey bckd = new BlockCacheKey("f", 0);
  BlockCacheKey bcki = new BlockCacheKey("f", 0 + count);
  bc.evictBlock(bckd);
  bc.evictBlock(bcki);
  // NOTE(review): return value discarded — presumably just exercising the stats
  // accessor; confirm intent with the original author.
  bc.getStats().getEvictedCount();
}
/**
 * Opens the store file, waits for asynchronous prefetch to complete, then walks every
 * block up to the load-on-open section and asserts that all DATA, ROOT_INDEX and
 * INTERMEDIATE_INDEX blocks were placed in the block cache by prefetch.
 */
private void readStoreFile(Path storeFilePath) throws Exception {
  // Open the file
  HFile.Reader reader = HFile.createReader(fs, storeFilePath, cacheConf, true, conf);
  while (!reader.prefetchComplete()) {
    // Sleep for a bit
    Thread.sleep(1000);
  }
  // Check that all of the data blocks were preloaded
  BlockCache blockCache = cacheConf.getBlockCache();
  long offset = 0;
  while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
    HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, null);
    BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);
    boolean isCached = blockCache.getBlock(blockCacheKey, true, false, true) != null;
    // Only data and index blocks are expected to be prefetched into the cache.
    if (block.getBlockType() == BlockType.DATA || block.getBlockType() == BlockType.ROOT_INDEX
        || block.getBlockType() == BlockType.INTERMEDIATE_INDEX) {
      assertTrue(isCached);
    }
    offset += block.getOnDiskSizeWithHeader();
  }
}