@Override public void run() {
  Value value;
  // If revocation should still be deferred (e.g. a client may still hold a
  // reference to the mapped block), re-schedule this task instead of
  // uncaching now, and try again after revocationPollingMs.
  if (shouldDefer()) {
    deferredUncachingExecutor.schedule(
        this, revocationPollingMs, TimeUnit.MILLISECONDS);
    return;
  }
  // Look up the block's cache entry under the FsDatasetCache lock.
  synchronized (FsDatasetCache.this) {
    value = mappableBlockMap.get(key);
  }
  // The entry must exist and must already be in the UNCACHING state;
  // anything else indicates a state-machine bug.
  Preconditions.checkNotNull(value);
  Preconditions.checkArgument(value.state == State.UNCACHING);
  // Unmap/close the block outside the lock; closeQuietly swallows any
  // IOException raised while closing.
  IOUtils.closeQuietly(value.mappableBlock);
  // Remove the entry under the lock only after the block is closed.
  synchronized (FsDatasetCache.this) {
    mappableBlockMap.remove(key);
  }
  // Give the block's bytes back to the cache accounting and update metrics.
  // Note: getLength() is called on the already-closed mappableBlock; this
  // relies on the length remaining readable after close.
  long newUsedBytes = usedBytesCount.release(value.mappableBlock.getLength());
  numBlocksCached.addAndGet(-1);
  dataset.datanode.getMetrics().incrBlocksUncached(1);
  // NOTE(review): revocationTimeMs != 0 appears to mean a revocation
  // deadline was set (i.e. deferral was in play), so the "Deferred" message
  // in the else branch looks like it may be swapped — confirm intent.
  if (revocationTimeMs != 0) {
    LOG.debug("Uncaching of {} completed. usedBytes = {}",
        key, newUsedBytes);
  } else {
    LOG.debug("Deferred uncaching of {} completed. usedBytes = {}",
        key, newUsedBytes);
  }
} // closes the enclosing task class (header not visible in this chunk)
}
if (!success) { if (reservedBytes) { usedBytesCount.release(length);
@Override public void run() {
  Value value;
  // Revocation may need to be deferred (a client could still be using the
  // mapped block); if so, poll again later rather than uncaching now.
  if (shouldDefer()) {
    deferredUncachingExecutor.schedule(
        this, revocationPollingMs, TimeUnit.MILLISECONDS);
    return;
  }
  // Fetch the cache entry for this key while holding the cache-wide lock.
  synchronized (FsDatasetCache.this) {
    value = mappableBlockMap.get(key);
  }
  // Invariant checks: the entry must be present and mid-uncaching.
  Preconditions.checkNotNull(value);
  Preconditions.checkArgument(value.state == State.UNCACHING);
  // Close (unmap) the block without holding the lock; errors on close are
  // deliberately ignored via closeQuietly.
  IOUtils.closeQuietly(value.mappableBlock);
  // Only after the close succeeds (or is swallowed) drop the map entry.
  synchronized (FsDatasetCache.this) {
    mappableBlockMap.remove(key);
  }
  // Return the reserved bytes to the pool and bump metrics/counters.
  // getLength() is read from the mappableBlock after close — assumes the
  // length field survives close().
  long newUsedBytes = usedBytesCount.release(value.mappableBlock.getLength());
  numBlocksCached.addAndGet(-1);
  dataset.datanode.getMetrics().incrBlocksUncached(1);
  // NOTE(review): the two debug messages may be inverted — a nonzero
  // revocationTimeMs suggests the deferred path was taken, yet that branch
  // logs the non-deferred message. Confirm against the original intent.
  if (revocationTimeMs != 0) {
    LOG.debug("Uncaching of {} completed. usedBytes = {}",
        key, newUsedBytes);
  } else {
    LOG.debug("Deferred uncaching of {} completed. usedBytes = {}",
        key, newUsedBytes);
  }
} // closes the enclosing task class (class header is outside this chunk)
}
if (!success) { if (reservedBytes) { usedBytesCount.release(length);
/**
 * Returns previously reserved bytes to the cache accounting.
 *
 * @param count the number of bytes to release; it will be rounded
 *              up to the page size.
 *
 * @return the updated usedBytes total after the release.
 */
long release(long count) {
  long updatedUsedBytes = usedBytesCount.release(count);
  return updatedUsedBytes;
}