// Asynchronously closes one store file; evictOnClose controls whether the
// file's cached blocks are dropped from the block cache on close.
@Override public Void call() throws IOException {
  // Default to evicting on close when no CacheConfig is available.
  boolean evictOnClose = cacheConf != null? cacheConf.shouldEvictOnClose(): true;
  f.closeStoreFile(evictOnClose);
  return null;
} });
/**
 * Closes this reader, deciding block-cache eviction from the cache config.
 * @throws IOException if closing the underlying file fails
 */
@Override
public void close() throws IOException {
  // Guard against a null CacheConfig, consistent with the sibling code paths
  // (e.g. deleteStoreFile/initReader) that default to evicting on close when
  // no cache config is present; the unguarded call risked an NPE.
  close(cacheConf != null ? cacheConf.shouldEvictOnClose() : true);
}
/**
 * Closes the underlying reader and then removes this store file from the
 * filesystem. Cached blocks are evicted on close unless the cache config
 * says otherwise; with no cache config we default to evicting.
 * @throws IOException if closing the reader or deleting the file fails
 */
public void deleteStoreFile() throws IOException {
  final boolean evict = cacheConf == null || cacheConf.shouldEvictOnClose();
  closeStoreFile(evict);
  this.fs.delete(getPath(), true);
}
/**
 * Lazily initializes the reader used for pread. If opening fails, the
 * partially opened file is closed best-effort before the original exception
 * is rethrown.
 */
public void initReader() throws IOException {
  if (reader != null) {
    return; // already initialized
  }
  try {
    open();
  } catch (Exception e) {
    try {
      // Cleanup on failure; default to evicting cached blocks when no
      // CacheConfig exists.
      this.closeStoreFile(cacheConf == null || cacheConf.shouldEvictOnClose());
    } catch (IOException ee) {
      LOG.warn("failed to close reader", ee);
    }
    throw e;
  }
}
cacheConf != null? cacheConf.shouldEvictOnClose(): true;
// Close each store file in the results, honoring the eviction flag computed above.
for (HStoreFile file : results) { try {
/**
 * Computes the length of a store file without succumbing to any errors along
 * the way. If an error is encountered, the implementation returns {@code 0}
 * instead of the actual size.
 *
 * @param file The file to compute the size of.
 * @return The size in bytes of the provided {@code file}.
 */
long getStoreFileSize(HStoreFile file) {
  long length = 0;
  try {
    file.initReader();
    length = file.getReader().length();
  } catch (IOException e) {
    LOG.trace("Failed to open reader when trying to compute store file size, ignoring", e);
  } finally {
    try {
      // Evict cached blocks on close by default when the file has no CacheConfig.
      boolean evict = file.getCacheConf() == null || file.getCacheConf().shouldEvictOnClose();
      file.closeStoreFile(evict);
    } catch (IOException e) {
      LOG.trace("Failed to close reader after computing store file size, ignoring", e);
    }
  }
  return length;
}
/**
 * Runs the compaction, except that when "hbase.hstore.compaction.complete" is
 * false the newly written files are wrapped in store files and closed
 * immediately instead of being committed, leaving the compaction incomplete.
 */
@Override protected List<HStoreFile> doCompaction(CompactionRequestImpl cr,
    Collection<HStoreFile> filesToCompact, User user, long compactionStartTime,
    List<Path> newFiles) throws IOException {
  // let compaction incomplete.
  if (!this.conf.getBoolean("hbase.hstore.compaction.complete", true)) {
    LOG.warn("hbase.hstore.compaction.complete is set to false");
    List<HStoreFile> sfs = new ArrayList<>(newFiles.size());
    // Default to evicting cached blocks on close when no CacheConfig is present.
    final boolean evictOnClose = cacheConf != null? cacheConf.shouldEvictOnClose(): true;
    for (Path newFile : newFiles) {
      // Create storefile around what we wrote with a reader on it.
      HStoreFile sf = createStoreFileAndReader(newFile);
      sf.closeStoreFile(evictOnClose);
      sfs.add(sf);
    }
    return sfs;
  }
  return super.doCompaction(cr, filesToCompact, user, compactionStartTime, newFiles);
} }
/** Human-readable dump of the cache flags, for logging and debugging. */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder();
  sb.append("cacheDataOnRead=").append(shouldCacheDataOnRead());
  sb.append(", cacheDataOnWrite=").append(shouldCacheDataOnWrite());
  sb.append(", cacheIndexesOnWrite=").append(shouldCacheIndexesOnWrite());
  sb.append(", cacheBloomsOnWrite=").append(shouldCacheBloomsOnWrite());
  sb.append(", cacheEvictOnClose=").append(shouldEvictOnClose());
  sb.append(", cacheDataCompressed=").append(shouldCacheDataCompressed());
  sb.append(", prefetchOnOpen=").append(shouldPrefetchOnOpen());
  return sb.toString();
} }
// NOTE: Jamon-generated template code — emits the evict-on-close flag, HTML-escaped.
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(cacheConfig.shouldEvictOnClose()), jamonWriter);
private List<Cell> prepareListOfTestSeeks(Path path) throws IOException { List<Cell> allKeyValues = new ArrayList<>(); // read all of the key values HStoreFile storeFile = new HStoreFile(testingUtility.getTestFileSystem(), path, configuration, cacheConf, BloomType.NONE, true); storeFile.initReader(); StoreFileReader reader = storeFile.getReader(); StoreFileScanner scanner = reader.getStoreFileScanner(true, false, false, 0, 0, false); Cell current; scanner.seek(KeyValue.LOWESTKEY); while (null != (current = scanner.next())) { allKeyValues.add(current); } storeFile.closeStoreFile(cacheConf.shouldEvictOnClose()); // pick seeks by random List<Cell> seeks = new ArrayList<>(); for (int i = 0; i < numberOfSeeks; ++i) { Cell keyValue = allKeyValues.get( randomizer.nextInt(allKeyValues.size())); seeks.add(keyValue); } clearBlockCache(); return seeks; }
// Close the store file; evict its cached blocks per the cache config,
// defaulting to eviction when the file carries no CacheConfig.
f.closeStoreFile(f.getCacheConf() != null ? f.getCacheConf().shouldEvictOnClose() : true);
// Best-effort close of the half-file reader; a failure is only logged so the
// surrounding cleanup can continue.
if (halfReader != null) { try { halfReader.close(cacheConf.shouldEvictOnClose()); } catch (IOException e) { LOG.warn("failed to close hfile reader for " + inFile, e);
(finishSeeksTime - startSeeksTime);
// Close the benchmarked store file (honoring configured eviction) and drop the
// block cache so the next measurement starts cold.
storeFile.closeStoreFile(cacheConf.shouldEvictOnClose()); clearBlockCache();
// Close the reader, evicting its cached blocks per the cache config.
reader.close(cacheConf.shouldEvictOnClose());
// Test bookkeeping: advance the expected miss count, then recycle the
// scanner/reader pairs (evicting per cache config) between cache-stat checks.
startMiss += 3; scanner.close(); reader.close(cacheConf.shouldEvictOnClose());
// Expected hits advance as previously-missed blocks are now cached.
startHit += 3; scanner.close(); reader.close(cacheConf.shouldEvictOnClose());
startHit += 6; scannerOne.close(); readerOne.close(cacheConf.shouldEvictOnClose()); scannerTwo.close(); readerTwo.close(cacheConf.shouldEvictOnClose());
// Re-open and immediately close the store file's reader twice more.
hsf.initReader(); reader = hsf.getReader(); reader.close(cacheConf.shouldEvictOnClose());
hsf.initReader(); reader = hsf.getReader(); reader.close(cacheConf.shouldEvictOnClose());
// Asynchronously closes one store file's reader; evictOnClose controls whether
// its cached blocks are dropped from the block cache on close.
@Override public Void call() throws IOException {
  // Default to evicting on close when no CacheConfig is available.
  boolean evictOnClose = cacheConf != null? cacheConf.shouldEvictOnClose(): true;
  f.closeReader(evictOnClose);
  return null;
} });
/**
 * Closes this reader, deciding block-cache eviction from the cache config.
 * @throws IOException if closing the underlying file fails
 */
@Override
public void close() throws IOException {
  // Guard against a null CacheConfig, consistent with the sibling code paths
  // (e.g. deleteReader) that default to evicting on close when no cache
  // config is present; the unguarded call risked an NPE.
  close(cacheConf != null ? cacheConf.shouldEvictOnClose() : true);
}
/**
 * Closes this reader, deciding block-cache eviction from the cache config.
 * @throws IOException if closing the underlying file fails
 */
@Override
public void close() throws IOException {
  // Guard against a null CacheConfig, consistent with the sibling code paths
  // (e.g. deleteReader) that default to evicting on close when no cache
  // config is present; the unguarded call risked an NPE.
  close(cacheConf != null ? cacheConf.shouldEvictOnClose() : true);
}
/**
 * Closes the reader for this file and then deletes the file from the
 * filesystem. Cached blocks are evicted on close unless the cache config says
 * otherwise; with no cache config we default to evicting.
 * @throws IOException if closing the reader or deleting the file fails
 */
public void deleteReader() throws IOException {
  final boolean evict = cacheConf == null || cacheConf.shouldEvictOnClose();
  closeReader(evict);
  this.fs.delete(getPath(), true);
}
/**
 * Renders this cache configuration; reports "CacheConfig:disabled" when no
 * block cache is in use, otherwise each cache flag in bracketed form.
 */
@Override
public String toString() {
  if (!isBlockCacheEnabled()) {
    return "CacheConfig:disabled";
  }
  StringBuilder sb = new StringBuilder("CacheConfig:enabled ");
  sb.append("[cacheDataOnRead=").append(shouldCacheDataOnRead()).append("] ");
  sb.append("[cacheDataOnWrite=").append(shouldCacheDataOnWrite()).append("] ");
  sb.append("[cacheIndexesOnWrite=").append(shouldCacheIndexesOnWrite()).append("] ");
  sb.append("[cacheBloomsOnWrite=").append(shouldCacheBloomsOnWrite()).append("] ");
  sb.append("[cacheEvictOnClose=").append(shouldEvictOnClose()).append("] ");
  sb.append("[cacheCompressed=").append(shouldCacheCompressed()).append("]");
  return sb.toString();
}