/**
 * Reports whether the backing database holds an entry under the given name.
 *
 * @param name the record name to look up
 * @return {@code true} if an entry with that name exists in {@code db}
 */
@Override
public boolean contains(String name) {
    final boolean present = db.exists(name);
    return present;
}
/** * Returns the StorageType using the name of the map. It assumes that names * are unique across all StorageType. If not found null is returned. * * @param name * @return */ private StorageType getStorageTypeFromName(String name) { for(Map.Entry<StorageType, DB> entry : storageRegistry.entrySet()) { DB storage = entry.getValue(); if(isOpenStorage(storage) && storage.exists(name)) { return entry.getKey(); } } return null; //either the Map has not created yet OR it is in memory }
/**
 * {@inheritDoc}
 *
 * Checks the primary storage for an object with the given name after
 * verifying the connection is still open.
 */
@Override
public boolean existsObject(String name) {
    assertConnectionOpen();
    return openStorage(StorageType.PRIMARY_STORAGE).exists(name);
}
/**
 * Returns the tree map keyed by the given KeyFunction's class name,
 * creating it (with a BlockSerializer for values) on first use.
 * Lazily initializes the database if it has not been opened yet.
 *
 * @param keyfunc the key function whose class name identifies the map
 * @return the existing or newly created NavigableMap
 */
protected NavigableMap makeMap(KeyFunction keyfunc) {
    if (db == null) {
        init();
    }
    final String mapName = keyfunc.getClass().getName();
    if (db.exists(mapName)) {
        return db.getTreeMap(mapName);
    }
    return db.createTreeMap(mapName)
            .valueSerializer(new BlockSerializer())
            .make();
}
/**
 * Opens the backing database and the "idmap" hash map.
 *
 * With no file configured an in-memory DB is used; otherwise a file-backed
 * DB is built, honoring the configured overwrite, cache size, async-write,
 * mmap, compression, snapshot and transaction flags. The "idmap" map is
 * created with a RecordSerializer on first use and reopened thereafter.
 */
private void init() {
    DBMaker builder;
    if (file == null) {
        builder = DBMaker.newMemoryDB();
    } else {
        // Destroy any previous on-disk state before reopening.
        if (overwrite) {
            wipe(file);
        }
        builder = DBMaker.newFileDB(new File(file));
        builder = builder.cacheSize(cache_size);
        if (async) {
            builder = builder.asyncWriteEnable();
            builder = builder.asyncWriteFlushDelay(10000);
        }
        if (mmap) {
            builder = builder.mmapFileEnableIfSupported();
        }
        if (compression) {
            builder = builder.compressionEnable();
        }
        if (snapshot) {
            builder = builder.snapshotEnable();
        }
        if (notxn) {
            builder = builder.transactionDisable();
        }
    }
    db = builder.make();
    // Reopen the id map if it already exists; otherwise create it.
    if (db.exists("idmap")) {
        idmap = db.getHashMap("idmap");
    } else {
        idmap = db.createHashMap("idmap")
                .valueSerializer(new RecordSerializer())
                .make();
    }
}
/**
 * Checks whether the transaction knows an entry with the given name.
 * Fails fast when this instance has already been closed.
 *
 * @param name the entry name to look up
 * @return {@code true} if the transaction contains the named entry
 */
public boolean exists(final String name) {
    this.assertNotClosed();
    final boolean found = this.tx.exists(name);
    return found;
}
/**
 * Empties every per-value key set and deletes its backing DB collection,
 * then resets the in-memory registry and the total counter to zero.
 */
@Override
public synchronized void clearAllData() {
    for (Map.Entry<String, Set<String>> valueEntry : nodeKeySetsByValue.entrySet()) {
        valueEntry.getValue().clear();
        final String storageName = collectionName(valueEntry.getKey());
        // Only delete collections that actually exist in the database.
        if (db.exists(storageName)) {
            db.delete(storageName);
        }
    }
    nodeKeySetsByValue.clear();
    totalCount.set(0);
}
/**
 * Clears all indexed data: each value's key set is emptied, its persisted
 * collection (if present) is dropped from the database, and finally the
 * registry map and total counter are reset.
 */
@Override
public synchronized void clearAllData() {
    nodeKeySetsByValue.forEach((value, keys) -> {
        keys.clear();
        String collectionName = collectionName(value);
        if (db.exists(collectionName)) {
            db.delete(collectionName);
        }
    });
    nodeKeySetsByValue.clear();
    totalCount.set(0);
}
@Override public synchronized void shutdown( boolean destroyed ) { if (destroyed) { // Remove the database since the index was destroyed ... for (String value : nodeKeySetsByValue.keySet()) { String collectionName = collectionName(value); if (db.exists(collectionName)) { db.delete(collectionName); } } nodeKeySetsByValue.clear(); totalCount.set(0); } }
@Override public synchronized void shutdown( boolean destroyed ) { if (destroyed) { // Remove the database since the index was destroyed ... for (String value : nodeKeySetsByValue.keySet()) { String collectionName = collectionName(value); if (db.exists(collectionName)) { db.delete(collectionName); } } nodeKeySetsByValue.clear(); totalCount.set(0); } }
/**
 * Fetches the tree map named after the KeyFunction's class, creating it
 * with a BlockSerializer for values when it does not exist yet. Opens the
 * database lazily on first call.
 *
 * @param keyfunc key function whose class name names the map
 * @return the tree map for this key function
 */
protected NavigableMap makeMap(KeyFunction keyfunc) {
    if (db == null) {
        init();
    }
    String mapName = keyfunc.getClass().getName();
    return db.exists(mapName)
            ? db.getTreeMap(mapName)
            : db.createTreeMap(mapName).valueSerializer(new BlockSerializer()).make();
}
private Set<String> createOrGetKeySet( String value ) { String collectionName = collectionName(value); if (logger.isDebugEnabled()) { if (db.exists(collectionName)) { logger.debug("Reopening enum storage '{0}' for '{1}' index in workspace '{2}'", collectionName, name, workspace); } else { logger.debug("Creating enum storage '{0}' for '{1}' index in workspace '{2}'", collectionName, name, workspace); } } // Try to create the set ... Set<String> keySet = db.createHashSet(collectionName).counterEnable().makeOrGet(); // make sure this is ATOMIC ! Set<String> previous = nodeKeySetsByValue.putIfAbsent(value, keySet); if (previous != null) keySet = previous; return keySet; }
private Set<String> createOrGetKeySet( String value ) { String collectionName = collectionName(value); if (logger.isDebugEnabled()) { if (db.exists(collectionName)) { logger.debug("Reopening enum storage '{0}' for '{1}' index in workspace '{2}'", collectionName, name, workspace); } else { logger.debug("Creating enum storage '{0}' for '{1}' index in workspace '{2}'", collectionName, name, workspace); } } // Try to create the set ... Set<String> keySet = db.createHashSet(collectionName).counterEnable().makeOrGet(); // make sure this is ATOMIC ! Set<String> previous = nodeKeySetsByValue.putIfAbsent(value, keySet); if (previous != null) keySet = previous; return keySet; }
/**
 * Opens the "taxonCacheById" tree map, building it from the taxon cache
 * resource on first use via MapDB's data pump.
 *
 * @throws PropertyEnricherException if reading the cache resource fails
 */
private void initTaxonCache() throws PropertyEnricherException {
    DB db = initDb("taxonCache");
    String taxonCacheName = "taxonCacheById";
    if (db.exists(taxonCacheName)) {
        // Map was built on a previous run — reopen it instead of re-importing.
        resolvedIdToTaxonMap = db.getTreeMap(taxonCacheName);
    } else {
        LOG.info("local taxon cache of [" + taxonCache.getResource() + "] building...");
        StopWatch watch = new StopWatch();
        watch.start();
        try {
            // Bulk-load through the data pump: presort in batches of 100000,
            // drop duplicate keys, and stream entries from the cache resource.
            // NOTE(review): the pump*/keySerializer call order is kept as-is;
            // MapDB's builder may be order-sensitive — do not reorder casually.
            resolvedIdToTaxonMap = db
                    .createTreeMap(taxonCacheName)
                    .pumpPresort(100000)
                    .pumpIgnoreDuplicates()
                    .pumpSource(taxonCacheIterator(taxonCache))
                    .keySerializer(BTreeKeySerializer.STRING)
                    .make();
        } catch (IOException e) {
            // Wrap with context while preserving the original cause.
            throw new PropertyEnricherException("failed to instantiate taxonCache: [" + e.getMessage() + "]", e);
        }
        watch.stop();
        logCacheLoadStats(watch.getTime(), resolvedIdToTaxonMap.size());
        LOG.info("local taxon cache of [" + taxonCache.getResource() + "] built.");
        watch.reset();
    }
}
// Create the "idmap" hash map with a RecordSerializer on first use; reopen
// it otherwise. NOTE(review): fragment — the createHashMap(...) builder
// chain continues beyond this excerpt (presumably ending in .make()).
if (!db.exists("idmap")) idmap = db.createHashMap("idmap") .valueSerializer(new RecordSerializer())
// NOTE(review): constructor fragment — the enclosing definition and the
// body of this if-block continue beyond this excerpt.
assert valueSerializer != null; this.converter = converter; if (db.exists(name)) { // Index storage already persisted for this workspace: reopen its options map. logger.debug("Reopening storage for '{0}' index in workspace '{1}'", name, workspaceName); this.options = db.getHashMap(name + "/options");
// NOTE(review): constructor fragment (duplicate of the previous snippet) —
// the enclosing definition and this if-block continue beyond this excerpt.
assert valueSerializer != null; this.converter = converter; if (db.exists(name)) { // Existing index storage found: reopen the persisted options hash map. logger.debug("Reopening storage for '{0}' index in workspace '{1}'", name, workspaceName); this.options = db.getHashMap(name + "/options");