/**
 * Returns the persistent tree map associated with the given key function,
 * creating it on first use.
 *
 * <p>The map name is derived from the key-function's class name, so each
 * {@code KeyFunction} implementation gets its own named map inside the DB.
 *
 * @param keyfunc key function whose class name identifies the backing map
 * @return the existing or newly created map
 */
protected NavigableMap makeMap(KeyFunction keyfunc) {
    if (db == null) {
        init();
    }
    String name = keyfunc.getClass().getName();
    // makeOrGet() replaces the previous exists()/createTreeMap()/getTreeMap()
    // check-then-act sequence with a single call (consistent with the rest of
    // this file). For a pre-existing map the serializer setting is ignored by
    // MapDB, matching the old getTreeMap() branch; for a new map it applies.
    return db.createTreeMap(name)
            .valueSerializer(new BlockSerializer())
            .makeOrGet();
}
.valueSerializer(Serializer.JAVA) .makeOrGet(); .<String, PointSet>make().put("pointset", pset);
.valuesOutsideNodesEnable() .counterEnable() .keySerializer(getBTreeKeySerializerFromClass(keyClass)) .valueSerializer(getSerializerFromClass(valueClass)) .makeOrGet();
.keySerializer(BTreeKeySerializer.ZERO_OR_POSITIVE_LONG) .valueSerializer(new NodeSerializer()) .makeOrGet(); .keySerializer(BTreeKeySerializer.ZERO_OR_POSITIVE_LONG) .valueSerializer(new WaySerializer()) .makeOrGet(); .keySerializer(BTreeKeySerializer.ZERO_OR_POSITIVE_LONG) .makeOrGet(); this.relations = relations;
doiCitationMap = db .createTreeMap("doiCache") .pumpPresort(300000) .pumpIgnoreDuplicates() .pumpSource(new Iterator<Fun.Tuple2<String, DOI>>() { private String[] line = null; final AtomicBoolean nextLineParsed = new AtomicBoolean(false); .make(); watch.stop(); LOG.info("doi cache built in [" + watch.getTime() / 1000 + "] s.");
this.isNew = true; this.options = db.createHashMap(name + "/options").makeOrGet(); this.keysByValue = db.createTreeMap(name).counterEnable().comparator(valueSerializer.getComparator()) .keySerializer(valueSerializer).makeOrGet();
.keySerializer(BTreeKeySerializer.ZERO_OR_POSITIVE_LONG) .counterEnable() .makeOrGet(); Atomic.String journalAtomic = this.journalDB.getAtomicString(JOURNAL_ID_FIELD);
this.isNew = true; this.options = db.createHashMap(name + "/options").makeOrGet(); this.keysByValue = db.createTreeMap(name).counterEnable().comparator(valueSerializer.getComparator()) .keySerializer(valueSerializer).makeOrGet();
.keySerializer(BTreeKeySerializer.ZERO_OR_POSITIVE_LONG) .counterEnable() .makeOrGet(); Atomic.String journalAtomic = this.journalDB.getAtomicString(JOURNAL_ID_FIELD);
.valueSerializer(Serializer.JAVA) .makeOrGet();
/**
 * Initializes {@code resolvedIdToTaxonMap} from the "taxonCacheById" tree map,
 * reusing the on-disk map when it already exists and otherwise bulk-importing
 * the taxon cache resource via MapDB's data pump.
 *
 * @throws PropertyEnricherException if reading the taxon cache resource fails
 */
private void initTaxonCache() throws PropertyEnricherException {
    DB db = initDb("taxonCache");
    String taxonCacheName = "taxonCacheById";
    if (db.exists(taxonCacheName)) {
        // Fast path: the map was built on a previous run; skip the import.
        resolvedIdToTaxonMap = db.getTreeMap(taxonCacheName);
    } else {
        LOG.info("local taxon cache of [" + taxonCache.getResource() + "] building...");
        StopWatch watch = new StopWatch();
        watch.start();
        try {
            // Data-pump bulk load: presort buffers up to 100k entries to meet
            // the pump's ordering requirement, and duplicate keys are skipped
            // instead of aborting the import.
            resolvedIdToTaxonMap = db
                    .createTreeMap(taxonCacheName)
                    .pumpPresort(100000)
                    .pumpIgnoreDuplicates()
                    .pumpSource(taxonCacheIterator(taxonCache))
                    .keySerializer(BTreeKeySerializer.STRING)
                    .make();
        } catch (IOException e) {
            throw new PropertyEnricherException("failed to instantiate taxonCache: [" + e.getMessage() + "]", e);
        }
        watch.stop();
        logCacheLoadStats(watch.getTime(), resolvedIdToTaxonMap.size());
        LOG.info("local taxon cache of [" + taxonCache.getResource() + "] built.");
        // Reset so the watch can be reused by a subsequent timing block.
        watch.reset();
    }
}
/**
 * Exports one row per distinct (study, interaction type, target taxon)
 * combination found in the graph database.
 *
 * <p>Aggregation happens in an off-heap (direct memory) MapDB tree map so
 * large result sets do not pressure the Java heap.
 *
 * @param writer        destination the rows are appended to
 * @param graphDatabase graph holding the study nodes
 * @param rowWriter     strategy that formats and writes a single row
 * @throws IOException if writing a row fails
 */
public static void exportDistinctInteractionsByStudy(ExportUtil.Appender writer, GraphDatabaseService graphDatabase, RowWriter rowWriter) throws IOException {
    DB db = DBMaker
            .newMemoryDirectDB()
            .compressionEnable()
            .transactionDisable()
            .make();
    try {
        final Map<Fun.Tuple3<Long, String, String>, List<String>> studyOccAggregate = db.createTreeMap("studyOccAggregate").make();
        NodeUtil.findStudies(graphDatabase, new StudyNodeListener() {
            @Override
            public void onStudy(StudyNode aStudy) {
                collectDistinctInteractions(aStudy, studyOccAggregate);
            }
        });
        for (Map.Entry<Fun.Tuple3<Long, String, String>, List<String>> distinctInteractions : studyOccAggregate.entrySet()) {
            rowWriter.writeRow(
                    writer,
                    // key.a = study node id, key.b = interaction type, key.c = target
                    new StudyNode(graphDatabase.getNodeById(distinctInteractions.getKey().a)),
                    distinctInteractions.getKey().b,
                    distinctInteractions.getKey().c,
                    distinctInteractions.getValue()
            );
        }
    } finally {
        // Fix: previously db.close() was skipped when the traversal or
        // writeRow threw, leaking direct (off-heap) memory.
        db.close();
    }
}
/**
 * Opens (or creates) the user database backed by the given file and seeds a
 * default administrator account the first time the file is used.
 *
 * @param file on-disk location of the MapDB user database
 */
public void setFile(File file) {
    // Swap the thread's context class loader so MapDB deserialization
    // resolves classes via this class's loader rather than the caller's.
    ClassLoader old = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        db = DBMaker.newFileDB(file)
                .closeOnJvmShutdown()
                .make();
        synchronized (db) {
            Map<String, Object> m = db.createTreeMap("admin").makeOrGet();
            // Seed only when no password is stored yet, i.e. a fresh database.
            if (!m.containsKey("password")) {
                // NOTE(review): unsalted SHA-256 of the well-known default
                // "password" is weak; consider bcrypt/scrypt/argon2 and
                // forcing a password change on first login.
                m.put("user", DEFAULT_USER);
                m.put("password", DigestUtils.sha256Hex("password"));
                m.put("givenName", "Administrator");
                m.put("familyName", "User");
                m.put("roles", Collections.singletonList("administrator"));
                db.commit();
            }
        }
    } finally {
        // Always restore the caller's context class loader.
        Thread.currentThread().setContextClassLoader(old);
    }
}
/**
 * Create a new DataStore.
 *
 * @param directory Where should it be created?
 */
public JumperDataStore(File directory) {
    if (!directory.exists()) {
        directory.mkdirs();
    }
    // One file-backed database holds the jumper map plus both tuple indexes;
    // async writes are flushed at most once per second.
    db = DBMaker.newFileDB(new File(directory, "jumpers.db"))
            .mmapFileEnableIfSupported()
            .cacheLRUEnable()
            .cacheSize(100000)
            .asyncWriteEnable()
            .asyncWriteFlushDelay(1000)
            .closeOnJvmShutdown()
            .make();
    // makeOrGet: reuse the collections when the file already existed.
    jumperMap = db.createTreeMap("jumperMap")
            .valueSerializer(new JumperSerializer())
            .makeOrGet();
    jumperStartIndex = db.createTreeSet("startIndex")
            .serializer(BTreeKeySerializer.TUPLE2)
            .makeOrGet();
    jumperEndIndex = db.createTreeSet("endIndex")
            .serializer(BTreeKeySerializer.TUPLE2)
            .makeOrGet();
}
/**
 * Create a new DataStore.
 *
 * @param directory Where should it be created?
 * @param dataFile  What should it be called?
 */
public SpatialDataStore(File directory, String dataFile, Serializer serializer, Integer cacheSize) {
    this.dataFile = dataFile;
    if (!directory.exists()) {
        directory.mkdirs();
    }
    spatialId = new IdStore(directory, dataFile);
    // File-backed DB named after the data file; closed automatically on JVM exit.
    db = DBMaker.newFileDB(new File(directory, dataFile + ".db"))
            .mmapFileEnableIfSupported()
            .cacheLRUEnable()
            .cacheSize(cacheSize)
            .closeOnJvmShutdown()
            .make();
    // Primary map keyed by non-negative longs; makeOrGet reuses existing data.
    map = db.createTreeMap(dataFile)
            .valueSerializer(serializer)
            .keySerializer(BTreeKeySerializer.ZERO_OR_POSITIVE_LONG)
            .makeOrGet();
    // Secondary spatial index over 3-tuples.
    tileIndex = db.createTreeSet(dataFile + "_tileIndex")
            .serializer(BTreeKeySerializer.TUPLE3)
            .makeOrGet();
}
/**
 * Creates or overwrites the stored record for the given user, committing the
 * change to the database. Each user is stored as a named tree map keyed by
 * attribute name.
 */
@Override
public void addUser(String username, String password, String givenName, String familyName, Collection<String> roles) {
    // Swap the context class loader so MapDB resolves classes via this
    // class's loader; restored in the finally block.
    final ClassLoader previous = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        synchronized (db) {
            Map<String, Object> userRecord = db.createTreeMap(username).makeOrGet();
            userRecord.put("user", username);
            userRecord.put("password", DigestUtils.sha256Hex(password));
            userRecord.put("givenName", givenName);
            userRecord.put("familyName", familyName);
            // Defensive copy so later mutation of the caller's collection
            // does not affect the stored record.
            userRecord.put("roles", new ArrayList<>(roles));
            db.commit();
        }
    } finally {
        Thread.currentThread().setContextClassLoader(previous);
    }
}
/**
 * Opens (or creates) a byte store backed by a MapDB file in {@code dir}.
 * The tree map stores values outside the BTree nodes and orders keys by
 * unsigned lexicographic byte comparison.
 */
public MapDbByteStore(File dir, String dbname, boolean readonly) {
    this.dir = dir;
    LessFiles.initDirectory(dir);
    this.dbFile = new File(dir, "mapdb");
    DBMaker maker = DBMaker.newFileDB(dbFile)
            .transactionDisable()
            .cacheDisable()
            .mmapFileEnableIfSupported();
    // Read-only mode is applied before the database is materialized.
    this.db = readonly ? maker.readOnly().make() : maker.make();
    this.btree = db.createTreeMap(dbname)
            .valuesOutsideNodesEnable()
            .comparator(UnsignedBytes.lexicographicalComparator())
            .makeOrGet();
    log.info("MapDB ByteStore started up for {}", dir);
}
/**
 * Builds taxon-interaction records using an off-heap (direct memory) MapDB
 * tree map as scratch space: first collects interactions into the map, then
 * persists them.
 */
public void indexInteractions() {
    DB db = DBMaker
            .newMemoryDirectDB()
            .compressionEnable()
            .transactionDisable()
            .make();
    try {
        final Map<Fun.Tuple3<Long, String, Long>, Long> taxonInteractions = db
                .createTreeMap("ottIdMap")
                .make();
        collectTaxonInteractions(taxonInteractions);
        createTaxonInteractions(taxonInteractions);
    } finally {
        // Fix: db.close() was skipped when collect/create threw, leaking
        // direct (off-heap) memory; always release it.
        db.close();
    }
}
/**
 * Bulk-loads a {@link BTreeMap} from the given entries using MapDB's data
 * pump, which is much faster than inserting entries one by one.
 *
 * <p>NOTE(review): the pump requires the source iterator to yield keys in the
 * order MapDB expects (reverse key order in MapDB 1.x) — assumed to be
 * guaranteed by {@code entries.batchInsertIterator()}; confirm at call sites.
 *
 * @param entries source of pre-ordered key/value pairs
 * @param maker   configured tree-map builder the pump is attached to
 * @return the fully built map
 */
public static <K extends Comparable<K>, V> BTreeMap<K, V> batchCreate(final Entries<K, V> entries, BTreeMapMaker maker) {
    return maker.pumpSource(entries.batchInsertIterator()).make();
}
// Fix: removed the stray semicolon that followed the method's closing brace
// (a redundant empty class-level declaration).
public MapDbStorage(DB db, String name, @Nullable ClassLoader classLoader) { this.name = name; this.db = db; this.classLoader = classLoader; this.map = db.createTreeMap(name).makeOrGet(); this.mapper = new GsonBuilder().registerTypeAdapterFactory(new PropertiesTypeAdapterFactory()).create(); }