map = storage.createTreeMap(name) .valuesOutsideNodesEnable() .counterEnable()
.make(); final BTreeMap<Fun.Tuple3<String, String, ResultEnvelope.Which>, Integer> output = outputDb.createTreeMap("results") .valueSerializer(Serializer.JAVA) .makeOrGet(); outputDb.createTreeMap("pointset") .<String, PointSet>make().put("pointset", pset);
/**
 * Returns the on-disk tree map associated with the given key function,
 * creating it with a BlockSerializer for values on first use.
 */
protected NavigableMap makeMap(KeyFunction keyfunc) {
    if (db == null) {
        init();
    }
    // One map per key-function class; its FQCN is the map name.
    final String mapName = keyfunc.getClass().getName();
    if (db.exists(mapName)) {
        return db.getTreeMap(mapName);
    }
    return db.createTreeMap(mapName)
            .valueSerializer(new BlockSerializer())
            .make();
}
protected NavigableMap<BindingSet, Integer> makeOrderedMap() { if (db == null) { // no disk-syncing - we use a simple in-memory TreeMap instead. return new TreeMap<BindingSet, Integer>(comparator); } else { return db.createTreeMap("iteration").comparator(comparator).makeOrGet(); } }
/**
 * Creates (or reuses) a per-user MapDB tree map named after the user and
 * stores the account fields in it, then commits the database.
 *
 * NOTE(review): the password is stored as a bare SHA-256 hex digest
 * (DigestUtils.sha256Hex) with no salt or key stretching - if these are real
 * credentials, a salted adaptive hash (bcrypt/scrypt/argon2) is preferable.
 */
@Override
public void addUser(String username, String password, String givenName, String familyName,
        Collection<String> roles) {
    // Temporarily swap in this class's loader - presumably so MapDB can
    // resolve application classes during (de)serialization; confirm.
    ClassLoader old = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        synchronized (db) {
            Map<String, Object> m = db.createTreeMap(username).makeOrGet();
            m.put("user", username);
            m.put("password", DigestUtils.sha256Hex(password));
            m.put("givenName", givenName);
            m.put("familyName", familyName);
            // Copy roles into a concrete ArrayList before storing.
            m.put("roles", new ArrayList<>(roles));
            db.commit();
        }
    } finally {
        // Always restore the original context class loader.
        Thread.currentThread().setContextClassLoader(old);
    }
}
/**
 * Opens (or creates) the named MapDB tree map and prepares the Gson mapper
 * used to (de)serialize stored properties.
 */
public MapDbStorage(DB db, String name, @Nullable ClassLoader classLoader) {
    this.db = db;
    this.name = name;
    this.classLoader = classLoader;
    this.map = db.createTreeMap(name).makeOrGet();
    this.mapper = new GsonBuilder()
            .registerTypeAdapterFactory(new PropertiesTypeAdapterFactory())
            .create();
}
/**
 * Points this user store at the given MapDB file, creating the database if
 * needed and seeding a default "admin" account on first use.
 *
 * NOTE(review): the seeded account uses the literal password "password"
 * hashed with unsalted SHA-256 - deployments should force a change, and a
 * salted adaptive hash would be safer for real credentials.
 */
public void setFile(File file) {
    // Temporarily swap in this class's loader - presumably so MapDB can
    // resolve application classes during (de)serialization; confirm.
    ClassLoader old = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        db = DBMaker.newFileDB(file)
                .closeOnJvmShutdown()
                .make();
        synchronized (db) {
            Map<String, Object> m = db.createTreeMap("admin").makeOrGet();
            // Seed defaults only when no password entry exists yet.
            if (!m.containsKey("password")) {
                m.put("user", DEFAULT_USER);
                m.put("password", DigestUtils.sha256Hex("password"));
                m.put("givenName", "Administrator");
                m.put("familyName", "User");
                m.put("roles", Collections.singletonList("administrator"));
                db.commit();
            }
        }
    } finally {
        // Always restore the original context class loader.
        Thread.currentThread().setContextClassLoader(old);
    }
}
/**
 * Opens the two B-tree indexes backing street data: a Tuple3-keyed segment
 * index holding Long values, and a non-negative-long-keyed segment-type
 * index holding Integer values.
 */
public StreetDataStore(File directory, String dataFile, Serializer serializer, Integer cacheSize) {
    super(directory, dataFile, serializer, cacheSize);
    segmentIndex = db.createTreeMap(dataFile + "_segmentIndex")
            .valueSerializer(Serializer.LONG)
            .keySerializer(BTreeKeySerializer.TUPLE3)
            .makeOrGet();
    segmentTypeMap = db.createTreeMap(dataFile + "_segmentTypeIndex")
            .valueSerializer(Serializer.INTEGER)
            .keySerializer(BTreeKeySerializer.ZERO_OR_POSITIVE_LONG)
            .makeOrGet();
}
/**
 * Looks up the tree map for this key function, lazily initializing the
 * database and creating the map (BlockSerializer values) if absent.
 */
protected NavigableMap makeMap(KeyFunction keyfunc) {
    if (db == null) {
        init();
    }
    // The key function's class name doubles as the map name.
    String mapId = keyfunc.getClass().getName();
    if (!db.exists(mapId)) {
        return db.createTreeMap(mapId)
                .valueSerializer(new BlockSerializer())
                .make();
    }
    return db.getTreeMap(mapId);
}
/**
 * Aggregates each study's distinct interactions into a temporary off-heap
 * MapDB tree map, then writes one row per aggregated key.
 *
 * Fix: the direct-memory database is now closed in a finally block; the
 * original leaked off-heap memory if collection or row writing threw,
 * since direct buffers are not reclaimed by ordinary GC pressure.
 *
 * @throws IOException if writing a row fails
 */
public static void exportDistinctInteractionsByStudy(ExportUtil.Appender writer,
        GraphDatabaseService graphDatabase, RowWriter rowWriter) throws IOException {
    DB db = DBMaker
            .newMemoryDirectDB()
            .compressionEnable()
            .transactionDisable()
            .make();
    try {
        final Map<Fun.Tuple3<Long, String, String>, List<String>> studyOccAggregate =
                db.createTreeMap("studyOccAggregate").make();
        NodeUtil.findStudies(graphDatabase, new StudyNodeListener() {
            @Override
            public void onStudy(StudyNode aStudy) {
                collectDistinctInteractions(aStudy, studyOccAggregate);
            }
        });
        for (Map.Entry<Fun.Tuple3<Long, String, String>, List<String>> distinctInteractions
                : studyOccAggregate.entrySet()) {
            rowWriter.writeRow(
                    writer,
                    new StudyNode(graphDatabase.getNodeById(distinctInteractions.getKey().a)),
                    distinctInteractions.getKey().b,
                    distinctInteractions.getKey().c,
                    distinctInteractions.getValue()
            );
        }
    } finally {
        // Release the off-heap store even on failure.
        db.close();
    }
}
/**
 * Builds a MapDB-backed sorting buffer using the configured key/value
 * serializers, enabling the entry counter when sizes must be tracked.
 */
@Override
public SortingBuffer<K, V> make() {
    BTreeMapMaker maker = db(useHeap)
            .createTreeMap(name)
            .keySerializer(keySerializer)
            .valueSerializer(valueSerializer);
    if (keepsize) {
        maker = maker.counterEnable();
    }
    NavigableMap<K, V> backing = maker.make();
    return new CloseableSortingBuffer<K, V>(name, useHeap, backing);
}
}
/**
 * Creates the sorting buffer on top of a MapDB tree map; the counter is
 * enabled only when the buffer must report its size.
 */
@Override
public SortingBuffer<K, V> make() {
    BTreeMapMaker mapMaker = db(useHeap)
            .createTreeMap(name)
            .keySerializer(keySerializer)
            .valueSerializer(valueSerializer);
    if (keepsize) {
        mapMaker = mapMaker.counterEnable();
    }
    NavigableMap<K, V> storage = mapMaker.make();
    return new CloseableSortingBuffer<K, V>(name, useHeap, storage);
}
}
/**
 * Constructs a Sorter backed by a temporary MapDB file that is deleted on
 * close and on JVM shutdown, with transactions disabled for speed.
 *
 * @param limit    maximum number of elements to retain; Long.MAX_VALUE means no limit
 * @param distinct when true, multiple equal elements are not preserved
 */
public Sorter(long limit, boolean distinct) {
    this.limit = limit;
    this.distinct = distinct;
    this.db = DBMaker.newTempFileDB()
            .deleteFilesAfterClose()
            .closeOnJvmShutdown()
            .transactionDisable()
            .make();
    this.map = db.createTreeMap(MAP_NAME).make();
}
/**
 * Opens a transaction-less, cache-less MapDB file store under {@code dir}
 * (memory-mapped where supported) and binds the byte-ordered tree map,
 * optionally in read-only mode.
 */
public MapDbByteStore(File dir, String dbname, boolean readonly) {
    this.dir = dir;
    LessFiles.initDirectory(dir);
    this.dbFile = new File(dir, "mapdb");
    DBMaker builder = DBMaker.newFileDB(dbFile)
            .transactionDisable()
            .cacheDisable()
            .mmapFileEnableIfSupported();
    // Keys are raw byte arrays, ordered lexicographically as unsigned bytes.
    this.db = (readonly ? builder.readOnly() : builder).make();
    this.btree = db.createTreeMap(dbname)
            .valuesOutsideNodesEnable()
            .comparator(UnsignedBytes.lexicographicalComparator())
            .makeOrGet();
    log.info("MapDB ByteStore started up for {}", dir);
}
/**
 * Create a new DataStore.
 *
 * Opens (or creates) "&lt;dataFile&gt;.db" in the directory with an LRU cache,
 * memory-mapping where supported, and binds a non-negative-long-keyed data
 * map plus a Tuple3 tile-index set.
 *
 * @param directory Where should it be created?
 * @param dataFile What should it be called?
 * @param serializer value serializer for the data map
 * @param cacheSize LRU cache size; NOTE(review): unboxed below, so a null
 *                  Integer would throw NullPointerException - confirm callers
 *                  always pass a non-null value
 */
public SpatialDataStore(File directory, String dataFile, Serializer serializer, Integer cacheSize) {
    this.dataFile = dataFile;
    if(!directory.exists())
        directory.mkdirs();
    // Separate id store living alongside the spatial data.
    spatialId = new IdStore(directory, dataFile);
    DBMaker dbm = DBMaker.newFileDB(new File(directory, dataFile + ".db"))
            .mmapFileEnableIfSupported()
            .cacheLRUEnable()
            .cacheSize(cacheSize)
            .closeOnJvmShutdown();
    db = dbm.make();
    BTreeMapMaker maker = db.createTreeMap(dataFile)
            .valueSerializer(serializer)
            .keySerializer(BTreeKeySerializer.ZERO_OR_POSITIVE_LONG);
    map = maker.makeOrGet();
    tileIndex = db.createTreeSet(dataFile + "_tileIndex")
            .serializer(BTreeKeySerializer.TUPLE3).makeOrGet();
}
/**
 * Collects taxon interaction triples into a temporary off-heap MapDB map and
 * materializes them via createTaxonInteractions.
 *
 * Fix: db.close() now runs in a finally block; the original leaked the
 * direct (off-heap) store if collection or creation threw, and that memory
 * is not reclaimed by ordinary garbage collection.
 */
public void indexInteractions() {
    DB db = DBMaker
            .newMemoryDirectDB()
            .compressionEnable()
            .transactionDisable()
            .make();
    try {
        final Map<Fun.Tuple3<Long, String, Long>, Long> taxonInteractions = db
                .createTreeMap("ottIdMap")
                .make();
        collectTaxonInteractions(taxonInteractions);
        createTaxonInteractions(taxonInteractions);
    } finally {
        // Release the off-heap store even on failure.
        db.close();
    }
}
/**
 * Builds a sorting buffer that tolerates duplicate keys by wrapping each key
 * in a UniqueKey; compares with the caller-supplied comparator when present,
 * natural ordering otherwise.
 */
@Override
public SortingBuffer<K, V> make() {
    final Comparator<UniqueKey<K>> comparator;
    if (this.keyComparator == null) {
        comparator = new ComparableUniqueKeyComparator<K>();
    } else {
        comparator = new UniqueKeyComparator<K>(keyComparator);
    }
    BTreeKeySerializer<UniqueKey<K>> uniqueKeySerializer =
            new UniqueKeyBTreeSerializer<K>(keySerializer, comparator);
    BTreeMapMaker maker = db(useHeap)
            .createTreeMap(name)
            .keySerializer(uniqueKeySerializer)
            .valueSerializer(valueSerializer);
    if (keepsize) {
        maker = maker.counterEnable();
    }
    NavigableMap<UniqueKey<K>, V> buffer = maker.make();
    return new CloseableSortingBufferWithDuplicates<K, V>(name, useHeap, buffer);
}
}
/**
 * Creates a duplicate-tolerant sorting buffer: keys are wrapped in UniqueKey
 * so equal keys remain distinct entries in the underlying MapDB tree map.
 */
@Override
public SortingBuffer<K, V> make() {
    Comparator<UniqueKey<K>> keyOrder = (this.keyComparator == null)
            ? new ComparableUniqueKeyComparator<K>()
            : new UniqueKeyComparator<K>(keyComparator);
    BTreeKeySerializer<UniqueKey<K>> wrappedSerializer =
            new UniqueKeyBTreeSerializer<K>(keySerializer, keyOrder);
    BTreeMapMaker treeMaker = db(useHeap)
            .createTreeMap(name)
            .keySerializer(wrappedSerializer)
            .valueSerializer(valueSerializer);
    if (keepsize) {
        treeMaker = treeMaker.counterEnable();
    }
    NavigableMap<UniqueKey<K>, V> backing = treeMaker.make();
    return new CloseableSortingBufferWithDuplicates<K, V>(name, useHeap, backing);
}
}
/**
 * Creates the jumper store under the given directory (made on demand) and
 * opens the jumper map plus its start/end tuple indexes, with async writes
 * flushed on a one-second delay.
 *
 * @param directory where the backing "jumpers.db" file lives
 */
public JumperDataStore(File directory) {
    if (!directory.exists()) {
        directory.mkdirs();
    }
    db = DBMaker.newFileDB(new File(directory, "jumpers.db"))
            .mmapFileEnableIfSupported()
            .cacheLRUEnable()
            .cacheSize(100000)
            .asyncWriteEnable()
            .asyncWriteFlushDelay(1000)
            .closeOnJvmShutdown()
            .make();
    jumperMap = db.createTreeMap("jumperMap")
            .valueSerializer(new JumperSerializer())
            .makeOrGet();
    jumperStartIndex = db.createTreeSet("startIndex")
            .serializer(BTreeKeySerializer.TUPLE2)
            .makeOrGet();
    jumperEndIndex = db.createTreeSet("endIndex")
            .serializer(BTreeKeySerializer.TUPLE2)
            .makeOrGet();
}
/**
 * Initializes the id-to-taxon lookup map from the on-disk "taxonCache" DB,
 * reusing the existing tree map when present or bulk-loading it from the
 * taxonCache resource via MapDB's data pump otherwise.
 *
 * @throws PropertyEnricherException if reading the taxon cache resource fails
 */
private void initTaxonCache() throws PropertyEnricherException {
    DB db = initDb("taxonCache");
    String taxonCacheName = "taxonCacheById";
    if (db.exists(taxonCacheName)) {
        // Map was built on a previous run - just reopen it.
        resolvedIdToTaxonMap = db.getTreeMap(taxonCacheName);
    } else {
        LOG.info("local taxon cache of [" + taxonCache.getResource() + "] building...");
        StopWatch watch = new StopWatch();
        watch.start();
        try {
            // Bulk-load via the pump: presort in 100k-element batches,
            // ignore duplicate keys, keys serialized as Strings.
            resolvedIdToTaxonMap = db
                    .createTreeMap(taxonCacheName)
                    .pumpPresort(100000)
                    .pumpIgnoreDuplicates()
                    .pumpSource(taxonCacheIterator(taxonCache))
                    .keySerializer(BTreeKeySerializer.STRING)
                    .make();
        } catch (IOException e) {
            // Wrap while preserving the cause so the I/O failure stays visible.
            throw new PropertyEnricherException("failed to instantiate taxonCache: [" + e.getMessage() + "]", e);
        }
        watch.stop();
        logCacheLoadStats(watch.getTime(), resolvedIdToTaxonMap.size());
        LOG.info("local taxon cache of [" + taxonCache.getResource() + "] built.");
        watch.reset();
    }
}