/**
 * Wraps an existing MapDB database, binding each GTFS table to its backing
 * persistent collection and reading the feed-level metadata atomics.
 */
private GTFSFeed (DB db) {
    this.db = db;

    // Core GTFS entity tables.
    agency = db.getTreeMap("agency");
    routes = db.getTreeMap("routes");
    trips = db.getTreeMap("trips");
    stops = db.getTreeMap("stops");
    stop_times = db.getTreeMap("stop_times");
    shape_points = db.getTreeMap("shape_points");

    // Supporting tables.
    feedInfo = db.getTreeMap("feed_info");
    frequencies = db.getTreeSet("frequencies");
    transfers = db.getTreeMap("transfers");
    fares = db.getTreeMap("fares");
    services = db.getTreeMap("services");

    // Feed-level metadata stored as MapDB atomics, plus validation errors.
    feedId = db.getAtomicString("feed_id").get();
    checksum = db.getAtomicLong("checksum").get();
    errors = db.getTreeSet("errors");
}
/**
 * Binds all transit-data fields to their persistent MapDB collections and
 * builds an in-memory inverse view of the time-zone map.
 */
private void init() {
    this.gtfsFeedIds = data.getHashSet("gtfsFeeds");
    this.operatingDayPatterns = data.getHashMap("validities");
    this.timeZones = data.getHashMap("timeZones");
    // Build the inverse (id -> FeedIdWithTimezone) view from the current contents...
    Map<Integer, FeedIdWithTimezone> readableTimeZones = new HashMap<>();
    for (Map.Entry<FeedIdWithTimezone, Integer> entry : this.timeZones.entrySet()) {
        readableTimeZones.put(entry.getValue(), entry.getKey());
    }
    // ...then keep it in sync with future modifications via MapDB binding.
    // NOTE(review): Bind.mapInverse also repopulates the secondary map from the
    // primary on bind, so the manual loop above may be redundant — confirm against
    // the MapDB version in use before simplifying.
    Bind.mapInverse(this.timeZones, readableTimeZones);
    this.readableTimeZones = Collections.unmodifiableMap(readableTimeZones);
    this.tripDescriptors = data.getTreeMap("tripDescriptors");
    this.stopSequences = data.getTreeMap("stopSequences");
    this.fares = data.getTreeMap("fares");
    this.boardEdgesForTrip = data.getHashMap("boardEdgesForTrip");
    this.leaveEdgesForTrip = data.getHashMap("leaveEdgesForTrip");
    this.stationNodes = data.getHashMap("stationNodes");
    this.routes = data.getHashMap("routes");
}
// Off-heap (direct memory) MapDB instance: contents are lost when the JVM exits.
DB db = DBMaker.newDirectMemoryDB().make();
// Named B-tree map backed by the in-memory store.
ConcurrentNavigableMap<Integer, String> map = db.getTreeMap("MyCache");
// File-backed MapDB instance; closeOnJvmShutdown() registers a shutdown hook
// so the store is flushed and closed when the JVM exits.
DB db = DBMaker.newFileDB(new File("/home/collection.db")).closeOnJvmShutdown().make();
// Named B-tree map persisted in /home/collection.db.
ConcurrentNavigableMap<Integer,String> map = db.getTreeMap("MyCache");
// NOTE(review): truncated fragment — the DBMaker builder chain that this
// .make() terminates starts before the visible source; verify upstream.
.make(); comparison = comparisonDb.getTreeMap("results"); pset = comparisonDb.<String, PointSet>getTreeMap("pointset").get("pointset");
/**
 * Returns the persistent map dedicated to the given key function, creating it
 * with the block value serializer on first use.
 */
protected NavigableMap makeMap(KeyFunction keyfunc) {
    // Lazily open the backing database on first access.
    if (db == null) {
        init();
    }
    // One named tree map per key-function implementation class.
    String mapName = keyfunc.getClass().getName();
    if (db.exists(mapName)) {
        return db.getTreeMap(mapName);
    }
    return db.createTreeMap(mapName)
            .valueSerializer(new BlockSerializer())
            .make();
}
/**
 * Looks up the persisted record for the given username and materializes it as
 * a HobsonUser. The context class loader is temporarily swapped so MapDB
 * deserialization resolves classes from this bundle, then restored.
 */
@Override
public HobsonUser getUser(String username) {
    ClassLoader previousLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        Map<String,Object> userRecord = db.getTreeMap(username);
        return createUser(username, userRecord);
    } finally {
        Thread.currentThread().setContextClassLoader(previousLoader);
    }
}
/**
 * Returns the persistent mapping for the (tableId, keyId, valueId) triple,
 * minting and recording a new UUID-named map on first request.
 *
 * Fix: the original performed redundant double lookups (cache.containsKey
 * followed by cache.get, and mappings.get twice); each map is now read once.
 * Cached values are never null, so a null check is equivalent to containsKey.
 */
public Map<String, Object> getMappingFor(String tableId, String keyId, String valueId) {
    String key = tableId + "|" + keyId + "|" + valueId;
    // Fast path: mapping already resolved in this session.
    Map<String, Object> cached = cache.get(key);
    if (cached != null) {
        return cached;
    }
    // Resolve (or mint) the persistent map id for this triple.
    String mapid = mappings.get(key);
    if (mapid == null) {
        mapid = UUID.randomUUID().toString();
        mappings.put(key, mapid);
    }
    Map<String, Object> ret = db.getTreeMap(mapid);
    // Persist the new id -> map association immediately.
    db.commit();
    cache.put(key, ret);
    return ret;
} }
/**
 * Authenticates a user by comparing the SHA-256 hex digest of the supplied
 * password against the stored hash. Throws the same exception for an unknown
 * user and a wrong password so callers cannot distinguish the two.
 */
@Override
public HobsonUser authenticate(String username, String password) throws HobsonAuthenticationException {
    ClassLoader previousLoader = Thread.currentThread().getContextClassLoader();
    try {
        // MapDB deserialization must resolve classes from this bundle's loader.
        Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
        Map<String,Object> userRecord = db.getTreeMap(username);
        if (userRecord != null) {
            String storedHash = (String) userRecord.get("password");
            // Compare digests, never plaintext.
            if (storedHash != null && storedHash.equals(DigestUtils.sha256Hex(password))) {
                return createUser(username, userRecord);
            }
        }
        throw new HobsonAuthenticationException("Invalid username and/or password.");
    } finally {
        Thread.currentThread().setContextClassLoader(previousLoader);
    }
}
// NOTE(review): this fragment is corrupted — a dangling ".makeOrGet();" is
// spliced in after shape_points (its builder chain is missing) and the
// constructor never closes. Recover the original source before editing.
private GTFSFeed (DB db) { this.db = db;
    agency = db.getTreeMap("agency");
    feedInfo = db.getTreeMap("feed_info");
    routes = db.getTreeMap("routes");
    trips = db.getTreeMap("trips");
    stop_times = db.getTreeMap("stop_times");
    frequencies = db.getTreeSet("frequencies");
    transfers = db.getTreeMap("transfers");
    stops = db.getTreeMap("stops");
    fares = db.getTreeMap("fares");
    services = db.getTreeMap("services");
    shape_points = db.getTreeMap("shape_points");
    // orphaned builder terminator — source of the chain is not in view:
    .makeOrGet();
    tripPatternMap = db.getTreeMap("patternForTrip");
    stopCountByStopTime = db.getTreeMap("stopCountByStopTime");
    stopStopTimeSet = db.getTreeSet("stopStopTimeSet");
    tripsPerService = db.getTreeSet("tripsPerService");
public void initialize() { if (db == null) { db = DBMaker .newFileDB(new File(KLAB.CONFIG.getDataPath("authorization") + File.separator + "authcache")) .closeOnJvmShutdown() .make(); authorizedUsers = db.getTreeMap("authorizedUsers"); authorizedNodes = db.getTreeMap("authorizedNodes"); nodeAuthorization = db.getTreeMap("nodeAuthorization"); timestamps = db.getTreeMap("timestamps"); _this = this; // /* // * TODO cleanup on creation? Should schedule regular cycles every day or so. // */ // Trigger trigger = // TriggerBuilder.newTrigger().withIdentity("authorizationCleanup", "auth") // .withSchedule(SimpleScheduleBuilder.simpleSchedule().withIntervalInHours(24).repeatForever()) // .build(); // try { // this.scheduler = new StdSchedulerFactory().getScheduler(); // this.scheduler.start(); // this.scheduler.scheduleJob(JobBuilder.newJob(CleanupJob.class).withIdentity("authReaper", // "auth") // .build(), trigger); // } catch (SchedulerException e) { // KLAB.error("could not start authorization cleanup scheduler"); // } } }
/**
 * Opens the shared table-set database on first construction, then records
 * whether the given file is new or has been modified since last seen,
 * updating the persisted timestamp accordingly.
 */
public AbstractTableSet(File file) {
    if (db == null) {
        db = DBMaker
                .newFileDB(new File(KLAB.CONFIG.getDataPath("tablesets") + File.separator + "tabledata"))
                .closeOnJvmShutdown()
                .make();
        timestamps = db.getTreeMap("timestamps");
        mappings = db.getTreeMap("tableids");
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                // NPE - hm.
                // db.close();
            }
        });
    }
    // Classify the file against the timestamp recorded for it last time.
    String fileKey = file.toString();
    long timestamp = file.lastModified();
    Long previousStamp = timestamps.get(fileKey);
    if (previousStamp == null) {
        timestamps.put(fileKey, timestamp);
        isNew = true;
    } else if (timestamp > previousStamp) {
        isModified = true;
        timestamps.put(fileKey, timestamp);
    }
}
/**
 * Returns the persistent map named after the key function's class, creating
 * it with a BlockSerializer on first use.
 */
protected NavigableMap makeMap(KeyFunction keyfunc) {
    if (db == null) {
        init(); // lazy open of the backing store
    }
    final String name = keyfunc.getClass().getName();
    return db.exists(name)
            ? db.getTreeMap(name)
            : db.createTreeMap(name).valueSerializer(new BlockSerializer()).make();
}
/**
 * Replaces the stored password hash for a user, but only after verifying the
 * caller's current password. Throws HobsonAuthorizationException for an
 * unknown user or a failed verification (same message for both).
 */
@Override
public void changeUserPassword(String username, PasswordChange change) {
    synchronized (db) {
        Map<String, Object> userRecord = db.getTreeMap(username);
        if (userRecord == null) {
            throw new HobsonAuthorizationException("Unable to change user password");
        }
        String storedHash = (String) userRecord.get("password");
        // Require proof of the current password before persisting the new hash.
        if (!DigestUtils.sha256Hex(change.getCurrentPassword()).equals(storedHash)) {
            throw new HobsonAuthorizationException("Unable to change user password");
        }
        userRecord.put("password", DigestUtils.sha256Hex(change.getNewPassword()));
        db.commit();
    }
}
/**
 * Binds resolvedIdToTaxonMap to the on-disk taxon cache, bulk-loading it via
 * MapDB's data pump on first run and timing the build.
 *
 * @throws PropertyEnricherException if the cache source cannot be read
 */
private void initTaxonCache() throws PropertyEnricherException {
    DB db = initDb("taxonCache");
    String taxonCacheName = "taxonCacheById";
    if (db.exists(taxonCacheName)) {
        // Cache already built on a previous run — just bind to it.
        resolvedIdToTaxonMap = db.getTreeMap(taxonCacheName);
        return;
    }
    LOG.info("local taxon cache of [" + taxonCache.getResource() + "] building...");
    StopWatch watch = new StopWatch();
    watch.start();
    try {
        // Data-pump bulk load: presorted batches, duplicate keys dropped.
        resolvedIdToTaxonMap = db
                .createTreeMap(taxonCacheName)
                .pumpPresort(100000)
                .pumpIgnoreDuplicates()
                .pumpSource(taxonCacheIterator(taxonCache))
                .keySerializer(BTreeKeySerializer.STRING)
                .make();
    } catch (IOException e) {
        throw new PropertyEnricherException("failed to instantiate taxonCache: [" + e.getMessage() + "]", e);
    }
    watch.stop();
    logCacheLoadStats(watch.getTime(), resolvedIdToTaxonMap.size());
    LOG.info("local taxon cache of [" + taxonCache.getResource() + "] built.");
    watch.reset();
}
// Reopen an already-persisted index: bind fields to the existing MapDB collections.
logger.debug("Reopening storage for '{0}' index in workspace '{1}'", name, workspaceName);
this.options = db.getHashMap(name + "/options");
// NOTE(review): field names look swapped relative to the collection names
// (keysByValue binds the forward map, valuesByKey binds "/inverse") — confirm intent.
this.keysByValue = db.getTreeMap(name);
this.valuesByKey = db.getTreeSet(name + "/inverse");
this.isNew = false;
// Reopen an already-persisted index: bind fields to the existing MapDB collections.
logger.debug("Reopening storage for '{0}' index in workspace '{1}'", name, workspaceName);
this.options = db.getHashMap(name + "/options");
// NOTE(review): field names look swapped relative to the collection names
// (keysByValue binds the forward map, valuesByKey binds "/inverse") — confirm intent.
this.keysByValue = db.getTreeMap(name);
this.valuesByKey = db.getTreeSet(name + "/inverse");
this.isNew = false;