private void validateTripUpdate(GtfsReader.TripWithStopTimes tripUpdate) {
    com.conveyal.gtfs.model.Trip originalTrip = gtfsFeed.trips.get(tripUpdate.trip.trip_id);
    try {
        Iterable<StopTime> interpolatedStopTimesForTrip = gtfsFeed.getInterpolatedStopTimesForTrip(tripUpdate.trip.trip_id);
        long nStopTimes = StreamSupport.stream(interpolatedStopTimesForTrip.spliterator(), false).count();
        logger.trace("Original stop times: {} Updated stop times: {}", nStopTimes, tripUpdate.stopTimes.size());
        if (nStopTimes != tripUpdate.stopTimes.size()) {
            logger.error("Original stop times: {} Updated stop times: {}", nStopTimes, tripUpdate.stopTimes.size());
        }
    } catch (GTFSFeed.FirstAndLastStopsDoNotHaveTimes firstAndLastStopsDoNotHaveTimes) {
        throw new RuntimeException(firstAndLastStopsDoNotHaveTimes);
    }
}
@Override
public void loadOneRow() throws IOException {
    Trip t = new Trip();
    t.sourceFileLine = row + 1; // offset line number by 1 to account for 0-based row index
    t.route_id = getStringField("route_id", true);
    t.service_id = getStringField("service_id", true);
    t.trip_id = getStringField("trip_id", true);
    t.trip_headsign = getStringField("trip_headsign", false);
    t.trip_short_name = getStringField("trip_short_name", false);
    t.direction_id = getIntField("direction_id", false, 0, 1);
    t.block_id = getStringField("block_id", false); // make a blocks multimap
    t.shape_id = getStringField("shape_id", false);
    t.bikes_allowed = getIntField("bikes_allowed", false, 0, 2);
    t.wheelchair_accessible = getIntField("wheelchair_accessible", false, 0, 2);
    t.feed = feed;
    t.feed_id = feed.feedId;
    feed.trips.put(t.trip_id, t);

    /* Check referential integrity without storing references. Trip cannot directly reference
       Services or Routes because they would be serialized into the MapDB. */
    // TODO confirm existence of shape ID
    getRefField("service_id", true, feed.services);
    getRefField("route_id", true, feed.routes);
}
Transfers(GTFSFeed feed) {
    this.transfersToStop = feed.transfers.values().stream()
            .collect(Collectors.groupingBy(t -> t.to_stop_id));
    this.transfersFromStop = feed.transfers.values().stream()
            .collect(Collectors.groupingBy(t -> t.from_stop_id));
    this.routesByStop = feed.stop_times.values().stream()
            .collect(Collectors.groupingBy(stopTime -> stopTime.stop_id,
                    Collectors.mapping(stopTime -> feed.trips.get(stopTime.trip_id).route_id, Collectors.toSet())));
}
// Apply stop ID replacements to every stop time, writing the modified entry back into the MapDB map.
for (Fun.Tuple2 key : feed.stop_times.keySet()) {
    StopTime stopTime = feed.stop_times.get(key);
    String replacementStopId = stopIdReplacements.get(stopTime.stop_id);
    if (replacementStopId != null) {
        stopTime.stop_id = replacementStopId;
        feed.stop_times.put(key, stopTime);
    }
}

// Drop the shapes table and clear the shape reference on every trip.
System.out.println("Removing shapes table and removing shape IDs from trips...");
feed.shape_points.clear();
for (String tripId : feed.trips.keySet()) {
    Trip trip = feed.trips.get(tripId);
    trip.shape_id = null;
    feed.trips.put(tripId, trip);
}

// Walk stop times and trips with explicit iterators (bodies elided in this excerpt).
Iterator<StopTime> stIterator = feed.stop_times.values().iterator();
while (stIterator.hasNext()) {
    StopTime stopTime = stIterator.next();
    // ...
}
Iterator<Trip> tripIterator = feed.trips.values().iterator();
while (tripIterator.hasNext()) {
    Trip trip = tripIterator.next();
    // ...
}
public void save(SpeedSample speedSample) {
    synchronized (this) {
        int week = SegmentStatistics.getWeekSinceEpoch(speedSample.getTime());
        int hour = SegmentStatistics.getHourOfWeek(speedSample.getTime());
        weekSet.add(week);

        Fun.Tuple2<Long, Integer> sampleId = new Fun.Tuple2<>(speedSample.getSegmentId(), week);

        SegmentStatistics segmentStatistics = statsMap.get(sampleId);
        if (segmentStatistics == null) {
            segmentStatistics = new SegmentStatistics();
        }
        segmentStatistics.addSample(speedSample);
        statsMap.put(sampleId, segmentStatistics);
    }
}
case ALL:
    final Object[] allKeyArray = new Object[]{key.getHashValue()};
    valueArrays = table.prefixSubMap(allKeyArray).values();
    break;
case BETWEEN:
    final Object[] fromKeyArray = new Object[]{key.getHashValue(), ((TwoValueHolder) key.getRangeKey().getValue()).getValue1()};
    final Object[] toKeyArray = new Object[]{key.getHashValue(), ((TwoValueHolder) key.getRangeKey().getValue()).getValue2()};
    valueArrays = table.subMap(fromKeyArray, true, toKeyArray, true).values();
    break;
case EQUAL_TO:
    final Object[] equalToKeyArray = convertSimpleKeyToArray(key);
    valueArrays = Optional.ofNullable(table.get(equalToKeyArray))
            .map(value -> Collections.singletonList(value))
            .orElseGet(() -> Collections.emptyList());
    break; // without this break the EQUAL_TO case would fall through and overwrite valueArrays
case GREATER_THAN_OR_EQUAL_TO:
    final Object[] greaterThanKey = convertSimpleKeyToArray(key);
    valueArrays = table.tailMap(greaterThanKey, key.getRangeKey().getOperator() == Standard.GREATER_THAN_OR_EQUAL_TO).values();
    break;
case LESS_THAN_OR_EQUAL_TO:
    final Object[] lessThanKey = convertSimpleKeyToArray(key);
    valueArrays = table.headMap(lessThanKey, key.getRangeKey().getOperator() == Standard.LESS_THAN_OR_EQUAL_TO).values();
    break;
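The switch above maps key conditions onto range views of a MapDB BTreeMap keyed by Object[] composite keys. The following is a minimal, self-contained sketch of the two core calls it relies on (prefixSubMap for a hash-only scan, subMap for an inclusive range); the map name, key layout, and sample values are hypothetical and not taken from the code above.

import org.mapdb.BTreeMap;
import org.mapdb.DB;
import org.mapdb.DBMaker;
import org.mapdb.Serializer;
import org.mapdb.serializer.SerializerArrayTuple;

import java.util.NavigableMap;

public class CompositeKeyQueries {
    public static void main(String[] args) {
        DB db = DBMaker.memoryDB().make();
        // Composite Object[] key: {hash value, range value}, mirroring the switch above.
        BTreeMap<Object[], byte[]> records = db.treeMap("records")
                .keySerializer(new SerializerArrayTuple(Serializer.STRING, Serializer.INTEGER))
                .valueSerializer(Serializer.BYTE_ARRAY)
                .createOrOpen();

        records.put(new Object[]{"user-1", 10}, new byte[]{1});
        records.put(new Object[]{"user-1", 20}, new byte[]{2});
        records.put(new Object[]{"user-2", 10}, new byte[]{3});

        // ALL: every entry sharing the hash value, regardless of range value.
        NavigableMap<Object[], byte[]> all = records.prefixSubMap(new Object[]{"user-1"});

        // BETWEEN: inclusive range [10, 20] within one hash value.
        NavigableMap<Object[], byte[]> between = records.subMap(
                new Object[]{"user-1", 10}, true,
                new Object[]{"user-1", 20}, true);

        System.out.println(all.size() + " / " + between.size()); // prints "2 / 2"
        db.close();
    }
}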
@Test
public void testOnHeapDB() {
    DB db = DBMaker.heapDB().make();
    BTreeMap<Long, String> map = db.treeMap("btree")
            .keySerializer(Serializer.LONG)
            .valueSerializer(Serializer.STRING)
            .create();

    // putIfAbsentBoolean returns false when a new entry was inserted, true when the key already had a value
    Assert.assertFalse(map.putIfAbsentBoolean(1L, "val_1"));
    Assert.assertTrue(map.putIfAbsentBoolean(1L, "val_2"));
    Assert.assertTrue(map.putIfAbsentBoolean(1L, "val_3"));
    Assert.assertFalse(map.putIfAbsentBoolean(2L, "val_4"));
    Assert.assertEquals("val_1", map.get(1L));
    Assert.assertEquals("val_4", map.get(2L));
    Assert.assertTrue(map.replace(2L, "val_4", "val_5"));
    Assert.assertEquals("val_5", map.get(2L));

    map.close();
    db.close();
}
@Override
public byte[] remove(Object key) {
    byte[] b = this.treeMap.remove(key);
    this.db.commit();
    return b;
}
@Override
public void modify(
        final @NotNull Object[] key,
        final @Nullable byte[] oldValue,
        final @Nullable byte[] newValue,
        final boolean triggered
) {
    if (newValue == null) {
        // Entry was removed: drop the corresponding secondary-index entry.
        final Object[] oldKey = keyFunction.apply(oldValue);
        index.remove(oldKey);
    } else {
        // Entry was inserted or updated: (re)index the new value.
        final Object[] newKey = keyFunction.apply(newValue);
        index.put(newKey, newValue);
    }
}
// Excerpt: select the DACF entries in the query's time window, then look up the matching
// SN (snapshot) entry for each key and collect the filtered columns.
ConcurrentNavigableMap<HistoKey, Map<String, Object>> filtered = netMap.subMap(
        new HistoKey(HistoDbHorizon.DACF.toString(), query.getTimeFrom(),
                query.getForecastTime() >= 0 ? query.getForecastTime() : Integer.MIN_VALUE),
        /* upper bound key omitted in this excerpt */);
filtered.keySet().forEach(fk -> {
    HistoKey sk = new HistoKey(HistoDbHorizon.SN.toString(), fk.getDateTime(), 0);
    Map sm = netMap.get(sk);
    if (sm != null) {
        result.add(filterColumns(filtered.get(fk), query, columns));
    }
});
long timestamp = event.getEvent().getTimestamp();
if (accept(timestamp)) {
    // putIfAbsentBoolean returns false when a new entry was inserted (see the test above).
    boolean alreadyPresent = btreeMap.putIfAbsentBoolean(timestamp, new PartitionedEvent[] {event});
    if (!alreadyPresent) {
        size.incrementAndGet();
    } else {
        // Another event already exists for this timestamp: append to the stored array and replace it.
        LOG.debug("Duplicated timestamp {}, will reduce performance as replacing", timestamp);
        PartitionedEvent[] oldValue = btreeMap.get(timestamp);
        PartitionedEvent[] newValue = oldValue == null ? new PartitionedEvent[1] : Arrays.copyOf(oldValue, oldValue.length + 1);
        newValue[newValue.length - 1] = event;
        btreeMap.replace(timestamp, newValue);
        replaceOpCount++;
        if (replaceOpCount % 1000 == 0) {
public void save(List<SpatialDataItem> objs) {
    for (SpatialDataItem obj : objs) {
        if (map.containsKey(obj.id)) {
            continue;
        }
        map.put(obj.id, obj);
        for (Tuple3<Integer, Integer, Long> tuple : obj.getTiles(Z_INDEX)) {
            tileIndex.add(tuple);
        }
    }
    db.commit();
}
@Override
public void close() throws IOException {
    this.db.commit();
    this.treeMap.close();
    this.db.close();
}
public static void main(String[] args) {
    //import org.mapdb.*
    DB db = DBMaker.memoryDB().make();

    BTreeMap<byte[], Integer> map = db
            .treeMap("towns", Serializer.BYTE_ARRAY, Serializer.INTEGER)
            .createOrOpen();

    map.put("New York".getBytes(), 1);
    map.put("New Jersey".getBytes(), 2);
    map.put("Boston".getBytes(), 3);

    //get all New* cities
    Map<byte[], Integer> newCities = map.prefixSubMap("New".getBytes());
}
@Override
public boolean createIndex(
        final MapStoreIndexName indexName,
        final MapStoreTableKey indexKey
) {
    // Create a new index table using the map store
    final MapDBIndexMetadata indexMetadata = indexMetadata(indexName, indexKey);
    final boolean createdIndex = mapStore.createTable(indexMetadata.toString(), indexKey);
    if (createdIndex) {
        // Copy all table records to the new index
        final Function<byte[], Object[]> indexKeyFunction = indexKeyFunction(indexMetadata);
        final BTreeMap<?, byte[]> table = mapDB.treeMap(indexName.getTableName())
                .valueSerializer(Serializer.BYTE_ARRAY)
                .open();
        final BTreeMap<Object[], byte[]> index = mapDB.treeMap(indexMetadata.toString())
                .keySerializer(createKeySerializer(indexKey.getHashFieldType(), indexKey.getRangeFieldType()))
                .valueSerializer(Serializer.BYTE_ARRAY)
                .open();
        table.getValues().forEach((byte[] value) -> {
            final Object[] newKey = indexKeyFunction.apply(value);
            index.put(newKey, value);
        });
    }
    return createdIndex;
}
/**
 * For the given trip ID, fetch all the stop times in order of increasing stop_sequence.
 * This is an efficient iteration over a tree map.
 */
public Iterable<StopTime> getOrderedStopTimesForTrip (String trip_id) {
    Map<Fun.Tuple2, StopTime> tripStopTimes = stop_times.subMap(
            Fun.t2(trip_id, null),
            Fun.t2(trip_id, Fun.HI)
    );
    return tripStopTimes.values();
}
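A brief usage sketch of the method above. It assumes a GTFSFeed has already been loaded elsewhere; the class name, feed variable, and trip ID below are placeholders for illustration, not part of the original code. The sub-map view comes back already ordered because the stop_times keys are Tuple2(trip_id, stop_sequence).

import com.conveyal.gtfs.GTFSFeed;
import com.conveyal.gtfs.model.StopTime;

public class OrderedStopTimesExample {
    // Prints one trip's stops in stop_sequence order; feed and tripId are supplied by the caller.
    static void printTripStops(GTFSFeed feed, String tripId) {
        for (StopTime st : feed.getOrderedStopTimesForTrip(tripId)) {
            // Entries arrive ordered by stop_sequence within the requested trip.
            System.out.println(st.stop_sequence + " -> " + st.stop_id);
        }
    }
}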
@Override
public void clear() {
    this.treeMap.clear();
    this.db.commit();
}