/**
 * Build this Shape from all shape points stored for the given shape_id.
 * Populates {@code geometry} (a LineString of lon/lat coordinates) and the parallel
 * {@code shape_dist_traveled} array, one entry per shape point.
 */
public Shape (GTFSFeed feed, String shape_id) {
    // The raw tuple range (shape_id, null) .. (shape_id, HI) selects every
    // (shape_id, sequence) key for this shape, in ascending sequence order.
    Map<Fun.Tuple2<String, Integer>, ShapePoint> points =
            feed.shape_points.subMap(new Fun.Tuple2(shape_id, null), new Fun.Tuple2(shape_id, Fun.HI));
    Coordinate[] coords = points.values().stream()
            .map(point -> new Coordinate(point.shape_pt_lon, point.shape_pt_lat))
            // Array-constructor reference is the idiomatic form of i -> new Coordinate[i].
            .toArray(Coordinate[]::new);
    geometry = geometryFactory.createLineString(coords);
    // Cumulative distances, parallel to the coordinates above.
    shape_dist_traveled = points.values().stream().mapToDouble(point -> point.shape_dist_traveled).toArray();
}
}
public Collection<Frequency> getFrequencies (String trip_id) { // IntelliJ tells me all these casts are unnecessary, and that's also my feeling, but the code won't compile // without them return (List<Frequency>) frequencies.subSet(new Fun.Tuple2(trip_id, null), new Fun.Tuple2(trip_id, Fun.HI)).stream() .map(t2 -> ((Tuple2<String, Frequency>) t2).b) .collect(Collectors.toList()); }
/**
 * Resolve the StopTime for the given trip and stop sequence, preferring the static feed
 * and falling back to the realtime trip update when the static feed has no entry.
 */
public StopTime getStopTime(GTFSFeed staticFeed, GtfsRealtime.TripDescriptor tripDescriptor, Label.Transition t, Instant boardTime, int stopSequence) {
    // Static stop_times are keyed by (trip_id, stop_sequence).
    StopTime scheduled = staticFeed.stop_times.get(new Fun.Tuple2<>(tripDescriptor.getTripId(), stopSequence));
    if (scheduled != null) {
        return scheduled;
    }
    // Fall back to the realtime update; stop_sequence is 1-based while the list index is 0-based.
    return getTripUpdate(staticFeed, tripDescriptor, t, boardTime).get().stopTimes.get(stopSequence - 1);
}
/**
 * Build this Shape from every shape point stored under the given shape_id,
 * filling in {@code geometry} and the parallel {@code shape_dist_traveled} array.
 */
public Shape (GTFSFeed feed, String shape_id) {
    // Raw tuple range (shape_id, null) .. (shape_id, HI) spans all sequence numbers for this shape.
    Map<Fun.Tuple2<String, Integer>, ShapePoint> shapePoints =
            feed.shape_points.subMap(new Fun.Tuple2(shape_id, null), new Fun.Tuple2(shape_id, Fun.HI));
    Coordinate[] coordinates = shapePoints.values().stream()
            .map(p -> new Coordinate(p.shape_pt_lon, p.shape_pt_lat))
            .toArray(i -> new Coordinate[i]);
    geometry = Util.geometryFactory.createLineString(coordinates);
    // One cumulative distance per point, parallel to the coordinates above.
    shape_dist_traveled = shapePoints.values().stream().mapToDouble(p -> p.shape_dist_traveled).toArray();
}
}
public Collection<Frequency> getFrequencies (String trip_id) { // IntelliJ tells me all these casts are unnecessary, and that's also my feeling, but the code won't compile // without them return (List<Frequency>) frequencies.subSet(new Fun.Tuple2(trip_id, null), new Fun.Tuple2(trip_id, Fun.HI)).stream() .map(t2 -> ((Tuple2<String, Frequency>) t2).b) .collect(Collectors.toList()); }
/**
 * Parse one shapes.txt row into a ShapePoint and store it in the feed's shape_points map,
 * keyed by (shape_id, shape_pt_sequence).
 */
@Override
public void loadOneRow() throws IOException {
    // Required fields per the GTFS shapes.txt spec; lat/lon are range-checked.
    String shape_id = getStringField("shape_id", true);
    double shape_pt_lat = getDoubleField("shape_pt_lat", true, -90D, 90D);
    double shape_pt_lon = getDoubleField("shape_pt_lon", true, -180D, 180D);
    int shape_pt_sequence = getIntField("shape_pt_sequence", true, 0, Integer.MAX_VALUE);
    // Optional cumulative distance; must be non-negative when present.
    double shape_dist_traveled = getDoubleField("shape_dist_traveled", false, 0D, Double.MAX_VALUE);
    ShapePoint s = new ShapePoint(shape_id, shape_pt_lat, shape_pt_lon, shape_pt_sequence, shape_dist_traveled);
    s.id = row + 1; // offset line number by 1 to account for 0-based row index
    s.feed = null; // since we're putting this into MapDB, we don't want circular serialization
    feed.shape_points.put(new Tuple2<String, Integer>(s.shape_id, s.shape_pt_sequence), s);
}
}
/** Derive a synthetic (x, y) tile identifier for the given coordinate at the fixed Z_INDEX zoom. */
public static Fun.Tuple2<Integer, Integer> getOsmId(double lat, double lon) {
    // Note the axis mapping: longitude -> tile x, latitude -> tile y.
    return new Fun.Tuple2<>(
            SpatialDataStore.getTileX(lon, Z_INDEX),
            SpatialDataStore.getTileY(lat, Z_INDEX));
}
/**
 * Build a linkage and store it permanently, bypassing the PointSet's internal linkage cache:
 * this particular linkage must be serialized with the network (the Guava cache does not
 * serialize its contents) and must never be evicted.
 */
public void buildUnevictableLinkage(StreetLayer streetLayer, StreetMode mode) {
    Tuple2<StreetLayer, StreetMode> key = new Tuple2<>(streetLayer, mode);
    // Building twice is not fatal, but it indicates a logic error elsewhere — log loudly.
    boolean alreadyBuilt = linkageMap.containsKey(key) || linkageCache.getIfPresent(key) != null;
    if (alreadyBuilt) {
        LOG.error("Un-evictable linkage is being built more than once.");
    }
    linkageMap.put(key, new LinkedPointSet(this, streetLayer, mode, null));
}
/**
 * Accumulate all SegmentStatistics stored under the given segment id.
 * The raw tuple range (id, null) .. (id, HI) selects every secondary key for this id.
 */
private SegmentStatistics loadSegmentStats(long id) {
    SegmentStatistics cumulativeStats = new SegmentStatistics();
    NavigableMap<Fun.Tuple2<Long, Integer>, SegmentStatistics> subMap =
            statsMap.subMap(new Fun.Tuple2(id, null), true, new Fun.Tuple2(id, Fun.HI), true);
    // The submap's value type is already SegmentStatistics, so iterate typed —
    // the original Object loop and cast were unnecessary.
    for (SegmentStatistics stats : subMap.values()) {
        cumulativeStats.addStats(stats);
    }
    return cumulativeStats;
}
/**
 * Build a linkage and put it directly into the permanent linkage map rather than the
 * Guava cache, because this linkage must survive serialization of the network and
 * must never be evicted (the cache does not serialize its contents).
 */
public void buildUnevictableLinkage(StreetLayer streetLayer, StreetMode mode) {
    Tuple2<StreetLayer, StreetMode> linkageKey = new Tuple2<>(streetLayer, mode);
    if (linkageMap.containsKey(linkageKey) || linkageCache.getIfPresent(linkageKey) != null) {
        // Duplicate build suggests a caller-side logic error; continue but record it.
        LOG.error("Un-evictable linkage is being built more than once.");
    }
    LinkedPointSet linkage = new LinkedPointSet(this, streetLayer, mode, null);
    linkageMap.put(linkageKey, linkage);
}
/** Get list of stop_times for a given stop_id. */
public List<StopTime> getStopTimesForStop (String stop_id) {
    // The tuple range [(stop_id, null), (stop_id, HI)) covers every index entry for this stop;
    // each entry's second element is the key into the stop_times map.
    Tuple2<String, Tuple2> rangeStart = new Tuple2<>(stop_id, null);
    Tuple2 rangeEnd = new Tuple2(stop_id, Fun.HI);
    return this.stopStopTimeSet.subSet(rangeStart, rangeEnd).stream()
            .map(indexEntry -> this.stop_times.get(indexEntry.b))
            .collect(Collectors.toList());
}
/** Get list of services for each date of service. */
public List<Service> getServicesForDate (LocalDate date) {
    String dateString = date.format(dateFormatter);
    // Tuple range [(date, null), (date, HI)) covers every service id indexed under this date;
    // each entry's second element is the key into the services map.
    return this.servicesPerDate
            .subSet(new Tuple2<>(dateString, null), new Tuple2(dateString, Fun.HI))
            .stream()
            .map(indexEntry -> this.services.get(indexEntry.b))
            .collect(Collectors.toList());
}
// NOTE(review): fragment — the enclosing method is not visible here.
SimpleTransfer t = (SimpleTransfer) e;
// Convert transfer distance to an access time using the request's walk speed
// (presumably meters / meters-per-second = seconds — confirm units against the request class).
int time = (int) (t.getDistance() / request.walkSpeed);
// Record (destination transit stop, access time) for this transfer edge.
accessTimes.add(new Tuple2((TransitStop) e.getToVertex(), time));
// NOTE(review): fragment — this early return belongs to a guard in the enclosing method,
// which is not visible here.
return;
// Scheduled stop_time for this (trip_id, stop_sequence) key in the static feed.
// NOTE(review): originalStopTime could be null if the sequence is absent from the
// static feed — confirm an upstream check guarantees its presence.
final StopTime originalStopTime = feed.stop_times.get(new Fun.Tuple2(tripUpdate.getTrip().getTripId(), stopTime.stop_sequence));
// Realtime arrival minus scheduled arrival.
int arrivalDelay = stopTime.arrival_time - originalStopTime.arrival_time;
// Store the delay keyed by the alight edge for this stop sequence
// (the * 1000 suggests seconds -> milliseconds — confirm the map's unit convention).
delaysForAlightEdges.put(leaveEdges[stopTime.stop_sequence], arrivalDelay * 1000);
// NOTE(review): fragment with unbalanced braces — the closing braces of the loop/ifs
// live outside this excerpt; code left byte-identical.
// Next expected stop sequence: 1 when nothing collected yet, otherwise last sequence + 1.
int nextStopSequence = stopTimes.isEmpty() ? 1 : stopTimes.get(stopTimes.size()-1).stop_sequence+1;
// Walk over any sequences the realtime update skipped, pulling them from the static feed.
for (int i=nextStopSequence; i<stopTimeUpdate.getStopSequence(); i++) {
    StopTime previousOriginalStopTime = feed.stop_times.get(new Fun.Tuple2(tripUpdate.getTrip().getTripId(), i));
    if (previousOriginalStopTime == null) {
        continue; // This can and does happen. Stop sequence numbers can be left out.
// Scheduled stop_time for the updated sequence itself; may legitimately be absent.
final StopTime originalStopTime = feed.stop_times.get(new Fun.Tuple2(tripUpdate.getTrip().getTripId(), stopTimeUpdate.getStopSequence()));
if (originalStopTime != null) {
    // Clone before mutating so the static feed's copy stays untouched.
    StopTime updatedStopTime = originalStopTime.clone();
// NOTE(review): fragment — the method body continues past this excerpt; code left byte-identical.
int addDelayedBoardEdge(ZoneId zoneId, GtfsRealtime.TripDescriptor tripDescriptor, int stopSequence, int departureTime, int departureNode, BitSet validOnDay) {
    Trip trip = feed.trips.get(tripDescriptor.getTripId());
    // Static stop_times are keyed by (trip_id, stop_sequence).
    // NOTE(review): stopTime is dereferenced below without a null check — confirm callers
    // only pass sequences present in the static feed.
    StopTime stopTime = feed.stop_times.get(new Fun.Tuple2(tripDescriptor.getTripId(), stopSequence));
    Stop stop = feed.stops.get(stopTime.stop_id);
    // Lazily create the per-stop map of departure timelines, grouped by platform descriptor.
    Map<GtfsStorageI.PlatformDescriptor, NavigableMap<Integer, Integer>> departureTimelineNodesByRoute = departureTimelinesByStop.computeIfAbsent(stopTime.stop_id, s -> new HashMap<>());
/**
 * Parse one stop_times.txt row into a StopTime and store it in the feed's stop_times map,
 * keyed by (trip_id, stop_sequence). Referenced trips and stops are checked but not linked.
 */
@Override
public void loadOneRow() throws IOException {
    StopTime st = new StopTime();
    st.sourceFileLine = row + 1; // offset line number by 1 to account for 0-based row index
    st.trip_id = getStringField("trip_id", true);
    // TODO: arrival_time and departure time are not required, but if one is present the other should be
    // also, if this is the first or last stop, they are both required
    st.arrival_time = getTimeField("arrival_time", false);
    st.departure_time = getTimeField("departure_time", false);
    st.stop_id = getStringField("stop_id", true);
    st.stop_sequence = getIntField("stop_sequence", true, 0, Integer.MAX_VALUE);
    st.stop_headsign = getStringField("stop_headsign", false);
    st.pickup_type = getIntField("pickup_type", false, 0, 3); // TODO add ranges as parameters
    st.drop_off_type = getIntField("drop_off_type", false, 0, 3);
    st.shape_dist_traveled = getDoubleField("shape_dist_traveled", false, 0D, Double.MAX_VALUE); // FIXME using both 0 and NaN for "missing", define DOUBLE_MISSING
    st.timepoint = getIntField("timepoint", false, 0, 1, INT_MISSING);
    st.feed = null; // this could circular-serialize the whole feed
    feed.stop_times.put(new Fun.Tuple2(st.trip_id, st.stop_sequence), st);
    /* Check referential integrity without storing references. StopTime cannot directly reference Trips or
       Stops because they would be serialized into the MapDB. */
    getRefField("trip_id", true, feed.trips);
    getRefField("stop_id", true, feed.stops);
}
/**
 * Parse one shapes.txt row into a ShapePoint and store it in the feed's shape_points map,
 * keyed by (shape_id, shape_pt_sequence).
 */
@Override
public void loadOneRow() throws IOException {
    // Required fields per the GTFS shapes.txt spec; lat/lon are range-checked.
    String shape_id = getStringField("shape_id", true);
    double shape_pt_lat = getDoubleField("shape_pt_lat", true, -90D, 90D);
    double shape_pt_lon = getDoubleField("shape_pt_lon", true, -180D, 180D);
    int shape_pt_sequence = getIntField("shape_pt_sequence", true, 0, Integer.MAX_VALUE);
    // Optional cumulative distance; must be non-negative when present.
    double shape_dist_traveled = getDoubleField("shape_dist_traveled", false, 0D, Double.MAX_VALUE);
    ShapePoint s = new ShapePoint(shape_id, shape_pt_lat, shape_pt_lon, shape_pt_sequence, shape_dist_traveled);
    s.sourceFileLine = row + 1; // offset line number by 1 to account for 0-based row index
    s.feed = null; // since we're putting this into MapDB, we don't want circular serialization
    feed.shape_points.put(new Tuple2<String, Integer>(s.shape_id, s.shape_pt_sequence), s);
}
}