r5py 1.0.0.dev11__py3-none-any.whl → 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of r5py has been flagged as potentially problematic by the registry's analysis.

r5py/__init__.py CHANGED
@@ -2,12 +2,13 @@
 
 """Python wrapper for the R5 routing analysis engine."""
 
- __version__ = "1.0.0.dev11"
+ __version__ = "1.0.1"
 
 
 from .r5 import (
     DetailedItineraries,
     DetailedItinerariesComputer,
+     Isochrones,
     RegionalTask,
     TransportMode,
     TransportNetwork,
@@ -18,6 +19,7 @@ from .r5 import (
 __all__ = [
     "DetailedItineraries",
     "DetailedItinerariesComputer",
+     "Isochrones",
     "RegionalTask",
     "TransportMode",
     "TransportNetwork",
r5py/r5/__init__.py CHANGED
@@ -7,6 +7,7 @@ from .breakdown_stat import BreakdownStat
 from .detailed_itineraries import DetailedItineraries, DetailedItinerariesComputer
 from .direct_leg import DirectLeg
 from .egress_leg import EgressLeg
+ from .isochrones import Isochrones
 from .regional_task import RegionalTask
 from .scenario import Scenario
 from .street_layer import StreetLayer
@@ -25,6 +26,7 @@ __all__ = [
 "DetailedItinerariesComputer",
 "DirectLeg",
 "EgressLeg",
+ "Isochrones",
 "RegionalTask",
 "Scenario",
 "SpeedConfig",
r5py/r5/detailed_itineraries.py CHANGED
@@ -84,7 +84,7 @@ class DetailedItineraries(BaseTravelTimeMatrix):
 ``access_modes``, ``egress_modes``, ``max_time``, ``max_time_walking``,
 ``max_time_cycling``, ``max_time_driving``, ``speed_cycling``, ``speed_walking``,
 ``max_public_transport_rides``, ``max_bicycle_traffic_stress``
- Not that not all arguments might make sense in this context, and the
+ Note that not all arguments might make sense in this context, and the
 underlying R5 engine might ignore some of them.
 """
 super().__init__(
@@ -122,11 +122,20 @@ class DetailedItineraries(BaseTravelTimeMatrix):
 
 data = self._compute()
 with warnings.catch_warnings():
- warnings.simplefilter("ignore", category=FutureWarning)
+ warnings.filterwarnings(
+ "ignore",
+ message=(
+ "You are adding a column named 'geometry' to a GeoDataFrame "
+ "constructed without an active geometry column"
+ ),
+ category=FutureWarning,
+ )
 for column in data.columns:
 self[column] = data[column]
 self.set_geometry("geometry")
 
+ del self.transport_network
+
 def _compute(self):
 """
 Compute travel times from all origins to all destinations.
r5py/r5/isochrones.py ADDED
@@ -0,0 +1,351 @@
+ #!/usr/bin/env python3
+
+
+ """Compute polygons of equal travel time from a destination."""
+
+
+ import datetime
+ import warnings
+
+ import geohexgrid
+ import geopandas
+ import pandas
+ import pyproj
+ import shapely
+ import simplification.cutil
+
+ from .base_travel_time_matrix import BaseTravelTimeMatrix
+ from .transport_mode import TransportMode
+ from .travel_time_matrix import TravelTimeMatrix
+ from ..util import GoodEnoughEquidistantCrs, SpatiallyClusteredGeoDataFrame
+
+
+ __all__ = ["Isochrones"]
+
+
+ EMPTY_POINT = shapely.Point()
+ R5_CRS = "EPSG:4326"
+
+ CONCAVE_HULL_BUFFER_SIZE = 20.0  # metres
+ CONCAVE_HULL_RATIO = 0.3
+
+ VERY_SMALL_BUFFER_SIZE = 0.001  # turn points into polygons
+
+
+ class Isochrones(BaseTravelTimeMatrix):
+ """Compute polygons of equal travel time from a destination."""
+
+ _r5py_attributes = BaseTravelTimeMatrix._r5py_attributes + [
+ "_isochrones",
+ "isochrones",
+ "point_grid_resolution",
+ "point_grid_sample_ratio",
+ ]
+
+ def __init__(
+ self,
+ transport_network,
+ origins,
+ isochrones=pandas.timedelta_range(
+ start=datetime.timedelta(minutes=0),
+ end=datetime.timedelta(hours=1),
+ freq=datetime.timedelta(minutes=15),
+ ),
+ point_grid_resolution=100,
+ point_grid_sample_ratio=1.0,
+ **kwargs,
+ ):
+ """
+ Compute polygons of equal travel time from one or more destinations.
+
+ ``r5py.Isochrones`` are child classes of ``geopandas.GeoDataFrame`` and
+ support all of their methods and properties, see
+ https://geopandas.org/en/stable/docs.html
+
+ Arguments
+ ---------
+ transport_network : r5py.TransportNetwork | tuple(str, list(str), dict)
+ The transport network to route on. This can either be a readily
+ initialised r5py.TransportNetwork or a tuple of the parameters
+ passed to ``TransportNetwork.__init__()``: the path to an OpenStreetMap
+ extract in PBF format, a list of zero of more paths to GTFS transport
+ schedule files, and a dict with ``build_config`` options.
+ origins : geopandas.GeoDataFrame | shapely.Point
+ Place(s) to find a route _from_
+ Must be/have a point geometry. If multiple origin points are passed,
+ isochrones will be computed as minimum travel time from any of them.
+ isochrones : pandas.TimedeltaIndex | collections.abc.Iterable[int]
+ For which interval to compute isochrone polygons. An iterable of
+ integers is interpreted as minutes.
+ point_grid_resolution : int
+ Distance in meters between points in the regular grid of points laid over the
+ transport network’s extent that is used to compute isochrones.
+ Increase this value for performance, decrease it for precision.
+ point_grid_sample_ratio : float
+ Share of points of the point grid that are used in computation,
+ ranging from 0.01 to 1.0.
+ Increase this value for performance, decrease it for precision.
+ **kwargs : mixed
+ Any arguments than can be passed to r5py.RegionalTask:
+ ``departure``, ``departure_time_window``, ``percentiles``, ``transport_modes``,
+ ``access_modes``, ``egress_modes``, ``max_time``, ``max_time_walking``,
+ ``max_time_cycling``, ``max_time_driving``, ``speed_cycling``, ``speed_walking``,
+ ``max_public_transport_rides``, ``max_bicycle_traffic_stress``
+ Note that not all arguments might make sense in this context, and the
+ underlying R5 engine might ignore some of them.
+ If percentiles are specified, the lowest one will be used for
+ isochrone computation.
+ """
+ geopandas.GeoDataFrame.__init__(self)
+ BaseTravelTimeMatrix.__init__(
+ self,
+ transport_network,
+ **kwargs,
+ )
+
+ self.EQUIDISTANT_CRS = GoodEnoughEquidistantCrs(self.transport_network.extent)
+
+ if isinstance(origins, shapely.Geometry):
+ origins = geopandas.GeoDataFrame(
+ {
+ "id": [
+ "origin",
+ ],
+ "geometry": [
+ origins,
+ ],
+ },
+ crs=R5_CRS,
+ )
+ self.origins = origins
+ self.isochrones = isochrones
+
+ self.point_grid_resolution = point_grid_resolution
+ self.point_grid_sample_ratio = max(0.01, min(1.0, point_grid_sample_ratio))
+
+ travel_times = TravelTimeMatrix(
+ transport_network,
+ origins=self.origins,
+ destinations=self.destinations,
+ max_time=self.isochrones.max(),
+ **kwargs,
+ )
+
+ data = self._compute_isochrones_from_travel_times(travel_times)
+
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ "ignore",
+ message=(
+ "You are adding a column named 'geometry' to a GeoDataFrame "
+ "constructed without an active geometry column"
+ ),
+ category=FutureWarning,
+ )
+ for column in data.columns:
+ self[column] = data[column]
+ self.set_geometry("geometry")
+
+ del self.transport_network
+
+ def _compute_isochrones_from_travel_times(self, travel_times):
+ travel_times = travel_times.dropna().groupby("to_id").min().reset_index()
+
+ if self.request.percentiles == [50]:
+ travel_time_column = "travel_time"
+ else:
+ travel_time_column = f"travel_time_p{self.request.percentiles[0]:d}"
+
+ isochrones = {
+ "travel_time": [],
+ "geometry": [],
+ }
+
+ for isochrone in self.isochrones:
+ reached_nodes = (
+ self.destinations.set_index("id")
+ .join(
+ travel_times[
+ travel_times[travel_time_column]
+ <= (isochrone.total_seconds() / 60)
+ ].set_index("to_id"),
+ how="inner",
+ )
+ .reset_index()
+ )
+
+ # isochrone polygons might be disjoint (e.g., around metro stops)
+ if not reached_nodes.empty:
+ reached_nodes = SpatiallyClusteredGeoDataFrame(
+ reached_nodes, eps=(2.0 * self.point_grid_resolution)
+ ).to_crs(self.EQUIDISTANT_CRS)
+ isochrone_polygons = pandas.concat(
+ [
+ (
+ reached_nodes[reached_nodes["cluster"] != -1]
+ .dissolve(by="cluster")
+ .concave_hull(ratio=CONCAVE_HULL_RATIO)
+ .buffer(VERY_SMALL_BUFFER_SIZE)
+ ),
+ (
+ reached_nodes[reached_nodes["cluster"] == -1].buffer(
+ VERY_SMALL_BUFFER_SIZE
+ )
+ ),
+ ]
+ ).union_all()
+
+ isochrones["travel_time"].append(isochrone)
+ isochrones["geometry"].append(isochrone_polygons)
+
+ isochrones = geopandas.GeoDataFrame(
+ isochrones, geometry="geometry", crs=self.EQUIDISTANT_CRS
+ )
+
+ # clip smaller isochrones by larger isochrones
+ # (concave_hull’s ratio parameter depends on input shapes and does not
+ # produce the same results, e.g., around bridges or at the coast line)
+ for row in range(len(isochrones) - 2, 0, -1):
+ isochrones.loc[row, "geometry"] = shapely.intersection(
+ isochrones.loc[row, "geometry"], isochrones.loc[row + 1, "geometry"]
+ )
+
+ isochrones["geometry"] = (
+ isochrones["geometry"]
+ .buffer(CONCAVE_HULL_BUFFER_SIZE)
+ .boundary.apply(
+ lambda geometry: (
+ geometry
+ if isinstance(geometry, shapely.MultiLineString)
+ else shapely.MultiLineString([geometry])
+ )
+ )
+ .apply(
+ lambda multilinestring: (
+ shapely.MultiLineString(
+ [
+ simplification.cutil.simplify_coords_vwp(
+ linestring.coords,
+ self.point_grid_resolution * 5.0,
+ )
+ for linestring in multilinestring.geoms
+ ]
+ )
+ )
+ )
+ .to_crs(R5_CRS)
+ )
+
+ return isochrones
+
+ @property
+ def destinations(self):
+ """A regular grid of points covering the range of the chosen transport mode."""
+ try:
+ return self._destinations
+ except AttributeError:
+ destinations = self._regular_point_grid
+ destinations["geometry"] = self.transport_network.snap_to_network(
+ destinations["geometry"]
+ )
+ destinations = destinations[destinations["geometry"] != EMPTY_POINT]
+ destinations["geometry"] = destinations["geometry"].normalize()
+ destinations = destinations.drop_duplicates()
+
+ # with snapping, sometimes we end up with clumps of points
+ # below, we try to form clusters, from all clusters we retain
+ # one geometry, only
+ destinations = SpatiallyClusteredGeoDataFrame(
+ destinations, eps=(0.5 * self.point_grid_resolution)
+ )
+ destinations = pandas.concat(
+ [
+ (
+ destinations[destinations["cluster"] != -1]
+ .groupby("cluster")
+ .first()
+ .set_crs(R5_CRS)
+ ),
+ destinations[destinations["cluster"] == -1],
+ ]
+ )[["id", "geometry"]].copy()
+
+ if self.point_grid_sample_ratio < 1.0:
+ destinations = destinations.sample(frac=self.point_grid_sample_ratio)
+
+ self._destinations = destinations
+
+ return destinations
+
+ @destinations.setter
+ def destinations(self, destinations):
+ # https://bugs.python.org/issue14965
+ super(self.__class__, self.__class__).destinations.__set__(self, destinations)
+
+ @property
+ def isochrones(self):
+ """
+ Compute isochrones for these travel times.
+
+ pandas.TimedeltaIndex | collections.abc.Iterable[int]
+ An iterable of integers is interpreted as minutes.
+ """
+ try:
+ return self._isochrones
+ except AttributeError:
+ raise
+
+ @isochrones.setter
+ def isochrones(self, isochrones):
+ if not isinstance(isochrones, pandas.TimedeltaIndex):
+ isochrones = pandas.to_timedelta(isochrones, unit="minutes")
+ try:
+ # do not compute for 0 travel time
+ isochrones = isochrones.drop(datetime.timedelta(0))
+ except KeyError:
+ pass
+ self._isochrones = isochrones
+
+ @property
+ def _regular_point_grid(self):
+ extent = shapely.ops.transform(
+ pyproj.Transformer.from_crs(
+ R5_CRS,
+ self.EQUIDISTANT_CRS,
+ always_xy=True,
+ ).transform,
+ self.transport_network.extent,
+ )
+
+ grid = geohexgrid.make_grid_from_bounds(
+ *extent.bounds,
+ self.point_grid_resolution,
+ crs=self.EQUIDISTANT_CRS,
+ )
+ grid["geometry"] = grid["geometry"].centroid
+ grid["id"] = grid.index
+ grid = grid[["id", "geometry"]].to_crs(R5_CRS)
+
+ # for walking and cycling, we can clip the extent to an area reachable
+ # by the (well-defined) travel speeds:
+ if set(self.request.transport_modes) <= set(
+ (TransportMode.WALK, TransportMode.BICYCLE)
+ ):
+ if TransportMode.WALK in self.request.transport_modes:
+ speed = self.request.speed_walking
+ if TransportMode.BICYCLE in self.request.transport_modes:
+ speed = self.request.speed_cycling
+
+ speed = speed * (1000.0 / 3600.0) * 1.1  # km/h -> m/s, plus a bit of buffer
+
+ grid = grid.clip(
+ (
+ pandas.concat([self.origins] * 2)  # workaround until
+ # https://github.com/pyproj4/pyproj/issues/1309 is fixed
+ .to_crs(self.EQUIDISTANT_CRS)
+ .buffer(speed * max(self.isochrones).total_seconds())
+ .to_crs(R5_CRS)
+ )
+ )
+
+ return grid.copy()
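For orientation, a minimal usage sketch of the new Isochrones class, put together from the docstring above; the input files, coordinates and departure time below are placeholders, not part of the release:

import datetime

import shapely

from r5py import Isochrones, TransportMode, TransportNetwork

# placeholder input data; substitute your own OSM extract and GTFS feed
transport_network = TransportNetwork(
    "data/helsinki.osm.pbf",
    ["data/helsinki_gtfs.zip"],
)

isochrones = Isochrones(
    transport_network,
    origins=shapely.Point(24.94, 60.17),  # lon, lat in EPSG:4326
    isochrones=[15, 30, 45, 60],  # plain integers are interpreted as minutes
    transport_modes=[TransportMode.WALK],
    departure=datetime.datetime(2025, 3, 3, 8, 30),
)

# the result is a geopandas.GeoDataFrame with `travel_time` and `geometry` columns
print(isochrones)
isochrones.plot()  # one simplified boundary geometry per travel time threshold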
r5py/r5/street_layer.py CHANGED
@@ -22,6 +22,9 @@ __all__ = ["StreetLayer"]
22
22
  start_jvm()
23
23
 
24
24
 
25
+ EMPTY_POINT = shapely.Point()
26
+
27
+
25
28
  class StreetLayer:
26
29
  """Wrap a com.conveyal.r5.streets.StreetLayer."""
27
30
 
@@ -72,13 +75,14 @@ class StreetLayer:
72
75
  Closest location on the street network or `POINT EMPTY` if no
73
76
  such location could be found within `radius`
74
77
  """
75
- if split := self._street_layer.findSplit(point.y, point.x, radius, street_mode):
78
+ try:
79
+ split = self._street_layer.findSplit(point.y, point.x, radius, street_mode)
76
80
  return shapely.Point(
77
81
  split.fixedLon / com.conveyal.r5.streets.VertexStore.FIXED_FACTOR,
78
82
  split.fixedLat / com.conveyal.r5.streets.VertexStore.FIXED_FACTOR,
79
83
  )
80
- else:
81
- return shapely.Point()
84
+ except (AttributeError, TypeError):
85
+ return EMPTY_POINT
82
86
 
83
87
 
84
88
  @jpype._jcustomizer.JConversion(
r5py/r5/street_segment.py ADDED
@@ -0,0 +1,41 @@
+ #!/usr/bin/env python3
+
+ """A less complex representation of com.conveyal.r5.api.util.StreetSegment."""
+
+
+ import datetime
+
+ import shapely
+
+
+ __all__ = ["StreetSegment"]
+
+
+ class StreetSegment:
+ """A less complex representation of com.conveyal.r5.api.util.StreetSegment."""
+
+ distance = 0
+ duration = datetime.timedelta()
+ geometry = shapely.LineString()
+
+ def __init__(self, street_path):
+ """
+ Initialise a less complex representation of com.conveyal.r5.api.util.StreetSegment.
+
+ Arguments
+ ---------
+ street_path : com.conveyal.r5.profile.StreetPath
+ StreetPath, obtained, e.g., from StreetRouter state
+ """
+ self.distance = street_path.getDistance()
+ self.duration = street_path.getDuration()
+ self.geometry = shapely.line_merge(
+ shapely.MultiLineString(
+ [
+ shapely.from_wkt(
+ str(street_path.getEdge(edge).getGeometry().toText())
+ )
+ for edge in street_path.getEdges()
+ ]
+ )
+ )
r5py/r5/transport_network.py CHANGED
@@ -5,24 +5,22 @@
 
 
 import functools
+ import hashlib
 import pathlib
- import random
- import shutil
- import time
 import warnings
 
- import filelock
 import jpype
 import jpype.types
 
 from .street_layer import StreetLayer
 from .transit_layer import TransitLayer
 from .transport_mode import TransportMode
- from ..util import Config, contains_gtfs_data, start_jvm
+ from ..util import Config, contains_gtfs_data, FileDigest, start_jvm, WorkingCopy
 
 import com.conveyal.gtfs
 import com.conveyal.osmlib
 import com.conveyal.r5
+ import java.io
 
 
 __all__ = ["TransportNetwork"]
@@ -48,114 +46,63 @@ class TransportNetwork:
 gtfs : str | pathlib.Path | list[str] | list[pathlib.Path]
 path(s) to public transport schedule information in GTFS format
 """
- osm_pbf = self._working_copy(pathlib.Path(osm_pbf)).absolute()
+ osm_pbf = WorkingCopy(osm_pbf)
 if isinstance(gtfs, (str, pathlib.Path)):
 gtfs = [gtfs]
- gtfs = [str(self._working_copy(path).absolute()) for path in gtfs]
+ gtfs = [WorkingCopy(path) for path in gtfs]
 
- transport_network = com.conveyal.r5.transit.TransportNetwork()
- transport_network.scenarioId = PACKAGE
-
- osm_mapdb = pathlib.Path(f"{osm_pbf}.mapdb")
- osm_file = com.conveyal.osmlib.OSM(f"{osm_mapdb}")
- osm_file.intersectionDetection = True
- osm_file.readFromFile(f"{osm_pbf}")
-
- self.osm_file = osm_file  # keep the mapdb open, close in destructor
-
- transport_network.streetLayer = com.conveyal.r5.streets.StreetLayer()
- transport_network.streetLayer.loadFromOsm(osm_file)
- transport_network.streetLayer.parentNetwork = transport_network
- transport_network.streetLayer.indexStreets()
-
- transport_network.transitLayer = com.conveyal.r5.transit.TransitLayer()
- for gtfs_file in gtfs:
- gtfs_feed = com.conveyal.gtfs.GTFSFeed.readOnlyTempFileFromGtfs(gtfs_file)
- transport_network.transitLayer.loadFromGtfs(gtfs_feed)
- gtfs_feed.close()
- transport_network.transitLayer.parentNetwork = transport_network
-
- transport_network.streetLayer.associateStops(transport_network.transitLayer)
- transport_network.streetLayer.buildEdgeLists()
-
- transport_network.transitLayer.rebuildTransientIndexes()
+ # a hash representing all input files
+ digest = hashlib.sha256(
+ "".join([FileDigest(osm_pbf)] + [FileDigest(path) for path in gtfs]).encode(
+ "utf-8"
+ )
+ ).hexdigest()
 
- transfer_finder = com.conveyal.r5.transit.TransferFinder(transport_network)
- transfer_finder.findTransfers()
- transfer_finder.findParkRideTransfer()
+ try:
+ transport_network = self._load_pickled_transport_network(
+ Config().CACHE_DIR / f"{digest}.transport_network"
+ )
+ except FileNotFoundError:
+ transport_network = com.conveyal.r5.transit.TransportNetwork()
+ transport_network.scenarioId = PACKAGE
+
+ osm_mapdb = Config().CACHE_DIR / f"{digest}.mapdb"
+ osm_file = com.conveyal.osmlib.OSM(f"{osm_mapdb}")
+ osm_file.intersectionDetection = True
+ osm_file.readFromFile(f"{osm_pbf}")
+
+ transport_network.streetLayer = com.conveyal.r5.streets.StreetLayer()
+ transport_network.streetLayer.parentNetwork = transport_network
+ transport_network.streetLayer.loadFromOsm(osm_file)
+ transport_network.streetLayer.indexStreets()
+
+ transport_network.transitLayer = com.conveyal.r5.transit.TransitLayer()
+ transport_network.transitLayer.parentNetwork = transport_network
+ for gtfs_file in gtfs:
+ gtfs_feed = com.conveyal.gtfs.GTFSFeed.readOnlyTempFileFromGtfs(
+ f"{gtfs_file}"
+ )
+ transport_network.transitLayer.loadFromGtfs(gtfs_feed)
+ gtfs_feed.close()
 
- transport_network.transitLayer.buildDistanceTables(None)
+ transport_network.streetLayer.associateStops(transport_network.transitLayer)
+ transport_network.streetLayer.buildEdgeLists()
 
- self._transport_network = transport_network
+ transport_network.transitLayer.rebuildTransientIndexes()
 
- def __del__(self):
- """Delete all temporary files upon destruction."""
- MAX_TRIES = 10
+ transfer_finder = com.conveyal.r5.transit.TransferFinder(transport_network)
+ transfer_finder.findTransfers()
+ transfer_finder.findParkRideTransfer()
 
- # first, close the open osm_file,
- # delete Java objects, and
- # trigger Java garbage collection
- try:
- self.osm_file.close()
- except jpype.JVMNotRunning:
- # JVM was stopped already, file should be closed
- pass
- try:
- del self.street_layer
- except AttributeError:  # might not have been accessed a single time
- pass
- try:
- del self.transit_layer
- except AttributeError:
- pass
- try:
- del self._transport_network
- except AttributeError:
- pass
+ transport_network.transitLayer.buildDistanceTables(None)
 
- time.sleep(1.0)
- try:
- jpype.java.lang.System.gc()
- except jpype.JVMNotRunning:
- pass
+ osm_file.close()  # not needed after here?
 
- # then, try to delete all files in cache directory
- try:
- temporary_files = [child for child in self._cache_directory.iterdir()]
- except FileNotFoundError:  # deleted in the meantime/race condition
- temporary_files = []
-
- for _ in range(MAX_TRIES):
- for temporary_file in temporary_files:
- try:
- temporary_file.unlink()
- temporary_files.remove(temporary_file)
- except (FileNotFoundError, IOError, OSError):
- print(
- f"could not delete {temporary_file}, keeping in {temporary_files}"
- )
- pass
-
- if not temporary_files:  # empty
- break
-
- # there are still files open, let’s wait a moment and try again
- time.sleep(0.1)
- else:
- remaining_files = ", ".join(
- [f"{temporary_file}" for temporary_file in temporary_files]
- )
- warnings.warn(
- f"Failed to clean cache directory ‘{self._cache_directory}’. "
- f"Remaining file(s): {remaining_files}",
- RuntimeWarning,
+ self._save_pickled_transport_network(
+ transport_network, Config().CACHE_DIR / f"{digest}.transport_network"
 )
 
- # finally, try to delete the cache directory itself
- try:
- self._cache_directory.rmdir()
- except OSError:  # not empty
- pass  # the JVM destructor is going to take care of this
+ self._transport_network = transport_network
 
 @classmethod
 def from_directory(cls, path):
@@ -219,56 +166,22 @@ class TransportNetwork:
 # then find the smaller extent of the two (or the larger one?)
 return self.street_layer.extent
 
- @functools.cached_property
- def _cache_directory(self):
- cache_dir = (
- pathlib.Path(Config().TEMP_DIR)
- / f"{self.__class__.__name__:s}_{id(self):x}_{random.randrange(16**5):07x}"
- )
- cache_dir.mkdir(exist_ok=True)
- return cache_dir
-
- def _working_copy(self, input_file):
- """Create a copy or link of an input file in a cache directory.
-
- This method exists because R5 creates temporary files in the
- directory of input files. This can not only be annoying clutter,
- but also create problems of concurrency, performance, etc., for
- instance, when the data comes from a shared network drive or a
- read-only file system.
-
- Arguments
- ---------
- input_file : str or pathlib.Path
- The file to create a copy or link of in a cache directory
-
- Returns
- -------
- pathlib.Path
- The path to the copy or link created
- """
- # try to first create a symbolic link, if that fails (e.g., on Windows),
- # copy the file to a cache directory
- input_file = pathlib.Path(input_file).absolute()
- destination_file = pathlib.Path(
- self._cache_directory / input_file.name
- ).absolute()
-
- with filelock.FileLock(
- destination_file.parent / f"{destination_file.name}.lock"
- ):
- if not destination_file.exists():
- try:
- destination_file.symlink_to(input_file)
- except OSError:
- shutil.copyfile(str(input_file), str(destination_file))
- return destination_file
-
 
 @property
 def linkage_cache(self):
 """Expose the `TransportNetwork`’s `linkageCache` to Python."""
 return self._transport_network.linkageCache
 
+ def _load_pickled_transport_network(self, path):
+ try:
+ input_file = java.io.File(f"{path}")
+ return com.conveyal.r5.kryo.KryoNetworkSerializer.read(input_file)
+ except java.io.FileNotFoundException:
+ raise FileNotFoundError
+
+ def _save_pickled_transport_network(self, transport_network, path):
+ output_file = java.io.File(f"{path}")
+ com.conveyal.r5.kryo.KryoNetworkSerializer.write(transport_network, output_file)
+
 def snap_to_network(
 self,
 points,
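The constructor above now derives a cache key from the input files and reuses a Kryo-serialised network when one exists in the cache directory. The following helper is illustrative only (it is not part of r5py) and merely mirrors how that cache path is assembled in the code above:

import hashlib

from r5py.util import Config, FileDigest

def transport_network_cache_path(osm_pbf, gtfs_files):
    """Combine per-file digests into the cache file name used above (illustrative sketch)."""
    combined = "".join([FileDigest(osm_pbf)] + [FileDigest(path) for path in gtfs_files])
    digest = hashlib.sha256(combined.encode("utf-8")).hexdigest()
    return Config().CACHE_DIR / f"{digest}.transport_network"

# e.g. (paths are placeholders):
# transport_network_cache_path("data/helsinki.osm.pbf", ["data/helsinki_gtfs.zip"])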
r5py/r5/travel_time_matrix.py CHANGED
@@ -78,6 +78,7 @@ class TravelTimeMatrix(BaseTravelTimeMatrix):
 data = self._compute()
 for column in data.columns:
 self[column] = data[column]
+ del self.transport_network
 
 def _compute(self):
 """
r5py/r5/trip_planner.py CHANGED
@@ -17,6 +17,7 @@ import shapely
 from .access_leg import AccessLeg
 from .direct_leg import DirectLeg
 from .egress_leg import EgressLeg
+ from .street_segment import StreetSegment
 from .transfer_leg import TransferLeg
 from .transit_leg import TransitLeg
 from .transport_mode import TransportMode
@@ -64,7 +65,9 @@ class TripPlanner:
 
 EQUIDISTANT_CRS = GoodEnoughEquidistantCrs(self.transport_network.extent)
 self._crs_transformer_function = pyproj.Transformer.from_crs(
- R5_CRS, EQUIDISTANT_CRS
+ R5_CRS,
+ EQUIDISTANT_CRS,
+ always_xy=True,
 ).transform
 
 @property
@@ -165,17 +168,13 @@ class TripPlanner:
 return direct_paths
 
 def _street_segment_from_router_state(self, router_state, transport_mode):
- """Retrieve a com.conveyal.r5.street.StreetSegment for a route."""
+ """Retrieve a StreetSegment for a route."""
 street_path = com.conveyal.r5.profile.StreetPath(
 router_state,
 self.transport_network,
 False,
 )
- street_segment = com.conveyal.r5.api.util.StreetSegment(
- street_path,
- transport_mode,
- self.transport_network.street_layer,
- )
+ street_segment = StreetSegment(street_path)
 return street_segment
 
 @functools.cached_property
r5py/util/__init__.py CHANGED
@@ -8,18 +8,24 @@ from .camel_to_snake_case import camel_to_snake_case
 from .config import Config
 from .contains_gtfs_data import contains_gtfs_data
 from .data_validation import check_od_data_set
+ from .file_digest import FileDigest
 from .good_enough_equidistant_crs import GoodEnoughEquidistantCrs
 from .jvm import start_jvm
 from .parse_int_date import parse_int_date
 from .snake_to_camel_case import snake_to_camel_case
+ from .spatially_clustered_geodataframe import SpatiallyClusteredGeoDataFrame
+ from .working_copy import WorkingCopy
 
 __all__ = [
 "camel_to_snake_case",
 "check_od_data_set",
 "Config",
 "contains_gtfs_data",
+ "FileDigest",
 "GoodEnoughEquidistantCrs",
 "parse_int_date",
 "snake_to_camel_case",
+ "SpatiallyClusteredGeoDataFrame",
 "start_jvm",
+ "WorkingCopy",
 ]
r5py/util/config.py CHANGED
@@ -2,6 +2,7 @@
 
 """Handle configuration options and command line options."""
 
+ import datetime
 import functools
 import importlib.resources
 import os
@@ -19,6 +20,7 @@ PACKAGE = __package__.split(".")[0]
 CONFIG_FILE_TEMPLATE = importlib.resources.files(f"{PACKAGE}.util").joinpath(
 f"{PACKAGE}.yml.template"
 )
+ CACHE_MAX_AGE = datetime.timedelta(weeks=2)
 
 if "HOME" not in os.environ:  # e.g., testing environment or container
 os.environ["HOME"] = "."
@@ -73,11 +75,25 @@ class Config:
 pathlib.Path(
 os.environ.get("LOCALAPPDATA")
 or os.environ.get("XDG_CACHE_HOME")
- or (pathlib.Path(os.environ["HOME"]) / ".cache")
+ or (pathlib.Path(os.environ.get("HOME")) / ".cache")
 )
 / PACKAGE
 )
 cache_dir.mkdir(parents=True, exist_ok=True)
+
+ # clean old files to keep cache dir from growing too much
+ cache_treshold = (datetime.datetime.now() - CACHE_MAX_AGE).timestamp()
+ for cached_file in cache_dir.glob("**/*"):
+ try:
+ *_, atime, mtime, _ = cached_file.stat()
+ assert max(atime, mtime) > cache_treshold
+ except (
+ AssertionError,  # expired
+ FileNotFoundError,  # broken symlink
+ PermissionError,
+ ):
+ cached_file.unlink()
+
 return cache_dir
 
 @functools.cached_property
@@ -88,7 +104,7 @@ class Config:
 pathlib.Path(
 os.environ.get("APPDATA")
 or os.environ.get("XDG_CONFIG_HOME")
- or (pathlib.Path(os.environ["HOME"]) / ".config")
+ or (pathlib.Path(os.environ.get("HOME")) / ".config")
 )
 / f"{PACKAGE}.yml",
 ]
r5py/util/file_digest.py ADDED
@@ -0,0 +1,42 @@
+ #!/usr/bin/env python3
+
+ """Create a hash sum of a file."""
+
+
+ import hashlib
+ import pathlib
+
+
+ __all__ = ["FileDigest"]
+
+
+ BUFFER_SIZE = 64 * 1024
+
+
+ class FileDigest(str):
+ """Create a hash sum of a file."""
+
+ def __new__(cls, input_file, digest="blake2s"):
+ """
+ Create a hash sum of a file.
+
+ Arguments
+ ---------
+ input_file : pathlib.Path | str
+ for which file to compute a hash digest
+ digest : str | func
+ name of hash algorithm (s.
+ https://docs.python.org/3/library/hashlib.html) or function that
+ returns a hash sum
+ """
+ input_file = pathlib.Path(input_file)
+ try:
+ with input_file.open("rb") as f:
+ hashdigest = hashlib.file_digest(f, digest)
+ except AttributeError:  # Python<=3.10
+ hashdigest = hashlib.new(digest)
+ with input_file.open("rb") as f:
+ while data := f.read(BUFFER_SIZE):
+ hashdigest.update(data)
+
+ return hashdigest.hexdigest()
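A short usage sketch of FileDigest; the file path below is a placeholder:

from r5py.util import FileDigest

# returns the hex digest of the file's contents, BLAKE2s by default
checksum = FileDigest("data/helsinki.osm.pbf")
print(checksum)  # 64 hexadecimal characters for blake2s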
r5py/util/good_enough_equidistant_crs.py CHANGED
@@ -10,6 +10,11 @@ import shapely
 from .exceptions import UnexpectedCrsError
 
 
+ FALLBACK_CRS = 3857
+ DATUM_NAME = "WGS 84"
+ VERY_SMALL_BUFFER_SIZE = 0.001
+
+
 class GoodEnoughEquidistantCrs(pyproj.CRS):
 """
 Find the most appropriate UTM reference system for the current extent.
@@ -33,13 +38,13 @@ class GoodEnoughEquidistantCrs(pyproj.CRS):
 """
 if GoodEnoughEquidistantCrs._is_plausible_in_epsg4326(extent):
 # default CRS in case we do not find any better match
- crs = pyproj.CRS.from_epsg(3857)
+ crs = pyproj.CRS.from_epsg(FALLBACK_CRS)
 
 # buffer extent (so everything is a polygon)
- extent = extent.buffer(0.1)
+ extent = extent.buffer(VERY_SMALL_BUFFER_SIZE)
 
 crsinfo = pyproj.database.query_utm_crs_info(
- datum_name="WGS 84",
+ datum_name=DATUM_NAME,
 area_of_interest=pyproj.aoi.AreaOfInterest(*extent.bounds),
 )
 for candidate_crs in crsinfo:
r5py/util/spatially_clustered_geodataframe.py ADDED
@@ -0,0 +1,78 @@
+ #!/usr/bin/env python3
+
+
+ """Assign a cluster label column to a point-geometry GeoDataFrame."""
+
+
+ import warnings
+
+ import geopandas
+ import numpy
+ import shapely
+ import sklearn.cluster
+
+ from .good_enough_equidistant_crs import GoodEnoughEquidistantCrs
+
+
+ __all__ = ["SpatiallyClusteredGeoDataFrame"]
+
+
+ class SpatiallyClusteredGeoDataFrame(geopandas.GeoDataFrame):
+ """Assign a cluster label column to a point-geometry GeoDataFrame."""
+
+ def __init__(self, data, *args, eps=200.0, min_cluster_size=3, **kwargs):
+ """
+ Assign a cluster label column to a point-geometry GeoDataFrame.
+
+ Arguments:
+ ----------
+ data : geopandas.GeoDataFrame
+ input data set
+ eps : int | float
+ EPS parameter to a DBSCAN cluster algorithm, the maximum
+ intra-cluster distance between two points
+ *args, **kwargs: passed to geopandas.GeoDataFrame.__init__()
+ """
+ geopandas.GeoDataFrame.__init__(self, *args, **kwargs)
+
+ EQUIDISTANT_CRS = GoodEnoughEquidistantCrs(
+ shapely.box(*data.to_crs("EPSG:4326").geometry.total_bounds)
+ )
+
+ # loosely based on:
+ # https://github.com/geopandas/scipy2018-geospatial-data/blob/master/08-clustering.ipynb
+
+ coordinates = numpy.vstack(
+ data.to_crs(EQUIDISTANT_CRS)["geometry"]
+ .apply(lambda geometry: numpy.hstack(geometry.xy))
+ .values
+ )
+
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ "ignore",
+ "Could not find the number of physical cores",
+ category=UserWarning,
+ )
+ data["cluster"] = (
+ sklearn.cluster.DBSCAN(
+ eps=eps,
+ min_samples=min_cluster_size,
+ n_jobs=-1,
+ )
+ .fit(coordinates)
+ .labels_
+ )
+
+ with warnings.catch_warnings():
+ warnings.filterwarnings(
+ "ignore",
+ message=(
+ "You are adding a column named 'geometry' to a GeoDataFrame "
+ "constructed without an active geometry column"
+ ),
+ category=FutureWarning,
+ )
+ for column in data.columns:
+ self[column] = data[column]
+ self.set_geometry("geometry")
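A minimal sketch of how the new helper can be used on its own, following the docstring above; the coordinates are made up for illustration:

import geopandas
import shapely

from r5py.util import SpatiallyClusteredGeoDataFrame

# toy point data set in EPSG:4326
points = geopandas.GeoDataFrame(
    {"id": [0, 1, 2, 3]},
    geometry=[
        shapely.Point(24.940, 60.170),
        shapely.Point(24.941, 60.171),
        shapely.Point(24.942, 60.170),
        shapely.Point(25.100, 60.300),  # far away, ends up labelled as noise
    ],
    crs="EPSG:4326",
)

clustered = SpatiallyClusteredGeoDataFrame(points, eps=200.0, min_cluster_size=3)
print(clustered[["id", "cluster"]])  # DBSCAN labels; -1 marks noise points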
r5py/util/working_copy.py ADDED
@@ -0,0 +1,44 @@
+ #!/usr/bin/env python3
+
+ """Create a copy or link of an input file in a cache directory."""
+
+
+ import filelock
+ import pathlib
+ import shutil
+
+ from .config import Config
+
+
+ __all__ = ["WorkingCopy"]
+
+
+ class WorkingCopy(pathlib.Path):
+ """Create a copy or link of an input file in a cache directory."""
+
+ def __new__(cls, path):
+ """
+ Create a copy or link of an input file in a cache directory.
+
+ This exists because R5 creates temporary files in the directory of input
+ files. This can not only be annoying clutter, but also create problems
+ of concurrency, performance, etc., for instance, when the data comes
+ from a shared network drive or a read-only file system.
+
+ Arguments
+ ---------
+ path : str or pathlib.Path
+ The file to create a copy or link of in a cache directory
+ """
+ # try to first create a symbolic link, if that fails (e.g., on Windows),
+ # copy the file to a cache directory
+ path = pathlib.Path(path).absolute()
+ destination = pathlib.Path(Config().CACHE_DIR / path.name).absolute()
+
+ with filelock.FileLock(destination.parent / f"{destination.name}.lock"):
+ if not destination.exists():
+ try:
+ destination.symlink_to(path)
+ except OSError:
+ shutil.copyfile(f"{path}", f"{destination}")
+ return destination
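A usage sketch; the path is a placeholder for, e.g., a file on a shared network drive:

from r5py.util import WorkingCopy

# links (or, where symlinks are unavailable, copies) the input file into
# r5py's cache directory and returns the path of that working copy
local_copy = WorkingCopy("/mnt/shared/data/helsinki.osm.pbf")
print(local_copy)  # <cache dir>/helsinki.osm.pbf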
r5py-1.0.0.dev11.dist-info/METADATA → r5py-1.0.1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: r5py
- Version: 1.0.0.dev11
+ Version: 1.0.1
 Summary: Python wrapper for the R5 routing analysis engine
 Author: Christoph Fink, Willem Klumpenhouwer, Marcus Sairava, Rafael Pereira, Henrikki Tenkanen
 License: GPL-3.0-or-later or MIT
@@ -13,12 +13,12 @@ Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
- Requires-Python: >=3.9
+ Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: ConfigArgParse
 Requires-Dist: filelock
- Requires-Dist: fiona
+ Requires-Dist: geohexgrid
 Requires-Dist: geopandas
 Requires-Dist: joblib
 Requires-Dist: jpype1
@@ -27,7 +27,10 @@ Requires-Dist: pandas>=2.1.0
 Requires-Dist: psutil
 Requires-Dist: pyproj
 Requires-Dist: requests
+ Requires-Dist: scikit-learn
 Requires-Dist: shapely>=2.0
+ Requires-Dist: simplification
+ Requires-Dist: typing_extensions; python_version < "3.13"
 Provides-Extra: docs
 Requires-Dist: contextily; extra == "docs"
 Requires-Dist: folium; extra == "docs"
@@ -68,7 +71,7 @@ Requires-Dist: typing-extensions; extra == "tests"
 [![downloads (pypi)][downloads-pypi-badge]][downloads-pypi-link]
 [![downloads (conda-forge)][downloads-conda-forge-badge]][downloads-conda-forge-link]
 <br />
- [![Trunk build status][build-status-badge]][build-status-link]
+ [![Unit tests][test-status-badge]][test-status-link]
 [![Documentation Status][rtd-status-badge]][rtd-status-link]
 [![Coverage][coverage-badge]][coverage-link]
 <br />
@@ -135,9 +138,7 @@ your project better.
 
 <!-- (1) badges -->
  [binder-badge]: https://img.shields.io/badge/Try%20r5py%20with-binder-F5A252.svg?logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAFkAAABZCAMAAABi1XidAAAB8lBMVEX///9XmsrmZYH1olJXmsr1olJXmsrmZYH1olJXmsr1olJXmsrmZYH1olL1olJXmsr1olJXmsrmZYH1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olJXmsrmZYH1olL1olL0nFf1olJXmsrmZYH1olJXmsq8dZb1olJXmsrmZYH1olJXmspXmspXmsr1olL1olJXmsrmZYH1olJXmsr1olL1olJXmsrmZYH1olL1olLeaIVXmsrmZYH1olL1olL1olJXmsrmZYH1olLna31Xmsr1olJXmsr1olJXmsrmZYH1olLqoVr1olJXmsr1olJXmsrmZYH1olL1olKkfaPobXvviGabgadXmsqThKuofKHmZ4Dobnr1olJXmsr1olJXmspXmsr1olJXmsrfZ4TuhWn1olL1olJXmsqBi7X1olJXmspZmslbmMhbmsdemsVfl8ZgmsNim8Jpk8F0m7R4m7F5nLB6jbh7jbiDirOEibOGnKaMhq+PnaCVg6qWg6qegKaff6WhnpKofKGtnomxeZy3noG6dZi+n3vCcpPDcpPGn3bLb4/Mb47UbIrVa4rYoGjdaIbeaIXhoWHmZYHobXvpcHjqdHXreHLroVrsfG/uhGnuh2bwj2Hxk17yl1vzmljzm1j0nlX1olL3AJXWAAAAbXRSTlMAEBAQHx8gICAuLjAwMDw9PUBAQEpQUFBXV1hgYGBkcHBwcXl8gICAgoiIkJCQlJicnJ2goKCmqK+wsLC4usDAwMjP0NDQ1NbW3Nzg4ODi5+3v8PDw8/T09PX29vb39/f5+fr7+/z8/Pz9/v7+zczCxgAABC5JREFUeAHN1ul3k0UUBvCb1CTVpmpaitAGSLSpSuKCLWpbTKNJFGlcSMAFF63iUmRccNG6gLbuxkXU66JAUef/9LSpmXnyLr3T5AO/rzl5zj137p136BISy44fKJXuGN/d19PUfYeO67Znqtf2KH33Id1psXoFdW30sPZ1sMvs2D060AHqws4FHeJojLZqnw53cmfvg+XR8mC0OEjuxrXEkX5ydeVJLVIlV0e10PXk5k7dYeHu7Cj1j+49uKg7uLU61tGLw1lq27ugQYlclHC4bgv7VQ+TAyj5Zc/UjsPvs1sd5cWryWObtvWT2EPa4rtnWW3JkpjggEpbOsPr7F7EyNewtpBIslA7p43HCsnwooXTEc3UmPmCNn5lrqTJxy6nRmcavGZVt/3Da2pD5NHvsOHJCrdc1G2r3DITpU7yic7w/7Rxnjc0kt5GC4djiv2Sz3Fb2iEZg41/ddsFDoyuYrIkmFehz0HR2thPgQqMyQYb2OtB0WxsZ3BeG3+wpRb1vzl2UYBog8FfGhttFKjtAclnZYrRo9ryG9uG/FZQU4AEg8ZE9LjGMzTmqKXPLnlWVnIlQQTvxJf8ip7VgjZjyVPrjw1te5otM7RmP7xm+sK2Gv9I8Gi++BRbEkR9EBw8zRUcKxwp73xkaLiqQb+kGduJTNHG72zcW9LoJgqQxpP3/Tj//c3yB0tqzaml05/+orHLksVO+95kX7/7qgJvnjlrfr2Ggsyx0eoy9uPzN5SPd86aXggOsEKW2Prz7du3VID3/tzs/sSRs2w7ovVHKtjrX2pd7ZMlTxAYfBAL9jiDwfLkq55Tm7ifhMlTGPyCAs7RFRhn47JnlcB9RM5T97ASuZXIcVNuUDIndpDbdsfrqsOppeXl5Y+XVKdjFCTh+zGaVuj0d9zy05PPK3QzBamxdwtTCrzyg/2Rvf2EstUjordGwa/kx9mSJLr8mLLtCW8HHGJc2R5hS219IiF6PnTusOqcMl57gm0Z8kanKMAQg0qSyuZfn7zItsbGyO9QlnxY0eCuD1XL2ys/MsrQhltE7Ug0uFOzufJFE2PxBo/YAx8XPPdDwWN0MrDRYIZF0mSMKCNHgaIVFoBbNoLJ7tEQDKxGF0kcLQimojCZopv0OkNOyWCCg9XMVAi7ARJzQdM2QUh0gmBozjc3Skg6dSBRqDGYSUOu66Zg+I2fNZs/M3/f/Grl/XnyF1Gw3VKCez0PN5IUfFLqvgUN4C0qNqYs5YhPL+aVZYDE4IpUk57oSFnJm4FyCqqOE0jhY2SMyLFoo56zyo6becOS5UVDdj7Vih0zp+tcMhwRpBeLyqtIjlJKAIZSbI8SGSF3k0pA3mR5tHuwPFoa7N7reoq2bqCsAk1HqCu5uvI1n6JuRXI+S1Mco54YmYTwcn6Aeic+kssXi8XpXC4V3t7/ADuTNKaQJdScAAAAAElFTkSuQmCC
- [binder-link]: https://notebooks.gesis.org/binder/v2/gh/r5py/r5py/stable?urlpath=tree/docs/user-guide/user-manual/quickstart.md
- [build-status-badge]: https://github.com/r5py/r5py/actions/workflows/build-merged-pull-requests.yml/badge.svg
- [build-status-link]: https://github.com/r5py/r5py/actions/workflows/build-merged-pull-requests.yml
+ [binder-link]: https://mybinder.org/v2/gh/r5py/r5py/stable?urlpath=tree/docs/user-guide/user-manual/quickstart.md
 [coverage-badge]: https://codecov.io/gh/r5py/r5py/branch/main/graph/badge.svg?token=WG8RBMZBK6
 [coverage-link]: https://codecov.io/gh/r5py/r5py
 [doi-badge]: https://zenodo.org/badge/DOI/10.5281/zenodo.7060437.svg
@@ -150,11 +151,13 @@ your project better.
 [rtd-status-link]: https://r5py.readthedocs.io/
 [stable-version-badge]: https://img.shields.io/pypi/v/r5py?label=Stable
 [stable-version-link]: https://github.com/r5py/r5py/releases
+ [test-status-badge]: https://github.com/r5py/r5py/actions/workflows/test.yml/badge.svg
+ [test-status-link]: https://github.com/r5py/r5py/actions/workflows/test.yml
 
 <!-- (2) other links -->
 [conda-create-env-from-yml]: https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#creating-an-environment-from-an-environment-yml-file
 [conveyal]: https://www.conveyal.com/
- [env-file]: https://github.com/r5py/r5py/blob/main/ci/r5py_distro.yaml
+ [env-file]: https://github.com/r5py/r5py/blob/main/ci/r5py.yaml
 [geopandas]: https://geopandas.org/
 [r5-github]: https://github.com/conveyal/r5/
 [r5r-github]: https://github.com/ipeaGIT/r5r/
r5py-1.0.0.dev11.dist-info/RECORD → r5py-1.0.1.dist-info/RECORD
@@ -1,42 +1,47 @@
- r5py/__init__.py,sha256=Ox4bOJgqUvWbaRmVEBBaNPpg6rP3OwlVdAbWMfIOueA,518
+ r5py/__init__.py,sha256=G4_W_JrAJL28AYFzW0kImNgE-wAfjb15kpCwQulRuNc,546
 r5py/__main__.py,sha256=Wvn0ChD7E-dCSZ8b8k_HhHG0KMOk0qMNFkijGuSH3-0,81
- r5py/r5/__init__.py,sha256=N_55XlwBDUtljFEHIDYSFb4pPPHXsHdPfF-8qu9IjhQ,1071
+ r5py/r5/__init__.py,sha256=6IQpvStxKeNxflizfRWh05USpdM18KBSB3UZ8Z_cGY4,1124
 r5py/r5/access_leg.py,sha256=W3GfPEpqmWD1c4xipd6UcVIaBC-yb6srGCZV30E2dPY,293
 r5py/r5/base_travel_time_matrix.py,sha256=Vl82Wkk2iANNy6L3r937yXNnQ9lmMOErGT_-fQnb1Ms,6978
 r5py/r5/breakdown_stat.py,sha256=ZQkWA0hXlcRH3KVgtxPSNHP0FUDri8MWqdFk8EUdDMU,533
- r5py/r5/detailed_itineraries.py,sha256=kLIlMMMGC3pbjzkhIQDx9AdQ7wFBcMzCteB8E7JLdQQ,10964
+ r5py/r5/detailed_itineraries.py,sha256=Oo8JnF5jM2FsYFR1ma9r4y3evOmU7itDYs5M4vbqrZo,11245
 r5py/r5/direct_leg.py,sha256=T7wX8puhOVIssCpflXthYs-G9OA8pasFbdz9p8k8teg,1054
 r5py/r5/egress_leg.py,sha256=9rsCIcwlZUzoZE6q4imNY3VWpjJepO1IJvheVrlPi90,297
+ r5py/r5/isochrones.py,sha256=NNpV3Df4NeLdDksGERkb2Eos33ziMBGEeaWyCKPt5P8,12974
 r5py/r5/regional_task.py,sha256=wTNx2NT3-GCEvDyz0e-_YYkVWtpE66dg2IlXTA1gI-4,23234
 r5py/r5/scenario.py,sha256=nUNAlN3cO7E_b4sMpNqdL0FD7WQaQ49iIvh-k8l4YRM,763
- r5py/r5/street_layer.py,sha256=iGlAWftzmwzaRUpXngis7prVuH3Oq8i-AXS8-pnVXMk,2259
+ r5py/r5/street_layer.py,sha256=2AWhIE0-aTNGQenX6bF1xv5bmhR_LV0CgqM4BKgVYfk,2329
+ r5py/r5/street_segment.py,sha256=0O0QV8Eyfss-xHJShKGSQV1IusZfTrrxzu_AWl3KACo,1109
 r5py/r5/transfer_leg.py,sha256=_IpzQJAyW4hDPO5V4k-ZjIPd3uyxhHPa4U6_b8UbKt4,311
 r5py/r5/transit_layer.py,sha256=vVo_o10yDCzpujOQ99xdzmznwVjAbANjdDflQy2QOpI,3223
 r5py/r5/transit_leg.py,sha256=R0Qc9YLMEXYu51NIdo7Q0bdmpYIJf5irEDXWrW6pZWE,221
 r5py/r5/transport_mode.py,sha256=zHSqXb0R4oyjTp069CzO69IgoCKt0nmOAwsSy272rGo,3675
- r5py/r5/transport_network.py,sha256=d4PPBEBk3t2QbUI5KMS9zM-a4s4E4zEYOHIV6txCnYg,10777
- r5py/r5/travel_time_matrix.py,sha256=jrOt4n9kfShYGF5dou7xgKCEw8FBfyqc0wZeQZqD4wQ,7968
+ r5py/r5/transport_network.py,sha256=wy7jsIqcladee6FdUiOu0kJgMhGYymtJpfyjX9RRnNU,7925
+ r5py/r5/travel_time_matrix.py,sha256=Z_ErylB8mMD_eO2BogV3K_OFdYFVCcmIPmcMe7GGRiU,8003
 r5py/r5/trip.py,sha256=AqhlhgYaGRL5jVzV08BhsqgWxe8f4wAb5HMP8HIGwc8,2944
 r5py/r5/trip_leg.py,sha256=9E4vZpBEJCXIVqAXWJvnPloC-upEASKhFnjiuen8i8A,6495
- r5py/r5/trip_planner.py,sha256=qUzzTA3PHcHkN7kNzAywhLFQswGDyPSgtkkvPZ9eJVQ,23819
- r5py/util/__init__.py,sha256=S-agt-08twU7hFIH1_x_VjuNC-WHfP6844n0xM0E8t8,714
+ r5py/r5/trip_planner.py,sha256=QM3kSx-EC1VWtmReMtwxzG0CdbtlRi3-PpLxOAoHGrA,23754
+ r5py/util/__init__.py,sha256=3iqzebRt7RE1TMzzuGNzyXca0SBcBx1rHLs8eW3ijo4,940
 r5py/util/camel_to_snake_case.py,sha256=zj5F3PNBvsuS6vqN4USeeo8NI-3hnscGhwun0G95AK0,673
 r5py/util/classpath.py,sha256=b16xL94pDxTpc0vrf68R1nvZHnHqZXGcFJaN36eW3wc,2773
- r5py/util/config.py,sha256=sAT3jJI69KH80fQsy9xi9-PkEUoOabLvLys9djRBils,4667
+ r5py/util/config.py,sha256=5jz42iUaftgBfJ2HNnktZw5oXIPE2ytl3Nxt2RjjDoM,5267
 r5py/util/contains_gtfs_data.py,sha256=ooX4hfVDKK0aqX1MI46jSFZ7dZ6riyXaORrgF6PUFrk,1211
 r5py/util/data_validation.py,sha256=H5Mcp2nS4vu5RKym20mPnGpl-8d0SDchzDRJBrrL6WE,1039
 r5py/util/environment.py,sha256=cbSM8TKTuhbXsTIIB06pMtydBOiqLkitF2Lj2asVTho,1082
 r5py/util/exceptions.py,sha256=r65XUg_AJ_bTw8ARNj7A2-GbFZlSTrOAjDynx1pSD2Y,1049
- r5py/util/good_enough_equidistant_crs.py,sha256=1aqJLghNwcd2FbLfODcht_6pyOEqhsrE2KPaC3NLoek,2354
+ r5py/util/file_digest.py,sha256=95UbaxbTZLa54j1CupsKria028xZ8f6ueZsTupnjlYE,1061
+ r5py/util/good_enough_equidistant_crs.py,sha256=7FX3Ly3qegSV_YRA4OFk49LC29xUyTte1Gc5qOEi_9E,2458
 r5py/util/jvm.py,sha256=NCwoYLDznXydcIRAZl2kzUQA6D6NCvzjVG74pm6ioR0,5027
 r5py/util/memory_footprint.py,sha256=p8efCUs4UXRg6P1GrRxVs71m7SpEw2mASoz6PVTRvgQ,4672
 r5py/util/parse_int_date.py,sha256=JmnV8TwdUdUp3kSp2e73ZSxCbRyqv2FmQzNt0I_MsM0,667
 r5py/util/sample_data_set.py,sha256=aqUCx6drWD-WbCauewO4EzgOGnFr35mAZt-YHlqb92k,2463
 r5py/util/snake_to_camel_case.py,sha256=uJ5hTCVDUEmIxTyy4LGFTbpGC_rtnjDZVQ2vmVRTQ4k,485
+ r5py/util/spatially_clustered_geodataframe.py,sha256=FxG8V3SSeK-PuCep565p1b3TNcl8oCkY764tk1L0sKM,2410
 r5py/util/validating_requests_session.py,sha256=sH5FgpS9eGax5DG2qA2GrGuiwgTJgh8tKsZ9OiXKmvk,1807
 r5py/util/warnings.py,sha256=CvxKWKlNO_p3riB4SkNqbU5AGPsaY_3-OzqaBObE3B8,139
- r5py-1.0.0.dev11.dist-info/LICENSE,sha256=VAnuGDX1TPylSN9G2xLa-urDpj_SQwn-qqs068dx4tk,51
- r5py-1.0.0.dev11.dist-info/METADATA,sha256=RKg-im_y_9gT08QQyGjYGtJAyb43OYza9PNNz2YyoLY,9915
- r5py-1.0.0.dev11.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
- r5py-1.0.0.dev11.dist-info/top_level.txt,sha256=fOH1R85dkNDOI7jkg-lIsl5CQIO4fE5X868K9dTqs9U,5
- r5py-1.0.0.dev11.dist-info/RECORD,,
+ r5py/util/working_copy.py,sha256=sbLbRCi39LtC-0tXxvh2y7ZN2D15chbhleCZXzHAFSc,1432
+ r5py-1.0.1.dist-info/LICENSE,sha256=VAnuGDX1TPylSN9G2xLa-urDpj_SQwn-qqs068dx4tk,51
+ r5py-1.0.1.dist-info/METADATA,sha256=URprsHGGXPJ0Ve6vCwJHw7R4iEZ7d7y84fJvmNdTrV4,9954
+ r5py-1.0.1.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
+ r5py-1.0.1.dist-info/top_level.txt,sha256=fOH1R85dkNDOI7jkg-lIsl5CQIO4fE5X868K9dTqs9U,5
+ r5py-1.0.1.dist-info/RECORD,,
r5py-1.0.0.dev11.dist-info/WHEEL → r5py-1.0.1.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
- Generator: setuptools (75.8.0)
+ Generator: setuptools (76.0.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 