r5py-1.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. r5py/__init__.py +27 -0
  2. r5py/__main__.py +3 -0
  3. r5py/r5/__init__.py +39 -0
  4. r5py/r5/access_leg.py +12 -0
  5. r5py/r5/base_travel_time_matrix.py +255 -0
  6. r5py/r5/detailed_itineraries.py +226 -0
  7. r5py/r5/direct_leg.py +38 -0
  8. r5py/r5/egress_leg.py +12 -0
  9. r5py/r5/elevation_cost_function.py +50 -0
  10. r5py/r5/elevation_model.py +89 -0
  11. r5py/r5/file_storage.py +82 -0
  12. r5py/r5/isochrones.py +345 -0
  13. r5py/r5/regional_task.py +600 -0
  14. r5py/r5/scenario.py +36 -0
  15. r5py/r5/street_layer.py +90 -0
  16. r5py/r5/street_segment.py +39 -0
  17. r5py/r5/transfer_leg.py +12 -0
  18. r5py/r5/transit_layer.py +87 -0
  19. r5py/r5/transit_leg.py +12 -0
  20. r5py/r5/transport_mode.py +148 -0
  21. r5py/r5/transport_network.py +299 -0
  22. r5py/r5/travel_time_matrix.py +186 -0
  23. r5py/r5/trip.py +97 -0
  24. r5py/r5/trip_leg.py +204 -0
  25. r5py/r5/trip_planner.py +576 -0
  26. r5py/util/__init__.py +31 -0
  27. r5py/util/camel_to_snake_case.py +25 -0
  28. r5py/util/classpath.py +95 -0
  29. r5py/util/config.py +176 -0
  30. r5py/util/contains_gtfs_data.py +46 -0
  31. r5py/util/data_validation.py +28 -0
  32. r5py/util/environment.py +32 -0
  33. r5py/util/exceptions.py +43 -0
  34. r5py/util/file_digest.py +40 -0
  35. r5py/util/good_enough_equidistant_crs.py +73 -0
  36. r5py/util/jvm.py +138 -0
  37. r5py/util/memory_footprint.py +178 -0
  38. r5py/util/parse_int_date.py +24 -0
  39. r5py/util/sample_data_set.py +76 -0
  40. r5py/util/snake_to_camel_case.py +16 -0
  41. r5py/util/spatially_clustered_geodataframe.py +66 -0
  42. r5py/util/validating_requests_session.py +58 -0
  43. r5py/util/warnings.py +7 -0
  44. r5py/util/working_copy.py +42 -0
  45. r5py-1.1.0.dist-info/METADATA +176 -0
  46. r5py-1.1.0.dist-info/RECORD +49 -0
  47. r5py-1.1.0.dist-info/WHEEL +5 -0
  48. r5py-1.1.0.dist-info/licenses/LICENSE +3 -0
  49. r5py-1.1.0.dist-info/top_level.txt +1 -0
r5py/util/classpath.py ADDED
@@ -0,0 +1,95 @@
+ #!/usr/bin/env python3
+
+ """Make sure R5 is in the class path, download it if not."""
+
+ import hashlib
+ import pathlib
+ import string
+ import urllib.parse
+ import warnings
+
+ import requests
+
+ from .config import Config
+ from .exceptions import UnexpectedClasspathSchema
+ from .validating_requests_session import ValidatingRequestsSession
+ from .warnings import R5pyWarning
+
+ # update these to use a newer R5 version if no R5 available locally
+ R5_JAR_URL = (
+     "https://github.com/r5py/r5/releases/download/v7.5-r5py/r5-v7.5-r5py-all.jar"
+ )
+ R5_JAR_SHA256 = "70c89e57f9f98dd96c9d2be5fdc00d53a133893b2620e0d8d05e3e602f34e4d5"
+ # ---
+
+
+ __all__ = ["R5_CLASSPATH"]
+
+
+ config = Config()
+
+ config.argparser.add(
+     "-r",
+     "--r5-classpath",
+     help="R5’s class path, can point to r5-all.jar",
+     default="",
+ )
+
+
+ def find_r5_classpath(arguments):
+     r5_classpath = None
+
+     if arguments.r5_classpath:
+         schema, *_ = urllib.parse.urlparse(arguments.r5_classpath)
+
+         # fmt: off
+         if (
+             schema in ("file", "")
+             or (len(schema) == 1 and schema in string.ascii_letters)
+             # windows drive letter
+         ):
+             # fmt: on
+             if pathlib.Path(arguments.r5_classpath).exists():
+                 r5_classpath = arguments.r5_classpath
+
+         elif schema in ("https", "http"):
+             r5_classpath = config.CACHE_DIR / pathlib.Path(arguments.r5_classpath).name
+             with requests.get(arguments.r5_classpath) as response:
+                 r5_classpath.write_bytes(response.content)
+             r5_classpath = str(r5_classpath)
+
+         else:
+             raise UnexpectedClasspathSchema(
+                 f"Could not parse `r5_classpath`: "
+                 f"schema {schema}:// is not supported"
+             )
+
+     if r5_classpath is None:
+         r5_classpath = str(config.CACHE_DIR / pathlib.Path(R5_JAR_URL).name)
+         try:
+             with open(r5_classpath, "rb") as jar:
+                 assert hashlib.sha256(jar.read()).hexdigest() == R5_JAR_SHA256
+         except (AssertionError, FileNotFoundError):
+             if arguments.verbose:
+                 warnings.warn(
+                     "Could not find R5 jar, trying to download it from upstream",
+                     R5pyWarning,
+                     stacklevel=1,
+                 )
+             with (
+                 ValidatingRequestsSession() as session,
+                 session.get(R5_JAR_URL, R5_JAR_SHA256) as response,
+                 open(r5_classpath, "wb") as jar,
+             ):
+                 jar.write(response.content)
+             if arguments.verbose:
+                 warnings.warn(
+                     f"Successfully downloaded {pathlib.Path(R5_JAR_URL).name}",
+                     R5pyWarning,
+                     stacklevel=1,
+                 )
+
+     return r5_classpath
+
+
+ R5_CLASSPATH = find_r5_classpath(config.arguments)
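
Usage note: a minimal sketch of how the classpath resolution above is typically exercised. It assumes r5py is installed and that network access is available on first use; the printed path is illustrative only. Scripts can also point r5py at an existing jar via the `-r`/`--r5-classpath` option registered above, or via the corresponding entry in an `r5py.yml` configuration file.

    # Sketch only: importing the module resolves the R5 jar, downloading it if missing.
    from r5py.util.classpath import R5_CLASSPATH

    print(R5_CLASSPATH)  # e.g. a cached r5-v7.5-r5py-all.jar below Config().CACHE_DIR
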
r5py/util/config.py ADDED
@@ -0,0 +1,176 @@
+ #!/usr/bin/env python3
+
+ """Handle configuration options and command line options."""
+
+ import datetime
+ import functools
+ import importlib.resources
+ import os
+ import pathlib
+ import sys
+ import tempfile
+
+ import configargparse
+
+ __all__ = ["Config"]
+
+
+ PACKAGE = __package__.split(".")[0]
+ CONFIG_FILE_TEMPLATE = importlib.resources.files(f"{PACKAGE}.util").joinpath(
+     f"{PACKAGE}.yml.template"
+ )
+ CACHE_MAX_AGE = datetime.timedelta(weeks=2)
+
+ if "HOME" not in os.environ:  # e.g., testing environment or container
+     os.environ["HOME"] = "."
+
+
+ class Config:
+     """Load configuration from config files or command line arguments."""
+
+     _instance = None  # stores singleton instance
+
+     def __init__(self):
+         """Load configuration from config files or command line arguments."""
+         self.argparser.add(
+             "-v",
+             "--verbose",
+             help="Enable verbose output from R5 and r5py",
+             action="store_true",
+         )
+
+     def __new__(cls):
+         """Load configuration from config files or command line arguments."""
+         if cls._instance is None:
+             cls._instance = super(Config, cls).__new__(cls)
+         return cls._instance
+
+     @property
+     def arguments(self):
+         """
+         Arguments passed from command line or config file.
+
+         Ignores `--help`: can be used while not all modules have added arguments.
+         """
+         return self.get_arguments(ignore_help_args=True)
+
+     @property
+     def argparser(self):
+         """Return a singleton instance of a `configargparse.ArgumentParser`."""
+         try:
+             argparser = configargparse.get_argument_parser(
+                 prog=PACKAGE,
+                 description=sys.modules[PACKAGE].__doc__,
+                 default_config_files=self.CONFIG_FILES,
+             )
+         except ValueError:  # has been instantiated, already
+             argparser = configargparse.get_argument_parser()
+         return argparser
+
+     @functools.cached_property
+     def CACHE_DIR(self):
+         """Save persistent cache files into this directory."""
+         cache_dir = (
+             pathlib.Path(
+                 os.environ.get("LOCALAPPDATA")
+                 or os.environ.get("XDG_CACHE_HOME")
+                 or (pathlib.Path(os.environ.get("HOME")) / ".cache")
+             )
+             / PACKAGE
+         )
+         cache_dir.mkdir(parents=True, exist_ok=True)
+
+         # clean old files to keep cache dir from growing too much
+         cache_treshold = (datetime.datetime.now() - CACHE_MAX_AGE).timestamp()
+         for cached_file in cache_dir.glob("**/*"):
+             try:
+                 *_, atime, mtime, _ = cached_file.stat()
+                 assert max(atime, mtime) > cache_treshold
+             except (
+                 AssertionError,  # expired
+                 FileNotFoundError,  # broken symlink
+                 PermissionError,
+             ):
+                 try:
+                     cached_file.unlink()
+                 except (
+                     IsADirectoryError,  # only available on Linux kernels
+                     PermissionError,  # what’s raised instead on Win and MacOs
+                 ):
+                     pass
+
+         return cache_dir
+
+     @functools.cached_property
+     def CONFIG_FILES(self):
+         """List locations of potential configuration files."""
+         config_files = [
+             pathlib.Path(f"/etc/{PACKAGE}.yml"),
+             pathlib.Path(
+                 os.environ.get("APPDATA")
+                 or os.environ.get("XDG_CONFIG_HOME")
+                 or (pathlib.Path(os.environ.get("HOME")) / ".config")
+             )
+             / f"{PACKAGE}.yml",
+         ]
+
+         # write a template configuration file to possible locations
+         for config_file in config_files:
+             self._copy_config_file_template(config_file)
+
+         # argparse does not understand pathlib.Path
+         config_files = [str(config_file) for config_file in config_files]
+
+         return config_files
+
+     @staticmethod
+     def _copy_config_file_template(destination_path):
+         """
+         Try to copy a configuration file template.
+
+         Arguments:
+         ----------
+         destination_path : pathlib.Path
+             Where could/should a configuration file exist?
+         """
+         if not destination_path.exists():
+             try:
+                 destination_path.parent.mkdir(parents=True, exist_ok=True)
+
+                 with importlib.resources.as_file(CONFIG_FILE_TEMPLATE) as template:
+                     destination_path.write_text(template.read_text())
+
+             except (
+                 FileNotFoundError,
+                 FileExistsError,
+                 IsADirectoryError,
+                 PermissionError,
+             ):
+                 pass
+
+     def get_arguments(self, ignore_help_args=False):
+         """Parse arguments passed from command line or config file."""
+         return self.argparser.parse_known_args(ignore_help_args=ignore_help_args)[0]
+
+     @functools.cached_property
+     def TEMP_DIR(self):
+         """
+         Save temporary files to this directory.
+
+         read-only property,
+         use command-line option `--temporary-directory` to change.
+         """
+         parent_dir = self.arguments.temporary_directory
+         temp_dir = pathlib.Path(tempfile.mkdtemp(prefix=self.PACKAGE, dir=parent_dir))
+         return temp_dir
+
+     PACKAGE = PACKAGE
+
+
+ Config().argparser.add(
+     "-t",
+     "--temporary-directory",
+     help="Directory for temporary files, overrides system default",
+     default=None,
+     type=pathlib.Path,
+ )
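
Usage note: `Config` is a process-wide singleton (see `__new__` above), so options registered by one module are visible to every other module. A short, illustrative sketch:

    # Sketch only: every Config() call returns the same instance.
    from r5py.util.config import Config

    config = Config()
    print(config.CACHE_DIR)          # per-user cache directory, pruned of files older than two weeks
    print(config.arguments.verbose)  # False unless -v/--verbose (or a config file) says otherwise
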
r5py/util/contains_gtfs_data.py ADDED
@@ -0,0 +1,46 @@
+ #!/usr/bin/env python3
+
+ """Check whether a file contains a GTFS data set."""
+
+ import zipfile
+
+ __all__ = ["contains_gtfs_data"]
+
+
+ # https://developers.google.com/transit/gtfs/reference#field_definitions
+ GTFS_REQUIRED_FILES = (
+     "agency.txt",
+     "stops.txt",
+     "routes.txt",
+     "trips.txt",
+     "stop_times.txt",
+ )
+
+
+ def contains_gtfs_data(path):
+     """
+     Check whether the file in `path` contains a GTFS dataset.
+
+     This is a rather heuristic approach: it tries to open the file
+     as a ZIP archive, and confirm the presence of the files required
+     by the GTFS standard reference.
+
+     Arguments:
+     ----------
+     path : str | pathlib.Path | file-like
+         The file to check. Should be opened in binary mode, if passed as a
+         file-like object.
+
+     Returns:
+     --------
+     bool : `True` if `path` likely contains a GTFS dataset, `False` if not.
+     """
+     try:
+         archive = zipfile.ZipFile(path)
+         assert all(
+             gtfs_field in archive.namelist() for gtfs_field in GTFS_REQUIRED_FILES
+         )
+         contains_gtfs_data = True
+     except (AssertionError, FileNotFoundError, zipfile.BadZipFile):
+         contains_gtfs_data = False
+     return contains_gtfs_data
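
Usage note: the check is a single call; the file name below is a hypothetical placeholder.

    from r5py.util.contains_gtfs_data import contains_gtfs_data

    if contains_gtfs_data("helsinki_gtfs.zip"):  # placeholder file name
        print("looks like a GTFS feed")
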
r5py/util/data_validation.py ADDED
@@ -0,0 +1,28 @@
+ #!/usr/bin/env python3
+
+ """Utility functions for validating input data."""
+
+ from .exceptions import NoIDColumnError, NonUniqueIDError, NoCrsError
+
+ __all__ = ["check_od_data_set"]
+
+
+ def check_od_data_set(od_data_set):
+     """Check whether an origin/destination data set fulfils certain minimum criteria.
+
+     Checks whether `od_data_set` has an `id` column with unique values, and a coordinate
+     reference system defined. Raises a `r5py.util.exceptions.NoIDColumnError`, a
+     `r5py.util.exceptions.NonUniqueIDError`, or a `r5py.util.exceptions.NoCrsError`
+     otherwise.
+
+     Arguments
+     ---------
+     od_data_set : geopandas.GeoDataFrame
+         The origin/destination data set to check.
+     """
+     if "id" not in od_data_set.columns:
+         raise NoIDColumnError("Data set must contain an 'id' column.")
+     if not od_data_set.id.is_unique:
+         raise NonUniqueIDError("Id values must be unique.")
+     if od_data_set.crs is None:
+         raise NoCrsError("Data set has to have a coordinate reference system defined.")
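
Usage note: a minimal sketch of the validation in action, assuming geopandas and shapely are installed; the coordinates are arbitrary.

    import geopandas
    import shapely

    from r5py.util.data_validation import check_od_data_set

    origins = geopandas.GeoDataFrame(
        {
            "id": [1, 2],
            "geometry": [shapely.Point(24.94, 60.17), shapely.Point(24.83, 60.20)],
        },
        crs="EPSG:4326",
    )
    check_od_data_set(origins)                       # passes silently
    check_od_data_set(origins.drop(columns=["id"]))  # raises NoIDColumnError
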
r5py/util/environment.py ADDED
@@ -0,0 +1,32 @@
+ #!/usr/bin/env python3
+
+
+ """Normalise some environment variables that might not always get set."""
+
+ import os
+ import pathlib
+
+ # if a readthedocs runner uses a conda environment, it fails to
+ # properly initialise the JAVA_HOME and PROJ_LIB environment variables
+ #
+ # this might happen on other installation, so let’s keep this as general
+ # as possible.
+ #
+ # As readthedocs also does not export CONDA_PREFIX, we first reconstruct
+ # it from CONDA_ENVS_PATH and CONDA_DEFAULT_ENV
+ if (
+     "CONDA_PREFIX" not in os.environ
+     and "CONDA_DEFAULT_ENV" in os.environ
+     and "CONDA_ENVS_PATH" in os.environ
+ ):  # pragma: no cover
+     os.environ["CONDA_PREFIX"] = str(
+         pathlib.Path(os.environ["CONDA_ENVS_PATH"]) / os.environ["CONDA_DEFAULT_ENV"]
+     )
+ if "JAVA_HOME" not in os.environ and "CONDA_PREFIX" in os.environ:  # pragma: no cover
+     os.environ["JAVA_HOME"] = str(
+         pathlib.Path(os.environ["CONDA_PREFIX"]) / "lib" / "jvm"
+     )
+ if "PROJ_LIB" not in os.environ and "CONDA_PREFIX" in os.environ:  # pragma: no cover
+     os.environ["PROJ_LIB"] = str(
+         pathlib.Path(os.environ["CONDA_PREFIX"]) / "share" / "proj"
+     )
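
Usage note: this module is imported purely for its side effects. A quick way to see what it set, assuming a conda-based environment in which the variables were initially missing:

    import os

    import r5py.util.environment  # noqa: F401  (imported for its side effects)

    print(os.environ.get("JAVA_HOME"))  # e.g. $CONDA_PREFIX/lib/jvm, if it was unset before
    print(os.environ.get("PROJ_LIB"))   # e.g. $CONDA_PREFIX/share/proj
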
r5py/util/exceptions.py ADDED
@@ -0,0 +1,43 @@
+ #!/usr/bin/env python3
+
+ """R5py-specific exceptions."""
+
+ import requests
+
+
+ # base class for r5py-related exceptions
+ class R5pyError(Exception):
+     """Generic base exception for r5py errors."""
+
+
+ # more specific exceptions
+ class GtfsFileError(R5pyError):
+     """GTFS file contained errors."""
+
+
+ class ChecksumFailed(requests.RequestException, R5pyError):
+     """Requested resource did not pass checksum test."""
+
+
+ class MissingColumnError(ValueError, R5pyError):
+     """An input data set is missing a required column."""
+
+
+ class NoCrsError(ValueError, R5pyError):
+     """An input data set’s geometry column does not have a reference system defined."""
+
+
+ class NonUniqueIDError(ValueError, R5pyError):
+     """An input data set’s `id` column has non-unique values."""
+
+
+ class NoIDColumnError(MissingColumnError):
+     """An input data set does not have a required `id` column."""
+
+
+ class UnexpectedClasspathSchema(ValueError, R5pyError):
+     """A classpath was supplied as an URI, but could not be parsed."""
+
+
+ class UnexpectedCrsError(ValueError, R5pyError):
+     """A geometry is in an unexpected reference system."""
r5py/util/file_digest.py ADDED
@@ -0,0 +1,40 @@
+ #!/usr/bin/env python3
+
+ """Create a hash sum of a file."""
+
+ import hashlib
+ import pathlib
+
+ __all__ = ["FileDigest"]
+
+
+ BUFFER_SIZE = 64 * 1024
+
+
+ class FileDigest(str):
+     """Create a hash sum of a file."""
+
+     def __new__(cls, input_file, digest="blake2s"):
+         """
+         Create a hash sum of a file.
+
+         Arguments
+         ---------
+         input_file : pathlib.Path | str
+             for which file to compute a hash digest
+         digest : str | func
+             name of hash algorithm (s.
+             https://docs.python.org/3/library/hashlib.html) or function that
+             returns a hash sum
+         """
+         input_file = pathlib.Path(input_file)
+         try:
+             with input_file.open("rb") as f:
+                 hashdigest = hashlib.file_digest(f, digest)
+         except AttributeError:  # Python<=3.10
+             hashdigest = hashlib.new(digest)
+             with input_file.open("rb") as f:
+                 while data := f.read(BUFFER_SIZE):
+                     hashdigest.update(data)
+
+         return hashdigest.hexdigest()
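
Usage note: a small sketch with a placeholder file name. Note that, although `FileDigest` subclasses `str`, its `__new__` returns the plain hexadecimal digest string.

    from r5py.util.file_digest import FileDigest

    checksum = FileDigest("some_network.osm.pbf")          # blake2s by default; placeholder file name
    sha256 = FileDigest("some_network.osm.pbf", "sha256")  # any hashlib algorithm name works
    print(checksum, sha256)
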
r5py/util/good_enough_equidistant_crs.py ADDED
@@ -0,0 +1,73 @@
+ #!/usr/bin/env python3
+
+
+ """Find the most appropriate equidistant (UTM) reference system for an extent."""
+
+ import pyproj
+ import shapely
+
+ from .exceptions import UnexpectedCrsError
+
+ FALLBACK_CRS = 3857
+ DATUM_NAME = "WGS 84"
+ VERY_SMALL_BUFFER_SIZE = 0.001
+
+
+ class GoodEnoughEquidistantCrs(pyproj.CRS):
+     """
+     Find the most appropriate UTM reference system for the current extent.
+
+     (We need this to be able to calculate lengths in meters.
+     Results don’t have to be perfect, so also the neighbouring UTM grid will do.)
+     """
+
+     def __new__(cls, extent):
+         """
+         Find the most appropriate UTM reference system for the current extent.
+
+         (We need this to be able to calculate lengths in meters.
+         Results don’t have to be perfect, so also the neighbouring UTM grid will do.)
+
+         Arguments
+         ---------
+         extent: shapely.Geometry
+             The geographical extent for which to find an equidistant reference
+             system, in `EPSG:4326`
+         """
+         if GoodEnoughEquidistantCrs._is_plausible_in_epsg4326(extent):
+             # default CRS in case we do not find any better match
+             crs = pyproj.CRS.from_epsg(FALLBACK_CRS)
+
+             # buffer extent (so everything is a polygon)
+             extent = extent.buffer(VERY_SMALL_BUFFER_SIZE)
+
+             crsinfo = pyproj.database.query_utm_crs_info(
+                 datum_name=DATUM_NAME,
+                 area_of_interest=pyproj.aoi.AreaOfInterest(*extent.bounds),
+             )
+             for candidate_crs in crsinfo:
+                 area_of_use = shapely.box(*candidate_crs.area_of_use.bounds)
+                 coverage = shapely.intersection(extent, area_of_use).area / extent.area
+
+                 if coverage > 0.5:
+                     # more than half of extent covered by crs’ area of use
+                     # -> good enough
+                     crs = pyproj.CRS.from_authority(
+                         candidate_crs.auth_name, candidate_crs.code
+                     )
+                     break
+
+             return crs
+
+         else:
+             raise UnexpectedCrsError("`extent` does not seem to be in `EPSG:4326`")
+
+     @staticmethod
+     def _is_plausible_in_epsg4326(geometry):
+         try:
+             minx, miny, maxx, maxy = geometry.bounds
+             assert -180 <= minx <= maxx <= 180
+             assert -90 <= miny <= maxy <= 90
+             return True
+         except AssertionError:
+             return False
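
Usage note: a sketch with an arbitrary WGS84 extent around Helsinki. For this longitude range the query should resolve to a UTM zone 35N system (e.g. EPSG:32635), although the exact match depends on the installed PROJ database.

    import shapely

    from r5py.util.good_enough_equidistant_crs import GoodEnoughEquidistantCrs

    extent = shapely.box(24.8, 60.1, 25.3, 60.4)  # lon/lat bounds, EPSG:4326
    crs = GoodEnoughEquidistantCrs(extent)
    print(crs)  # expected: a WGS 84 / UTM zone CRS covering most of the extent
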
r5py/util/jvm.py ADDED
@@ -0,0 +1,138 @@
+ #!/usr/bin/env python3
+
+ """Set up a JVM and import basic java classes."""
+
+ import os
+ import pathlib
+ import shutil
+ import sys
+
+ import jpype
+ import jpype.imports
+
+ from .classpath import R5_CLASSPATH
+ from .config import Config
+ from .memory_footprint import MAX_JVM_MEMORY
+
+ __all__ = ["start_jvm"]
+
+
+ def start_jvm():
+     """
+     Start a Java Virtual Machine (JVM) if none is running already.
+
+     Takes into account the `--max-memory` and `--verbose` command
+     line and configuration options.
+     """
+     if not jpype.isJVMStarted():
+         # preload signal handling; this, among other things, prevents some of
+         # the warning messages we have been seeing
+         # (cf. https://stackoverflow.com/q/15790403 and
+         # https://docs.oracle.com/en/java/javase/19/vm/signal-chaining.html )
+         JVM_PATH = pathlib.Path(jpype.getDefaultJVMPath()).resolve()
+         if sys.platform == "linux":
+             try:
+                 LIBJSIG = next(JVM_PATH.parent.glob("**/libjsig.so"))
+                 os.environ["LD_PRELOAD"] = str(LIBJSIG)
+             except StopIteration:  # pragma: no cover
+                 pass  # don’t fail completely if libjsig not found
+         elif sys.platform == "darwin":
+             try:
+                 LIBJSIG = next(JVM_PATH.parent.glob("**/libjsig.dylib"))
+                 os.environ["DYLD_INSERT_LIBRARIES"] = str(LIBJSIG)
+             except StopIteration:  # pragma: no cover
+                 pass  # don’t fail completely if libjsig not found
+
+         TEMP_DIR = Config().TEMP_DIR
+
+         jpype.startJVM(
+             f"-Xmx{MAX_JVM_MEMORY:d}",
+             "-XX:+RestoreMXCSROnJNICalls",  # https://github.com/r5py/r5py/issues/485
+             "-Xcheck:jni",
+             "-Xrs",  # https://stackoverflow.com/q/34951812
+             "-Duser.language=en",  # Set a default locale, …
+             "-Duser.country=US",  # … as R5 formats numeric return …
+             "-Duser.variant=",  # … values as a localised string
+             f"-Djava.io.tmpdir={TEMP_DIR}",
+             "--enable-native-access=ALL-UNNAMED",
+             classpath=[R5_CLASSPATH],
+             interrupt=True,
+         )
+
+         # Add shutdown hook that cleans up the temporary directory
+         @jpype.JImplements("java.lang.Runnable")
+         class ShutdownHookToCleanUpTempDir:
+             @jpype.JOverride
+             def run(self):  # pragma: no cover
+                 shutil.rmtree(TEMP_DIR)
+
+         import java.lang
+
+         java.lang.Runtime.getRuntime().addShutdownHook(
+             java.lang.Thread(ShutdownHookToCleanUpTempDir())
+         )
+
+         if not Config().arguments.verbose:
+             import ch.qos.logback.classic
+             import java.io
+             import java.lang
+             import org.slf4j.LoggerFactory
+
+             logger_context = org.slf4j.LoggerFactory.getILoggerFactory()
+             for log_target in (
+                 "com.conveyal.gtfs",
+                 "com.conveyal.osmlib",
+                 "com.conveyal.r5",
+                 "com.conveyal.r5.profile.ExecutionTimer",
+                 "com.conveyal.r5.profile.FastRaptorWorker",
+                 "graphql.GraphQL",
+                 "org.eclipse.jetty",
+                 "org.hsqldb.persist.Logger" "org.mongodb.driver.connection",
+             ):
+                 logger_context.getLogger(log_target).setLevel(
+                     ch.qos.logback.classic.Level.valueOf("OFF")
+                 )
+
+             if sys.platform == "win32":  # Windows
+                 null_stream = java.io.PrintStream("NUL")
+             else:
+                 null_stream = java.io.PrintStream("/dev/null")
+             java.lang.System.setErr(null_stream)
+             java.lang.System.setOut(null_stream)
+
+
+ # The JVM should be started before we attempt to import any Java package.
+ # If we run `start_jvm()` before another `import` statement, linting our
+ # code would result in many E402 (‘Module level import not at top of file’)
+ # warnings, if the JVM would start implicitely when `__file__` is imported,
+ # we would end up with F401 (‘Module imported but unused’) warnings.
+
+ # This below is a middle way: We don’t start the JVM right away, only
+ # when `start_jvm()` is called. However, if we attempt to import a
+ # Java package (or, more precisely, a package that’s likely to be a
+ # Java package), the `import` statement would trigger `start_jvm()`
+
+ # see:
+ # https://github.com/jpype-project/jpype/blob/master/jpype/imports.py#L146
+
+
+ class _JImportLoaderThatStartsTheJvm(jpype.imports._JImportLoader):
+     """Find Java packages for import statements, start JVM before that."""
+
+     def find_spec(self, name, path, target=None):
+         # we got this far in `sys.meta_path` (no other finder/loader
+         # knew about the package we try to load), and naturally, we’re
+         # towards the end of that list.
+
+         # Let’s assume the requested packages is a Java package,
+         # and start the JVM
+         start_jvm()
+
+         # then go the standard jpype way:
+         return super().find_spec(name, path, target)
+
+
+ # replace jpype’s _JImportLoader with our own:
+ for i, finder in enumerate(sys.meta_path):
+     if isinstance(finder, jpype.imports._JImportLoader):
+         sys.meta_path[i] = _JImportLoaderThatStartsTheJvm()
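
Usage note: with the meta-path finder installed above, there are two equivalent ways to bring the JVM up. This is a sketch and assumes the R5 jar and a Java runtime are available.

    from r5py.util.jvm import start_jvm

    start_jvm()  # explicit: honours --max-memory, --verbose and the temporary-directory setting

    # ... or implicit: importing a (likely) Java package triggers start_jvm()
    # before jpype resolves the import.
    import com.conveyal.r5  # noqa: F401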