pycontrails 0.41.0-cp39-cp39-macosx_10_9_x86_64.whl → 0.42.0-cp39-cp39-macosx_10_9_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycontrails/_version.py +2 -2
- pycontrails/core/airports.py +228 -0
- pycontrails/core/datalib.py +8 -4
- pycontrails/core/fleet.py +13 -13
- pycontrails/core/flight.py +311 -86
- pycontrails/core/rgi_cython.cpython-39-darwin.so +0 -0
- pycontrails/core/vector.py +63 -51
- pycontrails/datalib/__init__.py +1 -1
- pycontrails/datalib/spire/__init__.py +19 -0
- pycontrails/datalib/spire/spire.py +739 -0
- pycontrails/models/cocip/wind_shear.py +2 -2
- pycontrails/models/emissions/emissions.py +1 -1
- pycontrails/models/humidity_scaling.py +1 -1
- pycontrails/models/issr.py +1 -1
- pycontrails/models/pcr.py +1 -1
- pycontrails/models/sac.py +5 -5
- pycontrails/physics/geo.py +3 -2
- pycontrails/physics/jet.py +66 -113
- {pycontrails-0.41.0.dist-info → pycontrails-0.42.0.dist-info}/METADATA +1 -1
- {pycontrails-0.41.0.dist-info → pycontrails-0.42.0.dist-info}/RECORD +24 -21
- {pycontrails-0.41.0.dist-info → pycontrails-0.42.0.dist-info}/LICENSE +0 -0
- {pycontrails-0.41.0.dist-info → pycontrails-0.42.0.dist-info}/NOTICE +0 -0
- {pycontrails-0.41.0.dist-info → pycontrails-0.42.0.dist-info}/WHEEL +0 -0
- {pycontrails-0.41.0.dist-info → pycontrails-0.42.0.dist-info}/top_level.txt +0 -0
pycontrails/_version.py
CHANGED
pycontrails/core/airports.py
ADDED
@@ -0,0 +1,228 @@
+"""Airport data support."""
+
+from __future__ import annotations
+
+import numpy as np
+import pandas as pd
+
+from pycontrails.core import cache
+from pycontrails.physics import geo, units
+
+#: URL for `Our Airports <https://ourairports.com/>`_ database.
+#: Fork of the `ourairports-data repository <https://github.com/davidmegginson/ourairports-data>`_.
+OURAIRPORTS_DATABASE_URL: str = (
+    "https://github.com/contrailcirrus/ourairports-data/raw/main/airports.csv"
+)
+
+
+def _download_ourairports_csv() -> pd.DataFrame:
+    """Download CSV file from fork of ourairports-data github."""
+    return pd.read_csv(
+        OURAIRPORTS_DATABASE_URL,
+        usecols=[
+            "type",
+            "name",
+            "latitude_deg",
+            "longitude_deg",
+            "elevation_ft",
+            "iso_country",
+            "iso_region",
+            "municipality",
+            "scheduled_service",
+            "gps_code",
+            "iata_code",
+        ],
+    )
+
+
+def global_airport_database(
+    cachestore: cache.CacheStore | None = None, update_cache: bool = False
+) -> pd.DataFrame:
+    """
+    Load and process global airport database from `Our Airports <https://ourairports.com/>`_.
+
+    The database includes coordinates and metadata for 74867 unique airports.
+
+    Parameters
+    ----------
+    cachestore : cache.CacheStore | None, optional
+        Cache store for airport database.
+        Defaults to :class:`cache.DiskCacheStore`.
+    update_cache : bool, optional
+        Force update to cached airports database.
+
+    Returns
+    -------
+    pd.DataFrame
+        Processed global airport database.
+
+    Notes
+    -----
+    As of 2023 March 30, the global airport database contains:
+
+    .. csv-table::
+        :header: "Airport Type", "Number"
+        :widths: 70, 30
+
+        "small_airport", 39327
+        "heliport", 19039
+        "closed", 10107
+        "medium_airport", 4753
+        "seaplane_base", 1133
+        "large_airport", 463
+        "balloonport", 45
+
+    References
+    ----------
+    - :cite:`megginsonOpendataDownloadsOurAirports2023`
+    """
+    cachestore = cachestore or cache.DiskCacheStore()
+
+    cache_key = "ourairports-data_airports.csv"
+    if cachestore.exists(cache_key) and not update_cache:
+        airports = pd.read_csv(cachestore.path(cache_key))
+    else:
+        airports = _download_ourairports_csv()
+        airports.to_csv(cachestore.path(cache_key), index=False)
+
+    # Format dataset by renaming columns & filling nan values
+    airports.rename(
+        columns={"latitude_deg": "latitude", "longitude_deg": "longitude", "gps_code": "icao_code"},
+        inplace=True,
+    )
+    airports["elevation_ft"].fillna(0, inplace=True)
+
+    # Keep specific airport types used by commercial aviation
+    select_airport_types = airports["type"].isin(
+        ["large_airport", "medium_airport", "small_airport", "heliport"]
+    )
+
+    # Keep airports with valid ICAO codes
+    select_icao_codes = (airports["icao_code"].str.len() == 4) & (
+        airports["icao_code"].str.isalpha()
+    )
+
+    # Filter airports
+    airports = airports.loc[select_airport_types & select_icao_codes]
+
+    # Format dataset
+    airports["elevation_m"] = units.ft_to_m(airports["elevation_ft"].to_numpy())
+    airports.sort_values(by=["icao_code"], ascending=True, inplace=True)
+
+    return airports.reset_index(drop=True)
+
+
+def find_nearest_airport(
+    airports: pd.DataFrame,
+    longitude: float,
+    latitude: float,
+    altitude: float,
+    *,
+    bbox: float = 2.0,
+) -> str | None:
+    r"""
+    Find airport nearest to the waypoint.
+
+    Parameters
+    ----------
+    airports : pd.DataFrame
+        Airport database in the format returned from :func:`global_airport_database`.
+    longitude : float
+        Waypoint longitude, [:math:`\deg`]
+    latitude : float
+        Waypoint latitude, [:math:`\deg`]
+    altitude : float
+        Waypoint altitude, [:math:`m`]
+    bbox : float
+        Search airports within spatial bounding box of ± ``bbox`` from the waypoint, [:math:`\deg`]
+        Defaults to :math:`2\deg`
+
+    Returns
+    -------
+    str | None
+        ICAO code of nearest airport.
+        Returns None if no airport is found within ``bbox``.
+
+    Notes
+    -----
+    The function first searches for large airports in the vicinity of the waypoint.
+    If none is found, it searches for medium and then small airports.
+
+    The waypoint must be below 10,000 feet to increase the probability of identifying
+    the correct airport.
+    """
+    if altitude > 3000:
+        raise ValueError(
+            f"Altitude ({altitude} m) is too high (> 3000 m) to identify nearest airport."
+        )
+
+    is_near_waypoint = airports["longitude"].between(
+        (longitude - bbox), (longitude + bbox)
+    ) & airports["latitude"].between((latitude - bbox), (latitude + bbox))
+
+    # Find the nearest airport from largest to smallest airport type
+    search_priority = ["large_airport", "medium_airport", "small_airport"]
+
+    for airport_type in search_priority:
+        is_airport_type = airports["type"] == airport_type
+        nearest_airports = airports.loc[is_near_waypoint & is_airport_type]
+
+        if len(nearest_airports) == 1:
+            return nearest_airports["icao_code"].values[0]
+
+        elif len(nearest_airports) > 1:
+            distance = distance_to_airports(
+                nearest_airports,
+                longitude,
+                latitude,
+                altitude,
+            )
+            i_nearest = np.argmin(distance)
+            return nearest_airports["icao_code"].values[i_nearest]
+
+        else:
+            continue
+
+    return None
+
+
+def distance_to_airports(
+    airports: pd.DataFrame,
+    longitude: float,
+    latitude: float,
+    altitude: float,
+) -> np.ndarray:
+    r"""
+    Calculate the 3D distance from the waypoint to the provided airports.
+
+    Parameters
+    ----------
+    airports : pd.DataFrame
+        Airport database in the format returned from :func:`global_airport_database`.
+    longitude : float
+        Waypoint longitude, [:math:`\deg`]
+    latitude : float
+        Waypoint latitude, [:math:`\deg`]
+    altitude : float
+        Waypoint altitude, [:math:`m`]
+
+    Returns
+    -------
+    np.ndarray
+        3D distance from waypoint to airports, [:math:`m`]
+
+    See Also
+    --------
+    :func:`geo.haversine`
+    """
+    dist_horizontal = geo.haversine(
+        np.full(airports["longitude"].shape, longitude),
+        np.full(airports["latitude"].shape, latitude),
+        airports["longitude"].to_numpy(),
+        airports["latitude"].to_numpy(),
+    )
+    dist_vertical = altitude - airports["elevation_m"].to_numpy()
+    return (dist_horizontal**2 + dist_vertical**2) ** 0.5
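Taken together, the new module exposes a small query API: build the processed table once with `global_airport_database`, then resolve individual low-altitude waypoints with `find_nearest_airport`. A minimal usage sketch based on the signatures above (the import path follows the file listing; the coordinates and the expected ICAO code are illustrative, not taken from the release):

    from pycontrails.core import airports

    # Download the OurAirports CSV (or read it from the local cache), keep airports with
    # valid 4-letter ICAO codes, and add an elevation column in metres
    db = airports.global_airport_database()

    # Hypothetical waypoint on approach: 0.4 deg W, 51.5 deg N, 500 m altitude
    icao = airports.find_nearest_airport(db, longitude=-0.4, latitude=51.5, altitude=500.0)
    print(icao)  # likely "EGLL" if a large airport falls inside the default 2 deg bounding box

    # Waypoints above 3000 m raise a ValueError, since the nearest airport is then ambiguous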
pycontrails/core/datalib.py
CHANGED
@@ -33,7 +33,7 @@ NETCDF_ENGINE: str = "netcdf4"
 DEFAULT_CHUNKS: dict[str, int] = {"time": 1}
 
 
-def parse_timesteps(time: TimeInput | None, freq: str = "1H") -> list[datetime]:
+def parse_timesteps(time: TimeInput | None, freq: str | None = "1H") -> list[datetime]:
     """Parse time input into set of time steps.
 
     If input time is length 2, this creates a range of equally spaced time
@@ -46,10 +46,11 @@ def parse_timesteps(time: TimeInput | None, freq: str = "1H") -> list[datetime]:
         Either a single datetime-like or tuple of datetime-like with the first value
         the start of the date range and second value the end of the time range.
         Input values can be any type compatible with :meth:`pandas.to_datetime`.
-    freq : str, optional
+    freq : str | None, optional
         Timestep interval in range.
         See https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#timeseries-offset-aliases
         for a list of frequency aliases.
+        If None, returns input `time` as a list.
         Defaults to "1H".
 
     Returns
@@ -84,8 +85,11 @@ def parse_timesteps(time: TimeInput | None, freq: str = "1H") -> list[datetime]:
             "must be compatible with 'pd.to_datetime()'"
         )
 
-    # get date range that encompasses all whole hours
-    daterange = pd.date_range(timestamps[0].floor(freq), timestamps[1].ceil(freq), freq=freq)
+    if freq is None:
+        daterange = pd.DatetimeIndex([timestamps[0], timestamps[1]])
+    else:
+        # get date range that encompasses all whole hours
+        daterange = pd.date_range(timestamps[0].floor(freq), timestamps[1].ceil(freq), freq=freq)
 
     # return list of datetimes
     return daterange.to_pydatetime().tolist()
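The effect of the new `freq=None` branch is easiest to see side by side: with a frequency string the endpoints are floored and ceiled to whole steps and the range is filled in, while with `None` only the two parsed endpoints come back. A short sketch assuming `parse_timesteps` is imported from this module (the times are arbitrary examples):

    from datetime import datetime

    from pycontrails.core.datalib import parse_timesteps

    time = (datetime(2023, 3, 30, 10, 20), datetime(2023, 3, 30, 12, 40))

    # Hourly frequency: endpoints are floored/ceiled to whole hours, then the range is filled
    parse_timesteps(time, freq="1H")
    # [datetime(2023, 3, 30, 10, 0), ..., datetime(2023, 3, 30, 13, 0)]  (four hourly steps)

    # freq=None: only the two parsed endpoints are returned
    parse_timesteps(time, freq=None)
    # [datetime(2023, 3, 30, 10, 20), datetime(2023, 3, 30, 12, 40)]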
pycontrails/core/fleet.py
CHANGED
@@ -22,12 +22,12 @@ class Fleet(Flight):
 
     def __init__(
         self,
-        data: dict[str,
-        longitude:
-        latitude:
-        altitude:
-        level:
-        time:
+        data: dict[str, npt.ArrayLike] | None = None,
+        longitude: npt.ArrayLike | None = None,
+        latitude: npt.ArrayLike | None = None,
+        altitude: npt.ArrayLike | None = None,
+        level: npt.ArrayLike | None = None,
+        time: npt.ArrayLike | None = None,
         attrs: dict[str, Any] | None = None,
         copy: bool = True,
         fuel: Fuel | None = None,
@@ -60,12 +60,12 @@ class Fleet(Flight):
 
         self.final_waypoints = self.calc_final_waypoints()
 
-    def calc_final_waypoints(self) -> np.
+    def calc_final_waypoints(self) -> npt.NDArray[np.bool_]:
         """Validate data and calculate the final waypoint of each flight.
 
         Returns
         -------
-        np.
+        npt.NDArray[np.bool_]
            A boolean array in which True values correspond to final waypoint of each flight.
 
        Raises
@@ -291,7 +291,7 @@ class Fleet(Flight):
            self[key] = v_wind
 
        # Calculate TAS on each flight individually
-        def calc_tas(fl: Flight) -> np.
+        def calc_tas(fl: Flight) -> npt.NDArray[np.float_]:
            u_wind = fl.get("__u_wind", None)
            v_wind = fl.get("__v_wind", None)
 
@@ -314,7 +314,7 @@ class Fleet(Flight):
        return np.concatenate(tas)
 
    @overrides
-    def segment_groundspeed(self, *args: Any, **kwargs: Any) -> np.
+    def segment_groundspeed(self, *args: Any, **kwargs: Any) -> npt.NDArray[np.float_]:
        # Implement if we have a usecase for this.
        # Because the super() method uses a smoothing pattern, it will not reliably
        # work on Fleet.
@@ -327,15 +327,15 @@ class Fleet(Flight):
        raise NotImplementedError
 
    @overrides
-    def segment_length(self) -> np.
+    def segment_length(self) -> npt.NDArray[np.float_]:
        return np.where(self.final_waypoints, np.nan, super().segment_length())
 
    @overrides
-    def segment_azimuth(self) -> np.
+    def segment_azimuth(self) -> npt.NDArray[np.float_]:
        return np.where(self.final_waypoints, np.nan, super().segment_azimuth())
 
    @overrides
-    def segment_angle(self) -> tuple[np.
+    def segment_angle(self) -> tuple[npt.NDArray[np.float_], npt.NDArray[np.float_]]:
        sin_a, cos_a = super().segment_angle()
        sin_a[self.final_waypoints] = np.nan
        cos_a[self.final_waypoints] = np.nan
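The `final_waypoints` mask threaded through these overrides exists because a `Fleet` stores many flights in a single concatenated vector: segment quantities computed at one flight's last waypoint would otherwise bridge into the next flight's first waypoint. The masking pattern itself is plain NumPy; a standalone sketch with made-up values, not the pycontrails API:

    import numpy as np

    # Segment lengths over the concatenated waypoints of two flights, in metres (illustrative)
    segment_length = np.array([10_000.0, 12_000.0, 9_500.0, 11_000.0, 8_000.0, 7_500.0])

    # True at the last waypoint of each flight in the concatenation
    final_waypoints = np.array([False, False, True, False, False, True])

    # Same pattern as Fleet.segment_length / segment_azimuth: blank out cross-flight segments
    masked = np.where(final_waypoints, np.nan, segment_length)
    # array([10000., 12000., nan, 11000., 8000., nan])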