pycontrails 0.41.0__cp311-cp311-macosx_11_0_arm64.whl → 0.42.2__cp311-cp311-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pycontrails might be problematic. Click here for more details.

Files changed (40) hide show
  1. pycontrails/_version.py +2 -2
  2. pycontrails/core/airports.py +228 -0
  3. pycontrails/core/cache.py +4 -6
  4. pycontrails/core/datalib.py +13 -6
  5. pycontrails/core/fleet.py +72 -20
  6. pycontrails/core/flight.py +485 -134
  7. pycontrails/core/flightplan.py +238 -0
  8. pycontrails/core/interpolation.py +11 -15
  9. pycontrails/core/met.py +5 -5
  10. pycontrails/core/models.py +4 -0
  11. pycontrails/core/rgi_cython.cpython-311-darwin.so +0 -0
  12. pycontrails/core/vector.py +80 -63
  13. pycontrails/datalib/__init__.py +1 -1
  14. pycontrails/datalib/ecmwf/common.py +14 -19
  15. pycontrails/datalib/spire/__init__.py +19 -0
  16. pycontrails/datalib/spire/spire.py +739 -0
  17. pycontrails/ext/bada/__init__.py +6 -6
  18. pycontrails/ext/cirium/__init__.py +2 -2
  19. pycontrails/models/cocip/cocip.py +37 -39
  20. pycontrails/models/cocip/cocip_params.py +37 -30
  21. pycontrails/models/cocip/cocip_uncertainty.py +47 -58
  22. pycontrails/models/cocip/radiative_forcing.py +220 -193
  23. pycontrails/models/cocip/wake_vortex.py +96 -91
  24. pycontrails/models/cocip/wind_shear.py +2 -2
  25. pycontrails/models/emissions/emissions.py +1 -1
  26. pycontrails/models/humidity_scaling.py +266 -9
  27. pycontrails/models/issr.py +2 -2
  28. pycontrails/models/pcr.py +1 -1
  29. pycontrails/models/quantiles/era5_ensemble_quantiles.npy +0 -0
  30. pycontrails/models/quantiles/iagos_quantiles.npy +0 -0
  31. pycontrails/models/sac.py +7 -5
  32. pycontrails/physics/geo.py +5 -3
  33. pycontrails/physics/jet.py +66 -113
  34. pycontrails/utils/json.py +3 -3
  35. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/METADATA +4 -7
  36. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/RECORD +40 -34
  37. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/LICENSE +0 -0
  38. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/NOTICE +0 -0
  39. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/WHEEL +0 -0
  40. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/top_level.txt +0 -0
pycontrails/_version.py CHANGED
@@ -1,4 +1,4 @@
1
1
  # file generated by setuptools_scm
2
2
  # don't change, don't track in version control
3
- __version__ = version = '0.41.0'
4
- __version_tuple__ = version_tuple = (0, 41, 0)
3
+ __version__ = version = '0.42.2'
4
+ __version_tuple__ = version_tuple = (0, 42, 2)
@@ -0,0 +1,228 @@
1
+ """Airport data support."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import numpy as np
6
+ import pandas as pd
7
+
8
+ from pycontrails.core import cache
9
+ from pycontrails.physics import geo, units
10
+
11
+ #: URL for `Our Airports <https://ourairports.com/>`_ database.
12
+ #: Fork of the `ourairports-data repository <https://github.com/davidmegginson/ourairports-data>`_.
13
+ OURAIRPORTS_DATABASE_URL: str = (
14
+ "https://github.com/contrailcirrus/ourairports-data/raw/main/airports.csv"
15
+ )
16
+
17
+
18
+ def _download_ourairports_csv() -> pd.DataFrame:
19
+ """Download CSV file from fork of ourairports-data github."""
20
+ return pd.read_csv(
21
+ OURAIRPORTS_DATABASE_URL,
22
+ usecols=[
23
+ "type",
24
+ "name",
25
+ "latitude_deg",
26
+ "longitude_deg",
27
+ "elevation_ft",
28
+ "iso_country",
29
+ "iso_region",
30
+ "municipality",
31
+ "scheduled_service",
32
+ "gps_code",
33
+ "iata_code",
34
+ ],
35
+ )
36
+
37
+
38
+ def global_airport_database(
39
+ cachestore: cache.CacheStore | None = None, update_cache: bool = False
40
+ ) -> pd.DataFrame:
41
+ """
42
+ Load and process global airport database from `Our Airports <https://ourairports.com/>`_.
43
+
44
+ The database includes coordinates and metadata for 74867 unique airports.
45
+
46
+ Parameters
47
+ ----------
48
+ cachestore : cache.CacheStore | None, optional
49
+ Cache store for airport database.
50
+ Defaults to :class:`cache.DiskCacheStore`.
51
+ update_cache : bool, optional
52
+ Force update to cached airports database.
53
+
54
+ Returns
55
+ -------
56
+ pd.DataFrame
57
+ Processed global airport database.
58
+
59
+ Global airport database.
60
+
61
+ Notes
62
+ -----
63
+ As of 2023 March 30, the global airport database contains:
64
+
65
+ .. csv-table::
66
+ :header: "Airport Type", "Number"
67
+ :widths: 70, 30
68
+
69
+ "small_airport", 39327
70
+ "heliport", 19039
71
+ "closed", 10107
72
+ "medium_airport", 4753
73
+ "seaplane_base", 1133
74
+ "large_airport", 463
75
+ "balloonport", 45
76
+
77
+ References
78
+ ----------
79
+ - :cite:`megginsonOpendataDownloadsOurAirports2023`
80
+ """
81
+ cachestore = cachestore or cache.DiskCacheStore()
82
+
83
+ cache_key = "ourairports-data_airports.csv"
84
+ if cachestore.exists(cache_key) and not update_cache:
85
+ airports = pd.read_csv(cachestore.path(cache_key))
86
+ else:
87
+ airports = _download_ourairports_csv()
88
+ airports.to_csv(cachestore.path(cache_key), index=False)
89
+
90
+ #: Format dataset by renaming columns & filling nan values
91
+ airports.rename(
92
+ columns={"latitude_deg": "latitude", "longitude_deg": "longitude", "gps_code": "icao_code"},
93
+ inplace=True,
94
+ )
95
+ airports["elevation_ft"].fillna(0, inplace=True)
96
+
97
+ # Keep specific airport types used by commercial aviation
98
+ select_airport_types = airports["type"].isin(
99
+ ["large_airport", "medium_airport", "small_airport", "heliport"]
100
+ )
101
+
102
+ # Keep airports with valid ICAO codes
103
+ select_icao_codes = (airports["icao_code"].str.len() == 4) & (
104
+ airports["icao_code"].str.isalpha()
105
+ )
106
+
107
+ # filter airports
108
+ airports = airports.loc[select_airport_types & select_icao_codes]
109
+
110
+ # Format dataset
111
+ airports["elevation_m"] = units.ft_to_m(airports["elevation_ft"].to_numpy())
112
+ airports.sort_values(by=["icao_code"], ascending=True, inplace=True)
113
+
114
+ return airports.reset_index(drop=True)
115
+
116
+
117
+ def find_nearest_airport(
118
+ airports: pd.DataFrame,
119
+ longitude: float,
120
+ latitude: float,
121
+ altitude: float,
122
+ *,
123
+ bbox: float = 2.0,
124
+ ) -> str | None:
125
+ r"""
126
+ Find airport nearest to the waypoints.
127
+
128
+ Parameters
129
+ ----------
130
+ airports: pd.DataFrame
131
+ Airport database in the format returned from :func:`global_airport_database`.
132
+ longitude: float
133
+ Waypoint longitude, [:math:`\deg`]
134
+ latitude: float
135
+ Waypoint latitude, [:math:`\deg`]
136
+ altitude: float
137
+ Waypoint altitude, [:math:`m`]
138
+ bbox: float
139
+ Search airports within spatial bounding box of ± `bbox` from the waypoint, [:math:`\deg`]
140
+ Defaults to :math:`2\deg`
141
+
142
+ Returns
143
+ -------
144
+ str
145
+ ICAO code of nearest airport.
146
+ Returns None if no airport is found within ``bbox``.
147
+
148
+ Notes
149
+ -----
150
+ Function will first search for large airports around the waypoint vicinity.
151
+ If none is found, it will search for medium and small airports
152
+ around the waypoint vicinity.
153
+
154
+ The waypoint must be below 3,000 m (about 10,000 feet) to increase the
156
+ probability of identifying the correct airport.
156
+ """
157
+ if altitude > 3000:
158
+ raise ValueError(
159
+ f"Altitude ({altitude} m) is too high (> 3000 m) to identify nearest airport."
160
+ )
161
+
162
+ is_near_waypoint = airports["longitude"].between(
163
+ (longitude - bbox), (longitude + bbox)
164
+ ) & airports["latitude"].between((latitude - bbox), (latitude + bbox))
165
+
166
+ # Find the nearest airport from largest to smallest airport type
167
+ search_priority = ["large_airport", "medium_airport", "small_airport"]
168
+
169
+ for airport_type in search_priority:
170
+ is_airport_type = airports["type"] == airport_type
171
+ nearest_airports = airports.loc[is_near_waypoint & is_airport_type]
172
+
173
+ if len(nearest_airports) == 1:
174
+ return nearest_airports["icao_code"].values[0]
175
+
176
+ elif len(nearest_airports) > 1:
177
+ distance = distance_to_airports(
178
+ nearest_airports,
179
+ longitude,
180
+ latitude,
181
+ altitude,
182
+ )
183
+ i_nearest = np.argmin(distance)
184
+ return nearest_airports["icao_code"].values[i_nearest]
185
+
186
+ else:
187
+ continue
188
+
189
+ return None
190
+
191
+
192
+ def distance_to_airports(
193
+ airports: pd.DataFrame,
194
+ longitude: float,
195
+ latitude: float,
196
+ altitude: float,
197
+ ) -> np.ndarray:
198
+ r"""
199
+ Calculate the 3D distance from the waypoint to the provided airports.
200
+
201
+ Parameters
202
+ ----------
203
+ airports : pd.DataFrame
204
+ Airport database in the format returned from :func:`global_airport_database`.
205
+ longitude : float
206
+ Waypoint longitude, [:math:`\deg`]
207
+ latitude : float
208
+ Waypoint latitude, [:math:`\deg`]
209
+ altitude : float
210
+ Waypoint altitude, [:math:`m`]
211
+
212
+ Returns
213
+ -------
214
+ np.ndarray
215
+ 3D distance from waypoint to airports, [:math:`m`]
216
+
217
+ See Also
218
+ --------
219
+ :func:`geo.haversine`
220
+ """
221
+ dist_horizontal = geo.haversine(
222
+ np.full(airports["longitude"].shape, longitude),
223
+ np.full(airports["latitude"].shape, latitude),
224
+ airports["longitude"].to_numpy(),
225
+ airports["latitude"].to_numpy(),
226
+ )
227
+ dist_vertical = altitude - airports["elevation_m"].to_numpy()
228
+ return (dist_horizontal**2 + dist_vertical**2) ** 0.5
pycontrails/core/cache.py CHANGED
@@ -370,18 +370,16 @@ class DiskCacheStore(CacheStore):
370
370
  if disk_path.is_file():
371
371
  logger.debug("Remove file at path %s", disk_path)
372
372
  disk_path.unlink()
373
+ return
373
374
 
374
375
  # Assume anything else is a directory
375
- elif disk_path.exists():
376
+ if disk_path.exists():
376
377
  # rm directory recursively
377
378
  logger.debug("Remove directory at path %s", disk_path)
378
379
  shutil.rmtree(disk_path, ignore_errors=True)
380
+ return
379
381
 
380
- else:
381
- warnings.warn(f"No cache path found at {disk_path}")
382
-
383
- # make sure local cache directory exists
384
- # pathlib.Path(self.cache_dir).mkdir(parents=True, exist_ok=True)
382
+ warnings.warn(f"No cache path found at {disk_path}")
385
383
 
386
384
 
387
385
  class GCPCacheStore(CacheStore):
@@ -32,8 +32,11 @@ NETCDF_ENGINE: str = "netcdf4"
32
32
  #: Default chunking strategy when opening datasets with xarray
33
33
  DEFAULT_CHUNKS: dict[str, int] = {"time": 1}
34
34
 
35
+ #: Whether to open multi-file datasets in parallel
36
+ OPEN_IN_PARALLEL: bool = False
35
37
 
36
- def parse_timesteps(time: TimeInput | None, freq: str = "1H") -> list[datetime]:
38
+
39
+ def parse_timesteps(time: TimeInput | None, freq: str | None = "1H") -> list[datetime]:
37
40
  """Parse time input into set of time steps.
38
41
 
39
42
  If input time is length 2, this creates a range of equally spaced time
@@ -46,10 +49,11 @@ def parse_timesteps(time: TimeInput | None, freq: str = "1H") -> list[datetime]:
46
49
  Either a single datetime-like or tuple of datetime-like with the first value
47
50
  the start of the date range and second value the end of the time range.
48
51
  Input values can be any type compatible with :meth:`pandas.to_datetime`.
49
- freq : str, optional
52
+ freq : str | None, optional
50
53
  Timestep interval in range.
51
54
  See https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#timeseries-offset-aliases
52
55
  for a list of frequency aliases.
56
+ If None, returns input `time` as a list.
53
57
  Defaults to "1H".
54
58
 
55
59
  Returns
@@ -84,8 +88,11 @@ def parse_timesteps(time: TimeInput | None, freq: str = "1H") -> list[datetime]:
84
88
  "must be compatible with 'pd.to_datetime()'"
85
89
  )
86
90
 
87
- # get date range that encompasses all whole hours
88
- daterange = pd.date_range(timestamps[0].floor(freq), timestamps[1].ceil(freq), freq=freq)
91
+ if freq is None:
92
+ daterange = pd.DatetimeIndex([timestamps[0], timestamps[1]])
93
+ else:
94
+ # get date range that encompasses all whole hours
95
+ daterange = pd.date_range(timestamps[0].floor(freq), timestamps[1].ceil(freq), freq=freq)
89
96
 
90
97
  # return list of datetimes
91
98
  return daterange.to_pydatetime().tolist()
@@ -641,7 +648,7 @@ class MetDataSource(abc.ABC):
641
648
 
642
649
  - chunks: {"time": 1}
643
650
  - engine: "netcdf4"
644
- - parallel: True
651
+ - parallel: False
645
652
 
646
653
  Returns
647
654
  -------
@@ -650,5 +657,5 @@ class MetDataSource(abc.ABC):
650
657
  """
651
658
  xr_kwargs.setdefault("engine", NETCDF_ENGINE)
652
659
  xr_kwargs.setdefault("chunks", DEFAULT_CHUNKS)
653
- xr_kwargs.setdefault("parallel", True)
660
+ xr_kwargs.setdefault("parallel", OPEN_IN_PARALLEL)
654
661
  return xr.open_mfdataset(disk_paths, **xr_kwargs)
pycontrails/core/fleet.py CHANGED
@@ -22,12 +22,12 @@ class Fleet(Flight):
22
22
 
23
23
  def __init__(
24
24
  self,
25
- data: dict[str, np.ndarray] | None = None,
26
- longitude: np.ndarray | None = None,
27
- latitude: np.ndarray | None = None,
28
- altitude: np.ndarray | None = None,
29
- level: np.ndarray | None = None,
30
- time: np.ndarray | None = None,
25
+ data: dict[str, npt.ArrayLike] | None = None,
26
+ longitude: npt.ArrayLike | None = None,
27
+ latitude: npt.ArrayLike | None = None,
28
+ altitude: npt.ArrayLike | None = None,
29
+ level: npt.ArrayLike | None = None,
30
+ time: npt.ArrayLike | None = None,
31
31
  attrs: dict[str, Any] | None = None,
32
32
  copy: bool = True,
33
33
  fuel: Fuel | None = None,
@@ -60,12 +60,12 @@ class Fleet(Flight):
60
60
 
61
61
  self.final_waypoints = self.calc_final_waypoints()
62
62
 
63
- def calc_final_waypoints(self) -> np.ndarray:
63
+ def calc_final_waypoints(self) -> npt.NDArray[np.bool_]:
64
64
  """Validate data and calculate the final waypoint of each flight.
65
65
 
66
66
  Returns
67
67
  -------
68
- np.ndarray
68
+ npt.NDArray[np.bool_]
69
69
  A boolean array in which True values correspond to final waypoint of each flight.
70
70
 
71
71
  Raises
@@ -93,6 +93,49 @@ class Fleet(Flight):
93
93
  final_waypoints[final_waypoint_indices] = True
94
94
  return final_waypoints
95
95
 
96
+ def fit_altitude(
97
+ self,
98
+ max_segments: int = 30,
99
+ pop: int = 3,
100
+ r2_target: float = 0.999,
101
+ max_cruise_rocd: float = 10,
102
+ sg_window: int = 7,
103
+ sg_polyorder: int = 1,
104
+ ) -> Fleet:
105
+ """Use piecewise linear fitting to smooth a flight profile.
106
+
107
+ Fit a flight profile to a series of line segments. Segments that have a
108
+ small rocd will be set to have a slope of zero and snapped to the
109
+ nearest thousand foot level. A Savitzky-Golay filter will then be
110
+ applied to the profile to smooth the climbs and descents. This filter
111
+ works best for high frequency flight data, sampled at a 1-3 second
112
+ sampling period.
113
+
114
+ Parameters
115
+ ----------
116
+ max_segments : int, optional
117
+ The maximum number of line segments to fit to the flight profile.
118
+ pop: int, optional
119
+ Population parameter used for the stochastic optimization routine
120
+ used to fit the flight profile.
121
+ r2_target: float, optional
122
+ Target r^2 value for solver. Solver will continue to add line
123
+ segments until the resulting r^2 value is greater than this.
124
+ max_cruise_rocd: float, optional
125
+ The maximum ROCD for a segment that will be forced to a slope of
126
+ zero, [:math:`ft s^{-1}`]
127
+ sg_window: int, optional
128
+ Parameter for :func:`scipy.signal.savgol_filter`
129
+ sg_polyorder: int, optional
130
+ Parameter for :func:`scipy.signal.savgol_filter`
131
+
132
+ Returns
133
+ -------
134
+ Fleet
135
+ Smoothed flight
136
+ """
137
+ raise NotImplementedError("Only implemented for Flight instances")
138
+
96
139
  @classmethod
97
140
  def from_seq(
98
141
  cls,
@@ -257,8 +300,8 @@ class Fleet(Flight):
257
300
 
258
301
  def segment_true_airspeed(
259
302
  self,
260
- u_wind: npt.NDArray[np.float_] | None = None,
261
- v_wind: npt.NDArray[np.float_] | None = None,
303
+ u_wind: npt.NDArray[np.float_] | float = 0.0,
304
+ v_wind: npt.NDArray[np.float_] | float = 0.0,
262
305
  smooth: bool = True,
263
306
  window_length: int = 7,
264
307
  polyorder: int = 1,
@@ -277,13 +320,14 @@ class Fleet(Flight):
277
320
  RuntimeError
278
321
  Unexpected key `__u_wind` or `__v_wind` found in :attr:`data`.
279
322
  """
280
- if u_wind is not None:
323
+ if isinstance(u_wind, np.ndarray):
281
324
  # Choosing a key we don't think exists
282
325
  key = "__u_wind"
283
326
  if key in self:
284
327
  raise RuntimeError(f"Unexpected key {key} found")
285
328
  self[key] = u_wind
286
- if v_wind is not None:
329
+
330
+ if isinstance(v_wind, np.ndarray):
287
331
  # Choosing a key we don't think exists
288
332
  key = "__v_wind"
289
333
  if key in self:
@@ -291,12 +335,12 @@ class Fleet(Flight):
291
335
  self[key] = v_wind
292
336
 
293
337
  # Calculate TAS on each flight individually
294
- def calc_tas(fl: Flight) -> np.ndarray:
295
- u_wind = fl.get("__u_wind", None)
296
- v_wind = fl.get("__v_wind", None)
338
+ def calc_tas(fl: Flight) -> npt.NDArray[np.float_]:
339
+ u = fl.get("__u_wind", u_wind)
340
+ v = fl.get("__v_wind", v_wind)
297
341
 
298
342
  return fl.segment_true_airspeed(
299
- u_wind, v_wind, smooth=smooth, window_length=window_length, polyorder=polyorder
343
+ u, v, smooth=smooth, window_length=window_length, polyorder=polyorder
300
344
  )
301
345
 
302
346
  fls = self.to_flight_list(copy=False)
@@ -314,7 +358,7 @@ class Fleet(Flight):
314
358
  return np.concatenate(tas)
315
359
 
316
360
  @overrides
317
- def segment_groundspeed(self, *args: Any, **kwargs: Any) -> np.ndarray:
361
+ def segment_groundspeed(self, *args: Any, **kwargs: Any) -> npt.NDArray[np.float_]:
318
362
  # Implement if we have a usecase for this.
319
363
  # Because the super() method uses a smoothing pattern, it will not reliably
320
364
  # work on Fleet.
@@ -327,15 +371,23 @@ class Fleet(Flight):
327
371
  raise NotImplementedError
328
372
 
329
373
  @overrides
330
- def segment_length(self) -> np.ndarray:
374
+ def segment_length(self) -> npt.NDArray[np.float_]:
331
375
  return np.where(self.final_waypoints, np.nan, super().segment_length())
332
376
 
377
+ @property
378
+ @overrides
379
+ def max_distance_gap(self) -> float:
380
+ if self.attrs["crs"] != "EPSG:4326":
381
+ raise NotImplementedError("Only implemented for EPSG:4326 CRS.")
382
+
383
+ return np.nanmax(self.segment_length()).item()
384
+
333
385
  @overrides
334
- def segment_azimuth(self) -> np.ndarray:
386
+ def segment_azimuth(self) -> npt.NDArray[np.float_]:
335
387
  return np.where(self.final_waypoints, np.nan, super().segment_azimuth())
336
388
 
337
389
  @overrides
338
- def segment_angle(self) -> tuple[np.ndarray, np.ndarray]:
390
+ def segment_angle(self) -> tuple[npt.NDArray[np.float_], npt.NDArray[np.float_]]:
339
391
  sin_a, cos_a = super().segment_angle()
340
392
  sin_a[self.final_waypoints] = np.nan
341
393
  cos_a[self.final_waypoints] = np.nan