pycontrails 0.54.6-cp313-cp313-macosx_11_0_arm64.whl → 0.54.8-cp313-cp313-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycontrails/__init__.py +1 -1
- pycontrails/_version.py +9 -4
- pycontrails/core/aircraft_performance.py +12 -30
- pycontrails/core/airports.py +4 -1
- pycontrails/core/cache.py +4 -0
- pycontrails/core/flight.py +4 -4
- pycontrails/core/flightplan.py +10 -2
- pycontrails/core/met.py +53 -40
- pycontrails/core/met_var.py +18 -0
- pycontrails/core/models.py +79 -3
- pycontrails/core/rgi_cython.cpython-313-darwin.so +0 -0
- pycontrails/core/vector.py +74 -0
- pycontrails/datalib/spire/__init__.py +5 -0
- pycontrails/datalib/spire/exceptions.py +62 -0
- pycontrails/datalib/spire/spire.py +604 -0
- pycontrails/models/accf.py +4 -4
- pycontrails/models/cocip/cocip.py +52 -6
- pycontrails/models/cocip/cocip_params.py +10 -1
- pycontrails/models/cocip/contrail_properties.py +4 -6
- pycontrails/models/cocip/output_formats.py +12 -4
- pycontrails/models/cocip/radiative_forcing.py +2 -8
- pycontrails/models/cocip/unterstrasser_wake_vortex.py +132 -30
- pycontrails/models/cocipgrid/cocip_grid.py +14 -11
- pycontrails/models/emissions/black_carbon.py +19 -14
- pycontrails/models/emissions/emissions.py +8 -8
- pycontrails/models/humidity_scaling/humidity_scaling.py +49 -4
- pycontrails/models/ps_model/ps_aircraft_params.py +1 -1
- pycontrails/models/ps_model/ps_grid.py +22 -22
- pycontrails/models/ps_model/ps_model.py +4 -7
- pycontrails/models/ps_model/static/{ps-aircraft-params-20240524.csv → ps-aircraft-params-20250328.csv} +58 -57
- pycontrails/models/ps_model/static/{ps-synonym-list-20240524.csv → ps-synonym-list-20250328.csv} +1 -0
- pycontrails/models/tau_cirrus.py +1 -0
- pycontrails/physics/constants.py +2 -1
- pycontrails/physics/jet.py +5 -4
- pycontrails/physics/static/{iata-cargo-load-factors-20241115.csv → iata-cargo-load-factors-20250221.csv} +3 -0
- pycontrails/physics/static/{iata-passenger-load-factors-20241115.csv → iata-passenger-load-factors-20250221.csv} +3 -0
- {pycontrails-0.54.6.dist-info → pycontrails-0.54.8.dist-info}/METADATA +5 -4
- {pycontrails-0.54.6.dist-info → pycontrails-0.54.8.dist-info}/RECORD +42 -40
- {pycontrails-0.54.6.dist-info → pycontrails-0.54.8.dist-info}/WHEEL +2 -1
- {pycontrails-0.54.6.dist-info → pycontrails-0.54.8.dist-info/licenses}/NOTICE +1 -1
- pycontrails/datalib/spire.py +0 -739
- {pycontrails-0.54.6.dist-info → pycontrails-0.54.8.dist-info/licenses}/LICENSE +0 -0
- {pycontrails-0.54.6.dist-info → pycontrails-0.54.8.dist-info}/top_level.txt +0 -0
pycontrails/__init__.py
CHANGED
@@ -1,7 +1,7 @@
 """
 ``pycontrails`` public API.
 
-Copyright 2021-present Breakthrough Energy
+Copyright 2021-present Contrails.org and the Breakthrough Energy Foundation
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
pycontrails/_version.py
CHANGED
@@ -1,8 +1,13 @@
-# file generated by
+# file generated by setuptools-scm
 # don't change, don't track in version control
+
+__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
 TYPE_CHECKING = False
 if TYPE_CHECKING:
-    from typing import Tuple
+    from typing import Tuple
+    from typing import Union
+
     VERSION_TUPLE = Tuple[Union[int, str], ...]
 else:
     VERSION_TUPLE = object
@@ -12,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.54.6'
-__version_tuple__ = version_tuple = (0, 54, 6)
+__version__ = version = '0.54.8'
+__version_tuple__ = version_tuple = (0, 54, 8)
pycontrails/core/aircraft_performance.py
CHANGED
@@ -71,6 +71,8 @@ class AircraftPerformanceParams(ModelParams, CommonAircraftPerformanceParams):
     #: level with zero wind when computing true airspeed. In other words,
     #: approximate low-altitude true airspeed with the ground speed. Enabling
     #: this does NOT remove any NaN values in the ``met`` data itself.
+    #: In the case that ``met`` is not provided, any missing values are
+    #: filled with zero wind.
     fill_low_altitude_with_zero_wind: bool = False
 
 
@@ -509,7 +511,8 @@ class AircraftPerformance(Model):
             tas[cond] = self.source.segment_groundspeed()[cond]
             return tas
 
-
+        # Use current cocip convention: eastward_wind on met, u_wind on source
+        wind_available = ("u_wind" in self.source and "v_wind" in self.source) or (
             self.met is not None and "eastward_wind" in self.met and "northward_wind" in self.met
         )
 
@@ -526,12 +529,16 @@
            )
            raise ValueError(msg)
 
-        u = interpolate_met(self.met, self.source, "eastward_wind", **self.interp_kwargs)
-        v = interpolate_met(self.met, self.source, "northward_wind", **self.interp_kwargs)
+        u = interpolate_met(self.met, self.source, "eastward_wind", "u_wind", **self.interp_kwargs)
+        v = interpolate_met(self.met, self.source, "northward_wind", "v_wind", **self.interp_kwargs)
 
        if fill_with_groundspeed:
-
-
+            if self.met is None:
+                cond = np.isnan(u) & np.isnan(v)
+            else:
+                met_level_max = self.met.data["level"][-1].item()  # type: ignore[union-attr]
+                cond = self.source.level > met_level_max
+
            # We DON'T overwrite the original u and v arrays already attached to the source
            u = np.where(cond, 0.0, u)
            v = np.where(cond, 0.0, v)
@@ -657,28 +664,3 @@ def _fill_low_altitude_with_isa_temperature(vector: GeoVectorDataset, met_level_
 
     t_isa = vector.T_isa()
     air_temperature[cond] = t_isa[cond]
-
-
-def _fill_low_altitude_tas_with_true_groundspeed(fl: Flight, met_level_max: float) -> None:
-    """Fill low-altitude NaN values in ``true_airspeed`` with ground speed.
-
-    The ``true_airspeed`` param is assumed to have been computed by
-    interpolating against a gridded wind field that did not necessarily
-    extend to the surface. This function fills points below the lowest
-    altitude in the gridded data with ground speed values.
-
-    This function operates in-place and modifies the ``true_airspeed`` field.
-
-    Parameters
-    ----------
-    fl : Flight
-        Flight instance associated with the ``true_airspeed`` data.
-    met_level_max : float
-        The maximum level in the met data, [:math:`hPa`].
-    """
-    tas = fl["true_airspeed"]
-    is_nan = np.isnan(tas)
-    low_alt = fl.level > met_level_max
-    cond = is_nan & low_alt
-
-    tas[cond] = fl.segment_groundspeed()[cond]
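A note on the new fallback above: when ``met`` is missing, waypoints whose wind components are NaN are treated as zero wind, which makes the computed true airspeed collapse to the ground speed. The following standalone numpy sketch (made-up arrays and a simplified wind-triangle formula, not pycontrails code) illustrates that behaviour.

import numpy as np

# Hypothetical per-waypoint values; NaN marks waypoints with no wind data.
groundspeed = np.array([140.0, 180.0, 230.0])  # m/s
u_wind = np.array([np.nan, 10.0, 15.0])        # eastward wind, m/s
v_wind = np.array([np.nan, -5.0, 0.0])         # northward wind, m/s
sin_a, cos_a = 0.6, 0.8                        # made-up segment course

# Mirror of the diff's fallback: where both components are NaN, use zero wind.
cond = np.isnan(u_wind) & np.isnan(v_wind)
u = np.where(cond, 0.0, u_wind)
v = np.where(cond, 0.0, v_wind)

# Simplified wind triangle: with zero wind, true airspeed equals ground speed.
tas = np.hypot(groundspeed * sin_a - u, groundspeed * cos_a - v)
print(tas)  # first entry is 140.0, the ground speed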
pycontrails/core/airports.py
CHANGED
@@ -2,6 +2,8 @@
 
 from __future__ import annotations
 
+import functools
+
 import numpy as np
 import pandas as pd
 
@@ -35,6 +37,7 @@ def _download_ourairports_csv() -> pd.DataFrame:
     )
 
 
+@functools.cache
 def global_airport_database(
     cachestore: cache.CacheStore | None = None, update_cache: bool = False
 ) -> pd.DataFrame:
@@ -91,7 +94,7 @@ def global_airport_database(
     airports = airports.rename(
        columns={"latitude_deg": "latitude", "longitude_deg": "longitude", "gps_code": "icao_code"},
     )
-    airports.fillna({"elevation_ft": 0}, inplace=True)
+    airports.fillna({"elevation_ft": 0.0}, inplace=True)
 
     # Keep specific airport types used by commercial aviation
     subset = ("large_airport", "medium_airport", "small_airport", "heliport")
pycontrails/core/cache.py
CHANGED
@@ -266,6 +266,8 @@ class DiskCacheStore(CacheStore):
        >>> # put a file directly
        >>> disk_cache.put("README.md", "test/file.md")
        'test/file.md'
+
+        >>> disk_cache.clear()  # cleanup
        """
 
        if not pathlib.Path(data_path).is_file():
@@ -312,6 +314,8 @@
        >>> # returns a path
        >>> disk_cache.get("test/file.md")
        'cache/test/file.md'
+
+        >>> disk_cache.clear()  # cleanup
        """
        return self.path(cache_path)
 
pycontrails/core/flight.py
CHANGED
@@ -1388,7 +1388,7 @@ class Flight(GeoVectorDataset):
 
        jump_indices = _antimeridian_index(pd.Series(self["longitude"]))
 
-        def _group_to_feature(group: pd.DataFrame) -> dict[str, str | dict[str, Any]]:
+        def _group_to_feature(name: str, group: pd.DataFrame) -> dict[str, str | dict[str, Any]]:
            # assigns a different value to each group of consecutive indices
            subgrouping = group.index.to_series().diff().ne(1).cumsum()
 
@@ -1405,7 +1405,7 @@ class Flight(GeoVectorDataset):
            geometry = {"type": "MultiLineString", "coordinates": multi_ls}
 
            # adding in static properties
-            properties: dict[str, Any] = {key:
+            properties: dict[str, Any] = {key: name} if key is not None else {}
            properties.update(self.constants)
            return {"type": "Feature", "geometry": geometry, "properties": properties}
 
@@ -1415,11 +1415,11 @@ class Flight(GeoVectorDataset):
        # create a single group containing all rows of dataframe
        groups = self.dataframe.groupby(lambda _: 0)
 
-        features =
+        features = [_group_to_feature(*name_group) for name_group in groups]
        return {"type": "FeatureCollection", "features": features}
 
    def to_traffic(self) -> traffic.core.Flight:
-        """Convert to :class:`traffic.core.Flight`instance.
+        """Convert to :class:`traffic.core.Flight` instance.
 
        Returns
        -------
pycontrails/core/flightplan.py
CHANGED
@@ -38,10 +38,13 @@ def to_atc_plan(plan: dict[str, Any]) -> str:
    if "second_alt_icao" in plan:
        ret += f" {plan['second_alt_icao']}"
    ret += "\n"
-    ret += f"-{plan['other_info']}
+    ret += f"-{plan['other_info']}"
    if "supplementary_info" in plan:
+        ret += "\n-"
        ret += " ".join([f"{i[0]}/{i[1]}" for i in plan["supplementary_info"].items()])
 
+    ret += ")"
+
    if ret[-1] == "\n":
        ret = ret[:-1]
 
@@ -194,7 +197,12 @@ def parse_atc_plan(atc_plan: str) -> dict[str, str]:
 
    # Other info
    if len(basic) > 8:
-
+        info = basic[8]
+        idx = info.find("DOF")
+        if idx != -1:
+            flightplan["departure_date"] = info[idx + 4 : idx + 10]
+
+        flightplan["other_info"] = info.strip()
 
    # Supl. Info
    if len(basic) > 9:
pycontrails/core/met.py
CHANGED
@@ -150,11 +150,8 @@ class MetBase(ABC, Generic[XArrayType]):
        """
        longitude = self.indexes["longitude"].to_numpy()
        if longitude.dtype != COORD_DTYPE:
-
-
-                "Instantiate with 'copy=True' to convert to float64. "
-                "Instantiate with 'validate=False' to skip validation."
-            )
+            msg = f"Longitude values must have dtype {COORD_DTYPE}. Instantiate with 'copy=True'."
+            raise ValueError(msg)
 
        if self.is_wrapped:
            # Relax verification if the longitude has already been processed and wrapped
@@ -194,11 +191,8 @@ class MetBase(ABC, Generic[XArrayType]):
        """
        latitude = self.indexes["latitude"].to_numpy()
        if latitude.dtype != COORD_DTYPE:
-
-
-                "Instantiate with 'copy=True' to convert to float64. "
-                "Instantiate with 'validate=False' to skip validation."
-            )
+            msg = f"Latitude values must have dtype {COORD_DTYPE}. Instantiate with 'copy=True'."
+            raise ValueError(msg)
 
        if latitude[0] < -90.0:
            raise ValueError(
@@ -233,8 +227,8 @@ class MetBase(ABC, Generic[XArrayType]):
        if da.dims != self.dim_order:
            if key is not None:
                msg = (
-                    f"Data dimension not transposed on variable '{key}'.
-                    " 'copy=True'."
+                    f"Data dimension not transposed on variable '{key}'. "
+                    "Instantiate with 'copy=True'."
                )
            else:
                msg = "Data dimension not transposed. Instantiate with 'copy=True'."
@@ -258,11 +252,8 @@ class MetBase(ABC, Generic[XArrayType]):
        self._validate_latitude()
        self._validate_transpose()
        if self.data["level"].dtype != COORD_DTYPE:
-
-
-                "Instantiate with 'copy=True' to convert to float64. "
-                "Instantiate with 'validate=False' to skip validation."
-            )
+            msg = f"Level values must have dtype {COORD_DTYPE}. Instantiate with 'copy=True'."
+            raise ValueError(msg)
 
    def _preprocess_dims(self, wrap_longitude: bool) -> None:
        """Confirm DataArray or Dataset include required dimension in a consistent format.
@@ -435,7 +426,7 @@ class MetBase(ABC, Generic[XArrayType]):
        Assumes the longitude dimension is sorted (this is established by the
        :class:`MetDataset` or :class:`MetDataArray` constructor).
 
-        .. versionchanged 0.26.0
+        .. versionchanged:: 0.26.0
 
            The previous implementation checked for the minimum and maximum longitude
            dimension values to be duplicated. The current implementation only checks for
@@ -492,7 +483,7 @@ class MetBase(ABC, Generic[XArrayType]):
 
        Does not yet save in parallel.
 
-        .. versionchanged::0.34.1
+        .. versionchanged:: 0.34.1
 
            If :attr:`cachestore` is None, this method assigns it
            to new :class:`DiskCacheStore`.
@@ -1178,19 +1169,45 @@ class MetDataset(MetBase):
        }
        return self._get_pycontrails_attr_template("product", supported, examples)
 
-
-
+    @overload
+    def standardize_variables(
+        self, variables: Iterable[MetVariable], inplace: Literal[False] = ...
+    ) -> Self: ...
+
+    @overload
+    def standardize_variables(
+        self, variables: Iterable[MetVariable], inplace: Literal[True]
+    ) -> None: ...
+
+    def standardize_variables(
+        self, variables: Iterable[MetVariable], inplace: bool = False
+    ) -> Self | None:
+        """Standardize variable names.
+
+        .. versionchanged:: 0.54.7
+
+            By default, this method returns a new :class:`MetDataset` instead
+            of renaming in place. To retain the old behavior, set ``inplace=True``.
 
        Parameters
        ----------
        variables : Iterable[MetVariable]
            Data source variables
+        inplace : bool, optional
+            If True, rename variables in place. Otherwise, return a new
+            :class:`MetDataset` with renamed variables.
 
        See Also
        --------
        :func:`standardize_variables`
        """
-        standardize_variables(self, variables)
+        data_renamed = standardize_variables(self.data, variables)
+
+        if inplace:
+            self.data = data_renamed
+            return None
+
+        return type(self)._from_fastpath(data_renamed, cachestore=self.cachestore)
 
    @classmethod
    def from_coords(
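As the ``.. versionchanged:: 0.54.7`` note above states, ``MetDataset.standardize_variables`` now returns a renamed copy by default instead of renaming in place. A hedged usage sketch, assuming an existing ``MetDataset`` named ``met`` whose variables are stored under short names such as ``t`` and ``q``:

from pycontrails.core import met_var

# New default behaviour: a renamed MetDataset is returned, the original is untouched.
met_std = met.standardize_variables([met_var.AirTemperature, met_var.SpecificHumidity])

# Pre-0.54.7 behaviour is still available by renaming in place.
met.standardize_variables([met_var.AirTemperature, met_var.SpecificHumidity], inplace=True)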
@@ -2626,23 +2643,28 @@ def downselect(data: XArrayType, bbox: tuple[float, ...]) -> XArrayType:
            "or length 6 [west, south, min-level, east, north, max-level]"
        )
 
+    if west <= east:
+        # Return a view of the data
+        # If data is lazy, this will not load the data
+        return data.sel(
+            longitude=slice(west, east),
+            latitude=slice(south, north),
+            level=slice(level_min, level_max),
+        )
+
+    # In this case, the bbox spans the antimeridian
+    # If data is lazy, this will load the data (data.where is not lazy AFAIK)
    cond = (
        (data["latitude"] >= south)
        & (data["latitude"] <= north)
        & (data["level"] >= level_min)
        & (data["level"] <= level_max)
+        & ((data["longitude"] >= west) | (data["longitude"] <= east))
    )
-
-    # wrapping longitude
-    if west <= east:
-        cond = cond & (data["longitude"] >= west) & (data["longitude"] <= east)
-    else:
-        cond = cond & ((data["longitude"] >= west) | (data["longitude"] <= east))
-
    return data.where(cond, drop=True)
 
 
-def standardize_variables(ds:
+def standardize_variables(ds: xr.Dataset, variables: Iterable[MetVariable]) -> xr.Dataset:
    """Rename all variables in dataset from short name to standard name.
 
    This function does not change any variables in ``ds`` that are not found in ``variables``.
@@ -2652,8 +2674,7 @@ def standardize_variables(ds: DatasetType, variables: Iterable[MetVariable]) ->
    Parameters
    ----------
    ds : DatasetType
-        An :class:`xr.Dataset
-        passed, the underlying :class:`xr.Dataset` is modified in place.
+        An :class:`xr.Dataset`.
    variables : Iterable[MetVariable]
        Data source variables
 
@@ -2662,14 +2683,6 @@ def standardize_variables(ds: DatasetType, variables: Iterable[MetVariable]) ->
    DatasetType
        Dataset with variables renamed to standard names
    """
-    if isinstance(ds, xr.Dataset):
-        return _standardize_variables(ds, variables)
-
-    ds.data = _standardize_variables(ds.data, variables)
-    return ds
-
-
-def _standardize_variables(ds: xr.Dataset, variables: Iterable[MetVariable]) -> xr.Dataset:
    variables_dict: dict[Hashable, str] = {v.short_name: v.standard_name for v in variables}
    name_dict = {var: variables_dict[var] for var in ds.data_vars if var in variables_dict}
    return ds.rename(name_dict)
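The rewritten ``downselect`` above takes a lazy ``.sel`` slice when the bounding box does not cross the antimeridian and falls back to a ``where`` mask when ``west > east``. A numpy-only sketch of the wrapped longitude condition used in that second branch (made-up longitudes, assuming values in ``[-180, 180)``):

import numpy as np

longitudes = np.array([-175.0, -100.0, 0.0, 100.0, 175.0])

# Bounding box from 170°E eastward across the antimeridian to 170°W: west > east.
west, east = 170.0, -170.0
mask = (longitudes >= west) | (longitudes <= east)
print(longitudes[mask])  # [-175.  175.]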
pycontrails/core/met_var.py
CHANGED
@@ -367,3 +367,21 @@ TOAOutgoingLongwaveFlux = MetVariable(
        '"flux" implies per unit area, called "flux density" in physics.'
    ),
 )
+
+PRESSURE_LEVEL_VARIABLES = [
+    AirTemperature,
+    SpecificHumidity,
+    RelativeHumidity,
+    Geopotential,
+    GeopotentialHeight,
+    EastwardWind,
+    NorthwardWind,
+    VerticalVelocity,
+    MassFractionOfCloudLiquidWaterInAir,
+    MassFractionOfCloudIceInAir,
+    CloudAreaFractionInAtmosphereLayer,
+]
+
+SINGLE_LEVEL_VARIABLES = [SurfacePressure, TOANetDownwardShortwaveFlux, TOAOutgoingLongwaveFlux]
+
+MET_VARIABLES = PRESSURE_LEVEL_VARIABLES + SINGLE_LEVEL_VARIABLES
pycontrails/core/models.py
CHANGED
@@ -22,8 +22,10 @@ import xarray as xr
 from pycontrails.core.fleet import Fleet
 from pycontrails.core.flight import Flight
 from pycontrails.core.met import MetDataArray, MetDataset, MetVariable, originates_from_ecmwf
-from pycontrails.core.met_var import SpecificHumidity
+from pycontrails.core.met_var import MET_VARIABLES, SpecificHumidity
 from pycontrails.core.vector import GeoVectorDataset
+from pycontrails.datalib.ecmwf import ECMWF_VARIABLES
+from pycontrails.datalib.gfs import GFS_VARIABLES
 from pycontrails.utils.json import NumpyEncoder
 from pycontrails.utils.types import type_guard
 
@@ -179,8 +181,10 @@ class Model(ABC):
 
    #: Required meteorology pressure level variables.
    #: Each element in the list is a :class:`MetVariable` or a ``tuple[MetVariable]``.
-    #: If element is a ``tuple[MetVariable]``, the variable depends on the data source
-    #:
+    #: If element is a ``tuple[MetVariable]``, the variable depends on the data source
+    #: and the tuple must include entries for a model-agnostic variable,
+    #: an ECMWF-specific variable, and a GFS-specific variable.
+    #: Only one of the three variable in the tuple is required for model evaluation.
    met_variables: tuple[MetVariable | tuple[MetVariable, ...], ...]
 
    #: Set of required parameters if processing already complete on ``met`` input.
@@ -276,6 +280,42 @@ class Model(ABC):
 
        return hashlib.sha1(bytes(_hash, "utf-8")).hexdigest()
 
+    @classmethod
+    def generic_met_variables(cls) -> tuple[MetVariable, ...]:
+        """Return a model-agnostic list of required meteorology variables.
+
+        Returns
+        -------
+        tuple[MetVariable]
+            List of model-agnostic variants of required variables
+        """
+        available = set(MET_VARIABLES)
+        return tuple(_find_match(required, available) for required in cls.met_variables)
+
+    @classmethod
+    def ecmwf_met_variables(cls) -> tuple[MetVariable, ...]:
+        """Return an ECMWF-specific list of required meteorology variables.
+
+        Returns
+        -------
+        tuple[MetVariable]
+            List of ECMWF-specific variants of required variables
+        """
+        available = set(ECMWF_VARIABLES)
+        return tuple(_find_match(required, available) for required in cls.met_variables)
+
+    @classmethod
+    def gfs_met_variables(cls) -> tuple[MetVariable, ...]:
+        """Return a GFS-specific list of required meteorology variables.
+
+        Returns
+        -------
+        tuple[MetVariable]
+            List of GFS-specific variants of required variables
+        """
+        available = set(GFS_VARIABLES)
+        return tuple(_find_match(required, available) for required in cls.met_variables)
+
    def _verify_met(self) -> None:
        """Verify integrity of :attr:`met`.
 
@@ -805,6 +845,42 @@ def _interp_grid_to_grid(
    raise NotImplementedError(msg)
 
 
+def _find_match(
+    required: MetVariable | Sequence[MetVariable], available: set[MetVariable]
+) -> MetVariable:
+    """Find match for required met variable in list of data-source-specific met variables.
+
+    Parameters
+    ----------
+    required : MetVariable | Sequence[MetVariable]
+        Required met variable
+
+    available : Sequence[MetVariable]
+        Collection of data-source-specific met variables
+
+    Returns
+    -------
+    MetVariable
+        Match for required met variable in collection of data-source-specific met variables
+
+    Raises
+    ------
+    KeyError
+        Raised if not match is found
+    """
+    if isinstance(required, MetVariable):
+        return required
+
+    for var in required:
+        if var in available:
+            return var
+
+    required_keys = [v.standard_name for v in required]
+    available_keys = [v.standard_name for v in available]
+    msg = f"None of {required_keys} match variable in {available_keys}"
+    raise KeyError(msg)
+
+
 def _raise_missing_met_var(var: MetVariable | Sequence[MetVariable]) -> NoReturn:
    """Raise KeyError on missing met variable.
 
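The three classmethods added above resolve each entry of ``Model.met_variables`` against a specific catalogue (generic, ECMWF, or GFS) via ``_find_match``. A hedged usage sketch, assuming the ``Cocip`` model (whose required variables include per-source tuples):

from pycontrails.models.cocip import Cocip

# Model-agnostic, ECMWF-specific, and GFS-specific variants of the
# met variables Cocip requires.
print([v.standard_name for v in Cocip.generic_met_variables()])
print([v.standard_name for v in Cocip.ecmwf_met_variables()])
print([v.standard_name for v in Cocip.gfs_met_variables()])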
pycontrails/core/rgi_cython.cpython-313-darwin.so
CHANGED
Binary file
pycontrails/core/vector.py
CHANGED
@@ -507,6 +507,9 @@ class VectorDataset:
        >>> vector.get_data_or_attr("c", default=5)
        5
 
+        See Also
+        --------
+        get_constant
        """
        marker = self.__marker
 
@@ -1013,6 +1016,77 @@
        )
        self.broadcast_attrs(numeric_attrs, overwrite)
 
+    def get_constant(self, key: str, default: Any = __marker) -> Any:
+        """Get a constant value from :attr:`attrs` or :attr:`data`.
+
+        - If ``key`` is found in :attr:`attrs`, the value is returned.
+        - If ``key`` is found in :attr:`data`, the common value is returned if all
+          values are equal.
+        - If ``key`` is not found in :attr:`attrs` or :attr:`data` and a ``default`` is provided,
+          the ``default`` is returned.
+        - Otherwise, a KeyError is raised.
+
+        Parameters
+        ----------
+        key : str
+            Key to look for.
+        default : Any, optional
+            Default value to return if ``key`` is not found in :attr:`attrs` or :attr:`data`.
+
+        Returns
+        -------
+        Any
+            The constant value for ``key``.
+
+        Raises
+        ------
+        KeyError
+            If ``key`` is not found in :attr:`attrs` or the values in :attr:`data` are not equal
+            and ``default`` is not provided.
+
+        Examples
+        --------
+        >>> vector = VectorDataset({"a": [1, 1, 1], "b": [2, 2, 3]})
+        >>> vector.get_constant("a")
+        np.int64(1)
+        >>> vector.get_constant("b")
+        Traceback (most recent call last):
+        ...
+        KeyError: "A constant key 'b' not found in attrs or data"
+        >>> vector.get_constant("b", 3)
+        3
+
+        See Also
+        --------
+        get_data_or_attr
+        GeoVectorDataset.constants
+        """
+        marker = self.__marker
+
+        out = self.attrs.get(key, marker)
+        if out is not marker:
+            return out
+
+        arr: np.ndarray = self.data.get(key, marker)  # type: ignore[arg-type]
+        if arr is not marker:
+            try:
+                vals = np.unique(arr)
+            except TypeError:
+                # A TypeError can occur if the arr has object dtype and contains None
+                # Handle this case by returning None
+                if arr.dtype == object and np.all(arr == None):  # noqa: E711
+                    return None
+                raise
+
+            if len(vals) == 1:
+                return vals[0]
+
+        if default is not marker:
+            return default
+
+        msg = f"A constant key '{key}' not found in attrs or data"
+        raise KeyError(msg)
+
    # ------------
    # I / O
    # ------------
pycontrails/datalib/spire/exceptions.py
ADDED
@@ -0,0 +1,62 @@
+"""Custom exceptions used for spire data validation."""
+
+
+class BaseSpireError(Exception):
+    """Base class for all spire exceptions."""
+
+
+class BadTrajectoryException(BaseSpireError):
+    """A generic exception indicating a trajectory (flight instance) is invalid."""
+
+
+class SchemaError(BaseSpireError):
+    """Data object is inconsistent with required schema."""
+
+
+class OrderingError(BaseSpireError):
+    """Data object has incorrect ordering."""
+
+
+class OriginAirportError(BaseSpireError):
+    """
+    Trajectory is not originating at expected location.
+
+    We do not assume that the departure airports are invariant in the dataframe,
+    thus we handle the case of multiple airports listed.
+    """
+
+
+class DestinationAirportError(BaseSpireError):
+    """Trajectory is not terminating at expected location."""
+
+
+class FlightTooShortError(BaseSpireError):
+    """Trajectory is unreasonably short in flight time."""
+
+
+class FlightTooLongError(BaseSpireError):
+    """Trajectory is unreasonably long in flight time."""
+
+
+class FlightTooSlowError(BaseSpireError):
+    """Trajectory has period(s) of unrealistically slow speed."""
+
+
+class FlightTooFastError(BaseSpireError):
+    """Trajectory has period(s) of unrealistically high speed."""
+
+
+class ROCDError(BaseSpireError):
+    """Trajectory has an unrealistic rate of climb or descent."""
+
+
+class FlightAltitudeProfileError(BaseSpireError):
+    """Trajectory has an unrealistic rate of climb or descent."""
+
+
+class FlightDuplicateTimestamps(BaseSpireError):
+    """Trajectory contains waypoints with the same timestamp."""
+
+
+class FlightInvariantFieldViolation(BaseSpireError):
+    """Trajectory has multiple values for field(s) that should be invariant."""