pycontrails-0.41.0-cp310-cp310-macosx_11_0_arm64.whl → pycontrails-0.42.2-cp310-cp310-macosx_11_0_arm64.whl

This diff shows the content of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.


Files changed (40)
  1. pycontrails/_version.py +2 -2
  2. pycontrails/core/airports.py +228 -0
  3. pycontrails/core/cache.py +4 -6
  4. pycontrails/core/datalib.py +13 -6
  5. pycontrails/core/fleet.py +72 -20
  6. pycontrails/core/flight.py +485 -134
  7. pycontrails/core/flightplan.py +238 -0
  8. pycontrails/core/interpolation.py +11 -15
  9. pycontrails/core/met.py +5 -5
  10. pycontrails/core/models.py +4 -0
  11. pycontrails/core/rgi_cython.cpython-310-darwin.so +0 -0
  12. pycontrails/core/vector.py +80 -63
  13. pycontrails/datalib/__init__.py +1 -1
  14. pycontrails/datalib/ecmwf/common.py +14 -19
  15. pycontrails/datalib/spire/__init__.py +19 -0
  16. pycontrails/datalib/spire/spire.py +739 -0
  17. pycontrails/ext/bada/__init__.py +6 -6
  18. pycontrails/ext/cirium/__init__.py +2 -2
  19. pycontrails/models/cocip/cocip.py +37 -39
  20. pycontrails/models/cocip/cocip_params.py +37 -30
  21. pycontrails/models/cocip/cocip_uncertainty.py +47 -58
  22. pycontrails/models/cocip/radiative_forcing.py +220 -193
  23. pycontrails/models/cocip/wake_vortex.py +96 -91
  24. pycontrails/models/cocip/wind_shear.py +2 -2
  25. pycontrails/models/emissions/emissions.py +1 -1
  26. pycontrails/models/humidity_scaling.py +266 -9
  27. pycontrails/models/issr.py +2 -2
  28. pycontrails/models/pcr.py +1 -1
  29. pycontrails/models/quantiles/era5_ensemble_quantiles.npy +0 -0
  30. pycontrails/models/quantiles/iagos_quantiles.npy +0 -0
  31. pycontrails/models/sac.py +7 -5
  32. pycontrails/physics/geo.py +5 -3
  33. pycontrails/physics/jet.py +66 -113
  34. pycontrails/utils/json.py +3 -3
  35. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/METADATA +4 -7
  36. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/RECORD +40 -34
  37. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/LICENSE +0 -0
  38. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/NOTICE +0 -0
  39. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/WHEEL +0 -0
  40. {pycontrails-0.41.0.dist-info → pycontrails-0.42.2.dist-info}/top_level.txt +0 -0
pycontrails/core/vector.py
@@ -97,7 +97,7 @@ class VectorDataDict(Dict[str, np.ndarray]):
  for arr in self.values():
  self._validate_array(arr)

- def __setitem__(self, k: str, v: np.ndarray) -> None:
+ def __setitem__(self, k: str, v: npt.ArrayLike) -> None:
  """Set new key-value pair to instance and warn when overwriting existing key.

  This method casts ``v`` to a ``np.ndarray`` and ensures that the array size is
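
With the widened signature, plain Python sequences are accepted anywhere an array was required before. A minimal sketch of the new behavior (the dataset contents are illustrative, not from the package):

    import numpy as np
    from pycontrails import VectorDataset

    vds = VectorDataset({"a": np.array([1.0, 2.0, 3.0])})

    # Lists and other ArrayLike values are cast with np.asarray on assignment;
    # the size check against existing arrays still applies.
    vds["b"] = [4.0, 5.0, 6.0]
    assert isinstance(vds["b"], np.ndarray)
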
@@ -131,7 +131,7 @@ class VectorDataDict(Dict[str, np.ndarray]):
  if not len(self):
  del self._size

- def setdefault(self, k: str, default: np.ndarray | None = None) -> np.ndarray:
+ def setdefault(self, k: str, default: npt.ArrayLike | None = None) -> np.ndarray:
  """Thin wrapper around ``dict.setdefault``.

  The main purpose of overriding is to run :meth:`_validate_array()` on set.
@@ -140,7 +140,7 @@ class VectorDataDict(Dict[str, np.ndarray]):
  ----------
  k : str
  Key
- default : np.ndarray, optional
+ default : npt.ArrayLike, optional
  Default value for key ``k``

  Returns
@@ -156,10 +156,10 @@ class VectorDataDict(Dict[str, np.ndarray]):
  default = np.array([])

  self[k] = default
- return default
+ return self[k]

  def update( # type: ignore[override]
- self, other: dict[str, np.ndarray] | None = None, **kwargs: np.ndarray
+ self, other: dict[str, npt.ArrayLike] | None = None, **kwargs: npt.ArrayLike
  ) -> None:
  """Update values without warning if overwriting.

@@ -168,24 +168,24 @@ class VectorDataDict(Dict[str, np.ndarray]):

  Parameters
  ----------
- other : dict[str, np.ndarray] | None, optional
+ other : dict[str, npt.ArrayLike] | None, optional
  Fields to update as dict
- **kwargs : np.ndarray
+ **kwargs : npt.ArrayLike
  Fields to update as kwargs
  """
  other = other or {}
- other = {k: np.asarray(v) for k, v in other.items()}
- for arr in other.values():
+ other_arrs = {k: np.asarray(v) for k, v in other.items()}
+ for arr in other_arrs.values():
  self._validate_array(arr)

- super().update(other)
+ super().update(other_arrs)

  # validate any kwarg arrays
- kwargs = {k: np.asarray(v) for k, v in kwargs.items()}
- for arr in kwargs.values():
+ kwargs_arr = {k: np.asarray(v) for k, v in kwargs.items()}
+ for arr in kwargs_arr.values():
  self._validate_array(arr)

- super().update(kwargs)
+ super().update(kwargs_arr)

  def _validate_array(self, arr: np.ndarray) -> None:
  """Ensure that `arr` is compatible with instance.
@@ -240,7 +240,7 @@ class VectorDataset:

  Parameters
  ----------
- data : dict[str, np.ndarray] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
+ data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
  Initial data, by default None
  attrs : dict[str, Any] | AttrDict, optional
  Dictionary of attributes, by default None
@@ -265,7 +265,11 @@ class VectorDataset:

  def __init__(
  self,
- data: dict[str, np.ndarray] | pd.DataFrame | VectorDataDict | VectorDataset | None = None,
+ data: dict[str, npt.ArrayLike]
+ | pd.DataFrame
+ | VectorDataDict
+ | VectorDataset
+ | None = None,
  attrs: dict[str, Any] | AttrDict | None = None,
  copy: bool = True,
  **attrs_kwargs: Any,
@@ -283,15 +287,17 @@ class VectorDataset:
  # Take extra caution with a time column

  if "time" in data:
- if not hasattr(data["time"], "dt"):
+ time = data["time"]
+
+ if not hasattr(time, "dt"):
  # If the time column is a string, we try to convert it to a datetime
  # If it fails (for example, a unix integer time), we raise an error
  # and let the user figure it out.
  try:
- data["time"] = pd.to_datetime(data["time"])
+ time = pd.to_datetime(time)
  except ValueError:
  raise ValueError(
- "Column `time` must hold datetimelike values. "
+ "The 'time' field must hold datetime-like values. "
  'Try data["time"] = pd.to_datetime(data["time"], unit=...) '
  "with the appropriate unit."
  )
@@ -301,13 +307,19 @@ class VectorDataset:
  # we raise an error in this case. Timezone issues are complicated,
  # and so it is better for the user to handle them rather than try
  # to address them here.
- if data["time"].dt.tz is not None:
+ if time.dt.tz is not None:
  raise ValueError(
- "Column `time` must be timezone naive. "
+ "The 'time' field must be timezone naive. "
  "This can be achieved with: "
  'data["time"] = data["time"].dt.tz_localize(None)'
  )
- self.data = VectorDataDict({col: ser.to_numpy(copy=copy) for col, ser in data.items()})
+
+ data = {col: ser.to_numpy(copy=copy) for col, ser in data.items() if col != "time"}
+ data["time"] = time.to_numpy(copy=copy)
+ else:
+ data = {col: ser.to_numpy(copy=copy) for col, ser in data.items()}
+
+ self.data = VectorDataDict(data)

  elif isinstance(data, VectorDataDict) and not copy:
  self.data = data
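
With the refactor above, a ``pd.DataFrame`` input has its ``time`` column parsed on a local variable, so the caller's frame is no longer written back to in place. A minimal sketch (the frame contents are illustrative):

    import pandas as pd
    from pycontrails import VectorDataset

    df = pd.DataFrame(
        {
            "longitude": [0.0, 1.0],
            "latitude": [10.0, 11.0],
            "time": ["2022-03-01 00:00:00", "2022-03-01 01:00:00"],
        }
    )

    # String times are parsed with pd.to_datetime internally; the parsed values
    # live on a local variable, so the original DataFrame is left untouched.
    vds = VectorDataset(df)
    assert df["time"].dtype == object

    # Timezone-aware times still raise ValueError; strip the zone first with
    # df["time"] = df["time"].dt.tz_localize(None)
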
@@ -368,14 +380,14 @@ class VectorDataset:
  """
  return self.data.get(key, default_value)

- def __setitem__(self, key: str, values: np.ndarray) -> None:
+ def __setitem__(self, key: str, values: npt.ArrayLike) -> None:
  """Set values at key `key` on :attr:`data`.

  Parameters
  ----------
  key : str
  Key name in :attr:`data`
- values : np.ndarray
+ values : npt.ArrayLike
  Values to set to :attr:`data`. Array size must be compatible with existing data.
  """
  self.data[key] = values
@@ -416,27 +428,29 @@ class VectorDataset:
  return key in self.data

  def update(
- self, other: VectorDataDict | dict[str, np.ndarray] | None = None, **kwargs: np.ndarray
+ self,
+ other: dict[str, npt.ArrayLike] | None = None,
+ **kwargs: npt.ArrayLike,
  ) -> None:
  """Update values in :attr:`data` dict without warning if overwriting.

  Parameters
  ----------
- other : VectorDataDict | dict[str, np.ndarray] | None, optional
+ other : dict[str, npt.ArrayLike] | None, optional
  Fields to update as dict
- **kwargs : np.ndarray
+ **kwargs : npt.ArrayLike
  Fields to update as kwargs
  """
  self.data.update(other, **kwargs)

- def setdefault(self, key: str, default: np.ndarray | None = None) -> np.ndarray:
+ def setdefault(self, key: str, default: npt.ArrayLike | None = None) -> np.ndarray:
  """Shortcut to :attr:`data.setdefault`.

  Parameters
  ----------
  key : str
  Key in :attr:`data` dict.
- default : np.ndarray, optional
+ default : npt.ArrayLike, optional
  Values to use as default, if key is not defined

  Returns
@@ -777,14 +791,16 @@ class VectorDataset:
  data = {key: self[key] for key in keys}
  return VectorDataset(data=data, attrs=self.attrs, copy=copy)

- def filter(self: VectorDatasetType, mask: np.ndarray, copy: bool = True) -> VectorDatasetType:
+ def filter(
+ self: VectorDatasetType, mask: npt.NDArray[np.bool_], copy: bool = True
+ ) -> VectorDatasetType:
  """Filter :attr:`data` according to a boolean array ``mask``.

  Entries corresponding to ``mask == True`` are kept.

  Parameters
  ----------
- mask : np.ndarray
+ mask : npt.NDArray[np.bool_]
  Boolean array with compatible shape.
  copy : bool, optional
  Copy data on filter. Defaults to True. See
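
``filter`` now advertises a boolean mask in its annotation. A minimal sketch (dataset contents illustrative):

    import numpy as np
    from pycontrails import VectorDataset

    vds = VectorDataset({"a": np.array([1.0, 2.0, 3.0])})

    # The mask is npt.NDArray[np.bool_] with compatible size; True entries are kept.
    kept = vds.filter(vds["a"] > 1.5)
    assert kept.size == 2
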
@@ -1035,26 +1051,26 @@ class GeoVectorDataset(VectorDataset):

  Parameters
  ----------
- data : dict[str, np.ndarray] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
+ data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
  Data dictionary or :class:`pandas.DataFrame`.
  Must include keys/columns ``time``, ``latitude``, ``longitude``, ``altitude`` or ``level``.
  Keyword arguments for ``time``, ``latitude``, ``longitude``, ``altitude`` or ``level``
  override ``data`` inputs. Expects ``altitude`` in meters and ``time``
  as a DatetimeLike (or array that can be processed with :meth:`pd.to_datetime`).
  Additional waypoint-specific data can be included as additional keys/columns.
- longitude : np.ndarray, optional
+ longitude : npt.ArrayLike, optional
  Longitude data.
  Defaults to None.
- latitude : np.ndarray, optional
+ latitude : npt.ArrayLike, optional
  Latitude data.
  Defaults to None.
- altitude : np.ndarray, optional
+ altitude : npt.ArrayLike, optional
  Altitude data, [:math:`m`].
  Defaults to None.
- level : np.ndarray, optional
+ level : npt.ArrayLike, optional
  Level data, [:math:`hPa`].
  Defaults to None.
- time : np.ndarray, optional
+ time : npt.ArrayLike, optional
  Time data.
  Expects an array of DatetimeLike values,
  or array that can be processed with :meth:`pd.to_datetime`.
@@ -1084,7 +1100,11 @@ class GeoVectorDataset(VectorDataset):

  def __init__(
  self,
- data: dict[str, np.ndarray] | pd.DataFrame | VectorDataDict | VectorDataset | None = None,
+ data: dict[str, npt.ArrayLike]
+ | pd.DataFrame
+ | VectorDataDict
+ | VectorDataset
+ | None = None,
  longitude: npt.ArrayLike | None = None,
  latitude: npt.ArrayLike | None = None,
  altitude: npt.ArrayLike | None = None,
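
Because the coordinate keywords are ``npt.ArrayLike``, a ``GeoVectorDataset`` can now be built directly from plain sequences. A minimal sketch (coordinates illustrative):

    import numpy as np
    from pycontrails import GeoVectorDataset

    gvd = GeoVectorDataset(
        longitude=[-20.0, -19.5],
        latitude=[55.0, 55.2],
        altitude=[11000.0, 11000.0],  # meters
        time=["2022-03-01 00:00:00", "2022-03-01 00:10:00"],  # pd.to_datetime-parseable
    )
    assert isinstance(gvd.altitude, np.ndarray)
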
@@ -1201,7 +1221,7 @@ class GeoVectorDataset(VectorDataset):
  return attrs

  @property
- def level(self) -> np.ndarray:
+ def level(self) -> npt.NDArray[np.float_]:
  """Get pressure ``level`` values for points.

  Automatically calculates pressure level with :func:`units.m_to_pl` from the ``altitude`` key.
@@ -1212,7 +1232,7 @@ class GeoVectorDataset(VectorDataset):

  Returns
  -------
- np.ndarray
+ npt.NDArray[np.float_]
  Point pressure level values, [:math:`hPa`]
  """
  try:
@@ -1221,7 +1241,7 @@ class GeoVectorDataset(VectorDataset):
  return units.m_to_pl(self.altitude)

  @property
- def altitude(self) -> np.ndarray:
+ def altitude(self) -> npt.NDArray[np.float_]:
  """Get altitude.

  Automatically calculates altitude with :func:`units.pl_to_m` from the ``level`` key.
@@ -1232,7 +1252,7 @@ class GeoVectorDataset(VectorDataset):

  Returns
  -------
- np.ndarray
+ npt.NDArray[np.float_]
  Altitude, [:math:`m`]
  """
  try:
@@ -1246,12 +1266,12 @@ class GeoVectorDataset(VectorDataset):
  return units.ft_to_m(self["altitude_ft"])

  @property
- def air_pressure(self) -> np.ndarray:
+ def air_pressure(self) -> npt.NDArray[np.float_]:
  """Get ``air_pressure`` values for points.

  Returns
  -------
- np.ndarray
+ npt.NDArray[np.float_]
  Point air pressure values, [:math:`Pa`]
  """
  try:
@@ -1260,12 +1280,12 @@ class GeoVectorDataset(VectorDataset):
  return 100 * self.level

  @property
- def altitude_ft(self) -> np.ndarray:
+ def altitude_ft(self) -> npt.NDArray[np.float_]:
  """Get altitude in feet.

  Returns
  -------
- np.ndarray
+ npt.NDArray[np.float_]
  Altitude, [:math:`ft`]
  """
  try:
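
The vertical-coordinate properties above now advertise ``npt.NDArray[np.float_]`` and keep their fallback chain: ``level`` is derived from ``altitude`` via ``units.m_to_pl`` when no ``level`` key exists, and ``air_pressure`` falls back to ``100 * self.level``. A minimal sketch (coordinates illustrative):

    import numpy as np
    from pycontrails import GeoVectorDataset

    gvd = GeoVectorDataset(
        longitude=[0.0],
        latitude=[0.0],
        altitude=[11000.0],
        time=["2022-03-01 00:00:00"],
    )

    # level derives from altitude; air_pressure derives from level (hPa -> Pa).
    np.testing.assert_allclose(gvd.air_pressure, 100.0 * gvd.level)
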
@@ -1368,7 +1388,7 @@ class GeoVectorDataset(VectorDataset):

  def coords_intersect_met(
  self, met: met_module.MetDataset | met_module.MetDataArray
- ) -> np.ndarray:
+ ) -> npt.NDArray[np.bool_]:
  """Return boolean mask of data inside the bounding box defined by ``met``.

  Parameters
@@ -1378,7 +1398,7 @@ class GeoVectorDataset(VectorDataset):

  Returns
  -------
- np.ndarray
+ npt.NDArray[np.bool_]
  True if point is inside the bounding box defined by ``met``.
  """

@@ -1405,26 +1425,26 @@ class GeoVectorDataset(VectorDataset):
  self,
  mda: met_module.MetDataArray,
  *,
- longitude: np.ndarray | None = None,
- latitude: np.ndarray | None = None,
- level: np.ndarray | None = None,
- time: np.ndarray | None = None,
+ longitude: npt.NDArray[np.float_] | None = None,
+ latitude: npt.NDArray[np.float_] | None = None,
+ level: npt.NDArray[np.float_] | None = None,
+ time: npt.NDArray[np.datetime64] | None = None,
  use_indices: bool = False,
  **interp_kwargs: Any,
- ) -> np.ndarray:
+ ) -> npt.NDArray[np.float_]:
  """Intersect waypoints with MetDataArray.

  Parameters
  ----------
  mda : MetDataArray
  MetDataArray containing a meteorological variable at spatio-temporal coordinates.
- longitude : np.ndarray, optional
+ longitude : npt.NDArray[np.float_], optional
  Override existing coordinates for met interpolation
- latitude : np.ndarray, optional
+ latitude : npt.NDArray[np.float_], optional
  Override existing coordinates for met interpolation
- level : np.ndarray, optional
+ level : npt.NDArray[np.float_], optional
  Override existing coordinates for met interpolation
- time : np.ndarray, optional
+ time : npt.NDArray[np.datetime64], optional
  Override existing coordinates for met interpolation
  use_indices : bool, optional
  Experimental.
@@ -1437,7 +1457,7 @@ class GeoVectorDataset(VectorDataset):

  Returns
  -------
- np.ndarray
+ npt.NDArray[np.float_]
  Interpolated values

  Examples
@@ -1453,7 +1473,7 @@ class GeoVectorDataset(VectorDataset):
  >>> variables = ["air_temperature", "specific_humidity"]
  >>> levels = [300, 250, 200]
  >>> era5 = ERA5(time=times, variables=variables, pressure_levels=levels)
- >>> met = era5.open_metdataset(xr_kwargs=dict(parallel=False))
+ >>> met = era5.open_metdataset()

  >>> # Example flight
  >>> df = pd.DataFrame()
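
The override keywords of ``intersect_met`` are now typed: float arrays for ``longitude``/``latitude``/``level`` and ``datetime64`` for ``time``. A sketch, not runnable standalone (it assumes ``gvd`` is a ``GeoVectorDataset`` and ``met`` is the ``MetDataset`` opened in the doctest above):

    import numpy as np

    # Interpolate every waypoint at a fixed 250 hPa level instead of the
    # dataset's own level values; the override must be a float array.
    level = np.full(gvd.size, 250.0)
    air_temperature = gvd.intersect_met(met["air_temperature"], level=level)
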
@@ -1550,9 +1570,7 @@ class GeoVectorDataset(VectorDataset):
  self["_distances_y"] = distances_y
  self["_distances_z"] = distances_z
  self["_distances_t"] = distances_t
-
- if out_of_bounds is not None:
- self["_out_of_bounds"] = out_of_bounds
+ self["_out_of_bounds"] = out_of_bounds

  def _get_indices(self) -> interpolation.RGIArtifacts | None:
  """Get entries from call to :meth:`_put_indices`.
@@ -1577,14 +1595,13 @@ class GeoVectorDataset(VectorDataset):
  distances_y = self["_distances_y"]
  distances_z = self["_distances_z"]
  distances_t = self["_distances_t"]
+ out_of_bounds = self["_out_of_bounds"]
  except KeyError:
  return None

  indices = np.asarray([indices_x, indices_y, indices_z, indices_t])
  distances = np.asarray([distances_x, distances_y, distances_z, distances_t])

- out_of_bounds = self.get("_out_of_bounds", None)
-
  return interpolation.RGIArtifacts(indices, distances, out_of_bounds)

  def _invalidate_indices(self) -> None:
pycontrails/datalib/__init__.py
@@ -5,5 +5,5 @@ See individual modules for met variables and additional exports.

  - :module:`pycontrails.datalib.ecmwf`
  - :module:`pycontrails.datalib.gfs`
-
+ - :module:`pycontrails.datalib.spire`
  """
pycontrails/datalib/ecmwf/common.py
@@ -77,21 +77,18 @@ class ECMWFAPI(datalib.MetDataSource):
  raise KeyError(f"Input dataset is missing variables {e}")

  # downselect times
- try:
- if self.timesteps:
+ if not self.timesteps:
+ self.timesteps = ds["time"].values.astype("datetime64[ns]").tolist()
+ else:
+ try:
  ds = ds.sel(time=self.timesteps)
- else:
- # set timesteps from dataset "time" coordinates
- # np.datetime64 doesn't convert to list[datetime] unless its unit is us
- self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()
- except KeyError:
- # this snippet shows the missing times for convenience
- np_timesteps = [np.datetime64(t, "ns") for t in self.timesteps]
- missing_times = list(set(np_timesteps) - set(ds["time"].values))
- missing_times.sort()
- raise KeyError(
- f"Input dataset is missing time coordinates {[str(t) for t in missing_times]}"
- )
+ except KeyError:
+ # this snippet shows the missing times for convenience
+ np_timesteps = [np.datetime64(t, "ns") for t in self.timesteps]
+ missing_times = sorted(set(np_timesteps) - set(ds["time"].values))
+ raise KeyError(
+ f"Input dataset is missing time coordinates {[str(t) for t in missing_times]}"
+ )

  # downselect pressure level
  # if "level" is not in dims and
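
The missing-time report in the hunk above collapses the old list/sort dance into a single ``sorted(set(...) - set(...))`` expression. A self-contained sketch of that snippet (values illustrative):

    import numpy as np

    requested = [np.datetime64("2022-03-01T00", "ns"), np.datetime64("2022-03-01T06", "ns")]
    available = np.array(["2022-03-01T00"], dtype="datetime64[ns]")

    # Requested timesteps absent from the dataset, sorted for the error message.
    missing = sorted(set(requested) - set(available))
    print([str(t) for t in missing])  # ['2022-03-01T06:00:00.000000000']
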
@@ -104,16 +101,12 @@ class ECMWFAPI(datalib.MetDataSource):
  ds = ds.sel(level=self.pressure_levels)
  except KeyError:
  # this snippet shows the missing levels for convenience
- missing_levels = list(set(self.pressure_levels) - set(ds["level"].values))
- missing_levels.sort()
+ missing_levels = sorted(set(self.pressure_levels) - set(ds["level"].values))
  raise KeyError(f"Input dataset is missing level coordinates {missing_levels}")

  # harmonize variable names
  ds = met.standardize_variables(ds, self.variables)

- if "cachestore" not in kwargs:
- kwargs["cachestore"] = self.cachestore
-
  # modify values

  # rescale relative humidity from % -> dimensionless if it's in the dataset
@@ -129,4 +122,6 @@ class ECMWFAPI(datalib.MetDataSource):
  ] = "Relative humidity rescaled to [0 - 1] instead of %"

  ds.attrs["met_source"] = type(self).__name__
+
+ kwargs.setdefault("cachestore", self.cachestore)
  return met.MetDataset(ds, **kwargs)
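
The final hunk swaps an explicit membership check for ``dict.setdefault``, which only fills ``cachestore`` when the caller has not supplied one. A minimal sketch:

    # dict.setdefault writes the key only when absent, so a caller-supplied
    # cachestore always takes precedence over the source's default.
    kwargs = {"cachestore": "user-supplied"}
    kwargs.setdefault("cachestore", "source-default")
    assert kwargs["cachestore"] == "user-supplied"
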
pycontrails/datalib/spire/__init__.py (new file)
@@ -0,0 +1,19 @@
+ """Spire data access."""
+
+ from __future__ import annotations
+
+ from pycontrails.datalib.spire.spire import (
+ clean,
+ generate_flight_id,
+ identify_flights,
+ is_valid_trajectory,
+ validate_flights,
+ )
+
+ __all__ = [
+ "clean",
+ "generate_flight_id",
+ "identify_flights",
+ "is_valid_trajectory",
+ "validate_flights",
+ ]
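
The new subpackage re-exports the helpers listed in ``__all__``, so both the subpackage and its names import directly; the implementations live in pycontrails/datalib/spire/spire.py (added in this release, not shown in this section). A sketch of the import surface only:

    from pycontrails.datalib import spire
    from pycontrails.datalib.spire import clean, identify_flights

    # Per __all__ above, the Spire cleaning/flight-identification pipeline is
    # exposed at the package level; see spire/spire.py for signatures.
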