pycontrails-0.54.1-cp310-cp310-win_amd64.whl → pycontrails-0.54.3-cp310-cp310-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of pycontrails has been flagged as potentially problematic; see the package registry's advisory page for details.

Files changed (43)
  1. pycontrails/_version.py +2 -2
  2. pycontrails/core/aircraft_performance.py +24 -5
  3. pycontrails/core/cache.py +14 -10
  4. pycontrails/core/fleet.py +22 -12
  5. pycontrails/core/flight.py +25 -15
  6. pycontrails/core/met.py +34 -22
  7. pycontrails/core/rgi_cython.cp310-win_amd64.pyd +0 -0
  8. pycontrails/core/vector.py +38 -38
  9. pycontrails/datalib/ecmwf/arco_era5.py +10 -5
  10. pycontrails/datalib/ecmwf/common.py +7 -2
  11. pycontrails/datalib/ecmwf/era5.py +9 -4
  12. pycontrails/datalib/ecmwf/era5_model_level.py +9 -5
  13. pycontrails/datalib/ecmwf/hres.py +12 -7
  14. pycontrails/datalib/ecmwf/hres_model_level.py +10 -5
  15. pycontrails/datalib/ecmwf/ifs.py +11 -6
  16. pycontrails/datalib/ecmwf/variables.py +1 -0
  17. pycontrails/datalib/gfs/gfs.py +52 -34
  18. pycontrails/datalib/gfs/variables.py +6 -2
  19. pycontrails/datalib/landsat.py +5 -8
  20. pycontrails/datalib/sentinel.py +7 -11
  21. pycontrails/ext/bada.py +3 -2
  22. pycontrails/ext/synthetic_flight.py +3 -2
  23. pycontrails/models/accf.py +40 -19
  24. pycontrails/models/apcemm/apcemm.py +2 -1
  25. pycontrails/models/cocip/cocip.py +8 -4
  26. pycontrails/models/cocipgrid/cocip_grid.py +25 -20
  27. pycontrails/models/dry_advection.py +50 -54
  28. pycontrails/models/humidity_scaling/humidity_scaling.py +12 -7
  29. pycontrails/models/ps_model/__init__.py +2 -1
  30. pycontrails/models/ps_model/ps_aircraft_params.py +3 -2
  31. pycontrails/models/ps_model/ps_grid.py +187 -1
  32. pycontrails/models/ps_model/ps_model.py +12 -10
  33. pycontrails/models/ps_model/ps_operational_limits.py +39 -52
  34. pycontrails/physics/geo.py +149 -0
  35. pycontrails/physics/jet.py +141 -11
  36. pycontrails/physics/static/iata-cargo-load-factors-20241115.csv +71 -0
  37. pycontrails/physics/static/iata-passenger-load-factors-20241115.csv +71 -0
  38. {pycontrails-0.54.1.dist-info → pycontrails-0.54.3.dist-info}/METADATA +12 -11
  39. {pycontrails-0.54.1.dist-info → pycontrails-0.54.3.dist-info}/RECORD +43 -41
  40. {pycontrails-0.54.1.dist-info → pycontrails-0.54.3.dist-info}/WHEEL +1 -1
  41. {pycontrails-0.54.1.dist-info → pycontrails-0.54.3.dist-info}/LICENSE +0 -0
  42. {pycontrails-0.54.1.dist-info → pycontrails-0.54.3.dist-info}/NOTICE +0 -0
  43. {pycontrails-0.54.1.dist-info → pycontrails-0.54.3.dist-info}/top_level.txt +0 -0
@@ -5,15 +5,25 @@ from __future__ import annotations
5
5
  import hashlib
6
6
  import json
7
7
  import logging
8
+ import sys
8
9
  import warnings
9
10
  from collections.abc import Generator, Iterable, Iterator, Sequence
10
- from typing import Any, TypeVar, overload
11
+ from typing import Any, overload
12
+
13
+ if sys.version_info >= (3, 11):
14
+ from typing import Self
15
+ else:
16
+ from typing_extensions import Self
17
+
18
+ if sys.version_info >= (3, 12):
19
+ from typing import override
20
+ else:
21
+ from typing_extensions import override
11
22
 
12
23
  import numpy as np
13
24
  import numpy.typing as npt
14
25
  import pandas as pd
15
26
  import xarray as xr
16
- from overrides import overrides
17
27
 
18
28
  from pycontrails.core import coordinates, interpolation
19
29
  from pycontrails.core import met as met_module
@@ -23,10 +33,6 @@ from pycontrails.utils import json as json_utils
23
33
 
24
34
  logger = logging.getLogger(__name__)
25
35
 
26
- #: Vector types
27
- VectorDatasetType = TypeVar("VectorDatasetType", bound="VectorDataset")
28
- GeoVectorDatasetType = TypeVar("GeoVectorDatasetType", bound="GeoVectorDataset")
29
-
30
36
 
31
37
  class AttrDict(dict[str, Any]):
32
38
  """Thin wrapper around dict to warn when setting a key that already exists."""
@@ -574,7 +580,7 @@ class VectorDataset:
574
580
  """
575
581
  return self.size > 0
576
582
 
577
- def __add__(self: VectorDatasetType, other: VectorDatasetType | None) -> VectorDatasetType:
583
+ def __add__(self, other: Self | None) -> Self:
578
584
  """Concatenate two compatible instances of VectorDataset.
579
585
 
580
586
  In this context, compatibility means that both have identical :attr:`data` keys.
@@ -586,12 +592,12 @@ class VectorDataset:
586
592
 
587
593
  Parameters
588
594
  ----------
589
- other : VectorDatasetType
595
+ other : Self | None
590
596
  Other values to concatenate
591
597
 
592
598
  Returns
593
599
  -------
594
- VectorDatasetType
600
+ Self
595
601
  Concatenated values.
596
602
 
597
603
  Raises
@@ -610,11 +616,11 @@ class VectorDataset:
610
616
 
611
617
  @classmethod
612
618
  def sum(
613
- cls: type[VectorDatasetType],
619
+ cls,
614
620
  vectors: Sequence[VectorDataset],
615
621
  infer_attrs: bool = True,
616
622
  fill_value: float | None = None,
617
- ) -> VectorDatasetType:
623
+ ) -> Self:
618
624
  """Sum a list of :class:`VectorDataset` instances.
619
625
 
620
626
  Parameters
@@ -692,7 +698,7 @@ class VectorDataset:
692
698
  return cls(data, attrs=vectors[0].attrs, copy=False)
693
699
  return cls(data, copy=False)
694
700
 
695
- def __eq__(self: VectorDatasetType, other: object) -> bool:
701
+ def __eq__(self, other: object) -> bool:
696
702
  """Determine if two instances are equal.
697
703
 
698
704
  NaN values are considered equal in this comparison.
@@ -700,7 +706,7 @@ class VectorDataset:
700
706
  Parameters
701
707
  ----------
702
708
  other : object
703
- VectorDatasetType to compare with
709
+ VectorDataset to compare with
704
710
 
705
711
  Returns
706
712
  -------
@@ -784,8 +790,8 @@ class VectorDataset:
784
790
  # Utilities
785
791
  # ------------
786
792
 
787
- def copy(self: VectorDatasetType, **kwargs: Any) -> VectorDatasetType:
788
- """Return a copy of this VectorDatasetType class.
793
+ def copy(self, **kwargs: Any) -> Self:
794
+ """Return a copy of this instance.
789
795
 
790
796
  Parameters
791
797
  ----------
@@ -794,7 +800,7 @@ class VectorDataset:
794
800
 
795
801
  Returns
796
802
  -------
797
- VectorDatasetType
803
+ Self
798
804
  Copy of class
799
805
  """
800
806
  return type(self)(data=self.data, attrs=self.attrs, copy=True, **kwargs)
@@ -820,9 +826,7 @@ class VectorDataset:
820
826
  data = {key: self[key] for key in keys}
821
827
  return VectorDataset(data=data, attrs=self.attrs, copy=copy)
822
828
 
823
- def filter(
824
- self: VectorDatasetType, mask: npt.NDArray[np.bool_], copy: bool = True, **kwargs: Any
825
- ) -> VectorDatasetType:
829
+ def filter(self, mask: npt.NDArray[np.bool_], copy: bool = True, **kwargs: Any) -> Self:
826
830
  """Filter :attr:`data` according to a boolean array ``mask``.
827
831
 
828
832
  Entries corresponding to ``mask == True`` are kept.
@@ -840,7 +844,7 @@ class VectorDataset:
840
844
 
841
845
  Returns
842
846
  -------
843
- VectorDatasetType
847
+ Self
844
848
  Containing filtered data
845
849
 
846
850
  Raises
@@ -855,7 +859,7 @@ class VectorDataset:
855
859
  data = {key: value[mask] for key, value in self.data.items()}
856
860
  return type(self)(data=data, attrs=self.attrs, copy=copy, **kwargs)
857
861
 
858
- def sort(self: VectorDatasetType, by: str | list[str]) -> VectorDatasetType:
862
+ def sort(self, by: str | list[str]) -> Self:
859
863
  """Sort data by key(s).
860
864
 
861
865
  This method always creates a copy of the data by calling
@@ -868,7 +872,7 @@ class VectorDataset:
868
872
 
869
873
  Returns
870
874
  -------
871
- VectorDatasetType
875
+ Self
872
876
  Instance with sorted data.
873
877
  """
874
878
  return type(self)(data=self.dataframe.sort_values(by=by), attrs=self.attrs, copy=False)
@@ -1109,12 +1113,12 @@ class VectorDataset:
1109
1113
 
1110
1114
  @classmethod
1111
1115
  def create_empty(
1112
- cls: type[VectorDatasetType],
1116
+ cls,
1113
1117
  keys: Iterable[str],
1114
1118
  attrs: dict[str, Any] | None = None,
1115
1119
  **attrs_kwargs: Any,
1116
- ) -> VectorDatasetType:
1117
- """Create instance with variables defined by `keys` and size 0.
1120
+ ) -> Self:
1121
+ """Create instance with variables defined by ``keys`` and size 0.
1118
1122
 
1119
1123
  If instance requires additional variables to be defined, these keys will automatically
1120
1124
  be attached to returned instance.
@@ -1130,15 +1134,13 @@ class VectorDataset:
1130
1134
 
1131
1135
  Returns
1132
1136
  -------
1133
- VectorDatasetType
1137
+ Self
1134
1138
  Empty VectorDataset instance.
1135
1139
  """
1136
1140
  return cls(data=_empty_vector_dict(keys or set()), attrs=attrs, copy=False, **attrs_kwargs)
1137
1141
 
1138
1142
  @classmethod
1139
- def from_dict(
1140
- cls: type[VectorDatasetType], obj: dict[str, Any], copy: bool = True, **obj_kwargs: Any
1141
- ) -> VectorDatasetType:
1143
+ def from_dict(cls, obj: dict[str, Any], copy: bool = True, **obj_kwargs: Any) -> Self:
1142
1144
  """Create instance from dict representation containing data and attrs.
1143
1145
 
1144
1146
  Parameters
@@ -1153,7 +1155,7 @@ class VectorDataset:
1153
1155
 
1154
1156
  Returns
1155
1157
  -------
1156
- VectorDatasetType
1158
+ Self
1157
1159
  VectorDataset instance.
1158
1160
 
1159
1161
  See Also
@@ -1171,9 +1173,7 @@ class VectorDataset:
1171
1173
 
1172
1174
  return cls(data=data, attrs=attrs, copy=copy)
1173
1175
 
1174
- def generate_splits(
1175
- self: VectorDatasetType, n_splits: int, copy: bool = True
1176
- ) -> Generator[VectorDatasetType, None, None]:
1176
+ def generate_splits(self, n_splits: int, copy: bool = True) -> Generator[Self, None, None]:
1177
1177
  """Split instance into ``n_split`` sub-vectors.
1178
1178
 
1179
1179
  Parameters
@@ -1186,7 +1186,7 @@ class VectorDataset:
1186
1186
 
1187
1187
  Returns
1188
1188
  -------
1189
- Generator[VectorDatasetType, None, None]
1189
+ Generator[Self, None, None]
1190
1190
  Generator of split vectors.
1191
1191
 
1192
1192
  See Also
@@ -1367,7 +1367,7 @@ class GeoVectorDataset(VectorDataset):
1367
1367
  if np.any(latitude > 90.0) or np.any(latitude < -90.0):
1368
1368
  raise ValueError("EPSG:4326 latitude coordinates should lie between [-90, 90].")
1369
1369
 
1370
- @overrides
1370
+ @override
1371
1371
  def _display_attrs(self) -> dict[str, str]:
1372
1372
  try:
1373
1373
  time0 = pd.Timestamp(np.nanmin(self["time"]))
@@ -1922,13 +1922,13 @@ class GeoVectorDataset(VectorDataset):
1922
1922
  # ------------
1923
1923
 
1924
1924
  @classmethod
1925
- @overrides
1925
+ @override
1926
1926
  def create_empty(
1927
- cls: type[GeoVectorDatasetType],
1927
+ cls,
1928
1928
  keys: Iterable[str] | None = None,
1929
1929
  attrs: dict[str, Any] | None = None,
1930
1930
  **attrs_kwargs: Any,
1931
- ) -> GeoVectorDatasetType:
1931
+ ) -> Self:
1932
1932
  keys = *cls.required_keys, "altitude", *(keys or ())
1933
1933
  return super().create_empty(keys, attrs, **attrs_kwargs)
1934
1934
 
@@ -19,11 +19,16 @@ from __future__ import annotations
19
19
 
20
20
  import datetime
21
21
  import hashlib
22
+ import sys
22
23
  from typing import Any
23
24
 
25
+ if sys.version_info >= (3, 12):
26
+ from typing import override
27
+ else:
28
+ from typing_extensions import override
29
+
24
30
  import numpy.typing as npt
25
31
  import xarray as xr
26
- from overrides import overrides
27
32
 
28
33
  from pycontrails.core import cache, met_var
29
34
  from pycontrails.core.met import MetDataset
@@ -274,7 +279,7 @@ class ERA5ARCO(ecmwf_common.ECMWFAPI):
274
279
  """
275
280
  return ecmwf_variables.SURFACE_VARIABLES
276
281
 
277
- @overrides
282
+ @override
278
283
  def download_dataset(self, times: list[datetime.datetime]) -> None:
279
284
  if not times:
280
285
  return
@@ -286,7 +291,7 @@ class ERA5ARCO(ecmwf_common.ECMWFAPI):
286
291
 
287
292
  self.cache_dataset(ds)
288
293
 
289
- @overrides
294
+ @override
290
295
  def create_cachepath(self, t: datetime.datetime) -> str:
291
296
  if self.cachestore is None:
292
297
  msg = "Attribute self.cachestore must be defined to create cache path"
@@ -302,7 +307,7 @@ class ERA5ARCO(ecmwf_common.ECMWFAPI):
302
307
 
303
308
  return self.cachestore.path(cache_path)
304
309
 
305
- @overrides
310
+ @override
306
311
  def open_metdataset(
307
312
  self,
308
313
  dataset: xr.Dataset | None = None,
@@ -331,7 +336,7 @@ class ERA5ARCO(ecmwf_common.ECMWFAPI):
331
336
  self.set_metadata(mds)
332
337
  return mds
333
338
 
334
- @overrides
339
+ @override
335
340
  def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
336
341
  ds.attrs.update(
337
342
  provider="ECMWF",
@@ -4,14 +4,19 @@ from __future__ import annotations
4
4
 
5
5
  import logging
6
6
  import os
7
+ import sys
7
8
  from typing import Any
8
9
 
10
+ if sys.version_info >= (3, 12):
11
+ from typing import override
12
+ else:
13
+ from typing_extensions import override
14
+
9
15
  LOG = logging.getLogger(__name__)
10
16
 
11
17
  import numpy as np
12
18
  import pandas as pd
13
19
  import xarray as xr
14
- from overrides import overrides
15
20
 
16
21
  from pycontrails.core import met
17
22
  from pycontrails.datalib._met_utils import metsource
@@ -88,7 +93,7 @@ class ECMWFAPI(metsource.MetDataSource):
88
93
  kwargs.setdefault("cachestore", self.cachestore)
89
94
  return met.MetDataset(ds, **kwargs)
90
95
 
91
- @overrides
96
+ @override
92
97
  def cache_dataset(self, dataset: xr.Dataset) -> None:
93
98
  if self.cachestore is None:
94
99
  LOG.debug("Cache is turned off, skipping")
@@ -7,16 +7,21 @@ import hashlib
7
7
  import logging
8
8
  import os
9
9
  import pathlib
10
+ import sys
10
11
  import warnings
11
12
  from contextlib import ExitStack
12
13
  from datetime import datetime
13
14
  from typing import TYPE_CHECKING, Any
14
15
 
16
+ if sys.version_info >= (3, 12):
17
+ from typing import override
18
+ else:
19
+ from typing_extensions import override
20
+
15
21
  LOG = logging.getLogger(__name__)
16
22
 
17
23
  import pandas as pd
18
24
  import xarray as xr
19
- from overrides import overrides
20
25
 
21
26
  import pycontrails
22
27
  from pycontrails.core import cache
@@ -347,7 +352,7 @@ class ERA5(ECMWFAPI):
347
352
  # return cache path
348
353
  return self.cachestore.path(f"{datestr}-{suffix}.nc")
349
354
 
350
- @overrides
355
+ @override
351
356
  def download_dataset(self, times: list[datetime]) -> None:
352
357
  download_times: dict[datetime, list[datetime]] = collections.defaultdict(list)
353
358
  for t in times:
@@ -359,7 +364,7 @@ class ERA5(ECMWFAPI):
359
364
  for times_for_day in download_times.values():
360
365
  self._download_file(times_for_day)
361
366
 
362
- @overrides
367
+ @override
363
368
  def open_metdataset(
364
369
  self,
365
370
  dataset: xr.Dataset | None = None,
@@ -399,7 +404,7 @@ class ERA5(ECMWFAPI):
399
404
  self.set_metadata(mds)
400
405
  return mds
401
406
 
402
- @overrides
407
+ @override
403
408
  def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
404
409
  if self.product_type == "reanalysis":
405
410
  product = "reanalysis"
@@ -25,12 +25,16 @@ import contextlib
25
25
  import hashlib
26
26
  import logging
27
27
  import os
28
+ import sys
28
29
  import threading
29
30
  import warnings
30
31
  from datetime import datetime
31
32
  from typing import Any
32
33
 
33
- from overrides import overrides
34
+ if sys.version_info >= (3, 12):
35
+ from typing import override
36
+ else:
37
+ from typing_extensions import override
34
38
 
35
39
  LOG = logging.getLogger(__name__)
36
40
 
@@ -245,7 +249,7 @@ class ERA5ModelLevel(ECMWFAPI):
245
249
  """
246
250
  return "reanalysis-era5-complete"
247
251
 
248
- @overrides
252
+ @override
249
253
  def create_cachepath(self, t: datetime | pd.Timestamp) -> str:
250
254
  """Return cachepath to local ERA5 data file based on datetime.
251
255
 
@@ -277,7 +281,7 @@ class ERA5ModelLevel(ECMWFAPI):
277
281
 
278
282
  return self.cachestore.path(cache_path)
279
283
 
280
- @overrides
284
+ @override
281
285
  def download_dataset(self, times: list[datetime]) -> None:
282
286
  # group data to request by month (nominal) or by day (ensemble)
283
287
  requests: dict[datetime, list[datetime]] = collections.defaultdict(list)
@@ -294,7 +298,7 @@ class ERA5ModelLevel(ECMWFAPI):
294
298
  for times_in_request in requests.values():
295
299
  self._download_convert_cache_handler(times_in_request)
296
300
 
297
- @overrides
301
+ @override
298
302
  def open_metdataset(
299
303
  self,
300
304
  dataset: xr.Dataset | None = None,
@@ -320,7 +324,7 @@ class ERA5ModelLevel(ECMWFAPI):
320
324
  self.set_metadata(mds)
321
325
  return mds
322
326
 
323
- @overrides
327
+ @override
324
328
  def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
325
329
  if self.product_type == "reanalysis":
326
330
  product = "reanalysis"
@@ -5,16 +5,21 @@ from __future__ import annotations
5
5
  import hashlib
6
6
  import logging
7
7
  import pathlib
8
+ import sys
8
9
  from contextlib import ExitStack
9
10
  from datetime import datetime
10
11
  from typing import TYPE_CHECKING, Any
11
12
 
13
+ if sys.version_info >= (3, 12):
14
+ from typing import override
15
+ else:
16
+ from typing_extensions import override
17
+
12
18
  LOG = logging.getLogger(__name__)
13
19
 
14
20
  import numpy as np
15
21
  import pandas as pd
16
22
  import xarray as xr
17
- from overrides import overrides
18
23
 
19
24
  import pycontrails
20
25
  from pycontrails.core import cache
@@ -326,9 +331,9 @@ class HRES(ECMWFAPI):
326
331
  f" {getattr(self, 'steps', '')}"
327
332
  )
328
333
 
329
- @classmethod
334
+ @staticmethod
330
335
  def create_synoptic_time_ranges(
331
- self, timesteps: list[pd.Timestamp]
336
+ timesteps: list[pd.Timestamp],
332
337
  ) -> list[tuple[pd.Timestamp, pd.Timestamp]]:
333
338
  """Create synoptic time bounds encompassing date range.
334
339
 
@@ -556,7 +561,7 @@ class HRES(ECMWFAPI):
556
561
  f"\n\tgrid={request['grid']},\n\tlevtype={request['levtype']}{levelist}"
557
562
  )
558
563
 
559
- @overrides
564
+ @override
560
565
  def create_cachepath(self, t: datetime) -> str:
561
566
  if self.cachestore is None:
562
567
  raise ValueError("self.cachestore attribute must be defined to create cache path")
@@ -574,7 +579,7 @@ class HRES(ECMWFAPI):
574
579
  # return cache path
575
580
  return self.cachestore.path(f"{datestr}-{step}-{suffix}.nc")
576
581
 
577
- @overrides
582
+ @override
578
583
  def download_dataset(self, times: list[datetime]) -> None:
579
584
  """Download data from data source for input times.
580
585
 
@@ -595,7 +600,7 @@ class HRES(ECMWFAPI):
595
600
  elif len(steps) > 0:
596
601
  self._download_file(steps)
597
602
 
598
- @overrides
603
+ @override
599
604
  def open_metdataset(
600
605
  self,
601
606
  dataset: xr.Dataset | None = None,
@@ -635,7 +640,7 @@ class HRES(ECMWFAPI):
635
640
  self.set_metadata(mds)
636
641
  return mds
637
642
 
638
- @overrides
643
+ @override
639
644
  def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
640
645
  if self.stream == "oper":
641
646
  product = "forecast"
@@ -13,15 +13,20 @@ from __future__ import annotations
13
13
  import contextlib
14
14
  import hashlib
15
15
  import logging
16
+ import sys
16
17
  import warnings
17
18
  from datetime import datetime, timedelta
18
19
  from typing import Any
19
20
 
21
+ if sys.version_info >= (3, 12):
22
+ from typing import override
23
+ else:
24
+ from typing_extensions import override
25
+
20
26
  LOG = logging.getLogger(__name__)
21
27
 
22
28
  import pandas as pd
23
29
  import xarray as xr
24
- from overrides import overrides
25
30
 
26
31
  import pycontrails
27
32
  from pycontrails.core import cache
@@ -283,7 +288,7 @@ class HRESModelLevel(ECMWFAPI):
283
288
  """
284
289
  return []
285
290
 
286
- @overrides
291
+ @override
287
292
  def create_cachepath(self, t: datetime | pd.Timestamp) -> str:
288
293
  """Return cachepath to local HRES data file based on datetime.
289
294
 
@@ -316,13 +321,13 @@ class HRESModelLevel(ECMWFAPI):
316
321
 
317
322
  return self.cachestore.path(cache_path)
318
323
 
319
- @overrides
324
+ @override
320
325
  def download_dataset(self, times: list[datetime]) -> None:
321
326
  # will always submit a single MARS request since each forecast is a separate file on tape
322
327
  LOG.debug(f"Retrieving ERA5 data for times {times} from forecast {self.forecast_time}")
323
328
  self._download_convert_cache_handler(times)
324
329
 
325
- @overrides
330
+ @override
326
331
  def open_metdataset(
327
332
  self,
328
333
  dataset: xr.Dataset | None = None,
@@ -348,7 +353,7 @@ class HRESModelLevel(ECMWFAPI):
348
353
  self.set_metadata(mds)
349
354
  return mds
350
355
 
351
- @overrides
356
+ @override
352
357
  def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
353
358
  ds.attrs.update(
354
359
  provider="ECMWF", dataset="HRES", product="forecast", radiation_accumulated=True
@@ -4,16 +4,21 @@ from __future__ import annotations
4
4
 
5
5
  import logging
6
6
  import pathlib
7
+ import sys
7
8
  import warnings
8
9
  from datetime import datetime
9
10
  from typing import Any
10
11
 
12
+ if sys.version_info >= (3, 12):
13
+ from typing import override
14
+ else:
15
+ from typing_extensions import override
16
+
11
17
  LOG = logging.getLogger(__name__)
12
18
 
13
19
  import numpy as np
14
20
  import pandas as pd
15
21
  import xarray as xr
16
- from overrides import overrides
17
22
 
18
23
  from pycontrails.core import met
19
24
  from pycontrails.datalib._met_utils import metsource
@@ -119,7 +124,7 @@ class IFS(metsource.MetDataSource):
119
124
  """
120
125
  return None
121
126
 
122
- @overrides
127
+ @override
123
128
  def open_metdataset(
124
129
  self,
125
130
  dataset: xr.Dataset | None = None,
@@ -190,7 +195,7 @@ class IFS(metsource.MetDataSource):
190
195
  self.set_metadata(ds)
191
196
  return met.MetDataset(ds, **kwargs)
192
197
 
193
- @overrides
198
+ @override
194
199
  def set_metadata(self, ds: xr.Dataset | met.MetDataset) -> None:
195
200
  ds.attrs.update(
196
201
  provider="ECMWF",
@@ -198,15 +203,15 @@ class IFS(metsource.MetDataSource):
198
203
  product="forecast",
199
204
  )
200
205
 
201
- @overrides
206
+ @override
202
207
  def download_dataset(self, times: list[datetime]) -> None:
203
208
  raise NotImplementedError("IFS download is not supported")
204
209
 
205
- @overrides
210
+ @override
206
211
  def cache_dataset(self, dataset: xr.Dataset) -> None:
207
212
  raise NotImplementedError("IFS dataset caching not supported")
208
213
 
209
- @overrides
214
+ @override
210
215
  def create_cachepath(self, t: datetime) -> str:
211
216
  raise NotImplementedError("IFS download is not supported")
212
217
 
@@ -107,6 +107,7 @@ RelativeHumidity = MetVariable(
107
107
  long_name=met_var.RelativeHumidity.long_name,
108
108
  units="%",
109
109
  level_type=met_var.RelativeHumidity.level_type,
110
+ grib1_id=met_var.RelativeHumidity.grib1_id,
110
111
  ecmwf_id=met_var.RelativeHumidity.ecmwf_id,
111
112
  grib2_id=met_var.RelativeHumidity.grib2_id,
112
113
  description=(