pycontrails-0.54.0-cp310-cp310-win_amd64.whl → pycontrails-0.54.2-cp310-cp310-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


@@ -5,15 +5,25 @@ from __future__ import annotations
 import hashlib
 import json
 import logging
+import sys
 import warnings
 from collections.abc import Generator, Iterable, Iterator, Sequence
-from typing import Any, TypeVar, overload
+from typing import Any, overload
+
+if sys.version_info >= (3, 11):
+    from typing import Self
+else:
+    from typing_extensions import Self
+
+if sys.version_info >= (3, 12):
+    from typing import override
+else:
+    from typing_extensions import override
 
 import numpy as np
 import numpy.typing as npt
 import pandas as pd
 import xarray as xr
-from overrides import overrides
 
 from pycontrails.core import coordinates, interpolation
 from pycontrails.core import met as met_module
@@ -23,10 +33,6 @@ from pycontrails.utils import json as json_utils
 
 logger = logging.getLogger(__name__)
 
-#: Vector types
-VectorDatasetType = TypeVar("VectorDatasetType", bound="VectorDataset")
-GeoVectorDatasetType = TypeVar("GeoVectorDatasetType", bound="GeoVectorDataset")
-
 
 class AttrDict(dict[str, Any]):
     """Thin wrapper around dict to warn when setting a key that already exists."""
@@ -574,7 +580,7 @@ class VectorDataset:
         """
         return self.size > 0
 
-    def __add__(self: VectorDatasetType, other: VectorDatasetType | None) -> VectorDatasetType:
+    def __add__(self, other: Self | None) -> Self:
         """Concatenate two compatible instances of VectorDataset.
 
         In this context, compatibility means that both have identical :attr:`data` keys.
@@ -586,12 +592,12 @@ class VectorDataset:
 
         Parameters
         ----------
-        other : VectorDatasetType
+        other : Self | None
             Other values to concatenate
 
         Returns
         -------
-        VectorDatasetType
+        Self
             Concatenated values.
 
         Raises
@@ -610,11 +616,11 @@ class VectorDataset:
 
     @classmethod
     def sum(
-        cls: type[VectorDatasetType],
+        cls,
         vectors: Sequence[VectorDataset],
         infer_attrs: bool = True,
         fill_value: float | None = None,
-    ) -> VectorDatasetType:
+    ) -> Self:
         """Sum a list of :class:`VectorDataset` instances.
 
         Parameters
@@ -692,7 +698,7 @@ class VectorDataset:
             return cls(data, attrs=vectors[0].attrs, copy=False)
         return cls(data, copy=False)
 
-    def __eq__(self: VectorDatasetType, other: object) -> bool:
+    def __eq__(self, other: object) -> bool:
         """Determine if two instances are equal.
 
         NaN values are considered equal in this comparison.
@@ -700,7 +706,7 @@ class VectorDataset:
         Parameters
         ----------
         other : object
-            VectorDatasetType to compare with
+            VectorDataset to compare with
 
         Returns
         -------
@@ -784,8 +790,8 @@ class VectorDataset:
     # Utilities
     # ------------
 
-    def copy(self: VectorDatasetType, **kwargs: Any) -> VectorDatasetType:
-        """Return a copy of this VectorDatasetType class.
+    def copy(self, **kwargs: Any) -> Self:
+        """Return a copy of this instance.
 
         Parameters
         ----------
@@ -794,7 +800,7 @@ class VectorDataset:
 
         Returns
         -------
-        VectorDatasetType
+        Self
             Copy of class
         """
         return type(self)(data=self.data, attrs=self.attrs, copy=True, **kwargs)
@@ -820,9 +826,7 @@ class VectorDataset:
         data = {key: self[key] for key in keys}
         return VectorDataset(data=data, attrs=self.attrs, copy=copy)
 
-    def filter(
-        self: VectorDatasetType, mask: npt.NDArray[np.bool_], copy: bool = True, **kwargs: Any
-    ) -> VectorDatasetType:
+    def filter(self, mask: npt.NDArray[np.bool_], copy: bool = True, **kwargs: Any) -> Self:
         """Filter :attr:`data` according to a boolean array ``mask``.
 
         Entries corresponding to ``mask == True`` are kept.
@@ -840,7 +844,7 @@ class VectorDataset:
 
         Returns
         -------
-        VectorDatasetType
+        Self
             Containing filtered data
 
         Raises
@@ -855,7 +859,7 @@ class VectorDataset:
         data = {key: value[mask] for key, value in self.data.items()}
         return type(self)(data=data, attrs=self.attrs, copy=copy, **kwargs)
 
-    def sort(self: VectorDatasetType, by: str | list[str]) -> VectorDatasetType:
+    def sort(self, by: str | list[str]) -> Self:
         """Sort data by key(s).
 
         This method always creates a copy of the data by calling
@@ -868,7 +872,7 @@ class VectorDataset:
 
         Returns
         -------
-        VectorDatasetType
+        Self
             Instance with sorted data.
         """
         return type(self)(data=self.dataframe.sort_values(by=by), attrs=self.attrs, copy=False)
@@ -1044,7 +1048,6 @@ class VectorDataset:
         >>> pprint.pprint(fl.to_dict())
         {'aircraft_type': 'B737',
          'altitude_ft': [38661.0, 38661.0, 38661.0, 38661.0, 38661.0, 38661.0, 38661.0],
-         'crs': 'EPSG:4326',
          'latitude': [40.0, 41.724, 43.428, 45.111, 46.769, 48.399, 50.0],
          'longitude': [-100.0,
                        -101.441,
@@ -1110,12 +1113,12 @@ class VectorDataset:
 
     @classmethod
     def create_empty(
-        cls: type[VectorDatasetType],
+        cls,
         keys: Iterable[str],
         attrs: dict[str, Any] | None = None,
         **attrs_kwargs: Any,
-    ) -> VectorDatasetType:
-        """Create instance with variables defined by `keys` and size 0.
+    ) -> Self:
+        """Create instance with variables defined by ``keys`` and size 0.
 
         If instance requires additional variables to be defined, these keys will automatically
         be attached to returned instance.
@@ -1131,15 +1134,13 @@ class VectorDataset:
 
         Returns
         -------
-        VectorDatasetType
+        Self
             Empty VectorDataset instance.
         """
         return cls(data=_empty_vector_dict(keys or set()), attrs=attrs, copy=False, **attrs_kwargs)
 
     @classmethod
-    def from_dict(
-        cls: type[VectorDatasetType], obj: dict[str, Any], copy: bool = True, **obj_kwargs: Any
-    ) -> VectorDatasetType:
+    def from_dict(cls, obj: dict[str, Any], copy: bool = True, **obj_kwargs: Any) -> Self:
         """Create instance from dict representation containing data and attrs.
 
         Parameters
@@ -1154,7 +1155,7 @@ class VectorDataset:
 
         Returns
         -------
-        VectorDatasetType
+        Self
             VectorDataset instance.
 
         See Also
@@ -1172,9 +1173,7 @@ class VectorDataset:
 
         return cls(data=data, attrs=attrs, copy=copy)
 
-    def generate_splits(
-        self: VectorDatasetType, n_splits: int, copy: bool = True
-    ) -> Generator[VectorDatasetType, None, None]:
+    def generate_splits(self, n_splits: int, copy: bool = True) -> Generator[Self, None, None]:
         """Split instance into ``n_split`` sub-vectors.
 
         Parameters
@@ -1187,7 +1186,7 @@ class VectorDataset:
 
         Returns
         -------
-        Generator[VectorDatasetType, None, None]
+        Generator[Self, None, None]
             Generator of split vectors.
 
         See Also
@@ -1215,9 +1214,6 @@ class GeoVectorDataset(VectorDataset):
     Each spatial variable is expected to have "float32" or "float64" ``dtype``.
     The time variable is expected to have "datetime64[ns]" ``dtype``.
 
-    Use the attribute :attr:`attr["crs"]` to specify coordinate reference system
-    using `PROJ <https://proj.org/>`_ or `EPSG <https://epsg.org/home.html>`_ syntax.
-
     Parameters
     ----------
     data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
@@ -1364,18 +1360,14 @@ class GeoVectorDataset(VectorDataset):
             if arr.dtype not in float_dtype:
                 self.update({coord: arr.astype(np.float64)})
 
-        # set CRS to "EPSG:4326" by default
-        crs = self.attrs.setdefault("crs", "EPSG:4326")
-
-        if crs == "EPSG:4326":
-            longitude = self["longitude"]
-            if np.any(longitude > 180.0) or np.any(longitude < -180.0):
-                raise ValueError("EPSG:4326 longitude coordinates should lie between [-180, 180).")
-            latitude = self["latitude"]
-            if np.any(latitude > 90.0) or np.any(latitude < -90.0):
-                raise ValueError("EPSG:4326 latitude coordinates should lie between [-90, 90].")
+        longitude = self["longitude"]
+        if np.any(longitude > 180.0) or np.any(longitude < -180.0):
+            raise ValueError("EPSG:4326 longitude coordinates should lie between [-180, 180).")
+        latitude = self["latitude"]
+        if np.any(latitude > 90.0) or np.any(latitude < -90.0):
+            raise ValueError("EPSG:4326 latitude coordinates should lie between [-90, 90].")
 
-    @overrides
+    @override
     def _display_attrs(self) -> dict[str, str]:
         try:
             time0 = pd.Timestamp(np.nanmin(self["time"]))
@@ -1530,24 +1522,21 @@ class GeoVectorDataset(VectorDataset):
     # Utilities
     # ------------
 
-    def transform_crs(
-        self: GeoVectorDatasetType, crs: str, copy: bool = True
-    ) -> GeoVectorDatasetType:
+    def transform_crs(self, crs: str) -> tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]:
         """Transform trajectory data from one coordinate reference system (CRS) to another.
 
         Parameters
         ----------
         crs : str
             Target CRS. Passed into to :class:`pyproj.Transformer`. The source CRS
-            is inferred from the :attr:`attrs["crs"]` attribute.
+            is assumed to be EPSG:4326.
         copy : bool, optional
            Copy data on transformation. Defaults to True.
 
         Returns
         -------
-        GeoVectorDatasetType
-            Converted dataset with new coordinate reference system.
-            :attr:`attrs["crs"]` reflects new crs.
+        tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]
+            New x and y coordinates in the target CRS.
         """
         try:
             import pyproj
@@ -1559,14 +1548,9 @@ class GeoVectorDataset(VectorDataset):
                 pycontrails_optional_package="pyproj",
             )
 
-        transformer = pyproj.Transformer.from_crs(self.attrs["crs"], crs, always_xy=True)
-        lon, lat = transformer.transform(self["longitude"], self["latitude"])
-
-        ret = self.copy() if copy else self
-
-        ret.update(longitude=lon, latitude=lat)
-        ret.attrs.update(crs=crs)
-        return ret
+        crs_from = "EPSG:4326"
+        transformer = pyproj.Transformer.from_crs(crs_from, crs, always_xy=True)
+        return transformer.transform(self["longitude"], self["latitude"])
 
     def T_isa(self) -> npt.NDArray[np.float64]:
         """Calculate the ICAO standard atmosphere temperature at each point.
@@ -1938,13 +1922,13 @@ class GeoVectorDataset(VectorDataset):
     # ------------
 
     @classmethod
-    @overrides
+    @override
     def create_empty(
-        cls: type[GeoVectorDatasetType],
+        cls,
         keys: Iterable[str] | None = None,
         attrs: dict[str, Any] | None = None,
         **attrs_kwargs: Any,
-    ) -> GeoVectorDatasetType:
+    ) -> Self:
         keys = *cls.required_keys, "altitude", *(keys or ())
         return super().create_empty(keys, attrs, **attrs_kwargs)
 
@@ -1961,21 +1945,6 @@ class GeoVectorDataset(VectorDataset):
         """
         return json_utils.dataframe_to_geojson_points(self.dataframe)
 
-    def to_pseudo_mercator(self: GeoVectorDatasetType, copy: bool = True) -> GeoVectorDatasetType:
-        """Convert data from :attr:`attrs["crs"]` to Pseudo Mercator (EPSG:3857).
-
-        Parameters
-        ----------
-        copy : bool, optional
-            Copy data on transformation.
-            Defaults to True.
-
-        Returns
-        -------
-        GeoVectorDatasetType
-        """
-        return self.transform_crs("EPSG:3857", copy=copy)
-
     # ------------
     # Vector to grid
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 from pycontrails.datalib.ecmwf.arco_era5 import (
-    ARCOERA5,
+    ERA5ARCO,
     open_arco_era5_model_level_data,
     open_arco_era5_single_level,
 )
@@ -40,7 +40,7 @@ from pycontrails.datalib.ecmwf.variables import (
 )
 
 __all__ = [
-    "ARCOERA5",
+    "ERA5ARCO",
     "CDSCredentialsNotFound",
     "ERA5",
     "ERA5ModelLevel",
@@ -19,11 +19,16 @@ from __future__ import annotations
 
 import datetime
 import hashlib
+import sys
 from typing import Any
 
+if sys.version_info >= (3, 12):
+    from typing import override
+else:
+    from typing_extensions import override
+
 import numpy.typing as npt
 import xarray as xr
-from overrides import overrides
 
 from pycontrails.core import cache, met_var
 from pycontrails.core.met import MetDataset
@@ -197,7 +202,7 @@ def open_arco_era5_single_level(
     return MetDataset(ds).data
 
 
-class ARCOERA5(ecmwf_common.ECMWFAPI):
+class ERA5ARCO(ecmwf_common.ECMWFAPI):
     r"""ARCO ERA5 data accessed remotely through Google Cloud Storage.
 
     This is a high-level interface to access and cache
@@ -274,7 +279,7 @@ class ARCOERA5(ecmwf_common.ECMWFAPI):
         """
         return ecmwf_variables.SURFACE_VARIABLES
 
-    @overrides
+    @override
     def download_dataset(self, times: list[datetime.datetime]) -> None:
         if not times:
             return
@@ -286,7 +291,7 @@ class ARCOERA5(ecmwf_common.ECMWFAPI):
 
         self.cache_dataset(ds)
 
-    @overrides
+    @override
     def create_cachepath(self, t: datetime.datetime) -> str:
         if self.cachestore is None:
             msg = "Attribute self.cachestore must be defined to create cache path"
@@ -302,7 +307,7 @@ class ARCOERA5(ecmwf_common.ECMWFAPI):
 
         return self.cachestore.path(cache_path)
 
-    @overrides
+    @override
     def open_metdataset(
         self,
         dataset: xr.Dataset | None = None,
@@ -331,7 +336,7 @@ class ARCOERA5(ecmwf_common.ECMWFAPI):
         self.set_metadata(mds)
         return mds
 
-    @overrides
+    @override
     def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
         ds.attrs.update(
             provider="ECMWF",
@@ -4,14 +4,19 @@ from __future__ import annotations
 
 import logging
 import os
+import sys
 from typing import Any
 
+if sys.version_info >= (3, 12):
+    from typing import override
+else:
+    from typing_extensions import override
+
 LOG = logging.getLogger(__name__)
 
 import numpy as np
 import pandas as pd
 import xarray as xr
-from overrides import overrides
 
 from pycontrails.core import met
 from pycontrails.datalib._met_utils import metsource
@@ -88,7 +93,7 @@ class ECMWFAPI(metsource.MetDataSource):
         kwargs.setdefault("cachestore", self.cachestore)
         return met.MetDataset(ds, **kwargs)
 
-    @overrides
+    @override
     def cache_dataset(self, dataset: xr.Dataset) -> None:
         if self.cachestore is None:
             LOG.debug("Cache is turned off, skipping")
@@ -7,16 +7,21 @@ import hashlib
 import logging
 import os
 import pathlib
+import sys
 import warnings
 from contextlib import ExitStack
 from datetime import datetime
 from typing import TYPE_CHECKING, Any
 
+if sys.version_info >= (3, 12):
+    from typing import override
+else:
+    from typing_extensions import override
+
 LOG = logging.getLogger(__name__)
 
 import pandas as pd
 import xarray as xr
-from overrides import overrides
 
 import pycontrails
 from pycontrails.core import cache
@@ -34,7 +39,7 @@ class ERA5(ECMWFAPI):
     """Class to support ERA5 data access, download, and organization.
 
     Requires account with
-    `Copernicus Data Portal <https://cds.climate.copernicus.eu/cdsapp#!/home>`_
+    `Copernicus Data Portal <https://cds.climate.copernicus.eu/how-to-api>`_
     and local credentials.
 
     API credentials can be stored in a ``~/.cdsapirc`` file
@@ -347,7 +352,7 @@ class ERA5(ECMWFAPI):
         # return cache path
         return self.cachestore.path(f"{datestr}-{suffix}.nc")
 
-    @overrides
+    @override
     def download_dataset(self, times: list[datetime]) -> None:
         download_times: dict[datetime, list[datetime]] = collections.defaultdict(list)
         for t in times:
@@ -359,7 +364,7 @@ class ERA5(ECMWFAPI):
         for times_for_day in download_times.values():
             self._download_file(times_for_day)
 
-    @overrides
+    @override
     def open_metdataset(
         self,
         dataset: xr.Dataset | None = None,
@@ -399,7 +404,7 @@ class ERA5(ECMWFAPI):
         self.set_metadata(mds)
         return mds
 
-    @overrides
+    @override
     def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
         if self.product_type == "reanalysis":
             product = "reanalysis"
@@ -7,9 +7,9 @@ This module supports
 - Local caching of processed netCDF files.
 - Opening processed and cached files as a :class:`pycontrails.MetDataset` object.
 
-Consider using :class:`pycontrails.datalib.ecmwf.ARCOERA5`
+Consider using :class:`pycontrails.datalib.ecmwf.ERA5ARCO`
 to access model-level data from the nominal ERA5 reanalysis between 1959 and 2022.
-:class:`pycontrails.datalib.ecmwf.ARCOERA5` accesses data through Google's
+:class:`pycontrails.datalib.ecmwf.ERA5ARCO` accesses data through Google's
 `Analysis-Ready, Cloud Optimized ERA5 dataset <https://cloud.google.com/storage/docs/public-datasets/era5>`_
 and has lower latency than this module, which retrieves data from the
 `Copernicus Climate Data Store <https://cds.climate.copernicus.eu/#!/home>`_.
@@ -25,12 +25,16 @@ import contextlib
 import hashlib
 import logging
 import os
+import sys
 import threading
 import warnings
 from datetime import datetime
 from typing import Any
 
-from overrides import overrides
+if sys.version_info >= (3, 12):
+    from typing import override
+else:
+    from typing_extensions import override
 
 LOG = logging.getLogger(__name__)
 
@@ -56,7 +60,7 @@ class ERA5ModelLevel(ECMWFAPI):
     pressure-level with much lower vertical resolution.
 
     Requires account with
-    `Copernicus Data Portal <https://cds.climate.copernicus.eu/cdsapp#!/home>`_
+    `Copernicus Data Portal <https://cds.climate.copernicus.eu/how-to-api>`_
     and local credentials.
 
     API credentials can be stored in a ``~/.cdsapirc`` file
@@ -245,7 +249,7 @@ class ERA5ModelLevel(ECMWFAPI):
         """
         return "reanalysis-era5-complete"
 
-    @overrides
+    @override
     def create_cachepath(self, t: datetime | pd.Timestamp) -> str:
         """Return cachepath to local ERA5 data file based on datetime.
 
@@ -277,7 +281,7 @@ class ERA5ModelLevel(ECMWFAPI):
 
         return self.cachestore.path(cache_path)
 
-    @overrides
+    @override
     def download_dataset(self, times: list[datetime]) -> None:
         # group data to request by month (nominal) or by day (ensemble)
         requests: dict[datetime, list[datetime]] = collections.defaultdict(list)
@@ -294,7 +298,7 @@ class ERA5ModelLevel(ECMWFAPI):
         for times_in_request in requests.values():
             self._download_convert_cache_handler(times_in_request)
 
-    @overrides
+    @override
     def open_metdataset(
         self,
         dataset: xr.Dataset | None = None,
@@ -320,7 +324,7 @@ class ERA5ModelLevel(ECMWFAPI):
         self.set_metadata(mds)
         return mds
 
-    @overrides
+    @override
     def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
         if self.product_type == "reanalysis":
             product = "reanalysis"
@@ -5,16 +5,21 @@ from __future__ import annotations
 import hashlib
 import logging
 import pathlib
+import sys
 from contextlib import ExitStack
 from datetime import datetime
 from typing import TYPE_CHECKING, Any
 
+if sys.version_info >= (3, 12):
+    from typing import override
+else:
+    from typing_extensions import override
+
 LOG = logging.getLogger(__name__)
 
 import numpy as np
 import pandas as pd
 import xarray as xr
-from overrides import overrides
 
 import pycontrails
 from pycontrails.core import cache
@@ -326,9 +331,9 @@ class HRES(ECMWFAPI):
             f" {getattr(self, 'steps', '')}"
         )
 
-    @classmethod
+    @staticmethod
     def create_synoptic_time_ranges(
-        self, timesteps: list[pd.Timestamp]
+        timesteps: list[pd.Timestamp],
     ) -> list[tuple[pd.Timestamp, pd.Timestamp]]:
         """Create synoptic time bounds encompassing date range.
 
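Besides the decorator swaps elsewhere, this hunk fixes a latent API wart:
``create_synoptic_time_ranges`` was declared a ``classmethod`` whose first parameter was
misleadingly named ``self`` (the class object is what actually gets bound there).
Redeclaring it a ``staticmethod`` removes the implicit first argument. A minimal sketch of
the two shapes, with an illustrative class name:

    import pandas as pd

    class Demo:
        # 0.54.0 shape: a classmethod whose first parameter is named "self"
        # still receives the class object, not an instance.
        @classmethod
        def old_style(self, timesteps: list[pd.Timestamp]) -> int:
            return len(timesteps)

        # 0.54.2 shape: a staticmethod takes no implicit first argument.
        @staticmethod
        def new_style(timesteps: list[pd.Timestamp]) -> int:
            return len(timesteps)

    ts = [pd.Timestamp("2022-01-01"), pd.Timestamp("2022-01-02")]
    assert Demo.old_style(ts) == Demo.new_style(ts) == 2
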
@@ -556,7 +561,7 @@ class HRES(ECMWFAPI):
             f"\n\tgrid={request['grid']},\n\tlevtype={request['levtype']}{levelist}"
         )
 
-    @overrides
+    @override
     def create_cachepath(self, t: datetime) -> str:
         if self.cachestore is None:
             raise ValueError("self.cachestore attribute must be defined to create cache path")
@@ -574,7 +579,7 @@ class HRES(ECMWFAPI):
         # return cache path
         return self.cachestore.path(f"{datestr}-{step}-{suffix}.nc")
 
-    @overrides
+    @override
     def download_dataset(self, times: list[datetime]) -> None:
         """Download data from data source for input times.
 
@@ -595,7 +600,7 @@ class HRES(ECMWFAPI):
         elif len(steps) > 0:
             self._download_file(steps)
 
-    @overrides
+    @override
     def open_metdataset(
         self,
         dataset: xr.Dataset | None = None,
@@ -635,7 +640,7 @@ class HRES(ECMWFAPI):
         self.set_metadata(mds)
         return mds
 
-    @overrides
+    @override
     def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
         if self.stream == "oper":
             product = "forecast"