pycontrails-0.54.0-cp311-cp311-win_amd64.whl → pycontrails-0.54.2-cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pycontrails has been flagged as potentially problematic. See the package's registry listing for more details.

pycontrails/core/met.py CHANGED
@@ -6,6 +6,7 @@ import hashlib
6
6
  import json
7
7
  import logging
8
8
  import pathlib
9
+ import sys
9
10
  import typing
10
11
  import warnings
11
12
  from abc import ABC, abstractmethod
@@ -29,11 +30,20 @@ from typing import (
29
30
  overload,
30
31
  )
31
32
 
33
+ if sys.version_info >= (3, 11):
34
+ from typing import Self
35
+ else:
36
+ from typing_extensions import Self
37
+
38
+ if sys.version_info >= (3, 12):
39
+ from typing import override
40
+ else:
41
+ from typing_extensions import override
42
+
32
43
  import numpy as np
33
44
  import numpy.typing as npt
34
45
  import pandas as pd
35
46
  import xarray as xr
36
- from overrides import overrides
37
47
 
38
48
  from pycontrails.core import interpolation
39
49
  from pycontrails.core import vector as vector_module
@@ -70,7 +80,7 @@ class MetBase(ABC, Generic[XArrayType]):
70
80
  cachestore: CacheStore | None
71
81
 
72
82
  #: Default dimension order for DataArray or Dataset (x, y, z, t)
73
- dim_order: tuple[Hashable, Hashable, Hashable, Hashable] = (
83
+ dim_order = (
74
84
  "longitude",
75
85
  "latitude",
76
86
  "level",
@@ -97,17 +107,18 @@ class MetBase(ABC, Generic[XArrayType]):
97
107
  ValueError
98
108
  If data does not contain all four coordinates (longitude, latitude, level, time).
99
109
  """
100
- for dim in self.dim_order:
101
- if dim not in self.data.dims:
102
- if dim == "level":
103
- msg = (
104
- f"Meteorology data must contain dimension '{dim}'. "
105
- "For single level data, set 'level' coordinate to constant -1 "
106
- "using `ds = ds.expand_dims({'level': [-1]})`"
107
- )
108
- else:
109
- msg = f"Meteorology data must contain dimension '{dim}'."
110
- raise ValueError(msg)
110
+ missing = set(self.dim_order).difference(self.data.dims)
111
+ if not missing:
112
+ return
113
+
114
+ dim = sorted(missing)
115
+ msg = f"Meteorology data must contain dimension(s): {dim}."
116
+ if "level" in dim:
117
+ msg += (
118
+ " For single level data, set 'level' coordinate to constant -1 "
119
+ "using `ds = ds.expand_dims({'level': [-1]})`"
120
+ )
121
+ raise ValueError(msg)
111
122
 
112
123
  def _validate_longitude(self) -> None:
113
124
  """Check longitude bounds.
@@ -123,8 +134,8 @@ class MetBase(ABC, Generic[XArrayType]):
123
134
  if longitude.dtype != COORD_DTYPE:
124
135
  raise ValueError(
125
136
  "Longitude values must be of type float64. "
126
- "Initiate with 'copy=True' to convert to float64. "
127
- "Initiate with 'validate=False' to skip validation."
137
+ "Instantiate with 'copy=True' to convert to float64. "
138
+ "Instantiate with 'validate=False' to skip validation."
128
139
  )
129
140
 
130
141
  if self.is_wrapped:
@@ -167,8 +178,8 @@ class MetBase(ABC, Generic[XArrayType]):
167
178
  if latitude.dtype != COORD_DTYPE:
168
179
  raise ValueError(
169
180
  "Latitude values must be of type float64. "
170
- "Initiate with 'copy=True' to convert to float64. "
171
- "Initiate with 'validate=False' to skip validation."
181
+ "Instantiate with 'copy=True' to convert to float64. "
182
+ "Instantiate with 'validate=False' to skip validation."
172
183
  )
173
184
 
174
185
  if latitude[0] < -90.0:
@@ -192,10 +203,10 @@ class MetBase(ABC, Generic[XArrayType]):
192
203
  """
193
204
  indexes = self.indexes
194
205
  if not np.all(np.diff(indexes["time"]) > np.timedelta64(0, "ns")):
195
- raise ValueError("Coordinate `time` not sorted. Initiate with `copy=True`.")
206
+ raise ValueError("Coordinate 'time' not sorted. Instantiate with 'copy=True'.")
196
207
  for coord in self.dim_order[:3]: # exclude time, the 4th dimension
197
208
  if not np.all(np.diff(indexes[coord]) > 0.0):
198
- raise ValueError(f"Coordinate '{coord}' not sorted. Initiate with 'copy=True'.")
209
+ raise ValueError(f"Coordinate '{coord}' not sorted. Instantiate with 'copy=True'.")
199
210
 
200
211
  def _validate_transpose(self) -> None:
201
212
  """Check that data is transposed according to :attr:`dim_order`."""
@@ -204,11 +215,11 @@ class MetBase(ABC, Generic[XArrayType]):
204
215
  if da.dims != self.dim_order:
205
216
  if key is not None:
206
217
  msg = (
207
- f"Data dimension not transposed on variable '{key}'. Initiate with"
218
+ f"Data dimension not transposed on variable '{key}'. Instantiate with"
208
219
  " 'copy=True'."
209
220
  )
210
221
  else:
211
- msg = "Data dimension not transposed. Initiate with 'copy=True'."
222
+ msg = "Data dimension not transposed. Instantiate with 'copy=True'."
212
223
  raise ValueError(msg)
213
224
 
214
225
  data = self.data
@@ -228,6 +239,12 @@ class MetBase(ABC, Generic[XArrayType]):
228
239
  self._validate_longitude()
229
240
  self._validate_latitude()
230
241
  self._validate_transpose()
242
+ if self.data["level"].dtype != COORD_DTYPE:
243
+ raise ValueError(
244
+ "Level values must be of type float64. "
245
+ "Instantiate with 'copy=True' to convert to float64. "
246
+ "Instantiate with 'validate=False' to skip validation."
247
+ )
231
248
 
232
249
  def _preprocess_dims(self, wrap_longitude: bool) -> None:
233
250
  """Confirm DataArray or Dataset include required dimension in a consistent format.
@@ -363,16 +380,6 @@ class MetBase(ABC, Generic[XArrayType]):
363
380
  "time": variables["time"].to_numpy(),
364
381
  }
365
382
 
366
- @property
367
- def variables(self) -> dict[Hashable, pd.Index]:
368
- """See :attr:`indexes`."""
369
- warnings.warn(
370
- "The 'variables' property is deprecated and will be removed in a future release. "
371
- "Use 'indexes' instead.",
372
- DeprecationWarning,
373
- )
374
- return self.indexes
375
-
376
383
  @property
377
384
  def indexes(self) -> dict[Hashable, pd.Index]:
378
385
  """Low level access to underlying :attr:`data` indexes.
@@ -745,8 +752,8 @@ class MetDataset(MetBase):
745
752
  except KeyError as e:
746
753
  raise KeyError(
747
754
  f"Variable {key} not found. Available variables: {', '.join(self.data.data_vars)}. "
748
- "To get items (e.g. `time` or `level`) from underlying `xr.Dataset` object, "
749
- "use the `data` attribute."
755
+ "To get items (e.g. 'time' or 'level') from underlying xr.Dataset object, "
756
+ "use the 'data' attribute."
750
757
  ) from e
751
758
  return MetDataArray(da, copy=False, validate=False)
752
759
 
@@ -866,13 +873,13 @@ class MetDataset(MetBase):
866
873
  return key in self.data
867
874
 
868
875
  @property
869
- @overrides
876
+ @override
870
877
  def shape(self) -> tuple[int, int, int, int]:
871
878
  sizes = self.data.sizes
872
879
  return sizes["longitude"], sizes["latitude"], sizes["level"], sizes["time"]
873
880
 
874
881
  @property
875
- @overrides
882
+ @override
876
883
  def size(self) -> int:
877
884
  return np.prod(self.shape).item()
878
885
 
@@ -981,7 +988,7 @@ class MetDataset(MetBase):
981
988
  hash: str,
982
989
  cachestore: CacheStore | None = None,
983
990
  chunks: dict[str, int] | None = None,
984
- ) -> MetDataset:
991
+ ) -> Self:
985
992
  """Load saved intermediate from :attr:`cachestore`.
986
993
 
987
994
  Parameters
@@ -996,7 +1003,7 @@ class MetDataset(MetBase):
996
1003
 
997
1004
  Returns
998
1005
  -------
999
- MetDataset
1006
+ Self
1000
1007
  New MetDataArray with loaded data.
1001
1008
  """
1002
1009
  cachestore = cachestore or DiskCacheStore()
@@ -1018,14 +1025,14 @@ class MetDataset(MetBase):
1018
1025
  cachestore=self.cachestore,
1019
1026
  )
1020
1027
 
1021
- @overrides
1028
+ @override
1022
1029
  def broadcast_coords(self, name: str) -> xr.DataArray:
1023
1030
  da = xr.ones_like(self.data[next(iter(self.data.keys()))]) * self.data[name]
1024
1031
  da.name = name
1025
1032
 
1026
1033
  return da
1027
1034
 
1028
- @overrides
1035
+ @override
1029
1036
  def downselect(self, bbox: tuple[float, ...]) -> MetDataset:
1030
1037
  data = downselect(self.data, bbox)
1031
1038
  return MetDataset(data, cachestore=self.cachestore, copy=False)
@@ -1057,14 +1064,13 @@ class MetDataset(MetBase):
1057
1064
  >>> era5 = ERA5(time=times, variables=variables, pressure_levels=levels)
1058
1065
  >>> met = era5.open_metdataset()
1059
1066
  >>> met.to_vector(transfer_attrs=False)
1060
- GeoVectorDataset [6 keys x 4152960 length, 1 attributes]
1067
+ GeoVectorDataset [6 keys x 4152960 length, 0 attributes]
1061
1068
  Keys: longitude, latitude, level, time, air_temperature, ..., specific_humidity
1062
1069
  Attributes:
1063
1070
  time [2022-03-01 00:00:00, 2022-03-01 01:00:00]
1064
1071
  longitude [-180.0, 179.75]
1065
1072
  latitude [-90.0, 90.0]
1066
1073
  altitude [10362.8, 11783.9]
1067
- crs EPSG:4326
1068
1074
 
1069
1075
  """
1070
1076
  coords_keys = self.data.dims
@@ -1183,7 +1189,7 @@ class MetDataset(MetBase):
1183
1189
  latitude: npt.ArrayLike | float,
1184
1190
  level: npt.ArrayLike | float,
1185
1191
  time: npt.ArrayLike | np.datetime64,
1186
- ) -> MetDataset:
1192
+ ) -> Self:
1187
1193
  r"""Create a :class:`MetDataset` containing a coordinate skeleton from coordinate arrays.
1188
1194
 
1189
1195
  Parameters
@@ -1197,7 +1203,7 @@ class MetDataset(MetBase):
1197
1203
 
1198
1204
  Returns
1199
1205
  -------
1200
- MetDataset
1206
+ Self
1201
1207
  MetDataset with no variables.
1202
1208
 
1203
1209
  Examples
@@ -1283,7 +1289,7 @@ class MetDataset(MetBase):
1283
1289
  return cls(xr.Dataset({}, coords=coords))
1284
1290
 
1285
1291
  @classmethod
1286
- def from_zarr(cls, store: Any, **kwargs: Any) -> MetDataset:
1292
+ def from_zarr(cls, store: Any, **kwargs: Any) -> Self:
1287
1293
  """Create a :class:`MetDataset` from a path to a Zarr store.
1288
1294
 
1289
1295
  Parameters
@@ -1295,7 +1301,7 @@ class MetDataset(MetBase):
1295
1301
 
1296
1302
  Returns
1297
1303
  -------
1298
- MetDataset
1304
+ Self
1299
1305
  MetDataset with data from Zarr store.
1300
1306
  """
1301
1307
  kwargs.setdefault("storage_options", {"read_only": True})
@@ -1374,20 +1380,9 @@ class MetDataArray(MetBase):
1374
1380
  copy: bool = True,
1375
1381
  validate: bool = True,
1376
1382
  name: Hashable | None = None,
1377
- **kwargs: Any,
1378
1383
  ) -> None:
1379
- # init cache
1380
1384
  self.cachestore = cachestore
1381
1385
 
1382
- # try to create DataArray out of input data and **kwargs
1383
- if not isinstance(data, xr.DataArray):
1384
- warnings.warn(
1385
- "Input 'data' must be an xarray DataArray. "
1386
- "Passing arbitrary kwargs will be removed in future versions.",
1387
- DeprecationWarning,
1388
- )
1389
- data = xr.DataArray(data, **kwargs)
1390
-
1391
1386
  if copy:
1392
1387
  self.data = data.copy()
1393
1388
  self._preprocess_dims(wrap_longitude)
@@ -1415,8 +1410,9 @@ class MetDataArray(MetBase):
1415
1410
 
1416
1411
  See Also
1417
1412
  --------
1418
- - :meth:`xr.Dataset.load`
1419
- - :meth:`xr.DataArray.load`
1413
+ :meth:`xarray.Dataset.load`
1414
+ :meth:`xarray.DataArray.load`
1415
+
1420
1416
  """
1421
1417
  if not self.in_memory:
1422
1418
  self._check_memory("Extracting numpy array from")
@@ -1447,12 +1443,12 @@ class MetDataArray(MetBase):
1447
1443
  return np.array_equal(self.data, self.data.astype(bool))
1448
1444
 
1449
1445
  @property
1450
- @overrides
1446
+ @override
1451
1447
  def size(self) -> int:
1452
1448
  return self.data.size
1453
1449
 
1454
1450
  @property
1455
- @overrides
1451
+ @override
1456
1452
  def shape(self) -> tuple[int, int, int, int]:
1457
1453
  # https://github.com/python/mypy/issues/1178
1458
1454
  return typing.cast(tuple[int, int, int, int], self.data.shape)
@@ -1842,7 +1838,7 @@ class MetDataArray(MetBase):
1842
1838
  hash: str,
1843
1839
  cachestore: CacheStore | None = None,
1844
1840
  chunks: dict[str, int] | None = None,
1845
- ) -> MetDataArray:
1841
+ ) -> Self:
1846
1842
  """Load saved intermediate from :attr:`cachestore`.
1847
1843
 
1848
1844
  Parameters
@@ -2031,7 +2027,7 @@ class MetDataArray(MetBase):
2031
2027
  See Also
2032
2028
  --------
2033
2029
  :meth:`to_polyhedra`
2034
- :func:`pycontrails.core.polygons.find_multipolygons`
2030
+ :func:`polygons.find_multipolygons`
2035
2031
 
2036
2032
  Examples
2037
2033
  --------
@@ -2235,7 +2231,7 @@ class MetDataArray(MetBase):
2235
2231
 
2236
2232
  Returns
2237
2233
  -------
2238
- dict | :class:`o3d.geometry.TriangleMesh`
2234
+ dict | open3d.geometry.TriangleMesh
2239
2235
  Python representation of geojson object or `Open3D Triangle Mesh
2240
2236
  <http://www.open3d.org/docs/release/tutorial/geometry/mesh.html>`_ depending on the
2241
2237
  `return_type` parameter.
@@ -2249,8 +2245,9 @@ class MetDataArray(MetBase):
2249
2245
 
2250
2246
  See Also
2251
2247
  --------
2252
- :meth:`to_polygons`
2253
- `skimage.measure.marching_cubes <https://scikit-image.org/docs/dev/api/skimage.measure.html#skimage.measure.marching_cubes>`_
2248
+ :meth:`to_polygon_feature`
2249
+ :func:`skimage.measure.marching_cubes`
2250
+ :class:`open3d.geometry.TriangleMesh`
2254
2251
 
2255
2252
  Notes
2256
2253
  -----
@@ -2395,14 +2392,14 @@ class MetDataArray(MetBase):
2395
2392
  )
2396
2393
  return mesh
2397
2394
 
2398
- @overrides
2395
+ @override
2399
2396
  def broadcast_coords(self, name: str) -> xr.DataArray:
2400
2397
  da = xr.ones_like(self.data) * self.data[name]
2401
2398
  da.name = name
2402
2399
 
2403
2400
  return da
2404
2401
 
2405
- @overrides
2402
+ @override
2406
2403
  def downselect(self, bbox: tuple[float, ...]) -> MetDataArray:
2407
2404
  data = downselect(self.data, bbox)
2408
2405
  return MetDataArray(data, cachestore=self.cachestore)