pycontrails-0.41.0-cp39-cp39-macosx_11_0_arm64.whl → pycontrails-0.42.0-cp39-cp39-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of pycontrails has been flagged as a potentially problematic release.
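
Most of the hunks below widen type annotations in the vector data classes from np.ndarray to npt.ArrayLike; the diffed docstrings state that values are cast to np.ndarray on assignment. The short sketch below (plain NumPy, not pycontrails itself) illustrates the coercion pattern those annotations describe; it is an illustration of the typing change, not a copy of pycontrails internals.

import numpy as np
import numpy.typing as npt


def coerce(v: npt.ArrayLike) -> np.ndarray:
    # Mirror the np.asarray(v) calls visible in the diff: accept any array-like
    # (list, tuple, ndarray) and always store a NumPy array.
    return np.asarray(v)


coerce([1.0, 2.0, 3.0])  # -> array([1., 2., 3.])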

@@ -97,7 +97,7 @@ class VectorDataDict(Dict[str, np.ndarray]):
         for arr in self.values():
             self._validate_array(arr)
 
-    def __setitem__(self, k: str, v: np.ndarray) -> None:
+    def __setitem__(self, k: str, v: npt.ArrayLike) -> None:
         """Set new key-value pair to instance and warn when overwriting existing key.
 
         This method casts ``v`` to a ``np.ndarray`` and ensures that the array size is
@@ -131,7 +131,7 @@ class VectorDataDict(Dict[str, np.ndarray]):
         if not len(self):
             del self._size
 
-    def setdefault(self, k: str, default: np.ndarray | None = None) -> np.ndarray:
+    def setdefault(self, k: str, default: npt.ArrayLike | None = None) -> np.ndarray:
         """Thin wrapper around ``dict.setdefault``.
 
         The main purpose of overriding is to run :meth:`_validate_array()` on set.
@@ -140,7 +140,7 @@ class VectorDataDict(Dict[str, np.ndarray]):
         ----------
         k : str
             Key
-        default : np.ndarray, optional
+        default : npt.ArrayLike, optional
            Default value for key ``k``
 
         Returns
@@ -156,10 +156,10 @@ class VectorDataDict(Dict[str, np.ndarray]):
             default = np.array([])
 
         self[k] = default
-        return default
+        return self[k]
 
     def update(  # type: ignore[override]
-        self, other: dict[str, np.ndarray] | None = None, **kwargs: np.ndarray
+        self, other: dict[str, npt.ArrayLike] | None = None, **kwargs: npt.ArrayLike
     ) -> None:
         """Update values without warning if overwriting.
 
@@ -168,24 +168,24 @@ class VectorDataDict(Dict[str, np.ndarray]):
 
         Parameters
         ----------
-        other : dict[str, np.ndarray] | None, optional
+        other : dict[str, npt.ArrayLike] | None, optional
            Fields to update as dict
-        **kwargs : np.ndarray
+        **kwargs : npt.ArrayLike
            Fields to update as kwargs
         """
         other = other or {}
-        other = {k: np.asarray(v) for k, v in other.items()}
-        for arr in other.values():
+        other_arrs = {k: np.asarray(v) for k, v in other.items()}
+        for arr in other_arrs.values():
             self._validate_array(arr)
 
-        super().update(other)
+        super().update(other_arrs)
 
         # validate any kwarg arrays
-        kwargs = {k: np.asarray(v) for k, v in kwargs.items()}
-        for arr in kwargs.values():
+        kwargs_arr = {k: np.asarray(v) for k, v in kwargs.items()}
+        for arr in kwargs_arr.values():
             self._validate_array(arr)
 
-        super().update(kwargs)
+        super().update(kwargs_arr)
 
     def _validate_array(self, arr: np.ndarray) -> None:
         """Ensure that `arr` is compatible with instance.
@@ -240,7 +240,7 @@ class VectorDataset:
 
     Parameters
     ----------
-    data : dict[str, np.ndarray] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
+    data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
         Initial data, by default None
     attrs : dict[str, Any] | AttrDict, optional
         Dictionary of attributes, by default None
@@ -265,7 +265,11 @@ class VectorDataset:
 
     def __init__(
         self,
-        data: dict[str, np.ndarray] | pd.DataFrame | VectorDataDict | VectorDataset | None = None,
+        data: dict[str, npt.ArrayLike]
+        | pd.DataFrame
+        | VectorDataDict
+        | VectorDataset
+        | None = None,
         attrs: dict[str, Any] | AttrDict | None = None,
         copy: bool = True,
         **attrs_kwargs: Any,
@@ -368,14 +372,14 @@ class VectorDataset:
         """
         return self.data.get(key, default_value)
 
-    def __setitem__(self, key: str, values: np.ndarray) -> None:
+    def __setitem__(self, key: str, values: npt.ArrayLike) -> None:
         """Set values at key `key` on :attr:`data`.
 
         Parameters
         ----------
         key : str
             Key name in :attr:`data`
-        values : np.ndarray
+        values : npt.ArrayLike
             Values to set to :attr:`data`. Array size must be compatible with existing data.
         """
         self.data[key] = values
@@ -416,27 +420,29 @@ class VectorDataset:
         return key in self.data
 
     def update(
-        self, other: VectorDataDict | dict[str, np.ndarray] | None = None, **kwargs: np.ndarray
+        self,
+        other: dict[str, npt.ArrayLike] | None = None,
+        **kwargs: npt.ArrayLike,
     ) -> None:
         """Update values in :attr:`data` dict without warning if overwriting.
 
         Parameters
         ----------
-        other : VectorDataDict | dict[str, np.ndarray] | None, optional
+        other : dict[str, npt.ArrayLike] | None, optional
            Fields to update as dict
-        **kwargs : np.ndarray
+        **kwargs : npt.ArrayLike
            Fields to update as kwargs
         """
         self.data.update(other, **kwargs)
 
-    def setdefault(self, key: str, default: np.ndarray | None = None) -> np.ndarray:
+    def setdefault(self, key: str, default: npt.ArrayLike | None = None) -> np.ndarray:
         """Shortcut to :attr:`data.setdefault`.
 
         Parameters
         ----------
         key : str
             Key in :attr:`data` dict.
-        default : np.ndarray, optional
+        default : npt.ArrayLike, optional
            Values to use as default, if key is not defined
 
         Returns
@@ -777,14 +783,16 @@ class VectorDataset:
         data = {key: self[key] for key in keys}
         return VectorDataset(data=data, attrs=self.attrs, copy=copy)
 
-    def filter(self: VectorDatasetType, mask: np.ndarray, copy: bool = True) -> VectorDatasetType:
+    def filter(
+        self: VectorDatasetType, mask: npt.NDArray[np.bool_], copy: bool = True
+    ) -> VectorDatasetType:
         """Filter :attr:`data` according to a boolean array ``mask``.
 
         Entries corresponding to ``mask == True`` are kept.
 
         Parameters
         ----------
-        mask : np.ndarray
+        mask : npt.NDArray[np.bool_]
            Boolean array with compatible shape.
         copy : bool, optional
            Copy data on filter. Defaults to True. See
@@ -1035,26 +1043,26 @@ class GeoVectorDataset(VectorDataset):
 
     Parameters
     ----------
-    data : dict[str, np.ndarray] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
+    data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
         Data dictionary or :class:`pandas.DataFrame` .
         Must include keys/columns ``time``, ``latitude``, ``longitude``, ``altitude`` or ``level``.
         Keyword arguments for ``time``, ``latitude``, ``longitude``, ``altitude`` or ``level``
         override ``data`` inputs. Expects ``altitude`` in meters and ``time``
         as a DatetimeLike (or array that can be processed with :meth:`pd.to_datetime`).
         Additional waypoint-specific data can be included as additional keys/columns.
-    longitude : np.ndarray, optional
+    longitude : npt.ArrayLike, optional
        Longitude data.
        Defaults to None.
-    latitude : np.ndarray, optional
+    latitude : npt.ArrayLike, optional
        Latitude data.
        Defaults to None.
-    altitude : np.ndarray, optional
+    altitude : npt.ArrayLike, optional
        Altitude data, [:math:`m`].
        Defaults to None.
-    level : np.ndarray, optional
+    level : npt.ArrayLike, optional
        Level data, [:math:`hPa`].
        Defaults to None.
-    time : np.ndarray, optional
+    time : npt.ArrayLike, optional
        Time data.
        Expects an array of DatetimeLike values,
        or array that can be processed with :meth:`pd.to_datetime`.
@@ -1084,7 +1092,11 @@ class GeoVectorDataset(VectorDataset):
 
     def __init__(
         self,
-        data: dict[str, np.ndarray] | pd.DataFrame | VectorDataDict | VectorDataset | None = None,
+        data: dict[str, npt.ArrayLike]
+        | pd.DataFrame
+        | VectorDataDict
+        | VectorDataset
+        | None = None,
         longitude: npt.ArrayLike | None = None,
         latitude: npt.ArrayLike | None = None,
         altitude: npt.ArrayLike | None = None,
@@ -1201,7 +1213,7 @@ class GeoVectorDataset(VectorDataset):
         return attrs
 
     @property
-    def level(self) -> np.ndarray:
+    def level(self) -> npt.NDArray[np.float_]:
         """Get pressure ``level`` values for points.
 
         Automatically calculates pressure level using :func:`units.m_to_pl` using ``altitude`` key.
@@ -1212,7 +1224,7 @@ class GeoVectorDataset(VectorDataset):
 
         Returns
         -------
-        np.ndarray
+        npt.NDArray[np.float_]
            Point pressure level values, [:math:`hPa`]
         """
         try:
@@ -1221,7 +1233,7 @@ class GeoVectorDataset(VectorDataset):
             return units.m_to_pl(self.altitude)
 
     @property
-    def altitude(self) -> np.ndarray:
+    def altitude(self) -> npt.NDArray[np.float_]:
         """Get altitude.
 
         Automatically calculates altitude using :func:`units.pl_to_m` using ``level`` key.
@@ -1232,7 +1244,7 @@ class GeoVectorDataset(VectorDataset):
 
         Returns
         -------
-        np.ndarray
+        npt.NDArray[np.float_]
            Altitude, [:math:`m`]
         """
         try:
@@ -1246,12 +1258,12 @@ class GeoVectorDataset(VectorDataset):
             return units.ft_to_m(self["altitude_ft"])
 
     @property
-    def air_pressure(self) -> np.ndarray:
+    def air_pressure(self) -> npt.NDArray[np.float_]:
         """Get ``air_pressure`` values for points.
 
         Returns
         -------
-        np.ndarray
+        npt.NDArray[np.float_]
            Point air pressure values, [:math:`Pa`]
         """
         try:
@@ -1260,12 +1272,12 @@ class GeoVectorDataset(VectorDataset):
             return 100 * self.level
 
     @property
-    def altitude_ft(self) -> np.ndarray:
+    def altitude_ft(self) -> npt.NDArray[np.float_]:
         """Get altitude in feet.
 
         Returns
         -------
-        np.ndarray
+        npt.NDArray[np.float_]
            Altitude, [:math:`ft`]
         """
         try:
@@ -1368,7 +1380,7 @@ class GeoVectorDataset(VectorDataset):
 
     def coords_intersect_met(
         self, met: met_module.MetDataset | met_module.MetDataArray
-    ) -> np.ndarray:
+    ) -> npt.NDArray[np.bool_]:
         """Return boolean mask of data inside the bounding box defined by ``met``.
 
         Parameters
@@ -1378,7 +1390,7 @@ class GeoVectorDataset(VectorDataset):
 
         Returns
         -------
-        np.ndarray
+        npt.NDArray[np.bool_]
            True if point is inside the bounding box defined by ``met``.
         """
 
@@ -1405,26 +1417,26 @@ class GeoVectorDataset(VectorDataset):
         self,
         mda: met_module.MetDataArray,
         *,
-        longitude: np.ndarray | None = None,
-        latitude: np.ndarray | None = None,
-        level: np.ndarray | None = None,
-        time: np.ndarray | None = None,
+        longitude: npt.NDArray[np.float_] | None = None,
+        latitude: npt.NDArray[np.float_] | None = None,
+        level: npt.NDArray[np.float_] | None = None,
+        time: npt.NDArray[np.datetime64] | None = None,
         use_indices: bool = False,
         **interp_kwargs: Any,
-    ) -> np.ndarray:
+    ) -> npt.NDArray[np.float_]:
         """Intersect waypoints with MetDataArray.
 
         Parameters
         ----------
         mda : MetDataArray
            MetDataArray containing a meteorological variable at spatio-temporal coordinates.
-        longitude : np.ndarray, optional
+        longitude : npt.NDArray[np.float_], optional
            Override existing coordinates for met interpolation
-        latitude : np.ndarray, optional
+        latitude : npt.NDArray[np.float_], optional
            Override existing coordinates for met interpolation
-        level : np.ndarray, optional
+        level : npt.NDArray[np.float_], optional
            Override existing coordinates for met interpolation
-        time : np.ndarray, optional
+        time : npt.NDArray[np.datetime64], optional
            Override existing coordinates for met interpolation
         use_indices : bool, optional
            Experimental.
@@ -1437,7 +1449,7 @@ class GeoVectorDataset(VectorDataset):
 
         Returns
         -------
-        np.ndarray
+        npt.NDArray[np.float_]
            Interpolated values
 
         Examples
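
The hunks for the vector data classes end here; the remaining hunks below touch pycontrails.datalib. Before moving on, a hedged usage sketch of the stricter filter() annotation shown above: it assumes VectorDataset is importable from the top-level pycontrails package and that plain lists are coerced as the widened ArrayLike hints suggest.

from pycontrails import VectorDataset  # top-level export assumed

vd = VectorDataset({"x": [1.0, 2.0, 3.0]})  # a plain list is accepted per the ArrayLike hints
mask = vd["x"] > 1.5                        # boolean NumPy array, npt.NDArray[np.bool_]
subset = vd.filter(mask)                    # keeps the entries where mask is True
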
@@ -5,5 +5,5 @@ See individual modules for met variables and additional exports.
 
 - :module:`pycontrails.datalib.ecmwf`
 - :module:`pycontrails.datalib.gfs`
-
+- :module:`pycontrails.datalib.spire`
 """
@@ -0,0 +1,19 @@
+"""Spire data access."""
+
+from __future__ import annotations
+
+from pycontrails.datalib.spire.spire import (
+    clean,
+    generate_flight_id,
+    identify_flights,
+    is_valid_trajectory,
+    validate_flights,
+)
+
+__all__ = [
+    "clean",
+    "generate_flight_id",
+    "identify_flights",
+    "is_valid_trajectory",
+    "validate_flights",
+]
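
The new file above only re-exports helpers from pycontrails.datalib.spire.spire. A minimal import sketch follows; the helper signatures are not part of this diff, so no call arguments are shown and any intended usage beyond the names themselves is an assumption.

from pycontrails.datalib.spire import (  # names taken verbatim from __all__ above
    clean,
    generate_flight_id,
    identify_flights,
    is_valid_trajectory,
    validate_flights,
)

# Presumably: clean raw Spire ADS-B data, then identify and validate flight trajectories.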