pycontrails-0.54.3-cp310-cp310-win_amd64.whl → pycontrails-0.54.5-cp310-cp310-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pycontrails might be problematic. See the package registry page for more details.

Files changed (62)
  1. pycontrails/__init__.py +2 -2
  2. pycontrails/_version.py +2 -2
  3. pycontrails/core/__init__.py +1 -1
  4. pycontrails/core/aircraft_performance.py +58 -58
  5. pycontrails/core/cache.py +7 -7
  6. pycontrails/core/fleet.py +54 -29
  7. pycontrails/core/flight.py +218 -301
  8. pycontrails/core/interpolation.py +63 -60
  9. pycontrails/core/met.py +193 -125
  10. pycontrails/core/models.py +27 -13
  11. pycontrails/core/polygon.py +15 -15
  12. pycontrails/core/rgi_cython.cp310-win_amd64.pyd +0 -0
  13. pycontrails/core/vector.py +119 -96
  14. pycontrails/datalib/_met_utils/metsource.py +8 -5
  15. pycontrails/datalib/ecmwf/__init__.py +14 -14
  16. pycontrails/datalib/ecmwf/common.py +1 -1
  17. pycontrails/datalib/ecmwf/era5.py +7 -7
  18. pycontrails/datalib/ecmwf/hres.py +3 -3
  19. pycontrails/datalib/ecmwf/ifs.py +1 -1
  20. pycontrails/datalib/gfs/__init__.py +6 -6
  21. pycontrails/datalib/gfs/gfs.py +2 -2
  22. pycontrails/datalib/goes.py +5 -5
  23. pycontrails/ext/empirical_grid.py +1 -1
  24. pycontrails/models/apcemm/apcemm.py +5 -5
  25. pycontrails/models/apcemm/utils.py +1 -1
  26. pycontrails/models/cocip/__init__.py +2 -2
  27. pycontrails/models/cocip/cocip.py +23 -24
  28. pycontrails/models/cocip/cocip_params.py +2 -11
  29. pycontrails/models/cocip/cocip_uncertainty.py +24 -18
  30. pycontrails/models/cocip/contrail_properties.py +331 -316
  31. pycontrails/models/cocip/output_formats.py +53 -53
  32. pycontrails/models/cocip/radiative_forcing.py +135 -131
  33. pycontrails/models/cocip/radiative_heating.py +135 -135
  34. pycontrails/models/cocip/unterstrasser_wake_vortex.py +90 -87
  35. pycontrails/models/cocip/wake_vortex.py +92 -92
  36. pycontrails/models/cocip/wind_shear.py +8 -8
  37. pycontrails/models/cocipgrid/cocip_grid.py +37 -96
  38. pycontrails/models/dry_advection.py +60 -19
  39. pycontrails/models/emissions/__init__.py +2 -2
  40. pycontrails/models/emissions/black_carbon.py +108 -108
  41. pycontrails/models/emissions/emissions.py +87 -87
  42. pycontrails/models/emissions/ffm2.py +35 -35
  43. pycontrails/models/humidity_scaling/humidity_scaling.py +23 -23
  44. pycontrails/models/issr.py +2 -2
  45. pycontrails/models/ps_model/__init__.py +1 -1
  46. pycontrails/models/ps_model/ps_aircraft_params.py +8 -4
  47. pycontrails/models/ps_model/ps_grid.py +76 -66
  48. pycontrails/models/ps_model/ps_model.py +16 -16
  49. pycontrails/models/ps_model/ps_operational_limits.py +20 -18
  50. pycontrails/models/tau_cirrus.py +8 -1
  51. pycontrails/physics/geo.py +67 -67
  52. pycontrails/physics/jet.py +79 -79
  53. pycontrails/physics/units.py +14 -14
  54. pycontrails/utils/json.py +1 -2
  55. pycontrails/utils/types.py +12 -7
  56. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/METADATA +2 -2
  57. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/NOTICE +1 -1
  58. pycontrails-0.54.5.dist-info/RECORD +111 -0
  59. pycontrails-0.54.3.dist-info/RECORD +0 -111
  60. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/LICENSE +0 -0
  61. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/WHEEL +0 -0
  62. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/top_level.txt +0 -0
@@ -42,7 +42,7 @@ except ModuleNotFoundError as exc:
42
42
 
43
43
 
44
44
  def buffer_and_clean(
45
- contour: npt.NDArray[np.float64],
45
+ contour: npt.NDArray[np.floating],
46
46
  min_area: float,
47
47
  convex_hull: bool,
48
48
  epsilon: float,
@@ -54,7 +54,7 @@ def buffer_and_clean(
54
54
 
55
55
  Parameters
56
56
  ----------
57
- contour : npt.NDArray[np.float64]
57
+ contour : npt.NDArray[np.floating]
58
58
  Contour to buffer and clean. A 2d array of shape (n, 2) where n is the number
59
59
  of vertices in the contour.
60
60
  min_area : float
@@ -157,13 +157,13 @@ def _round_polygon(polygon: shapely.Polygon, precision: int) -> shapely.Polygon:
157
157
 
158
158
 
159
159
  def _contours_to_polygons(
160
- contours: Sequence[npt.NDArray[np.float64]],
160
+ contours: Sequence[npt.NDArray[np.floating]],
161
161
  hierarchy: npt.NDArray[np.int_],
162
162
  min_area: float,
163
163
  convex_hull: bool,
164
164
  epsilon: float,
165
- longitude: npt.NDArray[np.float64] | None,
166
- latitude: npt.NDArray[np.float64] | None,
165
+ longitude: npt.NDArray[np.floating] | None,
166
+ latitude: npt.NDArray[np.floating] | None,
167
167
  precision: int | None,
168
168
  buffer: float,
169
169
  i: int = 0,
@@ -172,7 +172,7 @@ def _contours_to_polygons(
172
172
 
173
173
  Parameters
174
174
  ----------
175
- contours : Sequence[npt.NDArray[np.float64]]
175
+ contours : Sequence[npt.NDArray[np.floating]]
176
176
  The contours output from :func:`cv2.findContours`.
177
177
  hierarchy : npt.NDArray[np.int_]
178
178
  The hierarchy output from :func:`cv2.findContours`.
@@ -182,9 +182,9 @@ def _contours_to_polygons(
182
182
  Whether to take the convex hull of each polygon.
183
183
  epsilon : float
184
184
  Epsilon value to use when simplifying the polygons.
185
- longitude : npt.NDArray[np.float64] | None
185
+ longitude : npt.NDArray[np.floating] | None
186
186
  Longitude values for the grid.
187
- latitude : npt.NDArray[np.float64] | None
187
+ latitude : npt.NDArray[np.floating] | None
188
188
  Latitude values for the grid.
189
189
  precision : int | None
190
190
  Precision to use when rounding the coordinates.
@@ -254,7 +254,7 @@ def _contours_to_polygons(
254
254
 
255
255
 
256
256
  def determine_buffer(
257
- longitude: npt.NDArray[np.float64], latitude: npt.NDArray[np.float64]
257
+ longitude: npt.NDArray[np.floating], latitude: npt.NDArray[np.floating]
258
258
  ) -> float:
259
259
  """Determine the proper buffer size to use when converting to polygons."""
260
260
 
@@ -279,22 +279,22 @@ def determine_buffer(
279
279
 
280
280
 
281
281
  def find_multipolygon(
282
- arr: npt.NDArray[np.float64],
282
+ arr: npt.NDArray[np.floating],
283
283
  threshold: float,
284
284
  min_area: float,
285
285
  epsilon: float,
286
286
  lower_bound: bool = True,
287
287
  interiors: bool = True,
288
288
  convex_hull: bool = False,
289
- longitude: npt.NDArray[np.float64] | None = None,
290
- latitude: npt.NDArray[np.float64] | None = None,
289
+ longitude: npt.NDArray[np.floating] | None = None,
290
+ latitude: npt.NDArray[np.floating] | None = None,
291
291
  precision: int | None = None,
292
292
  ) -> shapely.MultiPolygon:
293
293
  """Compute a multipolygon from a 2d array.
294
294
 
295
295
  Parameters
296
296
  ----------
297
- arr : npt.NDArray[np.float64]
297
+ arr : npt.NDArray[np.floating]
298
298
  Array to convert to a multipolygon. The array will be converted to a binary
299
299
  array by comparing each element to ``threshold``. This binary array is then
300
300
  passed into :func:`cv2.findContours` to find the contours.
@@ -312,11 +312,11 @@ def find_multipolygon(
312
312
  Whether to include interior polygons. By default, True.
313
313
  convex_hull : bool, optional
314
314
  Experimental. Whether to take the convex hull of each polygon. By default, False.
315
- longitude : npt.NDArray[np.float64] | None, optional
315
+ longitude : npt.NDArray[np.floating] | None, optional
316
316
  If provided, the coordinates values corresponding to the longitude dimensions of ``arr``.
317
317
  The contour coordinates will be converted to longitude-latitude values by indexing
318
318
  into this array. Defaults to None.
319
- latitude : npt.NDArray[np.float64] | None, optional
319
+ latitude : npt.NDArray[np.floating] | None, optional
320
320
  If provided, the coordinates values corresponding to the latitude dimensions of ``arr``.
321
321
  precision : int | None, optional
322
322
  If provided, the precision to use when rounding the coordinates. Defaults to None.
@@ -37,6 +37,8 @@ logger = logging.getLogger(__name__)
37
37
  class AttrDict(dict[str, Any]):
38
38
  """Thin wrapper around dict to warn when setting a key that already exists."""
39
39
 
40
+ __slots__ = ()
41
+
40
42
  def __setitem__(self, k: str, v: Any) -> None:
41
43
  """Warn when setting values that already contain values.
42
44
 
@@ -85,7 +87,7 @@ class VectorDataDict(dict[str, np.ndarray]):
85
87
  Parameters
86
88
  ----------
87
89
  data : dict[str, np.ndarray], optional
88
- Dictionary input
90
+ Dictionary input. A shallow copy is always made.
89
91
  """
90
92
 
91
93
  __slots__ = ("_size",)
@@ -130,8 +132,8 @@ class VectorDataDict(dict[str, np.ndarray]):
130
132
  def __delitem__(self, k: str) -> None:
131
133
  super().__delitem__(k)
132
134
 
133
- # if not data keys left, set size to 0
134
- if not len(self):
135
+ # if no keys remain, delete _size attribute
136
+ if not self:
135
137
  del self._size
136
138
 
137
139
  def setdefault(self, k: str, default: npt.ArrayLike | None = None) -> np.ndarray:
@@ -191,9 +193,9 @@ class VectorDataDict(dict[str, np.ndarray]):
191
193
  super().update(kwargs_arr)
192
194
 
193
195
  def _validate_array(self, arr: np.ndarray) -> None:
194
- """Ensure that `arr` is compatible with instance.
196
+ """Ensure that ``arr`` is compatible (1 dimensional of equal size) with instance.
195
197
 
196
- Set attribute `_size` if it has not yet been defined.
198
+ Set attribute ``_size`` if it has not yet been defined.
197
199
 
198
200
  Parameters
199
201
  ----------
@@ -203,34 +205,34 @@ class VectorDataDict(dict[str, np.ndarray]):
203
205
  Raises
204
206
  ------
205
207
  ValueError
206
- If `arr` is not compatible with instance.
208
+ If ``arr`` is not compatible with instance.
207
209
  """
208
210
  if arr.ndim != 1:
209
211
  raise ValueError("All np.arrays must have dimension 1.")
210
212
 
211
213
  size = getattr(self, "_size", 0)
212
- if size != 0:
213
- if arr.size != size:
214
- raise ValueError(f"Incompatible array sizes: {arr.size} and {size}.")
215
- else:
214
+ if not size:
216
215
  self._size = arr.size
216
+ return
217
+
218
+ if arr.size != size:
219
+ raise ValueError(f"Incompatible array sizes: {arr.size} and {size}.")
217
220
 
218
221
 
219
- def _empty_vector_dict(keys: Iterable[str]) -> VectorDataDict:
220
- """Create instance of VectorDataDict with variables defined by `keys` and size 0.
222
+ def _empty_vector_dict(keys: Iterable[str]) -> dict[str, np.ndarray]:
223
+ """Create a dictionary with keys defined by ``keys`` and empty arrays.
221
224
 
222
225
  Parameters
223
226
  ----------
224
227
  keys : Iterable[str]
225
- Keys to include in empty VectorDataset instance.
228
+ Keys to include in dictionary.
226
229
 
227
230
  Returns
228
231
  -------
229
- VectorDataDict
230
- Empty :class:`VectorDataDict` instance.
232
+ dict[str, np.ndarray]
233
+ Dictionary with empty arrays.
231
234
  """
232
- keys = keys or ()
233
- data = VectorDataDict({key: np.array([]) for key in keys})
235
+ data = {key: np.array([]) for key in keys}
234
236
 
235
237
  # The default dtype is float64
236
238
  # Time is special and should have a non-default dtype of datetime64[ns]
@@ -245,14 +247,15 @@ class VectorDataset:
245
247
 
246
248
  Parameters
247
249
  ----------
248
- data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
249
- Initial data, by default None
250
- attrs : dict[str, Any] | AttrDict, optional
251
- Dictionary of attributes, by default None
250
+ data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None, optional
251
+ Initial data, by default None. A shallow copy is always made. Use the ``copy``
252
+ parameter to copy the underlying array data.
253
+ attrs : dict[str, Any] | None, optional
254
+ Dictionary of attributes, by default None. A shallow copy is always made.
252
255
  copy : bool, optional
253
- Copy data on class creation, by default True
256
+ Copy individual arrays on instantiation, by default True.
254
257
  **attrs_kwargs : Any
255
- Additional attributes passed as keyword arguments
258
+ Additional attributes passed as keyword arguments.
256
259
 
257
260
  Raises
258
261
  ------
@@ -260,26 +263,24 @@ class VectorDataset:
260
263
  If "time" variable cannot be converted to numpy array.
261
264
  """
262
265
 
263
- __slots__ = ("data", "attrs")
264
-
265
- #: Vector data with labels as keys and :class:`numpy.ndarray` as values
266
- data: VectorDataDict
266
+ __slots__ = ("attrs", "data")
267
267
 
268
268
  #: Generic dataset attributes
269
269
  attrs: AttrDict
270
270
 
271
+ #: Vector data with labels as keys and :class:`numpy.ndarray` as values
272
+ data: VectorDataDict
273
+
271
274
  def __init__(
272
275
  self,
273
- data: (
274
- dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None
275
- ) = None,
276
+ data: dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None = None,
276
277
  *,
277
- attrs: dict[str, Any] | AttrDict | None = None,
278
+ attrs: dict[str, Any] | None = None,
278
279
  copy: bool = True,
279
280
  **attrs_kwargs: Any,
280
281
  ) -> None:
281
- # Set data
282
- # --------
282
+ # Set data: always shallow copy
283
+ # -----------------------------
283
284
 
284
285
  # Casting from one VectorDataset type to another
285
286
  # e.g., flight = Flight(...); vector = VectorDataset(flight)
@@ -288,7 +289,7 @@ class VectorDataset:
288
289
  if copy:
289
290
  self.data = VectorDataDict({k: v.copy() for k, v in data.data.items()})
290
291
  else:
291
- self.data = data.data
292
+ self.data = VectorDataDict(data.data)
292
293
 
293
294
  elif data is None:
294
295
  self.data = VectorDataDict()
@@ -307,31 +308,45 @@ class VectorDataset:
307
308
  data["time"] = time.to_numpy(copy=copy)
308
309
  self.data = VectorDataDict(data)
309
310
 
310
- elif isinstance(data, VectorDataDict):
311
- if copy:
312
- self.data = VectorDataDict({k: v.copy() for k, v in data.items()})
313
- else:
314
- self.data = data
315
-
316
311
  # For anything else, we assume it is a dictionary of array-like and attach it
317
312
  else:
318
313
  self.data = VectorDataDict({k: np.array(v, copy=copy) for k, v in data.items()})
319
314
 
320
- # Set attributes
321
- # --------------
315
+ # Set attributes: always shallow copy
316
+ # -----------------------------------
317
+
318
+ self.attrs = AttrDict(attrs or {}) # type: ignore[arg-type]
319
+ self.attrs.update(attrs_kwargs)
320
+
321
+ @classmethod
322
+ def _from_fastpath(
323
+ cls,
324
+ data: dict[str, np.ndarray],
325
+ attrs: dict[str, Any] | None = None,
326
+ **kwargs: Any,
327
+ ) -> Self:
328
+ """Create new instance from consistent data.
322
329
 
323
- if attrs is None:
324
- self.attrs = AttrDict()
330
+ This is a low-level method that bypasses the standard constructor in certain
331
+ special cases. It is intended for internal use only.
325
332
 
326
- elif isinstance(attrs, AttrDict) and not copy:
327
- self.attrs = attrs
333
+ In essence, this method skips any validation from __init__ and directly sets
334
+ ``data`` and ``attrs``. This is useful when creating a new instance from an existing
335
+ instance the data has already been validated.
336
+ """
337
+ obj = cls.__new__(cls)
328
338
 
329
- # shallow copy if dict
330
- else:
331
- self.attrs = AttrDict(attrs.copy())
339
+ obj.data = VectorDataDict(data)
340
+ obj.attrs = AttrDict(attrs or {})
332
341
 
333
- # update with kwargs
334
- self.attrs.update(attrs_kwargs)
342
+ for key, value in kwargs.items():
343
+ try:
344
+ setattr(obj, key, value)
345
+ # If key not present in __slots__ of class (or parents), it's intended for attrs
346
+ except AttributeError:
347
+ obj.attrs[key] = value
348
+
349
+ return obj
335
350
 
336
351
  # ------------
337
352
  # dict-like methods
@@ -663,6 +678,13 @@ class VectorDataset:
663
678
  8 15 18
664
679
 
665
680
  """
681
+ if cls not in (VectorDataset, GeoVectorDataset):
682
+ msg = (
683
+ "Method 'sum' is only available on 'VectorDataset' and 'GeoVectorDataset'. "
684
+ "To sum 'Flight' instances, use 'Fleet.from_seq'."
685
+ )
686
+ raise TypeError(msg)
687
+
666
688
  vectors = [v for v in vectors if v is not None] # remove None values
667
689
 
668
690
  if not vectors:
@@ -693,10 +715,9 @@ class VectorDataset:
693
715
  return np.concatenate(values)
694
716
 
695
717
  data = {key: concat(key) for key in keys}
718
+ attrs = vectors[0].attrs if infer_attrs else None
696
719
 
697
- if infer_attrs:
698
- return cls(data, attrs=vectors[0].attrs, copy=False)
699
- return cls(data, copy=False)
720
+ return cls._from_fastpath(data, attrs)
700
721
 
701
722
  def __eq__(self, other: object) -> bool:
702
723
  """Determine if two instances are equal.
@@ -803,7 +824,8 @@ class VectorDataset:
803
824
  Self
804
825
  Copy of class
805
826
  """
806
- return type(self)(data=self.data, attrs=self.attrs, copy=True, **kwargs)
827
+ data = {key: value.copy() for key, value in self.data.items()}
828
+ return type(self)._from_fastpath(data, self.attrs, **kwargs)
807
829
 
808
830
  def select(self: VectorDataset, keys: Iterable[str], copy: bool = True) -> VectorDataset:
809
831
  """Return new class instance only containing specified keys.
@@ -823,8 +845,8 @@ class VectorDataset:
823
845
  Note that this method always returns a :class:`VectorDataset`, even if
824
846
  the calling class is a proper subclass of :class:`VectorDataset`.
825
847
  """
826
- data = {key: self[key] for key in keys}
827
- return VectorDataset(data=data, attrs=self.attrs, copy=copy)
848
+ data = {key: np.array(self[key], copy=copy) for key in keys}
849
+ return VectorDataset._from_fastpath(data, self.attrs)
828
850
 
829
851
  def filter(self, mask: npt.NDArray[np.bool_], copy: bool = True, **kwargs: Any) -> Self:
830
852
  """Filter :attr:`data` according to a boolean array ``mask``.
@@ -856,8 +878,8 @@ class VectorDataset:
856
878
  if mask.dtype != bool:
857
879
  raise TypeError("Parameter `mask` must be a boolean array.")
858
880
 
859
- data = {key: value[mask] for key, value in self.data.items()}
860
- return type(self)(data=data, attrs=self.attrs, copy=copy, **kwargs)
881
+ data = {key: np.array(value[mask], copy=copy) for key, value in self.data.items()}
882
+ return type(self)._from_fastpath(data, self.attrs, **kwargs)
861
883
 
862
884
  def sort(self, by: str | list[str]) -> Self:
863
885
  """Sort data by key(s).
@@ -1116,7 +1138,7 @@ class VectorDataset:
1116
1138
  cls,
1117
1139
  keys: Iterable[str],
1118
1140
  attrs: dict[str, Any] | None = None,
1119
- **attrs_kwargs: Any,
1141
+ **kwargs: Any,
1120
1142
  ) -> Self:
1121
1143
  """Create instance with variables defined by ``keys`` and size 0.
1122
1144
 
@@ -1129,15 +1151,16 @@ class VectorDataset:
1129
1151
  Keys to include in empty VectorDataset instance.
1130
1152
  attrs : dict[str, Any] | None, optional
1131
1153
  Attributes to attach instance.
1132
- **attrs_kwargs : Any
1133
- Define attributes as keyword arguments.
1154
+ **kwargs : Any
1155
+ Additional keyword arguments passed into the constructor of the returned class.
1134
1156
 
1135
1157
  Returns
1136
1158
  -------
1137
1159
  Self
1138
1160
  Empty VectorDataset instance.
1139
1161
  """
1140
- return cls(data=_empty_vector_dict(keys or set()), attrs=attrs, copy=False, **attrs_kwargs)
1162
+ data = _empty_vector_dict(keys)
1163
+ return cls._from_fastpath(data, attrs, **kwargs)
1141
1164
 
1142
1165
  @classmethod
1143
1166
  def from_dict(cls, obj: dict[str, Any], copy: bool = True, **obj_kwargs: Any) -> Self:
@@ -1216,7 +1239,7 @@ class GeoVectorDataset(VectorDataset):
1216
1239
 
1217
1240
  Parameters
1218
1241
  ----------
1219
- data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
1242
+ data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None, optional
1220
1243
  Data dictionary or :class:`pandas.DataFrame` .
1221
1244
  Must include keys/columns ``time``, ``latitude``, ``longitude``, ``altitude`` or ``level``.
1222
1245
  Keyword arguments for ``time``, ``latitude``, ``longitude``, ``altitude`` or ``level``
@@ -1269,9 +1292,7 @@ class GeoVectorDataset(VectorDataset):
1269
1292
 
1270
1293
  def __init__(
1271
1294
  self,
1272
- data: (
1273
- dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None
1274
- ) = None,
1295
+ data: dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None = None,
1275
1296
  *,
1276
1297
  longitude: npt.ArrayLike | None = None,
1277
1298
  latitude: npt.ArrayLike | None = None,
@@ -1279,7 +1300,7 @@ class GeoVectorDataset(VectorDataset):
1279
1300
  altitude_ft: npt.ArrayLike | None = None,
1280
1301
  level: npt.ArrayLike | None = None,
1281
1302
  time: npt.ArrayLike | None = None,
1282
- attrs: dict[str, Any] | AttrDict | None = None,
1303
+ attrs: dict[str, Any] | None = None,
1283
1304
  copy: bool = True,
1284
1305
  **attrs_kwargs: Any,
1285
1306
  ) -> None:
@@ -1293,7 +1314,10 @@ class GeoVectorDataset(VectorDataset):
1293
1314
  and time is None
1294
1315
  ):
1295
1316
  keys = *self.required_keys, "altitude"
1296
- data = _empty_vector_dict(keys)
1317
+ self.data = VectorDataDict(_empty_vector_dict(keys))
1318
+ self.attrs = AttrDict(attrs or {}) # type: ignore[arg-type]
1319
+ self.attrs.update(attrs_kwargs)
1320
+ return
1297
1321
 
1298
1322
  super().__init__(data=data, attrs=attrs, copy=copy, **attrs_kwargs)
1299
1323
 
@@ -1392,7 +1416,7 @@ class GeoVectorDataset(VectorDataset):
1392
1416
  return attrs
1393
1417
 
1394
1418
  @property
1395
- def level(self) -> npt.NDArray[np.float64]:
1419
+ def level(self) -> npt.NDArray[np.floating]:
1396
1420
  """Get pressure ``level`` values for points.
1397
1421
 
1398
1422
  Automatically calculates pressure level using :func:`units.m_to_pl` using ``altitude`` key.
@@ -1403,7 +1427,7 @@ class GeoVectorDataset(VectorDataset):
1403
1427
 
1404
1428
  Returns
1405
1429
  -------
1406
- npt.NDArray[np.float64]
1430
+ npt.NDArray[np.floating]
1407
1431
  Point pressure level values, [:math:`hPa`]
1408
1432
  """
1409
1433
  try:
@@ -1412,7 +1436,7 @@ class GeoVectorDataset(VectorDataset):
1412
1436
  return units.m_to_pl(self.altitude)
1413
1437
 
1414
1438
  @property
1415
- def altitude(self) -> npt.NDArray[np.float64]:
1439
+ def altitude(self) -> npt.NDArray[np.floating]:
1416
1440
  """Get altitude.
1417
1441
 
1418
1442
  Automatically calculates altitude using :func:`units.pl_to_m` using ``level`` key.
@@ -1423,7 +1447,7 @@ class GeoVectorDataset(VectorDataset):
1423
1447
 
1424
1448
  Returns
1425
1449
  -------
1426
- npt.NDArray[np.float64]
1450
+ npt.NDArray[np.floating]
1427
1451
  Altitude, [:math:`m`]
1428
1452
  """
1429
1453
  try:
@@ -1437,12 +1461,12 @@ class GeoVectorDataset(VectorDataset):
1437
1461
  return units.ft_to_m(self["altitude_ft"])
1438
1462
 
1439
1463
  @property
1440
- def air_pressure(self) -> npt.NDArray[np.float64]:
1464
+ def air_pressure(self) -> npt.NDArray[np.floating]:
1441
1465
  """Get ``air_pressure`` values for points.
1442
1466
 
1443
1467
  Returns
1444
1468
  -------
1445
- npt.NDArray[np.float64]
1469
+ npt.NDArray[np.floating]
1446
1470
  Point air pressure values, [:math:`Pa`]
1447
1471
  """
1448
1472
  try:
@@ -1451,12 +1475,12 @@ class GeoVectorDataset(VectorDataset):
1451
1475
  return 100.0 * self.level
1452
1476
 
1453
1477
  @property
1454
- def altitude_ft(self) -> npt.NDArray[np.float64]:
1478
+ def altitude_ft(self) -> npt.NDArray[np.floating]:
1455
1479
  """Get altitude in feet.
1456
1480
 
1457
1481
  Returns
1458
1482
  -------
1459
- npt.NDArray[np.float64]
1483
+ npt.NDArray[np.floating]
1460
1484
  Altitude, [:math:`ft`]
1461
1485
  """
1462
1486
  try:
@@ -1522,7 +1546,7 @@ class GeoVectorDataset(VectorDataset):
1522
1546
  # Utilities
1523
1547
  # ------------
1524
1548
 
1525
- def transform_crs(self, crs: str) -> tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]:
1549
+ def transform_crs(self, crs: str) -> tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]:
1526
1550
  """Transform trajectory data from one coordinate reference system (CRS) to another.
1527
1551
 
1528
1552
  Parameters
@@ -1535,7 +1559,7 @@ class GeoVectorDataset(VectorDataset):
1535
1559
 
1536
1560
  Returns
1537
1561
  -------
1538
- tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]
1562
+ tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]
1539
1563
  New x and y coordinates in the target CRS.
1540
1564
  """
1541
1565
  try:
@@ -1552,12 +1576,12 @@ class GeoVectorDataset(VectorDataset):
1552
1576
  transformer = pyproj.Transformer.from_crs(crs_from, crs, always_xy=True)
1553
1577
  return transformer.transform(self["longitude"], self["latitude"])
1554
1578
 
1555
- def T_isa(self) -> npt.NDArray[np.float64]:
1579
+ def T_isa(self) -> npt.NDArray[np.floating]:
1556
1580
  """Calculate the ICAO standard atmosphere temperature at each point.
1557
1581
 
1558
1582
  Returns
1559
1583
  -------
1560
- npt.NDArray[np.float64]
1584
+ npt.NDArray[np.floating]
1561
1585
  ISA temperature, [:math:`K`]
1562
1586
 
1563
1587
  See Also
@@ -1610,24 +1634,24 @@ class GeoVectorDataset(VectorDataset):
1610
1634
  self,
1611
1635
  mda: met_module.MetDataArray,
1612
1636
  *,
1613
- longitude: npt.NDArray[np.float64] | None = None,
1614
- latitude: npt.NDArray[np.float64] | None = None,
1615
- level: npt.NDArray[np.float64] | None = None,
1637
+ longitude: npt.NDArray[np.floating] | None = None,
1638
+ latitude: npt.NDArray[np.floating] | None = None,
1639
+ level: npt.NDArray[np.floating] | None = None,
1616
1640
  time: npt.NDArray[np.datetime64] | None = None,
1617
1641
  use_indices: bool = False,
1618
1642
  **interp_kwargs: Any,
1619
- ) -> npt.NDArray[np.float64]:
1643
+ ) -> npt.NDArray[np.floating]:
1620
1644
  """Intersect waypoints with MetDataArray.
1621
1645
 
1622
1646
  Parameters
1623
1647
  ----------
1624
1648
  mda : MetDataArray
1625
1649
  MetDataArray containing a meteorological variable at spatio-temporal coordinates.
1626
- longitude : npt.NDArray[np.float64], optional
1650
+ longitude : npt.NDArray[np.floating], optional
1627
1651
  Override existing coordinates for met interpolation
1628
- latitude : npt.NDArray[np.float64], optional
1652
+ latitude : npt.NDArray[np.floating], optional
1629
1653
  Override existing coordinates for met interpolation
1630
- level : npt.NDArray[np.float64], optional
1654
+ level : npt.NDArray[np.floating], optional
1631
1655
  Override existing coordinates for met interpolation
1632
1656
  time : npt.NDArray[np.datetime64], optional
1633
1657
  Override existing coordinates for met interpolation
@@ -1646,7 +1670,7 @@ class GeoVectorDataset(VectorDataset):
1646
1670
 
1647
1671
  Returns
1648
1672
  -------
1649
- npt.NDArray[np.float64]
1673
+ npt.NDArray[np.floating]
1650
1674
  Interpolated values
1651
1675
 
1652
1676
  Examples
@@ -1819,7 +1843,6 @@ class GeoVectorDataset(VectorDataset):
1819
1843
  latitude_buffer: tuple[float, float] = ...,
1820
1844
  level_buffer: tuple[float, float] = ...,
1821
1845
  time_buffer: tuple[np.timedelta64, np.timedelta64] = ...,
1822
- copy: bool = ...,
1823
1846
  ) -> met_module.MetDataset: ...
1824
1847
 
1825
1848
  @overload
@@ -1831,7 +1854,6 @@ class GeoVectorDataset(VectorDataset):
1831
1854
  latitude_buffer: tuple[float, float] = ...,
1832
1855
  level_buffer: tuple[float, float] = ...,
1833
1856
  time_buffer: tuple[np.timedelta64, np.timedelta64] = ...,
1834
- copy: bool = ...,
1835
1857
  ) -> met_module.MetDataArray: ...
1836
1858
 
1837
1859
  def downselect_met(
@@ -1845,10 +1867,13 @@ class GeoVectorDataset(VectorDataset):
1845
1867
  np.timedelta64(0, "h"),
1846
1868
  np.timedelta64(0, "h"),
1847
1869
  ),
1848
- copy: bool = True,
1849
1870
  ) -> met_module.MetDataType:
1850
1871
  """Downselect ``met`` to encompass a spatiotemporal region of the data.
1851
1872
 
1873
+ .. versionchanged:: 0.54.5
1874
+
1875
+ Returned object is no longer copied.
1876
+
1852
1877
  Parameters
1853
1878
  ----------
1854
1879
  met : MetDataset | MetDataArray
@@ -1873,8 +1898,6 @@ class GeoVectorDataset(VectorDataset):
1873
1898
  and ``time_buffer[1]`` on the high side.
1874
1899
  Units must be the same as class coordinates.
1875
1900
  Defaults to ``(np.timedelta64(0, "h"), np.timedelta64(0, "h"))``.
1876
- copy : bool
1877
- If returned object is a copy or view of the original. True by default.
1878
1901
 
1879
1902
  Returns
1880
1903
  -------
@@ -1915,7 +1938,7 @@ class GeoVectorDataset(VectorDataset):
1915
1938
  level=level_slice,
1916
1939
  time=time_slice,
1917
1940
  )
1918
- return type(met)(data, copy=copy)
1941
+ return type(met)._from_fastpath(data)
1919
1942
 
1920
1943
  # ------------
1921
1944
  # I / O
@@ -2019,7 +2042,7 @@ def vector_to_lon_lat_grid(
2019
2042
  ...,
2020
2043
  [1.97, 3.02, 1.84, ..., 2.37, 3.87, 2.09],
2021
2044
  [3.74, 1.6 , 4.01, ..., 4.6 , 4.27, 3.4 ],
2022
- [2.97, 0.12, 1.33, ..., 3.54, 0.74, 2.59]])
2045
+ [2.97, 0.12, 1.33, ..., 3.54, 0.74, 2.59]], shape=(40, 40))
2023
2046
 
2024
2047
  >>> da.sum().item() == vector["foo"].sum()
2025
2048
  np.True_
@@ -175,13 +175,16 @@ def parse_pressure_levels(
175
175
 
176
176
  out = arr.tolist()
177
177
  if supported is None:
178
- return out
178
+ return out # type: ignore[return-value]
179
179
 
180
- if missing := set(out).difference(supported):
181
- msg = f"Pressure levels {sorted(missing)} are not supported. Supported levels: {supported}"
180
+ if missing := set(out).difference(supported): # type: ignore[arg-type]
181
+ msg = (
182
+ f"Pressure levels {sorted(missing)} are not supported. " # type: ignore[type-var]
183
+ f"Supported levels: {supported}"
184
+ )
182
185
  raise ValueError(msg)
183
186
 
184
- return out
187
+ return out # type: ignore[return-value]
185
188
 
186
189
 
187
190
  def parse_variables(variables: VariableInput, supported: list[MetVariable]) -> list[MetVariable]:
@@ -347,7 +350,7 @@ def round_hour(time: datetime, hour: int) -> datetime:
347
350
  class MetDataSource(abc.ABC):
348
351
  """Abstract class for wrapping meteorology data sources."""
349
352
 
350
- __slots__ = ("timesteps", "variables", "pressure_levels", "grid", "paths")
353
+ __slots__ = ("grid", "paths", "pressure_levels", "timesteps", "variables")
351
354
 
352
355
  #: List of individual timesteps from data source derived from :attr:`time`
353
356
  #: Use :func:`parse_time` to handle :class:`TimeInput`.