pycontrails 0.53.1-cp313-cp313-macosx_10_13_x86_64.whl → 0.54.1-cp313-cp313-macosx_10_13_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pycontrails might be problematic. Click here for more details.

Files changed (30)
  1. pycontrails/_version.py +2 -2
  2. pycontrails/core/aircraft_performance.py +9 -3
  3. pycontrails/core/fleet.py +5 -22
  4. pycontrails/core/flight.py +12 -78
  5. pycontrails/core/met.py +30 -45
  6. pycontrails/core/rgi_cython.cpython-313-darwin.so +0 -0
  7. pycontrails/core/vector.py +13 -45
  8. pycontrails/datalib/_met_utils/metsource.py +1 -1
  9. pycontrails/datalib/ecmwf/__init__.py +23 -3
  10. pycontrails/datalib/ecmwf/arco_era5.py +119 -306
  11. pycontrails/datalib/ecmwf/era5.py +2 -1
  12. pycontrails/datalib/ecmwf/era5_model_level.py +115 -117
  13. pycontrails/datalib/ecmwf/hres_model_level.py +38 -74
  14. pycontrails/datalib/ecmwf/model_levels.py +400 -44
  15. pycontrails/datalib/ecmwf/variables.py +11 -0
  16. pycontrails/datalib/landsat.py +3 -2
  17. pycontrails/datalib/sentinel.py +0 -1
  18. pycontrails/ext/synthetic_flight.py +5 -1
  19. pycontrails/models/apcemm/apcemm.py +0 -1
  20. pycontrails/models/cocip/cocip.py +0 -1
  21. pycontrails/models/cocipgrid/cocip_grid.py +5 -3
  22. pycontrails/models/dry_advection.py +11 -3
  23. pycontrails/models/issr.py +2 -2
  24. pycontrails/models/ps_model/ps_model.py +39 -24
  25. {pycontrails-0.53.1.dist-info → pycontrails-0.54.1.dist-info}/METADATA +2 -4
  26. {pycontrails-0.53.1.dist-info → pycontrails-0.54.1.dist-info}/RECORD +30 -30
  27. {pycontrails-0.53.1.dist-info → pycontrails-0.54.1.dist-info}/WHEEL +1 -1
  28. {pycontrails-0.53.1.dist-info → pycontrails-0.54.1.dist-info}/LICENSE +0 -0
  29. {pycontrails-0.53.1.dist-info → pycontrails-0.54.1.dist-info}/NOTICE +0 -0
  30. {pycontrails-0.53.1.dist-info → pycontrails-0.54.1.dist-info}/top_level.txt +0 -0
pycontrails/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '0.53.1'
16
- __version_tuple__ = version_tuple = (0, 53, 1)
15
+ __version__ = version = '0.54.1'
16
+ __version_tuple__ = version_tuple = (0, 54, 1)
@@ -12,6 +12,7 @@ import numpy.typing as npt
12
12
  from overrides import overrides
13
13
 
14
14
  from pycontrails.core import flight, fuel
15
+ from pycontrails.core.fleet import Fleet
15
16
  from pycontrails.core.flight import Flight
16
17
  from pycontrails.core.met import MetDataset
17
18
  from pycontrails.core.models import Model, ModelParams, interpolate_met
@@ -76,6 +77,10 @@ class AircraftPerformance(Model):
76
77
 
77
78
  source: Flight
78
79
 
80
+ @abc.abstractmethod
81
+ @overload
82
+ def eval(self, source: Fleet, **params: Any) -> Fleet: ...
83
+
79
84
  @abc.abstractmethod
80
85
  @overload
81
86
  def eval(self, source: Flight, **params: Any) -> Flight: ...
@@ -467,10 +472,11 @@ class AircraftPerformance(Model):
467
472
  tas[cond] = self.source.segment_groundspeed()[cond]
468
473
  return tas
469
474
 
470
- met_incomplete = (
471
- self.met is None or "eastward_wind" not in self.met or "northward_wind" not in self.met
475
+ wind_available = ("eastward_wind" in self.source and "northward_wind" in self.source) or (
476
+ self.met is not None and "eastward_wind" in self.met and "northward_wind" in self.met
472
477
  )
473
- if met_incomplete:
478
+
479
+ if not wind_available:
474
480
  if fill_with_groundspeed:
475
481
  tas = self.source.segment_groundspeed()
476
482
  self.source["true_airspeed"] = tas
pycontrails/core/fleet.py CHANGED
@@ -196,17 +196,15 @@ class Fleet(Flight):
196
196
 
197
197
  fl_attrs: dict[str, Any] = {}
198
198
 
199
- # Pluck from the first flight to get fuel, data_keys, and crs
199
+ # Pluck from the first flight to get fuel and data_keys
200
200
  fuel = seq[0].fuel
201
201
  data_keys = set(seq[0]) # convert to a new instance to because we mutate seq[0]
202
- crs = seq[0].attrs["crs"]
203
202
 
204
203
  for fl in seq:
205
204
  _validate_fl(
206
205
  fl,
207
206
  fl_attrs=fl_attrs,
208
207
  data_keys=data_keys,
209
- crs=crs,
210
208
  fuel=fuel,
211
209
  broadcast_numeric=broadcast_numeric,
212
210
  )
@@ -318,10 +316,9 @@ class Fleet(Flight):
318
316
 
319
317
  @overrides
320
318
  def segment_groundspeed(self, *args: Any, **kwargs: Any) -> npt.NDArray[np.float64]:
321
- # Implement if we have a usecase for this.
322
- # Because the super() method uses a smoothing pattern, it will not reliably
323
- # work on Fleet.
324
- raise NotImplementedError
319
+ fls = self.to_flight_list(copy=False)
320
+ gs = [fl.segment_groundspeed(*args, **kwargs) for fl in fls]
321
+ return np.concatenate(gs)
325
322
 
326
323
  @overrides
327
324
  def resample_and_fill(self, *args: Any, **kwargs: Any) -> Fleet:
@@ -336,10 +333,6 @@ class Fleet(Flight):
336
333
  @property
337
334
  @overrides
338
335
  def max_distance_gap(self) -> float:
339
- if self.attrs["crs"] != "EPSG:4326":
340
- msg = "Only implemented for EPSG:4326 CRS."
341
- raise NotImplementedError(msg)
342
-
343
336
  return np.nanmax(self.segment_length()).item()
344
337
 
345
338
  @overrides
@@ -400,7 +393,6 @@ def _validate_fl(
400
393
  *,
401
394
  fl_attrs: dict[str, Any],
402
395
  data_keys: set[str],
403
- crs: str,
404
396
  fuel: Fuel,
405
397
  broadcast_numeric: bool,
406
398
  ) -> None:
@@ -419,8 +411,6 @@ def _validate_fl(
419
411
  Set of data keys expected in each flight.
420
412
  fuel : Fuel
421
413
  Fuel used all flights
422
- crs : str
423
- CRS to use all flights
424
414
  broadcast_numeric : bool
425
415
  If True, broadcast numeric attributes to data variables.
426
416
 
@@ -429,7 +419,7 @@ def _validate_fl(
429
419
  KeyError
430
420
  ``fl`` does not have a ``flight_id`` key in :attr:`attrs`.
431
421
  ValueError
432
- If ``flight_id`` is duplicated or incompatible CRS found.
422
+ If ``flight_id`` is duplicated or if ``fuel`` or ``data_keys`` are inconsistent.
433
423
  """
434
424
  flight_id = _extract_flight_id(fl)
435
425
 
@@ -446,13 +436,6 @@ def _validate_fl(
446
436
  "The 'fuel' attributes must be consistent between flights in a Fleet."
447
437
  )
448
438
  raise ValueError(msg)
449
- if fl.attrs["crs"] != crs:
450
- msg = (
451
- f"CRS on Flight {flight_id} ({fl.attrs['crs']}) "
452
- f"is not inconsistent with previous flights ({crs}). "
453
- "The 'crs' attributes must be consistent between flights in a Fleet."
454
- )
455
- raise ValueError(msg)
456
439
  if fl.data.keys() != data_keys:
457
440
  msg = (
458
441
  f"Data keys on Flight {flight_id} ({fl.data.keys()}) "
@@ -75,9 +75,6 @@ class Flight(GeoVectorDataset):
75
75
  Expect altitude in [:math:`m`].
76
76
  Expect pressure level (`level`) in [:math:`hPa`].
77
77
 
78
- Use the attribute :attr:`attrs["crs"]` to specify coordinate reference system
79
- using `PROJ <https://proj.org/>`_ or `EPSG <https://epsg.org/home.html>`_ syntax.
80
-
81
78
  Parameters
82
79
  ----------
83
80
  data : dict[str, np.ndarray] | pd.DataFrame | VectorDataDict | VectorDataset | None
@@ -159,7 +156,7 @@ class Flight(GeoVectorDataset):
159
156
  ... })
160
157
  >>> fl = Flight(data=df, flight_id=123) # specify a flight_id by keyword
161
158
  >>> fl
162
- Flight [4 keys x 500 length, 2 attributes]
159
+ Flight [4 keys x 500 length, 1 attributes]
163
160
  Keys: longitude, latitude, altitude, time
164
161
  Attributes:
165
162
  time [2021-01-01 10:00:00, 2021-01-01 15:00:00]
@@ -167,7 +164,6 @@ class Flight(GeoVectorDataset):
167
164
  latitude [10.0, 40.0]
168
165
  altitude [10500.0, 10500.0]
169
166
  flight_id 123
170
- crs EPSG:4326
171
167
 
172
168
  >>> # Create `Flight` from keywords
173
169
  >>> fl = Flight(
@@ -177,14 +173,13 @@ class Flight(GeoVectorDataset):
177
173
  ... time=pd.date_range('2021-01-01T12', '2021-01-01T14', periods=200),
178
174
  ... )
179
175
  >>> fl
180
- Flight [4 keys x 200 length, 1 attributes]
176
+ Flight [4 keys x 200 length, 0 attributes]
181
177
  Keys: longitude, latitude, time, altitude
182
178
  Attributes:
183
179
  time [2021-01-01 12:00:00, 2021-01-01 14:00:00]
184
180
  longitude [20.0, 30.0]
185
181
  latitude [30.0, 40.0]
186
182
  altitude [11000.0, 11000.0]
187
- crs EPSG:4326
188
183
 
189
184
  >>> # Access the underlying data as DataFrame
190
185
  >>> fl.dataframe.head()
@@ -369,11 +364,6 @@ class Flight(GeoVectorDataset):
369
364
  float
370
365
  Maximum distance between waypoints, [:math:`m`]
371
366
 
372
- Raises
373
- ------
374
- NotImplementedError
375
- Raises when attr:`attrs["crs"]` is not EPSG:4326
376
-
377
367
  Examples
378
368
  --------
379
369
  >>> import numpy as np
@@ -386,9 +376,6 @@ class Flight(GeoVectorDataset):
386
376
  >>> fl.max_distance_gap
387
377
  np.float64(7391.27...)
388
378
  """
389
- if self.attrs["crs"] != "EPSG:4326":
390
- raise NotImplementedError("Only implemented for EPSG:4326 CRS.")
391
-
392
379
  return self.segment_length()[:-1].max()
393
380
 
394
381
  @property
@@ -400,11 +387,6 @@ class Flight(GeoVectorDataset):
400
387
  float
401
388
  Total flight length, [:math:`m`]
402
389
 
403
- Raises
404
- ------
405
- NotImplementedError
406
- Raises when attr:`attrs["crs"]` is not EPSG:4326
407
-
408
390
  Examples
409
391
  --------
410
392
  >>> import numpy as np
@@ -417,9 +399,6 @@ class Flight(GeoVectorDataset):
417
399
  >>> fl.length
418
400
  np.float64(1436924.67...)
419
401
  """
420
- if self.attrs["crs"] != "EPSG:4326":
421
- raise NotImplementedError("Only implemented for EPSG:4326 CRS.")
422
-
423
402
  # drop off the nan
424
403
  return np.nansum(self.segment_length()[:-1])
425
404
 
@@ -461,11 +440,6 @@ class Flight(GeoVectorDataset):
461
440
  npt.NDArray[np.float64]
462
441
  Array of great circle distances in [:math:`m`] between waypoints
463
442
 
464
- Raises
465
- ------
466
- NotImplementedError
467
- Raises when attr:`attrs["crs"]` is not EPSG:4326
468
-
469
443
  Examples
470
444
  --------
471
445
  >>> from pycontrails import Flight
@@ -484,9 +458,6 @@ class Flight(GeoVectorDataset):
484
458
  :func:`segment_haversine`
485
459
  :meth:`segment_length`
486
460
  """
487
- if self.attrs["crs"] != "EPSG:4326":
488
- raise NotImplementedError("Only implemented for EPSG:4326 CRS.")
489
-
490
461
  return geo.segment_haversine(self["longitude"], self["latitude"])
491
462
 
492
463
  def segment_length(self) -> npt.NDArray[np.float64]:
@@ -500,11 +471,6 @@ class Flight(GeoVectorDataset):
500
471
  npt.NDArray[np.float64]
501
472
  Array of distances in [:math:`m`] between waypoints
502
473
 
503
- Raises
504
- ------
505
- NotImplementedError
506
- Raises when attr:`attrs["crs"]` is not EPSG:4326
507
-
508
474
  Examples
509
475
  --------
510
476
  >>> from pycontrails import Flight
@@ -522,9 +488,6 @@ class Flight(GeoVectorDataset):
522
488
  --------
523
489
  :func:`segment_length`
524
490
  """
525
- if self.attrs["crs"] != "EPSG:4326":
526
- raise NotImplementedError("Only implemented for EPSG:4326 CRS.")
527
-
528
491
  return geo.segment_length(self["longitude"], self["latitude"], self.altitude)
529
492
 
530
493
  def segment_angle(self) -> tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]:
@@ -1151,7 +1114,9 @@ class Flight(GeoVectorDataset):
1151
1114
  out.data.pop("level", None) # avoid any ambiguity
1152
1115
  return out
1153
1116
 
1154
- def distance_to_coords(self: Flight, distance: ArrayOrFloat) -> tuple[
1117
+ def distance_to_coords(
1118
+ self: Flight, distance: ArrayOrFloat
1119
+ ) -> tuple[
1155
1120
  ArrayOrFloat,
1156
1121
  ArrayOrFloat,
1157
1122
  np.intp | npt.NDArray[np.intp],
@@ -1302,15 +1267,7 @@ class Flight(GeoVectorDataset):
1302
1267
  pd.DataFrame | None
1303
1268
  Generated waypoints to be merged into underlying :attr:`data`.
1304
1269
  Return `None` if no new waypoints are created.
1305
-
1306
- Raises
1307
- ------
1308
- NotImplementedError
1309
- Raises when attr:`attrs["crs"]` is not EPSG:4326
1310
1270
  """
1311
- if self.attrs["crs"] != "EPSG:4326":
1312
- raise NotImplementedError("Only implemented for EPSG:4326 CRS.")
1313
-
1314
1271
  # Omit the final nan and ensure index + 1 (below) is well defined
1315
1272
  segs = self.segment_haversine()[:-1]
1316
1273
 
@@ -1429,7 +1386,7 @@ class Flight(GeoVectorDataset):
1429
1386
  if key is not None and key not in self.dataframe.columns:
1430
1387
  raise KeyError(f"Column {key} does not exist in data.")
1431
1388
 
1432
- jump_indices = _antimeridian_index(pd.Series(self["longitude"]), self.attrs["crs"])
1389
+ jump_indices = _antimeridian_index(pd.Series(self["longitude"]))
1433
1390
 
1434
1391
  def _group_to_feature(group: pd.DataFrame) -> dict[str, str | dict[str, Any]]:
1435
1392
  # assigns a different value to each group of consecutive indices
@@ -1513,8 +1470,6 @@ class Flight(GeoVectorDataset):
1513
1470
  ------
1514
1471
  KeyError
1515
1472
  :attr:`data` does not contain column ``key``
1516
- NotImplementedError
1517
- Raised when ``attrs["crs"]`` is not EPSG:4326
1518
1473
 
1519
1474
  Examples
1520
1475
  --------
@@ -1555,8 +1510,6 @@ class Flight(GeoVectorDataset):
1555
1510
  """
1556
1511
  if key not in self.data:
1557
1512
  raise KeyError(f"Column {key} does not exist in data.")
1558
- if self.attrs["crs"] != "EPSG:4326":
1559
- raise NotImplementedError("Only implemented for EPSG:4326 CRS.")
1560
1513
 
1561
1514
  # The column of interest may contain floating point values less than 1.
1562
1515
  # In this case, if the default threshold is not changed, warn the user that the behavior
@@ -1666,40 +1619,23 @@ def _return_linestring(data: dict[str, npt.NDArray[np.float64]]) -> list[list[fl
1666
1619
  return [list(p) for p in points]
1667
1620
 
1668
1621
 
1669
- def _antimeridian_index(longitude: pd.Series, crs: str = "EPSG:4326") -> list[int]:
1622
+ def _antimeridian_index(longitude: pd.Series) -> list[int]:
1670
1623
  """Return indices after flight crosses antimeridian, or an empty list if flight does not cross.
1671
1624
 
1625
+ This function assumes EPSG:4326 coordinates.
1626
+
1672
1627
  Parameters
1673
1628
  ----------
1674
1629
  longitude : pd.Series
1675
1630
  longitude values with an integer index
1676
- crs : str, optional
1677
- Coordinate Reference system for longitude specified in EPSG format.
1678
- Currently only supports "EPSG:4326" and "EPSG:3857".
1679
1631
 
1680
1632
  Returns
1681
1633
  -------
1682
1634
  list[int]
1683
1635
  Indices after jump, or empty list of flight does not cross antimeridian.
1684
-
1685
- Raises
1686
- ------
1687
- ValueError
1688
- CRS is not supported.
1689
1636
  """
1690
- # WGS84
1691
- if crs in ["EPSG:4326"]:
1692
- l1 = (-180.0, -90.0)
1693
- l2 = (90.0, 180.0)
1694
-
1695
- # pseudo mercator
1696
- elif crs in ["EPSG:3857"]:
1697
- # values calculated through pyproj.Transformer
1698
- l1 = (-20037508.342789244, -10018754.171394622)
1699
- l2 = (10018754.171394622, 20037508.342789244)
1700
-
1701
- else:
1702
- raise ValueError("CRS must be one of EPSG:4326 or EPSG:3857")
1637
+ l1 = (-180.0, -90.0)
1638
+ l2 = (90.0, 180.0)
1703
1639
 
1704
1640
  # TODO: When nans exist, this method *may* not find the meridian
1705
1641
  if np.any(np.isnan(longitude)):
@@ -1709,9 +1645,7 @@ def _antimeridian_index(longitude: pd.Series, crs: str = "EPSG:4326") -> list[in
1709
1645
  s2 = longitude.between(*l2)
1710
1646
  jump12 = longitude[s1 & s2.shift()]
1711
1647
  jump21 = longitude[s1.shift() & s2]
1712
- jump_index = pd.concat([jump12, jump21]).index.to_list()
1713
-
1714
- return jump_index
1648
+ return pd.concat([jump12, jump21]).index.to_list()
1715
1649
 
1716
1650
 
1717
1651
  def _sg_filter(
pycontrails/core/met.py CHANGED
@@ -70,7 +70,7 @@ class MetBase(ABC, Generic[XArrayType]):
70
70
  cachestore: CacheStore | None
71
71
 
72
72
  #: Default dimension order for DataArray or Dataset (x, y, z, t)
73
- dim_order: tuple[Hashable, Hashable, Hashable, Hashable] = (
73
+ dim_order = (
74
74
  "longitude",
75
75
  "latitude",
76
76
  "level",
@@ -97,17 +97,18 @@ class MetBase(ABC, Generic[XArrayType]):
97
97
  ValueError
98
98
  If data does not contain all four coordinates (longitude, latitude, level, time).
99
99
  """
100
- for dim in self.dim_order:
101
- if dim not in self.data.dims:
102
- if dim == "level":
103
- msg = (
104
- f"Meteorology data must contain dimension '{dim}'. "
105
- "For single level data, set 'level' coordinate to constant -1 "
106
- "using `ds = ds.expand_dims({'level': [-1]})`"
107
- )
108
- else:
109
- msg = f"Meteorology data must contain dimension '{dim}'."
110
- raise ValueError(msg)
100
+ missing = set(self.dim_order).difference(self.data.dims)
101
+ if not missing:
102
+ return
103
+
104
+ dim = sorted(missing)
105
+ msg = f"Meteorology data must contain dimension(s): {dim}."
106
+ if "level" in dim:
107
+ msg += (
108
+ " For single level data, set 'level' coordinate to constant -1 "
109
+ "using `ds = ds.expand_dims({'level': [-1]})`"
110
+ )
111
+ raise ValueError(msg)
111
112
 
112
113
  def _validate_longitude(self) -> None:
113
114
  """Check longitude bounds.
@@ -123,8 +124,8 @@ class MetBase(ABC, Generic[XArrayType]):
123
124
  if longitude.dtype != COORD_DTYPE:
124
125
  raise ValueError(
125
126
  "Longitude values must be of type float64. "
126
- "Initiate with 'copy=True' to convert to float64. "
127
- "Initiate with 'validate=False' to skip validation."
127
+ "Instantiate with 'copy=True' to convert to float64. "
128
+ "Instantiate with 'validate=False' to skip validation."
128
129
  )
129
130
 
130
131
  if self.is_wrapped:
@@ -167,8 +168,8 @@ class MetBase(ABC, Generic[XArrayType]):
167
168
  if latitude.dtype != COORD_DTYPE:
168
169
  raise ValueError(
169
170
  "Latitude values must be of type float64. "
170
- "Initiate with 'copy=True' to convert to float64. "
171
- "Initiate with 'validate=False' to skip validation."
171
+ "Instantiate with 'copy=True' to convert to float64. "
172
+ "Instantiate with 'validate=False' to skip validation."
172
173
  )
173
174
 
174
175
  if latitude[0] < -90.0:
@@ -192,10 +193,10 @@ class MetBase(ABC, Generic[XArrayType]):
192
193
  """
193
194
  indexes = self.indexes
194
195
  if not np.all(np.diff(indexes["time"]) > np.timedelta64(0, "ns")):
195
- raise ValueError("Coordinate `time` not sorted. Initiate with `copy=True`.")
196
+ raise ValueError("Coordinate 'time' not sorted. Instantiate with 'copy=True'.")
196
197
  for coord in self.dim_order[:3]: # exclude time, the 4th dimension
197
198
  if not np.all(np.diff(indexes[coord]) > 0.0):
198
- raise ValueError(f"Coordinate '{coord}' not sorted. Initiate with 'copy=True'.")
199
+ raise ValueError(f"Coordinate '{coord}' not sorted. Instantiate with 'copy=True'.")
199
200
 
200
201
  def _validate_transpose(self) -> None:
201
202
  """Check that data is transposed according to :attr:`dim_order`."""
@@ -204,11 +205,11 @@ class MetBase(ABC, Generic[XArrayType]):
204
205
  if da.dims != self.dim_order:
205
206
  if key is not None:
206
207
  msg = (
207
- f"Data dimension not transposed on variable '{key}'. Initiate with"
208
+ f"Data dimension not transposed on variable '{key}'. Instantiate with"
208
209
  " 'copy=True'."
209
210
  )
210
211
  else:
211
- msg = "Data dimension not transposed. Initiate with 'copy=True'."
212
+ msg = "Data dimension not transposed. Instantiate with 'copy=True'."
212
213
  raise ValueError(msg)
213
214
 
214
215
  data = self.data
@@ -228,6 +229,12 @@ class MetBase(ABC, Generic[XArrayType]):
228
229
  self._validate_longitude()
229
230
  self._validate_latitude()
230
231
  self._validate_transpose()
232
+ if self.data["level"].dtype != COORD_DTYPE:
233
+ raise ValueError(
234
+ "Level values must be of type float64. "
235
+ "Instantiate with 'copy=True' to convert to float64. "
236
+ "Instantiate with 'validate=False' to skip validation."
237
+ )
231
238
 
232
239
  def _preprocess_dims(self, wrap_longitude: bool) -> None:
233
240
  """Confirm DataArray or Dataset include required dimension in a consistent format.
@@ -363,16 +370,6 @@ class MetBase(ABC, Generic[XArrayType]):
363
370
  "time": variables["time"].to_numpy(),
364
371
  }
365
372
 
366
- @property
367
- def variables(self) -> dict[Hashable, pd.Index]:
368
- """See :attr:`indexes`."""
369
- warnings.warn(
370
- "The 'variables' property is deprecated and will be removed in a future release. "
371
- "Use 'indexes' instead.",
372
- DeprecationWarning,
373
- )
374
- return self.indexes
375
-
376
373
  @property
377
374
  def indexes(self) -> dict[Hashable, pd.Index]:
378
375
  """Low level access to underlying :attr:`data` indexes.
@@ -745,8 +742,8 @@ class MetDataset(MetBase):
745
742
  except KeyError as e:
746
743
  raise KeyError(
747
744
  f"Variable {key} not found. Available variables: {', '.join(self.data.data_vars)}. "
748
- "To get items (e.g. `time` or `level`) from underlying `xr.Dataset` object, "
749
- "use the `data` attribute."
745
+ "To get items (e.g. 'time' or 'level') from underlying xr.Dataset object, "
746
+ "use the 'data' attribute."
750
747
  ) from e
751
748
  return MetDataArray(da, copy=False, validate=False)
752
749
 
@@ -1057,14 +1054,13 @@ class MetDataset(MetBase):
1057
1054
  >>> era5 = ERA5(time=times, variables=variables, pressure_levels=levels)
1058
1055
  >>> met = era5.open_metdataset()
1059
1056
  >>> met.to_vector(transfer_attrs=False)
1060
- GeoVectorDataset [6 keys x 4152960 length, 1 attributes]
1057
+ GeoVectorDataset [6 keys x 4152960 length, 0 attributes]
1061
1058
  Keys: longitude, latitude, level, time, air_temperature, ..., specific_humidity
1062
1059
  Attributes:
1063
1060
  time [2022-03-01 00:00:00, 2022-03-01 01:00:00]
1064
1061
  longitude [-180.0, 179.75]
1065
1062
  latitude [-90.0, 90.0]
1066
1063
  altitude [10362.8, 11783.9]
1067
- crs EPSG:4326
1068
1064
 
1069
1065
  """
1070
1066
  coords_keys = self.data.dims
@@ -1374,20 +1370,9 @@ class MetDataArray(MetBase):
1374
1370
  copy: bool = True,
1375
1371
  validate: bool = True,
1376
1372
  name: Hashable | None = None,
1377
- **kwargs: Any,
1378
1373
  ) -> None:
1379
- # init cache
1380
1374
  self.cachestore = cachestore
1381
1375
 
1382
- # try to create DataArray out of input data and **kwargs
1383
- if not isinstance(data, xr.DataArray):
1384
- warnings.warn(
1385
- "Input 'data' must be an xarray DataArray. "
1386
- "Passing arbitrary kwargs will be removed in future versions.",
1387
- DeprecationWarning,
1388
- )
1389
- data = xr.DataArray(data, **kwargs)
1390
-
1391
1376
  if copy:
1392
1377
  self.data = data.copy()
1393
1378
  self._preprocess_dims(wrap_longitude)
@@ -1044,7 +1044,6 @@ class VectorDataset:
1044
1044
  >>> pprint.pprint(fl.to_dict())
1045
1045
  {'aircraft_type': 'B737',
1046
1046
  'altitude_ft': [38661.0, 38661.0, 38661.0, 38661.0, 38661.0, 38661.0, 38661.0],
1047
- 'crs': 'EPSG:4326',
1048
1047
  'latitude': [40.0, 41.724, 43.428, 45.111, 46.769, 48.399, 50.0],
1049
1048
  'longitude': [-100.0,
1050
1049
  -101.441,
@@ -1073,7 +1072,6 @@ class VectorDataset:
1073
1072
 
1074
1073
  # Convert numpy objects to python objects
1075
1074
  if isinstance(obj, np.ndarray | np.generic):
1076
-
1077
1075
  # round time to unix seconds
1078
1076
  if key == "time":
1079
1077
  return np_encoder.default(obj.astype("datetime64[s]").astype(int))
@@ -1216,9 +1214,6 @@ class GeoVectorDataset(VectorDataset):
1216
1214
  Each spatial variable is expected to have "float32" or "float64" ``dtype``.
1217
1215
  The time variable is expected to have "datetime64[ns]" ``dtype``.
1218
1216
 
1219
- Use the attribute :attr:`attr["crs"]` to specify coordinate reference system
1220
- using `PROJ <https://proj.org/>`_ or `EPSG <https://epsg.org/home.html>`_ syntax.
1221
-
1222
1217
  Parameters
1223
1218
  ----------
1224
1219
  data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
@@ -1365,16 +1360,12 @@ class GeoVectorDataset(VectorDataset):
1365
1360
  if arr.dtype not in float_dtype:
1366
1361
  self.update({coord: arr.astype(np.float64)})
1367
1362
 
1368
- # set CRS to "EPSG:4326" by default
1369
- crs = self.attrs.setdefault("crs", "EPSG:4326")
1370
-
1371
- if crs == "EPSG:4326":
1372
- longitude = self["longitude"]
1373
- if np.any(longitude > 180.0) or np.any(longitude < -180.0):
1374
- raise ValueError("EPSG:4326 longitude coordinates should lie between [-180, 180).")
1375
- latitude = self["latitude"]
1376
- if np.any(latitude > 90.0) or np.any(latitude < -90.0):
1377
- raise ValueError("EPSG:4326 latitude coordinates should lie between [-90, 90].")
1363
+ longitude = self["longitude"]
1364
+ if np.any(longitude > 180.0) or np.any(longitude < -180.0):
1365
+ raise ValueError("EPSG:4326 longitude coordinates should lie between [-180, 180).")
1366
+ latitude = self["latitude"]
1367
+ if np.any(latitude > 90.0) or np.any(latitude < -90.0):
1368
+ raise ValueError("EPSG:4326 latitude coordinates should lie between [-90, 90].")
1378
1369
 
1379
1370
  @overrides
1380
1371
  def _display_attrs(self) -> dict[str, str]:
@@ -1531,24 +1522,21 @@ class GeoVectorDataset(VectorDataset):
1531
1522
  # Utilities
1532
1523
  # ------------
1533
1524
 
1534
- def transform_crs(
1535
- self: GeoVectorDatasetType, crs: str, copy: bool = True
1536
- ) -> GeoVectorDatasetType:
1525
+ def transform_crs(self, crs: str) -> tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]:
1537
1526
  """Transform trajectory data from one coordinate reference system (CRS) to another.
1538
1527
 
1539
1528
  Parameters
1540
1529
  ----------
1541
1530
  crs : str
1542
1531
  Target CRS. Passed into to :class:`pyproj.Transformer`. The source CRS
1543
- is inferred from the :attr:`attrs["crs"]` attribute.
1532
+ is assumed to be EPSG:4326.
1544
1533
  copy : bool, optional
1545
1534
  Copy data on transformation. Defaults to True.
1546
1535
 
1547
1536
  Returns
1548
1537
  -------
1549
- GeoVectorDatasetType
1550
- Converted dataset with new coordinate reference system.
1551
- :attr:`attrs["crs"]` reflects new crs.
1538
+ tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]
1539
+ New x and y coordinates in the target CRS.
1552
1540
  """
1553
1541
  try:
1554
1542
  import pyproj
@@ -1560,14 +1548,9 @@ class GeoVectorDataset(VectorDataset):
1560
1548
  pycontrails_optional_package="pyproj",
1561
1549
  )
1562
1550
 
1563
- transformer = pyproj.Transformer.from_crs(self.attrs["crs"], crs, always_xy=True)
1564
- lon, lat = transformer.transform(self["longitude"], self["latitude"])
1565
-
1566
- ret = self.copy() if copy else self
1567
-
1568
- ret.update(longitude=lon, latitude=lat)
1569
- ret.attrs.update(crs=crs)
1570
- return ret
1551
+ crs_from = "EPSG:4326"
1552
+ transformer = pyproj.Transformer.from_crs(crs_from, crs, always_xy=True)
1553
+ return transformer.transform(self["longitude"], self["latitude"])
1571
1554
 
1572
1555
  def T_isa(self) -> npt.NDArray[np.float64]:
1573
1556
  """Calculate the ICAO standard atmosphere temperature at each point.
@@ -1962,21 +1945,6 @@ class GeoVectorDataset(VectorDataset):
1962
1945
  """
1963
1946
  return json_utils.dataframe_to_geojson_points(self.dataframe)
1964
1947
 
1965
- def to_pseudo_mercator(self: GeoVectorDatasetType, copy: bool = True) -> GeoVectorDatasetType:
1966
- """Convert data from :attr:`attrs["crs"]` to Pseudo Mercator (EPSG:3857).
1967
-
1968
- Parameters
1969
- ----------
1970
- copy : bool, optional
1971
- Copy data on transformation.
1972
- Defaults to True.
1973
-
1974
- Returns
1975
- -------
1976
- GeoVectorDatasetType
1977
- """
1978
- return self.transform_crs("EPSG:3857", copy=copy)
1979
-
1980
1948
  # ------------
1981
1949
  # Vector to grid
1982
1950
  # ------------
@@ -641,7 +641,7 @@ class MetDataSource(abc.ABC):
641
641
 
642
642
  - chunks: {"time": 1}
643
643
  - engine: "netcdf4"
644
- - parallel: True
644
+ - parallel: False
645
645
 
646
646
  Returns
647
647
  -------