pycontrails 0.54.4-cp310-cp310-macosx_11_0_arm64.whl → 0.54.6-cp310-cp310-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pycontrails has been flagged for review.
Files changed (38)
  1. pycontrails/_version.py +2 -2
  2. pycontrails/core/aircraft_performance.py +34 -16
  3. pycontrails/core/airports.py +3 -4
  4. pycontrails/core/fleet.py +30 -9
  5. pycontrails/core/flight.py +8 -5
  6. pycontrails/core/flightplan.py +11 -11
  7. pycontrails/core/interpolation.py +7 -4
  8. pycontrails/core/met.py +145 -86
  9. pycontrails/core/met_var.py +62 -0
  10. pycontrails/core/models.py +3 -2
  11. pycontrails/core/rgi_cython.cpython-310-darwin.so +0 -0
  12. pycontrails/core/vector.py +97 -74
  13. pycontrails/datalib/_met_utils/metsource.py +1 -1
  14. pycontrails/datalib/ecmwf/era5.py +5 -6
  15. pycontrails/datalib/ecmwf/era5_model_level.py +4 -5
  16. pycontrails/datalib/ecmwf/ifs.py +1 -3
  17. pycontrails/datalib/gfs/gfs.py +1 -3
  18. pycontrails/models/apcemm/apcemm.py +2 -2
  19. pycontrails/models/apcemm/utils.py +1 -1
  20. pycontrails/models/cocip/cocip.py +86 -27
  21. pycontrails/models/cocip/output_formats.py +1 -0
  22. pycontrails/models/cocipgrid/cocip_grid.py +8 -73
  23. pycontrails/models/dry_advection.py +99 -31
  24. pycontrails/models/emissions/emissions.py +2 -2
  25. pycontrails/models/humidity_scaling/humidity_scaling.py +1 -1
  26. pycontrails/models/issr.py +2 -2
  27. pycontrails/models/pcc.py +1 -2
  28. pycontrails/models/ps_model/ps_grid.py +2 -2
  29. pycontrails/models/ps_model/ps_model.py +4 -32
  30. pycontrails/models/ps_model/ps_operational_limits.py +2 -6
  31. pycontrails/models/tau_cirrus.py +13 -6
  32. pycontrails/physics/geo.py +3 -3
  33. {pycontrails-0.54.4.dist-info → pycontrails-0.54.6.dist-info}/METADATA +3 -4
  34. {pycontrails-0.54.4.dist-info → pycontrails-0.54.6.dist-info}/RECORD +38 -38
  35. {pycontrails-0.54.4.dist-info → pycontrails-0.54.6.dist-info}/WHEEL +1 -1
  36. {pycontrails-0.54.4.dist-info → pycontrails-0.54.6.dist-info}/LICENSE +0 -0
  37. {pycontrails-0.54.4.dist-info → pycontrails-0.54.6.dist-info}/NOTICE +0 -0
  38. {pycontrails-0.54.4.dist-info → pycontrails-0.54.6.dist-info}/top_level.txt +0 -0
pycontrails/core/vector.py

@@ -37,6 +37,8 @@ logger = logging.getLogger(__name__)
 class AttrDict(dict[str, Any]):
     """Thin wrapper around dict to warn when setting a key that already exists."""
 
+    __slots__ = ()
+
     def __setitem__(self, k: str, v: Any) -> None:
         """Warn when setting values that already contain values.
 
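The new __slots__ = () keeps AttrDict instances as lightweight as the plain dict they subclass, since instances no longer allocate a per-instance __dict__. A minimal sketch of the documented warn-on-overwrite behavior (the key names are illustrative, and the exact warning category is an assumption):

    import warnings

    from pycontrails.core.vector import AttrDict

    attrs = AttrDict({"flight_id": "A123"})

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        attrs["flight_id"] = "B456"  # key already holds a value, so this warns
    print(len(caught))  # expect 1

    # With __slots__ = (), instances carry no per-instance __dict__
    assert not hasattr(attrs, "__dict__")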
@@ -85,7 +87,7 @@ class VectorDataDict(dict[str, np.ndarray]):
     Parameters
     ----------
     data : dict[str, np.ndarray], optional
-        Dictionary input
+        Dictionary input. A shallow copy is always made.
     """
 
     __slots__ = ("_size",)
@@ -130,8 +132,8 @@ class VectorDataDict(dict[str, np.ndarray]):
     def __delitem__(self, k: str) -> None:
         super().__delitem__(k)
 
-        # if not data keys left, set size to 0
-        if not len(self):
+        # if no keys remain, delete _size attribute
+        if not self:
             del self._size
 
     def setdefault(self, k: str, default: npt.ArrayLike | None = None) -> np.ndarray:
@@ -191,9 +193,9 @@ class VectorDataDict(dict[str, np.ndarray]):
         super().update(kwargs_arr)
 
     def _validate_array(self, arr: np.ndarray) -> None:
-        """Ensure that `arr` is compatible with instance.
+        """Ensure that ``arr`` is compatible (1 dimensional of equal size) with instance.
 
-        Set attribute `_size` if it has not yet been defined.
+        Set attribute ``_size`` if it has not yet been defined.
 
         Parameters
         ----------
@@ -203,34 +205,34 @@ class VectorDataDict(dict[str, np.ndarray]):
         Raises
         ------
         ValueError
-            If `arr` is not compatible with instance.
+            If ``arr`` is not compatible with instance.
         """
         if arr.ndim != 1:
             raise ValueError("All np.arrays must have dimension 1.")
 
         size = getattr(self, "_size", 0)
-        if size != 0:
-            if arr.size != size:
-                raise ValueError(f"Incompatible array sizes: {arr.size} and {size}.")
-        else:
+        if not size:
             self._size = arr.size
+            return
+
+        if arr.size != size:
+            raise ValueError(f"Incompatible array sizes: {arr.size} and {size}.")
 
 
-def _empty_vector_dict(keys: Iterable[str]) -> VectorDataDict:
-    """Create instance of VectorDataDict with variables defined by `keys` and size 0.
+def _empty_vector_dict(keys: Iterable[str]) -> dict[str, np.ndarray]:
+    """Create a dictionary with keys defined by ``keys`` and empty arrays.
 
     Parameters
     ----------
     keys : Iterable[str]
-        Keys to include in empty VectorDataset instance.
+        Keys to include in dictionary.
 
     Returns
     -------
-    VectorDataDict
-        Empty :class:`VectorDataDict` instance.
+    dict[str, np.ndarray]
+        Dictionary with empty arrays.
     """
-    keys = keys or ()
-    data = VectorDataDict({key: np.array([]) for key in keys})
+    data = {key: np.array([]) for key in keys}
 
     # The default dtype is float64
     # Time is special and should have a non-default dtype of datetime64[ns]
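The restructured _validate_array keeps the same contract: every array must be one-dimensional and match the size established by the first array. A short sketch of the enforced invariant, using the error messages visible in the hunk above:

    import numpy as np

    from pycontrails.core.vector import VectorDataDict

    d = VectorDataDict({"longitude": np.zeros(3)})
    d["latitude"] = np.zeros(3)  # OK: matches the established size of 3

    try:
        d["altitude"] = np.zeros(4)
    except ValueError as exc:
        print(exc)  # Incompatible array sizes: 4 and 3.

    try:
        d["time"] = np.zeros((2, 2))
    except ValueError as exc:
        print(exc)  # All np.arrays must have dimension 1.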
@@ -245,14 +247,15 @@ class VectorDataset:
 
     Parameters
     ----------
-    data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
-        Initial data, by default None
-    attrs : dict[str, Any] | AttrDict, optional
-        Dictionary of attributes, by default None
+    data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None, optional
+        Initial data, by default None. A shallow copy is always made. Use the ``copy``
+        parameter to copy the underlying array data.
+    attrs : dict[str, Any] | None, optional
+        Dictionary of attributes, by default None. A shallow copy is always made.
     copy : bool, optional
-        Copy data on class creation, by default True
+        Copy individual arrays on instantiation, by default True.
     **attrs_kwargs : Any
-        Additional attributes passed as keyword arguments
+        Additional attributes passed as keyword arguments.
 
     Raises
     ------
@@ -262,24 +265,22 @@ class VectorDataset:
 
     __slots__ = ("attrs", "data")
 
-    #: Vector data with labels as keys and :class:`numpy.ndarray` as values
-    data: VectorDataDict
-
     #: Generic dataset attributes
     attrs: AttrDict
 
+    #: Vector data with labels as keys and :class:`numpy.ndarray` as values
+    data: VectorDataDict
+
     def __init__(
         self,
-        data: (
-            dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None
-        ) = None,
+        data: dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None = None,
         *,
-        attrs: dict[str, Any] | AttrDict | None = None,
+        attrs: dict[str, Any] | None = None,
         copy: bool = True,
         **attrs_kwargs: Any,
     ) -> None:
-        # Set data
-        # --------
+        # Set data: always shallow copy
+        # -----------------------------
 
         # Casting from one VectorDataset type to another
         # e.g., flight = Flight(...); vector = VectorDataset(flight)
@@ -288,7 +289,7 @@ class VectorDataset:
             if copy:
                 self.data = VectorDataDict({k: v.copy() for k, v in data.data.items()})
             else:
-                self.data = data.data
+                self.data = VectorDataDict(data.data)
 
         elif data is None:
             self.data = VectorDataDict()
@@ -307,31 +308,45 @@ class VectorDataset:
             data["time"] = time.to_numpy(copy=copy)
             self.data = VectorDataDict(data)
 
-        elif isinstance(data, VectorDataDict):
-            if copy:
-                self.data = VectorDataDict({k: v.copy() for k, v in data.items()})
-            else:
-                self.data = data
-
         # For anything else, we assume it is a dictionary of array-like and attach it
         else:
             self.data = VectorDataDict({k: np.array(v, copy=copy) for k, v in data.items()})
 
-        # Set attributes
-        # --------------
+        # Set attributes: always shallow copy
+        # -----------------------------------
+
+        self.attrs = AttrDict(attrs or {})  # type: ignore[arg-type]
+        self.attrs.update(attrs_kwargs)
+
+    @classmethod
+    def _from_fastpath(
+        cls,
+        data: dict[str, np.ndarray],
+        attrs: dict[str, Any] | None = None,
+        **kwargs: Any,
+    ) -> Self:
+        """Create new instance from consistent data.
 
-        if attrs is None:
-            self.attrs = AttrDict()
+        This is a low-level method that bypasses the standard constructor in certain
+        special cases. It is intended for internal use only.
 
-        elif isinstance(attrs, AttrDict) and not copy:
-            self.attrs = attrs
+        In essence, this method skips any validation from __init__ and directly sets
+        ``data`` and ``attrs``. This is useful when creating a new instance from an
+        existing instance where the data has already been validated.
+        """
+        obj = cls.__new__(cls)
 
-        # shallow copy if dict
-        else:
-            self.attrs = AttrDict(attrs.copy())
+        obj.data = VectorDataDict(data)
+        obj.attrs = AttrDict(attrs or {})
 
-        # update with kwargs
-        self.attrs.update(attrs_kwargs)
+        for key, value in kwargs.items():
+            try:
+                setattr(obj, key, value)
+            # If key not present in __slots__ of class (or parents), it's intended for attrs
+            except AttributeError:
+                obj.attrs[key] = value
+
+        return obj
 
     # ------------
     # dict-like methods
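_from_fastpath replaces the copy=False constructor paths used internally: it skips validation, wraps data and attrs in fresh VectorDataDict/AttrDict containers, and shares the underlying arrays. A sketch of the resulting semantics (the method is private, so this usage is illustrative only):

    import numpy as np

    from pycontrails import VectorDataset

    v1 = VectorDataset({"x": np.arange(3.0)}, attrs={"source": "example"})

    # Public constructor: copies each array by default
    v2 = VectorDataset(v1)
    assert v2["x"] is not v1["x"]

    # Internal fastpath: new dict wrappers, same underlying array objects
    v3 = VectorDataset._from_fastpath(v1.data, v1.attrs)
    assert v3["x"] is v1["x"]
    assert v3.data is not v1.data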
@@ -663,6 +678,13 @@ class VectorDataset:
         8   15  18
 
         """
+        if cls not in (VectorDataset, GeoVectorDataset):
+            msg = (
+                "Method 'sum' is only available on 'VectorDataset' and 'GeoVectorDataset'. "
+                "To sum 'Flight' instances, use 'Fleet.from_seq'."
+            )
+            raise TypeError(msg)
+
         vectors = [v for v in vectors if v is not None]  # remove None values
 
         if not vectors:
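The new guard makes the inherited classmethod fail loudly on subclasses, where plain concatenation would silently drop subclass invariants such as per-flight metadata. A sketch of the new behavior (the coordinates are placeholders):

    import numpy as np

    from pycontrails import Flight

    fl = Flight(
        longitude=[0.0, 1.0],
        latitude=[0.0, 1.0],
        altitude=[10000.0, 10000.0],
        time=np.array(["2022-01-01T00", "2022-01-01T01"], dtype="datetime64[ns]"),
    )

    try:
        Flight.sum([fl, fl])
    except TypeError as exc:
        print(exc)  # ... To sum 'Flight' instances, use 'Fleet.from_seq'.

Fleet.from_seq remains the supported way to combine Flight instances into a single vector.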
@@ -693,10 +715,9 @@ class VectorDataset:
             return np.concatenate(values)
 
         data = {key: concat(key) for key in keys}
+        attrs = vectors[0].attrs if infer_attrs else None
 
-        if infer_attrs:
-            return cls(data, attrs=vectors[0].attrs, copy=False)
-        return cls(data, copy=False)
+        return cls._from_fastpath(data, attrs)
 
     def __eq__(self, other: object) -> bool:
         """Determine if two instances are equal.
@@ -803,7 +824,8 @@ class VectorDataset:
         Self
             Copy of class
         """
-        return type(self)(data=self.data, attrs=self.attrs, copy=True, **kwargs)
+        data = {key: value.copy() for key, value in self.data.items()}
+        return type(self)._from_fastpath(data, self.attrs, **kwargs)
 
     def select(self: VectorDataset, keys: Iterable[str], copy: bool = True) -> VectorDataset:
         """Return new class instance only containing specified keys.
@@ -823,8 +845,8 @@ class VectorDataset:
         Note that this method always returns a :class:`VectorDataset`, even if
         the calling class is a proper subclass of :class:`VectorDataset`.
         """
-        data = {key: self[key] for key in keys}
-        return VectorDataset(data=data, attrs=self.attrs, copy=copy)
+        data = {key: np.array(self[key], copy=copy) for key in keys}
+        return VectorDataset._from_fastpath(data, self.attrs)
 
     def filter(self, mask: npt.NDArray[np.bool_], copy: bool = True, **kwargs: Any) -> Self:
         """Filter :attr:`data` according to a boolean array ``mask``.
@@ -856,8 +878,8 @@ class VectorDataset:
         if mask.dtype != bool:
             raise TypeError("Parameter `mask` must be a boolean array.")
 
-        data = {key: value[mask] for key, value in self.data.items()}
-        return type(self)(data=data, attrs=self.attrs, copy=copy, **kwargs)
+        data = {key: np.array(value[mask], copy=copy) for key, value in self.data.items()}
+        return type(self)._from_fastpath(data, self.attrs, **kwargs)
 
     def sort(self, by: str | list[str]) -> Self:
         """Sort data by key(s).
@@ -1116,7 +1138,7 @@ class VectorDataset:
         cls,
         keys: Iterable[str],
         attrs: dict[str, Any] | None = None,
-        **attrs_kwargs: Any,
+        **kwargs: Any,
     ) -> Self:
         """Create instance with variables defined by ``keys`` and size 0.
 
@@ -1129,15 +1151,16 @@ class VectorDataset:
             Keys to include in empty VectorDataset instance.
         attrs : dict[str, Any] | None, optional
             Attributes to attach instance.
-        **attrs_kwargs : Any
-            Define attributes as keyword arguments.
+        **kwargs : Any
+            Additional keyword arguments passed into the constructor of the returned class.
 
         Returns
         -------
         Self
             Empty VectorDataset instance.
         """
-        return cls(data=_empty_vector_dict(keys or set()), attrs=attrs, copy=False, **attrs_kwargs)
+        data = _empty_vector_dict(keys)
+        return cls._from_fastpath(data, attrs, **kwargs)
 
     @classmethod
     def from_dict(cls, obj: dict[str, Any], copy: bool = True, **obj_kwargs: Any) -> Self:
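create_empty now builds its zero-length arrays with the module-level _empty_vector_dict and forwards extra keyword arguments through the fastpath rather than treating them all as attributes. A minimal sketch:

    from pycontrails import VectorDataset

    empty = VectorDataset.create_empty(keys=("longitude", "latitude"), attrs={"source": "example"})
    print(empty["longitude"].size)  # 0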
@@ -1216,7 +1239,7 @@ class GeoVectorDataset(VectorDataset):
 
     Parameters
     ----------
-    data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None, optional
+    data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None, optional
         Data dictionary or :class:`pandas.DataFrame` .
         Must include keys/columns ``time``, ``latitude``, ``longitude``, ``altitude`` or ``level``.
         Keyword arguments for ``time``, ``latitude``, ``longitude``, ``altitude`` or ``level``
@@ -1269,9 +1292,7 @@ class GeoVectorDataset(VectorDataset):
 
     def __init__(
         self,
-        data: (
-            dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None
-        ) = None,
+        data: dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None = None,
         *,
         longitude: npt.ArrayLike | None = None,
         latitude: npt.ArrayLike | None = None,
@@ -1279,7 +1300,7 @@ class GeoVectorDataset(VectorDataset):
         altitude_ft: npt.ArrayLike | None = None,
         level: npt.ArrayLike | None = None,
         time: npt.ArrayLike | None = None,
-        attrs: dict[str, Any] | AttrDict | None = None,
+        attrs: dict[str, Any] | None = None,
         copy: bool = True,
         **attrs_kwargs: Any,
     ) -> None:
@@ -1293,7 +1314,10 @@ class GeoVectorDataset(VectorDataset):
             and time is None
         ):
             keys = *self.required_keys, "altitude"
-            data = _empty_vector_dict(keys)
+            self.data = VectorDataDict(_empty_vector_dict(keys))
+            self.attrs = AttrDict(attrs or {})  # type: ignore[arg-type]
+            self.attrs.update(attrs_kwargs)
+            return
 
         super().__init__(data=data, attrs=attrs, copy=copy, **attrs_kwargs)
 
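GeoVectorDataset() with no data and no coordinate keywords now short-circuits: it installs empty required coordinates plus altitude and returns without entering the parent constructor. Sketch:

    from pycontrails import GeoVectorDataset

    gv = GeoVectorDataset()
    print(sorted(gv.data))  # ['altitude', 'latitude', 'longitude', 'time']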
@@ -1819,7 +1843,6 @@ class GeoVectorDataset(VectorDataset):
         latitude_buffer: tuple[float, float] = ...,
         level_buffer: tuple[float, float] = ...,
         time_buffer: tuple[np.timedelta64, np.timedelta64] = ...,
-        copy: bool = ...,
     ) -> met_module.MetDataset: ...
 
     @overload
@@ -1831,7 +1854,6 @@ class GeoVectorDataset(VectorDataset):
         latitude_buffer: tuple[float, float] = ...,
         level_buffer: tuple[float, float] = ...,
         time_buffer: tuple[np.timedelta64, np.timedelta64] = ...,
-        copy: bool = ...,
     ) -> met_module.MetDataArray: ...
 
     def downselect_met(
@@ -1845,10 +1867,13 @@ class GeoVectorDataset(VectorDataset):
             np.timedelta64(0, "h"),
             np.timedelta64(0, "h"),
         ),
-        copy: bool = True,
     ) -> met_module.MetDataType:
         """Downselect ``met`` to encompass a spatiotemporal region of the data.
 
+        .. versionchanged:: 0.54.5
+
+            Returned object is no longer copied.
+
         Parameters
         ----------
         met : MetDataset | MetDataArray
@@ -1873,8 +1898,6 @@ class GeoVectorDataset(VectorDataset):
             and ``time_buffer[1]`` on the high side.
             Units must be the same as class coordinates.
             Defaults to ``(np.timedelta64(0, "h"), np.timedelta64(0, "h"))``.
-        copy : bool
-            If returned object is a copy or view of the original. True by default.
 
         Returns
         -------
@@ -1915,7 +1938,7 @@ class GeoVectorDataset(VectorDataset):
             level=level_slice,
             time=time_slice,
         )
-        return type(met)(data, copy=copy)
+        return type(met)._from_fastpath(data)
 
     # ------------
     # I / O
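As of 0.54.5, downselect_met returns a view built through the met fastpath instead of a copy, and the copy parameter is gone. Code that mutates the result should copy the underlying xarray object explicitly. A self-contained sketch with synthetic met data (values and coordinates are arbitrary):

    import numpy as np
    import pandas as pd
    import xarray as xr

    from pycontrails import GeoVectorDataset, MetDataset

    ds = xr.Dataset(
        {
            "air_temperature": (
                ("longitude", "latitude", "level", "time"),
                220.0 * np.ones((3, 3, 2, 2)),
            )
        },
        coords={
            "longitude": [-1.0, 0.0, 1.0],
            "latitude": [-1.0, 0.0, 1.0],
            "level": [250.0, 300.0],
            "time": pd.date_range("2022-01-01", periods=2, freq="1h"),
        },
    )
    met = MetDataset(ds)

    vector = GeoVectorDataset(
        longitude=[0.0],
        latitude=[0.0],
        level=[250.0],
        time=[np.datetime64("2022-01-01T00:30")],
    )

    region = vector.downselect_met(met)  # a view of met since 0.54.5
    independent = MetDataset(region.data.copy())  # explicit copy if isolation is needed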
pycontrails/datalib/_met_utils/metsource.py

@@ -265,7 +265,7 @@ def _find_match(
 
     # list of MetVariable options
     # here we extract the first MetVariable in var that is supported
-    elif isinstance(var, list | tuple):
+    if isinstance(var, list | tuple):
         for v in var:
             # sanity check since we don't support other types as lists
             if not isinstance(v, MetVariable):
pycontrails/datalib/ecmwf/era5.py

@@ -88,9 +88,8 @@ class ERA5(ECMWFAPI):
         If None, cache is turned off.
     url : str | None
         Override the default `cdsapi <https://github.com/ecmwf/cdsapi>`_ url.
-        As of August 2024, the url for the `CDS-Beta <https://cds-beta.climate.copernicus.eu>`_
-        is "https://cds-beta.climate.copernicus.eu/api", and the url for the legacy server is
-        "https://cds.climate.copernicus.eu/api/v2". If None, the url is set
+        As of January 2025, the url for the `CDS Server <https://cds.climate.copernicus.eu>`_
+        is "https://cds.climate.copernicus.eu/api". If None, the url is set
         by the ``CDSAPI_URL`` environment variable. If this is not defined, the
         ``cdsapi`` package will determine the url.
     key : str | None
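The ERA5 docstring now documents the single post-migration CDS endpoint; the same update appears in ERA5ModelLevel below. A hedged configuration sketch (the key is a placeholder; both values can instead come from the CDSAPI_URL and CDSAPI_KEY environment variables):

    import pandas as pd

    from pycontrails.datalib.ecmwf import ERA5

    era5 = ERA5(
        time=pd.Timestamp("2022-01-01 00:00"),
        variables=["air_temperature", "specific_humidity"],
        pressure_levels=[300, 250],
        url="https://cds.climate.copernicus.eu/api",  # current CDS endpoint
        key="<your-cds-api-key>",  # placeholder; prefer the CDSAPI_KEY env var
    )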
@@ -539,12 +538,12 @@ class ERA5(ECMWFAPI):
             LOG.debug("Input dataset processed with pycontrails > 0.29")
             return ds
 
-        # For "reanalysis-era5-single-levels"
-        # then the netcdf file does not contain the dimension "level"
+        # For "reanalysis-era5-single-levels",
+        # the netcdf file does not contain the dimension "level"
         if self.is_single_level:
             ds = ds.expand_dims(level=self.pressure_levels)
 
-        # New CDS-Beta gives "valid_time" instead of "time"
+        # New CDS (Aug 2024) gives "valid_time" instead of "time"
         # and "pressure_level" instead of "level"
         if "valid_time" in ds:
             ds = ds.rename(valid_time="time")
pycontrails/datalib/ecmwf/era5_model_level.py

@@ -119,9 +119,8 @@ class ERA5ModelLevel(ECMWFAPI):
         By default, False.
     url : str | None
         Override the default `cdsapi <https://github.com/ecmwf/cdsapi>`_ url.
-        As of August 2024, the url for the `CDS-Beta <https://cds-beta.climate.copernicus.eu>`_
-        is "https://cds-beta.climate.copernicus.eu/api", and the url for the legacy server is
-        "https://cds.climate.copernicus.eu/api/v2". If None, the url is set
+        As of January 2025, the url for the `CDS Server <https://cds.climate.copernicus.eu>`_
+        is "https://cds.climate.copernicus.eu/api". If None, the url is set
         by the ``CDSAPI_URL`` environment variable. If this is not defined, the
         ``cdsapi`` package will determine the url.
     key : str | None
@@ -465,13 +464,13 @@ class ERA5ModelLevel(ECMWFAPI):
         ds_ml = xr.open_dataset(ml_target)
         lnsp = xr.open_dataarray(lnsp_target)
 
-        # New CDS-Beta gives "valid_time" instead of "time"
+        # New CDS (Aug 2024) gives "valid_time" instead of "time"
         if "valid_time" in ds_ml:
             ds_ml = ds_ml.rename(valid_time="time")
         if "valid_time" in lnsp.dims:
             lnsp = lnsp.rename(valid_time="time")
 
-        # The legacy CDS gives "level" instead of "model_level"
+        # Legacy CDS (prior to Aug 2024) gives "level" instead of "model_level"
         if "level" in ds_ml.dims:
             ds_ml = ds_ml.rename(level="model_level")
 
pycontrails/datalib/ecmwf/ifs.py

@@ -247,9 +247,7 @@ class IFS(metsource.MetDataSource):
         ds_fl = ds_fl.drop_vars(names=["hyai", "hybi", "hyam", "hybm"])
 
         # merge all datasets using the "ds_fl" dimensions as the join keys
-        ds = xr.merge([ds_fl, ds_full, ds_surface, ds_rad], join="left")  # order matters!
-
-        return ds
+        return xr.merge([ds_fl, ds_full, ds_surface, ds_rad], join="left")  # order matters!
 
     def _calc_geopotential(self, ds: xr.Dataset) -> xr.DataArray:
         warnings.warn(
pycontrails/datalib/gfs/gfs.py

@@ -570,9 +570,7 @@ class GFSForecast(metsource.MetDataSource):
             ds = ds.expand_dims("time")
 
         # drop step/number
-        ds = ds.drop_vars(["step", "nominalTop", "surface"], errors="ignore")
-
-        return ds
+        return ds.drop_vars(["step", "nominalTop", "surface"], errors="ignore")
 
     def _process_dataset(self, ds: xr.Dataset, **kwargs: Any) -> met.MetDataset:
         """Process the :class:`xr.Dataset` opened from cache or local files.
pycontrails/models/apcemm/apcemm.py

@@ -474,7 +474,7 @@ class APCEMM(models.Model):
             for coord in ("longitude", "latitude", "level")
         }
         buffers["time_buffer"] = (0, self.params["max_age"] + self.params["dt_lagrangian"])
-        met = self.source.downselect_met(self.met, **buffers, copy=False)
+        met = self.source.downselect_met(self.met, **buffers)
         model = DryAdvection(
             met=met,
             dt_integration=self.params["dt_lagrangian"],
@@ -816,7 +816,7 @@ class APCEMM(models.Model):
         # Ensure required met data is present.
         # No buffers needed for interpolation!
         vars = ap_model.met_variables + ap_model.optional_met_variables + emissions.met_variables
-        met = self.source.downselect_met(self.met, copy=False)
+        met = self.source.downselect_met(self.met)
         met.ensure_vars(vars)
         met.standardize_variables(vars)
         for var in vars:
pycontrails/models/apcemm/utils.py

@@ -214,7 +214,7 @@ def generate_apcemm_input_met(
     )
 
     # Downselect met before interpolation
-    met = vector.downselect_met(met, copy=False)
+    met = vector.downselect_met(met)
 
     # Interpolate meteorology data onto vector
     scale_humidity = humidity_scaling is not None and "specific_humidity" not in vector