pycontrails 0.54.3-cp313-cp313-macosx_11_0_arm64.whl → 0.54.5-cp313-cp313-macosx_11_0_arm64.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.

This version of pycontrails has been flagged as a potentially problematic release.
Files changed (62)
  1. pycontrails/__init__.py +2 -2
  2. pycontrails/_version.py +2 -2
  3. pycontrails/core/__init__.py +1 -1
  4. pycontrails/core/aircraft_performance.py +58 -58
  5. pycontrails/core/cache.py +7 -7
  6. pycontrails/core/fleet.py +54 -29
  7. pycontrails/core/flight.py +218 -301
  8. pycontrails/core/interpolation.py +63 -60
  9. pycontrails/core/met.py +193 -125
  10. pycontrails/core/models.py +27 -13
  11. pycontrails/core/polygon.py +15 -15
  12. pycontrails/core/rgi_cython.cpython-313-darwin.so +0 -0
  13. pycontrails/core/vector.py +119 -96
  14. pycontrails/datalib/_met_utils/metsource.py +8 -5
  15. pycontrails/datalib/ecmwf/__init__.py +14 -14
  16. pycontrails/datalib/ecmwf/common.py +1 -1
  17. pycontrails/datalib/ecmwf/era5.py +7 -7
  18. pycontrails/datalib/ecmwf/hres.py +3 -3
  19. pycontrails/datalib/ecmwf/ifs.py +1 -1
  20. pycontrails/datalib/gfs/__init__.py +6 -6
  21. pycontrails/datalib/gfs/gfs.py +2 -2
  22. pycontrails/datalib/goes.py +5 -5
  23. pycontrails/ext/empirical_grid.py +1 -1
  24. pycontrails/models/apcemm/apcemm.py +5 -5
  25. pycontrails/models/apcemm/utils.py +1 -1
  26. pycontrails/models/cocip/__init__.py +2 -2
  27. pycontrails/models/cocip/cocip.py +23 -24
  28. pycontrails/models/cocip/cocip_params.py +2 -11
  29. pycontrails/models/cocip/cocip_uncertainty.py +24 -18
  30. pycontrails/models/cocip/contrail_properties.py +331 -316
  31. pycontrails/models/cocip/output_formats.py +53 -53
  32. pycontrails/models/cocip/radiative_forcing.py +135 -131
  33. pycontrails/models/cocip/radiative_heating.py +135 -135
  34. pycontrails/models/cocip/unterstrasser_wake_vortex.py +90 -87
  35. pycontrails/models/cocip/wake_vortex.py +92 -92
  36. pycontrails/models/cocip/wind_shear.py +8 -8
  37. pycontrails/models/cocipgrid/cocip_grid.py +37 -96
  38. pycontrails/models/dry_advection.py +60 -19
  39. pycontrails/models/emissions/__init__.py +2 -2
  40. pycontrails/models/emissions/black_carbon.py +108 -108
  41. pycontrails/models/emissions/emissions.py +87 -87
  42. pycontrails/models/emissions/ffm2.py +35 -35
  43. pycontrails/models/humidity_scaling/humidity_scaling.py +23 -23
  44. pycontrails/models/issr.py +2 -2
  45. pycontrails/models/ps_model/__init__.py +1 -1
  46. pycontrails/models/ps_model/ps_aircraft_params.py +8 -4
  47. pycontrails/models/ps_model/ps_grid.py +76 -66
  48. pycontrails/models/ps_model/ps_model.py +16 -16
  49. pycontrails/models/ps_model/ps_operational_limits.py +20 -18
  50. pycontrails/models/tau_cirrus.py +8 -1
  51. pycontrails/physics/geo.py +67 -67
  52. pycontrails/physics/jet.py +79 -79
  53. pycontrails/physics/units.py +14 -14
  54. pycontrails/utils/json.py +1 -2
  55. pycontrails/utils/types.py +12 -7
  56. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/METADATA +2 -2
  57. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/NOTICE +1 -1
  58. pycontrails-0.54.5.dist-info/RECORD +111 -0
  59. pycontrails-0.54.3.dist-info/RECORD +0 -111
  60. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/LICENSE +0 -0
  61. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/WHEEL +0 -0
  62. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/top_level.txt +0 -0
pycontrails/models/cocipgrid/cocip_grid.py

@@ -11,11 +11,10 @@ from typing import TYPE_CHECKING, Any, NoReturn, TypeVar, overload
  import numpy as np
  import numpy.typing as npt
  import pandas as pd
- import xarray as xr

  import pycontrails
  from pycontrails.core import models
- from pycontrails.core.met import MetDataset
+ from pycontrails.core.met import MetDataset, maybe_downselect_mds
  from pycontrails.core.vector import GeoVectorDataset, VectorDataset
  from pycontrails.models import humidity_scaling, sac
  from pycontrails.models.cocip import cocip, contrail_properties, wake_vortex, wind_shear
@@ -74,11 +73,11 @@ class CocipGrid(models.Model):
  """

  __slots__ = (
- "rad",
- "timesteps",
+ "_target_dtype",
  "contrail",
  "contrail_list",
- "_target_dtype",
+ "rad",
+ "timesteps",
  )

  name = "contrail_grid"
@@ -241,12 +240,12 @@ class CocipGrid(models.Model):
  existing_vectors: Iterator[GeoVectorDataset] = iter(())

  for time_idx, time_end in enumerate(self.timesteps):
- met, rad = self._maybe_downselect_met_rad(met, rad, time_end)
-
  evolved_this_step = []
  ef_summary_this_step = []
  downwash_vectors_this_step = []
  for vector in self._generate_new_vectors(time_idx):
+ t0 = vector["time"].min()
+ met, rad = self._maybe_downselect_met_rad(met, rad, t0, time_end)
  downwash, verbose_dict = _run_downwash(vector, met, rad, self.params)

  if downwash:
@@ -264,6 +263,8 @@ class CocipGrid(models.Model):
  pbar.update()

  for vector in itertools.chain(existing_vectors, downwash_vectors_this_step):
+ t0 = vector["time"].min()
+ met, rad = self._maybe_downselect_met_rad(met, rad, t0, time_end)
  contrail, ef = _evolve_vector(
  vector,
  met=met,
@@ -304,83 +305,25 @@ class CocipGrid(models.Model):
  self,
  met: MetDataset | None,
  rad: MetDataset | None,
- time_end: np.datetime64,
+ t0: np.datetime64,
+ t1: np.datetime64,
  ) -> tuple[MetDataset, MetDataset]:
- """Downselect ``self.met`` and ``self.rad`` if necessary to cover ``time_end``.
+ """Downselect ``self.met`` and ``self.rad`` if necessary to cover ``[t0, t1]``.
+
+ This implementation assumes ``t0 <= t1``, but does not enforce this.

  If the currently used ``met`` and ``rad`` slices do not include the time
- ``time_end``, new slices are selected from the larger ``self.met`` and
- ``self.rad`` data. The slicing only occurs in the time domain.
+ interval ``[t0, t1]``, new slices are selected from the larger ``self.met``
+ and ``self.rad`` data. The slicing only occurs in the time domain.

- The end of currently-used ``met`` and ``rad`` will be used as the start
- of newly-selected met slices when possible to avoid losing and re-loading
- already-loaded met data.
+ Existing slices from ``met`` and ``rad`` will be used when possible to avoid
+ losing and re-loading already-loaded met data.

- If ``self.params["downselect_met"]`` is True, :func:`_downselect_met` has
+ If ``self.params["downselect_met"]`` is True, the :func:`_downselect_met` has
  already performed a spatial downselection of the met data.
  """
-
- if met is None:
- # idx is the first index at which self.met.variables["time"].to_numpy() >= time_end
- idx = np.searchsorted(self.met.indexes["time"].to_numpy(), time_end).item()
- sl = slice(max(0, idx - 1), idx + 1)
- logger.debug("Select met slice %s", sl)
- met = MetDataset(self.met.data.isel(time=sl), copy=False)
-
- elif time_end > met.indexes["time"].to_numpy()[-1]:
- current_times = met.indexes["time"].to_numpy()
- all_times = self.met.indexes["time"].to_numpy()
- # idx is the first index at which all_times >= time_end
- idx = np.searchsorted(all_times, time_end).item()
- sl = slice(max(0, idx - 1), idx + 1)
-
- # case 1: cannot re-use end of current met as start of new met
- if current_times[-1] != all_times[sl.start]:
- logger.debug("Select met slice %s", sl)
- met = MetDataset(self.met.data.isel(time=sl), copy=False)
- # case 2: can re-use end of current met plus one step of new met
- elif sl.start < all_times.size - 1:
- sl = slice(sl.start + 1, sl.stop)
- logger.debug("Reuse end of met and select met slice %s", sl)
- met = MetDataset(
- xr.concat((met.data.isel(time=[-1]), self.met.data.isel(time=sl)), dim="time"),
- copy=False,
- )
- # case 3: can re-use end of current met and nothing else
- else:
- logger.debug("Reuse end of met")
- met = MetDataset(met.data.isel(time=[-1]), copy=False)
-
- if rad is None:
- # idx is the first index at which self.rad.variables["time"].to_numpy() >= time_end
- idx = np.searchsorted(self.rad.indexes["time"].to_numpy(), time_end).item()
- sl = slice(max(0, idx - 1), idx + 1)
- logger.debug("Select rad slice %s", sl)
- rad = MetDataset(self.rad.data.isel(time=sl), copy=False)
-
- elif time_end > rad.indexes["time"].to_numpy()[-1]:
- current_times = rad.indexes["time"].to_numpy()
- all_times = self.rad.indexes["time"].to_numpy()
- # idx is the first index at which all_times >= time_end
- idx = np.searchsorted(all_times, time_end).item()
- sl = slice(max(0, idx - 1), idx + 1)
-
- # case 1: cannot re-use end of current rad as start of new rad
- if current_times[-1] != all_times[sl.start]:
- logger.debug("Select rad slice %s", sl)
- rad = MetDataset(self.rad.data.isel(time=sl), copy=False)
- # case 2: can re-use end of current rad plus one step of new rad
- elif sl.start < all_times.size - 1:
- sl = slice(sl.start + 1, sl.stop)
- logger.debug("Reuse end of rad and select rad slice %s", sl)
- rad = MetDataset(
- xr.concat((rad.data.isel(time=[-1]), self.rad.data.isel(time=sl)), dim="time"),
- copy=False,
- )
- # case 3: can re-use end of current rad and nothing else
- else:
- logger.debug("Reuse end of rad")
- rad = MetDataset(rad.data.isel(time=[-1]), copy=False)
+ met = maybe_downselect_mds(self.met, met, t0, t1)
+ rad = maybe_downselect_mds(self.rad, rad, t0, t1)

  return met, rad
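All of the deleted branches above implement one idea: keep only the few time steps of the full dataset that bracket the requested window, reusing the slice already in memory when it still covers that window. A minimal sketch of that time-window selection on a bare xarray.Dataset is shown below for orientation only; select_time_window is a hypothetical name and this is not the pycontrails implementation of maybe_downselect_mds.

import numpy as np
import xarray as xr


def select_time_window(full: xr.Dataset, current: xr.Dataset | None, t0, t1) -> xr.Dataset:
    """Return the smallest time slice of ``full`` covering ``[t0, t1]``, reusing ``current`` if possible."""
    if current is not None:
        times = current["time"].to_numpy()
        if times[0] <= t0 and times[-1] >= t1:
            # The already-loaded slice still covers the window; avoid re-slicing.
            return current

    all_times = full["time"].to_numpy()
    # First index with all_times >= t0, stepped back one so the window start is bracketed.
    i0 = max(int(np.searchsorted(all_times, t0)) - 1, 0)
    # First index with all_times >= t1, plus one so the window end is bracketed.
    i1 = int(np.searchsorted(all_times, t1)) + 1
    return full.isel(time=slice(i0, i1))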
 
@@ -671,7 +614,7 @@ class CocipGrid(models.Model):
  for idx, time in enumerate(times_in_filt):
  # For now, sticking with the convention that every vector should
  # have a constant time value.
- source_slice = MetDataset(self.source.data.sel(time=[time]))
+ source_slice = MetDataset._from_fastpath(self.source.data.sel(time=[time]))

  # Convert the 4D grid to a vector
  vector = source_slice.to_vector()
@@ -806,10 +749,10 @@ class CocipGrid(models.Model):

  @staticmethod
  def create_source(
- level: npt.NDArray[np.float64] | list[float] | float,
+ level: npt.NDArray[np.floating] | list[float] | float,
  time: npt.NDArray[np.datetime64] | list[np.datetime64] | np.datetime64,
- longitude: npt.NDArray[np.float64] | list[float] | None = None,
- latitude: npt.NDArray[np.float64] | list[float] | None = None,
+ longitude: npt.NDArray[np.floating] | list[float] | None = None,
+ latitude: npt.NDArray[np.floating] | list[float] | None = None,
  lon_step: float = 1.0,
  lat_step: float = 1.0,
  ) -> MetDataset:
@@ -821,7 +764,7 @@ class CocipGrid(models.Model):

  Parameters
  ----------
- level : level: npt.NDArray[np.float64] | list[float] | float
+ level : level: npt.NDArray[np.floating] | list[float] | float
  Pressure levels for gridded cocip.
  To avoid interpolating outside of the passed ``met`` and ``rad`` data, this
  parameter should avoid the extreme values of the ``met`` and `rad` levels.
@@ -829,7 +772,7 @@ class CocipGrid(models.Model):
  ``met.data['level'].values[1: -1]``.
  time: npt.NDArray[np.datetime64 | list[np.datetime64] | np.datetime64,
  One or more time values for gridded cocip.
- longitude, latitude : npt.NDArray[np.float64] | list[float], optional
+ longitude, latitude : npt.NDArray[np.floating] | list[float], optional
  Longitude and latitude arrays, by default None. If not specified, values of
  ``lon_step`` and ``lat_step`` are used to define ``longitude`` and ``latitude``.
  lon_step, lat_step : float, optional
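The recurring np.float64 → np.floating change in these annotations widens the accepted array dtypes from exactly float64 to any NumPy floating-point precision. A small self-contained illustration (the function name is hypothetical, not part of pycontrails):

import numpy as np
import numpy.typing as npt


def mean_level(level: npt.NDArray[np.floating]) -> float:
    # np.floating covers float16, float32 and float64, so single-precision
    # grids no longer need an up-cast just to satisfy the annotation.
    return float(level.mean())


mean_level(np.array([250.0, 300.0], dtype=np.float32))
mean_level(np.array([250.0, 300.0], dtype=np.float64))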
@@ -1458,7 +1401,7 @@ def simulate_wake_vortex_downwash(

  # Experimental segment-free model
  if _is_segment_free_mode(vector):
- return GeoVectorDataset(data, attrs=vector.attrs, copy=True)
+ return GeoVectorDataset._from_fastpath(data, attrs=vector.attrs).copy()

  # Stored in `_generate_new_grid_vectors`
  data["longitude_head"] = vector["longitude_head"]
@@ -1477,7 +1420,7 @@ def simulate_wake_vortex_downwash(
  # segment_length variable.
  data["segment_length"] = np.full_like(data["longitude"], segment_length)

- return GeoVectorDataset(data, attrs=vector.attrs, copy=True)
+ return GeoVectorDataset._from_fastpath(data, attrs=vector.attrs).copy()


  def find_initial_persistent_contrails(
@@ -2078,7 +2021,7 @@ def advect(
  assert _is_segment_free_mode(contrail)
  assert dt_tail is None
  assert dt_head is None
- return GeoVectorDataset(data, attrs=contrail.attrs, copy=True)
+ return GeoVectorDataset._from_fastpath(data, attrs=contrail.attrs).copy()

  longitude_head = contrail["longitude_head"]
  latitude_head = contrail["latitude_head"]
@@ -2120,7 +2063,7 @@ def advect(
  data["segment_length"] = segment_length_t2
  data["head_tail_dt"] = head_tail_dt_t2

- return GeoVectorDataset(data, attrs=contrail.attrs, copy=True)
+ return GeoVectorDataset._from_fastpath(data, attrs=contrail.attrs).copy()


  def _aggregate_ef_summary(vector_list: list[VectorDataset]) -> VectorDataset | None:
@@ -2173,7 +2116,7 @@ def _aggregate_ef_summary(vector_list: list[VectorDataset]) -> VectorDataset | N

  def result_to_metdataset(
  result: VectorDataset | None,
- verbose_dict: dict[str, npt.NDArray[np.float64]],
+ verbose_dict: dict[str, npt.NDArray[np.floating]],
  source: MetDataset,
  nominal_segment_length: float,
  attrs: dict[str, str],
@@ -2185,7 +2128,7 @@ def result_to_metdataset(
  result : VectorDataset | None
  Aggregated data arising from contrail evolution. Expected to contain keys:
  ``index``, ``age``, ``ef``.
- verbose_dict : dict[str, npt.NDArray[np.float64]]:
+ verbose_dict : dict[str, npt.NDArray[np.floating]]:
  Verbose outputs to attach to results.
  source : MetDataset
  :attr:`CocipGrid.`source` data on which to attach results.
@@ -2244,9 +2187,9 @@ def result_to_metdataset(

  def result_merge_source(
  result: VectorDataset | None,
- verbose_dict: dict[str, npt.NDArray[np.float64]],
+ verbose_dict: dict[str, npt.NDArray[np.floating]],
  source: GeoVectorDataset,
- nominal_segment_length: float | npt.NDArray[np.float64],
+ nominal_segment_length: float | npt.NDArray[np.floating],
  attrs: dict[str, str],
  ) -> GeoVectorDataset:
  """Merge ``results`` and ``verbose_dict`` onto ``source``."""
@@ -2282,7 +2225,7 @@ def _concat_verbose_dicts(
  verbose_dicts: list[dict[str, pd.Series]],
  source_size: int,
  verbose_outputs_formation: set[str],
- ) -> dict[str, npt.NDArray[np.float64]]:
+ ) -> dict[str, npt.NDArray[np.floating]]:
  # Concatenate the values and return
  ret: dict[str, np.ndarray] = {}
  for key in verbose_outputs_formation:
@@ -2375,7 +2318,7 @@ def _warn_not_wrap(met: MetDataset) -> None:
  )


- def _get_uncertainty_params(contrail: VectorDataset) -> dict[str, npt.NDArray[np.float64]]:
+ def _get_uncertainty_params(contrail: VectorDataset) -> dict[str, npt.NDArray[np.floating]]:
  """Return uncertainty parameters in ``contrail``.

  This function assumes the underlying humidity scaling model is
@@ -2398,7 +2341,7 @@ def _get_uncertainty_params(contrail: VectorDataset) -> dict[str, npt.NDArray[np

  Returns
  -------
- dict[str, npt.NDArray[np.float64]]
+ dict[str, npt.NDArray[np.floating]]
  Dictionary of uncertainty parameters.
  """
  keys = (
@@ -2494,7 +2437,6 @@ def _downselect_met(
  longitude_buffer=longitude_buffer,
  level_buffer=level_buffer,
  time_buffer=(t0, t1),
- copy=False,
  )

  rad = source.downselect_met(
@@ -2502,7 +2444,6 @@ def _downselect_met(
  latitude_buffer=latitude_buffer,
  longitude_buffer=longitude_buffer,
  time_buffer=(t0, t1),
- copy=False,
  )

  return met, rad

pycontrails/models/dry_advection.py

@@ -3,21 +3,34 @@
  from __future__ import annotations

  import dataclasses
+ import sys
  from typing import Any, NoReturn, overload

+ if sys.version_info >= (3, 12):
+ from typing import override
+ else:
+ from typing_extensions import override
+
  import numpy as np
  import numpy.typing as npt
+ import pandas as pd

  from pycontrails.core import models
- from pycontrails.core.met import MetDataset
- from pycontrails.core.met_var import AirTemperature, EastwardWind, NorthwardWind, VerticalVelocity
+ from pycontrails.core.met import MetDataset, maybe_downselect_mds
+ from pycontrails.core.met_var import (
+ AirTemperature,
+ EastwardWind,
+ MetVariable,
+ NorthwardWind,
+ VerticalVelocity,
+ )
  from pycontrails.core.vector import GeoVectorDataset
  from pycontrails.models.cocip import contrail_properties, wind_shear
  from pycontrails.physics import geo, thermo


  @dataclasses.dataclass
- class DryAdvectionParams(models.ModelParams):
+ class DryAdvectionParams(models.AdvectionBuffers):
  """Parameters for the :class:`DryAdvection` model."""

  #: Apply Euler's method with a fixed step size of ``dt_integration``. Advected waypoints
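The new version-gated import pulls typing.override from the standard library on Python 3.12+ and from typing_extensions on older interpreters. The sketch below shows the pattern in isolation and what the decorator provides; Base and Child are made-up names, not pycontrails classes.

import sys

if sys.version_info >= (3, 12):
    from typing import override
else:
    from typing_extensions import override


class Base:
    def downselect_met(self) -> None: ...


class Child(Base):
    @override
    def downselect_met(self) -> None:
        # A static type checker now verifies that Base really defines
        # downselect_met; renaming or removing the base method is flagged
        # instead of silently leaving an orphaned override.
        ...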
@@ -84,7 +97,12 @@ class DryAdvection(models.Model):

  name = "dry_advection"
  long_name = "Emission plume advection without sedimentation"
- met_variables = AirTemperature, EastwardWind, NorthwardWind, VerticalVelocity
+ met_variables: tuple[MetVariable, ...] = (
+ AirTemperature,
+ EastwardWind,
+ NorthwardWind,
+ VerticalVelocity,
+ )
  default_params = DryAdvectionParams

  met: MetDataset
@@ -122,6 +140,10 @@ class DryAdvection(models.Model):
  self.update_params(params)
  self.set_source(source)
  self.source = self.require_source_type(GeoVectorDataset)
+ self.downselect_met()
+ if not self.source.coords_intersect_met(self.met).any():
+ msg = "No source coordinates intersect met data."
+ raise ValueError(msg)

  self.source = self._prepare_source()

@@ -134,18 +156,24 @@ class DryAdvection(models.Model):
  max_depth = self.params["max_depth"]

  source_time = self.source["time"]
- t0 = source_time.min()
+ t0 = pd.Timestamp(source_time.min()).floor(pd.Timedelta(dt_integration)).to_numpy()
  t1 = source_time.max()
  timesteps = np.arange(t0 + dt_integration, t1 + dt_integration + max_age, dt_integration)

- vector = None
+ vector = GeoVectorDataset()
+ met = None

  evolved = []
  for t in timesteps:
  filt = (source_time < t) & (source_time >= t - dt_integration)
- vector = self.source.filter(filt, copy=False) + vector
+ vector = vector + self.source.filter(filt, copy=False)
+
+ t0 = vector["time"].min()
+ t1 = vector["time"].max()
+ met = maybe_downselect_mds(self.met, met, t0, t1)
+
  vector = _evolve_one_step(
- self.met,
+ met,
  vector,
  t,
  sedimentation_rate=sedimentation_rate,
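Flooring t0 means the integration grid now starts on a boundary aligned with dt_integration rather than at the exact first source time. A standalone illustration of the pandas call chain used above, with made-up times:

import numpy as np
import pandas as pd

dt_integration = np.timedelta64(5, "m")
first_source_time = np.datetime64("2022-03-01T00:47:13")

# Round down to the last 5-minute boundary at or before the first source time.
t0 = pd.Timestamp(first_source_time).floor(pd.Timedelta(dt_integration)).to_numpy()
print(t0)  # 2022-03-01T00:45:00 (as datetime64[ns])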
@@ -197,7 +225,7 @@ class DryAdvection(models.Model):
  raise ValueError(
  "If 'azimuth' is None, then 'width' and 'depth' must also be None."
  )
- return GeoVectorDataset(self.source.select(columns, copy=False), copy=False)
+ return GeoVectorDataset._from_fastpath(self.source.select(columns, copy=False).data)

  if "azimuth" not in self.source:
  self.source["azimuth"] = np.full_like(self.source["longitude"], azimuth)
@@ -223,7 +251,19 @@ class DryAdvection(models.Model):
  width, depth, sigma_yz=0.0
  )

- return GeoVectorDataset(self.source.select(columns, copy=False), copy=False)
+ return GeoVectorDataset._from_fastpath(self.source.select(columns, copy=False).data)
+
+ @override
+ def downselect_met(self) -> None:
+ if not self.params["downselect_met"]:
+ return
+
+ buffers = {
+ f"{coord}_buffer": self.params[f"met_{coord}_buffer"]
+ for coord in ("longitude", "latitude", "level")
+ }
+ buffers["time_buffer"] = (np.timedelta64(0, "ns"), self.params["max_age"])
+ self.met = self.source.downselect_met(self.met, **buffers)


  def _perform_interp_for_step(
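The dict comprehension in the new downselect_met override assembles the buffer keyword arguments for source.downselect_met from the model parameters. Expanded by hand, with placeholder values standing in for whatever the params actually contain, it is equivalent to:

import numpy as np

# Hypothetical parameter values, for illustration only.
params = {
    "met_longitude_buffer": (10.0, 10.0),
    "met_latitude_buffer": (10.0, 10.0),
    "met_level_buffer": (40.0, 40.0),
    "max_age": np.timedelta64(12, "h"),
}

buffers = {
    f"{coord}_buffer": params[f"met_{coord}_buffer"]
    for coord in ("longitude", "latitude", "level")
}
buffers["time_buffer"] = (np.timedelta64(0, "ns"), params["max_age"])

# buffers is now:
# {
#     "longitude_buffer": (10.0, 10.0),
#     "latitude_buffer": (10.0, 10.0),
#     "level_buffer": (40.0, 40.0),
#     "time_buffer": (np.timedelta64(0, "ns"), np.timedelta64(12, "h")),
# }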
@@ -237,7 +277,6 @@ def _perform_interp_for_step(
  vector.setdefault("level", vector.level)
  air_pressure = vector.setdefault("air_pressure", vector.air_pressure)

- air_temperature = models.interpolate_met(met, vector, "air_temperature", **interp_kwargs)
  models.interpolate_met(met, vector, "northward_wind", "v_wind", **interp_kwargs)
  models.interpolate_met(met, vector, "eastward_wind", "u_wind", **interp_kwargs)
  models.interpolate_met(
@@ -253,6 +292,7 @@ def _perform_interp_for_step(
  # Early exit for pointwise only simulation
  return

+ air_temperature = models.interpolate_met(met, vector, "air_temperature", **interp_kwargs)
  air_pressure_lower = thermo.pressure_dz(air_temperature, air_pressure, dz_m)
  vector["air_pressure_lower"] = air_pressure_lower
  level_lower = air_pressure_lower / 100.0
@@ -459,15 +499,16 @@ def _evolve_one_step(
  dt, # type: ignore[arg-type]
  )

- out = GeoVectorDataset(
- longitude=longitude_2,
- latitude=latitude_2,
- level=level_2,
- time=np.full(longitude_2.shape, t),
- copy=False,
+ out = GeoVectorDataset._from_fastpath(
+ {
+ "longitude": longitude_2,
+ "latitude": latitude_2,
+ "level": level_2,
+ "time": np.full(longitude_2.shape, t),
+ "age": vector["age"] + dt,
+ "waypoint": vector["waypoint"],
+ }
  )
- out["age"] = vector["age"] + dt
- out["waypoint"] = vector["waypoint"]

  azimuth = vector.get("azimuth")
  if azimuth is None:

pycontrails/models/emissions/__init__.py

@@ -11,10 +11,10 @@ from pycontrails.models.emissions.emissions import (
  )

  __all__ = [
- "Emissions",
- "EmissionsParams",
  "EDBGaseous",
  "EDBnvpm",
+ "Emissions",
+ "EmissionsParams",
  "load_default_aircraft_engine_mapping",
  "load_engine_nvpm_profile_from_edb",
  "load_engine_params_from_edb",