pycontrails-0.54.2-cp313-cp313-macosx_11_0_arm64.whl → pycontrails-0.54.4-cp313-cp313-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycontrails/__init__.py +2 -2
- pycontrails/_version.py +2 -2
- pycontrails/core/__init__.py +1 -1
- pycontrails/core/aircraft_performance.py +75 -61
- pycontrails/core/cache.py +7 -7
- pycontrails/core/fleet.py +25 -21
- pycontrails/core/flight.py +215 -301
- pycontrails/core/interpolation.py +56 -56
- pycontrails/core/met.py +48 -39
- pycontrails/core/models.py +25 -11
- pycontrails/core/polygon.py +15 -15
- pycontrails/core/rgi_cython.cpython-313-darwin.so +0 -0
- pycontrails/core/vector.py +22 -22
- pycontrails/datalib/_met_utils/metsource.py +8 -5
- pycontrails/datalib/ecmwf/__init__.py +14 -14
- pycontrails/datalib/ecmwf/common.py +1 -1
- pycontrails/datalib/ecmwf/era5.py +7 -7
- pycontrails/datalib/ecmwf/hres.py +3 -3
- pycontrails/datalib/ecmwf/ifs.py +1 -1
- pycontrails/datalib/ecmwf/variables.py +1 -0
- pycontrails/datalib/gfs/__init__.py +6 -6
- pycontrails/datalib/gfs/gfs.py +2 -2
- pycontrails/datalib/goes.py +5 -5
- pycontrails/datalib/landsat.py +5 -8
- pycontrails/datalib/sentinel.py +7 -11
- pycontrails/ext/bada.py +3 -2
- pycontrails/ext/empirical_grid.py +1 -1
- pycontrails/ext/synthetic_flight.py +3 -2
- pycontrails/models/accf.py +40 -19
- pycontrails/models/apcemm/apcemm.py +5 -4
- pycontrails/models/cocip/__init__.py +2 -2
- pycontrails/models/cocip/cocip.py +16 -17
- pycontrails/models/cocip/cocip_params.py +2 -11
- pycontrails/models/cocip/cocip_uncertainty.py +24 -18
- pycontrails/models/cocip/contrail_properties.py +331 -316
- pycontrails/models/cocip/output_formats.py +53 -53
- pycontrails/models/cocip/radiative_forcing.py +135 -131
- pycontrails/models/cocip/radiative_heating.py +135 -135
- pycontrails/models/cocip/unterstrasser_wake_vortex.py +90 -87
- pycontrails/models/cocip/wake_vortex.py +92 -92
- pycontrails/models/cocip/wind_shear.py +8 -8
- pycontrails/models/cocipgrid/cocip_grid.py +118 -107
- pycontrails/models/dry_advection.py +59 -58
- pycontrails/models/emissions/__init__.py +2 -2
- pycontrails/models/emissions/black_carbon.py +108 -108
- pycontrails/models/emissions/emissions.py +85 -85
- pycontrails/models/emissions/ffm2.py +35 -35
- pycontrails/models/humidity_scaling/humidity_scaling.py +23 -23
- pycontrails/models/ps_model/__init__.py +3 -2
- pycontrails/models/ps_model/ps_aircraft_params.py +11 -6
- pycontrails/models/ps_model/ps_grid.py +256 -60
- pycontrails/models/ps_model/ps_model.py +18 -21
- pycontrails/models/ps_model/ps_operational_limits.py +58 -69
- pycontrails/models/tau_cirrus.py +8 -1
- pycontrails/physics/geo.py +216 -67
- pycontrails/physics/jet.py +220 -90
- pycontrails/physics/static/iata-cargo-load-factors-20241115.csv +71 -0
- pycontrails/physics/static/iata-passenger-load-factors-20241115.csv +71 -0
- pycontrails/physics/units.py +14 -14
- pycontrails/utils/json.py +1 -2
- pycontrails/utils/types.py +12 -7
- {pycontrails-0.54.2.dist-info → pycontrails-0.54.4.dist-info}/METADATA +10 -10
- {pycontrails-0.54.2.dist-info → pycontrails-0.54.4.dist-info}/NOTICE +1 -1
- pycontrails-0.54.4.dist-info/RECORD +111 -0
- {pycontrails-0.54.2.dist-info → pycontrails-0.54.4.dist-info}/WHEEL +1 -1
- pycontrails-0.54.2.dist-info/RECORD +0 -109
- {pycontrails-0.54.2.dist-info → pycontrails-0.54.4.dist-info}/LICENSE +0 -0
- {pycontrails-0.54.2.dist-info → pycontrails-0.54.4.dist-info}/top_level.txt +0 -0
pycontrails/models/cocipgrid/cocip_grid.py

@@ -74,11 +74,11 @@ class CocipGrid(models.Model):
     """

     __slots__ = (
-        "
-        "timesteps",
+        "_target_dtype",
         "contrail",
         "contrail_list",
-        "
+        "rad",
+        "timesteps",
     )

     name = "contrail_grid"
@@ -241,12 +241,12 @@ class CocipGrid(models.Model):
         existing_vectors: Iterator[GeoVectorDataset] = iter(())

         for time_idx, time_end in enumerate(self.timesteps):
-            met, rad = self._maybe_downselect_met_rad(met, rad, time_end)
-
             evolved_this_step = []
             ef_summary_this_step = []
             downwash_vectors_this_step = []
             for vector in self._generate_new_vectors(time_idx):
+                t0 = vector["time"].min()
+                met, rad = self._maybe_downselect_met_rad(met, rad, t0, time_end)
                 downwash, verbose_dict = _run_downwash(vector, met, rad, self.params)

                 if downwash:
@@ -264,6 +264,8 @@ class CocipGrid(models.Model):
                     pbar.update()

             for vector in itertools.chain(existing_vectors, downwash_vectors_this_step):
+                t0 = vector["time"].min()
+                met, rad = self._maybe_downselect_met_rad(met, rad, t0, time_end)
                 contrail, ef = _evolve_vector(
                     vector,
                     met=met,
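A short illustration of the per-vector time window introduced in the two hunks above (values are hypothetical, not from the package): each vector is now downselected against the interval from its earliest waypoint time to the current ``time_end``, rather than against ``time_end`` alone.

import numpy as np

# Hypothetical vector times and a timestep boundary, to show the window shape.
vector_time = np.array(["2022-03-01T00:05", "2022-03-01T00:35"], dtype="datetime64[ns]")
time_end = np.datetime64("2022-03-01T01:00")

t0 = vector_time.min()
# met and rad only need to span the closed interval [t0, time_end] for this vector.
print(t0, time_end)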
@@ -304,83 +306,25 @@ class CocipGrid(models.Model):
         self,
         met: MetDataset | None,
         rad: MetDataset | None,
-
+        t0: np.datetime64,
+        t1: np.datetime64,
     ) -> tuple[MetDataset, MetDataset]:
-        """Downselect ``self.met`` and ``self.rad`` if necessary to cover ``
+        """Downselect ``self.met`` and ``self.rad`` if necessary to cover ``[t0, t1]``.
+
+        This implementation assumes ``t0 <= t1``, but does not enforce this.

         If the currently used ``met`` and ``rad`` slices do not include the time
-        ``
-        ``self.rad`` data. The slicing only occurs in the time domain.
+        interval ``[t0, t1]``, new slices are selected from the larger ``self.met``
+        and ``self.rad`` data. The slicing only occurs in the time domain.

-
-
-        already-loaded met data.
+        Existing slices from ``met`` and ``rad`` will be used when possible to avoid
+        losing and re-loading already-loaded met data.

-        If ``self.params["downselect_met"]`` is True, :func:`_downselect_met` has
+        If ``self.params["downselect_met"]`` is True, the :func:`_downselect_met` has
         already performed a spatial downselection of the met data.
         """
-
-
-            # idx is the first index at which self.met.variables["time"].to_numpy() >= time_end
-            idx = np.searchsorted(self.met.indexes["time"].to_numpy(), time_end).item()
-            sl = slice(max(0, idx - 1), idx + 1)
-            logger.debug("Select met slice %s", sl)
-            met = MetDataset(self.met.data.isel(time=sl), copy=False)
-
-        elif time_end > met.indexes["time"].to_numpy()[-1]:
-            current_times = met.indexes["time"].to_numpy()
-            all_times = self.met.indexes["time"].to_numpy()
-            # idx is the first index at which all_times >= time_end
-            idx = np.searchsorted(all_times, time_end).item()
-            sl = slice(max(0, idx - 1), idx + 1)
-
-            # case 1: cannot re-use end of current met as start of new met
-            if current_times[-1] != all_times[sl.start]:
-                logger.debug("Select met slice %s", sl)
-                met = MetDataset(self.met.data.isel(time=sl), copy=False)
-            # case 2: can re-use end of current met plus one step of new met
-            elif sl.start < all_times.size - 1:
-                sl = slice(sl.start + 1, sl.stop)
-                logger.debug("Reuse end of met and select met slice %s", sl)
-                met = MetDataset(
-                    xr.concat((met.data.isel(time=[-1]), self.met.data.isel(time=sl)), dim="time"),
-                    copy=False,
-                )
-            # case 3: can re-use end of current met and nothing else
-            else:
-                logger.debug("Reuse end of met")
-                met = MetDataset(met.data.isel(time=[-1]), copy=False)
-
-        if rad is None:
-            # idx is the first index at which self.rad.variables["time"].to_numpy() >= time_end
-            idx = np.searchsorted(self.rad.indexes["time"].to_numpy(), time_end).item()
-            sl = slice(max(0, idx - 1), idx + 1)
-            logger.debug("Select rad slice %s", sl)
-            rad = MetDataset(self.rad.data.isel(time=sl), copy=False)
-
-        elif time_end > rad.indexes["time"].to_numpy()[-1]:
-            current_times = rad.indexes["time"].to_numpy()
-            all_times = self.rad.indexes["time"].to_numpy()
-            # idx is the first index at which all_times >= time_end
-            idx = np.searchsorted(all_times, time_end).item()
-            sl = slice(max(0, idx - 1), idx + 1)
-
-            # case 1: cannot re-use end of current rad as start of new rad
-            if current_times[-1] != all_times[sl.start]:
-                logger.debug("Select rad slice %s", sl)
-                rad = MetDataset(self.rad.data.isel(time=sl), copy=False)
-            # case 2: can re-use end of current rad plus one step of new rad
-            elif sl.start < all_times.size - 1:
-                sl = slice(sl.start + 1, sl.stop)
-                logger.debug("Reuse end of rad and select rad slice %s", sl)
-                rad = MetDataset(
-                    xr.concat((rad.data.isel(time=[-1]), self.rad.data.isel(time=sl)), dim="time"),
-                    copy=False,
-                )
-            # case 3: can re-use end of current rad and nothing else
-            else:
-                logger.debug("Reuse end of rad")
-                rad = MetDataset(rad.data.isel(time=[-1]), copy=False)
+        met = _maybe_downselect_mds(self.met, met, t0, t1)
+        rad = _maybe_downselect_mds(self.rad, rad, t0, t1)

         return met, rad

@@ -806,19 +750,22 @@ class CocipGrid(models.Model):

     @staticmethod
     def create_source(
-        level: npt.NDArray[np.
+        level: npt.NDArray[np.floating] | list[float] | float,
         time: npt.NDArray[np.datetime64] | list[np.datetime64] | np.datetime64,
-        longitude: npt.NDArray[np.
-        latitude: npt.NDArray[np.
+        longitude: npt.NDArray[np.floating] | list[float] | None = None,
+        latitude: npt.NDArray[np.floating] | list[float] | None = None,
         lon_step: float = 1.0,
         lat_step: float = 1.0,
     ) -> MetDataset:
         """
         Shortcut to create a :class:`MetDataset` source from coordinate arrays.

+        .. versionchanged:: 0.54.3
+            By default, the returned latitude values now extend to the poles.
+
         Parameters
         ----------
-        level : level: npt.NDArray[np.
+        level : level: npt.NDArray[np.floating] | list[float] | float
            Pressure levels for gridded cocip.
            To avoid interpolating outside of the passed ``met`` and ``rad`` data, this
            parameter should avoid the extreme values of the ``met`` and `rad` levels.
@@ -826,11 +773,9 @@ class CocipGrid(models.Model):
            ``met.data['level'].values[1: -1]``.
        time: npt.NDArray[np.datetime64 | list[np.datetime64] | np.datetime64,
            One or more time values for gridded cocip.
-        longitude, latitude : npt.NDArray[np.
+        longitude, latitude : npt.NDArray[np.floating] | list[float], optional
            Longitude and latitude arrays, by default None. If not specified, values of
            ``lon_step`` and ``lat_step`` are used to define ``longitude`` and ``latitude``.
-            To avoid model degradation at the poles, latitude values are expected to be
-            between -80 and 80 degrees.
        lon_step, lat_step : float, optional
            Longitude and latitude resolution, by default 1.0.
            Only used if parameter ``longitude`` (respective ``latitude``) not specified.
@@ -847,15 +792,11 @@ class CocipGrid(models.Model):
         if longitude is None:
             longitude = np.arange(-180, 180, lon_step, dtype=float)
         if latitude is None:
-            latitude = np.arange(-
-
-        out = MetDataset.from_coords(longitude=longitude, latitude=latitude, level=level, time=time)
+            latitude = np.arange(-90, 90.000001, lat_step, dtype=float)

-
-
-
-
-        return out
+        return MetDataset.from_coords(
+            longitude=longitude, latitude=latitude, level=level, time=time
+        )


 ################################
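A hedged usage sketch of the updated ``CocipGrid.create_source`` defaults (illustrative values; the returned :class:`MetDataset` is assumed to expose its underlying xarray Dataset via ``.data``):

import numpy as np
from pycontrails.models.cocipgrid import CocipGrid

source = CocipGrid.create_source(
    level=[250.0, 300.0],
    time=np.datetime64("2022-03-01T00:00"),
    lon_step=1.0,
    lat_step=1.0,
)
# With no explicit latitude, the grid now spans the poles (previously roughly -80 to 80).
print(source.data["latitude"].values[[0, -1]])  # expected: [-90.  90.]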
@@ -2054,10 +1995,13 @@ def advect(
     time_t2 = time + dt
     age_t2 = age + dt

-    longitude_t2 = geo.
-        longitude=longitude,
+    longitude_t2, latitude_t2 = geo.advect_horizontal(
+        longitude=longitude,
+        latitude=latitude,
+        u_wind=u_wind,
+        v_wind=v_wind,
+        dt=dt,
     )
-    latitude_t2 = geo.advect_latitude(latitude=latitude, v_wind=v_wind, dt=dt)
     level_t2 = geo.advect_level(level, vertical_velocity, rho_air, terminal_fall_speed, dt)
     altitude_t2 = units.pl_to_m(level_t2)

@@ -2089,15 +2033,20 @@ def advect(
     u_wind_tail = contrail["eastward_wind_tail"]
     v_wind_tail = contrail["northward_wind_tail"]

-    longitude_head_t2 = geo.
-        longitude=longitude_head,
+    longitude_head_t2, latitude_head_t2 = geo.advect_horizontal(
+        longitude=longitude_head,
+        latitude=latitude_head,
+        u_wind=u_wind_head,
+        v_wind=v_wind_head,
+        dt=dt_head,
     )
-
-
-
-
+    longitude_tail_t2, latitude_tail_t2 = geo.advect_horizontal(
+        longitude=longitude_tail,
+        latitude=latitude_tail,
+        u_wind=u_wind_tail,
+        v_wind=v_wind_tail,
+        dt=dt_tail,
     )
-    latitude_tail_t2 = geo.advect_latitude(latitude=latitude_tail, v_wind=v_wind_tail, dt=dt_tail)

     segment_length_t2 = geo.haversine(
         lons0=longitude_head_t2,
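The keyword signature and tuple return of ``geo.advect_horizontal`` are taken from the hunks above; the sketch below exercises it with made-up winds and assumes it accepts NumPy arrays plus a ``timedelta64`` step, matching the arguments shown:

import numpy as np
from pycontrails.physics import geo

longitude = np.array([10.0, 10.5])   # degrees
latitude = np.array([50.0, 50.2])    # degrees
u_wind = np.array([20.0, 25.0])      # m/s eastward
v_wind = np.array([-5.0, -3.0])      # m/s northward
dt = np.timedelta64(30, "m")

# One call replaces the previous advect_longitude / advect_latitude pair.
lon_t2, lat_t2 = geo.advect_horizontal(
    longitude=longitude, latitude=latitude, u_wind=u_wind, v_wind=v_wind, dt=dt
)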
@@ -2168,7 +2117,7 @@ def _aggregate_ef_summary(vector_list: list[VectorDataset]) -> VectorDataset | N

 def result_to_metdataset(
     result: VectorDataset | None,
-    verbose_dict: dict[str, npt.NDArray[np.
+    verbose_dict: dict[str, npt.NDArray[np.floating]],
     source: MetDataset,
     nominal_segment_length: float,
     attrs: dict[str, str],
@@ -2180,7 +2129,7 @@ def result_to_metdataset(
     result : VectorDataset | None
         Aggregated data arising from contrail evolution. Expected to contain keys:
         ``index``, ``age``, ``ef``.
-    verbose_dict : dict[str, npt.NDArray[np.
+    verbose_dict : dict[str, npt.NDArray[np.floating]]:
         Verbose outputs to attach to results.
     source : MetDataset
         :attr:`CocipGrid.`source` data on which to attach results.
@@ -2239,9 +2188,9 @@ class CocipGrid(models.Model):

 def result_merge_source(
     result: VectorDataset | None,
-    verbose_dict: dict[str, npt.NDArray[np.
+    verbose_dict: dict[str, npt.NDArray[np.floating]],
     source: GeoVectorDataset,
-    nominal_segment_length: float | npt.NDArray[np.
+    nominal_segment_length: float | npt.NDArray[np.floating],
     attrs: dict[str, str],
 ) -> GeoVectorDataset:
     """Merge ``results`` and ``verbose_dict`` onto ``source``."""
@@ -2277,7 +2226,7 @@ def _concat_verbose_dicts(
     verbose_dicts: list[dict[str, pd.Series]],
     source_size: int,
     verbose_outputs_formation: set[str],
-) -> dict[str, npt.NDArray[np.
+) -> dict[str, npt.NDArray[np.floating]]:
     # Concatenate the values and return
     ret: dict[str, np.ndarray] = {}
     for key in verbose_outputs_formation:
@@ -2370,7 +2319,7 @@ def _warn_not_wrap(met: MetDataset) -> None:
     )


-def _get_uncertainty_params(contrail: VectorDataset) -> dict[str, npt.NDArray[np.
+def _get_uncertainty_params(contrail: VectorDataset) -> dict[str, npt.NDArray[np.floating]]:
     """Return uncertainty parameters in ``contrail``.

     This function assumes the underlying humidity scaling model is
@@ -2393,7 +2342,7 @@ def _get_uncertainty_params(contrail: VectorDataset) -> dict[str, npt.NDArray[np

     Returns
     -------
-    dict[str, npt.NDArray[np.
+    dict[str, npt.NDArray[np.floating]]
         Dictionary of uncertainty parameters.
     """
     keys = (
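These signature changes swap a concrete float precision in the annotations for the abstract ``np.floating``. A minimal illustration (not pycontrails code) of why that matters: functions annotated this way type-check for both float32 and float64 arrays, which is relevant once a target dtype such as the ``_target_dtype`` slot added above can be float32.

import numpy as np
import numpy.typing as npt

def mean_ef(ef: npt.NDArray[np.floating]) -> float:
    """Accept any floating precision, so float32 pipelines type-check too."""
    return float(ef.mean())

mean_ef(np.array([1.0, 2.0], dtype=np.float32))
mean_ef(np.array([1.0, 2.0], dtype=np.float64))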
@@ -2573,3 +2522,65 @@ def _check_end_time(
             f"Include additional time at the end of '{name}' or reduce 'max_age' parameter."
             f"{note}"
         )
+
+
+def _maybe_downselect_mds(
+    big_mds: MetDataset,
+    little_mds: MetDataset | None,
+    t0: np.datetime64,
+    t1: np.datetime64,
+) -> MetDataset:
+    """Possibly downselect ``big_mds`` to cover ``[t0, t1]``.
+
+    This implementation assumes ``t0 <= t1``, but this is not enforced.
+
+    If possible, ``little_mds`` is recycled to avoid re-loading data.
+
+    This function only downselects in the time domain.
+
+    If ``big_mds`` doesn't cover the time range, no error is raised.
+    """
+    if little_mds is not None:
+        little_time = little_mds.indexes["time"].to_numpy()
+        ignore_little = t0 > little_time[-1] or t1 < little_time[0]
+
+    big_time = big_mds.indexes["time"].to_numpy()
+    if little_mds is None or ignore_little:
+        i0 = np.searchsorted(big_time, t0, side="right").item()
+        i0 = max(0, i0 - 1)
+        i1 = np.searchsorted(big_time, t1, side="left").item()
+        i1 = min(i1 + 1, big_time.size)
+        return MetDataset(big_mds.data.isel(time=slice(i0, i1)), copy=False)
+
+    j0 = np.searchsorted(little_time, t0, side="right").item()
+    j0 = max(0, j0 - 1)
+    j1 = np.searchsorted(little_time, t1, side="left").item()
+    j1 = min(j1 + 1, little_time.size)
+
+    little_ds = little_mds.data.isel(time=slice(j0, j1))
+    little_time0 = little_time[j0]
+    little_time1 = little_time[j1 - 1]
+
+    if t0 >= little_time0 and t1 <= little_time1:
+        return MetDataset(little_ds, copy=False)
+
+    ds_concat = []
+    if t0 < little_time0:  # unlikely to encounter this case
+        i0 = np.searchsorted(big_time, t0, side="right").item()
+        i0 = max(0, i0 - 1)
+        i1 = np.searchsorted(big_time, little_time0, side="right").item()
+        i1 = max(i1, i0 + 1)
+        ds_concat.append(big_mds.data.isel(time=slice(i0, i1)))
+
+    ds_concat.append(little_ds)
+
+    if t1 > little_time1:
+        i0 = np.searchsorted(big_time, little_time1, side="left").item()
+        i0 = min(i0 + 1, big_time.size)
+        i1 = np.searchsorted(big_time, t1, side="left").item()
+        i1 = min(i1 + 1, big_time.size)
+        ds_concat.append(big_mds.data.isel(time=slice(i0, i1)))
+
+    # If little_mds is loaded into memory but big_mds is not,
+    # the concat operation below will load the slice of big_mds into memory.
+    return MetDataset(xr.concat(ds_concat, dim="time"), copy=False)
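A standalone sketch of the ``np.searchsorted`` bracketing used by the helper above, on synthetic hourly times (not pycontrails data): the slice keeps the last time step at or before ``t0`` and the first at or after ``t1``.

import numpy as np

times = np.arange(
    np.datetime64("2022-03-01T00:00"), np.datetime64("2022-03-01T06:00"), np.timedelta64(1, "h")
)
t0 = np.datetime64("2022-03-01T01:30")
t1 = np.datetime64("2022-03-01T03:30")

i0 = max(0, np.searchsorted(times, t0, side="right").item() - 1)
i1 = min(np.searchsorted(times, t1, side="left").item() + 1, times.size)
print(times[i0:i1])  # 01:00, 02:00, 03:00, 04:00 covers [t0, t1]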
pycontrails/models/dry_advection.py

@@ -9,16 +9,20 @@ import numpy as np
 import numpy.typing as npt

 from pycontrails.core import models
-from pycontrails.core.flight import Flight
 from pycontrails.core.met import MetDataset
-from pycontrails.core.met_var import
+from pycontrails.core.met_var import (
+    AirTemperature,
+    EastwardWind,
+    NorthwardWind,
+    VerticalVelocity,
+)
 from pycontrails.core.vector import GeoVectorDataset
 from pycontrails.models.cocip import contrail_properties, wind_shear
 from pycontrails.physics import geo, thermo


 @dataclasses.dataclass
-class DryAdvectionParams(models.
+class DryAdvectionParams(models.AdvectionBuffers):
     """Parameters for the :class:`DryAdvection` model."""

     #: Apply Euler's method with a fixed step size of ``dt_integration``. Advected waypoints
@@ -92,9 +96,6 @@ class DryAdvection(models.Model):
     met_required = True
     source: GeoVectorDataset

-    @overload
-    def eval(self, source: Flight, **params: Any) -> Flight: ...
-
     @overload
     def eval(self, source: GeoVectorDataset, **params: Any) -> GeoVectorDataset: ...

@@ -109,7 +110,12 @@ class DryAdvection(models.Model):
        Parameters
        ----------
        source : GeoVectorDataset
-            Arbitrary points to advect.
+            Arbitrary points to advect. A :class:`Flight` instance is not treated any
+            differently than a :class:`GeoVectorDataset`. In particular, the user must
+            explicitly set ``flight["azimuth"] = flight.segment_azimuth()`` if they
+            want to use wind shear effects for a flight.
+            In the current implementation, any existing meteorological variables in the ``source``
+            are ignored. The ``source`` will be interpolated against the :attr:`met` dataset.
        params : Any
            Overwrite model parameters defined in ``__init__``.

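The pattern the new docstring describes, as a hedged sketch: ``fl`` is an existing :class:`Flight` and ``met`` an existing :class:`MetDataset`, and the ``dt_integration``/``max_age`` parameter names follow the params referenced elsewhere in this diff.

import numpy as np
from pycontrails.models.dry_advection import DryAdvection

# A Flight is not treated specially, so opt in to wind-shear handling explicitly.
fl["azimuth"] = fl.segment_azimuth()

model = DryAdvection(
    met,
    dt_integration=np.timedelta64(5, "m"),
    max_age=np.timedelta64(1, "h"),
)
result = model.eval(fl)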
@@ -122,7 +128,7 @@ class DryAdvection(models.Model):
         self.set_source(source)
         self.source = self.require_source_type(GeoVectorDataset)

-        self._prepare_source()
+        self.source = self._prepare_source()

         interp_kwargs = self.interp_kwargs

@@ -137,12 +143,12 @@ class DryAdvection(models.Model):
         t1 = source_time.max()
         timesteps = np.arange(t0 + dt_integration, t1 + dt_integration + max_age, dt_integration)

-        vector =
+        vector = GeoVectorDataset()

         evolved = []
         for t in timesteps:
             filt = (source_time < t) & (source_time >= t - dt_integration)
-            vector = self.source.filter(filt)
+            vector = vector + self.source.filter(filt, copy=False)
             vector = _evolve_one_step(
                 self.met,
                 vector,
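The ``vector + self.source.filter(...)`` accumulation above relies on vector addition being concatenation; a small sketch with synthetic points (constructor keywords assumed from the public :class:`GeoVectorDataset` API):

import numpy as np
from pycontrails import GeoVectorDataset

a = GeoVectorDataset(
    longitude=[0.0], latitude=[0.0], altitude=[11000.0],
    time=[np.datetime64("2022-03-01T00:00")],
)
b = GeoVectorDataset(
    longitude=[1.0], latitude=[0.5], altitude=[11000.0],
    time=[np.datetime64("2022-03-01T00:30")],
)
# "+" concatenates, so newly released source points join points still being advected.
print((a + b).size)  # 2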
@@ -162,49 +168,44 @@ class DryAdvection(models.Model):

         return GeoVectorDataset.sum(evolved, fill_value=np.nan)

-    def _prepare_source(self) ->
+    def _prepare_source(self) -> GeoVectorDataset:
         r"""Prepare :attr:`source` vector for advection by wind-shear-derived variables.

-
-        parameter is not None:
+        The following variables are always guaranteed to be present in :attr:`source`:

         - ``age``: Age of plume.
+        - ``waypoint``: Identifier for each waypoint.
+
+        If `"azimuth"` is present in :attr:`source`, `source.attrs`, or :attr:`params`,
+        the following variables will also be added:
+
         - ``azimuth``: Initial plume direction, measured in clockwise direction from
-
+          true north, [:math:`\deg`].
         - ``width``: Initial plume width, [:math:`m`].
         - ``depth``: Initial plume depth, [:math:`m`].
         - ``sigma_yz``: All zeros for cross-term term in covariance matrix of plume.
-        """

+        Returns
+        -------
+        GeoVectorDataset
+            A filtered version of the source with only the required columns.
+        """
         self.source.setdefault("level", self.source.level)
-
-        columns: tuple[str, ...] = ("longitude", "latitude", "level", "time")
-        if "azimuth" in self.source:
-            columns += ("azimuth",)
-        self.source = GeoVectorDataset(self.source.select(columns, copy=False))
-
-        # Get waypoint index if not already set
+        self.source["age"] = np.full(self.source.size, np.timedelta64(0, "ns"))
         self.source.setdefault("waypoint", np.arange(self.source.size))

-
+        columns = ["longitude", "latitude", "level", "time", "age", "waypoint"]
+        azimuth = self.get_source_param("azimuth", set_attr=False)
+        if azimuth is None:
+            # Early exit for pointwise only simulation
+            if self.params["width"] is not None or self.params["depth"] is not None:
+                raise ValueError(
+                    "If 'azimuth' is None, then 'width' and 'depth' must also be None."
+                )
+            return GeoVectorDataset(self.source.select(columns, copy=False), copy=False)

         if "azimuth" not in self.source:
-
-                pointwise_only = False
-                self.source["azimuth"] = self.source.segment_azimuth()
-            else:
-                try:
-                    self.source.broadcast_attrs("azimuth")
-                except KeyError:
-                    if (azimuth := self.params["azimuth"]) is not None:
-                        pointwise_only = False
-                        self.source["azimuth"] = np.full_like(self.source["longitude"], azimuth)
-                    else:
-                        pointwise_only = True
-                else:
-                    pointwise_only = False
-        else:
-            pointwise_only = False
+            self.source["azimuth"] = np.full_like(self.source["longitude"], azimuth)

         for key in ("width", "depth"):
             if key in self.source:
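A conceptual sketch (not the pycontrails internals) of the lookup order the rewritten ``_prepare_source`` relies on via ``get_source_param``: the azimuth is taken from the source data, then from source attributes, then from model params, and only its absence triggers the pointwise-only path.

from typing import Any

def resolve_azimuth(source_data: dict, source_attrs: dict, params: dict) -> Any:
    """Illustrative fallback chain: data -> attrs -> params, else None."""
    for mapping in (source_data, source_attrs, params):
        value = mapping.get("azimuth")
        if value is not None:
            return value
    return None

assert resolve_azimuth({}, {}, {"azimuth": 45.0}) == 45.0
assert resolve_azimuth({}, {}, {"azimuth": None}) is None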
@@ -214,18 +215,12 @@ class DryAdvection(models.Model):
                 continue

             val = self.params[key]
-            if val is None
+            if val is None:
                 raise ValueError(f"If '{key}' is None, then 'azimuth' must also be None.")

-
-                raise ValueError(f"Cannot specify '{key}' without specifying 'azimuth'.")
-
-            if not pointwise_only:
-                self.source[key] = np.full_like(self.source["longitude"], val)
-
-        if pointwise_only:
-            return
+            self.source[key] = np.full_like(self.source["longitude"], val)

+        columns.extend(["azimuth", "width", "depth", "sigma_yz", "area_eff"])
         self.source["sigma_yz"] = np.zeros_like(self.source["longitude"])
         width = self.source["width"]
         depth = self.source["depth"]
@@ -233,6 +228,8 @@ class DryAdvection(models.Model):
             width, depth, sigma_yz=0.0
         )

+        return GeoVectorDataset(self.source.select(columns, copy=False), copy=False)
+

 def _perform_interp_for_step(
     met: MetDataset,
@@ -245,7 +242,6 @@ def _perform_interp_for_step(
     vector.setdefault("level", vector.level)
     air_pressure = vector.setdefault("air_pressure", vector.air_pressure)

-    air_temperature = models.interpolate_met(met, vector, "air_temperature", **interp_kwargs)
     models.interpolate_met(met, vector, "northward_wind", "v_wind", **interp_kwargs)
     models.interpolate_met(met, vector, "eastward_wind", "u_wind", **interp_kwargs)
     models.interpolate_met(
@@ -261,6 +257,7 @@ def _perform_interp_for_step(
         # Early exit for pointwise only simulation
         return

+    air_temperature = models.interpolate_met(met, vector, "air_temperature", **interp_kwargs)
     air_pressure_lower = thermo.pressure_dz(air_temperature, air_pressure, dz_m)
     vector["air_pressure_lower"] = air_pressure_lower
     level_lower = air_pressure_lower / 100.0
@@ -412,15 +409,20 @@ def _calc_geometry(
     u_wind_tail = vector.data.pop("eastward_wind_tail")
     v_wind_tail = vector.data.pop("northward_wind_tail")

-    longitude_head_t2 = geo.
-        longitude=longitude_head,
+    longitude_head_t2, latitude_head_t2 = geo.advect_horizontal(
+        longitude=longitude_head,
+        latitude=latitude_head,
+        u_wind=u_wind_head,
+        v_wind=v_wind_head,
+        dt=dt,
     )
-
-
-
-
+    longitude_tail_t2, latitude_tail_t2 = geo.advect_horizontal(
+        longitude=longitude_tail,
+        latitude=latitude_tail,
+        u_wind=u_wind_tail,
+        v_wind=v_wind_tail,
+        dt=dt,
     )
-    latitude_tail_t2 = geo.advect_latitude(latitude=latitude_tail, v_wind=v_wind_tail, dt=dt)

     azimuth_2 = geo.azimuth(
         lons0=longitude_tail_t2,
@@ -453,8 +455,7 @@ def _evolve_one_step(
     longitude = vector["longitude"]

     dt = t - vector["time"]
-    longitude_2 = geo.
-    latitude_2 = geo.advect_latitude(latitude, v_wind, dt)  # type: ignore[arg-type]
+    longitude_2, latitude_2 = geo.advect_horizontal(longitude, latitude, u_wind, v_wind, dt)  # type: ignore[arg-type]
     level_2 = geo.advect_level(
         vector.level,
         vertical_velocity,
pycontrails/models/emissions/__init__.py

@@ -11,10 +11,10 @@ from pycontrails.models.emissions.emissions import (
 )

 __all__ = [
-    "Emissions",
-    "EmissionsParams",
     "EDBGaseous",
     "EDBnvpm",
+    "Emissions",
+    "EmissionsParams",
     "load_default_aircraft_engine_mapping",
     "load_engine_nvpm_profile_from_edb",
     "load_engine_params_from_edb",