pycontrails-0.54.3-cp311-cp311-win_amd64.whl → pycontrails-0.54.4-cp311-cp311-win_amd64.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- pycontrails/__init__.py +2 -2
- pycontrails/_version.py +2 -2
- pycontrails/core/__init__.py +1 -1
- pycontrails/core/aircraft_performance.py +58 -58
- pycontrails/core/cache.py +7 -7
- pycontrails/core/fleet.py +25 -21
- pycontrails/core/flight.py +213 -301
- pycontrails/core/interpolation.py +56 -56
- pycontrails/core/met.py +48 -39
- pycontrails/core/models.py +25 -11
- pycontrails/core/polygon.py +15 -15
- pycontrails/core/rgi_cython.cp311-win_amd64.pyd +0 -0
- pycontrails/core/vector.py +22 -22
- pycontrails/datalib/_met_utils/metsource.py +8 -5
- pycontrails/datalib/ecmwf/__init__.py +14 -14
- pycontrails/datalib/ecmwf/common.py +1 -1
- pycontrails/datalib/ecmwf/era5.py +7 -7
- pycontrails/datalib/ecmwf/hres.py +3 -3
- pycontrails/datalib/ecmwf/ifs.py +1 -1
- pycontrails/datalib/gfs/__init__.py +6 -6
- pycontrails/datalib/gfs/gfs.py +2 -2
- pycontrails/datalib/goes.py +5 -5
- pycontrails/ext/empirical_grid.py +1 -1
- pycontrails/models/apcemm/apcemm.py +3 -3
- pycontrails/models/cocip/__init__.py +2 -2
- pycontrails/models/cocip/cocip.py +15 -15
- pycontrails/models/cocip/cocip_params.py +2 -11
- pycontrails/models/cocip/cocip_uncertainty.py +24 -18
- pycontrails/models/cocip/contrail_properties.py +331 -316
- pycontrails/models/cocip/output_formats.py +53 -53
- pycontrails/models/cocip/radiative_forcing.py +135 -131
- pycontrails/models/cocip/radiative_heating.py +135 -135
- pycontrails/models/cocip/unterstrasser_wake_vortex.py +90 -87
- pycontrails/models/cocip/wake_vortex.py +92 -92
- pycontrails/models/cocip/wind_shear.py +8 -8
- pycontrails/models/cocipgrid/cocip_grid.py +93 -87
- pycontrails/models/dry_advection.py +10 -5
- pycontrails/models/emissions/__init__.py +2 -2
- pycontrails/models/emissions/black_carbon.py +108 -108
- pycontrails/models/emissions/emissions.py +85 -85
- pycontrails/models/emissions/ffm2.py +35 -35
- pycontrails/models/humidity_scaling/humidity_scaling.py +23 -23
- pycontrails/models/ps_model/__init__.py +1 -1
- pycontrails/models/ps_model/ps_aircraft_params.py +8 -4
- pycontrails/models/ps_model/ps_grid.py +74 -64
- pycontrails/models/ps_model/ps_model.py +14 -14
- pycontrails/models/ps_model/ps_operational_limits.py +20 -18
- pycontrails/models/tau_cirrus.py +8 -1
- pycontrails/physics/geo.py +67 -67
- pycontrails/physics/jet.py +79 -79
- pycontrails/physics/units.py +14 -14
- pycontrails/utils/json.py +1 -2
- pycontrails/utils/types.py +12 -7
- {pycontrails-0.54.3.dist-info → pycontrails-0.54.4.dist-info}/METADATA +2 -2
- {pycontrails-0.54.3.dist-info → pycontrails-0.54.4.dist-info}/NOTICE +1 -1
- pycontrails-0.54.4.dist-info/RECORD +111 -0
- pycontrails-0.54.3.dist-info/RECORD +0 -111
- {pycontrails-0.54.3.dist-info → pycontrails-0.54.4.dist-info}/LICENSE +0 -0
- {pycontrails-0.54.3.dist-info → pycontrails-0.54.4.dist-info}/WHEEL +0 -0
- {pycontrails-0.54.3.dist-info → pycontrails-0.54.4.dist-info}/top_level.txt +0 -0
pycontrails/models/cocipgrid/cocip_grid.py

@@ -74,11 +74,11 @@ class CocipGrid(models.Model):
     """

     __slots__ = (
-        "
-        "timesteps",
+        "_target_dtype",
         "contrail",
         "contrail_list",
-        "
+        "rad",
+        "timesteps",
     )

     name = "contrail_grid"
@@ -241,12 +241,12 @@ class CocipGrid(models.Model):
         existing_vectors: Iterator[GeoVectorDataset] = iter(())

         for time_idx, time_end in enumerate(self.timesteps):
-            met, rad = self._maybe_downselect_met_rad(met, rad, time_end)
-
             evolved_this_step = []
             ef_summary_this_step = []
             downwash_vectors_this_step = []
             for vector in self._generate_new_vectors(time_idx):
+                t0 = vector["time"].min()
+                met, rad = self._maybe_downselect_met_rad(met, rad, t0, time_end)
                 downwash, verbose_dict = _run_downwash(vector, met, rad, self.params)

                 if downwash:
@@ -264,6 +264,8 @@ class CocipGrid(models.Model):
                     pbar.update()

             for vector in itertools.chain(existing_vectors, downwash_vectors_this_step):
+                t0 = vector["time"].min()
+                met, rad = self._maybe_downselect_met_rad(met, rad, t0, time_end)
                 contrail, ef = _evolve_vector(
                     vector,
                     met=met,
@@ -304,83 +306,25 @@ class CocipGrid(models.Model):
         self,
         met: MetDataset | None,
         rad: MetDataset | None,
-        time_end: np.datetime64,
+        t0: np.datetime64,
+        t1: np.datetime64,
     ) -> tuple[MetDataset, MetDataset]:
-        """Downselect ``self.met`` and ``self.rad`` if necessary to cover ``
+        """Downselect ``self.met`` and ``self.rad`` if necessary to cover ``[t0, t1]``.
+
+        This implementation assumes ``t0 <= t1``, but does not enforce this.

         If the currently used ``met`` and ``rad`` slices do not include the time
-        ``
-        ``self.rad`` data. The slicing only occurs in the time domain.
+        interval ``[t0, t1]``, new slices are selected from the larger ``self.met``
+        and ``self.rad`` data. The slicing only occurs in the time domain.

-
-
-        already-loaded met data.
+        Existing slices from ``met`` and ``rad`` will be used when possible to avoid
+        losing and re-loading already-loaded met data.

-        If ``self.params["downselect_met"]`` is True, :func:`_downselect_met` has
+        If ``self.params["downselect_met"]`` is True, the :func:`_downselect_met` has
         already performed a spatial downselection of the met data.
         """
-
-        if met is None:
-            # idx is the first index at which self.met.variables["time"].to_numpy() >= time_end
-            idx = np.searchsorted(self.met.indexes["time"].to_numpy(), time_end).item()
-            sl = slice(max(0, idx - 1), idx + 1)
-            logger.debug("Select met slice %s", sl)
-            met = MetDataset(self.met.data.isel(time=sl), copy=False)
-
-        elif time_end > met.indexes["time"].to_numpy()[-1]:
-            current_times = met.indexes["time"].to_numpy()
-            all_times = self.met.indexes["time"].to_numpy()
-            # idx is the first index at which all_times >= time_end
-            idx = np.searchsorted(all_times, time_end).item()
-            sl = slice(max(0, idx - 1), idx + 1)
-
-            # case 1: cannot re-use end of current met as start of new met
-            if current_times[-1] != all_times[sl.start]:
-                logger.debug("Select met slice %s", sl)
-                met = MetDataset(self.met.data.isel(time=sl), copy=False)
-            # case 2: can re-use end of current met plus one step of new met
-            elif sl.start < all_times.size - 1:
-                sl = slice(sl.start + 1, sl.stop)
-                logger.debug("Reuse end of met and select met slice %s", sl)
-                met = MetDataset(
-                    xr.concat((met.data.isel(time=[-1]), self.met.data.isel(time=sl)), dim="time"),
-                    copy=False,
-                )
-            # case 3: can re-use end of current met and nothing else
-            else:
-                logger.debug("Reuse end of met")
-                met = MetDataset(met.data.isel(time=[-1]), copy=False)
-
-        if rad is None:
-            # idx is the first index at which self.rad.variables["time"].to_numpy() >= time_end
-            idx = np.searchsorted(self.rad.indexes["time"].to_numpy(), time_end).item()
-            sl = slice(max(0, idx - 1), idx + 1)
-            logger.debug("Select rad slice %s", sl)
-            rad = MetDataset(self.rad.data.isel(time=sl), copy=False)
-
-        elif time_end > rad.indexes["time"].to_numpy()[-1]:
-            current_times = rad.indexes["time"].to_numpy()
-            all_times = self.rad.indexes["time"].to_numpy()
-            # idx is the first index at which all_times >= time_end
-            idx = np.searchsorted(all_times, time_end).item()
-            sl = slice(max(0, idx - 1), idx + 1)
-
-            # case 1: cannot re-use end of current rad as start of new rad
-            if current_times[-1] != all_times[sl.start]:
-                logger.debug("Select rad slice %s", sl)
-                rad = MetDataset(self.rad.data.isel(time=sl), copy=False)
-            # case 2: can re-use end of current rad plus one step of new rad
-            elif sl.start < all_times.size - 1:
-                sl = slice(sl.start + 1, sl.stop)
-                logger.debug("Reuse end of rad and select rad slice %s", sl)
-                rad = MetDataset(
-                    xr.concat((rad.data.isel(time=[-1]), self.rad.data.isel(time=sl)), dim="time"),
-                    copy=False,
-                )
-            # case 3: can re-use end of current rad and nothing else
-            else:
-                logger.debug("Reuse end of rad")
-                rad = MetDataset(rad.data.isel(time=[-1]), copy=False)
+        met = _maybe_downselect_mds(self.met, met, t0, t1)
+        rad = _maybe_downselect_mds(self.rad, rad, t0, t1)

         return met, rad

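The new docstring above describes a lazy refresh: the slices held in ``met`` and ``rad`` are kept until they no longer cover the requested interval ``[t0, t1]``. The sketch below illustrates that coverage test in isolation, with plain NumPy arrays standing in for ``MetDataset`` time coordinates; the helper name ``needs_refresh`` is illustrative and not part of pycontrails.

```python
import numpy as np

def needs_refresh(current_times: np.ndarray, t0: np.datetime64, t1: np.datetime64) -> bool:
    """Return True when the cached time window no longer covers [t0, t1]."""
    return t0 < current_times[0] or t1 > current_times[-1]

cached = np.array(["2022-01-01T00", "2022-01-01T01"], dtype="datetime64[h]")

# Interval inside the cached window: keep the existing slices.
print(needs_refresh(cached, np.datetime64("2022-01-01T00:15"), np.datetime64("2022-01-01T00:45")))

# Interval extends past the cached window: select new slices from the full data.
print(needs_refresh(cached, np.datetime64("2022-01-01T00:30"), np.datetime64("2022-01-01T02:15")))
```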
@@ -806,10 +750,10 @@ class CocipGrid(models.Model):

     @staticmethod
     def create_source(
-        level: npt.NDArray[np.
+        level: npt.NDArray[np.floating] | list[float] | float,
         time: npt.NDArray[np.datetime64] | list[np.datetime64] | np.datetime64,
-        longitude: npt.NDArray[np.
-        latitude: npt.NDArray[np.
+        longitude: npt.NDArray[np.floating] | list[float] | None = None,
+        latitude: npt.NDArray[np.floating] | list[float] | None = None,
         lon_step: float = 1.0,
         lat_step: float = 1.0,
     ) -> MetDataset:
@@ -821,7 +765,7 @@ class CocipGrid(models.Model):

         Parameters
         ----------
-        level : level: npt.NDArray[np.
+        level : level: npt.NDArray[np.floating] | list[float] | float
             Pressure levels for gridded cocip.
             To avoid interpolating outside of the passed ``met`` and ``rad`` data, this
             parameter should avoid the extreme values of the ``met`` and `rad` levels.
@@ -829,7 +773,7 @@ class CocipGrid(models.Model):
            ``met.data['level'].values[1: -1]``.
        time: npt.NDArray[np.datetime64 | list[np.datetime64] | np.datetime64,
            One or more time values for gridded cocip.
-        longitude, latitude : npt.NDArray[np.
+        longitude, latitude : npt.NDArray[np.floating] | list[float], optional
            Longitude and latitude arrays, by default None. If not specified, values of
            ``lon_step`` and ``lat_step`` are used to define ``longitude`` and ``latitude``.
        lon_step, lat_step : float, optional
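Based on the signature and parameter documentation shown above, a minimal usage sketch could look like the following. The import path assumes the usual ``pycontrails.models.cocipgrid`` location, and the level and step values are illustrative only; per the docstring, keep ``level`` away from the extreme levels of the ``met`` and ``rad`` data actually passed to the model.

```python
import numpy as np
from pycontrails.models.cocipgrid import CocipGrid

# Build a 1 x 1 degree global grid at two mid-levels for a single analysis time.
source = CocipGrid.create_source(
    level=[250.0, 300.0],  # pressure levels, hPa (illustrative)
    time=np.datetime64("2022-01-01T12:00"),
    lon_step=1.0,
    lat_step=1.0,
)
print(source.data.sizes)  # longitude/latitude/level/time dimension sizes
```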
@@ -2173,7 +2117,7 @@ def _aggregate_ef_summary(vector_list: list[VectorDataset]) -> VectorDataset | N

 def result_to_metdataset(
     result: VectorDataset | None,
-    verbose_dict: dict[str, npt.NDArray[np.
+    verbose_dict: dict[str, npt.NDArray[np.floating]],
     source: MetDataset,
     nominal_segment_length: float,
     attrs: dict[str, str],
@@ -2185,7 +2129,7 @@ def result_to_metdataset(
     result : VectorDataset | None
         Aggregated data arising from contrail evolution. Expected to contain keys:
         ``index``, ``age``, ``ef``.
-    verbose_dict : dict[str, npt.NDArray[np.
+    verbose_dict : dict[str, npt.NDArray[np.floating]]:
         Verbose outputs to attach to results.
     source : MetDataset
         :attr:`CocipGrid.`source` data on which to attach results.
@@ -2244,9 +2188,9 @@ def result_to_metdataset(

 def result_merge_source(
     result: VectorDataset | None,
-    verbose_dict: dict[str, npt.NDArray[np.
+    verbose_dict: dict[str, npt.NDArray[np.floating]],
     source: GeoVectorDataset,
-    nominal_segment_length: float | npt.NDArray[np.
+    nominal_segment_length: float | npt.NDArray[np.floating],
     attrs: dict[str, str],
 ) -> GeoVectorDataset:
     """Merge ``results`` and ``verbose_dict`` onto ``source``."""
@@ -2282,7 +2226,7 @@ def _concat_verbose_dicts(
     verbose_dicts: list[dict[str, pd.Series]],
     source_size: int,
     verbose_outputs_formation: set[str],
-) -> dict[str, npt.NDArray[np.
+) -> dict[str, npt.NDArray[np.floating]]:
     # Concatenate the values and return
     ret: dict[str, np.ndarray] = {}
     for key in verbose_outputs_formation:
@@ -2375,7 +2319,7 @@ def _warn_not_wrap(met: MetDataset) -> None:
     )


-def _get_uncertainty_params(contrail: VectorDataset) -> dict[str, npt.NDArray[np.
+def _get_uncertainty_params(contrail: VectorDataset) -> dict[str, npt.NDArray[np.floating]]:
     """Return uncertainty parameters in ``contrail``.

     This function assumes the underlying humidity scaling model is
@@ -2398,7 +2342,7 @@ def _get_uncertainty_params(contrail: VectorDataset) -> dict[str, npt.NDArray[np

     Returns
     -------
-    dict[str, npt.NDArray[np.
+    dict[str, npt.NDArray[np.floating]]
         Dictionary of uncertainty parameters.
     """
     keys = (
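The remaining ``cocip_grid.py`` hunks above only relax annotations to ``npt.NDArray[np.floating]``. A small self-contained sketch of what that generic annotation admits, using standard NumPy typing; the function and key names here are hypothetical, not pycontrails APIs.

```python
import numpy as np
import numpy.typing as npt

def summarize(params: dict[str, npt.NDArray[np.floating]]) -> dict[str, float]:
    """Reduce each array of uncertainty-style parameters to its mean."""
    return {key: float(arr.mean()) for key, arr in params.items()}

# Any float dtype satisfies npt.NDArray[np.floating].
print(summarize({"rf_lw_scale": np.array([0.9, 1.1], dtype=np.float32)}))
print(summarize({"rf_sw_scale": np.array([1.0, 1.2])}))  # float64
```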
@@ -2578,3 +2522,65 @@ def _check_end_time(
         f"Include additional time at the end of '{name}' or reduce 'max_age' parameter."
         f"{note}"
     )
+
+
+def _maybe_downselect_mds(
+    big_mds: MetDataset,
+    little_mds: MetDataset | None,
+    t0: np.datetime64,
+    t1: np.datetime64,
+) -> MetDataset:
+    """Possibly downselect ``big_mds`` to cover ``[t0, t1]``.
+
+    This implementation assumes ``t0 <= t1``, but this is not enforced.
+
+    If possible, ``little_mds`` is recycled to avoid re-loading data.
+
+    This function only downselects in the time domain.
+
+    If ``big_mds`` doesn't cover the time range, no error is raised.
+    """
+    if little_mds is not None:
+        little_time = little_mds.indexes["time"].to_numpy()
+        ignore_little = t0 > little_time[-1] or t1 < little_time[0]
+
+    big_time = big_mds.indexes["time"].to_numpy()
+    if little_mds is None or ignore_little:
+        i0 = np.searchsorted(big_time, t0, side="right").item()
+        i0 = max(0, i0 - 1)
+        i1 = np.searchsorted(big_time, t1, side="left").item()
+        i1 = min(i1 + 1, big_time.size)
+        return MetDataset(big_mds.data.isel(time=slice(i0, i1)), copy=False)
+
+    j0 = np.searchsorted(little_time, t0, side="right").item()
+    j0 = max(0, j0 - 1)
+    j1 = np.searchsorted(little_time, t1, side="left").item()
+    j1 = min(j1 + 1, little_time.size)
+
+    little_ds = little_mds.data.isel(time=slice(j0, j1))
+    little_time0 = little_time[j0]
+    little_time1 = little_time[j1 - 1]
+
+    if t0 >= little_time0 and t1 <= little_time1:
+        return MetDataset(little_ds, copy=False)
+
+    ds_concat = []
+    if t0 < little_time0:  # unlikely to encounter this case
+        i0 = np.searchsorted(big_time, t0, side="right").item()
+        i0 = max(0, i0 - 1)
+        i1 = np.searchsorted(big_time, little_time0, side="right").item()
+        i1 = max(i1, i0 + 1)
+        ds_concat.append(big_mds.data.isel(time=slice(i0, i1)))
+
+    ds_concat.append(little_ds)
+
+    if t1 > little_time1:
+        i0 = np.searchsorted(big_time, little_time1, side="left").item()
+        i0 = min(i0 + 1, big_time.size)
+        i1 = np.searchsorted(big_time, t1, side="left").item()
+        i1 = min(i1 + 1, big_time.size)
+        ds_concat.append(big_mds.data.isel(time=slice(i0, i1)))
+
+    # If little_mds is loaded into memory but big_mds is not,
+    # the concat operation below will load the slice of big_mds into memory.
+    return MetDataset(xr.concat(ds_concat, dim="time"), copy=False)
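The ``np.searchsorted`` arithmetic in the new helper selects the smallest index window that brackets ``[t0, t1]``. The standalone sketch below reproduces just that bracketing step on a hypothetical hourly time axis to show which samples land in the slice; it is an illustration of the indexing, not the pycontrails function itself.

```python
import numpy as np

def bracketing_slice(times: np.ndarray, t0: np.datetime64, t1: np.datetime64) -> slice:
    """Smallest slice of ``times`` that brackets [t0, t1], clipped to the array ends."""
    i0 = np.searchsorted(times, t0, side="right").item()
    i0 = max(0, i0 - 1)  # step back to the sample at or before t0
    i1 = np.searchsorted(times, t1, side="left").item()
    i1 = min(i1 + 1, times.size)  # step forward to include the sample at or after t1
    return slice(i0, i1)

times = np.arange(
    np.datetime64("2022-01-01T00"), np.datetime64("2022-01-01T12"), np.timedelta64(1, "h")
)
sl = bracketing_slice(times, np.datetime64("2022-01-01T02:30"), np.datetime64("2022-01-01T04:30"))
print(times[sl])  # 02:00, 03:00, 04:00, 05:00
```

When ``little_mds`` already spans the interval, the helper above returns the recycled slice instead of touching ``big_mds``, which is what keeps already-loaded data in memory.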
pycontrails/models/dry_advection.py

@@ -10,14 +10,19 @@ import numpy.typing as npt

 from pycontrails.core import models
 from pycontrails.core.met import MetDataset
-from pycontrails.core.met_var import
+from pycontrails.core.met_var import (
+    AirTemperature,
+    EastwardWind,
+    NorthwardWind,
+    VerticalVelocity,
+)
 from pycontrails.core.vector import GeoVectorDataset
 from pycontrails.models.cocip import contrail_properties, wind_shear
 from pycontrails.physics import geo, thermo


 @dataclasses.dataclass
-class DryAdvectionParams(models.
+class DryAdvectionParams(models.AdvectionBuffers):
     """Parameters for the :class:`DryAdvection` model."""

     #: Apply Euler's method with a fixed step size of ``dt_integration``. Advected waypoints
@@ -138,12 +143,12 @@ class DryAdvection(models.Model):
         t1 = source_time.max()
         timesteps = np.arange(t0 + dt_integration, t1 + dt_integration + max_age, dt_integration)

-        vector =
+        vector = GeoVectorDataset()

         evolved = []
         for t in timesteps:
             filt = (source_time < t) & (source_time >= t - dt_integration)
-            vector = self.source.filter(filt, copy=False)
+            vector = vector + self.source.filter(filt, copy=False)
             vector = _evolve_one_step(
                 self.met,
                 vector,
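With this change, waypoints released during each integration window are appended to the working vector via ``+`` instead of replacing it, so plumes from earlier windows keep being advected alongside the new ones. A hedged sketch of that concatenation, assuming the usual top-level ``GeoVectorDataset`` constructor keywords:

```python
import numpy as np
from pycontrails import GeoVectorDataset

first = GeoVectorDataset(
    longitude=[10.0], latitude=[50.0], altitude=[11000.0],
    time=[np.datetime64("2022-01-01T00:00")],
)
later = GeoVectorDataset(
    longitude=[11.0], latitude=[50.5], altitude=[11000.0],
    time=[np.datetime64("2022-01-01T01:00")],
)

# Mirrors `vector = vector + self.source.filter(filt, copy=False)`: both sets of
# waypoints are carried into the next _evolve_one_step call together.
combined = first + later
print(combined.size)  # 2
```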
@@ -237,7 +242,6 @@ def _perform_interp_for_step(
     vector.setdefault("level", vector.level)
     air_pressure = vector.setdefault("air_pressure", vector.air_pressure)

-    air_temperature = models.interpolate_met(met, vector, "air_temperature", **interp_kwargs)
     models.interpolate_met(met, vector, "northward_wind", "v_wind", **interp_kwargs)
     models.interpolate_met(met, vector, "eastward_wind", "u_wind", **interp_kwargs)
     models.interpolate_met(
@@ -253,6 +257,7 @@ def _perform_interp_for_step(
         # Early exit for pointwise only simulation
         return

+    air_temperature = models.interpolate_met(met, vector, "air_temperature", **interp_kwargs)
     air_pressure_lower = thermo.pressure_dz(air_temperature, air_pressure, dz_m)
     vector["air_pressure_lower"] = air_pressure_lower
     level_lower = air_pressure_lower / 100.0
pycontrails/models/emissions/__init__.py

@@ -11,10 +11,10 @@ from pycontrails.models.emissions.emissions import (
 )

 __all__ = [
-    "Emissions",
-    "EmissionsParams",
     "EDBGaseous",
     "EDBnvpm",
+    "Emissions",
+    "EmissionsParams",
     "load_default_aircraft_engine_mapping",
     "load_engine_nvpm_profile_from_edb",
     "load_engine_params_from_edb",