pycontrails-0.49.3-cp312-cp312-win_amd64.whl → pycontrails-0.49.5-cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycontrails/_version.py +2 -2
- pycontrails/core/datalib.py +1 -1
- pycontrails/core/flight.py +11 -11
- pycontrails/core/interpolation.py +29 -19
- pycontrails/core/met.py +192 -104
- pycontrails/core/models.py +29 -15
- pycontrails/core/rgi_cython.cp312-win_amd64.pyd +0 -0
- pycontrails/core/vector.py +14 -15
- pycontrails/datalib/gfs/gfs.py +1 -1
- pycontrails/datalib/spire/spire.py +23 -19
- pycontrails/ext/synthetic_flight.py +3 -1
- pycontrails/models/accf.py +6 -4
- pycontrails/models/cocip/cocip.py +48 -18
- pycontrails/models/cocip/cocip_params.py +13 -10
- pycontrails/models/cocip/output_formats.py +62 -52
- pycontrails/models/cocipgrid/cocip_grid.py +459 -275
- pycontrails/models/cocipgrid/cocip_grid_params.py +12 -18
- pycontrails/models/emissions/ffm2.py +10 -8
- pycontrails/models/pcc.py +1 -1
- pycontrails/models/ps_model/ps_aircraft_params.py +1 -1
- pycontrails/models/ps_model/static/{ps-aircraft-params-20231117.csv → ps-aircraft-params-20240209.csv} +12 -3
- pycontrails/utils/json.py +12 -10
- {pycontrails-0.49.3.dist-info → pycontrails-0.49.5.dist-info}/METADATA +2 -2
- {pycontrails-0.49.3.dist-info → pycontrails-0.49.5.dist-info}/RECORD +28 -29
- pycontrails/models/cocipgrid/cocip_time_handling.py +0 -342
- {pycontrails-0.49.3.dist-info → pycontrails-0.49.5.dist-info}/LICENSE +0 -0
- {pycontrails-0.49.3.dist-info → pycontrails-0.49.5.dist-info}/NOTICE +0 -0
- {pycontrails-0.49.3.dist-info → pycontrails-0.49.5.dist-info}/WHEEL +0 -0
- {pycontrails-0.49.3.dist-info → pycontrails-0.49.5.dist-info}/top_level.txt +0 -0
--- a/pycontrails/models/cocipgrid/cocip_grid.py
+++ b/pycontrails/models/cocipgrid/cocip_grid.py
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import itertools
 import logging
 import warnings
 from collections.abc import Generator, Iterable, Iterator, Sequence
@@ -13,15 +14,14 @@ import pandas as pd

 import pycontrails
 from pycontrails.core import models
-from pycontrails.core.flight import Flight
 from pycontrails.core.met import MetDataset
 from pycontrails.core.vector import GeoVectorDataset, VectorDataset
 from pycontrails.models import humidity_scaling, sac
 from pycontrails.models.cocip import cocip, contrail_properties, wake_vortex, wind_shear
-from pycontrails.models.cocipgrid import cocip_time_handling
 from pycontrails.models.cocipgrid.cocip_grid_params import CocipGridParams
 from pycontrails.models.emissions import Emissions
 from pycontrails.physics import geo, thermo, units
+from pycontrails.utils import dependencies

 if TYPE_CHECKING:
     import tqdm
@@ -29,7 +29,7 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)


-class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
+class CocipGrid(models.Model):
     """Run CoCiP simulation on a grid.

     See :meth:`eval` for a description of model evaluation ``source`` parameters.
@@ -45,6 +45,15 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
     param_kwargs : Any
         Override CocipGridParams defaults with arbitrary keyword arguments.

+    Notes
+    -----
+    - If ``rad`` contains accumulated radiative fluxes, differencing to obtain
+      time-averaged fluxes will reduce the time coverage of ``rad`` by half a forecast
+      step. A warning will be produced during :meth:`eval` if the time coverage of
+      ``rad`` (after differencing) is too short given the model evaluation parameters.
+      If this occurs, provide an additional step of radiation data at the start or end
+      of ``rad``.
+
     References
     ----------
     - :cite:`schumannPotentialReduceClimate2011`
@@ -63,6 +72,14 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
     :mod:`tau_cirrus`
     """

+    __slots__ = (
+        "rad",
+        "timesteps",
+        "contrail",
+        "contrail_list",
+        "_target_dtype",
+    )
+
     name = "contrail_grid"
     long_name = "Gridded Contrail Cirrus Prediction Model"
     default_params = CocipGridParams
@@ -94,7 +111,6 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
         **params_kwargs: Any,
     ):
         super().__init__(met, params=params, **params_kwargs)
-        self.validate_time_params()

         compute_tau_cirrus = self.params["compute_tau_cirrus_in_model_init"]
         self.met, self.rad = cocip.process_met_datasets(met, rad, compute_tau_cirrus)
@@ -103,9 +119,8 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
         self.params["_interp_kwargs"] = self.interp_kwargs

         if self.params["radiative_heating_effects"]:
-
-
-            )
+            msg = "Parameter 'radiative_heating_effects' is not yet implemented in CocipGrid"
+            raise NotImplementedError(msg)

         self._target_dtype = np.result_type(*self.met.data.values())

@@ -139,9 +154,19 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
         Parameters
         ----------
         source : GeoVectorDataset | MetDataset | None
-            Input :class:`GeoVectorDataset` or :class:`MetDataset`. If
-
-
+            Input :class:`GeoVectorDataset` or :class:`MetDataset`. If None,
+            a ``NotImplementedError`` is raised. If any subclass of :class:`GeoVectorDataset`
+            is passed (e.g., :class:`Flight`), the additional structure is forgotten and
+            the model is evaluated as if it were a :class:`GeoVectorDataset`.
+            Additional variables may be passed as ``source`` data or attrs. These
+            include:
+
+            - ``aircraft_type``: This overrides any value in :attr:`params`. Must be included
+              in the source attrs (not data).
+            - ``fuel_flow``, ``engine_efficiency``, ``true_airspeed``, ``wingspan``,
+              ``aircraft_mass``: These override any value in :attr:`params`.
+            - ``azimuth``: This overrides any value in :attr:`params`.
+            - ``segment_length``: This overrides any value in :attr:`params`.
         **params : Any
             Overwrite model parameters before eval

@@ -160,16 +185,18 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
         Notes
         -----
         At a high level, the model is broken down into the following steps:
-
-
-
-
-
+        - Convert any :class:`MetDataset` ``source`` to :class:`GeoVectorDataset`.
+        - Split the ``source`` into chunks of size ``params["target_split_size"]``.
+        - For each timestep in :attr:`timesteps`:
+
+          - Generate any new waypoints from the source data. Calculate aircraft performance
+            and run the CoCiP downwash routine over the new waypoints.
           - For each "active" contrail (i.e., a contrail that has been initialized but
-            has not yet reach its end of life), evolve the contrail
-
-
-
+            has not yet reach its end of life), evolve the contrail forward one step.
+            Filter any waypoint that has reached its end of life.
+
+        - Aggregate contrail age and energy forcing predictions to a single
+          output variable to return.
         """
         self.update_params(params)
         if source is None:
@@ -178,61 +205,83 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
             # in the source (we need to evolve contrails forward in time).
             # Perhaps we could use the isel(time=0) slice to construct the source
             # from the met and rad data.
-
-
-            ("true_airspeed", "azimuth"), False
-        ):
-            warnings.warn(
-                "Flight source no longer supported by CocipGrid. "
-                "Any Flight source will be viewed as a GeoVectorDataset. "
-                "In particular, flight segment variable such as azimuth and true_airspeed "
-                "are not used by CocipGrid (nominal values are used instead). Attach "
-                "these to the Flight source before calling 'eval' to use them in CocipGrid."
-            )
+            msg = "CocipGrid.eval() with 'source=None' is not implemented."
+            raise NotImplementedError(msg)
         self.set_source(source)

         self.met, self.rad = _downselect_met(self.source, self.met, self.rad, self.params)
-        # Add tau_cirrus if it doesn't exist already.
         self.met = cocip.add_tau_cirrus(self.met)
-        self.
+        self._check_met_covers_source()

         # Save humidity scaling type to output attrs
-
-
+        humidity_scaling = self.params["humidity_scaling"]
+        if humidity_scaling is not None:
+            for k, v in humidity_scaling.description.items():
                 self.source.attrs[f"humidity_scaling_{k}"] = v

         self._parse_verbose_outputs()
-        self.attach_timedict()
-        pbar = self.init_pbar()

-
-
-
+        self._set_timesteps()
+        pbar = self._init_pbar()
+
+        met: MetDataset | None = None
+        rad: MetDataset | None = None
+
+        ef_summary: list[VectorDataset] = []
         verbose_dicts: list[dict[str, pd.Series]] = []
         contrail_list: list[GeoVectorDataset] = []
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        existing_vectors: Iterator[GeoVectorDataset] = iter(())
+
+        for time_idx, time_end in enumerate(self.timesteps):
+            met, rad = self._maybe_downselect_met_rad(met, rad, time_end)
+
+            evolved_this_step = []
+            ef_summary_this_step = []
+            downwash_vectors_this_step = []
+            for vector in self._generate_new_vectors(time_idx):
+                downwash, verbose_dict = _run_downwash(vector, met, rad, self.params)
+
+                if downwash:
+                    # T_crit_sac is no longer needed. If verbose_outputs_formation is True,
+                    # it's already storied in the verbose_dict data
+                    downwash.data.pop("T_crit_sac", None)
+                    downwash_vectors_this_step.append(downwash)
+                    if self.params["verbose_outputs_evolution"]:
+                        contrail_list.append(downwash)
+
+                if self.params["verbose_outputs_formation"] and verbose_dict:
+                    verbose_dicts.append(verbose_dict)
+
+                if pbar is not None:
+                    pbar.update()
+
+            for vector in itertools.chain(existing_vectors, downwash_vectors_this_step):
+                contrail, ef = _evolve_vector(
+                    vector,
+                    met=met,
+                    rad=rad,
+                    params=self.params,
+                    t=time_end,
+                )
+                if ef:
+                    evolved_this_step.append(contrail)
+                    ef_summary_this_step.append(ef)
+                    if self.params["verbose_outputs_evolution"]:
+                        contrail_list.append(contrail)
+
+                if pbar is not None:
+                    pbar.update()
+
+            if not evolved_this_step:
+                if np.all(time_end > self.source_time):
+                    break
+                continue
+
+            existing_vectors = combine_vectors(evolved_this_step, self.params["target_split_size"])
+
+            summary = VectorDataset.sum(ef_summary_this_step)
+            if summary:
+                ef_summary.append(summary)

         if pbar is not None:
             logger.debug("Close progress bar")
@@ -240,9 +289,39 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
             pbar.close()

         self._attach_verbose_outputs_evolution(contrail_list)
-
-
-
+        total_ef_summary = _aggregate_ef_summary(ef_summary)
+        return self._bundle_results(total_ef_summary, verbose_dicts)
+
+    def _maybe_downselect_met_rad(
+        self,
+        met: MetDataset | None,
+        rad: MetDataset | None,
+        time_end: np.datetime64,
+    ) -> tuple[MetDataset, MetDataset]:
+        """Downselect ``self.met`` and ``self.rad`` if necessary to cover ``time_end``.
+
+        If the currently used ``met`` and ``rad`` slices do not include the time
+        ``time_end``, new slices are selected from the larger ``self.met`` and
+        ``self.rad`` data. The slicing only occurs in the time domain.
+
+        If ``self.params["downselect_met"]`` is True, :func:`_downselect_met` has
+        already performed a spatial downselection of the met data.
+        """
+        if met is None or time_end > met.indexes["time"].to_numpy()[-1]:
+            # idx is the first index at which self.met.variables["time"].to_numpy() >= time_end
+            idx = np.searchsorted(self.met.indexes["time"].to_numpy(), time_end)
+            sl = slice(max(0, idx - 1), idx + 1)
+            logger.debug("Select met slice %s", sl)
+            met = MetDataset(self.met.data.isel(time=sl), copy=False)
+
+        if rad is None or time_end > rad.indexes["time"].to_numpy()[-1]:
+            # idx is the first index at which self.rad.variables["time"].to_numpy() >= time_end
+            idx = np.searchsorted(self.rad.indexes["time"].to_numpy(), time_end)
+            sl = slice(max(0, idx - 1), idx + 1)
+            logger.debug("Select rad slice %s", sl)
+            rad = MetDataset(self.rad.data.isel(time=sl), copy=False)
+
+        return met, rad

     def _attach_verbose_outputs_evolution(self, contrail_list: list[GeoVectorDataset]) -> None:
         """Attach intermediate artifacts to the model.
@@ -253,17 +332,18 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
         Mirrors implementation in :class:`Cocip`. We could do additional work here
         if this turns out to be useful.
         """
-        if self.params["verbose_outputs_evolution"]:
-
-
-
-
-
-
-
-
-
-
+        if not self.params["verbose_outputs_evolution"]:
+            return
+
+        self.contrail_list = contrail_list  # attach raw data
+
+        if contrail_list:
+            # And the contrail DataFrame (pd.concat is expensive here)
+            dfs = [contrail.dataframe for contrail in contrail_list]
+            dfs = [df.assign(timestep=t_idx) for t_idx, df in enumerate(dfs)]
+            self.contrail = pd.concat(dfs)
+        else:
+            self.contrail = pd.DataFrame()

     def _bundle_results(
         self,
@@ -332,6 +412,108 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
     # Common Methods & Properties
     # ---------------------------

+    @property
+    def source_time(self) -> npt.NDArray[np.datetime64]:
+        """Return the time array of the :attr:`source` data."""
+        try:
+            source = self.source
+        except AttributeError as exc:
+            msg = "No source set"
+            raise AttributeError(msg) from exc
+
+        if isinstance(source, GeoVectorDataset):
+            return source["time"]
+        if isinstance(source, MetDataset):
+            return source.indexes["time"].values
+
+        msg = f"Cannot calculate timesteps for {source}"
+        raise TypeError(msg)
+
+    def _set_timesteps(self) -> None:
+        """Set the :attr:`timesteps` based on the ``source`` time range."""
+        source_time = self.source_time
+        tmin = source_time.min()
+        tmax = source_time.max()
+
+        tmin = pd.to_datetime(tmin)
+        tmax = pd.to_datetime(tmax)
+        dt = pd.to_timedelta(self.params["dt_integration"])
+
+        t_start = tmin.ceil(dt)
+        t_end = tmax.floor(dt) + self.params["max_age"] + dt
+
+        # Pass in t_end (as opposed to tmax) to ensure that the met and rad data
+        # cover the entire evolution period.
+        _check_met_rad_time(self.met, self.rad, tmin, t_end)
+
+        self.timesteps = np.arange(t_start, t_end, dt)
+
+    def _init_pbar(self) -> tqdm.tqdm | None:
+        """Initialize a progress bar for model evaluation.
+
+        The total number of steps is estimated in a very crude way. Do not
+        rely on the progress bar for accurate estimates of runtime.
+
+        Returns
+        -------
+        tqdm.tqdm | None
+            A progress bar for model evaluation. If ``show_progress`` is False, returns None.
+        """
+
+        if not self.params["show_progress"]:
+            return None
+
+        try:
+            from tqdm.auto import tqdm
+        except ModuleNotFoundError as exc:
+            dependencies.raise_module_not_found_error(
+                name="CocipGrid._init_pbar method",
+                package_name="tqdm",
+                module_not_found_error=exc,
+                extra="Alternatively, set model parameter 'show_progress=False'.",
+            )
+
+        split_size = self.params["target_split_size"]
+        if isinstance(self.source, MetDataset):
+            n_splits_by_time = self._metdataset_source_n_splits()
+            n_splits = len(self.source_time) * n_splits_by_time
+        else:
+            tmp1 = self.source_time[:, None] < self.timesteps[1:]
+            tmp2 = self.source_time[:, None] >= self.timesteps[:-1]
+            n_points_by_timestep = np.sum(tmp1 & tmp2, axis=0)
+
+            init_split_size = self.params["target_split_size_pre_SAC_boost"] * split_size
+            n_splits_by_time = np.ceil(n_points_by_timestep / init_split_size)
+            n_splits = np.sum(n_splits_by_time)
+
+        n_init_surv = 0.1 * n_splits  # assume 10% of points survive the downwash
+        n_evo_steps = len(self.timesteps) * n_init_surv
+        total = n_splits + n_evo_steps
+
+        return tqdm(total=int(total), desc=f"{type(self).__name__} eval")
+
+    def _metdataset_source_n_splits(self) -> int:
+        """Compute the number of splits at a given time for a :class:`MetDataset` source.
+
+        This method assumes :attr:`source` is a :class:`MetDataset`.
+
+        Returns
+        -------
+        int
+            The number of splits.
+        """
+        if not isinstance(self.source, MetDataset):
+            msg = f"Expected source to be a MetDataset, found {type(self.source)}"
+            raise TypeError(msg)
+
+        indexes = self.source.indexes
+        grid_size = indexes["longitude"].size * indexes["latitude"].size * indexes["level"].size
+
+        split_size = int(
+            self.params["target_split_size_pre_SAC_boost"] * self.params["target_split_size"]
+        )
+        return max(grid_size // split_size, 1)
+
     def _parse_verbose_outputs(self) -> None:
         """Confirm param "verbose_outputs" has the expected type for grid and path mode.

@@ -341,10 +523,11 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
         is determine by :func:`_supported_verbose_outputs`.
         """
         if self.params["verbose_outputs"]:
-
+            msg = (
                 "Parameter 'verbose_outputs' is no longer supported for grid mode. "
                 "Instead, use 'verbose_outputs_formation' and 'verbose_outputs_evolution'."
             )
+            raise ValueError(msg)
         vo = self.params["verbose_outputs_formation"]
         supported = _supported_verbose_outputs_formation()

@@ -365,15 +548,13 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
         )
         self.params["verbose_outputs_formation"] = vo & supported

-    def _generate_new_vectors(
-        self, filt: npt.NDArray[np.bool_]
-    ) -> Generator[GeoVectorDataset, None, None]:
+    def _generate_new_vectors(self, time_idx: int) -> Generator[GeoVectorDataset, None, None]:
        """Generate :class:`GeoVectorDataset` instances from :attr:`source`.

         Parameters
         ----------
-
-
+        time_idx : int
+            The index of the current time slice in :attr:`timesteps`.

         Yields
         ------
@@ -384,14 +565,25 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
         """
         if "index" in self.source:
             # FIXME: We can simply change the internal variable to __index
-
+            msg = "The variable 'index' is used internally. Found in source."
+            raise RuntimeError(msg)
+
+        source_time = self.source_time
+        t_cur = self.timesteps[time_idx]
+        if time_idx == 0:
+            filt = source_time < t_cur
+        else:
+            t_prev = self.timesteps[time_idx - 1]
+            filt = (source_time >= t_prev) & (source_time < t_cur)
+
+        if not filt.any():
+            return

         if isinstance(self.source, MetDataset):
-
-            times_in_filt = self.source_time[filt]
+            times_in_filt = source_time[filt]
             filt_start_idx = np.argmax(filt).item()  # needed to ensure globally unique indexes

-            n_splits = self.
+            n_splits = self._metdataset_source_n_splits()
             for idx, time in enumerate(times_in_filt):
                 # For now, sticking with the convention that every vector should
                 # have a constant time value.
@@ -399,9 +591,11 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):

                 # Convert the 4D grid to a vector
                 vector = source_slice.to_vector()
-                vector.update(
-
-
+                vector.update(
+                    longitude=vector["longitude"].astype(self._target_dtype, copy=False),
+                    latitude=vector["latitude"].astype(self._target_dtype, copy=False),
+                    level=vector["level"].astype(self._target_dtype, copy=False),
+                )
                 vector["index"] = source_time.size * np.arange(vector.size) + filt_start_idx + idx

                 # Split into chunks
@@ -431,7 +625,8 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
                 yield subvector

         else:
-
+            msg = f"Unknown source {self.source}"
+            raise TypeError(msg)

     def _build_subvector(self, vector: GeoVectorDataset) -> GeoVectorDataset:
         """Mutate `vector` by adding additional keys."""
@@ -455,11 +650,14 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
         if azimuth is None and segment_length is None:
             return vector
         if azimuth is None:
-
+            msg = "Set 'segment_length' to None for experimental segment-free model"
+            raise ValueError(msg)
         if segment_length is None:
-
+            msg = "Set 'azimuth' to None for experimental segment-free model"
+            raise ValueError(msg)
         if self.params["dsn_dz_factor"]:
-
+            msg = "'dsn_dz_factor' not supported outside of the segment-free mode"
+            raise ValueError(msg)

         lons = vector["longitude"]
         lats = vector["latitude"]
@@ -476,28 +674,40 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):

         return vector

-    def
-
-            raise ValueError("No source set")
+    def _check_met_covers_source(self) -> None:
+        """Ensure that the met and rad data cover the source data.

-
-
-
-
-
+        See also :func:`_check_met_rad_time` which checks the time coverage
+        in more detail.
+        """
+        try:
+            source = self.source
+        except AttributeError as exc:
+            msg = "No source set"
+            raise AttributeError(msg) from exc
+
+        if isinstance(source, MetDataset):
+            indexes = source.indexes
+            longitude = indexes["longitude"].to_numpy()
+            latitude = indexes["latitude"].to_numpy()
+            level = indexes["level"].to_numpy()
+            time = indexes["time"].to_numpy()
         else:
-            longitude =
-            latitude =
-            level =
-            time =
-
-
-
-
-
-
-
-
+            longitude = source["longitude"]
+            latitude = source["latitude"]
+            level = source.level
+            time = source["time"]
+
+        indexes = self.met.indexes
+        _check_coverage(indexes["longitude"].to_numpy(), longitude, "longitude", "met")
+        _check_coverage(indexes["latitude"].to_numpy(), latitude, "latitude", "met")
+        _check_coverage(indexes["level"].to_numpy(), level, "level", "met")
+        _check_coverage(indexes["time"].to_numpy(), time, "time", "met")
+
+        indexes = self.rad.indexes
+        _check_coverage(indexes["longitude"].to_numpy(), longitude, "longitude", "rad")
+        _check_coverage(indexes["latitude"].to_numpy(), latitude, "latitude", "rad")
+        _check_coverage(indexes["time"].to_numpy(), time, "time", "rad")

         _warn_not_wrap(self.met)
         _warn_not_wrap(self.rad)
@@ -557,7 +767,8 @@ class CocipGrid(models.Model, cocip_time_handling.CocipTimeHandlingMixin):
         out = MetDataset.from_coords(longitude=longitude, latitude=latitude, level=level, time=time)

         if np.any(out.data.latitude > 80.0001) or np.any(out.data.latitude < -80.0001):
-
+            msg = "Model only supports latitude between -80 and 80."
+            raise ValueError(msg)

         return out

@@ -602,10 +813,11 @@ def _setdefault_from_params(key: str, vector: GeoVectorDataset, params: dict[str
         return

     if not isinstance(scalar, (int, float)):
-
+        msg = (
            f"Parameter {key} must be a scalar. For non-scalar values, directly "
            "set the data on the 'source'."
        )
+        raise TypeError(msg)
     vector.attrs[key] = float(scalar)


@@ -688,7 +900,8 @@ def run_interpolators(

     if keys:
         if rad is not None:
-
+            msg = "The 'keys' override only valid for 'met' input"
+            raise ValueError(msg)

         for met_key in keys:
             # NOTE: Changed in v0.43: no longer overwrites existing variables
@@ -697,9 +910,11 @@ def run_interpolators(
         return _apply_humidity_scaling(vector, humidity_scaling, humidity_interpolated)

     if dz_m is None:
-
+        msg = "Specify 'dz_m'."
+        raise TypeError(msg)
     if rad is None:
-
+        msg = "Specify 'rad'."
+        raise TypeError(msg)

     # Interpolation at usual level
     # Excluded keys are not needed -- only used to initially compute tau_cirrus
@@ -803,20 +1018,13 @@ def _apply_humidity_scaling(

 def _evolve_vector(
     vector: GeoVectorDataset,
+    *,
     met: MetDataset,
     rad: MetDataset,
     params: dict[str, Any],
-
-
-
-    GeoVectorDataset,
-    VectorDataset | None,
-    dict[str, pd.Series] | None,
-    list[GeoVectorDataset] | None,
-]:
-    """Evolve ``vector`` over lifespan of parameter ``met``.
-
-    The parameter ``met`` is used as the source of timesteps for contrail evolution.
+    t: np.datetime64,
+) -> tuple[GeoVectorDataset, VectorDataset]:
+    """Evolve ``vector`` to time ``t``.

     Return surviving contrail at end of evolution and aggregate metrics from evolution.

@@ -830,122 +1038,48 @@ def _evolve_vector(
     Parameters
     ----------
     vector : GeoVectorDataset
-
+        Contrail points that have been initialized and are ready for evolution.
     met, rad : MetDataset
         CoCiP met and rad slices. See :class:`CocipGrid`.
     params : dict[str, Any]
         CoCiP model parameters. See :class:`CocipGrid`.
-
-
-    pbar : tqdm.tqmd | None
-        Track ``tqdm`` progress bar over simulation.
+    t : np.datetime64
+        Time to evolve to.

     Returns
     -------
-
-        Evolved contrail at end of evolution.
-
-
-
-        Dictionary of verbose outputs. None if ``run_downwash`` is False.
-    contrail_list : list[GeoVectorDataset] | None
-        List of intermediate evolved contrails. None if
-        ``params["verbose_outputs_evolution"]`` is False.
+    contrail : GeoVectorDataset
+        Evolved contrail at end of the evolution step.
+    ef_summary : VectorDataset
+        The ``contrail`` summary statistics. The result of
+        ``contrail.select(("index", "age", "ef"), copy=False)``.
     """
-
-    contrail_list: list[GeoVectorDataset] | None
-    contrail_list = [] if params["verbose_outputs_evolution"] else None
-
-    # Run downwash and first contrail calculation
-    if run_downwash:
-        vector, verbose_dict = _run_downwash(vector, met, rad, params)
-        if contrail_list is not None:
-            contrail_list.append(vector.copy())
-
-        # T_crit_sac is no longer needed. If verbose_outputs_formation is True,
-        # it's already storied in the verbose_dict adta
-        vector.data.pop("T_crit_sac", None)
-        if pbar is not None:
-            pbar.update()
+    dt = t - vector["time"]

-
-
-
+    if _is_segment_free_mode(vector):
+        dt_head = None
+        dt_tail = None
     else:
-
-
-
-
-        met_times = met.data["time"].values
-        t0 = met_times[0]
-        t1 = met_times[-1]
-        dt_integration = params["dt_integration"]
-        timesteps = np.arange(t0 + dt_integration, t1 + dt_integration, dt_integration)
-
-        # Not strictly necessary: Avoid looping few first few timesteps if waypoints not
-        # yet online. Cocip uses similar logic in _calc_timesteps.
-        timesteps = timesteps[timesteps > vector["time"].min()]
-
-    # Only used for logging below
-    start_size = vector.size
-
-    for t in timesteps:
-        if not vector:
-            break
-
-        # This if-else below is not strictly necessary ... it might be slightly
-        # more performant to avoid the call to vector.filter, which only occurs
-        # with GeoVectorDataset sources.
-        filt = vector["time"] < t
-        if np.all(filt):
-            v_now = vector
-            v_future = None
-        else:
-            v_now = vector.filter(filt)
-            v_future = vector.filter(~filt)
-
-        if not v_now:
-            continue
+        head_tail_dt = vector["head_tail_dt"]
+        half_head_tail_dt = head_tail_dt / 2
+        dt_head = dt - half_head_tail_dt  # type: ignore[operator]
+        dt_tail = dt + half_head_tail_dt  # type: ignore[operator]

-
+    # After advection, out has time t
+    out = advect(vector, dt, dt_head, dt_tail)  # type: ignore[arg-type]

-
-
-
-
-
-
-
-
-
-
-        # After advection, v_next has time t
-        v_next = advect(v_now, dt, dt_head, dt_tail)
-
-        v_next = run_interpolators(
-            v_next,
-            met,
-            rad,
-            dz_m=params["dz_m"],
-            humidity_scaling=params["humidity_scaling"],
-            **params["_interp_kwargs"],
-        )
-        v_next = calc_evolve_one_step(v_now, v_next, params)
-        if v_next:
-            summary_data.append(v_next.select(("index", "age", "ef")))
-        vector = v_next + v_future
-
-        if contrail_list is not None:
-            contrail_list.append(vector)
-        if pbar is not None:
-            pbar.update()
-
-    # Bundle results, return tuple
-    end_size = vector.size
-    logger.debug("After evolution, contrail contains %s / %s points.", end_size, start_size)
+    out = run_interpolators(
+        out,
+        met,
+        rad,
+        dz_m=params["dz_m"],
+        humidity_scaling=params["humidity_scaling"],
+        **params["_interp_kwargs"],
+    )
+    out = calc_evolve_one_step(vector, out, params)
+    ef_summary = out.select(("index", "age", "ef"), copy=False)

-
-    return vector, summary, verbose_dict, contrail_list
+    return out, ef_summary


 def _run_downwash(
@@ -1007,24 +1141,24 @@ def _run_downwash(
         return vector, verbose_dict

     vector = run_interpolators(vector, met, rad, dz_m=params["dz_m"], **params["_interp_kwargs"])
-
+    out = simulate_wake_vortex_downwash(vector, params)

-
-
+    out = run_interpolators(
+        out,
         met,
         rad,
         dz_m=params["dz_m"],
         humidity_scaling=params["humidity_scaling"],
         **params["_interp_kwargs"],
     )
-
+    out, persistent = find_initial_persistent_contrails(vector, out, params)

     if (key := "persistent") in verbose_outputs_formation:
         verbose_dict[key] = persistent
-    if (key := "iwc") in verbose_outputs_formation and (data :=
-        verbose_dict[key] = pd.Series(data=data, index=
+    if (key := "iwc") in verbose_outputs_formation and (data := out.get(key)) is not None:
+        verbose_dict[key] = pd.Series(data=data, index=out["index"])

-    return
+    return out, verbose_dict


 def combine_vectors(
@@ -1671,6 +1805,7 @@ def calc_emissions(vector: GeoVectorDataset, params: dict[str, Any]) -> None:
 def calc_wind_shear(
     contrail: GeoVectorDataset,
     dz_m: float,
+    *,
     is_downwash: bool,
     dsn_dz_factor: float,
 ) -> None:
@@ -1770,7 +1905,7 @@ def calc_thermal_properties(contrail: GeoVectorDataset) -> None:

 def advect(
     contrail: GeoVectorDataset,
-    dt: np.timedelta64,
+    dt: np.timedelta64 | npt.NDArray[np.timedelta64],
     dt_head: np.timedelta64 | None,
     dt_tail: np.timedelta64 | None,
 ) -> GeoVectorDataset:
@@ -1788,7 +1923,7 @@ def advect(
     ----------
     contrail : GeoVectorDataset
         Grid points already interpolated against wind data
-    dt : np.timedelta64
+    dt : np.timedelta64 | npt.NDArray[np.timedelta64]
         Time step for advection
     dt_head : np.timedelta64 | None
         Time step for segment head advection. Use None for segment-free mode.
@@ -1896,8 +2031,8 @@ def advect(
     return GeoVectorDataset(data, attrs=contrail.attrs, copy=True)


-def
-    """Aggregate results after cocip simulation.
+def _aggregate_ef_summary(vector_list: list[VectorDataset]) -> VectorDataset | None:
+    """Aggregate EF results after cocip simulation.

     Results are summed over each vector in ``vector_list``.

@@ -2059,7 +2194,7 @@ def _concat_verbose_dicts(
     # Concatenate the values and return
     ret: dict[str, np.ndarray] = {}
     for key in verbose_outputs_formation:
-        series_list = [v for d in verbose_dicts if (v := d.get(key)) is not None]
+        series_list = [v for d in verbose_dicts if d and (v := d.get(key)) is not None]
         data = np.concatenate(series_list)
         index = np.concatenate([s.index for s in series_list])
@@ -2093,6 +2228,7 @@ def _contrail_grid_variable_attrs() -> dict[str, dict[str, str]]:
         },
         "engine_efficiency": {"long_name": "Engine efficiency"},
         "true_airspeed": {"long_name": "True airspeed", "units": "m / s"},
+        "aircraft_mass": {"long_name": "Aircraft mass", "units": "kg"},
         "nvpm_ei_n": {
             "long_name": "Black carbon emissions index number",
             "units": "kg^{-1}",
@@ -2124,13 +2260,14 @@ def _warn_not_wrap(met: MetDataset) -> None:
     met : MetDataset
         Met dataset
     """
-    if
-
-
-
-
-
-        )
+    if met.is_wrapped:
+        return
+    lon = met.indexes["longitude"]
+    if lon.min() == -180.0 and lon.max() == 179.75:
+        warnings.warn(
+            "The MetDataset `met` not been wrapped. The CocipGrid model may "
+            "perform better if `met.wrap_longitude()` is called first."
+        )


 def _get_uncertainty_params(contrail: VectorDataset) -> dict[str, npt.NDArray[np.float64]]:
@@ -2171,12 +2308,10 @@ def _get_uncertainty_params(contrail: VectorDataset) -> dict[str, npt.NDArray[np
 _T = TypeVar("_T", np.float64, np.datetime64)


-def _check_overlap(
+def _check_coverage(
     met_array: npt.NDArray[_T], grid_array: npt.NDArray[_T], coord: str, name: str
 ) -> None:
-    """
-
-    Warn if grid coordinate extends beyond met coordinate.
+    """Warn if the met data does not cover the entire source domain.

     Parameters
     ----------
@@ -2191,8 +2326,8 @@ def _check_overlap(
     """
     if met_array.min() > grid_array.min() or met_array.max() < grid_array.max():
         warnings.warn(
-            f"Met data '{name}' does not
-            "This causes interpolated values to be nan, leading to meaningless results."
+            f"Met data '{name}' does not cover the source domain along the {coord} axis. "
+            "This causes some interpolated values to be nan, leading to meaningless results."
         )


@@ -2232,7 +2367,6 @@ def _downselect_met(
     :meth:`Model.downselect_met`
     """

-    # return if downselect_met is False
     if not params["downselect_met"]:
         logger.debug("Avoiding downselecting met because params['downselect_met'] is False")
         return met, rad
@@ -2249,23 +2383,6 @@
     t0 = time_buffer[0]
     t1 = time_buffer[1] + params["max_age"] + params["dt_integration"]

-    if isinstance(source, MetDataset):
-        # MetDataset doesn't have a downselect_met method, so create a
-        # GeoVectorDataset and downselect there
-        # Just take extreme here for downselection
-        # We may want to change min / max to nanmin / nanmax
-        ds = source.data
-        lon = ds["longitude"].values
-        lat = ds["latitude"].values
-        level = ds["level"].values
-        time = ds["time"].values
-        source = GeoVectorDataset(
-            longitude=[lon.min(), lon.max()],
-            latitude=[lat.min(), lat.max()],
-            level=[level.min(), level.max()],
-            time=[time.min(), time.max()],
-        )
-
     met = source.downselect_met(
         met,
         latitude_buffer=latitude_buffer,
@@ -2289,3 +2406,70 @@ def _downselect_met(
 def _is_segment_free_mode(vector: GeoVectorDataset) -> bool:
     """Determine if model is run in a segment-free mode."""
     return "longitude_head" not in vector
+
+
+def _check_met_rad_time(
+    met: MetDataset,
+    rad: MetDataset,
+    tmin: pd.Timestamp,
+    tmax: pd.Timestamp,
+) -> None:
+    """Warn if meteorology data doesn't cover a required time range.
+
+    Parameters
+    ----------
+    met : MetDataset
+        Meteorology dataset
+    rad : MetDataset
+        Radiative flux dataset
+    tmin: pd.Timestamp
+        Start of required time range
+    tmax:pd.Timestamp
+        End of required time range
+    """
+    met_time = met.data["time"].values
+    met_tmin = pd.to_datetime(met_time.min())
+    met_tmax = pd.to_datetime(met_time.max())
+    _check_start_time(met_tmin, tmin, "met")
+    _check_end_time(met_tmax, tmax, "met")
+
+    rad_time = rad.data["time"].values
+    rad_tmin = pd.to_datetime(rad_time.min())
+    rad_tmax = pd.to_datetime(rad_time.max())
+    note = "differencing reduces time coverage when providing accumulated radiative fluxes."
+    _check_start_time(rad_tmin, tmin, "rad", note=note)
+    _check_end_time(rad_tmax, tmax, "rad", note=note)
+
+
+def _check_start_time(
+    met_start: pd.Timestamp,
+    model_start: pd.Timestamp,
+    name: str,
+    *,
+    note: str | None = None,
+) -> None:
+    if met_start > model_start:
+        note = f" Note: {note}" if note else ""
+        warnings.warn(
+            f"Start time of parameter '{name}' ({met_start}) "
+            f"is after model start time ({model_start}). "
+            f"Include additional time at the start of '{name}'."
+            f"{note}"
+        )
+
+
+def _check_end_time(
+    met_end: pd.Timestamp,
+    model_end: pd.Timestamp,
+    name: str,
+    *,
+    note: str | None = None,
+) -> None:
+    if met_end < model_end:
+        note = f" Note: {note}" if note else ""
+        warnings.warn(
+            f"End time of parameter '{name}' ({met_end}) "
+            f"is before model end time ({model_end}). "
+            f"Include additional time at the end of '{name}' or reduce 'max_age' parameter."
+            f"{note}"
+        )