pycontrails 0.53.0__cp313-cp313-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pycontrails might be problematic. Click here for more details.

Files changed (109) hide show
  1. pycontrails/__init__.py +70 -0
  2. pycontrails/_version.py +16 -0
  3. pycontrails/core/__init__.py +30 -0
  4. pycontrails/core/aircraft_performance.py +641 -0
  5. pycontrails/core/airports.py +226 -0
  6. pycontrails/core/cache.py +881 -0
  7. pycontrails/core/coordinates.py +174 -0
  8. pycontrails/core/fleet.py +470 -0
  9. pycontrails/core/flight.py +2312 -0
  10. pycontrails/core/flightplan.py +220 -0
  11. pycontrails/core/fuel.py +140 -0
  12. pycontrails/core/interpolation.py +721 -0
  13. pycontrails/core/met.py +2833 -0
  14. pycontrails/core/met_var.py +307 -0
  15. pycontrails/core/models.py +1181 -0
  16. pycontrails/core/polygon.py +549 -0
  17. pycontrails/core/rgi_cython.cpython-313-darwin.so +0 -0
  18. pycontrails/core/vector.py +2191 -0
  19. pycontrails/datalib/__init__.py +12 -0
  20. pycontrails/datalib/_leo_utils/search.py +250 -0
  21. pycontrails/datalib/_leo_utils/static/bq_roi_query.sql +6 -0
  22. pycontrails/datalib/_leo_utils/vis.py +59 -0
  23. pycontrails/datalib/_met_utils/metsource.py +743 -0
  24. pycontrails/datalib/ecmwf/__init__.py +53 -0
  25. pycontrails/datalib/ecmwf/arco_era5.py +527 -0
  26. pycontrails/datalib/ecmwf/common.py +109 -0
  27. pycontrails/datalib/ecmwf/era5.py +538 -0
  28. pycontrails/datalib/ecmwf/era5_model_level.py +482 -0
  29. pycontrails/datalib/ecmwf/hres.py +782 -0
  30. pycontrails/datalib/ecmwf/hres_model_level.py +495 -0
  31. pycontrails/datalib/ecmwf/ifs.py +284 -0
  32. pycontrails/datalib/ecmwf/model_levels.py +79 -0
  33. pycontrails/datalib/ecmwf/static/model_level_dataframe_v20240418.csv +139 -0
  34. pycontrails/datalib/ecmwf/variables.py +256 -0
  35. pycontrails/datalib/gfs/__init__.py +28 -0
  36. pycontrails/datalib/gfs/gfs.py +646 -0
  37. pycontrails/datalib/gfs/variables.py +100 -0
  38. pycontrails/datalib/goes.py +772 -0
  39. pycontrails/datalib/landsat.py +568 -0
  40. pycontrails/datalib/sentinel.py +512 -0
  41. pycontrails/datalib/spire.py +739 -0
  42. pycontrails/ext/bada.py +41 -0
  43. pycontrails/ext/cirium.py +14 -0
  44. pycontrails/ext/empirical_grid.py +140 -0
  45. pycontrails/ext/synthetic_flight.py +426 -0
  46. pycontrails/models/__init__.py +1 -0
  47. pycontrails/models/accf.py +406 -0
  48. pycontrails/models/apcemm/__init__.py +8 -0
  49. pycontrails/models/apcemm/apcemm.py +983 -0
  50. pycontrails/models/apcemm/inputs.py +226 -0
  51. pycontrails/models/apcemm/static/apcemm_yaml_template.yaml +183 -0
  52. pycontrails/models/apcemm/utils.py +437 -0
  53. pycontrails/models/cocip/__init__.py +29 -0
  54. pycontrails/models/cocip/cocip.py +2617 -0
  55. pycontrails/models/cocip/cocip_params.py +299 -0
  56. pycontrails/models/cocip/cocip_uncertainty.py +285 -0
  57. pycontrails/models/cocip/contrail_properties.py +1517 -0
  58. pycontrails/models/cocip/output_formats.py +2261 -0
  59. pycontrails/models/cocip/radiative_forcing.py +1262 -0
  60. pycontrails/models/cocip/radiative_heating.py +520 -0
  61. pycontrails/models/cocip/unterstrasser_wake_vortex.py +403 -0
  62. pycontrails/models/cocip/wake_vortex.py +396 -0
  63. pycontrails/models/cocip/wind_shear.py +120 -0
  64. pycontrails/models/cocipgrid/__init__.py +9 -0
  65. pycontrails/models/cocipgrid/cocip_grid.py +2573 -0
  66. pycontrails/models/cocipgrid/cocip_grid_params.py +138 -0
  67. pycontrails/models/dry_advection.py +486 -0
  68. pycontrails/models/emissions/__init__.py +21 -0
  69. pycontrails/models/emissions/black_carbon.py +594 -0
  70. pycontrails/models/emissions/emissions.py +1353 -0
  71. pycontrails/models/emissions/ffm2.py +336 -0
  72. pycontrails/models/emissions/static/default-engine-uids.csv +239 -0
  73. pycontrails/models/emissions/static/edb-gaseous-v29b-engines.csv +596 -0
  74. pycontrails/models/emissions/static/edb-nvpm-v29b-engines.csv +215 -0
  75. pycontrails/models/humidity_scaling/__init__.py +37 -0
  76. pycontrails/models/humidity_scaling/humidity_scaling.py +1025 -0
  77. pycontrails/models/humidity_scaling/quantiles/era5-model-level-quantiles.pq +0 -0
  78. pycontrails/models/humidity_scaling/quantiles/era5-pressure-level-quantiles.pq +0 -0
  79. pycontrails/models/issr.py +210 -0
  80. pycontrails/models/pcc.py +327 -0
  81. pycontrails/models/pcr.py +154 -0
  82. pycontrails/models/ps_model/__init__.py +17 -0
  83. pycontrails/models/ps_model/ps_aircraft_params.py +376 -0
  84. pycontrails/models/ps_model/ps_grid.py +505 -0
  85. pycontrails/models/ps_model/ps_model.py +1017 -0
  86. pycontrails/models/ps_model/ps_operational_limits.py +540 -0
  87. pycontrails/models/ps_model/static/ps-aircraft-params-20240524.csv +68 -0
  88. pycontrails/models/ps_model/static/ps-synonym-list-20240524.csv +103 -0
  89. pycontrails/models/sac.py +459 -0
  90. pycontrails/models/tau_cirrus.py +168 -0
  91. pycontrails/physics/__init__.py +1 -0
  92. pycontrails/physics/constants.py +116 -0
  93. pycontrails/physics/geo.py +989 -0
  94. pycontrails/physics/jet.py +837 -0
  95. pycontrails/physics/thermo.py +451 -0
  96. pycontrails/physics/units.py +472 -0
  97. pycontrails/py.typed +0 -0
  98. pycontrails/utils/__init__.py +1 -0
  99. pycontrails/utils/dependencies.py +66 -0
  100. pycontrails/utils/iteration.py +13 -0
  101. pycontrails/utils/json.py +188 -0
  102. pycontrails/utils/temp.py +50 -0
  103. pycontrails/utils/types.py +165 -0
  104. pycontrails-0.53.0.dist-info/LICENSE +178 -0
  105. pycontrails-0.53.0.dist-info/METADATA +181 -0
  106. pycontrails-0.53.0.dist-info/NOTICE +43 -0
  107. pycontrails-0.53.0.dist-info/RECORD +109 -0
  108. pycontrails-0.53.0.dist-info/WHEEL +5 -0
  109. pycontrails-0.53.0.dist-info/top_level.txt +3 -0
@@ -0,0 +1,1181 @@
1
+ """Physical model data structures."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import contextlib
6
+ import functools
7
+ import hashlib
8
+ import json
9
+ import logging
10
+ import warnings
11
+ from abc import ABC, abstractmethod
12
+ from collections.abc import Sequence
13
+ from dataclasses import dataclass, fields
14
+ from typing import Any, NoReturn, TypeVar, overload
15
+
16
+ import numpy as np
17
+ import numpy.typing as npt
18
+ import pandas as pd
19
+ import scipy.interpolate
20
+ import xarray as xr
21
+
22
+ from pycontrails.core.fleet import Fleet
23
+ from pycontrails.core.flight import Flight
24
+ from pycontrails.core.met import MetDataArray, MetDataset, MetVariable, originates_from_ecmwf
25
+ from pycontrails.core.met_var import SpecificHumidity
26
+ from pycontrails.core.vector import GeoVectorDataset
27
+ from pycontrails.utils.json import NumpyEncoder
28
+ from pycontrails.utils.types import type_guard
29
+
30
+ logger = logging.getLogger(__name__)
31
+
32
#: Model input source types
ModelInput = MetDataset | GeoVectorDataset | Flight | Sequence[Flight] | None

#: Model output source types
ModelOutput = MetDataArray | MetDataset | GeoVectorDataset | Flight | list[Flight]

#: Model attribute source types
SourceType = MetDataset | GeoVectorDataset | Flight | Fleet

#: Generic type variable used to narrow the ``source`` type in
#: :meth:`Model.require_source_type`.
_Source = TypeVar("_Source")
42
+
43
+ # ------------
44
+ # Model Params
45
+ # ------------
46
+
47
+
48
+ @dataclass
49
+ class ModelParams:
50
+ """Class for constructing model parameters.
51
+
52
+ Implementing classes must still use the ``@dataclass`` operator.
53
+ """
54
+
55
+ #: Copy input ``source`` data on eval
56
+ copy_source: bool = True
57
+
58
+ # -----------
59
+ # Interpolate
60
+ # -----------
61
+
62
+ #: Interpolation method. Supported methods include "linear", "nearest", "slinear",
63
+ #: "cubic", and "quintic". See :class:`scipy.interpolate.RegularGridInterpolator`
64
+ #: for the description of each method. Not all methods are supported by all
65
+ #: met grids. For example, the "cubic" method requires at least 4 points per
66
+ #: dimension.
67
+ interpolation_method: str = "linear"
68
+
69
+ #: If True, points lying outside interpolation will raise an error
70
+ interpolation_bounds_error: bool = False
71
+
72
+ #: Used for outside interpolation value if :attr:`interpolation_bounds_error` is False
73
+ interpolation_fill_value: float = np.nan
74
+
75
+ #: Experimental. See :mod:`pycontrails.core.interpolation`.
76
+ interpolation_localize: bool = False
77
+
78
+ #: Experimental. See :mod:`pycontrails.core.interpolation`.
79
+ interpolation_use_indices: bool = False
80
+
81
+ #: Experimental. Alternative interpolation method to account for specific humidity
82
+ #: lapse rate bias. Must be one of ``None``, ``"cubic-spline"``, or ``"log-q-log-p"``.
83
+ #: If ``None``, no special interpolation is used for specific humidity.
84
+ #: The ``"cubic-spline"`` method applies a custom stretching of the met interpolation
85
+ #: table to account for the specific humidity lapse rate bias. The ``"log-q-log-p"``
86
+ #: method interpolates in the log of specific humidity and pressure, then converts
87
+ #: back to specific humidity.
88
+ #: Only used by models calling to :func:`interpolate_met`.
89
+ interpolation_q_method: str | None = None
90
+
91
+ # -----------
92
+ # Meteorology
93
+ # -----------
94
+
95
+ #: Call :meth:`_verify_met` on model instantiation.
96
+ verify_met: bool = True
97
+
98
+ #: Downselect input :class:`MetDataset`` to region around ``source``.
99
+ downselect_met: bool = True
100
+
101
+ #: Met longitude buffer for input to :meth:`Flight.downselect_met`,
102
+ #: in WGS84 coordinates.
103
+ #: Only applies when :attr:`downselect_met` is True.
104
+ met_longitude_buffer: tuple[float, float] = (0.0, 0.0)
105
+
106
+ #: Met latitude buffer for input to :meth:`Flight.downselect_met`,
107
+ #: in WGS84 coordinates.
108
+ #: Only applies when :attr:`downselect_met` is True.
109
+ met_latitude_buffer: tuple[float, float] = (0.0, 0.0)
110
+
111
+ #: Met level buffer for input to :meth:`Flight.downselect_met`,
112
+ #: in [:math:`hPa`].
113
+ #: Only applies when :attr:`downselect_met` is True.
114
+ met_level_buffer: tuple[float, float] = (0.0, 0.0)
115
+
116
+ #: Met time buffer for input to :meth:`Flight.downselect_met`
117
+ #: Only applies when :attr:`downselect_met` is True.
118
+ met_time_buffer: tuple[np.timedelta64, np.timedelta64] = (
119
+ np.timedelta64(0, "h"),
120
+ np.timedelta64(0, "h"),
121
+ )
122
+
123
+ def as_dict(self) -> dict[str, Any]:
124
+ """Convert object to dictionary.
125
+
126
+ We use this method instead of `dataclasses.asdict`
127
+ to use a shallow/unrecursive copy.
128
+ This will return values as Any instead of dict.
129
+
130
+ Returns
131
+ -------
132
+ dict[str, Any]
133
+ Dictionary version of self.
134
+ """
135
+ return {(name := field.name): getattr(self, name) for field in fields(self)}
136
+
137
+
138
+ # ------
139
+ # Models
140
+ # ------
141
+
142
+
143
class Model(ABC):
    """Base class for physical models.

    Implementing classes must implement the :meth:`eval` method.
    """

    __slots__ = ("params", "met", "source")

    #: Default model parameter dataclass
    default_params: type[ModelParams] = ModelParams

    #: Instantiated model parameters, in dictionary form
    params: dict[str, Any]

    #: Data evaluated in model
    source: SourceType

    #: Meteorology data
    met: MetDataset | None

    #: Require meteorology is not None on __init__()
    met_required: bool = False

    #: Required meteorology pressure level variables.
    #: Each element in the list is a :class:`MetVariable` or a ``tuple[MetVariable]``.
    #: If element is a ``tuple[MetVariable]``, the variable depends on the data source.
    #: Only one variable in the tuple is required.
    met_variables: tuple[MetVariable | tuple[MetVariable, ...], ...]

    #: Set of required parameters if processing already complete on ``met`` input.
    processed_met_variables: tuple[MetVariable, ...]

    #: Optional meteorology variables
    optional_met_variables: tuple[MetVariable | tuple[MetVariable, ...], ...]

    def __init__(
        self,
        met: MetDataset | None = None,
        params: ModelParams | dict[str, Any] | None = None,
        **params_kwargs: Any,
    ) -> None:
        # Load base params, override default and user params
        self._load_params(params, **params_kwargs)

        # Do *not* copy met on input
        self.met = met

        # require met inputs
        if self.met_required:
            self.require_met()

        # verify met variables
        if self.params["verify_met"]:
            self._verify_met()

        # Warn if humidity_scaling param is NOT present for ECMWF met data
        humidity_scaling = self.params.get("humidity_scaling")

        if (
            humidity_scaling is None
            and self.met is not None
            and SpecificHumidity in getattr(self, "met_variables", ())
            and originates_from_ecmwf(self.met)
        ):
            warnings.warn(
                "\nMet data appears to have originated from ECMWF and no humidity "
                "scaling is enabled. For ECMWF data, consider using one of: \n"
                " - 'ConstantHumidityScaling'\n"
                " - 'ExponentialBoostHumidityScaling'\n"
                " - 'ExponentialBoostLatitudeCorrectionHumidityScaling'\n"
                " - 'HistogramMatching'\n"
                "For example: \n"
                ">>> from pycontrails.models.humidity_scaling import ConstantHumidityScaling\n"
                f">>> {type(self).__name__}(met=met, ..., humidity_scaling=ConstantHumidityScaling(rhi_adj=0.99))"  # noqa: E501
            )

        # Ensure humidity_scaling q_method matches parent model
        elif humidity_scaling is not None:
            # Some humidity scaling models use the interpolation_q_method parameter to determine
            # which parameters to use for scaling. Ensure that both models are consistent.
            parent_q = self.params["interpolation_q_method"]
            if humidity_scaling.params["interpolation_q_method"] != parent_q:
                warnings.warn(
                    f"Model {type(self).__name__} uses interpolation_q_method={parent_q} but "
                    f"humidity_scaling model {type(humidity_scaling).__name__} uses "
                    f"interpolation_q_method={humidity_scaling.params['interpolation_q_method']}. "
                    "Overriding humidity_scaling interpolation_q_method to match parent model."
                )
                humidity_scaling.params["interpolation_q_method"] = parent_q

    def __repr__(self) -> str:
        # getattr guard: repr must work even if __init__ failed before params was set
        params = getattr(self, "params", {})
        return f"{type(self).__name__} model\n\t{self.long_name}\n\tParams: {params}\n"

    @property
    @abstractmethod
    def name(self) -> str:
        """Get model name for use as a data key in :class:`xr.DataArray` or :class`Flight`."""

    @property
    @abstractmethod
    def long_name(self) -> str:
        """Get long name descriptor, annotated on :class:`xr.DataArray` outputs."""

    @property
    def hash(self) -> str:
        """Generate a unique hash for model instance.

        The hash incorporates the model name, the full parameter dictionary,
        and, when available, the hashes of :attr:`met` and :attr:`source`.

        Returns
        -------
        str
            Unique hash for model instance (sha1)
        """
        params = json.dumps(self.params, sort_keys=True, cls=NumpyEncoder)
        _hash = self.name + params
        if self.met is not None:
            _hash += self.met.hash
        if hasattr(self, "source"):
            _hash += self.source.hash

        return hashlib.sha1(bytes(_hash, "utf-8")).hexdigest()

    def _verify_met(self) -> None:
        """Verify integrity of :attr:`met`.

        This method confirms that :attr:`met` contains each variable in
        :attr:`met_variables`. If this check fails, and :attr:`processed_met_variables`
        is defined, confirm :attr:`met` contains each variable there.

        Does not raise errors if :attr:`met` is None.

        Raises
        ------
        KeyError
            Raises KeyError if data does not contain variables :attr:`met_variables`
        """
        if self.met is None:
            return

        if not hasattr(self, "met_variables"):
            return

        # Try to verify met_variables
        try:
            self.met.ensure_vars(self.met_variables)
        except KeyError as e1:
            # If that fails, try to verify processed_met_variables
            if hasattr(self, "processed_met_variables"):
                try:
                    self.met.ensure_vars(self.processed_met_variables)
                except KeyError as e2:
                    # Chain both failures so the user sees each missing-variable error
                    raise e2 from e1
            else:
                raise

    def _load_params(
        self, params: ModelParams | dict[str, Any] | None = None, **params_kwargs: Any
    ) -> None:
        """Load parameters to model :attr:`params`.

        Load order:

        1. If ``params`` is a :attr:`default_params` instance, use as is. Otherwise
           instantiate as :attr:`default_params`.
        2. ``params`` input dict
        3. ``params_kwargs`` override keys in params

        Parameters
        ----------
        params : dict[str, Any], optional
            Model parameter dictionary or :attr:`default_params` instance.
            Defaults to {}
        **params_kwargs : Any
            Override keys in ``params`` with keyword arguments.

        Raises
        ------
        TypeError
            If ``params`` is a ModelParams instance of the wrong type.
        KeyError
            Unknown parameter passed into model.
        """
        if isinstance(params, self.default_params):
            base_params = params
            params = None
        elif isinstance(params, ModelParams):
            # A ModelParams subclass that is not this model's default_params type
            msg = f"Model parameters must be of type {self.default_params.__name__} or dict"
            raise TypeError(msg)
        else:
            base_params = self.default_params()

        self.params = base_params.as_dict()
        self.update_params(params, **params_kwargs)

    @abstractmethod
    def eval(self, source: Any = None, **params: Any) -> ModelOutput:
        """Abstract method to handle evaluation.

        Implementing classes should override call signature to overload ``source`` inputs
        and model outputs.

        Parameters
        ----------
        source : ModelInput, optional
            Dataset defining coordinates to evaluate model.
            Defined by implementing class, but must be a subset of ModelInput.
            If None, :attr:`met` is assumed to be evaluation points.
        **params : Any
            Overwrite model parameters before evaluation.

        Returns
        -------
        ModelOutput
            Return type depends on implementing model
        """

    # ---------
    # Utilities
    # ---------

    @property
    def interp_kwargs(self) -> dict[str, Any]:
        """Shortcut to create interpolation arguments from :attr:`params`.

        The output of this is useful for passing to :func:`interpolate_met`.

        Returns
        -------
        dict[str, Any]
            Dictionary with keys

            - "method"
            - "bounds_error"
            - "fill_value"
            - "localize"
            - "use_indices"
            - "q_method"

            as determined by :attr:`params`.
        """
        params = self.params
        return {
            "method": params["interpolation_method"],
            "bounds_error": params["interpolation_bounds_error"],
            "fill_value": params["interpolation_fill_value"],
            "localize": params["interpolation_localize"],
            "use_indices": params["interpolation_use_indices"],
            "q_method": params["interpolation_q_method"],
        }

    def require_met(self) -> MetDataset:
        """Ensure that :attr:`met` is a MetDataset.

        Returns
        -------
        MetDataset
            Returns reference to :attr:`met`.
            This is helpful for type narrowing :attr:`met` when meteorology is required.

        Raises
        ------
        ValueError
            Raises when :attr:`met` is None.
        """
        return type_guard(
            self.met,
            MetDataset,
            f"Meteorology is required for this model. Specify with {type(self).__name__}(met=...) ",
        )

    def require_source_type(self, type_: type[_Source] | tuple[type[_Source], ...]) -> _Source:
        """Ensure that :attr:`source` is ``type_``.

        Returns
        -------
        _Source
            Returns reference to :attr:`source`.
            This is helpful for type narrowing :attr:`source` to specific type(s).

        Raises
        ------
        ValueError
            Raises when :attr:`source` is not ``type_``.
        """
        return type_guard(getattr(self, "source", None), type_, f"Source must be of type {type_}")

    @overload
    def _get_source(self, source: MetDataset | None) -> MetDataset: ...

    @overload
    def _get_source(self, source: GeoVectorDataset) -> GeoVectorDataset: ...

    @overload
    def _get_source(self, source: Sequence[Flight]) -> Fleet: ...

    def _get_source(self, source: ModelInput) -> SourceType:
        """Construct :attr:`source` from ``source`` parameter.

        Fallback hierarchy:

        - ``None`` -> empty MetDataset sharing :attr:`met` coordinates
        - ``Sequence[Flight]`` -> :class:`Fleet`
        - ``MetDataset`` / ``GeoVectorDataset`` -> used directly (copied if
          ``params["copy_source"]`` is True)
        """

        # Fallback to met coordinates if source is None
        if source is None:
            self.met = self.require_met()

            # Return dataset with the same coords as self.met, but empty data_vars
            return MetDataset(xr.Dataset(coords=self.met.data.coords))

        copy_source = self.params["copy_source"]

        # Turn Sequence into Fleet
        if isinstance(source, Sequence):
            if not copy_source:
                msg = "Parameter copy_source=False is not supported for Sequence[Flight] source"
                raise ValueError(msg)
            return Fleet.from_seq(source)

        # Raise error if source is not a MetDataset or GeoVectorDataset
        if not isinstance(source, MetDataset | GeoVectorDataset):
            msg = f"Unknown source type: {type(source)}"
            raise TypeError(msg)

        if copy_source:
            source = source.copy()

        if not isinstance(source, Flight):
            return source

        # Ensure flight_id is present on Flight instances
        # Either broadcast from attrs or add as 0
        if "flight_id" not in source:
            if "flight_id" in source.attrs:
                source.broadcast_attrs("flight_id")

            else:
                warnings.warn(
                    "Source flight does not contain `flight_id` data or attr. "
                    "Adding `flight_id` of 0"
                )
                source["flight_id"] = np.zeros(len(source), dtype=int)

        return source

    def set_source(self, source: ModelInput = None) -> None:
        """Attach original or copy of input ``source`` to :attr:`source`.

        Parameters
        ----------
        source : MetDataset | GeoVectorDataset | Flight | Iterable[Flight] | None
            Parameter ``source`` passed in :meth:`eval`.
            If None, an empty MetDataset with coordinates like :attr:`met` is set to :attr:`source`.

        See Also
        --------
        - :meth:`eval`
        """
        self.source = self._get_source(source)

    def update_params(self, params: dict[str, Any] | None = None, **params_kwargs: Any) -> None:
        """Update model parameters on :attr:`params`.

        Parameters
        ----------
        params : dict[str, Any], optional
            Model parameters to update, as dictionary.
            Defaults to {}
        **params_kwargs : Any
            Override keys in ``params`` with keyword arguments.
        """
        update_param_dict(self.params, params or {})
        update_param_dict(self.params, params_kwargs)

    def downselect_met(self) -> None:
        """Downselect :attr:`met` domain to the max/min bounds of :attr:`source`.

        Override this method if special handling is needed in met down-selection.

        - :attr:`source` must be defined before calling :meth:`downselect_met`.
        - This method copies and re-assigns :attr:`met` using :meth:`met.copy()`
          to avoid side-effects.

        Raises
        ------
        AttributeError
            Raised if :attr:`source` is not defined.
        TypeError
            Raised if :attr:`source` is not a :class:`GeoVectorDataset`.
        """
        try:
            source = self.source
        except AttributeError as exc:
            msg = "Attribute 'source' must be defined before calling 'downselect_met'."
            raise AttributeError(msg) from exc

        # TODO: This could be generalized for a MetDataset source
        if not isinstance(source, GeoVectorDataset):
            msg = "Attribute 'source' must be a GeoVectorDataset"
            raise TypeError(msg)

        if self.met is None:
            return

        # return if downselect_met is False
        if not self.params["downselect_met"]:
            logger.debug("Avoiding downselecting met because params['downselect_met'] is False")
            return

        logger.debug("Downselecting met in model %s", self.name)

        # get buffers from params
        buffers = {
            "longitude_buffer": self.params.get("met_longitude_buffer"),
            "latitude_buffer": self.params.get("met_latitude_buffer"),
            "level_buffer": self.params.get("met_level_buffer"),
            "time_buffer": self.params.get("met_time_buffer"),
        }
        kwargs = {k: v for k, v in buffers.items() if v is not None}

        self.met = source.downselect_met(self.met, **kwargs, copy=False)

    def set_source_met(
        self,
        optional: bool = False,
        variable: MetVariable | Sequence[MetVariable] | None = None,
    ) -> None:
        """Ensure or interpolate each required :attr:`met_variables` on :attr:`source` .

        For each variable in :attr:`met_variables`, check :attr:`source` for data variable
        with the same name.

        For :class:`GeoVectorDataset` sources, try to interpolate :attr:`met`
        if variable does not exist.

        For :class:`MetDataset` sources, try to get data from :attr:`met`
        if variable does not exist.

        Parameters
        ----------
        optional : bool, optional
            Include :attr:`optional_met_variables`
        variable : MetVariable | Sequence[MetVariable] | None, optional
            MetVariable to set, from :attr:`met_variables`.
            If None, set all variables in :attr:`met_variables`
            and :attr:`optional_met_variables` if ``optional`` is True.

        Raises
        ------
        TypeError
            :attr:`source` is neither a GeoVectorDataset nor a MetDataset.
        KeyError
            Variable not found in :attr:`source` or :attr:`met`.
        """
        variables = self._determine_relevant_variables(optional, variable)

        q_method = self.params["interpolation_q_method"]

        for var in variables:
            # If var is a tuple of options, the variable is considered present
            # when at least one of the options already exists in source.
            # FIX: an earlier version used an inner ``for v in var`` loop whose
            # ``continue`` only advanced the *inner* loop, so tuple-option
            # variables already on source were still re-fetched from met.
            if isinstance(var, tuple):
                if any(v.standard_name in self.source for v in var):
                    continue

            # Check if var exists in source
            elif var.standard_name in self.source:
                continue

            # Otherwise, interpolate / set from met
            if not isinstance(self.met, MetDataset):
                _raise_missing_met_var(var)

            # take the first var name output from ensure_vars
            met_key = self.met.ensure_vars(var)[0]

            # interpolate GeoVectorDataset
            if isinstance(self.source, GeoVectorDataset):
                interpolate_met(self.met, self.source, met_key, **self.interp_kwargs)
                continue

            if not isinstance(self.source, MetDataset):
                msg = f"Unknown source type: {type(self.source)}"
                raise TypeError(msg)

            da = self.met.data[met_key].reset_coords(drop=True)
            try:
                # This case is when self.source is a subgrid of self.met
                # The call to .sel will raise a KeyError if this is not the case

                # XXX: Sometimes this hangs when using dask!
                # This issue is somewhat similar to
                # https://github.com/pydata/xarray/issues/4406
                self.source[met_key] = da.sel(self.source.coords)

            except KeyError:
                self.source[met_key] = _interp_grid_to_grid(
                    met_key, da, self.source, self.params, q_method
                )

    def _determine_relevant_variables(
        self,
        optional: bool,
        variable: MetVariable | Sequence[MetVariable] | None,
    ) -> Sequence[MetVariable | tuple[MetVariable, ...]]:
        """Determine the relevant variables used in :meth:`set_source_met`."""
        if variable is None:
            if optional:
                return (*self.met_variables, *self.optional_met_variables)
            return self.met_variables
        if isinstance(variable, MetVariable):
            return (variable,)
        return variable

    # Sentinel used to distinguish "no default provided" from "default is None".
    # Following python implementation
    # https://github.com/python/cpython/blob/618b7a8260bb40290d6551f24885931077309590/Lib/collections/__init__.py#L231
    __marker = object()

    def get_source_param(self, key: str, default: Any = __marker, *, set_attr: bool = True) -> Any:
        """Get source data with default set by parameter key.

        Retrieves data with the following hierarchy:

        1. :attr:`source.data[key]`. Returns ``np.ndarray | xr.DataArray``.
        2. :attr:`source.attrs[key]`
        3. :attr:`params[key]`
        4. ``default``

        In case 3., the value of :attr:`params[key]` is attached to :attr:`source.attrs[key]`.

        Parameters
        ----------
        key : str
            Key to retrieve
        default : Any, optional
            Default value if key is not found.
        set_attr : bool, optional
            If True (default), set :attr:`source.attrs[key]` to :attr:`params[key]` if found.
            This allows for better post model evaluation tracking.

        Returns
        -------
        Any
            Value(s) found for key in source data, source attrs, or model params

        Raises
        ------
        KeyError
            Raises KeyError if key is not found in any location and ``default`` is not provided.

        See Also
        --------
        - GeoVectorDataset.get_data_or_attr
        """
        marker = self.__marker

        out = self.source.data.get(key, marker)
        if out is not marker:
            return out

        out = self.source.attrs.get(key, marker)
        if out is not marker:
            return out

        out = self.params.get(key, marker)
        if out is not marker:
            if set_attr:
                self.source.attrs[key] = out

            return out

        if default is not marker:
            return default

        msg = f"Key '{key}' not found in source data, attrs, or model params"
        raise KeyError(msg)

    def _cleanup_indices(self) -> None:
        """Cleanup indices artifacts if ``params["interpolation_use_indices"]`` is True."""
        if self.params["interpolation_use_indices"] and isinstance(self.source, GeoVectorDataset):
            self.source._invalidate_indices()

    def transfer_met_source_attrs(self, source: SourceType | None = None) -> None:
        """Transfer met source metadata from :attr:`met` to ``source``.

        Each metadata attribute is copied best-effort: a missing attribute on
        :attr:`met` is silently skipped rather than raising.
        """

        if self.met is None:
            return

        source = source or self.source
        with contextlib.suppress(KeyError):
            source.attrs["met_source_provider"] = self.met.provider_attr

        with contextlib.suppress(KeyError):
            source.attrs["met_source_dataset"] = self.met.dataset_attr

        with contextlib.suppress(KeyError):
            source.attrs["met_source_product"] = self.met.product_attr

        with contextlib.suppress(KeyError):
            source.attrs["met_source_forecast_time"] = self.met.attrs["forecast_time"]
737
+
738
+
739
def _interp_grid_to_grid(
    met_key: str,
    da: xr.DataArray,
    source: MetDataset,
    params: dict[str, Any],
    q_method: str | None,
) -> xr.DataArray:
    """Interpolate gridded met data ``da`` onto the coordinates of a gridded ``source``.

    Parameters
    ----------
    met_key : str
        Name of the met variable being interpolated. Values ``"q"`` and
        ``"specific_humidity"`` receive special treatment when ``q_method`` is set.
    da : xr.DataArray
        Met data to interpolate.
    source : MetDataset
        Target grid; only its coordinates are used.
    params : dict[str, Any]
        Model parameters supplying the ``"interpolation_*"`` keys read below.
    q_method : str | None
        Specific-humidity interpolation method. Only ``None`` and
        ``"cubic-spline"`` are implemented here.

    Returns
    -------
    xr.DataArray
        ``da`` interpolated onto the ``source`` coordinates, eagerly loaded and
        cast back to the original dtype of ``da``.

    Raises
    ------
    NotImplementedError
        If ``q_method`` is neither ``None`` nor ``"cubic-spline"`` when
        interpolating specific humidity.
    """
    # This call to DataArray.interp was added in pycontrails 0.28.1
    # For arbitrary grids, use xr.DataArray.interp
    # Extract certain parameters to pass into interp
    interp_kwargs = {
        "method": params["interpolation_method"],
        "kwargs": {
            "bounds_error": params["interpolation_bounds_error"],
            "fill_value": params["interpolation_fill_value"],
        },
        "assume_sorted": True,
    }
    # Correct dtype if promoted
    # Somewhat of a pain: dask believes the dtype is float32, but
    # when it is actually computed, it comes out as float64
    # Call load() here to smooth over this issue
    # https://github.com/pydata/xarray/issues/4770
    # There is also an issue in which xarray assumes non-singleton
    # dimensions. This causes issues when the ``da`` variable has
    # a scalar dimension, or the ``self.source`` variable coincides
    # with an edge of the ``da`` variable. For now, we try an additional
    # sel over just the time dimension, which is the most common case.
    # This stuff isn't so well unit tested in pycontrails, and the xarray
    # and scipy interpolate conventions are always changing, so more
    # issues may arise here in the future.
    coords = source.coords
    try:
        # Exact-match the time coordinate when possible so time is excluded
        # from the interp call below; falls through on KeyError otherwise.
        da = da.sel(time=coords["time"])
    except KeyError:
        pass
    else:
        del coords["time"]

    if q_method is None or met_key not in ("q", "specific_humidity"):
        return da.interp(coords, **interp_kwargs).load().astype(da.dtype, copy=False)

    if q_method == "cubic-spline":
        # _load_spline is defined elsewhere in this module (not visible here);
        # it returns a callable applied to the level coordinate — presumably the
        # "custom stretching" described in ModelParams.interpolation_q_method.
        ppoly = _load_spline()

        # Interpolate in the stretched level space, then restore original levels
        da = da.assign_coords(level=ppoly(da["level"]))
        level0 = coords.pop("level")
        coords["level"] = ppoly(level0)
        interped = da.interp(coords, **interp_kwargs).load().astype(da.dtype, copy=False)
        return interped.assign_coords(level=level0)

    # NOTE: "log-q-log-p" is documented on ModelParams but not implemented here
    msg = f"Unsupported q_method: {q_method}"
    raise NotImplementedError(msg)
792
+
793
+
794
def _raise_missing_met_var(var: MetVariable | Sequence[MetVariable]) -> NoReturn:
    """Raise KeyError on missing met variable.

    Parameters
    ----------
    var : MetVariable | list[MetVariable]
        Met variable

    Raises
    ------
    KeyError
    """
    # NOTE: fixed missing space between "`met`" and "in model constructor"
    # in both messages (the adjacent f-string fragments concatenated with
    # no separator).
    if isinstance(var, MetVariable):
        msg = (
            f"Variable `{var.standard_name}` not found. Either pass parameter `met` "
            f"in model constructor, or define `{var.standard_name}` data on input data."
        )
        raise KeyError(msg)
    missing_keys = [v.standard_name for v in var]
    msg = (
        f"One of `{missing_keys}` is required. Either pass parameter `met` "
        f"in model constructor, or define one of `{missing_keys}` data on input data."
    )
    raise KeyError(msg)
818
+
819
+
820
def interpolate_met(
    met: MetDataset | None,
    vector: GeoVectorDataset,
    met_key: str,
    vector_key: str | None = None,
    *,
    q_method: str | None = None,
    **interp_kwargs: Any,
) -> npt.NDArray[np.float64]:
    """Interpolate ``vector`` against gridded ``met`` data.

    When ``vector_key`` (defaults to ``met_key``) is already present on
    ``vector``, the existing values are returned and no interpolation is
    performed. Otherwise ``vector`` is mutated in place: interpolated
    values are attached under ``vector_key`` and returned.

    Parameters
    ----------
    met : MetDataset | None
        Gridded met data to interpolate against.
    vector : GeoVectorDataset
        Flight or GeoVectorDataset instance.
    met_key : str
        Key of the met variable in ``met``.
    vector_key : str, optional
        Key under which values are attached to ``vector``.
        Defaults to ``met_key``.
    q_method : str, optional
        Experimental method for interpolating specific humidity. See
        :class:`ModelParams` for more information.
    **interp_kwargs : Any,
        Additional keyword-only arguments forwarded to
        :meth:`GeoVectorDataset.intersect_met`, e.g. ``level=[...]``.

    Returns
    -------
    npt.NDArray[np.float64]
        Interpolated values.

    Raises
    ------
    KeyError
        ``met_key`` is not found in ``met``, or ``met`` is None and no
        data exists at ``vector_key``.
    """
    key = vector_key or met_key

    existing = vector.get(key, None)
    if existing is not None:
        return existing

    if met is None:
        msg = f"No variable key '{key}' in 'vector' and 'met' is None"
        raise KeyError(msg)

    humidity_path = q_method is not None and met_key in ("q", "specific_humidity")
    if humidity_path:
        mda, log_applied = _extract_q(met, met_key, q_method)
        values = interpolate_gridded_specific_humidity(
            mda, vector, q_method, log_applied, **interp_kwargs
        )
    else:
        try:
            mda = met[met_key]
        except KeyError as exc:
            msg = f"No variable key '{met_key}' in 'met'."
            raise KeyError(msg) from exc
        values = vector.intersect_met(mda, **interp_kwargs)

    vector[key] = values
    return values
891
+
892
+
893
def _extract_q(met: MetDataset, met_key: str, q_method: str) -> tuple[MetDataArray, bool]:
    """Pull specific humidity out of ``met``, preferring pre-logged data.

    Parameters
    ----------
    met : MetDataset
        Met data
    met_key : str
        Key of specific humidity in ``met``. Typically either ``"q"`` or
        ``"specific_humidity"``.
    q_method : str
        Method to use for interpolating specific humidity.

    Returns
    -------
    mda : MetDataArray
        Specific humidity data
    log_applied : bool
        True when the returned data already has a log transform applied.
    """
    # Only the "log-q-log-p" method can take advantage of pre-logged data
    if q_method == "log-q-log-p":
        try:
            return met["log_specific_humidity"], True
        except KeyError:
            warnings.warn(
                "No variable key 'log_specific_humidity' in 'met'. "
                "Falling back to 'specific_humidity'. "
                "Computation will be faster if 'log_specific_humidity' is provided."
            )

    # Fall back to the raw (non-logged) variable
    try:
        return met[met_key], False
    except KeyError as exc:
        msg = f"No variable key '{met_key}' in 'met'."
        raise KeyError(msg) from exc
933
+
934
+
935
def _prepare_q(
    mda: MetDataArray, level: npt.NDArray[np.float64], q_method: str, log_applied: bool
) -> tuple[MetDataArray, npt.NDArray[np.float64]]:
    """Transform specific humidity and target levels for experimental ``q_method``.

    Parameters
    ----------
    mda : MetDataArray
        MetDataArray of specific humidity.
    level : npt.NDArray[np.float64]
        Levels to interpolate to, [:math:`hPa`].
    q_method : str
        One of ``"log-q-log-p"`` or ``"cubic-spline"``.
    log_applied : bool
        Whether a log transform was applied to ``mda``.

    Returns
    -------
    mda : MetDataArray
        MetDataArray of specific humidity transformed for interpolation.
    level : npt.NDArray[np.float64]
        Transformed levels for interpolation.
    """
    da = mda.data
    if not da._in_memory:
        # XXX: It's unclear where this should go. Loading here means we load
        # exactly once, at the risk of loading data that is never used;
        # deferring could force a reload on every intersect_met call.
        da.load()

    if q_method == "log-q-log-p":
        return _prepare_q_log_q_log_p(da, level, log_applied)

    # Remaining methods never operate on pre-logged data
    assert not log_applied, "Log transform should not be applied for cubic spline interpolation"

    if q_method != "cubic-spline":
        raise_invalid_q_method_error(q_method)

    return _prepare_q_cubic_spline(da, level)
975
+
976
+
977
def _prepare_q_log_q_log_p(
    da: xr.DataArray, level: npt.NDArray[np.float64], log_applied: bool
) -> tuple[MetDataArray, npt.NDArray[np.float64]]:
    """Move ``da`` and ``level`` onto log-q / log-p coordinates."""
    # Work in log-pressure coordinates
    da = da.assign_coords(level=np.log(da["level"]))

    if not log_applied:
        # ERA5 specific humidity may contain negative values; np.log turns
        # them into NaNs. Silence the warning xarray emits while doing so.
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", message="invalid value encountered in log")
            da = np.log(da)  # type: ignore[assignment]

    return MetDataArray(da, copy=False), np.log(level)
994
+
995
+
996
def _prepare_q_cubic_spline(
    da: xr.DataArray, level: npt.NDArray[np.float64]
) -> tuple[MetDataArray, npt.NDArray[np.float64]]:
    """Reparameterize levels through the climatological q spline."""
    # The spline knots only cover 50-1000 hPa (extrapolate=False)
    if da["level"][0] < 50.0 or da["level"][-1] > 1000.0:
        msg = "Cubic spline interpolation requires data to span 50-1000 hPa."
        raise ValueError(msg)

    spline = _load_spline()
    transformed = da.assign_coords(level=spline(da["level"]))
    return MetDataArray(transformed, copy=False), spline(level)
1010
+
1011
+
1012
def interpolate_gridded_specific_humidity(
    mda: MetDataArray,
    vector: GeoVectorDataset,
    q_method: str | None,
    log_applied: bool,
    **interp_kwargs: Any,
) -> np.ndarray:
    """Interpolate specific humidity against ``vector`` with experimental ``q_method``.

    Parameters
    ----------
    mda : MetDataArray
        MetDataArray of specific humidity.
    vector : GeoVectorDataset
        Flight or GeoVectorDataset instance
    q_method : {None, "cubic-spline", "log-q-log-p"}
        Experimental method to use for interpolating specific humidity.
    log_applied : bool
        Whether or not a log transform was applied to specific humidity.
    **interp_kwargs : Any,
        Additional keyword only arguments passed to `intersect_met`.

    Returns
    -------
    np.ndarray
        Interpolated values.
    """
    # No special handling requested: plain interpolation
    if q_method is None:
        return vector.intersect_met(mda, **interp_kwargs)

    target_level = interp_kwargs.get("level", vector.level)
    mda, target_level = _prepare_q(mda, target_level, q_method, log_applied)

    out = vector.intersect_met(mda, **{**interp_kwargs, "level": target_level})

    # Undo the log transform applied by _prepare_q for this method
    return np.exp(out) if q_method == "log-q-log-p" else out
1051
+
1052
+
1053
def raise_invalid_q_method_error(q_method: str) -> NoReturn:
    """Raise error for invalid ``q_method``.

    Parameters
    ----------
    q_method : str
        ``q_method`` to raise error for.

    Raises
    ------
    ValueError
        ``q_method`` is not one of ``None``, ``"log-q-log-p"``, or ``"cubic-spline"``.
    """
    valid_methods = (None, "log-q-log-p", "cubic-spline")
    raise ValueError(f"Invalid 'q_method' value '{q_method}'. Must be one of {valid_methods}.")
1069
+
1070
+
1071
+ @functools.cache
1072
+ def _load_spline() -> scipy.interpolate.PchipInterpolator:
1073
+ """Load spline interpolator estimating the specific humidity vertical profile (ie, lapse rate).
1074
+
1075
+ Data computed from historic ERA5 reanalysis data for 2019.
1076
+
1077
+ The first data point ``(50.0, 1.8550577e-06)`` is added to the spline to
1078
+ ensure that the spline is monotonic for high altitudes. It was chosen
1079
+ so that the resulting spline has a continuous second derivative at 100 hPa.
1080
+
1081
+ Returns
1082
+ -------
1083
+ scipy.interpolate.PchipInterpolator
1084
+ Spline interpolator.
1085
+ """
1086
+
1087
+ level = [
1088
+ 50.0,
1089
+ 100.0,
1090
+ 125.0,
1091
+ 150.0,
1092
+ 175.0,
1093
+ 200.0,
1094
+ 225.0,
1095
+ 250.0,
1096
+ 300.0,
1097
+ 350.0,
1098
+ 400.0,
1099
+ 450.0,
1100
+ 500.0,
1101
+ 550.0,
1102
+ 600.0,
1103
+ 650.0,
1104
+ 700.0,
1105
+ 750.0,
1106
+ 775.0,
1107
+ 800.0,
1108
+ 825.0,
1109
+ 850.0,
1110
+ 875.0,
1111
+ 900.0,
1112
+ 925.0,
1113
+ 950.0,
1114
+ 975.0,
1115
+ 1000.0,
1116
+ ]
1117
+ q = [
1118
+ 1.8550577e-06,
1119
+ 2.6863474e-06,
1120
+ 3.4371210e-06,
1121
+ 5.6529648e-06,
1122
+ 1.0849595e-05,
1123
+ 2.0879523e-05,
1124
+ 3.7430935e-05,
1125
+ 6.1511033e-05,
1126
+ 1.3460252e-04,
1127
+ 2.4769874e-04,
1128
+ 4.0938452e-04,
1129
+ 6.2360929e-04,
1130
+ 8.9822523e-04,
1131
+ 1.2304801e-03,
1132
+ 1.5927359e-03,
1133
+ 2.0140875e-03,
1134
+ 2.5222234e-03,
1135
+ 3.1251940e-03,
1136
+ 3.4660504e-03,
1137
+ 3.8333545e-03,
1138
+ 4.2424337e-03,
1139
+ 4.7023618e-03,
1140
+ 5.1869694e-03,
1141
+ 5.6702676e-03,
1142
+ 6.1630723e-03,
1143
+ 6.6630659e-03,
1144
+ 7.0036170e-03,
1145
+ 7.1794386e-03,
1146
+ ]
1147
+
1148
+ return scipy.interpolate.PchipInterpolator(level, q, extrapolate=False)
1149
+
1150
+
1151
def update_param_dict(param_dict: dict[str, Any], new_params: dict[str, Any]) -> None:
    """Update parameter dictionary in place.

    Parameters
    ----------
    param_dict : dict[str, Any]
        Active model parameter dictionary
    new_params : dict[str, Any]
        Model parameters to update, as a dictionary

    Raises
    ------
    KeyError
        Raises when ``new_params`` key is not found in ``param_dict``

    """
    for key, new_value in new_params.items():
        if key not in param_dict:
            msg = (
                f"Unknown parameter '{key}' passed into model. Possible "
                f"parameters include {', '.join(param_dict)}."
            )
            raise KeyError(msg)

        # Convenience: convert timedelta64-like params
        current = param_dict[key]
        needs_conversion = isinstance(current, np.timedelta64) and not isinstance(
            new_value, np.timedelta64
        )
        if needs_conversion:
            new_value = pd.to_timedelta(new_value).to_numpy()

        param_dict[key] = new_value