pycontrails-0.54.4-cp313-cp313-win_amd64.whl → pycontrails-0.54.5-cp313-cp313-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- pycontrails/_version.py +2 -2
- pycontrails/core/fleet.py +30 -9
- pycontrails/core/flight.py +6 -1
- pycontrails/core/interpolation.py +7 -4
- pycontrails/core/met.py +145 -86
- pycontrails/core/models.py +2 -2
- pycontrails/core/rgi_cython.cp313-win_amd64.pyd +0 -0
- pycontrails/core/vector.py +97 -74
- pycontrails/models/apcemm/apcemm.py +2 -2
- pycontrails/models/apcemm/utils.py +1 -1
- pycontrails/models/cocip/cocip.py +8 -9
- pycontrails/models/cocipgrid/cocip_grid.py +8 -73
- pycontrails/models/dry_advection.py +50 -14
- pycontrails/models/emissions/emissions.py +2 -2
- pycontrails/models/issr.py +2 -2
- pycontrails/models/ps_model/ps_grid.py +2 -2
- pycontrails/models/ps_model/ps_model.py +2 -2
- {pycontrails-0.54.4.dist-info → pycontrails-0.54.5.dist-info}/METADATA +1 -1
- {pycontrails-0.54.4.dist-info → pycontrails-0.54.5.dist-info}/RECORD +23 -23
- {pycontrails-0.54.4.dist-info → pycontrails-0.54.5.dist-info}/LICENSE +0 -0
- {pycontrails-0.54.4.dist-info → pycontrails-0.54.5.dist-info}/NOTICE +0 -0
- {pycontrails-0.54.4.dist-info → pycontrails-0.54.5.dist-info}/WHEEL +0 -0
- {pycontrails-0.54.4.dist-info → pycontrails-0.54.5.dist-info}/top_level.txt +0 -0
pycontrails/core/vector.py
CHANGED
@@ -37,6 +37,8 @@ logger = logging.getLogger(__name__)
 class AttrDict(dict[str, Any]):
     """Thin wrapper around dict to warn when setting a key that already exists."""

+    __slots__ = ()
+
     def __setitem__(self, k: str, v: Any) -> None:
         """Warn when setting values that already contain values.

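The empty ``__slots__`` added to ``AttrDict`` suppresses the per-instance ``__dict__``, keeping the subclass as lean as the builtin dict. A minimal illustration of the mechanism (not from the package; names are made up):

class Slotted(dict):
    __slots__ = ()  # no per-instance __dict__ is allocated

d = Slotted(a=1)
try:
    d.extra = 2  # attribute assignment now fails
except AttributeError as exc:
    print(exc)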
@@ -85,7 +87,7 @@ class VectorDataDict(dict[str, np.ndarray]):
     Parameters
     ----------
     data : dict[str, np.ndarray], optional
-        Dictionary input
+        Dictionary input. A shallow copy is always made.
     """

     __slots__ = ("_size",)
@@ -130,8 +132,8 @@ class VectorDataDict(dict[str, np.ndarray]):
     def __delitem__(self, k: str) -> None:
         super().__delitem__(k)

-        # if
-        if not
+        # if no keys remain, delete _size attribute
+        if not self:
             del self._size

     def setdefault(self, k: str, default: npt.ArrayLike | None = None) -> np.ndarray:
@@ -191,9 +193,9 @@ class VectorDataDict(dict[str, np.ndarray]):
         super().update(kwargs_arr)

     def _validate_array(self, arr: np.ndarray) -> None:
-        """Ensure that
+        """Ensure that ``arr`` is compatible (1 dimensional of equal size) with instance.

-        Set attribute
+        Set attribute ``_size`` if it has not yet been defined.

         Parameters
         ----------
@@ -203,34 +205,34 @@ class VectorDataDict(dict[str, np.ndarray]):
         Raises
         ------
         ValueError
-            If
+            If ``arr`` is not compatible with instance.
         """
         if arr.ndim != 1:
             raise ValueError("All np.arrays must have dimension 1.")

         size = getattr(self, "_size", 0)
-        if size
-            if arr.size != size:
-                raise ValueError(f"Incompatible array sizes: {arr.size} and {size}.")
-        else:
+        if not size:
             self._size = arr.size
+            return
+
+        if arr.size != size:
+            raise ValueError(f"Incompatible array sizes: {arr.size} and {size}.")


-def _empty_vector_dict(keys: Iterable[str]) ->
-    """Create
+def _empty_vector_dict(keys: Iterable[str]) -> dict[str, np.ndarray]:
+    """Create a dictionary with keys defined by ``keys`` and empty arrays.

     Parameters
     ----------
     keys : Iterable[str]
-        Keys to include in
+        Keys to include in dictionary.

     Returns
     -------
-
-
+    dict[str, np.ndarray]
+        Dictionary with empty arrays.
     """
-
-    data = VectorDataDict({key: np.array([]) for key in keys})
+    data = {key: np.array([]) for key in keys}

     # The default dtype is float64
     # Time is special and should have a non-default dtype of datetime64[ns]
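The restructured ``_validate_array`` reads as an early-return guard: the first 1-D array to arrive fixes ``_size``, and every later array must match it. A quick sketch of the enforced behavior (illustrative values; assumes ``__setitem__`` routes through ``_validate_array``):

import numpy as np
from pycontrails.core.vector import VectorDataDict

d = VectorDataDict({"a": np.arange(3.0)})  # first array fixes _size = 3
try:
    d["b"] = np.arange(4.0)  # wrong length
except ValueError as exc:
    print(exc)  # Incompatible array sizes: 4 and 3.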
@@ -245,14 +247,15 @@ class VectorDataset:

     Parameters
     ----------
-    data : dict[str, npt.ArrayLike] | pd.DataFrame |
-        Initial data, by default None
-
-
+    data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None, optional
+        Initial data, by default None. A shallow copy is always made. Use the ``copy``
+        parameter to copy the underlying array data.
+    attrs : dict[str, Any] | None, optional
+        Dictionary of attributes, by default None. A shallow copy is always made.
     copy : bool, optional
-        Copy
+        Copy individual arrays on instantiation, by default True.
     **attrs_kwargs : Any
-        Additional attributes passed as keyword arguments
+        Additional attributes passed as keyword arguments.

     Raises
     ------
@@ -262,24 +265,22 @@ class VectorDataset:

     __slots__ = ("attrs", "data")

-    #: Vector data with labels as keys and :class:`numpy.ndarray` as values
-    data: VectorDataDict
-
     #: Generic dataset attributes
     attrs: AttrDict

+    #: Vector data with labels as keys and :class:`numpy.ndarray` as values
+    data: VectorDataDict
+
     def __init__(
         self,
-        data: (
-            dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None
-        ) = None,
+        data: dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None = None,
         *,
-        attrs: dict[str, Any] |
+        attrs: dict[str, Any] | None = None,
         copy: bool = True,
         **attrs_kwargs: Any,
     ) -> None:
-        # Set data
-        #
+        # Set data: always shallow copy
+        # -----------------------------

         # Casting from one VectorDataset type to another
         # e.g., flight = Flight(...); vector = VectorDataset(flight)
@@ -288,7 +289,7 @@ class VectorDataset:
             if copy:
                 self.data = VectorDataDict({k: v.copy() for k, v in data.data.items()})
             else:
-                self.data = data.data
+                self.data = VectorDataDict(data.data)

         elif data is None:
             self.data = VectorDataDict()
@@ -307,31 +308,45 @@ class VectorDataset:
             data["time"] = time.to_numpy(copy=copy)
             self.data = VectorDataDict(data)

-        elif isinstance(data, VectorDataDict):
-            if copy:
-                self.data = VectorDataDict({k: v.copy() for k, v in data.items()})
-            else:
-                self.data = data
-
         # For anything else, we assume it is a dictionary of array-like and attach it
         else:
             self.data = VectorDataDict({k: np.array(v, copy=copy) for k, v in data.items()})

-        # Set attributes
-        #
+        # Set attributes: always shallow copy
+        # -----------------------------------
+
+        self.attrs = AttrDict(attrs or {})  # type: ignore[arg-type]
+        self.attrs.update(attrs_kwargs)
+
+    @classmethod
+    def _from_fastpath(
+        cls,
+        data: dict[str, np.ndarray],
+        attrs: dict[str, Any] | None = None,
+        **kwargs: Any,
+    ) -> Self:
+        """Create new instance from consistent data.

-
-
+        This is a low-level method that bypasses the standard constructor in certain
+        special cases. It is intended for internal use only.

-
-
+        In essence, this method skips any validation from __init__ and directly sets
+        ``data`` and ``attrs``. This is useful when creating a new instance from an
+        existing instance whose data has already been validated.
+        """
+        obj = cls.__new__(cls)

-
-
-        self.attrs = AttrDict(attrs.copy())
+        obj.data = VectorDataDict(data)
+        obj.attrs = AttrDict(attrs or {})

-
-
+        for key, value in kwargs.items():
+            try:
+                setattr(obj, key, value)
+            # If key not present in __slots__ of class (or parents), it's intended for attrs
+            except AttributeError:
+                obj.attrs[key] = value
+
+        return obj

     # ------------
     # dict-like methods
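The new ``_from_fastpath`` classmethod is the centerpiece of this release: ``cls.__new__(cls)`` bypasses ``__init__``, so no per-key ``np.array`` coercion, re-validation, or copying happens when the caller already holds consistent arrays. A rough sketch of the difference (private API, shown for illustration only):

import numpy as np
from pycontrails import VectorDataset

data = {"a": np.arange(3.0), "b": np.ones(3)}
v1 = VectorDataset(data)                 # public path: validates and copies each array
v2 = VectorDataset._from_fastpath(data)  # internal path: shares the arrays as-is
assert v2["a"] is data["a"]              # no copy was made (implementation detail)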
@@ -663,6 +678,13 @@ class VectorDataset:
         8 15 18

         """
+        if cls not in (VectorDataset, GeoVectorDataset):
+            msg = (
+                "Method 'sum' is only available on 'VectorDataset' and 'GeoVectorDataset'. "
+                "To sum 'Flight' instances, use 'Fleet.from_seq'."
+            )
+            raise TypeError(msg)
+
         vectors = [v for v in vectors if v is not None]  # remove None values

         if not vectors:
|
|
|
693
715
|
return np.concatenate(values)
|
|
694
716
|
|
|
695
717
|
data = {key: concat(key) for key in keys}
|
|
718
|
+
attrs = vectors[0].attrs if infer_attrs else None
|
|
696
719
|
|
|
697
|
-
|
|
698
|
-
return cls(data, attrs=vectors[0].attrs, copy=False)
|
|
699
|
-
return cls(data, copy=False)
|
|
720
|
+
return cls._from_fastpath(data, attrs)
|
|
700
721
|
|
|
701
722
|
def __eq__(self, other: object) -> bool:
|
|
702
723
|
"""Determine if two instances are equal.
|
|
@@ -803,7 +824,8 @@ class VectorDataset:
         Self
             Copy of class
         """
-
+        data = {key: value.copy() for key, value in self.data.items()}
+        return type(self)._from_fastpath(data, self.attrs, **kwargs)

     def select(self: VectorDataset, keys: Iterable[str], copy: bool = True) -> VectorDataset:
         """Return new class instance only containing specified keys.
|
|
|
823
845
|
Note that this method always returns a :class:`VectorDataset`, even if
|
|
824
846
|
the calling class is a proper subclass of :class:`VectorDataset`.
|
|
825
847
|
"""
|
|
826
|
-
data = {key: self[key] for key in keys}
|
|
827
|
-
return VectorDataset(data
|
|
848
|
+
data = {key: np.array(self[key], copy=copy) for key in keys}
|
|
849
|
+
return VectorDataset._from_fastpath(data, self.attrs)
|
|
828
850
|
|
|
829
851
|
def filter(self, mask: npt.NDArray[np.bool_], copy: bool = True, **kwargs: Any) -> Self:
|
|
830
852
|
"""Filter :attr:`data` according to a boolean array ``mask``.
|
|
@@ -856,8 +878,8 @@ class VectorDataset:
         if mask.dtype != bool:
             raise TypeError("Parameter `mask` must be a boolean array.")

-        data = {key: value[mask] for key, value in self.data.items()}
-        return type(self)(data
+        data = {key: np.array(value[mask], copy=copy) for key, value in self.data.items()}
+        return type(self)._from_fastpath(data, self.attrs, **kwargs)

     def sort(self, by: str | list[str]) -> Self:
         """Sort data by key(s).
|
|
|
1116
1138
|
cls,
|
|
1117
1139
|
keys: Iterable[str],
|
|
1118
1140
|
attrs: dict[str, Any] | None = None,
|
|
1119
|
-
**
|
|
1141
|
+
**kwargs: Any,
|
|
1120
1142
|
) -> Self:
|
|
1121
1143
|
"""Create instance with variables defined by ``keys`` and size 0.
|
|
1122
1144
|
|
|
@@ -1129,15 +1151,16 @@ class VectorDataset:
|
|
|
1129
1151
|
Keys to include in empty VectorDataset instance.
|
|
1130
1152
|
attrs : dict[str, Any] | None, optional
|
|
1131
1153
|
Attributes to attach instance.
|
|
1132
|
-
**
|
|
1133
|
-
|
|
1154
|
+
**kwargs : Any
|
|
1155
|
+
Additional keyword arguments passed into the constructor of the returned class.
|
|
1134
1156
|
|
|
1135
1157
|
Returns
|
|
1136
1158
|
-------
|
|
1137
1159
|
Self
|
|
1138
1160
|
Empty VectorDataset instance.
|
|
1139
1161
|
"""
|
|
1140
|
-
|
|
1162
|
+
data = _empty_vector_dict(keys)
|
|
1163
|
+
return cls._from_fastpath(data, attrs, **kwargs)
|
|
1141
1164
|
|
|
1142
1165
|
@classmethod
|
|
1143
1166
|
def from_dict(cls, obj: dict[str, Any], copy: bool = True, **obj_kwargs: Any) -> Self:
|
|
@@ -1216,7 +1239,7 @@ class GeoVectorDataset(VectorDataset):
|
|
|
1216
1239
|
|
|
1217
1240
|
Parameters
|
|
1218
1241
|
----------
|
|
1219
|
-
data : dict[str, npt.ArrayLike] | pd.DataFrame |
|
|
1242
|
+
data : dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None, optional
|
|
1220
1243
|
Data dictionary or :class:`pandas.DataFrame` .
|
|
1221
1244
|
Must include keys/columns ``time``, ``latitude``, ``longitude``, ``altitude`` or ``level``.
|
|
1222
1245
|
Keyword arguments for ``time``, ``latitude``, ``longitude``, ``altitude`` or ``level``
|
|
@@ -1269,9 +1292,7 @@ class GeoVectorDataset(VectorDataset):

     def __init__(
         self,
-        data: (
-            dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataDict | VectorDataset | None
-        ) = None,
+        data: dict[str, npt.ArrayLike] | pd.DataFrame | VectorDataset | None = None,
         *,
         longitude: npt.ArrayLike | None = None,
         latitude: npt.ArrayLike | None = None,
@@ -1279,7 +1300,7 @@ class GeoVectorDataset(VectorDataset):
         altitude_ft: npt.ArrayLike | None = None,
         level: npt.ArrayLike | None = None,
         time: npt.ArrayLike | None = None,
-        attrs: dict[str, Any] |
+        attrs: dict[str, Any] | None = None,
         copy: bool = True,
         **attrs_kwargs: Any,
     ) -> None:
@@ -1293,7 +1314,10 @@ class GeoVectorDataset(VectorDataset):
             and time is None
         ):
             keys = *self.required_keys, "altitude"
-            data = _empty_vector_dict(keys)
+            self.data = VectorDataDict(_empty_vector_dict(keys))
+            self.attrs = AttrDict(attrs or {})  # type: ignore[arg-type]
+            self.attrs.update(attrs_kwargs)
+            return

         super().__init__(data=data, attrs=attrs, copy=copy, **attrs_kwargs)

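The empty-constructor branch now assigns ``data`` and ``attrs`` directly and returns early, skipping the round trip through ``VectorDataset.__init__``. A behavior sketch (key set per the ``keys = *self.required_keys, "altitude"`` line above):

from pycontrails import GeoVectorDataset

v = GeoVectorDataset()  # empty instance
print(v.size)           # 0
print(sorted(v.data))   # the required coordinate keys plus "altitude"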
@@ -1819,7 +1843,6 @@ class GeoVectorDataset(VectorDataset):
         latitude_buffer: tuple[float, float] = ...,
         level_buffer: tuple[float, float] = ...,
         time_buffer: tuple[np.timedelta64, np.timedelta64] = ...,
-        copy: bool = ...,
     ) -> met_module.MetDataset: ...

     @overload
@@ -1831,7 +1854,6 @@ class GeoVectorDataset(VectorDataset):
         latitude_buffer: tuple[float, float] = ...,
         level_buffer: tuple[float, float] = ...,
         time_buffer: tuple[np.timedelta64, np.timedelta64] = ...,
-        copy: bool = ...,
     ) -> met_module.MetDataArray: ...

     def downselect_met(
@@ -1845,10 +1867,13 @@ class GeoVectorDataset(VectorDataset):
             np.timedelta64(0, "h"),
             np.timedelta64(0, "h"),
         ),
-        copy: bool = True,
     ) -> met_module.MetDataType:
         """Downselect ``met`` to encompass a spatiotemporal region of the data.

+        .. versionchanged:: 0.54.5
+
+            Returned object is no longer copied.
+
         Parameters
         ----------
         met : MetDataset | MetDataArray
@@ -1873,8 +1898,6 @@ class GeoVectorDataset(VectorDataset):
             and ``time_buffer[1]`` on the high side.
             Units must be the same as class coordinates.
             Defaults to ``(np.timedelta64(0, "h"), np.timedelta64(0, "h"))``.
-        copy : bool
-            If returned object is a copy or view of the original. True by default.

         Returns
         -------
@@ -1915,7 +1938,7 @@ class GeoVectorDataset(VectorDataset):
             level=level_slice,
             time=time_slice,
         )
-        return type(met)(data
+        return type(met)._from_fastpath(data)

     # ------------
     # I / O
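With the ``copy`` parameter gone, ``downselect_met`` returns a fast-path object over the sliced xarray data rather than a fresh copy. Callers that relied on ``copy=True`` isolation can copy explicitly; a small synthetic sketch (illustrative values):

import numpy as np
import pandas as pd
import xarray as xr
from pycontrails import GeoVectorDataset, MetDataset

ds = xr.Dataset(
    {"air_temperature": (("longitude", "latitude", "level", "time"), np.full((4, 4, 3, 2), 230.0))},
    coords={
        "longitude": [0.0, 1.0, 2.0, 3.0],
        "latitude": [50.0, 51.0, 52.0, 53.0],
        "level": [200.0, 250.0, 300.0],
        "time": pd.date_range("2022-03-01", periods=2, freq="1h"),
    },
)
met = MetDataset(ds)
vector = GeoVectorDataset(
    longitude=[1.2], latitude=[51.3], level=[250.0],
    time=[np.datetime64("2022-03-01T00:30")],
)
sub = vector.downselect_met(met)             # no longer copied as of 0.54.5
sub_owned = MetDataset(sub.data, copy=True)  # copy explicitly when isolation is needed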
pycontrails/models/apcemm/apcemm.py
CHANGED
@@ -474,7 +474,7 @@ class APCEMM(models.Model):
             for coord in ("longitude", "latitude", "level")
         }
         buffers["time_buffer"] = (0, self.params["max_age"] + self.params["dt_lagrangian"])
-        met = self.source.downselect_met(self.met, **buffers
+        met = self.source.downselect_met(self.met, **buffers)
         model = DryAdvection(
             met=met,
             dt_integration=self.params["dt_lagrangian"],
@@ -816,7 +816,7 @@ class APCEMM(models.Model):
         # Ensure required met data is present.
         # No buffers needed for interpolation!
         vars = ap_model.met_variables + ap_model.optional_met_variables + emissions.met_variables
-        met = self.source.downselect_met(self.met
+        met = self.source.downselect_met(self.met)
         met.ensure_vars(vars)
         met.standardize_variables(vars)
         for var in vars:
pycontrails/models/apcemm/utils.py
CHANGED
@@ -214,7 +214,7 @@ def generate_apcemm_input_met(
     )

     # Downselect met before interpolation
-    met = vector.downselect_met(met
+    met = vector.downselect_met(met)

     # Interpolate meteorology data onto vector
     scale_humidity = humidity_scaling is not None and "specific_humidity" not in vector
pycontrails/models/cocip/cocip.py
CHANGED
@@ -391,7 +391,7 @@ class Cocip(Model):
         # which is the positive direction for level
         logger.debug("Downselect met for Cocip initialization")
         level_buffer = 0, self.params["met_level_buffer"][1]
-        met = self.source.downselect_met(self.met, level_buffer=level_buffer
+        met = self.source.downselect_met(self.met, level_buffer=level_buffer)
         met = add_tau_cirrus(met)

         # Prepare flight for model
@@ -976,9 +976,9 @@ class Cocip(Model):
             for coord in ("longitude", "latitude", "level")
         }
         logger.debug("Downselect met for start of Cocip evolution")
-        met = self._downwash_contrail.downselect_met(self.met, **buffers
+        met = self._downwash_contrail.downselect_met(self.met, **buffers)
         met = add_tau_cirrus(met)
-        rad = self._downwash_contrail.downselect_met(self.rad, **buffers
+        rad = self._downwash_contrail.downselect_met(self.rad, **buffers)

         calc_continuous(self._downwash_contrail)
         calc_timestep_geometry(self._downwash_contrail)
@@ -1135,11 +1135,11 @@ class Cocip(Model):
             & (self._downwash_flight["time"] <= lookahead),
             copy=False,
         )
-        vector = GeoVectorDataset(
+        vector = GeoVectorDataset._from_fastpath(
             {
                 key: np.concatenate((latest_contrail[key], future_contrails[key]))
                 for key in ("longitude", "latitude", "level", "time")
-            }
+            },
         )

         # compute time buffer to ensure downselection extends to time_end
@@ -1152,7 +1152,7 @@ class Cocip(Model):
             max(np.timedelta64(0, "ns"), time_end - vector["time"].max()),
         )

-        return vector.downselect_met(met, **buffers
+        return vector.downselect_met(met, **buffers)

     def _create_downwash_contrail(self) -> GeoVectorDataset:
         """Get Contrail representation of downwash flight."""
@@ -1180,7 +1180,7 @@ class Cocip(Model):
             "persistent": self._downwash_flight["persistent_1"],
         }

-        contrail = GeoVectorDataset(downwash_contrail_data
+        contrail = GeoVectorDataset._from_fastpath(downwash_contrail_data).copy()
         contrail["formation_time"] = contrail["time"].copy()
         contrail["age"] = contrail["formation_time"] - contrail["time"]

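Several call sites now follow the ``_from_fastpath(...).copy()`` idiom visible in this hunk: build a zero-copy instance over arrays that are already mutually consistent, then take one explicit copy, instead of letting ``__init__`` validate and copy key-by-key. A behavioral sketch (synthetic data):

import numpy as np
from pycontrails import GeoVectorDataset

data = {
    "longitude": np.array([10.0, 10.5]),
    "latitude": np.array([50.0, 50.1]),
    "altitude": np.array([11000.0, 11000.0]),
    "time": np.array(["2022-03-01T00", "2022-03-01T01"], dtype="datetime64[ns]"),
}
old_style = GeoVectorDataset(data, copy=True)             # validates, then copies each array
new_style = GeoVectorDataset._from_fastpath(data).copy()  # trusts the data, copies once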
@@ -2300,7 +2300,7 @@ def calc_timestep_contrail_evolution(
     level_2 = geo.advect_level(level_1, vertical_velocity_1, rho_air_1, terminal_fall_speed_1, dt)
     altitude_2 = units.pl_to_m(level_2)

-    contrail_2 = GeoVectorDataset(
+    contrail_2 = GeoVectorDataset._from_fastpath(
         {
             "waypoint": waypoint_2,
             "flight_id": contrail_1["flight_id"],
@@ -2312,7 +2312,6 @@ def calc_timestep_contrail_evolution(
             "altitude": altitude_2,
             "level": level_2,
         },
-        copy=False,
     )
     intersection = contrail_2.coords_intersect_met(met)
     if not np.any(intersection):
pycontrails/models/cocipgrid/cocip_grid.py
CHANGED
@@ -11,11 +11,10 @@ from typing import TYPE_CHECKING, Any, NoReturn, TypeVar, overload
 import numpy as np
 import numpy.typing as npt
 import pandas as pd
-import xarray as xr

 import pycontrails
 from pycontrails.core import models
-from pycontrails.core.met import MetDataset
+from pycontrails.core.met import MetDataset, maybe_downselect_mds
 from pycontrails.core.vector import GeoVectorDataset, VectorDataset
 from pycontrails.models import humidity_scaling, sac
 from pycontrails.models.cocip import cocip, contrail_properties, wake_vortex, wind_shear
@@ -323,8 +322,8 @@ class CocipGrid(models.Model):
         If ``self.params["downselect_met"]`` is True, the :func:`_downselect_met` has
         already performed a spatial downselection of the met data.
         """
-        met = _maybe_downselect_mds(self.met, met, t0, t1)
-        rad = _maybe_downselect_mds(self.rad, rad, t0, t1)
+        met = maybe_downselect_mds(self.met, met, t0, t1)
+        rad = maybe_downselect_mds(self.rad, rad, t0, t1)

         return met, rad

@@ -615,7 +614,7 @@ class CocipGrid(models.Model):
         for idx, time in enumerate(times_in_filt):
             # For now, sticking with the convention that every vector should
             # have a constant time value.
-            source_slice = MetDataset(self.source.data.sel(time=[time]))
+            source_slice = MetDataset._from_fastpath(self.source.data.sel(time=[time]))

             # Convert the 4D grid to a vector
             vector = source_slice.to_vector()
@@ -1402,7 +1401,7 @@ def simulate_wake_vortex_downwash(

     # Experimental segment-free model
     if _is_segment_free_mode(vector):
-        return GeoVectorDataset(data, attrs=vector.attrs
+        return GeoVectorDataset._from_fastpath(data, attrs=vector.attrs).copy()

     # Stored in `_generate_new_grid_vectors`
     data["longitude_head"] = vector["longitude_head"]
@@ -1421,7 +1420,7 @@ def simulate_wake_vortex_downwash(
     # segment_length variable.
     data["segment_length"] = np.full_like(data["longitude"], segment_length)

-    return GeoVectorDataset(data, attrs=vector.attrs
+    return GeoVectorDataset._from_fastpath(data, attrs=vector.attrs).copy()


 def find_initial_persistent_contrails(
@@ -2022,7 +2021,7 @@ def advect(
         assert _is_segment_free_mode(contrail)
         assert dt_tail is None
         assert dt_head is None
-        return GeoVectorDataset(data, attrs=contrail.attrs
+        return GeoVectorDataset._from_fastpath(data, attrs=contrail.attrs).copy()

     longitude_head = contrail["longitude_head"]
     latitude_head = contrail["latitude_head"]
@@ -2064,7 +2063,7 @@ def advect(
     data["segment_length"] = segment_length_t2
     data["head_tail_dt"] = head_tail_dt_t2

-    return GeoVectorDataset(data, attrs=contrail.attrs
+    return GeoVectorDataset._from_fastpath(data, attrs=contrail.attrs).copy()


 def _aggregate_ef_summary(vector_list: list[VectorDataset]) -> VectorDataset | None:
@@ -2438,7 +2437,6 @@ def _downselect_met(
         longitude_buffer=longitude_buffer,
         level_buffer=level_buffer,
         time_buffer=(t0, t1),
-        copy=False,
     )

     rad = source.downselect_met(
@@ -2446,7 +2444,6 @@ def _downselect_met(
         latitude_buffer=latitude_buffer,
         longitude_buffer=longitude_buffer,
         time_buffer=(t0, t1),
-        copy=False,
     )

     return met, rad
@@ -2522,65 +2519,3 @@ def _check_end_time(
         f"Include additional time at the end of '{name}' or reduce 'max_age' parameter."
         f"{note}"
     )
-
-
-def _maybe_downselect_mds(
-    big_mds: MetDataset,
-    little_mds: MetDataset | None,
-    t0: np.datetime64,
-    t1: np.datetime64,
-) -> MetDataset:
-    """Possibly downselect ``big_mds`` to cover ``[t0, t1]``.
-
-    This implementation assumes ``t0 <= t1``, but this is not enforced.
-
-    If possible, ``little_mds`` is recycled to avoid re-loading data.
-
-    This function only downselects in the time domain.
-
-    If ``big_mds`` doesn't cover the time range, no error is raised.
-    """
-    if little_mds is not None:
-        little_time = little_mds.indexes["time"].to_numpy()
-        ignore_little = t0 > little_time[-1] or t1 < little_time[0]
-
-    big_time = big_mds.indexes["time"].to_numpy()
-    if little_mds is None or ignore_little:
-        i0 = np.searchsorted(big_time, t0, side="right").item()
-        i0 = max(0, i0 - 1)
-        i1 = np.searchsorted(big_time, t1, side="left").item()
-        i1 = min(i1 + 1, big_time.size)
-        return MetDataset(big_mds.data.isel(time=slice(i0, i1)), copy=False)
-
-    j0 = np.searchsorted(little_time, t0, side="right").item()
-    j0 = max(0, j0 - 1)
-    j1 = np.searchsorted(little_time, t1, side="left").item()
-    j1 = min(j1 + 1, little_time.size)
-
-    little_ds = little_mds.data.isel(time=slice(j0, j1))
-    little_time0 = little_time[j0]
-    little_time1 = little_time[j1 - 1]
-
-    if t0 >= little_time0 and t1 <= little_time1:
-        return MetDataset(little_ds, copy=False)
-
-    ds_concat = []
-    if t0 < little_time0:  # unlikely to encounter this case
-        i0 = np.searchsorted(big_time, t0, side="right").item()
-        i0 = max(0, i0 - 1)
-        i1 = np.searchsorted(big_time, little_time0, side="right").item()
-        i1 = max(i1, i0 + 1)
-        ds_concat.append(big_mds.data.isel(time=slice(i0, i1)))
-
-    ds_concat.append(little_ds)
-
-    if t1 > little_time1:
-        i0 = np.searchsorted(big_time, little_time1, side="left").item()
-        i0 = min(i0 + 1, big_time.size)
-        i1 = np.searchsorted(big_time, t1, side="left").item()
-        i1 = min(i1 + 1, big_time.size)
-        ds_concat.append(big_mds.data.isel(time=slice(i0, i1)))
-
-    # If little_mds is loaded into memory but big_mds is not,
-    # the concat operation below will load the slice of big_mds into memory.
-    return MetDataset(xr.concat(ds_concat, dim="time"), copy=False)