pycontrails 0.49.5-cp311-cp311-macosx_10_9_x86_64.whl → 0.50.1-cp311-cp311-macosx_10_9_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycontrails/_version.py +2 -2
- pycontrails/core/datalib.py +60 -38
- pycontrails/core/flight.py +11 -6
- pycontrails/core/interpolation.py +39 -1
- pycontrails/core/met.py +14 -16
- pycontrails/core/met_var.py +2 -2
- pycontrails/core/models.py +7 -3
- pycontrails/core/rgi_cython.cpython-311-darwin.so +0 -0
- pycontrails/core/vector.py +15 -13
- pycontrails/datalib/ecmwf/__init__.py +4 -0
- pycontrails/datalib/ecmwf/arco_era5.py +577 -0
- pycontrails/datalib/ecmwf/common.py +1 -1
- pycontrails/datalib/ecmwf/era5.py +2 -5
- pycontrails/datalib/ecmwf/variables.py +18 -0
- pycontrails/datalib/gfs/gfs.py +2 -2
- pycontrails/datalib/goes.py +14 -12
- pycontrails/models/cocip/cocip.py +48 -8
- pycontrails/models/cocip/cocip_params.py +20 -1
- pycontrails/models/cocip/contrail_properties.py +4 -9
- pycontrails/models/cocip/unterstrasser_wake_vortex.py +403 -0
- pycontrails/models/cocip/wake_vortex.py +22 -1
- pycontrails/models/cocipgrid/cocip_grid.py +103 -6
- pycontrails/models/cocipgrid/cocip_grid_params.py +25 -19
- pycontrails/models/issr.py +1 -1
- pycontrails/physics/constants.py +6 -0
- pycontrails/utils/dependencies.py +13 -11
- {pycontrails-0.49.5.dist-info → pycontrails-0.50.1.dist-info}/METADATA +4 -2
- {pycontrails-0.49.5.dist-info → pycontrails-0.50.1.dist-info}/RECORD +32 -30
- {pycontrails-0.49.5.dist-info → pycontrails-0.50.1.dist-info}/WHEEL +1 -1
- {pycontrails-0.49.5.dist-info → pycontrails-0.50.1.dist-info}/LICENSE +0 -0
- {pycontrails-0.49.5.dist-info → pycontrails-0.50.1.dist-info}/NOTICE +0 -0
- {pycontrails-0.49.5.dist-info → pycontrails-0.50.1.dist-info}/top_level.txt +0 -0
pycontrails/_version.py
CHANGED
pycontrails/core/datalib.py
CHANGED
@@ -3,7 +3,6 @@
 from __future__ import annotations
 
 import abc
-import dataclasses
 import hashlib
 import logging
 import pathlib
@@ -109,6 +108,11 @@ def parse_pressure_levels(
 ) -> list[int]:
     """Check input pressure levels are consistent type and ensure levels exist in ECMWF data source.
 
+    .. versionchanged:: 0.50.0
+
+        The returned pressure levels are now sorted. Pressure levels must be unique.
+        Raises ValueError if pressure levels have mixed signs.
+
     Parameters
     ----------
     pressure_levels : PressureLevelInput
@@ -127,18 +131,31 @@ def parse_pressure_levels(
     ValueError
         Raises ValueError if pressure level is not supported by ECMWF data source
     """
-    #
+    # Ensure pressure_levels is array-like
     if isinstance(pressure_levels, (int, float)):
         pressure_levels = [pressure_levels]
 
-    # Cast array-like to
-
+    # Cast array-like to int dtype and sort
+    arr = np.asarray(pressure_levels, dtype=int)
+    arr.sort()
 
-    #
-
-
-
-
+    # If any values are non-positive, the entire array should be [-1]
+    if np.any(arr <= 0) and not np.array_equal(arr, [-1]):
+        msg = f"Pressure levels must be all positive or all -1, got {arr}"
+        raise ValueError(msg)
+
+    # Ensure pressure levels are unique
+    if np.any(np.diff(arr) == 0):
+        msg = f"Pressure levels must be unique, got {arr}"
+        raise ValueError(msg)
+
+    out = arr.tolist()
+    if supported is None:
+        return out
+
+    if missing := set(out).difference(supported):
+        msg = f"Pressure levels {sorted(missing)} are not supported. Supported levels: {supported}"
+        raise ValueError(msg)
 
     return out
 
@@ -146,6 +163,11 @@ def parse_pressure_levels(
 def parse_variables(variables: VariableInput, supported: list[MetVariable]) -> list[MetVariable]:
     """Parse input variables.
 
+    .. versionchanged:: 0.50.0
+
+        The output is no longer copied. Each :class:`MetVariable` is a frozen dataclass,
+        so copying is unnecessary.
+
     Parameters
     ----------
     variables : VariableInput
@@ -178,35 +200,31 @@ def parse_variables(variables: VariableInput, supported: list[MetVariable]) -> l
     else:
         parsed_variables = variables
 
-    # unpack dict of supported str values from supported
     short_names = {v.short_name: v for v in supported}
     standard_names = {v.standard_name: v for v in supported}
     long_names = {v.long_name: v for v in supported}
-
-    # unpack dict of support int values from supported
     ecmwf_ids = {v.ecmwf_id: v for v in supported}
     grib1_ids = {v.grib1_id: v for v in supported}
+    supported_set = set(supported)
 
     for var in parsed_variables:
         matched = _find_match(
             var,
-
+            supported_set,
             ecmwf_ids,  # type: ignore[arg-type]
             grib1_ids,  # type: ignore[arg-type]
             short_names,
             standard_names,
             long_names,  # type: ignore[arg-type]
         )
-
-        # "replace" copies dataclass
-        met_var_list.append(dataclasses.replace(matched))
+        met_var_list.append(matched)
 
     return met_var_list
 
 
 def _find_match(
     var: VariableInput,
-    supported:
+    supported: set[MetVariable],
     ecmwf_ids: dict[int, MetVariable],
     grib1_ids: dict[int, MetVariable],
     short_names: dict[str, MetVariable],
@@ -215,9 +233,8 @@ def _find_match(
 ) -> MetVariable:
     """Find a match for input variable in supported."""
 
-    if isinstance(var, MetVariable):
-
-        return var
+    if isinstance(var, MetVariable) and var in supported:
+        return var
 
     # list of MetVariable options
     # here we extract the first MetVariable in var that is supported
@@ -230,21 +247,19 @@ def _find_match(
         if v in supported:
             return v
 
-    # int code
     elif isinstance(var, int):
-        if
-            return
-        if
-            return
+        if ret := ecmwf_ids.get(var):
+            return ret
+        if ret := grib1_ids.get(var):
+            return ret
 
-    # string reference
     elif isinstance(var, str):
-        if
-            return
-        if
-            return
-        if
-            return
+        if ret := short_names.get(var):
+            return ret
+        if ret := standard_names.get(var):
+            return ret
+        if ret := long_names.get(var):
+            return ret
 
     msg = f"{var} is not in supported parameters. Supported parameters include: {standard_names}"
     raise ValueError(msg)
@@ -395,6 +410,14 @@ class MetDataSource(abc.ABC):
         """
         return [v.standard_name for v in self.variables]
 
+    @property
+    def is_single_level(self) -> bool:
+        """Return True if the datasource is single level data.
+
+        .. versionadded:: 0.50.0
+        """
+        return self.pressure_levels == [-1]
+
     @property
     def pressure_level_variables(self) -> list[MetVariable]:
         """Parameters available from data source.
@@ -426,10 +449,9 @@ class MetDataSource(abc.ABC):
         list[MetVariable] | None
             List of MetVariable available in datasource
         """
-
-
-
-        return self.single_level_variables
+        return (
+            self.single_level_variables if self.is_single_level else self.pressure_level_variables
+        )
 
     @property
     def supported_pressure_levels(self) -> list[int] | None:
@@ -497,7 +519,7 @@ class MetDataSource(abc.ABC):
     def open_metdataset(
         self,
         dataset: xr.Dataset | None = None,
-        xr_kwargs: dict[str,
+        xr_kwargs: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> MetDataset:
        """Open MetDataset from data source.
@@ -510,7 +532,7 @@ class MetDataSource(abc.ABC):
         dataset : xr.Dataset | None, optional
             Input :class:`xr.Dataset` loaded manually.
             The dataset must have the same format as the original data source API or files.
-        xr_kwargs : dict[str,
+        xr_kwargs : dict[str, Any] | None, optional
             Dictionary of keyword arguments passed into :func:`xarray.open_mfdataset`
             when opening files. Examples include "chunks", "engine", "parallel", etc.
             Ignored if ``dataset`` is input.
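The new validation in parse_pressure_levels above is small but behavior-changing: levels come back sorted, duplicates and mixed signs are rejected, and [-1] alone denotes single-level data. Below is a standalone sketch of that logic; the function name validate_levels and the example values are illustrative, not part of pycontrails.

    import numpy as np

    def validate_levels(pressure_levels, supported=None):
        # Wrap scalars so np.asarray always sees a sequence
        if isinstance(pressure_levels, (int, float)):
            pressure_levels = [pressure_levels]
        arr = np.asarray(pressure_levels, dtype=int)
        arr.sort()
        # Mixed signs are rejected; [-1] alone means single-level data
        if np.any(arr <= 0) and not np.array_equal(arr, [-1]):
            raise ValueError(f"Pressure levels must be all positive or all -1, got {arr}")
        # Sorting makes duplicate detection a simple adjacent comparison
        if np.any(np.diff(arr) == 0):
            raise ValueError(f"Pressure levels must be unique, got {arr}")
        out = arr.tolist()
        if supported is not None and (missing := set(out).difference(supported)):
            raise ValueError(f"Pressure levels {sorted(missing)} are not supported")
        return out

    print(validate_levels([300, 250, 200]))  # [200, 250, 300]
    print(validate_levels(-1))               # [-1]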
pycontrails/core/flight.py
CHANGED
@@ -787,6 +787,13 @@ class Flight(GeoVectorDataset):
         Waypoints are resampled according to the frequency ``freq``. Values for :attr:`data`
         columns ``longitude``, ``latitude``, and ``altitude`` are interpolated.
 
+        Resampled waypoints will include all multiples of ``freq`` between the flight
+        start and end time. For example, when resampling to a frequency of 1 minute,
+        a flight that starts at 2020/1/1 00:00:59 and ends at 2020/1/1 00:01:01
+        will return a single waypoint at 2020/1/1 00:01:00, whereas a flight that
+        starts at 2020/1/1 00:01:01 and ends at 2020/1/1 00:01:59 will return an empty
+        flight.
+
         Parameters
         ----------
         freq : str, optional
@@ -1349,8 +1356,8 @@ class Flight(GeoVectorDataset):
         >>> # Intersect and attach
         >>> fl["air_temperature"] = fl.intersect_met(met['air_temperature'])
         >>> fl["air_temperature"]
-        array([235.
-               234.
+        array([235.94657007, 235.95766965, 235.96873412, ..., 234.59917962,
+               234.60387402, 234.60845312])
 
         >>> # Length (in meters) of waypoints whose temperature exceeds 236K
         >>> fl.length_met("air_temperature", threshold=236)
@@ -2063,11 +2070,9 @@ def _resample_to_freq(df: pd.DataFrame, freq: str) -> tuple[pd.DataFrame, pd.Dat
 
     # Manually create a new index that includes all the original index values
    # and the resampled-to-freq index values.
-    t0 = df.index[0]
+    t0 = df.index[0].ceil(freq)
    t1 = df.index[-1]
-    t = pd.date_range(t0, t1, freq=freq, name="time")
-    if t[0] < t0:
-        t = t[1:]
+    t = pd.date_range(t0, t1, freq=freq, name="time")
 
    concat_arr = np.concatenate([df.index, t])
    concat_arr = np.unique(concat_arr)
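The change to _resample_to_freq above replaces the post-hoc trimming of the first timestamp with a ceil on the start time, which is what produces the boundary behavior documented in the new resample_and_fill docstring. A minimal pandas sketch of that behavior; the helper name resampled_times and the example times are illustrative only.

    import pandas as pd

    def resampled_times(start, end, freq="1min"):
        # Ceil the start so only whole multiples of freq inside the interval remain;
        # the result is empty when no multiple falls between start and end.
        t0 = pd.Timestamp(start).ceil(freq)
        return pd.date_range(t0, pd.Timestamp(end), freq=freq)

    print(resampled_times("2020-01-01 00:00:59", "2020-01-01 00:01:01"))  # one time: 00:01:00
    print(resampled_times("2020-01-01 00:01:01", "2020-01-01 00:01:59"))  # empty index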
pycontrails/core/interpolation.py
CHANGED
@@ -71,7 +71,9 @@ class PycontrailsRegularGridInterpolator(scipy.interpolate.RegularGridInterpolat
 
         self.grid = points
         self.values = values
-
+        # TODO: consider supporting updated tensor-product spline methods
+        # see https://github.com/scipy/scipy/releases/tag/v1.13.0
+        self.method = _pick_method(scipy.__version__, method)
         self.bounds_error = bounds_error
         self.fill_value = fill_value
 
@@ -219,6 +221,42 @@ class PycontrailsRegularGridInterpolator(scipy.interpolate.RegularGridInterpolat
         raise ValueError(msg)
 
 
+def _pick_method(scipy_version: str, method: str) -> str:
+    """Select an interpolation method.
+
+    For scipy versions 1.13.0 and later, fall back on legacy implementations
+    of tensor-product spline methods. The default implementations in 1.13.0
+    and later are incompatible with this class.
+
+    Parameters
+    ----------
+    scipy_version : str
+        scipy version (major.minor.patch)
+
+    method : str
+        Interpolation method. Passed into :class:`scipy.interpolate.RegularGridInterpolator`
+        as-is unless ``scipy_version`` is 1.13.0 or later and ``method`` is ``"slinear"``,
+        ``"cubic"``, or ``"quintic"``. In this case, ``"_legacy"`` is appended to ``method``.
+
+    Returns
+    -------
+    str
+        Interpolation method adjusted for compatibility with this class.
+    """
+    try:
+        version = scipy_version.split(".")
+        major = int(version[0])
+        minor = int(version[1])
+    except (IndexError, ValueError) as exc:
+        msg = f"Failed to parse major and minor version from {scipy_version}"
+        raise ValueError(msg) from exc
+
+    reimplemented_methods = ["slinear", "cubic", "quintic"]
+    if major > 1 or (major == 1 and minor >= 13) and method in reimplemented_methods:
+        return method + "_legacy"
+    return method
+
+
 def _floatize_time(
     time: npt.NDArray[np.datetime64], offset: np.datetime64
 ) -> npt.NDArray[np.float64]:
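The new _pick_method helper above guards against the scipy 1.13 rewrite of the tensor-product spline methods by requesting the "*_legacy" implementations, per the docstring in the diff. A simplified standalone sketch of the same version gate; pick_method is a hypothetical name and the tuple comparison condenses the check used in the library code.

    def pick_method(scipy_version: str, method: str) -> str:
        # scipy >= 1.13 ships new default implementations of these spline methods;
        # the pre-1.13 behavior remains available under the "*_legacy" names.
        major, minor = (int(part) for part in scipy_version.split(".")[:2])
        if (major, minor) >= (1, 13) and method in ("slinear", "cubic", "quintic"):
            return method + "_legacy"
        return method

    assert pick_method("1.12.0", "cubic") == "cubic"
    assert pick_method("1.13.1", "cubic") == "cubic_legacy"
    assert pick_method("1.13.1", "linear") == "linear"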
pycontrails/core/met.py
CHANGED
@@ -674,13 +674,10 @@ class MetDataset(MetBase):
         >>> da = mda.data  # Underlying `xarray` object
 
         >>> # Check out a few values
-        >>> da[5:
-        array([[224.
-               [224.
-               [224.
-               [224.10617, 224.43282, 224.7777 , 225.17812, 225.62166],
-               [224.11115, 224.44028, 224.7835 , 225.18393, 225.62663]],
-              dtype=float32)
+        >>> da[5:8, 5:8, 1, 1].values
+        array([[224.08959005, 224.41374427, 224.75945349],
+               [224.09456429, 224.42037658, 224.76525676],
+               [224.10036756, 224.42617985, 224.77106004]])
 
         >>> # Mean temperature over entire array
         >>> da.mean().load().item()
@@ -1065,8 +1062,8 @@ class MetDataset(MetBase):
 
         """
         coords_keys = self.data.dims
-
-        coords_vals = [
+        indexes = self.indexes
+        coords_vals = [indexes[key].values for key in coords_keys]
         coords_meshes = np.meshgrid(*coords_vals, indexing="ij")
         raveled_coords = (mesh.ravel() for mesh in coords_meshes)
         data = dict(zip(coords_keys, raveled_coords))
@@ -1181,12 +1178,12 @@ class MetDataset(MetBase):
         level: npt.ArrayLike | float,
         time: npt.ArrayLike | np.datetime64,
     ) -> MetDataset:
-        """Create a :class:`MetDataset` containing a coordinate skeleton from coordinate arrays.
+        r"""Create a :class:`MetDataset` containing a coordinate skeleton from coordinate arrays.
 
         Parameters
         ----------
         longitude, latitude : npt.ArrayLike | float
-            Horizontal coordinates, in [:math
+            Horizontal coordinates, in [:math:`\deg`]
         level : npt.ArrayLike | float
             Vertical coordinate, in [:math:`hPa`]
         time: npt.ArrayLike | np.datetime64,
@@ -1618,15 +1615,15 @@ class MetDataArray(MetBase):
 
         >>> # Interpolation at a grid point agrees with value
         >>> mda.interpolate(1, 2, 300, np.datetime64('2022-03-01T14:00'))
-        array([241.
+        array([241.91972984])
 
         >>> da = mda.data
         >>> da.sel(longitude=1, latitude=2, level=300, time=np.datetime64('2022-03-01T14')).item()
-        241.
+        241.9197298421629
 
         >>> # Interpolation off grid
         >>> mda.interpolate(1.1, 2.1, 290, np.datetime64('2022-03-01 13:10'))
-        array([239.
+        array([239.83793798])
 
         >>> # Interpolate along path
         >>> longitude = np.linspace(1, 2, 10)
@@ -1634,8 +1631,9 @@ class MetDataArray(MetBase):
         >>> level = np.linspace(200, 300, 10)
         >>> time = pd.date_range("2022-03-01T14", periods=10, freq="5min")
         >>> mda.interpolate(longitude, latitude, level, time)
-        array([220.
-
+        array([220.44347694, 223.08900738, 225.74338924, 228.41642088,
+               231.10858599, 233.54857391, 235.71504913, 237.86478872,
+               239.99274623, 242.10792167])
         """
         # Load if necessary
         if not self.in_memory:
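The MetDataset change above swaps direct coordinate access for the cached indexes mapping but keeps the meshgrid-and-ravel pattern for enumerating every grid point. A small self-contained illustration of that pattern; the coordinate names and values here are arbitrary.

    import numpy as np

    longitude = np.array([0.0, 1.0])
    latitude = np.array([10.0, 20.0, 30.0])

    # One mesh per coordinate, then ravel each mesh into a flat column so that
    # row i of the resulting table is the i-th grid point.
    meshes = np.meshgrid(longitude, latitude, indexing="ij")
    table = {name: mesh.ravel() for name, mesh in zip(("longitude", "latitude"), meshes)}

    print(table["longitude"])  # [0. 0. 0. 1. 1. 1.]
    print(table["latitude"])   # [10. 20. 30. 10. 20. 30.]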
pycontrails/core/met_var.py
CHANGED
@@ -76,12 +76,12 @@ class MetVariable:
 
     @property
     def ecmwf_link(self) -> str | None:
-        """Database link in the ECMWF
+        """Database link in the ECMWF Parameter Database if :attr:`ecmwf_id` is defined.
 
         Returns
         -------
         str
-            Database link in the ECMWF
+            Database link in the ECMWF Parameter Database
         """
         return (
             f"https://apps.ecmwf.int/codes/grib/param-db?id={self.ecmwf_id}"
pycontrails/core/models.py
CHANGED
@@ -440,17 +440,21 @@ class Model(ABC):
             # Return dataset with the same coords as self.met, but empty data_vars
             return MetDataset(xr.Dataset(coords=self.met.data.coords))
 
+        copy_source = self.params["copy_source"]
+
         # Turn Sequence into Fleet
         if isinstance(source, Sequence):
-
-
+            if not copy_source:
+                msg = "Parameter copy_source=False is not supported for Sequence[Flight] source"
+                raise ValueError(msg)
+            return Fleet.from_seq(source)
 
         # Raise error if source is not a MetDataset or GeoVectorDataset
         if not isinstance(source, (MetDataset, GeoVectorDataset)):
             msg = f"Unknown source type: {type(source)}"
             raise TypeError(msg)
 
-        if
+        if copy_source:
             source = source.copy()
 
         if not isinstance(source, Flight):
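The models.py change above reads the copy_source parameter once and rejects copy_source=False for sequence sources, since a sequence of flights has to be merged into a Fleet rather than mutated in place. A standalone sketch of that control flow; prepare_source and the list stand-in for Fleet.from_seq are illustrative only, not pycontrails code.

    from collections.abc import Sequence

    def prepare_source(source, copy_source: bool):
        # A sequence of flights must be merged into a single object, so mutating
        # the caller's flights in place (copy_source=False) cannot be supported.
        if isinstance(source, Sequence):
            if not copy_source:
                raise ValueError("copy_source=False is not supported for a sequence source")
            return list(source)  # stand-in for Fleet.from_seq(source)
        return source.copy() if copy_source else source

    data = {"altitude": [10000.0, 10500.0]}
    assert prepare_source(data, copy_source=True) is not data  # model works on a fresh copy
    assert prepare_source(data, copy_source=False) is data     # model mutates in place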
pycontrails/core/rgi_cython.cpython-311-darwin.so
CHANGED
Binary file
pycontrails/core/vector.py
CHANGED
@@ -1694,12 +1694,14 @@ class GeoVectorDataset(VectorDataset):
 
         >>> # Intersect
         >>> fl.intersect_met(met['air_temperature'], method='nearest')
-        array([231.
-               231.
+        array([231.62969892, 230.72604651, 232.24318771, 231.88338483,
+               231.06429438, 231.59073409, 231.65125393, 231.93064004,
+               232.03344087, 231.65954432])
 
         >>> fl.intersect_met(met['air_temperature'], method='linear')
-        array([225.
-
+        array([225.77794552, 225.13908414, 226.231218  , 226.31831528,
+               225.56102321, 225.81192149, 226.03192642, 226.22056121,
+               226.03770174, 225.63226188])
 
         >>> # Interpolate and attach to `Flight` instance
         >>> for key in met:
@@ -1708,11 +1710,11 @@ class GeoVectorDataset(VectorDataset):
         >>> # Show the final three columns of the dataframe
         >>> fl.dataframe.iloc[:, -3:].head()
                          time  air_temperature  specific_humidity
-        0 2022-03-01 00:00:00       225.
+        0 2022-03-01 00:00:00       225.777946           0.000132
         1 2022-03-01 00:13:20       225.139084           0.000132
-        2 2022-03-01 00:26:40       226.
-        3 2022-03-01 00:40:00       226.
-        4 2022-03-01 00:53:20       225.
+        2 2022-03-01 00:26:40       226.231218           0.000107
+        3 2022-03-01 00:40:00       226.318315           0.000171
+        4 2022-03-01 00:53:20       225.561022           0.000109
 
         """
         # Override use_indices in certain situations
@@ -1899,19 +1901,19 @@ class GeoVectorDataset(VectorDataset):
         MetDataset | MetDataArray
             Copy of downselected MetDataset or MetDataArray.
         """
-
+        indexes = met.indexes
         lon_slice = coordinates.slice_domain(
-
+            indexes["longitude"].to_numpy(),
             self["longitude"],
             buffer=longitude_buffer,
         )
         lat_slice = coordinates.slice_domain(
-
+            indexes["latitude"].to_numpy(),
             self["latitude"],
             buffer=latitude_buffer,
         )
         time_slice = coordinates.slice_domain(
-
+            indexes["time"].to_numpy(),
             self["time"],
             buffer=time_buffer,
         )
@@ -1921,7 +1923,7 @@ class GeoVectorDataset(VectorDataset):
             level_slice = slice(None)
         else:
             level_slice = coordinates.slice_domain(
-
+                indexes["level"].to_numpy(),
                 self.level,
                 buffer=level_buffer,
             )
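The downselect change above pulls each coordinate from met.indexes before handing it to coordinates.slice_domain. As a rough illustration of what such a domain slice does, here is a self-contained sketch that buffers the vector's bounds and converts them to index positions on a sorted grid; this is a simplification, not the pycontrails implementation.

    import numpy as np

    def slice_domain(grid: np.ndarray, values: np.ndarray, buffer: float = 0.0) -> slice:
        # Expand the vector's bounds by the buffer, then convert to index positions
        # on the sorted coordinate grid.
        lo = values.min() - buffer
        hi = values.max() + buffer
        i0 = max(int(np.searchsorted(grid, lo, side="right")) - 1, 0)
        i1 = min(int(np.searchsorted(grid, hi, side="left")) + 1, grid.size)
        return slice(i0, i1)

    longitude_grid = np.arange(-180.0, 180.0, 0.25)
    waypoints = np.array([-74.2, -73.8, -72.9])
    sl = slice_domain(longitude_grid, waypoints, buffer=1.0)
    print(longitude_grid[sl][[0, -1]])  # [-75.25 -71.75]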
pycontrails/datalib/ecmwf/__init__.py
CHANGED
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+from pycontrails.datalib.ecmwf.arco_era5 import ARCOERA5
 from pycontrails.datalib.ecmwf.era5 import ERA5
 from pycontrails.datalib.ecmwf.hres import HRES
 from pycontrails.datalib.ecmwf.ifs import IFS
@@ -11,6 +12,7 @@ from pycontrails.datalib.ecmwf.variables import (
     SURFACE_VARIABLES,
     CloudAreaFraction,
     CloudAreaFractionInLayer,
+    Divergence,
     PotentialVorticity,
     RelativeHumidity,
     RelativeVorticity,
@@ -23,11 +25,13 @@ from pycontrails.datalib.ecmwf.variables import (
 )
 
 __all__ = [
+    "ARCOERA5",
     "ERA5",
     "HRES",
     "IFS",
     "CloudAreaFraction",
     "CloudAreaFractionInLayer",
+    "Divergence",
     "PotentialVorticity",
     "RelativeHumidity",
     "RelativeVorticity",
|