pycontrails-0.47.3-cp310-cp310-macosx_10_9_x86_64.whl → pycontrails-0.48.1-cp310-cp310-macosx_10_9_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: the registry has flagged this version of pycontrails as potentially problematic.
- pycontrails/__init__.py +2 -2
- pycontrails/_version.py +2 -2
- pycontrails/core/coordinates.py +17 -10
- pycontrails/core/datalib.py +155 -113
- pycontrails/core/flight.py +45 -28
- pycontrails/core/met.py +163 -39
- pycontrails/core/met_var.py +9 -9
- pycontrails/core/models.py +27 -0
- pycontrails/core/rgi_cython.cpython-310-darwin.so +0 -0
- pycontrails/core/vector.py +257 -33
- pycontrails/datalib/ecmwf/common.py +14 -65
- pycontrails/datalib/ecmwf/era5.py +22 -27
- pycontrails/datalib/ecmwf/hres.py +53 -88
- pycontrails/datalib/ecmwf/ifs.py +10 -2
- pycontrails/datalib/gfs/gfs.py +68 -106
- pycontrails/models/accf.py +181 -154
- pycontrails/models/cocip/cocip.py +205 -105
- pycontrails/models/cocip/cocip_params.py +0 -4
- pycontrails/models/cocip/wake_vortex.py +9 -7
- pycontrails/models/cocipgrid/cocip_grid.py +2 -6
- pycontrails/models/issr.py +29 -31
- pycontrails/models/pcr.py +5 -12
- pycontrails/models/sac.py +24 -27
- pycontrails/models/tau_cirrus.py +22 -5
- pycontrails/utils/types.py +1 -1
- {pycontrails-0.47.3.dist-info → pycontrails-0.48.1.dist-info}/METADATA +2 -2
- {pycontrails-0.47.3.dist-info → pycontrails-0.48.1.dist-info}/RECORD +31 -31
- {pycontrails-0.47.3.dist-info → pycontrails-0.48.1.dist-info}/WHEEL +1 -1
- {pycontrails-0.47.3.dist-info → pycontrails-0.48.1.dist-info}/LICENSE +0 -0
- {pycontrails-0.47.3.dist-info → pycontrails-0.48.1.dist-info}/NOTICE +0 -0
- {pycontrails-0.47.3.dist-info → pycontrails-0.48.1.dist-info}/top_level.txt +0 -0
pycontrails/__init__.py
CHANGED
@@ -21,13 +21,13 @@ from __future__ import annotations
 import logging
 from importlib import metadata
 
-import dask
+import dask.config
 
 # Work around for https://github.com/pydata/xarray/issues/7259
 # Only occurs for xarray 2022.11 and above
 try:
     import netCDF4  # noqa: F401
-except …
+except ImportError:
     pass
 
 from pycontrails.core.cache import DiskCacheStore, GCPCacheStore
pycontrails/_version.py
CHANGED
pycontrails/core/coordinates.py
CHANGED
@@ -5,12 +5,13 @@ from __future__ import annotations
 import warnings
 
 import numpy as np
+import numpy.typing as npt
 import pandas as pd
 
 
 def slice_domain(
     domain: np.ndarray,
-    request: …
+    request: npt.ArrayLike,
     buffer: tuple[float | np.timedelta64, float | np.timedelta64] = (0.0, 0.0),
 ) -> slice:
     """Return slice of ``domain`` containing coordinates overlapping ``request``.
@@ -34,9 +35,8 @@ def slice_domain(
     ----------
     domain : np.ndarray
         Full set of domain values
-    request : …
-        Requested values. Only the …
-        a full array-like object or a tuple of ``(min, max)``.
+    request : npt.ArrayLike
+        Requested values. Only the nanmin and nanmax values are considered.
     buffer : tuple[float | np.timedelta64, float | np.timedelta64], optional
         Extend the domain past the requested coordinates by ``buffer[0]`` on the low side
         and ``buffer[1]`` on the high side.
@@ -89,23 +89,30 @@ def slice_domain(
     if buffer == (None, None):
         return slice(None, None)
 
-    # …
-…
+    # Remove nans from request
+    request = np.asarray(request)
+    mask = np.isnan(request)
+    if mask.all():
         return slice(None, None)
 
+    request = request[~mask]
+
     # if the whole domain or request is nan, then there is nothing to slice
     if np.isnan(domain).all():
         raise ValueError("Domain is all nan on request")
 
     # ensure domain is sorted
-    zero: float | np.timedelta64 …
+    zero: float | np.timedelta64
     if pd.api.types.is_datetime64_dtype(domain.dtype):
         zero = np.timedelta64(0)
+    else:
+        zero = 0.0
 
     if not np.all(np.diff(domain) >= zero):
         raise ValueError("Domain must be sorted in ascending order")
 
-…
+    buf0, buf1 = buffer
+    if buf0 < zero or buf1 < zero:
         warnings.warn(
             "Found buffer with negative value. This is unexpected "
             "and will reduce the size of the requested domain instead of "
@@ -116,8 +123,8 @@ def slice_domain(
     # get the index of the closest value to request min and max
     # side left returns `i`: domain[i-1] < request <= domain[i]
     # side right returns `i`: domain[i-1] <= request < domain[i]
-    idx_min = np.searchsorted(domain, np.…
-    idx_max = np.searchsorted(domain, np.…
+    idx_min = np.searchsorted(domain, np.min(request) - buf0, side="right") - 1
+    idx_max = np.searchsorted(domain, np.max(request) + buf1, side="left") + 1
 
     # clip idx_min between [0, len(domain) - 2]
     idx_min = min(len(domain) - 2, max(idx_min, 0))
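Taken together, the new `slice_domain` drops NaN entries from `request` before locating bounds, so a partially-NaN request no longer poisons the min/max. A runnable sketch of that behavior, mirroring the internals shown above (not calling the package itself):

import numpy as np

domain = np.arange(-180.0, 180.0, 0.25)  # sorted coordinate axis
request = np.array([np.nan, -30.0, 45.5, np.nan])

mask = np.isnan(request)
valid = request[~mask]  # NaNs removed, as in the diff

# side="right" - 1 / side="left" + 1 bracket the requested range
idx_min = np.searchsorted(domain, valid.min(), side="right") - 1
idx_max = np.searchsorted(domain, valid.max(), side="left") + 1
print(domain[idx_min], domain[idx_max])  # -30.0 45.75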
pycontrails/core/datalib.py
CHANGED
@@ -80,16 +80,18 @@ def parse_timesteps(time: TimeInput | None, freq: str | None = "1H") -> list[datetime]:
     elif len(time) == 1:
         time = (time[0], time[0])
     elif len(time) != 2:
-…
+        msg = f"Input time bounds must have length < 2 and > 0, got {len(time)}"
+        raise ValueError(msg)
 
     # convert all to pandas Timestamp
     try:
         timestamps = [pd.to_datetime(t) for t in time]
     except ValueError as e:
-…
-            f"Failed to parse …
-            "must be compatible with 'pd.to_datetime()'"
+        msg = (
+            f"Failed to parse time input {time}. "
+            "Time input must be compatible with 'pd.to_datetime()'"
         )
+        raise ValueError(msg) from e
 
     if freq is None:
         daterange = pd.DatetimeIndex([timestamps[0], timestamps[1]])
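The rewritten error paths follow the assign-message-then-raise pattern (consistent with the flake8-errmsg/EM lint convention), and the parse failure is now chained with `from e`. A runnable sketch of that parse-and-wrap flow, reusing the message text from the diff:

import pandas as pd

time = ("2022-03-01 00:00", "not-a-time")
try:
    timestamps = [pd.to_datetime(t) for t in time]
except ValueError as e:
    msg = (
        f"Failed to parse time input {time}. "
        "Time input must be compatible with 'pd.to_datetime()'"
    )
    # raises ValueError with the original parser error kept as __cause__
    raise ValueError(msg) from e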
@@ -129,15 +131,15 @@ def parse_pressure_levels(
         pressure_levels = [pressure_levels]
 
     # Cast array-like to list of ints
-…
-    pressure_levels_ = np.asarray(pressure_levels, dtype=int).tolist()
+    out = np.asarray(pressure_levels, dtype=int).tolist()
 
     # ensure pressure levels are valid
-    for pl in …
-        if supported …
-…
+    for pl in out:
+        if supported and pl not in supported:
+            msg = f"Pressure level {pl} is not supported. Supported levels: {supported}"
+            raise ValueError(msg)
 
-    return …
+    return out
 
 
 def parse_variables(variables: VariableInput, supported: list[MetVariable]) -> list[MetVariable]:
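`parse_pressure_levels` now validates as it iterates and returns the plain `out` list. A self-contained re-implementation sketch built from the lines above (the real function also wraps scalar input in a list, per the context line):

import numpy as np

def parse_pressure_levels(pressure_levels, supported=None):
    out = np.asarray(pressure_levels, dtype=int).tolist()
    for pl in out:
        if supported and pl not in supported:
            msg = f"Pressure level {pl} is not supported. Supported levels: {supported}"
            raise ValueError(msg)
    return out

print(parse_pressure_levels([300.0, 250.0], supported=[300, 250, 200]))  # [300, 250]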
@@ -175,70 +177,87 @@ def parse_variables(variables: VariableInput, supported: list[MetVariable]) -> list[MetVariable]:
     else:
         parsed_variables = variables
 
-    # unpack …
-    short_names = …
-    standard_names = …
-    long_names = …
+    # unpack dict of supported str values from supported
+    short_names = {v.short_name: v for v in supported}
+    standard_names = {v.standard_name: v for v in supported}
+    long_names = {v.long_name: v for v in supported}
 
-    # unpack …
-    ecmwf_ids = …
-    grib1_ids = …
+    # unpack dict of support int values from supported
+    ecmwf_ids = {v.ecmwf_id: v for v in supported}
+    grib1_ids = {v.grib1_id: v for v in supported}
 
     for var in parsed_variables:
-…
-        for v in var:
-            # sanity check since we don't support other types as lists
-            if not isinstance(v, MetVariable):
-                raise ValueError("Variable options must be of type MetVariable.")
-            if v in supported:
-                matched_variable = v
-                break
-
-        # int code
-        elif isinstance(var, int):
-            if var in ecmwf_ids:
-                matched_variable = supported[ecmwf_ids.index(var)]
-            elif var in grib1_ids:
-                matched_variable = supported[grib1_ids.index(var)]
-
-        # string reference
-        elif isinstance(var, str):
-            if var in short_names:
-                matched_variable = supported[short_names.index(var)]
-            elif var in standard_names:
-                matched_variable = supported[standard_names.index(var)]
-            elif var in long_names:
-                matched_variable = supported[long_names.index(var)]
-
-        if matched_variable is None:
-            raise ValueError(
-                f"{var} is not in supported parameters. "
-                + f"Supported parameters include: {standard_names}"
-            )
+        matched = _find_match(
+            var,
+            supported,
+            ecmwf_ids,  # type: ignore[arg-type]
+            grib1_ids,  # type: ignore[arg-type]
+            short_names,
+            standard_names,
+            long_names,  # type: ignore[arg-type]
+        )
 
         # "replace" copies dataclass
-        met_var_list.append(dataclasses.replace(…
+        met_var_list.append(dataclasses.replace(matched))
 
     return met_var_list
 
 
-def parse_grid(grid: float, supported: list[float]) -> float:
+def _find_match(
+    var: VariableInput,
+    supported: list[MetVariable],
+    ecmwf_ids: dict[int, MetVariable],
+    grib1_ids: dict[int, MetVariable],
+    short_names: dict[str, MetVariable],
+    standard_names: dict[str, MetVariable],
+    long_names: dict[str, MetVariable],
+) -> MetVariable:
+    """Find a match for input variable in supported."""
+
+    if isinstance(var, MetVariable):
+        if var in supported:
+            return var
+
+    # list of MetVariable options
+    # here we extract the first MetVariable in var that is supported
+    elif isinstance(var, (list, tuple)):
+        for v in var:
+            # sanity check since we don't support other types as lists
+            if not isinstance(v, MetVariable):
+                msg = "Variable options must be of type MetVariable."
+                raise TypeError(msg)
+            if v in supported:
+                return v
+
+    # int code
+    elif isinstance(var, int):
+        if var in ecmwf_ids:
+            return ecmwf_ids[var]
+        if var in grib1_ids:
+            return grib1_ids[var]
+
+    # string reference
+    elif isinstance(var, str):
+        if var in short_names:
+            return short_names[var]
+        if var in standard_names:
+            return standard_names[var]
+        if var in long_names:
+            return long_names[var]
+
+    msg = f"{var} is not in supported parameters. Supported parameters include: {standard_names}"
+    raise ValueError(msg)
+
+
+def parse_grid(grid: float, supported: Sequence[float]) -> float:
     """Parse input grid spacing.
 
     Parameters
     ----------
     grid : float
         Input grid float
-    supported : …
-…
+    supported : Sequence[float]
+        Sequence of support grid values
 
     Returns
     -------
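The heart of this refactor is replacing parallel lists plus `list.index` scans with dictionaries keyed by each lookup field, so `_find_match` resolves names and ids in O(1). A toy sketch of the pattern with a simplified stand-in for MetVariable (the real class lives in pycontrails.core.met_var):

import dataclasses

@dataclasses.dataclass(frozen=True)
class Var:  # stand-in for MetVariable
    short_name: str
    ecmwf_id: int

supported = [Var("t", 130), Var("q", 133)]
short_names = {v.short_name: v for v in supported}
ecmwf_ids = {v.ecmwf_id: v for v in supported}

print(short_names["q"])  # Var(short_name='q', ecmwf_id=133)
print(ecmwf_ids[130])    # Var(short_name='t', ecmwf_id=130)

Note also that a non-MetVariable entry inside a list now raises TypeError rather than ValueError, a minor behavior change for callers catching the old exception.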
@@ -251,7 +270,8 @@ def parse_grid(grid: float, supported: list[float]) -> float:
         Raises ValueError when ``grid`` is not in supported
     """
     if grid not in supported:
-…
+        msg = f"Grid input {grid} must be one of {supported}"
+        raise ValueError(msg)
 
     return grid
 
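`parse_grid` itself is a pure membership check. A runnable sketch assembled from the lines above (the supported values here are illustrative):

def parse_grid(grid: float, supported) -> float:
    if grid not in supported:
        msg = f"Grid input {grid} must be one of {supported}"
        raise ValueError(msg)
    return grid

print(parse_grid(0.25, [0.25, 0.5, 1.0]))  # 0.25
# parse_grid(0.3, [0.25, 0.5, 1.0]) -> ValueError: Grid input 0.3 must be one of [0.25, 0.5, 1.0]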
@@ -277,7 +297,8 @@ def round_hour(time: datetime, hour: int) -> datetime:
         Description
     """
     if hour not in range(1, 24):
-…
+        msg = f"hour must be between [1, 23], got {hour}"
+        raise ValueError(msg)
 
     hour = (time.hour // hour) * hour
     return datetime(time.year, time.month, time.day, hour, 0, 0)
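`round_hour` floors a timestamp to the nearest multiple of `hour`; the change only restructures the raise. A worked example using the function body shown above:

from datetime import datetime

def round_hour(time: datetime, hour: int) -> datetime:
    if hour not in range(1, 24):
        msg = f"hour must be between [1, 23], got {hour}"
        raise ValueError(msg)
    hour = (time.hour // hour) * hour
    return datetime(time.year, time.month, time.day, hour, 0, 0)

print(round_hour(datetime(2022, 3, 1, 14, 45), 6))  # 2022-03-01 12:00:00 (14 // 6 * 6 = 12)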
@@ -505,6 +526,19 @@ class MetDataSource(abc.ABC):
     :func:`xarray.open_mfdataset`
     """
 
+    @abc.abstractmethod
+    def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
+        """Set met source metadata on ``ds.attrs``.
+
+        This is called within the :meth:`open_metdataset` method to set metadata
+        on the returned :class:`MetDataset` instance.
+
+        Parameters
+        ----------
+        ds : xr.Dataset | MetDataset
+            Dataset to set metadata on. Mutated in place.
+        """
+
     # ----------------------
     # Common utility methods
     # ----------------------
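Because `set_metadata` is abstract, every concrete `MetDataSource` subclass must now implement it, which is consistent with the era5, hres, ifs, and gfs changes in the file summary above. A hedged sketch of what an implementation might look like; the attribute keys here are illustrative, not taken from pycontrails:

import xarray as xr

class MySource:  # stand-in for a MetDataSource subclass
    def set_metadata(self, ds: xr.Dataset) -> None:
        # hypothetical metadata keys, mutated in place per the docstring above
        ds.attrs.update(provider="MyProvider", dataset="MyDataset", product="reanalysis")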
@@ -518,7 +552,7 @@ class MetDataSource(abc.ABC):
         """
         if times_to_download := self.list_timesteps_not_cached(**xr_kwargs):
             logger.debug(
-…
+                "Not all files found in cachestore. Downloading times %s", times_to_download
             )
             self.download_dataset(times_to_download)
         else:
@@ -547,7 +581,7 @@ class MetDataSource(abc.ABC):
     def is_datafile_cached(self, t: datetime, **xr_kwargs: Any) -> bool:
         """Check datafile defined by datetime for variables and pressure levels in class.
 
-        If using a cloud cache store (i.e. :class:`cache.GCPCacheStore`) this is where the datafile
+        If using a cloud cache store (i.e. :class:`cache.GCPCacheStore`), this is where the datafile
         will be mirrored to a local file for access.
 
         Parameters
@@ -575,56 +609,64 @@
 
         # see if cache data file exists, and if so, get the file + path
         cache_path = self.create_cachepath(t)
-        if self.cachestore.exists(cache_path):
-            logger.debug(
-…
+        if not self.cachestore.exists(cache_path):
+            logger.debug("Cachepath %s does not exist in cache", cache_path)
+            return False
+
+        logger.debug("Cachepath %s exists, getting from cache.", cache_path)
+
+        # If GCP cache is used, this will download file and return the local mirrored path
+        # If the local file already exists, this will return the local path
+        disk_path = self.cachestore.get(cache_path)
+
+        # check if all variables and pressure levels are in that path
+        try:
+            with self.open_dataset(disk_path, **xr_kwargs) as ds:
+                return self._check_is_ds_complete(ds, cache_path)
+
+        except OSError as err:
+            if isinstance(self.cachestore, cache.GCPCacheStore):
+                # If a GCPCacheStore is used, remove the corrupt file and try again.
+                # If the file is corrupt in the bucket, we'll get stuck in an infinite loop here.
+                logger.warning(
+                    "Found corrupt file %s on local disk. Try again to download from %s.",
+                    disk_path,
+                    self.cachestore,
+                    exc_info=err,
+                )
+                self.cachestore.clear_disk(disk_path)
+                return self.is_datafile_cached(t, **xr_kwargs)
+
+            msg = (
+                f"Unable to open NETCDF file at '{disk_path}'. "
+                "This may be due to a incomplete download. "
+                f"Consider manually removing '{disk_path}' and retrying."
+            )
+            raise OSError(msg) from err
+
+    def _check_is_ds_complete(self, ds: xr.Dataset, cache_path: str) -> bool:
+        """Check if ``ds`` has all variables and pressure levels defined by the instance."""
+        for var in self.variable_shortnames:
+            if var not in ds:
+                logger.warning(
+                    "Variable %s not in downloaded dataset. Found variables: %s",
+                    var,
+                    ds.data_vars,
+                )
+                return False
+
+        pl = np.asarray(self.pressure_levels)
+        cond = np.isin(pl, ds["level"].values)
+        if not np.all(cond):
+            logger.warning(
+                "Pressure Levels %s not in downloaded dataset. Found pressure levels: %s",
+                pl[~cond].tolist(),
+                ds["level"].values.tolist(),
+            )
+            return False
+
+        logger.debug("All variables and pressure levels found in %s", cache_path)
+        return True
 
     def open_dataset(
         self,
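The rewrite inverts the original nested `if self.cachestore.exists(...)` block into guard clauses and moves the per-variable/per-level validation into `_check_is_ds_complete`. The completeness check leans on `np.isin`; a runnable sketch of that step:

import numpy as np

requested = np.asarray([300, 250, 200])  # instance pressure levels
available = np.asarray([300, 250])       # levels found in the cached file
cond = np.isin(requested, available)
print(requested[~cond].tolist())  # [200] -> logged as missing; file treated as incomplete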
pycontrails/core/flight.py
CHANGED
@@ -27,12 +27,26 @@ if TYPE_CHECKING:
 
 
 class FlightPhase(enum.IntEnum):
-    """Flight phase enumeration."""
+    """Flight phase enumeration.
 
+    Use :func:`segment_phase` or :meth:`Flight.segment_phase` to determine flight phase.
+    """
+
+    #: Waypoints at which the flight is in a climb phase
     CLIMB = enum.auto()
+
+    #: Waypoints at which the flight is in a cruise phase
     CRUISE = enum.auto()
+
+    #: Waypoints at which the flight is in a descent phase
     DESCENT = enum.auto()
+
+    #: Waypoints at which the flight is not in a climb, cruise, or descent phase.
+    #: In practice, this category is used for waypoints at which the ROCD resembles
+    #: that of a cruise phase, but the altitude is below the minimum cruise altitude.
     LEVEL_FLIGHT = enum.auto()
+
+    #: Waypoints at which the ROCD is not defined.
     NAN = enum.auto()
 
 
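The enum members gain `#:` comments, which Sphinx autodoc picks up as per-member documentation. Since `FlightPhase` is an `IntEnum`, members behave as integers, which is what lets `segment_phase` pack phases into a `np.uint8` array (see the hunk further down). A toy illustration using the class as defined above:

import enum

class FlightPhase(enum.IntEnum):
    CLIMB = enum.auto()
    CRUISE = enum.auto()
    DESCENT = enum.auto()
    LEVEL_FLIGHT = enum.auto()
    NAN = enum.auto()

print(FlightPhase.CRUISE == 2)  # True: enum.auto() numbers members from 1
print(int(FlightPhase.NAN))     # 5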
@@ -890,11 +904,10 @@ class Flight(GeoVectorDataset):
         df["longitude"] = ((df["longitude"] + 180.0) % 360.0) - 180.0
 
         # STEP 6: Interpolate nan values in altitude
-…
+        altitude = df["altitude"].to_numpy()
+        if np.any(np.isnan(altitude)):
             df_freq = pd.Timedelta(freq).to_numpy()
-            new_alt = _altitude_interpolation(
-                df["altitude"].to_numpy(), nominal_rocd, df_freq, climb_descend_at_end
-            )
+            new_alt = _altitude_interpolation(altitude, nominal_rocd, df_freq, climb_descend_at_end)
             _verify_altitude(new_alt, nominal_rocd, df_freq)
             df["altitude"] = new_alt
 
@@ -954,17 +967,18 @@ class Flight(GeoVectorDataset):
         elapsed_time = np.nancumsum(np.roll(seg_dur, 1))
         alt_ft = fit_altitude(
             elapsed_time,
-…
-            max_segments,
-            pop,
-            r2_target,
-            max_cruise_rocd,
-            sg_window,
+            self.altitude_ft,
+            max_segments=max_segments,
+            pop=pop,
+            r2_target=r2_target,
+            max_cruise_rocd=max_cruise_rocd,
+            sg_window=sg_window,
+            sg_polyorder=sg_polyorder,
         )
 
-…
-        return …
+        out = self.copy()
+        out.update(altitude_ft=alt_ft)
+        return out
 
     def _geodesic_interpolation(self, geodesic_threshold: float) -> pd.DataFrame | None:
         """Geodesic interpolate between large gaps between waypoints.
@@ -1014,17 +1028,18 @@ class Flight(GeoVectorDataset):
         latitudes: list[float] = []
         times: list[np.ndarray] = []
 
+        longitude = self["longitude"]
+        latitude = self["latitude"]
+        time = self["time"]
+
         for index in gap_indices:
-            lon0 …
-…
-                self["latitude"][index + 1],
-                self["time"][index + 1],
-            )
+            lon0 = longitude[index]
+            lat0 = latitude[index]
+            t0 = time[index]
+            lon1 = longitude[index + 1]
+            lat1 = latitude[index + 1]
+            t1 = time[index + 1]
 
             distance = segs[index]
             n_steps = distance // geodesic_threshold  # number of new waypoints to generate
@@ -1037,8 +1052,9 @@ class Flight(GeoVectorDataset):
             latitudes.extend(lats)
 
             # + 1 to denominator to stay consistent with geod.npts (only interior points)
-            t_step = (t1 - t0) / (n_steps + 1)
-…
+            t_step = (t1 - t0) / (n_steps + 1.0)
+
+            # subtract 0.5 * t_step to ensure round-off error doesn't put final arange point
             # very close to t1
             t_range = np.arange(t0 + t_step, t1 - 0.5 * t_step, t_step)
             times.append(t_range)
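The `n_steps + 1.0` and the clarified comment both concern float arithmetic on timestamps: `np.arange` with a computed step can emit a final point that lands essentially on `t1`, so the stop is pulled in by half a step. A runnable illustration with numpy datetimes (values chosen for the example):

import numpy as np

t0 = np.datetime64("2022-03-01T00:00:00")
t1 = np.datetime64("2022-03-01T01:00:00")
n_steps = 3.0
t_step = (t1 - t0) / (n_steps + 1.0)  # 15 minutes
t_range = np.arange(t0 + t_step, t1 - 0.5 * t_step, t_step)
print(t_range)  # interior points only: 00:15, 00:30, 00:45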
@@ -1808,8 +1824,8 @@ def segment_phase(
     cruise = (
         (rocd < threshold_rocd) & (rocd > -threshold_rocd) & (altitude_ft > min_cruise_altitude_ft)
     )
-    climb = ~cruise & (rocd > 0)
-    descent = ~cruise & (rocd < 0)
+    climb = ~cruise & (rocd > 0.0)
+    descent = ~cruise & (rocd < 0.0)
     level_flight = ~(nan | cruise | climb | descent)
 
     phase = np.empty(rocd.shape, dtype=np.uint8)
@@ -1857,6 +1873,7 @@ def segment_rocd(
 def fit_altitude(
     elapsed_time: npt.NDArray[np.float_],
     altitude_ft: npt.NDArray[np.float_],
+    *,
     max_segments: int = 30,
     pop: int = 3,
     r2_target: float = 0.999,