pycontrails-0.48.0-cp311-cp311-macosx_11_0_arm64.whl → pycontrails-0.48.1-cp311-cp311-macosx_11_0_arm64.whl
This diff compares the published contents of two package versions as they appear in their public registry, and is provided for informational purposes only.
- pycontrails/_version.py +2 -2
- pycontrails/core/met.py +34 -22
- pycontrails/core/met_var.py +2 -2
- pycontrails/core/models.py +5 -0
- pycontrails/core/rgi_cython.cpython-311-darwin.so +0 -0
- pycontrails/core/vector.py +243 -24
- pycontrails/models/cocip/cocip.py +21 -11
- pycontrails/models/cocip/wake_vortex.py +9 -7
- {pycontrails-0.48.0.dist-info → pycontrails-0.48.1.dist-info}/METADATA +2 -1
- {pycontrails-0.48.0.dist-info → pycontrails-0.48.1.dist-info}/RECORD +14 -14
- {pycontrails-0.48.0.dist-info → pycontrails-0.48.1.dist-info}/WHEEL +1 -1
- {pycontrails-0.48.0.dist-info → pycontrails-0.48.1.dist-info}/LICENSE +0 -0
- {pycontrails-0.48.0.dist-info → pycontrails-0.48.1.dist-info}/NOTICE +0 -0
- {pycontrails-0.48.0.dist-info → pycontrails-0.48.1.dist-info}/top_level.txt +0 -0
pycontrails/_version.py
CHANGED
pycontrails/core/met.py
CHANGED
@@ -96,13 +96,14 @@ class MetBase(ABC, Generic[XArrayType]):
         for dim in self.dim_order:
             if dim not in self.data.dims:
                 if dim == "level":
-                    raise ValueError(
+                    msg = (
                         f"Meteorology data must contain dimension '{dim}'. "
                         "For single level data, set 'level' coordinate to constant -1 "
                         "using `ds = ds.expand_dims({'level': [-1]})`"
                     )
                 else:
-                    raise ValueError(f"Meteorology data must contain dimension '{dim}'.")
+                    msg = f"Meteorology data must contain dimension '{dim}'."
+                raise ValueError(msg)

     def _validate_longitude(self) -> None:
         """Check longitude bounds.
@@ -196,15 +197,15 @@ class MetBase(ABC, Generic[XArrayType]):

         dims_tuple = tuple(self.dim_order)

-        def _check_da(da: xr.DataArray, key:
+        def _check_da(da: xr.DataArray, key: Hashable | None = None) -> None:
             if da.dims != dims_tuple:
                 if key is not None:
                     msg = (
-                        "Data dimension not transposed on variable '{key}'. Initiate with"
-                        "
+                        f"Data dimension not transposed on variable '{key}'. Initiate with"
+                        " 'copy=True'."
                     )
                 else:
-                    msg = "Data dimension not transposed. Initiate with
+                    msg = "Data dimension not transposed. Initiate with 'copy=True'."
                 raise ValueError(msg)

         data = self.data
@@ -212,7 +213,7 @@ class MetBase(ABC, Generic[XArrayType]):
             _check_da(data)
             return

-        for key, da in self.data.
+        for key, da in self.data.items():
             _check_da(da, key)

     def _validate_dims(self) -> None:
@@ -654,11 +655,9 @@ class MetDataset(MetBase):
         attrs: dict[str, Any] | None = None,
         **attrs_kwargs: Any,
     ) -> None:
-        # init cache
         self.cachestore = cachestore

-        data.attrs.update(attrs or {})
-        data.attrs.update(attrs_kwargs)
+        data.attrs.update(attrs or {}, **attrs_kwargs)

         # if input is already a Dataset, copy into data
         if not isinstance(data, xr.Dataset):
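The collapsed call above works because dict.update accepts a mapping and keyword arguments together, merging the mapping first and the kwargs after it. A minimal sketch with hypothetical attrs values:

    attrs = {"provider": "ECMWF"}
    attrs_kwargs = {"dataset": "ERA5"}

    d: dict = {}
    d.update(attrs or {}, **attrs_kwargs)  # mapping merged first, then kwargs
    assert d == {"provider": "ECMWF", "dataset": "ERA5"}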
@@ -872,7 +871,7 @@ class MetDataset(MetBase):
             Raises when dataset does not contain variable in ``vars``
         """
         if isinstance(vars, (MetVariable, str)):
-            vars =
+            vars = (vars,)

         met_keys: list[str] = []
         for variable in vars:
@@ -1372,7 +1371,7 @@ class MetDataArray(MetBase):
         """
         if not self.in_memory:
             self._check_memory("Extracting numpy array from")
-            self.data
+            self.data.load()

         return self.data.values
@@ -1618,8 +1617,20 @@ class MetDataArray(MetBase):
         )

     def _check_memory(self, msg_start: str) -> None:
+        """Check the memory usage of the underlying data.
+
+        If the data is larger than 4 GB, a warning is issued. If the data is
+        larger than 32 GB, a RuntimeError is raised.
+        """
         n_bytes = self.data.nbytes
+        mb = round(n_bytes / int(1e6), 2)
+        logger.debug("Loading %s into memory consumes %s MB.", self.name, mb)
+
         n_gb = n_bytes // int(1e9)
+        if n_gb <= 4:
+            return
+
+        # Prevent something stupid
         msg = (
             f"{msg_start} MetDataArray {self.name} requires loading "
             f"at least {n_gb} GB of data into memory. Downselect data if possible. "
@@ -1627,13 +1638,9 @@ class MetDataArray(MetBase):
             "with the method 'downselect_met'."
         )

-        if n_gb > 32:
+        if n_gb > 32:
             raise RuntimeError(msg)
-
-        warnings.warn(msg)
-
-        mb = round(n_bytes / int(1e6), 2)
-        logger.debug("Loading %s into memory consumes %s MB.", self.name, mb)
+        warnings.warn(msg)
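The reworked _check_memory logs the megabyte figure unconditionally, then short-circuits: at most 4 integer GB returns silently, more than 32 GB raises, anything in between warns. A minimal sketch of the floor-division threshold arithmetic, with a hypothetical array:

    import numpy as np
    import xarray as xr

    da = xr.DataArray(np.zeros((100, 100, 10)))  # 800_000 bytes of float64
    n_gb = da.nbytes // int(1e9)                 # integer GB, same floor division as above
    assert n_gb == 0                             # under the 4 GB threshold: no warning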
@@ -2228,24 +2235,29 @@ def _is_zarr(ds: xr.Dataset | xr.DataArray) -> bool:
     return dask0.array.array.array.__class__.__name__ == "ZarrArrayWrapper"


-def shift_longitude(data: XArrayType) -> XArrayType:
-    """Shift longitude values from
+def shift_longitude(data: XArrayType, bound: float = -180.0) -> XArrayType:
+    """Shift longitude values from any input domain to [bound, 360 + bound) domain.

     Sorts data by ascending longitude values.

+
     Parameters
     ----------
     data : XArrayType
         :class:`xr.Dataset` or :class:`xr.DataArray` with longitude dimension
+    bound : float, optional
+        Lower bound of the domain.
+        Output domain will be [bound, 360 + bound).
+        Defaults to -180, which results in longitude domain [-180, 180).


     Returns
     -------
     XArrayType
-        :class:`xr.Dataset` or :class:`xr.DataArray` with longitude values on [
+        :class:`xr.Dataset` or :class:`xr.DataArray` with longitude values on [a, 360 + a).
     """
     return data.assign_coords(
-        longitude=((data["longitude"].values
+        longitude=((data["longitude"].values - bound) % 360.0) + bound
     ).sortby("longitude", ascending=True)
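The new bound parameter generalizes the wrap via ((lon - bound) % 360) + bound. A minimal sketch of that arithmetic on hypothetical [0, 360) values:

    import numpy as np

    lon = np.array([0.0, 90.0, 180.0, 270.0])
    bound = -180.0
    print(((lon - bound) % 360.0) + bound)  # [0., 90., -180., -90.]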
pycontrails/core/met_var.py
CHANGED
pycontrails/core/models.py
CHANGED

pycontrails/core/rgi_cython.cpython-311-darwin.so
Binary file
pycontrails/core/vector.py
CHANGED
@@ -18,7 +18,7 @@ from pycontrails.core import coordinates, interpolation
 from pycontrails.core import met as met_module
 from pycontrails.physics import units
 from pycontrails.utils import dependencies
-from pycontrails.utils import json as
+from pycontrails.utils import json as json_utils

 logger = logging.getLogger(__name__)

@@ -633,6 +633,8 @@ class VectorDataset:
         8 15 18

         """
+        vectors = [v for v in vectors if v]  # remove empty vectors
+
         if not vectors:
             return cls()

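The new filter relies on dataset truthiness (assuming, as elsewhere in the library, that an empty VectorDataset has length zero and is falsy), so sum no longer trips over empty inputs. A sketch:

    from pycontrails import VectorDataset

    v1 = VectorDataset({"x": [1.0, 2.0]})
    v2 = VectorDataset()                  # empty, so bool(v2) is False
    vectors = [v for v in (v1, v2) if v]  # drops v2, mirroring the hunk above
    assert len(vectors) == 1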
@@ -753,7 +755,7 @@ class VectorDataset:
         str
             Unique hash for flight instance (sha1)
         """
-        _hash = json.dumps(self.data, cls=
+        _hash = json.dumps(self.data, cls=json_utils.NumpyEncoder)
         return hashlib.sha1(bytes(_hash, "utf-8")).hexdigest()

     # ------------
@@ -982,6 +984,104 @@ class VectorDataset:
         df.attrs = self.attrs
         return df

+    def to_dict(self) -> dict[str, Any]:
+        """Create dictionary with :attr:`data` and :attr:`attrs`.
+
+        If geo-spatial coordinates (e.g. `"latitude"`, `"longitude"`, `"altitude"`)
+        are present, round to a reasonable precision. If a `"time"` variable is present,
+        round to unix seconds. When the instance is a :class:`GeoVectorDataset`,
+        disregard any `"altitude"` or `"level"` coordinate and only include
+        `"altitude_ft"` in the output.
+
+        Returns
+        -------
+        dict[str, Any]
+            Dictionary with :attr:`data` and :attr:`attrs`.
+
+        See Also
+        --------
+        :meth:`from_dict`
+
+        Examples
+        --------
+        >>> import pprint
+        >>> from pycontrails import Flight
+        >>> fl = Flight(
+        ...     longitude=[-100, -110],
+        ...     latitude=[40, 50],
+        ...     level=[200, 200],
+        ...     time=[np.datetime64("2020-01-01T09"), np.datetime64("2020-01-01T09:30")],
+        ...     aircraft_type="B737",
+        ... )
+        >>> fl = fl.resample_and_fill("5T")
+        >>> pprint.pprint(fl.to_dict())
+        {'aircraft_type': 'B737',
+         'altitude_ft': [38661.0, 38661.0, 38661.0, 38661.0, 38661.0, 38661.0, 38661.0],
+         'crs': 'EPSG:4326',
+         'latitude': [40.0, 41.724, 43.428, 45.111, 46.769, 48.399, 50.0],
+         'longitude': [-100.0,
+                       -101.441,
+                       -102.959,
+                       -104.563,
+                       -106.267,
+                       -108.076,
+                       -110.0],
+         'time': [1577869200,
+                  1577869500,
+                  1577869800,
+                  1577870100,
+                  1577870400,
+                  1577870700,
+                  1577871000]}
+        """
+        np_encoder = json_utils.NumpyEncoder()
+
+        # round latitude, longitude, and altitude
+        precision = {"longitude": 3, "latitude": 3, "altitude_ft": 0}
+
+        def encode(key: str, obj: Any) -> Any:
+            # Try to handle some pandas objects
+            if hasattr(obj, "to_numpy"):
+                obj = obj.to_numpy()
+
+            # Convert numpy objects to python objects
+            if isinstance(obj, (np.ndarray, np.generic)):
+
+                # round time to unix seconds
+                if key == "time":
+                    return np_encoder.default(obj.astype("datetime64[s]").astype(int))
+
+                # round specific keys in precision
+                try:
+                    d = precision[key]
+                except KeyError:
+                    return np_encoder.default(obj)
+
+                return np_encoder.default(obj.astype(float).round(d))
+
+            # Pass through everything else
+            return obj
+
+        data = {k: encode(k, v) for k, v in self.data.items()}
+        attrs = {k: encode(k, v) for k, v in self.attrs.items()}
+
+        # Only include one of the vertical coordinate keys
+        if isinstance(self, GeoVectorDataset):
+            data.pop("altitude", None)
+            data.pop("level", None)
+            if "altitude_ft" not in data:
+                data["altitude_ft"] = self.altitude_ft.round(precision["altitude_ft"]).tolist()
+
+        # Issue warning if any keys are duplicated
+        common_keys = data.keys() & attrs.keys()
+        if common_keys:
+            warnings.warn(
+                f"Found duplicate keys in data and attrs: {common_keys}. "
+                "Data keys will overwrite attrs keys in returned dictionary."
+            )
+
+        return {**attrs, **data}
+
     @classmethod
     def create_empty(
         cls: Type[VectorDatasetType],
@@ -1010,6 +1110,42 @@ class VectorDataset:
         """
         return cls(data=_empty_vector_dict(keys or set()), attrs=attrs, copy=False, **attrs_kwargs)

+    @classmethod
+    def from_dict(
+        cls: Type[VectorDatasetType], obj: dict[str, Any], copy: bool = True, **obj_kwargs: Any
+    ) -> VectorDatasetType:
+        """Create instance from dict representation containing data and attrs.
+
+        Parameters
+        ----------
+        obj : dict[str, Any]
+            Dict representation of VectorDataset (e.g. :meth:`to_dict`)
+        copy : bool, optional
+            Passed to VectorDataset constructor.
+            Defaults to True.
+        **obj_kwargs : Any
+            Additional properties passed as keyword arguments.
+
+        Returns
+        -------
+        VectorDatasetType
+            VectorDataset instance.
+
+        See Also
+        --------
+        :meth:`to_dict`
+        """
+        data = {}
+        attrs = {}
+
+        for k, v in {**obj, **obj_kwargs}.items():
+            if isinstance(v, (list, np.ndarray)):
+                data[k] = v
+            else:
+                attrs[k] = v
+
+        return cls(data=data, attrs=attrs, copy=copy)
+
     def generate_splits(
         self: VectorDatasetType, n_splits: int, copy: bool = True
     ) -> Generator[VectorDatasetType, None, None]:
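Taken together, to_dict and from_dict give a JSON-friendly round trip: array-like values are routed to data, scalars to attrs, and integer times are re-parsed as unix epochs on the way back in. A sketch with a hypothetical flight:

    import numpy as np
    from pycontrails import Flight

    fl = Flight(
        longitude=[-100.0, -110.0],
        latitude=[40.0, 50.0],
        level=[200.0, 200.0],
        time=[np.datetime64("2020-01-01T09"), np.datetime64("2020-01-01T10")],
        aircraft_type="B737",
    )
    d = fl.to_dict()           # lists/arrays -> data, "B737" -> attrs
    fl2 = Flight.from_dict(d)  # unix-second ints are coerced back to datetime64 (with a warning)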
@@ -1182,7 +1318,7 @@ class GeoVectorDataset(VectorDataset):
         if not np.issubdtype(time.dtype, np.datetime64):
             warnings.warn("Time data is not np.datetime64. Attempting to coerce.")
             try:
-                pd_time = pd.
+                pd_time = _handle_time_column(pd.Series(self["time"]))
             except ValueError as e:
                 raise ValueError("Could not coerce time data to datetime64.") from e
             np_time = pd_time.to_numpy(dtype="datetime64[ns]")
@@ -1790,7 +1926,7 @@ class GeoVectorDataset(VectorDataset):
         dict[str, Any]
             Python representation of GeoJSON FeatureCollection
         """
-        return
+        return json_utils.dataframe_to_geojson_points(self.dataframe)

     def to_pseudo_mercator(self: GeoVectorDatasetType, copy: bool = True) -> GeoVectorDatasetType:
         """Convert data from :attr:`attrs["crs"]` to Pseudo Mercator (EPSG:3857).
@@ -1913,29 +2049,112 @@ def vector_to_lon_lat_grid(


 def _handle_time_column(time: pd.Series) -> pd.Series:
+    """Ensure that pd.Series has compatible Timestamps.
+
+    Parameters
+    ----------
+    time : pd.Series
+        Pandas dataframe column labeled "time".
+
+
+    Returns
+    -------
+    pd.Series
+        Parsed pandas time series.
+
+
+    Raises
+    ------
+    ValueError
+        When time series can't be parsed, or is not timezone naive.
+    """
     if not hasattr(time, "dt"):
-
-        # If it fails (for example, a unix integer time), we raise an error
-        # and let the user figure it out.
-        try:
-            return pd.to_datetime(time)
-        except ValueError as exc:
-            raise ValueError(
-                "The 'time' field must hold datetime-like values. "
-                'Try data["time"] = pd.to_datetime(data["time"], unit=...) '
-                "with the appropriate unit."
-            ) from exc
+        time = _parse_pandas_time(time)

+    # Translate all times to UTC and then remove timezone.
     # If the time column contains a timezone, the call to `to_numpy`
-    # will convert it to an array of object.
-    #
-    # and so it is better for the user to handle them rather than try
-    # to address them here.
+    # will convert it to an array of object.
+    # Note `.tz_convert(None)` automatically converts to UTC first.
     if time.dt.tz is not None:
-        raise ValueError(
-            "The 'time' field must be timezone naive. "
-            "This can be achieved with: "
-            'data["time"] = data["time"].dt.tz_localize(None)'
-        )
+        time = time.dt.tz_convert(None)

     return time
+
+
+def _parse_pandas_time(time: pd.Series) -> pd.Series:
+    """Parse pandas dataframe column labelled "time".
+
+    Parameters
+    ----------
+    time : pd.Series
+        Time series
+
+    Returns
+    -------
+    pd.Series
+        Parsed time series
+
+    Raises
+    ------
+    ValueError
+        When series values can't be inferred.
+    """
+    try:
+        # If the time series is a string, try to convert it to a datetime
+        if time.dtype == "O":
+            return pd.to_datetime(time)
+
+        # If the time is an int, try to parse it as unix time
+        if np.issubdtype(time.dtype, np.integer):
+            return _parse_unix_time(time)
+
+        raise ValueError("Unsupported time format")
+
+    except ValueError as exc:
+        raise ValueError(
+            "The 'time' field must hold datetime-like values. "
+            'Try data["time"] = pd.to_datetime(data["time"], unit=...) '
+            "with the appropriate unit."
+        ) from exc
+
+
+def _parse_unix_time(time: list[int] | npt.NDArray[np.int_] | pd.Series) -> pd.Series:
+    """Parse array of int times as unix epoch timestamps.
+
+    Attempts to parse the time in "s", "ms", "us", "ns"
+
+
+    Parameters
+    ----------
+    time : list[int] | npt.NDArray[np.int_] | pd.Series
+        Sequence of unix timestamps
+
+
+    Returns
+    -------
+    pd.Series
+        Series of timezone naive pandas Timestamps
+
+    Raises
+    ------
+    ValueError
+        When unable to parse time as unix epoch timestamp
+    """
+    units = "s", "ms", "us", "ns"
+    for unit in units:
+        try:
+            out = pd.to_datetime(time, unit=unit, utc=True)
+        except ValueError:
+            continue
+
+        # make timezone naive
+        out = out.dt.tz_convert(None)
+
+        # make sure time is reasonable
+        if (pd.Timestamp("1980-01-01") <= out).all() and (out <= pd.Timestamp("2030-01-01")).all():
+            return out
+
+    raise ValueError(
+        f"Unable to parse time parameter '{time}' as unix epoch timestamp between "
+        "1980-01-01 and 2030-01-01"
+    )
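The 1980-2030 plausibility window is what makes the unit loop in _parse_unix_time safe: a seconds-scale integer also parses cleanly under "ns", just to a nonsensical date. For example:

    import pandas as pd

    ts = pd.Series([1577869200])  # unix seconds for 2020-01-01T09:00:00

    # "ns" parses without error but lands in 1970, outside the window
    print(pd.to_datetime(ts, unit="ns", utc=True).iloc[0])  # 1970-01-01 00:00:01.577869200+00:00
    print(pd.to_datetime(ts, unit="s", utc=True).iloc[0])   # 2020-01-01 09:00:00+00:00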
pycontrails/models/cocip/cocip.py
CHANGED

@@ -403,14 +403,13 @@ class Cocip(Model):
         self.contrail_list = []
         self._simulate_contrail_evolution()

-        self._cleanup_indices()
-
         if not self.contrail_list:
             logger.debug("No contrails formed by %s", label)
             return self._fill_empty_flight_results(return_flight_list)

         logger.debug("Complete contrail simulation for %s", label)

+        self._cleanup_indices()
         self._bundle_results()

         if return_flight_list:
@@ -1098,12 +1097,20 @@ class Cocip(Model):

     @overrides
     def _cleanup_indices(self) -> None:
-
+        """Cleanup interpolation artifacts."""
+
+        if not self.params["interpolation_use_indices"]:
+            return
+
+        if hasattr(self, "contrail_list"):
             for contrail in self.contrail_list:
                 contrail._invalidate_indices()
-
-
+
+        self.source._invalidate_indices()
+        self._sac_flight._invalidate_indices()
+        if hasattr(self, "_downwash_flight"):
             self._downwash_flight._invalidate_indices()
+        if hasattr(self, "_downwash_contrail"):
             self._downwash_contrail._invalidate_indices()

     def _bundle_results(self) -> None:
@@ -1115,7 +1122,9 @@ class Cocip(Model):
         self.contrail = pd.concat(dfs)

         # add age in hours to the contrail waypoint outputs
-
+        age_hours = np.empty_like(self.contrail["ef"])
+        np.divide(self.contrail["age"], np.timedelta64(1, "h"), out=age_hours)
+        self.contrail["age_hours"] = age_hours

         if self.params["verbose_outputs"]:
             # Compute dt_integration -- logic is somewhat complicated, but
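Dividing into a preallocated output keeps age_hours in the same float dtype as the "ef" column, since a plain timedelta division would pick its own dtype. A sketch of the mechanics:

    import numpy as np

    age = np.array([np.timedelta64(90, "m"), np.timedelta64(30, "m")])
    ef = np.zeros(2, dtype=np.float32)     # stand-in for the "ef" column

    age_hours = np.empty_like(ef)          # float32, matching ef
    np.divide(age, np.timedelta64(1, "h"), out=age_hours)
    assert age_hours.tolist() == [1.5, 0.5]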
@@ -1140,10 +1149,9 @@ class Cocip(Model):

         self.contrail = seq_index.set_index("index")

-
-
-
-        if self.params["verbose_outputs"]:
+        # ---
+        # Create contrail xr.Dataset (self.contrail_dataset)
+        # ---
         if isinstance(self.source, Fleet):
             self.contrail_dataset = xr.Dataset.from_dataframe(
                 self.contrail.set_index(["flight_id", "timestep", "waypoint"])
@@ -1250,9 +1258,11 @@ class Cocip(Model):
         Flight or list[Flight]
             Flight or list of Flight objects with empty variables.
         """
+        self._cleanup_indices()

         intersection = self.source.data.pop("_met_intersection")
-        zeros_and_nans = np.
+        zeros_and_nans = np.zeros(intersection.shape, dtype=np.float32)
+        zeros_and_nans[~intersection] = np.nan
         self.source["ef"] = zeros_and_nans.copy()
         self.source["persistent_1"] = zeros_and_nans.copy()
         self.source["cocip"] = np.sign(zeros_and_nans)
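Building the filler from np.zeros with an explicit dtype and masking in NaN keeps dtype control in one place, and np.sign then carries 0 to 0 and NaN to NaN for the cocip flag. A sketch with a hypothetical mask:

    import numpy as np

    intersection = np.array([True, False, True])

    zeros_and_nans = np.zeros(intersection.shape, dtype=np.float32)
    zeros_and_nans[~intersection] = np.nan  # NaN outside the met intersection
    print(np.sign(zeros_and_nans))          # [ 0. nan  0.], still float32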
pycontrails/models/cocip/wake_vortex.py
CHANGED

@@ -54,22 +54,24 @@ def max_downward_displacement(
     - :cite:`holzapfelProbabilisticTwoPhaseWake2003`
     - :cite:`schumannContrailCirrusPrediction2012`
     """
-    wingspan_arr = np.broadcast_to(wingspan, true_airspeed.shape)
-    aircraft_mass_arr = np.broadcast_to(aircraft_mass, true_airspeed.shape)
-
     rho_air = thermo.rho_d(air_temperature, air_pressure)
     n_bv = thermo.brunt_vaisala_frequency(air_pressure, air_temperature, dT_dz)
-    t_0 = effective_time_scale(wingspan, true_airspeed,
+    t_0 = effective_time_scale(wingspan, true_airspeed, aircraft_mass, rho_air)

     dz_max_strong = downward_displacement_strongly_stratified(
-        wingspan, true_airspeed,
+        wingspan, true_airspeed, aircraft_mass, rho_air, n_bv
     )

     is_weakly_stratified = n_bv * t_0 < 0.8
+    if isinstance(wingspan, np.ndarray):
+        wingspan = wingspan[is_weakly_stratified]
+    if isinstance(aircraft_mass, np.ndarray):
+        aircraft_mass = aircraft_mass[is_weakly_stratified]
+
     dz_max_weak = downward_displacement_weakly_stratified(
-        wingspan=
+        wingspan=wingspan,
         true_airspeed=true_airspeed[is_weakly_stratified],
-        aircraft_mass=
+        aircraft_mass=aircraft_mass,
         rho_air=rho_air[is_weakly_stratified],
         n_bv=n_bv[is_weakly_stratified],
         dz_max_strong=dz_max_strong[is_weakly_stratified],
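The isinstance guards exist because wingspan and aircraft_mass may be scalars or arrays, and boolean-mask indexing only applies to arrays (a Python float would raise TypeError). A sketch with a hypothetical helper:

    import numpy as np

    def _mask_if_array(param, mask):
        # hypothetical helper: scalars broadcast on their own
        return param[mask] if isinstance(param, np.ndarray) else param

    mask = np.array([True, False, True])
    print(_mask_if_array(60.0, mask))                       # 60.0, unchanged
    print(_mask_if_array(np.array([1.0, 2.0, 3.0]), mask))  # [1. 3.]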
{pycontrails-0.48.0.dist-info → pycontrails-0.48.1.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pycontrails
-Version: 0.48.0
+Version: 0.48.1
 Summary: Python library for modeling aviation climate impacts
 Author-email: Breakthrough Energy <py@contrails.org>
 License: Apache-2.0
@@ -86,6 +86,7 @@ Provides-Extra: goes
 Requires-Dist: cartopy >=0.22 ; extra == 'goes'
 Requires-Dist: gcsfs >=2022.3 ; extra == 'goes'
 Requires-Dist: h5netcdf >=1.2 ; extra == 'goes'
+Requires-Dist: aiohttp >=3.9.0b0 ; (python_version >= "3.12") and extra == 'goes'
 Provides-Extra: jupyter
 Requires-Dist: ipywidgets >=7.6 ; extra == 'jupyter'
 Requires-Dist: jupyterlab >=2.2 ; extra == 'jupyter'
{pycontrails-0.48.0.dist-info → pycontrails-0.48.1.dist-info}/RECORD
CHANGED

@@ -1,15 +1,15 @@
-pycontrails-0.48.
-pycontrails-0.48.
-pycontrails-0.48.
-pycontrails-0.48.
-pycontrails-0.48.
-pycontrails-0.48.
-pycontrails/_version.py,sha256=
+pycontrails-0.48.1.dist-info/RECORD,,
+pycontrails-0.48.1.dist-info/LICENSE,sha256=gJ-h7SFFD1mCfR6a7HILvEtodDT6Iig8bLXdgqR6ucA,10175
+pycontrails-0.48.1.dist-info/WHEEL,sha256=2nXsVu36abDiNGsH9Fliml6zEeomzRt7eNy1CE_Eupc,110
+pycontrails-0.48.1.dist-info/NOTICE,sha256=gKI8DcN1WhiXB2SFRKDogcjONldGubTvBxiOYdC4CXU,1926
+pycontrails-0.48.1.dist-info/top_level.txt,sha256=Z8J1R_AiBAyCVjNw6jYLdrA68PrQqTg0t3_Yek_IZ0Q,29
+pycontrails-0.48.1.dist-info/METADATA,sha256=-DmCMG3RmYaxq1sX_lfwfKAv8gV7naDqpKTadgpTCcI,8423
+pycontrails/_version.py,sha256=SmV-QgndmfqQSl3EnRtXH5ks2pvcWLjNDwIDauQbrGk,413
 pycontrails/__init__.py,sha256=jQlqY1c5d9cKwN5ItqaQdJdhAWnQbHIk4TX6QI-CYww,1962
 pycontrails/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pycontrails/core/rgi_cython.cpython-311-darwin.so,sha256=
-pycontrails/core/vector.py,sha256=
-pycontrails/core/models.py,sha256=
+pycontrails/core/rgi_cython.cpython-311-darwin.so,sha256=xLdjU0utSeBmWGbDPVbgJkBdHL1VJz9dTi27O7ILX-Y,316269
+pycontrails/core/vector.py,sha256=dHWUnsJasIHldRUtHy00zhONJH1pV8fgJ6sKJHIJrNk,70400
+pycontrails/core/models.py,sha256=MtaujNXbUd0fWINWPqCIqOoWD6jv_sU649D7COavHpc,38306
 pycontrails/core/interpolation.py,sha256=C275gTrX-GJWfMm6gd2LiD-fKSHmMmDFrq--Pn5w_x4,23703
 pycontrails/core/fleet.py,sha256=Zxp23KPV2yugiY210wOLelI66HJLrjEDYa2y8JP1CmY,13278
 pycontrails/core/flight.py,sha256=QJCbVvY5P0JncjfxOHMme1uU3SPXAbs8hXHSRJKJCM4,71895
@@ -19,10 +19,10 @@ pycontrails/core/datalib.py,sha256=L_n0ns2Yspy2NswBJ5fIa5x8vJKdBxknsdWTYL14Jew,2
 pycontrails/core/cache.py,sha256=FvE50YK8dddydozpQHe-XxE1tj8xT5SWcb2LtLj-dC0,28453
 pycontrails/core/__init__.py,sha256=4ZE7x1gMa_Q7GcgcWpPP9fQ-sTulCXWmMjsCJ0odakY,840
 pycontrails/core/flightplan.py,sha256=7BwYPPo4QfqCIWzBwXD848xg34Ggza4ldi4VUogWVYQ,7643
-pycontrails/core/met.py,sha256=
+pycontrails/core/met.py,sha256=q1ZyinJ5pzin111ALRqruJV8b1TfoLKq5C5MZTrGDJc,89239
 pycontrails/core/aircraft_performance.py,sha256=8Ig0vWMIe_EDOK4KUhOlzzjkmbvpXz6z0_0f_xZCacI,22149
 pycontrails/core/airports.py,sha256=5D0VF9DxleQlVOmMW-P8_fVVkob-aB3doO1Co2XNFss,6766
-pycontrails/core/met_var.py,sha256=
+pycontrails/core/met_var.py,sha256=zyXWI_9dRS4FfH1Io_k0YcIxZIsp8y4YPiC_Oayi_gs,9195
 pycontrails/core/coordinates.py,sha256=ALGzFXjr4poFsqflg8D0kzUTC5bBjsVcXwiHfEsw7vs,5098
 pycontrails/datalib/goes.py,sha256=2JQ8C3iUtSl6hDjVivv4VB3oCRXxuT3ciB0OCFF2lIE,26150
 pycontrails/datalib/__init__.py,sha256=s5b8W6scXgespKZjPcaC-8jyLd3FqZqN0BZccLxaGuE,237
@@ -67,11 +67,11 @@ pycontrails/models/humidity_scaling/__init__.py,sha256=nqsab_j9BCwMbTfCn4BjXMdhI
 pycontrails/models/humidity_scaling/quantiles/era5-quantiles.pq,sha256=tfYhbafF9Z-gGCg6VQ1YBlOaK_01e65Dc6s9b-hQ6Zo,286375
 pycontrails/models/cocip/radiative_forcing.py,sha256=_dJ6_OM4fBvmhgL6MoDObhfTAUEXx-JzKqb5htYe1H8,44799
 pycontrails/models/cocip/wind_shear.py,sha256=Pr6ZBMnuWK380uzX2mSx0wkNXnufuCjOmV5vpUStRv0,3852
-pycontrails/models/cocip/cocip.py,sha256=
+pycontrails/models/cocip/cocip.py,sha256=AMA5I_eR3m959Gfh7VXDy1L9laernIQ_PTV2Kdp3HCc,91752
 pycontrails/models/cocip/output_formats.py,sha256=UKzVc2R4Ij8uAsJbOFz1O9SZo9N5-mG0YfBLI6e_SZA,77118
 pycontrails/models/cocip/__init__.py,sha256=7Wy_CnmVqg_Gpg2UhIlisJOJ3naL6c5BBzTSJqdbiM4,902
 pycontrails/models/cocip/cocip_params.py,sha256=rvOvE028yEs4H0WEPRyYx2kk8PDjvPmNkk43wUu-YRM,10295
-pycontrails/models/cocip/wake_vortex.py,sha256=
+pycontrails/models/cocip/wake_vortex.py,sha256=HclCU7CuNK_OkkhinWvZB5ipucyGnEsJ3q-_4E6bC5c,13314
 pycontrails/models/cocip/cocip_uncertainty.py,sha256=wJ43NJr3YZX92GiMhY6LUVni9qbsqKg0duCRSS_r_mw,11782
 pycontrails/models/cocip/radiative_heating.py,sha256=PngrHriQDSX1iDKu1v2g9NPRe4w6eGi6123xJD6itz8,18712
 pycontrails/models/cocip/contrail_properties.py,sha256=yMqDbAg751orkqPSKlUlnItsoa9JC8lcfxzeoGc8yEk,56007
{pycontrails-0.48.0.dist-info → pycontrails-0.48.1.dist-info}/LICENSE
File without changes

{pycontrails-0.48.0.dist-info → pycontrails-0.48.1.dist-info}/NOTICE
File without changes

{pycontrails-0.48.0.dist-info → pycontrails-0.48.1.dist-info}/top_level.txt
File without changes