pycontrails 0.54.2-cp313-cp313-win_amd64.whl → 0.54.4-cp313-cp313-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycontrails/__init__.py +2 -2
- pycontrails/_version.py +2 -2
- pycontrails/core/__init__.py +1 -1
- pycontrails/core/aircraft_performance.py +75 -61
- pycontrails/core/cache.py +7 -7
- pycontrails/core/fleet.py +25 -21
- pycontrails/core/flight.py +215 -301
- pycontrails/core/interpolation.py +56 -56
- pycontrails/core/met.py +48 -39
- pycontrails/core/models.py +25 -11
- pycontrails/core/polygon.py +15 -15
- pycontrails/core/rgi_cython.cp313-win_amd64.pyd +0 -0
- pycontrails/core/vector.py +22 -22
- pycontrails/datalib/_met_utils/metsource.py +8 -5
- pycontrails/datalib/ecmwf/__init__.py +14 -14
- pycontrails/datalib/ecmwf/common.py +1 -1
- pycontrails/datalib/ecmwf/era5.py +7 -7
- pycontrails/datalib/ecmwf/hres.py +3 -3
- pycontrails/datalib/ecmwf/ifs.py +1 -1
- pycontrails/datalib/ecmwf/variables.py +1 -0
- pycontrails/datalib/gfs/__init__.py +6 -6
- pycontrails/datalib/gfs/gfs.py +2 -2
- pycontrails/datalib/goes.py +5 -5
- pycontrails/datalib/landsat.py +5 -8
- pycontrails/datalib/sentinel.py +7 -11
- pycontrails/ext/bada.py +3 -2
- pycontrails/ext/empirical_grid.py +1 -1
- pycontrails/ext/synthetic_flight.py +3 -2
- pycontrails/models/accf.py +40 -19
- pycontrails/models/apcemm/apcemm.py +5 -4
- pycontrails/models/cocip/__init__.py +2 -2
- pycontrails/models/cocip/cocip.py +16 -17
- pycontrails/models/cocip/cocip_params.py +2 -11
- pycontrails/models/cocip/cocip_uncertainty.py +24 -18
- pycontrails/models/cocip/contrail_properties.py +331 -316
- pycontrails/models/cocip/output_formats.py +53 -53
- pycontrails/models/cocip/radiative_forcing.py +135 -131
- pycontrails/models/cocip/radiative_heating.py +135 -135
- pycontrails/models/cocip/unterstrasser_wake_vortex.py +90 -87
- pycontrails/models/cocip/wake_vortex.py +92 -92
- pycontrails/models/cocip/wind_shear.py +8 -8
- pycontrails/models/cocipgrid/cocip_grid.py +118 -107
- pycontrails/models/dry_advection.py +59 -58
- pycontrails/models/emissions/__init__.py +2 -2
- pycontrails/models/emissions/black_carbon.py +108 -108
- pycontrails/models/emissions/emissions.py +85 -85
- pycontrails/models/emissions/ffm2.py +35 -35
- pycontrails/models/humidity_scaling/humidity_scaling.py +23 -23
- pycontrails/models/ps_model/__init__.py +3 -2
- pycontrails/models/ps_model/ps_aircraft_params.py +11 -6
- pycontrails/models/ps_model/ps_grid.py +256 -60
- pycontrails/models/ps_model/ps_model.py +18 -21
- pycontrails/models/ps_model/ps_operational_limits.py +58 -69
- pycontrails/models/tau_cirrus.py +8 -1
- pycontrails/physics/geo.py +216 -67
- pycontrails/physics/jet.py +220 -90
- pycontrails/physics/static/iata-cargo-load-factors-20241115.csv +71 -0
- pycontrails/physics/static/iata-passenger-load-factors-20241115.csv +71 -0
- pycontrails/physics/units.py +14 -14
- pycontrails/utils/json.py +1 -2
- pycontrails/utils/types.py +12 -7
- {pycontrails-0.54.2.dist-info → pycontrails-0.54.4.dist-info}/METADATA +10 -10
- {pycontrails-0.54.2.dist-info → pycontrails-0.54.4.dist-info}/NOTICE +1 -1
- pycontrails-0.54.4.dist-info/RECORD +111 -0
- {pycontrails-0.54.2.dist-info → pycontrails-0.54.4.dist-info}/WHEEL +1 -1
- pycontrails-0.54.2.dist-info/RECORD +0 -109
- {pycontrails-0.54.2.dist-info → pycontrails-0.54.4.dist-info}/LICENSE +0 -0
- {pycontrails-0.54.2.dist-info → pycontrails-0.54.4.dist-info}/top_level.txt +0 -0
pycontrails/core/polygon.py
CHANGED
@@ -42,7 +42,7 @@ except ModuleNotFoundError as exc:
 
 
 def buffer_and_clean(
-    contour: npt.NDArray[np.
+    contour: npt.NDArray[np.floating],
     min_area: float,
     convex_hull: bool,
     epsilon: float,
@@ -54,7 +54,7 @@ def buffer_and_clean(
 
     Parameters
     ----------
-    contour : npt.NDArray[np.
+    contour : npt.NDArray[np.floating]
        Contour to buffer and clean. A 2d array of shape (n, 2) where n is the number
        of vertices in the contour.
     min_area : float
@@ -157,13 +157,13 @@ def _round_polygon(polygon: shapely.Polygon, precision: int) -> shapely.Polygon:
 
 
 def _contours_to_polygons(
-    contours: Sequence[npt.NDArray[np.
+    contours: Sequence[npt.NDArray[np.floating]],
     hierarchy: npt.NDArray[np.int_],
     min_area: float,
     convex_hull: bool,
     epsilon: float,
-    longitude: npt.NDArray[np.
-    latitude: npt.NDArray[np.
+    longitude: npt.NDArray[np.floating] | None,
+    latitude: npt.NDArray[np.floating] | None,
     precision: int | None,
     buffer: float,
     i: int = 0,
@@ -172,7 +172,7 @@ def _contours_to_polygons(
 
     Parameters
     ----------
-    contours : Sequence[npt.NDArray[np.
+    contours : Sequence[npt.NDArray[np.floating]]
        The contours output from :func:`cv2.findContours`.
     hierarchy : npt.NDArray[np.int_]
        The hierarchy output from :func:`cv2.findContours`.
@@ -182,9 +182,9 @@ def _contours_to_polygons(
        Whether to take the convex hull of each polygon.
     epsilon : float
        Epsilon value to use when simplifying the polygons.
-    longitude : npt.NDArray[np.
+    longitude : npt.NDArray[np.floating] | None
        Longitude values for the grid.
-    latitude : npt.NDArray[np.
+    latitude : npt.NDArray[np.floating] | None
        Latitude values for the grid.
     precision : int | None
        Precision to use when rounding the coordinates.
@@ -254,7 +254,7 @@ def _contours_to_polygons(
 
 
 def determine_buffer(
-    longitude: npt.NDArray[np.
+    longitude: npt.NDArray[np.floating], latitude: npt.NDArray[np.floating]
 ) -> float:
     """Determine the proper buffer size to use when converting to polygons."""
 
@@ -279,22 +279,22 @@ def determine_buffer(
 
 
 def find_multipolygon(
-    arr: npt.NDArray[np.
+    arr: npt.NDArray[np.floating],
     threshold: float,
     min_area: float,
     epsilon: float,
     lower_bound: bool = True,
     interiors: bool = True,
     convex_hull: bool = False,
-    longitude: npt.NDArray[np.
-    latitude: npt.NDArray[np.
+    longitude: npt.NDArray[np.floating] | None = None,
+    latitude: npt.NDArray[np.floating] | None = None,
     precision: int | None = None,
 ) -> shapely.MultiPolygon:
     """Compute a multipolygon from a 2d array.
 
     Parameters
     ----------
-    arr : npt.NDArray[np.
+    arr : npt.NDArray[np.floating]
        Array to convert to a multipolygon. The array will be converted to a binary
        array by comparing each element to ``threshold``. This binary array is then
        passed into :func:`cv2.findContours` to find the contours.
@@ -312,11 +312,11 @@ def find_multipolygon(
        Whether to include interior polygons. By default, True.
     convex_hull : bool, optional
        Experimental. Whether to take the convex hull of each polygon. By default, False.
-    longitude : npt.NDArray[np.
+    longitude : npt.NDArray[np.floating] | None, optional
        If provided, the coordinates values corresponding to the longitude dimensions of ``arr``.
        The contour coordinates will be converted to longitude-latitude values by indexing
        into this array. Defaults to None.
-    latitude : npt.NDArray[np.
+    latitude : npt.NDArray[np.floating] | None, optional
        If provided, the coordinates values corresponding to the latitude dimensions of ``arr``.
     precision : int | None, optional
        If provided, the precision to use when rounding the coordinates. Defaults to None.
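The pattern running through this diff is the switch to the dtype-generic annotation npt.NDArray[np.floating], which accepts any NumPy floating-point array (float32 or float64) rather than one concrete dtype. A minimal sketch of the idea; the function below is illustrative and not part of pycontrails:

    import numpy as np
    import numpy.typing as npt

    def scale_contour(contour: npt.NDArray[np.floating], factor: float) -> npt.NDArray[np.floating]:
        """Scale contour vertices; float32 and float64 inputs both satisfy np.floating."""
        return contour * factor

    scale_contour(np.zeros((4, 2), dtype=np.float32), 2.0)  # accepted by the annotation
    scale_contour(np.zeros((4, 2), dtype=np.float64), 2.0)  # also accepted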
pycontrails/core/rgi_cython.cp313-win_amd64.pyd
CHANGED
Binary file
pycontrails/core/vector.py
CHANGED
@@ -260,7 +260,7 @@ class VectorDataset:
         If "time" variable cannot be converted to numpy array.
     """
 
-    __slots__ = ("
+    __slots__ = ("attrs", "data")
 
     #: Vector data with labels as keys and :class:`numpy.ndarray` as values
     data: VectorDataDict
@@ -1392,7 +1392,7 @@ class GeoVectorDataset(VectorDataset):
         return attrs
 
     @property
-    def level(self) -> npt.NDArray[np.
+    def level(self) -> npt.NDArray[np.floating]:
         """Get pressure ``level`` values for points.
 
         Automatically calculates pressure level using :func:`units.m_to_pl` using ``altitude`` key.
@@ -1403,7 +1403,7 @@ class GeoVectorDataset(VectorDataset):
 
         Returns
         -------
-        npt.NDArray[np.
+        npt.NDArray[np.floating]
            Point pressure level values, [:math:`hPa`]
         """
         try:
@@ -1412,7 +1412,7 @@ class GeoVectorDataset(VectorDataset):
         return units.m_to_pl(self.altitude)
 
     @property
-    def altitude(self) -> npt.NDArray[np.
+    def altitude(self) -> npt.NDArray[np.floating]:
         """Get altitude.
 
         Automatically calculates altitude using :func:`units.pl_to_m` using ``level`` key.
@@ -1423,7 +1423,7 @@ class GeoVectorDataset(VectorDataset):
 
         Returns
         -------
-        npt.NDArray[np.
+        npt.NDArray[np.floating]
            Altitude, [:math:`m`]
         """
         try:
@@ -1437,12 +1437,12 @@ class GeoVectorDataset(VectorDataset):
         return units.ft_to_m(self["altitude_ft"])
 
     @property
-    def air_pressure(self) -> npt.NDArray[np.
+    def air_pressure(self) -> npt.NDArray[np.floating]:
         """Get ``air_pressure`` values for points.
 
         Returns
         -------
-        npt.NDArray[np.
+        npt.NDArray[np.floating]
            Point air pressure values, [:math:`Pa`]
         """
         try:
@@ -1451,12 +1451,12 @@ class GeoVectorDataset(VectorDataset):
         return 100.0 * self.level
 
     @property
-    def altitude_ft(self) -> npt.NDArray[np.
+    def altitude_ft(self) -> npt.NDArray[np.floating]:
         """Get altitude in feet.
 
         Returns
         -------
-        npt.NDArray[np.
+        npt.NDArray[np.floating]
            Altitude, [:math:`ft`]
         """
         try:
@@ -1522,7 +1522,7 @@ class GeoVectorDataset(VectorDataset):
     # Utilities
     # ------------
 
-    def transform_crs(self, crs: str) -> tuple[npt.NDArray[np.
+    def transform_crs(self, crs: str) -> tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]:
         """Transform trajectory data from one coordinate reference system (CRS) to another.
 
         Parameters
@@ -1535,7 +1535,7 @@ class GeoVectorDataset(VectorDataset):
 
         Returns
         -------
-        tuple[npt.NDArray[np.
+        tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]
            New x and y coordinates in the target CRS.
         """
         try:
@@ -1552,12 +1552,12 @@ class GeoVectorDataset(VectorDataset):
         transformer = pyproj.Transformer.from_crs(crs_from, crs, always_xy=True)
         return transformer.transform(self["longitude"], self["latitude"])
 
-    def T_isa(self) -> npt.NDArray[np.
+    def T_isa(self) -> npt.NDArray[np.floating]:
         """Calculate the ICAO standard atmosphere temperature at each point.
 
         Returns
         -------
-        npt.NDArray[np.
+        npt.NDArray[np.floating]
            ISA temperature, [:math:`K`]
 
         See Also
@@ -1610,24 +1610,24 @@ class GeoVectorDataset(VectorDataset):
         self,
         mda: met_module.MetDataArray,
         *,
-        longitude: npt.NDArray[np.
-        latitude: npt.NDArray[np.
-        level: npt.NDArray[np.
+        longitude: npt.NDArray[np.floating] | None = None,
+        latitude: npt.NDArray[np.floating] | None = None,
+        level: npt.NDArray[np.floating] | None = None,
         time: npt.NDArray[np.datetime64] | None = None,
         use_indices: bool = False,
         **interp_kwargs: Any,
-    ) -> npt.NDArray[np.
+    ) -> npt.NDArray[np.floating]:
         """Intersect waypoints with MetDataArray.
 
         Parameters
         ----------
         mda : MetDataArray
            MetDataArray containing a meteorological variable at spatio-temporal coordinates.
-        longitude : npt.NDArray[np.
+        longitude : npt.NDArray[np.floating], optional
            Override existing coordinates for met interpolation
-        latitude : npt.NDArray[np.
+        latitude : npt.NDArray[np.floating], optional
            Override existing coordinates for met interpolation
-        level : npt.NDArray[np.
+        level : npt.NDArray[np.floating], optional
            Override existing coordinates for met interpolation
         time : npt.NDArray[np.datetime64], optional
            Override existing coordinates for met interpolation
@@ -1646,7 +1646,7 @@ class GeoVectorDataset(VectorDataset):
 
         Returns
         -------
-        npt.NDArray[np.
+        npt.NDArray[np.floating]
            Interpolated values
 
         Examples
@@ -2019,7 +2019,7 @@ def vector_to_lon_lat_grid(
           ...,
           [1.97, 3.02, 1.84, ..., 2.37, 3.87, 2.09],
          [3.74, 1.6 , 4.01, ..., 4.6 , 4.27, 3.4 ],
-          [2.97, 0.12, 1.33, ..., 3.54, 0.74, 2.59]])
+          [2.97, 0.12, 1.33, ..., 3.54, 0.74, 2.59]], shape=(40, 40))
 
    >>> da.sum().item() == vector["foo"].sum()
    np.True_
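The level, altitude, air_pressure, and altitude_ft properties touched above are derived quantities, and the new annotations describe plain floating-point arrays. A short usage sketch, assuming the documented GeoVectorDataset constructor:

    import numpy as np
    from pycontrails import GeoVectorDataset

    vector = GeoVectorDataset(
        longitude=np.array([10.0, 10.5]),
        latitude=np.array([50.0, 50.2]),
        altitude=np.array([11000.0, 11100.0]),  # m
        time=np.array(["2024-01-01T00:00", "2024-01-01T00:05"], dtype="datetime64[ns]"),
    )
    vector.level         # pressure level derived via units.m_to_pl, [hPa]
    vector.air_pressure  # 100.0 * level, [Pa]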
pycontrails/datalib/_met_utils/metsource.py
CHANGED

@@ -175,13 +175,16 @@ def parse_pressure_levels(
 
     out = arr.tolist()
     if supported is None:
-        return out
+        return out  # type: ignore[return-value]
 
-    if missing := set(out).difference(supported):
-        msg =
+    if missing := set(out).difference(supported):  # type: ignore[arg-type]
+        msg = (
+            f"Pressure levels {sorted(missing)} are not supported. "  # type: ignore[type-var]
+            f"Supported levels: {supported}"
+        )
         raise ValueError(msg)
 
-    return out
+    return out  # type: ignore[return-value]
 
 
 def parse_variables(variables: VariableInput, supported: list[MetVariable]) -> list[MetVariable]:
@@ -347,7 +350,7 @@ def round_hour(time: datetime, hour: int) -> datetime:
 class MetDataSource(abc.ABC):
     """Abstract class for wrapping meteorology data sources."""
 
-    __slots__ = ("
+    __slots__ = ("grid", "paths", "pressure_levels", "timesteps", "variables")
 
     #: List of individual timesteps from data source derived from :attr:`time`
     #: Use :func:`parse_time` to handle :class:`TimeInput`.
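The reworked error message in parse_pressure_levels follows a common validate-and-raise pattern. A minimal standalone sketch; the names here are illustrative, not the pycontrails API:

    def check_levels(requested: list[int], supported: list[int]) -> list[int]:
        # Collect anything requested that the data source cannot provide.
        if missing := set(requested).difference(supported):
            msg = (
                f"Pressure levels {sorted(missing)} are not supported. "
                f"Supported levels: {supported}"
            )
            raise ValueError(msg)
        return requested

    check_levels([250, 300], [200, 250, 300, 400])  # returns [250, 300]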
pycontrails/datalib/ecmwf/__init__.py
CHANGED

@@ -40,21 +40,21 @@ from pycontrails.datalib.ecmwf.variables import (
 )
 
 __all__ = [
-    "
-    "CDSCredentialsNotFound",
+    "ECMWF_VARIABLES",
     "ERA5",
-    "
+    "ERA5ARCO",
     "HRES",
-    "HRESModelLevel",
     "IFS",
-    "
-    "
-    "
-    "
-    "
+    "MODEL_LEVELS_PATH",
+    "MODEL_LEVEL_VARIABLES",
+    "PRESSURE_LEVEL_VARIABLES",
+    "SURFACE_VARIABLES",
+    "CDSCredentialsNotFound",
     "CloudAreaFraction",
     "CloudAreaFractionInLayer",
     "Divergence",
+    "ERA5ModelLevel",
+    "HRESModelLevel",
     "OzoneMassMixingRatio",
     "PotentialVorticity",
     "RelativeHumidity",
@@ -65,9 +65,9 @@ __all__ = [
     "TOAIncidentSolarRadiation",
     "TopNetSolarRadiation",
     "TopNetThermalRadiation",
-    "
-    "
-    "
-    "
-    "
+    "ml_to_pl",
+    "model_level_pressure",
+    "model_level_reference_pressure",
+    "open_arco_era5_model_level_data",
+    "open_arco_era5_single_level",
 ]
pycontrails/datalib/ecmwf/common.py
CHANGED

@@ -61,7 +61,7 @@ class ECMWFAPI(metsource.MetDataSource):
 
         # downselect times
         if not self.timesteps:
-            self.timesteps = ds["time"].values.astype("datetime64[ns]").tolist()
+            self.timesteps = ds["time"].values.astype("datetime64[ns]").tolist()  # type: ignore[assignment]
         else:
             try:
                 ds = ds.sel(time=self.timesteps)
pycontrails/datalib/ecmwf/era5.py
CHANGED

@@ -137,10 +137,10 @@ class ERA5(ECMWFAPI):
     """
 
     __slots__ = (
-        "product_type",
         "cds",
-        "url",
         "key",
+        "product_type",
+        "url",
     )
 
     #: Product type, one of "reanalysis", "ensemble_mean", "ensemble_members", "ensemble_spread"
@@ -319,9 +319,9 @@ class ERA5(ECMWFAPI):
         str
             ERA5 dataset name in CDS
         """
-        if self.
-            return "reanalysis-era5-
-        return "reanalysis-era5-
+        if self.is_single_level:
+            return "reanalysis-era5-single-levels"
+        return "reanalysis-era5-pressure-levels"
 
     def create_cachepath(self, t: datetime | pd.Timestamp) -> str:
         """Return cachepath to local ERA5 data file based on datetime.
@@ -539,9 +539,9 @@ class ERA5(ECMWFAPI):
             LOG.debug("Input dataset processed with pycontrails > 0.29")
             return ds
 
-        # For "reanalysis-era5-single-levels"
+        # For "reanalysis-era5-single-levels"
         # then the netcdf file does not contain the dimension "level"
-        if
+        if self.is_single_level:
             ds = ds.expand_dims(level=self.pressure_levels)
 
         # New CDS-Beta gives "valid_time" instead of "time"
pycontrails/datalib/ecmwf/hres.py
CHANGED

@@ -228,7 +228,7 @@ class HRES(ECMWFAPI):
     ... )
     """
 
-    __slots__ = ("
+    __slots__ = ("email", "field_type", "forecast_time", "key", "server", "stream", "url")
 
     #: stream type, "oper" = atmospheric model/HRES, "enfo" = ensemble forecast.
     stream: str
@@ -691,7 +691,7 @@ class HRES(ECMWFAPI):
 
         # set forecast time if its not already defined
         if not getattr(self, "forecast_time", None):
-            self.forecast_time = ds["time"].values.astype("datetime64[s]").tolist()
+            self.forecast_time = ds["time"].values.astype("datetime64[s]").tolist()  # type: ignore[assignment]
 
         # check that forecast_time is correct if defined
         # note the "time" coordinate here is the HRES forecast_time
@@ -706,7 +706,7 @@ class HRES(ECMWFAPI):
         # set timesteps if not defined
         # note that "time" is now the actual timestep coordinates
         if not self.timesteps:
-            self.timesteps = ds["time"].values.astype("datetime64[s]").tolist()
+            self.timesteps = ds["time"].values.astype("datetime64[s]").tolist()  # type: ignore[assignment]
 
         self.cache_dataset(ds)
 
pycontrails/datalib/ecmwf/ifs.py
CHANGED

@@ -149,7 +149,7 @@ class IFS(metsource.MetDataSource):
         else:
             # set timesteps from dataset "time" coordinates
             # np.datetime64 doesn't covert to list[datetime] unless its unit is us
-            self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()
+            self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()  # type: ignore[assignment]
 
         # downselect hyam/hybm coefficients by the "lev" coordinate
         # (this is a 1-indexed verison of nhym)
pycontrails/datalib/ecmwf/variables.py
CHANGED

@@ -107,6 +107,7 @@ RelativeHumidity = MetVariable(
     long_name=met_var.RelativeHumidity.long_name,
     units="%",
     level_type=met_var.RelativeHumidity.level_type,
+    grib1_id=met_var.RelativeHumidity.grib1_id,
     ecmwf_id=met_var.RelativeHumidity.ecmwf_id,
     grib2_id=met_var.RelativeHumidity.grib2_id,
     description=(
pycontrails/datalib/gfs/__init__.py
CHANGED

@@ -16,13 +16,13 @@ from pycontrails.datalib.gfs.variables import (
 
 __all__ = [
     "GFS_FORECAST_BUCKET",
-    "GFSForecast",
-    "CloudIceWaterMixingRatio",
-    "TotalCloudCoverIsobaric",
-    "Visibility",
-    "TOAUpwardShortwaveRadiation",
-    "TOAUpwardLongwaveRadiation",
     "GFS_VARIABLES",
     "PRESSURE_LEVEL_VARIABLES",
     "SURFACE_VARIABLES",
+    "CloudIceWaterMixingRatio",
+    "GFSForecast",
+    "TOAUpwardLongwaveRadiation",
+    "TOAUpwardShortwaveRadiation",
+    "TotalCloudCoverIsobaric",
+    "Visibility",
 ]
pycontrails/datalib/gfs/gfs.py
CHANGED

@@ -125,7 +125,7 @@ class GFSForecast(metsource.MetDataSource):
     - `GFS Documentation <https://www.emc.ncep.noaa.gov/emc/pages/numerical_forecast_systems/gfs/documentation.php>`_
     """
 
-    __slots__ = ("
+    __slots__ = ("cache_download", "cachestore", "client", "forecast_time", "grid", "show_progress")
 
     #: S3 client for accessing GFS bucket
     client: botocore.client.S3
@@ -597,7 +597,7 @@ class GFSForecast(metsource.MetDataSource):
         else:
             # set timesteps from dataset "time" coordinates
             # np.datetime64 doesn't covert to list[datetime] unless its unit is us
-            self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()
+            self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()  # type: ignore[assignment]
 
         # if "level" is not in dims and
         # length of the requested pressure levels is 1
pycontrails/datalib/goes.py
CHANGED

@@ -384,7 +384,7 @@ class GOES:
           [277.24512, 277.45377, 278.18408, ..., 274.6369 , 274.01093,
            274.06308],
           [276.8278 , 277.14078, 277.7146 , ..., 274.6369 , 273.9066 ,
-           274.16742]], dtype=float32)
+           274.16742]], shape=(500, 500), dtype=float32)
 
    """
 
@@ -745,8 +745,8 @@ def to_ash(da: xr.DataArray, convention: str = "SEVIRI") -> npt.NDArray[np.float
 
 
 def _clip_and_scale(
-    arr: npt.NDArray[np.
-) -> npt.NDArray[np.
+    arr: npt.NDArray[np.floating], low: float, high: float
+) -> npt.NDArray[np.floating]:
     """Clip array and rescale to the interval [0, 1].
 
     Array is first clipped to the interval [low, high] and then linearly rescaled
@@ -757,7 +757,7 @@ def _clip_and_scale(
 
     Parameters
     ----------
-    arr : npt.NDArray[np.
+    arr : npt.NDArray[np.floating]
        Array to clip and scale.
     low : float
        Lower clipping bound.
@@ -766,7 +766,7 @@ def _clip_and_scale(
 
     Returns
     -------
-    npt.NDArray[np.
+    npt.NDArray[np.floating]
        Clipped and scaled array.
     """
     return (arr.clip(low, high) - low) / (high - low)
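The clip-and-rescale expression at the end of _clip_and_scale maps an array onto [0, 1]. A small self-contained sketch of the same arithmetic:

    import numpy as np

    def clip_and_scale(arr: np.ndarray, low: float, high: float) -> np.ndarray:
        # Clip to [low, high], then rescale linearly so low -> 0.0 and high -> 1.0.
        return (arr.clip(low, high) - low) / (high - low)

    clip_and_scale(np.array([250.0, 280.0, 310.0]), 260.0, 300.0)
    # array([0. , 0.5, 1. ])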
pycontrails/datalib/landsat.py
CHANGED

@@ -152,7 +152,7 @@ class Landsat:
        are used. Bands must share a common resolution. The resolutions of each band are:
 
        - B1-B7, B9: 30 m
-       -
+       - B8: 15 m
        - B10, B11: 30 m (upsampled from true resolution of 100 m)
 
    cachestore : cache.CacheStore, optional
@@ -291,9 +291,7 @@ def _check_band_resolution(bands: set[str]) -> None:
    there are two valid cases: only band 8, or any bands except band 8.
    """
    groups = [
-        {
-            "B8",
-        },  # 15 m
+        {"B8"},  # 15 m
        {f"B{i}" for i in range(1, 12) if i != 8},  # 30 m
    ]
    if not any(bands.issubset(group) for group in groups):
@@ -313,10 +311,9 @@ def _read(path: str, meta: str, band: str, processing: str) -> xr.DataArray:
        pycontrails_optional_package="sat",
    )
 
-
-
-
-    src.close()
+    with rasterio.open(path) as src:
+        img = src.read(1)
+        crs = pyproj.CRS.from_epsg(src.crs.to_epsg())
 
    if processing == "reflectance":
        mult, add = _read_band_reflectance_rescaling(meta, band)
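The _read refactor above replaces a manual open/close sequence with a context manager, which guarantees the rasterio dataset is closed even if reading raises. A minimal sketch of the pattern; the path argument is a placeholder:

    import pyproj
    import rasterio

    def read_band(path: str):
        # The "with" block closes the dataset on success and on error alike.
        with rasterio.open(path) as src:
            img = src.read(1)
            crs = pyproj.CRS.from_epsg(src.crs.to_epsg())
        return img, crs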
pycontrails/datalib/sentinel.py
CHANGED

@@ -313,9 +313,8 @@ def _check_band_resolution(bands: set[str]) -> None:
 def _read(path: str, granule_meta: str, safe_meta: str, band: str, processing: str) -> xr.DataArray:
    """Read imagery data from Sentinel-2 files."""
    Image.MAX_IMAGE_PIXELS = None  # avoid decompression bomb warning
-
-
-    src.close()
+    with Image.open(path) as src:
+        img = np.asarray(src)
 
    if processing == "reflectance":
        gain, offset = _read_band_reflectance_rescaling(safe_meta, band)
@@ -357,10 +356,9 @@ def _band_id(band: str) -> int:
    """Get band ID used in some metadata files."""
    if band in (f"B{i:2d}" for i in range(1, 9)):
        return int(band[1:]) - 1
-
+    if band == "B8A":
        return 8
-
-        return int(band[1:])
+    return int(band[1:])
 
 
 def _read_band_reflectance_rescaling(meta: str, band: str) -> tuple[float, float]:
@@ -389,12 +387,10 @@ def _read_band_reflectance_rescaling(meta: str, band: str) -> tuple[float, float
    for elem in elems:
        if int(elem.attrib["band_id"]) == band_id and elem.text is not None:
            offset = float(elem.text)
-
-    else:
-        msg = f"Could not find reflectance offset for band {band} (band ID {band_id})"
-        raise ValueError(msg)
+            return gain, offset
 
-
+    msg = f"Could not find reflectance offset for band {band} (band ID {band_id})"
+    raise ValueError(msg)
 
 
 def _read_image_coordinates(meta: str, band: str) -> tuple[np.ndarray, np.ndarray]:
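The _read_band_reflectance_rescaling change above restructures a search loop so the match returns immediately and the failure case raises after the loop. A generic sketch of the same control flow; the names are illustrative:

    def find_offset(elems, band_id: int) -> float:
        for elem in elems:
            if int(elem.attrib["band_id"]) == band_id and elem.text is not None:
                return float(elem.text)  # first match wins
        # No element matched: fall through to a clear error.
        msg = f"Could not find reflectance offset for band ID {band_id}"
        raise ValueError(msg)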
pycontrails/ext/bada.py
CHANGED

@@ -22,8 +22,9 @@ try:
 
 except ImportError as e:
    raise ImportError(
-        "Failed to import the 'pycontrails-bada' package. Install with 'pip install"
-
+        "Failed to import the 'pycontrails-bada' package. Install with 'pip install "
+        "--index-url https://us-central1-python.pkg.dev/contrails-301217/pycontrails/simple "
+        "pycontrails-bada'."
    ) from e
 else:
    __all__ = [
pycontrails/ext/empirical_grid.py
CHANGED

@@ -118,7 +118,7 @@ class EmpiricalGrid(AircraftPerformanceGrid):
 
        return data[["altitude_ft", *columns]].drop(columns=["aircraft_type"])
 
-    def _sample(self, altitude_ft: npt.NDArray[np.
+    def _sample(self, altitude_ft: npt.NDArray[np.floating]) -> None:
        """Sample the data and update the source."""
 
        df = self._query_data()
pycontrails/ext/synthetic_flight.py
CHANGED

@@ -20,8 +20,9 @@ try:
    from pycontrails.ext.bada import bada_model
 except ImportError as e:
    raise ImportError(
-
-
+        "Failed to import the 'pycontrails-bada' package. Install with 'pip install "
+        "--index-url https://us-central1-python.pkg.dev/contrails-301217/pycontrails/simple "
+        "pycontrails-bada'."
    ) from e
 
 logger = logging.getLogger(__name__)
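Both pycontrails/ext/bada.py and pycontrails/ext/synthetic_flight.py wrap an optional dependency import and re-raise with install instructions. A minimal sketch of that guard pattern; the module name below is a stand-in, not a real package:

    try:
        import some_optional_extra  # hypothetical optional dependency
    except ImportError as e:
        raise ImportError(
            "Failed to import the optional package. Install it with "
            "'pip install --index-url <private-index> <package>'."
        ) from e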