pycontrails-0.58.0-cp312-cp312-macosx_10_13_x86_64.whl → pycontrails-0.59.0-cp312-cp312-macosx_10_13_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pycontrails/_version.py +3 -3
- pycontrails/core/interpolation.py +6 -6
- pycontrails/core/met.py +26 -21
- pycontrails/core/rgi_cython.cpython-312-darwin.so +0 -0
- pycontrails/datalib/ecmwf/model_levels.py +1 -1
- pycontrails/datalib/goes.py +10 -3
- pycontrails/datalib/gruan.py +343 -0
- pycontrails/datalib/himawari/himawari.py +4 -0
- pycontrails/models/cocip/contrail_properties.py +1 -1
- pycontrails/physics/thermo.py +3 -3
- {pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/METADATA +1 -2
- {pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/RECORD +16 -15
- {pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/WHEEL +0 -0
- {pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/licenses/LICENSE +0 -0
- {pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/licenses/NOTICE +0 -0
- {pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/top_level.txt +0 -0
pycontrails/_version.py
CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID

-__version__ = version = '0.
-__version_tuple__ = version_tuple = (0,
+__version__ = version = '0.59.0'
+__version_tuple__ = version_tuple = (0, 59, 0)

-__commit_id__ = commit_id = '
+__commit_id__ = commit_id = 'g3ff518b95'
pycontrails/core/interpolation.py
CHANGED
@@ -26,15 +26,15 @@ class PycontrailsRegularGridInterpolator(scipy.interpolate.RegularGridInterpolat

     This class is a thin wrapper around the
     :class:`scipy.interpolate.RegularGridInterpolator` in order to make typical
-
+    pycontrails linear interpolation use-cases more performant:

-    #. Avoid ``RegularGridInterpolator`` constructor validation when
+    #. Avoid ``RegularGridInterpolator`` constructor validation when ``method="linear"``.
       In :func:`interp`, parameters are carefully crafted to fit into the intended form,
       thereby making validation unnecessary.
     #. Override the :meth:`_evaluate_linear` method with a faster implementation. See
       the :meth:`_evaluate_linear` docstring for more information.

-    **This class should not be used directly. Instead, use the
+    **This class should not be used directly. Instead, use the** :func:`interp` **function.**

     .. versionchanged:: 0.40.0

@@ -428,8 +428,8 @@ def interp(
         In particular, the dimensions of ``da`` must be ``longitude``, ``latitude``,
         ``level``, and ``time``. The three spatial dimensions must be monotonically
         increasing with ``float64`` dtype. The ``time`` dimension must be
-        monotonically increasing with
-        Assumed to be cheap to load into memory (:attr:`
+        monotonically increasing with :class:`numpy.datetime64` dtype.
+        Assumed to be cheap to load into memory (:attr:`xarray.DataArray.values` is
         used without hesitation).
     method : str
         Passed into :class:`scipy.interpolate.RegularGridInterpolator`.
@@ -442,7 +442,7 @@ def interp(
         ``coords``.
     indices : tuple | None, optional
         Experimental. Provide intermediate artifacts computed by
-        :meth
+        :meth:`scipy.interpolate.RegularGridInterpolator._find_indices`
         to avoid redundant computation. If known and provided, this can speed
         up interpolation by avoiding an unnecessary call to ``_find_indices``.
         By default, None. Must be used precisely.
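
The requirements spelled out above can be checked against a small conforming DataArray. A minimal sketch (not part of the diff; shape and coordinate values are arbitrary):

    import numpy as np
    import xarray as xr

    # dims must be longitude, latitude, level, time; spatial coords are monotonically
    # increasing float64, and time is datetime64
    da = xr.DataArray(
        np.zeros((4, 3, 2, 2)),
        dims=("longitude", "latitude", "level", "time"),
        coords={
            "longitude": np.array([-180.0, -60.0, 60.0, 180.0]),
            "latitude": np.array([-30.0, 0.0, 30.0]),
            "level": np.array([250.0, 300.0]),
            "time": np.array(["2024-01-01T00", "2024-01-01T01"], dtype="datetime64[ns]"),
        },
    )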
pycontrails/core/met.py
CHANGED
@@ -354,8 +354,7 @@ class MetBase(ABC, Generic[XArrayType]):
     def hash(self) -> str:
         """Generate a unique hash for this met instance.

-        Note this is not as robust as it could be since
-        cuts off.
+        Note this is not as robust as it could be since :func:`repr` cuts off.

         Returns
         -------
@@ -686,7 +685,7 @@ class MetBase(ABC, Generic[XArrayType]):
 class MetDataset(MetBase):
     """Meteorological dataset with multiple variables.

-    Composition around
+    Composition around :class:`xarray.Dataset` to enforce certain
     variables and dimensions for internal usage

     Parameters
@@ -698,17 +697,17 @@ class MetDataset(MetBase):
         Defaults to None.
     wrap_longitude : bool, optional
         Wrap data along the longitude dimension. If True, duplicate and shift longitude
-        values (ie,
+        values (ie, ``-180 -> 180``) to ensure that the longitude dimension covers the entire
         interval ``[-180, 180]``. Defaults to False.
     copy : bool, optional
         Copy data on construction. Defaults to True.
     attrs : dict[str, Any], optional
-        Attributes to add to :attr:`data.attrs`. Defaults to None.
-
+        Attributes to add to :attr:`data.attrs`. Defaults to None. Generally, pycontrails
+        :class:`pycontrails.core.models.Models` may use the following attributes:

-        - ``provider``: Name of the data provider (e.g. "ECMWF").
-        - ``dataset``: Name of the dataset (e.g. "ERA5").
-        - ``product``: Name of the product type (e.g. "reanalysis").
+        - ``provider``: Name of the data provider (e.g. ``"ECMWF"``).
+        - ``dataset``: Name of the dataset (e.g. ``"ERA5"``).
+        - ``product``: Name of the product type (e.g. ``"reanalysis"``).

     **attrs_kwargs : Any
         Keyword arguments to add to :attr:`data.attrs`. Defaults to None.
@@ -812,7 +811,7 @@ class MetDataset(MetBase):
         return MetDataArray._from_fastpath(da)

     def get(self, key: str, default_value: Any = None) -> Any:
-        """Shortcut to :meth:`
+        """Shortcut to :meth:`xarray.Dataset.get` method.

         Parameters
         ----------
@@ -889,7 +888,7 @@ class MetDataset(MetBase):

         See Also
         --------
-
+        xarray.Dataset.update
         """
         other = other or {}
         other.update(kwargs)
@@ -957,8 +956,8 @@ class MetDataset(MetBase):
         Returns
         -------
         list[str]
-            List of met keys verified in MetDataset
-            Returns an empty list if any MetVariable is missing.
+            List of met keys verified in :class:`MetDataset`.
+            Returns an empty list if any :class:`MetVariable` is missing.

         Raises
         ------
@@ -1141,12 +1140,12 @@ class MetDataset(MetBase):

     @property
     def provider_attr(self) -> str:
-        """Look up the
+        """Look up the ``"provider"`` attribute with a custom error message.

         Returns
         -------
         str
-            Provider of the data. If not one of
+            Provider of the data. If not one of ``"ECMWF"`` or ``"NCEP"``,
             a warning is issued.
         """
         supported = ("ECMWF", "NCEP")
@@ -1155,13 +1154,13 @@ class MetDataset(MetBase):

     @property
     def dataset_attr(self) -> str:
-        """Look up the
+        """Look up the ``"dataset"`` attribute with a custom error message.

         Returns
         -------
         str
-            Dataset of the data. If not one of
-            or
+            Dataset of the data. If not one of ``"ERA5"``, ``"HRES"``, ``"IFS"``,
+            or ``"GFS"``, a warning is issued.
         """
         supported = ("ERA5", "HRES", "IFS", "GFS")
         examples = {
@@ -1173,13 +1172,13 @@ class MetDataset(MetBase):

     @property
     def product_attr(self) -> str:
-        """Look up the
+        """Look up the ``"product"`` attribute with a custom error message.

         Returns
         -------
         str
-            Product of the data. If not one of
-            a warning is issued.
+            Product of the data. If not one of ``"forecast"``, ``"ensemble"``,
+            or ``"reanalysis"``, a warning is issued.

         """
         supported = ("reanalysis", "forecast", "ensemble")
@@ -1208,6 +1207,7 @@ class MetDataset(MetBase):

         By default, this method returns a new :class:`MetDataset` instead
         of renaming in place. To retain the old behavior, set ``inplace=True``.
+        The ``inplace`` behavior is deprecated and will be removed in a future release.

         Parameters
         ----------
@@ -1224,6 +1224,11 @@ class MetDataset(MetBase):
         data_renamed = standardize_variables(self.data, variables)

         if inplace:
+            warnings.warn(
+                "The inplace behavior is deprecated and will be removed in a future release. ",
+                DeprecationWarning,
+                stacklevel=2,
+            )
             self.data = data_renamed
             return None

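
The hunks above deprecate the in-place rename path. A hedged migration sketch: the renaming method's name is not visible in this excerpt, so ``standardize_variables`` is assumed from the call it wraps, and ``met``/``variables`` stand for objects the caller already has:

    import warnings

    # met.standardize_variables(variables, inplace=True)   # deprecated: warns, renames in place
    # met = met.standardize_variables(variables)           # preferred: rebind the returned MetDataset

    # In a test suite, promote the new DeprecationWarning to an error to locate
    # remaining inplace callers (generic Python pattern, not pycontrails-specific):
    with warnings.catch_warnings():
        warnings.simplefilter("error", DeprecationWarning)
        ...  # exercise code that might still pass inplace=True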
pycontrails/core/rgi_cython.cpython-312-darwin.so
CHANGED
Binary file

pycontrails/datalib/ecmwf/model_levels.py
CHANGED
@@ -139,9 +139,9 @@ def model_level_pressure(sp: xr.DataArray, model_levels: npt.ArrayLike) -> xr.Da
            [564.02437124, 560.81744834, 557.61052544, 554.40360254],
            [551.19667964, 547.98975674, 544.78283384, 541.57591094]]])
     Coordinates:
+      * model_level  (model_level) int64 16B 80 100
       * longitude    (longitude) float64 32B -180.0 -60.0 60.0 180.0
       * latitude     (latitude) float64 32B -90.0 -30.0 30.0 90.0
-      * model_level  (model_level) int64 16B 80 100

     See Also
     --------
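
For context, a hypothetical call matching the doctest excerpt above. The import path follows the file listed in this diff; the surface-pressure values are placeholders, so the numbers would differ from the doctest output:

    import numpy as np
    import xarray as xr

    from pycontrails.datalib.ecmwf.model_levels import model_level_pressure

    # surface pressure on the same 4x4 lon/lat grid used in the doctest
    sp = xr.DataArray(
        np.full((4, 4), 101325.0),
        dims=("longitude", "latitude"),
        coords={
            "longitude": [-180.0, -60.0, 60.0, 180.0],
            "latitude": [-90.0, -30.0, 30.0, 90.0],
        },
    )
    pl = model_level_pressure(sp, [80, 100])  # DataArray with a model_level coordinate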
pycontrails/datalib/goes.py
CHANGED
@@ -308,9 +308,14 @@ def gcs_goes_path(
     fs = fs or gcsfs.GCSFileSystem(token="anon")
     rpaths = fs.glob(rpath)

-    out = [
-
-
+    out = []
+    for r in rpaths:
+        if (band := _extract_band_from_rpath(r)) in bands:
+            out.append(r)
+            bands.remove(band)
+
+    if bands:
+        raise FileNotFoundError(f"No data found for {time} in {region} for bands {bands}")
     return out


@@ -427,6 +432,8 @@ class GOES:

     """

+    __slots__ = ("bands", "bucket", "cachestore", "fs", "region")
+
     __marker = object()

     def __init__(
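
A standalone sketch of the band-filtering behavior added to ``gcs_goes_path`` above: keep one remote path per requested band and raise if any requested band has no match. ``_extract_band_from_rpath`` is internal to pycontrails, so a stand-in parser and invented filenames are used here:

    def filter_band_paths(rpaths: list[str], bands: set[str]) -> list[str]:
        """Mimic the loop above: one path per band, error if any band is missing."""
        remaining = set(bands)
        out = []
        for r in rpaths:
            band = r.split("_")[1].split("-")[-1][-3:]  # stand-in for _extract_band_from_rpath
            if band in remaining:
                out.append(r)
                remaining.remove(band)
        if remaining:
            raise FileNotFoundError(f"No data found for bands {remaining}")
        return out

    paths = ["OR_ABI-L1b-RadF-M6C11_G16_s2023.nc", "OR_ABI-L1b-RadF-M6C14_G16_s2023.nc"]
    filter_band_paths(paths, {"C11", "C14"})   # returns both paths
    # filter_band_paths(paths, {"C13"}) would raise FileNotFoundError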
pycontrails/datalib/gruan.py
ADDED
@@ -0,0 +1,343 @@
+"""Support for accessing `GRUAN <https://www.gruan.org/>`_ data over FTP."""
+
+import datetime
+import ftplib
+import functools
+import os
+import tempfile
+from concurrent import futures
+
+import xarray as xr
+
+from pycontrails.core import cache
+
+#: GRUAN FTP server address
+FTP_SERVER = "ftp.ncdc.noaa.gov"
+
+#: Base path for GRUAN data on the FTP server
+FTP_BASE_PATH = "/pub/data/gruan/processing/level2"
+
+#: All available GRUAN products and sites on the FTP server as of 2025-10
+#: This is simply the hardcoded output of :func:`available_sites` at that time to
+#: avoid a lookup that changes infrequently.
+AVAILABLE_PRODUCTS_TO_SITES = {
+    "RS-11G-GDP.1": ["SYO", "TAT", "NYA", "LIN"],
+    "RS41-EDT.1": ["LIN", "POT", "SNG"],
+    "RS92-GDP.1": ["BOU", "CAB", "LIN", "PAY", "POT", "SOD", "TAT"],
+    "RS92-GDP.2": [
+        "BAR",
+        "BEL",
+        "BOU",
+        "CAB",
+        "DAR",
+        "GRA",
+        "LAU",
+        "LIN",
+        "MAN",
+        "NAU",
+        "NYA",
+        "PAY",
+        "POT",
+        "REU",
+        "SGP",
+        "SOD",
+        "TAT",
+        "TEN",
+        "GVN",
+    ],
+    "RS92-PROFILE-BETA.2": ["BOU", "CAB", "LIN", "POT", "SOD", "TAT"],
+    "RS92-PROFILE-BETA.3": ["BOU", "CAB", "LIN", "POT", "SOD", "TAT"],
+}
+
+
+def extract_gruan_time(filename: str) -> tuple[datetime.datetime, int]:
+    """Extract launch time and revision number from a GRUAN filename.
+
+    Parameters
+    ----------
+    filename : str
+        GRUAN filename, e.g. "LIN-RS-01_2_RS92-GDP_002_20210125T132400_1-000-001.nc"
+
+    Returns
+    -------
+    tuple[datetime.datetime, int]
+        Launch time as a datetime object and revision number as an integer.
+    """
+    parts = filename.split("_")
+    if len(parts) != 6:
+        raise ValueError(f"Unexpected filename format: {filename}")
+    time_part = parts[4]
+    try:
+        time = datetime.datetime.strptime(time_part, "%Y%m%dT%H%M%S")
+    except ValueError as e:
+        raise ValueError(f"Unexpected time segment: {time_part}") from e
+
+    revision_part = parts[5].removesuffix(".nc")
+    if not revision_part[-3:].isdigit():
+        raise ValueError(f"Unexpected revision segment: {revision_part}")
+    revision = int(revision_part[-3:])
+
+    return time, revision
+
+
+def _fetch_product_tree(prod: str) -> dict[str, list[str]]:
+    result = {}
+    with ftplib.FTP(FTP_SERVER) as ftp:
+        ftp.login()
+        prod_path = f"{FTP_BASE_PATH}/{prod}"
+        versions = [v.split("/")[-1] for v in ftp.nlst(prod_path)]
+
+        for v in versions:
+            version_path = f"{prod_path}/{v}"
+            sites = [s.split("/")[-1] for s in ftp.nlst(version_path)]
+
+            key = f"{prod}.{int(v.split('-')[-1])}"
+            result[key] = sites
+    return result
+
+
+@functools.cache
+def available_sites() -> dict[str, list[str]]:
+    """Get a list of available GRUAN sites for each supported product.
+
+    The :attr:`GRUAN.AVAILABLE` is a hardcoded snapshot of this data. The data returned
+    by this function does not change frequently, so it is cached for efficiency.
+
+    Returns
+    -------
+    dict[str, list[str]]
+        Mapping of product names to lists of available site identifiers.
+    """
+    with ftplib.FTP(FTP_SERVER) as ftp:
+        ftp.login()
+        files = [p.split("/")[-1] for p in ftp.nlst(FTP_BASE_PATH)]
+    products = [p for p in files if "." not in p]  # crude filter to exclude non-directories
+
+    # Compute each product tree in separate thread to speed up retrieval
+    # The FTP server only allows up to 5 connections from the same client
+    out = {}
+    with futures.ThreadPoolExecutor(max_workers=min(len(products), 5)) as tpe:
+        result = tpe.map(_fetch_product_tree, products)
+        for r in result:
+            out.update(r)
+
+    return out
+
+
+class GRUAN:
+    """Access `GRUAN <https://www.gruan.org/>`_ data over anonymous FTP.
+
+    GRUAN is the Global Climate Observing System Reference Upper-Air Network. It provides
+    high-quality measurements of atmospheric variables from ground to stratosphere
+    through a global network of radiosonde stations.
+
+    .. versionadded:: 0.59.0
+
+    Parameters
+    ----------
+    product : str
+        GRUAN data product. See :attr:`AVAILABLE` for available products. These currently
+        include:
+        - ``RS92-GDP.2``
+        - ``RS92-GDP.1``
+        - ``RS92-PROFILE-BETA.2``
+        - ``RS92-PROFILE-BETA.3``
+        - ``RS41-EDT.1``
+        - ``RS-11G-GDP.1``
+    site : str
+        GRUAN station identifier. See :attr:`AVAILABLE` for available sites for each product.
+    cachestore : cache.CacheStore | None, optional
+        Cache store to use for downloaded files. If not provided, a disk cache store
+        will be created in the user cache directory under ``gruan/``. Set to ``None``
+        to disable caching.
+
+    Notes
+    -----
+    The FTP files have the following hierarchy::
+
+        /pub/data/gruan/processing/level2/
+            {product-root}/
+                version-{NNN}/
+                    {SITE}/
+                        {YYYY}/
+                            <filename>.nc
+
+    - {product-root} is the product name without the trailing version integer (e.g. ``RS92-GDP``)
+    - version-{NNN} zero-pads to three digits (suffix ``.2`` -> ``version-002``)
+    - {SITE} is the station code (e.g. ``LIN``)
+    - {YYYY} is launch year
+    - Filenames encode launch time and revision (parsed by :func:`extract_gruan_time`)
+
+    Discovery helpers methods:
+
+    - :attr:`AVAILABLE` or :func:`available_sites` -> products and sites
+    - :meth:`years` -> list available years for (product, site)
+    - :meth:`list_files` -> list available NetCDF files for the given year
+    - :meth:`get` -> download and open a single NetCDF file as an :class:`xarray.Dataset`
+
+    Typical workflow:
+
+    1. Inspect :attr:`AVAILABLE` (fast) or call :func:`available_sites` (live)
+    2. Instantiate ``GRUAN(product, site)``
+    3. Call ``years()``
+    4. Call ``list_files(year)``
+    5. Call ``get(filename)`` for an ``xarray.Dataset``
+
+    """
+
+    # Convenience access to available sites
+    available_sites = staticmethod(available_sites)
+    AVAILABLE = AVAILABLE_PRODUCTS_TO_SITES
+
+    __slots__ = ("_ftp", "cachestore", "product", "site")
+
+    __marker = object()
+
+    def __init__(
+        self,
+        product: str,
+        site: str,
+        cachestore: cache.CacheStore | None = __marker,  # type: ignore[assignment]
+    ) -> None:
+        known = AVAILABLE_PRODUCTS_TO_SITES
+
+        if product not in known:
+            known = available_sites()  # perhaps AVAILABLE_PRODUCTS_TO_SITES is outdated
+            if product not in known:
+                raise ValueError(f"Unknown GRUAN product: {product}. Known products: {list(known)}")
+        self.product = product
+
+        if site not in known[product]:
+            known = available_sites()  # perhaps AVAILABLE_PRODUCTS_TO_SITES is outdated
+            if site not in known[product]:
+                raise ValueError(
+                    f"Unknown GRUAN site '{site}' for product '{product}'. "
+                    f"Known sites: {known[product]}"
+                )
+        self.site = site
+
+        if cachestore is self.__marker:
+            cache_root = cache._get_user_cache_dir()
+            cache_dir = f"{cache_root}/gruan"
+            cachestore = cache.DiskCacheStore(cache_dir=cache_dir)
+        self.cachestore = cachestore
+
+        self._ftp: ftplib.FTP | None = None
+
+    def __repr__(self) -> str:
+        return f"GRUAN(product='{self.product}', site='{self.site}')"
+
+    def _connect(self) -> ftplib.FTP:
+        """Connect to the GRUAN FTP server."""
+        if self._ftp is None or self._ftp.sock is None:
+            self._ftp = ftplib.FTP(FTP_SERVER)
+            self._ftp.login()
+            return self._ftp
+
+        try:
+            self._ftp.pwd()  # check if connection is still alive
+        except (*ftplib.all_errors, ConnectionError):  # type: ignore[misc]
+            # If we encounter any error, reset the connection and retry
+            self._ftp = None
+            return self._connect()
+        return self._ftp
+
+    @property
+    def base_path_product(self) -> str:
+        """Get the base path for GRUAN data product on the FTP server."""
+        product, version = self.product.rsplit(".")
+        return f"/pub/data/gruan/processing/level2/{product}/version-{version.zfill(3)}"
+
+    @property
+    def base_path_site(self) -> str:
+        """Get the base path for GRUAN data site on the FTP server."""
+        return f"{self.base_path_product}/{self.site}"
+
+    def years(self) -> list[int]:
+        """Get a list of available years for the selected product and site."""
+        ftp = self._connect()
+        ftp.cwd(self.base_path_site)
+        years = ftp.nlst()
+        return sorted(int(year) for year in years)
+
+    def list_files(self, year: int | None = None) -> list[str]:
+        """List available files for a given year.
+
+        Parameters
+        ----------
+        year : int | None, optional
+            Year to list files for. If ``None``, list files for all available years. The later
+            may be time-consuming.
+
+        Returns
+        -------
+        list[str]
+            List of available GRUAN filenames for the specified year.
+        """
+        if year is None:
+            years = self.years()
+            return sorted(file for y in years for file in self.list_files(y))
+
+        path = f"{self.base_path_site}/{year}"
+
+        ftp = self._connect()
+        try:
+            ftp.cwd(path)
+        except ftplib.error_perm as e:
+            available = self.years()
+            if year not in available:
+                msg = f"No data available for year {year}. Available years are: {available}"
+                raise ValueError(msg) from e
+            raise
+        return sorted(ftp.nlst())
+
+    def get(self, filename: str) -> xr.Dataset:
+        """Download a GRUAN dataset by filename.
+
+        Parameters
+        ----------
+        filename : str
+            GRUAN filename to download, e.g. "LIN-RS-01_2_RS92-GDP_002_20210125T132400_1-000-001.nc"
+
+        Returns
+        -------
+        xr.Dataset
+            The GRUAN dataset retrieved from the FTP server. If caching is enabled,
+            the file is downloaded to the cache store and loaded from there on subsequent calls.
+        """
+        if self.cachestore is None:
+            return self._get_no_cache(filename)
+        return self._get_with_cache(filename)
+
+    def _get_no_cache(self, filename: str) -> xr.Dataset:
+        t, _ = extract_gruan_time(filename)
+        path = f"{self.base_path_site}/{t.year}/{filename}"
+
+        ftp = self._connect()
+
+        try:
+            # On windows, NamedTemporaryFile cannot be reopened while still open.
+            # After python 3.11 support is dropped, we can use delete_on_close=False
+            # in NamedTemporaryFile to streamline this.
+            with tempfile.NamedTemporaryFile(delete=False) as tmp:
+                ftp.retrbinary(f"RETR {path}", tmp.write)
+            return xr.load_dataset(tmp.name)
+        finally:
+            os.remove(tmp.name)
+
+    def _get_with_cache(self, filename: str) -> xr.Dataset:
+        if self.cachestore is None:
+            raise ValueError("Cachestore is not configured.")
+
+        lpath = self.cachestore.path(filename)
+        if self.cachestore.exists(lpath):
+            return xr.open_dataset(lpath)
+
+        t, _ = extract_gruan_time(filename)
+        path = f"{self.base_path_site}/{t.year}/{filename}"
+
+        ftp = self._connect()
+        with open(lpath, "wb") as f:
+            ftp.retrbinary(f"RETR {path}", f.write)
+
+        return xr.open_dataset(lpath)
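
A usage sketch following the "Typical workflow" in the docstring above. It needs network access to the GRUAN FTP server, and the indices into ``years``/``files`` are arbitrary:

    from pycontrails.datalib.gruan import GRUAN

    print(GRUAN.AVAILABLE)             # hardcoded snapshot of products -> sites
    gruan = GRUAN("RS92-GDP.2", "LIN")

    years = gruan.years()              # available launch years for the LIN site
    files = gruan.list_files(years[-1])
    ds = gruan.get(files[0])           # xarray.Dataset, cached on disk by default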
pycontrails/datalib/himawari/himawari.py
CHANGED
@@ -372,6 +372,8 @@ class Himawari:
     This interface requires the ``s3fs`` package to download data from the
     `AWS Public Dataset <https://registry.opendata.aws/himawari/>`_.

+    .. versionadded:: 0.57.0
+
     Parameters
     ----------
     region : HimawariRegion | str, optional
@@ -395,6 +397,8 @@ class Himawari:
         HimawariRegion
     """

+    __slots__ = ("bands", "bucket", "cachestore", "fs", "region")
+
     __marker = object()

     def __init__(
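
Both the GOES and Himawari interfaces gain the same ``__slots__`` tuple in this release. A generic Python illustration of what that changes (not pycontrails code):

    class Downloader:
        __slots__ = ("bands", "bucket", "cachestore", "fs", "region")

    d = Downloader()
    d.bands = ("B11", "B14")   # names listed in __slots__ can be assigned
    # d.extra = 1              # any other attribute assignment now raises AttributeError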
pycontrails/physics/thermo.py
CHANGED
@@ -84,7 +84,7 @@ def p_vapor(q: ArrayScalarLike, p: ArrayScalarLike) -> ArrayScalarLike:
     ArrayScalarTypeVar
         Vapor pressure, [:math:`Pa`]
     """
-    return q * p
+    return q * p / constants.epsilon


 def water_vapor_partial_pressure_along_mixing_line(
@@ -399,7 +399,7 @@ def rh(q: ArrayScalarLike, T: ArrayScalarLike, p: ArrayScalarLike) -> ArrayScala
     ArrayScalarLike
         Relative Humidity, :math:`[0 - 1]`
     """
-    return (q * p
+    return (q * p) / (constants.epsilon * e_sat_liquid(T))


 def rhi(q: ArrayScalarLike, T: ArrayScalarLike, p: ArrayScalarLike) -> ArrayScalarLike:
@@ -419,7 +419,7 @@ def rhi(q: ArrayScalarLike, T: ArrayScalarLike, p: ArrayScalarLike) -> ArrayScal
     ArrayScalarLike
         Relative Humidity over ice, :math:`[0 - 1]`
     """
-    return (q * p
+    return (q * p) / (constants.epsilon * e_sat_ice(T))


 # --------------
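
All three thermo changes above route through the same approximation for the water-vapor partial pressure, e ≈ q·p/ε, where ε = constants.epsilon is the ratio of the dry-air and water-vapor gas constants (roughly 0.622). A hedged numerical sketch; the ``e_sat_liquid``/``e_sat_ice`` helpers are not reproduced here:

    epsilon = 0.622  # approximate value of constants.epsilon (R_dry / R_vapor)

    def p_vapor(q: float, p: float) -> float:
        """Water vapor partial pressure [Pa] from specific humidity q [kg/kg] and pressure p [Pa]."""
        return q * p / epsilon

    e = p_vapor(2e-4, 25_000.0)   # ~8.0 Pa
    # rh  = p_vapor(q, p) / e_sat_liquid(T)   # as in thermo.rh above
    # rhi = p_vapor(q, p) / e_sat_ice(T)      # as in thermo.rhi above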
{pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pycontrails
-Version: 0.
+Version: 0.59.0
 Summary: Python library for modeling aviation climate impacts
 Author-email: "Contrails.org" <py@contrails.org>
 License-Expression: Apache-2.0
@@ -35,7 +35,6 @@ Requires-Dist: xarray>=2022.3
 Provides-Extra: complete
 Requires-Dist: pycontrails[ecmwf,gcp,gfs,jupyter,pyproj,sat,vis,zarr]; extra == "complete"
 Provides-Extra: dev
-Requires-Dist: fastparquet>=0.8; extra == "dev"
 Requires-Dist: ipdb>=0.13; extra == "dev"
 Requires-Dist: memory_profiler; extra == "dev"
 Requires-Dist: mypy>=1.8; extra == "dev"
{pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/RECORD
CHANGED
@@ -1,15 +1,15 @@
-pycontrails-0.
-pycontrails-0.
-pycontrails-0.
-pycontrails-0.
-pycontrails-0.
-pycontrails-0.
-pycontrails/_version.py,sha256=
+pycontrails-0.59.0.dist-info/RECORD,,
+pycontrails-0.59.0.dist-info/WHEEL,sha256=mer8vOuI-KlBtJIdzgyZALHdls6RZGRaE6RPt7dfKrk,138
+pycontrails-0.59.0.dist-info/top_level.txt,sha256=Z8J1R_AiBAyCVjNw6jYLdrA68PrQqTg0t3_Yek_IZ0Q,29
+pycontrails-0.59.0.dist-info/METADATA,sha256=s3R-ww4kBEE-scE3ZTAxgvdndp--LPpmF1Bgoa9dofc,9081
+pycontrails-0.59.0.dist-info/licenses/LICENSE,sha256=gJ-h7SFFD1mCfR6a7HILvEtodDT6Iig8bLXdgqR6ucA,10175
+pycontrails-0.59.0.dist-info/licenses/NOTICE,sha256=fiBPdjYibMpDzf8hqcn7TvAQ-yeK10q_Nqq24DnskYg,1962
+pycontrails/_version.py,sha256=4JDTQzRvD4h--EgLI4W50oyfLqa3M8rQJJovGr52AZA,714
 pycontrails/__init__.py,sha256=9ypSB2fKZlKghTvSrjWo6OHm5qfASwiTIvlMew3Olu4,2037
 pycontrails/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pycontrails/core/vector.py,sha256=hms3hea2Y86LOuZFfOqzGZToqpOVksgXikM_S5w355w,73498
 pycontrails/core/models.py,sha256=3mDTqp1V5aae9akuYwbMGIUEkESKSYTjZeyu2IiMW7s,43915
-pycontrails/core/interpolation.py,sha256=
+pycontrails/core/interpolation.py,sha256=vttZiW78RnHmzLv2lmflwR9po3ghRxwT7MtAj49WvUY,24836
 pycontrails/core/fleet.py,sha256=a_vVwAjMbjkhszg7ejP3V0yly_wJ9Va_OQEATG-9UHw,16572
 pycontrails/core/flight.py,sha256=dmqO1PxADMHIcK9U8XSnuXbP59ftQgeKvdI2xGP04ig,81460
 pycontrails/core/fuel.py,sha256=kJZ3P1lPm1L6rdPREM55XQ-VfJ_pt35cP4sO2Nnvmjs,4332
@@ -17,18 +17,19 @@ pycontrails/core/polygon.py,sha256=kXYwj1Xy-mo8GEWXFAO_OJEtZbGx11DE_sZw6iyNvN4,1
 pycontrails/core/cache.py,sha256=JQIy1sQf0Vil7wlEk9ZIvVacnOD4wM3X8-UkMFjR2wQ,28177
 pycontrails/core/__init__.py,sha256=p0O09HxdeXU0X5Z3zrHMlTfXa92YumT3fJ8wJBI5ido,856
 pycontrails/core/flightplan.py,sha256=0mvA3IO19Sap-7gwpmEIV35_mg6ChvajwhurvjZZt_U,7521
-pycontrails/core/met.py,sha256=
+pycontrails/core/met.py,sha256=C_2NoRovK6Irz20XyDDLNYRfOwfHcOFtI9hE-IeuvHA,104859
 pycontrails/core/aircraft_performance.py,sha256=CPIgIi5nUuCHiNVLAvZcWECRfakmMd-wUWd3lMA6oGM,28204
 pycontrails/core/airports.py,sha256=CzZrgJNZ7wtNv8vg9sJczMhFov7k0gmrGR4tRKCH8i8,6782
 pycontrails/core/met_var.py,sha256=g69vqbxpJeXEQU8vrrcoUR1PX3zCo2-k3au1Lv2TiIw,12027
-pycontrails/core/rgi_cython.cpython-312-darwin.so,sha256=
+pycontrails/core/rgi_cython.cpython-312-darwin.so,sha256=6p7q-UbhSrtIggDRiB8CphGikkElMQSrBWwCu4S3xOo,330104
 pycontrails/core/coordinates.py,sha256=0ySsHtqTon7GMbuwmmxMbI92j3ueMteJZh4xxNm5zto,5391
-pycontrails/datalib/goes.py,sha256=
+pycontrails/datalib/goes.py,sha256=QFy7peNrfQcmujEcmUzK8zR1xBxli2vHxqak84PtYTc,27632
 pycontrails/datalib/landsat.py,sha256=6ylDkAjnyX7b4ZbHn4bprO8HB8ADPFyMkwWehIs8FLg,20915
 pycontrails/datalib/geo_utils.py,sha256=w6VYhJQeMpBXaBclqANv4Nn0yqPIxlQr6GTUpjArTj0,9070
 pycontrails/datalib/__init__.py,sha256=hW9NWdFPC3y_2vHMteQ7GgQdop3917MkDaf5ZhU2RBY,369
+pycontrails/datalib/gruan.py,sha256=n-MI2iCAi3FdlbuWJKSYJFeKCtiRn3e-o4pRrUOC_dU,11673
 pycontrails/datalib/sentinel.py,sha256=ed1l1avq8lBvQinY_vNSsWRcpqxUdAPY61AGyPcLawo,23532
-pycontrails/datalib/himawari/himawari.py,sha256=
+pycontrails/datalib/himawari/himawari.py,sha256=GueKQsytmuy4ojaECnRq-PZKNfIcJLvftDn56fqwMLQ,23424
 pycontrails/datalib/himawari/__init__.py,sha256=SWupVbeuyK07IPDCgiNjN6hoLB7hlceabJ3fixhDkl0,619
 pycontrails/datalib/himawari/header_struct.py,sha256=WbPkNBNUVm8tGKU8wpj4rldY17g5MHQ_OfbWicZSokc,9975
 pycontrails/datalib/_met_utils/metsource.py,sha256=mlKcRko5ZKuYK5uwWn6AAgUSJLMQAYq1nFqskVMGgYo,23999
@@ -40,7 +41,7 @@ pycontrails/datalib/ecmwf/variables.py,sha256=lU3BNe265XVhCXvdMwZqfkWQwtsetZxVRL
 pycontrails/datalib/ecmwf/hres_model_level.py,sha256=CcxMKiFJyLvM9njmBVywAXJxyWE7atsgHXBubKJQqHM,17779
 pycontrails/datalib/ecmwf/__init__.py,sha256=wdfhplEaW2UKTItIoshTtVEjbPyfDYoprTJNxbKZuvA,2021
 pycontrails/datalib/ecmwf/common.py,sha256=axOxvdrey9YD34uk0Ocav08MxKvC2uVaiwvyQgFZMEw,3970
-pycontrails/datalib/ecmwf/model_levels.py,sha256=
+pycontrails/datalib/ecmwf/model_levels.py,sha256=xdH2B5KvzAIk2_OIOgZXFIZ6ndR6MxxqZsvstO0cNmg,16996
 pycontrails/datalib/ecmwf/ifs.py,sha256=_1UarorPp9VlgFZc-NnZy8YnfEqBdp7GV1A-ye6JqS8,10733
 pycontrails/datalib/ecmwf/static/model_level_dataframe_v20240418.csv,sha256=PmvGLRzn6uuCKSwiasSuVcehvvmSaqP7cnLuN6hhCQQ,9788
 pycontrails/datalib/gfs/gfs.py,sha256=VqS0MRLawgzkBDpjDUYoswXByIy6XUqA9XP7lM1ueBk,22238
@@ -100,7 +101,7 @@ pycontrails/models/cocip/cocip_params.py,sha256=BWmTt6yE4m-LM7lyCtj05FK3wVvU9n7i
 pycontrails/models/cocip/wake_vortex.py,sha256=F5S8n4eBrBM-7qNcVUtX3IrXD7Kt9pWnrKj6UK-HGeA,14555
 pycontrails/models/cocip/cocip_uncertainty.py,sha256=TZ85xAbDc5zRgQKP7wb3AfHoUIvkfHycuX86dwZCqwM,12257
 pycontrails/models/cocip/radiative_heating.py,sha256=1U4SQWwogtyQ2u6J996kAHP0OfpZ3hH2_x4Cyt3Cy8U,18984
-pycontrails/models/cocip/contrail_properties.py,sha256=
+pycontrails/models/cocip/contrail_properties.py,sha256=V1ACeeYaD_a_piqarEnjzxOvYh7cP-1c-xaWv5Kgc8U,55703
 pycontrails/models/cocip/unterstrasser_wake_vortex.py,sha256=bIRS-Z4MRMdkYtth2RaDe5h1ZN0HvCE_Sw96PXQEHKQ,18931
 pycontrails/models/ps_model/__init__.py,sha256=Fuum5Rq8ya8qkvbeq2wh6NDo-42RCRnK1Y-2syYy0Ck,553
 pycontrails/models/ps_model/ps_model.py,sha256=fgFekJpGuAu73KvpfLhlAbIwR7JJGwQpLILWmrONywc,31925
@@ -116,7 +117,7 @@ pycontrails/physics/geo.py,sha256=ITK23l1A2lzjNPTFC8ZKyQH59I5Cy_TvuvM_gbALo94,36
 pycontrails/physics/units.py,sha256=p-6PzFLpVCMpvmfrhXVh3Hs-nMJw9Y1x-hvgnL9Lo9c,12281
 pycontrails/physics/constants.py,sha256=JHYL2IJY7del2BE_1QfKaEwtIwkbtyHvyxlm_JPHR90,3201
 pycontrails/physics/__init__.py,sha256=_1eWbEy6evEWdfJCEkwDiSdpiDNzNWEPVqaPekHyhwU,44
-pycontrails/physics/thermo.py,sha256=
+pycontrails/physics/thermo.py,sha256=bKn1rlHHIoLvkrd42Vpox6pQVAu4OjonYSmxXXFvPug,15338
 pycontrails/physics/jet.py,sha256=Je1d3vgbBEaVIAL1WZ3C-4p2f9fy9dWOjP5vFVsGGh8,30358
 pycontrails/physics/static/iata-cargo-load-factors-20250221.csv,sha256=ixsnQk1DyGxHMo0pDy4aOoQIwgOyrGfhMRPumEwPMBc,3841
 pycontrails/physics/static/iata-passenger-load-factors-20250221.csv,sha256=Q2olRIqUpbOaavvM5ikG8m1v1YQAN3KLNHeFDPvM53Q,3835
{pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/WHEEL
File without changes

{pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/licenses/LICENSE
File without changes

{pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/licenses/NOTICE
File without changes

{pycontrails-0.58.0.dist-info → pycontrails-0.59.0.dist-info}/top_level.txt
File without changes