pycontrails-0.57.0-cp311-cp311-macosx_11_0_arm64.whl → pycontrails-0.59.0-cp311-cp311-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pycontrails might be problematic.
Files changed (40)
  1. pycontrails/_version.py +3 -3
  2. pycontrails/core/aircraft_performance.py +1 -1
  3. pycontrails/core/cache.py +2 -2
  4. pycontrails/core/fleet.py +2 -7
  5. pycontrails/core/flight.py +2 -7
  6. pycontrails/core/interpolation.py +45 -67
  7. pycontrails/core/met.py +62 -37
  8. pycontrails/core/polygon.py +3 -3
  9. pycontrails/core/rgi_cython.cpython-311-darwin.so +0 -0
  10. pycontrails/core/vector.py +3 -8
  11. pycontrails/datalib/_met_utils/metsource.py +4 -7
  12. pycontrails/datalib/ecmwf/common.py +2 -2
  13. pycontrails/datalib/ecmwf/hres.py +2 -2
  14. pycontrails/datalib/ecmwf/ifs.py +1 -1
  15. pycontrails/datalib/ecmwf/model_levels.py +1 -1
  16. pycontrails/datalib/gfs/gfs.py +1 -1
  17. pycontrails/datalib/goes.py +10 -3
  18. pycontrails/datalib/gruan.py +343 -0
  19. pycontrails/datalib/himawari/header_struct.py +1 -1
  20. pycontrails/datalib/himawari/himawari.py +24 -7
  21. pycontrails/datalib/leo_utils/sentinel_metadata.py +9 -9
  22. pycontrails/ext/synthetic_flight.py +2 -2
  23. pycontrails/models/cocip/cocip_uncertainty.py +1 -1
  24. pycontrails/models/cocip/contrail_properties.py +1 -1
  25. pycontrails/models/cocip/output_formats.py +1 -1
  26. pycontrails/models/cocipgrid/cocip_grid.py +3 -3
  27. pycontrails/models/dry_advection.py +1 -1
  28. pycontrails/models/extended_k15.py +4 -4
  29. pycontrails/models/humidity_scaling/humidity_scaling.py +2 -2
  30. pycontrails/models/ps_model/ps_grid.py +2 -2
  31. pycontrails/models/sac.py +1 -1
  32. pycontrails/models/tau_cirrus.py +1 -1
  33. pycontrails/physics/thermo.py +4 -4
  34. pycontrails/utils/iteration.py +1 -1
  35. {pycontrails-0.57.0.dist-info → pycontrails-0.59.0.dist-info}/METADATA +5 -6
  36. {pycontrails-0.57.0.dist-info → pycontrails-0.59.0.dist-info}/RECORD +40 -39
  37. {pycontrails-0.57.0.dist-info → pycontrails-0.59.0.dist-info}/WHEEL +0 -0
  38. {pycontrails-0.57.0.dist-info → pycontrails-0.59.0.dist-info}/licenses/LICENSE +0 -0
  39. {pycontrails-0.57.0.dist-info → pycontrails-0.59.0.dist-info}/licenses/NOTICE +0 -0
  40. {pycontrails-0.57.0.dist-info → pycontrails-0.59.0.dist-info}/top_level.txt +0 -0
pycontrails/datalib/ecmwf/hres.py

@@ -708,7 +708,7 @@ class HRES(ECMWFAPI):
 
         # set forecast time if it's not defined (this occurs when only the paths param is provided)
         if not hasattr(self, "forecast_time"):
-            self.forecast_time = ds["time"].values.astype("datetime64[s]").tolist()  # type: ignore[assignment]
+            self.forecast_time = ds["time"].values.astype("datetime64[s]").tolist()
 
         # check that forecast_time is correct if defined
         # note the "time" coordinate here is the HRES forecast_time
@@ -723,7 +723,7 @@ class HRES(ECMWFAPI):
 
         # set timesteps if not defined
         # note that "time" is now the actual timestep coordinates
        if not self.timesteps:
-            self.timesteps = ds["time"].values.astype("datetime64[s]").tolist()  # type: ignore[assignment]
+            self.timesteps = ds["time"].values.astype("datetime64[s]").tolist()
 
        self.cache_dataset(ds)
pycontrails/datalib/ecmwf/ifs.py

@@ -149,7 +149,7 @@ class IFS(metsource.MetDataSource):
         else:
             # set timesteps from dataset "time" coordinates
             # np.datetime64 doesn't covert to list[datetime] unless its unit is us
-            self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()  # type: ignore[assignment]
+            self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()
 
         # downselect hyam/hybm coefficients by the "lev" coordinate
         # (this is a 1-indexed verison of nhym)
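
The comment in this hunk (and the matching GFS hunk below) points at a real numpy quirk worth illustrating: .tolist() only yields datetime.datetime objects for datetime64 units between seconds and microseconds; nanoseconds and finer fall back to plain integers. A minimal standalone sketch, not pycontrails code:

    import numpy as np

    arr = np.array(["2024-01-01T06"], dtype="datetime64[ns]")
    arr.tolist()                           # [1704088800000000000], plain nanosecond ints
    arr.astype("datetime64[us]").tolist()  # [datetime.datetime(2024, 1, 1, 6, 0)]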
pycontrails/datalib/ecmwf/model_levels.py

@@ -139,9 +139,9 @@ def model_level_pressure(sp: xr.DataArray, model_levels: npt.ArrayLike) -> xr.Da
            [564.02437124, 560.81744834, 557.61052544, 554.40360254],
            [551.19667964, 547.98975674, 544.78283384, 541.57591094]]])
     Coordinates:
+      * model_level  (model_level) int64 16B 80 100
       * longitude    (longitude) float64 32B -180.0 -60.0 60.0 180.0
       * latitude     (latitude) float64 32B -90.0 -30.0 30.0 90.0
-      * model_level  (model_level) int64 16B 80 100
 
     See Also
     --------
pycontrails/datalib/gfs/gfs.py

@@ -595,7 +595,7 @@ class GFSForecast(metsource.MetDataSource):
         else:
             # set timesteps from dataset "time" coordinates
             # np.datetime64 doesn't covert to list[datetime] unless its unit is us
-            self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()  # type: ignore[assignment]
+            self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()
 
         # if "level" is not in dims and
         # length of the requested pressure levels is 1
pycontrails/datalib/goes.py

@@ -308,9 +308,14 @@ def gcs_goes_path(
     fs = fs or gcsfs.GCSFileSystem(token="anon")
     rpaths = fs.glob(rpath)
 
-    out = [r for r in rpaths if _extract_band_from_rpath(r) in bands]
-    if not out:
-        raise RuntimeError(f"No data found for {time} in {region} for bands {bands}")
+    out = []
+    for r in rpaths:
+        if (band := _extract_band_from_rpath(r)) in bands:
+            out.append(r)
+            bands.remove(band)
+
+    if bands:
+        raise FileNotFoundError(f"No data found for {time} in {region} for bands {bands}")
     return out
 
 
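The rewritten loop consumes bands as it matches, so each band is returned at most once and any leftover entries identify exactly which requested bands are missing (the old version only failed when nothing at all matched). A rough standalone sketch of the behavior, with a hypothetical stand-in for _extract_band_from_rpath and illustrative filenames:

    def _band(rpath: str) -> str:  # hypothetical stand-in for _extract_band_from_rpath
        return "B" + rpath.split("C")[1][:2]

    bands = {"B11", "B14"}
    rpaths = ["OR_ABI-L1b-RadF-M6C11_G16.nc", "OR_ABI-L1b-RadF-M6C13_G16.nc"]

    out = []
    for r in rpaths:
        if (band := _band(r)) in bands:
            out.append(r)
            bands.remove(band)

    assert out == ["OR_ABI-L1b-RadF-M6C11_G16.nc"]
    assert bands == {"B14"}  # leftover bands trigger the FileNotFoundError above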
@@ -427,6 +432,8 @@ class GOES:
 
     """
 
+    __slots__ = ("bands", "bucket", "cachestore", "fs", "region")
+
     __marker = object()
 
     def __init__(
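
Adding __slots__ to GOES (and, below, to Himawari and GRUAN) fixes the set of instance attributes and drops the per-instance __dict__. The general Python behavior, as a quick illustration:

    class Slotted:
        __slots__ = ("x",)

    s = Slotted()
    s.x = 1
    # s.y = 2  # would raise AttributeError: 'Slotted' object has no attribute 'y'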
pycontrails/datalib/gruan.py (new file)

@@ -0,0 +1,343 @@
+"""Support for accessing `GRUAN <https://www.gruan.org/>`_ data over FTP."""
+
+import datetime
+import ftplib
+import functools
+import os
+import tempfile
+from concurrent import futures
+
+import xarray as xr
+
+from pycontrails.core import cache
+
+#: GRUAN FTP server address
+FTP_SERVER = "ftp.ncdc.noaa.gov"
+
+#: Base path for GRUAN data on the FTP server
+FTP_BASE_PATH = "/pub/data/gruan/processing/level2"
+
+#: All available GRUAN products and sites on the FTP server as of 2025-10
+#: This is simply the hardcoded output of :func:`available_sites` at that time to
+#: avoid a lookup that changes infrequently.
+AVAILABLE_PRODUCTS_TO_SITES = {
+    "RS-11G-GDP.1": ["SYO", "TAT", "NYA", "LIN"],
+    "RS41-EDT.1": ["LIN", "POT", "SNG"],
+    "RS92-GDP.1": ["BOU", "CAB", "LIN", "PAY", "POT", "SOD", "TAT"],
+    "RS92-GDP.2": [
+        "BAR",
+        "BEL",
+        "BOU",
+        "CAB",
+        "DAR",
+        "GRA",
+        "LAU",
+        "LIN",
+        "MAN",
+        "NAU",
+        "NYA",
+        "PAY",
+        "POT",
+        "REU",
+        "SGP",
+        "SOD",
+        "TAT",
+        "TEN",
+        "GVN",
+    ],
+    "RS92-PROFILE-BETA.2": ["BOU", "CAB", "LIN", "POT", "SOD", "TAT"],
+    "RS92-PROFILE-BETA.3": ["BOU", "CAB", "LIN", "POT", "SOD", "TAT"],
+}
+
+
+def extract_gruan_time(filename: str) -> tuple[datetime.datetime, int]:
+    """Extract launch time and revision number from a GRUAN filename.
+
+    Parameters
+    ----------
+    filename : str
+        GRUAN filename, e.g. "LIN-RS-01_2_RS92-GDP_002_20210125T132400_1-000-001.nc"
+
+    Returns
+    -------
+    tuple[datetime.datetime, int]
+        Launch time as a datetime object and revision number as an integer.
+    """
+    parts = filename.split("_")
+    if len(parts) != 6:
+        raise ValueError(f"Unexpected filename format: {filename}")
+    time_part = parts[4]
+    try:
+        time = datetime.datetime.strptime(time_part, "%Y%m%dT%H%M%S")
+    except ValueError as e:
+        raise ValueError(f"Unexpected time segment: {time_part}") from e
+
+    revision_part = parts[5].removesuffix(".nc")
+    if not revision_part[-3:].isdigit():
+        raise ValueError(f"Unexpected revision segment: {revision_part}")
+    revision = int(revision_part[-3:])
+
+    return time, revision
+
+
+def _fetch_product_tree(prod: str) -> dict[str, list[str]]:
+    result = {}
+    with ftplib.FTP(FTP_SERVER) as ftp:
+        ftp.login()
+        prod_path = f"{FTP_BASE_PATH}/{prod}"
+        versions = [v.split("/")[-1] for v in ftp.nlst(prod_path)]
+
+        for v in versions:
+            version_path = f"{prod_path}/{v}"
+            sites = [s.split("/")[-1] for s in ftp.nlst(version_path)]
+
+            key = f"{prod}.{int(v.split('-')[-1])}"
+            result[key] = sites
+    return result
+
+
+@functools.cache
+def available_sites() -> dict[str, list[str]]:
+    """Get a list of available GRUAN sites for each supported product.
+
+    The :attr:`GRUAN.AVAILABLE` is a hardcoded snapshot of this data. The data returned
+    by this function does not change frequently, so it is cached for efficiency.
+
+    Returns
+    -------
+    dict[str, list[str]]
+        Mapping of product names to lists of available site identifiers.
+    """
+    with ftplib.FTP(FTP_SERVER) as ftp:
+        ftp.login()
+        files = [p.split("/")[-1] for p in ftp.nlst(FTP_BASE_PATH)]
+    products = [p for p in files if "." not in p]  # crude filter to exclude non-directories
+
+    # Compute each product tree in separate thread to speed up retrieval
+    # The FTP server only allows up to 5 connections from the same client
+    out = {}
+    with futures.ThreadPoolExecutor(max_workers=min(len(products), 5)) as tpe:
+        result = tpe.map(_fetch_product_tree, products)
+        for r in result:
+            out.update(r)
+
+    return out
+
+
+class GRUAN:
+    """Access `GRUAN <https://www.gruan.org/>`_ data over anonymous FTP.
+
+    GRUAN is the Global Climate Observing System Reference Upper-Air Network. It provides
+    high-quality measurements of atmospheric variables from ground to stratosphere
+    through a global network of radiosonde stations.
+
+    .. versionadded:: 0.59.0
+
+    Parameters
+    ----------
+    product : str
+        GRUAN data product. See :attr:`AVAILABLE` for available products. These currently
+        include:
+
+        - ``RS92-GDP.2``
+        - ``RS92-GDP.1``
+        - ``RS92-PROFILE-BETA.2``
+        - ``RS92-PROFILE-BETA.3``
+        - ``RS41-EDT.1``
+        - ``RS-11G-GDP.1``
+    site : str
+        GRUAN station identifier. See :attr:`AVAILABLE` for available sites for each product.
+    cachestore : cache.CacheStore | None, optional
+        Cache store to use for downloaded files. If not provided, a disk cache store
+        will be created in the user cache directory under ``gruan/``. Set to ``None``
+        to disable caching.
+
+    Notes
+    -----
+    The FTP files have the following hierarchy::
+
+        /pub/data/gruan/processing/level2/
+            {product-root}/
+                version-{NNN}/
+                    {SITE}/
+                        {YYYY}/
+                            <filename>.nc
+
+    - {product-root} is the product name without the trailing version integer (e.g. ``RS92-GDP``)
+    - version-{NNN} zero-pads to three digits (suffix ``.2`` -> ``version-002``)
+    - {SITE} is the station code (e.g. ``LIN``)
+    - {YYYY} is launch year
+    - Filenames encode launch time and revision (parsed by :func:`extract_gruan_time`)
+
+    Discovery helpers methods:
+
+    - :attr:`AVAILABLE` or :func:`available_sites` -> products and sites
+    - :meth:`years` -> list available years for (product, site)
+    - :meth:`list_files` -> list available NetCDF files for the given year
+    - :meth:`get` -> download and open a single NetCDF file as an :class:`xarray.Dataset`
+
+    Typical workflow:
+
+    1. Inspect :attr:`AVAILABLE` (fast) or call :func:`available_sites` (live)
+    2. Instantiate ``GRUAN(product, site)``
+    3. Call ``years()``
+    4. Call ``list_files(year)``
+    5. Call ``get(filename)`` for an ``xarray.Dataset``
+
+    """
+
+    # Convenience access to available sites
+    available_sites = staticmethod(available_sites)
+    AVAILABLE = AVAILABLE_PRODUCTS_TO_SITES
+
+    __slots__ = ("_ftp", "cachestore", "product", "site")
+
+    __marker = object()
+
+    def __init__(
+        self,
+        product: str,
+        site: str,
+        cachestore: cache.CacheStore | None = __marker,  # type: ignore[assignment]
+    ) -> None:
+        known = AVAILABLE_PRODUCTS_TO_SITES
+
+        if product not in known:
+            known = available_sites()  # perhaps AVAILABLE_PRODUCTS_TO_SITES is outdated
+            if product not in known:
+                raise ValueError(f"Unknown GRUAN product: {product}. Known products: {list(known)}")
+        self.product = product
+
+        if site not in known[product]:
+            known = available_sites()  # perhaps AVAILABLE_PRODUCTS_TO_SITES is outdated
+            if site not in known[product]:
+                raise ValueError(
+                    f"Unknown GRUAN site '{site}' for product '{product}'. "
+                    f"Known sites: {known[product]}"
+                )
+        self.site = site
+
+        if cachestore is self.__marker:
+            cache_root = cache._get_user_cache_dir()
+            cache_dir = f"{cache_root}/gruan"
+            cachestore = cache.DiskCacheStore(cache_dir=cache_dir)
+        self.cachestore = cachestore
+
+        self._ftp: ftplib.FTP | None = None
+
+    def __repr__(self) -> str:
+        return f"GRUAN(product='{self.product}', site='{self.site}')"
+
+    def _connect(self) -> ftplib.FTP:
+        """Connect to the GRUAN FTP server."""
+        if self._ftp is None or self._ftp.sock is None:
+            self._ftp = ftplib.FTP(FTP_SERVER)
+            self._ftp.login()
+            return self._ftp
+
+        try:
+            self._ftp.pwd()  # check if connection is still alive
+        except (*ftplib.all_errors, ConnectionError):  # type: ignore[misc]
+            # If we encounter any error, reset the connection and retry
+            self._ftp = None
+            return self._connect()
+        return self._ftp
+
+    @property
+    def base_path_product(self) -> str:
+        """Get the base path for GRUAN data product on the FTP server."""
+        product, version = self.product.rsplit(".")
+        return f"/pub/data/gruan/processing/level2/{product}/version-{version.zfill(3)}"
+
+    @property
+    def base_path_site(self) -> str:
+        """Get the base path for GRUAN data site on the FTP server."""
+        return f"{self.base_path_product}/{self.site}"
+
+    def years(self) -> list[int]:
+        """Get a list of available years for the selected product and site."""
+        ftp = self._connect()
+        ftp.cwd(self.base_path_site)
+        years = ftp.nlst()
+        return sorted(int(year) for year in years)
+
+    def list_files(self, year: int | None = None) -> list[str]:
+        """List available files for a given year.
+
+        Parameters
+        ----------
+        year : int | None, optional
+            Year to list files for. If ``None``, list files for all available years. The later
+            may be time-consuming.
+
+        Returns
+        -------
+        list[str]
+            List of available GRUAN filenames for the specified year.
+        """
+        if year is None:
+            years = self.years()
+            return sorted(file for y in years for file in self.list_files(y))
+
+        path = f"{self.base_path_site}/{year}"
+
+        ftp = self._connect()
+        try:
+            ftp.cwd(path)
+        except ftplib.error_perm as e:
+            available = self.years()
+            if year not in available:
+                msg = f"No data available for year {year}. Available years are: {available}"
+                raise ValueError(msg) from e
+            raise
+        return sorted(ftp.nlst())
+
+    def get(self, filename: str) -> xr.Dataset:
+        """Download a GRUAN dataset by filename.
+
+        Parameters
+        ----------
+        filename : str
+            GRUAN filename to download, e.g. "LIN-RS-01_2_RS92-GDP_002_20210125T132400_1-000-001.nc"
+
+        Returns
+        -------
+        xr.Dataset
+            The GRUAN dataset retrieved from the FTP server. If caching is enabled,
+            the file is downloaded to the cache store and loaded from there on subsequent calls.
+        """
+        if self.cachestore is None:
+            return self._get_no_cache(filename)
+        return self._get_with_cache(filename)
+
+    def _get_no_cache(self, filename: str) -> xr.Dataset:
+        t, _ = extract_gruan_time(filename)
+        path = f"{self.base_path_site}/{t.year}/{filename}"
+
+        ftp = self._connect()
+
+        try:
+            # On windows, NamedTemporaryFile cannot be reopened while still open.
+            # After python 3.11 support is dropped, we can use delete_on_close=False
+            # in NamedTemporaryFile to streamline this.
+            with tempfile.NamedTemporaryFile(delete=False) as tmp:
+                ftp.retrbinary(f"RETR {path}", tmp.write)
+            return xr.load_dataset(tmp.name)
+        finally:
+            os.remove(tmp.name)
+
+    def _get_with_cache(self, filename: str) -> xr.Dataset:
+        if self.cachestore is None:
+            raise ValueError("Cachestore is not configured.")
+
+        lpath = self.cachestore.path(filename)
+        if self.cachestore.exists(lpath):
+            return xr.open_dataset(lpath)
+
+        t, _ = extract_gruan_time(filename)
+        path = f"{self.base_path_site}/{t.year}/{filename}"
+
+        ftp = self._connect()
+        with open(lpath, "wb") as f:
+            ftp.retrbinary(f"RETR {path}", f.write)
+
+        return xr.open_dataset(lpath)
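
A hedged end-to-end sketch of the workflow documented in the GRUAN docstring (requires network access; the indexing assumes at least one year and one file exist):

    from pycontrails.datalib.gruan import GRUAN, extract_gruan_time

    gruan = GRUAN("RS92-GDP.2", "LIN")    # product and site from AVAILABLE
    years = gruan.years()                 # available launch years
    files = gruan.list_files(years[-1])   # NetCDF filenames for the latest year
    launch_time, revision = extract_gruan_time(files[0])
    ds = gruan.get(files[0])              # xarray.Dataset, disk-cached by default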
pycontrails/datalib/himawari/header_struct.py

@@ -205,7 +205,7 @@ HEADER_STRUCT_SCHEMA: dict[int, _HeaderBlock] = {
 }
 
 
-def parse_himawari_header(content: bytes) -> dict:
+def parse_himawari_header(content: bytes) -> dict[str, dict[str, Any]]:
     """Parse the Himawari header data.
 
     Skips variable-length fields and spares.
pycontrails/datalib/himawari/himawari.py

@@ -160,7 +160,10 @@ def _extract_band_from_rpath(rpath: str) -> str:
     return f"B{suffix[:2]}"  # B??
 
 
-def _mask_invalid(data: npt.NDArray[np.uint16], calib_info: dict) -> npt.NDArray[np.float32]:
+def _mask_invalid(
+    data: npt.NDArray[np.uint16],
+    calib_info: dict[str, Any],
+) -> npt.NDArray[np.float32]:
     """Mask invalid data."""
     error_pixel = calib_info["count_error_pixels"]
     outside_pixel = calib_info["count_outside_scan_area"]
@@ -218,7 +221,9 @@ def _counts_to_radiance(
     return counts * gain + const
 
 
-def _load_image_data(content: bytes, metadata: dict) -> npt.NDArray[np.float32]:
+def _load_image_data(
+    content: bytes, metadata: dict[str, dict[str, Any]]
+) -> npt.NDArray[np.float32]:
     counts = _load_raw_counts(content, metadata)
 
     calib_info = metadata["calibration_information"]
@@ -230,7 +235,10 @@ def _load_image_data(content: bytes, metadata: dict) -> npt.NDArray[np.float32]:
     return _radiance_to_brightness_temperature(radiance, calib_info)
 
 
-def _ahi_fixed_grid(proj_info: dict, arr: np.ndarray) -> tuple[xr.DataArray, xr.DataArray]:
+def _ahi_fixed_grid(
+    proj_info: dict[str, Any],
+    arr: np.ndarray,
+) -> tuple[xr.DataArray, xr.DataArray]:
     n_lines, n_columns = arr.shape
 
     i = np.arange(n_columns, dtype=np.float32)
@@ -277,7 +285,11 @@ def _himawari_proj4_string(proj_info: dict[str, Any]) -> str:
     return f"+proj=geos +h={h} +a={a} +b={b} +lon_0={lon} +sweep=x +units=m +no_defs"
 
 
-def _earth_disk_mask(proj_info: dict, x: xr.DataArray, y: xr.DataArray) -> npt.NDArray[np.bool_]:
+def _earth_disk_mask(
+    proj_info: dict[str, Any],
+    x: xr.DataArray,
+    y: xr.DataArray,
+) -> npt.NDArray[np.bool_]:
     """Return a boolean mask where True indicates pixels over the Earth disk."""
     a = proj_info["equatorial_radius"] * 1000.0  # km -> m
     b = proj_info["polar_radius"] * 1000.0  # km -> m
@@ -301,7 +313,7 @@ def _earth_disk_mask(proj_info: dict, x: xr.DataArray, y: xr.DataArray) -> npt.N
     return discriminant >= 0.0
 
 
-def _parse_start_time(metadata: dict) -> datetime.datetime:
+def _parse_start_time(metadata: dict[str, dict[str, Any]]) -> datetime.datetime:
     """Parse the start time from the metadata."""
     mjd_value = metadata["basic_information"]["obs_start_time"]
     mjd_epoch = datetime.datetime(1858, 11, 17)
@@ -355,9 +367,12 @@ def _parse_s3_raw_data(raw_data: list[bytes]) -> xr.DataArray:
 
 
 class Himawari:
-    """Support for Himawari-8/9 satellite data accessed via AWS S3.
+    """Support for Himawari-8/9 satellite data access via AWS S3.
 
-    This interface requires the ``s3fs`` package.
+    This interface requires the ``s3fs`` package to download data from the
+    `AWS Public Dataset <https://registry.opendata.aws/himawari/>`_.
+
+    .. versionadded:: 0.57.0
 
     Parameters
     ----------
@@ -382,6 +397,8 @@ class Himawari:
     HimawariRegion
     """
 
+    __slots__ = ("bands", "bucket", "cachestore", "fs", "region")
+
     __marker = object()
 
     def __init__(
pycontrails/datalib/leo_utils/sentinel_metadata.py

@@ -1,10 +1,10 @@
 """Download and parse Sentinel metadata."""
 
+import datetime
 import os
 import re
 import xml.etree.ElementTree as ET
 from collections.abc import Collection
-from datetime import datetime, timedelta, timezone
 
 import numpy as np
 import numpy.typing as npt
@@ -464,10 +464,10 @@ def parse_ephemeris_sentinel(datatsrip_metadata_path: str) -> pd.DataFrame:
         if position_elem is None or position_elem.text is None:
             continue  # skip if missing
 
-        gps_time = datetime.strptime(gps_time_elem.text, "%Y-%m-%dT%H:%M:%S")
+        gps_time = datetime.datetime.strptime(gps_time_elem.text, "%Y-%m-%dT%H:%M:%S")
 
         # Convert GPS to UTC time as there is a few seconds between them
-        utc_time = gps_to_utc(gps_time).replace(tzinfo=timezone.utc)
+        utc_time = gps_to_utc(gps_time).replace(tzinfo=datetime.UTC)
 
         # Parse positions in ECEF coordinate system
         x, y, z = map(float, position_elem.text.split())
@@ -643,30 +643,30 @@ def get_time_delay_detectors(
 # Time helper functions
 
 
-def gps_to_utc(gps_time: datetime) -> datetime:
+def gps_to_utc(gps_time: datetime.datetime) -> datetime.datetime:
     """Convert GPS time (datetime object) to UTC time.
 
     https://gssc.esa.int/navipedia/index.php/Transformations_between_Time_Systems
     """
 
-    gps_tai_offset = timedelta(seconds=19)
-    utc_tai_offset = timedelta(seconds=37)
+    gps_tai_offset = datetime.timedelta(seconds=19)
+    utc_tai_offset = datetime.timedelta(seconds=37)
 
     # Convert GPS time to UTC
     return gps_time + gps_tai_offset - utc_tai_offset
 
 
-def _calculate_average_time(times: Collection[datetime]) -> datetime:
+def _calculate_average_time(times: Collection[datetime.datetime]) -> datetime.datetime:
     """Return the average time from a list of times."""
     # Compute the average time
     avg_timestamp = sum(t.timestamp() for t in times) / len(times)
-    return datetime.fromtimestamp(avg_timestamp)
+    return datetime.datetime.fromtimestamp(avg_timestamp)
 
 
 def _calculate_timedeltas(detector_times: dict[int, str]) -> dict[int, pd.Timedelta]:
     """Calculate the time difference between a detector and the average time."""
     detector_times_dt = {
-        detector_id: datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%f")
+        detector_id: datetime.datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%f")
        for detector_id, time_str in detector_times.items()
     }
 
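The two constants in gps_to_utc encode GPS = TAI - 19 s and UTC = TAI - 37 s (the leap-second count assumed by this code), so the conversion nets out to subtracting 18 s. A worked check:

    import datetime

    gps = datetime.datetime(2021, 1, 25, 13, 24, 0)
    utc = gps + datetime.timedelta(seconds=19) - datetime.timedelta(seconds=37)
    assert utc == gps - datetime.timedelta(seconds=18)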
pycontrails/ext/synthetic_flight.py

@@ -305,8 +305,8 @@ class SyntheticFlight:
             *src,
             az,
             npts,
-            m_per_timestep,  # type: ignore
-            return_back_azimuth=False,  # type: ignore
+            m_per_timestep,
+            return_back_azimuth=False,
         )
         longitude = np.asarray(result.lons)
         latitude = np.asarray(result.lats)
pycontrails/models/cocip/cocip_uncertainty.py

@@ -30,7 +30,7 @@ class habit_dirichlet(rv_frozen):
     - Table 2 in :cite:`schumannEffectiveRadiusIce2011`
     """
 
-    def __init__(self, C: float = 96.0):
+    def __init__(self, C: float = 96.0) -> None:
        self.C = C
 
    def rvs(self, *args: Any, **kwds: Any) -> npt.NDArray[np.float32]:
pycontrails/models/cocip/contrail_properties.py

@@ -236,7 +236,7 @@ def initial_ice_particle_number(
         phase, [:math:`# m^{-1}`]
     """
     if min_aei is not None:
-        aei = np.clip(aei, min=min_aei)  # type: ignore[arg-type,call-overload]
+        aei = np.clip(aei, min_aei, None)
     return fuel_dist * aei
 
 
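The replacement calls np.clip(a, a_min, a_max) positionally with a_max=None, i.e. a lower bound only, equivalent to the old keyword form but without the type-checker suppressions. A quick check:

    import numpy as np

    aei = np.array([1.0e13, 1.0e15])
    np.clip(aei, 1.0e14, None)  # array([1.e+14, 1.e+15]): values below the bound are raised to it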
pycontrails/models/cocip/output_formats.py

@@ -2228,7 +2228,7 @@ def compare_cocip_with_goes(
     fig = plt.figure(figsize=(1.2 * x_dim, y_dim))
     pc = ccrs.PlateCarree()
     ax = fig.add_subplot(projection=pc, extent=bbox)
-    ax.coastlines()  # type: ignore[attr-defined]
+    ax.coastlines()
     ax.imshow(rgb, extent=extent, transform=transform)
 
     ax.set_xticks([spatial_bbox[0], spatial_bbox[2]], crs=ccrs.PlateCarree())
pycontrails/models/cocipgrid/cocip_grid.py

@@ -114,7 +114,7 @@ class CocipGrid(models.Model):
         rad: MetDataset,
         params: dict[str, Any] | None = None,
         **params_kwargs: Any,
-    ):
+    ) -> None:
         super().__init__(met, params=params, **params_kwargs)
 
         compute_tau_cirrus = self.params["compute_tau_cirrus_in_model_init"]
@@ -386,7 +386,7 @@ class CocipGrid(models.Model):
             "dt_integration": dt_integration_str,
             "aircraft_type": self.get_source_param("aircraft_type"),
             "pycontrails_version": pycontrails.__version__,
-            **self.source.attrs,  # type: ignore[dict-item]
+            **self.source.attrs,
         }
         if ap_model := self.params["aircraft_performance"]:
             attrs["ap_model"] = type(ap_model).__name__
@@ -2210,7 +2210,7 @@ def result_to_metdataset(
     # Update source
     for k, v in data_vars.items():  # type: ignore[assignment]
         source[k] = v
-    source.attrs.update(attrs)  # type: ignore[arg-type]
+    source.attrs.update(attrs)
 
     # Return reference to source
     return source
pycontrails/models/dry_advection.py

@@ -590,7 +590,7 @@ def _evolve_one_step(
         vector,
         dz_m=dz_m,
         dt=dt,  # type: ignore[arg-type]
-        max_depth=max_depth,  # type: ignore[arg-type]
+        max_depth=max_depth,
         verbose_outputs=verbose_outputs,
     )
     out["azimuth"] = azimuth_2
pycontrails/models/extended_k15.py

@@ -24,12 +24,12 @@ DEFAULT_EXHAUST_T = 600.0  # Exhaust temperature, [K]
 EXPERIMENTAL_WARNING = True
 
 
-class ParticleType(enum.Enum):
+class ParticleType(enum.StrEnum):
     """Enumeration of particle types."""
 
-    NVPM = "nvPM"
-    VPM = "vPM"
-    AMBIENT = "ambient"
+    NVPM = enum.auto()
+    VPM = enum.auto()
+    AMBIENT = enum.auto()
 
 
 @dataclasses.dataclass(frozen=True)
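
With enum.StrEnum, enum.auto() assigns the lower-cased member name as the value, so members now compare equal to plain strings; note this also changes the stored values from the old mixed-case literals (e.g. "nvPM" becomes "nvpm"). A minimal check (Python 3.11+):

    import enum

    class ParticleType(enum.StrEnum):
        NVPM = enum.auto()

    assert ParticleType.NVPM == "nvpm"
    assert isinstance(ParticleType.NVPM, str)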