pycontrails 0.54.3__cp311-cp311-win_amd64.whl → 0.54.4__cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pycontrails might be problematic; see the registry's advisory page for more details.

Files changed (60):
  1. pycontrails/__init__.py +2 -2
  2. pycontrails/_version.py +2 -2
  3. pycontrails/core/__init__.py +1 -1
  4. pycontrails/core/aircraft_performance.py +58 -58
  5. pycontrails/core/cache.py +7 -7
  6. pycontrails/core/fleet.py +25 -21
  7. pycontrails/core/flight.py +213 -301
  8. pycontrails/core/interpolation.py +56 -56
  9. pycontrails/core/met.py +48 -39
  10. pycontrails/core/models.py +25 -11
  11. pycontrails/core/polygon.py +15 -15
  12. pycontrails/core/rgi_cython.cp311-win_amd64.pyd +0 -0
  13. pycontrails/core/vector.py +22 -22
  14. pycontrails/datalib/_met_utils/metsource.py +8 -5
  15. pycontrails/datalib/ecmwf/__init__.py +14 -14
  16. pycontrails/datalib/ecmwf/common.py +1 -1
  17. pycontrails/datalib/ecmwf/era5.py +7 -7
  18. pycontrails/datalib/ecmwf/hres.py +3 -3
  19. pycontrails/datalib/ecmwf/ifs.py +1 -1
  20. pycontrails/datalib/gfs/__init__.py +6 -6
  21. pycontrails/datalib/gfs/gfs.py +2 -2
  22. pycontrails/datalib/goes.py +5 -5
  23. pycontrails/ext/empirical_grid.py +1 -1
  24. pycontrails/models/apcemm/apcemm.py +3 -3
  25. pycontrails/models/cocip/__init__.py +2 -2
  26. pycontrails/models/cocip/cocip.py +15 -15
  27. pycontrails/models/cocip/cocip_params.py +2 -11
  28. pycontrails/models/cocip/cocip_uncertainty.py +24 -18
  29. pycontrails/models/cocip/contrail_properties.py +331 -316
  30. pycontrails/models/cocip/output_formats.py +53 -53
  31. pycontrails/models/cocip/radiative_forcing.py +135 -131
  32. pycontrails/models/cocip/radiative_heating.py +135 -135
  33. pycontrails/models/cocip/unterstrasser_wake_vortex.py +90 -87
  34. pycontrails/models/cocip/wake_vortex.py +92 -92
  35. pycontrails/models/cocip/wind_shear.py +8 -8
  36. pycontrails/models/cocipgrid/cocip_grid.py +93 -87
  37. pycontrails/models/dry_advection.py +10 -5
  38. pycontrails/models/emissions/__init__.py +2 -2
  39. pycontrails/models/emissions/black_carbon.py +108 -108
  40. pycontrails/models/emissions/emissions.py +85 -85
  41. pycontrails/models/emissions/ffm2.py +35 -35
  42. pycontrails/models/humidity_scaling/humidity_scaling.py +23 -23
  43. pycontrails/models/ps_model/__init__.py +1 -1
  44. pycontrails/models/ps_model/ps_aircraft_params.py +8 -4
  45. pycontrails/models/ps_model/ps_grid.py +74 -64
  46. pycontrails/models/ps_model/ps_model.py +14 -14
  47. pycontrails/models/ps_model/ps_operational_limits.py +20 -18
  48. pycontrails/models/tau_cirrus.py +8 -1
  49. pycontrails/physics/geo.py +67 -67
  50. pycontrails/physics/jet.py +79 -79
  51. pycontrails/physics/units.py +14 -14
  52. pycontrails/utils/json.py +1 -2
  53. pycontrails/utils/types.py +12 -7
  54. {pycontrails-0.54.3.dist-info → pycontrails-0.54.4.dist-info}/METADATA +2 -2
  55. {pycontrails-0.54.3.dist-info → pycontrails-0.54.4.dist-info}/NOTICE +1 -1
  56. pycontrails-0.54.4.dist-info/RECORD +111 -0
  57. pycontrails-0.54.3.dist-info/RECORD +0 -111
  58. {pycontrails-0.54.3.dist-info → pycontrails-0.54.4.dist-info}/LICENSE +0 -0
  59. {pycontrails-0.54.3.dist-info → pycontrails-0.54.4.dist-info}/WHEEL +0 -0
  60. {pycontrails-0.54.3.dist-info → pycontrails-0.54.4.dist-info}/top_level.txt +0 -0
@@ -42,7 +42,7 @@ except ModuleNotFoundError as exc:
42
42
 
43
43
 
44
44
  def buffer_and_clean(
45
- contour: npt.NDArray[np.float64],
45
+ contour: npt.NDArray[np.floating],
46
46
  min_area: float,
47
47
  convex_hull: bool,
48
48
  epsilon: float,
@@ -54,7 +54,7 @@ def buffer_and_clean(
54
54
 
55
55
  Parameters
56
56
  ----------
57
- contour : npt.NDArray[np.float64]
57
+ contour : npt.NDArray[np.floating]
58
58
  Contour to buffer and clean. A 2d array of shape (n, 2) where n is the number
59
59
  of vertices in the contour.
60
60
  min_area : float
@@ -157,13 +157,13 @@ def _round_polygon(polygon: shapely.Polygon, precision: int) -> shapely.Polygon:
157
157
 
158
158
 
159
159
  def _contours_to_polygons(
160
- contours: Sequence[npt.NDArray[np.float64]],
160
+ contours: Sequence[npt.NDArray[np.floating]],
161
161
  hierarchy: npt.NDArray[np.int_],
162
162
  min_area: float,
163
163
  convex_hull: bool,
164
164
  epsilon: float,
165
- longitude: npt.NDArray[np.float64] | None,
166
- latitude: npt.NDArray[np.float64] | None,
165
+ longitude: npt.NDArray[np.floating] | None,
166
+ latitude: npt.NDArray[np.floating] | None,
167
167
  precision: int | None,
168
168
  buffer: float,
169
169
  i: int = 0,
@@ -172,7 +172,7 @@ def _contours_to_polygons(
172
172
 
173
173
  Parameters
174
174
  ----------
175
- contours : Sequence[npt.NDArray[np.float64]]
175
+ contours : Sequence[npt.NDArray[np.floating]]
176
176
  The contours output from :func:`cv2.findContours`.
177
177
  hierarchy : npt.NDArray[np.int_]
178
178
  The hierarchy output from :func:`cv2.findContours`.
@@ -182,9 +182,9 @@ def _contours_to_polygons(
182
182
  Whether to take the convex hull of each polygon.
183
183
  epsilon : float
184
184
  Epsilon value to use when simplifying the polygons.
185
- longitude : npt.NDArray[np.float64] | None
185
+ longitude : npt.NDArray[np.floating] | None
186
186
  Longitude values for the grid.
187
- latitude : npt.NDArray[np.float64] | None
187
+ latitude : npt.NDArray[np.floating] | None
188
188
  Latitude values for the grid.
189
189
  precision : int | None
190
190
  Precision to use when rounding the coordinates.
@@ -254,7 +254,7 @@ def _contours_to_polygons(
254
254
 
255
255
 
256
256
  def determine_buffer(
257
- longitude: npt.NDArray[np.float64], latitude: npt.NDArray[np.float64]
257
+ longitude: npt.NDArray[np.floating], latitude: npt.NDArray[np.floating]
258
258
  ) -> float:
259
259
  """Determine the proper buffer size to use when converting to polygons."""
260
260
 
@@ -279,22 +279,22 @@ def determine_buffer(
279
279
 
280
280
 
281
281
  def find_multipolygon(
282
- arr: npt.NDArray[np.float64],
282
+ arr: npt.NDArray[np.floating],
283
283
  threshold: float,
284
284
  min_area: float,
285
285
  epsilon: float,
286
286
  lower_bound: bool = True,
287
287
  interiors: bool = True,
288
288
  convex_hull: bool = False,
289
- longitude: npt.NDArray[np.float64] | None = None,
290
- latitude: npt.NDArray[np.float64] | None = None,
289
+ longitude: npt.NDArray[np.floating] | None = None,
290
+ latitude: npt.NDArray[np.floating] | None = None,
291
291
  precision: int | None = None,
292
292
  ) -> shapely.MultiPolygon:
293
293
  """Compute a multipolygon from a 2d array.
294
294
 
295
295
  Parameters
296
296
  ----------
297
- arr : npt.NDArray[np.float64]
297
+ arr : npt.NDArray[np.floating]
298
298
  Array to convert to a multipolygon. The array will be converted to a binary
299
299
  array by comparing each element to ``threshold``. This binary array is then
300
300
  passed into :func:`cv2.findContours` to find the contours.
@@ -312,11 +312,11 @@ def find_multipolygon(
312
312
  Whether to include interior polygons. By default, True.
313
313
  convex_hull : bool, optional
314
314
  Experimental. Whether to take the convex hull of each polygon. By default, False.
315
- longitude : npt.NDArray[np.float64] | None, optional
315
+ longitude : npt.NDArray[np.floating] | None, optional
316
316
  If provided, the coordinates values corresponding to the longitude dimensions of ``arr``.
317
317
  The contour coordinates will be converted to longitude-latitude values by indexing
318
318
  into this array. Defaults to None.
319
- latitude : npt.NDArray[np.float64] | None, optional
319
+ latitude : npt.NDArray[np.floating] | None, optional
320
320
  If provided, the coordinates values corresponding to the latitude dimensions of ``arr``.
321
321
  precision : int | None, optional
322
322
  If provided, the precision to use when rounding the coordinates. Defaults to None.
@@ -260,7 +260,7 @@ class VectorDataset:
260
260
  If "time" variable cannot be converted to numpy array.
261
261
  """
262
262
 
263
- __slots__ = ("data", "attrs")
263
+ __slots__ = ("attrs", "data")
264
264
 
265
265
  #: Vector data with labels as keys and :class:`numpy.ndarray` as values
266
266
  data: VectorDataDict
@@ -1392,7 +1392,7 @@ class GeoVectorDataset(VectorDataset):
1392
1392
  return attrs
1393
1393
 
1394
1394
  @property
1395
- def level(self) -> npt.NDArray[np.float64]:
1395
+ def level(self) -> npt.NDArray[np.floating]:
1396
1396
  """Get pressure ``level`` values for points.
1397
1397
 
1398
1398
  Automatically calculates pressure level using :func:`units.m_to_pl` using ``altitude`` key.
@@ -1403,7 +1403,7 @@ class GeoVectorDataset(VectorDataset):
1403
1403
 
1404
1404
  Returns
1405
1405
  -------
1406
- npt.NDArray[np.float64]
1406
+ npt.NDArray[np.floating]
1407
1407
  Point pressure level values, [:math:`hPa`]
1408
1408
  """
1409
1409
  try:
@@ -1412,7 +1412,7 @@ class GeoVectorDataset(VectorDataset):
1412
1412
  return units.m_to_pl(self.altitude)
1413
1413
 
1414
1414
  @property
1415
- def altitude(self) -> npt.NDArray[np.float64]:
1415
+ def altitude(self) -> npt.NDArray[np.floating]:
1416
1416
  """Get altitude.
1417
1417
 
1418
1418
  Automatically calculates altitude using :func:`units.pl_to_m` using ``level`` key.
@@ -1423,7 +1423,7 @@ class GeoVectorDataset(VectorDataset):
1423
1423
 
1424
1424
  Returns
1425
1425
  -------
1426
- npt.NDArray[np.float64]
1426
+ npt.NDArray[np.floating]
1427
1427
  Altitude, [:math:`m`]
1428
1428
  """
1429
1429
  try:
@@ -1437,12 +1437,12 @@ class GeoVectorDataset(VectorDataset):
1437
1437
  return units.ft_to_m(self["altitude_ft"])
1438
1438
 
1439
1439
  @property
1440
- def air_pressure(self) -> npt.NDArray[np.float64]:
1440
+ def air_pressure(self) -> npt.NDArray[np.floating]:
1441
1441
  """Get ``air_pressure`` values for points.
1442
1442
 
1443
1443
  Returns
1444
1444
  -------
1445
- npt.NDArray[np.float64]
1445
+ npt.NDArray[np.floating]
1446
1446
  Point air pressure values, [:math:`Pa`]
1447
1447
  """
1448
1448
  try:
@@ -1451,12 +1451,12 @@ class GeoVectorDataset(VectorDataset):
1451
1451
  return 100.0 * self.level
1452
1452
 
1453
1453
  @property
1454
- def altitude_ft(self) -> npt.NDArray[np.float64]:
1454
+ def altitude_ft(self) -> npt.NDArray[np.floating]:
1455
1455
  """Get altitude in feet.
1456
1456
 
1457
1457
  Returns
1458
1458
  -------
1459
- npt.NDArray[np.float64]
1459
+ npt.NDArray[np.floating]
1460
1460
  Altitude, [:math:`ft`]
1461
1461
  """
1462
1462
  try:
@@ -1522,7 +1522,7 @@ class GeoVectorDataset(VectorDataset):
1522
1522
  # Utilities
1523
1523
  # ------------
1524
1524
 
1525
- def transform_crs(self, crs: str) -> tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]:
1525
+ def transform_crs(self, crs: str) -> tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]:
1526
1526
  """Transform trajectory data from one coordinate reference system (CRS) to another.
1527
1527
 
1528
1528
  Parameters
@@ -1535,7 +1535,7 @@ class GeoVectorDataset(VectorDataset):
1535
1535
 
1536
1536
  Returns
1537
1537
  -------
1538
- tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]
1538
+ tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]
1539
1539
  New x and y coordinates in the target CRS.
1540
1540
  """
1541
1541
  try:
@@ -1552,12 +1552,12 @@ class GeoVectorDataset(VectorDataset):
1552
1552
  transformer = pyproj.Transformer.from_crs(crs_from, crs, always_xy=True)
1553
1553
  return transformer.transform(self["longitude"], self["latitude"])
1554
1554
 
1555
- def T_isa(self) -> npt.NDArray[np.float64]:
1555
+ def T_isa(self) -> npt.NDArray[np.floating]:
1556
1556
  """Calculate the ICAO standard atmosphere temperature at each point.
1557
1557
 
1558
1558
  Returns
1559
1559
  -------
1560
- npt.NDArray[np.float64]
1560
+ npt.NDArray[np.floating]
1561
1561
  ISA temperature, [:math:`K`]
1562
1562
 
1563
1563
  See Also
@@ -1610,24 +1610,24 @@ class GeoVectorDataset(VectorDataset):
1610
1610
  self,
1611
1611
  mda: met_module.MetDataArray,
1612
1612
  *,
1613
- longitude: npt.NDArray[np.float64] | None = None,
1614
- latitude: npt.NDArray[np.float64] | None = None,
1615
- level: npt.NDArray[np.float64] | None = None,
1613
+ longitude: npt.NDArray[np.floating] | None = None,
1614
+ latitude: npt.NDArray[np.floating] | None = None,
1615
+ level: npt.NDArray[np.floating] | None = None,
1616
1616
  time: npt.NDArray[np.datetime64] | None = None,
1617
1617
  use_indices: bool = False,
1618
1618
  **interp_kwargs: Any,
1619
- ) -> npt.NDArray[np.float64]:
1619
+ ) -> npt.NDArray[np.floating]:
1620
1620
  """Intersect waypoints with MetDataArray.
1621
1621
 
1622
1622
  Parameters
1623
1623
  ----------
1624
1624
  mda : MetDataArray
1625
1625
  MetDataArray containing a meteorological variable at spatio-temporal coordinates.
1626
- longitude : npt.NDArray[np.float64], optional
1626
+ longitude : npt.NDArray[np.floating], optional
1627
1627
  Override existing coordinates for met interpolation
1628
- latitude : npt.NDArray[np.float64], optional
1628
+ latitude : npt.NDArray[np.floating], optional
1629
1629
  Override existing coordinates for met interpolation
1630
- level : npt.NDArray[np.float64], optional
1630
+ level : npt.NDArray[np.floating], optional
1631
1631
  Override existing coordinates for met interpolation
1632
1632
  time : npt.NDArray[np.datetime64], optional
1633
1633
  Override existing coordinates for met interpolation
@@ -1646,7 +1646,7 @@ class GeoVectorDataset(VectorDataset):
1646
1646
 
1647
1647
  Returns
1648
1648
  -------
1649
- npt.NDArray[np.float64]
1649
+ npt.NDArray[np.floating]
1650
1650
  Interpolated values
1651
1651
 
1652
1652
  Examples
@@ -2019,7 +2019,7 @@ def vector_to_lon_lat_grid(
2019
2019
  ...,
2020
2020
  [1.97, 3.02, 1.84, ..., 2.37, 3.87, 2.09],
2021
2021
  [3.74, 1.6 , 4.01, ..., 4.6 , 4.27, 3.4 ],
2022
- [2.97, 0.12, 1.33, ..., 3.54, 0.74, 2.59]])
2022
+ [2.97, 0.12, 1.33, ..., 3.54, 0.74, 2.59]], shape=(40, 40))
2023
2023
 
2024
2024
  >>> da.sum().item() == vector["foo"].sum()
2025
2025
  np.True_
@@ -175,13 +175,16 @@ def parse_pressure_levels(
175
175
 
176
176
  out = arr.tolist()
177
177
  if supported is None:
178
- return out
178
+ return out # type: ignore[return-value]
179
179
 
180
- if missing := set(out).difference(supported):
181
- msg = f"Pressure levels {sorted(missing)} are not supported. Supported levels: {supported}"
180
+ if missing := set(out).difference(supported): # type: ignore[arg-type]
181
+ msg = (
182
+ f"Pressure levels {sorted(missing)} are not supported. " # type: ignore[type-var]
183
+ f"Supported levels: {supported}"
184
+ )
182
185
  raise ValueError(msg)
183
186
 
184
- return out
187
+ return out # type: ignore[return-value]
185
188
 
186
189
 
187
190
  def parse_variables(variables: VariableInput, supported: list[MetVariable]) -> list[MetVariable]:
@@ -347,7 +350,7 @@ def round_hour(time: datetime, hour: int) -> datetime:
347
350
  class MetDataSource(abc.ABC):
348
351
  """Abstract class for wrapping meteorology data sources."""
349
352
 
350
- __slots__ = ("timesteps", "variables", "pressure_levels", "grid", "paths")
353
+ __slots__ = ("grid", "paths", "pressure_levels", "timesteps", "variables")
351
354
 
352
355
  #: List of individual timesteps from data source derived from :attr:`time`
353
356
  #: Use :func:`parse_time` to handle :class:`TimeInput`.
@@ -40,21 +40,21 @@ from pycontrails.datalib.ecmwf.variables import (
40
40
  )
41
41
 
42
42
  __all__ = [
43
- "ERA5ARCO",
44
- "CDSCredentialsNotFound",
43
+ "ECMWF_VARIABLES",
45
44
  "ERA5",
46
- "ERA5ModelLevel",
45
+ "ERA5ARCO",
47
46
  "HRES",
48
- "HRESModelLevel",
49
47
  "IFS",
50
- "model_level_reference_pressure",
51
- "model_level_pressure",
52
- "ml_to_pl",
53
- "open_arco_era5_model_level_data",
54
- "open_arco_era5_single_level",
48
+ "MODEL_LEVELS_PATH",
49
+ "MODEL_LEVEL_VARIABLES",
50
+ "PRESSURE_LEVEL_VARIABLES",
51
+ "SURFACE_VARIABLES",
52
+ "CDSCredentialsNotFound",
55
53
  "CloudAreaFraction",
56
54
  "CloudAreaFractionInLayer",
57
55
  "Divergence",
56
+ "ERA5ModelLevel",
57
+ "HRESModelLevel",
58
58
  "OzoneMassMixingRatio",
59
59
  "PotentialVorticity",
60
60
  "RelativeHumidity",
@@ -65,9 +65,9 @@ __all__ = [
65
65
  "TOAIncidentSolarRadiation",
66
66
  "TopNetSolarRadiation",
67
67
  "TopNetThermalRadiation",
68
- "ECMWF_VARIABLES",
69
- "MODEL_LEVELS_PATH",
70
- "MODEL_LEVEL_VARIABLES",
71
- "PRESSURE_LEVEL_VARIABLES",
72
- "SURFACE_VARIABLES",
68
+ "ml_to_pl",
69
+ "model_level_pressure",
70
+ "model_level_reference_pressure",
71
+ "open_arco_era5_model_level_data",
72
+ "open_arco_era5_single_level",
73
73
  ]
@@ -61,7 +61,7 @@ class ECMWFAPI(metsource.MetDataSource):
61
61
 
62
62
  # downselect times
63
63
  if not self.timesteps:
64
- self.timesteps = ds["time"].values.astype("datetime64[ns]").tolist()
64
+ self.timesteps = ds["time"].values.astype("datetime64[ns]").tolist() # type: ignore[assignment]
65
65
  else:
66
66
  try:
67
67
  ds = ds.sel(time=self.timesteps)
@@ -137,10 +137,10 @@ class ERA5(ECMWFAPI):
137
137
  """
138
138
 
139
139
  __slots__ = (
140
- "product_type",
141
140
  "cds",
142
- "url",
143
141
  "key",
142
+ "product_type",
143
+ "url",
144
144
  )
145
145
 
146
146
  #: Product type, one of "reanalysis", "ensemble_mean", "ensemble_members", "ensemble_spread"
@@ -319,9 +319,9 @@ class ERA5(ECMWFAPI):
319
319
  str
320
320
  ERA5 dataset name in CDS
321
321
  """
322
- if self.pressure_levels != [-1]:
323
- return "reanalysis-era5-pressure-levels"
324
- return "reanalysis-era5-single-levels"
322
+ if self.is_single_level:
323
+ return "reanalysis-era5-single-levels"
324
+ return "reanalysis-era5-pressure-levels"
325
325
 
326
326
  def create_cachepath(self, t: datetime | pd.Timestamp) -> str:
327
327
  """Return cachepath to local ERA5 data file based on datetime.
@@ -539,9 +539,9 @@ class ERA5(ECMWFAPI):
539
539
  LOG.debug("Input dataset processed with pycontrails > 0.29")
540
540
  return ds
541
541
 
542
- # For "reanalysis-era5-single-levels" or if self.pressure_levels length == 1,
542
+ # For "reanalysis-era5-single-levels"
543
543
  # then the netcdf file does not contain the dimension "level"
544
- if len(self.pressure_levels) == 1:
544
+ if self.is_single_level:
545
545
  ds = ds.expand_dims(level=self.pressure_levels)
546
546
 
547
547
  # New CDS-Beta gives "valid_time" instead of "time"
@@ -228,7 +228,7 @@ class HRES(ECMWFAPI):
228
228
  ... )
229
229
  """
230
230
 
231
- __slots__ = ("server", "stream", "field_type", "forecast_time", "url", "key", "email")
231
+ __slots__ = ("email", "field_type", "forecast_time", "key", "server", "stream", "url")
232
232
 
233
233
  #: stream type, "oper" = atmospheric model/HRES, "enfo" = ensemble forecast.
234
234
  stream: str
@@ -691,7 +691,7 @@ class HRES(ECMWFAPI):
691
691
 
692
692
  # set forecast time if its not already defined
693
693
  if not getattr(self, "forecast_time", None):
694
- self.forecast_time = ds["time"].values.astype("datetime64[s]").tolist()
694
+ self.forecast_time = ds["time"].values.astype("datetime64[s]").tolist() # type: ignore[assignment]
695
695
 
696
696
  # check that forecast_time is correct if defined
697
697
  # note the "time" coordinate here is the HRES forecast_time
@@ -706,7 +706,7 @@ class HRES(ECMWFAPI):
706
706
  # set timesteps if not defined
707
707
  # note that "time" is now the actual timestep coordinates
708
708
  if not self.timesteps:
709
- self.timesteps = ds["time"].values.astype("datetime64[s]").tolist()
709
+ self.timesteps = ds["time"].values.astype("datetime64[s]").tolist() # type: ignore[assignment]
710
710
 
711
711
  self.cache_dataset(ds)
712
712
 
@@ -149,7 +149,7 @@ class IFS(metsource.MetDataSource):
149
149
  else:
150
150
  # set timesteps from dataset "time" coordinates
151
151
  # np.datetime64 doesn't covert to list[datetime] unless its unit is us
152
- self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()
152
+ self.timesteps = ds["time"].values.astype("datetime64[us]").tolist() # type: ignore[assignment]
153
153
 
154
154
  # downselect hyam/hybm coefficients by the "lev" coordinate
155
155
  # (this is a 1-indexed verison of nhym)
@@ -16,13 +16,13 @@ from pycontrails.datalib.gfs.variables import (
16
16
 
17
17
  __all__ = [
18
18
  "GFS_FORECAST_BUCKET",
19
- "GFSForecast",
20
- "CloudIceWaterMixingRatio",
21
- "TotalCloudCoverIsobaric",
22
- "Visibility",
23
- "TOAUpwardShortwaveRadiation",
24
- "TOAUpwardLongwaveRadiation",
25
19
  "GFS_VARIABLES",
26
20
  "PRESSURE_LEVEL_VARIABLES",
27
21
  "SURFACE_VARIABLES",
22
+ "CloudIceWaterMixingRatio",
23
+ "GFSForecast",
24
+ "TOAUpwardLongwaveRadiation",
25
+ "TOAUpwardShortwaveRadiation",
26
+ "TotalCloudCoverIsobaric",
27
+ "Visibility",
28
28
  ]
@@ -125,7 +125,7 @@ class GFSForecast(metsource.MetDataSource):
125
125
  - `GFS Documentation <https://www.emc.ncep.noaa.gov/emc/pages/numerical_forecast_systems/gfs/documentation.php>`_
126
126
  """
127
127
 
128
- __slots__ = ("client", "grid", "cachestore", "show_progress", "forecast_time", "cache_download")
128
+ __slots__ = ("cache_download", "cachestore", "client", "forecast_time", "grid", "show_progress")
129
129
 
130
130
  #: S3 client for accessing GFS bucket
131
131
  client: botocore.client.S3
@@ -597,7 +597,7 @@ class GFSForecast(metsource.MetDataSource):
597
597
  else:
598
598
  # set timesteps from dataset "time" coordinates
599
599
  # np.datetime64 doesn't covert to list[datetime] unless its unit is us
600
- self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()
600
+ self.timesteps = ds["time"].values.astype("datetime64[us]").tolist() # type: ignore[assignment]
601
601
 
602
602
  # if "level" is not in dims and
603
603
  # length of the requested pressure levels is 1
@@ -384,7 +384,7 @@ class GOES:
384
384
  [277.24512, 277.45377, 278.18408, ..., 274.6369 , 274.01093,
385
385
  274.06308],
386
386
  [276.8278 , 277.14078, 277.7146 , ..., 274.6369 , 273.9066 ,
387
- 274.16742]], dtype=float32)
387
+ 274.16742]], shape=(500, 500), dtype=float32)
388
388
 
389
389
  """
390
390
 
@@ -745,8 +745,8 @@ def to_ash(da: xr.DataArray, convention: str = "SEVIRI") -> npt.NDArray[np.float
745
745
 
746
746
 
747
747
  def _clip_and_scale(
748
- arr: npt.NDArray[np.float64], low: float, high: float
749
- ) -> npt.NDArray[np.float64]:
748
+ arr: npt.NDArray[np.floating], low: float, high: float
749
+ ) -> npt.NDArray[np.floating]:
750
750
  """Clip array and rescale to the interval [0, 1].
751
751
 
752
752
  Array is first clipped to the interval [low, high] and then linearly rescaled
@@ -757,7 +757,7 @@ def _clip_and_scale(
757
757
 
758
758
  Parameters
759
759
  ----------
760
- arr : npt.NDArray[np.float64]
760
+ arr : npt.NDArray[np.floating]
761
761
  Array to clip and scale.
762
762
  low : float
763
763
  Lower clipping bound.
@@ -766,7 +766,7 @@ def _clip_and_scale(
766
766
 
767
767
  Returns
768
768
  -------
769
- npt.NDArray[np.float64]
769
+ npt.NDArray[np.floating]
770
770
  Clipped and scaled array.
771
771
  """
772
772
  return (arr.clip(low, high) - low) / (high - low)
@@ -118,7 +118,7 @@ class EmpiricalGrid(AircraftPerformanceGrid):
118
118
 
119
119
  return data[["altitude_ft", *columns]].drop(columns=["aircraft_type"])
120
120
 
121
- def _sample(self, altitude_ft: npt.NDArray[np.float64]) -> None:
121
+ def _sample(self, altitude_ft: npt.NDArray[np.floating]) -> None:
122
122
  """Sample the data and update the source."""
123
123
 
124
124
  df = self._query_data()
@@ -277,13 +277,13 @@ class APCEMM(models.Model):
277
277
  """
278
278
 
279
279
  __slots__ = (
280
- "apcemm_path",
280
+ "_trajectory_downsampling",
281
281
  "apcemm_input_params",
282
+ "apcemm_path",
282
283
  "cachestore",
284
+ "contrail",
283
285
  "trajectories",
284
286
  "vortex",
285
- "contrail",
286
- "_trajectory_downsampling",
287
287
  )
288
288
 
289
289
  name = "apcemm"
@@ -15,9 +15,10 @@ from pycontrails.models.cocip.output_formats import (
15
15
 
16
16
  __all__ = [
17
17
  "Cocip",
18
+ "CocipFlightParams",
18
19
  "CocipParams",
19
20
  "CocipUncertaintyParams",
20
- "CocipFlightParams",
21
+ "compare_cocip_with_goes",
21
22
  "contrail_flight_summary_statistics",
22
23
  "contrails_to_hi_res_grid",
23
24
  "flight_waypoint_summary_statistics",
@@ -25,5 +26,4 @@ __all__ = [
25
26
  "longitude_latitude_grid",
26
27
  "natural_cirrus_properties_to_hi_res_grid",
27
28
  "time_slice_statistics",
28
- "compare_cocip_with_goes",
29
29
  ]
@@ -195,14 +195,14 @@ class Cocip(Model):
195
195
  """
196
196
 
197
197
  __slots__ = (
198
- "rad",
198
+ "_downwash_contrail",
199
+ "_downwash_flight",
200
+ "_sac_flight",
199
201
  "contrail",
200
202
  "contrail_dataset",
201
203
  "contrail_list",
204
+ "rad",
202
205
  "timesteps",
203
- "_sac_flight",
204
- "_downwash_flight",
205
- "_downwash_contrail",
206
206
  )
207
207
 
208
208
  name = "cocip"
@@ -660,7 +660,7 @@ class Cocip(Model):
660
660
  attrs = self.source.attrs
661
661
  attrs.pop("fl_attrs", None)
662
662
  attrs.pop("data_keys", None)
663
- self.source = Fleet.from_seq(fls, broadcast_numeric=False, copy=False, attrs=attrs)
663
+ self.source = Fleet.from_seq(fls, broadcast_numeric=False, attrs=attrs)
664
664
 
665
665
  # Single flight
666
666
  else:
@@ -2055,9 +2055,9 @@ def calc_radiative_properties(contrail: GeoVectorDataset, params: dict[str, Any]
2055
2055
 
2056
2056
  def calc_contrail_properties(
2057
2057
  contrail: GeoVectorDataset,
2058
- effective_vertical_resolution: float | npt.NDArray[np.float64],
2059
- wind_shear_enhancement_exponent: float | npt.NDArray[np.float64],
2060
- sedimentation_impact_factor: float | npt.NDArray[np.float64],
2058
+ effective_vertical_resolution: float | npt.NDArray[np.floating],
2059
+ wind_shear_enhancement_exponent: float | npt.NDArray[np.floating],
2060
+ sedimentation_impact_factor: float | npt.NDArray[np.floating],
2061
2061
  radiative_heating_effects: bool,
2062
2062
  ) -> None:
2063
2063
  """Calculate geometric and ice-related properties of contrail.
@@ -2084,11 +2084,11 @@ def calc_contrail_properties(
2084
2084
  ----------
2085
2085
  contrail : GeoVectorDataset
2086
2086
  Grid points with many precomputed keys.
2087
- effective_vertical_resolution : float | npt.NDArray[np.float64]
2087
+ effective_vertical_resolution : float | npt.NDArray[np.floating]
2088
2088
  Passed into :func:`wind_shear.wind_shear_enhancement_factor`.
2089
- wind_shear_enhancement_exponent : float | npt.NDArray[np.float64]
2089
+ wind_shear_enhancement_exponent : float | npt.NDArray[np.floating]
2090
2090
  Passed into :func:`wind_shear.wind_shear_enhancement_factor`.
2091
- sedimentation_impact_factor: float | npt.NDArray[np.float64]
2091
+ sedimentation_impact_factor: float | npt.NDArray[np.floating]
2092
2092
  Passed into `contrail_properties.vertical_diffusivity`.
2093
2093
  radiative_heating_effects: bool
2094
2094
  Include radiative heating effects on contrail cirrus properties.
@@ -2525,8 +2525,8 @@ def calc_timestep_contrail_evolution(
2525
2525
  def _rad_accumulation_to_average_instantaneous(
2526
2526
  rad: MetDataset,
2527
2527
  name: str,
2528
- arr: npt.NDArray[np.float64],
2529
- ) -> npt.NDArray[np.float64]:
2528
+ arr: npt.NDArray[np.floating],
2529
+ ) -> npt.NDArray[np.floating]:
2530
2530
  """Convert from radiation accumulation to average instantaneous values.
2531
2531
 
2532
2532
  .. versionadded:: 0.48.0
@@ -2537,12 +2537,12 @@ def _rad_accumulation_to_average_instantaneous(
2537
2537
  Radiation data
2538
2538
  name : str
2539
2539
  Variable name
2540
- arr : npt.NDArray[np.float64]
2540
+ arr : npt.NDArray[np.floating]
2541
2541
  Array of values already interpolated from ``rad``
2542
2542
 
2543
2543
  Returns
2544
2544
  -------
2545
- npt.NDArray[np.float64]
2545
+ npt.NDArray[np.floating]
2546
2546
  Array of values converted from accumulation to average instantaneous values
2547
2547
 
2548
2548
  Raises
@@ -11,7 +11,7 @@ import numpy as np
11
11
  import numpy.typing as npt
12
12
 
13
13
  from pycontrails.core.aircraft_performance import AircraftPerformance
14
- from pycontrails.core.models import ModelParams
14
+ from pycontrails.core.models import AdvectionBuffers
15
15
  from pycontrails.models.emissions.emissions import EmissionsParams
16
16
  from pycontrails.models.humidity_scaling import HumidityScaling
17
17
 
@@ -50,7 +50,7 @@ def _habits() -> npt.NDArray[np.str_]:
50
50
 
51
51
 
52
52
  @dataclasses.dataclass
53
- class CocipParams(ModelParams):
53
+ class CocipParams(AdvectionBuffers):
54
54
  """Model parameters required by the CoCiP models."""
55
55
 
56
56
  # -------------------------
@@ -116,15 +116,6 @@ class CocipParams(ModelParams):
116
116
  #: evaluation after the met data is downselected.
117
117
  compute_tau_cirrus_in_model_init: bool | str = "auto"
118
118
 
119
- #: Met longitude [WGS84] buffer for Cocip evolution.
120
- met_longitude_buffer: tuple[float, float] = (10.0, 10.0)
121
-
122
- #: Met latitude buffer [WGS84] for Cocip evolution.
123
- met_latitude_buffer: tuple[float, float] = (10.0, 10.0)
124
-
125
- #: Met level buffer [:math:`hPa`] for Cocip initialization and evolution.
126
- met_level_buffer: tuple[float, float] = (40.0, 40.0)
127
-
128
119
  # ---------
129
120
  # Filtering
130
121
  # ---------