pycontrails 0.54.3__cp312-cp312-macosx_11_0_arm64.whl → 0.54.5__cp312-cp312-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pycontrails might be problematic. Click here for more details.

Files changed (62) hide show
  1. pycontrails/__init__.py +2 -2
  2. pycontrails/_version.py +2 -2
  3. pycontrails/core/__init__.py +1 -1
  4. pycontrails/core/aircraft_performance.py +58 -58
  5. pycontrails/core/cache.py +7 -7
  6. pycontrails/core/fleet.py +54 -29
  7. pycontrails/core/flight.py +218 -301
  8. pycontrails/core/interpolation.py +63 -60
  9. pycontrails/core/met.py +193 -125
  10. pycontrails/core/models.py +27 -13
  11. pycontrails/core/polygon.py +15 -15
  12. pycontrails/core/rgi_cython.cpython-312-darwin.so +0 -0
  13. pycontrails/core/vector.py +119 -96
  14. pycontrails/datalib/_met_utils/metsource.py +8 -5
  15. pycontrails/datalib/ecmwf/__init__.py +14 -14
  16. pycontrails/datalib/ecmwf/common.py +1 -1
  17. pycontrails/datalib/ecmwf/era5.py +7 -7
  18. pycontrails/datalib/ecmwf/hres.py +3 -3
  19. pycontrails/datalib/ecmwf/ifs.py +1 -1
  20. pycontrails/datalib/gfs/__init__.py +6 -6
  21. pycontrails/datalib/gfs/gfs.py +2 -2
  22. pycontrails/datalib/goes.py +5 -5
  23. pycontrails/ext/empirical_grid.py +1 -1
  24. pycontrails/models/apcemm/apcemm.py +5 -5
  25. pycontrails/models/apcemm/utils.py +1 -1
  26. pycontrails/models/cocip/__init__.py +2 -2
  27. pycontrails/models/cocip/cocip.py +23 -24
  28. pycontrails/models/cocip/cocip_params.py +2 -11
  29. pycontrails/models/cocip/cocip_uncertainty.py +24 -18
  30. pycontrails/models/cocip/contrail_properties.py +331 -316
  31. pycontrails/models/cocip/output_formats.py +53 -53
  32. pycontrails/models/cocip/radiative_forcing.py +135 -131
  33. pycontrails/models/cocip/radiative_heating.py +135 -135
  34. pycontrails/models/cocip/unterstrasser_wake_vortex.py +90 -87
  35. pycontrails/models/cocip/wake_vortex.py +92 -92
  36. pycontrails/models/cocip/wind_shear.py +8 -8
  37. pycontrails/models/cocipgrid/cocip_grid.py +37 -96
  38. pycontrails/models/dry_advection.py +60 -19
  39. pycontrails/models/emissions/__init__.py +2 -2
  40. pycontrails/models/emissions/black_carbon.py +108 -108
  41. pycontrails/models/emissions/emissions.py +87 -87
  42. pycontrails/models/emissions/ffm2.py +35 -35
  43. pycontrails/models/humidity_scaling/humidity_scaling.py +23 -23
  44. pycontrails/models/issr.py +2 -2
  45. pycontrails/models/ps_model/__init__.py +1 -1
  46. pycontrails/models/ps_model/ps_aircraft_params.py +8 -4
  47. pycontrails/models/ps_model/ps_grid.py +76 -66
  48. pycontrails/models/ps_model/ps_model.py +16 -16
  49. pycontrails/models/ps_model/ps_operational_limits.py +20 -18
  50. pycontrails/models/tau_cirrus.py +8 -1
  51. pycontrails/physics/geo.py +67 -67
  52. pycontrails/physics/jet.py +79 -79
  53. pycontrails/physics/units.py +14 -14
  54. pycontrails/utils/json.py +1 -2
  55. pycontrails/utils/types.py +12 -7
  56. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/METADATA +2 -2
  57. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/NOTICE +1 -1
  58. pycontrails-0.54.5.dist-info/RECORD +111 -0
  59. pycontrails-0.54.3.dist-info/RECORD +0 -111
  60. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/LICENSE +0 -0
  61. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/WHEEL +0 -0
  62. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/top_level.txt +0 -0
@@ -40,21 +40,21 @@ from pycontrails.datalib.ecmwf.variables import (
40
40
  )
41
41
 
42
42
  __all__ = [
43
- "ERA5ARCO",
44
- "CDSCredentialsNotFound",
43
+ "ECMWF_VARIABLES",
45
44
  "ERA5",
46
- "ERA5ModelLevel",
45
+ "ERA5ARCO",
47
46
  "HRES",
48
- "HRESModelLevel",
49
47
  "IFS",
50
- "model_level_reference_pressure",
51
- "model_level_pressure",
52
- "ml_to_pl",
53
- "open_arco_era5_model_level_data",
54
- "open_arco_era5_single_level",
48
+ "MODEL_LEVELS_PATH",
49
+ "MODEL_LEVEL_VARIABLES",
50
+ "PRESSURE_LEVEL_VARIABLES",
51
+ "SURFACE_VARIABLES",
52
+ "CDSCredentialsNotFound",
55
53
  "CloudAreaFraction",
56
54
  "CloudAreaFractionInLayer",
57
55
  "Divergence",
56
+ "ERA5ModelLevel",
57
+ "HRESModelLevel",
58
58
  "OzoneMassMixingRatio",
59
59
  "PotentialVorticity",
60
60
  "RelativeHumidity",
@@ -65,9 +65,9 @@ __all__ = [
65
65
  "TOAIncidentSolarRadiation",
66
66
  "TopNetSolarRadiation",
67
67
  "TopNetThermalRadiation",
68
- "ECMWF_VARIABLES",
69
- "MODEL_LEVELS_PATH",
70
- "MODEL_LEVEL_VARIABLES",
71
- "PRESSURE_LEVEL_VARIABLES",
72
- "SURFACE_VARIABLES",
68
+ "ml_to_pl",
69
+ "model_level_pressure",
70
+ "model_level_reference_pressure",
71
+ "open_arco_era5_model_level_data",
72
+ "open_arco_era5_single_level",
73
73
  ]
@@ -61,7 +61,7 @@ class ECMWFAPI(metsource.MetDataSource):
61
61
 
62
62
  # downselect times
63
63
  if not self.timesteps:
64
- self.timesteps = ds["time"].values.astype("datetime64[ns]").tolist()
64
+ self.timesteps = ds["time"].values.astype("datetime64[ns]").tolist() # type: ignore[assignment]
65
65
  else:
66
66
  try:
67
67
  ds = ds.sel(time=self.timesteps)
@@ -137,10 +137,10 @@ class ERA5(ECMWFAPI):
137
137
  """
138
138
 
139
139
  __slots__ = (
140
- "product_type",
141
140
  "cds",
142
- "url",
143
141
  "key",
142
+ "product_type",
143
+ "url",
144
144
  )
145
145
 
146
146
  #: Product type, one of "reanalysis", "ensemble_mean", "ensemble_members", "ensemble_spread"
@@ -319,9 +319,9 @@ class ERA5(ECMWFAPI):
319
319
  str
320
320
  ERA5 dataset name in CDS
321
321
  """
322
- if self.pressure_levels != [-1]:
323
- return "reanalysis-era5-pressure-levels"
324
- return "reanalysis-era5-single-levels"
322
+ if self.is_single_level:
323
+ return "reanalysis-era5-single-levels"
324
+ return "reanalysis-era5-pressure-levels"
325
325
 
326
326
  def create_cachepath(self, t: datetime | pd.Timestamp) -> str:
327
327
  """Return cachepath to local ERA5 data file based on datetime.
@@ -539,9 +539,9 @@ class ERA5(ECMWFAPI):
539
539
  LOG.debug("Input dataset processed with pycontrails > 0.29")
540
540
  return ds
541
541
 
542
- # For "reanalysis-era5-single-levels" or if self.pressure_levels length == 1,
542
+ # For "reanalysis-era5-single-levels"
543
543
  # then the netcdf file does not contain the dimension "level"
544
- if len(self.pressure_levels) == 1:
544
+ if self.is_single_level:
545
545
  ds = ds.expand_dims(level=self.pressure_levels)
546
546
 
547
547
  # New CDS-Beta gives "valid_time" instead of "time"
@@ -228,7 +228,7 @@ class HRES(ECMWFAPI):
228
228
  ... )
229
229
  """
230
230
 
231
- __slots__ = ("server", "stream", "field_type", "forecast_time", "url", "key", "email")
231
+ __slots__ = ("email", "field_type", "forecast_time", "key", "server", "stream", "url")
232
232
 
233
233
  #: stream type, "oper" = atmospheric model/HRES, "enfo" = ensemble forecast.
234
234
  stream: str
@@ -691,7 +691,7 @@ class HRES(ECMWFAPI):
691
691
 
692
692
  # set forecast time if its not already defined
693
693
  if not getattr(self, "forecast_time", None):
694
- self.forecast_time = ds["time"].values.astype("datetime64[s]").tolist()
694
+ self.forecast_time = ds["time"].values.astype("datetime64[s]").tolist() # type: ignore[assignment]
695
695
 
696
696
  # check that forecast_time is correct if defined
697
697
  # note the "time" coordinate here is the HRES forecast_time
@@ -706,7 +706,7 @@ class HRES(ECMWFAPI):
706
706
  # set timesteps if not defined
707
707
  # note that "time" is now the actual timestep coordinates
708
708
  if not self.timesteps:
709
- self.timesteps = ds["time"].values.astype("datetime64[s]").tolist()
709
+ self.timesteps = ds["time"].values.astype("datetime64[s]").tolist() # type: ignore[assignment]
710
710
 
711
711
  self.cache_dataset(ds)
712
712
 
@@ -149,7 +149,7 @@ class IFS(metsource.MetDataSource):
149
149
  else:
150
150
  # set timesteps from dataset "time" coordinates
151
151
  # np.datetime64 doesn't convert to list[datetime] unless its unit is us
152
- self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()
152
+ self.timesteps = ds["time"].values.astype("datetime64[us]").tolist() # type: ignore[assignment]
153
153
 
154
154
  # downselect hyam/hybm coefficients by the "lev" coordinate
155
155
  # (this is a 1-indexed version of nhym)
@@ -16,13 +16,13 @@ from pycontrails.datalib.gfs.variables import (
16
16
 
17
17
  __all__ = [
18
18
  "GFS_FORECAST_BUCKET",
19
- "GFSForecast",
20
- "CloudIceWaterMixingRatio",
21
- "TotalCloudCoverIsobaric",
22
- "Visibility",
23
- "TOAUpwardShortwaveRadiation",
24
- "TOAUpwardLongwaveRadiation",
25
19
  "GFS_VARIABLES",
26
20
  "PRESSURE_LEVEL_VARIABLES",
27
21
  "SURFACE_VARIABLES",
22
+ "CloudIceWaterMixingRatio",
23
+ "GFSForecast",
24
+ "TOAUpwardLongwaveRadiation",
25
+ "TOAUpwardShortwaveRadiation",
26
+ "TotalCloudCoverIsobaric",
27
+ "Visibility",
28
28
  ]
@@ -125,7 +125,7 @@ class GFSForecast(metsource.MetDataSource):
125
125
  - `GFS Documentation <https://www.emc.ncep.noaa.gov/emc/pages/numerical_forecast_systems/gfs/documentation.php>`_
126
126
  """
127
127
 
128
- __slots__ = ("client", "grid", "cachestore", "show_progress", "forecast_time", "cache_download")
128
+ __slots__ = ("cache_download", "cachestore", "client", "forecast_time", "grid", "show_progress")
129
129
 
130
130
  #: S3 client for accessing GFS bucket
131
131
  client: botocore.client.S3
@@ -597,7 +597,7 @@ class GFSForecast(metsource.MetDataSource):
597
597
  else:
598
598
  # set timesteps from dataset "time" coordinates
599
599
  # np.datetime64 doesn't convert to list[datetime] unless its unit is us
600
- self.timesteps = ds["time"].values.astype("datetime64[us]").tolist()
600
+ self.timesteps = ds["time"].values.astype("datetime64[us]").tolist() # type: ignore[assignment]
601
601
 
602
602
  # if "level" is not in dims and
603
603
  # length of the requested pressure levels is 1
@@ -384,7 +384,7 @@ class GOES:
384
384
  [277.24512, 277.45377, 278.18408, ..., 274.6369 , 274.01093,
385
385
  274.06308],
386
386
  [276.8278 , 277.14078, 277.7146 , ..., 274.6369 , 273.9066 ,
387
- 274.16742]], dtype=float32)
387
+ 274.16742]], shape=(500, 500), dtype=float32)
388
388
 
389
389
  """
390
390
 
@@ -745,8 +745,8 @@ def to_ash(da: xr.DataArray, convention: str = "SEVIRI") -> npt.NDArray[np.float
745
745
 
746
746
 
747
747
  def _clip_and_scale(
748
- arr: npt.NDArray[np.float64], low: float, high: float
749
- ) -> npt.NDArray[np.float64]:
748
+ arr: npt.NDArray[np.floating], low: float, high: float
749
+ ) -> npt.NDArray[np.floating]:
750
750
  """Clip array and rescale to the interval [0, 1].
751
751
 
752
752
  Array is first clipped to the interval [low, high] and then linearly rescaled
@@ -757,7 +757,7 @@ def _clip_and_scale(
757
757
 
758
758
  Parameters
759
759
  ----------
760
- arr : npt.NDArray[np.float64]
760
+ arr : npt.NDArray[np.floating]
761
761
  Array to clip and scale.
762
762
  low : float
763
763
  Lower clipping bound.
@@ -766,7 +766,7 @@ def _clip_and_scale(
766
766
 
767
767
  Returns
768
768
  -------
769
- npt.NDArray[np.float64]
769
+ npt.NDArray[np.floating]
770
770
  Clipped and scaled array.
771
771
  """
772
772
  return (arr.clip(low, high) - low) / (high - low)
@@ -118,7 +118,7 @@ class EmpiricalGrid(AircraftPerformanceGrid):
118
118
 
119
119
  return data[["altitude_ft", *columns]].drop(columns=["aircraft_type"])
120
120
 
121
- def _sample(self, altitude_ft: npt.NDArray[np.float64]) -> None:
121
+ def _sample(self, altitude_ft: npt.NDArray[np.floating]) -> None:
122
122
  """Sample the data and update the source."""
123
123
 
124
124
  df = self._query_data()
@@ -277,13 +277,13 @@ class APCEMM(models.Model):
277
277
  """
278
278
 
279
279
  __slots__ = (
280
- "apcemm_path",
280
+ "_trajectory_downsampling",
281
281
  "apcemm_input_params",
282
+ "apcemm_path",
282
283
  "cachestore",
284
+ "contrail",
283
285
  "trajectories",
284
286
  "vortex",
285
- "contrail",
286
- "_trajectory_downsampling",
287
287
  )
288
288
 
289
289
  name = "apcemm"
@@ -474,7 +474,7 @@ class APCEMM(models.Model):
474
474
  for coord in ("longitude", "latitude", "level")
475
475
  }
476
476
  buffers["time_buffer"] = (0, self.params["max_age"] + self.params["dt_lagrangian"])
477
- met = self.source.downselect_met(self.met, **buffers, copy=False)
477
+ met = self.source.downselect_met(self.met, **buffers)
478
478
  model = DryAdvection(
479
479
  met=met,
480
480
  dt_integration=self.params["dt_lagrangian"],
@@ -816,7 +816,7 @@ class APCEMM(models.Model):
816
816
  # Ensure required met data is present.
817
817
  # No buffers needed for interpolation!
818
818
  vars = ap_model.met_variables + ap_model.optional_met_variables + emissions.met_variables
819
- met = self.source.downselect_met(self.met, copy=False)
819
+ met = self.source.downselect_met(self.met)
820
820
  met.ensure_vars(vars)
821
821
  met.standardize_variables(vars)
822
822
  for var in vars:
@@ -214,7 +214,7 @@ def generate_apcemm_input_met(
214
214
  )
215
215
 
216
216
  # Downselect met before interpolation
217
- met = vector.downselect_met(met, copy=False)
217
+ met = vector.downselect_met(met)
218
218
 
219
219
  # Interpolate meteorology data onto vector
220
220
  scale_humidity = humidity_scaling is not None and "specific_humidity" not in vector
@@ -15,9 +15,10 @@ from pycontrails.models.cocip.output_formats import (
15
15
 
16
16
  __all__ = [
17
17
  "Cocip",
18
+ "CocipFlightParams",
18
19
  "CocipParams",
19
20
  "CocipUncertaintyParams",
20
- "CocipFlightParams",
21
+ "compare_cocip_with_goes",
21
22
  "contrail_flight_summary_statistics",
22
23
  "contrails_to_hi_res_grid",
23
24
  "flight_waypoint_summary_statistics",
@@ -25,5 +26,4 @@ __all__ = [
25
26
  "longitude_latitude_grid",
26
27
  "natural_cirrus_properties_to_hi_res_grid",
27
28
  "time_slice_statistics",
28
- "compare_cocip_with_goes",
29
29
  ]
@@ -195,14 +195,14 @@ class Cocip(Model):
195
195
  """
196
196
 
197
197
  __slots__ = (
198
- "rad",
198
+ "_downwash_contrail",
199
+ "_downwash_flight",
200
+ "_sac_flight",
199
201
  "contrail",
200
202
  "contrail_dataset",
201
203
  "contrail_list",
204
+ "rad",
202
205
  "timesteps",
203
- "_sac_flight",
204
- "_downwash_flight",
205
- "_downwash_contrail",
206
206
  )
207
207
 
208
208
  name = "cocip"
@@ -391,7 +391,7 @@ class Cocip(Model):
391
391
  # which is the positive direction for level
392
392
  logger.debug("Downselect met for Cocip initialization")
393
393
  level_buffer = 0, self.params["met_level_buffer"][1]
394
- met = self.source.downselect_met(self.met, level_buffer=level_buffer, copy=False)
394
+ met = self.source.downselect_met(self.met, level_buffer=level_buffer)
395
395
  met = add_tau_cirrus(met)
396
396
 
397
397
  # Prepare flight for model
@@ -660,7 +660,7 @@ class Cocip(Model):
660
660
  attrs = self.source.attrs
661
661
  attrs.pop("fl_attrs", None)
662
662
  attrs.pop("data_keys", None)
663
- self.source = Fleet.from_seq(fls, broadcast_numeric=False, copy=False, attrs=attrs)
663
+ self.source = Fleet.from_seq(fls, broadcast_numeric=False, attrs=attrs)
664
664
 
665
665
  # Single flight
666
666
  else:
@@ -976,9 +976,9 @@ class Cocip(Model):
976
976
  for coord in ("longitude", "latitude", "level")
977
977
  }
978
978
  logger.debug("Downselect met for start of Cocip evolution")
979
- met = self._downwash_contrail.downselect_met(self.met, **buffers, copy=False)
979
+ met = self._downwash_contrail.downselect_met(self.met, **buffers)
980
980
  met = add_tau_cirrus(met)
981
- rad = self._downwash_contrail.downselect_met(self.rad, **buffers, copy=False)
981
+ rad = self._downwash_contrail.downselect_met(self.rad, **buffers)
982
982
 
983
983
  calc_continuous(self._downwash_contrail)
984
984
  calc_timestep_geometry(self._downwash_contrail)
@@ -1135,11 +1135,11 @@ class Cocip(Model):
1135
1135
  & (self._downwash_flight["time"] <= lookahead),
1136
1136
  copy=False,
1137
1137
  )
1138
- vector = GeoVectorDataset(
1138
+ vector = GeoVectorDataset._from_fastpath(
1139
1139
  {
1140
1140
  key: np.concatenate((latest_contrail[key], future_contrails[key]))
1141
1141
  for key in ("longitude", "latitude", "level", "time")
1142
- }
1142
+ },
1143
1143
  )
1144
1144
 
1145
1145
  # compute time buffer to ensure downselection extends to time_end
@@ -1152,7 +1152,7 @@ class Cocip(Model):
1152
1152
  max(np.timedelta64(0, "ns"), time_end - vector["time"].max()),
1153
1153
  )
1154
1154
 
1155
- return vector.downselect_met(met, **buffers, copy=False)
1155
+ return vector.downselect_met(met, **buffers)
1156
1156
 
1157
1157
  def _create_downwash_contrail(self) -> GeoVectorDataset:
1158
1158
  """Get Contrail representation of downwash flight."""
@@ -1180,7 +1180,7 @@ class Cocip(Model):
1180
1180
  "persistent": self._downwash_flight["persistent_1"],
1181
1181
  }
1182
1182
 
1183
- contrail = GeoVectorDataset(downwash_contrail_data, copy=True)
1183
+ contrail = GeoVectorDataset._from_fastpath(downwash_contrail_data).copy()
1184
1184
  contrail["formation_time"] = contrail["time"].copy()
1185
1185
  contrail["age"] = contrail["formation_time"] - contrail["time"]
1186
1186
 
@@ -2055,9 +2055,9 @@ def calc_radiative_properties(contrail: GeoVectorDataset, params: dict[str, Any]
2055
2055
 
2056
2056
  def calc_contrail_properties(
2057
2057
  contrail: GeoVectorDataset,
2058
- effective_vertical_resolution: float | npt.NDArray[np.float64],
2059
- wind_shear_enhancement_exponent: float | npt.NDArray[np.float64],
2060
- sedimentation_impact_factor: float | npt.NDArray[np.float64],
2058
+ effective_vertical_resolution: float | npt.NDArray[np.floating],
2059
+ wind_shear_enhancement_exponent: float | npt.NDArray[np.floating],
2060
+ sedimentation_impact_factor: float | npt.NDArray[np.floating],
2061
2061
  radiative_heating_effects: bool,
2062
2062
  ) -> None:
2063
2063
  """Calculate geometric and ice-related properties of contrail.
@@ -2084,11 +2084,11 @@ def calc_contrail_properties(
2084
2084
  ----------
2085
2085
  contrail : GeoVectorDataset
2086
2086
  Grid points with many precomputed keys.
2087
- effective_vertical_resolution : float | npt.NDArray[np.float64]
2087
+ effective_vertical_resolution : float | npt.NDArray[np.floating]
2088
2088
  Passed into :func:`wind_shear.wind_shear_enhancement_factor`.
2089
- wind_shear_enhancement_exponent : float | npt.NDArray[np.float64]
2089
+ wind_shear_enhancement_exponent : float | npt.NDArray[np.floating]
2090
2090
  Passed into :func:`wind_shear.wind_shear_enhancement_factor`.
2091
- sedimentation_impact_factor: float | npt.NDArray[np.float64]
2091
+ sedimentation_impact_factor: float | npt.NDArray[np.floating]
2092
2092
  Passed into `contrail_properties.vertical_diffusivity`.
2093
2093
  radiative_heating_effects: bool
2094
2094
  Include radiative heating effects on contrail cirrus properties.
@@ -2300,7 +2300,7 @@ def calc_timestep_contrail_evolution(
2300
2300
  level_2 = geo.advect_level(level_1, vertical_velocity_1, rho_air_1, terminal_fall_speed_1, dt)
2301
2301
  altitude_2 = units.pl_to_m(level_2)
2302
2302
 
2303
- contrail_2 = GeoVectorDataset(
2303
+ contrail_2 = GeoVectorDataset._from_fastpath(
2304
2304
  {
2305
2305
  "waypoint": waypoint_2,
2306
2306
  "flight_id": contrail_1["flight_id"],
@@ -2312,7 +2312,6 @@ def calc_timestep_contrail_evolution(
2312
2312
  "altitude": altitude_2,
2313
2313
  "level": level_2,
2314
2314
  },
2315
- copy=False,
2316
2315
  )
2317
2316
  intersection = contrail_2.coords_intersect_met(met)
2318
2317
  if not np.any(intersection):
@@ -2525,8 +2524,8 @@ def calc_timestep_contrail_evolution(
2525
2524
  def _rad_accumulation_to_average_instantaneous(
2526
2525
  rad: MetDataset,
2527
2526
  name: str,
2528
- arr: npt.NDArray[np.float64],
2529
- ) -> npt.NDArray[np.float64]:
2527
+ arr: npt.NDArray[np.floating],
2528
+ ) -> npt.NDArray[np.floating]:
2530
2529
  """Convert from radiation accumulation to average instantaneous values.
2531
2530
 
2532
2531
  .. versionadded:: 0.48.0
@@ -2537,12 +2536,12 @@ def _rad_accumulation_to_average_instantaneous(
2537
2536
  Radiation data
2538
2537
  name : str
2539
2538
  Variable name
2540
- arr : npt.NDArray[np.float64]
2539
+ arr : npt.NDArray[np.floating]
2541
2540
  Array of values already interpolated from ``rad``
2542
2541
 
2543
2542
  Returns
2544
2543
  -------
2545
- npt.NDArray[np.float64]
2544
+ npt.NDArray[np.floating]
2546
2545
  Array of values converted from accumulation to average instantaneous values
2547
2546
 
2548
2547
  Raises
@@ -11,7 +11,7 @@ import numpy as np
11
11
  import numpy.typing as npt
12
12
 
13
13
  from pycontrails.core.aircraft_performance import AircraftPerformance
14
- from pycontrails.core.models import ModelParams
14
+ from pycontrails.core.models import AdvectionBuffers
15
15
  from pycontrails.models.emissions.emissions import EmissionsParams
16
16
  from pycontrails.models.humidity_scaling import HumidityScaling
17
17
 
@@ -50,7 +50,7 @@ def _habits() -> npt.NDArray[np.str_]:
50
50
 
51
51
 
52
52
  @dataclasses.dataclass
53
- class CocipParams(ModelParams):
53
+ class CocipParams(AdvectionBuffers):
54
54
  """Model parameters required by the CoCiP models."""
55
55
 
56
56
  # -------------------------
@@ -116,15 +116,6 @@ class CocipParams(ModelParams):
116
116
  #: evaluation after the met data is downselected.
117
117
  compute_tau_cirrus_in_model_init: bool | str = "auto"
118
118
 
119
- #: Met longitude [WGS84] buffer for Cocip evolution.
120
- met_longitude_buffer: tuple[float, float] = (10.0, 10.0)
121
-
122
- #: Met latitude buffer [WGS84] for Cocip evolution.
123
- met_latitude_buffer: tuple[float, float] = (10.0, 10.0)
124
-
125
- #: Met level buffer [:math:`hPa`] for Cocip initialization and evolution.
126
- met_level_buffer: tuple[float, float] = (40.0, 40.0)
127
-
128
119
  # ---------
129
120
  # Filtering
130
121
  # ---------
@@ -2,8 +2,8 @@
2
2
 
3
3
  from __future__ import annotations
4
4
 
5
+ import dataclasses
5
6
  import logging
6
- from dataclasses import asdict, dataclass
7
7
  from typing import Any, ClassVar
8
8
 
9
9
  import numpy as np
@@ -70,7 +70,7 @@ class habit_dirichlet(rv_frozen):
70
70
  return habit_weights
71
71
 
72
72
 
73
- @dataclass
73
+ @dataclasses.dataclass
74
74
  class CocipUncertaintyParams(CocipParams):
75
75
  """Model parameters for CoCiP epistemic uncertainty.
76
76
 
@@ -117,26 +117,30 @@ class CocipUncertaintyParams(CocipParams):
117
117
  #: Schumann takes ``wind_shear_enhancement_exponent`` = 0.5 and discusses the case of 0 and 2/3
118
118
  #: as possibilities.
119
119
  #: With a value of 0, wind shear is not enhanced.
120
- wind_shear_enhancement_exponent_uncertainty: rv_frozen | None = stats.triang(
121
- loc=0.0, c=CocipParams.wind_shear_enhancement_exponent, scale=1.0
120
+ wind_shear_enhancement_exponent_uncertainty: rv_frozen | None = dataclasses.field(
121
+ default_factory=lambda: stats.triang(
122
+ loc=0.0, c=CocipParams.wind_shear_enhancement_exponent, scale=1.0
123
+ )
122
124
  )
123
125
 
124
126
  #: Schumann takes ``initial_wake_vortex_depth`` = 0.5 and discusses some
125
127
  #: uncertainty in this value. This parameter should be non-negative.
126
- initial_wake_vortex_depth_uncertainty: rv_frozen | None = stats.triang(
127
- loc=0.3, c=CocipParams.initial_wake_vortex_depth, scale=0.4
128
+ initial_wake_vortex_depth_uncertainty: rv_frozen | None = dataclasses.field(
129
+ default_factory=lambda: stats.triang(
130
+ loc=0.3, c=CocipParams.initial_wake_vortex_depth, scale=0.4
131
+ )
128
132
  )
129
133
 
130
134
  #: Schumann takes a default value of 0.1 and describes it as an "important adjustable parameter"
131
135
  #: Currently, `CocipParams` uses a default value of 0.5
132
- sedimentation_impact_factor_uncertainty: rv_frozen | None = stats.norm(
133
- loc=CocipParams.sedimentation_impact_factor, scale=0.1
136
+ sedimentation_impact_factor_uncertainty: rv_frozen | None = dataclasses.field(
137
+ default_factory=lambda: stats.norm(loc=CocipParams.sedimentation_impact_factor, scale=0.1)
134
138
  )
135
139
 
136
140
  #: Teoh 2022 (to appear) takes values between 70% decrease and 100% increase.
137
141
  #: This coincides with the log normal distribution defined below.
138
- nvpm_ei_n_enhancement_factor_uncertainty: rv_frozen | None = stats.lognorm(
139
- s=0.15, scale=1 / stats.lognorm(s=0.15).mean()
142
+ nvpm_ei_n_enhancement_factor_uncertainty: rv_frozen | None = dataclasses.field(
143
+ default_factory=lambda: stats.lognorm(s=0.15, scale=1 / stats.lognorm(s=0.15).mean())
140
144
  )
141
145
 
142
146
  #: Scale shortwave radiative forcing.
@@ -145,8 +149,8 @@ class CocipUncertaintyParams(CocipParams):
145
149
  #: by `libRadTran <http://www.libradtran.org/doku.php>`_.
146
150
  #: We use the average RMS error across all habit types (pg 1397) as the standard deviation
147
151
  #: of a normally distributed scaling factor for SW forcing
148
- rf_sw_enhancement_factor_uncertainty: rv_frozen | None = stats.norm(
149
- loc=CocipParams.rf_sw_enhancement_factor, scale=0.106
152
+ rf_sw_enhancement_factor_uncertainty: rv_frozen | None = dataclasses.field(
153
+ default_factory=lambda: stats.norm(loc=CocipParams.rf_sw_enhancement_factor, scale=0.106)
150
154
  )
151
155
 
152
156
  #: Scale longwave radiative forcing.
@@ -154,8 +158,8 @@ class CocipUncertaintyParams(CocipParams):
154
158
  #: fit to the data generated by `libRadTran <http://www.libradtran.org/doku.php>`_.
155
159
  #: We use the average RMS error across all habit types (pg 1397) as the standard deviation
156
160
  #: of a normally distributed scaling factor for LW forcing.
157
- rf_lw_enhancement_factor_uncertainty: rv_frozen | None = stats.norm(
158
- loc=CocipParams.rf_lw_enhancement_factor, scale=0.071
161
+ rf_lw_enhancement_factor_uncertainty: rv_frozen | None = dataclasses.field(
162
+ default_factory=lambda: stats.norm(loc=CocipParams.rf_lw_enhancement_factor, scale=0.071)
159
163
  )
160
164
 
161
165
  #: Scale the habit distributions by a dirichlet distribution
@@ -163,7 +167,9 @@ class CocipUncertaintyParams(CocipParams):
163
167
  #: where :math:`\text{G}_{i}` is the approximate habit weight distributions
164
168
  #: defined in :attr:`CocipParams().habit_distributions`.
165
169
  #: Higher values of :math:`\text{C}` correspond to higher confidence in initial estimates.
166
- habit_distributions_uncertainty: rv_frozen | None = habit_dirichlet(C=96.0)
170
+ habit_distributions_uncertainty: rv_frozen | None = dataclasses.field(
171
+ default_factory=habit_dirichlet
172
+ )
167
173
 
168
174
  def __post_init__(self) -> None:
169
175
  """Override values of model parameters according to ranges."""
@@ -192,7 +198,7 @@ class CocipUncertaintyParams(CocipParams):
192
198
 
193
199
  out = {}
194
200
 
195
- param_dict = asdict(self)
201
+ param_dict = dataclasses.asdict(self)
196
202
  for uncertainty_param, dist in param_dict.items():
197
203
  if uncertainty_param.endswith("_uncertainty") and dist is not None:
198
204
  param = uncertainty_param.split("_uncertainty")[0]
@@ -212,7 +218,7 @@ class CocipUncertaintyParams(CocipParams):
212
218
 
213
219
  return out
214
220
 
215
- def rvs(self, size: None | int = None) -> dict[str, np.float64 | npt.NDArray[np.float64]]:
221
+ def rvs(self, size: None | int = None) -> dict[str, np.float64 | npt.NDArray[np.floating]]:
216
222
  """Call each distribution's `rvs` method to generate random parameters.
217
223
 
218
224
  Seed calls to `rvs` with class variable `rng`.
@@ -224,7 +230,7 @@ class CocipUncertaintyParams(CocipParams):
224
230
 
225
231
  Returns
226
232
  -------
227
- dict[str, float | npt.NDArray[np.float64]]
233
+ dict[str, float | npt.NDArray[np.floating]]
228
234
  Dictionary of random parameters. Dictionary keys consists of names of parameters in
229
235
  `CocipParams` to be overridden by random value.
230
236