pycontrails 0.50.0-cp312-cp312-win_amd64.whl → 0.50.2-cp312-cp312-win_amd64.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.


Files changed (32)
  1. pycontrails/_version.py +2 -2
  2. pycontrails/core/flight.py +108 -6
  3. pycontrails/core/interpolation.py +39 -1
  4. pycontrails/core/met.py +10 -12
  5. pycontrails/core/rgi_cython.cp312-win_amd64.pyd +0 -0
  6. pycontrails/core/vector.py +10 -8
  7. pycontrails/datalib/goes.py +14 -12
  8. pycontrails/models/cocip/cocip.py +22 -5
  9. pycontrails/models/cocip/cocip_params.py +11 -2
  10. pycontrails/models/cocip/contrail_properties.py +4 -9
  11. pycontrails/models/cocip/unterstrasser_wake_vortex.py +403 -0
  12. pycontrails/models/cocip/wake_vortex.py +22 -1
  13. pycontrails/models/cocipgrid/cocip_grid.py +103 -6
  14. pycontrails/models/emissions/emissions.py +2 -2
  15. pycontrails/models/emissions/static/default-engine-uids.csv +1 -1
  16. pycontrails/models/emissions/static/{edb-gaseous-v28c-engines.csv → edb-gaseous-v29b-engines.csv} +49 -11
  17. pycontrails/models/emissions/static/{edb-nvpm-v28c-engines.csv → edb-nvpm-v29b-engines.csv} +90 -54
  18. pycontrails/models/issr.py +1 -1
  19. pycontrails/models/ps_model/ps_aircraft_params.py +13 -1
  20. pycontrails/models/ps_model/ps_grid.py +20 -20
  21. pycontrails/models/ps_model/ps_model.py +1 -1
  22. pycontrails/models/ps_model/ps_operational_limits.py +202 -1
  23. pycontrails/models/ps_model/static/ps-aircraft-params-20240417.csv +64 -0
  24. pycontrails/physics/units.py +2 -2
  25. pycontrails/utils/types.py +3 -1
  26. {pycontrails-0.50.0.dist-info → pycontrails-0.50.2.dist-info}/METADATA +1 -1
  27. {pycontrails-0.50.0.dist-info → pycontrails-0.50.2.dist-info}/RECORD +31 -30
  28. pycontrails/models/ps_model/static/ps-aircraft-params-20240209.csv +0 -63
  29. {pycontrails-0.50.0.dist-info → pycontrails-0.50.2.dist-info}/LICENSE +0 -0
  30. {pycontrails-0.50.0.dist-info → pycontrails-0.50.2.dist-info}/NOTICE +0 -0
  31. {pycontrails-0.50.0.dist-info → pycontrails-0.50.2.dist-info}/WHEEL +0 -0
  32. {pycontrails-0.50.0.dist-info → pycontrails-0.50.2.dist-info}/top_level.txt +0 -0
pycontrails/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '0.50.0'
- __version_tuple__ = version_tuple = (0, 50, 0)
+ __version__ = version = '0.50.2'
+ __version_tuple__ = version_tuple = (0, 50, 2)
pycontrails/core/flight.py CHANGED
@@ -17,6 +17,7 @@ from pycontrails.core.fuel import Fuel, JetA
  from pycontrails.core.vector import AttrDict, GeoVectorDataset, VectorDataDict, VectorDataset
  from pycontrails.physics import constants, geo, units
  from pycontrails.utils import dependencies
+ from pycontrails.utils.types import ArrayOrFloat

  logger = logging.getLogger(__name__)

@@ -787,6 +788,13 @@ class Flight(GeoVectorDataset):
  Waypoints are resampled according to the frequency ``freq``. Values for :attr:`data`
  columns ``longitude``, ``latitude``, and ``altitude`` are interpolated.

+ Resampled waypoints will include all multiples of ``freq`` between the flight
+ start and end time. For example, when resampling to a frequency of 1 minute,
+ a flight that starts at 2020/1/1 00:00:59 and ends at 2020/1/1 00:01:01
+ will return a single waypoint at 2020/1/1 00:01:00, whereas a flight that
+ starts at 2020/1/1 00:01:01 and ends at 2020/1/1 00:01:59 will return an empty
+ flight.
+
  Parameters
  ----------
  freq : str, optional
@@ -1105,6 +1113,102 @@ class Flight(GeoVectorDataset):
  out.data.pop("level", None) # avoid any ambiguity
  return out

+ def distance_to_coords(self: Flight, distance: ArrayOrFloat) -> tuple[
+ ArrayOrFloat,
+ ArrayOrFloat,
+ np.intp | npt.NDArray[np.intp],
+ ]:
+ """
+ Convert distance along flight path to geodesic coordinates.
+
+ Will return a tuple containing `(lat, lon, index)`, where index indicates which flight
+ segment contains the returned coordinate.
+
+ Parameters
+ ----------
+ distance : ArrayOrFloat
+ Distance along flight path, [:math:`m`]
+
+ Returns
+ -------
+ (ArrayOrFloat, ArrayOrFloat, int | npt.NDArray[int])
+ latitude, longitude, and segment index cooresponding to distance.
+ """
+
+ # Check if flight crosses antimeridian line
+ lon_ = self["longitude"]
+ lat_ = self["latitude"]
+ sign_ = np.sign(lon_)
+ min_pos = np.min(lon_[sign_ == 1.0], initial=np.inf)
+ max_neg = np.max(lon_[sign_ == -1.0], initial=-np.inf)
+
+ if (180.0 - min_pos) + (180.0 + max_neg) < 180.0 and min_pos < np.inf and max_neg > -np.inf:
+ # In this case, we believe the flight crosses the antimeridian
+ shift = min_pos
+ # So we shift the longitude "chart"
+ lon_ = (lon_ - shift) % 360.0
+ else:
+ shift = None
+
+ # Make a fake flight that flies at constant height so distance is just
+ # distance traveled across groud
+ flat_dataset = Flight(
+ longitude=self.coords["longitude"],
+ latitude=self.coords["latitude"],
+ time=self.coords["time"],
+ level=[self.coords["level"][0] for _ in range(self.size)],
+ )
+
+ lengths = flat_dataset.segment_length()
+ cumulative_lengths = np.nancumsum(lengths)
+ cumulative_lengths = np.insert(cumulative_lengths[:-1], 0, 0)
+ seg_idx: np.intp | npt.NDArray[np.intp]
+
+ if isinstance(distance, float):
+ seg_idx = np.argmax(cumulative_lengths > distance)
+ else:
+ seg_idx = np.argmax(cumulative_lengths > distance.reshape((distance.size, 1)), axis=1)
+
+ # If in the last segment (which has length 0), then just return the last waypoint
+ seg_idx -= 1
+
+ # linear interpolation in lat/lon - assuming the way points are within 100-200km so this
+ # should be accurate enough without needed to reproject or use spherical distance
+ lat1: ArrayOrFloat = lat_[seg_idx]
+ lon1: ArrayOrFloat = lon_[seg_idx]
+ lat2: ArrayOrFloat = lat_[seg_idx + 1]
+ lon2: ArrayOrFloat = lon_[seg_idx + 1]
+
+ dx = distance - cumulative_lengths[seg_idx]
+ fx = dx / lengths[seg_idx]
+ lat = (1 - fx) * lat1 + fx * lat2
+ lon = (1 - fx) * lon1 + fx * lon2
+
+ if isinstance(distance, float):
+ if distance < 0:
+ lat = np.nan
+ lon = np.nan
+ seg_idx = np.intp(0)
+ elif distance >= cumulative_lengths[-1]:
+ lat = lat_[-1]
+ lon = lon_[-1]
+ seg_idx = np.intp(self.size - 1)
+ else:
+ lat[distance < 0] = np.nan
+ lon[distance < 0] = np.nan
+ seg_idx[distance < 0] = 0 # type: ignore
+
+ lat[distance >= cumulative_lengths[-1]] = lat_[-1]
+ lon[distance >= cumulative_lengths[-1]] = lon_[-1]
+ seg_idx[distance >= cumulative_lengths[-1]] = self.size - 1 # type: ignore
+
+ if shift is not None:
+ # We need to translate back to the original chart here
+ lon += shift
+ lon = ((lon + 180.0) % 360.0) - 180.0
+
+ return lat, lon, seg_idx
+
  def _geodesic_interpolation(self, geodesic_threshold: float) -> pd.DataFrame | None:
  """Geodesic interpolate between large gaps between waypoints.

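
A minimal usage sketch of the new ``Flight.distance_to_coords`` method added above (``fl`` stands for any existing ``Flight``; the array case is an assumption based on the ``ArrayOrFloat`` annotation, and this is illustrative rather than taken from the package docs):

    import numpy as np

    # Scalar distance along the flight path, in meters
    lat, lon, seg_idx = fl.distance_to_coords(50_000.0)

    # Array of distances; negative values map to NaN, and distances beyond the
    # end of the flight map to the final waypoint, per the logic above
    lats, lons, seg_idxs = fl.distance_to_coords(np.array([-1.0, 10_000.0, 1e12]))
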
@@ -1349,8 +1453,8 @@ class Flight(GeoVectorDataset):
  >>> # Intersect and attach
  >>> fl["air_temperature"] = fl.intersect_met(met['air_temperature'])
  >>> fl["air_temperature"]
- array([235.94658, 235.95767, 235.96873, ..., 234.59918, 234.60388,
- 234.60846], dtype=float32)
+ array([235.94657007, 235.95766965, 235.96873412, ..., 234.59917962,
+ 234.60387402, 234.60845312])

  >>> # Length (in meters) of waypoints whose temperature exceeds 236K
  >>> fl.length_met("air_temperature", threshold=236)
@@ -2063,11 +2167,9 @@ def _resample_to_freq(df: pd.DataFrame, freq: str) -> tuple[pd.DataFrame, pd.Dat

  # Manually create a new index that includes all the original index values
  # and the resampled-to-freq index values.
- t0 = df.index[0]
+ t0 = df.index[0].ceil(freq)
  t1 = df.index[-1]
- t = pd.date_range(t0, t1, freq=freq, name="time").floor(freq)
- if t[0] < t0:
- t = t[1:]
+ t = pd.date_range(t0, t1, freq=freq, name="time")

  concat_arr = np.concatenate([df.index, t])
  concat_arr = np.unique(concat_arr)
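
The new boundary behavior can be reproduced with plain pandas (a sketch of the rule implemented above, not the pycontrails code itself): the resampled index starts at the first waypoint time rounded up to ``freq``.

    import pandas as pd

    # Flight spanning 00:00:59 - 00:01:01 contains one whole minute
    start, end = pd.Timestamp("2020-01-01 00:00:59"), pd.Timestamp("2020-01-01 00:01:01")
    t = pd.date_range(start.ceil("1min"), end, freq="1min")
    print(list(t))  # [Timestamp('2020-01-01 00:01:00')] -> one resampled waypoint

    # Flight spanning 00:01:01 - 00:01:59 contains no whole minute
    start, end = pd.Timestamp("2020-01-01 00:01:01"), pd.Timestamp("2020-01-01 00:01:59")
    t = pd.date_range(start.ceil("1min"), end, freq="1min")
    print(len(t))  # 0 -> the resampled flight is empty
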
pycontrails/core/interpolation.py CHANGED
@@ -71,7 +71,9 @@ class PycontrailsRegularGridInterpolator(scipy.interpolate.RegularGridInterpolat

  self.grid = points
  self.values = values
- self.method = method
+ # TODO: consider supporting updated tensor-product spline methods
+ # see https://github.com/scipy/scipy/releases/tag/v1.13.0
+ self.method = _pick_method(scipy.__version__, method)
  self.bounds_error = bounds_error
  self.fill_value = fill_value

@@ -219,6 +221,42 @@ class PycontrailsRegularGridInterpolator(scipy.interpolate.RegularGridInterpolat
  raise ValueError(msg)


+ def _pick_method(scipy_version: str, method: str) -> str:
+ """Select an interpolation method.
+
+ For scipy versions 1.13.0 and later, fall back on legacy implementations
+ of tensor-product spline methods. The default implementations in 1.13.0
+ and later are incompatible with this class.
+
+ Parameters
+ ----------
+ scipy_version : str
+ scipy version (major.minor.patch)
+
+ method : str
+ Interpolation method. Passed into :class:`scipy.interpolate.RegularGridInterpolator`
+ as-is unless ``scipy_version`` is 1.13.0 or later and ``method`` is ``"slinear"``,
+ ``"cubic"``, or ``"quintic"``. In this case, ``"_legacy"`` is appended to ``method``.
+
+ Returns
+ -------
+ str
+ Interpolation method adjusted for compatibility with this class.
+ """
+ try:
+ version = scipy_version.split(".")
+ major = int(version[0])
+ minor = int(version[1])
+ except (IndexError, ValueError) as exc:
+ msg = f"Failed to parse major and minor version from {scipy_version}"
+ raise ValueError(msg) from exc
+
+ reimplemented_methods = ["slinear", "cubic", "quintic"]
+ if major > 1 or (major == 1 and minor >= 13) and method in reimplemented_methods:
+ return method + "_legacy"
+ return method
+
+
  def _floatize_time(
  time: npt.NDArray[np.datetime64], offset: np.datetime64
  ) -> npt.NDArray[np.float64]:
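
The version gate added above can be exercised directly (``_pick_method`` is a private helper in ``pycontrails.core.interpolation``, so treat this as an illustrative sketch rather than a documented API):

    from pycontrails.core.interpolation import _pick_method

    assert _pick_method("1.12.0", "slinear") == "slinear"     # older scipy: passed through unchanged
    assert _pick_method("1.13.1", "cubic") == "cubic_legacy"  # scipy >= 1.13: legacy spline name
    assert _pick_method("1.13.1", "linear") == "linear"       # non-spline methods are never rewritten
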
pycontrails/core/met.py CHANGED
@@ -674,13 +674,10 @@ class MetDataset(MetBase):
  >>> da = mda.data # Underlying `xarray` object

  >>> # Check out a few values
- >>> da[5:10, 5:10, 1, 1].values
- array([[224.0896 , 224.41374, 224.75946, 225.16237, 225.60507],
- [224.09457, 224.42038, 224.76526, 225.16817, 225.61089],
- [224.10037, 224.42618, 224.77106, 225.17314, 225.61586],
- [224.10617, 224.43282, 224.7777 , 225.17812, 225.62166],
- [224.11115, 224.44028, 224.7835 , 225.18393, 225.62663]],
- dtype=float32)
+ >>> da[5:8, 5:8, 1, 1].values
+ array([[224.08959005, 224.41374427, 224.75945349],
+ [224.09456429, 224.42037658, 224.76525676],
+ [224.10036756, 224.42617985, 224.77106004]])

  >>> # Mean temperature over entire array
  >>> da.mean().load().item()
@@ -1618,15 +1615,15 @@ class MetDataArray(MetBase):

  >>> # Interpolation at a grid point agrees with value
  >>> mda.interpolate(1, 2, 300, np.datetime64('2022-03-01T14:00'))
- array([241.91974], dtype=float32)
+ array([241.91972984])

  >>> da = mda.data
  >>> da.sel(longitude=1, latitude=2, level=300, time=np.datetime64('2022-03-01T14')).item()
- 241.91974
+ 241.9197298421629

  >>> # Interpolation off grid
  >>> mda.interpolate(1.1, 2.1, 290, np.datetime64('2022-03-01 13:10'))
- array([239.83794], dtype=float32)
+ array([239.83793798])

  >>> # Interpolate along path
  >>> longitude = np.linspace(1, 2, 10)
@@ -1634,8 +1631,9 @@ class MetDataArray(MetBase):
  >>> level = np.linspace(200, 300, 10)
  >>> time = pd.date_range("2022-03-01T14", periods=10, freq="5min")
  >>> mda.interpolate(longitude, latitude, level, time)
- array([220.44348, 223.089 , 225.7434 , 228.41643, 231.10858, 233.54858,
- 235.71506, 237.86479, 239.99275, 242.10793], dtype=float32)
+ array([220.44347694, 223.08900738, 225.74338924, 228.41642088,
+ 231.10858599, 233.54857391, 235.71504913, 237.86478872,
+ 239.99274623, 242.10792167])
  """
  # Load if necessary
  if not self.in_memory:
pycontrails/core/vector.py CHANGED
@@ -1694,12 +1694,14 @@ class GeoVectorDataset(VectorDataset):

  >>> # Intersect
  >>> fl.intersect_met(met['air_temperature'], method='nearest')
- array([231.6297 , 230.72604, 232.2432 , 231.88339, 231.0643 , 231.59073,
- 231.65126, 231.93065, 232.03345, 231.65955], dtype=float32)
+ array([231.62969892, 230.72604651, 232.24318771, 231.88338483,
+ 231.06429438, 231.59073409, 231.65125393, 231.93064004,
+ 232.03344087, 231.65954432])

  >>> fl.intersect_met(met['air_temperature'], method='linear')
- array([225.77794, 225.13908, 226.23122, 226.31831, 225.56102, 225.81192,
- 226.03194, 226.22057, 226.0377 , 225.63226], dtype=float32)
+ array([225.77794552, 225.13908414, 226.231218 , 226.31831528,
+ 225.56102321, 225.81192149, 226.03192642, 226.22056121,
+ 226.03770174, 225.63226188])

  >>> # Interpolate and attach to `Flight` instance
  >>> for key in met:
@@ -1708,11 +1710,11 @@ class GeoVectorDataset(VectorDataset):
  >>> # Show the final three columns of the dataframe
  >>> fl.dataframe.iloc[:, -3:].head()
  time air_temperature specific_humidity
- 0 2022-03-01 00:00:00 225.777939 0.000132
+ 0 2022-03-01 00:00:00 225.777946 0.000132
  1 2022-03-01 00:13:20 225.139084 0.000132
- 2 2022-03-01 00:26:40 226.231216 0.000107
- 3 2022-03-01 00:40:00 226.318314 0.000171
- 4 2022-03-01 00:53:20 225.561020 0.000109
+ 2 2022-03-01 00:26:40 226.231218 0.000107
+ 3 2022-03-01 00:40:00 226.318315 0.000171
+ 4 2022-03-01 00:53:20 225.561022 0.000109

  """
  # Override use_indices in certain situations
pycontrails/datalib/goes.py CHANGED
@@ -50,7 +50,7 @@ except ModuleNotFoundError as exc:


  #: Default channels to use if none are specified. These are the channels
- #: required by the MIT ash color scheme.
+ #: required by the SEVIRI (MIT) ash color scheme.
  DEFAULT_CHANNELS = "C11", "C14", "C15"

  #: The time at which the GOES scan mode changed from mode 3 to mode 6. This
@@ -203,10 +203,10 @@ def gcs_goes_path(
  GOES Region of interest.
  channels : str | Iterable[str]
  Set of channels or bands for CMIP data. The 16 possible channels are
- represented by the strings "C01" to "C16". For the MIT ash color scheme,
+ represented by the strings "C01" to "C16". For the SEVIRI ash color scheme,
  set ``channels=("C11", "C14", "C15")``. For the true color scheme,
  set ``channels=("C01", "C02", "C03")``. By default, the channels
- required by the MIT ash color scheme are used.
+ required by the SEVIRI ash color scheme are used.

  Returns
  -------
@@ -306,10 +306,10 @@ class GOES:

  channels : str | set[str] | None
  Set of channels or bands for CMIP data. The 16 possible channels are
- represented by the strings "C01" to "C16". For the MIT ash color scheme,
+ represented by the strings "C01" to "C16". For the SEVIRI ash color scheme,
  set ``channels=("C11", "C14", "C15")``. For the true color scheme,
  set ``channels=("C01", "C02", "C03")``. By default, the channels
- required by the MIT ash color scheme are used. The channels must have
+ required by the SEVIRI ash color scheme are used. The channels must have
  a common horizontal resolution. The resolutions are:

  - C01: 1.0 km
@@ -585,7 +585,7 @@ def _concat_c02(ds1: XArrayType, ds2: XArrayType) -> XArrayType:
  def extract_goes_visualization(
  da: xr.DataArray,
  color_scheme: str = "ash",
- ash_convention: str = "MIT",
+ ash_convention: str = "SEVIRI",
  gamma: float = 2.2,
  ) -> tuple[npt.NDArray[np.float32], ccrs.Geostationary, tuple[float, float, float, float]]:
  """Extract artifacts for visualizing GOES data with the given color scheme.
@@ -597,7 +597,7 @@ def extract_goes_visualization(
  required by :func:`to_ash`.
  color_scheme : str = {"ash", "true"}
  Color scheme to use for visualization.
- ash_convention : str = {"MIT", "standard"}
+ ash_convention : str = {"SEVIRI", "standard"}
  Passed into :func:`to_ash`. Only used if ``color_scheme="ash"``.
  gamma : float = 2.2
  Passed into :func:`to_true_color`. Only used if ``color_scheme="true"``.
@@ -672,17 +672,18 @@ def to_true_color(da: xr.DataArray, gamma: float = 2.2) -> npt.NDArray[np.float3
  return np.dstack([red, green, blue])


- def to_ash(da: xr.DataArray, convention: str = "MIT") -> npt.NDArray[np.float32]:
+ def to_ash(da: xr.DataArray, convention: str = "SEVIRI") -> npt.NDArray[np.float32]:
  """Compute 3d RGB array for the ASH color scheme.

  Parameters
  ----------
  da : xr.DataArray
  DataArray of GOES data with appropriate channels.
- convention : str = {"MIT", "standard"}
+ convention : str = {"SEVIRI", "standard"}
  Convention for color space.

- - MIT convention requires channels C11, C14, C15
+ - SEVIRI convention requires channels C11, C14, C15.
+ Used in :cite:`kulikSatellitebasedDetectionContrails2019`.
  - Standard convention requires channels C11, C13, C14, C15

  Returns
@@ -693,6 +694,7 @@ def to_ash(da: xr.DataArray, convention: str = "MIT") -> npt.NDArray[np.float32]
  References
  ----------
  - `Ash RGB quick guide (the color space and color interpretations) <https://rammb.cira.colostate.edu/training/visit/quick_guides/GOES_Ash_RGB.pdf>`_
+ - :cite:`SEVIRIRGBCal`
  - :cite:`kulikSatellitebasedDetectionContrails2019`

  Examples
@@ -716,7 +718,7 @@ def to_ash(da: xr.DataArray, convention: str = "MIT") -> npt.NDArray[np.float32]
  green = c14 - c11
  blue = c13

- elif convention == "MIT":
+ elif convention in ["SEVIRI", "MIT"]: # retain MIT for backwards compatibility
  c11 = da.sel(band_id=11).values # 8.44
  c14 = da.sel(band_id=14).values # 11.19
  c15 = da.sel(band_id=15).values # 12.27
@@ -726,7 +728,7 @@ def to_ash(da: xr.DataArray, convention: str = "MIT") -> npt.NDArray[np.float32]
  blue = c14

  else:
- raise ValueError("Convention must be either 'MIT' or 'standard'")
+ raise ValueError("Convention must be either 'SEVIRI' or 'standard'")

  # See colostate pdf for slightly wider values
  red = _clip_and_scale(red, -4.0, 2.0)
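
An illustrative call of the renamed convention (assuming ``da`` is an ``xr.DataArray`` of GOES CMIP data with channels C11, C14, and C15, as required by ``to_ash``; this sketch is not taken from the package docs):

    from pycontrails.datalib.goes import to_ash

    rgb = to_ash(da)                           # default convention is now "SEVIRI"
    rgb_legacy = to_ash(da, convention="MIT")  # still accepted for backwards compatibility
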
pycontrails/models/cocip/cocip.py CHANGED
@@ -26,6 +26,7 @@ from pycontrails.models.cocip import (
  contrail_properties,
  radiative_forcing,
  radiative_heating,
+ unterstrasser_wake_vortex,
  wake_vortex,
  wind_shear,
  )
@@ -838,9 +839,9 @@ class Cocip(Model):
  T_critical_sac = self._sac_flight["T_critical_sac"]

  # Flight performance parameters
- fuel_dist = (
- self._sac_flight.get_data_or_attr("fuel_flow") / self._sac_flight["true_airspeed"]
- )
+ fuel_flow = self._sac_flight.get_data_or_attr("fuel_flow")
+ true_airspeed = self._sac_flight["true_airspeed"]
+ fuel_dist = fuel_flow / true_airspeed

  nvpm_ei_n = self._sac_flight.get_data_or_attr("nvpm_ei_n")
  ei_h2o = self._sac_flight.fuel.ei_h2o
@@ -890,11 +891,27 @@ class Cocip(Model):
  air_temperature, air_pressure, air_pressure_1
  )
  iwc_1 = contrail_properties.iwc_post_wake_vortex(iwc, iwc_ad)
+
+ if self.params["unterstrasser_ice_survival_fraction"]:
+ wingspan = self._sac_flight.get_data_or_attr("wingspan")
+ rhi_0 = thermo.rhi(specific_humidity, air_temperature, air_pressure)
+ f_surv = unterstrasser_wake_vortex.ice_particle_number_survival_fraction(
+ air_temperature,
+ rhi_0,
+ ei_h2o,
+ wingspan,
+ true_airspeed,
+ fuel_flow,
+ nvpm_ei_n,
+ 0.5 * depth, # Taking the mid-point of the contrail plume
+ )
+ else:
+ f_surv = contrail_properties.ice_particle_survival_fraction(iwc, iwc_1)
+
  n_ice_per_m_1 = contrail_properties.ice_particle_number(
  nvpm_ei_n=nvpm_ei_n,
  fuel_dist=fuel_dist,
- iwc=iwc,
- iwc_1=iwc_1,
+ f_surv=f_surv,
  air_temperature=air_temperature,
  T_crit_sac=T_critical_sac,
  min_ice_particle_number_nvpm_ei_n=self.params["min_ice_particle_number_nvpm_ei_n"],
pycontrails/models/cocip/cocip_params.py CHANGED
@@ -138,12 +138,12 @@ class CocipParams(ModelParams):

  #: Add additional metric of ATR20 and global yearly mean RF to model output.
  #: These are not standard CoCiP outputs but based on the derivation used
- #: in the first supplement to :cite:`yin_predicting_2023`. ATR20 is defined
+ #: in the first supplement to :cite:`yinPredictingClimateImpact2023`. ATR20 is defined
  #: as the average temperature response over a 20 year horizon.
  compute_atr20: bool = False

  #: Constant factor used to convert global- and year-mean RF, [:math:`W m^{-2}`],
- #: to ATR20, [:math:`K`], given by :cite:`yin_predicting_2023`.
+ #: to ATR20, [:math:`K`], given by :cite:`yinPredictingClimateImpact2023`.
  global_rf_to_atr20_factor: float = 0.0151

  # ----------------
@@ -184,6 +184,15 @@ class CocipParams(ModelParams):
  #: :attr:`radiative_heating_effects` is enabled.
  max_depth: float = 1500.0

+ #: Experimental. Improved ice crystal number survival fraction in the wake vortex phase.
+ #: Implement :cite:`unterstrasserPropertiesYoungContrails2016`, who developed a
+ #: parametric model that estimates the survival fraction of the contrail ice crystal
+ #: number after the wake vortex phase based on the results from large eddy simulations.
+ #: This replicates Fig. 4 of :cite:`karcherFormationRadiativeForcing2018`.
+ #:
+ #: .. versionadded:: 0.50.1
+ unterstrasser_ice_survival_fraction: bool = False
+
  #: Experimental. Radiative heating effects on contrail cirrus properties.
  #: Terrestrial and solar radiances warm the contrail ice particles and cause
  #: convective turbulence. This effect is expected to enhance vertical mixing
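
A sketch of opting into the new experimental parameterization (``met``, ``rad``, and ``fl`` stand for the usual CoCiP inputs; note that the Unterstrasser path also reads the aircraft ``wingspan``, per the cocip.py hunk above):

    from pycontrails.models.cocip import Cocip

    cocip = Cocip(
        met=met,
        rad=rad,
        params={"unterstrasser_ice_survival_fraction": True},  # default remains False
    )
    out = cocip.eval(source=fl)
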
pycontrails/models/cocip/contrail_properties.py CHANGED
@@ -204,8 +204,7 @@ def iwc_post_wake_vortex(
  def ice_particle_number(
  nvpm_ei_n: npt.NDArray[np.float64],
  fuel_dist: npt.NDArray[np.float64],
- iwc: npt.NDArray[np.float64],
- iwc_1: npt.NDArray[np.float64],
+ f_surv: npt.NDArray[np.float64],
  air_temperature: npt.NDArray[np.float64],
  T_crit_sac: npt.NDArray[np.float64],
  min_ice_particle_number_nvpm_ei_n: float,
@@ -223,11 +222,8 @@
  black carbon number emissions index, [:math:`kg^{-1}`]
  fuel_dist : npt.NDArray[np.float64]
  fuel consumption of the flight segment per distance traveled, [:math:`kg m^{-1}`]
- iwc : npt.NDArray[np.float64]
- initial ice water content at each flight waypoint before the wake vortex
- phase, [:math:`kg_{H_{2}O}/kg_{air}`]
- iwc_1 : npt.NDArray[np.float64]
- ice water content after the wake vortex phase, [:math:`kg_{H_{2}O}/kg_{air}`]
+ f_surv : npt.NDArray[np.float64]
+ Fraction of contrail ice particle number that survive the wake vortex phase.
  air_temperature : npt.NDArray[np.float64]
  ambient temperature for each waypoint, [:math:`K`]
  T_crit_sac : npt.NDArray[np.float64]
@@ -241,7 +237,6 @@
  npt.NDArray[np.float64]
  initial number of ice particles per distance after the wake vortex phase, [:math:`# m^{-1}`]
  """
- f_surv = ice_particle_survival_factor(iwc, iwc_1)
  f_activation = ice_particle_activation_rate(air_temperature, T_crit_sac)
  nvpm_ei_n_activated = nvpm_ei_n * f_activation
  return fuel_dist * np.maximum(nvpm_ei_n_activated, min_ice_particle_number_nvpm_ei_n) * f_surv
@@ -289,7 +284,7 @@
  return -0.661 * np.exp(d_temp) + 1.0


- def ice_particle_survival_factor(
+ def ice_particle_survival_fraction(
  iwc: npt.NDArray[np.float64], iwc_1: npt.NDArray[np.float64]
  ) -> npt.NDArray[np.float64]:
  """