pycontrails-0.52.0-cp310-cp310-macosx_11_0_arm64.whl → pycontrails-0.52.2-cp310-cp310-macosx_11_0_arm64.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that public registry.

Potentially problematic release: this version of pycontrails might be problematic.

pycontrails/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE
 
- __version__ = version = '0.52.0'
- __version_tuple__ = version_tuple = (0, 52, 0)
+ __version__ = version = '0.52.2'
+ __version_tuple__ = version_tuple = (0, 52, 2)
@@ -9,6 +9,7 @@ from typing import Any, Generic, NoReturn, overload
 
  import numpy as np
  import numpy.typing as npt
+ from overrides import overrides
 
  from pycontrails.core import flight, fuel
  from pycontrails.core.flight import Flight
@@ -39,6 +40,19 @@ class AircraftPerformanceParams(ModelParams):
  #: The default value of 3 is sufficient for most cases.
  n_iter: int = 3
 
+ #: Experimental. If True, fill waypoints below the lowest altitude met
+ #: level with ISA temperature when interpolating "air_temperature" or "t".
+ #: If the ``met`` data is not provided, the entire air temperature array
+ #: is approximated with the ISA temperature. Enabling this does NOT
+ #: remove any NaN values in the ``met`` data itself.
+ fill_low_altitude_with_isa_temperature: bool = False
+
+ #: Experimental. If True, fill waypoints below the lowest altitude met
+ #: level with zero wind when computing true airspeed. In other words,
+ #: approximate low-altitude true airspeed with the ground speed. Enabling
+ #: this does NOT remove any NaN values in the ``met`` data itself.
+ fill_low_altitude_with_zero_wind: bool = False
+
 
  class AircraftPerformance(Model):
  """
@@ -104,6 +118,23 @@ class AircraftPerformance(Model):
  Flight trajectory with aircraft performance data.
  """
 
+ @overrides
+ def set_source_met(self, *args: Any, **kwargs: Any) -> None:
+ fill_with_isa = self.params["fill_low_altitude_with_isa_temperature"]
+ if fill_with_isa and (self.met is None or "air_temperature" not in self.met):
+ if "air_temperature" in self.source:
+ _fill_low_altitude_with_isa_temperature(self.source, 0.0)
+ else:
+ self.source["air_temperature"] = self.source.T_isa()
+ fill_with_isa = False # we've just filled it
+
+ super().set_source_met(*args, **kwargs)
+ if not fill_with_isa:
+ return
+
+ met_level_0 = self.met.data["level"][-1].item() # type: ignore[union-attr]
+ _fill_low_altitude_with_isa_temperature(self.source, met_level_0)
+
  def simulate_fuel_and_performance(
  self,
  *,
@@ -426,27 +457,41 @@ class AircraftPerformance(Model):
  on :attr:`source`, this is returned directly. Otherwise, it is calculated
  using :meth:`Flight.segment_true_airspeed`.
  """
+ tas = self.source.get("true_airspeed")
+ fill_with_groundspeed = self.params["fill_low_altitude_with_zero_wind"]
+
+ if tas is not None:
+ if not fill_with_groundspeed:
+ return tas
+ cond = np.isnan(tas)
+ tas[cond] = self.source.segment_groundspeed()[cond]
+ return tas
+
+ met_incomplete = (
+ self.met is None or "eastward_wind" not in self.met or "northward_wind" not in self.met
+ )
+ if met_incomplete:
+ if fill_with_groundspeed:
+ tas = self.source.segment_groundspeed()
+ self.source["true_airspeed"] = tas
+ return tas
+ msg = (
+ "Cannot compute 'true_airspeed' without 'eastward_wind' and 'northward_wind' "
+ "met data. Either include met data in the model constructor, define "
+ "'true_airspeed' data on the flight, or set "
+ "'fill_low_altitude_with_zero_wind' to True."
+ )
+ raise ValueError(msg)
 
- try:
- return self.source["true_airspeed"]
- except KeyError:
- pass
-
- if not isinstance(self.source, Flight):
- raise TypeError("Model source must be a Flight to calculate true airspeed.")
-
- # Two step fallback: try to find u_wind and v_wind.
- try:
- u = interpolate_met(self.met, self.source, "eastward_wind", **self.interp_kwargs)
- v = interpolate_met(self.met, self.source, "northward_wind", **self.interp_kwargs)
+ u = interpolate_met(self.met, self.source, "eastward_wind", **self.interp_kwargs)
+ v = interpolate_met(self.met, self.source, "northward_wind", **self.interp_kwargs)
 
- except (ValueError, KeyError) as exc:
- raise ValueError(
- "Variable 'true_airspeed' not found. Include 'eastward_wind' and"
- " 'northward_wind' variables on 'met' in model constructor, or define"
- " 'true_airspeed' data on flight. This can be achieved by calling the"
- " 'Flight.segment_true_airspeed' method."
- ) from exc
+ if fill_with_groundspeed:
+ met_level_max = self.met.data["level"][-1].item() # type: ignore[union-attr]
+ cond = self.source.level > met_level_max
+ # We DON'T overwrite the original u and v arrays already attached to the source
+ u = np.where(cond, 0.0, u)
+ v = np.where(cond, 0.0, v)
 
  out = self.source.segment_true_airspeed(u, v)
  self.source["true_airspeed"] = out
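For context on the zero-wind fallback above: true airspeed is the magnitude of the ground velocity minus the wind vector, so zeroing the wind below the met domain collapses it to the ground speed. A rough standalone illustration with hypothetical numbers (not the library implementation):

```python
import numpy as np

# Hypothetical per-segment ground velocity components, m/s
gs_east = np.array([230.0, 232.0])
gs_north = np.array([10.0, 12.0])

# Interpolated winds; NaN below the lowest met level
u_wind = np.array([np.nan, 25.0])
v_wind = np.array([np.nan, -5.0])

# Zero-wind fill: NaN winds become 0, so TAS reduces to ground speed there
u_wind = np.nan_to_num(u_wind)
v_wind = np.nan_to_num(v_wind)
tas = np.hypot(gs_east - u_wind, gs_north - v_wind)
```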
@@ -543,3 +588,54 @@ class AircraftPerformanceGridData(Generic[ArrayOrFloat]):
 
  #: Engine efficiency, [:math:`0-1`]
  engine_efficiency: ArrayOrFloat
+
+
+ def _fill_low_altitude_with_isa_temperature(vector: GeoVectorDataset, met_level_max: float) -> None:
+ """Fill low-altitude NaN values in ``air_temperature`` with ISA values.
+
+ The ``air_temperature`` param is assumed to have been computed by
+ interpolating against a gridded air temperature field that did not
+ necessarily extend to the surface. This function fills points below the
+ lowest altitude in the gridded data with ISA temperature values.
+
+ This function operates in-place and modifies the ``air_temperature`` field.
+
+ Parameters
+ ----------
+ vector : GeoVectorDataset
+ GeoVectorDataset instance associated with the ``air_temperature`` data.
+ met_level_max : float
+ The maximum level in the met data, [:math:`hPa`].
+ """
+ air_temperature = vector["air_temperature"]
+ is_nan = np.isnan(air_temperature)
+ low_alt = vector.level > met_level_max
+ cond = is_nan & low_alt
+
+ t_isa = vector.T_isa()
+ air_temperature[cond] = t_isa[cond]
+
+
+ def _fill_low_altitude_tas_with_true_groundspeed(fl: Flight, met_level_max: float) -> None:
+ """Fill low-altitude NaN values in ``true_airspeed`` with ground speed.
+
+ The ``true_airspeed`` param is assumed to have been computed by
+ interpolating against a gridded wind field that did not necessarily
+ extend to the surface. This function fills points below the lowest
+ altitude in the gridded data with ground speed values.
+
+ This function operates in-place and modifies the ``true_airspeed`` field.
+
+ Parameters
+ ----------
+ fl : Flight
+ Flight instance associated with the ``true_airspeed`` data.
+ met_level_max : float
+ The maximum level in the met data, [:math:`hPa`].
+ """
+ tas = fl["true_airspeed"]
+ is_nan = np.isnan(tas)
+ low_alt = fl.level > met_level_max
+ cond = is_nan & low_alt
+
+ tas[cond] = fl.segment_groundspeed()[cond]
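As background for the ISA fill, a self-contained approximation of the ISA temperature profile that the helper relies on below the 11 km tropopause (illustration only; the library itself uses `units.m_to_T_isa` via `T_isa()`):

```python
import numpy as np

def t_isa_sketch(altitude_m: np.ndarray) -> np.ndarray:
    """Approximate ISA temperature [K] with the standard 6.5 K/km lapse rate."""
    t_sea_level = 288.15  # K
    lapse_rate = 0.0065   # K per meter, valid below the ~11 km tropopause
    return t_sea_level - lapse_rate * np.minimum(altitude_m, 11_000.0)

t_isa_sketch(np.array([0.0, 3_000.0, 10_000.0]))
# -> array([288.15, 268.65, 223.15])
```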
@@ -59,27 +59,27 @@ def slice_domain(
  >>> # Call with request as np.array
  >>> request = np.linspace(-20, 20, 100)
  >>> slice_domain(domain, request)
- slice(640, 801, None)
+ slice(np.int64(640), np.int64(801), None)
 
  >>> # Call with request as tuple
  >>> request = -20, 20
  >>> slice_domain(domain, request)
- slice(640, 801, None)
+ slice(np.int64(640), np.int64(801), None)
 
  >>> # Call with a buffer
  >>> request = -16, 13
  >>> buffer = 4, 7
  >>> slice_domain(domain, request, buffer)
- slice(640, 801, None)
+ slice(np.int64(640), np.int64(801), None)
 
  >>> # Call with request as a single number
  >>> request = -20
  >>> slice_domain(domain, request)
- slice(640, 641, None)
+ slice(np.int64(640), np.int64(641), None)
 
  >>> request = -19.9
  >>> slice_domain(domain, request)
- slice(640, 642, None)
+ slice(np.int64(640), np.int64(642), None)
 
  """
  # if the length of domain coordinates is <= 2, return the whole domain
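The doctest updates in this hunk (and in several hunks below) track the NumPy 2.0 scalar repr, consistent with the relaxed `numpy >=1.22` requirement later in this diff. A minimal illustration, assuming NumPy 2.x is installed:

```python
import numpy as np

value = np.array([640, 801])[0]
repr(value)  # NumPy >= 2.0: 'np.int64(640)'; NumPy 1.x printed plain '640'
```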
pycontrails/core/fleet.py CHANGED
@@ -226,28 +226,29 @@ class Fleet(Flight):
  return len(self.fl_attrs)
 
  def to_flight_list(self, copy: bool = True) -> list[Flight]:
- """De-concatenate merged waypoints into a list of Flight instances.
+ """De-concatenate merged waypoints into a list of :class:`Flight` instances.
 
  Any global :attr:`attrs` are lost.
 
  Parameters
  ----------
  copy : bool, optional
- If True, make copy of each flight instance in `seq`.
+ If True, make copy of each :class:`Flight` instance.
 
  Returns
  -------
  list[Flight]
- List of Flights in the same order as was passed into the `Fleet` instance.
+ List of Flights in the same order as was passed into the ``Fleet`` instance.
  """
-
- # Avoid self.dataframe to purposely drop global attrs
- tmp = pd.DataFrame(self.data, copy=copy)
- grouped = tmp.groupby("flight_id", sort=False)
-
+ indices = self.dataframe.groupby("flight_id", sort=False).indices
  return [
- Flight(df, attrs=self.fl_attrs[flight_id], fuel=self.fuel, copy=copy)
- for flight_id, df in grouped
+ Flight(
+ data=VectorDataDict({k: v[idx] for k, v in self.data.items()}),
+ attrs=self.fl_attrs[flight_id],
+ copy=copy,
+ fuel=self.fuel,
+ )
+ for flight_id, idx in indices.items()
  ]
 
  ###################################
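The rewritten `to_flight_list` splits the concatenated arrays with `DataFrameGroupBy.indices` instead of materializing one DataFrame per flight. A standalone sketch of that pattern with toy data (not the `Fleet` internals):

```python
import numpy as np
import pandas as pd

data = {
    "flight_id": np.array(["a", "a", "b", "b", "b"]),
    "altitude": np.array([10000.0, 10100.0, 9000.0, 9100.0, 9200.0]),
}

# Mapping of flight_id -> integer positions, computed once
indices = pd.DataFrame(data).groupby("flight_id", sort=False).indices

# Slice the underlying numpy arrays directly, avoiding per-group DataFrame copies
flights = {fid: {k: v[idx] for k, v in data.items()} for fid, idx in indices.items()}
flights["b"]["altitude"]  # array([9000., 9100., 9200.])
```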
@@ -384,7 +384,7 @@ class Flight(GeoVectorDataset):
  ... time=pd.date_range('2021-01-01T12', '2021-01-01T14', periods=200),
  ... )
  >>> fl.max_distance_gap
- 7391.27...
+ np.float64(7391.27...)
  """
  if self.attrs["crs"] != "EPSG:4326":
  raise NotImplementedError("Only implemented for EPSG:4326 CRS.")
@@ -415,7 +415,7 @@ class Flight(GeoVectorDataset):
  ... time=pd.date_range('2021-01-01T12', '2021-01-01T14', periods=200),
  ... )
  >>> fl.length
- 1436924.67...
+ np.float64(1436924.67...)
  """
  if self.attrs["crs"] != "EPSG:4326":
  raise NotImplementedError("Only implemented for EPSG:4326 CRS.")
@@ -954,28 +954,13 @@ class Flight(GeoVectorDataset):
  # STEP 3: Set the time index, and sort it
  df = df.set_index("time", verify_integrity=True).sort_index()
 
- # STEP 4: Some adhoc code for dealing with antimeridian.
- # Idea: A flight likely crosses the antimeridian if
- # `min_pos > 90` and `max_neg < -90`
- # This is not foolproof: it assumes the full trajectory will not
- # span more than 180 longitude degrees. There could be flights that
- # violate this near the poles (but this would be very rare -- flights
- # would instead wrap the other way). For this flights spanning the
- # antimeridian, we translate them to a common "chart" away from the
- # antimeridian (see variable `shift`), then apply the interpolation,
- # then shift back to their original position.
- lon = df["longitude"].to_numpy()
- sign_ = np.sign(lon)
- min_pos = np.min(lon[sign_ == 1.0], initial=np.inf)
- max_neg = np.max(lon[sign_ == -1.0], initial=-np.inf)
-
- if (180.0 - min_pos) + (180.0 + max_neg) < 180.0 and min_pos < np.inf and max_neg > -np.inf:
- # In this case, we believe the flight crosses the antimeridian
- shift = min_pos
- # So we shift the longitude "chart"
+ # STEP 4: handle antimeridian crossings
+ # For flights spanning the antimeridian, we translate them to a
+ # common "chart" away from the antimeridian (see variable `shift`),
+ # then apply the interpolation, then shift back to their original position.
+ shift = self._antimeridian_shift()
+ if shift is not None:
  df["longitude"] = (df["longitude"] - shift) % 360.0
- else:
- shift = None
 
  # STEP 5: Resample flight to freq
  # Save altitudes to copy over - these just get rounded down in time.
@@ -1189,19 +1174,12 @@ class Flight(GeoVectorDataset):
  """
 
  # Check if flight crosses antimeridian line
+ # If it does, shift longitude chart to remove jump
  lon_ = self["longitude"]
  lat_ = self["latitude"]
- sign_ = np.sign(lon_)
- min_pos = np.min(lon_[sign_ == 1.0], initial=np.inf)
- max_neg = np.max(lon_[sign_ == -1.0], initial=-np.inf)
-
- if (180.0 - min_pos) + (180.0 + max_neg) < 180.0 and min_pos < np.inf and max_neg > -np.inf:
- # In this case, we believe the flight crosses the antimeridian
- shift = min_pos
- # So we shift the longitude "chart"
+ shift = self._antimeridian_shift()
+ if shift is not None:
  lon_ = (lon_ - shift) % 360.0
- else:
- shift = None
 
  # Make a fake flight that flies at constant height so distance is just
  # distance traveled across groud
@@ -1262,6 +1240,55 @@ class Flight(GeoVectorDataset):
 
  return lat, lon, seg_idx
 
+ def _antimeridian_shift(self) -> float | None:
+ """Determine shift required for resampling trajectories that cross antimeridian.
+
+ Because flights sometimes span more than 180 degree longitude (for example,
+ when flight-level winds favor travel in a specific direction, typically eastward),
+ antimeridian crossings cannot reliably be detected by looking only at minimum
+ and maximum longitudes.
+
+ Instead, this function checks each flight segment for an antimeridian crossing,
+ and if it finds one returns the coordinate of a meridian that is not crossed by
+ the flight.
+
+ Returns
+ -------
+ float | None
+ Longitude shift for handling antimeridian crossings, or None if the
+ flight does not cross the antimeridian.
+ """
+
+ # logic for detecting crossings is consistent with _antimeridian_crossing,
+ # but implementation is separate to keep performance costs as low as possible
+ lon = self["longitude"]
+ if np.any(np.isnan(lon)):
+ warnings.warn("Anti-meridian crossings can't be reliably detected with nan longitudes")
+
+ s1 = (lon >= -180) & (lon <= -90)
+ s2 = (lon <= 180) & (lon >= 90)
+ jump12 = s1[:-1] & s2[1:] # westward
+ jump21 = s2[:-1] & s1[1:] # eastward
+ if not np.any(jump12 | jump21):
+ return None
+
+ # separate flight into segments that are east and west of crossings
+ net_westward = np.insert(np.cumsum(jump12.astype(int) - jump21.astype(int)), 0, 0)
+ max_westward = net_westward.max()
+ if max_westward - net_westward.min() > 1:
+ msg = "Cannot handle consecutive antimeridian crossings in the same direction"
+ raise ValueError(msg)
+ east = (net_westward == 0) if max_westward == 1 else (net_westward == -1)
+
+ # shift must be between maximum longitude east of crossings
+ # and minimum longitude west of crossings
+ shift_min = np.nanmax(lon[east])
+ shift_max = np.nanmin(lon[~east])
+ if shift_min >= shift_max:
+ msg = "Cannot handle flight that spans more than 360 degrees longitude"
+ raise ValueError(msg)
+ return (shift_min + shift_max) / 2
+
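A self-contained sketch of the segment-wise crossing test used by the new method (toy longitudes only; the real implementation additionally handles NaNs, multiple crossings, and the choice of shift meridian):

```python
import numpy as np

# Toy eastbound trajectory crossing the antimeridian between 175E and -178E
lon = np.array([170.0, 175.0, -178.0, -172.0])

near_west = (lon >= -180) & (lon <= -90)  # within 90 degrees of the antimeridian, west side
near_east = (lon <= 180) & (lon >= 90)    # within 90 degrees of the antimeridian, east side
jump_to_west = near_east[:-1] & near_west[1:]
jump_to_east = near_west[:-1] & near_east[1:]

bool(np.any(jump_to_west | jump_to_east))  # True: the segment 175 -> -178 crosses
```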
  def _geodesic_interpolation(self, geodesic_threshold: float) -> pd.DataFrame | None:
  """Geodesic interpolate between large gaps between waypoints.
 
@@ -1506,25 +1533,25 @@ class Flight(GeoVectorDataset):
 
  >>> # Build flight
  >>> df = pd.DataFrame()
- >>> df['time'] = pd.date_range('2022-03-01T00', '2022-03-01T03', periods=11)
- >>> df['longitude'] = np.linspace(-20, 20, 11)
- >>> df['latitude'] = np.linspace(-20, 20, 11)
- >>> df['altitude'] = np.linspace(9500, 10000, 11)
- >>> fl = Flight(df).resample_and_fill('10s')
+ >>> df["time"] = pd.date_range("2022-03-01T00", "2022-03-01T03", periods=11)
+ >>> df["longitude"] = np.linspace(-20, 20, 11)
+ >>> df["latitude"] = np.linspace(-20, 20, 11)
+ >>> df["altitude"] = np.linspace(9500, 10000, 11)
+ >>> fl = Flight(df).resample_and_fill("10s")
 
  >>> # Intersect and attach
- >>> fl["air_temperature"] = fl.intersect_met(met['air_temperature'])
+ >>> fl["air_temperature"] = fl.intersect_met(met["air_temperature"])
  >>> fl["air_temperature"]
- array([235.94657007, 235.95766965, 235.96873412, ..., 234.59917962,
+ array([235.94657007, 235.55745645, 235.56709768, ..., 234.59917962,
  234.60387402, 234.60845312])
 
  >>> # Length (in meters) of waypoints whose temperature exceeds 236K
  >>> fl.length_met("air_temperature", threshold=236)
- 4132178.159...
+ np.float64(3589705.998...)
 
  >>> # Proportion (with respect to distance) of waypoints whose temperature exceeds 236K
  >>> fl.proportion_met("air_temperature", threshold=236)
- 0.663552...
+ np.float64(0.576...)
  """
  if key not in self.data:
  raise KeyError(f"Column {key} does not exist in data.")
@@ -1591,10 +1618,30 @@ class Flight(GeoVectorDataset):
  :class:`matplotlib.axes.Axes`
  Plot
  """
- ax = self.dataframe.plot(x="longitude", y="latitude", legend=False, **kwargs)
+ kwargs.setdefault("legend", False)
+ ax = self.dataframe.plot(x="longitude", y="latitude", **kwargs)
  ax.set(xlabel="longitude", ylabel="latitude")
  return ax
 
+ def plot_profile(self, **kwargs: Any) -> matplotlib.axes.Axes:
+ """Plot flight trajectory time-altitude values.
+
+ Parameters
+ ----------
+ **kwargs : Any
+ Additional plot properties to passed to `pd.DataFrame.plot`
+
+ Returns
+ -------
+ :class:`matplotlib.axes.Axes`
+ Plot
+ """
+ kwargs.setdefault("legend", False)
+ df = self.dataframe.assign(altitude_ft=self.altitude_ft)
+ ax = df.plot(x="time", y="altitude_ft", **kwargs)
+ ax.set(xlabel="time", ylabel="altitude_ft")
+ return ax
+
 
  def _return_linestring(data: dict[str, npt.NDArray[np.float64]]) -> list[list[float]]:
  """Return list of coordinates for geojson constructions.
@@ -1631,18 +1678,14 @@ def _antimeridian_index(longitude: pd.Series, crs: str = "EPSG:4326") -> list[in
 
  Returns
  -------
- int
- Index after jump or -1
+ list[int]
+ Indices after jump, or empty list of flight does not cross antimeridian.
 
  Raises
  ------
  ValueError
  CRS is not supported.
- Flight crosses antimeridian several times.
  """
- # FIXME: This logic here is somewhat outdated - the _interpolate_altitude
- # method handles this somewhat more reliably
- # This function should get updated to follow the logic there.
  # WGS84
  if crs in ["EPSG:4326"]:
  l1 = (-180.0, -90.0)
@@ -1878,7 +1921,7 @@ def _altitude_interpolation_climb_descend_middle(
  s = pd.Series(altitude)
 
  # Check to see if we have gaps greater than two hours
- step_threshold = 120.0 * freq / np.timedelta64(1, "m")
+ step_threshold = np.timedelta64(2, "h") / freq
  step_groups = na_group_size > step_threshold
  if np.any(step_groups):
  # If there are gaps greater than two hours, step through one by one
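The new threshold expresses the two-hour cutoff as a number of resampling steps via numpy timedelta division. A small sanity check, assuming a hypothetical 1-minute resampling frequency:

```python
import numpy as np

freq = np.timedelta64(1, "m")                   # hypothetical resampling frequency
step_threshold = np.timedelta64(2, "h") / freq  # -> 120.0
# NaN groups longer than 120 steps correspond to gaps of more than two hours
```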
@@ -2214,16 +2257,14 @@ def segment_rocd(
  if air_temperature is None:
  return out
 
- else:
- altitude_m = units.ft_to_m(altitude_ft)
- T_isa = units.m_to_T_isa(altitude_m)
+ altitude_m = units.ft_to_m(altitude_ft)
+ T_isa = units.m_to_T_isa(altitude_m)
 
- T_correction = np.empty_like(altitude_ft)
- T_correction[:-1] = (0.5 * (air_temperature[:-1] + air_temperature[1:])) / (
- 0.5 * (T_isa[:-1] + T_isa[1:])
- )
- T_correction[-1] = np.nan
- return T_correction * out
+ T_correction = np.empty_like(altitude_ft)
+ T_correction[:-1] = (air_temperature[:-1] + air_temperature[1:]) / (T_isa[:-1] + T_isa[1:])
+ T_correction[-1] = np.nan
+
+ return T_correction * out
 
 
  def _resample_to_freq(df: pd.DataFrame, freq: str) -> tuple[pd.DataFrame, pd.DatetimeIndex]:
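The dropped `0.5` factors cancel between numerator and denominator, so the temperature correction itself is unchanged; only redundant scaling is removed. A quick check with hypothetical segment temperatures:

```python
import numpy as np

t_ambient = (220.0, 222.0)  # ambient temperature at segment endpoints, K
t_isa = (216.65, 218.0)     # ISA temperature at the same altitudes, K

old = (0.5 * sum(t_ambient)) / (0.5 * sum(t_isa))
new = sum(t_ambient) / sum(t_isa)
np.isclose(old, new)        # True: identical correction factor
```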
@@ -621,11 +621,11 @@ class EmissionsProfileInterpolator:
  >>> epi = EmissionsProfileInterpolator(xp, fp)
  >>> # Interpolate a single value
  >>> epi.interp(5)
- 0.150000...
+ np.float64(0.150000...)
 
  >>> # Interpolate a single value on a logarithmic scale
  >>> epi.log_interp(5)
- 1.105171...
+ np.float64(1.105171...)
 
  >>> # Demonstrate speed up compared with xarray.DataArray interpolation
  >>> import time, xarray as xr
pycontrails/core/met.py CHANGED
@@ -1375,9 +1375,10 @@ class MetDataArray(MetBase):
 
  # try to create DataArray out of input data and **kwargs
  if not isinstance(data, xr.DataArray):
- DeprecationWarning(
+ warnings.warn(
  "Input 'data' must be an xarray DataArray. "
- "Passing arbitrary kwargs will be removed in future versions."
+ "Passing arbitrary kwargs will be removed in future versions.",
+ DeprecationWarning,
  )
  data = xr.DataArray(data, **kwargs)
 
@@ -2609,13 +2610,14 @@ def _load(hash: str, cachestore: CacheStore, chunks: dict[str, int]) -> xr.Datas
 
 
  def _add_vertical_coords(data: XArrayType) -> XArrayType:
- """Add "air_pressure" and "altitude" coordinates to data."""
+ """Add "air_pressure" and "altitude" coordinates to data.
 
- data["level"].attrs.update(units="hPa", long_name="Pressure", positive="down")
+ .. versionchanged:: 0.52.1
+ Ensure that the ``dtype`` of the additional vertical coordinates agree
+ with the ``dtype`` of the underlying gridded data.
+ """
 
- coords = data.coords
- if "air_pressure" in coords and "altitude" in coords:
- return data
+ data["level"].attrs.update(units="hPa", long_name="Pressure", positive="down")
 
  # XXX: use the dtype of the data to determine the precision of these coordinates
  # There are two competing conventions here:
@@ -2625,25 +2627,32 @@ def _add_vertical_coords(data: XArrayType) -> XArrayType:
  # It is more important for air_pressure and altitude to be grid-aligned than to be
  # coordinate-aligned, so we use the dtype of the data to determine the precision of
  # these coordinates
- if isinstance(data, xr.Dataset):
- dtype = np.result_type(*data.data_vars.values(), np.float32)
- else:
- dtype = data.dtype
- level = data["level"].values.astype(dtype, copy=False)
+ dtype = (
+ np.result_type(*data.data_vars.values(), np.float32)
+ if isinstance(data, xr.Dataset)
+ else data.dtype
+ )
 
- if "air_pressure" not in coords:
+ level = data["level"].values
+
+ if "air_pressure" not in data.coords:
  data = data.assign_coords(air_pressure=("level", level * 100.0))
  data.coords["air_pressure"].attrs.update(
  standard_name=AirPressure.standard_name,
  long_name=AirPressure.long_name,
  units=AirPressure.units,
  )
- if "altitude" not in coords:
+ if data.coords["air_pressure"].dtype != dtype:
+ data.coords["air_pressure"] = data.coords["air_pressure"].astype(dtype, copy=False)
+
+ if "altitude" not in data.coords:
  data = data.assign_coords(altitude=("level", units.pl_to_m(level)))
  data.coords["altitude"].attrs.update(
  standard_name=Altitude.standard_name,
  long_name=Altitude.long_name,
  units=Altitude.units,
  )
+ if data.coords["altitude"].dtype != dtype:
+ data.coords["altitude"] = data.coords["altitude"].astype(dtype, copy=False)
 
  return data
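The dtype chosen for the added coordinates follows the promotion rule kept in the code above. A small illustration of the `np.result_type` behavior it relies on (plain numpy arrays standing in for dataset variables):

```python
import numpy as np

# A float32 variable promoted against the float32 floor stays float32...
np.result_type(np.zeros(3, dtype=np.float32), np.float32)  # dtype('float32')

# ...while any float64 variable promotes the vertical coordinates to float64
np.result_type(np.zeros(3, dtype=np.float64), np.float32)  # dtype('float64')
```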
@@ -362,6 +362,8 @@ class Model(ABC):
  def interp_kwargs(self) -> dict[str, Any]:
  """Shortcut to create interpolation arguments from :attr:`params`.
 
+ The output of this is useful for passing to :func:`interpolate_met`.
+
  Returns
  -------
  dict[str, Any]
@@ -376,13 +378,14 @@ class Model(ABC):
 
  as determined by :attr:`params`.
  """
+ params = self.params
  return {
- "method": self.params["interpolation_method"],
- "bounds_error": self.params["interpolation_bounds_error"],
- "fill_value": self.params["interpolation_fill_value"],
- "localize": self.params["interpolation_localize"],
- "use_indices": self.params["interpolation_use_indices"],
- "q_method": self.params["interpolation_q_method"],
+ "method": params["interpolation_method"],
+ "bounds_error": params["interpolation_bounds_error"],
+ "fill_value": params["interpolation_fill_value"],
+ "localize": params["interpolation_localize"],
+ "use_indices": params["interpolation_use_indices"],
+ "q_method": params["interpolation_q_method"],
  }
 
  def require_met(self) -> MetDataset:
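As the added docstring line notes, the dictionary is meant to be splatted into `interpolate_met`. A hedged usage sketch, assuming an existing model instance `model` and a `GeoVectorDataset` named `vector` (both names are illustrative):

```python
from pycontrails.core.models import interpolate_met

# Interpolate gridded air temperature onto the vector's waypoints, reusing
# the interpolation settings configured in the model's params.
air_temperature = interpolate_met(
    model.met,
    vector,
    "air_temperature",
    **model.interp_kwargs,
)
```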
@@ -585,16 +588,7 @@ class Model(ABC):
  KeyError
  Variable not found in :attr:`source` or :attr:`met`.
  """
- variables: Sequence[MetVariable | tuple[MetVariable, ...]]
- if variable is None:
- if optional:
- variables = (*self.met_variables, *self.optional_met_variables)
- else:
- variables = self.met_variables
- elif isinstance(variable, MetVariable):
- variables = (variable,)
- else:
- variables = variable
+ variables = self._determine_relevant_variables(optional, variable)
 
  q_method = self.params["interpolation_q_method"]
 
@@ -640,6 +634,20 @@ class Model(ABC):
  met_key, da, self.source, self.params, q_method
  )
 
+ def _determine_relevant_variables(
+ self,
+ optional: bool,
+ variable: MetVariable | Sequence[MetVariable] | None,
+ ) -> Sequence[MetVariable | tuple[MetVariable, ...]]:
+ """Determine the relevant variables used in :meth:`set_source_met`."""
+ if variable is None:
+ if optional:
+ return (*self.met_variables, *self.optional_met_variables)
+ return self.met_variables
+ if isinstance(variable, MetVariable):
+ return (variable,)
+ return variable
+
  # Following python implementation
  # https://github.com/python/cpython/blob/618b7a8260bb40290d6551f24885931077309590/Lib/collections/__init__.py#L231
  __marker = object()
@@ -814,6 +822,7 @@ def interpolate_met(
  vector: GeoVectorDataset,
  met_key: str,
  vector_key: str | None = None,
+ *,
  q_method: str | None = None,
  **interp_kwargs: Any,
  ) -> npt.NDArray[np.float64]:
@@ -536,7 +536,7 @@ def multipolygon_to_geojson(
  else:
  shape = len(ring.coords), 3
  coords = np.empty(shape)
- coords[:, :2] = ring.coords
+ coords[:, :2] = np.asarray(ring.coords)
  coords[:, 2] = altitude
 
  poly_coords.append(coords.tolist())
@@ -657,7 +657,7 @@ class VectorDataset:
  8 15 18
 
  """
- vectors = [v for v in vectors if v] # remove empty vectors
+ vectors = [v for v in vectors if v is not None] # remove None values
 
  if not vectors:
  return cls()
@@ -707,36 +707,33 @@ class VectorDataset:
  bool
  True if both instances have identical :attr:`data` and :attr:`attrs`.
  """
- if isinstance(other, VectorDataset):
- # assert attrs equal
- for key in self.attrs:
- if isinstance(self.attrs[key], np.ndarray):
- # equal_nan not supported for non-numeric data
- equal_nan = not np.issubdtype(self.attrs[key].dtype, "O")
- try:
- eq = np.array_equal(self.attrs[key], other.attrs[key], equal_nan=equal_nan)
- except KeyError:
- return False
- else:
- eq = self.attrs[key] == other.attrs[key]
-
- if not eq:
- return False
+ if not isinstance(other, VectorDataset):
+ return False
 
- # assert data equal
- for key in self:
- # equal_nan not supported for non-numeric data (e.g. strings)
- equal_nan = not np.issubdtype(self[key].dtype, "O")
- try:
- eq = np.array_equal(self[key], other[key], equal_nan=equal_nan)
- except KeyError:
- return False
+ # Check attrs
+ if self.attrs.keys() != other.attrs.keys():
+ return False
 
- if not eq:
+ for key, val in self.attrs.items():
+ if isinstance(val, np.ndarray):
+ # equal_nan not supported for non-numeric data
+ equal_nan = not np.issubdtype(val.dtype, "O")
+ if not np.array_equal(val, other.attrs[key], equal_nan=equal_nan):
  return False
+ elif val != other.attrs[key]:
+ return False
+
+ # Check data
+ if self.data.keys() != other.data.keys():
+ return False
 
- return True
- return False
+ for key, val in self.data.items():
+ # equal_nan not supported for non-numeric data (e.g. strings)
+ equal_nan = not np.issubdtype(val.dtype, "O")
+ if not np.array_equal(val, other[key], equal_nan=equal_nan):
+ return False
+
+ return True
 
  @property
  def size(self) -> int:
@@ -986,7 +983,7 @@ class VectorDataset:
  numeric_attrs = (
  attr
  for attr, val in self.attrs.items()
- if (isinstance(val, (int, float)) and attr not in ignore_keys)
+ if (isinstance(val, (int, float, np.number)) and attr not in ignore_keys)
  )
  self.broadcast_attrs(numeric_attrs, overwrite)
 
@@ -2057,7 +2054,7 @@ def vector_to_lon_lat_grid(
  [2.97, 0.12, 1.33, ..., 3.54, 0.74, 2.59]])
 
  >>> da.sum().item() == vector["foo"].sum()
- True
+ np.True_
 
  """
  df = vector.select(("longitude", "latitude", *agg), copy=False).dataframe
@@ -35,9 +35,6 @@ DEFAULT_CHUNKS: dict[str, int] = {"time": 1}
  #: Whether to open multi-file datasets in parallel
  OPEN_IN_PARALLEL: bool = False
 
- #: Whether to use file locking when opening multi-file datasets
- OPEN_WITH_LOCK: bool = False
-
 
  def parse_timesteps(time: TimeInput | None, freq: str | None = "1h") -> list[datetime]:
  """Parse time input into set of time steps.
@@ -741,5 +738,4 @@ class MetDataSource(abc.ABC):
  xr_kwargs.setdefault("engine", NETCDF_ENGINE)
  xr_kwargs.setdefault("chunks", DEFAULT_CHUNKS)
  xr_kwargs.setdefault("parallel", OPEN_IN_PARALLEL)
- xr_kwargs.setdefault("lock", OPEN_WITH_LOCK)
  return xr.open_mfdataset(disk_paths, **xr_kwargs)
@@ -16,7 +16,7 @@ from __future__ import annotations
 
  import datetime
  import enum
- import io
+ import tempfile
  from collections.abc import Iterable
 
  import numpy as np
@@ -535,7 +535,7 @@ class GOES:
  da_dict = {}
  for rpath, init_bytes in data.items():
  channel = _extract_channel_from_rpath(rpath)
- ds = xr.open_dataset(io.BytesIO(init_bytes), engine="h5netcdf")
+ ds = _load_via_tempfile(init_bytes)
 
  da = ds["CMI"]
  da = da.expand_dims(band_id=ds["band_id"].values)
@@ -551,7 +551,7 @@ class GOES:
  da = xr.concat(da_dict.values(), dim="band_id")
 
  else:
- ds = xr.open_dataset(io.BytesIO(data), engine="h5netcdf")
+ ds = _load_via_tempfile(data)
  da = ds["CMI"]
  da = da.expand_dims(band_id=ds["band_id"].values)
 
@@ -564,6 +564,13 @@ class GOES:
  return da
 
 
+ def _load_via_tempfile(data: bytes) -> xr.Dataset:
+ """Load xarray dataset via temporary file."""
+ with tempfile.NamedTemporaryFile(buffering=0) as tmp:
+ tmp.write(data)
+ return xr.load_dataset(tmp.name)
+
+
  def _concat_c02(ds1: XArrayType, ds2: XArrayType) -> XArrayType:
  """Concatenate two datasets with C01 and C02 data."""
  # Average the C02 data to the C01 resolution
@@ -1166,49 +1166,43 @@ class Cocip(Model):
  # ---
  # Create contrail dataframe (self.contrail)
  # ---
- dfs = [contrail.dataframe for contrail in self.contrail_list]
- dfs = [df.assign(timestep=t_idx) for t_idx, df in enumerate(dfs)]
- self.contrail = pd.concat(dfs)
+ self.contrail = GeoVectorDataset.sum(self.contrail_list).dataframe
+ self.contrail["timestep"] = np.concatenate(
+ [np.full(c.size, i) for i, c in enumerate(self.contrail_list)]
+ )
 
  # add age in hours to the contrail waypoint outputs
  age_hours = np.empty_like(self.contrail["ef"])
  np.divide(self.contrail["age"], np.timedelta64(1, "h"), out=age_hours)
  self.contrail["age_hours"] = age_hours
 
- if self.params["verbose_outputs"]:
+ verbose_outputs = self.params["verbose_outputs"]
+ if verbose_outputs:
  # Compute dt_integration -- logic is somewhat complicated, but
  # we're simply addressing that the first dt_integration
  # is different from the rest
 
- # We call reset_index twice. The first call introduces an `index`
- # column, and the second introduces a `level_0` column. This `level_0`
- # is a RangeIndex, which we use in the `groupby` to identify the
+ # We call reset_index to introduces an `index` RangeIndex column,
+ # Which we use in the `groupby` to identify the
  # index of the first evolution step at each waypoint.
- # The `level_0` is used to insert back into the `seq_index` dataframe,
- # then it is dropped in replace of the original `index`.
- seq_index = self.contrail.reset_index().reset_index()
- cols = ["formation_time", "time", "level_0"]
- first_form_time = seq_index.groupby("waypoint")[cols].first()
+ tmp = self.contrail.reset_index()
+ cols = ["formation_time", "time", "index"]
+ first_form_time = tmp.groupby("waypoint")[cols].first()
  first_dt = first_form_time["time"] - first_form_time["formation_time"]
- first_dt.index = first_form_time["level_0"]
-
- seq_index = seq_index.set_index("level_0")
- seq_index["dt_integration"] = first_dt
- seq_index.fillna({"dt_integration": self.params["dt_integration"]}, inplace=True)
+ first_dt = first_dt.set_axis(first_form_time["index"])
 
- self.contrail = seq_index.set_index("index")
+ self.contrail = tmp.set_index("index")
+ self.contrail["dt_integration"] = first_dt
+ self.contrail.fillna({"dt_integration": self.params["dt_integration"]}, inplace=True)
 
  # ---
  # Create contrail xr.Dataset (self.contrail_dataset)
  # ---
  if isinstance(self.source, Fleet):
- self.contrail_dataset = xr.Dataset.from_dataframe(
- self.contrail.set_index(["flight_id", "timestep", "waypoint"])
- )
+ keys = ["flight_id", "timestep", "waypoint"]
  else:
- self.contrail_dataset = xr.Dataset.from_dataframe(
- self.contrail.set_index(["timestep", "waypoint"])
- )
+ keys = ["timestep", "waypoint"]
+ self.contrail_dataset = xr.Dataset.from_dataframe(self.contrail.set_index(keys))
 
  # ---
  # Create output Flight / Fleet (self.source)
@@ -1229,7 +1223,7 @@ class Cocip(Model):
  ]
 
  # add additional columns
- if self.params["verbose_outputs"]:
+ if verbose_outputs:
  sac_cols += ["dT_dz", "ds_dz", "dz_max"]
 
  downwash_cols = ["rho_air_1", "iwc_1", "n_ice_per_m_1"]
@@ -1253,7 +1247,7 @@ class Cocip(Model):
 
  rad_keys = ["sdr", "rsr", "olr", "rf_sw", "rf_lw", "rf_net"]
  for key in rad_keys:
- if self.params["verbose_outputs"]:
+ if verbose_outputs:
  agg_dict[key] = ["mean", "min", "max"]
  else:
  agg_dict[key] = ["mean"]
@@ -92,12 +92,12 @@ class CocipUncertaintyParams(CocipParams):
  >>> distr = scipy.stats.uniform(loc=0.4, scale=0.2)
  >>> params = CocipUncertaintyParams(seed=123, initial_wake_vortex_depth_uncertainty=distr)
  >>> params.initial_wake_vortex_depth
- 0.41076420
+ np.float64(0.41076420...)
 
  >>> # Once seeded, calling the class again gives a new value
  >>> params = CocipUncertaintyParams(initial_wake_vortex_depth=distr)
  >>> params.initial_wake_vortex_depth
- 0.43526372
+ np.float64(0.43526372...)
 
  >>> # To retain the default value, set the uncertainty to None
  >>> params = CocipUncertaintyParams(rf_lw_enhancement_factor_uncertainty=None)
@@ -212,7 +212,7 @@ class CocipUncertaintyParams(CocipParams):
 
  return out
 
- def rvs(self, size: None | int = None) -> dict[str, float | npt.NDArray[np.float64]]:
+ def rvs(self, size: None | int = None) -> dict[str, np.float64 | npt.NDArray[np.float64]]:
  """Call each distribution's `rvs` method to generate random parameters.
 
  Seed calls to `rvs` with class variable `rng`.
@@ -247,12 +247,12 @@ class CocipUncertaintyParams(CocipParams):
  [7.9063961e-04, 3.0336906e-03, 7.7571563e-04, 2.0577813e-02,
  9.4205803e-01, 4.3379897e-03, 3.6786550e-03, 2.4747452e-02]],
  dtype=float32),
- 'initial_wake_vortex_depth': 0.39805019708566847,
- 'nvpm_ei_n_enhancement_factor': 0.9371878437312526,
- 'rf_lw_enhancement_factor': 1.1017491252832377,
- 'rf_sw_enhancement_factor': 0.99721639115012,
- 'sedimentation_impact_factor': 0.5071779847244678,
- 'wind_shear_enhancement_exponent': 0.34100931239701004}
+ 'initial_wake_vortex_depth': np.float64(0.39805019708566847),
+ 'nvpm_ei_n_enhancement_factor': np.float64(0.9371878437312526),
+ 'rf_lw_enhancement_factor': np.float64(1.1017491252832377),
+ 'rf_sw_enhancement_factor': np.float64(0.99721639115012),
+ 'sedimentation_impact_factor': np.float64(0.5071779847244678),
+ 'wind_shear_enhancement_exponent': np.float64(0.34100931239701004)}
  """
  return {
  param: distr.rvs(size=size, random_state=self.rng)
@@ -32,7 +32,6 @@ import xarray as xr
 
  from pycontrails.core.met import MetDataArray, MetDataset
  from pycontrails.core.vector import GeoVectorDataset, vector_to_lon_lat_grid
- from pycontrails.datalib.goes import GOES, extract_goes_visualization
  from pycontrails.models.cocip.contrail_properties import contrail_edges, plume_mass_per_distance
  from pycontrails.models.cocip.radiative_forcing import albedo
  from pycontrails.models.humidity_scaling import HumidityScaling
@@ -217,7 +216,7 @@ def contrail_flight_summary_statistics(flight_waypoints: GeoVectorDataset) -> pd
  )
 
  flight_waypoints["persistent_contrail_length"] = np.where(
- np.isnan(flight_waypoints["ef"]), 0.0, flight_waypoints["segment_length"]
+ np.nan_to_num(flight_waypoints["ef"]) == 0.0, 0.0, flight_waypoints["segment_length"]
  )
 
  # Calculate contrail statistics for each flight
@@ -1192,6 +1191,7 @@ def meteorological_time_slice_statistics(
  # ISSR: Volume of airspace with RHi > 100% between FL300 and FL450
  met = humidity_scaling.eval(met)
  rhi = met["rhi"].data.sel(level=slice(150, 300))
+ rhi = rhi.interp(time=time)
  is_issr = rhi > 1
 
  # Cirrus in a longitude-latitude grid
@@ -1246,9 +1246,15 @@ def radiation_time_slice_statistics(
  surface_area = geo.grid_surface_area(rad["longitude"].values, rad["latitude"].values)
  weights = surface_area.values / np.nansum(surface_area)
  stats = {
- "mean_sdr_domain": np.nansum(rad["sdr"].data.sel(level=-1, time=time).values * weights),
- "mean_rsr_domain": np.nansum(rad["rsr"].data.sel(level=-1, time=time).values * weights),
- "mean_olr_domain": np.nansum(rad["olr"].data.sel(level=-1, time=time).values * weights),
+ "mean_sdr_domain": np.nansum(
+ np.squeeze(rad["sdr"].data.interp(time=time).values) * weights
+ ),
+ "mean_rsr_domain": np.nansum(
+ np.squeeze(rad["rsr"].data.interp(time=time).values) * weights
+ ),
+ "mean_olr_domain": np.nansum(
+ np.squeeze(rad["olr"].data.interp(time=time).values) * weights
+ ),
  }
  return pd.Series(stats)
 
@@ -1599,7 +1605,7 @@ def contrails_to_hi_res_grid(
  module_not_found_error=exc,
  )
 
- for i in tqdm(heads_t.index[:2000]):
+ for i in tqdm(heads_t.index):
  contrail_segment = GeoVectorDataset(
  pd.concat([heads_t[cols_req].loc[i], tails_t[cols_req].loc[i]], axis=1).T, copy=True
  )
@@ -2125,6 +2131,8 @@ def compare_cocip_with_goes(
  File path of saved CoCiP-GOES image if ``path_write_img`` is provided.
  """
 
+ from pycontrails.datalib.goes import GOES, extract_goes_visualization
+
  try:
  import cartopy.crs as ccrs
  from cartopy.mpl.ticker import LatitudeFormatter, LongitudeFormatter
@@ -561,7 +561,7 @@ UID No,Manufacturer,Engine Identification,Combustor Description,Eng Type,B/P Rat
  1TL002,Textron Lycoming,ALF 502R-3,,TF,5.7,11.4,29.8,0.3476,0.288,0.1027,0.0432,11.2,9.94,6.15,3.3,0.433,0.5,8.43,44.67,0.056,0.053,0.287,6.51,12.63,12,5.47,2.133,13,101.3,102.4,288,293,0.0088,0.0108
  1TL003,Textron Lycoming,ALF 502R-5,,TF,5.6,12,31,0.3581,0.2955,0.1034,0.0408,13.35,10.56,6.6,3.78,0.3,0.25,7.1,40.93,0.06,0.053,0.217,5.39,13.5,12.7,5.7,2.3,15.4,101.3,102.4,288,293,0.0088,0.0108
  1TL004,Textron Lycoming,"LF507-1F, -1H",,TF,5.1,13,31,0.3578,0.2961,0.1083,0.0453,14.52,12.02,6.39,3.28,0.2,0.3,4.43,37.83,0.01,0.01,0.12,4.72,10.3,10.2,6.9,6.8,10.6,101.3,102.4,276,280,0.0023,0.0038
- 1ZM001,IVCHENKO PROGRESS ZMBK,D-36,,TF,5,19.9,63.765,0.634,0.533,0.211,,26,22,9,5.5,0.5,0.4,2.7,20.7,0,0,0,5.4,14.8,,,,14.8,99.9,101.5,268,295,0.0017,0.0083
+ 1ZM001,IVCHENKO PROGRESS ZMBK,D-36,,TF,5,19.9,63.765,0.634,0.533,0.211,0.092,26,22,9,5.5,0.5,0.4,2.7,20.7,0,0,0,5.4,14.8,,,,14.8,99.9,101.5,268,295,0.0017,0.0083
  13ZM002,IVCHENKO PROGRESS ZMBK,D-36 ser. 4A,,TF,5,19.9,63.77,0.634,0.533,0.211,0.092,26,22,9,5.5,0.5,0.4,2.7,20.7,0,0,0,5.4,14.8,,,,14.8,99.9,101.5,268,295,0.0017,0.0083
  13ZM003,IVCHENKO PROGRESS ZMBK,D-436-148 F1,,TF,4.9,19.8,64.43,0.548,0.468,0.218,0.093,18.93,16,7.26,3.64,0.54,0.54,2.99,23.46,0.1,0.04,0.07,2.26,6.7,,,,6.7,99.02,102.66,266,301,0.00248,0.01624
  13ZM004,IVCHENKO PROGRESS ZMBK,D-436-148 F2,,TF,4.9,20.73,68.72,0.581,0.493,0.225,0.099,19.76,16.64,7.31,3.78,0.48,0.4,2.71,19.56,0.09,0.05,0.08,1.39,6.7,,,,6.9,99.02,102.66,266,301,0.00248,0.01624
@@ -886,7 +886,7 @@ def spatial_bounding_box(
  >>> lon = rng.uniform(-180, 180, size=30)
  >>> lat = rng.uniform(-90, 90, size=30)
  >>> spatial_bounding_box(lon, lat)
- (-168.0, -77.0, 155.0, 82.0)
+ (np.float64(-168.0), np.float64(-77.0), np.float64(155.0), np.float64(82.0))
  """
  lon_min = max(np.floor(np.min(longitude) - buffer), -180.0)
  lon_max = min(np.ceil(np.max(longitude) + buffer), 179.99)
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: pycontrails
- Version: 0.52.0
+ Version: 0.52.2
  Summary: Python library for modeling aviation climate impacts
  Author-email: Breakthrough Energy <py@contrails.org>
  License: Apache-2.0
@@ -28,7 +28,7 @@ Description-Content-Type: text/markdown
  License-File: LICENSE
  License-File: NOTICE
  Requires-Dist: dask >=2022.3
- Requires-Dist: numpy <2.0.0,>=1.22
+ Requires-Dist: numpy >=1.22
  Requires-Dist: overrides >=6.1
  Requires-Dist: pandas >=2.2
  Requires-Dist: scipy >=1.10
@@ -36,7 +36,7 @@ Requires-Dist: xarray >=2022.3
  Provides-Extra: complete
  Requires-Dist: pycontrails[ecmwf,gcp,gfs,jupyter,pyproj,sat,vis,zarr] ; extra == 'complete'
  Provides-Extra: dev
- Requires-Dist: black[jupyter] ==24.4.1 ; extra == 'dev'
+ Requires-Dist: black[jupyter] ==24.4.2 ; extra == 'dev'
  Requires-Dist: dep-license ; extra == 'dev'
  Requires-Dist: fastparquet >=0.8 ; extra == 'dev'
  Requires-Dist: ipdb >=0.13 ; extra == 'dev'
@@ -50,7 +50,7 @@ Requires-Dist: pyarrow >=5.0 ; extra == 'dev'
  Requires-Dist: pytest >=8.2 ; extra == 'dev'
  Requires-Dist: pytest-cov >=2.11 ; extra == 'dev'
  Requires-Dist: requests >=2.25 ; extra == 'dev'
- Requires-Dist: ruff ==0.4.1 ; extra == 'dev'
+ Requires-Dist: ruff ==0.5.3 ; extra == 'dev'
  Requires-Dist: setuptools ; extra == 'dev'
  Provides-Extra: docs
  Requires-Dist: doc8 >=1.1 ; extra == 'docs'
@@ -70,7 +70,7 @@ Requires-Dist: cdsapi >=0.4 ; extra == 'ecmwf'
  Requires-Dist: cfgrib >=0.9 ; extra == 'ecmwf'
  Requires-Dist: eccodes >=1.4 ; extra == 'ecmwf'
  Requires-Dist: ecmwf-api-client >=1.6 ; extra == 'ecmwf'
- Requires-Dist: netcdf4 <1.7.0,>=1.6.1 ; extra == 'ecmwf'
+ Requires-Dist: netcdf4 >=1.6.1 ; extra == 'ecmwf'
  Requires-Dist: platformdirs >=3.0 ; extra == 'ecmwf'
  Requires-Dist: requests >=2.25 ; extra == 'ecmwf'
  Requires-Dist: lxml >=5.1.0 ; extra == 'ecmwf'
@@ -98,7 +98,6 @@ Requires-Dist: gcsfs >=2022.3 ; extra == 'sat'
  Requires-Dist: geojson >=3.1 ; extra == 'sat'
  Requires-Dist: google-cloud-bigquery >=3.23 ; extra == 'sat'
  Requires-Dist: google-cloud-bigquery-storage >=2.25 ; extra == 'sat'
- Requires-Dist: h5netcdf >=1.2 ; extra == 'sat'
  Requires-Dist: pillow >=10.3 ; extra == 'sat'
  Requires-Dist: pyproj >=3.5 ; extra == 'sat'
  Requires-Dist: rasterio >=1.3 ; extra == 'sat'
@@ -121,9 +120,9 @@ Requires-Dist: zarr >=2.12 ; extra == 'zarr'
 
  | | |
  |---------------|-------------------------------------------------------------------|
- | **Version** | [![PyPI version](https://img.shields.io/pypi/v/pycontrails.svg)](https://pypi.python.org/pypi/pycontrails) [![Supported python versions](https://img.shields.io/pypi/pyversions/pycontrails.svg)](https://pypi.python.org/pypi/pycontrails) |
+ | **Version** | [![PyPI version](https://img.shields.io/pypi/v/pycontrails.svg)](https://pypi.python.org/pypi/pycontrails) [![conda-forge version](https://anaconda.org/conda-forge/pycontrails/badges/version.svg)](https://anaconda.org/conda-forge/pycontrails) [![Supported python versions](https://img.shields.io/pypi/pyversions/pycontrails.svg)](https://pypi.python.org/pypi/pycontrails) |
  | **Citation** | [![DOI](https://zenodo.org/badge/617248930.svg)](https://zenodo.org/badge/latestdoi/617248930) |
- | **Tests** | [![Unit test](https://github.com/contrailcirrus/pycontrails/actions/workflows/test.yaml/badge.svg)](https://github.com/contrailcirrus/pycontrails/actions/workflows/test.yaml) [![Docs](https://github.com/contrailcirrus/pycontrails/actions/workflows/docs.yaml/badge.svg)](https://github.com/contrailcirrus/pycontrails/actions/workflows/docs.yaml) [![Release](https://github.com/contrailcirrus/pycontrails/actions/workflows/release.yaml/badge.svg)](https://github.com/contrailcirrus/pycontrails/actions/workflows/release.yaml) [![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/contrailcirrus/pycontrails/badge)](https://securityscorecards.dev/viewer?uri=github.com/contrailcirrus/pycontrails)|
+ | **Tests** | [![Unit test](https://github.com/contrailcirrus/pycontrails/actions/workflows/test.yaml/badge.svg)](https://github.com/contrailcirrus/pycontrails/actions/workflows/test.yaml) [![Docs](https://github.com/contrailcirrus/pycontrails/actions/workflows/docs.yaml/badge.svg?event=push)](https://github.com/contrailcirrus/pycontrails/actions/workflows/docs.yaml) [![Release](https://github.com/contrailcirrus/pycontrails/actions/workflows/release.yaml/badge.svg)](https://github.com/contrailcirrus/pycontrails/actions/workflows/release.yaml) [![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/contrailcirrus/pycontrails/badge)](https://securityscorecards.dev/viewer?uri=github.com/contrailcirrus/pycontrails)|
  | **License** | [![Apache License 2.0](https://img.shields.io/pypi/l/pycontrails.svg)](https://github.com/contrailcirrus/pycontrails/blob/main/LICENSE) |
  | **Community** | [![Github Discussions](https://img.shields.io/github/discussions/contrailcirrus/pycontrails)](https://github.com/contrailcirrus/pycontrails/discussions) [![Github Issues](https://img.shields.io/github/issues/contrailcirrus/pycontrails)](https://github.com/contrailcirrus/pycontrails/issues) [![Github PRs](https://img.shields.io/github/issues-pr/contrailcirrus/pycontrails)](https://github.com/contrailcirrus/pycontrails/pulls) |
 
@@ -140,7 +139,9 @@ Documentation and examples available at [py.contrails.org](https://py.contrails.
 
  ## Install
 
- Requires Python (3.9 or later)
+ ### Install with pip
+
+ You can install pycontrails from PyPI with `pip` (Python 3.9 or later required):
 
  ```bash
  $ pip install pycontrails
@@ -155,7 +156,17 @@ Install the latest development version directly from GitHub:
  pip install git+https://github.com/contrailcirrus/pycontrails.git
  ```
 
- See more options in the [install documentation](https://py.contrails.org/install).
+ ### Install with conda
+
+ You can install pycontrails from the [conda-forge](https://conda-forge.org/) channel with `conda` (or other `conda`-like package managers such as `mamba`):
+
+ ```bash
+ conda install -c conda-forge pycontrails
+ ```
+
+ The conda-forge package includes all optional runtime dependencies.
+
+ See more installation options in the [install documentation](https://py.contrails.org/install).
 
  ## Get Involved
 
@@ -1,34 +1,34 @@
- pycontrails-0.52.0.dist-info/RECORD,,
- pycontrails-0.52.0.dist-info/LICENSE,sha256=gJ-h7SFFD1mCfR6a7HILvEtodDT6Iig8bLXdgqR6ucA,10175
- pycontrails-0.52.0.dist-info/WHEEL,sha256=E2L8cNry8_qENFWMb8KxRWLe-RhZW94hLc32Xo3HiTg,110
- pycontrails-0.52.0.dist-info/NOTICE,sha256=gKI8DcN1WhiXB2SFRKDogcjONldGubTvBxiOYdC4CXU,1926
- pycontrails-0.52.0.dist-info/top_level.txt,sha256=Z8J1R_AiBAyCVjNw6jYLdrA68PrQqTg0t3_Yek_IZ0Q,29
- pycontrails-0.52.0.dist-info/METADATA,sha256=q757wdU7ow-WkMWeLtjLZ3Mo8h28yfWfx0F-EtfLfbI,8831
- pycontrails/_version.py,sha256=HchZIBAQcFK44NnsWNeG0x1TGXt4xcgLicOAT4yevSE,413
+ pycontrails-0.52.2.dist-info/RECORD,,
+ pycontrails-0.52.2.dist-info/LICENSE,sha256=gJ-h7SFFD1mCfR6a7HILvEtodDT6Iig8bLXdgqR6ucA,10175
+ pycontrails-0.52.2.dist-info/WHEEL,sha256=jPrF5oWulsOxop3iQfMuD22j-CtnUOYm5n_3r9ZbENY,109
+ pycontrails-0.52.2.dist-info/NOTICE,sha256=gKI8DcN1WhiXB2SFRKDogcjONldGubTvBxiOYdC4CXU,1926
+ pycontrails-0.52.2.dist-info/top_level.txt,sha256=Z8J1R_AiBAyCVjNw6jYLdrA68PrQqTg0t3_Yek_IZ0Q,29
+ pycontrails-0.52.2.dist-info/METADATA,sha256=_6cclDjRduE1ZmJm15bLAx4BibdxxfKd2ElNj8CwyNY,9307
+ pycontrails/_version.py,sha256=7hRRz1Yg5iw29YoTywrkjgawiL8eg5Gzlc-mhwWCdtM,413
  pycontrails/__init__.py,sha256=O2T9kXCMhcELcMZz7HEnwiBhh4Gfcj-yG1HtrotOKHQ,2001
  pycontrails/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- pycontrails/core/vector.py,sha256=P61Rv1M040A1SRHmdWVR_RfXcWa6faM330n-9499ilI,71722
- pycontrails/core/models.py,sha256=sipUAH_r0A1TxQ0yBjaPj8WC33Piw34ABHyrmRePfhg,39003
- pycontrails/core/interpolation.py,sha256=FFYdUnTzGnkjoGs-FKsK3Y3nSK5lQI-mpq5HPALZ-y4,25495
- pycontrails/core/fleet.py,sha256=84oTb8RJ3-bPVvZn3O2ljMEZLwJ9Q-E455ZzQJ4QaQU,16075
- pycontrails/core/rgi_cython.cpython-310-darwin.so,sha256=97XwF4jFJ6des5R36ybseepHyiD7UXc9zZCsKHUxbok,310832
- pycontrails/core/flight.py,sha256=BOnbGZuYNpHO0zJu75u27djoVbttnEPMTtNZjagAd4Q,83599
+ pycontrails/core/vector.py,sha256=ZvI-hQylANy6X8cSBMPa51FnMkmkjASsPyKLflZQJOg,71571
+ pycontrails/core/models.py,sha256=z06ggBjtWJyDXUexGtoaUgHJA6e7jajqPvZvMXvlSwk,39310
+ pycontrails/core/interpolation.py,sha256=beTKgJKQDu07rqwfkAJFOZu83jQTKm1mOEC23EDiLdA,25519
+ pycontrails/core/fleet.py,sha256=wqYY_2xD9X-Og0_oxU8ZPqTHYDau9TOPLQcmEnB1kiQ,16140
+ pycontrails/core/rgi_cython.cpython-310-darwin.so,sha256=aSxW1q0rXzh9Ma3gCgUJxui806x_JCiZBn1CLZ1gTiM,310832
+ pycontrails/core/flight.py,sha256=EdB8_P8BELSyXx01CXw_js0zgv-bdsqj3_mt28lhKMU,85144
  pycontrails/core/fuel.py,sha256=kJZ3P1lPm1L6rdPREM55XQ-VfJ_pt35cP4sO2Nnvmjs,4332
- pycontrails/core/polygon.py,sha256=NhoK91oVE2pse1kw2mkkhxxQpJrvBld8ofTMCwNH_h8,18016
+ pycontrails/core/polygon.py,sha256=gosyZBX1XBKD2EcHycIZb7uM-xGs8rCfdpiSZlhc2Hc,18028
  pycontrails/core/cache.py,sha256=9TaHyLtCpDazgN5zEt0aa4xNgvNcMc4oO_6nEz0XhYE,27971
  pycontrails/core/__init__.py,sha256=x1z6x8w3sYmEqYcNWyWHuNkS9lPUPbHUoYJZs1K0q98,856
  pycontrails/core/flightplan.py,sha256=s7tHbjMFbHAJkSWV6hPkghuW6jDb1n5UhWAo9XbJ9z0,7349
- pycontrails/core/met.py,sha256=HORTWNre9rSyOgNnVnVaW0A6jDjU5W5A5s2L89KIZyM,93602
- pycontrails/core/aircraft_performance.py,sha256=4yNGfArt741HNFjxpWvEu86BTzie9LaPjC4KH3dXYts,21966
+ pycontrails/core/met.py,sha256=tUoo0RT9FBhUWp_IavbyUm-uze1GuWnms0XrYZtZ8Ds,93967
+ pycontrails/core/aircraft_performance.py,sha256=4KnLj0zK-mk8Oo3As1CXUkQWBQGMeDdrKi5TeOhOmUA,26107
  pycontrails/core/airports.py,sha256=aeyAXVkioIRomrP79UtNrxindL4f1DJyXFaojZCuBBw,6758
  pycontrails/core/met_var.py,sha256=EhrLGdrCAp8cKb-3Whd_ttLMZn4_zLMhE-QyFinESqo,9197
- pycontrails/core/coordinates.py,sha256=vVITA90x0Jx-UQG2XMm3JAKKsIrFUcU861xV-L9czTI,5291
- pycontrails/datalib/goes.py,sha256=CDCwNWv35MGm20s0AS5Xxy0hGyiUhtX9S9cc8cI_bKI,26357
+ pycontrails/core/coordinates.py,sha256=0ySsHtqTon7GMbuwmmxMbI92j3ueMteJZh4xxNm5zto,5391
+ pycontrails/datalib/goes.py,sha256=Muh_pqAXSqUlM4ssStUT9QmPxGPEKK21LHFroaqTq7k,26533
  pycontrails/datalib/landsat.py,sha256=jV6E9TMmo3x_EEb-4BnD3L7nB5C3NCLT7FaXoMxXP64,19645
  pycontrails/datalib/spire.py,sha256=66SnMdA8KOS69USjKmqrJmTKPK08Ehih9tnlsCt-AJw,25331
  pycontrails/datalib/__init__.py,sha256=hW9NWdFPC3y_2vHMteQ7GgQdop3917MkDaf5ZhU2RBY,369
  pycontrails/datalib/sentinel.py,sha256=Rzsp5Hv6Rh3XVEfvFeofmClId4Eq2KhdYiEhIqFPE3U,17222
- pycontrails/datalib/_met_utils/metsource.py,sha256=lAMkvbwix1ziqWGZuHvsu69p3etwOjmAtYiriaEz7Hg,23991
+ pycontrails/datalib/_met_utils/metsource.py,sha256=mh7nWfpnP5l7unuQHDSYjar4hN7k9iSDNd6cYZcwwWM,23844
  pycontrails/datalib/ecmwf/arco_era5.py,sha256=YuoPmPlP9TpZ6qhUPLbb30y3D3dTNDasTLZqP5MAWtw,18624
  pycontrails/datalib/ecmwf/era5.py,sha256=TMX9bJHvALvSbxFI0BpwI9l3yMoxI1ecc2ROfvPaNIE,18242
  pycontrails/datalib/ecmwf/era5_model_level.py,sha256=0R96ATNVp1UB63M_ER6aEqdY60dfkj_0SNVjyD6a8fE,18945
@@ -69,7 +69,7 @@ pycontrails/models/emissions/ffm2.py,sha256=h_bmB4pxxvC1ptqz5jB_rpf9QgaAv9J7Lu-6
  pycontrails/models/emissions/emissions.py,sha256=MSyCMHdB-OXf9__CTHtAi85sCflqvifk4HT_1Qp_q4A,47564
  pycontrails/models/emissions/black_carbon.py,sha256=F2SCUiV39zg2mUxbWsct6vvr_JgHdyB59DVWkw40eX0,20234
  pycontrails/models/emissions/static/edb-nvpm-v29b-engines.csv,sha256=NatpVI1D2tTDLK7uVvlanm9DhfFB44nmFA4aocUcXco,77318
- pycontrails/models/emissions/static/edb-gaseous-v29b-engines.csv,sha256=Oub-FkyR4cPlTEPo9wDPAn4i4CpGpKH2tet30MNitI0,127518
+ pycontrails/models/emissions/static/edb-gaseous-v29b-engines.csv,sha256=jCjt7cP6sqLdbDp5NUoaqllVkZNE7NJtSnbB3rX_zQI,127523
  pycontrails/models/emissions/static/default-engine-uids.csv,sha256=3blb0aqtM8YRsyT1WDo0UYTBtv1h4BwXRIC_Ll9fhnI,6217
  pycontrails/models/apcemm/__init__.py,sha256=M-hrJklbSgBckclm526MiBAhpKPLHgJbB58ArbJuGIk,175
  pycontrails/models/apcemm/inputs.py,sha256=88GylkiaymEW_XZeFxLsICI9wV6kl8wVYsuyTe8zIQ8,6585
@@ -82,12 +82,12 @@ pycontrails/models/humidity_scaling/quantiles/era5-pressure-level-quantiles.pq,s
  pycontrails/models/humidity_scaling/quantiles/era5-model-level-quantiles.pq,sha256=pShCvNUo0NYtAHhT9IBRuj38X9jejdlKfv-ZoOKmtKI,35943
  pycontrails/models/cocip/radiative_forcing.py,sha256=ERuFcYMo0_1iiOricnZ8D4ext23bMnTCeZwg9vd6Vzs,44944
  pycontrails/models/cocip/wind_shear.py,sha256=p8d3iaNzxPA3MoxFEM1ZDKt0aticoD6U9cv0QmbuBzs,3860
- pycontrails/models/cocip/cocip.py,sha256=0ILMcjbgsM00rCXAeo9UkSnUsmvq1XvORhNPI-ReNcM,97591
- pycontrails/models/cocip/output_formats.py,sha256=e3K-23EvEE2z9PuHNr2OQhwvN8_PYmNmR7Li-dsYZq8,83229
+ pycontrails/models/cocip/cocip.py,sha256=1ReioOLubxyRjXAAZYwer2g144312wZQc72XhmUEVNA,97188
+ pycontrails/models/cocip/output_formats.py,sha256=bmQKyyuwbn92W1IvdJI3NY4WfEpN6dT8HbxU38B_nnM,83352
  pycontrails/models/cocip/__init__.py,sha256=jd-9Tq20s1kwQBlxsYfZLi3hlT5MnWOY2XsPazq1fgE,962
  pycontrails/models/cocip/cocip_params.py,sha256=R4bewge3xLgWYbBbGwd8e8r0NlaFx2IaQPZEfiqJZRI,11392
  pycontrails/models/cocip/wake_vortex.py,sha256=i_OF193KK5BCMdVCgK0_4Aqn55f6rnL4WDWEac8um-w,14421
- pycontrails/models/cocip/cocip_uncertainty.py,sha256=4JtlCVFpLBnPRlvyEp9QFpRfHFK9joSTnxe0NJdONG4,11784
+ pycontrails/models/cocip/cocip_uncertainty.py,sha256=-3ICEbrhB6eQiYIqpEahzhf12AwV7ge-yvj_NaOqW3g,11891
  pycontrails/models/cocip/radiative_heating.py,sha256=YRpwfXgFnf89iuJiIM96q-jbdcMAwlX8QLsADTKMABE,18848
  pycontrails/models/cocip/contrail_properties.py,sha256=tycCxKf8j9GvVYDQBPxjtp6xLll-r00C0XW-w1jGbMI,55594
  pycontrails/models/cocip/unterstrasser_wake_vortex.py,sha256=kDxFpAIkcqqhGmwXoxv3_cSESj1Ur45GbLJF56IACJs,14573
@@ -101,7 +101,7 @@ pycontrails/models/ps_model/static/ps-aircraft-params-20240524.csv,sha256=3eNhSw
  pycontrails/models/cocipgrid/cocip_grid_params.py,sha256=l4vBPrOKCJDz5Y1uMjmOGVyUcSWgfZtFWbjW968OPz8,5875
  pycontrails/models/cocipgrid/__init__.py,sha256=ar6bF_8Pusbb-myujz_q5ntFylQTNH8yiM8fxP7Zk30,262
  pycontrails/models/cocipgrid/cocip_grid.py,sha256=MLsh3rZb-6Q7vnad6YYTfZs83-kSJEbhjfWTS9oooXI,94333
- pycontrails/physics/geo.py,sha256=lqEpTLhex2r-o-6EHSSEiPC8xqZ-NlwwCBob9-cJA_w,30240
+ pycontrails/physics/geo.py,sha256=9ZWIXyEEgrBNqsoeBBlYLTA-8GUTgyc-jgeVgchxXa8,30288
  pycontrails/physics/units.py,sha256=j-G5AC9eWIvv2MTOq9lUOoOQKFNJJuHzWLanHRji2tE,12272
  pycontrails/physics/constants.py,sha256=pHQQmccMUwuNnY4hFtm3L8G2rnUQcfJnroyQr8HAVeM,3146
  pycontrails/physics/__init__.py,sha256=_1eWbEy6evEWdfJCEkwDiSdpiDNzNWEPVqaPekHyhwU,44
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.43.0)
+ Generator: setuptools (72.1.0)
  Root-Is-Purelib: false
  Tag: cp310-cp310-macosx_11_0_arm64