pycontrails 0.42.0__cp39-cp39-macosx_11_0_arm64.whl → 0.42.2__cp39-cp39-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pycontrails might be problematic. Click here for more details.

Files changed (32) hide show
  1. pycontrails/_version.py +2 -2
  2. pycontrails/core/cache.py +4 -6
  3. pycontrails/core/datalib.py +5 -2
  4. pycontrails/core/fleet.py +59 -7
  5. pycontrails/core/flight.py +175 -49
  6. pycontrails/core/flightplan.py +238 -0
  7. pycontrails/core/interpolation.py +11 -15
  8. pycontrails/core/met.py +5 -5
  9. pycontrails/core/models.py +4 -0
  10. pycontrails/core/rgi_cython.cpython-39-darwin.so +0 -0
  11. pycontrails/core/vector.py +17 -12
  12. pycontrails/datalib/ecmwf/common.py +14 -19
  13. pycontrails/ext/bada/__init__.py +6 -6
  14. pycontrails/ext/cirium/__init__.py +2 -2
  15. pycontrails/models/cocip/cocip.py +37 -39
  16. pycontrails/models/cocip/cocip_params.py +37 -30
  17. pycontrails/models/cocip/cocip_uncertainty.py +47 -58
  18. pycontrails/models/cocip/radiative_forcing.py +220 -193
  19. pycontrails/models/cocip/wake_vortex.py +96 -91
  20. pycontrails/models/humidity_scaling.py +265 -8
  21. pycontrails/models/issr.py +1 -1
  22. pycontrails/models/quantiles/era5_ensemble_quantiles.npy +0 -0
  23. pycontrails/models/quantiles/iagos_quantiles.npy +0 -0
  24. pycontrails/models/sac.py +2 -0
  25. pycontrails/physics/geo.py +2 -1
  26. pycontrails/utils/json.py +3 -3
  27. {pycontrails-0.42.0.dist-info → pycontrails-0.42.2.dist-info}/METADATA +4 -7
  28. {pycontrails-0.42.0.dist-info → pycontrails-0.42.2.dist-info}/RECORD +32 -29
  29. {pycontrails-0.42.0.dist-info → pycontrails-0.42.2.dist-info}/LICENSE +0 -0
  30. {pycontrails-0.42.0.dist-info → pycontrails-0.42.2.dist-info}/NOTICE +0 -0
  31. {pycontrails-0.42.0.dist-info → pycontrails-0.42.2.dist-info}/WHEEL +0 -0
  32. {pycontrails-0.42.0.dist-info → pycontrails-0.42.2.dist-info}/top_level.txt +0 -0
pycontrails/_version.py CHANGED
@@ -1,4 +1,4 @@
1
1
  # file generated by setuptools_scm
2
2
  # don't change, don't track in version control
3
- __version__ = version = '0.42.0'
4
- __version_tuple__ = version_tuple = (0, 42, 0)
3
+ __version__ = version = '0.42.2'
4
+ __version_tuple__ = version_tuple = (0, 42, 2)
pycontrails/core/cache.py CHANGED
@@ -370,18 +370,16 @@ class DiskCacheStore(CacheStore):
370
370
  if disk_path.is_file():
371
371
  logger.debug("Remove file at path %s", disk_path)
372
372
  disk_path.unlink()
373
+ return
373
374
 
374
375
  # Assume anything else is a directory
375
- elif disk_path.exists():
376
+ if disk_path.exists():
376
377
  # rm directory recursively
377
378
  logger.debug("Remove directory at path %s", disk_path)
378
379
  shutil.rmtree(disk_path, ignore_errors=True)
380
+ return
379
381
 
380
- else:
381
- warnings.warn(f"No cache path found at {disk_path}")
382
-
383
- # make sure local cache directory exists
384
- # pathlib.Path(self.cache_dir).mkdir(parents=True, exist_ok=True)
382
+ warnings.warn(f"No cache path found at {disk_path}")
385
383
 
386
384
 
387
385
  class GCPCacheStore(CacheStore):
@@ -32,6 +32,9 @@ NETCDF_ENGINE: str = "netcdf4"
32
32
  #: Default chunking strategy when opening datasets with xarray
33
33
  DEFAULT_CHUNKS: dict[str, int] = {"time": 1}
34
34
 
35
+ #: Whether to open multi-file datasets in parallel
36
+ OPEN_IN_PARALLEL: bool = False
37
+
35
38
 
36
39
  def parse_timesteps(time: TimeInput | None, freq: str | None = "1H") -> list[datetime]:
37
40
  """Parse time input into set of time steps.
@@ -645,7 +648,7 @@ class MetDataSource(abc.ABC):
645
648
 
646
649
  - chunks: {"time": 1}
647
650
  - engine: "netcdf4"
648
- - parallel: True
651
+ - parallel: False
649
652
 
650
653
  Returns
651
654
  -------
@@ -654,5 +657,5 @@ class MetDataSource(abc.ABC):
654
657
  """
655
658
  xr_kwargs.setdefault("engine", NETCDF_ENGINE)
656
659
  xr_kwargs.setdefault("chunks", DEFAULT_CHUNKS)
657
- xr_kwargs.setdefault("parallel", True)
660
+ xr_kwargs.setdefault("parallel", OPEN_IN_PARALLEL)
658
661
  return xr.open_mfdataset(disk_paths, **xr_kwargs)
pycontrails/core/fleet.py CHANGED
@@ -93,6 +93,49 @@ class Fleet(Flight):
93
93
  final_waypoints[final_waypoint_indices] = True
94
94
  return final_waypoints
95
95
 
96
+ def fit_altitude(
97
+ self,
98
+ max_segments: int = 30,
99
+ pop: int = 3,
100
+ r2_target: float = 0.999,
101
+ max_cruise_rocd: float = 10,
102
+ sg_window: int = 7,
103
+ sg_polyorder: int = 1,
104
+ ) -> Fleet:
105
+ """Use piecewise linear fitting to smooth a flight profile.
106
+
107
+ Fit a flight profile to a series of line segments. Segments that have a
108
+ small rocd will be set to have a slope of zero and snapped to the
109
+ nearest thousand foot level. A Savitzky-Golay filter will then be
110
+ applied to the profile to smooth the climbs and descents. This filter
111
+ works best for high frequency flight data, sampled at a 1-3 second
112
+ sampling period.
113
+
114
+ Parameters
115
+ ----------
116
+ max_segments : int, optional
117
+ The maximum number of line segments to fit to the flight profile.
118
+ pop: int, optional
119
+ Population parameter used for the stochastic optimization routine
120
+ used to fit the flight profile.
121
+ r2_target: float, optional
122
+ Target r^2 value for solver. Solver will continue to add line
123
+ segments until the resulting r^2 value is greater than this.
124
+ max_cruise_rocd: float, optional
125
+ The maximum ROCD for a segment that will be forced to a slope of
126
+ zero, [:math:`ft s^{-1}`]
127
+ sg_window: int, optional
128
+ Parameter for :func:`scipy.signal.savgol_filter`
129
+ sg_polyorder: int, optional
130
+ Parameter for :func:`scipy.signal.savgol_filter`
131
+
132
+ Returns
133
+ -------
134
+ Fleet
135
+ Smoothed flight
136
+ """
137
+ raise NotImplementedError("Only implemented for Flight instances")
138
+
96
139
  @classmethod
97
140
  def from_seq(
98
141
  cls,
@@ -257,8 +300,8 @@ class Fleet(Flight):
257
300
 
258
301
  def segment_true_airspeed(
259
302
  self,
260
- u_wind: npt.NDArray[np.float_] | None = None,
261
- v_wind: npt.NDArray[np.float_] | None = None,
303
+ u_wind: npt.NDArray[np.float_] | float = 0.0,
304
+ v_wind: npt.NDArray[np.float_] | float = 0.0,
262
305
  smooth: bool = True,
263
306
  window_length: int = 7,
264
307
  polyorder: int = 1,
@@ -277,13 +320,14 @@ class Fleet(Flight):
277
320
  RuntimeError
278
321
  Unexpected key `__u_wind` or `__v_wind` found in :attr:`data`.
279
322
  """
280
- if u_wind is not None:
323
+ if isinstance(u_wind, np.ndarray):
281
324
  # Choosing a key we don't think exists
282
325
  key = "__u_wind"
283
326
  if key in self:
284
327
  raise RuntimeError(f"Unexpected key {key} found")
285
328
  self[key] = u_wind
286
- if v_wind is not None:
329
+
330
+ if isinstance(v_wind, np.ndarray):
287
331
  # Choosing a key we don't think exists
288
332
  key = "__v_wind"
289
333
  if key in self:
@@ -292,11 +336,11 @@ class Fleet(Flight):
292
336
 
293
337
  # Calculate TAS on each flight individually
294
338
  def calc_tas(fl: Flight) -> npt.NDArray[np.float_]:
295
- u_wind = fl.get("__u_wind", None)
296
- v_wind = fl.get("__v_wind", None)
339
+ u = fl.get("__u_wind", u_wind)
340
+ v = fl.get("__v_wind", v_wind)
297
341
 
298
342
  return fl.segment_true_airspeed(
299
- u_wind, v_wind, smooth=smooth, window_length=window_length, polyorder=polyorder
343
+ u, v, smooth=smooth, window_length=window_length, polyorder=polyorder
300
344
  )
301
345
 
302
346
  fls = self.to_flight_list(copy=False)
@@ -330,6 +374,14 @@ class Fleet(Flight):
330
374
  def segment_length(self) -> npt.NDArray[np.float_]:
331
375
  return np.where(self.final_waypoints, np.nan, super().segment_length())
332
376
 
377
+ @property
378
+ @overrides
379
+ def max_distance_gap(self) -> float:
380
+ if self.attrs["crs"] != "EPSG:4326":
381
+ raise NotImplementedError("Only implemented for EPSG:4326 CRS.")
382
+
383
+ return np.nanmax(self.segment_length()).item()
384
+
333
385
  @overrides
334
386
  def segment_azimuth(self) -> npt.NDArray[np.float_]:
335
387
  return np.where(self.final_waypoints, np.nan, super().segment_azimuth())
@@ -242,7 +242,7 @@ class Flight(GeoVectorDataset):
242
242
  self.fuel = fuel or JetA()
243
243
 
244
244
  # Check flight data for possible errors
245
- if np.any(self.altitude > 16000):
245
+ if np.any(self.altitude > 16000.0):
246
246
  flight_id = self.attrs.get("flight_id", "")
247
247
  flight_id = flight_id and f" for flight {flight_id}"
248
248
  warnings.warn(
@@ -257,35 +257,36 @@ class Flight(GeoVectorDataset):
257
257
  "with segment-based methods (e.g. 'segment_true_airspeed')."
258
258
  )
259
259
 
260
- diff_ = np.diff(self["time"])
260
+ time_diff = np.diff(self["time"])
261
261
 
262
262
  # Ensure that time is sorted
263
- if self and np.any(diff_ < np.timedelta64(0)):
263
+ if self and np.any(time_diff < np.timedelta64(0)):
264
264
  if not copy:
265
265
  raise ValueError(
266
- "`time` data must be sorted if `copy` is False on creation. "
266
+ "The 'time' array must be sorted if 'copy=False' on creation. "
267
267
  "Set copy=False, or sort data before creating Flight."
268
268
  )
269
- warnings.warn("Sorting Flight data by `time`.")
269
+ warnings.warn("Sorting Flight data by time.")
270
270
 
271
271
  sorted_flight = self.sort("time")
272
272
  self.data = sorted_flight.data
273
- # Update diff_ ... we use it again below
274
- diff_ = np.diff(self["time"])
273
+
274
+ # Update time_diff ... we use it again below
275
+ time_diff = np.diff(self["time"])
275
276
 
276
277
  # Check for duplicate times. If dropping duplicates,
277
- # keep the *first* occurrence of each time. This is achieved with
278
- # the np.insert (rather than np.append) function.
279
- filt = np.insert(diff_ > np.timedelta64(0), 0, True)
280
- if not np.all(filt):
278
+ # keep the *first* occurrence of each time.
279
+ duplicated_times = time_diff == np.timedelta64(0)
280
+ if self and np.any(duplicated_times):
281
281
  if drop_duplicated_times:
282
- filtered_flight = self.filter(filt, copy=False)
282
+ mask = np.insert(duplicated_times, 0, False)
283
+ filtered_flight = self.filter(~mask, copy=False)
283
284
  self.data = filtered_flight.data
284
285
  else:
285
286
  warnings.warn(
286
- "Flight contains duplicate times. This will cause errors "
287
- "with segment-based methods (e.g. 'segment_true_airspeed'). Set "
288
- "'drop_duplicated=True' or call the 'resample_and_fill' method."
287
+ f"Flight contains {duplicated_times.sum()} duplicate times. "
288
+ "This will cause errors with segment-based methods. Set "
289
+ "'drop_duplicated_times=True' or call the 'resample_and_fill' method."
289
290
  )
290
291
 
291
292
  @overrides
@@ -417,7 +418,7 @@ class Flight(GeoVectorDataset):
417
418
  # Segment Properties
418
419
  # ------------
419
420
 
420
- def segment_duration(self, dtype: np.dtype = np.dtype(np.float32)) -> npt.NDArray[np.float_]:
421
+ def segment_duration(self, dtype: npt.DTypeLike = np.float32) -> npt.NDArray[np.float_]:
421
422
  r"""Compute time elapsed between waypoints in seconds.
422
423
 
423
424
  ``np.nan`` appended so the length of the output is the same as number of waypoints.
@@ -581,13 +582,11 @@ class Flight(GeoVectorDataset):
581
582
  npt.NDArray[np.float_]
582
583
  Groundspeed of the segment, [:math:`m s^{-1}`]
583
584
  """
584
- # get horizontal distance - set altitude to 0
585
+ # get horizontal distance (altitude is ignored)
585
586
  horizontal_segment_length = geo.segment_haversine(self["longitude"], self["latitude"])
586
587
 
587
588
  # time between waypoints, in seconds
588
- dt_sec = np.empty_like(horizontal_segment_length)
589
- dt_sec[:-1] = np.diff(self["time"]) / np.timedelta64(1, "s")
590
- dt_sec[-1] = np.nan
589
+ dt_sec = self.segment_duration(dtype=horizontal_segment_length.dtype)
591
590
 
592
591
  # calculate groundspeed
593
592
  groundspeed = horizontal_segment_length / dt_sec
@@ -602,8 +601,8 @@ class Flight(GeoVectorDataset):
602
601
 
603
602
  def segment_true_airspeed(
604
603
  self,
605
- u_wind: npt.NDArray[np.float_] | None = None,
606
- v_wind: npt.NDArray[np.float_] | None = None,
604
+ u_wind: npt.NDArray[np.float_] | float = 0.0,
605
+ v_wind: npt.NDArray[np.float_] | float = 0.0,
607
606
  smooth: bool = True,
608
607
  window_length: int = 7,
609
608
  polyorder: int = 1,
@@ -614,10 +613,10 @@ class Flight(GeoVectorDataset):
614
613
 
615
614
  Parameters
616
615
  ----------
617
- u_wind : npt.NDArray[np.float_], optional
616
+ u_wind : npt.NDArray[np.float_] | float
618
617
  U wind speed, [:math:`m \ s^{-1}`].
619
618
  Defaults to 0 for all waypoints.
620
- v_wind : npt.NDArray[np.float_], optional
619
+ v_wind : npt.NDArray[np.float_] | float
621
620
  V wind speed, [:math:`m \ s^{-1}`].
622
621
  Defaults to 0 for all waypoints.
623
622
  smooth : bool, optional
@@ -635,12 +634,6 @@ class Flight(GeoVectorDataset):
635
634
  """
636
635
  groundspeed = self.segment_groundspeed(smooth, window_length, polyorder)
637
636
 
638
- if u_wind is None:
639
- u_wind = np.zeros_like(groundspeed)
640
-
641
- if v_wind is None:
642
- v_wind = np.zeros_like(groundspeed)
643
-
644
637
  sin_a, cos_a = self.segment_angle()
645
638
  gs_x = groundspeed * cos_a
646
639
  gs_y = groundspeed * sin_a
@@ -820,20 +813,20 @@ class Flight(GeoVectorDataset):
820
813
  2 50.0 0.0 0.0 2020-01-01 02:00:00
821
814
 
822
815
  >>> fl.resample_and_fill('10T').dataframe # resample with 10 minute frequency
823
- time longitude latitude altitude
824
- 0 2020-01-01 00:00:00 0.000000 0.0 0.0
825
- 1 2020-01-01 00:10:00 0.000000 0.0 0.0
826
- 2 2020-01-01 00:20:00 0.000000 0.0 0.0
827
- 3 2020-01-01 00:30:00 0.000000 0.0 0.0
828
- 4 2020-01-01 00:40:00 0.000000 0.0 0.0
829
- 5 2020-01-01 00:50:00 0.000000 0.0 0.0
830
- 6 2020-01-01 01:00:00 0.000000 0.0 0.0
831
- 7 2020-01-01 01:10:00 8.928571 0.0 0.0
832
- 8 2020-01-01 01:20:00 16.964286 0.0 0.0
833
- 9 2020-01-01 01:30:00 25.892857 0.0 0.0
834
- 10 2020-01-01 01:40:00 33.928571 0.0 0.0
835
- 11 2020-01-01 01:50:00 41.964286 0.0 0.0
836
- 12 2020-01-01 02:00:00 50.000000 0.0 0.0
816
+ longitude latitude altitude time
817
+ 0 0.000000 0.0 0.0 2020-01-01 00:00:00
818
+ 1 0.000000 0.0 0.0 2020-01-01 00:10:00
819
+ 2 0.000000 0.0 0.0 2020-01-01 00:20:00
820
+ 3 0.000000 0.0 0.0 2020-01-01 00:30:00
821
+ 4 0.000000 0.0 0.0 2020-01-01 00:40:00
822
+ 5 0.000000 0.0 0.0 2020-01-01 00:50:00
823
+ 6 0.000000 0.0 0.0 2020-01-01 01:00:00
824
+ 7 8.928571 0.0 0.0 2020-01-01 01:10:00
825
+ 8 16.964286 0.0 0.0 2020-01-01 01:20:00
826
+ 9 25.892857 0.0 0.0 2020-01-01 01:30:00
827
+ 10 33.928571 0.0 0.0 2020-01-01 01:40:00
828
+ 11 41.964286 0.0 0.0 2020-01-01 01:50:00
829
+ 12 50.000000 0.0 0.0 2020-01-01 02:00:00
837
830
  """
838
831
  methods = "geodesic", "linear"
839
832
  if fill_method not in methods:
@@ -908,6 +901,65 @@ class Flight(GeoVectorDataset):
908
901
  df = df.reset_index()
909
902
  return Flight(data=df, attrs=self.attrs)
910
903
 
904
+ def fit_altitude(
905
+ self,
906
+ max_segments: int = 30,
907
+ pop: int = 3,
908
+ r2_target: float = 0.999,
909
+ max_cruise_rocd: float = 10.0,
910
+ sg_window: int = 7,
911
+ sg_polyorder: int = 1,
912
+ ) -> Flight:
913
+ """Use piecewise linear fitting to smooth a flight profile.
914
+
915
+ Fit a flight profile to a series of line segments. Segments that have a
916
+ small rocd will be set to have a slope of zero and snapped to the
917
+ nearest thousand foot level. A Savitzky-Golay filter will then be
918
+ applied to the profile to smooth the climbs and descents. This filter
919
+ works best for high frequency flight data, sampled at a 1-3 second
920
+ sampling period.
921
+
922
+ Parameters
923
+ ----------
924
+ max_segments : int, optional
925
+ The maximum number of line segments to fit to the flight profile.
926
+ pop: int, optional
927
+ Population parameter used for the stochastic optimization routine
928
+ used to fit the flight profile.
929
+ r2_target: float, optional
930
+ Target r^2 value for solver. Solver will continue to add line
931
+ segments until the resulting r^2 value is greater than this.
932
+ max_cruise_rocd: float, optional
933
+ The maximum ROCD for a segment that will be forced to a slope of
934
+ zero, [:math:`ft s^{-1}`]
935
+ sg_window: int, optional
936
+ Parameter for :func:`scipy.signal.savgol_filter`
937
+ sg_polyorder: int, optional
938
+ Parameter for :func:`scipy.signal.savgol_filter`
939
+
940
+ Returns
941
+ -------
942
+ Flight
943
+ Smoothed flight
944
+ """
945
+ # np.roll pushes the last NaN value from `segment_duration` to the front
946
+ # so the elapsed time at the first waypoint will be 0
947
+ seg_dur = self.segment_duration(dtype=np.float64)
948
+ elapsed_time = np.nancumsum(np.roll(seg_dur, 1))
949
+ alt_ft = fit_altitude(
950
+ elapsed_time,
951
+ np.copy(self.altitude_ft),
952
+ max_segments,
953
+ pop,
954
+ r2_target,
955
+ max_cruise_rocd,
956
+ sg_window,
957
+ )
958
+
959
+ flight = self.copy()
960
+ flight.update(altitude_ft=alt_ft)
961
+ return flight
962
+
911
963
  def _geodesic_interpolation(self, geodesic_threshold: float) -> pd.DataFrame | None:
912
964
  """Geodesic interpolate between large gaps between waypoints.
913
965
 
@@ -1123,7 +1175,7 @@ class Flight(GeoVectorDataset):
1123
1175
  >>> variables = ["air_temperature", "specific_humidity"]
1124
1176
  >>> levels = [300, 250, 200]
1125
1177
  >>> era5 = ERA5(time=times, variables=variables, pressure_levels=levels)
1126
- >>> met = era5.open_metdataset(xr_kwargs=dict(parallel=False))
1178
+ >>> met = era5.open_metdataset()
1127
1179
 
1128
1180
  >>> # Build flight
1129
1181
  >>> df = pd.DataFrame()
@@ -1232,9 +1284,9 @@ def _return_linestring(data: dict[str, npt.NDArray[np.float_]]) -> list[list[flo
1232
1284
  """
1233
1285
  # rounding to reduce the size of resultant json arrays
1234
1286
  points = zip( # pylint: disable=zip-builtin-not-iterating
1235
- np.around(data["longitude"], decimals=4),
1236
- np.around(data["latitude"], decimals=4),
1237
- np.around(data["altitude"], decimals=4),
1287
+ np.round(data["longitude"], decimals=4),
1288
+ np.round(data["latitude"], decimals=4),
1289
+ np.round(data["altitude"], decimals=4),
1238
1290
  )
1239
1291
  return [list(p) for p in points]
1240
1292
 
@@ -1515,7 +1567,7 @@ def filter_altitude(
1515
1567
 
1516
1568
 
1517
1569
  def segment_duration(
1518
- time: npt.NDArray[np.datetime64], dtype: np.dtype = np.dtype(np.float32)
1570
+ time: npt.NDArray[np.datetime64], dtype: npt.DTypeLike = np.float32
1519
1571
  ) -> npt.NDArray[np.float_]:
1520
1572
  """Calculate the time difference between waypoints.
1521
1573
 
@@ -1634,3 +1686,77 @@ def segment_rocd(
1634
1686
  out[-1] = np.nan
1635
1687
 
1636
1688
  return out
1689
+
1690
+
1691
+ def fit_altitude(
1692
+ elapsed_time: npt.NDArray[np.float_],
1693
+ altitude_ft: npt.NDArray[np.float_],
1694
+ max_segments: int = 30,
1695
+ pop: int = 3,
1696
+ r2_target: float = 0.999,
1697
+ max_cruise_rocd: float = 10.0,
1698
+ sg_window: int = 7,
1699
+ sg_polyorder: int = 1,
1700
+ ) -> npt.NDArray[np.float_]:
1701
+ """Use piecewise linear fitting to smooth a flight profile.
1702
+
1703
+ Fit a flight profile to a series of line segments. Segments that have a
1704
+ small rocd will be set to have a slope of zero and snapped to the
1705
+ nearest thousand foot level. A Savitzky-Golay filter will then be
1706
+ applied to the profile to smooth the climbs and descents. This filter
1707
+ works best for high frequency flight data, sampled at a 1-3 second
1708
+ sampling period.
1709
+
1710
+ Parameters
1711
+ ----------
1712
+ elapsed_time: npt.NDArray[np.float_]
1713
+ Cumulative time of flight between waypoints, [:math:`s`]
1714
+ altitude_ft: npt.NDArray[np.float_]
1715
+ Altitude of each waypoint, [:math:`ft`]
1716
+ max_segments: int, optional
1717
+ The maximum number of line segments to fit to the flight profile.
1718
+ pop: int, optional
1719
+ Population parameter used for the stochastic optimization routine
1720
+ used to fit the flight profile.
1721
+ r2_target: float, optional
1722
+ Target r^2 value for solver. Solver will continue to add line
1723
+ segments until the resulting r^2 value is greater than this.
1724
+ max_cruise_rocd: float, optional
1725
+ The maximum ROCD for a segment that will be forced to a slope of
1726
+ zero, [:math:`ft s^{-1}`]
1727
+ sg_window: int, optional
1728
+ Parameter for :func:`scipy.signal.savgol_filter`
1729
+ sg_polyorder: int, optional
1730
+ Parameter for :func:`scipy.signal.savgol_filter`
1731
+
1732
+ Returns
1733
+ -------
1734
+ npt.NDArray[np.float_]
1735
+ Smoothed flight altitudes
1736
+ """
1737
+ try:
1738
+ import pwlf
1739
+ except ModuleNotFoundError:
1740
+ raise ModuleNotFoundError(
1741
+ "The 'fit_altitude' function requires the 'pwlf' package."
1742
+ "This can be installed with 'pip install pwlf'."
1743
+ )
1744
+ for i in range(1, max_segments):
1745
+ m2 = pwlf.PiecewiseLinFit(elapsed_time, altitude_ft)
1746
+ r = m2.fitfast(i, pop)
1747
+ r2 = m2.r_squared()
1748
+ if r2 > r2_target:
1749
+ break
1750
+
1751
+ mask = abs(m2.slopes) < max_cruise_rocd / 60.0
1752
+ bounds = r[:-1][mask], r[1:][mask]
1753
+ lvl = np.round(m2.intercepts[mask], -3)
1754
+ time_stack = np.repeat(elapsed_time[:, np.newaxis], lvl.size, axis=1)
1755
+ filt = (time_stack >= bounds[0]) & (time_stack <= bounds[1])
1756
+ altitude_ft = np.copy(altitude_ft)
1757
+ for i in range(lvl.size):
1758
+ altitude_ft[filt[:, i]] = lvl[i]
1759
+
1760
+ altitude_ft = scipy.signal.savgol_filter(altitude_ft, sg_window, sg_polyorder)
1761
+
1762
+ return altitude_ft