pycontrails-0.54.10-cp313-cp313-macosx_11_0_arm64.whl → pycontrails-0.54.12-cp313-cp313-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

pycontrails/_version.py CHANGED
@@ -1,7 +1,14 @@
1
1
  # file generated by setuptools-scm
2
2
  # don't change, don't track in version control
3
3
 
4
- __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
4
+ __all__ = [
5
+ "__version__",
6
+ "__version_tuple__",
7
+ "version",
8
+ "version_tuple",
9
+ "__commit_id__",
10
+ "commit_id",
11
+ ]
5
12
 
6
13
  TYPE_CHECKING = False
7
14
  if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
9
16
  from typing import Union
10
17
 
11
18
  VERSION_TUPLE = Tuple[Union[int, str], ...]
19
+ COMMIT_ID = Union[str, None]
12
20
  else:
13
21
  VERSION_TUPLE = object
22
+ COMMIT_ID = object
14
23
 
15
24
  version: str
16
25
  __version__: str
17
26
  __version_tuple__: VERSION_TUPLE
18
27
  version_tuple: VERSION_TUPLE
28
+ commit_id: COMMIT_ID
29
+ __commit_id__: COMMIT_ID
19
30
 
20
- __version__ = version = '0.54.10'
21
- __version_tuple__ = version_tuple = (0, 54, 10)
31
+ __version__ = version = '0.54.12'
32
+ __version_tuple__ = version_tuple = (0, 54, 12)
33
+
34
+ __commit_id__ = commit_id = 'g5920a0c98'
@@ -806,19 +806,24 @@ class Flight(GeoVectorDataset):
806
806
  nominal_rocd: float = constants.nominal_rocd,
807
807
  drop: bool = True,
808
808
  keep_original_index: bool = False,
809
+ time: npt.NDArray[np.datetime64] | None = None,
809
810
  ) -> Self:
810
811
  """Resample and fill flight trajectory with geodesics and linear interpolation.
811
812
 
812
- Waypoints are resampled according to the frequency ``freq``. Values for :attr:`data`
813
- columns ``longitude``, ``latitude``, and ``altitude`` are interpolated.
813
+ Waypoints are resampled according to the frequency ``freq`` or to the times in ``time``.
814
+ Values for :attr:`data` columns ``longitude``, ``latitude``, and ``altitude``
815
+ are interpolated.
814
816
 
815
- Resampled waypoints will include all multiples of ``freq`` between the flight
816
- start and end time. For example, when resampling to a frequency of 1 minute,
817
- a flight that starts at 2020/1/1 00:00:59 and ends at 2020/1/1 00:01:01
817
+ When resampled based on ``freq``, waypoints will include all multiples of ``freq``
818
+ between the flight start and end time. For example, when resampling to a frequency of
819
+ 1 minute, a flight that starts at 2020/1/1 00:00:59 and ends at 2020/1/1 00:01:01
818
820
  will return a single waypoint at 2020/1/1 00:01:00, whereas a flight that
819
821
  starts at 2020/1/1 00:01:01 and ends at 2020/1/1 00:01:59 will return an empty
820
822
  flight.
821
823
 
824
+ When resampled based on ``time``, waypoints will include all times between the
825
+ flight start and end time.
826
+
822
827
  Parameters
823
828
  ----------
824
829
  freq : str, optional
@@ -844,6 +849,9 @@ class Flight(GeoVectorDataset):
844
849
  Keep the original index of the :class:`Flight` in addition to the new
845
850
  resampled index. Defaults to ``False``.
846
851
  .. versionadded:: 0.45.2
852
+ time : npt.NDArray[np.datetime64], optional
853
+ Times to resample to. Will override ``freq`` if provided.
854
+ .. versionadded:: 0.54.11
847
855
 
848
856
  Returns
849
857
  -------
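
A minimal usage sketch of the new ``time`` argument (assumes an existing Flight instance ``fl``; the values are illustrative):

    import numpy as np

    # resample to explicit target times instead of a fixed frequency;
    # times outside the flight's start/end are dropped
    target_times = np.arange(
        np.datetime64("2022-03-01T00:05"),
        np.datetime64("2022-03-01T02:05"),
        np.timedelta64(10, "m"),
    )
    fl_resampled = fl.resample_and_fill(time=target_times)
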
@@ -930,10 +938,10 @@ class Flight(GeoVectorDataset):
930
938
  if shift is not None:
931
939
  df["longitude"] = (df["longitude"] - shift) % 360.0
932
940
 
933
- # STEP 5: Resample flight to freq
941
+ # STEP 5: Resample flight
934
942
  # Save altitudes to copy over - these just get rounded down in time.
935
943
  # Also get target sample indices
936
- df, t = _resample_to_freq(df, freq)
944
+ df, t = _resample_to_freq_or_time(df, freq, time)
937
945
 
938
946
  if shift is not None:
939
947
  # We need to translate back to the original chart here
@@ -2129,13 +2137,14 @@ def segment_rocd(
2129
2137
  return T_correction * out # type: ignore[return-value]
2130
2138
 
2131
2139
 
2132
- def _resample_to_freq(df: pd.DataFrame, freq: str) -> tuple[pd.DataFrame, pd.DatetimeIndex]:
2140
+ def _resample_to_freq_or_time(
2141
+ df: pd.DataFrame, freq: str, time: npt.NDArray[np.datetime64] | None
2142
+ ) -> tuple[pd.DataFrame, pd.DatetimeIndex]:
2133
2143
  """Resample a DataFrame to a given frequency.
2134
2144
 
2135
- This function is used to resample a DataFrame to a given frequency. The new
2136
- index will include all the original index values and the new resampled-to-freq
2137
- index values. The "longitude" and "latitude" columns will be linearly interpolated
2138
- to the new index values.
2145
+ This function is used to resample a DataFrame to a given frequency or a specified set of times.
2146
+ The new index will include all the original index values and the new resampled index values.
2147
+ The "longitude" and "latitude" columns will be linearly interpolated to the new index values.
2139
2148
 
2140
2149
  Parameters
2141
2150
  ----------
@@ -2145,6 +2154,8 @@ def _resample_to_freq(df: pd.DataFrame, freq: str) -> tuple[pd.DataFrame, pd.Dat
2145
2154
  freq : str
2146
2155
  Frequency to resample to. See :func:`pd.DataFrame.resample` for
2147
2156
  valid frequency strings.
2157
+ time : pd.DatetimeIndex | None
2158
+ Times to resample to. Overrides ``freq`` if not ``None``.
2148
2159
 
2149
2160
  Returns
2150
2161
  -------
@@ -2153,10 +2164,14 @@ def _resample_to_freq(df: pd.DataFrame, freq: str) -> tuple[pd.DataFrame, pd.Dat
2153
2164
  """
2154
2165
 
2155
2166
  # Manually create a new index that includes all the original index values
2156
- # and the resampled-to-freq index values.
2157
- t0 = df.index[0].ceil(freq)
2158
- t1 = df.index[-1]
2159
- t = pd.date_range(t0, t1, freq=freq, name="time")
2167
+ # and the resampled index values
2168
+ if time is None:
2169
+ t0 = df.index[0].ceil(freq)
2170
+ t1 = df.index[-1]
2171
+ t = pd.date_range(t0, t1, freq=freq, name="time")
2172
+ else:
2173
+ mask = (time >= df.index[0]) & (time <= df.index[-1])
2174
+ t = pd.DatetimeIndex(time[mask], name="time")
2160
2175
 
2161
2176
  concat_arr = np.concatenate([df.index, t])
2162
2177
  concat_arr = np.unique(concat_arr)
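
For reference, a self-contained pandas sketch of the index construction shown above (illustrative only, not the library function itself):

    import numpy as np
    import pandas as pd

    df = pd.DataFrame(
        {"longitude": [10.0, 10.5, 11.2], "latitude": [50.0, 50.2, 50.3]},
        index=pd.DatetimeIndex(
            ["2022-03-01 00:00:30", "2022-03-01 00:03:10", "2022-03-01 00:06:40"], name="time"
        ),
    )

    time = None  # or an np.ndarray of np.datetime64 values
    if time is None:
        t = pd.date_range(df.index[0].ceil("1min"), df.index[-1], freq="1min", name="time")
    else:
        mask = (time >= df.index[0]) & (time <= df.index[-1])
        t = pd.DatetimeIndex(time[mask], name="time")

    # union of original and target times, interpolate, then keep only the target times
    new_index = pd.DatetimeIndex(np.unique(np.concatenate([df.index, t])), name="time")
    out = df.reindex(new_index).interpolate(method="index").loc[t]
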
@@ -97,8 +97,8 @@ def parse_atc_plan(atc_plan: str) -> dict[str, str]:
97
97
  --------
98
98
  :func:`to_atc_plan`
99
99
  """
100
- atc_plan = atc_plan.replace("\r", "")
101
- atc_plan = atc_plan.replace("\n", "")
100
+ atc_plan = atc_plan.replace("\r", " ")
101
+ atc_plan = atc_plan.replace("\n", " ")
102
102
  atc_plan = atc_plan.upper()
103
103
  atc_plan = atc_plan.strip()
104
104
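
The practical effect of substituting a space rather than the empty string (a toy example, not an actual ATC plan):

    raw = "LINE ONE\r\nLINE TWO"
    raw.replace("\r", "").replace("\n", "")    # -> "LINE ONELINE TWO"   (tokens merge)
    raw.replace("\r", " ").replace("\n", " ")  # -> "LINE ONE  LINE TWO" (tokens stay separated)
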
 
pycontrails/core/met.py CHANGED
@@ -522,7 +522,7 @@ class MetBase(ABC, Generic[XArrayType]):
522
522
  return self.data.__len__()
523
523
 
524
524
  @property
525
- def attrs(self) -> dict[Hashable, Any]:
525
+ def attrs(self) -> dict[str, Any]:
526
526
  """Pass through to :attr:`self.data.attrs`."""
527
527
  return self.data.attrs
528
528
 
@@ -1311,7 +1311,11 @@ def update_param_dict(param_dict: dict[str, Any], new_params: dict[str, Any]) ->
1311
1311
  raise KeyError(msg) from None
1312
1312
 
1313
1313
  # Convenience: convert timedelta64-like params
1314
- if isinstance(old_value, np.timedelta64) and not isinstance(value, np.timedelta64):
1314
+ if (
1315
+ isinstance(old_value, np.timedelta64)
1316
+ and not isinstance(value, np.timedelta64)
1317
+ and value is not None
1318
+ ):
1315
1319
  value = pd.to_timedelta(value).to_numpy()
1316
1320
 
1317
1321
  param_dict[param] = value
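
In isolation, the added ``value is not None`` guard means ``None`` now passes through untouched instead of reaching ``pd.to_timedelta`` (a sketch; presumably in support of the new ``max_age=None`` option elsewhere in this release):

    import numpy as np
    import pandas as pd

    old_value = np.timedelta64(20, "h")  # e.g. an existing timedelta-valued default
    for value in ("1h", None):
        if (
            isinstance(old_value, np.timedelta64)
            and not isinstance(value, np.timedelta64)
            and value is not None
        ):
            value = pd.to_timedelta(value).to_numpy()
        # "1h" is converted to a np.timedelta64; None is left as None
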
@@ -238,7 +238,7 @@ def _contours_to_polygons(
238
238
  latitude=latitude,
239
239
  precision=precision,
240
240
  buffer=buffer,
241
- i=child_i,
241
+ i=child_i, # type: ignore[arg-type]
242
242
  )
243
243
 
244
244
  candidate = shapely.Polygon(polygon.exterior, [h.exterior for h in holes])
@@ -242,7 +242,7 @@ def _empty_vector_dict(keys: Iterable[str]) -> dict[str, np.ndarray]:
242
242
  return data
243
243
 
244
244
 
245
- class VectorDataset:
245
+ class VectorDataset: # noqa: PLW1641
246
246
  """Base class to hold 1D arrays of consistent size.
247
247
 
248
248
  Parameters
@@ -304,9 +304,9 @@ class VectorDataset:
304
304
  self.data = VectorDataDict({k: v.to_numpy(copy=copy) for k, v in data.items()})
305
305
  else:
306
306
  time = _handle_time_column(time)
307
- data = {k: v.to_numpy(copy=copy) for k, v in data.items() if k != "time"}
308
- data["time"] = time.to_numpy(copy=copy)
309
- self.data = VectorDataDict(data)
307
+ data_np = {k: v.to_numpy(copy=copy) for k, v in data.items() if k != "time"}
308
+ data_np["time"] = time.to_numpy(copy=copy)
309
+ self.data = VectorDataDict(data_np)
310
310
 
311
311
  # For anything else, we assume it is a dictionary of array-like and attach it
312
312
  else:
@@ -564,7 +564,7 @@ class VectorDataset:
564
564
  _repr = f"{class_name} [{n_keys} keys x {self.size} length, {n_attrs} attributes]"
565
565
 
566
566
  keys = list(self)
567
- keys = keys[0:5] + ["..."] + keys[-1:] if len(keys) > 5 else keys
567
+ keys = [*keys[0:5], "...", *keys[-1:]] if len(keys) > 5 else keys
568
568
  _repr += f"\n\tKeys: {', '.join(keys)}"
569
569
 
570
570
  attrs = self._display_attrs()
@@ -320,7 +320,7 @@ def parse_grid(grid: float, supported: Sequence[float]) -> float:
320
320
 
321
321
 
322
322
  def round_hour(time: datetime, hour: int) -> datetime:
323
- """Round time to the nearest whole hour before input time.
323
+ """Floor time to the nearest whole hour before input time.
324
324
 
325
325
  Parameters
326
326
  ----------
@@ -337,7 +337,7 @@ def round_hour(time: datetime, hour: int) -> datetime:
337
337
  Raises
338
338
  ------
339
339
  ValueError
340
- Description
340
+ If ``hour`` isn't one of 1, 2, 3, ..., 22, 23.
341
341
  """
342
342
  if hour not in range(1, 24):
343
343
  msg = f"hour must be between [1, 23], got {hour}"
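
A standalone sketch of the flooring behaviour the corrected docstring describes (not the library implementation):

    from datetime import datetime

    def floor_to_hour_multiple(time: datetime, hour: int) -> datetime:
        if hour not in range(1, 24):
            raise ValueError(f"hour must be between [1, 23], got {hour}")
        return time.replace(hour=(time.hour // hour) * hour, minute=0, second=0, microsecond=0)

    floor_to_hour_multiple(datetime(2022, 3, 1, 16, 45), 6)  # 2022-03-01 12:00 (floored, not rounded to 18:00)
    floor_to_hour_multiple(datetime(2022, 3, 1, 5, 59), 6)   # 2022-03-01 00:00
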
@@ -17,7 +17,6 @@ else:
17
17
 
18
18
  LOG = logging.getLogger(__name__)
19
19
 
20
- import numpy as np
21
20
  import pandas as pd
22
21
  import xarray as xr
23
22
 
@@ -120,6 +119,8 @@ class HRES(ECMWFAPI):
120
119
  }
121
120
 
122
121
  Credentials can also be provided directly ``url`` ``key``, and ``email`` keyword args.
122
+ A third option is to set the environment variables ``ECMWF_API_URL``, ``ECMWF_API_KEY``,
123
+ and ``ECMWF_API_EMAIL``.
123
124
 
124
125
  See `ecmwf-api-client <https://github.com/ecmwf/ecmwf-api-client>`_ documentation
125
126
  for more information.
@@ -131,7 +132,7 @@ class HRES(ECMWFAPI):
131
132
  Input must be a datetime-like or tuple of datetime-like
132
133
  (datetime, :class:`pandas.Timestamp`, :class:`numpy.datetime64`)
133
134
  specifying the (start, end) of the date range, inclusive.
134
- If ``forecast_time`` is unspecified, the forecast time will
135
+ If ``forecast_time`` is unspecified, the forecast reference time will
135
136
  be assumed to be the nearest synoptic hour: 00, 06, 12, 18.
136
137
  All subsequent times will be downloaded for relative to :attr:`forecast_time`.
137
138
  If None, ``paths`` must be defined and all time coordinates will be loaded from files.
@@ -150,14 +151,21 @@ class HRES(ECMWFAPI):
150
151
  Specify latitude/longitude grid spacing in data.
151
152
  Defaults to 0.25.
152
153
  stream : str, optional
153
- "oper" = atmospheric model/HRES, "enfo" = ensemble forecast.
154
- Defaults to "oper" (HRES),
154
+ - "oper" = high resolution forecast, atmospheric fields, run at hours 00Z and 12Z
155
+ - "scda" = short cut-off high resolution forecast, atmospheric fields,
156
+ run at hours 06Z and 18Z
157
+ - "enfo" = ensemble forecast, atmospheric fields, run at hours 00Z, 06Z, 12Z, and 18Z
158
+
159
+ Defaults to "oper" (HRES).
160
+ If the stream is incompatible with a provided forecast_time, a ``ValueError`` is raised.
161
+ See the `ECMWF documentation <https://confluence.ecmwf.int/display/DAC/ECMWF+open+data%3A+real-time+forecasts+from+IFS+and+AIFS>`_
162
+ for additional information.
155
163
  field_type : str, optional
156
164
  Field type can be e.g. forecast (fc), perturbed forecast (pf),
157
165
  control forecast (cf), analysis (an).
158
166
  Defaults to "fc".
159
167
  forecast_time : DatetimeLike, optional
160
- Specify forecast run by runtime.
168
+ Specify forecast reference time (the time at which the forecast was initialized).
161
169
  Defaults to None.
162
170
  cachestore : cache.CacheStore | None, optional
163
171
  Cache data store for staging data files.
@@ -230,7 +238,7 @@ class HRES(ECMWFAPI):
230
238
 
231
239
  __slots__ = ("email", "field_type", "forecast_time", "key", "server", "stream", "url")
232
240
 
233
- #: stream type, "oper" = atmospheric model/HRES, "enfo" = ensemble forecast.
241
+ #: stream type, "oper" or "scda" for atmospheric model/HRES, "enfo" for ensemble forecast.
234
242
  stream: str
235
243
 
236
244
  #: Field type, forecast ("fc"), perturbed forecast ("pf"),
@@ -251,7 +259,6 @@ class HRES(ECMWFAPI):
251
259
  variables: metsource.VariableInput,
252
260
  pressure_levels: metsource.PressureLevelInput = -1,
253
261
  paths: str | list[str] | pathlib.Path | list[pathlib.Path] | None = None,
254
- cachepath: str | list[str] | pathlib.Path | list[pathlib.Path] | None = None,
255
262
  grid: float = 0.25,
256
263
  stream: str = "oper",
257
264
  field_type: str = "fc",
@@ -276,9 +283,7 @@ class HRES(ECMWFAPI):
276
283
  self.server = ECMWFService("mars", url=url, key=key, email=email)
277
284
  self.paths = paths
278
285
 
279
- if cachestore is self.__marker:
280
- cachestore = cache.DiskCacheStore()
281
- self.cachestore = cachestore
286
+ self.cachestore = cache.DiskCacheStore() if cachestore is self.__marker else cachestore
282
287
 
283
288
  if time is None and paths is None:
284
289
  raise ValueError("Time input is required when paths is None")
@@ -291,14 +296,6 @@ class HRES(ECMWFAPI):
291
296
 
292
297
  self.grid = metsource.parse_grid(grid, [0.1, 0.25, 0.5, 1]) # lat/lon degree resolution
293
298
 
294
- # "enfo" = ensemble forecast
295
- # "oper" = atmospheric model/HRES
296
- if stream not in ("oper", "enfo"):
297
- msg = "Parameter stream must be 'oper' or 'enfo'"
298
- raise ValueError(msg)
299
-
300
- self.stream = stream
301
-
302
299
  # "fc" = forecast
303
300
  # "pf" = perturbed forecast
304
301
  # "cf" = control forecast
@@ -322,7 +319,29 @@ class HRES(ECMWFAPI):
322
319
  # round first element to the nearest 6 hour time (00, 06, 12, 18 UTC) for forecast_time
323
320
  self.forecast_time = metsource.round_hour(self.timesteps[0], 6)
324
321
 
325
- # when no forecast_time or time input, forecast_time is defined in _open_and_cache
322
+ # NOTE: when no forecast_time or time input, forecast_time is defined in _open_and_cache
323
+ # This could occur when only the paths parameter is provided
324
+
325
+ # "enfo" = ensemble forecast
326
+ # "oper" = atmospheric model/HRES for 00 and 12 model runs
327
+ # "scda" = atmospheric model/HRES for 06 and 18 model runs
328
+ available_streams = ("oper", "enfo", "scda")
329
+ if stream not in available_streams:
330
+ msg = f"Parameter stream must be one of {available_streams}"
331
+ raise ValueError(msg)
332
+
333
+ if self.forecast_time.hour in (0, 12) and stream == "scda":
334
+ raise ValueError(
335
+ f"Stream {stream} is not compatible with forecast_time {self.forecast_time}. "
336
+ "Set stream='oper' for 00 and 12 UTC forecast times."
337
+ )
338
+
339
+ if self.forecast_time.hour in (6, 18) and stream == "oper":
340
+ raise ValueError(
341
+ f"Stream {stream} is not compatible with forecast_time {self.forecast_time}. "
342
+ "Set stream='scda' for 06 and 18 UTC forecast times."
343
+ )
344
+ self.stream = stream
326
345
 
327
346
  def __repr__(self) -> str:
328
347
  base = super().__repr__()
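
A usage sketch of the pairing the new checks enforce (argument values are illustrative; ``forecast_time`` defaults to the synoptic hour preceding the first timestep):

    from pycontrails.datalib.ecmwf import HRES

    # 00Z/12Z forecast reference times pair with stream="oper",
    # 06Z/18Z with stream="scda"; a mismatch now raises ValueError
    hres = HRES(
        time=("2022-03-01 06:00", "2022-03-01 12:00"),
        variables=["t", "q"],
        pressure_levels=[300, 250],
        stream="scda",
    )
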
@@ -351,16 +370,14 @@ class HRES(ECMWFAPI):
351
370
  list[tuple[pd.Timestamp, pd.Timestamp]]
352
371
  List of tuple time bounds that can be used as inputs to :class:`HRES(time=...)`
353
372
  """
354
- time_ranges = np.unique(
355
- [pd.Timestamp(t.year, t.month, t.day, 12 * (t.hour // 12)) for t in timesteps]
356
- )
373
+ time_ranges = sorted({t.floor("12h") for t in timesteps})
357
374
 
358
375
  if len(time_ranges) == 1:
359
- time_ranges = [(timesteps[0], timesteps[-1])]
360
- else:
361
- time_ranges[0] = (timesteps[0], time_ranges[1] - pd.Timedelta(hours=1))
362
- time_ranges[1:-1] = [(t, t + pd.Timedelta(hours=11)) for t in time_ranges[1:-1]]
363
- time_ranges[-1] = (time_ranges[-1], timesteps[-1])
376
+ return [(timesteps[0], timesteps[-1])]
377
+
378
+ time_ranges[0] = (timesteps[0], time_ranges[1] - pd.Timedelta(hours=1))
379
+ time_ranges[1:-1] = [(t, t + pd.Timedelta(hours=11)) for t in time_ranges[1:-1]]
380
+ time_ranges[-1] = (time_ranges[-1], timesteps[-1])
364
381
 
365
382
  return time_ranges
366
383
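
How the new ``floor("12h")`` grouping behaves on its own (sketch):

    import pandas as pd

    timesteps = pd.to_datetime(
        ["2022-03-01 05:00", "2022-03-01 10:00", "2022-03-01 14:00", "2022-03-01 23:00"]
    )
    sorted({t.floor("12h") for t in timesteps})
    # [Timestamp('2022-03-01 00:00:00'), Timestamp('2022-03-01 12:00:00')]
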
 
@@ -642,7 +659,7 @@ class HRES(ECMWFAPI):
642
659
 
643
660
  @override
644
661
  def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
645
- if self.stream == "oper":
662
+ if self.stream in ("oper", "scda"):
646
663
  product = "forecast"
647
664
  elif self.stream == "enfo":
648
665
  product = "ensemble"
@@ -689,8 +706,8 @@ class HRES(ECMWFAPI):
689
706
  xr_kwargs.setdefault("parallel", False)
690
707
  ds = self.open_dataset(self.paths, **xr_kwargs)
691
708
 
692
- # set forecast time if its not already defined
693
- if not getattr(self, "forecast_time", None):
709
+ # set forecast time if it's not defined (this occurs when only the paths param is provided)
710
+ if not hasattr(self, "forecast_time"):
694
711
  self.forecast_time = ds["time"].values.astype("datetime64[s]").tolist() # type: ignore[assignment]
695
712
 
696
713
  # check that forecast_time is correct if defined
@@ -66,6 +66,8 @@ class HRESModelLevel(ECMWFAPI):
66
66
  }
67
67
 
68
68
  Credentials can also be provided directly in ``url``, ``key``, and ``email`` keyword args.
69
+ A third option is to set the environment variables ``ECMWF_API_URL``, ``ECMWF_API_KEY``,
70
+ and ``ECMWF_API_EMAIL``.
69
71
 
70
72
  See `ecmwf-api-client <https://github.com/ecmwf/ecmwf-api-client>`_ documentation
71
73
  for more information.
@@ -453,7 +453,10 @@ class GOES:
453
453
 
454
454
  def __repr__(self) -> str:
455
455
  """Return string representation."""
456
- return f"GOES(region='{self.region}', channels={sorted(self.channels)})"
456
+ return (
457
+ f"GOES(region={self.region}, channels={sorted(self.channels)}, "
458
+ f"goes_bucket={self.goes_bucket})"
459
+ )
457
460
 
458
461
  def gcs_goes_path(self, time: datetime.datetime, channels: set[str] | None = None) -> list[str]:
459
462
  """Return GCS paths to GOES data at given time.
@@ -488,7 +491,11 @@ class GOES:
488
491
 
489
492
  out = {}
490
493
  for c in self.channels:
491
- name = f"{self.region.name}_{t_str}_{c}.nc"
494
+ if self.goes_bucket:
495
+ name = f"{self.goes_bucket}_{self.region.name}_{t_str}_{c}.nc"
496
+ else:
497
+ name = f"{self.region.name}_{t_str}_{c}.nc"
498
+
492
499
  lpath = self.cachestore.path(name)
493
500
  out[c] = lpath
494
501
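
A sketch of the resulting cache-key behaviour (region, timestamp string, and bucket are hypothetical):

    region_name, t_str, c = "F", "20220301T000000", "C11"
    goes_bucket = "gcp-public-data-goes-18"  # hypothetical non-default bucket

    name = (
        f"{goes_bucket}_{region_name}_{t_str}_{c}.nc"
        if goes_bucket
        else f"{region_name}_{t_str}_{c}.nc"
    )
    # files pulled from different buckets no longer collide in the local cache
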
 
@@ -586,10 +593,10 @@ class GOES:
586
593
  da = da_dict.popitem()[1]
587
594
  elif "C02" in da_dict:
588
595
  da2 = da_dict.pop("C02")
589
- da1 = xr.concat(da_dict.values(), dim="band_id")
590
- da = _concat_c02(da1, da2)
596
+ da = xr.concat(da_dict.values(), dim="band_id", coords="different", compat="equals")
597
+ da = _concat_c02(da, da2)
591
598
  else:
592
- da = xr.concat(da_dict.values(), dim="band_id")
599
+ da = xr.concat(da_dict.values(), dim="band_id", coords="different", compat="equals")
593
600
 
594
601
  else:
595
602
  ds = _load_via_tempfile(data)
@@ -22,7 +22,6 @@ from pycontrails.core.met_var import (
22
22
  from pycontrails.core.models import Model, ModelParams
23
23
  from pycontrails.core.vector import GeoVectorDataset
24
24
  from pycontrails.datalib import ecmwf
25
- from pycontrails.utils import dependencies
26
25
 
27
26
 
28
27
  def wide_body_jets() -> set[str]:
@@ -224,12 +223,11 @@ class ACCF(Model):
224
223
  try:
225
224
  from climaccf.accf import GeTaCCFs
226
225
  except ModuleNotFoundError as e:
227
- dependencies.raise_module_not_found_error(
228
- name="ACCF.eval method",
229
- package_name="climaccf",
230
- module_not_found_error=e,
231
- pycontrails_optional_package="accf",
226
+ msg = (
227
+ "ACCF.eval method requires the 'climaccf' package. This can be installed "
228
+ "with 'pip install git+https://github.com/dlr-pa/climaccf.git'."
232
229
  )
230
+ raise ModuleNotFoundError(msg) from e
233
231
 
234
232
  self.update_params(params)
235
233
  self.set_source(source)
@@ -2138,7 +2138,8 @@ def compare_cocip_with_goes(
2138
2138
  File path of saved CoCiP-GOES image if ``path_write_img`` is provided.
2139
2139
  """
2140
2140
 
2141
- from pycontrails.datalib.goes import GOES, extract_goes_visualization
2141
+ # We'll get a nice error message if dependencies are not installed
2142
+ from pycontrails.datalib import goes
2142
2143
 
2143
2144
  try:
2144
2145
  import cartopy.crs as ccrs
@@ -2213,9 +2214,8 @@ def compare_cocip_with_goes(
2213
2214
  _contrail = _contrail.filter(is_in_domain)
2214
2215
 
2215
2216
  # Download GOES image at `time`
2216
- goes = GOES(region=region)
2217
- da = goes.get(time)
2218
- rgb, transform, extent = extract_goes_visualization(da)
2217
+ da = goes.GOES(region=region).get(time)
2218
+ rgb, transform, extent = goes.extract_goes_visualization(da)
2219
2219
  bbox = spatial_bbox[0], spatial_bbox[2], spatial_bbox[1], spatial_bbox[3]
2220
2220
 
2221
2221
  # Calculate optimal figure dimensions
@@ -2198,11 +2198,11 @@ def result_merge_source(
2198
2198
  """Merge ``results`` and ``verbose_dict`` onto ``source``."""
2199
2199
 
2200
2200
  # Initialize the main output arrays to all zeros
2201
- dtype = result["age"].dtype if result else "timedelta64[ns]"
2202
- contrail_age = np.zeros(source.size, dtype=dtype)
2201
+ age_dtype = result["age"].dtype if result else "timedelta64[ns]"
2202
+ contrail_age = np.zeros(source.size, dtype=age_dtype)
2203
2203
 
2204
- dtype = result["ef"].dtype if result else np.float32
2205
- ef_per_m = np.zeros(source.size, dtype=dtype)
2204
+ ef_dtype = result["ef"].dtype if result else np.float32
2205
+ ef_per_m = np.zeros(source.size, dtype=ef_dtype)
2206
2206
 
2207
2207
  # If there are results, merge them in
2208
2208
  if result:
@@ -37,8 +37,14 @@ class DryAdvectionParams(models.AdvectionBuffers):
37
37
  #: are interpolated against met data once each ``dt_integration``.
38
38
  dt_integration: np.timedelta64 = np.timedelta64(30, "m")
39
39
 
40
- #: Max age of plume evolution.
41
- max_age: np.timedelta64 = np.timedelta64(20, "h")
40
+ #: Max age of plume evolution. If set to ``None``, ``timesteps`` must not be None
41
+ #: and advection will continue until the final timestep for all plumes.
42
+ max_age: np.timedelta64 | None = np.timedelta64(20, "h")
43
+
44
+ #: Advection timesteps. If provided, ``dt_integration`` will be ignored.
45
+ #:
46
+ #: .. versionadded:: 0.54.11
47
+ timesteps: npt.NDArray[np.datetime64] | None = None
42
48
 
43
49
  #: Rate of change of pressure due to sedimentation [:math:`Pa/s`]
44
50
  sedimentation_rate: float = 0.0
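
A sketch of driving the model with explicit timesteps and no age cutoff (assumes ``met`` and ``source`` are an existing MetDataset and GeoVectorDataset):

    import numpy as np
    from pycontrails.models.dry_advection import DryAdvection

    timesteps = np.arange(
        np.datetime64("2022-03-01T00:00"),
        np.datetime64("2022-03-01T06:00"),
        np.timedelta64(30, "m"),
    )
    model = DryAdvection(met, params={"max_age": None, "timesteps": timesteps})
    out = model.eval(source)
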
@@ -147,6 +153,13 @@ class DryAdvection(models.Model):
147
153
  Advected points.
148
154
  """
149
155
  self.update_params(params)
156
+
157
+ max_age = self.params["max_age"]
158
+ timesteps = self.params["timesteps"]
159
+ if max_age is None and timesteps is None:
160
+ msg = "Timesteps must be set using the timesteps parameter when max_age is None"
161
+ raise ValueError(msg)
162
+
150
163
  self.set_source(source)
151
164
  self.source = self.require_source_type(GeoVectorDataset)
152
165
  self.downselect_met()
@@ -159,24 +172,33 @@ class DryAdvection(models.Model):
159
172
  interp_kwargs = self.interp_kwargs
160
173
 
161
174
  dt_integration = self.params["dt_integration"]
162
- max_age = self.params["max_age"]
163
175
  sedimentation_rate = self.params["sedimentation_rate"]
164
176
  dz_m = self.params["dz_m"]
165
177
  max_depth = self.params["max_depth"]
166
178
  verbose_outputs = self.params["verbose_outputs"]
167
-
168
179
  source_time = self.source["time"]
169
- t0 = pd.Timestamp(source_time.min()).floor(pd.Timedelta(dt_integration)).to_numpy()
170
- t1 = source_time.max()
171
- timesteps = np.arange(t0 + dt_integration, t1 + dt_integration + max_age, dt_integration)
180
+
181
+ if timesteps is None:
182
+ t0 = pd.Timestamp(source_time.min()).floor(pd.Timedelta(dt_integration)).to_numpy()
183
+ t1 = source_time.max()
184
+ timesteps = np.arange(
185
+ t0 + dt_integration, t1 + dt_integration + max_age, dt_integration
186
+ )
172
187
 
173
188
  vector2 = GeoVectorDataset()
174
189
  met = None
175
190
 
176
191
  evolved = []
192
+ tmin = source_time.min()
177
193
  for t in timesteps:
178
- filt = (source_time < t) & (source_time >= t - dt_integration)
194
+ filt = (source_time < t) & (source_time >= tmin)
195
+ tmin = t
196
+
179
197
  vector1 = vector2 + self.source.filter(filt, copy=False)
198
+ if vector1.size == 0:
199
+ vector2 = GeoVectorDataset()
200
+ continue
201
+ evolved.append(vector1) # NOTE: vector1 is mutated below (geometry and weather added)
180
202
 
181
203
  t0 = vector1["time"].min()
182
204
  t1 = vector1["time"].max()
@@ -192,9 +214,10 @@ class DryAdvection(models.Model):
192
214
  verbose_outputs=verbose_outputs,
193
215
  **interp_kwargs,
194
216
  )
195
- evolved.append(vector1)
196
217
 
197
- filt = (vector2["age"] <= max_age) & vector2.coords_intersect_met(self.met)
218
+ filt = vector2.coords_intersect_met(self.met)
219
+ if max_age is not None:
220
+ filt &= vector2["age"] <= max_age
198
221
  vector2 = vector2.filter(filt)
199
222
 
200
223
  if not vector2 and np.all(source_time < t):
@@ -285,7 +308,14 @@ class DryAdvection(models.Model):
285
308
  f"{coord}_buffer": self.params[f"met_{coord}_buffer"]
286
309
  for coord in ("longitude", "latitude", "level")
287
310
  }
288
- buffers["time_buffer"] = (np.timedelta64(0, "ns"), self.params["max_age"])
311
+
312
+ max_age = self.params["max_age"]
313
+ if max_age is None:
314
+ max_age = max(
315
+ np.timedelta64(0), self.params["timesteps"].max() - self.source["time"].max()
316
+ )
317
+ buffers["time_buffer"] = (np.timedelta64(0, "ns"), max_age)
318
+
289
319
  self.met = self.source.downselect_met(self.met, **buffers)
290
320
 
291
321
 
@@ -134,7 +134,7 @@ class ISSR(Model):
134
134
  if scale_humidity:
135
135
  humidity_scaling.eval(self.source, copy_source=False)
136
136
 
137
- self.source["issr"] = issr(
137
+ self.source["issr"] = issr( # type: ignore[type-var]
138
138
  air_temperature=self.source.data["air_temperature"],
139
139
  specific_humidity=self.source.data["specific_humidity"],
140
140
  air_pressure=self.source.data["air_pressure"],
pycontrails/models/pcr.py CHANGED
@@ -90,7 +90,7 @@ class PCR(Model):
90
90
  sac_model = sac.SAC(met=None, params=sac_params, copy_source=False)
91
91
  sac_model.eval(self.source)
92
92
 
93
- pcr_ = _pcr_from_issr_and_sac(self.source.data["issr"], self.source.data["sac"])
93
+ pcr_ = _pcr_from_issr_and_sac(self.source.data["issr"], self.source.data["sac"]) # type: ignore[type-var]
94
94
  self.source["pcr"] = pcr_
95
95
 
96
96
  return self.source
pycontrails/models/sac.py CHANGED
@@ -17,7 +17,7 @@ from pycontrails.core.models import Model, ModelParams
17
17
  from pycontrails.core.vector import GeoVectorDataset
18
18
  from pycontrails.models.humidity_scaling import HumidityScaling
19
19
  from pycontrails.physics import constants, thermo
20
- from pycontrails.utils.types import ArrayLike, ArrayScalarLike, apply_nan_mask_to_arraylike
20
+ from pycontrails.utils.types import ArrayLike, apply_nan_mask_to_arraylike
21
21
 
22
22
  # -----------------
23
23
  # Models as classes
@@ -133,8 +133,8 @@ class SAC(Model):
133
133
 
134
134
  G = slope_mixing_line(specific_humidity, air_pressure, engine_efficiency, ei_h2o, q_fuel)
135
135
  T_sat_liquid_ = T_sat_liquid(G)
136
- rh_crit_sac = rh_critical_sac(air_temperature, T_sat_liquid_, G)
137
- rh = thermo.rh(specific_humidity, air_temperature, air_pressure)
136
+ rh_crit_sac = rh_critical_sac(air_temperature, T_sat_liquid_, G) # type: ignore[type-var]
137
+ rh = thermo.rh(specific_humidity, air_temperature, air_pressure) # type: ignore[type-var]
138
138
  sac_ = sac(rh, rh_crit_sac)
139
139
 
140
140
  # Attaching some intermediate artifacts onto the source
@@ -239,23 +239,6 @@ def T_sat_liquid(G: ArrayLike) -> ArrayLike:
239
239
  return -46.46 - constants.absolute_zero + 9.43 * log_ + 0.72 * log_**2 # type: ignore[return-value]
240
240
 
241
241
 
242
- def _e_sat_liquid_prime(T: ArrayScalarLike) -> ArrayScalarLike:
243
- r"""Calculate derivative of :func:`thermo.e_sat_liquid`.
244
-
245
- Parameters
246
- ----------
247
- T : ArrayScalarLike
248
- Temperature, [:math:`K`].
249
-
250
- Returns
251
- -------
252
- ArrayScalarLike
253
- Derivative of :func:`thermo.e_sat_liquid`, [:math:``Pa \ K^{-1}`].
254
- """
255
- d_inside = 6096.9385 / (T**2) - 0.02711193 + 1.673952 * 1e-5 * 2 * T + 2.433502 / T
256
- return thermo.e_sat_liquid(T) * d_inside
257
-
258
-
259
242
  def T_sat_liquid_high_accuracy(
260
243
  G: ArrayLike,
261
244
  maxiter: int = 5,
@@ -293,7 +276,7 @@ def T_sat_liquid_high_accuracy(
293
276
 
294
277
  def func(T: ArrayLike) -> ArrayLike:
295
278
  """Equation (10) from Schumann 1996."""
296
- return _e_sat_liquid_prime(T) - G
279
+ return thermo.e_sat_liquid_prime(T) - G
297
280
 
298
281
  return scipy.optimize.newton(func, init_guess, maxiter=maxiter)
299
282
 
@@ -441,7 +424,7 @@ def T_critical_sac(
441
424
  return T - T_LM_filt + (e_L_of_T_LM_filt - U_filt * thermo.e_sat_liquid(T)) / G_filt
442
425
 
443
426
  def fprime(T: ArrayLike) -> ArrayLike:
444
- return 1.0 - U_filt * _e_sat_liquid_prime(T) / G_filt
427
+ return 1.0 - U_filt * thermo.e_sat_liquid_prime(T) / G_filt
445
428
 
446
429
  # This initial guess should be less than T_LM.
447
430
  # For relative_humidity away from 1, Newton's method converges quickly, and so
@@ -7,7 +7,7 @@ import numpy.typing as npt
7
7
  import xarray as xr
8
8
 
9
9
  from pycontrails.physics import constants, units
10
- from pycontrails.utils.types import ArrayLike
10
+ from pycontrails.utils.types import ArrayLike, ArrayOrFloat
11
11
 
12
12
  # ------------------
13
13
  # Spherical Geometry
@@ -373,8 +373,8 @@ def segment_length(
373
373
  def forward_azimuth(
374
374
  lons: npt.NDArray[np.floating],
375
375
  lats: npt.NDArray[np.floating],
376
- az: npt.NDArray[np.floating] | float,
377
- dist: npt.NDArray[np.floating] | float,
376
+ az: ArrayOrFloat,
377
+ dist: ArrayOrFloat,
378
378
  ) -> tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]:
379
379
  r"""Calculate coordinates along forward azimuth.
380
380
 
@@ -87,6 +87,42 @@ def p_vapor(q: ArrayScalarLike, p: ArrayScalarLike) -> ArrayScalarLike:
87
87
  return q * p * (constants.R_v / constants.R_d)
88
88
 
89
89
 
90
+ def water_vapor_partial_pressure_along_mixing_line(
91
+ specific_humidity: ArrayScalarLike,
92
+ air_pressure: ArrayScalarLike,
93
+ T_plume: ArrayScalarLike,
94
+ T_ambient: ArrayScalarLike,
95
+ G: ArrayScalarLike,
96
+ ) -> ArrayScalarLike:
97
+ """
98
+ Calculate water vapor partial pressure along mixing line.
99
+
100
+ Parameters
101
+ ----------
102
+ specific_humidity : ArrayScalarLike
103
+ Specific humidity at each waypoint, [:math:`kg_{H_{2}O} / kg_{air}`]
104
+ air_pressure : ArrayScalarLike
105
+ Pressure altitude at each waypoint, [:math:`Pa`]
106
+ T_plume : ArrayScalarLike
107
+ Plume temperature evolution along mixing line, [:math:`K`]
108
+ T_ambient : ArrayScalarLike
109
+ Ambient temperature for each waypoint, [:math:`K`]
110
+ G : ArrayScalarLike
111
+ Slope of the mixing line in a temperature-humidity diagram.
112
+
113
+ Returns
114
+ -------
115
+ ArrayScalarLike
116
+ Water vapor partial pressure along mixing line (p_mw), [:math:`Pa`]
117
+
118
+ References
119
+ ----------
120
+ Eq. (2) of Karcher et al. (2015).
121
+ """
122
+ p_wa = p_vapor(specific_humidity, air_pressure)
123
+ return p_wa + G * (T_plume - T_ambient)
124
+
125
+
90
126
  # -------------------
91
127
  # Saturation Pressure
92
128
  # -------------------
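
A worked example of the relation implemented above, p_mix = p_wa + G * (T_plume - T_ambient) with p_wa = q * p * R_v / R_d (numbers are illustrative; gas constants approximate pycontrails.physics.constants):

    q = 2.0e-5         # specific humidity [kg/kg]
    p = 25_000.0       # air pressure [Pa]
    T_ambient = 220.0  # ambient temperature [K]
    T_plume = 230.0    # plume temperature along the mixing line [K]
    G = 1.7            # mixing-line slope [Pa/K]

    R_d, R_v = 287.05, 461.51
    p_wa = q * p * (R_v / R_d)                # ambient vapor partial pressure, ~0.80 Pa
    p_mix = p_wa + G * (T_plume - T_ambient)  # ~17.8 Pa
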
@@ -142,8 +178,8 @@ def e_sat_ice(T: ArrayScalarLike) -> ArrayScalarLike:
142
178
  )
143
179
 
144
180
 
145
- def e_sat_liquid(T: ArrayScalarLike) -> ArrayScalarLike:
146
- r"""Calculate saturation pressure of water vapor over liquid water.
181
+ def sonntag_e_sat_liquid(T: ArrayScalarLike) -> ArrayScalarLike:
182
+ """Calculate saturation pressure of water vapor over liquid water using Sonntag (1994).
147
183
 
148
184
  Parameters
149
185
  ----------
@@ -154,35 +190,99 @@ def e_sat_liquid(T: ArrayScalarLike) -> ArrayScalarLike:
154
190
  -------
155
191
  ArrayScalarLike
156
192
  Saturation pressure of water vapor over liquid water, [:math:`Pa`]
193
+ """
194
+ return 100.0 * np.exp( # type: ignore[return-value]
195
+ -6096.9385 / T + 16.635794 - 0.02711193 * T + 1.673952 * 1e-5 * T**2 + 2.433502 * np.log(T)
196
+ )
157
197
 
158
- References
198
+
199
+ def mk05_e_sat_liquid(T: ArrayScalarLike) -> ArrayScalarLike:
200
+ """Calculate saturation pressure of water vapor over liquid water using Murphy and Koop (2005).
201
+
202
+ Parameters
159
203
  ----------
160
- - :cite:`sonntag1994`
204
+ T : ArrayScalarLike
205
+ Temperature, [:math:`K`]
206
+
207
+ Returns
208
+ -------
209
+ ArrayScalarLike
210
+ Saturation pressure of water vapor over liquid water, [:math:`Pa`]
211
+
212
+ Notes
213
+ -----
214
+ Several formulations exist for the saturation vapor pressure over liquid water.
215
+
216
+ Buck (Buck Research Manual 1996)..
217
+
218
+ 6.1121 * np.exp((18.678 * (T - 273.15) / 234.5) * (T - 273.15) / (257.14 + (T - 273.15)))
219
+
220
+ Magnus Tetens (Murray, 1967)..
221
+
222
+ 6.1078 * np.exp(17.269388 * (T - 273.16) / (T - 35.86))
223
+
224
+ Guide to Meteorological Instruments and Methods of Observation (CIMO Guide) (WMO, 2008)..
225
+
226
+ 6.112 * np.exp(17.62 * (T - 273.15) / (243.12 + T - 273.15))
227
+
228
+ Sonntag (1994) (see :func:`sonntag_e_sat_liquid`) is used in older versions of CoCiP.
161
229
  """
162
- # Buck (Buck Research Manual 1996)
163
- # 6.1121 * np.exp((18.678 * (T - 273.15) / 234.5) * (T - 273.15) / (257.14 + (T - 273.15)))
164
230
 
165
- # Magnus Tetens (Murray, 1967)
166
- # 6.1078 * np.exp(17.269388 * (T - 273.16) / (T - 35.86))
231
+ return np.exp( # type: ignore[return-value]
232
+ 54.842763
233
+ - 6763.22 / T
234
+ - 4.21 * np.log(T)
235
+ + 0.000367 * T
236
+ + np.tanh(0.0415 * (T - 218.8))
237
+ * (53.878 - 1331.22 / T - 9.44523 * np.log(T) + 0.014025 * T)
238
+ )
167
239
 
168
- # Guide to Meteorological Instruments and Methods of Observation (CIMO Guide) (WMO, 2008)
169
- # 6.112 * np.exp(17.62 * (T - 273.15) / (243.12 + T - 273.15))
170
240
 
171
- # Sonntag (1994) is used in CoCiP
241
+ def sonntag_e_sat_liquid_prime(T: ArrayScalarLike) -> ArrayScalarLike:
242
+ """Calculate the derivative of :func:`sonntag_e_sat_liquid`.
172
243
 
173
- # FIXME: Presently, mypy is not aware that numpy ufuncs will return `xr.DataArray``
174
- # when xr.DataArray is passed in. This will get fixed at some point in the future
175
- # as `numpy` their typing patterns, after which the "type: ignore" comment can
176
- # get ripped out.
177
- # We could explicitly check for `xr.DataArray` then use `xr.apply_ufunc`, but
178
- # this only renders our code more boilerplate and less performant.
179
- # This comment is pasted several places in `pycontrails` -- they should all be
180
- # addressed at the same time.
181
- return 100.0 * np.exp( # type: ignore[return-value]
182
- -6096.9385 / T + 16.635794 - 0.02711193 * T + 1.673952 * 1e-5 * T**2 + 2.433502 * np.log(T)
244
+ Parameters
245
+ ----------
246
+ T : ArrayScalarLike
247
+ Temperature, [:math:`K`].
248
+
249
+ Returns
250
+ -------
251
+ ArrayScalarLike
252
+ Derivative of :func:`sonntag_e_sat_liquid`
253
+ """
254
+ d_inside = 6096.9385 / (T**2) - 0.02711193 + 1.673952 * 1e-5 * 2 * T + 2.433502 / T
255
+ return sonntag_e_sat_liquid(T) * d_inside
256
+
257
+
258
+ def mk05_e_sat_liquid_prime(T: ArrayScalarLike) -> ArrayScalarLike:
259
+ """Calculate the derivative of :func:`mk05_e_sat_liquid`.
260
+
261
+ Parameters
262
+ ----------
263
+ T : ArrayScalarLike
264
+ Temperature, [:math:`K`].
265
+
266
+ Returns
267
+ -------
268
+ ArrayScalarLike
269
+ Derivative of :func:`mk05_e_sat_liquid`
270
+ """
271
+ tanh_term = np.tanh(0.0415 * (T - 218.8))
272
+ return mk05_e_sat_liquid(T) * ( # type: ignore[return-value]
273
+ 6763.22 / T**2
274
+ - 4.21 / T
275
+ + 0.000367
276
+ + 0.0415 * (1 - tanh_term**2) * (53.878 - 1331.22 / T - 9.44523 * np.log(T) + 0.014025 * T)
277
+ + tanh_term * (1331.22 / T**2 - 9.44523 / T + 0.014025)
183
278
  )
184
279
 
185
280
 
281
+ # Set aliases. These could be swapped out or made configurable.
282
+ e_sat_liquid = mk05_e_sat_liquid
283
+ e_sat_liquid_prime = mk05_e_sat_liquid_prime
284
+
285
+
186
286
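
A standalone comparison of the two formulations using the expressions quoted above (the module-level ``e_sat_liquid`` alias now points at the Murphy and Koop form):

    import numpy as np

    def sonntag(T):
        return 100.0 * np.exp(
            -6096.9385 / T + 16.635794 - 0.02711193 * T + 1.673952e-5 * T**2 + 2.433502 * np.log(T)
        )

    def mk05(T):
        return np.exp(
            54.842763
            - 6763.22 / T
            - 4.21 * np.log(T)
            + 0.000367 * T
            + np.tanh(0.0415 * (T - 218.8))
            * (53.878 - 1331.22 / T - 9.44523 * np.log(T) + 0.014025 * T)
        )

    T = np.array([220.0, 250.0, 273.15])
    sonntag(T), mk05(T)  # both in Pa
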
  @support_arraylike
187
287
  def _e_sat_piecewise(T: np.ndarray) -> np.ndarray:
188
288
  """Calculate `e_sat_liquid` when T is above freezing otherwise `e_sat_ice`.
@@ -107,7 +107,7 @@ def m_to_T_isa(h: ArrayScalarLike) -> ArrayScalarLike:
107
107
 
108
108
 
109
109
  def _low_altitude_m_to_pl(h: npt.NDArray[np.floating]) -> npt.NDArray[np.floating]:
110
- T_isa: np.ndarray = m_to_T_isa(h)
110
+ T_isa = m_to_T_isa(h)
111
111
  power_term = -constants.g / (constants.T_lapse_rate * constants.R_d)
112
112
  return (constants.p_surface * (T_isa / constants.T_msl) ** power_term) / 100.0
113
113
 
@@ -14,20 +14,13 @@ import pandas as pd
14
14
  import xarray as xr
15
15
 
16
16
  #: Array like (np.ndarray, xr.DataArray)
17
- ArrayLike = TypeVar("ArrayLike", np.ndarray, xr.DataArray, xr.DataArray | np.ndarray)
17
+ ArrayLike = TypeVar("ArrayLike", np.ndarray, xr.DataArray)
18
18
 
19
19
  #: Array or Float (np.ndarray, float)
20
- ArrayOrFloat = TypeVar("ArrayOrFloat", npt.NDArray[np.floating], float)
20
+ ArrayOrFloat = TypeVar("ArrayOrFloat", np.ndarray, float)
21
21
 
22
22
  #: Array like input (np.ndarray, xr.DataArray, float)
23
- ArrayScalarLike = TypeVar(
24
- "ArrayScalarLike",
25
- np.ndarray,
26
- xr.DataArray,
27
- float,
28
- np.ndarray | float,
29
- xr.DataArray | np.ndarray,
30
- )
23
+ ArrayScalarLike = TypeVar("ArrayScalarLike", np.ndarray, xr.DataArray, float)
31
24
 
32
25
  #: Datetime like input (datetime, pd.Timestamp, np.datetime64)
33
26
  DatetimeLike = TypeVar("DatetimeLike", datetime, pd.Timestamp, np.datetime64, str)
@@ -1,9 +1,9 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pycontrails
3
- Version: 0.54.10
3
+ Version: 0.54.12
4
4
  Summary: Python library for modeling aviation climate impacts
5
5
  Author-email: "Contrails.org" <py@contrails.org>
6
- License: Apache-2.0
6
+ License-Expression: Apache-2.0
7
7
  Project-URL: Changelog, https://py.contrails.org/changelog.html
8
8
  Project-URL: Documentation, https://py.contrails.org
9
9
  Project-URL: Issues, https://github.com/contrailcirrus/pycontrails/issues
@@ -11,7 +11,6 @@ Project-URL: Repository, https://github.com/contrailcirrus/pycontrails
11
11
  Keywords: contrails,climate,aviation,geospatial
12
12
  Classifier: Development Status :: 4 - Beta
13
13
  Classifier: Intended Audience :: Science/Research
14
- Classifier: License :: OSI Approved :: Apache Software License
15
14
  Classifier: Operating System :: OS Independent
16
15
  Classifier: Programming Language :: Python :: 3
17
16
  Classifier: Programming Language :: Python :: 3.10
@@ -36,7 +35,6 @@ Requires-Dist: xarray>=2022.3
36
35
  Provides-Extra: complete
37
36
  Requires-Dist: pycontrails[ecmwf,gcp,gfs,jupyter,pyproj,sat,vis,zarr]; extra == "complete"
38
37
  Provides-Extra: dev
39
- Requires-Dist: dep_license; extra == "dev"
40
38
  Requires-Dist: fastparquet>=0.8; extra == "dev"
41
39
  Requires-Dist: ipdb>=0.13; extra == "dev"
42
40
  Requires-Dist: memory_profiler; extra == "dev"
@@ -1,39 +1,39 @@
1
- pycontrails-0.54.10.dist-info/RECORD,,
2
- pycontrails-0.54.10.dist-info/WHEEL,sha256=VIOxLMMkshvS_PbEukmsYu1sg_oxjW5SBJ1JnbuDdDk,136
3
- pycontrails-0.54.10.dist-info/top_level.txt,sha256=Z8J1R_AiBAyCVjNw6jYLdrA68PrQqTg0t3_Yek_IZ0Q,29
4
- pycontrails-0.54.10.dist-info/METADATA,sha256=6_jv8g-Jc_JE5J9qHUY3iv9S2ZrggE_4zOg-7nbtJ98,9132
5
- pycontrails-0.54.10.dist-info/licenses/LICENSE,sha256=gJ-h7SFFD1mCfR6a7HILvEtodDT6Iig8bLXdgqR6ucA,10175
6
- pycontrails-0.54.10.dist-info/licenses/NOTICE,sha256=fiBPdjYibMpDzf8hqcn7TvAQ-yeK10q_Nqq24DnskYg,1962
7
- pycontrails/_version.py,sha256=XWNQstht0_G88RBCuMbHwa-c0eQNLQAijV1bVbgzzW8,515
1
+ pycontrails-0.54.12.dist-info/RECORD,,
2
+ pycontrails-0.54.12.dist-info/WHEEL,sha256=oqGJCpG61FZJmvyZ3C_0aCv-2mdfcY9e3fXvyUNmWfM,136
3
+ pycontrails-0.54.12.dist-info/top_level.txt,sha256=Z8J1R_AiBAyCVjNw6jYLdrA68PrQqTg0t3_Yek_IZ0Q,29
4
+ pycontrails-0.54.12.dist-info/METADATA,sha256=UaUC4O8LMv-iJ_7xbgDskuRMX_GjT6dTodRHeda75Zc,9037
5
+ pycontrails-0.54.12.dist-info/licenses/LICENSE,sha256=gJ-h7SFFD1mCfR6a7HILvEtodDT6Iig8bLXdgqR6ucA,10175
6
+ pycontrails-0.54.12.dist-info/licenses/NOTICE,sha256=fiBPdjYibMpDzf8hqcn7TvAQ-yeK10q_Nqq24DnskYg,1962
7
+ pycontrails/_version.py,sha256=UTr-0gLpdmG3tPVgS4kgxnF4fjydZpe2yN5_iKvXWmU,716
8
8
  pycontrails/__init__.py,sha256=9ypSB2fKZlKghTvSrjWo6OHm5qfASwiTIvlMew3Olu4,2037
9
9
  pycontrails/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
- pycontrails/core/vector.py,sha256=N-3VhPaUEyFSJWjplMKFcv9GLvEqAibKn1zqJWuNZQU,73601
11
- pycontrails/core/models.py,sha256=gpG0hnXZox5caojierunxKkgFLQt-6nHRhzQZJKNrzo,43845
10
+ pycontrails/core/vector.py,sha256=X_g8lzY6plJ6oeUHigSjt9qcPv34a3m1DeK1pqocrDw,73627
11
+ pycontrails/core/models.py,sha256=3mDTqp1V5aae9akuYwbMGIUEkESKSYTjZeyu2IiMW7s,43915
12
12
  pycontrails/core/interpolation.py,sha256=wovjj3TAf3xonVxjarclpvZLyLq6N7wZQQXsI9hT3YA,25713
13
13
  pycontrails/core/fleet.py,sha256=0hi_N4R93St-7iD29SE0EnadpBEl_p9lSGtDwpWvGkk,16704
14
- pycontrails/core/flight.py,sha256=QZTGeZVnZ14UUWHSqgCSU49g_EGQZel-hzKwm_9dcFY,80653
14
+ pycontrails/core/flight.py,sha256=kQ78YdvjPZI6v2Bj_2Fr1MgNmrrtIN6j21l4fwcRW4E,81380
15
15
  pycontrails/core/fuel.py,sha256=kJZ3P1lPm1L6rdPREM55XQ-VfJ_pt35cP4sO2Nnvmjs,4332
16
- pycontrails/core/polygon.py,sha256=EmfHPj0e58whsHvR-3YvDgMWkvMFgp_BgwaoG8IZ4n0,18044
16
+ pycontrails/core/polygon.py,sha256=g7YqWzUbOHWT65XrLqLUZLrQXYcx_x1NcJ041-Cj7UY,18070
17
17
  pycontrails/core/cache.py,sha256=IIyx726zN7JzNSKV0JJDksMI9OhCLdnJShmBVStRqzI,28154
18
18
  pycontrails/core/__init__.py,sha256=p0O09HxdeXU0X5Z3zrHMlTfXa92YumT3fJ8wJBI5ido,856
19
- pycontrails/core/rgi_cython.cpython-313-darwin.so,sha256=wys6Uo9rG1wwigoPqXOHGsMsMbzmi-Y-1VhxMYP2dPQ,297728
20
- pycontrails/core/flightplan.py,sha256=xgyYLi36OlNKtIFuOHaifcDM6XMBYTyMQlXAtfd-6Js,7519
21
- pycontrails/core/met.py,sha256=4XQAJrKWBN0SZQSeBpMUnkLn87vYpn2VMiY3dQyFRIw,103992
19
+ pycontrails/core/rgi_cython.cpython-313-darwin.so,sha256=JXh8FzEFPTX7C_h0mwDPUKWVOPKyiQX5c-3fJ_uTWMc,298384
20
+ pycontrails/core/flightplan.py,sha256=0mvA3IO19Sap-7gwpmEIV35_mg6ChvajwhurvjZZt_U,7521
21
+ pycontrails/core/met.py,sha256=O9W6RaEwUsg7ZERR47Q-6fYjg13BzOZtcQdw92444xg,103987
22
22
  pycontrails/core/aircraft_performance.py,sha256=Kk_Rb61jDOWPmCQHwn2jR5vMPmB8b3aq1iTWfiUMj9U,28232
23
23
  pycontrails/core/airports.py,sha256=ubYo-WvxKPd_dUcADx6yew9Tqh1a4VJDgX7aFqLYwB8,6775
24
24
  pycontrails/core/met_var.py,sha256=lAbp3cko_rzMk_u0kq-F27sUXUxUKikUvCNycwp9ILY,12020
25
25
  pycontrails/core/coordinates.py,sha256=0ySsHtqTon7GMbuwmmxMbI92j3ueMteJZh4xxNm5zto,5391
26
- pycontrails/datalib/goes.py,sha256=4bKtu1l3IVsjKv7mLAWhStRzOVaY9Wi_cZxvL_g-V3w,34081
26
+ pycontrails/datalib/goes.py,sha256=_TB32tGWxumASOEZUQ_PwGWz8QMZPTJf8pG4jbPR-WY,34358
27
27
  pycontrails/datalib/landsat.py,sha256=r6366rEF7fOA7mT5KySCPGJplgGE5LvBw5fMqk-U1oM,19697
28
28
  pycontrails/datalib/__init__.py,sha256=hW9NWdFPC3y_2vHMteQ7GgQdop3917MkDaf5ZhU2RBY,369
29
29
  pycontrails/datalib/sentinel.py,sha256=hYSxIlQnyJHqtHWlKn73HOK_1pm-_IbGebmkHnh4UcA,17172
30
- pycontrails/datalib/_met_utils/metsource.py,sha256=omgrBrAap11G5hV8a9qS3umJVuwoX_Mca6QctRa6xn8,24116
30
+ pycontrails/datalib/_met_utils/metsource.py,sha256=B4Gd9gkfMMlXe-xc_xcNNZAJ0gOeRelvrBsFyk6tEs4,24151
31
31
  pycontrails/datalib/ecmwf/arco_era5.py,sha256=7HXQU5S02PzX9Ew2ZrDKSp0tDEG1eeVAvbP3decmm20,12437
32
32
  pycontrails/datalib/ecmwf/era5.py,sha256=4ULNdDlUN0kP6Tbp8D_-Bc12nAsLf0iNfZaDoj_AoZU,18952
33
33
  pycontrails/datalib/ecmwf/era5_model_level.py,sha256=AO7ePIGZtavx5nQSPYP4p07RNZeg3bbzmoZC7RUC4Gg,19354
34
- pycontrails/datalib/ecmwf/hres.py,sha256=9QHYxMLK7zyQEOFpbVrZfIht9WqVXnhhyOd7YKEgAe0,28381
34
+ pycontrails/datalib/ecmwf/hres.py,sha256=isRQkybVZyKxVvqpQyGQsMtePFfv_qqAMBQ98wDLYtc,29680
35
35
  pycontrails/datalib/ecmwf/variables.py,sha256=lU3BNe265XVhCXvdMwZqfkWQwtsetZxVRLSfPqHFKAE,9913
36
- pycontrails/datalib/ecmwf/hres_model_level.py,sha256=EjBDYbbPZotTsveFlEiAAWJhhPYiao1DQrLyS4kVCrA,17657
36
+ pycontrails/datalib/ecmwf/hres_model_level.py,sha256=CcxMKiFJyLvM9njmBVywAXJxyWE7atsgHXBubKJQqHM,17779
37
37
  pycontrails/datalib/ecmwf/__init__.py,sha256=wdfhplEaW2UKTItIoshTtVEjbPyfDYoprTJNxbKZuvA,2021
38
38
  pycontrails/datalib/ecmwf/common.py,sha256=qRMSzDQikGMi3uqvz-Y57e3biHPzSoVMfUwOu9iTxHc,4024
39
39
  pycontrails/datalib/ecmwf/model_levels.py,sha256=_kgpnogaS6MlfvTX9dB5ASTHFUlZuQ_DRb-VADwEa0k,16996
@@ -54,18 +54,18 @@ pycontrails/ext/empirical_grid.py,sha256=FPNQA0x4nVwBXFlbs3DgIapSrXFYhoc8b8IX0M4
54
54
  pycontrails/ext/bada.py,sha256=YlQq4nnFyWza1Am2e2ZucpaICHDuUFRTrtVzIKMzf9s,1091
55
55
  pycontrails/utils/iteration.py,sha256=q_vb39VjxRr4hqTyPYko3gK4sboJOJf_Evq6m_2DL-g,319
56
56
  pycontrails/utils/__init__.py,sha256=Gt_57sBgfliFSxx9sDpuchykFDxmM11Wg9xAeSqPcnI,32
57
- pycontrails/utils/types.py,sha256=dN2oYVNNbekqvM89Lfs0FmmhavRQGC7NgGhi_7m6UBU,4984
57
+ pycontrails/utils/types.py,sha256=1AaY1x_qGlYAl08xg6PS0MPKm3OZwFBM7xLI_nHK7EY,4869
58
58
  pycontrails/utils/temp.py,sha256=lGU0b_R8ze4yKlsOusHIIBaoNFBrmrB3vBjgHRlfcXk,1109
59
59
  pycontrails/utils/json.py,sha256=oTiO8xh603esfBGaGVmA5eUzR0NhAqNpQCegMMgnSbg,5896
60
60
  pycontrails/utils/dependencies.py,sha256=ATP45xYdUbIyGFzgbOe5SbokMytvB84TcexUEFnEUZE,2559
61
61
  pycontrails/models/pcc.py,sha256=0Qdl4u8PmUEpNYd398glTChkbTwsh83wYPt0Bmi8qd8,11068
62
62
  pycontrails/models/tau_cirrus.py,sha256=2Z4egt-QFprkyITRgtarA5alOTTQRQbjzgmSqE49_1g,5778
63
63
  pycontrails/models/__init__.py,sha256=dQTOLQb7RdUdUwslt5se__5y_ymbInBexQmNrmAeOdE,33
64
- pycontrails/models/issr.py,sha256=AYLYLHxtG8je5UG6x1zLV0ul89MJPqe5Xk0oWIyZ7b0,7378
65
- pycontrails/models/sac.py,sha256=lV1Or0AaLxuS1Zo5V8h5c1fkSKC-hKEgiFm7bmmusWw,15946
66
- pycontrails/models/accf.py,sha256=egdBa4_G3BUaoUQYWvVlTlAIWpLEuNdtCxlK3eckLOc,13599
67
- pycontrails/models/dry_advection.py,sha256=BlOQeap3rXKRhRlvhFfpOLIX3bFgYE_bJg2LlPRHIas,19424
68
- pycontrails/models/pcr.py,sha256=ZzbEuTOuDdUmmL5T3Wk3HL-O8XzX3HMnn98WcPbASaU,5348
64
+ pycontrails/models/issr.py,sha256=_qIKDgO0Owxeb0Q4WJlxcn1FJEvF3QDU-cqh2fpDsBo,7404
65
+ pycontrails/models/sac.py,sha256=8Vx5wg4-Kb8l4GK67wp7VNVpdFM4Wyux1xKuNrjZ_IQ,15516
66
+ pycontrails/models/accf.py,sha256=_tunWpw1sYW8ES8RvpdhNahXwaf4LwdHMEdXhv7-cCI,13566
67
+ pycontrails/models/dry_advection.py,sha256=8vkHesYx3rM858LrIpXCZ9xQ6GmL3tZeOhj5MJh42Q0,20483
68
+ pycontrails/models/pcr.py,sha256=Xde0aF8cMV9jTQ_uI2UvdHSLqotVUgPutb1Wgq7LtfY,5374
69
69
  pycontrails/models/emissions/__init__.py,sha256=CZB2zIkLUI3NGNmq2ddvRYjEtiboY6PWJjiEiXj_zII,478
70
70
  pycontrails/models/emissions/ffm2.py,sha256=mAvBHnp-p3hIn2fjKGq50eaMHi0jcb5hA5uXbJGeE9I,12068
71
71
  pycontrails/models/emissions/emissions.py,sha256=qt689n4MuWw4XZ0NneBTNR2RqVG3XRYafgkkdSc1yEw,47757
@@ -85,7 +85,7 @@ pycontrails/models/humidity_scaling/quantiles/era5-model-level-quantiles.pq,sha2
85
85
  pycontrails/models/cocip/radiative_forcing.py,sha256=A-k3V7Cb9tXvCpne3CsQpWIKDR9ZD4k8Jf3z6FfSkA0,44650
86
86
  pycontrails/models/cocip/wind_shear.py,sha256=m6ZlWjORfI-lI-D74Z_dIMOHnK4FDYmkb0S6vSpKTO8,3868
87
87
  pycontrails/models/cocip/cocip.py,sha256=uSorvK_AgAceTaeN8AqSiT4jqZO1lsqmewuLW2U02K4,104095
88
- pycontrails/models/cocip/output_formats.py,sha256=cvuliaxhUBRZKBGkGkVOeV4-CN7IVAeZ2tIwXqHmUKw,83948
88
+ pycontrails/models/cocip/output_formats.py,sha256=dBT5-1yJsX_T_EoVhuja8ow4u-WlJRJ-7DihCgkyl7U,83980
89
89
  pycontrails/models/cocip/__init__.py,sha256=CWrkNd6S3ZJq04pjTc2W22sVAJeJD3bJJRy_zLW8Kkc,962
90
90
  pycontrails/models/cocip/cocip_params.py,sha256=34_F7mXyJpSfek7iRhLVj6JaZeSoFmfcxx2WmmZN42Q,12534
91
91
  pycontrails/models/cocip/wake_vortex.py,sha256=YmOuv_oWJ9-fmTx9PVHr6gsXwex0qzLhvoZIJNB9rsk,14515
@@ -102,12 +102,12 @@ pycontrails/models/ps_model/static/ps-aircraft-params-20250328.csv,sha256=LUYuWo
102
102
  pycontrails/models/ps_model/static/ps-synonym-list-20250328.csv,sha256=phtrf0m-UYQ7gjoKtIIwINzftTSNd-Bwe9CPen_Gvc8,1048
103
103
  pycontrails/models/cocipgrid/cocip_grid_params.py,sha256=l4vBPrOKCJDz5Y1uMjmOGVyUcSWgfZtFWbjW968OPz8,5875
104
104
  pycontrails/models/cocipgrid/__init__.py,sha256=ar6bF_8Pusbb-myujz_q5ntFylQTNH8yiM8fxP7Zk30,262
105
- pycontrails/models/cocipgrid/cocip_grid.py,sha256=di6LDHCPqOzuTAK0xB_Re8NLLd8HK-c1sFSIW9MSKFk,91387
106
- pycontrails/physics/geo.py,sha256=5THIXgpaHBQdSYWLgtK4mV_8e1hWW9XeTsSHOShFMeA,36323
107
- pycontrails/physics/units.py,sha256=BC0e0l_pDeijqN179tXl8eX_Qpw8d17MVujBu1SV3IE,12293
105
+ pycontrails/models/cocipgrid/cocip_grid.py,sha256=OTltSP9wWNEZbi0Pcr19sDeBlbRWssmJy085X5TZ-lo,91401
106
+ pycontrails/physics/geo.py,sha256=ITK23l1A2lzjNPTFC8ZKyQH59I5Cy_TvuvM_gbALo94,36297
107
+ pycontrails/physics/units.py,sha256=p-6PzFLpVCMpvmfrhXVh3Hs-nMJw9Y1x-hvgnL9Lo9c,12281
108
108
  pycontrails/physics/constants.py,sha256=xWy7OkDOJNM6umq5dYiuzwG0aTEl5aECLxEpg3Z2SBQ,3202
109
109
  pycontrails/physics/__init__.py,sha256=_1eWbEy6evEWdfJCEkwDiSdpiDNzNWEPVqaPekHyhwU,44
110
- pycontrails/physics/thermo.py,sha256=sWGpKa12daSpqZYNgyXd8Ii5nfA_1Mm5mMbnM5GsW-E,12787
110
+ pycontrails/physics/thermo.py,sha256=v7-66PE31SJXz45MXAB9Iq9XfPg1Sn5FpOsTngLaDDI,15406
111
111
  pycontrails/physics/jet.py,sha256=Je1d3vgbBEaVIAL1WZ3C-4p2f9fy9dWOjP5vFVsGGh8,30358
112
112
  pycontrails/physics/static/iata-cargo-load-factors-20250221.csv,sha256=ixsnQk1DyGxHMo0pDy4aOoQIwgOyrGfhMRPumEwPMBc,3841
113
113
  pycontrails/physics/static/iata-passenger-load-factors-20250221.csv,sha256=Q2olRIqUpbOaavvM5ikG8m1v1YQAN3KLNHeFDPvM53Q,3835
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (80.7.1)
2
+ Generator: setuptools (80.9.0)
3
3
  Root-Is-Purelib: false
4
4
  Tag: cp313-cp313-macosx_11_0_arm64
5
5
  Generator: delocate 0.13.0