pycontrails 0.54.9__cp311-cp311-win_amd64.whl → 0.54.11__cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pycontrails might be problematic. Click here for more details.

pycontrails/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
17
17
  __version_tuple__: VERSION_TUPLE
18
18
  version_tuple: VERSION_TUPLE
19
19
 
20
- __version__ = version = '0.54.9'
21
- __version_tuple__ = version_tuple = (0, 54, 9)
20
+ __version__ = version = '0.54.11'
21
+ __version_tuple__ = version_tuple = (0, 54, 11)
@@ -806,19 +806,24 @@ class Flight(GeoVectorDataset):
806
806
  nominal_rocd: float = constants.nominal_rocd,
807
807
  drop: bool = True,
808
808
  keep_original_index: bool = False,
809
+ time: npt.NDArray[np.datetime64] | None = None,
809
810
  ) -> Self:
810
811
  """Resample and fill flight trajectory with geodesics and linear interpolation.
811
812
 
812
- Waypoints are resampled according to the frequency ``freq``. Values for :attr:`data`
813
- columns ``longitude``, ``latitude``, and ``altitude`` are interpolated.
813
+ Waypoints are resampled according to the frequency ``freq`` or to the times in ``time``.
814
+ Values for :attr:`data` columns ``longitude``, ``latitude``, and ``altitude``
815
+ are interpolated.
814
816
 
815
- Resampled waypoints will include all multiples of ``freq`` between the flight
816
- start and end time. For example, when resampling to a frequency of 1 minute,
817
- a flight that starts at 2020/1/1 00:00:59 and ends at 2020/1/1 00:01:01
817
+ When resampled based on ``freq``, waypoints will include all multiples of ``freq``
818
+ between the flight start and end time. For example, when resampling to a frequency of
819
+ 1 minute, a flight that starts at 2020/1/1 00:00:59 and ends at 2020/1/1 00:01:01
818
820
  will return a single waypoint at 2020/1/1 00:01:00, whereas a flight that
819
821
  starts at 2020/1/1 00:01:01 and ends at 2020/1/1 00:01:59 will return an empty
820
822
  flight.
821
823
 
824
+ When resampled based on ``time``, waypoints will include all times between the
825
+ flight start and end time.
826
+
822
827
  Parameters
823
828
  ----------
824
829
  freq : str, optional
@@ -844,6 +849,9 @@ class Flight(GeoVectorDataset):
844
849
  Keep the original index of the :class:`Flight` in addition to the new
845
850
  resampled index. Defaults to ``False``.
846
851
  .. versionadded:: 0.45.2
852
+ time : npt.NDArray[np.datetime64], optional
853
+ Times to resample to. Will override ``freq`` if provided.
854
+ .. versionadded:: 0.54.11
847
855
 
848
856
  Returns
849
857
  -------
@@ -930,10 +938,10 @@ class Flight(GeoVectorDataset):
930
938
  if shift is not None:
931
939
  df["longitude"] = (df["longitude"] - shift) % 360.0
932
940
 
933
- # STEP 5: Resample flight to freq
941
+ # STEP 5: Resample flight
934
942
  # Save altitudes to copy over - these just get rounded down in time.
935
943
  # Also get target sample indices
936
- df, t = _resample_to_freq(df, freq)
944
+ df, t = _resample_to_freq_or_time(df, freq, time)
937
945
 
938
946
  if shift is not None:
939
947
  # We need to translate back to the original chart here
@@ -2129,13 +2137,14 @@ def segment_rocd(
2129
2137
  return T_correction * out # type: ignore[return-value]
2130
2138
 
2131
2139
 
2132
- def _resample_to_freq(df: pd.DataFrame, freq: str) -> tuple[pd.DataFrame, pd.DatetimeIndex]:
2140
+ def _resample_to_freq_or_time(
2141
+ df: pd.DataFrame, freq: str, time: npt.NDArray[np.datetime64] | None
2142
+ ) -> tuple[pd.DataFrame, pd.DatetimeIndex]:
2133
2143
  """Resample a DataFrame to a given frequency.
2134
2144
 
2135
- This function is used to resample a DataFrame to a given frequency. The new
2136
- index will include all the original index values and the new resampled-to-freq
2137
- index values. The "longitude" and "latitude" columns will be linearly interpolated
2138
- to the new index values.
2145
+ This function is used to resample a DataFrame to a given frequency or a specified set of times.
2146
+ The new index will include all the original index values and the new resampled index values.
2147
+ The "longitude" and "latitude" columns will be linearly interpolated to the new index values.
2139
2148
 
2140
2149
  Parameters
2141
2150
  ----------
@@ -2145,6 +2154,8 @@ def _resample_to_freq(df: pd.DataFrame, freq: str) -> tuple[pd.DataFrame, pd.Dat
2145
2154
  freq : str
2146
2155
  Frequency to resample to. See :func:`pd.DataFrame.resample` for
2147
2156
  valid frequency strings.
2157
+ time : pd.DatetimeIndex | None
2158
+ Times to resample to. Overrides ``freq`` if not ``None``.
2148
2159
 
2149
2160
  Returns
2150
2161
  -------
@@ -2153,10 +2164,14 @@ def _resample_to_freq(df: pd.DataFrame, freq: str) -> tuple[pd.DataFrame, pd.Dat
2153
2164
  """
2154
2165
 
2155
2166
  # Manually create a new index that includes all the original index values
2156
- # and the resampled-to-freq index values.
2157
- t0 = df.index[0].ceil(freq)
2158
- t1 = df.index[-1]
2159
- t = pd.date_range(t0, t1, freq=freq, name="time")
2167
+ # and the resampled index values
2168
+ if time is None:
2169
+ t0 = df.index[0].ceil(freq)
2170
+ t1 = df.index[-1]
2171
+ t = pd.date_range(t0, t1, freq=freq, name="time")
2172
+ else:
2173
+ mask = (time >= df.index[0]) & (time <= df.index[-1])
2174
+ t = pd.DatetimeIndex(time[mask], name="time")
2160
2175
 
2161
2176
  concat_arr = np.concatenate([df.index, t])
2162
2177
  concat_arr = np.unique(concat_arr)
@@ -97,8 +97,8 @@ def parse_atc_plan(atc_plan: str) -> dict[str, str]:
97
97
  --------
98
98
  :func:`to_atc_plan`
99
99
  """
100
- atc_plan = atc_plan.replace("\r", "")
101
- atc_plan = atc_plan.replace("\n", "")
100
+ atc_plan = atc_plan.replace("\r", " ")
101
+ atc_plan = atc_plan.replace("\n", " ")
102
102
  atc_plan = atc_plan.upper()
103
103
  atc_plan = atc_plan.strip()
104
104
 
pycontrails/core/met.py CHANGED
@@ -522,7 +522,7 @@ class MetBase(ABC, Generic[XArrayType]):
522
522
  return self.data.__len__()
523
523
 
524
524
  @property
525
- def attrs(self) -> dict[Hashable, Any]:
525
+ def attrs(self) -> dict[str, Any]:
526
526
  """Pass through to :attr:`self.data.attrs`."""
527
527
  return self.data.attrs
528
528
 
@@ -543,7 +543,7 @@ class Model(ABC):
543
543
 
544
544
  See Also
545
545
  --------
546
- - :meth:`eval`
546
+ eval
547
547
  """
548
548
  self.source = self._get_source(source)
549
549
 
@@ -744,8 +744,8 @@ class Model(ABC):
744
744
 
745
745
  See Also
746
746
  --------
747
- - get_source_param
748
- - GeoVectorDataset.get_data_or_attr
747
+ get_source_param
748
+ pycontrails.core.vector.GeoVectorDataset.get_data_or_attr
749
749
  """
750
750
  marker = self.__marker
751
751
 
@@ -805,8 +805,8 @@ class Model(ABC):
805
805
 
806
806
  See Also
807
807
  --------
808
- - get_data_param
809
- - GeoVectorDataset.get_data_or_attr
808
+ get_data_param
809
+ pycontrails.core.vector.GeoVectorDataset.get_data_or_attr
810
810
  """
811
811
  return self.get_data_param(self.source, key, default, set_attr=set_attr)
812
812
 
@@ -1311,7 +1311,11 @@ def update_param_dict(param_dict: dict[str, Any], new_params: dict[str, Any]) ->
1311
1311
  raise KeyError(msg) from None
1312
1312
 
1313
1313
  # Convenience: convert timedelta64-like params
1314
- if isinstance(old_value, np.timedelta64) and not isinstance(value, np.timedelta64):
1314
+ if (
1315
+ isinstance(old_value, np.timedelta64)
1316
+ and not isinstance(value, np.timedelta64)
1317
+ and value is not None
1318
+ ):
1315
1319
  value = pd.to_timedelta(value).to_numpy()
1316
1320
 
1317
1321
  param_dict[param] = value
@@ -238,7 +238,7 @@ def _contours_to_polygons(
238
238
  latitude=latitude,
239
239
  precision=precision,
240
240
  buffer=buffer,
241
- i=child_i,
241
+ i=child_i, # type: ignore[arg-type]
242
242
  )
243
243
 
244
244
  candidate = shapely.Polygon(polygon.exterior, [h.exterior for h in holes])
@@ -242,7 +242,7 @@ def _empty_vector_dict(keys: Iterable[str]) -> dict[str, np.ndarray]:
242
242
  return data
243
243
 
244
244
 
245
- class VectorDataset:
245
+ class VectorDataset: # noqa: PLW1641
246
246
  """Base class to hold 1D arrays of consistent size.
247
247
 
248
248
  Parameters
@@ -304,9 +304,9 @@ class VectorDataset:
304
304
  self.data = VectorDataDict({k: v.to_numpy(copy=copy) for k, v in data.items()})
305
305
  else:
306
306
  time = _handle_time_column(time)
307
- data = {k: v.to_numpy(copy=copy) for k, v in data.items() if k != "time"}
308
- data["time"] = time.to_numpy(copy=copy)
309
- self.data = VectorDataDict(data)
307
+ data_np = {k: v.to_numpy(copy=copy) for k, v in data.items() if k != "time"}
308
+ data_np["time"] = time.to_numpy(copy=copy)
309
+ self.data = VectorDataDict(data_np)
310
310
 
311
311
  # For anything else, we assume it is a dictionary of array-like and attach it
312
312
  else:
@@ -564,7 +564,7 @@ class VectorDataset:
564
564
  _repr = f"{class_name} [{n_keys} keys x {self.size} length, {n_attrs} attributes]"
565
565
 
566
566
  keys = list(self)
567
- keys = keys[0:5] + ["..."] + keys[-1:] if len(keys) > 5 else keys
567
+ keys = [*keys[0:5], "...", *keys[-1:]] if len(keys) > 5 else keys
568
568
  _repr += f"\n\tKeys: {', '.join(keys)}"
569
569
 
570
570
  attrs = self._display_attrs()
@@ -320,7 +320,7 @@ def parse_grid(grid: float, supported: Sequence[float]) -> float:
320
320
 
321
321
 
322
322
  def round_hour(time: datetime, hour: int) -> datetime:
323
- """Round time to the nearest whole hour before input time.
323
+ """Floor time to the nearest whole hour before input time.
324
324
 
325
325
  Parameters
326
326
  ----------
@@ -337,7 +337,7 @@ def round_hour(time: datetime, hour: int) -> datetime:
337
337
  Raises
338
338
  ------
339
339
  ValueError
340
- Description
340
+ If ``hour`` isn't one of 1, 2, 3, ..., 22, 23.
341
341
  """
342
342
  if hour not in range(1, 24):
343
343
  msg = f"hour must be between [1, 23], got {hour}"
@@ -17,7 +17,6 @@ else:
17
17
 
18
18
  LOG = logging.getLogger(__name__)
19
19
 
20
- import numpy as np
21
20
  import pandas as pd
22
21
  import xarray as xr
23
22
 
@@ -120,6 +119,8 @@ class HRES(ECMWFAPI):
120
119
  }
121
120
 
122
121
  Credentials can also be provided directly in ``url``, ``key``, and ``email`` keyword args.
122
+ A third option is to set the environment variables ``ECMWF_API_URL``, ``ECMWF_API_KEY``,
123
+ and ``ECMWF_API_EMAIL``.
123
124
 
124
125
  See `ecmwf-api-client <https://github.com/ecmwf/ecmwf-api-client>`_ documentation
125
126
  for more information.
@@ -131,7 +132,7 @@ class HRES(ECMWFAPI):
131
132
  Input must be a datetime-like or tuple of datetime-like
132
133
  (datetime, :class:`pandas.Timestamp`, :class:`numpy.datetime64`)
133
134
  specifying the (start, end) of the date range, inclusive.
134
- If ``forecast_time`` is unspecified, the forecast time will
135
+ If ``forecast_time`` is unspecified, the forecast reference time will
135
136
  be assumed to be the nearest synoptic hour: 00, 06, 12, 18.
136
137
  All subsequent times will be downloaded for relative to :attr:`forecast_time`.
137
138
  If None, ``paths`` must be defined and all time coordinates will be loaded from files.
@@ -150,14 +151,20 @@ class HRES(ECMWFAPI):
150
151
  Specify latitude/longitude grid spacing in data.
151
152
  Defaults to 0.25.
152
153
  stream : str, optional
153
- "oper" = atmospheric model/HRES, "enfo" = ensemble forecast.
154
- Defaults to "oper" (HRES),
154
+ - "oper" = high resolution forecast, atmospheric fields, run at hours 00Z and 12Z
155
+ - "scda" = short cut-off high resolution forecast, atmospheric fields,
156
+ run at hours 06Z and 18Z
157
+ - "enfo" = ensemble forecast, atmospheric fields, run at hours 00Z, 06Z, 12Z, and 18Z
158
+ Defaults to "oper" (HRES).
159
+ If the stream is incompatible with a provided forecast_time, a ``ValueError`` is raised.
160
+ See the `ECMWF documentation <https://confluence.ecmwf.int/display/DAC/ECMWF+open+data%3A+real-time+forecasts+from+IFS+and+AIFS>`_
161
+ for additional information.
155
162
  field_type : str, optional
156
163
  Field type can be e.g. forecast (fc), perturbed forecast (pf),
157
164
  control forecast (cf), analysis (an).
158
165
  Defaults to "fc".
159
166
  forecast_time : DatetimeLike, optional
160
- Specify forecast run by runtime.
167
+ Specify forecast reference time (the time at which the forecast was initialized).
161
168
  Defaults to None.
162
169
  cachestore : cache.CacheStore | None, optional
163
170
  Cache data store for staging data files.
@@ -230,7 +237,7 @@ class HRES(ECMWFAPI):
230
237
 
231
238
  __slots__ = ("email", "field_type", "forecast_time", "key", "server", "stream", "url")
232
239
 
233
- #: stream type, "oper" = atmospheric model/HRES, "enfo" = ensemble forecast.
240
+ #: stream type, "oper" or "scda" for atmospheric model/HRES, "enfo" for ensemble forecast.
234
241
  stream: str
235
242
 
236
243
  #: Field type, forecast ("fc"), perturbed forecast ("pf"),
@@ -251,7 +258,6 @@ class HRES(ECMWFAPI):
251
258
  variables: metsource.VariableInput,
252
259
  pressure_levels: metsource.PressureLevelInput = -1,
253
260
  paths: str | list[str] | pathlib.Path | list[pathlib.Path] | None = None,
254
- cachepath: str | list[str] | pathlib.Path | list[pathlib.Path] | None = None,
255
261
  grid: float = 0.25,
256
262
  stream: str = "oper",
257
263
  field_type: str = "fc",
@@ -276,9 +282,7 @@ class HRES(ECMWFAPI):
276
282
  self.server = ECMWFService("mars", url=url, key=key, email=email)
277
283
  self.paths = paths
278
284
 
279
- if cachestore is self.__marker:
280
- cachestore = cache.DiskCacheStore()
281
- self.cachestore = cachestore
285
+ self.cachestore = cache.DiskCacheStore() if cachestore is self.__marker else cachestore
282
286
 
283
287
  if time is None and paths is None:
284
288
  raise ValueError("Time input is required when paths is None")
@@ -291,14 +295,6 @@ class HRES(ECMWFAPI):
291
295
 
292
296
  self.grid = metsource.parse_grid(grid, [0.1, 0.25, 0.5, 1]) # lat/lon degree resolution
293
297
 
294
- # "enfo" = ensemble forecast
295
- # "oper" = atmospheric model/HRES
296
- if stream not in ("oper", "enfo"):
297
- msg = "Parameter stream must be 'oper' or 'enfo'"
298
- raise ValueError(msg)
299
-
300
- self.stream = stream
301
-
302
298
  # "fc" = forecast
303
299
  # "pf" = perturbed forecast
304
300
  # "cf" = control forecast
@@ -322,7 +318,29 @@ class HRES(ECMWFAPI):
322
318
  # round first element to the nearest 6 hour time (00, 06, 12, 18 UTC) for forecast_time
323
319
  self.forecast_time = metsource.round_hour(self.timesteps[0], 6)
324
320
 
325
- # when no forecast_time or time input, forecast_time is defined in _open_and_cache
321
+ # NOTE: when no forecast_time or time input, forecast_time is defined in _open_and_cache
322
+ # This could occur when only the paths parameter is provided
323
+
324
+ # "enfo" = ensemble forecast
325
+ # "oper" = atmospheric model/HRES for 00 and 12 model runs
326
+ # "scda" = atmospheric model/HRES for 06 and 18 model runs
327
+ available_streams = ("oper", "enfo", "scda")
328
+ if stream not in available_streams:
329
+ msg = f"Parameter stream must be one of {available_streams}"
330
+ raise ValueError(msg)
331
+
332
+ if self.forecast_time.hour in (0, 12) and stream == "scda":
333
+ raise ValueError(
334
+ f"Stream {stream} is not compatible with forecast_time {self.forecast_time}. "
335
+ "Set stream='oper' for 00 and 12 UTC forecast times."
336
+ )
337
+
338
+ if self.forecast_time.hour in (6, 18) and stream == "oper":
339
+ raise ValueError(
340
+ f"Stream {stream} is not compatible with forecast_time {self.forecast_time}. "
341
+ "Set stream='scda' for 06 and 18 UTC forecast times."
342
+ )
343
+ self.stream = stream
326
344
 
327
345
  def __repr__(self) -> str:
328
346
  base = super().__repr__()
@@ -351,16 +369,14 @@ class HRES(ECMWFAPI):
351
369
  list[tuple[pd.Timestamp, pd.Timestamp]]
352
370
  List of tuple time bounds that can be used as inputs to :class:`HRES(time=...)`
353
371
  """
354
- time_ranges = np.unique(
355
- [pd.Timestamp(t.year, t.month, t.day, 12 * (t.hour // 12)) for t in timesteps]
356
- )
372
+ time_ranges = sorted({t.floor("12h") for t in timesteps})
357
373
 
358
374
  if len(time_ranges) == 1:
359
- time_ranges = [(timesteps[0], timesteps[-1])]
360
- else:
361
- time_ranges[0] = (timesteps[0], time_ranges[1] - pd.Timedelta(hours=1))
362
- time_ranges[1:-1] = [(t, t + pd.Timedelta(hours=11)) for t in time_ranges[1:-1]]
363
- time_ranges[-1] = (time_ranges[-1], timesteps[-1])
375
+ return [(timesteps[0], timesteps[-1])]
376
+
377
+ time_ranges[0] = (timesteps[0], time_ranges[1] - pd.Timedelta(hours=1))
378
+ time_ranges[1:-1] = [(t, t + pd.Timedelta(hours=11)) for t in time_ranges[1:-1]]
379
+ time_ranges[-1] = (time_ranges[-1], timesteps[-1])
364
380
 
365
381
  return time_ranges
366
382
 
@@ -642,7 +658,7 @@ class HRES(ECMWFAPI):
642
658
 
643
659
  @override
644
660
  def set_metadata(self, ds: xr.Dataset | MetDataset) -> None:
645
- if self.stream == "oper":
661
+ if self.stream in ("oper", "scda"):
646
662
  product = "forecast"
647
663
  elif self.stream == "enfo":
648
664
  product = "ensemble"
@@ -689,8 +705,8 @@ class HRES(ECMWFAPI):
689
705
  xr_kwargs.setdefault("parallel", False)
690
706
  ds = self.open_dataset(self.paths, **xr_kwargs)
691
707
 
692
- # set forecast time if its not already defined
693
- if not getattr(self, "forecast_time", None):
708
+ # set forecast time if it's not defined (this occurs when only the paths param is provided)
709
+ if not hasattr(self, "forecast_time"):
694
710
  self.forecast_time = ds["time"].values.astype("datetime64[s]").tolist() # type: ignore[assignment]
695
711
 
696
712
  # check that forecast_time is correct if defined
@@ -66,6 +66,8 @@ class HRESModelLevel(ECMWFAPI):
66
66
  }
67
67
 
68
68
  Credentials can also be provided directly in ``url``, ``key``, and ``email`` keyword args.
69
+ A third option is to set the environment variables ``ECMWF_API_URL``, ``ECMWF_API_KEY``,
70
+ and ``ECMWF_API_EMAIL``.
69
71
 
70
72
  See `ecmwf-api-client <https://github.com/ecmwf/ecmwf-api-client>`_ documentation
71
73
  for more information.
@@ -59,6 +59,22 @@ DEFAULT_CHANNELS = "C11", "C14", "C15"
59
59
  #: See `GOES ABI scan information <https://www.goes-r.gov/users/abiScanModeInfo.html>`_.
60
60
  GOES_SCAN_MODE_CHANGE = datetime.datetime(2019, 4, 2, 16)
61
61
 
62
+ #: The date at which GOES-19 data started being available. This is used to
63
+ determine the source (GOES-16 or GOES-19) of requested data. In particular,
64
+ #: Mesoscale images are only available for GOES-East from GOES-19 after this date.
65
+ #: See the `NOAA press release <https://www.noaa.gov/news-release/noaas-goes-19-satellite-now-operational-providing-critical-new-data-to-forecasters>`_.
66
+ GOES_16_19_SWITCH_DATE = datetime.datetime(2025, 4, 4)
67
+
68
+ #: The GCS bucket for GOES-East data before ``GOES_16_19_SWITCH_DATE``.
69
+ GOES_16_BUCKET = "gcp-public-data-goes-16"
70
+
71
+ #: The GCS bucket for GOES-West data. Note that GOES-17 has degraded data quality
72
+ #: and is not recommended for use. This bucket isn't used by the ``GOES`` handler by default.
73
+ GOES_18_BUCKET = "gcp-public-data-goes-18"
74
+
75
+ #: The GCS bucket for GOES-East data after ``GOES_16_19_SWITCH_DATE``.
76
+ GOES_19_BUCKET = "gcp-public-data-goes-19"
77
+
62
78
 
63
79
  class GOESRegion(enum.Enum):
64
80
  """GOES Region of interest.
@@ -187,7 +203,7 @@ def gcs_goes_path(
187
203
  time: datetime.datetime,
188
204
  region: GOESRegion,
189
205
  channels: str | Iterable[str] | None = None,
190
- bucket: str = "gcp-public-data-goes-16",
206
+ bucket: str | None = None,
191
207
  fs: gcsfs.GCSFileSystem | None = None,
192
208
  ) -> list[str]:
193
209
  """Return GCS paths to GOES data at the given time for the given region and channels.
@@ -208,6 +224,12 @@ def gcs_goes_path(
208
224
  set ``channels=("C11", "C14", "C15")``. For the true color scheme,
209
225
  set ``channels=("C01", "C02", "C03")``. By default, the channels
210
226
  required by the SEVIRI ash color scheme are used.
227
+ bucket : str | None
228
+ GCS bucket for GOES data. If None, the bucket is automatically
229
+ set to ``GOES_16_BUCKET`` if ``time`` is before
230
+ ``GOES_16_19_SWITCH_DATE`` and ``GOES_19_BUCKET`` otherwise.
231
+ fs : gcsfs.GCSFileSystem | None
232
+ GCS file system instance. If None, a default anonymous instance is created.
211
233
 
212
234
  Returns
213
235
  -------
@@ -236,6 +258,11 @@ def gcs_goes_path(
236
258
  >>> pprint(paths)
237
259
  ['gcp-public-data-goes-16/ABI-L2-CMIPM/2023/093/02/OR_ABI-L2-CMIPM1-M6C01_G16_s20230930211249_e20230930211309_c20230930211386.nc']
238
260
 
261
+ >>> t = datetime.datetime(2025, 5, 4, 3, 2)
262
+ >>> paths = gcs_goes_path(t, GOESRegion.M2, channels="C01")
263
+ >>> pprint(paths)
264
+ ['gcp-public-data-goes-19/ABI-L2-CMIPM/2025/124/03/OR_ABI-L2-CMIPM2-M6C01_G19_s20251240302557_e20251240303014_c20251240303092.nc']
265
+
239
266
  """
240
267
  time = _check_time_resolution(time, region)
241
268
  year = time.strftime("%Y")
@@ -247,7 +274,10 @@ def gcs_goes_path(
247
274
  product_name = "CMIP" # Cloud and Moisture Imagery
248
275
  product = f"{sensor}-{level}-{product_name}{region.name[0]}"
249
276
 
250
- bucket = bucket.removeprefix("gs://")
277
+ if bucket is None:
278
+ bucket = GOES_16_BUCKET if time < GOES_16_19_SWITCH_DATE else GOES_19_BUCKET
279
+ else:
280
+ bucket = bucket.removeprefix("gs://")
251
281
 
252
282
  path_prefix = f"gs://{bucket}/{product}/{year}/{yday}/{hour}/"
253
283
 
@@ -267,7 +297,13 @@ def gcs_goes_path(
267
297
  time_str = f"{time_str[:-1]}6"
268
298
 
269
299
  name_prefix = f"OR_{product[:-1]}{region.name}-{mode}"
270
- name_suffix = f"_G16_s{time_str}*"
300
+
301
+ try:
302
+ satellite_number = int(bucket[-2:]) # 16 or 18 or 19 -- this may fail for custom buckets
303
+ except (ValueError, IndexError) as exc:
304
+ msg = f"Bucket name {bucket} does not end with a valid satellite number."
305
+ raise ValueError(msg) from exc
306
+ name_suffix = f"_G{satellite_number}_s{time_str}*"
271
307
 
272
308
  channels = _parse_channels(channels)
273
309
 
@@ -323,8 +359,12 @@ class GOES:
323
359
  cachestore : cache.CacheStore | None
324
360
  Cache store for GOES data. If None, data is downloaded directly into
325
361
  memory. By default, a :class:`cache.DiskCacheStore` is used.
326
- goes_bucket : str = "gcp-public-data-goes-16"
327
- GCP bucket for GOES data. AWS access is not supported.
362
+ goes_bucket : str | None = None
363
+ GCP bucket for GOES data. If None, the bucket is automatically
364
+ set to ``GOES_16_BUCKET`` if the requested time is before
365
+ ``GOES_16_19_SWITCH_DATE`` and ``GOES_19_BUCKET`` otherwise.
366
+ The satellite number used for filename construction is derived from the
367
+ last two characters of this bucket name.
328
368
 
329
369
  See Also
330
370
  --------
@@ -396,7 +436,7 @@ class GOES:
396
436
  region: GOESRegion | str = GOESRegion.F,
397
437
  channels: str | Iterable[str] | None = None,
398
438
  cachestore: cache.CacheStore | None = __marker, # type: ignore[assignment]
399
- goes_bucket: str = "gcp-public-data-goes-16",
439
+ goes_bucket: str | None = None,
400
440
  ) -> None:
401
441
  self.region = _parse_region(region)
402
442
  self.channels = _parse_channels(channels)
@@ -413,7 +453,10 @@ class GOES:
413
453
 
414
454
  def __repr__(self) -> str:
415
455
  """Return string representation."""
416
- return f"GOES(region='{self.region}', channels={sorted(self.channels)})"
456
+ return (
457
+ f"GOES(region={self.region}, channels={sorted(self.channels)}, "
458
+ f"goes_bucket={self.goes_bucket})"
459
+ )
417
460
 
418
461
  def gcs_goes_path(self, time: datetime.datetime, channels: set[str] | None = None) -> list[str]:
419
462
  """Return GCS paths to GOES data at given time.
@@ -435,7 +478,7 @@ class GOES:
435
478
  List of GCS paths to GOES data.
436
479
  """
437
480
  channels = channels or self.channels
438
- return gcs_goes_path(time, self.region, channels, self.goes_bucket)
481
+ return gcs_goes_path(time, self.region, channels, bucket=self.goes_bucket, fs=self.fs)
439
482
 
440
483
  def _lpaths(self, time: datetime.datetime) -> dict[str, str]:
441
484
  """Construct names for local netcdf files using the :attr:`cachestore`.
@@ -448,7 +491,11 @@ class GOES:
448
491
 
449
492
  out = {}
450
493
  for c in self.channels:
451
- name = f"{self.region.name}_{t_str}_{c}.nc"
494
+ if self.goes_bucket:
495
+ name = f"{self.goes_bucket}_{self.region.name}_{t_str}_{c}.nc"
496
+ else:
497
+ name = f"{self.region.name}_{t_str}_{c}.nc"
498
+
452
499
  lpath = self.cachestore.path(name)
453
500
  out[c] = lpath
454
501
 
@@ -22,7 +22,6 @@ from pycontrails.core.met_var import (
22
22
  from pycontrails.core.models import Model, ModelParams
23
23
  from pycontrails.core.vector import GeoVectorDataset
24
24
  from pycontrails.datalib import ecmwf
25
- from pycontrails.utils import dependencies
26
25
 
27
26
 
28
27
  def wide_body_jets() -> set[str]:
@@ -224,12 +223,11 @@ class ACCF(Model):
224
223
  try:
225
224
  from climaccf.accf import GeTaCCFs
226
225
  except ModuleNotFoundError as e:
227
- dependencies.raise_module_not_found_error(
228
- name="ACCF.eval method",
229
- package_name="climaccf",
230
- module_not_found_error=e,
231
- pycontrails_optional_package="accf",
226
+ msg = (
227
+ "ACCF.eval method requires the 'climaccf' package. This can be installed "
228
+ "with 'pip install git+https://github.com/dlr-pa/climaccf.git'."
232
229
  )
230
+ raise ModuleNotFoundError(msg) from e
233
231
 
234
232
  self.update_params(params)
235
233
  self.set_source(source)
@@ -2138,7 +2138,8 @@ def compare_cocip_with_goes(
2138
2138
  File path of saved CoCiP-GOES image if ``path_write_img`` is provided.
2139
2139
  """
2140
2140
 
2141
- from pycontrails.datalib.goes import GOES, extract_goes_visualization
2141
+ # We'll get a nice error message if dependencies are not installed
2142
+ from pycontrails.datalib import goes
2142
2143
 
2143
2144
  try:
2144
2145
  import cartopy.crs as ccrs
@@ -2213,9 +2214,8 @@ def compare_cocip_with_goes(
2213
2214
  _contrail = _contrail.filter(is_in_domain)
2214
2215
 
2215
2216
  # Download GOES image at `time`
2216
- goes = GOES(region=region)
2217
- da = goes.get(time)
2218
- rgb, transform, extent = extract_goes_visualization(da)
2217
+ da = goes.GOES(region=region).get(time)
2218
+ rgb, transform, extent = goes.extract_goes_visualization(da)
2219
2219
  bbox = spatial_bbox[0], spatial_bbox[2], spatial_bbox[1], spatial_bbox[3]
2220
2220
 
2221
2221
  # Calculate optimal figure dimensions
@@ -2198,11 +2198,11 @@ def result_merge_source(
2198
2198
  """Merge ``results`` and ``verbose_dict`` onto ``source``."""
2199
2199
 
2200
2200
  # Initialize the main output arrays to all zeros
2201
- dtype = result["age"].dtype if result else "timedelta64[ns]"
2202
- contrail_age = np.zeros(source.size, dtype=dtype)
2201
+ age_dtype = result["age"].dtype if result else "timedelta64[ns]"
2202
+ contrail_age = np.zeros(source.size, dtype=age_dtype)
2203
2203
 
2204
- dtype = result["ef"].dtype if result else np.float32
2205
- ef_per_m = np.zeros(source.size, dtype=dtype)
2204
+ ef_dtype = result["ef"].dtype if result else np.float32
2205
+ ef_per_m = np.zeros(source.size, dtype=ef_dtype)
2206
2206
 
2207
2207
  # If there are results, merge them in
2208
2208
  if result:
@@ -37,8 +37,14 @@ class DryAdvectionParams(models.AdvectionBuffers):
37
37
  #: are interpolated against met data once each ``dt_integration``.
38
38
  dt_integration: np.timedelta64 = np.timedelta64(30, "m")
39
39
 
40
- #: Max age of plume evolution.
41
- max_age: np.timedelta64 = np.timedelta64(20, "h")
40
+ #: Max age of plume evolution. If set to ``None``, ``timesteps`` must not be None
41
+ #: and advection will continue until the final timestep for all plumes.
42
+ max_age: np.timedelta64 | None = np.timedelta64(20, "h")
43
+
44
+ #: Advection timesteps. If provided, ``dt_integration`` will be ignored.
45
+ #:
46
+ #: .. versionadded:: 0.54.11
47
+ timesteps: npt.NDArray[np.datetime64] | None = None
42
48
 
43
49
  #: Rate of change of pressure due to sedimentation [:math:`Pa/s`]
44
50
  sedimentation_rate: float = 0.0
@@ -147,6 +153,13 @@ class DryAdvection(models.Model):
147
153
  Advected points.
148
154
  """
149
155
  self.update_params(params)
156
+
157
+ max_age = self.params["max_age"]
158
+ timesteps = self.params["timesteps"]
159
+ if max_age is None and timesteps is None:
160
+ msg = "Timesteps must be set using the timesteps parameter when max_age is None"
161
+ raise ValueError(msg)
162
+
150
163
  self.set_source(source)
151
164
  self.source = self.require_source_type(GeoVectorDataset)
152
165
  self.downselect_met()
@@ -159,24 +172,33 @@ class DryAdvection(models.Model):
159
172
  interp_kwargs = self.interp_kwargs
160
173
 
161
174
  dt_integration = self.params["dt_integration"]
162
- max_age = self.params["max_age"]
163
175
  sedimentation_rate = self.params["sedimentation_rate"]
164
176
  dz_m = self.params["dz_m"]
165
177
  max_depth = self.params["max_depth"]
166
178
  verbose_outputs = self.params["verbose_outputs"]
167
-
168
179
  source_time = self.source["time"]
169
- t0 = pd.Timestamp(source_time.min()).floor(pd.Timedelta(dt_integration)).to_numpy()
170
- t1 = source_time.max()
171
- timesteps = np.arange(t0 + dt_integration, t1 + dt_integration + max_age, dt_integration)
180
+
181
+ if timesteps is None:
182
+ t0 = pd.Timestamp(source_time.min()).floor(pd.Timedelta(dt_integration)).to_numpy()
183
+ t1 = source_time.max()
184
+ timesteps = np.arange(
185
+ t0 + dt_integration, t1 + dt_integration + max_age, dt_integration
186
+ )
172
187
 
173
188
  vector2 = GeoVectorDataset()
174
189
  met = None
175
190
 
176
191
  evolved = []
192
+ tmin = source_time.min()
177
193
  for t in timesteps:
178
- filt = (source_time < t) & (source_time >= t - dt_integration)
194
+ filt = (source_time < t) & (source_time >= tmin)
195
+ tmin = t
196
+
179
197
  vector1 = vector2 + self.source.filter(filt, copy=False)
198
+ if vector1.size == 0:
199
+ vector2 = GeoVectorDataset()
200
+ continue
201
+ evolved.append(vector1) # NOTE: vector1 is mutated below (geometry and weather added)
180
202
 
181
203
  t0 = vector1["time"].min()
182
204
  t1 = vector1["time"].max()
@@ -192,9 +214,10 @@ class DryAdvection(models.Model):
192
214
  verbose_outputs=verbose_outputs,
193
215
  **interp_kwargs,
194
216
  )
195
- evolved.append(vector1)
196
217
 
197
- filt = (vector2["age"] <= max_age) & vector2.coords_intersect_met(self.met)
218
+ filt = vector2.coords_intersect_met(self.met)
219
+ if max_age is not None:
220
+ filt &= vector2["age"] <= max_age
198
221
  vector2 = vector2.filter(filt)
199
222
 
200
223
  if not vector2 and np.all(source_time < t):
@@ -217,6 +240,8 @@ class DryAdvection(models.Model):
217
240
  - ``age``: Age of plume.
218
241
  - ``waypoint``: Identifier for each waypoint.
219
242
 
243
+ If ``flight_id`` is present in :attr:`source`, it is retained.
244
+
220
245
  If `"azimuth"` is present in :attr:`source`, `source.attrs`, or :attr:`params`,
221
246
  the following variables will also be added:
222
247
 
@@ -236,6 +261,9 @@ class DryAdvection(models.Model):
236
261
  self.source.setdefault("waypoint", np.arange(self.source.size))
237
262
 
238
263
  columns = ["longitude", "latitude", "level", "time", "age", "waypoint"]
264
+ if "flight_id" in self.source:
265
+ columns.append("flight_id")
266
+
239
267
  azimuth = self.get_source_param("azimuth", set_attr=False)
240
268
  if azimuth is None:
241
269
  # Early exit for pointwise only simulation
@@ -280,7 +308,14 @@ class DryAdvection(models.Model):
280
308
  f"{coord}_buffer": self.params[f"met_{coord}_buffer"]
281
309
  for coord in ("longitude", "latitude", "level")
282
310
  }
283
- buffers["time_buffer"] = (np.timedelta64(0, "ns"), self.params["max_age"])
311
+
312
+ max_age = self.params["max_age"]
313
+ if max_age is None:
314
+ max_age = max(
315
+ np.timedelta64(0), self.params["timesteps"].max() - self.source["time"].max()
316
+ )
317
+ buffers["time_buffer"] = (np.timedelta64(0, "ns"), max_age)
318
+
284
319
  self.met = self.source.downselect_met(self.met, **buffers)
285
320
 
286
321
 
@@ -541,6 +576,10 @@ def _evolve_one_step(
541
576
  }
542
577
  )
543
578
 
579
+ flight_id = vector.get("flight_id")
580
+ if flight_id is not None:
581
+ out["flight_id"] = flight_id
582
+
544
583
  azimuth = vector.get("azimuth")
545
584
  if azimuth is None:
546
585
  # Early exit for "pointwise only" simulation
@@ -107,7 +107,7 @@ def m_to_T_isa(h: ArrayScalarLike) -> ArrayScalarLike:
107
107
 
108
108
 
109
109
  def _low_altitude_m_to_pl(h: npt.NDArray[np.floating]) -> npt.NDArray[np.floating]:
110
- T_isa: np.ndarray = m_to_T_isa(h)
110
+ T_isa = m_to_T_isa(h)
111
111
  power_term = -constants.g / (constants.T_lapse_rate * constants.R_d)
112
112
  return (constants.p_surface * (T_isa / constants.T_msl) ** power_term) / 100.0
113
113
 
@@ -1,9 +1,9 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pycontrails
3
- Version: 0.54.9
3
+ Version: 0.54.11
4
4
  Summary: Python library for modeling aviation climate impacts
5
5
  Author-email: "Contrails.org" <py@contrails.org>
6
- License: Apache-2.0
6
+ License-Expression: Apache-2.0
7
7
  Project-URL: Changelog, https://py.contrails.org/changelog.html
8
8
  Project-URL: Documentation, https://py.contrails.org
9
9
  Project-URL: Issues, https://github.com/contrailcirrus/pycontrails/issues
@@ -11,7 +11,6 @@ Project-URL: Repository, https://github.com/contrailcirrus/pycontrails
11
11
  Keywords: contrails,climate,aviation,geospatial
12
12
  Classifier: Development Status :: 4 - Beta
13
13
  Classifier: Intended Audience :: Science/Research
14
- Classifier: License :: OSI Approved :: Apache Software License
15
14
  Classifier: Operating System :: OS Independent
16
15
  Classifier: Programming Language :: Python :: 3
17
16
  Classifier: Programming Language :: Python :: 3.10
@@ -36,7 +35,6 @@ Requires-Dist: xarray>=2022.3
36
35
  Provides-Extra: complete
37
36
  Requires-Dist: pycontrails[ecmwf,gcp,gfs,jupyter,pyproj,sat,vis,zarr]; extra == "complete"
38
37
  Provides-Extra: dev
39
- Requires-Dist: dep_license; extra == "dev"
40
38
  Requires-Dist: fastparquet>=0.8; extra == "dev"
41
39
  Requires-Dist: ipdb>=0.13; extra == "dev"
42
40
  Requires-Dist: memory_profiler; extra == "dev"
@@ -1,5 +1,5 @@
1
1
  pycontrails/__init__.py,sha256=mKNmGUS5wW1n1PukeaOkmLwQVN24i1__mk0odjBzwEE,2107
2
- pycontrails/_version.py,sha256=577cE47qIYB88v4CDza3alm_spIkYihWh6u-GZrOEyY,534
2
+ pycontrails/_version.py,sha256=3ZYGWQZfQ5wgpP3lg8ortMs3pMchQHAI8UBrkGV34P8,536
3
3
  pycontrails/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
4
  pycontrails/core/__init__.py,sha256=kOAehIZBbvksSW3MuU2DfzsyeE4PaFnOTpYMeq2ZDPE,886
5
5
  pycontrails/core/aircraft_performance.py,sha256=dasanaqfm5eP9XUDhgoKGj-eQHTWwMZ_mN6_ZdFulT0,28911
@@ -7,31 +7,31 @@ pycontrails/core/airports.py,sha256=ELTH5P7SXs-LGxGfDPFCZnH7bs8GQ9SNbdLpLt7t6zk,
7
7
  pycontrails/core/cache.py,sha256=aWNlj8cXAcXW9oHuaGZgqxOO7YK0AtYrj5tO1pp-Wmw,29043
8
8
  pycontrails/core/coordinates.py,sha256=J5qjGuXgbLUw_U9_qREdgOaHl0ngK6Hbbjj3uw7FwNE,5565
9
9
  pycontrails/core/fleet.py,sha256=ddujPC79K975gWVk8NDskE79OZaUam8tPR9rONaT918,17192
10
- pycontrails/core/flight.py,sha256=jWGmeDDCZx6zjl5Xy-uLcKGeoIrLwcJ-nmYCSklbVLM,82824
11
- pycontrails/core/flightplan.py,sha256=WOd5lm1D5uqxYJmANiMGgx3j4Tc6mVbBfmAw6iG1NXM,7747
10
+ pycontrails/core/flight.py,sha256=AIkTPh58t-zAowcs_46YjPHV-y6_a649YODXxrkwucM,83566
11
+ pycontrails/core/flightplan.py,sha256=9_o_kGQm-yG7MibCgfDCJK8mym2BYgENrizIKl1zzgM,7749
12
12
  pycontrails/core/fuel.py,sha256=06YUDhvC8Rx6KbUXRB9qLTsJX2V7tLbzjwAfDH0R6l8,4472
13
13
  pycontrails/core/interpolation.py,sha256=-GC3T6yh3nMtt7JCawoYeCUnDNRY9GHhxhkRhhnntxE,26437
14
- pycontrails/core/met.py,sha256=a6BHxO6Thlj85uFB5F7zrZkEupStU1FL9tc_m-Ipobg,106903
14
+ pycontrails/core/met.py,sha256=NBboBTRKNt5WFSnKUdm2R_9N68EQTYii4P5A_hs79YQ,106898
15
15
  pycontrails/core/met_var.py,sha256=bFpBFpQnN6osVAuxNNhG6vG_NMThEIhDcL2B9VpXgww,12407
16
- pycontrails/core/models.py,sha256=S-smQKIyl0HWrpcOfDAigKa80OdLXmjoRCflq6JP_PY,45132
17
- pycontrails/core/polygon.py,sha256=NZ4YBhdALidXYOPsSX1cwGQ022j-AXgbWIQg7LA-f-I,18593
18
- pycontrails/core/rgi_cython.cp311-win_amd64.pyd,sha256=_RYcmOq9Z5ZQV3br6ywRA4hWuy9ToO5Vql4sMMc3c5Y,258560
19
- pycontrails/core/vector.py,sha256=sMhBfCmwmIcexIBe-aJc5sSbzbSOL1dzvbwwiiz7Rdw,75857
16
+ pycontrails/core/models.py,sha256=MRDNYVr8WMTF5EJrwZ8zxPHKKMcU09apBcqycimCWwk,45236
17
+ pycontrails/core/polygon.py,sha256=3_vYmlQoP3x3lmgwFyqQVgl9ziAQ5e160MCm2fwFou0,18619
18
+ pycontrails/core/rgi_cython.cp311-win_amd64.pyd,sha256=p93a0rBPtkZcSdiSTjzIk4sU06Vtmvm4OQtcmXbuSLE,234496
19
+ pycontrails/core/vector.py,sha256=n9b_HxQSWOUbxvut7mNaK523JlISi3-TLd5YwrmeOwM,75883
20
20
  pycontrails/datalib/__init__.py,sha256=Q2RrnjwtFzfsmJ2tEojDCzDMkd8R0MYw4mQz3YwUsqI,381
21
- pycontrails/datalib/goes.py,sha256=v8fYi6LPp-40jm2TTGDkja77FZIj-th39KNI88p5OYE,32835
21
+ pycontrails/datalib/goes.py,sha256=llpiUakz2Jo9B86rafS6yrBIDn53mqWdTojplZdp3AU,35249
22
22
  pycontrails/datalib/landsat.py,sha256=YrDpngF5HtvWFVwxN0FLFxCfZIEmeBMiifdkbH7fQTk,20263
23
23
  pycontrails/datalib/sentinel.py,sha256=ukzdSeHKC1UBWEYzehS2LqtKoCpKpaPobLfbZDGy6KU,17679
24
24
  pycontrails/datalib/_leo_utils/search.py,sha256=8JzT56ps3SH1W-5rwL8BWuxLLljwxa_5fjLAuZdL_Vg,8937
25
25
  pycontrails/datalib/_leo_utils/vis.py,sha256=0UDVcqMRqHmAORDV4Xyk-HVnTAjbOCf7KCpWm2ilTLE,1861
26
26
  pycontrails/datalib/_leo_utils/static/bq_roi_query.sql,sha256=r_gVjpoEvCcAJP56QlXaXzgfWPZdf-kYo3D316glJLU,266
27
- pycontrails/datalib/_met_utils/metsource.py,sha256=7QGqAt3FXmXnU7AfN-w6wkXzZKgpCS1WYfoTg5uhtZA,24865
27
+ pycontrails/datalib/_met_utils/metsource.py,sha256=fCObXHCKo1--v22SRayZDzWlnz0lxTMdGVNhhUyywlM,24900
28
28
  pycontrails/datalib/ecmwf/__init__.py,sha256=9EkfWlGki8LYt7ySKf87gS8RzZjAOxK2w87_Sok3CCo,2094
29
29
  pycontrails/datalib/ecmwf/arco_era5.py,sha256=PojAfT0N12SLcgiecZtHiN96sbRWwFx3PThrXIwSX5M,12782
30
30
  pycontrails/datalib/ecmwf/common.py,sha256=6fcZC_-3FvWJ3vtlZX89PiiS7-DSQhAOgxrLjwU4iW4,4138
31
31
  pycontrails/datalib/ecmwf/era5.py,sha256=TbZlOqn3fPmfvCUR1XrVBWxNgIBpSXgRx0S4M49TSeY,19506
32
32
  pycontrails/datalib/ecmwf/era5_model_level.py,sha256=NVquyG_3SzdmfoQl25Wvp4oB_pOe7K_AQOfNv7no14E,19844
33
- pycontrails/datalib/ecmwf/hres.py,sha256=k7VK1KUOopzTd0TrO5FYwFSSagArKq4q8oAyK3hNPso,29168
34
- pycontrails/datalib/ecmwf/hres_model_level.py,sha256=ghrN-z5bjV-ztv6L5KlGiCLlGR9ABbAe5k38CaARmLU,18121
33
+ pycontrails/datalib/ecmwf/hres.py,sha256=JLLxv0O8UD8STT9gnlGPkjxAMdhAXzMV1O8bnmLEs9k,30482
34
+ pycontrails/datalib/ecmwf/hres_model_level.py,sha256=EfKbCLyib2aMfeeTFuop4dutB7FiVFhX7UWW_hKtESE,18245
35
35
  pycontrails/datalib/ecmwf/ifs.py,sha256=a5QmXuihBNGx1eNN7EJGjR5dL9dO142nqkDSkPYGGlc,11048
36
36
  pycontrails/datalib/ecmwf/model_levels.py,sha256=noLSx45AHZ0rFPiUh3aK3iaEueHgsg6mG_AplHqHeU8,17431
37
37
  pycontrails/datalib/ecmwf/variables.py,sha256=49uzpkk9YV5OGBnq-Po5e3ig2JXi2i1ZtsOOEC-AQFI,10181
@@ -47,8 +47,8 @@ pycontrails/ext/cirium.py,sha256=zRPVBBWwocZKkX3XhonSBf54x7P_xnjRcA7psI0Kqnw,429
47
47
  pycontrails/ext/empirical_grid.py,sha256=mveQltokaGeQcxxbdMSLQ6wQ14oh3XX5dfzjWaFpxbk,4503
48
48
  pycontrails/ext/synthetic_flight.py,sha256=dEWm9vrg6SAWieh6GLAE0m1orTrApav8HHP42-4bIHg,17233
49
49
  pycontrails/models/__init__.py,sha256=TKhrXe1Pu1-mV1gctx8cUAMrVxCCAtBkbZi9olfWq8s,34
50
- pycontrails/models/accf.py,sha256=llpEtvEqrA0N2iefEpj8wbObHPhWkuoMpfln0wu7fBc,14026
51
- pycontrails/models/dry_advection.py,sha256=vDPjNrECefMvRVnfkhWCWbYQPpB2YYhGUvLiIuW10TM,19727
50
+ pycontrails/models/accf.py,sha256=rbEn6oTqXsgDPA3Ky0y-bADHWTxGXixa8OwpHH_pXag,13991
51
+ pycontrails/models/dry_advection.py,sha256=4oOCMhUkmHJiQDdkLJGoODw0ebE4avoMpMDoF-6L59M,21085
52
52
  pycontrails/models/issr.py,sha256=J6mh4pze31XpD2_zD9ujzYPXsZFrmSwNcRORCcLoOVI,7588
53
53
  pycontrails/models/pcc.py,sha256=7k8kICqDeZ99O2n2Zpnu7EFNGjEpPka_9cu9nrmP44s,11394
54
54
  pycontrails/models/pcr.py,sha256=G_0yR5PsCMeJBP6tZFi3M7A6Wcq8s71UvosdA7ozUkI,5502
@@ -64,14 +64,14 @@ pycontrails/models/cocip/cocip.py,sha256=zMJ-sDMO4mTAt6Qgc5uDvc-zJxZxO-Y_pbYvYD7
64
64
  pycontrails/models/cocip/cocip_params.py,sha256=pk0fimh_Wz8g8Q75BIrsOlmeH85rbpHKttjp6rkcFGY,12833
65
65
  pycontrails/models/cocip/cocip_uncertainty.py,sha256=fKQVAg-HyviegwNauxLgX9wdA0cRpK8XAOCNjZZIRWI,12528
66
66
  pycontrails/models/cocip/contrail_properties.py,sha256=JUngbMCw3SUCYkNdOWsp66J4v1kK4KtrWh9QajXwH-s,57583
67
- pycontrails/models/cocip/output_formats.py,sha256=TVQOzTcGAZUW_r8ZWTuOVUTPkY8Yb5e3P-CMu-FPopY,86218
67
+ pycontrails/models/cocip/output_formats.py,sha256=pffbcl9-7HpcJRWDCABHg7yGxjjX_-90uVyt3rIpLHc,86250
68
68
  pycontrails/models/cocip/radiative_forcing.py,sha256=qs5pEAPec0DAhqqCKJXJbRRH5hMIz4xwUQqZjCoJIyg,45910
69
69
  pycontrails/models/cocip/radiative_heating.py,sha256=PcOEkqRtQJNq7bxOoz1baBbVV2ku1UQRMrrQXXsRBwc,19504
70
70
  pycontrails/models/cocip/unterstrasser_wake_vortex.py,sha256=0TE1gK2p0b7RQjBGRGgfg7BmzmdxbJLPJ-9sdlak1gQ,19444
71
71
  pycontrails/models/cocip/wake_vortex.py,sha256=i6P1UDxde_WPP8SAliPdiaVCdeFMRxCFR7_zKaoNlno,14911
72
72
  pycontrails/models/cocip/wind_shear.py,sha256=qhmP3RJ9SEjd-qnXcgRiYis9-apKGF-1d78z6N__tq8,3988
73
73
  pycontrails/models/cocipgrid/__init__.py,sha256=OYSdZ1Htbr_IP7N_HuOAj1Pa_KLHtdEeJfXP-cN-gnU,271
74
- pycontrails/models/cocipgrid/cocip_grid.py,sha256=-wJi_iABZwo8KPwz3N2EQ5GusIYKlc12cQb_vYh9LsA,93911
74
+ pycontrails/models/cocipgrid/cocip_grid.py,sha256=2EzfbYhHrGfb6jfVuxNDW06PSvyJ2FGJqyRK_1gyH_g,93925
75
75
  pycontrails/models/cocipgrid/cocip_grid_params.py,sha256=ZpN00VEmeRYaeZhvSfVjnEjrgn6XdClf1eqJC8Ytcuw,6013
76
76
  pycontrails/models/emissions/__init__.py,sha256=phai3wH5VuUyfyVpu5vHOFI0jXSyoYSWvLTknS78xs0,499
77
77
  pycontrails/models/emissions/black_carbon.py,sha256=o8mVfDZLnNlfnvsqk8O-ljXrMn4Y_ApFuPROAQWHaQY,21294
@@ -96,7 +96,7 @@ pycontrails/physics/constants.py,sha256=_MVuhk6GxxZhj5RL_ci6IAgSe4oJyxLZwBvpMuGc
96
96
  pycontrails/physics/geo.py,sha256=WyZKLj-63yGCfjePEhiwxLp26be44VCdEiisu9tXtzE,37461
97
97
  pycontrails/physics/jet.py,sha256=nUJY-TVowCPYlhrTkEncDocoVKCeN2IAhvP-6vWP2dQ,31326
98
98
  pycontrails/physics/thermo.py,sha256=HAcg2wmNXW-vJbOF2kOXBoUyJiAosPY0nRWeM37otdY,13238
99
- pycontrails/physics/units.py,sha256=P6j9v2-29TDoy2JE_FQlcXH-2mlihVulSP1wBLqZY44,12765
99
+ pycontrails/physics/units.py,sha256=r6ncLqhFi9Roi73SfGvfjuB_jpwtsjJ39L3yxr8ndIc,12753
100
100
  pycontrails/physics/static/iata-cargo-load-factors-20250221.csv,sha256=ePGCUak5noyY63aL1a8T7EJf8sWzIpeY95-sbaYKF5w,3915
101
101
  pycontrails/physics/static/iata-passenger-load-factors-20250221.csv,sha256=sK9caPg9MvRYBHm_HVvXGA90x4j4OVIxkxFHF1HOKnA,3909
102
102
  pycontrails/utils/__init__.py,sha256=VmklFC-5I5lGFQEzuomlPk_bM6CoM9XDljfjCovG3vw,33
@@ -105,9 +105,9 @@ pycontrails/utils/iteration.py,sha256=En2YY4NiNwCNtAVO8HL6tv9byBGKs8MKSI7R8P-gZy
105
105
  pycontrails/utils/json.py,sha256=Pqashwoupuf_GfrrSfHclwug9Hg-kYQ4WNxEqay_0Rc,6083
106
106
  pycontrails/utils/temp.py,sha256=5XXqQoEfWjz1OrhoOBZD5vkkCFeuq9LpZkyhc38gIeY,1159
107
107
  pycontrails/utils/types.py,sha256=hPqUwaeRLgga69nj7LVbPojPg1k7pUSvYzFlGAiPKIM,5154
108
- pycontrails-0.54.9.dist-info/licenses/LICENSE,sha256=HVr8JnZfTaA-12BfKUQZi5hdrB3awOwLWs5X_ga5QzA,10353
109
- pycontrails-0.54.9.dist-info/licenses/NOTICE,sha256=VIhzKNYi4lQx6fpZyqiY6eMHpLuwp-_G0JQkmYYa7h0,2005
110
- pycontrails-0.54.9.dist-info/METADATA,sha256=Bm45DhhjF9mTc3c2r-aeMqGv0f5BToLFFyTHz7hoVaY,9311
111
- pycontrails-0.54.9.dist-info/WHEEL,sha256=fWq2Ny-ILPpur8yMAYhVFY_9RLasIpo77AGvi3AUunY,101
112
- pycontrails-0.54.9.dist-info/top_level.txt,sha256=Z8J1R_AiBAyCVjNw6jYLdrA68PrQqTg0t3_Yek_IZ0Q,29
113
- pycontrails-0.54.9.dist-info/RECORD,,
108
+ pycontrails-0.54.11.dist-info/licenses/LICENSE,sha256=HVr8JnZfTaA-12BfKUQZi5hdrB3awOwLWs5X_ga5QzA,10353
109
+ pycontrails-0.54.11.dist-info/licenses/NOTICE,sha256=VIhzKNYi4lQx6fpZyqiY6eMHpLuwp-_G0JQkmYYa7h0,2005
110
+ pycontrails-0.54.11.dist-info/METADATA,sha256=ATxrKUPw9Q-7nbMQ7r3CFgRfgM3wuEn7OUSUWEUO2CM,9215
111
+ pycontrails-0.54.11.dist-info/WHEEL,sha256=JLOMsP7F5qtkAkINx5UnzbFguf8CqZeraV8o04b0I8I,101
112
+ pycontrails-0.54.11.dist-info/top_level.txt,sha256=Z8J1R_AiBAyCVjNw6jYLdrA68PrQqTg0t3_Yek_IZ0Q,29
113
+ pycontrails-0.54.11.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (80.3.1)
2
+ Generator: setuptools (80.9.0)
3
3
  Root-Is-Purelib: false
4
4
  Tag: cp311-cp311-win_amd64
5
5