pycontrails 0.54.3__cp312-cp312-win_amd64.whl → 0.54.5__cp312-cp312-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pycontrails might be problematic. Click here for more details.

Files changed (62) hide show
  1. pycontrails/__init__.py +2 -2
  2. pycontrails/_version.py +2 -2
  3. pycontrails/core/__init__.py +1 -1
  4. pycontrails/core/aircraft_performance.py +58 -58
  5. pycontrails/core/cache.py +7 -7
  6. pycontrails/core/fleet.py +54 -29
  7. pycontrails/core/flight.py +218 -301
  8. pycontrails/core/interpolation.py +63 -60
  9. pycontrails/core/met.py +193 -125
  10. pycontrails/core/models.py +27 -13
  11. pycontrails/core/polygon.py +15 -15
  12. pycontrails/core/rgi_cython.cp312-win_amd64.pyd +0 -0
  13. pycontrails/core/vector.py +119 -96
  14. pycontrails/datalib/_met_utils/metsource.py +8 -5
  15. pycontrails/datalib/ecmwf/__init__.py +14 -14
  16. pycontrails/datalib/ecmwf/common.py +1 -1
  17. pycontrails/datalib/ecmwf/era5.py +7 -7
  18. pycontrails/datalib/ecmwf/hres.py +3 -3
  19. pycontrails/datalib/ecmwf/ifs.py +1 -1
  20. pycontrails/datalib/gfs/__init__.py +6 -6
  21. pycontrails/datalib/gfs/gfs.py +2 -2
  22. pycontrails/datalib/goes.py +5 -5
  23. pycontrails/ext/empirical_grid.py +1 -1
  24. pycontrails/models/apcemm/apcemm.py +5 -5
  25. pycontrails/models/apcemm/utils.py +1 -1
  26. pycontrails/models/cocip/__init__.py +2 -2
  27. pycontrails/models/cocip/cocip.py +23 -24
  28. pycontrails/models/cocip/cocip_params.py +2 -11
  29. pycontrails/models/cocip/cocip_uncertainty.py +24 -18
  30. pycontrails/models/cocip/contrail_properties.py +331 -316
  31. pycontrails/models/cocip/output_formats.py +53 -53
  32. pycontrails/models/cocip/radiative_forcing.py +135 -131
  33. pycontrails/models/cocip/radiative_heating.py +135 -135
  34. pycontrails/models/cocip/unterstrasser_wake_vortex.py +90 -87
  35. pycontrails/models/cocip/wake_vortex.py +92 -92
  36. pycontrails/models/cocip/wind_shear.py +8 -8
  37. pycontrails/models/cocipgrid/cocip_grid.py +37 -96
  38. pycontrails/models/dry_advection.py +60 -19
  39. pycontrails/models/emissions/__init__.py +2 -2
  40. pycontrails/models/emissions/black_carbon.py +108 -108
  41. pycontrails/models/emissions/emissions.py +87 -87
  42. pycontrails/models/emissions/ffm2.py +35 -35
  43. pycontrails/models/humidity_scaling/humidity_scaling.py +23 -23
  44. pycontrails/models/issr.py +2 -2
  45. pycontrails/models/ps_model/__init__.py +1 -1
  46. pycontrails/models/ps_model/ps_aircraft_params.py +8 -4
  47. pycontrails/models/ps_model/ps_grid.py +76 -66
  48. pycontrails/models/ps_model/ps_model.py +16 -16
  49. pycontrails/models/ps_model/ps_operational_limits.py +20 -18
  50. pycontrails/models/tau_cirrus.py +8 -1
  51. pycontrails/physics/geo.py +67 -67
  52. pycontrails/physics/jet.py +79 -79
  53. pycontrails/physics/units.py +14 -14
  54. pycontrails/utils/json.py +1 -2
  55. pycontrails/utils/types.py +12 -7
  56. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/METADATA +2 -2
  57. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/NOTICE +1 -1
  58. pycontrails-0.54.5.dist-info/RECORD +111 -0
  59. pycontrails-0.54.3.dist-info/RECORD +0 -111
  60. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/LICENSE +0 -0
  61. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/WHEEL +0 -0
  62. {pycontrails-0.54.3.dist-info → pycontrails-0.54.5.dist-info}/top_level.txt +0 -0
pycontrails/core/met.py CHANGED
@@ -73,6 +73,8 @@ class MetBase(ABC, Generic[XArrayType]):
73
73
  and xr.Dataset.
74
74
  """
75
75
 
76
+ __slots__ = ("cachestore", "data")
77
+
76
78
  #: DataArray or Dataset
77
79
  data: XArrayType
78
80
 
@@ -87,6 +89,22 @@ class MetBase(ABC, Generic[XArrayType]):
87
89
  "time",
88
90
  )
89
91
 
92
+ @classmethod
93
+ def _from_fastpath(cls, data: XArrayType, cachestore: CacheStore | None = None) -> Self:
94
+ """Create new instance from consistent data.
95
+
96
+ This is a low-level method that bypasses the standard constructor in certain
97
+ special cases. It is intended for internal use only.
98
+
99
+ In essence, this method skips any validation from __init__ and directly sets
100
+ ``data`` and ``cachestore``. This is useful when creating a new instance from an existing
101
+ instance where the data has already been validated.
102
+ """
103
+ obj = cls.__new__(cls)
104
+ obj.data = data
105
+ obj.cachestore = cachestore
106
+ return obj
107
+
90
108
  def __repr__(self) -> str:
91
109
  data = getattr(self, "data", None)
92
110
  return (
@@ -517,8 +535,7 @@ class MetBase(ABC, Generic[XArrayType]):
517
535
  """Pass through to :attr:`self.data.attrs`."""
518
536
  return self.data.attrs
519
537
 
520
- @abstractmethod
521
- def downselect(self, bbox: tuple[float, ...]) -> MetBase:
538
+ def downselect(self, bbox: tuple[float, ...]) -> Self:
522
539
  """Downselect met data within spatial bounding box.
523
540
 
524
541
  Parameters
@@ -529,12 +546,13 @@ class MetBase(ABC, Generic[XArrayType]):
529
546
  For 3D queries, list is [west, south, min-level, east, north, max-level]
530
547
  with level defined in [:math:`hPa`].
531
548
 
532
-
533
549
  Returns
534
550
  -------
535
- MetBase
551
+ Self
536
552
  Return downselected data
537
553
  """
554
+ data = downselect(self.data, bbox)
555
+ return type(self)._from_fastpath(data, cachestore=self.cachestore)
538
556
 
539
557
  @property
540
558
  def is_zarr(self) -> bool:
@@ -565,7 +583,6 @@ class MetBase(ABC, Generic[XArrayType]):
565
583
  np.timedelta64(0, "h"),
566
584
  np.timedelta64(0, "h"),
567
585
  ),
568
- copy: bool = True,
569
586
  ) -> MetDataType:
570
587
  """Downselect ``met`` to encompass a spatiotemporal region of the data.
571
588
 
@@ -576,6 +593,10 @@ class MetBase(ABC, Generic[XArrayType]):
576
593
  ``met`` input. This method is different from :meth:`downselect` which
577
594
  operates on the instance data.
578
595
 
596
+ .. versionchanged:: 0.54.5
597
+
598
+ Data is no longer copied when downselecting.
599
+
579
600
  Parameters
580
601
  ----------
581
602
  met : MetDataset | MetDataArray
@@ -600,8 +621,6 @@ class MetBase(ABC, Generic[XArrayType]):
600
621
  and ``time_buffer[1]`` on the high side.
601
622
  Units must be the same as class coordinates.
602
623
  Defaults to ``(np.timedelta64(0, "h"), np.timedelta64(0, "h"))``.
603
- copy : bool
604
- If returned object is a copy or view of the original. True by default.
605
624
 
606
625
  Returns
607
626
  -------
@@ -627,9 +646,31 @@ class MetBase(ABC, Generic[XArrayType]):
627
646
  latitude_buffer=latitude_buffer,
628
647
  level_buffer=level_buffer,
629
648
  time_buffer=time_buffer,
630
- copy=copy,
631
649
  )
632
650
 
651
+ def wrap_longitude(self) -> Self:
652
+ """Wrap longitude coordinates.
653
+
654
+ Returns
655
+ -------
656
+ Self
657
+ Copy of instance with wrapped longitude values.
658
+ Returns copy of data when longitude values are already wrapped
659
+ """
660
+ return type(self)._from_fastpath(_wrap_longitude(self.data), cachestore=self.cachestore)
661
+
662
+ def copy(self) -> Self:
663
+ """Create a shallow copy of the current class.
664
+
665
+ See :meth:`xarray.Dataset.copy` for reference.
666
+
667
+ Returns
668
+ -------
669
+ Self
670
+ Copy of the current class
671
+ """
672
+ return type(self)._from_fastpath(self.data.copy(), cachestore=self.cachestore)
673
+
633
674
 
634
675
  class MetDataset(MetBase):
635
676
  """Meteorological dataset with multiple variables.
@@ -697,6 +738,8 @@ class MetDataset(MetBase):
697
738
  223.5083
698
739
  """
699
740
 
741
+ __slots__ = ()
742
+
700
743
  data: xr.Dataset
701
744
 
702
745
  def __init__(
@@ -755,7 +798,7 @@ class MetDataset(MetBase):
755
798
  "To get items (e.g. 'time' or 'level') from underlying xr.Dataset object, "
756
799
  "use the 'data' attribute."
757
800
  ) from e
758
- return MetDataArray(da, copy=False, validate=False)
801
+ return MetDataArray._from_fastpath(da)
759
802
 
760
803
  def get(self, key: str, default_value: Any = None) -> Any:
761
804
  """Shortcut to :meth:`data.get(k, v)` method.
@@ -883,20 +926,6 @@ class MetDataset(MetBase):
883
926
  def size(self) -> int:
884
927
  return np.prod(self.shape).item()
885
928
 
886
- def copy(self) -> MetDataset:
887
- """Create a copy of the current class.
888
-
889
- Returns
890
- -------
891
- MetDataset
892
- MetDataset copy
893
- """
894
- return MetDataset(
895
- self.data,
896
- cachestore=self.cachestore,
897
- copy=True, # True by default, but being extra explicit
898
- )
899
-
900
929
  def ensure_vars(
901
930
  self,
902
931
  vars: MetVariable | str | Sequence[MetVariable | str | Sequence[MetVariable]],
@@ -1011,20 +1040,6 @@ class MetDataset(MetBase):
1011
1040
  data = _load(hash, cachestore, chunks)
1012
1041
  return cls(data)
1013
1042
 
1014
- def wrap_longitude(self) -> MetDataset:
1015
- """Wrap longitude coordinates.
1016
-
1017
- Returns
1018
- -------
1019
- MetDataset
1020
- Copy of MetDataset with wrapped longitude values.
1021
- Returns copy of current MetDataset when longitude values are already wrapped
1022
- """
1023
- return MetDataset(
1024
- _wrap_longitude(self.data),
1025
- cachestore=self.cachestore,
1026
- )
1027
-
1028
1043
  @override
1029
1044
  def broadcast_coords(self, name: str) -> xr.DataArray:
1030
1045
  da = xr.ones_like(self.data[next(iter(self.data.keys()))]) * self.data[name]
@@ -1032,11 +1047,6 @@ class MetDataset(MetBase):
1032
1047
 
1033
1048
  return da
1034
1049
 
1035
- @override
1036
- def downselect(self, bbox: tuple[float, ...]) -> MetDataset:
1037
- data = downselect(self.data, bbox)
1038
- return MetDataset(data, cachestore=self.cachestore, copy=False)
1039
-
1040
1050
  def to_vector(self, transfer_attrs: bool = True) -> vector_module.GeoVectorDataset:
1041
1051
  """Convert a :class:`MetDataset` to a :class:`GeoVectorDataset` by raveling data.
1042
1052
 
@@ -1312,9 +1322,13 @@ class MetDataset(MetBase):
1312
1322
  class MetDataArray(MetBase):
1313
1323
  """Meteorological DataArray of single variable.
1314
1324
 
1315
- Wrapper around xr.DataArray to enforce certain
1325
+ Wrapper around :class:`xarray.DataArray` to enforce certain
1316
1326
  variables and dimensions for internal usage.
1317
1327
 
1328
+ .. versionchanged:: 0.54.4
1329
+
1330
+ Remove ``validate`` parameter. Validation is now always performed.
1331
+
1318
1332
  Parameters
1319
1333
  ----------
1320
1334
  data : ArrayLike
@@ -1332,15 +1346,8 @@ class MetDataArray(MetBase):
1332
1346
  Copy `data` parameter on construction, by default `True`. If `data` is lazy-loaded
1333
1347
  via `dask`, this parameter has no effect. If `data` is already loaded into memory,
1334
1348
  a copy of the data (rather than a view) may be created if `True`.
1335
- validate : bool, optional
1336
- Confirm that the parameter `data` has correct specification. This automatically handled
1337
- in the case that `copy=True`. Validation only introduces a very small overhead.
1338
- This parameter should only be set to `False` if working with data derived from an
1339
- existing MetDataset or :class`MetDataArray`. By default `True`.
1340
1349
  name : Hashable, optional
1341
1350
  Name of the data variable. If not specified, the name will be set to "met".
1342
- **kwargs
1343
- To be removed in future versions. Passed directly to xr.DataArray constructor.
1344
1351
 
1345
1352
  Examples
1346
1353
  --------
@@ -1370,6 +1377,8 @@ class MetDataArray(MetBase):
1370
1377
  0.41884649899766946
1371
1378
  """
1372
1379
 
1380
+ __slots__ = ()
1381
+
1373
1382
  data: xr.DataArray
1374
1383
 
1375
1384
  def __init__(
@@ -1378,7 +1387,6 @@ class MetDataArray(MetBase):
1378
1387
  cachestore: CacheStore | None = None,
1379
1388
  wrap_longitude: bool = False,
1380
1389
  copy: bool = True,
1381
- validate: bool = True,
1382
1390
  name: Hashable | None = None,
1383
1391
  ) -> None:
1384
1392
  self.cachestore = cachestore
@@ -1386,16 +1394,14 @@ class MetDataArray(MetBase):
1386
1394
  if copy:
1387
1395
  self.data = data.copy()
1388
1396
  self._preprocess_dims(wrap_longitude)
1397
+ elif wrap_longitude:
1398
+ raise ValueError("Set 'copy=True' when using 'wrap_longitude=True'.")
1389
1399
  else:
1390
- if wrap_longitude:
1391
- raise ValueError("Set 'copy=True' when using 'wrap_longitude=True'.")
1392
1400
  self.data = data
1393
- if validate:
1394
- self._validate_dims()
1401
+ self._validate_dims()
1395
1402
 
1396
1403
  # Priority: name > data.name > "met"
1397
- name = name or self.data.name or "met"
1398
- self.data.name = name
1404
+ self.data.name = name or self.data.name or "met"
1399
1405
 
1400
1406
  @property
1401
1407
  def values(self) -> np.ndarray:
@@ -1453,27 +1459,6 @@ class MetDataArray(MetBase):
1453
1459
  # https://github.com/python/mypy/issues/1178
1454
1460
  return typing.cast(tuple[int, int, int, int], self.data.shape)
1455
1461
 
1456
- def copy(self) -> MetDataArray:
1457
- """Create a copy of the current class.
1458
-
1459
- Returns
1460
- -------
1461
- MetDataArray
1462
- MetDataArray copy
1463
- """
1464
- return MetDataArray(self.data, cachestore=self.cachestore, copy=True)
1465
-
1466
- def wrap_longitude(self) -> MetDataArray:
1467
- """Wrap longitude coordinates.
1468
-
1469
- Returns
1470
- -------
1471
- MetDataArray
1472
- Copy of MetDataArray with wrapped longitude values.
1473
- Returns copy of current MetDataArray when longitude values are already wrapped
1474
- """
1475
- return MetDataArray(_wrap_longitude(self.data), cachestore=self.cachestore)
1476
-
1477
1462
  @property
1478
1463
  def in_memory(self) -> bool:
1479
1464
  """Check if underlying :attr:`data` is loaded into memory.
@@ -1495,9 +1480,9 @@ class MetDataArray(MetBase):
1495
1480
  @overload
1496
1481
  def interpolate(
1497
1482
  self,
1498
- longitude: float | npt.NDArray[np.float64],
1499
- latitude: float | npt.NDArray[np.float64],
1500
- level: float | npt.NDArray[np.float64],
1483
+ longitude: float | npt.NDArray[np.floating],
1484
+ latitude: float | npt.NDArray[np.floating],
1485
+ level: float | npt.NDArray[np.floating],
1501
1486
  time: np.datetime64 | npt.NDArray[np.datetime64],
1502
1487
  *,
1503
1488
  method: str = ...,
@@ -1507,14 +1492,14 @@ class MetDataArray(MetBase):
1507
1492
  lowmem: bool = ...,
1508
1493
  indices: interpolation.RGIArtifacts | None = ...,
1509
1494
  return_indices: Literal[False] = ...,
1510
- ) -> npt.NDArray[np.float64]: ...
1495
+ ) -> npt.NDArray[np.floating]: ...
1511
1496
 
1512
1497
  @overload
1513
1498
  def interpolate(
1514
1499
  self,
1515
- longitude: float | npt.NDArray[np.float64],
1516
- latitude: float | npt.NDArray[np.float64],
1517
- level: float | npt.NDArray[np.float64],
1500
+ longitude: float | npt.NDArray[np.floating],
1501
+ latitude: float | npt.NDArray[np.floating],
1502
+ level: float | npt.NDArray[np.floating],
1518
1503
  time: np.datetime64 | npt.NDArray[np.datetime64],
1519
1504
  *,
1520
1505
  method: str = ...,
@@ -1524,13 +1509,13 @@ class MetDataArray(MetBase):
1524
1509
  lowmem: bool = ...,
1525
1510
  indices: interpolation.RGIArtifacts | None = ...,
1526
1511
  return_indices: Literal[True],
1527
- ) -> tuple[npt.NDArray[np.float64], interpolation.RGIArtifacts]: ...
1512
+ ) -> tuple[npt.NDArray[np.floating], interpolation.RGIArtifacts]: ...
1528
1513
 
1529
1514
  def interpolate(
1530
1515
  self,
1531
- longitude: float | npt.NDArray[np.float64],
1532
- latitude: float | npt.NDArray[np.float64],
1533
- level: float | npt.NDArray[np.float64],
1516
+ longitude: float | npt.NDArray[np.floating],
1517
+ latitude: float | npt.NDArray[np.floating],
1518
+ level: float | npt.NDArray[np.floating],
1534
1519
  time: np.datetime64 | npt.NDArray[np.datetime64],
1535
1520
  *,
1536
1521
  method: str = "linear",
@@ -1540,7 +1525,7 @@ class MetDataArray(MetBase):
1540
1525
  lowmem: bool = False,
1541
1526
  indices: interpolation.RGIArtifacts | None = None,
1542
1527
  return_indices: bool = False,
1543
- ) -> npt.NDArray[np.float64] | tuple[npt.NDArray[np.float64], interpolation.RGIArtifacts]:
1528
+ ) -> npt.NDArray[np.floating] | tuple[npt.NDArray[np.floating], interpolation.RGIArtifacts]:
1544
1529
  """Interpolate values over underlying DataArray.
1545
1530
 
1546
1531
  Zero dimensional coordinates are reshaped to 1D arrays.
@@ -1569,11 +1554,11 @@ class MetDataArray(MetBase):
1569
1554
 
1570
1555
  Parameters
1571
1556
  ----------
1572
- longitude : float | npt.NDArray[np.float64]
1557
+ longitude : float | npt.NDArray[np.floating]
1573
1558
  Longitude values to interpolate. Assumed to be 0 or 1 dimensional.
1574
- latitude : float | npt.NDArray[np.float64]
1559
+ latitude : float | npt.NDArray[np.floating]
1575
1560
  Latitude values to interpolate. Assumed to be 0 or 1 dimensional.
1576
- level : float | npt.NDArray[np.float64]
1561
+ level : float | npt.NDArray[np.floating]
1577
1562
  Level values to interpolate. Assumed to be 0 or 1 dimensional.
1578
1563
  time : np.datetime64 | npt.NDArray[np.datetime64]
1579
1564
  Time values to interpolate. Assumed to be 0 or 1 dimensional.
@@ -1696,18 +1681,17 @@ class MetDataArray(MetBase):
1696
1681
 
1697
1682
  def _interp_lowmem(
1698
1683
  self,
1699
- longitude: float | npt.NDArray[np.float64],
1700
- latitude: float | npt.NDArray[np.float64],
1701
- level: float | npt.NDArray[np.float64],
1684
+ longitude: float | npt.NDArray[np.floating],
1685
+ latitude: float | npt.NDArray[np.floating],
1686
+ level: float | npt.NDArray[np.floating],
1702
1687
  time: np.datetime64 | npt.NDArray[np.datetime64],
1703
1688
  *,
1704
1689
  method: str = "linear",
1705
1690
  bounds_error: bool = False,
1706
1691
  fill_value: float | np.float64 | None = np.nan,
1707
- minimize_memory: bool = False,
1708
1692
  indices: interpolation.RGIArtifacts | None = None,
1709
1693
  return_indices: bool = False,
1710
- ) -> npt.NDArray[np.float64] | tuple[npt.NDArray[np.float64], interpolation.RGIArtifacts]:
1694
+ ) -> npt.NDArray[np.floating] | tuple[npt.NDArray[np.floating], interpolation.RGIArtifacts]:
1711
1695
  """Interpolate values against underlying DataArray.
1712
1696
 
1713
1697
  This method is used by :meth:`interpolate` when ``lowmem=True``.
@@ -1762,27 +1746,37 @@ class MetDataArray(MetBase):
1762
1746
  )
1763
1747
  da.load()
1764
1748
 
1765
- tmp = interpolation.interp(
1766
- longitude=lon_sl,
1767
- latitude=lat_sl,
1768
- level=lev_sl,
1769
- time=t_sl,
1770
- da=da,
1771
- method=method,
1772
- bounds_error=bounds_error,
1773
- fill_value=fill_value,
1774
- localize=False, # would be no-op; da is localized already
1775
- indices=indices_sl,
1776
- return_indices=return_indices,
1777
- )
1778
-
1779
1749
  if return_indices:
1780
- out[mask], rgi_sl = tmp
1750
+ out[mask], rgi_sl = interpolation.interp(
1751
+ longitude=lon_sl,
1752
+ latitude=lat_sl,
1753
+ level=lev_sl,
1754
+ time=t_sl,
1755
+ da=da,
1756
+ method=method,
1757
+ bounds_error=bounds_error,
1758
+ fill_value=fill_value,
1759
+ localize=False, # would be no-op; da is localized already
1760
+ indices=indices_sl,
1761
+ return_indices=return_indices,
1762
+ )
1781
1763
  rgi_artifacts.xi_indices[:, mask] = rgi_sl.xi_indices
1782
1764
  rgi_artifacts.norm_distances[:, mask] = rgi_sl.norm_distances
1783
1765
  rgi_artifacts.out_of_bounds[mask] = rgi_sl.out_of_bounds
1784
1766
  else:
1785
- out[mask] = tmp
1767
+ out[mask] = interpolation.interp(
1768
+ longitude=lon_sl,
1769
+ latitude=lat_sl,
1770
+ level=lev_sl,
1771
+ time=t_sl,
1772
+ da=da,
1773
+ method=method,
1774
+ bounds_error=bounds_error,
1775
+ fill_value=fill_value,
1776
+ localize=False, # would be no-op; da is localized already
1777
+ indices=indices_sl,
1778
+ return_indices=return_indices,
1779
+ )
1786
1780
 
1787
1781
  if return_indices:
1788
1782
  return out, rgi_artifacts
@@ -1878,14 +1872,14 @@ class MetDataArray(MetBase):
1878
1872
  if not self.binary:
1879
1873
  raise NotImplementedError("proportion method is only implemented for binary fields")
1880
1874
 
1881
- return self.data.sum().values.item() / self.data.count().values.item()
1875
+ return self.data.sum().values.item() / self.data.count().values.item() # type: ignore[operator]
1882
1876
 
1883
- def find_edges(self) -> MetDataArray:
1877
+ def find_edges(self) -> Self:
1884
1878
  """Find edges of regions.
1885
1879
 
1886
1880
  Returns
1887
1881
  -------
1888
- MetDataArray
1882
+ Self
1889
1883
  MetDataArray with a binary field, 1 on the edge of the regions,
1890
1884
  0 outside and inside the regions.
1891
1885
 
@@ -1916,7 +1910,7 @@ class MetDataArray(MetBase):
1916
1910
  self.data.load()
1917
1911
 
1918
1912
  data = self.data.groupby("level", squeeze=False).map(_edges)
1919
- return MetDataArray(data, cachestore=self.cachestore)
1913
+ return type(self)(data, cachestore=self.cachestore)
1920
1914
 
1921
1915
  def to_polygon_feature(
1922
1916
  self,
@@ -2399,11 +2393,6 @@ class MetDataArray(MetBase):
2399
2393
 
2400
2394
  return da
2401
2395
 
2402
- @override
2403
- def downselect(self, bbox: tuple[float, ...]) -> MetDataArray:
2404
- data = downselect(self.data, bbox)
2405
- return MetDataArray(data, cachestore=self.cachestore)
2406
-
2407
2396
 
2408
2397
  def _is_wrapped(longitude: np.ndarray) -> bool:
2409
2398
  """Check if ``longitude`` covers ``[-180, 180]``."""
@@ -2592,9 +2581,9 @@ def _extract_2d_arr_and_altitude(
2592
2581
  except KeyError:
2593
2582
  altitude = None
2594
2583
  else:
2595
- altitude = round(altitude)
2584
+ altitude = round(altitude) # type: ignore[call-overload]
2596
2585
 
2597
- return arr, altitude
2586
+ return arr, altitude # type: ignore[return-value]
2598
2587
 
2599
2588
 
2600
2589
  def downselect(data: XArrayType, bbox: tuple[float, ...]) -> XArrayType:
@@ -2828,3 +2817,82 @@ def _lowmem_masks(
2828
2817
  mask = ((time >= t_met[i]) if i == istart else (time > t_met[i])) & (time <= t_met[i + 1])
2829
2818
  if np.any(mask):
2830
2819
  yield mask
2820
+
2821
+
2822
+ def maybe_downselect_mds(
2823
+ big_mds: MetDataset,
2824
+ little_mds: MetDataset | None,
2825
+ t0: np.datetime64,
2826
+ t1: np.datetime64,
2827
+ ) -> MetDataset:
2828
+ """Possibly downselect ``big_mds`` in the time domain to cover ``[t0, t1]``.
2829
+
2830
+ If possible, ``little_mds`` is recycled to avoid re-loading data.
2831
+
2832
+ This implementation assumes ``t0 <= t1``, but this is not enforced.
2833
+
2834
+ If ``little_mds`` already covers the time range, it is returned as-is.
2835
+
2836
+ If ``big_mds`` doesn't cover the time range, no error is raised.
2837
+
2838
+ Parameters
2839
+ ----------
2840
+ big_mds : MetDataset
2841
+ Larger MetDataset
2842
+ little_mds : MetDataset | None
2843
+ Smaller MetDataset. This is assumed to be a subset of ``big_mds``,
2844
+ though the implementation may work if this is not the case.
2845
+ t0, t1 : np.datetime64
2846
+ Time range to cover
2847
+
2848
+ Returns
2849
+ -------
2850
+ MetDataset
2851
+ MetDataset covering the time range ``[t0, t1]`` comprised of data from
2852
+ ``little_mds`` when possible, otherwise from ``big_mds``.
2853
+ """
2854
+ if little_mds is None:
2855
+ big_time = big_mds.indexes["time"].values
2856
+ i0 = np.searchsorted(big_time, t0, side="right").item()
2857
+ i0 = max(0, i0 - 1)
2858
+ i1 = np.searchsorted(big_time, t1, side="left").item()
2859
+ i1 = min(i1 + 1, big_time.size)
2860
+ return MetDataset._from_fastpath(big_mds.data.isel(time=slice(i0, i1)))
2861
+
2862
+ little_time = little_mds.indexes["time"].values
2863
+ if t0 >= little_time[0] and t1 <= little_time[-1]:
2864
+ return little_mds
2865
+
2866
+ big_time = big_mds.indexes["time"].values
2867
+ i0 = np.searchsorted(big_time, t0, side="right").item()
2868
+ i0 = max(0, i0 - 1)
2869
+ i1 = np.searchsorted(big_time, t1, side="left").item()
2870
+ i1 = min(i1 + 1, big_time.size)
2871
+ big_ds = big_mds.data.isel(time=slice(i0, i1))
2872
+ big_time = big_ds._indexes["time"].index.values # type: ignore[attr-defined]
2873
+
2874
+ # Select exactly the times in big_ds that are not in little_ds
2875
+ _, little_indices, big_indices = np.intersect1d(
2876
+ little_time, big_time, assume_unique=True, return_indices=True
2877
+ )
2878
+ little_ds = little_mds.data.isel(time=little_indices)
2879
+ filt = np.ones_like(big_time, dtype=bool)
2880
+ filt[big_indices] = False
2881
+ big_ds = big_ds.isel(time=filt)
2882
+
2883
+ # Manually load relevant parts of big_ds into memory before xr.concat
2884
+ # It appears that without this, xr.concat will forget the in-memory
2885
+ # arrays in little_ds
2886
+ for var, da in little_ds.items():
2887
+ if da._in_memory:
2888
+ da2 = big_ds[var]
2889
+ if not da2._in_memory:
2890
+ da2.load()
2891
+
2892
+ ds = xr.concat([little_ds, big_ds], dim="time")
2893
+ if not ds._indexes["time"].index.is_monotonic_increasing: # type: ignore[attr-defined]
2894
+ # Rarely would we enter this: t0 would have to be before the first
2895
+ # time in little_mds, and the various advection-based models generally
2896
+ # proceed forward in time.
2897
+ ds = ds.sortby("time")
2898
+ return MetDataset._from_fastpath(ds)
@@ -135,6 +135,20 @@ class ModelParams:
135
135
  return {(name := field.name): getattr(self, name) for field in fields(self)}
136
136
 
137
137
 
138
+ @dataclass
139
+ class AdvectionBuffers(ModelParams):
140
+ """Override buffers in :class:`ModelParams` for advection models."""
141
+
142
+ #: Met longitude [WGS84] buffer for evolution by advection.
143
+ met_longitude_buffer: tuple[float, float] = (10.0, 10.0)
144
+
145
+ #: Met latitude buffer [WGS84] for evolution by advection.
146
+ met_latitude_buffer: tuple[float, float] = (10.0, 10.0)
147
+
148
+ #: Met level buffer [:math:`hPa`] for evolution by advection.
149
+ met_level_buffer: tuple[float, float] = (40.0, 40.0)
150
+
151
+
138
152
  # ------
139
153
  # Models
140
154
  # ------
@@ -146,7 +160,7 @@ class Model(ABC):
146
160
  Implementing classes must implement the :meth:`eval` method
147
161
  """
148
162
 
149
- __slots__ = ("params", "met", "source")
163
+ __slots__ = ("met", "params", "source")
150
164
 
151
165
  #: Default model parameter dataclass
152
166
  default_params: type[ModelParams] = ModelParams
@@ -441,7 +455,7 @@ class Model(ABC):
441
455
  self.met = self.require_met()
442
456
 
443
457
  # Return dataset with the same coords as self.met, but empty data_vars
444
- return MetDataset(xr.Dataset(coords=self.met.data.coords))
458
+ return MetDataset._from_fastpath(xr.Dataset(coords=self.met.data.coords))
445
459
 
446
460
  copy_source = self.params["copy_source"]
447
461
 
@@ -554,7 +568,7 @@ class Model(ABC):
554
568
  }
555
569
  kwargs = {k: v for k, v in buffers.items() if v is not None}
556
570
 
557
- self.met = source.downselect_met(self.met, **kwargs, copy=False)
571
+ self.met = source.downselect_met(self.met, **kwargs)
558
572
 
559
573
  def set_source_met(
560
574
  self,
@@ -825,7 +839,7 @@ def interpolate_met(
825
839
  *,
826
840
  q_method: str | None = None,
827
841
  **interp_kwargs: Any,
828
- ) -> npt.NDArray[np.float64]:
842
+ ) -> npt.NDArray[np.floating]:
829
843
  """Interpolate ``vector`` against ``met`` gridded data.
830
844
 
831
845
  If ``vector_key`` (=``met_key`` by default) already exists,
@@ -854,7 +868,7 @@ def interpolate_met(
854
868
 
855
869
  Returns
856
870
  -------
857
- npt.NDArray[np.float64]
871
+ npt.NDArray[np.floating]
858
872
  Interpolated values.
859
873
 
860
874
  Raises
@@ -933,15 +947,15 @@ def _extract_q(met: MetDataset, met_key: str, q_method: str) -> tuple[MetDataArr
933
947
 
934
948
 
935
949
  def _prepare_q(
936
- mda: MetDataArray, level: npt.NDArray[np.float64], q_method: str, log_applied: bool
937
- ) -> tuple[MetDataArray, npt.NDArray[np.float64]]:
950
+ mda: MetDataArray, level: npt.NDArray[np.floating], q_method: str, log_applied: bool
951
+ ) -> tuple[MetDataArray, npt.NDArray[np.floating]]:
938
952
  """Prepare specific humidity for interpolation with experimental ``q_method``.
939
953
 
940
954
  Parameters
941
955
  ----------
942
956
  mda : MetDataArray
943
957
  MetDataArray of specific humidity.
944
- level : npt.NDArray[np.float64]
958
+ level : npt.NDArray[np.floating]
945
959
  Levels to interpolate to, [:math:`hPa`].
946
960
  q_method : str
947
961
  One of ``"log-q-log-p"`` or ``"cubic-spline"``.
@@ -952,7 +966,7 @@ def _prepare_q(
952
966
  -------
953
967
  mda : MetDataArray
954
968
  MetDataArray of specific humidity transformed for interpolation.
955
- level : npt.NDArray[np.float64]
969
+ level : npt.NDArray[np.floating]
956
970
  Transformed levels for interpolation.
957
971
  """
958
972
  da = mda.data
@@ -975,8 +989,8 @@ def _prepare_q(
975
989
 
976
990
 
977
991
  def _prepare_q_log_q_log_p(
978
- da: xr.DataArray, level: npt.NDArray[np.float64], log_applied: bool
979
- ) -> tuple[MetDataArray, npt.NDArray[np.float64]]:
992
+ da: xr.DataArray, level: npt.NDArray[np.floating], log_applied: bool
993
+ ) -> tuple[MetDataArray, npt.NDArray[np.floating]]:
980
994
  da = da.assign_coords(level=np.log(da["level"]))
981
995
 
982
996
  if not log_applied:
@@ -994,8 +1008,8 @@ def _prepare_q_log_q_log_p(
994
1008
 
995
1009
 
996
1010
  def _prepare_q_cubic_spline(
997
- da: xr.DataArray, level: npt.NDArray[np.float64]
998
- ) -> tuple[MetDataArray, npt.NDArray[np.float64]]:
1011
+ da: xr.DataArray, level: npt.NDArray[np.floating]
1012
+ ) -> tuple[MetDataArray, npt.NDArray[np.floating]]:
999
1013
  if da["level"][0] < 50.0 or da["level"][-1] > 1000.0:
1000
1014
  msg = "Cubic spline interpolation requires data to span 50-1000 hPa."
1001
1015
  raise ValueError(msg)