pycontrails 0.54.4__cp311-cp311-win_amd64.whl → 0.54.5__cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pycontrails might be problematic. Click here for more details.

pycontrails/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
12
12
  __version_tuple__: VERSION_TUPLE
13
13
  version_tuple: VERSION_TUPLE
14
14
 
15
- __version__ = version = '0.54.4'
16
- __version_tuple__ = version_tuple = (0, 54, 4)
15
+ __version__ = version = '0.54.5'
16
+ __version_tuple__ = version_tuple = (0, 54, 5)
pycontrails/core/fleet.py CHANGED
@@ -133,18 +133,19 @@ class Fleet(Flight):
133
133
 
134
134
  @override
135
135
  def copy(self, **kwargs: Any) -> Self:
136
- kwargs.setdefault("fuel", self.fuel)
137
136
  kwargs.setdefault("fl_attrs", self.fl_attrs)
137
+ kwargs.setdefault("final_waypoints", self.final_waypoints)
138
138
  return super().copy(**kwargs)
139
139
 
140
140
  @override
141
141
  def filter(self, mask: npt.NDArray[np.bool_], copy: bool = True, **kwargs: Any) -> Self:
142
- kwargs.setdefault("fuel", self.fuel)
143
-
144
142
  flight_ids = set(np.unique(self["flight_id"][mask]))
145
143
  fl_attrs = {k: v for k, v in self.fl_attrs.items() if k in flight_ids}
146
144
  kwargs.setdefault("fl_attrs", fl_attrs)
147
145
 
146
+ final_waypoints = np.array(self.final_waypoints[mask], copy=copy)
147
+ kwargs.setdefault("final_waypoints", final_waypoints)
148
+
148
149
  return super().filter(mask, copy=copy, **kwargs)
149
150
 
150
151
  @override
@@ -187,8 +188,9 @@ class Fleet(Flight):
187
188
  in ``seq``.
188
189
  """
189
190
 
191
+ # Create a shallow copy because we add additional keys in _validate_fl
190
192
  def _shallow_copy(fl: Flight) -> Flight:
191
- return Flight(VectorDataDict(fl.data), attrs=fl.attrs, copy=False, fuel=fl.fuel)
193
+ return Flight._from_fastpath(fl.data, fl.attrs, fuel=fl.fuel)
192
194
 
193
195
  def _maybe_warn(fl: Flight) -> Flight:
194
196
  if not fl:
@@ -217,7 +219,18 @@ class Fleet(Flight):
217
219
  )
218
220
 
219
221
  data = {var: np.concatenate([fl[var] for fl in seq]) for var in seq[0]}
220
- return cls(data=data, attrs=attrs, copy=False, fuel=fuel, fl_attrs=fl_attrs)
222
+
223
+ final_waypoints = np.zeros(data["time"].size, dtype=bool)
224
+ final_waypoint_indices = np.cumsum([fl.size for fl in seq]) - 1
225
+ final_waypoints[final_waypoint_indices] = True
226
+
227
+ return cls._from_fastpath(
228
+ data,
229
+ attrs,
230
+ fuel=fuel,
231
+ fl_attrs=fl_attrs,
232
+ final_waypoints=final_waypoints,
233
+ )
221
234
 
222
235
  @property
223
236
  def n_flights(self) -> int:
@@ -246,11 +259,19 @@ class Fleet(Flight):
246
259
  List of Flights in the same order as was passed into the ``Fleet`` instance.
247
260
  """
248
261
  indices = self.dataframe.groupby("flight_id", sort=False).indices
262
+ if copy:
263
+ return [
264
+ Flight._from_fastpath(
265
+ {k: v[idx] for k, v in self.data.items()},
266
+ self.fl_attrs[flight_id],
267
+ fuel=self.fuel,
268
+ ).copy()
269
+ for flight_id, idx in indices.items()
270
+ ]
249
271
  return [
250
- Flight(
251
- data=VectorDataDict({k: v[idx] for k, v in self.data.items()}),
252
- attrs=self.fl_attrs[flight_id],
253
- copy=copy,
272
+ Flight._from_fastpath(
273
+ {k: v[idx] for k, v in self.data.items()},
274
+ self.fl_attrs[flight_id],
254
275
  fuel=self.fuel,
255
276
  )
256
277
  for flight_id, idx in indices.items()
@@ -957,7 +957,12 @@ class Flight(GeoVectorDataset):
957
957
  msg = f"{msg} Pass 'keep_original_index=True' to keep the original index."
958
958
  warnings.warn(msg)
959
959
 
960
- return type(self)(data=df, attrs=self.attrs, fuel=self.fuel)
960
+ # Reorder columns (this is unimportant but makes the output more canonical)
961
+ coord_names = ("longitude", "latitude", "altitude", "time")
962
+ df = df[[*coord_names, *[c for c in df.columns if c not in set(coord_names)]]]
963
+
964
+ data = {k: v.to_numpy() for k, v in df.items()}
965
+ return type(self)._from_fastpath(data, attrs=self.attrs, fuel=self.fuel)
961
966
 
962
967
  def clean_and_resample(
963
968
  self,
@@ -245,6 +245,9 @@ def _pick_method(scipy_version: str, method: str) -> str:
245
245
  str
246
246
  Interpolation method adjusted for compatibility with this class.
247
247
  """
248
+ if method == "linear":
249
+ return method
250
+
248
251
  try:
249
252
  version = scipy_version.split(".")
250
253
  major = int(version[0])
@@ -486,15 +489,15 @@ def interp(
486
489
  da = _localize(da, coords)
487
490
 
488
491
  indexes = da._indexes
489
- x = indexes["longitude"].index.to_numpy() # type: ignore[attr-defined]
490
- y = indexes["latitude"].index.to_numpy() # type: ignore[attr-defined]
491
- z = indexes["level"].index.to_numpy() # type: ignore[attr-defined]
492
+ x = indexes["longitude"].index.values # type: ignore[attr-defined]
493
+ y = indexes["latitude"].index.values # type: ignore[attr-defined]
494
+ z = indexes["level"].index.values # type: ignore[attr-defined]
492
495
  if any(v.dtype != np.float64 for v in (x, y, z)):
493
496
  msg = "da must have float64 dtype for longitude, latitude, and level coordinates"
494
497
  raise ValueError(msg)
495
498
 
496
499
  # Convert t and time to float64
497
- t = indexes["time"].index.to_numpy() # type: ignore[attr-defined]
500
+ t = indexes["time"].index.values # type: ignore[attr-defined]
498
501
  offset = t[0]
499
502
  t = _floatize_time(t, offset)
500
503
 
pycontrails/core/met.py CHANGED
@@ -73,6 +73,8 @@ class MetBase(ABC, Generic[XArrayType]):
73
73
  and xr.Dataset.
74
74
  """
75
75
 
76
+ __slots__ = ("cachestore", "data")
77
+
76
78
  #: DataArray or Dataset
77
79
  data: XArrayType
78
80
 
@@ -87,6 +89,22 @@ class MetBase(ABC, Generic[XArrayType]):
87
89
  "time",
88
90
  )
89
91
 
92
+ @classmethod
93
+ def _from_fastpath(cls, data: XArrayType, cachestore: CacheStore | None = None) -> Self:
94
+ """Create new instance from consistent data.
95
+
96
+ This is a low-level method that bypasses the standard constructor in certain
97
+ special cases. It is intended for internal use only.
98
+
99
+ In essence, this method skips any validation from __init__ and directly sets
100
+ ``data`` and ``cachestore``. This is useful when creating a new instance from an existing
101
+ instance where the data has already been validated.
102
+ """
103
+ obj = cls.__new__(cls)
104
+ obj.data = data
105
+ obj.cachestore = cachestore
106
+ return obj
107
+
90
108
  def __repr__(self) -> str:
91
109
  data = getattr(self, "data", None)
92
110
  return (
@@ -517,8 +535,7 @@ class MetBase(ABC, Generic[XArrayType]):
517
535
  """Pass through to :attr:`self.data.attrs`."""
518
536
  return self.data.attrs
519
537
 
520
- @abstractmethod
521
- def downselect(self, bbox: tuple[float, ...]) -> MetBase:
538
+ def downselect(self, bbox: tuple[float, ...]) -> Self:
522
539
  """Downselect met data within spatial bounding box.
523
540
 
524
541
  Parameters
@@ -529,12 +546,13 @@ class MetBase(ABC, Generic[XArrayType]):
529
546
  For 3D queries, list is [west, south, min-level, east, north, max-level]
530
547
  with level defined in [:math:`hPa`].
531
548
 
532
-
533
549
  Returns
534
550
  -------
535
- MetBase
551
+ Self
536
552
  Return downselected data
537
553
  """
554
+ data = downselect(self.data, bbox)
555
+ return type(self)._from_fastpath(data, cachestore=self.cachestore)
538
556
 
539
557
  @property
540
558
  def is_zarr(self) -> bool:
@@ -565,7 +583,6 @@ class MetBase(ABC, Generic[XArrayType]):
565
583
  np.timedelta64(0, "h"),
566
584
  np.timedelta64(0, "h"),
567
585
  ),
568
- copy: bool = True,
569
586
  ) -> MetDataType:
570
587
  """Downselect ``met`` to encompass a spatiotemporal region of the data.
571
588
 
@@ -576,6 +593,10 @@ class MetBase(ABC, Generic[XArrayType]):
576
593
  ``met`` input. This method is different from :meth:`downselect` which
577
594
  operates on the instance data.
578
595
 
596
+ .. versionchanged:: 0.54.5
597
+
598
+ Data is no longer copied when downselecting.
599
+
579
600
  Parameters
580
601
  ----------
581
602
  met : MetDataset | MetDataArray
@@ -600,8 +621,6 @@ class MetBase(ABC, Generic[XArrayType]):
600
621
  and ``time_buffer[1]`` on the high side.
601
622
  Units must be the same as class coordinates.
602
623
  Defaults to ``(np.timedelta64(0, "h"), np.timedelta64(0, "h"))``.
603
- copy : bool
604
- If returned object is a copy or view of the original. True by default.
605
624
 
606
625
  Returns
607
626
  -------
@@ -627,9 +646,31 @@ class MetBase(ABC, Generic[XArrayType]):
627
646
  latitude_buffer=latitude_buffer,
628
647
  level_buffer=level_buffer,
629
648
  time_buffer=time_buffer,
630
- copy=copy,
631
649
  )
632
650
 
651
+ def wrap_longitude(self) -> Self:
652
+ """Wrap longitude coordinates.
653
+
654
+ Returns
655
+ -------
656
+ Self
657
+ Copy of instance with wrapped longitude values.
658
+ Returns copy of data when longitude values are already wrapped
659
+ """
660
+ return type(self)._from_fastpath(_wrap_longitude(self.data), cachestore=self.cachestore)
661
+
662
+ def copy(self) -> Self:
663
+ """Create a shallow copy of the current class.
664
+
665
+ See :meth:`xarray.Dataset.copy` for reference.
666
+
667
+ Returns
668
+ -------
669
+ Self
670
+ Copy of the current class
671
+ """
672
+ return type(self)._from_fastpath(self.data.copy(), cachestore=self.cachestore)
673
+
633
674
 
634
675
  class MetDataset(MetBase):
635
676
  """Meteorological dataset with multiple variables.
@@ -697,6 +738,8 @@ class MetDataset(MetBase):
697
738
  223.5083
698
739
  """
699
740
 
741
+ __slots__ = ()
742
+
700
743
  data: xr.Dataset
701
744
 
702
745
  def __init__(
@@ -755,7 +798,7 @@ class MetDataset(MetBase):
755
798
  "To get items (e.g. 'time' or 'level') from underlying xr.Dataset object, "
756
799
  "use the 'data' attribute."
757
800
  ) from e
758
- return MetDataArray(da, copy=False, validate=False)
801
+ return MetDataArray._from_fastpath(da)
759
802
 
760
803
  def get(self, key: str, default_value: Any = None) -> Any:
761
804
  """Shortcut to :meth:`data.get(k, v)` method.
@@ -883,20 +926,6 @@ class MetDataset(MetBase):
883
926
  def size(self) -> int:
884
927
  return np.prod(self.shape).item()
885
928
 
886
- def copy(self) -> MetDataset:
887
- """Create a copy of the current class.
888
-
889
- Returns
890
- -------
891
- MetDataset
892
- MetDataset copy
893
- """
894
- return MetDataset(
895
- self.data,
896
- cachestore=self.cachestore,
897
- copy=True, # True by default, but being extra explicit
898
- )
899
-
900
929
  def ensure_vars(
901
930
  self,
902
931
  vars: MetVariable | str | Sequence[MetVariable | str | Sequence[MetVariable]],
@@ -1011,20 +1040,6 @@ class MetDataset(MetBase):
1011
1040
  data = _load(hash, cachestore, chunks)
1012
1041
  return cls(data)
1013
1042
 
1014
- def wrap_longitude(self) -> MetDataset:
1015
- """Wrap longitude coordinates.
1016
-
1017
- Returns
1018
- -------
1019
- MetDataset
1020
- Copy of MetDataset with wrapped longitude values.
1021
- Returns copy of current MetDataset when longitude values are already wrapped
1022
- """
1023
- return MetDataset(
1024
- _wrap_longitude(self.data),
1025
- cachestore=self.cachestore,
1026
- )
1027
-
1028
1043
  @override
1029
1044
  def broadcast_coords(self, name: str) -> xr.DataArray:
1030
1045
  da = xr.ones_like(self.data[next(iter(self.data.keys()))]) * self.data[name]
@@ -1032,11 +1047,6 @@ class MetDataset(MetBase):
1032
1047
 
1033
1048
  return da
1034
1049
 
1035
- @override
1036
- def downselect(self, bbox: tuple[float, ...]) -> MetDataset:
1037
- data = downselect(self.data, bbox)
1038
- return MetDataset(data, cachestore=self.cachestore, copy=False)
1039
-
1040
1050
  def to_vector(self, transfer_attrs: bool = True) -> vector_module.GeoVectorDataset:
1041
1051
  """Convert a :class:`MetDataset` to a :class:`GeoVectorDataset` by raveling data.
1042
1052
 
@@ -1312,9 +1322,13 @@ class MetDataset(MetBase):
1312
1322
  class MetDataArray(MetBase):
1313
1323
  """Meteorological DataArray of single variable.
1314
1324
 
1315
- Wrapper around xr.DataArray to enforce certain
1325
+ Wrapper around :class:`xarray.DataArray` to enforce certain
1316
1326
  variables and dimensions for internal usage.
1317
1327
 
1328
+ .. versionchanged:: 0.54.5
1329
+
1330
+ Remove ``validate`` parameter. Validation is now always performed.
1331
+
1318
1332
  Parameters
1319
1333
  ----------
1320
1334
  data : ArrayLike
@@ -1332,15 +1346,8 @@ class MetDataArray(MetBase):
1332
1346
  Copy `data` parameter on construction, by default `True`. If `data` is lazy-loaded
1333
1347
  via `dask`, this parameter has no effect. If `data` is already loaded into memory,
1334
1348
  a copy of the data (rather than a view) may be created if `True`.
1335
- validate : bool, optional
1336
- Confirm that the parameter `data` has correct specification. This automatically handled
1337
- in the case that `copy=True`. Validation only introduces a very small overhead.
1338
- This parameter should only be set to `False` if working with data derived from an
1339
- existing MetDataset or :class`MetDataArray`. By default `True`.
1340
1349
  name : Hashable, optional
1341
1350
  Name of the data variable. If not specified, the name will be set to "met".
1342
- **kwargs
1343
- To be removed in future versions. Passed directly to xr.DataArray constructor.
1344
1351
 
1345
1352
  Examples
1346
1353
  --------
@@ -1370,6 +1377,8 @@ class MetDataArray(MetBase):
1370
1377
  0.41884649899766946
1371
1378
  """
1372
1379
 
1380
+ __slots__ = ()
1381
+
1373
1382
  data: xr.DataArray
1374
1383
 
1375
1384
  def __init__(
@@ -1378,7 +1387,6 @@ class MetDataArray(MetBase):
1378
1387
  cachestore: CacheStore | None = None,
1379
1388
  wrap_longitude: bool = False,
1380
1389
  copy: bool = True,
1381
- validate: bool = True,
1382
1390
  name: Hashable | None = None,
1383
1391
  ) -> None:
1384
1392
  self.cachestore = cachestore
@@ -1386,16 +1394,14 @@ class MetDataArray(MetBase):
1386
1394
  if copy:
1387
1395
  self.data = data.copy()
1388
1396
  self._preprocess_dims(wrap_longitude)
1397
+ elif wrap_longitude:
1398
+ raise ValueError("Set 'copy=True' when using 'wrap_longitude=True'.")
1389
1399
  else:
1390
- if wrap_longitude:
1391
- raise ValueError("Set 'copy=True' when using 'wrap_longitude=True'.")
1392
1400
  self.data = data
1393
- if validate:
1394
- self._validate_dims()
1401
+ self._validate_dims()
1395
1402
 
1396
1403
  # Priority: name > data.name > "met"
1397
- name = name or self.data.name or "met"
1398
- self.data.name = name
1404
+ self.data.name = name or self.data.name or "met"
1399
1405
 
1400
1406
  @property
1401
1407
  def values(self) -> np.ndarray:
@@ -1453,27 +1459,6 @@ class MetDataArray(MetBase):
1453
1459
  # https://github.com/python/mypy/issues/1178
1454
1460
  return typing.cast(tuple[int, int, int, int], self.data.shape)
1455
1461
 
1456
- def copy(self) -> MetDataArray:
1457
- """Create a copy of the current class.
1458
-
1459
- Returns
1460
- -------
1461
- MetDataArray
1462
- MetDataArray copy
1463
- """
1464
- return MetDataArray(self.data, cachestore=self.cachestore, copy=True)
1465
-
1466
- def wrap_longitude(self) -> MetDataArray:
1467
- """Wrap longitude coordinates.
1468
-
1469
- Returns
1470
- -------
1471
- MetDataArray
1472
- Copy of MetDataArray with wrapped longitude values.
1473
- Returns copy of current MetDataArray when longitude values are already wrapped
1474
- """
1475
- return MetDataArray(_wrap_longitude(self.data), cachestore=self.cachestore)
1476
-
1477
1462
  @property
1478
1463
  def in_memory(self) -> bool:
1479
1464
  """Check if underlying :attr:`data` is loaded into memory.
@@ -1889,12 +1874,12 @@ class MetDataArray(MetBase):
1889
1874
 
1890
1875
  return self.data.sum().values.item() / self.data.count().values.item() # type: ignore[operator]
1891
1876
 
1892
- def find_edges(self) -> MetDataArray:
1877
+ def find_edges(self) -> Self:
1893
1878
  """Find edges of regions.
1894
1879
 
1895
1880
  Returns
1896
1881
  -------
1897
- MetDataArray
1882
+ Self
1898
1883
  MetDataArray with a binary field, 1 on the edge of the regions,
1899
1884
  0 outside and inside the regions.
1900
1885
 
@@ -1925,7 +1910,7 @@ class MetDataArray(MetBase):
1925
1910
  self.data.load()
1926
1911
 
1927
1912
  data = self.data.groupby("level", squeeze=False).map(_edges)
1928
- return MetDataArray(data, cachestore=self.cachestore)
1913
+ return type(self)(data, cachestore=self.cachestore)
1929
1914
 
1930
1915
  def to_polygon_feature(
1931
1916
  self,
@@ -2408,11 +2393,6 @@ class MetDataArray(MetBase):
2408
2393
 
2409
2394
  return da
2410
2395
 
2411
- @override
2412
- def downselect(self, bbox: tuple[float, ...]) -> MetDataArray:
2413
- data = downselect(self.data, bbox)
2414
- return MetDataArray(data, cachestore=self.cachestore)
2415
-
2416
2396
 
2417
2397
  def _is_wrapped(longitude: np.ndarray) -> bool:
2418
2398
  """Check if ``longitude`` covers ``[-180, 180]``."""
@@ -2837,3 +2817,82 @@ def _lowmem_masks(
2837
2817
  mask = ((time >= t_met[i]) if i == istart else (time > t_met[i])) & (time <= t_met[i + 1])
2838
2818
  if np.any(mask):
2839
2819
  yield mask
2820
+
2821
+
2822
+ def maybe_downselect_mds(
2823
+ big_mds: MetDataset,
2824
+ little_mds: MetDataset | None,
2825
+ t0: np.datetime64,
2826
+ t1: np.datetime64,
2827
+ ) -> MetDataset:
2828
+ """Possibly downselect ``big_mds`` in the time domain to cover ``[t0, t1]``.
2829
+
2830
+ If possible, ``little_mds`` is recycled to avoid re-loading data.
2831
+
2832
+ This implementation assumes ``t0 <= t1``, but this is not enforced.
2833
+
2834
+ If ``little_mds`` already covers the time range, it is returned as-is.
2835
+
2836
+ If ``big_mds`` doesn't cover the time range, no error is raised.
2837
+
2838
+ Parameters
2839
+ ----------
2840
+ big_mds : MetDataset
2841
+ Larger MetDataset
2842
+ little_mds : MetDataset | None
2843
+ Smaller MetDataset. This is assumed to be a subset of ``big_mds``,
2844
+ though the implementation may work if this is not the case.
2845
+ t0, t1 : np.datetime64
2846
+ Time range to cover
2847
+
2848
+ Returns
2849
+ -------
2850
+ MetDataset
2851
+ MetDataset covering the time range ``[t0, t1]`` comprised of data from
2852
+ ``little_mds`` when possible, otherwise from ``big_mds``.
2853
+ """
2854
+ if little_mds is None:
2855
+ big_time = big_mds.indexes["time"].values
2856
+ i0 = np.searchsorted(big_time, t0, side="right").item()
2857
+ i0 = max(0, i0 - 1)
2858
+ i1 = np.searchsorted(big_time, t1, side="left").item()
2859
+ i1 = min(i1 + 1, big_time.size)
2860
+ return MetDataset._from_fastpath(big_mds.data.isel(time=slice(i0, i1)))
2861
+
2862
+ little_time = little_mds.indexes["time"].values
2863
+ if t0 >= little_time[0] and t1 <= little_time[-1]:
2864
+ return little_mds
2865
+
2866
+ big_time = big_mds.indexes["time"].values
2867
+ i0 = np.searchsorted(big_time, t0, side="right").item()
2868
+ i0 = max(0, i0 - 1)
2869
+ i1 = np.searchsorted(big_time, t1, side="left").item()
2870
+ i1 = min(i1 + 1, big_time.size)
2871
+ big_ds = big_mds.data.isel(time=slice(i0, i1))
2872
+ big_time = big_ds._indexes["time"].index.values # type: ignore[attr-defined]
2873
+
2874
+ # Select exactly the times in big_ds that are not in little_ds
2875
+ _, little_indices, big_indices = np.intersect1d(
2876
+ little_time, big_time, assume_unique=True, return_indices=True
2877
+ )
2878
+ little_ds = little_mds.data.isel(time=little_indices)
2879
+ filt = np.ones_like(big_time, dtype=bool)
2880
+ filt[big_indices] = False
2881
+ big_ds = big_ds.isel(time=filt)
2882
+
2883
+ # Manually load relevant parts of big_ds into memory before xr.concat
2884
+ # It appears that without this, xr.concat will forget the in-memory
2885
+ # arrays in little_ds
2886
+ for var, da in little_ds.items():
2887
+ if da._in_memory:
2888
+ da2 = big_ds[var]
2889
+ if not da2._in_memory:
2890
+ da2.load()
2891
+
2892
+ ds = xr.concat([little_ds, big_ds], dim="time")
2893
+ if not ds._indexes["time"].index.is_monotonic_increasing: # type: ignore[attr-defined]
2894
+ # Rarely would we enter this: t0 would have to be before the first
2895
+ # time in little_mds, and the various advection-based models generally
2896
+ # proceed forward in time.
2897
+ ds = ds.sortby("time")
2898
+ return MetDataset._from_fastpath(ds)
@@ -455,7 +455,7 @@ class Model(ABC):
455
455
  self.met = self.require_met()
456
456
 
457
457
  # Return dataset with the same coords as self.met, but empty data_vars
458
- return MetDataset(xr.Dataset(coords=self.met.data.coords))
458
+ return MetDataset._from_fastpath(xr.Dataset(coords=self.met.data.coords))
459
459
 
460
460
  copy_source = self.params["copy_source"]
461
461
 
@@ -568,7 +568,7 @@ class Model(ABC):
568
568
  }
569
569
  kwargs = {k: v for k, v in buffers.items() if v is not None}
570
570
 
571
- self.met = source.downselect_met(self.met, **kwargs, copy=False)
571
+ self.met = source.downselect_met(self.met, **kwargs)
572
572
 
573
573
  def set_source_met(
574
574
  self,