pyopenrivercam 0.8.9__tar.gz → 0.8.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/CHANGELOG.md +15 -0
  2. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/PKG-INFO +2 -2
  3. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/__init__.py +1 -1
  4. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/api/frames.py +15 -1
  5. pyopenrivercam-0.8.10/pyorc/api/mask.py +387 -0
  6. pyopenrivercam-0.8.10/pyorc/velocimetry/ffpiv.py +429 -0
  7. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyproject.toml +1 -1
  8. pyopenrivercam-0.8.9/pyorc/api/mask.py +0 -349
  9. pyopenrivercam-0.8.9/pyorc/velocimetry/ffpiv.py +0 -181
  10. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/.gitignore +0 -0
  11. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/.pre-commit-config.yaml +0 -0
  12. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/Dockerfile +0 -0
  13. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/LICENSE +0 -0
  14. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/README.md +0 -0
  15. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/TRADEMARK.md +0 -0
  16. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/environment.yml +0 -0
  17. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/api/__init__.py +0 -0
  18. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/api/cameraconfig.py +0 -0
  19. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/api/cross_section.py +0 -0
  20. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/api/orcbase.py +0 -0
  21. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/api/plot.py +0 -0
  22. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/api/transect.py +0 -0
  23. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/api/velocimetry.py +0 -0
  24. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/api/video.py +0 -0
  25. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/cli/__init__.py +0 -0
  26. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/cli/cli_elements.py +0 -0
  27. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/cli/cli_utils.py +0 -0
  28. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/cli/log.py +0 -0
  29. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/cli/main.py +0 -0
  30. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/const.py +0 -0
  31. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/cv.py +0 -0
  32. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/helpers.py +0 -0
  33. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/plot_helpers.py +0 -0
  34. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/project.py +0 -0
  35. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/pyorc.sh +0 -0
  36. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/sample_data.py +0 -0
  37. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/service/__init__.py +0 -0
  38. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/service/camera_config.py +0 -0
  39. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/service/velocimetry.py +0 -0
  40. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/velocimetry/__init__.py +0 -0
  41. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/pyorc/velocimetry/openpiv.py +0 -0
  42. {pyopenrivercam-0.8.9 → pyopenrivercam-0.8.10}/sonar-project.properties +0 -0
@@ -1,3 +1,18 @@
+ ## [0.8.10] - 2025-09-22
+ ### Added
+ - option `ensemble_corr` with `frames.get_piv`. This performs ensemble correlation averaging on cross-correlations and
+   results in a less noisy estimate of surface velocities, albeit with only a single time stamp. With this option, a
+   number of additional thresholds can also be defined:
+   - `corr_min` (default: 0.2): minimum correlation accepted in the cross-correlation results.
+   - `s2n_min` (default: 3): minimum signal-to-noise ratio accepted in the cross-correlation results.
+   - `count_min` (default: 0.2): minimum fraction of valid correlation results, after masking on `corr_min` and
+     `s2n_min`, required to accept results over the interrogation window.
+ ### Changed
+ ### Deprecated
+ ### Removed
+ ### Fixed
+
+
  ## [0.8.9] - 2025-08-29
  ### Added
  ### Changed
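For reference, a minimal sketch of how the new option is invoked. File names and the frame range are placeholders; the `Video`/`get_frames`/`project` calls follow the existing pyorc API, while `ensemble_corr` and the three thresholds are the additions described in the changelog entry above:

```python
import pyorc

# load an existing camera configuration and open a short clip (placeholder paths)
cam_config = pyorc.load_camera_config("cam_config.json")
video = pyorc.Video("river_clip.mp4", camera_config=cam_config, start_frame=0, end_frame=100)

# project frames to planar coordinates, then run PIV with ensemble correlation averaging
da = video.get_frames()
da_proj = da.frames.project()
ds = da_proj.frames.get_piv(
    engine="numba",
    ensemble_corr=True,
    corr_min=0.2,   # minimum accepted correlation
    s2n_min=3,      # minimum accepted signal-to-noise ratio
    count_min=0.2,  # minimum fraction of valid results per interrogation window
)
```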
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pyopenrivercam
- Version: 0.8.9
+ Version: 0.8.10
  Summary: pyorc: free and open-source image-based surface velocity and discharge.
  Author-email: Hessel Winsemius <winsemius@rainbowsensing.com>
  Requires-Python: >=3.9
@@ -21,7 +21,7 @@ Requires-Dist: click
  Requires-Dist: cython; platform_machine == 'armv7l'
  Requires-Dist: dask
  Requires-Dist: descartes
- Requires-Dist: ffpiv
+ Requires-Dist: ffpiv>=0.1.4
  Requires-Dist: flox
  Requires-Dist: geojson
  Requires-Dist: geopandas
@@ -1,6 +1,6 @@
  """pyorc: free and open-source image-based surface velocity and discharge."""

- __version__ = "0.8.9"
+ __version__ = "0.8.10"

  from .api import CameraConfig, CrossSection, Frames, Transect, Velocimetry, Video, get_camera_config, load_camera_config  # noqa
  from .project import *  # noqa
@@ -107,6 +107,7 @@ class Frames(ORCBase):
          window_size: Optional[tuple[int, int]] = None,
          overlap: Optional[tuple[int, int]] = None,
          engine: str = "numba",
+         ensemble_corr: bool = False,
          **kwargs,
      ) -> xr.Dataset:
          """Perform PIV computation on projected frames.
@@ -126,6 +127,11 @@ class Frames(ORCBase):
              select the compute engine, can be "openpiv" (default), "numba", or "numpy". "numba" will give the fastest
              performance but is still experimental. It can boost performance by almost an order of magnitude compared
              to openpiv or numpy. Both "numba" and "numpy" use the FF-PIV library as back-end.
+         ensemble_corr : bool, optional
+             only used with `engine="numba"` or `engine="numpy"`.
+             If True, performs PIV by first averaging cross-correlations across all frames and then deriving velocities.
+             If False, computes velocities for each frame pair separately. Default is False.
+
          **kwargs : dict
              keyword arguments to pass to the piv engine. For "numba" and "numpy" the argument `chunks` can be provided
              with an integer defining in how many batches of work the total velocimetry problem should be subdivided.
@@ -162,6 +168,8 @@ class Frames(ORCBase):
          coords, mesh_coords = self.get_piv_coords(window_size, search_area_size, overlap)
          # provide kwargs for OpenPIV analysis
          if engine == "openpiv":
+             # thresholds are not used.
+
              import warnings

              warnings.warn(
@@ -169,6 +177,10 @@ class Frames(ORCBase):
                  DeprecationWarning,
                  stacklevel=2,
              )
+             # Remove threshold parameters from kwargs
+             kwargs.pop("corr_min", None)
+             kwargs.pop("s2n_min", None)
+             kwargs.pop("count_min", None)
              kwargs = {
                  **kwargs,
                  "search_area_size": search_area_size[0],
@@ -187,7 +199,9 @@ class Frames(ORCBase):
                  "res_x": camera_config.resolution,
                  "res_y": camera_config.resolution,
              }
-             ds = ffpiv.get_ffpiv(self._obj, coords["y"], coords["x"], dt, engine=engine, **kwargs)
+             ds = ffpiv.get_ffpiv(
+                 self._obj, coords["y"], coords["x"], dt, engine=engine, ensemble_corr=ensemble_corr, **kwargs
+             )
          else:
              raise ValueError(f"Selected PIV engine {engine} does not exist.")
          # add all 2D-coordinates
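The practical effect of `ensemble_corr` shows up in the `time` dimension of the returned dataset; a hedged sketch, continuing from the example above (dimension sizes are illustrative):

```python
# per frame pair: one velocity field per pair of consecutive frames
ds_pairs = da_proj.frames.get_piv(engine="numba", ensemble_corr=False)
print(dict(ds_pairs.sizes))  # e.g. {"time": n_frames - 1, "y": ..., "x": ...}

# ensemble correlation averaging: one single, less noisy velocity field
ds_ens = da_proj.frames.get_piv(engine="numba", ensemble_corr=True)
print(dict(ds_ens.sizes))    # e.g. {"time": 1, "y": ..., "x": ...}
```

Note that with `engine="openpiv"` (now deprecated, see the warning above) the threshold parameters `corr_min`, `s2n_min` and `count_min` are popped from `kwargs` and therefore have no effect.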
@@ -0,0 +1,387 @@
+ """Masking methods for velocimetry."""
+
+ import copy
+ import functools
+ import warnings
+
+ import numpy as np
+
+ from pyorc import helpers
+ from pyorc.const import corr, s2n, v_x, v_y
+
+ commondoc = """
+ Returns
+ -------
+ mask : xr.DataArray
+     mask applicable to input dataset with ``ds.velocimetry.filter(mask)``.
+     If ``inplace=True``, the dataset will be returned masked with ``mask``.
+
+ """
+
+
+ def _base_mask(time_allowed=False, time_required=False, multi_timestep_required=False):
+     """Wrap generator for creating generalized masking methods for velocimetry.
+
+     Parameters
+     ----------
+     time_allowed : bool, optional
+         If set, the dimension "time" is allowed; if not set, the mask method can only be applied on datasets
+         without "time".
+     time_required : bool, optional
+         If set, the dimension "time" is required; if not set, the mask method does not require dimension "time"
+         in the dataset.
+     multi_timestep_required : bool, optional
+         If set, the masking method requires multiple timesteps in the dataset in order to be applicable.
+
+     Returns
+     -------
+     func : function
+         masking method, decorated with standard procedures
+
+     """
+
+     def decorator_func(mask_func):
+         mask_func.__doc__ = f"{mask_func.__doc__}{commondoc}"
+
+         # wrap function so that it takes over the docstring and is seen as integral part of the class
+         @functools.wraps(mask_func)
+         def wrapper_func(ref, inplace=False, reduce_time=False, *args, **kwargs):
+             # check if obj seems to contain velocimetry data
+             if reduce_time and "time" in ref._obj:
+                 ds = ref._obj.mean(dim="time", keep_attrs=True)
+             else:
+                 ds = ref._obj
+             if not ds.velocimetry.is_velocimetry:
+                 raise AssertionError("Dataset is not a valid velocimetry dataset")
+             if time_required:
+                 # then automatically time is also allowed
+                 if "time" not in ds.dims:
+                     raise AssertionError(
+                         'This mask requires dimension "time". The dataset does not contain dimension "time" or you '
+                         "have set `reduce_time=True`. Apply this mask without applying any reducers in time."
+                     )
+             if time_required:
+                 if "time" not in ds:
+                     raise AssertionError(
+                         "This mask requires dimension `time`. The dataset does not contain dimension `time`. "
+                         "Apply this mask before applying any reducers in time."
+                     )
+                 # only check for this when time is required
+                 if multi_timestep_required:
+                     if len(ds.time) < 2:
+                         raise AssertionError(
+                             "This mask requires multiple timesteps in the dataset in order to be applicable. This "
+                             "error typically occurs when applying `Frames.get_piv(ensemble_corr=True)` as this only "
+                             "yields one single time step."
+                         )
+             if not (time_allowed or time_required) and "time" in ds:
+                 # function must be applied per time step
+                 mask = ds.groupby("time", squeeze=False).map(mask_func, **kwargs)
+             else:
+                 # apply the wrapped mask function as is
+                 mask = mask_func(ds, **kwargs)
+             # apply mask if inplace
+             if inplace:
+                 # set the _obj data points
+                 for var in ref._obj.data_vars:
+                     ref._obj[var] = ref._obj[var].where(mask)
+             return mask
+
+         return wrapper_func
+
+     return decorator_func
+
+
+ class _Velocimetry_MaskMethods:
+     """Enable use of ``ds.velocimetry.filter`` functions as attributes on a Dataset containing velocimetry results.
+
+     For example, ``Dataset.velocimetry.filter.minmax``. This will return either the dataset with filtered data using
+     the ``minmax`` filter when ``inplace=True`` or the mask that should be applied to filter when ``inplace=False``
+     (default). ``ds.velocimetry.filter([mask1, mask2, ...])`` applies the provided filters in the list of filters on
+     the dataset by first combining all masks into one, and then applying that mask on the dataset.
+     """
+
+     def __init__(self, velocimetry):
+         # make the original dataset also available on the masking object
+         self.velocimetry = velocimetry
+         self._obj = velocimetry._obj
+         # Add to class _FilterMethods
+
+     def __call__(self, mask, inplace=False, *args, **kwargs):
+         """Perform mask operation on dataset.
+
+         Parameters
+         ----------
+         mask : xr.DataArray or list of xr.DataArray
+             mask(s) to be applied on the dataset; can contain a mix of (y, x) and (time, y, x) dimensions
+         inplace : bool, optional
+             If set (default unset), the mask is applied to the dataset inplace. Otherwise, a masked copy of the
+             dataset is returned.
+         *args : list
+             list arguments passed to mask function
+         **kwargs : dict
+             keyword arguments passed to mask function
+
+         Returns
+         -------
+         ds : xr.Dataset
+             Dataset containing filtered velocimetry results
+
+         """
+         if not isinstance(mask, list):
+             # combine masks
+             mask = [mask]
+         if inplace:
+             for m in mask:
+                 self._obj[v_x] = self._obj[v_x].where(m)
+                 self._obj[v_y] = self._obj[v_y].where(m)
+                 self._obj[corr] = self._obj[corr].where(m)
+                 self._obj[s2n] = self._obj[s2n].where(m)
+         else:
+             ds = copy.deepcopy(self._obj)
+             for m in mask:
+                 ds[v_x] = ds[v_x].where(m)
+                 ds[v_y] = ds[v_y].where(m)
+                 ds[corr] = ds[corr].where(m)
+                 ds[s2n] = ds[s2n].where(m)
+             return ds
+
+     @_base_mask(time_allowed=True)
+     def minmax(self, s_min=0.1, s_max=5.0):
+         """Mask values if the velocity scalar lies outside a user-defined valid range.
+
+         Parameters
+         ----------
+         s_min : float, optional
+             minimum scalar velocity [m s-1] (default: 0.1)
+         s_max : float, optional
+             maximum scalar velocity [m s-1] (default: 5.)
+
+         """
+         s = (self[v_x] ** 2 + self[v_y] ** 2) ** 0.5
+         # create filter
+         mask = (s > s_min) & (s < s_max)
+         return mask
+
+     @_base_mask(time_allowed=True)
+     def angle(self, angle_expected=0.5 * np.pi, angle_tolerance=0.25 * np.pi):
+         """Mask values whose direction lies outside the expected angle, within a given tolerance.
+
+         Values are masked where the flow angle deviates more than ``angle_tolerance`` (in radians) from
+         ``angle_expected``. When applied on a dataset with a "time" dimension, individual estimates in time are
+         masked where the angle on a specific time step deviates more than the defined amount from the expected
+         angle.
+         Note: this function does not work appropriately if the expected angle (+/- tolerance) lies within
+         range of zero, as zero is the same as 2*pi. This exception may be resolved in the future if necessary.
+
+         Parameters
+         ----------
+         angle_expected : float
+             angle (0 - 2*pi), measured clock-wise from the vertical upwards direction, expected
+             in the velocities; default: 0.5*np.pi (meaning from left to right in the x, y coordinate system)
+         angle_tolerance : float (0 - 2*pi)
+             maximum deviation from the expected angle allowed (default: 0.25 * np.pi).
+
+         """
+         angle = np.arctan2(self[v_x], self[v_y])
+         mask = np.abs(angle - angle_expected) < angle_tolerance
+         return mask
+
+     @_base_mask(time_required=True, multi_timestep_required=True)
+     def count(self, tolerance=0.33):
+         """Mask locations with too few valid velocities in time, measured as a fraction set by ``tolerance``.
+
+         Usually applied *after* having applied several other filters.
+
+         Parameters
+         ----------
+         tolerance : float (0-1)
+             tolerance for the fractional amount of valid velocities after all filters. If less than this fraction
+             is available, the velocities at that location are set to missing.
+
+         """
+         mask = self[v_x].count(dim="time") > tolerance * len(self.time)
+         return mask
+
+     @_base_mask(time_allowed=True)
+     def corr(self, tolerance=0.1):
+         """Mask values with too low correlation.
+
+         Parameters
+         ----------
+         tolerance : float (0-1)
+             tolerance for the correlation value (default: 0.1). If the correlation is lower than the tolerance,
+             the value is masked.
+
+         """
+         return self[corr] > tolerance
+
+     @_base_mask(time_required=True, multi_timestep_required=True)
+     def outliers(self, tolerance=1.0, mode="or"):
+         """Mask outliers, measured as the amount of standard deviations from the mean in time.
+
+         Parameters
+         ----------
+         tolerance : float
+             amount of standard deviations allowed from the mean
+         mode : str
+             can be "and" or "or" (default). If "or" ("and"), only one (both) of the two vector components
+             need(s) to be within tolerance.
+
+         """
+         with warnings.catch_warnings():
+             warnings.simplefilter("ignore", category=RuntimeWarning)
+             x_std = self[v_x].std(dim="time")
+             y_std = self[v_y].std(dim="time")
+             x_mean = self[v_x].mean(dim="time")
+             y_mean = self[v_y].mean(dim="time")
+             x_condition = np.abs((self[v_x] - x_mean) / x_std) < tolerance
+             y_condition = np.abs((self[v_y] - y_mean) / y_std) < tolerance
+         if mode == "or":
+             mask = x_condition | y_condition
+         else:
+             mask = x_condition & y_condition
+         return mask
+
+     @_base_mask(time_required=True, multi_timestep_required=True)
+     def variance(self, tolerance=5, mode="and"):
+         """Mask locations if their variance (std/mean in time) is above a tolerance level.
+
+         This is calculated for either or both x and y directions.
+
+         Parameters
+         ----------
+         tolerance : float
+             maximum allowed ratio of the standard deviation in time to the mean
+         mode : str
+             can be "and" (default) or "or". If "or" ("and"), only one (both) of the two vector components
+             need(s) to be within tolerance.
+
+         """
+         x_std = self[v_x].std(dim="time")
+         y_std = self[v_y].std(dim="time")
+         x_mean = np.maximum(self[v_x].mean(dim="time"), 1e-30)
+         y_mean = np.maximum(self[v_y].mean(dim="time"), 1e-30)
+         x_var = np.abs(x_std / x_mean)
+         y_var = np.abs(y_std / y_mean)
+         x_condition = x_var < tolerance
+         y_condition = y_var < tolerance
+         if mode == "or":
+             mask = x_condition | y_condition
+         else:
+             mask = x_condition & y_condition
+         return mask
+
+     @_base_mask(time_required=True, multi_timestep_required=True)
+     def rolling(self, wdw=5, tolerance=0.5):
+         """Mask values that deviate strongly from their neighbours within a rolling window in time.
+
+         Deviation is measured by ``tolerance``.
+
+         Parameters
+         ----------
+         wdw : int, optional
+             amount of time steps in rolling window (centred) (default: 5)
+         tolerance : float, optional
+             tolerance as relative deviation from the rolling maximum of values, including the value itself
+             (default: 0.5)
+
+         """
+         s = (self[v_x] ** 2 + self[v_y] ** 2) ** 0.5
+         s_rolling = s.fillna(0.0).rolling(time=wdw, center=True).max()
+         mask = s > tolerance * s_rolling
+         return mask
+
+     @_base_mask()
+     def window_nan(self, tolerance=0.7, wdw=1, **kwargs):
+         """Mask values if their surrounding neighbours (incl. the value itself) contain too many NaNs.
+
+         Meant to remove isolated velocity estimates.
+
+         Parameters
+         ----------
+         tolerance : float, optional
+             minimum amount of valid values in the search window, measured as a fraction of the total amount of
+             values [0-1] (default: 0.7)
+         wdw : int, optional
+             window size to use for sampling neighbours; zero means only the cell itself, 1 means a 3x3 window
+             (default: 1). wdw is used to fill wdw_x_min and wdw_y_min with its negative (-wdw) value, and
+             wdw_x_max and wdw_y_max with wdw itself, unless these are set explicitly in kwargs.
+         kwargs : dict
+             keyword arguments to pass to ``helpers.stack_window``. These can be:
+             wdw_x_min : int, optional
+                 window size in negative x-direction of grid (must be negative); overrules wdw in negative
+                 x-direction if set.
+             wdw_x_max : int, optional
+                 window size in positive x-direction of grid; overrules wdw in positive x-direction if set.
+             wdw_y_min : int, optional
+                 window size in negative y-direction of grid (must be negative); overrules wdw in negative
+                 y-direction if set.
+             wdw_y_max : int, optional
+                 window size in positive y-direction of grid; overrules wdw in positive y-direction if set.
+
+         """
+         # collect points within a stride, collate and analyze for nan fraction
+         ds_wdw = helpers.stack_window(self, wdw=wdw, **kwargs)
+         valid_neighbours = ds_wdw[v_x].count(dim="stride")
+         mask = valid_neighbours >= tolerance * len(ds_wdw.stride)
+         return mask
+
+     @_base_mask()
+     def window_mean(self, tolerance=0.7, wdw=1, mode="or", **kwargs):
+         """Mask values that deviate significantly from the mean of their neighbours (incl. the value itself).
+
+         The deviation is computed as a relative fraction of the mean of the neighbours.
+
+         Parameters
+         ----------
+         tolerance : float, optional
+             maximum deviation of the velocity relative to the mean velocity of the neighbours (default: 0.7)
+         wdw : int, optional
+             window used to determine relevant neighbours
+         mode : str
+             can be "and" or "or" (default). If "or" ("and"), only one (both) of the two vector components
+             need(s) to be within tolerance.
+         kwargs : dict
+             keyword arguments to pass to ``helpers.stack_window``. These can be:
+             wdw_x_min : int, optional
+                 window size in negative x-direction of grid (must be negative); overrules wdw in negative
+                 x-direction if set.
+             wdw_x_max : int, optional
+                 window size in positive x-direction of grid; overrules wdw in positive x-direction if set.
+             wdw_y_min : int, optional
+                 window size in negative y-direction of grid (must be negative); overrules wdw in negative
+                 y-direction if set.
+             wdw_y_max : int, optional
+                 window size in positive y-direction of grid; overrules wdw in positive y-direction if set.
+
+         """
+         # collect points within a stride, collate and analyze for mean value and deviation
+         ds_wdw = helpers.stack_window(self, wdw=wdw, **kwargs)
+         ds_mean = ds_wdw.mean(dim="stride")
+         x_condition = np.abs(self[v_x] - ds_mean[v_x]) / ds_mean[v_x] < tolerance
+         y_condition = np.abs(self[v_y] - ds_mean[v_y]) / ds_mean[v_y] < tolerance
+         if mode == "or":
+             mask = x_condition | y_condition
+         else:
+             mask = x_condition & y_condition
+         return mask
+
+     @_base_mask()
+     def window_replace(self, wdw=1, iter=1, **kwargs):
+         """Replace missing values in a certain window size with the mean of their neighbours.
+
+         Returns a Dataset instead of a mask.
+
+         NOTE: This functionality may be moved to a different subclass in later releases.
+
+         Parameters
+         ----------
+         wdw : int, optional
+             window used to determine relevant neighbours
+         iter : int, optional
+             amount of times to repeat the window operator
+         kwargs : dict
+             keyword arguments to pass to ``helpers.stack_window``
+
+         """
+         ds = copy.deepcopy(self)
+         for _ in range(iter):
+             # collect points within a stride, collate and analyze for mean value
+             ds_wdw = helpers.stack_window(ds, wdw=wdw, **kwargs)
+             ds_mean = ds_wdw.mean(dim="stride")
+             ds = ds.fillna(ds_mean)
+         return ds
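To connect the new module to the accessor it serves, a hedged usage sketch: the accessor spelling `ds.velocimetry.filter` follows the class docstring above, and `ds` is a velocimetry dataset as produced by `frames.get_piv`:

```python
import numpy as np

# each mask method returns an xr.DataArray mask unless called with inplace=True
mask_corr = ds.velocimetry.filter.corr(tolerance=0.1)
mask_minmax = ds.velocimetry.filter.minmax(s_min=0.1, s_max=5.0)
mask_angle = ds.velocimetry.filter.angle(angle_expected=0.5 * np.pi, angle_tolerance=0.25 * np.pi)

# masks decorated with multi_timestep_required (count, outliers, variance, rolling)
# raise an AssertionError on the single-time-stamp output of get_piv(ensemble_corr=True)
mask_outliers = ds.velocimetry.filter.outliers(tolerance=1.0, mode="or")

# applying a list of masks combines them and returns a filtered copy of the dataset
ds_filtered = ds.velocimetry.filter([mask_corr, mask_minmax, mask_angle, mask_outliers])
```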