grdwindinversion 0.3.2__tar.gz → 0.3.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. {grdwindinversion-0.3.2/grdwindinversion.egg-info → grdwindinversion-0.3.5}/PKG-INFO +2 -2
  2. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/__init__.py +1 -1
  3. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/gradientFeatures.py +204 -138
  4. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/inversion.py +730 -421
  5. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/load_config.py +10 -7
  6. grdwindinversion-0.3.5/grdwindinversion/main.py +102 -0
  7. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/utils.py +22 -16
  8. grdwindinversion-0.3.5/grdwindinversion/utils_memory.py +44 -0
  9. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5/grdwindinversion.egg-info}/PKG-INFO +2 -2
  10. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion.egg-info/SOURCES.txt +2 -0
  11. grdwindinversion-0.3.5/tests/listing_rcm_safe.txt +1506 -0
  12. grdwindinversion-0.3.5/tests/test_getOutputName.py +31 -0
  13. grdwindinversion-0.3.2/grdwindinversion/main.py +0 -59
  14. grdwindinversion-0.3.2/grdwindinversion/utils_memory.py +0 -46
  15. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/.editorconfig +0 -0
  16. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/.github/dependabot.yml +0 -0
  17. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/.github/workflows/build.yml +0 -0
  18. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/.github/workflows/ci.yml +0 -0
  19. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/.github/workflows/publish.yml +0 -0
  20. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/.gitignore +0 -0
  21. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/.pre-commit-config.yaml +0 -0
  22. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/AUTHORS.rst +0 -0
  23. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/CONTRIBUTING.rst +0 -0
  24. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/HISTORY.rst +0 -0
  25. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/LICENSE +0 -0
  26. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/MANIFEST.in +0 -0
  27. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/Makefile +0 -0
  28. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/README.md +0 -0
  29. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/ci/requirements/docs.yaml +0 -0
  30. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/ci/requirements/environment.yaml +0 -0
  31. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/Makefile +0 -0
  32. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/_static/css/grdwindinversion.css +0 -0
  33. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/algorithm.rst +0 -0
  34. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/authors.rst +0 -0
  35. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/conf.py +0 -0
  36. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/contributing.rst +0 -0
  37. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/examples/streaks-display.ipynb +0 -0
  38. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/examples/wind-inversion-from-grd.ipynb +0 -0
  39. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/history.rst +0 -0
  40. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/index.rst +0 -0
  41. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/installation.rst +0 -0
  42. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/make.bat +0 -0
  43. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/modules.rst +0 -0
  44. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/docs/usage.rst +0 -0
  45. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/.github/ISSUE_TEMPLATE.md +0 -0
  46. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/.gitignore +0 -0
  47. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/config_prod.yaml +0 -0
  48. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/config_prod_recal.yaml +0 -0
  49. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/config_prod_recal_streaks_nrcsmod.yaml +0 -0
  50. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/config_prod_streaks.yaml +0 -0
  51. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/config_prod_streaks_nrcsmod.yaml +0 -0
  52. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion/data_config.yaml +0 -0
  53. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion.egg-info/dependency_links.txt +0 -0
  54. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion.egg-info/entry_points.txt +0 -0
  55. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion.egg-info/requires.txt +0 -0
  56. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/grdwindinversion.egg-info/top_level.txt +0 -0
  57. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/pyproject.toml +0 -0
  58. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/recipe/meta.yaml +0 -0
  59. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/requirements_dev.txt +0 -0
  60. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/requirements_doc.txt +0 -0
  61. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/setup.cfg +0 -0
  62. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/tests/__init__.py +0 -0
  63. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/tests/config_test.yaml +0 -0
  64. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/tests/test_grdwindinversion_ci.py +0 -0
  65. {grdwindinversion-0.3.2 → grdwindinversion-0.3.5}/tox.ini +0 -0
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: grdwindinversion
- Version: 0.3.2
+ Version: 0.3.5
  Summary: Package to perform Wind inversion from GRD Level-1 SAR images
  Author-email: Antoine Grouazel <antoine.grouazel@ifremer.fr>
  License: MIT
@@ -1,4 +1,4 @@
- __all__ = ['inversion']
+ __all__ = ["inversion"]
  # try:
  # from importlib import metadata
  # except ImportError: # for Python<3.8
@@ -1,5 +1,4 @@
  import xsarsea.gradients
- import cv2
  import xarray as xr
  import xarray as xr
  from scipy.ndimage import binary_dilation
@@ -7,12 +6,20 @@ import numpy as np
  import logging

  import logging
- logger = logging.getLogger('grdwindinversion.gradientFeatures')
+
+ logger = logging.getLogger("grdwindinversion.gradientFeatures")
  logger.addHandler(logging.NullHandler())


  class GradientFeatures:
- def __init__(self, xr_dataset, xr_dataset_100, windows_sizes, downscales_factors, window_step=1):
+ def __init__(
+ self,
+ xr_dataset,
+ xr_dataset_100,
+ windows_sizes,
+ downscales_factors,
+ window_step=1,
+ ):
  """
  Initialize variables and xsarsea.gradients.Gradients.

@@ -55,15 +62,16 @@ class GradientFeatures:
  None

  """
+
  self.gradients = xsarsea.gradients.Gradients(
- self.xr_dataset_100['sigma0_detrend'],
+ self.xr_dataset_100["sigma0_detrend"],
  windows_sizes=self.windows_sizes,
  downscales_factors=self.downscales_factors,
- window_step=self.window_step
+ window_step=self.window_step,
  )
  self.hist = self.gradients.histogram
  # Get orthogonal gradients
- self.hist['angles'] = self.hist['angles'] + np.pi / 2
+ self.hist["angles"] = self.hist["angles"] + np.pi / 2

  def get_heterogeneity_mask(self, config):
  """
@@ -86,13 +94,25 @@ class GradientFeatures:

  try:

- sigma0_400_co = [da.sigma0 for da in self.gradients.gradients_list if (
- da.sigma0["pol"] == config["l2_params"]["copol"] and da.sigma0.downscale_factor == 4)][0]
+ sigma0_400_co = [
+ da.sigma0
+ for da in self.gradients.gradients_list
+ if (
+ da.sigma0["pol"] == config["l2_params"]["copol"]
+ and da.sigma0.downscale_factor == 4
+ )
+ ][0]
  sigs = [sigma0_400_co]

  if dual_pol:
- sigma0_800_cross = [da.sigma0 for da in self.gradients.gradients_list if (
- da.sigma0["pol"] == config["l2_params"]["crosspol"] and da.sigma0.downscale_factor == 8)][0]
+ sigma0_800_cross = [
+ da.sigma0
+ for da in self.gradients.gradients_list
+ if (
+ da.sigma0["pol"] == config["l2_params"]["crosspol"]
+ and da.sigma0.downscale_factor == 8
+ )
+ ][0]
  sigs.append(sigma0_800_cross)

  filters = {}
@@ -102,21 +122,27 @@ class GradientFeatures:
  res = 100 * sig.downscale_factor.values

  # delete useless coords : could be problematic to have it later
- if 'downscale_factor' in sig.coords:
+ if "downscale_factor" in sig.coords:
  sig = sig.reset_coords("downscale_factor", drop=True)

- if 'window_size' in sig.coords:
+ if "window_size" in sig.coords:
  sig = sig.reset_coords("window_size", drop=True)
  # mask
  sig = xr.where(sig <= 0, 1e-15, sig)

  # map incidence for detrend
- incidence = xr.DataArray(data=cv2.resize(
- self.xr_dataset_100.incidence.values, sig.shape[::-1], cv2.INTER_NEAREST), dims=sig.dims, coords=sig.coords)
+ # incidence = xr.DataArray(data=cv2.resize(
+ # self.xr_dataset_100.incidence.values, sig.shape[::-1], cv2.INTER_NEAREST), dims=sig.dims, coords=sig.coords)
+
+ incidence = self.xr_dataset_100.incidence.interp(
+ line=sig.coords["line"],
+ sample=sig.coords["sample"],
+ method="nearest", # Equivalent to INTER_NEAREST
+ )

  sigma0_detrend = xsarsea.sigma0_detrend(sig, incidence)

- filter_name = str(res)+"_"+str(pol)
+ filter_name = str(res) + "_" + str(pol)
  I = sigma0_detrend
  f1, f2, f3, f4, f = xsarsea.gradients.filtering_parameters(I)
  filters[filter_name] = f
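The hunk above drops the OpenCV call and resamples the incidence angle with xarray's coordinate-aware nearest-neighbour interpolation instead. A minimal sketch of that pattern on synthetic grids (array names and sizes are illustrative, not taken from the package):

```python
import numpy as np
import xarray as xr

# Coarse incidence grid, standing in for the 100 m dataset
incidence_100 = xr.DataArray(
    np.linspace(20.0, 45.0, 100).reshape(10, 10),
    dims=("line", "sample"),
    coords={"line": np.arange(0, 100, 10), "sample": np.arange(0, 100, 10)},
)

# Target grid of a downscaled sigma0 array
sig = xr.DataArray(
    np.ones((5, 5)),
    dims=("line", "sample"),
    coords={"line": np.arange(0, 100, 20), "sample": np.arange(0, 100, 20)},
)

# Nearest-neighbour interpolation onto sig's grid, analogous to cv2.INTER_NEAREST,
# but the result keeps labelled line/sample coordinates
incidence_on_sig = incidence_100.interp(
    line=sig.coords["line"], sample=sig.coords["sample"], method="nearest"
)
print(incidence_on_sig.shape)  # (5, 5)
```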
@@ -129,35 +155,54 @@ class GradientFeatures:
  for idx_filter, filter in enumerate(filters):
  # interp to user resolution and map on dataset grid
  new_dataArrays[filter] = filters[filter].interp(
- line=self.xr_dataset.line, sample=self.xr_dataset.sample, method="nearest")
- new_dataArrays[filter+"_mask"] = xr.where(
- new_dataArrays[filter] > thresholds[idx_filter], True, False)
+ line=self.xr_dataset.line,
+ sample=self.xr_dataset.sample,
+ method="nearest",
+ )
+ new_dataArrays[filter + "_mask"] = xr.where(
+ new_dataArrays[filter] > thresholds[idx_filter], True, False
+ )

  varname_400_copol_mask = f'400_{config["l2_params"]["copol"]}_mask'
  varname_800_crosspol_mask = f'800_{config["l2_params"]["crosspol"]}_mask'

  # Case 0: no heterogeneity
  new_dataArrays["heterogeneity_mask"] = xr.full_like(
- new_dataArrays[varname_400_copol_mask], 0)
+ new_dataArrays[varname_400_copol_mask], 0
+ )

  if dual_pol:
  # Case 3: Dual-polarization
  new_dataArrays["heterogeneity_mask"] = xr.where(
- new_dataArrays[varname_400_copol_mask] & new_dataArrays[varname_800_crosspol_mask], 3, new_dataArrays["heterogeneity_mask"])
+ new_dataArrays[varname_400_copol_mask]
+ & new_dataArrays[varname_800_crosspol_mask],
+ 3,
+ new_dataArrays["heterogeneity_mask"],
+ )

  # Case 1: Co-polarization only
  new_dataArrays["heterogeneity_mask"] = xr.where(
- new_dataArrays[varname_400_copol_mask] & ~new_dataArrays[varname_800_crosspol_mask], 1, new_dataArrays["heterogeneity_mask"])
+ new_dataArrays[varname_400_copol_mask]
+ & ~new_dataArrays[varname_800_crosspol_mask],
+ 1,
+ new_dataArrays["heterogeneity_mask"],
+ )

  # Case 2: Cross-polarization only
  new_dataArrays["heterogeneity_mask"] = xr.where(
- ~new_dataArrays[varname_400_copol_mask] & new_dataArrays[varname_800_crosspol_mask], 2, new_dataArrays["heterogeneity_mask"])
+ ~new_dataArrays[varname_400_copol_mask]
+ & new_dataArrays[varname_800_crosspol_mask],
+ 2,
+ new_dataArrays["heterogeneity_mask"],
+ )

  # Attributes
- new_dataArrays["heterogeneity_mask"].attrs["valid_range"] = np.array([
- 0, 3])
- new_dataArrays["heterogeneity_mask"].attrs["flag_values"] = np.array([
- 0, 1, 2, 3])
+ new_dataArrays["heterogeneity_mask"].attrs["valid_range"] = np.array(
+ [0, 3]
+ )
+ new_dataArrays["heterogeneity_mask"].attrs["flag_values"] = np.array(
+ [0, 1, 2, 3]
+ )
  new_dataArrays["heterogeneity_mask"].attrs["flag_meanings"] = (
  "homogeneous_NRCS, heterogeneous_from_co-polarization_NRCS, "
  "heterogeneous_from_cross-polarization_NRCS, heterogeneous_from_dual-polarization_NRCS"
@@ -165,30 +210,40 @@ class GradientFeatures:
  else:
  # no crosspol
  new_dataArrays["heterogeneity_mask"] = xr.where(
- new_dataArrays[varname_400_copol_mask], 1, new_dataArrays["heterogeneity_mask"])
+ new_dataArrays[varname_400_copol_mask],
+ 1,
+ new_dataArrays["heterogeneity_mask"],
+ )

  # Attributes for the single-pol case
- new_dataArrays["heterogeneity_mask"].attrs["valid_range"] = np.array([
- 0, 1])
- new_dataArrays["heterogeneity_mask"].attrs["flag_values"] = np.array([
- 0, 1])
- new_dataArrays["heterogeneity_mask"].attrs["flag_meanings"] = (
- "homogeneous_NRCS, heterogeneous_from_co-polarization_NRCS"
+ new_dataArrays["heterogeneity_mask"].attrs["valid_range"] = np.array(
+ [0, 1]
+ )
+ new_dataArrays["heterogeneity_mask"].attrs["flag_values"] = np.array(
+ [0, 1]
  )
+ new_dataArrays["heterogeneity_mask"].attrs[
+ "flag_meanings"
+ ] = "homogeneous_NRCS, heterogeneous_from_co-polarization_NRCS"

  # General attributes
- new_dataArrays["heterogeneity_mask"].attrs["long_name"] = "Quality flag taking into account the local heterogeneity"
+ new_dataArrays["heterogeneity_mask"].attrs[
+ "long_name"
+ ] = "Quality flag taking into account the local heterogeneity"
  return new_dataArrays

  except Exception as e:
  logging.error("Error in get_heterogeneity_mask: %s", e)

- new_dataArrays["heterogeneity_mask"] = xr.DataArray(data=np.nan * np.ones([len(self.xr_dataset.coords[dim]) for dim in ['line', 'sample']]),
- dims=[
- 'line', 'sample'],
- coords=[self.xr_dataset.coords[dim]
- for dim in ['line', 'sample']],
- attrs={"comment": "no heterogeneity mask found"})
+ new_dataArrays["heterogeneity_mask"] = xr.DataArray(
+ data=np.nan
+ * np.ones(
+ [len(self.xr_dataset.coords[dim]) for dim in ["line", "sample"]]
+ ),
+ dims=["line", "sample"],
+ coords=[self.xr_dataset.coords[dim] for dim in ["line", "sample"]],
+ attrs={"comment": "no heterogeneity mask found"},
+ )

  return new_dataArrays

@@ -208,24 +263,23 @@ class GradientFeatures:
  """

  # Load ancillary wind in antenna convention
- ancillary_wind = self.xr_dataset['ancillary_wind'].interp(
- line=streaks.line,
- sample=streaks.sample,
- method='nearest'
- ).compute()
+ ancillary_wind = (
+ self.xr_dataset["ancillary_wind"]
+ .interp(line=streaks.line, sample=streaks.sample, method="nearest")
+ .compute()
+ )

  # Convert angles to complex numbers
- streaks_c = streaks['weight'] * np.exp(1j * streaks['angle'])
+ streaks_c = streaks["weight"] * np.exp(1j * streaks["angle"])
  # Calculate the difference in angle
  diff_angle = xr.apply_ufunc(np.angle, ancillary_wind / streaks_c)

  # Remove ambiguity
- streaks_c = xr.where(np.abs(diff_angle) > np.pi /
- 2, -streaks_c, streaks_c)
+ streaks_c = xr.where(np.abs(diff_angle) > np.pi / 2, -streaks_c, streaks_c)

  # Update streaks with corrected values
- streaks['weight'] = np.abs(streaks_c)
- streaks['angle'] = xr.apply_ufunc(np.angle, streaks_c)
+ streaks["weight"] = np.abs(streaks_c)
+ streaks["angle"] = xr.apply_ufunc(np.angle, streaks_c)
  return streaks

  def convert_to_meteo_convention(self, streaks):
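The `_remove_ambiguity` hunk above encodes each direction as a complex number `weight * exp(i * angle)` and flips any streak whose angle differs from the ancillary wind by more than 90°, resolving the 180° ambiguity inherent to gradient-derived directions. A small sketch of the same logic in plain numpy, with made-up values:

```python
import numpy as np

# Streak direction (ambiguous by 180 degrees), encoded as weight * exp(i * angle)
streak = 1.0 * np.exp(1j * np.deg2rad(170.0))
# Ancillary (model) wind encoded the same way
ancillary = 1.0 * np.exp(1j * np.deg2rad(-20.0))

# Angular difference between ancillary wind and streak
diff = np.angle(ancillary / streak)

# If they disagree by more than 90 degrees, flip the streak by 180 degrees
if np.abs(diff) > np.pi / 2:
    streak = -streak

print(np.rad2deg(np.angle(streak)))  # ~ -10, now within 90 degrees of the ancillary wind
```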
@@ -243,15 +297,16 @@ class GradientFeatures:
  Dataset containing the streaks with wind direction in meteorological convention.

  """
- streaks_meteo = self.xr_dataset[['longitude', 'latitude', 'ground_heading', 'ancillary_wind']].interp(
- line=streaks.line,
- sample=streaks.sample,
- method='nearest')
+ streaks_meteo = self.xr_dataset[
+ ["longitude", "latitude", "ground_heading", "ancillary_wind"]
+ ].interp(line=streaks.line, sample=streaks.sample, method="nearest")

- streaks_meteo['angle'] = xsarsea.dir_sample_to_meteo(
- np.rad2deg(streaks['angle']), streaks_meteo['ground_heading'])
- streaks_meteo['angle'].attrs[
- 'winddir_convention'] = "Wind direction in meteorological convention (clockwise, from), ex: 0°=from north, 90°=from east"
+ streaks_meteo["angle"] = xsarsea.dir_sample_to_meteo(
+ np.rad2deg(streaks["angle"]), streaks_meteo["ground_heading"]
+ )
+ streaks_meteo["angle"].attrs[
+ "winddir_convention"
+ ] = "Wind direction in meteorological convention (clockwise, from), ex: 0°=from north, 90°=from east"

  return streaks_meteo

@@ -266,58 +321,62 @@ class GradientFeatures:
  Returns
  -------
  xarray.DataArray
- DataArray containing the streaks.
+ DataArray containing the streaks.
  """

  try:
  hist_smooth = self.hist.copy()
- hist_smooth['weight'] = xsarsea.gradients.circ_smooth(
- hist_smooth['weight'])
+ hist_smooth["weight"] = xsarsea.gradients.circ_smooth(hist_smooth["weight"])

  # Compute the mean across 'downscale_factor', 'window_size', and 'pol'
  hist_smooth_mean = hist_smooth.mean(
- ['downscale_factor', 'window_size', 'pol'])
+ ["downscale_factor", "window_size", "pol"]
+ )

  # Select histogram peak
- iangle_smooth_mean = hist_smooth_mean['weight'].fillna(
- 0).argmax(dim='angles')
- streaks_dir_smooth_mean = hist_smooth_mean['angles'].isel(
- angles=iangle_smooth_mean)
- streaks_weight_smooth_mean = hist_smooth_mean['weight'].isel(
- angles=iangle_smooth_mean)
+ iangle_smooth_mean = (
+ hist_smooth_mean["weight"].fillna(0).argmax(dim="angles")
+ )
+ streaks_dir_smooth_mean = hist_smooth_mean["angles"].isel(
+ angles=iangle_smooth_mean
+ )
+ streaks_weight_smooth_mean = hist_smooth_mean["weight"].isel(
+ angles=iangle_smooth_mean
+ )

  # Combine angles and weights into a dataset
- streaks_smooth_mean = xr.Dataset({
- 'angle': streaks_dir_smooth_mean,
- 'weight': streaks_weight_smooth_mean
- })
+ streaks_smooth_mean = xr.Dataset(
+ {"angle": streaks_dir_smooth_mean, "weight": streaks_weight_smooth_mean}
+ )

  # Remove 'angles' coordinate
- streaks_smooth_mean = streaks_smooth_mean.reset_coords(
- 'angles', drop=True)
+ streaks_smooth_mean = streaks_smooth_mean.reset_coords("angles", drop=True)

  # Remove ambiguity with ancillary wind
- streaks_smooth_mean = self._remove_ambiguity(
- streaks_smooth_mean)
+ streaks_smooth_mean = self._remove_ambiguity(streaks_smooth_mean)

  # Convert to meteo convention
- streaks_smooth_mean = self.convert_to_meteo_convention(
- streaks_smooth_mean)
+ streaks_smooth_mean = self.convert_to_meteo_convention(streaks_smooth_mean)

  # Set attributes
- streaks_smooth_mean['angle'].attrs['description'] = 'Wind direction estimated from local gradient; histograms smoothed first, then mean computed'
+ streaks_smooth_mean["angle"].attrs[
+ "description"
+ ] = "Wind direction estimated from local gradient; histograms smoothed first, then mean computed"

  return streaks_smooth_mean

  except Exception as e:
  logging.error("Error in streaks_smooth_mean: %s", e)

- streaks_dir_smooth_mean_interp = xr.DataArray(data=np.nan * np.ones([len(self.xr_dataset.coords[dim]) for dim in ['line', 'sample']]),
- dims=[
- 'line', 'sample'],
- coords=[self.xr_dataset.coords[dim]
- for dim in ['line', 'sample']],
- attrs={"comment": "no streaks_smooth_mean found"})
+ streaks_dir_smooth_mean_interp = xr.DataArray(
+ data=np.nan
+ * np.ones(
+ [len(self.xr_dataset.coords[dim]) for dim in ["line", "sample"]]
+ ),
+ dims=["line", "sample"],
+ coords=[self.xr_dataset.coords[dim] for dim in ["line", "sample"]],
+ attrs={"comment": "no streaks_smooth_mean found"},
+ )

  return streaks_dir_smooth_mean_interp

@@ -332,58 +391,64 @@ class GradientFeatures:
  Returns
  -------
  xarray.DataArray
- DataArray containing the streaks.
+ DataArray containing the streaks.
  """
  try:
  # Compute the mean of the histograms
  hist_mean = self.hist.copy().mean(
- ['downscale_factor', 'window_size', 'pol'])
+ ["downscale_factor", "window_size", "pol"]
+ )

  # Smooth the mean histogram
  hist_mean_smooth = hist_mean.copy()
- hist_mean_smooth['weight'] = xsarsea.gradients.circ_smooth(
- hist_mean['weight'])
+ hist_mean_smooth["weight"] = xsarsea.gradients.circ_smooth(
+ hist_mean["weight"]
+ )

  # Select histogram peak
- iangle_mean_smooth = hist_mean_smooth['weight'].fillna(
- 0).argmax(dim='angles')
- streaks_dir_mean_smooth = hist_mean_smooth['angles'].isel(
- angles=iangle_mean_smooth)
- streaks_weight_mean_smooth = hist_mean_smooth['weight'].isel(
- angles=iangle_mean_smooth)
+ iangle_mean_smooth = (
+ hist_mean_smooth["weight"].fillna(0).argmax(dim="angles")
+ )
+ streaks_dir_mean_smooth = hist_mean_smooth["angles"].isel(
+ angles=iangle_mean_smooth
+ )
+ streaks_weight_mean_smooth = hist_mean_smooth["weight"].isel(
+ angles=iangle_mean_smooth
+ )

  # Combine angles and weights into a dataset
- streaks_mean_smooth = xr.Dataset({
- 'angle': streaks_dir_mean_smooth,
- 'weight': streaks_weight_mean_smooth
- })
+ streaks_mean_smooth = xr.Dataset(
+ {"angle": streaks_dir_mean_smooth, "weight": streaks_weight_mean_smooth}
+ )

  # Remove 'angles' coordinate
- streaks_mean_smooth = streaks_mean_smooth.reset_coords(
- 'angles', drop=True)
+ streaks_mean_smooth = streaks_mean_smooth.reset_coords("angles", drop=True)

  # Remove ambiguity with ancillary wind
- streaks_mean_smooth = self._remove_ambiguity(
- streaks_mean_smooth)
+ streaks_mean_smooth = self._remove_ambiguity(streaks_mean_smooth)

  # Convert to meteo convention
- streaks_mean_smooth = self.convert_to_meteo_convention(
- streaks_mean_smooth)
+ streaks_mean_smooth = self.convert_to_meteo_convention(streaks_mean_smooth)

  # Set attributes
- streaks_mean_smooth['angle'].attrs['description'] = 'Wind direction estimated from local gradient; histograms mean first, then smooth computed'
+ streaks_mean_smooth["angle"].attrs[
+ "description"
+ ] = "Wind direction estimated from local gradient; histograms mean first, then smooth computed"

  return streaks_mean_smooth

  except Exception as e:
  logging.error("Error in streaks_mean_smooth: %s", e)

- streaks_mean_smooth = xr.DataArray(data=np.nan * np.ones([len(self.xr_dataset.coords[dim]) for dim in ['line', 'sample']]),
- dims=[
- 'line', 'sample'],
- coords=[self.xr_dataset.coords[dim]
- for dim in ['line', 'sample']],
- attrs={"comment": "no streaks_mean_smooth found"})
+ streaks_mean_smooth = xr.DataArray(
+ data=np.nan
+ * np.ones(
+ [len(self.xr_dataset.coords[dim]) for dim in ["line", "sample"]]
+ ),
+ dims=["line", "sample"],
+ coords=[self.xr_dataset.coords[dim] for dim in ["line", "sample"]],
+ attrs={"comment": "no streaks_mean_smooth found"},
+ )

  return streaks_mean_smooth

@@ -398,51 +463,52 @@ class GradientFeatures:
  Returns
  -------
  xarray.DataArray
- DataArray containing the individual streaks for each window_size, downscale_factor, polarisation (no combination).
+ DataArray containing the individual streaks for each window_size, downscale_factor, polarisation (no combination).
  """
  try:
  # Compute the mean of the histograms
  hist_smooth = self.hist.copy()
- hist_smooth['weight'] = xsarsea.gradients.circ_smooth(
- hist_smooth['weight'])
+ hist_smooth["weight"] = xsarsea.gradients.circ_smooth(hist_smooth["weight"])

  # Select histogram peak for each individual solution
- iangle_individual = hist_smooth['weight'].fillna(
- 0).argmax(dim='angles')
- streaks_dir_individual = hist_smooth['angles'].isel(
- angles=iangle_individual)
- streaks_weight_individual = hist_smooth['weight'].isel(
- angles=iangle_individual)
+ iangle_individual = hist_smooth["weight"].fillna(0).argmax(dim="angles")
+ streaks_dir_individual = hist_smooth["angles"].isel(
+ angles=iangle_individual
+ )
+ streaks_weight_individual = hist_smooth["weight"].isel(
+ angles=iangle_individual
+ )
  # Combine angles and weights into a dataset
- streaks_individual = xr.Dataset({
- 'angle': streaks_dir_individual,
- 'weight': streaks_weight_individual
- })
+ streaks_individual = xr.Dataset(
+ {"angle": streaks_dir_individual, "weight": streaks_weight_individual}
+ )
  # Remove 'angles' coordinate
- streaks_individual = streaks_individual.reset_coords(
- 'angles', drop=True)
+ streaks_individual = streaks_individual.reset_coords("angles", drop=True)

  # Remove ambiguity with ancillary wind for each individual solution
- streaks_individual = self._remove_ambiguity(
- streaks_individual)
+ streaks_individual = self._remove_ambiguity(streaks_individual)

  # Convert to meteo convention
- streaks_individual = self.convert_to_meteo_convention(
- streaks_individual)
+ streaks_individual = self.convert_to_meteo_convention(streaks_individual)

  # Set attributes
- streaks_individual['angle'].attrs['description'] = 'Wind direction estimated from local gradient for each individual solution; histograms smoothed individually'
+ streaks_individual["angle"].attrs[
+ "description"
+ ] = "Wind direction estimated from local gradient for each individual solution; histograms smoothed individually"

  return streaks_individual

  except Exception as e:
  logging.error("Error in streaks_individual: %s", e)

- streaks_individual = xr.DataArray(data=np.nan * np.ones([len(self.xr_dataset.coords[dim]) for dim in ['line', 'sample']]),
- dims=[
- 'line', 'sample'],
- coords=[self.xr_dataset.coords[dim]
- for dim in ['line', 'sample']],
- attrs={"comment": "no streaks_individual found"})
+ streaks_individual = xr.DataArray(
+ data=np.nan
+ * np.ones(
+ [len(self.xr_dataset.coords[dim]) for dim in ["line", "sample"]]
+ ),
+ dims=["line", "sample"],
+ coords=[self.xr_dataset.coords[dim] for dim in ["line", "sample"]],
+ attrs={"comment": "no streaks_individual found"},
+ )

  return streaks_individual
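Across the three `streaks_*` methods above, the histogram of gradient directions is reduced to one direction per cell by taking the argmax over the angle bins, and the ambiguity removal and convention conversion shown earlier are then applied. A minimal sketch of the peak-selection step on a toy histogram (dimension names follow the diff, the data are invented):

```python
import numpy as np
import xarray as xr

# Toy smoothed histogram: one weight per (line, sample, angles) bin
angles = np.linspace(-np.pi / 2, np.pi / 2, 8, endpoint=False)
weight = xr.DataArray(
    np.random.rand(3, 3, angles.size),
    dims=("line", "sample", "angles"),
    coords={"angles": angles},
)
hist = xr.Dataset({"weight": weight})

# Index of the strongest angle bin in each cell, then the matching angle and weight
iangle = hist["weight"].fillna(0).argmax(dim="angles")
streak_dir = hist["angles"].isel(angles=iangle)
streak_weight = hist["weight"].isel(angles=iangle)

streaks = xr.Dataset({"angle": streak_dir, "weight": streak_weight})
streaks = streaks.reset_coords("angles", drop=True)  # drop the now-redundant coordinate
print(streaks["angle"].shape)  # (3, 3)
```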