grdwindinversion 0.3.9__py3-none-any.whl → 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13,12 +13,11 @@ import datetime
  import yaml
  from scipy.ndimage import binary_dilation
  import re
- import string
- from grdwindinversion.utils import check_incidence_range, get_pol_ratio_name, timing, convert_polarization_name
- from grdwindinversion.load_config import getConf
- import logging
  import os
+ import logging

+
+ from grdwindinversion.utils import check_incidence_range, get_pol_ratio_name, timing, convert_polarization_name
  os.environ["OMP_NUM_THREADS"] = "1"
  os.environ["OPENBLAS_NUM_THREADS"] = "1"
  os.environ["MKL_NUM_THREADS"] = "1"
@@ -31,9 +30,22 @@ except:
  cv2.setNumThreads(1)


- # optional debug messages
- logger = logging.getLogger('grdwindinversion.inversion')
- logger.addHandler(logging.NullHandler())
+ root_logger = logging.getLogger("grdwindinversion.inversion")
+
+ # Sensor metadata registry
+ SENSOR_METADATA = {
+     "S1A": ("S1A", "SENTINEL-1 A", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+     "S1B": ("S1B", "SENTINEL-1 B", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+     "S1C": ("S1C", "SENTINEL-1 C", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+     "S1D": ("S1D", "SENTINEL-1 D", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+     "RS2": ("RS2", "RADARSAT-2", xsar.RadarSat2Meta, xsar.RadarSat2Dataset),
+     "RCM1": ("RCM", "RADARSAT Constellation 1", xsar.RcmMeta, xsar.RcmDataset),
+     "RCM2": ("RCM", "RADARSAT Constellation 2", xsar.RcmMeta, xsar.RcmDataset),
+     "RCM3": ("RCM", "RADARSAT Constellation 3", xsar.RcmMeta, xsar.RcmDataset),
+ }
+
+ # Mask naming convention used by xsar
+ XSAR_MASK_SUFFIX = "_mask"


  def getSensorMetaDataset(filename):
@@ -50,27 +62,14 @@ def getSensorMetaDataset(filename):
      tuple
          sensor name, sensor long name, meta function, dataset function
      """
-     if "S1A" in filename:
-         return "S1A", "SENTINEL-1 A", xsar.Sentinel1Meta, xsar.Sentinel1Dataset
-     elif "S1B" in filename:
-         return "S1B", "SENTINEL-1 B", xsar.Sentinel1Meta, xsar.Sentinel1Dataset
-     elif "S1C" in filename:
-         return "S1C", "SENTINEL-1 C", xsar.Sentinel1Meta, xsar.Sentinel1Dataset
-     elif "S1D" in filename:
-         return "S1D", "SENTINEL-1 D", xsar.Sentinel1Meta, xsar.Sentinel1Dataset
-     elif "RS2" in filename:
-         return "RS2", "RADARSAT-2", xsar.RadarSat2Meta, xsar.RadarSat2Dataset
-     elif "RCM1" in filename:
-         return "RCM", "RADARSAT Constellation 1", xsar.RcmMeta, xsar.RcmDataset
-     elif "RCM2" in filename:
-         return "RCM", "RADARSAT Constellation 2", xsar.RcmMeta, xsar.RcmDataset
-     elif "RCM3" in filename:
-         return "RCM", "RADARSAT Constellation 3", xsar.RcmMeta, xsar.RcmDataset
+     for sensor_key, sensor_info in SENSOR_METADATA.items():
+         if sensor_key in filename:
+             return sensor_info

-     else:
-         raise ValueError(
-             "must be S1A|S1B|S1C|S1D|RS2|RCM1|RCM2|RCM3, got filename %s" % filename
-         )
+     supported_sensors = "|".join(SENSOR_METADATA.keys())
+     raise ValueError(
+         f"must be {supported_sensors}, got filename {filename}"
+     )


  def getOutputName(
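The `SENSOR_METADATA` registry above replaces the previous `if`/`elif` chain with a single substring lookup. A minimal, self-contained sketch of the dispatch pattern — strings stand in for the real xsar Meta/Dataset classes, and the sample filename is taken from the code comments below:

```python
# Stand-in registry: strings replace the xsar Meta/Dataset classes used above.
SENSOR_METADATA = {
    "S1A": ("S1A", "SENTINEL-1 A", "Sentinel1Meta", "Sentinel1Dataset"),
    "RCM1": ("RCM", "RADARSAT Constellation 1", "RcmMeta", "RcmDataset"),
}

def get_sensor_info(filename):
    # The first registry key found in the filename wins.
    for sensor_key, sensor_info in SENSOR_METADATA.items():
        if sensor_key in filename:
            return sensor_info
    supported = "|".join(SENSOR_METADATA.keys())
    raise ValueError(f"must be {supported}, got filename {filename}")

print(get_sensor_info("RCM1_OK2767220_PK2769320_1_SCLND_20230930_214014_VV_VH_GRD")[1])
# -> RADARSAT Constellation 1
```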
@@ -98,20 +97,16 @@ def getOutputName(
          output filename
      """
      basename = os.path.basename(input_file)
-     basename_match = basename

-     if sensor == "S1A" or sensor == "S1B" or sensor == "S1C" or sensor == "S1D":
+     if sensor in ["S1A", "S1B", "S1C", "S1D"]:
+         # Example: S1A_IW_GRDH_1SDV_20210909T130650_20210909T130715_039605_04AE83_C34F.SAFE
          regex = re.compile(
-             "(...)_(..)_(...)(.)_(.)(.)(..)_(........T......)_(........T......)_(......)_(......)_(....).SAFE"
-         )
-         template = string.Template(
-             "${MISSIONID}_${SWATH}_${PRODUCT}${RESOLUTION}_${LEVEL}${CLASS}${POLARIZATION}_${STARTDATE}_${STOPDATE}_${ORBIT}_${TAKEID}_${PRODID}.SAFE"
+             r"(...)_(..)_(...)(.)_(.)(.)(..)_(........T......)_(........T......)_(......)_(......)_(....).SAFE"
          )
-         # S1A_IW_GRDH_1SDV_20210909T130650_20210909T130715_039605_04AE83_C34F
-         match = regex.match(basename_match)
+         match = regex.match(basename)
          if not match:
              raise AttributeError(
-                 f"S1 file {basename_match} does not match the expected pattern"
+                 f"S1 file {basename} does not match the expected pattern"
              )

          (
@@ -128,37 +123,33 @@ def getOutputName(
              TAKEID,
              PRODID,
          ) = match.groups()
-         # last two terms of polarization are removed
          new_format = f"{MISSIONID.lower()}-{SWATH.lower()}-owi-{POLARIZATION.lower()}-{STARTDATE.lower()}-{STOPDATE.lower()}-{ORBIT}-{TAKEID}.nc"
+
      elif sensor == "RS2":
+         # Example: RS2_OK141302_PK1242223_DK1208537_SCWA_20220904_093402_VV_VH_SGF
          regex = re.compile(
-             "(RS2)_OK([0-9]+)_PK([0-9]+)_DK([0-9]+)_(....)_(........)_(......)_(.._?.?.?)_(S.F)"
-         )
-         # RS2_OK141302_PK1242223_DK1208537_SCWA_20220904_093402_VV_VH_SGF
-         template = string.Template(
-             "${MISSIONID}_OK${DATA1}_PK${DATA2}_DK${DATA3}_${SWATH}_${DATE}_${TIME}_${POLARIZATION}_${LAST}"
+             r"(RS2)_OK([0-9]+)_PK([0-9]+)_DK([0-9]+)_(....)_(........)_(......)_(.._?.?.?)_(S.F)"
          )
-         match = regex.match(basename_match)
+         match = regex.match(basename)
          if not match:
              raise AttributeError(
-                 f"RC2 file {basename_match} does not match the expected pattern"
+                 f"RS2 file {basename} does not match the expected pattern"
              )

          MISSIONID, DATA1, DATA2, DATA3, SWATH, DATE, TIME, POLARIZATION, LAST = (
              match.groups()
          )
          new_format = f"{MISSIONID.lower()}-{SWATH.lower()}-owi-{convert_polarization_name(POLARIZATION)}-{meta_start_date.lower()}-{meta_stop_date.lower()}-xxxxx-xxxxx.nc"
-     elif sensor == "RCM":

+     elif sensor == "RCM":
+         # Example: RCM1_OK2767220_PK2769320_1_SCLND_20230930_214014_VV_VH_GRD
          regex = re.compile(
              r"(RCM[0-9])_OK([0-9]+)_PK([0-9]+)_([0-9]+)_([A-Z0-9]+)_(\d{8})_(\d{6})_([A-Z]{2}(?:_[A-Z]{2})?)_([A-Z]+)$"
          )
-         # RCM1_OK2767220_PK2769320_1_SCLND_20230930_214014_VV_VH_GRD
-
-         match = regex.match(basename_match)
+         match = regex.match(basename)
          if not match:
              raise AttributeError(
-                 f"RCM file {basename_match} does not match the expected pattern"
+                 f"RCM file {basename} does not match the expected pattern"
              )

          MISSIONID, DATA1, DATA2, DATA3, SWATH, DATE, TIME, POLARIZATION, PRODUCT = (
@@ -168,7 +159,8 @@ def getOutputName(

      else:
          raise ValueError(
-             "sensor must be S1A|S1B|S1C|RS2|RCM, got sensor %s" % sensor)
+             f"sensor must be S1A|S1B|S1C|S1D|RS2|RCM, got sensor {sensor}"
+         )

      if subdir:
          out_file = os.path.join(outdir, basename, new_format)
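The S1 pattern above maps a SAFE product name onto the OWI L2 naming scheme. A small runnable sketch using the example filename from the code comment (the output string mirrors the `new_format` template):

```python
import re

# S1 SAFE-name pattern from getOutputName; sample filename from the comment above.
regex = re.compile(
    r"(...)_(..)_(...)(.)_(.)(.)(..)_(........T......)_(........T......)_(......)_(......)_(....).SAFE"
)
name = "S1A_IW_GRDH_1SDV_20210909T130650_20210909T130715_039605_04AE83_C34F.SAFE"
mission, swath, _, _, _, _, pol, start, stop, orbit, takeid, _ = regex.match(name).groups()
print(f"{mission.lower()}-{swath.lower()}-owi-{pol.lower()}-{start.lower()}-{stop.lower()}-{orbit}-{takeid}.nc")
# -> s1a-iw-owi-dv-20210909t130650-20210909t130715-039605-04AE83.nc
```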
@@ -177,115 +169,327 @@ def getOutputName(
      return out_file


- def getAncillary(meta, ancillary_name="ecmwf"):
+ def addMasks_toMeta(meta: xsar.BaseMeta, conf: dict) -> dict:
      """
-     Map ancillary wind from ECMWF or ERA5.
-     This function is used to check if the model files are available and to map the model to the SAR data.
+     Add high-resolution masks (land, ice, lakes, etc.) from shapefiles to the meta object.
+
+     Configuration format:
+         masks:
+             land:
+                 - name: 'gshhsH'
+                   path: '/path/to/mask.shp'
+                 - name: 'custom_land'
+                   path: '/path/to/custom.shp'
+             ice:
+                 - name: 'ice_mask'
+                   path: '/path/to/ice.shp'
+
+     Note: xsar will automatically add the '_mask' suffix to the variable names in the dataset.
+     For example, 'gshhsH' becomes 'gshhsH_mask' in the xarray dataset.

      Parameters
      ----------
-     meta: obj `xsar.BaseMeta` (one of the supported SAR mission)
+     meta : xsar.BaseMeta
+         Metadata object to add mask features to. Must have a set_mask_feature method.
+     conf : dict
+         Configuration dictionary containing the masks definition

      Returns
      -------
      dict
-         map model to SAR data
+         Dictionary with mask categories as keys and lists of mask names as values.
+         Names are returned WITHOUT the '_mask' suffix that xsar adds internally.
+         Example: {'land': ['gshhsH', 'custom_land'], 'ice': ['ice_mask']}
+
+     Raises
+     ------
+     AttributeError
+         If the meta object doesn't have a set_mask_feature method
      """
-
-     if ancillary_name == "ecmwf":
-         logging.debug("conf: %s", getConf())
-         ec01 = getConf()["ecmwf_0100_1h"]
-         ec0125 = getConf()["ecmwf_0125_1h"]
-         logging.debug("ec01 : %s", ec01)
-         meta.set_raster("ecmwf_0100_1h", ec01)
-         meta.set_raster("ecmwf_0125_1h", ec0125)
-
-         map_model = None
-         # only keep best ecmwf (FIXME: it's hacky, and xsar should provide a better method to handle this)
-         for ecmwf_name in ["ecmwf_0125_1h", "ecmwf_0100_1h"]:
-             ecmwf_infos = meta.rasters.loc[ecmwf_name]
-             try:
-                 ecmwf_file = ecmwf_infos["get_function"](
-                     ecmwf_infos["resource"],
-                     date=datetime.datetime.strptime(
-                         meta.start_date, "%Y-%m-%d %H:%M:%S.%f"
-                     ),
-                 )[1]
-             # temporary for RCM issue https://github.com/umr-lops/xarray-safe-rcm/issues/34
-             except Exception as e:
-                 ecmwf_file = ecmwf_infos["get_function"](
-                     ecmwf_infos["resource"],
-                     date=datetime.datetime.strptime(
-                         meta.start_date, "%Y-%m-%d %H:%M:%S"
-                     ),
-                 )[1]
-             if not os.path.isfile(ecmwf_file):
-                 # temporary
-                 # if repro does not exist we look at not repro folder (only one will exist after)
-                 """
-                 if ecmwf_name == "ecmwf_0100_1h":
-                     ecmwf_infos['resource'] = ecmwf_infos['resource'].replace(
-                         "netcdf_light_REPRO_tree", "netcdf_light")
-                     try:
-                         ecmwf_file = ecmwf_infos['get_function'](ecmwf_infos['resource'],
-                                                                  date=datetime.datetime.strptime(meta.start_date,
-                                                                                                  '%Y-%m-%d %H:%M:%S.%f'))[1]
-                     except Exception as e:
-                         ecmwf_file = ecmwf_infos['get_function'](ecmwf_infos['resource'],
-                                                                  date=datetime.datetime.strptime(meta.start_date,
-                                                                                                  '%Y-%m-%d %H:%M:%S'))[1]
-
-                     if not os.path.isfile(ecmwf_file):
-                         meta.rasters = meta.rasters.drop([ecmwf_name])
+     # Validate meta object has required method
+     if not hasattr(meta, 'set_mask_feature'):
+         raise AttributeError(
+             f"Meta object of type {type(meta).__name__} must have a 'set_mask_feature' method")
+
+     masks_by_category = {}
+
+     # Check for 'masks' key
+     if "masks" in conf and isinstance(conf["masks"], dict):
+         logging.debug("Found 'masks' configuration")
+
+         for category, mask_list in conf["masks"].items():
+             if isinstance(mask_list, list):
+                 masks_by_category[category] = []
+                 for mask_item in mask_list:
+                     if isinstance(mask_item, dict) and "path" in mask_item and "name" in mask_item:
+                         mask_name = mask_item["name"]
+                         mask_path = mask_item["path"]
+                         try:
+                             logging.debug("%s path: %s", mask_name, mask_path)
+                             meta.set_mask_feature(mask_name, mask_path)
+                             logging.info(
+                                 "Mask feature '%s' set from %s", mask_name, mask_path)
+                             masks_by_category[category].append(mask_name)
+                         except (IOError, OSError, FileNotFoundError) as e:
+                             logging.error(
+                                 "Failed to load mask file '%s' from path '%s': %s",
+                                 mask_name, mask_path, str(e))
+                             logging.debug("%s", traceback.format_exc())
+                         except (ValueError, RuntimeError) as e:
+                             logging.error(
+                                 "Failed to process mask '%s': %s", mask_name, str(e))
+                             logging.debug("%s", traceback.format_exc())
                      else:
-                         map_model = {'%s_%s' % (ecmwf_name, uv): 'model_%s' % uv for uv in [
-                             'U10', 'V10']}
-
-                 else:
-                 """
-                 meta.rasters = meta.rasters.drop([ecmwf_name])
+                         logging.warning(
+                             "Invalid mask configuration in category '%s': missing 'name' or 'path' field",
+                             category)
              else:
-                 map_model = {
-                     "%s_%s" % (ecmwf_name, uv): "model_%s" % uv for uv in ["U10", "V10"]
-                 }
+                 logging.warning(
+                     "Mask category '%s' should contain a list, got %s",
+                     category, type(mask_list).__name__
+                 )

-     return map_model
+     return masks_by_category

-     elif ancillary_name == "era5":
-         era5_name = "era5_0250_1h"
-         logging.debug("conf: %s", getConf())
-         era0250 = getConf()[era5_name]
-         logging.debug("%s : %s", (era5_name, era0250))
-         meta.set_raster(era5_name, era0250)

-         era5_infos = meta.rasters.loc[era5_name]
-         try:
-             era5_file = era5_infos["get_function"](
-                 era5_infos["resource"],
-                 date=datetime.datetime.strptime(
-                     meta.start_date, "%Y-%m-%d %H:%M:%S.%f"
-                 ),
-             )[1]
-         except Exception as e:
-             era5_file = era5_infos["get_function"](
-                 era5_infos["resource"],
-                 date=datetime.datetime.strptime(
-                     meta.start_date, "%Y-%m-%d %H:%M:%S"),
-             )[1]
-         if not os.path.isfile(era5_file):
-             raise ValueError(f"era5 file {era5_file} not found")
-
-         map_model = {
-             "%s_%s" % (era5_name, uv): "model_%s" % uv for uv in ["U10", "V10"]
-         }
-         return map_model
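The `masks` configuration consumed by `addMasks_toMeta` follows the YAML layout in its docstring. A small sketch with a stubbed meta object (the stub class and paths are illustrative, not part of the package):

```python
# Stub standing in for an xsar meta object; only set_mask_feature is mimicked.
class StubMeta:
    def __init__(self):
        self.features = {}

    def set_mask_feature(self, name, path):
        self.features[name] = path

conf = {
    "masks": {
        "land": [{"name": "gshhsH", "path": "/path/to/mask.shp"}],
        "ice": [{"name": "ice_mask", "path": "/path/to/ice.shp"}],
    }
}

meta = StubMeta()
for category, mask_list in conf["masks"].items():
    for item in mask_list:
        meta.set_mask_feature(item["name"], item["path"])

# addMasks_toMeta would return {'land': ['gshhsH'], 'ice': ['ice_mask']};
# xsar then exposes the variables as 'gshhsH_mask' and 'ice_mask_mask'.
print(meta.features)
```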
+ def mergeLandMasks(xr_dataset: xr.Dataset, land_mask_names: list) -> xr.Dataset:
+     """
+     Merge multiple land masks into the main land_mask variable.
+
+     This function takes all individual land masks added via addMasks_toMeta() and combines
+     them using a logical OR operation to create a unified land mask that covers
+     all land areas from all sources.
+
+     Parameters
+     ----------
+     xr_dataset : xr.Dataset
+         Dataset containing individual land mask variables. Must contain a 'land_mask' variable.
+     land_mask_names : list of str
+         Names of the land mask variables to merge (WITHOUT the '_mask' suffix).
+         For example: ['gshhsH', 'custom_land'].
+         These names will have XSAR_MASK_SUFFIX automatically appended to match
+         the variable names in the dataset.
+
+     Returns
+     -------
+     xr.Dataset
+         The input dataset with its land_mask variable updated by merging all specified masks.
+         Note: The dataset is modified in place AND returned for convenience.
+
+     Raises
+     ------
+     ValueError
+         If the 'land_mask' variable is not present in the dataset
+     """
+     # Validate that land_mask exists in the dataset
+     if "land_mask" not in xr_dataset:
+         raise ValueError(
+             "Dataset must contain a 'land_mask' variable. "
+             f"Available variables: {list(xr_dataset.data_vars.keys())}")

+     if not land_mask_names:
+         logging.debug("No additional land masks to merge")
+         return xr_dataset
+
+     logging.info("Merging %d land masks: %s", len(
+         land_mask_names), land_mask_names)
+
+     # Start with the default land_mask from xsar
+     merged_mask = xr_dataset["land_mask"].values.astype("uint8")
+
+     # Merge all configured land masks
+     for mask_name in land_mask_names:
+         # xsar adds XSAR_MASK_SUFFIX to mask names in the dataset
+         dataset_mask_name = f"{mask_name}{XSAR_MASK_SUFFIX}"
+
+         if dataset_mask_name in xr_dataset:
+             logging.info("Merging mask '%s' into land_mask", dataset_mask_name)
+             mask_values = xr_dataset[dataset_mask_name].values.astype("uint8")
+             # Logical OR: any pixel marked as land (1) in any mask becomes land
+             merged_mask = np.maximum(merged_mask, mask_values)
+         else:
+             logging.warning(
+                 "Mask '%s' not found in dataset, skipping", dataset_mask_name)
+
+     # Update the main land_mask
+     xr_dataset.land_mask.values = merged_mask
+     logging.info("Land masks merged")
+
+     return xr_dataset
+
+
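The merge above is an element-wise OR over 0/1 masks, implemented with `np.maximum`. A tiny sketch with made-up arrays:

```python
import numpy as np

# Two uint8 land masks (1 = land); values are illustrative.
base = np.array([[0, 1], [0, 0]], dtype="uint8")
extra = np.array([[1, 0], [0, 0]], dtype="uint8")

# np.maximum acts as a logical OR on 0/1 masks.
merged = np.maximum(base, extra)
print(merged)  # [[1 1]
               #  [0 0]]
```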
+ def processLandMask(xr_dataset, dilation_iterations=3, merged_masks=None):
+     """
+     Process land mask to create a 3-level mask system with coastal zone detection.
+
+     This function:
+     1. Takes the original land_mask (merged from all configured sources)
+     2. Applies binary dilation to detect coastal zones
+     3. Creates a 3-level land_mask:
+        - 0 = ocean (water far from coast)
+        - 1 = coastal (zone between original mask and dilated mask)
+        - 2 = land (original land mask)
+
+     Parameters
+     ----------
+     xr_dataset : xarray.Dataset
+         Dataset containing the land_mask variable
+     dilation_iterations : int, optional
+         Number of dilation iterations to define coastal zone width (default: 3)
+     merged_masks : list of str, optional
+         Names of masks that were merged into land_mask (for history tracking)
+
+     Returns
+     -------
+     None
+         Modifies xr_dataset.land_mask in place
+     """
+     logging.info("Processing land mask and adding a coastal zone")
+
+     # Store original land mask (2 = land)
+     original_land_mask = xr_dataset["land_mask"].values.astype("uint8")
+
+     # Apply dilation to create coastal zone
+     dilated_mask = binary_dilation(
+         original_land_mask,
+         structure=np.ones((3, 3), np.uint8),
+         iterations=dilation_iterations,
+     )
+
+     # Create 3-level mask
+     # Start with all zeros (ocean)
+     three_level_mask = np.zeros_like(original_land_mask, dtype="uint8")
+
+     # Mark land areas (2)
+     three_level_mask[original_land_mask == 1] = 2
+
+     # Mark coastal areas (1) - dilated area minus original land
+     coastal_zone = (dilated_mask == 1) & (original_land_mask == 0)
+     three_level_mask[coastal_zone] = 1
+
+     # Update the land_mask with 3-level system
+     xr_dataset.land_mask.values = three_level_mask
+
+     # Update attributes
+     xr_dataset.land_mask.attrs["long_name"] = "Land mask with coastal zone"
+     xr_dataset.land_mask.attrs["valid_range"] = np.array([0, 2])
+     xr_dataset.land_mask.attrs["flag_values"] = np.array([0, 1, 2])
+     xr_dataset.land_mask.attrs["flag_meanings"] = "ocean coastal land"
+     xr_dataset.land_mask.attrs["meaning"] = "0: ocean, 1: coastal, 2: land"
+
+     # Append to history instead of replacing
+     existing_history = xr_dataset.land_mask.attrs.get("history", "")
+
+     # Build history message
+     if merged_masks:
+         merge_info = f"merged with {', '.join(merged_masks)}; "
      else:
+         merge_info = ""
+
+     new_history = f"{merge_info}3-level land mask with coastal zone detection via binary dilation"
+
+     if existing_history:
+         xr_dataset.land_mask.attrs["history"] = existing_history + \
+             "; " + new_history
+     else:
+         xr_dataset.land_mask.attrs["history"] = new_history
+
+
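The coastal zone above is the ring produced by dilating the land mask and subtracting the original land pixels. A runnable sketch on a toy array (sizes and iteration count are illustrative):

```python
import numpy as np
from scipy.ndimage import binary_dilation

land = np.zeros((5, 5), dtype="uint8")
land[2, 2] = 1  # a single land pixel

# Dilate land to get land plus a coastal ring around it.
dilated = binary_dilation(land, structure=np.ones((3, 3), np.uint8), iterations=1)

mask = np.zeros_like(land)               # 0 = ocean
mask[land == 1] = 2                      # 2 = land
mask[(dilated == 1) & (land == 0)] = 1   # 1 = coastal ring
print(mask)
```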
+ def getAncillary(meta, ancillary_name, conf):
+     """
+     Map ancillary wind from "ecmwf" or "era5" or other sources.
+     This function is used to check if the model files are available and to map the model to the SAR data.
+     This function uses, in priority order, the first available model listed in the config file.
+
+     Parameters
+     ----------
+     meta: obj `xsar.BaseMeta` (one of the supported SAR missions)
+     ancillary_name: str
+         Name of the ancillary source (ecmwf or era5)
+     conf: dict
+         Configuration dictionary containing ancillary_sources
+
+     Returns
+     -------
+     tuple
+         (map_model, metadata) where:
+         - map_model (dict): mapping of model variables to SAR data
+         - metadata (dict): ancillary metadata with 'ancillary_source_model' and 'ancillary_source_path' keys
+     """
+     logging.debug("conf: %s", conf)
+     if 'ancillary_sources' not in conf:
+         raise ValueError("Configuration must contain 'ancillary_sources'")
+
+     if ancillary_name not in conf['ancillary_sources']:
+         raise ValueError(
+             f"Configuration 'ancillary_sources' must contain '{ancillary_name}'")
+
+     if ancillary_name not in ["ecmwf", "era5"]:
+         logging.warning("We advise using either ecmwf or era5.")
+
+     ancillary_sources = conf['ancillary_sources'][ancillary_name]
+     if not ancillary_sources:
          raise ValueError(
-             "ancillary_name must be ecmwf/era5, got %s" % ancillary_name)
+             f"At least one {ancillary_name} model must be configured in ancillary_sources")
+
+     map_model = None
+     selected_name = None
+     selected_path = None
+     tried_names = []
+
+     # Loop through models in config order to find the first one that exists
+     for source in ancillary_sources:
+         model_name = source['name']
+         model_path = source['path']
+         logging.debug("%s : %s", model_name, model_path)
+
+         # Set raster to check if file exists
+         meta.set_raster(model_name, model_path)
+         tried_names.append(model_name)
+
+         model_info = meta.rasters.loc[model_name]
+
+         model_file = model_info["get_function"](
+             model_info["resource"],
+             date=datetime.datetime.strptime(
+                 meta.start_date, "%Y-%m-%d %H:%M:%S.%f"
+             ),
+         )[1]
+
+         if os.path.isfile(model_file):
+             # File exists! This is our selection
+             selected_name = model_name
+             selected_path = model_file
+             map_model = {
+                 "%s_%s" % (selected_name, uv): "model_%s" % uv for uv in ["U10", "V10"]
+             }
+             # Log selection
+             if len(ancillary_sources) > 1:
+                 logging.info(
+                     f"Multiple {ancillary_name} models configured. Using {selected_name} (following priority order)")
+             else:
+                 logging.info(
+                     f"Only one {ancillary_name} model configured: using {selected_name}")
+             break
+
+     # Clean up: remove all tried models EXCEPT the selected one
+     if selected_name is not None:
+         for name in tried_names:
+             if name != selected_name:
+                 meta.rasters = meta.rasters.drop([name])
+
+     # Prepare metadata for traceability
+     ancillary_metadata = None
+     if selected_name is not None:
+         ancillary_metadata = {
+             'ancillary_source_model': selected_name,
+             'ancillary_source_path': selected_path
+         }
+
+     return map_model, ancillary_metadata


- @timing(logger=logger.debug)
+ @timing(logger=root_logger.debug)
  def inverse_dsig_wspd(
      dual_pol,
      inc,
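`getAncillary` now walks `ancillary_sources` in config order and keeps the first model whose file exists on disk. A hypothetical configuration fragment and the corresponding return values (model names and path templates are placeholders, not the package's defaults):

```python
# Hypothetical config: the first listed model has priority.
conf = {
    "ancillary_sources": {
        "ecmwf": [
            {"name": "ecmwf_0100_1h", "path": "/data/ecmwf/0100_1h/%Y/%j/model_%Y%m%d%H%M.nc"},
            {"name": "ecmwf_0125_1h", "path": "/data/ecmwf/0125_1h/%Y/%j/model_%Y%m%d%H%M.nc"},
        ]
    }
}

# On success, getAncillary(meta, "ecmwf", conf) would return, e.g.:
# map_model = {"ecmwf_0100_1h_U10": "model_U10", "ecmwf_0100_1h_V10": "model_V10"}
# ancillary_metadata = {"ancillary_source_model": "ecmwf_0100_1h",
#                       "ancillary_source_path": "/data/ecmwf/0100_1h/.../model_202109091300.nc"}
```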
@@ -315,7 +519,7 @@ def inverse_dsig_wspd(
          ancillary wind
          | (for example ecmwf winds), in **ANTENNA convention**,
      nesz_cr: xarray.DataArray
-         noise equivalent sigma0 | flattened or not
+         noise equivalent sigma0 | flattened or not
      dsig_cr_name: str
          dsig_cr name
      model_co: str
@@ -325,11 +529,11 @@ def inverse_dsig_wspd(

      Returns
      -------
-     xarray.DataArray
+     xarray.DataArray
          inverted wind in copol in ** antenna convention** .
-     xarray.DataArray
+     xarray.DataArray
          inverted wind in dualpol in ** antenna convention** .
-     xarray.DataArray
+     xarray.DataArray
          inverted wind in crosspol in ** antenna convention** .
      xarray.DataArray | array
          alpha (ponderation between co and crosspol)
@@ -372,7 +576,7 @@ def inverse_dsig_wspd(
      return wind_co, None, None, None


- @timing(logger=logger.debug)
+ @timing(logger=root_logger.debug)
  def inverse(
      dual_pol,
      inc,
@@ -411,11 +615,11 @@ def inverse(

      Returns
      -------
-     xarray.DataArray
+     xarray.DataArray
          inverted wind in copol in ** antenna convention** .
-     xarray.DataArray
+     xarray.DataArray
          inverted wind in dualpol in ** antenna convention** .
-     xarray.DataArray
+     xarray.DataArray
          inverted wind in crosspol in ** antenna convention** .

      See Also
@@ -467,7 +671,7 @@ def inverse(
      return wind_co, None, None


- @timing(logger=logger.debug)
+ @timing(logger=root_logger.debug)
  def makeL2asOwi(xr_dataset, config):
      """
      Rename xr_dataset variables and attributes to match naming convention.
@@ -727,7 +931,9 @@ def makeL2asOwi(xr_dataset, config):
              "sigma0_raw",
              "ancillary_wind",
              "nesz",
-             "spatial_ref",
+             "model_U10",
+             "model_V10"
+
          ]
      )
      if "sigma0_raw__corrected" in xr_dataset:
@@ -797,6 +1003,11 @@ def preprocess(
      if os.path.exists(config_path):
          with open(config_path, "r") as file:
              config_base = yaml.load(file, Loader=yaml.FullLoader)
+
+     # Validate configuration structure
+     from grdwindinversion.utils import test_config
+     test_config(config_base)
+
      try:
          # check if sensor is in the config
          config = config_base[sensor]
@@ -809,6 +1020,9 @@ def preprocess(
      recalibration = config["recalibration"]
      meta = fct_meta(filename)

+     # Add masks to meta if configured (land, ice, lakes, etc.)
+     masks_by_category = addMasks_toMeta(meta, config_base)
+
      # if any of the polarizations is not VV VH HH HV, do nothing
      if not all([pol in ["VV", "VH", "HH", "HV"] for pol in meta.pols.split(" ")]):
          raise ValueError(f"Unsupported polarization: meta.pols = {meta.pols}")
@@ -825,7 +1039,7 @@ def preprocess(
          winddir_convention = config_base["winddir_convention"]
      else:
          winddir_convention = "meteorological"
-         logging.warning(
+         logging.info(
              f'Using meteorological convention because "winddir_convention" was not found in config.'
          )
      config["winddir_convention"] = winddir_convention
@@ -834,17 +1048,17 @@ def preprocess(
          add_gradientsfeatures = config_base["add_gradientsfeatures"]
      else:
          add_gradientsfeatures = False
-         logging.warning(f"Not computing gradients by default")
+         logging.info("Not computing gradients by default")
      config["add_gradientsfeatures"] = add_gradientsfeatures

      if "add_nrcs_model" in config_base:
          add_nrcs_model = config_base["add_nrcs_model"]
          add_nrcs_model = False
-         logging.warning(
+         logging.info(
              f"Force add_nrcs_model to be false, before fixing an issue")
      else:
          add_nrcs_model = False
-         logging.warning(f"Not computing nrcs from model by default")
+         logging.info("Not computing nrcs from model by default")
      config["add_nrcs_model"] = add_nrcs_model

      # creating a dictionary of parameters
@@ -878,11 +1092,15 @@ def preprocess(
          raise FileExistsError("outfile %s already exists" % out_file)

      ancillary_name = config["ancillary"]
-     map_model = getAncillary(meta, ancillary_name)
+     map_model, ancillary_metadata = getAncillary(
+         meta, ancillary_name, config_base)
      if map_model is None:
          raise Exception(
              f"the weather model is not set `map_model` is None -> you probably don't have access to {ancillary_name} archive"
          )
+     if ancillary_metadata is None:
+         raise Exception(
+             "ancillary_metadata must be defined. There is an error in the getAncillary function")

      try:
          logging.info(f"recalibration = {recalibration}")
@@ -932,7 +1150,6 @@ def preprocess(
      config["fct_dataset"] = fct_dataset
      config["map_model"] = map_model

-     # load
      xr_dataset = xr_dataset.load()

      # defining dual_pol, and gmfs by channel
@@ -948,10 +1165,12 @@ def preprocess(
          crosspol_gmf = "VH"
      else:
          logging.warning(
-             "for now this processor does not support entirely HH+HV acquisitions\n "
+             "inversion_rules warning : for now this processor does not support entirely HH+HV acquisitions\n "
              "it won't crash but it will use HH+VH GMF for wind inversion -> wrong hypothesis\n "
              "!! dual WIND SPEED IS NOT USABLE !! But co WIND SPEED IS USABLE !!"
          )
+         config["return_status"] = 99
+
          copol = "HH"
          crosspol = "HV"
          copol_gmf = "HH"
@@ -996,15 +1215,14 @@ def preprocess(
      config["dsig_cr_step"] = dsig_cr_step
      config["dsig_cr_name"] = dsig_cr_name
      config["apply_flattening"] = apply_flattening
-
      # need to load LUTs before inversion
      nc_luts = [x for x in [model_co, model_cross] if x.startswith("nc_lut")]

      if len(nc_luts) > 0:
-         windspeed.register_nc_luts(getConf()["nc_luts_path"])
+         windspeed.register_nc_luts(config_base["nc_luts_path"])

      if model_co == "gmf_cmod7":
-         windspeed.register_cmod7(getConf()["lut_cmod7_path"])
+         windspeed.register_cmod7(config_base["lut_cmod7_path"])
      #  Step 2 - clean and prepare dataset

      # variables to not keep in the L2
@@ -1057,33 +1275,39 @@ def preprocess(
      xr_dataset.elevation.attrs["standard_name"] = "elevation"

      # offboresight
-     # TOREMOVE
-     if "offboresight" in xr_dataset:
-         xr_dataset.offboresight.attrs["units"] = "degrees"
-         xr_dataset.offboresight.attrs["long_name"] = (
-             "Offboresight angle at wind cell center"
-         )
-         xr_dataset.elevation.attrs["standard_name"] = "offboresight"
-
-     # masks (no ice / no_valid)
-     xr_dataset.land_mask.values = binary_dilation(
-         xr_dataset["land_mask"].values.astype("uint8"),
-         structure=np.ones((3, 3), np.uint8),
-         iterations=3,
+     xr_dataset.offboresight.attrs["units"] = "degrees"
+     xr_dataset.offboresight.attrs["long_name"] = (
+         "Offboresight angle at wind cell center"
      )
-     xr_dataset.land_mask.attrs["long_name"] = "Mask of data"
-     xr_dataset.land_mask.attrs["valid_range"] = np.array([0, 1])
-     xr_dataset.land_mask.attrs["flag_values"] = np.array([0, 1])
-     xr_dataset.land_mask.attrs["flag_meanings"] = "valid no_valid"
+     xr_dataset.offboresight.attrs["standard_name"] = "offboresight"

-     logging.debug("mask is a copy of land_mask")
+     # merge land masks
+     land_mask_strategy = config_base.get("LAND_MASK_STRATEGY", "merge")
+     logging.info(f"land_mask_strategy = {land_mask_strategy}")
+
+     # Store masks_by_category in config for later cleanup
+     config["masks_by_category"] = masks_by_category
+
+     merged_land_masks = None
+     if land_mask_strategy == "merge" and "land" in masks_by_category:
+         mergeLandMasks(xr_dataset, masks_by_category["land"])
+         merged_land_masks = masks_by_category["land"]

+     # Process land mask with coastal zone detection (3-level system)
+     # 0 = ocean, 1 = coastal, 2 = land
+     processLandMask(xr_dataset, dilation_iterations=3,
+                     merged_masks=merged_land_masks)
+
+     # Create main mask from land_mask
+     # For now, mask uses the same values as land_mask
+     # Can be extended later to include ice (value 3) and other categories
+     logging.debug("mask is a copy of land_mask")
      xr_dataset["mask"] = xr.DataArray(xr_dataset.land_mask)
      xr_dataset.mask.attrs = {}
      xr_dataset.mask.attrs["long_name"] = "Mask of data"
      xr_dataset.mask.attrs["valid_range"] = np.array([0, 3])
      xr_dataset.mask.attrs["flag_values"] = np.array([0, 1, 2, 3])
-     xr_dataset.mask.attrs["flag_meanings"] = "valid land ice no_valid"
+     xr_dataset.mask.attrs["flag_meanings"] = "ocean coastal land ice"

      # ancillary
      xr_dataset["ancillary_wind_direction"] = (
@@ -1091,8 +1315,9 @@ def preprocess(
              xr_dataset.model_U10)) + 180
      ) % 360

+     # Keep ocean (0) and coastal (1) zones for ancillary wind
      xr_dataset["ancillary_wind_direction"] = xr.where(
-         xr_dataset["mask"], np.nan, xr_dataset["ancillary_wind_direction"]
+         xr_dataset["mask"] >= 2, np.nan, xr_dataset["ancillary_wind_direction"]
      ).transpose(*xr_dataset["ancillary_wind_direction"].dims)
      xr_dataset["ancillary_wind_direction"].attrs = {}
      xr_dataset["ancillary_wind_direction"].attrs["units"] = "degrees_north"
@@ -1105,7 +1330,7 @@ def preprocess(
          xr_dataset["model_U10"] ** 2 + xr_dataset["model_V10"] ** 2
      )
      xr_dataset["ancillary_wind_speed"] = xr.where(
-         xr_dataset["mask"], np.nan, xr_dataset["ancillary_wind_speed"]
+         xr_dataset["mask"] >= 2, np.nan, xr_dataset["ancillary_wind_speed"]
      ).transpose(*xr_dataset["ancillary_wind_speed"].dims)
      xr_dataset["ancillary_wind_speed"].attrs = {}
      xr_dataset["ancillary_wind_speed"].attrs["units"] = "m s^-1"
@@ -1115,7 +1340,7 @@ def preprocess(
      xr_dataset["ancillary_wind_speed"].attrs["standart_name"] = "wind_speed"

      xr_dataset["ancillary_wind"] = xr.where(
-         xr_dataset["mask"],
+         xr_dataset["mask"] >= 2,
          np.nan,
          (
              xr_dataset.ancillary_wind_speed
@@ -1127,15 +1352,24 @@ def preprocess(
              )
          ),
      ).transpose(*xr_dataset["ancillary_wind_speed"].dims)
+     xr_dataset["ancillary_wind"].attrs = {}
+     xr_dataset["ancillary_wind"].attrs["long_name"] = f"{ancillary_name} wind in complex form for inversion"
+     xr_dataset["ancillary_wind"].attrs[
+         "description"] = "Complex wind (speed * exp(i*direction)) in antenna convention for GMF inversion"

-     xr_dataset.attrs["ancillary_source"] = (
-         xr_dataset["model_U10"].attrs["history"].split("decoded: ")[1].strip()
-     )
-     xr_dataset = xr_dataset.drop_vars(["model_U10", "model_V10"])
+     # Add ancillary metadata to model variables
+
+     for attr_key, attr_value in ancillary_metadata.items():
+         for var_name in ['model_U10', 'model_V10', 'ancillary_wind_speed', 'ancillary_wind_direction', 'ancillary_wind']:
+             if var_name in xr_dataset:
+                 xr_dataset[var_name].attrs[attr_key] = attr_value
+
+         xr_dataset.attrs[attr_key] = attr_value

      # nrcs processing
+     # Keep ocean (0) and coastal (1) zones, mask out land (2) and ice (3)
      xr_dataset["sigma0_ocean"] = xr.where(
-         xr_dataset["mask"], np.nan, xr_dataset["sigma0"]
+         xr_dataset["mask"] >= 2, np.nan, xr_dataset["sigma0"]
      ).transpose(*xr_dataset["sigma0"].dims)
      xr_dataset["sigma0_ocean"].attrs = xr_dataset["sigma0"].attrs
      #  we forced it to 1e-15
@@ -1143,19 +1377,21 @@ def preprocess(
          "comment"
      ] = "clipped, no values <=0 ; 1e-15 instead"

-     # rajout d'un mask pour les valeurs <=0:
+     xr_dataset["sigma0_ocean"] = xr.where(
+         xr_dataset["sigma0_ocean"] <= 0, 1e-15, xr_dataset["sigma0_ocean"]
+     )
+
+     # add a mask for values <=0:
      xr_dataset["sigma0_mask"] = xr.where(
          xr_dataset["sigma0_ocean"] <= 0, 1, 0
      ).transpose(*xr_dataset["sigma0"].dims)
      xr_dataset.sigma0_mask.attrs["valid_range"] = np.array([0, 1])
      xr_dataset.sigma0_mask.attrs["flag_values"] = np.array([0, 1])
      xr_dataset.sigma0_mask.attrs["flag_meanings"] = "valid no_valid"
-     xr_dataset["sigma0_ocean"] = xr.where(
-         xr_dataset["sigma0_ocean"] <= 0, 1e-15, xr_dataset["sigma0_ocean"]
-     )

+     # Keep ocean (0) and coastal (1) zones for sigma0_ocean_raw too
      xr_dataset["sigma0_ocean_raw"] = xr.where(
-         xr_dataset["mask"], np.nan, xr_dataset["sigma0_raw"]
+         xr_dataset["mask"] >= 2, np.nan, xr_dataset["sigma0_raw"]
      ).transpose(*xr_dataset["sigma0_raw"].dims)

      xr_dataset["sigma0_ocean_raw"].attrs = xr_dataset["sigma0_raw"].attrs
@@ -1166,12 +1402,26 @@ def preprocess(
      # processing
      if dual_pol:
-
          xr_dataset['sigma0_detrend_cross'] = xsarsea.sigma0_detrend(
              xr_dataset.sigma0.sel(pol=crosspol), xr_dataset.incidence, model=model_cross)

-         xr_dataset = xr_dataset.assign(nesz_cross_flattened=(
-             ['line', 'sample'], windspeed.nesz_flattening(xr_dataset.nesz.sel(pol=crosspol), xr_dataset.incidence).data))
+         try:
+             xr_dataset = xr_dataset.assign(nesz_cross_flattened=(
+                 ['line', 'sample'], windspeed.nesz_flattening(xr_dataset.nesz.sel(pol=crosspol), xr_dataset.incidence).data))
+         except Exception as e:
+             if apply_flattening:
+                 # error
+                 logging.error("Error during NESZ flattening computation")
+                 logging.info("%s", traceback.format_exc())
+                 raise e
+             else:
+                 # replace with nans
+                 logging.warning("nesz_flattening warning => Error during NESZ flattening computation, but apply_flattening is False, \
+                     so continuing without nesz_cross_flattened and replacing it with NaNs\n \
+                     The error probably comes from NaNs in the incidence angle")
+                 config["return_status"] = 99
+                 xr_dataset = xr_dataset.assign(nesz_cross_flattened=(
+                     ['line', 'sample'], np.full(xr_dataset.nesz.sel(pol=crosspol).shape, np.nan)))

          xr_dataset['nesz_cross_flattened'].attrs[
              "comment"] = 'nesz has been flattened using windspeed.nesz_flattening'
@@ -1228,7 +1478,7 @@ def preprocess(

      for idx, gmf_name in enumerate(gmf_names):

-         @timing(logger=logger.info)
+         @timing(logger=root_logger.info)
          def apply_lut_to_dataset():
              lut = xsarsea.windspeed.get_model(
                  gmf_name).to_lut(unit="linear")
@@ -1319,13 +1569,13 @@ def process_gradients(xr_dataset, config):

      xr_dataset_100["sigma0_detrend"] = sigma0_detrend_combined

-     xr_dataset_100.land_mask.values = binary_dilation(
-         xr_dataset_100["land_mask"].values.astype("uint8"),
-         structure=np.ones((3, 3), np.uint8),
-         iterations=3,
-     )
+     # Process land mask with coastal zone detection (3-level system)
+     processLandMask(xr_dataset_100, dilation_iterations=3)
+
+     # Mask sigma0_detrend where land_mask >= 2 (land and ice)
+     # Keep ocean (0) and coastal (1) zones
      xr_dataset_100["sigma0_detrend"] = xr.where(
-         xr_dataset_100["land_mask"], np.nan, xr_dataset_100["sigma0"]
+         xr_dataset_100["land_mask"] >= 2, np.nan, xr_dataset_100["sigma0"]
      ).transpose(*xr_dataset_100["sigma0"].dims)

      xr_dataset_100["ancillary_wind"] = (
@@ -1361,15 +1611,15 @@ def process_gradients(xr_dataset, config):
              }
          )
      else:
-         logger.warn(
-             "'longitude' not found in streaks_indiv : there is probably an error"
+         root_logger.warning(
+             "process_gradients warning : 'longitude' not found in streaks_indiv : there is probably an error"
          )
          xr_dataset_streaks = None

      return xr_dataset, xr_dataset_streaks


- @timing(logger=logger.info)
+ @timing(logger=root_logger.info)
  def makeL2(
      filename, outdir, config_path, overwrite=False, generateCSV=True, resolution="1000m"
  ):
@@ -1404,6 +1654,19 @@ def makeL2(
          filename, outdir, config_path, overwrite, resolution
      )

+     # Drop only masks added from config (not internal masks like sigma0_mask, owiMask_Nrcs)
+     masks_by_category = config.get("masks_by_category", {})
+     masks_to_drop = []
+     for category, mask_list in masks_by_category.items():
+         masks_to_drop.extend(mask_list)
+
+     # Only drop masks that actually exist in the dataset (with XSAR suffix)
+     vars_to_drop = [
+         m + XSAR_MASK_SUFFIX for m in masks_to_drop if (m + XSAR_MASK_SUFFIX) in xr_dataset.data_vars]
+     if vars_to_drop:
+         logging.info(f"Dropping external masks of dataset: {vars_to_drop}")
+         xr_dataset = xr_dataset.drop_vars(vars_to_drop)
+
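The cleanup above removes only the config-declared masks, re-deriving the dataset variable names by appending `XSAR_MASK_SUFFIX`. A small sketch with illustrative names:

```python
XSAR_MASK_SUFFIX = "_mask"
masks_by_category = {"land": ["gshhsH"], "ice": ["ice_mask"]}
data_vars = {"gshhsH_mask", "ice_mask_mask", "sigma0", "sigma0_mask"}

masks_to_drop = [m for masks in masks_by_category.values() for m in masks]
vars_to_drop = [m + XSAR_MASK_SUFFIX
                for m in masks_to_drop
                if (m + XSAR_MASK_SUFFIX) in data_vars]
print(vars_to_drop)  # ['gshhsH_mask', 'ice_mask_mask']; sigma0_mask is untouched
```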
      if config["add_gradientsfeatures"]:
          xr_dataset, xr_dataset_streaks = process_gradients(xr_dataset, config)
      else:
@@ -1441,14 +1704,19 @@ def makeL2(
          "resolution": config.pop("resolution", None),
      }

+     config["return_status"] = 0  # default value SUCCESS
      logging.info("Checking incidence range within LUTS incidence range")
-     #  warning if incidence is out of lut incidence range
      inc_check_co, inc_check_cross = check_incidence_range(
          xr_dataset["incidence"], [model_co, model_cross], **kwargs
      )
+
+     if not inc_check_co or not inc_check_cross:
+         config["return_status"] = 99
+
      if dsig_cr_step == "nrcs":
-         logging.info(
-             "dsig_cr_step is nrcs : polarization are mixed at cost function step")
+         if dual_pol:
+             logging.info(
+                 "dsig_cr_step is nrcs : polarizations are mixed at the cost function step")
          wind_co, wind_dual, windspeed_cr = inverse(
              dual_pol,
              inc=xr_dataset["incidence"],
@@ -1461,13 +1729,17 @@ def makeL2(
              **kwargs,
          )
      elif dsig_cr_step == "wspd":
-         logging.info(
-             "dsig_cr_step is wspd : polarization are mixed at winds speed step")
+         if dual_pol:
+             logging.info(
+                 "dsig_cr_step is wspd : polarizations are mixed at the wind speed step")

-         if apply_flattening:
-             nesz_cross = xr_dataset["nesz_cross_flattened"]
+         if dual_pol:
+             if apply_flattening:
+                 nesz_cross = xr_dataset["nesz_cross_flattened"]
+             else:
+                 nesz_cross = xr_dataset.nesz.sel(pol=crosspol)
          else:
-             nesz_cross = xr_dataset.nesz.sel(pol=crosspol)
+             nesz_cross = None

          wind_co, wind_dual, windspeed_cr, alpha = inverse_dsig_wspd(
              dual_pol,
@@ -1481,10 +1753,12 @@ def makeL2(
              model_cross=model_cross,
              **kwargs
          )
-         xr_dataset["alpha"] = xr.DataArray(
-             data=alpha, dims=xr_dataset["incidence"].dims, coords=xr_dataset["incidence"].coords)
-         xr_dataset["alpha"].attrs["apply_flattening"] = str(apply_flattening)
-         xr_dataset["alpha"].attrs["comments"] = "alpha used to ponderate copol and crosspol. this ponderation is done will combining wind speeds."
+         if dual_pol and alpha is not None:
+             xr_dataset["alpha"] = xr.DataArray(
+                 data=alpha, dims=xr_dataset["incidence"].dims, coords=xr_dataset["incidence"].coords)
+             xr_dataset["alpha"].attrs["apply_flattening"] = str(
+                 apply_flattening)
+             xr_dataset["alpha"].attrs["comments"] = "alpha used to weight copol and crosspol; this weighting is applied when combining wind speeds."

      else:
          raise ValueError(
@@ -1615,13 +1889,15 @@ def makeL2(
          "wnf_3km_average": "False",
          "owiWindSpeedSrc": "owiWindSpeed",
          "owiWindDirectionSrc": "/",
-         "ancillary_source": xr_dataset.attrs["ancillary_source"],
+         "ancillary_source_model": xr_dataset.attrs["ancillary_source_model"],
+         "ancillary_source_path": xr_dataset.attrs["ancillary_source_path"],
          "winddir_convention": config["winddir_convention"],
          "incidence_within_lut_copol_incidence_range": str(inc_check_co),
          "incidence_within_lut_crosspol_incidence_range": str(inc_check_cross),
          "swath": xr_dataset.attrs["swath"],
          "footprint": xr_dataset.attrs["footprint"],
          "coverage": xr_dataset.attrs["coverage"],
+         "cross_antimeridian": str(config["meta"].cross_antimeridian)
      }

      for recalib_attrs in ["aux_pp1_recal", "aux_pp1", "aux_cal_recal", "aux_cal"]:
@@ -1679,7 +1955,10 @@ def makeL2(

      logging.info("OK for %s ", os.path.basename(filename))

-     return out_file, xr_dataset
+     if config["add_gradientsfeatures"] and xr_dataset_streaks is None:
+         config["return_status"] = 99
+
+     return out_file, xr_dataset, config["return_status"]


  def transform_winddir(wind_cpx, ground_heading, winddir_convention="meteorological"):
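`makeL2` now returns a third element, `return_status` (0 on success, 99 when a non-fatal condition occurred, such as an out-of-range incidence, a failed NESZ flattening, or a missing streaks dataset). A hedged usage sketch; the import path is assumed from the logger name `grdwindinversion.inversion`, and all paths are placeholders:

```python
# Assumed import path; adjust if the module layout differs.
from grdwindinversion.inversion import makeL2

filename = "/path/to/S1A_..._C34F.SAFE"                   # placeholder input product
outdir, config_path = "/tmp/out", "/path/to/config.yaml"  # placeholders

out_file, ds, status = makeL2(filename, outdir, config_path, overwrite=True)
if status == 0:
    print(f"L2 product written to {out_file}")
else:
    # 99 flags a non-fatal warning condition; the product was still written.
    print(f"completed with warnings (status={status}): {out_file}")
```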
@@ -1719,6 +1998,7 @@ def transform_winddir(wind_cpx, ground_heading, winddir_convention="meteorologic
          logging.warning(
              f"wind direction convention {winddir_convention} is not supported, using meteorological",
          )
+
          long_name = "Wind direction in meteorological convention (clockwise, from), ex: 0°=from north, 90°=from east"

          dataArray = xsarsea.dir_to_360(dataArray)