grdwindinversion 0.3.9__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -13,12 +13,12 @@ import datetime
 import yaml
 from scipy.ndimage import binary_dilation
 import re
+import os
+import logging
 import string
+
 from grdwindinversion.utils import check_incidence_range, get_pol_ratio_name, timing, convert_polarization_name
 from grdwindinversion.load_config import getConf
-import logging
-import os
-
 os.environ["OMP_NUM_THREADS"] = "1"
 os.environ["OPENBLAS_NUM_THREADS"] = "1"
 os.environ["MKL_NUM_THREADS"] = "1"
@@ -31,9 +31,22 @@ except:
     cv2.setNumThreads(1)


-# optional debug messages
-logger = logging.getLogger('grdwindinversion.inversion')
-logger.addHandler(logging.NullHandler())
+root_logger = logging.getLogger("grdwindinversion.inversion")
+
+# Sensor metadata registry
+SENSOR_METADATA = {
+    "S1A": ("S1A", "SENTINEL-1 A", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+    "S1B": ("S1B", "SENTINEL-1 B", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+    "S1C": ("S1C", "SENTINEL-1 C", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+    "S1D": ("S1D", "SENTINEL-1 D", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+    "RS2": ("RS2", "RADARSAT-2", xsar.RadarSat2Meta, xsar.RadarSat2Dataset),
+    "RCM1": ("RCM", "RADARSAT Constellation 1", xsar.RcmMeta, xsar.RcmDataset),
+    "RCM2": ("RCM", "RADARSAT Constellation 2", xsar.RcmMeta, xsar.RcmDataset),
+    "RCM3": ("RCM", "RADARSAT Constellation 3", xsar.RcmMeta, xsar.RcmDataset),
+}
+
+# Mask naming convention used by xsar
+XSAR_MASK_SUFFIX = "_mask"


 def getSensorMetaDataset(filename):
@@ -50,27 +63,14 @@ def getSensorMetaDataset(filename):
     tuple
         sensor name, sensor long name, meta function, dataset function
     """
-    if "S1A" in filename:
-        return "S1A", "SENTINEL-1 A", xsar.Sentinel1Meta, xsar.Sentinel1Dataset
-    elif "S1B" in filename:
-        return "S1B", "SENTINEL-1 B", xsar.Sentinel1Meta, xsar.Sentinel1Dataset
-    elif "S1C" in filename:
-        return "S1C", "SENTINEL-1 C", xsar.Sentinel1Meta, xsar.Sentinel1Dataset
-    elif "S1D" in filename:
-        return "S1D", "SENTINEL-1 D", xsar.Sentinel1Meta, xsar.Sentinel1Dataset
-    elif "RS2" in filename:
-        return "RS2", "RADARSAT-2", xsar.RadarSat2Meta, xsar.RadarSat2Dataset
-    elif "RCM1" in filename:
-        return "RCM", "RADARSAT Constellation 1", xsar.RcmMeta, xsar.RcmDataset
-    elif "RCM2" in filename:
-        return "RCM", "RADARSAT Constellation 2", xsar.RcmMeta, xsar.RcmDataset
-    elif "RCM3" in filename:
-        return "RCM", "RADARSAT Constellation 3", xsar.RcmMeta, xsar.RcmDataset
+    for sensor_key, sensor_info in SENSOR_METADATA.items():
+        if sensor_key in filename:
+            return sensor_info

-    else:
-        raise ValueError(
-            "must be S1A|S1B|S1C|S1D|RS2|RCM1|RCM2|RCM3, got filename %s" % filename
-        )
+    supported_sensors = "|".join(SENSOR_METADATA.keys())
+    raise ValueError(
+        f"must be {supported_sensors}, got filename {filename}"
+    )


 def getOutputName(
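A minimal standalone sketch (not part of the package) of how the registry-based lookup above resolves a product name: the first `SENSOR_METADATA` key found as a substring of the filename wins. Class objects are replaced by strings here so the snippet runs without xsar installed.

```python
# Trimmed registry; the real one maps to xsar Meta/Dataset classes.
SENSOR_METADATA = {
    "S1A": ("S1A", "SENTINEL-1 A", "Sentinel1Meta", "Sentinel1Dataset"),
    "RCM1": ("RCM", "RADARSAT Constellation 1", "RcmMeta", "RcmDataset"),
}

def getSensorMetaDataset(filename):
    # First registry key that appears in the product name wins.
    for sensor_key, sensor_info in SENSOR_METADATA.items():
        if sensor_key in filename:
            return sensor_info
    raise ValueError(f"must be {'|'.join(SENSOR_METADATA)}, got filename {filename}")

name, long_name, meta_cls, ds_cls = getSensorMetaDataset(
    "S1A_IW_GRDH_1SDV_20210909T130650_20210909T130715_039605_04AE83_C34F.SAFE")
print(name, long_name)  # -> S1A SENTINEL-1 A
```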
@@ -98,20 +98,16 @@ def getOutputName(
         output filename
     """
     basename = os.path.basename(input_file)
-    basename_match = basename

-    if sensor == "S1A" or sensor == "S1B" or sensor == "S1C" or sensor == "S1D":
+    if sensor in ["S1A", "S1B", "S1C", "S1D"]:
+        # Example: S1A_IW_GRDH_1SDV_20210909T130650_20210909T130715_039605_04AE83_C34F.SAFE
         regex = re.compile(
-            "(...)_(..)_(...)(.)_(.)(.)(..)_(........T......)_(........T......)_(......)_(......)_(....).SAFE"
-        )
-        template = string.Template(
-            "${MISSIONID}_${SWATH}_${PRODUCT}${RESOLUTION}_${LEVEL}${CLASS}${POLARIZATION}_${STARTDATE}_${STOPDATE}_${ORBIT}_${TAKEID}_${PRODID}.SAFE"
+            r"(...)_(..)_(...)(.)_(.)(.)(..)_(........T......)_(........T......)_(......)_(......)_(....).SAFE"
         )
-        # S1A_IW_GRDH_1SDV_20210909T130650_20210909T130715_039605_04AE83_C34F
-        match = regex.match(basename_match)
+        match = regex.match(basename)
         if not match:
             raise AttributeError(
-                f"S1 file {basename_match} does not match the expected pattern"
+                f"S1 file {basename} does not match the expected pattern"
             )

         (
@@ -128,37 +124,33 @@ def getOutputName(
             TAKEID,
             PRODID,
         ) = match.groups()
-        # last two terms of polarization are removed
         new_format = f"{MISSIONID.lower()}-{SWATH.lower()}-owi-{POLARIZATION.lower()}-{STARTDATE.lower()}-{STOPDATE.lower()}-{ORBIT}-{TAKEID}.nc"
+
     elif sensor == "RS2":
+        # Example: RS2_OK141302_PK1242223_DK1208537_SCWA_20220904_093402_VV_VH_SGF
         regex = re.compile(
-            "(RS2)_OK([0-9]+)_PK([0-9]+)_DK([0-9]+)_(....)_(........)_(......)_(.._?.?.?)_(S.F)"
-        )
-        # RS2_OK141302_PK1242223_DK1208537_SCWA_20220904_093402_VV_VH_SGF
-        template = string.Template(
-            "${MISSIONID}_OK${DATA1}_PK${DATA2}_DK${DATA3}_${SWATH}_${DATE}_${TIME}_${POLARIZATION}_${LAST}"
+            r"(RS2)_OK([0-9]+)_PK([0-9]+)_DK([0-9]+)_(....)_(........)_(......)_(.._?.?.?)_(S.F)"
         )
-        match = regex.match(basename_match)
+        match = regex.match(basename)
         if not match:
             raise AttributeError(
-                f"RC2 file {basename_match} does not match the expected pattern"
+                f"RS2 file {basename} does not match the expected pattern"
             )

         MISSIONID, DATA1, DATA2, DATA3, SWATH, DATE, TIME, POLARIZATION, LAST = (
             match.groups()
         )
         new_format = f"{MISSIONID.lower()}-{SWATH.lower()}-owi-{convert_polarization_name(POLARIZATION)}-{meta_start_date.lower()}-{meta_stop_date.lower()}-xxxxx-xxxxx.nc"
-    elif sensor == "RCM":

+    elif sensor == "RCM":
+        # Example: RCM1_OK2767220_PK2769320_1_SCLND_20230930_214014_VV_VH_GRD
         regex = re.compile(
             r"(RCM[0-9])_OK([0-9]+)_PK([0-9]+)_([0-9]+)_([A-Z0-9]+)_(\d{8})_(\d{6})_([A-Z]{2}(?:_[A-Z]{2})?)_([A-Z]+)$"
         )
-        # RCM1_OK2767220_PK2769320_1_SCLND_20230930_214014_VV_VH_GRD
-
-        match = regex.match(basename_match)
+        match = regex.match(basename)
         if not match:
             raise AttributeError(
-                f"RCM file {basename_match} does not match the expected pattern"
+                f"RCM file {basename} does not match the expected pattern"
            )

         MISSIONID, DATA1, DATA2, DATA3, SWATH, DATE, TIME, POLARIZATION, PRODUCT = (
@@ -168,7 +160,8 @@ def getOutputName(

     else:
         raise ValueError(
-            "sensor must be S1A|S1B|S1C|RS2|RCM, got sensor %s" % sensor)
+            f"sensor must be S1A|S1B|S1C|S1D|RS2|RCM, got sensor {sensor}"
+        )

     if subdir:
         out_file = os.path.join(outdir, basename, new_format)
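A quick sketch (standalone, not package code) of the S1 naming rule that `getOutputName` implements: the regex groups map positionally onto the SAFE name fields, and the owi NetCDF name is rebuilt from a subset of them.

```python
import re

safe = "S1A_IW_GRDH_1SDV_20210909T130650_20210909T130715_039605_04AE83_C34F.SAFE"
regex = re.compile(
    r"(...)_(..)_(...)(.)_(.)(.)(..)_(........T......)_(........T......)_(......)_(......)_(....).SAFE")
(MISSIONID, SWATH, PRODUCT, RESOLUTION, LEVEL, CLASS, POLARIZATION,
 STARTDATE, STOPDATE, ORBIT, TAKEID, PRODID) = regex.match(safe).groups()
print(f"{MISSIONID.lower()}-{SWATH.lower()}-owi-{POLARIZATION.lower()}-"
      f"{STARTDATE.lower()}-{STOPDATE.lower()}-{ORBIT}-{TAKEID}.nc")
# -> s1a-iw-owi-dv-20210909t130650-20210909t130715-039605-04AE83.nc
```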
@@ -177,115 +170,325 @@ def getOutputName(
     return out_file


-def getAncillary(meta, ancillary_name="ecmwf"):
+def addMasks_toMeta(meta: xsar.BaseMeta) -> dict:
     """
-    Map ancillary wind from ECMWF or ERA5.
-    This function is used to check if the model files are available and to map the model to the SAR data.
+    Add high-resolution masks (land, ice, lakes, etc.) from shapefiles to the meta object.
+
+    Configuration format:
+        masks:
+            land:
+                - name: 'gshhsH'
+                  path: '/path/to/mask.shp'
+                - name: 'custom_land'
+                  path: '/path/to/custom.shp'
+            ice:
+                - name: 'ice_mask'
+                  path: '/path/to/ice.shp'
+
+    Note: xsar will automatically add the '_mask' suffix to the variable names in the dataset.
+    For example, 'gshhsH' becomes 'gshhsH_mask' in the xarray dataset.

     Parameters
     ----------
-    meta: obj `xsar.BaseMeta` (one of the supported SAR mission)
+    meta : xsar.BaseMeta
+        Metadata object to add mask features to. Must have a set_mask_feature method.

     Returns
     -------
     dict
-        map model to SAR data
+        Dictionary with mask categories as keys and lists of mask names as values.
+        Names are returned WITHOUT the '_mask' suffix that xsar adds internally.
+        Example: {'land': ['gshhsH', 'custom_land'], 'ice': ['ice_mask']}
+
+    Raises
+    ------
+    AttributeError
+        If the meta object doesn't have a set_mask_feature method
     """
-
-    if ancillary_name == "ecmwf":
-        logging.debug("conf: %s", getConf())
-        ec01 = getConf()["ecmwf_0100_1h"]
-        ec0125 = getConf()["ecmwf_0125_1h"]
-        logging.debug("ec01 : %s", ec01)
-        meta.set_raster("ecmwf_0100_1h", ec01)
-        meta.set_raster("ecmwf_0125_1h", ec0125)
-
-        map_model = None
-        # only keep best ecmwf (FIXME: it's hacky, and xsar should provide a better method to handle this)
-        for ecmwf_name in ["ecmwf_0125_1h", "ecmwf_0100_1h"]:
-            ecmwf_infos = meta.rasters.loc[ecmwf_name]
-            try:
-                ecmwf_file = ecmwf_infos["get_function"](
-                    ecmwf_infos["resource"],
-                    date=datetime.datetime.strptime(
-                        meta.start_date, "%Y-%m-%d %H:%M:%S.%f"
-                    ),
-                )[1]
-            # temporary for RCM issue https://github.com/umr-lops/xarray-safe-rcm/issues/34
-            except Exception as e:
-                ecmwf_file = ecmwf_infos["get_function"](
-                    ecmwf_infos["resource"],
-                    date=datetime.datetime.strptime(
-                        meta.start_date, "%Y-%m-%d %H:%M:%S"
-                    ),
-                )[1]
-            if not os.path.isfile(ecmwf_file):
-                # temporary
-                # if repro does not exist we look at not repro folder (only one will exist after)
-                """
-                if ecmwf_name == "ecmwf_0100_1h":
-                    ecmwf_infos['resource'] = ecmwf_infos['resource'].replace(
-                        "netcdf_light_REPRO_tree", "netcdf_light")
-                    try:
-                        ecmwf_file = ecmwf_infos['get_function'](ecmwf_infos['resource'],
-                                                                 date=datetime.datetime.strptime(meta.start_date,
-                                                                                                 '%Y-%m-%d %H:%M:%S.%f'))[1]
-                    except Exception as e:
-                        ecmwf_file = ecmwf_infos['get_function'](ecmwf_infos['resource'],
-                                                                 date=datetime.datetime.strptime(meta.start_date,
-                                                                                                 '%Y-%m-%d %H:%M:%S'))[1]
-
-                    if not os.path.isfile(ecmwf_file):
-                        meta.rasters = meta.rasters.drop([ecmwf_name])
+    # Validate meta object has required method
+    if not hasattr(meta, 'set_mask_feature'):
+        raise AttributeError(
+            f"Meta object of type {type(meta).__name__} must have a 'set_mask_feature' method")
+
+    conf = getConf()
+    masks_by_category = {}
+
+    # Check for 'masks' key
+    if "masks" in conf and isinstance(conf["masks"], dict):
+        logging.debug("Found 'masks' configuration")
+
+        for category, mask_list in conf["masks"].items():
+            if isinstance(mask_list, list):
+                masks_by_category[category] = []
+                for mask_item in mask_list:
+                    if isinstance(mask_item, dict) and "path" in mask_item and "name" in mask_item:
+                        mask_name = mask_item["name"]
+                        mask_path = mask_item["path"]
+                        try:
+                            logging.debug("%s path: %s", mask_name, mask_path)
+                            meta.set_mask_feature(mask_name, mask_path)
+                            logging.info(
+                                "Mask feature '%s' set from %s", mask_name, mask_path)
+                            masks_by_category[category].append(mask_name)
+                        except (IOError, OSError, FileNotFoundError) as e:
+                            logging.error(
+                                "Failed to load mask file '%s' from path '%s': %s",
+                                mask_name, mask_path, str(e))
+                            logging.debug("%s", traceback.format_exc())
+                        except (ValueError, RuntimeError) as e:
+                            logging.error(
+                                "Failed to process mask '%s': %s", mask_name, str(e))
+                            logging.debug("%s", traceback.format_exc())
                     else:
-                        map_model = {'%s_%s' % (ecmwf_name, uv): 'model_%s' % uv for uv in [
-                            'U10', 'V10']}
-
-                else:
-                """
-                meta.rasters = meta.rasters.drop([ecmwf_name])
+                        logging.warning(
+                            "Invalid mask configuration in category '%s': missing 'name' or 'path' field",
+                            category)
             else:
-                map_model = {
-                    "%s_%s" % (ecmwf_name, uv): "model_%s" % uv for uv in ["U10", "V10"]
-                }
+                logging.warning(
+                    "Mask category '%s' should contain a list, got %s",
+                    category, type(mask_list).__name__
+                )

-        return map_model
+    return masks_by_category

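A sketch (paths hypothetical) of the `masks` configuration block that `addMasks_toMeta` reads via `getConf()`, and of what the function returns:

```python
conf = {
    "masks": {
        "land": [
            {"name": "gshhsH", "path": "/path/to/gshhs_h.shp"},      # hypothetical path
            {"name": "custom_land", "path": "/path/to/custom.shp"},  # hypothetical path
        ],
        "ice": [
            {"name": "ice_mask", "path": "/path/to/ice.shp"},        # hypothetical path
        ],
    }
}
# With such a config, addMasks_toMeta(meta) calls
#   meta.set_mask_feature("gshhsH", "/path/to/gshhs_h.shp"), etc.
# and returns the names grouped by category, WITHOUT the '_mask' suffix:
#   {'land': ['gshhsH', 'custom_land'], 'ice': ['ice_mask']}
```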
-    elif ancillary_name == "era5":
-        era5_name = "era5_0250_1h"
-        logging.debug("conf: %s", getConf())
-        era0250 = getConf()[era5_name]
-        logging.debug("%s : %s", (era5_name, era0250))
-        meta.set_raster(era5_name, era0250)

-        era5_infos = meta.rasters.loc[era5_name]
-        try:
-            era5_file = era5_infos["get_function"](
-                era5_infos["resource"],
-                date=datetime.datetime.strptime(
-                    meta.start_date, "%Y-%m-%d %H:%M:%S.%f"
-                ),
-            )[1]
-        except Exception as e:
-            era5_file = era5_infos["get_function"](
-                era5_infos["resource"],
-                date=datetime.datetime.strptime(
-                    meta.start_date, "%Y-%m-%d %H:%M:%S"),
-            )[1]
-        if not os.path.isfile(era5_file):
-            raise ValueError(f"era5 file {era5_file} not found")
-
-        map_model = {
-            "%s_%s" % (era5_name, uv): "model_%s" % uv for uv in ["U10", "V10"]
-        }
-        return map_model
+def mergeLandMasks(xr_dataset: xr.Dataset, land_mask_names: list) -> xr.Dataset:
+    """
+    Merge multiple land masks into the main land_mask variable.
+
+    This function takes all individual land masks added via addMasks_toMeta() and combines
+    them using a logical OR operation to create a unified land mask that covers
+    all land areas from all sources.
+
+    Parameters
+    ----------
+    xr_dataset : xr.Dataset
+        Dataset containing individual land mask variables. Must contain a 'land_mask' variable.
+    land_mask_names : list of str
+        Names of the land mask variables to merge (WITHOUT the '_mask' suffix).
+        For example: ['gshhsH', 'custom_land'].
+        These names will have XSAR_MASK_SUFFIX automatically appended to match
+        the variable names in the dataset.
+
+    Returns
+    -------
+    xr.Dataset
+        The input dataset with its land_mask variable updated by merging all specified masks.
+        Note: The dataset is modified in place AND returned for convenience.
+
+    Raises
+    ------
+    ValueError
+        If the 'land_mask' variable is not present in the dataset
+    """
+    # Validate that land_mask exists in the dataset
+    if "land_mask" not in xr_dataset:
+        raise ValueError(
+            "Dataset must contain a 'land_mask' variable. "
+            f"Available variables: {list(xr_dataset.data_vars.keys())}")
+
+    if not land_mask_names:
+        logging.debug("No additional land masks to merge")
+        return xr_dataset
+
+    logging.info("Merging %d land masks: %s", len(
+        land_mask_names), land_mask_names)
+
+    # Start with the default land_mask from xsar
+    merged_mask = xr_dataset["land_mask"].values.astype("uint8")
+
+    # Merge all configured land masks
+    for mask_name in land_mask_names:
+        # xsar adds XSAR_MASK_SUFFIX to mask names in the dataset
+        dataset_mask_name = f"{mask_name}{XSAR_MASK_SUFFIX}"
+
+        if dataset_mask_name in xr_dataset:
+            logging.info("Merging mask '%s' into land_mask", dataset_mask_name)
+            mask_values = xr_dataset[dataset_mask_name].values.astype("uint8")
+            # Logical OR: any pixel marked as land (1) in any mask becomes land
+            merged_mask = np.maximum(merged_mask, mask_values)
+        else:
+            logging.warning(
+                "Mask '%s' not found in dataset, skipping", dataset_mask_name)
+
+    # Update the main land_mask
+    xr_dataset.land_mask.values = merged_mask
+    logging.info("Land masks merged")
+
+    return xr_dataset
+
+
+def processLandMask(xr_dataset, dilation_iterations=3, merged_masks=None):
+    """
+    Process the land mask to create a 3-level mask system with coastal zone detection.
+
+    This function:
+    1. Takes the original land_mask (merged from all configured sources)
+    2. Applies binary dilation to detect coastal zones
+    3. Creates a 3-level land_mask:
+       - 0 = ocean (water far from coast)
+       - 1 = coastal (zone between original mask and dilated mask)
+       - 2 = land (original land mask)
+
+    Parameters
+    ----------
+    xr_dataset : xarray.Dataset
+        Dataset containing the land_mask variable
+    dilation_iterations : int, optional
+        Number of dilation iterations defining the coastal zone width (default: 3)
+    merged_masks : list of str, optional
+        Names of masks that were merged into land_mask (for history tracking)
+
+    Returns
+    -------
+    None
+        Modifies xr_dataset.land_mask in place
+    """
+    logging.info("Processing land mask and adding a coastal zone")
+
+    # Store original land mask (2 = land)
+    original_land_mask = xr_dataset["land_mask"].values.astype("uint8")
+
+    # Apply dilation to create coastal zone
+    dilated_mask = binary_dilation(
+        original_land_mask,
+        structure=np.ones((3, 3), np.uint8),
+        iterations=dilation_iterations,
+    )
+
+    # Create 3-level mask
+    # Start with all zeros (ocean)
+    three_level_mask = np.zeros_like(original_land_mask, dtype="uint8")
+
+    # Mark land areas (2)
+    three_level_mask[original_land_mask == 1] = 2
+
+    # Mark coastal areas (1) - dilated area minus original land
+    coastal_zone = (dilated_mask == 1) & (original_land_mask == 0)
+    three_level_mask[coastal_zone] = 1
+
+    # Update the land_mask with the 3-level system
+    xr_dataset.land_mask.values = three_level_mask
+
+    # Update attributes
+    xr_dataset.land_mask.attrs["long_name"] = "Land mask with coastal zone"
+    xr_dataset.land_mask.attrs["valid_range"] = np.array([0, 2])
+    xr_dataset.land_mask.attrs["flag_values"] = np.array([0, 1, 2])
+    xr_dataset.land_mask.attrs["flag_meanings"] = "ocean coastal land"
+    xr_dataset.land_mask.attrs["meaning"] = "0: ocean, 1: coastal, 2: land"

+    # Append to history instead of replacing
+    existing_history = xr_dataset.land_mask.attrs.get("history", "")
+
+    # Build history message
+    if merged_masks:
+        merge_info = f"merged with {', '.join(merged_masks)}; "
     else:
+        merge_info = ""
+
+    new_history = f"{merge_info}3-level land mask with coastal zone detection via binary dilation"
+
+    if existing_history:
+        xr_dataset.land_mask.attrs["history"] = existing_history + \
+            "; " + new_history
+    else:
+        xr_dataset.land_mask.attrs["history"] = new_history
+
+
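A standalone toy run (not in the package) of the two steps above: OR-merge several binary land masks as `mergeLandMasks` does, then build the 3-level mask (0 = ocean, 1 = coastal, 2 = land) via binary dilation as `processLandMask` does. One dilation iteration is used here to fit the tiny array; the package uses 3.

```python
import numpy as np
from scipy.ndimage import binary_dilation

base = np.zeros((5, 5), np.uint8)
base[0, 0] = 1                      # land pixel from the default xsar mask
extra = np.zeros((5, 5), np.uint8)
extra[4, 4] = 1                     # land pixel from an extra configured shapefile

merged = np.maximum(base, extra)    # logical OR, as in mergeLandMasks

dilated = binary_dilation(merged, structure=np.ones((3, 3), np.uint8), iterations=1)
three_level = np.zeros_like(merged)
three_level[merged == 1] = 2                      # land
three_level[(dilated == 1) & (merged == 0)] = 1   # coastal ring around land
print(three_level)
```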
+def getAncillary(meta, ancillary_name="ecmwf"):
+    """
+    Map ancillary wind from "ecmwf", "era5", or another configured source.
+    This function checks that the model files are available and maps the model onto the SAR data.
+    It uses, with priority, the first model listed in the config file.
+
+    Parameters
+    ----------
+    meta: obj `xsar.BaseMeta` (one of the supported SAR missions)
+    ancillary_name: str
+        Name of the ancillary source (ecmwf or era5)
+
+    Returns
+    -------
+    tuple
+        (map_model, metadata) where:
+        - map_model (dict): mapping of model variables to SAR data
+        - metadata (dict): ancillary metadata with 'source' and 'source_path' keys
+    """
+    logging.debug("conf: %s", getConf())
+    conf = getConf()
+    if 'ancillary_sources' not in conf:
+        raise ValueError("Configuration must contain 'ancillary_sources'")
+
+    if ancillary_name not in conf['ancillary_sources']:
+        raise ValueError(
+            f"Configuration 'ancillary_sources' must contain '{ancillary_name}'")
+
+    if ancillary_name not in ["ecmwf", "era5"]:
+        logging.warning("We advise using either ecmwf or era5.")
+
+    ancillary_sources = conf['ancillary_sources'][ancillary_name]
+    if not ancillary_sources:
         raise ValueError(
-            "ancillary_name must be ecmwf/era5, got %s" % ancillary_name)
+            f"At least one ancillary model {ancillary_name} must be configured in ancillary_sources")
+
+    map_model = None
+    selected_name = None
+    selected_path = None
+    tried_names = []
+
+    # Loop through models in config order to find the first one that exists
+    for source in ancillary_sources:
+        model_name = source['name']
+        model_path = source['path']
+        logging.debug("%s : %s", model_name, model_path)
+
+        # Set raster to check if the file exists
+        meta.set_raster(model_name, model_path)
+        tried_names.append(model_name)
+
+        model_info = meta.rasters.loc[model_name]
+
+        model_file = model_info["get_function"](
+            model_info["resource"],
+            date=datetime.datetime.strptime(
+                meta.start_date, "%Y-%m-%d %H:%M:%S.%f"
+            ),
+        )[1]
+
+        if os.path.isfile(model_file):
+            # The file exists: this is our selection
+            selected_name = model_name
+            selected_path = model_file
+            map_model = {
+                "%s_%s" % (selected_name, uv): "model_%s" % uv for uv in ["U10", "V10"]
+            }
+            # Log selection
+            if len(ancillary_sources) > 1:
+                logging.info(
+                    f"Multiple {ancillary_name} models configured. Using {selected_name} (priority order)")
+            else:
+                logging.info(
+                    f"Only one {ancillary_name} model configured: using {selected_name}")
+            break
+
+    # Clean up: remove all tried models EXCEPT the selected one
+    if selected_name is not None:
+        for name in tried_names:
+            if name != selected_name:
+                meta.rasters = meta.rasters.drop([name])
+
+    # Prepare metadata for traceability
+    ancillary_metadata = None
+    if selected_name is not None:
+        ancillary_metadata = {
+            'ancillary_source_model': selected_name,
+            'ancillary_source_path': selected_path
+        }
+
+    return map_model, ancillary_metadata


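A sketch (model names and paths hypothetical) of the new `ancillary_sources` config block that `getAncillary` iterates over. The first entry whose resolved file exists on disk wins; the other tried rasters are dropped from `meta.rasters`.

```python
conf = {
    "ancillary_sources": {
        "ecmwf": [
            {"name": "ecmwf_0100_1h", "path": "/path/to/ecmwf_0100/..."},  # tried first
            {"name": "ecmwf_0125_1h", "path": "/path/to/ecmwf_0125/..."},  # fallback
        ]
    }
}
# map_model, meta_info = getAncillary(meta, "ecmwf")
# If the first model's file exists:
#   map_model -> {"ecmwf_0100_1h_U10": "model_U10", "ecmwf_0100_1h_V10": "model_V10"}
#   meta_info -> {"ancillary_source_model": "ecmwf_0100_1h",
#                 "ancillary_source_path": "<resolved file>"}
```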
-@timing(logger=logger.debug)
+@timing(logger=root_logger.debug)
 def inverse_dsig_wspd(
     dual_pol,
     inc,
@@ -315,7 +518,7 @@ def inverse_dsig_wspd(
         ancillary wind
             | (for example ecmwf winds), in **ANTENNA convention**,
     nesz_cr: xarray.DataArray
-        noise equivalent sigma0 | flattened or not
+        noise equivalent sigma0 | flattened or not
     dsig_cr_name: str
         dsig_cr name
     model_co: str
325
528
 
326
529
  Returns
327
530
  -------
328
- xarray.DataArray
531
+ xarray.DataArray
329
532
  inverted wind in copol in ** antenna convention** .
330
- xarray.DataArray
533
+ xarray.DataArray
331
534
  inverted wind in dualpol in ** antenna convention** .
332
- xarray.DataArray
535
+ xarray.DataArray
333
536
  inverted wind in crosspol in ** antenna convention** .
334
537
  xarray.DataArray | array
335
538
  alpha (ponderation between co and crosspol)
@@ -372,7 +575,7 @@ def inverse_dsig_wspd(
372
575
  return wind_co, None, None, None
373
576
 
374
577
 
375
- @timing(logger=logger.debug)
578
+ @timing(logger=root_logger.debug)
376
579
  def inverse(
377
580
  dual_pol,
378
581
  inc,
@@ -411,11 +614,11 @@ def inverse(
411
614
 
412
615
  Returns
413
616
  -------
414
- xarray.DataArray
617
+ xarray.DataArray
415
618
  inverted wind in copol in ** antenna convention** .
416
- xarray.DataArray
619
+ xarray.DataArray
417
620
  inverted wind in dualpol in ** antenna convention** .
418
- xarray.DataArray
621
+ xarray.DataArray
419
622
  inverted wind in crosspol in ** antenna convention** .
420
623
 
421
624
  See Also
@@ -467,7 +670,7 @@ def inverse(
467
670
  return wind_co, None, None
468
671
 
469
672
 
470
- @timing(logger=logger.debug)
673
+ @timing(logger=root_logger.debug)
471
674
  def makeL2asOwi(xr_dataset, config):
472
675
  """
473
676
  Rename xr_dataset variables and attributes to match naming convention.
@@ -727,7 +930,9 @@ def makeL2asOwi(xr_dataset, config):
727
930
  "sigma0_raw",
728
931
  "ancillary_wind",
729
932
  "nesz",
730
- "spatial_ref",
933
+ "model_U10",
934
+ "model_V10"
935
+
731
936
  ]
732
937
  )
733
938
  if "sigma0_raw__corrected" in xr_dataset:
@@ -809,6 +1014,9 @@ def preprocess(
809
1014
  recalibration = config["recalibration"]
810
1015
  meta = fct_meta(filename)
811
1016
 
1017
+ # Add masks to meta if configured (land, ice, lakes, etc.)
1018
+ masks_by_category = addMasks_toMeta(meta)
1019
+
812
1020
  # si une des deux n'est pas VV VH HH HV on ne fait rien
813
1021
  if not all([pol in ["VV", "VH", "HH", "HV"] for pol in meta.pols.split(" ")]):
814
1022
  raise ValueError(f"Polarisation non gérée : meta.pols = {meta.pols}")
@@ -825,7 +1033,7 @@ def preprocess(
825
1033
  winddir_convention = config_base["winddir_convention"]
826
1034
  else:
827
1035
  winddir_convention = "meteorological"
828
- logging.warning(
1036
+ logging.info(
829
1037
  f'Using meteorological convention because "winddir_convention" was not found in config.'
830
1038
  )
831
1039
  config["winddir_convention"] = winddir_convention
@@ -834,17 +1042,17 @@ def preprocess(
834
1042
  add_gradientsfeatures = config_base["add_gradientsfeatures"]
835
1043
  else:
836
1044
  add_gradientsfeatures = False
837
- logging.warning(f"Not computing gradients by default")
1045
+ logging.info(f"Not computing gradients by default")
838
1046
  config["add_gradientsfeatures"] = add_gradientsfeatures
839
1047
 
840
1048
  if "add_nrcs_model" in config_base:
841
1049
  add_nrcs_model = config_base["add_nrcs_model"]
842
1050
  add_nrcs_model = False
843
- logging.warning(
1051
+ logging.info(
844
1052
  f"Force add_nrcs_model to be false, before fixing an issue")
845
1053
  else:
846
1054
  add_nrcs_model = False
847
- logging.warning(f"Not computing nrcs from model by default")
1055
+ logging.info(f"Not computing nrcs from model by default")
848
1056
  config["add_nrcs_model"] = add_nrcs_model
849
1057
 
850
1058
  # creating a dictionnary of parameters
@@ -878,11 +1086,14 @@ def preprocess(
878
1086
  raise FileExistsError("outfile %s already exists" % out_file)
879
1087
 
880
1088
  ancillary_name = config["ancillary"]
881
- map_model = getAncillary(meta, ancillary_name)
1089
+ map_model, ancillary_metadata = getAncillary(meta, ancillary_name)
882
1090
  if map_model is None:
883
1091
  raise Exception(
884
1092
  f"the weather model is not set `map_model` is None -> you probably don't have access to {ancillary_name} archive"
885
1093
  )
1094
+ if ancillary_metadata is None:
1095
+ raise Exception(
1096
+ f"ancillary_metadata must be defined. There is an error in getAncillary function")
886
1097
 
887
1098
  try:
888
1099
  logging.info(f"recalibration = {recalibration}")
@@ -932,7 +1143,6 @@ def preprocess(
932
1143
  config["fct_dataset"] = fct_dataset
933
1144
  config["map_model"] = map_model
934
1145
 
935
- # load
936
1146
  xr_dataset = xr_dataset.load()
937
1147
 
938
1148
  # defining dual_pol, and gmfs by channel
@@ -948,10 +1158,12 @@ def preprocess(
948
1158
  crosspol_gmf = "VH"
949
1159
  else:
950
1160
  logging.warning(
951
- "for now this processor does not support entirely HH+HV acquisitions\n "
1161
+ "inversion_rules warning : for now this processor does not support entirely HH+HV acquisitions\n "
952
1162
  "it wont crash but it will use HH+VH GMF for wind inversion -> wrong hypothesis\n "
953
1163
  "!! dual WIND SPEED IS NOT USABLE !! But co WIND SPEED IS USABLE !!"
954
1164
  )
1165
+ config["return_status"] = 99
1166
+
955
1167
  copol = "HH"
956
1168
  crosspol = "HV"
957
1169
  copol_gmf = "HH"
@@ -996,7 +1208,6 @@ def preprocess(
996
1208
  config["dsig_cr_step"] = dsig_cr_step
997
1209
  config["dsig_cr_name"] = dsig_cr_name
998
1210
  config["apply_flattening"] = apply_flattening
999
-
1000
1211
  # need to load LUTs before inversion
1001
1212
  nc_luts = [x for x in [model_co, model_cross] if x.startswith("nc_lut")]
1002
1213
 
@@ -1057,33 +1268,41 @@ def preprocess(
1057
1268
  xr_dataset.elevation.attrs["standard_name"] = "elevation"
1058
1269
 
1059
1270
  # offboresight
1060
- # TOREMOVE
1061
- if "offboresight" in xr_dataset:
1062
- xr_dataset.offboresight.attrs["units"] = "degrees"
1063
- xr_dataset.offboresight.attrs["long_name"] = (
1064
- "Offboresight angle at wind cell center"
1065
- )
1066
- xr_dataset.elevation.attrs["standard_name"] = "offboresight"
1067
-
1068
- # masks (no ice / no_valid)
1069
- xr_dataset.land_mask.values = binary_dilation(
1070
- xr_dataset["land_mask"].values.astype("uint8"),
1071
- structure=np.ones((3, 3), np.uint8),
1072
- iterations=3,
1271
+ xr_dataset.offboresight.attrs["units"] = "degrees"
1272
+ xr_dataset.offboresight.attrs["long_name"] = (
1273
+ "Offboresight angle at wind cell center"
1073
1274
  )
1074
- xr_dataset.land_mask.attrs["long_name"] = "Mask of data"
1075
- xr_dataset.land_mask.attrs["valid_range"] = np.array([0, 1])
1076
- xr_dataset.land_mask.attrs["flag_values"] = np.array([0, 1])
1077
- xr_dataset.land_mask.attrs["flag_meanings"] = "valid no_valid"
1275
+ xr_dataset.offboresight.attrs["standard_name"] = "offboresight"
1276
+
1277
+ # merge land masks
1278
+ conf = getConf()
1279
+ land_mask_strategy = conf.get("LAND_MASK_STRATEGY", "merge")
1280
+ logging.info(f"land_mask_strategy = {land_mask_strategy}")
1281
+
1282
+ # Store masks_by_category in config for later cleanup
1283
+ config["masks_by_category"] = masks_by_category
1284
+
1285
+ merged_land_masks = None
1286
+ if land_mask_strategy == "merge" and "land" in masks_by_category:
1287
+ mergeLandMasks(xr_dataset, masks_by_category["land"])
1288
+ merged_land_masks = masks_by_category["land"]
1289
+
1290
+ # Process land mask with coastal zone detection (3-level system)
1291
+ # 0 = ocean, 1 = coastal, 2 = land
1292
+ processLandMask(xr_dataset, dilation_iterations=3,
1293
+ merged_masks=merged_land_masks)
1078
1294
 
1079
1295
  logging.debug("mask is a copy of land_mask")
1080
1296
 
1297
+ # Create main mask from land_mask
1298
+ # For now, mask uses the same values as land_mask
1299
+ # Can be extended later to include ice (value 3) and other categories
1081
1300
  xr_dataset["mask"] = xr.DataArray(xr_dataset.land_mask)
1082
1301
  xr_dataset.mask.attrs = {}
1083
1302
  xr_dataset.mask.attrs["long_name"] = "Mask of data"
1084
1303
  xr_dataset.mask.attrs["valid_range"] = np.array([0, 3])
1085
1304
  xr_dataset.mask.attrs["flag_values"] = np.array([0, 1, 2, 3])
1086
- xr_dataset.mask.attrs["flag_meanings"] = "valid land ice no_valid"
1305
+ xr_dataset.mask.attrs["flag_meanings"] = "ocean coastal land ice"
1087
1306
 
1088
1307
  # ancillary
1089
1308
  xr_dataset["ancillary_wind_direction"] = (
@@ -1091,8 +1310,9 @@ def preprocess(
1091
1310
  xr_dataset.model_U10)) + 180
1092
1311
  ) % 360
1093
1312
 
1313
+ # Keep ocean (0) and coastal (1) zones for ancillary wind
1094
1314
  xr_dataset["ancillary_wind_direction"] = xr.where(
1095
- xr_dataset["mask"], np.nan, xr_dataset["ancillary_wind_direction"]
1315
+ xr_dataset["mask"] >= 2, np.nan, xr_dataset["ancillary_wind_direction"]
1096
1316
  ).transpose(*xr_dataset["ancillary_wind_direction"].dims)
1097
1317
  xr_dataset["ancillary_wind_direction"].attrs = {}
1098
1318
  xr_dataset["ancillary_wind_direction"].attrs["units"] = "degrees_north"
@@ -1105,7 +1325,7 @@ def preprocess(
1105
1325
  xr_dataset["model_U10"] ** 2 + xr_dataset["model_V10"] ** 2
1106
1326
  )
1107
1327
  xr_dataset["ancillary_wind_speed"] = xr.where(
1108
- xr_dataset["mask"], np.nan, xr_dataset["ancillary_wind_speed"]
1328
+ xr_dataset["mask"] >= 2, np.nan, xr_dataset["ancillary_wind_speed"]
1109
1329
  ).transpose(*xr_dataset["ancillary_wind_speed"].dims)
1110
1330
  xr_dataset["ancillary_wind_speed"].attrs = {}
1111
1331
  xr_dataset["ancillary_wind_speed"].attrs["units"] = "m s^-1"
@@ -1115,7 +1335,7 @@ def preprocess(
1115
1335
  xr_dataset["ancillary_wind_speed"].attrs["standart_name"] = "wind_speed"
1116
1336
 
1117
1337
  xr_dataset["ancillary_wind"] = xr.where(
1118
- xr_dataset["mask"],
1338
+ xr_dataset["mask"] >= 2,
1119
1339
  np.nan,
1120
1340
  (
1121
1341
  xr_dataset.ancillary_wind_speed
@@ -1127,15 +1347,24 @@ def preprocess(
1127
1347
  )
1128
1348
  ),
1129
1349
  ).transpose(*xr_dataset["ancillary_wind_speed"].dims)
1350
+ xr_dataset["ancillary_wind"].attrs = {}
1351
+ xr_dataset["ancillary_wind"].attrs["long_name"] = f"{ancillary_name} wind in complex form for inversion"
1352
+ xr_dataset["ancillary_wind"].attrs[
1353
+ "description"] = "Complex wind (speed * exp(i*direction)) in antenna convention for GMF inversion"
1130
1354
 
1131
- xr_dataset.attrs["ancillary_source"] = (
1132
- xr_dataset["model_U10"].attrs["history"].split("decoded: ")[1].strip()
1133
- )
1134
- xr_dataset = xr_dataset.drop_vars(["model_U10", "model_V10"])
1355
+ # Add ancillary metadata to model variables
1356
+
1357
+ for attr_key, attr_value in ancillary_metadata.items():
1358
+ for var_name in ['model_U10', 'model_V10', 'ancillary_wind_speed', 'ancillary_wind_direction', 'ancillary_wind']:
1359
+ if var_name in xr_dataset:
1360
+ xr_dataset[var_name].attrs[attr_key] = attr_value
1361
+
1362
+ xr_dataset.attrs[attr_key] = attr_value
1135
1363
 
1136
1364
  # nrcs processing
1365
+ # Keep ocean (0) and coastal (1) zones, mask out land (2) and ice (3)
1137
1366
  xr_dataset["sigma0_ocean"] = xr.where(
1138
- xr_dataset["mask"], np.nan, xr_dataset["sigma0"]
1367
+ xr_dataset["mask"] >= 2, np.nan, xr_dataset["sigma0"]
1139
1368
  ).transpose(*xr_dataset["sigma0"].dims)
1140
1369
  xr_dataset["sigma0_ocean"].attrs = xr_dataset["sigma0"].attrs
1141
1370
  #  we forced it to 1e-15
@@ -1143,19 +1372,21 @@ def preprocess(
1143
1372
  "comment"
1144
1373
  ] = "clipped, no values <=0 ; 1e-15 instread"
1145
1374
 
1146
- # rajout d'un mask pour les valeurs <=0:
1375
+ xr_dataset["sigma0_ocean"] = xr.where(
1376
+ xr_dataset["sigma0_ocean"] <= 0, 1e-15, xr_dataset["sigma0_ocean"]
1377
+ )
1378
+
1379
+ # add a mask for values <=0:
1147
1380
  xr_dataset["sigma0_mask"] = xr.where(
1148
1381
  xr_dataset["sigma0_ocean"] <= 0, 1, 0
1149
1382
  ).transpose(*xr_dataset["sigma0"].dims)
1150
1383
  xr_dataset.sigma0_mask.attrs["valid_range"] = np.array([0, 1])
1151
1384
  xr_dataset.sigma0_mask.attrs["flag_values"] = np.array([0, 1])
1152
1385
  xr_dataset.sigma0_mask.attrs["flag_meanings"] = "valid no_valid"
1153
- xr_dataset["sigma0_ocean"] = xr.where(
1154
- xr_dataset["sigma0_ocean"] <= 0, 1e-15, xr_dataset["sigma0_ocean"]
1155
- )
1156
1386
 
1387
+ # Keep ocean (0) and coastal (1) zones for sigma0_ocean_raw too
1157
1388
  xr_dataset["sigma0_ocean_raw"] = xr.where(
1158
- xr_dataset["mask"], np.nan, xr_dataset["sigma0_raw"]
1389
+ xr_dataset["mask"] >= 2, np.nan, xr_dataset["sigma0_raw"]
1159
1390
  ).transpose(*xr_dataset["sigma0_raw"].dims)
1160
1391
 
1161
1392
  xr_dataset["sigma0_ocean_raw"].attrs = xr_dataset["sigma0_raw"].attrs
@@ -1166,12 +1397,26 @@ def preprocess(
1166
1397
 
1167
1398
  # processing
1168
1399
  if dual_pol:
1169
-
1170
1400
  xr_dataset['sigma0_detrend_cross'] = xsarsea.sigma0_detrend(
1171
1401
  xr_dataset.sigma0.sel(pol=crosspol), xr_dataset.incidence, model=model_cross)
1172
1402
 
1173
- xr_dataset = xr_dataset.assign(nesz_cross_flattened=(
1174
- ['line', 'sample'], windspeed.nesz_flattening(xr_dataset.nesz.sel(pol=crosspol), xr_dataset.incidence).data))
1403
+ try:
1404
+ xr_dataset = xr_dataset.assign(nesz_cross_flattened=(
1405
+ ['line', 'sample'], windspeed.nesz_flattening(xr_dataset.nesz.sel(pol=crosspol), xr_dataset.incidence).data))
1406
+ except Exception as e:
1407
+ if apply_flattening:
1408
+ # error
1409
+ logging.error("Error during NESZ flattening computation")
1410
+ logging.info("%s", traceback.format_exc())
1411
+ raise e
1412
+ else:
1413
+ # replace with nans
1414
+ logging.warning("nesz_flattening warning => Error during NESZ flattening computation, but apply_flattening is False, \
1415
+ so continuing without nesz_cross_flattened and replace with NaNs\n \
1416
+ The error comes probably from NaN in incidence angle")
1417
+ config["return_status"] = 99
1418
+ xr_dataset = xr_dataset.assign(nesz_cross_flattened=(
1419
+ ['line', 'sample'], np.full(xr_dataset.nesz.sel(pol=crosspol).shape, np.nan)))
1175
1420
 
1176
1421
  xr_dataset['nesz_cross_flattened'].attrs[
1177
1422
  "comment"] = 'nesz has been flattened using windspeed.nesz_flattening'
@@ -1228,7 +1473,7 @@ def preprocess(
1228
1473
 
1229
1474
  for idx, gmf_name in enumerate(gmf_names):
1230
1475
 
1231
- @timing(logger=logger.info)
1476
+ @timing(logger=root_logger.info)
1232
1477
  def apply_lut_to_dataset():
1233
1478
  lut = xsarsea.windspeed.get_model(
1234
1479
  gmf_name).to_lut(unit="linear")
@@ -1319,13 +1564,13 @@ def process_gradients(xr_dataset, config):
1319
1564
 
1320
1565
  xr_dataset_100["sigma0_detrend"] = sigma0_detrend_combined
1321
1566
 
1322
- xr_dataset_100.land_mask.values = binary_dilation(
1323
- xr_dataset_100["land_mask"].values.astype("uint8"),
1324
- structure=np.ones((3, 3), np.uint8),
1325
- iterations=3,
1326
- )
1567
+ # Process land mask with coastal zone detection (3-level system)
1568
+ processLandMask(xr_dataset_100, dilation_iterations=3)
1569
+
1570
+ # Mask sigma0_detrend where land_mask >= 2 (land and ice)
1571
+ # Keep ocean (0) and coastal (1) zones
1327
1572
  xr_dataset_100["sigma0_detrend"] = xr.where(
1328
- xr_dataset_100["land_mask"], np.nan, xr_dataset_100["sigma0"]
1573
+ xr_dataset_100["land_mask"] >= 2, np.nan, xr_dataset_100["sigma0"]
1329
1574
  ).transpose(*xr_dataset_100["sigma0"].dims)
1330
1575
 
1331
1576
  xr_dataset_100["ancillary_wind"] = (
@@ -1361,15 +1606,15 @@ def process_gradients(xr_dataset, config):
             }
         )
     else:
-        logger.warn(
-            "'longitude' not found in streaks_indiv : there is probably an error"
+        root_logger.warning(
+            "process_gradients warning : 'longitude' not found in streaks_indiv : there is probably an error"
         )
         xr_dataset_streaks = None

     return xr_dataset, xr_dataset_streaks


-@timing(logger=logger.info)
+@timing(logger=root_logger.info)
 def makeL2(
     filename, outdir, config_path, overwrite=False, generateCSV=True, resolution="1000m"
 ):
1404
1649
  filename, outdir, config_path, overwrite, resolution
1405
1650
  )
1406
1651
 
1652
+ # Drop only masks added from config (not internal masks like sigma0_mask, owiMask_Nrcs)
1653
+ masks_by_category = config.get("masks_by_category", {})
1654
+ masks_to_drop = []
1655
+ for category, mask_list in masks_by_category.items():
1656
+ masks_to_drop.extend(mask_list)
1657
+
1658
+ # Only drop masks that actually exist in the dataset (with XSAR suffix)
1659
+ vars_to_drop = [
1660
+ m+XSAR_MASK_SUFFIX for m in masks_to_drop if (m+XSAR_MASK_SUFFIX) in xr_dataset.data_vars]
1661
+ if vars_to_drop:
1662
+ logging.info(f"Dropping external masks of dataset: {vars_to_drop}")
1663
+ xr_dataset = xr_dataset.drop_vars(vars_to_drop)
1664
+
1407
1665
  if config["add_gradientsfeatures"]:
1408
1666
  xr_dataset, xr_dataset_streaks = process_gradients(xr_dataset, config)
1409
1667
  else:
@@ -1441,11 +1699,15 @@ def makeL2(
1441
1699
  "resolution": config.pop("resolution", None),
1442
1700
  }
1443
1701
 
1702
+ config["return_status"] = 0 # default value SUCCESS
1444
1703
  logging.info("Checking incidence range within LUTS incidence range")
1445
- #  warning if incidence is out of lut incidence range
1446
1704
  inc_check_co, inc_check_cross = check_incidence_range(
1447
1705
  xr_dataset["incidence"], [model_co, model_cross], **kwargs
1448
1706
  )
1707
+
1708
+ if not inc_check_co or not inc_check_cross:
1709
+ config["return_status"] = 99
1710
+
1449
1711
  if dsig_cr_step == "nrcs":
1450
1712
  logging.info(
1451
1713
  "dsig_cr_step is nrcs : polarization are mixed at cost function step")
@@ -1615,13 +1877,15 @@ def makeL2(
1615
1877
  "wnf_3km_average": "False",
1616
1878
  "owiWindSpeedSrc": "owiWindSpeed",
1617
1879
  "owiWindDirectionSrc": "/",
1618
- "ancillary_source": xr_dataset.attrs["ancillary_source"],
1880
+ "ancillary_source_model": xr_dataset.attrs["ancillary_source_model"],
1881
+ "ancillary_source_path": xr_dataset.attrs["ancillary_source_path"],
1619
1882
  "winddir_convention": config["winddir_convention"],
1620
1883
  "incidence_within_lut_copol_incidence_range": str(inc_check_co),
1621
1884
  "incidence_within_lut_crosspol_incidence_range": str(inc_check_cross),
1622
1885
  "swath": xr_dataset.attrs["swath"],
1623
1886
  "footprint": xr_dataset.attrs["footprint"],
1624
1887
  "coverage": xr_dataset.attrs["coverage"],
1888
+ "cross_antimeridian": str(config["meta"].cross_antimeridian)
1625
1889
  }
1626
1890
 
1627
1891
  for recalib_attrs in ["aux_pp1_recal", "aux_pp1", "aux_cal_recal", "aux_cal"]:
@@ -1679,7 +1943,10 @@ def makeL2(
1679
1943
 
1680
1944
  logging.info("OK for %s ", os.path.basename(filename))
1681
1945
 
1682
- return out_file, xr_dataset
1946
+ if config["add_gradientsfeatures"] and xr_dataset_streaks is None:
1947
+ config["return_status"] = 99
1948
+
1949
+ return out_file, xr_dataset, config["return_status"]
1683
1950
 
1684
1951
 
1685
1952
  def transform_winddir(wind_cpx, ground_heading, winddir_convention="meteorological"):
@@ -1719,6 +1986,7 @@ def transform_winddir(wind_cpx, ground_heading, winddir_convention="meteorologic
1719
1986
  logging.warning(
1720
1987
  f"wind direction convention {winddir_convention} is not supported, using meteorological",
1721
1988
  )
1989
+
1722
1990
  long_name = "Wind direction in meteorological convention (clockwise, from), ex: 0°=from north, 90°=from east"
1723
1991
 
1724
1992
  dataArray = xsarsea.dir_to_360(dataArray)
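A standalone illustration (pure numpy, not the package's code path) of the two conventions `transform_winddir` chooses between. Meteorological: the direction the wind comes FROM, clockwise from north. Oceanographic: the direction the wind blows TO; the two differ by 180 degrees.

```python
import numpy as np

def meteo_to_oceano(deg_from):
    # 0 deg "from north" (meteorological) == 180 deg "toward south" (oceanographic)
    return (np.asarray(deg_from) + 180.0) % 360.0

print(meteo_to_oceano(0.0))    # 180.0 : wind from the north blows toward the south
print(meteo_to_oceano(90.0))   # 270.0 : wind from the east blows toward the west
```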