grdwindinversion 0.3.8__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- grdwindinversion/__init__.py +11 -8
- grdwindinversion/config_prod_recal.yaml +72 -8
- grdwindinversion/config_prod_recal_streaks_nrcsmod.yaml +70 -5
- grdwindinversion/config_prod_streaks.yaml +66 -5
- grdwindinversion/config_prod_streaks_nrcsmod.yaml +66 -5
- grdwindinversion/config_prod_v3.yaml +43 -0
- grdwindinversion/data_config.yaml +22 -5
- grdwindinversion/inversion.py +484 -204
- grdwindinversion/main.py +3 -1
- grdwindinversion/utils.py +8 -6
- {grdwindinversion-0.3.8.dist-info → grdwindinversion-1.0.0.dist-info}/METADATA +1 -1
- grdwindinversion-1.0.0.dist-info/RECORD +22 -0
- {grdwindinversion-0.3.8.dist-info → grdwindinversion-1.0.0.dist-info}/WHEEL +1 -1
- {grdwindinversion-0.3.8.dist-info → grdwindinversion-1.0.0.dist-info}/licenses/AUTHORS.rst +1 -5
- grdwindinversion/config_prod.yaml +0 -52
- grdwindinversion-0.3.8.dist-info/RECORD +0 -23
- {grdwindinversion-0.3.8.dist-info → grdwindinversion-1.0.0.dist-info}/entry_points.txt +0 -0
- {grdwindinversion-0.3.8.dist-info → grdwindinversion-1.0.0.dist-info}/licenses/LICENSE +0 -0
- {grdwindinversion-0.3.8.dist-info → grdwindinversion-1.0.0.dist-info}/top_level.txt +0 -0
grdwindinversion/inversion.py
CHANGED
```diff
@@ -13,12 +13,12 @@ import datetime
 import yaml
 from scipy.ndimage import binary_dilation
 import re
+import os
+import logging
 import string
+
 from grdwindinversion.utils import check_incidence_range, get_pol_ratio_name, timing, convert_polarization_name
 from grdwindinversion.load_config import getConf
-import logging
-import os
-
 os.environ["OMP_NUM_THREADS"] = "1"
 os.environ["OPENBLAS_NUM_THREADS"] = "1"
 os.environ["MKL_NUM_THREADS"] = "1"
```
```diff
@@ -31,9 +31,22 @@ except:
     cv2.setNumThreads(1)


-
-
-
+root_logger = logging.getLogger("grdwindinversion.inversion")
+
+# Sensor metadata registry
+SENSOR_METADATA = {
+    "S1A": ("S1A", "SENTINEL-1 A", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+    "S1B": ("S1B", "SENTINEL-1 B", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+    "S1C": ("S1C", "SENTINEL-1 C", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+    "S1D": ("S1D", "SENTINEL-1 D", xsar.Sentinel1Meta, xsar.Sentinel1Dataset),
+    "RS2": ("RS2", "RADARSAT-2", xsar.RadarSat2Meta, xsar.RadarSat2Dataset),
+    "RCM1": ("RCM", "RADARSAT Constellation 1", xsar.RcmMeta, xsar.RcmDataset),
+    "RCM2": ("RCM", "RADARSAT Constellation 2", xsar.RcmMeta, xsar.RcmDataset),
+    "RCM3": ("RCM", "RADARSAT Constellation 3", xsar.RcmMeta, xsar.RcmDataset),
+}
+
+# Mask naming convention used by xsar
+XSAR_MASK_SUFFIX = "_mask"


 def getSensorMetaDataset(filename):
```
```diff
@@ -50,23 +63,14 @@ def getSensorMetaDataset(filename):
     tuple
         sensor name, sensor long name, meta function, dataset function
     """
-    if "S1A" in filename:
-        return "S1A", "SENTINEL-1 A", xsar.Sentinel1Meta, xsar.Sentinel1Dataset
-    elif "S1B" in filename:
-        return "S1B", "SENTINEL-1 B", xsar.Sentinel1Meta, xsar.Sentinel1Dataset
-    elif "RS2" in filename:
-        return "RS2", "RADARSAT-2", xsar.RadarSat2Meta, xsar.RadarSat2Dataset
-    elif "RCM1" in filename:
-        return "RCM", "RADARSAT Constellation 1", xsar.RcmMeta, xsar.RcmDataset
-    elif "RCM2" in filename:
-        return "RCM", "RADARSAT Constellation 2", xsar.RcmMeta, xsar.RcmDataset
-    elif "RCM3" in filename:
-        return "RCM", "RADARSAT Constellation 3", xsar.RcmMeta, xsar.RcmDataset
+    for sensor_key, sensor_info in SENSOR_METADATA.items():
+        if sensor_key in filename:
+            return sensor_info

-
-
-
-
+    supported_sensors = "|".join(SENSOR_METADATA.keys())
+    raise ValueError(
+        f"must be {supported_sensors}, got filename {filename}"
+    )


 def getOutputName(
```
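The 1.0.0 refactor above replaces a hard-coded `if/elif` chain with a data-driven lookup: every supported platform is declared once in `SENSOR_METADATA`, and `getSensorMetaDataset` resolves it by substring match on the product name, so S1C/S1D support becomes a one-line table entry. A minimal standalone sketch of the lookup pattern, with the `xsar` Meta/Dataset classes stubbed out as strings so it runs on its own:

```python
# Sketch of the SENSOR_METADATA lookup; the xsar Meta/Dataset classes are
# replaced by placeholder strings so this snippet runs without xsar.
SENSOR_METADATA = {
    "S1A": ("S1A", "SENTINEL-1 A", "Sentinel1Meta", "Sentinel1Dataset"),
    "RCM1": ("RCM", "RADARSAT Constellation 1", "RcmMeta", "RcmDataset"),
}

def get_sensor(filename):
    # The first registry key found as a substring of the filename wins.
    for sensor_key, sensor_info in SENSOR_METADATA.items():
        if sensor_key in filename:
            return sensor_info
    raise ValueError(f"must be {'|'.join(SENSOR_METADATA)}, got filename {filename}")

# The S1A product name taken from the diff's own example comment:
print(get_sensor("S1A_IW_GRDH_1SDV_20210909T130650_20210909T130715_039605_04AE83_C34F.SAFE")[1])
# SENTINEL-1 A
```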
```diff
@@ -94,20 +98,16 @@ def getOutputName(
     output filename
     """
     basename = os.path.basename(input_file)
-    basename_match = basename

-    if sensor == "S1A" or sensor == "S1B":
+    if sensor in ["S1A", "S1B", "S1C", "S1D"]:
+        # Example: S1A_IW_GRDH_1SDV_20210909T130650_20210909T130715_039605_04AE83_C34F.SAFE
         regex = re.compile(
-            "(...)_(..)_(...)(.)_(.)(.)(..)_(........T......)_(........T......)_(......)_(......)_(....).SAFE"
+            r"(...)_(..)_(...)(.)_(.)(.)(..)_(........T......)_(........T......)_(......)_(......)_(....).SAFE"
         )
-        template = string.Template(
-            "${MISSIONID}_${SWATH}_${PRODUCT}${RESOLUTION}_${LEVEL}${CLASS}${POLARIZATION}_${STARTDATE}_${STOPDATE}_${ORBIT}_${TAKEID}_${PRODID}.SAFE"
-        )
-        # S1A_IW_GRDH_1SDV_20210909T130650_20210909T130715_039605_04AE83_C34F
-        match = regex.match(basename_match)
+        match = regex.match(basename)
         if not match:
             raise AttributeError(
-                f"S1 file {basename_match} does not match the expected pattern"
+                f"S1 file {basename} does not match the expected pattern"
             )

         (
```
```diff
@@ -124,47 +124,44 @@ def getOutputName(
             TAKEID,
             PRODID,
         ) = match.groups()
-        # last two terms of polarization are removed
         new_format = f"{MISSIONID.lower()}-{SWATH.lower()}-owi-{POLARIZATION.lower()}-{STARTDATE.lower()}-{STOPDATE.lower()}-{ORBIT}-{TAKEID}.nc"
+
     elif sensor == "RS2":
+        # Example: RS2_OK141302_PK1242223_DK1208537_SCWA_20220904_093402_VV_VH_SGF
         regex = re.compile(
-            "(RS2)_OK([0-9]+)_PK([0-9]+)_DK([0-9]+)_(....)_(........)_(......)_(.._?.?.?)_(S.F)"
+            r"(RS2)_OK([0-9]+)_PK([0-9]+)_DK([0-9]+)_(....)_(........)_(......)_(.._?.?.?)_(S.F)"
         )
-
-        template = string.Template(
-            "${MISSIONID}_OK${DATA1}_PK${DATA2}_DK${DATA3}_${SWATH}_${DATE}_${TIME}_${POLARIZATION}_${LAST}"
-        )
-        match = regex.match(basename_match)
+        match = regex.match(basename)
         if not match:
             raise AttributeError(
-                f"RS2 file {basename_match} does not match the expected pattern"
+                f"RS2 file {basename} does not match the expected pattern"
             )

         MISSIONID, DATA1, DATA2, DATA3, SWATH, DATE, TIME, POLARIZATION, LAST = (
             match.groups()
         )
-        new_format = f"{MISSIONID.lower()}-{SWATH.lower()}-owi-{convert_polarization_name(POLARIZATION)}-{meta_start_date.lower()}-{meta_stop_date.lower()}-
-    elif sensor == "RCM":
+        new_format = f"{MISSIONID.lower()}-{SWATH.lower()}-owi-{convert_polarization_name(POLARIZATION)}-{meta_start_date.lower()}-{meta_stop_date.lower()}-xxxxx-xxxxx.nc"

+    elif sensor == "RCM":
+        # Example: RCM1_OK2767220_PK2769320_1_SCLND_20230930_214014_VV_VH_GRD
         regex = re.compile(
             r"(RCM[0-9])_OK([0-9]+)_PK([0-9]+)_([0-9]+)_([A-Z0-9]+)_(\d{8})_(\d{6})_([A-Z]{2}(?:_[A-Z]{2})?)_([A-Z]+)$"
         )
-
-
-        match = regex.match(basename_match)
+        match = regex.match(basename)
         if not match:
             raise AttributeError(
-                f"RCM file {basename_match} does not match the expected pattern"
+                f"RCM file {basename} does not match the expected pattern"
             )

         MISSIONID, DATA1, DATA2, DATA3, SWATH, DATE, TIME, POLARIZATION, PRODUCT = (
             match.groups()
         )
-        new_format = f"{MISSIONID.lower()}-{SWATH.lower()}-owi-{convert_polarization_name(POLARIZATION)}-{meta_start_date.lower()}-{meta_stop_date.lower()}-
+        new_format = f"{MISSIONID.lower()}-{SWATH.lower()}-owi-{convert_polarization_name(POLARIZATION)}-{meta_start_date.lower()}-{meta_stop_date.lower()}-xxxxx-xxxxx.nc"

     else:
         raise ValueError(
-            "sensor must be S1A|S1B|RS2|RCM, got sensor
+            f"sensor must be S1A|S1B|S1C|S1D|RS2|RCM, got sensor {sensor}"
+        )

     if subdir:
         out_file = os.path.join(outdir, basename, new_format)
```
```diff
@@ -173,115 +170,325 @@ def getOutputName(
     return out_file


-def
+def addMasks_toMeta(meta: xsar.BaseMeta) -> dict:
     """
-
-
+    Add high-resolution masks (land, ice, lakes, etc.) from shapefiles to meta object.
+
+    Configuration format:
+        masks:
+            land:
+                - name: 'gshhsH'
+                  path: '/path/to/mask.shp'
+                - name: 'custom_land'
+                  path: '/path/to/custom.shp'
+            ice:
+                - name: 'ice_mask'
+                  path: '/path/to/ice.shp'
+
+    Note: xsar will automatically add '_mask' suffix to the variable names in the dataset.
+    For example, 'gshhsH' becomes 'gshhsH_mask' in the xarray dataset.

     Parameters
     ----------
-    meta:
+    meta : xsar.BaseMeta
+        Metadata object to add mask features to. Must have a set_mask_feature method.

     Returns
     -------
     dict
-
+        Dictionary with mask categories as keys and lists of mask names as values.
+        Names are returned WITHOUT the '_mask' suffix that xsar adds internally.
+        Example: {'land': ['gshhsH', 'custom_land'], 'ice': ['ice_mask']}
+
+    Raises
+    ------
+    AttributeError
+        If meta object doesn't have set_mask_feature method
     """
-
-    if
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                "netcdf_light_REPRO_tree", "netcdf_light")
-        try:
-            ecmwf_file = ecmwf_infos['get_function'](ecmwf_infos['resource'],
-                                                     date=datetime.datetime.strptime(meta.start_date,
-                                                                                     '%Y-%m-%d %H:%M:%S.%f'))[1]
-        except Exception as e:
-            ecmwf_file = ecmwf_infos['get_function'](ecmwf_infos['resource'],
-                                                     date=datetime.datetime.strptime(meta.start_date,
-                                                                                     '%Y-%m-%d %H:%M:%S'))[1]
-
-        if not os.path.isfile(ecmwf_file):
-            meta.rasters = meta.rasters.drop([ecmwf_name])
+    # Validate meta object has required method
+    if not hasattr(meta, 'set_mask_feature'):
+        raise AttributeError(
+            f"Meta object of type {type(meta).__name__} must have a 'set_mask_feature' method")
+
+    conf = getConf()
+    masks_by_category = {}
+
+    # Check for 'masks' key
+    if "masks" in conf and isinstance(conf["masks"], dict):
+        logging.debug("Found 'masks' configuration")
+
+        for category, mask_list in conf["masks"].items():
+            if isinstance(mask_list, list):
+                masks_by_category[category] = []
+                for mask_item in mask_list:
+                    if isinstance(mask_item, dict) and "path" in mask_item and "name" in mask_item:
+                        mask_name = mask_item["name"]
+                        mask_path = mask_item["path"]
+                        try:
+                            logging.debug("%s path: %s", mask_name, mask_path)
+                            meta.set_mask_feature(mask_name, mask_path)
+                            logging.info(
+                                "Mask feature '%s' set from %s", mask_name, mask_path)
+                            masks_by_category[category].append(mask_name)
+                        except (IOError, OSError, FileNotFoundError) as e:
+                            logging.error(
+                                "Failed to load mask file '%s' from path '%s': %s",
+                                mask_name, mask_path, str(e))
+                            logging.debug("%s", traceback.format_exc())
+                        except (ValueError, RuntimeError) as e:
+                            logging.error(
+                                "Failed to process mask '%s': %s", mask_name, str(e))
+                            logging.debug("%s", traceback.format_exc())
                     else:
-
-            '
-
-        else:
-            """
-            meta.rasters = meta.rasters.drop([ecmwf_name])
+                        logging.warning(
+                            "Invalid mask configuration in category '%s': missing 'name' or 'path' field",
+                            category)
             else:
-
-        "
-
+                logging.warning(
+                    "Mask category '%s' should contain a list, got %s",
+                    category, type(mask_list).__name__
+                )

-
+    return masks_by_category

-    elif ancillary_name == "era5":
-        era5_name = "era5_0250_1h"
-        logging.debug("conf: %s", getConf())
-        era0250 = getConf()[era5_name]
-        logging.debug("%s : %s", (era5_name, era0250))
-        meta.set_raster(era5_name, era0250)

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+def mergeLandMasks(xr_dataset: xr.Dataset, land_mask_names: list) -> xr.Dataset:
+    """
+    Merge multiple land masks into the main land_mask variable.
+
+    This function takes all individual land masks added via addMasks_toMeta() and combines
+    them using a logical OR operation to create a unified land mask that covers
+    all land areas from all sources.
+
+    Parameters
+    ----------
+    xr_dataset : xr.Dataset
+        Dataset containing individual land mask variables. Must contain a 'land_mask' variable.
+    land_mask_names : list of str
+        Names of the land mask variables to merge (WITHOUT the '_mask' suffix).
+        For example: ['gshhsH', 'custom_land'].
+        These names will have XSAR_MASK_SUFFIX automatically appended to match
+        the variable names in the dataset.
+
+    Returns
+    -------
+    xr.Dataset
+        The input dataset with its land_mask variable updated by merging all specified masks.
+        Note: The dataset is modified in place AND returned for convenience.
+
+    Raises
+    ------
+    ValueError
+        If 'land_mask' variable is not present in the dataset
+    """
+    # Validate that land_mask exists in the dataset
+    if "land_mask" not in xr_dataset:
+        raise ValueError(
+            "Dataset must contain a 'land_mask' variable. "
+            f"Available variables: {list(xr_dataset.data_vars.keys())}")
+
+    if not land_mask_names:
+        logging.debug("No additional land masks to merge")
+        return xr_dataset
+
+    logging.info("Merging %d land masks: %s", len(
+        land_mask_names), land_mask_names)
+
+    # Start with the default land_mask from xsar
+    merged_mask = xr_dataset["land_mask"].values.astype("uint8")
+
+    # Merge all configured land masks
+    for mask_name in land_mask_names:
+        # xsar adds XSAR_MASK_SUFFIX to mask names in the dataset
+        dataset_mask_name = f"{mask_name}{XSAR_MASK_SUFFIX}"
+
+        if dataset_mask_name in xr_dataset:
+            logging.info("Merging mask '%s' into land_mask", dataset_mask_name)
+            mask_values = xr_dataset[dataset_mask_name].values.astype("uint8")
+            # Logical OR: any pixel marked as land (1) in any mask becomes land
+            merged_mask = np.maximum(merged_mask, mask_values)
+        else:
+            logging.warning(
+                "Mask '%s' not found in dataset, skipping", dataset_mask_name)
+
+    # Update the main land_mask
+    xr_dataset.land_mask.values = merged_mask
+    logging.info("Land masks merged")
+
+    return xr_dataset
+
+
+def processLandMask(xr_dataset, dilation_iterations=3, merged_masks=None):
+    """
+    Process land mask to create a 3-level mask system with coastal zone detection.
+
+    This function:
+    1. Takes the original land_mask (merged from all configured sources)
+    2. Applies binary dilation to detect coastal zones
+    3. Creates a 3-level land_mask:
+        - 0 = ocean (water far from coast)
+        - 1 = coastal (zone between original mask and dilated mask)
+        - 2 = land (original land mask)
+
+    Parameters
+    ----------
+    xr_dataset : xarray.Dataset
+        Dataset containing the land_mask variable
+    dilation_iterations : int, optional
+        Number of dilation iterations to define coastal zone width (default: 3)
+    merged_masks : list of str, optional
+        Names of masks that were merged into land_mask (for history tracking)
+
+    Returns
+    -------
+    None
+        Modifies xr_dataset.land_mask in place
+    """
+    logging.info("Processing land mask and adding a coastal zone")
+
+    # Store original land mask (2 = land)
+    original_land_mask = xr_dataset["land_mask"].values.astype("uint8")
+
+    # Apply dilation to create coastal zone
+    dilated_mask = binary_dilation(
+        original_land_mask,
+        structure=np.ones((3, 3), np.uint8),
+        iterations=dilation_iterations,
+    )
+
+    # Create 3-level mask
+    # Start with all zeros (ocean)
+    three_level_mask = np.zeros_like(original_land_mask, dtype="uint8")
+
+    # Mark land areas (2)
+    three_level_mask[original_land_mask == 1] = 2
+
+    # Mark coastal areas (1) - dilated area minus original land
+    coastal_zone = (dilated_mask == 1) & (original_land_mask == 0)
+    three_level_mask[coastal_zone] = 1
+
+    # Update the land_mask with 3-level system
+    xr_dataset.land_mask.values = three_level_mask
+
+    # Update attributes
+    xr_dataset.land_mask.attrs["long_name"] = "Land mask with coastal zone"
+    xr_dataset.land_mask.attrs["valid_range"] = np.array([0, 2])
+    xr_dataset.land_mask.attrs["flag_values"] = np.array([0, 1, 2])
+    xr_dataset.land_mask.attrs["flag_meanings"] = "ocean coastal land"
+    xr_dataset.land_mask.attrs["meaning"] = "0: ocean, 1: coastal, 2: land"
+
+    # Append to history instead of replacing
+    existing_history = xr_dataset.land_mask.attrs.get("history", "")
+
+    # Build history message
+    if merged_masks:
+        merge_info = f"merged with {', '.join(merged_masks)}"
+    else:
+        merge_info = ""
+
+    new_history = f"{merge_info}3-level land mask with coastal zone detection via binary dilation"

+    if existing_history:
+        xr_dataset.land_mask.attrs["history"] = existing_history + \
+            "; " + new_history
     else:
+        xr_dataset.land_mask.attrs["history"] = new_history
+
+
+def getAncillary(meta, ancillary_name="ecmwf"):
+    """
+    Map ancillary wind from "ecmwf" or "era5" or other sources.
+    This function is used to check if the model files are available and to map the model to the SAR data.
+    This function will use with priority the first model of the config file.
+
+    Parameters
+    ----------
+    meta: obj `xsar.BaseMeta` (one of the supported SAR mission)
+    ancillary_name: str
+        Name of the ancillary source (ecmwf or era5)
+
+    Returns
+    -------
+    tuple
+        (map_model, metadata) where:
+        - map_model (dict): mapping of model variables to SAR data
+        - metadata (dict): ancillary metadata with 'source' and 'source_path' keys
+    """
+    logging.debug("conf: %s", getConf())
+    conf = getConf()
+    if 'ancillary_sources' not in conf:
+        raise ValueError("Configuration must contain 'ancillary_sources'")
+
+    if ancillary_name not in conf['ancillary_sources']:
         raise ValueError(
-            "
+            f"Configuration 'ancillary_sources' must contain '{ancillary_name}'")

+    if ancillary_name not in ["ecmwf", "era5"]:
+        logging.warning("We advice to use either ecmwf or era5.")

-
+    ancillary_sources = conf['ancillary_sources'][ancillary_name]
+    if not ancillary_sources:
+        raise ValueError(
+            f"At least one ancillary model {ancillary_name} must be configured in ancillary_sources")
+
+    map_model = None
+    selected_name = None
+    selected_path = None
+    tried_names = []
+
+    # Loop through models in config order to find the first one that exists
+    for source in ancillary_sources:
+        model_name = source['name']
+        model_path = source['path']
+        logging.debug("%s : %s", model_name, model_path)
+
+        # Set raster to check if file exists
+        meta.set_raster(model_name, model_path)
+        tried_names.append(model_name)
+
+        model_info = meta.rasters.loc[model_name]
+
+        model_file = model_info["get_function"](
+            model_info["resource"],
+            date=datetime.datetime.strptime(
+                meta.start_date, "%Y-%m-%d %H:%M:%S.%f"
+            ),
+        )[1]
+
+        if os.path.isfile(model_file):
+            # File exists! This is our selection
+            selected_name = model_name
+            selected_path = model_file
+            map_model = {
+                "%s_%s" % (selected_name, uv): "model_%s" % uv for uv in ["U10", "V10"]
+            }
+            # Log selection
+            if len(ancillary_sources) > 1:
+                logging.info(
+                    f"Multiple {ancillary_name} models configured. Using {selected_name} (priority order)")
+            else:
+                logging.info(
+                    f"Only one {ancillary_name} model configured: using {selected_name}")
+            break
+
+    # Clean up: remove all tried models EXCEPT the selected one
+    if selected_name is not None:
+        for name in tried_names:
+            if name != selected_name:
+                meta.rasters = meta.rasters.drop([name])
+
+    # Prepare metadata for traceability
+    ancillary_metadata = None
+    if selected_name is not None:
+        ancillary_metadata = {
+            'ancillary_source_model': selected_name,
+            'ancillary_source_path': selected_path
+        }
+
+    return map_model, ancillary_metadata
+
+
+@timing(logger=root_logger.debug)
 def inverse_dsig_wspd(
     dual_pol,
     inc,
```
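`processLandMask` above derives the coastal zone purely morphologically: it dilates the binary land mask and labels the dilated ring (dilated minus original land) as coastal. A self-contained sketch of the same 3-level construction on a toy array, assuming only `numpy` and `scipy`:

```python
import numpy as np
from scipy.ndimage import binary_dilation

# Toy scene: a single land pixel in a 7x7 ocean grid.
land = np.zeros((7, 7), dtype="uint8")
land[3, 3] = 1

# One dilation iteration grows land by one pixel in all 8 directions.
dilated = binary_dilation(land, structure=np.ones((3, 3), np.uint8), iterations=1)

# 3-level mask: 0 = ocean, 1 = coastal ring, 2 = original land.
mask = np.zeros_like(land)
mask[land == 1] = 2
mask[(dilated == 1) & (land == 0)] = 1

print(mask[2:5, 2:5])
# [[1 1 1]
#  [1 2 1]
#  [1 1 1]]
```

With the production default of `dilation_iterations=3`, the coastal band is three pixels wide, roughly 3 km at the 1000 m posting `makeL2` uses by default.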
```diff
@@ -311,7 +518,7 @@ def inverse_dsig_wspd(
         ancillary wind
             | (for example ecmwf winds), in **ANTENNA convention**,
     nesz_cr: xarray.DataArray
-        noise equivalent sigma0
+        noise equivalent sigma0 | flattened or not
     dsig_cr_name: str
         dsig_cr name
     model_co: str
```
```diff
@@ -321,11 +528,11 @@ def inverse_dsig_wspd(

     Returns
     -------
-   xarray.DataArray
+    xarray.DataArray
         inverted wind in copol in ** antenna convention** .
-   xarray.DataArray
+    xarray.DataArray
         inverted wind in dualpol in ** antenna convention** .
-   xarray.DataArray
+    xarray.DataArray
         inverted wind in crosspol in ** antenna convention** .
     xarray.DataArray | array
         alpha (ponderation between co and crosspol)
```
```diff
@@ -368,7 +575,7 @@ def inverse_dsig_wspd(
         return wind_co, None, None, None


-@timing(logger=
+@timing(logger=root_logger.debug)
 def inverse(
     dual_pol,
     inc,
```
```diff
@@ -407,11 +614,11 @@ def inverse(

     Returns
     -------
-   xarray.DataArray
+    xarray.DataArray
         inverted wind in copol in ** antenna convention** .
-   xarray.DataArray
+    xarray.DataArray
         inverted wind in dualpol in ** antenna convention** .
-   xarray.DataArray
+    xarray.DataArray
         inverted wind in crosspol in ** antenna convention** .

     See Also
```
```diff
@@ -463,7 +670,7 @@ def inverse(
         return wind_co, None, None


-@timing(logger=
+@timing(logger=root_logger.debug)
 def makeL2asOwi(xr_dataset, config):
     """
     Rename xr_dataset variables and attributes to match naming convention.
```
```diff
@@ -723,7 +930,9 @@ def makeL2asOwi(xr_dataset, config):
             "sigma0_raw",
             "ancillary_wind",
             "nesz",
-            "
+            "model_U10",
+            "model_V10"
+
         ]
     )
     if "sigma0_raw__corrected" in xr_dataset:
```
```diff
@@ -805,6 +1014,9 @@ def preprocess(
     recalibration = config["recalibration"]
     meta = fct_meta(filename)

+    # Add masks to meta if configured (land, ice, lakes, etc.)
+    masks_by_category = addMasks_toMeta(meta)
+
     # si une des deux n'est pas VV VH HH HV on ne fait rien
     if not all([pol in ["VV", "VH", "HH", "HV"] for pol in meta.pols.split(" ")]):
         raise ValueError(f"Polarisation non gérée : meta.pols = {meta.pols}")
```
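`addMasks_toMeta(meta)` is driven entirely by the `masks` section of the configuration returned by `getConf()`. A hypothetical `data_config.yaml` fragment in the format documented by the new docstring (the names and shapefile paths are placeholders, not shipped configuration):

```yaml
masks:
  land:
    - name: 'gshhsH'
      path: '/path/to/gshhs_h_L1.shp'
    - name: 'custom_land'
      path: '/path/to/custom_land.shp'
  ice:
    - name: 'ice_mask'
      path: '/path/to/ice_extent.shp'
```

Each entry becomes a `meta.set_mask_feature(name, path)` call; for this fragment the function returns `{'land': ['gshhsH', 'custom_land'], 'ice': ['ice_mask']}`, and xsar later exposes the rasterized layers as `gshhsH_mask`, `custom_land_mask` and `ice_mask_mask` in the dataset.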
```diff
@@ -821,7 +1033,7 @@ def preprocess(
         winddir_convention = config_base["winddir_convention"]
     else:
         winddir_convention = "meteorological"
-        logging.
+        logging.info(
             f'Using meteorological convention because "winddir_convention" was not found in config.'
         )
     config["winddir_convention"] = winddir_convention
```
```diff
@@ -830,17 +1042,17 @@ def preprocess(
         add_gradientsfeatures = config_base["add_gradientsfeatures"]
     else:
         add_gradientsfeatures = False
-        logging.
+        logging.info(f"Not computing gradients by default")
     config["add_gradientsfeatures"] = add_gradientsfeatures

     if "add_nrcs_model" in config_base:
         add_nrcs_model = config_base["add_nrcs_model"]
         add_nrcs_model = False
-        logging.
+        logging.info(
             f"Force add_nrcs_model to be false, before fixing an issue")
     else:
         add_nrcs_model = False
-        logging.
+        logging.info(f"Not computing nrcs from model by default")
     config["add_nrcs_model"] = add_nrcs_model

     # creating a dictionnary of parameters
```
```diff
@@ -874,11 +1086,14 @@ def preprocess(
         raise FileExistsError("outfile %s already exists" % out_file)

     ancillary_name = config["ancillary"]
-    map_model = getAncillary(meta, ancillary_name)
+    map_model, ancillary_metadata = getAncillary(meta, ancillary_name)
     if map_model is None:
         raise Exception(
             f"the weather model is not set `map_model` is None -> you probably don't have access to {ancillary_name} archive"
         )
+    if ancillary_metadata is None:
+        raise Exception(
+            f"ancillary_metadata must be defined. There is an error in getAncillary function")

     try:
         logging.info(f"recalibration = {recalibration}")
```
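`getAncillary` now returns a `(map_model, ancillary_metadata)` pair instead of a bare mapping. Given the dict comprehension in the new function, a hypothetical first-available model named `ecmwf_0100_1h` would yield values shaped like this (a sketch, the path is made up):

```python
# Shape of the pair returned by getAncillary for a hypothetical model
# named "ecmwf_0100_1h" (the first configured source whose file exists).
map_model = {
    "ecmwf_0100_1h_U10": "model_U10",  # raster variable -> SAR-side name
    "ecmwf_0100_1h_V10": "model_V10",
}
ancillary_metadata = {
    "ancillary_source_model": "ecmwf_0100_1h",
    "ancillary_source_path": "/path/to/ecmwf/2021/09/09/model_file.nc",  # made up
}
```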
```diff
@@ -928,7 +1143,6 @@ def preprocess(
     config["fct_dataset"] = fct_dataset
     config["map_model"] = map_model

-    # load
     xr_dataset = xr_dataset.load()

     # defining dual_pol, and gmfs by channel
```
```diff
@@ -944,17 +1158,27 @@ def preprocess(
         crosspol_gmf = "VH"
     else:
         logging.warning(
-            "for now this processor does not support entirely HH+HV acquisitions\n "
+            "inversion_rules warning : for now this processor does not support entirely HH+HV acquisitions\n "
             "it wont crash but it will use HH+VH GMF for wind inversion -> wrong hypothesis\n "
             "!! dual WIND SPEED IS NOT USABLE !! But co WIND SPEED IS USABLE !!"
         )
+        config["return_status"] = 99
+
         copol = "HH"
         crosspol = "HV"
         copol_gmf = "HH"
         crosspol_gmf = "VH"

-
-
+    if (sensor == "S1A" or sensor == "S1B" or sensor == "S1C" or sensor == "S1D") and xsar_dataset.dataset.attrs["aux_cal"] is None:
+        raise ValueError(
+            "aux_cal attribute is None, xsar_dataset.dataset.attrs['aux_cal'] must be set to a valid value"
+        )
+    cond_aux_cal = (
+        (sensor == "S1A" or sensor == "S1B" or sensor == "S1C" or sensor == "S1D")
+        and xsar_dataset.dataset.attrs["aux_cal"] is not None
+        and xsar_dataset.dataset.attrs["aux_cal"].split("_")[-1][1:9] > "20190731"
+    )
+
     if cond_aux_cal and xr_dataset.attrs["swath"] == "EW" and "S1_EW_calG>20190731" in config.keys():
         model_co = config["S1_EW_calG>20190731"]["GMF_" + copol_gmf + "_NAME"]
         model_cross = config["S1_EW_calG>20190731"]["GMF_" +
```
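The new `cond_aux_cal` test compares a `YYYYMMDD` substring sliced out of the `aux_cal` attribute as a string, which is valid because fixed-width digit strings sort chronologically. A sketch with a made-up auxiliary-calibration identifier of the usual `..._Vyyyymmdd..._Gyyyymmdd...` form (the real attribute comes from the product annotations):

```python
# Hypothetical aux_cal identifier; V = validity date, G = generation date.
aux_cal = "S1A_AUX_CAL_V20190228T092500_G20210104T141310"

# split("_")[-1] -> "G20210104T141310"; [1:9] -> "20210104"
gen_date = aux_cal.split("_")[-1][1:9]
print(gen_date, gen_date > "20190731")
# 20210104 True -> the "S1_EW_calG>20190731" GMF overrides apply
```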
```diff
@@ -984,7 +1208,6 @@ def preprocess(
     config["dsig_cr_step"] = dsig_cr_step
     config["dsig_cr_name"] = dsig_cr_name
     config["apply_flattening"] = apply_flattening
-
     # need to load LUTs before inversion
     nc_luts = [x for x in [model_co, model_cross] if x.startswith("nc_lut")]

```
```diff
@@ -1045,33 +1268,41 @@ def preprocess(
     xr_dataset.elevation.attrs["standard_name"] = "elevation"

     # offboresight
-
-
-
-    xr_dataset.offboresight.attrs["long_name"] = (
-        "Offboresight angle at wind cell center"
-    )
-    xr_dataset.elevation.attrs["standard_name"] = "offboresight"
-
-    # masks (no ice / no_valid)
-    xr_dataset.land_mask.values = binary_dilation(
-        xr_dataset["land_mask"].values.astype("uint8"),
-        structure=np.ones((3, 3), np.uint8),
-        iterations=3,
+    xr_dataset.offboresight.attrs["units"] = "degrees"
+    xr_dataset.offboresight.attrs["long_name"] = (
+        "Offboresight angle at wind cell center"
     )
-    xr_dataset.
-
-
-
+    xr_dataset.offboresight.attrs["standard_name"] = "offboresight"
+
+    # merge land masks
+    conf = getConf()
+    land_mask_strategy = conf.get("LAND_MASK_STRATEGY", "merge")
+    logging.info(f"land_mask_strategy = {land_mask_strategy}")
+
+    # Store masks_by_category in config for later cleanup
+    config["masks_by_category"] = masks_by_category
+
+    merged_land_masks = None
+    if land_mask_strategy == "merge" and "land" in masks_by_category:
+        mergeLandMasks(xr_dataset, masks_by_category["land"])
+        merged_land_masks = masks_by_category["land"]
+
+    # Process land mask with coastal zone detection (3-level system)
+    # 0 = ocean, 1 = coastal, 2 = land
+    processLandMask(xr_dataset, dilation_iterations=3,
+                    merged_masks=merged_land_masks)

     logging.debug("mask is a copy of land_mask")

+    # Create main mask from land_mask
+    # For now, mask uses the same values as land_mask
+    # Can be extended later to include ice (value 3) and other categories
     xr_dataset["mask"] = xr.DataArray(xr_dataset.land_mask)
     xr_dataset.mask.attrs = {}
     xr_dataset.mask.attrs["long_name"] = "Mask of data"
     xr_dataset.mask.attrs["valid_range"] = np.array([0, 3])
     xr_dataset.mask.attrs["flag_values"] = np.array([0, 1, 2, 3])
-    xr_dataset.mask.attrs["flag_meanings"] = "
+    xr_dataset.mask.attrs["flag_meanings"] = "ocean coastal land ice"

     # ancillary
     xr_dataset["ancillary_wind_direction"] = (
```
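Every masking site downstream of this block changes from the 0.3.8 truthiness test `xr.where(mask, ...)` to `xr.where(mask >= 2, ...)` (see the following hunks): with the new flag values, a truthiness test would also blank the coastal pixels (value 1), while the intent is to keep ocean (0) and coastal (1) and drop only land (2) and ice (3). A tiny sketch of the difference:

```python
import numpy as np
import xarray as xr

mask = xr.DataArray([0, 1, 2, 3])            # ocean, coastal, land, ice
sigma0 = xr.DataArray([0.1, 0.2, 0.3, 0.4])

print(xr.where(mask, np.nan, sigma0).values)       # old test: [0.1 nan nan nan]
print(xr.where(mask >= 2, np.nan, sigma0).values)  # new test: [0.1 0.2 nan nan]
```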
```diff
@@ -1079,8 +1310,9 @@ def preprocess(
             xr_dataset.model_U10)) + 180
     ) % 360

+    # Keep ocean (0) and coastal (1) zones for ancillary wind
     xr_dataset["ancillary_wind_direction"] = xr.where(
-        xr_dataset["mask"], np.nan, xr_dataset["ancillary_wind_direction"]
+        xr_dataset["mask"] >= 2, np.nan, xr_dataset["ancillary_wind_direction"]
     ).transpose(*xr_dataset["ancillary_wind_direction"].dims)
     xr_dataset["ancillary_wind_direction"].attrs = {}
     xr_dataset["ancillary_wind_direction"].attrs["units"] = "degrees_north"
```
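Only the tail of the `ancillary_wind_direction` expression is visible in the context lines (`... xr_dataset.model_U10)) + 180` followed by `) % 360`), which matches the usual conversion from model U/V components to a meteorological "direction from" angle. A worked sketch assuming that conventional formula:

```python
import numpy as np

def met_direction_from(u10, v10):
    # Assumed conventional formula, consistent with the visible
    # "... + 180) % 360" tail: meteorological direction the wind
    # comes FROM, measured clockwise from north.
    return (90.0 - np.degrees(np.arctan2(v10, u10)) + 180) % 360

print(met_direction_from(0.0, -5.0))  # 0.0  -> wind from the north
print(met_direction_from(-5.0, 0.0))  # 90.0 -> wind from the east
```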
```diff
@@ -1093,7 +1325,7 @@ def preprocess(
         xr_dataset["model_U10"] ** 2 + xr_dataset["model_V10"] ** 2
     )
     xr_dataset["ancillary_wind_speed"] = xr.where(
-        xr_dataset["mask"], np.nan, xr_dataset["ancillary_wind_speed"]
+        xr_dataset["mask"] >= 2, np.nan, xr_dataset["ancillary_wind_speed"]
     ).transpose(*xr_dataset["ancillary_wind_speed"].dims)
     xr_dataset["ancillary_wind_speed"].attrs = {}
     xr_dataset["ancillary_wind_speed"].attrs["units"] = "m s^-1"
```
```diff
@@ -1103,7 +1335,7 @@ def preprocess(
     xr_dataset["ancillary_wind_speed"].attrs["standart_name"] = "wind_speed"

     xr_dataset["ancillary_wind"] = xr.where(
-        xr_dataset["mask"],
+        xr_dataset["mask"] >= 2,
         np.nan,
         (
             xr_dataset.ancillary_wind_speed
```
```diff
@@ -1115,15 +1347,24 @@ def preprocess(
             )
         ),
     ).transpose(*xr_dataset["ancillary_wind_speed"].dims)
+    xr_dataset["ancillary_wind"].attrs = {}
+    xr_dataset["ancillary_wind"].attrs["long_name"] = f"{ancillary_name} wind in complex form for inversion"
+    xr_dataset["ancillary_wind"].attrs[
+        "description"] = "Complex wind (speed * exp(i*direction)) in antenna convention for GMF inversion"

-
-
-    )
-
+    # Add ancillary metadata to model variables
+
+    for attr_key, attr_value in ancillary_metadata.items():
+        for var_name in ['model_U10', 'model_V10', 'ancillary_wind_speed', 'ancillary_wind_direction', 'ancillary_wind']:
+            if var_name in xr_dataset:
+                xr_dataset[var_name].attrs[attr_key] = attr_value
+
+        xr_dataset.attrs[attr_key] = attr_value

     # nrcs processing
+    # Keep ocean (0) and coastal (1) zones, mask out land (2) and ice (3)
     xr_dataset["sigma0_ocean"] = xr.where(
-        xr_dataset["mask"], np.nan, xr_dataset["sigma0"]
+        xr_dataset["mask"] >= 2, np.nan, xr_dataset["sigma0"]
     ).transpose(*xr_dataset["sigma0"].dims)
     xr_dataset["sigma0_ocean"].attrs = xr_dataset["sigma0"].attrs
     # we forced it to 1e-15
```
```diff
@@ -1131,19 +1372,21 @@ def preprocess(
         "comment"
     ] = "clipped, no values <=0 ; 1e-15 instread"

-
+    xr_dataset["sigma0_ocean"] = xr.where(
+        xr_dataset["sigma0_ocean"] <= 0, 1e-15, xr_dataset["sigma0_ocean"]
+    )
+
+    # add a mask for values <=0:
     xr_dataset["sigma0_mask"] = xr.where(
         xr_dataset["sigma0_ocean"] <= 0, 1, 0
     ).transpose(*xr_dataset["sigma0"].dims)
     xr_dataset.sigma0_mask.attrs["valid_range"] = np.array([0, 1])
     xr_dataset.sigma0_mask.attrs["flag_values"] = np.array([0, 1])
     xr_dataset.sigma0_mask.attrs["flag_meanings"] = "valid no_valid"
-    xr_dataset["sigma0_ocean"] = xr.where(
-        xr_dataset["sigma0_ocean"] <= 0, 1e-15, xr_dataset["sigma0_ocean"]
-    )

+    # Keep ocean (0) and coastal (1) zones for sigma0_ocean_raw too
     xr_dataset["sigma0_ocean_raw"] = xr.where(
-        xr_dataset["mask"], np.nan, xr_dataset["sigma0_raw"]
+        xr_dataset["mask"] >= 2, np.nan, xr_dataset["sigma0_raw"]
     ).transpose(*xr_dataset["sigma0_raw"].dims)

     xr_dataset["sigma0_ocean_raw"].attrs = xr_dataset["sigma0_raw"].attrs
```
```diff
@@ -1154,12 +1397,26 @@ def preprocess(

     # processing
     if dual_pol:
-
         xr_dataset['sigma0_detrend_cross'] = xsarsea.sigma0_detrend(
             xr_dataset.sigma0.sel(pol=crosspol), xr_dataset.incidence, model=model_cross)

-
-
+        try:
+            xr_dataset = xr_dataset.assign(nesz_cross_flattened=(
+                ['line', 'sample'], windspeed.nesz_flattening(xr_dataset.nesz.sel(pol=crosspol), xr_dataset.incidence).data))
+        except Exception as e:
+            if apply_flattening:
+                # error
+                logging.error("Error during NESZ flattening computation")
+                logging.info("%s", traceback.format_exc())
+                raise e
+            else:
+                # replace with nans
+                logging.warning("nesz_flattening warning => Error during NESZ flattening computation, but apply_flattening is False, \
+                    so continuing without nesz_cross_flattened and replace with NaNs\n \
+                    The error comes probably from NaN in incidence angle")
+                config["return_status"] = 99
+                xr_dataset = xr_dataset.assign(nesz_cross_flattened=(
+                    ['line', 'sample'], np.full(xr_dataset.nesz.sel(pol=crosspol).shape, np.nan)))

         xr_dataset['nesz_cross_flattened'].attrs[
             "comment"] = 'nesz has been flattened using windspeed.nesz_flattening'
```
```diff
@@ -1216,7 +1473,7 @@ def preprocess(

     for idx, gmf_name in enumerate(gmf_names):

-        @timing(logger=
+        @timing(logger=root_logger.info)
         def apply_lut_to_dataset():
             lut = xsarsea.windspeed.get_model(
                 gmf_name).to_lut(unit="linear")
```
```diff
@@ -1307,13 +1564,13 @@ def process_gradients(xr_dataset, config):

     xr_dataset_100["sigma0_detrend"] = sigma0_detrend_combined

-
-
-
-
-    )
+    # Process land mask with coastal zone detection (3-level system)
+    processLandMask(xr_dataset_100, dilation_iterations=3)
+
+    # Mask sigma0_detrend where land_mask >= 2 (land and ice)
+    # Keep ocean (0) and coastal (1) zones
     xr_dataset_100["sigma0_detrend"] = xr.where(
-        xr_dataset_100["land_mask"], np.nan, xr_dataset_100["sigma0"]
+        xr_dataset_100["land_mask"] >= 2, np.nan, xr_dataset_100["sigma0"]
     ).transpose(*xr_dataset_100["sigma0"].dims)

     xr_dataset_100["ancillary_wind"] = (
```
```diff
@@ -1349,15 +1606,15 @@ def process_gradients(xr_dataset, config):
             }
         )
     else:
-
-            "'longitude' not found in streaks_indiv : there is probably an error"
+        root_logger.warning(
+            "process_gradients warning : 'longitude' not found in streaks_indiv : there is probably an error"
         )
         xr_dataset_streaks = None

     return xr_dataset, xr_dataset_streaks


-@timing(logger=
+@timing(logger=root_logger.info)
 def makeL2(
     filename, outdir, config_path, overwrite=False, generateCSV=True, resolution="1000m"
 ):
```
```diff
@@ -1392,6 +1649,19 @@ def makeL2(
         filename, outdir, config_path, overwrite, resolution
     )

+    # Drop only masks added from config (not internal masks like sigma0_mask, owiMask_Nrcs)
+    masks_by_category = config.get("masks_by_category", {})
+    masks_to_drop = []
+    for category, mask_list in masks_by_category.items():
+        masks_to_drop.extend(mask_list)
+
+    # Only drop masks that actually exist in the dataset (with XSAR suffix)
+    vars_to_drop = [
+        m+XSAR_MASK_SUFFIX for m in masks_to_drop if (m+XSAR_MASK_SUFFIX) in xr_dataset.data_vars]
+    if vars_to_drop:
+        logging.info(f"Dropping external masks of dataset: {vars_to_drop}")
+        xr_dataset = xr_dataset.drop_vars(vars_to_drop)
+
     if config["add_gradientsfeatures"]:
         xr_dataset, xr_dataset_streaks = process_gradients(xr_dataset, config)
     else:
```
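The cleanup above drops only the shapefile-derived layers registered by `addMasks_toMeta`, re-deriving their dataset names by appending `XSAR_MASK_SUFFIX`; internal masks such as `sigma0_mask` are untouched. A minimal sketch of the name derivation, using the docstring's example names and a hypothetical variable set:

```python
XSAR_MASK_SUFFIX = "_mask"
masks_by_category = {"land": ["gshhsH", "custom_land"], "ice": ["ice_mask"]}
data_vars = {"gshhsH_mask", "ice_mask_mask", "sigma0_mask"}  # hypothetical

masks_to_drop = [m for names in masks_by_category.values() for m in names]
vars_to_drop = [m + XSAR_MASK_SUFFIX for m in masks_to_drop
                if m + XSAR_MASK_SUFFIX in data_vars]
print(vars_to_drop)  # ['gshhsH_mask', 'ice_mask_mask'] ; sigma0_mask is kept
```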
```diff
@@ -1429,11 +1699,15 @@ def makeL2(
         "resolution": config.pop("resolution", None),
     }

+    config["return_status"] = 0  # default value SUCCESS
     logging.info("Checking incidence range within LUTS incidence range")
-    # warning if incidence is out of lut incidence range
     inc_check_co, inc_check_cross = check_incidence_range(
         xr_dataset["incidence"], [model_co, model_cross], **kwargs
     )
+
+    if not inc_check_co or not inc_check_cross:
+        config["return_status"] = 99
+
     if dsig_cr_step == "nrcs":
         logging.info(
             "dsig_cr_step is nrcs : polarization are mixed at cost function step")
```
```diff
@@ -1603,13 +1877,15 @@ def makeL2(
         "wnf_3km_average": "False",
         "owiWindSpeedSrc": "owiWindSpeed",
         "owiWindDirectionSrc": "/",
-        "
+        "ancillary_source_model": xr_dataset.attrs["ancillary_source_model"],
+        "ancillary_source_path": xr_dataset.attrs["ancillary_source_path"],
         "winddir_convention": config["winddir_convention"],
         "incidence_within_lut_copol_incidence_range": str(inc_check_co),
         "incidence_within_lut_crosspol_incidence_range": str(inc_check_cross),
         "swath": xr_dataset.attrs["swath"],
         "footprint": xr_dataset.attrs["footprint"],
         "coverage": xr_dataset.attrs["coverage"],
+        "cross_antimeridian": str(config["meta"].cross_antimeridian)
     }

     for recalib_attrs in ["aux_pp1_recal", "aux_pp1", "aux_cal_recal", "aux_cal"]:
```
```diff
@@ -1667,7 +1943,10 @@ def makeL2(

     logging.info("OK for %s ", os.path.basename(filename))

-
+    if config["add_gradientsfeatures"] and xr_dataset_streaks is None:
+        config["return_status"] = 99
+
+    return out_file, xr_dataset, config["return_status"]


 def transform_winddir(wind_cpx, ground_heading, winddir_convention="meteorological"):
```
```diff
@@ -1707,6 +1986,7 @@ def transform_winddir(wind_cpx, ground_heading, winddir_convention="meteorological"):
         logging.warning(
             f"wind direction convention {winddir_convention} is not supported, using meteorological",
         )
+
     long_name = "Wind direction in meteorological convention (clockwise, from), ex: 0°=from north, 90°=from east"

     dataArray = xsarsea.dir_to_360(dataArray)
```
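For reference, the two wind-direction conventions handled by `transform_winddir` differ by a 180° shift: meteorological (the fallback above) reports where the wind comes from, while the oceanographic convention reports where it blows to. A minimal numpy sketch of that relationship (plain numpy, not the `xsarsea.dir_to_360` call):

```python
import numpy as np

met = np.array([0.0, 90.0, 200.0])  # meteorological: direction wind comes FROM
oce = (met + 180) % 360             # oceanographic: direction wind blows TO
print(oce)                          # [180. 270.  20.]
```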