sarkit_convert-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1589 @@
1
+ """
2
+ =====================
3
+ Sentinel SAFE to SICD
4
+ =====================
5
+
6
+ Convert the complex image(s) in a Sentinel SAFE product to SICD(s).
7
+
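+ Example invocation (the console-script name and paths are illustrative only):
+
+ sentinel_safe_to_sicd <SAFE_product_folder> UNCLASSIFIED "out_{swath}_{burst}_{pol}.nitf"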
8
+ """
9
+
10
+ import argparse
+ import datetime
11
+ import pathlib
12
+
13
+ import dateutil.parser
14
+ import lxml.builder
15
+ import lxml.etree as et
16
+ import numpy as np
17
+ import numpy.linalg as npl
18
+ import numpy.polynomial.polynomial as npp
19
+ import sarkit.sicd as sksicd
20
+ import sarkit.verification
21
+ import sarkit.wgs84
22
+ import scipy.interpolate
23
+ import scipy.signal
24
+ from sarkit import _constants
25
+ from tifffile import tifffile
26
+
27
+ from sarkit_convert import __version__
28
+ from sarkit_convert import _utils as utils
29
+
30
+ NSMAP = {
31
+ "sicd": "urn:SICD:1.4.0",
32
+ }
33
+
34
+
35
+ def _get_file_sets(safe_product_folder, root_node):
36
+ """Extracts paths for measurement and metadata files from a Sentinel manifest.safe file."""
37
+
38
+ def _get_file_location(root_node, schema_type, fids):
39
+ if isinstance(fids, str):
40
+ fids = [
41
+ fids,
42
+ ]
43
+ for tid in fids:
44
+ data_object = root_node.find(
45
+ f"dataObjectSection/dataObject[@repID='{schema_type}']/[@ID='{tid}']",
46
+ )
47
+ if data_object is None:
48
+ continue
49
+ return (
50
+ safe_product_folder
51
+ / data_object.find("./byteStream/fileLocation").attrib["href"]
52
+ )
53
+ return None
54
+
55
+ files = []
56
+ for mdu in root_node.findall(
57
+ "./informationPackageMap/{*}contentUnit/{*}contentUnit/[@repID='s1Level1MeasurementSchema']",
58
+ ):
59
+ # get the data file for this measurement
60
+ fnames = {
61
+ "data": _get_file_location(
62
+ root_node,
63
+ "s1Level1MeasurementSchema",
64
+ mdu.find("dataObjectPointer").attrib["dataObjectID"],
65
+ ),
66
+ }
67
+ # get the ids for product, noise, and calibration associated with this measurement data unit
68
+ ids = mdu.attrib["dmdID"].split()
69
+ # translate these ids to data object ids (i.e., file ids) for the data files
70
+ fids = [
71
+ root_node.find(
72
+ f"./metadataSection/metadataObject[@ID='{did}']/dataObjectPointer"
73
+ ).attrib["dataObjectID"]
74
+ for did in ids
75
+ ]
76
+ # NB: there is (at most) one of these per measurement data unit
77
+ fnames["product"] = _get_file_location(root_node, "s1Level1ProductSchema", fids)
78
+ fnames["noise"] = _get_file_location(root_node, "s1Level1NoiseSchema", fids)
79
+ fnames["calibration"] = _get_file_location(
80
+ root_node, "s1Level1CalibrationSchema", fids
81
+ )
82
+ files.append(fnames)
83
+ return files
84
+
85
+
86
+ def _get_slice(product_root_node):
87
+ slice_number = product_root_node.find(
88
+ "./imageAnnotation/imageInformation/sliceNumber"
89
+ )
90
+ if slice_number is None:
91
+ return "0"
92
+ else:
93
+ return slice_number.text
94
+
95
+
96
+ def _get_swath(product_root_node):
97
+ return product_root_node.find("./adsHeader/swath").text
98
+
99
+
100
+ def _compute_arp_poly_coefs(root_node, start):
101
+ orbit_list = root_node.findall("./generalAnnotation/orbitList/orbit")
102
+ shp = (len(orbit_list),)
103
+ t_s = np.empty(shp, dtype=np.float64)
104
+ x_s = np.empty(shp, dtype=np.float64)
105
+ y_s = np.empty(shp, dtype=np.float64)
106
+ z_s = np.empty(shp, dtype=np.float64)
107
+ for j, orbit in enumerate(orbit_list):
108
+ t_s[j] = (
109
+ dateutil.parser.parse(orbit.find("./time").text) - start
110
+ ).total_seconds()
111
+ x_s[j] = float(orbit.find("./position/x").text)
112
+ y_s[j] = float(orbit.find("./position/y").text)
113
+ z_s[j] = float(orbit.find("./position/z").text)
114
+
115
+ poly_order = min(5, t_s.size - 1)
116
+ p_x = npp.polyfit(t_s, x_s, poly_order)
117
+ p_y = npp.polyfit(t_s, y_s, poly_order)
118
+ p_z = npp.polyfit(t_s, z_s, poly_order)
119
+ return np.stack((p_x, p_y, p_z))
120
+
121
+
122
+ def _collect_base_info(root_node):
123
+ # Collection Info
124
+ base_info = dict()
125
+ platform = root_node.find(
126
+ "./metadataSection/metadataObject[@ID='platform']/metadataWrap/xmlData/{*}platform",
127
+ )
128
+ base_info["collector_name"] = (
129
+ platform.find("{*}familyName").text + platform.find("{*}number").text
130
+ )
131
+ base_info["collect_type"] = "MONOSTATIC"
132
+ mode_id = platform.find(
133
+ "./{*}instrument/{*}extension/{*}instrumentMode/{*}mode"
134
+ ).text
135
+ if mode_id == "SM":
136
+ base_info["mode_type"] = "STRIPMAP"
137
+ else:
138
+ # TOPSAR - closest SICD analog is Dynamic Stripmap
139
+ base_info["mode_type"] = "DYNAMIC STRIPMAP"
140
+
141
+ # Image Creation
142
+ processing = root_node.find(
143
+ "./metadataSection/metadataObject[@ID='processing']/metadataWrap/xmlData/{*}processing",
144
+ )
145
+ facility = processing.find("{*}facility")
146
+ software = facility.find("{*}software")
147
+ base_info["creation_application"] = (
148
+ f"{software.attrib['name']} {software.attrib['version']}"
149
+ )
150
+ base_info["creation_date_time"] = dateutil.parser.parse(processing.attrib["stop"])
151
+ base_info["creation_site"] = (
152
+ f"{facility.attrib['name']}, {facility.attrib['site']}, {facility.attrib['country']}"
153
+ )
154
+
155
+ # Radar Collection
156
+ polarizations = root_node.findall(
157
+ "./metadataSection/metadataObject[@ID='generalProductInformation']/metadataWrap/xmlData/{*}standAloneProductInformation/{*}transmitterReceiverPolarisation",
158
+ )
159
+
160
+ base_info["tx_rcv_polarization"] = []
161
+ for pol in polarizations:
162
+ base_info["tx_rcv_polarization"].append(f"{pol.text[0]}:{pol.text[1]}")
163
+
164
+ return base_info
165
+
166
+
167
+ def _collect_swath_info(product_root_node):
168
+ swath_info = dict()
169
+ burst_list = product_root_node.findall("./swathTiming/burstList/burst")
170
+
171
+ # Collection Info
172
+ swath_info["collector_name"] = product_root_node.find("./adsHeader/missionId").text
173
+ swath_info["mode_id"] = product_root_node.find("./adsHeader/mode").text
174
+ t_slice = _get_slice(product_root_node)
175
+ swath = _get_swath(product_root_node)
176
+ swath_info["parameters"] = {
177
+ "SLICE": t_slice,
178
+ "SWATH": swath,
179
+ "ORBIT_SOURCE": "SLC_INTERNAL",
180
+ }
181
+
182
+ # Radar Collection
183
+ center_frequency = float(
184
+ product_root_node.find(
185
+ "./generalAnnotation/productInformation/radarFrequency"
186
+ ).text
187
+ )
188
+ swath_info["tx_freq_start"] = center_frequency + float(
189
+ product_root_node.find(
190
+ "./generalAnnotation/downlinkInformationList/downlinkInformation/downlinkValues/txPulseStartFrequency"
191
+ ).text
192
+ )
193
+ swath_info["tx_pulse_length"] = float(
194
+ product_root_node.find(
195
+ "./generalAnnotation/downlinkInformationList/downlinkInformation/downlinkValues/txPulseLength"
196
+ ).text
197
+ )
198
+ swath_info["tx_fm_rate"] = float(
199
+ product_root_node.find(
200
+ "./generalAnnotation/downlinkInformationList/downlinkInformation/downlinkValues/txPulseRampRate"
201
+ ).text
202
+ )
203
+ swath_info["tx_rf_bw"] = swath_info["tx_pulse_length"] * swath_info["tx_fm_rate"]
204
+ pol = product_root_node.find("./adsHeader/polarisation").text
205
+ swath_info["tx_polarization"] = pol[0]
206
+ swath_info["rcv_polarization"] = pol[1]
207
+ swath_info["tx_freq"] = (
208
+ swath_info["tx_freq_start"],
209
+ swath_info["tx_freq_start"] + swath_info["tx_rf_bw"],
210
+ )
211
+ swath_info["adc_sample_rate"] = float(
212
+ product_root_node.find(
213
+ "./generalAnnotation/productInformation/rangeSamplingRate"
214
+ ).text
215
+ )
216
+ swl_list = product_root_node.findall(
217
+ "./generalAnnotation/downlinkInformationList/downlinkInformation/downlinkValues/swlList/swl"
218
+ )
219
+ swath_info["rcv_window_length"] = [
220
+ float(swl.find("./value").text) for swl in swl_list
221
+ ]
222
+
223
+ # Timeline
224
+ swath_info["prf"] = float(
225
+ product_root_node.find(
226
+ "./generalAnnotation/downlinkInformationList/downlinkInformation/prf"
227
+ ).text
228
+ )
229
+ swath_info["t_start"] = 0
230
+ swath_info["ipp_start"] = 0
231
+ swath_info["ipp_poly"] = (0, swath_info["prf"])
232
+
233
+ # Image Formation
234
+ swath_info["tx_rcv_polarization_proc"] = (
235
+ f"{swath_info['tx_polarization']}:{swath_info['rcv_polarization']}"
236
+ )
237
+ swath_info["image_form_algo"] = "RMA"
238
+ swath_info["t_start_proc"] = 0
239
+ swath_info["tx_freq_proc"] = swath_info["tx_freq"]
240
+ swath_info["image_beam_comp"] = "SV"
241
+ swath_info["az_autofocus"] = "NO"
242
+ swath_info["rg_autofocus"] = "NO"
243
+ swath_info["st_beam_comp"] = "GLOBAL" if swath_info["mode_id"][0] == "S" else "SV"
244
+
245
+ # Image Data
246
+ pixel_value = product_root_node.find(
247
+ "./imageAnnotation/imageInformation/pixelValue"
248
+ ).text
249
+ output_pixels = product_root_node.find(
250
+ "./imageAnnotation/imageInformation/outputPixels"
251
+ ).text
252
+ if pixel_value == "Complex" and output_pixels == "16 bit Signed Integer":
253
+ swath_info["pixel_type"] = "RE16I_IM16I"
254
+ else:
255
+ raise ValueError(
256
+ f"SLC data should be 16-bit complex, got pixelValue = {pixel_value} and outputPixels = {output_pixels}."
257
+ )
258
+ if len(burst_list) > 0:
259
+ # TOPSAR
260
+ swath_info["num_rows"] = int(
261
+ product_root_node.find("./swathTiming/samplesPerBurst").text
262
+ )
263
+ swath_info["num_cols"] = int(
264
+ product_root_node.find("./swathTiming/linesPerBurst").text
265
+ )
266
+ else:
267
+ # STRIPMAP
268
+ swath_info["num_rows"] = int(
269
+ product_root_node.find(
270
+ "./imageAnnotation/imageInformation/numberOfSamples"
271
+ ).text
272
+ )
273
+ swath_info["num_cols"] = int(
274
+ product_root_node.find(
275
+ "./imageAnnotation/imageInformation/numberOfLines"
276
+ ).text
277
+ )
278
+ swath_info["first_row"] = 0
279
+ swath_info["first_col"] = 0
280
+ swath_info["scp_pixel"] = (
281
+ (swath_info["num_rows"] - 1) // 2,
282
+ (swath_info["num_cols"] - 1) // 2,
283
+ )
284
+
285
+ # RMA
286
+ swath_info["freq_zero"] = center_frequency
287
+ swath_info["dop_centroid_coa"] = "true"
288
+ tau_0 = float(
289
+ product_root_node.find("./imageAnnotation/imageInformation/slantRangeTime").text
290
+ )
291
+ delta_tau_s = 1.0 / float(
292
+ product_root_node.find(
293
+ "./generalAnnotation/productInformation/rangeSamplingRate"
294
+ ).text
295
+ )
296
+ swath_info["r_ca_scp"] = (0.5 * _constants.speed_of_light) * (
297
+ tau_0 + swath_info["scp_pixel"][0] * delta_tau_s
298
+ )
299
+ swath_info["rm_algo_type"] = "RG_DOP"
300
+ swath_info["image_type"] = "INCA"
301
+
302
+ # Grid
303
+ swath_info["image_plane"] = (
304
+ "SLANT"
305
+ if product_root_node.find(
306
+ "./generalAnnotation/productInformation/projection"
307
+ ).text
308
+ == "Slant Range"
309
+ else None
310
+ )
311
+ swath_info["grid_type"] = "RGZERO"
312
+ range_proc = product_root_node.find(
313
+ "./imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/rangeProcessing"
314
+ )
315
+ swath_info["row_window_name"] = range_proc.find("./windowType").text.upper()
316
+ swath_info["row_params"] = range_proc.find("./windowCoefficient").text
317
+ if swath_info["row_window_name"] == "HAMMING":
318
+ swath_info["row_wgts"] = scipy.signal.windows.general_hamming(
319
+ 512, float(swath_info["row_params"]), sym=True
320
+ )
321
+ elif swath_info["row_window_name"] == "KAISER":
322
+ swath_info["row_wgts"] = scipy.signal.windows.kaiser(
323
+ 512, float(swath_info["row_params"]), sym=True
324
+ )
325
+ else: # Default to UNIFORM
326
+ swath_info["row_window_name"] = "UNIFORM"
327
+ swath_info["row_params"] = None
328
+ swath_info["row_wgts"] = np.ones(256)
329
+
330
+ swath_info["row_ss"] = (_constants.speed_of_light / 2) * delta_tau_s
331
+ swath_info["row_sgn"] = -1
332
+ swath_info["row_kctr"] = 2 * center_frequency / _constants.speed_of_light
333
+ swath_info["row_imp_res_bw"] = (
334
+ 2.0
335
+ * float(range_proc.find("./processingBandwidth").text)
336
+ / _constants.speed_of_light
337
+ )
338
+ swath_info["row_deltak_coa_poly"] = np.array([[0]])
339
+
340
+ az_proc = product_root_node.find(
341
+ "./imageAnnotation/processingInformation/swathProcParamsList/swathProcParams/azimuthProcessing"
342
+ )
343
+ swath_info["col_ss"] = float(
344
+ product_root_node.find(
345
+ "./imageAnnotation/imageInformation/azimuthPixelSpacing"
346
+ ).text
347
+ )
348
+ dop_bw = float(az_proc.find("./processingBandwidth").text)
349
+ swath_info["ss_zd_s"] = float(
350
+ product_root_node.find(
351
+ "./imageAnnotation/imageInformation/azimuthTimeInterval"
352
+ ).text
353
+ )
354
+
355
+ swath_info["col_window_name"] = az_proc.find("./windowType").text.upper()
356
+ swath_info["col_params"] = az_proc.find("./windowCoefficient").text
357
+ if swath_info["col_window_name"] == "HAMMING":
358
+ swath_info["col_wgts"] = scipy.signal.windows.general_hamming(
359
+ 512, float(swath_info["col_params"]), sym=True
360
+ )
361
+ elif swath_info["col_window_name"] == "KAISER":
362
+ swath_info["col_wgts"] = scipy.signal.windows.kaiser(
363
+ 512, float(swath_info["col_params"]), sym=True
364
+ )
365
+ else: # Default to UNIFORM
366
+ swath_info["col_window_name"] = "UNIFORM"
367
+ swath_info["col_params"] = None
368
+ swath_info["col_wgts"] = np.ones(256)
369
+
370
+ swath_info["col_sgn"] = -1
371
+ swath_info["col_kctr"] = 0.0
372
+ swath_info["col_imp_res_bw"] = dop_bw * swath_info["ss_zd_s"] / swath_info["col_ss"]
373
+
374
+ row_broadening_factor = utils.broadening_from_amp(swath_info["row_wgts"])
375
+ col_broadening_factor = utils.broadening_from_amp(swath_info["col_wgts"])
376
+ swath_info["row_imp_res_wid"] = row_broadening_factor / swath_info["row_imp_res_bw"]
377
+ swath_info["col_imp_res_wid"] = col_broadening_factor / swath_info["col_imp_res_bw"]
378
+
379
+ return swath_info
380
+
381
+
382
+ def _collect_burst_info(product_root_node, swath_info):
383
+ burst_list = product_root_node.findall("./swathTiming/burstList/burst")
384
+ # parse the geolocation information - for SCP calculation
385
+ geo_grid_point_list = product_root_node.findall(
386
+ "./geolocationGrid/geolocationGridPointList/geolocationGridPoint"
387
+ )
388
+ geo_pixels = np.zeros((len(geo_grid_point_list), 2), dtype=np.float64)
389
+ geo_coords_llh = np.zeros((len(geo_grid_point_list), 3), dtype=np.float64)
390
+ for i, grid_point in enumerate(geo_grid_point_list):
391
+ geo_pixels[i, :] = (
392
+ float(grid_point.find("./pixel").text),
393
+ float(grid_point.find("./line").text),
394
+ )
395
+ geo_coords_llh[i, :] = (
396
+ float(grid_point.find("./latitude").text),
397
+ float(grid_point.find("./longitude").text),
398
+ float(grid_point.find("./height").text),
399
+ )
400
+ geo_coords_ecf = sarkit.wgs84.geodetic_to_cartesian(geo_coords_llh)
401
+
402
+ def _shift(coefs, t_0: float, alpha: float = 1):
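+ # NB: given ascending coefficients of p, this returns the coefficients of
+ # q(x) = p(alpha * x - t_0); e.g. coefficients [0., 1.] (p(t) = t) with
+ # t_0 = 2 become [-2., 1.], i.e. q(x) = x - 2.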
403
+ # prepare array workspace
404
+ out = np.copy(coefs)
405
+ if t_0 != 0 and out.size > 1:
406
+ siz = out.size
407
+ for i in range(siz):
408
+ index = siz - i - 1
409
+ if i > 0:
410
+ out[index : siz - 1] -= t_0 * out[index + 1 : siz]
411
+
412
+ if alpha != 1 and out.size > 1:
413
+ out *= np.power(alpha, np.arange(out.size))
414
+
415
+ return out
416
+
417
+ def _calc_deltaks(x_coords, y_coords, deltak_coa_poly, imp_resp_bw, spacing):
418
+ """Calculate the minimum and maximum DeltaK values"""
419
+ deltaks = npp.polyval2d(x_coords, y_coords, deltak_coa_poly)
420
+ min_deltak = np.amin(deltaks) - 0.5 * imp_resp_bw
421
+ max_deltak = np.amax(deltaks) + 0.5 * imp_resp_bw
422
+
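+ # If the DeltaK support would exceed the +/- Nyquist limit of the sample
+ # spacing (0.5 cycles/sample), the spectrum wraps; fall back to the full extent.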
423
+ if (min_deltak < -0.5 / abs(spacing)) or (max_deltak > 0.5 / abs(spacing)):
424
+ min_deltak = -0.5 / abs(spacing)
425
+ max_deltak = -min_deltak
426
+
427
+ return min_deltak, max_deltak
428
+
429
+ def _get_scps(swath_info, count):
430
+ # SCPPixel - points at which to interpolate geo_pixels & geo_coords data
431
+ num_rows = swath_info["num_rows"]
432
+ num_cols = swath_info["num_cols"]
433
+ scp_pixels = np.zeros((count, 2), dtype=np.float64)
434
+ scp_pixels[:, 0] = int((num_rows - 1) / 2.0)
435
+ scp_pixels[:, 1] = int((num_cols - 1) / 2.0) + num_cols * (
436
+ np.arange(count, dtype=np.float64)
437
+ )
438
+ scps = np.zeros((count, 3), dtype=np.float64)
439
+
440
+ for j in range(3):
441
+ scps[:, j] = scipy.interpolate.griddata(
442
+ geo_pixels, geo_coords_ecf[:, j], scp_pixels
443
+ )
444
+ return scps
445
+
446
+ def _calc_rma_and_grid_info(
447
+ swath_info, burst_info, first_line_relative_start, start
448
+ ):
449
+ # set TimeCAPoly
450
+ scp_pixel = swath_info["scp_pixel"]
451
+ eta_mid = swath_info["ss_zd_s"] * scp_pixel[1]
452
+ row_ss = swath_info["row_ss"]
453
+ col_ss = swath_info["col_ss"]
454
+
455
+ time_ca_poly_coefs = [
456
+ first_line_relative_start + eta_mid,
457
+ swath_info["ss_zd_s"] / col_ss,
458
+ ]
459
+ burst_info["time_ca_poly_coefs"] = time_ca_poly_coefs
460
+ r_ca_scp = swath_info["r_ca_scp"]
461
+ range_time_scp = r_ca_scp * 2 / _constants.speed_of_light
462
+ # get velocity polynomial
463
+ arp_poly = burst_info["arp_poly_coefs"]
464
+ vel_poly = npp.polyder(arp_poly)
465
+ # We pick a single velocity magnitude at closest approach to represent
466
+ # the entire burst. This is valid, since the magnitude of the velocity
467
+ # changes very little.
468
+
469
+ vm_ca = np.linalg.norm(npp.polyval(time_ca_poly_coefs[0], vel_poly))
470
+ azimuth_fm_rate_list = product_root_node.findall(
471
+ "./generalAnnotation/azimuthFmRateList/azimuthFmRate"
472
+ )
473
+ shp = (len(azimuth_fm_rate_list),)
474
+ az_rate_times = np.empty(shp, dtype=np.float64)
475
+ az_rate_t0 = np.empty(shp, dtype=np.float64)
476
+ k_a_poly = []
477
+ for j, az_fm_rate in enumerate(azimuth_fm_rate_list):
478
+ az_rate_times[j] = (
479
+ dateutil.parser.parse(az_fm_rate.find("./azimuthTime").text) - start
480
+ ).total_seconds()
481
+ az_rate_t0[j] = float(az_fm_rate.find("./t0").text)
482
+ if az_fm_rate.find("c0") is not None:
483
+ k_a_poly.append(
484
+ np.array(
485
+ [
486
+ float(az_fm_rate.find("./c0").text),
487
+ float(az_fm_rate.find("./c1").text),
488
+ float(az_fm_rate.find("./c2").text),
489
+ ],
490
+ dtype=np.float64,
491
+ )
492
+ )
493
+ else:
494
+ k_a_poly.append(
495
+ np.fromstring(
496
+ az_fm_rate.find("./azimuthFmRatePolynomial").text, sep=" "
497
+ )
498
+ )
499
+
500
+ # find the closest fm rate polynomial
501
+ az_rate_poly_ind = int(np.argmin(np.abs(az_rate_times - time_ca_poly_coefs[0])))
502
+ az_rate_poly_coefs = k_a_poly[az_rate_poly_ind]
503
+ dr_ca_poly = _shift(
504
+ az_rate_poly_coefs,
505
+ t_0=az_rate_t0[az_rate_poly_ind] - range_time_scp,
506
+ alpha=2 / _constants.speed_of_light,
507
+ )
508
+ r_ca = np.array([r_ca_scp, 1], dtype=np.float64)
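+ # SICD DRateSFPoly: the convolution multiplies the range-shifted azimuth
+ # FM-rate polynomial k_a(rg) by R_CA(rg) = r_ca_scp + rg, giving
+ # DRSF(rg) = -k_a(rg) * R_CA(rg) * c / (2 * fc * |V_CA|^2).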
509
+ burst_info["drsf_poly_coefs"] = np.reshape(
510
+ -np.convolve(dr_ca_poly, r_ca)
511
+ * (
512
+ _constants.speed_of_light
513
+ / (2 * swath_info["freq_zero"] * vm_ca * vm_ca)
514
+ ),
515
+ (-1, 1),
516
+ )
517
+
518
+ # Doppler Centroid
519
+ dc_estimate_list = product_root_node.findall(
520
+ "./dopplerCentroid/dcEstimateList/dcEstimate"
521
+ )
522
+ shp = (len(dc_estimate_list),)
523
+ dc_est_times = np.empty(shp, dtype=np.float64)
524
+ dc_t0 = np.empty(shp, dtype=np.float64)
525
+ data_dc_poly = []
526
+ for j, dc_estimate in enumerate(dc_estimate_list):
527
+ dc_est_times[j] = (
528
+ dateutil.parser.parse(dc_estimate.find("./azimuthTime").text) - start
529
+ ).total_seconds()
530
+ dc_t0[j] = float(dc_estimate.find("./t0").text)
531
+ data_dc_poly.append(
532
+ np.fromstring(dc_estimate.find("./dataDcPolynomial").text, sep=" ")
533
+ )
534
+ # find the closest doppler centroid polynomial
535
+ dc_poly_ind = int(np.argmin(np.abs(dc_est_times - time_ca_poly_coefs[0])))
536
+ # shift the selected polynomial from its reference point (dc_t0) to a
537
+ # reference point at the SCP range time.
538
+ dc_poly_coefs = data_dc_poly[dc_poly_ind]
539
+ # Fit DeltaKCOAPoly, DopCentroidPoly, and TimeCOAPoly from data
540
+ tau_0 = float(
541
+ product_root_node.find(
542
+ "./imageAnnotation/imageInformation/slantRangeTime"
543
+ ).text
544
+ )
545
+ delta_tau_s = 1.0 / float(
546
+ product_root_node.find(
547
+ "./generalAnnotation/productInformation/rangeSamplingRate"
548
+ ).text
549
+ )
550
+
551
+ # common use for the fitting efforts
552
+ poly_order = 2
553
+ grid_samples = poly_order + 4
554
+ num_rows = swath_info["num_rows"]
555
+ num_cols = swath_info["num_cols"]
556
+ first_row = swath_info["first_row"]
557
+ first_col = swath_info["first_col"]
558
+ cols = np.linspace(0, num_cols - 1, grid_samples, dtype=np.int64)
559
+ rows = np.linspace(0, num_rows - 1, grid_samples, dtype=np.int64)
560
+ coords_az = (cols - scp_pixel[1] + first_col) * col_ss
561
+ coords_rg = (rows - scp_pixel[0] + first_row) * row_ss
562
+ coords_az_2d, coords_rg_2d = np.meshgrid(coords_az, coords_rg)
563
+
564
+ # fit DeltaKCOAPoly
565
+ tau = tau_0 + delta_tau_s * rows
566
+ # Azimuth steering rate (constant, not dependent on burst or range)
567
+ k_psi = np.deg2rad(
568
+ float(
569
+ product_root_node.find(
570
+ "./generalAnnotation/productInformation/azimuthSteeringRate"
571
+ ).text
572
+ )
573
+ )
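+ # TOPS deramping: k_s is the Doppler rate introduced by antenna steering,
+ # k_a the range-dependent azimuth FM rate, and k_t = k_a*k_s/(k_a - k_s)
+ # the Doppler-centroid rate of the focused TOPS data; together they build
+ # the deramp + demodulation phase that is fit below.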
574
+ k_s = vm_ca * swath_info["freq_zero"] * k_psi * 2 / _constants.speed_of_light
575
+ k_a = npp.polyval(tau - az_rate_t0[az_rate_poly_ind], az_rate_poly_coefs)
576
+ k_t = (k_a * k_s) / (k_a - k_s)
577
+ f_eta_c = npp.polyval(tau - dc_t0[dc_poly_ind], dc_poly_coefs)
578
+ eta = (cols - scp_pixel[1]) * swath_info["ss_zd_s"]
579
+ eta_c = -f_eta_c / k_a # Beam center crossing time (TimeCOA)
580
+ eta_ref = eta_c - eta_c[0]
581
+ eta_2d, eta_ref_2d = np.meshgrid(eta, eta_ref)
582
+ eta_arg = eta_2d - eta_ref_2d
583
+ deramp_phase = 0.5 * k_t[:, np.newaxis] * eta_arg * eta_arg
584
+ demod_phase = eta_arg * f_eta_c[:, np.newaxis]
585
+ total_phase = deramp_phase + demod_phase
586
+
587
+ phase = utils.polyfit2d(
588
+ coords_rg_2d.flatten(),
589
+ coords_az_2d.flatten(),
590
+ total_phase.flatten(),
591
+ poly_order,
592
+ poly_order,
593
+ )
594
+
595
+ # DeltaKCOAPoly is derivative of phase in azimuth/Col direction
596
+ burst_info["col_deltak_coa_poly"] = npp.polyder(phase, axis=1)
597
+
598
+ # derive the DopCentroidPoly directly
599
+ burst_info["doppler_centroid_poly_coefs"] = (
600
+ burst_info["col_deltak_coa_poly"] * col_ss / swath_info["ss_zd_s"]
601
+ )
602
+
603
+ # complete deriving the TimeCOAPoly, which depends on the DOPCentroidPoly
604
+ time_ca_sampled = npp.polyval(coords_az_2d, time_ca_poly_coefs)
605
+ doppler_rate_sampled = npp.polyval(coords_rg_2d, dr_ca_poly)
606
+ dop_centroid_sampled = npp.polyval2d(
607
+ coords_rg_2d, coords_az_2d, burst_info["doppler_centroid_poly_coefs"]
608
+ )
609
+ time_coa_sampled = time_ca_sampled + dop_centroid_sampled / doppler_rate_sampled
610
+
611
+ burst_info["time_coa_poly_coefs"] = utils.polyfit2d(
612
+ coords_rg_2d.flatten(),
613
+ coords_az_2d.flatten(),
614
+ time_coa_sampled.flatten(),
615
+ poly_order,
616
+ poly_order,
617
+ )
618
+
619
+ full_img_vertices = np.array(
620
+ [
621
+ [0, 0],
622
+ [0, num_cols - 1],
623
+ [num_rows - 1, num_cols - 1],
624
+ [num_rows - 1, 0],
625
+ ],
626
+ )
627
+ x_coords = row_ss * (full_img_vertices[:, 0] - (scp_pixel[0] - first_row))
628
+ y_coords = col_ss * (full_img_vertices[:, 1] - (scp_pixel[1] - first_col))
629
+
630
+ row_delta_k1, row_delta_k2 = _calc_deltaks(
631
+ x_coords,
632
+ y_coords,
633
+ swath_info["row_deltak_coa_poly"],
634
+ swath_info["row_imp_res_bw"],
635
+ row_ss,
636
+ )
637
+ col_delta_k1, col_delta_k2 = _calc_deltaks(
638
+ x_coords,
639
+ y_coords,
640
+ burst_info["col_deltak_coa_poly"],
641
+ swath_info["col_imp_res_bw"],
642
+ col_ss,
643
+ )
644
+ burst_info["row_delta_k1"] = row_delta_k1
645
+ burst_info["row_delta_k2"] = row_delta_k2
646
+ burst_info["col_delta_k1"] = col_delta_k1
647
+ burst_info["col_delta_k2"] = col_delta_k2
648
+
649
+ return time_coa_sampled.min(), time_coa_sampled.max()
650
+
651
+ def _update_geo_data_info(swath_info, burst_info):
652
+ scp_drsf = burst_info["drsf_poly_coefs"][0, 0]
653
+ scp_tca = burst_info["time_ca_poly_coefs"][0]
654
+ scp_tcoa = burst_info["time_coa_poly_coefs"][0, 0]
655
+ scp_delta_t_coa = scp_tcoa - scp_tca
656
+ scp_varp_ca_mag = npl.norm(
657
+ npp.polyval(scp_tca, npp.polyder(burst_info["arp_poly_coefs"]))
658
+ )
659
+ scp_rcoa = np.sqrt(
660
+ swath_info["r_ca_scp"] ** 2
661
+ + scp_drsf * scp_varp_ca_mag**2 * scp_delta_t_coa**2
662
+ )
663
+ scp_rratecoa = scp_drsf / scp_rcoa * scp_varp_ca_mag**2 * scp_delta_t_coa
664
+ scp_set = sksicd.projection.ProjectionSetsMono(
665
+ t_COA=np.array([scp_tcoa]),
666
+ ARP_COA=np.array([npp.polyval(scp_tcoa, burst_info["arp_poly_coefs"])]),
667
+ VARP_COA=np.array(
668
+ [npp.polyval(scp_tcoa, npp.polyder(burst_info["arp_poly_coefs"]))]
669
+ ),
670
+ R_COA=np.array([scp_rcoa]),
671
+ Rdot_COA=np.array([scp_rratecoa]),
672
+ )
673
+ scp_ecf = sksicd.projection.r_rdot_to_ground_plane_mono(
674
+ -1,
675
+ scp_set,
676
+ sarkit.wgs84.geodetic_to_cartesian(burst_info["init_scp_llh"]),
677
+ sarkit.wgs84.up(burst_info["init_scp_llh"]),
678
+ )[0]
679
+ scp_llh = sarkit.wgs84.cartesian_to_geodetic(scp_ecf)
680
+
681
+ return scp_ecf, scp_llh
682
+
683
+ def _calc_grid_unit_vectors(burst_info):
684
+ # Calc Grid unit vectors based on updated RMA, Position, and GeoData
685
+ scp_tca = burst_info["time_ca_poly_coefs"][0]
686
+ scp_ca_pos = npp.polyval(scp_tca, burst_info["arp_poly_coefs"])
687
+ scp_ca_vel = npp.polyval(scp_tca, npp.polyder(burst_info["arp_poly_coefs"]))
688
+ los = burst_info["scp_ecf"] - scp_ca_pos
689
+ row_uvect_ecf = los / npl.norm(los)
690
+ left = np.cross(scp_ca_pos, scp_ca_vel)
691
+ look = np.sign(np.dot(left, row_uvect_ecf))
692
+ spz = -look * np.cross(row_uvect_ecf, scp_ca_vel)
693
+ uspz = spz / npl.norm(spz)
694
+
695
+ return row_uvect_ecf, np.cross(uspz, row_uvect_ecf)
696
+
697
+ def _finalize_stripmap():
698
+ burst_info = dict()
699
+
700
+ scp = _get_scps(swath_info, 1)
701
+
702
+ burst_info["scp_ecf"] = scp[0, :]
703
+ burst_info["init_scp_llh"] = sarkit.wgs84.cartesian_to_geodetic(scp[0, :])
704
+
705
+ num_rows = swath_info["num_rows"]
706
+ num_cols = swath_info["num_cols"]
707
+ burst_info["valid_data"] = [
708
+ (0, 0),
709
+ (0, num_cols - 1),
710
+ (num_rows - 1, num_cols - 1),
711
+ (num_rows - 1, 0),
712
+ ]
713
+
714
+ start = dateutil.parser.parse(
715
+ product_root_node.find(
716
+ "./generalAnnotation/downlinkInformationList/downlinkInformation/firstLineSensingTime"
717
+ ).text
718
+ )
719
+ stop = dateutil.parser.parse(
720
+ product_root_node.find(
721
+ "./generalAnnotation/downlinkInformationList/downlinkInformation/lastLineSensingTime"
722
+ ).text
723
+ )
724
+ slice = int(_get_slice(product_root_node))
725
+ swath = _get_swath(product_root_node)
726
+ burst_info["core_name"] = (
727
+ f"{start.strftime('%d%b%YT%H%M%S').upper()}_{product_root_node.find('./adsHeader/missionId').text}{product_root_node.find('./adsHeader/missionDataTakeId').text}_{slice:02d}_{swath}_01"
728
+ )
729
+
730
+ burst_info["parameters"] = {"BURST": f"{1:d}"}
731
+ prf = float(
732
+ product_root_node.find(
733
+ "./generalAnnotation/downlinkInformationList/downlinkInformation/prf"
734
+ ).text
735
+ )
736
+ duration = (stop - start).total_seconds()
737
+
738
+ burst_info["collect_start"] = start
739
+ burst_info["collect_duration"] = duration
740
+ burst_info["ipp_set_tend"] = duration
741
+ burst_info["ipp_set_ippend"] = round(duration * prf) - 1
742
+ burst_info["tend_proc"] = duration
743
+
744
+ burst_info["arp_poly_coefs"] = _compute_arp_poly_coefs(
745
+ product_root_node, start
746
+ ).T
747
+
748
+ azimuth_time_first_line = dateutil.parser.parse(
749
+ product_root_node.find(
750
+ "./imageAnnotation/imageInformation/productFirstLineUtcTime"
751
+ ).text
752
+ )
753
+ first_line_relative_start = (azimuth_time_first_line - start).total_seconds()
754
+ _, _ = _calc_rma_and_grid_info(
755
+ swath_info, burst_info, first_line_relative_start, start
756
+ )
757
+ burst_info["scp_ecf"], burst_info["scp_llh"] = _update_geo_data_info(
758
+ swath_info, burst_info
759
+ )
760
+ burst_info["row_uvect_ecf"], burst_info["col_uvect_ecf"] = (
761
+ _calc_grid_unit_vectors(burst_info)
762
+ )
763
+
764
+ return [burst_info]
765
+
766
+ def _finalize_bursts():
767
+ burst_info_list = []
768
+
769
+ scps = _get_scps(swath_info, len(burst_list))
770
+ for j, burst in enumerate(burst_list):
771
+ # set preliminary geodata (required for projection)
772
+ burst_info = dict()
773
+ burst_info["scp_ecf"] = scps[j, :]
774
+ burst_info["init_scp_llh"] = sarkit.wgs84.cartesian_to_geodetic(scps[j, :])
775
+ xml_first_cols = np.fromstring(
776
+ burst.find("./firstValidSample").text, sep=" ", dtype=np.int64
777
+ )
778
+ xml_last_cols = np.fromstring(
779
+ burst.find("./lastValidSample").text, sep=" ", dtype=np.int64
780
+ )
781
+ valid = (xml_first_cols >= 0) & (xml_last_cols >= 0)
782
+ valid_cols = np.arange(xml_first_cols.size, dtype=np.int64)[valid]
783
+ first_row = int(np.min(xml_first_cols[valid]))
784
+ last_row = int(np.max(xml_last_cols[valid]))
785
+ first_col = valid_cols[0]
786
+ last_col = valid_cols[-1]
787
+ burst_info["valid_data"] = [
788
+ (first_row, first_col),
789
+ (first_row, last_col),
790
+ (last_row, last_col),
791
+ (last_row, first_col),
792
+ ]
793
+
794
+ # These are the first and last zero-Doppler times of the columns in the burst,
795
+ # not strictly CollectStart and CollectDuration in SICD (first/last pulse times).
796
+ start = dateutil.parser.parse(burst.find("./azimuthTime").text)
797
+ t_slice = int(_get_slice(product_root_node))
798
+ swath = _get_swath(product_root_node)
799
+ burst_info["core_name"] = (
800
+ f"{start.strftime('%d%b%YT%H%M%S').upper()}_{product_root_node.find('./adsHeader/missionId').text}{product_root_node.find('./adsHeader/missionDataTakeId').text}_{t_slice:02d}_{swath}_{j + 1:02d}"
801
+ )
802
+
803
+ burst_info["parameters"] = {"BURST": f"{j + 1:d}"}
804
+ arp_poly_coefs = _compute_arp_poly_coefs(product_root_node, start)
805
+ burst_info["arp_poly_coefs"] = arp_poly_coefs.T
806
+ early, late = _calc_rma_and_grid_info(swath_info, burst_info, 0, start)
807
+ new_start = start + datetime.timedelta(microseconds=int(early * 1e6))  # keep a datetime so strftime works downstream
808
+ duration = late - early
809
+ prf = float(
810
+ product_root_node.find(
811
+ "./generalAnnotation/downlinkInformationList/downlinkInformation/prf"
812
+ ).text
813
+ )
814
+ burst_info["collect_start"] = new_start
815
+ burst_info["collect_duration"] = duration
816
+ burst_info["ipp_set_tend"] = duration
817
+ burst_info["ipp_set_ippend"] = round(duration * prf) - 1
818
+ burst_info["tend_proc"] = duration
819
+
820
+ # re-reference times to the new collect start (subtract early)
821
+ burst_info["time_coa_poly_coefs"][0, 0] -= early
822
+ burst_info["time_ca_poly_coefs"][0] -= early
823
+
824
+ arp_poly_coefs = np.array(
825
+ [
826
+ _shift(arp_poly_coefs[i], t_0=-early)
827
+ for i in range(len(arp_poly_coefs))
828
+ ]
829
+ )
830
+ burst_info["arp_poly_coefs"] = arp_poly_coefs.T
831
+
832
+ burst_info["scp_ecf"], burst_info["scp_llh"] = _update_geo_data_info(
833
+ swath_info, burst_info
834
+ )
835
+ burst_info["row_uvect_ecf"], burst_info["col_uvect_ecf"] = (
836
+ _calc_grid_unit_vectors(burst_info)
837
+ )
838
+
839
+ burst_info_list.append(burst_info)
840
+
841
+ return burst_info_list
842
+
843
+ if len(burst_list) > 0:
844
+ return _finalize_bursts()
845
+ else:
846
+ return _finalize_stripmap()
847
+
848
+
849
+ def _calc_radiometric_info(cal_file_name, swath_info, burst_info_list):
850
+ """Compute radiometric polys"""
851
+ cal_root_node = et.parse(cal_file_name).getroot()
852
+ cal_vector_list = cal_root_node.findall(
853
+ "./{*}calibrationVectorList/{*}calibrationVector"
854
+ )
855
+ line = np.empty((len(cal_vector_list),), dtype=np.float64)
856
+ pixel, sigma, beta, gamma = [], [], [], []
857
+ for i, cal_vector in enumerate(cal_vector_list):
858
+ line[i] = float(cal_vector.find("./line").text)
859
+ pixel.append(
860
+ np.fromstring(cal_vector.find("./pixel").text, sep=" ", dtype=np.float64)
861
+ )
862
+ sigma.append(
863
+ np.fromstring(
864
+ cal_vector.find("./sigmaNought").text, sep=" ", dtype=np.float64
865
+ )
866
+ )
867
+ beta.append(
868
+ np.fromstring(
869
+ cal_vector.find("./betaNought").text, sep=" ", dtype=np.float64
870
+ )
871
+ )
872
+ gamma.append(
873
+ np.fromstring(cal_vector.find("./gamma").text, sep=" ", dtype=np.float64)
874
+ )
875
+
876
+ lines_per_burst = swath_info["num_cols"]
877
+ pixel = np.array(pixel)
878
+ sigma = np.array(sigma)
879
+ beta = np.array(beta)
880
+ gamma = np.array(gamma)
881
+ # adjust sentinel values for sicd convention (square and invert)
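+ # (Sentinel LUTs are amplitude calibration factors A with, e.g.,
+ # sigma0 = |DN|^2 / A^2, while the SICD SF polynomials multiply pixel
+ # power, so the SF samples are 1 / A^2.)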
882
+ sigma = 1.0 / (sigma * sigma)
883
+ beta = 1.0 / (beta * beta)
884
+ gamma = 1.0 / (gamma * gamma)
885
+
886
+ for idx, burst_info in enumerate(burst_info_list):
887
+ valid_lines = (line >= idx * lines_per_burst) & (
888
+ line < (idx + 1) * lines_per_burst
889
+ )
890
+ valid_count = np.sum(valid_lines)
891
+ if valid_count == 0:
892
+ # this burst contained no useful calibration data
893
+ return
894
+
895
+ first_row = swath_info["first_row"]
896
+ first_col = swath_info["first_col"]
897
+ scp_row = swath_info["scp_pixel"][0]
898
+ scp_col = swath_info["scp_pixel"][1]
899
+ row_ss = swath_info["row_ss"]
900
+ col_ss = swath_info["col_ss"]
901
+ coords_rg = (pixel[valid_lines] + first_row - scp_row) * row_ss
902
+ coords_az = (line[valid_lines] + first_col - scp_col) * col_ss
903
+ # NB: coords_rg = (valid_count, M) and coords_az = (valid_count, )
904
+ coords_az = np.repeat(coords_az, pixel.shape[1])
905
+ if valid_count > 1:
906
+ coords_az = coords_az.reshape((valid_count, -1))
907
+
908
+ burst_info["radiometric"]["sigma_zero_poly_coefs"] = utils.polyfit2d(
909
+ coords_rg.flatten(),
910
+ coords_az.flatten(),
911
+ sigma[valid_lines, :].flatten(),
912
+ 2,
913
+ 2,
914
+ )
915
+ burst_info["radiometric"]["beta_zero_poly_coefs"] = utils.polyfit2d(
916
+ coords_rg.flatten(),
917
+ coords_az.flatten(),
918
+ beta[valid_lines, :].flatten(),
919
+ 2,
920
+ 2,
921
+ )
922
+ burst_info["radiometric"]["gamma_zero_poly_coefs"] = utils.polyfit2d(
923
+ coords_rg.flatten(),
924
+ coords_az.flatten(),
925
+ gamma[valid_lines, :].flatten(),
926
+ 2,
927
+ 2,
928
+ )
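+ # RCSSFPoly scales BetaZeroSFPoly by the impulse-response area; the
+ # 1 + var/mean^2 factors below are the noise-equivalent-bandwidth
+ # broadening of the row/col weighting functions.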
929
+
930
+ range_weight_f = azimuth_weight_f = 1.0
931
+ row_wgt_funct = swath_info["row_wgts"]
932
+ var = np.var(row_wgt_funct)
933
+ mean = np.mean(row_wgt_funct)
934
+ range_weight_f += var / (mean * mean)
935
+
936
+ col_wgt_funct = swath_info["col_wgts"]
937
+ var = np.var(col_wgt_funct)
938
+ mean = np.mean(col_wgt_funct)
939
+ azimuth_weight_f += var / (mean * mean)
940
+ sp_area = (range_weight_f * azimuth_weight_f) / (
941
+ swath_info["row_imp_res_bw"] * swath_info["col_imp_res_bw"]
942
+ )
943
+
944
+ burst_info["radiometric"]["rcs_sf_poly_coefs"] = (
945
+ burst_info["radiometric"]["beta_zero_poly_coefs"] * sp_area
946
+ )
947
+
948
+ return
949
+
950
+
951
+ def _calc_noise_level_info(noise_file_name, swath_info, burst_info_list):
952
+ """Compute noise poly"""
953
+ noise_root_node = et.parse(noise_file_name).getroot()
954
+ mode_id = swath_info["mode_id"]
955
+ lines_per_burst = swath_info["num_cols"]
956
+ range_size_pixels = swath_info["num_rows"]
957
+
958
+ def _extract_vector(stem):
959
+ lines, pixels, noises = [], [], []
960
+ noise_vector_list = noise_root_node.findall(
961
+ f"./{stem:s}VectorList/{stem:s}Vector"
962
+ )
963
+ for i, noise_vector in enumerate(noise_vector_list):
964
+ line = np.fromstring(
965
+ noise_vector.find("./line").text, dtype=np.int64, sep=" "
966
+ )
967
+ # some datasets have noise vectors for negative lines - ignore these
968
+ if np.all(line < 0):
969
+ continue
970
+
971
+ pixel_node = noise_vector.find(
972
+ "./pixel"
973
+ ) # does not exist for azimuth noise
974
+ if pixel_node is not None:
975
+ pixel = np.fromstring(pixel_node.text, dtype=np.int64, sep=" ")
976
+ else:
977
+ pixel = None
978
+ noise = np.fromstring(
979
+ noise_vector.find(f"./{stem}Lut").text, dtype=np.float64, sep=" "
980
+ )
981
+ # some datasets do not have any noise data (all 0's) - skipping these will throw things into disarray
982
+ if not np.all(noise == 0):
983
+ # convert noise to dB - what about -inf values?
984
+ noise = 10 * np.log10(noise)
985
+ assert isinstance(noise, np.ndarray)
986
+
987
+ # do some validity checks
988
+ if (
989
+ (mode_id == "IW")
990
+ and np.any((line % lines_per_burst) != 0)
991
+ and (i != len(noise_vector_list) - 1)
992
+ ):
993
+ # NB: the final burst has different timing
994
+ raise ValueError(
995
+ "Noise file should have one lut per burst, but more are present"
996
+ )
997
+ if (pixel is not None) and (pixel[-1] > range_size_pixels):
998
+ raise ValueError("Noise file has more pixels in LUT than range size")
999
+
1000
+ lines.append(line)
1001
+ pixels.append(pixel)
1002
+ noises.append(noise)
1003
+ return lines, pixels, noises
1004
+
1005
+ # extract noise vectors
1006
+ if noise_root_node.find("./noiseVectorList") is not None:
1007
+ # probably prior to March 2018
1008
+ range_line, range_pixel, range_noise = _extract_vector("noise")
1009
+ else:
1010
+ # noiseRange and noiseAzimuth fields began in March 2018
1011
+ range_line, range_pixel, range_noise = _extract_vector("noiseRange")
1012
+ range_line = np.concatenate(range_line, axis=0)
1013
+
1014
+ if noise_root_node.find("./noiseAzimuthVectorList/noiseAzimuthVector") is not None:
1015
+ azimuth_line, _, azimuth_noise = _extract_vector("noiseAzimuth")
1016
+ azimuth_line = np.concatenate(azimuth_line, axis=0)
1017
+ else:
1018
+ azimuth_line, azimuth_noise = None, None
1019
+
1020
+ rg_poly_order = min(5, range_pixel[0].size - 1)
1021
+ first_row = swath_info["first_row"]
1022
+ first_col = swath_info["first_col"]
1023
+ scp_row = swath_info["scp_pixel"][0]
1024
+ scp_col = swath_info["scp_pixel"][1]
1025
+ row_ss = swath_info["row_ss"]
1026
+ col_ss = swath_info["col_ss"]
1027
+ for idx, burst_info in enumerate(burst_info_list):
1028
+ if mode_id[0] == "S":
1029
+ # STRIPMAP - all LUTs apply
1030
+ az_poly_order = min(4, len(range_line) - 1)
1031
+ coords_rg = (range_pixel[0] + first_row - scp_row) * row_ss
1032
+ coords_az = (range_line + first_col - scp_col) * col_ss
1033
+
1034
+ coords_az_2d, coords_rg_2d = np.meshgrid(coords_az, coords_rg)
1035
+
1036
+ noise_poly = utils.polyfit2d(
1037
+ coords_rg_2d.flatten(),
1038
+ coords_az_2d.flatten(),
1039
+ np.array(range_noise).flatten(),
1040
+ rg_poly_order,
1041
+ az_poly_order,
1042
+ )
1043
+ else:
1044
+ # TOPSAR has single LUT per burst
1045
+ # Treat range and azimuth polynomial components as weakly independent
1046
+ if idx >= len(range_pixel):
1047
+ raise ValueError(
1048
+ f"We have run out of noise information. Current index = {idx}, length of noise array = {len(range_pixel)}."
1049
+ )
1050
+ rp_array = range_pixel[idx]
1051
+ rn_array = range_noise[idx]
1052
+ coords_rg = (rp_array + first_row - scp_row) * row_ss
1053
+
1054
+ rg_poly = np.array(npp.polyfit(coords_rg, rn_array, rg_poly_order))
1055
+ az_poly = None
1056
+ if azimuth_noise is not None:
1057
+ line0 = lines_per_burst * idx
1058
+ coords_az = (azimuth_line[0] - line0 - scp_col) * col_ss
1059
+ valid_lines = (azimuth_line[0] >= line0) & (
1060
+ azimuth_line[0] < line0 + lines_per_burst
1061
+ )
1062
+ valid_count = np.sum(valid_lines)
1063
+ if valid_count > 1:
1064
+ az_poly_order = min(2, valid_count - 1)
1065
+ az_poly = np.array(
1066
+ npp.polyfit(
1067
+ coords_az[valid_lines],
1068
+ azimuth_noise[valid_lines],
1069
+ az_poly_order,
1070
+ )
1071
+ )
1072
+ if az_poly is not None:
1073
+ noise_poly = np.zeros((rg_poly.size, az_poly.size), dtype=np.float64)
1074
+ noise_poly[:, 0] += rg_poly
1075
+ noise_poly[0, :] += az_poly
1076
+ else:
1077
+ noise_poly = np.reshape(rg_poly, (-1, 1))
1078
+
1079
+ burst_info["radiometric"]["noise_level_type"] = "ABSOLUTE"
1080
+ burst_info["radiometric"]["noise_poly_coefs"] = noise_poly
1081
+
1082
+ return
1083
+
1084
+
1085
+ def _complete_filename(swath_info, burst_info, filename_template):
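+ # e.g. a template named "out_{swath}_{burst}_{pol}.nitf" becomes
+ # "out_IW1_01_VH.nitf" (values here are purely illustrative).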
1086
+ core_name = burst_info["core_name"]
1087
+ burst = core_name[-2:]
1088
+ swath = swath_info["parameters"]["SWATH"]
1089
+ polarization = swath_info["tx_rcv_polarization_proc"].replace(":", "")
1090
+ formatted_name = filename_template.name.format(
1091
+ swath=swath, burst=burst, pol=polarization
1092
+ )
1093
+ final_filename = filename_template.with_name(formatted_name)
1094
+
1095
+ return final_filename
1096
+
1097
+
1098
+ def _create_sicd_xml(base_info, swath_info, burst_info, classification):
1099
+ em = lxml.builder.ElementMaker(namespace=NSMAP["sicd"], nsmap={None: NSMAP["sicd"]})
1100
+
1101
+ # Collection Info
1102
+ collection_info_node = em.CollectionInfo(
1103
+ em.CollectorName(swath_info["collector_name"]),
1104
+ em.CoreName(burst_info["core_name"]),
1105
+ em.CollectType(base_info["collect_type"]),
1106
+ em.RadarMode(
1107
+ em.ModeType(base_info["mode_type"]),
1108
+ em.ModeID(swath_info["mode_id"]),
1109
+ ),
1110
+ em.Classification(classification),
1111
+ em.Parameter({"name": "SLICE"}, swath_info["parameters"]["SLICE"]),
1112
+ em.Parameter({"name": "BURST"}, burst_info["parameters"]["BURST"]),
1113
+ em.Parameter({"name": "SWATH"}, swath_info["parameters"]["SWATH"]),
1114
+ em.Parameter(
1115
+ {"name": "ORBIT_SOURCE"}, swath_info["parameters"]["ORBIT_SOURCE"]
1116
+ ),
1117
+ )
1118
+
1119
+ # Image Creation
1120
+ image_creation_node = em.ImageCreation(
1121
+ em.Application(base_info["creation_application"]),
1122
+ em.DateTime(base_info["creation_date_time"].strftime("%Y-%m-%dT%H:%M:%SZ")),
1123
+ em.Site(base_info["creation_site"]),
1124
+ em.Profile(f"sarkit-convert {__version__}"),
1125
+ )
1126
+
1127
+ # Image Data
1128
+ image_data_node = em.ImageData(
1129
+ em.PixelType(swath_info["pixel_type"]),
1130
+ em.NumRows(str(swath_info["num_rows"])),
1131
+ em.NumCols(str(swath_info["num_cols"])),
1132
+ em.FirstRow(str(swath_info["first_row"])),
1133
+ em.FirstCol(str(swath_info["first_col"])),
1134
+ em.FullImage(
1135
+ em.NumRows(str(swath_info["num_rows"])),
1136
+ em.NumCols(str(swath_info["num_cols"])),
1137
+ ),
1138
+ em.SCPPixel(
1139
+ em.Row(str(swath_info["scp_pixel"][0])),
1140
+ em.Col(str(swath_info["scp_pixel"][1])),
1141
+ ),
1142
+ em.ValidData(
1143
+ {"size": "4"},
1144
+ em.Vertex(
1145
+ {"index": "1"},
1146
+ em.Row(str(burst_info["valid_data"][0][0])),
1147
+ em.Col(str(burst_info["valid_data"][0][1])),
1148
+ ),
1149
+ em.Vertex(
1150
+ {"index": "2"},
1151
+ em.Row(str(burst_info["valid_data"][1][0])),
1152
+ em.Col(str(burst_info["valid_data"][1][1])),
1153
+ ),
1154
+ em.Vertex(
1155
+ {"index": "3"},
1156
+ em.Row(str(burst_info["valid_data"][2][0])),
1157
+ em.Col(str(burst_info["valid_data"][2][1])),
1158
+ ),
1159
+ em.Vertex(
1160
+ {"index": "4"},
1161
+ em.Row(str(burst_info["valid_data"][3][0])),
1162
+ em.Col(str(burst_info["valid_data"][3][1])),
1163
+ ),
1164
+ ),
1165
+ )
1166
+
1167
+ def _make_xyz(arr):
1168
+ return [em.X(str(arr[0])), em.Y(str(arr[1])), em.Z(str(arr[2]))]
1169
+
1170
+ def __make_llh(arr):
1171
+ return [em.Lat(str(arr[0])), em.Lon(str(arr[1])), em.HAE(str(arr[2]))]
1172
+
1173
+ # Geo Data
1174
+ geo_data_node = em.GeoData(
1175
+ em.EarthModel("WGS_84"),
1176
+ em.SCP(
1177
+ em.ECF(*_make_xyz(burst_info["scp_ecf"])),
1178
+ em.LLH(*__make_llh(burst_info["scp_llh"])),
1179
+ ),
1180
+ em.ImageCorners(),
1181
+ em.ValidData(),
1182
+ )
1183
+
1184
+ # Grid
1185
+ grid_node = em.Grid(
1186
+ em.ImagePlane(swath_info["image_plane"]),
1187
+ em.Type(swath_info["grid_type"]),
1188
+ em.TimeCOAPoly(),
1189
+ em.Row(
1190
+ em.UVectECF(*_make_xyz(burst_info["row_uvect_ecf"])),
1191
+ em.SS(str(swath_info["row_ss"])),
1192
+ em.ImpRespWid(str(swath_info["row_imp_res_wid"])),
1193
+ em.Sgn(str(swath_info["row_sgn"])),
1194
+ em.ImpRespBW(str(swath_info["row_imp_res_bw"])),
1195
+ em.KCtr(str(swath_info["row_kctr"])),
1196
+ em.DeltaK1(str(burst_info["row_delta_k1"])),
1197
+ em.DeltaK2(str(burst_info["row_delta_k2"])),
1198
+ em.DeltaKCOAPoly(),
1199
+ em.WgtType(
1200
+ em.WindowName(
1201
+ str(swath_info["row_window_name"]),
1202
+ ),
1203
+ *(
1204
+ [
1205
+ em.Parameter(
1206
+ {"name": "COEFFICIENT"},
1207
+ str(swath_info["row_params"]),
1208
+ )
1209
+ ]
1210
+ if swath_info["row_params"] is not None
1211
+ else []
1212
+ ),
1213
+ ),
1214
+ ),
1215
+ em.Col(
1216
+ em.UVectECF(*_make_xyz(burst_info["col_uvect_ecf"])),
1217
+ em.SS(str(swath_info["col_ss"])),
1218
+ em.ImpRespWid(str(swath_info["col_imp_res_wid"])),
1219
+ em.Sgn(str(swath_info["col_sgn"])),
1220
+ em.ImpRespBW(str(swath_info["col_imp_res_bw"])),
1221
+ em.KCtr(str(swath_info["col_kctr"])),
1222
+ em.DeltaK1(str(burst_info["col_delta_k1"])),
1223
+ em.DeltaK2(str(burst_info["col_delta_k2"])),
1224
+ em.DeltaKCOAPoly(),
1225
+ em.WgtType(
1226
+ em.WindowName(
1227
+ str(swath_info["col_window_name"]),
1228
+ ),
1229
+ *(
1230
+ [
1231
+ em.Parameter(
1232
+ {"name": "COEFFICIENT"},
1233
+ str(swath_info["col_params"]),
1234
+ )
1235
+ ]
1236
+ if swath_info["col_params"] is not None
1237
+ else []
1238
+ ),
1239
+ ),
1240
+ ),
1241
+ )
1242
+ sksicd.Poly2dType().set_elem(
1243
+ grid_node.find("./{*}TimeCOAPoly"), burst_info["time_coa_poly_coefs"]
1244
+ )
1245
+ sksicd.Poly2dType().set_elem(
1246
+ grid_node.find("./{*}Row/{*}DeltaKCOAPoly"), swath_info["row_deltak_coa_poly"]
1247
+ )
1248
+ sksicd.Poly2dType().set_elem(
1249
+ grid_node.find("./{*}Col/{*}DeltaKCOAPoly"), burst_info["col_deltak_coa_poly"]
1250
+ )
1251
+ wgtfunc = em.WgtFunct()
1252
+ sksicd.TRANSCODERS["Grid/Row/WgtFunct"].set_elem(wgtfunc, swath_info["row_wgts"])
1253
+ grid_node.find("./{*}Row").append(wgtfunc)
1254
+ wgtfunc = em.WgtFunct()
1255
+ sksicd.TRANSCODERS["Grid/Col/WgtFunct"].set_elem(wgtfunc, swath_info["col_wgts"])
1256
+ grid_node.find("./{*}Col").append(wgtfunc)
1257
+
1258
+ # Timeline
1259
+ timeline_node = em.Timeline(
1260
+ em.CollectStart(burst_info["collect_start"].strftime("%Y-%m-%dT%H:%M:%S.%fZ")),
1261
+ em.CollectDuration(str(burst_info["collect_duration"])),
1262
+ em.IPP(
1263
+ {"size": "1"},
1264
+ em.Set(
1265
+ {"index": "1"},
1266
+ em.TStart(str(swath_info["t_start"])),
1267
+ em.TEnd(str(burst_info["ipp_set_tend"])),
1268
+ em.IPPStart(str(swath_info["ipp_start"])),
1269
+ em.IPPEnd(str(burst_info["ipp_set_ippend"])),
1270
+ em.IPPPoly(),
1271
+ ),
1272
+ ),
1273
+ )
1274
+ sksicd.PolyType().set_elem(
1275
+ timeline_node.find("./{*}IPP/{*}Set/{*}IPPPoly"), swath_info["ipp_poly"]
1276
+ )
1277
+
1278
+ # Position
1279
+ position_node = em.Position(em.ARPPoly())
1280
+ sksicd.XyzPolyType().set_elem(
1281
+ position_node.find("./{*}ARPPoly"), burst_info["arp_poly_coefs"]
1282
+ )
1283
+
1284
+ # Radar Collection
1285
+ radar_collection_node = em.RadarCollection(
1286
+ em.TxFrequency(
1287
+ em.Min(str(swath_info["tx_freq"][0])),
1288
+ em.Max(str(swath_info["tx_freq"][1])),
1289
+ ),
1290
+ em.Waveform(
1291
+ {"size": f"{len(swath_info['rcv_window_length'])}"},
1292
+ *[
1293
+ em.WFParameters(
1294
+ {"index": str(i)},
1295
+ em.TxPulseLength(str(swath_info["tx_pulse_length"])),
1296
+ em.TxRFBandwidth(str(swath_info["tx_rf_bw"])),
1297
+ em.TxFreqStart(str(swath_info["tx_freq_start"])),
1298
+ em.TxFMRate(str(swath_info["tx_fm_rate"])),
1299
+ em.RcvWindowLength(str(swl)),
1300
+ em.ADCSampleRate(str(swath_info["adc_sample_rate"])),
1301
+ )
1302
+ for i, swl in enumerate(swath_info["rcv_window_length"], start=1)
1303
+ ],
1304
+ ),
1305
+ em.TxPolarization(swath_info["tx_polarization"]),
1306
+ em.RcvChannels(
1307
+ {"size": f"{len(base_info['tx_rcv_polarization'])}"},
1308
+ *[
1309
+ em.ChanParameters(
1310
+ {"index": str(i)},
1311
+ em.TxRcvPolarization(entry),
1312
+ )
1313
+ for i, entry in enumerate(base_info["tx_rcv_polarization"], start=1)
1314
+ ],
1315
+ ),
1316
+ )
1317
+
1318
+ chan_indices = None
1319
+ for i, pol in enumerate(base_info["tx_rcv_polarization"], start=1):
1320
+ if pol == swath_info["tx_rcv_polarization_proc"]:
1321
+ chan_indices = str(i)
1322
+
1323
+ # Image Formation
1324
+ image_formation_node = em.ImageFormation(
1325
+ em.RcvChanProc(
1326
+ em.NumChanProc("1"),
1327
+ em.PRFScaleFactor("1"),
1328
+ em.ChanIndex(chan_indices),
1329
+ ),
1330
+ em.TxRcvPolarizationProc(swath_info["tx_rcv_polarization_proc"]),
1331
+ em.TStartProc(str(swath_info["t_start_proc"])),
1332
+ em.TEndProc(str(burst_info["tend_proc"])),
1333
+ em.TxFrequencyProc(
1334
+ em.MinProc(str(swath_info["tx_freq_proc"][0])),
1335
+ em.MaxProc(str(swath_info["tx_freq_proc"][1])),
1336
+ ),
1337
+ em.ImageFormAlgo(swath_info["image_form_algo"]),
1338
+ em.STBeamComp(swath_info["st_beam_comp"]),
1339
+ em.ImageBeamComp(swath_info["image_beam_comp"]),
1340
+ em.AzAutofocus(swath_info["az_autofocus"]),
1341
+ em.RgAutofocus(swath_info["rg_autofocus"]),
1342
+ )
1343
+
1344
+ # RMA
1345
+ rma_node = em.RMA(
1346
+ em.RMAlgoType(swath_info["rm_algo_type"]),
1347
+ em.ImageType(swath_info["image_type"]),
1348
+ em.INCA(
1349
+ em.TimeCAPoly(),
1350
+ em.R_CA_SCP(str(swath_info["r_ca_scp"])),
1351
+ em.FreqZero(str(swath_info["freq_zero"])),
1352
+ em.DRateSFPoly(),
1353
+ em.DopCentroidPoly(),
1354
+ em.DopCentroidCOA(swath_info["dop_centroid_coa"]),
1355
+ ),
1356
+ )
1357
+ sksicd.PolyType().set_elem(
1358
+ rma_node.find("./{*}INCA/{*}TimeCAPoly"), burst_info["time_ca_poly_coefs"]
1359
+ )
1360
+ sksicd.Poly2dType().set_elem(
1361
+ rma_node.find("./{*}INCA/{*}DRateSFPoly"), burst_info["drsf_poly_coefs"]
1362
+ )
1363
+ sksicd.Poly2dType().set_elem(
1364
+ rma_node.find("./{*}INCA/{*}DopCentroidPoly"),
1365
+ burst_info["doppler_centroid_poly_coefs"],
1366
+ )
1367
+
1368
+ sicd_xml_obj = em.SICD(
1369
+ collection_info_node,
1370
+ image_creation_node,
1371
+ image_data_node,
1372
+ geo_data_node,
1373
+ grid_node,
1374
+ timeline_node,
1375
+ position_node,
1376
+ radar_collection_node,
1377
+ image_formation_node,
1378
+ rma_node,
1379
+ )
1381
+ # Radiometric is only present for products processed after the Sentinel calibration baseline update on 25 Nov 2015.
1382
+ if "radiometric" in burst_info:
1383
+ # Radiometric
1384
+ radiometric_node = em.Radiometric(
1385
+ em.NoiseLevel(
1386
+ em.NoiseLevelType(burst_info["radiometric"]["noise_level_type"]),
1387
+ em.NoisePoly(),
1388
+ ),
1389
+ em.RCSSFPoly(),
1390
+ em.SigmaZeroSFPoly(),
1391
+ em.BetaZeroSFPoly(),
1392
+ em.GammaZeroSFPoly(),
1393
+ )
1394
+ sksicd.Poly2dType().set_elem(
1395
+ radiometric_node.find("./{*}NoiseLevel/{*}NoisePoly"),
1396
+ burst_info["radiometric"]["noise_poly_coefs"],
1397
+ )
1398
+ sksicd.Poly2dType().set_elem(
1399
+ radiometric_node.find("./{*}RCSSFPoly"),
1400
+ burst_info["radiometric"]["rcs_sf_poly_coefs"],
1401
+ )
1402
+ sksicd.Poly2dType().set_elem(
1403
+ radiometric_node.find("./{*}SigmaZeroSFPoly"),
1404
+ burst_info["radiometric"]["sigma_zero_poly_coefs"],
1405
+ )
1406
+ sksicd.Poly2dType().set_elem(
1407
+ radiometric_node.find("./{*}BetaZeroSFPoly"),
1408
+ burst_info["radiometric"]["beta_zero_poly_coefs"],
1409
+ )
1410
+ sksicd.Poly2dType().set_elem(
1411
+ radiometric_node.find("./{*}GammaZeroSFPoly"),
1412
+ burst_info["radiometric"]["gamma_zero_poly_coefs"],
1413
+ )
1414
+
1415
+ sicd_xml_obj.find("{*}RMA").addprevious(radiometric_node)
1416
+
1417
+ return sicd_xml_obj
1418
+
1419
+
1420
+ def _update_geo_data(xml_helper):
1421
+ # Update ImageCorners
1422
+ num_rows = xml_helper.load("./{*}ImageData/{*}NumRows")
1423
+ num_cols = xml_helper.load("./{*}ImageData/{*}NumCols")
1424
+ row_ss = xml_helper.load("./{*}Grid/{*}Row/{*}SS")
1425
+ col_ss = xml_helper.load("./{*}Grid/{*}Col/{*}SS")
1426
+ scp_pixel = xml_helper.load("./{*}ImageData/{*}SCPPixel")
1427
+ scp_ecf = xml_helper.load("./{*}GeoData/{*}SCP/{*}ECF")
1428
+ image_grid_locations = (
1429
+ np.array(
1430
+ [
1431
+ [0, 0],
1432
+ [0, num_cols - 1],
1433
+ [num_rows - 1, num_cols - 1],
1434
+ [num_rows - 1, 0],
1435
+ ]
1436
+ )
1437
+ - scp_pixel
1438
+ ) * [row_ss, col_ss]
1439
+
1440
+ icp_ecef, _, _ = sksicd.image_to_ground_plane(
1441
+ xml_helper.element_tree,
1442
+ image_grid_locations,
1443
+ scp_ecf,
1444
+ sarkit.wgs84.up(sarkit.wgs84.cartesian_to_geodetic(scp_ecf)),
1445
+ )
1446
+ icp_llh = sarkit.wgs84.cartesian_to_geodetic(icp_ecef)
1447
+ xml_helper.set("./{*}GeoData/{*}ImageCorners", icp_llh[:, :2])
1448
+ xml_helper.set("./{*}GeoData/{*}ValidData", icp_llh[:, :2])
1449
+
1450
+
1451
+ def _update_rniirs_info(xml_helper):
1452
+ em = lxml.builder.ElementMaker(namespace=NSMAP["sicd"], nsmap={None: NSMAP["sicd"]})
1453
+ info_density, predicted_rniirs = utils.get_rniirs_estimate(xml_helper)
1454
+ collection_info_node = xml_helper.element_tree.find("./{*}CollectionInfo")
1455
+
1456
+ param_node = em.Parameter({"name": "INFORMATION_DENSITY"}, f"{info_density:0.2G}")
1457
+ collection_info_node.append(param_node)
1458
+ param_node = em.Parameter({"name": "PREDICTED_RNIIRS"}, f"{predicted_rniirs:0.1f}")
1459
+ collection_info_node.append(param_node)
1460
+
1461
+ return
1462
+
1463
+
1464
+ def main(args=None):
1465
+ """CLI for converting Sentinel SAFE to SICD"""
1466
+ parser = argparse.ArgumentParser(
1467
+ description="Converts a Sentinel-1 SAFE folder into SICD.",
1468
+ )
1469
+ parser.add_argument(
1470
+ "safe_product_folder",
1471
+ type=pathlib.Path,
1472
+ help="path of the input SAFE product folder",
1473
+ )
1474
+ parser.add_argument(
1475
+ "classification",
1476
+ type=str,
1477
+ help="content of the /SICD/CollectionInfo/Classification node in the SICD XML",
1478
+ )
1479
+ parser.add_argument(
1480
+ "output_sicd_file",
1481
+ type=pathlib.Path,
1482
+ help="path of the output SICD file. The strings '{swath}', '{burst}', '{pol}' will be replaced as appropriate for multiple images",
1483
+ )
1484
+ parser.add_argument(
1485
+ "--ostaid",
1486
+ type=str,
1487
+ help="content of the originating station ID (OSTAID) field of the NITF header",
1488
+ default="Unknown",
1489
+ )
1490
+ config = parser.parse_args(args)
1491
+
1492
+ manifest_filename = config.safe_product_folder / "manifest.safe"
1493
+
1494
+ manifest_root = et.parse(manifest_filename).getroot()
1495
+ base_info = _collect_base_info(manifest_root)
1496
+ files = _get_file_sets(config.safe_product_folder, manifest_root)
1497
+
1498
+ used_filenames = set()
1499
+ for entry in files:
1500
+ product_root_node = et.parse(entry["product"]).getroot()
1501
+ swath_info = _collect_swath_info(product_root_node)
1502
+ burst_info_list = _collect_burst_info(product_root_node, swath_info)
1504
+ if base_info["creation_date_time"].date() >= np.datetime64("2015-11-25"):
1505
+ for burst_info in burst_info_list: burst_info["radiometric"] = {}
1506
+ _calc_radiometric_info(entry["calibration"], swath_info, burst_info_list)
1507
+ _calc_noise_level_info(entry["noise"], swath_info, burst_info_list)
1508
+
1509
+ # Grab the data and write the files
1510
+ with tifffile.TiffFile(entry["data"]) as tif:
1511
+ image = tif.asarray().T
1512
+ image_width = tif.pages[0].imagewidth  # avoid relying on TIFF tag order
1513
+ begin_col = 0
1514
+ for burst_info in burst_info_list:
1515
+ sicd = _create_sicd_xml(
1516
+ base_info, swath_info, burst_info, config.classification.upper()
1517
+ )
1518
+ # Add SCPCOA node
1519
+ scp_coa = sksicd.compute_scp_coa(sicd.getroottree())
1520
+ sicd.find("./{*}ImageFormation").addnext(scp_coa)
1521
+ xml_helper = sksicd.XmlHelper(et.ElementTree(sicd))
1522
+ # Update ImageCorners and ValidData
1523
+ _update_geo_data(xml_helper)
1524
+
1525
+ # RNIIRS calcs require radiometric info
1526
+ if "radiometric" in burst_info:
1527
+ _update_rniirs_info(xml_helper)
1528
+
1529
+ # Check for XML consistency
1530
+ sicd_con = sarkit.verification.SicdConsistency(sicd)
1531
+ sicd_con.check()
1532
+ sicd_con.print_result(fail_detail=True)
1533
+
1534
+ end_col = begin_col + xml_helper.load("{*}ImageData/{*}NumCols")
1535
+ subset = (slice(0, image_width, 1), slice(begin_col, end_col, 1))
1536
+ begin_col = end_col
1537
+ image_subset = image[subset]
1538
+ pixel_type = swath_info["pixel_type"]
1539
+ view_dtype = sksicd.PIXEL_TYPES[pixel_type]["dtype"]
1540
+ complex_data = np.empty(image_subset.shape, dtype=view_dtype)
1541
+ complex_data["real"] = image_subset.real.astype(np.int16)
1542
+ complex_data["imag"] = image_subset.imag.astype(np.int16)
1543
+
1544
+ metadata = sksicd.NitfMetadata(
1545
+ xmltree=sicd.getroottree(),
1546
+ file_header_part={
1547
+ "ostaid": config.ostaid,
1548
+ "ftitle": xml_helper.load("{*}CollectionInfo/{*}CoreName"),
1549
+ "security": {
1550
+ "clas": config.classification[0].upper(),
1551
+ "clsy": "US",
1552
+ },
1553
+ },
1554
+ im_subheader_part={
1555
+ "iid2": xml_helper.load("{*}CollectionInfo/{*}CoreName"),
1556
+ "security": {
1557
+ "clas": config.classification[0].upper(),
1558
+ "clsy": "US",
1559
+ },
1560
+ "isorce": xml_helper.load("{*}CollectionInfo/{*}CollectorName"),
1561
+ },
1562
+ de_subheader_part={
1563
+ "security": {
1564
+ "clas": config.classification[0].upper(),
1565
+ "clsy": "US",
1566
+ },
1567
+ },
1568
+ )
1569
+
1570
+ output_filename = _complete_filename(
1571
+ swath_info,
1572
+ burst_info,
1573
+ config.output_sicd_file,
1574
+ )
1575
+
1576
+ if output_filename in used_filenames:
1577
+ raise ValueError(
1578
+ "Output filename does not include necessary swath, burst and/or polarization slug"
1579
+ )
1580
+
1581
+ used_filenames.add(output_filename)
1582
+
1583
+ with output_filename.open("wb") as f:
1584
+ with sksicd.NitfWriter(f, metadata) as writer:
1585
+ writer.write_image(complex_data)
1586
+
1587
+
1588
+ if __name__ == "__main__":
1589
+ main()