sarpyx 0.1.5__py3-none-any.whl → 0.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. docs/examples/advanced/batch_processing.py +1 -1
  2. docs/examples/advanced/custom_processing_chains.py +1 -1
  3. docs/examples/advanced/performance_optimization.py +1 -1
  4. docs/examples/basic/snap_integration.py +1 -1
  5. docs/examples/intermediate/quality_assessment.py +1 -1
  6. outputs/baseline/20260205-234828/__init__.py +33 -0
  7. outputs/baseline/20260205-234828/main.py +493 -0
  8. outputs/final/20260205-234851/__init__.py +33 -0
  9. outputs/final/20260205-234851/main.py +493 -0
  10. sarpyx/__init__.py +2 -2
  11. sarpyx/algorithms/__init__.py +2 -2
  12. sarpyx/cli/__init__.py +1 -1
  13. sarpyx/cli/focus.py +3 -5
  14. sarpyx/cli/main.py +106 -7
  15. sarpyx/cli/shipdet.py +1 -1
  16. sarpyx/cli/worldsar.py +549 -0
  17. sarpyx/processor/__init__.py +1 -1
  18. sarpyx/processor/core/decode.py +43 -8
  19. sarpyx/processor/core/focus.py +104 -57
  20. sarpyx/science/__init__.py +1 -1
  21. sarpyx/sla/__init__.py +8 -0
  22. sarpyx/sla/metrics.py +101 -0
  23. sarpyx/{snap → snapflow}/__init__.py +1 -1
  24. sarpyx/snapflow/engine.py +6165 -0
  25. sarpyx/{snap → snapflow}/op.py +0 -1
  26. sarpyx/utils/__init__.py +1 -1
  27. sarpyx/utils/geos.py +652 -0
  28. sarpyx/utils/grid.py +285 -0
  29. sarpyx/utils/io.py +77 -9
  30. sarpyx/utils/meta.py +55 -0
  31. sarpyx/utils/nisar_utils.py +652 -0
  32. sarpyx/utils/rfigen.py +108 -0
  33. sarpyx/utils/wkt_utils.py +109 -0
  34. sarpyx/utils/zarr_utils.py +55 -37
  35. {sarpyx-0.1.5.dist-info → sarpyx-0.1.6.dist-info}/METADATA +9 -5
  36. {sarpyx-0.1.5.dist-info → sarpyx-0.1.6.dist-info}/RECORD +41 -32
  37. {sarpyx-0.1.5.dist-info → sarpyx-0.1.6.dist-info}/WHEEL +1 -1
  38. sarpyx-0.1.6.dist-info/licenses/LICENSE +201 -0
  39. sarpyx-0.1.6.dist-info/top_level.txt +4 -0
  40. tests/test_zarr_compat.py +35 -0
  41. sarpyx/processor/core/decode_v0.py +0 -0
  42. sarpyx/processor/core/decode_v1.py +0 -849
  43. sarpyx/processor/core/focus_old.py +0 -1550
  44. sarpyx/processor/core/focus_v1.py +0 -1566
  45. sarpyx/processor/core/focus_v2.py +0 -1625
  46. sarpyx/snap/engine.py +0 -633
  47. sarpyx-0.1.5.dist-info/top_level.txt +0 -2
  48. {sarpyx-0.1.5.dist-info → sarpyx-0.1.6.dist-info}/entry_points.txt +0 -0
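Note on the sarpyx/{snap → snapflow} rename (items 23-25 and 46 above): the sarpyx.snap package is removed in 0.1.6, op.py moves to sarpyx.snapflow, and engine.py is rewritten there. A minimal import-migration sketch in Python, assuming only the module paths shown in this file list (the names exported by sarpyx.snapflow are not confirmed by this diff):

    # Hypothetical compatibility shim; module paths taken from the file list above.
    try:
        from sarpyx.snapflow import engine  # 0.1.6: renamed package
    except ImportError:
        from sarpyx.snap import engine  # 0.1.5 and earlier: removed in 0.1.6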
sarpyx/snap/engine.py DELETED
@@ -1,633 +0,0 @@
- import os
- import subprocess
- import warnings
- from pathlib import Path
- import urllib.request
- import zipfile
-
- import pandas as pd
-
- from ..utils.io import delProd
-
- warnings.filterwarnings("ignore")
-
-
- class GPT:
-     """A wrapper class for executing SNAP Graph Processing Tool (GPT) commands."""
-
-     # Default GPT paths and parallelism for different OS
-     GPT_PATHS = {
-         'Ubuntu': '/home/<username>/ESA-STEP/snap/bin/gpt',
-         'MacOS': '/Applications/snap/bin/gpt',
-         'Windows': 'gpt.exe'
-     }
-
-     DEFAULT_PARALLELISM = {
-         'Ubuntu': 8,
-         'MacOS': 8,
-         'Windows': 8
-     }
-
-     # Supported output formats for SNAP GPT processing
-     OUTPUT_FORMATS = [
-         'PyRate export',  # PyRate configuration format
-         'GeoTIFF+XML',  # GeoTIFF with XML metadata
-         'JPEG2000',  # JPEG2000 compressed format
-         'GDAL-BMP-WRITER',  # Windows Bitmap format
-         'NetCDF4-CF',  # NetCDF4 Climate and Forecast conventions
-         'PolSARPro',  # Polarimetric SAR data analysis format
-         'Snaphu',  # Statistical-cost network-flow algorithm format
-         'Generic Binary BSQ',  # Band Sequential binary format
-         'CSV',  # Comma-separated values format
-         'GDAL-GS7BG-WRITER',  # Golden Software 7 Binary Grid format
-         'GDAL-GTiff-WRITER',  # GDAL GeoTIFF writer
-         'GDAL-BT-WRITER',  # VTP .bt terrain format
-         'GeoTIFF-BigTIFF',  # BigTIFF format for large files
-         'GDAL-RMF-WRITER',  # Raster Matrix Format
-         'GDAL-KRO-WRITER',  # KOLOR Raw format
-         'GDAL-PNM-WRITER',  # Portable Anymap format
-         'Gamma',  # Gamma Remote Sensing format
-         'GDAL-MFF-WRITER',  # Vexcel MFF format
-         'GeoTIFF',  # Standard GeoTIFF format
-         'NetCDF4-BEAM',  # NetCDF4 BEAM format
-         'GDAL-GTX-WRITER',  # NOAA .gtx vertical datum shift format
-         'GDAL-RST-WRITER',  # Idrisi Raster format
-         'GDAL-SGI-WRITER',  # SGI Image format
-         'ZNAP',  # SNAP compressed format
-         'GDAL-GSBG-WRITER',  # Golden Software Binary Grid format
-         'ENVI',  # ENVI header labeled raster format
-         'BEAM-DIMAP',  # BEAM-DIMAP XML product format
-         'GDAL-HFA-WRITER',  # Erdas Imagine format
-         'GDAL-COG-WRITER',  # Cloud Optimized GeoTIFF format
-         'HDF5',  # Hierarchical Data Format version 5
-         'GDAL-NITF-WRITER',  # National Imagery Transmission Format
-         'GDAL-SAGA-WRITER',  # SAGA GIS Binary format
-         'GDAL-ILWIS-WRITER',  # ILWIS Raster Map format
-         'JP2,JPG,PNG,BMP,GIF',  # Common image formats
-         'GDAL-PCIDSK-WRITER'  # PCI PCIDSK Database File format
-     ]
-
-     def __init__(self, product: str | Path,
-                  outdir: str | Path,
-                  format: str = 'BEAM-DIMAP',
-                  gpt_path: str | None = "/usr/local/snap/bin/gpt",
-                  mode: str | None = None):
-         """
-         SNAP GPT processing engine.
-
-         Args:
-             product (str | Path): Path to the input SAR product file or directory.
-             outdir (str | Path): Output directory where processed results will be saved.
-             format (str, optional): Output format for processed data. Defaults to 'BEAM-DIMAP'.
-                 Supported formats include 'BEAM-DIMAP' and 'GEOTIFF'.
-             gpt_path (str | None, optional): Path to the SNAP GPT executable.
-                 Defaults to "/usr/local/snap/bin/gpt".
-             mode (str | None, optional): Processing mode configuration. Defaults to None.
-
-         Attributes:
-             prod_path (Path): Path object for the input product.
-             name (str): Stem name of the input product file.
-             format (str): Output format for processed data.
-             outdir (Path): Path object for the output directory.
-             mode (str | None): Processing mode configuration.
-             gpt_executable: Path to the validated GPT executable.
-             parallelism: Configured parallelism settings for processing.
-             current_cmd (list): List to store current command components.
-         """
-
-         self.prod_path = Path(product)
-         assert self.prod_path.exists(), f"Product path does not exist: {self.prod_path}"
-         self.name = self.prod_path.stem
-         assert format in self.OUTPUT_FORMATS, f"Unsupported format: {format}. Supported formats are: {self.OUTPUT_FORMATS}"
-         self.format = format
-         assert outdir, "Output directory must be specified"
-         self.outdir = Path(outdir)
-         self.mode = mode
-         self.gpt_executable = self._get_gpt_executable(gpt_path)
-         self.parallelism = self._get_parallelism()
-         self.current_cmd = []
-
-     def _get_gpt_executable(self, gpt_path: str | None = None) -> str:
-         """Determines the correct GPT executable path."""
-         if gpt_path:
-             return gpt_path
-
-         if self.mode and self.mode in self.GPT_PATHS:
-             return self.GPT_PATHS[self.mode]
-
-         # Auto-detect based on OS
-         if os.name == 'posix':
-             for path in [self.GPT_PATHS['MacOS'], self.GPT_PATHS['Ubuntu']]:
-                 if Path(path).exists():
-                     return path
-             return 'gpt'
-         elif os.name == 'nt':
-             return self.GPT_PATHS['Windows']
-         else:
-             return 'gpt'
-
-     def _get_parallelism(self) -> int:
-         """Determines the parallelism level."""
-         if self.mode and self.mode in self.DEFAULT_PARALLELISM:
-             return self.DEFAULT_PARALLELISM[self.mode]
-
-         # Auto-detect based on OS
-         if os.name == 'posix':
-             if Path(self.GPT_PATHS['MacOS']).exists():
-                 return self.DEFAULT_PARALLELISM['MacOS']
-             return self.DEFAULT_PARALLELISM.get('Ubuntu', 6)
-         elif os.name == 'nt':
-             return self.DEFAULT_PARALLELISM['Windows']
-         else:
-             return 6
-
-     def _reset_command(self):
-         """Resets the command list for a new GPT operation."""
-         self.current_cmd = [
-             self.gpt_executable,
-             f'-q {self.parallelism}',
-             '-x',
-             '-e',
-             f'-Ssource={self.prod_path.as_posix()}'
-         ]
-
-     def _build_output_path(self, suffix: str) -> Path:
-         """Builds the output path for a processing step."""
-         base_name = self.outdir / f"{self.name}_{suffix}"
-         if self.format == 'GEOTIFF':
-             return base_name.with_suffix('.tif')
-         else:
-             return base_name.with_suffix('.dim')
-
-     def _execute_command(self) -> bool:
-         """Executes the currently built GPT command."""
-         cmd_str = ' '.join(self.current_cmd)
-         print(f"Executing GPT command: {cmd_str}")
-
-         try:
-             process = subprocess.run(
-                 cmd_str,
-                 shell=True,
-                 check=True,
-                 capture_output=True,
-                 text=True,
-                 timeout=3600
-             )
-
-             if process.stdout:
-                 print("GPT Output:", process.stdout)
-             if process.stderr:
-                 print("GPT Warnings:", process.stderr)
-
-             print("Command executed successfully!")
-             return True
-
-         except subprocess.TimeoutExpired:
-             print("Error: GPT command timed out after 1 hour")
-             return False
-
-         except subprocess.CalledProcessError as e:
-             print(f"Error executing GPT command: {cmd_str}")
-             print(f"Return code: {e.returncode}")
-             if e.stdout:
-                 print(f"Stdout: {e.stdout}")
-             if e.stderr:
-                 print(f"Stderr: {e.stderr}")
-             return False
-
-         except FileNotFoundError:
-             print(f"Error: GPT executable '{self.gpt_executable}' not found!")
-             print("Ensure SNAP is installed and configured correctly.")
-             return False
-
-         except Exception as e:
-             print(f"Unexpected error during GPT execution: {type(e).__name__}: {e}")
-             return False
-
-     def _call(self, suffix: str) -> str | None:
-         """Finalizes and executes the GPT command."""
-         output_path = self._build_output_path(suffix)
-         self.current_cmd.extend([
-             f'-t {output_path.as_posix()}',
-             f'-f {self.format}'
-         ])
-
-         if self._execute_command():
-             self.prod_path = output_path
-             return output_path.as_posix()
-         else:
-             return None
-
-     def ImportVector(self, vector_data: str | Path):
-         """Imports vector data into the product."""
-         vector_path = Path(vector_data)
-
-         # Check if the shapefile exists
-         if not vector_path.exists():
-             print(f"Shapefile not found: {vector_path}")
-             print("Downloading from Zenodo...")
-
-             # Download and extract from Zenodo
-             zenodo_url = "https://zenodo.org/api/records/6992586/files-archive"
-             download_dir = Path.cwd() / "zenodo_download"
-             archive_path = download_dir / "zenodo_archive.zip"
-
-             try:
-                 # Download the archive
-                 urllib.request.urlretrieve(zenodo_url, archive_path)
-                 print(f"Downloaded archive to: {archive_path}")
-
-                 # Extract the archive
-                 with zipfile.ZipFile(archive_path, 'r') as zip_ref:
-                     zip_ref.extractall(download_dir)
-                 print(f"Extracted archive to: {download_dir}")
-
-                 # Find shapefile in extracted contents
-                 shp_files = list(download_dir.rglob("*.shp"))
-                 if shp_files:
-                     vector_path = shp_files[0]  # Use the first shapefile found
-                     print(f"Using shapefile: {vector_path}")
-                 else:
-                     raise FileNotFoundError("No shapefile found in downloaded archive")
-
-                 # Clean up the archive
-                 archive_path.unlink()
-
-             except Exception as e:
-                 print(f"Error downloading or extracting shapefile: {e}")
-                 return None
-
-         self._reset_command()
-         self.current_cmd.append(f'Import-Vector -PseparateShapes=false -PvectorFile={vector_path.as_posix()}')
-         return self._call(suffix='SHP')
-
-     def LandMask(self,
-                  shoreline_extension: int = 300,
-                  geometry_name: str = "Buff_750",
-                  use_srtm: bool = True,
-                  invert_geometry: bool = True,
-                  land_mask: bool = False):
-         """Applies Land-Sea Masking using a predefined XML graph structure."""
-
-         self._reset_command()
-         suffix = 'LM'
-         output_path = self._build_output_path(suffix)
-         xml_path = self.outdir / f"{self.name}_landmask_graph.xml"
-
-         # Determine product type if not already set
-         if not hasattr(self, 'prod_type'):
-             try:
-                 self.prod_type = mode_identifier(self.prod_path.name)
-                 print(f"Inferred product type: {self.prod_type}")
-             except Exception as e:
-                 print(f"Warning: Could not determine product type: {e}")
-                 self.prod_type = None
-
-         # Determine source band based on product type
-         if self.prod_type == "COSMO-SkyMed":
-             source_band = 'Intensity_null'
-         elif self.prod_type == "Sentinel-1":
-             source_band = 'Intensity_VH'
-         else:
-             print(f"Warning: Product type is '{self.prod_type}'. Using default source band 'Intensity_VH'.")
-             source_band = 'Intensity_VH'
-
-         # XML Graph Template
-         graph_xml = f"""<graph id="Graph">
-           <version>1.0</version>
-           <node id="Read">
-             <operator>Read</operator>
-             <sources/>
-             <parameters class="com.bc.ceres.binding.dom.XppDomElement">
-               <file>{self.prod_path.as_posix()}</file>
-             </parameters>
-           </node>
-           <node id="Land-Sea-Mask">
-             <operator>Land-Sea-Mask</operator>
-             <sources>
-               <sourceProduct refid="Read"/>
-             </sources>
-             <parameters class="com.bc.ceres.binding.dom.XppDomElement">
-               <sourceBands>{source_band}</sourceBands>
-               <landMask>{str(land_mask).lower()}</landMask>
-               <useSRTM>{str(use_srtm).lower()}</useSRTM>
-               <geometry>{geometry_name}</geometry>
-               <invertGeometry>{str(invert_geometry).lower()}</invertGeometry>
-               <shorelineExtension>{shoreline_extension}</shorelineExtension>
-             </parameters>
-           </node>
-           <node id="Write">
-             <operator>Write</operator>
-             <sources>
-               <sourceProduct refid="Land-Sea-Mask"/>
-             </sources>
-             <parameters class="com.bc.ceres.binding.dom.XppDomElement">
-               <file>{output_path.as_posix()}</file>
-               <formatName>{self.format}</formatName>
-             </parameters>
-           </node>
-         </graph>"""
-
-         try:
-             with open(xml_path, "w", encoding="utf-8") as f:
-                 f.write(graph_xml)
-
-             self.current_cmd = [self.gpt_executable, xml_path.as_posix()]
-
-             if self._execute_command():
-                 self.prod_path = output_path
-                 os.remove(xml_path)
-                 return output_path.as_posix()
-             else:
-                 return None
-
-         except Exception as e:
-             print(f"Error generating LandMask XML graph: {e}")
-             if xml_path.exists():
-                 os.remove(xml_path)
-             return None
-
-     def Calibration(self, Pols: list[str] = ['VV'], output_complex: bool = True):
-         """Applies radiometric calibration."""
-         self._reset_command()
-         pol_str = ','.join(Pols)
-         self.current_cmd.append(f'Calibration -PoutputImageInComplex={str(output_complex).lower()} -PselectedPolarisations={pol_str}')
-         return self._call(suffix='CAL')
-
-     def Deburst(self, Pols: list[str] = ['VH']):
-         """Applies TOPSAR Debursting."""
-         self._reset_command()
-         pol_str = ','.join(Pols)
-         self.current_cmd.append(f'TOPSAR-Deburst -PselectedPolarisations={pol_str}')
-         return self._call(suffix='DEB')
-
-     def Multilook(self, nRgLooks: int, nAzLooks: int):
-         """Applies Multilooking."""
-         self._reset_command()
-         self.current_cmd.append(f'Multilook -PnRgLooks={nRgLooks} -PnAzLooks={nAzLooks}')
-         return self._call(suffix='ML')
-
-     def AdaptiveThresholding(self, background_window_m: float = 800, guard_window_m: float = 500,
-                              target_window_m: float = 50, pfa: float = 6.5):
-         """Applies Adaptive Thresholding for object detection."""
-         self._reset_command()
-         self.current_cmd.append(f'AdaptiveThresholding -PbackgroundWindowSizeInMeter={background_window_m} -PguardWindowSizeInMeter={guard_window_m} -Ppfa={pfa} -PtargetWindowSizeInMeter={target_window_m}')
-         return self._call(suffix='AT')
-
-     def ObjectDiscrimination(self, min_target_m: float, max_target_m: float):
-         """Discriminates objects based on size."""
-         self._reset_command()
-         self.current_cmd.append(f'Object-Discrimination -PminTargetSizeInMeter={min_target_m} -PmaxTargetSizeInMeter={max_target_m}')
-         return self._call(suffix='OD')
-
-     def Subset(self, loc: list[float], sourceBands: list[str], idx: str, winSize: int = 128,
-                GeoCoords: bool = False, copy_metadata: bool = True):
-         """Creates a subset of the product."""
-         original_format = self.format
-         self.format = 'GeoTIFF'
-         self._reset_command()
-
-         source_bands_str = ','.join(sourceBands)
-
-         if not GeoCoords:
-             x = int(loc[0]) - winSize // 2
-             y = int(loc[1]) - winSize // 2
-             region = f'{x},{y},{winSize},{winSize}'
-             self.current_cmd.append(f'Subset -PcopyMetadata={str(copy_metadata).lower()} -Pregion={region} -PsourceBands={source_bands_str}')
-         else:
-             lon, lat = loc[0], loc[1]
-             half_size_deg = winSize * 0.0001
-             min_lon, max_lon = lon - half_size_deg, lon + half_size_deg
-             min_lat, max_lat = lat - half_size_deg, lat + half_size_deg
-             wkt_roi = f'POLYGON(({min_lon} {min_lat}, {max_lon} {min_lat}, {max_lon} {max_lat}, {min_lon} {max_lat}, {min_lon} {min_lat}))'
-             self.current_cmd.append(f"Subset -PcopyMetadata={str(copy_metadata).lower()} -PgeoRegion='{wkt_roi}' -PsourceBands={source_bands_str}")
-
-         result = self._call(suffix=f'SUB{idx}')
-         self.format = original_format
-         return result
-
-     def AatsrSST(self, dual: bool = True, dual_coefficients_file: str = 'AVERAGE_POLAR_DUAL_VIEW',
-                  dual_mask_expression: str = '!cloud_flags_nadir.LAND and !cloud_flags_nadir.CLOUDY and !cloud_flags_nadir.SUN_GLINT and !cloud_flags_fward.LAND and !cloud_flags_fward.CLOUDY and !cloud_flags_fward.SUN_GLINT',
-                  invalid_sst_value: float = -999.0, nadir: bool = True,
-                  nadir_coefficients_file: str = 'AVERAGE_POLAR_SINGLE_VIEW',
-                  nadir_mask_expression: str = '!cloud_flags_nadir.LAND and !cloud_flags_nadir.CLOUDY and !cloud_flags_nadir.SUN_GLINT'):
-         """
-         Computes sea surface temperature (SST) from (A)ATSR products.
-
-         This method processes ATSR (Along Track Scanning Radiometer) data to derive
-         sea surface temperature using both dual-view and nadir-view algorithms.
-
-         Args:
-             dual (bool, optional): Enable dual-view SST processing. Defaults to True.
-             dual_coefficients_file (str, optional): Coefficients file for dual-view processing.
-                 Defaults to 'AVERAGE_POLAR_DUAL_VIEW'.
-             dual_mask_expression (str, optional): Mask expression for dual-view processing
-                 to exclude land, cloud, and sun-glint pixels. Defaults to an expression
-                 excluding these conditions for both nadir and forward views.
-             invalid_sst_value (float, optional): Value assigned to invalid SST pixels.
-                 Defaults to -999.0.
-             nadir (bool, optional): Enable nadir-view SST processing. Defaults to True.
-             nadir_coefficients_file (str, optional): Coefficients file for nadir-view processing.
-                 Defaults to 'AVERAGE_POLAR_SINGLE_VIEW'.
-             nadir_mask_expression (str, optional): Mask expression for nadir-view processing
-                 to exclude land, cloud, and sun-glint pixels. Defaults to an expression
-                 excluding these conditions for the nadir view only.
-
-         Returns:
-             The result of the SST computation operation.
-
-         Note:
-             The method builds command parameters for the SNAP Aatsr.SST operator and
-             executes the processing chain with an 'SST' suffix.
-         """
-         self._reset_command()
-
-         cmd_params = []
-         cmd_params.append(f'-Pdual={str(dual).lower()}')
-         cmd_params.append(f'-PdualCoefficientsFile={dual_coefficients_file}')
-         cmd_params.append(f'-PdualMaskExpression="{dual_mask_expression}"')
-         cmd_params.append(f'-PinvalidSstValue={invalid_sst_value}')
-         cmd_params.append(f'-Pnadir={str(nadir).lower()}')
-         cmd_params.append(f'-PnadirCoefficientsFile={nadir_coefficients_file}')
-         cmd_params.append(f'-PnadirMaskExpression="{nadir_mask_expression}"')
-
-         self.current_cmd.append(f'Aatsr.SST {" ".join(cmd_params)}')
-         return self._call(suffix='SST')
-
-
- def _process_product_cfar(product_path: Path, mask_shp_path: Path, gpt_mode: str | None,
-                           delete_intermediate: bool, pfa_thresholds: list[float]):
-     """Helper function to process a single product through the CFAR chain."""
-     out_dir = product_path.parent
-     try:
-         prod_type = mode_identifier(product_path.name)
-     except Exception as e:
-         print(f"Error determining product type for {product_path}: {e}")
-         return None, None
-
-     op = GPT(product=product_path.as_posix(), outdir=out_dir.as_posix(), mode=gpt_mode)
-     op.prod_type = prod_type
-
-     processed_products = []
-     prod_start_cfar = product_path.as_posix()
-
-     # Process based on product type
-     if prod_type == "Sentinel-1":
-         prod_deb = op.Deburst()
-         if not prod_deb:
-             return None, None
-         processed_products.append(Path(prod_deb))
-
-         prod_cal = op.Calibration(Pols=['VH'])
-         if not prod_cal:
-             return None, None
-         processed_products.append(Path(prod_cal))
-
-         prod_shp = op.ImportVector(vector_data=mask_shp_path)
-         if not prod_shp:
-             return None, None
-         processed_products.append(Path(prod_shp))
-
-         prod_lm = op.LandMask()
-         if not prod_lm:
-             return None, None
-         processed_products.append(Path(prod_lm))
-         prod_start_cfar = prod_lm
-
-         if delete_intermediate:
-             delProd(processed_products[1])
-             delProd(processed_products[2])
-
-     elif prod_type == "COSMO-SkyMed":
-         prod_ml = op.Multilook(nRgLooks=2, nAzLooks=2)
-         if not prod_ml:
-             return None, None
-         processed_products.append(Path(prod_ml))
-
-         prod_cal = op.Calibration(Pols=['HH'])
-         if not prod_cal:
-             return None, None
-         processed_products.append(Path(prod_cal))
-
-         prod_shp = op.ImportVector(vector_data=mask_shp_path)
-         if not prod_shp:
-             return None, None
-         processed_products.append(Path(prod_shp))
-
-         prod_lm = op.LandMask()
-         if not prod_lm:
-             return None, None
-         processed_products.append(Path(prod_lm))
-         prod_start_cfar = prod_lm
-
-         if delete_intermediate:
-             delProd(processed_products[0])
-             delProd(processed_products[1])
-             delProd(processed_products[2])
-
-     elif prod_type == "SAOCOM":
-         prod_shp = op.ImportVector(vector_data=mask_shp_path)
-         if not prod_shp:
-             return None, None
-         processed_products.append(Path(prod_shp))
-
-         prod_lm = op.LandMask()
-         if not prod_lm:
-             return None, None
-         processed_products.append(Path(prod_lm))
-         prod_start_cfar = prod_lm
-
-         if delete_intermediate:
-             delProd(processed_products[0])
-
-     # Process CFAR for each PFA threshold
-     last_successful_excel = None
-     for pfa in pfa_thresholds:
-         op_cfar = GPT(product=prod_start_cfar, outdir=out_dir.as_posix(), mode=gpt_mode)
-
-         at_params = {'pfa': pfa}
-         if prod_type == "COSMO-SkyMed":
-             at_params.update({'background_window_m': 650, 'guard_window_m': 400, 'target_window_m': 25})
-
-         prod_at = op_cfar.AdaptiveThresholding(**at_params)
-         if not prod_at:
-             continue
-
-         prod_od = op_cfar.ObjectDiscrimination(min_target_m=35, max_target_m=500)
-         if not prod_od:
-             if delete_intermediate:
-                 delProd(prod_at)
-             continue
-
-         prod_od_path = Path(prod_od)
-         prod_od_data_dir = prod_od_path.with_suffix('.data')
-
-         try:
-             csv_files = list(prod_od_data_dir.glob('*.csv'))
-             ship_csv_path = next((f for f in csv_files if f.stem.lower().startswith('ship')), None)
-
-             if ship_csv_path:
-                 ship_detections_df = pd.read_csv(ship_csv_path, header=1, sep='\t')
-                 out_excel_path = out_dir / f"{product_path.stem}_pfa_{pfa}.xlsx"
-                 ship_detections_df.to_excel(out_excel_path, index=False)
-                 print(f'Saved ExcelFile to: {out_excel_path}')
-                 last_successful_excel = out_excel_path.as_posix()
-             else:
-                 print(f"No Ship detection CSV found for PFA {pfa}")
-
-         except Exception as e:
-             print(f"Error processing detection results for PFA {pfa}: {e}")
-         finally:
-             if delete_intermediate:
-                 delProd(prod_at)
-                 delProd(prod_od)
-
-     if delete_intermediate and Path(prod_start_cfar).exists() and prod_start_cfar != product_path.as_posix():
-         delProd(prod_start_cfar)
-
-     first_processed = processed_products[0].as_posix() if processed_products else product_path.as_posix()
-     return first_processed, last_successful_excel
-
-
- def CFAR(prod: str | Path, mask_shp_path: str | Path, mode: str | None = None,
-          Thresh: list[float] | float = 12.5, DELETE: bool = False):
-     """
-     Performs the Constant False Alarm Rate (CFAR) ship detection processing chain.
-
-     Args:
-         prod: Path to the input SAR product file
-         mask_shp_path: Path to the shapefile used for land masking
-         mode: OS mode ('MacOS', 'Ubuntu', None) for GPT configuration
-         Thresh: A single PFA threshold or a list of PFA thresholds to test
-         DELETE: If True, delete intermediate processing files
-
-     Returns:
-         Tuple[str | None, str | None]: Path to the first major processed product
-             and path to the last generated Excel file
-     """
-     product_path = Path(prod)
-     mask_path = Path(mask_shp_path)
-
-     if isinstance(Thresh, (int, float)):
-         pfa_thresholds = [float(Thresh)]
-     elif isinstance(Thresh, list):
-         pfa_thresholds = Thresh
-     else:
-         print("Warning: Invalid type for Thresh, using default [12.5]")
-         pfa_thresholds = [12.5]
-
-     return _process_product_cfar(
-         product_path=product_path,
-         mask_shp_path=mask_path,
-         gpt_mode=mode,
-         delete_intermediate=DELETE,
-         pfa_thresholds=pfa_thresholds
-     )
-
-
- def mode_identifier(filename: str) -> str:
-     """Identifies the product type based on filename."""
-     if 'S1' in filename:
-         return "Sentinel-1"
-     elif 'CSK' in filename:
-         return "COSMO-SkyMed"
-     elif 'SAO' in filename:
-         return "SAOCOM"
-     else:
-         raise ValueError(f"Unknown product type for file: {filename}")
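For reference, the deleted module was driven roughly as follows. This is a minimal usage sketch reconstructed from the signatures above, valid only for sarpyx <= 0.1.5; the product and shapefile paths are hypothetical placeholders:

    from sarpyx.snap.engine import GPT, CFAR  # import path removed in 0.1.6

    # Chain individual SNAP GPT operators; each successful call executes gpt
    # and advances op.prod_path to the product it just wrote.
    op = GPT(product='S1A_IW_SLC_example.zip', outdir='out', format='BEAM-DIMAP')
    op.Deburst(Pols=['VH'])
    op.Calibration(Pols=['VH'])
    op.ImportVector(vector_data='coastline_buffer.shp')
    op.LandMask()

    # Or run the full CFAR ship-detection chain in one call, testing two
    # PFA thresholds and deleting intermediate products as it goes.
    first_product, excel_path = CFAR(
        prod='S1A_IW_SLC_example.zip',
        mask_shp_path='coastline_buffer.shp',
        Thresh=[10.0, 12.5],
        DELETE=True,
    )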
sarpyx-0.1.5.dist-info/top_level.txt DELETED
@@ -1,2 +0,0 @@
- docs
- sarpyx