sarpyx 0.1.5__py3-none-any.whl → 0.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. docs/examples/advanced/batch_processing.py +1 -1
  2. docs/examples/advanced/custom_processing_chains.py +1 -1
  3. docs/examples/advanced/performance_optimization.py +1 -1
  4. docs/examples/basic/snap_integration.py +1 -1
  5. docs/examples/intermediate/quality_assessment.py +1 -1
  6. outputs/baseline/20260205-234828/__init__.py +33 -0
  7. outputs/baseline/20260205-234828/main.py +493 -0
  8. outputs/final/20260205-234851/__init__.py +33 -0
  9. outputs/final/20260205-234851/main.py +493 -0
  10. sarpyx/__init__.py +2 -2
  11. sarpyx/algorithms/__init__.py +2 -2
  12. sarpyx/cli/__init__.py +1 -1
  13. sarpyx/cli/focus.py +3 -5
  14. sarpyx/cli/main.py +106 -7
  15. sarpyx/cli/shipdet.py +1 -1
  16. sarpyx/cli/worldsar.py +549 -0
  17. sarpyx/processor/__init__.py +1 -1
  18. sarpyx/processor/core/decode.py +43 -8
  19. sarpyx/processor/core/focus.py +104 -57
  20. sarpyx/science/__init__.py +1 -1
  21. sarpyx/sla/__init__.py +8 -0
  22. sarpyx/sla/metrics.py +101 -0
  23. sarpyx/{snap → snapflow}/__init__.py +1 -1
  24. sarpyx/snapflow/engine.py +6165 -0
  25. sarpyx/{snap → snapflow}/op.py +0 -1
  26. sarpyx/utils/__init__.py +1 -1
  27. sarpyx/utils/geos.py +652 -0
  28. sarpyx/utils/grid.py +285 -0
  29. sarpyx/utils/io.py +77 -9
  30. sarpyx/utils/meta.py +55 -0
  31. sarpyx/utils/nisar_utils.py +652 -0
  32. sarpyx/utils/rfigen.py +108 -0
  33. sarpyx/utils/wkt_utils.py +109 -0
  34. sarpyx/utils/zarr_utils.py +55 -37
  35. {sarpyx-0.1.5.dist-info → sarpyx-0.1.6.dist-info}/METADATA +9 -5
  36. {sarpyx-0.1.5.dist-info → sarpyx-0.1.6.dist-info}/RECORD +41 -32
  37. {sarpyx-0.1.5.dist-info → sarpyx-0.1.6.dist-info}/WHEEL +1 -1
  38. sarpyx-0.1.6.dist-info/licenses/LICENSE +201 -0
  39. sarpyx-0.1.6.dist-info/top_level.txt +4 -0
  40. tests/test_zarr_compat.py +35 -0
  41. sarpyx/processor/core/decode_v0.py +0 -0
  42. sarpyx/processor/core/decode_v1.py +0 -849
  43. sarpyx/processor/core/focus_old.py +0 -1550
  44. sarpyx/processor/core/focus_v1.py +0 -1566
  45. sarpyx/processor/core/focus_v2.py +0 -1625
  46. sarpyx/snap/engine.py +0 -633
  47. sarpyx-0.1.5.dist-info/top_level.txt +0 -2
  48. {sarpyx-0.1.5.dist-info → sarpyx-0.1.6.dist-info}/entry_points.txt +0 -0
sarpyx/cli/worldsar.py ADDED
@@ -0,0 +1,549 @@
+ """
+ This script processes satellite SAR data from various missions using the SNAP GPT tool and the
+ sarpyx library. It supports different pipelines for Sentinel-1, TerraSAR-X, COSMO-SkyMed,
+ BIOMASS, and NISAR products. The processing steps include debursting, calibration, terrain
+ correction, and subsetting into grid tiles.
+
+ TODO: metadata reorganization.
+ TODO: subaperture processing for all missions.
+ TODO: PolSAR support.
+ TODO: InSAR support.
+ """
+
+ from pathlib import Path
+ from dotenv import load_dotenv
+ import re
+ import os
+ import sys
+ import pandas as pd
+ from functools import partial
+ import argparse
+
+ from sarpyx.snapflow.engine import GPT
+ from sarpyx.utils.geos import check_points_in_polygon, rectangle_to_wkt, rectanglify
+ from sarpyx.utils.io import read_h5
+ from sarpyx.utils.nisar_utils import NISARReader, NISARCutter, NISARMetadata
+
+
+ # Load environment variables from a .env file.
+ load_dotenv()
+ # Read default paths from environment variables (overridable via CLI flags).
+ GPT_PATH = os.getenv('gpt_path')
+ GRID_PATH = os.getenv('grid_path')
+ DB_DIR = os.getenv('db_dir')
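+ # Example .env (illustrative values, not part of the package):
+ #   gpt_path=/opt/esa-snap/bin/gpt
+ #   grid_path=/data/grids/grid_10km.geojson
+ #   db_dir=/data/db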
+ # ========================================================================================================================================
+ # ================================================================================================================================ Parser
+ # Parse command-line arguments
+ def create_parser() -> argparse.ArgumentParser:
+     """
+     Create command-line argument parser.
+
+     Returns:
+         argparse.ArgumentParser: Parser for the worldsar command.
+     """
+     parser = argparse.ArgumentParser(description='Process SAR data using SNAP GPT and sarpyx pipelines.')
+     parser.add_argument(
+         '--input',
+         '-i',
+         dest='product_path',
+         type=str,
+         required=True,
+         help='Path to the input SAR product.'
+     )
+     parser.add_argument(
+         '--output',
+         '-o',
+         dest='output_dir',
+         type=str,
+         required=True,
+         help='Directory to save the processed output.'
+     )
+     parser.add_argument(
+         '--cuts-outdir',
+         '--cuts_outdir',
+         dest='cuts_outdir',
+         type=str,
+         required=True,
+         help='Where to store the tiles after extraction.'
+     )
+     parser.add_argument(
+         '--product-wkt',
+         '--product_wkt',
+         dest='product_wkt',
+         type=str,
+         required=True,
+         help='WKT string defining the product region of interest.'
+     )
+     parser.add_argument(
+         '--prod-mode',
+         '--prod_mode',
+         dest='prod_mode',
+         type=str,
+         required=True,
+         help='Product mode: ["S1TOPS", "S1STRIP", "BM", "NISAR", "TSX", "CSG", "ICE"].'
+     )
+     parser.add_argument(
+         '--gpt-path',
+         dest='gpt_path',
+         type=str,
+         default=None,
+         help='Override GPT executable path (default: gpt_path env var).'
+     )
+     parser.add_argument(
+         '--grid-path',
+         dest='grid_path',
+         type=str,
+         default=None,
+         help='Override grid GeoJSON path (default: grid_path env var).'
+     )
+     parser.add_argument(
+         '--db-dir',
+         dest='db_dir',
+         type=str,
+         default=None,
+         help='Override database output directory (default: db_dir env var).'
+     )
+     parser.add_argument(
+         '--gpt-memory',
+         dest='gpt_memory',
+         type=str,
+         default=None,
+         help='Override GPT Java heap (e.g., 24G).'
+     )
+     parser.add_argument(
+         '--gpt-parallelism',
+         dest='gpt_parallelism',
+         type=int,
+         default=None,
+         help='Override GPT parallelism (number of tiles).'
+     )
+     return parser
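+
+
+ # Example invocation (illustrative paths; assumes a 'worldsar' console entry point):
+ #   worldsar -i /data/S1A_IW_SLC__1SDV_20240101.SAFE -o /data/out \
+ #       --cuts-outdir /data/tiles --product-wkt "POLYGON ((...))" --prod-mode S1TOPS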
+
+
+ # ======================================================================================================================== SETTINGS
+ """ Processing settings. """
+ # TODO: to be removed in the final version.
+
+ prepro = True
+ tiling = True
+ db_indexing = False
+
+ # ======================================================================================================================== AUXILIARY
+ """ Auxiliary functions for database creation and product subsetting. """
+ def extract_product_id(path: str) -> str | None:
+     m = re.search(r"/([^/]+?)_[^/_]+\.dim$", path)
+     return m.group(1) if m else None
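+     # e.g. extract_product_id('/out/S1A_IW_SLC_Orb_Cal_TC.dim') -> 'S1A_IW_SLC_Orb_Cal' (illustrative)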
+
+
+ def create_tile_database(input_folder: str, output_db_folder: str) -> pd.DataFrame:
+     """Create a database of tile metadata from h5 files.
+
+     Args:
+         input_folder: Path to folder containing h5 tile files.
+         output_db_folder: Path to folder where the database parquet file will be saved.
+
+     Returns:
+         DataFrame containing the metadata for all tiles.
+     """
+     # Find all h5 tiles in the input folder
+     tile_path = Path(input_folder)
+     h5_tiles = list(tile_path.rglob('*.h5'))
+     print(f"Found {len(h5_tiles)} h5 files in {input_folder}")
+
+     # Initialize empty database
+     db = pd.DataFrame()
+
+     # Process each tile
+     for idx, tile_file in enumerate(h5_tiles):
+         print(f"Processing tile {idx + 1}/{len(h5_tiles)}: {tile_file.name}")
+
+         # Read h5 file and extract metadata
+         data, metadata = read_h5(tile_file)
+         row = pd.Series(metadata['quickinfo'])
+         row['ID'] = tile_file.stem  # Add TileID to the row
+
+         # Append to database
+         db = pd.concat([db, pd.DataFrame([row])], ignore_index=True)
+
+     # Save database to parquet file
+     output_db_path = Path(output_db_folder)
+     output_db_path.mkdir(parents=True, exist_ok=True)
+
+     prod_name = tile_path.name
+     output_file = output_db_path / f'{prod_name}_core_metadata.parquet'
+     db.to_parquet(output_file, index=False)
+
+     print(f"Core metadata saved to {output_file}")
+
+     return db
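+     # e.g. create_tile_database('/data/tiles/<prod>', DB_DIR) writes '<prod>_core_metadata.parquet' (illustrative)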
+
+
+ def to_geotiff(
+     product_path: Path,
+     output_dir: Path,
+     geo_region: str | None = None,
+     output_name: str | None = None,
+     gpt_memory: str | None = None,
+     gpt_parallelism: int | None = None,
+ ):
+     assert geo_region is not None, "Geo region WKT string must be provided for subsetting."
+     # NOTE: geo_region and output_name are currently unused; the product is only re-written as GeoTIFF.
+     gpt_kwargs = {}
+     if gpt_memory:
+         gpt_kwargs['memory'] = gpt_memory
+     if gpt_parallelism:
+         gpt_kwargs['parallelism'] = gpt_parallelism
+
+     op = GPT(
+         product=product_path,
+         outdir=output_dir,
+         format='GDAL-GTiff-WRITER',
+         gpt_path=GPT_PATH,
+         **gpt_kwargs,
+     )
+     op.Write()
+
+     return op.prod_path
+
+
+ def subset(
+     product_path: Path,
+     output_dir: Path,
+     geo_region: str | None = None,
+     output_name: str | None = None,
+     gpt_memory: str | None = None,
+     gpt_parallelism: int | None = None,
+ ):
+     assert geo_region is not None, "Geo region WKT string must be provided for subsetting."
+     gpt_kwargs = {}
+     if gpt_memory:
+         gpt_kwargs['memory'] = gpt_memory
+     if gpt_parallelism:
+         gpt_kwargs['parallelism'] = gpt_parallelism
+
+     op = GPT(
+         product=product_path,
+         outdir=output_dir,
+         format='HDF5',
+         gpt_path=GPT_PATH,
+         **gpt_kwargs,
+     )
+     op.Subset(
+         copy_metadata=True,
+         output_name=output_name,
+         geo_region=geo_region,
+     )
+
+     return op.prod_path
+
+ # ======================================================================================================================== PIPELINES
+ """ Different pipelines for different missions/products. """
+ def pipeline_sentinel(
+     product_path: Path,
+     output_dir: Path,
+     is_TOPS: bool = False,
+     subaperture: bool = False,
+     gpt_memory: str | None = None,
+     gpt_parallelism: int | None = None,
+ ):
+     """Sentinel-1 pipeline.
+
+     The operations included are:
+     - Orbit file application
+     - TOPSAR deramp/demodulation (TOPS mode with subaperture processing only)
+     - Debursting
+     - Calibration to complex
+     - Terrain Correction with automatic map projection and 10m pixel spacing
+
+     Args:
+         product_path (Path): Path to the input product.
+         output_dir (Path): Directory to save the processed output.
+
+     Returns:
+         Path: Path to the processed product.
+     """
+     gpt_kwargs = {}
+     if gpt_memory:
+         gpt_kwargs['memory'] = gpt_memory
+     if gpt_parallelism:
+         gpt_kwargs['parallelism'] = gpt_parallelism
+     op = GPT(
+         product=product_path,
+         outdir=output_dir,
+         format='BEAM-DIMAP',
+         gpt_path=GPT_PATH,
+         **gpt_kwargs,
+     )
+     op.ApplyOrbitFile()
+     if is_TOPS and subaperture:
+         op.TopsarDerampDemod()
+     op.Deburst()
+     op.Calibration(output_complex=True)
+     # TODO: Add subaperture processing.
+     op.TerrainCorrection(map_projection='AUTO:42001', pixel_spacing_in_meter=10.0)
+     return op.prod_path
+
+
+ def pipeline_terrasar(
+     product_path: Path,
+     output_dir: Path,
+     gpt_memory: str | None = None,
+     gpt_parallelism: int | None = None,
+ ):
+     """TerraSAR-X pipeline.
+
+     The operations included are:
+     - Calibration, outputting complex data if available.
+     - Terrain Correction with automatic map projection and 5m pixel spacing.
+
+     Args:
+         product_path (Path): Path to the input product.
+         output_dir (Path): Directory to save the processed output.
+
+     Returns:
+         Path: Path to the processed product.
+     """
+     gpt_kwargs = {}
+     if gpt_memory:
+         gpt_kwargs['memory'] = gpt_memory
+     if gpt_parallelism:
+         gpt_kwargs['parallelism'] = gpt_parallelism
+     op = GPT(
+         product=product_path,
+         outdir=output_dir,
+         format='BEAM-DIMAP',
+         gpt_path=GPT_PATH,
+         **gpt_kwargs,
+     )
+     op.Calibration(output_complex=True)
+     # TODO: Add subaperture processing.
+     op.TerrainCorrection(map_projection='AUTO:42001', pixel_spacing_in_meter=5.0)
+     return op.prod_path
+
+
+ def pipeline_cosmo(
+     product_path: Path,
+     output_dir: Path,
+     gpt_memory: str | None = None,
+     gpt_parallelism: int | None = None,
+ ):
+     """COSMO-SkyMed pipeline.
+
+     The operations included are:
+     - Calibration, outputting complex data if available.
+     - Terrain Correction with automatic map projection and 5m pixel spacing.
+
+     Args:
+         product_path (Path): Path to the input product.
+         output_dir (Path): Directory to save the processed output.
+
+     Returns:
+         Path: Path to the processed product.
+     """
+     gpt_kwargs = {}
+     if gpt_memory:
+         gpt_kwargs['memory'] = gpt_memory
+     if gpt_parallelism:
+         gpt_kwargs['parallelism'] = gpt_parallelism
+     op = GPT(
+         product=product_path,
+         outdir=output_dir,
+         format='BEAM-DIMAP',
+         gpt_path=GPT_PATH,
+         **gpt_kwargs,
+     )
+     op.Calibration(output_complex=True)
+     # TODO: Add subaperture.
+     op.TerrainCorrection(map_projection='AUTO:42001', pixel_spacing_in_meter=5.0)
+     return op.prod_path
+
+
+ def pipeline_biomass(
+     product_path: Path,
+     output_dir: Path,
+     gpt_memory: str | None = None,
+     gpt_parallelism: int | None = None,
+ ):
+     """BIOMASS pipeline.
+
+     The product is written out as GeoTIFF via the GDAL writer; no further
+     processing is applied yet.
+
+     Args:
+         product_path (Path): Path to the input product.
+         output_dir (Path): Directory to save the processed output.
+
+     Returns:
+         Path: Path to the processed product.
+     """
+     gpt_kwargs = {}
+     if gpt_memory:
+         gpt_kwargs['memory'] = gpt_memory
+     if gpt_parallelism:
+         gpt_kwargs['parallelism'] = gpt_parallelism
+     op = GPT(
+         product=product_path,
+         outdir=output_dir,
+         format='GDAL-GTiff-WRITER',
+         gpt_path=GPT_PATH,
+         **gpt_kwargs,
+     )
+     op.Write()
+     # TODO: Calculate subapertures with BIOMASS data.
+     return op.prod_path
+
+
+ def pipeline_nisar(
+     product_path: Path,
+     output_dir: Path,
+     gpt_memory: str | None = None,
+     gpt_parallelism: int | None = None,
+ ):
+     """NISAR pipeline.
+
+     NISAR products are passed through unchanged here; reading and tiling are
+     handled downstream via NISARReader/NISARCutter in main(). The gpt_memory
+     and gpt_parallelism arguments are accepted only for router compatibility.
+
+     Args:
+         product_path (Path): Path to the input product.
+         output_dir (Path): Directory to save the processed output. [Not used]
+
+     Returns:
+         Path: Path to the processed product.
+     """
+     assert product_path.suffix == '.h5', "NISAR products must be in .h5 format."
+     return product_path
+ # ========================================================================================================================================
+
+
+ # ========================================================================================================================================
+ """ The router switches between different pipelines based on the product mode. """
+ # NOTE: the 'ICE' mode listed in the --prod-mode help has no pipeline entry yet.
+ ROUTER_PIPE = {
+     'S1TOPS': partial(pipeline_sentinel, is_TOPS=True),
+     'S1STRIP': partial(pipeline_sentinel, is_TOPS=False),
+     'BM': pipeline_biomass,
+     'TSX': pipeline_terrasar,
+     'NISAR': pipeline_nisar,
+     'CSG': pipeline_cosmo,
+ }
+ # ========================================================================================================================================
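+ # e.g. ROUTER_PIPE['S1TOPS'](product_path, output_dir, gpt_memory='24G', gpt_parallelism=8)
+ # dispatches to pipeline_sentinel with is_TOPS=True (illustrative values).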
+
+
+ # =============================================== MAIN =========================================================================
+ def main():
+     parser = create_parser()
+     args = parser.parse_args()
+
+     global GPT_PATH, GRID_PATH, DB_DIR
+     if args.gpt_path:
+         GPT_PATH = args.gpt_path
+     if args.grid_path:
+         GRID_PATH = args.grid_path
+     if args.db_dir:
+         DB_DIR = args.db_dir
+
+     product_path = Path(args.product_path)
+     output_dir = Path(args.output_dir)
+     product_wkt = args.product_wkt
+     cuts_outdir = Path(args.cuts_outdir)
+     grid_geoj_path = Path(GRID_PATH) if GRID_PATH else None
+     product_mode = args.prod_mode
+     gpt_memory = args.gpt_memory
+     gpt_parallelism = args.gpt_parallelism
+
+     # STEP 1: Mission-specific preprocessing.
+     intermediate_product = product_path  # Fallback when preprocessing is skipped.
+     if prepro:
+         intermediate_product = ROUTER_PIPE[product_mode](
+             product_path,
+             output_dir,
+             gpt_memory=gpt_memory,
+             gpt_parallelism=gpt_parallelism,
+         )
+         print(f"Intermediate processed product located at: {intermediate_product}")
+         assert Path(intermediate_product).exists(), f"Intermediate product {intermediate_product} does not exist."
+
+     # STEP 2: Tiling.
+     if tiling:
+         # ------ Cutting according to the tile gridding system: UTM / WGS84 Auto ------
+         print(f'Checking points within polygon: {product_wkt}')
+         assert grid_geoj_path is not None and grid_geoj_path.exists(), f'Grid GeoJSON not found: {grid_geoj_path}'
+         # 2a: find the grid points contained in the product footprint.
+         contained = check_points_in_polygon(product_wkt, geojson_path=grid_geoj_path)
+         if not contained:
+             print('No grid points contained within the provided WKT.')
+             raise ValueError('No grid points contained; check WKT and grid CRS alignment.')
+         # 2b: build the rectangles for cutting.
+         rectangles = rectanglify(contained)
+         if not rectangles:
+             print('No rectangles could be formed from contained points.')
+             raise ValueError('No rectangles formed; check WKT coverage and grid alignment.')
+         product_path = Path(intermediate_product)
+         name = extract_product_id(product_path.as_posix()) if product_mode != 'NISAR' else product_path.stem
+         if name is None:
+             raise ValueError(f"Could not extract product id from: {product_path}")
+
+         for rect in rectangles:  # CUT!
+             geo_region = rectangle_to_wkt(rect)
+             if product_mode != 'NISAR':
+                 final_product = subset(
+                     product_path,
+                     cuts_outdir / name,
+                     output_name=rect['BL']['properties']['name'],
+                     geo_region=geo_region,
+                     gpt_memory=gpt_memory,
+                     gpt_parallelism=gpt_parallelism,
+                 )
+                 print(f"Final processed product located at: {final_product}")
+             else:
+                 reader = NISARReader(product_path.as_posix())
+                 cutter = NISARCutter(reader)
+                 subset_data = cutter.cut_by_wkt(geo_region, "HH", apply_mask=False)
+                 nisar_tile_path = cuts_outdir / name / f"{rect['BL']['properties']['name']}.tiff"
+                 cutter.save_subset(subset_data, nisar_tile_path)
+                 # TODO: add a write method to save to h5.
+                 print(f"Final processed NISAR tile saved at: {nisar_tile_path}")
+
+         total_tiles = len(rectangles)
+         # NOTE: NISAR tiles are saved as .tiff, so this h5 count check applies to GPT-cut products only.
+         cut_files = list(Path(cuts_outdir / name).rglob('*.h5'))
+         assert total_tiles == len(cut_files), f"Expected {total_tiles} tiles, but found {len(cut_files)}."
+
+     # STEP 3: Database indexing.
+     if db_indexing:
+         cuts_folder = cuts_outdir / name
+         db = create_tile_database(cuts_folder.as_posix(), DB_DIR)  # type: ignore
+         assert not db.empty, "Database creation failed, resulting DataFrame is empty."
+         print("Database created successfully.")
+
+     sys.exit(0)
+ # ========================================================================================================================================
+
+
+ if __name__ == "__main__":
+     print("=======================================================================================================================")
+     print("__        __  ___   ____   _      ____   ____      _     ____  ")
+     print("\\ \\      / / / _ \\ |  _ \\ | |    |  _ \\ / ___|    / \\   |  _ \\ ")
+     print(" \\ \\ /\\ / / | | | || |_) || |    | | | |\\___ \\   / _ \\  | |_) |")
+     print("  \\ V  V /  | |_| ||  _ < | |___ | |_| | ___) | / ___ \\ |  _ < ")
+     print("   \\_/\\_/    \\___/ |_| \\_\\|_____||____/ |____/ /_/   \\_\\|_| \\_\\")
+     print("=======================================================================================================================")
+     print("====================================== DATA PROCESSOR ===================================================")
+     print("=======================================================================================================================")
+     print("")
+     print(f"Using virtual environment at: {os.getenv('VENV_PATH')}")
+     print(f"Using GPT at: {GPT_PATH}")
+     print(f"Output directory: {os.getenv('OUTPUT_DIR')}")
+     print(f"Output cuts directory: {os.getenv('OUTPUT_CUTS_DIR')}")
+     print(f"Scripts directory: {os.getenv('SCRIPTS_DIR')}")
+     print(f"DB directory: {DB_DIR}")
+     print(f"Bash dir: {os.getenv('SCRIPTS_DIR')}")
+     print(f"Python scripts dir: {os.getenv('PYSCRIPTS_DIR')}")
+     print(f"Product mode: {os.getenv('MODE')}")
+     print(f"Upload repo: {os.getenv('UPLOAD_REPO')}")
+     print(f"DB Upload repo: {os.getenv('DB_UPLOAD_REPO')}")
+     print("")
+     print("=======================================================================================================================")
+
+     main()
@@ -30,4 +30,4 @@ __all__ = [
      'utils',
  ]

- __version__ = "0.2.0"
+ __version__ = "0.1.6"
@@ -8,13 +8,27 @@ import json

  import numpy as np
  import pandas as pd
- from s1isp.decoder import (
-     EUdfDecodingMode,
-     SubCommutatedDataDecoder,
-     decode_stream,
-     decoded_stream_to_dict,
-     decoded_subcomm_to_dict
- )
+
+ # Optional s1isp import - required for SAR decoding functionality
+ try:
+     from s1isp.decoder import (
+         EUdfDecodingMode,
+         SubCommutatedDataDecoder,
+         decode_stream,
+         decoded_stream_to_dict,
+         decoded_subcomm_to_dict
+     )
+     S1ISP_AVAILABLE = True
+ except ImportError as e:
+     S1ISP_AVAILABLE = False
+     _IMPORT_ERROR = str(e)
+     # Create dummy placeholders to prevent immediate failures
+     EUdfDecodingMode = None
+     SubCommutatedDataDecoder = None
+     decode_stream = None
+     decoded_stream_to_dict = None
+     decoded_subcomm_to_dict = None
+
  from . import code2physical as pt
  from ...utils import zarr_utils

@@ -25,6 +39,19 @@ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(
  logger = logging.getLogger(__name__)


+ def _check_s1isp_available():
+     """Check if s1isp is available and raise informative error if not."""
+     if not S1ISP_AVAILABLE:
+         error_msg = (
+             f"s1isp package is not available: {_IMPORT_ERROR}\n"
+             f"SAR decoding functionality requires s1isp to be installed.\n"
+             f"Please install it separately using:\n"
+             f"    pip install git+https://github.com/avalentino/s1isp.git\n"
+             f"Or see docs/user_guide/INSTALL_S1ISP.md for detailed instructions."
+         )
+         raise ImportError(error_msg)
+
+
  def extract_echo_bursts(records: List[Any]) -> Tuple[List[List[Any]], List[int]]:
      """Extract echo bursts from radar records and return burst data with indexes.

@@ -143,7 +170,11 @@ def extract_headers(file_path: Union[str, Path], mode: str = 's1isp', apply_tran
          ValueError: If mode is not supported or file_path is invalid.
          FileNotFoundError: If file_path does not exist.
          RuntimeError: If decoding fails or transformation fails.
+         ImportError: If s1isp is not available.
      """
+     # Check if s1isp is available
+     _check_s1isp_available()
+
      supported_modes = ['richa', 's1isp']
      if mode not in supported_modes:
          raise ValueError(f"Mode must be one of {supported_modes}, got '{mode}'")
@@ -283,7 +314,11 @@ def decode_radar_file(input_file: Union[str, Path], apply_transformations: bool
      Raises:
          FileNotFoundError: If input file does not exist.
          RuntimeError: If decoding process fails.
+         ImportError: If s1isp is not available.
      """
+     # Check if s1isp is available
+     _check_s1isp_available()
+
      input_file = Path(input_file)
      if not input_file.exists():
          raise FileNotFoundError(f'Input file not found: {input_file}')
@@ -942,4 +977,4 @@ def main() -> int:


  if __name__ == '__main__':
-     exit(main())
+     exit(main())
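
A minimal sketch of how downstream code might guard on the optional s1isp dependency introduced above (the module path and the S1ISP_AVAILABLE flag come from the diff; the input path and return handling are illustrative):

    from sarpyx.processor.core import decode

    if decode.S1ISP_AVAILABLE:
        result = decode.decode_radar_file('s1a_raw.dat')
    else:
        # decode_radar_file would raise ImportError with install instructions;
        # checking the flag lets callers degrade gracefully instead.
        print('s1isp not installed; skipping Level-0 decoding')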