water-column-sonar-processing 25.1.3__py3-none-any.whl → 25.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of water-column-sonar-processing might be problematic.
- water_column_sonar_processing/aws/s3fs_manager.py +1 -1
- water_column_sonar_processing/cruise/resample_regrid.py +19 -24
- water_column_sonar_processing/model/zarr_manager.py +13 -9
- water_column_sonar_processing/utility/constants.py +1 -1
- {water_column_sonar_processing-25.1.3.dist-info → water_column_sonar_processing-25.1.4.dist-info}/METADATA +24 -8
- {water_column_sonar_processing-25.1.3.dist-info → water_column_sonar_processing-25.1.4.dist-info}/RECORD +9 -9
- {water_column_sonar_processing-25.1.3.dist-info → water_column_sonar_processing-25.1.4.dist-info}/LICENSE +0 -0
- {water_column_sonar_processing-25.1.3.dist-info → water_column_sonar_processing-25.1.4.dist-info}/WHEEL +0 -0
- {water_column_sonar_processing-25.1.3.dist-info → water_column_sonar_processing-25.1.4.dist-info}/top_level.txt +0 -0
water_column_sonar_processing/aws/s3fs_manager.py

@@ -16,7 +16,7 @@ class S3FSManager:
         # self.output_bucket_name = os.environ.get("OUTPUT_BUCKET_NAME")
         self.s3_region = os.environ.get("AWS_REGION", default="us-east-1")
         self.s3fs = s3fs.S3FileSystem(
-            asynchronous=False,
+            # asynchronous=False,
             endpoint_url=endpoint_url,
             key=os.environ.get("OUTPUT_BUCKET_ACCESS_KEY"),
             secret=os.environ.get("OUTPUT_BUCKET_SECRET_ACCESS_KEY"),
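For context, here is a minimal sketch of the kind of filesystem setup this hunk touches, written outside the package's S3FSManager class. The key/secret environment variable names come from the diff; the endpoint variable name and the bucket path are placeholders, and the package's `s3_map()` helper is assumed to wrap `s3fs.S3Map`.

```python
# Illustrative only: stand-alone s3fs setup mirroring the diff above.
import os

import s3fs

fs = s3fs.S3FileSystem(
    # asynchronous=False,  # commented out in 25.1.4, matching the change above
    endpoint_url=os.environ.get("OUTPUT_BUCKET_ENDPOINT_URL"),  # hypothetical variable name
    key=os.environ.get("OUTPUT_BUCKET_ACCESS_KEY"),
    secret=os.environ.get("OUTPUT_BUCKET_SECRET_ACCESS_KEY"),
)

# A mapping like this is what the Zarr/Xarray open calls later in the diff consume.
store = s3fs.S3Map(
    root="example-bucket/level_1/ship/cruise/sensor/file.zarr",  # placeholder path
    s3=fs,
    check=False,
)
```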
water_column_sonar_processing/cruise/resample_regrid.py

@@ -197,9 +197,9 @@ class ResampleRegrid:
             # df[df['PIPELINE_STATUS'] < PipelineStatus.LEVEL_1_PROCESSING] = np.nan

             # Get index from all cruise files. Note: should be based on which are included in cruise.
-            index = cruise_df.index[
+            index = int(cruise_df.index[
                 cruise_df["FILE_NAME"] == f"{file_name_stem}.raw"
-            ][0]
+            ][0])

             # get input store
             input_xr_zarr_store = zarr_manager.open_s3_zarr_store_with_xarray(
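A quick sketch of what the added `int()` cast does: the boolean lookup on a DataFrame index returns a pandas Index, and its first element is a NumPy integer, so wrapping it gives a plain Python `int`. The tiny DataFrame below is made up for illustration.

```python
# Illustrative only: the int() cast around a positional index lookup.
import pandas as pd

cruise_df = pd.DataFrame({"FILE_NAME": ["a.raw", "b.raw", "c.raw"]})
file_name_stem = "b"

# Without int() this would be a numpy.int64; with it, a plain Python int.
index = int(cruise_df.index[cruise_df["FILE_NAME"] == f"{file_name_stem}.raw"][0])
print(index, type(index))  # 1 <class 'int'>
```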
@@ -226,18 +226,20 @@ class ResampleRegrid:
             min_echo_range = np.nanmin(np.float32(cruise_df["MIN_ECHO_RANGE"]))
             max_echo_range = np.nanmax(np.float32(cruise_df["MAX_ECHO_RANGE"]))

-            print(
-
-
+            print("Creating empty ndarray for Sv data.")  # Note: cruise dims (depth, time, frequency)
+            output_zarr_store_shape = output_zarr_store.Sv.shape
+            end_ping_time_index - start_ping_time_index
+            output_zarr_store_height = output_zarr_store_shape[0]
+            output_zarr_store_width = end_ping_time_index - start_ping_time_index
+            output_zarr_store_depth = output_zarr_store_shape[2]
             cruise_sv_subset = np.empty(
-                shape=
-                    :, start_ping_time_index:end_ping_time_index, :
-                ].shape
+                shape=(output_zarr_store_height, output_zarr_store_width, output_zarr_store_depth)
             )
             cruise_sv_subset[:, :, :] = np.nan

             all_cruise_depth_values = zarr_manager.get_depth_values(
-                min_echo_range=min_echo_range,
+                min_echo_range=min_echo_range,
+                max_echo_range=max_echo_range
             )  # (5262,) and

             print(" ".join(list(input_xr_zarr_store.Sv.dims)))
@@ -281,16 +283,6 @@ class ResampleRegrid:
             #########################################################################
             # write Sv values to cruise-level-model-store
             output_zarr_store.Sv[:, start_ping_time_index:end_ping_time_index, :] = regrid_resample.values
-
-            #########################################################################
-            # [5] write subset of latitude/longitude
-            output_zarr_store.latitude[
-                start_ping_time_index:end_ping_time_index
-            ] = geospatial.dropna()["latitude"].values  # TODO: get from ds_sv directly, dont need geojson anymore
-            output_zarr_store.longitude[
-                start_ping_time_index:end_ping_time_index
-            ] = geospatial.dropna()["longitude"].values
-
             #########################################################################
             # TODO: add the "detected_seafloor_depth/" to the
             # L2 cruise dataarrays
@@ -311,11 +303,14 @@ class ResampleRegrid:
                 start_ping_time_index:end_ping_time_index
             ] = detected_seafloor_depths
             #
-
-            #
-
-
-            #
+            #########################################################################
+            # [5] write subset of latitude/longitude
+            output_zarr_store.latitude[
+                start_ping_time_index:end_ping_time_index
+            ] = geospatial.dropna()["latitude"].values  # TODO: get from ds_sv directly, dont need geojson anymore
+            output_zarr_store.longitude[
+                start_ping_time_index:end_ping_time_index
+            ] = geospatial.dropna()["longitude"].values
             #########################################################################
             #########################################################################
         except Exception as err:
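The resample_regrid changes above replace a shape derived from slicing the output store with explicit height/width/depth variables. A minimal NumPy sketch of that preallocation pattern, with made-up stand-in values for `output_zarr_store.Sv.shape` and the ping-time indices:

```python
# Illustrative only: preallocate a NaN-filled Sv buffer sized from the output store.
import numpy as np

output_shape = (5262, 100_000, 4)  # stand-in for output_zarr_store.Sv.shape (depth, time, frequency)
start_ping_time_index, end_ping_time_index = 1_000, 2_000

cruise_sv_subset = np.empty(
    shape=(output_shape[0], end_ping_time_index - start_ping_time_index, output_shape[2])
)
cruise_sv_subset[:, :, :] = np.nan  # initialize before regridded values are written in

print(cruise_sv_subset.shape)  # (5262, 1000, 4)
```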
water_column_sonar_processing/model/zarr_manager.py

@@ -2,6 +2,7 @@ import numcodecs
 import numpy as np
 import xarray as xr
 import zarr
+import importlib.metadata
 from numcodecs import Blosc

 from water_column_sonar_processing.aws import S3FSManager
@@ -249,9 +250,9 @@ class ZarrManager:
         root.attrs["sensor_name"] = sensor_name
         #
         root.attrs["processing_software_name"] = Coordinates.PROJECT_NAME.value
-
-
-
+
+        current_project_version = importlib.metadata.version('water_column_sonar_processing')
+        root.attrs["processing_software_version"] = current_project_version
         root.attrs["processing_software_time"] = Timestamp.get_timestamp()
         #
         root.attrs["calibration_status"] = calibration_status
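The hunk above switches the version attribute to a runtime lookup via `importlib.metadata`. A small sketch of that pattern against an in-memory Zarr group (the real code writes to the L2 store root); `version()` only resolves if the distribution is installed, so the fallback here is an assumption for illustration:

```python
# Illustrative only: stamp the installed package version onto Zarr group attributes.
import importlib.metadata

import zarr

root = zarr.group()  # in-memory group standing in for the cruise-level store root

try:
    current_project_version = importlib.metadata.version("water_column_sonar_processing")
except importlib.metadata.PackageNotFoundError:
    current_project_version = "unknown"  # fallback used only in this sketch

root.attrs["processing_software_version"] = current_project_version
print(dict(root.attrs))
```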
@@ -290,7 +291,7 @@ class ZarrManager:
         # zarr_synchronizer: Union[str, None] = None,  # TODO:
         output_bucket_name: str,
         endpoint_url=None,
-    ):
+    ) -> zarr.hierarchy.Group:
         # Mounts a Zarr store using pythons Zarr implementation. The mounted store
         # will have read/write privileges so that store can be updated.
         print("Opening L2 Zarr store with Zarr for writing.")
@@ -316,18 +317,21 @@ class ZarrManager:
         input_bucket_name: str,
         endpoint_url=None,
     ) -> xr.Dataset:
-        print("Opening L1 Zarr store in S3 with Xarray.")
+        print("Opening L1 Zarr store in S3 with Xarray.")  # TODO: Is this only used for reading from?
         try:
             zarr_path = f"s3://{input_bucket_name}/level_1/{ship_name}/{cruise_name}/{sensor_name}/{file_name_stem}.zarr"
             s3fs_manager = S3FSManager(endpoint_url=endpoint_url)
             store_s3_map = s3fs_manager.s3_map(s3_zarr_store_path=zarr_path)
-            ds = xr.
-
-
+            ds = xr.open_dataset(
+                filename_or_obj=store_s3_map,
+                engine="zarr",
+                chunks={}
+            )
         except Exception as err:
             print("Problem opening Zarr store in S3 as Xarray.")
             raise err
-
+        finally:
+            print("Exiting opening Zarr store in S3 as Xarray.")
         return ds

     def open_l2_zarr_store_with_xarray(
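The new `xr.open_dataset(..., engine="zarr", chunks={})` call can be exercised without S3 by pointing it at a local Zarr store instead of the `store_s3_map` mapping used in the package. A self-contained sketch (the dataset, path, and variable names here are made up; `chunks={}` yields lazy dask-backed arrays using the on-disk chunking, so dask must be installed):

```python
# Illustrative only: round-trip a tiny dataset through Zarr and reopen it lazily.
import numpy as np
import xarray as xr

ds_in = xr.Dataset({"Sv": (("depth", "time"), np.random.rand(4, 3))})
ds_in.to_zarr("example_l1.zarr", mode="w")

ds = xr.open_dataset(
    filename_or_obj="example_l1.zarr",  # the package passes an s3fs mapping here
    engine="zarr",
    chunks={},  # lazy loading with the store's native chunks
)
print(ds)
```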
water_column_sonar_processing/utility/constants.py

@@ -3,7 +3,7 @@ from enum import Enum, Flag, unique

 @unique
 class Constants(Flag):
-    TILE_SIZE = 1024
+    TILE_SIZE = 1024

     # Average https://noaa-wcsd-zarr-pds.s3.us-east-1.amazonaws.com/level_2/Henry_B._Bigelow/HB0902/EK60/HB0902.zarr/time/927
     # chunk size is ~1.3 kB, HB0902 cruise takes ~30 seconds to load all time/lat/lon data
{water_column_sonar_processing-25.1.3.dist-info → water_column_sonar_processing-25.1.4.dist-info}/METADATA

@@ -1,14 +1,14 @@
 Metadata-Version: 2.2
 Name: water_column_sonar_processing
-Version: 25.1.3
-Summary:
+Version: 25.1.4
+Summary: Processing tool for water column sonar data.
 Author-email: Rudy Klucik <rudy.klucik@noaa.gov>
 Project-URL: Homepage, https://github.com/CI-CMG/water-column-sonar-processing
 Project-URL: Issues, https://github.com/CI-CMG/water-column-sonar-processing/issues
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
-Requires-Python: >=3.
+Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: aiobotocore==2.19.0
@@ -34,6 +34,19 @@ Requires-Dist: typing-extensions==4.10.0
 Requires-Dist: xarray==2024.10.0
 Requires-Dist: xbatcher==0.4.0
 Requires-Dist: zarr==2.18.3
+Provides-Extra: dev
+Requires-Dist: bandit[toml]==1.8.0; extra == "dev"
+Requires-Dist: build; extra == "dev"
+Requires-Dist: pre-commit; extra == "dev"
+Requires-Dist: pyinstaller; extra == "dev"
+Requires-Dist: twine; extra == "dev"
+Requires-Dist: flake8==7.1.1; extra == "dev"
+Requires-Dist: pooch==1.8.2; extra == "dev"
+Requires-Dist: pytest~=8.3.3; extra == "dev"
+Requires-Dist: tqdm; extra == "dev"
+Requires-Dist: bandit; extra == "dev"
+Provides-Extra: test
+Requires-Dist: pytest-cov; extra == "test"

 # Water Column Sonar Processing
 Processing tool for converting L0 data to L1 and L2 as well as generating geospatial information
@@ -80,14 +93,17 @@ Processing tool for converting L0 data to L1 and L2 as well as generating geospa
 3. Set interpreter

 # Installing Dependencies
-
-
-
+```
+uv pip install --upgrade pip
+#uv pip install -r requirements_dev.txt
+uv pip install -r pyproject.toml --extra dev
+```


 # Pytest
 ```commandline
-pytest
+uv run pytest tests
+#pytest --disable-warnings
 ```
 or
 > pytest --cache-clear --cov=src tests/ --cov-report=xml
@@ -120,7 +136,7 @@ https://colab.research.google.com/drive/1KiLMueXiz9WVB9o4RuzYeGjNZ6PsZU7a#scroll
 # Tag a Release
 Step 1 --> increment the semantic version in the zarr_manager.py "metadata" & the "pyproject.toml"
 ```commandline
-git tag -a v25.1.
+git tag -a v25.1.4 -m "Releasing version v25.1.4"
 git push origin --tags
 ```

{water_column_sonar_processing-25.1.3.dist-info → water_column_sonar_processing-25.1.4.dist-info}/RECORD

@@ -3,13 +3,13 @@ water_column_sonar_processing/process.py,sha256=-yQtK3rnZq6lGAr3q02zLDe1NuMH9c0P
 water_column_sonar_processing/aws/__init__.py,sha256=KJqK8oYMn-u8n8i-Jp_lG5BvCOTjwWSjWP8yAyDlWVo,297
 water_column_sonar_processing/aws/dynamodb_manager.py,sha256=htP4Y2rmOSFtdzUFrgK14Bn-UXAFG22Ow-dDrR2alSw,13949
 water_column_sonar_processing/aws/s3_manager.py,sha256=-PCiW7YF31nGIPa1oVOVTzjTSExAAkT_IyNNnvWv2HU,16214
-water_column_sonar_processing/aws/s3fs_manager.py,sha256=
+water_column_sonar_processing/aws/s3fs_manager.py,sha256=oouzV9DZLplPC6vzbouWPzyfyNPABx_LGxRGJGc1vWg,2563
 water_column_sonar_processing/aws/sns_manager.py,sha256=Dp9avG5VSugSWPR1dZ-askuAw1fCZkNUHbOUP65iR-k,1867
 water_column_sonar_processing/aws/sqs_manager.py,sha256=NSUrWmnSC8h8Gf7gT0U8zFaQQ-yX89h0Q0mDLKGqp2Y,1597
 water_column_sonar_processing/cruise/__init__.py,sha256=H5hW0JMORuaFvQk_R31B4VL8RnRyKeanOOiWmqEMZJk,156
 water_column_sonar_processing/cruise/create_empty_zarr_store.py,sha256=ZsFQTDA0gXfQHlxDsXBGD1qQ0ipmx4kS81DcY6ml5Ew,7767
 water_column_sonar_processing/cruise/datatree_manager.py,sha256=Qy4dZCW8_q31lbjxbMsx3JtBS4BvQT17_2P0QD1RQcY,639
-water_column_sonar_processing/cruise/resample_regrid.py,sha256=
+water_column_sonar_processing/cruise/resample_regrid.py,sha256=wnog-qwFXRH20AYRD_3BXUgP-TN5ZnyPHpFws3527Mk,14533
 water_column_sonar_processing/geometry/__init__.py,sha256=GIzzc-_7pwEwbOkGpc4i_fmjWI5ymllXqzdHq_d3Rio,299
 water_column_sonar_processing/geometry/elevation_manager.py,sha256=eq9w691WJknPwWYkvO3giKTPleIxCVc2tMGR0e8ZRxQ,4267
 water_column_sonar_processing/geometry/geometry_manager.py,sha256=nz5T1vCDWHYIfQ853EqKYHDetTul7jRWS3y8Evep8QU,10855
@@ -18,17 +18,17 @@ water_column_sonar_processing/geometry/pmtile_generation.py,sha256=7Lm08Jr6YaM4n
 water_column_sonar_processing/index/__init__.py,sha256=izEObsKiOoIJ0kZCFhvaYsBd6Ga71XJxnogjrNInw68,68
 water_column_sonar_processing/index/index_manager.py,sha256=qsS6rKObJlFXKyzRuT1bk2_qW1YagW-Fg_AkQ1U_KRs,14213
 water_column_sonar_processing/model/__init__.py,sha256=FXaCdbPqxp0ogmZm9NplRirqpgMiYs1iRYgJbFbbX2Y,65
-water_column_sonar_processing/model/zarr_manager.py,sha256=
+water_column_sonar_processing/model/zarr_manager.py,sha256=wAO_8jcKIEonTNH12Fzzdirz0XLS7qgwOJGrmcVALR8,15678
 water_column_sonar_processing/processing/__init__.py,sha256=tdpSfwnY6lbAS_yBTu4aG0SjPgCKqh6LAFvIj_t3j3U,168
 water_column_sonar_processing/processing/batch_downloader.py,sha256=qXoruHdbgzAolmroK6eRn9bWgeHFgaVQLwhJ6X5oHRE,6299
 water_column_sonar_processing/processing/raw_to_zarr.py,sha256=Sn0_zBT7yYP6abbSTlQBPA6iZSBxeVqPYYSgoroiBEU,17599
 water_column_sonar_processing/utility/__init__.py,sha256=yDObMOL0_OxKWet5wffK2-XVJgoE9iwiY2q04GZrtBQ,234
 water_column_sonar_processing/utility/cleaner.py,sha256=bNbs-hopWxtKAFBK0Eu18xdRErZCGZvtla3j-1bTwQw,619
-water_column_sonar_processing/utility/constants.py,sha256=
+water_column_sonar_processing/utility/constants.py,sha256=UtzFkvH5VE7eb8PzeKUDVt-nX6SOdlHtkul1zycF_Z0,2146
 water_column_sonar_processing/utility/pipeline_status.py,sha256=O-0SySqdRGJ6bs3zQe1NV9vkOpmsRM7zj5QoHgzYioY,4395
 water_column_sonar_processing/utility/timestamp.py,sha256=bO0oir7KxxoEHPGRkz9FCBfOligkocUyRiWRzAq8fnU,361
-water_column_sonar_processing-25.1.
-water_column_sonar_processing-25.1.
-water_column_sonar_processing-25.1.
-water_column_sonar_processing-25.1.
-water_column_sonar_processing-25.1.
+water_column_sonar_processing-25.1.4.dist-info/LICENSE,sha256=lz4IpJ5_adG3S0ali-WaIpQFVTnEAOucMDQPECUVEYw,1110
+water_column_sonar_processing-25.1.4.dist-info/METADATA,sha256=vI9oh0A7AwlM3jQSKWNw3CdPR_yz4yHiynYuKbzNYH0,5959
+water_column_sonar_processing-25.1.4.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+water_column_sonar_processing-25.1.4.dist-info/top_level.txt,sha256=aRYU4A7RNBlNrL4vzjytFAir3BNnmOgsvIGKKA36tg4,30
+water_column_sonar_processing-25.1.4.dist-info/RECORD,,
{water_column_sonar_processing-25.1.3.dist-info → water_column_sonar_processing-25.1.4.dist-info}/LICENSE: file without changes
{water_column_sonar_processing-25.1.3.dist-info → water_column_sonar_processing-25.1.4.dist-info}/WHEEL: file without changes
{water_column_sonar_processing-25.1.3.dist-info → water_column_sonar_processing-25.1.4.dist-info}/top_level.txt: file without changes