water-column-sonar-processing 0.0.1__py3-none-any.whl → 25.11.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of water-column-sonar-processing might be problematic.
- water_column_sonar_processing/__init__.py +13 -0
- water_column_sonar_processing/aws/__init__.py +7 -0
- water_column_sonar_processing/aws/dynamodb_manager.py +355 -0
- water_column_sonar_processing/aws/s3_manager.py +420 -0
- water_column_sonar_processing/aws/s3fs_manager.py +72 -0
- {model → water_column_sonar_processing}/aws/sns_manager.py +10 -21
- {model → water_column_sonar_processing}/aws/sqs_manager.py +11 -19
- water_column_sonar_processing/cruise/__init__.py +4 -0
- water_column_sonar_processing/cruise/create_empty_zarr_store.py +191 -0
- water_column_sonar_processing/cruise/datatree_manager.py +21 -0
- water_column_sonar_processing/cruise/resample_regrid.py +339 -0
- water_column_sonar_processing/geometry/__init__.py +11 -0
- water_column_sonar_processing/geometry/elevation_manager.py +111 -0
- water_column_sonar_processing/geometry/geometry_manager.py +243 -0
- water_column_sonar_processing/geometry/line_simplification.py +176 -0
- water_column_sonar_processing/geometry/pmtile_generation.py +261 -0
- water_column_sonar_processing/index/__init__.py +3 -0
- water_column_sonar_processing/index/index_manager.py +384 -0
- water_column_sonar_processing/model/__init__.py +3 -0
- water_column_sonar_processing/model/zarr_manager.py +722 -0
- water_column_sonar_processing/process.py +149 -0
- water_column_sonar_processing/processing/__init__.py +4 -0
- water_column_sonar_processing/processing/raw_to_netcdf.py +320 -0
- water_column_sonar_processing/processing/raw_to_zarr.py +425 -0
- water_column_sonar_processing/utility/__init__.py +13 -0
- {model → water_column_sonar_processing}/utility/cleaner.py +7 -8
- water_column_sonar_processing/utility/constants.py +118 -0
- {model → water_column_sonar_processing}/utility/pipeline_status.py +47 -24
- water_column_sonar_processing/utility/timestamp.py +12 -0
- water_column_sonar_processing-25.11.1.dist-info/METADATA +182 -0
- water_column_sonar_processing-25.11.1.dist-info/RECORD +34 -0
- {water_column_sonar_processing-0.0.1.dist-info → water_column_sonar_processing-25.11.1.dist-info}/WHEEL +1 -1
- {water_column_sonar_processing-0.0.1.dist-info → water_column_sonar_processing-25.11.1.dist-info/licenses}/LICENSE +1 -1
- water_column_sonar_processing-25.11.1.dist-info/top_level.txt +1 -0
- __init__.py +0 -0
- model/__init__.py +0 -0
- model/aws/__init__.py +0 -0
- model/aws/dynamodb_manager.py +0 -149
- model/aws/s3_manager.py +0 -356
- model/aws/s3fs_manager.py +0 -74
- model/cruise/__init__.py +0 -0
- model/cruise/create_empty_zarr_store.py +0 -166
- model/cruise/resample_regrid.py +0 -248
- model/geospatial/__init__.py +0 -0
- model/geospatial/geometry_manager.py +0 -194
- model/geospatial/geometry_simplification.py +0 -81
- model/geospatial/pmtile_generation.py +0 -74
- model/index/__init__.py +0 -0
- model/index/index.py +0 -228
- model/model.py +0 -138
- model/utility/__init__.py +0 -0
- model/utility/constants.py +0 -56
- model/utility/timestamp.py +0 -12
- model/zarr/__init__.py +0 -0
- model/zarr/bar.py +0 -28
- model/zarr/foo.py +0 -11
- model/zarr/zarr_manager.py +0 -298
- water_column_sonar_processing-0.0.1.dist-info/METADATA +0 -89
- water_column_sonar_processing-0.0.1.dist-info/RECORD +0 -32
- water_column_sonar_processing-0.0.1.dist-info/top_level.txt +0 -2
{model → water_column_sonar_processing}/utility/pipeline_status.py

@@ -16,13 +16,17 @@ class PipelineStatus(Flag):
     PROCESSING_RAW_PROCESSOR = auto()
     FAILURE_RAW_PROCESSOR = auto()
     SUCCESS_RAW_PROCESSOR = auto()
-    RAW_PROCESSOR =
+    RAW_PROCESSOR = (
+        PROCESSING_RAW_PROCESSOR | FAILURE_RAW_PROCESSOR | SUCCESS_RAW_PROCESSOR
+    )
     #
     # RAW_AGGREGATOR --> AGGREGATOR (Scatter-Gather EIP)
     PROCESSING_RAW_AGGREGATOR = auto()
     FAILURE_RAW_AGGREGATOR = auto()
     SUCCESS_RAW_AGGREGATOR = auto()
-    RAW_AGGREGATOR =
+    RAW_AGGREGATOR = (
+        PROCESSING_RAW_AGGREGATOR | FAILURE_RAW_AGGREGATOR | SUCCESS_RAW_AGGREGATOR
+    )
     #
     LEVEL_1_PROCESSING = RAW_SPLITTER | RAW_PROCESSOR | RAW_AGGREGATOR
     #
@@ -35,27 +39,43 @@ class PipelineStatus(Flag):
     PROCESSING_CRUISE_INITIALIZER = auto()
     FAILURE_CRUISE_INITIALIZER = auto()
     SUCCESS_CRUISE_INITIALIZER = auto()
-    CRUISE_INITIALIZER =
+    CRUISE_INITIALIZER = (
+        PROCESSING_CRUISE_INITIALIZER
+        | FAILURE_CRUISE_INITIALIZER
+        | SUCCESS_CRUISE_INITIALIZER
+    )
     #
     # CRUISE_SPLITTER --> SPLITTER
     PROCESSING_CRUISE_SPLITTER = auto()
     FAILURE_CRUISE_SPLITTER = auto()
     SUCCESS_CRUISE_SPLITTER = auto()
-    CRUISE_SPLITTER =
+    CRUISE_SPLITTER = (
+        PROCESSING_CRUISE_SPLITTER | FAILURE_CRUISE_SPLITTER | SUCCESS_CRUISE_SPLITTER
+    )
     #
     # CRUISE_PROCESSOR --> PROCESSOR <-- Note: these need to run sequentially now
     PROCESSING_CRUISE_PROCESSOR = auto()
     FAILURE_CRUISE_PROCESSOR = auto()
     SUCCESS_CRUISE_PROCESSOR = auto()
-    CRUISE_PROCESSOR =
+    CRUISE_PROCESSOR = (
+        PROCESSING_CRUISE_PROCESSOR
+        | FAILURE_CRUISE_PROCESSOR
+        | SUCCESS_CRUISE_PROCESSOR
+    )
     #
     # CRUISE_AGGREGATOR --> AGGREGATOR
     PROCESSING_CRUISE_AGGREGATOR = auto()
     FAILURE_CRUISE_AGGREGATOR = auto()
     SUCCESS_CRUISE_AGGREGATOR = auto()
-    CRUISE_AGGREGATOR =
+    CRUISE_AGGREGATOR = (
+        PROCESSING_CRUISE_AGGREGATOR
+        | FAILURE_CRUISE_AGGREGATOR
+        | SUCCESS_CRUISE_AGGREGATOR
+    )
     #
-    LEVEL_2_PROCESSING =
+    LEVEL_2_PROCESSING = (
+        CRUISE_INITIALIZER | CRUISE_SPLITTER | CRUISE_PROCESSOR | CRUISE_AGGREGATOR
+    )
     #
     # --------------------------------------------------- #
     # --- Level 3 Data ---------------------------------- #
@@ -64,35 +84,38 @@ class PipelineStatus(Flag):
     PROCESSING_TILE_PROCESSOR = auto()
     FAILURE_TILE_PROCESSOR = auto()
     SUCCESS_TILE_PROCESSOR = auto()
-    TILE_PROCESSOR =
+    TILE_PROCESSOR = (
+        PROCESSING_TILE_PROCESSOR | FAILURE_TILE_PROCESSOR | SUCCESS_TILE_PROCESSOR
+    )
     #
     # GEOHASH_PROCESSOR
     PROCESSING_GEOHASH_PROCESSOR = auto()
     FAILURE_GEOHASH_PROCESSOR = auto()
     SUCCESS_GEOHASH_PROCESSOR = auto()
-    GEOHASH_PROCESSOR =
+    GEOHASH_PROCESSOR = (
+        PROCESSING_GEOHASH_PROCESSOR
+        | FAILURE_GEOHASH_PROCESSOR
+        | SUCCESS_GEOHASH_PROCESSOR
+    )
     #
     LEVEL_3_PROCESSING = TILE_PROCESSOR | GEOHASH_PROCESSOR
     # --------------------------------------------------- #
     # --------------------------------------------------- #

+
     # Status.PROCESSING_RAW_AGGREGATOR in Status.LEVEL_1_PROCESSING
     # Status.LEVEL_1_PROCESSING.value < Status.LEVEL_2_PROCESSING.value

     # https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudformation-stack.html
-    """
-    CREATE_IN_PROGRESS | CREATE_FAILED | CREATE_COMPLETE |
-    ROLLBACK_IN_PROGRESS | ROLLBACK_FAILED | ROLLBACK_COMPLETE |
-    DELETE_IN_PROGRESS | DELETE_FAILED | DELETE_COMPLETE |
-    UPDATE_IN_PROGRESS | UPDATE_COMPLETE_CLEANUP_IN_PROGRESS | UPDATE_COMPLETE |
-    UPDATE_FAILED | UPDATE_ROLLBACK_IN_PROGRESS | UPDATE_ROLLBACK_FAILED |
-    UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS | UPDATE_ROLLBACK_COMPLETE |
-    REVIEW_IN_PROGRESS | IMPORT_IN_PROGRESS | IMPORT_COMPLETE |
-    IMPORT_ROLLBACK_IN_PROGRESS | IMPORT_ROLLBACK_FAILED | IMPORT_ROLLBACK_COMPLETE
-
-    failure - noun -
-    failed - verb - "verbs should be avoided"
-
-    success - noun

-
+    # CREATE_IN_PROGRESS | CREATE_FAILED | CREATE_COMPLETE |
+    # ROLLBACK_IN_PROGRESS | ROLLBACK_FAILED | ROLLBACK_COMPLETE |
+    # DELETE_IN_PROGRESS | DELETE_FAILED | DELETE_COMPLETE |
+    # UPDATE_IN_PROGRESS | UPDATE_COMPLETE_CLEANUP_IN_PROGRESS | UPDATE_COMPLETE |
+    # UPDATE_FAILED | UPDATE_ROLLBACK_IN_PROGRESS | UPDATE_ROLLBACK_FAILED |
+    # UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS | UPDATE_ROLLBACK_COMPLETE |
+    # REVIEW_IN_PROGRESS | IMPORT_IN_PROGRESS | IMPORT_COMPLETE |
+    # IMPORT_ROLLBACK_IN_PROGRESS | IMPORT_ROLLBACK_FAILED | IMPORT_ROLLBACK_COMPLETE
+    # failure - noun -
+    # failed - verb - "verbs should be avoided"
+    # success - noun
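Aside: the composite members introduced above are standard `enum.Flag` named combinations, so the membership checks hinted at in the trailing comments work directly. A minimal sketch of the pattern, trimmed to a single stage group rather than the full class from the package:

```python
from enum import Flag, auto


class PipelineStatus(Flag):
    # Trimmed illustration of the pattern used in the diff above.
    PROCESSING_RAW_PROCESSOR = auto()
    FAILURE_RAW_PROCESSOR = auto()
    SUCCESS_RAW_PROCESSOR = auto()
    # Named combination covering every RAW_PROCESSOR state.
    RAW_PROCESSOR = (
        PROCESSING_RAW_PROCESSOR | FAILURE_RAW_PROCESSOR | SUCCESS_RAW_PROCESSOR
    )


# Membership tests against the composite member.
print(PipelineStatus.PROCESSING_RAW_PROCESSOR in PipelineStatus.RAW_PROCESSOR)  # True
print(PipelineStatus.FAILURE_RAW_PROCESSOR in PipelineStatus.RAW_PROCESSOR)  # True
```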
water_column_sonar_processing/utility/timestamp.py ADDED

@@ -0,0 +1,12 @@
+import datetime
+
+
+###########################################################
+class Timestamp:
+    @staticmethod
+    def get_timestamp():
+        # return timestamp in form: '2024-03-29T19:36:52.433Z'
+        return f"{datetime.datetime.now(datetime.UTC).isoformat()[:23]}Z"
+
+
+###########################################################
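Aside: the helper above returns a millisecond-precision UTC string with a trailing `Z`. A hypothetical usage sketch, assuming the package layout shown in the file summary at the top of this diff:

```python
# Hypothetical usage of the Timestamp helper added above; the import path
# assumes water_column_sonar_processing/utility/timestamp.py as listed earlier.
from water_column_sonar_processing.utility.timestamp import Timestamp

# Prints a UTC timestamp such as '2024-03-29T19:36:52.433Z'
print(Timestamp.get_timestamp())
```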
water_column_sonar_processing-25.11.1.dist-info/METADATA ADDED

@@ -0,0 +1,182 @@
+Metadata-Version: 2.4
+Name: water-column-sonar-processing
+Version: 25.11.1
+Summary: Processing tool for water column sonar data.
+Author-email: Rudy Klucik <rudy.klucik@noaa.gov>
+Maintainer-email: Rudy Klucik <rudy.klucik@noaa.gov>
+License-Expression: MIT
+Project-URL: Homepage, https://echo.fish
+Project-URL: Repository, https://github.com/CI-CMG/water-column-sonar-processing
+Project-URL: Issues, https://github.com/CI-CMG/water-column-sonar-processing/issues
+Keywords: ocean,sonar,water column,zarr
+Requires-Python: >=3.12
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: bandit==1.8.6
+Requires-Dist: black>=25.11.0
+Requires-Dist: boto3==1.40.49
+Requires-Dist: boto3-stubs==1.40.49
+Requires-Dist: botocore==1.40.49
+Requires-Dist: build>=1.3.0
+Requires-Dist: echopype==0.11.0
+Requires-Dist: fiona==1.10.1
+Requires-Dist: geopandas==1.1.1
+Requires-Dist: importlib
+Requires-Dist: mock==5.2.0
+Requires-Dist: moto[all]==5.1.16
+Requires-Dist: moto[server]==5.1.16
+Requires-Dist: multidict==6.7.0
+Requires-Dist: netcdf4==1.7.3
+Requires-Dist: numpy==2.3.4
+Requires-Dist: pandas==2.3.3
+Requires-Dist: pandas-stubs==2.3.2.250926
+Requires-Dist: pooch==1.8.2
+Requires-Dist: pyarrow==22.0.0
+Requires-Dist: pykalman==0.10.2
+Requires-Dist: python-dotenv==1.2.1
+Requires-Dist: requests==2.32.5
+Requires-Dist: s3fs==2025.10.0
+Requires-Dist: scipy==1.16.3
+Requires-Dist: setuptools==80.9.0
+Requires-Dist: shapely==2.1.2
+Requires-Dist: xarray==2025.10.1
+Requires-Dist: zarr==3.1.3
+Dynamic: license-file
+
+# Water Column Sonar Processing
+
+Processing tool for converting Level_0 water column sonar data to Level_1 and Level_2 derived data sets as well as
+generating geospatial information.
+
+
+  
+
+# Setting up the Python Environment
+
+> Python 3.12.12
+
+# Installing Dependencies
+
+```
+source .venv/bin/activate
+# or ".venv\Scripts\activate" in windows
+
+uv pip install --upgrade pip
+
+uv sync --all-groups
+
+uv run pre-commit install
+```
+
+# Pytest
+
+```
+uv run pytest --cache-clear tests # -W ignore::DeprecationWarning
+```
+
+or
+> pytest --cache-clear --cov=src tests/ --cov-report=xml
+
+# Instructions
+
+Following this tutorial:
+https://packaging.python.org/en/latest/tutorials/packaging-projects/
+
+# Pre Commit Hook
+
+see here for installation: https://pre-commit.com/
+https://dev.to/rafaelherik/using-trufflehog-and-pre-commit-hook-to-prevent-secret-exposure-edo
+
+```
+uv run pre-commit install --allow-missing-config
+# or
+uv run pre-commit install
+```
+
+# Black
+
+https://ljvmiranda921.github.io/notebook/2018/06/21/precommits-using-black-and-flake8/
+
+```
+Settings > Black
+Execution mode: Package
+Python Interpreter: .../.venv/bin/python
+Use Black Formatter: X On Code reformat, X On Save
+```
+
+# Linting
+
+Ruff
+https://plugins.jetbrains.com/plugin/20574-ruff
+
+# Colab Test
+
+https://colab.research.google.com/drive/1KiLMueXiz9WVB9o4RuzYeGjNZ6PsZU7a#scrollTo=AayVyvpBdfIZ
+
+# Test Coverage
+
+TODO
+
+# Tag a Release
+
+Step 1 --> increment the semantic version in the zarr_manager.py "metadata" & the "pyproject.toml"
+
+```commandline
+git tag -a v25.11.1 -m "Releasing v25.11.1"
+git push origin --tags
+```
+
+# To Publish To PROD
+
+```
+uv build --no-sources
+uv publish
+```
+
+# TODO:
+
+add https://pypi.org/project/setuptools-scm/
+for extracting the version
+
+# Security scanning
+
+> bandit -r water_column_sonar_processing/
+
+# Data Debugging
+
+Experimental Plotting in Xarray (hvPlot):
+https://colab.research.google.com/drive/18vrI9LAip4xRGEX6EvnuVFp35RAiVYwU#scrollTo=q9_j9p2yXsLV
+
+HB0707 Zoomable Cruise:
+https://hb0707.s3.us-east-1.amazonaws.com/index.html
+
+# UV Debugging
+
+```
+uv pip install --upgrade pip
+uv sync --all-groups
+uv run pre-commit install
+uv lock --check
+uv lock
+uv sync --all-groups
+uv run pytest --cache-clear tests
+```
+
+# Fixing S3FS Problems
+
+```commandline
+To enable/disa asyncio for the debugger, follow the steps:
+Open PyCharm
+Use Shift + Shift (Search Everywhere)
+In the popup type: Registry and press Enter
+Find "Registry" in the list of results and click on it.
+In the new popup find python.debug.asyncio.repl line and check the respective checkbox
+Press Close.
+Restart the IDE.
+The asyncio support will be enabled in the debugger.
+```
+
+# Fixing windows/wsl/ubuntu/mac git compatability
+
+> git config --global core.filemode false
+> git config --global core.autocrlf true
water_column_sonar_processing-25.11.1.dist-info/RECORD ADDED

@@ -0,0 +1,34 @@
+water_column_sonar_processing/__init__.py,sha256=fvRK4uFo_A0l7w_T4yckvDqJ3wMUq4JB3VVPXqWfewE,226
+water_column_sonar_processing/process.py,sha256=eatdHHc7_mPapTjxsOyD24OlEWeqj56MmVTMPjxF2QM,5290
+water_column_sonar_processing/aws/__init__.py,sha256=KJqK8oYMn-u8n8i-Jp_lG5BvCOTjwWSjWP8yAyDlWVo,297
+water_column_sonar_processing/aws/dynamodb_manager.py,sha256=EwSc4UTVQTR6tXsrx2eNdbhvexxfrz3GkkvO97F_9fM,14017
+water_column_sonar_processing/aws/s3_manager.py,sha256=ZuShUfg4hx7XUWRiSywHJ3m0Szl699Bd8AfPualbuhs,16299
+water_column_sonar_processing/aws/s3fs_manager.py,sha256=vl_NbekHoAMv81wtx3QaNOFmv0erxAtOyD5t_RLUqCA,2546
+water_column_sonar_processing/aws/sns_manager.py,sha256=Dp9avG5VSugSWPR1dZ-askuAw1fCZkNUHbOUP65iR-k,1867
+water_column_sonar_processing/aws/sqs_manager.py,sha256=j_DGOKble3i-DlHT_uGxCFEmHVbYhFrpbhXdJKLtBSo,1600
+water_column_sonar_processing/cruise/__init__.py,sha256=H5hW0JMORuaFvQk_R31B4VL8RnRyKeanOOiWmqEMZJk,156
+water_column_sonar_processing/cruise/create_empty_zarr_store.py,sha256=IRPsEEAZ5C1XKlQYxNuYbPd7yrvN2aO_RqGBkRLKo90,7987
+water_column_sonar_processing/cruise/datatree_manager.py,sha256=ed_lY04MyaXKADl7EaqHyEZ8JQMiyIAXVhvtE7YnWcg,662
+water_column_sonar_processing/cruise/resample_regrid.py,sha256=wsO1_RwBlxRJ2QeAmvSBNN6Q5sOtS6WI8ADuEAd6rQU,15197
+water_column_sonar_processing/geometry/__init__.py,sha256=KNhgfKimxuMtyZwMQlIc_OTtEkYXNXl1WyvfNJThlWY,306
+water_column_sonar_processing/geometry/elevation_manager.py,sha256=Wi7xl6LFsxqUPGbrquqtPEKnOMpl3dMLJtEb7DR5o1c,4249
+water_column_sonar_processing/geometry/geometry_manager.py,sha256=dmPdFjeGXfh-bhdRSNVr1pBjnSlcjFUGwXIwXmq4Fe0,10986
+water_column_sonar_processing/geometry/line_simplification.py,sha256=adnFKOYby6Z37Io-6UZR-9QPxhWGC4IU1EtNpphQQ-w,9294
+water_column_sonar_processing/geometry/pmtile_generation.py,sha256=kZPG5oskCqsVyCr97c9nlhgNBkLtu_-gOlB2GKvtQhA,9462
+water_column_sonar_processing/index/__init__.py,sha256=izEObsKiOoIJ0kZCFhvaYsBd6Ga71XJxnogjrNInw68,68
+water_column_sonar_processing/index/index_manager.py,sha256=lW4FAczAih3e1nwlYhwL_-idLx2cLYRgTEwR2KgMQ2Q,16124
+water_column_sonar_processing/model/__init__.py,sha256=FXaCdbPqxp0ogmZm9NplRirqpgMiYs1iRYgJbFbbX2Y,65
+water_column_sonar_processing/model/zarr_manager.py,sha256=PNgvxEJ3efidqNqTX2TLaX6YVESKOHnJz7mLpzShruQ,32971
+water_column_sonar_processing/processing/__init__.py,sha256=TDnA4_Nyb8MnU8HaixaCzEdTbFkXsUXA0ekvxtYm1-U,150
+water_column_sonar_processing/processing/raw_to_netcdf.py,sha256=3UdglrWXFLxa_jHkhvC9P9NUkp-iKmp6MvnXFnXi4ns,13098
+water_column_sonar_processing/processing/raw_to_zarr.py,sha256=EfB_JWiatlAQKc17jDR1GD6k7txlKpQPFI4HknlFuoA,18826
+water_column_sonar_processing/utility/__init__.py,sha256=5gV8sJcnQGrV2AecwkBFZljOC-fQwLuDX70DxX59x2o,289
+water_column_sonar_processing/utility/cleaner.py,sha256=q9xx4NeVhaxQgGCCYyZv29PxvMHfD9txbnMahOccF8U,607
+water_column_sonar_processing/utility/constants.py,sha256=R0dF68O4I8mlOyZbI1nJuU8_cDMCcHC9fu10rMPtTpA,3390
+water_column_sonar_processing/utility/pipeline_status.py,sha256=xbl6-4ePq1Krfo18Mgr0jzWz9C_g2Kbey_QZNJuwdkI,4406
+water_column_sonar_processing/utility/timestamp.py,sha256=rn8SDbGYjRvanDUOrvKpNjp3-AK6-KlU2NaCk6Ok8rc,337
+water_column_sonar_processing-25.11.1.dist-info/licenses/LICENSE,sha256=TosqaZpJgYvhgXIyYBti-ggJaO8rxRg3FtThY08s9Aw,1110
+water_column_sonar_processing-25.11.1.dist-info/METADATA,sha256=ytZEScINaRFxfYJibro9GHeNfCtgUm8xiwe9nCe1ico,4718
+water_column_sonar_processing-25.11.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+water_column_sonar_processing-25.11.1.dist-info/top_level.txt,sha256=aRYU4A7RNBlNrL4vzjytFAir3BNnmOgsvIGKKA36tg4,30
+water_column_sonar_processing-25.11.1.dist-info/RECORD,,
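Aside: each RECORD row is `path,sha256=<digest>,<size>`, where the digest is the urlsafe base64 encoding (padding stripped) of the file's SHA-256 hash, per the wheel RECORD format. A sketch of re-checking one entry against an unpacked wheel; the local file path is illustrative:

```python
import base64
import hashlib


def record_digest(path: str) -> str:
    """Compute the sha256= value RECORD uses: urlsafe base64 with '=' padding stripped."""
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    return base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


# Illustrative check against the timestamp.py entry above (assumes an unpacked wheel on disk).
expected = "rn8SDbGYjRvanDUOrvKpNjp3-AK6-KlU2NaCk6Ok8rc"
print(record_digest("water_column_sonar_processing/utility/timestamp.py") == expected)
```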
{water_column_sonar_processing-0.0.1.dist-info → water_column_sonar_processing-25.11.1.dist-info/licenses}/LICENSE

@@ -1,6 +1,6 @@
 MIT License

-Copyright (c)
+Copyright (c) 2025 Cooperative Institutes, Coastal and Marine Geophysics

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
water_column_sonar_processing-25.11.1.dist-info/top_level.txt ADDED

@@ -0,0 +1 @@
+water_column_sonar_processing

__init__.py DELETED (file without changes)
model/__init__.py DELETED (file without changes)
model/aws/__init__.py DELETED (file without changes)

model/aws/dynamodb_manager.py DELETED

@@ -1,149 +0,0 @@
-import os
-import boto3
-import pandas as pd
-from boto3.dynamodb.types import TypeSerializer, TypeDeserializer
-
-
-#########################################################################
-class DynamoDBManager:
-    #####################################################################
-    def __init__(self):
-        self.__dynamodb_session = boto3.Session(
-            aws_access_key_id=os.environ.get('ACCESS_KEY_ID'),
-            aws_secret_access_key=os.environ.get('SECRET_ACCESS_KEY'),
-            region_name=os.environ.get("AWS_REGION", default="us-east-1")
-        )
-        self.__dynamodb_resource = self.__dynamodb_session.resource(
-            service_name="dynamodb",
-        )
-        self.__dynamodb_client = self.__dynamodb_session.client(
-            service_name="dynamodb",
-        )
-        self.type_serializer = TypeSerializer()  # https://stackoverflow.com/a/46738251
-        self.type_deserializer = TypeDeserializer()
-
-    #####################################################################
-    ### defined in raw-to-zarr, not used
-    # def put_item(
-    #         self,
-    #         table_name,
-    #         item
-    # ):
-    #     response = boto3.Session().client(service_name='dynamodb').put_item(TableName=table_name, Item=item)
-    #     status_code = response['ResponseMetadata']['HTTPStatusCode']
-    #     assert (status_code == 200), "Problem, unable to update dynamodb table."
-
-    #####################################################################
-    def create_table(
-        self,
-        table_name,
-        key_schema,
-        attribute_definitions,
-    ):
-        self.__dynamodb_client.create_table(
-            AttributeDefinitions=attribute_definitions,
-            TableName=table_name,
-            KeySchema=key_schema,
-            BillingMode="PAY_PER_REQUEST",  # "PROVISIONED",
-            # ProvisionedThroughput={
-            #     'ReadCapacityUnits': 1_000,
-            #     'WriteCapacityUnits': 1_000
-            # }
-        )
-
-    #####################################################################
-    def get_item(
-        self,
-        table_name,
-        key
-    ):
-        response = self.__dynamodb_client.get_item(TableName=table_name, Key=key)
-        item = None
-        if response['ResponseMetadata']['HTTPStatusCode'] == 200:
-            if 'Item' in response:
-                item = response['Item']
-        return item
-
-    #####################################################################
-    def update_item(
-        self,
-        table_name,
-        key,
-        expression_attribute_names,
-        expression_attribute_values,
-        update_expression
-    ):
-        response = self.__dynamodb_client.update_item(
-            TableName=table_name,
-            Key=key,
-            ExpressionAttributeNames=expression_attribute_names,
-            ExpressionAttributeValues=expression_attribute_values,
-            UpdateExpression=update_expression
-        )
-        status_code = response['ResponseMetadata']['HTTPStatusCode']
-        # TODO: change to exception
-        assert (status_code == 200), "Problem, unable to update dynamodb table."
-
-    #####################################################################
-    def get_table_as_df(
-        self,
-        ship_name,
-        cruise_name,
-        sensor_name,
-        table_name,
-    ):
-        expression_attribute_values = {
-            ':cr': {'S': cruise_name},
-            ':se': {'S': sensor_name},
-            ':sh': {'S': ship_name},
-        }
-
-        filter_expression = 'CRUISE_NAME = :cr and SENSOR_NAME = :se and SHIP_NAME = :sh'
-        response = self.__dynamodb_client.scan(
-            TableName=table_name,
-            Select='ALL_ATTRIBUTES',
-            ExpressionAttributeValues=expression_attribute_values,
-            FilterExpression=filter_expression,
-        )
-        # Note: table.scan() has 1 MB limit on results so pagination is used
-        data = response['Items']
-
-        while 'LastEvaluatedKey' in response:
-            response = self.__dynamodb_client.scan(
-                TableName=table_name,
-                Select='ALL_ATTRIBUTES',
-                ExpressionAttributeValues=expression_attribute_values,
-                FilterExpression=filter_expression,
-                ExclusiveStartKey=response['LastEvaluatedKey']
-            )
-            data.extend(response['Items'])
-
-        deserializer = self.type_deserializer
-        df = pd.DataFrame([deserializer.deserialize({"M": i}) for i in data])
-
-        return df.sort_values(by='START_TIME', ignore_index=True)
-
-    #####################################################################
-    # is this used?
-    def get_table_item(
-        self,
-        table_name,
-        key,
-    ):
-        # a bit more high level, uses resource to get table item
-        table = self.__dynamodb_resource.Table(table_name)
-        response = table.get_item(
-            Key=key
-        )
-        return response
-
-    #####################################################################
-    # TODO: add helper method to delete the data
-    def delete_cruise(
-        self,
-        table_name,
-        cruise_name,
-    ):
-        pass
-
-#########################################################################
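Aside: the removed `get_table_as_df` loops on `LastEvaluatedKey` because a single DynamoDB `Scan` call returns at most 1 MB of results. The same pagination can also be expressed with boto3's built-in scan paginator; a sketch under assumed table and attribute names, not code from this package:

```python
import boto3

# Sketch only: same scan-with-pagination idea as the removed code, using
# boto3's built-in paginator instead of a manual LastEvaluatedKey loop.
client = boto3.client("dynamodb", region_name="us-east-1")
paginator = client.get_paginator("scan")

items = []
for page in paginator.paginate(
    TableName="example-table",  # illustrative table name
    Select="ALL_ATTRIBUTES",
    FilterExpression="CRUISE_NAME = :cr",
    ExpressionAttributeValues={":cr": {"S": "HB0707"}},
):
    # Each page corresponds to one Scan call; the paginator follows
    # LastEvaluatedKey automatically.
    items.extend(page["Items"])
```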