water-column-sonar-processing 0.0.1__py3-none-any.whl → 25.11.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of water-column-sonar-processing might be problematic.
- water_column_sonar_processing/__init__.py +13 -0
- water_column_sonar_processing/aws/__init__.py +7 -0
- water_column_sonar_processing/aws/dynamodb_manager.py +355 -0
- water_column_sonar_processing/aws/s3_manager.py +420 -0
- water_column_sonar_processing/aws/s3fs_manager.py +72 -0
- {model → water_column_sonar_processing}/aws/sns_manager.py +10 -21
- {model → water_column_sonar_processing}/aws/sqs_manager.py +11 -19
- water_column_sonar_processing/cruise/__init__.py +4 -0
- water_column_sonar_processing/cruise/create_empty_zarr_store.py +191 -0
- water_column_sonar_processing/cruise/datatree_manager.py +21 -0
- water_column_sonar_processing/cruise/resample_regrid.py +339 -0
- water_column_sonar_processing/geometry/__init__.py +11 -0
- water_column_sonar_processing/geometry/elevation_manager.py +111 -0
- water_column_sonar_processing/geometry/geometry_manager.py +243 -0
- water_column_sonar_processing/geometry/line_simplification.py +176 -0
- water_column_sonar_processing/geometry/pmtile_generation.py +261 -0
- water_column_sonar_processing/index/__init__.py +3 -0
- water_column_sonar_processing/index/index_manager.py +384 -0
- water_column_sonar_processing/model/__init__.py +3 -0
- water_column_sonar_processing/model/zarr_manager.py +722 -0
- water_column_sonar_processing/process.py +149 -0
- water_column_sonar_processing/processing/__init__.py +4 -0
- water_column_sonar_processing/processing/raw_to_netcdf.py +320 -0
- water_column_sonar_processing/processing/raw_to_zarr.py +425 -0
- water_column_sonar_processing/utility/__init__.py +13 -0
- {model → water_column_sonar_processing}/utility/cleaner.py +7 -8
- water_column_sonar_processing/utility/constants.py +118 -0
- {model → water_column_sonar_processing}/utility/pipeline_status.py +47 -24
- water_column_sonar_processing/utility/timestamp.py +12 -0
- water_column_sonar_processing-25.11.1.dist-info/METADATA +182 -0
- water_column_sonar_processing-25.11.1.dist-info/RECORD +34 -0
- {water_column_sonar_processing-0.0.1.dist-info → water_column_sonar_processing-25.11.1.dist-info}/WHEEL +1 -1
- {water_column_sonar_processing-0.0.1.dist-info → water_column_sonar_processing-25.11.1.dist-info/licenses}/LICENSE +1 -1
- water_column_sonar_processing-25.11.1.dist-info/top_level.txt +1 -0
- __init__.py +0 -0
- model/__init__.py +0 -0
- model/aws/__init__.py +0 -0
- model/aws/dynamodb_manager.py +0 -149
- model/aws/s3_manager.py +0 -356
- model/aws/s3fs_manager.py +0 -74
- model/cruise/__init__.py +0 -0
- model/cruise/create_empty_zarr_store.py +0 -166
- model/cruise/resample_regrid.py +0 -248
- model/geospatial/__init__.py +0 -0
- model/geospatial/geometry_manager.py +0 -194
- model/geospatial/geometry_simplification.py +0 -81
- model/geospatial/pmtile_generation.py +0 -74
- model/index/__init__.py +0 -0
- model/index/index.py +0 -228
- model/model.py +0 -138
- model/utility/__init__.py +0 -0
- model/utility/constants.py +0 -56
- model/utility/timestamp.py +0 -12
- model/zarr/__init__.py +0 -0
- model/zarr/bar.py +0 -28
- model/zarr/foo.py +0 -11
- model/zarr/zarr_manager.py +0 -298
- water_column_sonar_processing-0.0.1.dist-info/METADATA +0 -89
- water_column_sonar_processing-0.0.1.dist-info/RECORD +0 -32
- water_column_sonar_processing-0.0.1.dist-info/top_level.txt +0 -2
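The headline change in this release is the rename of the top-level package from the generic `model` to `water_column_sonar_processing`, with the old `model/zarr`, `model/geospatial`, and `model/index` modules redistributed across `model`, `geometry`, and `index` subpackages. A minimal sketch of how downstream imports would migrate, assuming the new module paths match the wheel contents listed above (the re-exports added in the new `__init__.py` are not shown in this diff, so the class name and path are inferred):

```python
# 0.0.1 layout: top-level package was "model"
from model.zarr.zarr_manager import ZarrManager

# 25.11.1 layout, inferred from the file list above
from water_column_sonar_processing.model.zarr_manager import ZarrManager
```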
model/zarr/zarr_manager.py
DELETED
@@ -1,298 +0,0 @@
-import os
-import zarr
-import numcodecs
-import numpy as np
-import xarray as xr
-from numcodecs import Blosc
-from ..aws.s3fs_manager import S3FSManager
-from ..utility.constants import Constants, Coordinates
-from ..utility.timestamp import Timestamp
-
-numcodecs.blosc.use_threads = False
-numcodecs.blosc.set_nthreads(1)
-
-
-# TODO: when ready switch to version 3 of zarr spec
-# ZARR_V3_EXPERIMENTAL_API = 1
-
-# creates the latlon data: foo = ep.consolidate.add_location(ds_Sv, echodata)
-
-class ZarrManager:
-    #######################################################
-    def __init__(
-        self,
-    ):
-        # TODO: revert to Blosc.BITSHUFFLE, troubleshooting misc error
-        self.__compressor = Blosc(cname="zstd", clevel=2)  # shuffle=Blosc.NOSHUFFLE
-        self.__overwrite = True
-        self.__num_threads = numcodecs.blosc.get_nthreads()
-        self.input_bucket_name = os.environ.get("INPUT_BUCKET_NAME")
-        self.output_bucket_name = os.environ.get("OUTPUT_BUCKET_NAME")
-
-    #######################################################
-    @staticmethod
-    def get_depth_values(
-        min_echo_range: float = 1.,  # minimum depth measured (zero non-inclusive) from whole cruise
-        max_echo_range: float = 100.,  # maximum depth measured from whole cruise
-    ):
-        # Gets the set of depth values that will be used when resampling and
-        # regridding the data to a cruise-level zarr store.
-        # Note: returned values do not start at zero.
-        print('Getting depth values.')
-        all_cruise_depth_values = np.linspace(
-            start=min_echo_range,
-            stop=max_echo_range,
-            num=int(max_echo_range / min_echo_range) + 1,
-            endpoint=True
-        )
-
-        print("Done getting depth values.")
-        return all_cruise_depth_values.round(decimals=2)
-
-    #######################################################
-    def create_zarr_store(
-        self,
-        path: str,
-        ship_name: str,
-        cruise_name: str,
-        sensor_name: str,
-        frequencies: list,  # units in Hz
-        width: int,  # TODO: needs better name... "ping_time"
-        min_echo_range: float,  # smallest resolution in meters
-        max_echo_range: float,
-        calibration_status: bool = False  # Assume uncalibrated
-    ) -> str:
-        print(f'Creating local zarr store at {cruise_name}.zarr for ship {ship_name}')
-
-        # There should be no repeated frequencies
-        assert len(frequencies) == len(set(frequencies))
-        # TODO: eventually switch coordinate to "channel"
-
-        print(f"Debugging number of threads: {self.__num_threads}")
-
-        zarr_path = f"{path}/{cruise_name}.zarr"
-        store = zarr.DirectoryStore(path=zarr_path, normalize_keys=False)
-        root = zarr.group(store=store, overwrite=self.__overwrite, cache_attrs=True)
-
-        #####################################################################
-        # --- Coordinate: Time --- #
-        # https://zarr.readthedocs.io/en/stable/spec/v2.html#data-type-encoding
-        root.create_dataset(
-            name=Coordinates.TIME.value,
-            data=np.repeat(0., width),
-            shape=width,
-            chunks=(Constants.TILE_SIZE.value, ),  # TODO: the chunking scheme doesn't seem to be working here
-            dtype=np.dtype(Coordinates.TIME_DTYPE.value),
-            compressor=self.__compressor,
-            # fill_value=0.,
-            fill_value=np.nan,  # TODO: do I want NaNs?
-            overwrite=self.__overwrite
-        )
-
-        root.time.attrs['_ARRAY_DIMENSIONS'] = [Coordinates.TIME.value]
-
-        root.time.attrs['calendar'] = Coordinates.TIME_CALENDAR.value
-        root.time.attrs['units'] = Coordinates.TIME_UNITS.value
-        root.time.attrs['long_name'] = Coordinates.TIME_LONG_NAME.value
-        root.time.attrs['standard_name'] = Coordinates.TIME_STANDARD_NAME.value
-
-        #####################################################################
-        # --- Coordinate: Depth --- #
-        depth_values = self.get_depth_values(
-            min_echo_range=min_echo_range,
-            max_echo_range=max_echo_range
-        )
-
-        root.create_dataset(
-            name=Coordinates.DEPTH.value,
-            # TODO: verify that these values are correct
-            data=depth_values,
-            shape=len(depth_values),
-            chunks=Constants.TILE_SIZE.value,
-            dtype=np.dtype(Coordinates.DEPTH_DTYPE.value),  # float16 == 2 significant digits would be ideal
-            compressor=self.__compressor,
-            # fill_value=np.nan,
-            overwrite=self.__overwrite
-        )
-        # TODO: change to exception
-        assert not np.any(np.isnan(depth_values))
-
-        root.depth.attrs['_ARRAY_DIMENSIONS'] = [Coordinates.DEPTH.value]
-
-        root.depth.attrs['long_name'] = Coordinates.DEPTH_LONG_NAME.value
-        root.depth.attrs['units'] = Coordinates.DEPTH_UNITS.value
-
-        #####################################################################
-        # --- Coordinate: Latitude --- #
-        root.create_dataset(
-            name=Coordinates.LATITUDE.value,
-            data=np.repeat(0., width),
-            shape=width,
-            chunks=Constants.TILE_SIZE.value,
-            dtype=np.dtype(Coordinates.LATITUDE_DTYPE.value),
-            compressor=self.__compressor,
-            fill_value=0.,
-            overwrite=self.__overwrite
-        )
-
-        root.latitude.attrs['_ARRAY_DIMENSIONS'] = [Coordinates.TIME.value]
-
-        root.latitude.attrs['long_name'] = Coordinates.LATITUDE_LONG_NAME.value
-        root.latitude.attrs['units'] = Coordinates.LATITUDE_UNITS.value
-
-        #####################################################################
-        # --- Coordinate: Longitude --- #
-        root.create_dataset(
-            name=Coordinates.LONGITUDE.value,
-            data=np.repeat(0., width),  # root.longitude[:] = np.nan
-            shape=width,
-            chunks=Constants.TILE_SIZE.value,
-            dtype=np.dtype(Coordinates.LONGITUDE_DTYPE.value),
-            compressor=self.__compressor,
-            fill_value=0.,
-            overwrite=self.__overwrite
-        )
-
-        root.longitude.attrs['_ARRAY_DIMENSIONS'] = [Coordinates.TIME.value]
-
-        root.longitude.attrs['long_name'] = Coordinates.LONGITUDE_LONG_NAME.value
-        root.longitude.attrs['units'] = Coordinates.LONGITUDE_UNITS.value
-
-        #####################################################################
-        # --- Coordinate: Frequency --- #
-        root.create_dataset(
-            name=Coordinates.FREQUENCY.value,
-            data=frequencies,
-            shape=len(frequencies),
-            chunks=1,
-            dtype=np.dtype(Coordinates.FREQUENCY_DTYPE.value),
-            compressor=self.__compressor,
-            fill_value=0.,
-            overwrite=self.__overwrite
-        )
-
-        # TODO: best coordinate would be channel with str type
-        root.frequency.attrs['_ARRAY_DIMENSIONS'] = [Coordinates.FREQUENCY.value]  # TODO: is this correct
-
-        root.frequency.attrs['long_name'] = Coordinates.FREQUENCY_LONG_NAME.value
-        root.frequency.attrs['standard_name'] = Coordinates.FREQUENCY_STANDARD_NAME.value
-        root.frequency.attrs['units'] = Coordinates.FREQUENCY_UNITS.value
-
-        #####################################################################
-        # --- Sv Data --- #
-        root.create_dataset(
-            name=Coordinates.SV.value,
-            shape=(len(depth_values), width, len(frequencies)),
-            chunks=(Constants.TILE_SIZE.value, Constants.TILE_SIZE.value, 1),
-            dtype=np.dtype(Coordinates.SV_DTYPE.value),  # TODO: try to experiment with 'float16'
-            compressor=self.__compressor,
-            fill_value=np.nan,
-            overwrite=self.__overwrite
-        )
-
-        root.Sv.attrs['_ARRAY_DIMENSIONS'] = [
-            Coordinates.DEPTH.value,
-            Coordinates.TIME.value,
-            Coordinates.FREQUENCY.value,
-        ]
-
-        root.Sv.attrs['long_name'] = Coordinates.SV_LONG_NAME.value
-        root.Sv.attrs['units'] = Coordinates.SV_UNITS.value
-        root.Sv.attrs['tile_size'] = Constants.TILE_SIZE.value
-
-        #####################################################################
-        # --- Metadata --- #
-        root.attrs["ship_name"] = ship_name
-        root.attrs["cruise_name"] = cruise_name
-        root.attrs["sensor_name"] = sensor_name
-        #
-        root.attrs["processing_software_name"] = Coordinates.PROJECT_NAME.value
-        root.attrs["processing_software_version"] = "0.0.2"  # TODO: get programmatically
-        root.attrs["processing_software_time"] = Timestamp.get_timestamp()
-        #
-        root.attrs["calibration_status"] = calibration_status
-
-        zarr.consolidate_metadata(store)
-        #####################################################################
-        """
-        # zzz = zarr.open('https://echofish-dev-master-118234403147-echofish-zarr-store.s3.us-west-2.amazonaws.com/GU1002_resample.zarr')
-        # zzz.time[0] = 1274979445.423
-        # Initialize all to origin time, will be overwritten later
-        """
-        return zarr_path
-
-    ############################################################################
-    # def update_zarr_store(
-    #     self,
-    #     path: str,
-    #     ship_name: str,
-    #     cruise_name: str,  # TODO: just pass stem
-    #     sensor_name: str,
-    # ) -> None:
-    #     """
-    #     Opens an existing Zarr store living in an S3 bucket for the purpose
-    #     of updating just a subset of the cruise-level Zarr store associated
-    #     with a file-level Zarr store.
-    #     """
-    #     pass
-
-    ############################################################################
-    def open_s3_zarr_store_with_zarr(
-        self,
-        ship_name: str,
-        cruise_name: str,
-        sensor_name: str,
-        # zarr_synchronizer: Union[str, None] = None,
-    ):
-        # Mounts a Zarr store using Python's Zarr implementation. The mounted store
-        # will have read/write privileges so that the store can be updated.
-        print('Opening Zarr store with Zarr.')
-        try:
-            s3fs_manager = S3FSManager()
-            root = f'{self.output_bucket_name}/level_2/{ship_name}/{cruise_name}/{sensor_name}/{cruise_name}.zarr'
-            store = s3fs_manager.s3_map(s3_zarr_store_path=root)
-            # synchronizer = zarr.ProcessSynchronizer(f"/tmp/{ship_name}_{cruise_name}.sync")
-            cruise_zarr = zarr.open(store=store, mode="r+")
-        except Exception as err:  # Failure
-            print(f'Exception encountered opening Zarr store with Zarr: {err}')
-            raise
-        print('Done opening Zarr store with Zarr.')
-        return cruise_zarr
-
-    ############################################################################
-    def open_s3_zarr_store_with_xarray(
-        self,
-        ship_name: str,
-        cruise_name: str,
-        sensor_name: str,
-        file_name_stem: str,
-    ) -> xr.Dataset:
-        print('Opening Zarr store in S3 as Xarray.')
-        try:
-            zarr_path = f"s3://{self.output_bucket_name}/level_1/{ship_name}/{cruise_name}/{sensor_name}/{file_name_stem}.zarr"
-            s3fs_manager = S3FSManager()
-            store_s3_map = s3fs_manager.s3_map(s3_zarr_store_path=zarr_path)
-            ds = xr.open_zarr(store=store_s3_map, consolidated=None)  # synchronizer=SYNCHRONIZER
-        except Exception as err:
-            print('Problem opening Zarr store in S3 as Xarray.')
-            raise err
-        print("Done opening Zarr store in S3 as Xarray.")
-        return ds
-
-    ############################################################################
-
-    #######################################################
-    # def create_process_synchronizer(self):
-    #     # TODO: explore aws redis options
-    #     pass
-
-    #######################################################
-    # def verify_cruise_store_data(self):
-    #     # TODO: run a check on a finished zarr store to ensure that
-    #     #   none of the time, latitude, longitude, or depth values
-    #     #   are NaN.
-    #     pass
-
-    #######################################################
-
-###########################################################
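A note on the deleted `get_depth_values` above, since the same regridding grid carries forward to the cruise-level store: `np.linspace` is asked for `int(max_echo_range / min_echo_range) + 1` points, so the resulting step works out to `(max - min) / (max / min)`, slightly smaller than `min_echo_range`. A self-contained sketch of that arithmetic (plain NumPy, no package imports assumed):

```python
import numpy as np

def depth_grid(min_echo_range: float = 1.0, max_echo_range: float = 100.0):
    # Mirrors the deleted ZarrManager.get_depth_values: a fixed grid from the
    # cruise-wide minimum to maximum depth; values never start at zero.
    return np.linspace(
        start=min_echo_range,
        stop=max_echo_range,
        num=int(max_echo_range / min_echo_range) + 1,
        endpoint=True,
    ).round(decimals=2)

print(depth_grid())          # [  1.     1.99   2.98 ... 100.  ], 101 values
print(depth_grid(0.5, 5.0))  # 11 values from 0.5 to 5.0 in steps of 0.45
```

With the defaults (1 m to 100 m) this yields 101 values spaced 0.99 m apart, which is why the returned grid starts at the minimum echo range rather than at zero.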
water_column_sonar_processing-0.0.1.dist-info/METADATA
DELETED
@@ -1,89 +0,0 @@
-Metadata-Version: 2.1
-Name: water-column-sonar-processing
-Version: 0.0.1
-Summary: A processing tool for water column sonar data.
-Author-email: Rudy Klucik <rudy.klucik@noaa.gov>
-Project-URL: Homepage, https://github.com/CI-CMG/water-column-sonar-processing
-Project-URL: Issues, https://github.com/CI-CMG/water-column-sonar-processing/issues
-Classifier: Programming Language :: Python :: 3
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: OS Independent
-Requires-Python: >=3.10
-Description-Content-Type: text/markdown
-License-File: LICENSE
-
-# water-column-sonar-processing
-Processing tool for converting L0 data to L1 and L2, as well as generating geospatial information.
-
-## Setting up the Python Environment
-> Python 3.10.12
-
-# MacOS Pyenv Installation Instructions
-1. Install pyenv (https://github.com/pyenv/pyenv#set-up-your-shell-environment-for-pyenv)
-   1. ```brew update```
-   2. ```arch -arm64 brew install pyenv```
-   3. In ~/.bashrc add
-      1. ```export PYENV_ROOT="$HOME/.pyenv"```
-      2. ```export PATH="$PYENV_ROOT/bin:$PATH"```
-      3. ```eval "$(pyenv init -)"```
-   4. ```arch -arm64 brew install openssl readline sqlite3 xz zlib tcl-tk```
-2. Install pyenv-virtualenv (https://github.com/pyenv/pyenv-virtualenv)
-   1. ```arch -arm64 brew install pyenv-virtualenv```
-   2. In ~/.bashrc add
-      1. ```eval "$(pyenv virtualenv-init -)"```
-3. Open a new terminal
-4. Install the Python version
-   1. ```env PYTHON_CONFIGURE_OPTS="--enable-shared"```
-   2. ```env CONFIGURE_OPTS='--enable-optimizations' arch -arm64 pyenv install 3.10.12```
-5. Create the virtual env (to delete: 'pyenv uninstall 3.10.12/water-column-sonar-processing')
-   1. ```pyenv virtualenv 3.10.12 water-column-sonar-processing```
-6. Set the local version of Python (if not done already)
-   1. Change directory to the root of the project
-   2. ```pyenv local 3.10.12 water-column-sonar-processing```
-   3. ```pyenv activate water-column-sonar-processing```
-
-## Setting up IntelliJ
-
-1. Install the IntelliJ Python plugin
-2. Set up pyenv
-   1. File -> Project Structure or CMD + ;
-   2. SDKs -> + -> Add Python SDK -> Virtual Environment
-   3. Select Existing Environment
-   4. Choose ~/.pyenv/versions/water-column-sonar-processing/bin/python
-3. Set up a Python Facet (not sure if this is required)
-   1. File -> Project Structure or CMD + ;
-   2. Facets -> + -> Python
-   3. Set the interpreter
-
-## Installing Dependencies
-
-1. Add dependencies with versions to requirements.txt
-2. ```pip install --upgrade pip && pip install -r requirements_dev.txt```
-
-## Pytest
-```commandline
-pytest --disable-warnings
-```
-
-## Instructions
-Following this tutorial:
-https://packaging.python.org/en/latest/tutorials/packaging-projects/
-
-## To Publish To TEST
-```commandline
-python -m build
-python -m twine upload --repository testpypi dist/*
-python -m pip install --index-url https://test.pypi.org/simple/ water-column-sonar-processing
-python
-```
-```python
-from water_column_sonar_processing import ZarrManager
-zarr_manager = ZarrManager()
-```
-
-## To Publish To PROD
-```commandline
-python -m build
-python -m twine upload --repository pypi dist/*
-```
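The metadata fields deleted above (Name, Version, Requires-Python) are the same ones tooling reads back at runtime; a quick standard-library check of what is installed, applicable to either version of the wheel:

```python
from importlib.metadata import metadata, version

# The distribution name keeps hyphens even though the import package
# (formerly "model", now "water_column_sonar_processing") uses underscores.
print(version("water-column-sonar-processing"))                       # e.g. "0.0.1"
print(metadata("water-column-sonar-processing")["Requires-Python"])   # ">=3.10"
```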
water_column_sonar_processing-0.0.1.dist-info/RECORD
DELETED
@@ -1,32 +0,0 @@
-__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-model/model.py,sha256=JjhqZM2t8evZrFjbFHBOdHH0tlG_twOsysRMoIh2Zdw,5099
-model/aws/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-model/aws/dynamodb_manager.py,sha256=fppu4Kvl-22psu8MrF-NGKJwymwVf5CyvO7-Ye8zeFc,5338
-model/aws/s3_manager.py,sha256=_DcBOYRcuyRTcmlTQ1qzMCtE0GRcGU8LaZ-JXKGKaOo,13397
-model/aws/s3fs_manager.py,sha256=k4nW48E0reQwj1t4pe_ybK5kX0H1SHeG8urddHelGZ4,2463
-model/aws/sns_manager.py,sha256=5ZW0bi7BQHldxBbmAxN4tT1KVfDAF_eYPJwpa_W1C7Q,2202
-model/aws/sqs_manager.py,sha256=01y3LB5B_R0rmxB00SWLwL2rBUSDy_8OgjpZImEuxYQ,1694
-model/cruise/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-model/cruise/create_empty_zarr_store.py,sha256=gbQWvMsmmGPl359aakYF--x09OMOzlOmk8T1aIOe-Bs,6907
-model/cruise/resample_regrid.py,sha256=Nhb1yMRG7dCYEgsS4fPxtlJ_iYKMcdgcOHPy_Xxx9kw,11228
-model/geospatial/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-model/geospatial/geometry_manager.py,sha256=ele0bd8sDp2iyOjx9scZay8Iy3D-IhGO8L3DlPOonWs,9400
-model/geospatial/geometry_simplification.py,sha256=_TCrlxqWlhO05Od3ZB4Sfgob9p3tQr3r1vjseG0qMNQ,3071
-model/geospatial/pmtile_generation.py,sha256=ND9rqAH4ow6FkRr_6yFF0o0DBby8u0zRVj6cqci2cPw,2790
-model/index/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-model/index/index.py,sha256=s9GhEJHKF5urHWqjehHeral-lQlmTS1Zx1tvlQ7vcEM,10705
-model/utility/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-model/utility/cleaner.py,sha256=l4G3iRso1xOTGH8hBXRRVPEn1ZTOJJT9_9JA0yRpcyA,599
-model/utility/constants.py,sha256=uTuSyH9YGG8bDd5xNbT4zA5O_lu874C-8m1MRvVppPw,1631
-model/utility/pipeline_status.py,sha256=iRLyzI3gsesyz0I4NukbJwqiRmXgSZBOdB3C1x2eMSg,4185
-model/utility/timestamp.py,sha256=i4ub7ob24edHU2yt4aOmvle_Nmlf1F_9HVGa9TJ5HgM,359
-model/zarr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-model/zarr/bar.py,sha256=0mBMTw_QmZZdEWCscRsCk0Axw6B6Yy1ujvuG1QuHcQo,504
-model/zarr/foo.py,sha256=KpXmnl2_Tp2Ig1A_WPnoWuMVhi85CvC-JIaPe6LtbTg,215
-model/zarr/zarr_manager.py,sha256=DsQsBe-lN4e2MExKRCg7PIFyra5GG_vTGKqA9hHwKS0,12357
-water_column_sonar_processing-0.0.1.dist-info/LICENSE,sha256=lz4IpJ5_adG3S0ali-WaIpQFVTnEAOucMDQPECUVEYw,1110
-water_column_sonar_processing-0.0.1.dist-info/METADATA,sha256=tY2NjTDmibt3kJBvEhqAzA1xU1i-UpeZSElMBgdXvBg,3163
-water_column_sonar_processing-0.0.1.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
-water_column_sonar_processing-0.0.1.dist-info/top_level.txt,sha256=Uq9bL3hpULwyr0JXiGOnwhgNE1h68gVXxofcjXVZpPo,15
-water_column_sonar_processing-0.0.1.dist-info/RECORD,,
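Each RECORD row above follows the wheel convention `path,sha256=<urlsafe-base64 digest without padding>,size-in-bytes`, with the RECORD file itself listed with empty hash and size fields. A small sketch for verifying entries against files on disk, assuming the wheel has been unpacked into the current directory:

```python
import base64
import csv
import hashlib
from pathlib import Path

def verify_record_row(row: list[str]) -> bool:
    # row = [path, "sha256=<digest>", size]; RECORD's own row has empty fields.
    path, hash_field, size = row
    if not hash_field:
        return True  # RECORD lists itself without a hash or size
    algorithm, _, expected = hash_field.partition("=")
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.new(algorithm, data).digest())
    return digest.rstrip(b"=").decode() == expected and len(data) == int(size)

with open("water_column_sonar_processing-0.0.1.dist-info/RECORD") as f:
    print(all(verify_record_row(row) for row in csv.reader(f)))
```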