water-column-sonar-processing 0.0.1__py3-none-any.whl → 25.11.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of water-column-sonar-processing might be problematic.

Files changed (60)
  1. water_column_sonar_processing/__init__.py +13 -0
  2. water_column_sonar_processing/aws/__init__.py +7 -0
  3. water_column_sonar_processing/aws/dynamodb_manager.py +355 -0
  4. water_column_sonar_processing/aws/s3_manager.py +420 -0
  5. water_column_sonar_processing/aws/s3fs_manager.py +72 -0
  6. {model → water_column_sonar_processing}/aws/sns_manager.py +10 -21
  7. {model → water_column_sonar_processing}/aws/sqs_manager.py +11 -19
  8. water_column_sonar_processing/cruise/__init__.py +4 -0
  9. water_column_sonar_processing/cruise/create_empty_zarr_store.py +191 -0
  10. water_column_sonar_processing/cruise/datatree_manager.py +21 -0
  11. water_column_sonar_processing/cruise/resample_regrid.py +339 -0
  12. water_column_sonar_processing/geometry/__init__.py +11 -0
  13. water_column_sonar_processing/geometry/elevation_manager.py +111 -0
  14. water_column_sonar_processing/geometry/geometry_manager.py +243 -0
  15. water_column_sonar_processing/geometry/line_simplification.py +176 -0
  16. water_column_sonar_processing/geometry/pmtile_generation.py +261 -0
  17. water_column_sonar_processing/index/__init__.py +3 -0
  18. water_column_sonar_processing/index/index_manager.py +384 -0
  19. water_column_sonar_processing/model/__init__.py +3 -0
  20. water_column_sonar_processing/model/zarr_manager.py +722 -0
  21. water_column_sonar_processing/process.py +149 -0
  22. water_column_sonar_processing/processing/__init__.py +4 -0
  23. water_column_sonar_processing/processing/raw_to_netcdf.py +320 -0
  24. water_column_sonar_processing/processing/raw_to_zarr.py +425 -0
  25. water_column_sonar_processing/utility/__init__.py +13 -0
  26. {model → water_column_sonar_processing}/utility/cleaner.py +7 -8
  27. water_column_sonar_processing/utility/constants.py +118 -0
  28. {model → water_column_sonar_processing}/utility/pipeline_status.py +47 -24
  29. water_column_sonar_processing/utility/timestamp.py +12 -0
  30. water_column_sonar_processing-25.11.1.dist-info/METADATA +182 -0
  31. water_column_sonar_processing-25.11.1.dist-info/RECORD +34 -0
  32. {water_column_sonar_processing-0.0.1.dist-info → water_column_sonar_processing-25.11.1.dist-info}/WHEEL +1 -1
  33. {water_column_sonar_processing-0.0.1.dist-info → water_column_sonar_processing-25.11.1.dist-info/licenses}/LICENSE +1 -1
  34. water_column_sonar_processing-25.11.1.dist-info/top_level.txt +1 -0
  35. __init__.py +0 -0
  36. model/__init__.py +0 -0
  37. model/aws/__init__.py +0 -0
  38. model/aws/dynamodb_manager.py +0 -149
  39. model/aws/s3_manager.py +0 -356
  40. model/aws/s3fs_manager.py +0 -74
  41. model/cruise/__init__.py +0 -0
  42. model/cruise/create_empty_zarr_store.py +0 -166
  43. model/cruise/resample_regrid.py +0 -248
  44. model/geospatial/__init__.py +0 -0
  45. model/geospatial/geometry_manager.py +0 -194
  46. model/geospatial/geometry_simplification.py +0 -81
  47. model/geospatial/pmtile_generation.py +0 -74
  48. model/index/__init__.py +0 -0
  49. model/index/index.py +0 -228
  50. model/model.py +0 -138
  51. model/utility/__init__.py +0 -0
  52. model/utility/constants.py +0 -56
  53. model/utility/timestamp.py +0 -12
  54. model/zarr/__init__.py +0 -0
  55. model/zarr/bar.py +0 -28
  56. model/zarr/foo.py +0 -11
  57. model/zarr/zarr_manager.py +0 -298
  58. water_column_sonar_processing-0.0.1.dist-info/METADATA +0 -89
  59. water_column_sonar_processing-0.0.1.dist-info/RECORD +0 -32
  60. water_column_sonar_processing-0.0.1.dist-info/top_level.txt +0 -2
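
The listing shows the flat model package from 0.0.1 replaced by a namespaced water_column_sonar_processing package in 25.11.1, split into aws, cruise, geometry, index, model, processing, and utility subpackages. A minimal sketch of imports under the new layout, using only names that appear in the diffs below (it is assumed the subpackage __init__.py files re-export these classes):

    # Imports visible in the 25.11.1 sources shown below.
    from water_column_sonar_processing.aws import DynamoDBManager, S3Manager, SNSManager
    from water_column_sonar_processing.geometry import GeometryManager
    from water_column_sonar_processing.utility import Cleaner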
water_column_sonar_processing/process.py
@@ -0,0 +1,149 @@
+ import json
+ import os
+
+ import numpy as np
+
+ from water_column_sonar_processing.aws import (
+     DynamoDBManager,
+     S3FSManager,
+     S3Manager,
+     SNSManager,
+ )
+
+
+ ###########################################################
+ class Process:
+     #######################################################
+     def __init__(
+         self,
+     ):
+         self.input_bucket_name = os.environ["INPUT_BUCKET_NAME"]
+         self.output_bucket_name = os.environ["OUTPUT_BUCKET_NAME"]
+         self.table_name = os.environ["TABLE_NAME"]
+         self.topic_arn = os.environ["TOPIC_ARN"]
+         # self.output_bucket_access_key = ?
+         # self.output_bucket_secret_access_key = ?
+
+     def execute(self):
+         # input_s3_manager = (
+         #     S3Manager()
+         # )  # TODO: Need to allow passing in of credentials when writing to protected bucket
+         s3fs_manager = S3FSManager()  # TODO: delete this
+         print(s3fs_manager)  # TODO: delete this
+         output_s3_manager = S3Manager()
+         # TODO: s3fs?
+         sns_manager = SNSManager()
+         ddb_manager = DynamoDBManager()
+
+         # [1 of 5] Update Pipeline Status in DynamoDB
+         # self.dynamodb.update_status()
+
+         # [2 of 5] Download Object From Input Bucket
+         # return_value = input_s3_manager.download_file(
+         #     bucket_name=self.input_bucket_name,
+         #     key="the_input_key",
+         #     file_name="the_input_key",
+         # )
+         # print(return_value)
+
+         # [3 of 5] Update Entry in DynamoDB
+         ship_name = "David_Starr_Jordan"  # TODO: get this from input sns message
+         cruise_name = "DS0604"
+         sensor_name = "EK60"
+         file_name = "DSJ0604-D20060406-T113407.raw"
+
+         test_channels = [
+             "GPT 38 kHz 009072055a7f 2 ES38B",
+             "GPT 70 kHz 00907203400a 3 ES70-7C",
+             "GPT 120 kHz 009072034d52 1 ES120-7",
+             "GPT 200 kHz 0090720564e4 4 ES200-7C",
+         ]
+         test_frequencies = [38_000, 70_000, 120_000, 200_000]
+         ddb_manager.update_item(
+             table_name=self.table_name,
+             key={
+                 "FILE_NAME": {"S": file_name},  # Partition Key
+                 "CRUISE_NAME": {"S": cruise_name},  # Sort Key
+             },
+             expression_attribute_names={
+                 "#CH": "CHANNELS",
+                 "#ET": "END_TIME",
+                 "#ED": "ERROR_DETAIL",
+                 "#FR": "FREQUENCIES",
+                 "#MA": "MAX_ECHO_RANGE",
+                 "#MI": "MIN_ECHO_RANGE",
+                 "#ND": "NUM_PING_TIME_DROPNA",
+                 "#PS": "PIPELINE_STATUS",  # testing this updated
+                 "#PT": "PIPELINE_TIME",  # testing this updated
+                 "#SE": "SENSOR_NAME",
+                 "#SH": "SHIP_NAME",
+                 "#ST": "START_TIME",
+                 # "#ZB": "ZARR_BUCKET",
+                 # "#ZP": "ZARR_PATH",
+             },
+             expression_attribute_values={
+                 ":ch": {"L": [{"S": i} for i in test_channels]},
+                 ":et": {"S": "2006-04-06T13:35:28.688Z"},
+                 ":ed": {"S": ""},
+                 ":fr": {"L": [{"N": str(i)} for i in test_frequencies]},
+                 ":ma": {"N": str(np.round(499.7653, 4))},
+                 ":mi": {"N": str(np.round(0.25, 4))},
+                 ":nd": {"N": str(2458)},
+                 ":ps": {"S": "SUCCESS_AGGREGATOR"},
+                 ":pt": {"S": "2023-10-02T08:54:43Z"},
+                 ":se": {"S": sensor_name},
+                 ":sh": {"S": ship_name},
+                 ":st": {"S": "2006-04-06T11:34:07.288Z"},
+                 # ":zb": {"S": "r2d2-dev-echofish2-118234403147-echofish-dev-output"},
+                 # ":zp": {
+                 #     "S": "level_1/David_Starr_Jordan/DS0604/EK60/DSJ0604-D20060406-T113407.model"
+                 # },
+             },
+             update_expression=(
+                 "SET "
+                 "#CH = :ch, "
+                 "#ET = :et, "
+                 "#ED = :ed, "
+                 "#FR = :fr, "
+                 "#MA = :ma, "
+                 "#MI = :mi, "
+                 "#ND = :nd, "
+                 "#PS = :ps, "
+                 "#PT = :pt, "
+                 "#SE = :se, "
+                 "#SH = :sh, "
+                 "#ST = :st"
+                 # "#ZB = :zb, "
+                 # "#ZP = :zp"
+             ),
+         )
+
+         # [4 of 5] Write Object to Output Bucket
+         output_s3_manager.put(
+             bucket_name=self.output_bucket_name, key="123", body="456"
+         )
+
+         # [_ of _] Read file-level Zarr store from bucket, Create GeoJSON, Write to bucket
+         # [_ of _] Create empty cruise-level Zarr store
+         # [_ of _] Resample and write to cruise-level Zarr Store
+
+         # [5 of 5] Publish Done Message
+         success_message = {
+             "default": {
+                 "shipName": ship_name,
+                 "cruiseName": cruise_name,
+                 "sensorName": sensor_name,
+                 "fileName": file_name,
+             }
+         }
+         sns_manager.publish(
+             topic_arn=self.topic_arn,
+             message=json.dumps(success_message),
+         )
+         print("done...")
+
+     #######################################################
+
+
+ ###########################################################
+ ###########################################################
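
Process reads its configuration from four required environment variables, so a caller only has to set those before invoking execute(). A minimal sketch of driving the entry point above; all resource names are placeholders, not real buckets, tables, or topics:

    import os

    # Hypothetical resource names; Process.__init__ raises KeyError if any is unset.
    os.environ["INPUT_BUCKET_NAME"] = "example-input-bucket"
    os.environ["OUTPUT_BUCKET_NAME"] = "example-output-bucket"
    os.environ["TABLE_NAME"] = "example-table"
    os.environ["TOPIC_ARN"] = "arn:aws:sns:us-east-1:123456789012:example-topic"

    from water_column_sonar_processing.process import Process

    # Updates the DynamoDB item, writes a test object to the output bucket,
    # and publishes the SNS "done" message.
    Process().execute()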
water_column_sonar_processing/processing/__init__.py
@@ -0,0 +1,4 @@
+ from .raw_to_netcdf import RawToNetCDF
+ from .raw_to_zarr import RawToZarr, get_water_level
+
+ __all__ = ["RawToZarr", "get_water_level", "RawToNetCDF"]
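
The subpackage re-exports both converters at package level, so the class used in the sketch at the end of this diff can be imported either way:

    from water_column_sonar_processing.processing import RawToNetCDF
    # equivalent to:
    # from water_column_sonar_processing.processing.raw_to_netcdf import RawToNetCDF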
water_column_sonar_processing/processing/raw_to_netcdf.py
@@ -0,0 +1,320 @@
+ import gc
+ import os
+ from datetime import datetime
+ from pathlib import Path  # , PurePath
+
+ import echopype as ep
+ import numpy as np
+ from zarr.codecs import Blosc
+
+ from water_column_sonar_processing.aws import DynamoDBManager, S3Manager
+ from water_column_sonar_processing.geometry import GeometryManager
+ from water_column_sonar_processing.utility import Cleaner
+
+
+ # This code was copied from echofish-aws-raw-to-zarr-lambda
+ class RawToNetCDF:
+     #######################################################
+     def __init__(
+         self,
+         # output_bucket_access_key,
+         # output_bucket_secret_access_key,
+         # # overwrite_existing_zarr_store,
+     ):
+         # TODO: revert to Blosc.BITSHUFFLE, troubleshooting misc error
+         self.__compressor = Blosc(cname="zstd", clevel=9)  # shuffle=Blosc.NOSHUFFLE
+         self.__overwrite = True
+         # self.__num_threads = numcodecs.blosc.get_nthreads()
+         # self.input_bucket_name = os.environ.get("INPUT_BUCKET_NAME")
+         # self.output_bucket_name = os.environ.get("OUTPUT_BUCKET_NAME")
+         # self.__table_name = table_name
+         # # self.__overwrite_existing_zarr_store = overwrite_existing_zarr_store
+
+     ############################################################################
+     ############################################################################
+     def __netcdf_info_to_table(
+         self,
+         # output_bucket_name,
+         table_name,
+         ship_name,
+         cruise_name,
+         sensor_name,
+         file_name,
+         # zarr_path,
+         min_echo_range,
+         max_echo_range,
+         num_ping_time_dropna,
+         start_time,
+         end_time,
+         frequencies,
+         channels,
+         water_level,
+     ):
+         print("Writing NetCDF information to DynamoDB table.")
+         dynamodb_manager = DynamoDBManager()
+         dynamodb_manager.update_item(
+             table_name=table_name,
+             key={
+                 "FILE_NAME": {"S": file_name},  # Partition Key
+                 "CRUISE_NAME": {"S": cruise_name},  # Sort Key
+             },
+             expression_attribute_names={
+                 "#CH": "CHANNELS",
+                 "#ET": "END_TIME",
+                 # "#ED": "ERROR_DETAIL",
+                 "#FR": "FREQUENCIES",
+                 "#MA": "MAX_ECHO_RANGE",
+                 "#MI": "MIN_ECHO_RANGE",
+                 "#ND": "NUM_PING_TIME_DROPNA",
+                 # "#PS": "PIPELINE_STATUS",
+                 "#PT": "PIPELINE_TIME",
+                 "#SE": "SENSOR_NAME",
+                 "#SH": "SHIP_NAME",
+                 "#ST": "START_TIME",
+                 # "#ZB": "ZARR_BUCKET",
+                 # "#ZP": "ZARR_PATH",
+                 "#WL": "WATER_LEVEL",
+             },
+             expression_attribute_values={
+                 ":ch": {"L": [{"S": i} for i in channels]},
+                 ":et": {"S": end_time},
+                 # ":ed": {"S": ""},
+                 ":fr": {"L": [{"N": str(i)} for i in frequencies]},
+                 ":ma": {"N": str(np.round(max_echo_range, 4))},
+                 ":mi": {"N": str(np.round(min_echo_range, 4))},
+                 ":nd": {"N": str(num_ping_time_dropna)},
+                 # ":ps": {"S": "PROCESSING_RESAMPLE_AND_WRITE_TO_ZARR_STORE"},
+                 # ":ps": {"S": PipelineStatus.LEVEL_1_PROCESSING.name},
+                 ":pt": {"S": datetime.now().isoformat(timespec="seconds") + "Z"},
+                 ":se": {"S": sensor_name},
+                 ":sh": {"S": ship_name},
+                 ":st": {"S": start_time},
+                 ":wl": {"N": str(np.round(water_level, 2))},
+                 # ":zb": {"S": output_bucket_name},
+                 # ":zp": {"S": zarr_path},
+             },
+             update_expression=(
+                 "SET "
+                 "#CH = :ch, "
+                 "#ET = :et, "
+                 # "#ED = :ed, "
+                 "#FR = :fr, "
+                 "#MA = :ma, "
+                 "#MI = :mi, "
+                 "#ND = :nd, "
+                 # "#PS = :ps, "
+                 "#PT = :pt, "
+                 "#SE = :se, "
+                 "#SH = :sh, "
+                 "#ST = :st, "
+                 "#WL = :wl"
+                 # "#ZB = :zb, "
+                 # "#ZP = :zp"
+             ),
+         )
+         print("Done writing NetCDF information to DynamoDB table.")
+
+     ############################################################################
+     ############################################################################
+     ############################################################################
+     def __upload_files_to_output_bucket(
+         self,
+         output_bucket_name,
+         local_directory,
+         object_prefix,
+         endpoint_url,
+     ):
+         # Note: this will be passed credentials if using NODD
+         s3_manager = S3Manager(endpoint_url=endpoint_url)
+         print("Uploading files using thread pool executor.")
+         all_files = []
+         for subdir, dirs, files in os.walk(local_directory):
+             for file in files:
+                 local_path = os.path.join(subdir, file)
+                 s3_key = os.path.join(object_prefix, local_path)
+                 all_files.append([local_path, s3_key])
+         # all_files
+         all_uploads = s3_manager.upload_files_with_thread_pool_executor(
+             output_bucket_name=output_bucket_name,
+             all_files=all_files,
+         )
+         return all_uploads
+
+     def __upload_file_to_output_bucket(
+         self,
+         output_bucket_name,
+         local_directory,
+         object_prefix,
+         endpoint_url,
+     ):
+         # Note: this will be passed credentials if using NODD
+         s3_manager = S3Manager(endpoint_url=endpoint_url)
+         print("Uploading files using thread pool executor.")
+         all_files = [local_directory]
+         all_uploads = s3_manager.upload_files_with_thread_pool_executor(
+             output_bucket_name=output_bucket_name,
+             all_files=all_files,
+         )
+         return all_uploads
+
+     ############################################################################
+     def raw_to_netcdf(
+         self,
+         table_name,
+         input_bucket_name,
+         output_bucket_name,
+         ship_name,
+         cruise_name,
+         sensor_name,
+         raw_file_name,
+         endpoint_url=None,
+         include_bot=True,
+     ):
+         """
+         Downloads the raw files, processes them with echopype, and uploads the
+         results to the NODD bucket.
+
+         Creates two files: one echopype-opened file and one Sv-calibrated file.
+         """
+         print(f"Opening raw: {raw_file_name} and creating netcdf.")
+         try:
+             geometry_manager = GeometryManager()
+             cleaner = Cleaner()
+             cleaner.delete_local_files(
+                 file_types=["*.nc", "*.json"]
+             )  # TODO: include bot and raw?
+
+             s3_manager = S3Manager(endpoint_url=endpoint_url)
+             s3_file_path = (
+                 f"data/raw/{ship_name}/{cruise_name}/{sensor_name}/{raw_file_name}"
+             )
+             bottom_file_name = f"{Path(raw_file_name).stem}.bot"
+             s3_bottom_file_path = (
+                 f"data/raw/{ship_name}/{cruise_name}/{sensor_name}/{bottom_file_name}"
+             )
+             s3_manager.download_file(
+                 bucket_name=input_bucket_name, key=s3_file_path, file_name=raw_file_name
+             )
+             # TODO: add the bottom file
+             if include_bot:
+                 s3_manager.download_file(
+                     bucket_name=input_bucket_name,
+                     key=s3_bottom_file_path,
+                     file_name=bottom_file_name,
+                 )
+
+             gc.collect()
+             print("Opening raw file with echopype.")
+             # s3_file_path = f"s3://{bucket_name}/data/raw/{ship_name}/{cruise_name}/{sensor_name}/{file_name}"
+             # s3_file_path = Path(f"s3://noaa-wcsd-pds/data/raw/{ship_name}/{cruise_name}/{sensor_name}/{file_name}")
+             echodata = ep.open_raw(
+                 raw_file=raw_file_name,
+                 sonar_model=sensor_name,
+                 include_bot=include_bot,
+             )
+
+             netcdf_name = f"{Path(raw_file_name).stem}.nc"
+             # Xarray Dataset to netcdf
+             echodata.to_netcdf(
+                 save_path=netcdf_name,
+                 compress=True,
+                 overwrite=True,
+             )
+
+             print("Compute volume backscattering strength (Sv) from raw dataset.")
+             ds_sv = ep.calibrate.compute_Sv(echodata)
+             ds_sv = ep.consolidate.add_depth(
+                 ds_sv, echodata
+             )  # TODO: consolidate with other depth values
+             # water_level = ds_sv["water_level"].values
+             gc.collect()
+             print("Done computing volume backscattering strength (Sv) from raw dataset.")
+             # Note: detected_seafloor_depth is located at echodata.vendor.detected_seafloor_depth
+             # but is not written out with ds_sv
+             if "detected_seafloor_depth" in list(echodata.vendor.variables):
+                 ds_sv["detected_seafloor_depth"] = (
+                     echodata.vendor.detected_seafloor_depth
+                 )
+             #
+             # frequencies = echodata.environment.frequency_nominal.values
+             #################################################################
+             # Get GPS coordinates, just overwrite the lat lon values
+             gps_data, lat, lon = geometry_manager.read_echodata_gps_data(
+                 echodata=echodata,
+                 output_bucket_name=output_bucket_name,
+                 ship_name=ship_name,
+                 cruise_name=cruise_name,
+                 sensor_name=sensor_name,
+                 file_name=raw_file_name,
+                 endpoint_url=endpoint_url,
+                 write_geojson=False,
+             )
+             ds_sv = ep.consolidate.add_location(ds_sv, echodata)
+             ds_sv.latitude.values = (
+                 lat  # overwriting echopype gps values to include missing values
+             )
+             ds_sv.longitude.values = lon
+             # gps_data, lat, lon = self.__get_gps_data(echodata=echodata)
+
+             # Create the netcdf
+             netcdf_name_computed_Sv = f"{Path(raw_file_name).stem}_computed_Sv.nc"
+
+             # Xarray Dataset to netcdf
+             ds_sv.to_netcdf(
+                 path=netcdf_name_computed_Sv,
+                 mode="w",
+             )
+             gc.collect()
+             #################################################################
+             # output_netcdf_prefix = f"level_1/{ship_name}/{cruise_name}/{sensor_name}/"
+             #################################################################
+             # If netcdf already exists then delete
+             s3_manager = S3Manager(endpoint_url=endpoint_url)
+             child_objects = s3_manager.get_child_objects(
+                 bucket_name=output_bucket_name,
+                 sub_prefix=f"level_1/{ship_name}/{cruise_name}/{sensor_name}/{Path(raw_file_name).stem}.nc",
+             )
+             if len(child_objects) > 0:
+                 print(
+                     "NetCDF dataset already exists in s3, deleting existing and continuing."
+                 )
+                 s3_manager.delete_nodd_objects(
+                     bucket_name=output_bucket_name,
+                     objects=child_objects,
+                 )
+             child_objects_computed_Sv = s3_manager.get_child_objects(
+                 bucket_name=output_bucket_name,
+                 sub_prefix=f"level_1/{ship_name}/{cruise_name}/{sensor_name}/{Path(raw_file_name).stem}_computed_Sv.nc",
+             )
+             if len(child_objects_computed_Sv) > 0:
+                 print("data already exists in s3, deleting existing and continuing.")
+                 s3_manager.delete_nodd_objects(
+                     bucket_name=output_bucket_name,
+                     objects=child_objects_computed_Sv,
+                 )
+             #################################################################
+             s3_manager.upload_file(
+                 filename=netcdf_name,
+                 bucket_name=output_bucket_name,
+                 key=f"level_1/{ship_name}/{cruise_name}/{sensor_name}/{Path(raw_file_name).stem}.nc",
+             )
+             s3_manager.upload_file(
+                 filename=netcdf_name_computed_Sv,
+                 bucket_name=output_bucket_name,
+                 key=f"level_1/{ship_name}/{cruise_name}/{sensor_name}/{Path(raw_file_name).stem}_computed_Sv.nc",
+             )
+         except Exception as err:
+             print(f"Exception encountered creating local netcdf with echopype: {err}")
+             raise RuntimeError(f"Problem creating local netcdf, {err}")
+         finally:
+             gc.collect()
+             cleaner.delete_local_files(
+                 file_types=["*.raw", "*.bot", "*.zarr", "*.nc", "*.json"]
+             )
+             print("Done creating local netcdf files.")
+
+     ############################################################################
+
+
+ ################################################################################
+ ############################################################################
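
The conversion is driven by the public raw_to_netcdf method. A minimal sketch of a call, reusing the ship, cruise, sensor, and file names from process.py above; the table and output bucket names are placeholders, and the input bucket is assumed to follow the data/raw/{ship}/{cruise}/{sensor}/ layout the method expects:

    from water_column_sonar_processing.processing import RawToNetCDF

    converter = RawToNetCDF()
    converter.raw_to_netcdf(
        table_name="example-table",  # placeholder DynamoDB table
        input_bucket_name="noaa-wcsd-pds",  # bucket named in the source comments
        output_bucket_name="example-output-bucket",  # placeholder, written under level_1/
        ship_name="David_Starr_Jordan",
        cruise_name="DS0604",
        sensor_name="EK60",
        raw_file_name="DSJ0604-D20060406-T113407.raw",
        include_bot=False,  # skip the .bot download when no bottom file exists
    )

On success this writes DSJ0604-D20060406-T113407.nc and DSJ0604-D20060406-T113407_computed_Sv.nc under level_1/David_Starr_Jordan/DS0604/EK60/ in the output bucket, then deletes the local copies.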