pyadps-0.1.0b0-py3-none-any.whl → pyadps-0.2.0b0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,3 +1,4 @@
+ import configparser
  import tempfile

  import numpy as np
@@ -6,8 +7,6 @@ import plotly.graph_objects as go
  import streamlit as st
  import utils.writenc as wr
  from plotly_resampler import FigureResampler
- import configparser
-

  if "flead" not in st.session_state:
  st.write(":red[Please Select Data!]")
@@ -89,6 +88,7 @@ else:
  st.session_state.final_depth = st.session_state.depth


+ # Functions for plotting
  @st.cache_data
  def fillplot_plotly(
  x, y, data, maskdata, colorscale="balance", title="Data", mask=False
@@ -113,6 +113,7 @@ def fillplot_plotly(
  yaxis=dict(showline=True, mirror=True),
  title_text=title,
  )
+ fig.update_yaxes(autorange="reversed")
  st.plotly_chart(fig)


@@ -155,6 +156,7 @@ def call_plot(varname, beam, mask=False):
  )


+ # Option to View Processed Data
  st.header("View Processed Data", divider="blue")
  var_option = st.selectbox(
  "Select a data type", ("Velocity", "Echo", "Correlation", "Percent Good")
@@ -170,6 +172,7 @@ if plot_button:
  call_plot(var_option, beam, mask=False)


+ # Option to Write Processed Data
  st.header("Write Data", divider="blue")

  mask_data_radio = st.radio("Do you want to mask the final data?", ("Yes", "No"))
@@ -186,37 +189,57 @@ file_type_radio = st.radio("Select output file format:", ("NetCDF", "CSV"))

  if file_type_radio == "NetCDF":
  add_attr_button = st.checkbox("Add attributes to NetCDF file")
-
+
  if add_attr_button:
  st.write("### Modify Attributes")
-
+
  # Create two-column layout for attributes
  col1, col2 = st.columns(2)
-
+
  with col1:
  # Display attributes in the first column
- for key in ["Cruise_No.", "Ship_Name", "Project_No.", "Water_Depth_m", "Deployment_Depth_m","Deployment_Date","Recovery_Date"]:
+ for key in [
+ "Cruise_No.",
+ "Ship_Name",
+ "Project_No.",
+ "Water_Depth_m",
+ "Deployment_Depth_m",
+ "Deployment_Date",
+ "Recovery_Date",
+ ]:
  if key in st.session_state.attributes:
- st.session_state.attributes[key] = st.text_input(key, value=st.session_state.attributes[key])
+ st.session_state.attributes[key] = st.text_input(
+ key, value=st.session_state.attributes[key]
+ )
  else:
- st.session_state.attributes[key] = st.text_input(key)
-
+ st.session_state.attributes[key] = st.text_input(key)
+
  with col2:
  # Display attributes in the second column
- for key in ["Latitude", "Longitude","Platform_Type","Participants", "File_created_by", "Contact", "Comments"]:
+ for key in [
+ "Latitude",
+ "Longitude",
+ "Platform_Type",
+ "Participants",
+ "File_created_by",
+ "Contact",
+ "Comments",
+ ]:
  if key in st.session_state.attributes:
- st.session_state.attributes[key] = st.text_input(key, value=st.session_state.attributes[key])
+ st.session_state.attributes[key] = st.text_input(
+ key, value=st.session_state.attributes[key]
+ )
  else:
- st.session_state.attributes[key] = st.text_input(key)
-
- download_button = st.button("Generate Processed files")
+ st.session_state.attributes[key] = st.text_input(key)
+
+ download_button = st.button("Generate Processed files")

  if download_button:
  st.session_state.processed_filename = file_write()
  st.write(":grey[Processed file created. Click the download button.]")
  st.write(st.session_state.processed_filename)
  depth = np.trunc(st.session_state.final_depth)
-
+
  if file_type_radio == "NetCDF":
  if add_attr_button and st.session_state.attributes:
  # Generate file with attributes
@@ -225,7 +248,7 @@ if download_button:
  depth,
  st.session_state.date,
  st.session_state.write_velocity,
- attributes=st.session_state.attributes # Pass edited attributes
+ attributes=st.session_state.attributes, # Pass edited attributes
  )
  else:
  # Generate file without attributes
@@ -233,9 +256,9 @@ if download_button:
  st.session_state.processed_filename,
  depth,
  st.session_state.date,
- st.session_state.write_velocity
+ st.session_state.write_velocity,
  )
-
+
  with open(st.session_state.processed_filename, "rb") as file:
  st.download_button(
  label="Download NetCDF File",
@@ -280,73 +303,129 @@ if download_button:
  file_name="vertical_velocity.csv",
  mime="text/csf",
  )
-
-
+
+
+ # Option to Download Config file
+ # ------------------------------
+
  # Header for the Config.ini File Generator
  st.header("Config.ini File Generator", divider="blue")

  # Radio button to decide whether to generate the config.ini file
- generate_config_radio = st.radio("Do you want to generate a config.ini file?", ("No", "Yes"))
+ generate_config_radio = st.radio(
+ "Do you want to generate a config.ini file?", ("No", "Yes")
+ )
+

  if generate_config_radio == "Yes":
  # Create a config parser object
  config = configparser.ConfigParser()

  # Main section
- config["Main"] = {
- "Input_FileName": st.session_state.fname
- }
-
+ config["FileSettings"] = {}
+ config["DownloadOptions"] = {}
+ config["QCTest"] = {"qc_test": "False"}
+ config["ProfileTest"] = {"profile_test": "False"}
+ config["VelocityTest"] = {"velocity_test": "False"}
+ config["Optional"] = {"attributes": "False"}
+
+ config["FileSettings"]["input_file_path"] = ""
+ config["FileSettings"]["input_file_name"] = st.session_state.fname
+ config["FileSettings"]["output_file_path"] = ""
+ config["FileSettings"]["output_file_name_raw"] = ""
+ config["FileSettings"]["output_file_name_processed"] = ""
+ config["FileSettings"]["output_format_raw"] = str(file_type_radio).lower()
+ config["FileSettings"]["output_format_processed"] = str(file_type_radio).lower()
+
+ config["DownloadOptions"]["download_raw"] = "True"
+ config["DownloadOptions"]["download_processed"] = "True"
+ config["DownloadOptions"]["apply_mask"] = "True"
+ config["DownloadOptions"]["download_mask"] = "True"
+
+ # QC Test Options
  if st.session_state.isQCMask:
- config["QC Test"] = {}
+ config["QCTest"]["qc_test"] = "True"

  # Add the contents of the current QC Mask thresholds
  if "newthresh" in st.session_state:
  for idx, row in st.session_state.newthresh.iterrows():
- config["QC Test"][row["Threshold"].replace(" ", "_")] = row["Values"]
+ config["QCTest"][row["Threshold"].replace(" ", "_")] = row["Values"]

-
- # Profile Test section
+ # Profile Test Options
  if st.session_state.isProfileMask:
- config["Profile Test"] = {}
-
- if st.session_state.update_mask:
+ config["ProfileTest"]["profile_test"] = "True"

- config["Profile Test"]["Change_Range"] = str(st.session_state.ens_range)
- config["Profile Test"]["Deployment_ensembles"] = str(st.session_state.start_ens)
- config["Profile Test"]["Recovery_ensembles"] = str(st.session_state.end_ens)
+ if st.session_state.isTrimEnds:
+ config["ProfileTest"]["trim_ends"] = "True"
+ config["ProfileTest"]["trim_ends_start_index"] = str(
+ st.session_state.start_ens
+ )
+ config["ProfileTest"]["trim_ends_end_index"] = str(st.session_state.end_ens)
+ else:
+ config["ProfileTest"]["trim_ends"] = "False"

- if st.session_state.update_mask_cutbin:
- config["Profile Test"]["Beam"] = str(st.session_state.beam + 1) # Adding 1 since beams are 1-based
- config["Profile Test"]["cell_to_delete"] = str(st.session_state.extra_cells)
+ if st.session_state.isCutBins:
+ config["ProfileTest"]["cut_bins"] = "True"
+ config["ProfileTest"]["cut_bins_add_cells"] = str(
+ st.session_state.extra_cells
+ )
+ else:
+ config["ProfileTest"]["cut_bins"] = "False"

  if st.session_state.isGrid:
- config["Profile Test"]["Regrid_Depth_cells"] = st.session_state.last_cell # Bin or Surface
-
+ config["ProfileTest"]["regrid"] = "True"
+ config["ProfileTest"][
+ "Regrid_Option"
+ ] = st.session_state.end_bin_option
+ else:
+ config["ProfileTest"]["regrid"] = "False"

  # Velocity Test Section
  if st.session_state.isVelocityMask:
- config["Velocity Test"] = {}
+ config["VelocityTest"]["velocity_test"] = "True"

  if st.session_state.isMagnet:
- config["Velocity Test"]["Latitude"] = str(st.session_state.lat)
- config["Velocity Test"]["Longitude"] = str(st.session_state.lon)
- config["Velocity Test"]["Depth"] = str(st.session_state.magnetic_dec_depth)
- config["Velocity Test"]["Year"] = str(st.session_state.year)
+ config["VelocityTest"]["magnetic_declination"] = str(True)
+ config["VelocityTest"]["latitude"] = str(st.session_state.lat)
+ config["VelocityTest"]["longitude"] = str(st.session_state.lon)
+ config["VelocityTest"]["depth"] = str(st.session_state.magnetic_dec_depth)
+ config["VelocityTest"]["year"] = str(st.session_state.year)
+ else:
+ config["VelocityTest"]["magnetic_declination"] = str(False)

  if st.session_state.isCutoff:
- config["Velocity Test"]["Max_Zoank"] = str(st.session_state.maxuvel)
- config["Velocity Test"]["Max_Meridional"] = str(st.session_state.maxvvel)
- config["Velocity Test"]["Max_Vertical"] = str(st.session_state.maxwvel)
+ config["VelocityTest"]["cutoff"] = str(True)
+ config["VelocityTest"]["max_zonal_velocity"] = str(st.session_state.maxuvel)
+ config["VelocityTest"]["max_meridional_velocity"] = str(
+ st.session_state.maxvvel
+ )
+ config["VelocityTest"]["max_vertical_velocity"] = str(
+ st.session_state.maxwvel
+ )
+ else:
+ config["VelocityTest"]["cutoff"] = str(False)

  if st.session_state.isDespike:
- config["Velocity Test"]["Despike_Kernal_Size"] = str(st.session_state.despike_kernal)
- config["Velocity Test"]["Despike_Cutoff"] = str(st.session_state.despike_cutoff)
+ config["VelocityTest"]["despike"] = str(True)
+ config["VelocityTest"]["despike_Kernal_Size"] = str(
+ st.session_state.despike_kernal
+ )
+ config["VelocityTest"]["despike_Cutoff"] = str(
+ st.session_state.despike_cutoff
+ )
+ else:
+ config["VelocityTest"]["Despike"] = str(False)

  if st.session_state.isFlatline:
- config["Velocity Test"]["Flatline_Kernal"] = str(st.session_state.flatline_kernal)
- config["Velocity Test"]["Flatline_Deviation"] = str(st.session_state.flatline_cutoff)
-
+ config["VelocityTest"]["flatline"] = str(True)
+ config["VelocityTest"]["flatline_kernal_size"] = str(
+ st.session_state.flatline_kernal
+ )
+ config["VelocityTest"]["flatline_deviation"] = str(
+ st.session_state.flatline_cutoff
+ )
+ else:
+ config["VelocityTest"]["flatline"] = str(False)

  # Optional section (attributes)
  config["Optional"] = {}
@@ -365,3 +444,9 @@ if generate_config_radio == "Yes":
  data=file,
  file_name="config.ini",
  )
+
+ display_config_radio = st.radio(
+ "Do you want to display config.ini file?", ("No", "Yes")
+ )
+ if display_config_radio == "Yes":
+ st.write({section: dict(config[section]) for section in config.sections()})
pyadps/utils/__init__.py CHANGED
@@ -9,4 +9,4 @@ from pyadps.utils.regrid import *
  from pyadps.utils.signal_quality import *
  from pyadps.utils.velocity_test import *
  from pyadps.utils.writenc import *
-
+ # from pyadps.utils.autoprocess import *
@@ -0,0 +1,282 @@
+ import configparser
+ import os
+
+ import numpy as np
+ import pandas as pd
+ import pyadps.utils.writenc as wr
+ from pyadps.utils import readrdi
+ from pyadps.utils.profile_test import side_lobe_beam_angle
+ from pyadps.utils.regrid import regrid2d, regrid3d
+ from pyadps.utils.signal_quality import (
+ default_mask,
+ ev_check,
+ false_target,
+ pg_check,
+ qc_check,
+ )
+ from pyadps.utils.velocity_test import (
+ despike,
+ flatline,
+ magnetic_declination,
+ velocity_cutoff,
+ )
+
+ def main():
+ # Get the config file
+ try:
+ filepath = input("Enter config file name: ")
+ if os.path.exists(filepath):
+ autoprocess(filepath)
+ else:
+ print("File not found!")
+ except:
+ print("Error: Unable to process the data.")
+
+ def autoprocess(filepath):
+ config = configparser.ConfigParser()
+ config.read(filepath)
+ input_file_name = config.get("FileSettings", "input_file_name")
+ input_file_path = config.get("FileSettings", "input_file_path")
+
+ full_input_file_path = os.path.join(input_file_path, input_file_name)
+
+ print("File reading started. Please wait for a few seconds ...")
+ ds = readrdi.ReadFile(full_input_file_path)
+ print("File reading complete.")
+
+ header = ds.fileheader
+ flobj = ds.fixedleader
+ vlobj = ds.variableleader
+ velocity = ds.velocity.data
+ echo = ds.echo.data
+ correlation = ds.correlation.data
+ pgood = ds.percentgood.data
+ ensembles = header.ensembles
+ cells = flobj.field()["Cells"]
+ fdata = flobj.fleader
+ vdata = vlobj.vleader
+
+ mask = default_mask(flobj, velocity)
+ print("Default Mask created.")
+ x = np.arange(0, ensembles, 1)
+ y = np.arange(0, cells, 1)
+ depth = None
+
+ # QC Test
+ isQCTest = config.getboolean("QCTest", "qc_test")
+
+ if isQCTest:
+ ct = config.getint("QCTest", "correlation")
+ evt = config.getint("QCTest", "error_velocity")
+ et = config.getint("QCTest", "echo_intensity")
+ ft = config.getint("QCTest", "false_target")
+ is3Beam = config.getboolean("QCTest", "three_beam")
+ pgt = config.getint("QCTest", "percentage_good")
+
+ mask = pg_check(pgood, mask, pgt, threebeam=is3Beam)
+ mask = qc_check(correlation, mask, ct)
+ mask = qc_check(echo, mask, et)
+ mask = ev_check(velocity[3, :, :], mask, evt)
+ mask = false_target(echo, mask, ft, threebeam=True)
+ print("QC Test complete.")
+
+ endpoints = None
+ isProfileTest = config.getboolean("ProfileTest", "profile_test")
+ if isProfileTest:
+ isTrimEnds = config.getboolean("ProfileTest", "trim_ends")
+ if isTrimEnds:
+ start_index = config.getint("ProfileTest", "trim_ends_start_index")
+ end_index = config.getint("ProfileTest", "trim_ends_end_index")
+ # if start_index < 0 or start_index > ensembles:
+
+ if start_index > 0:
+ mask[:, :start_index] = 1
+
+ if end_index < x[-1]:
+ mask[:, end_index:] = 1
+
+ endpoints = np.array([start_index, end_index])
+
+ print("Trim Ends complete.")
+
+ isCutBins = config.getboolean("ProfileTest", "cut_bins")
+ if isCutBins:
+ add_cells = config.getint("ProfileTest", "cut_bins_add_cells")
+ mask = side_lobe_beam_angle(flobj, vlobj, mask, extra_cells=add_cells)
+
+ print("Cutbins complete.")
+
+ isRegrid = config.getboolean("ProfileTest", "regrid")
+ if isRegrid:
+ print("File regridding started. This will take a few seconds ...")
+ regrid_option = config.get("ProfileTest", "regrid_option")
+ z, velocity = regrid3d(
+ flobj,
+ vlobj,
+ velocity,
+ -32768,
+ trimends=endpoints,
+ )
+ z, echo = regrid3d(flobj, vlobj, echo, -32768, trimends=endpoints)
+ z, correlation = regrid3d(
+ flobj, vlobj, correlation, -32768, trimends=endpoints
+ )
+ z, pgood = regrid3d(flobj, vlobj, pgood, -32768, trimends=endpoints)
+ z, mask = regrid2d(flobj, vlobj, mask, 1, trimends=endpoints)
+ depth = z
+ print("Regrid Complete.")
+
+ print("Profile Test complete.")
+
+ isVelocityTest = config.getboolean("VelocityTest", "velocity_test")
+ if isVelocityTest:
+ isMagneticDeclination = config.getboolean(
+ "VelocityTest", "magnetic_declination"
+ )
+ if isMagneticDeclination:
+ maglat = config.getfloat("VelocityTest", "latitude")
+ maglon = config.getfloat("VelocityTest", "longitude")
+ magdep = config.getfloat("VelocityTest", "depth")
+ magyear = config.getfloat("VelocityTest", "year")
+
+ velocity, mag = magnetic_declination(
+ velocity, maglat, maglon, magdep, magyear
+ )
+ print(f"Magnetic Declination applied. The value is {mag[0]} degrees.")
+
+ isCutOff = config.getboolean("VelocityTest", "cutoff")
+ if isCutOff:
+ maxu = config.getint("VelocityTest", "max_zonal_velocity")
+ maxv = config.getint("VelocityTest", "max_meridional_velocity")
+ maxw = config.getint("VelocityTest", "max_vertical_velocity")
+ mask = velocity_cutoff(velocity[0, :, :], mask, cutoff=maxu)
+ mask = velocity_cutoff(velocity[1, :, :], mask, cutoff=maxv)
+ mask = velocity_cutoff(velocity[2, :, :], mask, cutoff=maxw)
+ print("Maximum velocity cutoff applied.")
+
+ isDespike = config.getboolean("VelocityTest", "despike")
+ if isDespike:
+ despike_kernal = config.getint("VelocityTest", "despike_kernal_size")
+ despike_cutoff = config.getint("VelocityTest", "despike_cutoff")
+
+ mask = despike(
+ velocity[0, :, :],
+ mask,
+ kernal_size=despike_kernal,
+ cutoff=despike_cutoff,
+ )
+ mask = despike(
+ velocity[1, :, :],
+ mask,
+ kernal_size=despike_kernal,
+ cutoff=despike_cutoff,
+ )
+ print("Velocity data despiked.")
+
+ isFlatline = config.getboolean("VelocityTest", "flatline")
+ if isFlatline:
+ despike_kernal = config.getint("VelocityTest", "flatline_kernal_size")
+ despike_cutoff = config.getint("VelocityTest", "flatline_deviation")
+
+ mask = flatline(
+ velocity[0, :, :],
+ mask,
+ kernal_size=despike_kernal,
+ cutoff=despike_cutoff,
+ )
+ mask = flatline(
+ velocity[1, :, :],
+ mask,
+ kernal_size=despike_kernal,
+ cutoff=despike_cutoff,
+ )
+ mask = flatline(
+ velocity[2, :, :],
+ mask,
+ kernal_size=despike_kernal,
+ cutoff=despike_cutoff,
+ )
+ print("Flatlines in velocity removed.")
+
+ print("Velocity Test complete.")
+
+ # Apply mask to velocity data
+ isApplyMask = config.get("DownloadOptions", "apply_mask")
+ if isApplyMask:
+ velocity[:, mask == 1] = -32768
+ print("Mask Applied.")
+
+ # Create Depth axis if regrid not applied
+ if depth is None:
+ mean_depth = np.mean(vlobj.vleader["Depth of Transducer"]) / 10
+ mean_depth = np.trunc(mean_depth)
+ cells = flobj.field()["Cells"]
+ cell_size = flobj.field()["Depth Cell Len"] / 100
+ bin1dist = flobj.field()["Bin 1 Dist"] / 100
+ max_depth = mean_depth - bin1dist
+ min_depth = max_depth - cells * cell_size
+ depth = np.arange(-1 * max_depth, -1 * min_depth, cell_size)
+
+ print("WARNING: File not regrided. Depth axis created based on mean depth.")
+
+ # Create Time axis
+ year = vlobj.vleader["RTC Year"]
+ month = vlobj.vleader["RTC Month"]
+ day = vlobj.vleader["RTC Day"]
+ hour = vlobj.vleader["RTC Hour"]
+ minute = vlobj.vleader["RTC Minute"]
+ second = vlobj.vleader["RTC Second"]
+
+ year = year + 2000
+ date_df = pd.DataFrame(
+ {
+ "year": year,
+ "month": month,
+ "day": day,
+ "hour": hour,
+ "minute": minute,
+ "second": second,
+ }
+ )
+
+ date = pd.to_datetime(date_df)
+
+ print("Time axis created.")
+
+ isWriteRawNC = config.get("DownloadOptions", "download_raw")
+ isWriteProcNC = config.get("DownloadOptions", "download_processed")
+ isAttributes = config.get("Optional", "attributes")
+
+ if isAttributes:
+ attributes = [att for att in config["Optional"]]
+ attributes = dict(config["Optional"].items())
+ del attributes["attributes"]
+ else:
+ attributes = None
+
+ if isWriteRawNC:
+ filepath = config.get("FileSettings", "output_file_path")
+ filename = config.get("FileSettings", "output_file_name_raw")
+ output_file_path = os.path.join(filepath, filename)
+ if isAttributes:
+ wr.rawnc(full_input_file_path, output_file_path, attributes=attributes)
+
+ print("Raw file written.")
+
+ if isWriteProcNC:
+ filepath = config.get("FileSettings", "output_file_path")
+ filename = config.get("FileSettings", "output_file_name_processed")
+ full_file_path = os.path.join(filepath, filename)
+
+ wr.finalnc(
+ full_file_path,
+ depth,
+ date,
+ velocity,
+ attributes=attributes, # Pass edited attributes
+ )
+ print("Processed file written.")
+
+
+ if __name__ == "__main__":
+ main()
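
The new module above exposes two entry points: main(), which prompts interactively for a config file path, and autoprocess(filepath), which runs the whole batch pipeline. A minimal usage sketch in Python, assuming the module is importable as pyadps.utils.autoprocess (the commented-out import in pyadps/utils/__init__.py above suggests that path) and that a config.ini based on the template below exists:

# Minimal sketch: drive the new batch pipeline from an existing config file.
# The import path pyadps.utils.autoprocess is an assumption inferred from the
# commented-out line in pyadps/utils/__init__.py.
import os

from pyadps.utils.autoprocess import autoprocess

config_path = "config.ini"  # e.g. a file based on the template shown below
if os.path.exists(config_path):
    # Reads [FileSettings], applies whichever QC/Profile/Velocity tests are
    # enabled in the config, and writes the raw/processed output files.
    autoprocess(config_path)
else:
    print("File not found!")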
@@ -0,0 +1,81 @@
+ [FileSettings]
+ # Input file settings
+ input_file_path = /home/user/data/
+ input_file_name = adcp_raw.000
+
+ # Output file settings. Do not enter file extension.
+ output_file_path = /home/user/output/
+ output_file_name_raw = adcp_raw.nc
+ output_file_name_processed = adcp_proc.nc
+
+ # Choose between 'netcdf' or 'csv' for the raw output format
+ output_format_raw = netcdf
+
+ # Choose between 'netcdf' or 'csv' for the processed output format
+ output_format_processed = csv
+
+ [DownloadOptions]
+ # Options to download raw and/or processed output files
+ download_raw = True
+ download_processed = True
+ apply_mask = True
+ download_mask = True
+
+ [QCTest]
+ # Enable or Disable QC Test (True/False)
+ qc_test = True
+ correlation = 64
+ error_velocity = 2000
+ echo_intensity = 40
+ false_target = 50
+ three_beam = True
+ percentage_good = 50
+
+ [ProfileTest]
+ # Enable or Disable Profile Test (True/False)
+ profile_test = True
+ trim_ends = True
+ trim_ends_start_index = 2
+ trim_ends_end_index = 17086
+ cut_bins = True
+ cut_bins_add_cells = 2
+ regrid = True
+ regrid_option = Bin
+
+ [VelocityTest]
+ # Enable or Disable Velocity Test (True/False)
+ velocity_test = True
+ magnetic_declination = True
+ latitude = 0.0
+ longitude = 0.1
+ depth = 0
+ year = 2024
+ cutoff = True
+ max_zonal_velocity = 250
+ max_meridional_velocity = 250
+ max_vertical_velocity = 15
+ despike = True
+ despike_kernal_size = 5
+ despike_cutoff = 150
+ flatline = True
+ flatline_kernal_size = 13
+ flatline_deviation = 1
+
+ [Optional]
+ # Options to add attributes to netcdf file (True/False)
+ attributes = True
+ cruise_no. = Ship999
+ ship_name = RV Vessel Name
+ project_no. = GAP9999
+ water_depth_m = 1000
+ deployment_depth_m = 300
+ deployment_date = 12/10/2023
+ recovery_date = 8/10/2024
+ latitude = 15.0
+ longitude = 77.0
+ platform_type = Moored
+ participants = abcd, efgh, ijkl
+ file_created_by = xxxx
+ contact = abcd
+ comments = No comments
+
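
For reference, the [Optional] block in this template is treated as free-form key/value metadata: the new autoprocess module reads the whole section with configparser and drops the attributes flag before handing the remaining keys to the NetCDF writer. A minimal sketch of that parsing step, assuming the template above is saved locally as config.ini:

# Minimal sketch of how [Optional] becomes the attribute dictionary passed to
# the NetCDF writer, mirroring dict(config["Optional"].items()) in the new module.
import configparser

config = configparser.ConfigParser()
config.read("config.ini")  # assumed local copy of the template above

if config.getboolean("Optional", "attributes"):
    attributes = dict(config["Optional"].items())
    del attributes["attributes"]  # keep only the metadata keys
else:
    attributes = None

print(attributes)  # {'cruise_no.': 'Ship999', 'ship_name': 'RV Vessel Name', ...}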