pyadps 0.1.0__tar.gz → 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. {pyadps-0.1.0 → pyadps-0.1.1}/PKG-INFO +3 -3
  2. {pyadps-0.1.0 → pyadps-0.1.1}/pyproject.toml +3 -3
  3. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/pages/01_Read_File.py +3 -2
  4. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/pages/03_Download_Raw_File.py +39 -2
  5. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/pages/06_Profile_Test.py +9 -9
  6. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/pages/07_Velocity_Test.py +13 -13
  7. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/pages/08_Write_File.py +19 -6
  8. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/pages/09_Auto_process.py +2 -0
  9. pyadps-0.1.1/src/pyadps/pages/__pycache__/__init__.cpython-312.pyc +0 -0
  10. pyadps-0.1.1/src/pyadps/utils/__pycache__/__init__.cpython-312.pyc +0 -0
  11. pyadps-0.1.1/src/pyadps/utils/__pycache__/autoprocess.cpython-312.pyc +0 -0
  12. pyadps-0.1.1/src/pyadps/utils/__pycache__/cutbin.cpython-312.pyc +0 -0
  13. pyadps-0.1.1/src/pyadps/utils/__pycache__/plotgen.cpython-312.pyc +0 -0
  14. pyadps-0.1.1/src/pyadps/utils/__pycache__/profile_test.cpython-312.pyc +0 -0
  15. pyadps-0.1.1/src/pyadps/utils/__pycache__/pyreadrdi.cpython-312.pyc +0 -0
  16. pyadps-0.1.1/src/pyadps/utils/__pycache__/readrdi.cpython-312.pyc +0 -0
  17. pyadps-0.1.1/src/pyadps/utils/__pycache__/regrid.cpython-312.pyc +0 -0
  18. pyadps-0.1.1/src/pyadps/utils/__pycache__/script.cpython-312.pyc +0 -0
  19. pyadps-0.1.1/src/pyadps/utils/__pycache__/sensor_health.cpython-312.pyc +0 -0
  20. pyadps-0.1.1/src/pyadps/utils/__pycache__/signal_quality.cpython-312.pyc +0 -0
  21. pyadps-0.1.1/src/pyadps/utils/__pycache__/velocity_test.cpython-312.pyc +0 -0
  22. pyadps-0.1.1/src/pyadps/utils/__pycache__/writenc.cpython-312.pyc +0 -0
  23. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/autoprocess.py +26 -8
  24. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/pyreadrdi.py +15 -12
  25. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/script.py +9 -9
  26. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/velocity_test.py +6 -6
  27. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/writenc.py +120 -1
  28. {pyadps-0.1.0 → pyadps-0.1.1}/LICENSE +0 -0
  29. {pyadps-0.1.0 → pyadps-0.1.1}/README.md +0 -0
  30. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/Home_Page.py +0 -0
  31. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/__init__.py +0 -0
  32. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/__main__.py +0 -0
  33. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/pages/02_View_Raw_Data.py +0 -0
  34. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/pages/04_Sensor_Health.py +1 -1
  35. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/pages/05_QC_Test.py +0 -0
  36. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/pages/__init__.py +0 -0
  37. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/__init__.py +0 -0
  38. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/metadata/config.ini +0 -0
  39. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/metadata/demo.000 +0 -0
  40. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/metadata/flmeta.json +0 -0
  41. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/metadata/vlmeta.json +0 -0
  42. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/plotgen.py +0 -0
  43. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/profile_test.py +0 -0
  44. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/readrdi.py +0 -0
  45. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/sensor_health.py +0 -0
  46. {pyadps-0.1.0 → pyadps-0.1.1}/src/pyadps/utils/signal_quality.py +0 -0
@@ -1,8 +1,8 @@
 Metadata-Version: 2.3
 Name: pyadps
-Version: 0.1.0
+Version: 0.1.1
 Summary: A Python package for ADCP data processing
-Home-page: https://example.com
+Home-page: https://pyadps.readthedocs.io/en/latest/index.html
 License: MIT
 Keywords: adcp,data-processing,oceanography
 Author: P. Amol
@@ -25,7 +25,7 @@ Requires-Dist: plotly-resampler (>=0.10.0)
 Requires-Dist: pygeomag (>=1.1.0,<2.0.0)
 Requires-Dist: scipy (>=1.14.0)
 Requires-Dist: streamlit (>=1.36.0)
-Project-URL: Documentation, https://example.com/docs
+Project-URL: Documentation, https://pyadps.readthedocs.io/en/latest/index.html
 Project-URL: Repository, https://github.com/p-amol/pyadps
 Description-Content-Type: text/markdown

@@ -1,13 +1,13 @@
 [tool.poetry]
 name = "pyadps"
-version = "0.1.0"
+version = "0.1.1"
 description = "A Python package for ADCP data processing"
 authors = ["P. Amol <prakashamol@gmail.com>"]
 readme = "README.md"
 license = "MIT"
-homepage = "https://example.com" # You can add your homepage URL or GitHub URL here
+homepage = "https://pyadps.readthedocs.io/en/latest/index.html" # You can add your homepage URL or GitHub URL here
 repository = "https://github.com/p-amol/pyadps" # Replace with your repository link
-documentation = "https://example.com/docs" # Optional documentation link
+documentation = "https://pyadps.readthedocs.io/en/latest/index.html" # Optional documentation link
 keywords = ["adcp", "data-processing", "oceanography"]
 include = ["utils/metadata/*.json"]
 classifiers = [
@@ -182,6 +182,7 @@ date_df = pd.DataFrame(
 st.session_state.date = pd.to_datetime(date_df)
 st.session_state.date1 = pd.to_datetime(date_df)
 st.session_state.date2 = pd.to_datetime(date_df)
+st.session_state.date3 = pd.to_datetime(date_df)
 st.session_state.ensemble_axis = np.arange(0, st.session_state.head.ensembles, 1)
 st.session_state.axis_option = "time"

@@ -312,12 +313,12 @@ st.session_state.maxwvel_VT = 15

 # Tab3: Despike
 st.session_state.isDespikeCheck_VT = False
-st.session_state.despike_kernal_VT = 5
+st.session_state.despike_kernel_VT = 5
 st.session_state.despike_cutoff_VT = 3

 # Tab4: Flatline
 st.session_state.isFlatlineCheck_VT = False
-st.session_state.flatline_kernal_VT = 5
+st.session_state.flatline_kernel_VT = 5
 st.session_state.flatline_cutoff_VT = 3

 # ------------------
@@ -20,6 +20,9 @@ if "fname" not in st.session_state:
 if "rawfilename" not in st.session_state:
     st.session_state.rawfilename = "rawfile.nc"

+if "fleadfilename" not in st.session_state:
+    st.session_state.fleadfilename = "flead.nc"
+
 if "vleadfilename" not in st.session_state:
     st.session_state.vleadfilename = "vlead.nc"

@@ -66,6 +69,23 @@ def file_write(path, axis_option, add_attributes=True):
             path, st.session_state.rawfilename, st.session_state.date1, axis_option
         )

+@st.cache_data
+def file_write_flead(path, axis_option, add_attributes=True):
+    tempvardirname = tempfile.TemporaryDirectory(delete=False)
+    st.session_state.fleadfilename = tempvardirname.name + "/flead.nc"
+
+    if add_attributes:
+        wr.flead_nc(
+            path,
+            st.session_state.fleadfilename,
+            st.session_state.date2,
+            axis_option,
+            attributes=st.session_state.attributes,
+        )
+    else:
+        wr.flead_nc(
+            path, st.session_state.fleadfilename, st.session_state.date2, axis_option
+        )

 @st.cache_data
 def file_write_vlead(path, axis_option, add_attributes=True):
@@ -76,13 +96,13 @@ def file_write_vlead(path, axis_option, add_attributes=True):
         wr.vlead_nc(
             path,
             st.session_state.vleadfilename,
-            st.session_state.date2,
+            st.session_state.date3,
             axis_option,
             attributes=st.session_state.attributes,
         )
     else:
         wr.vlead_nc(
-            path, st.session_state.vleadfilename, st.session_state.date2, axis_option
+            path, st.session_state.vleadfilename, st.session_state.date3, axis_option
         )


@@ -141,6 +161,9 @@ st.session_state.axis_option_DRW = st.selectbox(

 # Buttons to generate files
 st.session_state.rawnc_download_DRW = st.button("Generate Raw NetCDF File")
+st.session_state.fleadnc_download_DRW = st.button(
+    "Generate Raw Fixed Leader NetCDF File"
+)
 st.session_state.vleadnc_download_DRW = st.button(
     "Generate Raw Variable Leader NetCDF File"
 )
@@ -159,6 +182,20 @@ if st.session_state.rawnc_download_DRW:
             file_name="rawfile.nc",
         )

+if st.session_state.fleadnc_download_DRW:
+    file_write_flead(
+        st.session_state.fpath,
+        st.session_state.axis_option,
+        st.session_state.add_attributes_DRW == "Yes",
+    )
+    st.write(st.session_state.fleadfilename)
+    with open(st.session_state.fleadfilename, "rb") as file:
+        st.download_button(
+            label="Download Fixed Leader",
+            data=file,
+            file_name="flead.nc",
+        )
+
 if st.session_state.vleadnc_download_DRW:
     file_write_vlead(
         st.session_state.fpath,
@@ -135,7 +135,7 @@ beam_angle = int(flobj.system_configuration()["Beam Angle"])
 x = np.arange(0, ensembles, 1)
 y = np.arange(0, cells, 1)

-# Regrided data
+# Regridded data
 # if "velocity_regrid" not in st.session_state:
 #     st.session_state.echo_regrid = np.copy(echo)
 #     st.session_state.velocity_regrid = np.copy(velocity)
@@ -472,7 +472,7 @@ tab1, tab2, tab3, tab4, tab5 = st.tabs(
         "Trim Ends",
         "Cut Bins - Sidelobe",
         "Cut Bins - Manual",
-        "Regriding",
+        "Regridding",
         "Save & Reset",
     ]
 )
@@ -731,7 +731,7 @@ with tab4:
         """
         When the ADCP buoy has vertical oscillations (greater than depth cell size),
         the depth bins has to be regridded based on the pressure sensor data. The data
-        can be regrided either till the surface or till the last bin.
+        can be regridded either till the surface or till the last bin.
         If the `Cell` option is selected, ensure that the end data are trimmed.
         Manual option permits choosing the end cell depth.
         """
@@ -803,7 +803,7 @@ with tab4:
                 beams=beams,
             )
             grid_bar.progress(20, text=progress_text)
-            st.write(":grey[Regrided velocity ...]")
+            st.write(":grey[Regridded velocity ...]")
             z, st.session_state.echo_regrid = regrid3d(
                 transdepth,
                 echo,
@@ -819,7 +819,7 @@ with tab4:
                 beams=beams,
             )
             grid_bar.progress(40, text=progress_text)
-            st.write(":grey[Regrided echo intensity ...]")
+            st.write(":grey[Regridded echo intensity ...]")
             z, st.session_state.correlation_regrid = regrid3d(
                 transdepth,
                 correlation,
@@ -835,7 +835,7 @@ with tab4:
                 beams=beams,
             )
             grid_bar.progress(60, text=progress_text)
-            st.write(":grey[Regrided correlation...]")
+            st.write(":grey[Regridded correlation...]")
             z, st.session_state.pgood_regrid = regrid3d(
                 transdepth,
                 pgood,
@@ -851,7 +851,7 @@ with tab4:
                 beams=beams,
             )
             grid_bar.progress(80, text=progress_text)
-            st.write(":grey[Regrided percent good...]")
+            st.write(":grey[Regridded percent good...]")

             z, st.session_state.profile_mask_regrid = regrid2d(
                 transdepth,
@@ -868,13 +868,13 @@ with tab4:
             )

             grid_bar.progress(99, text=progress_text)
-            st.write(":grey[Regrided mask...]")
+            st.write(":grey[Regridded mask...]")

             st.session_state.depth_axis = z
             st.write(":grey[New depth axis created...]")

             grid_bar.progress(100, text="Completed")
-            st.write(":green[All data regrided!]")
+            st.write(":green[All data regridded!]")

             st.write(
                 "No. of grid depth bins before regridding: ", np.shape(velocity)[1]
@@ -379,15 +379,15 @@ with tab3:
     ############## DESPIKE DATA #################
     st.header("Despike Data", divider="blue")
     st.write("""A rolling median filter is applied to remove spikes from the data.
-    The kernal size determines the number of ensembles (time interval) for the filter window.
+    The kernel size determines the number of ensembles (time interval) for the filter window.
     The standard deviation specifies the maximum allowable deviation to remove the spike.""")

     # time_interval = pd.Timedelta(st.session_state.date[-1] - st.session_state.date[0]).seconds/(3600*st.session_state.head.ensembles)

     st.write("Time interval: ", st.session_state.date[1] - st.session_state.date[0])

-    despike_kernal = st.number_input(
-        "Enter Despike Kernal Size for Median Filter",
+    despike_kernel = st.number_input(
+        "Enter Despike kernel Size for Median Filter",
         0,
         st.session_state.head.ensembles,
         5,
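Note (editorial): the despike test described in this hunk reduces to a rolling median filter plus a standard-deviation threshold. Below is a condensed, runnable sketch of that logic; the function and variable names are illustrative, not the packaged API (the package's `despike` in velocity_test.py applies the same idea row by row with a mask array).

```python
# Condensed sketch of the rolling-median despike described above.
# `kernel_size` is the filter window in ensembles; samples deviating from the
# local median by more than `cutoff` standard deviations are flagged as spikes.
import numpy as np
from scipy.signal import medfilt

def despike_row(velocity_row, kernel_size=5, cutoff=3):
    filt = medfilt(velocity_row, kernel_size)   # rolling median (odd kernel)
    diff = np.abs(velocity_row - filt)          # deviation from the median
    return diff > cutoff * np.std(diff)         # True where a spike is flagged

spikes = despike_row(np.array([1.0, 1.0, 50.0, 1.0, 1.0]), kernel_size=3, cutoff=2)
```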
@@ -399,19 +399,19 @@ with tab3:
     )
     despike_button = st.button("Despike")
     if despike_button:
-        st.session_state.despike_kernal_VT = despike_kernal
+        st.session_state.despike_kernel_VT = despike_kernel
         st.session_state.despike_cutoff_VT = despike_cutoff

         st.session_state.velocity_mask_despike = despike(
             velocity[0, :, :],
             st.session_state.velocity_mask_temp,
-            kernal_size=despike_kernal,
+            kernel_size=despike_kernel,
             cutoff=despike_cutoff,
         )
         st.session_state.velocity_mask_despike = despike(
             velocity[1, :, :],
             st.session_state.velocity_mask_temp,
-            kernal_size=despike_kernal,
+            kernel_size=despike_kernel,
             cutoff=despike_cutoff,
         )

@@ -424,7 +424,7 @@ with tab3:
     if st.session_state.isDespikeCheck_VT:
         st.success("Data Despiked")
         b = {
-            "Kernal Size": despike_kernal,
+            "kernel Size": despike_kernel,
             "Despike Cutoff": despike_cutoff,
         }
         st.write(b)
@@ -462,31 +462,31 @@ with tab4:

     st.write("Time interval: ", st.session_state.date[1] - st.session_state.date[0])

-    flatline_kernal = st.number_input("Enter Flatline Kernal Size", 0, 100, 13, 1)
+    flatline_kernel = st.number_input("Enter Flatline kernel Size", 0, 100, 13, 1)
     flatline_cutoff = st.number_input("Enter Flatline deviation (mm/s)", 0, 100, 1, 1)

     flatline_button = st.button("Remove Flatline")

     if flatline_button:
-        st.session_state.flatline_kernal_VT = flatline_kernal
+        st.session_state.flatline_kernel_VT = flatline_kernel
         st.session_state.flatline_cutoff_VT = flatline_cutoff

         st.session_state.velocity_mask_flatline = flatline(
             velocity[0, :, :],
             st.session_state.velocity_mask_temp,
-            kernal_size=flatline_kernal,
+            kernel_size=flatline_kernel,
             cutoff=flatline_cutoff,
         )
         st.session_state.velocity_mask_flatline = flatline(
             velocity[1, :, :],
             st.session_state.velocity_mask_temp,
-            kernal_size=flatline_kernal,
+            kernel_size=flatline_kernel,
             cutoff=flatline_cutoff,
         )
         st.session_state.velocity_mask_flatline = flatline(
             velocity[2, :, :],
             st.session_state.velocity_mask_temp,
-            kernal_size=flatline_kernal,
+            kernel_size=flatline_kernel,
             cutoff=flatline_cutoff,
         )
         # Modify the temporary mask file
@@ -498,7 +498,7 @@ with tab4:
     if st.session_state.isFlatlineCheck:
         st.success("Flatline Removed")
         b = {
-            "Kernal Size": flatline_kernal,
+            "kernel Size": flatline_kernel,
             "Flatline Cutoff": flatline_cutoff,
         }
         st.write(b)
@@ -198,12 +198,12 @@ st.session_state.mask_data_WF = st.radio(

 if st.session_state.mask_data_WF == "Yes":
     mask = st.session_state.final_mask
-    st.session_state.write_velocity = np.copy(st.session_state.final_velocity)
+    st.session_state.write_velocity = np.copy(st.session_state.final_velocity).astype(np.int16)
     st.session_state.write_velocity[:, mask == 1] = -32768
+
 else:
     st.session_state.write_velocity = np.copy(st.session_state.final_velocity)

-
 st.session_state.file_type_WF = st.radio(
     "Select output file format:", ("NetCDF", "CSV")
 )
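Note (editorial): the `.astype(np.int16)` added above matters because `final_velocity` can be a float array; casting before masking keeps the `-32768` sentinel an exact integer fill that matches the int16 dtype used elsewhere in this release. A minimal illustration with hypothetical arrays:

```python
import numpy as np

vel = np.array([[12.7, -5.2], [3.1, 7.9]])      # hypothetical float velocity
bad = np.array([[True, False], [False, True]])  # mask == 1 marks bad samples

write_vel = vel.astype(np.int16)  # cast before inserting the int16 sentinel
write_vel[bad] = -32768           # exact fill value, matching the output dtype
```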
@@ -262,6 +262,9 @@ if download_button:
     # st.write(st.session_state.processed_filename)
     depth_axis = np.trunc(st.session_state.final_depth_axis)
     final_mask = st.session_state.final_mask
+    st.session_state.write_echo = np.copy(st.session_state.final_echo)
+    st.session_state.write_correlation = np.copy(st.session_state.final_correlation)
+    st.session_state.write_pgood = np.copy(st.session_state.final_pgood)

     if st.session_state.file_type_WF == "NetCDF":
         if add_attr_button and st.session_state.attributes:
@@ -270,6 +273,9 @@ if download_button:
                 st.session_state.processed_filename,
                 depth_axis,
                 final_mask,
+                st.session_state.write_echo,
+                st.session_state.write_correlation,
+                st.session_state.write_pgood,
                 st.session_state.date,
                 st.session_state.write_velocity,
                 attributes=st.session_state.attributes,  # Pass edited attributes
@@ -280,6 +286,9 @@ if download_button:
                 st.session_state.processed_filename,
                 depth_axis,
                 final_mask,
+                st.session_state.write_echo,
+                st.session_state.write_correlation,
+                st.session_state.write_pgood,
                 st.session_state.date,
                 st.session_state.write_velocity,
             )
@@ -370,6 +379,7 @@ if generate_config_radio == "Yes":
     config["FileSettings"]["input_file_name"] = st.session_state.fname
     config["FileSettings"]["output_file_path"] = ""
     config["FileSettings"]["output_file_name_raw_netcdf"] = ""
+    config["FileSettings"]["output_file_name_flead_netcdf"] = ""
     config["FileSettings"]["output_file_name_vlead_netcdf"] = ""
     config["FileSettings"]["output_file_name_raw_csv"] = ""
     config["FileSettings"]["output_file_name_processed_netcdf"] = ""
@@ -386,6 +396,9 @@ if generate_config_radio == "Yes":
     config["DownloadOptions"]["download_raw_netcdf"] = str(
         st.session_state.rawnc_download_DRW
     )
+    config["DownloadOptions"]["download_flead_netcdf"] = str(
+        st.session_state.fleadnc_download_DRW
+    )
     config["DownloadOptions"]["download_vlead_netcdf"] = str(
         st.session_state.vleadnc_download_DRW
     )
@@ -534,15 +547,15 @@ if generate_config_radio == "Yes":

     # Tab 3
     config["VelocityTest"]["despike"] = str(st.session_state.isDespikeCheck_VT)
-    config["VelocityTest"]["despike_kernal_size"] = str(
-        st.session_state.despike_kernal_VT
+    config["VelocityTest"]["despike_kernel_size"] = str(
+        st.session_state.despike_kernel_VT
     )
     config["VelocityTest"]["despike_cutoff"] = str(st.session_state.despike_cutoff_VT)

     # Tab 4
     config["VelocityTest"]["flatline"] = str(st.session_state.isFlatlineCheck_VT)
-    config["VelocityTest"]["flatline_kernal_size"] = str(
-        st.session_state.flatline_kernal_VT
+    config["VelocityTest"]["flatline_kernel_size"] = str(
+        st.session_state.flatline_kernel_VT
     )
     config["VelocityTest"]["flatline_cutoff"] = str(st.session_state.flatline_cutoff_VT)

@@ -6,6 +6,8 @@ import json
 import streamlit as st
 from utils.autoprocess import autoprocess

+# To make the page wider if the user presses the reload button.
+st.set_page_config(layout="wide")

 @st.cache_data
 def file_access(uploaded_file):
@@ -378,44 +378,44 @@ def autoprocess(config_file, binary_file_path=None):

     isDespike = config.getboolean("VelocityTest", "despike")
     if isDespike:
-        despike_kernal = config.getint("VelocityTest", "despike_kernal_size")
-        despike_cutoff = config.getint("VelocityTest", "despike_cutoff")
+        despike_kernel = config.getint("VelocityTest", "despike_kernel_size")
+        despike_cutoff = config.getfloat("VelocityTest", "despike_cutoff")

         mask = despike(
             velocity[0, :, :],
             mask,
-            kernal_size=despike_kernal,
+            kernel_size=despike_kernel,
             cutoff=despike_cutoff,
         )
         mask = despike(
             velocity[1, :, :],
             mask,
-            kernal_size=despike_kernal,
+            kernel_size=despike_kernel,
             cutoff=despike_cutoff,
         )
         print("Velocity data despiked.")

     isFlatline = config.getboolean("VelocityTest", "flatline")
     if isFlatline:
-        despike_kernal = config.getint("VelocityTest", "flatline_kernal_size")
+        despike_kernel = config.getint("VelocityTest", "flatline_kernel_size")
         despike_cutoff = config.getint("VelocityTest", "flatline_deviation")

         mask = flatline(
             velocity[0, :, :],
             mask,
-            kernal_size=despike_kernal,
+            kernel_size=despike_kernel,
             cutoff=despike_cutoff,
         )
         mask = flatline(
             velocity[1, :, :],
             mask,
-            kernal_size=despike_kernal,
+            kernel_size=despike_kernel,
             cutoff=despike_cutoff,
         )
         mask = flatline(
             velocity[2, :, :],
             mask,
-            kernal_size=despike_kernal,
+            kernel_size=despike_kernel,
             cutoff=despike_cutoff,
         )

@@ -463,6 +463,7 @@ def autoprocess(config_file, binary_file_path=None):
     )

     date_raw = pd.to_datetime(date_df)
+    date_flead = pd.to_datetime(date_df)
     date_vlead = pd.to_datetime(date_df)
     date_final = pd.to_datetime(date_df)

@@ -477,6 +478,7 @@ def autoprocess(config_file, binary_file_path=None):
         attributes = None

     isWriteRawNC = config.getboolean("DownloadOptions", "download_raw_netcdf")
+    isWritefleadNc = config.getboolean("DownloadOptions", "download_flead_netcdf")
     isWriteVleadNC = config.getboolean("DownloadOptions", "download_vlead_netcdf")
     isWriteProcNC = config.getboolean("DownloadOptions", "download_processed_netcdf")
     filepath = config.get("FileSettings", "output_file_path")
@@ -496,6 +498,19 @@ def autoprocess(config_file, binary_file_path=None):

         print("Raw file written.")

+    if isWritefleadNc:
+        filename = config.get("FileSettings", "output_file_name_flead_netcdf")
+        output_file_path = os.path.join(filepath, filename)
+        wr.flead_nc(
+            full_input_file_path,
+            output_file_path,
+            date_flead,
+            axis_option=axis_option,
+            attributes=attributes,
+        )
+
+        print("Flead File written")
+
     if isWriteVleadNC:
         filename = config.get("FileSettings", "output_file_name_vlead_netcdf")
         output_file_path = os.path.join(filepath, filename)
@@ -519,6 +534,9 @@ def autoprocess(config_file, binary_file_path=None):
             output_file_path,
             depth1,
             mask,
+            echo,
+            correlation,
+            pgood,
             date_final,
             velocity,
             attributes=attributes,  # Pass edited attributes
@@ -890,7 +890,7 @@ def datatype(

     # These arguments are outputs of fixedleader function.
     # Makes the code faster if the fixedheader function is already executed.
-    if cell == 0 or beam == 0:
+    if isinstance(cell, (np.integer, int)) or isinstance(beam, (np.integer, int)):
         flead, ensemble, fl_error_code = fixedleader(
             filename,
             byteskip=byteskip,
@@ -898,22 +898,23 @@ def datatype(
             idarray=idarray,
             ensemble=ensemble,
         )
-        cell = int(flead[7][0])
-        beam = int(flead[6][0])
+        cell = []
+        beam = []
+        cell = flead[7][:]
+        beam = flead[6][:]
         if fl_error_code != 0:
             error_code = fl_error_code
     else:
-        cell = int(cell)
-        beam = int(beam)
-
+        cell = cell
+        beam = beam
     # Velocity is 16 bits and all others are 8 bits.
     # Create empty array for the chosen variable name.
     if var_name == "velocity":
-        var_array = np.zeros((beam, cell, ensemble), dtype="int16")
+        var_array = np.full((int(max(beam)), int(max(cell)), ensemble), -32768, dtype="int16")
         bitstr = "<h"
         bitint = 2
     else:  # inserted
-        var_array = np.zeros((beam, cell, ensemble), dtype="uint8")
+        var_array = np.zeros((int(max(beam)), int(max(cell)), ensemble), dtype="uint8")
         bitstr = "<B"
         bitint = 1
     # -----------------------------
@@ -942,7 +943,9 @@ def datatype(
         fbyteskip = None
         for count, item in enumerate(idarray[0][:]):
             if item in vid:
-                fbyteskip = offset[0][count]
+                fbyteskip = []
+                for i in range(ensemble):
+                    fbyteskip.append(int(offset[i][count]))
                 break
         if fbyteskip is None:
             print(
@@ -955,10 +958,10 @@ def datatype(

         # READ DATA
         for i in range(ensemble):
-            bfile.seek(fbyteskip, 1)
+            bfile.seek(fbyteskip[i], 1)
             bdata = bfile.read(2)
-            for cno in range(cell):
-                for bno in range(beam):
+            for cno in range(int(cell[i])):
+                for bno in range(int(beam[i])):
                     bdata = bfile.read(bitint)
                     varunpack = unpack(bitstr, bdata)
                     var_array[bno][cno][i] = varunpack[0]
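Note (editorial): the rewritten `datatype` keeps per-ensemble beam and cell counts instead of assuming one fixed geometry, sizes the output array to the largest geometry, and pre-fills velocity with the -32768 missing-value sentinel so that shorter ensembles leave fill values behind. A toy sketch of that padding scheme; shapes and names are illustrative:

```python
import numpy as np

beam = np.array([4, 4, 3])     # beams reported per ensemble (hypothetical)
cell = np.array([30, 28, 30])  # cells reported per ensemble (hypothetical)
ensembles = len(beam)

# Size to the largest geometry; untouched slots keep the missing-value fill.
var_array = np.full((beam.max(), cell.max(), ensembles), -32768, dtype="int16")
for i in range(ensembles):
    data_i = np.zeros((beam[i], cell[i]), dtype="int16")  # stand-in for a file read
    var_array[: beam[i], : cell[i], i] = data_i
```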
@@ -160,33 +160,33 @@ def run_script(filename):

     affirm = input("Despike the data? [y/n]: ")
     if affirm.lower() == "y":
-        despike_kernal = input("Enter despike kernal size:")
-        despike_kernal = int(despike_kernal)
+        despike_kernel = input("Enter despike kernel size:")
+        despike_kernel = int(despike_kernel)

         despike_cutoff = input("Enter despike cutoff (mm/s): ")
         despike_cutoff = float(despike_cutoff)

         mask = despike(
-            vel[0, :, :], mask, kernal_size=despike_kernal, cutoff=despike_cutoff
+            vel[0, :, :], mask, kernel_size=despike_kernel, cutoff=despike_cutoff
         )
         mask = despike(
-            vel[1, :, :], mask, kernal_size=despike_kernal, cutoff=despike_cutoff
+            vel[1, :, :], mask, kernel_size=despike_kernel, cutoff=despike_cutoff
         )

     affirm = input("Remove flatlines? [y/n]: ")
     if affirm.lower() == "y":
-        flatline_kernal = input("Enter despike kernal size:")
-        flatline_kernal = int(flatline_kernal)
+        flatline_kernel = input("Enter despike kernel size:")
+        flatline_kernel = int(flatline_kernel)
         flatline_cutoff = input("Enter Flatline deviation: [y/n]")
         flatlineL_cutoff = int(flatline_cutoff)
         mask = flatline(
-            vel[0, :, :], mask, kernal_size=flatline_kernal, cutoff=flatline_cutoff
+            vel[0, :, :], mask, kernel_size=flatline_kernel, cutoff=flatline_cutoff
         )
         mask = flatline(
-            vel[1, :, :], mask, kernal_size=flatline_kernal, cutoff=flatline_cutoff
+            vel[1, :, :], mask, kernel_size=flatline_kernel, cutoff=flatline_cutoff
         )
         mask = flatline(
-            vel[2, :, :], mask, kernal_size=flatline_kernal, cutoff=flatline_cutoff
+            vel[2, :, :], mask, kernel_size=flatline_kernel, cutoff=flatline_cutoff
         )
     apply_mask = input("Apply mask? [y/n]: ")
     if apply_mask.lower() == "y":
@@ -132,7 +132,7 @@ def velocity_cutoff(velocity, mask, cutoff=250):
     return mask


-def despike(velocity, mask, kernal_size=13, cutoff=3):
+def despike(velocity, mask, kernel_size=13, cutoff=3):
     """
     Function to remove anomalous spikes in the data over a period of time.
     A median filter is used to despike the data.
@@ -140,7 +140,7 @@ def despike(velocity, mask, kernel_size=13, cutoff=3):
     Args:
         velocity (numpy array, integer): Velocity(depth, time) in mm/s
         mask (numpy array, integer): Mask file
-        kernal_size (paramater, integer): Window size for rolling median filter
+        kernel_size (paramater, integer): Window size for rolling median filter
         cutoff (parameter, integer): Number of standard deviations to identify spikes

     Returns:
@@ -150,7 +150,7 @@ def despike(velocity, mask, kernel_size=13, cutoff=3):
     shape = np.shape(velocity)
     for j in range(shape[0]):
         # Apply median filter
-        filt = sp.signal.medfilt(velocity[j, :], kernal_size)
+        filt = sp.signal.medfilt(velocity[j, :], kernel_size)
         # Calculate absolute deviation from the rolling median
         diff = np.abs(velocity[j, :] - filt)
         # Calculate threshold for spikes based on standard deviation
@@ -164,7 +164,7 @@ def despike(velocity, mask, kernel_size=13, cutoff=3):
 def flatline(
     velocity,
     mask,
-    kernal_size=4,
+    kernel_size=4,
     cutoff=1,
 ):
     """
@@ -174,7 +174,7 @@ def flatline(
     Args:
         velocity (numpy arrray, integer): Velocity (depth, time)
         mask (numpy array, integer): Mask file
-        kernal_size (parameter, integer): No. of ensembles over which flatline has to be detected
+        kernel_size (parameter, integer): No. of ensembles over which flatline has to be detected
         cutoff (parameter, integer): Permitted deviation in velocity

     Returns:
@@ -191,7 +191,7 @@ def flatline(
         for k, g in groupby(dummymask):
             # subset_size = sum(1 for i in g)
             subset_size = len(list(g))
-            if k == 1 and subset_size >= kernal_size:
+            if k == 1 and subset_size >= kernel_size:
                 mask[j, index : index + subset_size] = 1
             index = index + subset_size
         dummymask = np.zeros(shape[1])
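Note (editorial): `flatline` groups consecutive near-constant samples with `itertools.groupby` and flags any run of at least `kernel_size` ensembles. A condensed sketch of that run-length idea follows; the packaged function builds its run mask slightly differently, so names and the deviation test here are illustrative:

```python
from itertools import groupby
import numpy as np

def flag_flatline_row(row, kernel_size=4, cutoff=1):
    flat = np.abs(np.diff(row, prepend=row[0])) <= cutoff  # near-constant steps
    mask = np.zeros(row.size, dtype=int)
    index = 0
    for k, g in groupby(flat):
        run = len(list(g))  # length of this constant/non-constant run
        if k and run >= kernel_size:
            mask[index : index + run] = 1
        index += run
    return mask

print(flag_flatline_row(np.array([5, 5, 5, 5, 5, 9, 2, 7])))  # [1 1 1 1 1 0 0 0]
```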
@@ -179,6 +179,74 @@ def rawnc(

     outnc.close()

+def flead_nc(
+    infile,
+    outfile,
+    time,
+    axis_option=None,
+    attributes=None,
+    t0="hours since 2000-01-01",
+):
+    """
+    Function to create ncfile containing Variable Leader.
+
+    Args:
+        infile (string): Input file path including filename
+        outfile (string): Output file path including filename
+    """
+    outnc = nc4.Dataset(outfile, "w", format="NETCDF4")
+
+    # Dimensions
+    # Define the primary axis based on axis_option
+    if axis_option == "ensemble":
+        outnc.createDimension("ensemble", None)
+        primary_axis = "ensemble"
+        ensemble = outnc.createVariable("ensemble", "i4", ("ensemble",))
+        ensemble.axis = "T"
+    elif axis_option == "time":
+        tsize = len(time)
+        outnc.createDimension("time", tsize)
+        primary_axis = "time"
+        time_var = outnc.createVariable("time", "i4", ("time",))
+        time_var.axis = "T"
+        time_var.units = t0
+        time_var.long_name = "time"
+
+        # Convert time_data to numerical format
+        nctime = pd2nctime(time, t0)
+        time_var[:] = nctime
+
+    else:
+        raise ValueError(f"Invalid axis_option: {axis_option}.")
+
+    # Variables
+
+    flead = rd.FixedLeader(infile)
+    fdict = flead.fleader
+    varid = [0] * len(fdict)
+
+    i = 0
+
+    for key, values in fdict.items():
+        format_item = key.replace(" ", "_")
+        varid[i] = outnc.createVariable(
+            format_item, "i4", primary_axis, fill_value=-32768
+        )
+        var = values
+        vshape = var.shape
+        if i == 0:
+            if primary_axis == "ensemble":
+                ensemble[:] = np.arange(1, vshape[0] + 1, 1)
+
+        varid[i][0 : vshape[0]] = var
+        i += 1
+
+    # Add global attributes if provided
+    if attributes:
+        for key, value in attributes.items():
+            setattr(outnc, key, str(value))  # Store attributes as strings
+
+    outnc.close()

 def vlead_nc(
     infile,
  infile,
@@ -251,7 +319,7 @@ def vlead_nc(
251
319
 
252
320
 
253
321
  def finalnc(
254
- outfile, depth, final_mask, time, data, t0="hours since 2000-01-01", attributes=None
322
+ outfile, depth, final_mask, final_echo, final_corr , final_pgood, time, data, t0="hours since 2000-01-01", attributes=None
255
323
  ):
256
324
  """
257
325
  Function to create the processed NetCDF file.
@@ -277,6 +345,9 @@ def finalnc(
         depth = depth[::-1]
         data = data[:, ::-1, :]
         final_mask = final_mask[::-1, :]
+        final_echo = final_echo[:,::-1, :]
+        final_corr = final_corr[:,::-1, :]
+        final_pgood = final_pgood[:,::-1, :]

     ncfile = nc4.Dataset(outfile, mode="w", format="NETCDF4")
     # Check if depth is scalar or array
  # Check if depth is scalar or array
@@ -319,6 +390,42 @@ def finalnc(
319
390
  mvel = ncfile.createVariable("mask", np.float32, ("time", "depth"), fill_value=fill)
320
391
  mvel.long_name = "Velocity Mask (1: bad value, 0: good value)"
321
392
 
393
+ echo1 = ncfile.createVariable("echo1", np.float32, ("time", "depth"), fill_value=-32768)
394
+ echo1.long_name = "Echo intensity Beam 1"
395
+
396
+ echo2 = ncfile.createVariable("echo2", np.float32, ("time", "depth"), fill_value=-32768)
397
+ echo2.long_name = "Echo intensity Beam 2"
398
+
399
+ echo3 = ncfile.createVariable("echo3", np.float32, ("time", "depth"), fill_value=-32768)
400
+ echo3.long_name = "Echo intensity Beam 3"
401
+
402
+ echo4 = ncfile.createVariable("echo4", np.float32, ("time", "depth"), fill_value=-32768)
403
+ echo4.long_name = "Echo intensity Beam 4"
404
+
405
+ corr1 = ncfile.createVariable("corr1", np.float32, ("time", "depth"), fill_value=-32768)
406
+ corr1.long_name = "Beam 1 correlation"
407
+
408
+ corr2 = ncfile.createVariable("corr2", np.float32, ("time", "depth"), fill_value=-32768)
409
+ corr2.long_name = "Beam 2 correlation"
410
+
411
+ corr3 = ncfile.createVariable("corr3", np.float32, ("time", "depth"), fill_value=-32768)
412
+ corr3.long_name = "Beam 3 correlation"
413
+
414
+ corr4 = ncfile.createVariable("corr4", np.float32, ("time", "depth"), fill_value=-32768)
415
+ corr4.long_name = "Beam 4 correlation"
416
+
417
+ pgd1 = ncfile.createVariable("pgd1", np.float32, ("time", "depth"), fill_value=-32768)
418
+ pgd1.long_name = "Percent Good Beam 1"
419
+
420
+ pgd2 = ncfile.createVariable("pgd2", np.float32, ("time", "depth"), fill_value=-32768)
421
+ pgd2.long_name = "Percent Good Beam 2"
422
+
423
+ pgd3 = ncfile.createVariable("pgd3", np.float32, ("time", "depth"), fill_value=-32768)
424
+ pgd3.long_name = "Percent Good Beam 3"
425
+
426
+ pgd4 = ncfile.createVariable("pgd4", np.float32, ("time", "depth"), fill_value=-32768)
427
+ pgd4.long_name = "Percent Good Beam 4"
428
+
322
429
  nctime = pd2nctime(time, t0)
323
430
  # write data
324
431
  z[:] = depth
@@ -328,6 +435,18 @@ def finalnc(
     wvel[:, :] = data[2, :, :].T
     evel[:, :] = data[3, :, :].T
     mvel[:, :] = final_mask.T
+    echo1[:, :] = final_echo[0, :, :].T
+    echo2[:, :] = final_echo[1, :, :].T
+    echo3[:, :] = final_echo[2, :, :].T
+    echo4[:, :] = final_echo[3, :, :].T
+    corr1[:, :] = final_corr[0, :, :].T
+    corr2[:, :] = final_corr[1, :, :].T
+    corr3[:, :] = final_corr[2, :, :].T
+    corr4[:, :] = final_corr[3, :, :].T
+    pgd1[:, :] = final_pgood[0, :, :].T
+    pgd2[:, :] = final_pgood[1, :, :].T
+    pgd3[:, :] = final_pgood[2, :, :].T
+    pgd4[:, :] = final_pgood[3, :, :].T

     # Add global attributes if provided
     if attributes:
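Note (editorial): with the changes above, `finalnc` now expects echo, correlation, and percent-good arrays between `final_mask` and `time`. A sketch of the expanded call with dummy arrays in the (beam, depth, time) layout this diff uses; sizes, paths, and the import path are hypothetical:

```python
import numpy as np
import pandas as pd
import pyadps.utils.writenc as wr  # import path assumed from the package layout

beams, depths, times = 4, 10, 100
velocity = np.zeros((beams, depths, times), dtype=np.int16)
echo = np.zeros((beams, depths, times))
corr = np.zeros((beams, depths, times))
pgood = np.zeros((beams, depths, times))
mask = np.zeros((depths, times))
depth_axis = np.arange(depths) * 4.0  # illustrative depth bins (m)
date = pd.date_range("2024-01-01", periods=times, freq="h")

wr.finalnc("processed.nc", depth_axis, mask, echo, corr, pgood, date, velocity)
```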
@@ -289,8 +289,8 @@ tab1, tab2, tab3, tab4, tab5, tab6, tab7, tab8 = st.tabs(
         "Salinity",
         "Temperature",
         "Heading",
-        "Roll",
         "Pitch",
+        "Roll",
         "Corrections",
         "Save/Reset",
     ]