pyadps 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl

@@ -0,0 +1,244 @@
+ """
+ ADCP (Acoustic Doppler Current Profiler) File Processor
+ A clean, maintainable implementation for processing and combining ADCP binary files.
+ """
+
+ from pathlib import Path
+ from dataclasses import dataclass
+ from typing import List, Optional, Union
+
+ # Import from our separate logging module
+ from .logging_utils import LogLevel, get_console_logger
+
+
+ @dataclass
+ class ADCPConfig:
+ """Configuration for ADCP file processing"""
+
+ file_extension: str = "*.000"
+ header_signature: bytes = b"\x7f\x7f"
+ header_signature_ext: bytes = b"\x7f\x7f\xf0\x02"
+ ensemble_size_offset: int = 2
+ ensemble_size_length: int = 2
+ header_size_adjustment: int = 2
+ chunk_size: int = 8192 # For large file processing
+
+
+ class ADCPError(Exception):
+ """Base exception for ADCP processing errors"""
+
+ pass
+
+
+ class InvalidHeaderError(ADCPError):
+ """Raised when ADCP file has invalid header"""
+
+ pass
+
+
+ class CorruptedFileError(ADCPError):
+ """Raised when ADCP file is corrupted"""
+
+ pass
+
+
+ class ADCPFileValidator:
+ """Validates ADCP files and headers"""
+
+ def __init__(self, config: ADCPConfig, logger_name: str = "adcp_validator"):
+ self.config = config
+ self.logger = get_console_logger(logger_name, LogLevel.INFO)
+
+ def find_header_start(self, data: bytes) -> int:
+ """Find the first occurrence of the extended header signature"""
+ return data.find(self.config.header_signature_ext)
+
+ def validate_file_path(self, filepath: Path) -> None:
+ """Validate file path exists and is accessible"""
+ if not filepath.exists():
+ raise FileNotFoundError(f"File {filepath} does not exist")
+ if not filepath.is_file():
+ raise ValueError(f"Path {filepath} is not a file")
+ if filepath.stat().st_size == 0:
+ raise ValueError(f"File {filepath} is empty")
+
+ def has_valid_header(self, data: bytes) -> bool:
+ """Check if data starts with valid ADCP header"""
+ return data.startswith(self.config.header_signature)
+
+
+ class ADCPFileProcessor:
+ """Processes individual ADCP files"""
+
+ def __init__(self, config: Optional[ADCPConfig] = None, logger_name: str = "adcp_processor"):
+ self.config = config or ADCPConfig()
+ self.validator = ADCPFileValidator(self.config, f"{logger_name}_validator")
+ self.logger = get_console_logger(logger_name, LogLevel.INFO)
+
+ def _calculate_ensemble_size(self, data: bytes) -> int:
+ """Calculate size of single ensemble from header"""
+ offset = self.config.ensemble_size_offset
+ length = self.config.ensemble_size_length
+ return (
+ int.from_bytes(data[offset : offset + length], byteorder="little")
+ + self.config.header_size_adjustment
+ )
+
+ def _validate_file_integrity(
+ self, filepath: Path, data: bytes, ensemble_size: int
+ ) -> int:
+ """Validate data integrity and return number of valid ensembles"""
+ data_size = len(data)
+ if data_size % ensemble_size != 0:
+ valid_ensembles = data_size // ensemble_size
+ self.logger.warning(
+ f"File {filepath.name} is corrupted. "
+ f"Valid ensembles: {valid_ensembles}/{valid_ensembles + 1}"
+ )
+ return valid_ensembles
+ return data_size // ensemble_size
+
+ def process_file(self, filepath: Union[str, Path]) -> bytes:
+ """Process a single ADCP file and return valid data"""
+ filepath = Path(filepath)
+ try:
+ self.validator.validate_file_path(filepath)
+
+ with open(filepath, "rb") as f:
+ data = f.read()
+
+ header_index = 0
+ # Check if file starts with valid header
+ if not self.validator.has_valid_header(data):
+ header_index = self.validator.find_header_start(data)
+ if header_index == -1:
+ raise InvalidHeaderError(
+ f"File {filepath.name} contains no valid ADCP header"
+ )
+ self.logger.warning(
+ f"File {filepath.name} header found at byte {header_index}. "
+ "Truncating invalid data before header."
+ )
+ else:
+ self.logger.info(f"Valid ADCP file: {filepath.name}")
+
+ # Calculate ensemble size and validate integrity of the data after the header
+ ensemble_size = self._calculate_ensemble_size(data[header_index:])
+ valid_ensembles = self._validate_file_integrity(
+ filepath, data[header_index:], ensemble_size
+ )
+
+ # Return only valid data
+ end_index = header_index + (valid_ensembles * ensemble_size)
+ return data[header_index:end_index]
+
+ except (InvalidHeaderError, FileNotFoundError, ValueError) as e:
+ self.logger.error(f"Error processing {filepath.name}: {e}")
+ return b""
+ except Exception as e:
+ self.logger.error(f"Unexpected error processing {filepath.name}: {e}")
+ return b""
+
+
+ class ADCPBinFileCombiner:
+ """Combines multiple ADCP files into a single valid byte stream"""
+
+ def __init__(self, config: Optional[ADCPConfig] = None, logger_name: str = "adcp_combiner"):
+ self.config = config or ADCPConfig()
+ self.processor = ADCPFileProcessor(self.config, f"{logger_name}_processor")
+ self.logger = get_console_logger(logger_name, LogLevel.INFO)
+ def get_adcp_files(self, folder_path: Union[str, Path]) -> List[Path]:
+ """Get all ADCP files from folder"""
+ folder_path = Path(folder_path)
+ if not folder_path.exists():
+ raise FileNotFoundError(f"Folder {folder_path} does not exist")
+ if not folder_path.is_dir():
+ raise NotADirectoryError(f"Path {folder_path} is not a directory")
+
+ files = sorted(folder_path.glob(self.config.file_extension))
+ if not files:
+ self.logger.error(
+ f"No {self.config.file_extension} files found in {folder_path}"
+ )
+ return files
+
+ def combine_files(self, files: List[Union[str, Path]]) -> bytearray:
+ """Combine multiple ADCP files into single bytearray"""
+ if not files:
+ self.logger.warning("No files provided for combination")
+ return bytearray()
+
+ combined_data = bytearray()
+ processed_count = 0
+
+ for file_path in files:
+ valid_data = self.processor.process_file(file_path)
+ if valid_data:
+ combined_data.extend(valid_data)
+ processed_count += 1
+
+ self.logger.info(f"Successfully combined {processed_count}/{len(files)} files")
+ return combined_data
+
+ def combine_folder(
+ self, folder_path: Union[str, Path], output_file: Union[str, Path]
+ ) -> bool:
+ """Combine all ADCP files from folder and write to output file"""
+ try:
+ files = self.get_adcp_files(folder_path)
+ if not files:
+ self.logger.error("No valid files found to combine")
+ return False
+
+ combined_data = self.combine_files(files)
+ if not combined_data:
+ self.logger.error("No valid data to write")
+ return False
+
+ output_path = Path(output_file)
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+
+ with open(output_path, "wb") as f:
+ f.write(combined_data)
+
+ self.logger.info(
+ # f"Successfully combined {len(files)} files. "
+ f"Output written to: {output_path} ({len(combined_data)} bytes)"
+ )
+ return True
+
+ except Exception as e:
+ self.logger.error(f"Error combining folder {folder_path}: {e}")
+ return False
+
+
+ def main():
+ """Main entry point for CLI usage"""
+ try:
+ folder = input("Enter folder containing ADCP files (*.000): ").strip()
+ if not folder:
+ print("No folder specified. Exiting.")
+ return
+
+ output = input("Enter output filename (default: merged_000.000): ").strip()
+ if not output:
+ output = "merged_000.000"
+
+ # Create combiner with custom logger configuration
+ combiner = ADCPBinFileCombiner(logger_name="adcp_main")
+ success = combiner.combine_folder(folder, output)
+
+ if success:
+ print(f"✅ Files successfully combined to {output}")
+ else:
+ print("❌ Failed to combine files. Check logs for details.")
+
+ except KeyboardInterrupt:
+ print("\n⚠️ Operation cancelled by user.")
+ except Exception as e:
+ print(f"❌ Unexpected error: {e}")
+
+
+ if __name__ == "__main__":
+ main()
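
Note: the new module above (its filename inside the wheel is not shown in this diff) exposes a small object API. A minimal usage sketch, assuming the module is importable and that `.logging_utils` provides `get_console_logger` as imported above; folder and output names are illustrative:

    from pathlib import Path

    # Hypothetical import; the diff does not name the new file.
    # from pyadps.utils.adcp_processor import ADCPConfig, ADCPBinFileCombiner

    config = ADCPConfig(file_extension="*.000")  # defaults shown in the dataclass
    combiner = ADCPBinFileCombiner(config=config, logger_name="merge_demo")

    # Validates each *.000 file, drops corrupted trailing ensembles,
    # and writes one merged binary file.
    ok = combiner.combine_folder(Path("raw_adcp"), Path("merged/merged_000.000"))
    print("merged" if ok else "failed; see logger output")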
pyadps/utils/plotgen.py CHANGED
@@ -4,7 +4,7 @@ import matplotlib.pyplot as plt
  from matplotlib.widgets import Button, RadioButtons, Slider, TextBox
  from matplotlib.widgets import RectangleSelector
 
- mpl.use("TkAgg")
+ # mpl.use("TkAgg")
 
 
  class CutBins:
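
Removing the hard-coded `mpl.use("TkAgg")` lets matplotlib (or an embedding application) pick its own backend and avoids import-time failures on headless machines. A sketch of the opt-in pattern callers can use instead; the environment check below is an assumption, not part of pyadps:

    import os
    import matplotlib

    # Choose an interactive backend only when a display is likely available;
    # fall back to the non-interactive Agg backend otherwise.
    if os.name == "nt" or os.environ.get("DISPLAY"):
        matplotlib.use("TkAgg")
    else:
        matplotlib.use("Agg")

    import matplotlib.pyplot as plt  # import pyplot only after choosing the backend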
pyadps/utils/pyreadrdi.py CHANGED
@@ -3,17 +3,17 @@ pyreadrdi.py
 
  Module Overview
  ---------------
- This module provides functionalities to read and parse RDI ADCP files.
- It includes functions for reading file headers, fixed and variable leaders,
+ This module provides functionalities to read and parse RDI ADCP files.
+ It includes functions for reading file headers, fixed and variable leaders,
  and data types like velocity, correlation, echo intensity, and percent good.
- Currently reads only PD0 format.
+ Currently reads only PD0 format.
 
  Modules
  -------------------
  - fileheader: Function to read and parse the file header information.
  - fixedleader: Function to read and parse the fixed leader section of an RDI file.
  - variableleader: Function to read and parse the variable leader section of an RDI file.
- - datatype: Function to read and parse 3D data types.
+ - datatype: Function to read and parse 3D data types.
  - ErrorCode: Enum class to define and manage error codes for file operations.
 
  Creation Date
@@ -57,7 +57,7 @@ Examples
  >>> vel_data = datatype('example.rdi', "velocity")
  >>> vel_data = datatype('example.rdi', "echo", beam=4, cell=20)
 
- Other add-on functions and classes inlcude bcolors, safe_open, and ErrorCode.
+ Other add-on functions and classes include bcolors, safe_open, and ErrorCode.
  Examples (add-on)
  -------------------
  >>> error = ErrorCode.FILE_NOT_FOUND
@@ -403,72 +403,85 @@ def fileheader(rdi_file):
  bfile.seek(0, 0)
  bskip = i = 0
  hid = [None] * 5
- while byt := bfile.read(6):
- hid[0], hid[1], hid[2], hid[3], hid[4] = unpack("<BBHBB", byt)
- headerid = np.append(headerid, np.int8(hid[0]))
- sourceid = np.append(sourceid, np.int16(hid[1]))
- byte = np.append(byte, np.int16(hid[2]))
- spare = np.append(spare, np.int16(hid[3]))
- datatype = np.append(datatype, np.int16(hid[4]))
-
- # dbyte = bfile.read(2 * datatype[i])
- dbyte, error = safe_read(bfile, 2 * datatype[i])
- if dbyte is None:
+ try:
+ while byt := bfile.read(6):
+ hid[0], hid[1], hid[2], hid[3], hid[4] = unpack("<BBHBB", byt)
+ headerid = np.append(headerid, np.int8(hid[0]))
+ sourceid = np.append(sourceid, np.int16(hid[1]))
+ byte = np.append(byte, np.int16(hid[2]))
+ spare = np.append(spare, np.int16(hid[3]))
+ datatype = np.append(datatype, np.int16(hid[4]))
+
+ # dbyte = bfile.read(2 * datatype[i])
+ dbyte, error = safe_read(bfile, 2 * datatype[i])
+ if dbyte is None:
+ if i == 0:
+ error_code = error.code
+ dummytuple = ([], [], [], [], [], ensemble, error_code)
+ return dummytuple
+ else:
+ break
+
+ # Check for id and datatype errors
  if i == 0:
- error_code = error.code
- dummytuple = ([], [], [], [], [], ensemble, error_code)
- return dummytuple
+ if headerid[0] != 127 or sourceid[0] != 127:
+ error = ErrorCode.WRONG_RDIFILE_TYPE
+ print(bcolors.FAIL + error.message + bcolors.ENDC)
+ error_code = error.code
+ dummytuple = ([], [], [], [], [], ensemble, error_code)
+ return dummytuple
  else:
- break
+ if headerid[i] != 127 or sourceid[i] != 127:
+ error = ErrorCode.ID_NOT_FOUND
+ print(bcolors.FAIL + error.message)
+ print(f"Ensembles reset to {i}" + bcolors.ENDC)
+ break
+
+ if datatype[i] != datatype[i - 1]:
+ error = ErrorCode.DATATYPE_MISMATCH
+ print(bcolors.FAIL + error.message)
+ print(f"Data Types for ensemble {i} is {datatype[i - 1]}.")
+ print(f"Data Types for ensemble {i + 1} is {datatype[i]}.")
+ print(f"Ensembles reset to {i}" + bcolors.ENDC)
+ break
 
- # Check for id and datatype errors
- if i == 0:
- if headerid[0] != 127 or sourceid[0] != 127:
- error = ErrorCode.WRONG_RDIFILE_TYPE
- print(bcolors.FAIL + error.message + bcolors.ENDC)
+ try:
+ data = unpack("H" * datatype[i], dbyte)
+ address_offset.append(data)
+ except:
+ error = ErrorCode.FILE_CORRUPTED
  error_code = error.code
  dummytuple = ([], [], [], [], [], ensemble, error_code)
  return dummytuple
- else:
- if headerid[i] != 127 or sourceid[i] != 127:
- error = ErrorCode.ID_NOT_FOUND
- print(bcolors.FAIL + error.message + bcolors.ENDC)
- break
-
- if datatype[i] != datatype[i - 1]:
- error = ErrorCode.DATATYPE_MISMATCH
- print(bcolors.FAIL + error.message)
- print(f"Data Types for ensemble {i} is {datatype[i - 1]}.")
- print(f"Data Types for ensemble {i + 1} is {datatype[i]}.")
- print(f"Ensembles reset to {i}" + bcolors.ENDC)
- break
-
- try:
- data = unpack("H" * datatype[i], dbyte)
- address_offset.append(data)
- except:
- error = ErrorCode.FILE_CORRUPTED
- error_code = error.code
- dummytuple = ([], [], [], [], [], ensemble, error_code)
- return dummytuple
-
- skip_array = [None] * datatype[i]
- for dtype in range(datatype[i]):
- bseek = int(bskip) + int(address_offset[i][dtype])
- bfile.seek(bseek, 0)
- readbyte = bfile.read(2)
- skip_array[dtype] = int.from_bytes(
- readbyte, byteorder="little", signed=False
- )
-
- dataid.append(skip_array)
- # bytekip is the number of bytes to skip to reach
- # an ensemble from beginning of file.
- # ?? Should byteskip be from current position ??
- bskip = int(bskip) + int(byte[i]) + 2
- bfile.seek(bskip, 0)
- byteskip = np.append(byteskip, np.int32(bskip))
- i += 1
+
+ skip_array = [None] * datatype[i]
+ for dtype in range(datatype[i]):
+ bseek = int(bskip) + int(address_offset[i][dtype])
+ bfile.seek(bseek, 0)
+ readbyte = bfile.read(2)
+ skip_array[dtype] = int.from_bytes(
+ readbyte, byteorder="little", signed=False
+ )
+
+ dataid.append(skip_array)
+ # byteskip is the number of bytes to skip to reach
+ # an ensemble from the beginning of the file.
+ # ?? Should byteskip be from current position ??
+ bskip = int(bskip) + int(byte[i]) + 2
+ bfile.seek(bskip, 0)
+ byteskip = np.append(byteskip, np.int32(bskip))
+ i += 1
+ except (ValueError, StructError, OverflowError) as e:
+ # except:
+ print(bcolors.WARNING + "WARNING: The file is broken.")
+ print(
+ f"Function `fileheader` unable to extract data for ensemble {i + 1}. Total ensembles reset to {i}."
+ )
+ print(bcolors.UNDERLINE + "Details from struct function" + bcolors.ENDC)
+ print(f" Error Type: {type(e).__name__}")
+ print(f" Error Details: {e}")
+ error = ErrorCode.FILE_CORRUPTED
+ ensemble = i
 
  ensemble = i
  bfile.close()
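
The restructuring above moves the whole ensemble-reading loop into a single `try` block, so a `ValueError`, `struct.error`, or `OverflowError` raised while parsing a truncated tail now resets the ensemble count to the last fully decoded ensemble instead of aborting the read. A minimal, self-contained sketch of that recovery pattern (the function name and fixed ensemble size are assumptions, not pyadps API):

    from struct import unpack, error as StructError

    def count_valid_ensembles(buf: bytes, ens_size: int) -> int:
        """Count leading ensembles that parse cleanly; stop at the first bad one."""
        i = 0
        try:
            while (start := i * ens_size) + 6 <= len(buf):
                # A PD0 ensemble starts with header id 0x7f and source id 0x7f.
                header_id, source_id, nbytes, spare, ndatatypes = unpack(
                    "<BBHBB", buf[start : start + 6]
                )
                if header_id != 0x7F or source_id != 0x7F:
                    break  # lost sync: keep only the ensembles before this point
                i += 1
        except (ValueError, StructError, OverflowError):
            pass  # corrupted tail: i already holds the last good count
        return i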
@@ -612,13 +625,14 @@ def fixedleader(rdi_file, byteskip=None, offset=None, idarray=None, ensemble=0):
 
  bfile.seek(byteskip[i], 0)
 
- except (ValueError, StructError) as e:
+ except (ValueError, StructError, OverflowError) as e:
  print(bcolors.WARNING + "WARNING: The file is broken.")
  print(
  f"Function `fixedleader` unable to extract data for ensemble {i + 1}. Total ensembles reset to {i}."
  )
- print("Details from struct function:")
- print(f"An error occurred: {e}" + bcolors.ENDC)
+ print(bcolors.UNDERLINE + "Details from struct function" + bcolors.ENDC)
+ print(f" Error Type: {type(e).__name__}")
+ print(f" Error Details: {e}")
  error = ErrorCode.FILE_CORRUPTED
  ensemble = i
 
@@ -794,13 +808,14 @@ def variableleader(rdi_file, byteskip=None, offset=None, idarray=None, ensemble=
 
  bfile.seek(byteskip[i], 0)
 
- except (ValueError, StructError) as e:
+ except (ValueError, StructError, OverflowError) as e:
  print(bcolors.WARNING + "WARNING: The file is broken.")
  print(
  f"Function `variableleader` unable to extract data for ensemble {i + 1}. Total ensembles reset to {i}."
  )
- print("Details from struct function:")
- print(f"An error occurred: {e}" + bcolors.ENDC)
+ print(bcolors.UNDERLINE + "Details from struct function" + bcolors.ENDC)
+ print(f" Error Type: {type(e).__name__}")
+ print(f" Error Details: {e}")
  error = ErrorCode.FILE_CORRUPTED
  ensemble = i
 
@@ -910,7 +925,9 @@ def datatype(
  # Velocity is 16 bits and all others are 8 bits.
  # Create empty array for the chosen variable name.
  if var_name == "velocity":
- var_array = np.full((int(max(beam)), int(max(cell)), ensemble), -32768, dtype="int16")
+ var_array = np.full(
+ (int(max(beam)), int(max(cell)), ensemble), -32768, dtype="int16"
+ )
  bitstr = "<h"
  bitint = 2
  else: # inserted
@@ -957,16 +974,28 @@
  return (var_array, error.code)
 
  # READ DATA
- for i in range(ensemble):
- bfile.seek(fbyteskip[i], 1)
- bdata = bfile.read(2)
- for cno in range(int(cell[i])):
- for bno in range(int(beam[i])):
- bdata = bfile.read(bitint)
- varunpack = unpack(bitstr, bdata)
- var_array[bno][cno][i] = varunpack[0]
- bfile.seek(byteskip[i], 0)
- bfile.close()
+ i = 0
+ try:
+ for i in range(ensemble):
+ bfile.seek(fbyteskip[i], 1)
+ bdata = bfile.read(2)
+ for cno in range(int(cell[i])):
+ for bno in range(int(beam[i])):
+ bdata = bfile.read(bitint)
+ varunpack = unpack(bitstr, bdata)
+ var_array[bno][cno][i] = varunpack[0]
+ bfile.seek(byteskip[i], 0)
+ bfile.close()
+ except (ValueError, StructError, OverflowError) as e:
+ print(bcolors.WARNING + "WARNING: The file is broken.")
+ print(
+ f"Function `datatype` unable to extract {var_name} for ensemble {i + 1}. Total ensembles reset to {i}."
+ )
+ print(bcolors.UNDERLINE + "Details from struct function" + bcolors.ENDC)
+ print(f" Error Type: {type(e).__name__}")
+ print(f" Error Details: {e}")
+ error = ErrorCode.FILE_CORRUPTED
+ ensemble = i
 
- data = var_array
+ data = var_array[:, :, :ensemble]
  return (data, ensemble, cell, beam, error_code)
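
The other behavioural change in `datatype` is the final line: instead of returning the full pre-allocated array, it slices away ensembles that were never decoded, so a corrupted tail no longer leaves fill values in the result. A small numpy sketch of the idea (shapes and counts are illustrative):

    import numpy as np

    beams, cells, requested = 4, 20, 100
    # Pre-allocate with the RDI fill value used above for velocity.
    var_array = np.full((beams, cells, requested), -32768, dtype="int16")

    decoded = 42  # e.g. reading stopped early on a broken ensemble
    data = var_array[:, :, :decoded]  # keep only fully decoded ensembles
    assert data.shape == (4, 20, 42)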
pyadps/utils/readrdi.py CHANGED
@@ -103,6 +103,7 @@ import sys
  import numpy as np
  import pandas as pd
  from pyadps.utils import pyreadrdi
+ from pyadps.utils.pyreadrdi import bcolors
 
 
  class DotDict:
@@ -773,7 +774,7 @@ def vlead_dict(vid):
  "MPT Minute": "int16",
  "MPT Second": "int16",
  "MPT Hundredth": "int16",
- "Hdg Std Dev": "int16",
+ "Head Std Dev": "int16",
  "Pitch Std Dev": "int16",
  "Roll Std Dev": "int16",
  "ADC Channel 0": "int16",
@@ -1358,7 +1359,7 @@ class ReadFile:
  The RDI ADCP binary file to be read.
  """
 
- def __init__(self, filename):
+ def __init__(self, filename, is_fix_ensemble=True):
  """
  Initializes the ReadFile object and extracts data from the RDI ADCP binary file.
  """
@@ -1534,6 +1535,10 @@
  # Add attribute that lists all variables/functions
  self.list_vars = list(vars(self).keys())
 
+ # By default fix ensemble
+ if is_fix_ensemble and not self.isEnsembleEqual:
+ self.fixensemble()
+
  def _copy_attributes_from_var(self):
  for attr_name, attr_value in self.variableleader.__dict__.items():
  # Copy each attribute of var into self
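
With the new `is_fix_ensemble` flag defaulting to True, a file whose data types report different ensemble counts is now trimmed automatically at load time. A usage sketch (the file name is illustrative):

    from pyadps.utils.readrdi import ReadFile

    ds = ReadFile("example.000")                          # trims automatically if counts differ
    raw = ReadFile("example.000", is_fix_ensemble=False)  # keep the raw, possibly unequal, counts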
@@ -1552,6 +1557,18 @@
  f"'{self.__class__.__name__}' object has no attribute '{name}'"
  )
 
+ def resize_fixedleader(self, newshape):
+ for key in self.fixedleader.fleader:
+ attr_name = key.lower().replace(" ", "_")
+ attr_obj = getattr(self.fixedleader, attr_name)
+ attr_obj.data = attr_obj.data[:newshape]
+
+ def resize_variableleader(self, newshape):
+ for key in self.variableleader.vleader:
+ attr_name = key.lower().replace(" ", "_")
+ attr_obj = getattr(self.variableleader, attr_name)
+ attr_obj.data = attr_obj.data[:newshape]
+
  def fixensemble(self, min_cutoff=0):
  """
  Fixes the ensemble size across all data types in the file if they differ.
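
Both helpers rely on the attribute-naming convention used throughout `ReadFile`: each leader-dictionary key maps to a lower-cased, underscored attribute whose `.data` array is sliced to the new ensemble count. The mapping itself is just:

    key = "MPT Minute"                         # any fixed/variable leader key
    attr_name = key.lower().replace(" ", "_")  # -> "mpt_minute"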
@@ -1582,10 +1599,18 @@
  self.fileheader.dataid = self.fileheader.dataid[:minens, :]
  if "Fixed Leader" in datatype_array:
  self.fixedleader.data = self.fixedleader.data[:, :minens]
+ self.fixedleader.fleader = {
+ k: v[:minens] for k, v in self.fixedleader.fleader.items()
+ }
  self.fixedleader.ensembles = minens
+ self.resize_fixedleader(minens)
  if "Variable Leader" in datatype_array:
  self.variableleader.data = self.variableleader.data[:, :minens]
+ self.variableleader.vleader = {
+ k: v[:minens] for k, v in self.variableleader.vleader.items()
+ }
  self.variableleader.ensembles = minens
+ self.resize_variableleader(minens)
  if "Velocity" in datatype_array:
  self.velocity.data = self.velocity.data[:, :, :minens]
  self.velocity.ensembles = minens
@@ -1601,7 +1626,13 @@
  if "Status" in datatype_array:
  self.status.data = self.status.data[:, :, :minens]
  self.status.ensembles = minens
- print(f"Ensembles fixed to {minens}. All data types have same ensembles.")
+
+ self.time = self.time[:minens]
+ print(
+ bcolors.OKBLUE
+ + f"Ensembles fixed to {minens}. All data types have same ensembles."
+ + bcolors.ENDC
+ )
  else:
  print(
  "WARNING: No response was initiated. All data types have same ensemble."
pyadps/utils/signal_quality.py CHANGED
@@ -54,7 +54,7 @@ def qc_check(var, mask, cutoff=0):
  return mask
 
 
- def correlation_check(ds, mask, cutoff=64):
+ def correlation_check(ds, mask, cutoff, threebeam, beam_ignore=None):
  """
  Perform a correlation check on the provided variable and update the
  mask to mark valid and invalid values based on a cutoff threshold.
@@ -93,10 +93,15 @@ def correlation_check(ds, mask, cutoff=64):
  >>> outmask = correlation_check(ds, mask, cutoff=9999)
  """
  correlation = ds.correlation.data
+ if threebeam:
+ if beam_ignore is None:
+ correlation = correlation
+ else:
+ correlation = np.delete(correlation, beam_ignore, axis=0)
  mask = qc_check(correlation, mask, cutoff=cutoff)
  return mask
 
- def echo_check(ds, mask, cutoff=40):
+ def echo_check(ds, mask, cutoff, threebeam, beam_ignore=None):
  """
  Perform an echo intensity check on the provided variable and update the
  mask to mark valid and invalid values based on a cutoff threshold.
@@ -137,6 +142,11 @@ def echo_check(ds, mask, cutoff=40):
  """
 
  echo = ds.echo.data
+ if threebeam:
+ if beam_ignore is None:
+ echo = echo
+ else:
+ echo = np.delete(echo, beam_ignore, axis=0)
  mask = qc_check(echo, mask, cutoff=cutoff)
  return mask
 
@@ -246,7 +256,7 @@ def pg_check(ds, mask, cutoff=0, threebeam=True):
  return mask
 
 
- def false_target(ds, mask, cutoff=255, threebeam=True):
+ def false_target(ds, mask, cutoff=255, threebeam=True, beam_ignore=None):
  """
  Apply a false target detection algorithm based on echo intensity values.
  This function identifies invalid or false targets in the data and updates
@@ -292,12 +302,14 @@ def false_target(ds, mask, cutoff=255, threebeam=True):
  """
 
  echo = ds.echo.data
+ if beam_ignore is not None:
+ echo = np.delete(echo, beam_ignore, axis=0)
 
  shape = np.shape(echo)
  for i in range(shape[1]):
  for j in range(shape[2]):
  x = np.sort(echo[:, i, j])
- if threebeam:
+ if threebeam and beam_ignore is None:
  if x[-1] - x[1] > cutoff:
  mask[i, j] = 1
  else:
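
Taken together, the QC changes make `cutoff` and `threebeam` explicit arguments of `correlation_check` and `echo_check` and add `beam_ignore`, which removes one beam (axis 0 of the beam × cell × ensemble arrays) before masking. A usage sketch; the import path and cutoff values are assumptions:

    import numpy as np
    # from pyadps.utils.signal_quality import correlation_check, echo_check, false_target

    # ds: a ReadFile instance (see readrdi.py above); shape is illustrative.
    mask = np.zeros((20, 100), dtype=int)  # (cells, ensembles)

    # Ignore beam index 3 for an instrument run in three-beam mode.
    mask = correlation_check(ds, mask, cutoff=64, threebeam=True, beam_ignore=3)
    mask = echo_check(ds, mask, cutoff=40, threebeam=True, beam_ignore=3)
    mask = false_target(ds, mask, cutoff=255, threebeam=True, beam_ignore=3)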