pyadps 0.2.1b0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. pyadps/Home_Page.py +11 -5
  2. pyadps/pages/01_Read_File.py +623 -215
  3. pyadps/pages/02_View_Raw_Data.py +97 -41
  4. pyadps/pages/03_Download_Raw_File.py +200 -67
  5. pyadps/pages/04_Sensor_Health.py +905 -0
  6. pyadps/pages/05_QC_Test.py +493 -0
  7. pyadps/pages/06_Profile_Test.py +971 -0
  8. pyadps/pages/07_Velocity_Test.py +600 -0
  9. pyadps/pages/08_Write_File.py +623 -0
  10. pyadps/pages/09_Add-Ons.py +168 -0
  11. pyadps/utils/__init__.py +5 -3
  12. pyadps/utils/autoprocess.py +371 -80
  13. pyadps/utils/logging_utils.py +269 -0
  14. pyadps/utils/metadata/config.ini +22 -4
  15. pyadps/utils/metadata/demo.000 +0 -0
  16. pyadps/utils/metadata/flmeta.json +420 -420
  17. pyadps/utils/metadata/vlmeta.json +611 -565
  18. pyadps/utils/multifile.py +292 -0
  19. pyadps/utils/plotgen.py +505 -3
  20. pyadps/utils/profile_test.py +720 -125
  21. pyadps/utils/pyreadrdi.py +164 -92
  22. pyadps/utils/readrdi.py +436 -186
  23. pyadps/utils/script.py +197 -147
  24. pyadps/utils/sensor_health.py +120 -0
  25. pyadps/utils/signal_quality.py +472 -68
  26. pyadps/utils/velocity_test.py +79 -31
  27. pyadps/utils/writenc.py +222 -39
  28. {pyadps-0.2.1b0.dist-info → pyadps-0.3.0.dist-info}/METADATA +13 -14
  29. pyadps-0.3.0.dist-info/RECORD +35 -0
  30. {pyadps-0.2.1b0.dist-info → pyadps-0.3.0.dist-info}/WHEEL +1 -1
  31. {pyadps-0.2.1b0.dist-info → pyadps-0.3.0.dist-info}/entry_points.txt +1 -0
  32. pyadps/pages/04_QC_Test.py +0 -334
  33. pyadps/pages/05_Profile_Test.py +0 -575
  34. pyadps/pages/06_Velocity_Test.py +0 -341
  35. pyadps/pages/07_Write_File.py +0 -452
  36. pyadps/utils/cutbin.py +0 -413
  37. pyadps/utils/regrid.py +0 -279
  38. pyadps-0.2.1b0.dist-info/RECORD +0 -31
  39. {pyadps-0.2.1b0.dist-info → pyadps-0.3.0.dist-info}/LICENSE +0 -0
@@ -0,0 +1,292 @@
1
+ """
2
+ ADCP (Acoustic Doppler Current Profiler) File Processor
3
+ A Python implementation for processing and combining ADCP binary files.
4
+ """
5
+
6
+ from pathlib import Path
7
+ from dataclasses import dataclass
8
+ from typing import List, Union
9
+ import argparse # Import argparse module
10
+
11
+
12
+ # Import from our separate logging module
13
+ from .logging_utils import LogLevel, get_console_logger
14
+
15
+
16
@dataclass
class ADCPConfig:
    """Tunable constants used when parsing and combining ADCP binary files."""

    # Glob pattern used to discover raw ADCP files in a folder.
    file_extension: str = "*.000"
    # Two-byte magic that every valid ensemble header starts with.
    header_signature: bytes = b"\x7f\x7f"
    # Extended signature used to locate a header inside leading junk bytes.
    header_signature_ext: bytes = b"\x7f\x7f\xf0\x02"
    # Byte offset of the little-endian ensemble-size field within the header.
    ensemble_size_offset: int = 2
    # Width, in bytes, of the ensemble-size field.
    ensemble_size_length: int = 2
    # The size field excludes the trailing checksum; add this to get the true size.
    header_size_adjustment: int = 2
    # Read granularity for large-file processing.
    chunk_size: int = 8192
27
+
28
+
29
class ADCPError(Exception):
    """Root of the exception hierarchy for ADCP processing failures."""
33
+
34
+
35
class InvalidHeaderError(ADCPError):
    """Raised when a file contains no recognizable ADCP header."""
39
+
40
+
41
class CorruptedFileError(ADCPError):
    """Raised when a file's contents are inconsistent with its header."""
45
+
46
+
47
class ADCPFileValidator:
    """Sanity checks for ADCP file paths and raw header bytes."""

    def __init__(
        self,
        config: ADCPConfig,
        logger_name: str = "adcp_validator",
        logger_level: LogLevel = LogLevel.INFO,
    ):
        self.config = config
        self.logger = get_console_logger(logger_name, logger_level)

    def find_header_start(self, data: bytes) -> int:
        """Return the offset of the first extended header signature, or -1."""
        signature = self.config.header_signature_ext
        return data.find(signature)

    def validate_file_path(self, filepath: Path) -> None:
        """Raise if *filepath* is missing, is not a regular file, or is empty."""
        # Guard clauses, cheapest check first.
        if not filepath.exists():
            raise FileNotFoundError(f"File {filepath} does not exist")
        if not filepath.is_file():
            raise ValueError(f"Path {filepath} is not a file")
        if filepath.stat().st_size == 0:
            raise ValueError(f"File {filepath} is empty")

    def has_valid_header(self, data: bytes) -> bool:
        """True when *data* begins with the two-byte ADCP header magic."""
        signature = self.config.header_signature
        # Equivalent to data.startswith(signature).
        return data[: len(signature)] == signature
75
+
76
+
77
class ADCPFileProcessor:
    """Processes individual ADCP files.

    Reads a raw binary file, locates the first valid ensemble header
    (truncating any leading junk), and returns only the bytes that form
    complete ensembles.
    """

    def __init__(
        self,
        config: ADCPConfig = None,
        logger_name: str = "adcp_processor",
        logger_level: LogLevel = LogLevel.INFO,
    ):
        self.config = config or ADCPConfig()
        self.validator = ADCPFileValidator(self.config, f"{logger_name}_validator")
        self.logger = get_console_logger(logger_name, logger_level)

    def _calculate_ensemble_size(self, data: bytes) -> int:
        """Read the per-ensemble byte count from the header at the start of *data*.

        The little-endian size field excludes the trailing checksum, so
        ``header_size_adjustment`` is added to obtain the true ensemble size.

        Raises:
            CorruptedFileError: if the size field is zero/garbage (which would
                otherwise cause a ZeroDivisionError downstream).
        """
        offset = self.config.ensemble_size_offset
        length = self.config.ensemble_size_length
        size = (
            int.from_bytes(data[offset : offset + length], byteorder="little")
            + self.config.header_size_adjustment
        )
        if size <= self.config.header_size_adjustment:
            raise CorruptedFileError(f"Invalid ensemble size ({size}) in header")
        return size

    def _validate_file_integrity(
        self, filepath: Path, data: bytes, ensemble_size: int
    ) -> int:
        """Return the number of complete ensembles contained in *data*.

        BUG FIX: the original measured ``filepath.stat().st_size`` even after
        leading junk had been truncated, so the ensemble count (and hence the
        slice of bytes returned by ``process_file``) was wrong whenever the
        header was not at byte 0. Counting ``len(data)`` — the payload that
        actually starts at a header — fixes that.
        """
        complete, remainder = divmod(len(data), ensemble_size)
        if remainder:
            self.logger.warning(
                f"File {filepath.name} is corrupted. "
                f"Valid ensembles: {complete}/{complete + 1}"
            )
        return complete

    def process_file(self, filepath: Union[str, Path]) -> bytes:
        """Process a single ADCP file and return its valid ensemble bytes.

        Returns ``b""`` (and logs an error) on any validation or I/O failure,
        so callers can safely concatenate results.
        """
        filepath = Path(filepath)
        try:
            self.validator.validate_file_path(filepath)

            with open(filepath, "rb") as f:
                data = f.read()

            header_index = 0
            # If the file does not start with a header, search for one and
            # drop everything before it.
            if not self.validator.has_valid_header(data):
                header_index = self.validator.find_header_start(data)
                if header_index == -1:
                    raise InvalidHeaderError(
                        f"File {filepath.name} contains no valid ADCP header"
                    )
                self.logger.warning(
                    f"File {filepath.name} header found at byte {header_index}. "
                    "Truncating invalid data before header."
                )
            else:
                self.logger.info(f"Valid ADCP file: {filepath.name}")

            # Work only on the payload that starts at the first real header so
            # the integrity check counts bytes that can actually hold ensembles.
            payload = data[header_index:]
            ensemble_size = self._calculate_ensemble_size(payload)
            valid_ensembles = self._validate_file_integrity(
                filepath, payload, ensemble_size
            )

            # Return only whole ensembles; a trailing partial one is dropped.
            return payload[: valid_ensembles * ensemble_size]

        except (
            InvalidHeaderError,
            CorruptedFileError,
            FileNotFoundError,
            ValueError,
        ) as e:
            self.logger.error(f"Error processing {filepath.name}: {e}")
            return b""
        except Exception as e:
            self.logger.error(f"Unexpected error processing {filepath.name}: {e}")
            return b""
153
+
154
+
155
class ADCPBinFileCombiner:
    """Combines multiple ADCP binary files into one contiguous byte stream."""

    def __init__(
        self,
        config: ADCPConfig = None,
        logger_name: str = "adcp_combiner",
        logger_level: LogLevel = LogLevel.INFO,
    ):
        self.config = config or ADCPConfig()
        self.processor = ADCPFileProcessor(self.config, f"{logger_name}_processor")
        self.logger = get_console_logger(logger_name, logger_level)

    def get_adcp_files(self, folder_path: Union[str, Path]) -> List[Path]:
        """Return the sorted ADCP files in *folder_path*.

        Raises:
            FileNotFoundError: if the folder does not exist.
            NotADirectoryError: if the path is not a directory.
        """
        folder_path = Path(folder_path)
        if not folder_path.exists():
            raise FileNotFoundError(f"Folder {folder_path} does not exist")
        if not folder_path.is_dir():
            raise NotADirectoryError(f"Path {folder_path} is not a directory")

        files = sorted(folder_path.glob(self.config.file_extension))
        if not files:
            # Logged (not raised) so callers can decide how to handle it.
            self.logger.error(
                f"No {self.config.file_extension} files found in {folder_path}"
            )
        return files

    def combine_files(self, files: List[Union[str, Path]]) -> bytearray:
        """Concatenate the valid ensemble bytes of every file in *files*.

        Files that fail processing contribute nothing (process_file returns
        ``b""`` on error), so the result contains only clean ensemble data.
        """
        if not files:
            self.logger.warning("No files provided for combination")
            return bytearray()

        combined_data = bytearray()
        processed_count = 0

        for file_path in files:
            valid_data = self.processor.process_file(file_path)
            if valid_data:
                combined_data.extend(valid_data)
                processed_count += 1

        self.logger.info(f"Successfully combined {processed_count}/{len(files)} files")
        return combined_data

    def combine_folder(
        self, folder_path: Union[str, Path], output_file: Union[str, Path]
    ) -> bool:
        """Combine all ADCP files from *folder_path* and write *output_file*.

        Returns True on success, False on any failure (which is logged).
        """
        try:
            files = self.get_adcp_files(folder_path)
            if not files:
                self.logger.error("No valid files found to combine")
                return False

            combined_data = self.combine_files(files)
            if not combined_data:
                self.logger.error("No valid data to write")
                return False

            output_path = Path(output_file)
            # Ensure the destination directory exists before writing.
            output_path.parent.mkdir(parents=True, exist_ok=True)

            with open(output_path, "wb") as f:
                f.write(combined_data)

            self.logger.info(
                f"Output written to: {output_path} ({len(combined_data)} bytes)"
            )
            return True

        except Exception as e:
            # Top-level boundary: log and report failure instead of raising.
            self.logger.error(f"Error combining folder {folder_path}: {e}")
            return False
234
+
235
+
236
def main():
    """CLI entry point: combine all ADCP files found in a folder.

    Parses the folder path, an optional output filename, and a verbosity
    count (-v → INFO, -vv → DEBUG, default WARNING), then runs the combiner.
    """
    parser = argparse.ArgumentParser(
        description="Combine multiple ADCP binary files into a single file."
    )
    # Positional argument for the input folder.
    parser.add_argument(
        "folder", type=str, help="Path to the folder containing ADCP files (*.000)."
    )
    # Optional argument for the output filename.
    parser.add_argument(
        "-o",
        "--output",
        type=str,
        default="merged_000.000",
        help="Output filename for the combined ADCP data (default: merged_000.000).",
    )
    # Each -v increments the count, so -vv yields 2.
    parser.add_argument(
        "-v",
        "--verbose",
        action="count",
        default=0,
        help="Increase verbosity level. Use -v for INFO, -vv for DEBUG. (Default: WARNING)",
    )

    args = parser.parse_args()

    # Map the -v count to a log level. With action="count" and default=0 the
    # value is always a non-negative int, so the original trailing else branch
    # was unreachable and has been removed.
    if args.verbose >= 2:
        log_level = LogLevel.DEBUG
    elif args.verbose == 1:
        log_level = LogLevel.INFO
    else:
        log_level = LogLevel.WARNING

    try:
        # Initialize the combiner with the level chosen above.
        combiner = ADCPBinFileCombiner(logger_name="adcp_main", logger_level=log_level)
        success = combiner.combine_folder(args.folder, args.output)

        if success:
            print(f"\n✅ Files successfully combined to {args.output}")
        else:
            print("\n❌ Failed to combine files. Check logs for details.")

    except Exception as e:
        # Last-resort boundary so a traceback never reaches the user raw.
        print(f"\n❌ An unhandled error occurred during script execution: {e}")


if __name__ == "__main__":
    main()