pyadps 0.2.0b0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. pyadps/Home_Page.py +11 -5
  2. pyadps/pages/01_Read_File.py +623 -211
  3. pyadps/pages/02_View_Raw_Data.py +97 -41
  4. pyadps/pages/03_Download_Raw_File.py +200 -67
  5. pyadps/pages/04_Sensor_Health.py +905 -0
  6. pyadps/pages/05_QC_Test.py +493 -0
  7. pyadps/pages/06_Profile_Test.py +971 -0
  8. pyadps/pages/07_Velocity_Test.py +600 -0
  9. pyadps/pages/08_Write_File.py +623 -0
  10. pyadps/pages/09_Add-Ons.py +168 -0
  11. pyadps/utils/__init__.py +5 -3
  12. pyadps/utils/autoprocess.py +371 -80
  13. pyadps/utils/logging_utils.py +269 -0
  14. pyadps/utils/metadata/config.ini +22 -4
  15. pyadps/utils/metadata/demo.000 +0 -0
  16. pyadps/utils/metadata/flmeta.json +420 -420
  17. pyadps/utils/metadata/vlmeta.json +611 -565
  18. pyadps/utils/multifile.py +292 -0
  19. pyadps/utils/plotgen.py +505 -3
  20. pyadps/utils/profile_test.py +720 -125
  21. pyadps/utils/pyreadrdi.py +164 -92
  22. pyadps/utils/readrdi.py +436 -186
  23. pyadps/utils/script.py +197 -147
  24. pyadps/utils/sensor_health.py +120 -0
  25. pyadps/utils/signal_quality.py +472 -68
  26. pyadps/utils/velocity_test.py +79 -31
  27. pyadps/utils/writenc.py +222 -39
  28. {pyadps-0.2.0b0.dist-info → pyadps-0.3.0.dist-info}/METADATA +63 -33
  29. pyadps-0.3.0.dist-info/RECORD +35 -0
  30. {pyadps-0.2.0b0.dist-info → pyadps-0.3.0.dist-info}/WHEEL +1 -1
  31. {pyadps-0.2.0b0.dist-info → pyadps-0.3.0.dist-info}/entry_points.txt +1 -0
  32. pyadps/pages/04_QC_Test.py +0 -334
  33. pyadps/pages/05_Profile_Test.py +0 -575
  34. pyadps/pages/06_Velocity_Test.py +0 -341
  35. pyadps/pages/07_Write_File.py +0 -452
  36. pyadps/utils/cutbin.py +0 -413
  37. pyadps/utils/regrid.py +0 -279
  38. pyadps-0.2.0b0.dist-info/RECORD +0 -31
  39. {pyadps-0.2.0b0.dist-info → pyadps-0.3.0.dist-info}/LICENSE +0 -0
pyadps/utils/signal_quality.py
@@ -1,112 +1,516 @@
+ """
+ Signal quality control module for ADCP data processing.
+
+ This module provides quality control functions for Acoustic Doppler Current Profiler
+ (ADCP) data, including echo intensity, correlation, error velocity, and percent-good checks.
+ """
+
+ from typing import Optional, Union
  import numpy as np
+ from numpy.typing import NDArray
+
  from pyadps.utils.plotgen import PlotNoise
+ from pyadps.utils.readrdi import ReadFile
+
+ # Constants
+ DEFAULT_ECHO_THRESHOLD = 0
+ DEFAULT_CORRELATION_THRESHOLD = 64
+ DEFAULT_ERROR_VELOCITY_THRESHOLD = 9999
+ DEFAULT_PERCENT_GOOD_THRESHOLD = 0
+ DEFAULT_FALSE_TARGET_THRESHOLD = 255
+ MISSING_VALUE_THRESHOLD = -32767
+ MAX_VELOCITY_VALUE = 32768
+
+ # Threshold ranges for validation
+ THRESHOLD_RANGES = {
+     "Echo Intensity Thresh": (0, 255),
+     "Echo Thresh": (0, 255),
+     "Correlation Thresh": (0, 255),
+     "False Target Thresh": (0, 255),
+     "Percent Good Min": (0, 100),
+     "Error Velocity Thresh": (0, 5000),
+ }
 
 
- def qc_check(var, mask, cutoff=0):
+ def qc_check(
+     var: NDArray[np.float64], mask: NDArray[np.int32], cutoff: float = 0
+ ) -> NDArray[np.int32]:
      """
-     The module returns the modified mask file after checking the cutoff criteria.
-     All values less than the cuttoff are masked.
+     Perform a quality control check on the provided data and update the mask
+     based on a cutoff threshold. Values in `var` that are less than the cutoff
+     are marked as invalid in the mask.

-     Args:
-         var (numpy.ndarray):
-         mask (numpy.ndarray): A mask file having same array size as var
-         cutoff (int): Default cutoff is 0
+     Parameters
+     ----------
+     var : numpy.ndarray
+         The input array containing data to be checked against the cutoff.
+     mask : numpy.ndarray
+         An integer array of the same shape as `var`, where `1` indicates
+         invalid data and `0` indicates valid data.
+     cutoff : float, optional
+         The threshold value for quality control. Any value in `var` less than
+         this cutoff will be marked as invalid in the mask. Default is 0.

-     Returns:
-         mask (numpy.ndarray): Modified mask file based on cutoff
+     Returns
+     -------
+     numpy.ndarray
+         An updated integer mask array of the same shape as `var`, with `1`
+         indicating invalid data and `0` indicating valid data.
+
+     Notes
+     -----
+     - The function modifies the `mask` by applying the cutoff condition.
+       Values in `var` that are less than the cutoff will be marked as
+       invalid (`1`), while all other values will remain valid (`0`).
+     - Ensure that `var` and `mask` are compatible in shape for element-wise
+       operations.
+
+     Example
+     -------
+     >>> import pyadps
+     >>> ds = pyadps.ReadFile('dummy.000')
+     >>> var = ds.echo.data
+     >>> mask = qc_check(var, mask, cutoff=40)
      """
-     shape = np.shape(var)
-     if len(shape) == 2:
-         mask[var[:, :] < cutoff] = 1
+     shape = var.shape
+
+     if var.ndim == 2:
+         mask[var < cutoff] = 1
      else:
-         beam = shape[0]
-         for i in range(beam):
+         num_beams = shape[0]
+         for i in range(num_beams):
              mask[var[i, :, :] < cutoff] = 1
-     values, counts = np.unique(mask, return_counts=True)
-     # print(values, counts, np.round(counts[1] * 100 / np.sum(counts)))
+
+     return mask
+
+
+ def correlation_check(
+     ds: ReadFile,
+     mask: NDArray[np.int32],
+     cutoff: float,
+     threebeam: bool,
+     beam_ignore: Optional[int] = None,
+ ) -> NDArray[np.int32]:
+     """
+     Perform a correlation check on the provided variable and update the
+     mask to mark valid and invalid values based on a cutoff threshold.
+
+     Parameters
+     ----------
+     ds : pyadps.dataset
+         The input pyadps dataset containing correlation data to be checked.
+         Accepts 2-D or 3-D masks.
+     mask : numpy.ndarray
+         An integer array of the same shape as `var`, where `1` indicates invalid
+         data or masked data and `0` indicates valid data.
+     cutoff : float
+         The threshold value for correlation. Any value in `ds.correlation.data`
+         below this cutoff will be considered invalid and marked as `1` in the
+         mask. A value of 64 is typical.
+     threebeam : bool
+         If True, enables three-beam solution mode.
+     beam_ignore : int, optional
+         Beam index to ignore in three-beam mode. Default is None.
+
+     Returns
+     -------
+     numpy.ndarray
+         An updated integer mask array of the same shape as `var`, with `1`
+         indicating invalid or masked data (within the cutoff limit) and `0`
+         indicating valid.
+
+     Notes
+     -----
+     - The function modifies the `mask` based on the cutoff condition. Valid
+       values in `var` retain their corresponding mask value as `0`, while
+       invalid values or previously masked elements are marked as `1`.
+     - Ensure that `var` and `mask` are compatible in shape for element-wise
+       operations.
+
+     Example
+     -------
+     >>> import pyadps
+     >>> ds = pyadps.ReadFile('dummy.000')
+     >>> outmask = correlation_check(ds, mask, cutoff=64, threebeam=True)
+     """
+     correlation = ds.correlation.data
+
+     if threebeam and beam_ignore is not None:
+         correlation = np.delete(correlation, beam_ignore, axis=0)
+
+     mask = qc_check(correlation, mask, cutoff=cutoff)
      return mask
 
 
- cor_check = qc_check
- echo_check = qc_check
+ def echo_check(
+     ds: ReadFile,
+     mask: NDArray[np.int32],
+     cutoff: float,
+     threebeam: bool,
+     beam_ignore: Optional[int] = None,
+ ) -> NDArray[np.int32]:
+     """
+     Perform an echo intensity check on the provided variable and update the
+     mask to mark valid and invalid values based on a cutoff threshold.
+
+     Parameters
+     ----------
+     ds : pyadps.dataset
+         The input pyadps dataset containing echo intensity data to be checked.
+         Accepts 2-D or 3-D masks.
+     mask : numpy.ndarray
+         An integer array of the same shape as `var`, where `1` indicates invalid
+         data or masked data and `0` indicates valid data.
+     cutoff : float
+         The threshold value for echo intensity. Any value in `ds.echo.data` below
+         this cutoff will be considered invalid and marked as `1` in the mask.
+         A value of 40 is typical.
+     threebeam : bool
+         If True, enables three-beam solution mode.
+     beam_ignore : int, optional
+         Beam index to ignore in three-beam mode. Default is None.
+
+     Returns
+     -------
+     numpy.ndarray
+         An updated integer mask array of the same shape as `var`, with `1`
+         indicating invalid or masked data (within the cutoff limit) and `0`
+         indicating valid.
+
+     Notes
+     -----
+     - The function modifies the `mask` based on the cutoff condition. Valid
+       values in `var` retain their corresponding mask value as `0`, while
+       invalid values or previously masked elements are marked as `1`.
+     - Ensure that `var` and `mask` are compatible in shape for element-wise
+       operations.
+
+     Example
+     -------
+     >>> import pyadps
+     >>> ds = pyadps.ReadFile('dummy.000')
+     >>> outmask = echo_check(ds, mask, cutoff=40, threebeam=True)
+     """
+     echo = ds.echo.data
+
+     if threebeam and beam_ignore is not None:
+         echo = np.delete(echo, beam_ignore, axis=0)
+
+     mask = qc_check(echo, mask, cutoff=cutoff)
+     return mask
+
 
+ def ev_check(
+     ds: ReadFile, mask: NDArray[np.int32], cutoff: float = 9999
+ ) -> NDArray[np.int32]:
+     """
+     Perform an error velocity check on the provided variable and update the
+     mask to mark valid and invalid values based on a cutoff threshold.
+
+     Parameters
+     ----------
+     ds : pyadps.dataset
+         The input pyadps dataset containing error velocity data to be checked.
+     mask : numpy.ndarray
+         An integer array of the same shape as `var`, where `1` indicates invalid
+         data or masked data and `0` indicates valid data.
+     cutoff : float, optional
+         The threshold value for error velocity. Any value in `var` at or above
+         this cutoff will be considered invalid and marked as `1` in the mask.
+         Default is 9999.
+
+     Returns
+     -------
+     numpy.ndarray
+         An updated integer mask array of the same shape as `var`, with `1`
+         indicating invalid or masked data (within the cutoff limit) and `0`
+         indicating valid.
+
+     Notes
+     -----
+     - The function modifies the `mask` based on the cutoff condition. Valid
+       values in `var` retain their corresponding mask value as `0`, while
+       invalid values or previously masked elements are marked as `1`.
+     - Ensure that `var` and `mask` are compatible in shape for element-wise
+       operations.
+
+     Example
+     -------
+     >>> import pyadps
+     >>> ds = pyadps.ReadFile('dummy.000')
+     >>> outmask = ev_check(ds, mask, cutoff=9999)
+     """
+     var = ds.velocity.data[3, :, :]
+     var = np.abs(var)
+
+     shape = var.shape

- def ev_check(var, mask, cutoff=9999):
-     shape = np.shape(var)
-     var = abs(var)
-     if len(shape) == 2:
-         mask[(var[:, :] >= cutoff) & (var[:, :] < 32768)] = 1
+     if var.ndim == 2:
+         mask[(var >= cutoff) & (var < MAX_VELOCITY_VALUE)] = 1
      else:
-         beam = shape[2]
-         for i in range(beam):
-             mask[(var[i, :, :] >= cutoff) & (var[i, :, :] < 32768)] = 1
-     values, counts = np.unique(mask, return_counts=True)
-     # print(values, counts, np.round(counts[1] * 100 / np.sum(counts)))
+         num_beams = shape[2]
+         for i in range(num_beams):
+             mask[(var[i, :, :] >= cutoff) & (var[i, :, :] < MAX_VELOCITY_VALUE)] = 1
+
      return mask
 
 
- def pg_check(pgood, mask, cutoff=0, threebeam=True):
+ def pg_check(
+     ds: ReadFile, mask: NDArray[np.int32], cutoff: float = 0, threebeam: bool = True
+ ) -> NDArray[np.int32]:
+     """
+     Perform a percent-good check on the provided data and update the mask
+     to mark valid and invalid values based on a cutoff threshold.
+
+     Parameters
+     ----------
+     ds : pyadps.dataset
+         The input pyadps dataset containing percent-good data, where values range
+         from 0 to 100 (maximum percent good).
+     mask : numpy.ndarray
+         An integer array of the same shape as `pgood`, where `1` indicates
+         invalid data and `0` indicates valid data.
+     cutoff : float, optional
+         The threshold value for percent good. Any value in `pgood` greater than
+         or equal to this cutoff will be considered valid (marked as `0`),
+         while values below the cutoff are marked as invalid (`1`).
+         Default is 0.
+     threebeam : bool, optional
+         If `True`, sums up Percent Good 1 and Percent Good 4 for the check.
+
+     Returns
+     -------
+     numpy.ndarray
+         An updated integer mask array of the same shape as `pgood`, with `1`
+         indicating invalid data and `0` indicating valid data.
+
+     Notes
+     -----
+     - The function modifies the `mask` based on the cutoff condition. Valid
+       values in `pgood` are marked as `0`, while invalid values are marked
+       as `1` in the mask.
+     - Ensure that `pgood` and `mask` are compatible in shape for element-wise
+       operations.
+     - If `threebeam` is `True`, the logic may be adjusted to allow partial
+       validity based on specific criteria.
+
+     Example
+     -------
+     >>> import pyadps
+     >>> ds = pyadps.ReadFile('dummy.000')
+     >>> outmask = pg_check(ds, mask, cutoff=50, threebeam=True)
+     """
+     pgood = ds.percentgood.data
+
      if threebeam:
          pgood1 = pgood[0, :, :] + pgood[3, :, :]
      else:
-         pgood1 = pgood[:, :, :]
+         pgood1 = pgood[3, :, :]

-     mask[pgood1[:, :] < cutoff] = 1
-     values, counts = np.unique(mask, return_counts=True)
-     # print(values, counts, np.round(counts[1] * 100 / np.sum(counts)))
+     mask[pgood1 < cutoff] = 1
      return mask
 
 
- def false_target(echo, mask, cutoff=255, threebeam=True):
-     shape = np.shape(echo)
-     for i in range(shape[1]):
-         for j in range(shape[2]):
-             x = np.sort(echo[:, i, j])
-             if threebeam:
-                 if x[-1] - x[1] > cutoff:
-                     mask[i, j] = 1
-             else:
-                 if x[-1] - x[0] > cutoff:
-                     mask[i, j] = 1
+ def false_target(
+     ds: ReadFile,
+     mask: NDArray[np.int32],
+     cutoff: float = 255,
+     threebeam: bool = True,
+     beam_ignore: Optional[int] = None,
+ ) -> NDArray[np.int32]:
+     """
+     Apply a false target detection algorithm based on echo intensity values.
+     This function identifies invalid or false targets in the data and updates
+     the mask accordingly based on a specified cutoff threshold.
+
+     Parameters
+     ----------
+     ds : pyadps.dataset
+         The input pyadps dataset containing echo intensity values, which are used to
+         detect false targets.
+     mask : numpy.ndarray
+         An integer array of the same shape as `echo`, where `1` indicates
+         invalid or false target data and `0` indicates valid data.
+     cutoff : float, optional
+         The threshold applied to the spread in echo intensity across beams. If the
+         difference between the strongest and weakest beams (or, in three-beam mode,
+         the strongest and second-strongest beams) exceeds this cutoff, the cell is
+         flagged as a false target (`1`) in the mask. Default is 255.
+     threebeam : bool, optional
+         If `True`, applies a relaxed check that considers data valid even
+         when only three beams report valid data. Default is `True`.
+     beam_ignore : int, optional
+         Beam index to ignore. Default is None.
+
+     Returns
+     -------
+     numpy.ndarray
+         An updated integer mask array of the same shape as `echo`, with `1`
+         indicating false target or invalid data and `0` indicating valid data.
+
+     Notes
+     -----
+     - The function modifies the `mask` by applying the cutoff condition to the
+       beam-to-beam spread in echo intensity. Cells whose spread exceeds the
+       cutoff are marked as false targets (`1`), while the rest remain valid (`0`).
+     - If `threebeam` is `True`, a more lenient check may be applied to handle
+       data with fewer valid beams.
+     - Ensure that `echo` and `mask` are compatible in shape for element-wise
+       operations.
+
+     Example
+     -------
+     >>> import pyadps
+     >>> ds = pyadps.ReadFile('dummy.000')
+     >>> mask = false_target(ds, mask, cutoff=255, threebeam=True)
+     """
+     echo = ds.echo.data
+
+     if beam_ignore is not None:
+         echo = np.delete(echo, beam_ignore, axis=0)
+
+     shape = echo.shape
+     num_beams, num_cells, num_ensembles = shape
+
+     # Vectorized approach for better performance
+     sorted_echo = np.sort(echo, axis=0)
+
+     if threebeam and beam_ignore is None:
+         # Compare highest to second-highest
+         difference = sorted_echo[-1, :, :] - sorted_echo[-2, :, :]
+     else:
+         # Compare highest to lowest
+         difference = sorted_echo[-1, :, :] - sorted_echo[0, :, :]
+
+     mask[difference > cutoff] = 1

-     values, counts = np.unique(mask, return_counts=True)
-     # print(values, counts, np.round(counts[1] * 100 / np.sum(counts)))
      return mask
 
 
- def default_mask(flobj, velocity):
-     cells = flobj.field()["Cells"]
-     beams = flobj.field()["Beams"]
-     ensembles = flobj.ensembles
-     mask = np.zeros((cells, ensembles))
-     # Ignore mask for error velocity
+ def default_mask(ds: Union[ReadFile, NDArray[np.float64]]) -> NDArray[np.int32]:
+     """
+     Create a default 2-D mask file based on the velocity data.
+     This function generates a mask where values are marked as valid or invalid
+     based on the missing values from the velocity data.
+
+     Parameters
+     ----------
+     ds : pyadps.dataset or numpy.ndarray
+         A pyadps dataset used to extract the velocity data and the mask dimensions.
+         If a numpy.ndarray is given, it must be a 3-D velocity array of shape
+         (beams, cells, ensembles).
+
+     Returns
+     -------
+     numpy.ndarray
+         A 2-D mask array of shape (cells, ensembles), where `1` indicates invalid
+         data and `0` indicates valid data.
+
+     Notes
+     -----
+     - The function uses the velocity data along with the information from the
+       Fixed Leader object to determine which values are valid and which are invalid.
+
+     Example
+     -------
+     >>> import pyadps
+     >>> ds = pyadps.ReadFile('demo.000')
+     >>> mask = pyadps.default_mask(ds)
+     """
+     # Type narrowing for ReadFile
+     if isinstance(ds, np.ndarray):
+         if ds.ndim != 3:
+             raise ValueError(
+                 "Input numpy array must be 3-D (beams × cells × ensembles)"
+             )
+         velocity = ds
+         beams = ds.shape[0]
+         cells = ds.shape[1]
+         ensembles = ds.shape[2]
+     elif isinstance(ds, ReadFile) or ds.__class__.__name__ == "ReadFile":
+         # Now Pyright knows ds is ReadFile in this branch
+         flobj = ds.fixedleader
+         velocity = ds.velocity.data
+         cells = int(flobj.field()["Cells"])
+         beams = int(flobj.field()["Beams"])
+         ensembles = flobj.ensembles
+     else:
+         raise ValueError("Input must be a 3-D numpy array or a PyADPS instance")
+
+     mask = np.zeros((cells, ensembles), dtype=np.int32)
+
+     # Ignore mask for error velocity (last beam)
      for i in range(beams - 1):
-         mask[velocity[i, :, :] < -32767] = 1
+         mask[velocity[i, :, :] < MISSING_VALUE_THRESHOLD] = 1
+
      return mask
 
 
- def qc_prompt(flobj, name, data=None):
-     cutoff = 0
+ def qc_prompt(
+     ds: ReadFile, name: str, data: Optional[NDArray[np.float64]] = None
+ ) -> int:
+     """
+     Prompt the user to confirm or adjust the quality control threshold for a specific
+     parameter based on predefined ranges. The function provides an interactive interface
+     for the user to adjust thresholds for various quality control criteria, with options
+     for certain thresholds like "Echo Intensity Thresh" to check the noise floor.
+
+     Parameters
+     ----------
+     ds : pyadps.dataset
+         The input pyadps dataset that holds metadata and configuration data.
+         The `ds` is used to retrieve the current threshold values based on
+         the provided parameter name.
+     name : str
+         The name of the parameter for which the threshold is being adjusted. Examples
+         include "Echo Intensity Thresh", "Correlation Thresh", "Percent Good Min", etc.
+     data : numpy.ndarray, optional
+         The data associated with the threshold. This is required for parameters like
+         "Echo Intensity Thresh" where a noise floor check might be performed. Default is None.
+
+     Returns
+     -------
+     int
+         The updated threshold value, either the default or the new value entered by the user.
+
+     Notes
+     -----
+     - The function will prompt the user to change the threshold for the given `name` parameter.
+     - For certain parameters, the user may be asked if they would like to check the noise floor
+       (for example, for "Echo Intensity Thresh"). This triggers the display of a plot and lets
+       the user select a new threshold.
+     - The function ensures that the new threshold is within the acceptable range for each parameter.
+     - The default thresholds are provided if the user chooses not to change them.
+
+     Example
+     -------
+     >>> import pyadps
+     >>> ds = pyadps.ReadFile('demo.000')
+     >>> name = "Echo Intensity Thresh"
+     >>> threshold = qc_prompt(ds, name, data)
+     The default threshold for echo intensity thresh is 0
+     Would you like to change the threshold [y/n]: y
+     Would you like to check the noise floor [y/n]: y
+     Threshold changed to 50
+     """
+     flobj = ds.fixedleader
+
      if name == "Echo Intensity Thresh":
-         cutoff = 0
+         cutoff = DEFAULT_ECHO_THRESHOLD
      else:
          cutoff = flobj.field()[name]

-     if name in ["Echo Thresh", "Correlation Thresh", "False Target Thresh"]:
+     if name not in THRESHOLD_RANGES:
          var_range = [0, 255]
-     elif name == "Percent Good Min":
-         var_range = [0, 100]
-     elif name == "Error Velocity Thresh":
-         var_range = [0, 5000]
      else:
-         var_range = [0, 255]
+         var_range = list(THRESHOLD_RANGES[name])

      print(f"The default threshold for {name.lower()} is {cutoff}")
-     affirm = input("Would you like to change the threshold [y/n]: ")
+     affirm = input("Would you like to change the threshold [y/n]: ").strip()
+
      if affirm.lower() == "y":
          while True:
              if name == "Echo Intensity Thresh":
-                 affirm2 = input("Would you like to check the noise floor [y/n]: ")
+                 affirm2 = input(
+                     "Would you like to check the noise floor [y/n]: "
+                 ).strip()
                  if affirm2.lower() == "y":
                      p = PlotNoise(data)
                      p.show()
@@ -118,9 +522,9 @@ def qc_prompt(flobj, name, data=None):
              else:
                  cutoff = input(f"Enter new {name} [{var_range[0]}-{var_range[1]}]: ")

-             cutoff = int(cutoff)
              try:
-                 if cutoff >= var_range[0] and int(cutoff) <= var_range[1]:
+                 cutoff = int(cutoff)
+                 if var_range[0] <= cutoff <= var_range[1]:
                      break
                  else:
                      print(f"Enter an integer between {var_range[0]} and {var_range[1]}")
@@ -128,8 +532,8 @@ def qc_prompt(flobj, name, data=None):
                  print("Enter a valid number")

          print(f"Threshold changed to {cutoff}")
-
      else:
          print(f"Default threshold {cutoff} used.")
-         # return int(ct)
+
      return cutoff
+
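
Usage sketch (not part of the diff above): the rewritten signal_quality module now takes the ReadFile dataset itself rather than raw arrays, so the individual QC checks can be chained against a single mask. The snippet below is a minimal illustration based only on the signatures and docstring examples shown in this diff; it assumes pyadps exposes ReadFile at the package level (as the docstring examples indicate), that signal_quality is importable from pyadps.utils, and that a local RDI file named "demo.000" exists. The cutoff values are placeholders, not recommendations.

    import pyadps
    from pyadps.utils import signal_quality as sq

    # Read a raw RDI binary file (path is a placeholder).
    ds = pyadps.ReadFile("demo.000")

    # Build the default 2-D mask (cells x ensembles) from missing velocity values.
    # Per the diff, default_mask also accepts a bare 3-D velocity array of shape
    # (beams, cells, ensembles) instead of a ReadFile object.
    mask = sq.default_mask(ds)

    # Apply the individual checks; each returns the updated mask (1 = flagged, 0 = valid).
    mask = sq.echo_check(ds, mask, cutoff=40, threebeam=True)
    mask = sq.correlation_check(ds, mask, cutoff=64, threebeam=True)
    mask = sq.ev_check(ds, mask, cutoff=2000)
    mask = sq.pg_check(ds, mask, cutoff=50, threebeam=True)
    mask = sq.false_target(ds, mask, cutoff=50, threebeam=True)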