pingmapper 5.3.7__tar.gz → 5.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57):
  1. {pingmapper-5.3.7 → pingmapper-5.4.0}/PKG-INFO +1 -1
  2. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/class_sonObj.py +247 -0
  3. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/class_sonObj_nadirgaptest.py +247 -0
  4. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/doWork.py +3 -0
  5. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/main_readFiles.py +20 -6
  6. pingmapper-5.4.0/pingmapper/nonGUI_batch_main.py +129 -0
  7. pingmapper-5.4.0/pingmapper/nonGui_main.py +126 -0
  8. pingmapper-5.4.0/pingmapper/test_dq_filter.py +203 -0
  9. pingmapper-5.4.0/pingmapper/version.py +1 -0
  10. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper.egg-info/PKG-INFO +1 -1
  11. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper.egg-info/SOURCES.txt +3 -0
  12. pingmapper-5.3.7/pingmapper/version.py +0 -1
  13. {pingmapper-5.3.7 → pingmapper-5.4.0}/LICENSE +0 -0
  14. {pingmapper-5.3.7 → pingmapper-5.4.0}/README.md +0 -0
  15. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/__init__.py +0 -0
  16. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/__main__.py +0 -0
  17. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/class_mapSubstrateObj.py +0 -0
  18. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/class_portstarObj.py +0 -0
  19. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/class_rectObj.py +0 -0
  20. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/default_params.json +0 -0
  21. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/funcs_common.py +0 -0
  22. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/funcs_model.py +0 -0
  23. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/funcs_rectify.py +0 -0
  24. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/gui_main.py +0 -0
  25. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/main_mapSubstrate.py +0 -0
  26. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/main_rectify.py +0 -0
  27. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/processing_scripts/main_batchDirectory_2024-01-18_0926.py +0 -0
  28. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/processing_scripts/main_batchDirectory_2024-01-18_0929.py +0 -0
  29. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/scratch/funcs_pyhum_correct.py +0 -0
  30. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/scratch/main.py +0 -0
  31. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/scratch/main_batchDirectory.py +0 -0
  32. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/test_PINGMapper.py +0 -0
  33. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/test_time.py +0 -0
  34. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/DRAFT_Workflows/avg_predictions_Mussel_WBL.py +0 -0
  35. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/DRAFT_Workflows/gen_centerline.py +0 -0
  36. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/DRAFT_Workflows/gen_centerline_from_bankline.py +0 -0
  37. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/DRAFT_Workflows/gen_centerline_trkpnts_fitspline_DRAFT.py +0 -0
  38. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/DRAFT_Workflows/testEXAMPLE_mosaic_logit.py +0 -0
  39. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/RawEGN_avg_predictions.py +0 -0
  40. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/Substrate_Summaries/00_substrate_logits_mosaic_transects.py +0 -0
  41. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/Substrate_Summaries/00_substrate_shps_mosaic_transects.py +0 -0
  42. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/Substrate_Summaries/01_gen_centerline_from_coverage.py +0 -0
  43. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/Substrate_Summaries/02_gen_summary_stamp_shps.py +0 -0
  44. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/Substrate_Summaries/03_gen_summary_shp.py +0 -0
  45. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/Substrate_Summaries/04_combine_summary_shp_csv.py +0 -0
  46. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/Substrate_Summaries/05_gen_summary_shp_plots.py +0 -0
  47. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/Substrate_Summaries/06_compare_raw-egn_volume.py +0 -0
  48. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/Substrate_Summaries/08_raw-egn_hardReacheFreq_hist.py +0 -0
  49. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/Substrate_Summaries/09_raw-egn_PatchSize_density.py +0 -0
  50. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/Substrate_Summaries/summarize_project_substrate.py +0 -0
  51. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/export_coverage.py +0 -0
  52. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper/utils/main_mosaic_transects.py +0 -0
  53. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper.egg-info/dependency_links.txt +0 -0
  54. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper.egg-info/requires.txt +0 -0
  55. {pingmapper-5.3.7 → pingmapper-5.4.0}/pingmapper.egg-info/top_level.txt +0 -0
  56. {pingmapper-5.3.7 → pingmapper-5.4.0}/pyproject.toml +0 -0
  57. {pingmapper-5.3.7 → pingmapper-5.4.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pingmapper
3
- Version: 5.3.7
3
+ Version: 5.4.0
4
4
  Summary: Open-source interface for processing recreation-grade side scan sonar datasets and reproducibly mapping benthic habitat
5
5
  Author: Daniel Buscombe
6
6
  Author-email: Cameron Bodine <bodine.cs@gmail.email>
@@ -253,6 +253,13 @@ class sonObj(object):
253
253
  max_speed,
254
254
  aoi,
255
255
  time_table,
256
+ dq_table=False,
257
+ dq_time_field=False,
258
+ dq_flag_field=False,
259
+ dq_keep_values=False,
260
+ dq_src_utc_offset=0.0,
261
+ dq_target_utc_offset=0.0,
262
+ dq_time_offset=0.0,
256
263
  ):
257
264
  '''
258
265
  '''
@@ -284,6 +291,20 @@ class sonObj(object):
284
291
  if time_table:
285
292
  sonDF = self._filterTime(sonDF, time_table)
286
293
 
294
+ ####################
295
+ # Data Quality Filter
296
+ if dq_table:
297
+ sonDF = self._filterDQ(
298
+ sonDF,
299
+ dq_table,
300
+ dq_time_field,
301
+ dq_flag_field,
302
+ dq_keep_values,
303
+ dq_src_utc_offset,
304
+ dq_target_utc_offset,
305
+ dq_time_offset,
306
+ )
307
+
287
308
  return sonDF
288
309
 
289
310
  # ======================================================================
@@ -304,6 +325,232 @@ class sonObj(object):
304
325
  return df
305
326
 
306
327
 
328
    # ======================================================================
    def _filterDQ(self,
                  sonDF,
                  dq_table,
                  dq_time_field,
                  dq_flag_field,
                  dq_keep_values,
                  dq_src_utc_offset,
                  dq_target_utc_offset,
                  dq_time_offset,
                  ):
        '''
        Filter sonar pings using a data-quality (DQ) log CSV.

        Each row in the DQ log is treated as a state-change event: the flag
        recorded at time T applies to every sonar ping from T until the next
        event row. Pings that occur before the first DQ event are removed.

        Parameters
        ----------
        sonDF : pandas.DataFrame
            Per-ping sonar metadata. A 'filter_dq' boolean column is added
            and AND-ed into the existing 'filter' column.
        dq_table : str
            Path to the DQ log CSV.
        dq_time_field : str
            Name of the timestamp column in the DQ log. Required.
        dq_flag_field : str
            Name of the quality-flag column in the DQ log. Required.
        dq_keep_values : list[str] | str | False
            Flag values to keep (list or comma-separated string).
        dq_src_utc_offset, dq_target_utc_offset : float
            UTC offsets (hours) used to shift DQ datetimes onto the sonar
            clock; both or neither must be set.
        dq_time_offset : float
            Seconds added to the sonar timestamps before matching.

        Returns
        -------
        pandas.DataFrame
            Copy of sonDF with 'filter_dq' set and 'filter' updated.

        Raises
        ------
        ValueError
            On missing fields/columns, unparseable timestamps, or a
            DQ/sonar timestamp-type mismatch.
        '''

        filtDQCol = 'filter_dq'
        filtCol = 'filter'
        dqTimeCol = '_dq_ts'

        # Both field names are mandatory once a DQ table is supplied.
        if not dq_time_field:
            raise ValueError('dq_time_field is required when dq_table is provided.')
        if not dq_flag_field:
            raise ValueError('dq_flag_field is required when dq_table is provided.')

        keep_vals = self._normalizeDQKeepValues(dq_keep_values)
        if len(keep_vals) == 0:
            raise ValueError('dq_keep_values must contain at least one value to keep.')

        # Work on a copy; default every ping to "filtered out" until proven kept.
        sonDF = sonDF.copy()
        sonDF[filtDQCol] = False

        if filtCol not in sonDF.columns:
            sonDF[filtCol] = True

        dqDF = pd.read_csv(dq_table)
        missing_cols = [c for c in [dq_time_field, dq_flag_field] if c not in dqDF.columns]
        if missing_cols:
            raise ValueError('dqLog missing required column(s): {}'.format(', '.join(missing_cols)))

        # Parse both timestamp sources; they must agree on kind
        # ('numeric' seconds vs 'datetime') to be comparable.
        dqTimes, dqKind = self._coerceDQTimestampSeries(dqDF[dq_time_field], dq_time_field)
        sonTimes, sonKind = self._getSonarFilterTimestamp(sonDF)

        if dqKind != sonKind:
            raise ValueError(
                'dqLog timestamp type ({}) does not match sonar timestamp type ({}).'.format(dqKind, sonKind)
            )

        # Move DQ datetimes from the logger's UTC offset onto the sonar's.
        if dqKind == 'datetime':
            dqTimes = self._shiftDQDatetimeToTargetOffset(
                dqTimes,
                dq_src_utc_offset,
                dq_target_utc_offset,
            )

        dqDF = dqDF.copy()
        dqDF[dqTimeCol] = dqTimes
        # Rows whose timestamp failed to parse cannot anchor a state change.
        dqDF = dqDF[dqDF[dqTimeCol].notna()].copy()
        if dqDF.empty:
            raise ValueError('dqLog contained no valid timestamps after parsing {}.'.format(dq_time_field))

        # Apply the fixed sonar-clock correction (seconds).
        offset = float(dq_time_offset)
        if sonKind == 'datetime':
            sonTimes = sonTimes + pd.to_timedelta(offset, unit='s')
        else:
            sonTimes = sonTimes + offset

        # Pair each ping's original index with its (corrected) timestamp;
        # pings with unparseable timestamps simply stay filter_dq=False.
        sonMerge = pd.DataFrame({
            '_son_idx': sonDF.index,
            '_son_ts': sonTimes,
        })
        sonMerge = sonMerge[sonMerge['_son_ts'].notna()].copy()

        dqDF['_dq_keep'] = dqDF[dq_flag_field].map(self._normalizeDQValue).isin(keep_vals)

        # Collapse duplicate event times: the last row at a timestamp wins.
        event_state = dqDF[[dqTimeCol, '_dq_keep']].copy()
        event_state.sort_values(dqTimeCol, inplace=True)
        event_state = event_state.groupby(dqTimeCol, as_index=False)['_dq_keep'].last()

        keep_idx = self._applyDQEventState(sonMerge, event_state, dqTimeCol)

        sonDF.loc[keep_idx, filtDQCol] = True
        # AND into the cumulative filter so earlier filters still apply.
        sonDF[filtCol] = sonDF[filtCol] & sonDF[filtDQCol]

        return sonDF
417
+ # ======================================================================
418
+ def _normalizeDQKeepValues(self, dq_keep_values):
419
+
420
+ if dq_keep_values is False or dq_keep_values is None:
421
+ return set()
422
+
423
+ if isinstance(dq_keep_values, str):
424
+ dq_keep_values = dq_keep_values.split(',')
425
+
426
+ keep_vals = set()
427
+ for value in dq_keep_values:
428
+ norm = self._normalizeDQValue(value)
429
+ if norm:
430
+ keep_vals.add(norm)
431
+
432
+ return keep_vals
433
+
434
+ # ======================================================================
435
+ def _normalizeDQValue(self, value):
436
+
437
+ if pd.isna(value):
438
+ return ''
439
+ return str(value).strip().lower()
440
+
441
+ # ======================================================================
442
+ def _coerceDQTimestampSeries(self, series, field_name):
443
+
444
+ non_na = series.dropna()
445
+ numeric = pd.to_numeric(series, errors='coerce')
446
+ if len(non_na) > 0 and numeric.notna().sum() == len(non_na):
447
+ return numeric, 'numeric'
448
+
449
+ dt = pd.to_datetime(series, errors='coerce')
450
+ if dt.notna().any():
451
+ try:
452
+ if dt.dt.tz is not None:
453
+ dt = dt.dt.tz_localize(None)
454
+ except AttributeError:
455
+ pass
456
+ return dt, 'datetime'
457
+
458
+ if numeric.notna().any():
459
+ return numeric, 'numeric'
460
+
461
+ raise ValueError('Unable to parse dqLog timestamps from column: {}'.format(field_name))
462
+
463
+ # ======================================================================
464
+ def _shiftDQDatetimeToTargetOffset(self,
465
+ dq_times,
466
+ dq_src_utc_offset=False,
467
+ dq_target_utc_offset=False):
468
+
469
+ src_offset = self._coerceDQUtcOffset(dq_src_utc_offset, 'dq_src_utc_offset')
470
+ target_offset = self._coerceDQUtcOffset(dq_target_utc_offset, 'dq_target_utc_offset')
471
+
472
+ if src_offset is None and target_offset is None:
473
+ return dq_times
474
+
475
+ if src_offset is None or target_offset is None:
476
+ raise ValueError(
477
+ 'dq_src_utc_offset and dq_target_utc_offset must both be provided when either is set.'
478
+ )
479
+
480
+ return dq_times + pd.to_timedelta(target_offset - src_offset, unit='h')
481
+
482
+ # ======================================================================
483
+ def _coerceDQUtcOffset(self, value, field_name):
484
+
485
+ if value is False or value is None or value == '':
486
+ return None
487
+
488
+ try:
489
+ return float(value)
490
+ except (TypeError, ValueError):
491
+ raise ValueError('{} must be a numeric UTC offset in hours.'.format(field_name))
492
+
493
+ # ======================================================================
494
+ def _getSonarFilterTimestamp(self, sonDF):
495
+
496
+ if 'date' in sonDF.columns and 'time' in sonDF.columns:
497
+ dt = pd.to_datetime(
498
+ sonDF['date'].astype(str).str.strip() + ' ' + sonDF['time'].astype(str).str.strip(),
499
+ errors='coerce',
500
+ format='mixed',
501
+ )
502
+ if dt.notna().any():
503
+ try:
504
+ if dt.dt.tz is not None:
505
+ dt = dt.dt.tz_localize(None)
506
+ except AttributeError:
507
+ pass
508
+ return dt, 'datetime'
509
+
510
+ if 'time' in sonDF.columns:
511
+ dt = pd.to_datetime(sonDF['time'], errors='coerce', format='mixed')
512
+ if dt.notna().any():
513
+ try:
514
+ if dt.dt.tz is not None:
515
+ dt = dt.dt.tz_localize(None)
516
+ except AttributeError:
517
+ pass
518
+ return dt, 'datetime'
519
+
520
+ if 'time_s' in sonDF.columns:
521
+ numeric = pd.to_numeric(sonDF['time_s'], errors='coerce')
522
+ if numeric.notna().any():
523
+ return numeric, 'numeric'
524
+
525
+ raise ValueError('Unable to determine sonar timestamps for dqLog filtering.')
526
+
527
+
528
+ # ======================================================================
529
+ def _applyDQEventState(self, son, event_state, dqTimeCol):
530
+ '''
531
+ Return the subset of sonar indices whose ping timestamp falls within a
532
+ "keep" state block as defined by the DQ event log.
533
+
534
+ Uses np.searchsorted so each ping inherits the state of the most-recent
535
+ event that preceded it. Pings before the first event are excluded.
536
+ '''
537
+
538
+ event_times = event_state[dqTimeCol].to_numpy()
539
+ event_keep = event_state['_dq_keep'].to_numpy(dtype=bool)
540
+ son_times = son['_son_ts'].to_numpy()
541
+
542
+ # searchsorted(side='right') - 1 gives index of last event <= ping time
543
+ event_idx = np.searchsorted(event_times, son_times, side='right') - 1
544
+
545
+ # Pings before the first event get event_idx == -1 → exclude
546
+ valid_idx = event_idx >= 0
547
+
548
+ keep_mask = np.zeros(len(son), dtype=bool)
549
+ keep_mask[valid_idx] = event_keep[event_idx[valid_idx]]
550
+
551
+ return son.loc[keep_mask, '_son_idx']
552
+
553
+
307
554
  # ======================================================================
308
555
  def _filterHeading(self,
309
556
  df,
@@ -253,6 +253,13 @@ class sonObj(object):
253
253
  max_speed,
254
254
  aoi,
255
255
  time_table,
256
+ dq_table=False,
257
+ dq_time_field=False,
258
+ dq_flag_field=False,
259
+ dq_keep_values=False,
260
+ dq_src_utc_offset=0.0,
261
+ dq_target_utc_offset=0.0,
262
+ dq_time_offset=0.0,
256
263
  ):
257
264
  '''
258
265
  '''
@@ -284,6 +291,20 @@ class sonObj(object):
284
291
  if time_table:
285
292
  sonDF = self._filterTime(sonDF, time_table)
286
293
 
294
+ ####################
295
+ # Data Quality Filter
296
+ if dq_table:
297
+ sonDF = self._filterDQ(
298
+ sonDF,
299
+ dq_table,
300
+ dq_time_field,
301
+ dq_flag_field,
302
+ dq_keep_values,
303
+ dq_src_utc_offset,
304
+ dq_target_utc_offset,
305
+ dq_time_offset,
306
+ )
307
+
287
308
  return sonDF
288
309
 
289
310
  # ======================================================================
@@ -304,6 +325,232 @@ class sonObj(object):
304
325
  return df
305
326
 
306
327
 
328
    # ======================================================================
    def _filterDQ(self,
                  sonDF,
                  dq_table,
                  dq_time_field,
                  dq_flag_field,
                  dq_keep_values,
                  dq_src_utc_offset,
                  dq_target_utc_offset,
                  dq_time_offset,
                  ):
        '''
        Filter sonar pings using a data-quality (DQ) log CSV.

        Each row in the DQ log is treated as a state-change event: the flag
        recorded at time T applies to every sonar ping from T until the next
        event row. Pings that occur before the first DQ event are removed.

        Adds a boolean 'filter_dq' column and ANDs it into 'filter'.
        Raises ValueError on missing fields/columns, unparseable
        timestamps, or a DQ/sonar timestamp-type mismatch.
        '''

        filtDQCol = 'filter_dq'
        filtCol = 'filter'
        dqTimeCol = '_dq_ts'

        # Both field names are mandatory once a DQ table is supplied.
        if not dq_time_field:
            raise ValueError('dq_time_field is required when dq_table is provided.')
        if not dq_flag_field:
            raise ValueError('dq_flag_field is required when dq_table is provided.')

        keep_vals = self._normalizeDQKeepValues(dq_keep_values)
        if len(keep_vals) == 0:
            raise ValueError('dq_keep_values must contain at least one value to keep.')

        # Work on a copy; default every ping to "filtered out" until proven kept.
        sonDF = sonDF.copy()
        sonDF[filtDQCol] = False

        if filtCol not in sonDF.columns:
            sonDF[filtCol] = True

        dqDF = pd.read_csv(dq_table)
        missing_cols = [c for c in [dq_time_field, dq_flag_field] if c not in dqDF.columns]
        if missing_cols:
            raise ValueError('dqLog missing required column(s): {}'.format(', '.join(missing_cols)))

        # Parse both timestamp sources; they must agree on kind
        # ('numeric' seconds vs 'datetime') to be comparable.
        dqTimes, dqKind = self._coerceDQTimestampSeries(dqDF[dq_time_field], dq_time_field)
        sonTimes, sonKind = self._getSonarFilterTimestamp(sonDF)

        if dqKind != sonKind:
            raise ValueError(
                'dqLog timestamp type ({}) does not match sonar timestamp type ({}).'.format(dqKind, sonKind)
            )

        # Move DQ datetimes from the logger's UTC offset onto the sonar's.
        if dqKind == 'datetime':
            dqTimes = self._shiftDQDatetimeToTargetOffset(
                dqTimes,
                dq_src_utc_offset,
                dq_target_utc_offset,
            )

        dqDF = dqDF.copy()
        dqDF[dqTimeCol] = dqTimes
        # Rows whose timestamp failed to parse cannot anchor a state change.
        dqDF = dqDF[dqDF[dqTimeCol].notna()].copy()
        if dqDF.empty:
            raise ValueError('dqLog contained no valid timestamps after parsing {}.'.format(dq_time_field))

        # Apply the fixed sonar-clock correction (seconds).
        offset = float(dq_time_offset)
        if sonKind == 'datetime':
            sonTimes = sonTimes + pd.to_timedelta(offset, unit='s')
        else:
            sonTimes = sonTimes + offset

        # Pair each ping's original index with its (corrected) timestamp;
        # pings with unparseable timestamps simply stay filter_dq=False.
        sonMerge = pd.DataFrame({
            '_son_idx': sonDF.index,
            '_son_ts': sonTimes,
        })
        sonMerge = sonMerge[sonMerge['_son_ts'].notna()].copy()

        dqDF['_dq_keep'] = dqDF[dq_flag_field].map(self._normalizeDQValue).isin(keep_vals)

        # Collapse duplicate event times: the last row at a timestamp wins.
        event_state = dqDF[[dqTimeCol, '_dq_keep']].copy()
        event_state.sort_values(dqTimeCol, inplace=True)
        event_state = event_state.groupby(dqTimeCol, as_index=False)['_dq_keep'].last()

        keep_idx = self._applyDQEventState(sonMerge, event_state, dqTimeCol)

        sonDF.loc[keep_idx, filtDQCol] = True
        # AND into the cumulative filter so earlier filters still apply.
        sonDF[filtCol] = sonDF[filtCol] & sonDF[filtDQCol]

        return sonDF

    # ======================================================================
    def _normalizeDQKeepValues(self, dq_keep_values):
        '''
        Normalize the user-supplied "keep" values into a set of lowercase,
        stripped strings. Accepts a list or a comma-separated string;
        False/None yields an empty set.
        '''

        if dq_keep_values is False or dq_keep_values is None:
            return set()

        # A bare string is treated as a comma-separated list.
        if isinstance(dq_keep_values, str):
            dq_keep_values = dq_keep_values.split(',')

        keep_vals = set()
        for value in dq_keep_values:
            norm = self._normalizeDQValue(value)
            # Values that normalize to '' (blank / NaN) are dropped.
            if norm:
                keep_vals.add(norm)

        return keep_vals

    # ======================================================================
    def _normalizeDQValue(self, value):
        '''
        Canonical string form of a DQ flag value: lowercase, stripped.
        NaN-like values normalize to ''.
        '''

        if pd.isna(value):
            return ''
        return str(value).strip().lower()

    # ======================================================================
    def _coerceDQTimestampSeries(self, series, field_name):
        '''
        Parse a dqLog timestamp column as numeric seconds or datetimes.
        Returns (parsed_series, kind) where kind is 'numeric' or
        'datetime'; raises ValueError when nothing parses.
        '''

        non_na = series.dropna()
        numeric = pd.to_numeric(series, errors='coerce')
        # Fully numeric column -> elapsed-seconds style timestamps.
        if len(non_na) > 0 and numeric.notna().sum() == len(non_na):
            return numeric, 'numeric'

        dt = pd.to_datetime(series, errors='coerce')
        if dt.notna().any():
            # Strip any timezone so comparisons against naive sonar times work.
            try:
                if dt.dt.tz is not None:
                    dt = dt.dt.tz_localize(None)
            except AttributeError:
                pass
            return dt, 'datetime'

        # Partially numeric column: use whatever parsed.
        if numeric.notna().any():
            return numeric, 'numeric'

        raise ValueError('Unable to parse dqLog timestamps from column: {}'.format(field_name))

    # ======================================================================
    def _shiftDQDatetimeToTargetOffset(self,
                                       dq_times,
                                       dq_src_utc_offset=False,
                                       dq_target_utc_offset=False):
        '''
        Shift dqLog datetimes from the logger's UTC offset onto the
        sonar's. No-op when neither offset is set; ValueError when only
        one is.
        '''

        src_offset = self._coerceDQUtcOffset(dq_src_utc_offset, 'dq_src_utc_offset')
        target_offset = self._coerceDQUtcOffset(dq_target_utc_offset, 'dq_target_utc_offset')

        if src_offset is None and target_offset is None:
            return dq_times

        # One without the other is ambiguous; refuse rather than guess.
        if src_offset is None or target_offset is None:
            raise ValueError(
                'dq_src_utc_offset and dq_target_utc_offset must both be provided when either is set.'
            )

        return dq_times + pd.to_timedelta(target_offset - src_offset, unit='h')

    # ======================================================================
    def _coerceDQUtcOffset(self, value, field_name):
        '''
        Coerce a UTC-offset parameter (hours) to float; None when unset
        (False/None/''); ValueError when not numeric.
        '''

        if value is False or value is None or value == '':
            return None

        try:
            return float(value)
        except (TypeError, ValueError):
            raise ValueError('{} must be a numeric UTC offset in hours.'.format(field_name))

    # ======================================================================
    def _getSonarFilterTimestamp(self, sonDF):
        '''
        Derive a per-ping timestamp series for dqLog filtering.
        Preference: 'date'+'time' -> datetime; 'time' -> datetime;
        'time_s' -> numeric seconds. Raises ValueError otherwise.
        '''

        if 'date' in sonDF.columns and 'time' in sonDF.columns:
            dt = pd.to_datetime(
                sonDF['date'].astype(str).str.strip() + ' ' + sonDF['time'].astype(str).str.strip(),
                errors='coerce',
                format='mixed',
            )
            if dt.notna().any():
                # Strip any timezone so comparisons stay naive-vs-naive.
                try:
                    if dt.dt.tz is not None:
                        dt = dt.dt.tz_localize(None)
                except AttributeError:
                    pass
                return dt, 'datetime'

        if 'time' in sonDF.columns:
            dt = pd.to_datetime(sonDF['time'], errors='coerce', format='mixed')
            if dt.notna().any():
                try:
                    if dt.dt.tz is not None:
                        dt = dt.dt.tz_localize(None)
                except AttributeError:
                    pass
                return dt, 'datetime'

        if 'time_s' in sonDF.columns:
            numeric = pd.to_numeric(sonDF['time_s'], errors='coerce')
            if numeric.notna().any():
                return numeric, 'numeric'

        raise ValueError('Unable to determine sonar timestamps for dqLog filtering.')


    # ======================================================================
    def _applyDQEventState(self, son, event_state, dqTimeCol):
        '''
        Return the subset of sonar indices whose ping timestamp falls within a
        "keep" state block as defined by the DQ event log.

        Uses np.searchsorted so each ping inherits the state of the most-recent
        event that preceded it. Pings before the first event are excluded.
        '''

        event_times = event_state[dqTimeCol].to_numpy()
        event_keep = event_state['_dq_keep'].to_numpy(dtype=bool)
        son_times = son['_son_ts'].to_numpy()

        # searchsorted(side='right') - 1 gives index of last event <= ping time
        event_idx = np.searchsorted(event_times, son_times, side='right') - 1

        # Pings before the first event get event_idx == -1 → exclude
        valid_idx = event_idx >= 0

        keep_mask = np.zeros(len(son), dtype=bool)
        keep_mask[valid_idx] = event_keep[event_idx[valid_idx]]

        return son.loc[keep_mask, '_son_idx']
+
307
554
  # ======================================================================
308
555
  def _filterHeading(self,
309
556
  df,
@@ -98,6 +98,9 @@ def doWork(
98
98
  exportUnknown (bool), fixNoDat (bool), threadCnt (int | float)
99
99
  aoi (str | False), max_heading_deviation (float), max_heading_distance (float)
100
100
  min_speed (float), max_speed (float), time_table (str | False)
101
+ dq_table (str | False), dq_time_field (str | False), dq_flag_field (str | False)
102
+ dq_keep_values (list[str] | str | False), dq_src_utc_offset (float)
103
+ dq_target_utc_offset (float), dq_time_offset (float)
101
104
  pix_res_son (float), pix_res_map (float)
102
105
  x_offset (float), y_offset (float)
103
106
  egn (bool), egn_stretch (int: 0/1/2), egn_stretch_factor (float)
@@ -119,6 +119,13 @@ def read_master_func(logfilename='',
119
119
  min_speed = False,
120
120
  max_speed = False,
121
121
  time_table = False,
122
+ dq_table = False,
123
+ dq_time_field = False,
124
+ dq_flag_field = False,
125
+ dq_keep_values = False,
126
+ dq_src_utc_offset = 0.0,
127
+ dq_target_utc_offset = 0.0,
128
+ dq_time_offset = 0.0,
122
129
  tempC=10,
123
130
  nchunk=500,
124
131
  cropRange=0,
@@ -1018,7 +1025,7 @@ def read_master_func(logfilename='',
1018
1025
  # For Filtering #
1019
1026
  ############################################################################
1020
1027
 
1021
- if max_heading_deviation > 0 or min_speed > 0 or max_speed > 0 or aoi or time_table:
1028
+ if dq_table or max_heading_deviation > 0 or min_speed > 0 or max_speed > 0 or aoi or time_table:
1022
1029
 
1023
1030
  start_time = time.time()
1024
1031
 
@@ -1051,7 +1058,9 @@ def read_master_func(logfilename='',
1051
1058
 
1052
1059
  # Do filtering on longest recording
1053
1060
  son0 = portstar[maxRec]
1054
- df0 = son0._doSonarFiltering(max_heading_deviation, max_heading_distance, min_speed, max_speed, aoi, time_table)
1061
+ df0 = son0._doSonarFiltering(max_heading_deviation, max_heading_distance, min_speed, max_speed, aoi, time_table,
1062
+ dq_table, dq_time_field, dq_flag_field, dq_keep_values,
1063
+ dq_src_utc_offset, dq_target_utc_offset, dq_time_offset)
1055
1064
 
1056
1065
  # Add filter to other beam
1057
1066
  son1 = portstar[minRec]
@@ -1074,7 +1083,7 @@ def read_master_func(logfilename='',
1074
1083
  if df0.empty or df1.empty:
1075
1084
  raise ValueError(
1076
1085
  '\n\nFiltering removed all side-scan pings. No metadata remains to process. '\
1077
- 'Adjust filtering parameters (max_heading_deviation, min_speed, max_speed, aoi, time_table) '\
1086
+ 'Adjust filtering parameters (dq_table, max_heading_deviation, min_speed, max_speed, aoi, time_table) '\
1078
1087
  'or reduce nchunk.'
1079
1088
  )
1080
1089
 
@@ -1099,7 +1108,9 @@ def read_master_func(logfilename='',
1099
1108
 
1100
1109
  # Do filtering on downbeams
1101
1110
  for son in downbeams:
1102
- df = son._doSonarFiltering(max_heading_deviation, max_heading_distance, min_speed, max_speed, aoi, time_table)
1111
+ df = son._doSonarFiltering(max_heading_deviation, max_heading_distance, min_speed, max_speed, aoi, time_table,
1112
+ dq_table, dq_time_field, dq_flag_field, dq_keep_values,
1113
+ dq_src_utc_offset, dq_target_utc_offset, dq_time_offset)
1103
1114
 
1104
1115
  df = df[df['filter'] == True]
1105
1116
 
@@ -1452,9 +1463,12 @@ def read_master_func(logfilename='',
1452
1463
  # Cleanup
1453
1464
  try:
1454
1465
  psObj._cleanup()
1455
- del psObj, portstar
1456
- except:
1466
+ except Exception:
1457
1467
  pass
1468
+ if 'psObj' in locals():
1469
+ del psObj
1470
+ if 'portstar' in locals():
1471
+ del portstar
1458
1472
 
1459
1473
 
1460
1474
  ############################################################################
@@ -0,0 +1,129 @@
1
+ import sys
2
+ from pathlib import Path
3
+
4
+ # Ensure local repo package is imported when running this script directly.
5
+ REPO_ROOT = Path(__file__).resolve().parents[1]
6
+ if str(REPO_ROOT) not in sys.path:
7
+ sys.path.insert(0, str(REPO_ROOT))
8
+
9
+ from pingmapper.doWork import doWork
10
+
11
+ params = {
12
+ # ------------------------------------------------------------------
13
+ # Project / Runtime
14
+ # ------------------------------------------------------------------
15
+ "project_mode": 1,
16
+ "threadCnt": 0,
17
+
18
+ # ------------------------------------------------------------------
19
+ # Survey / Sonar Basics
20
+ # ------------------------------------------------------------------
21
+ "tempC": 12.0,
22
+ "nchunk": 500,
23
+ "cropRange": 0,
24
+
25
+ # ------------------------------------------------------------------
26
+ # Navigation + Filtering
27
+ # ------------------------------------------------------------------
28
+ "aoi": False,
29
+ "max_heading_deviation": 0,
30
+ "max_heading_distance": 0,
31
+ "min_speed": 0,
32
+ "max_speed": 0,
33
+ "time_table": False,
34
+
35
+ # dqLog event-state filtering
36
+ # Set dq_table and field names to enable; keep values can be list or csv string.
37
+ "dq_table": False,
38
+ "dq_time_field": False,
39
+ "dq_flag_field": False,
40
+ "dq_keep_values": False,
41
+ "dq_src_utc_offset": 0.0,
42
+ "dq_target_utc_offset": 0.0,
43
+ "dq_time_offset": 0.0,
44
+
45
+ # ------------------------------------------------------------------
46
+ # Input Handling
47
+ # ------------------------------------------------------------------
48
+ "exportUnknown": False,
49
+ "fixNoDat": False,
50
+
51
+ # ------------------------------------------------------------------
52
+ # Georeferencing / Resolution
53
+ # ------------------------------------------------------------------
54
+ "pix_res_son": 0.1,
55
+ "pix_res_map": 0,
56
+ "x_offset": 0.0,
57
+ "y_offset": 0.0,
58
+
59
+ # ------------------------------------------------------------------
60
+ # Intensity / Tone
61
+ # ------------------------------------------------------------------
62
+ "egn": False,
63
+ "egn_stretch": 0,
64
+ "egn_stretch_factor": 1.0,
65
+
66
+ # ------------------------------------------------------------------
67
+ # Sonogram Exports
68
+ # ------------------------------------------------------------------
69
+ "wcp": False,
70
+ "wcm": False,
71
+ "wcr": False,
72
+ "wco": False,
73
+ "sonogram_colorMap": "Greys_r",
74
+ "mask_shdw": False,
75
+ "tileFile": ".png",
76
+ "spdCor": False,
77
+ "maxCrop": False,
78
+
79
+ # ------------------------------------------------------------------
80
+ # Depth / Shadows
81
+ # ------------------------------------------------------------------
82
+ "remShadow": 0,
83
+ "detectDep": 0,
84
+ "smthDep": False,
85
+ "adjDep": 0.0,
86
+ "pltBedPick": False,
87
+
88
+ # ------------------------------------------------------------------
89
+ # Rectification / Mosaics
90
+ # ------------------------------------------------------------------
91
+ "rect_wcp": False,
92
+ "rect_wcr": False,
93
+ "rubberSheeting": False,
94
+ "rectMethod": "Heading",
95
+ "rectInterpDist": 50,
96
+ "son_colorMap": "Greys",
97
+ "mosaic_nchunk": 0,
98
+
99
+ # ------------------------------------------------------------------
100
+ # Substrate Mapping
101
+ # ------------------------------------------------------------------
102
+ "pred_sub": False,
103
+ "pltSubClass": False,
104
+ "map_sub": False,
105
+ "export_poly": False,
106
+ "map_class_method": "max",
107
+ "map_predict": 0,
108
+
109
+ # ------------------------------------------------------------------
110
+ # Final Exports
111
+ # ------------------------------------------------------------------
112
+ "mosaic": 0,
113
+ "map_mosaic": 0,
114
+ "banklines": False,
115
+ "coverage": False,
116
+ }
117
+
118
+ results = doWork(
119
+ in_dir=r"C:\Users\cbodine\Downloads\NewRiver\SonarRecording",
120
+ out_dir=r"C:\Users\cbodine\Downloads\NewRiver",
121
+ proj_name=None,
122
+ batch=True,
123
+ preserve_subdirs=False,
124
+ prefix="",
125
+ suffix="",
126
+ params=params,
127
+ )
128
+
129
+ print(results)
@@ -0,0 +1,126 @@
1
"""Run a single-recording PING-Mapper job with the dqLog filter enabled.

Developer driver script: edit the hard-coded input/output paths and the
``params`` dict below, then run this file directly.
"""

import sys
from pathlib import Path

# Ensure local repo package is imported when running this script directly.
REPO_ROOT = Path(__file__).resolve().parents[1]
if str(REPO_ROOT) not in sys.path:
    sys.path.insert(0, str(REPO_ROOT))

from pingmapper.doWork import doWork

params = {
    # ------------------------------------------------------------------
    # Project / Runtime
    # ------------------------------------------------------------------
    "project_mode": 1,
    "threadCnt": 0,

    # ------------------------------------------------------------------
    # Survey / Sonar Basics
    # ------------------------------------------------------------------
    "tempC": 12.0,
    "nchunk": 500,
    "cropRange": 0,

    # ------------------------------------------------------------------
    # Navigation + Filtering
    # ------------------------------------------------------------------
    "aoi": False,
    "max_heading_deviation": 0,
    "max_heading_distance": 0,
    "min_speed": 0,
    "max_speed": 0,
    "time_table": False,

    # dqLog event-state filtering
    # Set dq_table and field names to enable; keep values can be list or csv string.
    "dq_table": r"C:\Users\cbodine\Downloads\NewRiver\NewRvr_DataLogger_Cameron_R00066.csv",
    "dq_time_field": "Timestamp..UTC.",
    "dq_flag_field": "DataQuality",
    "dq_keep_values": ["Use"],
    "dq_src_utc_offset": 0.0,
    "dq_target_utc_offset": -4.0,
    "dq_time_offset": 5.0,

    # ------------------------------------------------------------------
    # Input Handling
    # ------------------------------------------------------------------
    "exportUnknown": False,
    "fixNoDat": False,

    # ------------------------------------------------------------------
    # Georeferencing / Resolution
    # ------------------------------------------------------------------
    "pix_res_son": 0.1,
    "pix_res_map": 0,
    "x_offset": 0.0,
    "y_offset": 0.0,

    # ------------------------------------------------------------------
    # Intensity / Tone
    # ------------------------------------------------------------------
    "egn": False,
    "egn_stretch": 0,
    "egn_stretch_factor": 1.0,

    # ------------------------------------------------------------------
    # Sonogram Exports
    # ------------------------------------------------------------------
    "wcp": False,
    "wcm": False,
    "wcr": False,
    "wco": False,
    "sonogram_colorMap": "Greys_r",
    "mask_shdw": False,
    "tileFile": ".png",
    "spdCor": False,
    "maxCrop": False,

    # ------------------------------------------------------------------
    # Depth / Shadows
    # ------------------------------------------------------------------
    "remShadow": 0,
    "detectDep": 0,
    "smthDep": False,
    "adjDep": 0.0,
    "pltBedPick": False,

    # ------------------------------------------------------------------
    # Rectification / Mosaics
    # ------------------------------------------------------------------
    "rect_wcp": False,
    "rect_wcr": False,
    "rubberSheeting": False,
    "rectMethod": "Heading",
    "rectInterpDist": 50,
    "son_colorMap": "Greys",
    "mosaic_nchunk": 0,

    # ------------------------------------------------------------------
    # Substrate Mapping
    # ------------------------------------------------------------------
    "pred_sub": False,
    "pltSubClass": False,
    "map_sub": False,
    "export_poly": False,
    "map_class_method": "max",
    "map_predict": 0,

    # ------------------------------------------------------------------
    # Final Exports
    # ------------------------------------------------------------------
    "mosaic": 0,
    "map_mosaic": 0,
    "banklines": False,
    "coverage": False,
}


def main():
    """Process the hard-coded recording with doWork and print its results."""
    results = doWork(
        in_file=r"C:\Users\cbodine\Downloads\NewRiver\SonarRecording\R00066.DAT",
        out_dir=r"C:\Users\cbodine\Downloads\NewRiver",
        proj_name="FilterTest_take2",
        batch=False,
        params=params,
    )
    print(results)


if __name__ == "__main__":
    # Guard: this module ships inside the installed package, so importing it
    # must not kick off a full (hours-long) sonar-processing run.
    main()
@@ -0,0 +1,203 @@
1
+ """Unit tests for _filterDQ / _applyDQEventState on sonObj."""
2
+
3
+ import io
4
+ import unittest
5
+ import types
6
+ import numpy as np
7
+ import pandas as pd
8
+
9
+ # ---------------------------------------------------------------------------
10
+ # Minimal stub that satisfies _filterDQ without loading real sonar files
11
+ # ---------------------------------------------------------------------------
12
+ from pingmapper.class_sonObj import sonObj
13
+
14
+
15
def _make_stub(son_rows: list[dict]) -> sonObj:
    """Build a bare sonObj whose sonMetaDF contains *son_rows*."""
    # Bypass __init__ — it expects real sonar recordings we don't have in tests.
    stub = sonObj.__new__(sonObj)
    stub.sonMetaDF = pd.DataFrame(son_rows)
    return stub
20
+
21
+
22
+ # ---------------------------------------------------------------------------
23
+ # Helper to write a temporary CSV for dq_table
24
+ # ---------------------------------------------------------------------------
25
+ def _dq_csv(rows: list[dict]) -> str:
26
+ """Serialise rows to a CSV string and return a path via tmp file."""
27
+ import tempfile, os
28
+ df = pd.DataFrame(rows)
29
+ fd, path = tempfile.mkstemp(suffix='.csv')
30
+ df.to_csv(path, index=False)
31
+ os.close(fd)
32
+ return path
33
+
34
+
35
+ # ===========================================================================
36
class TestDQFilterDatetimeOffset(unittest.TestCase):
    """dq_time_offset and UTC offsets shift timestamps before matching."""

    def _run(self, son_ts_list, dq_rows, dq_time_offset, expected_kept_indices,
             dq_src_utc_offset=0.0, dq_target_utc_offset=0.0):
        """Build a stub from 'YYYY-MM-DD HH:MM:SS' strings, run _filterDQ,
        and assert exactly which ping indices survive the filter.

        The UTC-offset kwargs default to 0.0 so existing callers are unchanged.
        """
        stub = _make_stub([
            {'date': ts.split()[0], 'time': ts.split()[1], 'time_s': i + 1}
            for i, ts in enumerate(son_ts_list)
        ])
        stub.sonMetaDF.index = range(len(son_ts_list))

        path = _dq_csv(dq_rows)
        result = stub._filterDQ(
            stub.sonMetaDF,
            dq_table=path,
            dq_time_field='ts',
            dq_flag_field='flag',
            dq_keep_values=['Use'],
            dq_src_utc_offset=dq_src_utc_offset,
            dq_target_utc_offset=dq_target_utc_offset,
            dq_time_offset=dq_time_offset,
        )
        kept = list(result[result['filter_dq'] == True].index)
        self.assertEqual(kept, expected_kept_indices)

    def test_dq_filter_uses_datetime_and_offset(self):
        """Positive offset shifts sonar timestamps forward into a 'Use' block."""
        son_ts = [
            '2024-01-01 00:00:01',
            '2024-01-01 00:00:02',
            '2024-01-01 00:00:03',
        ]
        dq_rows = [
            {'ts': '2024-01-01T00:00:10+00:00', 'flag': 'Use'},
        ]
        # offset +10 → sonar times become 11, 12, 13 — all after the Use event at t=10
        self._run(son_ts, dq_rows, dq_time_offset=10.0, expected_kept_indices=[0, 1, 2])

    def test_dq_filter_uses_datetime_timezone_offsets(self):
        """UTC offset difference is applied to dq timestamps."""
        son_ts = [
            '2024-01-01 00:00:01',
            '2024-01-01 00:00:02',
            '2024-01-01 00:00:03',
        ]
        # DQ stored in UTC+1 (3600 s ahead), no sonar offset.
        dq_rows = [
            {'ts': '2024-01-01T01:00:00+01:00', 'flag': 'Use'},
        ]
        # After tz conversion dq ts == 2024-01-01 00:00:00 UTC;
        # sonar pings at 1,2,3 s are all after the Use event.
        self._run(son_ts, dq_rows, dq_time_offset=0.0,
                  expected_kept_indices=[0, 1, 2],
                  dq_src_utc_offset=1.0,  # DQ was recorded in UTC+1
                  dq_target_utc_offset=0.0)

    def test_dq_filter_handles_fractional_second_sonar_times(self):
        """Sub-second sonar times and a '/'-delimited dq timestamp still match."""
        # Rows have heterogeneous fractional times, so they are built inline
        # rather than through _run (which derives time_s from the index).
        stub = _make_stub([
            {'date': '2025-08-10', 'time': '08:04:07', 'time_s': 0.0},
            {'date': '2025-08-10', 'time': '08:04:07.064000', 'time_s': 0.064},
            {'date': '2025-08-10', 'time': '08:04:07.130000', 'time_s': 0.130},
            {'date': '2025-08-10', 'time': '08:04:07.195000', 'time_s': 0.195},
        ])

        path = _dq_csv([
            {'ts': '2025/08/10 12:04:07', 'flag': 'Use'},
        ])

        result = stub._filterDQ(
            stub.sonMetaDF,
            dq_table=path,
            dq_time_field='ts',
            dq_flag_field='flag',
            dq_keep_values=['Use'],
            dq_src_utc_offset=0.0,
            dq_target_utc_offset=-4.0,
            dq_time_offset=0.0,
        )

        kept = list(result[result['filter_dq'] == True].index)
        self.assertEqual(kept, [0, 1, 2, 3])
130
+
131
+
132
class TestDQFilterNumericTime(unittest.TestCase):
    """Falls back to numeric time_s when datetime parse fails."""

    def test_dq_filter_uses_numeric_time(self):
        # Five pings at 1..5 s, purely numeric (no date/time columns).
        stub = _make_stub([{'time_s': float(t)} for t in range(1, 6)])
        dq_path = _dq_csv([{'ts': 2.5, 'flag': 'Use'}])

        filtered = stub._filterDQ(
            stub.sonMetaDF,
            dq_table=dq_path,
            dq_time_field='ts',
            dq_flag_field='flag',
            dq_keep_values=['Use'],
            dq_src_utc_offset=0.0,
            dq_target_utc_offset=0.0,
            dq_time_offset=0.0,
        )

        # Pings at 1 s and 2 s precede the Use event at 2.5 s → excluded;
        # pings at 3, 4, 5 s fall inside the Use block → kept.
        kept = filtered.index[filtered['filter_dq'] == True].tolist()
        self.assertEqual(kept, [2, 3, 4])
156
+
157
+
158
class TestDQFilterEventStateBlocks(unittest.TestCase):
    """State-block semantics: alternating Use/NoUse transitions."""

    def test_dq_filter_uses_event_state_blocks(self):
        """
        DQ transitions:
            t=2.2 → Use (good)
            t=4.1 → NoUse (bad)
            t=6.4 → Use (good)

        Sonar pings at t=1..7 (numeric).
        Expected kept: pings whose (adjusted) time falls in a Use block
            ping 0 (t=1): before first event → excluded
            ping 1 (t=2): before first event → excluded
            ping 2 (t=3): in [2.2, 4.1) Use → kept
            ping 3 (t=4): in [2.2, 4.1) Use → kept
            ping 4 (t=5): in [4.1, 6.4) NoUse → excluded
            ping 5 (t=6): in [4.1, 6.4) NoUse → excluded
            ping 6 (t=7): in [6.4, ∞) Use → kept
        """
        stub = _make_stub([{'time_s': float(t)} for t in range(1, 8)])

        dq_path = _dq_csv([
            {'ts': 2.2, 'flag': 'Use'},
            {'ts': 4.1, 'flag': 'NoUse'},
            {'ts': 6.4, 'flag': 'Use'},
        ])

        filtered = stub._filterDQ(
            stub.sonMetaDF,
            dq_table=dq_path,
            dq_time_field='ts',
            dq_flag_field='flag',
            dq_keep_values=['Use'],
            dq_src_utc_offset=0.0,
            dq_target_utc_offset=0.0,
            dq_time_offset=0.0,
        )

        kept = filtered.index[filtered['filter_dq'] == True].tolist()
        self.assertEqual(kept, [2, 3, 6])
200
+
201
+
202
# Allow running this test module directly (outside a pytest/unittest runner).
if __name__ == '__main__':
    unittest.main()
@@ -0,0 +1 @@
1
# Single source of the package version; presumably mirrored into the
# distribution metadata at build time — verify against pyproject/setup config.
__version__ = '5.4.0'
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pingmapper
3
- Version: 5.3.7
3
+ Version: 5.4.0
4
4
  Summary: Open-source interface for processing recreation-grade side scan sonar datasets and reproducibly mapping benthic habitat
5
5
  Author: Daniel Buscombe
6
6
  Author-email: Cameron Bodine <bodine.cs@gmail.email>
@@ -17,7 +17,10 @@ pingmapper/gui_main.py
17
17
  pingmapper/main_mapSubstrate.py
18
18
  pingmapper/main_readFiles.py
19
19
  pingmapper/main_rectify.py
20
+ pingmapper/nonGUI_batch_main.py
21
+ pingmapper/nonGui_main.py
20
22
  pingmapper/test_PINGMapper.py
23
+ pingmapper/test_dq_filter.py
21
24
  pingmapper/test_time.py
22
25
  pingmapper/version.py
23
26
  pingmapper.egg-info/PKG-INFO
@@ -1 +0,0 @@
1
- __version__ = '5.3.7'
File without changes
File without changes
File without changes
File without changes