pingmapper 5.0.5-py3-none-any.whl → 5.0.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2335 @@
1
+ # Part of PING-Mapper software
2
+ #
3
+ # GitHub: https://github.com/CameronBodine/PINGMapper
4
+ # Website: https://cameronbodine.github.io/PINGMapper/
5
+ #
6
+ # Co-Developed by Cameron S. Bodine and Dr. Daniel Buscombe
7
+ #
8
+ # Inspired by PyHum: https://github.com/dbuscombe-usgs/PyHum
9
+ #
10
+ # MIT License
11
+ #
12
+ # Copyright (c) 2025 Cameron S. Bodine
13
+ #
14
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
15
+ # of this software and associated documentation files (the "Software"), to deal
16
+ # in the Software without restriction, including without limitation the rights
17
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
18
+ # copies of the Software, and to permit persons to whom the Software is
19
+ # furnished to do so, subject to the following conditions:
20
+ #
21
+ # The above copyright notice and this permission notice shall be included in all
22
+ # copies or substantial portions of the Software.
23
+ #
24
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
25
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
26
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
27
+ # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
28
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
29
+ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
30
+ # SOFTWARE.
31
+
32
+ import os, sys
33
+
34
+ # Add 'pingmapper' to the path; may not be needed once installed as a PyPI package
35
+ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
36
+ PACKAGE_DIR = os.path.dirname(SCRIPT_DIR)
37
+ sys.path.append(PACKAGE_DIR)
38
+
39
+ from pingmapper.funcs_common import *
40
+
41
+ class sonObj(object):
42
+ '''
43
+ Python class to store everything related to reading and exporting data from
44
+ Humminbird sonar recordings.
45
+
46
+ ----------------
47
+ Class Attributes
48
+ ----------------
49
+ * Alphabetical order *
50
+ self.beam : str
51
+ DESCRIPTION - Beam number B***
52
+
53
+ self.beamName : str
54
+ DESCRIPTION - Name of sonar beam.
55
+
56
+ self.datLen : int
57
+ DESCRIPTION - Number of bytes in DAT file.
58
+
59
+ self.datMetaFile : str
60
+ DESCRIPTION - Path to .DAT metadata file (.csv).
61
+
62
+ self.headBytes : int
63
+ DESCRIPTION - Number of header bytes for a ping.
64
+
65
+ self.headIdx : list
66
+ DESCRIPTION - List to hold byte index (offset) of each ping.
67
+
68
+ self.headStruct : dict
69
+ DESCRIPTION - Dictionary to store ping header structure.
70
+
71
+ self.headValid : bool
72
+ DESCRIPTION - Flag indicating if SON header structure is correct.
73
+
74
+ self.humDat : dict
75
+ DESCRIPTION - Dictionary to store .DAT file contents.
76
+
77
+ self.humDatStruct : dict
78
+ DESCRIPTION - Dictionary to store .DAT file structure.
79
+
80
+ self.humFile : str
81
+ DESCRIPTION - Path to .DAT file.
82
+
83
+ self.isOnix : bool
84
+ DESCRIPTION - Flag indicating if sonar recording from ONIX.
85
+
86
+ self.metaDir : str
87
+ DESCRIPTION - Path to metadata directory.
88
+
89
+ self.nchunk : int
90
+ DESCRIPTION - Number of pings/sonar records per chunk.
91
+
92
+ self.outDir : str
93
+ DESCRIPTION - Path where outputs are saved.
94
+
95
+ self.pingCnt : int
96
+ DESCRIPTION - Number of ping returns for each ping.
97
+
98
+ self.pingMax : int
99
+ DESCRIPTION - Stores largest pingCnt value (max range) for a currently
100
+ loaded sonar chunk.
101
+
102
+ self.projDir : str
103
+ DESCRIPTION - Path (top level) to output directory.
104
+
105
+ self.sonDat : arr
106
+ DESCRIPTION - Array to hold ping returns for the currently
107
+ loaded chunk.
108
+
109
+ self.sonFile : str
110
+ DESCRIPTION - Path to .SON file.
111
+
112
+ self.sonIdxFile : str
113
+ DESCRIPTION - Path to .IDX file.
114
+
115
+ self.sonMetaDF : DataFrame
116
+ DESCRIPTION - Pandas dataframe to store .SON metadata.
117
+
118
+ self.sonMetaFile : str
119
+ DESCRIPTION - Path to .SON metadata file (.csv).
120
+
121
+ self.sonMetaPickle : str
122
+ DESCRIPTION - Path to .SON metadata pickle file (.meta).
123
+
124
+ self.wcr : bool
125
+ DESCRIPTION - Flag to export non-rectified sonar tiles w/ water column
126
+ removed (wcr) & slant range corrected.
127
+
128
+ self.tempC : float
129
+ DESCRIPTION - Water temperature (Celsius) during survey divided by 10.
130
+
131
+ self.trans : non-class function
132
+ DESCRIPTION - Function to convert UTM to lat/lon.
133
+
134
+ self.wcp : bool
135
+ DESCRIPTION - Flag to export non-rectified sonar tiles w/ water column
136
+ present (wcp).
137
+ '''
138
+
139
+ #===========================================================================
140
+ def __init__(self,
141
+ sonFile,
142
+ humFile,
143
+ projDir,
144
+ tempC=0.1,
145
+ nchunk=500,
146
+ pH=8.0):
147
+ '''
148
+ Initialize a sonObj instance.
149
+
150
+ ----------
151
+ Parameters
152
+ ----------
153
+ sonFile : str
154
+ DESCRIPTION - Path to .SON file.
155
+ EXAMPLE - sonFile = 'C:/PINGMapper/SonarRecordings/R00001/B002.SON'
156
+ humFile : str
157
+ DESCRIPTION - Path to .DAT file associated w/ .SON directory.
158
+ EXAMPLE - humFile = 'C:/PINGMapper/SonarRecordings/R00001.DAT'
159
+ projDir : str
160
+ DESCRIPTION - Path to output directory.
161
+ EXAMPLE - projDir = 'C:/PINGMapper/procData/R00001'
162
+ tempC : float : [Default=0.1]
163
+ DESCRIPTION - Water temperature (Celsius) during survey divided by 10.
164
+ EXAMPLE - tempC = (20/10)
165
+ nchunk : int : [Default=500]
166
+ DESCRIPTION - Number of pings per chunk. Chunk size dictates size of
167
+ sonar tiles (sonograms). Most testing has been on chunk
168
+ sizes of 500 (recommended).
169
+ EXAMPLE - nchunk = 500
170
+ pH : float : [Default=8.0]
171
+ DESCRIPTION - pH of the water during sonar survey. Used in the phase
172
+ preserving filtering of high dynamic range images.
173
+ EXAMPLE - pH = 8
174
+
175
+ -------
176
+ Returns
177
+ -------
178
+ sonObj instance.
179
+ '''
180
+ # Create necessary attributes
181
+ self.sonFile = sonFile # SON file path
182
+ self.projDir = projDir # Project directory
183
+ self.humFile = humFile # DAT file path
184
+ self.tempC = tempC # Water temperature
185
+ self.nchunk = nchunk # Number of sonar records per chunk
186
+ self.pH = pH # Water pH during survey
187
+
188
+ return
189
+
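A minimal construction sketch for reference, assuming the package is installed; the paths are the hypothetical examples from the docstring above, and the module path pingmapper.class_sonObj is an assumption, not confirmed by this diff:

from pingmapper.class_sonObj import sonObj   # module name assumed

# Hypothetical recording paths taken from the EXAMPLE lines in the docstring
son = sonObj(sonFile='C:/PINGMapper/SonarRecordings/R00001/B002.SON',
             humFile='C:/PINGMapper/SonarRecordings/R00001.DAT',
             projDir='C:/PINGMapper/procData/R00001',
             tempC=20/10,    # water temperature in Celsius divided by 10
             nchunk=500,
             pH=8.0)
print(son)                   # __str__ lists every attribute currently set on the object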
190
+ ############################################################################
191
+ # Decode DAT file (varies by model) #
192
+ ############################################################################
193
+
194
+ # ======================================================================
195
+ def _fread(self,
196
+ infile,
197
+ num,
198
+ typ):
199
+ '''
200
+ Helper function that reads binary data in a file.
201
+
202
+ ----------------------------
203
+ Required Pre-processing step
204
+ ----------------------------
205
+ Called from self._getHumDat(), self._cntHead(), self._decodeHeadStruct(),
206
+ self._getSonMeta(), self._loadSonChunk()
207
+
208
+ ----------
209
+ Parameters
210
+ ----------
211
+ infile : file
212
+ DESCRIPTION - A binary file opened in read mode at a pre-specified
213
+ location.
214
+ num : int
215
+ DESCRIPTION - Number of bytes to read.
216
+ typ : type
217
+ DESCRIPTION - Byte type
218
+
219
+ -------
220
+ Returns
221
+ -------
222
+ List of decoded binary data
223
+
224
+ --------------------
225
+ Next Processing Step
226
+ --------------------
227
+ Returns list to function it was called from.
228
+ '''
229
+ dat = arr(typ)
230
+ dat.fromfile(infile, num)
231
+ return(list(dat))
232
+
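_fread is a thin wrapper around Python's built-in array module; a self-contained sketch of the same read pattern on a throwaway file (file name and values are illustrative only):

from array import array

with open('demo.bin', 'wb') as f:                # write four unsigned 32-bit ints
    array('I', [1, 2, 3, 4]).tofile(f)

with open('demo.bin', 'rb') as f:
    dat = array('I')                             # 'typ' in _fread, e.g. 'B' for single bytes
    dat.fromfile(f, 4)                           # 'num' items read from the current offset
print(list(dat))                                 # [1, 2, 3, 4]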
233
+ #=======================================================================
234
+ def _saveSonMetaCSV(self, sonMetaAll):
235
+ # Write metadata to csv
236
+ if not hasattr(self, 'sonMetaFile'):
237
+ outCSV = os.path.join(self.metaDir, self.beam+"_"+self.beamName+"_meta.csv")
238
+ sonMetaAll.to_csv(outCSV, index=False, float_format='%.14f')
239
+ self.sonMetaFile = outCSV
240
+ else:
241
+ sonMetaAll.to_csv(self.sonMetaFile, index=False, float_format='%.14f')
242
+
243
+
244
+ ############################################################################
245
+ # Filter sonar recording from user params #
246
+ ############################################################################
247
+
248
+ # ======================================================================
249
+ def _doSonarFiltering(self,
250
+ max_heading_dev,
251
+ distance,
252
+ min_speed,
253
+ max_speed,
254
+ aoi,
255
+ time_table,
256
+ ):
257
+ '''
258
+ '''
259
+ #################
260
+ # Get metadata df
261
+ self._loadSonMeta()
262
+ sonDF = self.sonMetaDF
263
+
264
+ # print('len', len(sonDF))
265
+ # print(sonDF)
266
+
267
+ #############################
268
+ # Do Heading Deviation Filter
269
+ if max_heading_dev > 0:
270
+ sonDF = self._filterHeading(sonDF, max_heading_dev, distance)
271
+
272
+ #################
273
+ # Do Speed Filter
274
+ if min_speed > 0 or max_speed > 0:
275
+ sonDF = self._filterSpeed(sonDF, min_speed, max_speed)
276
+
277
+ ###############
278
+ # Do AOI Filter
279
+ if aoi:
280
+ sonDF = self._filterAOI(sonDF, aoi)
281
+
282
+ #############
283
+ # Time Filter
284
+ if time_table:
285
+ sonDF = self._filterTime(sonDF, time_table)
286
+
287
+ return sonDF
288
+
289
+ # ======================================================================
290
+ def _filterShortTran(self, df):
291
+
292
+ '''
293
+ '''
294
+
295
+ # Make transects from consecutive pings using dataframe index
296
+ idx = df.index.values
297
+ transect_groups = np.split(idx, np.where(np.diff(idx) != 1)[0]+1)
298
+
299
+ for t in transect_groups:
300
+ if len(t) < self.nchunk:
301
+ # False means remove
302
+ df.loc[t, 'filter'] = False
303
+
304
+ return df
305
+
306
+
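The np.split/np.diff idiom above is how consecutive dataframe indices are grouped into transects; a tiny standalone illustration with made-up index values:

import numpy as np

idx = np.array([0, 1, 2, 5, 6, 9])                          # gaps after 2 and 6
groups = np.split(idx, np.where(np.diff(idx) != 1)[0] + 1)
print(groups)                                               # [array([0, 1, 2]), array([5, 6]), array([9])]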
307
+ # ======================================================================
308
+ def _filterHeading(self,
309
+ df,
310
+ dev,
311
+ d,
312
+ ):
313
+ '''
314
+ '''
315
+
316
+ #######
317
+ # Setup
318
+
319
+ # convert dev to radians
320
+ dev = np.deg2rad(dev)
321
+
322
+ # Set Fields for Filtering
323
+ trk_dist = 'trk_dist' # Along track distance
324
+ head = 'instr_heading' # Heading reported by instrument
325
+ filtCol = 'filter'
326
+
327
+ # Get max distance
328
+ max_dist = df[trk_dist].max()
329
+
330
+ # Set counters
331
+ win = 1 # stride of moving window
332
+ dist_start = 0 # Counter for beginning of current window
333
+ dist_end = dist_start + d # Counter for end of current window
334
+
335
+ df[filtCol] = False
336
+
337
+ ##############################
338
+ # Iterate through each window
339
+
340
+ # Compare heading deviation from first and last ping for current window
341
+ while dist_end < max_dist:
342
+
343
+ # Filter df by window
344
+ dfFilt = df[(df[trk_dist] >= dist_start) & (df[trk_dist] < dist_end)]
345
+
346
+ dfFilt[head] = np.deg2rad(dfFilt[head])
347
+ # Unwrap the heading because heading is circular
348
+ dfFilt[head] = np.unwrap(dfFilt[head])
349
+
350
+ if len(dfFilt) > 0:
351
+
352
+ # Get difference between start and end heading
353
+ start = dfFilt[head].iloc[0]
354
+ end = dfFilt[head].iloc[-1]
355
+ vessel_dev = np.abs(start - end)
356
+
357
+ # Compare vessel deviation to threshold deviation
358
+ if vessel_dev < dev:
359
+ # Keep these pings
360
+ df[filtCol].loc[dfFilt.index] = True
361
+
362
+ # dist_start += win
363
+ # dist_start = dist_end
364
+
365
+ else:
366
+ # dist_start = dist_end
367
+
368
+ # df[filtCol].loc[dfFilt.index] = False
369
+
370
+ # dist_start += win
371
+
372
+ pass
373
+
374
+
375
+ dist_start = dist_end
376
+ dist_end = dist_start + d
377
+
378
+ try:
379
+ return df
380
+ except:
381
+ sys.exit('\n\n\nERROR:\nMax heading deviation too small.\nPlease specify a larger value.')
382
+
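Heading is circular, so the filter unwraps it before differencing the first and last ping of a window; a short sketch of why that matters, with arbitrary example headings:

import numpy as np

head_deg = np.array([358.0, 359.0, 1.0, 2.0])       # vessel holds course across 0/360
head_rad = np.deg2rad(head_deg)

naive_dev = np.abs(head_rad[0] - head_rad[-1])      # ~6.21 rad, looks like a huge turn
unwrapped = np.unwrap(head_rad)
true_dev = np.abs(unwrapped[0] - unwrapped[-1])     # ~0.07 rad, i.e. 4 degrees
print(np.rad2deg(naive_dev), np.rad2deg(true_dev))  # ~356.0 vs ~4.0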
383
+ # ======================================================================
384
+ def _filterTime(self,
385
+ sonDF,
386
+ time_table):
387
+
388
+ '''
389
+ '''
390
+
391
+ time_col = 'time_s'
392
+ filtTimeCol = 'filter_time'
393
+ filtCol = 'filter'
394
+
395
+ sonDF[filtTimeCol] = False
396
+
397
+ if not filtCol in sonDF.columns:
398
+ sonDF[filtCol] = True
399
+
400
+ time_table = pd.read_csv(time_table)
401
+
402
+ for i, row in time_table.iterrows():
403
+
404
+ start = row['start_seconds']
405
+ end = row['end_seconds']
406
+
407
+ # dfFilt = sonDF[(sonDF['time_s'] >= start) & (sonDF['time_s'] <= end)]
408
+ sonDF.loc[(sonDF[time_col] >= start) & (sonDF[time_col] <= end) & (sonDF[filtCol] == True), filtTimeCol] = True
409
+
410
+ sonDF[filtCol] *= sonDF[filtTimeCol]
411
+
412
+ return sonDF
413
+
414
+
415
+ # ======================================================================
416
+ def _filterSpeed(self,
417
+ sonDF,
418
+ min_speed,
419
+ max_speed):
420
+
421
+ '''
422
+
423
+ '''
424
+
425
+ speed_col = 'speed_ms'
426
+ filtCol = 'filter'
427
+
428
+ if not filtCol in sonDF.columns:
429
+ sonDF[filtCol] = True
430
+
431
+ # Filter min_speed
432
+ if min_speed > 0:
433
+ # sonDF = sonDF[sonDF['speed_ms'] >= min_speed]
434
+ sonDF.loc[sonDF[speed_col] < min_speed, filtCol] = False
435
+
436
+ # Filter max_speed
437
+ if max_speed > 0:
438
+ # sonDF = sonDF[sonDF['speed_ms'] <= max_speed]
439
+ sonDF.loc[sonDF[speed_col] > max_speed, filtCol] = False
440
+
441
+ return sonDF
442
+
443
+ # ======================================================================
444
+ def _filterAOI(self,
445
+ sonDF,
446
+ aoi):
447
+
448
+ filtCol = 'filter'
449
+
450
+ if not filtCol in sonDF.columns:
451
+ sonDF[filtCol] = True
452
+
453
+ # If .plan file (from Hydronaulix)
454
+ if os.path.basename(aoi.split('.')[-1]) == 'plan':
455
+ with open(aoi, 'r', encoding='utf-8') as f:
456
+ f = json.load(f)
457
+ # Find 'polygon' coords in nested json
458
+ # polys = []
459
+ # poly_coords = getPolyCoords(f, 'polygon')
460
+ # print(poly_coords)
461
+
462
+ f = f['mission']
463
+ f = f['items']
464
+ poly_coords = []
465
+ for i in f:
466
+ for k, v in i.items():
467
+ if k == 'polygon':
468
+ poly_coords.append(v)
469
+
470
+ aoi_poly_all = gpd.GeoDataFrame()
471
+
472
+ for poly in poly_coords:
473
+
474
+ # Extract coordinates
475
+ lat_coords = [i[0] for i in poly]
476
+ lon_coords = [i[1] for i in poly]
477
+
478
+ polygon_geom = Polygon(zip(lon_coords, lat_coords))
479
+ aoi_poly = gpd.GeoDataFrame(index=[0], crs='epsg:4326', geometry=[polygon_geom])
480
+
481
+ aoi_poly_all = pd.concat([aoi_poly_all, aoi_poly], ignore_index=True)
482
+
483
+ # If shapefile
484
+ elif os.path.basename(aoi.split('.')[-1]) == 'shp':
485
+ aoi_poly_all = gpd.read_file(aoi)
486
+
487
+ else:
488
+ print(os.path.basename(aoi), 'is not a valid aoi file type.')
489
+ sys.exit()
490
+
491
+ # Reproject to utm
492
+ epsg = int(self.humDat['epsg'].split(':')[-1])
493
+ aoi_poly = aoi_poly_all.to_crs(crs=epsg)
494
+ aoi_poly = aoi_poly.dissolve()
495
+
496
+ # Buffer aoi
497
+ if os.path.basename(aoi.split('.')[-1]) == 'plan':
498
+ buf_dist = 0.5
499
+ aoi_poly['geometry'] = aoi_poly.geometry.buffer(buf_dist)
500
+
501
+ # Save aoi
502
+ aoi_dir = os.path.join(self.projDir, 'aoi')
503
+ aoiOut = os.path.basename(self.projDir) + '_aoi.shp'
504
+ if not os.path.exists(aoi_dir):
505
+ os.makedirs(aoi_dir)
506
+
507
+ aoiOut = os.path.join(aoi_dir, aoiOut)
508
+ aoi_poly.to_file(aoiOut)
509
+
510
+ # Convert to geodataframe
511
+ epsg = int(self.humDat['epsg'].split(':')[-1])
512
+ sonDF = gpd.GeoDataFrame(sonDF, geometry=gpd.points_from_xy(sonDF.e, sonDF.n), crs=epsg)
513
+
514
+ # Get polygon
515
+ aoi_poly = aoi_poly.geometry[0]
516
+
517
+ # Subset
518
+ mask = sonDF.within(aoi_poly)
519
+ sonDF[filtCol] *= mask
520
+
521
+ return sonDF
522
+
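A minimal sketch of the point-in-polygon test used above, with made-up easting/northing values and an arbitrary UTM EPSG code:

import geopandas as gpd
import pandas as pd
from shapely.geometry import Polygon

pts = pd.DataFrame({'e': [500010, 500500, 502000], 'n': [4500010, 4500500, 4502000]})
pts = gpd.GeoDataFrame(pts, geometry=gpd.points_from_xy(pts.e, pts.n), crs=32615)

aoi = Polygon([(500000, 4500000), (501000, 4500000), (501000, 4501000), (500000, 4501000)])
print(pts.within(aoi))      # True, True, False -> multiplied into the 'filter' column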
523
+ # ======================================================================
524
+ def _reassignChunks(self,
525
+ sonDF):
526
+
527
+ #################
528
+ # Reassign Chunks
529
+ nchunk = self.nchunk
530
+
531
+ # Make transects from consecutive pings using dataframe index
532
+ idx = sonDF.index.values
533
+ transect_groups = np.split(idx, np.where(np.diff(idx) != 1)[0]+1)
534
+
535
+
536
+ # Assign transect
537
+ transect = 0
538
+ for t in transect_groups:
539
+ sonDF.loc[sonDF.index>=t[0], 'transect'] = transect
540
+ transect += 1
541
+
542
+ # Set chunks
543
+ lastChunk = 0
544
+ newChunk = []
545
+ for name, group in sonDF.groupby('transect'):
546
+
547
+ if (len(group)%nchunk) != 0:
548
+ rdr = nchunk-(len(group)%nchunk)
549
+ chunkCnt = int(len(group)/nchunk)
550
+ chunkCnt += 1
551
+ else:
552
+ rdr = False
553
+ chunkCnt = int(len(group)/nchunk)
554
+
555
+ chunks = np.arange(chunkCnt) + lastChunk
556
+ chunks = np.repeat(chunks, nchunk)
557
+
558
+ if rdr:
559
+ chunks = chunks[:-rdr]
560
+
561
+ newChunk += list(chunks)
562
+ lastChunk = chunks[-1] + 1
563
+ del chunkCnt
564
+
565
+ sonDF['chunk_id'] = newChunk
566
+
567
+ # self._saveSonMetaCSV(sonDF)
568
+ # self._cleanup()
569
+
570
+ return sonDF
571
+
572
+
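A small worked example of the chunk bookkeeping above, using hypothetical counts: each chunk id is repeated nchunk times and the overhang from the last partial chunk is trimmed off:

import numpy as np

nchunk, lastChunk = 500, 3          # e.g. the previous transect ended on chunk 2
group_len = 1234                    # pings in the current transect

rdr = nchunk - (group_len % nchunk) if group_len % nchunk else 0    # 266 ids to trim
chunkCnt = group_len // nchunk + (1 if rdr else 0)                  # 3 chunks

chunks = np.repeat(np.arange(chunkCnt) + lastChunk, nchunk)
if rdr:
    chunks = chunks[:-rdr]
print(chunks[0], chunks[-1], len(chunks))                           # 3 5 1234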
573
+ ############################################################################
574
+ # Fix corrupt recording w/ missing pings #
575
+ ############################################################################
576
+
577
+ # ======================================================================
578
+ def _fixNoDat(self, dfA, beams):
579
+ # Empty dataframe to store final results
580
+ df = pd.DataFrame(columns = dfA.columns)
581
+
582
+ # For tracking beam presence
583
+ b = defaultdict()
584
+ bCnt = 0
585
+ for i in beams:
586
+ b[i] = np.nan
587
+ bCnt+=1
588
+ del i
589
+
590
+ c = 0 # Current row index
591
+
592
+ while ((c) < len(dfA)):
593
+
594
+ cRow = dfA.loc[[c]]
595
+
596
+ # Check if b[beam] already holds a row index; if it does, we found the end of a 'ping packet':
597
+ ## add unfound beams as NoData to ping packet
598
+ if ~np.isnan(b[cRow['beam'].values[0]]):
599
+ # Add valid data to df
600
+ noDat = []
601
+ for k, v in b.items():
602
+ # Store valid data in df
603
+ if ~np.isnan(v):
604
+ df = pd.concat([df,dfA.loc[[v]]], ignore_index=True)
605
+ # Add beam to noDat list
606
+ else:
607
+ noDat.append(k)
608
+
609
+ # Duplicate valid data for missing rows. Remove unnecessary values.
610
+ for beam in noDat:
611
+ df = pd.concat([df, df.iloc[[-1]]], ignore_index=True)
612
+ # df.iloc[-1, df.columns.get_loc('record_num')] = np.nan
613
+ df.iloc[-1, df.columns.get_loc('index')] = np.nan
614
+ df.iloc[-1, df.columns.get_loc('volt_scale')] = np.nan
615
+ df.iloc[-1, df.columns.get_loc('f')] = np.nan
616
+ # df.iloc[-1, df.columns.get_loc('ping_cnt')] = np.nan
617
+ df.iloc[-1, df.columns.get_loc('beam')] = beam
618
+ del beam
619
+ del noDat
620
+
621
+ # reset b
622
+ for k, v in b.items():
623
+ b.update({k:np.nan})
624
+ del k, v
625
+
626
+ else:
627
+ # Add c idx to b and keep searching for beams in current packet
628
+ b[cRow['beam'].values[0]] = c
629
+ c+=1
630
+
631
+ del beams, dfA, cRow, bCnt, c, b
632
+
633
+ return df
634
+
635
+ ############################################################################
636
+ # Export un-rectified sonar tiles #
637
+ ############################################################################
638
+
639
+ # ==========================================================================
640
+ def _exportTiles(self,
641
+ chunk,
642
+ tileFile):
643
+ '''
644
+ Main function to read sonar record ping return values. Stores the
645
+ number of pings per chunk, chunk id, and byte index location in son file,
646
+ then calls self._loadSonChunk() to read the data into memory, then calls
647
+ self._writeTiles to save an unrectified image.
648
+
649
+ ----------------------------
650
+ Required Pre-processing step
651
+ ----------------------------
652
+ self._getSonMeta()
653
+
654
+ -------
655
+ Returns
656
+ -------
657
+ *.PNG un-rectified sonar tiles (sonograms)
658
+
659
+ --------------------
660
+ Next Processing Step
661
+ --------------------
662
+ NA
663
+ '''
664
+ filterIntensity = False
665
+
666
+ # Make sonar imagery directory for each beam if it doesn't exist
667
+ try:
668
+ os.mkdir(self.outDir)
669
+ except:
670
+ pass
671
+
672
+ # Filter sonMetaDF by chunk
673
+ isChunk = self.sonMetaDF['chunk_id']==chunk
674
+ sonMeta = self.sonMetaDF[isChunk].copy().reset_index()
675
+
676
+ # Update class attributes based on current chunk
677
+ self.pingMax = np.nanmax(sonMeta['ping_cnt']) # store to determine max range per chunk
678
+ # self.headIdx = sonMeta['index'] # store byte offset per ping
679
+ # self.pingCnt = sonMeta['ping_cnt'] # store ping count per ping
680
+
681
+ if ~np.isnan(self.pingMax):
682
+ # Load chunk's sonar data into memory
683
+ # self._loadSonChunk()
684
+ self._getScanChunkSingle(chunk)
685
+
686
+ # Remove shadows
687
+ if self.remShadow:
688
+ # Get mask
689
+ self._SHW_mask(chunk)
690
+
691
+ # Mask out shadows
692
+ self.sonDat = self.sonDat*self.shadowMask
693
+
694
+
695
+ # Export water column present (wcp) image
696
+ if self.wcp:
697
+ son_copy = self.sonDat.copy()
698
+ # self._doPPDRC()
699
+
700
+ # egn
701
+ if self.egn:
702
+ self._egn_wcp(chunk, sonMeta)
703
+
704
+ if self.egn_stretch > 0:
705
+ self._egnDoStretch()
706
+
707
+ self._writeTiles(chunk, imgOutPrefix='wcp', tileFile=tileFile) # Save image
708
+
709
+ self.sonDat = son_copy
710
+ del son_copy
711
+
712
+ # Export slant range corrected (water column removed) imagery
713
+ if self.wcr_src:
714
+ self._WCR_SRC(sonMeta) # Remove water column and redistribute ping returns based on FlatBottom assumption
715
+
716
+ # self._doPPDRC()
717
+
718
+ # Empirical gain normalization
719
+ if self.egn:
720
+ self._egn()
721
+ self.sonDat = np.nan_to_num(self.sonDat, nan=0)
722
+
723
+ if self.egn_stretch > 0:
724
+ self._egnDoStretch()
725
+
726
+ self._writeTiles(chunk, imgOutPrefix='wcr', tileFile=tileFile) # Save image
727
+
728
+ gc.collect()
729
+ return #self
730
+
731
+
732
+ # ==========================================================================
733
+ def _loadSonChunk(self):
734
+ '''
735
+ Reads ping returns into memory based on byte index location in son file
736
+ and number of pings to return.
737
+
738
+ ----------------------------
739
+ Required Pre-processing step
740
+ ----------------------------
741
+ Called from self._getScanChunkALL() or self._getScanChunkSingle()
742
+
743
+ -------
744
+ Returns
745
+ -------
746
+ 2-D numpy array containing sonar intensity
747
+
748
+ --------------------
749
+ Next Processing Step
750
+ --------------------
751
+ Return numpy array to self._getScanChunkALL() or self._getScanChunkSingle()
752
+ '''
753
+
754
+ sonDat = np.zeros((int(self.pingMax), len(self.pingCnt))).astype(int) # Initialize array to hold sonar returns
755
+ file = open(self.sonFile, 'rb') # Open .SON file
756
+
757
+ for i in range(len(self.headIdx)):
758
+ if ~np.isnan(self.headIdx[i]):
759
+ ping_len = min(self.pingCnt[i].astype(int), self.pingMax)
760
+
761
+
762
+ # #### Do not commit!!!!
763
+ # # if self.beamName == 'ss_star' or self.beamName == 'ss_port':
764
+ # # ping_len *= 2
765
+ if not self.son8bit:
766
+ ping_len *= 2
767
+
768
+ headIDX = self.headIdx[i].astype(int)
769
+ son_offset = self.son_offset[i].astype(int)
770
+ # pingIdx = headIDX + self.headBytes # Determine byte offset to sonar returns
771
+ pingIdx = headIDX + son_offset
772
+
773
+ file.seek(pingIdx) # Move to that location
774
+
775
+ # Get the ping
776
+ buffer = file.read(ping_len)
777
+
778
+ if self.flip_port:
779
+ buffer = buffer[::-1]
780
+
781
+ # Read the data
782
+ if self.son8bit:# and self.beamName != 'ss_star' and self.beamName != 'ss_port':
783
+ dat = np.frombuffer(buffer, dtype='>u1')
784
+ else:
785
+ try:
786
+ dat = np.frombuffer(buffer, dtype='>u2')
787
+ except:
788
+ dat = np.frombuffer(buffer[:-1], dtype='>u2')
789
+
790
+ try:
791
+ sonDat[:ping_len, i] = dat
792
+ except:
793
+ ping_len = len(dat)
794
+ sonDat[:ping_len, i] = dat
795
+
796
+ file.close()
797
+ self.sonDat = sonDat.astype(np.uint8)
798
+ return
799
+
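A minimal sketch of the byte decoding used in _loadSonChunk, with a fabricated buffer instead of a .SON file: 8-bit recordings are read as '>u1', everything else as big-endian 16-bit '>u2':

import numpy as np

buffer = bytes([0, 10, 0, 200, 1, 44])           # six raw bytes from a hypothetical ping

as_8bit = np.frombuffer(buffer, dtype='>u1')     # [  0  10   0 200   1  44]
as_16bit = np.frombuffer(buffer, dtype='>u2')    # [ 10 200 300]
print(as_8bit, as_16bit)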
800
+ # ======================================================================
801
+ def _WC_mask(self, i, son=True):
802
+ '''
803
+
804
+ '''
805
+ # Get sonMeta
806
+ if not hasattr(self, 'sonMetaDF'):
807
+ self._loadSonMeta()
808
+
809
+ if son:
810
+ # self._loadSonMeta()
811
+ self._getScanChunkSingle(i)
812
+
813
+ # Filter sonMetaDF by chunk
814
+ isChunk = self.sonMetaDF['chunk_id']==i
815
+ sonMeta = self.sonMetaDF[isChunk].copy().reset_index()
816
+
817
+ # Load depth (in real units) and convert to pixels
818
+ # bedPick = round(sonMeta['dep_m'] / sonMeta['pix_m'], 0).astype(int)
819
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int)
820
+ minDep = min(bedPick)
821
+
822
+ del sonMeta, self.sonMetaDF
823
+
824
+ # Make zero mask
825
+ wc_mask = np.zeros((self.sonDat.shape))
826
+
827
+ # Fill non-wc pixels with 1
828
+ for p, s in enumerate(bedPick):
829
+ wc_mask[s:, p] = 1
830
+
831
+ self.wcMask = wc_mask
832
+ self.minDep = minDep
833
+ self.bedPick = bedPick
834
+
835
+ return
836
+
837
+
838
+
839
+ # ======================================================================
840
+ def _WCR_SRC(self, sonMeta, son=True):
841
+ '''
842
+ Slant range correction is the process of relocating sonar returns after
843
+ water column removal by converting slant range distances to the bed into
844
+ horizontal distances based on the depth at nadir. As SSS does not
845
+ measure depth across the track, we must assume depth is constant across
846
+ the track (flat-bottom assumption). The Pythagorean theorem is used
847
+ to calculate horizontal distance from slant range distance and depth at
848
+ nadir.
849
+
850
+ ----------
851
+ Parameters
852
+ ----------
853
+ sonMeta : DataFrame
854
+ DESCRIPTION - Dataframe containing ping metadata.
855
+
856
+ ----------------------------
857
+ Required Pre-processing step
858
+ ----------------------------
859
+ Called from self._getScanChunkALL() or self._getScanChunkSingle()
860
+
861
+ -------
862
+ Returns
863
+ -------
864
+ Self w/ array of relocated intensities stored in self.sonDat.
865
+
866
+ --------------------
867
+ Next Processing Step
868
+ --------------------
869
+ Returns relocated bed intensities to self._getScanChunkALL() or
870
+ self._getScanChunkSingle()
871
+ '''
872
+ # Load depth (in real units) and convert to pixels
873
+ # bedPick = round(sonMeta['dep_m'] / sonMeta['pix_m'], 0).astype(int)
874
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int).reset_index(drop=True)
875
+
876
+ # Get max depth
877
+ maxDep = max(bedPick)
878
+ maxGap = int(round(np.tan(np.deg2rad(4)) * maxDep, 0)) # Max nadir gap (assume 4 degrees)
879
+
880
+ # Initialize 2d array to store relocated sonar records
881
+ srcDat = np.zeros((self.sonDat.shape[0]+maxGap, self.sonDat.shape[1])).astype(np.float32)#.astype(int)
882
+
883
+ print(srcDat.shape, maxGap, self.sonDat.shape)
884
+
885
+ #Iterate each ping
886
+ for j in range(self.sonDat.shape[1]):
887
+ depth = bedPick[j] # Get depth (in pixels) at nadir
888
+ dd = depth**2
889
+
890
+ # Calculate gap at nadir (assume 4 degrees)
891
+ nadirGap = int(round(np.tan(np.deg2rad(4)) * depth, 0))
892
+
893
+ print(depth, nadirGap)
894
+
895
+ # Create 1d array to store relocated bed pixels. Set to nan so we
896
+ ## can later interpolate over gaps.
897
+ pingDat = (np.ones((self.sonDat.shape[0]+maxGap)).astype(np.float32)) * np.nan
898
+ dataExtent = 0
899
+
900
+ # # Calculate nadir gap (assume 4 degrees)
901
+ # nadirGap = int(round(np.tan(np.deg2rad(4)) * depth, 0))
902
+
903
+ #Iterate each sonar/ping return
904
+ for i in range(self.sonDat.shape[0]):
905
+ if i >= depth:
906
+ intensity = self.sonDat[i,j] # Get the intensity value
907
+ srcIndex = int(round(math.sqrt((i)**2 - dd),0)) # Calculate horizontal range (in pixels) using the Pythagorean theorem
908
+
909
+ srcIndex = srcIndex + nadirGap # Add nadir gap to horizontal range
910
+
911
+ pingDat[srcIndex] = intensity # Store intensity at appropriate horizontal range
912
+ dataExtent = srcIndex # Store range extent (max range) of ping
913
+ else:
914
+ pass
915
+ pingDat[dataExtent:]=0 # Zero out values past range extent so we don't interpolate past this
916
+
917
+ # # Process of relocating bed pixels will introduce across track gaps
918
+ # ## in the array so we will interpolate over gaps to fill them.
919
+ # nans, x = np.isnan(pingDat), lambda z: z.nonzero()[0]
920
+ # pingDat[nans] = np.interp(x(nans), x(~nans), pingDat[~nans])
921
+
922
+ pingDat[:nadirGap] = np.nan # Set the top nadirGap pixels to NaN to remove the relocated water column
923
+ print(pingDat)
924
+
925
+ # Find where the first non-zero pixel is
926
+ nonZero = np.where(pingDat>0)[0]
927
+ print(pingDat[nonZero])
928
+
929
+ # Interpolate over gaps past nonZero
930
+ if len(nonZero) > 0:
931
+ firstNonZero = nonZero[0]
932
+ nans, x = np.isnan(pingDat[firstNonZero:]), lambda z: z.nonzero()[0]
933
+ pingDat[firstNonZero:][nans] = np.interp(x(nans), x(~nans), pingDat[firstNonZero:][~nans])
934
+
935
+ # Store relocated ping in output array
936
+ if son:
937
+ srcDat[:,j] = np.around(pingDat, 0)
938
+ else:
939
+ srcDat[:,j] = pingDat
940
+
941
+ del pingDat
942
+
943
+ if son:
944
+ self.sonDat = srcDat.astype(int) # Store in class attribute for later use
945
+ else:
946
+ self.sonDat = srcDat
947
+ del srcDat
948
+ return #self
949
+
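The relocation above follows the flat-bottom geometry described in the docstring: a return at slant-range pixel r over nadir depth d (both in pixels) lands at horizontal pixel sqrt(r**2 - d**2), shifted by the assumed 4-degree nadir gap. A short numeric check with made-up values:

import math

depth = 30                                                      # nadir depth in pixels
nadirGap = int(round(math.tan(math.radians(4)) * depth, 0))     # 2 pixels

for r in (30, 40, 50):                                          # slant-range pixel of a return
    horiz = int(round(math.sqrt(r**2 - depth**2), 0)) + nadirGap
    print(r, '->', horiz)                                       # 30 -> 2, 40 -> 28, 50 -> 42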
950
+ # ======================================================================
951
+ def _WCR_crop(self,
952
+ sonMeta,
953
+ crop=True):
954
+ # Load depth (in real units) and convert to pixels
955
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int)
956
+ minDep = min(bedPick)
957
+
958
+ sonDat = self.sonDat
959
+ # Zero out water column
960
+ for j, d in enumerate(bedPick):
961
+ sonDat[:d, j] = 0
962
+
963
+ # Crop to min depth
964
+ if crop:
965
+ sonDat = sonDat[minDep:,]
966
+
967
+ self.sonDat = sonDat
968
+
969
+ return minDep
970
+
971
+ # ======================================================================
972
+ def _WCO(self,
973
+ sonMeta):
974
+ # Load depth (in real units) and convert to pixels
975
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int)
976
+ maxDep = max(bedPick)
977
+
978
+ sonDat = self.sonDat
979
+ # Zero out water column
980
+ for j, d in enumerate(bedPick):
981
+ sonDat[d:, j] = 0
982
+
983
+ # Crop to min depth
984
+ sonDat = sonDat[:maxDep,]
985
+
986
+ self.sonDat = sonDat
987
+
988
+ return maxDep
989
+
990
+ # ======================================================================
991
+ def _SHW_mask(self, i, son=True):
992
+ '''
993
+
994
+ '''
995
+
996
+ # Get sonar data and shadow pix coordinates
997
+ if son:
998
+ self._getScanChunkSingle(i)
999
+ sonDat = self.sonDat
1000
+ shw_pix = self.shadow[i]
1001
+
1002
+ # Create a mask and work on that first, then mask sonDat
1003
+ mask = np.ones(sonDat.shape)
1004
+
1005
+ for k, val in shw_pix.items():
1006
+ for v in val:
1007
+ try:
1008
+ mask[v[0]:v[1], k] = 0
1009
+ except:
1010
+ pass
1011
+
1012
+ self.shadowMask = mask
1013
+
1014
+ return #self
1015
+
1016
+
1017
+ # ======================================================================
1018
+ def _SHW_crop(self, i, maxCrop=True, croprange=True):
1019
+ '''
1020
+ maxCrop: True: ping-wise crop; False: crop tile to max range
1021
+ '''
1022
+ buf=50 # Add buf if maxCrop is false
1023
+
1024
+ # Get sonar data
1025
+ sonDat = self.sonDat
1026
+
1027
+ # Get sonar data and shadow pix coordinates
1028
+ self._SHW_mask(i)
1029
+ mask = self.shadowMask
1030
+
1031
+ # Remove non-contiguous regions
1032
+ reg = label(mask)
1033
+
1034
+ # Find region w/ min row value/highest up on sonogram
1035
+ highReg = -1
1036
+ minRow = mask.shape[0]
1037
+ for region in regionprops(reg):
1038
+ minr, minc, maxr, maxc = region.bbox
1039
+
1040
+ if (minr < minRow) and (highReg != 0):
1041
+ highReg = region.label
1042
+ minRow = minr
1043
+
1044
+ # Keep only region matching highReg, update mask with reg
1045
+ mask = np.where(reg==highReg, 1, 0)
1046
+
1047
+ # Find max range of valid son returns
1048
+ max_r = []
1049
+ mask[mask.shape[0]-1, :] = 0 # Zero-out last row
1050
+
1051
+ R = mask.shape[0] # max range
1052
+ P = mask.shape[1] # number of pings
1053
+
1054
+ for c in range(P):
1055
+ bed = np.where(mask[:,c]==1)[0]
1056
+ try:
1057
+ bed = np.split(bed, np.where(np.diff(bed) != 1)[0]+1)[-1][-1]
1058
+ except:
1059
+ bed = np.nan
1060
+
1061
+ max_r.append(bed)
1062
+
1063
+ # Find max range
1064
+ max_r = np.nanmax(max_r).astype(int)
1065
+
1066
+ if maxCrop:
1067
+ # Keep ping-wise crop (aggressive crop)
1068
+ pass
1069
+ else:
1070
+ # Keep all returns up to max_r
1071
+ if (max_r+buf) > mask.shape[0]:
1072
+ mask[:max_r,:] = 1
1073
+ else:
1074
+ mask[:max_r+buf,:] = 1
1075
+ max_r += buf
1076
+
1077
+ # Mask shadows on sonDat
1078
+ sonDat = sonDat * mask
1079
+
1080
+ # Crop SonDat
1081
+ if croprange:
1082
+ sonDat = sonDat[:max_r,:]
1083
+
1084
+ self.sonDat = sonDat
1085
+ del mask, reg
1086
+
1087
+ return max_r
1088
+
1089
+ # ======================================================================
1090
+ def _writeTiles(self,
1091
+ k,
1092
+ imgOutPrefix,
1093
+ tileFile='.jpg',
1094
+ colormap=False):
1095
+ '''
1096
+ Using the ping returns currently stored in self.sonDat,
1097
+ saves an unrectified image of the sonar echogram.
1098
+
1099
+ ----------
1100
+ Parameters
1101
+ ----------
1102
+ k : int
1103
+ DESCRIPTION - Chunk number
1104
+ imgOutPrefix : str
1105
+ DESCRIPTION - Prefix to add to exported image
1106
+
1107
+ ----------------------------
1108
+ Required Pre-processing step
1109
+ ----------------------------
1110
+ Called from self._getScanChunkALL() or self._getScanChunkSingle() after
1111
+ sonar data loaded to memory with self._loadSonChunk()
1112
+
1113
+ -------
1114
+ Returns
1115
+ -------
1116
+ *.PNG of sonogram to output directory
1117
+
1118
+ --------------------
1119
+ Next Processing Step
1120
+ --------------------
1121
+ NA
1122
+ '''
1123
+ data = self.sonDat.astype('uint8') # Get the sonar data
1124
+
1125
+ # File name zero padding
1126
+ addZero = self._addZero(k)
1127
+
1128
+ # Prepare output directory if it doesn't exist
1129
+ outDir = os.path.join(self.outDir, imgOutPrefix)
1130
+ try:
1131
+ os.mkdir(outDir)
1132
+ except:
1133
+ pass
1134
+
1135
+ channel = os.path.split(self.outDir)[-1] #ss_port, ss_star, etc.
1136
+ projName = os.path.split(self.projDir)[-1] #to append project name to filename
1137
+ imsave(os.path.join(outDir, projName+'_'+imgOutPrefix+'_'+channel+'_'+addZero+str(k)+tileFile), data, check_contrast=False)
1138
+
1139
+ return
1140
+
1141
+ def _writeTilesPlot(self,
1142
+ k,
1143
+ imgOutPrefix,
1144
+ tileFile='.jpg',
1145
+ colormap=False):
1146
+ '''
1147
+ Using the ping returns currently stored in self.sonDat,
1148
+ saves an unrectified image of the sonar echogram.
1149
+
1150
+ ----------
1151
+ Parameters
1152
+ ----------
1153
+ k : int
1154
+ DESCRIPTION - Chunk number
1155
+ imgOutPrefix : str
1156
+ DESCRIPTION - Prefix to add to exported image
1157
+
1158
+ ----------------------------
1159
+ Required Pre-processing step
1160
+ ----------------------------
1161
+ Called from self._getScanChunkALL() or self._getScanChunkSingle() after
1162
+ sonar data loaded to memory with self._loadSonChunk()
1163
+
1164
+ -------
1165
+ Returns
1166
+ -------
1167
+ *.PNG of sonogram to output directory
1168
+
1169
+ --------------------
1170
+ Next Processing Step
1171
+ --------------------
1172
+ NA
1173
+ '''
1174
+ data = self.sonDat.astype('uint8') # Get the sonar data
1175
+
1176
+ # File name zero padding
1177
+ addZero = self._addZero(k)
1178
+
1179
+ # Prepare output directory if it doesn't exist
1180
+ outDir = os.path.join(self.outDir, imgOutPrefix)
1181
+ try:
1182
+ os.mkdir(outDir)
1183
+ except:
1184
+ pass
1185
+
1186
+ # Prepare the name
1187
+ channel = os.path.split(self.outDir)[-1] #ss_port, ss_star, etc.
1188
+ projName = os.path.split(self.projDir)[-1] #to append project name to filename
1189
+ outfile = os.path.join(outDir, projName+'_'+imgOutPrefix+'_'+channel+'_'+addZero+str(k)+tileFile)
1190
+
1191
+ # Save as a plot for colormap
1192
+ if colormap:
1193
+ # plt.imshow(data, cmap=self.sonogram_colorMap)
1194
+ # plt.savefig(outfile)
1195
+
1196
+ norm_data = data / 255.0
1197
+ colored_data = plt.cm.get_cmap(self.sonogram_colorMap)(norm_data)
1198
+ colored_data = (colored_data[:, :, :3] * 255).astype('uint8')
1199
+ data = colored_data
1200
+
1201
+ # imsave(outfile, data)
1202
+
1203
+ else:
1204
+ pass
1205
+
1206
+ # imsave(outfile, data, check_contrast=False)
1207
+
1208
+ imsave(outfile, data, check_contrast=False)
1209
+
1210
+
1211
+ return
1212
+
1213
+
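When colormap is requested, _writeTilesPlot pushes the 8-bit sonogram through a matplotlib colormap before saving; a standalone sketch, with 'viridis' standing in for self.sonogram_colorMap and a random array in place of real sonar data:

import numpy as np
import matplotlib.pyplot as plt
from skimage.io import imsave

data = np.random.randint(0, 255, (200, 300)).astype('uint8')   # fake sonogram

rgba = plt.cm.get_cmap('viridis')(data / 255.0)                 # float RGBA in [0, 1]
rgb = (rgba[:, :, :3] * 255).astype('uint8')                    # drop alpha, back to 8-bit

imsave('sonogram_colormapped.jpg', rgb, check_contrast=False)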
1214
+ ############################################################################
1215
+ # Export imagery for labeling #
1216
+ ############################################################################
1217
+
1218
+ # ======================================================================
1219
+ def _exportTilesSpd(self,
1220
+ chunk,
1221
+ spdCor = False,
1222
+ mask_shdw = False,
1223
+ maxCrop = False,
1224
+ tileFile='.jpg'):
1225
+ '''
1226
+
1227
+ '''
1228
+ # Make sonar imagery directory for each beam if it doesn't exist
1229
+ try:
1230
+ os.mkdir(self.outDir)
1231
+ except:
1232
+ pass
1233
+
1234
+ if self.wcp:
1235
+ # Do speed correction
1236
+ self._doSpdCor(chunk, spdCor=spdCor, mask_shdw=mask_shdw, maxCrop=maxCrop, do_egn=self.egn, stretch_wcp=True)
1237
+
1238
+ if self.sonDat is not np.nan:
1239
+ self._writeTilesPlot(chunk, imgOutPrefix='wcp', tileFile=tileFile, colormap=True)
1240
+ else:
1241
+ pass
1242
+
1243
+ if self.wcm:
1244
+ # Do speed correction
1245
+ self._doSpdCor(chunk, spdCor=spdCor, mask_shdw=mask_shdw, mask_wc=True, maxCrop=maxCrop, do_egn=self.egn, stretch_wcp=True)
1246
+
1247
+ if self.sonDat is not np.nan:
1248
+ self._writeTilesPlot(chunk, imgOutPrefix='wcm', tileFile=tileFile, colormap=True)
1249
+ else:
1250
+ pass
1251
+
1252
+ if self.wcr_src:
1253
+ # Do speed correction
1254
+ self._doSpdCor(chunk, spdCor=spdCor, mask_shdw=mask_shdw, src=True, maxCrop=maxCrop, do_egn=self.egn, stretch_wcp=True)
1255
+
1256
+ if self.sonDat is not np.nan:
1257
+ self._writeTilesPlot(chunk, imgOutPrefix='src', tileFile=tileFile, colormap=True)
1258
+ else:
1259
+ pass
1260
+
1261
+ if self.wco:
1262
+ # Do speed correction
1263
+ self._doSpdCor(chunk, spdCor=spdCor, mask_bed=True, maxCrop=maxCrop, do_egn=self.egn, stretch_wcp=True)
1264
+
1265
+ if self.sonDat is not np.nan:
1266
+ self._writeTilesPlot(chunk, imgOutPrefix='wco', tileFile=tileFile, colormap=True)
1267
+ else:
1268
+ pass
1269
+
1270
+ gc.collect()
1271
+ return
1272
+
1273
+
1274
+ # ======================================================================
1275
+ def _doSpdCor(self,
1276
+ chunk,
1277
+ spdCor=False,
1278
+ mask_shdw=False,
1279
+ src=False,
1280
+ mask_wc=False,
1281
+ mask_bed=False,
1282
+ maxCrop=0,
1283
+ son=True,
1284
+ integer=True,
1285
+ do_egn=False,
1286
+ stretch_wcp=False):
1287
+
1288
+ if not hasattr(self, 'sonMetaDF'):
1289
+ self._loadSonMeta()
1290
+
1291
+ # Filter sonMetaDF by chunk
1292
+ isChunk = self.sonMetaDF['chunk_id']==chunk
1293
+ sonMeta = self.sonMetaDF[isChunk].copy().reset_index()
1294
+
1295
+ # Update class attributes based on current chunk
1296
+ self.pingMax = np.nanmax(sonMeta['ping_cnt']) # store to determine max range per chunk
1297
+
1298
+ if ~np.isnan(self.pingMax):
1299
+ # Load chunk's sonar data into memory
1300
+ if son:
1301
+ # self._loadSonChunk()
1302
+ self._getScanChunkSingle(chunk)
1303
+
1304
+ # egn
1305
+ if do_egn:
1306
+
1307
+ self._egn_wcp(chunk, sonMeta, do_rescale=True)
1308
+ self._egnDoStretch(stretch_wcp=stretch_wcp)
1309
+
1310
+ # Remove shadows and crop
1311
+ # if self.remShadow and (lbl_set==2):
1312
+ if (self.remShadow and mask_shdw) or (self.remShadow and maxCrop):
1313
+ self._SHW_crop(chunk, maxCrop=mask_shdw, croprange=maxCrop)
1314
+ sonDat = self.sonDat
1315
+
1316
+ if src:
1317
+ # slant range correction
1318
+ self._WCR_SRC(sonMeta)
1319
+
1320
+ # Remove water column and crop
1321
+ if mask_wc:
1322
+ _ = self._WCR_crop(sonMeta, crop=maxCrop)
1323
+
1324
+ if mask_bed:
1325
+ _ = self._WCO(sonMeta)
1326
+
1327
+ sonDat = self.sonDat
1328
+
1329
+ if spdCor == 0:
1330
+ # Don't do speed correction
1331
+ pass
1332
+ elif spdCor == 1:
1333
+
1334
+ # Distance (in meters)
1335
+ d = sonMeta['trk_dist'].to_numpy()
1336
+ d = np.max(d) - np.min(d)
1337
+
1338
+ pixM = sonMeta['pixM']
1339
+ # Find most common pixel size
1340
+ if len(pixM.unique()) > 1:
1341
+ pixM = pixM.mode()[0]
1342
+ else:
1343
+ pixM = pixM.iloc[0]
1344
+
1345
+ # Distance in pix
1346
+ d = round(d / pixM, 0).astype(int)
1347
+
1348
+ sonDat = resize(sonDat,
1349
+ (sonDat.shape[0], d),
1350
+ mode='reflect',
1351
+ clip=True,
1352
+ preserve_range=True)
1353
+
1354
+ else:
1355
+ sonDat = resize(sonDat,
1356
+ (sonDat.shape[0], sonDat.shape[1]*spdCor),
1357
+ mode='reflect',
1358
+ clip=False, preserve_range=True)#.astype('uint8')
1359
+
1360
+ if integer:
1361
+ self.sonDat = sonDat.astype('uint8')
1362
+ else:
1363
+ self.sonDat = sonDat
1364
+
1365
+ else:
1366
+ self.sonDat = np.nan
1367
+
1368
+ return
1369
+
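A sketch of the distance-based stretch applied when spdCor == 1, using a random array in place of a sonar chunk; the distance, pixel size, and array shape are arbitrary:

import numpy as np
from skimage.transform import resize

sonDat = np.random.randint(0, 255, (1000, 500)).astype('uint8')   # range x pings

trk_dist_m = 180.0       # along-track distance covered by the chunk (m)
pixM = 0.12              # most common pixel size in the chunk (m)
d = int(round(trk_dist_m / pixM, 0))                              # 1500 columns

spdCor = resize(sonDat, (sonDat.shape[0], d),
                mode='reflect', clip=True, preserve_range=True).astype('uint8')
print(spdCor.shape)      # (1000, 1500)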
1370
+ # ======================================================================
1371
+ def _exportMovWin(self,
1372
+ i : int,
1373
+ stride : int,
1374
+ tileType: list,
1375
+ pingMax: int,
1376
+ depMax: int):
1377
+
1378
+ '''
1379
+ '''
1380
+
1381
+ # Set chunk index
1382
+ a_idx = i-1
1383
+ b_idx = i
1384
+
1385
+ # Iterate each tile type
1386
+ for t in tileType:
1387
+ if t == 'wco':
1388
+ cropMax = depMax
1389
+ else:
1390
+ cropMax = pingMax
1391
+ inDir = os.path.join(self.outDir, t)
1392
+ outDir = os.path.join(self.outDir, t+'_mw')
1393
+
1394
+ if not os.path.exists(outDir):
1395
+ try:
1396
+ os.mkdir(outDir)
1397
+ except:
1398
+ pass
1399
+
1400
+ # Find the images
1401
+ images = os.listdir(inDir)
1402
+ images.sort()
1403
+
1404
+ # Get each image
1405
+ a_img = images[a_idx]
1406
+ b_img = images[b_idx]
1407
+
1408
+ # Get image name
1409
+ img_name = a_img.split('.')[0]
1410
+
1411
+ # Open each image
1412
+ a_img = imread(os.path.join(inDir, a_img))
1413
+ b_img = imread(os.path.join(inDir, b_img))
1414
+
1415
+ def resize_to_pingMax(img, cropMax):
1416
+ ndims = img.ndim
1417
+ current_size = img.shape[0]
1418
+ if current_size < cropMax:
1419
+ # Pad with zeros
1420
+ if ndims == 2:
1421
+ padding = ((0, cropMax - current_size), (0, 0))
1422
+ else:
1423
+ padding = ((0, cropMax - current_size), (0, 0), (0,0))
1424
+ resized_img = np.pad(img, padding, mode='constant', constant_values=0)
1425
+ elif current_size > cropMax:
1426
+ # Truncate the array
1427
+ resized_img = img[:cropMax, :]
1428
+ else:
1429
+ # No change needed
1430
+ resized_img = img
1431
+ return resized_img
1432
+
1433
+ # Resize a_img and b_img
1434
+ a_img = resize_to_pingMax(a_img, cropMax)
1435
+ b_img = resize_to_pingMax(b_img, cropMax)
1436
+
1437
+ # Set stride based on first image
1438
+ # stride = int(round(a_img.shape[1] * stride, 0))
1439
+ to_stride = int(round(self.nchunk * stride, 0))
1440
+
1441
+ # Set window size based on first image
1442
+ # winSize = a_img.shape[1]
1443
+ winSize = self.nchunk
1444
+
1445
+ # Concatenate images
1446
+ movWin = np.concatenate((a_img, b_img), axis=1)
1447
+
1448
+ # Last window idx
1449
+ lastWinIDX = self.nchunk
1450
+
1451
+ win = 0
1452
+ # Iterate each window
1453
+ while win < lastWinIDX:
1454
+ window = movWin[:, win:win+winSize]
1455
+
1456
+ zero = self._addZero(win)
1457
+
1458
+ # Save window
1459
+ imsave(os.path.join(outDir, img_name+'_'+zero+str(win)+'.jpg'), window)
1460
+
1461
+ win += to_stride
1462
+
1463
+ return
1464
+
1465
+
1466
+
1467
+ ############################################################################
1468
+ # Miscellaneous #
1469
+ ############################################################################
1470
+
1471
+ # ======================================================================
1472
+ def _getScanChunkSingle(self,
1473
+ chunk,
1474
+ cog=True,
1475
+ filterIntensity = False,
1476
+ remWater = False):
1477
+ '''
1478
+ During rectification, if non-rectified tiles have not been exported,
1479
+ this will load the chunk's scan data from the sonar recording.
1480
+
1481
+ Stores the number of pings per chunk, chunk id, and byte index location
1482
+ in son file, then calls self._loadSonChunk() to read the data.
1483
+
1484
+ ----------
1485
+ Parameters
1486
+ ----------
1487
+ chunk : int
1488
+ DESCRIPTION - Chunk number
1489
+ remWater : bool
1490
+ DESCRIPTION - Flag indicating if water column should be removed and
1491
+ slant range corrected.
1492
+
1493
+ ----------------------------
1494
+ Required Pre-processing step
1495
+ ----------------------------
1496
+ Called from child class c_rectObj._rectSon()
1497
+
1498
+ -------
1499
+ Returns
1500
+ -------
1501
+ Self with chunk's sonar intensities loaded in memory
1502
+
1503
+ --------------------
1504
+ Next Processing Step
1505
+ --------------------
1506
+ Return to child class c_rectObj._rectSon() to complete rectification
1507
+ '''
1508
+ # Open sonar metadata file to df
1509
+ sonMetaAll = pd.read_csv(self.sonMetaFile)
1510
+
1511
+ # Filter df by chunk
1512
+ if cog:
1513
+ isChunk = sonMetaAll['chunk_id']==chunk
1514
+ else:
1515
+ isChunk = sonMetaAll['chunk_id_2']==chunk
1516
+ isChunk.iloc[chunk+1] = True
1517
+ sonMeta = sonMetaAll[isChunk].reset_index()
1518
+
1519
+ # Update class attributes based on current chunk
1520
+ rangeCnt = np.unique(sonMeta['ping_cnt'], return_counts=True)
1521
+ pingMaxi = np.argmax(rangeCnt[1])
1522
+ self.pingMax = int(rangeCnt[0][pingMaxi])
1523
+
1524
+ self.headIdx = sonMeta['index']#.astype(int) # store byte offset per ping
1525
+ self.son_offset = sonMeta['son_offset']
1526
+ self.pingCnt = sonMeta['ping_cnt']#.astype(int) # store ping count per ping
1527
+
1528
+ # Load chunk's sonar data into memory
1529
+ self._loadSonChunk()
1530
+ # Do PPDRC filter
1531
+ if filterIntensity:
1532
+ self._doPPDRC()
1533
+ # Remove water if exporting wcr imagery
1534
+ if remWater:
1535
+ self._WCR(sonMeta)
1536
+
1537
+ del self.headIdx, self.pingCnt
1538
+
1539
+ return
1540
+
1541
+ def _getScanSlice(self, transect, start_idx, end_idx, remWater = False):
1542
+ '''
1543
+
1544
+ '''
1545
+
1546
+ # Open sonar metadata file to df
1547
+ sonMetaAll = pd.read_csv(self.sonMetaFile)
1548
+
1549
+ # Filter by transect
1550
+ sonMetaAll = sonMetaAll[sonMetaAll['transect'] == transect].reset_index(drop=True)
1551
+
1552
+ # Filter sonMeta
1553
+ # sonMeta = sonMeta[(sonMeta['index'] >= start_idx) & (sonMeta['index'] <= end_idx)]
1554
+ sonMeta = sonMetaAll.iloc[start_idx:end_idx]
1555
+ sonMeta = sonMeta.reset_index()
1556
+
1557
+ # Update class attributes based on current chunk
1558
+ rangeCnt = np.unique(sonMeta['ping_cnt'], return_counts=True)
1559
+ pingMaxi = np.argmax(rangeCnt[1])
1560
+ self.pingMax = int(rangeCnt[0][pingMaxi])
1561
+
1562
+ self.headIdx = sonMeta['index']#.astype(int) # store byte offset per ping
1563
+ self.son_offset = sonMeta['son_offset']
1564
+ self.pingCnt = sonMeta['ping_cnt']#.astype(int) # store ping count per ping
1565
+
1566
+ # Load chunk's sonar data into memory
1567
+ self._loadSonChunk()
1568
+
1569
+ # Remove water if exporting wcr imagery
1570
+ if remWater:
1571
+ self._WCR(sonMeta)
1572
+
1573
+ del self.headIdx, self.pingCnt
1574
+
1575
+ return
1576
+
1577
+ # ======================================================================
1578
+ def _loadSonMeta(self):
1579
+ '''
1580
+ Load sonar metadata from csv to pandas df
1581
+ '''
1582
+ meta = pd.read_csv(self.sonMetaFile)
1583
+ self.sonMetaDF = meta
1584
+ return
1585
+
1586
+ # ======================================================================
1587
+ def _getChunkID(self):
1588
+ '''
1589
+ Utility to load unique chunk ID's from son obj and return in a list
1590
+ '''
1591
+
1592
+ # Load son metadata csv to df
1593
+ self._loadSonMeta()
1594
+
1595
+ df = self.sonMetaDF
1596
+
1597
+ if 'filter' in df.columns:
1598
+ # Remove filtered pings
1599
+ df = df[df['filter'] == True]
1600
+
1601
+ # Get unique chunk id's
1602
+ df = df.groupby(['chunk_id', 'index']).size().reset_index().rename(columns={0:'count'})
1603
+ chunks = pd.unique(df['chunk_id']).astype(int)
1604
+
1605
+ del self.sonMetaDF, df
1606
+ return chunks
1607
+
1608
+ # ======================================================================
1609
+ def _getChunkID_Update(self):
1610
+ '''
1611
+ Utility to load unique chunk ID's from son obj and return in a list
1612
+ '''
1613
+
1614
+ # Load son metadata csv to df
1615
+ self._loadSonMeta()
1616
+
1617
+ # # Get unique chunk id's
1618
+ # df = self.sonMetaDF.groupby(['chunk_id', 'index']).size().reset_index().rename(columns={0:'count'})
1619
+ # chunks = pd.unique(df['chunk_id']).astype(int)
1620
+
1621
+ # Use index as chunk id
1622
+ df = self.sonMetaDF
1623
+ chunks = df.index.values.astype(int)
1624
+
1625
+ df['chunk_id_2'] = chunks
1626
+ self._saveSonMetaCSV(df)
1627
+
1628
+ del self.sonMetaDF, df
1629
+ return chunks
1630
+
1631
+ # ======================================================================
1632
+ def _addZero(self, chunk):
1633
+ # Determine leading zeros to match naming convention
1634
+ if chunk < 10:
1635
+ addZero = '0000'
1636
+ elif chunk < 100:
1637
+ addZero = '000'
1638
+ elif chunk < 1000:
1639
+ addZero = '00'
1640
+ elif chunk < 10000:
1641
+ addZero = '0'
1642
+ else:
1643
+ addZero = ''
1644
+
1645
+ return addZero
1646
+
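For reference, concatenating the returned padding with str(chunk) is equivalent to str.zfill(5):

for chunk in (7, 42, 12345):
    print(str(chunk).zfill(5))   # 00007, 00042, 12345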
1647
+
1648
+ # ======================================================================
1649
+ def _cleanup(self):
1650
+
1651
+ try:
1652
+ del self.sonMetaDF
1653
+ except:
1654
+ pass
1655
+
1656
+ try:
1657
+ del self.sonDat
1658
+ except:
1659
+ pass
1660
+
1661
+ # Delete temp files
1662
+ t = glob(os.path.join(self.projDir, '*', '*temp*'), recursive=True)
1663
+ for f in t:
1664
+ try:
1665
+ os.remove(f)
1666
+ except:
1667
+ pass
1668
+
1669
+ # ======================================================================
1670
+ def _pickleSon(self):
1671
+ '''
1672
+ Pickle sonObj so we can reload later if needed.
1673
+ '''
1674
+ if not hasattr(self, 'sonMetaPickle'):
1675
+ outFile = self.sonMetaFile.replace(".csv", ".meta")
1676
+ self.sonMetaPickle = outFile
1677
+ else:
1678
+ outFile = self.sonMetaPickle
1679
+
1680
+ with open(outFile, 'wb') as sonFile:
1681
+ pickle.dump(self, sonFile)
1682
+
1683
+ return
1684
+
1685
+
1686
+ # ======================================================================
1687
+ def __str__(self):
1688
+ '''
1689
+ Generic print function to print contents of sonObj.
1690
+ '''
1691
+ output = "sonObj Contents"
1692
+ output += '\n\t'
1693
+ output += self.__repr__()
1694
+ temp = vars(self)
1695
+ for item in temp:
1696
+ output += '\n\t'
1697
+ output += "{} : {}".format(item, temp[item])
1698
+ return output
1699
+
1700
+ ############################################################################
1701
+ # Corrections #
1702
+ ############################################################################
1703
+
1704
+ # ======================================================================
1705
+ def _egnCalcChunkMeans(self, chunk):
1706
+ '''
1707
+
1708
+ '''
1709
+
1710
+ # Filter sonMetaDF by chunk
1711
+ isChunk = self.sonMetaDF['chunk_id']==chunk
1712
+ sonMeta = self.sonMetaDF[isChunk].copy().reset_index()
1713
+
1714
+ # Update class attributes based on current chunk
1715
+ self.pingMax = np.nanmax(sonMeta['ping_cnt']) # store to determine max range per chunk
1716
+ self.headIdx = sonMeta['index'] # store byte offset per ping
1717
+ self.pingCnt = sonMeta['ping_cnt'] # store ping count per ping
1718
+
1719
+ ############
1720
+ # load sonar
1721
+ # self._loadSonChunk()
1722
+ self._getScanChunkSingle(chunk)
1723
+
1724
+ #####################################
1725
+ # Get wc avg (for wcp egn) before src
1726
+ self._WC_mask(chunk, son=False) # Son false because sonDat already loaded
1727
+ bedMask = 1-self.wcMask # Invert zeros and ones
1728
+ wc = self.sonDat*bedMask # Mask bed pixels
1729
+ wc[wc == 0] = np.nan # Set zeros to nan
1730
+ mean_intensity_wc = np.nanmean(wc, axis=1) # get one avg for wc
1731
+ del bedMask, wc, self.wcMask
1732
+
1733
+ ################
1734
+ # remove shadows
1735
+ if self.remShadow:
1736
+ # Get mask
1737
+ self._SHW_mask(chunk)
1738
+
1739
+ # Mask out shadows
1740
+ self.sonDat = self.sonDat*self.shadowMask
1741
+ del self.shadowMask
1742
+
1743
+ ########################
1744
+ # slant range correction
1745
+ self._WCR_SRC(sonMeta)
1746
+
1747
+ # Set zeros to nans (????)
1748
+ self.sonDat = self.sonDat.astype('float')
1749
+ self.sonDat[self.sonDat == 0] = np.nan
1750
+
1751
+ #####
1752
+ # WCR
1753
+ #####
1754
+
1755
+ ##############################
1756
+ # Calculate range-wise average
1757
+ mean_intensity_wcr = np.nanmean(self.sonDat, axis=1)
1758
+
1759
+ del self.sonDat
1760
+ gc.collect()
1761
+ return mean_intensity_wcr, mean_intensity_wc
1762
+
1763
+ # ======================================================================
1764
+ def _egnCalcGlobalMeans(self, chunk_means):
1765
+ '''
1766
+ Average the range-wise chunk means into global bed and water-column means
1767
+ '''
1768
+
1769
+ #####################
1770
+ # Find largest vector
1771
+ lv = 0
1772
+ for c in chunk_means:
1773
+ if c[0].shape[0] > lv:
1774
+ lv = c[0].shape[0]
1775
+
1776
+ ########################
1777
+ # Stack vectors in array
1778
+
1779
+ # Create nan array
1780
+ wc_means = np.empty((lv, len(chunk_means)))
1781
+ wc_means[:] = np.nan
1782
+
1783
+ bed_means = np.empty((lv, len(chunk_means)))
1784
+ bed_means[:] = np.nan
1785
+
1786
+ # Stack arrays
1787
+ for i, m in enumerate(chunk_means):
1788
+ ## Bed means
1789
+ bed_means[:m[0].shape[0], i] = m[0]
1790
+
1791
+ ## WC means
1792
+ wc_means[:m[1].shape[0], i] = m[1]
1793
+
1794
+ del chunk_means
1795
+
1796
+ ################
1797
+ # Calculate mean
1798
+ self.egn_bed_means = np.nanmean(bed_means, axis=1)
1799
+ self.egn_wc_means = np.nanmean(wc_means, axis=1)
1800
+ del bed_means, wc_means
1801
+
1802
+ gc.collect()
1803
+ return
1804
+
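A compact illustration of the NaN-padded stacking used above to average range-wise chunk means of different lengths (values are made up):

import numpy as np

chunk_means = [np.array([10., 12., 14.]), np.array([11., 13.])]   # two chunks, unequal range

lv = max(m.shape[0] for m in chunk_means)
stacked = np.full((lv, len(chunk_means)), np.nan)
for i, m in enumerate(chunk_means):
    stacked[:m.shape[0], i] = m

print(np.nanmean(stacked, axis=1))   # [10.5 12.5 14. ]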
1805
+ # ======================================================================
1806
+ def _egnCalcHist(self, chunk):
1807
+ '''
1808
+ Calculate EGN statistics
1809
+ '''
1810
+ if not hasattr(self, "sonMetaDF"):
1811
+ self._loadSonMeta()
1812
+
1813
+ # Filter sonMetaDF by chunk
1814
+ isChunk = self.sonMetaDF['chunk_id']==chunk
1815
+ sonMeta = self.sonMetaDF[isChunk].copy().reset_index()
1816
+
1817
+ # Update class attributes based on current chunk
1818
+ self.pingMax = np.nanmax(sonMeta['ping_cnt']) # store to determine max range per chunk
1819
+ self.headIdx = sonMeta['index'] # store byte offset per ping
1820
+ self.pingCnt = sonMeta['ping_cnt'] # store ping count per ping
1821
+
1822
+ ############
1823
+ # load sonar
1824
+ # self._loadSonChunk()
1825
+ self._getScanChunkSingle(chunk)
1826
+
1827
+
1828
+ ################
1829
+ # remove shadows
1830
+ if self.remShadow:
1831
+ # Get mask
1832
+ self._SHW_mask(chunk)
1833
+
1834
+ # Mask out shadows
1835
+ self.sonDat = self.sonDat*self.shadowMask
1836
+ del self.shadowMask
1837
+
1838
+ ########
1839
+ # Do EGN
1840
+ nonEGNSonDat = self.sonDat.copy()
1841
+ self._egn_wcp(chunk, sonMeta, do_rescale=True)
1842
+
1843
+
1844
+ ######################
1845
+ # Calculate histograms
1846
+
1847
+ # Histgram with water column present
1848
+ wcp_hist, _ = np.histogram(self.sonDat, bins=255, range=(0,255))
1849
+
1850
+ ########
1851
+ # Do EGN
1852
+ self.sonDat = nonEGNSonDat
1853
+ self._egn()
1854
+
1855
+ ######################
1856
+ # Calculate histograms
1857
+
1858
+ # Histogram with water column removed
1859
+ self._WCR_SRC(sonMeta)
1860
+ wcr_hist, _ = np.histogram(self.sonDat, bins=255, range=(0,255))
1861
+
1862
+
1863
+ del self.sonDat, nonEGNSonDat
1864
+
1865
+ return wcp_hist, wcr_hist
1866
+ # return wcr_hist
1867
+
1868
+ # ======================================================================
1869
+ def _egnCalcGlobalHist(self, hist):
1870
+ '''
1871
+ '''
1872
+
1873
+ # Zero arrays to store sum of histograms
1874
+ wcp_hist = np.zeros((hist[0][0].shape))
1875
+ wcr_hist = np.zeros((hist[0][0].shape))
1876
+
1877
+ for (wcp, wcr) in hist:
1878
+ wcp_hist += wcp
1879
+ wcr_hist += wcr
1880
+
1881
+ del hist
1882
+
1883
+ self.egn_wcp_hist = wcp_hist
1884
+ self.egn_wcr_hist = wcr_hist
1885
+
1886
+ return
1887
+
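# Editor's note: sketch of combining per-chunk histograms into global histograms and
# normalizing them to fractions. The class later consumes normalized histograms as
# egn_wcp_hist_pcnt / egn_wcr_hist_pcnt in _egnCalcStretch(); that normalization step
# is not shown in this excerpt, so the division below is an assumption about it.
import numpy as np

per_chunk = [(np.array([5, 3, 2]), np.array([1, 4, 5])),   # (wcp_hist, wcr_hist) per chunk
             (np.array([2, 2, 6]), np.array([3, 3, 4]))]

wcp_hist = np.zeros_like(per_chunk[0][0], dtype=float)
wcr_hist = np.zeros_like(per_chunk[0][1], dtype=float)
for wcp, wcr in per_chunk:
    wcp_hist += wcp
    wcr_hist += wcr

wcp_pcnt = wcp_hist / wcp_hist.sum()   # fraction of pixels per intensity bin
wcr_pcnt = wcr_hist / wcr_hist.sum()
print(wcp_pcnt, wcr_pcnt)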
1888
+ # ======================================================================
1889
+ def _egnCalcMinMax(self, chunk):
1890
+ '''
1891
+ Calculate local min and max values after applying EGN
1892
+ '''
1893
+ # Get sonMetaDF
1894
+ if not hasattr(self, 'sonMetaDF'):
1895
+ self._loadSonMeta()
1896
+
1897
+ # Filter sonMetaDF by chunk
1898
+ isChunk = self.sonMetaDF['chunk_id']==chunk
1899
+ sonMeta = self.sonMetaDF[isChunk].copy().reset_index()
1900
+
1901
+ # Update class attributes based on current chunk
1902
+ self.pingMax = np.nanmax(sonMeta['ping_cnt']) # store to determine max range per chunk
1903
+ self.headIdx = sonMeta['index'] # store byte offset per ping
1904
+ self.pingCnt = sonMeta['ping_cnt'] # store ping count per ping
1905
+
1906
+ ############
1907
+ # load sonar
1908
+ # self._loadSonChunk()
1909
+ self._getScanChunkSingle(chunk)
1910
+
1911
+ ################
1912
+ # remove shadows
1913
+ if self.remShadow:
1914
+ # Get mask
1915
+ self._SHW_mask(chunk)
1916
+
1917
+ # Mask out shadows
1918
+ self.sonDat = self.sonDat*self.shadowMask
1919
+ del self.shadowMask
1920
+
1921
+ #############
1922
+ # Do wc stats
1923
+
1924
+ # Get wc pixels
1925
+ self._WC_mask(chunk, son=False) # Son false because sonDat already loaded
1926
+ bedMask = 1-self.wcMask # Invert zeros and ones
1927
+ wc = self.sonDat*bedMask # Mask bed pixels
1928
+ wc[wc == 0] = np.nan # Set zeros to nan
1929
+
1930
+ # Get copy of sonDat so we can calculate egn on wc pixels
1931
+ sonDat = self.sonDat.copy()
1932
+ self.sonDat = wc
1933
+
1934
+ # Do EGN
1935
+ self._egn(wc=True, do_rescale=False)
1936
+
1937
+ # Calculate min and max
1938
+ wc_min = np.nanmin(self.sonDat)
1939
+ wc_max = np.nanmax(self.sonDat)
1940
+
1941
+ ##############
1942
+ # Do bed stats
1943
+
1944
+ # Store sonDat
1945
+ self.sonDat = sonDat
1946
+
1947
+ ########################
1948
+ # slant range correction
1949
+ self._WCR_SRC(sonMeta)
1950
+
1951
+ ########
1952
+ # Do EGN
1953
+ self._egn(do_rescale=False)
1954
+
1955
+ ###################
1956
+ # Calculate min/max
1957
+ bed_min = np.nanmin(self.sonDat)
1958
+ bed_max = np.nanmax(self.sonDat)
1959
+
1960
+ del self.sonDat
1961
+
1962
+ return (bed_min, bed_max), (wc_min, wc_max)
1963
+
1964
+ # ======================================================================
1965
+ def _egnCalcGlobalMinMax(self, min_max):
1966
+ '''
+ Aggregate per-chunk ((bed_min, bed_max), (wc_min, wc_max)) tuples into global min/max values
1967
+ '''
1968
+
1969
+ bed_mins = []
1970
+ bed_maxs = []
1971
+ wc_mins = []
1972
+ wc_maxs = []
1973
+ for ((b_min, b_max), (w_min, w_max)) in min_max:
1974
+ bed_mins.append(b_min)
1975
+ bed_maxs.append(b_max)
1976
+
1977
+ wc_mins.append(w_min)
1978
+ wc_maxs.append(w_max)
1979
+
1980
+ self.egn_bed_min = np.nanmin(bed_mins)
1981
+ self.egn_bed_max = np.nanmax(bed_maxs)
1982
+
1983
+ self.egn_wc_min = np.nanmin(wc_mins)
1984
+ self.egn_wc_max = np.nanmax(wc_maxs)
1985
+
1986
+ return
1987
+
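# Editor's note: toy illustration of reducing per-chunk
# ((bed_min, bed_max), (wc_min, wc_max)) tuples to global extremes, as done in
# _egnCalcGlobalMinMax() above. The numbers are made up.
import numpy as np

min_max = [((0.2, 3.1), (0.5, 2.0)),
           ((0.1, 2.8), (0.4, 2.6))]

bed_min = np.nanmin([b[0] for b, _ in min_max])   # 0.1
bed_max = np.nanmax([b[1] for b, _ in min_max])   # 3.1
wc_min = np.nanmin([w[0] for _, w in min_max])    # 0.4
wc_max = np.nanmax([w[1] for _, w in min_max])    # 2.6
print(bed_min, bed_max, wc_min, wc_max)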
1988
+ # ======================================================================
1989
+ def _egn(self, wc = False, do_rescale=True):
1990
+ '''
1991
+ Apply empirical gain normalization to sonDat
1992
+ '''
1993
+
1994
+ # Get sonar data
1995
+ sonDat = self.sonDat
1996
+
1997
+ # Get egn means
1998
+ if wc:
1999
+ egn_means = self.egn_wc_means.copy() # Don't want to overwrite
2000
+ else:
2001
+ egn_means = self.egn_bed_means.copy() # Don't want to overwrite
2002
+
2003
+ # Slice egn means if too long
2004
+ egn_means = egn_means[:sonDat.shape[0]]
2005
+
2006
+ # Take last value of egn means and add to end if not long enough
2007
+ if sonDat.shape[0] > egn_means.shape[0]:
2008
+ t = np.ones((sonDat.shape[0]))
2009
+ l = egn_means[-1] # last value
2010
+ t[:egn_means.shape[0]] = egn_means
2011
+ t[egn_means.shape[0]:] = l # insert last value
2012
+ egn_means = t
2013
+ del t
2014
+
2015
+ # Divide each ping by mean vector
2016
+ sonDat = sonDat / egn_means[:, None]
2017
+
2018
+ if do_rescale:
2019
+ # Rescale by global min and max
2020
+ if wc:
2021
+ m = self.egn_wc_min
2022
+ M = self.egn_wc_max
2023
+ else:
2024
+ m = self.egn_bed_min
2025
+ M = self.egn_bed_max
2026
+
2027
+ mn = 0
2028
+ mx = 255
2029
+ sonDat = (mx-mn)*(sonDat-m)/(M-m)+mn
2030
+
2031
+ self.sonDat = sonDat
2032
+ del sonDat, egn_means
2033
+ return
2034
+
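# Editor's note: a minimal sketch of the core EGN step applied in _egn() above:
# each ping (column) is divided by the global range-wise mean vector, then the
# result is linearly rescaled to 0-255 using global min/max. The helper name and
# arrays are synthetic stand-ins for self.sonDat and self.egn_bed_means.
import numpy as np

def egn_sketch(son, range_means, global_min, global_max):
    """Normalize (range x ping) intensities by range-wise means, rescale to 0-255."""
    means = range_means[:son.shape[0]].astype(float)          # trim if too long
    if son.shape[0] > means.shape[0]:                         # pad with last value if too short
        pad = np.full(son.shape[0] - means.shape[0], means[-1])
        means = np.concatenate([means, pad])
    son = son / means[:, None]                                # divide every ping by the mean vector
    return 255.0 * (son - global_min) / (global_max - global_min)

if __name__ == "__main__":
    rng = np.random.default_rng(1)
    son = rng.integers(1, 256, size=(6, 4)).astype(float)
    means = son.mean(axis=1)
    print(egn_sketch(son, means, global_min=0.2, global_max=2.0).round(1))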
2035
+ # ======================================================================
2036
+ def _egn_wcp_OLD(self, chunk, sonMeta, do_rescale=True):
2037
+ '''
2038
+ Apply empirical gain normalization to sonDat with the water column present (superseded by _egn_wcp)
2039
+ '''
2040
+
2041
+ # Get sonar data
2042
+ sonDat = self.sonDat.astype(np.float32).copy()
2043
+
2044
+ # Get water column mask
2045
+ self._WC_mask(chunk, son=False) # So we don't reload sonDat
2046
+ wcMask = 1-self.wcMask # Get the mask, invert zeros and ones
2047
+ del self.sonDat
2048
+
2049
+ # Get water column, mask bed
2050
+ wc = sonDat * wcMask
2051
+
2052
+ # Apply egn to wc
2053
+ self.sonDat = wc
2054
+ self._egn(wc=True, do_rescale=False)
2055
+ wc = self.sonDat.copy()
2056
+ wc = np.nan_to_num(wc, nan=0) # replace nans with zero
2057
+ del self.sonDat
2058
+
2059
+ # Get egn_means
2060
+ egn_means = self.egn_bed_means.copy() # Don't want to overwrite
2061
+
2062
+ # Get bedpicks, in pixel units
2063
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int).to_numpy()
2064
+
2065
+ # Iterate each ping
2066
+ for j in range(sonDat.shape[1]):
2067
+ depth = bedPick[j] # Get bedpick
2068
+ # Create 1d array to store relocated egn avgs for given ping.
2069
+ egn_p = (np.zeros((sonDat.shape[0])).astype(np.float32))
2070
+
2071
+ # Iterate over each avg
2072
+ for i in range(sonDat.shape[0]):
2073
+ # Set wc avgs to 1 (unchanged)
2074
+ if i < depth:
2075
+ egn_p[i] = 1
2076
+
2077
+ else:
2078
+ # Get egn_means index (range) for given slant range (i)
2079
+ avgIndex = round(np.sqrt(i**2 - depth**2),0).astype(int)
2080
+ r_avg = egn_means[avgIndex] # Get egn_mean value
2081
+ egn_p[i] = r_avg # Store range avg at appropriate slant range
2082
+
2083
+ sonDat[:,j] = sonDat[:,j] / egn_p
2084
+
2085
+ # Mask water column in sonDat
2086
+ # wcMask = 1-wcMask
2087
+ sonDat = sonDat * self.wcMask
2088
+ sonDat = np.nan_to_num(sonDat, nan=0) # replace nans with zero
2089
+
2090
+ # Add water column pixels back in
2091
+ sonDat = sonDat + wc
2092
+
2093
+ if do_rescale:
2094
+ # Rescale by global min and max
2095
+ m = min(self.egn_wc_min, self.egn_bed_min)
2096
+ M = max(self.egn_wc_max, self.egn_bed_max)
2097
+ mn = 0
2098
+ mx = 255
2099
+ sonDat = (mx-mn)*(sonDat-m)/(M-m)+mn
2100
+
2101
+ sonDat = np.where(sonDat < mn, mn, sonDat)
2102
+ sonDat = np.where(sonDat > mx, mx, sonDat)
2103
+
2104
+ self.sonDat = sonDat.astype('uint8')
2105
+ return
2106
+
2107
+ # ======================================================================
2108
+ def _egn_wcp(self, chunk, sonMeta, do_rescale=True):
2109
+ '''
2110
+ Apply empirical gain normalization to sonDat with the water column present (WCP)
2111
+ '''
2112
+
2113
+ # Get sonar data
2114
+ sonDat = self.sonDat.astype(np.float32).copy()
2115
+
2116
+ # Get egn_bed_means
2117
+ egn_means = self.egn_bed_means.copy() # Don't want to overwrite
2118
+
2119
+ # Get egn_wc_means
2120
+ egn_wc_means = self.egn_wc_means.copy()
2121
+
2122
+ # Take last value of egn means and add to end if not long enough
2123
+ if sonDat.shape[0] > egn_means.shape[0]:
2124
+ t = np.ones((sonDat.shape[0]))
2125
+ l = egn_means[-1] # last value
2126
+ t[:egn_means.shape[0]] = egn_means
2127
+ t[egn_means.shape[0]:] = l # insert last value
2128
+ egn_means = t
2129
+ del t, l
2130
+
2131
+ # Get bedpicks, in pixel units
2132
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int).to_numpy()
2133
+
2134
+ # Iterate each ping
2135
+ for j in range(sonDat.shape[1]):
2136
+ depth = bedPick[j] # Get bedpick
2137
+ dd = depth**2
2138
+
2139
+ # Create 1d array to store relocated egn avgs for given ping.
2140
+ egn_p = (np.zeros((sonDat.shape[0])).astype(np.float32))
2141
+
2142
+ # Iterate over each avg
2143
+ for i in range(sonDat.shape[0]):
2144
+ # Set wc avgs to 1 (unchanged)
2145
+ if i < depth:
2146
+ # egn_p[i] = 1 # Original approach: leave water-column pixels unchanged
2147
+
2148
+
2149
+ # Using the WC means is the 'correct' way to normalize the wc
2150
+ # but it ends up being brighter and 'noisier' which may be good
2151
+ # for looking at suspended sediments, but using the bed means
2152
+ # helps to eliminate the noise
2153
+ denoiseWC = True # Could be added as param in future
2154
+
2155
+ if denoiseWC:
2156
+ egn_p[i] = egn_means[i] # Use bed means
2157
+ else:
2158
+ egn_p[i] = egn_wc_means[i] # Use wc means
2159
+
2160
+ # Relocate egn mean based on slant range
2161
+ else:
2162
+ # Get egn_means index (range) for given slant range (i)
2163
+ avgIndex = int(round(math.sqrt(i**2 - dd),0))
2164
+ r_avg = egn_means[avgIndex] # Get egn_mean value
2165
+ egn_p[i] = r_avg # Store range avg at appropriate slant range
2166
+ del avgIndex, r_avg
2167
+
2168
+ # Apply correction to ping
2169
+ sonDat[:,j] = sonDat[:,j] / egn_p
2170
+
2171
+ del egn_p
2172
+
2173
+ mn = 0
2174
+ mx = 255
2175
+
2176
+ if do_rescale:
2177
+ # Rescale by global min and max
2178
+
2179
+ m = min(self.egn_wc_min, self.egn_bed_min)
2180
+ M = max(self.egn_wc_max, self.egn_bed_max)
2181
+
2182
+ sonDat = (mx-mn)*(sonDat-m)/(M-m)+mn
2183
+
2184
+ # Set values below/above 0/255 to 0/255
2185
+ sonDat = np.where(sonDat < mn, mn, sonDat)
2186
+ sonDat = np.where(sonDat > mx, mx, sonDat)
2187
+
2188
+ self.sonDat = sonDat.astype('uint8')
2189
+ del sonDat
2190
+ return
2191
+
2192
+
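# Editor's note: a sketch of the slant-range relocation used in _egn_wcp() above.
# For a ping with the bed at depth d (in pixels), a sample at return index i >= d
# lies at horizontal range r = sqrt(i**2 - d**2), so the range-wise bed mean at
# index r is used to normalize that sample. The helper name and inputs are
# illustrative, and the clamp on the lookup index is a defensive addition that is
# not in the original code.
import math
import numpy as np

def build_ping_correction(n_samples, depth_px, bed_means, wc_means, denoise_wc=True):
    """Return the per-sample divisor vector for one ping."""
    egn_p = np.empty(n_samples, dtype=np.float32)
    for i in range(n_samples):
        if i < depth_px:
            # Water column: bed means (smoother) or wc means (brighter, noisier).
            egn_p[i] = bed_means[i] if denoise_wc else wc_means[i]
        else:
            r = int(round(math.sqrt(i**2 - depth_px**2)))     # slant range -> horizontal range
            egn_p[i] = bed_means[min(r, len(bed_means) - 1)]  # clamp added for safety
    return egn_p

if __name__ == "__main__":
    bed_means = np.linspace(2.0, 1.0, 50)
    wc_means = np.linspace(3.0, 1.5, 50)
    print(build_ping_correction(10, 4, bed_means, wc_means)[:6])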
2193
+ # ======================================================================
2194
+ def _egnCalcStretch(self, egn_stretch, egn_stretch_factor):
2195
+ '''
+ Calculate stretch min/max for WCP and WCR imagery: global min/max (egn_stretch=1) or percent clip (egn_stretch=2)
2196
+ '''
2197
+ # Store variables
2198
+ self.egn_stretch = egn_stretch
2199
+ self.egn_stretch_factor = egn_stretch_factor
2200
+
2201
+ # Get histogram percentages
2202
+ wcp_pcnt = self.egn_wcp_hist_pcnt
2203
+ wcr_pcnt = self.egn_wcr_hist_pcnt
2204
+
2205
+ if egn_stretch == 1:
2206
+
2207
+ # Find global min and max
2208
+ histIndex = np.where(wcr_pcnt[1:]>0)[0]
2209
+ self.egn_wcr_stretch_min = histIndex[0]
2210
+ self.egn_wcr_stretch_max = histIndex[-1]
2211
+
2212
+ histIndex = np.where(wcp_pcnt[1:]>0)[0]
2213
+ self.egn_wcp_stretch_min = histIndex[0]
2214
+ self.egn_wcp_stretch_max = histIndex[-1]
2215
+
2216
+
2217
+ elif egn_stretch == 2:
2218
+ # Percent clip
2219
+ egn_stretch_factor = egn_stretch_factor / 100
2220
+
2221
+ #####
2222
+ # WCP
2223
+
2224
+ # Left tail
2225
+ m = 1 # Store pixel value (Don't count 0)
2226
+ mp = 0 # Store percentage
2227
+ v = wcp_pcnt[m]
2228
+ while (mp+v) < egn_stretch_factor:
2229
+ # while ((mp+v) < egn_stretch_factor) and (m < 255):
2230
+ m += 1
2231
+ mp += v
2232
+ # v = wcp_pcnt[m]
2233
+ try:
2234
+ v = wcp_pcnt[m]
2235
+ except IndexError:
2236
+ v = 0
2237
+ break
2238
+
2239
+ self.egn_wcp_stretch_min = m
2240
+ del m, mp, v
2241
+
2242
+ # Right tail
2243
+ m = 254
2244
+ mp = 0
2245
+ v = wcp_pcnt[m]
2246
+ while (mp+v) < egn_stretch_factor:
2247
+ # while ((mp+v) < egn_stretch_factor) and (m >= 0):
2248
+ m -= 1
2249
+ mp += v
2250
+ # v = wcp_pcnt[m]
2251
+ try:
2252
+ v = wcp_pcnt[m]
2253
+ except IndexError:
2254
+ v = 0
2255
+ break
2256
+
2257
+
2258
+ self.egn_wcp_stretch_max = m
2259
+ del m, mp, v
2260
+
2261
+ #####
2262
+ # WCR
2263
+
2264
+ # Left tail
2265
+ m = 1 # Store pixel value (Don't count 0)
2266
+ mp = 0 # Store percentage
2267
+ v = wcr_pcnt[m]
2268
+ while (mp+v) < egn_stretch_factor:
2269
+ # while ((mp+v) < egn_stretch_factor) and (m < 255):
2270
+ m += 1
2271
+ mp += v
2272
+ # v = wcp_pcnt[m]
2273
+ try:
2274
+ v = wcr_pcnt[m]
2275
+ except IndexError:
2276
+ v = 0
2277
+ break
2278
+
2279
+ self.egn_wcr_stretch_min = m
2280
+ del m, mp, v
2281
+
2282
+ # Right tail
2283
+ m = 254
2284
+ mp = 0
2285
+ v = wcr_pcnt[m]
2286
+ while (mp+v) < egn_stretch_factor:
2287
+ # while ((mp+v) < egn_stretch_factor) and (m >= 0):
2288
+ m -= 1
2289
+ mp += v
2290
+ # v = wcp_pcnt[m]
2291
+ try:
2292
+ v = wcr_pcnt[m]
2293
+ except IndexError:
2294
+ v = 0
2295
+ break
2296
+
2297
+ self.egn_wcr_stretch_max = m
2298
+ del m, mp, v
2299
+
2300
+ return (self.egn_wcp_stretch_min, self.egn_wcp_stretch_max), (self.egn_wcr_stretch_min, self.egn_wcr_stretch_max)
2301
+
2302
+
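# Editor's note: a sketch of the percent-clip stretch (egn_stretch=2) above, written
# with a cumulative sum instead of the explicit while-loops; it finds the lowest and
# highest pixel values whose cumulative share of pixels stays under the clip
# fraction. The helper name is mine and the equivalence with the loop version is
# approximate, not verified against the package.
import numpy as np

def percent_clip(hist_pcnt, clip_fraction):
    """hist_pcnt: length-256 array of per-intensity pixel fractions (bin 0 ignored)."""
    left_cum = np.cumsum(hist_pcnt[1:])                # skip the zero (no-data) bin
    stretch_min = int(np.searchsorted(left_cum, clip_fraction)) + 1
    right_cum = np.cumsum(hist_pcnt[254::-1])          # accumulate from the right tail
    stretch_max = 254 - int(np.searchsorted(right_cum, clip_fraction))
    return stretch_min, stretch_max

if __name__ == "__main__":
    rng = np.random.default_rng(2)
    counts = rng.integers(0, 100, size=256).astype(float)
    pcnt = counts / counts.sum()
    print(percent_clip(pcnt, 0.5 / 100))               # e.g. 0.5 % clip per tail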
2303
+ # ======================================================================
2304
+ def _egnDoStretch(self, stretch_wcp=False):
2305
+ '''
+ Apply the stored stretch min/max to sonDat: clip, rescale to 0-255, and re-mask zero (no-data) pixels
2306
+ '''
2307
+
2308
+ # Get sonDat
2309
+ sonDat = self.sonDat.astype('float64')
2310
+
2311
+ # Create mask from zero values
2312
+ mask = np.where(sonDat == 0, 0, 1)
2313
+
2314
+ # Get stretch min max
2315
+ if stretch_wcp:
2316
+ m = self.egn_wcp_stretch_min
2317
+ M = self.egn_wcp_stretch_max
2318
+ else:
2319
+ m = self.egn_wcr_stretch_min
2320
+ M = self.egn_wcr_stretch_max
2321
+
2322
+ mn = 0
2323
+ mx = 255
2324
+
2325
+ sonDat = np.clip(sonDat, m, M)
2326
+
2327
+ sonDat = (mx-mn)*(sonDat-m)/(M-m)+mn
2328
+
2329
+ # Re-apply the zero (no-data) mask
2330
+ sonDat = sonDat*mask
2331
+ del mask
2332
+
2333
+ self.sonDat = sonDat.astype('uint8')
2334
+ del sonDat
2335
+ return
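# Editor's note: a compact, self-contained sketch of the stretch applied in
# _egnDoStretch() above: clip to the stored stretch min/max, rescale to 0-255, and
# re-zero the pixels that were no-data (value 0) before the stretch. The helper name
# and example values are illustrative only.
import numpy as np

def do_stretch(son, stretch_min, stretch_max):
    son = son.astype("float64")
    mask = np.where(son == 0, 0, 1)                    # remember no-data pixels
    son = np.clip(son, stretch_min, stretch_max)
    son = 255.0 * (son - stretch_min) / (stretch_max - stretch_min)
    return (son * mask).astype("uint8")                # keep no-data pixels at 0

if __name__ == "__main__":
    img = np.array([[0, 10, 40], [90, 200, 255]], dtype=np.uint8)
    print(do_stretch(img, 10, 200))                    # -> [[0 0 40] [107 255 255]]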