pingmapper 4.2.12-py3-none-any.whl → 5.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pingmapper/class_mapSubstrateObj.py CHANGED
@@ -36,9 +36,15 @@ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
36
36
  PACKAGE_DIR = os.path.dirname(SCRIPT_DIR)
37
37
  sys.path.append(PACKAGE_DIR)
38
38
 
39
+ # # For Debug
40
+ # from funcs_common import *
41
+ # from funcs_model import *
42
+ # from class_rectObj import rectObj
43
+
39
44
  from pingmapper.funcs_common import *
40
45
  from pingmapper.funcs_model import *
41
46
  from pingmapper.class_rectObj import rectObj
47
+
42
48
  from mpl_toolkits.axes_grid1 import make_axes_locatable
43
49
 
44
50
  import matplotlib
@@ -313,7 +319,7 @@ class mapSubObj(rectObj):
313
319
  lOffL = self.sonDat.shape[1]
314
320
 
315
321
  # Get sonMetaDF
316
- lMetaDF = df.loc[df['chunk_id'] == l, ['dep_m']].copy().reset_index()
322
+ lMetaDF = df.loc[df['chunk_id'] == l, ['dep_m', 'pixM']].copy().reset_index()
317
323
 
318
324
  # Remove shadows
319
325
  if self.remShadow:
@@ -357,7 +363,7 @@ class mapSubObj(rectObj):
357
363
  lOffR = lOffL + self.sonDat.shape[1]
358
364
 
359
365
  # Get sonMetaDF
360
- cMetaDF = df.loc[df['chunk_id'] == c, ['dep_m']].copy().reset_index()
366
+ cMetaDF = df.loc[df['chunk_id'] == c, ['dep_m', 'pixM']].copy().reset_index()
361
367
 
362
368
  # Remove shadows
363
369
  if self.remShadow:
@@ -391,7 +397,7 @@ class mapSubObj(rectObj):
391
397
  self._getScanChunkSingle(r)
392
398
 
393
399
  # Get sonMetaDF
394
- rMetaDF = df.loc[df['chunk_id'] == r, ['dep_m']].copy().reset_index()
400
+ rMetaDF = df.loc[df['chunk_id'] == r, ['dep_m', 'pixM']].copy().reset_index()
395
401
 
396
402
  # Remove shadows
397
403
  if self.remShadow:
@@ -688,7 +694,7 @@ class mapSubObj(rectObj):
688
694
  df = self.sonMetaDF
689
695
 
690
696
  # Get sonMetaDF
691
- df = df.loc[df['chunk_id'] == chunk, ['dep_m']].copy().reset_index()
697
+ df = df.loc[df['chunk_id'] == chunk, ['dep_m', 'pixM']].copy().reset_index()
692
698
 
693
699
  # Load sonDat
694
700
  self._getScanChunkSingle(chunk)
@@ -718,7 +724,7 @@ class mapSubObj(rectObj):
718
724
  # Plot Classification
719
725
 
720
726
  # Get final classification
721
- label = self._classifySoftmax(chunk, softmax, map_class_method, mask_wc=True, mask_shw=True)
727
+ label = self._classifySoftmax(chunk, softmax, map_class_method, df=df, mask_wc=True, mask_shw=True)
722
728
 
723
729
  # Do speed correction
724
730
  if spdCor>0:
@@ -904,7 +910,7 @@ class mapSubObj(rectObj):
904
910
  ############################################################################
905
911
 
906
912
  #=======================================================================
907
- def _classifySoftmax(self, i, arr, map_class_method='max', mask_wc=True, mask_shw=True, do_filt=True):
913
+ def _classifySoftmax(self, i, arr, map_class_method='max', df=None, mask_wc=True, mask_shw=True, do_filt=True):
908
914
  '''
909
915
  Classify pixels from softmax values.
910
916
 
@@ -1005,7 +1011,7 @@ class mapSubObj(rectObj):
1005
1011
  min_size = 28
1006
1012
 
1007
1013
  # Filter small regions and holes
1008
- label = self._filterLabel(label, min_size)
1014
+ label = self._filterLabel(label, min_size, df=df)
1009
1015
 
1010
1016
  return label
1011
1017
 
@@ -1058,7 +1064,7 @@ class mapSubObj(rectObj):
1058
1064
 
1059
1065
 
1060
1066
  #=======================================================================
1061
- def _filterLabel(self, l, min_size):
1067
+ def _filterLabel(self, l, min_size, df=None):
1062
1068
  '''
1063
1069
  For a classified substrate label, small holes/objects are removed,
1064
1070
  and pixels classified as NoData are removed and adjecent class is
@@ -1080,8 +1086,10 @@ class mapSubObj(rectObj):
1080
1086
  Next Processing Step
1081
1087
  --------------------
1082
1088
  '''
1083
- # Get pixel size (in meters)
1084
- pix_m = self.pixM
1089
+ # # Get pixel size (in meters)
1090
+ # pix_m = self.pixM
1091
+ pix_m = df['pixM'].values[0] if df is not None else 0.02
1092
+
1085
1093
 
1086
1094
  # Convert min size to pixels
1087
1095
  min_size = int(min_size/pix_m)
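As a rough illustration of the new per-chunk pixel-size handling, the sketch below (with a hypothetical metadata slice) mirrors how the patch reads pixM from the passed DataFrame, falls back to 0.02 m when none is supplied, and then converts the minimum region size to pixels:

    import pandas as pd

    # Hypothetical chunk metadata slice, as produced by the df.loc[...] selections above
    df = pd.DataFrame({'dep_m': [1.5, 1.6], 'pixM': [0.025, 0.025]})

    # Pixel size in metres; 0.02 m is the fallback used when no DataFrame is supplied
    pix_m = df['pixM'].values[0] if df is not None else 0.02

    # Convert the minimum region size to pixels before filtering small objects/holes
    min_size = 28
    min_size = int(min_size / pix_m)   # 1120 pixels for a 0.025 m pixel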
pingmapper/class_portstarObj.py CHANGED
@@ -37,6 +37,10 @@ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
37
37
  PACKAGE_DIR = os.path.dirname(SCRIPT_DIR)
38
38
  sys.path.append(PACKAGE_DIR)
39
39
 
40
+ # # For Debug
41
+ # from funcs_common import *
42
+ # from funcs_model import *
43
+
40
44
  from pingmapper.funcs_common import *
41
45
  from pingmapper.funcs_model import *
42
46
 
@@ -1278,7 +1282,7 @@ class portstarObj(object):
1278
1282
  isChunk = son.sonMetaDF['chunk_id']==1
1279
1283
  sonMeta = son.sonMetaDF[isChunk].reset_index()
1280
1284
  # acousticBed = round(sonMeta['inst_dep_m'] / sonMeta['pix_m'], 0).astype(int)
1281
- acousticBed = round(sonMeta['inst_dep_m'] / self.pixM, 0).astype(int)
1285
+ acousticBed = round(sonMeta['inst_dep_m'] / sonMeta['pixM'], 0).astype(int)
1282
1286
 
1283
1287
  ##################################
1284
1288
  # Step 1 : Acoustic Bedpick Filter
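The bedpick conversion above now uses the per-ping pixM column instead of a single class attribute. A minimal sketch with made-up values, assuming pixM is stored per ping in the metadata table:

    import pandas as pd

    sonMeta = pd.DataFrame({'inst_dep_m': [1.50, 2.02, 2.50],
                            'pixM':       [0.02, 0.02, 0.02]})  # hypothetical values

    # Depth in metres divided by per-ping pixel size gives the bed location in pixels
    acousticBed = round(sonMeta['inst_dep_m'] / sonMeta['pixM'], 0).astype(int)
    print(acousticBed.tolist())   # [75, 101, 125]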
@@ -1403,7 +1407,8 @@ class portstarObj(object):
1403
1407
  chunksPred,
1404
1408
  detectDep=0,
1405
1409
  smthDep=False,
1406
- adjDep=False):
1410
+ adjDep=False,
1411
+ instDepAvail=True):
1407
1412
  '''
1408
1413
  Converts bedpick location (in pixels) to a depth in meters and additionally
1409
1414
  smooth and adjust depth estimate.
@@ -1478,8 +1483,8 @@ class portstarObj(object):
1478
1483
  portDF['dep_m_smth'] = smthDep
1479
1484
  starDF['dep_m_smth'] = smthDep
1480
1485
 
1481
- portDF['dep_m_adjBy'] = str(adjDep / self.port.pixM) + ' pixels'
1482
- starDF['dep_m_adjBy'] = str(adjDep / self.port.pixM) + ' pixels'
1486
+ portDF['dep_m_adjBy'] = str(adjDep / portDF['pixM']) + ' pixels'
1487
+ starDF['dep_m_adjBy'] = str(adjDep / starDF['pixM']) + ' pixels'
1483
1488
 
1484
1489
  elif detectDep > 0:
1485
1490
  # Prepare depth detection dictionaries
@@ -1532,8 +1537,8 @@ class portstarObj(object):
1532
1537
  starFinal = savgol_filter(starFinal, 51, 3)
1533
1538
 
1534
1539
  # Convert pix to depth [m]
1535
- portFinal = np.asarray(portFinal) * self.port.pixM
1536
- starFinal = np.asarray(starFinal) * self.star.pixM
1540
+ portFinal = np.asarray(portFinal) * portDF['pixM']
1541
+ starFinal = np.asarray(starFinal) * starDF['pixM']
1537
1542
 
1538
1543
  # Set negatives to 0
1539
1544
  portFinal = np.where(portFinal<0, 0, portFinal)
@@ -1545,6 +1550,10 @@ class portstarObj(object):
1545
1550
  portDF['dep_m'] = portFinal
1546
1551
  starDF['dep_m'] = starFinal
1547
1552
 
1553
+ if not instDepAvail:
1554
+ portDF['inst_dep_m'] = 0
1555
+ starDF['inst_dep_m'] = 0
1556
+
1548
1557
  if adjDep != 0:
1549
1558
  adjBy = adjDep
1550
1559
  portDF['dep_m'] += adjBy
@@ -1560,8 +1569,23 @@ class portstarObj(object):
1560
1569
  portDF['dep_m_smth'] = smthDep
1561
1570
  starDF['dep_m_smth'] = smthDep
1562
1571
 
1563
- portDF['dep_m_adjBy'] = str(adjDep / self.port.pixM) + ' pixels'
1564
- starDF['dep_m_adjBy'] = str(adjDep / self.port.pixM) + ' pixels'
1572
+ portDF['dep_m_adjBy'] = str(adjDep / portDF['pixM']) + ' pixels'
1573
+ starDF['dep_m_adjBy'] = str(adjDep / starDF['pixM']) + ' pixels'
1574
+
1575
+ # Interpolate over nan's (and set zeros to nan)
1576
+ portDep = portDF['dep_m'].to_numpy()
1577
+ starDep = starDF['dep_m'].to_numpy()
1578
+
1579
+ portDep[portDep == 0] = np.nan
1580
+ starDep[starDep == 0] = np.nan
1581
+
1582
+ nans, x = np.isnan(portDep), lambda z: z.nonzero()[0]
1583
+ portDep[nans] = np.interp(x(nans), x(~nans), portDep[~nans])
1584
+ portDF['dep_m'] = portDep
1585
+
1586
+ nans, x = np.isnan(starDep), lambda z: z.nonzero()[0]
1587
+ starDep[nans] = np.interp(x(nans), x(~nans), starDep[~nans])
1588
+ starDF['dep_m'] = starDep
1565
1589
 
1566
1590
  # Export to csv
1567
1591
  portDF.to_csv(self.port.sonMetaFile, index=False, float_format='%.14f')
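The new interpolation step replaces zero or NaN depth picks with values interpolated from neighbouring pings. A self-contained sketch of the same np.interp pattern on hypothetical depths (np.interp holds the end values flat outside the known range):

    import numpy as np

    dep = np.array([0.0, 1.2, np.nan, 1.4, 0.0, 1.8])  # hypothetical depths (m); 0 = failed pick
    dep[dep == 0] = np.nan                              # treat zeros as missing

    # Interpolate missing picks from the surrounding valid pings
    nans, x = np.isnan(dep), lambda z: z.nonzero()[0]
    dep[nans] = np.interp(x(nans), x(~nans), dep[~nans])
    print(dep)   # [1.2 1.2 1.3 1.4 1.6 1.8]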
@@ -1638,14 +1662,14 @@ class portstarObj(object):
1638
1662
  self.star._loadSonMeta()
1639
1663
  starDF = self.star.sonMetaDF
1640
1664
 
1641
- portDF = portDF.loc[portDF['chunk_id'] == i, ['inst_dep_m', 'dep_m']]
1642
- starDF = starDF.loc[starDF['chunk_id'] == i, ['inst_dep_m', 'dep_m']]
1665
+ portDF = portDF.loc[portDF['chunk_id'] == i, ['inst_dep_m', 'dep_m', 'pixM']]
1666
+ starDF = starDF.loc[starDF['chunk_id'] == i, ['inst_dep_m', 'dep_m', 'pixM']]
1643
1667
 
1644
- portInst = (portDF['inst_dep_m'] / self.port.pixM).to_numpy(dtype=int, copy=True)
1645
- portAuto = (portDF['dep_m'] / self.port.pixM).to_numpy(dtype=int, copy=True)
1668
+ portInst = (portDF['inst_dep_m'] / portDF['pixM']).to_numpy(dtype=int, copy=True)
1669
+ portAuto = (portDF['dep_m'] / portDF['pixM']).to_numpy(dtype=int, copy=True)
1646
1670
 
1647
- starInst = (starDF['inst_dep_m'] / self.star.pixM).to_numpy(dtype=int, copy=True)
1648
- starAuto = (starDF['dep_m'] / self.star.pixM).to_numpy(dtype=int, copy=True)
1671
+ starInst = (starDF['inst_dep_m'] / starDF['pixM']).to_numpy(dtype=int, copy=True)
1672
+ starAuto = (starDF['dep_m'] / starDF['pixM']).to_numpy(dtype=int, copy=True)
1649
1673
 
1650
1674
  # Ensure port/star same length
1651
1675
  if (portAuto.shape[0] != starAuto.shape[0]):
@@ -1807,8 +1831,8 @@ class portstarObj(object):
1807
1831
  starDF = self.star.sonMetaDF
1808
1832
 
1809
1833
  # Get depth/ pix scaler for given chunk
1810
- portDF = portDF.loc[portDF['chunk_id'] == i, ['dep_m']].reset_index()
1811
- starDF = starDF.loc[starDF['chunk_id'] == i, ['dep_m']].reset_index()
1834
+ portDF = portDF.loc[portDF['chunk_id'] == i, ['dep_m', 'pixM']].reset_index()
1835
+ starDF = starDF.loc[starDF['chunk_id'] == i, ['dep_m', 'pixM']].reset_index()
1812
1836
 
1813
1837
  # Load sonar
1814
1838
  self.port._getScanChunkSingle(i)
@@ -1842,8 +1866,8 @@ class portstarObj(object):
1842
1866
 
1843
1867
  ###########################################
1844
1868
  # Remove shadow predictions in water column
1845
- bedpickPort = round(portDF['dep_m'] / self.port.pixM, 0).astype(int)
1846
- bedpickStar = round(starDF['dep_m'] / self.star.pixM, 0).astype(int)
1869
+ bedpickPort = round(portDF['dep_m'] / portDF['pixM'], 0).astype(int)
1870
+ bedpickStar = round(starDF['dep_m'] / starDF['pixM'], 0).astype(int)
1847
1871
 
1848
1872
  for j in range(pMask.shape[1]):
1849
1873
  depth = bedpickPort[j]
@@ -2336,7 +2360,17 @@ class portstarObj(object):
2336
2360
  ## top-left coordinate a value of (0,0)
2337
2361
 
2338
2362
  # Get pixel size
2339
- pix_m = self.port.pixM
2363
+ # pix_m = self.port.pixM
2364
+ self.port._loadSonMeta()
2365
+ isChunk = self.port.sonMetaDF['chunk_id']==chunk
2366
+ sonMeta = self.port.sonMetaDF[isChunk].reset_index()
2367
+
2368
+ pixM = sonMeta['pixM']
2369
+ # Find most common pixel size
2370
+ if len(pixM.unique()) > 1:
2371
+ pixM = pixM.mode()[0]
2372
+ else:
2373
+ pixM = pixM.iloc[0]
2340
2374
 
2341
2375
  # Determine min/max for rescaling
2342
2376
  xMin, xMax = dst[:,0].min(), dst[:,0].max() # Min/Max of x coordinates
@@ -2346,7 +2380,7 @@ class portstarObj(object):
2346
2380
  outShapeM = [xMax-xMin, yMax-yMin] # Calculate range of x,y coordinates
2347
2381
  outShape=[0,0]
2348
2382
  # Divide by pixel size to arrive at output shape of warped image
2349
- outShape[0], outShape[1] = round(outShapeM[0]/pix_m,0), round(outShapeM[1]/pix_m,0)
2383
+ outShape[0], outShape[1] = round(outShapeM[0]/pixM,0), round(outShapeM[1]/pixM,0)
2350
2384
 
2351
2385
  # Rescale destination coordinates
2352
2386
  # X values
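Several of the hunks above collapse the per-ping pixM column to a single value by taking the most common pixel size. A minimal sketch of that selection on a hypothetical chunk:

    import pandas as pd

    sonMeta = pd.DataFrame({'pixM': [0.02, 0.02, 0.021, 0.02]})  # hypothetical per-ping pixel sizes

    pixM = sonMeta['pixM']
    # Fall back to the most common pixel size when pings within a chunk disagree
    if len(pixM.unique()) > 1:
        pixM = pixM.mode()[0]
    else:
        pixM = pixM.iloc[0]
    print(pixM)   # 0.02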
pingmapper/class_rectObj.py CHANGED
@@ -36,8 +36,13 @@ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
36
36
  PACKAGE_DIR = os.path.dirname(SCRIPT_DIR)
37
37
  sys.path.append(PACKAGE_DIR)
38
38
 
39
+ # # For Debug
40
+ # from funcs_common import *
41
+ # from class_sonObj import sonObj
42
+
39
43
  from pingmapper.funcs_common import *
40
44
  from pingmapper.class_sonObj import sonObj
45
+
41
46
  from osgeo import gdal, ogr, osr
42
47
  from osgeo_utils.gdal_sieve import gdal_sieve
43
48
  from scipy.interpolate import splprep, splev
@@ -245,9 +250,15 @@ class rectObj(sonObj):
245
250
  # Attempt to fix error
246
251
  # https://stackoverflow.com/questions/47948453/scipy-interpolate-splprep-error-invalid-inputs
247
252
  okay = np.where(np.abs(np.diff(x))+np.abs(np.diff(y))>0)
248
- x = np.r_[x[okay], x[-1]]
249
- y = np.r_[y[okay], y[-1]]
250
- t = np.r_[t[okay], t[-1]]
253
+ x = np.r_[x[okay], x[-1]].astype('float64')
254
+ y = np.r_[y[okay], y[-1]].astype('float64')
255
+ t = np.r_[t[okay], t[-1]].astype('float64')
256
+
257
+ # Remove any non-finite values (NaN or inf)
258
+ mask = np.isfinite(x) & np.isfinite(y) & np.isfinite(t)
259
+ x = x[mask]
260
+ y = y[mask]
261
+ t = t[mask]
251
262
 
252
263
  # Check if enough points to interpolate
253
264
  # If not, too many overlapping pings
@@ -258,14 +269,29 @@ class rectObj(sonObj):
258
269
  # Fit a spline to filtered coordinates and parameterize with time ellapsed
259
270
  try:
260
271
  tck, _ = splprep([x,y], u=t, k=deg, s=0)
261
- except:
272
+ # except:
273
+ # # Time is messed up (negative time offset)
274
+ # # Parameterize with record num instead
275
+ # zU = 'record_num'
276
+ # t = dfFilt[zU].to_numpy()
277
+ # t = np.r_[t[okay], t[-1]]
278
+ # tck, _ = splprep([x,y], u=t, k=deg, s=0)
279
+ # u_interp = dfOrig[zU].to_numpy()
280
+ except Exception as e:
281
+ print("splprep failed with error:", e)
262
282
  # Time is messed up (negative time offset)
263
283
  # Parameterize with record num instead
264
284
  zU = 'record_num'
265
- t = dfFilt[zU].to_numpy()
285
+ t = dfFilt[zU].to_numpy(dtype='float64')
266
286
  t = np.r_[t[okay], t[-1]]
267
- tck, _ = splprep([x,y], u=t, k=deg, s=0)
268
- u_interp = dfOrig[zU].to_numpy()
287
+ # Ensure float and finite
288
+ t = np.asarray(t, dtype='float64')
289
+ mask = np.isfinite(x) & np.isfinite(y) & np.isfinite(t)
290
+ x = x[mask]
291
+ y = y[mask]
292
+ t = t[mask]
293
+ tck, _ = splprep([x, y], u=t, k=deg, s=0)
294
+ u_interp = dfOrig[zU].to_numpy(dtype='float64')
269
295
 
270
296
  x_interp = splev(u_interp, tck) # Use u_interp to get smoothed x/y coordinates from spline
271
297
 
@@ -279,7 +305,7 @@ class rectObj(sonObj):
279
305
  'record_num': dfOrig['record_num'],
280
306
  'ping_cnt': dfOrig['ping_cnt'],
281
307
  'time_s': dfOrig['time_s'],
282
- 'pix_m': self.pixM,
308
+ 'pixM': dfOrig['pixM'],
283
309
  lons: x_interp[0],
284
310
  lats: x_interp[1],
285
311
  'dep_m': dfOrig['dep_m'],
@@ -563,17 +589,29 @@ class rectObj(sonObj):
563
589
  # maxPing = chunk[ping_cnt].max() # Find max ping count for each chunk
564
590
  # New method to find maxPing based on most numerous ping count
565
591
  maxPing = []
592
+ pixM_all = []
566
593
  for name, group in sDF.groupby(chunk_id):
567
594
  rangeCnt = np.unique(group[ping_cnt], return_counts=True)
568
595
  pingMaxi = np.argmax(rangeCnt[1])
569
596
  maxPing.append(int(rangeCnt[0][pingMaxi]))
597
+
598
+ # Get pixM from sonMetaDF
599
+ pixM = sonMetaDF.loc[sonMetaDF['chunk_id']==name, 'pixM'] # Get pixel size for each chunk
600
+ # Find most common pixel size
601
+ if len(pixM.unique()) > 1:
602
+ pixM = pixM.mode()[0]
603
+ else:
604
+ pixM = pixM.iloc[0]
605
+ pixM_all.append(pixM)
606
+
607
+
570
608
  # Convert maxPing i to pd series
571
609
  maxPing = pd.Series(maxPing)
572
610
 
573
611
  # pix_m = chunk['pix_m'].min() # Get pixel size for each chunk
574
- pix_m = self.pixM # Get pixel size for each chunk
612
+ # pix_m = self.pixM # Get pixel size for each chunk
575
613
  for i in maxPing.index: # Calculate range (in meters) for each chunk
576
- sDF.loc[sDF[chunk_id]==i, range_] = maxPing[i]*pix_m
614
+ sDF.loc[sDF[chunk_id]==i, range_] = maxPing[i]* pixM_all[i] # Calculate range in meters for each chunk
577
615
 
578
616
  ##################################################
579
617
  # Calculate range extent coordinates for each ping
@@ -617,7 +655,7 @@ class rectObj(sonObj):
617
655
  if cog:
618
656
  self._interpRangeCoords(filt)
619
657
  else:
620
- sDF = sDF[['record_num', 'chunk_id', 'ping_cnt', 'time_s', 'lons', 'lats', 'utm_es', 'utm_ns', 'instr_heading', 'cog', 'dep_m', 'range', 'range_lon', 'range_lat', 'range_e', 'range_n', ping_bearing, 'transect']].copy()
658
+ sDF = sDF[['record_num', 'chunk_id', 'ping_cnt', 'time_s', 'lons', 'lats', 'utm_es', 'utm_ns', 'instr_heading', 'cog', 'dep_m', 'range', 'range_lon', 'range_lat', 'range_e', 'range_n', ping_bearing, 'transect', 'pixM']].copy()
621
659
  sDF.rename(columns={'lons': 'trk_lons', 'lats': 'trk_lats', 'utm_es': 'trk_utm_es', 'utm_ns': 'trk_utm_ns', 'cog': 'trk_cog', 'range_lat':'range_lats', 'range_lon':'range_lons', 'range_e':'range_es', 'range_n':'range_ns'}, inplace=True)
622
660
  sDF['chunk_id_2'] = sDF.index.astype(int)
623
661
 
@@ -736,7 +774,7 @@ class rectObj(sonObj):
736
774
  ##################################################
737
775
  # Join smoothed trackline to smoothed range extent
738
776
  # sDF = sDF[['record_num', 'chunk_id', 'ping_cnt', 'time_s', 'pix_m', 'lons', 'lats', 'utm_es', 'utm_ns', 'cog', 'dep_m']].copy()
739
- sDF = sDF[['record_num', 'chunk_id', 'ping_cnt', 'time_s', 'lons', 'lats', 'utm_es', 'utm_ns', 'instr_heading', 'cog', 'dep_m', 'transect']].copy()
777
+ sDF = sDF[['record_num', 'chunk_id', 'ping_cnt', 'time_s', 'lons', 'lats', 'utm_es', 'utm_ns', 'instr_heading', 'cog', 'dep_m', 'transect', 'pixM']].copy()
740
778
  sDF.rename(columns={'lons': 'trk_lons', 'lats': 'trk_lats', 'utm_es': 'trk_utm_es', 'utm_ns': 'trk_utm_ns', 'cog': 'trk_cog'}, inplace=True)
741
779
  rsDF.rename(columns={'cog': 'range_cog'}, inplace=True)
742
780
  rsDF = rsDF[['record_num', 'range_lons', 'range_lats', 'range_cog']]
@@ -1037,6 +1075,7 @@ class rectObj(sonObj):
1037
1075
  n = 'n'
1038
1076
  record_num = 'record_num'
1039
1077
  chunk_id = 'chunk_id'
1078
+ pixM = 'pixM'
1040
1079
 
1041
1080
  flip = False
1042
1081
 
@@ -1066,10 +1105,12 @@ class rectObj(sonObj):
1066
1105
  # Calculate ping bearing and normalize to range 0-360
1067
1106
  pingDF[ping_bearing] = (row[heading]+rotate) % 360
1068
1107
 
1069
- pix_m = self.pixM # Get pixel size for each chunk
1108
+ # pix_m = self.pixM # Get pixel size for each chunk
1109
+ pix_m = row['pixM'] # Get pixel size for each chunk
1070
1110
 
1071
1111
  # Calculate pixel size
1072
1112
  pingDF[son_range] = pingDF[son_idx] * pix_m
1113
+ pingDF[pixM] = pix_m # Store pixel size in dataframe
1073
1114
 
1074
1115
  ##################################################
1075
1116
  # Calculate range extent coordinates for each ping
@@ -1123,7 +1164,7 @@ class rectObj(sonObj):
1123
1164
  # Calculate easting and northing
1124
1165
  pingDF[e], pingDF[n] = self.trans(pingDF[lons].to_numpy(), pingDF[lats].to_numpy())
1125
1166
 
1126
- pingDF = pingDF[[chunk_id, record_num, son_idx, lons, lats, e, n, son_range]]
1167
+ pingDF = pingDF[[chunk_id, record_num, son_idx, lons, lats, e, n, son_range, pixM]]
1127
1168
 
1128
1169
  # Set index to help speed concatenation
1129
1170
  pingDF.set_index([record_num, son_idx], inplace=True)
@@ -1157,7 +1198,14 @@ class rectObj(sonObj):
1157
1198
  ## Destination coordinates describe the geographic location in lat/lon
1158
1199
  ## or easting/northing that directly map to the pix coordinates.
1159
1200
 
1160
- pix_m = self.pixM # Get pixel size
1201
+ # pix_m = self.pixM # Get pixel size
1202
+ pixM = df['pixM']
1203
+ # Find most common pixel size
1204
+ if len(pixM.unique()) > 1:
1205
+ pixM = pixM.mode()[0]
1206
+ else:
1207
+ pixM = pixM.iloc[0]
1208
+ pix_m = pixM
1161
1209
 
1162
1210
  # Get extent of chunk
1163
1211
  xMin, xMax = df[xCoord].min(), df[xCoord].max()
@@ -1324,10 +1372,17 @@ class rectObj(sonObj):
1324
1372
 
1325
1373
  '''
1326
1374
 
1375
+ pixM = df['pixM']
1376
+ # Find most common pixel size
1377
+ if len(pixM.unique()) > 1:
1378
+ pixM = pixM.mode()[0]
1379
+ else:
1380
+ pixM = pixM.iloc[0]
1381
+
1327
1382
  pix_res = self.pix_res_son
1328
1383
  do_resize = True
1329
1384
  if pix_res == 0:
1330
- pix_res = self.pixM
1385
+ pix_res = pixM
1331
1386
  do_resize = False
1332
1387
 
1333
1388
  if son:
@@ -1364,13 +1419,13 @@ class rectObj(sonObj):
1364
1419
  ##################
1365
1420
  # Do Rectification
1366
1421
 
1367
- pix_m = self.pixM # Get pixel size
1422
+ pix_m = pixM # Get pixel size
1368
1423
 
1369
- xPixMax, yPixMax = df[xPix].max().astype(int), df[yPix].max().astype(int)
1424
+ xPixMax, yPixMax = int(df[xPix].max()), int(df[yPix].max())
1370
1425
 
1371
1426
  # Get extent of chunk
1372
- xMin, xMax = df[xCoord].min().astype(int), df[xCoord].max().astype(int)
1373
- yMin, yMax = df[yCoord].min().astype(int), df[yCoord].max().astype(int)
1427
+ xMin, xMax = int(df[xCoord].min()), int(df[xCoord].max())
1428
+ yMin, yMax = int(df[yCoord].min()), int(df[yCoord].max())
1374
1429
 
1375
1430
  # Setup outupt array
1376
1431
  # Determine output shape dimensions
@@ -1876,9 +1931,20 @@ class rectObj(sonObj):
1876
1931
  filterIntensity = False
1877
1932
  pix_res = self.pix_res_son
1878
1933
  do_resize = True
1879
- if pix_res == 0:
1880
- pix_res = self.pixM
1881
- do_resize = False
1934
+
1935
+ # # Set pixel resolution
1936
+ # self._loadSonMeta()
1937
+ # sonMeta = self.sonMetaDF['chunk_id']==chunk
1938
+ # pixM = sonMeta['pixM']
1939
+ # # Find most common pixel size
1940
+ # if len(pixM.unique()) > 1:
1941
+ # pixM = pixM.mode()[0]
1942
+ # else:
1943
+ # pixM = pixM.iloc[0]
1944
+
1945
+ # if pix_res == 0:
1946
+ # pix_res = pixM
1947
+ # do_resize = False
1882
1948
 
1883
1949
  if son:
1884
1950
  # Create output directory if it doesn't exist
@@ -1909,10 +1975,18 @@ class rectObj(sonObj):
1909
1975
  # # Determine leading zeros to match naming convention
1910
1976
  addZero = self._addZero(chunk)
1911
1977
 
1912
- #################################
1913
- # Prepare pixel (pix) coordinates
1914
- ## Pix coordinates describe the size of the coordinates in pixel
1915
- ## coordinates (top left of image == (0,0); top right == (0,nchunk)...)
1978
+ #############################################################
1979
+ # Open smoothed trackline/range extent file
1980
+ trkMeta = pd.read_csv(trkMetaFile)
1981
+ if cog:
1982
+ trkMeta = trkMeta[trkMeta['chunk_id']==chunk].reset_index(drop=False) # Filter df by chunk_id
1983
+ else:
1984
+ # trkMeta = trkMeta[trkMeta['chunk_id_2']==chunk].reset_index(drop=False)
1985
+ # next = trkMeta[trkMeta['chunk_id_2']==chunk+1].reset_index(drop=False)
1986
+ # trkMeta = pd.concat([trkMeta, next], ignore_index=True)
1987
+ isChunk = trkMeta['chunk_id_2']==chunk
1988
+ isChunk.iloc[chunk+1] = True
1989
+ trkMeta = trkMeta[isChunk].reset_index(drop=False)
1916
1990
 
1917
1991
  # Filter sonMetaDF by chunk
1918
1992
  if not hasattr(self, 'sonMetaDF'):
@@ -1929,10 +2003,56 @@ class rectObj(sonObj):
1929
2003
 
1930
2004
  sonMeta = sonMetaAll[isChunk].reset_index()
1931
2005
 
2006
+ filtSon = False
2007
+ if len(sonMeta) != len(trkMeta):
2008
+ if len(sonMeta) > len(trkMeta):
2009
+ filtSon = True
2010
+
2011
+ # Filter df's to make sure they both have the same record_num
2012
+ # Get the intersection of record_num values
2013
+ common_record_nums = np.intersect1d(sonMeta['record_num'], trkMeta['record_num'])
2014
+
2015
+ # Filter both DataFrames to only include these record_num values
2016
+ sonMeta_filtered = sonMeta[sonMeta['record_num'].isin(common_record_nums)]#.reset_index(drop=True)
2017
+ trkMeta_filtered = trkMeta[trkMeta['record_num'].isin(common_record_nums)]#.reset_index(drop=True)
2018
+
2019
+ # Store the index's that were dropped
2020
+ # Get dropped indexes for each DataFrame
2021
+ dropped_sonMeta_idx = sonMeta.index.difference(sonMeta_filtered.index)
2022
+ dropped_trkMeta_idx = trkMeta.index.difference(trkMeta_filtered.index)
2023
+
2024
+ sonMeta = sonMeta_filtered
2025
+ trkMeta = trkMeta_filtered
2026
+
2027
+ if filtSon:
2028
+ idx_to_filt = dropped_sonMeta_idx.tolist()
2029
+
2030
+
2031
+ #################################
2032
+ # Prepare pixel (pix) coordinates
2033
+ ## Pix coordinates describe the size of the coordinates in pixel
2034
+ ## coordinates (top left of image == (0,0); top right == (0,nchunk)...)
2035
+
2036
+ # # Filter sonMetaDF by chunk
2037
+ # if not hasattr(self, 'sonMetaDF'):
2038
+ # self._loadSonMeta()
2039
+
2040
+ # sonMetaAll = self.sonMetaDF
2041
+ # if cog:
2042
+ # isChunk = sonMetaAll['chunk_id']==chunk
2043
+ # else:
2044
+ # isChunk = sonMetaAll['chunk_id_2']==chunk
2045
+ # # next = sonMetaAll['chunk_id_2']==(chunk+1)
2046
+ # # isChunk = pd.concat([isChunk, next], ignore_index=True)
2047
+ # isChunk.iloc[chunk+1] = True
2048
+
2049
+ # sonMeta = sonMetaAll[isChunk].reset_index()
2050
+
1932
2051
  # Update class attributes based on current chunk
1933
2052
  self.pingMax = np.nanmax(sonMeta['ping_cnt']) # store to determine max range per chunk
1934
2053
  self.headIdx = sonMeta['index'] # store byte offset per ping
1935
2054
  self.pingCnt = sonMeta['ping_cnt'] # store ping count per ping
2055
+ self.pixM = sonMeta['pixM'] # store pixel size per ping
1936
2056
 
1937
2057
  if son:
1938
2058
  # Open image to rectify
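The new record_num reconciliation keeps only pings present in both the sonar metadata and the smoothed trackline, then remembers which rows were dropped so the matching image columns can be removed. A sketch with hypothetical tables:

    import numpy as np
    import pandas as pd

    # Hypothetical ping metadata and trackline tables that drifted out of sync
    sonMeta = pd.DataFrame({'record_num': [10, 11, 12, 13, 14]})
    trkMeta = pd.DataFrame({'record_num': [11, 12, 13]})

    # Keep only records present in both tables
    common_record_nums = np.intersect1d(sonMeta['record_num'], trkMeta['record_num'])
    sonMeta_filtered = sonMeta[sonMeta['record_num'].isin(common_record_nums)]

    # Remember dropped rows so the matching image columns can be removed too
    dropped_sonMeta_idx = sonMeta.index.difference(sonMeta_filtered.index)

    img = np.zeros((8, 5))                             # hypothetical sonar chunk (rows x pings)
    img = np.delete(img, dropped_sonMeta_idx, axis=1)  # (8, 3) after dropping columns 0 and 4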
@@ -1951,6 +2071,13 @@ class rectObj(sonObj):
1951
2071
  del self.shadowMask
1952
2072
 
1953
2073
  img = self.sonDat
2074
+
2075
+ # Drop image columns if needed
2076
+ if filtSon:
2077
+ img = np.delete(img, dropped_sonMeta_idx, axis=1)
2078
+ self.sonDat = img.copy()
2079
+
2080
+
1954
2081
  # if not cog:
1955
2082
  # # Zero out second ping
1956
2083
  # img[:,1] = 0
@@ -1983,19 +2110,26 @@ class rectObj(sonObj):
1983
2110
  ## Destination coordinates describe the geographic location in lat/lon
1984
2111
  ## or easting/northing that directly map to the pix coordinates.
1985
2112
 
1986
- # Open smoothed trackline/range extent file
1987
- trkMeta = pd.read_csv(trkMetaFile)
1988
- if cog:
1989
- trkMeta = trkMeta[trkMeta['chunk_id']==chunk].reset_index(drop=False) # Filter df by chunk_id
2113
+ # # Open smoothed trackline/range extent file
2114
+ # trkMeta = pd.read_csv(trkMetaFile)
2115
+ # if cog:
2116
+ # trkMeta = trkMeta[trkMeta['chunk_id']==chunk].reset_index(drop=False) # Filter df by chunk_id
2117
+ # else:
2118
+ # # trkMeta = trkMeta[trkMeta['chunk_id_2']==chunk].reset_index(drop=False)
2119
+ # # next = trkMeta[trkMeta['chunk_id_2']==chunk+1].reset_index(drop=False)
2120
+ # # trkMeta = pd.concat([trkMeta, next], ignore_index=True)
2121
+ # isChunk = trkMeta['chunk_id_2']==chunk
2122
+ # isChunk.iloc[chunk+1] = True
2123
+ # trkMeta = trkMeta[isChunk].reset_index(drop=False)
2124
+
2125
+ pixM = self.pixM
2126
+ # Find most common pixel size
2127
+ if len(pixM.unique()) > 1:
2128
+ pixM = pixM.mode()[0]
1990
2129
  else:
1991
- # trkMeta = trkMeta[trkMeta['chunk_id_2']==chunk].reset_index(drop=False)
1992
- # next = trkMeta[trkMeta['chunk_id_2']==chunk+1].reset_index(drop=False)
1993
- # trkMeta = pd.concat([trkMeta, next], ignore_index=True)
1994
- isChunk = trkMeta['chunk_id_2']==chunk
1995
- isChunk.iloc[chunk+1] = True
1996
- trkMeta = trkMeta[isChunk].reset_index(drop=False)
2130
+ pixM = pixM.iloc[0]
1997
2131
 
1998
- pix_m = self.pixM # Get pixel size
2132
+ pix_m = pixM # Get pixel size
1999
2133
 
2000
2134
  # Get range (outer extent) coordinates [xR, yR] to transposed numpy arrays
2001
2135
  xR, yR = trkMeta[xRange].to_numpy().T, trkMeta[yRange].to_numpy().T
@@ -2031,7 +2165,9 @@ class rectObj(sonObj):
2031
2165
  outShapeM = [xMax-xMin, yMax-yMin] # Calculate range of x,y coordinates
2032
2166
  outShape=[0,0]
2033
2167
  # Divide by pixel size to arrive at output shape of warped image
2034
- outShape[0], outShape[1] = round(outShapeM[0]/pix_m,0), round(outShapeM[1]/pix_m,0)
2168
+ # outShape[0], outShape[1] = round(outShapeM[0]/pix_m,0), round(outShapeM[1]/pix_m,0)
2169
+ outShape[0], outShape[1] = round(outShapeM[0]/pix_res,0), round(outShapeM[1]/pix_res,0)
2170
+ outShape = np.array(outShape).astype(int) # Convert to int
2035
2171
 
2036
2172
  # Rescale destination coordinates
2037
2173
  # X values
pingmapper/class_sonObj.py CHANGED
@@ -285,6 +285,23 @@ class sonObj(object):
285
285
  sonDF = self._filterTime(sonDF, time_table)
286
286
 
287
287
  return sonDF
288
+
289
+ # ======================================================================
290
+ def _filterShortTran(self, df):
291
+
292
+ '''
293
+ '''
294
+
295
+ # Make transects from consecutive pings using dataframe index
296
+ idx = df.index.values
297
+ transect_groups = np.split(idx, np.where(np.diff(idx) != 1)[0]+1)
298
+
299
+ for t in transect_groups:
300
+ if len(t) < self.nchunk:
301
+ # False means remove
302
+ df.loc[t, 'filter'] = False
303
+
304
+ return df
288
305
 
289
306
 
290
307
  # ======================================================================
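The added _filterShortTran flags transects shorter than one chunk by splitting the DataFrame index into runs of consecutive values. A compact sketch of the same grouping on a hypothetical index:

    import numpy as np
    import pandas as pd

    # Hypothetical ping table; gaps in the index mark transect breaks
    df = pd.DataFrame({'filter': True}, index=[0, 1, 2, 5, 6, 7, 8, 9])
    nchunk = 5

    # Split the index into runs of consecutive values (one run per transect)
    idx = df.index.values
    transect_groups = np.split(idx, np.where(np.diff(idx) != 1)[0] + 1)

    for t in transect_groups:
        if len(t) < nchunk:
            df.loc[t, 'filter'] = False   # flag short transects for removal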
@@ -515,7 +532,6 @@ class sonObj(object):
515
532
  idx = sonDF.index.values
516
533
  transect_groups = np.split(idx, np.where(np.diff(idx) != 1)[0]+1)
517
534
 
518
- # print(transect_groups)
519
535
 
520
536
  # Assign transect
521
537
  transect = 0
@@ -741,6 +757,14 @@ class sonObj(object):
741
757
  for i in range(len(self.headIdx)):
742
758
  if ~np.isnan(self.headIdx[i]):
743
759
  ping_len = min(self.pingCnt[i].astype(int), self.pingMax)
760
+
761
+
762
+ # #### Do not commit!!!!
763
+ # # if self.beamName == 'ss_star' or self.beamName == 'ss_port':
764
+ # # ping_len *= 2
765
+ if not self.son8bit:
766
+ ping_len *= 2
767
+
744
768
  headIDX = self.headIdx[i].astype(int)
745
769
  son_offset = self.son_offset[i].astype(int)
746
770
  # pingIdx = headIDX + self.headBytes # Determine byte offset to sonar returns
@@ -755,7 +779,13 @@ class sonObj(object):
755
779
  buffer = buffer[::-1]
756
780
 
757
781
  # Read the data
758
- dat = np.frombuffer(buffer, dtype='>u1')
782
+ if self.son8bit:# and self.beamName != 'ss_star' and self.beamName != 'ss_port':
783
+ dat = np.frombuffer(buffer, dtype='>u1')
784
+ else:
785
+ try:
786
+ dat = np.frombuffer(buffer, dtype='>u2')
787
+ except:
788
+ dat = np.frombuffer(buffer[:-1], dtype='>u2')
759
789
 
760
790
  try:
761
791
  sonDat[:ping_len, i] = dat
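Reading the ping buffer now depends on whether the recording stores 8-bit or 16-bit samples. A sketch of the two branches on a made-up buffer (the '>u2' path falls back to trimming a trailing odd byte):

    import numpy as np

    buffer = bytes([0, 10, 0, 200, 1, 44])   # hypothetical raw ping bytes
    son8bit = False

    if son8bit:
        dat = np.frombuffer(buffer, dtype='>u1')           # one byte per sample
    else:
        try:
            dat = np.frombuffer(buffer, dtype='>u2')       # two bytes per sample, big-endian
        except ValueError:
            dat = np.frombuffer(buffer[:-1], dtype='>u2')  # drop a trailing odd byte
    print(dat)   # [ 10 200 300]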
@@ -764,7 +794,7 @@ class sonObj(object):
764
794
  sonDat[:ping_len, i] = dat
765
795
 
766
796
  file.close()
767
- self.sonDat = sonDat
797
+ self.sonDat = sonDat.astype(np.uint8)
768
798
  return
769
799
 
770
800
  # ======================================================================
@@ -786,7 +816,7 @@ class sonObj(object):
786
816
 
787
817
  # Load depth (in real units) and convert to pixels
788
818
  # bedPick = round(sonMeta['dep_m'] / sonMeta['pix_m'], 0).astype(int)
789
- bedPick = round(sonMeta['dep_m'] / self.pixM, 0).astype(int)
819
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int)
790
820
  minDep = min(bedPick)
791
821
 
792
822
  del sonMeta, self.sonMetaDF
@@ -841,7 +871,7 @@ class sonObj(object):
841
871
  '''
842
872
  # Load depth (in real units) and convert to pixels
843
873
  # bedPick = round(sonMeta['dep_m'] / sonMeta['pix_m'], 0).astype(int)
844
- bedPick = round(sonMeta['dep_m'] / self.pixM, 0).astype(int)
874
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int).reset_index(drop=True)
845
875
 
846
876
  # Initialize 2d array to store relocated sonar records
847
877
  srcDat = np.zeros((self.sonDat.shape[0], self.sonDat.shape[1])).astype(np.float32)#.astype(int)
@@ -890,7 +920,7 @@ class sonObj(object):
890
920
  sonMeta,
891
921
  crop=True):
892
922
  # Load depth (in real units) and convert to pixels
893
- bedPick = round(sonMeta['dep_m'] / self.pixM, 0).astype(int)
923
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int)
894
924
  minDep = min(bedPick)
895
925
 
896
926
  sonDat = self.sonDat
@@ -910,7 +940,7 @@ class sonObj(object):
910
940
  def _WCO(self,
911
941
  sonMeta):
912
942
  # Load depth (in real units) and convert to pixels
913
- bedPick = round(sonMeta['dep_m'] / self.pixM, 0).astype(int)
943
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int)
914
944
  maxDep = max(bedPick)
915
945
 
916
946
  sonDat = self.sonDat
@@ -1273,8 +1303,15 @@ class sonObj(object):
1273
1303
  d = sonMeta['trk_dist'].to_numpy()
1274
1304
  d = np.max(d) - np.min(d)
1275
1305
 
1306
+ pixM = sonMeta['pixM']
1307
+ # Find most common pixel size
1308
+ if len(pixM.unique()) > 1:
1309
+ pixM = pixM.mode()[0]
1310
+ else:
1311
+ pixM = pixM.iloc[0]
1312
+
1276
1313
  # Distance in pix
1277
- d = round(d / self.pixM, 0).astype(int)
1314
+ d = round(d / pixM, 0).astype(int)
1278
1315
 
1279
1316
  sonDat = resize(sonDat,
1280
1317
  (sonDat.shape[0], d),
@@ -1988,7 +2025,7 @@ class sonObj(object):
1988
2025
  egn_means = self.egn_bed_means.copy() # Don't want to overwrite
1989
2026
 
1990
2027
  # Get bedpicks, in pixel units
1991
- bedPick = round(sonMeta['dep_m'] / self.pixM, 0).astype(int)
2028
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int).to_numpy()
1992
2029
 
1993
2030
  # Iterate each ping
1994
2031
  for j in range(sonDat.shape[1]):
@@ -2057,7 +2094,7 @@ class sonObj(object):
2057
2094
  del t, l
2058
2095
 
2059
2096
  # Get bedpicks, in pixel units
2060
- bedPick = round(sonMeta['dep_m'] / self.pixM, 0).astype(int)
2097
+ bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int).to_numpy()
2061
2098
 
2062
2099
  # Iterate each ping
2063
2100
  for j in range(sonDat.shape[1]):
pingmapper/funcs_rectify.py CHANGED
@@ -36,6 +36,10 @@ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
36
36
  PACKAGE_DIR = os.path.dirname(SCRIPT_DIR)
37
37
  sys.path.append(PACKAGE_DIR)
38
38
 
39
+ # # For Debug
40
+ # from funcs_common import *
41
+ # from class_rectObj import rectObj
42
+
39
43
  from pingmapper.funcs_common import *
40
44
  from pingmapper.class_rectObj import rectObj
41
45
 
@@ -171,6 +175,7 @@ def smoothTrackline(projDir='', x_offset='', y_offset='', nchunk ='', cog=True,
171
175
 
172
176
  sDF['chunk_id'] = sonDF['chunk_id']
173
177
  sDF['transect'] = sonDF['transect']
178
+ # sDF['pixM'] = sonDF['pixM'] # Add pixel size to smoothed trackline coordinates
174
179
 
175
180
  sDF.reset_index(inplace=True)
176
181
 
@@ -250,14 +255,17 @@ def smoothTrackline(projDir='', x_offset='', y_offset='', nchunk ='', cog=True,
250
255
  sDF.at[curRow, "utm_ns"] = lastRow["utm_ns"]
251
256
  sDF.at[curRow, "cog"] = lastRow["cog"]
252
257
  sDF.at[curRow, "instr_heading"] = lastRow["instr_heading"]
258
+ # sDF.at[curRow, 'pixM'] = lastRow['pixM']
259
+
260
+ del lastRow
253
261
  else:
254
262
  t += 1
255
263
 
256
264
  i+=1
257
- del lastRow, curRow, i
265
+ del curRow, i
258
266
 
259
267
  son0.smthTrk = sDF # Store smoothed trackline coordinates in rectObj.
260
-
268
+
261
269
  # Do positional correction
262
270
  if x_offset != 0.0 or y_offset != 0.0:
263
271
  son0._applyPosOffset(x_offset, y_offset)
@@ -274,6 +282,7 @@ def smoothTrackline(projDir='', x_offset='', y_offset='', nchunk ='', cog=True,
274
282
  df = son1.sonMetaDF
275
283
  sDF['chunk_id'] = df['chunk_id'] # Update chunk_id for smoothed coordinates
276
284
  sDF['record_num'] = df['record_num'] # Update record_num for smoothed coordinates
285
+ # sDF['pixM'] = df['pixM']
277
286
  son1.smthTrk = sDF # Store smoothed trackline coordinates in rectObj
278
287
 
279
288
  del sDF, df, son0, son1
pingmapper/gui_main.py CHANGED
@@ -80,7 +80,8 @@ def gui(batch: bool):
80
80
  text_input = sg.Text('Recording to Process')
81
81
  # in_input = sg.In(key='inFile', size=(80,1))
82
82
  in_input = sg.In(key='inFile', size=(80,1), default_text=default_params['inFile'])
83
- browse_input = sg.FileBrowse(file_types=(("Sonar File", "*.DAT *.sl2 *.sl3 *.svlog") ), initial_folder=os.path.dirname(default_params['inFile']))
83
+ browse_input = sg.FileBrowse(file_types=(("Sonar File", "*.DAT *.sl2 *.sl3 *.RSD *.svlog") ), initial_folder=os.path.dirname(default_params['inFile']))
84
+ # browse_input = sg.FileBrowse(file_types=(("Sonar File", "*.DAT *.sl2 *.sl3 *.svlog") ), initial_folder=os.path.dirname(default_params['inFile']))
84
85
 
85
86
  # Add to layout
86
87
  layout.append([text_io])
@@ -591,9 +592,10 @@ def gui(batch: bool):
591
592
  # Find all DAT and SON files in all subdirectories of inDir
592
593
  inFiles=[]
593
594
  for root, dirs, files in os.walk(inDir):
594
- for file in files:
595
- if file.endswith('.DAT') or file.endswith('.sl2') or file.endswith('.sl3'):
596
- inFiles.append(os.path.join(root, file))
595
+ if '__MACOSX' not in root:
596
+ for file in files:
597
+ if file.endswith('.DAT') or file.endswith('.sl2') or file.endswith('.sl3') or file.endswith('.RSD') or file.endswith('.svlog'):
598
+ inFiles.append(os.path.join(root, file))
597
599
 
598
600
  inFiles = sorted(inFiles)
599
601
 
pingmapper/main_mapSubstrate.py CHANGED
@@ -35,8 +35,13 @@ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
35
35
  PACKAGE_DIR = os.path.dirname(SCRIPT_DIR)
36
36
  sys.path.append(PACKAGE_DIR)
37
37
 
38
- from pingmapper.funcs_common import *
38
+ # # For debug
39
+ # from funcs_common import *
40
+ # from class_mapSubstrateObj import mapSubObj
41
+ # from class_portstarObj import portstarObj
42
+ # from funcs_model import *
39
43
 
44
+ from pingmapper.funcs_common import *
40
45
  from pingmapper.class_mapSubstrateObj import mapSubObj
41
46
  from pingmapper.class_portstarObj import portstarObj
42
47
  from pingmapper.funcs_model import *
@@ -275,7 +280,7 @@ def map_master_func(logfilename='',
275
280
  # Do prediction (make parallel later)
276
281
  print('\n\tPredicting substrate for', len(chunks), son.beamName, 'chunks')
277
282
 
278
- Parallel(n_jobs=np.min([len(chunks), threadCnt]), verbose=10)(delayed(son._detectSubstrate)(i, USE_GPU) for i in chunks)
283
+ Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(son._detectSubstrate)(i, USE_GPU) for i in tqdm(chunks))
279
284
 
280
285
  son._cleanup()
281
286
  son._pickleSon()
@@ -324,7 +329,7 @@ def map_master_func(logfilename='',
324
329
 
325
330
  # Plot substrate classification()
326
331
  # sys.exit()
327
- Parallel(n_jobs=np.min([len(toMap), threadCnt]), verbose=10)(delayed(son._pltSubClass)(map_class_method, c, f, spdCor=spdCor, maxCrop=maxCrop, probs=probs) for c, f in toMap.items())
332
+ Parallel(n_jobs=np.min([len(toMap), threadCnt]))(delayed(son._pltSubClass)(map_class_method, c, f, spdCor=spdCor, maxCrop=maxCrop, probs=probs) for c, f in tqdm((toMap.items())))
328
333
  son._pickleSon()
329
334
  del toMap
330
335
 
@@ -384,7 +389,7 @@ def map_master_func(logfilename='',
384
389
  # Create portstarObj
385
390
  psObj = portstarObj(mapObjs)
386
391
 
387
- Parallel(n_jobs=np.min([len(toMap), threadCnt]), verbose=10)(delayed(psObj._mapSubstrate)(map_class_method, c, f) for c, f in toMap.items())
392
+ Parallel(n_jobs=np.min([len(toMap), threadCnt]))(delayed(psObj._mapSubstrate)(map_class_method, c, f) for c, f in tqdm(toMap.items()))
388
393
 
389
394
  del toMap
390
395
  print("\nDone!")
@@ -524,7 +529,7 @@ def map_master_func(logfilename='',
524
529
  # Create portstarObj
525
530
  psObj = portstarObj(mapObjs)
526
531
 
527
- Parallel(n_jobs=np.min([len(toMap), threadCnt]), verbose=10)(delayed(psObj._mapPredictions)(map_predict, 'map_'+a, c, f) for c, f in toMap.items())
532
+ Parallel(n_jobs=np.min([len(toMap), threadCnt]))(delayed(psObj._mapPredictions)(map_predict, 'map_'+a, c, f) for c, f in tqdm(toMap.items()))
528
533
 
529
534
  del toMap, psObj
530
535
  print("\nDone!")
pingmapper/main_readFiles.py CHANGED
@@ -50,9 +50,9 @@ from doodleverse_utils.imports import *
50
50
 
51
51
  from scipy.signal import savgol_filter
52
52
 
53
- # sys.path.insert(0, r'C:\Users\cbodine\PythonRepos\PINGVerter')
53
+ sys.path.insert(0, r'Z:\UDEL\PythonRepos\PINGVerter')
54
54
 
55
- from pingverter import hum2pingmapper, low2pingmapper, cerul2pingmapper
55
+ from pingverter import hum2pingmapper, low2pingmapper, cerul2pingmapper, gar2pingmapper
56
56
 
57
57
  import cv2
58
58
 
@@ -314,6 +314,7 @@ def read_master_func(logfilename='',
314
314
  # Use PINGVerter to read the sonar file
315
315
  #######################################
316
316
 
317
+ instDepAvail = True
317
318
  start_time = time.time()
318
319
  # Determine sonar recording type
319
320
  _, file_type = os.path.splitext(inFile)
@@ -326,15 +327,27 @@ def read_master_func(logfilename='',
326
327
  elif file_type == '.sl2' or file_type == '.sl3':
327
328
  sonar_obj = low2pingmapper(inFile, projDir, nchunk, tempC, exportUnknown)
328
329
 
330
+ # Prepare Garmin file for PINGMapper
331
+ elif file_type == '.RSD':
332
+ sonar_obj = gar2pingmapper(inFile, projDir, nchunk, tempC, exportUnknown)
333
+
329
334
  # Prepare Cerulean file for PINGMapper
330
335
  elif file_type == '.svlog':
331
336
  sonar_obj = cerul2pingmapper(inFile, projDir, nchunk, tempC, exportUnknown)
332
337
  detectDep = 1 # No depth in cerulean files, so set to Zheng et al. 2021
338
+ instDepAvail = False
339
+
340
+ # Unknown
341
+ else:
342
+ print('\n\nERROR!\n\nFile type {} not supported at this time.'.format(file_type))
343
+ sys.exit()
333
344
 
334
345
  ####################
335
346
  # Create son objects
336
347
  ####################
337
348
 
349
+ # print(sonar_obj)
350
+
338
351
  # Get available beams and metadata
339
352
  beamMeta = sonar_obj.beamMeta
340
353
 
@@ -356,10 +369,17 @@ def read_master_func(logfilename='',
356
369
  son.beamName = meta['beamName']
357
370
  son.beam = beam
358
371
  son.headBytes = sonar_obj.headBytes
359
- son.pixM = sonar_obj.pixM
372
+ # son.pixM = sonar_obj.pixM
360
373
  son.isOnix = sonar_obj.isOnix
361
374
  son.trans = sonar_obj.trans
362
375
  son.humDat = sonar_obj.humDat
376
+ # if son.beamName == 'ss_port' or son.beamName == 'ss_star':
377
+ # son.son8bit = sonar_obj.son8bit
378
+ # else:
379
+ son.son8bit = sonar_obj.son8bit
380
+
381
+ # print(son.beamName, son.son8bit)
382
+
363
383
 
364
384
  if pix_res_son == 0:
365
385
  son.pix_res_son = 0
@@ -408,13 +428,24 @@ def read_master_func(logfilename='',
408
428
  son.cropRange = cropRange
409
429
  # Do range crop, if necessary
410
430
  if cropRange > 0.0:
411
- # Convert to distance in pix
412
- d = round(cropRange / son.pixM, 0).astype(int)
431
+ # # Convert to distance in pix
432
+ # d = round(cropRange / son.pixM, 0).astype(int)
433
+
434
+ # # Get sonMetaDF
435
+ # son._loadSonMeta()
436
+ # son.sonMetaDF.loc[son.sonMetaDF['ping_cnt'] > d, 'ping_cnt'] = d
437
+ # son._saveSonMetaCSV(son.sonMetaDF)
413
438
 
414
439
  # Get sonMetaDF
415
440
  son._loadSonMeta()
416
- son.sonMetaDF.loc[son.sonMetaDF['ping_cnt'] > d, 'ping_cnt'] = d
417
- son._saveSonMetaCSV(son.sonMetaDF)
441
+ df = son.sonMetaDF
442
+
443
+ # Convert to distance in pixels
444
+ d = round(cropRange / df['pixM'], 0).astype(int)
445
+
446
+ # Filter df
447
+ df.loc[df['ping_cnt'] > d, 'ping_cnt'] = d
448
+ son._saveSonMetaCSV(df)
418
449
 
419
450
  # Store flag to export un-rectified sonar tiles in each sonObj.
420
451
  for son in sonObjs:
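The range crop now computes a per-ping crop distance from the pixM column instead of a single pixel size. A sketch with hypothetical metadata (pandas aligns the Series assignment by index):

    import pandas as pd

    df = pd.DataFrame({'ping_cnt': [500, 800, 1200],
                       'pixM':     [0.02, 0.02, 0.04]})  # hypothetical ping metadata
    cropRange = 20.0                                      # metres

    # Per-ping crop distance in pixels, since pixM can vary between pings
    d = round(cropRange / df['pixM'], 0).astype(int)      # 1000, 1000, 500
    df.loc[df['ping_cnt'] > d, 'ping_cnt'] = d
    print(df['ping_cnt'].tolist())                        # [500, 800, 500]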
@@ -632,7 +663,7 @@ def read_master_func(logfilename='',
632
663
  del c, r, n, startB, rowCnt
633
664
 
634
665
  # Fix no data in parallel
635
- r = Parallel(n_jobs=threadCnt)(delayed(son._fixNoDat)(dfAll[r[0]:r[1]].copy().reset_index(drop=True), beams) for r in tqdm(range(len(rowsToProc))))
666
+ r = Parallel(n_jobs=threadCnt)(delayed(son._fixNoDat)(dfAll[r[0]:r[1]].copy().reset_index(drop=True), beams) for r in tqdm(rowsToProc))
636
667
  gc.collect()
637
668
 
638
669
  # Concatenate results from parallel processing
@@ -820,6 +851,14 @@ def read_master_func(logfilename='',
820
851
  df0 = df0[df0['filter'] == True]
821
852
  df1 = df1[df1['filter'] == True]
822
853
 
854
+ # Remove transects shorter than nchunk
855
+ df0=son0._filterShortTran(df0)
856
+ df1['filter'] = df0['filter']
857
+
858
+ # Apply the filter
859
+ df0 = df0[df0['filter'] == True]
860
+ df1 = df1[df1['filter'] == True]
861
+
823
862
  # Reasign the chunks
824
863
  df0 = son0._reassignChunks(df0)
825
864
  df1['chunk_id'] = df0['chunk_id']
@@ -916,7 +955,7 @@ def read_master_func(logfilename='',
916
955
  print('\n\tUsing binary thresholding...')
917
956
 
918
957
  # Parallel estimate depth for each chunk using appropriate method
919
- r = Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._detectDepth)(detectDep, int(chunk), USE_GPU, tileFile) for chunk in tqdm(range(len(chunks))))
958
+ r = Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._detectDepth)(detectDep, int(chunk), USE_GPU, tileFile) for chunk in tqdm(chunks))
920
959
 
921
960
  # store the depth predictions in the class
922
961
  for ret in r:
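Several parallel loops in this release iterate the chunk ids directly (tqdm(chunks)) rather than range(len(chunks)), which matters when chunk ids are non-contiguous. A minimal sketch of the pattern with a stand-in worker function:

    from joblib import Parallel, delayed
    from tqdm import tqdm

    def detect_depth(chunk):          # hypothetical stand-in for the real worker
        return chunk, chunk * 0.5

    chunks = [0, 1, 2, 5, 6]          # chunk ids are not necessarily 0..N-1

    # Iterate the ids themselves so gaps in the sequence are handled correctly
    r = Parallel(n_jobs=2)(delayed(detect_depth)(int(chunk)) for chunk in tqdm(chunks))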
@@ -941,7 +980,7 @@ def read_master_func(logfilename='',
941
980
 
942
981
  if saveDepth:
943
982
  # Save detected depth to csv
944
- depDF = psObj._saveDepth(chunks, detectDep, smthDep, adjDep)
983
+ depDF = psObj._saveDepth(chunks, detectDep, smthDep, adjDep, instDepAvail)
945
984
 
946
985
  # Store depths in downlooking sonar files also
947
986
  for son in sonObjs:
@@ -961,6 +1000,12 @@ def read_master_func(logfilename='',
961
1000
  dep = sonDF['inst_dep_m']
962
1001
  if smthDep:
963
1002
  dep = savgol_filter(dep, 51, 3)
1003
+
1004
+ # Interpolate over nan's (and set zero's to nan)
1005
+ dep = dep.to_numpy()
1006
+ dep[dep==0] = np.nan
1007
+ nans, x = np.isnan(dep), lambda z: z.nonzero()[0]
1008
+ dep[nans] = np.interp(x(nans), x(~nans), dep[~nans])
964
1009
 
965
1010
  sonDF['dep_m'] = dep + adjDep
966
1011
 
@@ -982,7 +1027,7 @@ def read_master_func(logfilename='',
982
1027
  start_time = time.time()
983
1028
 
984
1029
  print("\n\nExporting bedpick plots to {}...".format(tileFile))
985
- Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._plotBedPick)(int(chunk), True, autoBed, tileFile) for chunk in tqdm(range(len(chunks))))
1030
+ Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._plotBedPick)(int(chunk), True, autoBed, tileFile) for chunk in tqdm(chunks))
986
1031
 
987
1032
  print("\nDone!")
988
1033
  print("Time (s):", round(time.time() - start_time, ndigits=1))
@@ -1068,7 +1113,7 @@ def read_master_func(logfilename='',
1068
1113
  psObj.port.shadow = defaultdict()
1069
1114
  psObj.star.shadow = defaultdict()
1070
1115
 
1071
- r = Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._detectShadow)(remShadow, int(chunk), USE_GPU, False, tileFile) for chunk in tqdm(range(len(chunks))))
1116
+ r = Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._detectShadow)(remShadow, int(chunk), USE_GPU, False, tileFile) for chunk in tqdm(chunks))
1072
1117
 
1073
1118
  for ret in r:
1074
1119
  psObj.port.shadow[ret[0]] = ret[1]
@@ -1123,7 +1168,7 @@ def read_master_func(logfilename='',
1123
1168
 
1124
1169
  # Calculate range-wise mean intensity for each chunk
1125
1170
  print('\n\tCalculating range-wise mean intensity for each chunk...')
1126
- chunk_means = Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._egnCalcChunkMeans)(i) for i in tqdm(range(len(chunks))))
1171
+ chunk_means = Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._egnCalcChunkMeans)(i) for i in tqdm(chunks))
1127
1172
 
1128
1173
  # Calculate global means
1129
1174
  print('\n\tCalculating range-wise global means...')
@@ -1132,7 +1177,7 @@ def read_master_func(logfilename='',
1132
1177
 
1133
1178
  # Calculate egn min and max for each chunk
1134
1179
  print('\n\tCalculating EGN min and max values for each chunk...')
1135
- min_max = Parallel(n_jobs= np.min([len(chunks)]))(delayed(son._egnCalcMinMax)(i) for i in tqdm(range(len(chunks))))
1180
+ min_max = Parallel(n_jobs= np.min([len(chunks)]))(delayed(son._egnCalcMinMax)(i) for i in tqdm(chunks))
1136
1181
 
1137
1182
  # Calculate global min max for each channel
1138
1183
  son._egnCalcGlobalMinMax(min_max)
@@ -1184,7 +1229,7 @@ def read_master_func(logfilename='',
1184
1229
  chunks = chunks[:-1] # remove last chunk
1185
1230
 
1186
1231
  print('\n\tCalculating EGN corrected histogram for', son.beamName)
1187
- hist = Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._egnCalcHist)(i) for i in tqdm(range(len(chunks))))
1232
+ hist = Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._egnCalcHist)(i) for i in tqdm(chunks))
1188
1233
 
1189
1234
  print('\n\tCalculating global EGN corrected histogram')
1190
1235
  son._egnCalcGlobalHist(hist)
@@ -1311,8 +1356,10 @@ def read_master_func(logfilename='',
1311
1356
  # Load sonMetaDF
1312
1357
  son._loadSonMeta()
1313
1358
 
1314
- # Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._exportTiles)(i, tileFile) for i in tqdm(range(len(chunks))))
1315
- Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._exportTilesSpd)(i, tileFile=imgType, spdCor=spdCor, mask_shdw=mask_shdw, maxCrop=maxCrop) for i in tqdm(range(len(chunks))))
1359
+ Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._exportTilesSpd)(i, tileFile=imgType, spdCor=spdCor, mask_shdw=mask_shdw, maxCrop=maxCrop) for i in tqdm(chunks))
1360
+ # for i in tqdm(chunks):
1361
+ # son._exportTilesSpd(i, tileFile=imgType, spdCor=spdCor, mask_shdw=mask_shdw, maxCrop=maxCrop)
1362
+ # sys.exit()
1316
1363
 
1317
1364
  if moving_window and not spdCor:
1318
1365
 
pingmapper/main_rectify.py CHANGED
@@ -44,10 +44,10 @@ sys.path.append(PACKAGE_DIR)
44
44
  # from class_portstarObj import portstarObj
45
45
  # from funcs_rectify import smoothTrackline
46
46
 
47
- # from pingmapper.funcs_common import *
48
- # from pingmapper.class_rectObj import rectObj
49
- # from pingmapper.class_portstarObj import portstarObj
50
- # from pingmapper.funcs_rectify import smoothTrackline
47
+ from pingmapper.funcs_common import *
48
+ from pingmapper.class_rectObj import rectObj
49
+ from pingmapper.class_portstarObj import portstarObj
50
+ from pingmapper.funcs_rectify import smoothTrackline
51
51
 
52
52
  import inspect
53
53
 
@@ -310,7 +310,7 @@ def rectify_master_func(logfilename='',
310
310
  # COG Pre-processing #
311
311
  # ##########################################################################
312
312
 
313
- for son in portstar:
313
+ # for son in portstar:
314
314
  son.rect_wcp = rect_wcp
315
315
  son.rect_wcr = rect_wcr
316
316
 
@@ -364,7 +364,7 @@ def rectify_master_func(logfilename='',
364
364
  print('\n\tExporting', len(chunks), 'GeoTiffs for', son.beamName)
365
365
 
366
366
  # Parallel(n_jobs= np.min([len(sDF), threadCnt]))(delayed(son._rectSonHeadingMain)(sonarCoordsDF[sonarCoordsDF['chunk_id']==chunk], chunk) for chunk in tqdm(range(len(chunks))))
367
- Parallel(n_jobs= np.min([len(sDF), threadCnt]))(delayed(son._rectSonHeadingMain)(sDF[sDF['chunk_id']==chunk], chunk, heading=heading, interp_dist=rectInterpDist) for chunk in tqdm(range(len(chunks))))
367
+ Parallel(n_jobs= np.min([len(sDF), threadCnt]))(delayed(son._rectSonHeadingMain)(sDF[sDF['chunk_id']==chunk], chunk, heading=heading, interp_dist=rectInterpDist) for chunk in tqdm(chunks))
368
368
  # for i in chunks:
369
369
  # # son._rectSonHeading(sonarCoordsDF[sonarCoordsDF['chunk_id']==i], i)
370
370
  # r = son._rectSonHeadingMain(sDF[sDF['chunk_id']==i], i, heading=heading, interp_dist=rectInterpDist)
@@ -402,6 +402,8 @@ def rectify_master_func(logfilename='',
402
402
  son.rect_wcr = rect_wcr
403
403
 
404
404
  if (rect_wcp and rubberSheeting) or (rect_wcr and rubberSheeting):
405
+ # Always use COG for rubber sheeting
406
+ cog = True
405
407
  for son in portstar:
406
408
  # Set output directory
407
409
  son.outDir = os.path.join(son.projDir, son.beamName)
@@ -418,8 +420,8 @@ def rectify_master_func(logfilename='',
418
420
  print('\n\tExporting', len(chunks), 'GeoTiffs for', son.beamName)
419
421
  # for i in chunks:
420
422
  # son._rectSonRubber(i, filter, cog, wgs=False)
421
- # sys.exit()
422
- Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._rectSonRubber)(i, filter, cog, wgs=False) for i in tqdm(range(len(chunks))))
423
+ # sys.exit()
424
+ Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._rectSonRubber)(i, filter, cog, wgs=False) for i in tqdm(chunks))
423
425
  son._cleanup()
424
426
  gc.collect()
425
427
  printUsage()
pingmapper/test_time.py CHANGED
@@ -17,9 +17,5 @@ converted_date = start_date + timedelta(seconds=custom_unix_time)
17
17
  eastern = pytz.timezone("US/Eastern")
18
18
  converted_date_eastern = converted_date.astimezone(eastern)
19
19
 
20
- print(start_date)
21
- print(unix_timestamp)
22
20
  print("Custom Unix Start Time:", unix_timestamp)
23
- print(converted_date)
24
- print("Converted Date and Time:", converted_date_eastern)
25
- print(timedelta(seconds=custom_unix_time))
21
+ print("Converted Date and Time:", converted_date_eastern)
pingmapper/version.py CHANGED
@@ -1 +1 @@
1
- __version__ = '4.2.12'
1
+ __version__ = '5.0.0'
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pingmapper
3
- Version: 4.2.12
3
+ Version: 5.0.0
4
4
  Summary: Open-source interface for processing recreation-grade side scan sonar datasets and reproducibly mapping benthic habitat
5
5
  Author: Cameron Bodine
6
6
  Author-email: bodine.cs@gmail.email
@@ -0,0 +1,22 @@
1
+ pingmapper/__init__.py,sha256=8zLGg-DfQhnDl2Ky0n-zXpN-8e-g7iR0AcaI4l4Vvpk,32
2
+ pingmapper/__main__.py,sha256=6qBbTK3eg_Er2SlUgBUaR0CBrJCKB9P2dpJaq2y5y3g,1421
3
+ pingmapper/class_mapSubstrateObj.py,sha256=txB9YqXfVgDFi628jo2F1RtEw1lCNR8wYpkhy94bn0U,37036
4
+ pingmapper/class_portstarObj.py,sha256=yeYn-0M3-Zna0F-goPQMca-keNivCiXg9YHQovsluoE,106719
5
+ pingmapper/class_rectObj.py,sha256=aJbM-q3UNBhvWutOJwnweGkWTrx6avslX2DCWdofW4Q,96182
6
+ pingmapper/class_sonObj.py,sha256=pohaVOZEGazWXbRETbGPEkNXIH4GaWN3nNKWBt0BLFc,75606
7
+ pingmapper/funcs_common.py,sha256=pCOIy83srtYKtKUnF4oXkd83_kimZI0NiSrO0TE0O4g,13414
8
+ pingmapper/funcs_model.py,sha256=dO9J4-0s1COggHkSUqHFC1qKTs20A6PSvkDqXWMUT6A,7916
9
+ pingmapper/funcs_rectify.py,sha256=bAFWbNr4dyOuQzF1j4Je-K4zxfJZUjISc0nYrOot8Ng,12418
10
+ pingmapper/gui_main.py,sha256=5JsOYqRu146T1xhRgYC9ag3vnpRWqa2PDuJky595YA0,34780
11
+ pingmapper/main_mapSubstrate.py,sha256=E7jYmKHATXSk5XWhPR-pWH0288wurhX5ph94Gp_v0eg,21217
12
+ pingmapper/main_readFiles.py,sha256=JHSBzemgwqLFXOZEqPbn87yUbsyqHaj6aD7kY9vtuN0,55327
13
+ pingmapper/main_rectify.py,sha256=818lQDTrtLQ-pMK6lbLBRMJ9tA0KdS9creRArnqj_Jg,19913
14
+ pingmapper/test_PINGMapper.py,sha256=-SYMsdK-tTodXp5dCFSWPn-KRN7-OjX6OwjQ2-8hQg0,14176
15
+ pingmapper/test_time.py,sha256=uHT0mtLDP1J6w-snoELyk4UzZ2LrDeF6jGgJJVloalg,750
16
+ pingmapper/version.py,sha256=N8pD9_ASuFWBcLPnEfCtCRdScc4QgDFxHmk8kQfA2ww,21
17
+ pingmapper-5.0.0.data/data/pingmapper_config/default_params.json,sha256=YA9Rx1PSdUy4cTq-vtKORo3nNLisCYNOeUBxClldmHs,1285
18
+ pingmapper-5.0.0.dist-info/licenses/LICENSE,sha256=lowDp_th1CGR0Z224a-jYRi-oNFe_0fdldL3USXhX-k,1095
19
+ pingmapper-5.0.0.dist-info/METADATA,sha256=80jMvKHmzDcCSlS0iRFI7-hxCJN2Ki8hvSHHw6-zy88,9173
20
+ pingmapper-5.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
21
+ pingmapper-5.0.0.dist-info/top_level.txt,sha256=RlV4sDoE3uIIDzNMOjN2t012Ia_jsblNVojJvg4q84w,11
22
+ pingmapper-5.0.0.dist-info/RECORD,,
@@ -1,22 +0,0 @@
1
- pingmapper/__init__.py,sha256=8zLGg-DfQhnDl2Ky0n-zXpN-8e-g7iR0AcaI4l4Vvpk,32
2
- pingmapper/__main__.py,sha256=6qBbTK3eg_Er2SlUgBUaR0CBrJCKB9P2dpJaq2y5y3g,1421
3
- pingmapper/class_mapSubstrateObj.py,sha256=FXw61jZ2Fnpk74vZl4PT0ijlN9gz1S-kIf8RyyrrI1Y,36785
4
- pingmapper/class_portstarObj.py,sha256=1jFyKUXVL_0rzCA0sQREiCS9qp0ZQqPshOIk0XHcknM,105527
5
- pingmapper/class_rectObj.py,sha256=GV14tTFHc_2XCLTZ-niVDMU3ohH9puHVqWgtjndMW4M,90897
6
- pingmapper/class_sonObj.py,sha256=jxo_QBtYJmM9oJlxPfcKutc_8HkEj1hBdP5F6-61Mz8,74242
7
- pingmapper/funcs_common.py,sha256=pCOIy83srtYKtKUnF4oXkd83_kimZI0NiSrO0TE0O4g,13414
8
- pingmapper/funcs_model.py,sha256=dO9J4-0s1COggHkSUqHFC1qKTs20A6PSvkDqXWMUT6A,7916
9
- pingmapper/funcs_rectify.py,sha256=Goh5Yon_qP93dLjq8Vv_qezPxw1H0Yb7Rw4MCTt9Z8U,12114
10
- pingmapper/gui_main.py,sha256=w9E9pHjXLTzeu8rt1iohcMxGxR1ot8O_G6prHRW13bc,34509
11
- pingmapper/main_mapSubstrate.py,sha256=obzB_uM0N8z7C0DJFsk1HGaFWd7g_U5Ejghdxey86u0,21073
12
- pingmapper/main_readFiles.py,sha256=vUKP44xnuza0sRcsyfkB2jOtYwR2tlkgYMnSdThgHuI,53861
13
- pingmapper/main_rectify.py,sha256=npSnzyNtGgVaeHN7IAzuufjzkttFA46buEm_g-I-QRY,19877
14
- pingmapper/test_PINGMapper.py,sha256=-SYMsdK-tTodXp5dCFSWPn-KRN7-OjX6OwjQ2-8hQg0,14176
15
- pingmapper/test_time.py,sha256=ZdlaA9ODWgFYi63Jjz0byP6aLV7E9QTQYf0BGfa7SaY,859
16
- pingmapper/version.py,sha256=-FYI3q0s14GXAZBzWvNE0NIBQuGXxXwo7zf0FXvGBuU,22
17
- pingmapper-4.2.12.data/data/pingmapper_config/default_params.json,sha256=YA9Rx1PSdUy4cTq-vtKORo3nNLisCYNOeUBxClldmHs,1285
18
- pingmapper-4.2.12.dist-info/licenses/LICENSE,sha256=lowDp_th1CGR0Z224a-jYRi-oNFe_0fdldL3USXhX-k,1095
19
- pingmapper-4.2.12.dist-info/METADATA,sha256=FlnDfiiB1K0WNOIbxdckLmSCv6rsHV5BmtJcma9jCR8,9174
20
- pingmapper-4.2.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
21
- pingmapper-4.2.12.dist-info/top_level.txt,sha256=RlV4sDoE3uIIDzNMOjN2t012Ia_jsblNVojJvg4q84w,11
22
- pingmapper-4.2.12.dist-info/RECORD,,