pingmapper 4.2.13__tar.gz → 5.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pingmapper-4.2.13 → pingmapper-5.0.1}/PKG-INFO +1 -1
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/class_mapSubstrateObj.py +18 -10
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/class_portstarObj.py +54 -20
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/class_rectObj.py +167 -36
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/class_sonObj.py +31 -8
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/funcs_rectify.py +11 -2
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/gui_main.py +6 -5
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/main_mapSubstrate.py +10 -5
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/main_readFiles.py +48 -20
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/main_rectify.py +6 -4
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/test_time.py +1 -5
- pingmapper-5.0.1/pingmapper/version.py +1 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper.egg-info/PKG-INFO +1 -1
- pingmapper-4.2.13/pingmapper/version.py +0 -1
- {pingmapper-4.2.13 → pingmapper-5.0.1}/LICENSE +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/README.md +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/__init__.py +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/__main__.py +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/default_params.json +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/funcs_common.py +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/funcs_model.py +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/test_PINGMapper.py +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper.egg-info/SOURCES.txt +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper.egg-info/dependency_links.txt +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper.egg-info/requires.txt +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper.egg-info/top_level.txt +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/setup.cfg +0 -0
- {pingmapper-4.2.13 → pingmapper-5.0.1}/setup.py +0 -0
{pingmapper-4.2.13 → pingmapper-5.0.1}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pingmapper
-Version: 4.2.13
+Version: 5.0.1
 Summary: Open-source interface for processing recreation-grade side scan sonar datasets and reproducibly mapping benthic habitat
 Author: Cameron Bodine
 Author-email: bodine.cs@gmail.email
{pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/class_mapSubstrateObj.py
@@ -36,9 +36,15 @@ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
 PACKAGE_DIR = os.path.dirname(SCRIPT_DIR)
 sys.path.append(PACKAGE_DIR)
 
+# # For Debug
+# from funcs_common import *
+# from funcs_model import *
+# from class_rectObj import rectObj
+
 from pingmapper.funcs_common import *
 from pingmapper.funcs_model import *
 from pingmapper.class_rectObj import rectObj
+
 from mpl_toolkits.axes_grid1 import make_axes_locatable
 
 import matplotlib
@@ -313,7 +319,7 @@ class mapSubObj(rectObj):
 lOffL = self.sonDat.shape[1]
 
 # Get sonMetaDF
-lMetaDF = df.loc[df['chunk_id'] == l, ['dep_m']].copy().reset_index()
+lMetaDF = df.loc[df['chunk_id'] == l, ['dep_m', 'pixM']].copy().reset_index()
 
 # Remove shadows
 if self.remShadow:
@@ -357,7 +363,7 @@ class mapSubObj(rectObj):
 lOffR = lOffL + self.sonDat.shape[1]
 
 # Get sonMetaDF
-cMetaDF = df.loc[df['chunk_id'] == c, ['dep_m']].copy().reset_index()
+cMetaDF = df.loc[df['chunk_id'] == c, ['dep_m', 'pixM']].copy().reset_index()
 
 # Remove shadows
 if self.remShadow:
@@ -391,7 +397,7 @@ class mapSubObj(rectObj):
 self._getScanChunkSingle(r)
 
 # Get sonMetaDF
-rMetaDF = df.loc[df['chunk_id'] == r, ['dep_m']].copy().reset_index()
+rMetaDF = df.loc[df['chunk_id'] == r, ['dep_m', 'pixM']].copy().reset_index()
 
 # Remove shadows
 if self.remShadow:
@@ -688,7 +694,7 @@ class mapSubObj(rectObj):
 df = self.sonMetaDF
 
 # Get sonMetaDF
-df = df.loc[df['chunk_id'] == chunk, ['dep_m']].copy().reset_index()
+df = df.loc[df['chunk_id'] == chunk, ['dep_m', 'pixM']].copy().reset_index()
 
 # Load sonDat
 self._getScanChunkSingle(chunk)
@@ -718,7 +724,7 @@ class mapSubObj(rectObj):
 # Plot Classification
 
 # Get final classification
-label = self._classifySoftmax(chunk, softmax, map_class_method, mask_wc=True, mask_shw=True)
+label = self._classifySoftmax(chunk, softmax, map_class_method, df=df, mask_wc=True, mask_shw=True)
 
 # Do speed correction
 if spdCor>0:
@@ -904,7 +910,7 @@ class mapSubObj(rectObj):
 ############################################################################
 
 #=======================================================================
-def _classifySoftmax(self, i, arr, map_class_method='max', mask_wc=True, mask_shw=True, do_filt=True):
+def _classifySoftmax(self, i, arr, map_class_method='max', df=None, mask_wc=True, mask_shw=True, do_filt=True):
 '''
 Classify pixels from softmax values.
 
@@ -1005,7 +1011,7 @@ class mapSubObj(rectObj):
 min_size = 28
 
 # Filter small regions and holes
-label = self._filterLabel(label, min_size)
+label = self._filterLabel(label, min_size, df=df)
 
 return label
 
@@ -1058,7 +1064,7 @@ class mapSubObj(rectObj):
 
 
 #=======================================================================
-def _filterLabel(self, l, min_size):
+def _filterLabel(self, l, min_size, df=None):
 '''
 For a classified substrate label, small holes/objects are removed,
 and pixels classified as NoData are removed and adjecent class is
@@ -1080,8 +1086,10 @@ class mapSubObj(rectObj):
 Next Processing Step
 --------------------
 '''
-# Get pixel size (in meters)
-pix_m = self.pixM
+# # Get pixel size (in meters)
+# pix_m = self.pixM
+pix_m = df['pixM'].values[0] if df is not None else 0.02
+
 
 # Convert min size to pixels
 min_size = int(min_size/pix_m)
{pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/class_portstarObj.py
@@ -37,6 +37,10 @@ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
 PACKAGE_DIR = os.path.dirname(SCRIPT_DIR)
 sys.path.append(PACKAGE_DIR)
 
+# # For Debug
+# from funcs_common import *
+# from funcs_model import *
+
 from pingmapper.funcs_common import *
 from pingmapper.funcs_model import *
 
@@ -1278,7 +1282,7 @@ class portstarObj(object):
 isChunk = son.sonMetaDF['chunk_id']==1
 sonMeta = son.sonMetaDF[isChunk].reset_index()
 # acousticBed = round(sonMeta['inst_dep_m'] / sonMeta['pix_m'], 0).astype(int)
-acousticBed = round(sonMeta['inst_dep_m'] /
+acousticBed = round(sonMeta['inst_dep_m'] / sonMeta['pixM'], 0).astype(int)
 
 ##################################
 # Step 1 : Acoustic Bedpick Filter
@@ -1403,7 +1407,8 @@ class portstarObj(object):
 chunksPred,
 detectDep=0,
 smthDep=False,
-adjDep=False
+adjDep=False,
+instDepAvail=True):
 '''
 Converts bedpick location (in pixels) to a depth in meters and additionally
 smooth and adjust depth estimate.
@@ -1478,8 +1483,8 @@ class portstarObj(object):
 portDF['dep_m_smth'] = smthDep
 starDF['dep_m_smth'] = smthDep
 
-portDF['dep_m_adjBy'] = str(adjDep /
-starDF['dep_m_adjBy'] = str(adjDep /
+portDF['dep_m_adjBy'] = str(adjDep / portDF['pixM']) + ' pixels'
+starDF['dep_m_adjBy'] = str(adjDep / starDF['pixM']) + ' pixels'
 
 elif detectDep > 0:
 # Prepare depth detection dictionaries
@@ -1532,8 +1537,8 @@ class portstarObj(object):
 starFinal = savgol_filter(starFinal, 51, 3)
 
 # Convert pix to depth [m]
-portFinal = np.asarray(portFinal) *
-starFinal = np.asarray(starFinal) *
+portFinal = np.asarray(portFinal) * portDF['pixM']
+starFinal = np.asarray(starFinal) * starDF['pixM']
 
 # Set negatives to 0
 portFinal = np.where(portFinal<0, 0, portFinal)
@@ -1545,6 +1550,10 @@ class portstarObj(object):
 portDF['dep_m'] = portFinal
 starDF['dep_m'] = starFinal
 
+if not instDepAvail:
+portDF['inst_dep_m'] = 0
+starDF['inst_dep_m'] = 0
+
 if adjDep != 0:
 adjBy = adjDep
 portDF['dep_m'] += adjBy
@@ -1560,8 +1569,23 @@ class portstarObj(object):
 portDF['dep_m_smth'] = smthDep
 starDF['dep_m_smth'] = smthDep
 
-portDF['dep_m_adjBy'] = str(adjDep /
-starDF['dep_m_adjBy'] = str(adjDep /
+portDF['dep_m_adjBy'] = str(adjDep / portDF['pixM']) + ' pixels'
+starDF['dep_m_adjBy'] = str(adjDep / starDF['pixM']) + ' pixels'
+
+# Interpolate over nan's (and set zeros to nan)
+portDep = portDF['dep_m'].to_numpy()
+starDep = starDF['dep_m'].to_numpy()
+
+portDep[portDep == 0] = np.nan
+starDep[starDep == 0] = np.nan
+
+nans, x = np.isnan(portDep), lambda z: z.nonzero()[0]
+portDep[nans] = np.interp(x(nans), x(~nans), portDep[~nans])
+portDF['dep_m'] = portDep
+
+nans, x = np.isnan(starDep), lambda z: z.nonzero()[0]
+starDep[nans] = np.interp(x(nans), x(~nans), starDep[~nans])
+starDF['dep_m'] = starDep
 
 # Export to csv
 portDF.to_csv(self.port.sonMetaFile, index=False, float_format='%.14f')
@@ -1638,14 +1662,14 @@ class portstarObj(object):
 self.star._loadSonMeta()
 starDF = self.star.sonMetaDF
 
-portDF = portDF.loc[portDF['chunk_id'] == i, ['inst_dep_m', 'dep_m']]
-starDF = starDF.loc[starDF['chunk_id'] == i, ['inst_dep_m', 'dep_m']]
+portDF = portDF.loc[portDF['chunk_id'] == i, ['inst_dep_m', 'dep_m', 'pixM']]
+starDF = starDF.loc[starDF['chunk_id'] == i, ['inst_dep_m', 'dep_m', 'pixM']]
 
-portInst = (portDF['inst_dep_m'] /
-portAuto = (portDF['dep_m'] /
+portInst = (portDF['inst_dep_m'] / portDF['pixM']).to_numpy(dtype=int, copy=True)
+portAuto = (portDF['dep_m'] / portDF['pixM']).to_numpy(dtype=int, copy=True)
 
-starInst = (starDF['inst_dep_m'] /
-starAuto = (starDF['dep_m'] /
+starInst = (starDF['inst_dep_m'] / starDF['pixM']).to_numpy(dtype=int, copy=True)
+starAuto = (starDF['dep_m'] / starDF['pixM']).to_numpy(dtype=int, copy=True)
 
 # Ensure port/star same length
 if (portAuto.shape[0] != starAuto.shape[0]):
@@ -1807,8 +1831,8 @@ class portstarObj(object):
 starDF = self.star.sonMetaDF
 
 # Get depth/ pix scaler for given chunk
-portDF = portDF.loc[portDF['chunk_id'] == i, ['dep_m']].reset_index()
-starDF = starDF.loc[starDF['chunk_id'] == i, ['dep_m']].reset_index()
+portDF = portDF.loc[portDF['chunk_id'] == i, ['dep_m', 'pixM']].reset_index()
+starDF = starDF.loc[starDF['chunk_id'] == i, ['dep_m', 'pixM']].reset_index()
 
 # Load sonar
 self.port._getScanChunkSingle(i)
@@ -1842,8 +1866,8 @@ class portstarObj(object):
 
 ###########################################
 # Remove shadow predictions in water column
-bedpickPort = round(portDF['dep_m'] /
-bedpickStar = round(starDF['dep_m'] /
+bedpickPort = round(portDF['dep_m'] / portDF['pixM'], 0).astype(int)
+bedpickStar = round(starDF['dep_m'] / starDF['pixM'], 0).astype(int)
 
 for j in range(pMask.shape[1]):
 depth = bedpickPort[j]
@@ -2336,7 +2360,17 @@ class portstarObj(object):
 ## top-left coordinate a value of (0,0)
 
 # Get pixel size
-pix_m = self.port.pixM
+# pix_m = self.port.pixM
+self.port._loadSonMeta()
+isChunk = self.port.sonMetaDF['chunk_id']==chunk
+sonMeta = self.port.sonMetaDF[isChunk].reset_index()
+
+pixM = sonMeta['pixM']
+# Find most common pixel size
+if len(pixM.unique()) > 1:
+pixM = pixM.mode()[0]
+else:
+pixM = pixM.iloc[0]
 
 # Determine min/max for rescaling
 xMin, xMax = dst[:,0].min(), dst[:,0].max() # Min/Max of x coordinates
@@ -2346,7 +2380,7 @@ class portstarObj(object):
 outShapeM = [xMax-xMin, yMax-yMin] # Calculate range of x,y coordinates
 outShape=[0,0]
 # Divide by pixel size to arrive at output shape of warped image
-outShape[0], outShape[1] = round(outShapeM[0]/
+outShape[0], outShape[1] = round(outShapeM[0]/pixM,0), round(outShapeM[1]/pixM,0)
 
 # Rescale destination coordinates
 # X values
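Note: the _saveDepth changes above (and a matching change in main_readFiles.py further down) fill zero or missing depths by linear interpolation with a small NumPy idiom. A minimal, self-contained sketch of that idiom follows; the function name interp_over_nans and the sample values are illustrative, not taken from PINGMapper.

import numpy as np

def interp_over_nans(a):
    # Replace NaNs in a 1-D array by linear interpolation over the valid samples.
    a = np.asarray(a, dtype=float).copy()
    nans = np.isnan(a)
    idx = lambda mask: mask.nonzero()[0]
    a[nans] = np.interp(idx(nans), idx(~nans), a[~nans])
    return a

depths = np.array([1.2, 0.0, 1.4, np.nan, 1.8])
depths[depths == 0] = np.nan        # the diff treats zero depths as missing
print(interp_over_nans(depths))     # [1.2 1.3 1.4 1.6 1.8]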
{pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/class_rectObj.py
@@ -250,9 +250,15 @@ class rectObj(sonObj):
 # Attempt to fix error
 # https://stackoverflow.com/questions/47948453/scipy-interpolate-splprep-error-invalid-inputs
 okay = np.where(np.abs(np.diff(x))+np.abs(np.diff(y))>0)
-x = np.r_[x[okay], x[-1]]
-y = np.r_[y[okay], y[-1]]
-t = np.r_[t[okay], t[-1]]
+x = np.r_[x[okay], x[-1]].astype('float64')
+y = np.r_[y[okay], y[-1]].astype('float64')
+t = np.r_[t[okay], t[-1]].astype('float64')
+
+# Remove any non-finite values (NaN or inf)
+mask = np.isfinite(x) & np.isfinite(y) & np.isfinite(t)
+x = x[mask]
+y = y[mask]
+t = t[mask]
 
 # Check if enough points to interpolate
 # If not, too many overlapping pings
@@ -263,14 +269,29 @@ class rectObj(sonObj):
 # Fit a spline to filtered coordinates and parameterize with time ellapsed
 try:
 tck, _ = splprep([x,y], u=t, k=deg, s=0)
-except:
+# except:
+# # Time is messed up (negative time offset)
+# # Parameterize with record num instead
+# zU = 'record_num'
+# t = dfFilt[zU].to_numpy()
+# t = np.r_[t[okay], t[-1]]
+# tck, _ = splprep([x,y], u=t, k=deg, s=0)
+# u_interp = dfOrig[zU].to_numpy()
+except Exception as e:
+print("splprep failed with error:", e)
 # Time is messed up (negative time offset)
 # Parameterize with record num instead
 zU = 'record_num'
-t = dfFilt[zU].to_numpy()
+t = dfFilt[zU].to_numpy(dtype='float64')
 t = np.r_[t[okay], t[-1]]
-
-
+# Ensure float and finite
+t = np.asarray(t, dtype='float64')
+mask = np.isfinite(x) & np.isfinite(y) & np.isfinite(t)
+x = x[mask]
+y = y[mask]
+t = t[mask]
+tck, _ = splprep([x, y], u=t, k=deg, s=0)
+u_interp = dfOrig[zU].to_numpy(dtype='float64')
 
 x_interp = splev(u_interp, tck) # Use u_interp to get smoothed x/y coordinates from spline
 
@@ -284,7 +305,7 @@ class rectObj(sonObj):
 'record_num': dfOrig['record_num'],
 'ping_cnt': dfOrig['ping_cnt'],
 'time_s': dfOrig['time_s'],
-'
+'pixM': dfOrig['pixM'],
 lons: x_interp[0],
 lats: x_interp[1],
 'dep_m': dfOrig['dep_m'],
@@ -568,17 +589,29 @@ class rectObj(sonObj):
 # maxPing = chunk[ping_cnt].max() # Find max ping count for each chunk
 # New method to find maxPing based on most numerous ping count
 maxPing = []
+pixM_all = []
 for name, group in sDF.groupby(chunk_id):
 rangeCnt = np.unique(group[ping_cnt], return_counts=True)
 pingMaxi = np.argmax(rangeCnt[1])
 maxPing.append(int(rangeCnt[0][pingMaxi]))
+
+# Get pixM from sonMetaDF
+pixM = sonMetaDF.loc[sonMetaDF['chunk_id']==name, 'pixM'] # Get pixel size for each chunk
+# Find most common pixel size
+if len(pixM.unique()) > 1:
+pixM = pixM.mode()[0]
+else:
+pixM = pixM.iloc[0]
+pixM_all.append(pixM)
+
+
 # Convert maxPing i to pd series
 maxPing = pd.Series(maxPing)
 
 # pix_m = chunk['pix_m'].min() # Get pixel size for each chunk
-pix_m = self.pixM # Get pixel size for each chunk
+# pix_m = self.pixM # Get pixel size for each chunk
 for i in maxPing.index: # Calculate range (in meters) for each chunk
-sDF.loc[sDF[chunk_id]==i, range_] = maxPing[i]*
+sDF.loc[sDF[chunk_id]==i, range_] = maxPing[i]* pixM_all[i] # Calculate range in meters for each chunk
 
 ##################################################
 # Calculate range extent coordinates for each ping
@@ -622,7 +655,7 @@ class rectObj(sonObj):
 if cog:
 self._interpRangeCoords(filt)
 else:
-sDF = sDF[['record_num', 'chunk_id', 'ping_cnt', 'time_s', 'lons', 'lats', 'utm_es', 'utm_ns', 'instr_heading', 'cog', 'dep_m', 'range', 'range_lon', 'range_lat', 'range_e', 'range_n', ping_bearing, 'transect']].copy()
+sDF = sDF[['record_num', 'chunk_id', 'ping_cnt', 'time_s', 'lons', 'lats', 'utm_es', 'utm_ns', 'instr_heading', 'cog', 'dep_m', 'range', 'range_lon', 'range_lat', 'range_e', 'range_n', ping_bearing, 'transect', 'pixM']].copy()
 sDF.rename(columns={'lons': 'trk_lons', 'lats': 'trk_lats', 'utm_es': 'trk_utm_es', 'utm_ns': 'trk_utm_ns', 'cog': 'trk_cog', 'range_lat':'range_lats', 'range_lon':'range_lons', 'range_e':'range_es', 'range_n':'range_ns'}, inplace=True)
 sDF['chunk_id_2'] = sDF.index.astype(int)
 
@@ -741,7 +774,7 @@ class rectObj(sonObj):
 ##################################################
 # Join smoothed trackline to smoothed range extent
 # sDF = sDF[['record_num', 'chunk_id', 'ping_cnt', 'time_s', 'pix_m', 'lons', 'lats', 'utm_es', 'utm_ns', 'cog', 'dep_m']].copy()
-sDF = sDF[['record_num', 'chunk_id', 'ping_cnt', 'time_s', 'lons', 'lats', 'utm_es', 'utm_ns', 'instr_heading', 'cog', 'dep_m', 'transect']].copy()
+sDF = sDF[['record_num', 'chunk_id', 'ping_cnt', 'time_s', 'lons', 'lats', 'utm_es', 'utm_ns', 'instr_heading', 'cog', 'dep_m', 'transect', 'pixM']].copy()
 sDF.rename(columns={'lons': 'trk_lons', 'lats': 'trk_lats', 'utm_es': 'trk_utm_es', 'utm_ns': 'trk_utm_ns', 'cog': 'trk_cog'}, inplace=True)
 rsDF.rename(columns={'cog': 'range_cog'}, inplace=True)
 rsDF = rsDF[['record_num', 'range_lons', 'range_lats', 'range_cog']]
@@ -1042,6 +1075,7 @@ class rectObj(sonObj):
 n = 'n'
 record_num = 'record_num'
 chunk_id = 'chunk_id'
+pixM = 'pixM'
 
 flip = False
 
@@ -1071,10 +1105,12 @@ class rectObj(sonObj):
 # Calculate ping bearing and normalize to range 0-360
 pingDF[ping_bearing] = (row[heading]+rotate) % 360
 
-pix_m = self.pixM # Get pixel size for each chunk
+# pix_m = self.pixM # Get pixel size for each chunk
+pix_m = row['pixM'] # Get pixel size for each chunk
 
 # Calculate pixel size
 pingDF[son_range] = pingDF[son_idx] * pix_m
+pingDF[pixM] = pix_m # Store pixel size in dataframe
 
 ##################################################
 # Calculate range extent coordinates for each ping
@@ -1128,7 +1164,7 @@ class rectObj(sonObj):
 # Calculate easting and northing
 pingDF[e], pingDF[n] = self.trans(pingDF[lons].to_numpy(), pingDF[lats].to_numpy())
 
-pingDF = pingDF[[chunk_id, record_num, son_idx, lons, lats, e, n, son_range]]
+pingDF = pingDF[[chunk_id, record_num, son_idx, lons, lats, e, n, son_range, pixM]]
 
 # Set index to help speed concatenation
 pingDF.set_index([record_num, son_idx], inplace=True)
@@ -1162,7 +1198,14 @@ class rectObj(sonObj):
 ## Destination coordinates describe the geographic location in lat/lon
 ## or easting/northing that directly map to the pix coordinates.
 
-pix_m = self.pixM # Get pixel size
+# pix_m = self.pixM # Get pixel size
+pixM = df['pixM']
+# Find most common pixel size
+if len(pixM.unique()) > 1:
+pixM = pixM.mode()[0]
+else:
+pixM = pixM.iloc[0]
+pix_m = pixM
 
 # Get extent of chunk
 xMin, xMax = df[xCoord].min(), df[xCoord].max()
@@ -1329,10 +1372,17 @@ class rectObj(sonObj):
 
 '''
 
+pixM = df['pixM']
+# Find most common pixel size
+if len(pixM.unique()) > 1:
+pixM = pixM.mode()[0]
+else:
+pixM = pixM.iloc[0]
+
 pix_res = self.pix_res_son
 do_resize = True
 if pix_res == 0:
-pix_res =
+pix_res = pixM
 do_resize = False
 
 if son:
@@ -1369,7 +1419,7 @@ class rectObj(sonObj):
 ##################
 # Do Rectification
 
-pix_m =
+pix_m = pixM # Get pixel size
 
 xPixMax, yPixMax = int(df[xPix].max()), int(df[yPix].max())
 
@@ -1881,9 +1931,20 @@ class rectObj(sonObj):
 filterIntensity = False
 pix_res = self.pix_res_son
 do_resize = True
-
-
-
+
+# # Set pixel resolution
+# self._loadSonMeta()
+# sonMeta = self.sonMetaDF['chunk_id']==chunk
+# pixM = sonMeta['pixM']
+# # Find most common pixel size
+# if len(pixM.unique()) > 1:
+# pixM = pixM.mode()[0]
+# else:
+# pixM = pixM.iloc[0]
+
+# if pix_res == 0:
+# pix_res = pixM
+# do_resize = False
 
 if son:
 # Create output directory if it doesn't exist
@@ -1914,10 +1975,18 @@ class rectObj(sonObj):
 # # Determine leading zeros to match naming convention
 addZero = self._addZero(chunk)
 
-
-#
-
-
+#############################################################
+# Open smoothed trackline/range extent file
+trkMeta = pd.read_csv(trkMetaFile)
+if cog:
+trkMeta = trkMeta[trkMeta['chunk_id']==chunk].reset_index(drop=False) # Filter df by chunk_id
+else:
+# trkMeta = trkMeta[trkMeta['chunk_id_2']==chunk].reset_index(drop=False)
+# next = trkMeta[trkMeta['chunk_id_2']==chunk+1].reset_index(drop=False)
+# trkMeta = pd.concat([trkMeta, next], ignore_index=True)
+isChunk = trkMeta['chunk_id_2']==chunk
+isChunk.iloc[chunk+1] = True
+trkMeta = trkMeta[isChunk].reset_index(drop=False)
 
 # Filter sonMetaDF by chunk
 if not hasattr(self, 'sonMetaDF'):
@@ -1934,10 +2003,56 @@ class rectObj(sonObj):
 
 sonMeta = sonMetaAll[isChunk].reset_index()
 
+filtSon = False
+if len(sonMeta) != len(trkMeta):
+if len(sonMeta) > len(trkMeta):
+filtSon = True
+
+# Filter df's to make sure they both have the same record_num
+# Get the intersection of record_num values
+common_record_nums = np.intersect1d(sonMeta['record_num'], trkMeta['record_num'])
+
+# Filter both DataFrames to only include these record_num values
+sonMeta_filtered = sonMeta[sonMeta['record_num'].isin(common_record_nums)]#.reset_index(drop=True)
+trkMeta_filtered = trkMeta[trkMeta['record_num'].isin(common_record_nums)]#.reset_index(drop=True)
+
+# Store the index's that were dropped
+# Get dropped indexes for each DataFrame
+dropped_sonMeta_idx = sonMeta.index.difference(sonMeta_filtered.index)
+dropped_trkMeta_idx = trkMeta.index.difference(trkMeta_filtered.index)
+
+sonMeta = sonMeta_filtered
+trkMeta = trkMeta_filtered
+
+if filtSon:
+idx_to_filt = dropped_sonMeta_idx.tolist()
+
+
+#################################
+# Prepare pixel (pix) coordinates
+## Pix coordinates describe the size of the coordinates in pixel
+## coordinates (top left of image == (0,0); top right == (0,nchunk)...)
+
+# # Filter sonMetaDF by chunk
+# if not hasattr(self, 'sonMetaDF'):
+# self._loadSonMeta()
+
+# sonMetaAll = self.sonMetaDF
+# if cog:
+# isChunk = sonMetaAll['chunk_id']==chunk
+# else:
+# isChunk = sonMetaAll['chunk_id_2']==chunk
+# # next = sonMetaAll['chunk_id_2']==(chunk+1)
+# # isChunk = pd.concat([isChunk, next], ignore_index=True)
+# isChunk.iloc[chunk+1] = True
+
+# sonMeta = sonMetaAll[isChunk].reset_index()
+
 # Update class attributes based on current chunk
 self.pingMax = np.nanmax(sonMeta['ping_cnt']) # store to determine max range per chunk
 self.headIdx = sonMeta['index'] # store byte offset per ping
 self.pingCnt = sonMeta['ping_cnt'] # store ping count per ping
+self.pixM = sonMeta['pixM'] # store pixel size per ping
 
 if son:
 # Open image to rectify
@@ -1956,6 +2071,13 @@ class rectObj(sonObj):
 del self.shadowMask
 
 img = self.sonDat
+
+# Drop image columns if needed
+if filtSon:
+img = np.delete(img, dropped_sonMeta_idx, axis=1)
+self.sonDat = img.copy()
+
+
 # if not cog:
 # # Zero out second ping
 # img[:,1] = 0
@@ -1988,19 +2110,26 @@ class rectObj(sonObj):
 ## Destination coordinates describe the geographic location in lat/lon
 ## or easting/northing that directly map to the pix coordinates.
 
-# Open smoothed trackline/range extent file
-trkMeta = pd.read_csv(trkMetaFile)
-if cog:
-
+# # Open smoothed trackline/range extent file
+# trkMeta = pd.read_csv(trkMetaFile)
+# if cog:
+# trkMeta = trkMeta[trkMeta['chunk_id']==chunk].reset_index(drop=False) # Filter df by chunk_id
+# else:
+# # trkMeta = trkMeta[trkMeta['chunk_id_2']==chunk].reset_index(drop=False)
+# # next = trkMeta[trkMeta['chunk_id_2']==chunk+1].reset_index(drop=False)
+# # trkMeta = pd.concat([trkMeta, next], ignore_index=True)
+# isChunk = trkMeta['chunk_id_2']==chunk
+# isChunk.iloc[chunk+1] = True
+# trkMeta = trkMeta[isChunk].reset_index(drop=False)
+
+pixM = self.pixM
+# Find most common pixel size
+if len(pixM.unique()) > 1:
+pixM = pixM.mode()[0]
 else:
-
-# next = trkMeta[trkMeta['chunk_id_2']==chunk+1].reset_index(drop=False)
-# trkMeta = pd.concat([trkMeta, next], ignore_index=True)
-isChunk = trkMeta['chunk_id_2']==chunk
-isChunk.iloc[chunk+1] = True
-trkMeta = trkMeta[isChunk].reset_index(drop=False)
+pixM = pixM.iloc[0]
 
-pix_m =
+pix_m = pixM # Get pixel size
 
 # Get range (outer extent) coordinates [xR, yR] to transposed numpy arrays
 xR, yR = trkMeta[xRange].to_numpy().T, trkMeta[yRange].to_numpy().T
@@ -2036,7 +2165,9 @@ class rectObj(sonObj):
 outShapeM = [xMax-xMin, yMax-yMin] # Calculate range of x,y coordinates
 outShape=[0,0]
 # Divide by pixel size to arrive at output shape of warped image
-outShape[0], outShape[1] = round(outShapeM[0]/pix_m,0), round(outShapeM[1]/pix_m,0)
+# outShape[0], outShape[1] = round(outShapeM[0]/pix_m,0), round(outShapeM[1]/pix_m,0)
+outShape[0], outShape[1] = round(outShapeM[0]/pix_res,0), round(outShapeM[1]/pix_res,0)
+outShape = np.array(outShape).astype(int) # Convert to int
 
 # Rescale destination coordinates
 # X values
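Note: class_rectObj.py (and several other modules in this diff) replace the single self.pixM attribute with a per-ping 'pixM' column and repeatedly collapse it to one value by taking the most frequent pixel size. A minimal sketch of that selection pattern, with an illustrative function name and sample data:

import pandas as pd

def most_common_pix_size(pixM: pd.Series) -> float:
    # If more than one pixel size is present, take the most frequent one;
    # otherwise take the only value -- mirroring the pattern used in the diff.
    if len(pixM.unique()) > 1:
        return pixM.mode()[0]
    return pixM.iloc[0]

pix = pd.Series([0.02, 0.02, 0.025, 0.02])
print(most_common_pix_size(pix))    # 0.02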
{pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/class_sonObj.py
@@ -285,6 +285,23 @@ class sonObj(object):
 sonDF = self._filterTime(sonDF, time_table)
 
 return sonDF
+
+# ======================================================================
+def _filterShortTran(self, df):
+
+'''
+'''
+
+# Make transects from consective pings using dataframe index
+idx = df.index.values
+transect_groups = np.split(idx, np.where(np.diff(idx) != 1)[0]+1)
+
+for t in transect_groups:
+if len(t) < self.nchunk:
+# False means remove
+df.loc[t, 'filter'] = False
+
+return df
 
 
 # ======================================================================
@@ -515,7 +532,6 @@ class sonObj(object):
 idx = sonDF.index.values
 transect_groups = np.split(idx, np.where(np.diff(idx) != 1)[0]+1)
 
-# print(transect_groups)
 
 # Assign transect
 transect = 0
@@ -800,7 +816,7 @@ class sonObj(object):
 
 # Load depth (in real units) and convert to pixels
 # bedPick = round(sonMeta['dep_m'] / sonMeta['pix_m'], 0).astype(int)
-bedPick = round(sonMeta['dep_m'] /
+bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int)
 minDep = min(bedPick)
 
 del sonMeta, self.sonMetaDF
@@ -855,7 +871,7 @@ class sonObj(object):
 '''
 # Load depth (in real units) and convert to pixels
 # bedPick = round(sonMeta['dep_m'] / sonMeta['pix_m'], 0).astype(int)
-bedPick = round(sonMeta['dep_m'] /
+bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int).reset_index(drop=True)
 
 # Initialize 2d array to store relocated sonar records
 srcDat = np.zeros((self.sonDat.shape[0], self.sonDat.shape[1])).astype(np.float32)#.astype(int)
@@ -904,7 +920,7 @@ class sonObj(object):
 sonMeta,
 crop=True):
 # Load depth (in real units) and convert to pixels
-bedPick = round(sonMeta['dep_m'] /
+bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int)
 minDep = min(bedPick)
 
 sonDat = self.sonDat
@@ -924,7 +940,7 @@ class sonObj(object):
 def _WCO(self,
 sonMeta):
 # Load depth (in real units) and convert to pixels
-bedPick = round(sonMeta['dep_m'] /
+bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int)
 maxDep = max(bedPick)
 
 sonDat = self.sonDat
@@ -1287,8 +1303,15 @@ class sonObj(object):
 d = sonMeta['trk_dist'].to_numpy()
 d = np.max(d) - np.min(d)
 
+pixM = sonMeta['pixM']
+# Find most common pixel size
+if len(pixM.unique()) > 1:
+pixM = pixM.mode()[0]
+else:
+pixM = pixM.iloc[0]
+
 # Distance in pix
-d = round(d /
+d = round(d / pixM, 0).astype(int)
 
 sonDat = resize(sonDat,
 (sonDat.shape[0], d),
@@ -2002,7 +2025,7 @@ class sonObj(object):
 egn_means = self.egn_bed_means.copy() # Don't want to overwrite
 
 # Get bedpicks, in pixel units
-bedPick = round(sonMeta['dep_m'] /
+bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int).to_numpy()
 
 # Iterate each ping
 for j in range(sonDat.shape[1]):
@@ -2071,7 +2094,7 @@ class sonObj(object):
 del t, l
 
 # Get bedpicks, in pixel units
-bedPick = round(sonMeta['dep_m'] /
+bedPick = round(sonMeta['dep_m'] / sonMeta['pixM'], 0).astype(int).to_numpy()
 
 # Iterate each ping
 for j in range(sonDat.shape[1]):
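Note: the new _filterShortTran method above groups pings into transects by splitting the DataFrame index wherever consecutive values jump, then flags transects shorter than nchunk. A small self-contained sketch of that splitting idiom (the sample index and nchunk value are illustrative):

import numpy as np
import pandas as pd

nchunk = 3
df = pd.DataFrame({'filter': True}, index=[0, 1, 2, 3, 10, 11, 20, 21, 22, 23])

idx = df.index.values
# Split into runs of consecutive index values: [0-3], [10-11], [20-23]
transect_groups = np.split(idx, np.where(np.diff(idx) != 1)[0] + 1)

for t in transect_groups:
    if len(t) < nchunk:
        df.loc[t, 'filter'] = False   # False means remove

print(df['filter'].tolist())          # the two-ping run is flagged for removal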
{pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/funcs_rectify.py
@@ -36,6 +36,10 @@ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
 PACKAGE_DIR = os.path.dirname(SCRIPT_DIR)
 sys.path.append(PACKAGE_DIR)
 
+# # For Debug
+# from funcs_common import *
+# from class_rectObj import rectObj
+
 from pingmapper.funcs_common import *
 from pingmapper.class_rectObj import rectObj
 
@@ -171,6 +175,7 @@ def smoothTrackline(projDir='', x_offset='', y_offset='', nchunk ='', cog=True,
 
 sDF['chunk_id'] = sonDF['chunk_id']
 sDF['transect'] = sonDF['transect']
+# sDF['pixM'] = sonDF['pixM'] # Add pixel size to smoothed trackline coordinates
 
 sDF.reset_index(inplace=True)
 
@@ -250,14 +255,17 @@ def smoothTrackline(projDir='', x_offset='', y_offset='', nchunk ='', cog=True,
 sDF.at[curRow, "utm_ns"] = lastRow["utm_ns"]
 sDF.at[curRow, "cog"] = lastRow["cog"]
 sDF.at[curRow, "instr_heading"] = lastRow["instr_heading"]
+# sDF.at[curRow, 'pixM'] = lastRow['pixM']
+
+del lastRow
 else:
 t += 1
 
 i+=1
-del
+del curRow, i
 
 son0.smthTrk = sDF # Store smoothed trackline coordinates in rectObj.
-
+
 # Do positional correction
 if x_offset != 0.0 or y_offset != 0.0:
 son0._applyPosOffset(x_offset, y_offset)
@@ -274,6 +282,7 @@ def smoothTrackline(projDir='', x_offset='', y_offset='', nchunk ='', cog=True,
 df = son1.sonMetaDF
 sDF['chunk_id'] = df['chunk_id'] # Update chunk_id for smoothed coordinates
 sDF['record_num'] = df['record_num'] # Update record_num for smoothed coordinates
+# sDF['pixM'] = df['pixM']
 son1.smthTrk = sDF # Store smoothed trackline coordinates in rectObj
 
 del sDF, df, son0, son1
{pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/gui_main.py
@@ -80,8 +80,8 @@ def gui(batch: bool):
 text_input = sg.Text('Recording to Process')
 # in_input = sg.In(key='inFile', size=(80,1))
 in_input = sg.In(key='inFile', size=(80,1), default_text=default_params['inFile'])
-
-browse_input = sg.FileBrowse(file_types=(("Sonar File", "*.DAT *.sl2 *.sl3 *.svlog") ), initial_folder=os.path.dirname(default_params['inFile']))
+browse_input = sg.FileBrowse(file_types=(("Sonar File", "*.DAT *.sl2 *.sl3 *.RSD *.svlog") ), initial_folder=os.path.dirname(default_params['inFile']))
+# browse_input = sg.FileBrowse(file_types=(("Sonar File", "*.DAT *.sl2 *.sl3 *.svlog") ), initial_folder=os.path.dirname(default_params['inFile']))
 
 # Add to layout
 layout.append([text_io])
@@ -592,9 +592,10 @@ def gui(batch: bool):
 # Find all DAT and SON files in all subdirectories of inDir
 inFiles=[]
 for root, dirs, files in os.walk(inDir):
-
-
-
+if '__MACOSX' not in root:
+for file in files:
+if file.endswith('.DAT') or file.endswith('.sl2') or file.endswith('.sl3') or file.endswith('.RSD') or file.endswith('.svlog'):
+inFiles.append(os.path.join(root, file))
 
 inFiles = sorted(inFiles)
 
{pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/main_mapSubstrate.py
@@ -35,8 +35,13 @@ SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
 PACKAGE_DIR = os.path.dirname(SCRIPT_DIR)
 sys.path.append(PACKAGE_DIR)
 
-
+# # For debug
+# from funcs_common import *
+# from class_mapSubstrateObj import mapSubObj
+# from class_portstarObj import portstarObj
+# from funcs_model import *
 
+from pingmapper.funcs_common import *
 from pingmapper.class_mapSubstrateObj import mapSubObj
 from pingmapper.class_portstarObj import portstarObj
 from pingmapper.funcs_model import *
@@ -275,7 +280,7 @@ def map_master_func(logfilename='',
 # Do prediction (make parallel later)
 print('\n\tPredicting substrate for', len(chunks), son.beamName, 'chunks')
 
-Parallel(n_jobs=np.min([len(chunks), threadCnt])
+Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(son._detectSubstrate)(i, USE_GPU) for i in tqdm(chunks))
 
 son._cleanup()
 son._pickleSon()
@@ -324,7 +329,7 @@ def map_master_func(logfilename='',
 
 # Plot substrate classification()
 # sys.exit()
-Parallel(n_jobs=np.min([len(toMap), threadCnt])
+Parallel(n_jobs=np.min([len(toMap), threadCnt]))(delayed(son._pltSubClass)(map_class_method, c, f, spdCor=spdCor, maxCrop=maxCrop, probs=probs) for c, f in tqdm((toMap.items())))
 son._pickleSon()
 del toMap
 
@@ -384,7 +389,7 @@ def map_master_func(logfilename='',
 # Create portstarObj
 psObj = portstarObj(mapObjs)
 
-Parallel(n_jobs=np.min([len(toMap), threadCnt])
+Parallel(n_jobs=np.min([len(toMap), threadCnt]))(delayed(psObj._mapSubstrate)(map_class_method, c, f) for c, f in tqdm(toMap.items()))
 
 del toMap
 print("\nDone!")
@@ -524,7 +529,7 @@ def map_master_func(logfilename='',
 # Create portstarObj
 psObj = portstarObj(mapObjs)
 
-Parallel(n_jobs=np.min([len(toMap), threadCnt])
+Parallel(n_jobs=np.min([len(toMap), threadCnt]))(delayed(psObj._mapPredictions)(map_predict, 'map_'+a, c, f) for c, f in tqdm(toMap.items()))
 
 del toMap, psObj
 print("\nDone!")
{pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/main_readFiles.py
@@ -50,9 +50,9 @@ from doodleverse_utils.imports import *
 
 from scipy.signal import savgol_filter
 
-
+sys.path.insert(0, r'Z:\UDEL\PythonRepos\PINGVerter')
 
-from pingverter import hum2pingmapper, low2pingmapper, cerul2pingmapper
+from pingverter import hum2pingmapper, low2pingmapper, cerul2pingmapper, gar2pingmapper
 
 import cv2
 
@@ -314,6 +314,7 @@ def read_master_func(logfilename='',
 # Use PINGVerter to read the sonar file
 #######################################
 
+instDepAvail = True
 start_time = time.time()
 # Determine sonar recording type
 _, file_type = os.path.splitext(inFile)
@@ -326,14 +327,15 @@ def read_master_func(logfilename='',
 elif file_type == '.sl2' or file_type == '.sl3':
 sonar_obj = low2pingmapper(inFile, projDir, nchunk, tempC, exportUnknown)
 
-#
-
-
+# Prepare Garmin file for PINGMapper
+elif file_type == '.RSD':
+sonar_obj = gar2pingmapper(inFile, projDir, nchunk, tempC, exportUnknown)
 
 # Prepare Cerulean file for PINGMapper
 elif file_type == '.svlog':
 sonar_obj = cerul2pingmapper(inFile, projDir, nchunk, tempC, exportUnknown)
 detectDep = 1 # No depth in cerulean files, so set to Zheng et al. 2021
+instDepAvail = False
 
 # Unknown
 else:
@@ -367,7 +369,7 @@ def read_master_func(logfilename='',
 son.beamName = meta['beamName']
 son.beam = beam
 son.headBytes = sonar_obj.headBytes
-son.pixM = sonar_obj.pixM
+# son.pixM = sonar_obj.pixM
 son.isOnix = sonar_obj.isOnix
 son.trans = sonar_obj.trans
 son.humDat = sonar_obj.humDat
@@ -426,13 +428,24 @@ def read_master_func(logfilename='',
 son.cropRange = cropRange
 # Do range crop, if necessary
 if cropRange > 0.0:
-# Convert to distance in pix
-d = round(cropRange / son.pixM, 0).astype(int)
+# # Convert to distance in pix
+# d = round(cropRange / son.pixM, 0).astype(int)
+
+# # Get sonMetaDF
+# son._loadSonMeta()
+# son.sonMetaDF.loc[son.sonMetaDF['ping_cnt'] > d, 'ping_cnt'] = d
+# son._saveSonMetaCSV(son.sonMetaDF)
 
 # Get sonMetaDF
 son._loadSonMeta()
-son.sonMetaDF
-
+df = son.sonMetaDF
+
+# Convert to distance in pixels
+d = round(cropRange / df['pixM'], 0).astype(int)
+
+# Filter df
+df.loc[df['ping_cnt'] > d, 'ping_cnt'] = d
+son._saveSonMetaCSV(df)
 
 # Store flag to export un-rectified sonar tiles in each sonObj.
 for son in sonObjs:
@@ -650,7 +663,7 @@ def read_master_func(logfilename='',
 del c, r, n, startB, rowCnt
 
 # Fix no data in parallel
-r = Parallel(n_jobs=threadCnt)(delayed(son._fixNoDat)(dfAll[r[0]:r[1]].copy().reset_index(drop=True), beams) for r in tqdm(
+r = Parallel(n_jobs=threadCnt)(delayed(son._fixNoDat)(dfAll[r[0]:r[1]].copy().reset_index(drop=True), beams) for r in tqdm(rowsToProc))
 gc.collect()
 
 # Concatenate results from parallel processing
@@ -838,6 +851,14 @@ def read_master_func(logfilename='',
 df0 = df0[df0['filter'] == True]
 df1 = df1[df1['filter'] == True]
 
+# Remove transect shorter then nchunk
+df0=son0._filterShortTran(df0)
+df1['filter'] = df0['filter']
+
+# Apply the filter
+df0 = df0[df0['filter'] == True]
+df1 = df1[df1['filter'] == True]
+
 # Reasign the chunks
 df0 = son0._reassignChunks(df0)
 df1['chunk_id'] = df0['chunk_id']
@@ -934,7 +955,7 @@ def read_master_func(logfilename='',
 print('\n\tUsing binary thresholding...')
 
 # Parallel estimate depth for each chunk using appropriate method
-r = Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._detectDepth)(detectDep, int(chunk), USE_GPU, tileFile) for chunk in tqdm(
+r = Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._detectDepth)(detectDep, int(chunk), USE_GPU, tileFile) for chunk in tqdm(chunks))
 
 # store the depth predictions in the class
 for ret in r:
@@ -959,7 +980,7 @@ def read_master_func(logfilename='',
 
 if saveDepth:
 # Save detected depth to csv
-depDF = psObj._saveDepth(chunks, detectDep, smthDep, adjDep)
+depDF = psObj._saveDepth(chunks, detectDep, smthDep, adjDep, instDepAvail)
 
 # Store depths in downlooking sonar files also
 for son in sonObjs:
@@ -979,6 +1000,11 @@ def read_master_func(logfilename='',
 dep = sonDF['inst_dep_m']
 if smthDep:
 dep = savgol_filter(dep, 51, 3)
+
+# Interpolate over nan's (and set zero's to nan)
+dep[dep==0] = np.nan
+nans, x = np.isnan(dep), lambda z: z.nonzero()[0]
+dep[nans] = np.interp(x(nans), x(~nans), dep[~nans])
 
 sonDF['dep_m'] = dep + adjDep
 
@@ -1000,7 +1026,7 @@ def read_master_func(logfilename='',
 start_time = time.time()
 
 print("\n\nExporting bedpick plots to {}...".format(tileFile))
-Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._plotBedPick)(int(chunk), True, autoBed, tileFile) for chunk in tqdm(
+Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._plotBedPick)(int(chunk), True, autoBed, tileFile) for chunk in tqdm(chunks))
 
 print("\nDone!")
 print("Time (s):", round(time.time() - start_time, ndigits=1))
@@ -1086,7 +1112,7 @@ def read_master_func(logfilename='',
 psObj.port.shadow = defaultdict()
 psObj.star.shadow = defaultdict()
 
-r = Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._detectShadow)(remShadow, int(chunk), USE_GPU, False, tileFile) for chunk in tqdm(
+r = Parallel(n_jobs=np.min([len(chunks), threadCnt]))(delayed(psObj._detectShadow)(remShadow, int(chunk), USE_GPU, False, tileFile) for chunk in tqdm(chunks))
 
 for ret in r:
 psObj.port.shadow[ret[0]] = ret[1]
@@ -1141,7 +1167,7 @@ def read_master_func(logfilename='',
 
 # Calculate range-wise mean intensity for each chunk
 print('\n\tCalculating range-wise mean intensity for each chunk...')
-chunk_means = Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._egnCalcChunkMeans)(i) for i in tqdm(
+chunk_means = Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._egnCalcChunkMeans)(i) for i in tqdm(chunks))
 
 # Calculate global means
 print('\n\tCalculating range-wise global means...')
@@ -1150,7 +1176,7 @@ def read_master_func(logfilename='',
 
 # Calculate egn min and max for each chunk
 print('\n\tCalculating EGN min and max values for each chunk...')
-min_max = Parallel(n_jobs= np.min([len(chunks)]))(delayed(son._egnCalcMinMax)(i) for i in tqdm(
+min_max = Parallel(n_jobs= np.min([len(chunks)]))(delayed(son._egnCalcMinMax)(i) for i in tqdm(chunks))
 
 # Calculate global min max for each channel
 son._egnCalcGlobalMinMax(min_max)
@@ -1202,7 +1228,7 @@ def read_master_func(logfilename='',
 chunks = chunks[:-1] # remove last chunk
 
 print('\n\tCalculating EGN corrected histogram for', son.beamName)
-hist = Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._egnCalcHist)(i) for i in tqdm(
+hist = Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._egnCalcHist)(i) for i in tqdm(chunks))
 
 print('\n\tCalculating global EGN corrected histogram')
 son._egnCalcGlobalHist(hist)
@@ -1329,8 +1355,10 @@ def read_master_func(logfilename='',
 # Load sonMetaDF
 son._loadSonMeta()
 
-
-
+Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._exportTilesSpd)(i, tileFile=imgType, spdCor=spdCor, mask_shdw=mask_shdw, maxCrop=maxCrop) for i in tqdm(chunks))
+# for i in tqdm(chunks):
+# son._exportTilesSpd(i, tileFile=imgType, spdCor=spdCor, mask_shdw=mask_shdw, maxCrop=maxCrop)
+# sys.exit()
 
 if moving_window and not spdCor:
 
{pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/main_rectify.py
@@ -310,7 +310,7 @@ def rectify_master_func(logfilename='',
 # COG Pre-processing #
 # ##########################################################################
 
-for son in portstar:
+# for son in portstar:
 son.rect_wcp = rect_wcp
 son.rect_wcr = rect_wcr
 
@@ -364,7 +364,7 @@ def rectify_master_func(logfilename='',
 print('\n\tExporting', len(chunks), 'GeoTiffs for', son.beamName)
 
 # Parallel(n_jobs= np.min([len(sDF), threadCnt]))(delayed(son._rectSonHeadingMain)(sonarCoordsDF[sonarCoordsDF['chunk_id']==chunk], chunk) for chunk in tqdm(range(len(chunks))))
-Parallel(n_jobs= np.min([len(sDF), threadCnt]))(delayed(son._rectSonHeadingMain)(sDF[sDF['chunk_id']==chunk], chunk, heading=heading, interp_dist=rectInterpDist) for chunk in tqdm(
+Parallel(n_jobs= np.min([len(sDF), threadCnt]))(delayed(son._rectSonHeadingMain)(sDF[sDF['chunk_id']==chunk], chunk, heading=heading, interp_dist=rectInterpDist) for chunk in tqdm(chunks))
 # for i in chunks:
 # # son._rectSonHeading(sonarCoordsDF[sonarCoordsDF['chunk_id']==i], i)
 # r = son._rectSonHeadingMain(sDF[sDF['chunk_id']==i], i, heading=heading, interp_dist=rectInterpDist)
@@ -402,6 +402,8 @@ def rectify_master_func(logfilename='',
 son.rect_wcr = rect_wcr
 
 if (rect_wcp and rubberSheeting) or (rect_wcr and rubberSheeting):
+# Always use COG for rubber sheeting
+cog = True
 for son in portstar:
 # Set output directory
 son.outDir = os.path.join(son.projDir, son.beamName)
@@ -418,8 +420,8 @@ def rectify_master_func(logfilename='',
 print('\n\tExporting', len(chunks), 'GeoTiffs for', son.beamName)
 # for i in chunks:
 # son._rectSonRubber(i, filter, cog, wgs=False)
-
-Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._rectSonRubber)(i, filter, cog, wgs=False) for i in tqdm(
+# sys.exit()
+Parallel(n_jobs= np.min([len(chunks), threadCnt]))(delayed(son._rectSonRubber)(i, filter, cog, wgs=False) for i in tqdm(chunks))
 son._cleanup()
 gc.collect()
 printUsage()
{pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper/test_time.py
@@ -17,9 +17,5 @@ converted_date = start_date + timedelta(seconds=custom_unix_time)
 eastern = pytz.timezone("US/Eastern")
 converted_date_eastern = converted_date.astimezone(eastern)
 
-print(start_date)
-print(unix_timestamp)
 print("Custom Unix Start Time:", unix_timestamp)
-print(
-print("Converted Date and Time:", converted_date_eastern)
-print(timedelta(seconds=custom_unix_time))
+print("Converted Date and Time:", converted_date_eastern)
pingmapper-5.0.1/pingmapper/version.py
@@ -0,0 +1 @@
+__version__ = '5.0.1'
{pingmapper-4.2.13 → pingmapper-5.0.1}/pingmapper.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pingmapper
-Version: 4.2.13
+Version: 5.0.1
 Summary: Open-source interface for processing recreation-grade side scan sonar datasets and reproducibly mapping benthic habitat
 Author: Cameron Bodine
 Author-email: bodine.cs@gmail.email
pingmapper-4.2.13/pingmapper/version.py
@@ -1 +0,0 @@
-__version__ = '4.2.13'
The remaining 14 files listed above with +0 -0 are unchanged between the two versions.