pyckster 26.1.6__py3-none-any.whl → 26.2.2__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
pyckster/core.py CHANGED
@@ -80,10 +80,13 @@ from .obspy_utils import (
80
80
  remove_trace,
81
81
  move_trace,
82
82
  mute_trace,
83
+ zero_pad_trace,
83
84
  swap_header_format,
84
85
  merge_streams,
85
86
  get_max_decimals,
86
- assisted_picking
87
+ assisted_picking,
88
+ export_shot_to_ascii,
89
+ import_shot_from_ascii
87
90
  )
88
91
 
89
92
  from .auto_picking import adaptive_picker
@@ -874,7 +877,7 @@ class TraceSelector(QDialog):
874
877
  self.layout.addWidget(self.createFormItem("Trace Position (m):", self.tracePositionLineEdit))
875
878
 
876
879
  # If trace_positions provided, initialize with the first trace position
877
- if trace_positions and len(trace_positions) > 0:
880
+ if trace_positions is not None and len(trace_positions) > 0:
878
881
  self.tracePositionLineEdit.setText(str(trace_positions[0]))
879
882
 
880
883
  # Add a checkbox to apply the changes to all shots
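The `is not None` change above guards against NumPy array truthiness: a bare `if trace_positions:` raises `ValueError` when `trace_positions` arrives as an array with more than one element, while the explicit check works for lists and arrays alike. A minimal illustrative sketch (not pyckster code):

```python
import numpy as np

def first_position(trace_positions):
    """Return the first trace position, or None if nothing was provided.

    `if trace_positions:` would raise ValueError for a multi-element
    NumPy array ("truth value of an array ... is ambiguous"); the
    explicit `is not None` + length test does not.
    """
    if trace_positions is not None and len(trace_positions) > 0:
        return trace_positions[0]
    return None

print(first_position([10.0, 12.0]))            # 10.0
print(first_position(np.array([10.0, 12.0])))  # 10.0, no ValueError
print(first_position(None))                    # None
```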
@@ -978,6 +981,50 @@ class HeaderDialog(QDialog):
978
981
  # Resize the columns to fit the contents
979
982
  self.table_widget.resizeColumnsToContents()
980
983
 
984
+
985
+ class OverallHeaderDialog(QDialog):
986
+ """Dialog to display overall headers for all files in a single table"""
987
+ def __init__(self, files, headers, file_data, parent=None):
988
+ super().__init__(parent)
989
+ self.setWindowTitle("Overall Header Fields")
990
+ self.setGeometry(100, 100, 1200, 600)
991
+
992
+ layout = QVBoxLayout(self)
993
+
994
+ # Create a QTableWidget
995
+ self.table_widget = QTableWidget()
996
+ self.table_widget.setRowCount(len(files))
997
+ self.table_widget.setColumnCount(len(headers) + 1) # +1 for file name column
998
+
999
+ # Set headers: File name + all other headers
1000
+ all_headers = ["File Name"] + headers
1001
+ self.table_widget.setHorizontalHeaderLabels(all_headers)
1002
+
1003
+ # Populate the table
1004
+ for row, file in enumerate(files):
1005
+ # File name in first column
1006
+ self.table_widget.setItem(row, 0, QTableWidgetItem(file))
1007
+
1008
+ # Data for each header
1009
+ for col, header in enumerate(headers):
1010
+ value = file_data[file].get(header, ['N/A'])[0] # Get first element from list
1011
+ self.table_widget.setItem(row, col + 1, QTableWidgetItem(str(value)))
1012
+
1013
+ # Resize the columns to fit the contents
1014
+ self.table_widget.resizeColumnsToContents()
1015
+
1016
+ # Make the table scrollable
1017
+ scroll_area = QScrollArea()
1018
+ scroll_area.setWidgetResizable(True)
1019
+ scroll_area.setWidget(self.table_widget)
1020
+
1021
+ layout.addWidget(scroll_area)
1022
+
1023
+ # OK button
1024
+ self.okButton = QPushButton("OK")
1025
+ self.okButton.clicked.connect(self.accept)
1026
+ layout.addWidget(self.okButton)
1027
+
981
1028
  #######################################
982
1029
  # Cross-correlation Dialog Classes
983
1030
  #######################################
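For reference, a standalone sketch of the table-population pattern used by the new `OverallHeaderDialog`: one row per file, the file name in the first column, and the first element of each header's value list in the remaining columns. PyQt5 is assumed as the Qt binding, and the helper name `fill_overall_header_table` is mine, not part of the package:

```python
from PyQt5.QtWidgets import QApplication, QTableWidget, QTableWidgetItem

def fill_overall_header_table(table, files, headers, file_data):
    """file_data maps file name -> {header: [values, ...]}."""
    table.setRowCount(len(files))
    table.setColumnCount(len(headers) + 1)              # +1 for the file-name column
    table.setHorizontalHeaderLabels(["File Name"] + list(headers))
    for row, fname in enumerate(files):
        table.setItem(row, 0, QTableWidgetItem(fname))
        for col, header in enumerate(headers):
            # Each header maps to a list; only its first element is displayed.
            value = file_data[fname].get(header, ['N/A'])[0]
            table.setItem(row, col + 1, QTableWidgetItem(str(value)))
    table.resizeColumnsToContents()

if __name__ == "__main__":
    app = QApplication([])
    table = QTableWidget()
    fill_overall_header_table(
        table,
        files=["shot_0001.sgy"],
        headers=["ffid", "delay"],
        file_data={"shot_0001.sgy": {"ffid": [1], "delay": [0.0]}},
    )
    table.show()
    app.exec_()
```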
@@ -1249,6 +1296,184 @@ def find_icon_path():
1249
1296
  # Auto Pick Dialog
1250
1297
  #######################################
1251
1298
 
1299
+ class AcquisitionParametersDialog(QDialog):
1300
+ """Dialog to display acquisition parameters summary"""
1301
+ def __init__(self, params, parent=None):
1302
+ super().__init__(parent)
1303
+ self.setWindowTitle("Acquisition Parameters")
1304
+ self.setMinimumWidth(500)
1305
+ self.setMinimumHeight(400)
1306
+ self.resize(600, 500) # Set initial size
1307
+
1308
+ # Enable resizing and maximize button
1309
+ self.setWindowFlags(Qt.Window | Qt.WindowMaximizeButtonHint | Qt.WindowCloseButtonHint)
1310
+
1311
+ layout = QVBoxLayout(self)
1312
+
1313
+ # Create a QTextEdit to display the parameters
1314
+ self.textEdit = QTextEdit()
1315
+ self.textEdit.setReadOnly(True)
1316
+ self.textEdit.setFont(QFont("Courier", 10))
1317
+
1318
+ # Format the parameters text
1319
+ text = self.formatParameters(params)
1320
+ self.textEdit.setPlainText(text)
1321
+
1322
+ layout.addWidget(self.textEdit)
1323
+
1324
+ # Add OK button
1325
+ buttonBox = QDialogButtonBox(QDialogButtonBox.Ok)
1326
+ buttonBox.accepted.connect(self.accept)
1327
+ layout.addWidget(buttonBox)
1328
+
1329
+ def formatParameters(self, params):
1330
+ """Format the parameters into a readable text"""
1331
+ lines = []
1332
+ lines.append("=" * 60)
1333
+ lines.append("ACQUISITION PARAMETERS SUMMARY")
1334
+ lines.append("=" * 60)
1335
+ lines.append("")
1336
+
1337
+ if 'error' in params:
1338
+ lines.append(f"Error: {params['error']}")
1339
+ return "\n".join(lines)
1340
+
1341
+ # Determine decimal precision for positions based on actual data
1342
+ # Check all position values to find the needed precision
1343
+ all_position_values = []
1344
+ for key in ['first_trace_pos', 'last_trace_pos', 'first_shot_pos', 'last_shot_pos',
1345
+ 'trace_spacing', 'source_spacing', 'total_trace_extent', 'total_source_extent',
1346
+ 'min_offset', 'max_offset']:
1347
+ if params.get(key) is not None:
1348
+ all_position_values.append(params[key])
1349
+
1350
+ # Determine needed decimal places (check for up to 3 decimals)
1351
+ decimal_places = 2 # default
1352
+ for val in all_position_values:
1353
+ # Convert to string and check decimal places
1354
+ val_str = f"{val:.6f}".rstrip('0').rstrip('.')
1355
+ if '.' in val_str:
1356
+ decimals = len(val_str.split('.')[1])
1357
+ decimal_places = max(decimal_places, min(decimals, 3)) # cap at 3
1358
+
1359
+ # Helper function to format values with adaptive precision
1360
+ def fmt_val(val):
1361
+ if val is None:
1362
+ return "N/A"
1363
+ return f"{val:.{decimal_places}f}"
1364
+
1365
+ # Roll-along detection FIRST
1366
+ lines.append("ACQUISITION TYPE:")
1367
+ lines.append("-" * 60)
1368
+ if params.get('roll_along_detected'):
1369
+ lines.append(f" Roll-along acquisition: YES")
1370
+ if params.get('roll_along_segments') is not None:
1371
+ lines.append(f" Number of segments: {params['roll_along_segments']}")
1372
+ else:
1373
+ lines.append(f" Roll-along acquisition: NO (single deployment)")
1374
+
1375
+ # Shot positioning
1376
+ if params.get('shot_positioning'):
1377
+ lines.append(f" Shot positioning: {params['shot_positioning']}")
1378
+
1379
+ # Number of source/geophone pairs
1380
+ if params.get('n_traces') is not None and params.get('n_shots') is not None:
1381
+ total_traces = params['n_traces'] * params['n_shots']
1382
+ lines.append(f" Source/geophone pair: {total_traces}")
1383
+ lines.append("")
1384
+
1385
+ # Geophones/Traces section
1386
+ lines.append("GEOPHONES/TRACES:")
1387
+ lines.append("-" * 60)
1388
+ if params.get('total_unique_traces') is not None:
1389
+ lines.append(f" Total number of traces: {params['total_unique_traces']}")
1390
+ else:
1391
+ lines.append(f" Number of traces/shot: {params.get('n_traces', 'N/A')}")
1392
+
1393
+ # Show missing geophones if detected
1394
+ if params.get('n_missing_traces', 0) > 0:
1395
+ lines.append(f" Missing geophones: {params['n_missing_traces']} geophone(s) missing")
1396
+
1397
+ lines.append(f" Geophone spacing: {fmt_val(params.get('trace_spacing'))} m")
1398
+ lines.append(f" Total geophone extent: {fmt_val(params.get('total_trace_extent'))} m")
1399
+ lines.append(f" First geophone position: {fmt_val(params.get('first_trace_pos'))} m")
1400
+ lines.append(f" Last geophone position: {fmt_val(params.get('last_trace_pos'))} m")
1401
+
1402
+ # Show details of missing geophones if any
1403
+ if params.get('missing_traces'):
1404
+ lines.append(" Missing geophone positions:")
1405
+ missing_list = params['missing_traces'][:10] # Show first 10
1406
+ for pos in missing_list:
1407
+ lines.append(f" - {fmt_val(pos)} m")
1408
+ if len(params['missing_traces']) > 10:
1409
+ lines.append(f" ... and {len(params['missing_traces']) - 10} more")
1410
+
1411
+ # Add roll-along segment details if available
1412
+ if params.get('roll_along_detected') and params.get('roll_along_segment_details'):
1413
+ lines.append("")
1414
+ lines.append(" Roll-along segment details:")
1415
+ for i, seg_detail in enumerate(params['roll_along_segment_details'], 1):
1416
+ lines.append(f" Segment {i}: {seg_detail}")
1417
+ lines.append("")
1418
+
1419
+ # Sources/Shots section
1420
+ lines.append("SOURCES/SHOTS:")
1421
+ lines.append("-" * 60)
1422
+ lines.append(f" Total number of shots: {params.get('n_shots', 'N/A')}")
1423
+
1424
+ # Show missing sources if detected
1425
+ if params.get('n_missing_sources', 0) > 0:
1426
+ lines.append(f" Missing sources: {params['n_missing_sources']} source(s) missing")
1427
+
1428
+ spacing_label = " Source spacing (median): " if params.get('roll_along_detected') else " Source spacing: "
1429
+ lines.append(f"{spacing_label}{fmt_val(params.get('source_spacing'))} m")
1430
+ lines.append(f" Total source extent: {fmt_val(params.get('total_source_extent'))} m")
1431
+ lines.append(f" First shot position: {fmt_val(params.get('first_shot_pos'))} m")
1432
+ lines.append(f" Last shot position: {fmt_val(params.get('last_shot_pos'))} m")
1433
+
1434
+ min_offset = abs(params['min_offset']) if params.get('min_offset') is not None else None
1435
+ max_offset = abs(params['max_offset']) if params.get('max_offset') is not None else None
1436
+ lines.append(f" Minimum offset: {fmt_val(min_offset)} m")
1437
+ lines.append(f" Maximum offset: {fmt_val(max_offset)} m")
1438
+
1439
+ # Show details of missing sources if any
1440
+ if params.get('missing_sources'):
1441
+ lines.append(" Missing source positions:")
1442
+ missing_list = params['missing_sources'][:10] # Show first 10
1443
+ for pos in missing_list:
1444
+ lines.append(f" - {fmt_val(pos)} m")
1445
+ if len(params['missing_sources']) > 10:
1446
+ lines.append(f" ... and {len(params['missing_sources']) - 10} more")
1447
+
1448
+ # Add roll-along shot segment details if available
1449
+ if params.get('roll_along_detected') and params.get('roll_along_shot_segment_details'):
1450
+ lines.append("")
1451
+ lines.append(" Roll-along segment details:")
1452
+ for i, seg_detail in enumerate(params['roll_along_shot_segment_details'], 1):
1453
+ lines.append(f" Segment {i}: {seg_detail}")
1454
+ lines.append("")
1455
+
1456
+ # Timing section
1457
+ lines.append("TIMING:")
1458
+ lines.append("-" * 60)
1459
+ if params.get('sample_interval') is not None:
1460
+ lines.append(f" Sampling interval: {params['sample_interval']*1000:.3f} ms")
1461
+ else:
1462
+ lines.append(f" Sampling interval: N/A")
1463
+ if params.get('record_length') is not None:
1464
+ lines.append(f" Recording time: {params['record_length']*1000:.2f} ms")
1465
+ else:
1466
+ lines.append(f" Recording time: N/A")
1467
+ if params.get('delay') is not None:
1468
+ lines.append(f" Delay time: {params['delay']*1000:.3f} ms")
1469
+ else:
1470
+ lines.append(f" Delay time: N/A")
1471
+ lines.append("")
1472
+ lines.append("=" * 60)
1473
+
1474
+ return "\n".join(lines)
1475
+
1476
+
1252
1477
  class AutoPickDialog(QDialog):
1253
1478
  """Dialog for entering frequency range for auto pick operation"""
1254
1479
  def __init__(self, parent=None, fmin_default=0.0, fmax_default=200.0):
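The adaptive-precision logic in `formatParameters` above can be read as a small standalone helper: count the decimal places the data actually needs (at least 2, capped at 3) and format every position with that precision, printing `N/A` for missing values. The function names below are illustrative, not part of the package:

```python
def needed_decimals(values, default=2, cap=3):
    """Count the decimal places actually needed by the data (>= default, <= cap)."""
    places = default
    for val in values:
        if val is None:
            continue
        text = f"{val:.6f}".rstrip('0').rstrip('.')
        if '.' in text:
            places = max(places, min(len(text.split('.')[1]), cap))
    return places

def fmt_val(val, places):
    return "N/A" if val is None else f"{val:.{places}f}"

positions = [0.0, 47.5, 2.375, None]
places = needed_decimals(positions)          # -> 3 (2.375 needs three decimals)
print([fmt_val(v, places) for v in positions])
# ['0.000', '47.500', '2.375', 'N/A']
```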
@@ -1493,6 +1718,12 @@ class MainWindow(QMainWindow):
1493
1718
  self.fileListWidget.setMinimumWidth(50) # Set minimum width
1494
1719
  leftLayout.addWidget(self.fileListWidget)
1495
1720
 
1721
+ # Add Acquisition parameters button
1722
+ self.acqParamsButton = QPushButton("Acquisition parameters")
1723
+ self.acqParamsButton.clicked.connect(self.showAcquisitionParameters)
1724
+ self.acqParamsButton.setToolTip("Show acquisition parameters summary")
1725
+ leftLayout.addWidget(self.acqParamsButton)
1726
+
1496
1727
  # Add separator line
1497
1728
  separator = QFrame()
1498
1729
  separator.setFrameShape(QFrame.HLine)
@@ -2034,6 +2265,11 @@ class MainWindow(QMainWindow):
2034
2265
  self.importAsciiAction = QAction('Import ASCII matrix...', self)
2035
2266
  self.fileMenu.addAction(self.importAsciiAction)
2036
2267
  self.importAsciiAction.triggered.connect(self.importAsciiMatrix)
2268
+
2269
+ # Create QAction for importing ASCII archive
2270
+ self.importAsciiArchiveAction = QAction('Import ASCII archive...', self)
2271
+ self.fileMenu.addAction(self.importAsciiArchiveAction)
2272
+ self.importAsciiArchiveAction.triggered.connect(self.importAsciiArchive)
2037
2273
 
2038
2274
  # Add separator
2039
2275
  self.fileMenu.addSeparator()
@@ -2059,6 +2295,11 @@ class MainWindow(QMainWindow):
2059
2295
  self.saveSingleFileSubMenu.addAction(self.saveSingleFileSuAction)
2060
2296
  self.saveSingleFileSuAction.triggered.connect(self.saveSingleFileSU)
2061
2297
 
2298
+ # Create QAction for saving current file as ASCII
2299
+ self.saveSingleFileAsciiAction = QAction('as ASCII files (archive)', self)
2300
+ self.saveSingleFileSubMenu.addAction(self.saveSingleFileAsciiAction)
2301
+ self.saveSingleFileAsciiAction.triggered.connect(self.saveSingleFileASCII)
2302
+
2062
2303
  # Create a submenu for saving all files
2063
2304
  self.saveFileSubMenu = self.fileMenu.addMenu('Save all shots')
2064
2305
 
@@ -2082,6 +2323,11 @@ class MainWindow(QMainWindow):
2082
2323
  self.saveFileSubMenu.addAction(self.saveAllFilesSingleSuAction)
2083
2324
  self.saveAllFilesSingleSuAction.triggered.connect(self.saveAllFilesSingleSU)
2084
2325
 
2326
+ # Create QAction for saving all files as ASCII
2327
+ self.saveAllFilesAsciiAction = QAction('as ASCII files (archives)', self)
2328
+ self.saveFileSubMenu.addAction(self.saveAllFilesAsciiAction)
2329
+ self.saveAllFilesAsciiAction.triggered.connect(self.saveAllFilesASCII)
2330
+
2085
2331
  # Add separator
2086
2332
  self.fileMenu.addSeparator()
2087
2333
 
@@ -2108,11 +2354,16 @@ class MainWindow(QMainWindow):
2108
2354
  self.showHeadersSubMenu.addAction(self.showRawHeadersAction)
2109
2355
  self.showRawHeadersAction.triggered.connect(self.showRawHeaders)
2110
2356
 
2111
- # Create QAction for showing clean headers
2112
- self.showSelectedHeadersAction = QAction('Show clean headers', self)
2357
+ # Create QAction for showing clean shot headers
2358
+ self.showSelectedHeadersAction = QAction('Show clean shot headers', self)
2113
2359
  self.showHeadersSubMenu.addAction(self.showSelectedHeadersAction)
2114
2360
  self.showSelectedHeadersAction.triggered.connect(self.showHeaders)
2115
2361
 
2362
+ # Create QAction for showing clean overall headers
2363
+ self.showOverallHeadersAction = QAction('Show clean overall headers', self)
2364
+ self.showHeadersSubMenu.addAction(self.showOverallHeadersAction)
2365
+ self.showOverallHeadersAction.triggered.connect(self.showOverallHeaders)
2366
+
2116
2367
  # Add separator
2117
2368
  self.headerMenu.addSeparator()
2118
2369
 
@@ -2149,6 +2400,11 @@ class MainWindow(QMainWindow):
2149
2400
  self.editTraceSubMenu.addAction(self.insertMutedTracesAction)
2150
2401
  self.insertMutedTracesAction.triggered.connect(self.insertMutedTraces)
2151
2402
 
2403
+ # Create a QAction for zero padding traces
2404
+ self.zeroPadTraceAction = QAction('Zero pad traces', self)
2405
+ self.editTraceSubMenu.addAction(self.zeroPadTraceAction)
2406
+ self.zeroPadTraceAction.triggered.connect(self.zeroPadTrace)
2407
+
2152
2408
  # Create a submenu for batch editing traces
2153
2409
  self.batchEditTraceSubMenu = self.headerMenu.addMenu('Batch edit traces')
2154
2410
 
@@ -2182,6 +2438,11 @@ class MainWindow(QMainWindow):
2182
2438
  self.batchEditTraceSubMenu.addAction(self.batchInsertMutedTracesAction)
2183
2439
  self.batchInsertMutedTracesAction.triggered.connect(self.batchInsertMutedTraces)
2184
2440
 
2441
+ # Create a QAction for batch zero padding traces
2442
+ self.batchZeroPadTracesAction = QAction('Batch zero pad traces', self)
2443
+ self.batchEditTraceSubMenu.addAction(self.batchZeroPadTracesAction)
2444
+ self.batchZeroPadTracesAction.triggered.connect(self.batchZeroPadTraces)
2445
+
2185
2446
  # Add separator
2186
2447
  self.headerMenu.addSeparator()
2187
2448
 
@@ -3862,6 +4123,11 @@ class MainWindow(QMainWindow):
3862
4123
  """Clear all dynamic controls from top widget"""
3863
4124
  if hasattr(self, 'topDynamicControlsLayout'):
3864
4125
  self._clear_layout(self.topDynamicControlsLayout)
4126
+
4127
+ def _clearBottomDynamicControls(self):
4128
+ """Clear all dynamic controls from bottom widget"""
4129
+ if hasattr(self, 'bottomDynamicControlsLayout'):
4130
+ self._clear_layout(self.bottomDynamicControlsLayout)
3865
4131
 
3866
4132
  def onNormPerTraceChanged_Spectrogram(self, state):
3867
4133
  """Handle per-trace normalization toggle for Spectrogram view"""
@@ -5067,27 +5333,6 @@ class MainWindow(QMainWindow):
5067
5333
  # Update title to show on correct plot widget based on current view types
5068
5334
  self.updateTitle()
5069
5335
 
5070
- def onTimesColormapChanged(self, colormap_name):
5071
- """Handle times colormap change (legacy colormap dropdown)"""
5072
- # Update the view-specific colormap based on current bottom view (unified)
5073
- if hasattr(self, 'bottomViewComboBox'):
5074
- view_type = self.bottomViewComboBox.currentText()
5075
- plot_type_to_view = {"Traveltimes": "layout_view", "Layout": "layout_view", "Topography": "topography",
5076
- "Spectrogram": "spectrogram", "Dispersion": "dispersion", "Seismogram": "seismogram"}
5077
- view_key = plot_type_to_view.get(view_type, "layout_view")
5078
- setattr(self, f'{view_key}_colormap_str', colormap_name)
5079
-
5080
- # Keep legacy colormap_str for backward compatibility
5081
- self.colormap_str = colormap_name
5082
- self.colormap = pqg.colormap.get(self.colormap_str, source='matplotlib')
5083
-
5084
- # Show source info in status bar
5085
- if self.streams and self.currentIndex is not None:
5086
- self.ffidLabel.setText(f'FFID: {self.ffid[self.currentIndex]} | Source at {self.source_position[self.currentIndex]} m')
5087
- # Refresh the plot for current bottom view
5088
- if hasattr(self, 'bottomViewComboBox'):
5089
- self.updatePlots()
5090
-
5091
5336
  def onBottomNormChanged(self, state):
5092
5337
  """Handle normalization checkbox toggle for bottom analytical views"""
5093
5338
  # Store normalization state as boolean
@@ -5795,8 +6040,8 @@ class MainWindow(QMainWindow):
5795
6040
  if file_path.lower().endswith(('.su', '.segy', '.sgy', '.sg2', '.dat', '.seg2')):
5796
6041
  accepted = True
5797
6042
  break
5798
- # Check for pick files: .sgt
5799
- if file_path.lower().endswith('.sgt'):
6043
+ # Check for pick files: .sgt, .lst, .vs
6044
+ if file_path.lower().endswith(('.sgt', '.lst', '.vs')):
5800
6045
  accepted = True
5801
6046
  break
5802
6047
  # Check for dispersion picks: .pvc
@@ -5829,7 +6074,7 @@ class MainWindow(QMainWindow):
5829
6074
  if file_path.lower().endswith(('.su', '.segy', '.sgy', '.sg2', '.dat', '.seg2')):
5830
6075
  seismic_files.append(file_path)
5831
6076
  # Traveltime pick files
5832
- elif file_path.lower().endswith('.sgt'):
6077
+ elif file_path.lower().endswith(('.sgt', '.lst', '.vs')):
5833
6078
  pick_files.append(file_path)
5834
6079
  # Dispersion pick files
5835
6080
  elif file_path.lower().endswith('.pvc'):
@@ -6553,6 +6798,8 @@ class MainWindow(QMainWindow):
6553
6798
  self._batch_loading = False # Flag to prevent plotting during batch loading
6554
6799
  self.plotTypeX = 'file_trace_number' # Plot type for X axis
6555
6800
  self.plotTypeY = 'ffid' # Plot type for Y axis
6801
+ self.x_label = 'Trace Number (All Streams)' # Initialize X axis label
6802
+ self.y_label = 'FFID' # Initialize Y axis label
6556
6803
  self.t_label = 'Time (s)'
6557
6804
  self.relativeError = 0.05
6558
6805
  self.absoluteError = 0
@@ -7117,6 +7364,299 @@ class MainWindow(QMainWindow):
7117
7364
 
7118
7365
  self.updatePlotTypeDict()
7119
7366
 
7367
+ def showAcquisitionParameters(self):
7368
+ """Display a dialog with acquisition parameters summary"""
7369
+ if not self.streams or len(self.streams) == 0:
7370
+ QMessageBox.information(self, "No Data", "No seismic files loaded.")
7371
+ return
7372
+
7373
+ # Calculate parameters from all loaded streams
7374
+ params = self.calculateAcquisitionParameters()
7375
+
7376
+ # Show the dialog
7377
+ dialog = AcquisitionParametersDialog(params, self)
7378
+ dialog.exec_()
7379
+
7380
+ def calculateAcquisitionParameters(self):
7381
+ """Calculate acquisition parameters from loaded data"""
7382
+ params = {}
7383
+
7384
+ try:
7385
+ # Number of shots
7386
+ params['n_shots'] = len(self.streams)
7387
+
7388
+ # Get parameters from current or first file
7389
+ idx = self.currentIndex if self.currentIndex is not None else 0
7390
+
7391
+ if idx >= len(self.streams):
7392
+ params['error'] = "Invalid file index"
7393
+ return params
7394
+
7395
+ # Number of traces (from current/first shot)
7396
+ params['n_traces'] = len(self.streams[idx])
7397
+
7398
+ # Collect all unique trace positions across all shots for total extent
7399
+ all_trace_positions = set()
7400
+ if hasattr(self, 'trace_position') and len(self.trace_position) > 0:
7401
+ for trace_pos_list in self.trace_position:
7402
+ if trace_pos_list is not None:
7403
+ all_trace_positions.update(trace_pos_list)
7404
+
7405
+ # Total unique traces and overall extent
7406
+ if len(all_trace_positions) > 0:
7407
+ params['total_unique_traces'] = len(all_trace_positions)
7408
+ params['total_trace_extent'] = max(all_trace_positions) - min(all_trace_positions)
7409
+ params['first_trace_pos'] = min(all_trace_positions)
7410
+ params['last_trace_pos'] = max(all_trace_positions)
7411
+ else:
7412
+ params['total_unique_traces'] = None
7413
+ params['total_trace_extent'] = None
7414
+ params['first_trace_pos'] = None
7415
+ params['last_trace_pos'] = None
7416
+
7417
+ # Trace spacing (from current/first shot)
7418
+ if hasattr(self, 'trace_position') and idx < len(self.trace_position):
7419
+ trace_pos = self.trace_position[idx]
7420
+ if trace_pos is not None and len(trace_pos) > 1:
7421
+ # Calculate spacing using median for robustness
7422
+ spacings = [trace_pos[i+1] - trace_pos[i] for i in range(len(trace_pos)-1)]
7423
+ params['trace_spacing'] = np.median(spacings)
7424
+ elif trace_pos is not None and len(trace_pos) == 1:
7425
+ params['trace_spacing'] = None
7426
+ else:
7427
+ params['trace_spacing'] = None
7428
+ else:
7429
+ params['trace_spacing'] = None
7430
+
7431
+ # Detect missing traces
7432
+ params['missing_traces'] = None
7433
+ params['n_missing_traces'] = 0
7434
+ if len(all_trace_positions) > 1 and params.get('trace_spacing') is not None:
7435
+ sorted_positions = sorted(all_trace_positions)
7436
+ expected_spacing = params['trace_spacing']
7437
+ tolerance = expected_spacing * 0.3 # 30% tolerance
7438
+ missing_positions = []
7439
+
7440
+ for i in range(len(sorted_positions) - 1):
7441
+ gap = sorted_positions[i+1] - sorted_positions[i]
7442
+ if gap > expected_spacing + tolerance:
7443
+ # Gap detected - calculate how many traces are missing
7444
+ n_missing = round(gap / expected_spacing) - 1
7445
+ if n_missing > 0:
7446
+ # Calculate approximate positions of missing traces
7447
+ for j in range(1, n_missing + 1):
7448
+ missing_pos = sorted_positions[i] + j * expected_spacing
7449
+ missing_positions.append(missing_pos)
7450
+
7451
+ if missing_positions:
7452
+ params['missing_traces'] = missing_positions
7453
+ params['n_missing_traces'] = len(missing_positions)
7454
+
7455
+ # Source spacing and extent
7456
+ if hasattr(self, 'source_position') and len(self.source_position) > 1:
7457
+ source_positions = self.source_position
7458
+ if len(source_positions) > 1:
7459
+ # Calculate spacing using MEDIAN to handle roll-along gaps
7460
+ spacings = [source_positions[i+1] - source_positions[i] for i in range(len(source_positions)-1)]
7461
+ params['source_spacing'] = np.median(np.abs(spacings))
7462
+ # Total extent from all shots
7463
+ params['total_source_extent'] = max(source_positions) - min(source_positions)
7464
+ params['first_shot_pos'] = min(source_positions)
7465
+ params['last_shot_pos'] = max(source_positions)
7466
+ else:
7467
+ params['source_spacing'] = None
7468
+ params['total_source_extent'] = None
7469
+ params['first_shot_pos'] = source_positions[0] if len(source_positions) > 0 else None
7470
+ params['last_shot_pos'] = source_positions[0] if len(source_positions) > 0 else None
7471
+ else:
7472
+ params['source_spacing'] = None
7473
+ params['total_source_extent'] = None
7474
+ params['first_shot_pos'] = None
7475
+ params['last_shot_pos'] = None
7476
+
7477
+ # Detect missing sources/shots
7478
+ params['missing_sources'] = None
7479
+ params['n_missing_sources'] = 0
7480
+ if (hasattr(self, 'source_position') and len(self.source_position) > 1 and
7481
+ params.get('source_spacing') is not None):
7482
+ sorted_positions = sorted(self.source_position)
7483
+ expected_spacing = params['source_spacing']
7484
+ tolerance = expected_spacing * 0.3 # 30% tolerance
7485
+ missing_positions = []
7486
+
7487
+ for i in range(len(sorted_positions) - 1):
7488
+ gap = sorted_positions[i+1] - sorted_positions[i]
7489
+ if gap > expected_spacing + tolerance:
7490
+ # Gap detected - calculate how many sources are missing
7491
+ n_missing = round(gap / expected_spacing) - 1
7492
+ if n_missing > 0:
7493
+ # Calculate approximate positions of missing sources
7494
+ for j in range(1, n_missing + 1):
7495
+ missing_pos = sorted_positions[i] + j * expected_spacing
7496
+ missing_positions.append(missing_pos)
7497
+
7498
+ if missing_positions:
7499
+ params['missing_sources'] = missing_positions
7500
+ params['n_missing_sources'] = len(missing_positions)
7501
+
7502
+ # Detect shot positioning relative to traces
7503
+ params['shot_positioning'] = None
7504
+ if (hasattr(self, 'source_position') and hasattr(self, 'trace_position') and
7505
+ len(self.source_position) > 0 and len(self.trace_position) > 0):
7506
+ try:
7507
+ # Collect all unique trace positions across all shots
7508
+ all_trace_positions = set()
7509
+ for trace_pos_list in self.trace_position:
7510
+ if trace_pos_list is not None:
7511
+ all_trace_positions.update(trace_pos_list)
7512
+
7513
+ # Check each source position against trace positions
7514
+ tolerance = 0.1 # 10 cm tolerance for considering positions equal
7515
+ at_trace_count = 0
7516
+ between_trace_count = 0
7517
+
7518
+ for src_pos in self.source_position:
7519
+ is_at_trace = any(abs(src_pos - trace_pos) < tolerance for trace_pos in all_trace_positions)
7520
+ if is_at_trace:
7521
+ at_trace_count += 1
7522
+ else:
7523
+ between_trace_count += 1
7524
+
7525
+ # Determine positioning type
7526
+ if at_trace_count > 0 and between_trace_count == 0:
7527
+ params['shot_positioning'] = "At trace positions"
7528
+ elif between_trace_count > 0 and at_trace_count == 0:
7529
+ params['shot_positioning'] = "Between traces"
7530
+ elif at_trace_count > 0 and between_trace_count > 0:
7531
+ params['shot_positioning'] = f"Mixed ({at_trace_count} at traces, {between_trace_count} between)"
7532
+ else:
7533
+ params['shot_positioning'] = "Unknown"
7534
+ except Exception:
7535
+ params['shot_positioning'] = "Unknown"
7536
+
7537
+ # Min/Max offset
7538
+ if hasattr(self, 'offset') and len(self.offset) > 0:
7539
+ all_offsets = []
7540
+ for offset_list in self.offset:
7541
+ if offset_list is not None and len(offset_list) > 0:
7542
+ all_offsets.extend(offset_list)
7543
+ if len(all_offsets) > 0:
7544
+ # Use absolute values for offset ranges
7545
+ abs_offsets = [abs(offset) for offset in all_offsets]
7546
+ params['min_offset'] = min(abs_offsets)
7547
+ params['max_offset'] = max(abs_offsets)
7548
+ else:
7549
+ params['min_offset'] = None
7550
+ params['max_offset'] = None
7551
+ else:
7552
+ params['min_offset'] = None
7553
+ params['max_offset'] = None
7554
+
7555
+ # Roll-along detection
7556
+ params['roll_along_detected'] = False
7557
+ params['roll_along_segments'] = None
7558
+ params['roll_along_segment_details'] = None
7559
+ params['roll_along_shot_segment_details'] = None
7560
+
7561
+ if (hasattr(self, 'source_position') and hasattr(self, 'trace_position') and
7562
+ len(self.source_position) > 1 and len(self.trace_position) > 1):
7563
+ try:
7564
+ # Detect roll-along by checking if trace positions change while
7565
+ # some shots might be at similar positions
7566
+ # Group shots by similar trace array positions
7567
+ segments = []
7568
+ current_segment = [0]
7569
+
7570
+ for i in range(1, len(self.trace_position)):
7571
+ if self.trace_position[i] is not None and self.trace_position[i-1] is not None:
7572
+ # Check if trace positions have changed significantly
7573
+ prev_min = min(self.trace_position[i-1])
7574
+ prev_max = max(self.trace_position[i-1])
7575
+ curr_min = min(self.trace_position[i])
7576
+ curr_max = max(self.trace_position[i])
7577
+
7578
+ # If the trace array has shifted (different min/max positions)
7579
+ # This indicates a new segment in roll-along
7580
+ if abs(curr_min - prev_min) > 0.1 or abs(curr_max - prev_max) > 0.1:
7581
+ segments.append(current_segment)
7582
+ current_segment = [i]
7583
+ else:
7584
+ current_segment.append(i)
7585
+ else:
7586
+ current_segment.append(i)
7587
+
7588
+ # Add last segment
7589
+ if current_segment:
7590
+ segments.append(current_segment)
7591
+
7592
+ # Roll-along is detected if we have multiple segments
7593
+ if len(segments) > 1:
7594
+ params['roll_along_detected'] = True
7595
+ params['roll_along_segments'] = len(segments)
7596
+
7597
+ # Get detailed info for each segment (traces)
7598
+ segment_details = []
7599
+ for seg_idx, segment in enumerate(segments):
7600
+ # Get trace positions for this segment
7601
+ seg_trace_pos = self.trace_position[segment[0]]
7602
+ if seg_trace_pos is not None:
7603
+ seg_unique_traces = set()
7604
+ for shot_idx in segment:
7605
+ if self.trace_position[shot_idx] is not None:
7606
+ seg_unique_traces.update(self.trace_position[shot_idx])
7607
+
7608
+ seg_min = min(seg_unique_traces)
7609
+ seg_max = max(seg_unique_traces)
7610
+ seg_extent = seg_max - seg_min
7611
+ seg_n_traces = len(seg_unique_traces)
7612
+
7613
+ detail = f"{seg_n_traces} traces, {seg_min:.0f} to {seg_max:.0f} m (extent: {seg_extent:.0f} m)"
7614
+ segment_details.append(detail)
7615
+
7616
+ params['roll_along_segment_details'] = segment_details
7617
+
7618
+ # Get detailed info for each segment (shots)
7619
+ shot_segment_details = []
7620
+ for seg_idx, segment in enumerate(segments):
7621
+ # Get shot positions for this segment
7622
+ seg_shot_positions = [self.source_position[i] for i in segment if i < len(self.source_position)]
7623
+ if seg_shot_positions:
7624
+ seg_min_shot = min(seg_shot_positions)
7625
+ seg_max_shot = max(seg_shot_positions)
7626
+ seg_extent_shot = seg_max_shot - seg_min_shot
7627
+ seg_n_shots = len(seg_shot_positions)
7628
+
7629
+ detail = f"{seg_n_shots} shots, {seg_min_shot:.0f} to {seg_max_shot:.0f} m (extent: {seg_extent_shot:.0f} m)"
7630
+ shot_segment_details.append(detail)
7631
+
7632
+ params['roll_along_shot_segment_details'] = shot_segment_details
7633
+ except Exception:
7634
+ # If roll-along detection fails, just mark as not detected
7635
+ pass
7636
+
7637
+ # Sampling interval
7638
+ if hasattr(self, 'sample_interval') and idx < len(self.sample_interval):
7639
+ params['sample_interval'] = self.sample_interval[idx]
7640
+ else:
7641
+ params['sample_interval'] = None
7642
+
7643
+ # Recording time
7644
+ if hasattr(self, 'record_length') and idx < len(self.record_length):
7645
+ params['record_length'] = self.record_length[idx]
7646
+ else:
7647
+ params['record_length'] = None
7648
+
7649
+ # Delay time
7650
+ if hasattr(self, 'delay') and idx < len(self.delay):
7651
+ params['delay'] = self.delay[idx]
7652
+ else:
7653
+ params['delay'] = None
7654
+
7655
+ except Exception as e:
7656
+ params['error'] = f"Error calculating parameters: {str(e)}"
7657
+
7658
+ return params
7659
+
7120
7660
  def clearMemory(self):
7121
7661
  # Clear memory (reset the application)
7122
7662
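Both the missing-geophone and missing-source checks above use the same gap test: take the median of consecutive position differences as the nominal spacing, flag any gap larger than that spacing plus a 30% tolerance, and back-fill approximate positions inside the gap. A self-contained sketch of that idea (names are mine, not pyckster's API):

```python
import numpy as np

def find_missing_positions(positions, tolerance_frac=0.3):
    """Estimate missing positions from gaps in a (roughly) regular layout."""
    pos = sorted(positions)
    if len(pos) < 2:
        return None, []
    diffs = [b - a for a, b in zip(pos[:-1], pos[1:])]
    spacing = float(np.median(diffs))        # median is robust to a few large gaps
    tolerance = spacing * tolerance_frac
    missing = []
    for a, b in zip(pos[:-1], pos[1:]):
        gap = b - a
        if gap > spacing + tolerance:
            n_missing = round(gap / spacing) - 1
            missing.extend(a + j * spacing for j in range(1, n_missing + 1))
    return spacing, missing

spacing, missing = find_missing_positions([0, 2, 4, 8, 10, 12])
print(spacing, missing)   # 2.0 [6.0] -> one geophone missing at 6 m
```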
 
@@ -9761,15 +10301,68 @@ class MainWindow(QMainWindow):
9761
10301
 
9762
10302
  def _on_dispersion_interp_method_changed(self, index):
9763
10303
  """Handler for interpolation method change"""
10304
+ # Get method-specific minimum points requirement
10305
+ method_names = ["Cubic", "Linear", "Quadratic", "Spline"]
10306
+ method_text = method_names[index] if index < len(method_names) else "Cubic"
10307
+ min_points_required = {
10308
+ "Linear": 2, "Quadratic": 3, "Cubic": 4, "Spline": 4
10309
+ }
10310
+ min_points = min_points_required.get(method_text, 2)
10311
+
9764
10312
  view = self._get_active_dispersion_view()
9765
- if view and self.dispersionLiveInterpolateCheckbox.isChecked():
9766
- # Re-interpolate with new method if live mode is on
10313
+ if view:
10314
+ # Get plot widget for removing items
10315
+ plot_widget = self.plotWidget if view == 'top' else self.bottomPlotWidget
10316
+
10317
+ # Handle all modes or just current mode
9767
10318
  if self.dispersionShowAllModesCheckbox.isChecked():
9768
- self.interpolateAllDispersionCurves(view)
10319
+ # Process all modes
10320
+ state = self.dispersion_picking_state[view]
10321
+ for mode_num, mode_data in state['modes'].items():
10322
+ if len(mode_data['picked_points']) < min_points:
10323
+ # Not enough points for new method - remove curve
10324
+ if mode_data['curve_line'] is not None:
10325
+ try:
10326
+ plot_widget.removeItem(mode_data['curve_line'])
10327
+ except:
10328
+ pass
10329
+ mode_data['curve_line'] = None
10330
+ if mode_data.get('interp_error_fill') is not None:
10331
+ try:
10332
+ plot_widget.removeItem(mode_data['interp_error_fill'])
10333
+ except:
10334
+ pass
10335
+ mode_data['interp_error_fill'] = None
10336
+ mode_data['curve_data'] = None
10337
+
10338
+ # Re-interpolate all modes that have enough points
10339
+ if self.dispersionLiveInterpolateCheckbox.isChecked():
10340
+ self.interpolateAllDispersionCurves(view)
9769
10341
  else:
10342
+ # Process current mode only
9770
10343
  mode_data = self._get_current_mode_data(view)
9771
- if len(mode_data['picked_points']) >= 2:
10344
+ if len(mode_data['picked_points']) < min_points:
10345
+ # Not enough points for new method - remove curve
10346
+ if mode_data['curve_line'] is not None:
10347
+ try:
10348
+ plot_widget.removeItem(mode_data['curve_line'])
10349
+ except:
10350
+ pass
10351
+ mode_data['curve_line'] = None
10352
+ if mode_data.get('interp_error_fill') is not None:
10353
+ try:
10354
+ plot_widget.removeItem(mode_data['interp_error_fill'])
10355
+ except:
10356
+ pass
10357
+ mode_data['interp_error_fill'] = None
10358
+ mode_data['curve_data'] = None
10359
+ elif self.dispersionLiveInterpolateCheckbox.isChecked():
10360
+ # Re-interpolate with new method if live mode is on and enough points
9772
10361
  self.interpolateDispersionCurve(view)
10362
+
10363
+ # Refresh display to update all views
10364
+ self._refreshDispersionPicksDisplay()
10365
+ self._updatePseudoSectionIfVisible()
9773
10366
 
9774
10367
  def onShowSingleStacksToggled(self, state=None):
9775
10368
  """Toggle display of single stacks viewer"""
@@ -10923,6 +11516,21 @@ class MainWindow(QMainWindow):
10923
11516
  self.offset[self.currentIndex] = [i * 1.0 for i in range(num_traces)] # Default 1m spacing
10924
11517
  self.updatePlotTypeDict()
10925
11518
 
11519
+ # Ensure time array is initialized
11520
+ if (self.currentIndex < len(self.time) and
11521
+ self.time[self.currentIndex] is None and
11522
+ self.currentIndex < len(self.streams) and
11523
+ self.streams[self.currentIndex]):
11524
+ # Initialize time array from stream
11525
+ stream = self.streams[self.currentIndex]
11526
+ if len(stream) > 0:
11527
+ dt = stream[0].stats.delta
11528
+ n_samples = stream[0].stats.npts
11529
+ self.time[self.currentIndex] = np.arange(n_samples) * dt
11530
+ self.n_sample[self.currentIndex] = n_samples
11531
+ self.sample_interval[self.currentIndex] = dt
11532
+ self.record_length[self.currentIndex] = self.time[self.currentIndex][-1]
11533
+
10926
11534
  # Restore trace ranges from memory if available
10927
11535
  self._restoreTraceRangesFromMemory()
10928
11536
 
@@ -11014,9 +11622,9 @@ class MainWindow(QMainWindow):
11014
11622
  class AsciiImportDialog(QDialog):
11015
11623
  def __init__(self, parent=None):
11016
11624
  super().__init__(parent)
11017
- self.setWindowTitle("ASCII Matrix Import Parameters")
11625
+ self.setWindowTitle("Import ASCII Matrix")
11018
11626
  self.setModal(True)
11019
- self.resize(400, 500)
11627
+ self.resize(450, 250)
11020
11628
 
11021
11629
  layout = QVBoxLayout(self)
11022
11630
 
@@ -11026,88 +11634,55 @@ class MainWindow(QMainWindow):
11026
11634
 
11027
11635
  self.file_path_edit = QLineEdit()
11028
11636
  self.file_path_edit.setReadOnly(True)
11637
+ self.file_path_edit.setPlaceholderText("No files selected")
11029
11638
  browse_button = QPushButton("Browse...")
11030
11639
  browse_button.clicked.connect(self.browse_file)
11031
11640
 
11032
11641
  file_row = QHBoxLayout()
11033
11642
  file_row.addWidget(self.file_path_edit)
11034
11643
  file_row.addWidget(browse_button)
11035
- file_layout.addRow("ASCII File:", file_row)
11644
+ file_layout.addRow("ASCII File(s):", file_row)
11036
11645
 
11037
- layout.addWidget(file_group)
11646
+ # Store selected file paths
11647
+ self.selected_files = []
11038
11648
 
11039
- # Time parameters
11040
- time_group = QGroupBox("Time Parameters")
11041
- time_layout = QFormLayout(time_group)
11649
+ layout.addWidget(file_group)
11042
11650
 
11043
- self.first_time_edit = QDoubleSpinBox()
11044
- self.first_time_edit.setRange(-999999, 999999)
11045
- self.first_time_edit.setDecimals(6)
11046
- self.first_time_edit.setValue(0.0)
11047
- self.first_time_edit.setSuffix(" s")
11048
- self.first_time_edit.setToolTip("First time sample (time of first row)")
11049
- time_layout.addRow("First time:", self.first_time_edit)
11651
+ # Data parameters
11652
+ data_group = QGroupBox("Data Parameters")
11653
+ data_layout = QFormLayout(data_group)
11050
11654
 
11051
11655
  self.time_sampling_edit = QDoubleSpinBox()
11052
11656
  self.time_sampling_edit.setRange(0.000001, 1.0)
11053
11657
  self.time_sampling_edit.setDecimals(6)
11054
- self.time_sampling_edit.setValue(0.001)
11658
+ self.time_sampling_edit.setValue(0.00025)
11055
11659
  self.time_sampling_edit.setSuffix(" s")
11056
11660
  self.time_sampling_edit.setToolTip("Time sampling interval (dt)")
11057
- time_layout.addRow("Time sampling (dt):", self.time_sampling_edit)
11058
-
11059
- layout.addWidget(time_group)
11060
-
11061
- # Trace parameters
11062
- trace_group = QGroupBox("Trace Parameters")
11063
- trace_layout = QFormLayout(trace_group)
11064
-
11065
- self.first_trace_edit = QDoubleSpinBox()
11066
- self.first_trace_edit.setRange(-999999, 999999)
11067
- self.first_trace_edit.setDecimals(2)
11068
- self.first_trace_edit.setValue(0.0)
11069
- self.first_trace_edit.setSuffix(" m")
11070
- self.first_trace_edit.setToolTip("Position of first trace (first column)")
11071
- trace_layout.addRow("First trace position:", self.first_trace_edit)
11072
-
11073
- self.trace_sampling_edit = QDoubleSpinBox()
11074
- self.trace_sampling_edit.setRange(0.01, 1000.0)
11075
- self.trace_sampling_edit.setDecimals(2)
11076
- self.trace_sampling_edit.setValue(1.0)
11077
- self.trace_sampling_edit.setSuffix(" m")
11078
- self.trace_sampling_edit.setToolTip("Distance between traces")
11079
- trace_layout.addRow("Trace spacing:", self.trace_sampling_edit)
11080
-
11081
- layout.addWidget(trace_group)
11082
-
11083
- # Shot parameters
11084
- shot_group = QGroupBox("Shot Parameters")
11085
- shot_layout = QFormLayout(shot_group)
11086
-
11087
- self.shot_position_edit = QDoubleSpinBox()
11088
- self.shot_position_edit.setRange(-999999, 999999)
11089
- self.shot_position_edit.setDecimals(2)
11090
- self.shot_position_edit.setValue(0.0)
11091
- self.shot_position_edit.setSuffix(" m")
11092
- self.shot_position_edit.setToolTip("Position of the seismic shot/source")
11093
- shot_layout.addRow("Shot position:", self.shot_position_edit)
11094
-
11095
- layout.addWidget(shot_group)
11096
-
11097
- # Data format options
11098
- format_group = QGroupBox("Data Format Options")
11099
- format_layout = QFormLayout(format_group)
11661
+ data_layout.addRow("Time sampling (dt):", self.time_sampling_edit)
11662
+
11663
+ self.skip_header_edit = QSpinBox()
11664
+ self.skip_header_edit.setRange(0, 1000)
11665
+ self.skip_header_edit.setValue(0)
11666
+ self.skip_header_edit.setToolTip("Number of header lines to skip in the ASCII file")
11667
+ data_layout.addRow("Skip header lines:", self.skip_header_edit)
11100
11668
 
11101
11669
  self.delimiter_edit = QLineEdit()
11102
11670
  self.delimiter_edit.setText("auto")
11103
- self.delimiter_edit.setToolTip("Column delimiter ('auto', 'tab', 'space', ',' or custom)")
11104
- format_layout.addRow("Column delimiter:", self.delimiter_edit)
11671
+ self.delimiter_edit.setToolTip("Column delimiter ('auto', 'tab', 'space', ',', or custom character)")
11672
+ data_layout.addRow("Column delimiter:", self.delimiter_edit)
11105
11673
 
11106
11674
  self.transpose_check = QCheckBox()
11107
- self.transpose_check.setToolTip("Check if traces are in rows instead of columns")
11108
- format_layout.addRow("Transpose matrix:", self.transpose_check)
11675
+ self.transpose_check.setChecked(False)
11676
+ self.transpose_check.setToolTip("Check if traces are organized row-wise (default: traces in columns)")
11677
+ data_layout.addRow("Traces in rows:", self.transpose_check)
11678
+
11679
+ layout.addWidget(data_group)
11109
11680
 
11110
- layout.addWidget(format_group)
11681
+ # Info label
11682
+ info_label = QLabel("Note: All positions and elevations will be set to 0. Time starts at 0.")
11683
+ info_label.setWordWrap(True)
11684
+ info_label.setStyleSheet("color: gray; font-style: italic;")
11685
+ layout.addWidget(info_label)
11111
11686
 
11112
11687
  # Dialog buttons
11113
11688
  buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
@@ -11116,12 +11691,16 @@ class MainWindow(QMainWindow):
11116
11691
  layout.addWidget(buttons)
11117
11692
 
11118
11693
  def browse_file(self):
11119
- file_path, _ = QFileDialog.getOpenFileName(
11120
- self, "Select ASCII Matrix File", "",
11694
+ file_paths, _ = QFileDialog.getOpenFileNames(
11695
+ self, "Select ASCII Matrix File(s)", "",
11121
11696
  "Text files (*.txt *.dat *.asc *.csv);;All files (*)"
11122
11697
  )
11123
- if file_path:
11124
- self.file_path_edit.setText(file_path)
11698
+ if file_paths:
11699
+ self.selected_files = file_paths
11700
+ if len(file_paths) == 1:
11701
+ self.file_path_edit.setText(file_paths[0])
11702
+ else:
11703
+ self.file_path_edit.setText(f"{len(file_paths)} files selected")
11125
11704
 
11126
11705
  def get_parameters(self):
11127
11706
  delimiter = self.delimiter_edit.text().strip()
@@ -11133,12 +11712,9 @@ class MainWindow(QMainWindow):
11133
11712
  delimiter = ' '
11134
11713
 
11135
11714
  return {
11136
- 'file_path': self.file_path_edit.text(),
11137
- 'first_time': self.first_time_edit.value(),
11715
+ 'file_paths': self.selected_files,
11138
11716
  'time_sampling': self.time_sampling_edit.value(),
11139
- 'first_trace': self.first_trace_edit.value(),
11140
- 'trace_spacing': self.trace_sampling_edit.value(),
11141
- 'shot_position': self.shot_position_edit.value(),
11717
+ 'skip_header': self.skip_header_edit.value(),
11142
11718
  'delimiter': delimiter,
11143
11719
  'transpose': self.transpose_check.isChecked()
11144
11720
  }
@@ -11154,129 +11730,149 @@ class MainWindow(QMainWindow):
11154
11730
  QMessageBox.critical(self, "Error", "Parameters are None - dialog error")
11155
11731
  return
11156
11732
 
11157
- if not params['file_path']:
11158
- QMessageBox.warning(self, "Warning", "Please select an ASCII file to import.")
11733
+ if not params['file_paths']:
11734
+ QMessageBox.warning(self, "Warning", "Please select at least one ASCII file to import.")
11159
11735
  return
11160
11736
 
11161
- try:
11162
- # Load the ASCII matrix
11163
- QApplication.processEvents()
11164
-
11165
- # Determine delimiter automatically if needed
11166
- delimiter = params['delimiter']
11167
- if delimiter is None:
11168
- # Try to auto-detect delimiter
11169
- with open(params['file_path'], 'r') as f:
11170
- first_line = f.readline().strip()
11171
- if '\t' in first_line:
11172
- delimiter = '\t'
11173
- elif ',' in first_line:
11174
- delimiter = ','
11175
- else:
11176
- delimiter = None # Let numpy handle it
11177
-
11178
- # Load the data
11179
- try:
11180
- data_matrix = np.loadtxt(params['file_path'], delimiter=delimiter)
11181
- except ValueError:
11182
- # Try with different delimiters
11183
- for delim in [None, ',', '\t', ' ']:
11184
- try:
11185
- data_matrix = np.loadtxt(params['file_path'], delimiter=delim)
11186
- break
11187
- except ValueError:
11188
- continue
11189
- else:
11190
- raise ValueError("Could not parse the ASCII file with any standard delimiter")
11191
-
11192
- # Transpose if requested
11193
- if params['transpose']:
11194
- data_matrix = data_matrix.T
11195
-
11196
- # Validate the data matrix
11197
- if data_matrix.size == 0:
11198
- raise ValueError("The loaded matrix is empty")
11199
-
11200
- # Check for problematic values
11201
- if np.any(np.isnan(data_matrix)):
11202
- raise ValueError("The matrix contains NaN values")
11203
- if np.any(np.isinf(data_matrix)):
11204
- raise ValueError("The matrix contains infinite values")
11205
-
11206
- # Convert to ObsPy stream
11207
- stream = self.ascii_to_obspy_stream(data_matrix, params)
11208
-
11209
- # Add to file list and load
11210
- file_name = os.path.basename(params['file_path'])
11211
- display_name = f"{file_name} (ASCII import)"
11212
-
11213
- # Create a temporary file info structure
11214
- file_info = {
11215
- 'fileName': params['file_path'],
11216
- 'displayName': display_name,
11217
- 'stream': stream,
11218
- 'isAsciiImport': True
11219
- }
11220
-
11221
- # Add to the file lists
11222
- self.fileNames.append(params['file_path'])
11223
- self.streams.append(stream)
11224
-
11225
- # Extend all other lists to match the new length
11737
+ # Process each file
11738
+ successful_imports = 0
11739
+ failed_imports = []
11740
+
11741
+ for file_path in params['file_paths']:
11226
11742
  try:
11227
- for attr in self.attributes_to_initialize:
11228
- if attr not in ['fileNames', 'streams']: # These are already added
11229
- attr_list = getattr(self, attr)
11230
- if attr_list is None:
11231
- print(f"Warning: {attr} is None, initializing as empty list")
11232
- setattr(self, attr, [])
11743
+ # Load the ASCII matrix
11744
+ QApplication.processEvents()
11745
+
11746
+ # Determine delimiter automatically if needed
11747
+ delimiter = params['delimiter']
11748
+ if delimiter is None:
11749
+ # Try to auto-detect delimiter by reading first data line
11750
+ with open(file_path, 'r') as f:
11751
+ # Skip header lines
11752
+ for _ in range(params['skip_header']):
11753
+ f.readline()
11754
+ first_data_line = f.readline().strip()
11755
+ if '\t' in first_data_line:
11756
+ delimiter = '\t'
11757
+ elif ',' in first_data_line:
11758
+ delimiter = ','
11759
+ else:
11760
+ delimiter = None # Let numpy handle it
11761
+
11762
+ # Load the data with skiprows parameter
11763
+ try:
11764
+ data_matrix = np.loadtxt(file_path, delimiter=delimiter, skiprows=params['skip_header'])
11765
+ except ValueError:
11766
+ # Try with different delimiters
11767
+ for delim in [None, ',', '\t', ' ']:
11768
+ try:
11769
+ data_matrix = np.loadtxt(file_path, delimiter=delim, skiprows=params['skip_header'])
11770
+ break
11771
+ except ValueError:
11772
+ continue
11773
+ else:
11774
+ raise ValueError("Could not parse the ASCII file with any standard delimiter")
11775
+
11776
+ # Transpose if traces are in rows
11777
+ if params['transpose']:
11778
+ data_matrix = data_matrix.T
11779
+
11780
+ # Ensure data is 2D (rows=time, columns=traces)
11781
+ if data_matrix.ndim == 1:
11782
+ data_matrix = data_matrix.reshape(-1, 1)
11783
+
11784
+ # Validate the data matrix
11785
+ if data_matrix.size == 0:
11786
+ raise ValueError("The loaded matrix is empty")
11787
+
11788
+ # Check for problematic values
11789
+ if np.any(np.isnan(data_matrix)):
11790
+ raise ValueError("The matrix contains NaN values")
11791
+ if np.any(np.isinf(data_matrix)):
11792
+ raise ValueError("The matrix contains infinite values")
11793
+
11794
+ # Convert to ObsPy stream
11795
+ stream = self.ascii_to_obspy_stream(data_matrix, params)
11796
+
11797
+ # Add to file list and load
11798
+ file_name = os.path.basename(file_path)
11799
+ display_name = f"{file_name} (ASCII import)"
11800
+
11801
+ # Add to the file lists
11802
+ self.fileNames.append(file_path)
11803
+ self.streams.append(stream)
11804
+
11805
+ # Extend all other lists to match the new length
11806
+ try:
11807
+ for attr in self.attributes_to_initialize:
11808
+ if attr not in ['fileNames', 'streams']: # These are already added
11233
11809
  attr_list = getattr(self, attr)
11234
- attr_list.append(None) # Initialize with None, will be set in loadStream()
11810
+ if attr_list is None:
11811
+ print(f"Warning: {attr} is None, initializing as empty list")
11812
+ setattr(self, attr, [])
11813
+ attr_list = getattr(self, attr)
11814
+ attr_list.append(None) # Initialize with None, will be set in loadStream()
11815
+ except Exception as e:
11816
+ print(f"Error in list extension: {e}")
11817
+ print(f"attributes_to_initialize: {self.attributes_to_initialize}")
11818
+ raise
11819
+
11820
+ self.fileListWidget.addItem(display_name)
11821
+
11822
+ # Get the index for this file
11823
+ file_index = len(self.streams) - 1
11824
+
11825
+ # Set up the stream properly like loadStream() does
11826
+ self.input_format[file_index] = check_format(self.streams[file_index])
11827
+
11828
+ # For ASCII data, set up basic plotting parameters manually
11829
+ num_traces = len(self.streams[file_index])
11830
+
11831
+ # Set up basic parameters for ASCII data (all positions default to 0)
11832
+ self.shot_trace_number[file_index] = list(range(1, num_traces + 1))
11833
+ self.file_trace_number[file_index] = np.arange(1, num_traces + 1)
11834
+ self.trace_position[file_index] = list(range(num_traces)) # 0-based indexing for trace positions
11835
+ self.source_position[file_index] = 0.0 # Default source position
11836
+ self.offset[file_index] = [i * 1.0 for i in range(num_traces)] # Default 1m spacing
11837
+ self.trace_elevation[file_index] = [0.0] * num_traces # Default elevation
11838
+ self.source_elevation[file_index] = 0.0 # Default source elevation
11839
+
11840
+ # Set up time parameters
11841
+ self.n_sample[file_index] = len(self.streams[file_index][0].data)
11842
+ self.sample_interval[file_index] = params['time_sampling']
11843
+ self.delay[file_index] = 0.0
11844
+ self.time[file_index] = np.arange(self.n_sample[file_index]) * self.sample_interval[file_index]
11845
+ self.record_length[file_index] = self.time[file_index][-1]
11846
+ self.ffid[file_index] = 1 # Default FFID
11847
+
11848
+ successful_imports += 1
11849
+
11235
11850
  except Exception as e:
11236
- print(f"Error in list extension: {e}")
11237
- print(f"attributes_to_initialize: {self.attributes_to_initialize}")
11238
- raise
11239
-
11240
- self.fileListWidget.addItem(display_name)
11241
-
11242
- # Select the new file
11851
+ failed_imports.append((os.path.basename(file_path), str(e)))
11852
+
11853
+ # After processing all files, update UI and show results
11854
+ if successful_imports > 0:
11855
+ # Select the last imported file
11243
11856
  self.currentIndex = len(self.streams) - 1
11244
11857
  self.fileListWidget.setCurrentRow(self.currentIndex)
11245
11858
 
11246
- # Set up the stream properly like loadStream() does
11247
- self.input_format[self.currentIndex] = check_format(self.streams[self.currentIndex])
11248
-
11249
- # For ASCII data, set up basic plotting parameters manually
11250
- # since SEGY headers don't exist
11251
- num_traces = len(self.streams[self.currentIndex])
11252
-
11253
- # Set up basic parameters for ASCII data
11254
- self.shot_trace_number[self.currentIndex] = list(range(1, num_traces + 1))
11255
- self.file_trace_number[self.currentIndex] = np.arange(1, num_traces + 1)
11256
- self.trace_position[self.currentIndex] = list(range(num_traces)) # 0-based indexing for trace positions
11257
- self.source_position[self.currentIndex] = 0.0 # Default source position
11258
- self.offset[self.currentIndex] = [i * params.get('trace_spacing', 1.0) for i in range(num_traces)]
11259
- self.trace_elevation[self.currentIndex] = [0.0] * num_traces # Default elevation
11260
- self.source_elevation[self.currentIndex] = 0.0 # Default source elevation
11261
-
11262
- # Set up time parameters
11263
- self.n_sample[self.currentIndex] = len(self.streams[self.currentIndex][0].data)
11264
- self.sample_interval[self.currentIndex] = params.get('time_sampling', 0.001) # Use correct key
11265
- self.delay[self.currentIndex] = 0.0
11266
- self.time[self.currentIndex] = np.arange(self.n_sample[self.currentIndex]) * self.sample_interval[self.currentIndex]
11267
- self.record_length[self.currentIndex] = self.time[self.currentIndex][-1]
11268
- self.ffid[self.currentIndex] = 1 # Default FFID
11269
-
11270
11859
  # Update the plot type dictionary
11271
11860
  self.updatePlotTypeDict()
11272
11861
 
11273
- # Plot the new data
11862
+ # Plot the data
11274
11863
  self.updatePlots()
11275
-
11276
- QMessageBox.information(self, "Import Successful", f"ASCII matrix imported successfully: {data_matrix.shape[1]} traces, {data_matrix.shape[0]} time samples")
11277
-
11278
- except Exception as e:
11279
- QMessageBox.critical(self, "Error", f"Failed to import ASCII matrix:\n{str(e)}")
11864
+
11865
+ # Show import summary
11866
+ if failed_imports:
11867
+ error_msg = f"Successfully imported {successful_imports} file(s).\n\nFailed imports ({len(failed_imports)}):\n"
11868
+ for fname, error in failed_imports[:5]: # Show first 5 errors
11869
+ error_msg += f"\n• {fname}: {error}"
11870
+ if len(failed_imports) > 5:
11871
+ error_msg += f"\n\n... and {len(failed_imports) - 5} more"
11872
+ QMessageBox.warning(self, "Import Complete with Errors", error_msg)
11873
+ else:
11874
+ QMessageBox.information(self, "Import Successful",
11875
+ f"Successfully imported {successful_imports} ASCII file(s)")
11280
11876
 
11281
11877
  def ascii_to_obspy_stream(self, data_matrix, params):
11282
11878
  """Convert ASCII matrix to ObsPy Stream object"""
@@ -11289,14 +11885,8 @@ class MainWindow(QMainWindow):
11289
11885
  # Create ObsPy Stream
11290
11886
  stream = Stream()
11291
11887
 
11292
- # Calculate time vector
11293
- times = np.arange(n_times) * params['time_sampling'] + params['first_time']
11294
-
11295
11888
  # Create a trace for each column
11296
11889
  for i in range(n_traces):
11297
- # Calculate trace position
11298
- trace_position = params['first_trace'] + i * params['trace_spacing']
11299
-
11300
11890
  # Create trace data
11301
11891
  trace_data = data_matrix[:, i]
11302
11892
 
@@ -11304,33 +11894,244 @@ class MainWindow(QMainWindow):
11304
11894
  stats = Stats()
11305
11895
  stats.sampling_rate = 1.0 / params['time_sampling']
11306
11896
  stats.npts = n_times
11307
- stats.starttime = UTCDateTime(0) + params['first_time'] # Relative time
11897
+ stats.starttime = UTCDateTime(0) # Start time at 0
11308
11898
  stats.delta = params['time_sampling']
11309
11899
 
11310
- # Set trace headers (using SEGY-like conventions)
11311
- stats.segy = {}
11312
- stats.segy.trace_header = {}
11313
- stats.segy.trace_header.trace_sequence_number_within_line = i + 1
11314
- stats.segy.trace_header.trace_sequence_number_within_segy_file = i + 1
11315
- stats.segy.trace_header.original_field_record_number = 1
11316
- stats.segy.trace_header.trace_number_within_the_original_field_record = i + 1
11317
- stats.segy.trace_header.source_coordinate_x = int(params['shot_position'] * 1000) # Convert to mm
11318
- stats.segy.trace_header.group_coordinate_x = int(trace_position * 1000) # Convert to mm
11319
- stats.segy.trace_header.coordinate_units = 2 # Meters
11320
- stats.segy.trace_header.number_of_samples_in_this_trace = n_times
11321
- stats.segy.trace_header.sample_interval_in_microseconds = int(params['time_sampling'] * 1e6)
11900
+ # Set trace headers (using SEGY-like conventions, all positions at 0)
11901
+ stats.segy = {}
11902
+ stats.segy.trace_header = {}
11903
+ stats.segy.trace_header.trace_sequence_number_within_line = i + 1
11904
+ stats.segy.trace_header.trace_sequence_number_within_segy_file = i + 1
11905
+ stats.segy.trace_header.original_field_record_number = 1
11906
+ stats.segy.trace_header.trace_number_within_the_original_field_record = i + 1
11907
+ stats.segy.trace_header.source_coordinate_x = 0 # Source at 0
11908
+ stats.segy.trace_header.group_coordinate_x = i # Trace position from 0 to ntrace-1
11909
+ stats.segy.trace_header.coordinate_units = 2 # Meters
11910
+ stats.segy.trace_header.number_of_samples_in_this_trace = n_times
11911
+ stats.segy.trace_header.sample_interval_in_microseconds = int(params['time_sampling'] * 1e6)
11912
+
11913
+ # Create channel naming
11914
+ stats.network = "XX"
11915
+ stats.station = f"T{i+1:03d}"
11916
+ stats.location = ""
11917
+ stats.channel = "SHZ" # Assume vertical seismometer
11918
+
11919
+ # Create the trace
11920
+ trace = Trace(data=trace_data, header=stats)
11921
+ stream.append(trace)
11922
+
11923
+ return stream
11924
+
11925
+ def importAsciiArchive(self):
11926
+ """Import shot data from ASCII archive (.tar.gz or .zip)"""
11927
+ # Open file dialog to select archives (multiple selection enabled)
11928
+ archive_paths, _ = QFileDialog.getOpenFileNames(
11929
+ self, "Import ASCII Archive(s)", "",
11930
+ "All archives (*.tar.gz *.tgz *.zip);;All files (*)"
11931
+ )
11932
+
11933
+ if not archive_paths:
11934
+ return
11935
+
11936
+ # Reset auto-increment flag for this import session
11937
+ self._auto_increment_ffid = False
11938
+
11939
+ successful_imports = 0
11940
+ failed_imports = []
11941
+
11942
+ for archive_path in archive_paths:
11943
+ try:
11944
+ QApplication.setOverrideCursor(Qt.WaitCursor)
11945
+
11946
+ # Import the archive
11947
+ stream, metadata = import_shot_from_ascii(archive_path)
11948
+
11949
+ # Extract shot number
11950
+ shot_number = int(metadata.get('shot_number', 1))
11951
+
11952
+ # Check for duplicate FFID
11953
+ if shot_number in self.ffid:
11954
+ QApplication.restoreOverrideCursor()
11955
+
11956
+ # Check if user has already chosen "Yes to All" for this loading session
11957
+ if not self._auto_increment_ffid:
11958
+ # Ask the user whether to load the file with an incremented FFID
11959
+ msg = QMessageBox()
11960
+ msg.setIcon(QMessageBox.Warning)
11961
+ msg.setText(f'FFID {shot_number} already loaded. Do you want to load the file with an incremented FFID?')
11962
+ msg.setWindowTitle("FFID already loaded")
11963
+
11964
+ # Add custom buttons: Yes, Yes to All, No
11965
+ btn_yes = msg.addButton("Yes", QMessageBox.YesRole)
11966
+ btn_yes_all = msg.addButton("Yes to All", QMessageBox.YesRole)
11967
+ btn_no = msg.addButton("No", QMessageBox.NoRole)
11968
+
11969
+ msg.setDefaultButton(btn_yes)
11970
+ msg.exec_()
11971
+ clicked = msg.clickedButton()
11972
+
11973
+ if clicked == btn_yes_all:
11974
+ # Set flag to auto-increment for all remaining files
11975
+ self._auto_increment_ffid = True
11976
+ response = QMessageBox.Yes
11977
+ elif clicked == btn_yes:
11978
+ response = QMessageBox.Yes
11979
+ else: # btn_no
11980
+ response = QMessageBox.No
11981
+ else:
11982
+ # User already chose "Yes to All", auto-increment
11983
+ response = QMessageBox.Yes
11984
+
11985
+ if response == QMessageBox.Yes:
11986
+ # Increment FFID until we find an unused one
11987
+ while shot_number in self.ffid:
11988
+ shot_number += 1
11989
+ # Update metadata
11990
+ metadata['shot_number'] = shot_number
11991
+ else:
11992
+ # User declined FFID increment, skip this archive
11993
+ continue
11994
+
11995
+ QApplication.setOverrideCursor(Qt.WaitCursor)
11996
+
11997
+ # Add to file lists
11998
+ file_name = os.path.basename(archive_path)
11999
+ display_name = f"{file_name} (ASCII import)"
12000
+
12001
+ self.fileNames.append(archive_path)
12002
+ self.streams.append(stream)
12003
+
12004
+ # Extend all other lists to match the new length
12005
+ for attr in self.attributes_to_initialize:
12006
+ if attr not in ['fileNames', 'streams']:
12007
+ attr_list = getattr(self, attr)
12008
+ if attr_list is None:
12009
+ setattr(self, attr, [])
12010
+ attr_list = getattr(self, attr)
12011
+ attr_list.append(None)
12012
+
12013
+ self.fileListWidget.addItem(display_name)
12014
+
12015
+ # Get the index for this file
12016
+ file_index = len(self.streams) - 1
12017
+
12018
+ # Set up the stream properly
12019
+ self.input_format[file_index] = check_format(stream)
12020
+
12021
+ # Extract parameters from metadata (shot_number may have been incremented earlier)
12022
+ num_traces = int(metadata.get('n_traces', len(stream)))
12023
+ n_samples = int(metadata.get('n_samples', len(stream[0].data)))
12024
+ dt = float(metadata.get('sampling_interval', stream[0].stats.delta))
12025
+ shot_number = int(metadata.get('shot_number', 1)) # Use potentially incremented value
12026
+
12027
+ # Set up time parameters FIRST (before extracting trace info)
12028
+ self.n_sample[file_index] = n_samples
12029
+ self.sample_interval[file_index] = dt
12030
+ self.delay[file_index] = 0.0
12031
+ self.time[file_index] = np.arange(n_samples) * dt
12032
+ self.record_length[file_index] = self.time[file_index][-1]
12033
+ self.ffid[file_index] = shot_number
12034
+
12035
+ # Set up trace parameters
12036
+ self.shot_trace_number[file_index] = list(range(1, num_traces + 1))
12037
+ self.file_trace_number[file_index] = np.arange(1, num_traces + 1)
12038
+
12039
+ # Extract positions from stream headers
12040
+ trace_positions = []
12041
+ trace_elevations = []
12042
+ input_format = self.input_format[file_index]
12043
+
12044
+ for tr in stream:
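+ # SEG-Y stores coordinates as integers plus a scalar; a negative scalar
+ # means "divide by its absolute value", so the reciprocal below turns it
+ # into a plain multiplier (a scalar of 0 is treated as 1)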
12045
+ try:
12046
+ coord_scalar = tr.stats[input_format].trace_header.scalar_to_be_applied_to_all_coordinates
12047
+ if coord_scalar < 0:
12048
+ coord_scalar = -1.0 / coord_scalar
12049
+ elif coord_scalar == 0:
12050
+ coord_scalar = 1.0
12051
+ except (AttributeError, KeyError):
12052
+ coord_scalar = 1.0
12053
+
12054
+ try:
12055
+ elev_scalar = tr.stats[input_format].trace_header.scalar_to_be_applied_to_all_elevations_and_depths
12056
+ if elev_scalar < 0:
12057
+ elev_scalar = -1.0 / elev_scalar
12058
+ elif elev_scalar == 0:
12059
+ elev_scalar = 1.0
12060
+ except (AttributeError, KeyError):
12061
+ elev_scalar = coord_scalar
12062
+
12063
+ try:
12064
+ pos = tr.stats[input_format].trace_header.group_coordinate_x * coord_scalar
12065
+ except (AttributeError, KeyError):
12066
+ pos = 0.0
12067
+ trace_positions.append(pos)
12068
+
12069
+ try:
12070
+ elev = tr.stats[input_format].trace_header.receiver_group_elevation * elev_scalar
12071
+ except (AttributeError, KeyError):
12072
+ elev = 0.0
12073
+ trace_elevations.append(elev)
12074
+
12075
+ self.trace_position[file_index] = trace_positions
12076
+ self.trace_elevation[file_index] = trace_elevations
12077
+
12078
+ # Set source position
12079
+ source_x = float(metadata.get('source_x', 0.0))
12080
+ source_z = float(metadata.get('source_z', 0.0))
12081
+ self.source_position[file_index] = source_x
12082
+ self.source_elevation[file_index] = source_z
12083
+
12084
+ # Calculate offsets
12085
+ self.offset[file_index] = [pos - source_x for pos in trace_positions]
12086
+
12087
+ # Initialize picks and error arrays
12088
+ self.picks[file_index] = [None] * num_traces
12089
+ self.error[file_index] = [None] * num_traces
12090
+ self.pickSeismoItems[file_index] = [None] * num_traces
12091
+ self.pickLayoutItems[file_index] = [None] * num_traces
12092
+ self.airWaveItems[file_index] = [None] * num_traces
12093
+ self.dispersion_curves[file_index] = {}
12094
+
12095
+ successful_imports += 1
12096
+
12097
+ except Exception as e:
12098
+ failed_imports.append((os.path.basename(archive_path), str(e)))
12099
+ import traceback
12100
+ traceback.print_exc()
12101
+ finally:
12102
+ QApplication.restoreOverrideCursor()
12103
+
12104
+ # After processing all files, update UI and show results
12105
+ if successful_imports > 0:
12106
+ # Initialize plot labels if they don't exist
12107
+ if not hasattr(self, 'x_label'):
12108
+ self.x_label = 'Trace Number'
12109
+ if not hasattr(self, 'y_label'):
12110
+ self.y_label = 'Source Position (m)'
12111
+
12112
+ # Select the last imported file
12113
+ self.currentIndex = len(self.streams) - 1
12114
+ self.fileListWidget.setCurrentRow(self.currentIndex)
11322
12115
 
11323
- # Create channel naming
11324
- stats.network = "XX"
11325
- stats.station = f"T{i+1:03d}"
11326
- stats.location = ""
11327
- stats.channel = "SHZ" # Assume vertical seismometer
12116
+ # Update the plot type dictionary
12117
+ self.updatePlotTypeDict()
11328
12118
 
11329
- # Create the trace
11330
- trace = Trace(data=trace_data, header=stats)
11331
- stream.append(trace)
12119
+ # Plot the new data
12120
+ self.updatePlots()
11332
12121
 
11333
- return stream
12122
+ # Show import summary
12123
+ if failed_imports:
12124
+ error_msg = f"Successfully imported {successful_imports} archive(s).\n\nFailed imports ({len(failed_imports)}):\n"
12125
+ for fname, error in failed_imports[:5]: # Show first 5 errors
12126
+ error_msg += f"\n• {fname}: {error}"
12127
+ if len(failed_imports) > 5:
12128
+ error_msg += f"\n\n... and {len(failed_imports) - 5} more"
12129
+ QMessageBox.warning(self, "Import Complete with Errors", error_msg)
12130
+ elif successful_imports > 0:
12131
+ QMessageBox.information(
12132
+ self, "Import Successful",
12133
+ f"Successfully imported {successful_imports} ASCII archive(s)"
12134
+ )
11334
12135
 
11335
12136
  #######################################
11336
12137
  # File loading and processing functions
@@ -11945,6 +12746,11 @@ class MainWindow(QMainWindow):
11945
12746
 
11946
12747
  stream = swap_header_format(stream, format)
11947
12748
 
12749
+ # Ensure data is float32 for SEGY/SU compatibility
12750
+ for trace in stream:
12751
+ if trace.data.dtype != np.float32:
12752
+ trace.data = trace.data.astype(np.float32)
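+ # The write() call below uses data_encoding=5 (4-byte IEEE float),
+ # which expects float32 sample data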
12753
+
11948
12754
  # Save the stream as a SEGY or SU file
11949
12755
  stream.write(savePath, format=format,
11950
12756
  data_encoding=5, byteorder='>')
@@ -11986,6 +12792,11 @@ class MainWindow(QMainWindow):
11986
12792
 
11987
12793
  stream = swap_header_format(stream, self.output_format)
11988
12794
  self.input_format[i] = check_format(stream)
12795
+
12796
+ # Ensure data is float32 for SEGY/SU compatibility
12797
+ for trace in stream:
12798
+ if trace.data.dtype != np.float32:
12799
+ trace.data = trace.data.astype(np.float32)
11989
12800
 
11990
12801
  # Default file name is fileNames[i] without extension _ffid.format
11991
12802
  # check if ffid is in the original file name
@@ -12026,6 +12837,11 @@ class MainWindow(QMainWindow):
12026
12837
  merged_stream = swap_header_format(merged_stream, self.output_format)
12027
12838
  for i, stream in enumerate(self.streams):
12028
12839
  self.input_format[i] = check_format(stream)
12840
+
12841
+ # Ensure data is float32 for SEGY/SU compatibility
12842
+ for trace in merged_stream:
12843
+ if trace.data.dtype != np.float32:
12844
+ trace.data = trace.data.astype(np.float32)
12029
12845
 
12030
12846
  # Save the merged stream as a SEGY or SU file
12031
12847
  merged_stream.write(savePath, format=self.output_format,
@@ -12068,6 +12884,120 @@ class MainWindow(QMainWindow):
12068
12884
  # Save all shots in a single SU file
12069
12885
  self.output_format = 'SU'
12070
12886
  self.saveAllFiles(single=True)
12887
+
12888
+ def saveSingleFileASCII(self):
12889
+ """Save the current shot as ASCII files in a tar.gz archive"""
12890
+ if not self.streams:
12891
+ QMessageBox.warning(self, "No Data", "No data loaded to save.")
12892
+ return
12893
+
12894
+ # Set the headers
12895
+ self.setHeaders()
12896
+
12897
+ # Get the directory to save the archive
12898
+ if str(self.ffid[self.currentIndex]) in os.path.basename(self.currentFileName):
12899
+ defaultSavePath = os.path.splitext(self.currentFileName)[0] + '.tar.gz'
12900
+ else:
12901
+ defaultSavePath = os.path.splitext(self.currentFileName)[0] + '_' + str(self.ffid[self.currentIndex]) + '.tar.gz'
12902
+
12903
+ savePath, _ = QFileDialog.getSaveFileName(
12904
+ self, "Save as ASCII archive", defaultSavePath,
12905
+ "TAR.GZ archives (*.tar.gz);;ZIP archives (*.zip)"
12906
+ )
12907
+
12908
+ if savePath:
12909
+ QApplication.setOverrideCursor(Qt.WaitCursor)
12910
+ try:
12911
+ stream = self.streams[self.currentIndex]
12912
+ shot_index = self.ffid[self.currentIndex]
12913
+
12914
+ # Create temporary directory for the files
12915
+ import tempfile
12916
+ temp_dir = tempfile.mkdtemp()
12917
+
12918
+ # Determine archive type
12919
+ use_zip = savePath.endswith('.zip')
12920
+
12921
+ # Export to ASCII
12922
+ export_shot_to_ascii(stream, shot_index, temp_dir, archive=False)
12923
+
12924
+ # Create archive
12925
+ if use_zip:
12926
+ import zipfile
12927
+ with zipfile.ZipFile(savePath, 'w', zipfile.ZIP_DEFLATED) as zipf:
12928
+ for filename in os.listdir(temp_dir):
12929
+ file_path = os.path.join(temp_dir, filename)
12930
+ zipf.write(file_path, arcname=filename)
12931
+ else:
12932
+ import tarfile
12933
+ with tarfile.open(savePath, "w:gz") as tar:
12934
+ for filename in os.listdir(temp_dir):
12935
+ file_path = os.path.join(temp_dir, filename)
12936
+ tar.add(file_path, arcname=filename)
12937
+
12938
+ # Clean up temp directory
12939
+ import shutil
12940
+ shutil.rmtree(temp_dir)
12941
+
12942
+ print(f"Saved single shot (ASCII): {savePath}")
12943
+ QMessageBox.information(self, "File Saved", f"ASCII archive saved as:\n{savePath}")
12944
+ except Exception as e:
12945
+ QMessageBox.critical(self, "Save Error", f"Failed to save ASCII archive:\n{str(e)}")
12946
+ finally:
12947
+ QApplication.restoreOverrideCursor()
12948
+
12949
+ def saveAllFilesASCII(self):
12950
+ """Save all shots as ASCII files in separate tar.gz archives"""
12951
+ if not self.streams:
12952
+ QMessageBox.warning(self, "No Data", "No data loaded to save.")
12953
+ return
12954
+
12955
+ # Set the headers
12956
+ self.setHeaders()
12957
+
12958
+ # Get the directory to save the archives
12959
+ output_dir = QFileDialog.getExistingDirectory(
12960
+ self, "Select Directory for ASCII Archives",
12961
+ os.path.dirname(self.currentFileName) if self.currentFileName else ""
12962
+ )
12963
+
12964
+ if output_dir:
12965
+ QApplication.setOverrideCursor(Qt.WaitCursor)
12966
+ saved_count = 0
12967
+ try:
12968
+ for i, stream in enumerate(self.streams):
12969
+ shot_index = self.ffid[i]
12970
+
12971
+ # Create temporary directory for the files
12972
+ import tempfile
12973
+ temp_dir = tempfile.mkdtemp()
12974
+
12975
+ # Export to ASCII
12976
+ export_shot_to_ascii(stream, shot_index, temp_dir, archive=False)
12977
+
12978
+ # Create archive
12979
+ archive_path = os.path.join(output_dir, f"shot_{shot_index:04d}.tar.gz")
12980
+ import tarfile
12981
+ with tarfile.open(archive_path, "w:gz") as tar:
12982
+ for filename in os.listdir(temp_dir):
12983
+ file_path = os.path.join(temp_dir, filename)
12984
+ tar.add(file_path, arcname=filename)
12985
+
12986
+ # Clean up temp directory
12987
+ import shutil
12988
+ shutil.rmtree(temp_dir)
12989
+
12990
+ saved_count += 1
12991
+ print(f"Saved shot {shot_index} (ASCII): {archive_path}")
12992
+
12993
+ QMessageBox.information(
12994
+ self, "Files Saved",
12995
+ f"Saved {saved_count} ASCII archives to:\n{output_dir}"
12996
+ )
12997
+ except Exception as e:
12998
+ QMessageBox.critical(self, "Save Error", f"Failed to save ASCII archives:\n{str(e)}")
12999
+ finally:
13000
+ QApplication.restoreOverrideCursor()
12071
13001
 
12072
13002
  def setHeaders(self):
12073
13003
  # Set stream headers based on self attributes
@@ -12075,6 +13005,26 @@ class MainWindow(QMainWindow):
12075
13005
 
12076
13006
  for i, st in enumerate(self.streams):
12077
13007
  input_format = check_format(st)
13008
+
13009
+ # Calculate max coordinate value to adjust scalar if needed
13010
+ max_coord = 0
13011
+ for trace_index in range(len(st)):
13012
+ max_coord = max(max_coord,
13013
+ abs(self.source_position[i]) if self.source_position[i] is not None else 0,
13014
+ abs(self.trace_position[i][trace_index]) if self.trace_position[i][trace_index] is not None else 0,
13015
+ abs(self.trace_elevation[i][trace_index]) if self.trace_elevation[i][trace_index] is not None else 0,
13016
+ abs(self.source_elevation[i]) if self.source_elevation[i] is not None else 0)
13017
+
13018
+ # Adjust scalar if values would exceed int16 range
13019
+ if max_coord * coordinate_scalar >= 32000:
13020
+ # Find appropriate scalar
13021
+ if max_coord * 100 < 32000:
13022
+ coordinate_scalar = 100
13023
+ elif max_coord * 10 < 32000:
13024
+ coordinate_scalar = 10
13025
+ else:
13026
+ coordinate_scalar = 1
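+ # e.g. max_coord = 400 m: 400*100 = 40000 still exceeds 32000,
+ # but 400*10 = 4000 fits, so the scalar drops to 10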
13027
+
12078
13028
  for trace_index, trace in enumerate(st):
12079
13029
  trace.stats[input_format].trace_header.scalar_to_be_applied_to_all_coordinates = int(-coordinate_scalar)
12080
13030
  trace.stats[input_format].trace_header.scalar_to_be_applied_to_all_elevations_and_depths = int(-coordinate_scalar)
@@ -12082,12 +13032,14 @@ class MainWindow(QMainWindow):
12082
13032
  trace.stats[input_format].trace_header.delay_recording_time = int(self.delay[i]*1000)
12083
13033
  trace.stats[input_format].trace_header.datum_elevation_at_receiver_group = int(np.round(np.mean(np.diff(self.trace_position[i])),self.rounding)*coordinate_scalar)
12084
13034
 
12085
- trace.stats[input_format].trace_header.distance_from_center_of_the_source_point_to_the_center_of_the_receiver_group = int(self.source_position[i]*coordinate_scalar) - int(self.trace_position[i][trace_index]*coordinate_scalar)
12086
- trace.stats[input_format].trace_header.group_coordinate_x = int(self.trace_position[i][trace_index]*coordinate_scalar)
13035
+ # Clamp all coordinate values to int16 range
13036
+ offset_val = int(self.source_position[i]*coordinate_scalar) - int(self.trace_position[i][trace_index]*coordinate_scalar)
13037
+ trace.stats[input_format].trace_header.distance_from_center_of_the_source_point_to_the_center_of_the_receiver_group = np.clip(offset_val, -32767, 32767)
13038
+ trace.stats[input_format].trace_header.group_coordinate_x = np.clip(int(self.trace_position[i][trace_index]*coordinate_scalar), -32767, 32767)
12087
13039
  trace.stats[input_format].trace_header.group_coordinate_y = int(0)
12088
- trace.stats[input_format].trace_header.receiver_group_elevation = int(self.trace_elevation[i][trace_index]*coordinate_scalar)
13040
+ trace.stats[input_format].trace_header.receiver_group_elevation = np.clip(int(self.trace_elevation[i][trace_index]*coordinate_scalar), -32767, 32767)
12089
13041
 
12090
- trace.stats[input_format].trace_header.source_coordinate_x = int(self.source_position[i]*coordinate_scalar)
13042
+ trace.stats[input_format].trace_header.source_coordinate_x = np.clip(int(self.source_position[i]*coordinate_scalar), -32767, 32767)
12091
13043
  trace.stats[input_format].trace_header.source_coordinate_y = int(0)
12092
13044
  trace.stats[input_format].trace_header.surface_elevation_at_source = int(self.source_elevation[i]*coordinate_scalar)
12093
13045
 
@@ -12520,6 +13472,93 @@ class MainWindow(QMainWindow):
12520
13472
  dialog = HeaderDialog(files, list(attributes_to_collect.values()), header_values, self)
12521
13473
  dialog.exec_()
12522
13474
 
13475
+ def showOverallHeaders(self):
13476
+ """Show single values for each file (overall headers)"""
13477
+ if self.streams:
13478
+ files = [os.path.basename(file) for file in self.fileNames]
13479
+ attributes_to_collect = {
13480
+ "ffid": "FFID",
13481
+ "delay": "Delay (s)",
13482
+ "sample_interval": "Sample Interval (s)",
13483
+ "n_sample": "Number of Samples",
13484
+ "record_length": "Record Length (s)",
13485
+ "source_position": "Source Position (m)",
13486
+ "source_elevation": "Source Elevation (m)",
13487
+ }
13488
+
13489
+ # Collect single values for each file
13490
+ header_values = {}
13491
+
13492
+ for i, file in enumerate(files):
13493
+ header_values[file] = {}
13494
+ for header, display_name in attributes_to_collect.items():
13495
+ attribute_value = getattr(self, header, [])[i]
13496
+ # Store as single-element list for compatibility with HeaderDialog
13497
+ if isinstance(attribute_value, (list, tuple, np.ndarray)):
13498
+ # Take first value if it's a list
13499
+ header_values[file][display_name] = [attribute_value[0] if len(attribute_value) > 0 else 'N/A']
13500
+ else:
13501
+ header_values[file][display_name] = [attribute_value]
13502
+
13503
+ # Add number of traces per shot
13504
+ if i < len(self.streams) and self.streams[i]:
13505
+ header_values[file]["Number of Traces"] = [len(self.streams[i])]
13506
+ else:
13507
+ header_values[file]["Number of Traces"] = ['N/A']
13508
+
13509
+ # Add first trace position, elevation, and offset
13510
+ if i < len(self.trace_position) and self.trace_position[i] is not None and len(self.trace_position[i]) > 0:
13511
+ header_values[file]["First Trace Position (m)"] = [self.trace_position[i][0]]
13512
+ else:
13513
+ header_values[file]["First Trace Position (m)"] = ['N/A']
13514
+
13515
+ if i < len(self.trace_elevation) and self.trace_elevation[i] is not None and len(self.trace_elevation[i]) > 0:
13516
+ header_values[file]["First Trace Elevation (m)"] = [self.trace_elevation[i][0]]
13517
+ else:
13518
+ header_values[file]["First Trace Elevation (m)"] = ['N/A']
13519
+
13520
+ if i < len(self.offset) and self.offset[i] is not None and len(self.offset[i]) > 0:
13521
+ header_values[file]["First Trace Offset (m)"] = [self.offset[i][0]]
13522
+ else:
13523
+ header_values[file]["First Trace Offset (m)"] = ['N/A']
13524
+
13525
+ # Add last trace position, elevation, and offset
13526
+ if i < len(self.trace_position) and self.trace_position[i] is not None and len(self.trace_position[i]) > 0:
13527
+ header_values[file]["Last Trace Position (m)"] = [self.trace_position[i][-1]]
13528
+ else:
13529
+ header_values[file]["Last Trace Position (m)"] = ['N/A']
13530
+
13531
+ if i < len(self.trace_elevation) and self.trace_elevation[i] is not None and len(self.trace_elevation[i]) > 0:
13532
+ header_values[file]["Last Trace Elevation (m)"] = [self.trace_elevation[i][-1]]
13533
+ else:
13534
+ header_values[file]["Last Trace Elevation (m)"] = ['N/A']
13535
+
13536
+ if i < len(self.offset) and self.offset[i] is not None and len(self.offset[i]) > 0:
13537
+ header_values[file]["Last Trace Offset (m)"] = [self.offset[i][-1]]
13538
+ else:
13539
+ header_values[file]["Last Trace Offset (m)"] = ['N/A']
13540
+
13541
+ # Create headers list in the desired order
13542
+ headers = [
13543
+ "FFID",
13544
+ "Number of Traces",
13545
+ "Delay (s)",
13546
+ "Sample Interval (s)",
13547
+ "Number of Samples",
13548
+ "Record Length (s)",
13549
+ "Source Position (m)",
13550
+ "Source Elevation (m)",
13551
+ "First Trace Position (m)",
13552
+ "First Trace Elevation (m)",
13553
+ "First Trace Offset (m)",
13554
+ "Last Trace Position (m)",
13555
+ "Last Trace Elevation (m)",
13556
+ "Last Trace Offset (m)"
13557
+ ]
13558
+
13559
+ dialog = OverallHeaderDialog(files, headers, header_values, self)
13560
+ dialog.exec_()
13561
+
12523
13562
  def editFFID(self):
12524
13563
  if self.streams:
12525
13564
  parameters = [
@@ -15499,6 +16538,85 @@ class MainWindow(QMainWindow):
15499
16538
  if self.pickLayoutItems[file_idx] is not None:
15500
16539
  self.pickLayoutItems[file_idx][first_trace:last_trace+1] = self.pickLayoutItems[file_idx][first_trace:last_trace+1][::-1]
15501
16540
 
16541
+ def zeroPadTrace(self):
16542
+ """Zero pad all traces in a shot by adding zeros at the end of the trace data."""
16543
+ if self.streams:
16544
+ parameters = [
16545
+ {'label': 'Pad at end (seconds)', 'initial_value': 0.0, 'type': 'float'}
16546
+ ]
16547
+
16548
+ dialog = GenericParameterDialog(
16549
+ title="Zero Pad Shot",
16550
+ parameters=parameters,
16551
+ add_checkbox=True,
16552
+ checkbox_text="Apply to all shots",
16553
+ parent=self
16554
+ )
16555
+
16556
+ if dialog.exec_():
16557
+ values = dialog.getValues()
16558
+ pad_end_seconds = values['Pad at end (seconds)']
16559
+ apply_to_all = dialog.isChecked()
16560
+
16561
+ # Validate inputs
16562
+ if pad_end_seconds < 0:
16563
+ QMessageBox.warning(self, "Invalid Input", "Padding value must be non-negative")
16564
+ return
16565
+
16566
+ if pad_end_seconds == 0:
16567
+ QMessageBox.warning(self, "Invalid Input", "Padding value must be greater than 0")
16568
+ return
16569
+
16570
+ # Show wait cursor while processing
16571
+ QApplication.setOverrideCursor(Qt.WaitCursor)
16572
+ try:
16573
+ if apply_to_all:
16574
+ # Pad all traces for all files
16575
+ for i in range(len(self.streams)):
16576
+ # Convert seconds to samples
16577
+ pad_end = int(pad_end_seconds / self.sample_interval[i])
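+ # e.g. 0.5 s of padding at a 0.001 s sample interval gives pad_end = 500 samples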
16578
+
16579
+ # Pad all traces in this shot
16580
+ for trace_num in self.shot_trace_number[i]:
16581
+ self.streams[i] = zero_pad_trace(self.streams[i], trace_num, 0, pad_end)
16582
+
16583
+ # Update n_sample and time array after padding
16584
+ if len(self.streams[i]) > 0:
16585
+ self.n_sample[i] = len(self.streams[i][0].data)
16586
+ self.time[i] = np.arange(self.n_sample[i]) * self.sample_interval[i] + self.delay[i]
16587
+ # Sync headers to obspy streams immediately
16588
+ self.syncHeadersToStreams(i)
16589
+
16590
+ self.headers_modified = True
16591
+ QMessageBox.information(self, "Zero Padding Applied",
16592
+ f"All traces padded with {pad_end_seconds} seconds at end for all shots")
16593
+ else:
16594
+ # Pad all traces for current file only
16595
+ # Convert seconds to samples
16596
+ pad_end = int(pad_end_seconds / self.sample_interval[self.currentIndex])
16597
+
16598
+ # Pad all traces in this shot
16599
+ for trace_num in self.shot_trace_number[self.currentIndex]:
16600
+ self.streams[self.currentIndex] = zero_pad_trace(self.streams[self.currentIndex], trace_num, 0, pad_end)
16601
+
16602
+ # Update n_sample and time array after padding
16603
+ if len(self.streams[self.currentIndex]) > 0:
16604
+ self.n_sample[self.currentIndex] = len(self.streams[self.currentIndex][0].data)
16605
+ self.time[self.currentIndex] = np.arange(self.n_sample[self.currentIndex]) * self.sample_interval[self.currentIndex] + self.delay[self.currentIndex]
16606
+ # Sync headers to obspy streams immediately
16607
+ self.syncHeadersToStreams(self.currentIndex)
16608
+
16609
+ self.headers_modified = True
16610
+ QMessageBox.information(self, "Zero Padding Applied",
16611
+ f"All traces padded with {pad_end_seconds} seconds at end")
16612
+
16613
+ self.updatePlots()
16614
+ self.updateFileListDisplay()
16615
+ self.updatePlots()
16616
+ finally:
16617
+ # Always restore cursor even if there's an error
16618
+ QApplication.restoreOverrideCursor()
16619
+
15502
16620
  def batchEditFFID(self):
15503
16621
  if self.streams:
15504
16622
  parameters = [
@@ -16099,6 +17217,69 @@ class MainWindow(QMainWindow):
16099
17217
  # Always restore cursor even if there's an error
16100
17218
  QApplication.restoreOverrideCursor()
16101
17219
 
17220
+ def batchZeroPadTraces(self):
17221
+ """Batch zero pad all traces for a range of shots."""
17222
+ if self.streams:
17223
+ parameters = [
17224
+ {'label': 'First Source #', 'initial_value': 1, 'type': 'int'},
17225
+ {'label': 'Last Source #', 'initial_value': len(self.streams), 'type': 'int'},
17226
+ {'label': 'Pad at end (seconds)', 'initial_value': 0.0, 'type': 'float'}
17227
+ ]
17228
+
17229
+ dialog = GenericParameterDialog(
17230
+ title="Batch Zero Pad Shots",
17231
+ parameters=parameters,
17232
+ add_checkbox=False,
17233
+ parent=self
17234
+ )
17235
+
17236
+ if dialog.exec_():
17237
+ values = dialog.getValues()
17238
+ first_shot = values['First Source #'] - 1
17239
+ last_shot = values['Last Source #'] - 1
17240
+ pad_end_seconds = values['Pad at end (seconds)']
17241
+
17242
+ # Validate inputs
17243
+ if pad_end_seconds < 0:
17244
+ QMessageBox.warning(self, "Invalid Input", "Padding value must be non-negative")
17245
+ return
17246
+
17247
+ if pad_end_seconds == 0:
17248
+ QMessageBox.warning(self, "Invalid Input", "Padding value must be greater than 0")
17249
+ return
17250
+
17251
+ # Show wait cursor while processing
17252
+ QApplication.setOverrideCursor(Qt.WaitCursor)
17253
+ try:
17254
+ # Pad all traces for specified range of shots
17255
+ for i in range(first_shot, min(last_shot + 1, len(self.streams))):
17256
+ # Convert seconds to samples for this file
17257
+ pad_end = int(pad_end_seconds / self.sample_interval[i])
17258
+
17259
+ # Process all traces in this shot
17260
+ for trace_num in self.shot_trace_number[i]:
17261
+ self.streams[i] = zero_pad_trace(self.streams[i], trace_num, 0, pad_end)
17262
+
17263
+ # Update n_sample and time array after padding
17264
+ if len(self.streams[i]) > 0:
17265
+ self.n_sample[i] = len(self.streams[i][0].data)
17266
+ self.time[i] = np.arange(self.n_sample[i]) * self.sample_interval[i] + self.delay[i]
17267
+ # Sync headers to obspy streams immediately after all padding for this file
17268
+ self.syncHeadersToStreams(i)
17269
+
17270
+ # Mark headers/traces as modified so the unsaved-changes warning appears
17271
+ self.headers_modified = True
17272
+
17273
+ QMessageBox.information(self, "Zero Padding Applied",
17274
+ f"All traces padded with {pad_end_seconds} seconds at end for shots {first_shot+1} to {last_shot+1}")
17275
+
17276
+ self.updatePlots()
17277
+ self.updateFileListDisplay()
17278
+ self.updatePlots()
17279
+ finally:
17280
+ # Always restore cursor even if there's an error
17281
+ QApplication.restoreOverrideCursor()
17282
+
16102
17283
  def batchReverseTraces(self):
16103
17284
  """Batch reverse trace data (flip data matrix left-to-right) while keeping headers in place"""
16104
17285
  if self.streams:
@@ -18411,8 +19592,10 @@ class MainWindow(QMainWindow):
18411
19592
  plot_widget.plot(x_filled, t_interpolated, pen=None,
18412
19593
  fillLevel=fillLevel, fillBrush=self.fill_brush)
18413
19594
 
18414
- # Plot the picks (only if picks have been initialized)
19595
+ # Plot the picks (only if picks have been initialized and are not None)
18415
19596
  if (self.picks[self.currentIndex] is not None and
19597
+ i < len(self.picks[self.currentIndex]) and
19598
+ self.picks[self.currentIndex][i] is not None and
18416
19599
  not np.isnan(self.picks[self.currentIndex][i])):
18417
19600
  # Get pick position
18418
19601
  y_ok = self.picks[self.currentIndex][i]
@@ -18620,6 +19803,8 @@ class MainWindow(QMainWindow):
18620
19803
 
18621
19804
  for i in range(len(self.streams[self.currentIndex])):
18622
19805
  if (self.picks[self.currentIndex] is not None and
19806
+ i < len(self.picks[self.currentIndex]) and
19807
+ self.picks[self.currentIndex][i] is not None and
18623
19808
  not np.isnan(self.picks[self.currentIndex][i])):
18624
19809
  # Get pick position
18625
19810
  y_ok = self.picks[self.currentIndex][i]
@@ -18855,9 +20040,13 @@ class MainWindow(QMainWindow):
18855
20040
  plot_y = self.plotTypeDict[self.plotTypeY] # List of sources for the current trace
18856
20041
 
18857
20042
  if self.plotTypeY == 'offset':
18858
- y_all.extend(plot_y[i])
20043
+ # Handle offset - ensure it's a list and no None values
20044
+ offset_values = plot_y[i] if plot_y[i] is not None else [0.0] * m
20045
+ y_all.extend(offset_values)
18859
20046
  else:
18860
- y_all.extend([plot_y[i]] * m)
20047
+ # Handle other plot types - ensure no None values
20048
+ y_value = plot_y[i] if plot_y[i] is not None else 0.0
20049
+ y_all.extend([y_value] * m)
18861
20050
 
18862
20051
  # Only add picks if they have been initialized
18863
20052
  if self.picks[i] is not None:
@@ -18871,10 +20060,11 @@ class MainWindow(QMainWindow):
18871
20060
  def getAllPicks(self, x_all, y_all, pick_all):
18872
20061
  # Get all picks
18873
20062
 
18874
- # If there are picks that are not nan, plot them with colors
18875
- x_pick = [x_all[i] for i in range(len(x_all)) if not np.isnan(pick_all[i])]
18876
- y_pick = [y_all[i] for i in range(len(y_all)) if not np.isnan(pick_all[i])]
18877
- pick_all = [pick_all[i] for i in range(len(pick_all)) if not np.isnan(pick_all[i])]
20063
+ # If there are picks that are not nan, plot them with colors
20064
+ # Filter out None values and NaN values
20065
+ x_pick = [x_all[i] for i in range(len(x_all)) if pick_all[i] is not None and not np.isnan(pick_all[i])]
20066
+ y_pick = [y_all[i] for i in range(len(y_all)) if pick_all[i] is not None and not np.isnan(pick_all[i])]
20067
+ pick_all = [pick_all[i] for i in range(len(pick_all)) if pick_all[i] is not None and not np.isnan(pick_all[i])]
18878
20068
 
18879
20069
  return x_pick, y_pick, pick_all
18880
20070
 
@@ -18915,14 +20105,16 @@ class MainWindow(QMainWindow):
18915
20105
 
18916
20106
  x_all, y_all, pick_all = self.getAllPositions()
18917
20107
 
18918
- scatter = pqg.ScatterPlotItem(x=x_all, y=y_all, symbol='o',
18919
- brush=self.fill_brush, size=5)
18920
- plot_widget.addItem(scatter)
20108
+ # Only create scatter plot if we have valid positions
20109
+ if len(x_all) > 0 and len(y_all) > 0:
20110
+ scatter = pqg.ScatterPlotItem(x=x_all, y=y_all, symbol='o',
20111
+ brush=self.fill_brush, size=5)
20112
+ plot_widget.addItem(scatter)
18921
20113
 
18922
20114
  x_pick, y_pick, pick_all = self.getAllPicks(x_all, y_all, pick_all)
18923
20115
 
18924
20116
  # If there are more than one pick, plot them with colors
18925
- if len(x_pick) > 1:
20117
+ if len(x_pick) > 1 and len(pick_all) > 0:
18926
20118
 
18927
20119
  # Remove colorbar if it exists
18928
20120
  self.removeColorBar()
@@ -18930,10 +20122,12 @@ class MainWindow(QMainWindow):
18930
20122
  # Create a colormap
18931
20123
  self.createPicksColorMap()
18932
20124
 
18933
- # Create ScatterPlotItem with colors
18934
- scatter = pqg.ScatterPlotItem(x=x_pick, y=y_pick, symbol='s',
18935
- brush=self.colors, pen=self.colors, size=8)
18936
- plot_widget.addItem(scatter)
20125
+ # Only create scatter if we have colors (check again after createPicksColorMap)
20126
+ if hasattr(self, 'colors') and len(self.colors) > 0 and len(x_pick) == len(self.colors):
20127
+ # Create ScatterPlotItem with colors (only if we have valid data)
20128
+ scatter = pqg.ScatterPlotItem(x=x_pick, y=y_pick, symbol='s',
20129
+ brush=self.colors, pen=self.colors, size=8)
20130
+ plot_widget.addItem(scatter)
18937
20131
 
18938
20132
  # Add colorbar when there are picks
18939
20133
  self.colorbar = pqg.ColorBarItem(colorMap=self.colormap, values=(min(pick_all), max(pick_all)),
@@ -21343,7 +22537,14 @@ class MainWindow(QMainWindow):
21343
22537
  self.colormap = pqg.colormap.get(colormap_str, source='matplotlib')
21344
22538
 
21345
22539
  # Gather the pick values that are not NaN from the list of pick lists
21346
- values = [value for sublist in self.picks for value in sublist if not np.isnan(value)]
22540
+ # Filter out None values before checking for NaN
22541
+ values = [value for sublist in self.picks for value in sublist if value is not None and not np.isnan(value)]
22542
+
22543
+ # Check if we have any valid values
22544
+ if not values:
22545
+ # No valid picks: leave the color list empty and skip normalization
22546
+ self.colors = []
22547
+ return
21347
22548
 
21348
22549
  # Normalize the values to the range [0, 1]
21349
22550
  min_val = min(values)
@@ -21775,8 +22976,171 @@ Failed to match: {skipped_no_trace_found}"""
21775
22976
 
21776
22977
  return picks_matched, len(sources_with_picks), max_pick_time
21777
22978
 
22979
+ def _loadPicksFromLST(self, lst_file, verbose=False):
22980
+ """
22981
+ Load picks from a Rayfract .LST file.
22982
+
22983
+ Parameters
22984
+ ----------
22985
+ lst_file : str
22986
+ Path to .LST file
22987
+ verbose : bool
22988
+ Print debug information
22989
+
22990
+ Returns
22991
+ -------
22992
+ tuple
22993
+ (n_picks_in_file, n_matched_picks)
22994
+ """
22995
+ from .pick_io import read_lst_file, match_lst_picks_to_geometry
22996
+
22997
+ try:
22998
+ # Read LST file
22999
+ lst_data = read_lst_file(lst_file, verbose=verbose)
23000
+
23001
+ if lst_data['n_picks'] == 0:
23002
+ QMessageBox.warning(self, "No Picks", f"No valid picks found in {os.path.basename(lst_file)}")
23003
+ return 0, 0
23004
+
23005
+ # Calculate position scaling factor
23006
+ # Compare median geophone spacing in LST to mean_dg in seismic data
23007
+ position_scale = 1.0
23008
+
23009
+ # Calculate median spacing from LST positions
23010
+ all_positions = [pos for _, _, pos, _ in lst_data['picks']]
23011
+ if len(all_positions) > 1:
23012
+ unique_positions = sorted(set(all_positions))
23013
+ if len(unique_positions) > 1:
23014
+ spacings = [unique_positions[i+1] - unique_positions[i]
23015
+ for i in range(len(unique_positions)-1)]
23016
+ median_spacing_lst = np.median(spacings)
23017
+
23018
+ # Use self.mean_dg from seismic data
23019
+ if hasattr(self, 'mean_dg') and self.mean_dg is not None and self.mean_dg > 0:
23020
+ if median_spacing_lst > 0:
23021
+ position_scale = self.mean_dg / median_spacing_lst
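+ # e.g. LST positions given as station numbers 1 unit apart with a
+ # 2 m mean geophone spacing in the loaded data give position_scale = 2.0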
23022
+
23023
+ # Match picks to current geometry
23024
+ matched = match_lst_picks_to_geometry(
23025
+ lst_data,
23026
+ self.trace_position,
23027
+ self.source_position,
23028
+ shot_number_offset=0, # Could be made configurable via dialog
23029
+ position_tolerance=0.1, # Could be made configurable via dialog
23030
+ position_scale=position_scale,
23031
+ verbose=False
23032
+ )
23033
+
23034
+ # Apply matched picks
23035
+ for i, (pick_list, error_list) in enumerate(zip(matched['picks'], matched['errors'])):
23036
+ if self.picks[i] is None:
23037
+ self.picks[i] = [np.nan] * len(pick_list)
23038
+ if self.error[i] is None:
23039
+ self.error[i] = [0.001] * len(error_list)
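+ # Unpicked traces default to NaN picks and a 0.001 s (1 ms) error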
23040
+
23041
+ for j, (pick, error) in enumerate(zip(pick_list, error_list)):
23042
+ if not np.isnan(pick):
23043
+ self.picks[i][j] = pick
23044
+ self.error[i][j] = error
23045
+
23046
+ # Mark picks as modified
23047
+ self.picks_modified = True
23048
+
23049
+ # Trigger pick update
23050
+ self.update_pick_flag = True
23051
+
23052
+ # Only show warning if no picks were matched
23053
+ if matched['n_matched'] == 0:
23054
+ QMessageBox.warning(self, "No Matches",
23055
+ f"Could not match any picks from {os.path.basename(lst_file)} to current geometry.\n\n"
23056
+ f"File contains {matched['n_total']} picks but none matched.")
23057
+
23058
+ return lst_data['n_picks'], matched['n_matched']
23059
+
23060
+ except Exception as e:
23061
+ QMessageBox.critical(self, "Error Loading LST File",
23062
+ f"Error reading {os.path.basename(lst_file)}:\n\n{str(e)}")
23063
+ if verbose:
23064
+ import traceback
23065
+ traceback.print_exc()
23066
+ return 0, 0
23067
+
23068
+ def _loadPicksFromVS(self, vs_file, verbose=False):
23069
+ """
23070
+ Load picks from a SeisImager/PickWin .vs file.
23071
+
23072
+ Parameters
23073
+ ----------
23074
+ vs_file : str
23075
+ Path to .vs file
23076
+ verbose : bool
23077
+ Print debug information
23078
+
23079
+ Returns
23080
+ -------
23081
+ tuple
23082
+ (n_picks_in_file, n_matched_picks)
23083
+ """
23084
+ from .pick_io import read_vs_file, match_vs_picks_to_geometry
23085
+
23086
+ try:
23087
+ # Read VS file
23088
+ vs_data = read_vs_file(vs_file, verbose=verbose)
23089
+
23090
+ if vs_data['n_picks'] == 0:
23091
+ QMessageBox.warning(self, "No Picks", f"No valid picks found in {os.path.basename(vs_file)}")
23092
+ return 0, 0
23093
+
23094
+ # Calculate position scaling factor
23095
+ # VS files use absolute positions for shots and geophones
23096
+ position_scale = 1.0
23097
+
23098
+ # Match picks to current geometry
23099
+ matched = match_vs_picks_to_geometry(
23100
+ vs_data,
23101
+ self.trace_position,
23102
+ self.source_position,
23103
+ offset_tolerance=0.5, # Could be made configurable via dialog
23104
+ position_scale=position_scale,
23105
+ verbose=False
23106
+ )
23107
+
23108
+ # Apply matched picks
23109
+ for i, (pick_list, error_list) in enumerate(zip(matched['picks'], matched['errors'])):
23110
+ if self.picks[i] is None:
23111
+ self.picks[i] = [np.nan] * len(pick_list)
23112
+ if self.error[i] is None:
23113
+ self.error[i] = [0.001] * len(error_list)
23114
+
23115
+ for j, (pick, error) in enumerate(zip(pick_list, error_list)):
23116
+ if not np.isnan(pick):
23117
+ self.picks[i][j] = pick
23118
+ self.error[i][j] = error
23119
+
23120
+ # Mark picks as modified
23121
+ self.picks_modified = True
23122
+
23123
+ # Trigger pick update
23124
+ self.update_pick_flag = True
23125
+
23126
+ # Only show warning if no picks were matched
23127
+ if matched['n_matched'] == 0:
23128
+ QMessageBox.warning(self, "No Matches",
23129
+ f"Could not match any picks from {os.path.basename(vs_file)} to current geometry.\n\n"
23130
+ f"File contains {matched['n_total']} picks but none matched.")
23131
+
23132
+ return vs_data['n_picks'], matched['n_matched']
23133
+
23134
+ except Exception as e:
23135
+ QMessageBox.critical(self, "Error Loading VS File",
23136
+ f"Error reading {os.path.basename(vs_file)}:\n\n{str(e)}")
23137
+ if verbose:
23138
+ import traceback
23139
+ traceback.print_exc()
23140
+ return 0, 0
23141
+
21778
23142
  def loadPicks(self, fname=None, verbose=False):
21779
- # Load picks from single or multiple pygimli .sgt files
23143
+ # Load picks from .sgt (PyGimli), .LST (Rayfract), or .vs (PickWin) files
21780
23144
 
21781
23145
  # Check if seismic data is loaded first
21782
23146
  if not self.streams:
@@ -21786,7 +23150,7 @@ Failed to match: {skipped_no_trace_found}"""
21786
23150
  # The first argument returned is the filename(s) and path(s)
21787
23151
  if fname is None or not fname:
21788
23152
  fnames, _ = QFileDialog.getOpenFileNames(
21789
- self, 'Open file(s)', filter='Source-Geophone-Time file (*.sgt)')
23153
+ self, 'Open file(s)', filter='Pick files (*.sgt *.LST *.lst *.vs);;SGT files (*.sgt);;LST files (*.LST *.lst);;VS files (*.vs)')
21790
23154
  else:
21791
23155
  # If fname is provided, ensure it's a list for consistent processing
21792
23156
  if isinstance(fname, str):
@@ -21797,7 +23161,7 @@ Failed to match: {skipped_no_trace_found}"""
21797
23161
  if fnames:
21798
23162
  # Initialize counters for all files
21799
23163
  total_picks_loaded = 0
21800
- total_picks_in_files = 0 # Track total picks across all SGT files
23164
+ total_picks_in_files = 0 # Track total picks across all files
21801
23165
  total_sources_loaded = 0
21802
23166
  overall_max_picked_time = 0
21803
23167
 
@@ -21806,6 +23170,28 @@ Failed to match: {skipped_no_trace_found}"""
21806
23170
  if verbose:
21807
23171
  print(f"\nProcessing file {file_idx + 1}/{len(fnames)}: {fname}")
21808
23172
 
23173
+ # Detect file type by extension
23174
+ file_ext = fname.lower().split('.')[-1]
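+ # lower() makes the comparison case-insensitive, so .LST and .lst both
+ # reach the 'lst' branch below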
23175
+
23176
+ if file_ext == 'lst':
23177
+ # Load LST (Rayfract) file
23178
+ n_picks_file, n_matched = self._loadPicksFromLST(fname, verbose=verbose)
23179
+ total_picks_in_files += n_picks_file
23180
+ total_picks_loaded += n_matched
23181
+ if n_matched > 0:
23182
+ total_sources_loaded += 1
23183
+ continue # Skip to next file
23184
+
23185
+ if file_ext == 'vs':
23186
+ # Load VS (PickWin/SeisImager) file
23187
+ n_picks_file, n_matched = self._loadPicksFromVS(fname, verbose=verbose)
23188
+ total_picks_in_files += n_picks_file
23189
+ total_picks_loaded += n_matched
23190
+ if n_matched > 0:
23191
+ total_sources_loaded += 1
23192
+ continue # Skip to next file
23193
+
23194
+ # Default: Load SGT (PyGimli) file
21809
23195
  with open(fname, 'r') as f:
21810
23196
  # Read number of stations
21811
23197
  n_stations = int(f.readline().split('#')[0].strip())