pyckster 26.2.1__py3-none-any.whl → 26.2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyckster/__init__.py +1 -1
- pyckster/core.py +950 -320
- pyckster/obspy_utils.py +337 -1
- pyckster/pick_io.py +0 -56
- {pyckster-26.2.1.dist-info → pyckster-26.2.2.dist-info}/METADATA +1 -1
- {pyckster-26.2.1.dist-info → pyckster-26.2.2.dist-info}/RECORD +10 -10
- {pyckster-26.2.1.dist-info → pyckster-26.2.2.dist-info}/WHEEL +0 -0
- {pyckster-26.2.1.dist-info → pyckster-26.2.2.dist-info}/entry_points.txt +0 -0
- {pyckster-26.2.1.dist-info → pyckster-26.2.2.dist-info}/licenses/LICENCE +0 -0
- {pyckster-26.2.1.dist-info → pyckster-26.2.2.dist-info}/top_level.txt +0 -0
pyckster/obspy_utils.py
CHANGED

@@ -594,4 +594,340 @@ def assisted_picking(trace_data, time_array, y_pick, smoothing_window_size, devi
     for j in range(pick_index, min(pick_index + picking_window_size, len(smoothed_trace_data))):
         if np.abs(smoothed_trace_data[j] - mean_window) > threshold:
             return time_array[j]
-    return y_pick
+    return y_pick
+
+
+def export_shot_to_ascii(stream, shot_index, output_dir, archive=True):
+    """
+    Export a single shot gather to ASCII files.
+
+    Creates separate ASCII files for:
+    - Trace data matrix (time x traces)
+    - Trace positions (receiver coordinates)
+    - Time array
+    - Shot metadata (source position, parameters)
+
+    Optionally bundles files into a tar.gz archive.
+
+    Parameters
+    ----------
+    stream : obspy.Stream
+        Stream containing all traces for the shot
+    shot_index : int
+        Shot/FFID number for naming
+    output_dir : str
+        Directory where files will be saved
+    archive : bool, optional
+        If True, bundle all files into a .tar.gz archive. Default is True.
+
+    Returns
+    -------
+    str
+        Path to the created archive (if archive=True) or directory (if archive=False)
+    """
+    import os
+    import tarfile
+    from datetime import datetime
+
+    # Create output directory if needed
+    os.makedirs(output_dir, exist_ok=True)
+
+    # Base name for all files
+    base_name = f"shot_{shot_index:04d}"
+
+    # Extract data from stream
+    n_traces = len(stream)
+    if n_traces == 0:
+        raise ValueError("Stream is empty")
+
+    # Get time array from first trace
+    tr = stream[0]
+    n_samples = tr.stats.npts
+    dt = tr.stats.delta
+    time_array = np.arange(n_samples) * dt
+
+    # Initialize arrays
+    data_matrix = np.zeros((n_samples, n_traces))
+    trace_positions = np.zeros(n_traces)
+    trace_elevations = np.zeros(n_traces)
+
+    # Extract source position from first trace
+    source_x = 0.0
+    source_y = 0.0
+    source_z = 0.0
+
+    # Determine format
+    input_format = check_format(stream)
+
+    for i, tr in enumerate(stream):
+        # Fill data matrix
+        data_matrix[:, i] = tr.data
+
+        # Get coordinate scalar to properly interpret positions
+        try:
+            coord_scalar = tr.stats[input_format].trace_header.scalar_to_be_applied_to_all_coordinates
+            if coord_scalar < 0:
+                coord_scalar = -1.0 / coord_scalar
+            elif coord_scalar == 0:
+                coord_scalar = 1.0
+        except (AttributeError, KeyError):
+            coord_scalar = 1.0
+
+        # Get elevation scalar
+        try:
+            elev_scalar = tr.stats[input_format].trace_header.scalar_to_be_applied_to_all_elevations_and_depths
+            if elev_scalar < 0:
+                elev_scalar = -1.0 / elev_scalar
+            elif elev_scalar == 0:
+                elev_scalar = 1.0
+        except (AttributeError, KeyError):
+            elev_scalar = coord_scalar
+
+        # Get trace position and elevation
+        try:
+            trace_positions[i] = tr.stats[input_format].trace_header.group_coordinate_x * coord_scalar
+        except (AttributeError, KeyError):
+            trace_positions[i] = float(i)
+
+        try:
+            trace_elevations[i] = tr.stats[input_format].trace_header.receiver_group_elevation * elev_scalar
+        except (AttributeError, KeyError):
+            trace_elevations[i] = 0.0
+
+        # Extract source info from first trace
+        if i == 0:
+            try:
+                source_x = tr.stats[input_format].trace_header.source_coordinate_x * coord_scalar
+            except (AttributeError, KeyError):
+                source_x = 0.0
+
+            try:
+                source_y = tr.stats[input_format].trace_header.source_coordinate_y * coord_scalar
+            except (AttributeError, KeyError):
+                source_y = 0.0
+
+            try:
+                source_z = tr.stats[input_format].trace_header.surface_elevation_at_source * elev_scalar
+            except (AttributeError, KeyError):
+                source_z = 0.0
+
+    # File paths
+    data_file = os.path.join(output_dir, f"{base_name}_data.txt")
+    time_file = os.path.join(output_dir, f"{base_name}_time.txt")
+    positions_file = os.path.join(output_dir, f"{base_name}_receivers.txt")
+    metadata_file = os.path.join(output_dir, f"{base_name}_metadata.txt")
+
+    # Save data matrix (one column per trace)
+    np.savetxt(data_file, data_matrix, fmt='%.6e', delimiter='\t',
+               header=f'Trace data for shot {shot_index}\nRows: time samples, Columns: traces\n' +
+                      f'Sampling interval: {dt} s, Number of samples: {n_samples}, Number of traces: {n_traces}',
+               comments='# ')
+
+    # Save time array
+    np.savetxt(time_file, time_array, fmt='%.6f',
+               header=f'Time array (s) for shot {shot_index}\nSampling interval: {dt} s',
+               comments='# ')
+
+    # Save receiver positions
+    receiver_data = np.column_stack((trace_positions, trace_elevations))
+    np.savetxt(positions_file, receiver_data, fmt='%.6f', delimiter='\t',
+               header='Receiver positions\nColumn 1: X coordinate (m)\nColumn 2: Elevation (m)',
+               comments='# ')
+
+    # Save metadata
+    with open(metadata_file, 'w') as f:
+        f.write(f"# Shot metadata for shot {shot_index}\n")
+        f.write(f"# Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n\n")
+        f.write(f"shot_number = {shot_index}\n")
+        f.write(f"source_x = {source_x:.6f} # m\n")
+        f.write(f"source_y = {source_y:.6f} # m\n")
+        f.write(f"source_z = {source_z:.6f} # m (elevation)\n")
+        f.write(f"n_traces = {n_traces}\n")
+        f.write(f"n_samples = {n_samples}\n")
+        f.write(f"sampling_rate = {1.0/dt:.1f} # Hz\n")
+        f.write(f"sampling_interval = {dt:.6f} # s\n")
+        f.write(f"record_length = {time_array[-1]:.6f} # s\n")
+        f.write(f"\n# Data files:\n")
+        f.write(f"# - {base_name}_data.txt: Trace amplitudes (time x traces matrix)\n")
+        f.write(f"# - {base_name}_time.txt: Time array (s)\n")
+        f.write(f"# - {base_name}_receivers.txt: Receiver positions (x, elevation)\n")
+
+    # Create archive if requested
+    if archive:
+        archive_path = os.path.join(output_dir, f"{base_name}.tar.gz")
+        with tarfile.open(archive_path, "w:gz") as tar:
+            tar.add(data_file, arcname=f"{base_name}_data.txt")
+            tar.add(time_file, arcname=f"{base_name}_time.txt")
+            tar.add(positions_file, arcname=f"{base_name}_receivers.txt")
+            tar.add(metadata_file, arcname=f"{base_name}_metadata.txt")
+
+        # Remove individual files
+        os.remove(data_file)
+        os.remove(time_file)
+        os.remove(positions_file)
+        os.remove(metadata_file)
+
+        return archive_path
+    else:
+        return output_dir
+
+
+def import_shot_from_ascii(archive_path):
+    """
+    Import a shot gather from an ASCII archive.
+
+    Reads an ASCII archive created by export_shot_to_ascii and converts it
+    back to an obspy Stream object.
+
+    Parameters
+    ----------
+    archive_path : str
+        Path to the .tar.gz or .zip archive
+
+    Returns
+    -------
+    obspy.Stream
+        Stream containing the reconstructed shot gather
+    dict
+        Metadata dictionary containing shot information
+    """
+    import os
+    import tarfile
+    import zipfile
+    import tempfile
+    import shutil
+
+    # Create temporary directory
+    temp_dir = tempfile.mkdtemp()
+
+    try:
+        # Extract archive
+        if archive_path.endswith('.tar.gz') or archive_path.endswith('.tgz'):
+            with tarfile.open(archive_path, 'r:gz') as tar:
+                tar.extractall(temp_dir)
+        elif archive_path.endswith('.zip'):
+            with zipfile.ZipFile(archive_path, 'r') as zipf:
+                zipf.extractall(temp_dir)
+        else:
+            raise ValueError("Archive must be .tar.gz or .zip format")
+
+        # Find the files (they should all have the same base name)
+        files = os.listdir(temp_dir)
+        data_file = None
+        time_file = None
+        receivers_file = None
+        metadata_file = None
+
+        for f in files:
+            if f.endswith('_data.txt'):
+                data_file = os.path.join(temp_dir, f)
+            elif f.endswith('_time.txt'):
+                time_file = os.path.join(temp_dir, f)
+            elif f.endswith('_receivers.txt'):
+                receivers_file = os.path.join(temp_dir, f)
+            elif f.endswith('_metadata.txt'):
+                metadata_file = os.path.join(temp_dir, f)
+
+        if not all([data_file, time_file, receivers_file, metadata_file]):
+            raise ValueError("Archive is missing required files")
+
+        # Read data with explicit dtype and comment handling
+        data_matrix = np.loadtxt(data_file, dtype=np.float64, comments='#')
+        time_array = np.loadtxt(time_file, dtype=np.float64, comments='#')
+        receivers = np.loadtxt(receivers_file, dtype=np.float64, comments='#')
+
+        # Ensure receivers is 2D (handles single trace case)
+        if receivers.ndim == 1:
+            receivers = receivers.reshape(1, -1)
+
+        # Ensure data_matrix is 2D (handles single trace case)
+        if data_matrix.ndim == 1:
+            data_matrix = data_matrix.reshape(-1, 1)
+
+        # Read metadata
+        metadata = {}
+        with open(metadata_file, 'r') as f:
+            for line in f:
+                if '=' in line and not line.startswith('#'):
+                    key, value = line.split('=', 1)
+                    value = value.split('#')[0].strip()
+                    try:
+                        metadata[key.strip()] = float(value)
+                    except ValueError:
+                        metadata[key.strip()] = value
+
+        # Create obspy stream
+        stream = obspy.Stream()
+
+        n_samples, n_traces = data_matrix.shape
+        dt = float(metadata.get('sampling_interval', time_array[1] - time_array[0]))
+        shot_number = int(metadata.get('shot_number', 1))
+
+        for i in range(n_traces):
+            # Create trace with float32 dtype for SEGY compatibility
+            trace = obspy.Trace(data=data_matrix[:, i].astype(np.float32))
+
+            # Set basic stats
+            trace.stats.delta = dt
+            trace.stats.npts = n_samples
+            trace.stats.sampling_rate = 1.0 / dt
+
+            # Add SU headers (default format)
+            trace.stats.su = obspy.core.util.AttribDict()
+            trace.stats.su.trace_header = obspy.core.util.AttribDict()
+
+            # Set trace headers
+            trace.stats.su.trace_header.trace_sequence_number_within_line = i + 1
+            trace.stats.su.trace_header.trace_sequence_number_within_segy_file = i + 1
+            trace.stats.su.trace_header.original_field_record_number = shot_number
+            trace.stats.su.trace_header.trace_number_within_the_original_field_record = i + 1
+
+            # Set coordinates - use appropriate scalar to keep values within int16 range (-32768 to 32767)
+            # Calculate scalar based on max coordinate value
+            receiver_x = receivers[i, 0]
+            receiver_z = receivers[i, 1] if receivers.shape[1] > 1 else 0.0
+            source_x = metadata.get('source_x', 0.0)
+            source_y = metadata.get('source_y', 0.0)
+            source_z = metadata.get('source_z', 0.0)
+
+            # Determine appropriate scalar to fit values in int16 range
+            max_coord = max(abs(receiver_x), abs(receiver_z), abs(source_x), abs(source_y), abs(source_z))
+            if max_coord > 0:
+                # Find scalar that keeps max_coord * scalar < 32000 (leave some margin)
+                if max_coord * 1000 < 32000:
+                    coord_scalar = -1000  # mm precision
+                elif max_coord * 100 < 32000:
+                    coord_scalar = -100  # cm precision
+                elif max_coord * 10 < 32000:
+                    coord_scalar = -10  # dm precision
+                else:
+                    coord_scalar = -1  # m precision
+            else:
+                coord_scalar = -1000
+
+            trace.stats.su.trace_header.group_coordinate_x = int(receiver_x * abs(coord_scalar))
+            trace.stats.su.trace_header.group_coordinate_y = 0
+            trace.stats.su.trace_header.receiver_group_elevation = int(receiver_z * abs(coord_scalar))
+
+            trace.stats.su.trace_header.source_coordinate_x = int(source_x * abs(coord_scalar))
+            trace.stats.su.trace_header.source_coordinate_y = int(source_y * abs(coord_scalar))
+            trace.stats.su.trace_header.surface_elevation_at_source = int(source_z * abs(coord_scalar))
+
+            trace.stats.su.trace_header.scalar_to_be_applied_to_all_coordinates = coord_scalar
+            trace.stats.su.trace_header.scalar_to_be_applied_to_all_elevations_and_depths = coord_scalar
+
+            # Calculate offset (apply scalar)
+            offset = receiver_x - source_x
+            trace.stats.su.trace_header.distance_from_center_of_the_source_point_to_the_center_of_the_receiver_group = int(offset * abs(coord_scalar))
+
+            # Set sampling info
+            trace.stats.su.trace_header.number_of_samples_in_this_trace = n_samples
+            trace.stats.su.trace_header.sample_interval_in_microseconds = int(dt * 1e6)
+
+            stream.append(trace)
+
+        return stream, metadata
+
+    finally:
+        # Clean up temp directory
+        shutil.rmtree(temp_dir)
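The two helpers added above are inverses of each other, so a round trip through the ASCII archive should reproduce the shot gather up to the '%.6e' text formatting and the float32 cast applied on import. A minimal usage sketch, not from the package itself: the file name example_shot.su and the obspy.read call are illustrative, and only the function signatures and return values visible in this diff are relied on.

import numpy as np
import obspy

from pyckster.obspy_utils import export_shot_to_ascii, import_shot_from_ascii

# Read one shot gather ('example_shot.su' is a placeholder path).
stream = obspy.read("example_shot.su", format="SU")

# Export: writes shot_0001_{data,time,receivers,metadata}.txt into ./ascii_out,
# bundles them into shot_0001.tar.gz, and removes the loose files.
archive_path = export_shot_to_ascii(stream, shot_index=1, output_dir="ascii_out", archive=True)

# Import: extracts to a temp dir, rebuilds an obspy.Stream with SU headers,
# and returns the metadata parsed from shot_0001_metadata.txt.
restored, metadata = import_shot_from_ascii(archive_path)

# Amplitudes survive the round trip up to the ASCII formatting precision.
np.testing.assert_allclose(stream[0].data.astype(np.float32), restored[0].data, rtol=1e-5)

# Header coordinates are stored as integers scaled by |coord_scalar|, with a
# negative scalar meaning "divide on read" (the SEG-Y convention the code
# follows in both directions), so positions come back at mm/cm/dm/m precision
# depending on their magnitude.
print(metadata["source_x"], restored[0].stats.su.trace_header.scalar_to_be_applied_to_all_coordinates)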
pyckster/pick_io.py
CHANGED

@@ -544,8 +544,6 @@ def read_vs_file(vs_file, verbose=False):
         parts = lines[1].strip().split()
         n_shots = int(parts[1])
         spacing = float(parts[2])
-        if verbose:
-            print(f"Header: {n_shots} shots, spacing {spacing}m")
     except (ValueError, IndexError) as e:
         if verbose:
             print(f"Error parsing header line 2: {e}")
@@ -556,15 +554,6 @@ def read_vs_file(vs_file, verbose=False):
     line_idx = 2
     shot_count = 0

-    if verbose:
-        print(f"\n=== VS FILE PARSING DEBUG ===")
-        print(f"Starting to parse shots from line {line_idx}")
-        print(f"Expected {n_shots} shots")
-        print(f"First 15 lines from file:")
-        for i in range(min(15, len(lines))):
-            print(f"  Line {i}: {lines[i].strip()}")
-        print(f"=== END DEBUG ===\n")
-
     while line_idx < len(lines) and shot_count < n_shots:
         line = lines[line_idx].strip()

@@ -683,21 +672,6 @@ def match_vs_picks_to_geometry(vs_picks_data, trace_positions, source_positions,
     unmatched_shots = set()
     unmatched_details = {}

-    if verbose:
-        print(f"\nMatching {n_total} VS picks to geometry...")
-        print(f"Dataset has {n_sources} sources")
-        print(f"VS file has {len(vs_picks_data['shots'])} shots")
-        print(f"Position scale factor: {position_scale}")
-        print(f"Offset tolerance: {offset_tolerance}m")
-
-        # Show first few source positions
-        print(f"\nFirst 5 source positions: {source_positions[:5]}")
-        print(f"First 5 VS shot positions (scaled): {[s*position_scale for s in vs_picks_data['shots'][:5]]}")
-
-        # Show first trace positions for first source
-        if trace_positions[0] is not None:
-            print(f"First source trace positions (first 5): {trace_positions[0][:5]}")
-
     for shot_position, geophone_position, time in vs_picks_data['picks']:
         # Apply position scaling
         scaled_shot_position = shot_position * position_scale
@@ -831,14 +805,8 @@ def read_lst_file(lst_file, verbose=False):

         except (ValueError, IndexError):
             # Not a data line, skip
-            if verbose and 'First breaks' not in line and 'shot no' not in line:
-                print(f"Skipping non-data line: {line}")
             continue

-    if verbose:
-        print(f"Read {len(picks)} valid picks from {len(shots)} shots")
-        print(f"Shot numbers: {sorted(shots)}")
-
     return {
         'picks': picks,
         'n_picks': len(picks),
@@ -899,14 +867,6 @@ def match_lst_picks_to_geometry(lst_picks_data, trace_positions, source_position
     unmatched_positions = []
     unmatched_shot_details = {}

-    if verbose:
-        print(f"\nMatching {n_total} LST picks to geometry...")
-        print(f"Dataset has {n_sources} sources")
-        print(f"LST file has {len(lst_picks_data['shots'])} unique shots")
-        print(f"Shot number offset: {shot_number_offset}")
-        print(f"Position scale factor: {position_scale}")
-        print(f"Position tolerance: {position_tolerance}m")
-
     for shot_no, trace_no, position, time in lst_picks_data['picks']:
         # Convert shot number to source index (0-based)
         source_idx = shot_no - 1 + shot_number_offset
@@ -954,22 +914,6 @@ def match_lst_picks_to_geometry(lst_picks_data, trace_positions, source_position
         else:
             unmatched_shot_details[shot_no] = "no traces available"

-    if verbose:
-        print(f"\nMatched {n_matched}/{n_total} picks ({n_matched*100.0/n_total:.1f}%)")
-        if unmatched_shots:
-            print(f"\nUnmatched shots: {len(unmatched_shots)}")
-            for shot in sorted(unmatched_shots)[:10]:
-                reason = unmatched_shot_details.get(shot, "unknown")
-                print(f"  Shot {shot}: {reason}")
-            if len(unmatched_shots) > 10:
-                print(f"  ... and {len(unmatched_shots)-10} more")
-
-        if unmatched_positions[:20]:
-            print(f"\nSample unmatched positions (first 20):")
-            for shot_no, position in unmatched_positions[:20]:
-                reason = unmatched_shot_details.get(shot_no, "unknown")
-                print(f"  Shot {shot_no}, pos {position:.2f}m: {reason}")
-
     return {
         'picks': matched_picks,
         'errors': matched_errors,
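The hunks above only remove verbose debug printing; the parsing and geometry-matching logic is unchanged. A minimal sketch of reading an LST pick file, with 'picks.lst' as a placeholder path and relying only on the return keys and pick-tuple layout visible in this diff:

from pyckster.pick_io import read_lst_file

# As of 26.2.2, verbose=True no longer prints the per-line
# "Skipping non-data line" messages or the final pick summary.
lst_data = read_lst_file("picks.lst", verbose=True)

print(f"{lst_data['n_picks']} picks read")
# Each pick is a (shot_no, trace_no, position, time) tuple.
for shot_no, trace_no, position, time in lst_data['picks'][:5]:
    print(f"shot {shot_no}, trace {trace_no}, x = {position} m, t = {time} s")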
{pyckster-26.2.1.dist-info → pyckster-26.2.2.dist-info}/RECORD
CHANGED

@@ -1,25 +1,25 @@
-pyckster/__init__.py,sha256=
+pyckster/__init__.py,sha256=LO70RUeDFgSj8VdFhawOny6CVOPFksmbZ18sxkVpTBg,905
 pyckster/__main__.py,sha256=zv3AGVKorKo2tgWOEIcVnkDbp15eepSqka3IoWH_adU,406
 pyckster/auto_picking.py,sha256=fyZiOj0Ib-SB_oxsKnUszECHbOjo4JE23JVQILGYZco,12754
 pyckster/bayesian_inversion.py,sha256=kdnKOlAZ0JlYLipuFDHlwS7dU8LtI-0aMb90bYpEHhE,163523
-pyckster/core.py,sha256=
+pyckster/core.py,sha256=0D3vDbpaToJ-RuNvpL7C4uGPLlrQaPvxb2xBuOTFxsI,1240456
 pyckster/dispersion_stack_viewer.py,sha256=7Dh2e1tSct062D7Qh6nNrMdJcqKWcJvDIv84V8sC6C8,12645
 pyckster/inversion_app.py,sha256=ovM44oYBFsvfKxO7rjjThUhkJnLDLZZ0R6ZVp-5r66E,60676
 pyckster/inversion_manager.py,sha256=P8i1fqUJKMWkd-9PoDmNtmQuKglGKTeSuptUUA57D-8,15393
 pyckster/inversion_visualizer.py,sha256=vfKZIoJzKawbaEv29NsYYIGnWLDQCGef5bM2vY1aCBo,22135
 pyckster/ipython_console.py,sha256=tZyyoiXCjCl7ozxOj_h-YR4eGjoC4kpKe7nZ48eUAJc,9313
 pyckster/mpl_export.py,sha256=_WqPo9l9ABiSoU0ukLfm4caGV1-FKKbXjt8SoBHTR30,12346
-pyckster/obspy_utils.py,sha256=
-pyckster/pick_io.py,sha256
+pyckster/obspy_utils.py,sha256=wm74oyLvvQmF97_ySXPZD95ya-8Aer-pLyVYlA3QWoM,35420
+pyckster/pick_io.py,sha256=r7QoRCA2zaGeZlFKuAJ86KnAH_mh_l4qGvP02Q0mwVA,36001
 pyckster/pyqtgraph_utils.py,sha256=PAeE3n_wz7skHOC5eLnkFczbie7diVH1xvuL8jtJ4T8,6049
 pyckster/surface_wave_analysis.py,sha256=97BrDA-n5AZp89NdxQ2ekZPaCErMc7v8C6GmD5KTi-4,102695
 pyckster/surface_wave_profiling.py,sha256=L9KidhKmfGvVoPZjf6us3c49VB7VPB_VcsDqRx45OYI,315401
 pyckster/sw_utils.py,sha256=-2CpQ9BkmUHaMBrNy2qXx1R-g9qPX8D9igKi_G-iRHE,13213
 pyckster/tab_factory.py,sha256=NlCIC6F8BrEu7a8BYOJJdWy5ftpX_zKDLj7SbcwBbh8,14519
 pyckster/visualization_utils.py,sha256=bgODn21NAQx1FOMPj91kdDd0szKOgUyfZ3cQlyu2PF8,47947
-pyckster-26.2.
-pyckster-26.2.
-pyckster-26.2.
-pyckster-26.2.
-pyckster-26.2.
-pyckster-26.2.
+pyckster-26.2.2.dist-info/licenses/LICENCE,sha256=-uaAIm20JrJKoMdCdn2GlFQfNU4fbsHWK3eh4kIQ_Ec,35143
+pyckster-26.2.2.dist-info/METADATA,sha256=mxaCepOXltujMuxOYLLnG-n4nehgEGZDfd38G8h-O8c,4567
+pyckster-26.2.2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+pyckster-26.2.2.dist-info/entry_points.txt,sha256=yrOQx1wHi84rbxX_ZYtYaVcK3EeuRhHRQDZRc8mB0NI,100
+pyckster-26.2.2.dist-info/top_level.txt,sha256=eaihhwhEmlysgdZE4HmELFdSUwlXcMv90YorkjOXujQ,9
+pyckster-26.2.2.dist-info/RECORD,,
Files without changes:
- {pyckster-26.2.1.dist-info → pyckster-26.2.2.dist-info}/WHEEL
- {pyckster-26.2.1.dist-info → pyckster-26.2.2.dist-info}/entry_points.txt
- {pyckster-26.2.1.dist-info → pyckster-26.2.2.dist-info}/licenses/LICENCE
- {pyckster-26.2.1.dist-info → pyckster-26.2.2.dist-info}/top_level.txt