pyckster-26.1.5-py3-none-any.whl → pyckster-26.2.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyckster/obspy_utils.py CHANGED
@@ -462,6 +462,45 @@ def mute_trace(stream, muted_trace):
 
     return stream
 
+def zero_pad_trace(stream, trace_number, pad_start=0, pad_end=0):
+    """
+    Zero pad a trace in a stream based on trace number.
+
+    Parameters
+    ----------
+    stream : obspy.Stream
+        Stream containing the traces.
+    trace_number : int
+        Trace number to pad.
+    pad_start : int
+        Number of zero samples to add at the beginning.
+    pad_end : int
+        Number of zero samples to add at the end.
+
+    Returns
+    -------
+    stream : obspy.Stream
+        Stream with the specified trace padded.
+    """
+
+    input_format = check_format(stream)
+
+    # Pad trace based on the trace number in header
+    for trace in stream:
+        if trace.stats[input_format].trace_header.trace_number_within_the_original_field_record == trace_number:
+            # Pad with zeros
+            if pad_start > 0 or pad_end > 0:
+                trace.data = np.pad(trace.data, (pad_start, pad_end), mode='constant', constant_values=0)
+                # Update the number of samples in the header
+                trace.stats.npts = len(trace.data)
+                if input_format == 'segy':
+                    trace.stats.segy.trace_header.number_of_samples_in_this_trace = len(trace.data)
+                elif input_format == 'su':
+                    trace.stats.su.trace_header.number_of_samples_in_this_trace = len(trace.data)
+            break
+
+    return stream
+
 def swap_header_format(stream,output_format):
 
     output_format = output_format.lower()
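
The added zero_pad_trace() follows the same pattern as the other per-trace helpers in obspy_utils.py (it relies on the module's check_format() and numpy import). A minimal usage sketch, assuming a SEG-Y shot gather read with ObsPy; the file name and pad lengths are illustrative::

    from obspy import read
    from pyckster.obspy_utils import zero_pad_trace

    # unpack_trace_headers=True is needed so that
    # trace_number_within_the_original_field_record is populated in the headers
    stream = read('shot_0001.sgy', format='SEGY', unpack_trace_headers=True)

    # Prepend 50 and append 100 zero samples to trace number 12;
    # npts and the SEG-Y sample-count header are updated accordingly
    stream = zero_pad_trace(stream, trace_number=12, pad_start=50, pad_end=100)
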
pyckster/pick_io.py CHANGED
@@ -1,12 +1,53 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 """
-Pick I/O utilities for reading and writing seismic picks in PyGimLi .sgt format.
+Pick I/O utilities for reading and writing seismic picks.
 
 This module provides functions to save and load first-arrival traveltime picks
-in the PyGimLi .sgt format, which is widely used in seismic refraction processing.
+in multiple formats:
 
-Copyright (C) 2024, 2025 Sylvain Pasquet
+**Supported Formats:**
+
+1. **PyGimLi .sgt format** (read/write)
+   - Widely used in seismic refraction processing
+   - Contains station coordinates and source-geophone-time picks
+   - Functions: read_sgt_file(), save_picks_to_sgt(), match_picks_to_geometry()
+
+2. **Rayfract .LST format** (read only)
+   - First break picks from Rayfract software
+   - Format: shot_no trace_no position time synthetic
+   - Invalid picks marked as -1.000
+   - Functions: read_lst_file(), match_lst_picks_to_geometry()
+
+**Typical Usage:**
+
+For SGT files::
+
+    from pick_io import read_sgt_file, match_picks_to_geometry
+
+    # Read SGT file
+    sgt_data = read_sgt_file('picks.sgt', verbose=True)
+
+    # Match to geometry
+    matched = match_picks_to_geometry(
+        sgt_data['stations'], sgt_data['picks'],
+        trace_positions, trace_elevations,
+        source_positions, source_elevations
+    )
+
+For LST files::
+
+    from pick_io import read_lst_file, match_lst_picks_to_geometry
+
+    # Read LST file
+    lst_data = read_lst_file('picks.LST', verbose=True)
+
+    # Match to geometry
+    matched = match_lst_picks_to_geometry(
+        lst_data, trace_positions, source_positions
+    )
+
+Copyright (C) 2024, 2025, 2026 Sylvain Pasquet
 Email: sylvain.pasquet@sorbonne-universite.fr
 
 This program is free software: you can redistribute it and/or modify
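
The module also provides SeisImager/PickWin .vs readers, read_vs_file() and match_vs_picks_to_geometry() (defined further below), which are not covered by the docstring examples. A sketch in the same style, assuming trace_positions and source_positions are already loaded from the dataset and that the file name is illustrative::

    from pick_io import read_vs_file, match_vs_picks_to_geometry

    # Read VS file
    vs_data = read_vs_file('picks.vs', verbose=True)

    # Match to geometry (positions are compared within offset_tolerance metres)
    matched = match_vs_picks_to_geometry(
        vs_data, trace_positions, source_positions,
        offset_tolerance=0.5, position_scale=1.0
    )
    picks = matched['picks']   # [source][trace] pick times, NaN where unpicked
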
@@ -453,3 +494,487 @@ def match_picks_to_geometry(sgt_stations, sgt_picks,
         'n_matched': n_matched,
         'n_total': n_total
     }
+
+
+def read_vs_file(vs_file, verbose=False):
+    """
+    Read SeisImager/PickWin .vs file and parse first break picks.
+
+    VS file format:
+    - Line 1: Header (discarded)
+    - Line 2: min_offset n_shots spacing
+    - Line 3: Header (discarded)
+    - For each shot:
+        - Shot header: shot_position n_picks 0
+        - n_picks data lines: geophone_offset time quality_flag
+
+    Parameters
+    ----------
+    vs_file : str
+        Path to .vs file
+    verbose : bool, optional
+        Print debug information, default False
+
+    Returns
+    -------
+    dict
+        Dictionary containing:
+        - 'picks': list of (shot_position, geophone_offset, time) tuples
+        - 'n_picks': int (number of valid picks)
+        - 'shots': list of shot positions
+        - 'n_shots': int (number of shots from header)
+        - 'spacing': float (spacing from header)
+    """
+    picks = []
+    shots = []
+    n_shots = 0
+    spacing = 0.0
+
+    with open(vs_file, 'r') as f:
+        lines = f.readlines()
+
+    if len(lines) < 3:
+        if verbose:
+            print(f"File too short: {len(lines)} lines")
+        return {'picks': [], 'n_picks': 0, 'shots': [], 'n_shots': 0, 'spacing': 0.0}
+
+    # Line 1: discard
+    # Line 2: 0 n_shots spacing
+    try:
+        parts = lines[1].strip().split()
+        n_shots = int(parts[1])
+        spacing = float(parts[2])
+        if verbose:
+            print(f"Header: {n_shots} shots, spacing {spacing}m")
+    except (ValueError, IndexError) as e:
+        if verbose:
+            print(f"Error parsing header line 2: {e}")
+        return {'picks': [], 'n_picks': 0, 'shots': [], 'n_shots': 0, 'spacing': 0.0}
+
+    # Start reading shot data from line 3 (index 2)
+    # No third discard line - shot data starts immediately after header
+    line_idx = 2
+    shot_count = 0
+
+    if verbose:
+        print(f"\n=== VS FILE PARSING DEBUG ===")
+        print(f"Starting to parse shots from line {line_idx}")
+        print(f"Expected {n_shots} shots")
+        print(f"First 15 lines from file:")
+        for i in range(min(15, len(lines))):
+            print(f" Line {i}: {lines[i].strip()}")
+        print(f"=== END DEBUG ===\n")
+
+    while line_idx < len(lines) and shot_count < n_shots:
+        line = lines[line_idx].strip()
+
+        if verbose and shot_count < 3:
+            print(f"\nProcessing line {line_idx}: '{line}'")
+
+        # Skip empty lines
+        if not line:
+            if verbose and shot_count < 3:
+                print(f" -> Empty line, skipping")
+            line_idx += 1
+            continue
+
+        # Check if this is a shot header line (third value equals 0)
+        parts = line.split()
+        if verbose and shot_count < 3:
+            print(f" -> Parts: {parts}, length: {len(parts)}")
+
+        if len(parts) >= 3:
+            try:
+                # Check if the third value (index 2) is 0
+                third_value = float(parts[2])
+                if verbose and shot_count < 3:
+                    print(f" -> Third value: {third_value}, is shot header: {abs(third_value) < 0.0001}")
+
+                if abs(third_value) < 0.0001:  # Close to zero (shot header)
+                    shot_position = float(parts[0])
+                    n_picks_for_shot = int(parts[1])
+                    shot_position = float(parts[0])
+                    n_picks_for_shot = int(parts[1])
+                    shots.append(shot_position)
+                    shot_count += 1
+
+                    if verbose:
+                        print(f"Shot {shot_count}: position {shot_position}m, {n_picks_for_shot} picks")
+
+                    # Read the pick lines for this shot
+                    for i in range(n_picks_for_shot):
+                        line_idx += 1
+                        if line_idx >= len(lines):
+                            break
+
+                        pick_line = lines[line_idx].strip()
+                        pick_parts = pick_line.split()
+
+                        if len(pick_parts) >= 3:
+                            try:
+                                geophone_position = float(pick_parts[0])  # Absolute geophone position
+                                time = float(pick_parts[1])
+                                # quality_flag = int(pick_parts[2])
+
+                                picks.append((shot_position, geophone_position, time))
+                            except (ValueError, IndexError):
+                                if verbose:
+                                    print(f"Error parsing pick line: {pick_line}")
+            except (ValueError, IndexError) as e:
+                if verbose:
+                    print(f"Error parsing shot header: {line}, error: {e}")
+
+        line_idx += 1
+
+    if verbose:
+        print(f"Read {len(picks)} picks from {len(shots)} shots")
+        print(f"Shot positions: {shots[:5]}..." if len(shots) > 5 else f"Shot positions: {shots}")
+
+    return {
+        'picks': picks,
+        'n_picks': len(picks),
+        'shots': shots,
+        'n_shots': n_shots,
+        'spacing': spacing
+    }
+
+
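
To make the layout that read_vs_file() parses concrete, here is a hypothetical two-shot .vs file (all values invented for illustration; shot headers are recognised by their third column being 0, and parsing starts on the line right after the n_shots/spacing line)::

    SeisImager first breaks        <- line 1, discarded
    0 2 5.0                        <- min_offset  n_shots  spacing
    0.0 3 0                        <- shot header: position  n_picks  0
    5.0 12.4 1
    10.0 18.9 1
    15.0 24.1 1
    20.0 2 0                       <- second shot header
    25.0 11.8 1
    30.0 17.2 1

For this file, read_vs_file() would report n_shots=2 and spacing=5.0 and return five (shot_position, geophone_position, time) tuples, starting with (0.0, 5.0, 12.4).
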
+def match_vs_picks_to_geometry(vs_picks_data, trace_positions, source_positions,
+                               offset_tolerance=0.5, position_scale=1.0, verbose=False):
+    """
+    Match picks from .vs file to actual trace/source geometry.
+
+    VS files store shot positions and absolute geophone positions.
+
+    Parameters
+    ----------
+    vs_picks_data : dict
+        Dictionary returned by read_vs_file()
+    trace_positions : list of arrays
+        Actual trace positions in dataset
+    source_positions : list
+        Actual source positions in dataset
+    offset_tolerance : float, optional
+        Tolerance for matching offsets in meters, default 0.5
+    position_scale : float, optional
+        Scaling factor to apply to VS positions (VS_pos * scale = actual_pos), default 1.0
+    verbose : bool, optional
+        Print debug information, default False
+
+    Returns
+    -------
+    dict
+        Dictionary containing:
+        - 'picks': 2D list [source][trace] of matched pick times (NaN where no pick)
+        - 'errors': 2D list [source][trace] of matched errors (0.001 default)
+        - 'n_matched': Number of successfully matched picks
+        - 'n_total': Total picks in VS file
+        - 'unmatched_shots': List of shot positions that couldn't be matched
+    """
+    n_sources = len(source_positions)
+    n_traces_per_source = [len(tp) if tp is not None else 0 for tp in trace_positions]
+
+    # Initialize pick and error arrays with NaN
+    matched_picks = [[np.nan] * n_traces for n_traces in n_traces_per_source]
+    matched_errors = [[0.001] * n_traces for n_traces in n_traces_per_source]
+
+    n_matched = 0
+    n_total = vs_picks_data['n_picks']
+    unmatched_shots = set()
+    unmatched_details = {}
+
+    if verbose:
+        print(f"\nMatching {n_total} VS picks to geometry...")
+        print(f"Dataset has {n_sources} sources")
+        print(f"VS file has {len(vs_picks_data['shots'])} shots")
+        print(f"Position scale factor: {position_scale}")
+        print(f"Offset tolerance: {offset_tolerance}m")
+
+        # Show first few source positions
+        print(f"\nFirst 5 source positions: {source_positions[:5]}")
+        print(f"First 5 VS shot positions (scaled): {[s*position_scale for s in vs_picks_data['shots'][:5]]}")
+
+        # Show first trace positions for first source
+        if trace_positions[0] is not None:
+            print(f"First source trace positions (first 5): {trace_positions[0][:5]}")
+
+    for shot_position, geophone_position, time in vs_picks_data['picks']:
+        # Apply position scaling
+        scaled_shot_position = shot_position * position_scale
+        scaled_geophone_position = geophone_position * position_scale
+
+        # Find matching source by position
+        matched_source = None
+        min_source_dist = float('inf')
+
+        for i_src, src_pos in enumerate(source_positions):
+            dist = abs(src_pos - scaled_shot_position)
+            if dist < min_source_dist:
+                min_source_dist = dist
+                if dist <= offset_tolerance:
+                    matched_source = i_src
+
+        if matched_source is None:
+            if shot_position not in unmatched_shots:
+                unmatched_shots.add(shot_position)  # Store original position
+            if shot_position not in unmatched_details:
+                unmatched_details[shot_position] = {
+                    'scaled_position': scaled_shot_position,
+                    'min_dist': min_source_dist,
+                    'closest_source': source_positions[np.argmin(np.abs(source_positions - scaled_shot_position))],
+                    'n_picks': 0
+                }
+            unmatched_details[shot_position]['n_picks'] += 1
+            continue
+
+        # Find matching trace by geophone position
+        if trace_positions[matched_source] is None:
+            continue
+
+        matched_trace = None
+        min_trace_dist = float('inf')
+
+        for i_tr, tr_pos in enumerate(trace_positions[matched_source]):
+            dist = abs(tr_pos - scaled_geophone_position)
+            if dist < min_trace_dist and dist <= offset_tolerance:
+                min_trace_dist = dist
+                matched_trace = i_tr
+
+        if matched_trace is not None:
+            # Convert time to seconds if in milliseconds
+            time_seconds = time / 1000.0 if time > 1.0 else time
+            matched_picks[matched_source][matched_trace] = time_seconds
+            n_matched += 1
+
+    if verbose:
+        print(f"\nMatched {n_matched}/{n_total} picks ({n_matched*100.0/n_total:.1f}%)")
+        if unmatched_shots:
+            print(f"Unmatched shots: {len(unmatched_shots)}")
+            # Show first 10 unmatched shots with details
+            for i, shot_pos in enumerate(sorted(unmatched_shots)[:10]):
+                if shot_pos in unmatched_details:
+                    details = unmatched_details[shot_pos]
+                    print(f" Shot {shot_pos} (scaled: {details['scaled_position']:.2f}m) - "
+                          f"closest source: {details['closest_source']:.2f}m, "
+                          f"distance: {details['min_dist']:.2f}m, "
+                          f"picks: {details['n_picks']}")
+            if len(unmatched_shots) > 10:
+                print(f" ... and {len(unmatched_shots) - 10} more")
+
+    return {
+        'picks': matched_picks,
+        'errors': matched_errors,
+        'n_matched': n_matched,
+        'n_total': n_total,
+        'unmatched_shots': sorted(list(unmatched_shots))
+    }
+
+
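
The matcher returns nested Python lists rather than arrays, and any time value greater than 1.0 is treated as milliseconds and converted to seconds. A small follow-up sketch (assuming numpy and the vs_data, trace_positions and source_positions objects from the earlier example)::

    import numpy as np

    result = match_vs_picks_to_geometry(vs_data, trace_positions, source_positions)

    first_shot = np.array(result['picks'][0])           # times for source 0, NaN where unmatched
    n_picked = np.count_nonzero(~np.isnan(first_shot))
    print(f"{result['n_matched']}/{result['n_total']} picks matched "
          f"({n_picked} on the first shot)")
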
+def read_lst_file(lst_file, verbose=False):
+    """
+    Read Rayfract .LST file and parse first break picks.
+
+    LST file format:
+    - Header lines (ignored until data starts)
+    - Data lines: shot_no trace pos. time synthetic
+    - Time of -1.000 indicates no pick
+
+    Parameters
+    ----------
+    lst_file : str
+        Path to .LST file
+    verbose : bool, optional
+        Print debug information, default False
+
+    Returns
+    -------
+    dict
+        Dictionary containing:
+        - 'picks': list of (shot_no, trace_no, position, time) tuples
+        - 'n_picks': int (number of valid picks, excluding -1.000)
+        - 'shots': list of unique shot numbers
+        - 'traces_per_shot': dict mapping shot_no to list of (trace_no, position, time) tuples
+    """
+    picks = []
+    traces_per_shot = {}
+    shots = []
+
+    with open(lst_file, 'r') as f:
+        # Skip header lines until we find data
+        # Data starts when we see lines matching the pattern: integer integer float float
+        for line in f:
+            line = line.strip()
+
+            # Skip empty lines and comments
+            if not line or line.startswith('#'):
+                continue
+
+            # Try to parse as data line
+            parts = line.split()
+            if len(parts) >= 4:
+                try:
+                    shot_no = int(parts[0])
+                    trace_no = int(parts[1])
+                    position = float(parts[2])
+                    time = float(parts[3])
+                    # synthetic = float(parts[4]) if len(parts) > 4 else None  # Optional synthetic time
+
+                    # Skip picks marked as invalid (-1.000)
+                    if time > 0:  # Valid pick
+                        picks.append((shot_no, trace_no, position, time))
+
+                        # Organize by shot
+                        if shot_no not in traces_per_shot:
+                            traces_per_shot[shot_no] = []
+                            shots.append(shot_no)
+                        traces_per_shot[shot_no].append((trace_no, position, time))
+
+                except (ValueError, IndexError):
+                    # Not a data line, skip
+                    if verbose and 'First breaks' not in line and 'shot no' not in line:
+                        print(f"Skipping non-data line: {line}")
+                    continue
+
+    if verbose:
+        print(f"Read {len(picks)} valid picks from {len(shots)} shots")
+        print(f"Shot numbers: {sorted(shots)}")
+
+    return {
+        'picks': picks,
+        'n_picks': len(picks),
+        'shots': sorted(shots),
+        'traces_per_shot': traces_per_shot
+    }
+
+
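
For comparison, a hypothetical .LST fragment of the kind read_lst_file() accepts (header text and values invented; a time of -1.000 marks an unpicked trace)::

    First breaks
    shot no  trace  pos.    time     synthetic
    1        1      0.00    -1.000   0.000
    1        2      5.00    12.375   12.100
    1        3     10.00    18.920   18.500
    2        1      0.00    10.250   10.400

Reading this with read_lst_file() would yield three valid picks grouped under shots 1 and 2; the -1.000 entry is parsed but not stored.
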
850
+ def match_lst_picks_to_geometry(lst_picks_data, trace_positions, source_positions,
851
+ shot_number_offset=0, position_tolerance=0.1,
852
+ position_scale=1.0, verbose=False):
853
+ """
854
+ Match picks from LST file to actual trace/source geometry.
855
+
856
+ LST files contain shot number, trace number, and position information.
857
+ This function matches based on:
858
+ 1. Shot number -> source index (with optional offset)
859
+ 2. Trace position -> actual trace position (within tolerance)
860
+
861
+ Parameters
862
+ ----------
863
+ lst_picks_data : dict
864
+ Dictionary returned by read_lst_file()
865
+ trace_positions : list of arrays
866
+ Actual trace positions in dataset
867
+ source_positions : list
868
+ Actual source positions in dataset
869
+ shot_number_offset : int, optional
870
+ Offset to apply to shot numbers (LST shot 1 = dataset shot 1+offset), default 0
871
+ position_tolerance : float, optional
872
+ Tolerance for matching trace positions in meters, default 0.1
873
+ position_scale : float, optional
874
+ Scaling factor to apply to LST positions (LST_pos * scale = actual_pos), default 1.0
875
+ verbose : bool, optional
876
+ Print debug information, default False
877
+
878
+ Returns
879
+ -------
880
+ dict
881
+ Dictionary containing:
882
+ - 'picks': 2D list [source][trace] of matched pick times (NaN where no pick)
883
+ - 'errors': 2D list [source][trace] of matched errors (0.001 default)
884
+ - 'n_matched': Number of successfully matched picks
885
+ - 'n_total': Total picks in LST file
886
+ - 'unmatched_shots': List of shot numbers that couldn't be matched
887
+ - 'unmatched_positions': List of (shot, position) that couldn't be matched
888
+ """
889
+ n_sources = len(source_positions)
890
+ n_traces_per_source = [len(tp) if tp is not None else 0 for tp in trace_positions]
891
+
892
+ # Initialize pick and error arrays with NaN
893
+ matched_picks = [[np.nan] * n_traces for n_traces in n_traces_per_source]
894
+ matched_errors = [[0.001] * n_traces for n_traces in n_traces_per_source] # Default error 1ms
895
+
896
+ n_matched = 0
897
+ n_total = lst_picks_data['n_picks']
898
+ unmatched_shots = set()
899
+ unmatched_positions = []
900
+ unmatched_shot_details = {}
901
+
902
+ if verbose:
903
+ print(f"\nMatching {n_total} LST picks to geometry...")
904
+ print(f"Dataset has {n_sources} sources")
905
+ print(f"LST file has {len(lst_picks_data['shots'])} unique shots")
906
+ print(f"Shot number offset: {shot_number_offset}")
907
+ print(f"Position scale factor: {position_scale}")
908
+ print(f"Position tolerance: {position_tolerance}m")
909
+
910
+ for shot_no, trace_no, position, time in lst_picks_data['picks']:
911
+ # Convert shot number to source index (0-based)
912
+ source_idx = shot_no - 1 + shot_number_offset
913
+
914
+ # Check if source index is valid
915
+ if source_idx < 0 or source_idx >= n_sources:
916
+ if shot_no not in unmatched_shots:
917
+ unmatched_shots.add(shot_no)
918
+ if shot_no not in unmatched_shot_details:
919
+ unmatched_shot_details[shot_no] = f"out of range [0, {n_sources-1}]"
920
+ continue
921
+
922
+ # Find matching trace by position
923
+ if trace_positions[source_idx] is None:
924
+ if shot_no not in unmatched_shot_details:
925
+ unmatched_shot_details[shot_no] = "no trace positions for this shot"
926
+ continue
927
+
928
+ # Apply position scaling
929
+ scaled_position = position * position_scale
930
+
931
+ matched_trace = None
932
+ min_distance = float('inf')
933
+
934
+ for i_tr, tr_pos in enumerate(trace_positions[source_idx]):
935
+ dist = abs(tr_pos - scaled_position)
936
+ if dist < min_distance and dist <= position_tolerance:
937
+ min_distance = dist
938
+ matched_trace = i_tr
939
+
940
+ if matched_trace is not None:
941
+ # Convert milliseconds to seconds if needed
942
+ time_seconds = time / 1000.0 if time > 1.0 else time
943
+ matched_picks[source_idx][matched_trace] = time_seconds
944
+ n_matched += 1
945
+ else:
946
+ unmatched_positions.append((shot_no, position))
947
+ # Track which traces are available for this shot
948
+ if shot_no not in unmatched_shot_details:
949
+ available_positions = trace_positions[source_idx] if trace_positions[source_idx] is not None else []
950
+ if len(available_positions) > 0:
951
+ min_avail = min(available_positions)
952
+ max_avail = max(available_positions)
953
+ unmatched_shot_details[shot_no] = f"scaled pos {scaled_position:.1f}m (LST: {position:.1f}m) not in [{min_avail:.1f}, {max_avail:.1f}]m"
954
+ else:
955
+ unmatched_shot_details[shot_no] = "no traces available"
956
+
957
+ if verbose:
958
+ print(f"\nMatched {n_matched}/{n_total} picks ({n_matched*100.0/n_total:.1f}%)")
959
+ if unmatched_shots:
960
+ print(f"\nUnmatched shots: {len(unmatched_shots)}")
961
+ for shot in sorted(unmatched_shots)[:10]:
962
+ reason = unmatched_shot_details.get(shot, "unknown")
963
+ print(f" Shot {shot}: {reason}")
964
+ if len(unmatched_shots) > 10:
965
+ print(f" ... and {len(unmatched_shots)-10} more")
966
+
967
+ if unmatched_positions[:20]:
968
+ print(f"\nSample unmatched positions (first 20):")
969
+ for shot_no, position in unmatched_positions[:20]:
970
+ reason = unmatched_shot_details.get(shot_no, "unknown")
971
+ print(f" Shot {shot_no}, pos {position:.2f}m: {reason}")
972
+
973
+ return {
974
+ 'picks': matched_picks,
975
+ 'errors': matched_errors,
976
+ 'n_matched': n_matched,
977
+ 'n_total': n_total,
978
+ 'unmatched_shots': sorted(unmatched_shots),
979
+ 'unmatched_positions': unmatched_positions
980
+ }
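
Putting the two LST helpers together; a minimal end-to-end sketch, with an illustrative file name and assuming the LST shot numbering matches the dataset ordering (otherwise adjust shot_number_offset, or position_scale if the LST positions use different units)::

    from pick_io import read_lst_file, match_lst_picks_to_geometry

    lst_data = read_lst_file('profile_01.LST', verbose=True)
    matched = match_lst_picks_to_geometry(
        lst_data, trace_positions, source_positions,
        shot_number_offset=0, position_tolerance=0.1, position_scale=1.0,
        verbose=True,
    )

    picks = matched['picks']     # [source][trace] times in seconds, NaN where unpicked
    errors = matched['errors']   # constant 0.001 s placeholder uncertainties
    if matched['unmatched_shots']:
        print('Check shot_number_offset / position_scale for shots:',
              matched['unmatched_shots'])
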
pyckster-26.1.5.dist-info/METADATA → pyckster-26.2.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pyckster
-Version: 26.1.5
+Version: 26.2.1
 Summary: A PyQt5-based GUI for the processing and analysis of active near-surface seismic data
 Home-page: https://gitlab.in2p3.fr/metis-geophysics/pyckster
 Author: Sylvain Pasquet
pyckster-26.1.5.dist-info/RECORD → pyckster-26.2.1.dist-info/RECORD CHANGED
@@ -1,25 +1,25 @@
-pyckster/__init__.py,sha256=uMDTvad1_QktTO-D059x7iGtIUeWegaW6BIdSfRIqsM,905
+pyckster/__init__.py,sha256=pRpWyxN9aZ3Nc5qVhA9bc8VURQUWwwBmecGxfD_wVws,905
 pyckster/__main__.py,sha256=zv3AGVKorKo2tgWOEIcVnkDbp15eepSqka3IoWH_adU,406
 pyckster/auto_picking.py,sha256=fyZiOj0Ib-SB_oxsKnUszECHbOjo4JE23JVQILGYZco,12754
 pyckster/bayesian_inversion.py,sha256=kdnKOlAZ0JlYLipuFDHlwS7dU8LtI-0aMb90bYpEHhE,163523
-pyckster/core.py,sha256=6q1mUorJmTJHs_eUM1Kcdz7Iw-z3X89I9ks4OnuxWUo,1146176
+pyckster/core.py,sha256=Sxyw0_ZtaDKgkGw6SarNM5_wgUgda4ishril3x0UlRA,1208902
 pyckster/dispersion_stack_viewer.py,sha256=7Dh2e1tSct062D7Qh6nNrMdJcqKWcJvDIv84V8sC6C8,12645
 pyckster/inversion_app.py,sha256=ovM44oYBFsvfKxO7rjjThUhkJnLDLZZ0R6ZVp-5r66E,60676
 pyckster/inversion_manager.py,sha256=P8i1fqUJKMWkd-9PoDmNtmQuKglGKTeSuptUUA57D-8,15393
 pyckster/inversion_visualizer.py,sha256=vfKZIoJzKawbaEv29NsYYIGnWLDQCGef5bM2vY1aCBo,22135
 pyckster/ipython_console.py,sha256=tZyyoiXCjCl7ozxOj_h-YR4eGjoC4kpKe7nZ48eUAJc,9313
 pyckster/mpl_export.py,sha256=_WqPo9l9ABiSoU0ukLfm4caGV1-FKKbXjt8SoBHTR30,12346
-pyckster/obspy_utils.py,sha256=01fNI9ryIYuiGOl4NR0J9C_xXupcnsBb1mLSz1Qo63A,20569
-pyckster/pick_io.py,sha256=1svAzh1g73zEsjHnIy1ruEOrCC-vVMQnicXim3oWFa0,18027
+pyckster/obspy_utils.py,sha256=Me8j6FHIEQgx2GH0phEcg1t7TOpGKpyLl_AwmCRnDak,21943
+pyckster/pick_io.py,sha256=-oDMR054tBep8STi9pYTIxc7qJEIPbnbTkhiWFK5Ems,38666
 pyckster/pyqtgraph_utils.py,sha256=PAeE3n_wz7skHOC5eLnkFczbie7diVH1xvuL8jtJ4T8,6049
 pyckster/surface_wave_analysis.py,sha256=97BrDA-n5AZp89NdxQ2ekZPaCErMc7v8C6GmD5KTi-4,102695
 pyckster/surface_wave_profiling.py,sha256=L9KidhKmfGvVoPZjf6us3c49VB7VPB_VcsDqRx45OYI,315401
 pyckster/sw_utils.py,sha256=-2CpQ9BkmUHaMBrNy2qXx1R-g9qPX8D9igKi_G-iRHE,13213
 pyckster/tab_factory.py,sha256=NlCIC6F8BrEu7a8BYOJJdWy5ftpX_zKDLj7SbcwBbh8,14519
 pyckster/visualization_utils.py,sha256=bgODn21NAQx1FOMPj91kdDd0szKOgUyfZ3cQlyu2PF8,47947
-pyckster-26.1.5.dist-info/licenses/LICENCE,sha256=-uaAIm20JrJKoMdCdn2GlFQfNU4fbsHWK3eh4kIQ_Ec,35143
-pyckster-26.1.5.dist-info/METADATA,sha256=gbenYr4NMbBnA1PSKibKwDnW0SP7WCfz8d_40RkFr6I,4567
-pyckster-26.1.5.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-pyckster-26.1.5.dist-info/entry_points.txt,sha256=yrOQx1wHi84rbxX_ZYtYaVcK3EeuRhHRQDZRc8mB0NI,100
-pyckster-26.1.5.dist-info/top_level.txt,sha256=eaihhwhEmlysgdZE4HmELFdSUwlXcMv90YorkjOXujQ,9
-pyckster-26.1.5.dist-info/RECORD,,
+pyckster-26.2.1.dist-info/licenses/LICENCE,sha256=-uaAIm20JrJKoMdCdn2GlFQfNU4fbsHWK3eh4kIQ_Ec,35143
+pyckster-26.2.1.dist-info/METADATA,sha256=DIHwW6-f0WWApHwzeErKYov8sJmcAloeJQjodcp0ids,4567
+pyckster-26.2.1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+pyckster-26.2.1.dist-info/entry_points.txt,sha256=yrOQx1wHi84rbxX_ZYtYaVcK3EeuRhHRQDZRc8mB0NI,100
+pyckster-26.2.1.dist-info/top_level.txt,sha256=eaihhwhEmlysgdZE4HmELFdSUwlXcMv90YorkjOXujQ,9
+pyckster-26.2.1.dist-info/RECORD,,