pyckster 25.12.4__py3-none-any.whl → 26.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pyckster/__init__.py +1 -1
- pyckster/core.py +3948 -592
- pyckster/mpl_export.py +354 -0
- pyckster/obspy_utils.py +41 -0
- pyckster/pick_io.py +445 -0
- {pyckster-25.12.4.dist-info → pyckster-26.1.2.dist-info}/METADATA +8 -1
- {pyckster-25.12.4.dist-info → pyckster-26.1.2.dist-info}/RECORD +11 -9
- {pyckster-25.12.4.dist-info → pyckster-26.1.2.dist-info}/WHEEL +0 -0
- {pyckster-25.12.4.dist-info → pyckster-26.1.2.dist-info}/entry_points.txt +0 -0
- {pyckster-25.12.4.dist-info → pyckster-26.1.2.dist-info}/licenses/LICENCE +0 -0
- {pyckster-25.12.4.dist-info → pyckster-26.1.2.dist-info}/top_level.txt +0 -0
pyckster/pick_io.py
ADDED
|
@@ -0,0 +1,445 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
# -*- coding: utf-8 -*-
|
|
3
|
+
"""
|
|
4
|
+
Pick I/O utilities for reading and writing seismic picks in PyGimLi .sgt format.
|
|
5
|
+
|
|
6
|
+
This module provides functions to save and load first-arrival traveltime picks
|
|
7
|
+
in the PyGimLi .sgt format, which is widely used in seismic refraction processing.
|
|
8
|
+
|
|
9
|
+
Copyright (C) 2024, 2025 Sylvain Pasquet
|
|
10
|
+
Email: sylvain.pasquet@sorbonne-universite.fr
|
|
11
|
+
|
|
12
|
+
This program is free software: you can redistribute it and/or modify
|
|
13
|
+
it under the terms of the GNU General Public License as published by
|
|
14
|
+
the Free Software Foundation, either version 3 of the License, or
|
|
15
|
+
(at your option) any later version.
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
import numpy as np
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def save_picks_to_sgt(output_file, trace_positions, trace_elevations,
                      source_positions, source_elevations, picks, errors):
    """
    Save picks to PyGimLi .sgt file format.

    The .sgt format stores station coordinates and source-geophone-time picks:
    - Header with number of stations and their (x, z) coordinates
    - Pick data with source index, geophone index, time, and error

    Parameters
    ----------
    output_file : str
        Path to save the .sgt file
    trace_positions : list of arrays
        List of trace position arrays (one per source)
    trace_elevations : list of arrays
        List of trace elevation arrays (one per source)
    source_positions : list
        List of source positions
    source_elevations : list
        List of source elevations
    picks : list of lists
        Nested list of pick times [source][trace]
    errors : list of lists
        Nested list of pick errors [source][trace]

    Returns
    -------
    int
        Number of picks saved

    Raises
    ------
    ValueError
        If no picks are available to save
    """
    # Flatten trace (position, elevation) pairs across all sources, and record
    # where each source's traces start in the flattened list so that a pick at
    # [source i][trace j] can be mapped to the correct station even when
    # sources have different geophone spreads.
    trace_pairs = []
    trace_offsets = {}  # source index -> offset of its first trace in trace_pairs
    for i, (sublist_position, sublist_elevation) in enumerate(
            zip(trace_positions, trace_elevations)):
        if sublist_position is not None:
            trace_offsets[i] = len(trace_pairs)
            for trace, elevation in zip(sublist_position, sublist_elevation):
                trace_pairs.append((trace, elevation))

    # Collect (position, elevation) pairs for valid sources, recording the
    # mapping from original source index to its slot in the filtered list
    # (the lists may contain None entries which must be skipped).
    source_pairs = []
    source_slots = {}  # source index -> slot in source_pairs
    for i, (source, elevation) in enumerate(
            zip(source_positions, source_elevations)):
        if source is not None:
            source_slots[i] = len(source_pairs)
            source_pairs.append((source, elevation))

    # Convert to numpy structured arrays
    trace_pairs = np.array(trace_pairs, dtype=[('position', float), ('elevation', float)])
    source_pairs = np.array(source_pairs, dtype=[('position', float), ('elevation', float)])

    # Concatenate and get unique stations (sorted by position, then elevation)
    all_pairs = np.concatenate((trace_pairs, source_pairs))
    stations = np.unique(all_pairs)

    # Index of each flattened trace in the station list
    trace_indices = [
        np.where((stations['position'] == trace_pair['position']) &
                 (stations['elevation'] == trace_pair['elevation']))[0][0]
        for trace_pair in trace_pairs
    ]

    # Index of each source in the station list
    source_indices = [
        np.where((stations['position'] == source_pair['position']) &
                 (stations['elevation'] == source_pair['elevation']))[0][0]
        for source_pair in source_pairs
    ]

    # Count exactly the picks that will be written below, so the header count
    # always matches the number of data rows.
    n_picks = 0
    for i, pick_list in enumerate(picks):
        if pick_list is None or i not in source_slots or i not in trace_offsets:
            continue
        n_picks += int(np.sum(~np.isnan(np.asarray(pick_list, dtype=float))))

    if n_picks == 0:
        raise ValueError("No picks to save!")

    # Write .sgt file
    with open(output_file, 'w') as f:
        # Write number of stations
        f.write(f"{len(stations)} # shot/geophone points\n")
        f.write("# x\ty\n")
        for station in stations:
            f.write(f"{station['position']}\t{station['elevation']}\n")

        # Write number of picks
        f.write(f"{n_picks} # measurements\n")
        f.write("# s\tg\tt\terr\n")

        # Write pick data
        for i, pick_list in enumerate(picks):
            if pick_list is None or i not in source_slots or i not in trace_offsets:
                continue
            for j, pick in enumerate(pick_list):
                if np.isnan(pick):
                    continue
                error = errors[i][j]
                # Indices are 1-based in .sgt format. Map (i, j) through the
                # flattened trace list via this source's offset.
                s_idx = source_indices[source_slots[i]] + 1
                g_idx = trace_indices[trace_offsets[i] + j] + 1
                f.write(f"{s_idx}\t{g_idx}\t{pick:.5f}\t{error:.5f}\n")

    return n_picks
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def read_sgt_file(sgt_file, verbose=False):
    """
    Read PyGimLi .sgt file and parse stations and picks.

    Automatically detects coordinate format:
    - 2 columns: (x, z)
    - 3 columns: (x, y, z), (x, z, 0), or (x, 0, z)

    Automatically detects column order for picks using header comments.
    Supports various synonyms: s/src/source, g/geo/geophone/r/recv, t/time/tt, err/error/unc

    Parameters
    ----------
    sgt_file : str
        Path to .sgt file
    verbose : bool, optional
        Print debug information, default False

    Returns
    -------
    dict
        Dictionary containing:
        - 'stations': list of (x, y, z) tuples
        - 'picks': list of (source_idx, geophone_idx, time, error) tuples
        - 'n_stations': int
        - 'n_picks': int
    """
    with open(sgt_file, 'r') as f:
        # Read number of stations (text after '#' is an inline comment)
        n_stations = int(f.readline().split('#')[0].strip())
        if verbose:
            print(f"Number of stations: {n_stations}")

        # Skip comment lines. On exit, `line` holds the first station line
        # (or "" at end of file / on a blank line).
        while True:
            raw = f.readline()
            if not raw:
                # EOF safeguard: avoid looping forever on truncated files
                line = ""
                break
            line = raw.strip()
            if line and line[0] == '#':
                if verbose:
                    print(f"Comment: {line}")
            else:
                break

        # Read station coordinates; the first line was already consumed above.
        coords_list = []
        for i in range(n_stations):
            if i > 0:
                line = f.readline().strip()

            if verbose and (i < 5 or i > n_stations - 5):
                print(f"Reading station line: {line}")

            if line:
                coords_list.append([float(p) for p in line.split()])

        # Determine coordinate format and normalize to (x, y, z) tuples
        stations = []
        if len(coords_list) > 0:
            n_cols = len(coords_list[0])

            if n_cols == 2:
                # 2 columns: (x, z) format
                for coords in coords_list:
                    stations.append((coords[0], 0.0, coords[1]))
                if verbose:
                    print("Detected 2-column format: (x, z)")

            elif n_cols == 3:
                # Check which columns have non-zero values
                col0_nonzero = any(abs(coords[0]) > 1e-10 for coords in coords_list)
                col1_nonzero = any(abs(coords[1]) > 1e-10 for coords in coords_list)
                col2_nonzero = any(abs(coords[2]) > 1e-10 for coords in coords_list)

                if verbose:
                    print(f"Column non-zero detection: col0={col0_nonzero}, col1={col1_nonzero}, col2={col2_nonzero}")

                if col0_nonzero and col1_nonzero and not col2_nonzero:
                    # Columns 0 and 1 non-zero, column 2 all zero:
                    # interpret as (x, z, 0) -> store as (x, 0, z)
                    for coords in coords_list:
                        stations.append((coords[0], 0.0, coords[1]))
                    if verbose:
                        print("Detected 3-column format: (x, z, 0)")
                else:
                    # Fallback: treat columns as (x, y, z). This also covers
                    # the common (x, 0, z) layout, which previously fell
                    # through and produced an empty station list.
                    for coords in coords_list:
                        stations.append((coords[0], coords[1], coords[2]))
                    if verbose:
                        print("Detected 3-column format: (x, y, z)")

        # Read number of picks
        n_picks = int(f.readline().split('#')[0].strip())
        if verbose:
            print(f"Number of picks: {n_picks}")

        # Default column indices (standard order: s g t err)
        s_ind = 0
        g_ind = 1
        t_ind = 2
        err_ind = 3

        # Read optional header comment lines to infer column order.
        # On exit, `line` holds the first pick line (or "" at EOF).
        comment_lines = []
        while True:
            line = f.readline()
            if not line:
                line = ""
                break
            line = line.strip()
            if line and line[0] == '#':
                comment_lines.append(line)
                if verbose:
                    print(f"Comment: {line}")
                continue
            else:
                break

        # Parse comment lines to detect column order
        if comment_lines:
            synonyms = {
                's': 's', 'src': 's', 'source': 's',
                'g': 'g', 'geo': 'g', 'geophone': 'g', 'r': 'g', 'recv': 'g', 'receiver': 'g',
                't': 't', 'time': 't', 'tt': 't', 'pick': 't', 'tpick': 't',
                'err': 'err', 'error': 'err', 'unc': 'err', 'uncertainty': 'err', 'sigma': 'err'
            }
            best_fields = []
            for cl in comment_lines:
                cl_proc = cl[1:] if cl.startswith('#') else cl
                for sep in [',', ';', '|']:
                    cl_proc = cl_proc.replace(sep, ' ')
                tokens = [tok.strip().lower().strip(':') for tok in cl_proc.split()]
                fields = []
                for tok in tokens:
                    if tok in synonyms:
                        canon = synonyms[tok]
                        # Skip immediate repeats (e.g. "# s s g t err")
                        if len(fields) == 0 or fields[-1] != canon:
                            fields.append(canon)
                unique_fields = []
                for fcanon in fields:
                    if fcanon not in unique_fields:
                        unique_fields.append(fcanon)
                # Keep the comment line that names the most fields
                if len(unique_fields) > len(best_fields):
                    best_fields = unique_fields

            if best_fields:
                try:
                    if 's' in best_fields:
                        s_ind = best_fields.index('s')
                    if 'g' in best_fields:
                        g_ind = best_fields.index('g')
                    if 't' in best_fields:
                        t_ind = best_fields.index('t')
                    if 'err' in best_fields:
                        err_ind = best_fields.index('err')
                    if verbose:
                        print(f"Detected column order from header: {best_fields}")
                        print(f"Using indices: s={s_ind}, g={g_ind}, t={t_ind}, err={err_ind}")
                except (ValueError, IndexError) as e:
                    if verbose:
                        print(f"Error parsing column order: {e}, using defaults")

        # Read picks; the first pick line may already be in `line`.
        picks = []
        for i in range(n_picks):
            if i == 0 and line:
                # Use the line we already read
                pass
            else:
                line = f.readline().strip()

            if line:
                parts = line.split()
                if len(parts) >= 4:
                    # Parse based on detected column order
                    source_idx = int(parts[s_ind])
                    geophone_idx = int(parts[g_ind])
                    time = float(parts[t_ind])
                    error = float(parts[err_ind])
                    picks.append((source_idx, geophone_idx, time, error))

    return {
        'stations': stations,
        'picks': picks,
        'n_stations': n_stations,
        'n_picks': n_picks
    }
|
|
313
|
+
|
|
314
|
+
|
|
315
|
+
def _find_matching_index(positions, elevations, target_x, target_z,
                         matching_mode, tolerance, max_distance):
    """
    Find the index of the entry in positions/elevations matching (target_x, target_z).

    Parameters
    ----------
    positions, elevations : sequences of float (entries may be None)
        Candidate station coordinates.
    target_x, target_z : float
        Target coordinates from the SGT file.
    matching_mode : str
        'exact_x'   : first candidate with |x - target_x| < tolerance
        'nearest_x' : candidate minimizing |x - target_x|, within max_distance
        'nearest_xz': candidate minimizing Euclidean (x, z) distance, within max_distance
    tolerance, max_distance : float
        Thresholds for the respective modes.

    Returns
    -------
    int or None
        Index of the matching candidate, or None if no candidate qualifies
        (also when matching_mode is unrecognized).
    """
    best_idx = None
    best_dist = None
    for idx, (pos, elev) in enumerate(zip(positions, elevations)):
        if pos is None:
            continue
        if matching_mode == 'exact_x':
            if abs(pos - target_x) < tolerance:
                return idx
        elif matching_mode == 'nearest_x':
            dist = abs(pos - target_x)
            if dist <= max_distance and (best_dist is None or dist < best_dist):
                best_idx = idx
                best_dist = dist
        elif matching_mode == 'nearest_xz':
            dist = np.sqrt((pos - target_x)**2 + (elev - target_z)**2)
            if dist <= max_distance and (best_dist is None or dist < best_dist):
                best_idx = idx
                best_dist = dist
    return best_idx


def match_picks_to_geometry(sgt_stations, sgt_picks,
                            trace_positions, trace_elevations,
                            source_positions, source_elevations,
                            matching_mode='exact_x', tolerance=0.01,
                            max_distance=50.0, verbose=False):
    """
    Match picks from SGT file to actual trace/source geometry.

    Supports multiple matching strategies:
    - 'exact_x': Match by exact X coordinate (within tolerance)
    - 'nearest_x': Match to nearest X coordinate (within max_distance)
    - 'nearest_xz': Match to nearest (X, Z) coordinate (within max_distance)

    Parameters
    ----------
    sgt_stations : list of tuples
        Station coordinates [(x, y, z), ...] from SGT file
    sgt_picks : list of tuples
        Pick data [(source_idx, geophone_idx, time, error), ...] from SGT file
        Note: Indices are 1-based
    trace_positions : list of arrays
        Actual trace positions in dataset
    trace_elevations : list of arrays
        Actual trace elevations in dataset
    source_positions : list
        Actual source positions in dataset
    source_elevations : list
        Actual source elevations in dataset
    matching_mode : str, optional
        Matching strategy: 'exact_x', 'nearest_x', or 'nearest_xz'
    tolerance : float, optional
        Tolerance for exact matching in meters, default 0.01
    max_distance : float, optional
        Maximum distance for nearest neighbor matching in meters, default 50.0
    verbose : bool, optional
        Print debug information, default False

    Returns
    -------
    dict
        Dictionary containing:
        - 'picks': 2D list [source][trace] of matched pick times (NaN where no pick)
        - 'errors': 2D list [source][trace] of matched errors
        - 'n_matched': Number of successfully matched picks
        - 'n_total': Total picks in SGT file
    """
    n_traces_per_source = [len(tp) if tp is not None else 0 for tp in trace_positions]

    # Initialize pick and error arrays with NaN
    matched_picks = [[np.nan] * n_traces for n_traces in n_traces_per_source]
    matched_errors = [[np.nan] * n_traces for n_traces in n_traces_per_source]

    n_matched = 0
    n_total = len(sgt_picks)

    for source_idx_1based, geophone_idx_1based, pick_time, pick_error in sgt_picks:
        # Convert to 0-based indexing
        source_idx = source_idx_1based - 1
        geophone_idx = geophone_idx_1based - 1

        # Skip picks referencing stations beyond the SGT station list
        if source_idx >= len(sgt_stations) or geophone_idx >= len(sgt_stations):
            continue
        sgt_source_x, _sgt_source_y, sgt_source_z = sgt_stations[source_idx]
        sgt_geo_x, _sgt_geo_y, sgt_geo_z = sgt_stations[geophone_idx]

        # Find matching source in actual geometry
        matched_source = _find_matching_index(
            source_positions, source_elevations,
            sgt_source_x, sgt_source_z,
            matching_mode, tolerance, max_distance)
        if matched_source is None:
            continue

        # Find matching trace within that source's spread
        matched_trace = None
        if trace_positions[matched_source] is not None:
            matched_trace = _find_matching_index(
                trace_positions[matched_source], trace_elevations[matched_source],
                sgt_geo_x, sgt_geo_z,
                matching_mode, tolerance, max_distance)
        if matched_trace is None:
            continue

        # Assign pick: both source and trace matched
        matched_picks[matched_source][matched_trace] = pick_time
        matched_errors[matched_source][matched_trace] = pick_error
        n_matched += 1

    if verbose:
        print(f"Matched {n_matched}/{n_total} picks using {matching_mode} mode")

    return {
        'picks': matched_picks,
        'errors': matched_errors,
        'n_matched': n_matched,
        'n_total': n_total
    }
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: pyckster
|
|
3
|
-
Version:
|
|
3
|
+
Version: 26.1.2
|
|
4
4
|
Summary: A PyQt5-based GUI for picking seismic traveltimes
|
|
5
5
|
Home-page: https://gitlab.in2p3.fr/metis-geophysics/pyckster
|
|
6
6
|
Author: Sylvain Pasquet
|
|
@@ -59,6 +59,13 @@ To update PyCKSTER, run the following command:
|
|
|
59
59
|
pip install pyckster --upgrade
|
|
60
60
|
```
|
|
61
61
|
|
|
62
|
+
## Troubleshooting
|
|
63
|
+
|
|
64
|
+
If numpy > 2 is installed in your environment, you might not be able to run pygimli. If so, you can downgrade numpy with the following command:
|
|
65
|
+
``` bash
|
|
66
|
+
pip install numpy==1.26.4 --upgrade
|
|
67
|
+
```
|
|
68
|
+
|
|
62
69
|
## Running PyCKSTER
|
|
63
70
|
|
|
64
71
|
Open a terminal and run:
|
|
@@ -1,23 +1,25 @@
|
|
|
1
|
-
pyckster/__init__.py,sha256=
|
|
1
|
+
pyckster/__init__.py,sha256=F3G3USiEB_Vqn16wt2oifz8ant0asmLootKPJWI3cmE,894
|
|
2
2
|
pyckster/__main__.py,sha256=zv3AGVKorKo2tgWOEIcVnkDbp15eepSqka3IoWH_adU,406
|
|
3
3
|
pyckster/auto_picking.py,sha256=fyZiOj0Ib-SB_oxsKnUszECHbOjo4JE23JVQILGYZco,12754
|
|
4
4
|
pyckster/bayesian_inversion.py,sha256=kdnKOlAZ0JlYLipuFDHlwS7dU8LtI-0aMb90bYpEHhE,163523
|
|
5
|
-
pyckster/core.py,sha256=
|
|
5
|
+
pyckster/core.py,sha256=GZ0ENrOja_n3KlK_cRz6jEQJr66mAbWMov_-_rkMm4M,991206
|
|
6
6
|
pyckster/inversion_app.py,sha256=ovM44oYBFsvfKxO7rjjThUhkJnLDLZZ0R6ZVp-5r66E,60676
|
|
7
7
|
pyckster/inversion_manager.py,sha256=P8i1fqUJKMWkd-9PoDmNtmQuKglGKTeSuptUUA57D-8,15393
|
|
8
8
|
pyckster/inversion_visualizer.py,sha256=vfKZIoJzKawbaEv29NsYYIGnWLDQCGef5bM2vY1aCBo,22135
|
|
9
9
|
pyckster/ipython_console.py,sha256=tZyyoiXCjCl7ozxOj_h-YR4eGjoC4kpKe7nZ48eUAJc,9313
|
|
10
|
-
pyckster/
|
|
10
|
+
pyckster/mpl_export.py,sha256=_WqPo9l9ABiSoU0ukLfm4caGV1-FKKbXjt8SoBHTR30,12346
|
|
11
|
+
pyckster/obspy_utils.py,sha256=01fNI9ryIYuiGOl4NR0J9C_xXupcnsBb1mLSz1Qo63A,20569
|
|
11
12
|
pyckster/pac_inversion.py,sha256=9624dJvEsvJmYbgVFFg5FeaAg4yUfiXTTwrrAHRzdcs,30076
|
|
13
|
+
pyckster/pick_io.py,sha256=uCre4o7vUYMOkk0PMAZOqB7Td6UNXWoLlfX1qstQ_Ic,17340
|
|
12
14
|
pyckster/pyqtgraph_utils.py,sha256=PAeE3n_wz7skHOC5eLnkFczbie7diVH1xvuL8jtJ4T8,6049
|
|
13
15
|
pyckster/surface_wave_analysis.py,sha256=97BrDA-n5AZp89NdxQ2ekZPaCErMc7v8C6GmD5KTi-4,102695
|
|
14
16
|
pyckster/surface_wave_profiling.py,sha256=L9KidhKmfGvVoPZjf6us3c49VB7VPB_VcsDqRx45OYI,315401
|
|
15
17
|
pyckster/sw_utils.py,sha256=uwAisERVqjk2LWVTz5qc7ru0M_rHZFoYmqOipZbpiNg,6051
|
|
16
18
|
pyckster/tab_factory.py,sha256=NlCIC6F8BrEu7a8BYOJJdWy5ftpX_zKDLj7SbcwBbh8,14519
|
|
17
19
|
pyckster/visualization_utils.py,sha256=bgODn21NAQx1FOMPj91kdDd0szKOgUyfZ3cQlyu2PF8,47947
|
|
18
|
-
pyckster-
|
|
19
|
-
pyckster-
|
|
20
|
-
pyckster-
|
|
21
|
-
pyckster-
|
|
22
|
-
pyckster-
|
|
23
|
-
pyckster-
|
|
20
|
+
pyckster-26.1.2.dist-info/licenses/LICENCE,sha256=-uaAIm20JrJKoMdCdn2GlFQfNU4fbsHWK3eh4kIQ_Ec,35143
|
|
21
|
+
pyckster-26.1.2.dist-info/METADATA,sha256=qYNt21jEX3swf7hVe9T3haQhGIkzAMaId3V5YWMFP7o,4067
|
|
22
|
+
pyckster-26.1.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
23
|
+
pyckster-26.1.2.dist-info/entry_points.txt,sha256=yrOQx1wHi84rbxX_ZYtYaVcK3EeuRhHRQDZRc8mB0NI,100
|
|
24
|
+
pyckster-26.1.2.dist-info/top_level.txt,sha256=eaihhwhEmlysgdZE4HmELFdSUwlXcMv90YorkjOXujQ,9
|
|
25
|
+
pyckster-26.1.2.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|