batplot-1.8.4-py3-none-any.whl → batplot-1.8.11-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- batplot/__init__.py +1 -1
- batplot/args.py +22 -4
- batplot/batch.py +12 -0
- batplot/batplot.py +383 -132
- batplot/converters.py +171 -122
- batplot/cpc_interactive.py +319 -161
- batplot/data/USER_MANUAL.md +49 -0
- batplot/electrochem_interactive.py +120 -80
- batplot/interactive.py +1766 -81
- batplot/modes.py +12 -11
- batplot/operando.py +22 -0
- batplot/operando_ec_interactive.py +390 -16
- batplot/session.py +85 -9
- batplot/style.py +198 -21
- {batplot-1.8.4.dist-info → batplot-1.8.11.dist-info}/METADATA +1 -1
- {batplot-1.8.4.dist-info → batplot-1.8.11.dist-info}/RECORD +20 -20
- {batplot-1.8.4.dist-info → batplot-1.8.11.dist-info}/WHEEL +1 -1
- {batplot-1.8.4.dist-info → batplot-1.8.11.dist-info}/licenses/LICENSE +0 -0
- {batplot-1.8.4.dist-info → batplot-1.8.11.dist-info}/entry_points.txt +0 -0
- {batplot-1.8.4.dist-info → batplot-1.8.11.dist-info}/top_level.txt +0 -0
batplot/interactive.py
CHANGED
@@ -57,6 +57,7 @@ from .color_utils import (
     ensure_colormap,
     _CUSTOM_CMAPS,
 )
+from .config import load_config, save_config
 
 
 class _FilterIMKWarning:
@@ -253,16 +254,28 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
        has_cif = bool(getattr(_bp, 'cif_tick_series', None))
    except Exception:
        pass
-   col1 = ["c: colors", "f: font", "l: line", "t: toggle axes", "g: size", "h: legend"]
+   col1 = ["c: colors", "f: font", "l: line", "t: toggle axes", "g: size", "h: legend", "sm: smooth"]
    if has_cif:
        col1.append("z: hkl")
        col1.append("j: CIF titles")
-   col2 = ["a: rearrange", "
+   col2 = ["a: rearrange", "o: offset", "r: rename", "x: change X", "y: change Y", "d: derivative"]
    col3 = ["v: find peaks", "n: crosshair", "p: print(export) style/geom", "i: import style/geom", "e: export figure", "s: save project", "b: undo", "q: quit"]
+
+   # Conditional overwrite shortcuts under (Options)
+   last_session = getattr(fig, "_last_session_save_path", None)
+   last_style = getattr(fig, "_last_style_export_path", None)
+   last_figure = getattr(fig, "_last_figure_export_path", None)
+   if last_session:
+       col3.append("os: overwrite session")
+   if last_style:
+       col3.append("ops: overwrite style")
+       col3.append("opsg: overwrite style+geom")
+   if last_figure:
+       col3.append("oe: overwrite figure")
 
    # Hide offset/y-range in stack mode
    if args.stack:
-       col2 = [item for item in col2 if not item.startswith("
+       col2 = [item for item in col2 if not item.startswith("o:") and not item.startswith("y:")]
 
    if not is_diffraction:
        col3 = [item for item in col3 if not item.startswith("n:")]
@@ -884,7 +897,17 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
    # NEW: style / diagnostics printer (clean version)
    def print_style_info():
        cts = getattr(_bp, 'cif_tick_series', None) if _bp is not None else None
-
+       # Read show_cif_hkl from __main__ module (where it's stored when toggled)
+       show_hkl = None
+       try:
+           _bp_module = sys.modules.get('__main__')
+           if _bp_module is not None and hasattr(_bp_module, 'show_cif_hkl'):
+               show_hkl = bool(getattr(_bp_module, 'show_cif_hkl', False))
+       except Exception:
+           pass
+       # Fall back to _bp object if not in __main__
+       if show_hkl is None and _bp is not None:
+           show_hkl = bool(getattr(_bp, 'show_cif_hkl', False)) if hasattr(_bp, 'show_cif_hkl') else None
        return _bp_print_style_info(
            fig, ax,
            y_data_list, labels,
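Note on the lookup above: when batplot runs as a script, interactive toggles land on the `__main__` module, so `print_style_info` checks there first and only then falls back to the `_bp` module object. A minimal standalone sketch of that lookup order (the `read_flag` wrapper is illustrative, not batplot API):

import sys

def read_flag(name: str, fallback_obj=None):
    # Prefer a value published on __main__ (script mode), then a fallback
    # object; swallow lookup errors the same way the diff does.
    try:
        main_mod = sys.modules.get('__main__')
        if main_mod is not None and hasattr(main_mod, name):
            return bool(getattr(main_mod, name))
    except Exception:
        pass
    if fallback_obj is not None and hasattr(fallback_obj, name):
        return bool(getattr(fallback_obj, name))
    return None  # unknown: caller decides the default

Returning None rather than False keeps "flag unknown" distinguishable from "flag off", which is why the diff leaves show_hkl as None until a source is found.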
@@ -898,7 +921,7 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
        )
 
    # NEW: export current style to .bpcfg
-   def export_style_config(filename, base_path=None, overwrite_path=None):
+   def export_style_config(filename, base_path=None, overwrite_path=None, force_kind=None):
        cts = getattr(_bp, 'cif_tick_series', None) if _bp is not None else None
        show_titles = bool(getattr(_bp, 'show_cif_titles', True)) if _bp is not None else True
        from .style import export_style_config as _export_style_config
@@ -917,6 +940,7 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
            base_path,
            show_cif_titles=show_titles,
            overwrite_path=overwrite_path,
+           force_kind=force_kind,
        )
 
    # NEW: apply imported style config (restricted application)
@@ -1083,6 +1107,342 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
    # history management:
    state_history = []
 
+   # ====================================================================
+   # SMOOTHING AND REDUCE ROWS HELPER FUNCTIONS
+   # ====================================================================
+
+   def _savgol_kernel(window: int, poly: int) -> np.ndarray:
+       """Return Savitzky–Golay smoothing kernel of given window/poly."""
+       half = window // 2
+       x = np.arange(-half, half + 1, dtype=float)
+       A = np.vander(x, poly + 1, increasing=True)
+       ATA = A.T @ A
+       ATA_inv = np.linalg.pinv(ATA)
+       target = np.zeros(poly + 1, dtype=float)
+       target[0] = 1.0  # evaluate polynomial at x=0
+       coeffs = target @ ATA_inv @ A.T
+       return coeffs
+
+   def _savgol_smooth(y: np.ndarray, window: int = 9, poly: int = 3) -> np.ndarray:
+       """Apply Savitzky–Golay smoothing (defaults from DiffCapAnalyzer) to data."""
+       n = y.size
+       if n < 3:
+           return y
+       if window > n:
+           window = n if n % 2 == 1 else n - 1
+       if window < 3:
+           return y
+       if window % 2 == 0:
+           window -= 1
+       if window < 3:
+           return y
+       if poly >= window:
+           poly = window - 1
+       coeffs = _savgol_kernel(window, poly)
+       half = window // 2
+       padded = np.pad(y, (half, half), mode='edge')
+       smoothed = np.convolve(padded, coeffs[::-1], mode='valid')
+       return smoothed
+
+   def _fft_smooth(y: np.ndarray, points: int = 5, cutoff: float = 0.1) -> np.ndarray:
+       """Apply FFT filter smoothing to data."""
+       n = y.size
+       if n < 3:
+           return y
+       # FFT
+       fft_vals = np.fft.rfft(y)
+       freq = np.fft.rfftfreq(n)
+       # Low-pass filter: zero out frequencies above cutoff
+       mask = freq <= cutoff
+       fft_vals[~mask] = 0
+       # Inverse FFT
+       smoothed = np.fft.irfft(fft_vals, n)
+       return smoothed
+
+   def _adjacent_average_smooth(y: np.ndarray, points: int = 5) -> np.ndarray:
+       """Apply Adjacent-Averaging smoothing to data."""
+       n = y.size
+       if n < points:
+           return y
+       if points < 2:
+           return y
+       # Use convolution for moving average
+       kernel = np.ones(points) / points
+       # Pad edges
+       padded = np.pad(y, (points//2, points//2), mode='edge')
+       smoothed = np.convolve(padded, kernel, mode='valid')
+       return smoothed
+
+   def _get_last_reduce_rows_settings(method: str) -> dict:
+       """Get last reduce rows settings from config file.
+
+       Args:
+           method: Method name ('delete_skip', 'delete_missing', 'merge')
+
+       Returns:
+           Dictionary with last settings for the method, or empty dict if none
+       """
+       config = load_config()
+       last_settings = config.get('last_reduce_rows_settings', {})
+       return last_settings.get(method, {})
+
+   def _save_last_reduce_rows_settings(method: str, settings: dict) -> None:
+       """Save last reduce rows settings to config file.
+
+       Args:
+           method: Method name ('delete_skip', 'delete_missing', 'merge')
+           settings: Dictionary with settings to save
+       """
+       config = load_config()
+       if 'last_reduce_rows_settings' not in config:
+           config['last_reduce_rows_settings'] = {}
+       config['last_reduce_rows_settings'][method] = settings
+       save_config(config)
+
+   def _get_last_smooth_settings_from_config() -> dict:
+       """Get last smooth settings from config file (persistent across sessions).
+
+       Returns:
+           Dictionary with last smooth settings, or empty dict if none
+       """
+       config = load_config()
+       return config.get('last_smooth_settings', {})
+
+   def _save_last_smooth_settings_to_config(settings: dict) -> None:
+       """Save last smooth settings to config file (persistent across sessions).
+
+       Args:
+           settings: Dictionary with smooth settings to save
+       """
+       config = load_config()
+       config['last_smooth_settings'] = settings
+       save_config(config)
+
+   def _ensure_original_data():
+       """Ensure original data is stored for all curves."""
+       if not hasattr(fig, '_original_x_data_list'):
+           fig._original_x_data_list = [np.array(a, copy=True) for a in x_data_list]
+           fig._original_y_data_list = [np.array(a, copy=True) for a in y_data_list]
+
+   def _update_full_processed_data():
+       """Update the full processed data (after all processing steps, before any X-range filtering)."""
+       # This stores the complete processed data (reduce + smooth + derivative) for X-range filtering
+       fig._full_processed_x_data_list = [np.array(a, copy=True) for a in x_data_list]
+       fig._full_processed_y_data_list = [np.array(a, copy=True) for a in y_data_list]
+
+   def _reset_to_original():
+       """Reset all curves to original data."""
+       if not hasattr(fig, '_original_x_data_list'):
+           return (False, 0, 0)
+       reset_count = 0
+       total_points = 0
+       for i in range(min(len(fig._original_x_data_list), len(ax.lines))):
+           try:
+               orig_x = fig._original_x_data_list[i]
+               orig_y = fig._original_y_data_list[i]
+               # Restore offsets
+               if i < len(offsets_list):
+                   orig_y_with_offset = orig_y + offsets_list[i]
+               else:
+                   orig_y_with_offset = orig_y.copy()
+               ax.lines[i].set_data(orig_x, orig_y_with_offset)
+               x_data_list[i] = orig_x.copy()
+               y_data_list[i] = orig_y_with_offset.copy()
+               reset_count += 1
+               total_points += len(orig_x)
+           except Exception:
+               pass
+       # Clear processing settings
+       if hasattr(fig, '_smooth_settings'):
+           delattr(fig, '_smooth_settings')
+       return (reset_count > 0, reset_count, total_points)
+
+   def _apply_data_changes():
+       """Update plot and data lists after data modification."""
+       for i in range(min(len(ax.lines), len(x_data_list), len(y_data_list))):
+           try:
+               ax.lines[i].set_data(x_data_list[i], y_data_list[i])
+           except Exception:
+               pass
+       try:
+           fig.canvas.draw_idle()
+       except Exception:
+           pass
+
+   def _calculate_derivative(x: np.ndarray, y: np.ndarray, order: int = 1) -> np.ndarray:
+       """Calculate 1st or 2nd derivative using numpy gradient.
+
+       Args:
+           x: X values
+           y: Y values
+           order: 1 for first derivative (dy/dx), 2 for second derivative (d²y/dx²)
+
+       Returns:
+           Derivative array (same length as input)
+       """
+       if len(y) < 2:
+           return y.copy()
+       # Calculate dy/dx
+       dy_dx = np.gradient(y, x)
+       if order == 1:
+           return dy_dx
+       elif order == 2:
+           # Calculate d²y/dx² = d(dy/dx)/dx
+           if len(dy_dx) < 2:
+               return np.zeros_like(y)
+           d2y_dx2 = np.gradient(dy_dx, x)
+           return d2y_dx2
+       else:
+           return y.copy()
+
+   def _calculate_reversed_derivative(x, y, order):
+       """Calculate reversed 1st or 2nd derivative (dx/dy or d²x/dy²).
+
+       Args:
+           x: X values
+           y: Y values
+           order: 1 for first reversed derivative (dx/dy), 2 for second reversed derivative (d²x/dy²)
+
+       Returns:
+           Reversed derivative array (same length as input)
+       """
+       if len(y) < 2:
+           return y.copy()
+       # First calculate dy/dx
+       dy_dx = np.gradient(y, x)
+       # Avoid division by zero - replace zeros with small epsilon
+       epsilon = 1e-10
+       dy_dx_safe = np.where(np.abs(dy_dx) < epsilon, np.sign(dy_dx) * epsilon, dy_dx)
+       # Calculate dx/dy = 1 / (dy/dx)
+       dx_dy = 1.0 / dy_dx_safe
+       if order == 1:
+           return dx_dy
+       elif order == 2:
+           # Calculate d²x/dy² = d(dx/dy)/dy
+           # d(dx/dy)/dy = d(1/(dy/dx))/dy = -1/(dy/dx)² * d²y/dx²
+           if len(dx_dy) < 2:
+               return np.zeros_like(y)
+           # Calculate d²y/dx² first
+           d2y_dx2 = np.gradient(dy_dx, x)
+           # d²x/dy² = -d²y/dx² / (dy/dx)³
+           d2x_dy2 = -d2y_dx2 / (dy_dx_safe ** 3)
+           return d2x_dy2
+       else:
+           return y.copy()
+
+   def _update_ylabel_for_derivative(order: int, current_label: str = None, is_reversed: bool = False) -> str:
+       """Generate appropriate y-axis label for derivative.
+
+       Args:
+           order: 1 for first derivative, 2 for second derivative
+           current_label: Current y-axis label (optional)
+           is_reversed: True for reversed derivative (dx/dy), False for normal (dy/dx)
+
+       Returns:
+           New y-axis label string
+       """
+       if current_label is None:
+           current_label = ax.get_ylabel() or "Y"
+
+       # Try to detect common patterns and update accordingly
+       current_lower = current_label.lower()
+
+       if is_reversed:
+           # Reversed derivative: dx/dy or d²x/dy²
+           y_label = current_label if current_label and current_label != "Y" else (ax.get_ylabel() or "Y")
+           if order == 1:
+               # First reversed derivative: dx/dy
+               if x_label:
+                   return f"d({x_label})/d({y_label})"
+               else:
+                   return f"dx/d({y_label})"
+           else:  # order == 2
+               # Second reversed derivative: d²x/dy²
+               if x_label:
+                   return f"d²({x_label})/d({y_label})²"
+               else:
+                   return f"d²x/d({y_label})²"
+
+       # Normal derivative: dy/dx or d²y/dx²
+       if order == 1:
+           # First derivative: dy/dx or dY/dX
+           if "/" in current_label:
+               # If already has derivative notation, try to increment
+               if "d²" in current_label or "d2" in current_lower:
+                   # Change from 2nd to 1st (shouldn't normally happen, but handle it)
+                   new_label = current_label.replace("d²", "d").replace("d2", "d")
+                   return new_label
+               elif "d" in current_label.lower() and "/" in current_label:
+                   # Already has derivative, keep as is but update order if needed
+                   return current_label
+           # Add d/dx prefix or suffix
+           if x_label:
+               if any(op in current_label for op in ["/", "(", "["]):
+                   # Complex label, prepend d/dx
+                   return f"d({current_label})/d({x_label})"
+               else:
+                   # Simple label, use d/dx notation
+                   return f"d({current_label})/d({x_label})"
+           else:
+               return f"d({current_label})/dx"
+       else:  # order == 2
+           # Second derivative: d²y/dx² or d2Y/dX2
+           if "/" in current_label:
+               if "d²" in current_label or "d2" in current_lower:
+                   # Already 2nd derivative, keep as is
+                   return current_label
+               elif "d" in current_label.lower() and "/" in current_label:
+                   # First derivative, convert to second
+                   new_label = current_label.replace("d(", "d²(").replace("d2(", "d²(").replace("d/", "d²/").replace("/d(", "²/d(")
+                   return new_label
+           # Add d²/dx² prefix
+           if x_label:
+               if any(op in current_label for op in ["/", "(", "["]):
+                   return f"d²({current_label})/d({x_label})²"
+               else:
+                   return f"d²({current_label})/d({x_label})²"
+           else:
+               return f"d²({current_label})/dx²"
+
+       return current_label
+
+   def _ensure_pre_derivative_data():
+       """Ensure pre-derivative data is stored for reset."""
+       if not hasattr(fig, '_pre_derivative_x_data_list'):
+           fig._pre_derivative_x_data_list = [np.array(a, copy=True) for a in x_data_list]
+           fig._pre_derivative_y_data_list = [np.array(a, copy=True) for a in y_data_list]
+           fig._pre_derivative_ylabel = ax.get_ylabel() or ""
+
+   def _reset_from_derivative():
+       """Reset all curves from derivative back to pre-derivative state."""
+       if not hasattr(fig, '_pre_derivative_x_data_list'):
+           return (False, 0, 0)
+       reset_count = 0
+       total_points = 0
+       for i in range(min(len(fig._pre_derivative_x_data_list), len(ax.lines))):
+           try:
+               pre_x = fig._pre_derivative_x_data_list[i]
+               pre_y = fig._pre_derivative_y_data_list[i]
+               # Restore offsets
+               if i < len(offsets_list):
+                   pre_y_with_offset = pre_y + offsets_list[i]
+               else:
+                   pre_y_with_offset = pre_y.copy()
+               ax.lines[i].set_data(pre_x, pre_y_with_offset)
+               x_data_list[i] = pre_x.copy()
+               y_data_list[i] = pre_y_with_offset.copy()
+               reset_count += 1
+               total_points += len(pre_x)
+           except Exception:
+               pass
+       # Restore y-axis label
+       if hasattr(fig, '_pre_derivative_ylabel'):
+           ax.set_ylabel(fig._pre_derivative_ylabel)
+       # Clear derivative settings
+       if hasattr(fig, '_derivative_order'):
+           delattr(fig, '_derivative_order')
+       return (reset_count > 0, reset_count, total_points)
+
    def push_state(note=""):
        """Snapshot current editable state (before a modifying action)."""
        try:
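The `_savgol_kernel` helper above builds Savitzky–Golay coefficients from the normal equations of a local least-squares polynomial fit, evaluated at the window center (x = 0). A useful property check: a filter of polynomial order p passes any polynomial of degree ≤ p through unchanged at interior points. A minimal numpy-only sketch under that assumption (uniform spacing; the kernel is re-derived outside the closure):

import numpy as np

def savgol_kernel(window: int, poly: int) -> np.ndarray:
    # Same construction as the diff: fit a degree-`poly` polynomial over
    # the window by least squares, read off its value at the center.
    half = window // 2
    x = np.arange(-half, half + 1, dtype=float)
    A = np.vander(x, poly + 1, increasing=True)
    target = np.zeros(poly + 1, dtype=float)
    target[0] = 1.0
    return target @ np.linalg.pinv(A.T @ A) @ A.T

# A degree-3 polynomial must pass through a poly=3 filter unchanged.
y = np.polyval([2.0, -1.0, 0.5, 3.0], np.linspace(0.0, 1.0, 50))
k = savgol_kernel(9, 3)
interior = np.convolve(y, k[::-1], mode='valid')  # correlation with k
assert np.allclose(interior, y[4:-4])             # half-window = 4

Convolving with the reversed kernel, as `_savgol_smooth` does, turns np.convolve into a correlation, so each output is the fitted value at its own window center.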
@@ -1161,6 +1521,26 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
            snap["y_data_list"] = [np.array(a, copy=True) for a in y_data_list]
            snap["orig_y"] = [np.array(a, copy=True) for a in orig_y]
            snap["offsets"] = list(offsets_list)
+           # Processed data (for smooth/reduce operations)
+           if hasattr(fig, '_original_x_data_list'):
+               snap["original_x_data_list"] = [np.array(a, copy=True) for a in fig._original_x_data_list]
+               snap["original_y_data_list"] = [np.array(a, copy=True) for a in fig._original_y_data_list]
+           if hasattr(fig, '_full_processed_x_data_list'):
+               snap["full_processed_x_data_list"] = [np.array(a, copy=True) for a in fig._full_processed_x_data_list]
+               snap["full_processed_y_data_list"] = [np.array(a, copy=True) for a in fig._full_processed_y_data_list]
+           if hasattr(fig, '_smooth_settings'):
+               snap["smooth_settings"] = dict(fig._smooth_settings)
+           if hasattr(fig, '_last_smooth_settings'):
+               snap["last_smooth_settings"] = dict(fig._last_smooth_settings)
+           # Derivative data (for derivative operations)
+           if hasattr(fig, '_pre_derivative_x_data_list'):
+               snap["pre_derivative_x_data_list"] = [np.array(a, copy=True) for a in fig._pre_derivative_x_data_list]
+               snap["pre_derivative_y_data_list"] = [np.array(a, copy=True) for a in fig._pre_derivative_y_data_list]
+               snap["pre_derivative_ylabel"] = str(getattr(fig, '_pre_derivative_ylabel', ''))
+           if hasattr(fig, '_derivative_order'):
+               snap["derivative_order"] = int(fig._derivative_order)
+           if hasattr(fig, '_derivative_reversed'):
+               snap["derivative_reversed"] = bool(fig._derivative_reversed)
            # Label text content
            snap["label_texts"] = [t.get_text() for t in label_text_objects]
            state_history.append(snap)
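Note that `push_state` writes the smoothing/derivative keys only when the matching attribute exists on `fig`, so a key missing from the snapshot later means "this attribute was not set", and undo can delete it rather than restore a stale value. A minimal sketch of that conditional-snapshot idiom (the `History` class is a hypothetical illustration, not batplot code):

import copy

class History:
    OPTIONAL = ('_smooth_settings', '_derivative_order')

    def __init__(self):
        self._stack = []

    def push(self, obj):
        snap = {}
        for name in self.OPTIONAL:
            if hasattr(obj, name):        # record only what exists
                snap[name] = copy.deepcopy(getattr(obj, name))
        self._stack.append(snap)

    def pop_into(self, obj):
        snap = self._stack.pop()
        for name in self.OPTIONAL:
            if name in snap:
                setattr(obj, name, snap[name])
            elif hasattr(obj, name):
                delattr(obj, name)        # absent at snapshot time: remove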
@@ -1210,6 +1590,12 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                plt.rcParams['font.size'] = snap["font_size"]
        except Exception:
            pass
+       # Apply restored font settings to all existing text objects
+       # This ensures labels, tick labels, etc. update to match restored font size/family
+       try:
+           sync_fonts()
+       except Exception:
+           pass
 
        # Figure size & dpi
        if snap.get("fig_size") and isinstance(snap["fig_size"], (list, tuple)) and len(snap["fig_size"])==2:
@@ -1381,14 +1767,65 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
        offsets_list[:] = list(snap["offsets"])
        delta = snap.get("delta", delta)
 
-       #
-
-
-
-
-
+       # Restore processed data (for smooth/reduce operations)
+       if "original_x_data_list" in snap:
+           fig._original_x_data_list = [np.array(a, copy=True) for a in snap["original_x_data_list"]]
+           fig._original_y_data_list = [np.array(a, copy=True) for a in snap["original_y_data_list"]]
+       elif hasattr(fig, '_original_x_data_list'):
+           # Clear if not in snapshot
+           delattr(fig, '_original_x_data_list')
+           delattr(fig, '_original_y_data_list')
+       if "full_processed_x_data_list" in snap:
+           fig._full_processed_x_data_list = [np.array(a, copy=True) for a in snap["full_processed_x_data_list"]]
+           fig._full_processed_y_data_list = [np.array(a, copy=True) for a in snap["full_processed_y_data_list"]]
+       elif hasattr(fig, '_full_processed_x_data_list'):
+           # Clear if not in snapshot
+           delattr(fig, '_full_processed_x_data_list')
+           delattr(fig, '_full_processed_y_data_list')
+       if "smooth_settings" in snap:
+           fig._smooth_settings = dict(snap["smooth_settings"])
+       elif hasattr(fig, '_smooth_settings'):
+           delattr(fig, '_smooth_settings')
+       if "last_smooth_settings" in snap:
+           fig._last_smooth_settings = dict(snap["last_smooth_settings"])
+       elif hasattr(fig, '_last_smooth_settings'):
+           delattr(fig, '_last_smooth_settings')
+       # Restore derivative data (for derivative operations)
+       if "pre_derivative_x_data_list" in snap:
+           fig._pre_derivative_x_data_list = [np.array(a, copy=True) for a in snap["pre_derivative_x_data_list"]]
+           fig._pre_derivative_y_data_list = [np.array(a, copy=True) for a in snap["pre_derivative_y_data_list"]]
+           fig._pre_derivative_ylabel = str(snap.get("pre_derivative_ylabel", ""))
+       elif hasattr(fig, '_pre_derivative_x_data_list'):
+           delattr(fig, '_pre_derivative_x_data_list')
+           delattr(fig, '_pre_derivative_y_data_list')
+           if hasattr(fig, '_pre_derivative_ylabel'):
+               delattr(fig, '_pre_derivative_ylabel')
+       if "derivative_order" in snap:
+           fig._derivative_order = int(snap["derivative_order"])
+       elif hasattr(fig, '_derivative_order'):
+           delattr(fig, '_derivative_order')
+       if "derivative_reversed" in snap:
+           fig._derivative_reversed = bool(snap["derivative_reversed"])
+       elif hasattr(fig, '_derivative_reversed'):
+           delattr(fig, '_derivative_reversed')
+       # Restore y-axis label if derivative was applied
+       if "derivative_order" in snap:
+           try:
+               current_ylabel = ax.get_ylabel() or ""
+               order = int(snap["derivative_order"])
+               is_reversed = snap.get("derivative_reversed", False)
+               new_ylabel = _update_ylabel_for_derivative(order, current_ylabel, is_reversed=is_reversed)
+               ax.set_ylabel(new_ylabel)
+           except Exception:
+               pass
+
+       # DON'T recalculate y_data_list - trust the snapshotted data to avoid offset drift
+       # The snapshot already captured the correct y_data_list with offsets applied.
+       # Recalculating from orig_y + offsets_list can introduce floating-point errors
+       # or inconsistencies if the data underwent transformations (normalize, etc.)
 
-       # Update line data with restored values
+       # Update line data with restored values from snapshot
+       # This ensures line visual data matches the snapshotted data lists exactly
        for i in range(min(len(ax.lines), len(x_data_list), len(y_data_list))):
            try:
                ax.lines[i].set_data(x_data_list[i], y_data_list[i])
@@ -1423,7 +1860,15 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
            pass
        if _bp is not None and 'show_cif_hkl' in snap:
            try:
-
+               new_state = bool(snap['show_cif_hkl'])
+               setattr(_bp, 'show_cif_hkl', new_state)
+               # Also store in __main__ module so draw function can access it
+               try:
+                   _bp_module = sys.modules.get('__main__')
+                   if _bp_module is not None:
+                       setattr(_bp_module, 'show_cif_hkl', new_state)
+               except Exception:
+                   pass
            except Exception:
                pass
        if _bp is not None and 'show_cif_titles' in snap:
@@ -1508,8 +1953,16 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
            try:
                # Flip visibility flag in batplot module
                cur = bool(getattr(_bp, 'show_cif_hkl', False)) if _bp is not None else False
+               new_state = not cur
                if _bp is not None:
-                   setattr(_bp, 'show_cif_hkl',
+                   setattr(_bp, 'show_cif_hkl', new_state)
+               # Also store in __main__ module so draw function can access it
+               try:
+                   _bp_module = sys.modules.get('__main__')
+                   if _bp_module is not None:
+                       setattr(_bp_module, 'show_cif_hkl', new_state)
+               except Exception:
+                   pass
                # Avoid re-entrant extension while redrawing
                prev_ext = bool(getattr(_bp, 'cif_extend_suspended', False)) if _bp is not None else False
                if _bp is not None:
@@ -1636,6 +2089,141 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
            except Exception as e:
                print(f"Error toggling crosshair: {e}")
            continue
+       elif key == 'os':
+           # Quick overwrite of last saved session (.pkl)
+           try:
+               last_session_path = getattr(fig, '_last_session_save_path', None)
+               if not last_session_path:
+                   print("No previous session save found.")
+                   continue
+               if not os.path.exists(last_session_path):
+                   print(f"Previous save file not found: {last_session_path}")
+                   continue
+               yn = _safe_input(f"Overwrite session '{os.path.basename(last_session_path)}'? (y/n): ").strip().lower()
+               if yn != 'y':
+                   print("Canceled.")
+                   continue
+               _bp_dump_session(
+                   last_session_path,
+                   fig=fig,
+                   ax=ax,
+                   x_data_list=x_data_list,
+                   y_data_list=y_data_list,
+                   orig_y=orig_y,
+                   offsets_list=offsets_list,
+                   labels=labels,
+                   delta=delta,
+                   args=args,
+                   tick_state=tick_state,
+                   cif_tick_series=(getattr(_bp, 'cif_tick_series', None) if _bp is not None else None),
+                   cif_hkl_map=(getattr(_bp, 'cif_hkl_map', None) if _bp is not None else None),
+                   cif_hkl_label_map=(getattr(_bp, 'cif_hkl_label_map', None) if _bp is not None else None),
+                   show_cif_hkl=(bool(getattr(_bp, 'show_cif_hkl', False)) if _bp is not None else False),
+                   show_cif_titles=(bool(getattr(_bp, 'show_cif_titles', True)) if _bp is not None else True),
+                   skip_confirm=True,
+               )
+               fig._last_session_save_path = last_session_path
+               print(f"Overwritten session to {last_session_path}")
+           except Exception as e:
+               print(f"Error overwriting session: {e}")
+           continue
+       elif key in ('ops', 'opsg'):
+           # Quick overwrite of last exported style file (.bps / .bpsg)
+           try:
+               last_style_path = getattr(fig, '_last_style_export_path', None)
+               if not last_style_path:
+                   print("No previous style export found.")
+                   continue
+               if not os.path.exists(last_style_path):
+                   print(f"Previous style file not found: {last_style_path}")
+                   continue
+               if key == 'ops':
+                   mode = 'ps'
+                   label = "style-only"
+               else:
+                   mode = 'psg'
+                   label = "style+geometry"
+               yn = _safe_input(
+                   f"Overwrite {label} file '{os.path.basename(last_style_path)}'? (y/n): "
+               ).strip().lower()
+               if yn != 'y':
+                   print("Canceled.")
+                   continue
+               exported = export_style_config(
+                   None,
+                   base_path=None,
+                   overwrite_path=last_style_path,
+                   force_kind=mode,
+               )
+               if exported:
+                   fig._last_style_export_path = exported
+                   print(f"Overwritten {label} style to {exported}")
+           except Exception as e:
+               print(f"Error overwriting style: {e}")
+           continue
+       elif key == 'oe':
+           # Quick overwrite of last exported figure
+           try:
+               last_figure_path = getattr(fig, '_last_figure_export_path', None)
+               if not last_figure_path:
+                   print("No previous figure export found.")
+                   continue
+               if not os.path.exists(last_figure_path):
+                   print(f"Previous export file not found: {last_figure_path}")
+                   continue
+               yn = _safe_input(
+                   f"Overwrite figure '{os.path.basename(last_figure_path)}'? (y/n): "
+               ).strip().lower()
+               if yn != 'y':
+                   print("Canceled.")
+                   continue
+               export_target = last_figure_path
+               from .utils import ensure_exact_case_filename
+               export_target = ensure_exact_case_filename(export_target)
+               # Temporarily remove numbering for export
+               for i, txt in enumerate(label_text_objects):
+                   txt.set_text(labels[i])
+               _, _ext = os.path.splitext(export_target)
+               if _ext.lower() == '.svg':
+                   try:
+                       _fig_fc = fig.get_facecolor()
+                   except Exception:
+                       _fig_fc = None
+                   try:
+                       _ax_fc = ax.get_facecolor()
+                   except Exception:
+                       _ax_fc = None
+                   try:
+                       if getattr(fig, 'patch', None) is not None:
+                           fig.patch.set_alpha(0.0); fig.patch.set_facecolor('none')
+                       if getattr(ax, 'patch', None) is not None:
+                           ax.patch.set_alpha(0.0); ax.patch.set_facecolor('none')
+                   except Exception:
+                       pass
+                   try:
+                       fig.savefig(export_target, dpi=300, transparent=True, facecolor='none', edgecolor='none')
+                   finally:
+                       try:
+                           if _fig_fc is not None and getattr(fig, 'patch', None) is not None:
+                               fig.patch.set_alpha(1.0); fig.patch.set_facecolor(_fig_fc)
+                       except Exception:
+                           pass
+                       try:
+                           if _ax_fc is not None and getattr(ax, 'patch', None) is not None:
+                               ax.patch.set_alpha(1.0); ax.patch.set_facecolor(_ax_fc)
+                       except Exception:
+                           pass
+               else:
+                   fig.savefig(export_target, dpi=300)
+               print(f"Figure saved to {export_target}")
+               fig._last_figure_export_path = export_target
+               # Restore numbering
+               for i, txt in enumerate(label_text_objects):
+                   txt.set_text(f"{i+1}: {labels[i]}")
+               fig.canvas.draw()
+           except Exception as e:
+               print(f"Error overwriting figure: {e}")
+           continue
        elif key == 's':
            # Save current interactive session with numbered overwrite picker
            try:
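The SVG branch of the `oe` handler makes the export background transparent by blanking the figure and axes patches before `savefig` and restoring them in a `finally` block, so a failed save cannot leave the live canvas transparent. A minimal standalone sketch of that pattern (the Agg backend and `out.svg` path are assumptions for the sketch):

import matplotlib
matplotlib.use('Agg')  # assumption: headless backend for the sketch
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])

# Remember current backgrounds, then blank them for the export.
fig_fc, ax_fc = fig.get_facecolor(), ax.get_facecolor()
fig.patch.set_alpha(0.0); fig.patch.set_facecolor('none')
ax.patch.set_alpha(0.0); ax.patch.set_facecolor('none')
try:
    fig.savefig('out.svg', dpi=300, transparent=True,
                facecolor='none', edgecolor='none')
finally:
    # Restore even if savefig raised, so the on-screen figure is unchanged.
    fig.patch.set_alpha(1.0); fig.patch.set_facecolor(fig_fc)
    ax.patch.set_alpha(1.0); ax.patch.set_facecolor(ax_fc)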
@@ -2433,7 +3021,49 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                    print("Invalid value, ignored.")
                    continue
                push_state("xrange")
-
+               new_min = current_xlim[0]
+               new_max = new_upper
+               ax.set_xlim(new_min, new_max)
+               # Re-filter data from original processed data if available
+               data_is_processed = (hasattr(fig, '_original_x_data_list') or
+                                    hasattr(fig, '_smooth_settings') or
+                                    hasattr(fig, '_derivative_order') or
+                                    hasattr(fig, '_pre_derivative_x_data_list'))
+               if data_is_processed and hasattr(fig, '_original_x_data_list'):
+                   for i in range(len(labels)):
+                       if i < len(fig._original_x_data_list):
+                           x_current = fig._original_x_data_list[i]
+                           y_current = fig._original_y_data_list[i]
+                           if i < len(offsets_list):
+                               y_current_no_offset = y_current - offsets_list[i]
+                           else:
+                               y_current_no_offset = y_current.copy()
+                           mask = (x_current >= new_min) & (x_current <= new_max)
+                           x_sub = np.asarray(x_current[mask], dtype=float).flatten()
+                           y_sub = np.asarray(y_current_no_offset[mask], dtype=float).flatten()
+                           if x_sub.size == 0:
+                               ax.lines[i].set_data([], [])
+                               x_data_list[i] = np.array([], dtype=float)
+                               y_data_list[i] = np.array([], dtype=float)
+                               if i < len(orig_y):
+                                   orig_y[i] = np.array([], dtype=float)
+                               continue
+                           if i < len(offsets_list):
+                               y_sub = y_sub + offsets_list[i]
+                           ax.lines[i].set_data(x_sub, y_sub)
+                           x_data_list[i] = np.asarray(x_sub, dtype=float).flatten()
+                           y_data_list[i] = np.asarray(y_sub, dtype=float).flatten()
+                           # Update orig_y with robust method
+                           while len(orig_y) <= i:
+                               orig_y.append(np.array([], dtype=float))
+                           try:
+                               y_no_offset = y_sub - offsets_list[i] if i < len(offsets_list) else y_sub
+                               y_no_offset_1d = np.array(y_no_offset, dtype=float).ravel()
+                               if i < len(orig_y):
+                                   del orig_y[i]
+                               orig_y.insert(i, y_no_offset_1d)
+                           except Exception:
+                               pass
                ax.relim()
                ax.autoscale_view(scalex=False, scaley=True)
                update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
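Both this hunk and the lower-bound hunk that follows re-filter from the stored full-resolution arrays rather than the already-clipped ones, and both mask offset-free data before re-applying the per-curve offset, so repeated range changes never accumulate the offset. A minimal sketch of that round trip (the `refilter` helper is illustrative, not batplot API):

import numpy as np

def refilter(x_full, y_full_with_offset, offset, lo, hi):
    # Work on offset-free data so the offset is applied exactly once.
    x = np.asarray(x_full, dtype=float)
    y_raw = np.asarray(y_full_with_offset, dtype=float) - offset
    m = (x >= lo) & (x <= hi)
    if not m.any():
        return np.array([]), np.array([])  # curve empty in the new range
    return x[m], y_raw[m] + offset         # re-apply offset on the way out

x = np.linspace(0.0, 10.0, 11)
y = np.sin(x) + 2.0                        # per-curve offset of 2.0
xs, ys = refilter(x, y, 2.0, 3.0, 7.0)
assert xs.min() >= 3.0 and xs.max() <= 7.0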
@@ -2464,7 +3094,49 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                    print("Invalid value, ignored.")
                    continue
                push_state("xrange")
-
+               new_min = new_lower
+               new_max = current_xlim[1]
+               ax.set_xlim(new_min, new_max)
+               # Re-filter data from original processed data if available
+               data_is_processed = (hasattr(fig, '_original_x_data_list') or
+                                    hasattr(fig, '_smooth_settings') or
+                                    hasattr(fig, '_derivative_order') or
+                                    hasattr(fig, '_pre_derivative_x_data_list'))
+               if data_is_processed and hasattr(fig, '_original_x_data_list'):
+                   for i in range(len(labels)):
+                       if i < len(fig._original_x_data_list):
+                           x_current = fig._original_x_data_list[i]
+                           y_current = fig._original_y_data_list[i]
+                           if i < len(offsets_list):
+                               y_current_no_offset = y_current - offsets_list[i]
+                           else:
+                               y_current_no_offset = y_current.copy()
+                           mask = (x_current >= new_min) & (x_current <= new_max)
+                           x_sub = np.asarray(x_current[mask], dtype=float).flatten()
+                           y_sub = np.asarray(y_current_no_offset[mask], dtype=float).flatten()
+                           if x_sub.size == 0:
+                               ax.lines[i].set_data([], [])
+                               x_data_list[i] = np.array([], dtype=float)
+                               y_data_list[i] = np.array([], dtype=float)
+                               if i < len(orig_y):
+                                   orig_y[i] = np.array([], dtype=float)
+                               continue
+                           if i < len(offsets_list):
+                               y_sub = y_sub + offsets_list[i]
+                           ax.lines[i].set_data(x_sub, y_sub)
+                           x_data_list[i] = np.asarray(x_sub, dtype=float).flatten()
+                           y_data_list[i] = np.asarray(y_sub, dtype=float).flatten()
+                           # Update orig_y with robust method
+                           while len(orig_y) <= i:
+                               orig_y.append(np.array([], dtype=float))
+                           try:
+                               y_no_offset = y_sub - offsets_list[i] if i < len(offsets_list) else y_sub
+                               y_no_offset_1d = np.array(y_no_offset, dtype=float).ravel()
+                               if i < len(orig_y):
+                                   del orig_y[i]
+                               orig_y.insert(i, y_no_offset_1d)
+                           except Exception:
+                               pass
                ax.relim()
                ax.autoscale_view(scalex=False, scaley=True)
                update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
@@ -2482,22 +3154,266 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                    print(f"X range updated: {ax.get_xlim()[0]:.6g} to {ax.get_xlim()[1]:.6g}")
                    continue
                if rng.lower() == 'a':
-                   # Auto: restore original range from
+                   # Auto: restore original range from CURRENT PROCESSED data (not original unprocessed)
                    push_state("xrange-auto")
-
+                   try:
+                       # Check if data has been processed
+                       data_is_processed = (hasattr(fig, '_original_x_data_list') or
+                                            hasattr(fig, '_smooth_settings') or
+                                            hasattr(fig, '_derivative_order') or
+                                            hasattr(fig, '_pre_derivative_x_data_list'))
+                       if data_is_processed and x_data_list and all(xd.size > 0 for xd in x_data_list):
+                           # Use CURRENT processed data to determine full range (preserves all processing)
+                           print(f"DEBUG: Using current processed data for auto restore (has {len(x_data_list)} curves)")
+                           new_min = min(xd.min() for xd in x_data_list if xd.size)
+                           new_max = max(xd.max() for xd in x_data_list if xd.size)
+                           print(f"DEBUG: Processed data range: {new_min:.6g} to {new_max:.6g}")
+                       elif x_full_list:
+                           print(f"DEBUG: Using original full data (no processing detected)")
+                           new_min = min(xf.min() for xf in x_full_list if xf.size)
+                           new_max = max(xf.max() for xf in x_full_list if xf.size)
+                       else:
+                           print("No original data available.")
+                           continue
+                       # Restore all data - use CURRENT PROCESSED data (preserves all processing steps)
+                       for i in range(len(labels)):
+                           if data_is_processed and hasattr(fig, '_full_processed_x_data_list') and i < len(fig._full_processed_x_data_list):
+                               # Use FULL processed data (preserves all processing: reduce + smooth + derivative)
+                               print(f"DEBUG: Auto restore curve {i+1}: Using full processed data ({len(fig._full_processed_x_data_list[i])} points)")
+                               xf = np.asarray(fig._full_processed_x_data_list[i], dtype=float).flatten()
+                               yf = np.asarray(fig._full_processed_y_data_list[i], dtype=float).flatten()
+                               yf_raw = yf - (offsets_list[i] if i < len(offsets_list) else 0.0)
+                           elif data_is_processed and i < len(x_data_list) and x_data_list[i].size > 0:
+                               # Fallback: use current processed data
+                               print(f"DEBUG: Auto restore curve {i+1}: Using current processed data ({len(x_data_list[i])} points)")
+                               xf = np.asarray(x_data_list[i], dtype=float).flatten()
+                               yf = np.asarray(y_data_list[i], dtype=float).flatten()
+                               yf_raw = yf - (offsets_list[i] if i < len(offsets_list) else 0.0)
+                           else:
+                               # Use full original data (no processing)
+                               print(f"DEBUG: Auto restore curve {i+1}: Using original full data")
+                               xf = x_full_list[i] if i < len(x_full_list) else x_data_list[i]
+                               yf_raw = raw_y_full_list[i] if i < len(raw_y_full_list) else (orig_y[i] if i < len(orig_y) else y_data_list[i])
+                           xf = np.asarray(xf, dtype=float).flatten()
+                           yf_raw = np.asarray(yf_raw, dtype=float).flatten()
+                           mask = (xf >= new_min) & (xf <= new_max)
+                           x_sub = np.asarray(xf[mask], dtype=float).flatten()
+                           y_sub_raw = np.asarray(yf_raw[mask], dtype=float).flatten()
+                           if x_sub.size == 0:
+                               ax.lines[i].set_data([], [])
+                               x_data_list[i] = np.array([], dtype=float)
+                               y_data_list[i] = np.array([], dtype=float)
+                               if i < len(orig_y):
+                                   orig_y[i] = np.array([], dtype=float)
+                               continue
+                           should_normalize = args.stack or getattr(args, 'norm', False)
+                           if should_normalize:
+                               if y_sub_raw.size:
+                                   y_min = float(y_sub_raw.min())
+                                   y_max = float(y_sub_raw.max())
+                                   span = y_max - y_min
+                                   if span > 0:
+                                       y_sub_norm = (y_sub_raw - y_min) / span
+                                   else:
+                                       y_sub_norm = np.zeros_like(y_sub_raw)
+                               else:
+                                   y_sub_norm = y_sub_raw
+                           else:
+                               y_sub_norm = y_sub_raw
+                           offset_val = offsets_list[i] if i < len(offsets_list) else 0.0
+                           y_with_offset = y_sub_norm + offset_val
+                           ax.lines[i].set_data(x_sub, y_with_offset)
+                           x_data_list[i] = np.asarray(x_sub, dtype=float).flatten()
+                           y_data_list[i] = np.asarray(y_with_offset, dtype=float).flatten()
+                           # Ensure orig_y list has enough elements
+                           while len(orig_y) <= i:
+                               orig_y.append(np.array([], dtype=float))
+                           # Create a new 1D array - ensure it's a proper numpy array
+                           # Handle all edge cases: scalar, 0-d array, multi-d array
+                           try:
+                               if isinstance(y_sub_norm, np.ndarray):
+                                   if y_sub_norm.ndim == 0:
+                                       y_sub_norm_1d = np.array([float(y_sub_norm)], dtype=float)
+                                   else:
+                                       y_sub_norm_1d = np.array(y_sub_norm.flatten(), dtype=float, copy=True)
+                               else:
+                                   # It's a scalar or list
+                                   y_sub_norm_1d = np.array(y_sub_norm, dtype=float).flatten()
+                               # Ensure it's 1D
+                               if y_sub_norm_1d.ndim != 1:
+                                   y_sub_norm_1d = y_sub_norm_1d.reshape(-1)
+                               # Replace list element - delete old one first if needed
+                               if i < len(orig_y):
+                                   del orig_y[i]
+                               orig_y.insert(i, y_sub_norm_1d)
+                           except Exception as e:
+                               # Fallback: just create a simple array
+                               try:
+                                   y_sub_norm_1d = np.array(y_sub_norm, dtype=float).ravel()
+                                   if i < len(orig_y):
+                                       orig_y[i] = y_sub_norm_1d
+                                   else:
+                                       orig_y.append(y_sub_norm_1d)
+                               except Exception:
+                                   # Last resort: skip orig_y update
+                                   pass
+                       ax.set_xlim(new_min, new_max)
+                       ax.relim(); ax.autoscale_view(scalex=False, scaley=True)
+                       update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
+                       try:
+                           if hasattr(ax, '_cif_extend_func'):
+                               ax._cif_extend_func(ax.get_xlim()[1])
+                       except Exception:
+                           pass
+                       try:
+                           if hasattr(ax, '_cif_draw_func'):
+                               ax._cif_draw_func()
+                       except Exception:
+                           pass
+                       fig.canvas.draw()
+                       print(f"X range restored to original: {ax.get_xlim()[0]:.6g} to {ax.get_xlim()[1]:.6g}")
+                   except Exception as e:
+                       print(f"Error during auto restore: {e}")
+                       import traceback
+                       traceback.print_exc()
+                   continue
+               push_state("xrange")
+               if rng.lower() == 'full':
+                   # Use full data if available, otherwise use current processed data
+                   if x_full_list and all(xf.size > 0 for xf in x_full_list):
                        new_min = min(xf.min() for xf in x_full_list if xf.size)
                        new_max = max(xf.max() for xf in x_full_list if xf.size)
                    else:
-
-
-
-
-
-
-
+                       new_min = min(xd.min() for xd in x_data_list if xd.size)
+                       new_max = max(xd.max() for xd in x_data_list if xd.size)
+               else:
+                   new_min, new_max = map(float, rng.split())
+               ax.set_xlim(new_min, new_max)
+               # Check if data has been processed (smooth/derivative/reduce)
+               data_is_processed = (hasattr(fig, '_original_x_data_list') or
+                                    hasattr(fig, '_smooth_settings') or
+                                    hasattr(fig, '_derivative_order') or
+                                    hasattr(fig, '_pre_derivative_x_data_list'))
+
+               for i in range(len(labels)):
+                   if data_is_processed and i < len(x_data_list) and x_data_list[i].size > 0:
+                       # Use full processed data if available (allows expansion), otherwise use current filtered data
+                       curr_x = np.asarray(x_data_list[i], dtype=float)
+                       curr_min = curr_x.min() if curr_x.size > 0 else float('inf')
+                       curr_max = curr_x.max() if curr_x.size > 0 else float('-inf')
+
+                       # Check if we need full processed data (for expansion beyond current filter)
+                       need_full = (new_min < curr_min or new_max > curr_max)
+
+                       if need_full and hasattr(fig, '_full_processed_x_data_list') and i < len(fig._full_processed_x_data_list):
+                           # Use full processed data to allow expansion
+                           full_x = np.asarray(fig._full_processed_x_data_list[i], dtype=float)
+                           if full_x.size > 0:
+                               full_min = full_x.min()
+                               full_max = full_x.max()
+                               print(f"DEBUG: Curve {i+1}: Expanding range ({curr_min:.6g}-{curr_max:.6g} -> {new_min:.6g}-{new_max:.6g}), using full processed data (range {full_min:.6g} to {full_max:.6g})")
+                               x_current = full_x
+                               y_current = np.asarray(fig._full_processed_y_data_list[i], dtype=float)
+                           else:
+                               print(f"DEBUG: Curve {i+1}: Full processed data empty, using current data")
+                               x_current = curr_x
+                               y_current = np.asarray(y_data_list[i], dtype=float)
+                       else:
+                           print(f"DEBUG: Curve {i+1}: Using current processed data (range {curr_min:.6g} to {curr_max:.6g}, requested {new_min:.6g} to {new_max:.6g})")
+                           x_current = curr_x
+                           y_current = np.asarray(y_data_list[i], dtype=float)
+                       # Remove offset for filtering
+                       if i < len(offsets_list):
+                           y_current_no_offset = y_current - offsets_list[i]
+                       else:
+                           y_current_no_offset = y_current.copy()
+                       mask = (x_current >= new_min) & (x_current <= new_max)
+                       x_sub = np.asarray(x_current[mask], dtype=float).flatten()
+                       y_sub = np.asarray(y_current_no_offset[mask], dtype=float).flatten()
                        if x_sub.size == 0:
                            ax.lines[i].set_data([], [])
-
+                           x_data_list[i] = np.array([], dtype=float)
+                           y_data_list[i] = np.array([], dtype=float)
+                           if i < len(orig_y):
+                               orig_y[i] = np.array([], dtype=float)
+                           continue
+                       # Restore offset
+                       if i < len(offsets_list):
+                           y_sub = y_sub + offsets_list[i]
+                       ax.lines[i].set_data(x_sub, y_sub)
+                       x_data_list[i] = np.asarray(x_sub, dtype=float).flatten()
+                       y_data_list[i] = np.asarray(y_sub, dtype=float).flatten()
+                       # Update orig_y
+                       # Update orig_y with robust method
+                       while len(orig_y) <= i:
+                           orig_y.append(np.array([], dtype=float))
+                       try:
+                           y_no_offset = y_sub - offsets_list[i] if i < len(offsets_list) else y_sub
+                           y_no_offset_1d = np.array(y_no_offset, dtype=float).ravel()
+                           if i < len(orig_y):
+                               del orig_y[i]
+                           orig_y.insert(i, y_no_offset_1d)
+                       except Exception:
+                           pass
+                   elif data_is_processed and i < len(x_data_list) and x_data_list[i].size > 0:
+                       # Fallback: use current data if _original_x_data_list not available
+                       x_current = np.asarray(x_data_list[i], dtype=float)
+                       y_current = np.asarray(y_data_list[i], dtype=float)
+                       mask = (x_current >= new_min) & (x_current <= new_max)
+                       x_sub = np.asarray(x_current[mask], dtype=float).flatten()
+                       y_sub = np.asarray(y_current[mask], dtype=float).flatten()
+                       if x_sub.size == 0:
+                           ax.lines[i].set_data([], [])
+                           x_data_list[i] = np.array([], dtype=float)
+                           y_data_list[i] = np.array([], dtype=float)
+                           if i < len(orig_y):
+                               orig_y[i] = np.array([], dtype=float)
+                           continue
+                       ax.lines[i].set_data(x_sub, y_sub)
+                       x_data_list[i] = np.asarray(x_sub, dtype=float).flatten()
+                       y_data_list[i] = np.asarray(y_sub, dtype=float).flatten()
+                       # Update orig_y - use same robust method as in 'a' branch
+                       while len(orig_y) <= i:
+                           orig_y.append(np.array([], dtype=float))
+                       try:
+                           y_no_offset = y_sub - offsets_list[i] if i < len(offsets_list) else y_sub
+                           if isinstance(y_no_offset, np.ndarray):
+                               if y_no_offset.ndim == 0:
+                                   y_no_offset_1d = np.array([float(y_no_offset)], dtype=float)
+                               else:
+                                   y_no_offset_1d = np.array(y_no_offset.flatten(), dtype=float, copy=True)
+                           else:
+                               y_no_offset_1d = np.array(y_no_offset, dtype=float).flatten()
+                           if y_no_offset_1d.ndim != 1:
+                               y_no_offset_1d = y_no_offset_1d.reshape(-1)
+                           if i < len(orig_y):
+                               del orig_y[i]
+                           orig_y.insert(i, y_no_offset_1d)
+                       except Exception:
+                           try:
+                               y_no_offset = y_sub - offsets_list[i] if i < len(offsets_list) else y_sub
+                               y_no_offset_1d = np.array(y_no_offset, dtype=float).ravel()
+                               if i < len(orig_y):
+                                   orig_y[i] = y_no_offset_1d
+                               else:
+                                   orig_y.append(y_no_offset_1d)
+                           except Exception:
+                               pass
+                   else:
+                       # Use original full data as source
+                       xf = x_full_list[i] if i < len(x_full_list) else x_data_list[i]
+                       yf_raw = raw_y_full_list[i] if i < len(raw_y_full_list) else (orig_y[i] if i < len(orig_y) else y_data_list[i])
+                       mask = (xf >= new_min) & (xf <= new_max)
+                       x_sub = np.array(xf[mask], copy=True)
+                       y_sub_raw = np.array(yf_raw[mask], copy=True)
+                       if x_sub.size == 0:
+                           ax.lines[i].set_data([], [])
+                           x_data_list[i] = np.array([])
+                           y_data_list[i] = np.array([])
+                           if i < len(orig_y):
+                               orig_y[i] = np.array([])
+                           continue
+                       # Auto-normalize for --stack mode, or explicit --norm flag
                        should_normalize = args.stack or getattr(args, 'norm', False)
                        if should_normalize:
                            if y_sub_raw.size:
@@ -2512,63 +3428,13 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
|
|
|
2512
3428
|
y_sub_norm = y_sub_raw
|
|
2513
3429
|
else:
|
|
2514
3430
|
y_sub_norm = y_sub_raw
|
|
2515
|
-
offset_val = offsets_list[i]
|
|
3431
|
+
offset_val = offsets_list[i] if i < len(offsets_list) else 0.0
|
|
2516
3432
|
y_with_offset = y_sub_norm + offset_val
|
|
2517
3433
|
ax.lines[i].set_data(x_sub, y_with_offset)
|
|
2518
3434
|
x_data_list[i] = x_sub
|
|
2519
3435
|
y_data_list[i] = y_with_offset
|
|
2520
|
-
|
|
2521
|
-
|
|
2522
|
-
ax.relim(); ax.autoscale_view(scalex=False, scaley=True)
|
|
2523
|
-
update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
|
|
2524
|
-
try:
|
|
2525
|
-
if hasattr(ax, '_cif_extend_func'):
|
|
2526
|
-
ax._cif_extend_func(ax.get_xlim()[1])
|
|
2527
|
-
except Exception:
|
|
2528
|
-
pass
|
|
2529
|
-
try:
|
|
2530
|
-
if hasattr(ax, '_cif_draw_func'):
|
|
2531
|
-
ax._cif_draw_func()
|
|
2532
|
-
except Exception:
|
|
2533
|
-
pass
|
|
2534
|
-
fig.canvas.draw()
|
|
2535
|
-
print(f"X range restored to original: {ax.get_xlim()[0]:.6g} to {ax.get_xlim()[1]:.6g}")
|
|
2536
|
-
continue
|
|
2537
|
-
push_state("xrange")
|
|
2538
|
-
if rng.lower() == 'full':
|
|
2539
|
-
new_min = min(xf.min() for xf in x_full_list if xf.size)
|
|
2540
|
-
new_max = max(xf.max() for xf in x_full_list if xf.size)
|
|
2541
|
-
else:
|
|
2542
|
-
new_min, new_max = map(float, rng.split())
|
|
2543
|
-
ax.set_xlim(new_min, new_max)
|
|
2544
|
-
for i in range(len(labels)):
|
|
2545
|
-
xf = x_full_list[i]; yf_raw = raw_y_full_list[i]
|
|
2546
|
-
mask = (xf>=new_min) & (xf<=new_max)
|
|
2547
|
-
x_sub = xf[mask]; y_sub_raw = yf_raw[mask]
|
|
2548
|
-
if x_sub.size == 0:
|
|
2549
|
-
ax.lines[i].set_data([], [])
|
|
2550
|
-
y_data_list[i] = np.array([]); orig_y[i] = np.array([]); continue
|
|
2551
|
-
# Auto-normalize for --stack mode, or explicit --norm flag
|
|
2552
|
-
should_normalize = args.stack or getattr(args, 'norm', False)
|
|
2553
|
-
if should_normalize:
|
|
2554
|
-
if y_sub_raw.size:
|
|
2555
|
-
y_min = float(y_sub_raw.min())
|
|
2556
|
-
y_max = float(y_sub_raw.max())
|
|
2557
|
-
span = y_max - y_min
|
|
2558
|
-
if span > 0:
|
|
2559
|
-
y_sub_norm = (y_sub_raw - y_min) / span
|
|
2560
|
-
else:
|
|
2561
|
-
y_sub_norm = np.zeros_like(y_sub_raw)
|
|
2562
|
-
else:
|
|
2563
|
-
y_sub_norm = y_sub_raw
|
|
2564
|
-
else:
|
|
2565
|
-
y_sub_norm = y_sub_raw
|
|
2566
|
-
offset_val = offsets_list[i]
|
|
2567
|
-
y_with_offset = y_sub_norm + offset_val
|
|
2568
|
-
ax.lines[i].set_data(x_sub, y_with_offset)
|
|
2569
|
-
x_data_list[i] = x_sub
|
|
2570
|
-
y_data_list[i] = y_with_offset
|
|
2571
|
-
orig_y[i] = y_sub_norm
|
|
3436
|
+
if i < len(orig_y):
|
|
3437
|
+
orig_y[i] = y_sub_norm
|
|
2572
3438
|
ax.relim(); ax.autoscale_view(scalex=False, scaley=True)
|
|
2573
3439
|
update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
|
|
2574
3440
|
# Extend CIF ticks after x-range change
|
|
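
The re-filtering above is easier to see in isolation: each curve is re-cut from its full-resolution source arrays whenever the X range changes, min-max normalized when --stack or --norm is active, and shifted by the curve's stored vertical offset. A minimal standalone sketch of that transform (the function name and signature are illustrative, not batplot API):

    import numpy as np

    def refilter_curve(x_full, y_full, x_min, x_max, offset=0.0, normalize=False):
        # Clip to the requested X window
        mask = (x_full >= x_min) & (x_full <= x_max)
        x_sub = np.array(x_full[mask], copy=True)
        y_sub = np.array(y_full[mask], copy=True)
        if normalize and y_sub.size:
            span = float(y_sub.max()) - float(y_sub.min())
            # Flat curves normalize to zeros, matching the span > 0 guard in the hunk
            y_sub = (y_sub - y_sub.min()) / span if span > 0 else np.zeros_like(y_sub)
        return x_sub, y_sub + offset
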
@@ -2697,7 +3563,98 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                     print(f"Y range set to ({float(ymin)}, {float(ymax)})")
                 except Exception as e:
                     print(f"Error setting Y-axis range: {e}")
-        elif key == 'd': # <--
+        elif key == 'd': # <-- DERIVATIVE HANDLER
+            while True:
+                try:
+                    print("\n\033[1mDerivative Menu\033[0m")
+                    print("Commands:")
+                    print(" 1: Calculate 1st derivative (dy/dx)")
+                    print(" 2: Calculate 2nd derivative (d²y/dx²)")
+                    print(" 3: Calculate reversed 1st derivative (dx/dy)")
+                    print(" 4: Calculate reversed 2nd derivative (d²x/dy²)")
+                    print(" reset: Reset to data before derivative")
+                    print(" q: back to main menu")
+                    sub = _safe_input(colorize_prompt("d> ")).strip().lower()
+                    if not sub or sub == 'q':
+                        break
+                    if sub == 'reset':
+                        push_state("derivative-reset")
+                        success, reset_count, total_points = _reset_from_derivative()
+                        if success:
+                            print(f"Reset {reset_count} curve(s) from derivative to original data ({total_points} total points restored).")
+                            ax.relim()
+                            ax.autoscale_view(scalex=False, scaley=True)
+                            update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
+                            _apply_data_changes()
+                        else:
+                            print("No derivative data to reset.")
+                        continue
+                    if sub in ('1', '2', '3', '4'):
+                        try:
+                            option = int(sub)
+                            is_reversed = (option == 3 or option == 4)
+                            order = 1 if option in (1, 3) else 2
+                            push_state(f"derivative-{option}")
+                            _ensure_pre_derivative_data()
+                            processed = 0
+                            total_points = 0
+                            for i in range(len(x_data_list)):
+                                try:
+                                    # Use current data (may already be processed)
+                                    current_x = x_data_list[i].copy()
+                                    current_y = y_data_list[i].copy()
+                                    # Remove offset for processing
+                                    if i < len(offsets_list):
+                                        current_y_no_offset = current_y - offsets_list[i]
+                                    else:
+                                        current_y_no_offset = current_y.copy()
+                                    n_points = len(current_y_no_offset)
+                                    if n_points < 2:
+                                        print(f"Curve {i+1} has too few points (<2) for derivative calculation.")
+                                        continue
+                                    # Calculate derivative
+                                    if is_reversed:
+                                        derivative_y = _calculate_reversed_derivative(current_x, current_y_no_offset, order)
+                                    else:
+                                        derivative_y = _calculate_derivative(current_x, current_y_no_offset, order)
+                                    if len(derivative_y) > 0:
+                                        # Restore offset
+                                        if i < len(offsets_list):
+                                            derivative_y = derivative_y + offsets_list[i]
+                                        # Update data (keep same x, replace y with derivative)
+                                        x_data_list[i] = current_x.copy()
+                                        y_data_list[i] = derivative_y
+                                        processed += 1
+                                        total_points += n_points
+                                except Exception as e:
+                                    print(f"Error processing curve {i+1}: {e}")
+                            if processed > 0:
+                                # Update y-axis label
+                                current_ylabel = ax.get_ylabel() or ""
+                                new_ylabel = _update_ylabel_for_derivative(order, current_ylabel, is_reversed=is_reversed)
+                                ax.set_ylabel(new_ylabel)
+                                # Store derivative order and reversed flag
+                                fig._derivative_order = order
+                                fig._derivative_reversed = is_reversed
+                                # Update plot
+                                _apply_data_changes()
+                                ax.relim()
+                                ax.autoscale_view(scalex=False, scaley=True)
+                                update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
+                                fig.canvas.draw_idle()
+                                order_name = "1st" if order == 1 else "2nd"
+                                direction = "reversed " if is_reversed else ""
+                                print(f"Applied {direction}{order_name} derivative to {processed} curve(s) with {total_points} total points.")
+                                print(f"Y-axis label updated to: {new_ylabel}")
+                                _update_full_processed_data()  # Store full processed data for X-range filtering
+                            else:
+                                print("No curves were processed.")
+                        except ValueError:
+                            print("Invalid input.")
+                            continue
+                except Exception as e:
+                    print(f"Error in derivative menu: {e}")
+        elif key == 'o': # <-- OFFSET HANDLER (now only reachable if not args.stack)
             print("\n\033[1mOffset adjustment menu:\033[0m")
             print(f" {colorize_menu('1-{}: adjust individual curve offset'.format(len(labels)))}")
             print(f" {colorize_menu('a: set spacing between curves')}")
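
The helpers `_calculate_derivative` and `_calculate_reversed_derivative` are referenced but not shown in this hunk. A plausible minimal implementation, assuming NumPy central differences (the packaged helpers may handle edge cases differently):

    import numpy as np

    def numeric_derivative(x, y, order=1, reversed_axes=False):
        # dy/dx by default; dx/dy when reversed_axes is True (menu options 3 and 4)
        a, b = (y, x) if reversed_axes else (x, y)
        d = np.gradient(b, a)
        if order == 2:
            d = np.gradient(d, a)  # second derivative = derivative applied twice
        return d

For the reversed variants this assumes the independent variable is strictly monotonic; otherwise np.gradient divides by zero spacing.
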
@@ -3622,8 +4579,13 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                     else:
                         print(f" {_i}: {fname}")
                 last_style_path = getattr(fig, '_last_style_export_path', None)
-                if last_style_path:
+                n_style = len(style_file_list) if style_file_list else 0
+                if last_style_path and n_style:
+                    sub = _safe_input(colorize_prompt("Style submenu: (e=export, o=overwrite last, q=return, r=refresh). Press number to overwrite: ")).strip().lower()
+                elif last_style_path:
                     sub = _safe_input(colorize_prompt("Style submenu: (e=export, o=overwrite last, q=return, r=refresh): ")).strip().lower()
+                elif n_style:
+                    sub = _safe_input(colorize_prompt("Style submenu: (e=export, q=return, r=refresh). Press number to overwrite: ")).strip().lower()
                 else:
                     sub = _safe_input(colorize_prompt("Style submenu: (e=export, q=return, r=refresh): ")).strip().lower()
                 if sub == 'q':
@@ -3647,6 +4609,19 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                         fig._last_style_export_path = exported_path
                         style_menu_active = False
                         break
+                if sub.isdigit() and n_style and 1 <= int(sub) <= n_style:
+                    # Overwrite listed style file by number (same as e → export → number)
+                    idx = int(sub) - 1
+                    target_path = style_file_list[idx][1]
+                    fname = style_file_list[idx][0]
+                    yn = _safe_input(f"Overwrite '{fname}'? (y/n): ").strip().lower()
+                    if yn != 'y':
+                        continue
+                    exported_path = export_style_config(None, base_path=None, overwrite_path=target_path)
+                    if exported_path:
+                        fig._last_style_export_path = exported_path
+                        style_menu_active = False
+                        break
                 if sub == 'e':
                     save_base = choose_save_path(source_file_paths, purpose="style export")
                     if not save_base:
@@ -3669,6 +4644,12 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                     if not fname:
                         print("Style import canceled.")
                         continue
+                    import os
+                    bname = os.path.basename(fname)
+                    yn = _safe_input(colorize_prompt(f"Apply style '{bname}'? (y/n): ")).strip().lower()
+                    if yn != 'y':
+                        print("Style import canceled.")
+                        continue
                     push_state("style-import")
                     apply_style_config(fname)
                 except Exception as e:
@@ -3797,6 +4778,668 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                 fig.canvas.draw()
             except Exception as e:
                 print(f"Error saving figure: {e}")
+        elif key == 'sm':
+            # Smoothing and data reduction menu
+            _ensure_original_data()
+            while True:
+                print("\n\033[1mSmoothing and Data Reduction\033[0m")
+                print("Commands:")
+                print(" r: reduce rows (delete/merge rows based on pattern)")
+                print(" s: smooth data (various smoothing methods)")
+                print(" reset: reset all curves to original data")
+                print(" q: back to main menu")
+                sub = _safe_input(colorize_prompt("sm> ")).strip().lower()
+                if not sub:
+                    continue
+                if sub == 'q':
+                    break
+                if sub == 'reset':
+                    push_state("smooth-reset")
+                    success, reset_count, total_points = _reset_to_original()
+                    if success:
+                        print(f"Reset {reset_count} curve(s) to original data ({total_points} total points restored).")
+                        _apply_data_changes()
+                    else:
+                        print("No processed data to reset.")
+                    continue
+                if sub == 'r':
+                    # Reduce rows submenu
+                    while True:
+                        print("\n\033[1mReduce Rows\033[0m")
+                        print("Methods:")
+                        print(" 1: Delete N rows, then skip M rows")
+                        print(" 2: Delete rows with missing values")
+                        print(" 3: Reduce N rows with merged values (average/sum/min/max)")
+                        print(" q: back to smooth menu")
+                        method = _safe_input(colorize_prompt("sm>r> ")).strip().lower()
+                        if not method or method == 'q':
+                            break
+                        if method == '1':
+                            # Delete N rows, then skip M rows
+                            try:
+                                # Check for last settings
+                                last_settings = _get_last_reduce_rows_settings('delete_skip')
+                                last_n = last_settings.get('n')
+                                last_m = last_settings.get('m')
+                                last_start_row = last_settings.get('start_row')
+
+                                if last_n is not None and last_m is not None and last_start_row is not None:
+                                    use_last = _safe_input(f"Use last settings? (N={last_n}, M={last_m}, start_row={last_start_row+1}, y/n or enter N): ").strip().lower()
+                                    # Check if user entered a number directly (skip "use last settings")
+                                    if use_last and use_last.replace('-', '').replace('.', '').isdigit():
+                                        n = int(float(use_last))
+                                        if n < 1:
+                                            print("N must be >= 1.")
+                                            continue
+                                        m_in = _safe_input(f"Enter M (rows to skip, default {last_m}): ").strip()
+                                        m = int(m_in) if m_in else last_m
+                                        if m < 0:
+                                            print("M must be >= 0.")
+                                            continue
+                                        start_in = _safe_input(f"Starting row (1-based, default {last_start_row+1}): ").strip()
+                                        start_row = int(start_in) - 1 if start_in else last_start_row
+                                    elif use_last != 'n':
+                                        n = last_n
+                                        m = last_m
+                                        start_row = last_start_row  # Already 0-based in config
+                                    else:
+                                        n_in = _safe_input(f"Enter N (rows to delete, default {last_n}): ").strip()
+                                        n = int(n_in) if n_in else last_n
+                                        if n < 1:
+                                            print("N must be >= 1.")
+                                            continue
+                                        m_in = _safe_input(f"Enter M (rows to skip, default {last_m}): ").strip()
+                                        m = int(m_in) if m_in else last_m
+                                        if m < 0:
+                                            print("M must be >= 0.")
+                                            continue
+                                        start_in = _safe_input(f"Starting row (1-based, default {last_start_row+1}): ").strip()
+                                        start_row = int(start_in) - 1 if start_in else last_start_row
+                                else:
+                                    n_in = _safe_input("Enter N (rows to delete, default 1): ").strip()
+                                    n = int(n_in) if n_in else 1
+                                    if n < 1:
+                                        print("N must be >= 1.")
+                                        continue
+                                    m_in = _safe_input("Enter M (rows to skip, default 0): ").strip()
+                                    m = int(m_in) if m_in else 0
+                                    if m < 0:
+                                        print("M must be >= 0.")
+                                        continue
+                                    start_in = _safe_input("Starting row (1-based, default 1): ").strip()
+                                    start_row = int(start_in) - 1 if start_in else 0
+
+                                if start_row < 0:
+                                    start_row = 0
+                                push_state("reduce-rows-delete-skip")
+                                _ensure_original_data()
+                                processed = 0
+                                total_before = 0
+                                total_after = 0
+                                for i in range(len(x_data_list)):
+                                    try:
+                                        # Use current data (may already be processed), not original
+                                        orig_x = x_data_list[i].copy()
+                                        orig_y = y_data_list[i].copy()
+                                        # Remove offset for processing
+                                        if i < len(offsets_list):
+                                            orig_y = orig_y - offsets_list[i]
+                                        if start_row >= len(orig_x):
+                                            continue
+                                        before = len(orig_x)
+                                        # Create mask: delete n rows, then skip m rows, repeat
+                                        mask = np.ones(len(orig_x), dtype=bool)
+                                        idx = start_row
+                                        while idx < len(orig_x):
+                                            # Delete n rows
+                                            end_del = min(idx + n, len(orig_x))
+                                            mask[idx:end_del] = False
+                                            idx = end_del
+                                            # Skip m rows
+                                            idx = min(idx + m, len(orig_x))
+                                        new_x = orig_x[mask]
+                                        new_y = orig_y[mask]
+                                        after = len(new_x)
+                                        if len(new_x) > 0:
+                                            # Restore offset
+                                            if i < len(offsets_list):
+                                                new_y = new_y + offsets_list[i]
+                                            x_data_list[i] = new_x
+                                            y_data_list[i] = new_y
+                                            processed += 1
+                                            total_before += before
+                                            total_after += after
+                                    except Exception as e:
+                                        print(f"Error processing curve {i+1}: {e}")
+                                if processed > 0:
+                                    removed = total_before - total_after
+                                    pct = 100 * removed / total_before if total_before else 0
+                                    print(f"Processed {processed} curve(s); removed {removed} of {total_before} points ({pct:.1f}%).")
+                                    _update_full_processed_data()  # Store full processed data for X-range filtering
+                                    _apply_data_changes()
+                                    # Save settings for next time
+                                    _save_last_reduce_rows_settings('delete_skip', {
+                                        'n': n,
+                                        'm': m,
+                                        'start_row': start_row  # Save as 0-based
+                                    })
+                                else:
+                                    print("No curves were processed.")
+                            except ValueError:
+                                print("Invalid number.")
+                            continue
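
The delete/skip mask above alternates between blanking N entries and keeping M entries. A worked example with N=1, M=2 starting at row 0 (every third row is dropped):

    import numpy as np

    x = np.arange(9)
    n, m = 1, 2
    mask = np.ones(x.size, dtype=bool)
    idx = 0
    while idx < x.size:
        end_del = min(idx + n, x.size)
        mask[idx:end_del] = False       # delete n rows
        idx = min(end_del + m, x.size)  # then keep m rows
    print(x[mask])  # [1 2 4 5 7 8]
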
+                        if method == '2':
+                            # Delete rows with missing values
+                            try:
+                                # Check for last settings
+                                last_settings = _get_last_reduce_rows_settings('delete_missing')
+                                last_delete_entire_row = last_settings.get('delete_entire_row')
+
+                                if last_delete_entire_row is not None:
+                                    default_str = "y" if last_delete_entire_row else "n"
+                                    use_last = _safe_input(f"Use last settings? (delete_entire_row={'y' if last_delete_entire_row else 'n'}, y/n or enter y/n): ").strip().lower()
+                                    # Check if user entered y/n directly (skip "use last settings")
+                                    if use_last in ('y', 'n', 'yes', 'no'):
+                                        delete_entire_row = use_last in ('y', 'yes')
+                                    elif use_last != 'n':
+                                        delete_entire_row = last_delete_entire_row
+                                    else:
+                                        delete_entire_row_in = _safe_input(f"Delete entire row? (y/n, default {default_str}): ").strip().lower()
+                                        delete_entire_row = delete_entire_row_in != 'n'
+                                else:
+                                    delete_entire_row_in = _safe_input("Delete entire row? (y/n, default y): ").strip().lower()
+                                    delete_entire_row = delete_entire_row_in != 'n'
+                                push_state("reduce-rows-delete-missing")
+                                _ensure_original_data()
+                                processed = 0
+                                total_before = 0
+                                total_after = 0
+                                for i in range(len(x_data_list)):
+                                    try:
+                                        # Use current data (may already be processed), not original
+                                        orig_x = x_data_list[i].copy()
+                                        orig_y = y_data_list[i].copy()
+                                        # Remove offset for processing
+                                        if i < len(offsets_list):
+                                            orig_y = orig_y - offsets_list[i]
+                                        before = len(orig_x)
+                                        # Check for missing values (NaN or inf)
+                                        if delete_entire_row:
+                                            mask = np.isfinite(orig_x) & np.isfinite(orig_y)
+                                        else:
+                                            # Only delete missing in current column
+                                            mask = np.isfinite(orig_y)
+                                        new_x = orig_x[mask]
+                                        new_y = orig_y[mask]
+                                        after = len(new_x)
+                                        if len(new_x) > 0:
+                                            # Restore offset
+                                            if i < len(offsets_list):
+                                                new_y = new_y + offsets_list[i]
+                                            x_data_list[i] = new_x
+                                            y_data_list[i] = new_y
+                                            processed += 1
+                                            total_before += before
+                                            total_after += after
+                                    except Exception as e:
+                                        print(f"Error processing curve {i+1}: {e}")
+                                if processed > 0:
+                                    removed = total_before - total_after
+                                    pct = 100 * removed / total_before if total_before else 0
+                                    print(f"Processed {processed} curve(s); removed {removed} of {total_before} points ({pct:.1f}%).")
+                                    _update_full_processed_data()  # Store full processed data for X-range filtering
+                                    _apply_data_changes()
+                                    # Save settings for next time
+                                    _save_last_reduce_rows_settings('delete_missing', {
+                                        'delete_entire_row': delete_entire_row
+                                    })
+                                else:
+                                    print("No curves were processed.")
+                            except Exception:
+                                print("Error processing data.")
+                            continue
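
Method 2 is a straight finite-value filter: np.isfinite rejects both NaN and ±inf, and combining the X and Y masks drops a row when either column is bad. A quick check of that behavior:

    import numpy as np

    x = np.array([0.0, 1.0, 2.0, 3.0])
    y = np.array([5.0, np.nan, np.inf, 8.0])
    mask = np.isfinite(x) & np.isfinite(y)
    print(x[mask], y[mask])  # [0. 3.] [5. 8.]
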
+                        if method == '3':
+                            # Reduce N rows with merged values
+                            try:
+                                # Check for last settings
+                                last_settings = _get_last_reduce_rows_settings('merge')
+                                last_n = last_settings.get('n')
+                                last_merge_by = last_settings.get('merge_by')
+                                last_start_row = last_settings.get('start_row')
+
+                                if last_n is not None and last_merge_by is not None and last_start_row is not None:
+                                    merge_names = {
+                                        '1': 'First point',
+                                        '2': 'Last point',
+                                        '3': 'Average',
+                                        '4': 'Min',
+                                        '5': 'Max',
+                                        '6': 'Sum'
+                                    }
+                                    merge_name = merge_names.get(last_merge_by, 'Average')
+                                    use_last = _safe_input(f"Use last settings? (N={last_n}, merge_by={merge_name}, start_row={last_start_row+1}, y/n or enter N): ").strip().lower()
+                                    # Check if user entered a number directly (skip "use last settings")
+                                    if use_last and use_last.replace('-', '').replace('.', '').isdigit():
+                                        n = int(float(use_last))
+                                        if n < 2:
+                                            print("N must be >= 2.")
+                                            continue
+                                        print("Merge by:")
+                                        print(" 1: First point")
+                                        print(" 2: Last point")
+                                        print(" 3: Average")
+                                        print(" 4: Min")
+                                        print(" 5: Max")
+                                        print(" 6: Sum")
+                                        merge_by_in = _safe_input(f"Choose (1-6, default {last_merge_by}): ").strip()
+                                        merge_by = merge_by_in if merge_by_in else last_merge_by
+                                        start_in = _safe_input(f"Starting row (1-based, default {last_start_row+1}): ").strip()
+                                        start_row = int(start_in) - 1 if start_in else last_start_row
+                                    elif use_last != 'n':
+                                        n = last_n
+                                        merge_by = last_merge_by
+                                        start_row = last_start_row  # Already 0-based in config
+                                    else:
+                                        n_in = _safe_input(f"Enter N (rows to merge, default {last_n}): ").strip()
+                                        n = int(n_in) if n_in else last_n
+                                        if n < 2:
+                                            print("N must be >= 2.")
+                                            continue
+                                        print("Merge by:")
+                                        print(" 1: First point")
+                                        print(" 2: Last point")
+                                        print(" 3: Average")
+                                        print(" 4: Min")
+                                        print(" 5: Max")
+                                        print(" 6: Sum")
+                                        merge_by_in = _safe_input(f"Choose (1-6, default {last_merge_by}): ").strip()
+                                        merge_by = merge_by_in if merge_by_in else last_merge_by
+                                        start_in = _safe_input(f"Starting row (1-based, default {last_start_row+1}): ").strip()
+                                        start_row = int(start_in) - 1 if start_in else last_start_row
+                                else:
+                                    n_in = _safe_input("Enter N (rows to merge, default 2): ").strip()
+                                    n = int(n_in) if n_in else 2
+                                    if n < 2:
+                                        print("N must be >= 2.")
+                                        continue
+                                    print("Merge by:")
+                                    print(" 1: First point")
+                                    print(" 2: Last point")
+                                    print(" 3: Average")
+                                    print(" 4: Min")
+                                    print(" 5: Max")
+                                    print(" 6: Sum")
+                                    merge_by_in = _safe_input("Choose (1-6, default 3): ").strip()
+                                    merge_by = merge_by_in if merge_by_in else '3'
+                                    start_in = _safe_input("Starting row (1-based, default 1): ").strip()
+                                    start_row = int(start_in) - 1 if start_in else 0
+
+                                if start_row < 0:
+                                    start_row = 0
+
+                                merge_funcs = {
+                                    '1': lambda arr: arr[0] if len(arr) > 0 else np.nan,
+                                    '2': lambda arr: arr[-1] if len(arr) > 0 else np.nan,
+                                    '3': np.nanmean,
+                                    '4': np.nanmin,
+                                    '5': np.nanmax,
+                                    '6': np.nansum,
+                                }
+                                merge_func = merge_funcs.get(merge_by, np.nanmean)
+                                push_state("reduce-rows-merge")
+                                _ensure_original_data()
+                                processed = 0
+                                total_before = 0
+                                total_after = 0
+                                for i in range(len(x_data_list)):
+                                    try:
+                                        # Use current data (may already be processed), not original
+                                        orig_x = x_data_list[i].copy()
+                                        orig_y = y_data_list[i].copy()
+                                        # Remove offset for processing
+                                        if i < len(offsets_list):
+                                            orig_y = orig_y - offsets_list[i]
+                                        if start_row >= len(orig_x):
+                                            continue
+                                        before = len(orig_x)
+                                        # Group into chunks of N
+                                        new_x_list = []
+                                        new_y_list = []
+                                        idx = 0
+                                        while idx < start_row:
+                                            new_x_list.append(orig_x[idx])
+                                            new_y_list.append(orig_y[idx])
+                                            idx += 1
+                                        while idx < len(orig_x):
+                                            end_idx = min(idx + n, len(orig_x))
+                                            chunk_x = orig_x[idx:end_idx]
+                                            chunk_y = orig_y[idx:end_idx]
+                                            # Merge: use first x, merge y based on method
+                                            new_x = chunk_x[0] if len(chunk_x) > 0 else np.nan
+                                            new_y = merge_func(chunk_y) if len(chunk_y) > 0 else np.nan
+                                            if np.isfinite(new_x) and np.isfinite(new_y):
+                                                new_x_list.append(new_x)
+                                                new_y_list.append(new_y)
+                                            idx = end_idx
+                                        if len(new_x_list) > 0:
+                                            new_x = np.array(new_x_list)
+                                            new_y = np.array(new_y_list)
+                                            after = len(new_x)
+                                            # Restore offset
+                                            if i < len(offsets_list):
+                                                new_y = new_y + offsets_list[i]
+                                            x_data_list[i] = new_x
+                                            y_data_list[i] = new_y
+                                            processed += 1
+                                            total_before += before
+                                            total_after += after
+                                    except Exception as e:
+                                        print(f"Error processing curve {i+1}: {e}")
+                                if processed > 0:
+                                    removed = total_before - total_after
+                                    pct = 100 * removed / total_before if total_before else 0
+                                    print(f"Processed {processed} curve(s); reduced {total_before} to {total_after} points (removed {removed}, {pct:.1f}%).")
+                                    _update_full_processed_data()  # Store full processed data for X-range filtering
+                                    _apply_data_changes()
+                                    # Save settings for next time
+                                    _save_last_reduce_rows_settings('merge', {
+                                        'n': n,
+                                        'merge_by': merge_by,
+                                        'start_row': start_row  # Save as 0-based
+                                    })
+                                else:
+                                    print("No curves were processed.")
+                            except (ValueError, KeyError):
+                                print("Invalid input.")
+                            continue
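
Method 3 is chunked downsampling: each group of N rows collapses to the first X of the group and a merged Y. With start_row=0 and a length that is an exact multiple of N, the loop above reduces to this short sketch (not the shipped code):

    import numpy as np

    x = np.arange(6, dtype=float)
    y = np.array([1.0, 3.0, 5.0, 7.0, 9.0, 11.0])
    n = 2
    new_x = x[::n]                                                   # first x of each chunk
    new_y = np.array([np.nanmean(y[i:i + n]) for i in range(0, y.size, n)])
    print(new_x, new_y)  # [0. 2. 4.] [2. 6. 10.]
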
+                if sub == 's':
+                    # Smooth submenu
+                    while True:
+                        print("\n\033[1mSmooth Data\033[0m")
+                        print("Methods:")
+                        print(" 1: Adjacent-Averaging (moving average)")
+                        print(" 2: Savitzky-Golay (polynomial smoothing)")
+                        print(" 3: FFT Filter (low-pass frequency filter)")
+                        print(" q: back to smooth menu")
+                        method = _safe_input(colorize_prompt("sm>s> ")).strip().lower()
+                        if not method or method == 'q':
+                            break
+                        if method == '1':
+                            # Adjacent-Averaging
+                            try:
+                                # Check for last settings (from config file for persistence)
+                                config_settings = _get_last_smooth_settings_from_config()
+                                session_settings = getattr(fig, '_last_smooth_settings', {})
+                                # Prefer config settings (persistent) over session settings
+                                last_settings = config_settings if config_settings.get('method') == 'adjacent_average' else session_settings
+                                last_method = last_settings.get('method')
+                                last_points = last_settings.get('points')
+
+                                if last_method == 'adjacent_average' and last_points is not None:
+                                    use_last = _safe_input(f"Use last settings? (points={last_points}, y/n or enter points): ").strip().lower()
+                                    # Check if user entered a number directly (skip "use last settings")
+                                    if use_last and use_last.replace('-', '').replace('.', '').isdigit():
+                                        points = int(float(use_last))
+                                    elif use_last != 'n':
+                                        points = last_points
+                                    else:
+                                        points_in = _safe_input(f"Number of points (default {last_points}): ").strip()
+                                        points = int(points_in) if points_in else last_points
+                                else:
+                                    points_in = _safe_input("Number of points (default 5): ").strip()
+                                    points = int(points_in) if points_in else 5
+
+                                if points < 2:
+                                    print("Points must be >= 2.")
+                                    continue
+                                push_state("smooth-adjacent-average")
+                                _ensure_original_data()
+                                processed = 0
+                                total_points = 0
+                                for i in range(len(x_data_list)):
+                                    try:
+                                        # Use current data (may already be processed), not original
+                                        orig_x = x_data_list[i].copy()
+                                        orig_y = y_data_list[i].copy()
+                                        # Remove offset for processing
+                                        if i < len(offsets_list):
+                                            orig_y = orig_y - offsets_list[i]
+                                        n_points = len(orig_y)
+                                        # Apply smoothing
+                                        smoothed_y = _adjacent_average_smooth(orig_y, points)
+                                        if len(smoothed_y) > 0:
+                                            # Restore offset
+                                            if i < len(offsets_list):
+                                                smoothed_y = smoothed_y + offsets_list[i]
+                                            # Keep original x, update y
+                                            x_data_list[i] = orig_x.copy()
+                                            y_data_list[i] = smoothed_y
+                                            processed += 1
+                                            total_points += n_points
+                                    except Exception as e:
+                                        print(f"Error processing curve {i+1}: {e}")
+                                if processed > 0:
+                                    print(f"Smoothed {processed} curve(s) with {total_points} total points using Adjacent-Averaging (window={points}).")
+                                    _update_full_processed_data()  # Store full processed data for X-range filtering
+                                    _apply_data_changes()
+                                    # Store settings (both current and last)
+                                    if not hasattr(fig, '_smooth_settings'):
+                                        fig._smooth_settings = {}
+                                    fig._smooth_settings['method'] = 'adjacent_average'
+                                    fig._smooth_settings['points'] = points
+                                    # Store as last settings for next time (both in-memory and config file)
+                                    if not hasattr(fig, '_last_smooth_settings'):
+                                        fig._last_smooth_settings = {}
+                                    fig._last_smooth_settings['method'] = 'adjacent_average'
+                                    fig._last_smooth_settings['points'] = points
+                                    # Save to config file for persistence across sessions
+                                    _save_last_smooth_settings_to_config({
+                                        'method': 'adjacent_average',
+                                        'points': points
+                                    })
+                                else:
+                                    print("No curves were smoothed.")
+                            except ValueError:
+                                print("Invalid number.")
+                            continue
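
`_adjacent_average_smooth` is not part of this hunk. A minimal edge-corrected moving average that would match the menu's semantics (window = number of points) is sketched below; the packaged helper may treat the boundaries differently:

    import numpy as np

    def adjacent_average(y, window):
        kernel = np.ones(window)
        num = np.convolve(y, kernel, mode='same')
        den = np.convolve(np.ones_like(y), kernel, mode='same')  # shorter effective window near the edges
        return num / den
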
+                        if method == '2':
+                            # Savitzky-Golay
+                            try:
+                                # Check for last settings (from config file for persistence)
+                                config_settings = _get_last_smooth_settings_from_config()
+                                session_settings = getattr(fig, '_last_smooth_settings', {})
+                                # Prefer config settings (persistent) over session settings
+                                last_settings = config_settings if config_settings.get('method') == 'savgol' else session_settings
+                                last_method = last_settings.get('method')
+                                last_window = last_settings.get('window')
+                                last_poly = last_settings.get('poly')
+
+                                if last_method == 'savgol' and last_window is not None and last_poly is not None:
+                                    use_last = _safe_input(f"Use last settings? (window={last_window}, poly={last_poly}, y/n or enter window): ").strip().lower()
+                                    # Check if user entered a number directly (skip "use last settings")
+                                    if use_last and use_last.replace('-', '').replace('.', '').isdigit():
+                                        window = int(float(use_last))
+                                        if window < 3:
+                                            window = 3
+                                        if window % 2 == 0:
+                                            window += 1
+                                        poly_in = _safe_input(f"Polynomial order (default {last_poly}): ").strip()
+                                        poly = int(poly_in) if poly_in else last_poly
+                                    elif use_last != 'n':
+                                        window = last_window
+                                        poly = last_poly
+                                    else:
+                                        window_in = _safe_input(f"Window size (odd >= 3, default {last_window}): ").strip()
+                                        window = int(window_in) if window_in else last_window
+                                        if window < 3:
+                                            window = 3
+                                        if window % 2 == 0:
+                                            window += 1
+                                        poly_in = _safe_input(f"Polynomial order (default {last_poly}): ").strip()
+                                        poly = int(poly_in) if poly_in else last_poly
+                                else:
+                                    window_in = _safe_input("Window size (odd >= 3, default 9): ").strip()
+                                    window = int(window_in) if window_in else 9
+                                    if window < 3:
+                                        window = 3
+                                    if window % 2 == 0:
+                                        window += 1
+                                    poly_in = _safe_input("Polynomial order (default 3): ").strip()
+                                    poly = int(poly_in) if poly_in else 3
+
+                                if poly < 1:
+                                    poly = 1
+                                if poly >= window:
+                                    poly = window - 1
+                                push_state("smooth-savgol")
+                                _ensure_original_data()
+                                processed = 0
+                                total_points = 0
+                                for i in range(len(x_data_list)):
+                                    try:
+                                        # Use current data (may already be processed), not original
+                                        orig_x = x_data_list[i].copy()
+                                        orig_y = y_data_list[i].copy()
+                                        # Remove offset for processing
+                                        if i < len(offsets_list):
+                                            orig_y = orig_y - offsets_list[i]
+                                        n_points = len(orig_y)
+                                        # Apply smoothing
+                                        smoothed_y = _savgol_smooth(orig_y, window, poly)
+                                        if len(smoothed_y) > 0:
+                                            # Restore offset
+                                            if i < len(offsets_list):
+                                                smoothed_y = smoothed_y + offsets_list[i]
+                                            # Keep original x, update y
+                                            x_data_list[i] = orig_x.copy()
+                                            y_data_list[i] = smoothed_y
+                                            processed += 1
+                                            total_points += n_points
+                                    except Exception as e:
+                                        print(f"Error processing curve {i+1}: {e}")
+                                if processed > 0:
+                                    print(f"Smoothed {processed} curve(s) with {total_points} total points using Savitzky-Golay (window={window}, poly={poly}).")
+                                    _update_full_processed_data()  # Store full processed data for X-range filtering
+                                    _apply_data_changes()
+                                    # Store settings (both current and last)
+                                    if not hasattr(fig, '_smooth_settings'):
+                                        fig._smooth_settings = {}
+                                    fig._smooth_settings['method'] = 'savgol'
+                                    fig._smooth_settings['window'] = window
+                                    fig._smooth_settings['poly'] = poly
+                                    # Store as last settings for next time (both in-memory and config file)
+                                    if not hasattr(fig, '_last_smooth_settings'):
+                                        fig._last_smooth_settings = {}
+                                    fig._last_smooth_settings['method'] = 'savgol'
+                                    fig._last_smooth_settings['window'] = window
+                                    fig._last_smooth_settings['poly'] = poly
+                                    # Save to config file for persistence across sessions
+                                    _save_last_smooth_settings_to_config({
+                                        'method': 'savgol',
+                                        'window': window,
+                                        'poly': poly
+                                    })
+                                else:
+                                    print("No curves were smoothed.")
+                            except ValueError:
+                                print("Invalid number.")
+                            continue
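
The validation above (odd window >= 3, polynomial order below the window) matches the preconditions of SciPy's Savitzky-Golay filter, which `_savgol_smooth` plausibly wraps; a sketch under that assumption:

    import numpy as np
    from scipy.signal import savgol_filter

    rng = np.random.default_rng(0)
    y = np.sin(np.linspace(0, 4 * np.pi, 200)) + 0.1 * rng.normal(size=200)
    smoothed = savgol_filter(y, window_length=9, polyorder=3)  # fits a cubic in each 9-point window
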
+                        if method == '3':
+                            # FFT Filter
+                            try:
+                                # Check for last settings (from config file for persistence)
+                                config_settings = _get_last_smooth_settings_from_config()
+                                session_settings = getattr(fig, '_last_smooth_settings', {})
+                                # Prefer config settings (persistent) over session settings
+                                last_settings = config_settings if config_settings.get('method') == 'fft' else session_settings
+                                last_method = last_settings.get('method')
+                                last_points = last_settings.get('points')
+                                last_cutoff = last_settings.get('cutoff')
+
+                                if last_method == 'fft' and last_points is not None and last_cutoff is not None:
+                                    use_last = _safe_input(f"Use last settings? (points={last_points}, cutoff={last_cutoff:.3f}, y/n or enter points): ").strip().lower()
+                                    # Check if user entered a number directly (skip "use last settings")
+                                    if use_last and use_last.replace('-', '').replace('.', '').isdigit():
+                                        points = int(float(use_last))
+                                        if points < 2:
+                                            points = 2
+                                        cutoff_in = _safe_input(f"Cutoff frequency (0-1, default {last_cutoff:.3f}): ").strip()
+                                        cutoff = float(cutoff_in) if cutoff_in else last_cutoff
+                                    elif use_last != 'n':
+                                        points = last_points
+                                        cutoff = last_cutoff
+                                    else:
+                                        points_in = _safe_input(f"Points for FFT (default {last_points}): ").strip()
+                                        points = int(points_in) if points_in else last_points
+                                        if points < 2:
+                                            points = 2
+                                        cutoff_in = _safe_input(f"Cutoff frequency (0-1, default {last_cutoff:.3f}): ").strip()
+                                        cutoff = float(cutoff_in) if cutoff_in else last_cutoff
+                                else:
+                                    points_in = _safe_input("Points for FFT (default 5): ").strip()
+                                    points = int(points_in) if points_in else 5
+                                    if points < 2:
+                                        points = 2
+                                    cutoff_in = _safe_input("Cutoff frequency (0-1, default 0.1): ").strip()
+                                    cutoff = float(cutoff_in) if cutoff_in else 0.1
+
+                                if cutoff < 0:
+                                    cutoff = 0
+                                if cutoff > 1:
+                                    cutoff = 1
+                                push_state("smooth-fft")
+                                _ensure_original_data()
+                                processed = 0
+                                total_points = 0
+                                for i in range(len(x_data_list)):
+                                    try:
+                                        # Use current data (may already be processed), not original
+                                        orig_x = x_data_list[i].copy()
+                                        orig_y = y_data_list[i].copy()
+                                        # Remove offset for processing
+                                        if i < len(offsets_list):
+                                            orig_y = orig_y - offsets_list[i]
+                                        n_points = len(orig_y)
+                                        # Apply smoothing
+                                        smoothed_y = _fft_smooth(orig_y, points, cutoff)
+                                        if len(smoothed_y) > 0:
+                                            # Restore offset
+                                            if i < len(offsets_list):
+                                                smoothed_y = smoothed_y + offsets_list[i]
+                                            # Keep original x, update y
+                                            x_data_list[i] = orig_x.copy()
+                                            y_data_list[i] = smoothed_y
+                                            processed += 1
+                                            total_points += n_points
+                                    except Exception as e:
+                                        print(f"Error processing curve {i+1}: {e}")
+                                if processed > 0:
+                                    print(f"Smoothed {processed} curve(s) with {total_points} total points using FFT Filter (cutoff={cutoff:.3f}).")
+                                    _update_full_processed_data()  # Store full processed data for X-range filtering
+                                    _apply_data_changes()
+                                    # Store settings (both current and last)
+                                    if not hasattr(fig, '_smooth_settings'):
+                                        fig._smooth_settings = {}
+                                    fig._smooth_settings['method'] = 'fft'
+                                    fig._smooth_settings['points'] = points
+                                    fig._smooth_settings['cutoff'] = cutoff
+                                    # Store as last settings for next time (both in-memory and config file)
+                                    if not hasattr(fig, '_last_smooth_settings'):
+                                        fig._last_smooth_settings = {}
+                                    fig._last_smooth_settings['method'] = 'fft'
+                                    fig._last_smooth_settings['points'] = points
+                                    fig._last_smooth_settings['cutoff'] = cutoff
+                                    # Save to config file for persistence across sessions
+                                    _save_last_smooth_settings_to_config({
+                                        'method': 'fft',
+                                        'points': points,
+                                        'cutoff': cutoff
+                                    })
+                                else:
+                                    print("No curves were smoothed.")
+                            except ValueError:
+                                print("Invalid number.")
+                            continue
         elif key == 'v':
             while True:
                 try:
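
`_fft_smooth(orig_y, points, cutoff)` is likewise external to this hunk, and the role of its `points` argument is not visible here. Assuming the cutoff in (0, 1) selects the retained fraction of the frequency band, a low-pass over the real FFT could look like:

    import numpy as np

    def fft_lowpass(y, cutoff):
        spectrum = np.fft.rfft(y)
        freqs = np.fft.rfftfreq(y.size)       # 0 .. 0.5 cycles/sample
        spectrum[freqs > cutoff * 0.5] = 0.0  # keep the lowest `cutoff` fraction of the band
        return np.fft.irfft(spectrum, n=y.size)
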
@@ -3836,6 +5479,7 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
 
                     print("\n--- Peak Report ---")
                     print(f"X range used: {x_min} .. {x_max} (relative height threshold={min_frac})")
+                    all_peak_results = []  # list of (curve_index, label, [(x, y), ...])
                     for i, (x_arr, y_off) in enumerate(zip(x_data_list, y_data_list)):
                         # Recover original curve (remove vertical offset)
                         if i < len(offsets_list):
@@ -3889,9 +5533,50 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                                 last_idx = pi
 
                         print(" Peaks (x, y):")
+                        peak_xy_list = []
                         for pi in peaks:
+                            px, py = float(x_sel[pi]), float(y_sel[pi])
+                            peak_xy_list.append((px, py))
                             print(f" x={x_sel[pi]:.6g}, y={y_sel[pi]:.6g}")
+                        if peak_xy_list:
+                            all_peak_results.append((i + 1, label, peak_xy_list))
                     print("\n--- End Peak Report ---\n")
+
+                    # Export peaks to file
+                    if all_peak_results:
+                        export_yn = _safe_input("Export peaks to file? (y/n): ").strip().lower()
+                        if export_yn == 'y':
+                            folder = choose_save_path(source_file_paths, purpose="peak export")
+                            if folder:
+                                print(f"\nChosen path: {folder}")
+                                fname = _safe_input("Export filename (default: peaks.txt): ").strip()
+                                if not fname:
+                                    fname = "peaks.txt"
+                                if not fname.endswith('.txt'):
+                                    fname += '.txt'
+                                import os
+                                target = fname if os.path.isabs(fname) else os.path.join(folder, fname)
+                                do_write = not os.path.exists(target)
+                                if os.path.exists(target):
+                                    ow = _safe_input(f"'{os.path.basename(target)}' exists. Overwrite? (y/n): ").strip().lower()
+                                    if ow == 'y':
+                                        do_write = True
+                                    else:
+                                        print("Export canceled.")
+                                if do_write:
+                                    try:
+                                        with open(target, 'w') as f:
+                                            f.write("# Curve\tLabel\tPeak x\tPeak y\n")
+                                            for curve_idx, label, peak_xy_list in all_peak_results:
+                                                for px, py in peak_xy_list:
+                                                    f.write(f"{curve_idx}\t{label}\t{px:.6g}\t{py:.6g}\n")
+                                        total_peaks = sum(len(pairs) for _, _, pairs in all_peak_results)
+                                        print(f"Peak positions exported to {target}")
+                                        print(f"Found {total_peaks} peaks across {len(all_peak_results)} curves.")
+                                    except Exception as e:
+                                        print(f"Error saving file: {e}")
+                            else:
+                                print("Export canceled.")
                 except Exception as e:
                     print(f"Error finding peaks: {e}")