batplot 1.8.4__py3-none-any.whl → 1.8.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of batplot might be problematic.

batplot/interactive.py CHANGED
@@ -57,6 +57,7 @@ from .color_utils import (
     ensure_colormap,
     _CUSTOM_CMAPS,
 )
+from .config import load_config, save_config
 
 
 class _FilterIMKWarning:
@@ -253,16 +254,16 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
         has_cif = bool(getattr(_bp, 'cif_tick_series', None))
     except Exception:
         pass
-    col1 = ["c: colors", "f: font", "l: line", "t: toggle axes", "g: size", "h: legend"]
+    col1 = ["c: colors", "f: font", "l: line", "t: toggle axes", "g: size", "h: legend", "sm: smooth"]
     if has_cif:
         col1.append("z: hkl")
         col1.append("j: CIF titles")
-    col2 = ["a: rearrange", "d: offset", "r: rename", "x: change X", "y: change Y"]
+    col2 = ["a: rearrange", "o: offset", "r: rename", "x: change X", "y: change Y", "d: derivative"]
     col3 = ["v: find peaks", "n: crosshair", "p: print(export) style/geom", "i: import style/geom", "e: export figure", "s: save project", "b: undo", "q: quit"]
 
     # Hide offset/y-range in stack mode
     if args.stack:
-        col2 = [item for item in col2 if not item.startswith("d:") and not item.startswith("y:")]
+        col2 = [item for item in col2 if not item.startswith("o:") and not item.startswith("y:")]
 
     if not is_diffraction:
         col3 = [item for item in col3 if not item.startswith("n:")]
@@ -884,7 +885,17 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
     # NEW: style / diagnostics printer (clean version)
     def print_style_info():
         cts = getattr(_bp, 'cif_tick_series', None) if _bp is not None else None
-        show_hkl = bool(getattr(_bp, 'show_cif_hkl', False)) if _bp is not None else None
+        # Read show_cif_hkl from __main__ module (where it's stored when toggled)
+        show_hkl = None
+        try:
+            _bp_module = sys.modules.get('__main__')
+            if _bp_module is not None and hasattr(_bp_module, 'show_cif_hkl'):
+                show_hkl = bool(getattr(_bp_module, 'show_cif_hkl', False))
+        except Exception:
+            pass
+        # Fall back to _bp object if not in __main__
+        if show_hkl is None and _bp is not None:
+            show_hkl = bool(getattr(_bp, 'show_cif_hkl', False)) if hasattr(_bp, 'show_cif_hkl') else None
         return _bp_print_style_info(
             fig, ax,
             y_data_list, labels,
@@ -1083,6 +1094,342 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
     # history management:
     state_history = []
 
+    # ====================================================================
+    # SMOOTHING AND REDUCE ROWS HELPER FUNCTIONS
+    # ====================================================================
+
+    def _savgol_kernel(window: int, poly: int) -> np.ndarray:
+        """Return Savitzky–Golay smoothing kernel of given window/poly."""
+        half = window // 2
+        x = np.arange(-half, half + 1, dtype=float)
+        A = np.vander(x, poly + 1, increasing=True)
+        ATA = A.T @ A
+        ATA_inv = np.linalg.pinv(ATA)
+        target = np.zeros(poly + 1, dtype=float)
+        target[0] = 1.0  # evaluate polynomial at x=0
+        coeffs = target @ ATA_inv @ A.T
+        return coeffs
+
+    def _savgol_smooth(y: np.ndarray, window: int = 9, poly: int = 3) -> np.ndarray:
+        """Apply Savitzky–Golay smoothing (defaults from DiffCapAnalyzer) to data."""
+        n = y.size
+        if n < 3:
+            return y
+        if window > n:
+            window = n if n % 2 == 1 else n - 1
+        if window < 3:
+            return y
+        if window % 2 == 0:
+            window -= 1
+        if window < 3:
+            return y
+        if poly >= window:
+            poly = window - 1
+        coeffs = _savgol_kernel(window, poly)
+        half = window // 2
+        padded = np.pad(y, (half, half), mode='edge')
+        smoothed = np.convolve(padded, coeffs[::-1], mode='valid')
+        return smoothed
+
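Editor's note: an illustrative, numpy-only sanity check (not part of the diff) for the kernel construction above. A Savitzky–Golay filter of polynomial order p reproduces any polynomial of degree <= p exactly away from the padded edges; `savgol_kernel` below mirrors `_savgol_kernel`.

import numpy as np

def savgol_kernel(window, poly):
    # same least-squares construction as batplot's _savgol_kernel
    half = window // 2
    x = np.arange(-half, half + 1, dtype=float)
    A = np.vander(x, poly + 1, increasing=True)
    target = np.zeros(poly + 1)
    target[0] = 1.0  # evaluate the fitted polynomial at the window center
    return target @ np.linalg.pinv(A.T @ A) @ A.T

w, p = 9, 3
k = savgol_kernel(w, p)
t = np.linspace(0.0, 1.0, 101)
y = 2.0 - t + 0.5 * t**2 + 3.0 * t**3            # cubic: degree == poly
sm = np.convolve(np.pad(y, (w // 2, w // 2), mode='edge'), k[::-1], mode='valid')
inner = slice(w // 2, -(w // 2))                 # edge-padded ends are only approximate
assert np.allclose(sm[inner], y[inner])          # interior reproduced exactly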
+    def _fft_smooth(y: np.ndarray, points: int = 5, cutoff: float = 0.1) -> np.ndarray:
+        """Apply FFT filter smoothing to data."""
+        n = y.size
+        if n < 3:
+            return y
+        # FFT
+        fft_vals = np.fft.rfft(y)
+        freq = np.fft.rfftfreq(n)
+        # Low-pass filter: zero out frequencies above cutoff
+        mask = freq <= cutoff
+        fft_vals[~mask] = 0
+        # Inverse FFT
+        smoothed = np.fft.irfft(fft_vals, n)
+        return smoothed
+
+    def _adjacent_average_smooth(y: np.ndarray, points: int = 5) -> np.ndarray:
+        """Apply Adjacent-Averaging smoothing to data."""
+        n = y.size
+        if n < points:
+            return y
+        if points < 2:
+            return y
+        # Use convolution for moving average
+        kernel = np.ones(points) / points
+        # Pad edges
+        padded = np.pad(y, (points//2, points//2), mode='edge')
+        smoothed = np.convolve(padded, kernel, mode='valid')
+        return smoothed
+
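Editor's note: an illustrative usage sketch (not part of the diff), assuming the three helpers above are in scope. Two caveats worth knowing: the `points` argument of `_fft_smooth` is unused (only `cutoff`, in cycles per sample, matters), and for even `points` the edge-padded moving average returns n+1 samples instead of n, so odd window sizes are the safe choice.

import numpy as np

rng = np.random.default_rng(0)
t = np.linspace(0, 4 * np.pi, 400)
noisy = np.sin(t) + 0.2 * rng.standard_normal(t.size)

sg = _savgol_smooth(noisy, window=9, poly=3)    # feature-preserving polynomial fit
lp = _fft_smooth(noisy, cutoff=0.02)            # hard spectral low-pass
ma = _adjacent_average_smooth(noisy, points=5)  # simple moving average (odd points)
assert sg.size == lp.size == ma.size == noisy.size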
+    def _get_last_reduce_rows_settings(method: str) -> dict:
+        """Get last reduce rows settings from config file.
+
+        Args:
+            method: Method name ('delete_skip', 'delete_missing', 'merge')
+
+        Returns:
+            Dictionary with last settings for the method, or empty dict if none
+        """
+        config = load_config()
+        last_settings = config.get('last_reduce_rows_settings', {})
+        return last_settings.get(method, {})
+
+    def _save_last_reduce_rows_settings(method: str, settings: dict) -> None:
+        """Save last reduce rows settings to config file.
+
+        Args:
+            method: Method name ('delete_skip', 'delete_missing', 'merge')
+            settings: Dictionary with settings to save
+        """
+        config = load_config()
+        if 'last_reduce_rows_settings' not in config:
+            config['last_reduce_rows_settings'] = {}
+        config['last_reduce_rows_settings'][method] = settings
+        save_config(config)
+
+    def _get_last_smooth_settings_from_config() -> dict:
+        """Get last smooth settings from config file (persistent across sessions).
+
+        Returns:
+            Dictionary with last smooth settings, or empty dict if none
+        """
+        config = load_config()
+        return config.get('last_smooth_settings', {})
+
+    def _save_last_smooth_settings_to_config(settings: dict) -> None:
+        """Save last smooth settings to config file (persistent across sessions).
+
+        Args:
+            settings: Dictionary with smooth settings to save
+        """
+        config = load_config()
+        config['last_smooth_settings'] = settings
+        save_config(config)
+
+    def _ensure_original_data():
+        """Ensure original data is stored for all curves."""
+        if not hasattr(fig, '_original_x_data_list'):
+            fig._original_x_data_list = [np.array(a, copy=True) for a in x_data_list]
+            fig._original_y_data_list = [np.array(a, copy=True) for a in y_data_list]
+
+    def _update_full_processed_data():
+        """Update the full processed data (after all processing steps, before any X-range filtering)."""
+        # This stores the complete processed data (reduce + smooth + derivative) for X-range filtering
+        fig._full_processed_x_data_list = [np.array(a, copy=True) for a in x_data_list]
+        fig._full_processed_y_data_list = [np.array(a, copy=True) for a in y_data_list]
+
+    def _reset_to_original():
+        """Reset all curves to original data."""
+        if not hasattr(fig, '_original_x_data_list'):
+            return (False, 0, 0)
+        reset_count = 0
+        total_points = 0
+        for i in range(min(len(fig._original_x_data_list), len(ax.lines))):
+            try:
+                orig_x = fig._original_x_data_list[i]
+                orig_y = fig._original_y_data_list[i]
+                # Restore offsets
+                if i < len(offsets_list):
+                    orig_y_with_offset = orig_y + offsets_list[i]
+                else:
+                    orig_y_with_offset = orig_y.copy()
+                ax.lines[i].set_data(orig_x, orig_y_with_offset)
+                x_data_list[i] = orig_x.copy()
+                y_data_list[i] = orig_y_with_offset.copy()
+                reset_count += 1
+                total_points += len(orig_x)
+            except Exception:
+                pass
+        # Clear processing settings
+        if hasattr(fig, '_smooth_settings'):
+            delattr(fig, '_smooth_settings')
+        return (reset_count > 0, reset_count, total_points)
+
+    def _apply_data_changes():
+        """Update plot and data lists after data modification."""
+        for i in range(min(len(ax.lines), len(x_data_list), len(y_data_list))):
+            try:
+                ax.lines[i].set_data(x_data_list[i], y_data_list[i])
+            except Exception:
+                pass
+        try:
+            fig.canvas.draw_idle()
+        except Exception:
+            pass
+
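Editor's note: an illustrative sketch (not part of the diff) of the "remember last settings" pattern used by the config helpers above. batplot's real `load_config`/`save_config` live in `.config` and are not shown in this diff; a plain JSON file stands in for them here, and `CONFIG_PATH` is a hypothetical location.

import json, os

CONFIG_PATH = os.path.expanduser("~/.batplot_demo.json")  # hypothetical stand-in path

def load_config() -> dict:
    try:
        with open(CONFIG_PATH) as fh:
            return json.load(fh)
    except (OSError, ValueError):
        return {}  # missing or unreadable config: start fresh

def save_config(config: dict) -> None:
    with open(CONFIG_PATH, "w") as fh:
        json.dump(config, fh, indent=2)

# Round-trip one method's settings, as _save_last_reduce_rows_settings does.
cfg = load_config()
cfg.setdefault('last_reduce_rows_settings', {})['delete_skip'] = {'n': 1, 'm': 2, 'start_row': 0}
save_config(cfg)
assert load_config()['last_reduce_rows_settings']['delete_skip']['m'] == 2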
+    def _calculate_derivative(x: np.ndarray, y: np.ndarray, order: int = 1) -> np.ndarray:
+        """Calculate 1st or 2nd derivative using numpy gradient.
+
+        Args:
+            x: X values
+            y: Y values
+            order: 1 for first derivative (dy/dx), 2 for second derivative (d²y/dx²)
+
+        Returns:
+            Derivative array (same length as input)
+        """
+        if len(y) < 2:
+            return y.copy()
+        # Calculate dy/dx
+        dy_dx = np.gradient(y, x)
+        if order == 1:
+            return dy_dx
+        elif order == 2:
+            # Calculate d²y/dx² = d(dy/dx)/dx
+            if len(dy_dx) < 2:
+                return np.zeros_like(y)
+            d2y_dx2 = np.gradient(dy_dx, x)
+            return d2y_dx2
+        else:
+            return y.copy()
+
+    def _calculate_reversed_derivative(x, y, order):
+        """Calculate reversed 1st or 2nd derivative (dx/dy or d²x/dy²).
+
+        Args:
+            x: X values
+            y: Y values
+            order: 1 for first reversed derivative (dx/dy), 2 for second reversed derivative (d²x/dy²)
+
+        Returns:
+            Reversed derivative array (same length as input)
+        """
+        if len(y) < 2:
+            return y.copy()
+        # First calculate dy/dx
+        dy_dx = np.gradient(y, x)
+        # Avoid division by zero - replace zeros with small epsilon
+        epsilon = 1e-10
+        dy_dx_safe = np.where(np.abs(dy_dx) < epsilon, np.sign(dy_dx) * epsilon, dy_dx)
+        # Calculate dx/dy = 1 / (dy/dx)
+        dx_dy = 1.0 / dy_dx_safe
+        if order == 1:
+            return dx_dy
+        elif order == 2:
+            # Calculate d²x/dy² = d(dx/dy)/dy
+            # d(dx/dy)/dy = d(1/(dy/dx))/dy = -1/(dy/dx)² * d²y/dx²
+            if len(dx_dy) < 2:
+                return np.zeros_like(y)
+            # Calculate d²y/dx² first
+            d2y_dx2 = np.gradient(dy_dx, x)
+            # d²x/dy² = -d²y/dx² / (dy/dx)³
+            d2x_dy2 = -d2y_dx2 / (dy_dx_safe ** 3)
+            return d2x_dy2
+        else:
+            return y.copy()
+
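Editor's note: an illustrative numeric check (not part of the diff) of the inverse-function identities the reversed derivative relies on: dx/dy = 1/y' and d²x/dy² = -y''/(y')³. For y = exp(x) the closed forms are dx/dy = exp(-x) and d²x/dy² = -exp(-2x).

import numpy as np

x = np.linspace(0.0, 2.0, 2001)
y = np.exp(x)
dy_dx = np.gradient(y, x)
d2y_dx2 = np.gradient(dy_dx, x)

dx_dy = 1.0 / dy_dx              # order 1, as in _calculate_reversed_derivative
d2x_dy2 = -d2y_dx2 / dy_dx ** 3  # order 2

inner = slice(2, -2)             # np.gradient is less accurate at the endpoints
assert np.allclose(dx_dy[inner], np.exp(-x[inner]), rtol=1e-4)
assert np.allclose(d2x_dy2[inner], -np.exp(-2 * x[inner]), rtol=1e-3)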
+    def _update_ylabel_for_derivative(order: int, current_label: str = None, is_reversed: bool = False) -> str:
+        """Generate appropriate y-axis label for derivative.
+
+        Args:
+            order: 1 for first derivative, 2 for second derivative
+            current_label: Current y-axis label (optional)
+            is_reversed: True for reversed derivative (dx/dy), False for normal (dy/dx)
+
+        Returns:
+            New y-axis label string
+        """
+        if current_label is None:
+            current_label = ax.get_ylabel() or "Y"
+
+        # Try to detect common patterns and update accordingly
+        current_lower = current_label.lower()
+
+        if is_reversed:
+            # Reversed derivative: dx/dy or d²x/dy²
+            y_label = current_label if current_label and current_label != "Y" else (ax.get_ylabel() or "Y")
+            if order == 1:
+                # First reversed derivative: dx/dy
+                if x_label:
+                    return f"d({x_label})/d({y_label})"
+                else:
+                    return f"dx/d({y_label})"
+            else:  # order == 2
+                # Second reversed derivative: d²x/dy²
+                if x_label:
+                    return f"d²({x_label})/d({y_label})²"
+                else:
+                    return f"d²x/d({y_label})²"
+
+        # Normal derivative: dy/dx or d²y/dx²
+        if order == 1:
+            # First derivative: dy/dx or dY/dX
+            if "/" in current_label:
+                # If already has derivative notation, try to increment
+                if "d²" in current_label or "d2" in current_lower:
+                    # Change from 2nd to 1st (shouldn't normally happen, but handle it)
+                    new_label = current_label.replace("d²", "d").replace("d2", "d")
+                    return new_label
+                elif "d" in current_label.lower() and "/" in current_label:
+                    # Already has derivative, keep as is but update order if needed
+                    return current_label
+            # Add d/dx prefix or suffix
+            if x_label:
+                if any(op in current_label for op in ["/", "(", "["]):
+                    # Complex label, prepend d/dx
+                    return f"d({current_label})/d({x_label})"
+                else:
+                    # Simple label, use d/dx notation
+                    return f"d({current_label})/d({x_label})"
+            else:
+                return f"d({current_label})/dx"
+        else:  # order == 2
+            # Second derivative: d²y/dx² or d2Y/dX2
+            if "/" in current_label:
+                if "d²" in current_label or "d2" in current_lower:
+                    # Already 2nd derivative, keep as is
+                    return current_label
+                elif "d" in current_label.lower() and "/" in current_label:
+                    # First derivative, convert to second
+                    new_label = current_label.replace("d(", "d²(").replace("d2(", "d²(").replace("d/", "d²/").replace("/d(", "²/d(")
+                    return new_label
+            # Add d²/dx² prefix
+            if x_label:
+                if any(op in current_label for op in ["/", "(", "["]):
+                    return f"d²({current_label})/d({x_label})²"
+                else:
+                    return f"d²({current_label})/d({x_label})²"
+            else:
+                return f"d²({current_label})/dx²"
+
+        return current_label
+
+    def _ensure_pre_derivative_data():
+        """Ensure pre-derivative data is stored for reset."""
+        if not hasattr(fig, '_pre_derivative_x_data_list'):
+            fig._pre_derivative_x_data_list = [np.array(a, copy=True) for a in x_data_list]
+            fig._pre_derivative_y_data_list = [np.array(a, copy=True) for a in y_data_list]
+            fig._pre_derivative_ylabel = ax.get_ylabel() or ""
+
+    def _reset_from_derivative():
+        """Reset all curves from derivative back to pre-derivative state."""
+        if not hasattr(fig, '_pre_derivative_x_data_list'):
+            return (False, 0, 0)
+        reset_count = 0
+        total_points = 0
+        for i in range(min(len(fig._pre_derivative_x_data_list), len(ax.lines))):
+            try:
+                pre_x = fig._pre_derivative_x_data_list[i]
+                pre_y = fig._pre_derivative_y_data_list[i]
+                # Restore offsets
+                if i < len(offsets_list):
+                    pre_y_with_offset = pre_y + offsets_list[i]
+                else:
+                    pre_y_with_offset = pre_y.copy()
+                ax.lines[i].set_data(pre_x, pre_y_with_offset)
+                x_data_list[i] = pre_x.copy()
+                y_data_list[i] = pre_y_with_offset.copy()
+                reset_count += 1
+                total_points += len(pre_x)
+            except Exception:
+                pass
+        # Restore y-axis label
+        if hasattr(fig, '_pre_derivative_ylabel'):
+            ax.set_ylabel(fig._pre_derivative_ylabel)
+        # Clear derivative settings
+        if hasattr(fig, '_derivative_order'):
+            delattr(fig, '_derivative_order')
+        return (reset_count > 0, reset_count, total_points)
+
     def push_state(note=""):
         """Snapshot current editable state (before a modifying action)."""
         try:
@@ -1161,6 +1508,26 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
         snap["y_data_list"] = [np.array(a, copy=True) for a in y_data_list]
         snap["orig_y"] = [np.array(a, copy=True) for a in orig_y]
         snap["offsets"] = list(offsets_list)
+        # Processed data (for smooth/reduce operations)
+        if hasattr(fig, '_original_x_data_list'):
+            snap["original_x_data_list"] = [np.array(a, copy=True) for a in fig._original_x_data_list]
+            snap["original_y_data_list"] = [np.array(a, copy=True) for a in fig._original_y_data_list]
+        if hasattr(fig, '_full_processed_x_data_list'):
+            snap["full_processed_x_data_list"] = [np.array(a, copy=True) for a in fig._full_processed_x_data_list]
+            snap["full_processed_y_data_list"] = [np.array(a, copy=True) for a in fig._full_processed_y_data_list]
+        if hasattr(fig, '_smooth_settings'):
+            snap["smooth_settings"] = dict(fig._smooth_settings)
+        if hasattr(fig, '_last_smooth_settings'):
+            snap["last_smooth_settings"] = dict(fig._last_smooth_settings)
+        # Derivative data (for derivative operations)
+        if hasattr(fig, '_pre_derivative_x_data_list'):
+            snap["pre_derivative_x_data_list"] = [np.array(a, copy=True) for a in fig._pre_derivative_x_data_list]
+            snap["pre_derivative_y_data_list"] = [np.array(a, copy=True) for a in fig._pre_derivative_y_data_list]
+            snap["pre_derivative_ylabel"] = str(getattr(fig, '_pre_derivative_ylabel', ''))
+        if hasattr(fig, '_derivative_order'):
+            snap["derivative_order"] = int(fig._derivative_order)
+        if hasattr(fig, '_derivative_reversed'):
+            snap["derivative_reversed"] = bool(fig._derivative_reversed)
         # Label text content
         snap["label_texts"] = [t.get_text() for t in label_text_objects]
         state_history.append(snap)
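Editor's note: a minimal sketch (not part of the diff) of the snapshot-based undo that `push_state` extends above. Each modifying action deep-copies the mutable state into a dict and pushes it; undo pops the last snapshot and restores in place.

import numpy as np

history = []
y_data = [np.array([1.0, 2.0, 3.0])]

def push_state(note=""):
    history.append({
        "note": note,
        "y_data": [a.copy() for a in y_data],   # copies, not references
    })

def undo():
    if not history:
        return False
    snap = history.pop()
    y_data[:] = [a.copy() for a in snap["y_data"]]  # restore in place
    return True

push_state("smooth")
y_data[0] = y_data[0] * 0.5        # destructive edit
assert undo() and y_data[0][0] == 1.0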
@@ -1381,12 +1748,72 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
         offsets_list[:] = list(snap["offsets"])
         delta = snap.get("delta", delta)
 
+        # Restore processed data (for smooth/reduce operations)
+        if "original_x_data_list" in snap:
+            fig._original_x_data_list = [np.array(a, copy=True) for a in snap["original_x_data_list"]]
+            fig._original_y_data_list = [np.array(a, copy=True) for a in snap["original_y_data_list"]]
+        elif hasattr(fig, '_original_x_data_list'):
+            # Clear if not in snapshot
+            delattr(fig, '_original_x_data_list')
+            delattr(fig, '_original_y_data_list')
+        if "full_processed_x_data_list" in snap:
+            fig._full_processed_x_data_list = [np.array(a, copy=True) for a in snap["full_processed_x_data_list"]]
+            fig._full_processed_y_data_list = [np.array(a, copy=True) for a in snap["full_processed_y_data_list"]]
+        elif hasattr(fig, '_full_processed_x_data_list'):
+            # Clear if not in snapshot
+            delattr(fig, '_full_processed_x_data_list')
+            delattr(fig, '_full_processed_y_data_list')
+        if "smooth_settings" in snap:
+            fig._smooth_settings = dict(snap["smooth_settings"])
+        elif hasattr(fig, '_smooth_settings'):
+            delattr(fig, '_smooth_settings')
+        if "last_smooth_settings" in snap:
+            fig._last_smooth_settings = dict(snap["last_smooth_settings"])
+        elif hasattr(fig, '_last_smooth_settings'):
+            delattr(fig, '_last_smooth_settings')
+        # Restore derivative data (for derivative operations)
+        if "pre_derivative_x_data_list" in snap:
+            fig._pre_derivative_x_data_list = [np.array(a, copy=True) for a in snap["pre_derivative_x_data_list"]]
+            fig._pre_derivative_y_data_list = [np.array(a, copy=True) for a in snap["pre_derivative_y_data_list"]]
+            fig._pre_derivative_ylabel = str(snap.get("pre_derivative_ylabel", ""))
+        elif hasattr(fig, '_pre_derivative_x_data_list'):
+            delattr(fig, '_pre_derivative_x_data_list')
+            delattr(fig, '_pre_derivative_y_data_list')
+            if hasattr(fig, '_pre_derivative_ylabel'):
+                delattr(fig, '_pre_derivative_ylabel')
+        if "derivative_order" in snap:
+            fig._derivative_order = int(snap["derivative_order"])
+        elif hasattr(fig, '_derivative_order'):
+            delattr(fig, '_derivative_order')
+        if "derivative_reversed" in snap:
+            fig._derivative_reversed = bool(snap["derivative_reversed"])
+        elif hasattr(fig, '_derivative_reversed'):
+            delattr(fig, '_derivative_reversed')
+        # Restore y-axis label if derivative was applied
+        if "derivative_order" in snap:
+            try:
+                current_ylabel = ax.get_ylabel() or ""
+                order = int(snap["derivative_order"])
+                is_reversed = snap.get("derivative_reversed", False)
+                new_ylabel = _update_ylabel_for_derivative(order, current_ylabel, is_reversed=is_reversed)
+                ax.set_ylabel(new_ylabel)
+            except Exception:
+                pass
+
         # Recalculate y_data_list from orig_y and offsets_list to ensure consistency
+        # Ensure lists have the same length before assigning
+        max_len = max(len(orig_y), len(y_data_list), len(offsets_list))
+        while len(orig_y) < max_len:
+            orig_y.append(np.array([]))
+        while len(y_data_list) < max_len:
+            y_data_list.append(np.array([]))
+        while len(offsets_list) < max_len:
+            offsets_list.append(0.0)
         for i in range(len(orig_y)):
             if i < len(offsets_list):
                 y_data_list[i] = orig_y[i] + offsets_list[i]
             else:
-                y_data_list[i] = orig_y[i].copy()
+                y_data_list[i] = orig_y[i].copy() if orig_y[i].size > 0 else np.array([])
 
         # Update line data with restored values
         for i in range(min(len(ax.lines), len(x_data_list), len(y_data_list))):
@@ -1423,7 +1850,15 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
            pass
        if _bp is not None and 'show_cif_hkl' in snap:
            try:
-               setattr(_bp, 'show_cif_hkl', bool(snap['show_cif_hkl']))
+               new_state = bool(snap['show_cif_hkl'])
+               setattr(_bp, 'show_cif_hkl', new_state)
+               # Also store in __main__ module so draw function can access it
+               try:
+                   _bp_module = sys.modules.get('__main__')
+                   if _bp_module is not None:
+                       setattr(_bp_module, 'show_cif_hkl', new_state)
+               except Exception:
+                   pass
            except Exception:
                pass
        if _bp is not None and 'show_cif_titles' in snap:
@@ -1508,8 +1943,16 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
        try:
            # Flip visibility flag in batplot module
            cur = bool(getattr(_bp, 'show_cif_hkl', False)) if _bp is not None else False
+           new_state = not cur
            if _bp is not None:
-               setattr(_bp, 'show_cif_hkl', not cur)
+               setattr(_bp, 'show_cif_hkl', new_state)
+               # Also store in __main__ module so draw function can access it
+               try:
+                   _bp_module = sys.modules.get('__main__')
+                   if _bp_module is not None:
+                       setattr(_bp_module, 'show_cif_hkl', new_state)
+               except Exception:
+                   pass
            # Avoid re-entrant extension while redrawing
            prev_ext = bool(getattr(_bp, 'cif_extend_suspended', False)) if _bp is not None else False
            if _bp is not None:
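Editor's note: an illustrative sketch (not part of the diff) of the pattern used in both hunks above: mirroring a flag onto the `__main__` module so that any code doing `getattr(sys.modules['__main__'], 'show_cif_hkl', ...)` observes the same state that was toggled here.

import sys

def set_global_flag(name: str, value: bool) -> None:
    main_mod = sys.modules.get('__main__')  # present in any normal Python run
    if main_mod is not None:
        setattr(main_mod, name, value)

set_global_flag('show_cif_hkl', True)
assert getattr(sys.modules['__main__'], 'show_cif_hkl') is True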
@@ -2433,7 +2876,49 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                    print("Invalid value, ignored.")
                    continue
                push_state("xrange")
-               ax.set_xlim(current_xlim[0], new_upper)
+               new_min = current_xlim[0]
+               new_max = new_upper
+               ax.set_xlim(new_min, new_max)
+               # Re-filter data from original processed data if available
+               data_is_processed = (hasattr(fig, '_original_x_data_list') or
+                                    hasattr(fig, '_smooth_settings') or
+                                    hasattr(fig, '_derivative_order') or
+                                    hasattr(fig, '_pre_derivative_x_data_list'))
+               if data_is_processed and hasattr(fig, '_original_x_data_list'):
+                   for i in range(len(labels)):
+                       if i < len(fig._original_x_data_list):
+                           x_current = fig._original_x_data_list[i]
+                           y_current = fig._original_y_data_list[i]
+                           if i < len(offsets_list):
+                               y_current_no_offset = y_current - offsets_list[i]
+                           else:
+                               y_current_no_offset = y_current.copy()
+                           mask = (x_current >= new_min) & (x_current <= new_max)
+                           x_sub = np.asarray(x_current[mask], dtype=float).flatten()
+                           y_sub = np.asarray(y_current_no_offset[mask], dtype=float).flatten()
+                           if x_sub.size == 0:
+                               ax.lines[i].set_data([], [])
+                               x_data_list[i] = np.array([], dtype=float)
+                               y_data_list[i] = np.array([], dtype=float)
+                               if i < len(orig_y):
+                                   orig_y[i] = np.array([], dtype=float)
+                               continue
+                           if i < len(offsets_list):
+                               y_sub = y_sub + offsets_list[i]
+                           ax.lines[i].set_data(x_sub, y_sub)
+                           x_data_list[i] = np.asarray(x_sub, dtype=float).flatten()
+                           y_data_list[i] = np.asarray(y_sub, dtype=float).flatten()
+                           # Update orig_y with robust method
+                           while len(orig_y) <= i:
+                               orig_y.append(np.array([], dtype=float))
+                           try:
+                               y_no_offset = y_sub - offsets_list[i] if i < len(offsets_list) else y_sub
+                               y_no_offset_1d = np.array(y_no_offset, dtype=float).ravel()
+                               if i < len(orig_y):
+                                   del orig_y[i]
+                               orig_y.insert(i, y_no_offset_1d)
+                           except Exception:
+                               pass
                ax.relim()
                ax.autoscale_view(scalex=False, scaley=True)
                update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
@@ -2464,7 +2949,49 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                    print("Invalid value, ignored.")
                    continue
                push_state("xrange")
-               ax.set_xlim(new_lower, current_xlim[1])
+               new_min = new_lower
+               new_max = current_xlim[1]
+               ax.set_xlim(new_min, new_max)
+               # Re-filter data from original processed data if available
+               data_is_processed = (hasattr(fig, '_original_x_data_list') or
+                                    hasattr(fig, '_smooth_settings') or
+                                    hasattr(fig, '_derivative_order') or
+                                    hasattr(fig, '_pre_derivative_x_data_list'))
+               if data_is_processed and hasattr(fig, '_original_x_data_list'):
+                   for i in range(len(labels)):
+                       if i < len(fig._original_x_data_list):
+                           x_current = fig._original_x_data_list[i]
+                           y_current = fig._original_y_data_list[i]
+                           if i < len(offsets_list):
+                               y_current_no_offset = y_current - offsets_list[i]
+                           else:
+                               y_current_no_offset = y_current.copy()
+                           mask = (x_current >= new_min) & (x_current <= new_max)
+                           x_sub = np.asarray(x_current[mask], dtype=float).flatten()
+                           y_sub = np.asarray(y_current_no_offset[mask], dtype=float).flatten()
+                           if x_sub.size == 0:
+                               ax.lines[i].set_data([], [])
+                               x_data_list[i] = np.array([], dtype=float)
+                               y_data_list[i] = np.array([], dtype=float)
+                               if i < len(orig_y):
+                                   orig_y[i] = np.array([], dtype=float)
+                               continue
+                           if i < len(offsets_list):
+                               y_sub = y_sub + offsets_list[i]
+                           ax.lines[i].set_data(x_sub, y_sub)
+                           x_data_list[i] = np.asarray(x_sub, dtype=float).flatten()
+                           y_data_list[i] = np.asarray(y_sub, dtype=float).flatten()
+                           # Update orig_y with robust method
+                           while len(orig_y) <= i:
+                               orig_y.append(np.array([], dtype=float))
+                           try:
+                               y_no_offset = y_sub - offsets_list[i] if i < len(offsets_list) else y_sub
+                               y_no_offset_1d = np.array(y_no_offset, dtype=float).ravel()
+                               if i < len(orig_y):
+                                   del orig_y[i]
+                               orig_y.insert(i, y_no_offset_1d)
+                           except Exception:
+                               pass
                ax.relim()
                ax.autoscale_view(scalex=False, scaley=True)
                update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
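Editor's note: an illustrative sketch (not part of the diff) of the re-filtering both x-range branches above perform: slice the full, offset-free arrays with a boolean mask, then re-apply the per-curve display offset.

import numpy as np

full_x = np.linspace(0.0, 10.0, 11)
full_y = full_x ** 2            # stored without offset
offset = 5.0
new_min, new_max = 2.0, 6.0

mask = (full_x >= new_min) & (full_x <= new_max)
x_sub = full_x[mask]
y_sub = full_y[mask] + offset   # offset re-applied only for display

assert x_sub[0] == 2.0 and x_sub[-1] == 6.0
assert y_sub[0] == 2.0 ** 2 + offset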
@@ -2482,22 +3009,266 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                print(f"X range updated: {ax.get_xlim()[0]:.6g} to {ax.get_xlim()[1]:.6g}")
                continue
            if rng.lower() == 'a':
-               # Auto: restore original range from x_full_list
+               # Auto: restore original range from CURRENT PROCESSED data (not original unprocessed)
                push_state("xrange-auto")
-               if x_full_list:
+               try:
+                   # Check if data has been processed
+                   data_is_processed = (hasattr(fig, '_original_x_data_list') or
+                                        hasattr(fig, '_smooth_settings') or
+                                        hasattr(fig, '_derivative_order') or
+                                        hasattr(fig, '_pre_derivative_x_data_list'))
+                   if data_is_processed and x_data_list and all(xd.size > 0 for xd in x_data_list):
+                       # Use CURRENT processed data to determine full range (preserves all processing)
+                       print(f"DEBUG: Using current processed data for auto restore (has {len(x_data_list)} curves)")
+                       new_min = min(xd.min() for xd in x_data_list if xd.size)
+                       new_max = max(xd.max() for xd in x_data_list if xd.size)
+                       print(f"DEBUG: Processed data range: {new_min:.6g} to {new_max:.6g}")
+                   elif x_full_list:
+                       print(f"DEBUG: Using original full data (no processing detected)")
+                       new_min = min(xf.min() for xf in x_full_list if xf.size)
+                       new_max = max(xf.max() for xf in x_full_list if xf.size)
+                   else:
+                       print("No original data available.")
+                       continue
+                   # Restore all data - use CURRENT PROCESSED data (preserves all processing steps)
+                   for i in range(len(labels)):
+                       if data_is_processed and hasattr(fig, '_full_processed_x_data_list') and i < len(fig._full_processed_x_data_list):
+                           # Use FULL processed data (preserves all processing: reduce + smooth + derivative)
+                           print(f"DEBUG: Auto restore curve {i+1}: Using full processed data ({len(fig._full_processed_x_data_list[i])} points)")
+                           xf = np.asarray(fig._full_processed_x_data_list[i], dtype=float).flatten()
+                           yf = np.asarray(fig._full_processed_y_data_list[i], dtype=float).flatten()
+                           yf_raw = yf - (offsets_list[i] if i < len(offsets_list) else 0.0)
+                       elif data_is_processed and i < len(x_data_list) and x_data_list[i].size > 0:
+                           # Fallback: use current processed data
+                           print(f"DEBUG: Auto restore curve {i+1}: Using current processed data ({len(x_data_list[i])} points)")
+                           xf = np.asarray(x_data_list[i], dtype=float).flatten()
+                           yf = np.asarray(y_data_list[i], dtype=float).flatten()
+                           yf_raw = yf - (offsets_list[i] if i < len(offsets_list) else 0.0)
+                       else:
+                           # Use full original data (no processing)
+                           print(f"DEBUG: Auto restore curve {i+1}: Using original full data")
+                           xf = x_full_list[i] if i < len(x_full_list) else x_data_list[i]
+                           yf_raw = raw_y_full_list[i] if i < len(raw_y_full_list) else (orig_y[i] if i < len(orig_y) else y_data_list[i])
+                           xf = np.asarray(xf, dtype=float).flatten()
+                           yf_raw = np.asarray(yf_raw, dtype=float).flatten()
+                       mask = (xf >= new_min) & (xf <= new_max)
+                       x_sub = np.asarray(xf[mask], dtype=float).flatten()
+                       y_sub_raw = np.asarray(yf_raw[mask], dtype=float).flatten()
+                       if x_sub.size == 0:
+                           ax.lines[i].set_data([], [])
+                           x_data_list[i] = np.array([], dtype=float)
+                           y_data_list[i] = np.array([], dtype=float)
+                           if i < len(orig_y):
+                               orig_y[i] = np.array([], dtype=float)
+                           continue
+                       should_normalize = args.stack or getattr(args, 'norm', False)
+                       if should_normalize:
+                           if y_sub_raw.size:
+                               y_min = float(y_sub_raw.min())
+                               y_max = float(y_sub_raw.max())
+                               span = y_max - y_min
+                               if span > 0:
+                                   y_sub_norm = (y_sub_raw - y_min) / span
+                               else:
+                                   y_sub_norm = np.zeros_like(y_sub_raw)
+                           else:
+                               y_sub_norm = y_sub_raw
+                       else:
+                           y_sub_norm = y_sub_raw
+                       offset_val = offsets_list[i] if i < len(offsets_list) else 0.0
+                       y_with_offset = y_sub_norm + offset_val
+                       ax.lines[i].set_data(x_sub, y_with_offset)
+                       x_data_list[i] = np.asarray(x_sub, dtype=float).flatten()
+                       y_data_list[i] = np.asarray(y_with_offset, dtype=float).flatten()
+                       # Ensure orig_y list has enough elements
+                       while len(orig_y) <= i:
+                           orig_y.append(np.array([], dtype=float))
+                       # Create a new 1D array - ensure it's a proper numpy array
+                       # Handle all edge cases: scalar, 0-d array, multi-d array
+                       try:
+                           if isinstance(y_sub_norm, np.ndarray):
+                               if y_sub_norm.ndim == 0:
+                                   y_sub_norm_1d = np.array([float(y_sub_norm)], dtype=float)
+                               else:
+                                   y_sub_norm_1d = np.array(y_sub_norm.flatten(), dtype=float, copy=True)
+                           else:
+                               # It's a scalar or list
+                               y_sub_norm_1d = np.array(y_sub_norm, dtype=float).flatten()
+                           # Ensure it's 1D
+                           if y_sub_norm_1d.ndim != 1:
+                               y_sub_norm_1d = y_sub_norm_1d.reshape(-1)
+                           # Replace list element - delete old one first if needed
+                           if i < len(orig_y):
+                               del orig_y[i]
+                           orig_y.insert(i, y_sub_norm_1d)
+                       except Exception as e:
+                           # Fallback: just create a simple array
+                           try:
+                               y_sub_norm_1d = np.array(y_sub_norm, dtype=float).ravel()
+                               if i < len(orig_y):
+                                   orig_y[i] = y_sub_norm_1d
+                               else:
+                                   orig_y.append(y_sub_norm_1d)
+                           except Exception:
+                               # Last resort: skip orig_y update
+                               pass
+                   ax.set_xlim(new_min, new_max)
+                   ax.relim(); ax.autoscale_view(scalex=False, scaley=True)
+                   update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
+                   try:
+                       if hasattr(ax, '_cif_extend_func'):
+                           ax._cif_extend_func(ax.get_xlim()[1])
+                   except Exception:
+                       pass
+                   try:
+                       if hasattr(ax, '_cif_draw_func'):
+                           ax._cif_draw_func()
+                   except Exception:
+                       pass
+                   fig.canvas.draw()
+                   print(f"X range restored to original: {ax.get_xlim()[0]:.6g} to {ax.get_xlim()[1]:.6g}")
+               except Exception as e:
+                   print(f"Error during auto restore: {e}")
+                   import traceback
+                   traceback.print_exc()
+               continue
+           push_state("xrange")
+           if rng.lower() == 'full':
+               # Use full data if available, otherwise use current processed data
+               if x_full_list and all(xf.size > 0 for xf in x_full_list):
                    new_min = min(xf.min() for xf in x_full_list if xf.size)
                    new_max = max(xf.max() for xf in x_full_list if xf.size)
                else:
-                   print("No original data available.")
-                   continue
-               # Restore all data
-               for i in range(len(labels)):
-                   xf = x_full_list[i]; yf_raw = raw_y_full_list[i]
-                   mask = (xf>=new_min) & (xf<=new_max)
-                   x_sub = xf[mask]; y_sub_raw = yf_raw[mask]
+                   new_min = min(xd.min() for xd in x_data_list if xd.size)
+                   new_max = max(xd.max() for xd in x_data_list if xd.size)
+           else:
+               new_min, new_max = map(float, rng.split())
+           ax.set_xlim(new_min, new_max)
+           # Check if data has been processed (smooth/derivative/reduce)
+           data_is_processed = (hasattr(fig, '_original_x_data_list') or
+                                hasattr(fig, '_smooth_settings') or
+                                hasattr(fig, '_derivative_order') or
+                                hasattr(fig, '_pre_derivative_x_data_list'))
+
+           for i in range(len(labels)):
+               if data_is_processed and i < len(x_data_list) and x_data_list[i].size > 0:
+                   # Use full processed data if available (allows expansion), otherwise use current filtered data
+                   curr_x = np.asarray(x_data_list[i], dtype=float)
+                   curr_min = curr_x.min() if curr_x.size > 0 else float('inf')
+                   curr_max = curr_x.max() if curr_x.size > 0 else float('-inf')
+
+                   # Check if we need full processed data (for expansion beyond current filter)
+                   need_full = (new_min < curr_min or new_max > curr_max)
+
+                   if need_full and hasattr(fig, '_full_processed_x_data_list') and i < len(fig._full_processed_x_data_list):
+                       # Use full processed data to allow expansion
+                       full_x = np.asarray(fig._full_processed_x_data_list[i], dtype=float)
+                       if full_x.size > 0:
+                           full_min = full_x.min()
+                           full_max = full_x.max()
+                           print(f"DEBUG: Curve {i+1}: Expanding range ({curr_min:.6g}-{curr_max:.6g} -> {new_min:.6g}-{new_max:.6g}), using full processed data (range {full_min:.6g} to {full_max:.6g})")
+                           x_current = full_x
+                           y_current = np.asarray(fig._full_processed_y_data_list[i], dtype=float)
+                       else:
+                           print(f"DEBUG: Curve {i+1}: Full processed data empty, using current data")
+                           x_current = curr_x
+                           y_current = np.asarray(y_data_list[i], dtype=float)
+                   else:
+                       print(f"DEBUG: Curve {i+1}: Using current processed data (range {curr_min:.6g} to {curr_max:.6g}, requested {new_min:.6g} to {new_max:.6g})")
+                       x_current = curr_x
+                       y_current = np.asarray(y_data_list[i], dtype=float)
+                   # Remove offset for filtering
+                   if i < len(offsets_list):
+                       y_current_no_offset = y_current - offsets_list[i]
+                   else:
+                       y_current_no_offset = y_current.copy()
+                   mask = (x_current >= new_min) & (x_current <= new_max)
+                   x_sub = np.asarray(x_current[mask], dtype=float).flatten()
+                   y_sub = np.asarray(y_current_no_offset[mask], dtype=float).flatten()
+                   if x_sub.size == 0:
+                       ax.lines[i].set_data([], [])
+                       x_data_list[i] = np.array([], dtype=float)
+                       y_data_list[i] = np.array([], dtype=float)
+                       if i < len(orig_y):
+                           orig_y[i] = np.array([], dtype=float)
+                       continue
+                   # Restore offset
+                   if i < len(offsets_list):
+                       y_sub = y_sub + offsets_list[i]
+                   ax.lines[i].set_data(x_sub, y_sub)
+                   x_data_list[i] = np.asarray(x_sub, dtype=float).flatten()
+                   y_data_list[i] = np.asarray(y_sub, dtype=float).flatten()
+                   # Update orig_y with robust method
+                   while len(orig_y) <= i:
+                       orig_y.append(np.array([], dtype=float))
+                   try:
+                       y_no_offset = y_sub - offsets_list[i] if i < len(offsets_list) else y_sub
+                       y_no_offset_1d = np.array(y_no_offset, dtype=float).ravel()
+                       if i < len(orig_y):
+                           del orig_y[i]
+                       orig_y.insert(i, y_no_offset_1d)
+                   except Exception:
+                       pass
+               elif data_is_processed and i < len(x_data_list) and x_data_list[i].size > 0:
+                   # Fallback: use current data if _original_x_data_list not available
+                   x_current = np.asarray(x_data_list[i], dtype=float)
+                   y_current = np.asarray(y_data_list[i], dtype=float)
+                   mask = (x_current >= new_min) & (x_current <= new_max)
+                   x_sub = np.asarray(x_current[mask], dtype=float).flatten()
+                   y_sub = np.asarray(y_current[mask], dtype=float).flatten()
                    if x_sub.size == 0:
                        ax.lines[i].set_data([], [])
-                       y_data_list[i] = np.array([]); orig_y[i] = np.array([]); continue
+                       x_data_list[i] = np.array([], dtype=float)
+                       y_data_list[i] = np.array([], dtype=float)
+                       if i < len(orig_y):
+                           orig_y[i] = np.array([], dtype=float)
+                       continue
+                   ax.lines[i].set_data(x_sub, y_sub)
+                   x_data_list[i] = np.asarray(x_sub, dtype=float).flatten()
+                   y_data_list[i] = np.asarray(y_sub, dtype=float).flatten()
+                   # Update orig_y - use same robust method as in 'a' branch
+                   while len(orig_y) <= i:
+                       orig_y.append(np.array([], dtype=float))
+                   try:
+                       y_no_offset = y_sub - offsets_list[i] if i < len(offsets_list) else y_sub
+                       if isinstance(y_no_offset, np.ndarray):
+                           if y_no_offset.ndim == 0:
+                               y_no_offset_1d = np.array([float(y_no_offset)], dtype=float)
+                           else:
+                               y_no_offset_1d = np.array(y_no_offset.flatten(), dtype=float, copy=True)
+                       else:
+                           y_no_offset_1d = np.array(y_no_offset, dtype=float).flatten()
+                       if y_no_offset_1d.ndim != 1:
+                           y_no_offset_1d = y_no_offset_1d.reshape(-1)
+                       if i < len(orig_y):
+                           del orig_y[i]
+                       orig_y.insert(i, y_no_offset_1d)
+                   except Exception:
+                       try:
+                           y_no_offset = y_sub - offsets_list[i] if i < len(offsets_list) else y_sub
+                           y_no_offset_1d = np.array(y_no_offset, dtype=float).ravel()
+                           if i < len(orig_y):
+                               orig_y[i] = y_no_offset_1d
+                           else:
+                               orig_y.append(y_no_offset_1d)
+                       except Exception:
+                           pass
+               else:
+                   # Use original full data as source
+                   xf = x_full_list[i] if i < len(x_full_list) else x_data_list[i]
+                   yf_raw = raw_y_full_list[i] if i < len(raw_y_full_list) else (orig_y[i] if i < len(orig_y) else y_data_list[i])
+                   mask = (xf >= new_min) & (xf <= new_max)
+                   x_sub = np.array(xf[mask], copy=True)
+                   y_sub_raw = np.array(yf_raw[mask], copy=True)
+                   if x_sub.size == 0:
+                       ax.lines[i].set_data([], [])
+                       x_data_list[i] = np.array([])
+                       y_data_list[i] = np.array([])
+                       if i < len(orig_y):
+                           orig_y[i] = np.array([])
+                       continue
+                   # Auto-normalize for --stack mode, or explicit --norm flag
                    should_normalize = args.stack or getattr(args, 'norm', False)
                    if should_normalize:
                        if y_sub_raw.size:
@@ -2512,63 +3283,13 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                            y_sub_norm = y_sub_raw
                    else:
                        y_sub_norm = y_sub_raw
-                   offset_val = offsets_list[i]
+                   offset_val = offsets_list[i] if i < len(offsets_list) else 0.0
                    y_with_offset = y_sub_norm + offset_val
                    ax.lines[i].set_data(x_sub, y_with_offset)
                    x_data_list[i] = x_sub
                    y_data_list[i] = y_with_offset
-                   orig_y[i] = y_sub_norm
-           ax.set_xlim(new_min, new_max)
-           ax.relim(); ax.autoscale_view(scalex=False, scaley=True)
-           update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
-           try:
-               if hasattr(ax, '_cif_extend_func'):
-                   ax._cif_extend_func(ax.get_xlim()[1])
-           except Exception:
-               pass
-           try:
-               if hasattr(ax, '_cif_draw_func'):
-                   ax._cif_draw_func()
-           except Exception:
-               pass
-           fig.canvas.draw()
-           print(f"X range restored to original: {ax.get_xlim()[0]:.6g} to {ax.get_xlim()[1]:.6g}")
-           continue
-           push_state("xrange")
-           if rng.lower() == 'full':
-               new_min = min(xf.min() for xf in x_full_list if xf.size)
-               new_max = max(xf.max() for xf in x_full_list if xf.size)
-           else:
-               new_min, new_max = map(float, rng.split())
-           ax.set_xlim(new_min, new_max)
-           for i in range(len(labels)):
-               xf = x_full_list[i]; yf_raw = raw_y_full_list[i]
-               mask = (xf>=new_min) & (xf<=new_max)
-               x_sub = xf[mask]; y_sub_raw = yf_raw[mask]
-               if x_sub.size == 0:
-                   ax.lines[i].set_data([], [])
-                   y_data_list[i] = np.array([]); orig_y[i] = np.array([]); continue
-               # Auto-normalize for --stack mode, or explicit --norm flag
-               should_normalize = args.stack or getattr(args, 'norm', False)
-               if should_normalize:
-                   if y_sub_raw.size:
-                       y_min = float(y_sub_raw.min())
-                       y_max = float(y_sub_raw.max())
-                       span = y_max - y_min
-                       if span > 0:
-                           y_sub_norm = (y_sub_raw - y_min) / span
-                       else:
-                           y_sub_norm = np.zeros_like(y_sub_raw)
-                   else:
-                       y_sub_norm = y_sub_raw
-               else:
-                   y_sub_norm = y_sub_raw
-               offset_val = offsets_list[i]
-               y_with_offset = y_sub_norm + offset_val
-               ax.lines[i].set_data(x_sub, y_with_offset)
-               x_data_list[i] = x_sub
-               y_data_list[i] = y_with_offset
-               orig_y[i] = y_sub_norm
+                   if i < len(orig_y):
+                       orig_y[i] = y_sub_norm
           ax.relim(); ax.autoscale_view(scalex=False, scaley=True)
           update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
           # Extend CIF ticks after x-range change
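Editor's note: an illustrative sketch (not part of the diff) of the per-curve min-max normalization used when --stack or --norm is active in the branches above: scale the raw sub-range into [0, 1], guard the flat-curve case, then add the stacking offset.

import numpy as np

y_sub_raw = np.array([3.0, 5.0, 9.0])
span = float(y_sub_raw.max() - y_sub_raw.min())
if span > 0:
    y_norm = (y_sub_raw - y_sub_raw.min()) / span
else:
    y_norm = np.zeros_like(y_sub_raw)  # constant curve: avoid 0/0
y_display = y_norm + 1.5               # per-curve stacking offset

assert y_norm.min() == 0.0 and y_norm.max() == 1.0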
@@ -2697,7 +3418,98 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                print(f"Y range set to ({float(ymin)}, {float(ymax)})")
            except Exception as e:
                print(f"Error setting Y-axis range: {e}")
-       elif key == 'd':  # <-- DELTA / OFFSET HANDLER (now only reachable if not args.stack)
+       elif key == 'd':  # <-- DERIVATIVE HANDLER
+           while True:
+               try:
+                   print("\n\033[1mDerivative Menu\033[0m")
+                   print("Commands:")
+                   print("  1: Calculate 1st derivative (dy/dx)")
+                   print("  2: Calculate 2nd derivative (d²y/dx²)")
+                   print("  3: Calculate reversed 1st derivative (dx/dy)")
+                   print("  4: Calculate reversed 2nd derivative (d²x/dy²)")
+                   print("  reset: Reset to data before derivative")
+                   print("  q: back to main menu")
+                   sub = _safe_input(colorize_prompt("d> ")).strip().lower()
+                   if not sub or sub == 'q':
+                       break
+                   if sub == 'reset':
+                       push_state("derivative-reset")
+                       success, reset_count, total_points = _reset_from_derivative()
+                       if success:
+                           print(f"Reset {reset_count} curve(s) from derivative to original data ({total_points} total points restored).")
+                           ax.relim()
+                           ax.autoscale_view(scalex=False, scaley=True)
+                           update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
+                           _apply_data_changes()
+                       else:
+                           print("No derivative data to reset.")
+                       continue
+                   if sub in ('1', '2', '3', '4'):
+                       try:
+                           option = int(sub)
+                           is_reversed = (option == 3 or option == 4)
+                           order = 1 if option in (1, 3) else 2
+                           push_state(f"derivative-{option}")
+                           _ensure_pre_derivative_data()
+                           processed = 0
+                           total_points = 0
+                           for i in range(len(x_data_list)):
+                               try:
+                                   # Use current data (may already be processed)
+                                   current_x = x_data_list[i].copy()
+                                   current_y = y_data_list[i].copy()
+                                   # Remove offset for processing
+                                   if i < len(offsets_list):
+                                       current_y_no_offset = current_y - offsets_list[i]
+                                   else:
+                                       current_y_no_offset = current_y.copy()
+                                   n_points = len(current_y_no_offset)
+                                   if n_points < 2:
+                                       print(f"Curve {i+1} has too few points (<2) for derivative calculation.")
+                                       continue
+                                   # Calculate derivative
+                                   if is_reversed:
+                                       derivative_y = _calculate_reversed_derivative(current_x, current_y_no_offset, order)
+                                   else:
+                                       derivative_y = _calculate_derivative(current_x, current_y_no_offset, order)
+                                   if len(derivative_y) > 0:
+                                       # Restore offset
+                                       if i < len(offsets_list):
+                                           derivative_y = derivative_y + offsets_list[i]
+                                       # Update data (keep same x, replace y with derivative)
+                                       x_data_list[i] = current_x.copy()
+                                       y_data_list[i] = derivative_y
+                                       processed += 1
+                                       total_points += n_points
+                               except Exception as e:
+                                   print(f"Error processing curve {i+1}: {e}")
+                           if processed > 0:
+                               # Update y-axis label
+                               current_ylabel = ax.get_ylabel() or ""
+                               new_ylabel = _update_ylabel_for_derivative(order, current_ylabel, is_reversed=is_reversed)
+                               ax.set_ylabel(new_ylabel)
+                               # Store derivative order and reversed flag
+                               fig._derivative_order = order
+                               fig._derivative_reversed = is_reversed
+                               # Update plot
+                               _apply_data_changes()
+                               ax.relim()
+                               ax.autoscale_view(scalex=False, scaley=True)
+                               update_labels(ax, y_data_list, label_text_objects, args.stack, getattr(fig, '_stack_label_at_bottom', False))
+                               fig.canvas.draw_idle()
+                               order_name = "1st" if order == 1 else "2nd"
+                               direction = "reversed " if is_reversed else ""
+                               print(f"Applied {direction}{order_name} derivative to {processed} curve(s) with {total_points} total points.")
+                               print(f"Y-axis label updated to: {new_ylabel}")
+                               _update_full_processed_data()  # Store full processed data for X-range filtering
+                           else:
+                               print("No curves were processed.")
+                       except ValueError:
+                           print("Invalid input.")
+                           continue
+               except Exception as e:
+                   print(f"Error in derivative menu: {e}")
+       elif key == 'o':  # <-- OFFSET HANDLER (now only reachable if not args.stack)
            print("\n\033[1mOffset adjustment menu:\033[0m")
            print(f"  {colorize_menu('1-{}: adjust individual curve offset'.format(len(labels)))}")
            print(f"  {colorize_menu('a: set spacing between curves')}")
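Editor's note: an illustrative sketch (not part of the diff) of the offset-aware pipeline the derivative handler above follows for each curve: subtract the display offset, transform the raw signal, then re-add the offset.

import numpy as np

x = np.linspace(0.0, 1.0, 101)
offset = 2.0
y_display = x ** 2 + offset          # what is drawn on the axes

y_raw = y_display - offset           # 1) back to the physical signal
dy_dx = np.gradient(y_raw, x)        # 2) transform (here: dy/dx, which is 2x)
y_new_display = dy_dx + offset       # 3) re-apply the stacking offset

# central differences are exact for a quadratic away from the endpoints
assert np.allclose(dy_dx[1:-1], 2 * x[1:-1])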
@@ -3797,6 +4609,668 @@ def interactive_menu(fig, ax, y_data_list, x_data_list, labels, orig_y,
                fig.canvas.draw()
            except Exception as e:
                print(f"Error saving figure: {e}")
+       elif key == 'sm':
+           # Smoothing and data reduction menu
+           _ensure_original_data()
+           while True:
+               print("\n\033[1mSmoothing and Data Reduction\033[0m")
+               print("Commands:")
+               print("  r: reduce rows (delete/merge rows based on pattern)")
+               print("  s: smooth data (various smoothing methods)")
+               print("  reset: reset all curves to original data")
+               print("  q: back to main menu")
+               sub = _safe_input(colorize_prompt("sm> ")).strip().lower()
+               if not sub:
+                   continue
+               if sub == 'q':
+                   break
+               if sub == 'reset':
+                   push_state("smooth-reset")
+                   success, reset_count, total_points = _reset_to_original()
+                   if success:
+                       print(f"Reset {reset_count} curve(s) to original data ({total_points} total points restored).")
+                       _apply_data_changes()
+                   else:
+                       print("No processed data to reset.")
+                   continue
+               if sub == 'r':
+                   # Reduce rows submenu
+                   while True:
+                       print("\n\033[1mReduce Rows\033[0m")
+                       print("Methods:")
+                       print("  1: Delete N rows, then skip M rows")
+                       print("  2: Delete rows with missing values")
+                       print("  3: Reduce N rows with merged values (average/sum/min/max)")
+                       print("  q: back to smooth menu")
+                       method = _safe_input(colorize_prompt("sm>r> ")).strip().lower()
+                       if not method or method == 'q':
+                           break
+                       if method == '1':
+                           # Delete N rows, then skip M rows
+                           try:
+                               # Check for last settings
+                               last_settings = _get_last_reduce_rows_settings('delete_skip')
+                               last_n = last_settings.get('n')
+                               last_m = last_settings.get('m')
+                               last_start_row = last_settings.get('start_row')
+
+                               if last_n is not None and last_m is not None and last_start_row is not None:
+                                   use_last = _safe_input(f"Use last settings? (N={last_n}, M={last_m}, start_row={last_start_row+1}, y/n or enter N): ").strip().lower()
+                                   # Check if user entered a number directly (skip "use last settings")
+                                   if use_last and use_last.replace('-', '').replace('.', '').isdigit():
+                                       n = int(float(use_last))
+                                       if n < 1:
+                                           print("N must be >= 1.")
+                                           continue
+                                       m_in = _safe_input(f"Enter M (rows to skip, default {last_m}): ").strip()
+                                       m = int(m_in) if m_in else last_m
+                                       if m < 0:
+                                           print("M must be >= 0.")
+                                           continue
+                                       start_in = _safe_input(f"Starting row (1-based, default {last_start_row+1}): ").strip()
+                                       start_row = int(start_in) - 1 if start_in else last_start_row
+                                   elif use_last != 'n':
+                                       n = last_n
+                                       m = last_m
+                                       start_row = last_start_row  # Already 0-based in config
+                                   else:
+                                       n_in = _safe_input(f"Enter N (rows to delete, default {last_n}): ").strip()
+                                       n = int(n_in) if n_in else last_n
+                                       if n < 1:
+                                           print("N must be >= 1.")
+                                           continue
+                                       m_in = _safe_input(f"Enter M (rows to skip, default {last_m}): ").strip()
+                                       m = int(m_in) if m_in else last_m
+                                       if m < 0:
+                                           print("M must be >= 0.")
+                                           continue
+                                       start_in = _safe_input(f"Starting row (1-based, default {last_start_row+1}): ").strip()
+                                       start_row = int(start_in) - 1 if start_in else last_start_row
+                               else:
+                                   n_in = _safe_input("Enter N (rows to delete, default 1): ").strip()
+                                   n = int(n_in) if n_in else 1
+                                   if n < 1:
+                                       print("N must be >= 1.")
+                                       continue
+                                   m_in = _safe_input("Enter M (rows to skip, default 0): ").strip()
+                                   m = int(m_in) if m_in else 0
+                                   if m < 0:
+                                       print("M must be >= 0.")
+                                       continue
+                                   start_in = _safe_input("Starting row (1-based, default 1): ").strip()
+                                   start_row = int(start_in) - 1 if start_in else 0
+
+                               if start_row < 0:
+                                   start_row = 0
+                               push_state("reduce-rows-delete-skip")
+                               _ensure_original_data()
+                               processed = 0
+                               total_before = 0
+                               total_after = 0
+                               for i in range(len(x_data_list)):
+                                   try:
+                                       # Use current data (may already be processed), not original
+                                       orig_x = x_data_list[i].copy()
+                                       orig_y = y_data_list[i].copy()
+                                       # Remove offset for processing
+                                       if i < len(offsets_list):
+                                           orig_y = orig_y - offsets_list[i]
+                                       if start_row >= len(orig_x):
+                                           continue
+                                       before = len(orig_x)
+                                       # Create mask: delete n rows, then skip m rows, repeat
+                                       mask = np.ones(len(orig_x), dtype=bool)
+                                       idx = start_row
+                                       while idx < len(orig_x):
+                                           # Delete n rows
+                                           end_del = min(idx + n, len(orig_x))
+                                           mask[idx:end_del] = False
+                                           idx = end_del
+                                           # Skip m rows
+                                           idx = min(idx + m, len(orig_x))
+                                       new_x = orig_x[mask]
+                                       new_y = orig_y[mask]
+                                       after = len(new_x)
+                                       if len(new_x) > 0:
+                                           # Restore offset
+                                           if i < len(offsets_list):
+                                               new_y = new_y + offsets_list[i]
+                                           x_data_list[i] = new_x
+                                           y_data_list[i] = new_y
+                                           processed += 1
+                                           total_before += before
+                                           total_after += after
+                                   except Exception as e:
+                                       print(f"Error processing curve {i+1}: {e}")
+                               if processed > 0:
+                                   removed = total_before - total_after
+                                   pct = 100 * removed / total_before if total_before else 0
+                                   print(f"Processed {processed} curve(s); removed {removed} of {total_before} points ({pct:.1f}%).")
+                                   _update_full_processed_data()  # Store full processed data for X-range filtering
+                                   _apply_data_changes()
+                                   # Save settings for next time
+                                   _save_last_reduce_rows_settings('delete_skip', {
+                                       'n': n,
+                                       'm': m,
+                                       'start_row': start_row  # Save as 0-based
+                                   })
+                               else:
+                                   print("No curves were processed.")
+                           except ValueError:
+                               print("Invalid number.")
+                               continue
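Editor's note: an illustrative sketch (not part of the diff) of the delete-N/skip-M mask built in method 1 above. (Worth noting: the handler rebinds `orig_y`, which shadows the enclosing `orig_y` list parameter within that scope.) With N=1, M=2 every third row is dropped starting at `start`.

import numpy as np

def delete_skip_mask(length: int, n: int, m: int, start: int = 0) -> np.ndarray:
    keep = np.ones(length, dtype=bool)
    idx = start
    while idx < length:
        end_del = min(idx + n, length)
        keep[idx:end_del] = False       # delete n rows
        idx = min(end_del + m, length)  # then keep (skip over) m rows
    return keep

mask = delete_skip_mask(9, n=1, m=2)
assert mask.tolist() == [False, True, True] * 3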
+ if method == '2':
4763
+ # Delete rows with missing values
4764
+ try:
4765
+ # Check for last settings
4766
+ last_settings = _get_last_reduce_rows_settings('delete_missing')
4767
+ last_delete_entire_row = last_settings.get('delete_entire_row')
4768
+
4769
+ if last_delete_entire_row is not None:
4770
+ default_str = "y" if last_delete_entire_row else "n"
4771
+ use_last = _safe_input(f"Use last settings? (delete_entire_row={'y' if last_delete_entire_row else 'n'}, y/n or enter y/n): ").strip().lower()
4772
+ # Check if user entered y/n directly (skip "use last settings")
4773
+ if use_last in ('y', 'n', 'yes', 'no'):
4774
+ delete_entire_row = use_last in ('y', 'yes')
4775
+ elif use_last != 'n':
4776
+ delete_entire_row = last_delete_entire_row
4777
+ else:
4778
+ delete_entire_row_in = _safe_input(f"Delete entire row? (y/n, default {default_str}): ").strip().lower()
4779
+ delete_entire_row = delete_entire_row_in != 'n'
4780
+ else:
4781
+ delete_entire_row_in = _safe_input("Delete entire row? (y/n, default y): ").strip().lower()
4782
+ delete_entire_row = delete_entire_row_in != 'n'
4783
+ push_state("reduce-rows-delete-missing")
+ _ensure_original_data()
+ processed = 0
+ total_before = 0
+ total_after = 0
+ for i in range(len(x_data_list)):
+ try:
+ # Use current data (may already be processed), not original
+ orig_x = x_data_list[i].copy()
+ orig_y = y_data_list[i].copy()
+ # Remove offset for processing
+ if i < len(offsets_list):
+ orig_y = orig_y - offsets_list[i]
+ before = len(orig_x)
+ # Check for missing values (NaN or inf)
+ if delete_entire_row:
+ mask = np.isfinite(orig_x) & np.isfinite(orig_y)
+ else:
+ # Only delete missing in current column
+ mask = np.isfinite(orig_y)
+ new_x = orig_x[mask]
+ new_y = orig_y[mask]
+ after = len(new_x)
+ if len(new_x) > 0:
+ # Restore offset
+ if i < len(offsets_list):
+ new_y = new_y + offsets_list[i]
+ x_data_list[i] = new_x
+ y_data_list[i] = new_y
+ processed += 1
+ total_before += before
+ total_after += after
+ except Exception as e:
+ print(f"Error processing curve {i+1}: {e}")
+ if processed > 0:
+ removed = total_before - total_after
+ pct = 100 * removed / total_before if total_before else 0
+ print(f"Processed {processed} curve(s); removed {removed} of {total_before} points ({pct:.1f}%).")
+ _update_full_processed_data() # Store full processed data for X-range filtering
+ _apply_data_changes()
+ # Save settings for next time
+ _save_last_reduce_rows_settings('delete_missing', {
+ 'delete_entire_row': delete_entire_row
+ })
+ else:
+ print("No curves were processed.")
+ except Exception:
+ print("Error processing data.")
+ continue
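
The missing-value filter above reduces to a single finite-mask operation; a self-contained sketch of the same idea with made-up data:

    import numpy as np

    x = np.array([0.0, 1.0, 2.0, np.nan, 4.0])
    y = np.array([1.0, np.inf, 3.0, 4.0, 5.0])
    mask = np.isfinite(x) & np.isfinite(y)  # "delete entire row": a NaN/inf in either column drops the row
    print(x[mask], y[mask])                 # [0. 2. 4.] [1. 3. 5.]
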
+ if method == '3':
+ # Reduce N rows with merged values
+ try:
+ # Check for last settings
+ last_settings = _get_last_reduce_rows_settings('merge')
+ last_n = last_settings.get('n')
+ last_merge_by = last_settings.get('merge_by')
+ last_start_row = last_settings.get('start_row')
+
+ if last_n is not None and last_merge_by is not None and last_start_row is not None:
+ merge_names = {
+ '1': 'First point',
+ '2': 'Last point',
+ '3': 'Average',
+ '4': 'Min',
+ '5': 'Max',
+ '6': 'Sum'
+ }
+ merge_name = merge_names.get(last_merge_by, 'Average')
+ use_last = _safe_input(f"Use last settings? (N={last_n}, merge_by={merge_name}, start_row={last_start_row+1}, y/n or enter N): ").strip().lower()
+ # Check if user entered a number directly (skip "use last settings")
+ if use_last and use_last.replace('-', '').replace('.', '').isdigit():
+ n = int(float(use_last))
+ if n < 2:
+ print("N must be >= 2.")
+ continue
+ print("Merge by:")
+ print(" 1: First point")
+ print(" 2: Last point")
+ print(" 3: Average")
+ print(" 4: Min")
+ print(" 5: Max")
+ print(" 6: Sum")
+ merge_by_in = _safe_input(f"Choose (1-6, default {last_merge_by}): ").strip()
+ merge_by = merge_by_in if merge_by_in else last_merge_by
+ start_in = _safe_input(f"Starting row (1-based, default {last_start_row+1}): ").strip()
+ start_row = int(start_in) - 1 if start_in else last_start_row
+ elif use_last != 'n':
+ n = last_n
+ merge_by = last_merge_by
+ start_row = last_start_row # Already 0-based in config
+ else:
+ n_in = _safe_input(f"Enter N (rows to merge, default {last_n}): ").strip()
+ n = int(n_in) if n_in else last_n
+ if n < 2:
+ print("N must be >= 2.")
+ continue
+ print("Merge by:")
+ print(" 1: First point")
+ print(" 2: Last point")
+ print(" 3: Average")
+ print(" 4: Min")
+ print(" 5: Max")
+ print(" 6: Sum")
+ merge_by_in = _safe_input(f"Choose (1-6, default {last_merge_by}): ").strip()
+ merge_by = merge_by_in if merge_by_in else last_merge_by
+ start_in = _safe_input(f"Starting row (1-based, default {last_start_row+1}): ").strip()
+ start_row = int(start_in) - 1 if start_in else last_start_row
+ else:
+ n_in = _safe_input("Enter N (rows to merge, default 2): ").strip()
+ n = int(n_in) if n_in else 2
+ if n < 2:
+ print("N must be >= 2.")
+ continue
+ print("Merge by:")
+ print(" 1: First point")
+ print(" 2: Last point")
+ print(" 3: Average")
+ print(" 4: Min")
+ print(" 5: Max")
+ print(" 6: Sum")
+ merge_by_in = _safe_input("Choose (1-6, default 3): ").strip()
+ merge_by = merge_by_in if merge_by_in else '3'
+ start_in = _safe_input("Starting row (1-based, default 1): ").strip()
+ start_row = int(start_in) - 1 if start_in else 0
+
+ if start_row < 0:
+ start_row = 0
+
+ merge_funcs = {
+ '1': lambda arr: arr[0] if len(arr) > 0 else np.nan,
+ '2': lambda arr: arr[-1] if len(arr) > 0 else np.nan,
+ '3': np.nanmean,
+ '4': np.nanmin,
+ '5': np.nanmax,
+ '6': np.nansum,
+ }
+ merge_func = merge_funcs.get(merge_by, np.nanmean)
+ push_state("reduce-rows-merge")
+ _ensure_original_data()
+ processed = 0
+ total_before = 0
+ total_after = 0
+ for i in range(len(x_data_list)):
+ try:
+ # Use current data (may already be processed), not original
+ orig_x = x_data_list[i].copy()
+ orig_y = y_data_list[i].copy()
+ # Remove offset for processing
+ if i < len(offsets_list):
+ orig_y = orig_y - offsets_list[i]
+ if start_row >= len(orig_x):
+ continue
+ before = len(orig_x)
+ # Group into chunks of N
+ new_x_list = []
+ new_y_list = []
+ idx = 0
+ while idx < start_row:
+ new_x_list.append(orig_x[idx])
+ new_y_list.append(orig_y[idx])
+ idx += 1
+ while idx < len(orig_x):
+ end_idx = min(idx + n, len(orig_x))
+ chunk_x = orig_x[idx:end_idx]
+ chunk_y = orig_y[idx:end_idx]
+ # Merge: use first x, merge y based on method
+ new_x = chunk_x[0] if len(chunk_x) > 0 else np.nan
+ new_y = merge_func(chunk_y) if len(chunk_y) > 0 else np.nan
+ if np.isfinite(new_x) and np.isfinite(new_y):
+ new_x_list.append(new_x)
+ new_y_list.append(new_y)
+ idx = end_idx
+ if len(new_x_list) > 0:
+ new_x = np.array(new_x_list)
+ new_y = np.array(new_y_list)
+ after = len(new_x)
+ # Restore offset
+ if i < len(offsets_list):
+ new_y = new_y + offsets_list[i]
+ x_data_list[i] = new_x
+ y_data_list[i] = new_y
+ processed += 1
+ total_before += before
+ total_after += after
+ except Exception as e:
+ print(f"Error processing curve {i+1}: {e}")
+ if processed > 0:
+ removed = total_before - total_after
+ pct = 100 * removed / total_before if total_before else 0
+ print(f"Processed {processed} curve(s); reduced {total_before} to {total_after} points (removed {removed}, {pct:.1f}%).")
+ _update_full_processed_data() # Store full processed data for X-range filtering
+ _apply_data_changes()
+ # Save settings for next time
+ _save_last_reduce_rows_settings('merge', {
+ 'n': n,
+ 'merge_by': merge_by,
+ 'start_row': start_row # Save as 0-based
+ })
+ else:
+ print("No curves were processed.")
+ except (ValueError, KeyError):
+ print("Invalid input.")
+ continue
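
The merge loop above collapses each chunk of N rows to one point (the chunk's first x, its y values reduced by the chosen function). A compact equivalent with illustrative names, assuming the same chunking:

    import numpy as np

    def merge_rows(x, y, n, reducer=np.nanmean, start_row=0):
        # Rows before start_row pass through; each following chunk of n rows
        # keeps the chunk's first x and reduces its y values with `reducer`.
        out_x, out_y = list(x[:start_row]), list(y[:start_row])
        for i in range(start_row, len(x), n):
            out_x.append(x[i])
            out_y.append(reducer(y[i:i + n]))
        return np.array(out_x), np.array(out_y)

    x = np.arange(6, dtype=float)
    y = np.array([1.0, 3.0, 5.0, 7.0, 9.0, 11.0])
    print(merge_rows(x, y, n=2))  # x -> [0. 2. 4.], y (means) -> [2. 6. 10.]
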
+ if sub == 's':
+ # Smooth submenu
+ while True:
+ print("\n\033[1mSmooth Data\033[0m")
+ print("Methods:")
+ print(" 1: Adjacent-Averaging (moving average)")
+ print(" 2: Savitzky-Golay (polynomial smoothing)")
+ print(" 3: FFT Filter (low-pass frequency filter)")
+ print(" q: back to smooth menu")
+ method = _safe_input(colorize_prompt("sm>s> ")).strip().lower()
+ if not method or method == 'q':
+ break
+ if method == '1':
+ # Adjacent-Averaging
+ try:
+ # Check for last settings (from config file for persistence)
+ config_settings = _get_last_smooth_settings_from_config()
+ session_settings = getattr(fig, '_last_smooth_settings', {})
+ # Prefer config settings (persistent) over session settings
+ last_settings = config_settings if config_settings.get('method') == 'adjacent_average' else session_settings
+ last_method = last_settings.get('method')
+ last_points = last_settings.get('points')
+
+ if last_method == 'adjacent_average' and last_points is not None:
+ use_last = _safe_input(f"Use last settings? (points={last_points}, y/n or enter points): ").strip().lower()
+ # Check if user entered a number directly (skip "use last settings")
+ if use_last and use_last.replace('-', '').replace('.', '').isdigit():
+ points = int(float(use_last))
+ elif use_last != 'n':
+ points = last_points
+ else:
+ points_in = _safe_input(f"Number of points (default {last_points}): ").strip()
+ points = int(points_in) if points_in else last_points
+ else:
+ points_in = _safe_input("Number of points (default 5): ").strip()
+ points = int(points_in) if points_in else 5
+
+ if points < 2:
+ print("Points must be >= 2.")
+ continue
+ push_state("smooth-adjacent-average")
+ _ensure_original_data()
+ processed = 0
+ total_points = 0
+ for i in range(len(x_data_list)):
+ try:
+ # Use current data (may already be processed), not original
+ orig_x = x_data_list[i].copy()
+ orig_y = y_data_list[i].copy()
+ # Remove offset for processing
+ if i < len(offsets_list):
+ orig_y = orig_y - offsets_list[i]
+ n_points = len(orig_y)
+ # Apply smoothing
+ smoothed_y = _adjacent_average_smooth(orig_y, points)
+ if len(smoothed_y) > 0:
+ # Restore offset
+ if i < len(offsets_list):
+ smoothed_y = smoothed_y + offsets_list[i]
+ # Keep original x, update y
+ x_data_list[i] = orig_x.copy()
+ y_data_list[i] = smoothed_y
+ processed += 1
+ total_points += n_points
+ except Exception as e:
+ print(f"Error processing curve {i+1}: {e}")
+ if processed > 0:
+ print(f"Smoothed {processed} curve(s) with {total_points} total points using Adjacent-Averaging (window={points}).")
+ _update_full_processed_data() # Store full processed data for X-range filtering
+ _apply_data_changes()
+ # Store settings (both current and last)
+ if not hasattr(fig, '_smooth_settings'):
+ fig._smooth_settings = {}
+ fig._smooth_settings['method'] = 'adjacent_average'
+ fig._smooth_settings['points'] = points
+ # Store as last settings for next time (both in-memory and config file)
+ if not hasattr(fig, '_last_smooth_settings'):
+ fig._last_smooth_settings = {}
+ fig._last_smooth_settings['method'] = 'adjacent_average'
+ fig._last_smooth_settings['points'] = points
+ # Save to config file for persistence across sessions
+ _save_last_smooth_settings_to_config({
+ 'method': 'adjacent_average',
+ 'points': points
+ })
+ else:
+ print("No curves were smoothed.")
+ except ValueError:
+ print("Invalid number.")
+ continue
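
_adjacent_average_smooth itself is not shown in this diff; a common moving-average formulation it presumably resembles (edge handling in the real helper may differ):

    import numpy as np

    def adjacent_average(y, points):
        kernel = np.ones(points) / points
        return np.convolve(y, kernel, mode='same')  # same length as input; edge bins see fewer real samples

    y = np.array([0.0, 0.0, 10.0, 0.0, 0.0])
    print(adjacent_average(y, 5))  # [2. 2. 2. 2. 2.]
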
+ if method == '2':
+ # Savitzky-Golay
+ try:
+ # Check for last settings (from config file for persistence)
+ config_settings = _get_last_smooth_settings_from_config()
+ session_settings = getattr(fig, '_last_smooth_settings', {})
+ # Prefer config settings (persistent) over session settings
+ last_settings = config_settings if config_settings.get('method') == 'savgol' else session_settings
+ last_method = last_settings.get('method')
+ last_window = last_settings.get('window')
+ last_poly = last_settings.get('poly')
+
+ if last_method == 'savgol' and last_window is not None and last_poly is not None:
+ use_last = _safe_input(f"Use last settings? (window={last_window}, poly={last_poly}, y/n or enter window): ").strip().lower()
+ # Check if user entered a number directly (skip "use last settings")
+ if use_last and use_last.replace('-', '').replace('.', '').isdigit():
+ window = int(float(use_last))
+ if window < 3:
+ window = 3
+ if window % 2 == 0:
+ window += 1
+ poly_in = _safe_input(f"Polynomial order (default {last_poly}): ").strip()
+ poly = int(poly_in) if poly_in else last_poly
+ elif use_last != 'n':
+ window = last_window
+ poly = last_poly
+ else:
+ window_in = _safe_input(f"Window size (odd >= 3, default {last_window}): ").strip()
+ window = int(window_in) if window_in else last_window
+ if window < 3:
+ window = 3
+ if window % 2 == 0:
+ window += 1
+ poly_in = _safe_input(f"Polynomial order (default {last_poly}): ").strip()
+ poly = int(poly_in) if poly_in else last_poly
+ else:
+ window_in = _safe_input("Window size (odd >= 3, default 9): ").strip()
+ window = int(window_in) if window_in else 9
+ if window < 3:
+ window = 3
+ if window % 2 == 0:
+ window += 1
+ poly_in = _safe_input("Polynomial order (default 3): ").strip()
+ poly = int(poly_in) if poly_in else 3
+
+ if poly < 1:
+ poly = 1
+ if poly >= window:
+ poly = window - 1
+ push_state("smooth-savgol")
+ _ensure_original_data()
+ processed = 0
+ total_points = 0
+ for i in range(len(x_data_list)):
+ try:
+ # Use current data (may already be processed), not original
+ orig_x = x_data_list[i].copy()
+ orig_y = y_data_list[i].copy()
+ # Remove offset for processing
+ if i < len(offsets_list):
+ orig_y = orig_y - offsets_list[i]
+ n_points = len(orig_y)
+ # Apply smoothing
+ smoothed_y = _savgol_smooth(orig_y, window, poly)
+ if len(smoothed_y) > 0:
+ # Restore offset
+ if i < len(offsets_list):
+ smoothed_y = smoothed_y + offsets_list[i]
+ # Keep original x, update y
+ x_data_list[i] = orig_x.copy()
+ y_data_list[i] = smoothed_y
+ processed += 1
+ total_points += n_points
+ except Exception as e:
+ print(f"Error processing curve {i+1}: {e}")
+ if processed > 0:
+ print(f"Smoothed {processed} curve(s) with {total_points} total points using Savitzky-Golay (window={window}, poly={poly}).")
+ _update_full_processed_data() # Store full processed data for X-range filtering
+ _apply_data_changes()
+ # Store settings (both current and last)
+ if not hasattr(fig, '_smooth_settings'):
+ fig._smooth_settings = {}
+ fig._smooth_settings['method'] = 'savgol'
+ fig._smooth_settings['window'] = window
+ fig._smooth_settings['poly'] = poly
+ # Store as last settings for next time (both in-memory and config file)
+ if not hasattr(fig, '_last_smooth_settings'):
+ fig._last_smooth_settings = {}
+ fig._last_smooth_settings['method'] = 'savgol'
+ fig._last_smooth_settings['window'] = window
+ fig._last_smooth_settings['poly'] = poly
+ # Save to config file for persistence across sessions
+ _save_last_smooth_settings_to_config({
+ 'method': 'savgol',
+ 'window': window,
+ 'poly': poly
+ })
+ else:
+ print("No curves were smoothed.")
+ except ValueError:
+ print("Invalid number.")
+ continue
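
The clamping above (odd window >= 3, poly < window) matches the standard Savitzky-Golay constraints. For comparison only, the same smoothing is one call where SciPy is available; batplot ships its own _savgol_smooth, so this is not the package's code path:

    import numpy as np
    from scipy.signal import savgol_filter

    rng = np.random.default_rng(0)
    y = np.sin(np.linspace(0, 2 * np.pi, 100)) + 0.1 * rng.standard_normal(100)
    y_smooth = savgol_filter(y, window_length=9, polyorder=3)  # local least-squares polynomial fit
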
+ if method == '3':
+ # FFT Filter
+ try:
+ # Check for last settings (from config file for persistence)
+ config_settings = _get_last_smooth_settings_from_config()
+ session_settings = getattr(fig, '_last_smooth_settings', {})
+ # Prefer config settings (persistent) over session settings
+ last_settings = config_settings if config_settings.get('method') == 'fft' else session_settings
+ last_method = last_settings.get('method')
+ last_points = last_settings.get('points')
+ last_cutoff = last_settings.get('cutoff')
+
+ if last_method == 'fft' and last_points is not None and last_cutoff is not None:
+ use_last = _safe_input(f"Use last settings? (points={last_points}, cutoff={last_cutoff:.3f}, y/n or enter points): ").strip().lower()
+ # Check if user entered a number directly (skip "use last settings")
+ if use_last and use_last.replace('-', '').replace('.', '').isdigit():
+ points = int(float(use_last))
+ if points < 2:
+ points = 2
+ cutoff_in = _safe_input(f"Cutoff frequency (0-1, default {last_cutoff:.3f}): ").strip()
+ cutoff = float(cutoff_in) if cutoff_in else last_cutoff
+ elif use_last != 'n':
+ points = last_points
+ cutoff = last_cutoff
+ else:
+ points_in = _safe_input(f"Points for FFT (default {last_points}): ").strip()
+ points = int(points_in) if points_in else last_points
+ if points < 2:
+ points = 2
+ cutoff_in = _safe_input(f"Cutoff frequency (0-1, default {last_cutoff:.3f}): ").strip()
+ cutoff = float(cutoff_in) if cutoff_in else last_cutoff
+ else:
+ points_in = _safe_input("Points for FFT (default 5): ").strip()
+ points = int(points_in) if points_in else 5
+ if points < 2:
+ points = 2
+ cutoff_in = _safe_input("Cutoff frequency (0-1, default 0.1): ").strip()
+ cutoff = float(cutoff_in) if cutoff_in else 0.1
+
+ if cutoff < 0:
+ cutoff = 0
+ if cutoff > 1:
+ cutoff = 1
+ push_state("smooth-fft")
+ _ensure_original_data()
+ processed = 0
+ total_points = 0
+ for i in range(len(x_data_list)):
+ try:
+ # Use current data (may already be processed), not original
+ orig_x = x_data_list[i].copy()
+ orig_y = y_data_list[i].copy()
+ # Remove offset for processing
+ if i < len(offsets_list):
+ orig_y = orig_y - offsets_list[i]
+ n_points = len(orig_y)
+ # Apply smoothing
+ smoothed_y = _fft_smooth(orig_y, points, cutoff)
+ if len(smoothed_y) > 0:
+ # Restore offset
+ if i < len(offsets_list):
+ smoothed_y = smoothed_y + offsets_list[i]
+ # Keep original x, update y
+ x_data_list[i] = orig_x.copy()
+ y_data_list[i] = smoothed_y
+ processed += 1
+ total_points += n_points
+ except Exception as e:
+ print(f"Error processing curve {i+1}: {e}")
+ if processed > 0:
+ print(f"Smoothed {processed} curve(s) with {total_points} total points using FFT Filter (cutoff={cutoff:.3f}).")
+ _update_full_processed_data() # Store full processed data for X-range filtering
+ _apply_data_changes()
+ # Store settings (both current and last)
+ if not hasattr(fig, '_smooth_settings'):
+ fig._smooth_settings = {}
+ fig._smooth_settings['method'] = 'fft'
+ fig._smooth_settings['points'] = points
+ fig._smooth_settings['cutoff'] = cutoff
+ # Store as last settings for next time (both in-memory and config file)
+ if not hasattr(fig, '_last_smooth_settings'):
+ fig._last_smooth_settings = {}
+ fig._last_smooth_settings['method'] = 'fft'
+ fig._last_smooth_settings['points'] = points
+ fig._last_smooth_settings['cutoff'] = cutoff
+ # Save to config file for persistence across sessions
+ _save_last_smooth_settings_to_config({
+ 'method': 'fft',
+ 'points': points,
+ 'cutoff': cutoff
+ })
+ else:
+ print("No curves were smoothed.")
+ except ValueError:
+ print("Invalid number.")
+ continue
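
_fft_smooth takes both points and cutoff, but only the cutoff (a 0-1 fraction of the frequency range) is visible in this diff; a minimal low-pass sketch under that assumption, ignoring the points parameter:

    import numpy as np

    def fft_lowpass(y, cutoff):
        spec = np.fft.rfft(y)
        keep = int(round(cutoff * (len(spec) - 1)))  # highest frequency bin to retain
        spec[keep + 1:] = 0                          # zero everything above the cutoff
        return np.fft.irfft(spec, n=len(y))

    y = np.sin(np.linspace(0, 4 * np.pi, 64))
    y_smooth = fft_lowpass(y, cutoff=0.1)
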
  elif key == 'v':
  while True:
  try: