setiastrosuitepro-1.6.0-py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.
Potentially problematic release: this version of setiastrosuitepro might be problematic.
- setiastro/__init__.py +2 -0
- setiastro/saspro/__init__.py +20 -0
- setiastro/saspro/__main__.py +784 -0
- setiastro/saspro/_generated/__init__.py +7 -0
- setiastro/saspro/_generated/build_info.py +2 -0
- setiastro/saspro/abe.py +1295 -0
- setiastro/saspro/abe_preset.py +196 -0
- setiastro/saspro/aberration_ai.py +694 -0
- setiastro/saspro/aberration_ai_preset.py +224 -0
- setiastro/saspro/accel_installer.py +218 -0
- setiastro/saspro/accel_workers.py +30 -0
- setiastro/saspro/add_stars.py +621 -0
- setiastro/saspro/astrobin_exporter.py +1007 -0
- setiastro/saspro/astrospike.py +153 -0
- setiastro/saspro/astrospike_python.py +1839 -0
- setiastro/saspro/autostretch.py +196 -0
- setiastro/saspro/backgroundneutral.py +560 -0
- setiastro/saspro/batch_convert.py +325 -0
- setiastro/saspro/batch_renamer.py +519 -0
- setiastro/saspro/blemish_blaster.py +488 -0
- setiastro/saspro/blink_comparator_pro.py +2923 -0
- setiastro/saspro/bundles.py +61 -0
- setiastro/saspro/bundles_dock.py +114 -0
- setiastro/saspro/cheat_sheet.py +168 -0
- setiastro/saspro/clahe.py +342 -0
- setiastro/saspro/comet_stacking.py +1377 -0
- setiastro/saspro/config.py +38 -0
- setiastro/saspro/config_bootstrap.py +40 -0
- setiastro/saspro/config_manager.py +316 -0
- setiastro/saspro/continuum_subtract.py +1617 -0
- setiastro/saspro/convo.py +1397 -0
- setiastro/saspro/convo_preset.py +414 -0
- setiastro/saspro/copyastro.py +187 -0
- setiastro/saspro/cosmicclarity.py +1564 -0
- setiastro/saspro/cosmicclarity_preset.py +407 -0
- setiastro/saspro/crop_dialog_pro.py +948 -0
- setiastro/saspro/crop_preset.py +189 -0
- setiastro/saspro/curve_editor_pro.py +2544 -0
- setiastro/saspro/curves_preset.py +375 -0
- setiastro/saspro/debayer.py +670 -0
- setiastro/saspro/debug_utils.py +29 -0
- setiastro/saspro/dnd_mime.py +35 -0
- setiastro/saspro/doc_manager.py +2634 -0
- setiastro/saspro/exoplanet_detector.py +2166 -0
- setiastro/saspro/file_utils.py +284 -0
- setiastro/saspro/fitsmodifier.py +744 -0
- setiastro/saspro/free_torch_memory.py +48 -0
- setiastro/saspro/frequency_separation.py +1343 -0
- setiastro/saspro/function_bundle.py +1594 -0
- setiastro/saspro/ghs_dialog_pro.py +660 -0
- setiastro/saspro/ghs_preset.py +284 -0
- setiastro/saspro/graxpert.py +634 -0
- setiastro/saspro/graxpert_preset.py +287 -0
- setiastro/saspro/gui/__init__.py +0 -0
- setiastro/saspro/gui/main_window.py +8494 -0
- setiastro/saspro/gui/mixins/__init__.py +33 -0
- setiastro/saspro/gui/mixins/dock_mixin.py +263 -0
- setiastro/saspro/gui/mixins/file_mixin.py +445 -0
- setiastro/saspro/gui/mixins/geometry_mixin.py +403 -0
- setiastro/saspro/gui/mixins/header_mixin.py +441 -0
- setiastro/saspro/gui/mixins/mask_mixin.py +421 -0
- setiastro/saspro/gui/mixins/menu_mixin.py +361 -0
- setiastro/saspro/gui/mixins/theme_mixin.py +367 -0
- setiastro/saspro/gui/mixins/toolbar_mixin.py +1324 -0
- setiastro/saspro/gui/mixins/update_mixin.py +309 -0
- setiastro/saspro/gui/mixins/view_mixin.py +435 -0
- setiastro/saspro/halobgon.py +462 -0
- setiastro/saspro/header_viewer.py +445 -0
- setiastro/saspro/headless_utils.py +88 -0
- setiastro/saspro/histogram.py +753 -0
- setiastro/saspro/history_explorer.py +939 -0
- setiastro/saspro/image_combine.py +414 -0
- setiastro/saspro/image_peeker_pro.py +1596 -0
- setiastro/saspro/imageops/__init__.py +37 -0
- setiastro/saspro/imageops/mdi_snap.py +292 -0
- setiastro/saspro/imageops/scnr.py +36 -0
- setiastro/saspro/imageops/starbasedwhitebalance.py +210 -0
- setiastro/saspro/imageops/stretch.py +244 -0
- setiastro/saspro/isophote.py +1179 -0
- setiastro/saspro/layers.py +208 -0
- setiastro/saspro/layers_dock.py +714 -0
- setiastro/saspro/lazy_imports.py +193 -0
- setiastro/saspro/legacy/__init__.py +2 -0
- setiastro/saspro/legacy/image_manager.py +2226 -0
- setiastro/saspro/legacy/numba_utils.py +3659 -0
- setiastro/saspro/legacy/xisf.py +1071 -0
- setiastro/saspro/linear_fit.py +534 -0
- setiastro/saspro/live_stacking.py +1830 -0
- setiastro/saspro/log_bus.py +5 -0
- setiastro/saspro/logging_config.py +460 -0
- setiastro/saspro/luminancerecombine.py +309 -0
- setiastro/saspro/main_helpers.py +201 -0
- setiastro/saspro/mask_creation.py +928 -0
- setiastro/saspro/masks_core.py +56 -0
- setiastro/saspro/mdi_widgets.py +353 -0
- setiastro/saspro/memory_utils.py +666 -0
- setiastro/saspro/metadata_patcher.py +75 -0
- setiastro/saspro/mfdeconv.py +3826 -0
- setiastro/saspro/mfdeconv_earlystop.py +71 -0
- setiastro/saspro/mfdeconvcudnn.py +3263 -0
- setiastro/saspro/mfdeconvsport.py +2382 -0
- setiastro/saspro/minorbodycatalog.py +567 -0
- setiastro/saspro/morphology.py +382 -0
- setiastro/saspro/multiscale_decomp.py +1290 -0
- setiastro/saspro/nbtorgb_stars.py +531 -0
- setiastro/saspro/numba_utils.py +3044 -0
- setiastro/saspro/numba_warmup.py +141 -0
- setiastro/saspro/ops/__init__.py +9 -0
- setiastro/saspro/ops/command_help_dialog.py +623 -0
- setiastro/saspro/ops/command_runner.py +217 -0
- setiastro/saspro/ops/commands.py +1594 -0
- setiastro/saspro/ops/script_editor.py +1102 -0
- setiastro/saspro/ops/scripts.py +1413 -0
- setiastro/saspro/ops/settings.py +560 -0
- setiastro/saspro/parallel_utils.py +554 -0
- setiastro/saspro/pedestal.py +121 -0
- setiastro/saspro/perfect_palette_picker.py +1053 -0
- setiastro/saspro/pipeline.py +110 -0
- setiastro/saspro/pixelmath.py +1600 -0
- setiastro/saspro/plate_solver.py +2435 -0
- setiastro/saspro/project_io.py +797 -0
- setiastro/saspro/psf_utils.py +136 -0
- setiastro/saspro/psf_viewer.py +549 -0
- setiastro/saspro/pyi_rthook_astroquery.py +95 -0
- setiastro/saspro/remove_green.py +314 -0
- setiastro/saspro/remove_stars.py +1625 -0
- setiastro/saspro/remove_stars_preset.py +404 -0
- setiastro/saspro/resources.py +472 -0
- setiastro/saspro/rgb_combination.py +207 -0
- setiastro/saspro/rgb_extract.py +19 -0
- setiastro/saspro/rgbalign.py +723 -0
- setiastro/saspro/runtime_imports.py +7 -0
- setiastro/saspro/runtime_torch.py +754 -0
- setiastro/saspro/save_options.py +72 -0
- setiastro/saspro/selective_color.py +1552 -0
- setiastro/saspro/sfcc.py +1425 -0
- setiastro/saspro/shortcuts.py +2807 -0
- setiastro/saspro/signature_insert.py +1099 -0
- setiastro/saspro/stacking_suite.py +17712 -0
- setiastro/saspro/star_alignment.py +7420 -0
- setiastro/saspro/star_alignment_preset.py +329 -0
- setiastro/saspro/star_metrics.py +49 -0
- setiastro/saspro/star_spikes.py +681 -0
- setiastro/saspro/star_stretch.py +470 -0
- setiastro/saspro/stat_stretch.py +502 -0
- setiastro/saspro/status_log_dock.py +78 -0
- setiastro/saspro/subwindow.py +3267 -0
- setiastro/saspro/supernovaasteroidhunter.py +1712 -0
- setiastro/saspro/swap_manager.py +99 -0
- setiastro/saspro/torch_backend.py +89 -0
- setiastro/saspro/torch_rejection.py +434 -0
- setiastro/saspro/view_bundle.py +1555 -0
- setiastro/saspro/wavescale_hdr.py +624 -0
- setiastro/saspro/wavescale_hdr_preset.py +100 -0
- setiastro/saspro/wavescalede.py +657 -0
- setiastro/saspro/wavescalede_preset.py +228 -0
- setiastro/saspro/wcs_update.py +374 -0
- setiastro/saspro/whitebalance.py +456 -0
- setiastro/saspro/widgets/__init__.py +48 -0
- setiastro/saspro/widgets/common_utilities.py +305 -0
- setiastro/saspro/widgets/graphics_views.py +122 -0
- setiastro/saspro/widgets/image_utils.py +518 -0
- setiastro/saspro/widgets/preview_dialogs.py +280 -0
- setiastro/saspro/widgets/spinboxes.py +275 -0
- setiastro/saspro/widgets/themed_buttons.py +13 -0
- setiastro/saspro/widgets/wavelet_utils.py +299 -0
- setiastro/saspro/window_shelf.py +185 -0
- setiastro/saspro/xisf.py +1123 -0
- setiastrosuitepro-1.6.0.dist-info/METADATA +266 -0
- setiastrosuitepro-1.6.0.dist-info/RECORD +174 -0
- setiastrosuitepro-1.6.0.dist-info/WHEEL +4 -0
- setiastrosuitepro-1.6.0.dist-info/entry_points.txt +6 -0
- setiastrosuitepro-1.6.0.dist-info/licenses/LICENSE +674 -0
- setiastrosuitepro-1.6.0.dist-info/licenses/license.txt +2580 -0
setiastro/saspro/aberration_ai.py
@@ -0,0 +1,694 @@
# pro/aberration_ai.py
from __future__ import annotations
import os
import webbrowser
import requests
import numpy as np
import sys
import platform  # add
import time

IS_APPLE_ARM = (sys.platform == "darwin" and platform.machine() == "arm64")

from PyQt6.QtCore import Qt, QThread, pyqtSignal, QStandardPaths, QSettings
from PyQt6.QtWidgets import (
    QDialog, QVBoxLayout, QHBoxLayout, QLabel, QPushButton, QFileDialog,
    QComboBox, QSpinBox, QProgressBar, QMessageBox, QCheckBox
)
from PyQt6.QtGui import QIcon
from setiastro.saspro.config import Config

# Optional import (soft dep)
try:
    import onnxruntime as ort
except Exception:
    ort = None


# ---------- GitHub model fetching ----------
GITHUB_REPO = Config.GITHUB_ABERRATION_REPO
LATEST_API = f"https://api.github.com/repos/{GITHUB_REPO}/releases/latest"

def _model_required_patch(model_path: str) -> int | None:
    """
    Returns the fixed spatial size the model expects (e.g. 512), or None if dynamic.
    """
    if ort is None or not os.path.isfile(model_path):
        return None
    try:
        sess = ort.InferenceSession(model_path, providers=["CPUExecutionProvider"])
        shp = sess.get_inputs()[0].shape  # e.g. [1, 1, 512, 512] or ['N','C',512,512]
        h = shp[-2]; w = shp[-1]
        if isinstance(h, int) and isinstance(w, int) and h == w:
            return int(h)
    except Exception:
        pass
    return None


def _app_model_dir() -> str:
    d = Config.get_aberration_models_dir()
    os.makedirs(d, exist_ok=True)
    return d


class _DownloadWorker(QThread):
    progressed = pyqtSignal(int)   # 0..100 (downloaded)
    failed     = pyqtSignal(str)
    finished_ok= pyqtSignal(str)   # path

    def __init__(self, dst_dir: str):
        super().__init__()
        self.dst_dir = dst_dir

    def run(self):
        try:
            r = requests.get(LATEST_API, timeout=10)
            if r.status_code != 200:
                raise RuntimeError(f"GitHub API error: {r.status_code}")
            js = r.json()
            assets = js.get("assets", [])
            onnx_assets = [a for a in assets if a.get("name","").lower().endswith(".onnx")]
            if not onnx_assets:
                raise RuntimeError("No .onnx asset found in latest release.")
            asset = onnx_assets[0]
            url = asset["browser_download_url"]
            name = asset["name"]
            out_path = os.path.join(self.dst_dir, name)

            with requests.get(url, stream=True, timeout=60) as rr:
                rr.raise_for_status()
                total = int(rr.headers.get("Content-Length", "0") or 0)
                got = 0
                chunk = 1 << 20
                with open(out_path, "wb") as f:
                    for blk in rr.iter_content(chunk):
                        if blk:
                            f.write(blk)
                            got += len(blk)
                            if total > 0:
                                self.progressed.emit(int(got * 100 / total))
            self.finished_ok.emit(out_path)
        except Exception as e:
            self.failed.emit(str(e))

# ---------- core: tiling + hann blend ----------
def _hann2d(n: int) -> np.ndarray:
    w = np.hanning(n).astype(np.float32)
    return (w[:, None] * w[None, :])

def _tile_indices(n: int, patch: int, overlap: int) -> list[int]:
    stride = patch - overlap
    if patch >= n:
        return [0]
    idx, pos = [], 0
    while True:
        if pos + patch >= n:
            idx.append(n - patch)
            break
        idx.append(pos); pos += stride
    return sorted(set(idx))

def _pad_C_HW(arr: np.ndarray, patch: int) -> tuple[np.ndarray, int, int]:
    C, H, W = arr.shape
    pad_h = max(0, patch - H)
    pad_w = max(0, patch - W)
    if pad_h or pad_w:
        arr = np.pad(arr, ((0,0),(0,pad_h),(0,pad_w)), mode="edge")
    return arr, H, W

def _prepare_input(img: np.ndarray) -> tuple[np.ndarray, bool, bool]:
    """
    Returns (C,H,W) float32 in [0..1]; also returns (channels_last, was_uint16)
    """
    channels_last = (img.ndim == 3)
    if channels_last:
        arr = img.transpose(2,0,1)   # (C,H,W)
    else:
        arr = img[np.newaxis, ...]   # (1,H,W)
    was_uint16 = (arr.dtype == np.uint16)
    if was_uint16:
        arr = arr.astype(np.float32) / 65535.0
    else:
        arr = arr.astype(np.float32)
    return arr, channels_last, was_uint16

def _restore_output(arr: np.ndarray, channels_last: bool, was_uint16: bool, H: int, W: int) -> np.ndarray:
    arr = arr[:, :H, :W]
    arr = np.clip(np.nan_to_num(arr), 0.0, 1.0)
    if was_uint16:
        arr = (arr * 65535.0).astype(np.uint16)
    if channels_last:
        arr = arr.transpose(1,2,0)   # (H,W,C)
    else:
        arr = arr[0]                 # (H,W)
    return arr

def run_onnx_tiled(session, img: np.ndarray, patch_size=512, overlap=64, progress_cb=None) -> np.ndarray:
    """
    session: onnxruntime.InferenceSession
    img: mono (H,W) or RGB (H,W,3) numpy array
    """
    arr, channels_last, was_uint16 = _prepare_input(img)   # (C,H,W)
    arr, H0, W0 = _pad_C_HW(arr, patch_size)
    C, H, W = arr.shape

    win = _hann2d(patch_size)
    out = np.zeros_like(arr, dtype=np.float32)
    wgt = np.zeros_like(arr, dtype=np.float32)

    hs = _tile_indices(H, patch_size, overlap)
    ws = _tile_indices(W, patch_size, overlap)

    inp_name = session.get_inputs()[0].name
    total = len(hs) * len(ws) * C
    done = 0

    for c in range(C):
        for i in hs:
            for j in ws:
                patch = arr[c:c+1, i:i+patch_size, j:j+patch_size]   # (1, P, P)
                inp = np.ascontiguousarray(patch[np.newaxis, ...], dtype=np.float32)  # (1,1,P,P)

                out_patch = session.run(None, {inp_name: inp})[0]    # (1,1,P,P)
                out_patch = np.squeeze(out_patch, axis=0)            # (1,P,P)
                out[c:c+1, i:i+patch_size, j:j+patch_size] += out_patch * win
                wgt[c:c+1, i:i+patch_size, j:j+patch_size] += win

                done += 1
                if progress_cb:
                    progress_cb(done / max(1, total))

    wgt[wgt == 0] = 1.0
    arr = out / wgt
    return _restore_output(arr, channels_last, was_uint16, H0, W0)

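# [Editor's note: annotation added in this diff view, not part of the released file.]
# Worked example of the tiling above with the defaults (patch=512, overlap=64, stride 448):
#   _tile_indices(1200, 512, 64) -> [0, 448, 688]   # last tile is pulled back to end at the image edge
# Each tile's output is weighted by the 2-D Hann window and accumulated in `out`, while the
# window itself is accumulated in `wgt`; dividing out/wgt blends the overlapping tiles. Because
# np.hanning() is exactly zero at its endpoints, the outermost image rows/columns receive zero
# weight (hence the wgt==0 guard above and the 10 px border restore in _on_ok further below).
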
# ---------- providers ----------
def pick_providers(auto_gpu=True) -> list[str]:
    """
    Windows: DirectML → CUDA → CPU
    mac(Intel): CPU → CoreML (optional)
    mac(Apple Silicon): **CPU only** (avoid CoreML artifact path)
    """
    if ort is None:
        return []

    avail = set(ort.get_available_providers())

    # Apple Silicon: always CPU ( CoreML has 16,384-dim constraint and can artifact )
    if IS_APPLE_ARM:
        return ["CPUExecutionProvider"] if "CPUExecutionProvider" in avail else []

    # Non-Apple ARM
    if not auto_gpu:
        return ["CPUExecutionProvider"] if "CPUExecutionProvider" in avail else []

    order = []
    if "DmlExecutionProvider" in avail:
        order.append("DmlExecutionProvider")
    if "CUDAExecutionProvider" in avail:
        order.append("CUDAExecutionProvider")

    # mac(Intel) can still use CoreML if someone insists, but we won't put it first.
    if "CPUExecutionProvider" in avail:
        order.append("CPUExecutionProvider")
    if "CoreMLExecutionProvider" in avail:
        order.append("CoreMLExecutionProvider")

    return order

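# [Editor's note: annotation added in this diff view, not part of the released file.]
# Illustrative pick_providers() results; the real output depends on the installed
# onnxruntime build and is only a priority list handed to ort.InferenceSession(...):
#   Windows with onnxruntime-directml -> ["DmlExecutionProvider", "CPUExecutionProvider"]
#   Linux with onnxruntime-gpu        -> ["CUDAExecutionProvider", "CPUExecutionProvider"]
#   Apple Silicon (any build)         -> ["CPUExecutionProvider"]
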
def _preserve_border(dst: np.ndarray, src: np.ndarray, px: int = 10) -> np.ndarray:
    """
    Copy a px-wide ring from src → dst, in-place. Handles mono/RGB.
    Expects same shape for src and dst. Clamps px to image size.
    """
    if px <= 0 or dst is None or src is None:
        return dst
    if dst.shape != src.shape:
        return dst  # shapes differ; skip quietly

    h, w = dst.shape[:2]
    px = int(max(0, min(px, h // 2, w // 2)))
    if px == 0:
        return dst

    s = src.astype(dst.dtype, copy=False)

    # top & bottom
    dst[:px, ...] = s[:px, ...]
    dst[-px:, ...] = s[-px:, ...]
    # left & right
    dst[:, :px, ...] = s[:, :px, ...]
    dst[:, -px:, ...] = s[:, -px:, ...]

    return dst

# ---------- worker ----------
class _ONNXWorker(QThread):
    progressed = pyqtSignal(int)   # 0..100
    failed     = pyqtSignal(str)
    finished_ok= pyqtSignal(np.ndarray)

    def __init__(self, model_path: str, image: np.ndarray, patch: int, overlap: int, providers: list[str]):
        super().__init__()
        self.model_path = model_path
        self.image = image
        self.patch = patch
        self.overlap = overlap
        self.providers = providers
        self.used_provider = None

    def run(self):
        if ort is None:
            self.failed.emit("onnxruntime is not installed.")
            return
        try:
            sess = ort.InferenceSession(self.model_path, providers=self.providers)
            self.used_provider = (sess.get_providers()[0] if sess.get_providers() else None)
        except Exception:
            # fallback CPU if GPU fails
            try:
                sess = ort.InferenceSession(self.model_path, providers=["CPUExecutionProvider"])
                self.used_provider = "CPUExecutionProvider"  # NEW
            except Exception as e2:
                self.failed.emit(f"Failed to init ONNX session:\n{e2}")
                return

        def cb(frac):
            self.progressed.emit(int(frac * 100))

        try:
            out = run_onnx_tiled(sess, self.image, self.patch, self.overlap, cb)
        except Exception as e:
            self.failed.emit(str(e)); return

        self.finished_ok.emit(out)

# ---------- dialog ----------
class AberrationAIDialog(QDialog):
    def __init__(self, parent, docman, get_active_doc_callable, icon: QIcon | None = None):
        super().__init__(parent)
        self.setWindowTitle("R.A.'s Aberration Correction (AI)")
        if icon is not None:
            self.setWindowIcon(icon)

        # Normalize window behavior across platforms
        self.setWindowFlag(Qt.WindowType.Window, True)
        # This is a “big operation” tool; app-modal is usually fine here
        self.setWindowModality(Qt.WindowModality.ApplicationModal)
        self.setModal(False)
        #self.setAttribute(Qt.WidgetAttribute.WA_DeleteOnClose, True)

        self.docman = docman
        self.get_active_doc = get_active_doc_callable
        self._t_start = None
        self._last_used_provider = None

        v = QVBoxLayout(self)

        # Model row
        row = QHBoxLayout()
        row.addWidget(QLabel("Model:"))
        self.model_label = QLabel("—")
        self.model_label.setToolTip("")
        btn_browse = QPushButton("Browse…"); btn_browse.clicked.connect(self._browse_model)
        row.addWidget(self.model_label, 1)
        row.addWidget(btn_browse)
        v.addLayout(row)

        # Providers row
        row2 = QHBoxLayout()
        self.chk_auto = QCheckBox("Auto GPU (if available)")
        self.chk_auto.setChecked(True)
        row2.addWidget(self.chk_auto)
        self.cmb_provider = QComboBox()
        row2.addWidget(QLabel("Provider:"))
        row2.addWidget(self.cmb_provider, 1)
        v.addLayout(row2)

        # Params row
        row3 = QHBoxLayout()
        row3.addWidget(QLabel("Patch"))
        self.spin_patch = QSpinBox(minimum=128, maximum=2048); self.spin_patch.setValue(512)
        row3.addWidget(self.spin_patch)
        row3.addWidget(QLabel("Overlap"))
        self.spin_overlap = QSpinBox(minimum=16, maximum=512); self.spin_overlap.setValue(64)
        row3.addWidget(self.spin_overlap)
        v.addLayout(row3)

        # Download / Open folder
        row4 = QHBoxLayout()
        btn_latest = QPushButton("Download latest model…")
        btn_latest.clicked.connect(self._download_latest_model)
        row4.addWidget(btn_latest)
        btn_openfolder = QPushButton("Open model folder")
        btn_openfolder.clicked.connect(self._open_model_folder)
        row4.addWidget(btn_openfolder)
        row4.addStretch(1)
        v.addLayout(row4)

        # Progress + actions
        self.progress = QProgressBar(); self.progress.setRange(0, 100); v.addWidget(self.progress)
        row5 = QHBoxLayout()
        self.btn_run = QPushButton("Run"); self.btn_run.clicked.connect(self._run)
        btn_close = QPushButton("Close"); btn_close.clicked.connect(self.reject)
        row5.addStretch(1); row5.addWidget(self.btn_run); row5.addWidget(btn_close)
        v.addLayout(row5)

        info = QLabel(
            "Model and weights © Riccardo Alberghi — "
            "<a href='https://github.com/riccardoalberghi'>more information</a>."
        )
        info.setTextFormat(Qt.TextFormat.RichText)
        info.setTextInteractionFlags(Qt.TextInteractionFlag.TextBrowserInteraction)
        info.setOpenExternalLinks(True)
        info.setWordWrap(True)
        info.setStyleSheet("color:#888; font-size:11px; margin-top:4px;")
        v.addWidget(info)

        self._model_path = None
        self._refresh_providers()
        self._load_last_model_from_settings()

        if IS_APPLE_ARM:
            self.chk_auto.setChecked(False)
            self.chk_auto.setEnabled(False)

    # ----- model helpers -----
    def _set_model_path(self, p: str | None):
        self._model_path = p
        if p:
            self.model_label.setText(os.path.basename(p))
            self.model_label.setToolTip(p)
            QSettings().setValue("AberrationAI/model_path", p)
        else:
            self.model_label.setText("—")
            self.model_label.setToolTip("")
            QSettings().remove("AberrationAI/model_path")

    def _load_last_model_from_settings(self):
        p = QSettings().value("AberrationAI/model_path", type=str)
        if p and os.path.isfile(p):
            self._set_model_path(p)

    def _browse_model(self):
        start_dir = _app_model_dir()
        p, _ = QFileDialog.getOpenFileName(self, "Select ONNX model", start_dir, "ONNX (*.onnx)")
        if p:
            self._set_model_path(p)

    def _open_model_folder(self):
        d = _app_model_dir()
        try:
            if os.name == "nt":
                os.startfile(d)  # type: ignore
            elif sys.platform == "darwin":
                import subprocess; subprocess.Popen(["open", d])
            else:
                import subprocess; subprocess.Popen(["xdg-open", d])
        except Exception:
            webbrowser.open(f"file://{d}")

    # ----- provider UI -----
    def _log(self, msg: str):  # NEW
        mw = self.parent()
        try:
            if hasattr(mw, "_log"):
                mw._log(msg)
            elif hasattr(mw, "update_status"):
                mw.update_status(msg)
        except Exception:
            pass

    def _refresh_providers(self):
        if ort is None:
            self.cmb_provider.clear()
            self.cmb_provider.addItem("onnxruntime not installed")
            self.cmb_provider.setEnabled(False)
            return

        avail = ort.get_available_providers()
        self.cmb_provider.clear()

        if IS_APPLE_ARM:
            # Hard lock to CPU on M-series
            self.cmb_provider.addItem("CPUExecutionProvider")
            self.cmb_provider.setCurrentText("CPUExecutionProvider")
            self.cmb_provider.setEnabled(False)
            # also turn off Auto GPU and disable that checkbox
            self.chk_auto.setChecked(False)
            self.chk_auto.setEnabled(False)
            return

        # Other platforms: show all, sane default
        for name in avail:
            self.cmb_provider.addItem(name)

        if "DmlExecutionProvider" in avail:
            self.cmb_provider.setCurrentText("DmlExecutionProvider")
        elif "CUDAExecutionProvider" in avail:
            self.cmb_provider.setCurrentText("CUDAExecutionProvider")
        elif "CPUExecutionProvider" in avail:
            self.cmb_provider.setCurrentText("CPUExecutionProvider")
        elif "CoreMLExecutionProvider" in avail:
            self.cmb_provider.setCurrentText("CoreMLExecutionProvider")

    # ----- download -----
    def _download_latest_model(self):
        if requests is None:
            QMessageBox.warning(self, "Network", "The 'requests' package is required."); return
        dst = _app_model_dir()
        self.progress.setRange(0, 0)  # busy
        self.btn_run.setEnabled(False)
        self._dl = _DownloadWorker(dst)
        self._dl.progressed.connect(self.progress.setValue)
        self._dl.failed.connect(self._on_download_failed)
        self._dl.finished_ok.connect(self._on_download_ok)
        self._dl.finished.connect(lambda: (self.progress.setRange(0, 100), self.btn_run.setEnabled(True)))
        self._dl.start()

    def _on_download_failed(self, msg: str):
        QMessageBox.critical(self, "Download", msg)

    def _on_download_ok(self, path: str):
        self.progress.setValue(100)
        self._set_model_path(path)
        QMessageBox.information(self, "Model", f"Downloaded: {os.path.basename(path)}")

    # ----- run -----
    def _run(self):
        if ort is None:
            QMessageBox.critical(
                self,
                "Unsupported ONNX Runtime",
                "The currently installed onnxruntime is not supported on this machine.\n"
                "Please try installing an earlier version (for example 1.19.x) and try again."
            )
            return
        if not self._model_path or not os.path.isfile(self._model_path):
            QMessageBox.warning(self, "Model", "Please select or download a valid .onnx model first.")
            return

        doc = self.get_active_doc()
        if doc is None or getattr(doc, "image", None) is None:
            QMessageBox.warning(self, "Image", "No active image.")
            return

        img = np.asarray(doc.image)
        self._orig_for_border = img.copy()

        patch = int(self.spin_patch.value())
        overlap = int(self.spin_overlap.value())

        # -------- providers (always choose, then always run) --------
        if IS_APPLE_ARM:
            providers = ["CPUExecutionProvider"]
            self.chk_auto.setChecked(False)
        else:
            if self.chk_auto.isChecked():
                providers = pick_providers(auto_gpu=True)
            else:
                sel = self.cmb_provider.currentText()
                providers = [sel] if sel else ["CPUExecutionProvider"]

        # --- make patch match the model's requirement (if fixed) ---
        req = _model_required_patch(self._model_path)
        if req and req > 0:
            patch = req
            try:
                self.spin_patch.blockSignals(True)
                self.spin_patch.setValue(req)
            finally:
                self.spin_patch.blockSignals(False)

        # --- CoreML guard on Intel: if model needs >128, run on CPU instead ---
        if ("CoreMLExecutionProvider" in providers) and (req and req > 128):
            self._log(f"CoreML limited to small tiles; model requires {req}px → using CPU.")
            providers = ["CPUExecutionProvider"]
            try:
                self.cmb_provider.setCurrentText("CPUExecutionProvider")
                self.chk_auto.setChecked(False)
            except Exception:
                pass

        self._t_start = time.perf_counter()
        prov_txt = ("auto" if self.chk_auto.isChecked() else self.cmb_provider.currentText() or "CPU")
        self._log(f"🚀 Aberration AI: model={os.path.basename(self._model_path)}, "
                  f"provider={prov_txt}, patch={patch}, overlap={overlap}")

        # -------- run worker --------
        self.progress.setValue(0)
        self.btn_run.setEnabled(False)

        self._worker = _ONNXWorker(self._model_path, img, patch, overlap, providers)
        self._worker.progressed.connect(self.progress.setValue)
        self._worker.failed.connect(self._on_failed)
        self._worker.finished_ok.connect(self._on_ok)
        self._worker.finished.connect(self._on_worker_finished)
        self._worker.start()

    def _on_failed(self, msg: str):
        self._log(f"❌ Aberration AI failed: {msg}")  # NEW
        QMessageBox.critical(self, "ONNX Error", msg)

    def _on_ok(self, out: np.ndarray):
        doc = self.get_active_doc()
        if doc is None or getattr(doc, "image", None) is None:
            QMessageBox.warning(self, "Image", "No active image.")
            return

        # 1) Preserve a thin border from the original image (prevents “eaten” edges)
        BORDER_PX = 10
        src = getattr(self, "_orig_for_border", None)
        if src is None or src.shape != out.shape:
            try:
                src = np.asarray(doc.image)
            except Exception:
                src = None
        out = _preserve_border(out, src, BORDER_PX)

        # 2) Metadata for this step (stored on the document)
        meta = {
            "is_mono": (out.ndim == 2),
            "processing_parameters": {
                **(getattr(doc, "metadata", {}) or {}).get("processing_parameters", {}),
                "AberrationAI": {
                    "model_path": self._model_path,
                    "patch_size": int(self.spin_patch.value()),
                    "overlap": int(self.spin_overlap.value()),
                    "provider": (self.cmb_provider.currentText()
                                 if not self.chk_auto.isChecked() else "auto"),
                    "border_px": BORDER_PX,
                }
            }
        }

        # 3) Apply through history-aware API (either path is fine)
        try:
            # Preferred: directly on the document
            if hasattr(doc, "apply_edit"):
                doc.apply_edit(out, meta, step_name="Aberration AI")
            # Or via DocManager (same effect)
            elif hasattr(self.docman, "update_active_document"):
                self.docman.update_active_document(out, metadata=meta, step_name="Aberration AI")
            else:
                # Last-resort fallback (no undo): avoid if possible
                doc.image = out
                try:
                    doc.metadata.update(meta)
                    doc.changed.emit()
                except Exception:
                    pass
        except Exception as e:
            self._log(f"❌ Aberration AI apply failed: {e}")
            QMessageBox.critical(self, "Apply Error", f"Failed to apply result:\n{e}")
            return

        # 3.5) Register this as last_headless_command for Replay Last Action  ← NEW
        try:
            main = self.parent()
            if main is not None:
                auto_gpu = bool(self.chk_auto.isChecked())
                preset = {
                    "model": self._model_path,
                    "patch": int(self.spin_patch.value()),
                    "overlap": int(self.spin_overlap.value()),
                    "border_px": int(BORDER_PX),
                    "auto_gpu": auto_gpu,
                }
                if not auto_gpu:
                    preset["provider"] = self.cmb_provider.currentText() or "CPUExecutionProvider"

                payload = {
                    "command_id": "aberrationai",
                    "preset": preset,
                }
                setattr(main, "_last_headless_command", payload)

                # optional log
                try:
                    if hasattr(main, "_log"):
                        prov = preset.get("provider", "auto" if auto_gpu else "CPUExecutionProvider")
                        main._log(
                            f"[Replay] Registered Aberration AI as last action "
                            f"(patch={preset['patch']}, overlap={preset['overlap']}, "
                            f"border={preset['border_px']}px, provider={prov})"
                        )
                except Exception:
                    pass
        except Exception:
            # never break the tool if replay wiring fails
            pass

        # 4) Refresh the active view
        mw = self.parent()
        sw = getattr(getattr(mw, "mdi", None), "activeSubWindow", lambda: None)()
        if sw and hasattr(sw, "widget"):
            w = sw.widget()
            if hasattr(w, "reload_from_doc"):
                try: w.reload_from_doc()
                except Exception as e:
                    import logging
                    logging.debug(f"Exception suppressed: {type(e).__name__}: {e}")
            elif hasattr(w, "update_view"):
                try: w.update_view()
                except Exception as e:
                    import logging
                    logging.debug(f"Exception suppressed: {type(e).__name__}: {e}")
            elif hasattr(w, "update"):
                w.update()

        dt = 0.0
        try:
            if self._t_start is not None:
                dt = time.perf_counter() - self._t_start
        except Exception:
            pass
        used = getattr(self._worker, "used_provider", None) or \
               (self.cmb_provider.currentText() if not self.chk_auto.isChecked() else "auto")
        BORDER_PX = 10  # same value used above
        self._log(
            f"✅ Aberration AI applied "
            f"(model={os.path.basename(self._model_path)}, provider={used}, "
            f"patch={int(self.spin_patch.value())}, overlap={int(self.spin_overlap.value())}, "
            f"border={BORDER_PX}px, time={dt:.2f}s)"
        )

        self.progress.setValue(100)
        self.accept()

    def _on_worker_finished(self):
        # If dialog is already gone, this method is never called because the receiver (self)
        # has been destroyed and Qt auto-disconnects the signal.
        if hasattr(self, "btn_run"):
            try:
                self.btn_run.setEnabled(True)
            except RuntimeError:
                # Button already deleted; ignore
                pass
        self._worker = None
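
Usage note (editorial addition, not part of the package): the dialog above reduces to four steps: probe the model's fixed tile size, pick an execution provider, run the tiled inference, and paste back a thin border from the original frame. Below is a minimal headless sketch of that same sequence, assuming the wheel and onnxruntime are installed; `model.onnx` and the random `frame` are hypothetical stand-ins for a downloaded release asset and a loaded image.

import numpy as np
import onnxruntime as ort
from setiastro.saspro.aberration_ai import (
    _model_required_patch, _preserve_border, pick_providers, run_onnx_tiled,
)

model = "model.onnx"                                        # hypothetical: path to a downloaded model
frame = np.random.rand(2048, 2048, 3).astype(np.float32)    # hypothetical: stand-in for a loaded image

patch = _model_required_patch(model) or 512                 # honor a fixed input size, else default to 512
providers = pick_providers(auto_gpu=True) or ["CPUExecutionProvider"]
sess = ort.InferenceSession(model, providers=providers)

out = run_onnx_tiled(sess, frame, patch_size=patch, overlap=64,
                     progress_cb=lambda f: print(f"{f:.0%}", end="\r"))
out = _preserve_border(out, frame, px=10)                   # keep the original 10 px rim, as the dialog does

In the GUI the same work runs inside _ONNXWorker on a QThread, so the progress bar stays responsive and a CPU fallback is attempted if the GPU provider fails to initialize.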