setiastrosuitepro 1.6.7__py3-none-any.whl → 1.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of setiastrosuitepro might be problematic.
- setiastro/images/abeicon.svg +16 -0
- setiastro/images/colorwheel.svg +97 -0
- setiastro/images/cosmic.svg +40 -0
- setiastro/images/cosmicsat.svg +24 -0
- setiastro/images/graxpert.svg +19 -0
- setiastro/images/linearfit.svg +32 -0
- setiastro/images/narrowbandnormalization.png +0 -0
- setiastro/images/pixelmath.svg +42 -0
- setiastro/images/planetarystacker.png +0 -0
- setiastro/saspro/__main__.py +1 -1
- setiastro/saspro/_generated/build_info.py +2 -2
- setiastro/saspro/aberration_ai.py +49 -11
- setiastro/saspro/aberration_ai_preset.py +29 -3
- setiastro/saspro/add_stars.py +29 -5
- setiastro/saspro/backgroundneutral.py +73 -33
- setiastro/saspro/blink_comparator_pro.py +150 -55
- setiastro/saspro/convo.py +9 -6
- setiastro/saspro/cosmicclarity.py +125 -18
- setiastro/saspro/crop_dialog_pro.py +96 -2
- setiastro/saspro/curve_editor_pro.py +132 -61
- setiastro/saspro/curves_preset.py +249 -47
- setiastro/saspro/doc_manager.py +178 -11
- setiastro/saspro/frequency_separation.py +1159 -208
- setiastro/saspro/gui/main_window.py +340 -88
- setiastro/saspro/gui/mixins/dock_mixin.py +245 -24
- setiastro/saspro/gui/mixins/file_mixin.py +35 -16
- setiastro/saspro/gui/mixins/menu_mixin.py +31 -1
- setiastro/saspro/gui/mixins/theme_mixin.py +160 -14
- setiastro/saspro/gui/mixins/toolbar_mixin.py +132 -10
- setiastro/saspro/gui/mixins/update_mixin.py +121 -33
- setiastro/saspro/histogram.py +179 -7
- setiastro/saspro/imageops/narrowband_normalization.py +816 -0
- setiastro/saspro/imageops/serloader.py +769 -0
- setiastro/saspro/imageops/starbasedwhitebalance.py +23 -52
- setiastro/saspro/imageops/stretch.py +582 -62
- setiastro/saspro/layers.py +13 -9
- setiastro/saspro/layers_dock.py +183 -3
- setiastro/saspro/legacy/numba_utils.py +68 -48
- setiastro/saspro/live_stacking.py +181 -73
- setiastro/saspro/multiscale_decomp.py +77 -29
- setiastro/saspro/narrowband_normalization.py +1618 -0
- setiastro/saspro/numba_utils.py +72 -57
- setiastro/saspro/ops/commands.py +18 -18
- setiastro/saspro/ops/script_editor.py +5 -0
- setiastro/saspro/ops/scripts.py +119 -0
- setiastro/saspro/remove_green.py +1 -1
- setiastro/saspro/resources.py +4 -0
- setiastro/saspro/ser_stack_config.py +68 -0
- setiastro/saspro/ser_stacker.py +2245 -0
- setiastro/saspro/ser_stacker_dialog.py +1481 -0
- setiastro/saspro/ser_tracking.py +206 -0
- setiastro/saspro/serviewer.py +1242 -0
- setiastro/saspro/sfcc.py +602 -214
- setiastro/saspro/shortcuts.py +154 -25
- setiastro/saspro/signature_insert.py +688 -33
- setiastro/saspro/stacking_suite.py +853 -401
- setiastro/saspro/star_alignment.py +243 -122
- setiastro/saspro/stat_stretch.py +878 -131
- setiastro/saspro/subwindow.py +303 -74
- setiastro/saspro/whitebalance.py +24 -0
- setiastro/saspro/widgets/common_utilities.py +28 -21
- setiastro/saspro/widgets/resource_monitor.py +128 -80
- {setiastrosuitepro-1.6.7.dist-info → setiastrosuitepro-1.7.0.dist-info}/METADATA +2 -2
- {setiastrosuitepro-1.6.7.dist-info → setiastrosuitepro-1.7.0.dist-info}/RECORD +68 -51
- {setiastrosuitepro-1.6.7.dist-info → setiastrosuitepro-1.7.0.dist-info}/WHEEL +0 -0
- {setiastrosuitepro-1.6.7.dist-info → setiastrosuitepro-1.7.0.dist-info}/entry_points.txt +0 -0
- {setiastrosuitepro-1.6.7.dist-info → setiastrosuitepro-1.7.0.dist-info}/licenses/LICENSE +0 -0
- {setiastrosuitepro-1.6.7.dist-info → setiastrosuitepro-1.7.0.dist-info}/licenses/license.txt +0 -0
@@ -16,6 +16,7 @@ import hashlib
 from numpy.lib.format import open_memmap
 import tzlocal
 import weakref
+import ast
 import re
 import unicodedata
 import math  # used in compute_safe_chunk
@@ -1714,11 +1715,20 @@ class _MMFits:
             raise ValueError(f"Unsupported ndim={self.ndim} for {path}")

    def _apply_fixed_fits_scale(self, arr: np.ndarray) -> np.ndarray:
+        """
+        Map 8/16-bit FITS integer samples to [0,1] using a fixed divisor.
+        IMPORTANT: Only do this for integer dtypes. If Astropy already returned
+        float (e.g. BSCALE/BZERO applied), do NOT divide again.
+        """
+        # Only scale raw integer pixel arrays
+        if arr.dtype.kind not in ("u", "i"):
+            return arr
+
        bitpix = getattr(self, "_bitpix", 0)
        if bitpix == 8:
-            arr
+            return arr / 255.0
        elif bitpix == 16:
-            arr
+            return arr / 65535.0
        return arr

    def read_tile(self, y0, y1, x0, x1) -> np.ndarray:
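
The key behavioral change in this hunk is the dtype guard: the fixed-divisor scaling now runs only on raw integer FITS samples, so data that Astropy has already returned as float (for example after BSCALE/BZERO handling) is not divided a second time. Below is a minimal standalone sketch of the same idea; the function name and the explicit bitpix argument are stand-ins for illustration, not the package's API.

    import numpy as np

    def scale_fits_to_unit(arr: np.ndarray, bitpix: int) -> np.ndarray:
        """Divide raw 8/16-bit integer samples down to [0, 1]; pass floats through."""
        if arr.dtype.kind not in ("u", "i"):   # already float -> assume already scaled
            return arr
        if bitpix == 8:
            return arr / 255.0
        if bitpix == 16:
            return arr / 65535.0
        return arr

    u16 = np.array([0, 32768, 65535], dtype=np.uint16)
    print(scale_fits_to_unit(u16, 16))   # roughly [0.0, 0.5, 1.0]
    f32 = u16.astype(np.float32) / 65535.0
    print(scale_fits_to_unit(f32, 16))   # unchanged: no double division
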
@@ -1846,9 +1856,9 @@ class ReferenceFrameReviewDialog(QDialog):
         self.initUI()
         self.loadImageArray()  # Load the image into self.original_image
         if self.original_image is not None:
-            self.updatePreview(self.original_image)
-            if self.original_image is not None:
-
+            QTimer.singleShot(0, lambda: self.updatePreview(self.original_image, fit=True))
+            #if self.original_image is not None:
+            #    QTimer.singleShot(0, self.zoomIn)


     def initUI(self):
@@ -1906,6 +1916,89 @@ class ReferenceFrameReviewDialog(QDialog):
         self.setLayout(main_layout)
         self.zoomIn()

+    def _ensure_hwc(self, x: np.ndarray) -> np.ndarray:
+        """Ensure HWC for RGB, HW for mono."""
+        if x is None:
+            return None
+        x = np.asarray(x)
+        # CHW -> HWC
+        if x.ndim == 3 and x.shape[0] == 3 and x.shape[-1] != 3:
+            x = np.transpose(x, (1, 2, 0))
+        # squeeze HWC with singleton
+        if x.ndim == 3 and x.shape[-1] == 1:
+            x = np.squeeze(x, axis=-1)
+        return x
+
+
+    def _robust_preview_stretch(self, img: np.ndarray,
+                                lo_pct: float = 0.25,
+                                hi_pct: float = 99.75,
+                                gamma: float = 0.65) -> np.ndarray:
+        """
+        Robust preview stretch:
+          - nan/inf safe
+          - pedestal remove per channel (img - min)
+          - percentile clip to kill outliers
+          - scale to 0..1
+          - gentle gamma (default <1 brightens)
+        Returns float32 in [0,1] and preserves mono vs RGB.
+        """
+        x = self._ensure_hwc(img)
+        if x is None:
+            return None
+
+        x = np.asarray(x, dtype=np.float32)
+        x = np.nan_to_num(x, nan=0.0, posinf=0.0, neginf=0.0)
+
+        # Mono
+        if x.ndim == 2:
+            x = x - float(x.min())
+            # percentile clip on non-flat data
+            p_lo = float(np.percentile(x, lo_pct))
+            p_hi = float(np.percentile(x, hi_pct))
+            if p_hi > p_lo:
+                x = np.clip(x, p_lo, p_hi)
+                x = (x - p_lo) / (p_hi - p_lo)
+            else:
+                mx = float(x.max())
+                if mx > 0:
+                    x = x / mx
+            if gamma is not None and gamma > 0:
+                x = np.power(np.clip(x, 0.0, 1.0), gamma)
+            return np.clip(x, 0.0, 1.0).astype(np.float32, copy=False)
+
+        # RGB (HWC)
+        if x.ndim == 3 and x.shape[2] == 3:
+            out = np.empty_like(x, dtype=np.float32)
+            for c in range(3):
+                ch = x[..., c]
+                ch = ch - float(ch.min())
+                p_lo = float(np.percentile(ch, lo_pct))
+                p_hi = float(np.percentile(ch, hi_pct))
+                if p_hi > p_lo:
+                    ch = np.clip(ch, p_lo, p_hi)
+                    ch = (ch - p_lo) / (p_hi - p_lo)
+                else:
+                    mx = float(ch.max())
+                    if mx > 0:
+                        ch = ch / mx
+                out[..., c] = ch
+
+            if gamma is not None and gamma > 0:
+                out = np.power(np.clip(out, 0.0, 1.0), gamma)
+
+            return np.clip(out, 0.0, 1.0).astype(np.float32, copy=False)
+
+        # Fallback: treat as scalar field
+        x = x - float(x.min())
+        mx = float(x.max())
+        if mx > 0:
+            x = x / mx
+        if gamma is not None and gamma > 0:
+            x = np.power(np.clip(x, 0.0, 1.0), gamma)
+        return np.clip(x, 0.0, 1.0).astype(np.float32, copy=False)
+
+
     def fitToPreview(self):
         """Calculate and set the zoom factor so that the image fills the preview area."""
         if self.original_image is None:
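
For reference, the core of the new preview stretch is a percentile clip followed by a rescale and a gamma lift. The sketch below applies the same steps to a synthetic mono frame; the parameter values mirror the defaults in the hunk above, but this is an illustration, not the dialog's actual code path.

    import numpy as np

    rng = np.random.default_rng(0)
    img = rng.normal(loc=500.0, scale=5.0, size=(256, 256)).astype(np.float32)
    img[0, 0] = 1e6                      # a hot pixel the percentile clip should ignore

    x = img - img.min()                  # pedestal removal
    p_lo, p_hi = np.percentile(x, [0.25, 99.75])
    x = np.clip(x, p_lo, p_hi)
    x = (x - p_lo) / (p_hi - p_lo)       # scale to 0..1
    x = np.power(x, 0.65)                # gamma < 1 brightens the midtones
    print(x.min(), x.max())              # ~0.0 ... 1.0
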
@@ -1932,32 +2025,46 @@ class ReferenceFrameReviewDialog(QDialog):

     def _normalize_preview_01(self, img: np.ndarray) -> np.ndarray:
         """
-
-
-
-
-        3. Always return float32 in [0,1].
+        Always normalize to [0,1]:
+            img = img - min(img)
+            img = img / max(img)
+        Per-channel if RGB, global if mono.
         """
         if img is None:
             return None

-
-
-
-        finite = np.isfinite(img)
-        if not finite.any():
-            return np.zeros_like(img, dtype=np.float32)
-
-        mn = float(img[finite].min())
-        mx = float(img[finite].max())
-        if mx == mn:
-            # flat frame → just zero it
-            return np.zeros_like(img, dtype=np.float32)
+        x = np.asarray(img, dtype=np.float32)
+        x = np.nan_to_num(x, nan=0.0, posinf=0.0, neginf=0.0)

-        if
-
+        if x.ndim == 2:
+            mn = float(x.min())
+            x = x - mn
+            mx = float(x.max())
+            if mx > 0:
+                x = x / mx
+            return np.clip(x, 0.0, 1.0).astype(np.float32, copy=False)
+
+        if x.ndim == 3 and x.shape[2] == 3:
+            # per-channel pedestal remove + normalize
+            out = x.copy()
+            for c in range(3):
+                ch = out[..., c]
+                mn = float(ch.min())
+                ch = ch - mn
+                mx = float(ch.max())
+                if mx > 0:
+                    ch = ch / mx
+                out[..., c] = ch
+            return np.clip(out, 0.0, 1.0).astype(np.float32, copy=False)
+
+        # fallback
+        mn = float(x.min())
+        x = x - mn
+        mx = float(x.max())
+        if mx > 0:
+            x = x / mx
+        return np.clip(x, 0.0, 1.0).astype(np.float32, copy=False)

-        return np.clip(img, 0.0, 1.0)


     def loadImageArray(self):
@@ -1980,22 +2087,44 @@ class ReferenceFrameReviewDialog(QDialog):

         self.original_image = img

+    def _fit_zoom_to_viewport(self, image: np.ndarray):
+        """Set zoom_factor so image fits inside the scrollArea viewport."""
+        if image is None:
+            return
+
+        img = self._ensure_hwc(image)
+
+        if img.ndim == 2:
+            h, w = img.shape
+        elif img.ndim == 3 and img.shape[2] == 3:
+            h, w = img.shape[:2]
+        else:
+            return
+
+        vp = self.scrollArea.viewport().size()
+        if vp.width() <= 0 or vp.height() <= 0 or w <= 0 or h <= 0:
+            return
+
+        # Fit-to-viewport zoom
+        self.zoom_factor = min(vp.width() / w, vp.height() / h)

-    def updatePreview(self, image):
-        """
-        Convert a given image array to a QPixmap and update the preview label.
-        """
+    def updatePreview(self, image, *, fit: bool = False):
         self.current_preview_image = image
+
+        if fit:
+            self._fit_zoom_to_viewport(image)
+
         pixmap = self.convertArrayToPixmap(image)
         if pixmap is None or pixmap.isNull():
             self.previewLabel.setText(self.tr("Unable to load preview."))
-
-
-
-
-
-
-
+            return
+
+        scaled = pixmap.scaled(
+            pixmap.size() * self.zoom_factor,
+            Qt.AspectRatioMode.KeepAspectRatio,
+            Qt.TransformationMode.SmoothTransformation
+        )
+        self.previewLabel.setPixmap(scaled)

     def _preview_boost(self, img: np.ndarray) -> np.ndarray:
         """Robust, very gentle stretch for display when image would quantize to black."""
@@ -2013,62 +2142,48 @@ class ReferenceFrameReviewDialog(QDialog):
         if image is None:
             return None

-
-
-        # If image is so dim or flat that 8-bit will zero-out, boost for preview
-        ptp = float(img.max() - img.min())
-        needs_boost = (float(img.max()) <= (1.0 / 255.0)) or (ptp < 1e-6) or (not np.isfinite(img).all())
-        if needs_boost:
-            img = self._preview_boost(np.nan_to_num(img, nan=0.0, posinf=0.0, neginf=0.0))
+        # ALWAYS normalize to [0,1]
+        img = self._normalize_preview_01(image)

         # Convert to 8-bit for QImage
         display_image = (img * 255.0).clip(0, 255).astype(np.uint8)

+        # IMPORTANT: ensure contiguous memory
+        display_image = np.ascontiguousarray(display_image)
+
+        # Keep a reference so Qt's QImage always has valid backing memory
+        self._last_preview_u8 = display_image
+
         if display_image.ndim == 2:
             h, w = display_image.shape
-            q_image = QImage(
+            q_image = QImage(self._last_preview_u8.data, w, h, w, QImage.Format.Format_Grayscale8)
+            q_image = q_image.copy()  # detach from numpy buffer (extra safety)
         elif display_image.ndim == 3 and display_image.shape[2] == 3:
             h, w, _ = display_image.shape
-            q_image = QImage(
+            q_image = QImage(self._last_preview_u8.data, w, h, 3 * w, QImage.Format.Format_RGB888)
+            q_image = q_image.copy()  # detach
         else:
             return None
+
         return QPixmap.fromImage(q_image)
-
+
     def toggleAutostretch(self):
         if self.original_image is None:
             QMessageBox.warning(self, self.tr("Error"), self.tr("Reference image not loaded."))
             return

-        # 🔹 Ensure the image we feed to Statistical Stretch is in [0,1]
-        base = self._normalize_preview_01(self.original_image)
-
         self.autostretch_enabled = not self.autostretch_enabled
+
         if self.autostretch_enabled:
-
-            new_image = stretch_mono_image(
-                base,
-                target_median=0.3,
-                normalize=True,
-                apply_curves=False
-            )
-        elif base.ndim == 3 and base.shape[2] == 3:
-            new_image = stretch_color_image(
-                base,
-                target_median=0.3,
-                linked=False,
-                normalize=True,
-                apply_curves=False
-            )
-        else:
-            new_image = base
+            new_image = self._robust_preview_stretch(self.original_image)
             self.toggleAutoStretchButton.setText(self.tr("Disable Autostretch"))
         else:
-            new_image =
+            new_image = self._normalize_preview_01(self.original_image)
             self.toggleAutoStretchButton.setText(self.tr("Enable Autostretch"))

-        self.updatePreview(new_image)
+        self.updatePreview(new_image, fit=True)

-
+
     def zoomIn(self):
         self.zoom_factor *= 1.2
         if self.current_preview_image is not None:
@@ -3531,7 +3646,8 @@ class _MMImage:
         self._orig_dtype = None
         self._color_axis = None
         self._spat_axes = (0, 1)
-
+        self._dbg = bool(os.environ.get("SASPRO_MMIMAGE_DEBUG", "0") == "1")
+        self._dbg_count = 0
         self._xisf = None
         self._xisf_memmap = None  # np.memmap when possible
         self._xisf_arr = None  # decompressed ndarray when needed
@@ -3553,6 +3669,11 @@ class _MMImage:
             self._open_fits(path)
             self._kind = "fits"

+    def _dbg_log(self, msg: str):
+        if not getattr(self, "_dbg", False):
+            return
+        print(msg)  # or your logger
+
     # ---------------- FITS ----------------
     def _open_fits(self, path: str):
         """
@@ -3658,17 +3779,27 @@ class _MMImage:
     # ---------------- common API ----------------
     def _apply_fixed_fits_scale(self, arr: np.ndarray) -> np.ndarray:
         """
-        Map 8/16-bit FITS
-
+        Map 8/16-bit FITS integer samples to [0,1] using a fixed divisor.
+        IMPORTANT: Only do this for integer dtypes. If Astropy already returned
+        float (e.g. BSCALE/BZERO applied), do NOT divide again.
         """
+        # Only scale raw integer pixel arrays
+        if arr.dtype.kind not in ("u", "i"):
+            return arr
+
         bitpix = getattr(self, "_bitpix", 0)
         if bitpix == 8:
-            arr
+            return arr / 255.0
         elif bitpix == 16:
-            arr
+            return arr / 65535.0
         return arr

+
     def read_tile(self, y0, y1, x0, x1) -> np.ndarray:
+        import os
+        import numpy as np
+
+        # ---- FITS / XISF tile read (unchanged) ----
         if self._kind == "fits":
             d = self._fits_data
             if self.ndim == 2:
@@ -3682,23 +3813,23 @@ class _MMImage:
                 tile = np.moveaxis(tile, self._color_axis, -1)
         else:
             if self._xisf_memmap is not None:
-                # memmapped (C,H,W) → slice, then move to (H,W,C)
                 C = 1 if self.ndim == 2 else self.shape[2]
                 if C == 1:
                     tile = self._xisf_memmap[0, y0:y1, x0:x1]
                 else:
-                    tile = np.moveaxis(
-                        self._xisf_memmap[:, y0:y1, x0:x1], 0, -1
-                    )
+                    tile = np.moveaxis(self._xisf_memmap[:, y0:y1, x0:x1], 0, -1)
             else:
                 tile = self._xisf_arr[y0:y1, x0:x1]

-        # Cast to float32
+        # Cast to float32 copy (what you actually feed the stacker)
         out = np.array(tile, dtype=np.float32, copy=True, order="C")

-
+
+        # ---- APPLY FIXED SCALE (your real suspect) ----
         if self._kind == "fits":
-
+            out2 = self._apply_fixed_fits_scale(out)
+
+            out = out2

         # ensure (h,w,3) or (h,w)
         if out.ndim == 3 and out.shape[-1] not in (1, 3):
@@ -3706,6 +3837,7 @@ class _MMImage:
             out = np.moveaxis(out, 0, -1)
         if out.ndim == 3 and out.shape[-1] == 1:
             out = np.squeeze(out, axis=-1)
+
         return out

     def read_full(self) -> np.ndarray:
@@ -4107,6 +4239,162 @@ def _read_center_patch_via_mmimage(path: str, y0: int, y1: int, x0: int, x1: int
     except Exception:
         pass

+def _get_key_float(hdr: fits.Header, key: str):
+    try:
+        v = hdr.get(key, None)
+        if v is None:
+            return None
+        # handle strings like "-10.0" or "-10 C"
+        if isinstance(v, str):
+            v = v.strip().replace("C", "").replace("°", "").strip()
+        return float(v)
+    except Exception:
+        return None
+
+def _collect_temp_stats(file_list: list[str]):
+    ccd = []
+    setp = []
+    n_ccd = 0
+    n_set = 0
+
+    for p in file_list:
+        try:
+            hdr = fits.getheader(p, memmap=True)
+        except Exception:
+            continue
+
+        v1 = _get_key_float(hdr, "CCD-TEMP")
+        v2 = _get_key_float(hdr, "SET-TEMP")
+
+        if v1 is not None:
+            ccd.append(v1); n_ccd += 1
+        if v2 is not None:
+            setp.append(v2); n_set += 1
+
+    def _stats(arr):
+        if not arr:
+            return None, None, None, None
+        a = np.asarray(arr, dtype=np.float32)
+        return float(np.median(a)), float(np.min(a)), float(np.max(a)), float(np.std(a))
+
+    c_med, c_min, c_max, c_std = _stats(ccd)
+    s_med, s_min, s_max, s_std = _stats(setp)
+
+    return {
+        "ccd_med": c_med, "ccd_min": c_min, "ccd_max": c_max, "ccd_std": c_std, "ccd_n": n_ccd,
+        "set_med": s_med, "set_min": s_min, "set_max": s_max, "set_std": s_std, "set_n": n_set,
+        "n_files": len(file_list),
+    }
+
+def _temp_to_stem_tag(temp_c: float, *, prefix: str = "") -> str:
+    """
+    Filename-safe temperature token:
+        -10.0 -> 'm10p0C'
+        +5.25 -> 'p5p3C'   (rounded to 0.1C if you pass that in)
+    Uses:
+        m = minus, p = plus/decimal separator
+    Never produces '_-' which your _normalize_master_stem would collapse.
+    """
+    try:
+        t = float(temp_c)
+    except Exception:
+        return ""
+
+    sign = "m" if t < 0 else "p"
+    t_abs = abs(t)
+
+    # keep one decimal place (match your earlier plan)
+    s = f"{t_abs:.1f}"        # e.g. "10.0"
+    s = s.replace(".", "p")   # e.g. "10p0"
+    return f"{prefix}{sign}{s}C"
+
+
+def _arr_stats(a: np.ndarray):
+    a = np.asarray(a)
+    fin = np.isfinite(a)
+    if fin.any():
+        v = a[fin]
+        return dict(
+            dtype=str(a.dtype),
+            shape=tuple(a.shape),
+            finite=int(fin.sum()),
+            nan=int(np.isnan(a).sum()),
+            inf=int(np.isinf(a).sum()),
+            min=float(v.min()),
+            max=float(v.max()),
+            p01=float(np.percentile(v, 1)),
+            p50=float(np.percentile(v, 50)),
+        )
+    return dict(dtype=str(a.dtype), shape=tuple(a.shape), finite=0, nan=int(np.isnan(a).sum()), inf=int(np.isinf(a).sum()))
+
+def _print_stats(tag: str, a: np.ndarray, *, bit_depth=None, hdr=None):
+    s = _arr_stats(a)
+    bd = f", bit_depth={bit_depth}" if bit_depth is not None else ""
+    print(f"🧪 {tag}{bd} dtype={s['dtype']} shape={s['shape']} finite={s['finite']} nan={s['nan']} inf={s['inf']}")
+    if s["finite"] > 0:
+        print(f"   min={s['min']:.6f} p01={s['p01']:.6f} p50={s['p50']:.6f} max={s['max']:.6f}")
+    # Header hints (best-effort)
+    if hdr is not None:
+        try:
+            # FITS-ish
+            if hasattr(hdr, "get"):
+                print(f"   hdr: BITPIX={hdr.get('BITPIX', 'NA')} BSCALE={hdr.get('BSCALE', 'NA')} BZERO={hdr.get('BZERO', 'NA')}")
+        except Exception:
+            pass
+
+def _warn_if_units_mismatch(light: np.ndarray, dark: np.ndarray | None, flat: np.ndarray | None):
+    # Heuristic: if one is ~0..1 and another is hundreds/thousands, you’ve got mixed scaling.
+    def _range_kind(a):
+        if a is None:
+            return None
+        fin = np.isfinite(a)
+        if not fin.any():
+            return None
+        mx = float(np.max(a[fin]))
+        mn = float(np.min(a[fin]))
+        return (mn, mx)
+
+    lr = _range_kind(light)
+    dr = _range_kind(dark)
+    fr = _range_kind(flat)
+
+    def _is_01(r):
+        if r is None: return False
+        mn, mx = r
+        return mx <= 2.5 and mn >= -0.5
+
+    def _is_aduish(r):
+        if r is None: return False
+        mn, mx = r
+        return mx >= 50.0  # conservative
+
+    if lr and dr and _is_01(lr) and _is_aduish(dr):
+        print("🚨 UNITS MISMATCH: light looks ~0–1, but dark looks like ADU (tens/hundreds/thousands). Expect huge negatives after subtraction.")
+    if lr and fr and _is_01(lr) and _is_aduish(fr):
+        print("🚨 UNITS MISMATCH: light looks ~0–1, but flat looks like ADU. Flat division will be wrong unless normalized to ~1 first.")
+
+def _maybe_normalize_16bit_float(a: np.ndarray, *, name: str = "") -> np.ndarray:
+    """
+    Fast guard:
+      - If float array has max > 10, assume it's really 16-bit ADU data stored as float,
+        and normalize to 0..1 by dividing by 65535.
+    """
+    if a is None:
+        return a
+    if not np.issubdtype(a.dtype, np.floating):
+        return a
+
+    fin = np.isfinite(a)
+    if not fin.any():
+        return a
+
+    mx = float(a[fin].max())  # fast reduction
+
+    if mx > 10.0:
+        print(f"🛡️ Units-guard: {name or 'array'} max={mx:.3f} (>10). Assuming 16-bit ADU-in-float; normalizing /65535.")
+        return (a / 65535.0).astype(np.float32, copy=False)
+
+    return a

 class StackingSuiteDialog(QDialog):
     requestRelaunch = pyqtSignal(str, str)  # old_dir, new_dir
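
The filename token scheme introduced by _temp_to_stem_tag maps a temperature to a filesystem-safe string: the sign becomes m/p and the decimal point becomes p, so -10.0 becomes m10p0C and +5.3 becomes p5p3C. A standalone restatement of that mapping for quick experimentation (hypothetical helper name, same rules as the hunk above):

    def temp_tag(temp_c: float, prefix: str = "") -> str:
        sign = "m" if temp_c < 0 else "p"
        return f"{prefix}{sign}{abs(temp_c):.1f}".replace(".", "p") + "C"

    print(temp_tag(-10.0))         # m10p0C
    print(temp_tag(5.3))           # p5p3C
    print(temp_tag(-10.0, "set"))  # setm10p0C (matches the optional 'set' prefix in the parser)
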
@@ -5293,6 +5581,20 @@ class StackingSuiteDialog(QDialog):

         left_col.addWidget(gb_general)

+        self.temp_group_step_spin = QDoubleSpinBox()
+        self.temp_group_step_spin.setRange(0.0, 20.0)   # 0 disables grouping-by-temp (optional behavior)
+        self.temp_group_step_spin.setDecimals(2)
+        self.temp_group_step_spin.setSingleStep(0.1)
+        self.temp_group_step_spin.setValue(
+            self.settings.value("stacking/temp_group_step", 1.0, type=float)
+        )
+        self.temp_group_step_spin.setToolTip(
+            self.tr("Temperature grouping tolerance in °C.\n"
+                    "Frames within ±step are grouped together.\n"
+                    "Set 0 to disable temperature-based grouping.")
+        )
+        fl_general.addRow(self.tr("Temp grouping step (°C):"), self.temp_group_step_spin)
+
         # --- Distortion / Transform model ---
         # --- Distortion / Transform model ---
         disto_box = QGroupBox(self.tr("Distortion / Transform"))
@@ -6070,7 +6372,8 @@ class StackingSuiteDialog(QDialog):
         self.settings.setValue("stacking/chunk_width", self.chunk_width)
         self.settings.setValue("stacking/autocrop_enabled", self.autocrop_cb.isChecked())
         self.settings.setValue("stacking/autocrop_pct", float(self.autocrop_pct.value()))
-
+        self.temp_group_step = float(self.temp_group_step_spin.value())
+        self.settings.setValue("stacking/temp_group_step", self.temp_group_step)
         # ----- alignment model (affine | homography | poly3 | poly4) -----
         model_idx = self.align_model_combo.currentIndex()
         if model_idx == 0: model_name = "affine"
@@ -6659,6 +6962,22 @@ class StackingSuiteDialog(QDialog):

         return tab

+    def _bucket_temp(self, t: float | None, step: float = 3.0) -> float | None:
+        """Round to stable bucket. Example: -10.2 -> -10.0 when step=1.0"""
+        if t is None:
+            return None
+        try:
+            return round(float(t) / float(step)) * float(step)
+        except Exception:
+            return None
+
+    def _temp_label(self, t: float | None, step: float = 1.0) -> str:
+        if t is None:
+            return "Temp: Unknown"
+        # show fewer decimals if step is 1.0
+        return f"Temp: {t:+.0f}C" if step >= 1.0 else f"Temp: {t:+.1f}C"
+
+
     def _tree_for_type(self, t: str):
         t = (t or "").upper()
         if t == "LIGHT": return getattr(self, "light_tree", None)
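
The bucketing is plain round-to-nearest-multiple: divide by the step, round, multiply back. With the default 1.0 °C step, -10.2 °C and -9.8 °C land in the same -10.0 °C bucket, and a coarser step merges a wider band. A quick standalone check of that arithmetic, mirroring the method above:

    def bucket(t: float, step: float = 1.0) -> float:
        return round(t / step) * step

    print(bucket(-10.2))        # -10.0
    print(bucket(-9.8))         # -10.0
    print(bucket(-10.2, 3.0))   # -9.0 (coarser buckets at multiples of 3)
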
@@ -10485,24 +10804,86 @@ class StackingSuiteDialog(QDialog):
         keyword = self.settings.value("stacking/session_keyword", "Default", type=str)
         session_tag = self._session_from_manual_keyword(path, keyword) or "Default"

+        # --- Temperature (fast: header already loaded) ---
+        ccd_temp = header.get("CCD-TEMP", None)
+        set_temp = header.get("SET-TEMP", None)
+
+        def _to_float_temp(v):
+            try:
+                if v is None:
+                    return None
+                if isinstance(v, (int, float)):
+                    return float(v)
+                s = str(v).strip()
+                s = s.replace("°", "").replace("C", "").replace("c", "").strip()
+                return float(s)
+            except Exception:
+                return None
+
+        ccd_temp_f = _to_float_temp(ccd_temp)
+        set_temp_f = _to_float_temp(set_temp)
+        use_temp_f = ccd_temp_f if ccd_temp_f is not None else set_temp_f
+
+        # --- Common metadata string for leaf rows ---
+        meta_text = f"Size: {image_size} | Session: {session_tag}"
+        if use_temp_f is not None:
+            meta_text += f" | Temp: {use_temp_f:.1f}C"
+            if set_temp_f is not None:
+                meta_text += f" (Set: {set_temp_f:.1f}C)"
+
         # --- Common metadata string for leaf rows ---
         meta_text = f"Size: {image_size} | Session: {session_tag}"

         # === DARKs ===
         if expected_type_u == "DARK":
-
-
+            # --- temperature for grouping (prefer CCD-TEMP else SET-TEMP) ---
+            ccd_t = _get_key_float(header, "CCD-TEMP")
+            set_t = _get_key_float(header, "SET-TEMP")
+            chosen_t = ccd_t if ccd_t is not None else set_t
+
+            temp_step = float(self.settings.value("stacking/temp_group_step", 1.0, type=float) or 1.0)
+            temp_step = max(0.0, temp_step)
+            temp_bucket = self._bucket_temp(chosen_t, step=temp_step)
+            temp_label = self._temp_label(temp_bucket, step=temp_step)
+
+            # --- tree grouping: exposure/size -> temp bucket -> files ---
+            base_key = f"{exposure_text} ({image_size})"

-
+            # ensure caches exist
+            if not hasattr(self, "_dark_group_item") or self._dark_group_item is None:
+                self._dark_group_item = {}
+            if not hasattr(self, "_dark_temp_item") or self._dark_temp_item is None:
+                self._dark_temp_item = {}   # (base_key, temp_label) -> QTreeWidgetItem
+
+            # top-level exposure group
+            exposure_item = self._dark_group_item.get(base_key)
             if exposure_item is None:
-                exposure_item = QTreeWidgetItem([
+                exposure_item = QTreeWidgetItem([base_key, ""])
                 tree.addTopLevelItem(exposure_item)
-                self._dark_group_item[
-
-
+                self._dark_group_item[base_key] = exposure_item
+
+            # second-level temp group under that exposure group
+            temp_key = (base_key, temp_label)
+            temp_item = self._dark_temp_item.get(temp_key)
+            if temp_item is None:
+                temp_item = QTreeWidgetItem([temp_label, ""])
+                exposure_item.addChild(temp_item)
+                self._dark_temp_item[temp_key] = temp_item
+
+            # --- store in dict for stacking ---
+            # Key includes session + temp bucket so create_master_dark can split properly.
+            # (We keep compatibility: your create_master_dark already handles tuple keys.)
+            composite_key = (base_key, session_tag, temp_bucket)
+            self.dark_files.setdefault(composite_key, []).append(path)
+
+            # --- leaf row ---
+            # Also add temp info to metadata text so user can see it per file
+            meta_text_dark = f"Size: {image_size} | Session: {session_tag} | {temp_label}"
+            leaf = QTreeWidgetItem([os.path.basename(path), meta_text_dark])
             leaf.setData(0, Qt.ItemDataRole.UserRole, path)
             leaf.setData(0, Qt.ItemDataRole.UserRole + 1, session_tag)
-
+            leaf.setData(0, Qt.ItemDataRole.UserRole + 2, temp_bucket)   # handy later
+            temp_item.addChild(leaf)

         # === FLATs ===
         elif expected_type_u == "FLAT":
@@ -10664,14 +11045,40 @@ class StackingSuiteDialog(QDialog):
         exposure_tolerance = self.exposure_tolerance_spinbox.value()

         # -------------------------------------------------------------------------
-        #
-        # self.dark_files can be either:
-        #   legacy:  exposure_key -> [paths]
-        #   session: (exposure_key, session) -> [paths]
+        # Temp helpers
         # -------------------------------------------------------------------------
-
+        def _bucket_temp(t: float | None, step: float = 3.0) -> float | None:
+            """Round temperature to a stable bucket (e.g. -10.2 -> -10.0 if step=1.0)."""
+            if t is None:
+                return None
+            try:
+                return round(float(t) / step) * step
+            except Exception:
+                return None
+
+        def _read_temp_quick(path: str) -> tuple[float | None, float | None, float | None]:
+            """Fast temp read (CCD, SET, chosen). Uses fits.getheader(memmap=True)."""
+            try:
+                hdr = fits.getheader(path, memmap=True)
+            except Exception:
+                return None, None, None
+            ccd = _get_key_float(hdr, "CCD-TEMP")
+            st = _get_key_float(hdr, "SET-TEMP")
+            chosen = ccd if ccd is not None else st
+            return ccd, st, chosen
+
+        # -------------------------------------------------------------------------
+        # Group darks by (exposure +/- tolerance, image size, session, temp_bucket)
+        # TEMP_STEP is the rounding bucket (1.0C default)
+        # -------------------------------------------------------------------------
+        TEMP_STEP = float(self.settings.value("stacking/temp_group_step", 1.0, type=float) or 1.0)
+        TEMP_STEP = max(0.0, TEMP_STEP)
+
+        dark_files_by_group: dict[tuple[float, str, str, float | None], list[str]] = {}   # (exp,size,session,temp)->list

         for key, file_list in (self.dark_files or {}).items():
+            # Support both legacy dark_files (key=str) and newer tuple keys.
+            # We DO NOT assume dark_files already contains temp in key — we re-bucket from headers anyway.
             if isinstance(key, tuple) and len(key) >= 2:
                 exposure_key = str(key[0])
                 session = str(key[1]) if str(key[1]).strip() else "Default"
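
The grouping loop above deliberately tolerates both shapes of self.dark_files keys that have existed across versions: a bare exposure string and a tuple carrying the session (and, after this release, the temperature bucket). A toy illustration of the key shapes it has to cope with; the key values here are made up, only the structure matters:

    dark_files = {
        "300.0s (4144x2822)": ["d1.fit"],                     # legacy: exposure_key -> [paths]
        ("300.0s (4144x2822)", "NightA"): ["d2.fit"],         # session-aware tuple key
        ("300.0s (4144x2822)", "NightA", -10.0): ["d3.fit"],  # new: includes temp bucket
    }
    for key in dark_files:
        if isinstance(key, tuple) and len(key) >= 2:
            exposure_key, session = str(key[0]), str(key[1]).strip() or "Default"
        else:
            exposure_key, session = str(key), "Default"
        print(exposure_key, session)
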
@@ -10683,10 +11090,9 @@ class StackingSuiteDialog(QDialog):
                    exposure_time_str, image_size = exposure_key.split(" (", 1)
                    image_size = image_size.rstrip(")")
                except ValueError:
-                    # If some malformed key got in, skip safely
                    continue

            if "Unknown" in exposure_time_str:
+            if "Unknown" in (exposure_time_str or ""):
                exposure_time = 0.0
            else:
                try:
@@ -10694,21 +11100,31 @@ class StackingSuiteDialog(QDialog):
                except Exception:
                    exposure_time = 0.0

-
-
-
-
-
-
-
-
-
+            # Split the incoming list by temp bucket so mixed temps do not merge.
+            bucketed: dict[float | None, list[str]] = {}
+            for p in (file_list or []):
+                _, _, chosen = _read_temp_quick(p)
+                tb = _bucket_temp(chosen, step=TEMP_STEP)
+                bucketed.setdefault(tb, []).append(p)
+
+            # Apply exposure tolerance grouping PER temp bucket
+            for temp_bucket, paths_in_bucket in bucketed.items():
+                matched_group = None
+                for (existing_exposure, existing_size, existing_session, existing_temp) in list(dark_files_by_group.keys()):
+                    if (
+                        existing_session == session
+                        and existing_size == image_size
+                        and existing_temp == temp_bucket
+                        and abs(existing_exposure - exposure_time) <= exposure_tolerance
+                    ):
+                        matched_group = (existing_exposure, existing_size, existing_session, existing_temp)
+                        break

-
-
-
+                if matched_group is None:
+                    matched_group = (exposure_time, image_size, session, temp_bucket)
+                    dark_files_by_group[matched_group] = []

-
+                dark_files_by_group[matched_group].extend(paths_in_bucket)

         master_dir = os.path.join(self.stacking_directory, "Master_Calibration_Files")
         os.makedirs(master_dir, exist_ok=True)
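
After this pass each stacking group is keyed by (exposure, size, session, temp_bucket), so darks shot at the same exposure but at -10 °C and 0 °C no longer merge into one master. A compact sketch of that grouping rule with the same matching criteria as above; the frame list and tolerance are invented for illustration:

    frames = [
        (300.0, "4144x2822", "Default", -10.0, "a.fit"),
        (300.5, "4144x2822", "Default", -10.0, "b.fit"),   # within exposure tolerance
        (300.0, "4144x2822", "Default",   0.0, "c.fit"),   # different temp bucket -> new group
    ]
    tolerance = 1.0
    groups: dict[tuple, list[str]] = {}
    for exp, size, session, temp, path in frames:
        match = next((k for k in groups
                      if k[1] == size and k[2] == session and k[3] == temp
                      and abs(k[0] - exp) <= tolerance), None)
        groups.setdefault(match or (exp, size, session, temp), []).append(path)
    print(len(groups))   # 2
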
@@ -10717,11 +11133,11 @@ class StackingSuiteDialog(QDialog):
         # Informative status about discovery
         # -------------------------------------------------------------------------
         try:
-
+            n_groups_eligible = sum(1 for _, v in dark_files_by_group.items() if len(v) >= 2)
             total_files = sum(len(v) for v in dark_files_by_group.values())
             self.update_status(self.tr(
                 f"🔎 Discovered {len(dark_files_by_group)} grouped exposures "
-                f"({
+                f"({n_groups_eligible} eligible to stack) — {total_files} files total."
             ))
         except Exception:
             pass
@@ -10731,12 +11147,12 @@ class StackingSuiteDialog(QDialog):
         # Pre-count tiles for progress bar (per-group safe chunk sizes)
         # -------------------------------------------------------------------------
         total_tiles = 0
-        group_shapes: dict[tuple[float, str, str], tuple[int, int, int, int, int]] = {}
+        group_shapes: dict[tuple[float, str, str, float | None], tuple[int, int, int, int, int]] = {}
         pref_chunk_h = self.chunk_height
         pref_chunk_w = self.chunk_width
         DTYPE = np.float32

-        for (exposure_time, image_size, session), file_list in dark_files_by_group.items():
+        for (exposure_time, image_size, session, temp_bucket), file_list in dark_files_by_group.items():
             if len(file_list) < 2:
                 continue

|
|
|
10754
11170
|
except MemoryError:
|
|
10755
11171
|
chunk_h, chunk_w = pref_chunk_h, pref_chunk_w
|
|
10756
11172
|
|
|
10757
|
-
|
|
11173
|
+
gk = (exposure_time, image_size, session, temp_bucket)
|
|
11174
|
+
group_shapes[gk] = (H, W, C, chunk_h, chunk_w)
|
|
10758
11175
|
total_tiles += _count_tiles(H, W, chunk_h, chunk_w)
|
|
10759
11176
|
|
|
10760
11177
|
if total_tiles == 0:
|
|
@@ -10767,7 +11184,7 @@ class StackingSuiteDialog(QDialog):
             QApplication.processEvents()

         # -------------------------------------------------------------------------
-        # Local CPU reducers
+        # Local CPU reducers
         # -------------------------------------------------------------------------
         def _select_reducer(kind: str, N: int):
             if kind == "dark":
@@ -10811,10 +11228,10 @@ class StackingSuiteDialog(QDialog):
         # ---------------------------------------------------------------------
         # Per-group stacking loop
         # ---------------------------------------------------------------------
-        for (exposure_time, image_size, session), file_list in dark_files_by_group.items():
+        for (exposure_time, image_size, session, temp_bucket), file_list in dark_files_by_group.items():
             if len(file_list) < 2:
                 self.update_status(self.tr(
-                    f"⚠️ Skipping {exposure_time}s ({image_size}) [{session}] - Not enough frames to stack."
+                    f"⚠️ Skipping {exposure_time:g}s ({image_size}) [{session}] - Not enough frames to stack."
                 ))
                 QApplication.processEvents()
                 continue
@@ -10823,14 +11240,17 @@ class StackingSuiteDialog(QDialog):
                 self.update_status(self.tr("⛔ Master Dark creation cancelled."))
                 break

+            temp_txt = "Unknown" if temp_bucket is None else f"{float(temp_bucket):+.1f}C"
             self.update_status(self.tr(
-                f"🟢 Processing {len(file_list)} darks for {exposure_time}s ({image_size})
+                f"🟢 Processing {len(file_list)} darks for {exposure_time:g}s ({image_size}) "
+                f"in session '{session}' at {temp_txt}…"
             ))
             QApplication.processEvents()

             # --- reference shape and per-group chunk size ---
-
-
+            gk = (exposure_time, image_size, session, temp_bucket)
+            if gk in group_shapes:
+                height, width, channels, chunk_height, chunk_width = group_shapes[gk]
             else:
                 ref_data, _, _, _ = load_image(file_list[0])
                 if ref_data is None:
@@ -10870,8 +11290,11 @@ class StackingSuiteDialog(QDialog):
                 QApplication.processEvents()
                 continue

-            #
-
+            # Create temp memmap (stem-safe normalization)
+            tb_tag = "notemp" if temp_bucket is None else _temp_to_stem_tag(float(temp_bucket))
+            memmap_base = f"temp_dark_{session}_{exposure_time:g}s_{image_size}_{tb_tag}.dat"
+            memmap_base = self._normalize_master_stem(memmap_base)
+            memmap_path = os.path.join(master_dir, memmap_base)

             self.update_status(self.tr(
                 f"🗂️ Creating temp memmap: {os.path.basename(memmap_path)} "
@@ -10883,6 +11306,7 @@ class StackingSuiteDialog(QDialog):

             tiles = _tile_grid(height, width, chunk_height, chunk_width)
             total_tiles_group = len(tiles)
+
             self.update_status(self.tr(
                 f"📦 {total_tiles_group} tiles to process for this group (chunk {chunk_height}×{chunk_width})."
             ))
@@ -10924,7 +11348,7 @@ class StackingSuiteDialog(QDialog):
                     fut = tp.submit(_read_tile_into, (buf1 if use0 else buf0), ny0, ny1, nx0, nx1)

                     pd.set_label(
-                        f"{int(exposure_time)}s ({image_size}) [{session}] — "
+                        f"{int(exposure_time)}s ({image_size}) [{session}] [{temp_txt}] — "
                         f"tile {t_idx}/{total_tiles_group} y:{y0}-{y1} x:{x0}-{x1}"
                     )

@@ -10954,6 +11378,7 @@ class StackingSuiteDialog(QDialog):

                     if tile_result.ndim == 2:
                         tile_result = tile_result[:, :, None]
+
                     expected_shape = (th, tw, channels)
                     if tile_result.shape != expected_shape:
                         if tile_result.shape[:2] == (th, tw):
@@ -10988,37 +11413,115 @@ class StackingSuiteDialog(QDialog):
                         pass
                     break

+            # -------------------------------------------------------------
+            # Materialize final memmap to ndarray for save
+            # -------------------------------------------------------------
             master_dark_data = np.asarray(final_stacked, dtype=np.float32)
-
+            try:
+                del final_stacked
+            except Exception:
+                pass
             gc.collect()
+
             try:
                 os.remove(memmap_path)
             except Exception:
                 pass

-            #
-
+            # -------------------------------------------------------------
+            # Collect temperature stats from input dark headers
+            # -------------------------------------------------------------
+            temp_info = {}
+            try:
+                temp_info = _collect_temp_stats(file_list) or {}
+            except Exception:
+                temp_info = {}
+
+            # -------------------------------------------------------------
+            # Build output filename (include session + exposure + size + temp bucket tag)
+            # -------------------------------------------------------------
+            temp_tag = ""
+            try:
+                if temp_bucket is not None:
+                    temp_tag = "_" + _temp_to_stem_tag(float(temp_bucket))
+                elif temp_info.get("ccd_med") is not None:
+                    temp_tag = "_" + _temp_to_stem_tag(float(temp_info["ccd_med"]))
+                elif temp_info.get("set_med") is not None:
+                    temp_tag = "_" + _temp_to_stem_tag(float(temp_info["set_med"]), prefix="set")
+            except Exception:
+                temp_tag = ""
+
+            master_dark_stem = f"MasterDark_{session}_{int(exposure_time)}s_{image_size}{temp_tag}"
+            master_dark_stem = self._normalize_master_stem(master_dark_stem)
             master_dark_path = self._build_out(master_dir, master_dark_stem, "fit")

+            # -------------------------------------------------------------
+            # Header
+            # -------------------------------------------------------------
             master_header = fits.Header()
             master_header["IMAGETYP"] = "DARK"
-            master_header["EXPTIME"]
-            master_header["SESSION"]
-            master_header["
-            master_header["
-
+            master_header["EXPTIME"] = (float(exposure_time), "Exposure time (s)")
+            master_header["SESSION"] = (str(session), "User session tag")
+            master_header["NCOMBINE"] = (int(N), "Number of darks combined")
+            master_header["NSTACK"] = (int(N), "Alias of NCOMBINE (SetiAstro)")
+
+            # Temperature provenance (only write keys that exist)
+            if temp_info.get("ccd_med") is not None:
+                master_header["CCD-TEMP"] = (float(temp_info["ccd_med"]), "Median CCD temp of input darks (C)")
+            if temp_info.get("ccd_min") is not None:
+                master_header["CCDTMIN"] = (float(temp_info["ccd_min"]), "Min CCD temp in input darks (C)")
+            if temp_info.get("ccd_max") is not None:
+                master_header["CCDTMAX"] = (float(temp_info["ccd_max"]), "Max CCD temp in input darks (C)")
+            if temp_info.get("ccd_std") is not None:
+                master_header["CCDTSTD"] = (float(temp_info["ccd_std"]), "Std CCD temp in input darks (C)")
+            if temp_info.get("ccd_n") is not None:
+                master_header["CCDTN"] = (int(temp_info["ccd_n"]), "Count of frames with CCD-TEMP")
+
+            if temp_info.get("set_med") is not None:
+                master_header["SET-TEMP"] = (float(temp_info["set_med"]), "Median setpoint temp of input darks (C)")
+            if temp_info.get("set_min") is not None:
+                master_header["SETTMIN"] = (float(temp_info["set_min"]), "Min setpoint in input darks (C)")
+            if temp_info.get("set_max") is not None:
+                master_header["SETTMAX"] = (float(temp_info["set_max"]), "Max setpoint in input darks (C)")
+            if temp_info.get("set_std") is not None:
+                master_header["SETTSTD"] = (float(temp_info["set_std"]), "Std setpoint in input darks (C)")
+            if temp_info.get("set_n") is not None:
+                master_header["SETTN"] = (int(temp_info["set_n"]), "Count of frames with SET-TEMP")
+
+            # Dimensions (save_image usually writes these, but keep your existing behavior)
+            master_header["NAXIS"] = 3 if channels == 3 else 2
+            master_header["NAXIS1"] = int(master_dark_data.shape[1])
+            master_header["NAXIS2"] = int(master_dark_data.shape[0])
             if channels == 3:
                 master_header["NAXIS3"] = 3

-            save_image(
+            save_image(
+                master_dark_data,
+                master_dark_path,
+                "fit",
+                "32-bit floating point",
+                master_header,
+                is_mono=(channels == 1)
+            )
+
+            # Tree label includes temp for visibility
+            tree_label = f"{exposure_time:g}s ({image_size}) [{session}]"
+            if temp_info.get("ccd_med") is not None:
+                tree_label += f" [CCD {float(temp_info['ccd_med']):+.1f}C]"
+            elif temp_info.get("set_med") is not None:
+                tree_label += f" [SET {float(temp_info['set_med']):+.1f}C]"
+            elif temp_bucket is not None:
+                tree_label += f" [TEMP {float(temp_bucket):+.1f}C]"

-            self.add_master_dark_to_tree(
+            self.add_master_dark_to_tree(tree_label, master_dark_path)
             self.update_status(self.tr(f"✅ Master Dark saved: {master_dark_path}"))
             QApplication.processEvents()

+            # Refresh assignments + persistence
             self.assign_best_master_files()
             self.save_master_paths_to_settings()

+            # Post pass refresh (unchanged behavior)
             self.assign_best_master_dark()
             self.update_override_dark_combo()
             self.assign_best_master_files()
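
With the temperature tag appended, a master dark stem now encodes session, exposure, geometry and temperature bucket. The tiny sketch below just evaluates the f-string from the hunk above on invented values; the real stem is still passed through _normalize_master_stem afterwards, so the printed name is indicative rather than literal.

    session, exposure_time, image_size = "Default", 300.0, "4144x2822"
    temp_tag = "_m10p0C"   # produced by _temp_to_stem_tag for the -10.0 C bucket
    stem = f"MasterDark_{session}_{int(exposure_time)}s_{image_size}{temp_tag}"
    print(stem)   # MasterDark_Default_300s_4144x2822_m10p0C
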
@@ -11031,7 +11534,6 @@ class StackingSuiteDialog(QDialog):
                 logging.debug(f"Exception suppressed: {type(e).__name__}: {e}")
             pd.close()

-
     def add_master_dark_to_tree(self, exposure_label: str, master_dark_path: str):
         """
         Adds the newly created Master Dark to the Master Dark TreeBox and updates the dropdown.
@@ -12079,6 +12581,140 @@ class StackingSuiteDialog(QDialog):
|
|
|
12079
12581
|
master_item = QTreeWidgetItem([os.path.basename(master_flat_path)])
|
|
12080
12582
|
filter_item.addChild(master_item)
|
|
12081
12583
|
|
|
12584
|
+
def _parse_float(self, v):
|
|
12585
|
+
try:
|
|
12586
|
+
if v is None:
|
|
12587
|
+
return None
|
|
12588
|
+
+            if isinstance(v, (int, float)):
+                return float(v)
+            s = str(v).strip()
+            # handle " -10.0 C" or "-10.0C"
+            s = s.replace("°", "").replace("C", "").replace("c", "").strip()
+            return float(s)
+        except Exception:
+            return None
+
+
+    def _read_ccd_set_temp_from_fits(self, path: str) -> tuple[float|None, float|None]:
+        """Read CCD-TEMP and SET-TEMP from FITS header (primary HDU)."""
+        try:
+            with fits.open(path) as hdul:
+                hdr = hdul[0].header
+                ccd = self._parse_float(hdr.get("CCD-TEMP", None))
+                st = self._parse_float(hdr.get("SET-TEMP", None))
+                return ccd, st
+        except Exception:
+            return None, None
+
+
+    def _temp_for_matching(self, ccd: float|None, st: float|None) -> float|None:
+        """Prefer CCD-TEMP; else SET-TEMP; else None."""
+        return ccd if ccd is not None else (st if st is not None else None)
+
+
+    def _parse_masterdark_name(self, stem: str):
+        """
+        From filename like:
+            MasterDark_Session_300s_4144x2822_m10p0C.fit
+        Return dict fields; temp is optional.
+        """
+        out = {"session": None, "exp": None, "size": None, "temp": None}
+
+        base = os.path.basename(stem)
+        base = os.path.splitext(base)[0]
+
+        # session is between MasterDark_ and _<exp>s_
+        # exp is <num>s
+        # size is <WxH> like 4144x2822
+        m = re.match(r"^MasterDark_(?P<session>.+?)_(?P<exp>[\d._]+)s_(?P<size>\d+x\d+)(?:_(?P<temp>.*))?$", base)
+        if not m:
+            return out
+
+        out["session"] = (m.group("session") or "").strip()
+        # exp might be "2_5" from _normalize_master_stem; convert back
+        exp_txt = (m.group("exp") or "").replace("_", ".")
+        try:
+            out["exp"] = float(exp_txt)
+        except Exception:
+            out["exp"] = None
+
+        out["size"] = m.group("size")
+
+        # temp token like m10p0C / p5p0C / setm10p0C
+        t = (m.group("temp") or "").strip()
+        if t:
+            # pick the first temp-ish token ending in C
+            mt = re.search(r"(set)?([mp])(\d+)p(\d)C", t)
+            if mt:
+                sign = -1.0 if mt.group(2) == "m" else 1.0
+                whole = float(mt.group(3))
+                frac = float(mt.group(4)) / 10.0
+                out["temp"] = sign * (whole + frac)
+
+        return out
+
+
+    def _get_master_dark_meta(self, path: str) -> dict:
+        """
+        Cached metadata for a master dark.
+        Prefers FITS header for temp; falls back to filename temp token.
+        """
+        if not hasattr(self, "_master_dark_meta_cache"):
+            self._master_dark_meta_cache = {}
+        cache = self._master_dark_meta_cache
+
+        p = os.path.normpath(path)
+        if p in cache:
+            return cache[p]
+
+        meta = {"path": p, "session": None, "exp": None, "size": None,
+                "ccd": None, "set": None, "temp": None}
+
+        # filename parse (fast)
+        fn = self._parse_masterdark_name(p)
+        meta["session"] = fn.get("session") or None
+        meta["exp"] = fn.get("exp")
+        meta["size"] = fn.get("size")
+        meta["temp"] = fn.get("temp")
+
+        # header parse (authoritative for temps)
+        ccd, st = self._read_ccd_set_temp_from_fits(p)
+        meta["ccd"] = ccd
+        meta["set"] = st
+        meta["temp"] = self._temp_for_matching(ccd, st) if (ccd is not None or st is not None) else meta["temp"]
+
+        # size from header if missing
+        if not meta["size"]:
+            try:
+                with fits.open(p) as hdul:
+                    data = hdul[0].data
+                    if data is not None:
+                        meta["size"] = f"{data.shape[1]}x{data.shape[0]}"
+            except Exception:
+                pass
+
+        cache[p] = meta
+        return meta
+
+
+    def _get_light_temp(self, light_path: str) -> tuple[float|None, float|None, float|None]:
+        """Return (ccd, set, chosen) with caching."""
+        if not hasattr(self, "_light_temp_cache"):
+            self._light_temp_cache = {}
+        cache = self._light_temp_cache
+
+        p = os.path.normpath(light_path or "")
+        if not p:
+            return None, None, None
+        if p in cache:
+            return cache[p]
+
+        ccd, st = self._read_ccd_set_temp_from_fits(p)
+        chosen = self._temp_for_matching(ccd, st)
+        cache[p] = (ccd, st, chosen)
+        return cache[p]
+
+
     def assign_best_master_files(self, fill_only: bool = True):
         """
         Assign best matching Master Dark and Flat to each Light leaf.
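The master-dark filename convention above packs the sensor temperature into a token such as m10p0C (m/p for the sign, p as the decimal point, trailing C). A standalone sketch of that decoding, reusing the same regex as the hunk; the example tokens are illustrative only:

    import re

    def decode_temp_token(token: str):
        """Decode tokens such as 'm10p0C' (-10.0) or 'setp5p0C' (+5.0)."""
        mt = re.search(r"(set)?([mp])(\d+)p(\d)C", token)
        if not mt:
            return None
        sign = -1.0 if mt.group(2) == "m" else 1.0
        return sign * (float(mt.group(3)) + float(mt.group(4)) / 10.0)

    print(decode_temp_token("m10p0C"))    # -10.0
    print(decode_temp_token("setp5p0C"))  # 5.0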
@@ -12138,32 +12774,57 @@ class StackingSuiteDialog(QDialog):
             if fill_only and curr_dark and curr_dark.lower() != "none":
                 dark_choice = curr_dark
             else:
-                # 3) Auto-pick by size+closest exposure
-
-
-
-
-
+                # 3) Auto-pick by size + closest exposure + closest temperature (and prefer same session)
+                light_path = leaf_item.data(0, Qt.ItemDataRole.UserRole)
+                l_ccd, l_set, l_temp = self._get_light_temp(light_path)
+
+                best_path = None
+                best_score = None
+
+                for mk, mp in (self.master_files or {}).items():
+                    if not mp:
                         continue
-                    master_dark_exposure_time = float(dmatch.group(1))
 
-
-
-                    if not
-
-
-
-
-
-
+                    bn = os.path.basename(mp)
+                    # Only consider MasterDark_* files (cheap gate)
+                    if not bn.startswith("MasterDark_"):
+                        continue
+
+                    md = self._get_master_dark_meta(mp)
+                    md_size = md.get("size") or "Unknown"
+                    if md_size != image_size:
+                        continue
+
+                    md_exp = md.get("exp")
+                    if md_exp is None:
+                        continue
 
-
-
-                    if diff < best_dark_diff:
-                        best_dark_diff = diff
-                        best_dark_match = master_path
+                    # exposure closeness
+                    exp_diff = abs(float(md_exp) - float(exposure_time))
 
-
+                    # session preference: exact match beats mismatch
+                    md_sess = (md.get("session") or "Default").strip()
+                    sess_mismatch = 0 if md_sess == session_name else 1
+
+                    # temperature closeness (if both known)
+                    md_temp = md.get("temp")
+                    if (l_temp is not None) and (md_temp is not None):
+                        temp_diff = abs(float(md_temp) - float(l_temp))
+                        temp_unknown = 0
+                    else:
+                        # if light has temp but dark doesn't (or vice versa), penalize
+                        temp_diff = 9999.0
+                        temp_unknown = 1
+
+                    # Score tuple: lower is better
+                    # Priority: session match -> exposure diff -> temp availability -> temp diff
+                    score = (sess_mismatch, exp_diff, temp_unknown, temp_diff)
+
+                    if best_score is None or score < best_score:
+                        best_score = score
+                        best_path = mp
+
+                dark_choice = os.path.basename(best_path) if best_path else ("None" if not curr_dark else curr_dark)
 
             # ---------- FLAT RESOLUTION ----------
             flat_key_full = f"{filter_name_raw} - {exposure_text}"
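Candidate darks are ranked with a plain Python tuple, so the comparison is lexicographic: a same-session dark always beats a different-session one, a closer exposure wins next, and temperature availability and closeness only break the remaining ties. A toy illustration of that ordering with made-up candidates:

    # (sess_mismatch, exp_diff, temp_unknown, temp_diff) -- lower is better
    candidates = {
        "MasterDark_A_300s": (0, 0.0, 0, 5.0),    # same session, exact exposure, 5 C off
        "MasterDark_B_300s": (1, 0.0, 0, 0.0),    # perfect temperature, different session
        "MasterDark_A_240s": (0, 60.0, 1, 9999.0),
    }
    best = min(candidates, key=candidates.get)
    print(best)  # MasterDark_A_300s -- session match outranks the perfect temperature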
@@ -12753,6 +13414,7 @@ class StackingSuiteDialog(QDialog):
 
             # ---------- LOAD LIGHT ----------
             light_data, hdr, bit_depth, is_mono = load_image(light_file)
+            #_print_stats("LIGHT raw", light_data, bit_depth=bit_depth, hdr=hdr)
             if light_data is None or hdr is None:
                 self.update_status(self.tr(f"❌ ERROR: Failed to load {os.path.basename(light_file)}"))
                 continue
@@ -12777,7 +13439,10 @@ class StackingSuiteDialog(QDialog):
 
             # ---------- APPLY DARK (if resolved) ----------
             if master_dark_path:
-                dark_data, _,
+                dark_data, _, dark_bit_depth, dark_is_mono = load_image(master_dark_path)
+                #_print_stats("DARK raw", dark_data, bit_depth=dark_bit_depth)
+                dark_data = _maybe_normalize_16bit_float(dark_data, name=os.path.basename(master_dark_path))
+                #_print_stats("DARK normalized", dark_data, bit_depth=dark_bit_depth)
                 if dark_data is not None:
                     if not dark_is_mono and dark_data.ndim == 3 and dark_data.shape[-1] == 3:
                         dark_data = dark_data.transpose(2, 0, 1)  # HWC -> CHW
@@ -12793,7 +13458,10 @@ class StackingSuiteDialog(QDialog):
 
             # ---------- APPLY FLAT (if resolved) ----------
             if master_flat_path:
-                flat_data, _,
+                flat_data, _, flat_bit_depth, flat_is_mono = load_image(master_flat_path)
+                #_print_stats("FLAT raw", flat_data, bit_depth=flat_bit_depth)
+                flat_data = _maybe_normalize_16bit_float(flat_data, name=os.path.basename(master_flat_path))
+                #_print_stats("FLAT normalized", flat_data, bit_depth=flat_bit_depth)
                 if flat_data is not None:
 
                     # Make flat layout match your working light layout:
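_maybe_normalize_16bit_float is applied to the loaded dark and flat above but is not defined in this hunk; the guard it suggests is rescaling data that still carries 16-bit ADU values into the [0, 1] float range the light frames use. A rough sketch under that assumption (the name, threshold and scale factor are guesses, not the shipped implementation):

    import numpy as np

    def maybe_normalize_16bit_float(img, name=""):
        """If a float image still looks like 16-bit ADUs, rescale it to [0, 1]."""
        if img is None:
            return img
        arr = np.asarray(img, dtype=np.float32)
        peak = float(arr.max()) if arr.size else 0.0
        if peak > 1.5:               # clearly outside a normalized [0, 1] range
            arr = arr / 65535.0      # assume 16-bit ADUs
            print(f"normalized {name}: max {peak:.1f} -> {arr.max():.3f}")
        return arr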
@@ -12907,8 +13575,10 @@ class StackingSuiteDialog(QDialog):
             max_val = float(np.max(light_data))
             self.update_status(self.tr(f"Before saving: min = {min_val:.4f}, max = {max_val:.4f}"))
             print(f"Before saving: min = {min_val:.4f}, max = {max_val:.4f}")
+
+            _warn_if_units_mismatch(light_data, dark_data if master_dark_path else None, flat_data if master_flat_path else None)
+            _print_stats("LIGHT final", light_data)
             QApplication.processEvents()
-
             # Annotate header
             try:
                 if hasattr(hdr, "add_history"):
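_warn_if_units_mismatch is likewise referenced but not shown in this hunk; the underlying idea is a sanity check that light, dark and flat sit on comparable scales before subtraction and division. A hypothetical sketch of such a check (the 50x factor is an arbitrary choice for the example):

    import numpy as np

    def warn_if_units_mismatch(light, dark=None, flat=None, factor=50.0):
        """Warn when a calibration frame differs from the light by a large scale factor."""
        ref = float(np.nanmedian(light))
        for label, frame in (("dark", dark), ("flat", flat)):
            if frame is None:
                continue
            med = float(np.nanmedian(frame))
            if ref > 0 and med > 0 and (med / ref > factor or ref / med > factor):
                print(f"⚠️ {label} median {med:.4g} vs light median {ref:.4g}: units may differ")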
@@ -16328,6 +16998,10 @@ class StackingSuiteDialog(QDialog):
         hdr_orig["CREATOR"] = "SetiAstroSuite"
         hdr_orig["DATE-OBS"] = datetime.utcnow().isoformat()
 
+        n_frames_group = len(file_list)
+        hdr_orig["NCOMBINE"] = (int(n_frames_group), "Number of frames combined")
+        hdr_orig["NSTACK"] = (int(n_frames_group), "Alias of NCOMBINE (SetiAstro)")
+
         is_mono_orig = (integrated_image.ndim == 2)
         if is_mono_orig:
             hdr_orig["NAXIS"] = 2
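Assigning a (value, comment) tuple to an astropy fits.Header key, as the NCOMBINE/NSTACK lines above do, stores the card value and its comment in one step:

    from astropy.io import fits

    hdr = fits.Header()
    n = 42
    hdr["NCOMBINE"] = (n, "Number of frames combined")
    hdr["NSTACK"] = (n, "Alias of NCOMBINE (SetiAstro)")
    print(hdr["NCOMBINE"])           # 42
    print(hdr.comments["NCOMBINE"])  # Number of frames combined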
@@ -16447,6 +17121,8 @@ class StackingSuiteDialog(QDialog):
                 scale=1.0,
                 rect_override=group_rect if group_rect is not None else global_rect
             )
+            hdr_crop["NCOMBINE"] = (int(n_frames_group), "Number of frames combined")
+            hdr_crop["NSTACK"] = (int(n_frames_group), "Alias of NCOMBINE (SetiAstro)")
             is_mono_crop = (cropped_img.ndim == 2)
             Hc, Wc = (cropped_img.shape[:2] if cropped_img.ndim >= 2 else (H, W))
             display_group_crop = self._label_with_dims(group_key, Wc, Hc)
@@ -16590,6 +17266,12 @@ class StackingSuiteDialog(QDialog):
                 algo_override=COMET_ALGO  # << comet-friendly reducer
             )
 
+            n_usable = int(len(usable))
+            ref_header_c = ref_header_c or ref_header or fits.Header()
+            ref_header_c["NCOMBINE"] = (n_usable, "Number of frames combined (comet)")
+            ref_header_c["NSTACK"] = (n_usable, "Alias of NCOMBINE (SetiAstro)")
+            ref_header_c["COMETFR"] = (n_usable, "Frames used for comet-aligned stack")
+
             # Save CometOnly
             Hc, Wc = comet_only.shape[:2]
             display_group_c = self._label_with_dims(group_key, Wc, Hc)
@@ -16614,6 +17296,10 @@ class StackingSuiteDialog(QDialog):
                 scale=1.0,
                 rect_override=group_rect if group_rect is not None else global_rect
             )
+            comet_only_crop, hdr_c_crop = self._apply_autocrop(...)
+            hdr_c_crop["NCOMBINE"] = (n_usable, "Number of frames combined (comet)")
+            hdr_c_crop["NSTACK"] = (n_usable, "Alias of NCOMBINE (SetiAstro)")
+            hdr_c_crop["COMETFR"] = (n_usable, "Frames used for comet-aligned stack")
             Hcc, Wcc = comet_only_crop.shape[:2]
             display_group_cc = self._label_with_dims(group_key, Wcc, Hcc)
             comet_path_crop = self._build_out(
@@ -17201,246 +17887,6 @@ class StackingSuiteDialog(QDialog):
                 views[p] = np.load(npy, mmap_mode="r")  # returns numpy.memmap
         return views
 
-
-    def stack_registered_images_chunked(
-        self,
-        grouped_files,
-        frame_weights,
-        chunk_height=2048,
-        chunk_width=2048
-    ):
-        self.update_status(self.tr(f"✅ Chunked stacking {len(grouped_files)} group(s)..."))
-        QApplication.processEvents()
-
-        all_rejection_coords = []
-
-        for group_key, file_list in grouped_files.items():
-            num_files = len(file_list)
-            self.update_status(self.tr(f"📊 Group '{group_key}' has {num_files} aligned file(s)."))
-            QApplication.processEvents()
-            if num_files < 2:
-                self.update_status(self.tr(f"⚠️ Group '{group_key}' does not have enough frames to stack."))
-                continue
-
-            # Reference shape/header (unchanged)
-            ref_file = file_list[0]
-            if not os.path.exists(ref_file):
-                self.update_status(self.tr(f"⚠️ Reference file '{ref_file}' not found, skipping group."))
-                continue
-
-            ref_data, ref_header, _, _ = load_image(ref_file)
-            if ref_data is None:
-                self.update_status(self.tr(f"⚠️ Could not load reference '{ref_file}', skipping group."))
-                continue
-
-            is_color = (ref_data.ndim == 3 and ref_data.shape[2] == 3)
-            height, width = ref_data.shape[:2]
-            channels = 3 if is_color else 1
-
-            # Final output memmap (unchanged)
-            memmap_path = self._build_out(self.stacking_directory, f"chunked_{group_key}", "dat")
-            final_stacked = np.memmap(memmap_path, dtype=np.float32, mode='w+', shape=(height, width, channels))
-
-            # Valid files + weights
-            aligned_paths, weights_list = [], []
-            for fpath in file_list:
-                if os.path.exists(fpath):
-                    aligned_paths.append(fpath)
-                    weights_list.append(frame_weights.get(fpath, 1.0))
-                else:
-                    self.update_status(self.tr(f"⚠️ File not found: {fpath}, skipping."))
-            if len(aligned_paths) < 2:
-                self.update_status(self.tr(f"⚠️ Not enough valid frames in group '{group_key}' to stack."))
-                continue
-
-            weights_list = np.array(weights_list, dtype=np.float32)
-
-            # ⬇️ NEW: open read-only memmaps for all aligned frames (float32 [0..1], HxWxC)
-            mm_views = self._open_memmaps_readonly(aligned_paths)
-
-            self.update_status(self.tr(f"📊 Stacking group '{group_key}' with {self.rejection_algorithm}"))
-            QApplication.processEvents()
-
-            rejection_coords = []
-            N = len(aligned_paths)
-            DTYPE = self._dtype()
-            pref_h = self.chunk_height
-            pref_w = self.chunk_width
-
-            try:
-                chunk_h, chunk_w = compute_safe_chunk(height, width, N, channels, DTYPE, pref_h, pref_w)
-                self.update_status(self.tr(f"🔧 Using chunk size {chunk_h}×{chunk_w} for {self._dtype()}"))
-            except MemoryError as e:
-                self.update_status(self.tr(f"⚠️ {e}"))
-                return None, {}, None
-
-            # Tile loop (same structure, but tile loading reads from memmaps)
-            from concurrent.futures import ThreadPoolExecutor, as_completed
-            LOADER_WORKERS = min(max(2, (os.cpu_count() or 4) // 2), 8)  # tuned for memory bw
-
-            for y_start in range(0, height, chunk_h):
-                y_end = min(y_start + chunk_h, height)
-                tile_h = y_end - y_start
-
-                for x_start in range(0, width, chunk_w):
-                    x_end = min(x_start + chunk_w, width)
-                    tile_w = x_end - x_start
-
-                    # Preallocate tile stack
-                    tile_stack = np.empty((N, tile_h, tile_w, channels), dtype=np.float32)
-
-                    # ⬇️ NEW: fill tile_stack from the memmaps (parallel copy)
-                    def _copy_one(i, path):
-                        v = mm_views[path][y_start:y_end, x_start:x_end]  # view on disk
-                        if v.ndim == 2:
-                            # mono memmap stored as (H,W,1); but if legacy mono npy exists as (H,W),
-                            # make it (H,W,1) here:
-                            vv = v[..., None]
-                        else:
-                            vv = v
-                        if vv.shape[2] == 1 and channels == 3:
-                            vv = np.repeat(vv, 3, axis=2)
-                        tile_stack[i] = vv
-
-                    with ThreadPoolExecutor(max_workers=LOADER_WORKERS) as exe:
-                        futs = {exe.submit(_copy_one, i, p): i for i, p in enumerate(aligned_paths)}
-                        for _ in as_completed(futs):
-                            pass
-
-                    # Rejection (unchanged – uses your Numba kernels)
-                    algo = self.rejection_algorithm
-                    if algo == "Simple Median (No Rejection)":
-                        tile_result = np.median(tile_stack, axis=0)
-                        tile_rej_map = np.zeros(tile_stack.shape[1:3], dtype=np.bool_)
-                    elif algo == "Simple Average (No Rejection)":
-                        tile_result = np.average(tile_stack, axis=0, weights=weights_list)
-                        tile_rej_map = np.zeros(tile_stack.shape[1:3], dtype=np.bool_)
-                    elif algo == "Weighted Windsorized Sigma Clipping":
-                        tile_result, tile_rej_map = windsorized_sigma_clip_weighted(
-                            tile_stack, weights_list, lower=self.sigma_low, upper=self.sigma_high
-                        )
-                    elif algo == "Kappa-Sigma Clipping":
-                        tile_result, tile_rej_map = kappa_sigma_clip_weighted(
-                            tile_stack, weights_list, kappa=self.kappa, iterations=self.iterations
-                        )
-                    elif algo == "Trimmed Mean":
-                        tile_result, tile_rej_map = trimmed_mean_weighted(
-                            tile_stack, weights_list, trim_fraction=self.trim_fraction
-                        )
-                    elif algo == "Extreme Studentized Deviate (ESD)":
-                        tile_result, tile_rej_map = esd_clip_weighted(
-                            tile_stack, weights_list, threshold=self.esd_threshold
-                        )
-                    elif algo == "Biweight Estimator":
-                        tile_result, tile_rej_map = biweight_location_weighted(
-                            tile_stack, weights_list, tuning_constant=self.biweight_constant
-                        )
-                    elif algo == "Modified Z-Score Clipping":
-                        tile_result, tile_rej_map = modified_zscore_clip_weighted(
-                            tile_stack, weights_list, threshold=self.modz_threshold
-                        )
-                    elif algo == "Max Value":
-                        tile_result, tile_rej_map = max_value_stack(
-                            tile_stack, weights_list
-                        )
-                    else:
-                        tile_result, tile_rej_map = windsorized_sigma_clip_weighted(
-                            tile_stack, weights_list, lower=self.sigma_low, upper=self.sigma_high
-                        )
-
-                    # Ensure tile_result has correct shape
-                    if tile_result.ndim == 2:
-                        tile_result = tile_result[:, :, None]
-                    expected_shape = (tile_h, tile_w, channels)
-                    if tile_result.shape != expected_shape:
-                        if tile_result.shape[2] == 0:
-                            tile_result = np.zeros(expected_shape, dtype=np.float32)
-                        elif tile_result.shape[:2] == (tile_h, tile_w):
-                            if tile_result.shape[2] > channels:
-                                tile_result = tile_result[:, :, :channels]
-                            else:
-                                tile_result = np.repeat(tile_result, channels, axis=2)[:, :, :channels]
-
-                    # Commit tile
-                    final_stacked[y_start:y_end, x_start:x_end, :] = tile_result
-
-                    # Collect per-tile rejection coords (unchanged logic)
-                    if tile_rej_map.ndim == 3:  # (N, tile_h, tile_w)
-                        combined_rej = np.any(tile_rej_map, axis=0)
-                    elif tile_rej_map.ndim == 4:  # (N, tile_h, tile_w, C)
-                        combined_rej = np.any(tile_rej_map, axis=0)
-                        combined_rej = np.any(combined_rej, axis=-1)
-                    else:
-                        combined_rej = np.zeros((tile_h, tile_w), dtype=np.bool_)
-
-                    ys_tile, xs_tile = np.where(combined_rej)
-                    for dy, dx in zip(ys_tile, xs_tile):
-                        rejection_coords.append((x_start + dx, y_start + dy))
-
-            # Finish/save (unchanged from your version) …
-            final_array = np.array(final_stacked)
-            del final_stacked
-
-            final_array = self._normalize_stack_01(final_array)
-
-            if final_array.ndim == 3 and final_array.shape[-1] == 1:
-                final_array = final_array[..., 0]
-            is_mono = (final_array.ndim == 2)
-
-            if ref_header is None:
-                ref_header = fits.Header()
-            ref_header["IMAGETYP"] = "MASTER STACK"
-            ref_header["BITPIX"] = -32
-            ref_header["STACKED"] = (True, "Stacked using chunked approach")
-            ref_header["CREATOR"] = "SetiAstroSuite"
-            ref_header["DATE-OBS"] = datetime.utcnow().isoformat()
-            if is_mono:
-                ref_header["NAXIS"] = 2
-                ref_header["NAXIS1"] = final_array.shape[1]
-                ref_header["NAXIS2"] = final_array.shape[0]
-                if "NAXIS3" in ref_header: del ref_header["NAXIS3"]
-            else:
-                ref_header["NAXIS"] = 3
-                ref_header["NAXIS1"] = final_array.shape[1]
-                ref_header["NAXIS2"] = final_array.shape[0]
-                ref_header["NAXIS3"] = 3
-
-            output_stem = f"MasterLight_{group_key}_{len(aligned_paths)}stacked"
-            output_path = self._build_out(self.stacking_directory, output_stem, "fit")
-
-            save_image(
-                img_array=final_array,
-                filename=output_path,
-                original_format="fit",
-                bit_depth="32-bit floating point",
-                original_header=ref_header,
-                is_mono=is_mono
-            )
-
-            self.update_status(self.tr(f"✅ Group '{group_key}' stacked {len(aligned_paths)} frame(s)! Saved: {output_path}"))
-
-            print(f"✅ Master Light saved for group '{group_key}': {output_path}")
-
-            # Optionally, you might want to store or log 'rejection_coords' (here appended to all_rejection_coords)
-            all_rejection_coords.extend(rejection_coords)
-
-            # Clean up memmap file
-            try:
-                os.remove(memmap_path)
-            except OSError:
-                pass
-
-        QMessageBox.information(
-            self,
-            "Stacking Complete",
-            f"All stacking finished successfully.\n"
-            f"Frames per group:\n" +
-            "\n".join([f"{group_key}: {len(files)} frame(s)" for group_key, files in grouped_files.items()])
-        )
-
-        # Optionally, you could return the global rejection coordinate list.
-        return all_rejection_coords
-
     def _start_after_align_worker(self, aligned_light_files: dict[str, list[str]]):
         # Snapshot UI settings
         if getattr(self, "_suppress_normal_integration_once", False):
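_open_memmaps_readonly (kept above) returns numpy.memmap views of the registered frames, so reading one stacking tile stays a cheap on-disk slice rather than a full-frame load. A toy example of that access pattern, with made-up file names and sizes:

    import numpy as np

    # Pretend these are registered frames saved as float32 (H, W, C) .npy files.
    paths = ["frame0.npy", "frame1.npy"]
    for p in paths:
        np.save(p, np.random.rand(256, 256, 3).astype(np.float32))

    views = {p: np.load(p, mmap_mode="r") for p in paths}   # numpy.memmap views

    # Slicing a memmap only touches the bytes of that tile on disk.
    tile = np.stack([views[p][0:64, 0:64] for p in paths], axis=0)
    print(tile.shape, tile.dtype)  # (2, 64, 64, 3) float32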
@@ -18455,6 +18901,10 @@ class StackingSuiteDialog(QDialog):
         hdr_orig["CREATOR"] = "SetiAstroSuite"
         hdr_orig["DATE-OBS"] = datetime.utcnow().isoformat()
 
+        n_frames = int(len(file_list))
+        hdr_orig["NCOMBINE"] = (n_frames, "Number of frames combined")
+        hdr_orig["NSTACK"] = (n_frames, "Alias of NCOMBINE (SetiAstro)")
+
         if final_drizzle.ndim == 2:
             hdr_orig["NAXIS"] = 2
             hdr_orig["NAXIS1"] = final_drizzle.shape[1]
@@ -18484,10 +18934,12 @@ class StackingSuiteDialog(QDialog):
         cropped_drizzle, hdr_crop = self._apply_autocrop(
             final_drizzle,
             file_list,
-
+            hdr_orig.copy(),
             scale=float(scale_factor),
             rect_override=rect_override
         )
+        hdr_crop["NCOMBINE"] = (n_frames, "Number of frames combined")
+        hdr_crop["NSTACK"] = (n_frames, "Alias of NCOMBINE (SetiAstro)")
         is_mono_crop = (cropped_drizzle.ndim == 2)
         display_group_driz_crop = self._label_with_dims(group_key, cropped_drizzle.shape[1], cropped_drizzle.shape[0])
         base_crop = f"MasterLight_{display_group_driz_crop}_{len(file_list)}stacked_drizzle_autocrop"
|