hyper-py-photometry 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hyper_py/bkg_single.py ADDED
@@ -0,0 +1,477 @@
1
+ import warnings
2
+
3
+ import numpy as np
4
+ from astropy.convolution import Gaussian2DKernel, interpolate_replace_nans
5
+ from astropy.modeling import fitting, models
6
+ from astropy.stats import SigmaClip, sigma_clipped_stats
7
+ from astropy.wcs import WCS
8
+ from sklearn.exceptions import ConvergenceWarning
9
+ from sklearn.linear_model import HuberRegressor, TheilSenRegressor
10
+
11
+
12
def masked_background_single_sources(
    minimize_method,
    image,
    header,
    xcen,
    ycen,
    nx, ny,
    all_sources_xcen,
    all_sources_ycen,
    max_fwhm_extent,
    box_sizes,
    pol_orders_separate,
    suffix,
    source_id,
    config,
    logger,
    logger_file_only
):
    """
    Estimate and subtract a 2D polynomial background around a single source.

    For every trial box size a cutout is extracted from ``image`` around
    (xcen, ycen); neighbouring ("external") sources inside the box and the
    main source itself are masked via elliptical 2D Gaussian fits; then a
    polynomial surface is fitted to the remaining background pixels for every
    combination of box size, polynomial order and fitter. The combination
    that minimizes the chosen metric wins, and its model is subtracted.

    Parameters
    ----------
    minimize_method : str
        Model-selection metric: "nmse", "bic" or "redchi"
        (any other value falls back to "nmse").
    image : 2D array
        Full map from which the cutout is extracted.
    header : fits.Header
        Header of the full map; its WCS is shifted to match the best cutout.
    xcen, ycen : float
        Pixel centroid of the main source in the full-map frame.
    nx, ny : int
        Full-map x/y dimensions, used to clip cutout bounds.
    all_sources_xcen, all_sources_ycen : sequence of float
        Positions of all detected sources; those falling inside the box
        (other than the main one) are masked as external sources.
    max_fwhm_extent : float
        Characteristic source radius in pixels, used for masking and for
        sizing the Gaussian fit windows.
    box_sizes : list of int
        Trial cutout sizes in pixels. When ``fix_min_box`` is 0 the whole
        map is used and ``box_sizes`` is read as [x_size, y_size] instead.
    pol_orders_separate : list of int
        Polynomial orders to try (e.g. [0, 1, 2]).
    suffix : str
        Identifier of the map or slice. Not referenced in this routine;
        kept for interface compatibility.
    source_id : int
        Index of the current source. Not referenced in this routine;
        kept for interface compatibility.
    config : HyperConfig
        Configuration reader with fitting options.
    logger : Logger
        Logger for console + file messages.
    logger_file_only : Logger
        Logger that writes to the log file only.

    Returns
    -------
    tuple of 16 elements
        (best_cutout_masked, best_cutout_masked_full, best_header,
        best_bg_model, best_mask_bg, best_x0, best_y0, best_xx, best_yy,
        best_xmin, best_xmax, best_ymin, best_ymax, best_box_sizes,
        best_order, best_params).
        If no background fit succeeded, a placeholder tuple is returned
        with zero arrays, header None, order 0 and empty params.
    """

    logger_file_only.info("[INFO] Estimating background separately on masked cutout...")


    # ---------- SELECT WHICH FITTERS TO USE ----------
    # Each entry is (label, huber_epsilon_or_None, regressor_or_None);
    # a None regressor means plain np.linalg.lstsq.
    bg_fitters = config.get("fit_options", "bg_fitters", ["least_squares"])
    huber_epsilons = config.get("fit_options", "huber_epsilons", [1.35])

    fitters = []
    if "least_squares" in bg_fitters:
        fitters.append(("LeastSquares", None, None)) # Will use np.linalg.lstsq

    if "huber" in bg_fitters:
        for eps in huber_epsilons:
            reg = HuberRegressor(fit_intercept=False, max_iter=100, epsilon=eps)
            fitters.append((f"Huber_{eps}", eps, reg))

    if "theilsen" in bg_fitters:
        reg = TheilSenRegressor(fit_intercept=False, max_subpopulation=1e4, random_state=42)
        fitters.append(("TheilSen", None, reg))


    # --- identify if run on whole map (fix_min_box = 0) or not --- #
    fix_min_box = config.get("background", "fix_min_box", 3) # minimum padding value (multiple of FWHM)


    # - Initialize best-model bookkeeping - #
    best_params = {}
    best_order = None
    best_min = np.inf

    # Mask of the first processed box, reused so that the selection metric is
    # always evaluated on a common pixel set across all box sizes.
    cutout_reference_mask = None


    # ------------------ Loop over box sizes ------------------ #
    for box in box_sizes:
        if fix_min_box != 0:
            half_box = box // 2 -1
            xmin = max(0, int(np.min(xcen)) - half_box)
            xmax = min(nx, int(np.max(xcen)) + half_box + 1)
            ymin = max(0, int(np.min(ycen)) - half_box)
            ymax = min(ny, int(np.max(ycen)) + half_box + 1)

            cutout = image[ymin:ymax, xmin:xmax].copy()
        else:
            # Whole-map mode: box_sizes holds [x_size, y_size].
            # NOTE(review): `cutout = image` is NOT a copy here, and it is
            # later shifted by the median — presumably intentional, but it
            # mutates a rebinding, not the caller's array; verify.
            xmin = 0
            xmax = box_sizes[0]
            ymin = 0
            ymax = box_sizes[1]
            cutout = image


        if cutout.size == 0 or np.isnan(cutout).all():
            logger.warning("[WARNING] Empty or invalid cutout. Skipping.")
            continue

        # - first regularize mean background: subtract the sigma-clipped
        #   median so the polynomial fits residuals around zero - #
        valid_cutout = ~np.isnan(cutout)
        mean_valid_cutout, median_valid_cutout, std_valid_cutout = sigma_clipped_stats(cutout[valid_cutout], sigma=3.0, maxiters=5)
        cutout = cutout - median_valid_cutout


        # Pixel coordinate grids and source centroid in the cutout frame
        yy, xx = np.indices(cutout.shape)
        x0 = xcen - xmin
        y0 = ycen - ymin


        # ---Initialize mask: True = valid pixel for background fitting --- #
        mask_bg = np.isfinite(cutout)

        all_sources_to_mask = []
        all_sources_to_mask.append((x0, y0))

        main_sources = []
        main_sources.append((x0, y0))

        external_sources = []

        #--- Identify external sources inside box - only if the background is not estimated over the whole map ---#
        if fix_min_box != 0:
            for i in range(len(all_sources_xcen)):
                # NOTE(review): this excludes the main source only when BOTH
                # coordinates differ (logical AND) — a distinct source sharing
                # one coordinate with the main one would be skipped; confirm
                # this is intended.
                if (all_sources_xcen[i]-xmin != x0) and (all_sources_ycen[i]-ymin != y0):
                    sx = all_sources_xcen[i]
                    sy = all_sources_ycen[i]

                    if xmin <= sx <= xmax and ymin <= sy <= ymax:
                        ex = sx - xmin
                        ey = sy - ymin
                        all_sources_to_mask.append((ex, ey))
                        external_sources.append((ex, ey))



        # --- Mask all external sources using simple 2D Gaussian fitting --- #
        cut_local = cutout
        for xc, yc in external_sources:
            xc_int = int(round(xc))
            yc_int = int(round(yc))

            # Define small fit window around each source (half-size in pixels)
            fit_size = round(max_fwhm_extent/2.) # half-size
            xfit_min = max(0, xc_int - fit_size)
            xfit_max = min(cut_local.shape[1], xc_int + fit_size + 1)
            yfit_min = max(0, yc_int - fit_size)
            yfit_max = min(cut_local.shape[0], yc_int + fit_size + 1)

            data_fit = cut_local[yfit_min:yfit_max, xfit_min:xfit_max]
            if data_fit.size < max_fwhm_extent*2 or np.all(np.isnan(data_fit)) or np.nanmax(data_fit) <= 0:
                continue # skip this source if empty or invalid

            yy_sub, xx_sub = np.mgrid[yfit_min:yfit_max, xfit_min:xfit_max]

            # Define and fit elliptical Gaussian (stddev bounded around the
            # expected source extent to keep the fit physical)
            g_init = models.Gaussian2D(
                amplitude=np.nanmax(data_fit),
                x_mean=xc,
                y_mean=yc,
                x_stddev=max_fwhm_extent,
                y_stddev=max_fwhm_extent,
                theta=0.0,
                bounds={'x_stddev': (max_fwhm_extent/4., max_fwhm_extent*2), 'y_stddev': (max_fwhm_extent/4., max_fwhm_extent*2), 'theta': (-np.pi/2, np.pi/2)}
            )

            fit_p = fitting.LevMarLSQFitter()
            try:
                g_fit = fit_p(g_init, xx_sub, yy_sub, data_fit)
            except Exception:
                continue # skip if fit fails

            # Evaluate fitted model over full local cutout
            yy_full, xx_full = np.indices(cut_local.shape)
            model_vals = g_fit(xx_full, yy_full)

            # Mask pixels above threshold for external sources
            # (0.1353 = exp(-2), i.e. the 2-sigma level of the Gaussian peak)
            threshold = g_fit.amplitude.value * 0.1353
            mask_bg[model_vals > threshold] = False


        ### --- From now on, all photometry and background estimation is done on cutout_masked (external sources removed) --- ###
        # --- Apply external sources mask → set masked pixels to np.nan --- #
        cutout_masked = np.copy(cutout)
        cutout_masked[~mask_bg] = np.nan



        # ---- interpolate NaNs at the edges of the maps --- #
        # --- Count NaNs in edge pixels ---
        edge_thickness = round(max_fwhm_extent) # pixels to define the edge region

        edge_mask = np.zeros_like(cutout_masked, dtype=bool)
        edge_mask[:edge_thickness, :] = True # top edge
        edge_mask[-edge_thickness:, :] = True # bottom edge
        edge_mask[:, :edge_thickness] = True # left edge
        edge_mask[:, -edge_thickness:] = True # right edge

        n_edge_total = np.sum(edge_mask)
        n_edge_nan = np.sum(edge_mask & ~np.isfinite(cutout_masked))
        nan_fraction = n_edge_nan / n_edge_total

        # --- Only interpolate if edge NaNs < threshold ---
        nan_threshold = 0.3 # allow up to 30% NaNs in edge region

        if nan_fraction < nan_threshold:
            sigma = 2.0
            kernel = Gaussian2DKernel(x_stddev=sigma)
            interpolated_map = interpolate_replace_nans(cutout_masked, kernel)

            cutout_masked = interpolated_map
            # NOTE(review): resetting mask_bg from the pre-masking `cutout`
            # re-validates the external-source pixels that were just masked
            # (their values are now interpolated) — confirm this is intended.
            mask_bg = np.isfinite(cutout)

            if np.any(~np.isfinite(interpolated_map)):
                logger_file_only.warning("⚠️ Some NaNs remain after interpolation!")
        else:
            logger_file_only.warning(f"⚠️ Too many NaNs at edges (fraction: {nan_fraction:.2f}) — interpolation skipped.")



        # --- Mask the main source(s) with a 2D Gaussian fit, for background
        #     estimation purposes only (photometry keeps the source) --- #
        mask_bg_all = np.copy(mask_bg)

        cut_local = cutout_masked
        for xc, yc in main_sources:
            xc_int = int(round(xc))
            yc_int = int(round(yc))

            # Define small fit window around each source (half-size in pixels)
            fit_size = round(max_fwhm_extent/2.) # half-size
            xfit_min = max(0, xc_int - fit_size)
            xfit_max = min(cut_local.shape[1], xc_int + fit_size + 1)
            yfit_min = max(0, yc_int - fit_size)
            yfit_max = min(cut_local.shape[0], yc_int + fit_size + 1)

            data_fit = cut_local[yfit_min:yfit_max, xfit_min:xfit_max]
            if data_fit.size < max_fwhm_extent*2 or np.all(np.isnan(data_fit)) or np.nanmax(data_fit) <= 0:
                continue # skip this source if empty or invalid

            yy_sub, xx_sub = np.mgrid[yfit_min:yfit_max, xfit_min:xfit_max]

            # Define and fit elliptical Gaussian
            g_init = models.Gaussian2D(
                amplitude=np.nanmax(data_fit),
                x_mean=xc,
                y_mean=yc,
                x_stddev=max_fwhm_extent,
                y_stddev=max_fwhm_extent,
                theta=0.0,
                bounds={'x_stddev': (max_fwhm_extent/4., max_fwhm_extent*2), 'y_stddev': (max_fwhm_extent/4., max_fwhm_extent*2), 'theta': (-np.pi/2, np.pi/2)}
            )

            fit_p = fitting.LevMarLSQFitter()
            try:
                g_fit = fit_p(g_init, xx_sub, yy_sub, data_fit)
            except Exception:
                continue # skip if fit fails

            # Evaluate fitted model over full local cutout
            yy_full, xx_full = np.indices(cut_local.shape)
            model_vals = g_fit(xx_full, yy_full)

            # Mask pixels above threshold for main sources
            # (0.1353 = exp(-2), i.e. the 2-sigma level of the Gaussian peak)
            threshold = g_fit.amplitude.value * 0.1353
            mask_bg_all[model_vals > threshold] = False

        # --- Apply main sources mask → set masked pixels to np.nan --- #
        cutout_masked_all = np.copy(cutout_masked)
        cutout_masked_all[~mask_bg_all] = np.nan


        # - Collect good (background-only) pixels and sigma-clip outliers - #
        y_bg, x_bg = np.where(mask_bg_all)
        z_bg = cutout_masked_all[y_bg, x_bg]

        sigma_clip = SigmaClip(sigma=3.0, maxiters=10)
        clipped = sigma_clip(z_bg)
        valid = ~clipped.mask

        x_valid = x_bg[valid]
        y_valid = y_bg[valid]
        z_valid = clipped.data[valid]


        # - identify the reference mask to estimate best_min from the first run - #
        if cutout_reference_mask is None:
            cutout_reference_mask = np.copy(cutout_masked_all)
            ref_ny, ref_nx = cutout_reference_mask.shape
            ref_box_size = box



        # ------------------ Loop over polynomial orders ------------------
        for order in pol_orders_separate:
            # Build design matrix with all monomials x^dx * y^dy of total
            # degree <= order; column names track the coefficient labels.
            terms = []
            param_names = []
            for dx in range(order + 1):
                for dy in range(order + 1 - dx):
                    terms.append((x_valid ** dx) * (y_valid ** dy))
                    param_names.append(f"c{dx}_{dy}")

            A = np.vstack(terms).T
            add_intercept = False
            if "c0_0" not in param_names:
                # Should not trigger for order >= 0 (c0_0 always generated),
                # but kept as a safety net for the intercept column.
                A = np.column_stack([np.ones_like(z_valid), A])
                param_names = ["c0_0"] + param_names
                add_intercept = True


            # --- run chosen fitter algorithm --- #
            for method_name, eps, reg in fitters:
                try:
                    if reg is None:
                        # Least-squares case
                        coeffs, _, _, _ = np.linalg.lstsq(A, z_valid, rcond=None)
                    else:
                        reg.fit(A, z_valid)
                        coeffs = reg.coef_
                        if add_intercept:
                            coeffs[0] = reg.intercept_
                except Exception as e:
                    logger_file_only.warning(f"[FAIL] {method_name} fit failed (order={order}, ε={eps}): {e}")
                    continue

                # Map coefficient labels to fitted values
                coeff_dict = dict(zip(param_names, coeffs))


                # --- Evaluate the metric on a region of the reference size,
                #     common to all runs, so metrics are comparable --- #
                if fix_min_box != 0:
                    half_ref_box = ref_box_size // 2 -1

                    x_start = max(0, int((x0)) - half_ref_box)
                    x_end = min(nx, int(x0) + half_ref_box +1)
                    y_start = max(0, int((y0)) - half_ref_box)
                    y_end = min(ny, int(y0) + half_ref_box +1)

                    # --- Check bounds ---
                    # NOTE(review): x_start/y_start are clamped to >= 0 by
                    # max() above, so this branch is unreachable as written.
                    if (x_start < 0 or y_start < 0):
                        x_start = 0
                        y_start = 0
                        logger_file_only.warning(f"[SKIP] Box size {box} cannot be cropped to match reference.")
                        continue # this cutout is too small to extract the reference region
                    if (x_end > cutout_masked_all.shape[1]):
                        x_end = cutout_masked_all.shape[1]

                    if (y_end > cutout_masked_all.shape[0]):
                        y_end = cutout_masked_all.shape[0]
                    cutout_eval = cutout_masked_all[y_start:y_end, x_start:x_end]
                else:
                    x_start = 0
                    x_end = box_sizes[0]
                    y_start = 0
                    y_end = box_sizes[1]
                    cutout_eval = cutout_masked_all


                # --- Pixels finite in BOTH the reference mask and this run ---
                # NOTE(review): assumes cutout_reference_mask and cutout_eval
                # have compatible shapes; if a later box yields a different
                # crop size this `&` would fail — confirm upstream guarantees.
                shared_valid_mask = np.isfinite(cutout_reference_mask) & np.isfinite(cutout_eval)

                if np.count_nonzero(shared_valid_mask) < 10:
                    continue # Not enough shared pixels


                yy_best_min, xx_best_min = np.where(shared_valid_mask)
                z_valid_best_min = cutout_eval[yy_best_min, xx_best_min]
                x_valid_best_min = xx_best_min
                y_valid_best_min = yy_best_min


                # Evaluate the polynomial model on the shared pixels.
                # NOTE(review): these coordinates are in the cropped
                # cutout_eval frame while the coefficients were fitted in the
                # full-cutout frame (no x_start/y_start offset applied) —
                # verify the two frames are meant to coincide here.
                bg_model_local_valid_best_min = np.zeros_like(z_valid_best_min)
                for pname, val in coeff_dict.items():
                    dx, dy = map(int, pname[1:].split("_"))
                    bg_model_local_valid_best_min += val * (x_valid_best_min ** dx) * (y_valid_best_min ** dy)

                # Residual of the model against the shared background pixels
                residual_valid_best_min = bg_model_local_valid_best_min - z_valid_best_min


                # Candidate selection metrics: normalized MSE, BIC, reduced chi2
                mse = np.mean(residual_valid_best_min ** 2)
                norm = np.mean(z_valid ** 2) + 1e-12
                nmse = mse / norm

                k_params = len(coeff_dict)
                n_points = len(z_valid)
                bic = n_points * np.log(mse) + k_params * np.log(n_points)

                std_res = np.nanstd(residual_valid_best_min)
                std_res = std_res if std_res > 0 else 1e-10
                redchi = np.sum((residual_valid_best_min / std_res) ** 2) / (n_points - k_params)

                # Pick the metric requested by the caller
                if minimize_method == "nmse":
                    my_min = nmse
                elif minimize_method == "bic":
                    my_min = bic
                elif minimize_method == "redchi":
                    my_min = redchi
                else:
                    my_min = nmse # fallback



                if my_min < best_min:
                    # New best model: evaluate full surface and snapshot all
                    # state needed by the caller.
                    bg_model_full = np.zeros_like(xx, dtype=np.float64)
                    for pname, val in coeff_dict.items():
                        dx, dy = map(int, pname[1:].split("_"))
                        bg_model_full += val * (xx ** dx) * (yy ** dy)

                    #- build cutout header: shift WCS reference pixel by the crop offset -#
                    cutout_wcs = WCS(header).deepcopy()
                    cutout_wcs.wcs.crpix[0] -= xmin # CRPIX1
                    cutout_wcs.wcs.crpix[1] -= ymin # CRPIX2
                    cutout_header = cutout_wcs.to_header()
                    #- preserve other non-WCS cards (e.g. instrument, DATE-OBS) -#
                    cutout_header.update({k: header[k] for k in header if k not in cutout_header and k not in ['COMMENT', 'HISTORY']})

                    best_cutout = cutout
                    best_cutout_masked = cutout_masked
                    best_cutout_masked_full = cutout_masked_all
                    best_bg_model = bg_model_full
                    best_median_cutout = median_valid_cutout
                    best_header = cutout_header
                    best_mask_bg = mask_bg
                    best_x0 = x0
                    best_y0 = y0
                    best_xx = xx
                    best_yy = yy
                    best_xmin = xmin
                    best_xmax = xmax
                    best_ymin = ymin
                    best_ymax = ymax
                    best_params = coeff_dict
                    best_order = order
                    best_box_sizes = [box]
                    best_method = method_name
                    best_eps = eps

                    best_min = my_min




    # ------------------ Final background subtraction ------------------
    if best_order is None:
        # If no valid background was found, return unmodified cutout.
        # NOTE(review): relies on `cutout`/`cutout_masked`/`box` from the
        # last loop iteration; if box_sizes is empty or every iteration hit
        # the early `continue`, this raises NameError — verify upstream.
        logger_file_only.warning("[WARNING] Background fit failed; returning original cutout.")
        return cutout_masked, np.zeros_like(cutout), None, np.zeros_like(cutout), np.zeros_like(cutout), 0, 0, 0, 0, 0, 0, 0, 0, [box], 0, {}

    else:
        # Subtract background from the winning cutout (in place); re-add the
        # initially removed median so best_bg_model is in original units.
        best_cutout -= best_bg_model
        best_cutout_masked -= best_bg_model
        best_bg_model = best_bg_model + best_median_cutout

        logger_file_only.info(f"[INFO] Background subtracted using order {best_order} polynomial.")

        return best_cutout_masked, best_cutout_masked_full, best_header, best_bg_model, best_mask_bg, best_x0, best_y0, best_xx, best_yy, best_xmin, best_xmax, best_ymin, best_ymax, best_box_sizes, best_order, best_params
hyper_py/config.py ADDED
@@ -0,0 +1,43 @@
1
+
2
+ import yaml
3
+ import os
4
+
5
class HyperConfig:
    """Thin wrapper around the Hyper YAML configuration.

    Loads a YAML file into a plain dict, validates that the required
    top-level sections are present, and offers dict-style access with
    per-key defaults.
    """

    def __init__(self, yaml_file):
        """Load and validate the configuration from *yaml_file*.

        Raises
        ------
        FileNotFoundError
            If *yaml_file* does not exist.
        ValueError
            If a required section is missing or cross-field checks fail.
        """
        if not os.path.exists(yaml_file):
            raise FileNotFoundError(f"Configuration file '{yaml_file}' not found.")
        with open(yaml_file, 'r') as f:
            self.config = yaml.safe_load(f)
        self.validate()

    def get(self, section, key=None, default=None):
        """Return ``config[section][key]``, or the whole section when *key* is None.

        *default* is returned when the section or the key is absent.
        """
        # Fix: compare against None (not truthiness) so falsy-but-valid keys
        # such as "" or 0 are still looked up inside the section instead of
        # silently returning the whole section.
        if key is not None:
            return self.config.get(section, {}).get(key, default)
        return self.config.get(section, default)

    def validate(self):
        """Check that all required top-level sections exist.

        Raises
        ------
        ValueError
            On the first missing section, or when 'use_this_rms' is set
            without a corresponding 'this_rms_value'.
        """
        required_sections = [
            'paths', 'units', 'control',
            'survey', 'detection', 'photometry',
            'background'
        ]
        for section in required_sections:
            if section not in self.config:
                raise ValueError(f"Missing required config section: '{section}'")

        # Cross-field consistency: a fixed RMS value must be supplied when
        # the user asks to use one.
        if self.get('control', 'use_this_rms') and self.get('control', 'this_rms_value') is None:
            raise ValueError("If 'use_this_rms' is true, 'this_rms_value' must be defined.")


    def to_dict(self):
        """Return the raw config dictionary for serialization or multiprocessing."""
        return self.config

    @staticmethod
    def from_dict(config_dict):
        """Create a HyperConfig from an existing dictionary (skips file I/O)."""
        cfg = HyperConfig.__new__(HyperConfig)
        cfg.config = config_dict
        cfg.validate()
        return cfg
@@ -0,0 +1,160 @@
1
def create_background_cubes(background_slices, slice_cutout_header, cube_header, dir_slices_out, fix_min_box, convert_mjy, logger):
    """
    Stack per-slice background maps into FITS cubes and write them to disk.

    Two products are written into *dir_slices_out*:
    ``background_cube_cut.fits`` — all slices centrally cropped/padded to
    the smallest common (ny, nx); and, when *fix_min_box* != 0,
    ``background_cube_fullsize.fits`` — each cropped slice pasted back at
    its sky position into a NaN-filled full-size frame.

    Parameters
    ----------
    background_slices : list of 2D array
        One background map per cube slice (shapes may differ).
    slice_cutout_header : list of fits.Header
        One WCS header per slice, matching *background_slices*.
    cube_header : fits.Header
        Header of the original full-size cube; template for the outputs.
    dir_slices_out : str
        Output directory for the cube FITS files.
    fix_min_box : int
        0 means the background was estimated on the whole map (no padded
        full-size cube needed); any other value triggers the second product.
    convert_mjy : bool
        If True, BUNIT is written as 'mJy', otherwise 'Jy'.
    logger : Logger
        Logger for status messages.
    """

    import os
    import numpy as np
    from astropy.io import fits
    from astropy.wcs import WCS

    # 1. Determine common crop size: smallest ny and nx over all slices
    all_shapes = [bg.shape for bg in background_slices]
    ny_list = [s[0] for s in all_shapes]
    nx_list = [s[1] for s in all_shapes]
    min_ny = min(ny_list)
    min_nx = min(nx_list)

    # 2. Find index of slice matching both min_ny and min_nx
    matching_index = None
    for i, (ny, nx) in enumerate(all_shapes):
        if ny == min_ny and nx == min_nx:
            matching_index = i
            break

    # 3. If no exact match, find best fit (one axis matches)
    if matching_index is None:
        for i, (ny, nx) in enumerate(all_shapes):
            if ny == min_ny or nx == min_nx:
                matching_index = i
                break

    # If still None (should not happen: min_ny/min_nx come from the same
    # shape list), fall back to the first slice
    if matching_index is None:
        matching_index = 0

    # 4. Use that slice's header as the spatial WCS for the cropped cube
    cropped_header = slice_cutout_header[matching_index].copy()

    # 5. Central crop, with NaN padding when the target exceeds the source
    def central_crop_or_pad(array, target_ny, target_nx):
        # Returns `array` itself (no copy) when it already matches the
        # target; otherwise a new NaN frame with the central overlap pasted.
        ny, nx = array.shape
        if ny == target_ny and nx == target_nx:
            return array
        else:
            cropped = np.full((target_ny, target_nx), np.nan, dtype=array.dtype)
            y0 = (ny - target_ny) // 2
            x0 = (nx - target_nx) // 2
            y1 = y0 + target_ny
            x1 = x0 + target_nx
            # Clip to valid range (handles both crop and pad directions)
            y0 = max(0, y0)
            x0 = max(0, x0)
            y1 = min(ny, y1)
            x1 = min(nx, x1)
            sub = array[y0:y1, x0:x1]

            # Paste subarray into center of padded frame
            sy, sx = sub.shape
            start_y = (target_ny - sy) // 2
            start_x = (target_nx - sx) // 2
            cropped[start_y:start_y+sy, start_x:start_x+sx] = sub
            return cropped

    # 6. Centrally crop or pad all backgrounds to (min_ny, min_nx)
    cropped_bgs = [central_crop_or_pad(bg, min_ny, min_nx) for bg in background_slices]

    # 7. Stack into cube (slice index becomes axis 0)
    bg_cube = np.stack(cropped_bgs, axis=0)

    # 8. Adjust WCS header (preserve original logic)
    new_header = cube_header.copy()

    # 9. Update spatial WCS keywords (X and Y axes) from the cropped header
    spatial_keys = [
        'NAXIS1', 'NAXIS2',
        'CRPIX1', 'CRPIX2',
        'CRVAL1', 'CRVAL2',
        'CDELT1', 'CDELT2',
        'CTYPE1', 'CTYPE2',
        'CUNIT1', 'CUNIT2',
        'CD1_1', 'CD1_2', 'CD2_1', 'CD2_2',
        'PC1_1', 'PC1_2', 'PC2_1', 'PC2_2',
        'CROTA1', 'CROTA2'
    ]

    for key in spatial_keys:
        if key in cropped_header:
            new_header[key] = cropped_header[key]

    # 10. Update full shape to match the background cube
    new_header['NAXIS'] = 3
    new_header['NAXIS1'] = bg_cube.shape[2] # X axis
    new_header['NAXIS2'] = bg_cube.shape[1] # Y axis
    new_header['NAXIS3'] = bg_cube.shape[0] # Z axis

    # 11. Ensure WCSAXES is at least 3
    new_header['WCSAXES'] = max(new_header.get('WCSAXES', 3), 3)

    # Update brightness units to match the pipeline's flux convention
    if convert_mjy:
        new_header['BUNIT'] = 'mJy'
    else:
        new_header['BUNIT'] = 'Jy'

    # Optional: clean inconsistent axis-specific keys (e.g., if 4D originally)
    for ax in [4, 5]:
        for prefix in ['CTYPE', 'CRPIX', 'CRVAL', 'CDELT', 'CUNIT']:
            key = f"{prefix}{ax}"
            if key in new_header:
                del new_header[key]

    output_cube_path = os.path.join(dir_slices_out, "background_cube_cut.fits")
    fits.PrimaryHDU(data=bg_cube, header=new_header).writeto(output_cube_path, overwrite=True)
    logger.info(f"📦 Background cube saved to: {output_cube_path}")

    # === Also create a full-size cube with padded background slices if cropped size is != original size (fix_min_box != 0) === #
    # Compute each slice's center position in full-map pixel coordinates by
    # going cutout-pixels -> sky -> full-map pixels.
    wcs_full = WCS(cube_header, naxis=2)
    xcen_all = []
    ycen_all = []

    for hdr in slice_cutout_header:
        # All cropped slices share the same (min_ny, min_nx) shape, so the
        # first one's shape is used for every slice's center.
        ny, nx = cropped_bgs[0].shape
        x_c = nx / 2.0
        y_c = ny / 2.0
        wcs_cutout = WCS(hdr, naxis=2)
        skycoord = wcs_cutout.pixel_to_world(x_c, y_c)
        x_pix, y_pix = wcs_full.world_to_pixel(skycoord)
        xcen_all.append(x_pix)
        ycen_all.append(y_pix)

    if fix_min_box != 0:
        full_ny = cube_header['NAXIS2']
        full_nx = cube_header['NAXIS1']
        padded_bgs = []
        for i, cropped in enumerate(cropped_bgs):
            # NaN-filled full-size frame; paste the cropped slice centered
            # on its full-map position, clipped to the frame edges.
            padded = np.full((full_ny, full_nx), np.nan, dtype=float)
            cy, cx = cropped.shape
            xcen_full = xcen_all[i]
            ycen_full = ycen_all[i]
            x0 = int(round(xcen_full - cx // 2))
            y0 = int(round(ycen_full - cy // 2))
            x0 = max(0, x0)
            y0 = max(0, y0)
            x1 = min(x0 + cx, full_nx)
            y1 = min(y0 + cy, full_ny)
            # NOTE(review): when x0/y0 were clamped to 0 this takes the
            # slice's leading rows/cols rather than the overlapping region —
            # confirm slices near the map edge are placed as intended.
            sub = cropped[0:y1 - y0, 0:x1 - x0]
            padded[y0:y1, x0:x1] = sub
            padded_bgs.append(padded)
        bg_cube_full = np.stack(padded_bgs, axis=0)
        padded_header = cube_header.copy()
        padded_header['NAXIS1'] = full_nx
        padded_header['NAXIS2'] = full_ny
        padded_header['NAXIS3'] = bg_cube_full.shape[0]
        padded_header['WCSAXES'] = max(padded_header.get('WCSAXES', 3), 3)
        padded_header['BUNIT'] = 'mJy' if convert_mjy else 'Jy'
        # Remove stale higher-axis keywords, as for the cropped cube
        for ax in [4, 5]:
            for prefix in ['CTYPE', 'CRPIX', 'CRVAL', 'CDELT', 'CUNIT']:
                key = f"{prefix}{ax}"
                if key in padded_header:
                    del padded_header[key]
        output_cube_full_path = os.path.join(dir_slices_out, "background_cube_fullsize.fits")
        fits.PrimaryHDU(data=bg_cube_full, header=padded_header).writeto(output_cube_full_path, overwrite=True)
        logger.info(f"📦 Full-size background cube saved to: {output_cube_full_path}")
+ logger.info(f"📦 Full-size background cube saved to: {output_cube_full_path}")