hyper-py-photometry 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hyper_py/gaussfit.py ADDED
@@ -0,0 +1,519 @@
+ import os
+
+ import matplotlib.pyplot as plt
+ import numpy as np
+ from astropy.io import fits
+ from astropy.stats import SigmaClip, sigma_clipped_stats
+ from astropy.wcs import WCS
+ from lmfit import minimize, Parameters
+ from photutils.aperture import CircularAperture
+
+ from hyper_py.visualization import plot_fit_summary
+ from .bkg_single import masked_background_single_sources
+
+ def fit_isolated_gaussian(image, xcen, ycen, all_sources_xcen, all_sources_ycen, source_id, map_struct, suffix, config, logger, logger_file_only):
+     """
+     Fit a single 2D elliptical Gaussian + polynomial background to an isolated source.
+
+     Parameters
+     ----------
+     image : 2D numpy array
+         The full input map.
+     xcen, ycen : float
+         Pixel coordinates of the source center.
+     all_sources_xcen, all_sources_ycen : array-like
+         Pixel coordinates of all detected sources (used to mask external sources in the cutout).
+     source_id : int
+         Index of the source being fitted.
+     map_struct : dict
+         Map metadata (header, pixel size, beam size).
+     suffix : str
+         Label used to build output file names.
+     config : HyperConfig object
+         Configuration settings from YAML file.
+     logger, logger_file_only : logging.Logger
+         Loggers for console+file and file-only messages.
+
+     Returns
+     -------
+     fit_status : int
+         1 if at least one fit succeeded, 0 otherwise.
+     best_result : lmfit MinimizerResult or None
+         Best-fit parameters.
+     model_fn : callable or None
+         Model function used in the fit (Gaussian + optional polynomial background).
+     best_order : int or None
+         Polynomial order used for the background.
+     best_cutout : 2D numpy array
+         Local image patch around the source (external sources masked).
+     best_slice : tuple of slices
+         Slices used to extract the cutout from the full map.
+     bg_mean : float
+         Median background level estimated on the cutout.
+     best_bg_model : 2D numpy array or None
+         Background model (only when the background is fitted separately).
+     best_header : astropy.io.fits.Header or None
+         Header of the cutout with updated WCS.
+     best_nmse, best_redchi, best_bic : float
+         Goodness-of-fit statistics of the best fit.
+     """
+
+
+     # --- Load config parameters ---
+     dir_root = config.get("paths", "output")["dir_root"]
+     beam_pix = map_struct['beam_dim'] / map_struct['pix_dim'] / 2.3548   # beam sigma size in pixels
+     fwhm_beam_pix = map_struct['beam_dim'] / map_struct['pix_dim']       # beam FWHM size in pixels
+     aper_inf = config.get("photometry", "aper_inf", 1.0) * beam_pix
+     aper_sup = config.get("photometry", "aper_sup", 2.0) * beam_pix
+     max_fwhm_extent = aper_sup * 2.3548   # twice major FWHM in pixels
+
+
+     convert_mjy = config.get("units", "convert_mJy")
+
+     fit_cfg = config.get("fit_options", {})
+     minimize_method = config.get("fit_options", "min_method", "redchi")
+     weight_choice = fit_cfg.get("weights", None)
+     weight_power_snr = fit_cfg.get("power_snr", 1.0)
+
+     fix_min_box = config.get("background", "fix_min_box", 3)   # minimum padding value (multiple of FWHM)
+     fix_max_box = config.get("background", "fix_max_box", 5)   # maximum padding value (multiple of FWHM)
+
+     fit_gauss_and_bg_together = config.get("background", "fit_gauss_and_bg_together", False)
+     fit_separately = config.get("background", "fit_gauss_and_bg_separately", False)
+     orders = config.get("background", "polynomial_orders", [0, 1, 2]) if fit_gauss_and_bg_together else [0]
+     pol_orders_separate = config.get("background", "pol_orders_separate", [0])   # only if fit_separately
+
+
+     use_l2 = fit_cfg.get("use_l2_regularization", False)
+     lambda_l2 = fit_cfg.get("lambda_l2", 1e-3)
+
+     try:
+         lambda_l2 = float(lambda_l2)
+     except Exception as e:
+         logger.warning(f"[WARNING] lambda_l2 is not a float: {lambda_l2} → {e}")
+         lambda_l2 = 1e-3   # fallback
+
+
+     # - initialize map and header - #
+     header = map_struct['header']
+     ny, nx = image.shape
+
+
+     # === Determine box size ===
+     if fix_min_box == 0:
+         # Use entire map size directly
+         box_sizes = list((ny, nx))
+     else:
+         # Standard logic for square box sizes (in pixels)
+         dynamic_min_box = int(np.ceil(fix_min_box * fwhm_beam_pix) * 2 + max_fwhm_extent * 2)
+         dynamic_max_box = int(np.ceil(fix_max_box * fwhm_beam_pix) * 2 + max_fwhm_extent * 2)
+         box_sizes = list(range(dynamic_min_box + 1, dynamic_max_box + 2, 2))   # ensure odd
+
+
+
+     # - initialize params - #
+     best_result = None
+     best_min = np.inf
+     best_cutout = None
+     best_header = None
+     best_slice = None
+     best_order = None
+     bg_mean = 0.0
+     best_box = None
+
+
+     # --- Background estimation on cutout masked (optional) --- #
+     # cutout_ref = np.copy(cutout)
+     if fit_separately:
+         cutout_after_bg, cutout_full_with_bg, cutout_header, bg_model, mask_bg, x0, y0, xx, yy, xmin, xmax, ymin, ymax, box_sizes_after_bg, back_order, poly_params = masked_background_single_sources(
+             minimize_method,
+             image,
+             header,
+             xcen,
+             ycen,
+             nx, ny,
+             all_sources_xcen,
+             all_sources_ycen,
+             max_fwhm_extent,
+             box_sizes,
+             pol_orders_separate,
+             suffix,
+             source_id,
+             config,
+             logger,
+             logger_file_only
+         )
+
+         # - save original map without background - #
+         cutout = np.copy(cutout_after_bg)
+         cutout_masked = cutout_after_bg
+         cutout_masked_full = cutout_full_with_bg
+         box_sizes = box_sizes_after_bg
+     else:
+         bg_model = None
+
+
+
+     # --- Run over the various box sizes (if fit_separately = True this is the best size identified in the background fit) --- #
+     for box in box_sizes:
+
+         if not fit_separately:
+             if fix_min_box != 0:
+                 half_box = box // 2 - 1
+                 xmin = max(0, int(np.min(xcen)) - half_box)
+                 xmax = min(nx, int(np.max(xcen)) + half_box + 1)
+                 ymin = max(0, int(np.min(ycen)) - half_box)
+                 ymax = min(ny, int(np.max(ycen)) + half_box + 1)
+
+                 cutout = image[ymin:ymax, xmin:xmax].copy()
+             else:
+                 # full-map fit: the cutout spans the whole image
+                 xmin = 0
+                 xmax = nx
+                 ymin = 0
+                 ymax = ny
+                 cutout = image
+
+             if cutout.size == 0 or np.isnan(cutout).all():
+                 logger.warning("[WARNING] Empty or invalid cutout. Skipping.")
+                 continue
+
+             #- save cutout header -#
+             cutout_wcs = WCS(header).deepcopy()
+             cutout_wcs.wcs.crpix[0] -= xmin   # CRPIX1
+             cutout_wcs.wcs.crpix[1] -= ymin   # CRPIX2
+             cutout_header = cutout_wcs.to_header()
+             #- preserve other non-WCS cards (e.g. instrument, DATE-OBS) -#
+             cutout_header.update({k: header[k] for k in header if k not in cutout_header and k not in ['COMMENT', 'HISTORY']})
+
+             yy, xx = np.indices(cutout.shape)
+             x0 = xcen - xmin
+             y0 = ycen - ymin
+
+
+             #--- Identify external sources inside box ---#
+             mask = np.ones_like(cutout, dtype=bool)   # True = valid, False = masked
+             external_sources = []
+             for i in range(len(all_sources_xcen)):
+                 if i == source_id:
+                     continue   # skip sources belonging to current group
+                 sx = all_sources_xcen[i]
+                 sy = all_sources_ycen[i]
+
+                 if xmin <= sx <= xmax and ymin <= sy <= ymax and fix_min_box != 0:
+                     ex = sx - xmin
+                     ey = sy - ymin
+                     external_sources.append((ex, ey))   # local cutout coords
+
+                     # Define a bounding box around the source, clipped to cutout size
+                     masking_radius = max_fwhm_extent / 2.   # radius
+                     masking_radius_pix = np.round(masking_radius)
+
+                     xmin_box = max(0, int(ex - masking_radius_pix))
+                     xmax_box = min(mask.shape[1], int(ex + masking_radius_pix + 1))
+                     ymin_box = max(0, int(ey - masking_radius_pix))
+                     ymax_box = min(mask.shape[0], int(ey + masking_radius_pix + 1))
+
+                     # Mask the local region around the external source
+                     mask[ymin_box:ymax_box, xmin_box:xmax_box] = False
+
+
+             #--- Apply external sources mask → set masked pixels to np.nan ---#
+             cutout_masked = np.copy(cutout)
+             cutout_masked_full = np.copy(cutout)   # cutout with background retained (no separate background subtraction in this mode)
+             mask_bg = np.ones_like(cutout_masked, dtype=bool)
+             mask_bg[np.isnan(cutout_masked)] = False
+             mask_bg[~mask] = False   # mask external sources etc.
+
+             ### --- From now on, all photometry and background estimation is done on cutout_masked from external sources --- ###
+             cutout_masked[~mask_bg] = np.nan
+
+
+         # --- Fit single 2D elliptical Gaussian (+ background) --- #
+         # Mask NaNs before computing stats
+         valid = ~np.isnan(cutout_masked)
+         mean_bg, median_bg, std_bg = sigma_clipped_stats(cutout_masked[valid], sigma=3.0, maxiters=10)
+
+
+         # Create rms map and propagate NaNs
+         cutout_rms = np.full_like(cutout_masked, std_bg)
+         cutout_rms[~valid] = np.nan
+
+         weights = None
+         if weight_choice == "inverse_rms":
+             weights = 1.0 / (cutout_rms + mean_bg)
+         elif weight_choice == "snr":
+             weights = (cutout_masked / (cutout_rms + mean_bg))
+         elif weight_choice == "power_snr":
+             weights = ((cutout_masked / (cutout_rms + mean_bg)))**weight_power_snr
+         elif weight_choice == "map":
+             weights = cutout_masked
+         elif weight_choice == "mask":
+             mask_stats = ~SigmaClip(sigma=3.0)(cutout_masked).mask
+             weights = mask_stats.astype(float)
+
+
+         for order in orders:
+             try:
+                 vary = config.get("fit_options", "vary", True)
+                 params = Parameters()
+                 # local peak in a 3x3 window centred on the source position
+                 local_peak = np.nanmax(cutout_masked[int(y0)-1:int(y0)+2, int(x0)-1:int(x0)+2])
+
+                 # - peak in cutout masked is well-defined after background subtraction (fit_separately = True) - #
+                 if fit_separately:
+                     params.add("g_amplitude", value=local_peak, min=0.8*local_peak, max=1.3*local_peak)
+                 else:
+                     params.add("g_amplitude", value=local_peak, min=0.4*local_peak, max=1.5*local_peak)
+
+                 if vary:
+                     params.add("g_centerx", value=x0, min=x0 - 0.5, max=x0 + 0.5)
+                     params.add("g_centery", value=y0, min=y0 - 0.5, max=y0 + 0.5)
+                 else:
+                     params.add("g_centerx", value=x0, vary=False)
+                     params.add("g_centery", value=y0, vary=False)
+
+
+                 params.add("g_sigmax", value=(aper_inf+aper_sup)/2., min=aper_inf, max=aper_sup)
+                 params.add("g_sigmay", value=(aper_inf+aper_sup)/2., min=aper_inf, max=aper_sup)
+
+                 params.add("g_theta", value=0.0, min=-np.pi/2, max=np.pi/2)
+
+
+                 # --- Add full 2D polynomial background (including cross terms) ---
+                 if fit_gauss_and_bg_together:
+                     max_order_all = max(orders)
+
+                     for dx in range(max_order_all + 1):
+                         for dy in range(max_order_all + 1 - dx):
+                             pname = f"c{dx}_{dy}"
+                             # coefficients above the current order are frozen at zero so they do not contribute
+                             val = median_bg if (dx == 0 and dy == 0) else (1e-5 if dx + dy <= order else 0.0)
+                             params.add(pname, value=val, vary=(dx + dy <= order))
+
+
+                 def model_fn(p, x, y):
+                     A = p["g_amplitude"]
+                     x0 = p["g_centerx"]
+                     y0 = p["g_centery"]
+                     sx = p["g_sigmax"]
+                     sy = p["g_sigmay"]
+                     th = p["g_theta"]
+                     # coefficients of the rotated elliptical Gaussian
+                     a = (np.cos(th)**2)/(2*sx**2) + (np.sin(th)**2)/(2*sy**2)
+                     b = -np.sin(2*th)/(4*sx**2) + np.sin(2*th)/(4*sy**2)
+                     c = (np.sin(th)**2)/(2*sx**2) + (np.cos(th)**2)/(2*sy**2)
+                     model = A * np.exp(- (a*(x - x0)**2 + 2*b*(x - x0)*(y - y0) + c*(y - y0)**2))
+
+                     if fit_gauss_and_bg_together:
+                         # evaluate the 2D polynomial background (cross terms included) on the same grid
+                         max_order_all = max(orders)
+
+                         for dx in range(max_order_all + 1):
+                             for dy in range(max_order_all + 1 - dx):
+                                 model = model + p[f"c{dx}_{dy}"] * (x ** dx) * (y ** dy)
+
+                     # Final check
+                     model = np.where(np.isfinite(model), model, 0.0)
+                     return model
+
+
+                 def residual(p, x, y, data, weights=None):
+                     model = model_fn(p, x, y)
+                     resid = (model - data).ravel().astype(np.float64)
+
+                     if weights is not None:
+                         resid *= weights
+
+                     if use_l2 and fit_gauss_and_bg_together:
+                         penalty_values = [
+                             float(p[name].value)
+                             for name in p if name.startswith("c")
+                         ]
+
+                         if penalty_values:
+                             penalty_resid = lambda_l2 * np.array(penalty_values, dtype=np.float64)
+                             return np.concatenate([resid.ravel(), penalty_resid.ravel()])
+
+                     return resid
+
+
+                 fit_cfg = config.get("fit_options", {})
+                 minimize_keys = ["max_nfev", "xtol", "ftol", "gtol", "calc_covar", "loss", "f_scale"]
+                 minimize_kwargs = {}
+
+                 for key in minimize_keys:
+                     val = fit_cfg.get(key)
+                     if val is not None:
+                         if key == "calc_covar":
+                             minimize_kwargs[key] = bool(val)
+                         elif key == "max_nfev":
+                             minimize_kwargs[key] = int(val)
+                         elif key in ["loss"]:   # must be string
+                             minimize_kwargs[key] = str(val)
+                         else:
+                             minimize_kwargs[key] = float(val)
+
+
+
+                 # --- Call minimize with dynamic kwargs ONLY across good pixels (masked sources from external sources within each box) --- #
+                 valid = mask_bg.ravel()
+                 x_valid = xx.ravel()[valid]
+                 y_valid = yy.ravel()[valid]
+                 data_valid = cutout_masked.ravel()[valid]
+                 weights_valid = weights.ravel()[valid] if weights is not None else None
+
+
+                 result = minimize(
+                     residual,
+                     params,
+                     args=(x_valid.ravel(), y_valid.ravel(), data_valid),
+                     kws={'weights': weights_valid},
+                     method=fit_cfg.get("fit_method", "least_squares"),
+                     **minimize_kwargs
+                 )
+
+
+                 # --- Evaluate reduced chi**2, BIC and NMSE (Normalized Mean Squared Error) statistics --- #
+                 if result.success:
+                     # Evaluate model on grid #
+                     model_eval = model_fn(result.params, xx, yy)
+
+                     # Compute normalized mean squared error only on valid pixels
+                     valid_mask = np.isfinite(cutout_masked) & np.isfinite(model_eval)
+                     resid_map = (model_eval - cutout_masked)[valid_mask]
+                     mse = np.mean(resid_map**2)
+
+                     norm = np.mean(cutout_masked[valid_mask]**2) + 1e-12
+                     nmse = mse / norm
+
+                     redchi = result.redchi
+                     bic = result.bic
+
+                     if minimize_method == "redchi":
+                         my_min = redchi
+                     elif minimize_method == "nmse":
+                         my_min = nmse
+                     elif minimize_method == "bic":
+                         my_min = bic
+                     else:
+                         my_min = redchi   # fall back to reduced chi² if an unknown method is given
+                     logger_file_only.info(f"[SUCCESS] Fit (box={cutout.shape[1], cutout.shape[0]}, order={order}) → reduced chi² = {result.redchi:.5f}, NMSE = {nmse:.2e}, BIC = {bic:.2e}")
+                 else:
+                     nmse = np.nan
+                     redchi = np.nan
+                     bic = np.nan
+                     my_min = np.nan
+                     logger_file_only.error(f"[FAILURE] Fit failed (box={cutout.shape[1], cutout.shape[0]}, order={order})")
+
+                 if my_min < best_min:
+                     best_result = result
+                     best_nmse = nmse
+                     best_redchi = redchi
+                     best_bic = bic
+                     if fit_separately:
+                         best_order = back_order
+                     else:
+                         best_order = order
+                     best_cutout = cutout_masked
+                     best_cutout_masked_full = cutout_masked_full
+                     best_header = cutout_header
+
+                     if bg_model is not None:
+                         bg_model = np.where(np.isfinite(cutout_masked), bg_model, np.nan)
+                     best_bg_model = bg_model
+
+                     best_slice = (slice(ymin, ymax), slice(xmin, xmax))
+                     bg_mean = median_bg
+                     best_box = (cutout_masked.shape[1], cutout_masked.shape[0])
+                     best_min = my_min
+
+
+             except Exception as e:
+                 logger.error(f"[ERROR] Fit failed (box={cutout.shape[1], cutout.shape[0]}, order={order}): {e}")
+                 continue
+
+
+     if best_result is not None:
+         fit_status = 1   # 1 if True, 0 if False
+
+         yy, xx = np.indices(best_cutout.shape)
+
+         model_eval = model_fn(best_result.params, xx, yy)
+         residual_map = best_cutout - model_eval
+
+
+         # --- save best fit in fits format --- #
+         try:
+             fits_fitting = config.get("fits_output", "fits_fitting", False)
+             fits_output_dir_fitting = os.path.join(dir_root, config.get("fits_output", "fits_output_dir_fitting", "fits/fitting"))
+         except Exception:
+             fits_fitting = False
+
+         if fits_fitting:
+             def save_fits(array, output_dir, label_name, extension_name, header=None):
+                 # Ensure the output directory exists
+                 os.makedirs(output_dir, exist_ok=True)
+
+                 # Create the FITS filename based on the label and extension type
+                 filename = f"{output_dir}/{label_name}_{extension_name}.fits"
+
+                 # Create a PrimaryHDU object and write the array into the FITS file
+                 hdu = fits.PrimaryHDU(data=array, header=header)
+                 if convert_mjy:
+                     hdu.header['BUNIT'] = 'mJy/pixel'
+                 else:
+                     hdu.header['BUNIT'] = 'Jy/pixel'
+                 hdul = fits.HDUList([hdu])
+
+                 # Write the FITS file
+                 hdul.writeto(filename, overwrite=True)
+
+             save_fits(best_cutout, fits_output_dir_fitting, f"HYPER_MAP_{suffix}_ID_{source_id+1}", "cutout", header=best_header)
+             save_fits(best_cutout_masked_full, fits_output_dir_fitting, f"HYPER_MAP_{suffix}_ID_{source_id+1}", "cutout_masked_full", header=best_header)
+             save_fits(model_eval, fits_output_dir_fitting, f"HYPER_MAP_{suffix}_ID_{source_id+1}", "model", header=best_header)
+             save_fits(residual_map, fits_output_dir_fitting, f"HYPER_MAP_{suffix}_ID_{source_id+1}", "residual", header=best_header)
+
+
+         # --- visualize best fit in png format --- #
+         try:
+             visualize = config.get("visualization", "visualize_fitting")
+         except Exception:
+             visualize = False
+
+         try:
+             output_dir_vis = os.path.join(dir_root, config.get("visualization", "output_dir_fitting", "plots/fitting"))
+         except Exception:
+             output_dir_vis = "Images/Fitting"
+
+         if visualize:
+             logger_file_only.info("2D and 3D visualization of the Gaussian fits and residual ON")
+             plot_fit_summary(
+                 cutout=best_cutout,
+                 cutout_masked_full=best_cutout_masked_full,
+                 model=model_eval,
+                 residual=residual_map,
+                 output_dir=output_dir_vis,
+                 label_name=f"HYPER_MAP_{suffix}_ID_{source_id+1}" if source_id is not None else "source",
+                 box_size=best_box,
+                 poly_order=best_order,
+                 nmse=best_nmse
+             )
+
+
+         # --- Optionally save separated background model as FITS --- #
+         try:
+             fits_bg_separate = config.get("fits_output", "fits_bg_separate", False)
+             fits_output_dir = os.path.join(dir_root, config.get("fits_output", "fits_output_dir_bg_separate", "fits/bg_separate"))
+         except Exception:
+             fits_bg_separate = False
+
+         if fits_bg_separate:
+             os.makedirs(fits_output_dir, exist_ok=True)
+             label_name = f"HYPER_MAP_{suffix}_ID_{source_id + 1}"
+             filename = f"{fits_output_dir}/{label_name}_bg_masked3D.fits"
+
+             convert_mjy = config.get("units", "convert_mJy")
+
+             hdu = fits.PrimaryHDU(data=best_bg_model, header=best_header)
+             if convert_mjy:
+                 hdu.header['BUNIT'] = 'mJy/pixel'
+             else:
+                 hdu.header['BUNIT'] = 'Jy/pixel'
+             hdu.writeto(filename, overwrite=True)
+
+
+         # --- Optionally save separated background 3D visualization as png format --- #
+         try:
+             visualize_bg = config.get("visualization", "visualize_bg_separate", False)
+             output_dir = os.path.join(dir_root, config.get("visualization", "output_dir_bg_separate", "plots/bg_separate"))
+         except Exception:
+             visualize_bg = False
+
+         if visualize_bg:
+             os.makedirs(output_dir, exist_ok=True)
+             fig = plt.figure(figsize=(6, 5))
+             ax = fig.add_subplot(111, projection="3d")
+             ax.plot_surface(xx, yy, best_bg_model, cmap="viridis", linewidth=0, antialiased=True)
+             ax.set_xlabel("X (pix)", fontsize=8, fontweight="bold")
+             ax.set_ylabel("Y (pix)", fontsize=8, fontweight="bold")
+             ax.set_zlabel("Flux (Jy)", fontsize=8, fontweight="bold")
+             ax.set_title("Initial Background (Isolated)", fontsize=10, fontweight="bold")
+
+             label_str = f"HYPER_MAP_{suffix}_ID_{source_id + 1}"
+             outname = os.path.join(output_dir, f"{label_str}_bg_masked3D.png")
+             plt.savefig(outname, dpi=300, bbox_inches="tight")
+             plt.close()
+
+
+         return fit_status, best_result, model_fn, best_order, best_cutout, best_slice, bg_mean, best_bg_model, best_header, best_nmse, best_redchi, best_bic
+     else:
+         return 0, None, None, None, cutout_masked, (None, None), None, None, None, None, None, None
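
Note on the model above: model_fn evaluates a rotated elliptical 2D Gaussian plus an optional polynomial background. For reference only, a minimal standalone sketch of that Gaussian in plain NumPy (the function name elliptical_gaussian is illustrative and is not part of the package):

    import numpy as np

    def elliptical_gaussian(x, y, amp, x0, y0, sx, sy, theta):
        # Coefficients of the rotated quadratic form, as in model_fn above
        a = (np.cos(theta)**2) / (2 * sx**2) + (np.sin(theta)**2) / (2 * sy**2)
        b = -np.sin(2 * theta) / (4 * sx**2) + np.sin(2 * theta) / (4 * sy**2)
        c = (np.sin(theta)**2) / (2 * sx**2) + (np.cos(theta)**2) / (2 * sy**2)
        return amp * np.exp(-(a * (x - x0)**2 + 2 * b * (x - x0) * (y - y0) + c * (y - y0)**2))

    # Quick check: the value at the centre equals the amplitude
    yy, xx = np.indices((21, 21))
    model = elliptical_gaussian(xx, yy, amp=1.0, x0=10, y0=10, sx=2.0, sy=3.0, theta=0.3)
    # model[10, 10] == 1.0; the integrated flux is approximately 2*pi*amp*sx*sy
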
hyper_py/groups.py ADDED
@@ -0,0 +1,66 @@
+ import numpy as np
+
+ def group_sources(xcen, ycen, pix_dim, beam_dim, aper_sup):
+     '''
+     Groups sources based on proximity within the beam scale, ensuring no duplicate groups and transitive merging.
+
+     Parameters:
+         xcen, ycen: arrays of source positions in pixels
+         pix_dim: pixel scale (arcsec)
+         beam_dim: beam size (arcsec)
+         aper_sup: aperture scaling factor
+
+     Returns:
+         start_group: 1 for blended sources, 0 otherwise
+         common_group: 2D array of group membership (list of sources per group, padded with -1)
+         deblend: number of neighbors (for Gaussian deblending)
+     '''
+     n = len(xcen)
+     xcen = np.array(xcen)
+     ycen = np.array(ycen)
+
+     max_dist = beam_dim * aper_sup * 2.0
+     max_dist_pix = max_dist / pix_dim
+
+     start_group = np.zeros(n, dtype=int)
+     common_group = -1 * np.ones((n, n), dtype=int)   # Initialize common_group as a 2D array
+     deblend = np.zeros(n, dtype=int)
+
+     # Each source is initially its own group
+     group_assignment = np.arange(n, dtype=int)
+
+     def find(group_id):
+         if group_assignment[group_id] != group_id:
+             group_assignment[group_id] = find(group_assignment[group_id])   # Path compression
+         return group_assignment[group_id]
+
+     def union(group1, group2):
+         root1 = find(group1)
+         root2 = find(group2)
+         if root1 != root2:
+             group_assignment[root2] = root1   # Merge the groups
+
+     # First pass: union sources that are within max_dist_pix of each other
+     for i in range(n):
+         dx = xcen[i] - xcen
+         dy = ycen[i] - ycen
+         dist = np.sqrt(dx**2 + dy**2)
+         same_group = np.where(dist < max_dist_pix)[0]
+
+         for j in same_group:
+             if find(i) != find(j):
+                 union(i, j)
+
+     # Flatten all group pointers to their roots after the union phase
+     for i in range(n):
+         group_assignment[i] = find(i)
+
+     # Second pass: assign group info for each source
+     for i in range(n):
+         group_members = np.where(group_assignment == group_assignment[i])[0]
+         common_group[i, :len(group_members)] = group_members
+         deblend[i] = len(group_members) - 1
+         if len(group_members) > 1:
+             start_group[i] = 1
+
+     return start_group, common_group, deblend
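
For reference, a minimal usage sketch of group_sources (the input values below are made up for illustration; the import path assumes the installed hyper_py package exposes the module shown above):

    from hyper_py.groups import group_sources

    xcen = [10.0, 12.0, 40.0]                      # source x positions (pixels)
    ycen = [10.0, 11.0, 40.0]                      # source y positions (pixels)
    pix_dim, beam_dim, aper_sup = 2.0, 10.0, 1.0   # pixel scale (arcsec), beam size (arcsec), aperture factor

    start_group, common_group, deblend = group_sources(xcen, ycen, pix_dim, beam_dim, aper_sup)
    # max_dist_pix = beam_dim * aper_sup * 2 / pix_dim = 10 pixels, so sources 0 and 1 are blended:
    # start_group -> [1, 1, 0], deblend -> [1, 1, 0]
    # common_group[0] -> [0, 1, -1], common_group[2] -> [2, -1, -1]
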