hyper-py-photometry 0.1.0__py3-none-any.whl

@@ -0,0 +1,132 @@
+ import os
+ from astropy.table import Table
+
+
+ def write_tables(data_dict, output_dir, config, sigma_thres, real_rms, base_filename="hyper_output"):
+     '''
+     Write photometry results into formatted ECSV and IPAC tables,
+     preserving numeric data types and precision.
+
+     Parameters
+     ----------
+     data_dict : dict
+         Dictionary of columns for the output table.
+     output_dir : str
+         Directory where the files will be written.
+     config : object
+         Configuration object exposing get(section, option).
+     sigma_thres : float
+         Detection threshold (in sigma) recorded in the output header.
+     real_rms : float
+         Estimated map r.m.s. used to identify sources, recorded in the output header.
+     base_filename : str
+         Base name for the output files (without extension).
+     '''
+
+     if config.get('units', 'convert_mJy'):
+         flux_units = 'mJy'
+         flux_units_beam = 'mJy/beam'
+     else:
+         # integrated fluxes are in Jy; only the peak flux is per beam
+         flux_units = 'Jy'
+         flux_units_beam = 'Jy/beam'
+
+     units = {
+         'MAP_ID': '', 'HYPER_ID': '', 'BAND': 'GHz',
+         'RA': 'deg', 'DEC': 'deg', 'GLON': 'deg', 'GLAT': 'deg',
+         'FLUX': flux_units, 'FLUX_ERR': flux_units,
+         'FLUX_PEAK': flux_units_beam,
+         'RESIDUALS': flux_units,
+         'FWHM_1': 'arcsec', 'FWHM_2': 'arcsec',
+         'PA': 'deg', 'NMSE': '', 'CHI2_RED': '',
+         'POLYN': '', 'STATUS': '', 'DEBLEND': '', 'CLUSTER': '',
+     }
+
+     descriptions = {
+         'MAP_ID': 'Map identifier',
+         'HYPER_ID': 'Source identifier',
+         'FLUX_PEAK': f'Peak flux ({flux_units_beam})',
+         'FLUX': f'Integrated flux density ({flux_units})',
+         'FLUX_ERR': f'Flux density uncertainty ({flux_units})',
+         'RESIDUALS': f'Mean background level ({flux_units})',
+         'POLYN': 'Polynomial background order',
+         'NMSE': 'Normalized Mean Squared Error of fit: scale-independent metric of model-data agreement',
+         'CHI2_RED': 'Reduced chi-squared of Gaussian + background fit (valid only if residuals are Gaussian-distributed with constant variance)',
+         'BIC': 'Bayesian Information Criterion: model selection metric that penalizes overfitting; lower BIC indicates better trade-off between goodness of fit and model complexity',
+         'FWHM_1': 'First axis FWHM (arcsec)',
+         'FWHM_2': 'Second axis FWHM (arcsec)',
+         'PA': 'Position angle (deg East of North)',
+         'STATUS': 'Fit status flag: 1 = fit succeeded, 0 = fit failed',
+         'GLON': 'Galactic longitude (deg)',
+         'GLAT': 'Galactic latitude (deg)',
+         'RA': 'Right Ascension (deg, J2000)',
+         'DEC': 'Declination (deg, J2000)',
+         'DEBLEND': 'Deblending flag',
+         'CLUSTER': 'Cluster flag',
+     }
+
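+     # For reference (assumed conventions, stated here for readers of the output
+     # tables): with residuals r_i = data_i - model_i over n pixels and k free
+     # parameters, NMSE = sum(r_i**2) / sum(data_i**2), and the Gaussian-likelihood
+     # form of the BIC is n * ln(sum(r_i**2) / n) + k * ln(n).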
+     format_dict = {
+         'MAP_ID': 's', 'HYPER_ID': 'd', 'RA': '.5f', 'DEC': '.5f', 'GLON': '.5f', 'GLAT': '.5f',
+         'FLUX': '.3f', 'FLUX_ERR': '.3f', 'FLUX_PEAK': '.5f',
+         'FLUX_PEAK_JY': '.4f', 'RESIDUALS': '.5f',
+         'FWHM_1': '.3f', 'FWHM_2': '.3f', 'PA': '.1f',
+         'NMSE': '.3f', 'CHI2_RED': '.3f', 'BIC': '.2f', 'POLYN': 'd',
+         'STATUS': 'd', 'DEBLEND': 'd', 'CLUSTER': 'd',
+     }
+
+     # Build the table from the numeric columns
+     table = Table(data_dict)
+
+     # Set units, descriptions, and numeric formats
+     for col in table.colnames:
+         if col in units and units[col]:
+             table[col].unit = units[col]
+         if col in descriptions:
+             table[col].description = descriptions[col]
+         if col in format_dict:
+             table[col].format = format_dict[col]
+
+     # Build the custom header lines
+     if config.get('units', 'convert_mJy'):
+         rms_sentence = f"Estimated r.m.s. to identify sources: {real_rms:.5f} mJy"
+     else:
+         rms_sentence = f"Estimated r.m.s. to identify sources: {real_rms:.5f} Jy"
+
+     custom_header_lines = [
+         " ****************** Hyper photometry ******************",
+         f"Survey code: {config.get('survey', 'survey_code')}",
+         f"Detection threshold: {sigma_thres} sigma",
+         # f"Pixel size: {pix_dim:.3f} arcsec",
+         # f"Background type: {'none' if background_type == 'none' else background_type}",
+         f"Convert from MJy/sr: {config.get('units', 'convert_Jy')}",
+         f"Convert from Jy/beam: {config.get('units', 'convert_beam_Jy')}",
+         f"Convert to mJy: {config.get('units', 'convert_mJy')}",
+         rms_sentence,
+         " ******************************************************"
+     ]
+
+     # Add the custom header to the table metadata (ECSV comments)
+     table.meta['comments'] = custom_header_lines + table.meta.get('comments', [])
+
+     # Write the ECSV table with the custom header
+     csv_output_path = os.path.join(output_dir, base_filename + ".csv")
+     table.write(csv_output_path, format="ascii.ecsv", overwrite=True)
+
+     # For IPAC, copy rows into a fresh table to preserve compatibility
+     ipac_table = Table(names=table.colnames, dtype=[table[col].dtype for col in table.colnames])
+
+     for row in table:
+         ipac_table.add_row(row)
+
+     # Copy formatting, units, and descriptions over to the IPAC table
+     for col in ipac_table.colnames:
+         ipac_table[col].format = table[col].format
+         ipac_table[col].unit = table[col].unit
+         ipac_table[col].description = table[col].description
+
+     # Add the custom header lines to the IPAC table comments
+     ipac_table.meta['comments'] = custom_header_lines + [
+         f"{col} = {ipac_table[col].description}" for col in ipac_table.colnames
+     ]
+
+     # Write the IPAC table with the custom header
+     ipac_output_path = os.path.join(output_dir, base_filename + ".txt")
+     ipac_table.write(ipac_output_path, format="ipac", overwrite=True)
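A minimal sketch of how write_tables might be called. The Cfg stand-in, the column
values, and the output paths are hypothetical; any object exposing
get(section, option), like the package's own config, should work:

    class Cfg:  # hypothetical stand-in for the package's config object
        def __init__(self, d):
            self.d = d

        def get(self, section, option):
            return self.d[section][option]

    config = Cfg({
        'units': {'convert_mJy': True, 'convert_Jy': False, 'convert_beam_Jy': False},
        'survey': {'survey_code': 'DEMO'},
    })

    data = {
        'MAP_ID': ['m1'], 'HYPER_ID': [1],
        'RA': [83.82208], 'DEC': [-5.39111],
        'FLUX': [123.456], 'FLUX_ERR': [4.321], 'FLUX_PEAK': [98.76543],
        'FWHM_1': [18.2], 'FWHM_2': [15.7], 'PA': [42.0], 'STATUS': [1],
    }
    write_tables(data, '.', config, sigma_thres=5.0, real_rms=1.234)
    # -> ./hyper_output.csv (ECSV) and ./hyper_output.txt (IPAC)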
hyper_py/detection.py ADDED
@@ -0,0 +1,142 @@
+ import numpy as np
+ from astropy.stats import sigma_clipped_stats
+ from photutils.detection import DAOStarFinder
+ from scipy.ndimage import convolve
+
+
+ def select_channel_map(map_struct):
+     # Reference beam and pixel sizes (arcsec) -> beam FWHM in pixels
+     beam_dim_ref = map_struct["beam_dim"]
+     pix_dim_ref = map_struct["pix_dim"]
+     FWHM_pix = beam_dim_ref / pix_dim_ref
+
+     return map_struct, FWHM_pix
+
+
+ def high_pass_filter(image, kernel_dim=9):
+     # Clip the kernel to the image size and force an odd dimension
+     ny, nx = image.shape
+     kdim = min(kernel_dim, ny, nx)
+     if kdim % 2 == 0:
+         kdim -= 1
+
+     # Zero-sum kernel: -1 everywhere, kdim**2 - 1 at the center
+     kernel = np.full((kdim, kdim), -1.0)
+     kernel[kdim // 2, kdim // 2] = kdim**2 - 1.0
+     filtered = convolve(image.astype(float), kernel, mode='nearest')
+     filtered[filtered < 0] = 0.0
+     return filtered
+
+
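+ # Illustration (assumed values, not pipeline code): for kernel_dim=3 the kernel
+ # is [[-1,-1,-1],[-1,8,-1],[-1,-1,-1]]; its entries sum to zero, so a locally
+ # flat background convolves to ~0 while compact peaks survive the filter.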
+ def normalize_filtered_image(filtered):
+     # Work on a copy to avoid modifying the input in place
+     filtered = np.array(filtered, copy=True)
+
+     # Set all values <= 0 to 0
+     filtered[filtered <= 0] = 0.0
+
+     # Normalize to peak = 100 (only if the peak is positive)
+     peak = np.nanmax(filtered)
+     normalized = (filtered / peak) * 100.0 if peak > 0 else filtered
+
+     return normalized
+
+
+ # --- low threshold to keep as many sources as possible in this first filter stage --- #
+ def estimate_rms(image, sigma_clip=3.0):
+     # Sigma-clipped standard deviation of the positive pixels
+     values = image[image > 0]
+     if len(values) == 0:
+         return 0.0
+     _, _, sigma = sigma_clipped_stats(values, sigma=sigma_clip, maxiters=10, mask_value=0.0)
+
+     return sigma
+
+
+ def detect_peaks(filtered_image, threshold, fwhm_pix, roundlim=(-1.0, 1.0), sharplim=(-1.0, 2.0)):
+     # Note: DAOStarFinder returns None when no sources are found
+     finder = DAOStarFinder(
+         threshold=threshold,
+         fwhm=fwhm_pix,
+         roundlo=roundlim[0], roundhi=roundlim[1],
+         sharplo=sharplim[0], sharphi=sharplim[1]
+     )
+     return finder(filtered_image)
+
+
+ def filter_peaks(peaks_table, fwhm_pix, image_shape, min_dist_pix, aper_sup):
+     if min_dist_pix is None:
+         min_dist_pix = fwhm_pix
+
+     ny, nx = image_shape
+     margin = int(fwhm_pix) * aper_sup
+
+     # Step 1: remove peaks too close to the image border
+     valid = (
+         (peaks_table['xcentroid'] > margin) &
+         (peaks_table['xcentroid'] < nx - margin) &
+         (peaks_table['ycentroid'] > margin) &
+         (peaks_table['ycentroid'] < ny - margin)
+     )
+     peaks = peaks_table[valid]
+
+     # Step 2: remove close neighbors (keep the brightest of each pair)
+     coords = np.vstack([peaks['xcentroid'], peaks['ycentroid']]).T
+     keep = np.ones(len(peaks), dtype=bool)
+
+     for i in range(len(peaks)):
+         if not keep[i]:
+             continue
+         for j in range(i + 1, len(peaks)):
+             if not keep[j]:
+                 continue
+             dx = coords[i][0] - coords[j][0]
+             dy = coords[i][1] - coords[j][1]
+             dist = np.hypot(dx, dy)
+             if dist < min_dist_pix:
+                 if peaks[i]['peak'] >= peaks[j]['peak']:
+                     keep[j] = False
+                 else:
+                     keep[i] = False
+
+     return peaks[keep]
+
+
+ # --- keep only sources above a sigma-clipped r.m.s. estimated from the maps, or above a manual value ---
+ def filter_by_snr(peaks_table, real_map, rms_real, snr_threshold):
+     keep = []
+     for row in peaks_table:
+         x = int(round(row['xcentroid']))
+         y = int(round(row['ycentroid']))
+         if 0 <= y < real_map.shape[0] and 0 <= x < real_map.shape[1]:
+             peak_val = real_map[y, x]
+             snr = peak_val / rms_real if rms_real > 0 else 0
+             keep.append(snr >= snr_threshold)
+         else:
+             keep.append(False)
+
+     return peaks_table[keep]
+
+
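+ # Worked example (illustrative numbers): with rms_real = 0.01 Jy/beam and
+ # snr_threshold = 5, a peak of 0.06 Jy/beam has snr = 6.0 and is kept,
+ # while a peak of 0.04 Jy/beam has snr = 4.0 and is dropped.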
+ def detect_sources(map_struct_list, dist_limit_arcsec, real_map, rms_real, snr_threshold, roundlim, sharplim, config):
+     map_struct, FWHM_pix = select_channel_map(map_struct_list)
+     image = map_struct["map"]
+     pix_dim_ref = map_struct["pix_dim"]
+     beam_dim_ref = map_struct["beam_dim"]
+     aper_sup = config.get("photometry", "aper_sup")
+
+     # A zero distance limit defaults to one beam
+     my_dist_limit_arcsec = beam_dim_ref if dist_limit_arcsec == 0 else dist_limit_arcsec
+     dist_limit_pix = my_dist_limit_arcsec / pix_dim_ref
+
+     # --- identify multiple peaks in the filtered image, then keep good peaks above the real snr threshold --- #
+     filtered = high_pass_filter(image)
+     norm_filtered = normalize_filtered_image(filtered)
+
+     filtered_rms_detect = estimate_rms(norm_filtered)
+     filtered_threshold = 2. * filtered_rms_detect
+
+     peaks = detect_peaks(norm_filtered, filtered_threshold, FWHM_pix, roundlim=roundlim, sharplim=sharplim)
+     if peaks is None:
+         # DAOStarFinder found nothing: propagate the empty result
+         return None
+     good_peaks = filter_peaks(peaks, FWHM_pix, image.shape, dist_limit_pix, aper_sup)
+     final_sources = filter_by_snr(good_peaks, real_map, rms_real, snr_threshold)
+
+     return final_sources
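A minimal sketch of how detect_sources might be driven. The map_struct layout
follows select_channel_map above; the synthetic image, the Cfg stand-in, and all
numbers are hypothetical:

    import numpy as np

    rng = np.random.default_rng(0)
    image = rng.normal(0.0, 0.01, (256, 256))
    image[100:106, 120:126] += 0.5  # inject one artificial compact source

    map_struct = {"map": image, "pix_dim": 2.0, "beam_dim": 18.0}  # arcsec

    class Cfg:  # hypothetical config stand-in
        def get(self, section, option):
            return {"photometry": {"aper_sup": 2.0}}[section][option]

    sources = detect_sources(map_struct, dist_limit_arcsec=0,
                             real_map=image, rms_real=0.01, snr_threshold=5.0,
                             roundlim=(-1.0, 1.0), sharplim=(-1.0, 2.0),
                             config=Cfg())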
@@ -0,0 +1,42 @@
+ def extract_maps_from_cube(cube_names, dir_slices_out, dir_maps_in):
+     """
+     Extract 2D slices from 3D datacubes and return the list of slice file
+     names together with the header of the last cube processed.
+     """
+     from astropy.io import fits
+     import os
+
+     extracted_maps = []
+
+     for cube_name in cube_names:
+         cube_path = os.path.join(dir_maps_in, cube_name)
+         with fits.open(cube_path) as hdul:
+             data = hdul[0].data
+             cube_header = hdul[0].header
+
+             if data.ndim != 3:
+                 raise ValueError(f"{cube_name} is not a 3D datacube.")
+
+             # Ensure the output directory exists
+             os.makedirs(dir_slices_out, exist_ok=True)
+
+             for i in range(data.shape[0]):
+                 slice_data = data[i, :, :]
+                 slice_header = cube_header.copy()
+                 slice_header['CRPIX3'] = i + 1  # keep CRPIX3 only to track the slice index
+                 # Drop the remaining third-axis keywords so the header describes a
+                 # 2D image; fits.writeto rebuilds NAXIS/NAXIS1/NAXIS2 from the data
+                 for key in ('NAXIS3', 'CRVAL3', 'CDELT3', 'CTYPE3'):
+                     if key in slice_header:
+                         del slice_header[key]
+
+                 out_name = f"{os.path.splitext(cube_name)[0]}_slice_{i+1:03d}.fits"
+                 out_path = os.path.join(dir_slices_out, out_name)
+
+                 fits.writeto(out_path, slice_data, slice_header, overwrite=True)
+                 extracted_maps.append(out_name)
+
+     return extracted_maps, cube_header
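A minimal usage sketch (the cube file name and directories are hypothetical):

    slices, last_header = extract_maps_from_cube(
        cube_names=["my_cube.fits"],
        dir_slices_out="./slices",
        dir_maps_in="./maps",
    )
    # slices -> ["my_cube_slice_001.fits", "my_cube_slice_002.fits", ...]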