hyper-py-photometry 0.1.0 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hyper_py/__init__.py +1 -0
- hyper_py/bkg_multigauss.py +524 -0
- hyper_py/bkg_single.py +477 -0
- hyper_py/config.py +43 -0
- hyper_py/create_background_slices.py +160 -0
- hyper_py/data_output.py +132 -0
- hyper_py/detection.py +142 -0
- hyper_py/extract_cubes.py +42 -0
- hyper_py/fitting.py +562 -0
- hyper_py/gaussfit.py +519 -0
- hyper_py/groups.py +66 -0
- hyper_py/hyper.py +150 -0
- hyper_py/logger.py +73 -0
- hyper_py/map_io.py +73 -0
- hyper_py/paths_io.py +122 -0
- hyper_py/photometry.py +114 -0
- hyper_py/run_hyper.py +45 -0
- hyper_py/single_map.py +716 -0
- hyper_py/survey.py +70 -0
- hyper_py/visualization.py +150 -0
- hyper_py_photometry-0.1.0.dist-info/METADATA +514 -0
- hyper_py_photometry-0.1.0.dist-info/RECORD +26 -0
- hyper_py_photometry-0.1.0.dist-info/WHEEL +5 -0
- hyper_py_photometry-0.1.0.dist-info/entry_points.txt +4 -0
- hyper_py_photometry-0.1.0.dist-info/licenses/LICENSE +13 -0
- hyper_py_photometry-0.1.0.dist-info/top_level.txt +1 -0
hyper_py/hyper.py
ADDED
@@ -0,0 +1,150 @@
import os
import sys
from pathlib import Path
import multiprocessing
from concurrent.futures import ProcessPoolExecutor, as_completed

import numpy as np
from astropy.io import ascii, fits
from astropy.table import vstack
from astropy.wcs import WCS

from hyper_py.single_map import main as single_map
from hyper_py.config import HyperConfig
from hyper_py.logger import setup_logger
from hyper_py.create_background_slices import create_background_cubes
from .extract_cubes import extract_maps_from_cube

# Set multiprocessing start method
multiprocessing.set_start_method("spawn", force=True)

def start_hyper(cfg_path):
    # === Load config ===
    os.chdir(os.path.dirname(__file__))

    config_path = cfg_path if cfg_path is not None else "config.yaml"

    if not os.path.exists(config_path):
        raise FileNotFoundError(f"Config file not found: {config_path}")

    cfg = HyperConfig(config_path)

    # --- Initialize paths --- #
    # - common - #
    paths = cfg.get("paths")
    dir_root = paths["output"]["dir_root"]

    # - input - #
    dir_maps = paths["input"]["dir_maps"]
    dir_slices_out = Path(dir_root, cfg.get("control")["dir_datacube_slices"])
    base_table_name = cfg.get("files", "file_table_base")
    map_names = cfg.get("files", "file_map_name")
    datacube = cfg.get("control", "datacube", False)
    fix_min_box = cfg.get("background", "fix_min_box", 3)  # minimum padding value (multiple of FWHM)
    convert_mjy = cfg.get("units", "convert_mJy")

    # If it's a path to a .txt file, read it #
    if isinstance(map_names, str) and map_names.endswith('.txt'):
        map_list_path = os.path.join(dir_maps, map_names)
        with open(map_list_path, 'r') as f:
            map_names = [line.strip() for line in f if line.strip()]
    # If it's a single string but not a .txt, wrap it in a list
    elif isinstance(map_names, str):
        map_names = [map_names]

    if datacube:
        map_names, cube_header = extract_maps_from_cube(map_names, dir_slices_out, dir_maps)
        background_slices = []
        slice_cutout_header = []

    # - output - #
    output_dir = paths["output"]["dir_table_out"]

    # --- Set up logging for warnings --- #
    dir_log = paths["output"]["dir_log_out"]
    file_log = cfg.get("files", "file_log_name")
    log_path = os.path.join(dir_root, dir_log, file_log)

    # Ensure the log directory exists
    log_path_dir = os.path.join(dir_root, dir_log)
    os.makedirs(log_path_dir, exist_ok=True)

    logger, logger_file_only = setup_logger(log_path, logger_name="HyperLogger", overwrite=True)

    logger.info("******************* 🔥 Hyper starts !!! *******************")

    # --- Parallel control ---
    control_cfg = cfg.get("control", {})
    use_parallel = control_cfg.get("parallel_maps", False)
    n_cores = control_cfg.get("n_cores", os.cpu_count())

    # --- Main parallel or serial execution ---
    logger.info(f"🔄 Starting map analysis using {'multiprocessing' if use_parallel else 'serial'} mode")

    results = []

    if use_parallel:
        logger.info(f"📡 Running HYPER on {len(map_names)} maps using {n_cores} cores...")
        with ProcessPoolExecutor(max_workers=n_cores) as executor:
            futures = {
                executor.submit(single_map, name, cfg, dir_root): name
                for name in map_names
            }
            for future in as_completed(futures):
                map_name = futures[future]
                try:
                    suffix, bg_model, cutout_header, initial_header = future.result()
                    results.append(suffix)
                    if datacube:
                        background_slices.append(bg_model)
                        slice_cutout_header.append(cutout_header)

                    logger.info(f"✅ Finished processing {map_name}")
                except Exception as e:
                    logger.error(f"❌ Error processing {map_name}: {e}")
    else:
        for map_name in map_names:
            logger.info(f"📡 Running HYPER on: {map_name}")
            suffix, bg_model, cutout_header, initial_header = single_map(map_name, cfg, dir_root, logger, logger_file_only)
            results.append(suffix)
            if datacube:
                background_slices.append(bg_model)
                slice_cutout_header.append(cutout_header)

    # --- Collect all output tables --- #
    all_tables = []
    for suffix in results:
        try:
            suffix_clean = Path(suffix).stem  # remove ".fits"
            output_table_path = os.path.join(dir_root, output_dir, f"{base_table_name}_{suffix_clean}.txt")
            table = ascii.read(output_table_path, format="ipac")
            all_tables.append(table)
        except Exception as e:
            logger_file_only.error(f"[ERROR] Failed to load table for {suffix}: {e}")

    # === Merge and write combined tables ===
    final_table = vstack(all_tables)

    # Keep only the comments (headers) from the first table
    if hasattr(all_tables[0], 'meta') and 'comments' in all_tables[0].meta:
        final_table.meta['comments'] = all_tables[0].meta['comments']
    else:
        final_table.meta['comments'] = []

    # Output file paths
    ipac_path = os.path.join(dir_root, output_dir, f"{base_table_name}_ALL.txt")
    csv_path = os.path.join(dir_root, output_dir, f"{base_table_name}_ALL.csv")

    # Write outputs
    final_table.write(ipac_path, format='ipac', overwrite=True)
    final_table.write(csv_path, format='csv', overwrite=True)
    logger_file_only.info(f"\n✅ Final merged table saved to:\n- {ipac_path}\n- {csv_path}")

    # === Combine all bg_models into a datacube ===
    if datacube:
        create_background_cubes(background_slices, slice_cutout_header, cube_header, dir_slices_out, fix_min_box, convert_mjy, logger)

    logger.info("****************** ✅ Hyper finished !!! ******************")
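For orientation, a minimal sketch of driving this entry point directly from Python. The config.yaml section and key names (paths, files, control, units, background) are inferred from the cfg.get(...) calls above rather than from package documentation, and the path below is a placeholder.

# Hypothetical driver: run the full pipeline from Python.
# "/path/to/config.yaml" is a placeholder; the file must define the
# sections read in start_hyper (paths, files, control, units, background).
from hyper_py.hyper import start_hyper

start_hyper("/path/to/config.yaml")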
hyper_py/logger.py
ADDED
@@ -0,0 +1,73 @@
import logging
import warnings
import sys
from astropy.wcs import FITSFixedWarning
from astropy.utils.exceptions import AstropyUserWarning

class StreamToLogger:
    def __init__(self, logger, level=logging.WARNING):
        self.logger = logger
        self.level = level
    def write(self, buf):
        for line in buf.rstrip().splitlines():
            self.logger.log(self.level, line.rstrip())
    def flush(self):
        pass

class ProcessNameFilter(logging.Filter):
    def __init__(self, process_name):
        super().__init__()
        self.process_name = process_name

    def filter(self, record):
        record.process_name = self.process_name
        return True

def setup_logger(log_path="hyper.log", logger_name="HyperLogger", overwrite=True, process_name=""):
    logger = logging.getLogger(logger_name)
    logger.setLevel(logging.DEBUG)
    logger.propagate = False

    if logger.hasHandlers():
        logger.handlers.clear()

    # File handler
    mode = "w" if overwrite else "a"
    file_handler = logging.FileHandler(log_path, mode=mode, encoding='utf-8')
    file_handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
    logger.addHandler(file_handler)

    # Console handler (only INFO)
    console_handler = logging.StreamHandler(sys.stdout)
    logger.addFilter(lambda record: setattr(record, "process_name_suffix", f" ({process_name})" if process_name else "") or True)
    console_handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s%(process_name_suffix)s'))
    logger.addFilter(ProcessNameFilter(process_name))
    logger.addHandler(console_handler)
    logger.setLevel(logging.INFO)

    # File-only logger for warnings
    logger_file_only = logging.getLogger(f"{logger_name}FileOnly")
    logger_file_only.setLevel(logging.INFO)
    logger_file_only.propagate = False

    if logger_file_only.hasHandlers():
        logger_file_only.handlers.clear()

    logger_file_only.addHandler(file_handler)

    # Redirect warnings
    sys.stderr = StreamToLogger(logger_file_only, level=logging.WARNING)
    sys.__stderr__ = StreamToLogger(logger_file_only, level=logging.WARNING)

    def custom_showwarning(message, category, filename, lineno, file=None, line=None):
        logger_file_only.warning(f"{category.__name__}: {message} (from {filename}:{lineno})")

    warnings.showwarning = custom_showwarning
    warnings.simplefilter("always")
    warnings.filterwarnings("always", category=FITSFixedWarning)
    warnings.filterwarnings("always", category=AstropyUserWarning)
    warnings.filterwarnings("always", category=ResourceWarning)
    warnings.filterwarnings("ignore", category=FITSFixedWarning, module="astropy")
    warnings.filterwarnings("ignore", category=UserWarning, module="uncertainties")

    return logger, logger_file_only
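A short usage sketch of the dual-logger pattern this module returns, matching how hyper.py consumes it; the log path and process name below are placeholders.

# Sketch: console+file logger vs. file-only logger (placeholder log path).
from hyper_py.logger import setup_logger

logger, logger_file_only = setup_logger("hyper.log", logger_name="DemoLogger",
                                        overwrite=True, process_name="worker-1")

logger.info("visible on stdout and in hyper.log")
logger_file_only.warning("written to hyper.log only")
# Python warnings and stderr output are also redirected into the file-only logger.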
hyper_py/map_io.py
ADDED
@@ -0,0 +1,73 @@
from astropy.io import fits
from astropy.wcs import WCS
import numpy as np


def read_and_prepare_map(filepath, beam, beam_area_arcsec2, beam_area_sr, convert_mjy=False):
    '''
    Load a FITS map and convert units as needed.
    Always reduce the WCS to 2D (RA, Dec) if higher-dimensional axes are present.

    Parameters:
        filepath (str): Path to the FITS file.
        beam (float): Beam size in arcsec (FWHM).
        beam_area_arcsec2 (float): Beam area in arcsec^2.
        beam_area_sr (float): Beam area in steradians.
        convert_mjy (bool): If True, convert Jy/pixel to mJy/pixel.

    Returns:
        dict: {
            'map': 2D image array,
            'header': cleaned FITS header,
            'pix_dim': pixel size in arcsec,
            'beam_dim': beam (arcsec),
            'beam_area_arcsec2': beam area in arcsec^2,
            'beam_area_sr': beam area in sr
        }
    '''
    with fits.open(filepath) as hdul:
        image_data = hdul[0].data
        header = hdul[0].header

    # --- Auto-squeeze down to 2D image ---
    while image_data.ndim > 2:
        image_data = image_data[0]

    # --- Get pixel scale from header ---
    pix_dim = abs(header.get('CDELT1', header.get('CD1_1', 1))) * 3600.0  # arcsec
    if pix_dim <= 0:
        raise ValueError("Invalid pixel scale in FITS header.")

    # --- Strip header down to 2D WCS only ---
    try:
        wcs_full = WCS(header)
        wcs_2d = wcs_full.celestial  # only RA/Dec axes
        header = wcs_2d.to_header()
    except Exception as e:
        print(f"[WARNING] Could not clean WCS header: {e}")

    # --- Unit conversions ---
    bunit = header.get('BUNIT')
    if bunit == 'MJy /sr':
        arcsec_to_rad = np.pi / (180.0 * 3600.0)
        pix_area_sr = (pix_dim * arcsec_to_rad)**2
        image_data *= 1e6 * pix_area_sr  # MJy/sr to Jy/pixel

    if bunit == 'Jy/beam' or bunit == 'beam-1 Jy':
        pix_area = pix_dim**2
        image_data /= (beam_area_arcsec2 / pix_area)  # Jy/beam to Jy/pixel

    if convert_mjy:
        image_data *= 1e3  # Jy → mJy

    return {
        "map": image_data,
        "header": header,
        "pix_dim": pix_dim,
        "beam_dim": beam,
        "beam_area_arcsec2": beam_area_arcsec2,
        "beam_area_sr": beam_area_sr,
    }
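A usage sketch under the assumption of a Gaussian beam, whose area is the standard pi * FWHM^2 / (4 ln 2); the file name and FWHM value are placeholders.

# Sketch: compute Gaussian-beam areas (standard formula, assumed here)
# and load a map; "maps/field1.fits" and the FWHM are placeholders.
import numpy as np
from hyper_py.map_io import read_and_prepare_map

fwhm = 18.2  # beam FWHM in arcsec
beam_area_arcsec2 = np.pi * fwhm**2 / (4.0 * np.log(2.0))
arcsec_to_rad = np.pi / (180.0 * 3600.0)
beam_area_sr = beam_area_arcsec2 * arcsec_to_rad**2

m = read_and_prepare_map("maps/field1.fits", fwhm,
                         beam_area_arcsec2, beam_area_sr, convert_mjy=True)
print(m["pix_dim"], m["map"].shape)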
hyper_py/paths_io.py
ADDED
@@ -0,0 +1,122 @@
import os
from pathlib import Path


def get_hyper_paths(cfg):
    """
    Prepares and returns all relevant I/O paths used in a HYPER run.
    """
    # - Main paths - #
    paths = cfg.get("paths")
    dir_root = paths["output"]["dir_root"]

    # - Input - #
    dir_maps = paths["input"]["dir_maps"]
    file_map = 'temp'
    input_map_path = os.path.join(dir_maps, file_map)

    # - Output - #
    suffix = Path(file_map).stem

    # - Tables - #
    dir_table_out = paths["output"]["dir_table_out"]
    output_dir_path = os.path.join(dir_root, dir_table_out)
    base_name_with_suffix = f"{cfg.get('files', 'file_table_base')}_{suffix}"

    # - Region files - #
    dir_region_out = paths["output"]["dir_region_out"]
    region_base = os.path.join(dir_root, dir_region_out, f"{cfg.get('files', 'file_region_base')}_{suffix}")
    centroids_file = region_base + "_centroids.reg"
    ellipses_file = region_base + "_ellipses.reg"

    # - Log files - #
    dir_log = paths["output"]["dir_log_out"]
    file_log = cfg.get("files", "file_log_name")
    log_path = os.path.join(dir_root, dir_log, file_log)

    return {
        "dir_root": dir_root,
        "input_map_path": input_map_path,
        "suffix": suffix,
        "output_dir_path": output_dir_path,
        "base_table_name": cfg.get("files", "file_table_base"),
        "base_name_with_suffix": base_name_with_suffix,
        "region_base": region_base,
        "centroids_file": centroids_file,
        "ellipses_file": ellipses_file,
        "log_path": log_path
    }


def get_hyper_single_map_paths(cfg, map_name):
    """
    Prepares and returns all relevant I/O paths for a single-map HYPER run.
    """
    if cfg is None:
        raise ValueError("cfg must be provided")

    # - Main paths - #
    paths = cfg.get("paths")

    if paths is None:
        raise ValueError("Missing 'paths' section in configuration.")

    dir_root = paths["output"]["dir_root"]
    if dir_root is None:
        raise ValueError("Missing 'dir_root' key in paths configuration.")

    # - Input - #
    dir_maps = paths["input"]["dir_maps"]
    file_map = map_name
    input_map_path = os.path.join(dir_maps, file_map)

    # - Output - #
    suffix = Path(file_map).stem

    # - Tables - #
    dir_table_out = paths["output"]["dir_table_out"]
    output_dir_path = os.path.join(dir_root, dir_table_out)
    # Ensure the table output directory exists
    os.makedirs(output_dir_path, exist_ok=True)

    base_name_with_suffix = f"{cfg.get('files', 'file_table_base')}_{suffix}"

    # - Region files - #
    dir_region_out = paths["output"]["dir_region_out"]
    # Ensure the region output directory exists
    output_dir_region_path = os.path.join(dir_root, dir_region_out)
    os.makedirs(output_dir_region_path, exist_ok=True)

    region_base = os.path.join(dir_root, dir_region_out, f"{cfg.get('files', 'file_region_base')}_{suffix}")
    centroids_file = region_base + "_centroids.reg"
    ellipses_file = region_base + "_ellipses.reg"

    # - Log files - #
    dir_log = paths["output"]["dir_log_out"]
    # Ensure the log output directory exists
    output_dir_log_path = os.path.join(dir_root, dir_log)
    os.makedirs(output_dir_log_path, exist_ok=True)

    file_log = cfg.get("files", "file_log_name")
    log_path = os.path.join(dir_root, dir_log, file_log)

    return {
        "dir_root": dir_root,
        "input_map_path": input_map_path,
        "suffix": suffix,
        "output_dir_path": output_dir_path,
        "base_table_name": cfg.get("files", "file_table_base"),
        "base_name_with_suffix": base_name_with_suffix,
        "region_base": region_base,
        "centroids_file": centroids_file,
        "ellipses_file": ellipses_file,
        "log_path": log_path
    }
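A sketch of calling the single-map variant with a minimal stand-in for HyperConfig (hypothetical; the real class lives in hyper_py.config and reads config.yaml). All directory names and file bases below are placeholders, and the call creates the table, region, and log directories as a side effect.

# Sketch with a hypothetical stand-in for HyperConfig; every value below
# is a placeholder. Calling this creates the output directories.
from hyper_py.paths_io import get_hyper_single_map_paths

class StubCfg:
    def __init__(self, data):
        self._data = data

    def get(self, section, key=None, default=None):
        if key is None:
            return self._data.get(section, default)
        return self._data.get(section, {}).get(key, default)

cfg = StubCfg({
    "paths": {
        "input": {"dir_maps": "maps"},
        "output": {"dir_root": "output", "dir_table_out": "tables",
                   "dir_region_out": "regions", "dir_log_out": "logs"},
    },
    "files": {"file_table_base": "hyper_table", "file_region_base": "hyper_reg",
              "file_log_name": "hyper.log"},
})

paths = get_hyper_single_map_paths(cfg, "field1.fits")
print(paths["base_name_with_suffix"])  # hyper_table_field1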
hyper_py/photometry.py
ADDED
@@ -0,0 +1,114 @@
import numpy as np
from photutils.aperture import EllipticalAperture, aperture_photometry
from astropy.table import Table

def aperture_photometry_on_sources(image, xcen, ycen, config,
                                   radius_val_1=None, radius_val_2=None, PA_val=None):
    '''
    Perform elliptical aperture photometry on given source positions.

    Parameters:
        image : 2D array
        xcen, ycen : 1D arrays of centroid positions (in pixels)
        config : HyperConfig instance
        radius_val_1, radius_val_2, PA_val : arrays of ellipse semi-axes (pixels)
            and position angles (degrees); single-element arrays are
            broadcast to all sources.

    Returns:
        Table with flux and uncertainty
    '''
    if len(radius_val_1) == 1:
        radius_val_1 = radius_val_1 * len(xcen)
    if len(radius_val_2) == 1:
        radius_val_2 = radius_val_2 * len(xcen)
    if len(PA_val) == 1:
        PA_val = PA_val * len(xcen)

    fluxes = []
    errors = []

    for i in range(len(xcen)):
        a = radius_val_1[i]
        b = radius_val_2[i]
        theta = np.deg2rad(PA_val[i] + 90.)  # rotated only for photometry (x and y axes inverted here)

        position = (xcen[i], ycen[i])
        aperture = EllipticalAperture(position, a, b, theta=theta)

        phot_table = aperture_photometry(image, aperture, method='exact')
        flux = phot_table['aperture_sum']

        # --- Alternative: area-weighted photometry within the elliptical aperture --- #
        # flux = area_weighted_photometry(image, xcen[i], ycen[i], a, b, theta)

        # Estimate noise inside aperture
        mask = aperture.to_mask(method="exact")
        data_cutout = mask.cutout(image)
        if data_cutout is not None:
            aperture_data = data_cutout * mask.data
            noise = np.nanstd(aperture_data)
            error = noise * np.sqrt(np.sum(mask.data**2))
        else:
            error = 0.0

        fluxes.append(flux)
        errors.append(error)

    return Table(data={"x": xcen, "y": ycen, "flux": fluxes, "error": errors})


# --- Function to calculate area-weighted flux inside an elliptical aperture --- #
def area_weighted_photometry(image, xcen, ycen, a, b, theta, method="exact"):
    """
    Perform area-weighted photometry on an image within an elliptical aperture.

    Parameters:
        image: 2D numpy array of image data
        xcen, ycen: coordinates of the center of the aperture
        a, b: semi-major and semi-minor axes of the elliptical aperture
        theta: position angle of the ellipse (degrees)
        method: method used for photometry (not used here but can be extended)

    Returns:
        total_flux: total flux inside the elliptical aperture
    """
    total_flux = 0

    # Get the shape of the image
    ysize, xsize = image.shape

    # Create a meshgrid for pixel coordinates (note the order: y-axis, then x-axis)
    y, x = np.indices((ysize, xsize))  # y -> rows, x -> columns

    # Transform the pixel coordinates to match the elliptical aperture
    cos_theta = np.cos(np.radians(theta))
    sin_theta = np.sin(np.radians(theta))

    # Rotate the coordinates according to the position angle
    x_rot = (x - xcen) * cos_theta + (y - ycen) * sin_theta
    y_rot = -(x - xcen) * sin_theta + (y - ycen) * cos_theta

    # Check if each pixel lies within the elliptical aperture
    inside_aperture = (x_rot**2 / a**2 + y_rot**2 / b**2) <= 1

    # Now calculate the weighted flux (sum of fluxes for each pixel inside the aperture)
    for i in range(ysize):
        for j in range(xsize):
            if inside_aperture[i, j]:
                # For pixels inside the ellipse, add their full flux
                total_flux += image[i, j]
    return total_flux
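A sketch exercising the elliptical-aperture routine on a synthetic Gaussian source; the ellipse parameters are placeholders, and config is passed as None since the function body never reads it.

# Sketch: photometry on a synthetic Gaussian (parameters are placeholders;
# config is unused by the function body, so None is passed).
import numpy as np
from hyper_py.photometry import aperture_photometry_on_sources

yy, xx = np.mgrid[0:64, 0:64]
sigma = 3.0
image = np.exp(-((xx - 32.0)**2 + (yy - 32.0)**2) / (2.0 * sigma**2))

tbl = aperture_photometry_on_sources(image, [32.0], [32.0], None,
                                     radius_val_1=[6.0], radius_val_2=[6.0],
                                     PA_val=[0.0])
# Total Gaussian flux is 2*pi*sigma^2 ≈ 56.5; a circular 2-sigma aperture
# recovers roughly 86% of it.
print(tbl["flux"][0])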
hyper_py/run_hyper.py
ADDED
@@ -0,0 +1,45 @@
import sys
import os
import shutil
import warnings
import yaml
from pathlib import Path

sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

# Block warnings
warnings.filterwarnings("ignore", message="Using UFloat objects with std_dev==0 may give unexpected results.")
warnings.filterwarnings("ignore", message=".*Set OBSGEO-L to.*")
warnings.filterwarnings("ignore", message=".*Wrapping comment lines > 78 characters.*")
warnings.filterwarnings("ignore", message=r".*more axes \(4\) than the image it is associated with \(2\).*")
warnings.filterwarnings("ignore", message=".*Set MJD-OBS to.*")

from hyper_py.hyper import start_hyper

def update_dir_root(default_config, config_path, new_dir_root):
    """Create a new config.yaml in the specified directory with updated dir_root."""
    default_config = Path(default_config)
    config_path = Path(config_path)
    cfg = yaml.safe_load(default_config.read_text(encoding="utf-8")) or {}
    cfg['paths']['output']['dir_root'] = str(Path(new_dir_root, "output"))
    config_path.write_text(yaml.safe_dump(cfg, sort_keys=False), encoding="utf-8")

def main():
    config_path = sys.argv[1] if len(sys.argv) > 1 else "config.yaml"
    if not os.path.exists(config_path):
        default_config = os.path.join(os.path.dirname(__file__), "config.yaml")
        if not os.path.exists(default_config):
            print("Error: default config.yaml not found.")
            sys.exit(1)

        config_path = os.path.join(os.getcwd(), "config.yaml")

        update_dir_root(default_config, config_path, os.getcwd())
        print(f"⚠️ New config.yaml created in {config_path}")
        print("⚠️ Please edit the configuration file and set the correct parameters and paths before running again.")
        sys.exit(0)

    start_hyper(config_path)

if __name__ == "__main__":
    main()
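The first-run bootstrap above implies the following workflow; the console-script name registered in entry_points.txt is not visible in this excerpt, so the module invocation is shown instead.

# In an empty working directory: writes a template ./config.yaml and exits
#   python -m hyper_py.run_hyper
# After editing the template, run the pipeline:
#   python -m hyper_py.run_hyper config.yaml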