starbash 0.1.1__py3-none-any.whl → 0.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of starbash might be problematic. Click here for more details.
- starbash/__init__.py +5 -0
- starbash/analytics.py +21 -6
- starbash/app.py +145 -17
- starbash/commands/repo.py +102 -30
- starbash/commands/select.py +326 -0
- starbash/commands/user.py +91 -6
- starbash/database.py +152 -20
- starbash/defaults/starbash.toml +2 -34
- starbash/main.py +25 -127
- starbash/recipes/README.md +3 -0
- starbash/recipes/__init__.py +0 -0
- starbash/recipes/master_bias/starbash.toml +55 -0
- starbash/recipes/master_flat/starbash.toml +46 -0
- starbash/recipes/osc_dual_duo/starbash.py +151 -0
- starbash/recipes/osc_dual_duo/starbash.toml +88 -0
- starbash/recipes/osc_single_duo/starbash.toml +67 -0
- starbash/recipes/starbash.toml +34 -0
- starbash/repo/manager.py +82 -21
- starbash/selection.py +36 -0
- starbash/templates/userconfig.toml +33 -1
- starbash-0.1.4.dist-info/METADATA +124 -0
- starbash-0.1.4.dist-info/RECORD +32 -0
- starbash/commands/selection.py +0 -117
- starbash-0.1.1.dist-info/METADATA +0 -96
- starbash-0.1.1.dist-info/RECORD +0 -24
- {starbash-0.1.1.dist-info → starbash-0.1.4.dist-info}/WHEEL +0 -0
- {starbash-0.1.1.dist-info → starbash-0.1.4.dist-info}/entry_points.txt +0 -0
- {starbash-0.1.1.dist-info → starbash-0.1.4.dist-info}/licenses/LICENSE +0 -0
|
# Recipe: build a master flat frame with Siril (starbash recipe repo).

[repo]
kind = "recipe"

[recipe]
author.name = "FIXMESiril?"
author.email = "FIXMESiril?"

[[stage]]

description = "Generate master flat"
disabled = true # FIXME, debugging later stuff

# For any camera
auto.for-camera = []

tool = "siril"
# input.source = "session" # or auto? prefer ones in session otherwise find by in masters
input.type = "flat" # look in _session_ directories, but look only for flat files

# FIXME for early development we have support for simple absolute file paths with globs
input.source = "path"
input.path = "/workspaces/starbash/images/from_astroboy/M 27/2025-09-16/FLAT/*.fit*"
input.required = true # Is at least one input file required? If true, we will skip running this stage entirely (with a warning)

when = "session-config" # run once per session-config
context.output = "{process_dir}/flat_s{sessionid}_c{sessionconfig}.fits"

# FIXME, bias should have been added to context by two previous stages. But for now hardwire
context.bias = '/workspaces/starbash/images/masters/biases/2025-09-09_stacked.fits'

script = '''
# Create a sequence from the raw flat frames
link flat -out={process_dir}
cd {process_dir}

# Calibrate the flat frames using master bias
calibrate flat -bias={bias}

# Stack the pre-processed (calibrated) flat frames (writes to flat_stacked.fit)
stack pp_flat rej 3 3 -norm=mul -out=flat_stacked
'''

temporaries = ["flat", "pp_flat"]
# pyright: reportUndefinedVariable=false
# ('context' and 'logger' are injected by the starbash runtime)

import os
from glob import glob

from starbash.tool import tools

# Shared handle to the external Siril tool wrapper used by every stage below.
siril = tools["siril"]

# When True, perhaps_delete_temps() removes intermediate sequence files.
delete_temps = False
+
|
|
13
|
+
# FIXME move this into main starbash
def perhaps_delete_temps(temps: list[str]) -> None:
    """Remove intermediate files for the given sequence prefixes, if enabled.

    Each name in *temps* is treated as a filename prefix inside the process
    directory; every ``<prefix>_*`` match is deleted. A no-op unless the
    module-level ``delete_temps`` flag is True.
    """
    if not delete_temps:
        return
    process_dir = context["process_dir"]
    for prefix in temps:
        for stale in glob(f"{process_dir}/{prefix}_*"):
            os.remove(stale)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def normalize_target_name(name: str) -> str:
    """Convert a target name to a filesystem-safe form: drop spaces, uppercase."""
    return "".join(name.split(" ")).upper()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def make_stacked(sessionconfig: str, variant: str, output_file: str):
    """
    Registers and stacks all pre-processed light frames for a given filter
    configuration across all sessions.

    Args:
        sessionconfig: Session-config name embedded in the frame filenames
            (may be "*" to match every config).
        variant: Channel name embedded in the frame filenames (e.g. "Ha", "OIII").
        output_file: Basename (no extension) for the stacked output in process_dir.
    """
    # The sequence name for all frames of this variant across all sessions
    # e.g. Ha_bkg_pp_light_cHaOiii
    merged_seq_base = f"all_{variant}_bkg_pp_light"

    # Hoisted: the original inlined context["process_dir"] inside double-quoted
    # f-strings, which is a SyntaxError on Python < 3.12 (PEP 701 only allows
    # quote reuse from 3.12 onward).
    process_dir = context["process_dir"]

    # Absolute path for the output stacked file (empty list when absent)
    stacked_output_path = glob(f"{process_dir}/{output_file}.fit*")

    if stacked_output_path:
        logger.info(f"Using existing stacked file: {stacked_output_path}")
    else:
        # Merge all frames (from multiple sessions and configs) use those for stacking
        frames = glob(
            f"{process_dir}/{variant}_bkg_pp_light_s*_c{sessionconfig}_*.fit*"
        )

        logger.info(
            f"Registering and stacking {len(frames)} frames for {sessionconfig}/{variant} -> {stacked_output_path}"
        )

        # Siril commands for registration and stacking. We run this in process_dir.
        commands = f"""
link {merged_seq_base} -out={process_dir}
cd {process_dir}

register {merged_seq_base}
stack r_{merged_seq_base} rej g 0.3 0.05 -filter-wfwhm=3k -norm=addscale -output_norm -32b -out={output_file}

# and flip if required
mirrorx_single {output_file}
"""

        context["input_files"] = frames
        siril.run_in_temp_dir(commands, context=context)

        perhaps_delete_temps([merged_seq_base, f"r_{merged_seq_base}"])
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def make_renormalize():
    """
    Aligns the stacked images (Sii, Ha, OIII) and renormalizes Sii and OIII
    to match the flux of the Ha channel.
    """
    logger.info("Aligning and renormalizing stacked images.")

    # Define file basenames for the stacked images created in the 'process' directory
    ha_base = "results_00001"
    oiii_base = "results_00002"
    sii_base = "results_00003"

    # Define final output paths. The 'results' directory is a symlink in the work dir.
    # Hoisted lookups: the original nested context["..."] inside a double-quoted
    # f-string, a SyntaxError on Python < 3.12 (PEP 701).
    targets_root = context["targets"]
    target_name = normalize_target_name(context["target"])
    results_dir = f"{targets_root}/{target_name}"
    os.makedirs(results_dir, exist_ok=True)

    ha_final_path = f"{results_dir}/stacked_Ha.fits"
    oiii_final_path = f"{results_dir}/stacked_OIII.fits"

    # Check if final files already exist to allow resuming
    if all(os.path.exists(f) for f in [ha_final_path, oiii_final_path]):
        logger.info("Renormalized files already exist, skipping.")
        return

    # Basenames for registered files (output of 'register' command)
    r_ha = f"r_{ha_base}"
    r_oiii = f"r_{oiii_base}"

    # Pixel math formula for renormalization.
    # It matches the median and spread (MAD) of a channel to a reference channel (Ha).
    # Formula: new = old * (MAD(ref)/MAD(old)) - (MAD(ref)/MAD(old)) * MEDIAN(old) + MEDIAN(ref)
    pm_oiii = f'"${r_oiii}$*mad(${r_ha}$)/mad(${r_oiii}$)-mad(${r_ha}$)/mad(${r_oiii}$)*median(${r_oiii}$)+median(${r_ha}$)"'

    # Siril commands to be executed in the 'process' directory
    commands = f"""
# -transf=shift fails sometimes, which I guess is possible because we have multiple sessions with possible different camera rotation
# -interp=none also fails sometimes, so let default interp happen
register results
pm {pm_oiii}
update_key FILTER Oiii "OSC dual Duo filter extracted"
save "{oiii_final_path}"
load {r_ha}
update_key FILTER Ha "OSC dual Duo filter extracted"
save "{ha_final_path}"
"""

    # NOTE(review): this looks for the Sii stack in results_dir, but the stacked
    # results_0000N files appear to be written to process_dir by make_stacked —
    # confirm the intended directory before relying on the Sii branch.
    if os.path.exists(f"{results_dir}/{sii_base}.fit"):
        logger.info(f"Doing renormalisation of extra Sii channel")

        sii_final_path = f"{results_dir}/stacked_Sii.fits"
        r_sii = f"r_{sii_base}"
        pm_sii = f'"${r_sii}$*mad(${r_ha}$)/mad(${r_sii}$)-mad(${r_ha}$)/mad(${r_sii}$)*median(${r_sii}$)+median(${r_ha}$)"'
        commands += f"""
pm {pm_sii}
update_key FILTER Sii "OSC dual Duo filter extracted"
save "{sii_final_path}"
"""

    siril.run(context["process_dir"], commands, context=context)
    logger.info(f"Saved final renormalized images to {results_dir}")
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def osc_dual_duo_post_session():
    """Stack each extracted channel across sessions, then renormalize them.

    Entry point for the 'session-stack' stage of the dual duo-filter recipe.
    """
    logger.info("Running osc_dual_duo_post_session python script")
    logger.info("Using context: %s", context)

    # red output channel - from the SiiOiii filter Sii is on the 672nm red channel (mistakenly called Ha by siril)
    # NOTE(review): this call and the HaOiii call below both target
    # results_00001, so whichever runs second is skipped as "existing".
    # make_renormalize() expects Sii in results_00003 — confirm intent.
    make_stacked("SiiOiii", "Ha", "results_00001")

    # green output channel - from the HaOiii filter Ha is on the 656nm red channel
    make_stacked("HaOiii", "Ha", "results_00001")

    # blue output channel - both filters have Oiii on the 500nm blue channel. Note the case here is uppercase to match siril output
    make_stacked("*", "OIII", "results_00002")

    # There might be an old/stale autogenerated .seq file, delete it so it doesn't confuse renormalize
    # (single-quoted subscript: nested double quotes are a SyntaxError pre-3.12)
    results_seq_path = f"{context['process_dir']}/results_.seq"
    if os.path.exists(results_seq_path):
        os.remove(results_seq_path)

    make_renormalize()
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
osc_dual_duo_post_session()
|
|
# Recipe: OSC dual duo-filter processing (SiiOiii + HaOiii) for starbash.

[repo]
kind = "recipe"

# all sb.toml files can optionally contain a version section. if version of the running starbash app is out of bounds a warning message will be printed
# to the user and the file will be ignored for future processing.
[recipe.require.version]
min="0.1.0"
max="4.5.8"

[recipe]
author.name = "Kevin Hester"
author.email = "kevinh@geeksville.com"

[[stage]]

description = "Extract OSC dual duo filter Ha, Oiii and Sii channels"
disabled = true # FIXME, debugging later stuff

# FIXME-somehow-specify-what-filternames are used to auto detect this recipe can be used?
# figure out how to support dual duo vs single duo. Perhaps: the FIRST recipe that matches an auto rule
# is used for any auto-defected defaults. If an auto match is found it will be saved in the generated starter
# project.toml file.

# non OSC people use names like LRGB or SHO

# for dual duo if we see Sii assume they also have HaOiii
auto.for-filter = ["SiiOiii"]
# for single duo look for this
# auto.for-filter = ["HaOiii"]
auto.for-camera = ["OSC"]

tool = "siril"
when = "session-light" # run once per session-config
output = "FIXME"

# FIXME, bias and flat should have been added to context by two previous stages. But for now hardwire
# Note: they should not have filename extensions (see strip_extension in the old process.py)
context.bias = '/workspaces/starbash/images/masters/biases/2025-09-09_stacked.fits'

context.sessionid = "2025-09-16" # FIXME, generate this by looping over all sessions (from outside this file)
context.sessionconfig = "SiiOiii" # FIXME generate this by looping over all session configs
context.light_base = "light_s{sessionid}_c{sessionconfig}"

# FIXME until auto light finding is in
input.source = "path"
input.path = "/workspaces/starbash/images/from_astroboy/M 27/2025-09-16/LIGHT/*.fit*"

script = '''
# Create a sequence from the raw light frames, seq file goes to process_dir
link {light_base} -out={process_dir}
cd {process_dir}

# Calibrate the light frames using master bias and flat
calibrate {light_base} -bias={bias} -flat=flat -cfa -equalize_cfa

# Remove background gradient on a per-frame basis (generates bkg_pp_light.seq)
seqsubsky pp_{light_base} 1

# FIXME only do this step for duo filters (refactor to share common light processing function)
seqextract_HaOIII bkg_pp_{light_base} -resample=ha
'''

temporaries = ["FIXME"]

[[stage]]

# FIXME, eventually we could make it optional to even have a starbash.toml. If we find an
# starbash.py we could introspect it for a starbash_config dict. And look inside that for description
# "stage", "when" or whatever?

description = "Stack OSC dual duo filter data, with separate Ha, Oiii and Sii channels"

context.target = "M 27" # FIXME
context.targets = "/workspaces/starbash/images/processed" # FIXME, do something smarter

tool = "python"
when = "session-stack" # run once after all session/session-config processing was done

# if not specified starbash.py used
# script-file = "script.py"

# or inline python code instead of that function?
#script = '''
# make_stacked("SiiOiii", "Ha", f"results_00001")
# ...
# (moved to script.py)
# '''
|
|
# Recipe: OSC single duo-filter processing (HaOiii only) for starbash.

[repo]
kind = "recipe"

# all sb.toml files can optionally contain a version section. if version of the running starbash app is out of bounds a warning message will be printed
# to the user and the file will be ignored for future processing.
[recipe.require.version]
min="0.1.0"
max="4.5.8"

[recipe]
author.name = "Kevin Hester"
author.email = "kevinh@geeksville.com"

[[stage]]

description = "Process OSC single duo filter data, extracting Ha and Oiii"

disabled = true # FIXME, we don't yet have auto selection based on filter types

# Restrict processing of this stage to only if detected hardware was found for this session
# for single duo look for this
auto.for-filter = ["HaOiii"]
auto.for-camera = ["OSC"]

tool = "siril"
when = "session-light" # run once per session-config
output = "FIXME"

script = '''
# Create a sequence from the raw light frames, seq file goes to process_dir
link {light_base} -out={process_dir}
cd {process_dir}

# Calibrate the light frames using master bias and flat
calibrate {light_base} -bias={bias} -flat={flat} -cfa -equalize_cfa

# Remove background gradient on a per-frame basis (generates bkg_pp_{light_base}.seq)
seqsubsky pp_{light_base} 1
'''

temporaries = ["FIXME"]

[[stage]]

disabled = true # FIXME, we don't yet have auto selection based on filter types

tool = "python"
when = "session-stack" # run once after all session/session-config processing was done

script-file = "script.py"

# or inline python code instead of that function?
script = '''
# green output channel - from the HaOiii filter Ha is on the 656nm red channel
make_stacked("HaOiii", "Ha", f"results_00001")

# blue output channel - both filters have Oiii on the 500nm blue channel. Note the case here is uppercase to match siril output
make_stacked("*", "OIII", f"results_00002")

# There might be an old/state autogenerated .seq file, delete it so it doesn't confuse renormalize
results_seq_path = f"{process_dir}/results_.seq"
if os.path.exists(results_seq_path):
    os.remove(results_seq_path)

make_renormalize()
'''
|
|
# Index repo: lists the bundled recipe repos and the global processing stages.
[repo]
kind = "repo"

# recipes for generating master flats, bias, darks...
[[repo-ref]]
dir = "master_bias"
[[repo-ref]]
dir = "master_flat"

# Note: For automated recipe finding, it is important to list more demanding recipes first. For instance:
# osc_dual_duo (which needs a HaOiii and a SiiOiii filter) should be listed first, so that if we see
# dataset containing both HaOiii and SiiOiii filters we will assume that is the recipe to use.
#
# But later if we see a dataset containing only HaOiii data, assume osc_single_duo should be used.

[[repo-ref]]
dir = "osc_dual_duo"

[[repo-ref]]
dir = "osc_single_duo"

# processing stages, currently all declared here, but possibly in the future they could be added by user/other toml files

[[stages]]
name = "session-config" # for flat processing, master generation etc
priority = 10

[[stages]]
name = "session-light" # generate light frames from lights and with reference to flats/bias
priority = 20

[[stages]]
name = "session-stack" # stack frames
priority = 30
|
starbash/repo/manager.py
CHANGED
|
@@ -6,6 +6,7 @@ from __future__ import annotations
|
|
|
6
6
|
import logging
|
|
7
7
|
from pathlib import Path
|
|
8
8
|
from importlib import resources
|
|
9
|
+
from typing import Any
|
|
9
10
|
|
|
10
11
|
import tomlkit
|
|
11
12
|
from tomlkit.toml_file import TOMLFile
|
|
@@ -42,29 +43,44 @@ class Repo:
|
|
|
42
43
|
|
|
43
44
|
__repr__ = __str__
|
|
44
45
|
|
|
45
|
-
|
|
46
|
-
def kind(self) -> str:
|
|
46
|
+
def kind(self, unknown_kind: str = "unknown") -> str:
    """
    Read-only attribute for the repository kind (e.g., "recipe", "data", etc.).

    Args:
        unknown_kind: Fallback returned when the config has no "repo.kind" key.

    Returns:
        The kind of the repository as a string.
    """
    # str() guards against non-string TOML values ending up in repo.kind.
    c = self.get("repo.kind", unknown_kind)
    return str(c)
|
|
55
|
+
|
|
56
|
+
def add_repo_ref(self, dir: str) -> Repo | None:
    """
    Adds a new repo-ref to this repository's configuration.
    if new returns the newly added Repo object, if already exists returns None"""
    # NOTE(review): `dir` shadows the builtin of the same name; harmless here
    # but worth renaming eventually.

    # if dir is not absolute, we need to resolve it relative to the cwd
    if not Path(dir).is_absolute():
        dir = str((Path.cwd() / dir).resolve())

    # Add the ref to this repo
    aot = self.config.get(REPO_REF, None)
    if aot is None:
        aot = tomlkit.aot()
        self.config[REPO_REF] = aot  # add an empty AoT at the end of the file

    if type(aot) is not AoT:
        raise ValueError(f"repo-ref in {self.url} is not an array")

    # Refuse duplicates: a directory may only be referenced once.
    for t in aot:
        if "dir" in t and t["dir"] == dir:
            logging.warning(f"Repo ref {dir} already exists - ignoring.")
            return None  # already exists

    ref = {"dir": dir}
    aot.append(ref)

    # Also add the repo to the manager
    return self.add_from_ref(ref)
|
|
68
84
|
|
|
69
85
|
def write_config(self) -> None:
|
|
70
86
|
"""
|
|
@@ -107,25 +123,31 @@ class Repo:
|
|
|
107
123
|
|
|
108
124
|
return None
|
|
109
125
|
|
|
110
|
-
def add_from_ref(self, ref: dict) ->
|
|
126
|
+
def add_from_ref(self, ref: dict) -> Repo:
    """
    Adds a repository based on a repo-ref dictionary.

    Args:
        ref: Mapping with either a "url" key (used verbatim) or a "dir" key
            (resolved to a file:// URL or joined onto this repo's URL).

    Returns:
        The Repo registered with the manager.

    Raises:
        ValueError: If *ref* contains neither "url" nor "dir".
    """
    if "url" in ref:
        url = ref["url"]
    elif "dir" in ref:
        # FIXME don't allow ~ or .. in file paths for security reasons?
        if self.is_scheme("file"):
            path = Path(ref["dir"])
            base_path = self.get_path()

            if base_path and not path.is_absolute():
                # Resolve relative to the current TOML file's directory
                path = (base_path / path).resolve()
            else:
                # Expand ~ and resolve from CWD
                path = path.expanduser().resolve()
            url = f"file://{path}"
        else:
            # construct an URL relative to this repo's URL
            url = self.url.rstrip("/") + "/" + ref["dir"].lstrip("/")
    else:
        raise ValueError(f"Invalid repo reference: {ref}")
    return self.manager.add_repo(url)
|
|
129
151
|
|
|
130
152
|
def add_by_repo_refs(self) -> None:
|
|
131
153
|
"""Add all repos mentioned by repo-refs in this repo's config."""
|
|
@@ -203,7 +225,7 @@ class Repo:
|
|
|
203
225
|
) # we currently make it optional to have the config file at root
|
|
204
226
|
return tomlkit.TOMLDocument() # empty placeholder
|
|
205
227
|
|
|
206
|
-
def get(self, key: str, default=None):
|
|
228
|
+
def get(self, key: str, default: Any | None = None) -> Any | None:
|
|
207
229
|
"""
|
|
208
230
|
Gets a value from this repo's config for a given key.
|
|
209
231
|
The key can be a dot-separated string for nested values.
|
|
@@ -222,6 +244,36 @@ class Repo:
|
|
|
222
244
|
value = value.get(k)
|
|
223
245
|
return value if value is not None else default
|
|
224
246
|
|
|
247
|
+
def set(self, key: str, value: Any) -> None:
    """
    Sets a value in this repo's config for a given key.
    The key can be a dot-separated string for nested values.
    Creates nested Table structures as needed.

    Args:
        key: The dot-separated key to set (e.g., "repo.kind").
        value: The value to set.

    Example:
        repo.set("repo.kind", "preferences")
        repo.set("user.name", "John Doe")
    """
    keys = key.split(".")
    current: Any = self.config

    # Navigate/create nested structure for all keys except the last
    for k in keys[:-1]:
        if k not in current:
            # Create a new nested table
            current[k] = tomlkit.table()
        elif not isinstance(current[k], dict):
            # Overwrite non-dict value with a table
            # NOTE(review): this silently discards the previous scalar value.
            current[k] = tomlkit.table()
        current = current[k]

    # Set the final value
    current[keys[-1]] = value
|
|
276
|
+
|
|
225
277
|
|
|
226
278
|
class RepoManager:
|
|
227
279
|
"""
|
|
@@ -245,6 +297,15 @@ class RepoManager:
|
|
|
245
297
|
# Most users will just want to read from merged
|
|
246
298
|
self.merged = MultiDict()
|
|
247
299
|
|
|
300
|
+
@property
def regular_repos(self) -> list[Repo]:
    """Repos users typically care about.

    Excludes preference repos and anything served from the built-in
    "pkg" scheme (bundled recipes).
    """
    # Bug fix: ("preferences") is just a parenthesized string, so the
    # original `kind() not in ("preferences")` performed a SUBSTRING test —
    # e.g. a kind of "ref" was wrongly excluded. A one-element tuple
    # restores the intended membership check.
    return [
        r
        for r in self.repos
        if r.kind() not in ("preferences",) and not r.is_scheme("pkg")
    ]
|
|
308
|
+
|
|
248
309
|
def add_repo(self, url: str) -> Repo:
|
|
249
310
|
logging.debug(f"Adding repo: {url}")
|
|
250
311
|
r = Repo(self, url)
|
starbash/selection.py
CHANGED
|
@@ -17,6 +17,7 @@ class Selection:
|
|
|
17
17
|
- Date ranges
|
|
18
18
|
- Filters
|
|
19
19
|
- Image types
|
|
20
|
+
- Telescope names
|
|
20
21
|
|
|
21
22
|
The selection state is saved to disk and can be used to build database queries.
|
|
22
23
|
"""
|
|
@@ -33,6 +34,7 @@ class Selection:
|
|
|
33
34
|
self.date_end: Optional[str] = None
|
|
34
35
|
self.filters: list[str] = []
|
|
35
36
|
self.image_types: list[str] = []
|
|
37
|
+
self.telescopes: list[str] = []
|
|
36
38
|
|
|
37
39
|
# Load existing state if it exists
|
|
38
40
|
self._load()
|
|
@@ -48,6 +50,7 @@ class Selection:
|
|
|
48
50
|
self.date_end = data.get("date_end")
|
|
49
51
|
self.filters = data.get("filters", [])
|
|
50
52
|
self.image_types = data.get("image_types", [])
|
|
53
|
+
self.telescopes = data.get("telescopes", [])
|
|
51
54
|
logging.debug(f"Loaded selection state from {self.state_file}")
|
|
52
55
|
except Exception as e:
|
|
53
56
|
logging.warning(f"Failed to load selection state: {e}")
|
|
@@ -64,6 +67,7 @@ class Selection:
|
|
|
64
67
|
"date_end": self.date_end,
|
|
65
68
|
"filters": self.filters,
|
|
66
69
|
"image_types": self.image_types,
|
|
70
|
+
"telescopes": self.telescopes,
|
|
67
71
|
}
|
|
68
72
|
|
|
69
73
|
with open(self.state_file, "w") as f:
|
|
@@ -79,6 +83,7 @@ class Selection:
|
|
|
79
83
|
self.date_end = None
|
|
80
84
|
self.filters = []
|
|
81
85
|
self.image_types = []
|
|
86
|
+
self.telescopes = []
|
|
82
87
|
self._save()
|
|
83
88
|
|
|
84
89
|
def add_target(self, target: str) -> None:
|
|
@@ -101,6 +106,26 @@ class Selection:
|
|
|
101
106
|
self.targets.remove(target)
|
|
102
107
|
self._save()
|
|
103
108
|
|
|
109
|
+
def add_telescope(self, telescope: str) -> None:
    """Add a telescope to the selection.

    Args:
        telescope: Telescope name to add to the selection
    """
    # Guard clause keeps the list duplicate-free; state is persisted only
    # when something actually changed.
    if telescope in self.telescopes:
        return
    self.telescopes.append(telescope)
    self._save()
|
|
118
|
+
|
|
119
|
+
def remove_telescope(self, telescope: str) -> None:
    """Remove a telescope from the selection.

    Args:
        telescope: Telescope name to remove from the selection
    """
    # EAFP form of the original membership check; state is persisted only
    # when an entry was actually removed.
    try:
        self.telescopes.remove(telescope)
    except ValueError:
        return
    self._save()
|
|
128
|
+
|
|
104
129
|
def set_date_range(
|
|
105
130
|
self, start: Optional[str] = None, end: Optional[str] = None
|
|
106
131
|
) -> None:
|
|
@@ -146,6 +171,7 @@ class Selection:
|
|
|
146
171
|
and self.date_end is None
|
|
147
172
|
and not self.filters
|
|
148
173
|
and not self.image_types
|
|
174
|
+
and not self.telescopes
|
|
149
175
|
)
|
|
150
176
|
|
|
151
177
|
def get_query_conditions(self) -> dict[str, Any]:
|
|
@@ -173,6 +199,13 @@ class Selection:
|
|
|
173
199
|
# TODO: Support multiple filters in queries
|
|
174
200
|
conditions["FILTER"] = self.filters[0] if len(self.filters) == 1 else None
|
|
175
201
|
|
|
202
|
+
if self.telescopes:
|
|
203
|
+
# For now, just use the first telescope
|
|
204
|
+
# TODO: Support multiple telescopes in queries
|
|
205
|
+
conditions["TELESCOP"] = (
|
|
206
|
+
self.telescopes[0] if len(self.telescopes) == 1 else None
|
|
207
|
+
)
|
|
208
|
+
|
|
176
209
|
# Add date range conditions
|
|
177
210
|
if self.date_start:
|
|
178
211
|
conditions["date_start"] = self.date_start
|
|
@@ -198,6 +231,9 @@ class Selection:
|
|
|
198
231
|
if self.targets:
|
|
199
232
|
summary["criteria"].append(f"Targets: {', '.join(self.targets)}")
|
|
200
233
|
|
|
234
|
+
if self.telescopes:
|
|
235
|
+
summary["criteria"].append(f"Telescopes: {', '.join(self.telescopes)}")
|
|
236
|
+
|
|
201
237
|
if self.date_start or self.date_end:
|
|
202
238
|
date_range = []
|
|
203
239
|
if self.date_start:
|