starbash 0.1.3-py3-none-any.whl → 0.1.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of starbash might be problematic.
- starbash/__init__.py +5 -0
- starbash/analytics.py +21 -6
- starbash/app.py +63 -9
- starbash/commands/repo.py +19 -12
- starbash/commands/select.py +326 -0
- starbash/commands/user.py +91 -6
- starbash/database.py +11 -0
- starbash/defaults/starbash.toml +2 -2
- starbash/main.py +25 -133
- starbash/recipes/README.md +3 -0
- starbash/recipes/__init__.py +0 -0
- starbash/recipes/master_bias/starbash.toml +55 -0
- starbash/recipes/master_flat/starbash.toml +46 -0
- starbash/recipes/osc_dual_duo/starbash.py +151 -0
- starbash/recipes/osc_dual_duo/starbash.toml +88 -0
- starbash/recipes/osc_single_duo/starbash.toml +67 -0
- starbash/recipes/starbash.toml +34 -0
- starbash/repo/manager.py +78 -22
- starbash/templates/userconfig.toml +1 -1
- {starbash-0.1.3.dist-info → starbash-0.1.4.dist-info}/METADATA +33 -23
- starbash-0.1.4.dist-info/RECORD +32 -0
- starbash/commands/selection.py +0 -137
- starbash-0.1.3.dist-info/RECORD +0 -24
- {starbash-0.1.3.dist-info → starbash-0.1.4.dist-info}/WHEEL +0 -0
- {starbash-0.1.3.dist-info → starbash-0.1.4.dist-info}/entry_points.txt +0 -0
- {starbash-0.1.3.dist-info → starbash-0.1.4.dist-info}/licenses/LICENSE +0 -0
starbash/commands/user.py
CHANGED

@@ -3,6 +3,7 @@ from typing_extensions import Annotated
 
 from starbash.app import Starbash
 from starbash import console
+from rich.panel import Panel
 
 app = typer.Typer()
 
@@ -19,9 +20,9 @@ def analytics(
     """
     Enable or disable analytics (crash reports and usage data).
     """
-    with Starbash("analytics
+    with Starbash("analytics.change") as sb:
         sb.analytics.set_data("analytics.enabled", enable)
-        sb.user_repo.
+        sb.user_repo.set("analytics.enabled", enable)
         sb.user_repo.write_config()
         status = "enabled" if enable else "disabled"
         console.print(f"Analytics (crash reports) {status}.")
@@ -39,8 +40,8 @@ def name(
     """
     Set your name for attribution in generated images.
     """
-    with Starbash("user
-        sb.user_repo.
+    with Starbash("user.name") as sb:
+        sb.user_repo.set("user.name", user_name)
         sb.user_repo.write_config()
         console.print(f"User name set to: {user_name}")
 
@@ -57,7 +58,91 @@ def email(
     """
     Set your email for attribution in generated images.
     """
-    with Starbash("user
-        sb.user_repo.
+    with Starbash("user.email") as sb:
+        sb.user_repo.set("user.email", user_email)
         sb.user_repo.write_config()
         console.print(f"User email set to: {user_email}")
+
+
+def do_reinit(sb: Starbash) -> None:
+    console.print()
+    console.print(
+        Panel.fit(
+            "[bold cyan]Starbash getting started...[/bold cyan]\n\n"
+            "Let's set up your preferences. You can skip any question by pressing Enter.",
+            border_style="cyan",
+        )
+    )
+    console.print()
+
+    # Ask for username
+    user_name = typer.prompt(
+        "Enter your name (for attribution in generated images)",
+        default="",
+        show_default=False,
+    )
+    sb.analytics.set_data("analytics.use_name", user_name != "")
+    if user_name:
+        sb.user_repo.set("user.name", user_name)
+        console.print(f"✅ Name set to: {user_name}")
+    else:
+        console.print("[dim]Skipped name[/dim]")
+
+    # Ask for email
+    user_email = typer.prompt(
+        "Enter your email address (for attribution in generated images)",
+        default="",
+        show_default=False,
+    )
+    sb.analytics.set_data("analytics.use_email", user_email != "")
+    if user_email:
+        sb.user_repo.set("user.email", user_email)
+        console.print(f"✅ Email set to: {user_email}")
+    else:
+        console.print("[dim]Skipped email[/dim]")
+
+    # Ask about including email in crash reports
+    include_in_reports = typer.confirm(
+        "Would you like to include your email address with crash reports/analytics? "
+        "(This helps us follow up if we need more information about issues.)",
+        default=False,
+    )
+    sb.analytics.set_data("analytics.use_email_report", include_in_reports)
+    sb.user_repo.set("analytics.include_user", include_in_reports)
+    if include_in_reports:
+        console.print("✅ Email will be included with crash reports")
+    else:
+        console.print("❌ Email will NOT be included with crash reports")
+    console.print()
+
+    # Save all changes
+    sb.user_repo.write_config()
+
+    console.print(
+        Panel.fit(
+            "[bold green]Configuration complete![/bold green]\n\n"
+            "Your preferences have been saved.",
+            border_style="green",
+        )
+    )
+
+
+@app.command()
+def reinit():
+    """
+    Configure starbash via a brief guided process.
+
+    This will ask you for your name, email, and analytics preferences.
+    You can skip any question by pressing Enter.
+    """
+    with Starbash("user.reinit") as sb:
+        do_reinit(sb)
+
+
+@app.callback(invoke_without_command=True)
+def main_callback(ctx: typer.Context):
+    """Main callback for the Starbash application."""
+    if ctx.invoked_subcommand is None:
+        # No command provided, show help
+        console.print(ctx.get_help())
+        raise typer.Exit()
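The new reinit flow leans on `typer.prompt(..., default="", show_default=False)`: pressing Enter returns an empty string, which the code treats as a skip. A minimal standalone sketch of the same pattern (a toy app for illustration, not starbash's actual module):

import typer

app = typer.Typer()

@app.command()
def reinit():
    # Empty default + hidden default text lets the user skip by pressing Enter.
    name = typer.prompt("Enter your name", default="", show_default=False)
    if name:
        typer.echo(f"Name set to: {name}")
    else:
        typer.echo("Skipped name")

if __name__ == "__main__":
    app()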
starbash/database.py
CHANGED

@@ -293,6 +293,17 @@ class Database:
         result = cursor.fetchone()
         return result[0] if result else 0
 
+    def get_column(self, column_name: str, table_name: str = "sessions") -> list[Any]:
+        """Return all values from a specific column in the specified table."""
+        cursor = self._db.cursor()
+        cursor.execute(f"SELECT {column_name} FROM {table_name}")
+
+        results = []
+        for row in cursor.fetchall():
+            results.append(row[column_name])
+
+        return results
+
     def get_image(self, path: str) -> dict[str, Any] | None:
         """Get an image record by path."""
         cursor = self._db.cursor()
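A usage sketch for the new `get_column` (assuming, as `row[column_name]` implies, that the underlying `sqlite3` connection uses `sqlite3.Row` as its row factory; the `sessions` table and `start` column here are illustrative):

import sqlite3

# Stand-in for Database's internal connection setup.
db = sqlite3.connect(":memory:")
db.row_factory = sqlite3.Row  # makes row["column"] name-indexing work
db.execute("CREATE TABLE sessions (start TEXT)")
db.executemany("INSERT INTO sessions VALUES (?)", [("2025-09-09",), ("2025-09-16",)])

# Equivalent of Database.get_column("start"): one flat list of column values.
cursor = db.cursor()
cursor.execute("SELECT start FROM sessions")
print([row["start"] for row in cursor.fetchall()])  # ['2025-09-09', '2025-09-16']

Note that the column and table names are interpolated directly into the SQL string, so callers should pass only trusted identifiers.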
starbash/defaults/starbash.toml
CHANGED

@@ -18,7 +18,7 @@ kind = "preferences"
 
 # Add our built-in recipes (FIXME, add a "resource" repo type for directories we expect to find inside
 # our python blob)
-
+url = "pkg://recipes"
 
 # [[repo-ref]]
 
@@ -39,7 +39,7 @@ dir = "/workspaces/starbash/doc/toml/example/recipe-repo"
 # or inband?
 
 
-# allow including multiple
+# allow including multiple recipes FIXME old idea, not sure if needed.
 # [[repo-ref]]
 
 # looks for a file with this name and .py for the code and .toml for the config
starbash/main.py
CHANGED

@@ -1,14 +1,12 @@
 import logging
-from datetime import datetime
-from tomlkit import table
 import typer
-from 
+from typing_extensions import Annotated
 
-from starbash.database import Database
 import starbash.url as url
+import starbash
 
-from .app import Starbash
-from .commands import repo,
+from .app import Starbash, get_user_config_path, setup_logging
+from .commands import repo, select, user
 from . import console
 
 app = typer.Typer(
@@ -17,140 +15,34 @@ app = typer.Typer(
 )
 app.add_typer(user.app, name="user", help="Manage user settings.")
 app.add_typer(repo.app, name="repo", help="Manage Starbash repositories.")
-app.add_typer(
-    selection.app, name="selection", help="Manage session and target selection."
-)
+app.add_typer(select.app, name="select", help="Manage session and target selection.")
 
 
 @app.callback(invoke_without_command=True)
-def main_callback(
+def main_callback(
+    ctx: typer.Context,
+    debug: Annotated[
+        bool,
+        typer.Option(
+            "--debug",
+            help="Enable debug logging output.",
+        ),
+    ] = False,
+):
     """Main callback for the Starbash application."""
+    # Set the log level based on --debug flag
+    if debug:
+        starbash.log_filter_level = logging.DEBUG
+
     if ctx.invoked_subcommand is None:
-
-
+        if not get_user_config_path().exists():
+            with Starbash("app.first") as sb:
+                user.do_reinit(sb)
+        else:
+            # No command provided, show help
+            console.print(ctx.get_help())
         raise typer.Exit()
 
 
-def format_duration(seconds: int):
-    """Format seconds as a human-readable duration string."""
-    if seconds < 60:
-        return f"{int(seconds)}s"
-    elif seconds < 120:
-        minutes = int(seconds // 60)
-        secs = int(seconds % 60)
-        return f"{minutes}m {secs}s" if secs else f"{minutes}m"
-    else:
-        hours = int(seconds // 3600)
-        minutes = int((seconds % 3600) // 60)
-        return f"{hours}h {minutes}m" if minutes else f"{hours}h"
-
-
-@app.command()
-def session():
-    """List sessions (filtered based on the current selection)"""
-
-    with Starbash("session") as sb:
-        sessions = sb.search_session()
-        if sessions and isinstance(sessions, list):
-            len_all = sb.db.len_session()
-            table = Table(title=f"Sessions ({len(sessions)} selected out of {len_all})")
-
-            table.add_column("Date", style="cyan", no_wrap=True)
-            table.add_column("# images", style="cyan", no_wrap=True)
-            table.add_column("Time", style="cyan", no_wrap=True)
-            table.add_column("Type/Filter", style="cyan", no_wrap=True)
-            table.add_column("Telescope", style="cyan", no_wrap=True)
-            table.add_column(
-                "About", style="cyan", no_wrap=True
-            )  # type of frames, filter, target
-            # table.add_column("Released", justify="right", style="cyan", no_wrap=True)
-
-            total_images = 0
-            total_seconds = 0.0
-
-            for sess in sessions:
-                date_iso = sess.get(Database.START_KEY, "N/A")
-                # Try to convert ISO UTC datetime to local short date string
-                try:
-                    dt_utc = datetime.fromisoformat(date_iso)
-                    dt_local = dt_utc.astimezone()
-                    date = dt_local.strftime("%Y-%m-%d")
-                except (ValueError, TypeError):
-                    date = date_iso
-
-                object = str(sess.get(Database.OBJECT_KEY, "N/A"))
-                filter = sess.get(Database.FILTER_KEY, "N/A")
-                image_type = str(sess.get(Database.IMAGETYP_KEY, "N/A"))
-                telescop = str(sess.get(Database.TELESCOP_KEY, "N/A"))
-
-                # Format total exposure time as integer seconds
-                exptime_raw = str(sess.get(Database.EXPTIME_TOTAL_KEY, "N/A"))
-                try:
-                    exptime_float = float(exptime_raw)
-                    total_seconds += exptime_float
-                    total_secs = format_duration(int(exptime_float))
-                except (ValueError, TypeError):
-                    total_secs = exptime_raw
-
-                # Count images
-                try:
-                    num_images = int(sess.get(Database.NUM_IMAGES_KEY, 0))
-                    total_images += num_images
-                except (ValueError, TypeError):
-                    num_images = sess.get(Database.NUM_IMAGES_KEY, "N/A")
-
-                type_str = image_type
-                if image_type.upper() == "LIGHT":
-                    image_type = filter
-                elif image_type.upper() == "FLAT":
-                    image_type = f"{image_type}/{filter}"
-                else:  # either bias or dark
-                    object = ""  # Don't show meaningless target
-
-                table.add_row(
-                    date,
-                    str(num_images),
-                    total_secs,
-                    image_type,
-                    telescop,
-                    object,
-                )
-
-            # Add totals row
-            if sessions:
-                table.add_row(
-                    "",
-                    f"[bold]{total_images}[/bold]",
-                    f"[bold]{format_duration(int(total_seconds))}[/bold]",
-                    "",
-                    "",
-                    "",
-                )
-
-            console.print(table)
-
-
-# @app.command(hidden=True)
-# def default_cmd():
-#     """Default entry point for the starbash application."""
-#
-#     with Starbash() as sb:
-
-
-# @app.command(hidden=True)
-# def default_cmd():
-#     """Default entry point for the starbash application."""
-#
-#     with Starbash() as sb:
-#         pass
-#
-#
-# @app.callback(invoke_without_command=True)
-# def _default(ctx: typer.Context):
-#     # If the user didn't specify a subcommand, run the default
-#     if ctx.invoked_subcommand is None:
-#         return default_cmd()
-
-
 if __name__ == "__main__":
     app()
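The rewritten callback uses Typer's `Annotated` option style for a global `--debug` flag handled before any subcommand runs. A self-contained sketch of that pattern (a toy app; `starbash.log_filter_level` is replaced here by plain `logging` configuration):

import logging

import typer
from typing_extensions import Annotated

app = typer.Typer()

@app.callback(invoke_without_command=True)
def main(
    ctx: typer.Context,
    debug: Annotated[bool, typer.Option("--debug", help="Enable debug logging.")] = False,
):
    # Global option: applied before any subcommand executes.
    logging.basicConfig(level=logging.DEBUG if debug else logging.INFO)
    if ctx.invoked_subcommand is None:
        typer.echo(ctx.get_help())
        raise typer.Exit()

@app.command()
def hello():
    logging.debug("debug logging is on")
    typer.echo("hello")

if __name__ == "__main__":
    app()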
starbash/recipes/README.md
ADDED

@@ -0,0 +1,3 @@
+This is what a typical directory of recipes would look like. It could be hosted locally in a directory tree, on GitHub, wherever.
+
+Currently it lives in the starbash python blob, but eventually the 'master' set of recipes will live in a different repo. In fact, different orgs could provide their own recipe repos.
starbash/recipes/__init__.py
File without changes
starbash/recipes/master_bias/starbash.toml
ADDED

@@ -0,0 +1,55 @@
+
+[repo]
+kind = "recipe"
+
+
+[recipe]
+author.name = "FIXMESiril?"
+author.email = "FIXMESiril?"
+
+[[stage]]
+
+description = "Generate master bias"
+disabled = true # FIXME, debugging later stuff
+
+# Restrict processing of this stage to only if detected hardware was found for this session
+# For any camera
+auto.for-camera = []
+
+tool = "siril"
+
+# or auto?
+# find the most recent raw fits for the current instrument (as of the time of session start)
+# input.source = "most-recent" # only look for the most recent set of raws for this particular type
+input.type = "bias" # look in all raw repos, but look only for bias files
+
+# for early development we have support for simple absolute file paths with globs
+input.source = "path"
+input.path = "/workspaces/starbash/images/from_astroboy/masters-raw/2025-09-09/BIAS/*.fit*"
+input.required = true # Is at least one input file required? If true, we will skip running this stage entirely (with a warning)
+
+# make the following also work
+#
+#os.makedirs(os.path.dirname(output), exist_ok=True)
+#os.makedirs(os.path.dirname(process_dir), exist_ok=True)
+#frames = glob(f"{masters_raw}/{date}/BIAS/{date}_*.fit*")
+#siril_run_in_temp_dir(frames, ...
+when = "session-config" # run at the start of each session process
+
+# The following constants are auto defined before running the tool
+# context.process_dir (points to the session specific semi-persistent local dir for that sessions written/read data files)
+# context.masters (FIXME) - need to find this name dynamically by looking for a suitable writable repo
+# context.temp_dir (points to a temporary directory this tool can use for writing)
+
+# Everything in the constants dict will be predefined as named variables for use by the script
+context.date = "2025-09-09" # FIXME - later find auto latest date with bias frames
+context.output = "{masters}/biases/{date}_stacked.fits" # if the output already exists processing will be skipped
+
+script = '''
+# Convert Bias Frames to .fit files
+link bias -out={process_dir}
+cd {process_dir}
+
+# Stack Bias Frames to bias_stacked.fit
+stack bias rej 3 3 -nonorm -out={output}
+'''
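The stage's `context.*` entries read like Python format templates: the comments say placeholders such as `{masters}` and `{date}` are predefined for the script, and processing is skipped when `{output}` already exists. A rough sketch of that expansion logic under those assumptions (`run_stage` is hypothetical, not starbash's implementation):

import os

def run_stage(script: str, context: dict[str, str]) -> None:
    # Hypothetical: expand {name} placeholders in context values, then in the script.
    expanded = {k: v.format_map(context) for k, v in context.items()}
    if os.path.exists(expanded["output"]):
        return  # "if the output already exists processing will be skipped"
    print(script.format_map(expanded))  # hand the expanded script to the tool (siril)

run_stage(
    "stack bias rej 3 3 -nonorm -out={output}",
    {
        "masters": "/images/masters",
        "date": "2025-09-09",
        "output": "{masters}/biases/{date}_stacked.fits",
    },
)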
starbash/recipes/master_flat/starbash.toml
ADDED

@@ -0,0 +1,46 @@
+
+[repo]
+kind = "recipe"
+
+
+[recipe]
+author.name = "FIXMESiril?"
+author.email = "FIXMESiril?"
+
+
+[[stage]]
+
+description = "Generate master flat"
+disabled = true # FIXME, debugging later stuff
+
+# For any camera
+auto.for-camera = []
+
+tool = "siril"
+# input.source = "session" # or auto? prefer ones in session otherwise find by in masters
+input.type = "flat" # look in _session_ directories, but look only for flat files
+
+# FIXME for early development we have support for simple absolute file paths with globs
+input.source = "path"
+input.path = "/workspaces/starbash/images/from_astroboy/M 27/2025-09-16/FLAT/*.fit*"
+input.required = true # Is at least one input file required? If true, we will skip running this stage entirely (with a warning)
+
+when = "session-config" # run once per session-config
+context.output = "{process_dir}/flat_s{sessionid}_c{sessionconfig}.fits"
+
+# FIXME, bias should have been added to context by two previous stages. But for now hardwire
+context.bias = '/workspaces/starbash/images/masters/biases/2025-09-09_stacked.fits'
+
+script = '''
+# Create a sequence from the raw flat frames
+link flat -out={process_dir}
+cd {process_dir}
+
+# Calibrate the flat frames using master bias
+calibrate flat -bias={bias}
+
+# Stack the pre-processed (calibrated) flat frames (writes to flat_stacked.fit)
+stack pp_flat rej 3 3 -norm=mul -out=flat_stacked
+'''
+
+temporaries = ["flat", "pp_flat"]
starbash/recipes/osc_dual_duo/starbash.py
ADDED

@@ -0,0 +1,151 @@
+# pyright: reportUndefinedVariable=false
+# ('context' and 'logger' are injected by the starbash runtime)
+
+import os
+from glob import glob
+from starbash.tool import tools
+
+siril = tools["siril"]
+
+delete_temps = False
+
+
+# FIXME move this into main starbash
+def perhaps_delete_temps(temps: list[str]) -> None:
+    if delete_temps:
+        for t in temps:
+            for path in glob(f"{context['process_dir']}/{t}_*"):
+                os.remove(path)
+
+
+def normalize_target_name(name: str) -> str:
+    """Converts a target name to a filesystem-safe format by removing spaces"""
+    return name.replace(" ", "").upper()
+
+
+def make_stacked(sessionconfig: str, variant: str, output_file: str):
+    """
+    Registers and stacks all pre-processed light frames for a given filter configuration
+    across all sessions.
+    """
+    # The sequence name for all frames of this variant across all sessions
+    # e.g. Ha_bkg_pp_light_cHaOiii
+    merged_seq_base = f"all_{variant}_bkg_pp_light"
+
+    # Absolute path for the output stacked file
+    stacked_output_path = glob(f"{context["process_dir"]}/{output_file}.fit*")
+
+    if stacked_output_path:
+        logger.info(f"Using existing stacked file: {stacked_output_path}")
+    else:
+        # Merge all frames (from multiple sessions and configs) and use those for stacking
+        frames = glob(
+            f"{context["process_dir"]}/{variant}_bkg_pp_light_s*_c{sessionconfig}_*.fit*"
+        )
+
+        logger.info(
+            f"Registering and stacking {len(frames)} frames for {sessionconfig}/{variant} -> {stacked_output_path}"
+        )
+
+        # Siril commands for registration and stacking. We run this in process_dir.
+        commands = f"""
+link {merged_seq_base} -out={context["process_dir"]}
+cd {context["process_dir"]}
+
+register {merged_seq_base}
+stack r_{merged_seq_base} rej g 0.3 0.05 -filter-wfwhm=3k -norm=addscale -output_norm -32b -out={output_file}
+
+# and flip if required
+mirrorx_single {output_file}
+"""
+
+        context["input_files"] = frames
+        siril.run_in_temp_dir(commands, context=context)
+
+        perhaps_delete_temps([merged_seq_base, f"r_{merged_seq_base}"])
+
+
+def make_renormalize():
+    """
+    Aligns the stacked images (Sii, Ha, OIII) and renormalizes Sii and OIII
+    to match the flux of the Ha channel.
+    """
+    logger.info("Aligning and renormalizing stacked images.")
+
+    # Define file basenames for the stacked images created in the 'process' directory
+    ha_base = "results_00001"
+    oiii_base = "results_00002"
+    sii_base = "results_00003"
+
+    # Define final output paths. The 'results' directory is a symlink in the work dir.
+    results_dir = f"{context["targets"]}/{normalize_target_name(context["target"])}"
+    os.makedirs(results_dir, exist_ok=True)
+
+    ha_final_path = f"{results_dir}/stacked_Ha.fits"
+    oiii_final_path = f"{results_dir}/stacked_OIII.fits"
+
+    # Check if final files already exist to allow resuming
+    if all(os.path.exists(f) for f in [ha_final_path, oiii_final_path]):
+        logger.info("Renormalized files already exist, skipping.")
+        return
+
+    # Basenames for registered files (output of 'register' command)
+    r_ha = f"r_{ha_base}"
+    r_oiii = f"r_{oiii_base}"
+
+    # Pixel math formula for renormalization.
+    # It matches the median and spread (MAD) of a channel to a reference channel (Ha).
+    # Formula: new = old * (MAD(ref)/MAD(old)) - (MAD(ref)/MAD(old)) * MEDIAN(old) + MEDIAN(ref)
+    pm_oiii = f'"${r_oiii}$*mad(${r_ha}$)/mad(${r_oiii}$)-mad(${r_ha}$)/mad(${r_oiii}$)*median(${r_oiii}$)+median(${r_ha}$)"'
+
+    # Siril commands to be executed in the 'process' directory
+    commands = f"""
+# -transf=shift fails sometimes, which I guess is possible because we have multiple sessions with possible different camera rotation
+# -interp=none also fails sometimes, so let default interp happen
+register results
+pm {pm_oiii}
+update_key FILTER Oiii "OSC dual Duo filter extracted"
+save "{oiii_final_path}"
+load {r_ha}
+update_key FILTER Ha "OSC dual Duo filter extracted"
+save "{ha_final_path}"
+"""
+
+    if os.path.exists(f"{results_dir}/{sii_base}.fit"):
+        logger.info(f"Doing renormalisation of extra Sii channel")
+
+        sii_final_path = f"{results_dir}/stacked_Sii.fits"
+        r_sii = f"r_{sii_base}"
+        pm_sii = f'"${r_sii}$*mad(${r_ha}$)/mad(${r_sii}$)-mad(${r_ha}$)/mad(${r_sii}$)*median(${r_sii}$)+median(${r_ha}$)"'
+        commands += f"""
+pm {pm_sii}
+update_key FILTER Sii "OSC dual Duo filter extracted"
+save "{sii_final_path}"
+"""
+
+    siril.run(context["process_dir"], commands, context=context)
+    logger.info(f"Saved final renormalized images to {results_dir}")
+
+
+def osc_dual_duo_post_session():
+    logger.info("Running osc_dual_duo_post_session python script")
+    logger.info("Using context: %s", context)
+
+    # red output channel - from the SiiOiii filter Sii is on the 672nm red channel (mistakenly called Ha by siril)
+    make_stacked("SiiOiii", "Ha", f"results_00001")
+
+    # green output channel - from the HaOiii filter Ha is on the 656nm red channel
+    make_stacked("HaOiii", "Ha", f"results_00001")
+
+    # blue output channel - both filters have Oiii on the 500nm blue channel. Note the case here is uppercase to match siril output
+    make_stacked("*", "OIII", f"results_00002")
+
+    # There might be an old/stale autogenerated .seq file, delete it so it doesn't confuse renormalize
+    results_seq_path = f"{context["process_dir"]}/results_.seq"
+    if os.path.exists(results_seq_path):
+        os.remove(results_seq_path)
+
+    make_renormalize()
+
+
+osc_dual_duo_post_session()
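The pixel-math string in `make_renormalize` is a linear transform that matches a channel's median and MAD (median absolute deviation) to the Ha reference. The same formula expressed in NumPy, as an illustration on synthetic arrays rather than Siril image files:

import numpy as np

def mad(a: np.ndarray) -> float:
    """Median absolute deviation, the spread measure Siril's mad() computes."""
    return float(np.median(np.abs(a - np.median(a))))

def renormalize(channel: np.ndarray, ref: np.ndarray) -> np.ndarray:
    # new = old * (MAD(ref)/MAD(old)) - (MAD(ref)/MAD(old)) * MEDIAN(old) + MEDIAN(ref)
    scale = mad(ref) / mad(channel)
    return channel * scale - scale * np.median(channel) + np.median(ref)

rng = np.random.default_rng(0)
ha = rng.normal(100.0, 5.0, (64, 64))
oiii = rng.normal(40.0, 2.0, (64, 64))
oiii_matched = renormalize(oiii, ha)
# After matching, the OIII channel has (approximately) Ha's median and MAD.
print(round(float(np.median(oiii_matched)), 1), round(mad(oiii_matched), 2))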