starbash 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of starbash might be problematic.
- starbash/__init__.py +5 -0
- starbash/analytics.py +22 -7
- starbash/app.py +63 -9
- starbash/commands/__init__.py +15 -0
- starbash/commands/info.py +92 -0
- starbash/commands/repo.py +19 -12
- starbash/commands/select.py +313 -0
- starbash/commands/user.py +91 -6
- starbash/database.py +56 -33
- starbash/defaults/starbash.toml +2 -2
- starbash/main.py +26 -133
- starbash/recipes/README.md +3 -0
- starbash/recipes/__init__.py +0 -0
- starbash/recipes/master_bias/starbash.toml +55 -0
- starbash/recipes/master_flat/starbash.toml +46 -0
- starbash/recipes/osc_dual_duo/starbash.py +151 -0
- starbash/recipes/osc_dual_duo/starbash.toml +88 -0
- starbash/recipes/osc_single_duo/starbash.toml +67 -0
- starbash/recipes/starbash.toml +34 -0
- starbash/repo/manager.py +78 -22
- starbash/templates/userconfig.toml +1 -1
- {starbash-0.1.3.dist-info → starbash-0.1.5.dist-info}/METADATA +33 -23
- starbash-0.1.5.dist-info/RECORD +33 -0
- starbash/commands/selection.py +0 -137
- starbash-0.1.3.dist-info/RECORD +0 -24
- {starbash-0.1.3.dist-info → starbash-0.1.5.dist-info}/WHEEL +0 -0
- {starbash-0.1.3.dist-info → starbash-0.1.5.dist-info}/entry_points.txt +0 -0
- {starbash-0.1.3.dist-info → starbash-0.1.5.dist-info}/licenses/LICENSE +0 -0
starbash/commands/user.py
CHANGED
@@ -3,6 +3,7 @@ from typing_extensions import Annotated
 
 from starbash.app import Starbash
 from starbash import console
+from rich.panel import Panel
 
 app = typer.Typer()
 
@@ -19,9 +20,9 @@ def analytics(
     """
     Enable or disable analytics (crash reports and usage data).
     """
-    with Starbash("analytics
+    with Starbash("analytics.change") as sb:
         sb.analytics.set_data("analytics.enabled", enable)
-        sb.user_repo.
+        sb.user_repo.set("analytics.enabled", enable)
         sb.user_repo.write_config()
         status = "enabled" if enable else "disabled"
         console.print(f"Analytics (crash reports) {status}.")
@@ -39,8 +40,8 @@ def name(
     """
     Set your name for attribution in generated images.
     """
-    with Starbash("user
-        sb.user_repo.
+    with Starbash("user.name") as sb:
+        sb.user_repo.set("user.name", user_name)
         sb.user_repo.write_config()
         console.print(f"User name set to: {user_name}")
 
@@ -57,7 +58,91 @@ def email(
     """
     Set your email for attribution in generated images.
     """
-    with Starbash("user
-        sb.user_repo.
+    with Starbash("user.email") as sb:
+        sb.user_repo.set("user.email", user_email)
         sb.user_repo.write_config()
         console.print(f"User email set to: {user_email}")
+
+
+def do_reinit(sb: Starbash) -> None:
+    console.print()
+    console.print(
+        Panel.fit(
+            "[bold cyan]Starbash getting started...[/bold cyan]\n\n"
+            "Let's set up your preferences. You can skip any question by pressing Enter.",
+            border_style="cyan",
+        )
+    )
+    console.print()
+
+    # Ask for username
+    user_name = typer.prompt(
+        "Enter your name (for attribution in generated images)",
+        default="",
+        show_default=False,
+    )
+    sb.analytics.set_data("analytics.use_name", user_name != "")
+    if user_name:
+        sb.user_repo.set("user.name", user_name)
+        console.print(f"✅ Name set to: {user_name}")
+    else:
+        console.print("[dim]Skipped name[/dim]")
+
+    # Ask for email
+    user_email = typer.prompt(
+        "Enter your email address (for attribution in generated images)",
+        default="",
+        show_default=False,
+    )
+    sb.analytics.set_data("analytics.use_email", user_email != "")
+    if user_email:
+        sb.user_repo.set("user.email", user_email)
+        console.print(f"✅ Email set to: {user_email}")
+    else:
+        console.print("[dim]Skipped email[/dim]")
+
+    # Ask about including email in crash reports
+    include_in_reports = typer.confirm(
+        "Would you like to include your email address with crash reports/analytics? "
+        "(This helps us follow up if we need more information about issues.)",
+        default=False,
+    )
+    sb.analytics.set_data("analytics.use_email_report", include_in_reports)
+    sb.user_repo.set("analytics.include_user", include_in_reports)
+    if include_in_reports:
+        console.print("✅ Email will be included with crash reports")
+    else:
+        console.print("❌ Email will NOT be included with crash reports")
+    console.print()
+
+    # Save all changes
+    sb.user_repo.write_config()
+
+    console.print(
+        Panel.fit(
+            "[bold green]Configuration complete![/bold green]\n\n"
+            "Your preferences have been saved.",
+            border_style="green",
+        )
+    )
+
+
+@app.command()
+def reinit():
+    """
+    Configure starbash via a brief guided process.
+
+    This will ask you for your name, email, and analytics preferences.
+    You can skip any question by pressing Enter.
+    """
+    with Starbash("user.reinit") as sb:
+        do_reinit(sb)
+
+
+@app.callback(invoke_without_command=True)
+def main_callback(ctx: typer.Context):
+    """Main callback for the Starbash application."""
+    if ctx.invoked_subcommand is None:
+        # No command provided, show help
+        console.print(ctx.get_help())
+        raise typer.Exit()
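The new guided setup (`do_reinit`) is exposed both as `user reinit` and, as the main.py changes below show, as the first-run path of the top-level callback. A minimal test sketch, not part of the diff, assuming the `user` sub-app is mounted on `starbash.main:app` as shown below and that `Starbash(...)` can initialize in the test environment:

# Hypothetical sketch: drive the reinit prompts via Typer's test runner,
# answering "skip", "skip", and "no" to the three questions.
from typer.testing import CliRunner
from starbash.main import app

runner = CliRunner()

def test_user_reinit_can_be_skipped():
    result = runner.invoke(app, ["user", "reinit"], input="\n\nn\n")
    # Each prompt has a default, so skipping everything should still exit cleanly.
    assert result.exit_code == 0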
starbash/database.py
CHANGED
@@ -33,6 +33,9 @@ class Database:
     OBJECT_KEY = "OBJECT"
     TELESCOP_KEY = "TELESCOP"
 
+    SESSIONS_TABLE = "sessions"
+    IMAGES_TABLE = "images"
+
     def __init__(
         self,
         base_dir: Optional[Path] = None,
@@ -59,8 +62,8 @@ class Database:
 
         # Create images table with DATE-OBS and DATE as indexed columns
         cursor.execute(
-            """
-            CREATE TABLE IF NOT EXISTS
+            f"""
+            CREATE TABLE IF NOT EXISTS {self.IMAGES_TABLE} (
                 id INTEGER PRIMARY KEY AUTOINCREMENT,
                 path TEXT UNIQUE NOT NULL,
                 date_obs TEXT,
@@ -72,29 +75,29 @@ class Database:
 
         # Create index on path for faster lookups
         cursor.execute(
-            """
-            CREATE INDEX IF NOT EXISTS idx_images_path ON
+            f"""
+            CREATE INDEX IF NOT EXISTS idx_images_path ON {self.IMAGES_TABLE}(path)
             """
         )
 
         # Create index on date_obs for efficient date range queries
         cursor.execute(
-            """
-            CREATE INDEX IF NOT EXISTS idx_images_date_obs ON
+            f"""
+            CREATE INDEX IF NOT EXISTS idx_images_date_obs ON {self.IMAGES_TABLE}(date_obs)
             """
         )
 
         # Create index on date for queries using DATE field
         cursor.execute(
-            """
-            CREATE INDEX IF NOT EXISTS idx_images_date ON
+            f"""
+            CREATE INDEX IF NOT EXISTS idx_images_date ON {self.IMAGES_TABLE}(date)
             """
         )
 
         # Create sessions table
         cursor.execute(
-            """
-            CREATE TABLE IF NOT EXISTS
+            f"""
+            CREATE TABLE IF NOT EXISTS {self.SESSIONS_TABLE} (
                 id INTEGER PRIMARY KEY AUTOINCREMENT,
                 start TEXT NOT NULL,
                 end TEXT NOT NULL,
@@ -111,9 +114,9 @@ class Database:
 
         # Create index on session attributes for faster queries
         cursor.execute(
-            """
+            f"""
             CREATE INDEX IF NOT EXISTS idx_sessions_lookup
-            ON
+            ON {self.SESSIONS_TABLE}(filter, imagetyp, object, telescop, start, end)
             """
         )
 
@@ -141,8 +144,8 @@ class Database:
 
         cursor = self._db.cursor()
         cursor.execute(
-            """
-            INSERT INTO
+            f"""
+            INSERT INTO {self.IMAGES_TABLE} (path, date_obs, date, metadata) VALUES (?, ?, ?, ?)
             ON CONFLICT(path) DO UPDATE SET
                 date_obs = excluded.date_obs,
                 date = excluded.date,
@@ -154,7 +157,7 @@ class Database:
         self._db.commit()
 
         # Get the rowid of the inserted/updated record
-        cursor.execute("SELECT id FROM
+        cursor.execute(f"SELECT id FROM {self.IMAGES_TABLE} WHERE path = ?", (path,))
         result = cursor.fetchone()
         if result:
             return result[0]
@@ -190,7 +193,7 @@ class Database:
             params.append(date_end)
 
         # Build the query
-        query = "SELECT id, path, date_obs, date, metadata FROM
+        query = f"SELECT id, path, date_obs, date, metadata FROM {self.IMAGES_TABLE}"
         if where_clauses:
             query += " WHERE " + " AND ".join(where_clauses)
 
@@ -236,10 +239,10 @@ class Database:
 
         cursor = self._db.cursor()
         cursor.execute(
-            """
+            f"""
             SELECT id, start, end, filter, imagetyp, object, telescop,
                    num_images, exptime_total, image_doc_id
-            FROM
+            FROM {self.SESSIONS_TABLE}
             """
         )
 
@@ -286,18 +289,36 @@ class Database:
 
         return results if results else None
 
-    def
-        """Return the total number of
+    def len_table(self, table_name: str) -> int:
+        """Return the total number of rows in the specified table."""
         cursor = self._db.cursor()
-        cursor.execute("SELECT COUNT(*) FROM
+        cursor.execute(f"SELECT COUNT(*) FROM {table_name}")
         result = cursor.fetchone()
         return result[0] if result else 0
 
+    def get_column(self, table_name: str, column_name: str) -> list[Any]:
+        """Return all values from a specific column in the specified table."""
+        cursor = self._db.cursor()
+        cursor.execute(f'SELECT "{column_name}" FROM {table_name}')
+
+        results = []
+        for row in cursor.fetchall():
+            results.append(row[column_name])
+
+        return results
+
+    def sum_column(self, table_name: str, column_name: str) -> float:
+        """Return the SUM of all values in a specific column in the specified table."""
+        cursor = self._db.cursor()
+        cursor.execute(f'SELECT SUM("{column_name}") FROM {table_name}')
+        result = cursor.fetchone()
+        return result[0] if result and result[0] is not None else 0
+
     def get_image(self, path: str) -> dict[str, Any] | None:
         """Get an image record by path."""
         cursor = self._db.cursor()
         cursor.execute(
-            "SELECT id, path, date_obs, date, metadata FROM
+            f"SELECT id, path, date_obs, date, metadata FROM {self.IMAGES_TABLE} WHERE path = ?",
             (path,),
         )
         row = cursor.fetchone()
@@ -320,7 +341,9 @@ class Database:
     def all_images(self) -> list[dict[str, Any]]:
         """Return all image records."""
         cursor = self._db.cursor()
-        cursor.execute(
+        cursor.execute(
+            f"SELECT id, path, date_obs, date, metadata FROM {self.IMAGES_TABLE}"
+        )
 
         results = []
         for row in cursor.fetchall():
@@ -342,10 +365,10 @@ class Database:
         """Return all session records."""
         cursor = self._db.cursor()
         cursor.execute(
-            """
+            f"""
             SELECT id, start, end, filter, imagetyp, object, telescop,
                    num_images, exptime_total, image_doc_id
-            FROM
+            FROM {self.SESSIONS_TABLE}
             """
        )
 
@@ -378,10 +401,10 @@ class Database:
         """
         cursor = self._db.cursor()
         cursor.execute(
-            """
+            f"""
             SELECT id, start, end, filter, imagetyp, object, telescop,
                    num_images, exptime_total, image_doc_id
-            FROM
+            FROM {self.SESSIONS_TABLE}
             WHERE id = ?
             """,
            (session_id,),
@@ -432,10 +455,10 @@ class Database:
         # comparison aligns with chronological ordering for a uniform format.
         cursor = self._db.cursor()
         cursor.execute(
-            """
+            f"""
             SELECT id, start, end, filter, imagetyp, object, telescop,
                    num_images, exptime_total, image_doc_id
-            FROM
+            FROM {self.SESSIONS_TABLE}
             WHERE filter = ? AND imagetyp = ? AND object = ? AND telescop = ?
               AND start >= ? AND start <= ?
             LIMIT 1
@@ -478,8 +501,8 @@ class Database:
             ) + new.get(Database.EXPTIME_TOTAL_KEY, 0)
 
             cursor.execute(
-                """
-                UPDATE
+                f"""
+                UPDATE {self.SESSIONS_TABLE}
                 SET start = ?, end = ?, num_images = ?, exptime_total = ?
                 WHERE id = ?
                 """,
@@ -494,8 +517,8 @@ class Database:
         else:
            # Insert new session
            cursor.execute(
-                """
-                INSERT INTO
+                f"""
+                INSERT INTO {self.SESSIONS_TABLE}
                 (start, end, filter, imagetyp, object, telescop, num_images, exptime_total, image_doc_id)
                 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                 """,
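The table names are now class constants and three generic helpers (`len_table`, `get_column`, `sum_column`) take a table name instead of being hard-wired to one table. A minimal usage sketch, not part of the diff, assuming a `Database` can be opened with its default `base_dir`:

# Hypothetical use of the Database helpers added in 0.1.5.
from starbash.database import Database

db = Database()  # assumption: the default base_dir points at the usual app data dir

num_images = db.len_table(Database.IMAGES_TABLE)
num_sessions = db.len_table(Database.SESSIONS_TABLE)

# Column-level helpers: every image path, and total integration time across sessions.
paths = db.get_column(Database.IMAGES_TABLE, "path")
total_exptime = db.sum_column(Database.SESSIONS_TABLE, "exptime_total")

print(f"{num_images} images in {num_sessions} sessions, {total_exptime:.0f}s total exposure")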
starbash/defaults/starbash.toml
CHANGED
@@ -18,7 +18,7 @@ kind = "preferences"
 
 # Add our built-in recipes (FIXME, add a "resource" repo type for directories we expect to find inside
 # our python blob)
-
+url = "pkg://recipes"
 
 # [[repo-ref]]
 
@@ -39,7 +39,7 @@ dir = "/workspaces/starbash/doc/toml/example/recipe-repo"
 # or inband?
 
 
-# allow including multiple
+# allow including multiple recipes FIXME old idea, not sure if needed.
 # [[repo-ref]]
 
 # looks for a file with this name and .py for the code and .toml for the config
starbash/main.py
CHANGED
@@ -1,14 +1,12 @@
 import logging
-from datetime import datetime
-from tomlkit import table
 import typer
-from
+from typing_extensions import Annotated
 
-from starbash.database import Database
 import starbash.url as url
+import starbash
 
-from .app import Starbash
-from .commands import repo,
+from .app import Starbash, get_user_config_path, setup_logging
+from .commands import info, repo, select, user
 from . import console
 
 app = typer.Typer(
@@ -17,140 +15,35 @@ app = typer.Typer(
 )
 app.add_typer(user.app, name="user", help="Manage user settings.")
 app.add_typer(repo.app, name="repo", help="Manage Starbash repositories.")
-app.add_typer(
-
-)
+app.add_typer(select.app, name="select", help="Manage session and target selection.")
+app.add_typer(info.app, name="info", help="Display system and data information.")
 
 
 @app.callback(invoke_without_command=True)
-def main_callback(
+def main_callback(
+    ctx: typer.Context,
+    debug: Annotated[
+        bool,
+        typer.Option(
+            "--debug",
+            help="Enable debug logging output.",
+        ),
+    ] = False,
+):
     """Main callback for the Starbash application."""
+    # Set the log level based on --debug flag
+    if debug:
+        starbash.log_filter_level = logging.DEBUG
+
     if ctx.invoked_subcommand is None:
-
-
+        if not get_user_config_path().exists():
+            with Starbash("app.first") as sb:
+                user.do_reinit(sb)
+        else:
+            # No command provided, show help
+            console.print(ctx.get_help())
         raise typer.Exit()
 
 
-def format_duration(seconds: int):
-    """Format seconds as a human-readable duration string."""
-    if seconds < 60:
-        return f"{int(seconds)}s"
-    elif seconds < 120:
-        minutes = int(seconds // 60)
-        secs = int(seconds % 60)
-        return f"{minutes}m {secs}s" if secs else f"{minutes}m"
-    else:
-        hours = int(seconds // 3600)
-        minutes = int((seconds % 3600) // 60)
-        return f"{hours}h {minutes}m" if minutes else f"{hours}h"
-
-
-@app.command()
-def session():
-    """List sessions (filtered based on the current selection)"""
-
-    with Starbash("session") as sb:
-        sessions = sb.search_session()
-        if sessions and isinstance(sessions, list):
-            len_all = sb.db.len_session()
-            table = Table(title=f"Sessions ({len(sessions)} selected out of {len_all})")
-
-            table.add_column("Date", style="cyan", no_wrap=True)
-            table.add_column("# images", style="cyan", no_wrap=True)
-            table.add_column("Time", style="cyan", no_wrap=True)
-            table.add_column("Type/Filter", style="cyan", no_wrap=True)
-            table.add_column("Telescope", style="cyan", no_wrap=True)
-            table.add_column(
-                "About", style="cyan", no_wrap=True
-            )  # type of frames, filter, target
-            # table.add_column("Released", justify="right", style="cyan", no_wrap=True)
-
-            total_images = 0
-            total_seconds = 0.0
-
-            for sess in sessions:
-                date_iso = sess.get(Database.START_KEY, "N/A")
-                # Try to cnvert ISO UTC datetime to local short date string
-                try:
-                    dt_utc = datetime.fromisoformat(date_iso)
-                    dt_local = dt_utc.astimezone()
-                    date = dt_local.strftime("%Y-%m-%d")
-                except (ValueError, TypeError):
-                    date = date_iso
-
-                object = str(sess.get(Database.OBJECT_KEY, "N/A"))
-                filter = sess.get(Database.FILTER_KEY, "N/A")
-                image_type = str(sess.get(Database.IMAGETYP_KEY, "N/A"))
-                telescop = str(sess.get(Database.TELESCOP_KEY, "N/A"))
-
-                # Format total exposure time as integer seconds
-                exptime_raw = str(sess.get(Database.EXPTIME_TOTAL_KEY, "N/A"))
-                try:
-                    exptime_float = float(exptime_raw)
-                    total_seconds += exptime_float
-                    total_secs = format_duration(int(exptime_float))
-                except (ValueError, TypeError):
-                    total_secs = exptime_raw
-
-                # Count images
-                try:
-                    num_images = int(sess.get(Database.NUM_IMAGES_KEY, 0))
-                    total_images += num_images
-                except (ValueError, TypeError):
-                    num_images = sess.get(Database.NUM_IMAGES_KEY, "N/A")
-
-                type_str = image_type
-                if image_type.upper() == "LIGHT":
-                    image_type = filter
-                elif image_type.upper() == "FLAT":
-                    image_type = f"{image_type}/{filter}"
-                else:  # either bias or dark
-                    object = ""  # Don't show meaningless target
-
-                table.add_row(
-                    date,
-                    str(num_images),
-                    total_secs,
-                    image_type,
-                    telescop,
-                    object,
-                )
-
-            # Add totals row
-            if sessions:
-                table.add_row(
-                    "",
-                    f"[bold]{total_images}[/bold]",
-                    f"[bold]{format_duration(int(total_seconds))}[/bold]",
-                    "",
-                    "",
-                    "",
-                )
-
-            console.print(table)
-
-
-# @app.command(hidden=True)
-# def default_cmd():
-#     """Default entry point for the starbash application."""
-#
-#     with Starbash() as sb:
-
-
-# @app.command(hidden=True)
-# def default_cmd():
-#     """Default entry point for the starbash application."""
-#
-#     with Starbash() as sb:
-#         pass
-#
-#
-# @app.callback(invoke_without_command=True)
-# def _default(ctx: typer.Context):
-#     # If the user didn’t specify a subcommand, run the default
-#     if ctx.invoked_subcommand is None:
-#         return default_cmd()
-
-
 if __name__ == "__main__":
     app()
starbash/recipes/README.md
ADDED
@@ -0,0 +1,3 @@
+This is what a typical directory of recipes would look like. It could be hosted locally in a directory tree, on GitHub, whatever.
+
+Currently it lives in the starbash python blob, but eventually the 'master' set of recipes will live in a different repo. In fact, different orgs could provide their own recipe repos.
starbash/recipes/__init__.py
ADDED
File without changes (new empty file)
starbash/recipes/master_bias/starbash.toml
ADDED
@@ -0,0 +1,55 @@
+
+[repo]
+kind = "recipe"
+
+
+[recipe]
+author.name = "FIXMESiril?"
+author.email = "FIXMESiril?"
+
+[[stage]]
+
+description = "Generate master bias"
+disabled = true # FIXME, debugging later stuff
+
+# Restrict processing of this stage to only if detected hardware was found for this session
+# For any camera
+auto.for-camera = []
+
+tool = "siril"
+
+# or auto?
+# find the most recent raw fits for the current instrument (as of the time of session start)
+# input.source = "most-recent" # only look for the most recent set of raws for this particular type
+input.type = "bias" # look in all raw repos, but look only for bias files
+
+# for early development we have support for simple absolute file paths with globs
+input.source = "path"
+input.path = "/workspaces/starbash/images/from_astroboy/masters-raw/2025-09-09/BIAS/*.fit*"
+input.required = true # Is at least one input file required? If true, we will skip running this stage entirely (with a warning)
+
+# make the following also work
+#
+#os.makedirs(os.path.dirname(output), exist_ok=True)
+#os.makedirs(os.path.dirname(process_dir), exist_ok=True)
+#frames = glob(f"{masters_raw}/{date}/BIAS/{date}_*.fit*")
+#siril_run_in_temp_dir(frames, ...
+when = "session-config" # run at the start of each session process
+
+# The following constants are auto defined before running the tool
+# context.process_dir (points to the session specific semi-persistent local dir for that sessions written/read data files)
+# context.masters (FIXME) - need to find this name dynamically by looking for a suitable writable repo
+# context.temp_dir (points to a temporary directory this tool can use for writing)
+
+# Everything in the constants dict will be predefined as named variables for use by the script
+context.date = "2025-09-09" # FIXME - later find auto latest date with bias frames
+context.output = "{masters}/biases/{date}_stacked.fits" # if the output already exists processing will be skipped
+
+script = '''
+# Convert Bias Frames to .fit files
+link bias -out={process_dir}
+cd {process_dir}
+
+# Stack Bias Frames to bias_stacked.fit
+stack bias rej 3 3 -nonorm -out={output}
+'''
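The `context.*` keys and the `{placeholder}` fields in `script` behave like a small template: constants are defined before the tool runs and then substituted into the Siril script. A minimal sketch, not part of the package, of how that expansion could work; the real runner lives inside starbash, and the directory values below are illustrative assumptions:

# Hypothetical expansion of a recipe stage's context into its script.
context = {
    "masters": "/workspaces/starbash/images/masters",  # assumed writable masters repo
    "date": "2025-09-09",
    "process_dir": "/tmp/starbash/process",            # assumed per-session work dir
}
# The output path itself contains placeholders, so expand it first.
context["output"] = "{masters}/biases/{date}_stacked.fits".format(**context)

script = (
    "link bias -out={process_dir}\n"
    "cd {process_dir}\n"
    "stack bias rej 3 3 -nonorm -out={output}\n"
).format(**context)
print(script)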
starbash/recipes/master_flat/starbash.toml
ADDED
@@ -0,0 +1,46 @@
+
+[repo]
+kind = "recipe"
+
+
+[recipe]
+author.name = "FIXMESiril?"
+author.email = "FIXMESiril?"
+
+
+[[stage]]
+
+description = "Generate master flat"
+disabled = true # FIXME, debugging later stuff
+
+# For any camera
+auto.for-camera = []
+
+tool = "siril"
+# input.source = "session" # or auto? prefer ones in session otherwise find by in masters
+input.type = "flat" # look in _session_ directories, but look only for flat files
+
+# FIXME for early development we have support for simple absolute file paths with globs
+input.source = "path"
+input.path = "/workspaces/starbash/images/from_astroboy/M 27/2025-09-16/FLAT/*.fit*"
+input.required = true # Is at least one input file required? If true, we will skip running this stage entirely (with a warning)
+
+when = "session-config" # run once per session-config
+context.output = "{process_dir}/flat_s{sessionid}_c{sessionconfig}.fits"
+
+# FIXME, bias should have been added to context by two previous stages. But for now hardwire
+context.bias = '/workspaces/starbash/images/masters/biases/2025-09-09_stacked.fits'
+
+script = '''
+# Create a sequence from the raw flat frames
+link flat -out={process_dir}
+cd {process_dir}
+
+# Calibrate the flat frames using master bias
+calibrate flat -bias={bias}
+
+# Stack the pre-processed (calibrated) flat frames (writes to flat_stacked.fit)
+stack pp_flat rej 3 3 -norm=mul -out=flat_stacked
+'''
+
+temporaries = ["flat", "pp_flat"]