starbash-0.1.1-py3-none-any.whl → starbash-0.1.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of starbash might be problematic.

starbash/database.py CHANGED
@@ -15,6 +15,9 @@ class Database:
 
     Stores data under the OS-specific user data directory using platformdirs.
    Provides an `images` table for FITS metadata and basic helpers.
+
+   The images table stores DATE-OBS and DATE as indexed SQL columns for
+   efficient date-based queries, while other FITS metadata is stored in JSON.
    """
 
    EXPTIME_KEY = "EXPTIME"
@@ -24,9 +27,11 @@ class Database:
    NUM_IMAGES_KEY = "num-images"
    EXPTIME_TOTAL_KEY = "exptime-total"
    DATE_OBS_KEY = "DATE-OBS"
+   DATE_KEY = "DATE"
    IMAGE_DOC_KEY = "image-doc"
    IMAGETYP_KEY = "IMAGETYP"
    OBJECT_KEY = "OBJECT"
+   TELESCOP_KEY = "TELESCOP"
 
    def __init__(
        self,
@@ -52,12 +57,14 @@ class Database:
        """Create the images and sessions tables if they don't exist."""
        cursor = self._db.cursor()
 
-       # Create images table
+       # Create images table with DATE-OBS and DATE as indexed columns
        cursor.execute(
            """
            CREATE TABLE IF NOT EXISTS images (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                path TEXT UNIQUE NOT NULL,
+               date_obs TEXT,
+               date TEXT,
                metadata TEXT NOT NULL
            )
            """
@@ -70,6 +77,20 @@ class Database:
            """
        )
 
+       # Create index on date_obs for efficient date range queries
+       cursor.execute(
+           """
+           CREATE INDEX IF NOT EXISTS idx_images_date_obs ON images(date_obs)
+           """
+       )
+
+       # Create index on date for queries using DATE field
+       cursor.execute(
+           """
+           CREATE INDEX IF NOT EXISTS idx_images_date ON images(date)
+           """
+       )
+
        # Create sessions table
        cursor.execute(
            """
@@ -80,6 +101,7 @@ class Database:
                filter TEXT NOT NULL,
                imagetyp TEXT NOT NULL,
                object TEXT NOT NULL,
+               telescop TEXT NOT NULL,
                num_images INTEGER NOT NULL,
                exptime_total REAL NOT NULL,
                image_doc_id INTEGER
@@ -91,7 +113,7 @@ class Database:
        cursor.execute(
            """
            CREATE INDEX IF NOT EXISTS idx_sessions_lookup
-           ON sessions(filter, imagetyp, object, start, end)
+           ON sessions(filter, imagetyp, object, telescop, start, end)
            """
        )
 
@@ -102,23 +124,31 @@ class Database:
        """Insert or update an image record by unique path.
 
        The record must include a 'path' key; other keys are arbitrary FITS metadata.
+       DATE-OBS and DATE are extracted and stored as indexed columns for efficient queries.
        Returns the rowid of the inserted/updated record.
        """
        path = record.get("path")
        if not path:
            raise ValueError("record must include 'path'")
 
-       # Separate path from metadata
+       # Extract date fields for column storage
+       date_obs = record.get(self.DATE_OBS_KEY)
+       date = record.get(self.DATE_KEY)
+
+       # Separate path and date fields from metadata
        metadata = {k: v for k, v in record.items() if k != "path"}
        metadata_json = json.dumps(metadata)
 
        cursor = self._db.cursor()
        cursor.execute(
            """
-           INSERT INTO images (path, metadata) VALUES (?, ?)
-           ON CONFLICT(path) DO UPDATE SET metadata = excluded.metadata
+           INSERT INTO images (path, date_obs, date, metadata) VALUES (?, ?, ?, ?)
+           ON CONFLICT(path) DO UPDATE SET
+               date_obs = excluded.date_obs,
+               date = excluded.date,
+               metadata = excluded.metadata
            """,
-           (path, metadata_json),
+           (path, date_obs, date, metadata_json),
        )
 
        self._db.commit()
@@ -134,13 +164,38 @@ class Database:
        """Search for images matching the given conditions.
 
        Args:
-           conditions: Dictionary of metadata key-value pairs to match
+           conditions: Dictionary of metadata key-value pairs to match.
+               Special keys:
+               - 'date_start': Filter images with DATE-OBS >= this date
+               - 'date_end': Filter images with DATE-OBS <= this date
 
        Returns:
            List of matching image records or None if no matches
        """
+       # Extract special date filter keys (make a copy to avoid modifying caller's dict)
+       conditions_copy = dict(conditions)
+       date_start = conditions_copy.pop("date_start", None)
+       date_end = conditions_copy.pop("date_end", None)
+
+       # Build SQL query with WHERE clauses for date filtering
+       where_clauses = []
+       params = []
+
+       if date_start:
+           where_clauses.append("date_obs >= ?")
+           params.append(date_start)
+
+       if date_end:
+           where_clauses.append("date_obs <= ?")
+           params.append(date_end)
+
+       # Build the query
+       query = "SELECT id, path, date_obs, date, metadata FROM images"
+       if where_clauses:
+           query += " WHERE " + " AND ".join(where_clauses)
+
        cursor = self._db.cursor()
-       cursor.execute("SELECT id, path, metadata FROM images")
+       cursor.execute(query, params)
 
        results = []
        for row in cursor.fetchall():
@@ -148,8 +203,15 @@ class Database:
            metadata["path"] = row["path"]
            metadata["id"] = row["id"]
 
-           # Check if all conditions match
-           match = all(metadata.get(k) == v for k, v in conditions.items())
+           # Add date fields back to metadata for compatibility
+           if row["date_obs"]:
+               metadata[self.DATE_OBS_KEY] = row["date_obs"]
+           if row["date"]:
+               metadata[self.DATE_KEY] = row["date"]
+
+           # Check if remaining conditions match (those stored in JSON metadata)
+           match = all(metadata.get(k) == v for k, v in conditions_copy.items())
+
            if match:
                results.append(metadata)
 
@@ -175,7 +237,7 @@ class Database:
        cursor = self._db.cursor()
        cursor.execute(
            """
-           SELECT id, start, end, filter, imagetyp, object,
+           SELECT id, start, end, filter, imagetyp, object, telescop,
                   num_images, exptime_total, image_doc_id
            FROM sessions
            """
@@ -201,6 +263,7 @@ class Database:
                self.FILTER_KEY: row["filter"],
                self.IMAGETYP_KEY: row["imagetyp"],
                self.OBJECT_KEY: row["object"],
+               self.TELESCOP_KEY: row["telescop"],
                self.NUM_IMAGES_KEY: row["num_images"],
                self.EXPTIME_TOTAL_KEY: row["exptime_total"],
                self.IMAGE_DOC_KEY: row["image_doc_id"],
@@ -230,10 +293,24 @@ class Database:
        result = cursor.fetchone()
        return result[0] if result else 0
 
+   def get_column(self, column_name: str, table_name: str = "sessions") -> list[Any]:
+       """Return all values from a specific column in the specified table."""
+       cursor = self._db.cursor()
+       cursor.execute(f"SELECT {column_name} FROM {table_name}")
+
+       results = []
+       for row in cursor.fetchall():
+           results.append(row[column_name])
+
+       return results
+
    def get_image(self, path: str) -> dict[str, Any] | None:
        """Get an image record by path."""
        cursor = self._db.cursor()
-       cursor.execute("SELECT id, path, metadata FROM images WHERE path = ?", (path,))
+       cursor.execute(
+           "SELECT id, path, date_obs, date, metadata FROM images WHERE path = ?",
+           (path,),
+       )
        row = cursor.fetchone()
 
        if row is None:
@@ -242,18 +319,32 @@ class Database:
        metadata = json.loads(row["metadata"])
        metadata["path"] = row["path"]
        metadata["id"] = row["id"]
+
+       # Add date fields back to metadata for compatibility
+       if row["date_obs"]:
+           metadata[self.DATE_OBS_KEY] = row["date_obs"]
+       if row["date"]:
+           metadata[self.DATE_KEY] = row["date"]
+
        return metadata
 
    def all_images(self) -> list[dict[str, Any]]:
        """Return all image records."""
        cursor = self._db.cursor()
-       cursor.execute("SELECT id, path, metadata FROM images")
+       cursor.execute("SELECT id, path, date_obs, date, metadata FROM images")
 
        results = []
        for row in cursor.fetchall():
            metadata = json.loads(row["metadata"])
            metadata["path"] = row["path"]
            metadata["id"] = row["id"]
+
+           # Add date fields back to metadata for compatibility
+           if row["date_obs"]:
+               metadata[self.DATE_OBS_KEY] = row["date_obs"]
+           if row["date"]:
+               metadata[self.DATE_KEY] = row["date"]
+
            results.append(metadata)
 
        return results
@@ -263,7 +354,7 @@ class Database:
        cursor = self._db.cursor()
        cursor.execute(
            """
-           SELECT id, start, end, filter, imagetyp, object,
+           SELECT id, start, end, filter, imagetyp, object, telescop,
                   num_images, exptime_total, image_doc_id
            FROM sessions
            """
@@ -278,6 +369,7 @@ class Database:
                self.FILTER_KEY: row["filter"],
                self.IMAGETYP_KEY: row["imagetyp"],
                self.OBJECT_KEY: row["object"],
+               self.TELESCOP_KEY: row["telescop"],
                self.NUM_IMAGES_KEY: row["num_images"],
                self.EXPTIME_TOTAL_KEY: row["exptime_total"],
                self.IMAGE_DOC_KEY: row["image_doc_id"],
@@ -286,10 +378,47 @@ class Database:
 
        return results
 
+   def get_session_by_id(self, session_id: int) -> dict[str, Any] | None:
+       """Get a session record by its ID.
+
+       Args:
+           session_id: The database ID of the session
+
+       Returns:
+           Session record dictionary or None if not found
+       """
+       cursor = self._db.cursor()
+       cursor.execute(
+           """
+           SELECT id, start, end, filter, imagetyp, object, telescop,
+                  num_images, exptime_total, image_doc_id
+           FROM sessions
+           WHERE id = ?
+           """,
+           (session_id,),
+       )
+
+       row = cursor.fetchone()
+       if row is None:
+           return None
+
+       return {
+           "id": row["id"],
+           self.START_KEY: row["start"],
+           self.END_KEY: row["end"],
+           self.FILTER_KEY: row["filter"],
+           self.IMAGETYP_KEY: row["imagetyp"],
+           self.OBJECT_KEY: row["object"],
+           self.TELESCOP_KEY: row["telescop"],
+           self.NUM_IMAGES_KEY: row["num_images"],
+           self.EXPTIME_TOTAL_KEY: row["exptime_total"],
+           self.IMAGE_DOC_KEY: row["image_doc_id"],
+       }
+
    def get_session(self, to_find: dict[str, str]) -> dict[str, Any] | None:
        """Find a session matching the given criteria.
 
-       Searches for sessions with the same filter, image type, and target
+       Searches for sessions with the same filter, image type, target, and telescope
        whose start time is within +/- 8 hours of the provided date.
        """
        date = to_find.get(Database.START_KEY)
@@ -300,6 +429,7 @@ class Database:
        assert filter
        target = to_find.get(Database.OBJECT_KEY)
        assert target
+       telescop = to_find.get(Database.TELESCOP_KEY, "unspecified")
 
        # Convert the provided ISO8601 date string to a datetime, then
        # search for sessions with the same filter whose start time is
@@ -314,14 +444,14 @@ class Database:
        cursor = self._db.cursor()
        cursor.execute(
            """
-           SELECT id, start, end, filter, imagetyp, object,
+           SELECT id, start, end, filter, imagetyp, object, telescop,
                   num_images, exptime_total, image_doc_id
            FROM sessions
-           WHERE filter = ? AND imagetyp = ? AND object = ?
+           WHERE filter = ? AND imagetyp = ? AND object = ? AND telescop = ?
              AND start >= ? AND start <= ?
            LIMIT 1
            """,
-           (filter, image_type, target, start_min, start_max),
+           (filter, image_type, target, telescop, start_min, start_max),
        )
 
        row = cursor.fetchone()
@@ -335,6 +465,7 @@ class Database:
            self.FILTER_KEY: row["filter"],
            self.IMAGETYP_KEY: row["imagetyp"],
            self.OBJECT_KEY: row["object"],
+           self.TELESCOP_KEY: row["telescop"],
            self.NUM_IMAGES_KEY: row["num_images"],
            self.EXPTIME_TOTAL_KEY: row["exptime_total"],
            self.IMAGE_DOC_KEY: row["image_doc_id"],
@@ -376,8 +507,8 @@ class Database:
        cursor.execute(
            """
            INSERT INTO sessions
-           (start, end, filter, imagetyp, object, num_images, exptime_total, image_doc_id)
-           VALUES (?, ?, ?, ?, ?, ?, ?, ?)
+           (start, end, filter, imagetyp, object, telescop, num_images, exptime_total, image_doc_id)
+           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                new[Database.START_KEY],
@@ -385,6 +516,7 @@ class Database:
                new[Database.FILTER_KEY],
                new[Database.IMAGETYP_KEY],
                new[Database.OBJECT_KEY],
+               new.get(Database.TELESCOP_KEY, "unspecified"),
                new[Database.NUM_IMAGES_KEY],
                new[Database.EXPTIME_TOTAL_KEY],
                new.get(Database.IMAGE_DOC_KEY),
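
The database changes above add DATE-OBS/DATE as indexed columns and make sessions telescope-aware. Below is a minimal usage sketch of the new date filtering; the method names `upsert_image` and `search_image` and the no-argument `Database()` constructor are assumptions (the diff shows the method bodies and docstrings but not their signatures), and the paths and values are illustrative only.

```python
# Sketch only: upsert_image/search_image and Database() defaults are assumed,
# not confirmed by this diff.
from starbash.database import Database

db = Database()

# DATE-OBS and DATE are pulled out of the record and stored in the new indexed
# date_obs/date columns; all other keys remain in the JSON metadata blob.
db.upsert_image(
    {
        "path": "/data/M31/light_0001.fits",          # illustrative path
        Database.DATE_OBS_KEY: "2025-09-09T21:14:03",
        Database.DATE_KEY: "2025-09-10",
        Database.IMAGETYP_KEY: "LIGHT",
        Database.OBJECT_KEY: "M31",
    }
)

# date_start/date_end become SQL WHERE clauses on the indexed date_obs column;
# the remaining keys are still matched against the JSON metadata in Python.
lights = db.search_image(
    {
        "date_start": "2025-09-01",
        "date_end": "2025-09-30",
        Database.IMAGETYP_KEY: "LIGHT",
    }
)
```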
@@ -18,7 +18,7 @@ kind = "preferences"
 
 # Add our built-in recipes (FIXME, add a "resource" repo type for directories we expect to find inside
 # our python blob)
-dir = "/workspaces/starbash/doc/toml/example/recipe-repo"
+url = "pkg://recipes"
 
 # [[repo-ref]]
 
@@ -39,7 +39,7 @@ dir = "/workspaces/starbash/doc/toml/example/recipe-repo"
 # or inband?
 
 
-# allow including multiple recipies FIXME old idea, not sure if needed.
+# allow including multiple recipes FIXME old idea, not sure if needed.
 # [[repo-ref]]
 
 # looks for a file with this name and .py for the code and .toml for the config
@@ -47,35 +47,3 @@ dir = "/workspaces/starbash/doc/toml/example/recipe-repo"
 # location in the sequence as if they were defined here
 #by-file = "process-osc-dual-duo"
 #by-url = "http:..."
-
-[config]
-
-# What fits fields should we store in our DB cache
-fits-whitelist = [
-    "INSTRUME",
-    "FILTER",
-    "TELESCOP",
-    "IMAGETYP",
-    "DATE-OBS",
-    "DATE-LOC",
-    "DATE",
-    "EXPTIME", # Use use this instead of EXPOSURE because it seems like not all apps use EXPOSURE (Siril)
-    "FWHEEL",
-    "OBJECT",
-    "RA", # we ignore the text version OBJCTRA / OBJCTDEC
-    "DEC",
-    "OBJCTROT",
-    "FOCPOS",
-    "SITELAT",
-    "SITELON",
-    "SITEELEV",
-    "NAXIS1",
-    "NAXIS2",
-    "SWCREATE",
-    "XBINNING",
-    "YBINNING",
-    "GAIN",
-    "CCD-TEMP",
-    "SET-TEMP",
-    "AMBTEMP",
-]
starbash/main.py CHANGED
@@ -1,14 +1,12 @@
 import logging
-from datetime import datetime
-from tomlkit import table
 import typer
-from rich.table import Table
+from typing_extensions import Annotated
 
-from starbash.database import Database
 import starbash.url as url
+import starbash
 
-from .app import Starbash
-from .commands import repo, user, selection
+from .app import Starbash, get_user_config_path, setup_logging
+from .commands import repo, select, user
 from . import console
 
 app = typer.Typer(
@@ -17,134 +15,34 @@ app = typer.Typer(
 )
 app.add_typer(user.app, name="user", help="Manage user settings.")
 app.add_typer(repo.app, name="repo", help="Manage Starbash repositories.")
-app.add_typer(
-    selection.app, name="selection", help="Manage session and target selection."
-)
+app.add_typer(select.app, name="select", help="Manage session and target selection.")
 
 
 @app.callback(invoke_without_command=True)
-def main_callback(ctx: typer.Context):
+def main_callback(
+    ctx: typer.Context,
+    debug: Annotated[
+        bool,
+        typer.Option(
+            "--debug",
+            help="Enable debug logging output.",
+        ),
+    ] = False,
+):
     """Main callback for the Starbash application."""
+    # Set the log level based on --debug flag
+    if debug:
+        starbash.log_filter_level = logging.DEBUG
+
     if ctx.invoked_subcommand is None:
-        # No command provided, show help
-        console.print(ctx.get_help())
+        if not get_user_config_path().exists():
+            with Starbash("app.first") as sb:
+                user.do_reinit(sb)
+        else:
+            # No command provided, show help
+            console.print(ctx.get_help())
         raise typer.Exit()
 
 
-def format_duration(seconds: int):
-    """Format seconds as a human-readable duration string."""
-    if seconds < 60:
-        return f"{int(seconds)}s"
-    elif seconds < 120:
-        minutes = int(seconds // 60)
-        secs = int(seconds % 60)
-        return f"{minutes}m {secs}s" if secs else f"{minutes}m"
-    else:
-        hours = int(seconds // 3600)
-        minutes = int((seconds % 3600) // 60)
-        return f"{hours}h {minutes}m" if minutes else f"{hours}h"
-
-
-@app.command()
-def session():
-    """List sessions (filtered based on the current selection)"""
-
-    with Starbash("session") as sb:
-        sessions = sb.search_session()
-        if sessions and isinstance(sessions, list):
-            len_all = sb.db.len_session()
-            table = Table(title=f"Sessions ({len(sessions)} selected out of {len_all})")
-
-            table.add_column("Date", style="cyan", no_wrap=True)
-            table.add_column("# images", style="cyan", no_wrap=True)
-            table.add_column("Time", style="cyan", no_wrap=True)
-            table.add_column("Type/Filter", style="cyan", no_wrap=True)
-            table.add_column(
-                "About", style="cyan", no_wrap=True
-            )  # type of frames, filter, target
-            # table.add_column("Released", justify="right", style="cyan", no_wrap=True)
-
-            total_images = 0
-            total_seconds = 0.0
-
-            for sess in sessions:
-                date_iso = sess.get(Database.START_KEY, "N/A")
-                # Try to cnvert ISO UTC datetime to local short date string
-                try:
-                    dt_utc = datetime.fromisoformat(date_iso)
-                    dt_local = dt_utc.astimezone()
-                    date = dt_local.strftime("%Y-%m-%d")
-                except (ValueError, TypeError):
-                    date = date_iso
-
-                object = str(sess.get(Database.OBJECT_KEY, "N/A"))
-                filter = sess.get(Database.FILTER_KEY, "N/A")
-                image_type = str(sess.get(Database.IMAGETYP_KEY, "N/A"))
-
-                # Format total exposure time as integer seconds
-                exptime_raw = str(sess.get(Database.EXPTIME_TOTAL_KEY, "N/A"))
-                try:
-                    exptime_float = float(exptime_raw)
-                    total_seconds += exptime_float
-                    total_secs = format_duration(int(exptime_float))
-                except (ValueError, TypeError):
-                    total_secs = exptime_raw
-
-                # Count images
-                try:
-                    num_images = int(sess.get(Database.NUM_IMAGES_KEY, 0))
-                    total_images += num_images
-                except (ValueError, TypeError):
-                    num_images = sess.get(Database.NUM_IMAGES_KEY, "N/A")
-
-                type_str = image_type
-                if image_type.upper() == "LIGHT":
-                    image_type = filter
-                if image_type.upper() == "FLAT":
-                    image_type = f"{image_type}/{filter}"
-
-                table.add_row(
-                    date,
-                    str(num_images),
-                    total_secs,
-                    image_type,
-                    object,
-                )
-
-            # Add totals row
-            if sessions:
-                table.add_row(
-                    "",
-                    f"[bold]{total_images}[/bold]",
-                    f"[bold]{format_duration(int(total_seconds))}[/bold]",
-                    "",
-                    "",
-                )
-
-            console.print(table)
-
-
-# @app.command(hidden=True)
-# def default_cmd():
-#     """Default entry point for the starbash application."""
-#
-#     with Starbash() as sb:
-
-
-# @app.command(hidden=True)
-# def default_cmd():
-#     """Default entry point for the starbash application."""
-#
-#     with Starbash() as sb:
-#         pass
-#
-#
-# @app.callback(invoke_without_command=True)
-# def _default(ctx: typer.Context):
-#     # If the user didn’t specify a subcommand, run the default
-#     if ctx.invoked_subcommand is None:
-#         return default_cmd()
-
-
 if __name__ == "__main__":
     app()
@@ -0,0 +1,3 @@
+This is what a typical directory of recipes would look like. it could be hosted locally in a directory tree, on github, whatever.
+
+Currently it lives in the starbash python blob, but eventually the 'master' set of recipes will live in a different repo. In fact, different orgs could provide their own recipe repos.
File without changes
@@ -0,0 +1,55 @@
+
+[repo]
+kind = "recipe"
+
+
+[recipe]
+author.name = "FIXMESiril?"
+author.email = "FIXMESiril?"
+
+[[stage]]
+
+description = "Generate master bias"
+disabled = true # FIXME, debugging later stuff
+
+# Restrict processing of this stage to only if detected hardware was found for this session
+# For any camera
+auto.for-camera = []
+
+tool = "siril"
+
+# or auto?
+# find the most recent raw fits for the current instrument (as of the time of session start)
+# input.source = "most-recent" # only look for the most recent set of raws for this particular type
+input.type = "bias" # look in all raw repos, but look only for bias files
+
+# for early development we have support for simple absolute file paths with globs
+input.source = "path"
+input.path = "/workspaces/starbash/images/from_astroboy/masters-raw/2025-09-09/BIAS/*.fit*"
+input.required = true # Is at least one input file required? If true, we will skip running this stage entirely (with a warning)
+
+# make the following also work
+#
+#os.makedirs(os.path.dirname(output), exist_ok=True)
+#os.makedirs(os.path.dirname(process_dir), exist_ok=True)
+#frames = glob(f"{masters_raw}/{date}/BIAS/{date}_*.fit*")
+#siril_run_in_temp_dir(frames, ...
+when = "session-config" # run at the start of each session process
+
+# The following constants are auto defined before running the tool
+# context.process_dir (points to the session specific semi-persistent local dir for that sessions written/read data files)
+# context.masters (FIXME) - need to find this name dynamically by looking for a suitable writable repo
+# context.temp_dir (points to a temporary directory this tool can use for writing)
+
+# Everything in the constants dict will be predefined as named variables for use by the script
+context.date = "2025-09-09" # FIXME - later find auto latest date with bias frames
+context.output = "{masters}/biases/{date}_stacked.fits" # if the output already exists processing will be skipped
+
+script = '''
+# Convert Bias Frames to .fit files
+link bias -out={process_dir}
+cd {process_dir}
+
+# Stack Bias Frames to bias_stacked.fit
+stack bias rej 3 3 -nonorm -out={output}
+'''
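
The `{name}` placeholders in the stage `script` refer to the stage context described in the comments above (`process_dir`, `masters`, `date`, `output`). A rough sketch of how that substitution could work is shown below, assuming plain `str.format` expansion over the context dict; the directory paths and the expansion mechanism itself are assumptions, not taken from the package.

```python
# Sketch only: starbash may expand the recipe script differently; these paths
# are illustrative and the str.format mechanism is an assumption.
context = {
    "process_dir": "/tmp/starbash/session-2025-09-09/process",
    "masters": "/data/masters",
    "date": "2025-09-09",
}

# context.output itself contains placeholders, so expand it first
context["output"] = "{masters}/biases/{date}_stacked.fits".format(**context)

script = """
# Convert Bias Frames to .fit files
link bias -out={process_dir}
cd {process_dir}

# Stack Bias Frames to bias_stacked.fit
stack bias rej 3 3 -nonorm -out={output}
"""

# The expanded text is what would be handed to the siril tool
print(script.format(**context))
```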