starbash 0.1.8__py3-none-any.whl → 0.1.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
starbash/database.py CHANGED
@@ -5,9 +5,13 @@ from pathlib import Path
  from typing import Any, Optional
  from datetime import datetime, timedelta
  import json
+ from typing import TypeAlias
 
  from .paths import get_user_data_dir
 
+ SessionRow: TypeAlias = dict[str, Any]
+ ImageRow: TypeAlias = dict[str, Any]
+
 
  def get_column_name(k: str) -> str:
      """Convert keynames to SQL legal column names"""
@@ -21,10 +25,35 @@ class Database:
  """SQLite-backed application database.
 
  Stores data under the OS-specific user data directory using platformdirs.
+
+ Tables:
+ #1: repos
+ A table with one row per repository. Contains only 'id' (primary key) and 'url' (unique).
+ The URL identifies the repository root (e.g., 'file:///path/to/repo').
+
+ #2: images
  Provides an `images` table for FITS metadata and basic helpers.
 
  The images table stores DATE-OBS and DATE as indexed SQL columns for
  efficient date-based queries, while other FITS metadata is stored in JSON.
+
+ The 'path' column contains a path **relative** to the repository root.
+ Each image belongs to exactly one repo, linked via the repo_id foreign key.
+ The combination of (repo_id, path) is unique.
+
+ Image retrieval methods (get_image, search_image, all_images) join with the repos
+ table to include repo_url in results, allowing callers to reconstruct absolute paths.
+
+ #3: sessions
+ The sessions table has one row per observing session, summarizing key info.
+ Sessions are identified by filter, image type, target, telescope, etc., plus start/end times.
+ They correspond to groups of images taken together during an observing run (e.g. the
+ session start/end describes the range of the images' DATE-OBS values).
+
+ Each session also has an image_doc_id field which points to a representative
+ image in the images table. Eventually we'll use joins to add extra info from images to
+ the exposed 'session' row.
+
  """
 
  EXPTIME_KEY = "EXPTIME"
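The docstring above says image queries return repo_url together with a repository-relative path so callers can rebuild absolute paths. A minimal sketch of that reconstruction for file:// repos, using a hypothetical helper that is not part of the package:

    from pathlib import Path
    from urllib.parse import urlparse

    def to_absolute_path(image_row: dict) -> Path:
        # Hypothetical helper: combine the repo root (a file:// URL) with the
        # repository-relative 'path' returned by get_image/search_image/all_images.
        repo = urlparse(image_row["repo_url"])
        if repo.scheme != "file":
            raise ValueError(f"unsupported repo scheme: {repo.scheme}")
        return Path(repo.path) / image_row["path"]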
@@ -35,13 +64,15 @@ class Database:
  EXPTIME_TOTAL_KEY = "exptime-total"
  DATE_OBS_KEY = "DATE-OBS"
  DATE_KEY = "DATE"
- IMAGE_DOC_KEY = "image-doc"
+ IMAGE_DOC_KEY = "image-doc-id"
  IMAGETYP_KEY = "IMAGETYP"
  OBJECT_KEY = "OBJECT"
  TELESCOP_KEY = "TELESCOP"
+ ID_KEY = "id" # for finding any row by its ID
 
  SESSIONS_TABLE = "sessions"
  IMAGES_TABLE = "images"
+ REPOS_TABLE = "repos"
 
  def __init__(
  self,
@@ -64,18 +95,38 @@ class Database:
  self._init_tables()
 
  def _init_tables(self) -> None:
- """Create the images and sessions tables if they don't exist."""
+ """Create the repos, images and sessions tables if they don't exist."""
  cursor = self._db.cursor()
 
+ # Create repos table
+ cursor.execute(
+ f"""
+ CREATE TABLE IF NOT EXISTS {self.REPOS_TABLE} (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ url TEXT UNIQUE NOT NULL
+ )
+ """
+ )
+
+ # Create index on url for faster lookups
+ cursor.execute(
+ f"""
+ CREATE INDEX IF NOT EXISTS idx_repos_url ON {self.REPOS_TABLE}(url)
+ """
+ )
+
  # Create images table with DATE-OBS and DATE as indexed columns
  cursor.execute(
  f"""
  CREATE TABLE IF NOT EXISTS {self.IMAGES_TABLE} (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
- path TEXT UNIQUE NOT NULL,
+ repo_id INTEGER NOT NULL,
+ path TEXT NOT NULL,
  date_obs TEXT,
  date TEXT,
- metadata TEXT NOT NULL
+ metadata TEXT NOT NULL,
+ FOREIGN KEY (repo_id) REFERENCES {self.REPOS_TABLE}(id),
+ UNIQUE(repo_id, path)
  )
  """
  )
@@ -114,7 +165,8 @@ class Database:
  telescop TEXT NOT NULL,
  num_images INTEGER NOT NULL,
  exptime_total REAL NOT NULL,
- image_doc_id INTEGER
+ image_doc_id INTEGER,
+ FOREIGN KEY (image_doc_id) REFERENCES {self.IMAGES_TABLE}(id)
  )
  """
  )
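One note on the FOREIGN KEY clauses added above: SQLite only enforces them when the foreign_keys pragma is enabled on the connection (it is off by default), and the diff does not show it being set; remove_repo() below instead cascades manually. A minimal sketch of what per-connection enforcement would look like, with an illustrative db_path:

    import sqlite3

    def open_db(db_path: str) -> sqlite3.Connection:
        # Sketch only: turn on FK enforcement so the REFERENCES clauses
        # declared in _init_tables() are actually checked by SQLite.
        conn = sqlite3.connect(db_path)
        conn.execute("PRAGMA foreign_keys = ON")  # off by default
        conn.row_factory = sqlite3.Row  # the helpers index rows by column name
        return conn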
@@ -129,18 +181,125 @@ class Database:
 
  self._db.commit()
 
+ # --- Convenience helpers for common repo operations ---
+ def remove_repo(self, url: str) -> None:
+ """Remove a repo record by URL.
+
+ This will cascade delete all images belonging to this repo, and all sessions
+ that reference those images.
+
+ Args:
+ url: The repository URL (e.g., 'file:///path/to/repo')
+ """
+ cursor = self._db.cursor()
+
+ # First get the repo_id
+ repo_id = self.get_repo_id(url)
+ if repo_id is None:
+ return # Repo doesn't exist, nothing to delete
+
+ # Delete sessions that reference images from this repo
+ # This deletes sessions where image_doc_id points to any image in this repo
+ cursor.execute(
+ f"""
+ DELETE FROM {self.SESSIONS_TABLE}
+ WHERE image_doc_id IN (
+ SELECT id FROM {self.IMAGES_TABLE} WHERE repo_id = ?
+ )
+ """,
+ (repo_id,),
+ )
+
+ # Delete all images from this repo
+ cursor.execute(
+ f"DELETE FROM {self.IMAGES_TABLE} WHERE repo_id = ?",
+ (repo_id,),
+ )
+
+ # Finally delete the repo itself
+ cursor.execute(f"DELETE FROM {self.REPOS_TABLE} WHERE id = ?", (repo_id,))
+
+ self._db.commit()
+
+ def upsert_repo(self, url: str) -> int:
+ """Insert or update a repo record by unique URL.
+
+ Args:
+ url: The repository URL (e.g., 'file:///path/to/repo')
+
+ Returns:
+ The rowid of the inserted/updated record.
+ """
+ cursor = self._db.cursor()
+ cursor.execute(
+ f"""
+ INSERT INTO {self.REPOS_TABLE} (url) VALUES (?)
+ ON CONFLICT(url) DO NOTHING
+ """,
+ (url,),
+ )
+
+ self._db.commit()
+
+ # Get the rowid of the inserted/existing record
+ cursor.execute(f"SELECT id FROM {self.REPOS_TABLE} WHERE url = ?", (url,))
+ result = cursor.fetchone()
+ if result:
+ return result[0]
+ return cursor.lastrowid if cursor.lastrowid is not None else 0
+
+ def get_repo_id(self, url: str) -> int | None:
+ """Get the repo_id for a given URL.
+
+ Args:
+ url: The repository URL
+
+ Returns:
+ The repo_id if found, None otherwise
+ """
+ cursor = self._db.cursor()
+ cursor.execute(f"SELECT id FROM {self.REPOS_TABLE} WHERE url = ?", (url,))
+ result = cursor.fetchone()
+ return result[0] if result else None
+
+ def get_repo_url(self, repo_id: int) -> str | None:
+ """Get the URL for a given repo_id.
+
+ Args:
+ repo_id: The repository ID
+
+ Returns:
+ The URL if found, None otherwise
+ """
+ cursor = self._db.cursor()
+ cursor.execute(f"SELECT url FROM {self.REPOS_TABLE} WHERE id = ?", (repo_id,))
+ result = cursor.fetchone()
+ return result[0] if result else None
+
  # --- Convenience helpers for common image operations ---
- def upsert_image(self, record: dict[str, Any]) -> int:
+ def upsert_image(self, record: dict[str, Any], repo_url: str) -> int:
  """Insert or update an image record by unique path.
 
- The record must include a 'path' key; other keys are arbitrary FITS metadata.
+ The record must include a 'path' key (relative to repo); other keys are arbitrary FITS metadata.
+ The path is stored as-is - caller is responsible for making it relative to the repo.
  DATE-OBS and DATE are extracted and stored as indexed columns for efficient queries.
- Returns the rowid of the inserted/updated record.
+
+ Args:
+ record: Dictionary containing image metadata including 'path' (relative to repo)
+ repo_url: The repository URL this image belongs to
+
+ Returns:
+ The rowid of the inserted/updated record.
  """
  path = record.get("path")
  if not path:
  raise ValueError("record must include 'path'")
 
+ # Get or create the repo_id for this URL
+ repo_id = self.get_repo_id(repo_url)
+ if repo_id is None:
+ repo_id = self.upsert_repo(repo_url)
+
  # Extract date fields for column storage
  date_obs = record.get(self.DATE_OBS_KEY)
  date = record.get(self.DATE_KEY)
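Taken together, the repo helpers added above give callers a small URL-to-id mapping API. A short usage sketch with an illustrative URL, assuming db is an existing Database instance:

    url = "file:///data/astro/raw"        # illustrative repo URL
    repo_id = db.upsert_repo(url)         # insert, or return the existing row's id
    assert db.get_repo_id(url) == repo_id
    assert db.get_repo_url(repo_id) == url

    # remove_repo() deletes the repo row, its images, and any sessions whose
    # image_doc_id pointed at one of those images (a manual cascade).
    db.remove_repo(url)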
@@ -152,25 +311,28 @@ class Database:
  cursor = self._db.cursor()
  cursor.execute(
  f"""
- INSERT INTO {self.IMAGES_TABLE} (path, date_obs, date, metadata) VALUES (?, ?, ?, ?)
- ON CONFLICT(path) DO UPDATE SET
+ INSERT INTO {self.IMAGES_TABLE} (repo_id, path, date_obs, date, metadata) VALUES (?, ?, ?, ?, ?)
+ ON CONFLICT(repo_id, path) DO UPDATE SET
  date_obs = excluded.date_obs,
  date = excluded.date,
  metadata = excluded.metadata
  """,
- (path, date_obs, date, metadata_json),
+ (repo_id, str(path), date_obs, date, metadata_json),
  )
 
  self._db.commit()
 
  # Get the rowid of the inserted/updated record
- cursor.execute(f"SELECT id FROM {self.IMAGES_TABLE} WHERE path = ?", (path,))
+ cursor.execute(
+ f"SELECT id FROM {self.IMAGES_TABLE} WHERE repo_id = ? AND path = ?",
+ (repo_id, str(path)),
+ )
  result = cursor.fetchone()
  if result:
  return result[0]
  return cursor.lastrowid if cursor.lastrowid is not None else 0
 
- def search_image(self, conditions: dict[str, Any]) -> list[dict[str, Any]] | None:
+ def search_image(self, conditions: dict[str, Any]) -> list[SessionRow]:
  """Search for images matching the given conditions.
 
  Args:
@@ -180,7 +342,7 @@ class Database:
  - 'date_end': Filter images with DATE-OBS <= this date
 
  Returns:
- List of matching image records or None if no matches
+ List of matching image records with relative path, repo_id, and repo_url
  """
  # Extract special date filter keys (make a copy to avoid modifying caller's dict)
  conditions_copy = dict(conditions)
@@ -192,15 +354,19 @@ class Database:
  params = []
 
  if date_start:
- where_clauses.append("date_obs >= ?")
+ where_clauses.append("i.date_obs >= ?")
  params.append(date_start)
 
  if date_end:
- where_clauses.append("date_obs <= ?")
+ where_clauses.append("i.date_obs <= ?")
  params.append(date_end)
 
- # Build the query
- query = f"SELECT id, path, date_obs, date, metadata FROM {self.IMAGES_TABLE}"
+ # Build the query with JOIN to repos table
+ query = f"""
+ SELECT i.id, i.repo_id, i.path, i.date_obs, i.date, i.metadata, r.url as repo_url
+ FROM {self.IMAGES_TABLE} i
+ JOIN {self.REPOS_TABLE} r ON i.repo_id = r.id
+ """
  if where_clauses:
  query += " WHERE " + " AND ".join(where_clauses)
 
@@ -210,7 +376,10 @@ class Database:
  results = []
  for row in cursor.fetchall():
  metadata = json.loads(row["metadata"])
+ # Store the relative path, repo_id, and repo_url for caller
  metadata["path"] = row["path"]
+ metadata["repo_id"] = row["repo_id"]
+ metadata["repo_url"] = row["repo_url"]
  metadata["id"] = row["id"]
 
  # Add date fields back to metadata for compatibility
@@ -225,58 +394,11 @@ class Database:
  if match:
  results.append(metadata)
 
- return results if results else None
-
- def where_session(self, conditions: dict[str, Any] | None) -> tuple[str, list[Any]]:
- """Search for sessions matching the given conditions.
-
- Args:
- conditions: Dictionary of session key-value pairs to match, or None for all.
- Special keys:
- - 'date_start': Filter sessions starting on or after this date
- - 'date_end': Filter sessions starting on or before this date
-
- Returns:
- Tuple of (WHERE clause string, list of parameters)
- """
- if conditions is None:
- conditions = {}
-
- # Build WHERE clause dynamically based on conditions
- where_clauses = []
- params = []
-
- # Extract date range conditions
- date_start = conditions.get("date_start")
- date_end = conditions.get("date_end")
-
- # Add date range filters to WHERE clause
- if date_start:
- where_clauses.append("start >= ?")
- params.append(date_start)
-
- if date_end:
- where_clauses.append("start <= ?")
- params.append(date_end)
-
- # Add standard conditions to WHERE clause
- for key, value in conditions.items():
- if key not in ("date_start", "date_end") and value is not None:
- column_name = key
- where_clauses.append(f"{column_name} = ?")
- params.append(value)
-
- # Build the query
- query = ""
-
- if where_clauses:
- query += " WHERE " + " AND ".join(where_clauses)
-
- return (query, params)
+ return results
 
  def search_session(
- self, conditions: dict[str, Any] | None = None
- ) -> list[dict[str, Any]]:
+ self, where_tuple: tuple[str, list[Any]] = ("", [])
+ ) -> list[SessionRow]:
  """Search for sessions matching the given conditions.
 
  Args:
@@ -286,26 +408,31 @@ class Database:
  - 'date_end': Filter sessions starting on or before this date
 
  Returns:
- List of matching session records or None
+ List of matching session records with metadata from the reference image
  """
- if conditions is None:
- conditions = {}
-
  # Build WHERE clause dynamically based on conditions
- where_clause, params = self.where_session(conditions)
+ where_clause, params = where_tuple
 
- # Build the query
+ # Build the query with JOIN to images table to get reference image metadata
  query = f"""
- SELECT id, start, end, filter, imagetyp, object, telescop,
- num_images, exptime_total, image_doc_id
- FROM {self.SESSIONS_TABLE}
+ SELECT s.id, s.start, s.end, s.filter, s.imagetyp, s.object, s.telescop,
+ s.num_images, s.exptime_total, s.image_doc_id, i.metadata
+ FROM {self.SESSIONS_TABLE} s
+ LEFT JOIN {self.IMAGES_TABLE} i ON s.image_doc_id = i.id
  {where_clause}
  """
 
  cursor = self._db.cursor()
  cursor.execute(query, params)
 
- results = [dict(row) for row in cursor.fetchall()]
+ results = []
+ for row in cursor.fetchall():
+ session_dict = dict(row)
+ # Parse the metadata JSON if it exists
+ if session_dict.get("metadata"):
+ session_dict["metadata"] = json.loads(session_dict["metadata"])
+ results.append(session_dict)
+
  return results
 
  def len_table(self, table_name: str) -> int:
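With where_session() removed, search_session() now expects the caller to supply a prebuilt (WHERE clause, parameters) tuple that is spliced into the query after the LEFT JOIN. A hedged sketch of a call, using illustrative values and column names taken from the sessions schema, again assuming an existing Database instance db:

    # The clause should include the leading "WHERE"; parameters are bound positionally.
    where = (
        "WHERE s.start >= ? AND s.filter = ?",
        ["2025-09-01T00:00:00", "HaOiii"],
    )
    for session in db.search_session(where):
        print(session["start"], session["object"], session["num_images"])
        # session["metadata"] holds the parsed JSON of the representative image, if any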
@@ -333,20 +460,35 @@ class Database:
  result = cursor.fetchone()
  return result[0] if result and result[0] is not None else 0
 
- def get_image(self, path: str) -> dict[str, Any] | None:
- """Get an image record by path."""
+ def get_image(self, repo_url: str, path: str) -> ImageRow | None:
+ """Get an image record by repo_url and relative path.
+
+ Args:
+ repo_url: The repository URL
+ path: Path relative to the repository root
+
+ Returns:
+ Image record with relative path, repo_id, and repo_url, or None if not found
+ """
  cursor = self._db.cursor()
  cursor.execute(
- f"SELECT id, path, date_obs, date, metadata FROM {self.IMAGES_TABLE} WHERE path = ?",
- (path,),
+ f"""
+ SELECT i.id, i.repo_id, i.path, i.date_obs, i.date, i.metadata, r.url as repo_url
+ FROM {self.IMAGES_TABLE} i
+ JOIN {self.REPOS_TABLE} r ON i.repo_id = r.id
+ WHERE r.url = ? AND i.path = ?
+ """,
+ (repo_url, path),
  )
- row = cursor.fetchone()
 
+ row = cursor.fetchone()
  if row is None:
  return None
 
  metadata = json.loads(row["metadata"])
  metadata["path"] = row["path"]
+ metadata["repo_id"] = row["repo_id"]
+ metadata["repo_url"] = row["repo_url"]
  metadata["id"] = row["id"]
 
  # Add date fields back to metadata for compatibility
@@ -357,17 +499,24 @@ class Database:
 
  return metadata
 
- def all_images(self) -> list[dict[str, Any]]:
- """Return all image records."""
+ def all_images(self) -> list[ImageRow]:
+ """Return all image records with relative paths, repo_id, and repo_url."""
  cursor = self._db.cursor()
  cursor.execute(
- f"SELECT id, path, date_obs, date, metadata FROM {self.IMAGES_TABLE}"
+ f"""
+ SELECT i.id, i.repo_id, i.path, i.date_obs, i.date, i.metadata, r.url as repo_url
+ FROM {self.IMAGES_TABLE} i
+ JOIN {self.REPOS_TABLE} r ON i.repo_id = r.id
+ """
  )
 
  results = []
  for row in cursor.fetchall():
  metadata = json.loads(row["metadata"])
+ # Return relative path, repo_id, and repo_url for caller
  metadata["path"] = row["path"]
+ metadata["repo_id"] = row["repo_id"]
+ metadata["repo_url"] = row["repo_url"]
  metadata["id"] = row["id"]
 
  # Add date fields back to metadata for compatibility
@@ -406,7 +555,7 @@ class Database:
 
  return dict(row)
 
- def get_session(self, to_find: dict[str, str]) -> dict[str, Any] | None:
+ def get_session(self, to_find: dict[str, str]) -> SessionRow | None:
  """Find a session matching the given criteria.
 
  Searches for sessions with the same filter, image type, target, and telescope
@@ -452,7 +601,7 @@ class Database:
  return dict(row)
 
  def upsert_session(
- self, new: dict[str, Any], existing: dict[str, Any] | None = None
+ self, new: SessionRow, existing: SessionRow | None = None
  ) -> None:
  """Insert or update a session record."""
  cursor = self._db.cursor()
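The image helpers follow the same pattern as the repo helpers: upsert_image() now takes the owning repo_url alongside a repo-relative path, and get_image() looks the record up by that same pair. A usage sketch with illustrative values, assuming an existing Database instance db:

    repo_url = "file:///data/astro/raw"   # illustrative repo
    record = {
        "path": "2025-09-09/BIAS/bias_0001.fits",  # relative to the repo root
        "DATE-OBS": "2025-09-09T21:14:03",
        "IMAGETYP": "bias",
        "TELESCOP": "ExampleScope",
    }
    image_id = db.upsert_image(record, repo_url)

    fetched = db.get_image(repo_url, "2025-09-09/BIAS/bias_0001.fits")
    assert fetched is not None and fetched["id"] == image_id
    assert fetched["repo_url"] == repo_url  # added by the JOIN with the repos table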
@@ -3,6 +3,23 @@
  [repo]
  kind = "preferences"
 
+ [aliases]
+ # Aliases can be used to map non-standard (or non-English) frame names to standard terms.
+ # This is also used to map filters based on common misspellings or variations.
+ # We assume the first listed option in each list is the 'canonical' name used for printing etc.
+
+ # frame types
+ dark = ["dark", "darks"]
+ flat = ["flat", "flats"]
+ bias = ["bias", "biases"]
+
+ # file suffixes
+ fit = ["fits", "fit"]
+
+ # filter names
+ SiiOiii = ["SiiOiii", "SII-OIII", "S2-O3"]
+ HaOiii = ["HaOiii", "HA-OIII", "Halpha-O3"]
+
  # FIXME, somewhere here list default patterns which can be used to identify NINA, ASIAIR, SEESTAR
  # raw repo layouts
 
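The new [aliases] tables map variant spellings to a canonical form, with the first entry in each list treated as canonical. The diff does not show the lookup code; a minimal sketch of how such a table could be resolved, using a hypothetical resolve_alias helper over the values above:

    # Hypothetical helper over the parsed [aliases] tables shown above.
    aliases = {
        "dark": ["dark", "darks"],
        "flat": ["flat", "flats"],
        "bias": ["bias", "biases"],
        "fit": ["fits", "fit"],
        "SiiOiii": ["SiiOiii", "SII-OIII", "S2-O3"],
        "HaOiii": ["HaOiii", "HA-OIII", "Halpha-O3"],
    }

    def resolve_alias(value: str) -> str:
        # The first entry in each list is the canonical name used for printing.
        for variants in aliases.values():
            if value.lower() in (v.lower() for v in variants):
                return variants[0]
        return value  # unknown values pass through unchanged

    assert resolve_alias("DARKS") == "dark"
    assert resolve_alias("S2-O3") == "SiiOiii"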
starbash/main.py CHANGED
@@ -6,7 +6,7 @@ import starbash.url as url
  import starbash
 
  from .app import Starbash, get_user_config_path, setup_logging
- from .commands import info, repo, select, user
+ from .commands import info, process, repo, select, user
  from . import console
 
  app = typer.Typer(
@@ -17,6 +17,9 @@ app.add_typer(user.app, name="user", help="Manage user settings.")
  app.add_typer(repo.app, name="repo", help="Manage Starbash repositories.")
  app.add_typer(select.app, name="select", help="Manage session and target selection.")
  app.add_typer(info.app, name="info", help="Display system and data information.")
+ app.add_typer(
+ process.app, name="process", help="Process images using automated workflows."
+ )
 
 
  @app.callback(invoke_without_command=True)
@@ -10,7 +10,7 @@ author.email = "FIXMESiril?"
  [[stage]]
 
  description = "Generate master bias"
- disabled = true # FIXME, debugging later stuff
+ disabled = false # FIXME, debugging later stuff
 
  # Restrict processing of this stage to only if detected hardware was found for this session
  # For any camera
@@ -23,9 +23,9 @@ tool = "siril"
  # input.source = "most-recent" # only look for the most recent set of raws for this particular type
  input.type = "bias" # look in all raw repos, but look only for bias files
 
- # for early development we have support for simple absolute file paths with globs
- input.source = "path"
- input.path = "/workspaces/starbash/images/from_astroboy/masters-raw/2025-09-09/BIAS/*.fit*"
+ # Look for files in input repos, finding them by using the "relative" tag they contain
+ input.source = "repo"
+ # input.path = ".../from_astroboy/masters-raw/2025-09-09/BIAS/*.fit*"
  input.required = true # Is at least one input file required? If true, we will skip running this stage entirely (with a warning)
 
  # make the following also work
@@ -34,7 +34,17 @@ input.required = true # Is at least one input file required? If true, we will s
  #os.makedirs(os.path.dirname(process_dir), exist_ok=True)
  #frames = glob(f"{masters_raw}/{date}/BIAS/{date}_*.fit*")
  #siril_run_in_temp_dir(frames, ...
- when = "session-config" # run at the start of each session process
+ when = "setup.masters" # run when master biases are regenerated
+
+ # Based on the following definitions in the stage toml file...
+ output.dest = "repo" # write to a particular repo
+ output.type = "master" # write output to the special masters repo
+
+ # the following fields will be auto-populated in the context before entry:
+ # context.output.base_path - the full filepath to write the output file to **excluding the suffix**
+ # context.output.full_path - the full filepath to write the output file to (including suffix)
+ # (NOT implemented / needed) context.output.root_path - points to the base of the destination repo
+ # (NOT implemented / needed) context.output.suffix - the suffix to append to the output file (e.g. .fits or .fit.gz)
 
  # The following constants are auto defined before running the tool
  # context.process_dir (points to the session-specific semi-persistent local dir for that session's written/read data files)
@@ -42,8 +52,15 @@ when = "session-config" # run at the start of each session process
  # context.temp_dir (points to a temporary directory this tool can use for writing)
 
  # Everything in the constants dict will be predefined as named variables for use by the script
- context.date = "2025-09-09" # FIXME - later find auto latest date with bias frames
- context.output = "{masters}/biases/{date}_stacked.fits" # if the output already exists processing will be skipped
+ # context.date = "2025-09-09" # FIXME - later find auto latest date with bias frames
+
+ # output file will be placed in the masters repo
+ # if the output already exists processing will be skipped
+ #
+ # with a path like "{instrument}/{date}/{imagetyp}/{sessionconfig}.fits"
+ # this path comes from master_repo.relative
+ # context.output = "{output.root}/{instrument}/{date}/{imagetyp}/{sessionconfig}.fits"
+
 
 
  script = '''
@@ -51,5 +68,5 @@ script = '''
  cd {process_dir}
 
  # Stack Bias Frames to bias_stacked.fit
- stack bias rej 3 3 -nonorm -out={output}
+ stack bias rej 3 3 -nonorm -out={output["base_path"]}
  '''
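The stage now writes into the masters repo rather than a hard-coded path, and the comments earlier in this file say the runner pre-populates context.output.base_path and context.output.full_path before the script runs. A hedged sketch of how those two values might relate; the repo root, the field values, and the join logic are assumptions, only the relative-path template comes from the comments above:

    from pathlib import Path

    # Assumed masters repo root and template values; the template string itself
    # mirrors the "{instrument}/{date}/{imagetyp}/{sessionconfig}.fits" comment.
    masters_root = Path("/data/astro/masters")
    relative = "{instrument}/{date}/{imagetyp}/{sessionconfig}.fits".format(
        instrument="ExampleCam", date="2025-09-09", imagetyp="bias", sessionconfig="default"
    )

    full_path = masters_root / relative        # candidate context.output.full_path
    base_path = full_path.with_suffix("")      # candidate context.output.base_path
    # The script above passes base_path to "-out=", leaving the file suffix to the tool.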
@@ -21,6 +21,11 @@ dir = "osc_single_duo"
 
  # processing stages, currently all declared here, but possibly in the future they could be added by user/other toml files
 
+ # Not included in standard list - for now we run manually
+ #[[stages]]
+ #name = "setup-masters" # for flat processing, master generation etc
+ #priority = 5
+
  [[stages]]
  name = "session-config" # for flat processing, master generation etc
  priority = 10