starbash 0.1.9__py3-none-any.whl → 0.1.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
starbash/database.py CHANGED
@@ -2,17 +2,32 @@ from __future__ import annotations
 
  import sqlite3
  from pathlib import Path
- from typing import Any, Optional
+ from typing import Any, Optional, NamedTuple
  from datetime import datetime, timedelta
  import json
  from typing import TypeAlias
 
  from .paths import get_user_data_dir
+ from .aliases import normalize_target_name
 
  SessionRow: TypeAlias = dict[str, Any]
  ImageRow: TypeAlias = dict[str, Any]
 
 
+ class SearchCondition(NamedTuple):
+     """A search condition for database queries.
+
+     Args:
+         column_name: The column name to filter on (e.g., 'i.date_obs', 'r.url')
+         comparison_op: The comparison operator (e.g., '=', '>=', '<=', 'LIKE')
+         value: The value to compare against
+     """
+
+     column_name: str
+     comparison_op: str
+     value: Any
+
+
  def get_column_name(k: str) -> str:
      """Convert keynames to SQL legal column names"""
      k = k.lower()
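
The new SearchCondition tuples compose directly into a parameterized WHERE clause; a minimal sketch of the pattern (the same loop appears in search_image further down — the condition values here are illustrative):

    from typing import Any, NamedTuple

    class SearchCondition(NamedTuple):
        column_name: str
        comparison_op: str
        value: Any

    conditions = [
        SearchCondition("i.imagetyp", "=", "BIAS"),
        SearchCondition("i.date_obs", ">=", "2025-01-01"),
    ]
    # One "<column> <op> ?" fragment per condition, values passed separately
    where = " AND ".join(f"{c.column_name} {c.comparison_op} ?" for c in conditions)
    params = [c.value for c in conditions]
    # where  == "i.imagetyp = ? AND i.date_obs >= ?"
    # params == ["BIAS", "2025-01-01"]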
@@ -34,8 +49,8 @@ class Database:
      #2: images
      Provides an `images` table for FITS metadata and basic helpers.
 
-     The images table stores DATE-OBS and DATE as indexed SQL columns for
-     efficient date-based queries, while other FITS metadata is stored in JSON.
+     The images table stores DATE-OBS, DATE, and IMAGETYP as indexed SQL columns for
+     efficient date-based and type-based queries, while other FITS metadata is stored in JSON.
 
      The 'path' column contains a path **relative** to the repository root.
      Each image belongs to exactly one repo, linked via the repo_id foreign key.
@@ -68,7 +83,9 @@ class Database:
      IMAGETYP_KEY = "IMAGETYP"
      OBJECT_KEY = "OBJECT"
      TELESCOP_KEY = "TELESCOP"
+     EXPTIME_KEY = "EXPTIME"
      ID_KEY = "id"  # for finding any row by its ID
+     REPO_URL_KEY = "repo_url"
 
      SESSIONS_TABLE = "sessions"
      IMAGES_TABLE = "images"
@@ -115,7 +132,7 @@ class Database:
              """
          )
 
-         # Create images table with DATE-OBS and DATE as indexed columns
+         # Create images table with DATE-OBS, DATE, and IMAGETYP as indexed columns
          cursor.execute(
              f"""
              CREATE TABLE IF NOT EXISTS {self.IMAGES_TABLE} (
@@ -124,6 +141,7 @@ class Database:
                  path TEXT NOT NULL,
                  date_obs TEXT,
                  date TEXT,
+                 imagetyp TEXT COLLATE NOCASE,
                  metadata TEXT NOT NULL,
                  FOREIGN KEY (repo_id) REFERENCES {self.REPOS_TABLE}(id),
                  UNIQUE(repo_id, path)
@@ -152,6 +170,13 @@ class Database:
              """
          )
 
+         # Create index on imagetyp for efficient image type filtering
+         cursor.execute(
+             f"""
+             CREATE INDEX IF NOT EXISTS idx_images_imagetyp ON {self.IMAGES_TABLE}(imagetyp)
+             """
+         )
+
          # Create sessions table
          cursor.execute(
              f"""
@@ -159,12 +184,13 @@ class Database:
                  id INTEGER PRIMARY KEY AUTOINCREMENT,
                  start TEXT NOT NULL,
                  end TEXT NOT NULL,
-                 filter TEXT NOT NULL,
-                 imagetyp TEXT NOT NULL,
-                 object TEXT NOT NULL,
+                 filter TEXT COLLATE NOCASE,
+                 imagetyp TEXT COLLATE NOCASE NOT NULL,
+                 object TEXT,
                  telescop TEXT NOT NULL,
                  num_images INTEGER NOT NULL,
                  exptime_total REAL NOT NULL,
+                 exptime REAL NOT NULL,
                  image_doc_id INTEGER,
                  FOREIGN KEY (image_doc_id) REFERENCES {self.IMAGES_TABLE}(id)
              )
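
The switch to COLLATE NOCASE on filter and imagetyp makes equality comparisons case-insensitive at the column level, so 'Bias' and 'BIAS' match without LOWER() calls in every query; a quick standalone illustration:

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute("CREATE TABLE t (imagetyp TEXT COLLATE NOCASE)")
    db.execute("INSERT INTO t VALUES ('Bias')")
    # NOCASE collation applies to the comparison automatically
    print(db.execute("SELECT COUNT(*) FROM t WHERE imagetyp = 'BIAS'").fetchone())  # (1,)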
@@ -175,7 +201,7 @@ class Database:
          cursor.execute(
              f"""
              CREATE INDEX IF NOT EXISTS idx_sessions_lookup
-             ON {self.SESSIONS_TABLE}(filter, imagetyp, object, telescop, start, end)
+             ON {self.SESSIONS_TABLE}(filter, imagetyp, object, telescop, exptime, start, end)
              """
          )
 
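Since exptime is inserted into the middle of the composite lookup index, queries that filter on the leading columns plus exptime can still be served by a single index scan; one way to sanity-check this, sketched with a pared-down schema (column values are illustrative):

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute(
        "CREATE TABLE sessions (filter TEXT, imagetyp TEXT, object TEXT,"
        " telescop TEXT, exptime REAL, start TEXT, end TEXT)"
    )
    db.execute(
        "CREATE INDEX idx_sessions_lookup ON sessions"
        "(filter, imagetyp, object, telescop, exptime, start, end)"
    )
    plan = db.execute(
        "EXPLAIN QUERY PLAN SELECT * FROM sessions"
        " WHERE filter = ? AND imagetyp = ? AND object = ? AND telescop = ?"
        " AND exptime = ? AND start >= ?",
        ("Ha", "LIGHT", "M31", "scope1", 120.0, "2025-01-01"),
    ).fetchall()
    print(plan)  # the plan detail should mention idx_sessions_lookup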
@@ -186,38 +212,44 @@ class Database:
          """Remove a repo record by URL.
 
          This will cascade delete all images belonging to this repo, and all sessions
-         that reference those images.
+         that reference those images via image_doc_id.
+
+         The relationship is: repos -> images (via repo_id) -> sessions (via image_doc_id).
+         Sessions have an image_doc_id field that points to a representative image.
+         We delete sessions whose representative image belongs to the repo being deleted.
 
          Args:
              url: The repository URL (e.g., 'file:///path/to/repo')
          """
          cursor = self._db.cursor()
 
-         # First get the repo_id
-         repo_id = self.get_repo_id(url)
-         if repo_id is None:
-             return  # Repo doesn't exist, nothing to delete
-
-         # Delete sessions that reference images from this repo
-         # This deletes sessions where image_doc_id points to any image in this repo
+         # Use a 3-way join to find and delete sessions that reference images from this repo
+         # repo_url -> repo_id -> images.id -> sessions.image_doc_id
          cursor.execute(
              f"""
              DELETE FROM {self.SESSIONS_TABLE}
-             WHERE image_doc_id IN (
-                 SELECT id FROM {self.IMAGES_TABLE} WHERE repo_id = ?
+             WHERE id IN (
+                 SELECT s.id
+                 FROM {self.SESSIONS_TABLE} s
+                 INNER JOIN {self.IMAGES_TABLE} i ON s.image_doc_id = i.id
+                 INNER JOIN {self.REPOS_TABLE} r ON i.repo_id = r.id
+                 WHERE r.url = ?
              )
              """,
-             (repo_id,),
+             (url,),
          )
 
-         # Delete all images from this repo
+         # Delete all images from this repo (using repo_id from URL)
          cursor.execute(
-             f"DELETE FROM {self.IMAGES_TABLE} WHERE repo_id = ?",
-             (repo_id,),
+             f"""
+             DELETE FROM {self.IMAGES_TABLE}
+             WHERE repo_id = (SELECT id FROM {self.REPOS_TABLE} WHERE url = ?)
+             """,
+             (url,),
          )
 
          # Finally delete the repo itself
-         cursor.execute(f"DELETE FROM {self.REPOS_TABLE} WHERE id = ?", (repo_id,))
+         cursor.execute(f"DELETE FROM {self.REPOS_TABLE} WHERE url = ?", (url,))
 
          self._db.commit()
 
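The same three-statement cascade, condensed into a standalone sketch with the table names hard-coded (the real method interpolates them from class constants):

    import sqlite3

    def delete_repo_by_url(db: sqlite3.Connection, url: str) -> None:
        cur = db.cursor()
        # 1. Sessions whose representative image lives in this repo
        cur.execute(
            """
            DELETE FROM sessions WHERE id IN (
                SELECT s.id FROM sessions s
                INNER JOIN images i ON s.image_doc_id = i.id
                INNER JOIN repos r ON i.repo_id = r.id
                WHERE r.url = ?
            )
            """,
            (url,),
        )
        # 2. Images in the repo, with repo_id resolved from the URL by subquery
        cur.execute(
            "DELETE FROM images WHERE repo_id = (SELECT id FROM repos WHERE url = ?)",
            (url,),
        )
        # 3. The repo row itself
        cur.execute("DELETE FROM repos WHERE url = ?", (url,))
        db.commit()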
@@ -282,7 +314,7 @@ class Database:
 
          The record must include a 'path' key (relative to repo); other keys are arbitrary FITS metadata.
          The path is stored as-is - caller is responsible for making it relative to the repo.
-         DATE-OBS and DATE are extracted and stored as indexed columns for efficient queries.
+         DATE-OBS, DATE, and IMAGETYP are extracted and stored as indexed columns for efficient queries.
 
          Args:
              record: Dictionary containing image metadata including 'path' (relative to repo)
@@ -300,24 +332,26 @@ class Database:
          if repo_id is None:
              repo_id = self.upsert_repo(repo_url)
 
-         # Extract date fields for column storage
+         # Extract special fields for column storage
          date_obs = record.get(self.DATE_OBS_KEY)
          date = record.get(self.DATE_KEY)
+         imagetyp = record.get(self.IMAGETYP_KEY)
 
-         # Separate path and date fields from metadata
+         # Separate path and special fields from metadata
          metadata = {k: v for k, v in record.items() if k != "path"}
          metadata_json = json.dumps(metadata)
 
          cursor = self._db.cursor()
          cursor.execute(
              f"""
-             INSERT INTO {self.IMAGES_TABLE} (repo_id, path, date_obs, date, metadata) VALUES (?, ?, ?, ?, ?)
+             INSERT INTO {self.IMAGES_TABLE} (repo_id, path, date_obs, date, imagetyp, metadata) VALUES (?, ?, ?, ?, ?, ?)
              ON CONFLICT(repo_id, path) DO UPDATE SET
                  date_obs = excluded.date_obs,
                  date = excluded.date,
+                 imagetyp = excluded.imagetyp,
                  metadata = excluded.metadata
              """,
-             (repo_id, str(path), date_obs, date, metadata_json),
+             (repo_id, str(path), date_obs, date, imagetyp, metadata_json),
          )
 
          self._db.commit()
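
The INSERT ... ON CONFLICT(repo_id, path) DO UPDATE form is SQLite's native upsert (available since SQLite 3.24), keyed on the table's UNIQUE(repo_id, path) constraint; a toy demonstration of the idiom:

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute("CREATE TABLE images (repo_id INT, path TEXT, imagetyp TEXT, UNIQUE(repo_id, path))")
    for typ in ("LIGHT", "BIAS"):  # the second insert updates the same row in place
        db.execute(
            """
            INSERT INTO images (repo_id, path, imagetyp) VALUES (?, ?, ?)
            ON CONFLICT(repo_id, path) DO UPDATE SET imagetyp = excluded.imagetyp
            """,
            (1, "a.fits", typ),
        )
    print(db.execute("SELECT imagetyp FROM images").fetchall())  # [('BIAS',)]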
@@ -332,38 +366,36 @@ class Database:
              return result[0]
          return cursor.lastrowid if cursor.lastrowid is not None else 0
 
-     def search_image(self, conditions: dict[str, Any]) -> list[SessionRow]:
+     def search_image(self, conditions: list[SearchCondition]) -> list[ImageRow]:
          """Search for images matching the given conditions.
 
          Args:
-             conditions: Dictionary of metadata key-value pairs to match.
-                 Special keys:
-                 - 'date_start': Filter images with DATE-OBS >= this date
-                 - 'date_end': Filter images with DATE-OBS <= this date
+             conditions: List of SearchCondition tuples, each containing:
+                 - column_name: The column to filter on (e.g., 'i.date_obs', 'r.url', 'i.imagetyp')
+                 - comparison_op: The comparison operator (e.g., '=', '>=', '<=')
+                 - value: The value to compare against
 
          Returns:
              List of matching image records with relative path, repo_id, and repo_url
-         """
-         # Extract special date filter keys (make a copy to avoid modifying caller's dict)
-         conditions_copy = dict(conditions)
-         date_start = conditions_copy.pop("date_start", None)
-         date_end = conditions_copy.pop("date_end", None)
 
-         # Build SQL query with WHERE clauses for date filtering
+         Example:
+             conditions = [
+                 SearchCondition('r.url', '=', 'file:///path/to/repo'),
+                 SearchCondition('i.imagetyp', '=', 'BIAS'),
+                 SearchCondition('i.date_obs', '>=', '2025-01-01'),
+             ]
+         """
+         # Build SQL query with WHERE clauses from conditions
          where_clauses = []
          params = []
 
-         if date_start:
-             where_clauses.append("i.date_obs >= ?")
-             params.append(date_start)
-
-         if date_end:
-             where_clauses.append("i.date_obs <= ?")
-             params.append(date_end)
+         for condition in conditions:
+             where_clauses.append(f"{condition.column_name} {condition.comparison_op} ?")
+             params.append(condition.value)
 
          # Build the query with JOIN to repos table
          query = f"""
-             SELECT i.id, i.repo_id, i.path, i.date_obs, i.date, i.metadata, r.url as repo_url
+             SELECT i.id, i.repo_id, i.path, i.date_obs, i.date, i.imagetyp, i.metadata, r.url as repo_url
              FROM {self.IMAGES_TABLE} i
              JOIN {self.REPOS_TABLE} r ON i.repo_id = r.id
          """
379
411
  # Store the relative path, repo_id, and repo_url for caller
380
412
  metadata["path"] = row["path"]
381
413
  metadata["repo_id"] = row["repo_id"]
382
- metadata["repo_url"] = row["repo_url"]
414
+ metadata[Database.REPO_URL_KEY] = row[Database.REPO_URL_KEY]
383
415
  metadata["id"] = row["id"]
384
416
 
385
- # Add date fields back to metadata for compatibility
417
+ # Add special fields back to metadata for compatibility
386
418
  if row["date_obs"]:
387
419
  metadata[self.DATE_OBS_KEY] = row["date_obs"]
388
420
  if row["date"]:
389
421
  metadata[self.DATE_KEY] = row["date"]
422
+ if row["imagetyp"]:
423
+ metadata[self.IMAGETYP_KEY] = row["imagetyp"]
390
424
 
391
- # Check if remaining conditions match (those stored in JSON metadata)
392
- match = all(metadata.get(k) == v for k, v in conditions_copy.items())
393
-
394
- if match:
395
- results.append(metadata)
425
+ results.append(metadata)
396
426
 
397
427
  return results
398
428
 
@@ -402,10 +432,7 @@ class Database:
          """Search for sessions matching the given conditions.
 
          Args:
-             conditions: Dictionary of session key-value pairs to match, or None for all.
-                 Special keys:
-                 - 'date_start': Filter sessions starting on or after this date
-                 - 'date_end': Filter sessions starting on or before this date
+             where_tuple
 
          Returns:
              List of matching session records with metadata from the reference image
@@ -416,7 +443,7 @@ class Database:
          # Build the query with JOIN to images table to get reference image metadata
          query = f"""
              SELECT s.id, s.start, s.end, s.filter, s.imagetyp, s.object, s.telescop,
-                    s.num_images, s.exptime_total, s.image_doc_id, i.metadata
+                    s.num_images, s.exptime_total, s.exptime, s.image_doc_id, i.metadata
              FROM {self.SESSIONS_TABLE} s
              LEFT JOIN {self.IMAGES_TABLE} i ON s.image_doc_id = i.id
              {where_clause}
@@ -473,7 +500,7 @@ class Database:
          cursor = self._db.cursor()
          cursor.execute(
              f"""
-             SELECT i.id, i.repo_id, i.path, i.date_obs, i.date, i.metadata, r.url as repo_url
+             SELECT i.id, i.repo_id, i.path, i.date_obs, i.date, i.imagetyp, i.metadata, r.url as repo_url
              FROM {self.IMAGES_TABLE} i
              JOIN {self.REPOS_TABLE} r ON i.repo_id = r.id
              WHERE r.url = ? AND i.path = ?
@@ -488,14 +515,16 @@ class Database:
          metadata = json.loads(row["metadata"])
          metadata["path"] = row["path"]
          metadata["repo_id"] = row["repo_id"]
-         metadata["repo_url"] = row["repo_url"]
+         metadata[Database.REPO_URL_KEY] = row[Database.REPO_URL_KEY]
          metadata["id"] = row["id"]
 
-         # Add date fields back to metadata for compatibility
+         # Add special fields back to metadata for compatibility
          if row["date_obs"]:
              metadata[self.DATE_OBS_KEY] = row["date_obs"]
          if row["date"]:
              metadata[self.DATE_KEY] = row["date"]
+         if row["imagetyp"]:
+             metadata[self.IMAGETYP_KEY] = row["imagetyp"]
 
          return metadata
 
@@ -504,7 +533,7 @@ class Database:
          cursor = self._db.cursor()
          cursor.execute(
              f"""
-             SELECT i.id, i.repo_id, i.path, i.date_obs, i.date, i.metadata, r.url as repo_url
+             SELECT i.id, i.repo_id, i.path, i.date_obs, i.date, i.imagetyp, i.metadata, r.url as repo_url
              FROM {self.IMAGES_TABLE} i
              JOIN {self.REPOS_TABLE} r ON i.repo_id = r.id
              """
@@ -516,14 +545,16 @@ class Database:
              # Return relative path, repo_id, and repo_url for caller
              metadata["path"] = row["path"]
              metadata["repo_id"] = row["repo_id"]
-             metadata["repo_url"] = row["repo_url"]
+             metadata[Database.REPO_URL_KEY] = row[Database.REPO_URL_KEY]
              metadata["id"] = row["id"]
 
-             # Add date fields back to metadata for compatibility
+             # Add special fields back to metadata for compatibility
              if row["date_obs"]:
                  metadata[self.DATE_OBS_KEY] = row["date_obs"]
              if row["date"]:
                  metadata[self.DATE_KEY] = row["date"]
+             if row["imagetyp"]:
+                 metadata[self.IMAGETYP_KEY] = row["imagetyp"]
 
              results.append(metadata)
 
@@ -542,7 +573,7 @@ class Database:
          cursor.execute(
              f"""
              SELECT id, start, end, filter, imagetyp, object, telescop,
-                    num_images, exptime_total, image_doc_id
+                    num_images, exptime_total, exptime, image_doc_id
              FROM {self.SESSIONS_TABLE}
              WHERE id = ?
              """,
@@ -561,15 +592,10 @@ class Database:
          Searches for sessions with the same filter, image type, target, and telescope
          whose start time is within +/- 8 hours of the provided date.
          """
-         date = to_find.get(Database.START_KEY)
+         date = to_find.get(get_column_name(Database.START_KEY))
          assert date
-         image_type = to_find.get(Database.IMAGETYP_KEY)
+         image_type = to_find.get(get_column_name(Database.IMAGETYP_KEY))
          assert image_type
-         filter = to_find.get(Database.FILTER_KEY)
-         assert filter
-         target = to_find.get(Database.OBJECT_KEY)
-         assert target
-         telescop = to_find.get(Database.TELESCOP_KEY, "unspecified")
 
          # Convert the provided ISO8601 date string to a datetime, then
          # search for sessions with the same filter whose start time is
@@ -581,17 +607,75 @@ class Database:
 
          # Since session 'start' is stored as ISO8601 strings, lexicographic
          # comparison aligns with chronological ordering for a uniform format.
+
+         # Build WHERE clause handling NULL values properly
+         # In SQL, you cannot use = with NULL, must use IS NULL
+         # If a field is not in to_find, we don't filter on it at all
+         where_clauses = []
+         params = []
+
+         # Handle imagetyp (required)
+         where_clauses.append("imagetyp = ?")
+         params.append(image_type)
+
+         # Handle filter (optional - only filter if present in to_find)
+         filter_key = get_column_name(Database.FILTER_KEY)
+         filter = to_find.get(filter_key)  # filter can be the string "None"
+         if filter:
+             if filter is None:
+                 where_clauses.append("filter IS NULL")
+             else:
+                 where_clauses.append("filter = ?")
+                 params.append(filter)
+
+         # Handle object/target (optional - only filter if present in to_find)
+         object_key = get_column_name(Database.OBJECT_KEY)
+         target = to_find.get(object_key)
+         if target:
+             target = normalize_target_name(target)
+             if target is None:
+                 where_clauses.append("object IS NULL")
+             else:
+                 where_clauses.append("object = ?")
+                 params.append(target)
+
+         # Handle telescop (optional - only filter if present in to_find)
+         telescop_key = get_column_name(Database.TELESCOP_KEY)
+         telescop = to_find.get(telescop_key)
+         if telescop:
+             if telescop is None:
+                 where_clauses.append("telescop IS NULL")
+             else:
+                 where_clauses.append("telescop = ?")
+                 params.append(telescop)
+
+         # Handle exptime (optional - only filter if present in to_find)
+         exptime_key = get_column_name(Database.EXPTIME_KEY)
+         if exptime_key in to_find:
+             exptime = to_find.get(exptime_key)
+             if exptime is None:
+                 where_clauses.append("exptime IS NULL")
+             else:
+                 where_clauses.append("exptime = ?")
+                 params.append(exptime)
+
+         # Time window
+         where_clauses.append("start >= ?")
+         where_clauses.append("start <= ?")
+         params.extend([start_min, start_max])
+
+         where_clause = " AND ".join(where_clauses)
+
          cursor = self._db.cursor()
          cursor.execute(
              f"""
              SELECT id, start, end, filter, imagetyp, object, telescop,
-                    num_images, exptime_total, image_doc_id
+                    num_images, exptime_total, exptime, image_doc_id
              FROM {self.SESSIONS_TABLE}
-             WHERE filter = ? AND imagetyp = ? AND object = ? AND telescop = ?
-                 AND start >= ? AND start <= ?
+             WHERE {where_clause}
              LIMIT 1
              """,
-             (filter, image_type, target, telescop, start_min, start_max),
+             params,
          )
 
          row = cursor.fetchone()
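
For reference, the +/- 8 hour window reduces to two string bounds because ISO8601 timestamps in a uniform format compare lexicographically in chronological order (a sketch of the start_min/start_max computation used above):

    from datetime import datetime, timedelta

    def session_window(date_iso: str, hours: float = 8.0) -> tuple[str, str]:
        d = datetime.fromisoformat(date_iso)
        delta = timedelta(hours=hours)
        # ISO strings sort like datetimes, so these bounds work with >= / <=
        return (d - delta).isoformat(), (d + delta).isoformat()

    start_min, start_max = session_window("2025-01-01T22:30:00")
    # ...WHERE start >= ? AND start <= ?  with params [start_min, start_max]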
@@ -608,14 +692,20 @@ class Database:
 
          if existing:
              # Update existing session with new data
-             updated_start = min(new[Database.START_KEY], existing[Database.START_KEY])
-             updated_end = max(new[Database.END_KEY], existing[Database.END_KEY])
-             updated_num_images = existing.get(Database.NUM_IMAGES_KEY, 0) + new.get(
-                 Database.NUM_IMAGES_KEY, 0
+             updated_start = min(
+                 new[get_column_name(Database.START_KEY)],
+                 existing[get_column_name(Database.START_KEY)],
+             )
+             updated_end = max(
+                 new[get_column_name(Database.END_KEY)],
+                 existing[get_column_name(Database.END_KEY)],
              )
+             updated_num_images = existing.get(
+                 get_column_name(Database.NUM_IMAGES_KEY), 0
+             ) + new.get(get_column_name(Database.NUM_IMAGES_KEY), 0)
              updated_exptime_total = existing.get(
-                 Database.EXPTIME_TOTAL_KEY, 0
-             ) + new.get(Database.EXPTIME_TOTAL_KEY, 0)
+                 get_column_name(Database.EXPTIME_TOTAL_KEY), 0
+             ) + new.get(get_column_name(Database.EXPTIME_TOTAL_KEY), 0)
 
              cursor.execute(
                  f"""
@@ -636,19 +726,22 @@ class Database:
              cursor.execute(
                  f"""
                  INSERT INTO {self.SESSIONS_TABLE}
-                 (start, end, filter, imagetyp, object, telescop, num_images, exptime_total, image_doc_id)
-                 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+                 (start, end, filter, imagetyp, object, telescop, num_images, exptime_total, exptime, image_doc_id)
+                 VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                  """,
                  (
-                     new[Database.START_KEY],
-                     new[Database.END_KEY],
-                     new[Database.FILTER_KEY],
-                     new[Database.IMAGETYP_KEY],
-                     new[Database.OBJECT_KEY],
-                     new.get(Database.TELESCOP_KEY, "unspecified"),
-                     new[Database.NUM_IMAGES_KEY],
-                     new[Database.EXPTIME_TOTAL_KEY],
-                     new.get(Database.IMAGE_DOC_KEY),
+                     new[get_column_name(Database.START_KEY)],
+                     new[get_column_name(Database.END_KEY)],
+                     new.get(get_column_name(Database.FILTER_KEY)),
+                     new[get_column_name(Database.IMAGETYP_KEY)],
+                     normalize_target_name(
+                         new.get(get_column_name(Database.OBJECT_KEY))
+                     ),
+                     new.get(get_column_name(Database.TELESCOP_KEY)),
+                     new[get_column_name(Database.NUM_IMAGES_KEY)],
+                     new[get_column_name(Database.EXPTIME_TOTAL_KEY)],
+                     new[get_column_name(Database.EXPTIME_KEY)],
+                     new[get_column_name(Database.IMAGE_DOC_KEY)],
                  ),
              )
 
@@ -12,13 +12,33 @@ kind = "preferences"
  dark = ["dark", "darks"]
  flat = ["flat", "flats"]
  bias = ["bias", "biases"]
+ light = ["light", "lights"]
 
  # file suffixes
- fit = ["fits", "fit"]
+ fits = ["fits", "fit"]
 
  # filter names
- SiiOiii = ["SiiOiii", "SII-OIII", "S2-O3"]
- HaOiii = ["HaOiii", "HA-OIII", "Halpha-O3"]
+ SiiOiii = ["SiiOiii", "S2O3"]
+ HaOiii = ["HaOiii", "HaO3"]
+
+ None = ["None"]
+
+ camera_osc = ["OSC", "ZWO ASI2600MC Duo"]
+
+ # Passes the SII 672.4nm and H-Beta 486.1nm lines,
+ # capturing the two main emission wavebands in the deep red and blue at the same time.
+ #
+ # The ALP-T dual band 3.5nm SII&Hb filter is a dual narrowband filter, which lets the deep
+ # red Sulfur-II 672.4nm and the blue Hydrogen-Beta 486.1nm lines through and is primarily
+ # engineered for color cameras to assist astrophotographers taking deep sky images with
+ # superior SNR (Signal to Noise Ratio). With an FWHM halfbandwidth designed at 3.5nm and
+ # achieving an optical density (OD) of 4.5 on unwanted wavelengths, it works strongly in
+ # blocking light pollution, moonlight, and airglow, leading to enhanced contrast in nebulae
+ # images by effectively passing only the SII and H-beta emission line signal.
+ #
+ # http://www.antliafilter.com/pd.jsp?fromColId=2&id=160#_pp=2_671
+ SiiHb = ["SiiHb", "S2Hb"]
+
 
  # FIXME, somewhere here list default patterns which can be used to identify NINA, ASIAIR, SEESTAR
  # raw repo layouts
starbash/paths.py CHANGED
@@ -7,19 +7,24 @@ app_author = "geeksville"
  dirs = PlatformDirs(app_name, app_author)
  config_dir = Path(dirs.user_config_dir)
  data_dir = Path(dirs.user_data_dir)
+ documents_dir = Path(dirs.user_documents_dir) / "starbash"
 
  # These can be overridden for testing
  _override_config_dir: Path | None = None
  _override_data_dir: Path | None = None
+ _override_documents_dir: Path | None = None
 
 
  def set_test_directories(
-     config_dir_override: Path | None = None, data_dir_override: Path | None = None
+     config_dir_override: Path | None = None,
+     data_dir_override: Path | None = None,
+     documents_dir_override: Path | None = None,
  ) -> None:
      """Set override directories for testing. Used by test fixtures to isolate test data."""
-     global _override_config_dir, _override_data_dir
+     global _override_config_dir, _override_data_dir, _override_documents_dir
      _override_config_dir = config_dir_override
      _override_data_dir = data_dir_override
+     _override_documents_dir = documents_dir_override
 
 
  def get_user_config_dir() -> Path:
@@ -36,3 +41,14 @@ def get_user_data_dir() -> Path:
      dir_to_use = _override_data_dir if _override_data_dir is not None else data_dir
      os.makedirs(dir_to_use, exist_ok=True)
      return dir_to_use
+
+
+ def get_user_documents_dir() -> Path:
+     """Get the user documents directory. Returns test override if set, otherwise the real user directory."""
+     dir_to_use = (
+         _override_documents_dir
+         if _override_documents_dir is not None
+         else documents_dir
+     )
+     os.makedirs(dir_to_use, exist_ok=True)
+     return dir_to_use
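
A hypothetical pytest fixture built on the extended override hook (the fixture and test names are illustrative; only set_test_directories and get_user_documents_dir come from the package):

    import pytest
    from starbash.paths import set_test_directories, get_user_documents_dir

    @pytest.fixture
    def isolated_dirs(tmp_path):
        set_test_directories(
            config_dir_override=tmp_path / "config",
            data_dir_override=tmp_path / "data",
            documents_dir_override=tmp_path / "documents",
        )
        yield tmp_path
        set_test_directories()  # the all-None call restores the real user directories

    def test_documents_dir(isolated_dirs):
        # get_user_documents_dir() creates and returns the overridden directory
        assert get_user_documents_dir() == isolated_dirs / "documents"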
@@ -7,16 +7,17 @@ kind = "recipe"
  author.name = "FIXMESiril?"
  author.email = "FIXMESiril?"
 
- [[stage]]
+ [recipe.stage.master-bias]
 
  description = "Generate master bias"
- disabled = false # FIXME, debugging later stuff
+ # disabled = false # turn on to skip
 
  # Run this stage only if matching hardware was detected for this session
  # For any camera
- auto.for-camera = []
+ # auto.for-camera = []
 
- tool = "siril"
+ tool.name = "siril"
+ # tool.timeout = 15.0 # allow up to 15 seconds before we time out and kill the tool
 
  # or auto?
  # find the most recent raw fits for the current instrument (as of the time of session start)
@@ -25,19 +26,12 @@ input.type = "bias" # look in all raw repos, but look only for bias files
 
  # Look for files in input repos, finding them by using the "relative" tag they contain
  input.source = "repo"
+ input.required = 2 # siril needs at least 2 frames to stack
+ # old school paths also work (but are not recommended)
  # input.path = ".../from_astroboy/masters-raw/2025-09-09/BIAS/*.fit*"
- input.required = true # Is at least one input file required? If true, we will skip running this stage entirely (with a warning)
-
- # make the following also work
- #
- #os.makedirs(os.path.dirname(output), exist_ok=True)
- #os.makedirs(os.path.dirname(process_dir), exist_ok=True)
- #frames = glob(f"{masters_raw}/{date}/BIAS/{date}_*.fit*")
- #siril_run_in_temp_dir(frames, ...
- when = "setup.masters" # run when master biases are regenerated
 
  # Based on the following definitions in the stage toml file...
- output.dest = "repo" # write to a particular repo
+ output.dest = "repo"   # write to a particular repo
  output.type = "master" # write output to the special masters repo
 
  # the following fields will be auto populated in the context before entry:
@@ -64,9 +58,9 @@ output.type = "master" # write output to the special masters repo
 
  script = '''
  # Convert Bias Frames to .fit files
- link bias -out={process_dir}
+ link frames -out={process_dir}
  cd {process_dir}
 
- # Stack Bias Frames to bias_stacked.fit
- stack bias rej 3 3 -nonorm -out={output["base_path"]}
- '''
+ # Stack frames
+ stack frames rej 3 3 -nonorm -out={output["base_path"]}
+ '''