starbash 0.1.9__py3-none-any.whl → 0.1.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. repo/__init__.py +1 -1
  2. repo/manager.py +14 -23
  3. repo/repo.py +52 -10
  4. starbash/__init__.py +10 -3
  5. starbash/aliases.py +145 -0
  6. starbash/analytics.py +3 -2
  7. starbash/app.py +512 -473
  8. starbash/check_version.py +18 -0
  9. starbash/commands/__init__.py +2 -1
  10. starbash/commands/info.py +88 -14
  11. starbash/commands/process.py +76 -24
  12. starbash/commands/repo.py +41 -68
  13. starbash/commands/select.py +141 -142
  14. starbash/commands/user.py +88 -23
  15. starbash/database.py +219 -112
  16. starbash/defaults/starbash.toml +24 -3
  17. starbash/exception.py +21 -0
  18. starbash/main.py +29 -7
  19. starbash/paths.py +35 -5
  20. starbash/processing.py +724 -0
  21. starbash/recipes/README.md +3 -0
  22. starbash/recipes/master_bias/starbash.toml +16 -19
  23. starbash/recipes/master_dark/starbash.toml +33 -0
  24. starbash/recipes/master_flat/starbash.toml +26 -18
  25. starbash/recipes/osc.py +190 -0
  26. starbash/recipes/osc_dual_duo/starbash.toml +54 -44
  27. starbash/recipes/osc_simple/starbash.toml +82 -0
  28. starbash/recipes/osc_single_duo/starbash.toml +51 -32
  29. starbash/recipes/seestar/starbash.toml +82 -0
  30. starbash/recipes/starbash.toml +30 -9
  31. starbash/selection.py +32 -36
  32. starbash/templates/repo/master.toml +7 -3
  33. starbash/templates/repo/processed.toml +15 -0
  34. starbash/templates/userconfig.toml +9 -0
  35. starbash/toml.py +13 -13
  36. starbash/tool.py +230 -96
  37. starbash-0.1.15.dist-info/METADATA +216 -0
  38. starbash-0.1.15.dist-info/RECORD +45 -0
  39. starbash/recipes/osc_dual_duo/starbash.py +0 -151
  40. starbash-0.1.9.dist-info/METADATA +0 -145
  41. starbash-0.1.9.dist-info/RECORD +0 -37
  42. {starbash-0.1.9.dist-info → starbash-0.1.15.dist-info}/WHEEL +0 -0
  43. {starbash-0.1.9.dist-info → starbash-0.1.15.dist-info}/entry_points.txt +0 -0
  44. {starbash-0.1.9.dist-info → starbash-0.1.15.dist-info}/licenses/LICENSE +0 -0
@@ -2,30 +2,43 @@
  [repo]
  kind = "recipe"

+ [recipe]
+
+ # Lower priority recipes are tried to match first
+ # - we want this to match after the osc_dual_duo has a chance
+ priority = 20
+
+ author.name = "FIXMEsirilsomeone"
+ author.email = "FIXMEsirilsomeone"
+
+ # for single duo look for this
+ auto.require.filter = ["HaOiii"]
+ auto.require.color = true
+
  # all sb.toml files can optionally contain a version section. if version of the running starbash app is out of bounds a warning message will be printed
  # to the user and the file will be ignored for future processing.
  [recipe.require.version]
  min="0.1.0"
  max="4.5.8"

- [recipe]
- author.name = "Kevin Hester"
- author.email = "kevinh@geeksville.com"
+ [recipe.stage.light]

- [[stage]]
+ description = "Extract OSC dual duo filter Ha, Oiii and Sii channels"
+ # disabled = true # FIXME, debugging later stuff

- description = "Process OSC single duo filter data, extracting Ha and Oiii"
+ tool.name = "siril"

- disabled = true # FIXME, we don't yet have auto selection based on filter types
+ # Auto find input light frames for the current session
+ # Look for files in input repos, finding them by using the "relative" tag they contain
+ input.source = "repo"
+ input.required = 2 # siril needs at least 2 frames to stack
+ input.type = "light" # look in all raw repos, but look only for light files

- # Restrict processing of this stage to only if detected hardware was found for this session
- # for single duo look for this
- auto.for-filter = ["HaOiii"]
- auto.for-camera = ["OSC"]
+ # Auto find suitable masters of the following type
+ input.masters = ["bias", "flat"]

- tool = "siril"
- when = "session-light" # run once per session-config
- output = "FIXME"
+ # the base name for our light files
+ context.light_base = '''light_s{session["id"]}'''

  script = '''
  # Create a sequence from the raw light frames, seq file goes to process_dir
@@ -33,35 +46,41 @@ script = '''
  cd {process_dir}

  # Calibrate the light frames using master bias and flat
- calibrate {light_base} -bias={bias} -flat={flat} -cfa -equalize_cfa
+ calibrate {light_base} -bias={master["bias"]} -flat={master["flat"]} -cfa -equalize_cfa

- # Remove background gradient on a per-frame basis (generates bkg_pp_{light_base}.seq)
+ # Remove background gradient on a per-frame basis (generates bkg_pp_light.seq)
  seqsubsky pp_{light_base} 1
+
+ # FIXME only do this step for duo filters (refactor to share common light processing function)
+ seqextract_HaOIII bkg_pp_{light_base} -resample=ha
  '''

- temporaries = ["FIXME"]
+ # temporaries = ["FIXME"]

- [[stage]]
+ [recipe.stage.stack]

- disabled = true # FIXME, we don't yet have auto selection based on filter types
+ # this stage is only be considered if the previous stage in this same array
+ # was run. It must be run inside the same tempdir (so that files from previous stage are available)

- tool = "python"
- when = "session-stack" # run once after all session/session-config processing was done
+ description = "Stack OSC single duo filter data: with Ha, Oiii extraction"

- script-file = "script.py"
+ input.source = "recipe" # we will use output files from previous stages in this same recipe as our input

- # or inline python code instead of that function?
- script = '''
- # green output channel - from the HaOiii filter Ha is on the 656nm red channel
- make_stacked("HaOiii", "Ha", f"results_00001")
+ tool.name = "python"

- # blue output channel - both filters have Oiii on the 500nm blue channel. Note the case here is uppercase to match siril output
- make_stacked("*", "OIII", f"results_00002")
+ # Based on the following definitions in the stage toml file...
+ # FIXME, we should inherit this - most recipes shouldn't have to declare it
+ output.dest = "repo" # write to a particular repo
+ output.type = "processed" # write output to the special processed repo

- # There might be an old/state autogenerated .seq file, delete it so it doesn't confuse renormalize
- results_seq_path = f"{process_dir}/results_.seq"
- if os.path.exists(results_seq_path):
- os.remove(results_seq_path)
+ # if not specified starbash.py used
+ # script-file = "script.py"

- make_renormalize()
+ # or inline python code can be provided here. In this case I'm using some python
+ # code I'm temporarily sharing from the main project...
+ script = '''
+ from starbash.recipes import osc
+ osc.logger = logger
+ osc.context = context
+ osc.osc_process(has_ha_oiii=True, has_sii_oiii=False)
  '''
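
Editor's note: the new stack stages above hand control to a shared module (starbash/recipes/osc.py, added in this release) by assigning `osc.logger` and `osc.context` before calling `osc.osc_process()`. As a rough, hedged sketch only (the real module's internals are not shown in this diff; the body below is illustrative), the module-level injection pattern looks like this:

```python
# Hedged sketch (not the actual starbash/recipes/osc.py): a recipe helper module
# that receives its collaborators via module-level injection, matching the inline
# script blocks above (`osc.logger = logger; osc.context = context`).
import logging
from typing import Any

# Injected by the calling stage before osc_process() runs.
logger: logging.Logger = logging.getLogger(__name__)
context: dict[str, Any] = {}


def osc_process(has_ha_oiii: bool = False, has_sii_oiii: bool = False) -> None:
    """Stack previously calibrated frames; channel handling depends on the duo-filter flags."""
    process_dir = context.get("process_dir", ".")
    logger.info(
        "Stacking OSC data in %s (HaOiii=%s, SiiOiii=%s)",
        process_dir, has_ha_oiii, has_sii_oiii,
    )
    # ... the real implementation would extract/stack Ha, OIII and SII channels here ...
```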
@@ -0,0 +1,82 @@
+
+ [repo]
+ kind = "recipe"
+
+ [recipe]
+
+ # Lower priority recipes are tried to match first
+ # - we want to match this before all other color cameras (because we need special processing)
+ priority = 5
+
+ author.name = "FIXMEsirilsomeone"
+ author.email = "FIXMEsirilsomeone"
+
+ # no filter requirements - will work for any osc session
+ auto.require.camera = ["Seestar"]
+ auto.require.color = true
+
+ # all sb.toml files can optionally contain a version section. if version of the running starbash app is out of bounds a warning message will be printed
+ # to the user and the file will be ignored for future processing.
+ [recipe.require.version]
+ min="0.1.0"
+ max="4.5.8"
+
+ [recipe.stage.light]
+
+ description = "Fix background gradient on Seestar frames"
+ # disabled = true # FIXME, debugging later stuff
+
+ tool.name = "siril"
+
+ # Auto find input light frames for the current session
+ # Look for files in input repos, finding them by using the "relative" tag they contain
+ input.source = "repo"
+ input.required = 2 # siril needs at least 2 frames to stack
+ input.type = "light" # look in all raw repos, but look only for light files
+
+ # We don't require any masters for seestar processing
+ input.masters = []
+
+ # the base name for our light files
+ context.light_base = '''light_s{session["id"]}'''
+
+ script = '''
+ # Create a sequence from the raw light frames, seq file goes to process_dir
+ link pp_{light_base} -out={process_dir}
+ cd {process_dir}
+
+ # No calibration needed for seestar frames - it was done in camera
+
+ # Remove background gradient on a per-frame basis (generates bkg_pp_light.seq)
+ seqsubsky pp_{light_base} 1
+ '''
+
+ # temporaries = ["FIXME"]
+
+ [recipe.stage.stack]
+
+ # this stage is only be considered if the previous stage in this same array
+ # was run. It must be run inside the same tempdir (so that files from previous stage are available)
+
+ description = "Stack OSC simple (non duo filter) images"
+
+ input.source = "recipe" # we will use output files from previous stages in this same recipe as our input
+
+ tool.name = "python"
+
+ # Based on the following definitions in the stage toml file...
+ # FIXME, we should inherit this - most recipes shouldn't have to declare it
+ output.dest = "repo" # write to a particular repo
+ output.type = "processed" # write output to the special processed repo
+
+ # if not specified starbash.py used
+ # script-file = "script.py"
+
+ # or inline python code can be provided here. In this case I'm using some python
+ # code I'm temporarily sharing from the main project...
+ script = '''
+ from starbash.recipes import osc
+ osc.logger = logger
+ osc.context = context
+ osc.osc_process(has_ha_oiii=False, has_sii_oiii=False)
+ '''
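
Editor's note: both recipes above carry a `[recipe.require.version]` table whose comment says an out-of-bounds app version prints a warning and causes the file to be ignored. A minimal sketch of that bounds check, assuming semantic-version comparison via the third-party `packaging` library (the helper name and message are illustrative, not starbash's actual API):

```python
# Hedged sketch of the [recipe.require.version] bounds check described above.
from packaging.version import Version


def version_in_bounds(app_version: str, min_version: str, max_version: str) -> bool:
    """Return True if app_version falls within [min_version, max_version]."""
    v = Version(app_version)
    return Version(min_version) <= v <= Version(max_version)


if not version_in_bounds("0.1.15", "0.1.0", "4.5.8"):
    print("warning: recipe requires an incompatible starbash version; ignoring file")
```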
@@ -5,6 +5,8 @@ kind = "repo"
5
5
  [[repo-ref]]
6
6
  dir = "master_bias"
7
7
  [[repo-ref]]
8
+ dir = "master_dark"
9
+ [[repo-ref]]
8
10
  dir = "master_flat"
9
11
 
10
12
  # Note: For automated recipe finding, it is important to list more demanding recipes first. For instance:
@@ -19,21 +21,40 @@ dir = "osc_dual_duo"
19
21
  [[repo-ref]]
20
22
  dir = "osc_single_duo"
21
23
 
22
- # processing stages, currently all declared here, but possibly in the future they could be added by user/other toml files
24
+ [[repo-ref]]
25
+ dir = "osc_simple"
23
26
 
24
- # Not included in standard list - for now we run manually
25
- #[[stages]]
26
- #name = "setup-masters" # for flat processing, master generation etc
27
- #priority = 5
27
+ [[repo-ref]]
28
+ dir = "seestar"
28
29
 
29
- [[stages]]
30
- name = "session-config" # for flat processing, master generation etc
30
+ #
31
+ # master specific stages
32
+ #
33
+ [[master-stages]]
34
+ name = "master-bias" # generate master bias frames
31
35
  priority = 10
36
+ input = "bias" # only used for frames of this type
37
+
38
+ [[master-stages]]
39
+ name = "master-dark" # generate master dark frames
40
+ priority = 10
41
+ input = "dark"
42
+
43
+ [[master-stages]]
44
+ name = "master-flat" # generate master flat frames
45
+ priority = 20
46
+ input = "flat"
47
+
48
+ #
49
+ # session specific processing stages
50
+ # FIXME - we are not using this yet, for now we just hardcode in app.py
32
51
 
33
52
  [[stages]]
34
- name = "session-light" # generate light frames from lights and with reference to flats/bias
53
+ name = "light" # generate light frames from lights and with reference to flats/bias
35
54
  priority = 20
55
+ input = "light" # only used for frames of this type
36
56
 
37
57
  [[stages]]
38
- name = "session-stack" # stack frames
58
+ name = "stack" # stack frames
39
59
  priority = 30
60
+ input = "recipe" # use output from previous stages in the same recipe
starbash/selection.py CHANGED
@@ -3,12 +3,19 @@
  from __future__ import annotations

  import logging
- from typing import Any, Optional, TYPE_CHECKING
+ from typing import TYPE_CHECKING, Any
+
+ if TYPE_CHECKING:
+     from starbash.database import SearchCondition
+
  from repo import Repo
+ from starbash.aliases import normalize_target_name


- def where_tuple(conditions: dict[str, Any] | None) -> tuple[str, list[Any]]:
-     """Search for sessions matching the given conditions.
+ def build_search_conditions(
+     conditions: dict[str, Any] | None,
+ ) -> list[SearchCondition]:
+     """Build a list of SearchCondition objects from a conditions dictionary.

  Args:
  conditions: Dictionary of session key-value pairs to match, or None for all.
@@ -17,42 +24,33 @@ def where_tuple(conditions: dict[str, Any] | None) -> tuple[str, list[Any]]:
  - 'date_end': Filter sessions starting on or before this date

  Returns:
- Tuple of (WHERE clause string, list of parameters)
+ List of SearchCondition tuples for database queries
  """
+ # Import here to avoid circular dependency
+ from starbash.database import SearchCondition
+
  if conditions is None:
  conditions = {}

- # Build WHERE clause dynamically based on conditions
- where_clauses = []
- params = []
+ search_conditions = []

  # Extract date range conditions
  date_start = conditions.get("date_start")
  date_end = conditions.get("date_end")

- # Add date range filters to WHERE clause
+ # Add date range filters as SearchConditions
  if date_start:
- where_clauses.append("start >= ?")
- params.append(date_start)
+ search_conditions.append(SearchCondition("start", ">=", date_start))

  if date_end:
- where_clauses.append("start <= ?")
- params.append(date_end)
+ search_conditions.append(SearchCondition("start", "<=", date_end))

- # Add standard conditions to WHERE clause
+ # Add standard conditions as SearchConditions
  for key, value in conditions.items():
  if key not in ("date_start", "date_end") and value is not None:
- column_name = key
- where_clauses.append(f"{column_name} = ?")
- params.append(value)
-
- # Build the query
- query = ""
+ search_conditions.append(SearchCondition(key, "=", value))

- if where_clauses:
- query += " WHERE " + " AND ".join(where_clauses)
-
- return (query, params)
+ return search_conditions


  class Selection:
@@ -69,7 +67,7 @@ class Selection:
  used to build database queries.
  """

- def __init__(self, user_repo: "Repo"):
+ def __init__(self, user_repo: Repo):
  """Initialize the Selection with the user config repository.

  Args:
@@ -77,8 +75,8 @@ class Selection:
  """
  self.user_repo = user_repo
  self.targets: list[str] = []
- self.date_start: Optional[str] = None
- self.date_end: Optional[str] = None
+ self.date_start: str | None = None
+ self.date_end: str | None = None
  self.filters: list[str] = []
  self.image_types: list[str] = []
  self.telescopes: list[str] = []
@@ -193,9 +191,7 @@
  self.telescopes.remove(telescope)
  self._save()

- def set_date_range(
- self, start: Optional[str] = None, end: Optional[str] = None
- ) -> None:
+ def set_date_range(self, start: str | None = None, end: str | None = None) -> None:
  """Set the date range for the selection.

  Args:
@@ -241,11 +237,11 @@
  and not self.telescopes
  )

- def get_query_conditions(self) -> tuple[str, list[Any]]:
+ def get_query_conditions(self) -> list[SearchCondition]:
  """Build query conditions based on the current selection.

  Returns:
- A tuple of SQL (WHERE clause string, list of parameters)
+ A list of SearchCondition objects for database queries
  """
  conditions = {}

@@ -256,7 +252,9 @@
  if self.targets:
  # For now, just use the first target
  # TODO: Support multiple targets in queries
- conditions["OBJECT"] = self.targets[0] if len(self.targets) == 1 else None
+ conditions["OBJECT"] = (
+ normalize_target_name(self.targets[0]) if len(self.targets) == 1 else None
+ )

  if self.filters:
  # For now, just use the first filter
@@ -266,9 +264,7 @@
  if self.telescopes:
  # For now, just use the first telescope
  # TODO: Support multiple telescopes in queries
- conditions["TELESCOP"] = (
- self.telescopes[0] if len(self.telescopes) == 1 else None
- )
+ conditions["TELESCOP"] = self.telescopes[0] if len(self.telescopes) == 1 else None

  # Add date range conditions
  if self.date_start:
@@ -276,7 +272,7 @@
  conditions["date_start"] = self.date_start
  if self.date_end:
  conditions["date_end"] = self.date_end

- return where_tuple(conditions)
+ return build_search_conditions(conditions)

  def summary(self) -> dict[str, Any]:
  """Get a summary of the current selection state.
@@ -1,13 +1,17 @@
- # This is a master repository for (Starbash)[{PROJECT_URL}].
+ # This is a master repository for (Starbash)[$PROJECT_URL].
  #
  # This file marks the root directory of a set of auto matinained astrophotography
  # 'master' files, such as master darks, flats or biases.
  #
  # You generally don't need to edit this file directly - it was auto generated when you ran
- # "sb repo add --master {REPO_PATH}".
+ # "sb repo add --$REPO_TYPE $REPO_PATH".
  #

  [repo]
  kind = "master"

- relative = "{DEFAULT_RELATIVE}"
+ # Given a particular 'kind' of master, we can use different relative paths
+ relative.flat = "{instrument}/{date}/{imagetyp}/master_{session_config}.fit"
+
+ # For any other kind we use the default path (i.e. bias, dark, etc)
+ relative.default = "{camera_id}/{date}/{imagetyp}/master_{session_config}.fit"
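
Editor's note: the master template now carries per-kind relative path patterns (`relative.flat` plus a `relative.default` fallback). A minimal sketch of how such a table might be resolved against frame metadata; the lookup helper and example metadata values are illustrative, not starbash code:

```python
# Hedged sketch of per-kind relative path resolution for the [repo] table above.
relative = {
    "flat": "{instrument}/{date}/{imagetyp}/master_{session_config}.fit",
    "default": "{camera_id}/{date}/{imagetyp}/master_{session_config}.fit",
}


def master_path(kind: str, metadata: dict[str, str]) -> str:
    # Fall back to the default pattern for kinds without a dedicated entry (bias, dark, ...).
    pattern = relative.get(kind, relative["default"])
    return pattern.format(**metadata)


print(master_path("dark", {
    "camera_id": "ASI2600MC", "date": "2024-11-02",
    "imagetyp": "dark", "session_config": "gain100_300s",
}))
```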
@@ -0,0 +1,15 @@
+ # This is a processed repository for (Starbash)[$PROJECT_URL].
+ #
+ # This file marks the root directory of a set of generated/processed starbash output files.
+ #
+ # You generally don't need to edit this file directly - it was auto generated when you ran
+ # "sb repo add --$REPO_TYPE $REPO_PATH".
+ #
+
+ [repo]
+ kind = "processed"
+
+ # This path should at least point to a FITS file - so that 'is output file generated' test can work.
+ # if this expression includes a * at the end we assume multiple files might be generated and they are all
+ # placed in the indicated directory.
+ relative.default = "{target}/*"
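
Editor's note: the comments above describe an "is output file generated" test, with a trailing `*` meaning any number of outputs in the indicated directory. A hedged sketch of that check; the helper name and logic are assumptions for illustration only:

```python
# Hedged sketch of the output-existence test hinted at by the processed.toml comments.
from pathlib import Path


def output_exists(repo_root: Path, relative_pattern: str, target: str) -> bool:
    pattern = relative_pattern.format(target=target)
    if pattern.endswith("*"):
        # Multiple outputs allowed: any match in the indicated directory counts.
        return any(repo_root.glob(pattern))
    return (repo_root / pattern).exists()


print(output_exists(Path("/data/processed"), "{target}/*", "M 31"))
```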
@@ -6,6 +6,15 @@
  [repo]
  kind = "preferences"

+ #[aliases]
+ # aliases can be used to map non standard (or non english) frame names to standard terms
+ # This is also used to map filters based on common misspellings or variations.
+ # We assume the first listed option in the list is the 'canonical' name used for printing etc...
+
+ # frame types
+ # dark = ["dark", "darks"]
+ # etc...
+
  [analytics]
  # enabled = true # default is true - change to false if you don't want any analytics/crash-reports
  # include_user = false # default is false - change to true if you'd like your email added to crash reports/analytics
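
Editor's note: the commented `[aliases]` section (backed by the new starbash/aliases.py, which also supplies the `normalize_target_name` used in selection.py) maps variant frame/filter names to a canonical form, with the first listed option treated as canonical. A hedged sketch of that lookup; the alias table contents and helper name here are illustrative, not the real starbash/aliases.py API:

```python
# Hedged sketch of alias-based canonicalization as described in the comments above.
ALIASES = {
    "dark": ["dark", "darks"],
    "flat": ["flat", "flats", "flat field"],
}


def canonical_name(name: str, aliases: dict[str, list[str]] = ALIASES) -> str:
    lowered = name.strip().lower()
    for canonical, variants in aliases.items():
        if lowered in (v.lower() for v in variants):
            return canonical  # the first listed option is the canonical spelling
    return name


print(canonical_name("Darks"))  # -> "dark"
```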
starbash/toml.py CHANGED
@@ -1,28 +1,28 @@
- import tomlkit
- from tomlkit.toml_file import TOMLFile
- from pathlib import Path
  from importlib import resources
+ from pathlib import Path
+ from string import Template
  from typing import Any

+ import tomlkit
+ from tomlkit.toml_file import TOMLFile
+
  from starbash import url

- def toml_from_template(template_name: str, dest_path: Path, overrides: dict[str, Any] = {}) -> tomlkit.TOMLDocument:
+
+ def toml_from_template(
+ template_name: str, dest_path: Path, overrides: dict[str, Any] = {}
+ ) -> tomlkit.TOMLDocument:
  """Load a TOML document from a template file.
  expand {vars} in the template using the `overrides` dictionary.
  """

- tomlstr = (
- resources.files("starbash")
- .joinpath(f"templates/{template_name}.toml")
- .read_text()
- )
+ tomlstr = resources.files("starbash").joinpath(f"templates/{template_name}.toml").read_text()

  # add default vars always available
- vars = {
- "PROJECT_URL": url.project
- }
+ vars = {"PROJECT_URL": url.project}
  vars.update(overrides)
- tomlstr = tomlstr.format(**vars)
+ t = Template(tomlstr)
+ tomlstr = t.substitute(vars)
  toml = tomlkit.parse(tomlstr)
  TOMLFile(dest_path).write(toml)
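
Editor's note: the toml.py change from `str.format()` to `string.Template` pairs with the template changes above, where `{PROJECT_URL}` became `$PROJECT_URL` and literal `{camera_id}`-style placeholders were introduced. A short illustration of why (the template string and URL value below are made up for the demo):

```python
# With str.format(), literal {camera_id}/{date} placeholders in the templates would
# raise KeyError (or need escaping); string.Template only expands $-style variables.
from string import Template

tomlstr = 'relative.default = "{camera_id}/{date}/master.fit"  # docs: $PROJECT_URL'

print(Template(tomlstr).substitute({"PROJECT_URL": "https://example.org/starbash"}))
# -> relative.default = "{camera_id}/{date}/master.fit"  # docs: https://example.org/starbash
```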