starbash 0.1.9__py3-none-any.whl → 0.1.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. repo/__init__.py +1 -1
  2. repo/manager.py +14 -23
  3. repo/repo.py +52 -10
  4. starbash/__init__.py +10 -3
  5. starbash/aliases.py +145 -0
  6. starbash/analytics.py +3 -2
  7. starbash/app.py +512 -473
  8. starbash/check_version.py +18 -0
  9. starbash/commands/__init__.py +2 -1
  10. starbash/commands/info.py +88 -14
  11. starbash/commands/process.py +76 -24
  12. starbash/commands/repo.py +41 -68
  13. starbash/commands/select.py +141 -142
  14. starbash/commands/user.py +88 -23
  15. starbash/database.py +219 -112
  16. starbash/defaults/starbash.toml +24 -3
  17. starbash/exception.py +21 -0
  18. starbash/main.py +29 -7
  19. starbash/paths.py +35 -5
  20. starbash/processing.py +724 -0
  21. starbash/recipes/README.md +3 -0
  22. starbash/recipes/master_bias/starbash.toml +16 -19
  23. starbash/recipes/master_dark/starbash.toml +33 -0
  24. starbash/recipes/master_flat/starbash.toml +26 -18
  25. starbash/recipes/osc.py +190 -0
  26. starbash/recipes/osc_dual_duo/starbash.toml +54 -44
  27. starbash/recipes/osc_simple/starbash.toml +82 -0
  28. starbash/recipes/osc_single_duo/starbash.toml +51 -32
  29. starbash/recipes/seestar/starbash.toml +82 -0
  30. starbash/recipes/starbash.toml +30 -9
  31. starbash/selection.py +32 -36
  32. starbash/templates/repo/master.toml +7 -3
  33. starbash/templates/repo/processed.toml +15 -0
  34. starbash/templates/userconfig.toml +9 -0
  35. starbash/toml.py +13 -13
  36. starbash/tool.py +230 -96
  37. starbash-0.1.15.dist-info/METADATA +216 -0
  38. starbash-0.1.15.dist-info/RECORD +45 -0
  39. starbash/recipes/osc_dual_duo/starbash.py +0 -151
  40. starbash-0.1.9.dist-info/METADATA +0 -145
  41. starbash-0.1.9.dist-info/RECORD +0 -37
  42. {starbash-0.1.9.dist-info → starbash-0.1.15.dist-info}/WHEEL +0 -0
  43. {starbash-0.1.9.dist-info → starbash-0.1.15.dist-info}/entry_points.txt +0 -0
  44. {starbash-0.1.9.dist-info → starbash-0.1.15.dist-info}/licenses/LICENSE +0 -0
@@ -1,3 +1,6 @@
  This is what a typical directory of recipes would look like. It could be hosted locally in a directory tree, on GitHub, whatever.
 
  Currently it lives in the starbash python blob, but eventually the 'master' set of recipes will live in a different repo. In fact, different orgs could provide their own recipe repos.
+
+ FIXME:
+ For the time being we also have a few python scripts - until they are refactored a bit so they can live in the http tree.
@@ -7,16 +7,17 @@ kind = "recipe"
  author.name = "FIXMESiril?"
  author.email = "FIXMESiril?"
 
- [[stage]]
+ [recipe.stage.master-bias]
 
  description = "Generate master bias"
- disabled = false # FIXME, debugging later stuff
+ # disabled = false # turn on to skip
 
  # Restrict processing of this stage to only if detected hardware was found for this session
  # For any camera
- auto.for-camera = []
+ # auto.for-camera = []
 
- tool = "siril"
+ tool.name = "siril"
+ # tool.timeout = 15.0 # allow up to 15 seconds before we timeout and kill tool
 
  # or auto?
  # find the most recent raw fits for the current instrument (as of the time of session start)
@@ -25,25 +26,21 @@ input.type = "bias" # look in all raw repos, but look only for bias files
 
  # Look for files in input repos, finding them by using the "relative" tag they contain
  input.source = "repo"
+ input.required = 2 # siril needs at least 2 frames to stack
+ # old school paths also work (but are not recommended)
  # input.path = ".../from_astroboy/masters-raw/2025-09-09/BIAS/*.fit*"
- input.required = true # Is at least one input file required? If true, we will skip running this stage entirely (with a warning)
-
- # make the following also work
- #
- #os.makedirs(os.path.dirname(output), exist_ok=True)
- #os.makedirs(os.path.dirname(process_dir), exist_ok=True)
- #frames = glob(f"{masters_raw}/{date}/BIAS/{date}_*.fit*")
- #siril_run_in_temp_dir(frames, ...
- when = "setup.masters" # run when master biases are regenerated
 
  # Based on the following definitions in the stage toml file...
- output.dest = "repo" # write to a particular repo
+ output.dest = "repo" # write to a particular repo
  output.type = "master" # write output to the special masters repo
 
+
+ # context.target - the name of the target (m31 etc...) we are processing (if available)
  # the following fields will be auto populated in the context before entry:
  # context.output.base_path - the full filepath to write the output file to **excluding the suffix**
  # context.output.full_path - the full filepath to write the output file to (including suffix)
- # (NOT implemented / needed) context.output.root_path - points to the base of the destination repo
+ # context.output.repo - points to the destination repo object
+ # context.output.relative_base_path - the relative path within the repo (excluding suffix) (for logging/user message purposes)
  # (NOT implemented / needed) context.output.suffix - the suffix to append to the output file (e.g. .fits or .fit.gz)
 
  # The following constants are auto defined before running the tool
@@ -64,9 +61,9 @@ output.type = "master" # write output to the special masters repo
 
  script = '''
  # Convert Bias Frames to .fit files
- link bias -out={process_dir}
+ link frames -out={process_dir}
  cd {process_dir}
 
- # Stack Bias Frames to bias_stacked.fit
- stack bias rej 3 3 -nonorm -out={output["base_path"]}
- '''
+ # Stack frames
+ stack frames rej 3 3 -nonorm -out={output["base_path"]}
+ '''
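The script blocks in these recipes are templates: placeholders such as {process_dir} and {output["base_path"]} are filled in from the stage context (the auto-populated fields documented above) before Siril is invoked. A minimal sketch of that kind of expansion, assuming a hypothetical expand_script helper and f-string-style evaluation rather than starbash.tool's actual mechanism:

    from typing import Any

    def expand_script(template: str, context: dict[str, Any]) -> str:
        # Hypothetical helper: treat each {...} placeholder as a Python expression
        # evaluated against the context keys, so {output["base_path"]} resolves too.
        # Assumes the recipe text is trusted, since it is eval'd as an f-string.
        return eval("f'''" + template + "'''", {}, context)

    # Hypothetical context values mirroring the documented auto-populated fields.
    context = {
        "process_dir": "/tmp/starbash-process",
        "output": {"base_path": "/repos/masters/2025-09-09_bias_stacked"},
    }

    script = """
    link frames -out={process_dir}
    cd {process_dir}
    stack frames rej 3 3 -nonorm -out={output["base_path"]}
    """

    print(expand_script(script, context))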
@@ -0,0 +1,33 @@
+
+ [repo]
+ kind = "recipe"
+
+
+ [recipe]
+ author.name = "FIXMESiril?"
+ author.email = "FIXMESiril?"
+
+ [recipe.stage.master-dark]
+
+ description = "Generate master dark"
+
+ tool.name = "siril"
+
+ input.type = "dark"
+
+ # Look for files in input repos, finding them by using the "relative" tag they contain
+ input.source = "repo"
+ input.required = 2 # siril needs at least 2 frames to stack
+
+ # Based on the following definitions in the stage toml file...
+ output.dest = "repo" # write to a particular repo
+ output.type = "master" # write output to the special masters repo
+
+ script = '''
+ # Convert bias/dark Frames to .fit files
+ link frames -out={process_dir}
+ cd {process_dir}
+
+ # Stack frames
+ stack frames rej 3 3 -nonorm -out={output["base_path"]}
+ '''
@@ -8,28 +8,36 @@ author.name = "FIXMESiril?"
  author.email = "FIXMESiril?"
 
 
- [[stage]]
+ [recipe.stage.master-flat]
+
+ # See master_bias/starbash.toml for more documentation
 
  description = "Generate master flat"
- disabled = true # FIXME, debugging later stuff
+ # disabled = false
 
- # For any camera
- auto.for-camera = []
+ tool.name = "siril"
+ # tool.timeout = 15.0 # allow up to 15 seconds before we timeout and kill tool
 
- tool = "siril"
- # input.source = "session" # or auto? prefer ones in session otherwise find by in masters
- input.type = "flat" # look in _session_ directories, but look only for flat files
+ # or auto?
+ # find the most recent raw fits for the current instrument (as of the time of session start)
+ # input.source = "most-recent" # only look for the most recent set of raws for this particular type
+ input.type = "flat" # look in all raw repos, but look only for flat files
 
- # FIXME for early development we have support for simple absolute file paths with globs
- input.source = "path"
- input.path = "/workspaces/starbash/images/from_astroboy/M 27/2025-09-16/FLAT/*.fit*"
- input.required = true # Is at least one input file required? If true, we will skip running this stage entirely (with a warning)
+ # Look for files in input repos, finding them by using the "relative" tag they contain
+ input.source = "repo"
+ input.required = 2 # siril needs at least 2 frames to stack
 
- when = "session-config" # run once per session-config
- context.output = "{process_dir}/flat_s{sessionid}_c{sessionconfig}.fits"
+ # We require a master bias frame for this recipe. By the time our recipe is invoked
+ # context.master.bias will have been set to a full path to a master bias frame
+ input.masters = ["bias"]
 
- # FIXME, bias should have been added to context by two previous stages. But for now hardwire
- context.bias = '/workspaces/starbash/images/masters/biases/2025-09-09_stacked.fits'
+ # Based on the following definitions in the stage toml file...
+ output.dest = "repo" # write to a particular repo
+ output.type = "master" # write output to the special masters repo
+
+ # FIXME for early development we have support for simple absolute file paths with globs
+ #input.source = "path"
+ #input.path = "/workspaces/starbash/images/from_astroboy/M 27/2025-09-16/FLAT/*.fit*"
 
  script = '''
  # Create a sequence from the raw flat frames
@@ -37,10 +45,10 @@ script = '''
  cd {process_dir}
 
  # Calibrate the flat frames using master bias
- calibrate flat -bias={bias}
+ calibrate flat -bias={master["bias"]}
 
- # Stack the pre-processed (calibrated) flat frames (writes to flat_stacked.fit)
- stack pp_flat rej 3 3 -norm=mul -out=flat_stacked
+ # Stack the pre-processed (calibrated) flat frames
+ stack pp_flat rej 3 3 -norm=mul -out={output["base_path"]}
  '''
 
  temporaries = ["flat", "pp_flat"]
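Two settings above do the work the removed hard-wired paths used to do: input.masters = ["bias"] has starbash resolve a master bias before the stage runs (the script then refers to it as {master["bias"]}), and input.required = 2 declares the minimum frame count. A minimal sketch of how such a minimum might be enforced, reusing the NotEnoughFilesError that osc.py imports from starbash.processing (the check_required helper itself is hypothetical):

    from starbash.processing import NotEnoughFilesError  # same import osc.py uses

    def check_required(frames: list[str], required: int) -> None:
        # Hypothetical guard mirroring `input.required = 2`: siril needs at least
        # two frames to stack, so fail early and report the files that were found.
        if len(frames) < required:
            raise NotEnoughFilesError(f"Need at least {required} frames", frames)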
@@ -0,0 +1,190 @@
+ # pyright: reportUndefinedVariable=false
+
+
+ import logging
+ import os
+ from glob import glob
+ from typing import Any
+
+ from starbash.processing import NotEnoughFilesError
+ from starbash.tool import tools
+
+ siril = tools["siril"]
+
+ delete_temps = False
+
+ # ('context' and 'logger' are normally injected by the starbash runtime)
+ context: dict[str, Any] = {}
+ logger: logging.Logger = None # type: ignore
+
+
+ # FIXME move this into main starbash
+ def perhaps_delete_temps(temps: list[str]) -> None:
+ if delete_temps:
+ for t in temps:
+ for path in glob(f"{context['process_dir']}/{t}_*"):
+ os.remove(path)
+
+
+ def make_stacked(variant: str | None, output_file: str):
+ """
+ Registers and stacks all pre-processed light frames for a given filter configuration
+ across all sessions.
+ """
+ # If we are being invoked for simple/non duo filter we won't have variants generated by prior steps in the workflow
+ if variant is None:
+ input_base = "bkg_pp_light"
+ else:
+ input_base = f"{variant}_bkg_pp_light"
+
+ # The sequence name for all frames of this variant across all sessions
+ # e.g. Ha_bkg_pp_light_cHaOiii
+ merged_seq_base = f"all_{input_base}"
+
+ # Absolute path for the output stacked file
+ stacked_output_path = glob(f"{context['process_dir']}/{output_file}.fit*")
+
+ if stacked_output_path:
+ logger.info(f"Using existing stacked file: {stacked_output_path}")
+ else:
+ # Merge all frames (from multiple sessions and configs) and use those for stacking
+ frames = glob(f"{context['process_dir']}/{input_base}_s*.fit*")
+
+ logger.info(
+ f"Registering and stacking {len(frames)} frames for {variant} -> {stacked_output_path}"
+ )
+ if len(frames) < 2:
+ raise NotEnoughFilesError("Need at least two frames", frames)
+
+ # Siril commands for registration and stacking. We run this in process_dir.
+ commands = f"""
+ link {merged_seq_base} -out={context["process_dir"]}
+ cd {context["process_dir"]}
+
+ # We use -2pass to select the best possible reference frame for others to register against
+ register {merged_seq_base} -2pass
+
+ # because we are using -2pass we must complete the registration here before stacking
+ # FIXME make drizzle optional
+ seqapplyreg {merged_seq_base} -drizzle
+
+ stack r_{merged_seq_base} rej g 0.3 0.05 -filter-wfwhm=3k -norm=addscale -output_norm -32b -out={output_file}
+
+ # and flip if required
+ mirrorx_single {output_file}
+ """
+
+ context["input_files"] = frames
+ siril.run(commands, context=context)
+
+ perhaps_delete_temps([merged_seq_base, f"r_{merged_seq_base}"])
+
+
+ def make_renormalize(channel_num: int):
+ """
+ Aligns the stacked images (Sii, Ha, OIII) and renormalizes Sii and OIII
+ to match the flux of the Ha channel.
+ """
+ logger.info("Aligning and renormalizing stacked images.")
+
+ # Define file basenames for the stacked images created in the 'process' directory
+ ha_base = "results_00001"
+ oiii_base = "results_00002"
+ sii_base = "results_00003"
+
+ # Define final output paths. The 'results' directory is a symlink in the work dir.
+ results_dir = context["output"]["base_path"]
+ os.makedirs(results_dir, exist_ok=True)
+
+ commands = ""
+
+ if channel_num == 1:
+ # Only one channel - just copy it - eventually we'll add other metadata
+ final_path = f"{results_dir}/stacked.fits"
+ commands += f"""
+ load results_00001
+ save "{final_path}"
+ """
+
+ # Basenames for registered files (output of 'register' command)
+ r_ha = f"r_{ha_base}"
+ r_oiii = f"r_{oiii_base}"
+
+ if channel_num >= 2:
+ # Do pixelmath to fixup channel brightness
+ logger.info("Doing renormalisation of extra Ha/Oiii channels")
+
+ ha_final_path = f"{results_dir}/stacked_Ha.fits"
+ oiii_final_path = f"{results_dir}/stacked_OIII.fits"
+
+ # Pixel math formula for renormalization.
+ # It matches the median and spread (MAD) of a channel to a reference channel (Ha).
+ # Formula: new = old * (MAD(ref)/MAD(old)) - (MAD(ref)/MAD(old)) * MEDIAN(old) + MEDIAN(ref)
+ pm_oiii = f'"${r_oiii}$*mad(${r_ha}$)/mad(${r_oiii}$)-mad(${r_ha}$)/mad(${r_oiii}$)*median(${r_oiii}$)+median(${r_ha}$)"'
+
+ # Siril commands to be executed in the 'process' directory
+ commands += f"""
+ # -transf=shift fails sometimes, which I guess is possible because we have multiple sessions with possible different camera rotation
+ # -interp=none also fails sometimes, so let default interp happen
+ # -drizzle is required for success on many images
+ register results -drizzle
+ pm {pm_oiii}
+ update_key FILTER Oiii "OSC Duo filter extracted"
+ save "{oiii_final_path}"
+ load {r_ha}
+ update_key FILTER Ha "OSC Duo filter extracted"
+ save "{ha_final_path}"
+ """
+
+ if channel_num >= 3:
+ logger.info("Doing renormalisation of extra Sii channel")
+
+ sii_final_path = f"{results_dir}/stacked_Sii.fits"
+ r_sii = f"r_{sii_base}"
+ pm_sii = f'"${r_sii}$*mad(${r_ha}$)/mad(${r_sii}$)-mad(${r_ha}$)/mad(${r_sii}$)*median(${r_sii}$)+median(${r_ha}$)"'
+ commands += f"""
+ pm {pm_sii}
+ update_key FILTER Sii "OSC dual Duo filter extracted"
+ save "{sii_final_path}"
+ """
+
+ siril.run(commands, context=context, cwd=context["process_dir"])
+ logger.info(f"Saved final renormalized images to {results_dir}")
+
+
+ def osc_process(has_ha_oiii: bool, has_sii_oiii: bool):
+ """Shared code for use by OSC processing scripts"""
+
+ logger.info(f"Running osc_process(has_ha_oiii={has_ha_oiii}, has_sii_oiii={has_sii_oiii})")
+ logger.debug("Using context: %s", context)
+
+ channel_num = 0
+ if has_sii_oiii:
+ # red output channel - from the SiiOiii filter Sii is on the 672nm red channel (mistakenly called Ha by siril)
+ channel_num += 1
+ make_stacked("Ha", f"results_{channel_num:05d}")
+
+ if has_ha_oiii:
+ # green output channel - from the HaOiii filter Ha is on the 656nm red channel
+ channel_num += 1
+ make_stacked("Ha", f"results_{channel_num:05d}")
+
+ if has_ha_oiii or has_sii_oiii:
+ # blue output channel - both filters have Oiii on the 500nm blue channel. Note the case here is uppercase to match siril output
+ channel_num += 1
+ make_stacked("OIII", f"results_{channel_num:05d}")
+
+ # if we haven't already processed some other way - just do a single channel process
+ # FIXME in this case we want to use a siril line like "stack r_bkg_pp_light rej g 0.3 0.05 -filter-wfwhm=3k -norm=addscale -output_norm -rgb_equal -32b -out=result"
+ if channel_num == 0:
+ # single channel - just stack all Ha frames together
+ channel_num += 1
+ make_stacked(None, f"results_{channel_num:05d}")
+
+ # There might be an old/stale autogenerated .seq file, delete it so it doesn't confuse renormalize
+ results_seq_path = f"{context['process_dir']}/results_.seq"
+ if os.path.exists(results_seq_path):
+ os.remove(results_seq_path)
+
+ assert channel_num >= 1, "At least one channel should have been processed"
+ make_renormalize(channel_num)
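The pm_oiii and pm_sii strings built in make_renormalize implement the formula quoted in the comment above. Written out for a channel x (OIII or Sii) renormalized against the reference channel r (Ha):

    x' = x \cdot \frac{\mathrm{MAD}(r)}{\mathrm{MAD}(x)}
         - \frac{\mathrm{MAD}(r)}{\mathrm{MAD}(x)} \cdot \mathrm{median}(x)
         + \mathrm{median}(r)
       = \mathrm{median}(r) + \frac{\mathrm{MAD}(r)}{\mathrm{MAD}(x)} \bigl(x - \mathrm{median}(x)\bigr)

That is, each extracted channel is shifted so its median matches the Ha stack's and scaled so its MAD (spread) matches, which is exactly what the $...$ Siril pixel-math expressions encode.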
@@ -2,20 +2,13 @@
  [repo]
  kind = "recipe"
 
- # all sb.toml files can optionally contain a version section. if version of the running starbash app is out of bounds a warning message will be printed
- # to the user and the file will be ignored for future processing.
- [recipe.require.version]
- min="0.1.0"
- max="4.5.8"
-
  [recipe]
- author.name = "Kevin Hester"
- author.email = "kevinh@geeksville.com"
+ author.name = "FIXMEsirilsomeone"
+ author.email = "FIXMEsirilsomeone"
 
- [[stage]]
-
- description = "Extract OSC dual duo filter Ha, Oiii and Sii channels"
- disabled = true # FIXME, debugging later stuff
+ # Lower priority recipes are tried to match first
+ # - we want this to match after the osc_dual_duo has a chance
+ priority = 10
 
  # FIXME-somehow-specify-what-filternames are used to auto detect this recipe can be used?
  # figure out how to support dual duo vs single duo. Perhaps: the FIRST recipe that matches an auto rule
@@ -25,26 +18,39 @@ disabled = true # FIXME, debugging later stuff
  # non OSC people use names like LRGB or SHO
 
  # for dual duo if we see Sii assume they also have HaOiii
- auto.for-filter = ["SiiOiii"]
+ auto.require.filter = ["SiiOiii"]
  # for single duo look for this
- # auto.for-filter = ["HaOiii"]
- auto.for-camera = ["OSC"]
+ # auto.require.filter = ["HaOiii"]
+
+ # To require a color camera use this. or to require a mono camera auto.require.mono.
+ auto.require.color = true
+
+ # all sb.toml files can optionally contain a version section. if version of the running starbash app is out of bounds a warning message will be printed
+ # to the user and the file will be ignored for future processing.
+ [recipe.require.version]
+ min="0.1.0"
+ max="4.5.8"
+
+ [recipe.stage.light]
 
- tool = "siril"
- when = "session-light" # run once per session-config
- output = "FIXME"
+ description = "Extract OSC dual duo filter Ha, Oiii and Sii channels"
+ # disabled = true # FIXME, debugging later stuff
 
- # FIXME, bias and flat should have been added to context by two previous stages. But for now hardwire
- # Note: they should not have filename extensions (see strip_extension in the old process.py)
- context.bias = '/workspaces/starbash/images/masters/biases/2025-09-09_stacked.fits'
+ tool.name = "siril"
+
+ # Auto find input light frames for the current session
+ # Look for files in input repos, finding them by using the "relative" tag they contain
+ input.source = "repo"
+ input.required = 2 # siril needs at least 2 frames to stack
+ input.type = "light" # look in all raw repos, but look only for light files
+
+ # Auto find suitable masters of the following type
+ input.masters = ["bias", "flat"]
+
+ # the base name for our light files
+ context.light_base = '''light_s{session["id"]}'''
 
- context.sessionid = "2025-09-16" # FIXME, generate this by looping over all sessions (from outside this file)
- context.sessionconfig = "SiiOiii" # FIXME generate this by looping over all session configs
- context.light_base = "light_s{sessionid}_c{sessionconfig}"
 
- # FIXME until auto light finding is in
- input.source = "path"
- input.path = "/workspaces/starbash/images/from_astroboy/M 27/2025-09-16/LIGHT/*.fit*"
 
  script = '''
  # Create a sequence from the raw light frames, seq file goes to process_dir
@@ -52,7 +58,7 @@ script = '''
  cd {process_dir}
 
  # Calibrate the light frames using master bias and flat
- calibrate {light_base} -bias={bias} -flat=flat -cfa -equalize_cfa
+ calibrate {light_base} -bias={master["bias"]} -flat={master["flat"]} -cfa -equalize_cfa
 
  # Remove background gradient on a per-frame basis (generates bkg_pp_light.seq)
  seqsubsky pp_{light_base} 1
@@ -61,28 +67,32 @@ script = '''
  seqextract_HaOIII bkg_pp_{light_base} -resample=ha
  '''
 
- temporaries = ["FIXME"]
+ # temporaries = ["FIXME"]
 
- [[stage]]
+ [recipe.stage.stack]
 
- # FIXME, eventually we could make it optional to even have a starbash.toml. If we find an
- # starbash.py we could introspect it for a starbash_config dict. And look inside that for description
- # "stage", "when" or whatever?
+ # this stage is considered if the previous stage in this same array
+ # was run. It must be run inside the same tempdir (so that files from previous stage are available)
 
- description = "Stack OSC dual duo filter data, with separate Ha, Oiii and Sii channels"
+ description = "Stack OSC dual duo (HaOiii + SiiOiii) filter data: with separate Ha, Oiii and Sii channels"
 
- context.target = "M 27" # FIXME
- context.targets = "/workspaces/starbash/images/processed" # FIXME, do something smarter
+ input.source = "recipe" # we will use output files from previous stages in this same recipe as our input
 
- tool = "python"
- when = "session-stack" # run once after all session/session-config processing was done
+ tool.name = "python"
+
+ # Based on the following definitions in the stage toml file...
+ # FIXME, we should inherit this - most recipes shouldn't have to declare it
+ output.dest = "repo" # write to a particular repo
+ output.type = "processed" # write output to the special processed repo
 
  # if not specified starbash.py used
  # script-file = "script.py"
 
- # or inline python code instead of that function?
- #script = '''
- # make_stacked("SiiOiii", "Ha", f"results_00001")
- # ...
- # (moved to script.py)
- # '''
+ # or inline python code can be provided here. In this case I'm using some python
+ # code I'm temporarily sharing from the main project...
+ script = '''
+ from starbash.recipes import osc
+ osc.logger = logger
+ osc.context = context
+ osc.osc_process(has_ha_oiii=True, has_sii_oiii=True)
+ '''
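The inline script in the stack stage assigns osc.logger and osc.context before calling osc_process, because osc.py expects those module attributes to be injected by the runtime (see its comment near the top). A minimal sketch of how a "python" tool stage could execute such an inline script with those names in scope; run_inline_python is a hypothetical helper, not the actual starbash implementation:

    import logging
    from typing import Any

    def run_inline_python(script: str, context: dict[str, Any]) -> None:
        # Hypothetical: run a stage's inline `script` block with the `logger` and
        # `context` names that the snippet above expects to find in scope.
        exec(script, {"logger": logging.getLogger("starbash.recipe"), "context": context})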
@@ -0,0 +1,82 @@
+
+ [repo]
+ kind = "recipe"
+
+ [recipe]
+
+ # Lower priority recipes are tried to match first
+ # - we want this to match after the osc_dual_duo has a chance
+ priority = 30
+
+ author.name = "FIXMEsirilsomeone"
+ author.email = "FIXMEsirilsomeone"
+
+ # no filter requirements - will work for any osc session
+ auto.require.color = true
+
+ # all sb.toml files can optionally contain a version section. if version of the running starbash app is out of bounds a warning message will be printed
+ # to the user and the file will be ignored for future processing.
+ [recipe.require.version]
+ min="0.1.0"
+ max="4.5.8"
+
+ [recipe.stage.light]
+
+ description = "Calibrate OSC frames vs bias and flats"
+ # disabled = true # FIXME, debugging later stuff
+
+ tool.name = "siril"
+
+ # Auto find input light frames for the current session
+ # Look for files in input repos, finding them by using the "relative" tag they contain
+ input.source = "repo"
+ input.required = 2 # siril needs at least 2 frames to stack
+ input.type = "light" # look in all raw repos, but look only for light files
+
+ # Auto find suitable masters of the following type
+ input.masters = ["bias", "flat"]
+
+ # the base name for our light files
+ context.light_base = '''light_s{session["id"]}'''
+
+ script = '''
+ # Create a sequence from the raw light frames, seq file goes to process_dir
+ link {light_base} -out={process_dir}
+ cd {process_dir}
+
+ # Calibrate the light frames using master bias and flat
+ calibrate {light_base} -bias={master["bias"]} -flat={master["flat"]} -cfa -equalize_cfa
+
+ # Remove background gradient on a per-frame basis (generates bkg_pp_light.seq)
+ seqsubsky pp_{light_base} 1
+ '''
+
+ # temporaries = ["FIXME"]
+
+ [recipe.stage.stack]
+
+ # this stage is only considered if the previous stage in this same array
+ # was run. It must be run inside the same tempdir (so that files from previous stage are available)
+
+ description = "Stack OSC simple (non duo filter) images"
+
+ input.source = "recipe" # we will use output files from previous stages in this same recipe as our input
+
+ tool.name = "python"
+
+ # Based on the following definitions in the stage toml file...
+ # FIXME, we should inherit this - most recipes shouldn't have to declare it
+ output.dest = "repo" # write to a particular repo
+ output.type = "processed" # write output to the special processed repo
+
+ # if not specified starbash.py used
+ # script-file = "script.py"
+
+ # or inline python code can be provided here. In this case I'm using some python
+ # code I'm temporarily sharing from the main project...
+ script = '''
+ from starbash.recipes import osc
+ osc.logger = logger
+ osc.context = context
+ osc.osc_process(has_ha_oiii=False, has_sii_oiii=False)
+ '''