starbash 0.1.10__tar.gz → 0.1.11__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {starbash-0.1.10 → starbash-0.1.11}/PKG-INFO +1 -1
- {starbash-0.1.10 → starbash-0.1.11}/pyproject.toml +1 -1
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/app.py +153 -59
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/defaults/starbash.toml +2 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/master_bias/starbash.toml +4 -6
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/master_dark/starbash.toml +1 -3
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/master_flat/starbash.toml +3 -5
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/osc_dual_duo/starbash.toml +35 -26
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/starbash.toml +11 -9
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/tool.py +61 -24
- {starbash-0.1.10 → starbash-0.1.11}/LICENSE +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/README.md +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/repo/__init__.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/repo/manager.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/repo/repo.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/__init__.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/aliases.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/analytics.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/commands/__init__.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/commands/info.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/commands/process.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/commands/repo.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/commands/select.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/commands/user.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/database.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/defaults/__init__.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/main.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/paths.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/README.md +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/__init__.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/osc_dual_duo/starbash.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/osc_single_duo/starbash.toml +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/selection.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/templates/__init__.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/templates/repo/master.toml +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/templates/repo/processed.toml +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/templates/userconfig.toml +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/toml.py +0 -0
- {starbash-0.1.10 → starbash-0.1.11}/src/starbash/url.py +0 -0
{starbash-0.1.10 → starbash-0.1.11}/src/starbash/app.py

@@ -22,7 +22,7 @@ import starbash
 from starbash import console, _is_test_env, to_shortdate
 from starbash.aliases import Aliases
 from starbash.database import Database, SessionRow, ImageRow, get_column_name
-from repo import Repo, repo_suffix
+from repo import Repo, repo, repo_suffix
 from starbash.toml import toml_from_template
 from starbash.tool import Tool, expand_context, expand_context_unsafe
 from repo import RepoManager
@@ -124,6 +124,33 @@ def copy_images_to_dir(images: list[ImageRow], output_dir: Path) -> None:
     console.print(f" [red]Errors: {error_count} files[/red]")


+class ProcessingContext(tempfile.TemporaryDirectory):
+    """For processing a set of sessions for a particular target.
+
+    Keeps a shared temporary directory for intermediate files. We expose the path to that
+    directory in context["process_dir"].
+    """
+
+    def __init__(self, starbash: "Starbash"):
+        super().__init__(prefix="sbprocessing_")
+        self.sb = starbash
+        logging.debug(f"Created processing context at {self.name}")
+
+        self.sb.init_context()
+        self.sb.context["process_dir"] = self.name
+
+    def __enter__(self) -> "ProcessingContext":
+        return super().__enter__()
+
+    def __exit__(self, exc_type, exc_value, traceback) -> None:
+        logging.debug(f"Cleaning up processing context at {self.name}")
+
+        # unregister our process dir
+        self.sb.context.pop("process_dir", None)
+
+        super().__exit__(exc_type, exc_value, traceback)
+
+
 class Starbash:
     """The main Starbash application class."""

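Since ProcessingContext subclasses tempfile.TemporaryDirectory, the scratch-directory handling can be driven as a plain context manager. A minimal usage sketch (assuming `sb` is an already-constructed Starbash instance and `task` a stage table looked up from a recipe; neither is defined here):

    # Sketch only: `sb` and `task` are assumed to exist as described above.
    with ProcessingContext(sb) as tmp:
        # TemporaryDirectory.__enter__ yields the directory path, the same
        # value that __init__ stored in sb.context["process_dir"]
        sb.run_stage(task)  # intermediate files land in tmp
    # on exit the directory is deleted and "process_dir" is popped from the context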
@@ -674,7 +701,22 @@ class Starbash:
         return sorted_pipeline

     def run_all_stages(self):
-        """On the currently active session, run all processing stages
+        """On the currently active session, run all processing stages
+
+        New design, not yet implemented:
+        * find all recipes
+        * for each target in the current selection:
+          * select ONE recipe for processing that target (check recipe.auto.require.* conditions)
+          * create a processing output directory (for high value final files)
+          * create a temporary processing directory (for intermediate files - shared by all stages)
+          * init session context (it will be shared for all following steps)
+          * iterate over all light frame sessions in the current selection
+          * for each session:
+            * update context input and output files
+            * run session.light stages
+          * after all sessions are processed, run final.stack stages (using the shared context and temp dir)
+
+        """
         logging.info("--- Running all stages ---")

         # 1. Get all pipeline definitions (the `[[stages]]` tables with name and priority).
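The docstring above is a design note; nothing in this release implements the target-level loop yet. A rough sketch of the shape it describes (select_recipe_for_target, output_dir_for, light_sessions_for and selection.targets() are hypothetical placeholders, not functions in 0.1.11):

    # Hypothetical sketch of the "new design" bullets above; the helper
    # functions named here do not exist in this release.
    for target in self.selection.targets():
        recipe = select_recipe_for_target(target)  # checks recipe.auto.require.*
        out_dir = output_dir_for(target)           # high value final files
        with ProcessingContext(self):              # shared temp dir for all stages
            for session in light_sessions_for(target):
                self.set_session_in_context(session)  # update context input/output
                self.run_stage(recipe.get("recipe.stage.light"))
            # after all sessions: final stack, reusing the shared context/temp dir
            self.run_stage(recipe.get("recipe.stage.stack"))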
@@ -700,6 +742,89 @@
         for task in tasks_to_run:
             self.run_stage(task)

+    def get_recipe_for_session(
+        self, session: SessionRow, step: dict[str, Any]
+    ) -> Repo | None:
+        """Try to find a recipe that can be used to process the given session for the given step name
+        (master-dark, master-bias, light, stack, etc...)
+
+        * if a recipe doesn't have a matching recipe.stage.<step_name> it is not considered
+        * As part of this checking we will look at recipe.auto.require.* conditions to see if the recipe
+          is suitable for this session.
+        * the imagetyp of this session matches step.input
+
+        Currently we return just one Repo but eventually we should support multiple matching recipes
+        and make the user pick (by throwing an exception?).
+        """
+        # Get all recipe repos - FIXME add a getall(kind) to RepoManager
+        recipe_repos = [r for r in self.repo_manager.repos if r.kind() == "recipe"]
+
+        step_name = step.get("name")
+        if not step_name:
+            raise ValueError("Invalid pipeline step found: missing 'name' key.")
+
+        input_name = step.get("input")
+        if not input_name:
+            raise ValueError("Invalid pipeline step found: missing 'input' key.")
+
+        imagetyp = session.get(get_column_name(Database.IMAGETYP_KEY))
+
+        if not imagetyp or input_name != self.aliases.normalize(imagetyp):
+            logging.debug(
+                f"Session imagetyp '{imagetyp}' does not match step input '{input_name}', skipping"
+            )
+            return None
+
+        # Get session metadata for checking requirements
+        session_metadata = session.get("metadata", {})
+
+        for repo in recipe_repos:
+            # Check if this recipe has the requested stage
+            stage_config = repo.get(f"recipe.stage.{step_name}")
+            if not stage_config:
+                logging.debug(
+                    f"Recipe {repo.url} does not have stage '{step_name}', skipping"
+                )
+                continue
+
+            # Check auto.require conditions if they exist
+
+            # If requirements are specified, check if session matches
+            required_filters = repo.get("auto.require.filter", [])
+            if required_filters:
+                session_filter = self.aliases.normalize(
+                    session_metadata.get(Database.FILTER_KEY)
+                )
+
+                # Session must have a filter that matches one of the required filters
+                if not session_filter or session_filter not in required_filters:
+                    logging.debug(
+                        f"Recipe {repo.url} requires filters {required_filters}, "
+                        f"session has '{session_filter}', skipping"
+                    )
+                    continue
+
+            required_cameras = repo.get("auto.require.camera", [])
+            if required_cameras:
+                session_camera = self.aliases.normalize(
+                    session_metadata.get("INSTRUME")
+                )  # Camera identifier
+
+                # Session must have a camera that matches one of the required cameras
+                if not session_camera or session_camera not in required_cameras:
+                    logging.debug(
+                        f"Recipe {repo.url} requires cameras {required_cameras}, "
+                        f"session has '{session_camera}', skipping"
+                    )
+                    continue
+
+            # This recipe matches!
+            logging.info(f"Selected recipe {repo.url} for stage '{step_name}' ")
+            return repo
+
+        # No matching recipe found
+        return None
+
     def run_master_stages(self):
         """Generate any missing master frames

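The filter and camera checks above both go through Aliases.normalize(), so a header spelling like "SII-OIII" can satisfy a recipe that requires the canonical "SiiOiii". A self-contained sketch of that require-list rule (the alias table here is invented for illustration and is not the real Aliases data):

    # Standalone illustration of the require-list check; ALIASES is invented.
    ALIASES = {"sii-oiii": "SiiOiii", "siioiii": "SiiOiii", "ha-oiii": "HaOiii"}

    def normalize(value):
        if not value:
            return None
        return ALIASES.get(value.lower().replace(" ", ""), value)

    def passes_require(session_value, required):
        if not required:  # no requirement: every session matches
            return True
        norm = normalize(session_value)
        return norm is not None and norm in required

    assert passes_require("SII-OIII", ["SiiOiii"])  # header spelling normalizes
    assert not passes_require(None, ["SiiOiii"])    # missing value fails a requirement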
@@ -711,62 +836,29 @@
         * add_input_to_context() add the input files to the context (from the session)
         * run_stage(task) to generate the new master frame
         """
+        sorted_pipeline = self._get_stages("master-stages")
         sessions = self.search_session()
-        for session in sessions:
-
-            )
-
-            # Find all tasks that should run during this step
-            tasks_to_run = [
-                task for task in all_tasks if task.get("when") == step_name
-            ]
-
-            for task in tasks_to_run:
-                input_config = task.get("input", {})
-                input_type = input_config.get("type")
-                if not input_type:
-                    raise ValueError(
-                        f"Task for step '{step_name}' missing required input.type"
-                    )
-                if self.aliases.equals(input_type, imagetyp):
-                    logging.debug(
-                        f"Running {step_name} task for imagetyp '{imagetyp}'"
-                    )
-
-                    # Create a default process dir in /tmp, though more advanced 'session' based workflows will
-                    # probably override this and place it somewhere persistent.
-                    with tempfile.TemporaryDirectory(
-                        prefix="session_tmp_"
-                    ) as temp_dir:
-                        logging.debug(
-                            f"Created temporary session directory: {temp_dir}"
-                        )
-                        self.init_context()
-                        self.context["process_dir"] = temp_dir
-                        self.add_session_to_context(session)
-                        self.run_stage(task)
-        except RuntimeError as e:
-            logging.error(
-                f"Skipping session {session[get_column_name(Database.ID_KEY)]}: {e}"
-            )
+        for session in track(sessions, description="Generating masters..."):
+            # 4. Iterate through the sorted pipeline and execute the associated tasks.
+            # FIXME unify the master vs normal step running code
+            for step in sorted_pipeline:
+                task = None
+                recipe = self.get_recipe_for_session(session, step)
+                if recipe:
+                    task = recipe.get("recipe.stage." + step["name"])
+
+                if task:
+                    input_config = task.get("input", {})
+                    input_type = input_config.get("type")
+                    if not input_type:
+                        raise ValueError(f"Task for step missing required input.type")
+
+                    # Create a default process dir in /tmp.
+                    # FIXME - eventually we should allow hashing or somesuch to keep reusing processing
+                    # dirs for particular targets?
+                    with ProcessingContext(self) as temp_dir:
+                        self.set_session_in_context(session)
+                        self.run_stage(task)

     def init_context(self) -> None:
         """Do common session init"""
@@ -780,8 +872,10 @@
         }
         self.context.update(runtime_context)

-    def add_session_to_context(self, session: SessionRow) -> None:
+    def set_session_in_context(self, session: SessionRow) -> None:
         """adds to context from the indicated session:
+
+        Sets the following context variables based on the provided session:
         * instrument - for the session
         * date - the localtimezone date of the session
         * imagetyp - the imagetyp of the session
@@ -1000,7 +1094,7 @@
             raise ValueError(
                 f"Tool '{tool_name}' for stage '{stage.get('name')}' not found."
             )
-        logging.debug(f"
+        logging.debug(f"Using tool: {tool_name}")
         tool.set_defaults()

         # Allow stage to override tool timeout if specified
{starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/master_bias/starbash.toml

@@ -7,7 +7,7 @@ kind = "recipe"
 author.name = "FIXMESiril?"
 author.email = "FIXMESiril?"

-[
+[recipe.stage.master-bias]

 description = "Generate master bias"
 # disabled = false # turn on to skip
@@ -26,14 +26,12 @@ input.type = "bias" # look in all raw repos, but look only for bias files

 # Look for files in input repos, finding them by using the "relative" tag they contain
 input.source = "repo"
-input.required = 2
+input.required = 2 # siril needs at least 2 frames to stack
 # old school paths also work (but are not recommended)
 # input.path = ".../from_astroboy/masters-raw/2025-09-09/BIAS/*.fit*"

-when = "setup.master.bias" # run when master biases are regenerated
-
 # Based on the following definitions in the stage toml file...
-output.dest = "repo"
+output.dest = "repo" # write to a particular repo
 output.type = "master" # write output to the special masters repo

 # the following fields will be auto populated in the context before entry:
@@ -65,4 +63,4 @@ script = '''

 # Stack frames
 stack frames rej 3 3 -nonorm -out={output["base_path"]}
-'''
+'''
{starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/master_dark/starbash.toml

@@ -7,7 +7,7 @@ kind = "recipe"
 author.name = "FIXMESiril?"
 author.email = "FIXMESiril?"

-[
+[recipe.stage.master-dark]

 description = "Generate master dark"

@@ -20,8 +20,6 @@ input.type = "dark"
 input.source = "repo"
 input.required = 2 # siril needs at least 2 frames to stack

-when = "setup.master.dark" # run when master darks are regenerated
-
 # Based on the following definitions in the stage toml file...
 output.dest = "repo" # write to a particular repo
 output.type = "master" # write output to the special masters repo
{starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/master_flat/starbash.toml

@@ -8,7 +8,7 @@ author.name = "FIXMESiril?"
 author.email = "FIXMESiril?"


-[
+[recipe.stage.master-flat]

 # See master_bias/starbash.toml for more documentation

@@ -25,16 +25,14 @@ input.type = "flat" # look in all raw repos, but look only for flat files

 # Look for files in input repos, finding them by using the "relative" tag they contain
 input.source = "repo"
-input.required = 2
+input.required = 2 # siril needs at least 2 frames to stack

 # We require a master bias frame for this recipe. By the time our recipe is invoked
 # context.master.bias will have been set to a full path to a master bias frame
 input.masters = ["bias"]

-when = "setup.master.flat" # run when master biases are regenerated
-
 # Based on the following definitions in the stage toml file...
-output.dest = "repo"
+output.dest = "repo" # write to a particular repo
 output.type = "master" # write output to the special masters repo

 # FIXME for early development we have support for simple absolute file paths with globs
{starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/osc_dual_duo/starbash.toml

@@ -2,21 +2,10 @@
 [repo]
 kind = "recipe"

-# all sb.toml files can optionally contain a version section. if version of the running starbash app is out of bounds a warning message will be printed
-# to the user and the file will be ignored for future processing.
-[recipe.require.version]
-min="0.1.0"
-max="4.5.8"
-
 [recipe]
 author.name = "Kevin Hester"
 author.email = "kevinh@geeksville.com"

-[[stage]]
-
-description = "Extract OSC dual duo filter Ha, Oiii and Sii channels"
-disabled = true # FIXME, debugging later stuff
-
 # FIXME-somehow-specify-what-filternames are used to auto detect this recipe can be used?
 # figure out how to support dual duo vs single duo. Perhaps: the FIRST recipe that matches an auto rule
 # is used for any auto-defected defaults. If an auto match is found it will be saved in the generated starter
@@ -25,27 +14,39 @@ disabled = true # FIXME, debugging later stuff
 # non OSC people use names like LRGB or SHO

 # for dual duo if we see Sii assume they also have HaOiii
-auto.
+auto.require.filter = ["SiiOiii"]
 # for single duo look for this
-# auto.
-auto.
+# auto.require.filter = ["HaOiii"]
+auto.require.camera = ["OSC"]
+
+# all sb.toml files can optionally contain a version section. if version of the running starbash app is out of bounds a warning message will be printed
+# to the user and the file will be ignored for future processing.
+[recipe.require.version]
+min="0.1.0"
+max="4.5.8"
+
+[recipe.stage.light]
+
+description = "Extract OSC dual duo filter Ha, Oiii and Sii channels"
+# disabled = true # FIXME, debugging later stuff

 tool.name = "siril"

 when = "session.light" # run once per session.config

+input.masters = ["bias", "flat"]

 # FIXME, bias and flat should have been added to context by two previous stages. But for now hardwire
 # Note: they should not have filename extensions (see strip_extension in the old process.py)
-context.bias = '/workspaces/starbash/images/masters/biases/2025-09-09_stacked.fits'
+# context.bias = '/workspaces/starbash/images/masters/biases/2025-09-09_stacked.fits'

-context.sessionid = "2025-09-16" # FIXME, generate this by looping over all sessions (from outside this file)
-context.sessionconfig = "SiiOiii" # FIXME generate this by looping over all session configs
-context.light_base = "light_s{sessionid}_c{sessionconfig}"
+# context.sessionid = "2025-09-16" # FIXME, generate this by looping over all sessions (from outside this file)
+# context.sessionconfig = "SiiOiii" # FIXME generate this by looping over all session configs
+# context.light_base = "light_s{sessionid}_c{sessionconfig}"

 # FIXME until auto light finding is in
-input.source = "path"
-input.path = "/workspaces/starbash/images/from_astroboy/M 27/2025-09-16/LIGHT/*.fit*"
+# input.source = "path"
+# input.path = "/workspaces/starbash/images/from_astroboy/M 27/2025-09-16/LIGHT/*.fit*"

 script = '''
 # Create a sequence from the raw light frames, seq file goes to process_dir
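The relocated [recipe.require.version] table bounds which app versions may load this file; out-of-bounds versions get a warning and the file is ignored. A sketch of such a bounds check (whether starbash uses packaging.version internally is an assumption):

    # Sketch of a [recipe.require.version] check; packaging.version is an
    # assumed dependency here, not confirmed by this diff.
    from packaging.version import Version

    def version_in_bounds(app_version, min_v="0.1.0", max_v="4.5.8"):
        return Version(min_v) <= Version(app_version) <= Version(max_v)

    assert version_in_bounds("0.1.11")     # this release is accepted
    assert not version_in_bounds("5.0.0")  # out of bounds: warn and ignore the file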
@@ -64,7 +65,12 @@ script = '''

 temporaries = ["FIXME"]

-[
+[recipe.stage.stack]
+
+disabled = false # not yet ready to test
+
+# FIXME this stage should only be considered if the previous stage in this same array
+# was run. It must be run inside the same tempdir (so that files from previous stage are available)

 # FIXME, eventually we could make it optional to even have a starbash.toml. If we find an
 # starbash.py we could introspect it for a starbash_config dict. And look inside that for description
@@ -72,14 +78,17 @@ temporaries = ["FIXME"]

 description = "Stack OSC dual duo filter data, with separate Ha, Oiii and Sii channels"

-context.target = "M 27" # FIXME
-context.targets = "/workspaces/starbash/images/processed" # FIXME, do something smarter
+# context.target = "M 27" # FIXME
+# context.targets = "/workspaces/starbash/images/processed" # FIXME, do something smarter

 tool.name = "python"

-when = "
+when = "final.stack" # run once after all session/session.config processing was done

-
+# Based on the following definitions in the stage toml file...
+# FIXME, we should inherit this - most recipes shouldn't have to declare it
+output.dest = "repo" # write to a particular repo
+output.type = "processed" # write output to the special masters repo

 # if not specified starbash.py used
 # script-file = "script.py"
{starbash-0.1.10 → starbash-0.1.11}/src/starbash/recipes/starbash.toml

@@ -32,28 +32,30 @@ dir = "osc_single_duo"
 # master specific stages
 #
 [[master-stages]]
-name = "
+name = "master-bias" # generate master bias frames
 priority = 10
+input = "bias" # only used for frames of this type

 [[master-stages]]
-name = "
+name = "master-dark" # generate master dark frames
 priority = 10
+input = "dark"

 [[master-stages]]
-name = "
+name = "master-flat" # generate master flat frames
 priority = 20
+input = "flat"

 #
-# session specific processing stages
+# session specific processing stages, not currently used, for now I just do this list from code
 #
-[[stages]]
-name = "session.config" # for flat processing, master generation etc
-priority = 10

 [[stages]]
-name = "
+name = "light" # generate light frames from lights and with reference to flats/bias
 priority = 20
+input = "light" # only used for frames of this type

 [[stages]]
-name = "
+name = "stack" # stack frames
 priority = 30
+input = "light" # only used for frames of this type
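With name and input now filled in, each [[master-stages]] entry can be matched against a session's frame type and run in ascending priority order (bias and dark at 10 before flat at 20, since flats need a master bias). A minimal sketch of that sort-and-filter step (whether _get_stages() does exactly this internally is an assumption):

    # The step dicts mirror the [[master-stages]] tables above.
    master_stages = [
        {"name": "master-bias", "priority": 10, "input": "bias"},
        {"name": "master-dark", "priority": 10, "input": "dark"},
        {"name": "master-flat", "priority": 20, "input": "flat"},
    ]

    sorted_pipeline = sorted(master_stages, key=lambda s: s["priority"])
    assert sorted_pipeline[-1]["name"] == "master-flat"  # flats run last

    # a session whose imagetyp normalizes to "flat" matches only the flat step
    matching = [s for s in sorted_pipeline if s["input"] == "flat"]
    assert [s["name"] for s in matching] == ["master-flat"]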
{starbash-0.1.10 → starbash-0.1.11}/src/starbash/tool.py

@@ -3,11 +3,10 @@ import shutil
 import textwrap
 import tempfile
 import subprocess
-import select
 import re
-
+import threading
+import queue
 import logging
-
 import RestrictedPython

 logger = logging.getLogger(__name__)
@@ -194,33 +193,71 @@ def tool_run(
         pass

     # Stream output line by line in real-time
+    # Use threading for cross-platform compatibility (select doesn't work on Windows with pipes)
+
     assert process.stdout
     assert process.stderr

+    output_queue: queue.Queue = queue.Queue()
+
+    def read_stream(stream, log_func, stream_name):
+        """Read from stream and put lines in queue."""
+        try:
+            for line in stream:
+                line = line.rstrip("\n")
+                output_queue.put((log_func, stream_name, line))
+        finally:
+            output_queue.put((None, stream_name, None))  # Signal EOF
+
+    # Start threads to read stdout and stderr
+    stdout_thread = threading.Thread(
+        target=read_stream,
+        args=(process.stdout, logger.debug, "tool-stdout"),
+        daemon=True,
+    )
+    stderr_thread = threading.Thread(
+        target=read_stream,
+        args=(process.stderr, logger.warning, "tool-stderr"),
+        daemon=True,
+    )
+
+    stdout_thread.start()
+    stderr_thread.start()
+
+    # Track which streams have finished
+    streams_finished = 0
+
     try:
-        streams
-
-        for fd in ready:
-            stream, log_func, stream_name = streams[fd]
-            line = stream.readline()
-
-            if line:
-                # Strip trailing newline and log immediately
-                line = line.rstrip("\n")
-                log_func(f"[{stream_name}] {line}")
+        # Process output from queue until both streams are done
+        while streams_finished < 2:
+            try:
+                # Use timeout to periodically check if process has terminated
+                log_func, stream_name, line = output_queue.get(timeout=0.1)
+
+                if log_func is None:
+                    # EOF signal
+                    streams_finished += 1
                 else:
-                    #
-
+                    # Log the line
+                    log_func(f"[{stream_name}] {line}")
+
+            except queue.Empty:
+                # No output available, check if process terminated
+                if process.poll() is not None:
+                    # Process finished, wait a bit more for remaining output
+                    break
+
+        # Wait for threads to finish (they should be done or very close)
+        stdout_thread.join(timeout=1.0)
+        stderr_thread.join(timeout=1.0)

-
-
+        # Drain any remaining items in queue
+        while not output_queue.empty():
+            try:
+                log_func, stream_name, line = output_queue.get_nowait()
+                if log_func is not None:
+                    log_func(f"[{stream_name}] {line}")
+            except queue.Empty:
                 break

     # Wait for process to complete with timeout
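The rewrite above replaces select() with one reader thread per pipe feeding a single queue, each stream posting a None sentinel at EOF; that is what makes the streaming work on Windows, where select() only handles sockets. The same pattern in a self-contained form, run against a throwaway POSIX shell command instead of a starbash tool:

    # Self-contained demo of the reader-thread pattern used in tool_run above.
    import queue
    import subprocess
    import threading

    proc = subprocess.Popen(
        ["sh", "-c", "echo out; echo err 1>&2"],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,
    )
    q: queue.Queue = queue.Queue()

    def pump(stream, name):
        try:
            for line in stream:
                q.put((name, line.rstrip("\n")))
        finally:
            q.put((name, None))  # EOF sentinel, one per stream

    for stream, name in ((proc.stdout, "stdout"), (proc.stderr, "stderr")):
        threading.Thread(target=pump, args=(stream, name), daemon=True).start()

    finished = 0
    while finished < 2:  # run until both streams hit EOF
        name, line = q.get()
        if line is None:
            finished += 1
        else:
            print(f"[{name}] {line}")
    proc.wait()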