starbash 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of starbash might be problematic. Click here for more details.
- starbash/__init__.py +6 -0
- starbash/app.py +228 -0
- starbash/appdefaults.sb.toml +89 -0
- starbash/commands/__init__.py +0 -0
- starbash/commands/repo.py +51 -0
- starbash/database.py +75 -0
- starbash/main.py +27 -0
- starbash/repo/__init__.py +7 -0
- starbash/repo/manager.py +248 -0
- starbash/tool.py +260 -0
- starbash-0.1.0.dist-info/LICENSE +674 -0
- starbash-0.1.0.dist-info/METADATA +82 -0
- starbash-0.1.0.dist-info/RECORD +15 -0
- starbash-0.1.0.dist-info/WHEEL +4 -0
- starbash-0.1.0.dist-info/entry_points.txt +4 -0
starbash/__init__.py
ADDED
starbash/app.py
ADDED
|
@@ -0,0 +1,228 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from importlib import resources
|
|
3
|
+
|
|
4
|
+
import glob
|
|
5
|
+
from typing import Any
|
|
6
|
+
from astropy.io import fits
|
|
7
|
+
import itertools
|
|
8
|
+
from rich.progress import track
|
|
9
|
+
from rich.logging import RichHandler
|
|
10
|
+
from starbash.database import Database
|
|
11
|
+
from starbash.tool import Tool
|
|
12
|
+
from starbash.repo import RepoManager
|
|
13
|
+
from starbash.tool import tools
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def setup_logging():
    """
    Configures basic logging.

    Installs a RichHandler on the root logger so records at INFO level and
    above are rendered by rich, including pretty tracebacks.
    """
    logging.basicConfig(
        level="INFO",  # don't print messages of lower priority than this
        format="%(message)s",
        datefmt="[%X]",
        handlers=[RichHandler(rich_tracebacks=True)],
    )


# Configure logging at import time. AstroGlue.__init__ calls setup_logging()
# again; logging.basicConfig is a no-op once the root logger has handlers,
# so the second call is harmless.
setup_logging()
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class AstroGlue:
    """The main AstroGlue application class.

    Owns the merged repository configuration (RepoManager), the image
    metadata cache (Database), and drives the staged processing pipeline.
    """

    def __init__(self):
        """
        Initializes the AstroGlue application by loading configurations
        and setting up the repository manager.
        """
        setup_logging()
        logging.info("AstroGlue application initializing...")

        # Load app defaults (a TOML resource bundled inside the package)
        # and initialize the repository manager.
        app_defaults_text = (
            resources.files("starbash").joinpath("appdefaults.sb.toml").read_text()
        )
        self.repo_manager = RepoManager(app_defaults_text)
        logging.info(
            f"Repo manager initialized with {len(self.repo_manager.repos)} default repo references."
        )
        # self.repo_manager.dump()

        self.db = Database()

        # Shared pipeline state. (Re)created by start_session() and extended by
        # each stage as it runs; initialized here so run_stage() never sees a
        # missing attribute if called before start_session().
        self.context: dict[str, Any] = {}

        # FIXME, call reindex somewhere and also index whenever new repos are added
        # self.reindex_repos()

    # --- Lifecycle ---
    def close(self) -> None:
        """Release resources (closes the database)."""
        self.db.close()

    # Context manager support
    def __enter__(self) -> "AstroGlue":
        return self

    def __exit__(self, exc_type, exc, tb) -> None:
        self.close()

    def reindex_repos(self):
        """Reindex all repositories managed by the RepoManager.

        Walks every local, non-recipe repo, reads the primary FITS header of
        each *.fit* file found under it, and stores the whitelisted header
        keys in the database.

        Raises:
            ValueError: if the merged configuration has no [config] section.
        """
        logging.info("Reindexing all repositories...")
        config = self.repo_manager.merged.get("config")
        if not config:
            raise ValueError("App config not found.")
        whitelist = config["fits-whitelist"]

        for repo in track(self.repo_manager.repos, description="Reindexing repos..."):
            # FIXME, add a method to get just the repos that contain images
            if repo.is_local and repo.kind != "recipe":
                logging.debug("Reindexing %s...", repo.url)
                path = repo.get_path()

                # Find all FITS files under this repo path
                for f in track(
                    list(path.rglob("*.fit*")),
                    description=f"Indexing {repo.url}...",
                ):
                    try:
                        # Read the primary header (HDU 0) and keep only the
                        # whitelisted keys.
                        with fits.open(str(f), memmap=False) as hdul:
                            hdu0: Any = hdul[0]
                            headers = {
                                key: value
                                for key, value in hdu0.header.items()
                                if key in whitelist
                            }
                            logging.debug("Headers for %s: %s", f, headers)
                            self.db.add_from_fits(f, headers)
                    except Exception as e:
                        # Best effort: one unreadable file must not abort the
                        # whole reindex pass.
                        logging.warning("Failed to read FITS header for %s: %s", f, e)

        logging.info("Reindexing complete.")

    def test_processing(self):
        """A crude test of image processing pipeline - FIXME move into testing"""
        self.run_all_stages()

    def run_all_stages(self):
        """On the currently active session, run all processing stages"""
        logging.info("--- Running all stages ---")

        # 1. Get all pipeline definitions (the `[[stages]]` tables with name and priority).
        pipeline_definitions = self.repo_manager.merged.getall("stages")
        flat_pipeline_steps = list(itertools.chain.from_iterable(pipeline_definitions))

        # 2. Sort the pipeline steps by their 'priority' field.
        try:
            sorted_pipeline = sorted(flat_pipeline_steps, key=lambda s: s["priority"])
        except KeyError as e:
            # Re-raise as a ValueError with a more descriptive message.
            raise ValueError(
                "invalid stage definition: a stage is missing the required 'priority' key"
            ) from e

        # 3. Get all available task definitions (the `[[stage]]` tables with tool, script, when).
        task_definitions = self.repo_manager.merged.getall("stage")
        all_tasks = list(itertools.chain.from_iterable(task_definitions))

        logging.info(
            f"Found {len(sorted_pipeline)} pipeline steps to run in order of priority."
        )

        self.start_session()
        # 4. Iterate through the sorted pipeline and execute the associated tasks.
        for step in sorted_pipeline:
            step_name = step.get("name")
            if not step_name:
                raise ValueError("Invalid pipeline step found: missing 'name' key.")

            logging.info(
                f"--- Running pipeline step: '{step_name}' (Priority: {step['priority']}) ---"
            )
            # Find all tasks that should run during this pipeline step.
            tasks_to_run = [task for task in all_tasks if task.get("when") == step_name]
            for task in tasks_to_run:
                self.run_stage(task)

    def start_session(self) -> None:
        """Do common session init"""

        # Context is preserved through all stages, so each stage can add new
        # symbols to it for use by later stages.
        self.context = {}

        # Update the context with runtime values.
        runtime_context = {
            "process_dir": "/workspaces/starbash/images/process",  # FIXME - create/find this more correctly per session
            "masters": "/workspaces/starbash/images/masters",  # FIXME find this the correct way
        }
        self.context.update(runtime_context)

    def run_stage(self, stage: dict) -> None:
        """
        Executes a single processing stage.

        Args:
            stage: A dictionary representing the stage configuration, containing
                at least 'tool' and 'script' keys.

        Raises:
            ValueError: if the stage has no tool, the tool is unknown, or no
                script / script-file is defined.
            RuntimeError: if input is required but no input files matched.
        """
        stage_desc = stage.get("description", "(missing description)")
        if stage.get("disabled", False):
            logging.info(f"Skipping disabled stage: {stage_desc}")
            return

        logging.info(f"Running stage: {stage_desc}")

        tool_name = stage.get("tool")
        if not tool_name:
            raise ValueError(
                f"Stage '{stage.get('name')}' is missing a 'tool' definition."
            )
        tool: Tool | None = tools.get(tool_name)
        if not tool:
            raise ValueError(
                f"Tool '{tool_name}' for stage '{stage.get('name')}' not found."
            )
        logging.debug(f"  Using tool: {tool_name}")

        script_filename = stage.get("script-file", tool.default_script_file)
        if script_filename:
            source = stage.source  # type: ignore (was monkeypatched by repo)
            script = source.read(script_filename)
        else:
            script = stage.get("script")

        if script is None:
            raise ValueError(
                f"Stage '{stage.get('name')}' is missing a 'script' or 'script-file' definition."
            )

        # This allows recipe TOML to define their own default variables.
        stage_context = stage.get("context", {})
        self.context.update(stage_context)

        # Assume no files for this stage (drop any leftovers from a prior stage).
        self.context.pop("input_files", None)

        input_files = []
        input_config = stage.get("input")
        input_required = False
        if input_config:
            # if there is an "input" dict, we assume input.required is true if unset
            input_required = input_config.get("required", True)
            if "path" in input_config:
                # The path might contain context variables that need to be expanded.
                # path_pattern = expand_context(input_config["path"], context)
                path_pattern = input_config["path"]
                input_files = glob.glob(path_pattern, recursive=True)

                # Pass in the file list via the context dict
                self.context["input_files"] = input_files

        if input_required and not input_files:
            raise RuntimeError("No input files found for stage")

        tool.run_in_temp_dir(script, context=self.context)
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
[repo]
|
|
4
|
+
kind = "preferences"
|
|
5
|
+
|
|
6
|
+
# FIXME, somewhere here list default patterns which can be used to identify NINA, ASIAIR, SEESTAR
|
|
7
|
+
# raw repo layouts
|
|
8
|
+
|
|
9
|
+
# List the initial search path to find other repos, when the app launches it always first loads this sb.toml file and this file should list all of the
|
|
10
|
+
# standard default repo locations. When searching repos, repos listed LAST have precedence, so target file can override the root processing defaults,
|
|
11
|
+
# then the user prefs, then a live github URL or whatever
|
|
12
|
+
|
|
13
|
+
# [[repo.ref]]
|
|
14
|
+
# Possibly provide default repos via http from github?
|
|
15
|
+
# url = "https://github.com/geeksville/starbash-default-repo"
|
|
16
|
+
|
|
17
|
+
[[repo.ref]]
|
|
18
|
+
|
|
19
|
+
# Add our built-in recipes (FIXME, add a "resource" repo type for directories we expect to find inside
|
|
20
|
+
# our python blob)
|
|
21
|
+
dir = "/workspaces/starbash/doc/toml/example/recipe-repo"
|
|
22
|
+
|
|
23
|
+
[[repo.ref]]
|
|
24
|
+
|
|
25
|
+
# User custom settings live here
|
|
26
|
+
# For "dir" or "url" repos we expect to find an starbash.toml file in the root of the directory.
|
|
27
|
+
# dir = "~/.config/starbash"
|
|
28
|
+
# But temporarily during early development I'm keeping them in the master github
|
|
29
|
+
dir = "/workspaces/starbash/doc/toml/example/config/user/"
|
|
30
|
+
|
|
31
|
+
# [[repo.ref]]
|
|
32
|
+
|
|
33
|
+
# recipe repos contain recipes (identified by name). When any sb.toml file references
|
|
34
|
+
# a recipe the current path of all sources is searched to find that named recipe.
|
|
35
|
+
|
|
36
|
+
# the standard config includes a few key recipe repos, but interested users or tool
|
|
37
|
+
# vendors can define/provide their own
|
|
38
|
+
|
|
39
|
+
# add a place to find FOO.sb.toml files
|
|
40
|
+
# url = "http://fixme.com/foo-repo/somedir"
|
|
41
|
+
|
|
42
|
+
# test data. Moved to user preferences (where it should have been all along)
|
|
43
|
+
# [[repo.ref]]
|
|
44
|
+
# dir = "~/Pictures/telescope/from_astroboy"
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
# or inband?
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
# allow including multiple recipes FIXME old idea, not sure if needed.
|
|
51
|
+
# [[repo.ref]]
|
|
52
|
+
|
|
53
|
+
# looks for a file with this name and .py for the code and .toml for the config
|
|
54
|
+
# we'll expect that toml file to contain various [[recipe.*]] sections which will be loaded at this
|
|
55
|
+
# location in the sequence as if they were defined here
|
|
56
|
+
#by-file = "process-osc-dual-duo"
|
|
57
|
+
#by-url = "http:..."
|
|
58
|
+
|
|
59
|
+
[config]
|
|
60
|
+
|
|
61
|
+
# What fits fields should we store in our DB cache
|
|
62
|
+
fits-whitelist = [
|
|
63
|
+
"INSTRUME",
|
|
64
|
+
"FILTER",
|
|
65
|
+
"TELESCOP",
|
|
66
|
+
"IMAGETYP",
|
|
67
|
+
"DATE-OBS",
|
|
68
|
+
"DATE-LOC",
|
|
69
|
+
"DATE",
|
|
70
|
+
"EXPTIME",
|
|
71
|
+
"FWHEEL",
|
|
72
|
+
"OBJECT",
|
|
73
|
+
"RA", # we ignore the text version OBJCTRA / OBJCTDEC
|
|
74
|
+
"DEC",
|
|
75
|
+
"OBJCTROT",
|
|
76
|
+
"FOCPOS",
|
|
77
|
+
"SITELAT",
|
|
78
|
+
"SITELON",
|
|
79
|
+
"SITEELEV",
|
|
80
|
+
"NAXIS1",
|
|
81
|
+
"NAXIS2",
|
|
82
|
+
"SWCREATE",
|
|
83
|
+
"XBINNING",
|
|
84
|
+
"YBINNING",
|
|
85
|
+
"GAIN",
|
|
86
|
+
"CCD-TEMP",
|
|
87
|
+
"SET-TEMP",
|
|
88
|
+
"AMBTEMP",
|
|
89
|
+
]
|
|
File without changes
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import typer
|
|
2
|
+
from typing_extensions import Annotated
|
|
3
|
+
|
|
4
|
+
from starbash.app import AstroGlue
|
|
5
|
+
from starbash import console
|
|
6
|
+
|
|
7
|
+
# Typer sub-application holding the `repo` command group; mounted under
# the top-level CLI by starbash.main.
app = typer.Typer()
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@app.command()
def add(path: str):
    """
    Add a repository. path is either a local path or a remote URL.
    """
    # TODO: not yet implemented — currently a stub.
    pass
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@app.command()
def remove(reponame: str):
    """
    Remove a repository by name or number.
    """
    # TODO: not yet implemented — currently a stub.
    pass
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
@app.command()
def list():
    """
    List all repositories. The listed names/numbers can be used with other commands.
    """
    with AstroGlue() as ag:
        # Fix: the loop previously read an undefined name `sb`; the context
        # manager binds the application instance as `ag`.
        for i, repo in enumerate(ag.repo_manager.repos):
            console.print(f"{i + 1:2}: {repo.url} (kind={repo.kind})")
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@app.command()
def reindex(
    reponame: Annotated[
        str,
        typer.Argument(help="The repository name or number, or none to reindex all."),
    ],
):
    """
    Reindex the named repository.
    If no name is given, reindex all repositories.
    """
    # TODO: not yet implemented — presumably should dispatch to
    # AstroGlue.reindex_repos(); confirm intended wiring.
    pass
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
if __name__ == "__main__":
    # Allow running this command module directly during development.
    app()
|
starbash/database.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Any, Optional
|
|
5
|
+
|
|
6
|
+
from tinydb import TinyDB, Query, table
|
|
7
|
+
from platformdirs import PlatformDirs
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class Database:
    """TinyDB-backed application database.

    Stores data under the OS-specific user data directory using platformdirs.
    Provides an `images` table for FITS metadata and basic helpers.
    """

    def __init__(
        self,
        base_dir: Optional[Path] = None,
    ) -> None:
        """Open (creating directories/files as needed) the JSON store.

        Args:
            base_dir: Override for the data directory (useful for tests).
                When None, the platformdirs user data dir is used.
        """
        # Resolve base data directory (allow override for tests)
        if base_dir is None:
            dirs = PlatformDirs("starbash", "geeksville")
            data_dir = Path(dirs.user_data_dir)
        else:
            data_dir = base_dir

        data_dir.mkdir(parents=True, exist_ok=True)
        self.db_path = data_dir / "db.json"

        # Open TinyDB JSON store
        self._db = TinyDB(self.db_path)

        # Public handle to the images table
        self.images = self._db.table("images")

    def add_from_fits(self, file_path: Path, headers: dict[str, Any]) -> None:
        """Insert or update the image record for file_path with the given headers."""
        # The caller's headers dict is not mutated; 'path' is the unique key.
        record = {**headers, "path": str(file_path)}
        self.upsert_image(record)

    # --- Convenience helpers for common image operations ---
    def upsert_image(self, record: dict[str, Any]) -> None:
        """Insert or update an image record by unique path.

        The record must include a 'path' key; other keys are arbitrary FITS metadata.

        Raises:
            ValueError: if 'path' is missing or empty.
        """
        path = record.get("path")
        if not path:
            raise ValueError("record must include 'path'")

        Image = Query()
        self.images.upsert(record, Image.path == path)

    def get_image(self, path: str) -> table.Document | None:
        """Return the image record for path, or None if not indexed.

        Note: TinyDB's Table.get(cond) returns a single Document or None;
        the previous annotation also advertised a list, which only applies
        to the doc_ids form of get().
        """
        Image = Query()
        return self.images.get(Image.path == path)

    def all_images(self) -> list[dict[str, Any]]:
        """Return all image records as a list of plain dicts."""
        return list(self.images.all())

    # --- Lifecycle ---
    def close(self) -> None:
        """Close the underlying TinyDB store."""
        self._db.close()

    # Context manager support
    def __enter__(self) -> "Database":
        return self

    def __exit__(self, exc_type, exc, tb) -> None:
        self.close()
|
starbash/main.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import typer
|
|
3
|
+
|
|
4
|
+
from .app import AstroGlue
|
|
5
|
+
from .commands import repo
|
|
6
|
+
|
|
7
|
+
# Top-level Typer application for the starbash CLI.
app = typer.Typer()
# Mount the `starbash repo ...` command group (add/remove/list/reindex).
app.add_typer(repo.app, name="repo", help="Manage Starbash repositories.")
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@app.command(hidden=True)
def default_cmd():
    """Default entry point for the starbash application.

    Currently just constructs (and cleanly closes) the application; the
    context manager guarantees AstroGlue.close() runs on exit.
    """
    with AstroGlue():
        pass
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@app.callback(invoke_without_command=True)
def _default(ctx: typer.Context):
    """Top-level callback: fall back to default_cmd when no subcommand is given."""
    if ctx.invoked_subcommand is not None:
        return None
    return default_cmd()
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
if __name__ == "__main__":
    # Allow `python -m starbash.main` / direct execution during development.
    app()
|