starbash 0.1.6__tar.gz → 0.1.7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of starbash might be problematic.
- {starbash-0.1.6 → starbash-0.1.7}/PKG-INFO +21 -5
- {starbash-0.1.6 → starbash-0.1.7}/README.md +20 -4
- {starbash-0.1.6 → starbash-0.1.7}/pyproject.toml +1 -1
- starbash-0.1.7/src/starbash/__init__.py +18 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/analytics.py +2 -7
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/app.py +9 -12
- starbash-0.1.7/src/starbash/commands/__init__.py +39 -0
- starbash-0.1.7/src/starbash/commands/info.py +120 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/commands/select.py +31 -26
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/database.py +70 -102
- starbash-0.1.6/src/starbash/__init__.py +0 -11
- starbash-0.1.6/src/starbash/commands/__init__.py +0 -15
- starbash-0.1.6/src/starbash/commands/info.py +0 -92
- starbash-0.1.6/src/starbash/repo/__init__.py +0 -7
- starbash-0.1.6/src/starbash/repo/manager.py +0 -381
- {starbash-0.1.6 → starbash-0.1.7}/LICENSE +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/commands/repo.py +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/commands/user.py +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/defaults/__init__.py +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/defaults/starbash.toml +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/main.py +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/paths.py +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/recipes/README.md +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/recipes/__init__.py +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/recipes/master_bias/starbash.toml +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/recipes/master_flat/starbash.toml +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/recipes/osc_dual_duo/starbash.py +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/recipes/osc_dual_duo/starbash.toml +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/recipes/osc_single_duo/starbash.toml +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/recipes/starbash.toml +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/selection.py +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/templates/__init__.py +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/templates/userconfig.toml +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/tool.py +0 -0
- {starbash-0.1.6 → starbash-0.1.7}/src/starbash/url.py +0 -0

{starbash-0.1.6 → starbash-0.1.7}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: starbash
-Version: 0.1.6
+Version: 0.1.7
 Summary: A tool for automating/standardizing/sharing astrophotography workflows.
 License-File: LICENSE
 Author: Kevin Hester
@@ -22,20 +22,31 @@ Description-Content-Type: text/markdown
 
 # Starbash
 
+<img src="https://raw.githubusercontent.com/geeksville/starbash/refs/heads/main/img/icon.png" alt="Starbash: Astrophotography workflows simplified" width="30%" align="right" style="margin-bottom: 20px;">
+
 [](https://pypi.org/project/starbash/)
 [](https://github.com/geeksville/starbash/actions)
 [](https://codecov.io/github/geeksville/starbash)
 
-
-
 A tool for automating/standardizing/sharing astrophotography workflows.
 
+* Automatic - with sensible defaults, that you can change as needed.
+* Easy - provides a 'seestar like' starting-point for autoprocessing all your sessions (by default).
+* Fast - even with large image repositories. Automatic master bias and flat generation and reasonable defaults
+* Sharable - you can share/use recipes for image preprocessing flows.
+
+(This project is currently 'alpha' and missing recipes for some workflows, but adding new recipes is easy and we're happy to help. Please file a github issue if your images are not auto-processed and we'll work out a fix.)
+
+<br clear="right">
+
 # Current status
 
 Not quite ready 😊. But making good progress.
 
 See the current [TODO](TODO.md) file for work items. I'll be looking for pre-alpha testers/feedback soon.
 
+
+
 ## Current features
 
 * Automatically recognizes and auto-parses the default NINA, Asiair and Seestar raw file repos (adding support for other layouts is easy)
@@ -56,15 +67,20 @@ See the current [TODO](TODO.md) file for work items. I'll be looking for pre-al
 
 Currently the easiest way to install this command-line based tool is to install is via [pipx](https://pipx.pypa.io/stable/). If you don't already have pipx and you have python installed, you can auto install it by running "pip install --user pipx." If you don't have python installed see the pipx link for pipx installers for any OS.
 
-Once pipx is installed just run:
+Once pipx is installed just run the following **two** commands (the sb --install-completion will make TAB auto-complete automatically complete sb options (for most platforms)):
 
 ```
-pipx install starbash
+➜ pipx install starbash
 installed package starbash 0.1.3, installed using Python 3.12.3
 These apps are now globally available
 - sb
 - starbash
 done! ✨ 🌟 ✨
+
+➜ sb --install-completion
+bash completion installed in /home/.../sb.sh
+Completion will take effect once you restart the terminal
+
 ```
 
 FIXME - add getting started instructions (possibly with a screenshare video)

{starbash-0.1.6 → starbash-0.1.7}/README.md

@@ -1,19 +1,30 @@
 # Starbash
 
+<img src="https://raw.githubusercontent.com/geeksville/starbash/refs/heads/main/img/icon.png" alt="Starbash: Astrophotography workflows simplified" width="30%" align="right" style="margin-bottom: 20px;">
+
 [](https://pypi.org/project/starbash/)
 [](https://github.com/geeksville/starbash/actions)
 [](https://codecov.io/github/geeksville/starbash)
 
-
-
 A tool for automating/standardizing/sharing astrophotography workflows.
 
+* Automatic - with sensible defaults, that you can change as needed.
+* Easy - provides a 'seestar like' starting-point for autoprocessing all your sessions (by default).
+* Fast - even with large image repositories. Automatic master bias and flat generation and reasonable defaults
+* Sharable - you can share/use recipes for image preprocessing flows.
+
+(This project is currently 'alpha' and missing recipes for some workflows, but adding new recipes is easy and we're happy to help. Please file a github issue if your images are not auto-processed and we'll work out a fix.)
+
+<br clear="right">
+
 # Current status
 
 Not quite ready 😊. But making good progress.
 
 See the current [TODO](TODO.md) file for work items. I'll be looking for pre-alpha testers/feedback soon.
 
+
+
 ## Current features
 
 * Automatically recognizes and auto-parses the default NINA, Asiair and Seestar raw file repos (adding support for other layouts is easy)
@@ -34,15 +45,20 @@ See the current [TODO](TODO.md) file for work items. I'll be looking for pre-al
 
 Currently the easiest way to install this command-line based tool is to install is via [pipx](https://pipx.pypa.io/stable/). If you don't already have pipx and you have python installed, you can auto install it by running "pip install --user pipx." If you don't have python installed see the pipx link for pipx installers for any OS.
 
-Once pipx is installed just run:
+Once pipx is installed just run the following **two** commands (the sb --install-completion will make TAB auto-complete automatically complete sb options (for most platforms)):
 
 ```
-pipx install starbash
+➜ pipx install starbash
 installed package starbash 0.1.3, installed using Python 3.12.3
 These apps are now globally available
 - sb
 - starbash
 done! ✨ 🌟 ✨
+
+➜ sb --install-completion
+bash completion installed in /home/.../sb.sh
+Completion will take effect once you restart the terminal
+
 ```
 
 FIXME - add getting started instructions (possibly with a screenshare video)

starbash-0.1.7/src/starbash/__init__.py

@@ -0,0 +1,18 @@
+import logging
+import os
+
+from .database import Database  # re-export for convenience
+from rich.console import Console
+
+# Disable Rich formatting in test environments (pytest or NO_COLOR set)
+# This prevents ANSI escape codes and line wrapping in test output for more reliable test parsing.
+_is_test_env = "PYTEST_VERSION" in os.environ
+console = Console(
+    force_terminal=False if _is_test_env else None,
+    width=999999 if _is_test_env else None,  # Disable line wrapping in tests
+)
+
+# Global variable for log filter level (can be changed via --debug flag)
+log_filter_level = logging.INFO
+
+__all__ = ["Database"]
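
The new `__init__.py` above gives the whole CLI one shared Rich console that degrades to plain, unwrapped output under pytest. A minimal usage sketch (the `demo` function is made up for illustration; the imports match the module above):

```python
# Sketch only: downstream modules reuse the shared console rather than creating their own.
from starbash import console, log_filter_level

def demo() -> None:
    # In a real terminal this renders with color; under pytest the Console above is
    # built with force_terminal=False and a huge width, so tests see plain text.
    console.print("[bold green]starbash[/bold green] shared console demo")
    console.print(f"log filter level: {log_filter_level}")

demo()
```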

{starbash-0.1.6 → starbash-0.1.7}/src/starbash/analytics.py

@@ -2,7 +2,7 @@ import logging
 import os
 
 import starbash
-from starbash import console
+from starbash import console, _is_test_env
 import starbash.url as url
 
 # Default to no analytics/auto crash reports
@@ -51,11 +51,6 @@ def analytics_shutdown() -> None:
         sentry_sdk.flush()
 
 
-def is_running_in_pytest() -> bool:
-    """Detect if code is being run inside pytest."""
-    return "PYTEST_CURRENT_TEST" in os.environ
-
-
 def is_development_environment() -> bool:
     """Detect if running in a development environment."""
 
@@ -80,7 +75,7 @@ def analytics_exception(exc: Exception) -> bool:
     if analytics_allowed:
         import sentry_sdk
 
-        if is_running_in_pytest():
+        if _is_test_env:
            report_id = "TESTING-ENVIRONMENT"
         else:
            report_id = sentry_sdk.capture_exception(exc)

{starbash-0.1.6 → starbash-0.1.7}/src/starbash/app.py

@@ -13,11 +13,11 @@ from rich.logging import RichHandler
 import shutil
 
 import starbash
-from starbash import console
+from starbash import console, _is_test_env
 from starbash.database import Database
-from starbash.repo.manager import Repo
+from repo.manager import Repo
 from starbash.tool import Tool
-from starbash.repo import RepoManager
+from repo import RepoManager
 from starbash.tool import tools
 from starbash.paths import get_user_config_dir, get_user_data_dir
 from starbash.selection import Selection
@@ -34,11 +34,12 @@ def setup_logging():
     """
     Configures basic logging.
     """
+    handlers = [RichHandler(rich_tracebacks=True)] if not _is_test_env else []
     logging.basicConfig(
         level=starbash.log_filter_level,  # use the global log filter level
         format="%(message)s",
         datefmt="[%X]",
-        handlers=[RichHandler(rich_tracebacks=True)],
+        handlers=handlers,
     )
 
 
@@ -204,15 +205,11 @@ class Starbash:
         session = self.db.get_session(new)
         self.db.upsert_session(new, existing=session)
 
-    def search_session(self) -> list[dict[str, Any]]
+    def search_session(self) -> list[dict[str, Any]]:
         """Search for sessions, optionally filtered by the current selection."""
-        #
-
-
-        else:
-            # Get query conditions from selection
-            conditions = self.selection.get_query_conditions()
-            return self.db.search_session(conditions)
+        # Get query conditions from selection
+        conditions = self.selection.get_query_conditions()
+        return self.db.search_session(conditions)
 
     def get_session_images(self, session_id: int) -> list[dict[str, Any]]:
         """
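
Outside of pytest the reworked setup_logging() amounts to roughly the following (a sketch assuming rich is installed; the log message is illustrative only):

```python
import logging
from rich.logging import RichHandler

_is_test_env = False  # assumption: not running under pytest
# Under pytest the handler list is left empty so captured output stays plain.
handlers = [RichHandler(rich_tracebacks=True)] if not _is_test_env else []
logging.basicConfig(
    level=logging.INFO,      # starbash.log_filter_level defaults to INFO
    format="%(message)s",
    datefmt="[%X]",
    handlers=handlers,
)
logging.getLogger(__name__).info("hello from a Rich-formatted handler")
```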

starbash-0.1.7/src/starbash/commands/__init__.py

@@ -0,0 +1,39 @@
+"""Shared utilities for starbash commands."""
+
+from datetime import datetime
+from rich.style import Style
+
+# Define reusable table styles
+TABLE_COLUMN_STYLE = Style(color="cyan")
+TABLE_VALUE_STYLE = Style(color="green")
+
+
+def format_duration(seconds: int | float) -> str:
+    """Format seconds as a human-readable duration string."""
+    if seconds < 60:
+        return f"{int(seconds)}s"
+    elif seconds < 120:
+        minutes = int(seconds // 60)
+        secs = int(seconds % 60)
+        return f"{minutes}m {secs}s" if secs else f"{minutes}m"
+    else:
+        hours = int(seconds // 3600)
+        minutes = int((seconds % 3600) // 60)
+        return f"{hours}h {minutes}m" if minutes else f"{hours}h"
+
+
+def to_shortdate(date_iso: str) -> str:
+    """Convert ISO UTC datetime string to local short date string (YYYY-MM-DD).
+
+    Args:
+        date_iso: ISO format datetime string (e.g., "2023-10-15T14:30:00Z")
+
+    Returns:
+        Short date string in YYYY-MM-DD format, or the original string if conversion fails
+    """
+    try:
+        dt_utc = datetime.fromisoformat(date_iso)
+        dt_local = dt_utc.astimezone()
+        return dt_local.strftime("%Y-%m-%d")
+    except (ValueError, TypeError):
+        return date_iso
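
A quick usage sketch of the two helpers added above (the sample values are illustrative; to_shortdate converts to the local timezone, so the exact date printed depends on the machine, and datetime.fromisoformat only accepts a trailing "Z" on Python 3.11+):

```python
from starbash.commands import format_duration, to_shortdate

print(format_duration(45))    # -> "45s"
print(format_duration(90))    # -> "1m 30s"
print(format_duration(7322))  # -> "2h 2m"

print(to_shortdate("2023-10-15T14:30:00+00:00"))  # e.g. "2023-10-15" (local date)
print(to_shortdate("not a date"))                 # falls back to the input string
```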

starbash-0.1.7/src/starbash/commands/info.py

@@ -0,0 +1,120 @@
+"""Info commands for displaying system and data information."""
+
+import typer
+from typing_extensions import Annotated
+from rich.table import Table
+from collections import Counter
+
+from starbash.app import Starbash
+from starbash import console
+from starbash.database import Database, get_column_name
+from starbash.paths import get_user_config_dir, get_user_data_dir
+from starbash.commands import format_duration, TABLE_COLUMN_STYLE, TABLE_VALUE_STYLE
+
+app = typer.Typer()
+
+
+def plural(name: str) -> str:
+    """Return the plural form of a given noun (simple heuristic - FIXME won't work with i18n)."""
+    if name.endswith("y"):
+        return name[:-1] + "ies"
+    else:
+        return name + "s"
+
+
+def dump_column(sb: Starbash, human_name: str, column_name: str) -> None:
+    # Get all telescopes from the database
+    sessions = sb.search_session()
+
+    # Also do a complete unfiltered search so we can compare for the users
+    allsessions = sb.db.search_session()
+
+    column_name = get_column_name(column_name)
+    found = [session[column_name] for session in sessions if session[column_name]]
+    allfound = [session[column_name] for session in allsessions if session[column_name]]
+
+    # Count occurrences of each telescope
+    found_counts = Counter(found)
+    all_counts = Counter(allfound)
+
+    # Sort by telescope name
+    sorted_telescopes = sorted(found_counts.items())
+
+    # Create and display table
+    table = Table(
+        title=f"{plural(human_name)} ({len(found_counts)} / {len(all_counts)} selected)"
+    )
+    table.add_column(human_name, style=TABLE_COLUMN_STYLE, no_wrap=False)
+    table.add_column(
+        "# of sessions", style=TABLE_COLUMN_STYLE, no_wrap=True, justify="right"
+    )
+
+    for i, count in sorted_telescopes:
+        table.add_row(i, str(count))
+
+    console.print(table)
+
+
+@app.command()
+def target():
+    """List targets (filtered based on the current selection)."""
+    with Starbash("info.target") as sb:
+        dump_column(sb, "Target", Database.OBJECT_KEY)
+
+
+@app.command()
+def telescope():
+    """List telescopes/instruments (filtered based on the current selection)."""
+    with Starbash("info.telescope") as sb:
+        dump_column(sb, "Telescope", Database.TELESCOP_KEY)
+
+
+@app.command()
+def filter():
+    """List all filters (filtered based on the current selection)."""
+    with Starbash("info.filter") as sb:
+        dump_column(sb, "Filter", Database.FILTER_KEY)
+
+
+@app.callback(invoke_without_command=True)
+def main_callback(ctx: typer.Context):
+    """Show user preferences location and other app info.
+
+    This is the default command when no subcommand is specified.
+    """
+    if ctx.invoked_subcommand is None:
+        with Starbash("info") as sb:
+            table = Table(title="Starbash Information")
+            table.add_column("Setting", style=TABLE_COLUMN_STYLE, no_wrap=True)
+            table.add_column("Value", style=TABLE_VALUE_STYLE)
+
+            # Show config and data directories
+            # table.add_row("Config Directory", str(get_user_config_dir()))
+            # table.add_row("Data Directory", str(get_user_data_dir()))
+
+            # Show user preferences if set
+            user_name = sb.user_repo.get("user.name")
+            if user_name:
+                table.add_row("User Name", str(user_name))
+
+            user_email = sb.user_repo.get("user.email")
+            if user_email:
+                table.add_row("User Email", str(user_email))
+
+            # Show number of repos
+            table.add_row("Total Repositories", str(len(sb.repo_manager.repos)))
+            table.add_row("User Repositories", str(len(sb.repo_manager.regular_repos)))
+
+            # Show database stats
+            table.add_row(
+                "Sessions Indexed", str(sb.db.len_table(Database.SESSIONS_TABLE))
+            )
+
+            table.add_row("Images Indexed", str(sb.db.len_table(Database.IMAGES_TABLE)))
+
+            total_exptime = sb.db.sum_column(Database.SESSIONS_TABLE, "exptime_total")
+            table.add_row(
+                "Total image time",
+                format_duration(total_exptime),
+            )
+            console.print(table)
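
Because `app` above is an ordinary Typer sub-application, it can be exercised directly with Typer's test runner. This is only a sketch: it assumes starbash is installed and that a database has already been indexed, otherwise the rendered tables will simply be empty:

```python
from typer.testing import CliRunner
from starbash.commands.info import app

runner = CliRunner()
for args in ([], ["target"], ["telescope"], ["filter"]):
    result = runner.invoke(app, args)
    print(args, "->", result.exit_code)  # 0 on success
    print(result.output)                 # the Rich table printed by each command
```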

{starbash-0.1.6 → starbash-0.1.7}/src/starbash/commands/select.py

@@ -1,6 +1,7 @@
 """Selection commands for filtering sessions and targets."""
 
 import os
+from typing import Any
 import typer
 from pathlib import Path
 from typing_extensions import Annotated
@@ -8,9 +9,14 @@ from datetime import datetime
 from rich.table import Table
 
 from starbash.app import Starbash, copy_images_to_dir
-from starbash.database import Database
+from starbash.database import Database, get_column_name
 from starbash import console
-from starbash.commands import format_duration
+from starbash.commands import (
+    format_duration,
+    to_shortdate,
+    TABLE_COLUMN_STYLE,
+    TABLE_VALUE_STYLE,
+)
 
 app = typer.Typer()
 
@@ -127,14 +133,14 @@ def list_sessions():
         sb.analytics.set_data("session.num_selected", len(sessions))
         sb.analytics.set_data("session.num_total", len_all)
 
-        table.add_column("#", style=
-        table.add_column("Date", style=
-        table.add_column("# images", style=
-        table.add_column("Time", style=
-        table.add_column("Type/Filter", style=
-        table.add_column("Telescope", style=
+        table.add_column("#", style=TABLE_COLUMN_STYLE, no_wrap=True)
+        table.add_column("Date", style=TABLE_COLUMN_STYLE, no_wrap=True)
+        table.add_column("# images", style=TABLE_COLUMN_STYLE, no_wrap=True)
+        table.add_column("Time", style=TABLE_COLUMN_STYLE, no_wrap=True)
+        table.add_column("Type/Filter", style=TABLE_COLUMN_STYLE, no_wrap=True)
+        table.add_column("Telescope", style=TABLE_COLUMN_STYLE, no_wrap=True)
         table.add_column(
-            "About", style=
+            "About", style=TABLE_COLUMN_STYLE, no_wrap=True
         )  # type of frames, filter, target
 
         total_images = 0
@@ -143,26 +149,25 @@ def list_sessions():
         image_types = set()
         telescopes = set()
 
+        def get_key(k: str, default: Any = "N/A") -> Any:
+            """Convert keynames to SQL legal column names"""
+            k = get_column_name(k)
+            return sess.get(k, default)
+
         for session_index, sess in enumerate(sessions):
-            date_iso =
-
-            try:
-                dt_utc = datetime.fromisoformat(date_iso)
-                dt_local = dt_utc.astimezone()
-                date = dt_local.strftime("%Y-%m-%d")
-            except (ValueError, TypeError):
-                date = date_iso
+            date_iso = get_key(Database.START_KEY)
+            date = to_shortdate(date_iso)
 
-            object =
-            filter =
+            object = get_key(Database.OBJECT_KEY)
+            filter = get_key(Database.FILTER_KEY)
             filters.add(filter)
-            image_type =
+            image_type = get_key(Database.IMAGETYP_KEY)
             image_types.add(image_type)
-            telescope =
+            telescope = get_key(Database.TELESCOP_KEY)
             telescopes.add(telescope)
 
             # Format total exposure time as integer seconds
-            exptime_raw =
+            exptime_raw = get_key(Database.EXPTIME_TOTAL_KEY)
             try:
                 exptime_float = float(exptime_raw)
                 total_seconds += exptime_float
@@ -172,10 +177,10 @@ def list_sessions():
 
             # Count images
             try:
-                num_images = int(
+                num_images = int(get_key(Database.NUM_IMAGES_KEY, 0))
                 total_images += num_images
             except (ValueError, TypeError):
-                num_images =
+                num_images = get_key(Database.NUM_IMAGES_KEY)
 
             type_str = image_type
             if image_type.upper() == "LIGHT":
@@ -300,8 +305,8 @@ def show_selection(ctx: typer.Context):
             console.print(f"[yellow]{summary['message']}[/yellow]")
         else:
             table = Table(title="Current Selection")
-            table.add_column("Criteria", style=
-            table.add_column("Value", style=
+            table.add_column("Criteria", style=TABLE_COLUMN_STYLE)
+            table.add_column("Value", style=TABLE_VALUE_STYLE)
 
             for criterion in summary["criteria"]:
                 parts = criterion.split(": ", 1)

{starbash-0.1.6 → starbash-0.1.7}/src/starbash/database.py

@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import logging
 import sqlite3
 from pathlib import Path
 from typing import Any, Optional
@@ -10,6 +9,14 @@ import json
 from .paths import get_user_data_dir
 
 
+def get_column_name(k: str) -> str:
+    """Convert keynames to SQL legal column names"""
+    k = k.lower()
+    k = k.replace(" ", "_")
+    k = k.replace("-", "_")
+    return k
+
+
 class Database:
     """SQLite-backed application database.
 
@@ -220,9 +227,56 @@ class Database:
 
         return results if results else None
 
+    def where_session(self, conditions: dict[str, Any] | None) -> tuple[str, list[Any]]:
+        """Search for sessions matching the given conditions.
+
+        Args:
+            conditions: Dictionary of session key-value pairs to match, or None for all.
+                Special keys:
+                - 'date_start': Filter sessions starting on or after this date
+                - 'date_end': Filter sessions starting on or before this date
+
+        Returns:
+            Tuple of (WHERE clause string, list of parameters)
+        """
+        if conditions is None:
+            conditions = {}
+
+        # Build WHERE clause dynamically based on conditions
+        where_clauses = []
+        params = []
+
+        # Extract date range conditions
+        date_start = conditions.get("date_start")
+        date_end = conditions.get("date_end")
+
+        # Add date range filters to WHERE clause
+        if date_start:
+            where_clauses.append("start >= ?")
+            params.append(date_start)
+
+        if date_end:
+            where_clauses.append("start <= ?")
+            params.append(date_end)
+
+        # Add standard conditions to WHERE clause
+        for key, value in conditions.items():
+            if key not in ("date_start", "date_end") and value is not None:
+                column_name = key
+                where_clauses.append(f"{column_name} = ?")
+                params.append(value)
+
+        # Build the query
+        query = ""
+
+        if where_clauses:
+            query += " WHERE " + " AND ".join(where_clauses)
+
+        return (query, params)
+
     def search_session(
-        self, conditions: dict[str, Any] | None
-    ) -> list[dict[str, Any]]
+        self, conditions: dict[str, Any] | None = None
+    ) -> list[dict[str, Any]]:
         """Search for sessions matching the given conditions.
 
         Args:
@@ -235,59 +289,24 @@ class Database:
            List of matching session records or None
        """
        if conditions is None:
-
+            conditions = {}
 
-
-
-
+        # Build WHERE clause dynamically based on conditions
+        where_clause, params = self.where_session(conditions)
+
+        # Build the query
+        query = f"""
            SELECT id, start, end, filter, imagetyp, object, telescop,
                   num_images, exptime_total, image_doc_id
            FROM {self.SESSIONS_TABLE}
+            {where_clause}
            """
-        )
-
-        # Extract date range conditions if present
-        date_start = conditions.get("date_start")
-        date_end = conditions.get("date_end")
-
-        # Create a copy without date range keys for standard matching
-        standard_conditions = {
-            k: v
-            for k, v in conditions.items()
-            if k not in ("date_start", "date_end") and v is not None
-        }
-
-        results = []
-        for row in cursor.fetchall():
-            session = {
-                "id": row["id"],
-                self.START_KEY: row["start"],
-                self.END_KEY: row["end"],
-                self.FILTER_KEY: row["filter"],
-                self.IMAGETYP_KEY: row["imagetyp"],
-                self.OBJECT_KEY: row["object"],
-                self.TELESCOP_KEY: row["telescop"],
-                self.NUM_IMAGES_KEY: row["num_images"],
-                self.EXPTIME_TOTAL_KEY: row["exptime_total"],
-                self.IMAGE_DOC_KEY: row["image_doc_id"],
-            }
-
-            # Check if all standard conditions match
-            match = all(session.get(k) == v for k, v in standard_conditions.items())
-
-            # Apply date range filtering
-            if match and date_start:
-                session_start = session.get(self.START_KEY, "")
-                match = match and session_start >= date_start
-
-            if match and date_end:
-                session_start = session.get(self.START_KEY, "")
-                match = match and session_start <= date_end
 
-
-
+        cursor = self._db.cursor()
+        cursor.execute(query, params)
 
-
+        results = [dict(row) for row in cursor.fetchall()]
+        return results
 
     def len_table(self, table_name: str) -> int:
         """Return the total number of rows in the specified table."""
@@ -361,35 +380,6 @@ class Database:
 
         return results
 
-    def all_sessions(self) -> list[dict[str, Any]]:
-        """Return all session records."""
-        cursor = self._db.cursor()
-        cursor.execute(
-            f"""
-            SELECT id, start, end, filter, imagetyp, object, telescop,
-                   num_images, exptime_total, image_doc_id
-            FROM {self.SESSIONS_TABLE}
-            """
-        )
-
-        results = []
-        for row in cursor.fetchall():
-            session = {
-                "id": row["id"],
-                self.START_KEY: row["start"],
-                self.END_KEY: row["end"],
-                self.FILTER_KEY: row["filter"],
-                self.IMAGETYP_KEY: row["imagetyp"],
-                self.OBJECT_KEY: row["object"],
-                self.TELESCOP_KEY: row["telescop"],
-                self.NUM_IMAGES_KEY: row["num_images"],
-                self.EXPTIME_TOTAL_KEY: row["exptime_total"],
-                self.IMAGE_DOC_KEY: row["image_doc_id"],
-            }
-            results.append(session)
-
-        return results
-
     def get_session_by_id(self, session_id: int) -> dict[str, Any] | None:
         """Get a session record by its ID.
 
@@ -414,18 +404,7 @@ class Database:
         if row is None:
             return None
 
-        return {
-            "id": row["id"],
-            self.START_KEY: row["start"],
-            self.END_KEY: row["end"],
-            self.FILTER_KEY: row["filter"],
-            self.IMAGETYP_KEY: row["imagetyp"],
-            self.OBJECT_KEY: row["object"],
-            self.TELESCOP_KEY: row["telescop"],
-            self.NUM_IMAGES_KEY: row["num_images"],
-            self.EXPTIME_TOTAL_KEY: row["exptime_total"],
-            self.IMAGE_DOC_KEY: row["image_doc_id"],
-        }
+        return dict(row)
 
     def get_session(self, to_find: dict[str, str]) -> dict[str, Any] | None:
         """Find a session matching the given criteria.
@@ -470,18 +449,7 @@ class Database:
         if row is None:
             return None
 
-        return {
-            "id": row["id"],
-            self.START_KEY: row["start"],
-            self.END_KEY: row["end"],
-            self.FILTER_KEY: row["filter"],
-            self.IMAGETYP_KEY: row["imagetyp"],
-            self.OBJECT_KEY: row["object"],
-            self.TELESCOP_KEY: row["telescop"],
-            self.NUM_IMAGES_KEY: row["num_images"],
-            self.EXPTIME_TOTAL_KEY: row["exptime_total"],
-            self.IMAGE_DOC_KEY: row["image_doc_id"],
-        }
+        return dict(row)
 
     def upsert_session(
         self, new: dict[str, Any], existing: dict[str, Any] | None = None
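
To make the new database helpers concrete, here is a small sketch of what they produce (only get_column_name can be called without an open database; the `db` handle and the example condition values below are assumptions for illustration):

```python
from starbash.database import get_column_name

# FITS-style keywords become SQL-safe column names.
assert get_column_name("DATE-OBS") == "date_obs"
assert get_column_name("Exposure Time") == "exposure_time"

# where_session() turns a conditions dict into a parameterized WHERE clause,
# which search_session() appends to its SELECT. Assuming `db` is an open Database:
#   where, params = db.where_session({"telescop": "Vespera", "date_start": "2025-01-01"})
#   where  == " WHERE start >= ? AND telescop = ?"
#   params == ["2025-01-01", "Vespera"]
```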

starbash-0.1.6/src/starbash/__init__.py

@@ -1,11 +0,0 @@
-import logging
-
-from .database import Database  # re-export for convenience
-from rich.console import Console
-
-console = Console()
-
-# Global variable for log filter level (can be changed via --debug flag)
-log_filter_level = logging.INFO
-
-__all__ = ["Database"]

starbash-0.1.6/src/starbash/commands/__init__.py

@@ -1,15 +0,0 @@
-"""Shared utilities for starbash commands."""
-
-
-def format_duration(seconds: int | float) -> str:
-    """Format seconds as a human-readable duration string."""
-    if seconds < 60:
-        return f"{int(seconds)}s"
-    elif seconds < 120:
-        minutes = int(seconds // 60)
-        secs = int(seconds % 60)
-        return f"{minutes}m {secs}s" if secs else f"{minutes}m"
-    else:
-        hours = int(seconds // 3600)
-        minutes = int((seconds % 3600) // 60)
-        return f"{hours}h {minutes}m" if minutes else f"{hours}h"

starbash-0.1.6/src/starbash/commands/info.py

@@ -1,92 +0,0 @@
-"""Info commands for displaying system and data information."""
-
-import typer
-from typing_extensions import Annotated
-
-from starbash.app import Starbash
-from starbash import console
-from starbash.database import Database
-from starbash.paths import get_user_config_dir, get_user_data_dir
-from starbash.commands import format_duration
-
-app = typer.Typer()
-
-
-@app.command()
-def target():
-    """List targets (filtered based on the current selection)."""
-    with Starbash("info.target") as sb:
-        console.print("[yellow]Not yet implemented[/yellow]")
-        console.print(
-            "This command will list all unique targets in the current selection."
-        )
-
-
-@app.command()
-def telescope():
-    """List telescopes/instruments (filtered based on the current selection)."""
-    with Starbash("info.telescope") as sb:
-        console.print("[yellow]Not yet implemented[/yellow]")
-        console.print(
-            "This command will list all unique telescopes in the current selection."
-        )
-
-
-@app.command()
-def filter():
-    """List all filters found in current selection."""
-    with Starbash("info.filter") as sb:
-        console.print("[yellow]Not yet implemented[/yellow]")
-        console.print(
-            "This command will list all unique filters in the current selection."
-        )
-
-
-@app.callback(invoke_without_command=True)
-def main_callback(ctx: typer.Context):
-    """Show user preferences location and other app info.
-
-    This is the default command when no subcommand is specified.
-    """
-    if ctx.invoked_subcommand is None:
-        with Starbash("info") as sb:
-            from rich.table import Table
-
-            table = Table(title="Starbash Information")
-            table.add_column("Setting", style="cyan", no_wrap=True)
-            table.add_column("Value", style="green")
-
-            # Show config and data directories
-            table.add_row("Config Directory", str(get_user_config_dir()))
-            table.add_row("Data Directory", str(get_user_data_dir()))
-
-            # Show user preferences if set
-            user_name = sb.user_repo.get("user.name")
-            if user_name:
-                table.add_row("User Name", str(user_name))
-
-            user_email = sb.user_repo.get("user.email")
-            if user_email:
-                table.add_row("User Email", str(user_email))
-
-            # Show analytics setting
-            analytics_enabled = sb.user_repo.get("analytics.enabled", True)
-            table.add_row("Analytics", "Enabled" if analytics_enabled else "Disabled")
-
-            # Show number of repos
-            table.add_row("Total Repositories", str(len(sb.repo_manager.repos)))
-            table.add_row("User Repositories", str(len(sb.repo_manager.regular_repos)))
-
-            # Show database stats
-            table.add_row(
-                "Sessions Indexed", str(sb.db.len_table(Database.SESSIONS_TABLE))
-            )
-
-            table.add_row("Images Indexed", str(sb.db.len_table(Database.IMAGES_TABLE)))
-
-            total_exptime = sb.db.sum_column(Database.SESSIONS_TABLE, "exptime_total")
-            table.add_row(
-                "Total image time",
-                format_duration(total_exptime),
-            )
-            console.print(table)

starbash-0.1.6/src/starbash/repo/manager.py

@@ -1,381 +0,0 @@
-"""
-Manages the repository of processing recipes and configurations.
-"""
-
-from __future__ import annotations
-import logging
-from pathlib import Path
-from importlib import resources
-from typing import Any
-
-import tomlkit
-from tomlkit.toml_file import TOMLFile
-from tomlkit.items import AoT
-from multidict import MultiDict
-
-
-repo_suffix = "starbash.toml"
-
-REPO_REF = "repo-ref"
-
-
-class Repo:
-    """
-    Represents a single starbash repository."""
-
-    def __init__(self, manager: RepoManager, url: str):
-        """
-        Initializes a Repo instance.
-
-        Args:
-            url: The URL to the repository (file or general http/https urls are acceptable).
-        """
-        self.manager = manager
-        self.url = url
-        self.config = self._load_config()
-
-    def __str__(self) -> str:
-        """Return a concise one-line description of this repo.
-
-        Example: "Repo(kind=recipe, local=True, url=file:///path/to/repo)"
-        """
-        return f"Repo(kind={self.kind}, url={self.url})"
-
-    __repr__ = __str__
-
-    def kind(self, unknown_kind: str = "unknown") -> str:
-        """
-        Read-only attribute for the repository kind (e.g., "recipe", "data", etc.).
-
-        Returns:
-            The kind of the repository as a string.
-        """
-        c = self.get("repo.kind", unknown_kind)
-        return str(c)
-
-    def add_repo_ref(self, dir: str) -> Repo | None:
-        """
-        Adds a new repo-ref to this repository's configuration.
-        if new returns the newly added Repo object, if already exists returns None"""
-
-        # if dir is not absolute, we need to resolve it relative to the cwd
-        if not Path(dir).is_absolute():
-            dir = str((Path.cwd() / dir).resolve())
-
-        # Add the ref to this repo
-        aot = self.config.get(REPO_REF, None)
-        if aot is None:
-            aot = tomlkit.aot()
-            self.config[REPO_REF] = aot  # add an empty AoT at the end of the file
-
-        if type(aot) is not AoT:
-            raise ValueError(f"repo-ref in {self.url} is not an array")
-
-        for t in aot:
-            if "dir" in t and t["dir"] == dir:
-                logging.warning(f"Repo ref {dir} already exists - ignoring.")
-                return None  # already exists
-
-        ref = {"dir": dir}
-        aot.append(ref)
-
-        # Also add the repo to the manager
-        return self.add_from_ref(ref)
-
-    def write_config(self) -> None:
-        """
-        Writes the current (possibly modified) configuration back to the repository's config file.
-
-        Raises:
-            ValueError: If the repository is not a local file repository.
-        """
-        base_path = self.get_path()
-        if base_path is None:
-            raise ValueError("Cannot resolve path for non-local repository")
-
-        config_path = base_path / repo_suffix
-        # FIXME, be more careful to write the file atomically (by writing to a temp file and renaming)
-        TOMLFile(config_path).write(self.config)
-        logging.debug(f"Wrote config to {config_path}")
-
-    def is_scheme(self, scheme: str = "file") -> bool:
-        """
-        Read-only attribute indicating whether the repository URL points to a
-        local file system path (file:// scheme).
-
-        Returns:
-            bool: True if the URL is a local file path, False otherwise.
-        """
-        return self.url.startswith(f"{scheme}://")
-
-    def get_path(self) -> Path | None:
-        """
-        Resolves the URL to a local file system path if it's a file URI.
-
-        Args:
-            url: The repository URL.
-
-        Returns:
-            A Path object if the URL is a local file, otherwise None.
-        """
-        if self.is_scheme("file"):
-            return Path(self.url[len("file://") :])
-
-        return None
-
-    def add_from_ref(self, ref: dict) -> Repo:
-        """
-        Adds a repository based on a repo-ref dictionary.
-        """
-        if "url" in ref:
-            url = ref["url"]
-        elif "dir" in ref:
-            # FIXME don't allow ~ or .. in file paths for security reasons?
-            if self.is_scheme("file"):
-                path = Path(ref["dir"])
-                base_path = self.get_path()
-
-                if base_path and not path.is_absolute():
-                    # Resolve relative to the current TOML file's directory
-                    path = (base_path / path).resolve()
-                else:
-                    # Expand ~ and resolve from CWD
-                    path = path.expanduser().resolve()
-                url = f"file://{path}"
-            else:
-                # construct an URL relative to this repo's URL
-                url = self.url.rstrip("/") + "/" + ref["dir"].lstrip("/")
-        else:
-            raise ValueError(f"Invalid repo reference: {ref}")
-        return self.manager.add_repo(url)
-
-    def add_by_repo_refs(self) -> None:
-        """Add all repos mentioned by repo-refs in this repo's config."""
-        repo_refs = self.config.get(REPO_REF, [])
-
-        for ref in repo_refs:
-            self.add_from_ref(ref)
-
-    def _read_file(self, filepath: str) -> str:
-        """
-        Read a filepath relative to the base of this repo. Return the contents in a string.
-
-        Args:
-            filepath: The path to the file, relative to the repository root.
-
-        Returns:
-            The content of the file as a string.
-        """
-        base_path = self.get_path()
-        if base_path is None:
-            raise ValueError("Cannot read files from non-local repositories")
-        target_path = (base_path / filepath).resolve()
-
-        # Security check to prevent reading files outside the repo directory.
-        # FIXME SECURITY - temporarily disabled because I want to let file urls say things like ~/foo.
-        # it would false trigger if user homedir path has a symlink in it (such as /home -> /var/home)
-        # base_path = PosixPath('/home/kevinh/.config/starbash')
-        # filepath = 'starbash.toml'
-        # self = <repr-error 'maximum recursion depth exceeded'>
-        # target_path = PosixPath('/var/home/kevinh/.config/starbash/starbash.toml')
-        #
-        # if base_path not in target_path.parents and target_path != base_path:
-        #     raise PermissionError("Attempted to read file outside of repository")
-
-        return target_path.read_text()
-
-    def _read_resource(self, filepath: str) -> str:
-        """
-        Read a resource from the installed starbash package using a pkg:// URL.
-
-        Assumptions (simplified per project constraints):
-        - All pkg URLs point somewhere inside the already-imported 'starbash' package.
-        - The URL is treated as a path relative to the starbash package root.
-
-        Examples:
-            url: pkg://defaults + filepath: "starbash.toml"
-              -> reads starbash/defaults/starbash.toml
-
-        Args:
-            filepath: Path within the base resource directory for this repo.
-
-        Returns:
-            The content of the resource as a string (UTF-8).
-        """
-        # Path portion after pkg://, interpreted relative to the 'starbash' package
-        subpath = self.url[len("pkg://") :].strip("/")
-
-        res = resources.files("starbash").joinpath(subpath).joinpath(filepath)
-        return res.read_text()
-
-    def _load_config(self) -> tomlkit.TOMLDocument:
-        """
-        Loads the repository's configuration file (e.g., repo.sb.toml).
-
-        If the config file does not exist, it logs a warning and returns an empty dict.
-
-        Returns:
-            A dictionary containing the parsed configuration.
-        """
-        try:
-            if self.is_scheme("file"):
-                config_content = self._read_file(repo_suffix)
-            elif self.is_scheme("pkg"):
-                config_content = self._read_resource(repo_suffix)
-            else:
-                raise ValueError(f"Unsupported URL scheme for repo: {self.url}")
-            logging.debug(f"Loading repo config from {repo_suffix}")
-            return tomlkit.parse(config_content)
-        except FileNotFoundError:
-            logging.debug(
-                f"No {repo_suffix} found"
-            )  # we currently make it optional to have the config file at root
-            return tomlkit.TOMLDocument()  # empty placeholder
-
-    def get(self, key: str, default: Any | None = None) -> Any | None:
-        """
-        Gets a value from this repo's config for a given key.
-        The key can be a dot-separated string for nested values.
-
-        Args:
-            key: The dot-separated key to search for (e.g., "repo.kind").
-            default: The value to return if the key is not found.
-
-        Returns:
-            The found value or the default.
-        """
-        value = self.config
-        for k in key.split("."):
-            if not isinstance(value, dict):
-                return default
-            value = value.get(k)
-        return value if value is not None else default
-
-    def set(self, key: str, value: Any) -> None:
-        """
-        Sets a value in this repo's config for a given key.
-        The key can be a dot-separated string for nested values.
-        Creates nested Table structures as needed.
-
-        Args:
-            key: The dot-separated key to set (e.g., "repo.kind").
-            value: The value to set.
-
-        Example:
-            repo.set("repo.kind", "preferences")
-            repo.set("user.name", "John Doe")
-        """
-        keys = key.split(".")
-        current: Any = self.config
-
-        # Navigate/create nested structure for all keys except the last
-        for k in keys[:-1]:
-            if k not in current:
-                # Create a new nested table
-                current[k] = tomlkit.table()
-            elif not isinstance(current[k], dict):
-                # Overwrite non-dict value with a table
-                current[k] = tomlkit.table()
-            current = current[k]
-
-        # Set the final value
-        current[keys[-1]] = value
-
-
-class RepoManager:
-    """
-    Manages the collection of starbash repositories.
-
-    This class is responsible for finding, loading, and providing an API
-    for searching through known repositories defined in TOML configuration
-    files (like appdefaults.sb.toml).
-    """
-
-    def __init__(self):
-        """
-        Initializes the RepoManager by loading the application default repos.
-        """
-        self.repos = []
-
-        # We expose the app default preferences as a special root repo with a private URL
-        # root_repo = Repo(self, "pkg://starbash-defaults", config=app_defaults)
-        # self.repos.append(root_repo)
-
-        # Most users will just want to read from merged
-        self.merged = MultiDict()
-
-    @property
-    def regular_repos(self) -> list[Repo]:
-        "We exclude certain repo types (preferences, recipe) from the list of repos users care about."
-        return [
-            r
-            for r in self.repos
-            if r.kind() not in ("preferences") and not r.is_scheme("pkg")
-        ]
-
-    def add_repo(self, url: str) -> Repo:
-        logging.debug(f"Adding repo: {url}")
-        r = Repo(self, url)
-        self.repos.append(r)
-
-        # FIXME, generate the merged dict lazily
-        self._add_merged(r)
-
-        # if this new repo has sub-repos, add them too
-        r.add_by_repo_refs()
-
-        return r
-
-    def get(self, key: str, default=None):
-        """
-        Searches for a key across all repositories and returns the first value found.
-        The search is performed in reverse order of repository loading, so the
-        most recently added repositories have precedence.
-
-        Args:
-            key: The dot-separated key to search for (e.g., "repo.kind").
-            default: The value to return if the key is not found in any repo.
-
-        Returns:
-            The found value or the default.
-        """
-        # Iterate in reverse to give precedence to later-loaded repos
-        for repo in reversed(self.repos):
-            value = repo.get(key)
-            if value is not None:
-                return value
-
-        return default
-
-    def dump(self):
-        """
-        Prints a detailed, multi-line description of the combined top-level keys
-        and values from all repositories, using a MultiDict for aggregation.
-        This is useful for debugging and inspecting the consolidated configuration.
-        """
-
-        combined_config = self.merged
-        logging.info("RepoManager Dump")
-        for key, value in combined_config.items():
-            # tomlkit.items() can return complex types (e.g., ArrayOfTables, Table)
-            # For a debug dump, a simple string representation is usually sufficient.
-            logging.info(f" %s: %s", key, value)
-
-    def _add_merged(self, repo: Repo) -> None:
-        for key, value in repo.config.items():
-            # if the toml object is an AoT type, monkey patch each element in the array instead
-            if isinstance(value, AoT):
-                for v in value:
-                    setattr(v, "source", repo)
-            else:
-                # We monkey patch source into any object that came from a repo, so that users can
-                # find the source repo (for attribution, URL relative resolution, whatever...)
-                setattr(value, "source", repo)
-
-            self.merged.add(key, value)
-
-    def __str__(self):
-        lines = [f"RepoManager with {len(self.repos)} repositories:"]
-        for i, repo in enumerate(self.repos):
-            lines.append(f" [{i}] {repo.url}")
-        return "\n".join(lines)

The remaining 20 files listed above with +0 -0 are unchanged between 0.1.6 and 0.1.7.