starbash 0.1.0__py3-none-any.whl → 0.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- starbash/analytics.py +121 -0
- starbash/app.py +223 -38
- starbash/commands/repo.py +103 -21
- starbash/commands/selection.py +137 -0
- starbash/commands/user.py +63 -0
- starbash/database.py +476 -25
- starbash/defaults/__init__.py +0 -0
- starbash/{appdefaults.sb.toml → defaults/starbash.toml} +5 -45
- starbash/main.py +142 -13
- starbash/paths.py +38 -0
- starbash/repo/manager.py +129 -59
- starbash/selection.py +251 -0
- starbash/templates/__init__.py +0 -0
- starbash/templates/userconfig.toml +53 -0
- starbash/url.py +9 -0
- starbash-0.1.3.dist-info/METADATA +114 -0
- starbash-0.1.3.dist-info/RECORD +24 -0
- {starbash-0.1.0.dist-info → starbash-0.1.3.dist-info}/WHEEL +1 -1
- starbash-0.1.0.dist-info/METADATA +0 -82
- starbash-0.1.0.dist-info/RECORD +0 -15
- {starbash-0.1.0.dist-info → starbash-0.1.3.dist-info}/entry_points.txt +0 -0
- {starbash-0.1.0.dist-info → starbash-0.1.3.dist-info/licenses}/LICENSE +0 -0
starbash/analytics.py
ADDED
@@ -0,0 +1,121 @@
+import logging
+
+from starbash import console
+import starbash.url as url
+
+# Default to no analytics/auto crash reports
+analytics_allowed = False
+
+
+def analytics_setup(allowed: bool = False, user_email: str | None = None) -> None:
+    import sentry_sdk
+
+    global analytics_allowed
+    analytics_allowed = allowed
+    if analytics_allowed:
+        logging.info(
+            f"Analytics/crash-reports enabled. To change [link={url.analytics_docs}]click here[/link]",
+            extra={"markup": True},
+        )
+        sentry_sdk.init(
+            dsn="https://e9496a4ea8b37a053203a2cbc10d64e6@o209837.ingest.us.sentry.io/4510264204132352",
+            send_default_pii=True,
+            enable_logs=True,
+            traces_sample_rate=1.0,
+        )
+
+        if user_email:
+            sentry_sdk.set_user({"email": user_email})
+    else:
+        logging.info(
+            f"Analytics/crash-reports disabled. To learn more [link={url.analytics_docs}]click here[/link]",
+            extra={"markup": True},
+        )
+
+
+def analytics_shutdown() -> None:
+    """Shut down the analytics service, if enabled."""
+    if analytics_allowed:
+        import sentry_sdk
+
+        sentry_sdk.flush()
+
+
+def is_development_environment() -> bool:
+    """Detect if running in a development environment."""
+    import os
+    import sys
+    from pathlib import Path
+
+    # Check for explicit environment variable
+    if os.getenv("STARBASH_ENV") == "development":
+        return True
+
+    # Check if running under VS Code
+    if any(k.startswith("VSCODE_") for k in os.environ):
+        return True
+
+    return False
+
+
+def analytics_exception(exc: Exception) -> bool:
+    """Report an exception to the analytics service, if enabled.
+    return True to suppress exception propagation/log messages"""
+
+    if is_development_environment():
+        return False  # We want to let devs see full exception traces
+
+    if analytics_allowed:
+        import sentry_sdk
+
+        report_id = sentry_sdk.capture_exception(exc)
+
+        logging.info(
+            f"""An unexpected error has occurred and been reported. Thank you for your help.
+            If you'd like to chat with the devs about it, please click
+            [link={url.new_issue(str(report_id))}]here[/link] to open an issue.""",
+            extra={"markup": True},
+        )
+    else:
+        logging.error(
+            f"""An unexpected error has occurred. Automated crash reporting is disabled,
+            but we encourage you to contact the developers
+            at [link={url.new_issue()}]here[/link] and we will try to help.
+
+            The full exception is: {exc}""",
+            extra={"markup": True},
+        )
+    return True
+
+
+class NopAnalytics:
+    """Used when users have disabled analytics/crash reporting."""
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        return False
+
+    def set_data(self, key, value):
+        pass
+
+
+def analytics_start_span(**kwargs):
+    """Start an analytics/tracing span if analytics is enabled, otherwise return a no-op context manager."""
+    if analytics_allowed:
+        import sentry_sdk
+
+        return sentry_sdk.start_span(**kwargs)
+    else:
+        return NopAnalytics()
+
+
+def analytics_start_transaction(**kwargs):
+    """Start an analytics/tracing transaction if analytics is enabled, otherwise return a no-op context manager."""
+    if analytics_allowed:
+        import sentry_sdk
+
+        return sentry_sdk.start_transaction(**kwargs)
+    else:
+        return NopAnalytics()
starbash/app.py
CHANGED
@@ -1,16 +1,30 @@
 import logging
 from importlib import resources
-
+from pathlib import Path
+import typer
+import tomlkit
+from tomlkit.toml_file import TOMLFile
 import glob
 from typing import Any
 from astropy.io import fits
 import itertools
 from rich.progress import track
 from rich.logging import RichHandler
+
 from starbash.database import Database
+from starbash.repo.manager import Repo
 from starbash.tool import Tool
 from starbash.repo import RepoManager
 from starbash.tool import tools
+from starbash.paths import get_user_config_dir, get_user_data_dir
+from starbash.selection import Selection
+from starbash.analytics import (
+    NopAnalytics,
+    analytics_exception,
+    analytics_setup,
+    analytics_shutdown,
+    analytics_start_transaction,
+)


 def setup_logging():
@@ -28,78 +42,249 @@ def setup_logging():
 setup_logging()


-
-    """
+def create_user() -> Path:
+    """Create user directories if they don't exist yet."""
+    config_dir = get_user_config_dir()
+    userconfig_path = config_dir / "starbash.toml"
+    if not (userconfig_path).exists():
+        tomlstr = (
+            resources.files("starbash")
+            .joinpath("templates/userconfig.toml")
+            .read_text()
+        )
+        toml = tomlkit.parse(tomlstr)
+        TOMLFile(userconfig_path).write(toml)
+        logging.info(f"Created user config file: {userconfig_path}")
+    return config_dir
+
+
+class Starbash:
+    """The main Starbash application class."""

-    def __init__(self):
+    def __init__(self, cmd: str = "unspecified"):
         """
-        Initializes the
+        Initializes the Starbash application by loading configurations
         and setting up the repository manager.
         """
         setup_logging()
-        logging.info("
+        logging.info("Starbash starting...")

         # Load app defaults and initialize the repository manager
-
-
-
-
+        self.repo_manager = RepoManager()
+        self.repo_manager.add_repo("pkg://defaults")
+
+        # Add user prefs as a repo
+        self.user_repo = self.repo_manager.add_repo("file://" + str(create_user()))
+
+        self.analytics = NopAnalytics()
+        if self.user_repo.get("analytics.enabled", True):
+            include_user = self.user_repo.get("analytics.include_user", False)
+            user_email = (
+                self.user_repo.get("user.email", None) if include_user else None
+            )
+            if user_email is not None:
+                user_email = str(user_email)
+            analytics_setup(allowed=True, user_email=user_email)
+            # this is intended for use with "with" so we manually do enter/exit
+            self.analytics = analytics_start_transaction(name="App session", op=cmd)
+            self.analytics.__enter__()
+
         logging.info(
             f"Repo manager initialized with {len(self.repo_manager.repos)} default repo references."
         )
         # self.repo_manager.dump()

         self.db = Database()
+        self.session_query = None  # None means search all sessions
+
+        # Initialize selection state
+        data_dir = get_user_data_dir()
+        selection_file = data_dir / "selection.json"
+        self.selection = Selection(selection_file)
+
         # FIXME, call reindex somewhere and also index whenever new repos are added
         # self.reindex_repos()

     # --- Lifecycle ---
     def close(self) -> None:
+        self.analytics.__exit__(None, None, None)
+
+        analytics_shutdown()
         self.db.close()

     # Context manager support
-    def __enter__(self) -> "
+    def __enter__(self) -> "Starbash":
         return self

-    def __exit__(self, exc_type, exc, tb) ->
+    def __exit__(self, exc_type, exc, tb) -> bool:
+        handled = False
+        # Don't suppress typer.Exit - it's used for controlled exit codes
+        if exc and not isinstance(exc, typer.Exit):
+            handled = analytics_exception(exc)
         self.close()
+        return handled
+
+    def _add_session(self, f: str, image_doc_id: int, header: dict) -> None:
+        filter = header.get(Database.FILTER_KEY, "unspecified")
+        image_type = header.get(Database.IMAGETYP_KEY)
+        date = header.get(Database.DATE_OBS_KEY)
+        if not date or not image_type:
+            logging.warning(
+                "Image %s missing either DATE-OBS or IMAGETYP FITS header, skipping...",
+                f,
+            )
+        else:
+            exptime = header.get(Database.EXPTIME_KEY, 0)
+            telescop = header.get(Database.TELESCOP_KEY, "unspecified")
+            new = {
+                Database.FILTER_KEY: filter,
+                Database.START_KEY: date,
+                Database.END_KEY: date,  # FIXME not quite correct, should be longer by exptime
+                Database.IMAGE_DOC_KEY: image_doc_id,
+                Database.IMAGETYP_KEY: image_type,
+                Database.NUM_IMAGES_KEY: 1,
+                Database.EXPTIME_TOTAL_KEY: exptime,
+                Database.OBJECT_KEY: header.get(Database.OBJECT_KEY, "unspecified"),
+                Database.TELESCOP_KEY: telescop,
+            }
+            session = self.db.get_session(new)
+            self.db.upsert_session(new, existing=session)
+
+    def search_session(self) -> list[dict[str, Any]] | None:
+        """Search for sessions, optionally filtered by the current selection."""
+        # If selection has filters, use them; otherwise return all sessions
+        if self.selection.is_empty():
+            return self.db.search_session(None)
+        else:
+            # Get query conditions from selection
+            conditions = self.selection.get_query_conditions()
+            return self.db.search_session(conditions)

-    def
-    """
-
-        config = self.repo_manager.merged.get("config")
-        if not config:
-            raise ValueError(f"App config not found.")
-        whitelist = config["fits-whitelist"]
+    def get_session_images(self, session_id: int) -> list[dict[str, Any]]:
+        """
+        Get all images belonging to a specific session.

-
-
-
-
-
-
-
-
-
-
-
-
-
+        Sessions are defined by a unique combination of filter, imagetyp (image type),
+        object (target name), telescope, and date range. This method queries the images
+        table for all images matching the session's criteria in a single database query.
+
+        Args:
+            session_id: The database ID of the session
+
+        Returns:
+            List of image records (dictionaries with path, metadata, etc.)
+            Returns empty list if session not found or has no images.
+
+        Raises:
+            ValueError: If session_id is not found in the database
+        """
+        # First get the session details
+        session = self.db.get_session_by_id(session_id)
+        if session is None:
+            raise ValueError(f"Session with id {session_id} not found")
+
+        # Query images that match ALL session criteria including date range
+        conditions = {
+            Database.FILTER_KEY: session[Database.FILTER_KEY],
+            Database.IMAGETYP_KEY: session[Database.IMAGETYP_KEY],
+            Database.OBJECT_KEY: session[Database.OBJECT_KEY],
+            Database.TELESCOP_KEY: session[Database.TELESCOP_KEY],
+            "date_start": session[Database.START_KEY],
+            "date_end": session[Database.END_KEY],
+        }
+
+        # Single query with all conditions
+        images = self.db.search_image(conditions)
+        return images if images else []
+
+    def remove_repo_ref(self, url: str) -> None:
+        """
+        Remove a repository reference from the user configuration.
+
+        Args:
+            url: The repository URL to remove (e.g., 'file:///path/to/repo')
+
+        Raises:
+            ValueError: If the repository URL is not found in user configuration
+        """
+        # Get the repo-ref list from user config
+        repo_refs = self.user_repo.config.get("repo-ref")
+
+        if not repo_refs:
+            raise ValueError(f"No repository references found in user configuration.")
+
+        # Find and remove the matching repo-ref
+        found = False
+        refs_copy = [r for r in repo_refs]  # Make a copy to iterate
+        for ref in refs_copy:
+            ref_dir = ref.get("dir", "")
+            # Match by converting to file:// URL format if needed
+            if ref_dir == url or f"file://{ref_dir}" == url:
+                repo_refs.remove(ref)
+                found = True
+                break
+
+        if not found:
+            raise ValueError(f"Repository '{url}' not found in user configuration.")
+
+        # Write the updated config
+        self.user_repo.write_config()
+
+    def reindex_repo(self, repo: Repo, force: bool = False):
+        """Reindex all repositories managed by the RepoManager."""
+        # FIXME, add a method to get just the repos that contain images
+        if repo.is_scheme("file") and repo.kind != "recipe":
+            logging.debug("Reindexing %s...", repo.url)
+
+            whitelist = None
+            config = self.repo_manager.merged.get("config")
+            if config:
+                whitelist = config.get("fits-whitelist", None)
+
+            path = repo.get_path()
+            if not path:
+                raise ValueError(f"Repo path not found for {repo}")
+
+            # Find all FITS files under this repo path
+            for f in track(
+                list(path.rglob("*.fit*")),
+                description=f"Indexing {repo.url}...",
+            ):
+                # progress.console.print(f"Indexing {f}...")
+                try:
+                    found = self.db.get_image(str(f))
+                    if not found or force:
                         # Read and log the primary header (HDU 0)
                         with fits.open(str(f), memmap=False) as hdul:
                             # convert headers to dict
                             hdu0: Any = hdul[0]
-
+                            header = hdu0.header
+                            if type(header).__name__ == "Unknown":
+                                raise ValueError("FITS header has Unknown type: %s", f)
+
+                            items = header.items()
                             headers = {}
                             for key, value in items:
-                                if key in whitelist:
+                                if (not whitelist) or (key in whitelist):
                                     headers[key] = value
                             logging.debug("Headers for %s: %s", f, headers)
-
-
-
+                            headers["path"] = str(f)
+                            image_doc_id = self.db.upsert_image(headers)
+
+                            if not found:
+                                # Update the session infos, but ONLY on first file scan
+                                # (otherwise invariants will get messed up)
+                                self._add_session(str(f), image_doc_id, header)

-
+                except Exception as e:
+                    logging.warning("Failed to read FITS header for %s: %s", f, e)
+
+    def reindex_repos(self, force: bool = False):
+        """Reindex all repositories managed by the RepoManager."""
+        logging.debug("Reindexing all repositories...")
+
+        for repo in track(self.repo_manager.repos, description="Reindexing repos..."):
+            self.reindex_repo(repo, force=force)

     def test_processing(self):
         """A crude test of image processing pipeline - FIXME move into testing"""
starbash/commands/repo.py
CHANGED
@@ -1,50 +1,132 @@
 import typer
 from typing_extensions import Annotated

-from starbash.app import
+from starbash.app import Starbash
 from starbash import console

-app = typer.Typer()
+app = typer.Typer(invoke_without_command=True)


-@app.
-def
+@app.callback()
+def main(
+    ctx: typer.Context,
+    verbose: bool = typer.Option(
+        False, "--verbose", "-v", help="Show all repos including system repos"
+    ),
+):
     """
-
+    Manage repositories.
+
+    When called without a subcommand, lists all repositories.
+    Use --verbose to show all repos including system/recipe repos.
     """
-
+    # If no subcommand is invoked, run the list behavior
+    if ctx.invoked_subcommand is None:
+        with Starbash("repo-list") as sb:
+            repos = sb.repo_manager.repos if verbose else sb.repo_manager.regular_repos
+            for i, repo in enumerate(repos):
+                if verbose:
+                    # No numbers for verbose mode (system repos can't be removed)
+                    console.print(f"{ repo.url } (kind={ repo.kind})")
+                else:
+                    # Show numbers for user repos (can be removed later)
+                    console.print(f"{ i + 1:2}: { repo.url } (kind={ repo.kind})")


 @app.command()
-def
+def add(path: str):
     """
-
+    Add a repository. path is either a local path or a remote URL.
     """
-
+    with Starbash("repo-add") as sb:
+        sb.user_repo.add_repo_ref(path)
+        # we don't yet write default config files at roots of repos, but it would be easy to add here
+        # r.write_config()
+        sb.user_repo.write_config()
+        # FIXME, we also need to index the newly added repo!!!
+        console.print(f"Added repository: {path}")


 @app.command()
-def
+def remove(reponum: str):
     """
-
+    Remove a repository by number (from list).
+    Use 'starbash repo' to see the repository numbers.
     """
-    with
-
-
+    with Starbash("repo-remove") as sb:
+        try:
+            # Parse the repo number (1-indexed)
+            repo_index = int(reponum) - 1
+
+            # Get only the regular (user-visible) repos
+            regular_repos = sb.repo_manager.regular_repos
+
+            if repo_index < 0 or repo_index >= len(regular_repos):
+                console.print(
+                    f"[red]Error: Repository number {reponum} is out of range. Valid range: 1-{len(regular_repos)}[/red]"
+                )
+                raise typer.Exit(code=1)
+
+            # Get the repo to remove
+            repo_to_remove = regular_repos[repo_index]
+            repo_url = repo_to_remove.url
+
+            # Remove the repo reference from user config
+            sb.remove_repo_ref(repo_url)
+            console.print(f"[green]Removed repository: {repo_url}[/green]")
+
+        except ValueError:
+            console.print(
+                f"[red]Error: '{reponum}' is not a valid repository number. Please use a number from 'repo list'.[/red]"
+            )
+            raise typer.Exit(code=1)


 @app.command()
 def reindex(
-
-        str,
-        typer.Argument(help="The repository
-    ],
+    reponum: Annotated[
+        str | None,
+        typer.Argument(help="The repository number, if not specified reindex all."),
+    ] = None,
+    force: bool = typer.Option(
+        default=False, help="Reread FITS headers, even if they are already indexed."
+    ),
 ):
     """
-    Reindex
-    If no
+    Reindex a repository by number.
+    If no number is given, reindex all repositories.
+    Use 'starbash repo' to see the repository numbers.
     """
-
+    with Starbash("repo-reindex") as sb:
+        if reponum is None:
+            sb.reindex_repos(force=force)
+        else:
+            try:
+                # Parse the repo number (1-indexed)
+                repo_index = int(reponum) - 1
+
+                # Get only the regular (user-visible) repos
+                regular_repos = sb.repo_manager.regular_repos
+
+                if repo_index < 0 or repo_index >= len(regular_repos):
+                    console.print(
+                        f"[red]Error: Repository number {reponum} is out of range. Valid range: 1-{len(regular_repos)}[/red]"
+                    )
+                    raise typer.Exit(code=1)
+
+                # Get the repo to reindex
+                repo_to_reindex = regular_repos[repo_index]
+                console.print(f"Reindexing repository: {repo_to_reindex.url}")
+                sb.reindex_repo(repo_to_reindex, force=force)
+                console.print(
+                    f"[green]Successfully reindexed repository {reponum}[/green]"
+                )
+
+            except ValueError:
+                console.print(
+                    f"[red]Error: '{reponum}' is not a valid repository number. Please use a number from 'starbash repo'.[/red]"
+                )
+                raise typer.Exit(code=1)


 if __name__ == "__main__":