starbash 0.1.6__py3-none-any.whl → 0.1.9__py3-none-any.whl

This diff compares the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
repo/__init__.py CHANGED
@@ -3,5 +3,6 @@ The repo package handles finding, loading and searching starbash repositories.
  """

  from .manager import RepoManager
+ from .repo import Repo, repo_suffix, REPO_REF

- __all__ = ["RepoManager"]
+ __all__ = ["RepoManager", "Repo", "repo_suffix", "REPO_REF"]
repo/manager.py ADDED
@@ -0,0 +1,144 @@
+ """
+ Manages the repository of processing recipes and configurations.
+ """
+
+ from __future__ import annotations
+ import logging
+ from pathlib import Path
+ from importlib import resources
+ from typing import Any
+
+ import tomlkit
+ from tomlkit.toml_file import TOMLFile
+ from tomlkit.items import AoT
+ from multidict import MultiDict
+ from repo.repo import Repo
+
+
+ class RepoManager:
+     """
+     Manages the collection of starbash repositories.
+
+     This class is responsible for finding, loading, and providing an API
+     for searching through known repositories defined in TOML configuration
+     files (like appdefaults.sb.toml).
+     """
+
+     def __init__(self):
+         """
+         Initializes the RepoManager by loading the application default repos.
+         """
+         self.repos = []
+
+         # We expose the app default preferences as a special root repo with a private URL
+         # root_repo = Repo(self, "pkg://starbash-defaults", config=app_defaults)
+         # self.repos.append(root_repo)
+
+         # Most users will just want to read from merged
+         self.merged = MultiDict()
+
+     @property
+     def regular_repos(self) -> list[Repo]:
+         "We exclude certain repo types (preferences, recipe) from the list of repos users care about."
+         return [
+             r
+             for r in self.repos
+             if r.kind() not in ("preferences") and not r.is_scheme("pkg")
+         ]
+
+     def add_repo(self, url: str) -> Repo:
+         logging.debug(f"Adding repo: {url}")
+         r = Repo(self, url)
+         self.repos.append(r)
+
+         # FIXME, generate the merged dict lazily
+         self._add_merged(r)
+
+         # if this new repo has sub-repos, add them too
+         r.add_by_repo_refs()
+
+         return r
+
+     def get_repo_by_url(self, url: str) -> Repo | None:
+         """
+         Retrieves a repository by its URL.
+
+         Args:
+             url: The URL of the repository to retrieve.
+
+         Returns:
+             The Repo instance with the matching URL, or None if not found.
+         """
+         for repo in self.repos:
+             if repo.url == url:
+                 return repo
+         return None
+
+     def get_repo_by_kind(self, kind: str) -> Repo | None:
+         """
+         Retrieves the first repository matching the specified kind.
+
+         Args:
+             kind: The kind of repository to search for (e.g., "recipe", "preferences").
+
+         Returns:
+             The first Repo instance matching the kind, or None if not found.
+         """
+         for repo in self.repos:
+             if repo.kind() == kind:
+                 return repo
+         return None
+
+     def get(self, key: str, default=None):
+         """
+         Searches for a key across all repositories and returns the first value found.
+         The search is performed in reverse order of repository loading, so the
+         most recently added repositories have precedence.
+
+         Args:
+             key: The dot-separated key to search for (e.g., "repo.kind").
+             default: The value to return if the key is not found in any repo.
+
+         Returns:
+             The found value or the default.
+         """
+         # Iterate in reverse to give precedence to later-loaded repos
+         for repo in reversed(self.repos):
+             value = repo.get(key)
+             if value is not None:
+                 return value
+
+         return default
+
+     def dump(self):
+         """
+         Prints a detailed, multi-line description of the combined top-level keys
+         and values from all repositories, using a MultiDict for aggregation.
+         This is useful for debugging and inspecting the consolidated configuration.
+         """
+
+         combined_config = self.merged
+         logging.info("RepoManager Dump")
+         for key, value in combined_config.items():
+             # tomlkit.items() can return complex types (e.g., ArrayOfTables, Table)
+             # For a debug dump, a simple string representation is usually sufficient.
+             logging.info(f"  %s: %s", key, value)
+
+     def _add_merged(self, repo: Repo) -> None:
+         for key, value in repo.config.items():
+             # if the toml object is an AoT type, monkey patch each element in the array instead
+             if isinstance(value, AoT):
+                 for v in value:
+                     setattr(v, "source", repo)
+             else:
+                 # We monkey patch source into any object that came from a repo, so that users can
+                 # find the source repo (for attribution, URL relative resolution, whatever...)
+                 setattr(value, "source", repo)
+
+             self.merged.add(key, value)
+
+     def __str__(self):
+         lines = [f"RepoManager with {len(self.repos)} repositories:"]
+         for i, repo in enumerate(self.repos):
+             lines.append(f"  [{i}] {repo.url}")
+         return "\n".join(lines)
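
The new module is self-contained, so a hedged usage sketch can exercise just the methods shown above (the file:// URL and config keys are illustrative, not from the package docs):

    from repo.manager import RepoManager

    mgr = RepoManager()
    repo = mgr.add_repo("file:///path/to/repo")  # also loads any repo-refs the repo declares

    # Later-added repos win when the same key exists in several repos.
    kind = mgr.get("repo.kind", default="unknown")

    prefs = mgr.get_repo_by_kind("preferences")  # first match, or None
    mgr.dump()   # log the merged top-level keys
    print(mgr)   # "RepoManager with N repositories: ..."
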
repo/repo.py CHANGED
@@ -1,18 +1,16 @@
- """
- Manages the repository of processing recipes and configurations.
- """
-
  from __future__ import annotations
  import logging
  from pathlib import Path
  from importlib import resources
- from typing import Any
+ from typing import Any, TYPE_CHECKING

  import tomlkit
  from tomlkit.toml_file import TOMLFile
  from tomlkit.items import AoT
  from multidict import MultiDict

+ if TYPE_CHECKING:
+     from repo.manager import RepoManager

  repo_suffix = "starbash.toml"
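
The TYPE_CHECKING guard is the usual way to break the circular dependency this split creates: repo/manager.py imports Repo at runtime, while repo/repo.py needs RepoManager only for type annotations. A generic sketch of the pattern (not the package's actual class body):

    from __future__ import annotations
    from typing import TYPE_CHECKING

    if TYPE_CHECKING:  # only evaluated by static type checkers
        from repo.manager import RepoManager

    class Repo:
        def __init__(self, manager: RepoManager, url: str):
            # with postponed evaluation, the annotation stays a string at runtime
            self.manager = manager
            self.url = url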
 
@@ -39,7 +37,7 @@ class Repo:

          Example: "Repo(kind=recipe, local=True, url=file:///path/to/repo)"
          """
-         return f"Repo(kind={self.kind}, url={self.url})"
+         return f"Repo(kind={self.kind()}, url={self.url})"

      __repr__ = __str__

@@ -53,14 +51,14 @@ class Repo:
          c = self.get("repo.kind", unknown_kind)
          return str(c)

-     def add_repo_ref(self, dir: str) -> Repo | None:
+     def add_repo_ref(self, dir: Path) -> Repo | None:
          """
          Adds a new repo-ref to this repository's configuration.
          if new returns the newly added Repo object, if already exists returns None"""

          # if dir is not absolute, we need to resolve it relative to the cwd
-         if not Path(dir).is_absolute():
-             dir = str((Path.cwd() / dir).resolve())
+         if not dir.is_absolute():
+             dir = (Path.cwd() / dir).resolve()

          # Add the ref to this repo
          aot = self.config.get(REPO_REF, None)
@@ -72,11 +70,11 @@
              raise ValueError(f"repo-ref in {self.url} is not an array")

          for t in aot:
-             if "dir" in t and t["dir"] == dir:
+             if "dir" in t and t["dir"] == str(dir):
                  logging.warning(f"Repo ref {dir} already exists - ignoring.")
                  return None # already exists

-         ref = {"dir": dir}
+         ref = {"dir": str(dir)}
          aot.append(ref)

          # Also add the repo to the manager
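
Since add_repo_ref() now takes a pathlib.Path, callers convert up front and the str() coercion happens only at the TOML boundary. A hedged calling sketch (prefs_repo stands in for whichever Repo instance holds the repo-ref list):

    from pathlib import Path

    added = prefs_repo.add_repo_ref(Path("~/astro/raw").expanduser())  # path is illustrative
    if added is None:
        print("repo-ref already present; nothing added")
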
@@ -156,22 +154,22 @@
          for ref in repo_refs:
              self.add_from_ref(ref)

-     def _read_file(self, filepath: str) -> str:
+     def resolve_path(self, filepath: str) -> Path:
          """
-         Read a filepath relative to the base of this repo. Return the contents in a string.
+         Resolve a filepath relative to the base of this repo.

          Args:
              filepath: The path to the file, relative to the repository root.

          Returns:
-             The content of the file as a string.
+             The resolved Path object.
          """
          base_path = self.get_path()
          if base_path is None:
-             raise ValueError("Cannot read files from non-local repositories")
+             raise ValueError("Cannot resolve filepaths for non-local repositories")
          target_path = (base_path / filepath).resolve()

-         # Security check to prevent reading files outside the repo directory.
+         # Security check to prevent accessing files outside the repo directory.
          # FIXME SECURITY - temporarily disabled because I want to let file urls say things like ~/foo.
          # it would false trigger if user homedir path has a symlink in it (such as /home -> /var/home)
          # base_path = PosixPath('/home/kevinh/.config/starbash')
@@ -180,7 +178,21 @@
          # target_path = PosixPath('/var/home/kevinh/.config/starbash/starbash.toml')
          #
          # if base_path not in target_path.parents and target_path != base_path:
-         # raise PermissionError("Attempted to read file outside of repository")
+         # raise PermissionError("Attempted to access file outside of repository")
+
+         return target_path
+
+     def _read_file(self, filepath: str) -> str:
+         """
+         Read a filepath relative to the base of this repo. Return the contents in a string.
+
+         Args:
+             filepath: The path to the file, relative to the repository root.
+
+         Returns:
+             The content of the file as a string.
+         """
+         target_path = self.resolve_path(filepath)

          return target_path.read_text()
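
Splitting resolve_path() out of _read_file() lets callers locate a file inside a repo without reading it; both raise ValueError for non-local repositories. A sketch, assuming repo is a local (file://) Repo:

    cfg_path = repo.resolve_path("starbash.toml")  # absolute Path under the repo root
    text = repo._read_file("starbash.toml")        # same resolution, then read_text()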
 
@@ -280,102 +292,3 @@

          # Set the final value
          current[keys[-1]] = value
-
-
- class RepoManager:
-     """
-     Manages the collection of starbash repositories.
-
-     This class is responsible for finding, loading, and providing an API
-     for searching through known repositories defined in TOML configuration
-     files (like appdefaults.sb.toml).
-     """
-
-     def __init__(self):
-         """
-         Initializes the RepoManager by loading the application default repos.
-         """
-         self.repos = []
-
-         # We expose the app default preferences as a special root repo with a private URL
-         # root_repo = Repo(self, "pkg://starbash-defaults", config=app_defaults)
-         # self.repos.append(root_repo)
-
-         # Most users will just want to read from merged
-         self.merged = MultiDict()
-
-     @property
-     def regular_repos(self) -> list[Repo]:
-         "We exclude certain repo types (preferences, recipe) from the list of repos users care about."
-         return [
-             r
-             for r in self.repos
-             if r.kind() not in ("preferences") and not r.is_scheme("pkg")
-         ]
-
-     def add_repo(self, url: str) -> Repo:
-         logging.debug(f"Adding repo: {url}")
-         r = Repo(self, url)
-         self.repos.append(r)
-
-         # FIXME, generate the merged dict lazily
-         self._add_merged(r)
-
-         # if this new repo has sub-repos, add them too
-         r.add_by_repo_refs()
-
-         return r
-
-     def get(self, key: str, default=None):
-         """
-         Searches for a key across all repositories and returns the first value found.
-         The search is performed in reverse order of repository loading, so the
-         most recently added repositories have precedence.
-
-         Args:
-             key: The dot-separated key to search for (e.g., "repo.kind").
-             default: The value to return if the key is not found in any repo.
-
-         Returns:
-             The found value or the default.
-         """
-         # Iterate in reverse to give precedence to later-loaded repos
-         for repo in reversed(self.repos):
-             value = repo.get(key)
-             if value is not None:
-                 return value
-
-         return default
-
-     def dump(self):
-         """
-         Prints a detailed, multi-line description of the combined top-level keys
-         and values from all repositories, using a MultiDict for aggregation.
-         This is useful for debugging and inspecting the consolidated configuration.
-         """
-
-         combined_config = self.merged
-         logging.info("RepoManager Dump")
-         for key, value in combined_config.items():
-             # tomlkit.items() can return complex types (e.g., ArrayOfTables, Table)
-             # For a debug dump, a simple string representation is usually sufficient.
-             logging.info(f"  %s: %s", key, value)
-
-     def _add_merged(self, repo: Repo) -> None:
-         for key, value in repo.config.items():
-             # if the toml object is an AoT type, monkey patch each element in the array instead
-             if isinstance(value, AoT):
-                 for v in value:
-                     setattr(v, "source", repo)
-             else:
-                 # We monkey patch source into any object that came from a repo, so that users can
-                 # find the source repo (for attribution, URL relative resolution, whatever...)
-                 setattr(value, "source", repo)
-
-             self.merged.add(key, value)
-
-     def __str__(self):
-         lines = [f"RepoManager with {len(self.repos)} repositories:"]
-         for i, repo in enumerate(self.repos):
-             lines.append(f"  [{i}] {repo.url}")
-         return "\n".join(lines)
starbash/__init__.py CHANGED
@@ -1,11 +1,38 @@
+ import datetime
  import logging
+ import os
+ from datetime import datetime

  from .database import Database # re-export for convenience
  from rich.console import Console

- console = Console()
+ # Disable Rich formatting in test environments (pytest or NO_COLOR set)
+ # This prevents ANSI escape codes and line wrapping in test output for more reliable test parsing.
+ _is_test_env = "PYTEST_VERSION" in os.environ
+ console = Console(
+     force_terminal=False if _is_test_env else None,
+     width=999999 if _is_test_env else None,  # Disable line wrapping in tests
+ )

  # Global variable for log filter level (can be changed via --debug flag)
  log_filter_level = logging.INFO

+
+ def to_shortdate(date_iso: str) -> str:
+     """Convert ISO UTC datetime string to local short date string (YYYY-MM-DD).
+
+     Args:
+         date_iso: ISO format datetime string (e.g., "2023-10-15T14:30:00Z")
+
+     Returns:
+         Short date string in YYYY-MM-DD format
+     """
+     try:
+         dt_utc = datetime.fromisoformat(date_iso)
+         dt_local = dt_utc.astimezone()
+         return dt_local.strftime("%Y-%m-%d")
+     except (ValueError, TypeError):
+         return date_iso
+
+
  __all__ = ["Database"]
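
to_shortdate() renders the date in the machine's local timezone and falls back to the raw input when parsing fails. Illustrative calls (exact output depends on the local zone):

    from starbash import to_shortdate

    to_shortdate("2023-10-15T14:30:00+00:00")  # "2023-10-15" (or an adjacent date in some zones)
    to_shortdate("not-a-date")                 # "not-a-date" (returned unchanged)
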
starbash/analytics.py CHANGED
@@ -2,8 +2,9 @@ import logging
  import os

  import starbash
- from starbash import console
+ from starbash import console, _is_test_env
  import starbash.url as url
+ from sentry_sdk.integrations.excepthook import ExcepthookIntegration

  # Default to no analytics/auto crash reports
  analytics_allowed = False
@@ -25,6 +26,9 @@ def analytics_setup(allowed: bool = False, user_email: str | None = None) -> Non
          send_default_pii=True,
          enable_logs=True,
          traces_sample_rate=1.0,
+         disabled_integrations=[
+             ExcepthookIntegration()
+         ], # This line removes the aggressive unhandled exception catcher
          integrations=[
              LoggingIntegration(
                  level=starbash.log_filter_level, # Capture INFO and above as breadcrumbs
@@ -51,11 +55,6 @@ def analytics_shutdown() -> None:
          sentry_sdk.flush()


- def is_running_in_pytest() -> bool:
-     """Detect if code is being run inside pytest."""
-     return "PYTEST_CURRENT_TEST" in os.environ
-
-
  def is_development_environment() -> bool:
      """Detect if running in a development environment."""

@@ -80,7 +79,7 @@ def analytics_exception(exc: Exception) -> bool:
      if analytics_allowed:
          import sentry_sdk

-         if is_running_in_pytest():
+         if _is_test_env:
              report_id = "TESTING-ENVIRONMENT"
          else:
              report_id = sentry_sdk.capture_exception(exc)
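
With the excepthook integration disabled, unhandled exceptions are no longer auto-reported; starbash forwards them explicitly through analytics_exception() instead. A minimal standalone sketch of the same sentry-sdk pattern (placeholder DSN):

    import sentry_sdk
    from sentry_sdk.integrations.excepthook import ExcepthookIntegration

    sentry_sdk.init(
        dsn="https://publicKey@example.ingest.sentry.io/0",  # placeholder
        disabled_integrations=[ExcepthookIntegration()],  # no automatic unhandled-exception capture
    )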