kata-cli 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,298 @@
1
+ """BFS walker from a seed repo.
2
+
3
+ Edge types emitted:
4
+
5
+ * ``import`` — from manifest deps_runtime
6
+ * ``cite`` — from CITATION.md
7
+ * ``vendor`` — from vendored_skills' ``source`` provenance
8
+
9
+ Each edge target name is resolved against repos discovered under
10
+ ``roots`` via :func:`seer.repo.detect.find_repos`. Unresolvable target names
11
+ become "external" nodes (no path, no profile).
12
+
13
+ Per-node errors during the walk are collected in the result's
14
+ ``walk_errors`` field; the walk continues unless ``strict=True``.
15
+ """
16
+
17
+ from __future__ import annotations
18
+
19
+ from collections import deque
20
+ from dataclasses import dataclass, field
21
+ from pathlib import Path
22
+ from typing import Any
23
+
24
+ from seer.cli._errors import SeerError
25
+ from seer.repo.detect import find_repos, resolve_name
26
+ from seer.repo.errors import invalid_depth, path_not_a_directory
27
+ from seer.repo.profile import profile_deep, profile_shallow
28
+
29
+
30
+ def _coerce_depth(depth: int | str | None) -> int | str:
31
+ """Normalise *depth* to a non-negative ``int`` or the sentinel ``"all"``.
32
+
33
+ Raises :func:`seer.repo.errors.invalid_depth` for any value that is
34
+ neither a non-negative integer nor the string ``"all"``.
35
+ """
36
+ if depth is None:
37
+ return 1
38
+ if isinstance(depth, str):
39
+ if depth == "all":
40
+ return "all"
41
+ try:
42
+ n = int(depth)
43
+ except ValueError as exc:
44
+ raise invalid_depth(depth) from exc
45
+ else:
46
+ n = depth
47
+ if n < 0:
48
+ raise invalid_depth(str(n))
49
+ return n
50
+
51
+
52
def _build_index(  # pylint: disable=duplicate-code
    roots: list[Path],
    additional_markers: list[str] | None,
    skip_dirs: list[str] | None,
) -> dict[str, Path]:
    """Map every known repo name to its path (first-write wins on collisions)."""
    index: dict[str, Path] = {}
    # Non-existent roots are silently skipped; discovery is best-effort.
    discovered = (
        repo
        for root in roots
        if root.is_dir()
        for repo in find_repos(
            root,
            additional_markers=additional_markers,
            skip_dirs=skip_dirs,
        )
    )
    for repo in discovered:
        # setdefault keeps the first repo seen for a given name.
        index.setdefault(resolve_name(repo), repo)
    return index
70
+
71
+
72
+ def _strip_version(spec: str) -> str:
73
+ """Return the bare package name from a PEP 508 dependency specifier.
74
+
75
+ Examples::
76
+
77
+ "pkg[extra]>=1.0" -> "pkg"
78
+ "requests>=2,<3" -> "requests"
79
+ "mylib" -> "mylib"
80
+ """
81
+ for sep in ("[", "(", ">=", "<=", ">", "<", "==", "!=", "~="):
82
+ if sep in spec:
83
+ spec = spec.split(sep, 1)[0]
84
+ return spec.strip()
85
+
86
+
87
def _edges_from_profile(name: str, profile: dict[str, Any]) -> list[dict[str, str]]:
    """Build the outgoing edge list for *name* from its shallow profile dict."""
    edges: list[dict[str, str]] = []
    # Each collector appends one edge type from one profile key.
    collectors = (
        (_collect_import_edges, "deps_runtime"),
        (_collect_cite_edges, "citations"),
        (_collect_vendor_edges, "vendored_skills"),
    )
    for collect, key in collectors:
        collect(edges, name, profile.get(key) or [])
    return edges
94
+
95
+
96
def _collect_import_edges(edges: list[dict[str, str]], name: str, deps: list[Any]) -> None:
    """Append ``import`` edges to *edges* for each runtime dependency."""
    for spec in deps:
        bare = _strip_version(spec)
        # Skip specs that reduce to nothing (e.g. empty strings).
        if not bare:
            continue
        edges.append({"from": name, "to": bare, "type": "import", "spec": spec})
102
+
103
+
104
+ def _collect_cite_edges(edges: list[dict[str, str]], name: str, citations: list[Any]) -> None:
105
+ """Append ``cite`` edges to *edges* for each CITATION.md row."""
106
+ for cit in citations:
107
+ repo = cit.get("source_repo")
108
+ if repo:
109
+ edges.append(
110
+ {
111
+ "from": name,
112
+ "to": str(repo),
113
+ "type": "cite",
114
+ "spec": str(cit.get("sha", "")),
115
+ }
116
+ )
117
+
118
+
119
+ def _collect_vendor_edges(edges: list[dict[str, str]], name: str, skills: list[Any]) -> None:
120
+ """Append ``vendor`` edges to *edges* for each vendored skill with a source."""
121
+ for skill in skills:
122
+ source = skill.get("source")
123
+ if source:
124
+ edges.append(
125
+ {
126
+ "from": name,
127
+ "to": str(source),
128
+ "type": "vendor",
129
+ "spec": str(skill.get("name", "")),
130
+ }
131
+ )
132
+
133
+
134
@dataclass
class _ProfileOpts:
    """Options that control how nodes are profiled during the BFS walk."""

    # When True, attach a "profile" key to each internal node dict.
    with_profile: bool = False
    # "shallow" or "deep" — selects which profiler fills the "profile" key.
    depth_profile: str = "shallow"
    # When True, per-node SeerErrors re-raise instead of being collected.
    strict: bool = False
141
+
142
+
143
@dataclass
class _BfsState:
    """Mutable BFS accumulator: repo index, depth budget, queue, and results."""

    # Resolved repo name -> local path; names missing here become external nodes.
    index: dict[str, Path]
    # Hop budget: a non-negative int, or the sentinel "all" for unbounded.
    depth_n: int | str
    # Emitted node dicts (internal and external), in visit order.
    nodes: list[dict[str, Any]] = field(default_factory=list)
    # Emitted typed edge dicts.
    edges: list[dict[str, str]] = field(default_factory=list)
    # Per-node error records collected when strict mode is off.
    walk_errors: list[dict[str, str]] = field(default_factory=list)
    # Names already enqueued or expanded (dedupe guard).
    visited: set[str] = field(default_factory=set)
    # Pending (name, hop) pairs awaiting expansion.
    queue: deque[tuple[str, int]] = field(default_factory=deque)
154
+
155
+
156
def _profile_node(path: Path, opts: _ProfileOpts) -> tuple[dict[str, Any], dict[str, Any]]:
    """Return ``(node_extra, shallow_profile)`` for an internal node.

    The shallow profile is always computed (edges are derived from it); the
    deep profiler runs only when requested via *opts*.
    """
    shallow = profile_shallow(path)
    extra: dict[str, Any] = {"version": shallow.get("version", "")}
    if opts.with_profile:
        if opts.depth_profile == "deep":
            extra["profile"] = profile_deep(path)
        else:
            extra["profile"] = shallow
    return extra, shallow
163
+
164
+
165
+ def _enqueue_targets(
166
+ outgoing: list[dict[str, str]],
167
+ state: _BfsState,
168
+ current_hop: int,
169
+ ) -> None:
170
+ """Push unvisited edge targets onto *state.queue* when the depth budget allows."""
171
+ within_budget = state.depth_n == "all" or current_hop < int(
172
+ state.depth_n
173
+ ) # type: ignore[arg-type]
174
+ if not within_budget:
175
+ return
176
+ for edge in outgoing:
177
+ target = edge["to"]
178
+ if target not in state.visited:
179
+ state.visited.add(target)
180
+ state.queue.append((target, current_hop + 1))
181
+
182
+
183
def _expand_node(
    current_name: str,
    path: Path,
    current_hop: int,
    state: _BfsState,
    opts: _ProfileOpts,
) -> dict[str, Any]:
    """Profile *path*, collect edges, enqueue targets; return the node dict."""
    node: dict[str, Any] = {"id": current_name, "path": str(path), "external": False}
    try:
        extra, shallow = _profile_node(path, opts)
        node.update(extra)
        outgoing = _edges_from_profile(current_name, shallow)
        state.edges.extend(outgoing)
        _enqueue_targets(outgoing, state, current_hop)
    except SeerError as err:
        # strict mode surfaces the first failure; otherwise record and move on.
        if opts.strict:
            raise
        record = {
            "node": f"{current_name} ({path})",
            "reason": err.reason or err.message,
            "remediation": err.remediation,
        }
        state.walk_errors.append(record)
    return node
209
+
210
+
211
def _run_bfs(
    seed_name: str,
    index: dict[str, Path],
    depth_n: int | str,
    opts: _ProfileOpts,
) -> _BfsState:
    """Execute the BFS from *seed_name* and return the populated state."""
    state = _BfsState(index=index, depth_n=depth_n)
    state.visited.add(seed_name)
    state.queue.append((seed_name, 0))

    while state.queue:
        name, hop = state.queue.popleft()
        repo_path = state.index.get(name)
        if repo_path is None:
            # Unresolvable target: record an external node, never expand it.
            state.nodes.append({"id": name, "path": None, "external": True})
        else:
            state.nodes.append(_expand_node(name, repo_path, hop, state, opts))

    return state
232
+
233
+
234
def walk(  # pylint: disable=too-many-arguments
    *,
    seed: Path,
    roots: list[Path],
    depth: int | str = 1,
    with_profile: bool = False,
    depth_profile: str = "shallow",
    additional_markers: list[str] | None = None,
    skip_dirs: list[str] | None = None,
    strict: bool = False,
) -> dict[str, Any]:
    """BFS-walk from *seed* outward, emitting nodes + typed edges.

    Parameters
    ----------
    seed:
        The root repo to start from (must be an existing directory).
    roots:
        Workspace roots scanned by :func:`seer.repo.detect.find_repos` to
        resolve edge targets to local paths.
    depth:
        How many hops to follow. ``1`` visits direct neighbours only;
        ``"all"`` walks the full connected component.
    with_profile:
        When *True*, attach a ``"profile"`` key to each internal node.
    depth_profile:
        ``"shallow"`` (default) or ``"deep"``; controls which profiler is
        used when *with_profile* is *True*.
    additional_markers:
        Extra filenames treated as repo markers during discovery.
    skip_dirs:
        Directory names skipped during discovery.
    strict:
        When *True*, a per-node :class:`SeerError` re-raises immediately
        instead of being collected in ``walk_errors``.

    Returns
    -------
    dict with keys:
        ``seed``, ``seed_name``, ``depth``, ``nodes``, ``edges``,
        ``walk_errors``.
    """
    if not seed.is_dir():
        raise path_not_a_directory(seed)
    hops = _coerce_depth(depth)

    # The seed is always resolvable, even when it lives outside every root.
    index = _build_index(roots, additional_markers, skip_dirs)
    seed_name = resolve_name(seed)
    index.setdefault(seed_name, seed)

    profile_opts = _ProfileOpts(
        with_profile=with_profile,
        depth_profile=depth_profile,
        strict=strict,
    )
    state = _run_bfs(seed_name, index, hops, profile_opts)

    return {
        "seed": str(seed),
        "seed_name": seed_name,
        "depth": hops,
        "nodes": state.nodes,
        "edges": state.edges,
        "walk_errors": state.walk_errors,
    }
seer/repo/detect.py ADDED
@@ -0,0 +1,86 @@
1
+ """Generic repo detection + name resolution.
2
+
3
+ A directory is a "repo of interest" when ANY of the following is true:
4
+
5
+ 1. it contains ``pyproject.toml``
6
+ 2. it contains ``.claude/skills/``
7
+ 3. it contains any file listed in ``additional_markers``
8
+
9
+ Name resolution prefers, in order:
10
+
11
+ 1. ``[project].name`` from ``pyproject.toml``
12
+ 2. ``agents[0].suffix`` (or ``.nick``) from ``culture.yaml``
13
+ 3. the directory basename
14
+ """
15
+
16
+ from __future__ import annotations
17
+
18
+ import tomllib
19
+ from pathlib import Path
20
+
21
+ import yaml
22
+
23
+
24
def is_repo(path: Path, additional_markers: list[str] | None = None) -> bool:
    """Return True if ``path`` qualifies as a repo of interest."""
    if not path.is_dir():
        return False
    # Built-in markers: a Python manifest, or a skills directory.
    if (path / "pyproject.toml").exists() or (path / ".claude" / "skills").is_dir():
        return True
    # Caller-supplied markers extend the built-in set.
    return any((path / marker).exists() for marker in additional_markers or [])
36
+
37
+
38
def find_repos(
    root: Path,
    *,
    additional_markers: list[str] | None = None,
    skip_dirs: list[str] | None = None,
) -> list[Path]:
    """Return the sorted list of child directories under ``root`` that qualify as repos."""
    excluded = set(skip_dirs or [])
    # Only immediate children are considered; no recursive descent.
    candidates = (
        child
        for child in root.iterdir()
        if child.is_dir() and child.name not in excluded
    )
    matches = [child for child in candidates if is_repo(child, additional_markers)]
    matches.sort(key=lambda p: p.name)
    return matches
53
+
54
+
55
+ def _name_from_pyproject(path: Path) -> str | None:
56
+ """Return ``[project].name`` from *path*'s ``pyproject.toml`` if present and parseable."""
57
+ pyproject = path / "pyproject.toml"
58
+ if not pyproject.exists():
59
+ return None
60
+ try:
61
+ data = tomllib.loads(pyproject.read_text(encoding="utf-8"))
62
+ except (tomllib.TOMLDecodeError, OSError):
63
+ return None
64
+ name = (data.get("project") or {}).get("name")
65
+ return str(name) if name else None
66
+
67
+
68
+ def _name_from_culture_yaml(path: Path) -> str | None:
69
+ """Return the first agent's nick (``suffix`` or ``nick``) from ``culture.yaml`` if any."""
70
+ culture_yaml = path / "culture.yaml"
71
+ if not culture_yaml.exists():
72
+ return None
73
+ try:
74
+ data = yaml.safe_load(culture_yaml.read_text(encoding="utf-8")) or {}
75
+ except (yaml.YAMLError, OSError):
76
+ return None
77
+ agents = data.get("agents", [])
78
+ if not agents or not isinstance(agents[0], dict):
79
+ return None
80
+ nick = agents[0].get("suffix") or agents[0].get("nick")
81
+ return str(nick) if nick else None
82
+
83
+
84
def resolve_name(path: Path) -> str:
    """Return the repo's preferred name (pyproject → culture.yaml → basename)."""
    for resolver in (_name_from_pyproject, _name_from_culture_yaml):
        name = resolver(path)
        if name:
            return name
    return path.name
seer/repo/errors.py ADDED
@@ -0,0 +1,81 @@
1
+ """Domain-specific :class:`SeerError` factories for seer.repo.
2
+
3
+ Centralising error construction here keeps message / reason / remediation
4
+ copy uniform across every raise site.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from pathlib import Path
10
+
11
+ from seer.cli._errors import EXIT_ENV_ERROR, EXIT_USER_ERROR, SeerError
12
+
13
+
14
def manifest_not_found(path: Path) -> SeerError:
    """Return a SeerError for a missing pyproject.toml manifest."""
    # user_error: the caller pointed at a directory without a recognized manifest.
    reason = (
        "No recognized manifest at the given path. Looked for "
        "pyproject.toml, .claude/skills/, and any configured "
        "additional_markers."
    )
    remediation = (
        "Confirm the path points to a repo root, not a subdirectory. "
        "To treat this directory as a repo regardless, add a marker "
        "via .claude/skills/repo-map/config.json → additional_markers."
    )
    return SeerError(
        code=EXIT_USER_ERROR,
        kind="user_error",
        message=f"Cannot find pyproject.toml in {path}",
        reason=reason,
        remediation=remediation,
    )
31
+
32
+
33
def malformed_pyproject(path: Path, detail: str) -> SeerError:
    """Return a SeerError for a pyproject.toml that exists but won't parse."""
    # env_error: the file is on disk but its TOML is broken.
    remediation = (
        f'Validate with `python3 -c "import tomllib; '
        f"tomllib.load(open('{path}', 'rb'))\"` or fix the file."
    )
    return SeerError(
        code=EXIT_ENV_ERROR,
        kind="env_error",
        message=f"Cannot parse {path}",
        reason=f"TOML syntax error: {detail}",
        remediation=remediation,
    )
45
+
46
+
47
def invalid_depth(value: str) -> SeerError:
    """Return a SeerError for a `--depth` value that is neither a non-negative int nor 'all'."""
    # user_error: bad CLI input; the remediation lists the accepted forms.
    message = f"Invalid --depth value: '{value}'"
    return SeerError(
        code=EXIT_USER_ERROR,
        kind="user_error",
        message=message,
        reason="--depth must be a non-negative integer or 'all'.",
        remediation="Try `--depth 1` (default), `--depth 3`, or `--depth all`.",
    )
56
+
57
+
58
def path_not_a_directory(path: Path) -> SeerError:
    """Return a SeerError for a path that doesn't exist or isn't a directory."""
    # user_error: covers both "does not exist" and "exists but is a file".
    message = f"Path does not exist or is not a directory: {path}"
    return SeerError(
        code=EXIT_USER_ERROR,
        kind="user_error",
        message=message,
        reason="The given path was not a directory on disk.",
        remediation="Pass an absolute path to an existing directory.",
    )
67
+
68
+
69
def seed_not_under_root(seed: Path, roots: list[Path]) -> SeerError:
    """Return a SeerError for a seed repo that doesn't live under any configured root."""
    joined_roots = ", ".join(str(r) for r in roots)
    remediation = (
        "Pass --root, or add the seed's parent to "
        ".claude/skills/repo-map/config.json `roots`."
    )
    return SeerError(
        code=EXIT_USER_ERROR,
        kind="user_error",
        message=f"Seed repo {seed} is not under any configured root",
        reason=f"Edge resolution requires the seed to live in a configured root: {joined_roots}.",
        remediation=remediation,
    )
seer/repo/graph.py ADDED
@@ -0,0 +1,182 @@
1
+ """Multi-root workspace view: every repo found + every edge between them.
2
+
3
+ This is the "show me what's in this workspace" verb, distinct from
4
+ :func:`seer.repo.connections.walk` which traverses outward from a single seed.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import hashlib
10
+ import re
11
+ from pathlib import Path
12
+ from typing import Any
13
+
14
+ from seer.cli._errors import SeerError
15
+ from seer.repo.connections import _edges_from_profile
16
+ from seer.repo.detect import find_repos, resolve_name
17
+ from seer.repo.profile import profile_shallow
18
+
19
+ # ASCII flag is essential: without it, ``\W`` in Python 3 excludes Unicode
20
+ # letters/digits from the "unsafe" set, which would leave non-ASCII chars
21
+ # in Mermaid node ids. ``re.ASCII`` collapses ``\W`` to ``[^A-Za-z0-9_]``.
22
+ _SAFE_RE = re.compile(r"\W", re.ASCII)
23
+
24
+
25
def _discover_repos(
    roots: list[Path],
    additional_markers: list[str] | None,
    skip_dirs: list[str] | None,
) -> dict[str, Path]:
    """Union-discover every repo under *roots*, mapped by resolved name."""
    name_to_path: dict[str, Path] = {}
    # Missing roots are skipped rather than raising; discovery is best-effort.
    for root in (r for r in roots if r.is_dir()):
        found = find_repos(
            root,
            additional_markers=additional_markers,
            skip_dirs=skip_dirs,
        )
        for repo in found:
            # First root to claim a name wins.
            name_to_path.setdefault(resolve_name(repo), repo)
    return name_to_path
42
+
43
+
44
def _profile_or_walk_error(
    name: str,
    path: Path,
    *,
    strict: bool,
    walk_errors: list[dict[str, str]],
) -> dict[str, Any]:
    """Return ``profile_shallow(path)`` or inline the error and return ``{}``.

    Re-raises when ``strict`` is True.
    """
    try:
        return profile_shallow(path)
    except SeerError as err:
        if strict:
            raise
        record = {
            "node": f"{name} ({path})",
            "reason": err.reason or err.message,
            "remediation": err.remediation,
        }
        walk_errors.append(record)
    # Empty profile: the node still appears in the graph, just without data.
    return {}
68
+
69
+
70
def _collect_nodes_and_edges(
    name_to_path: dict[str, Path],
    strict: bool,
) -> tuple[list[dict[str, Any]], list[dict[str, str]], set[str], list[dict[str, str]]]:
    """Build the (nodes, edges, externals, walk_errors) tuple for the workspace."""
    nodes: list[dict[str, Any]] = []
    edges: list[dict[str, str]] = []
    external_seen: set[str] = set()
    walk_errors: list[dict[str, str]] = []

    # Deterministic output: iterate names in sorted order.
    for name, path in sorted(name_to_path.items()):
        profile = _profile_or_walk_error(name, path, strict=strict, walk_errors=walk_errors)
        nodes.append(
            {
                "id": name,
                "path": str(path),
                "external": False,
                "version": profile.get("version", ""),
            }
        )
        outgoing = _edges_from_profile(name, profile)
        edges.extend(outgoing)
        # Targets that resolve to no local repo are flagged as external.
        for edge in outgoing:
            if edge["to"] not in name_to_path:
                external_seen.add(edge["to"])
    return nodes, edges, external_seen, walk_errors
96
+
97
+
98
def build_graph(
    roots: list[Path],
    *,
    additional_markers: list[str] | None = None,
    skip_dirs: list[str] | None = None,
    strict: bool = False,
) -> dict[str, Any]:
    """Build a workspace graph over the given roots.

    Per-node profiling errors are collected in ``walk_errors``; the build
    continues unless ``strict=True``.

    Parameters
    ----------
    roots:
        One or more workspace root directories. Each is scanned with
        :func:`seer.repo.detect.find_repos`; results are unioned.
    additional_markers:
        Extra filenames treated as repo markers during discovery.
    skip_dirs:
        Directory names skipped during discovery.
    strict:
        When ``True``, re-raise the first per-node :class:`SeerError`
        instead of inlining it into ``walk_errors``.

    Returns
    -------
    dict with keys:
        ``roots``, ``nodes``, ``edges``, ``walk_errors``, ``mermaid``.
    """
    name_to_path = _discover_repos(roots, additional_markers, skip_dirs)
    nodes, edges, externals, walk_errors = _collect_nodes_and_edges(name_to_path, strict)
    # External targets become path-less nodes, appended after internal ones.
    nodes.extend({"id": ext, "path": None, "external": True} for ext in sorted(externals))

    return {
        "roots": [str(r) for r in roots],
        "nodes": nodes,
        "edges": edges,
        "walk_errors": walk_errors,
        "mermaid": _render_mermaid(nodes, edges),
    }
140
+
141
+
142
def _render_mermaid(
    _nodes: list[dict[str, Any]],
    edges: list[dict[str, str]],
) -> str:
    """Return a Mermaid ``graph TD`` source for the workspace."""
    out = ["graph TD"]
    for edge in edges:
        kind = edge.get("type") or ""
        spec = edge.get("spec") or ""
        # Edge labels read "|type spec|"; untyped edges carry no label.
        if kind:
            detail = f" {spec}" if spec else ""
            label = f"|{kind}{detail}|"
        else:
            label = ""
        out.append(f" {_safe(edge['from'])} -->{label} {_safe(edge['to'])}")
    return "\n".join(out) + "\n"
157
+
158
+
159
def _safe(name: str) -> str:
    """Return a Mermaid-safe node id for ``name``.

    Mermaid identifiers must match ``[A-Za-z_][A-Za-z0-9_]*``. Every
    character outside ``[A-Za-z0-9_]`` becomes ``_`` (``re.ASCII`` keeps
    non-ASCII letters in the "unsafe" set). Whenever sanitisation changed
    the value, a short stable sha256 prefix of the original is appended so
    distinct inputs that would collapse to one id (``a-b`` vs ``a_b``)
    stay distinct. Empty or digit-leading results get an ``n_`` prefix
    because Mermaid forbids digit-leading identifiers.
    """
    if not name:
        return "n_"
    cleaned = re.sub(r"\W", "_", name, flags=re.ASCII)
    if cleaned != name:
        tag = hashlib.sha256(name.encode("utf-8")).hexdigest()[:6]
        cleaned = f"{cleaned}_{tag}"
    if cleaned[0].isdigit():
        cleaned = "n_" + cleaned
    return cleaned
seer/repo/manifest.py ADDED
@@ -0,0 +1,36 @@
1
+ """pyproject.toml reader with structured-error mapping."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import tomllib
6
+ from pathlib import Path
7
+
8
+ from seer.repo.errors import malformed_pyproject, manifest_not_found
9
+
10
+
11
+ def read_pyproject(repo: Path) -> dict[str, object]:
12
+ """Parse ``repo/pyproject.toml`` into a stable dict.
13
+
14
+ Returns a dict with keys ``name``, ``version``, ``entry_points``,
15
+ ``deps_runtime``, ``deps_dev``. Raises :class:`SeerError` (user_error)
16
+ when the file is missing, or (env_error) when it cannot be parsed.
17
+ """
18
+ pyproject = repo / "pyproject.toml"
19
+ if not pyproject.exists():
20
+ raise manifest_not_found(repo)
21
+ try:
22
+ data = tomllib.loads(pyproject.read_text(encoding="utf-8"))
23
+ except tomllib.TOMLDecodeError as e:
24
+ raise malformed_pyproject(pyproject, str(e)) from e
25
+
26
+ project = data.get("project", {}) or {}
27
+ scripts = project.get("scripts", {}) or {}
28
+ dep_groups = data.get("dependency-groups", {}) or {}
29
+
30
+ return {
31
+ "name": project.get("name") or repo.name,
32
+ "version": project.get("version", ""),
33
+ "entry_points": dict(scripts),
34
+ "deps_runtime": list(project.get("dependencies", []) or []),
35
+ "deps_dev": list(dep_groups.get("dev", []) or []),
36
+ }