elspais 0.11.2__py3-none-any.whl → 0.43.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- elspais/__init__.py +1 -10
- elspais/{sponsors/__init__.py → associates.py} +102 -56
- elspais/cli.py +366 -69
- elspais/commands/__init__.py +9 -3
- elspais/commands/analyze.py +118 -169
- elspais/commands/changed.py +12 -23
- elspais/commands/config_cmd.py +10 -13
- elspais/commands/edit.py +33 -13
- elspais/commands/example_cmd.py +319 -0
- elspais/commands/hash_cmd.py +161 -183
- elspais/commands/health.py +1177 -0
- elspais/commands/index.py +98 -115
- elspais/commands/init.py +99 -22
- elspais/commands/reformat_cmd.py +41 -433
- elspais/commands/rules_cmd.py +2 -2
- elspais/commands/trace.py +443 -324
- elspais/commands/validate.py +193 -411
- elspais/config/__init__.py +799 -5
- elspais/{core/content_rules.py → content_rules.py} +20 -2
- elspais/docs/cli/assertions.md +67 -0
- elspais/docs/cli/commands.md +304 -0
- elspais/docs/cli/config.md +262 -0
- elspais/docs/cli/format.md +66 -0
- elspais/docs/cli/git.md +45 -0
- elspais/docs/cli/health.md +190 -0
- elspais/docs/cli/hierarchy.md +60 -0
- elspais/docs/cli/ignore.md +72 -0
- elspais/docs/cli/mcp.md +245 -0
- elspais/docs/cli/quickstart.md +58 -0
- elspais/docs/cli/traceability.md +89 -0
- elspais/docs/cli/validation.md +96 -0
- elspais/graph/GraphNode.py +383 -0
- elspais/graph/__init__.py +40 -0
- elspais/graph/annotators.py +927 -0
- elspais/graph/builder.py +1886 -0
- elspais/graph/deserializer.py +248 -0
- elspais/graph/factory.py +284 -0
- elspais/graph/metrics.py +127 -0
- elspais/graph/mutations.py +161 -0
- elspais/graph/parsers/__init__.py +156 -0
- elspais/graph/parsers/code.py +213 -0
- elspais/graph/parsers/comments.py +112 -0
- elspais/graph/parsers/config_helpers.py +29 -0
- elspais/graph/parsers/heredocs.py +225 -0
- elspais/graph/parsers/journey.py +131 -0
- elspais/graph/parsers/remainder.py +79 -0
- elspais/graph/parsers/requirement.py +347 -0
- elspais/graph/parsers/results/__init__.py +6 -0
- elspais/graph/parsers/results/junit_xml.py +229 -0
- elspais/graph/parsers/results/pytest_json.py +313 -0
- elspais/graph/parsers/test.py +305 -0
- elspais/graph/relations.py +78 -0
- elspais/graph/serialize.py +216 -0
- elspais/html/__init__.py +8 -0
- elspais/html/generator.py +731 -0
- elspais/html/templates/trace_view.html.j2 +2151 -0
- elspais/mcp/__init__.py +45 -29
- elspais/mcp/__main__.py +5 -1
- elspais/mcp/file_mutations.py +138 -0
- elspais/mcp/server.py +1998 -244
- elspais/testing/__init__.py +3 -3
- elspais/testing/config.py +3 -0
- elspais/testing/mapper.py +1 -1
- elspais/testing/scanner.py +301 -12
- elspais/utilities/__init__.py +1 -0
- elspais/utilities/docs_loader.py +115 -0
- elspais/utilities/git.py +607 -0
- elspais/{core → utilities}/hasher.py +8 -22
- elspais/utilities/md_renderer.py +189 -0
- elspais/{core → utilities}/patterns.py +56 -51
- elspais/utilities/reference_config.py +626 -0
- elspais/validation/__init__.py +19 -0
- elspais/validation/format.py +264 -0
- {elspais-0.11.2.dist-info → elspais-0.43.5.dist-info}/METADATA +7 -4
- elspais-0.43.5.dist-info/RECORD +80 -0
- elspais/config/defaults.py +0 -179
- elspais/config/loader.py +0 -494
- elspais/core/__init__.py +0 -21
- elspais/core/git.py +0 -346
- elspais/core/models.py +0 -320
- elspais/core/parser.py +0 -639
- elspais/core/rules.py +0 -509
- elspais/mcp/context.py +0 -172
- elspais/mcp/serializers.py +0 -112
- elspais/reformat/__init__.py +0 -50
- elspais/reformat/detector.py +0 -112
- elspais/reformat/hierarchy.py +0 -247
- elspais/reformat/line_breaks.py +0 -218
- elspais/reformat/prompts.py +0 -133
- elspais/reformat/transformer.py +0 -266
- elspais/trace_view/__init__.py +0 -55
- elspais/trace_view/coverage.py +0 -183
- elspais/trace_view/generators/__init__.py +0 -12
- elspais/trace_view/generators/base.py +0 -334
- elspais/trace_view/generators/csv.py +0 -118
- elspais/trace_view/generators/markdown.py +0 -170
- elspais/trace_view/html/__init__.py +0 -33
- elspais/trace_view/html/generator.py +0 -1140
- elspais/trace_view/html/templates/base.html +0 -283
- elspais/trace_view/html/templates/components/code_viewer_modal.html +0 -14
- elspais/trace_view/html/templates/components/file_picker_modal.html +0 -20
- elspais/trace_view/html/templates/components/legend_modal.html +0 -69
- elspais/trace_view/html/templates/components/review_panel.html +0 -118
- elspais/trace_view/html/templates/partials/review/help/help-panel.json +0 -244
- elspais/trace_view/html/templates/partials/review/help/onboarding.json +0 -77
- elspais/trace_view/html/templates/partials/review/help/tooltips.json +0 -237
- elspais/trace_view/html/templates/partials/review/review-comments.js +0 -928
- elspais/trace_view/html/templates/partials/review/review-data.js +0 -961
- elspais/trace_view/html/templates/partials/review/review-help.js +0 -679
- elspais/trace_view/html/templates/partials/review/review-init.js +0 -177
- elspais/trace_view/html/templates/partials/review/review-line-numbers.js +0 -429
- elspais/trace_view/html/templates/partials/review/review-packages.js +0 -1029
- elspais/trace_view/html/templates/partials/review/review-position.js +0 -540
- elspais/trace_view/html/templates/partials/review/review-resize.js +0 -115
- elspais/trace_view/html/templates/partials/review/review-status.js +0 -659
- elspais/trace_view/html/templates/partials/review/review-sync.js +0 -992
- elspais/trace_view/html/templates/partials/review-styles.css +0 -2238
- elspais/trace_view/html/templates/partials/scripts.js +0 -1741
- elspais/trace_view/html/templates/partials/styles.css +0 -1756
- elspais/trace_view/models.py +0 -378
- elspais/trace_view/review/__init__.py +0 -63
- elspais/trace_view/review/branches.py +0 -1142
- elspais/trace_view/review/models.py +0 -1200
- elspais/trace_view/review/position.py +0 -591
- elspais/trace_view/review/server.py +0 -1032
- elspais/trace_view/review/status.py +0 -455
- elspais/trace_view/review/storage.py +0 -1343
- elspais/trace_view/scanning.py +0 -213
- elspais/trace_view/specs/README.md +0 -84
- elspais/trace_view/specs/tv-d00001-template-architecture.md +0 -36
- elspais/trace_view/specs/tv-d00002-css-extraction.md +0 -37
- elspais/trace_view/specs/tv-d00003-js-extraction.md +0 -43
- elspais/trace_view/specs/tv-d00004-build-embedding.md +0 -40
- elspais/trace_view/specs/tv-d00005-test-format.md +0 -78
- elspais/trace_view/specs/tv-d00010-review-data-models.md +0 -33
- elspais/trace_view/specs/tv-d00011-review-storage.md +0 -33
- elspais/trace_view/specs/tv-d00012-position-resolution.md +0 -33
- elspais/trace_view/specs/tv-d00013-git-branches.md +0 -31
- elspais/trace_view/specs/tv-d00014-review-api-server.md +0 -31
- elspais/trace_view/specs/tv-d00015-status-modifier.md +0 -27
- elspais/trace_view/specs/tv-d00016-js-integration.md +0 -33
- elspais/trace_view/specs/tv-p00001-html-generator.md +0 -33
- elspais/trace_view/specs/tv-p00002-review-system.md +0 -29
- elspais-0.11.2.dist-info/RECORD +0 -101
- {elspais-0.11.2.dist-info → elspais-0.43.5.dist-info}/WHEEL +0 -0
- {elspais-0.11.2.dist-info → elspais-0.43.5.dist-info}/entry_points.txt +0 -0
- {elspais-0.11.2.dist-info → elspais-0.43.5.dist-info}/licenses/LICENSE +0 -0
elspais/graph/builder.py
ADDED
|
@@ -0,0 +1,1886 @@
|
|
|
1
|
+
"""Graph Builder - Constructs TraceGraph from parsed content.
|
|
2
|
+
|
|
3
|
+
This module provides the builder pattern for constructing a complete
|
|
4
|
+
traceability graph from parsed content.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from __future__ import annotations
|
|
8
|
+
|
|
9
|
+
import re
|
|
10
|
+
from dataclasses import dataclass, field
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Iterator
|
|
13
|
+
|
|
14
|
+
from elspais.graph.GraphNode import GraphNode, NodeKind, SourceLocation
|
|
15
|
+
from elspais.graph.mutations import BrokenReference, MutationEntry, MutationLog
|
|
16
|
+
from elspais.graph.parsers import ParsedContent
|
|
17
|
+
from elspais.graph.relations import EdgeKind
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@dataclass
|
|
21
|
+
class TraceGraph:
|
|
22
|
+
"""Container for the complete traceability graph.
|
|
23
|
+
|
|
24
|
+
Provides indexed access to all nodes and methods for graph-wide
|
|
25
|
+
operations. Uses iterator-only API for traversal.
|
|
26
|
+
|
|
27
|
+
Attributes:
|
|
28
|
+
repo_root: Path to the repository root.
|
|
29
|
+
"""
|
|
30
|
+
|
|
31
|
+
repo_root: Path = field(default_factory=Path.cwd)
|
|
32
|
+
|
|
33
|
+
# Internal storage (prefixed) - excluded from constructor
|
|
34
|
+
_roots: list[GraphNode] = field(default_factory=list, init=False)
|
|
35
|
+
_index: dict[str, GraphNode] = field(default_factory=dict, init=False, repr=False)
|
|
36
|
+
|
|
37
|
+
# Detection: orphans and broken references (populated at build time)
|
|
38
|
+
_orphaned_ids: set[str] = field(default_factory=set, init=False)
|
|
39
|
+
_broken_references: list[BrokenReference] = field(default_factory=list, init=False)
|
|
40
|
+
|
|
41
|
+
# Mutation infrastructure
|
|
42
|
+
_mutation_log: MutationLog = field(default_factory=MutationLog, init=False)
|
|
43
|
+
_deleted_nodes: list[GraphNode] = field(default_factory=list, init=False)
|
|
44
|
+
|
|
45
|
+
def iter_roots(self) -> Iterator[GraphNode]:
|
|
46
|
+
"""Iterate root nodes."""
|
|
47
|
+
yield from self._roots
|
|
48
|
+
|
|
49
|
+
def root_count(self) -> int:
|
|
50
|
+
"""Return number of root nodes."""
|
|
51
|
+
return len(self._roots)
|
|
52
|
+
|
|
53
|
+
def has_root(self, node_id: str) -> bool:
|
|
54
|
+
"""Check if a node ID is a root."""
|
|
55
|
+
return any(r.id == node_id for r in self._roots)
|
|
56
|
+
|
|
57
|
+
def find_by_id(self, node_id: str) -> GraphNode | None:
|
|
58
|
+
"""Find node by ID.
|
|
59
|
+
|
|
60
|
+
Args:
|
|
61
|
+
node_id: The node ID to find.
|
|
62
|
+
|
|
63
|
+
Returns:
|
|
64
|
+
The matching GraphNode, or None if not found.
|
|
65
|
+
"""
|
|
66
|
+
return self._index.get(node_id)
|
|
67
|
+
|
|
68
|
+
def all_nodes(self) -> Iterator[GraphNode]:
|
|
69
|
+
"""Iterate ALL nodes in graph, including orphans.
|
|
70
|
+
|
|
71
|
+
Yields:
|
|
72
|
+
All GraphNode instances in the graph.
|
|
73
|
+
"""
|
|
74
|
+
yield from self._index.values()
|
|
75
|
+
|
|
76
|
+
def all_connected_nodes(self, order: str = "pre") -> Iterator[GraphNode]:
|
|
77
|
+
"""Iterate nodes reachable from roots (excludes orphans).
|
|
78
|
+
|
|
79
|
+
Args:
|
|
80
|
+
order: Traversal order ("pre", "post", "level").
|
|
81
|
+
|
|
82
|
+
Yields:
|
|
83
|
+
GraphNode instances reachable from root nodes.
|
|
84
|
+
"""
|
|
85
|
+
for root in self._roots:
|
|
86
|
+
yield from root.walk(order)
|
|
87
|
+
|
|
88
|
+
def nodes_by_kind(self, kind: NodeKind) -> Iterator[GraphNode]:
|
|
89
|
+
"""Get all nodes of a specific kind.
|
|
90
|
+
|
|
91
|
+
Args:
|
|
92
|
+
kind: The NodeKind to filter by.
|
|
93
|
+
|
|
94
|
+
Yields:
|
|
95
|
+
GraphNode instances of the specified kind.
|
|
96
|
+
"""
|
|
97
|
+
for node in self._index.values():
|
|
98
|
+
if node.kind == kind:
|
|
99
|
+
yield node
|
|
100
|
+
|
|
101
|
+
def node_count(self) -> int:
|
|
102
|
+
"""Return total number of nodes in the graph."""
|
|
103
|
+
return len(self._index)
|
|
104
|
+
|
|
105
|
+
def clone(self) -> TraceGraph:
|
|
106
|
+
"""Create a deep copy of this graph.
|
|
107
|
+
|
|
108
|
+
All nodes, edges, and relationships are cloned. The new graph
|
|
109
|
+
is completely independent - mutations to one do not affect the other.
|
|
110
|
+
|
|
111
|
+
Returns:
|
|
112
|
+
A new TraceGraph with all data deep copied.
|
|
113
|
+
"""
|
|
114
|
+
import copy
|
|
115
|
+
|
|
116
|
+
return copy.deepcopy(self)
|
|
117
|
+
|
|
118
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
119
|
+
# Detection API: Orphans and Broken References
|
|
120
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
121
|
+
|
|
122
|
+
def orphaned_nodes(self) -> Iterator[GraphNode]:
|
|
123
|
+
"""Iterate over orphaned nodes (nodes without parents).
|
|
124
|
+
|
|
125
|
+
Orphans are nodes that were never linked to a parent during
|
|
126
|
+
graph construction. This excludes root nodes which are intentionally
|
|
127
|
+
parentless.
|
|
128
|
+
|
|
129
|
+
Yields:
|
|
130
|
+
GraphNode instances that are orphaned.
|
|
131
|
+
"""
|
|
132
|
+
for node_id in self._orphaned_ids:
|
|
133
|
+
node = self._index.get(node_id)
|
|
134
|
+
if node:
|
|
135
|
+
yield node
|
|
136
|
+
|
|
137
|
+
def has_orphans(self) -> bool:
|
|
138
|
+
"""Check if the graph has orphaned nodes."""
|
|
139
|
+
return len(self._orphaned_ids) > 0
|
|
140
|
+
|
|
141
|
+
def orphan_count(self) -> int:
|
|
142
|
+
"""Return the number of orphaned nodes."""
|
|
143
|
+
return len(self._orphaned_ids)
|
|
144
|
+
|
|
145
|
+
def broken_references(self) -> list[BrokenReference]:
|
|
146
|
+
"""Get all broken references detected during build.
|
|
147
|
+
|
|
148
|
+
Broken references occur when a node references a target ID
|
|
149
|
+
that doesn't exist in the graph.
|
|
150
|
+
|
|
151
|
+
Returns:
|
|
152
|
+
List of BrokenReference instances.
|
|
153
|
+
"""
|
|
154
|
+
return list(self._broken_references)
|
|
155
|
+
|
|
156
|
+
def has_broken_references(self) -> bool:
|
|
157
|
+
"""Check if the graph has broken references."""
|
|
158
|
+
return len(self._broken_references) > 0
|
|
159
|
+
|
|
160
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
161
|
+
# Mutation Infrastructure
|
|
162
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
163
|
+
|
|
164
|
+
@property
|
|
165
|
+
def mutation_log(self) -> MutationLog:
|
|
166
|
+
"""Access the mutation log for this graph."""
|
|
167
|
+
return self._mutation_log
|
|
168
|
+
|
|
169
|
+
def deleted_nodes(self) -> list[GraphNode]:
|
|
170
|
+
"""Get all nodes that have been deleted from this graph.
|
|
171
|
+
|
|
172
|
+
Deleted nodes are preserved for delta reporting and undo operations.
|
|
173
|
+
|
|
174
|
+
Returns:
|
|
175
|
+
List of deleted GraphNode instances.
|
|
176
|
+
"""
|
|
177
|
+
return list(self._deleted_nodes)
|
|
178
|
+
|
|
179
|
+
def has_deletions(self) -> bool:
|
|
180
|
+
"""Check if any nodes have been deleted."""
|
|
181
|
+
return len(self._deleted_nodes) > 0
|
|
182
|
+
|
|
183
|
+
def undo_last(self) -> MutationEntry | None:
|
|
184
|
+
"""Undo the most recent mutation.
|
|
185
|
+
|
|
186
|
+
Reverses the last mutation using its before_state and removes
|
|
187
|
+
it from the mutation log.
|
|
188
|
+
|
|
189
|
+
Returns:
|
|
190
|
+
The undone MutationEntry, or None if log is empty.
|
|
191
|
+
"""
|
|
192
|
+
entry = self._mutation_log.pop()
|
|
193
|
+
if entry:
|
|
194
|
+
self._apply_undo(entry)
|
|
195
|
+
return entry
|
|
196
|
+
|
|
197
|
+
def undo_to(self, mutation_id: str) -> list[MutationEntry]:
|
|
198
|
+
"""Undo all mutations back to (and including) a specific mutation.
|
|
199
|
+
|
|
200
|
+
Args:
|
|
201
|
+
mutation_id: The mutation ID to undo back to.
|
|
202
|
+
|
|
203
|
+
Returns:
|
|
204
|
+
List of undone MutationEntry instances in reverse order.
|
|
205
|
+
|
|
206
|
+
Raises:
|
|
207
|
+
ValueError: If the mutation_id is not found.
|
|
208
|
+
"""
|
|
209
|
+
# Find all entries from the target to the end
|
|
210
|
+
entries_to_undo = self._mutation_log.entries_since(mutation_id)
|
|
211
|
+
undone: list[MutationEntry] = []
|
|
212
|
+
|
|
213
|
+
# Undo in reverse order (most recent first)
|
|
214
|
+
for _ in range(len(entries_to_undo)):
|
|
215
|
+
entry = self._mutation_log.pop()
|
|
216
|
+
if entry:
|
|
217
|
+
self._apply_undo(entry)
|
|
218
|
+
undone.append(entry)
|
|
219
|
+
|
|
220
|
+
return undone
|
|
221
|
+
|
|
222
|
+
def _apply_undo(self, entry: MutationEntry) -> None:
|
|
223
|
+
"""Apply an undo operation based on mutation type.
|
|
224
|
+
|
|
225
|
+
Restores the graph state from entry.before_state.
|
|
226
|
+
|
|
227
|
+
Args:
|
|
228
|
+
entry: The mutation entry to reverse.
|
|
229
|
+
"""
|
|
230
|
+
op = entry.operation
|
|
231
|
+
|
|
232
|
+
if op == "rename_node":
|
|
233
|
+
self._undo_rename_node(entry)
|
|
234
|
+
elif op == "update_title":
|
|
235
|
+
self._undo_update_title(entry)
|
|
236
|
+
elif op == "change_status":
|
|
237
|
+
self._undo_change_status(entry)
|
|
238
|
+
elif op == "add_requirement":
|
|
239
|
+
self._undo_add_requirement(entry)
|
|
240
|
+
elif op == "delete_requirement":
|
|
241
|
+
self._undo_delete_requirement(entry)
|
|
242
|
+
elif op == "add_edge":
|
|
243
|
+
self._undo_add_edge(entry)
|
|
244
|
+
elif op == "delete_edge":
|
|
245
|
+
self._undo_delete_edge(entry)
|
|
246
|
+
elif op == "change_edge_kind":
|
|
247
|
+
self._undo_change_edge_kind(entry)
|
|
248
|
+
elif op == "add_assertion":
|
|
249
|
+
self._undo_add_assertion(entry)
|
|
250
|
+
elif op == "delete_assertion":
|
|
251
|
+
self._undo_delete_assertion(entry)
|
|
252
|
+
elif op == "update_assertion":
|
|
253
|
+
self._undo_update_assertion(entry)
|
|
254
|
+
elif op == "rename_assertion":
|
|
255
|
+
self._undo_rename_assertion(entry)
|
|
256
|
+
elif op == "fix_broken_reference":
|
|
257
|
+
self._undo_fix_broken_reference(entry)
|
|
258
|
+
# Unknown operations are silently ignored (forward compatibility)
|
|
259
|
+
|
|
260
|
+
def _undo_rename_node(self, entry: MutationEntry) -> None:
|
|
261
|
+
"""Undo a node rename operation."""
|
|
262
|
+
old_id = entry.before_state.get("id")
|
|
263
|
+
new_id = entry.after_state.get("id")
|
|
264
|
+
if old_id and new_id and new_id in self._index:
|
|
265
|
+
node = self._index.pop(new_id)
|
|
266
|
+
node.set_id(old_id)
|
|
267
|
+
self._index[old_id] = node
|
|
268
|
+
|
|
269
|
+
def _undo_update_title(self, entry: MutationEntry) -> None:
|
|
270
|
+
"""Undo a title update operation."""
|
|
271
|
+
node_id = entry.target_id
|
|
272
|
+
old_title = entry.before_state.get("title")
|
|
273
|
+
if node_id in self._index and old_title is not None:
|
|
274
|
+
self._index[node_id].set_label(old_title)
|
|
275
|
+
|
|
276
|
+
def _undo_change_status(self, entry: MutationEntry) -> None:
|
|
277
|
+
"""Undo a status change operation."""
|
|
278
|
+
node_id = entry.target_id
|
|
279
|
+
old_status = entry.before_state.get("status")
|
|
280
|
+
if node_id in self._index and old_status is not None:
|
|
281
|
+
self._index[node_id].set_field("status", old_status)
|
|
282
|
+
|
|
283
|
+
def _undo_add_requirement(self, entry: MutationEntry) -> None:
|
|
284
|
+
"""Undo an add requirement operation (delete the added node)."""
|
|
285
|
+
node_id = entry.target_id
|
|
286
|
+
if node_id in self._index:
|
|
287
|
+
node = self._index.pop(node_id)
|
|
288
|
+
# Remove from roots if present
|
|
289
|
+
self._roots = [r for r in self._roots if r.id != node_id]
|
|
290
|
+
# Remove edges
|
|
291
|
+
for parent in list(node.iter_parents()):
|
|
292
|
+
parent.remove_child(node)
|
|
293
|
+
|
|
294
|
+
def _undo_delete_requirement(self, entry: MutationEntry) -> None:
|
|
295
|
+
"""Undo a delete requirement operation (restore the node)."""
|
|
296
|
+
# Find and restore from deleted_nodes
|
|
297
|
+
node_id = entry.target_id
|
|
298
|
+
for i, node in enumerate(self._deleted_nodes):
|
|
299
|
+
if node.id == node_id:
|
|
300
|
+
self._deleted_nodes.pop(i)
|
|
301
|
+
self._index[node_id] = node
|
|
302
|
+
# Restore as root if it was one
|
|
303
|
+
if entry.before_state.get("was_root"):
|
|
304
|
+
self._roots.append(node)
|
|
305
|
+
break
|
|
306
|
+
|
|
307
|
+
def _undo_add_edge(self, entry: MutationEntry) -> None:
    """Undo an add edge operation.

    Removes the edge (or broken-reference record) that the mutation
    created, then restores the source node's orphan status if it was
    an orphan before the edge was added.

    Args:
        entry: Mutation entry whose before_state carries source_id,
            target_id and was_orphan; after_state carries the
            ``broken`` flag.
    """
    source_id = entry.before_state.get("source_id")
    target_id = entry.before_state.get("target_id")
    was_orphan = entry.before_state.get("was_orphan", False)

    if source_id and target_id:
        # Check if this was a broken reference (never created actual edge)
        if entry.after_state.get("broken"):
            # Remove from broken references
            self._broken_references = [
                br
                for br in self._broken_references
                if not (br.source_id == source_id and br.target_id == target_id)
            ]
        else:
            # Remove actual edge. NOTE(review): the parent appears to be
            # the *target* here (target.remove_child(source)) — consistent
            # with the restore path in _undo_delete_edge.
            source = self._index.get(source_id)
            target = self._index.get(target_id)
            if source and target:
                target.remove_child(source)

        # Restore orphan status
        if was_orphan and source_id in self._index:
            self._orphaned_ids.add(source_id)
|
|
332
|
+
|
|
333
|
+
def _undo_delete_edge(self, entry: MutationEntry) -> None:
    """Undo a delete edge operation (restore the edge).

    Re-links source under target with the original edge kind and
    assertion targets, and clears the orphan flag that the deletion
    may have set on the source.

    Args:
        entry: Mutation entry whose before_state carries source_id,
            target_id, edge_kind and assertion_targets; after_state
            carries the ``became_orphan`` flag.
    """
    source_id = entry.before_state.get("source_id")
    target_id = entry.before_state.get("target_id")
    edge_kind_str = entry.before_state.get("edge_kind")
    assertion_targets = entry.before_state.get("assertion_targets", [])
    became_orphan = entry.after_state.get("became_orphan", False)

    if source_id and target_id and edge_kind_str:
        source = self._index.get(source_id)
        target = self._index.get(target_id)
        if source and target:
            # EdgeKind is stored as its string value in the log; rehydrate.
            edge_kind = EdgeKind(edge_kind_str)
            # Pass None rather than an empty list when there were no
            # assertion targets.
            target.link(source, edge_kind, assertion_targets or None)

    # Remove from orphans if it was marked orphan after deletion
    if became_orphan:
        self._orphaned_ids.discard(source_id)
|
|
351
|
+
|
|
352
|
+
def _undo_change_edge_kind(self, entry: MutationEntry) -> None:
    """Undo an edge kind change.

    Scans the source node's incoming edges for the one whose far end
    is the target, and writes the previous EdgeKind back onto it.

    Args:
        entry: Mutation entry whose before_state carries source_id,
            target_id and the original edge_kind string.
    """
    source_id = entry.before_state.get("source_id")
    target_id = entry.before_state.get("target_id")
    old_kind = entry.before_state.get("edge_kind")
    if source_id and target_id and old_kind:
        source = self._index.get(source_id)
        target = self._index.get(target_id)
        if source and target:
            # Find and update the edge (dataclass field, not _kind)
            for edge in source.iter_incoming_edges():
                if edge.source.id == target_id:
                    edge.kind = EdgeKind(old_kind)
                    break
|
|
366
|
+
|
|
367
|
+
def _undo_fix_broken_reference(self, entry: MutationEntry) -> None:
    """Undo a fix broken reference operation.

    Removes whatever the fix produced (a real edge when the fix
    succeeded, otherwise a retargeted broken-reference record), puts
    the original broken reference back, and restores the source's
    orphan status.

    Args:
        entry: Mutation entry. before_state carries source_id,
            old_target_id, edge_kind and was_orphan; after_state
            carries new_target_id and the ``fixed`` flag.
    """
    source_id = entry.before_state.get("source_id")
    old_target_id = entry.before_state.get("old_target_id")
    new_target_id = entry.after_state.get("new_target_id")
    edge_kind_str = entry.before_state.get("edge_kind")
    was_orphan = entry.before_state.get("was_orphan", False)

    if source_id and old_target_id and new_target_id and edge_kind_str:
        source = self._index.get(source_id)

        # Check if the fix was successful (actual edge created)
        if entry.after_state.get("fixed"):
            # Remove the edge that was created
            new_target = self._index.get(new_target_id)
            if source and new_target:
                new_target.remove_child(source)
        else:
            # Remove from broken references (with new target)
            self._broken_references = [
                br
                for br in self._broken_references
                if not (br.source_id == source_id and br.target_id == new_target_id)
            ]

        # Restore the original broken reference
        self._broken_references.append(
            BrokenReference(
                source_id=source_id,
                target_id=old_target_id,
                edge_kind=edge_kind_str,
            )
        )

        # Restore orphan status
        if was_orphan and source_id in self._index:
            self._orphaned_ids.add(source_id)
|
|
404
|
+
|
|
405
|
+
def _undo_add_assertion(self, entry: MutationEntry) -> None:
|
|
406
|
+
"""Undo an add assertion operation."""
|
|
407
|
+
assertion_id = entry.target_id
|
|
408
|
+
if assertion_id in self._index:
|
|
409
|
+
node = self._index.pop(assertion_id)
|
|
410
|
+
for parent in list(node.iter_parents()):
|
|
411
|
+
parent.remove_child(node)
|
|
412
|
+
# Restore parent hash (even if None)
|
|
413
|
+
if "parent_hash" in entry.before_state:
|
|
414
|
+
parent.set_field("hash", entry.before_state["parent_hash"])
|
|
415
|
+
|
|
416
|
+
def _undo_delete_assertion(self, entry: MutationEntry) -> None:
    """Undo a delete assertion operation.

    Two phases: first reverse any sibling-compaction renames that the
    delete performed (newest first), rewriting edge assertion targets
    back to the old labels; then reinstate the deleted assertion node
    itself, re-link it to its parent, and restore the parent's hash.

    Args:
        entry: Mutation entry. before_state may carry ``renames``
            (list of {old_id, new_id, old_label, new_label}), plus
            id, label, parent_id and parent_hash for the restore.
    """
    # First, undo any compaction renames in reverse order
    renames = entry.before_state.get("renames", [])
    for rename in reversed(renames):
        old_id = rename.get("old_id")
        new_id = rename.get("new_id")
        old_label = rename.get("old_label")
        new_label = rename.get("new_label")

        if new_id and new_id in self._index:
            node = self._index.pop(new_id)
            node.set_id(old_id)
            node.set_field("label", old_label)
            self._index[old_id] = node

            # Update edges back: any edge that targeted the compacted
            # label is pointed at the original label again.
            for edge_parent in self._index.values():
                for edge in edge_parent.iter_outgoing_edges():
                    if new_label in edge.assertion_targets:
                        edge.assertion_targets.remove(new_label)
                        edge.assertion_targets.append(old_label)

    # Restore the deleted assertion
    node_id = entry.target_id
    for i, node in enumerate(self._deleted_nodes):
        if node.id == node_id:
            self._deleted_nodes.pop(i)
            # Restore original ID and label
            old_id = entry.before_state.get("id", node_id)
            old_label = entry.before_state.get("label")
            node.set_id(old_id)
            if old_label:
                node.set_field("label", old_label)
            self._index[old_id] = node
            # Restore parent link
            parent_id = entry.before_state.get("parent_id")
            if parent_id and parent_id in self._index:
                parent = self._index[parent_id]
                parent.add_child(node)
                # Restore parent hash (even if None)
                if "parent_hash" in entry.before_state:
                    parent.set_field("hash", entry.before_state["parent_hash"])
            break
|
|
460
|
+
|
|
461
|
+
def _undo_update_assertion(self, entry: MutationEntry) -> None:
    """Undo an assertion text update.

    Restores the assertion's previous text and the parent node's hash
    from before the update.

    Args:
        entry: Mutation entry whose before_state carries ``text`` and
            optionally parent_id / parent_hash.
    """
    node_id = entry.target_id
    old_text = entry.before_state.get("text")
    if node_id in self._index and old_text is not None:
        self._index[node_id].set_label(old_text)
        # Restore parent hash (even if None)
        parent_id = entry.before_state.get("parent_id")
        if parent_id and parent_id in self._index and "parent_hash" in entry.before_state:
            self._index[parent_id].set_field("hash", entry.before_state["parent_hash"])
|
|
471
|
+
|
|
472
|
+
def _undo_rename_assertion(self, entry: MutationEntry) -> None:
    """Undo an assertion rename.

    Restores the assertion's original ID and label, repoints every
    edge assertion target that used the new label back to the old
    label, and restores the parent node's hash.

    Args:
        entry: Mutation entry. before_state carries id/label (and
            optionally parent_id / parent_hash); after_state carries
            the post-rename id/label.
    """
    old_id = entry.before_state.get("id")
    new_id = entry.after_state.get("id")
    old_label = entry.before_state.get("label")
    new_label = entry.after_state.get("label")

    if old_id and new_id and new_id in self._index:
        node = self._index.pop(new_id)
        node.set_id(old_id)
        if old_label:
            node.set_field("label", old_label)
        self._index[old_id] = node

        # Update edges back
        if old_label and new_label:
            for edge_parent in self._index.values():
                for edge in edge_parent.iter_outgoing_edges():
                    if new_label in edge.assertion_targets:
                        edge.assertion_targets.remove(new_label)
                        edge.assertion_targets.append(old_label)

        # Restore parent hash (even if None)
        parent_id = entry.before_state.get("parent_id")
        if parent_id and parent_id in self._index and "parent_hash" in entry.before_state:
            self._index[parent_id].set_field("hash", entry.before_state["parent_hash"])
|
|
498
|
+
|
|
499
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
500
|
+
# Node Mutation API
|
|
501
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
502
|
+
|
|
503
|
+
def rename_node(self, old_id: str, new_id: str) -> MutationEntry:
|
|
504
|
+
"""Rename a node (e.g., REQ-p00001 -> REQ-p00002).
|
|
505
|
+
|
|
506
|
+
Updates the node's ID, all edges pointing to/from this node,
|
|
507
|
+
and assertion IDs if the node is a requirement.
|
|
508
|
+
|
|
509
|
+
Args:
|
|
510
|
+
old_id: Current node ID.
|
|
511
|
+
new_id: New node ID.
|
|
512
|
+
|
|
513
|
+
Returns:
|
|
514
|
+
MutationEntry recording the operation.
|
|
515
|
+
|
|
516
|
+
Raises:
|
|
517
|
+
KeyError: If old_id is not found.
|
|
518
|
+
ValueError: If new_id already exists.
|
|
519
|
+
"""
|
|
520
|
+
if old_id not in self._index:
|
|
521
|
+
raise KeyError(f"Node '{old_id}' not found")
|
|
522
|
+
if new_id in self._index:
|
|
523
|
+
raise ValueError(f"Node '{new_id}' already exists")
|
|
524
|
+
|
|
525
|
+
node = self._index.pop(old_id)
|
|
526
|
+
old_title = node.get_label()
|
|
527
|
+
|
|
528
|
+
# Create mutation entry
|
|
529
|
+
entry = MutationEntry(
|
|
530
|
+
operation="rename_node",
|
|
531
|
+
target_id=old_id,
|
|
532
|
+
before_state={"id": old_id, "title": old_title},
|
|
533
|
+
after_state={"id": new_id, "title": old_title},
|
|
534
|
+
)
|
|
535
|
+
|
|
536
|
+
# Update node ID
|
|
537
|
+
node.set_id(new_id)
|
|
538
|
+
self._index[new_id] = node
|
|
539
|
+
|
|
540
|
+
# Update roots list if this was a root
|
|
541
|
+
for _i, root in enumerate(self._roots):
|
|
542
|
+
if root is node:
|
|
543
|
+
break # Root reference is same object, no update needed
|
|
544
|
+
|
|
545
|
+
# Update orphaned_ids if this was an orphan
|
|
546
|
+
if old_id in self._orphaned_ids:
|
|
547
|
+
self._orphaned_ids.discard(old_id)
|
|
548
|
+
self._orphaned_ids.add(new_id)
|
|
549
|
+
|
|
550
|
+
# Update broken references that reference this node
|
|
551
|
+
for i, br in enumerate(self._broken_references):
|
|
552
|
+
if br.source_id == old_id:
|
|
553
|
+
self._broken_references[i] = BrokenReference(
|
|
554
|
+
source_id=new_id,
|
|
555
|
+
target_id=br.target_id,
|
|
556
|
+
edge_kind=br.edge_kind,
|
|
557
|
+
)
|
|
558
|
+
elif br.target_id == old_id:
|
|
559
|
+
self._broken_references[i] = BrokenReference(
|
|
560
|
+
source_id=br.source_id,
|
|
561
|
+
target_id=new_id,
|
|
562
|
+
edge_kind=br.edge_kind,
|
|
563
|
+
)
|
|
564
|
+
|
|
565
|
+
# If this is a requirement, rename its assertion children
|
|
566
|
+
if node.kind == NodeKind.REQUIREMENT:
|
|
567
|
+
for child in list(node.iter_children()):
|
|
568
|
+
if child.kind == NodeKind.ASSERTION:
|
|
569
|
+
assertion_label = child.get_field("label", "")
|
|
570
|
+
if assertion_label:
|
|
571
|
+
old_assertion_id = f"{old_id}-{assertion_label}"
|
|
572
|
+
new_assertion_id = f"{new_id}-{assertion_label}"
|
|
573
|
+
if old_assertion_id in self._index:
|
|
574
|
+
self._index.pop(old_assertion_id)
|
|
575
|
+
child.set_id(new_assertion_id)
|
|
576
|
+
self._index[new_assertion_id] = child
|
|
577
|
+
|
|
578
|
+
self._mutation_log.append(entry)
|
|
579
|
+
return entry
|
|
580
|
+
|
|
581
|
+
def update_title(self, node_id: str, new_title: str) -> MutationEntry:
    """Replace a requirement's title.

    The title is not part of the hashed body, so this mutation never
    affects the requirement hash.

    Args:
        node_id: ID of the node whose title is being replaced.
        new_title: Replacement title text.

    Returns:
        MutationEntry recording the operation.

    Raises:
        KeyError: If no node with ``node_id`` exists.
    """
    try:
        node = self._index[node_id]
    except KeyError:
        raise KeyError(f"Node '{node_id}' not found") from None

    previous_title = node.get_label()
    record = MutationEntry(
        operation="update_title",
        target_id=node_id,
        before_state={"title": previous_title},
        after_state={"title": new_title},
    )

    node.set_label(new_title)
    self._mutation_log.append(record)
    return record
|
|
610
|
+
|
|
611
|
+
def change_status(self, node_id: str, new_status: str) -> MutationEntry:
    """Transition a requirement's status field (e.g., Draft -> Active).

    Args:
        node_id: ID of the node to transition.
        new_status: Status value to store.

    Returns:
        MutationEntry recording the operation.

    Raises:
        KeyError: If no node with ``node_id`` exists.
    """
    if node_id not in self._index:
        raise KeyError(f"Node '{node_id}' not found")

    target = self._index[node_id]
    previous_status = target.get_field("status")

    record = MutationEntry(
        operation="change_status",
        target_id=node_id,
        before_state={"status": previous_status},
        after_state={"status": new_status},
    )

    target.set_field("status", new_status)
    self._mutation_log.append(record)
    return record
|
|
640
|
+
|
|
641
|
+
def add_requirement(
    self,
    req_id: str,
    title: str,
    level: str,
    status: str = "Draft",
    parent_id: str | None = None,
    edge_kind: EdgeKind = EdgeKind.IMPLEMENTS,
) -> MutationEntry:
    """Add a new requirement node.

    Creates a node with the specified properties and optionally
    links it to a parent. Computes initial hash (empty body = specific hash).

    Args:
        req_id: The requirement ID (e.g., "REQ-p00001").
        title: The requirement title.
        level: The requirement level ("PRD", "OPS", "DEV").
        status: The requirement status (default "Draft").
        parent_id: Optional parent node ID to link to.
        edge_kind: Edge type for parent link (default IMPLEMENTS).

    Returns:
        MutationEntry recording the operation.

    Raises:
        ValueError: If req_id already exists.
        KeyError: If parent_id is specified but not found.
    """
    # Local import to avoid a module-level import cycle — presumably;
    # confirm against the package's import graph.
    from elspais.utilities.hasher import calculate_hash

    if req_id in self._index:
        raise ValueError(f"Node '{req_id}' already exists")
    if parent_id and parent_id not in self._index:
        raise KeyError(f"Parent node '{parent_id}' not found")

    # Create the node
    node = GraphNode(
        id=req_id,
        kind=NodeKind.REQUIREMENT,
        label=title,
    )

    # Compute hash for empty body
    empty_hash = calculate_hash("")

    # NOTE(review): writes GraphNode._content directly rather than via
    # set_field(); assumes no invariant-enforcing setter exists — confirm.
    node._content = {
        "level": level,
        "status": status,
        "hash": empty_hash,
    }

    # Add to index
    self._index[req_id] = node

    # Build entry with before/after state
    entry = MutationEntry(
        operation="add_requirement",
        target_id=req_id,
        before_state={},  # Node didn't exist
        after_state={
            "id": req_id,
            "title": title,
            "level": level,
            "status": status,
            "hash": empty_hash,
            "parent_id": parent_id,
        },
    )

    # Link to parent if specified
    if parent_id:
        parent = self._index[parent_id]
        parent.link(node, edge_kind)
    else:
        # No parent - this is a root node
        self._roots.append(node)

    self._mutation_log.append(entry)
    return entry
|
|
721
|
+
|
|
722
|
+
def delete_requirement(
    self,
    node_id: str,
    compact_assertions: bool = True,
) -> MutationEntry:
    """Delete a requirement.

    Removes the node from the index, moves it to _deleted_nodes for
    delta tracking, removes all edges to/from this node, and marks
    children as orphans.

    Args:
        node_id: The requirement ID to delete.
        compact_assertions: If True, sibling assertions are renumbered
            after deletion. (Currently not implemented - reserved for
            assertion deletion.)

    Returns:
        MutationEntry recording the operation.

    Raises:
        KeyError: If node_id is not found.
    """
    if node_id not in self._index:
        raise KeyError(f"Node '{node_id}' not found")

    node = self._index[node_id]
    was_root = node in self._roots

    # Record state before deletion (parent/child IDs captured now, before
    # edges are torn down below).
    entry = MutationEntry(
        operation="delete_requirement",
        target_id=node_id,
        before_state={
            "id": node_id,
            "title": node.get_label(),
            "level": node.get_field("level"),
            "status": node.get_field("status"),
            "hash": node.get_field("hash"),
            "was_root": was_root,
            "parent_ids": [p.id for p in node.iter_parents()],
            "child_ids": [c.id for c in node.iter_children()],
        },
        after_state={},  # Node deleted
    )

    # Remove from index
    self._index.pop(node_id)

    # Move to deleted_nodes for delta tracking
    self._deleted_nodes.append(node)

    # Remove from roots if present
    if was_root:
        self._roots = [r for r in self._roots if r.id != node_id]

    # Remove from orphaned_ids if present
    self._orphaned_ids.discard(node_id)

    # Disconnect from parents (copy with list() so removal during
    # iteration is safe).
    for parent in list(node.iter_parents()):
        parent.remove_child(node)

    # Mark children as orphans (except assertions which go with the req)
    for child in list(node.iter_children()):
        if child.kind == NodeKind.ASSERTION:
            # Delete assertion children too
            if child.id in self._index:
                self._index.pop(child.id)
            self._deleted_nodes.append(child)
        else:
            # Non-assertion children become orphans
            node.remove_child(child)
            self._orphaned_ids.add(child.id)

    self._mutation_log.append(entry)
    return entry
|
|
799
|
+
|
|
800
|
+
# ─────────────────────────────────────────────────────────────────────────
# Assertion Mutation API
# ─────────────────────────────────────────────────────────────────────────

# Regex pattern for assertion lines in body_text (e.g., "A. The system SHALL...")
# Group 1 captures the label (one or more uppercase letters/digits), group 2
# the text after the "<label>. " prefix. Compiled with MULTILINE so ^/$
# anchor at individual body-text lines when used with .sub/.finditer.
_ASSERTION_LINE_RE = re.compile(r"^([A-Z0-9]+)\.\s+(.*)$", re.MULTILINE)
|
|
806
|
+
|
|
807
|
+
def _update_assertion_in_body_text(self, body_text: str, label: str, new_text: str) -> str:
|
|
808
|
+
"""Update an assertion line in body_text.
|
|
809
|
+
|
|
810
|
+
Args:
|
|
811
|
+
body_text: The requirement body text.
|
|
812
|
+
label: The assertion label (e.g., "A").
|
|
813
|
+
new_text: The new assertion text.
|
|
814
|
+
|
|
815
|
+
Returns:
|
|
816
|
+
Updated body_text with the assertion modified.
|
|
817
|
+
"""
|
|
818
|
+
pattern = re.compile(rf"^({re.escape(label)})\.\s+.*$", re.MULTILINE)
|
|
819
|
+
return pattern.sub(rf"\1. {new_text}", body_text)
|
|
820
|
+
|
|
821
|
+
def _add_assertion_to_body_text(self, body_text: str, label: str, text: str) -> str:
    """Add an assertion line to body_text.

    Inserts the assertion in sorted order within the assertions section.

    Args:
        body_text: The requirement body text.
        label: The assertion label (e.g., "C").
        text: The assertion text.

    Returns:
        Updated body_text with the new assertion added.
    """
    new_line = f"{label}. {text}"
    lines = body_text.split("\n")
    result_lines = []
    inserted = False

    # First pass: slot the new line immediately before the first existing
    # assertion whose label sorts after ours (lexicographic string
    # comparison — adequate for single-letter labels).
    for line in lines:
        match = self._ASSERTION_LINE_RE.match(line)
        if match and not inserted:
            existing_label = match.group(1)
            # Insert before this line if our label comes first
            if label < existing_label:
                result_lines.append(new_line)
                inserted = True
        result_lines.append(line)

    # If not inserted, append at end (either no assertions or comes last)
    if not inserted:
        # Check if there's an ## Assertions header to append after
        for i, line in enumerate(result_lines):
            if line.strip().lower() == "## assertions":
                # Find next non-empty line or end of section
                insert_pos = i + 1
                while insert_pos < len(result_lines):
                    if result_lines[insert_pos].strip():
                        break
                    insert_pos += 1
                # Find end of assertion block
                while insert_pos < len(result_lines):
                    if not self._ASSERTION_LINE_RE.match(result_lines[insert_pos]):
                        break
                    insert_pos += 1
                result_lines.insert(insert_pos, new_line)
                inserted = True
                break

    if not inserted:
        # No assertions section found, just append
        result_lines.append(new_line)

    return "\n".join(result_lines)
|
|
874
|
+
|
|
875
|
+
def _delete_assertion_from_body_text(self, body_text: str, label: str) -> str:
|
|
876
|
+
"""Delete an assertion line from body_text.
|
|
877
|
+
|
|
878
|
+
Args:
|
|
879
|
+
body_text: The requirement body text.
|
|
880
|
+
label: The assertion label to delete (e.g., "B").
|
|
881
|
+
|
|
882
|
+
Returns:
|
|
883
|
+
Updated body_text with the assertion removed.
|
|
884
|
+
"""
|
|
885
|
+
pattern = re.compile(rf"^{re.escape(label)}\.\s+.*\n?", re.MULTILINE)
|
|
886
|
+
return pattern.sub("", body_text)
|
|
887
|
+
|
|
888
|
+
def _rename_assertion_in_body_text(self, body_text: str, old_label: str, new_label: str) -> str:
|
|
889
|
+
"""Rename an assertion label in body_text.
|
|
890
|
+
|
|
891
|
+
Args:
|
|
892
|
+
body_text: The requirement body text.
|
|
893
|
+
old_label: The current assertion label (e.g., "A").
|
|
894
|
+
new_label: The new assertion label (e.g., "D").
|
|
895
|
+
|
|
896
|
+
Returns:
|
|
897
|
+
Updated body_text with the assertion label changed.
|
|
898
|
+
"""
|
|
899
|
+
pattern = re.compile(rf"^{re.escape(old_label)}(\.\s+.*)$", re.MULTILINE)
|
|
900
|
+
return pattern.sub(rf"{new_label}\1", body_text)
|
|
901
|
+
|
|
902
|
+
def _recompute_requirement_hash(self, req_node: GraphNode) -> str:
    """Recompute and store a requirement's content hash.

    Per spec/requirements-spec.md the hash covers exactly the body text:
    every line after the Header line and before the Footer line.

    Args:
        req_node: Requirement node whose hash should be refreshed.

    Returns:
        The newly computed hash value.
    """
    from elspais.utilities.hasher import calculate_hash

    digest = calculate_hash(req_node.get_field("body_text", ""))
    req_node.set_field("hash", digest)
    return digest
|
|
922
|
+
|
|
923
|
+
def rename_assertion(self, old_id: str, new_label: str) -> MutationEntry:
    """Rename assertion label (e.g., REQ-p00001-A -> REQ-p00001-D).

    Updates the assertion node ID, edges with assertion_targets,
    and recomputes the parent requirement hash.

    Args:
        old_id: Current assertion ID (e.g., "REQ-p00001-A").
        new_label: New assertion label (e.g., "D").

    Returns:
        MutationEntry recording the operation.

    Raises:
        KeyError: If old_id is not found.
        ValueError: If the node is not an assertion or new_id exists.
    """
    if old_id not in self._index:
        raise KeyError(f"Assertion '{old_id}' not found")

    node = self._index[old_id]
    if node.kind != NodeKind.ASSERTION:
        raise ValueError(f"Node '{old_id}' is not an assertion")

    # Get parent requirement (first REQUIREMENT parent wins if several)
    parents = [p for p in node.iter_parents() if p.kind == NodeKind.REQUIREMENT]
    if not parents:
        raise ValueError(f"Assertion '{old_id}' has no parent requirement")
    parent = parents[0]

    # Compute new ID
    old_label = node.get_field("label", "")
    new_id = f"{parent.id}-{new_label}"

    if new_id in self._index:
        raise ValueError(f"Assertion '{new_id}' already exists")

    # Record before state
    old_hash = parent.get_field("hash")
    entry = MutationEntry(
        operation="rename_assertion",
        target_id=old_id,
        before_state={
            "id": old_id,
            "label": old_label,
            "parent_id": parent.id,
            "parent_hash": old_hash,
        },
        after_state={
            "id": new_id,
            "label": new_label,
        },
        affects_hash=True,
    )

    # Update assertion node: re-key the index entry under the new ID
    self._index.pop(old_id)
    node.set_id(new_id)
    node.set_field("label", new_label)
    self._index[new_id] = node

    # Update edges with assertion_targets referencing old label
    # NOTE(review): this scans every edge in the graph and rewrites any
    # assertion_targets entry equal to old_label, even for edges that do
    # not point at this assertion's parent requirement — confirm that
    # graph-wide relabelling is intended.
    for parent_node in self._index.values():
        for edge in parent_node.iter_outgoing_edges():
            if old_label in edge.assertion_targets:
                edge.assertion_targets.remove(old_label)
                edge.assertion_targets.append(new_label)

    # Update body_text to reflect renamed assertion
    body_text = parent.get_field("body_text", "")
    if body_text:
        new_body_text = self._rename_assertion_in_body_text(body_text, old_label, new_label)
        parent.set_field("body_text", new_body_text)

    # Recompute parent hash (body_text changed above)
    self._recompute_requirement_hash(parent)

    self._mutation_log.append(entry)
    return entry
|
|
1002
|
+
|
|
1003
|
+
def update_assertion(self, assertion_id: str, new_text: str) -> MutationEntry:
    """Update assertion text.

    Recomputes the parent requirement hash.

    Args:
        assertion_id: The assertion ID to update.
        new_text: The new assertion text.

    Returns:
        MutationEntry recording the operation.

    Raises:
        KeyError: If assertion_id is not found.
        ValueError: If the node is not an assertion.
    """
    if assertion_id not in self._index:
        raise KeyError(f"Assertion '{assertion_id}' not found")

    node = self._index[assertion_id]
    if node.kind != NodeKind.ASSERTION:
        raise ValueError(f"Node '{assertion_id}' is not an assertion")

    # Get parent requirement (first REQUIREMENT parent wins if several)
    parents = [p for p in node.iter_parents() if p.kind == NodeKind.REQUIREMENT]
    if not parents:
        raise ValueError(f"Assertion '{assertion_id}' has no parent requirement")
    parent = parents[0]

    old_text = node.get_label()
    old_hash = parent.get_field("hash")

    entry = MutationEntry(
        operation="update_assertion",
        target_id=assertion_id,
        before_state={
            "text": old_text,
            "parent_id": parent.id,
            "parent_hash": old_hash,
        },
        after_state={
            "text": new_text,
        },
        affects_hash=True,
    )

    # Update assertion text
    node.set_label(new_text)

    # Update body_text to reflect updated assertion; skipped silently if
    # the parent has no body_text or the assertion carries no label.
    label = node.get_field("label", "")
    body_text = parent.get_field("body_text", "")
    if body_text and label:
        new_body_text = self._update_assertion_in_body_text(body_text, label, new_text)
        parent.set_field("body_text", new_body_text)

    # Recompute parent hash (body_text changed above)
    self._recompute_requirement_hash(parent)

    self._mutation_log.append(entry)
    return entry
|
|
1064
|
+
|
|
1065
|
+
def add_assertion(self, req_id: str, label: str, text: str) -> MutationEntry:
    """Add assertion to requirement.

    Creates an assertion node, links it as child of the requirement,
    and recomputes the requirement hash.

    Args:
        req_id: The parent requirement ID.
        label: The assertion label (e.g., "A", "B").
        text: The assertion text.

    Returns:
        MutationEntry recording the operation.

    Raises:
        KeyError: If req_id is not found.
        ValueError: If req_id is not a requirement or assertion exists.
    """
    if req_id not in self._index:
        raise KeyError(f"Requirement '{req_id}' not found")

    parent = self._index[req_id]
    if parent.kind != NodeKind.REQUIREMENT:
        raise ValueError(f"Node '{req_id}' is not a requirement")

    # Assertion IDs are always "<req_id>-<label>"
    assertion_id = f"{req_id}-{label}"
    if assertion_id in self._index:
        raise ValueError(f"Assertion '{assertion_id}' already exists")

    old_hash = parent.get_field("hash")

    # Create assertion node (the assertion text is stored as the label
    # of the GraphNode; the letter label lives in _content)
    assertion_node = GraphNode(
        id=assertion_id,
        kind=NodeKind.ASSERTION,
        label=text,
    )
    # NOTE(review): writes GraphNode._content directly, mirroring
    # add_requirement — assumes no invariant-enforcing setter; confirm.
    assertion_node._content = {"label": label}

    # Add to index and link to parent
    self._index[assertion_id] = assertion_node
    parent.add_child(assertion_node)

    # Update body_text to include new assertion; skipped silently when
    # the parent has no body_text.
    body_text = parent.get_field("body_text", "")
    if body_text:
        new_body_text = self._add_assertion_to_body_text(body_text, label, text)
        parent.set_field("body_text", new_body_text)

    # Recompute parent hash (body_text changed above)
    new_hash = self._recompute_requirement_hash(parent)

    entry = MutationEntry(
        operation="add_assertion",
        target_id=assertion_id,
        before_state={
            "parent_id": req_id,
            "parent_hash": old_hash,
        },
        after_state={
            "id": assertion_id,
            "label": label,
            "text": text,
            "parent_hash": new_hash,
        },
        affects_hash=True,
    )

    self._mutation_log.append(entry)
    return entry
|
|
1135
|
+
|
|
1136
|
+
def delete_assertion(
    self,
    assertion_id: str,
    compact: bool = True,
) -> MutationEntry:
    """Delete assertion with optional compaction.

    If compact=True and deleting B from [A, B, C, D]:
    - C -> B, D -> C
    - Updates all edges referencing C, D
    - Recomputes parent hash

    Args:
        assertion_id: The assertion ID to delete.
        compact: If True, renumber subsequent assertions.

    Returns:
        MutationEntry recording the operation.

    Raises:
        KeyError: If assertion_id is not found.
        ValueError: If the node is not an assertion.
    """
    if assertion_id not in self._index:
        raise KeyError(f"Assertion '{assertion_id}' not found")

    node = self._index[assertion_id]
    if node.kind != NodeKind.ASSERTION:
        raise ValueError(f"Node '{assertion_id}' is not an assertion")

    # Get parent requirement (first REQUIREMENT parent wins if several)
    parents = [p for p in node.iter_parents() if p.kind == NodeKind.REQUIREMENT]
    if not parents:
        raise ValueError(f"Assertion '{assertion_id}' has no parent requirement")
    parent = parents[0]

    old_label = node.get_field("label", "")
    old_text = node.get_label()
    old_hash = parent.get_field("hash")

    # Collect sibling assertions sorted by label (snapshot taken BEFORE
    # the deletion below so the deleted node is still among them)
    siblings = []
    for child in parent.iter_children():
        if child.kind == NodeKind.ASSERTION:
            siblings.append((child.get_field("label", ""), child))
    siblings.sort(key=lambda x: x[0])

    # Track renames for undo (label_before -> label_after)
    renames: list[dict[str, str]] = []

    # Remove from index first
    self._index.pop(assertion_id)
    parent.remove_child(node)
    self._deleted_nodes.append(node)

    # Remove edges referencing this assertion
    # NOTE(review): like rename_assertion, this scans the whole graph by
    # label only, not by edge target — confirm graph-wide removal is
    # intended.
    for parent_node in self._index.values():
        for edge in parent_node.iter_outgoing_edges():
            if old_label in edge.assertion_targets:
                edge.assertion_targets.remove(old_label)

    # Update body_text to remove deleted assertion
    body_text = parent.get_field("body_text", "")
    if body_text:
        body_text = self._delete_assertion_from_body_text(body_text, old_label)

    # Compact if requested
    if compact:
        # Find assertions after the deleted one (siblings are sorted, so
        # everything following the deleted node shifts back by one)
        deleted_found = False
        for sib_label, sib_node in siblings:
            if sib_node is node:
                deleted_found = True
                continue
            if deleted_found and sib_node.id in self._index:
                # This sibling needs to be renamed to previous letter
                # NOTE(review): chr(ord(label) - 1) assumes single-character
                # labels in a contiguous sequence; multi-character labels
                # (which _ASSERTION_LINE_RE permits) would be mangled —
                # confirm labels here are always single letters.
                prev_label = chr(ord(sib_label) - 1)
                old_sib_id = sib_node.id
                new_sib_id = f"{parent.id}-{prev_label}"

                renames.append(
                    {
                        "old_id": old_sib_id,
                        "new_id": new_sib_id,
                        "old_label": sib_label,
                        "new_label": prev_label,
                    }
                )

                # Update the node
                self._index.pop(old_sib_id)
                sib_node.set_id(new_sib_id)
                sib_node.set_field("label", prev_label)
                self._index[new_sib_id] = sib_node

                # Update edges referencing this assertion
                for edge_parent in self._index.values():
                    for edge in edge_parent.iter_outgoing_edges():
                        if sib_label in edge.assertion_targets:
                            edge.assertion_targets.remove(sib_label)
                            edge.assertion_targets.append(prev_label)

                # Update body_text for this rename
                if body_text:
                    body_text = self._rename_assertion_in_body_text(
                        body_text, sib_label, prev_label
                    )

    # Update parent's body_text field
    if parent.get_field("body_text", ""):
        parent.set_field("body_text", body_text)

    # Recompute parent hash
    new_hash = self._recompute_requirement_hash(parent)

    entry = MutationEntry(
        operation="delete_assertion",
        target_id=assertion_id,
        before_state={
            "id": assertion_id,
            "label": old_label,
            "text": old_text,
            "parent_id": parent.id,
            "parent_hash": old_hash,
            "compact": compact,
            "renames": renames,
        },
        after_state={
            "parent_hash": new_hash,
        },
        affects_hash=True,
    )

    self._mutation_log.append(entry)
    return entry
|
|
1271
|
+
|
|
1272
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
1273
|
+
# Edge Mutation API
|
|
1274
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
1275
|
+
|
|
1276
|
+
def add_edge(
    self,
    source_id: str,
    target_id: str,
    edge_kind: EdgeKind,
    assertion_targets: list[str] | None = None,
) -> MutationEntry:
    """Add a new edge (reference).

    Creates a relationship from source to target. If target doesn't exist,
    adds to _broken_references instead of creating an edge.

    Args:
        source_id: The child/source node ID.
        target_id: The parent/target node ID.
        edge_kind: The type of relationship.
        assertion_targets: Optional assertion labels targeted.

    Returns:
        MutationEntry recording the operation.

    Raises:
        KeyError: If source_id is not found.
    """
    if source_id not in self._index:
        raise KeyError(f"Source node '{source_id}' not found")

    source = self._index[source_id]
    # .get() — a missing target is tolerated and recorded as broken below
    target = self._index.get(target_id)

    # Check if source was orphan before (recorded so the mutation can be
    # undone faithfully)
    was_orphan = source_id in self._orphaned_ids

    entry = MutationEntry(
        operation="add_edge",
        target_id=source_id,
        before_state={
            "source_id": source_id,
            "target_id": target_id,
            "was_orphan": was_orphan,
        },
        after_state={
            "source_id": source_id,
            "target_id": target_id,
            "edge_kind": edge_kind.value,
            "assertion_targets": assertion_targets or [],
        },
    )

    if target:
        # Create the edge (parent links child: target is the parent side)
        target.link(source, edge_kind, assertion_targets)

        # Source is no longer orphan (it now has a parent)
        self._orphaned_ids.discard(source_id)
    else:
        # Target doesn't exist - record as broken reference
        self._broken_references.append(
            BrokenReference(
                source_id=source_id,
                target_id=target_id,
                edge_kind=edge_kind.value,
            )
        )
        entry.after_state["broken"] = True

    self._mutation_log.append(entry)
    return entry
|
|
1344
|
+
|
|
1345
|
+
def change_edge_kind(
    self,
    source_id: str,
    target_id: str,
    new_kind: EdgeKind,
) -> MutationEntry:
    """Change edge type (e.g., IMPLEMENTS -> REFINES).

    Args:
        source_id: The child/source node ID.
        target_id: The parent/target node ID.
        new_kind: The new edge kind.

    Returns:
        MutationEntry recording the operation.

    Raises:
        KeyError: If source_id or target_id is not found.
        ValueError: If no edge exists between source and target.
    """
    if source_id not in self._index:
        raise KeyError(f"Source node '{source_id}' not found")
    if target_id not in self._index:
        raise KeyError(f"Target node '{target_id}' not found")

    source = self._index[source_id]
    # target_id already validated above, used in edge lookup below

    # Find the edge from target to source (target is parent, source is child)
    edge_to_update = None
    for edge in source.iter_incoming_edges():
        if edge.source.id == target_id:
            edge_to_update = edge
            break

    if edge_to_update is None:
        raise ValueError(f"No edge exists from '{target_id}' to '{source_id}'")

    old_kind = edge_to_update.kind

    # assertion_targets are copied with list() so the log entry is not
    # aliased to the live (mutable) edge list
    entry = MutationEntry(
        operation="change_edge_kind",
        target_id=source_id,
        before_state={
            "source_id": source_id,
            "target_id": target_id,
            "edge_kind": old_kind.value,
            "assertion_targets": list(edge_to_update.assertion_targets),
        },
        after_state={
            "source_id": source_id,
            "target_id": target_id,
            "edge_kind": new_kind.value,
            "assertion_targets": list(edge_to_update.assertion_targets),
        },
    )

    # Update the edge kind directly (dataclass field, not _kind)
    edge_to_update.kind = new_kind

    self._mutation_log.append(entry)
    return entry
|
|
1407
|
+
|
|
1408
|
+
def delete_edge(self, source_id: str, target_id: str) -> MutationEntry:
    """Remove the edge linking parent ``target_id`` to child ``source_id``.

    If the child loses its last parent and is not a root, it is recorded
    as an orphan (requirements only).

    Args:
        source_id: The child/source node ID.
        target_id: The parent/target node ID.

    Returns:
        MutationEntry recording the operation.

    Raises:
        KeyError: If source_id or target_id is not found.
        ValueError: If no edge exists between source and target.
    """
    if source_id not in self._index:
        raise KeyError(f"Source node '{source_id}' not found")
    if target_id not in self._index:
        raise KeyError(f"Target node '{target_id}' not found")

    child = self._index[source_id]
    parent = self._index[target_id]

    # Locate the parent->child edge via the child's incoming edge list.
    doomed_edge = next(
        (e for e in child.iter_incoming_edges() if e.source.id == target_id),
        None,
    )
    if doomed_edge is None:
        raise ValueError(f"No edge exists from '{target_id}' to '{source_id}'")

    # Snapshot the edge before destroying it so the operation is auditable.
    entry = MutationEntry(
        operation="delete_edge",
        target_id=source_id,
        before_state={
            "source_id": source_id,
            "target_id": target_id,
            "edge_kind": doomed_edge.kind.value,
            "assertion_targets": list(doomed_edge.assertion_targets),
        },
        after_state={
            "source_id": source_id,
            "target_id": target_id,
        },
    )

    # The parent owns the relationship, so removal goes through it.
    parent.remove_child(child)

    # A requirement left with no parents and no root status becomes an orphan.
    if child.parent_count() == 0 and not self.has_root(source_id):
        if child.kind == NodeKind.REQUIREMENT:
            self._orphaned_ids.add(source_id)
            entry.after_state["became_orphan"] = True

    self._mutation_log.append(entry)
    return entry
|
|
1470
|
+
|
|
1471
|
+
def fix_broken_reference(
    self,
    source_id: str,
    old_target_id: str,
    new_target_id: str,
) -> MutationEntry:
    """Fix a broken reference by changing its target.

    Finds a broken reference from source to old_target and attempts to
    redirect it to new_target. If new_target also doesn't exist, the
    reference remains broken (but with updated target).

    Args:
        source_id: The source node ID with the broken reference.
        old_target_id: The current (broken) target ID.
        new_target_id: The new target ID to point to.

    Returns:
        MutationEntry recording the operation.

    Raises:
        KeyError: If source_id is not found.
        ValueError: If no broken reference exists from source to old_target.
    """
    if source_id not in self._index:
        raise KeyError(f"Source node '{source_id}' not found")

    # Find the broken reference; keep its index so it can be removed in place.
    broken_ref = None
    broken_ref_index = None
    for i, br in enumerate(self._broken_references):
        if br.source_id == source_id and br.target_id == old_target_id:
            broken_ref = br
            broken_ref_index = i
            break

    if broken_ref is None:
        raise ValueError(f"No broken reference from '{source_id}' to '{old_target_id}'")

    source = self._index[source_id]
    # new_target may legitimately be absent; that case is handled below.
    new_target = self._index.get(new_target_id)
    edge_kind = EdgeKind(broken_ref.edge_kind)

    # Record orphan status before mutation so the log can show the transition.
    was_orphan = source_id in self._orphaned_ids

    entry = MutationEntry(
        operation="fix_broken_reference",
        target_id=source_id,
        before_state={
            "source_id": source_id,
            "old_target_id": old_target_id,
            "edge_kind": broken_ref.edge_kind,
            "was_orphan": was_orphan,
        },
        after_state={
            "source_id": source_id,
            "new_target_id": new_target_id,
            "edge_kind": broken_ref.edge_kind,
        },
    )

    # Remove the old broken reference; a replacement (valid edge or new
    # broken record) is installed below depending on new_target existence.
    self._broken_references.pop(broken_ref_index)

    if new_target:
        # Create valid edge (new_target becomes parent of source).
        new_target.link(source, edge_kind)

        # Source is no longer orphan
        self._orphaned_ids.discard(source_id)
        entry.after_state["fixed"] = True
    else:
        # New target also doesn't exist - remains broken, but pointing at
        # the new target ID. Orphan status is intentionally left unchanged.
        self._broken_references.append(
            BrokenReference(
                source_id=source_id,
                target_id=new_target_id,
                edge_kind=broken_ref.edge_kind,
            )
        )
        entry.after_state["still_broken"] = True

    self._mutation_log.append(entry)
    return entry
|
|
1556
|
+
|
|
1557
|
+
|
|
1558
|
+
class GraphBuilder:
    """Builder for constructing TraceGraph from parsed content.

    Usage:
        builder = GraphBuilder()
        for content in parsed_contents:
            builder.add_parsed_content(content)
        graph = builder.build()

    Note on Privileged Access:
        GraphBuilder directly accesses GraphNode._content during construction.
        This is intentional - as the construction layer, GraphBuilder has
        "friend class" privileges to efficiently build node content without
        the overhead of set_field() calls. This pattern is acceptable because:
        1. GraphBuilder is the ONLY external class with this access
        2. Access occurs only during initial construction
        3. Post-construction, all access should use get_field()/set_field()
    """

    def __init__(self, repo_root: Path | None = None) -> None:
        """Initialize the graph builder.

        Args:
            repo_root: Repository root path. Defaults to the current
                working directory when None.
        """
        self.repo_root = repo_root or Path.cwd()
        # All nodes created so far, keyed by node ID.
        self._nodes: dict[str, GraphNode] = {}
        # (source_id, target_id, kind) triples resolved lazily in build(),
        # because a link target may not have been parsed yet.
        self._pending_links: list[tuple[str, str, EdgeKind]] = []
        # Detection: track orphan candidates and broken references
        self._orphan_candidates: set[str] = set()
        self._broken_references: list[BrokenReference] = []
|
|
1589
|
+
|
|
1590
|
+
def add_parsed_content(self, content: ParsedContent) -> None:
    """Route parsed content to the handler for its content type.

    Content whose ``content_type`` is not recognized is silently ignored.

    Args:
        content: Parsed content from a parser.
    """
    dispatch = {
        "requirement": self._add_requirement,
        "journey": self._add_journey,
        "code_ref": self._add_code_ref,
        "test_ref": self._add_test_ref,
        "test_result": self._add_test_result,
        "remainder": self._add_remainder,
    }
    handler = dispatch.get(content.content_type)
    if handler is not None:
        handler(content)
|
|
1608
|
+
|
|
1609
|
+
def _add_requirement(self, content: ParsedContent) -> None:
    """Create a REQUIREMENT node, its ASSERTION children, and queue trace links."""
    payload = content.parsed_data
    req_id = payload["id"]

    # Source file location comes from the optional parse context.
    ctx = getattr(content, "source_context", None)
    origin = ctx.source_id if ctx else ""

    req_node = GraphNode(
        id=req_id,
        kind=NodeKind.REQUIREMENT,
        label=payload.get("title", ""),
        source=SourceLocation(
            path=origin,
            line=content.start_line,
            end_line=content.end_line,
        ),
    )
    # Privileged write of _content: builder-only construction shortcut.
    req_node._content = {
        "level": payload.get("level"),
        "status": payload.get("status"),
        "hash": payload.get("hash"),
        # Raw body retained so the hash can be recomputed later.
        "body_text": payload.get("body_text", ""),
    }
    self._nodes[req_id] = req_node
    # Every requirement starts as a potential orphan; linking clears it.
    self._orphan_candidates.add(req_id)

    # One ASSERTION child per labelled assertion in the requirement body.
    for item in payload.get("assertions", []):
        assertion_id = f"{req_id}-{item['label']}"
        assertion = GraphNode(
            id=assertion_id,
            kind=NodeKind.ASSERTION,
            label=item["text"],
        )
        assertion._content = {"label": item["label"]}
        self._nodes[assertion_id] = assertion
        req_node.add_child(assertion)

    # Link targets may not be parsed yet; defer edge creation to build().
    for kind, key in ((EdgeKind.IMPLEMENTS, "implements"), (EdgeKind.REFINES, "refines")):
        for ref in payload.get(key, []):
            self._pending_links.append((req_id, ref, kind))
|
|
1660
|
+
|
|
1661
|
+
def _add_journey(self, content: ParsedContent) -> None:
    """Create a USER_JOURNEY node from parsed journey content."""
    payload = content.parsed_data
    jid = payload["id"]

    journey = GraphNode(
        id=jid,
        kind=NodeKind.USER_JOURNEY,
        label=payload.get("title", ""),
    )
    # Privileged write of _content: builder-only construction shortcut.
    journey._content = {
        "actor": payload.get("actor"),
        "goal": payload.get("goal"),
    }
    self._nodes[jid] = journey
|
|
1676
|
+
|
|
1677
|
+
def _add_code_ref(self, content: ParsedContent) -> None:
    """Add a CODE node for an implementation reference and queue its links.

    One CODE node is created per source location (the first time that
    location is seen); every entry in ``implements`` queues an IMPLEMENTS
    link from that node for resolution in build().

    Args:
        content: Parsed code-reference content.
    """
    data = content.parsed_data
    source_ctx = getattr(content, "source_context", None)
    source_id = source_ctx.source_id if source_ctx else "code"

    implements = data.get("implements", [])
    if not implements:
        # No references: no node was created by the original loop either.
        return

    # code_id is loop-invariant (one node per location), so create the
    # node once instead of re-checking on every iteration.
    code_id = f"code:{source_id}:{content.start_line}"
    if code_id not in self._nodes:
        self._nodes[code_id] = GraphNode(
            id=code_id,
            kind=NodeKind.CODE,
            label=f"Code at {source_id}:{content.start_line}",
            source=SourceLocation(
                path=source_id,
                line=content.start_line,
                end_line=content.end_line,
            ),
        )

    for impl_ref in implements:
        self._pending_links.append((code_id, impl_ref, EdgeKind.IMPLEMENTS))
|
|
1699
|
+
|
|
1700
|
+
def _add_test_ref(self, content: ParsedContent) -> None:
    """Add a TEST node for a test reference and queue its VALIDATES links.

    One TEST node is created per source location (the first time that
    location is seen); every entry in ``validates`` queues a VALIDATES
    link from that node for resolution in build().

    Args:
        content: Parsed test-reference content.
    """
    data = content.parsed_data
    source_ctx = getattr(content, "source_context", None)
    source_id = source_ctx.source_id if source_ctx else "test"

    validates = data.get("validates", [])
    if not validates:
        # No references: no node was created by the original loop either.
        return

    # test_id is loop-invariant (one node per location), so create the
    # node once instead of re-checking on every iteration.
    test_id = f"test:{source_id}:{content.start_line}"
    if test_id not in self._nodes:
        self._nodes[test_id] = GraphNode(
            id=test_id,
            kind=NodeKind.TEST,
            label=f"Test at {source_id}:{content.start_line}",
            source=SourceLocation(
                path=source_id,
                line=content.start_line,
                end_line=content.end_line,
            ),
        )

    for val_ref in validates:
        self._pending_links.append((test_id, val_ref, EdgeKind.VALIDATES))
|
|
1722
|
+
|
|
1723
|
+
def _add_test_result(self, content: ParsedContent) -> None:
    """Add a test result node.

    If the result has a test_id, auto-creates a TEST node if needed.
    If validates list is present (extracted from test name), creates
    VALIDATES edges from TEST to requirements/assertions.

    Args:
        content: Parsed test-result content (e.g. from a results report).
    """
    data = content.parsed_data
    result_id = data["id"]
    test_id = data.get("test_id")  # e.g., "test:classname::test_name"
    validates = data.get("validates", [])  # REQs extracted from test name
    source_ctx = getattr(content, "source_context", None)
    source_path = source_ctx.source_id if source_ctx else ""

    # Create a readable label from test name and class
    test_name = data.get("name", "")
    classname = data.get("classname", "")
    # Extract just the class name from dotted path
    # e.g., "TestGraphBuilder" from "tests.core.test_builder.TestGraphBuilder"
    short_class = classname.split(".")[-1] if classname else ""
    label = f"{short_class}::{test_name}" if short_class else test_name

    # Auto-create TEST node if test_id provided and doesn't exist yet
    if test_id and test_id not in self._nodes:
        test_node = GraphNode(
            id=test_id,
            kind=NodeKind.TEST,
            label=label,
            source=SourceLocation(
                path=source_path,
                line=content.start_line,
                end_line=content.end_line,
            ),
        )
        test_node._content = {
            "classname": classname,
            "name": test_name,
            "from_results": True,  # Indicates this TEST was auto-created
        }
        self._nodes[test_id] = test_node

    # Queue VALIDATES edges from TEST → REQ/Assertion based on validates list
    # NOTE(review): when test_id is None these links carry a None source and
    # are silently dropped during build() — confirm that is intended.
    for req_id in validates:
        self._pending_links.append((test_id, req_id, EdgeKind.VALIDATES))

    node = GraphNode(
        id=result_id,
        kind=NodeKind.TEST_RESULT,
        label=label,
        source=SourceLocation(
            path=source_path,
            line=content.start_line,
            end_line=content.end_line,
        ),
    )
    # Privileged write of _content (see class docstring on builder access).
    node._content = {
        "status": data.get("status"),
        "test_id": test_id,
        "duration": data.get("duration"),
        "name": test_name,
        "classname": classname,
        "message": data.get("message"),
    }
    self._nodes[result_id] = node

    # Queue edge to parent TEST node if test_id is provided
    if test_id:
        self._pending_links.append((result_id, test_id, EdgeKind.CONTAINS))
|
|
1791
|
+
|
|
1792
|
+
def _add_remainder(self, content: ParsedContent) -> None:
    """Create a REMAINDER node for content no parser claimed."""
    payload = content.parsed_data
    ctx = getattr(content, "source_context", None)
    origin = ctx.source_id if ctx else ""

    # Prefer the parser-supplied ID; otherwise derive one from the location.
    rem_id = payload.get("id") or f"rem:{origin}:{content.start_line}"
    body = payload.get("text", content.raw_text or "")

    # Truncate long text for the label; the full text lives in _content.
    if len(body) > 50:
        caption = body[:50] + "..."
    else:
        caption = body

    remainder = GraphNode(
        id=rem_id,
        kind=NodeKind.REMAINDER,
        label=caption,
        source=SourceLocation(
            path=origin,
            line=content.start_line,
            end_line=content.end_line,
        ),
    )
    remainder._content = {"text": body}
    self._nodes[rem_id] = remainder
|
|
1814
|
+
|
|
1815
|
+
def build(self) -> TraceGraph:
    """Build the final TraceGraph.

    Resolves all pending links and identifies root nodes.
    Also detects orphaned nodes and broken references.

    Returns:
        Complete TraceGraph with detection data populated.
    """
    # Resolve pending links. Links whose source node is missing fall
    # through both branches below and are dropped silently.
    for source_id, target_id, edge_kind in self._pending_links:
        source = self._nodes.get(source_id)
        target = self._nodes.get(target_id)

        if source and target:
            # Node is being linked to a parent - no longer orphan candidate
            self._orphan_candidates.discard(source_id)

            # If target is an assertion, link from its parent requirement
            # with assertion_targets set, so the child appears under the
            # parent REQ (not the assertion node) with assertion badges
            if target.kind == NodeKind.ASSERTION:
                # Find the parent requirement of this assertion
                parent_reqs = [
                    p for p in target.iter_parents() if p.kind == NodeKind.REQUIREMENT
                ]
                if parent_reqs:
                    # An assertion is expected to have one requirement
                    # parent; the first is used if there are several.
                    parent_req = parent_reqs[0]
                    assertion_label = target.get_field("label", "")
                    parent_req.link(
                        source,
                        edge_kind,
                        assertion_targets=[assertion_label] if assertion_label else None,
                    )
                else:
                    # Fallback: link directly if no parent found
                    target.link(source, edge_kind)
            else:
                # Link target as parent of source (implements relationship)
                target.link(source, edge_kind)
        elif source and not target:
            # Broken reference: target doesn't exist
            self._broken_references.append(
                BrokenReference(
                    source_id=source_id,
                    target_id=target_id,
                    edge_kind=edge_kind.value,
                )
            )

    # Identify roots (nodes with no parents)
    roots = [
        node
        for node in self._nodes.values()
        if node.is_root and node.kind == NodeKind.REQUIREMENT
    ]

    # Also include journeys as roots
    roots.extend(node for node in self._nodes.values() if node.kind == NodeKind.USER_JOURNEY)

    # Root nodes are not orphans - they're intentionally parentless
    root_ids = {r.id for r in roots}

    # Final orphan set: candidates that aren't roots
    orphaned_ids = self._orphan_candidates - root_ids

    # Populate the graph's private state directly: the builder acts as the
    # construction layer (see class docstring on privileged access).
    graph = TraceGraph(repo_root=self.repo_root)
    graph._roots = roots
    graph._index = dict(self._nodes)
    graph._orphaned_ids = orphaned_ids
    graph._broken_references = list(self._broken_references)
    return graph
|