elspais 0.9.3__py3-none-any.whl → 0.11.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- elspais/cli.py +141 -10
- elspais/commands/hash_cmd.py +72 -26
- elspais/commands/reformat_cmd.py +458 -0
- elspais/commands/trace.py +157 -3
- elspais/commands/validate.py +44 -16
- elspais/core/models.py +2 -0
- elspais/core/parser.py +68 -24
- elspais/reformat/__init__.py +50 -0
- elspais/reformat/detector.py +119 -0
- elspais/reformat/hierarchy.py +246 -0
- elspais/reformat/line_breaks.py +220 -0
- elspais/reformat/prompts.py +123 -0
- elspais/reformat/transformer.py +264 -0
- elspais/sponsors/__init__.py +432 -0
- elspais/trace_view/__init__.py +54 -0
- elspais/trace_view/coverage.py +183 -0
- elspais/trace_view/generators/__init__.py +12 -0
- elspais/trace_view/generators/base.py +329 -0
- elspais/trace_view/generators/csv.py +122 -0
- elspais/trace_view/generators/markdown.py +175 -0
- elspais/trace_view/html/__init__.py +31 -0
- elspais/trace_view/html/generator.py +1006 -0
- elspais/trace_view/html/templates/base.html +283 -0
- elspais/trace_view/html/templates/components/code_viewer_modal.html +14 -0
- elspais/trace_view/html/templates/components/file_picker_modal.html +20 -0
- elspais/trace_view/html/templates/components/legend_modal.html +69 -0
- elspais/trace_view/html/templates/components/review_panel.html +118 -0
- elspais/trace_view/html/templates/partials/review/help/help-panel.json +244 -0
- elspais/trace_view/html/templates/partials/review/help/onboarding.json +77 -0
- elspais/trace_view/html/templates/partials/review/help/tooltips.json +237 -0
- elspais/trace_view/html/templates/partials/review/review-comments.js +928 -0
- elspais/trace_view/html/templates/partials/review/review-data.js +961 -0
- elspais/trace_view/html/templates/partials/review/review-help.js +679 -0
- elspais/trace_view/html/templates/partials/review/review-init.js +177 -0
- elspais/trace_view/html/templates/partials/review/review-line-numbers.js +429 -0
- elspais/trace_view/html/templates/partials/review/review-packages.js +1029 -0
- elspais/trace_view/html/templates/partials/review/review-position.js +540 -0
- elspais/trace_view/html/templates/partials/review/review-resize.js +115 -0
- elspais/trace_view/html/templates/partials/review/review-status.js +659 -0
- elspais/trace_view/html/templates/partials/review/review-sync.js +992 -0
- elspais/trace_view/html/templates/partials/review-styles.css +2238 -0
- elspais/trace_view/html/templates/partials/scripts.js +1741 -0
- elspais/trace_view/html/templates/partials/styles.css +1756 -0
- elspais/trace_view/models.py +353 -0
- elspais/trace_view/review/__init__.py +60 -0
- elspais/trace_view/review/branches.py +1149 -0
- elspais/trace_view/review/models.py +1205 -0
- elspais/trace_view/review/position.py +609 -0
- elspais/trace_view/review/server.py +1056 -0
- elspais/trace_view/review/status.py +470 -0
- elspais/trace_view/review/storage.py +1367 -0
- elspais/trace_view/scanning.py +213 -0
- elspais/trace_view/specs/README.md +84 -0
- elspais/trace_view/specs/tv-d00001-template-architecture.md +36 -0
- elspais/trace_view/specs/tv-d00002-css-extraction.md +37 -0
- elspais/trace_view/specs/tv-d00003-js-extraction.md +43 -0
- elspais/trace_view/specs/tv-d00004-build-embedding.md +40 -0
- elspais/trace_view/specs/tv-d00005-test-format.md +78 -0
- elspais/trace_view/specs/tv-d00010-review-data-models.md +33 -0
- elspais/trace_view/specs/tv-d00011-review-storage.md +33 -0
- elspais/trace_view/specs/tv-d00012-position-resolution.md +33 -0
- elspais/trace_view/specs/tv-d00013-git-branches.md +31 -0
- elspais/trace_view/specs/tv-d00014-review-api-server.md +31 -0
- elspais/trace_view/specs/tv-d00015-status-modifier.md +27 -0
- elspais/trace_view/specs/tv-d00016-js-integration.md +33 -0
- elspais/trace_view/specs/tv-p00001-html-generator.md +33 -0
- elspais/trace_view/specs/tv-p00002-review-system.md +29 -0
- {elspais-0.9.3.dist-info → elspais-0.11.1.dist-info}/METADATA +36 -18
- elspais-0.11.1.dist-info/RECORD +101 -0
- elspais-0.9.3.dist-info/RECORD +0 -40
- {elspais-0.9.3.dist-info → elspais-0.11.1.dist-info}/WHEEL +0 -0
- {elspais-0.9.3.dist-info → elspais-0.11.1.dist-info}/entry_points.txt +0 -0
- {elspais-0.9.3.dist-info → elspais-0.11.1.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,1367 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Review Storage Operations Module for trace_view
|
|
4
|
+
|
|
5
|
+
CRUD operations for the review system:
|
|
6
|
+
- Config operations (load/save)
|
|
7
|
+
- Review flag operations (load/save)
|
|
8
|
+
- Thread operations (load/save/add/resolve/unresolve)
|
|
9
|
+
- Status request operations (load/save/create/approve/apply)
|
|
10
|
+
- Package operations (load/save/create/update/delete)
|
|
11
|
+
- Merge operations for combining multiple user branches
|
|
12
|
+
|
|
13
|
+
IMPLEMENTS REQUIREMENTS:
|
|
14
|
+
REQ-tv-d00011: Review Storage Operations
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
import json
|
|
18
|
+
import os
|
|
19
|
+
import re
|
|
20
|
+
import tempfile
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
from typing import Any, Dict, List, Optional
|
|
23
|
+
|
|
24
|
+
from .models import (
|
|
25
|
+
ReviewConfig,
|
|
26
|
+
ReviewFlag,
|
|
27
|
+
Thread,
|
|
28
|
+
Comment,
|
|
29
|
+
ThreadsFile,
|
|
30
|
+
StatusFile,
|
|
31
|
+
StatusRequest,
|
|
32
|
+
Approval,
|
|
33
|
+
ReviewPackage,
|
|
34
|
+
PackagesFile,
|
|
35
|
+
parse_iso_datetime,
|
|
36
|
+
)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
# =============================================================================
|
|
40
|
+
# Helper Functions
|
|
41
|
+
# REQ-tv-d00011-A: Atomic write operations
|
|
42
|
+
# =============================================================================
|
|
43
|
+
|
|
44
|
+
def atomic_write_json(path: Path, data: Dict[str, Any]) -> None:
    """
    Atomically write JSON data to a file.

    REQ-tv-d00011-A: Uses temp file + rename pattern to ensure file is either
    fully written or not changed at all. The temp file is created in the
    target's own directory so the final rename never crosses a filesystem
    boundary (cross-device renames are not atomic).

    Args:
        path: Target file path
        data: JSON-serializable dictionary

    Raises:
        TypeError: If data is not JSON-serializable (temp file is cleaned up)
        OSError: On filesystem errors
    """
    # Ensure parent directories exist
    path.parent.mkdir(parents=True, exist_ok=True)

    # Write to temp file in same directory (for atomic rename)
    fd, temp_path = tempfile.mkstemp(
        suffix='.json',
        prefix='.tmp_',
        dir=path.parent
    )
    try:
        with os.fdopen(fd, 'w') as f:
            json.dump(data, f, indent=2)
        # os.replace (not os.rename): atomically overwrites an existing
        # target on all platforms. os.rename raises FileExistsError on
        # Windows when the destination already exists, which would break
        # every re-save of an existing JSON file there.
        os.replace(temp_path, path)
    except Exception:
        # Clean up temp file on failure
        if os.path.exists(temp_path):
            os.unlink(temp_path)
        raise
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def read_json(path: Path) -> Dict[str, Any]:
    """
    Load and parse a JSON file into a dictionary.

    Args:
        path: Path to JSON file

    Returns:
        Parsed JSON as dictionary

    Raises:
        FileNotFoundError: If file doesn't exist
        json.JSONDecodeError: If file contains invalid JSON
    """
    return json.loads(path.read_text())
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
# =============================================================================
|
|
95
|
+
# Path Functions
|
|
96
|
+
# REQ-tv-d00011-H: Storage paths convention
|
|
97
|
+
# REQ-tv-d00011-I: Requirement ID normalization
|
|
98
|
+
# =============================================================================
|
|
99
|
+
|
|
100
|
+
def normalize_req_id(req_id: str) -> str:
    """
    Normalize a requirement ID for use in file paths.

    REQ-tv-d00011-I: Replace colons and slashes with underscores so the
    ID is safe as a directory name.

    Args:
        req_id: Original requirement ID

    Returns:
        Normalized requirement ID safe for file paths
    """
    return req_id.replace(':', '_').replace('/', '_')
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def get_reviews_root(repo_root: Path) -> Path:
    """
    Return the root directory for review storage.

    REQ-tv-d00011-H: All review data lives under ``.reviews`` in the repo.

    Args:
        repo_root: Repository root path

    Returns:
        Path to .reviews directory
    """
    return repo_root.joinpath('.reviews')
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def get_req_dir(repo_root: Path, req_id: str) -> Path:
    """
    Return the directory holding a requirement's review data.

    REQ-tv-d00011-H: .reviews/reqs/{normalized-req-id}/

    Args:
        repo_root: Repository root path
        req_id: Requirement ID

    Returns:
        Path to requirement's review directory
    """
    return get_reviews_root(repo_root) / 'reqs' / normalize_req_id(req_id)
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def get_threads_path(repo_root: Path, req_id: str) -> Path:
    """
    Return the threads.json path for a requirement.

    REQ-tv-d00011-H: .reviews/reqs/{normalized-req-id}/threads.json

    Args:
        repo_root: Repository root path
        req_id: Requirement ID

    Returns:
        Path to threads.json
    """
    return get_req_dir(repo_root, req_id).joinpath('threads.json')
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def get_status_path(repo_root: Path, req_id: str) -> Path:
    """
    Return the status.json path for a requirement.

    REQ-tv-d00011-H: .reviews/reqs/{normalized-req-id}/status.json

    Args:
        repo_root: Repository root path
        req_id: Requirement ID

    Returns:
        Path to status.json
    """
    return get_req_dir(repo_root, req_id).joinpath('status.json')
|
|
177
|
+
|
|
178
|
+
|
|
179
|
+
def get_review_flag_path(repo_root: Path, req_id: str) -> Path:
    """
    Return the flag.json path for a requirement.

    REQ-tv-d00011-H: .reviews/reqs/{normalized-req-id}/flag.json

    Args:
        repo_root: Repository root path
        req_id: Requirement ID

    Returns:
        Path to flag.json
    """
    return get_req_dir(repo_root, req_id).joinpath('flag.json')
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
def get_config_path(repo_root: Path) -> Path:
    """
    Return the review-system config.json path.

    REQ-tv-d00011-H: .reviews/config.json

    Args:
        repo_root: Repository root path

    Returns:
        Path to config.json
    """
    return get_reviews_root(repo_root).joinpath('config.json')
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
def get_packages_path(repo_root: Path) -> Path:
    """
    Return the packages.json path (v1 storage format).

    REQ-tv-d00011-H: .reviews/packages.json

    Args:
        repo_root: Repository root path

    Returns:
        Path to packages.json
    """
    return get_reviews_root(repo_root).joinpath('packages.json')
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
# =============================================================================
|
|
226
|
+
# V2 Path Functions (Package-Centric Storage)
|
|
227
|
+
# REQ-d00096: Review Storage Architecture
|
|
228
|
+
# =============================================================================
|
|
229
|
+
|
|
230
|
+
def get_index_path(repo_root: Path) -> Path:
    """
    Return the index.json path (v2 storage format).

    REQ-d00096-D: .reviews/index.json

    Args:
        repo_root: Repository root path

    Returns:
        Path to index.json
    """
    return get_reviews_root(repo_root).joinpath('index.json')
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
def get_package_dir(repo_root: Path, package_id: str) -> Path:
    """
    Return the directory for a specific package (v2 format).

    REQ-d00096-A: .reviews/packages/{pkg-id}/

    Args:
        repo_root: Repository root path
        package_id: Package UUID

    Returns:
        Path to package directory
    """
    return get_reviews_root(repo_root).joinpath('packages', package_id)
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
def get_package_metadata_path(repo_root: Path, package_id: str) -> Path:
    """
    Return the package.json path for a package (v2 format).

    REQ-d00096-B: .reviews/packages/{pkg-id}/package.json

    Args:
        repo_root: Repository root path
        package_id: Package UUID

    Returns:
        Path to package.json
    """
    return get_package_dir(repo_root, package_id).joinpath('package.json')
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
def get_package_threads_path(repo_root: Path, package_id: str, req_id: str) -> Path:
    """
    Return the threads.json path for a requirement inside a package (v2 format).

    REQ-d00096-C: .reviews/packages/{pkg-id}/reqs/{req-id}/threads.json

    Args:
        repo_root: Repository root path
        package_id: Package UUID
        req_id: Requirement ID

    Returns:
        Path to threads.json
    """
    package_dir = get_package_dir(repo_root, package_id)
    return package_dir.joinpath('reqs', normalize_req_id(req_id), 'threads.json')
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
def get_archive_dir(repo_root: Path) -> Path:
    """
    Return the root directory for archived packages.

    REQ-d00097-A: .reviews/archive/

    Args:
        repo_root: Repository root path

    Returns:
        Path to archive directory
    """
    return get_reviews_root(repo_root).joinpath('archive')
|
|
308
|
+
|
|
309
|
+
|
|
310
|
+
def get_archived_package_dir(repo_root: Path, package_id: str) -> Path:
    """
    Return the directory for an archived package.

    REQ-d00097-A: .reviews/archive/{pkg-id}/

    Args:
        repo_root: Repository root path
        package_id: Package UUID

    Returns:
        Path to archived package directory
    """
    return get_archive_dir(repo_root).joinpath(package_id)
|
|
324
|
+
|
|
325
|
+
|
|
326
|
+
def get_archived_package_metadata_path(repo_root: Path, package_id: str) -> Path:
    """
    Return the package.json path for an archived package.

    REQ-d00097-B: .reviews/archive/{pkg-id}/package.json

    Args:
        repo_root: Repository root path
        package_id: Package UUID

    Returns:
        Path to archived package.json
    """
    return get_archived_package_dir(repo_root, package_id).joinpath('package.json')
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
def get_archived_package_threads_path(
    repo_root: Path,
    package_id: str,
    req_id: str
) -> Path:
    """
    Return the threads.json path for a requirement inside an archived package.

    REQ-d00097-B: .reviews/archive/{pkg-id}/reqs/{req-id}/threads.json

    Args:
        repo_root: Repository root path
        package_id: Package UUID
        req_id: Requirement ID

    Returns:
        Path to archived threads.json
    """
    archived_dir = get_archived_package_dir(repo_root, package_id)
    return archived_dir.joinpath('reqs', normalize_req_id(req_id), 'threads.json')
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
# =============================================================================
|
|
365
|
+
# Config Operations
|
|
366
|
+
# REQ-tv-d00011-F: Config storage operations
|
|
367
|
+
# =============================================================================
|
|
368
|
+
|
|
369
|
+
def load_config(repo_root: Path) -> ReviewConfig:
    """
    Load review system configuration.

    REQ-tv-d00011-F: Falls back to the default config when no
    config.json exists yet.

    Args:
        repo_root: Repository root path

    Returns:
        ReviewConfig instance
    """
    path = get_config_path(repo_root)
    if path.exists():
        return ReviewConfig.from_dict(read_json(path))
    return ReviewConfig.default()
|
|
386
|
+
|
|
387
|
+
|
|
388
|
+
def save_config(repo_root: Path, config: ReviewConfig) -> None:
    """
    Persist review system configuration.

    REQ-tv-d00011-F: Written atomically (temp file + rename).

    Args:
        repo_root: Repository root path
        config: ReviewConfig instance to save
    """
    atomic_write_json(get_config_path(repo_root), config.to_dict())
|
|
400
|
+
|
|
401
|
+
|
|
402
|
+
# =============================================================================
|
|
403
|
+
# Review Flag Operations
|
|
404
|
+
# REQ-tv-d00011-D: Review flag storage operations
|
|
405
|
+
# =============================================================================
|
|
406
|
+
|
|
407
|
+
def load_review_flag(repo_root: Path, req_id: str) -> ReviewFlag:
    """
    Load the review flag for a requirement.

    REQ-tv-d00011-D: A missing flag.json means the flag is cleared.

    Args:
        repo_root: Repository root path
        req_id: Requirement ID

    Returns:
        ReviewFlag instance
    """
    path = get_review_flag_path(repo_root, req_id)
    if path.exists():
        return ReviewFlag.from_dict(read_json(path))
    return ReviewFlag.cleared()
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
def save_review_flag(repo_root: Path, req_id: str, flag: ReviewFlag) -> None:
    """
    Persist the review flag for a requirement.

    REQ-tv-d00011-D: Written atomically (temp file + rename).

    Args:
        repo_root: Repository root path
        req_id: Requirement ID
        flag: ReviewFlag instance to save
    """
    atomic_write_json(get_review_flag_path(repo_root, req_id), flag.to_dict())
|
|
440
|
+
|
|
441
|
+
|
|
442
|
+
# =============================================================================
|
|
443
|
+
# Thread Operations
|
|
444
|
+
# REQ-tv-d00011-B: Thread storage operations
|
|
445
|
+
# =============================================================================
|
|
446
|
+
|
|
447
|
+
def load_threads(repo_root: Path, req_id: str) -> ThreadsFile:
    """
    Load the comment threads for a requirement.

    REQ-tv-d00011-B: A missing threads.json yields an empty ThreadsFile.

    Args:
        repo_root: Repository root path
        req_id: Requirement ID

    Returns:
        ThreadsFile instance
    """
    path = get_threads_path(repo_root, req_id)
    if path.exists():
        return ThreadsFile.from_dict(read_json(path))
    return ThreadsFile(reqId=normalize_req_id(req_id), threads=[])
|
|
466
|
+
|
|
467
|
+
|
|
468
|
+
def save_threads(repo_root: Path, req_id: str, threads_file: ThreadsFile) -> None:
    """
    Persist the threads file for a requirement.

    REQ-tv-d00011-B: Written atomically (temp file + rename).

    Args:
        repo_root: Repository root path
        req_id: Requirement ID
        threads_file: ThreadsFile instance to save
    """
    atomic_write_json(get_threads_path(repo_root, req_id), threads_file.to_dict())
|
|
481
|
+
|
|
482
|
+
|
|
483
|
+
def add_thread(repo_root: Path, req_id: str, thread: Thread) -> Thread:
    """
    Append a new thread to a requirement and persist it.

    REQ-tv-d00011-B: Creates the threads file if it does not exist yet.

    Args:
        repo_root: Repository root path
        req_id: Requirement ID
        thread: Thread to add

    Returns:
        The added thread
    """
    existing = load_threads(repo_root, req_id)
    existing.threads.append(thread)
    save_threads(repo_root, req_id, existing)
    return thread
|
|
501
|
+
|
|
502
|
+
|
|
503
|
+
def add_comment_to_thread(
    repo_root: Path,
    req_id: str,
    thread_id: str,
    author: str,
    body: str
) -> Comment:
    """
    Add a comment to an existing thread and persist it.

    REQ-tv-d00011-B: Persists the comment and returns it.

    Args:
        repo_root: Repository root path
        req_id: Requirement ID
        thread_id: Thread UUID
        author: Comment author username
        body: Comment body text

    Returns:
        The created comment

    Raises:
        ValueError: If thread not found
    """
    threads_file = load_threads(repo_root, req_id)

    # Locate the target thread by its UUID.
    match = next(
        (t for t in threads_file.threads if t.threadId == thread_id),
        None,
    )
    if match is None:
        raise ValueError(f"Thread not found: {thread_id}")

    new_comment = match.add_comment(author, body)
    save_threads(repo_root, req_id, threads_file)
    return new_comment
|
|
543
|
+
|
|
544
|
+
|
|
545
|
+
def resolve_thread(
    repo_root: Path,
    req_id: str,
    thread_id: str,
    user: str
) -> bool:
    """
    Mark a thread as resolved and persist the change.

    REQ-tv-d00011-B: Persists resolution state.

    Args:
        repo_root: Repository root path
        req_id: Requirement ID
        thread_id: Thread UUID
        user: Username resolving the thread

    Returns:
        True if resolved, False if thread not found
    """
    threads_file = load_threads(repo_root, req_id)
    target = next(
        (t for t in threads_file.threads if t.threadId == thread_id),
        None,
    )
    if target is None:
        return False

    target.resolve(user)
    save_threads(repo_root, req_id, threads_file)
    return True
|
|
574
|
+
|
|
575
|
+
|
|
576
|
+
def unresolve_thread(repo_root: Path, req_id: str, thread_id: str) -> bool:
    """
    Mark a thread as unresolved and persist the change.

    REQ-tv-d00011-B: Persists unresolved state.

    Args:
        repo_root: Repository root path
        req_id: Requirement ID
        thread_id: Thread UUID

    Returns:
        True if unresolved, False if thread not found
    """
    threads_file = load_threads(repo_root, req_id)
    target = next(
        (t for t in threads_file.threads if t.threadId == thread_id),
        None,
    )
    if target is None:
        return False

    target.unresolve()
    save_threads(repo_root, req_id, threads_file)
    return True
|
|
599
|
+
|
|
600
|
+
|
|
601
|
+
# =============================================================================
|
|
602
|
+
# Status Request Operations
|
|
603
|
+
# REQ-tv-d00011-C: Status request storage operations
|
|
604
|
+
# =============================================================================
|
|
605
|
+
|
|
606
|
+
def load_status_requests(repo_root: Path, req_id: str) -> StatusFile:
    """
    Load status change requests for a requirement.

    REQ-tv-d00011-C: A missing status.json yields an empty StatusFile.

    Args:
        repo_root: Repository root path
        req_id: Requirement ID

    Returns:
        StatusFile instance
    """
    path = get_status_path(repo_root, req_id)
    if path.exists():
        return StatusFile.from_dict(read_json(path))
    return StatusFile(reqId=normalize_req_id(req_id), requests=[])
|
|
625
|
+
|
|
626
|
+
|
|
627
|
+
def save_status_requests(repo_root: Path, req_id: str, status_file: StatusFile) -> None:
    """
    Persist the status requests file for a requirement.

    REQ-tv-d00011-C: Written atomically (temp file + rename).

    Args:
        repo_root: Repository root path
        req_id: Requirement ID
        status_file: StatusFile instance to save
    """
    atomic_write_json(get_status_path(repo_root, req_id), status_file.to_dict())
|
|
640
|
+
|
|
641
|
+
|
|
642
|
+
def create_status_request(
    repo_root: Path,
    req_id: str,
    request: StatusRequest
) -> StatusRequest:
    """
    Append a new status change request and persist it.

    REQ-tv-d00011-C: Persists request and returns it.

    Args:
        repo_root: Repository root path
        req_id: Requirement ID
        request: StatusRequest to create

    Returns:
        The created request
    """
    existing = load_status_requests(repo_root, req_id)
    existing.requests.append(request)
    save_status_requests(repo_root, req_id, existing)
    return request
|
|
664
|
+
|
|
665
|
+
|
|
666
|
+
def add_approval(
    repo_root: Path,
    req_id: str,
    request_id: str,
    user: str,
    decision: str,
    comment: Optional[str] = None
) -> Approval:
    """
    Record an approval decision on a status request and persist it.

    REQ-tv-d00011-C: Persists approval and returns it.

    Args:
        repo_root: Repository root path
        req_id: Requirement ID
        request_id: Request UUID
        user: Approving user
        decision: "approve" or "reject"
        comment: Optional comment

    Returns:
        The created approval

    Raises:
        ValueError: If request not found
    """
    status_file = load_status_requests(repo_root, req_id)

    # Locate the target request by its UUID.
    match = next(
        (r for r in status_file.requests if r.requestId == request_id),
        None,
    )
    if match is None:
        raise ValueError(f"Status request not found: {request_id}")

    approval = match.add_approval(user, decision, comment)
    save_status_requests(repo_root, req_id, status_file)
    return approval
|
|
708
|
+
|
|
709
|
+
|
|
710
|
+
def mark_request_applied(repo_root: Path, req_id: str, request_id: str) -> bool:
    """
    Mark a status request as applied and persist the change.

    REQ-tv-d00011-C: Persists applied state.

    Args:
        repo_root: Repository root path
        req_id: Requirement ID
        request_id: Request UUID

    Returns:
        True if marked applied, False if not found

    Raises:
        ValueError: If request is not in approved state
    """
    status_file = load_status_requests(repo_root, req_id)
    target = next(
        (r for r in status_file.requests if r.requestId == request_id),
        None,
    )
    if target is None:
        return False

    # mark_applied raises ValueError when the request is not approved;
    # in that case nothing is saved and the error propagates.
    target.mark_applied()
    save_status_requests(repo_root, req_id, status_file)
    return True
|
|
736
|
+
|
|
737
|
+
|
|
738
|
+
# =============================================================================
|
|
739
|
+
# Package Operations
|
|
740
|
+
# REQ-tv-d00011-E: Package storage operations
|
|
741
|
+
# =============================================================================
|
|
742
|
+
|
|
743
|
+
def load_packages(repo_root: Path) -> PackagesFile:
    """
    Load the packages file, guaranteeing a default package exists.

    REQ-tv-d00011-E: Returns a file containing only the default package
    when packages.json does not exist; otherwise inserts a default
    package at the front if the stored file lacks one.

    Args:
        repo_root: Repository root path

    Returns:
        PackagesFile instance
    """
    packages_path = get_packages_path(repo_root)
    if not packages_path.exists():
        return PackagesFile(packages=[ReviewPackage.create_default()])

    packages_file = PackagesFile.from_dict(read_json(packages_path))

    # Guarantee a default package is always present.
    if packages_file.get_default() is None:
        packages_file.packages.insert(0, ReviewPackage.create_default())

    return packages_file
|
|
769
|
+
|
|
770
|
+
|
|
771
|
+
def save_packages(repo_root: Path, packages_file: PackagesFile) -> None:
    """
    Persist the packages file.

    REQ-tv-d00011-E: Written atomically (temp file + rename).

    Args:
        repo_root: Repository root path
        packages_file: PackagesFile instance to save
    """
    atomic_write_json(get_packages_path(repo_root), packages_file.to_dict())
|
|
783
|
+
|
|
784
|
+
|
|
785
|
+
def create_package(repo_root: Path, package: ReviewPackage) -> ReviewPackage:
    """
    Append a new package and persist it.

    REQ-tv-d00011-E: Persists package and returns it.

    Args:
        repo_root: Repository root path
        package: ReviewPackage to create

    Returns:
        The created package
    """
    existing = load_packages(repo_root)
    existing.packages.append(package)
    save_packages(repo_root, existing)
    return package
|
|
802
|
+
|
|
803
|
+
|
|
804
|
+
def update_package(repo_root: Path, package: ReviewPackage) -> bool:
    """
    Update an existing package.

    REQ-tv-d00011-E: Replaces the stored package that shares the given
    package's ID and persists the change.

    Args:
        repo_root: Repository root path
        package: ReviewPackage with updated data

    Returns:
        True if updated, False if package not found
    """
    current = load_packages(repo_root)
    for idx, existing in enumerate(current.packages):
        if existing.packageId != package.packageId:
            continue
        current.packages[idx] = package
        save_packages(repo_root, current)
        return True
    return False
|
|
826
|
+
|
|
827
|
+
|
|
828
|
+
def delete_package(repo_root: Path, package_id: str) -> bool:
    """
    Delete a package by ID.

    REQ-tv-d00011-E: Removes the matching package from the packages file
    and persists the change.

    Args:
        repo_root: Repository root path
        package_id: Package UUID

    Returns:
        True if deleted, False if package not found
    """
    current = load_packages(repo_root)
    for idx, existing in enumerate(current.packages):
        if existing.packageId != package_id:
            continue
        del current.packages[idx]
        save_packages(repo_root, current)
        return True
    return False
|
|
850
|
+
|
|
851
|
+
|
|
852
|
+
def add_req_to_package(repo_root: Path, package_id: str, req_id: str) -> bool:
    """
    Add a requirement ID to a package.

    REQ-tv-d00011-E: Prevents duplicates — a req already in the package is
    left untouched (and nothing is rewritten on disk in that case).

    Args:
        repo_root: Repository root path
        package_id: Package UUID
        req_id: Requirement ID to add

    Returns:
        True if the package was found (req added if not already present),
        False if package not found
    """
    current = load_packages(repo_root)
    for pkg in current.packages:
        if pkg.packageId != package_id:
            continue
        if req_id not in pkg.reqIds:
            pkg.reqIds.append(req_id)
            save_packages(repo_root, current)
        return True
    return False
|
|
876
|
+
|
|
877
|
+
|
|
878
|
+
def remove_req_from_package(repo_root: Path, package_id: str, req_id: str) -> bool:
    """
    Remove a requirement ID from a package.

    REQ-tv-d00011-E: Persists the change (only when the req was actually
    present; otherwise the file is left untouched).

    Args:
        repo_root: Repository root path
        package_id: Package UUID
        req_id: Requirement ID to remove

    Returns:
        True if the package was found (req removed if present),
        False if package not found
    """
    current = load_packages(repo_root)
    for pkg in current.packages:
        if pkg.packageId != package_id:
            continue
        if req_id in pkg.reqIds:
            pkg.reqIds.remove(req_id)
            save_packages(repo_root, current)
        return True
    return False
|
|
902
|
+
|
|
903
|
+
|
|
904
|
+
# =============================================================================
|
|
905
|
+
# Merge Operations
|
|
906
|
+
# REQ-tv-d00011-G: Merge operations
|
|
907
|
+
# REQ-tv-d00011-J: Deduplication and timestamp-based conflict resolution
|
|
908
|
+
# =============================================================================
|
|
909
|
+
|
|
910
|
+
def merge_threads(local: ThreadsFile, remote: ThreadsFile) -> ThreadsFile:
    """
    Merge thread files from local and remote.

    REQ-tv-d00011-G: Combines data from multiple user branches.
    REQ-tv-d00011-J: Deduplicates by ID and uses timestamp-based conflict
    resolution.

    Strategy:
    - Threads unique to either side (by threadId) are kept as-is
    - Threads present on both sides have their comments merged by comment id
    - Resolution state: if either side is resolved, the merge is resolved

    Args:
        local: Local threads file
        remote: Remote threads file

    Returns:
        Merged ThreadsFile
    """
    # Index local threads; entries are popped as their remote twin is seen.
    pending_local: Dict[str, Thread] = {t.threadId: t for t in local.threads}

    combined: List[Thread] = []
    for remote_thread in remote.threads:
        local_twin = pending_local.pop(remote_thread.threadId, None)
        if local_twin is None:
            # Remote-only thread: keep verbatim.
            combined.append(remote_thread)
        else:
            combined.append(_merge_single_thread(local_twin, remote_thread))

    # Whatever survived the pops exists only locally.
    combined.extend(pending_local.values())

    return ThreadsFile(reqId=local.reqId, threads=combined)
|
|
950
|
+
|
|
951
|
+
|
|
952
|
+
def _merge_single_thread(local: Thread, remote: Thread) -> Thread:
    """
    Merge two versions of the same thread.

    REQ-tv-d00011-J: Deduplicates comments by ID (local copy wins when both
    sides have a comment) and orders the result by timestamp.
    """
    # Remote comments first, then overlay local — so for any shared id the
    # local copy is kept, matching the original local-preference rule.
    comments_by_id = {c.id: c for c in remote.comments}
    comments_by_id.update({c.id: c for c in local.comments})

    merged_comments = sorted(
        comments_by_id.values(),
        key=lambda c: parse_iso_datetime(c.timestamp),
    )

    # A thread resolved on either side stays resolved; attribution comes
    # from whichever side actually carries the resolved state (remote wins
    # when both are resolved).
    resolved = local.resolved or remote.resolved
    if remote.resolved:
        resolved_by, resolved_at = remote.resolvedBy, remote.resolvedAt
    else:
        resolved_by, resolved_at = local.resolvedBy, local.resolvedAt

    return Thread(
        threadId=local.threadId,
        reqId=local.reqId,
        createdBy=local.createdBy,
        createdAt=local.createdAt,
        position=local.position,  # local position wins
        resolved=resolved,
        resolvedBy=resolved_by,
        resolvedAt=resolved_at,
        comments=merged_comments
    )
|
|
990
|
+
|
|
991
|
+
|
|
992
|
+
def merge_status_files(local: StatusFile, remote: StatusFile) -> StatusFile:
    """
    Merge status files from local and remote.

    REQ-tv-d00011-G: Combines data from multiple user branches.
    REQ-tv-d00011-J: Deduplicates by ID and uses timestamp-based conflict
    resolution.

    Strategy:
    - Requests unique to either side (by requestId) are kept as-is
    - Requests present on both sides have their approvals merged
    - State is recalculated from the merged approvals

    Args:
        local: Local status file
        remote: Remote status file

    Returns:
        Merged StatusFile
    """
    # Index local requests; entries are popped as their remote twin is seen.
    pending_local: Dict[str, StatusRequest] = {r.requestId: r for r in local.requests}

    combined: List[StatusRequest] = []
    for remote_request in remote.requests:
        local_twin = pending_local.pop(remote_request.requestId, None)
        if local_twin is None:
            # Remote-only request: keep verbatim.
            combined.append(remote_request)
        else:
            combined.append(_merge_single_request(local_twin, remote_request))

    # Whatever survived the pops exists only locally.
    combined.extend(pending_local.values())

    return StatusFile(reqId=local.reqId, requests=combined)
|
|
1032
|
+
|
|
1033
|
+
|
|
1034
|
+
def _merge_single_request(local: StatusRequest, remote: StatusRequest) -> StatusRequest:
    """
    Merge two versions of the same status request.

    REQ-tv-d00011-J: One approval is kept per user; when both sides have an
    approval from the same user, the one with the later timestamp wins
    (remote wins ties).
    """
    local_by_user = {a.user: a for a in local.approvals}
    remote_by_user = {a.user: a for a in remote.approvals}

    merged_approvals = []
    for user in set(local_by_user) | set(remote_by_user):
        mine = local_by_user.get(user)
        theirs = remote_by_user.get(user)
        if mine is None:
            merged_approvals.append(theirs)
        elif theirs is None:
            merged_approvals.append(mine)
        elif parse_iso_datetime(theirs.at) >= parse_iso_datetime(mine.at):
            # Both sides approved: later timestamp wins, remote on a tie.
            merged_approvals.append(theirs)
        else:
            merged_approvals.append(mine)

    # Request metadata is taken from local; only approvals (and the state
    # derived from them) reflect the merge.
    merged = StatusRequest(
        requestId=local.requestId,
        reqId=local.reqId,
        type=local.type,
        fromStatus=local.fromStatus,
        toStatus=local.toStatus,
        requestedBy=local.requestedBy,
        requestedAt=local.requestedAt,
        justification=local.justification,
        approvals=merged_approvals,
        requiredApprovers=local.requiredApprovers,
        state=local.state  # Will be recalculated
    )

    # Recalculate state based on merged approvals
    merged._update_state()

    return merged
|
|
1083
|
+
|
|
1084
|
+
|
|
1085
|
+
def merge_review_flags(local: ReviewFlag, remote: ReviewFlag) -> ReviewFlag:
    """
    Merge review flags from local and remote.

    REQ-tv-d00011-G: Combines data from multiple user branches.
    REQ-tv-d00011-J: Uses timestamp-based conflict resolution.

    Strategy:
    - If neither flagged, return unflagged
    - If only one flagged, return that one
    - If both flagged, take the newer flag's attribution but union the scopes

    Args:
        local: Local review flag
        remote: Remote review flag

    Returns:
        Merged ReviewFlag
    """
    # Neither flagged
    if not local.flaggedForReview and not remote.flaggedForReview:
        return ReviewFlag.cleared()

    # Only one flagged
    if not local.flaggedForReview:
        return remote
    if not remote.flaggedForReview:
        return local

    # Both flagged - take newer attribution but merge scopes.
    # Sort the unioned scope: list(set(...)) ordering is hash-randomized, so
    # merging identical data could serialize in a different order on every
    # run — noisy diffs for git-tracked review files. Sorting makes the
    # merge deterministic (assumes scope entries are strings).
    merged_scope = sorted(set(local.scope) | set(remote.scope))

    local_time = parse_iso_datetime(local.flaggedAt)
    remote_time = parse_iso_datetime(remote.flaggedAt)
    # Remote wins ties, matching the other merge helpers.
    newer = remote if remote_time >= local_time else local

    return ReviewFlag(
        flaggedForReview=True,
        flaggedBy=newer.flaggedBy,
        flaggedAt=newer.flaggedAt,
        reason=newer.reason,
        scope=merged_scope
    )
|
|
1139
|
+
|
|
1140
|
+
|
|
1141
|
+
# =============================================================================
|
|
1142
|
+
# Archive Operations
|
|
1143
|
+
# REQ-d00097: Review Package Archival
|
|
1144
|
+
# =============================================================================
|
|
1145
|
+
|
|
1146
|
+
def archive_package(
    repo_root: Path,
    package_id: str,
    reason: str,
    user: str
) -> bool:
    """
    Archive a package by moving it to the archive directory.

    REQ-d00097-D: Archive SHALL be triggered by resolution, deletion, or manual action.
    REQ-d00097-E: Deleting a package SHALL move it to archive rather than destroying.
    REQ-d00097-C: Archive metadata SHALL be added to package.json.

    Args:
        repo_root: Repository root path
        package_id: Package UUID
        reason: Archive reason - one of "resolved", "deleted", "manual"
        user: Username who triggered the archive

    Returns:
        True if archived successfully, False if package not found or already archived

    Raises:
        ValueError: If reason is not valid
    """
    from .models import (
        ARCHIVE_REASON_RESOLVED,
        ARCHIVE_REASON_DELETED,
        ARCHIVE_REASON_MANUAL,
    )

    valid_reasons = {ARCHIVE_REASON_RESOLVED, ARCHIVE_REASON_DELETED, ARCHIVE_REASON_MANUAL}
    if reason not in valid_reasons:
        raise ValueError(f"Invalid archive reason: {reason}. Must be one of: {valid_reasons}")

    # Locate the package (and its position) in the active packages file.
    packages_file = load_packages(repo_root)
    match = next(
        (
            (i, p)
            for i, p in enumerate(packages_file.packages)
            if p.packageId == package_id
        ),
        None,
    )
    if match is None:
        return False
    package_index, package = match

    # Stamp archive metadata onto the package (REQ-d00097-C).
    package.archive(user, reason)

    # Lay out the archive directory and write the package metadata.
    get_archived_package_dir(repo_root, package_id).mkdir(parents=True, exist_ok=True)
    atomic_write_json(
        get_archived_package_metadata_path(repo_root, package_id),
        package.to_dict(),
    )

    # Snapshot each requirement's thread file into the archive
    # (v1 format - from .reviews/reqs/{req-id}/).
    for req_id in package.reqIds:
        source_path = get_threads_path(repo_root, req_id)
        if not source_path.exists():
            continue
        dest_path = get_archived_package_threads_path(repo_root, package_id, req_id)
        dest_path.parent.mkdir(parents=True, exist_ok=True)
        atomic_write_json(dest_path, read_json(source_path))

    # Drop the package from the active list and persist.
    del packages_file.packages[package_index]
    save_packages(repo_root, packages_file)

    return True
|
|
1221
|
+
|
|
1222
|
+
|
|
1223
|
+
def list_archived_packages(repo_root: Path) -> List[ReviewPackage]:
    """
    List all archived packages.

    REQ-d00097: Provides read access to archived packages for the archive
    viewer. Entries with missing or unparsable package.json are skipped.

    Args:
        repo_root: Repository root path

    Returns:
        List of archived ReviewPackage instances, most recently archived first
    """
    archive_root = get_archive_dir(repo_root)
    if not archive_root.exists():
        return []

    found: List[ReviewPackage] = []
    for entry in archive_root.iterdir():
        if not entry.is_dir():
            continue
        metadata_path = entry / 'package.json'
        if not metadata_path.exists():
            continue
        try:
            found.append(ReviewPackage.from_dict(read_json(metadata_path)))
        except (json.JSONDecodeError, KeyError):
            # Skip invalid package files
            continue

    # Most recent archive date first; missing dates sort last.
    found.sort(key=lambda p: p.archivedAt or '', reverse=True)

    return found
|
|
1259
|
+
|
|
1260
|
+
|
|
1261
|
+
def get_archived_package(repo_root: Path, package_id: str) -> Optional[ReviewPackage]:
    """
    Get a specific archived package by ID.

    REQ-d00097: Provides read access to archived package details.

    Args:
        repo_root: Repository root path
        package_id: Package UUID

    Returns:
        ReviewPackage if found, None otherwise (including when the stored
        metadata is missing or unparsable)
    """
    metadata_path = get_archived_package_metadata_path(repo_root, package_id)
    if metadata_path.exists():
        try:
            return ReviewPackage.from_dict(read_json(metadata_path))
        except (json.JSONDecodeError, KeyError):
            pass  # corrupt/partial metadata is treated as not found
    return None
|
|
1283
|
+
|
|
1284
|
+
|
|
1285
|
+
def load_archived_threads(
    repo_root: Path,
    package_id: str,
    req_id: str
) -> Optional[ThreadsFile]:
    """
    Load threads for a requirement from an archived package.

    REQ-d00097-F: Archived data SHALL be read-only.

    Args:
        repo_root: Repository root path
        package_id: Archived package UUID
        req_id: Requirement ID

    Returns:
        ThreadsFile if found, None otherwise (including when the stored
        file is missing or unparsable)
    """
    threads_path = get_archived_package_threads_path(repo_root, package_id, req_id)
    if threads_path.exists():
        try:
            return ThreadsFile.from_dict(read_json(threads_path))
        except (json.JSONDecodeError, KeyError):
            pass  # corrupt/partial archive data is treated as not found
    return None
|
|
1312
|
+
|
|
1313
|
+
|
|
1314
|
+
def check_auto_archive(repo_root: Path, package_id: str, user: str) -> bool:
    """
    Check if a package should be auto-archived (all threads resolved) and archive if so.

    REQ-d00097-D: Resolving all threads in a package SHALL trigger auto-archive.

    Args:
        repo_root: Repository root path
        package_id: Package UUID
        user: Username who resolved the last thread

    Returns:
        True if package was auto-archived, False otherwise
    """
    from .models import ARCHIVE_REASON_RESOLVED

    # Find the package in the active packages file.
    packages_file = load_packages(repo_root)
    package = next(
        (p for p in packages_file.packages if p.packageId == package_id),
        None,
    )
    if package is None:
        return False

    # The default package is never auto-archived.
    if package.isDefault:
        return False

    # Scan every thread of every req in the package; bail out as soon as an
    # unresolved one is found.
    saw_thread = False
    for req_id in package.reqIds:
        for thread in load_threads(repo_root, req_id).threads:
            # Threads without a packageId count toward this package too.
            if thread.packageId is not None and thread.packageId != package_id:
                continue
            saw_thread = True
            if not thread.resolved:
                return False  # an open thread blocks auto-archive

    # Archive only when at least one (resolved) thread exists.
    if not saw_thread:
        return False
    return archive_package(repo_root, package_id, ARCHIVE_REASON_RESOLVED, user)
|