specfact-cli 0.4.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62) hide show
  1. specfact_cli/__init__.py +14 -0
  2. specfact_cli/agents/__init__.py +24 -0
  3. specfact_cli/agents/analyze_agent.py +392 -0
  4. specfact_cli/agents/base.py +95 -0
  5. specfact_cli/agents/plan_agent.py +202 -0
  6. specfact_cli/agents/registry.py +176 -0
  7. specfact_cli/agents/sync_agent.py +133 -0
  8. specfact_cli/analyzers/__init__.py +11 -0
  9. specfact_cli/analyzers/code_analyzer.py +796 -0
  10. specfact_cli/cli.py +396 -0
  11. specfact_cli/commands/__init__.py +7 -0
  12. specfact_cli/commands/enforce.py +88 -0
  13. specfact_cli/commands/import_cmd.py +365 -0
  14. specfact_cli/commands/init.py +125 -0
  15. specfact_cli/commands/plan.py +1089 -0
  16. specfact_cli/commands/repro.py +192 -0
  17. specfact_cli/commands/sync.py +408 -0
  18. specfact_cli/common/__init__.py +25 -0
  19. specfact_cli/common/logger_setup.py +654 -0
  20. specfact_cli/common/logging_utils.py +41 -0
  21. specfact_cli/common/text_utils.py +52 -0
  22. specfact_cli/common/utils.py +48 -0
  23. specfact_cli/comparators/__init__.py +11 -0
  24. specfact_cli/comparators/plan_comparator.py +391 -0
  25. specfact_cli/generators/__init__.py +14 -0
  26. specfact_cli/generators/plan_generator.py +105 -0
  27. specfact_cli/generators/protocol_generator.py +115 -0
  28. specfact_cli/generators/report_generator.py +200 -0
  29. specfact_cli/generators/workflow_generator.py +120 -0
  30. specfact_cli/importers/__init__.py +7 -0
  31. specfact_cli/importers/speckit_converter.py +773 -0
  32. specfact_cli/importers/speckit_scanner.py +711 -0
  33. specfact_cli/models/__init__.py +33 -0
  34. specfact_cli/models/deviation.py +105 -0
  35. specfact_cli/models/enforcement.py +150 -0
  36. specfact_cli/models/plan.py +97 -0
  37. specfact_cli/models/protocol.py +28 -0
  38. specfact_cli/modes/__init__.py +19 -0
  39. specfact_cli/modes/detector.py +126 -0
  40. specfact_cli/modes/router.py +153 -0
  41. specfact_cli/resources/semgrep/async.yml +285 -0
  42. specfact_cli/sync/__init__.py +12 -0
  43. specfact_cli/sync/repository_sync.py +279 -0
  44. specfact_cli/sync/speckit_sync.py +388 -0
  45. specfact_cli/utils/__init__.py +58 -0
  46. specfact_cli/utils/console.py +70 -0
  47. specfact_cli/utils/feature_keys.py +212 -0
  48. specfact_cli/utils/git.py +241 -0
  49. specfact_cli/utils/github_annotations.py +399 -0
  50. specfact_cli/utils/ide_setup.py +382 -0
  51. specfact_cli/utils/prompts.py +180 -0
  52. specfact_cli/utils/structure.py +497 -0
  53. specfact_cli/utils/yaml_utils.py +200 -0
  54. specfact_cli/validators/__init__.py +20 -0
  55. specfact_cli/validators/fsm.py +262 -0
  56. specfact_cli/validators/repro_checker.py +759 -0
  57. specfact_cli/validators/schema.py +196 -0
  58. specfact_cli-0.4.2.dist-info/METADATA +370 -0
  59. specfact_cli-0.4.2.dist-info/RECORD +62 -0
  60. specfact_cli-0.4.2.dist-info/WHEEL +4 -0
  61. specfact_cli-0.4.2.dist-info/entry_points.txt +2 -0
  62. specfact_cli-0.4.2.dist-info/licenses/LICENSE.md +61 -0
@@ -0,0 +1,279 @@
1
+ """
2
+ Repository sync implementation.
3
+
4
+ This module provides synchronization of repository code changes to SpecFact artifacts.
5
+ It detects code changes, updates plan artifacts, and tracks deviations from manual plans.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import hashlib
11
+ from dataclasses import dataclass
12
+ from pathlib import Path
13
+ from typing import Any
14
+
15
+ from beartype import beartype
16
+ from icontract import ensure, require
17
+
18
+ from specfact_cli.analyzers.code_analyzer import CodeAnalyzer
19
+ from specfact_cli.comparators.plan_comparator import PlanComparator
20
+ from specfact_cli.models.plan import PlanBundle
21
+ from specfact_cli.validators.schema import validate_plan_bundle
22
+
23
+
24
@dataclass
class RepositorySyncResult:
    """
    Result of a repository sync operation.

    Attributes:
        status: Sync status ("success" | "deviation_detected" | "error")
        code_changes: List of detected code changes
        plan_updates: List of plan artifact updates
        deviations: List of deviations from manual plans
    """

    status: str
    code_changes: list[dict[str, Any]]
    plan_updates: list[dict[str, Any]]
    deviations: list[dict[str, Any]]

    @beartype
    def __post_init__(self) -> None:
        """Reject construction when status is not one of the known values."""
        allowed = ["success", "deviation_detected", "error"]
        if self.status in allowed:
            return
        msg = f"Status must be one of {allowed}, got {self.status}"
        raise ValueError(msg)
48
+
49
+
50
class RepositorySync:
    """
    Sync code changes to SpecFact artifacts.

    Monitors repository code changes, updates plan artifacts based on detected
    features/stories, and tracks deviations from manual plans.
    """

    @beartype
    def __init__(self, repo_path: Path, target: Path | None = None, confidence_threshold: float = 0.5) -> None:
        """
        Initialize repository sync.

        Args:
            repo_path: Path to repository root
            target: Target directory for artifacts (default: <repo_path>/.specfact)
            confidence_threshold: Minimum confidence threshold for feature detection
        """
        self.repo_path = Path(repo_path).resolve()
        self.target = Path(target).resolve() if target else self.repo_path / ".specfact"
        self.confidence_threshold = confidence_threshold
        # Maps relative file path -> last seen content hash (read by
        # detect_code_changes). NOTE(review): no method in this class ever
        # writes to hash_store, so repeated calls re-report the same files as
        # changed unless a caller populates the store externally — confirm
        # this is intended.
        self.hash_store: dict[str, str] = {}
        self.analyzer = CodeAnalyzer(self.repo_path, confidence_threshold)

    @beartype
    # BUGFIX: the preconditions previously evaluated `repo_path.exists()` /
    # `repo_path.is_dir()` even when the caller relied on the documented
    # default `repo_path=None`, raising AttributeError from inside the
    # contract lambda. None is now explicitly allowed; the body substitutes
    # self.repo_path in that case.
    @require(lambda repo_path: repo_path is None or repo_path.exists(), "Repository path must exist")
    @require(lambda repo_path: repo_path is None or repo_path.is_dir(), "Repository path must be a directory")
    @ensure(lambda result: isinstance(result, RepositorySyncResult), "Must return RepositorySyncResult")
    @ensure(lambda result: result.status in ["success", "deviation_detected", "error"], "Status must be valid")
    def sync_repository_changes(self, repo_path: Path | None = None) -> RepositorySyncResult:
        """
        Sync code changes to SpecFact artifacts.

        Args:
            repo_path: Path to repository (default: self.repo_path)

        Returns:
            Repository sync result with code changes, plan updates, and deviations
        """
        if repo_path is None:
            repo_path = self.repo_path

        # 1. Detect code changes
        code_changes = self.detect_code_changes(repo_path)

        # 2. Update plan artifacts based on code changes
        plan_updates = self.update_plan_artifacts(code_changes, self.target)

        # 3. Track deviations from manual plans
        deviations = self.track_deviations(code_changes, self.target)

        # Any deviation downgrades the overall status.
        status = "deviation_detected" if deviations else "success"

        return RepositorySyncResult(
            status=status,
            code_changes=code_changes,
            plan_updates=plan_updates,
            deviations=deviations,
        )

    @beartype
    @require(lambda repo_path: repo_path.exists(), "Repository path must exist")
    @ensure(lambda result: isinstance(result, list), "Must return list")
    def detect_code_changes(self, repo_path: Path) -> list[dict[str, Any]]:
        """
        Detect code changes in repository.

        Monitors Python source files under ``src/`` and reports every file
        whose SHA256 hash differs from the one recorded in ``self.hash_store``
        (files with no recorded hash are reported as "new").

        Args:
            repo_path: Path to repository

        Returns:
            List of change dicts with keys "file", "hash", "type",
            "relative_path"
        """
        changes: list[dict[str, Any]] = []

        # Only files under src/ are monitored; anything else is ignored.
        src_dir = repo_path / "src"
        if src_dir.exists():
            for source_file in src_dir.rglob("*.py"):
                if source_file.is_file():
                    relative_path = str(source_file.relative_to(repo_path))
                    current_hash = self._get_file_hash(source_file)
                    stored_hash = self.hash_store.get(relative_path, "")

                    if current_hash != stored_hash:
                        changes.append(
                            {
                                "file": source_file,
                                "hash": current_hash,
                                # An empty stored hash means we have never
                                # seen this file before.
                                "type": "modified" if stored_hash else "new",
                                "relative_path": relative_path,
                            }
                        )

        return changes

    @beartype
    @ensure(lambda result: isinstance(result, list), "Must return list")
    def update_plan_artifacts(self, code_changes: list[dict[str, Any]], target: Path) -> list[dict[str, Any]]:
        """
        Update plan artifacts based on code changes.

        Analyzes code changes to extract features/stories and writes an
        auto-generated plan bundle under ``<target>/reports/repository/``.

        Args:
            code_changes: List of detected code changes
            target: Target directory for artifacts

        Returns:
            List of plan updates (empty when there are no changes or the
            analysis yields no features)
        """
        updates: list[dict[str, Any]] = []

        if not code_changes:
            return updates

        # Analyze code changes using CodeAnalyzer.
        # For now, analyze the entire repository whenever anything changed
        # (could be optimized to only analyze changed files).
        try:
            auto_plan = self.analyzer.analyze()
            if auto_plan and auto_plan.features:
                # Write auto-generated plan to the reports directory.
                reports_dir = target / "reports" / "repository"
                reports_dir.mkdir(parents=True, exist_ok=True)
                auto_plan_file = reports_dir / "auto-generated-plan.yaml"

                # Imported locally to keep this optional dependency off the
                # module import path.
                from specfact_cli.generators.plan_generator import PlanGenerator

                generator = PlanGenerator()
                generator.generate(auto_plan, auto_plan_file)

                updates.append(
                    {
                        "plan_file": auto_plan_file,
                        "features": len(auto_plan.features),
                        "stories": sum(len(f.stories) for f in auto_plan.features),
                        "updated": True,
                    }
                )
        except Exception:
            # Deliberate best-effort: a failed analysis must not abort the
            # sync, so we return no updates rather than raising.
            pass

        return updates

    @beartype
    @ensure(lambda result: isinstance(result, list), "Must return list")
    def track_deviations(self, code_changes: list[dict[str, Any]], target: Path) -> list[dict[str, Any]]:
        """
        Track deviations from manual plans.

        Compares the auto-generated plan (derived from current code) against
        the manual plan bundle at ``<target>/plans/main.bundle.yaml`` and
        converts each comparison deviation into a plain dict.

        Args:
            code_changes: List of detected code changes
            target: Target directory for artifacts

        Returns:
            List of deviation dictionaries (empty when no manual plan exists,
            the plan is invalid, or comparison fails)
        """
        deviations: list[dict[str, Any]] = []

        # Load manual plan; without one there is nothing to deviate from.
        manual_plan_file = target / "plans" / "main.bundle.yaml"
        if not manual_plan_file.exists():
            return deviations

        # Validate and load manual plan.
        is_valid, _error, manual_plan = validate_plan_bundle(manual_plan_file)
        if not is_valid or manual_plan is None:
            return deviations

        # Type guard: manual_plan is not None after the check above.
        assert isinstance(manual_plan, PlanBundle)

        # Generate auto plan from current code.
        try:
            auto_plan = self.analyzer.analyze()
            if not auto_plan or not auto_plan.features:
                return deviations

            # Compare manual vs auto plan using PlanComparator.
            comparator = PlanComparator()
            comparison = comparator.compare(manual_plan, auto_plan)

            # Convert comparison deviations to plain sync deviation dicts;
            # enum-valued fields are unwrapped via .value when present.
            for deviation in comparison.deviations:
                deviations.append(
                    {
                        "type": deviation.type.value if hasattr(deviation.type, "value") else str(deviation.type),
                        "severity": (
                            deviation.severity.value
                            if hasattr(deviation.severity, "value")
                            else str(deviation.severity)
                        ),
                        "description": deviation.description,
                        "location": deviation.location or "",
                        "fix_hint": deviation.suggestion or "",
                    }
                )
        except Exception:
            # Deliberate best-effort: a failed comparison must not abort the
            # sync, so we report no deviations rather than raising.
            pass

        return deviations

    @beartype
    def _get_file_hash(self, file_path: Path) -> str:
        """
        Get file hash for change detection.

        Args:
            file_path: Path to file

        Returns:
            SHA256 hex digest of the file contents, or "" when the file does
            not exist
        """
        if not file_path.exists():
            return ""

        with file_path.open("rb") as f:
            content = f.read()
        return hashlib.sha256(content).hexdigest()
@@ -0,0 +1,388 @@
1
+ """
2
+ Spec-Kit bidirectional sync implementation.
3
+
4
+ This module provides bidirectional synchronization between Spec-Kit markdown artifacts
5
+ and SpecFact plans/protocols. It detects changes, merges updates, and resolves conflicts.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import hashlib
11
+ from dataclasses import dataclass
12
+ from pathlib import Path
13
+ from typing import Any
14
+
15
+ from beartype import beartype
16
+ from icontract import ensure, require
17
+
18
+ from specfact_cli.importers.speckit_converter import SpecKitConverter
19
+ from specfact_cli.importers.speckit_scanner import SpecKitScanner
20
+
21
+
22
@dataclass
class SyncResult:
    """
    Result of a sync operation.

    Attributes:
        status: Sync status ("success" | "conflict" | "error")
        changes: List of detected changes
        conflicts: List of conflicts (if any)
        merged: Merged artifacts
    """

    status: str
    changes: list[dict[str, Any]]
    conflicts: list[dict[str, Any]]
    merged: dict[str, Any]

    @beartype
    def __post_init__(self) -> None:
        """Reject construction when status is not one of the known values."""
        valid_statuses = ["success", "conflict", "error"]
        if self.status in valid_statuses:
            return
        msg = f"Status must be one of {valid_statuses}, got {self.status}"
        raise ValueError(msg)
46
+
47
+
48
class SpecKitSync:
    """
    Bidirectional sync between Spec-Kit and SpecFact.

    Synchronizes changes between Spec-Kit markdown artifacts (generated by
    Spec-Kit slash commands) and SpecFact plan bundles/protocols.
    """

    @beartype
    def __init__(self, repo_path: Path) -> None:
        """
        Initialize Spec-Kit sync.

        Args:
            repo_path: Path to repository root
        """
        self.repo_path = Path(repo_path).resolve()
        self.scanner = SpecKitScanner(self.repo_path)
        self.converter = SpecKitConverter(self.repo_path)
        # Maps relative file path -> last seen content hash (read by the
        # detect_* methods). NOTE(review): nothing in this class writes to
        # this store, so changes are re-reported on every call unless a
        # caller populates it externally — confirm this is intended.
        self.hash_store: dict[str, str] = {}

    @beartype
    # BUGFIX: the preconditions previously evaluated `repo_path.exists()` /
    # `repo_path.is_dir()` even when the caller relied on the documented
    # default `repo_path=None`, raising AttributeError from inside the
    # contract lambda. None is now explicitly allowed; the body substitutes
    # self.repo_path in that case.
    @require(lambda repo_path: repo_path is None or repo_path.exists(), "Repository path must exist")
    @require(lambda repo_path: repo_path is None or repo_path.is_dir(), "Repository path must be a directory")
    @ensure(lambda result: isinstance(result, SyncResult), "Must return SyncResult")
    @ensure(lambda result: result.status in ["success", "conflict", "error"], "Status must be valid")
    def sync_bidirectional(self, repo_path: Path | None = None) -> SyncResult:
        """
        Sync changes between Spec-Kit and SpecFact artifacts bidirectionally.

        Note: Spec-Kit is a workflow tool that generates markdown artifacts
        through slash commands. This method synchronizes the **artifacts that
        Spec-Kit commands have already generated**, not run Spec-Kit commands
        ourselves.

        Args:
            repo_path: Path to repository (default: self.repo_path)

        Returns:
            Sync result with changes, conflicts, and merged artifacts
        """
        if repo_path is None:
            repo_path = self.repo_path

        # 1. Detect changes in Spec-Kit artifacts
        speckit_changes = self.detect_speckit_changes(repo_path)

        # 2. Detect changes in SpecFact artifacts
        specfact_changes = self.detect_specfact_changes(repo_path)

        # 3. Merge bidirectional changes
        merged = self.merge_changes(speckit_changes, specfact_changes)

        # 4. Detect conflicts
        conflicts = self.detect_conflicts(speckit_changes, specfact_changes)

        # 5. Resolve conflicts if any
        if conflicts:
            resolved = self.resolve_conflicts(conflicts)
            merged = self.apply_resolved_conflicts(merged, resolved)

        return SyncResult(
            status="conflict" if conflicts else "success",
            # changes[0] = Spec-Kit change map, changes[1] = SpecFact change map.
            changes=[speckit_changes, specfact_changes],
            conflicts=conflicts,
            merged=merged,
        )

    @beartype
    @require(lambda repo_path: repo_path.exists(), "Repository path must exist")
    @ensure(lambda result: isinstance(result, dict), "Must return dict")
    def detect_speckit_changes(self, repo_path: Path) -> dict[str, Any]:
        """
        Detect changes in Spec-Kit artifacts.

        Monitors modern Spec-Kit format:
        - `.specify/memory/constitution.md` (from `/speckit.constitution`)
        - `specs/[###-feature-name]/spec.md` (from `/speckit.specify`)
        - `specs/[###-feature-name]/plan.md` (from `/speckit.plan`)
        - `specs/[###-feature-name]/tasks.md` (from `/speckit.tasks`)

        Args:
            repo_path: Path to repository

        Returns:
            Dictionary of detected changes keyed by file path
        """
        changes: dict[str, Any] = {}

        # Check for modern Spec-Kit format (.specify directory).
        specify_dir = repo_path / ".specify"
        if specify_dir.exists():
            # Monitor .specify/memory/ files.
            memory_dir = repo_path / ".specify" / "memory"
            if memory_dir.exists():
                changes.update(self._collect_changes(repo_path, list(memory_dir.glob("*.md"))))

        # Monitor specs/ directory for feature specifications.
        specs_dir = repo_path / "specs"
        if specs_dir.exists():
            for spec_dir in specs_dir.iterdir():
                if spec_dir.is_dir():
                    changes.update(self._collect_changes(repo_path, list(spec_dir.glob("*.md"))))

        return changes

    @beartype
    @require(lambda repo_path: repo_path.exists(), "Repository path must exist")
    @ensure(lambda result: isinstance(result, dict), "Must return dict")
    def detect_specfact_changes(self, repo_path: Path) -> dict[str, Any]:
        """
        Detect changes in SpecFact artifacts.

        Monitors:
        - `.specfact/plans/*.yaml`
        - `.specfact/protocols/*.yaml`

        Args:
            repo_path: Path to repository

        Returns:
            Dictionary of detected changes keyed by file path
        """
        changes: dict[str, Any] = {}

        # Both artifact directories are scanned with the same hash-diff logic.
        for subdir in ("plans", "protocols"):
            artifact_dir = repo_path / ".specfact" / subdir
            if artifact_dir.exists():
                changes.update(self._collect_changes(repo_path, list(artifact_dir.glob("*.yaml"))))

        return changes

    @beartype
    def _collect_changes(self, repo_path: Path, files: list[Path]) -> dict[str, Any]:
        """
        Hash each file and report those whose hash differs from hash_store.

        Shared helper for the detect_* methods: a file with no recorded hash
        is reported as "new", a file whose hash differs as "modified".

        Args:
            repo_path: Repository root used to compute relative paths
            files: Candidate files to check

        Returns:
            Dictionary keyed by relative path with "file", "hash", "type"
            entries for every changed file
        """
        changes: dict[str, Any] = {}

        for candidate in files:
            relative_path = str(candidate.relative_to(repo_path))
            current_hash = self._get_file_hash(candidate)
            stored_hash = self.hash_store.get(relative_path, "")

            if current_hash != stored_hash:
                changes[relative_path] = {
                    "file": candidate,
                    "hash": current_hash,
                    "type": "modified" if stored_hash else "new",
                }

        return changes

    @beartype
    @ensure(lambda result: isinstance(result, dict), "Must return dict")
    def merge_changes(self, speckit_changes: dict[str, Any], specfact_changes: dict[str, Any]) -> dict[str, Any]:
        """
        Merge changes from both sources.

        Spec-Kit changes are inserted first; a SpecFact change for the same
        key marks the entry as a conflict instead of overwriting it.

        Args:
            speckit_changes: Spec-Kit detected changes
            specfact_changes: SpecFact detected changes

        Returns:
            Merged changes dictionary
        """
        merged: dict[str, Any] = {}

        # Merge Spec-Kit changes.
        for key, change in speckit_changes.items():
            merged[key] = {
                "source": "speckit",
                **change,
            }

        # Merge SpecFact changes.
        for key, change in specfact_changes.items():
            if key in merged:
                # Conflict: the same file changed on both sides. Keep the
                # Spec-Kit entry and attach the SpecFact change alongside it.
                merged[key]["conflict"] = True
                merged[key]["specfact_change"] = change
            else:
                merged[key] = {
                    "source": "specfact",
                    **change,
                }

        return merged

    @beartype
    @ensure(lambda result: isinstance(result, list), "Must return list")
    def detect_conflicts(
        self, speckit_changes: dict[str, Any], specfact_changes: dict[str, Any]
    ) -> list[dict[str, Any]]:
        """
        Detect conflicts between Spec-Kit and SpecFact changes.

        A conflict is any key present in both change maps.

        Args:
            speckit_changes: Spec-Kit detected changes
            specfact_changes: SpecFact detected changes

        Returns:
            List of conflict dictionaries with "key", "speckit_change",
            "specfact_change"
        """
        conflicts: list[dict[str, Any]] = []

        for key in set(speckit_changes.keys()) & set(specfact_changes.keys()):
            conflicts.append(
                {
                    "key": key,
                    "speckit_change": speckit_changes[key],
                    "specfact_change": specfact_changes[key],
                }
            )

        return conflicts

    @beartype
    @ensure(lambda result: isinstance(result, dict), "Must return dict")
    def resolve_conflicts(self, conflicts: list[dict[str, Any]]) -> dict[str, Any]:
        """
        Resolve conflicts with merge strategy.

        Strategy:
        - Priority: SpecFact > Spec-Kit for artifacts (specs/*)
        - Priority: Spec-Kit > SpecFact for memory files (.specify/memory/)

        Args:
            conflicts: List of conflict dictionaries

        Returns:
            Resolved conflicts dictionary
        """
        resolved: dict[str, Any] = {}

        for conflict in conflicts:
            file_key = conflict["key"]
            file_type = self._get_file_type(file_key)

            if file_type == "artifact":
                # SpecFact takes priority for artifacts.
                resolved[file_key] = {
                    "resolution": "specfact_priority",
                    "source": "specfact",
                    "data": conflict["specfact_change"],
                }
            elif file_type == "memory":
                # Spec-Kit takes priority for memory files.
                resolved[file_key] = {
                    "resolution": "speckit_priority",
                    "source": "speckit",
                    "data": conflict["speckit_change"],
                }
            else:
                # Default: SpecFact priority (same resolution as "artifact",
                # kept as a separate branch so the fallback is explicit).
                resolved[file_key] = {
                    "resolution": "specfact_priority",
                    "source": "specfact",
                    "data": conflict["specfact_change"],
                }

        return resolved

    @beartype
    @ensure(lambda result: isinstance(result, dict), "Must return dict")
    def apply_resolved_conflicts(self, merged: dict[str, Any], resolved: dict[str, Any]) -> dict[str, Any]:
        """
        Apply resolved conflicts to merged changes.

        Clears the conflict flag and records the resolution and winning
        source on each merged entry; mutates and returns ``merged``.

        Args:
            merged: Merged changes dictionary
            resolved: Resolved conflicts dictionary

        Returns:
            Updated merged changes dictionary
        """
        for key, resolution in resolved.items():
            if key in merged:
                merged[key]["conflict"] = False
                merged[key]["resolution"] = resolution["resolution"]
                merged[key]["source"] = resolution["source"]

        return merged

    @beartype
    def _get_file_hash(self, file_path: Path) -> str:
        """
        Get file hash for change detection.

        Args:
            file_path: Path to file

        Returns:
            SHA256 hex digest of the file contents, or "" when the file does
            not exist
        """
        if not file_path.exists():
            return ""

        with file_path.open("rb") as f:
            content = f.read()
        return hashlib.sha256(content).hexdigest()

    @beartype
    def _get_file_type(self, file_path: str) -> str:
        """
        Determine file type for conflict resolution.

        NOTE(review): matching assumes "/"-separated relative paths; keys
        built from Path objects on Windows would use "\\" and fall through
        to "other" — confirm keys are always POSIX-style.

        Args:
            file_path: Relative file path

        Returns:
            File type ("artifact" | "memory" | "other")
        """
        if "/memory/" in file_path or file_path.startswith(".specify/memory/"):
            return "memory"
        if "/specs/" in file_path or file_path.startswith("specs/"):
            return "artifact"
        return "other"