xtrm-tools 2.4.0 → 2.4.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (125)
  1. package/README.md +23 -9
  2. package/cli/dist/index.cjs +774 -240
  3. package/cli/dist/index.cjs.map +1 -1
  4. package/cli/package.json +1 -1
  5. package/config/hooks.json +10 -0
  6. package/config/pi/extensions/core/adapter.ts +2 -14
  7. package/config/pi/extensions/core/guard-rules.ts +70 -0
  8. package/config/pi/extensions/core/session-state.ts +59 -0
  9. package/config/pi/extensions/main-guard.ts +10 -14
  10. package/config/pi/extensions/plan-mode/README.md +65 -0
  11. package/config/pi/extensions/plan-mode/index.ts +340 -0
  12. package/config/pi/extensions/plan-mode/utils.ts +168 -0
  13. package/config/pi/extensions/service-skills.ts +51 -7
  14. package/config/pi/extensions/session-flow.ts +117 -0
  15. package/hooks/beads-claim-sync.mjs +123 -2
  16. package/hooks/beads-compact-restore.mjs +41 -9
  17. package/hooks/beads-compact-save.mjs +36 -5
  18. package/hooks/beads-gate-messages.mjs +27 -1
  19. package/hooks/beads-stop-gate.mjs +58 -8
  20. package/hooks/guard-rules.mjs +86 -0
  21. package/hooks/hooks.json +28 -18
  22. package/hooks/main-guard.mjs +3 -21
  23. package/hooks/quality-check.cjs +1286 -0
  24. package/hooks/quality-check.py +345 -0
  25. package/hooks/session-state.mjs +138 -0
  26. package/package.json +2 -1
  27. package/project-skills/quality-gates/.claude/settings.json +1 -24
  28. package/skills/creating-service-skills/SKILL.md +433 -0
  29. package/skills/creating-service-skills/references/script_quality_standards.md +425 -0
  30. package/skills/creating-service-skills/references/service_skill_system_guide.md +278 -0
  31. package/skills/creating-service-skills/scripts/bootstrap.py +326 -0
  32. package/skills/creating-service-skills/scripts/deep_dive.py +304 -0
  33. package/skills/creating-service-skills/scripts/scaffolder.py +482 -0
  34. package/skills/scoping-service-skills/SKILL.md +231 -0
  35. package/skills/scoping-service-skills/scripts/scope.py +74 -0
  36. package/skills/sync-docs/SKILL.md +235 -0
  37. package/skills/sync-docs/evals/evals.json +89 -0
  38. package/skills/sync-docs/references/doc-structure.md +104 -0
  39. package/skills/sync-docs/references/schema.md +103 -0
  40. package/skills/sync-docs/scripts/context_gatherer.py +246 -0
  41. package/skills/sync-docs/scripts/doc_structure_analyzer.py +495 -0
  42. package/skills/sync-docs/scripts/validate_doc.py +365 -0
  43. package/skills/sync-docs-workspace/iteration-1/benchmark.json +293 -0
  44. package/skills/sync-docs-workspace/iteration-1/benchmark.md +13 -0
  45. package/skills/sync-docs-workspace/iteration-1/eval-doc-audit/eval_metadata.json +27 -0
  46. package/skills/sync-docs-workspace/iteration-1/eval-doc-audit/with_skill/outputs/result.md +210 -0
  47. package/skills/sync-docs-workspace/iteration-1/eval-doc-audit/with_skill/run-1/grading.json +28 -0
  48. package/skills/sync-docs-workspace/iteration-1/eval-doc-audit/with_skill/run-1/timing.json +1 -0
  49. package/skills/sync-docs-workspace/iteration-1/eval-doc-audit/without_skill/outputs/result.md +101 -0
  50. package/skills/sync-docs-workspace/iteration-1/eval-doc-audit/without_skill/run-1/grading.json +28 -0
  51. package/skills/sync-docs-workspace/iteration-1/eval-doc-audit/without_skill/run-1/timing.json +5 -0
  52. package/skills/sync-docs-workspace/iteration-1/eval-doc-audit/without_skill/timing.json +5 -0
  53. package/skills/sync-docs-workspace/iteration-1/eval-fix-mode/eval_metadata.json +27 -0
  54. package/skills/sync-docs-workspace/iteration-1/eval-fix-mode/with_skill/outputs/result.md +198 -0
  55. package/skills/sync-docs-workspace/iteration-1/eval-fix-mode/with_skill/run-1/grading.json +28 -0
  56. package/skills/sync-docs-workspace/iteration-1/eval-fix-mode/with_skill/run-1/timing.json +1 -0
  57. package/skills/sync-docs-workspace/iteration-1/eval-fix-mode/without_skill/outputs/result.md +94 -0
  58. package/skills/sync-docs-workspace/iteration-1/eval-fix-mode/without_skill/run-1/grading.json +28 -0
  59. package/skills/sync-docs-workspace/iteration-1/eval-fix-mode/without_skill/run-1/timing.json +1 -0
  60. package/skills/sync-docs-workspace/iteration-1/eval-sprint-closeout/eval_metadata.json +27 -0
  61. package/skills/sync-docs-workspace/iteration-1/eval-sprint-closeout/with_skill/outputs/result.md +237 -0
  62. package/skills/sync-docs-workspace/iteration-1/eval-sprint-closeout/with_skill/run-1/grading.json +28 -0
  63. package/skills/sync-docs-workspace/iteration-1/eval-sprint-closeout/with_skill/run-1/timing.json +1 -0
  64. package/skills/sync-docs-workspace/iteration-1/eval-sprint-closeout/without_skill/outputs/result.md +134 -0
  65. package/skills/sync-docs-workspace/iteration-1/eval-sprint-closeout/without_skill/run-1/grading.json +28 -0
  66. package/skills/sync-docs-workspace/iteration-1/eval-sprint-closeout/without_skill/run-1/timing.json +1 -0
  67. package/skills/sync-docs-workspace/iteration-2/benchmark.json +297 -0
  68. package/skills/sync-docs-workspace/iteration-2/benchmark.md +13 -0
  69. package/skills/sync-docs-workspace/iteration-2/eval-doc-audit/eval_metadata.json +27 -0
  70. package/skills/sync-docs-workspace/iteration-2/eval-doc-audit/with_skill/outputs/result.md +137 -0
  71. package/skills/sync-docs-workspace/iteration-2/eval-doc-audit/with_skill/run-1/grading.json +92 -0
  72. package/skills/sync-docs-workspace/iteration-2/eval-doc-audit/with_skill/run-1/timing.json +1 -0
  73. package/skills/sync-docs-workspace/iteration-2/eval-doc-audit/without_skill/outputs/result.md +134 -0
  74. package/skills/sync-docs-workspace/iteration-2/eval-doc-audit/without_skill/run-1/grading.json +86 -0
  75. package/skills/sync-docs-workspace/iteration-2/eval-doc-audit/without_skill/run-1/timing.json +1 -0
  76. package/skills/sync-docs-workspace/iteration-2/eval-fix-mode/eval_metadata.json +27 -0
  77. package/skills/sync-docs-workspace/iteration-2/eval-fix-mode/with_skill/outputs/result.md +193 -0
  78. package/skills/sync-docs-workspace/iteration-2/eval-fix-mode/with_skill/run-1/grading.json +72 -0
  79. package/skills/sync-docs-workspace/iteration-2/eval-fix-mode/with_skill/run-1/timing.json +1 -0
  80. package/skills/sync-docs-workspace/iteration-2/eval-fix-mode/without_skill/outputs/result.md +211 -0
  81. package/skills/sync-docs-workspace/iteration-2/eval-fix-mode/without_skill/run-1/grading.json +91 -0
  82. package/skills/sync-docs-workspace/iteration-2/eval-fix-mode/without_skill/run-1/timing.json +5 -0
  83. package/skills/sync-docs-workspace/iteration-2/eval-sprint-closeout/eval_metadata.json +27 -0
  84. package/skills/sync-docs-workspace/iteration-2/eval-sprint-closeout/with_skill/outputs/result.md +182 -0
  85. package/skills/sync-docs-workspace/iteration-2/eval-sprint-closeout/with_skill/run-1/grading.json +95 -0
  86. package/skills/sync-docs-workspace/iteration-2/eval-sprint-closeout/with_skill/run-1/timing.json +1 -0
  87. package/skills/sync-docs-workspace/iteration-2/eval-sprint-closeout/without_skill/outputs/result.md +222 -0
  88. package/skills/sync-docs-workspace/iteration-2/eval-sprint-closeout/without_skill/run-1/grading.json +88 -0
  89. package/skills/sync-docs-workspace/iteration-2/eval-sprint-closeout/without_skill/run-1/timing.json +5 -0
  90. package/skills/sync-docs-workspace/iteration-3/benchmark.json +298 -0
  91. package/skills/sync-docs-workspace/iteration-3/benchmark.md +13 -0
  92. package/skills/sync-docs-workspace/iteration-3/eval-doc-audit/eval_metadata.json +27 -0
  93. package/skills/sync-docs-workspace/iteration-3/eval-doc-audit/with_skill/outputs/result.md +125 -0
  94. package/skills/sync-docs-workspace/iteration-3/eval-doc-audit/with_skill/run-1/grading.json +97 -0
  95. package/skills/sync-docs-workspace/iteration-3/eval-doc-audit/with_skill/run-1/timing.json +5 -0
  96. package/skills/sync-docs-workspace/iteration-3/eval-doc-audit/without_skill/outputs/result.md +144 -0
  97. package/skills/sync-docs-workspace/iteration-3/eval-doc-audit/without_skill/run-1/grading.json +78 -0
  98. package/skills/sync-docs-workspace/iteration-3/eval-doc-audit/without_skill/run-1/timing.json +5 -0
  99. package/skills/sync-docs-workspace/iteration-3/eval-fix-mode/eval_metadata.json +27 -0
  100. package/skills/sync-docs-workspace/iteration-3/eval-fix-mode/with_skill/outputs/result.md +104 -0
  101. package/skills/sync-docs-workspace/iteration-3/eval-fix-mode/with_skill/run-1/grading.json +91 -0
  102. package/skills/sync-docs-workspace/iteration-3/eval-fix-mode/with_skill/run-1/timing.json +5 -0
  103. package/skills/sync-docs-workspace/iteration-3/eval-fix-mode/without_skill/outputs/result.md +79 -0
  104. package/skills/sync-docs-workspace/iteration-3/eval-fix-mode/without_skill/run-1/grading.json +82 -0
  105. package/skills/sync-docs-workspace/iteration-3/eval-fix-mode/without_skill/run-1/timing.json +5 -0
  106. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/eval_metadata.json +27 -0
  107. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/with_skill/outputs/phase1_context.json +302 -0
  108. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/with_skill/outputs/phase2_drift.txt +33 -0
  109. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/with_skill/outputs/phase3_analysis.json +114 -0
  110. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/with_skill/outputs/phase4_fix.txt +118 -0
  111. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/with_skill/outputs/phase5_validate.txt +38 -0
  112. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/with_skill/outputs/result.md +158 -0
  113. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/with_skill/run-1/grading.json +95 -0
  114. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/with_skill/run-1/timing.json +5 -0
  115. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/without_skill/outputs/result.md +71 -0
  116. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/without_skill/run-1/grading.json +90 -0
  117. package/skills/sync-docs-workspace/iteration-3/eval-sprint-closeout/without_skill/run-1/timing.json +5 -0
  118. package/skills/updating-service-skills/SKILL.md +136 -0
  119. package/skills/updating-service-skills/scripts/drift_detector.py +222 -0
  120. package/skills/using-quality-gates/SKILL.md +254 -0
  121. package/skills/using-service-skills/SKILL.md +108 -0
  122. package/skills/using-service-skills/scripts/cataloger.py +74 -0
  123. package/skills/using-service-skills/scripts/skill_activator.py +152 -0
  124. package/skills/using-service-skills/scripts/test_skill_activator.py +58 -0
  125. package/skills/using-xtrm/SKILL.md +34 -38
@@ -0,0 +1,495 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ Analyze README.md and docs/ for structural drift.
4
+
5
+ Checks:
6
+ 1. README.md bloat — line count threshold and sections that belong in docs/
7
+ 2. CHANGELOG.md coverage — last entry date vs recent git activity
8
+ 3. docs/ gaps — expected focused files that don't exist yet
9
+
10
+ Outputs a JSON report with per-file findings categorized as:
11
+ BLOATED, MISSING, STALE, OK
12
+
13
+ Usage:
14
+ doc_structure_analyzer.py [options]
15
+
16
+ --root=<path> Project root (default: auto-detect via .git)
17
+ --readme-threshold=N Line count that marks README as bloated (default: 200)
18
+ --fix Auto-scaffold all MISSING docs/ files
19
+ --bd-remember After --fix, persist a summary via bd remember
20
+ """
21
+
22
+ import sys
23
+ import re
24
+ import json
25
+ import subprocess
26
+ from pathlib import Path
27
+ from datetime import datetime, timezone
28
+
29
+
30
# Sections in README that indicate content belonging in docs/ files
# Format: (pattern, suggested_docs_file, description)
# Patterns match H1-H3 markdown headings case-insensitively; matched via
# re.Pattern.match, so they only fire at the start of a line.
SECTION_DOCS_MAP: list[tuple[re.Pattern[str], str, str]] = [
    (re.compile(r"^#{1,3}\s+(hooks?|hook system|hook events)", re.I), "hooks.md", "Hooks reference"),
    (re.compile(r"^#{1,3}\s+(pi.?extensions?|copilot.?ext|pi.?ext)", re.I), "pi-extensions.md", "Pi extensions reference"),
    (re.compile(r"^#{1,3}\s+(architecture|system design|components)", re.I), "architecture.md", "Architecture overview"),
    (re.compile(r"^#{1,3}\s+(policy|policies|enforcement rules)", re.I), "policies.md", "Policy reference"),
    (re.compile(r"^#{1,3}\s+(mcp.?servers?|model context)", re.I), "mcp-servers.md", "MCP server configuration"),
    (re.compile(r"^#{1,3}\s+(skills?|skill catalog)", re.I), "skills.md", "Skills catalog"),
    (re.compile(r"^#{1,3}\s+(cli.?reference|commands?.?reference)", re.I), "cli-reference.md", "CLI reference"),
    (re.compile(r"^#{1,3}\s+(troubleshoot|debugging|common issues)", re.I), "troubleshooting.md", "Troubleshooting guide"),
]

# Signals that suggest a docs/ file should exist even without README sections
# Format: (signal_path, docs_file, reason, title, scope, category, source_globs)
# The trailing metadata (title/scope/category/source_globs) feeds
# scaffold_missing_docs, which passes it to validate_doc.py --generate.
SUBSYSTEM_SIGNALS: list[tuple[str, str, str, str, str, str, list[str]]] = [
    ("hooks/", "hooks.md", "hooks/ directory exists",
     "Hooks Reference", "hooks", "reference", ["hooks/**/*.mjs", "policies/*.json"]),
    ("config/pi/extensions/", "pi-extensions.md", "Pi extensions directory exists",
     "Pi Extensions Reference", "pi-extensions", "reference", ["config/pi/extensions/**/*.ts"]),
    (".mcp.json", "mcp-servers.md", ".mcp.json present",
     "MCP Servers Configuration", "mcp-servers", "reference", [".mcp.json"]),
    ("policies/", "policies.md", "policies/ directory exists",
     "Policy Reference", "policies", "reference", ["policies/*.json"]),
    ("skills/", "skills.md", "skills/ directory exists",
     "Skills Catalog", "skills", "overview", ["skills/**/*.md"]),
]
57
+
58
+
59
+ def find_project_root(start: Path | None = None) -> Path:
60
+ p = start or Path.cwd()
61
+ for parent in [p, *p.parents]:
62
+ if (parent / ".git").exists():
63
+ return parent
64
+ return p
65
+
66
+
67
def find_main_repo_root(root: Path) -> Path:
    """For git worktrees, resolve the main repo root from the .git file."""
    marker = root / ".git"
    if not marker.is_file():
        # Regular checkout: .git is a directory (or absent) — root is already main.
        return root
    text = marker.read_text(encoding="utf-8").strip()
    if not text.startswith("gitdir:"):
        return root
    # Worktree .git files contain "gitdir: <main>/.git/worktrees/<name>";
    # stepping up three levels from that path yields the main repo root.
    worktree_gitdir = Path(text[len("gitdir:"):].strip())
    return worktree_gitdir.parent.parent.parent
77
+
78
+
79
def count_lines(path: Path) -> int:
    """Number of lines in *path*; 0 when the file is missing or unreadable."""
    try:
        text = path.read_text(encoding="utf-8")
    except Exception:
        # Best-effort: any read failure (missing file, bad encoding) counts as empty.
        return 0
    return len(text.splitlines())
84
+
85
+
86
def extract_sections(content: str) -> list[str]:
    """Return all markdown heading lines (levels 1-3) found in *content*."""
    heading = re.compile(r"^#{1,3}\s+")
    found: list[str] = []
    for line in content.splitlines():
        if heading.match(line):
            found.append(line)
    return found
88
+
89
+
90
+ def get_last_changelog_date(path: Path) -> str | None:
91
+ """Extract the most recent dated version entry from a Keep-a-Changelog CHANGELOG.md."""
92
+ if not path.exists():
93
+ return None
94
+ content = path.read_text(encoding="utf-8")
95
+ m = re.search(r"##\s+\[?(\d+\.\d+\.\d+)\]?\s*[-–]\s*(\d{4}-\d{2}-\d{2})", content)
96
+ if m:
97
+ return m.group(2)
98
+ return None
99
+
100
+
101
+ def get_package_version(root: Path) -> str | None:
102
+ """Read current version from package.json if present."""
103
+ pkg = root / "package.json"
104
+ if not pkg.exists():
105
+ return None
106
+ try:
107
+ import json as _json
108
+ data = _json.loads(pkg.read_text(encoding="utf-8"))
109
+ return data.get("version")
110
+ except Exception:
111
+ return None
112
+
113
+
114
+ def get_latest_changelog_version(path: Path) -> str | None:
115
+ """Return the most recent versioned section from CHANGELOG.md.
116
+
117
+ CHANGELOG is in reverse-chronological order, so the first version
118
+ heading encountered is the most recently released one.
119
+ """
120
+ if not path.exists():
121
+ return None
122
+ content = path.read_text(encoding="utf-8")
123
+ m = re.search(r"##\s+\[?(\d+\.\d+\.\d+)\]?", content)
124
+ return m.group(1) if m else None
125
+
126
+
127
+ def get_last_commit_date(root: Path) -> str | None:
128
+ try:
129
+ result = subprocess.run(
130
+ ["git", "log", "-1", "--format=%ci"],
131
+ cwd=str(root), capture_output=True, text=True, timeout=5
132
+ )
133
+ if result.returncode == 0 and result.stdout.strip():
134
+ return result.stdout.strip()[:10]
135
+ except Exception:
136
+ pass
137
+ return None
138
+
139
+
140
def analyze_readme(root: Path, threshold: int = 200) -> dict:
    """Assess README.md for bloat and for sections that should live in docs/.

    Returns a report dict whose "status" is one of:
      MISSING     — README.md does not exist
      BLOATED     — line count exceeds *threshold*
      EXTRACTABLE — within the threshold, but sections match SECTION_DOCS_MAP
                    patterns whose docs/ counterpart does not exist yet
      OK          — neither condition applies
    """
    readme = root / "README.md"
    if not readme.exists():
        return {"status": "MISSING", "path": "README.md", "issues": ["README.md not found"]}

    content = readme.read_text(encoding="utf-8")
    lines = content.splitlines()
    line_count = len(lines)
    sections = extract_sections(content)

    issues = []
    extraction_candidates = []

    if line_count > threshold:
        issues.append(f"README has {line_count} lines (threshold: {threshold})")

    # Suggest extraction only for sections whose target docs/ file is missing.
    for line in lines:
        for pattern, target_file, description in SECTION_DOCS_MAP:
            if pattern.match(line) and not (root / "docs" / target_file).exists():
                extraction_candidates.append({
                    "section": line.strip(),
                    "suggest": f"docs/{target_file}",
                    "reason": description,
                })

    # Fixed: the original had two identical BLOATED branches
    # ("if over and candidates" / "elif over") — collapsed into one.
    if line_count > threshold:
        status = "BLOATED"
    elif extraction_candidates:
        status = "EXTRACTABLE"
    else:
        status = "OK"

    return {
        "status": status,
        "path": "README.md",
        "line_count": line_count,
        "section_count": len(sections),
        "threshold": threshold,
        "extraction_candidates": extraction_candidates,
        "issues": issues,
    }
184
+
185
+
186
def analyze_changelog(root: Path) -> dict:
    """Check CHANGELOG.md freshness against git activity and package.json.

    Two staleness checks:
      1. the last dated entry trails the last commit by more than 7 days;
      2. package.json's version is semver-ahead of the newest CHANGELOG
         section (an undocumented release) — this also yields a ready-to-run
         "fix_hint" command.

    Returns a report dict with status OK / STALE / MISSING.
    """
    changelog = root / "CHANGELOG.md"
    if not changelog.exists():
        return {"status": "MISSING", "path": "CHANGELOG.md", "issues": ["CHANGELOG.md not found"]}

    # Fixed: the original defined this identical helper twice (semver_key and
    # _semver, once per check) — consolidated into one nested function.
    def semver_key(v: str) -> tuple[int, ...]:
        # "1.2.3" -> (1, 2, 3) so comparisons are numeric, not lexical.
        return tuple(int(p) for p in v.split("."))

    last_entry = get_last_changelog_date(changelog)
    last_commit = get_last_commit_date(root)
    latest_changelog_version = get_latest_changelog_version(changelog)
    pkg_version = get_package_version(root)

    issues = []
    status = "OK"

    # Check 1: date gap between last dated entry and last commit
    if last_entry and last_commit and last_entry < last_commit[:10]:
        days_stale = (
            datetime.fromisoformat(last_commit[:10]) - datetime.fromisoformat(last_entry)
        ).days
        if days_stale > 7:
            issues.append(
                f"Last CHANGELOG entry ({last_entry}) is {days_stale} days older than "
                f"last commit ({last_commit[:10]})"
            )
            status = "STALE"

    # Check 2: package version ahead of latest changelog version (undocumented release)
    version_ahead = False
    if pkg_version and latest_changelog_version and pkg_version != latest_changelog_version:
        try:
            version_ahead = semver_key(pkg_version) > semver_key(latest_changelog_version)
        except (ValueError, AttributeError):
            # Non-numeric version strings (e.g. prereleases) — skip silently,
            # matching the original best-effort behavior.
            version_ahead = False
        if version_ahead:
            issues.append(
                f"package.json is at v{pkg_version} but latest CHANGELOG entry is "
                f"v{latest_changelog_version} — release is undocumented"
            )
            status = "STALE"

    result: dict = {
        "status": status,
        "path": "CHANGELOG.md",
        "last_entry_date": last_entry,
        "last_commit_date": last_commit,
        "package_version": pkg_version,
        "latest_changelog_version": latest_changelog_version,
        "issues": issues,
    }

    # When package.json is ahead of CHANGELOG, emit a ready-to-run fix command.
    # version_ahead already implies status == "STALE" and both versions present,
    # so no need to re-derive the comparison as the original did.
    if version_ahead:
        add_entry = next(
            (p for p in [
                Path.home() / ".claude/skills/documenting/scripts/changelog/add_entry.py",
                Path(__file__).parent.parent.parent / "documenting/scripts/changelog/add_entry.py",
            ] if p.exists()),
            None,
        )
        script = str(add_entry) if add_entry else "skills/documenting/scripts/changelog/add_entry.py"
        result["fix_hint"] = (
            f"python3 {script} CHANGELOG.md Added "
            f'"v{pkg_version} — describe changes since v{latest_changelog_version}"'
        )

    return result
257
+
258
+
259
def analyze_docs_gaps(root: Path) -> list[dict]:
    """Find expected docs/ files that don't exist given repo signals."""
    docs_dir = root / "docs"
    # A gap exists when the subsystem signal is present in the repo but the
    # corresponding docs/ file has not been written yet.
    return [
        {
            "status": "MISSING",
            "path": f"docs/{docs_file}",
            "reason": reason,
            "signal": signal_path,
        }
        for signal_path, docs_file, reason, _title, _scope, _cat, _globs in SUBSYSTEM_SIGNALS
        if (root / signal_path).exists() and not (docs_dir / docs_file).exists()
    ]
276
+
277
+
278
def analyze_existing_docs(root: Path) -> list[dict]:
    """Check existing docs/ files for schema validity (frontmatter present)."""
    docs_dir = root / "docs"
    if not docs_dir.exists():
        return []

    reports = []
    # Sorted for deterministic report ordering.
    for md_file in sorted(docs_dir.glob("*.md")):
        text = md_file.read_text(encoding="utf-8")
        frontmatter_ok = text.startswith("---\n")
        reports.append({
            "status": "OK" if frontmatter_ok else "INVALID_SCHEMA",
            "path": str(md_file.relative_to(root)),
            "line_count": len(text.splitlines()),
            "has_frontmatter": frontmatter_ok,
            "issues": [] if frontmatter_ok else ["Missing YAML frontmatter — run validate_doc.py to fix"],
        })
    return reports
299
+
300
+
301
def inject_minimal_frontmatter(path: Path) -> bool:
    """Add minimal valid frontmatter to an existing docs/ file that lacks it.

    Returns True when the file was rewritten; False when it already has
    frontmatter or cannot be read/written.
    """
    try:
        body = path.read_text(encoding="utf-8")
    except Exception:
        return False
    if body.startswith("---\n"):
        return False  # already has frontmatter

    # Title: prefer the first H1 heading; fall back to a prettified filename.
    title = next(
        (ln[2:].strip() for ln in body.splitlines() if ln.startswith("# ")),
        path.stem.replace("-", " ").replace("_", " ").title(),
    )
    scope = path.stem.lower().replace(" ", "-")
    today = datetime.now(timezone.utc).date().isoformat()
    header = (
        f"---\ntitle: {title}\nscope: {scope}\ncategory: reference\n"
        f"version: 1.0.0\nupdated: {today}\ndomain: []\n---\n\n"
    )
    try:
        path.write_text(header + body, encoding="utf-8")
    except Exception:
        return False
    return True
325
+
326
+
327
def scaffold_missing_docs(root: Path, gaps: list[dict]) -> list[str]:
    """Generate scaffold files for all MISSING docs/ gaps. Returns list of created paths.

    Delegates actual file generation to the sibling validate_doc.py script
    (run via subprocess with --generate), using title/scope/category/globs
    metadata from SUBSYSTEM_SIGNALS. Progress is printed to stdout; failures
    go to stderr. Gaps with no matching metadata are skipped.
    """
    # Build a lookup from docs_file name → signal metadata
    signal_meta = {
        docs_file: (title, scope, cat, globs)
        for _, docs_file, _, title, scope, cat, globs in SUBSYSTEM_SIGNALS
    }

    # validate_doc.py is expected to live next to this script.
    validator = Path(__file__).parent / "validate_doc.py"
    created = []
    docs_dir = root / "docs"
    docs_dir.mkdir(parents=True, exist_ok=True)

    for gap in gaps:
        docs_file = Path(gap["path"]).name  # e.g. "hooks.md"
        output_path = root / gap["path"]

        meta = signal_meta.get(docs_file)
        if not meta:
            # No scaffold metadata for this file — report and move on.
            print(f" SKIP {gap['path']} — no scaffold metadata", file=sys.stderr)
            continue

        title, scope, category, globs = meta
        # sys.executable keeps the generator on the same Python interpreter.
        cmd = [
            sys.executable, str(validator),
            "--generate", str(output_path),
            f"--title={title}",
            f"--scope={scope}",
            f"--category={category}",
        ]
        if globs:
            cmd.append(f"--source-for={','.join(globs)}")

        try:
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
            if result.returncode == 0:
                print(f" CREATED {gap['path']}")
                created.append(gap["path"])
            else:
                print(f" FAILED {gap['path']}: {result.stderr.strip()}", file=sys.stderr)
        except Exception as e:
            # Timeout or spawn failure — report but keep processing other gaps.
            print(f" ERROR {gap['path']}: {e}", file=sys.stderr)

    return created
371
+
372
+
373
def bd_remember(insight: str, key: str, cwd: str) -> bool:
    """Persist an insight via bd remember. Returns True on success."""
    cmd = ["bd", "remember", insight, "--key", key]
    try:
        proc = subprocess.run(cmd, cwd=cwd, capture_output=True, text=True, timeout=8)
    except Exception:
        # bd not installed, timeout, bad cwd — treat all as "not stored".
        return False
    return proc.returncode == 0
383
+
384
+
385
def main() -> None:
    """CLI entry point: analyze doc structure, optionally fix, emit JSON report.

    Flags (parsed by hand from sys.argv):
      --root=<path>            override the auto-detected project root
      --readme-threshold=N     README bloat threshold (invalid N is ignored)
      --fix                    scaffold missing docs + inject frontmatter
      --bd-remember            after --fix, persist a summary via bd remember

    Exit code is 1 when issues remain and --fix was not requested, else 0.
    """
    root = find_project_root()
    threshold = 200
    fix_mode = False
    remember_mode = False

    for arg in sys.argv[1:]:
        if arg.startswith("--root="):
            root = Path(arg.split("=", 1)[1]).resolve()
        elif arg.startswith("--readme-threshold="):
            try:
                threshold = int(arg.split("=", 1)[1])
            except ValueError:
                # Non-numeric threshold: silently keep the default.
                pass
        elif arg == "--fix":
            fix_mode = True
        elif arg == "--bd-remember":
            remember_mode = True

    # Run all four analyses up front; --fix re-runs the last two afterwards.
    readme_result = analyze_readme(root, threshold)
    changelog_result = analyze_changelog(root)
    docs_gaps = analyze_docs_gaps(root)
    existing_docs = analyze_existing_docs(root)

    # One issue per non-OK top-level file, plus one per gap / invalid doc.
    summary_issues = (
        (1 if readme_result["status"] != "OK" else 0)
        + (1 if changelog_result["status"] != "OK" else 0)
        + len(docs_gaps)
        + sum(1 for d in existing_docs if d["status"] != "OK")
    )

    report: dict = {
        "project_root": str(root),
        "summary": {
            "total_issues": summary_issues,
            "needs_attention": summary_issues > 0,
        },
        "readme": readme_result,
        "changelog": changelog_result,
        "docs_gaps": docs_gaps,
        "existing_docs": existing_docs,
    }

    # --fix: scaffold MISSING files + inject frontmatter into INVALID_SCHEMA files
    if fix_mode:
        created: list[str] = []
        schema_fixed: list[str] = []

        if docs_gaps:
            print(f"\nFixing {len(docs_gaps)} missing docs/ files...")
            created = scaffold_missing_docs(root, docs_gaps)

        invalid_docs = [d for d in existing_docs if d["status"] == "INVALID_SCHEMA"]
        if invalid_docs:
            print(f"\nInjecting frontmatter into {len(invalid_docs)} schema-invalid docs/ files...")
            for doc in invalid_docs:
                doc_path = root / doc["path"]
                if inject_minimal_frontmatter(doc_path):
                    print(f" FIXED {doc['path']}")
                    schema_fixed.append(doc["path"])
                else:
                    print(f" SKIP {doc['path']} — already has frontmatter or unreadable")

        if not created and not schema_fixed:
            print("\nNothing to fix — no MISSING gaps or INVALID_SCHEMA files detected.")

        report["fix_created"] = created
        report["fix_schema_fixed"] = schema_fixed

        # Re-analyze after fixes so the JSON report reflects post-fix state
        if created or schema_fixed:
            report["docs_gaps"] = analyze_docs_gaps(root)
            report["existing_docs"] = analyze_existing_docs(root)
            # Note: readme/changelog statuses are NOT re-analyzed — --fix only
            # touches docs/ files, so those results are still valid.
            post_fix_issues = (
                (1 if report["readme"]["status"] != "OK" else 0)
                + (1 if report["changelog"]["status"] != "OK" else 0)
                + len(report["docs_gaps"])
                + sum(1 for d in report["existing_docs"] if d["status"] != "OK")
            )
            report["summary"] = {
                "total_issues": post_fix_issues,
                "needs_attention": post_fix_issues > 0,
                "pre_fix_issues": summary_issues,
                "fixed": summary_issues - post_fix_issues,
            }

        # --bd-remember: persist a summary insight
        # Only when something was actually fixed AND the main repo (resolved
        # through any git worktree indirection) has a .beads store.
        all_fixed = created + schema_fixed
        main_root = find_main_repo_root(root)
        if remember_mode and all_fixed and (main_root / ".beads").exists():
            parts = []
            if created:
                parts.append(f"created {len(created)} scaffold(s): {', '.join(Path(p).name for p in created)}")
            if schema_fixed:
                parts.append(f"added frontmatter to {len(schema_fixed)} existing file(s): {', '.join(Path(p).name for p in schema_fixed)}")
            insight = (
                f"sync-docs --fix: {'; '.join(parts)}. "
                f"Fill in content and run validate_doc.py docs/ to confirm schema."
            )
            # Date-stamped key so repeated runs on the same day overwrite
            # rather than accumulate.
            key = f"sync-docs-fix-{datetime.now(timezone.utc).strftime('%Y-%m-%d')}"
            ok = bd_remember(insight, key, str(main_root))
            report["bd_remember"] = {"stored": ok, "key": key, "insight": insight}
            if ok:
                print(f"\n Persisted to bd memory: {key}")

    print(json.dumps(report, indent=2))
    # --fix always exits 0 (fixes were attempted); otherwise signal remaining
    # issues with a non-zero exit so CI / hooks can gate on it.
    sys.exit(1 if summary_issues > 0 and not fix_mode else 0)
492
+
493
+
494
if __name__ == "__main__":
    # Script entry point: analyze (and optionally --fix) doc structure drift.
    main()