@mainahq/core 1.0.3 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71) hide show
  1. package/package.json +1 -1
  2. package/src/ai/__tests__/delegation.test.ts +55 -1
  3. package/src/ai/delegation.ts +5 -3
  4. package/src/context/__tests__/budget.test.ts +29 -6
  5. package/src/context/__tests__/engine.test.ts +1 -0
  6. package/src/context/__tests__/selector.test.ts +23 -3
  7. package/src/context/__tests__/wiki.test.ts +349 -0
  8. package/src/context/budget.ts +12 -8
  9. package/src/context/engine.ts +37 -0
  10. package/src/context/selector.ts +30 -4
  11. package/src/context/wiki.ts +296 -0
  12. package/src/db/index.ts +12 -0
  13. package/src/feedback/__tests__/capture.test.ts +166 -0
  14. package/src/feedback/__tests__/signals.test.ts +144 -0
  15. package/src/feedback/__tests__/tmp-capture-1775575256633-lah0etnzlj/feedback.db +0 -0
  16. package/src/feedback/__tests__/tmp-capture-1775575256640-2xmjme4qraa/feedback.db +0 -0
  17. package/src/feedback/capture.ts +102 -0
  18. package/src/feedback/signals.ts +68 -0
  19. package/src/index.ts +104 -0
  20. package/src/init/__tests__/init.test.ts +400 -3
  21. package/src/init/index.ts +368 -12
  22. package/src/language/__tests__/__fixtures__/detect/composer.lock +1 -0
  23. package/src/prompts/defaults/index.ts +3 -1
  24. package/src/prompts/defaults/wiki-compile.md +20 -0
  25. package/src/prompts/defaults/wiki-query.md +18 -0
  26. package/src/stats/__tests__/tool-usage.test.ts +133 -0
  27. package/src/stats/tracker.ts +92 -0
  28. package/src/verify/__tests__/pipeline.test.ts +11 -8
  29. package/src/verify/pipeline.ts +13 -1
  30. package/src/verify/tools/__tests__/wiki-lint.test.ts +784 -0
  31. package/src/verify/tools/wiki-lint-runner.ts +38 -0
  32. package/src/verify/tools/wiki-lint.ts +898 -0
  33. package/src/wiki/__tests__/compiler.test.ts +389 -0
  34. package/src/wiki/__tests__/extractors/code.test.ts +99 -0
  35. package/src/wiki/__tests__/extractors/decision.test.ts +323 -0
  36. package/src/wiki/__tests__/extractors/feature.test.ts +186 -0
  37. package/src/wiki/__tests__/extractors/workflow.test.ts +131 -0
  38. package/src/wiki/__tests__/graph.test.ts +344 -0
  39. package/src/wiki/__tests__/hooks.test.ts +119 -0
  40. package/src/wiki/__tests__/indexer.test.ts +285 -0
  41. package/src/wiki/__tests__/linker.test.ts +230 -0
  42. package/src/wiki/__tests__/louvain.test.ts +229 -0
  43. package/src/wiki/__tests__/query.test.ts +316 -0
  44. package/src/wiki/__tests__/schema.test.ts +114 -0
  45. package/src/wiki/__tests__/signals.test.ts +474 -0
  46. package/src/wiki/__tests__/state.test.ts +168 -0
  47. package/src/wiki/__tests__/tracking.test.ts +118 -0
  48. package/src/wiki/__tests__/types.test.ts +387 -0
  49. package/src/wiki/compiler.ts +1075 -0
  50. package/src/wiki/extractors/code.ts +90 -0
  51. package/src/wiki/extractors/decision.ts +217 -0
  52. package/src/wiki/extractors/feature.ts +206 -0
  53. package/src/wiki/extractors/workflow.ts +112 -0
  54. package/src/wiki/graph.ts +445 -0
  55. package/src/wiki/hooks.ts +49 -0
  56. package/src/wiki/indexer.ts +105 -0
  57. package/src/wiki/linker.ts +117 -0
  58. package/src/wiki/louvain.ts +190 -0
  59. package/src/wiki/prompts/compile-architecture.md +59 -0
  60. package/src/wiki/prompts/compile-decision.md +66 -0
  61. package/src/wiki/prompts/compile-entity.md +56 -0
  62. package/src/wiki/prompts/compile-feature.md +60 -0
  63. package/src/wiki/prompts/compile-module.md +42 -0
  64. package/src/wiki/prompts/wiki-query.md +25 -0
  65. package/src/wiki/query.ts +338 -0
  66. package/src/wiki/schema.ts +111 -0
  67. package/src/wiki/signals.ts +368 -0
  68. package/src/wiki/state.ts +89 -0
  69. package/src/wiki/tracking.ts +30 -0
  70. package/src/wiki/types.ts +169 -0
  71. package/src/workflow/context.ts +26 -0
@@ -0,0 +1,898 @@
1
+ /**
2
+ * Wiki Lint Tool — checks wiki articles for staleness, orphans, broken links,
3
+ * coverage gaps, and missing articles.
4
+ *
5
+ * Integrates into the Verify Engine pipeline by converting WikiLintResult
6
+ * into Finding[] for diff-only filtering and unified reporting.
7
+ */
8
+
9
+ import { existsSync, readdirSync, readFileSync } from "node:fs";
10
+ import { join, relative } from "node:path";
11
+ import { extractDecisions } from "../../wiki/extractors/decision";
12
+ import { extractFeatures } from "../../wiki/extractors/feature";
13
+ import { hashFile, loadState } from "../../wiki/state";
14
+ import type { WikiLintFinding, WikiLintResult } from "../../wiki/types";
15
+ import type { Finding } from "../diff-filter";
16
+
17
+ // ─── Types ───────────────────────────────────────────────────────────────
18
+
/** Configuration for a single wiki lint run. */
export interface WikiLintOptions {
  // Path to the wiki root directory (articles live in typed subdirs, e.g. modules/).
  wikiDir: string;
  // Repository root; source-file references found in articles are resolved against it.
  repoRoot: string;
  // Feature specs directory; runWikiLint defaults this to <repoRoot>/.maina/features.
  featuresDir?: string;
  // ADR directory; runWikiLint defaults this to <repoRoot>/adr.
  adrDir?: string;
}
25
+
26
+ // ─── Helpers ─────────────────────────────────────────────────────────────
27
+
28
+ /** Recursively collect all .md files under a directory. */
29
+ function collectMarkdownFiles(dir: string): string[] {
30
+ const results: string[] = [];
31
+ if (!existsSync(dir)) return results;
32
+
33
+ try {
34
+ const entries = readdirSync(dir, { withFileTypes: true });
35
+ for (const entry of entries) {
36
+ const full = join(dir, entry.name);
37
+ if (entry.isDirectory()) {
38
+ results.push(...collectMarkdownFiles(full));
39
+ } else if (entry.name.endsWith(".md")) {
40
+ results.push(full);
41
+ }
42
+ }
43
+ } catch {
44
+ // Directory read failure — skip gracefully
45
+ }
46
+
47
+ return results;
48
+ }
49
+
50
+ /** Extract all wiki links like [[entity:foo]], [[module:bar]] from content. */
51
+ function extractWikiLinks(content: string): string[] {
52
+ const pattern = /\[\[(\w+:\w[\w-]*)\]\]/g;
53
+ const links: string[] = [];
54
+ let match: RegExpExecArray | null = null;
55
+ match = pattern.exec(content);
56
+ while (match !== null) {
57
+ links.push(match[1] ?? "");
58
+ match = pattern.exec(content);
59
+ }
60
+ return links.filter(Boolean);
61
+ }
62
+
63
+ /** Get article type from file path (e.g., .maina/wiki/modules/foo.md -> module). */
64
+ function getArticleTypeFromPath(filePath: string, wikiDir: string): string {
65
+ const rel = relative(wikiDir, filePath);
66
+ const firstDir = rel.split("/")[0] ?? "";
67
+ // Directories use plural names, article types are singular
68
+ const singularMap: Record<string, string> = {
69
+ modules: "module",
70
+ entities: "entity",
71
+ features: "feature",
72
+ decisions: "decision",
73
+ architecture: "architecture",
74
+ raw: "raw",
75
+ };
76
+ return singularMap[firstDir] ?? firstDir;
77
+ }
78
+
79
+ /** Get article identifier from file path. */
80
+ function getArticleId(filePath: string, wikiDir: string): string {
81
+ const rel = relative(wikiDir, filePath);
82
+ // Strip extension and first directory
83
+ const parts = rel.split("/");
84
+ parts.shift(); // Remove type directory
85
+ return parts.join("/").replace(/\.md$/, "");
86
+ }
87
+
88
+ /** Build a set of all known article identifiers (type:id format). */
89
+ function buildArticleIndex(
90
+ articlePaths: string[],
91
+ wikiDir: string,
92
+ ): Set<string> {
93
+ const index = new Set<string>();
94
+ for (const path of articlePaths) {
95
+ const type = getArticleTypeFromPath(path, wikiDir);
96
+ const id = getArticleId(path, wikiDir);
97
+ if (type && id) {
98
+ index.add(`${type}:${id}`);
99
+ }
100
+ }
101
+ return index;
102
+ }
103
+
104
+ // ─── Empty Result ────────────────────────────────────────────────────────
105
+
106
+ function emptyResult(): WikiLintResult {
107
+ return {
108
+ stale: [],
109
+ orphans: [],
110
+ gaps: [],
111
+ brokenLinks: [],
112
+ contradictions: [],
113
+ specDrift: [],
114
+ decisionViolations: [],
115
+ missingRationale: [],
116
+ coveragePercent: 0,
117
+ };
118
+ }
119
+
120
+ // ─── Check Functions ─────────────────────────────────────────────────────
121
+
/**
 * Check 1: Stale articles — sourceHashes don't match current file hashes.
 *
 * Two passes:
 *  1. Each article whose hash is recorded in state is re-hashed; a mismatch
 *     means the article was edited after the last compilation.
 *  2. Each tracked source file in state.fileHashes is re-hashed; a mismatch
 *     means the code moved on since the wiki was compiled.
 *
 * Returns [] when no wiki state exists (nothing to compare against).
 */
function checkStale(
  articlePaths: string[],
  wikiDir: string,
  repoRoot: string,
): WikiLintFinding[] {
  const state = loadState(wikiDir);
  if (!state) return [];

  const findings: WikiLintFinding[] = [];

  for (const articlePath of articlePaths) {
    const rel = relative(wikiDir, articlePath);
    const articleHash = state.articleHashes[rel];
    // Articles not present in state were never compiled — nothing to compare.
    if (!articleHash) continue;

    // Read article to check if content hash matches
    try {
      const currentHash = hashFile(articlePath);
      if (currentHash && currentHash !== articleHash) {
        findings.push({
          check: "stale",
          severity: "warning",
          article: rel,
          message: `Article "${rel}" content has changed since last compilation`,
          source: articlePath,
        });
      }
    } catch {
      // File read error — skip
    }
  }

  // Also check if tracked source files have changed
  // NOTE: these findings carry the repo-relative source path in `article`,
  // not a wiki article path.
  for (const [file, hash] of Object.entries(state.fileHashes)) {
    const fullPath = join(repoRoot, file);
    const currentHash = hashFile(fullPath);
    // null hash (e.g. deleted/unreadable file) is not reported as stale here.
    if (currentHash !== null && currentHash !== hash) {
      findings.push({
        check: "stale",
        severity: "warning",
        article: file,
        message: `Source file "${file}" has changed since last wiki compilation`,
        source: fullPath,
      });
    }
  }

  return findings;
}
172
+
173
+ /** Check 2: Missing articles — wiki dir exists but has very few articles. */
174
+ function checkMissing(articlePaths: string[]): WikiLintFinding[] {
175
+ if (articlePaths.length < 5) {
176
+ return [
177
+ {
178
+ check: "gap",
179
+ severity: "info",
180
+ article: "",
181
+ message: `Wiki has only ${articlePaths.length} article(s). Run \`maina wiki init\` to generate articles from your codebase.`,
182
+ },
183
+ ];
184
+ }
185
+ return [];
186
+ }
187
+
/**
 * Check 3: Orphan articles — article references deleted source files.
 *
 * Scans each article for `<!-- source: path -->` markers and flags any
 * whose path (resolved against repoRoot) no longer exists on disk.
 *
 * NOTE(review): unlike checkStale/checkBrokenLinks, `article` here is the
 * absolute article path rather than the wiki-relative one — confirm this
 * asymmetry is intentional for downstream consumers.
 */
function checkOrphans(
  articlePaths: string[],
  repoRoot: string,
): WikiLintFinding[] {
  const findings: WikiLintFinding[] = [];

  for (const articlePath of articlePaths) {
    try {
      const content = readFileSync(articlePath, "utf-8");
      // Look for source file references in frontmatter-style comments
      // Pattern: <!-- source: path/to/file.ts -->
      const sourcePattern = /<!--\s*source:\s*(.+?)\s*-->/g;
      let match: RegExpExecArray | null = null;
      match = sourcePattern.exec(content);
      while (match !== null) {
        const sourcePath = match[1]?.trim() ?? "";
        if (sourcePath && !existsSync(join(repoRoot, sourcePath))) {
          findings.push({
            check: "orphan",
            severity: "warning",
            article: articlePath,
            message: `Article references deleted source file: ${sourcePath}`,
            source: sourcePath,
          });
        }
        // Advance the stateful regex to the next marker.
        match = sourcePattern.exec(content);
      }
    } catch {
      // File read error — skip
    }
  }

  return findings;
}
223
+
224
+ /** Check 4: Broken links — [[type:id]] points to non-existent article. */
225
+ function checkBrokenLinks(
226
+ articlePaths: string[],
227
+ wikiDir: string,
228
+ ): WikiLintFinding[] {
229
+ const index = buildArticleIndex(articlePaths, wikiDir);
230
+ const findings: WikiLintFinding[] = [];
231
+
232
+ for (const articlePath of articlePaths) {
233
+ try {
234
+ const content = readFileSync(articlePath, "utf-8");
235
+ const links = extractWikiLinks(content);
236
+ const rel = relative(wikiDir, articlePath);
237
+
238
+ for (const link of links) {
239
+ if (!index.has(link)) {
240
+ findings.push({
241
+ check: "broken_link",
242
+ severity: "error",
243
+ article: rel,
244
+ message: `Broken wiki link [[${link}]] — target article does not exist`,
245
+ source: articlePath,
246
+ });
247
+ }
248
+ }
249
+ } catch {
250
+ // File read error — skip
251
+ }
252
+ }
253
+
254
+ return findings;
255
+ }
256
+
/**
 * Check 5: Coverage gap — percentage of source files with wiki articles.
 *
 * Derived entirely from wiki state: the ratio of recorded article hashes to
 * tracked source files. Emits an info-level gap finding when below 50%.
 *
 * NOTE(review): this is a count ratio, not a per-file mapping — if there are
 * more articles than tracked files the percentage can exceed 100. Confirm
 * that is acceptable for reporting.
 */
function calculateCoverage(
  wikiDir: string,
  _repoRoot: string,
): { coveragePercent: number; finding: WikiLintFinding | null } {
  const state = loadState(wikiDir);
  // No state — wiki never compiled; report 0% with no finding.
  if (!state) {
    return { coveragePercent: 0, finding: null };
  }

  const trackedFiles = Object.keys(state.fileHashes);
  if (trackedFiles.length === 0) {
    return { coveragePercent: 0, finding: null };
  }

  // Count source files that have corresponding articles
  const articleHashes = Object.keys(state.articleHashes);
  const coveredCount = articleHashes.length;
  const totalCount = trackedFiles.length;
  const coveragePercent =
    totalCount > 0 ? Math.round((coveredCount / totalCount) * 100) : 0;

  const finding: WikiLintFinding | null =
    coveragePercent < 50
      ? {
          check: "gap",
          severity: "info",
          article: "",
          message: `Wiki coverage is ${coveragePercent}% (${coveredCount}/${totalCount} source files have articles). Consider running \`maina wiki compile\` to improve coverage.`,
        }
      : null;

  return { coveragePercent, finding };
}
291
+
292
+ // ─── Check 6: Spec Drift Detection ──────────────────────────────────────
293
+
294
+ /** Recursively collect source files (.ts, .js, .tsx, .jsx) under a directory. */
295
+ function collectSourceFiles(dir: string): string[] {
296
+ const results: string[] = [];
297
+ if (!existsSync(dir)) return results;
298
+ try {
299
+ const entries = readdirSync(dir, { withFileTypes: true });
300
+ for (const entry of entries) {
301
+ const full = join(dir, entry.name);
302
+ if (entry.isDirectory()) {
303
+ // Skip node_modules, dist, .git
304
+ if (
305
+ entry.name === "node_modules" ||
306
+ entry.name === "dist" ||
307
+ entry.name === ".git"
308
+ ) {
309
+ continue;
310
+ }
311
+ results.push(...collectSourceFiles(full));
312
+ } else if (/\.(ts|js|tsx|jsx)$/.test(entry.name)) {
313
+ results.push(full);
314
+ }
315
+ }
316
+ } catch {
317
+ // Directory read failure — skip gracefully
318
+ }
319
+ return results;
320
+ }
321
+
/**
 * Check 6: Spec Drift — detect when spec assertions conflict with code.
 *
 * Deterministic: pattern-matches spec assertions for "Result" / "never throw"
 * patterns, then scans feature source files for `throw` statements.
 *
 * File selection: the feature's entitiesModified paths when present,
 * otherwise a broad scan of <repoRoot>/src. One finding is emitted per
 * offending file (the first throw line found), at warning severity.
 */
function checkSpecDrift(
  featuresDir: string,
  repoRoot: string,
): WikiLintFinding[] {
  if (!existsSync(featuresDir)) return [];

  const result = extractFeatures(featuresDir);
  // Extraction failure is treated as "nothing to check", not an error.
  if (!result.ok) return [];

  const findings: WikiLintFinding[] = [];
  // Matches "Result<" (case-insensitive) or the phrase "never throw".
  const resultPattern = /result\s*<|never\s+throw/i;
  const throwPattern = /\bthrow\s+/;

  for (const feature of result.value) {
    // Check if any spec assertion mentions Result pattern or "never throw"
    const hasResultAssertion = feature.specAssertions.some((a) =>
      resultPattern.test(a),
    );

    if (!hasResultAssertion) continue;

    // Scan source files for throw statements
    // If feature has entitiesModified, use those; otherwise scan repo src/
    const filesToScan: string[] = [];

    if (feature.entitiesModified.length > 0) {
      for (const entity of feature.entitiesModified) {
        const fullPath = join(repoRoot, entity);
        if (existsSync(fullPath)) {
          filesToScan.push(fullPath);
        }
      }
    } else {
      // Scan src/ directory under repoRoot for a broad check
      const srcDir = join(repoRoot, "src");
      if (existsSync(srcDir)) {
        filesToScan.push(...collectSourceFiles(srcDir));
      }
    }

    for (const filePath of filesToScan) {
      try {
        const content = readFileSync(filePath, "utf-8");
        const lines = content.split("\n");
        for (let i = 0; i < lines.length; i++) {
          const line = lines[i] ?? "";
          // Skip comments (line comments and block-comment continuation lines).
          const trimmed = line.trim();
          if (trimmed.startsWith("//") || trimmed.startsWith("*")) continue;
          if (throwPattern.test(line)) {
            const relPath = relative(repoRoot, filePath);
            findings.push({
              check: "spec_drift",
              severity: "warning",
              article: feature.id,
              message: `Spec drift: feature "${feature.id}" asserts Result/never-throw pattern, but "${relPath}" line ${i + 1} uses throw`,
              source: filePath,
            });
            // One finding per file is enough
            break;
          }
        }
      } catch {
        // File read error — skip
      }
    }
  }

  return findings;
}
398
+
399
+ // ─── Check 7: Decision Violation Detection ──────────────────────────────
400
+
/** Known technology constraint patterns extracted from ADR text. */
interface TechConstraint {
  // Keyword searched for (lowercased) in the ADR's decision/context/rationale text.
  keyword: string;
  // Patterns that, when matched in source/config, contradict the keyword's decision.
  violations: { pattern: RegExp; description: string }[];
}

/**
 * Static keyword → violation-pattern table used by checkDecisionViolations.
 * A constraint activates when an accepted ADR's text contains its keyword;
 * its violation patterns are then run against source file contents and
 * root-level config file names.
 */
const TECH_CONSTRAINTS: TechConstraint[] = [
  {
    // ADR mandates bun:test — flag other test-framework imports.
    keyword: "bun:test",
    violations: [
      {
        pattern: /from\s+["'](?:jest|vitest|mocha|chai)["']/,
        description: "imports from jest/vitest/mocha/chai",
      },
      {
        pattern: /require\s*\(\s*["'](?:jest|vitest|mocha|chai)["']\s*\)/,
        description: "requires jest/vitest/mocha/chai",
      },
    ],
  },
  {
    // ADR mandates Biome — flag ESLint/Prettier configuration.
    keyword: "biome",
    violations: [
      {
        pattern: /\.eslintrc|eslint\.config/,
        description: "uses ESLint configuration",
      },
      {
        pattern: /\.prettierrc|prettier\.config/,
        description: "uses Prettier configuration",
      },
    ],
  },
  {
    // ADR mandates JWT auth — flag session-based middleware.
    keyword: "jwt",
    violations: [
      {
        pattern: /from\s+["']express-session["']/,
        description: "imports express-session instead of JWT",
      },
    ],
  },
  {
    // ADR mandates Result<...> returns — flag constructed-and-thrown errors.
    keyword: "result<",
    violations: [
      {
        pattern: /\bthrow\s+new\b/,
        description: "throws instead of returning Result",
      },
    ],
  },
  {
    // ADR mandates a never-throw policy — flag any throw statement.
    keyword: "never throw",
    violations: [
      {
        pattern: /\bthrow\s+/,
        description: "uses throw despite never-throw policy",
      },
    ],
  },
];
462
+
/**
 * Check 7: Decision Violations — detect code that contradicts ADR decisions.
 *
 * Deterministic: extracts technology keywords from accepted ADRs,
 * then scans source files for contradicting imports/configs.
 *
 * Two scan targets:
 *  - root-level lint/format config FILE NAMES (for the Biome-style checks);
 *  - source file CONTENTS across the whole repo (collectSourceFiles).
 * All findings are error severity; `article` carries the ADR id.
 */
function checkDecisionViolations(
  adrDir: string,
  repoRoot: string,
): WikiLintFinding[] {
  if (!existsSync(adrDir)) return [];

  const result = extractDecisions(adrDir);
  // Extraction failure is treated as "nothing to check".
  if (!result.ok) return [];

  const findings: WikiLintFinding[] = [];

  // Collect accepted decisions and their applicable constraints
  const activeConstraints: {
    decisionId: string;
    constraint: TechConstraint;
  }[] = [];

  for (const decision of result.value) {
    // Only accepted ADRs impose constraints; proposed/superseded ones don't.
    if (decision.status !== "accepted") continue;

    const fullText =
      `${decision.decision} ${decision.context} ${decision.rationale}`.toLowerCase();

    for (const constraint of TECH_CONSTRAINTS) {
      if (fullText.includes(constraint.keyword)) {
        activeConstraints.push({
          decisionId: decision.id,
          constraint,
        });
      }
    }
  }

  // No active constraints — skip the (expensive) repo scan entirely.
  if (activeConstraints.length === 0) return [];

  // Collect source files to scan
  const sourceFiles = collectSourceFiles(repoRoot);

  // Also check for config files at repo root for Biome/ESLint checks
  const configFiles: string[] = [];
  try {
    const rootEntries = readdirSync(repoRoot);
    for (const entry of rootEntries) {
      if (
        entry.startsWith(".eslintrc") ||
        entry.startsWith("eslint.config") ||
        entry.startsWith(".prettierrc") ||
        entry.startsWith("prettier.config")
      ) {
        configFiles.push(join(repoRoot, entry));
      }
    }
  } catch {
    // Root dir read failure — skip
  }

  // Check config file names against constraints
  for (const configFile of configFiles) {
    const fileName = relative(repoRoot, configFile);
    for (const { decisionId, constraint } of activeConstraints) {
      for (const violation of constraint.violations) {
        // Patterns here match the NAME of the config file, not its contents.
        if (violation.pattern.test(fileName)) {
          findings.push({
            check: "decision_violation",
            severity: "error",
            article: decisionId,
            message: `Decision violation: ADR "${decisionId}" requires ${constraint.keyword}, but "${fileName}" ${violation.description}`,
            source: configFile,
          });
        }
      }
    }
  }

  // Check source file contents
  for (const filePath of sourceFiles) {
    try {
      const content = readFileSync(filePath, "utf-8");
      const relPath = relative(repoRoot, filePath);

      for (const { decisionId, constraint } of activeConstraints) {
        for (const violation of constraint.violations) {
          if (violation.pattern.test(content)) {
            findings.push({
              check: "decision_violation",
              severity: "error",
              article: decisionId,
              message: `Decision violation: ADR "${decisionId}" requires ${constraint.keyword}, but "${relPath}" ${violation.description}`,
              source: filePath,
            });
            // One finding per file per constraint is enough
            break;
          }
        }
      }
    } catch {
      // File read error — skip
    }
  }

  return findings;
}
571
+
572
+ // ─── Check 8: Missing Rationale ─────────────────────────────────────────
573
+
574
+ /**
575
+ * Count commits for a file using git log.
576
+ * Uses Bun.spawnSync for synchronous operation.
577
+ * Returns 0 if git is unavailable or fails.
578
+ */
579
+ function countFileCommits(filePath: string, repoRoot: string): number {
580
+ try {
581
+ const proc = Bun.spawnSync(
582
+ ["git", "log", "--oneline", "--follow", "--", filePath],
583
+ { cwd: repoRoot, stdout: "pipe", stderr: "pipe" },
584
+ );
585
+ if (proc.exitCode !== 0) return 0;
586
+ const output = new TextDecoder().decode(proc.stdout);
587
+ return output.split("\n").filter((line) => line.trim().length > 0).length;
588
+ } catch {
589
+ return 0;
590
+ }
591
+ }
592
+
/**
 * Check 8: Missing Rationale — flag high-activity files without an ADR.
 *
 * Deterministic: counts commits per tracked source file via git log,
 * then checks if any decision mentions that file path.
 *
 * A file is flagged (info severity) when it has >= COMMIT_THRESHOLD commits
 * and no ADR entity mention overlaps its path in either direction.
 */
function checkMissingRationale(
  wikiDir: string,
  adrDir: string,
  repoRoot: string,
): WikiLintFinding[] {
  const findings: WikiLintFinding[] = [];

  // Load wiki state to get tracked files
  const state = loadState(wikiDir);
  if (!state) return [];

  const trackedFiles = Object.keys(state.fileHashes);
  if (trackedFiles.length === 0) return [];

  // Load decisions and collect all mentioned entity paths
  const mentionedPaths = new Set<string>();
  if (existsSync(adrDir)) {
    const decisionResult = extractDecisions(adrDir);
    if (decisionResult.ok) {
      for (const decision of decisionResult.value) {
        for (const mention of decision.entityMentions) {
          mentionedPaths.add(mention);
        }
      }
    }
  }

  // Minimum commit count before a file is considered "high activity".
  const COMMIT_THRESHOLD = 5;

  for (const file of trackedFiles) {
    // Check if any decision mentions this file path.
    // Substring match runs both ways so "src/foo.ts" matches a mention of
    // "foo.ts" and vice versa. NOTE(review): this is O(tracked × mentions)
    // and fairly loose matching — confirm it's precise enough in practice.
    const hasMention =
      mentionedPaths.has(file) ||
      [...mentionedPaths].some((m) => file.includes(m) || m.includes(file));

    if (hasMention) continue;

    // One `git log` invocation per unmentioned file.
    const commitCount = countFileCommits(file, repoRoot);
    if (commitCount >= COMMIT_THRESHOLD) {
      findings.push({
        check: "missing_rationale",
        severity: "info",
        article: file,
        message: `Missing rationale: "${file}" changed in ${commitCount} commits, no architecture decision recorded`,
        source: join(repoRoot, file),
      });
    }
  }

  return findings;
}
650
+
651
+ // ─── Check 9: Contradiction Detection ───────────────────────────────────
652
+
/**
 * Check 9: Contradictions — detect wiki articles that contradict code.
 *
 * Deterministic:
 *  - Entity articles: check if the entity still exists at file:line
 *  - Module articles: check if listed entities still exist
 *  - Feature articles: check if task status matches tasks.md
 *
 * Article type is derived from the first directory under wikiDir;
 * all findings are warning severity.
 */
function checkContradictions(
  wikiDir: string,
  repoRoot: string,
  featuresDir: string,
): WikiLintFinding[] {
  const findings: WikiLintFinding[] = [];
  const articlePaths = collectMarkdownFiles(wikiDir);

  for (const articlePath of articlePaths) {
    const type = getArticleTypeFromPath(articlePath, wikiDir);

    try {
      const content = readFileSync(articlePath, "utf-8");
      const rel = relative(wikiDir, articlePath);

      if (type === "entity") {
        // Check entity location references: "<!-- source: path/to/file.ts:42 -->"
        const sourceLinePattern = /<!--\s*source:\s*([^:]+):(\d+)\s*-->/g;
        let match: RegExpExecArray | null = null;
        match = sourceLinePattern.exec(content);
        while (match !== null) {
          const sourcePath = match[1]?.trim() ?? "";
          const lineNum = Number.parseInt(match[2] ?? "0", 10);
          const fullPath = join(repoRoot, sourcePath);

          if (!existsSync(fullPath)) {
            // Referenced file is gone entirely.
            findings.push({
              check: "contradiction",
              severity: "warning",
              article: rel,
              message: `Contradiction: entity article references "${sourcePath}" which no longer exists`,
              source: articlePath,
            });
          } else if (lineNum > 0) {
            // Check if the file has enough lines
            try {
              const sourceContent = readFileSync(fullPath, "utf-8");
              const totalLines = sourceContent.split("\n").length;
              if (lineNum > totalLines) {
                findings.push({
                  check: "contradiction",
                  severity: "warning",
                  article: rel,
                  message: `Contradiction: entity article references "${sourcePath}:${lineNum}" but file only has ${totalLines} lines`,
                  source: articlePath,
                });
              }
            } catch {
              // File read error — skip
            }
          }
          match = sourceLinePattern.exec(content);
        }
      }

      if (type === "module") {
        // Check entity list references: "- [[entity:foo]]" or "<!-- entity: path -->"
        const entityRefPattern = /<!--\s*entity:\s*(.+?)\s*-->/g;
        let match: RegExpExecArray | null = null;
        match = entityRefPattern.exec(content);
        while (match !== null) {
          const entityPath = match[1]?.trim() ?? "";
          if (entityPath && !existsSync(join(repoRoot, entityPath))) {
            findings.push({
              check: "contradiction",
              severity: "warning",
              article: rel,
              message: `Contradiction: module article lists entity "${entityPath}" which no longer exists`,
              source: articlePath,
            });
          }
          match = entityRefPattern.exec(content);
        }
      }

      if (type === "feature") {
        // Check if feature tasks.md has different completion status.
        // Delegates to the helper, which appends into `findings`.
        checkFeatureTaskContradiction(
          content,
          rel,
          articlePath,
          featuresDir,
          findings,
        );
      }
    } catch {
      // File read error — skip
    }
  }

  return findings;
}
753
+
/**
 * Compare task completion status in wiki feature article vs tasks.md.
 *
 * Identifies the feature via a "<!-- feature: id -->" marker in the article,
 * reads the article's "- [x] T123" style checkboxes, and appends a warning
 * finding into `findings` for every task whose checked state disagrees with
 * the extracted feature's task list. Silently returns when the feature id,
 * directory, or extraction result is unavailable.
 */
function checkFeatureTaskContradiction(
  articleContent: string,
  articleRel: string,
  articlePath: string,
  featuresDir: string,
  findings: WikiLintFinding[],
): void {
  // Extract feature ID from article — look for "<!-- feature: 001-foo -->"
  const featureIdMatch = articleContent.match(/<!--\s*feature:\s*(\S+)\s*-->/);
  if (!featureIdMatch?.[1]) return;

  const featureId = featureIdMatch[1];
  const featureDir = join(featuresDir, featureId);
  if (!existsSync(featureDir)) return;

  const featureResult = extractFeatures(featuresDir);
  if (!featureResult.ok) return;

  const feature = featureResult.value.find((f) => f.id === featureId);
  if (!feature) return;

  // Extract task completion from article content.
  // Matches "- [ ] T1" / "- [x] T1" / "- [X] T1"; any non-space mark counts
  // as completed.
  const articleTasks = new Map<string, boolean>();
  const taskPattern = /(?:- \[([ xX])\]\s+(T\d+))/g;
  let match: RegExpExecArray | null = null;
  match = taskPattern.exec(articleContent);
  while (match !== null) {
    const completed = match[1] !== " ";
    const taskId = match[2] ?? "";
    if (taskId) {
      articleTasks.set(taskId, completed);
    }
    match = taskPattern.exec(articleContent);
  }

  // Compare with actual tasks; tasks absent from the article are ignored.
  for (const task of feature.tasks) {
    const articleCompleted = articleTasks.get(task.id);
    if (articleCompleted !== undefined && articleCompleted !== task.completed) {
      findings.push({
        check: "contradiction",
        severity: "warning",
        article: articleRel,
        message: `Contradiction: wiki says task ${task.id} is ${articleCompleted ? "completed" : "incomplete"} but tasks.md says ${task.completed ? "completed" : "incomplete"}`,
        source: articlePath,
      });
    }
  }
}
806
+
807
+ // ─── Main ────────────────────────────────────────────────────────────────
808
+
/**
 * Run wiki lint checks on a wiki directory.
 *
 * Auto-skips gracefully if .maina/wiki/ doesn't exist — returns empty result.
 *
 * Orchestrates checks 1-9 sequentially and assembles a WikiLintResult;
 * the low-article and low-coverage findings are merged into `gaps`.
 */
export function runWikiLint(options: WikiLintOptions): WikiLintResult {
  const { wikiDir, repoRoot } = options;
  // Default locations mirror the repo layout conventions.
  const featuresDir =
    options.featuresDir ?? join(repoRoot, ".maina", "features");
  const adrDir = options.adrDir ?? join(repoRoot, "adr");

  // Auto-skip: wiki directory doesn't exist
  if (!existsSync(wikiDir)) {
    return emptyResult();
  }

  // Collect all article .md files.
  // NOTE(review): collectMarkdownFiles only returns .md paths, so this
  // filter looks defensive-only — confirm whether .state.json/.signals.json
  // can ever appear here.
  const articlePaths = collectMarkdownFiles(wikiDir).filter(
    (p) => !p.endsWith(".state.json") && !p.endsWith(".signals.json"),
  );

  // Run original checks (1-5)
  const stale = checkStale(articlePaths, wikiDir, repoRoot);
  const gaps = checkMissing(articlePaths);
  const orphans = checkOrphans(articlePaths, repoRoot);
  const brokenLinks = checkBrokenLinks(articlePaths, wikiDir);
  const { coveragePercent, finding: coverageFinding } = calculateCoverage(
    wikiDir,
    repoRoot,
  );

  // Add coverage finding to gaps if present
  const allGaps = [...gaps];
  if (coverageFinding) {
    allGaps.push(coverageFinding);
  }

  // Run advanced checks (6-9)
  const specDrift = checkSpecDrift(featuresDir, repoRoot);
  const decisionViolations = checkDecisionViolations(adrDir, repoRoot);
  const missingRationale = checkMissingRationale(wikiDir, adrDir, repoRoot);
  const contradictions = checkContradictions(wikiDir, repoRoot, featuresDir);

  return {
    stale,
    orphans,
    gaps: allGaps,
    brokenLinks,
    contradictions,
    specDrift,
    decisionViolations,
    missingRationale,
    coveragePercent,
  };
}
864
+
865
+ // ─── Pipeline Integration ────────────────────────────────────────────────
866
+
867
+ /**
868
+ * Convert WikiLintResult to Finding[] for verify pipeline integration.
869
+ * Maps each WikiLintFinding into the standard Finding shape used by
870
+ * the diff-only filter and pipeline reporting.
871
+ */
872
+ export function wikiLintToFindings(result: WikiLintResult): Finding[] {
873
+ const findings: Finding[] = [];
874
+
875
+ const allWikiFindings: WikiLintFinding[] = [
876
+ ...result.stale,
877
+ ...result.orphans,
878
+ ...result.gaps,
879
+ ...result.brokenLinks,
880
+ ...result.contradictions,
881
+ ...result.specDrift,
882
+ ...result.decisionViolations,
883
+ ...result.missingRationale,
884
+ ];
885
+
886
+ for (const wf of allWikiFindings) {
887
+ findings.push({
888
+ tool: "wiki-lint",
889
+ file: wf.source ?? wf.article,
890
+ line: 0,
891
+ message: wf.message,
892
+ severity: wf.severity,
893
+ ruleId: `wiki/${wf.check}`,
894
+ });
895
+ }
896
+
897
+ return findings;
898
+ }