@toolbaux/guardian 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +366 -0
  3. package/dist/adapters/csharp-adapter.js +149 -0
  4. package/dist/adapters/go-adapter.js +96 -0
  5. package/dist/adapters/index.js +16 -0
  6. package/dist/adapters/java-adapter.js +122 -0
  7. package/dist/adapters/python-adapter.js +183 -0
  8. package/dist/adapters/runner.js +69 -0
  9. package/dist/adapters/types.js +1 -0
  10. package/dist/adapters/typescript-adapter.js +179 -0
  11. package/dist/benchmarking/framework.js +91 -0
  12. package/dist/cli.js +343 -0
  13. package/dist/commands/analyze-depth.js +43 -0
  14. package/dist/commands/api-spec-extractor.js +52 -0
  15. package/dist/commands/breaking-change-analyzer.js +334 -0
  16. package/dist/commands/config-compliance.js +219 -0
  17. package/dist/commands/constraints.js +221 -0
  18. package/dist/commands/context.js +101 -0
  19. package/dist/commands/data-flow-tracer.js +291 -0
  20. package/dist/commands/dependency-impact-analyzer.js +27 -0
  21. package/dist/commands/diff.js +146 -0
  22. package/dist/commands/discrepancy.js +71 -0
  23. package/dist/commands/doc-generate.js +163 -0
  24. package/dist/commands/doc-html.js +120 -0
  25. package/dist/commands/drift.js +88 -0
  26. package/dist/commands/extract.js +16 -0
  27. package/dist/commands/feature-context.js +116 -0
  28. package/dist/commands/generate.js +339 -0
  29. package/dist/commands/guard.js +182 -0
  30. package/dist/commands/init.js +209 -0
  31. package/dist/commands/intel.js +20 -0
  32. package/dist/commands/license-dependency-auditor.js +33 -0
  33. package/dist/commands/performance-hotspot-profiler.js +42 -0
  34. package/dist/commands/search.js +314 -0
  35. package/dist/commands/security-boundary-auditor.js +359 -0
  36. package/dist/commands/simulate.js +294 -0
  37. package/dist/commands/summary.js +27 -0
  38. package/dist/commands/test-coverage-mapper.js +264 -0
  39. package/dist/commands/verify-drift.js +62 -0
  40. package/dist/config.js +441 -0
  41. package/dist/extract/ai-context-hints.js +107 -0
  42. package/dist/extract/analyzers/backend.js +1704 -0
  43. package/dist/extract/analyzers/depth.js +264 -0
  44. package/dist/extract/analyzers/frontend.js +2221 -0
  45. package/dist/extract/api-usage-tracker.js +19 -0
  46. package/dist/extract/cache.js +53 -0
  47. package/dist/extract/codebase-intel.js +190 -0
  48. package/dist/extract/compress.js +452 -0
  49. package/dist/extract/context-block.js +356 -0
  50. package/dist/extract/contracts.js +183 -0
  51. package/dist/extract/discrepancies.js +233 -0
  52. package/dist/extract/docs-loader.js +110 -0
  53. package/dist/extract/docs.js +2379 -0
  54. package/dist/extract/drift.js +1578 -0
  55. package/dist/extract/duplicates.js +435 -0
  56. package/dist/extract/feature-arcs.js +138 -0
  57. package/dist/extract/graph.js +76 -0
  58. package/dist/extract/html-doc.js +1409 -0
  59. package/dist/extract/ignore.js +45 -0
  60. package/dist/extract/index.js +455 -0
  61. package/dist/extract/llm-client.js +159 -0
  62. package/dist/extract/pattern-registry.js +141 -0
  63. package/dist/extract/product-doc.js +497 -0
  64. package/dist/extract/python.js +1202 -0
  65. package/dist/extract/runtime.js +193 -0
  66. package/dist/extract/schema-evolution-validator.js +35 -0
  67. package/dist/extract/test-gap-analyzer.js +20 -0
  68. package/dist/extract/tests.js +74 -0
  69. package/dist/extract/types.js +1 -0
  70. package/dist/extract/validate-backend.js +30 -0
  71. package/dist/extract/writer.js +11 -0
  72. package/dist/output-layout.js +37 -0
  73. package/dist/project-discovery.js +309 -0
  74. package/dist/schema/architecture.js +350 -0
  75. package/dist/schema/feature-spec.js +89 -0
  76. package/dist/schema/index.js +8 -0
  77. package/dist/schema/ux.js +46 -0
  78. package/package.json +75 -0
@@ -0,0 +1,146 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+ import yaml from "js-yaml";
4
/**
 * Run the `diff` command: compare a baseline architecture snapshot (YAML)
 * against the current snapshot and write a markdown changelog.
 *
 * @param {{ baselinePath: string, currentPath: string, output: string }} options
 * @returns {Promise<void>} resolves after the markdown is written, or early
 *   (with an error logged) if either snapshot cannot be read or parsed.
 */
export async function runDiff(options) {
    const { baselinePath, currentPath, output } = options;
    // Read both snapshots; on failure, report which file failed and why.
    const baselineRaw = await readSnapshotFile(baselinePath, "baseline");
    if (baselineRaw === null)
        return;
    const currentRaw = await readSnapshotFile(currentPath, "current");
    if (currentRaw === null)
        return;
    let baseline;
    let current;
    try {
        baseline = yaml.load(baselineRaw);
        current = yaml.load(currentRaw);
    }
    catch (err) {
        // Malformed YAML previously escaped as an unhandled rejection;
        // surface it as a readable error instead.
        console.error(`Failed to parse snapshot YAML: ${err instanceof Error ? err.message : String(err)}`);
        return;
    }
    const diffResult = computeDiff(baseline, current);
    const markdownRaw = generateDiffMarkdown(diffResult);
    await fs.mkdir(path.dirname(output), { recursive: true });
    await fs.writeFile(output, markdownRaw, "utf8");
    console.log(`Wrote diff markdown to ${output}`);
}
// Read one snapshot file; returns its text, or null after logging the failure.
// The underlying reason (ENOENT, EACCES, ...) is included — the original
// message hid it, making failures hard to diagnose.
async function readSnapshotFile(filePath, label) {
    try {
        return await fs.readFile(filePath, "utf8");
    }
    catch (err) {
        console.error(`Failed to read ${label} snapshot at ${filePath}: ${err instanceof Error ? err.message : String(err)}`);
        return null;
    }
}
30
/**
 * Compute added/removed/changed sets between two architecture snapshots.
 *
 * Endpoints are keyed by `id` (changed when path/method/handler differ);
 * data models are keyed by `name` (changed when field/relationship counts
 * differ); frontend pages stand in for components and are keyed by `path`.
 *
 * All collections are treated as optional: a snapshot missing `endpoints`,
 * `data_models`, or `frontend.pages` diffs as empty instead of throwing
 * (the original guarded models/pages but crashed on missing endpoints).
 *
 * @param {object} baseline - previously captured snapshot
 * @param {object} current  - freshly extracted snapshot
 * @returns {{endpoints: {added: string[], removed: string[], changed: string[]},
 *            models: {added: string[], removed: string[], changed: string[]},
 *            components: {added: string[], removed: string[]}}}
 */
function computeDiff(baseline, current) {
    const baselineEndpoints = new Map((baseline.endpoints ?? []).map(e => [e.id, e]));
    const currentEndpoints = new Map((current.endpoints ?? []).map(e => [e.id, e]));
    const endpointAdded = [];
    const endpointRemoved = [];
    const endpointChanged = [];
    for (const [id, e] of currentEndpoints.entries()) {
        if (!baselineEndpoints.has(id)) {
            endpointAdded.push(id);
        }
        else {
            const bE = baselineEndpoints.get(bE => bE)?.constructor ? baselineEndpoints.get(id) : baselineEndpoints.get(id);
            if (bE.path !== e.path || bE.method !== e.method || bE.handler !== e.handler) {
                endpointChanged.push(id);
            }
        }
    }
    for (const id of baselineEndpoints.keys()) {
        if (!currentEndpoints.has(id)) {
            endpointRemoved.push(id);
        }
    }
    const baselineModels = new Map(baseline.data_models?.map(m => [m.name, m]) ?? []);
    const currentModels = new Map(current.data_models?.map(m => [m.name, m]) ?? []);
    const modelsAdded = [];
    const modelsRemoved = [];
    const modelsChanged = [];
    for (const [name, m] of currentModels.entries()) {
        if (!baselineModels.has(name)) {
            modelsAdded.push(name);
        }
        else {
            const bM = baselineModels.get(name);
            // Guard fields/relationships — either side may omit them.
            const fieldsDiffer = (bM.fields?.length ?? 0) !== (m.fields?.length ?? 0);
            const relsDiffer = (bM.relationships?.length ?? 0) !== (m.relationships?.length ?? 0);
            if (fieldsDiffer || relsDiffer) {
                modelsChanged.push(name);
            }
        }
    }
    for (const name of baselineModels.keys()) {
        if (!currentModels.has(name)) {
            modelsRemoved.push(name);
        }
    }
    // Components proper live in the UX snapshot; frontend pages are the best
    // proxy available in the architecture snapshot.
    const baselinePages = new Set((baseline.frontend?.pages ?? []).map(p => p.path));
    const currentPages = new Set((current.frontend?.pages ?? []).map(p => p.path));
    const componentsAdded = [];
    const componentsRemoved = [];
    for (const p of currentPages) {
        if (!baselinePages.has(p))
            componentsAdded.push(p);
    }
    for (const p of baselinePages) {
        if (!currentPages.has(p))
            componentsRemoved.push(p);
    }
    return {
        endpoints: { added: endpointAdded, removed: endpointRemoved, changed: endpointChanged },
        models: { added: modelsAdded, removed: modelsRemoved, changed: modelsChanged },
        components: { added: componentsAdded, removed: componentsRemoved }
    };
}
92
/**
 * Render a diff result (from `computeDiff`) as a markdown changelog.
 *
 * Output is identical to the previous hand-unrolled version: a title, a
 * one-line summary, then Endpoints / Data Models / Frontend Pages sections,
 * each with Added/Removed/Changed subsections or "*No changes*".
 *
 * @param {{endpoints: object, models: object, components: object}} diff
 * @returns {string} markdown document (lines joined with "\n")
 */
function generateDiffMarkdown(diff) {
    const lines = [];
    lines.push("# Architecture Snapshot Changelog");
    lines.push("");
    lines.push(`**${diff.endpoints.added.length}** endpoints added, **${diff.models.changed.length}** models changed, **${diff.components.removed.length}** components/pages removed.`);
    lines.push("");
    appendSection(lines, "## Endpoints", [
        ["Added", diff.endpoints.added],
        ["Removed", diff.endpoints.removed],
        ["Changed", diff.endpoints.changed],
    ]);
    appendSection(lines, "## Data Models", [
        ["Added", diff.models.added],
        ["Removed", diff.models.removed],
        ["Changed", diff.models.changed],
    ]);
    appendSection(lines, "## Frontend Pages (Components)", [
        ["Added", diff.components.added],
        ["Removed", diff.components.removed],
    ]);
    return lines.join("\n");
}
// Append one "## ..." section: a "### <label>" bullet list per non-empty
// group, "*No changes*" when every group is empty, then a trailing blank line
// (matching the original's per-section formatting exactly).
function appendSection(lines, heading, groups) {
    lines.push(heading);
    let anyChanges = false;
    for (const [label, ids] of groups) {
        if (ids.length > 0) {
            anyChanges = true;
            lines.push(`### ${label}`);
            ids.forEach(e => lines.push(`- ${e}`));
        }
    }
    if (!anyChanges) {
        lines.push("*No changes*");
    }
    lines.push("");
}
@@ -0,0 +1,71 @@
1
+ /**
2
+ * `specguard discrepancy` — diff current codebase intelligence against a baseline.
3
+ *
4
+ * Reads:
5
+ * - specs-out/machine/codebase-intelligence.json
6
+ * - specs-out/machine/product-document.baseline.json (optional)
7
+ * - feature-specs/*.yaml (optional)
8
+ *
9
+ * Writes:
10
+ * - specs-out/machine/discrepancies.json (--format json, default)
11
+ * - specs-out/human/discrepancies.md (--format md)
12
+ * Both if format is omitted.
13
+ */
14
+ import fs from "node:fs/promises";
15
+ import path from "node:path";
16
+ import { buildDiscrepancyReport, renderDiscrepancyMarkdown, } from "../extract/discrepancies.js";
17
+ import { loadCodebaseIntelligence } from "../extract/codebase-intel.js";
18
+ import { getOutputLayout } from "../output-layout.js";
19
/**
 * Run the `discrepancy` command: build a discrepancy report from the cached
 * codebase intelligence and optional baseline/feature specs, write it as
 * JSON and/or markdown, and print a one-line summary.
 *
 * @param {{ specs: string, featureSpecs?: string, format?: "json"|"md"|"both",
 *           output?: string }} options
 * @returns {Promise<void>}
 * @throws {Error} when codebase-intelligence.json is missing/unreadable
 *   (with the original failure attached as `cause`).
 */
export async function runDiscrepancy(options) {
    const specsDir = path.resolve(options.specs);
    const layout = getOutputLayout(specsDir);
    // Load codebase intelligence
    const intelPath = path.join(layout.machineDir, "codebase-intelligence.json");
    const intel = await loadCodebaseIntelligence(intelPath).catch((err) => {
        // Preserve the underlying error so callers can see the real reason.
        throw new Error(`Could not load codebase-intelligence.json from ${intelPath}. Run \`specguard intel --specs ${options.specs}\` first.`, { cause: err });
    });
    const baselinePath = path.join(layout.machineDir, "product-document.baseline.json");
    const featureSpecsDir = options.featureSpecs ? path.resolve(options.featureSpecs) : null;
    const report = await buildDiscrepancyReport({
        intel,
        baselinePath,
        featureSpecsDir,
    });
    const format = options.format ?? "json";
    const writeJson = format === "json" || format === "both";
    const writeMd = format === "md" || format === "both";
    if (writeJson) {
        const jsonPath = options.output
            ? path.resolve(options.output)
            : path.join(layout.machineDir, "discrepancies.json");
        await fs.mkdir(path.dirname(jsonPath), { recursive: true });
        await fs.writeFile(jsonPath, JSON.stringify(report, null, 2), "utf8");
        console.log(`Wrote ${jsonPath}`);
    }
    if (writeMd) {
        // `--output` only redirects the markdown when format is md-only;
        // with "both", JSON claims the custom path and md uses the default.
        const mdPath = options.output && format === "md"
            ? path.resolve(options.output)
            : path.join(layout.humanDir, "discrepancies.md");
        const md = renderDiscrepancyMarkdown(report);
        await fs.mkdir(path.dirname(mdPath), { recursive: true });
        await fs.writeFile(mdPath, md, "utf8");
        console.log(`Wrote ${mdPath}`);
    }
    // Exit summary
    const { total_issues, has_critical } = report.summary;
    if (total_issues === 0) {
        console.log("✓ No discrepancies found.");
    }
    else {
        console.log(`${has_critical ? "⚠ " : ""}${total_issues} discrepancy(s): ` +
            [
                report.new_endpoints.length > 0 && `${report.new_endpoints.length} new endpoint(s)`,
                report.removed_endpoints.length > 0 && `${report.removed_endpoints.length} removed endpoint(s)`,
                report.drifted_models.length > 0 && `${report.drifted_models.length} drifted model(s)`,
                report.orphan_specs.length > 0 && `${report.orphan_specs.length} orphan spec(s)`,
                report.untracked_endpoints.length > 0 && `${report.untracked_endpoints.length} untracked endpoint(s)`,
            ]
                .filter(Boolean)
                .join(", "));
    }
}
@@ -0,0 +1,163 @@
1
+ /**
2
+ * `specguard doc-generate` — generate a human-readable, self-updating product document.
3
+ *
4
+ * Reads:
5
+ * - specs-out/machine/codebase-intelligence.json
6
+ * - feature-specs/*.yaml (optional)
7
+ * - specs-out/machine/feature-arcs.json (optional, if exists)
8
+ * - specs-out/machine/product-document.baseline.json (optional, for discrepancy section)
9
+ *
10
+ * Writes:
11
+ * - specs-out/human/product-document.md
12
+ * - specs-out/machine/product-document.baseline.json (if --update-baseline)
13
+ *
14
+ * LLM env vars (optional — all deterministic sections write regardless):
15
+ * SPECGUARD_LLM_ENDPOINT, SPECGUARD_LLM_API_KEY, SPECGUARD_LLM_MODEL
16
+ * SPECGUARD_OLLAMA_HOST, SPECGUARD_OLLAMA_MODEL
17
+ */
18
+ import fs from "node:fs/promises";
19
+ import path from "node:path";
20
+ import { loadCodebaseIntelligence } from "../extract/codebase-intel.js";
21
+ import { buildFeatureArcs } from "../extract/feature-arcs.js";
22
+ import { buildDiscrepancyReport, buildBaseline, } from "../extract/discrepancies.js";
23
+ import { renderProductDocument } from "../extract/product-doc.js";
24
+ import { loadExistingDocs } from "../extract/docs-loader.js";
25
+ import { loadLlmConfig } from "../extract/llm-client.js";
26
+ import { getOutputLayout } from "../output-layout.js";
27
/**
 * Run the `doc-generate` command: assemble codebase intelligence, optional
 * feature arcs, a discrepancy report, existing docs, and README-derived
 * product context, then render a single product-document.md (optionally
 * refreshing the drift baseline).
 *
 * @param {{ specs: string, featureSpecs?: string, output?: string,
 *           updateBaseline?: boolean }} options
 * @returns {Promise<void>}
 * @throws {Error} when codebase-intelligence.json cannot be loaded
 *   (original failure attached as `cause`).
 */
export async function runDocGenerate(options) {
    const specsDir = path.resolve(options.specs);
    const layout = getOutputLayout(specsDir);
    // ── Step 1: LLM config resolution ─────────────────────────────────────────
    process.stdout.write("Resolving LLM config... ");
    const llmConfig = await loadLlmConfig();
    if (!llmConfig) {
        console.log("none (deterministic only)");
        console.log(" Tip: set SPECGUARD_LLM_ENDPOINT + SPECGUARD_LLM_API_KEY, or run Ollama locally, to add narrative summaries.");
    }
    else if (llmConfig.provider === "ollama") {
        console.log(`Ollama (${llmConfig.model} at ${llmConfig.endpoint.replace("/api/chat", "")})`);
    }
    else {
        console.log(`${llmConfig.provider} (${llmConfig.model})`);
        console.log(" ⚠ Cloud LLM enabled — this will consume API tokens (one call per section: overview, API domains, each model). Use Ollama to avoid costs.");
    }
    // ── Step 2: Load codebase intelligence ────────────────────────────────────
    const intelPath = path.join(layout.machineDir, "codebase-intelligence.json");
    process.stdout.write("Loading codebase intelligence... ");
    const intel = await loadCodebaseIntelligence(intelPath).catch((err) => {
        console.log("failed");
        // Keep the original error attached so the real cause is diagnosable.
        throw new Error(`Could not load ${intelPath}. Run \`specguard intel --specs ${options.specs}\` first.`, { cause: err });
    });
    console.log(`${intel.meta.counts.endpoints} endpoints, ${intel.meta.counts.models} models, ` +
        `${intel.meta.counts.enums} enums, ${intel.meta.counts.tasks} tasks`);
    // ── Step 3: Feature arcs (optional) ───────────────────────────────────────
    const arcsPath = path.join(layout.machineDir, "feature-arcs.json");
    let featureArcs = null;
    if (options.featureSpecs) {
        const featureSpecsDir = path.resolve(options.featureSpecs);
        process.stdout.write(`Building feature arcs from ${options.featureSpecs}... `);
        featureArcs = await buildFeatureArcs(featureSpecsDir);
        const arcCount = Object.keys(featureArcs.arcs).length;
        console.log(`${arcCount} arc(s)`);
        await fs.writeFile(arcsPath, JSON.stringify(featureArcs, null, 2), "utf8");
        console.log(` Wrote ${arcsPath}`);
    }
    else {
        try {
            const raw = await fs.readFile(arcsPath, "utf8");
            featureArcs = JSON.parse(raw);
            const arcCount = Object.keys(featureArcs.arcs).length;
            console.log(`Feature arcs: loaded from cache (${arcCount} arc(s))`);
        }
        catch {
            // No arcs — skip feature timeline section silently
        }
    }
    // ── Step 4: Discrepancy report ─────────────────────────────────────────────
    const baselinePath = path.join(layout.machineDir, "product-document.baseline.json");
    process.stdout.write("Computing discrepancies... ");
    const discrepancies = await buildDiscrepancyReport({
        intel,
        baselinePath,
        featureSpecsDir: options.featureSpecs ? path.resolve(options.featureSpecs) : null,
    });
    if (discrepancies.summary.total_issues === 0) {
        console.log("none (in sync)");
    }
    else {
        const critical = discrepancies.summary.has_critical ? " ⚠ critical" : "";
        console.log(`${discrepancies.summary.total_issues} issue(s)${critical}`);
    }
    // ── Step 5: Load existing docs (hld.md, summary.md, integration.md, etc.) ─
    process.stdout.write("Loading existing docs... ");
    const existingDocs = await loadExistingDocs(layout.machineDocsDir);
    const loadedKeys = Object.entries(existingDocs).filter(([, v]) => v != null).map(([k]) => k);
    console.log(loadedKeys.length > 0 ? loadedKeys.join(", ") : "none");
    // ── Step 5b: Load product context (from config description or README) ─────
    let productContext = null;
    {
        // Try config.project.description first
        // Then try README.md auto-detection
        const readmeCandidates = [
            path.join(path.dirname(specsDir), "README.md"),
            path.join(path.dirname(specsDir), "readme.md"),
            path.join(path.dirname(specsDir), "Readme.md"),
        ];
        for (const candidate of readmeCandidates) {
            try {
                const raw = await fs.readFile(candidate, "utf8");
                // Extract first meaningful content (up to first ## or 800 chars)
                const lines = raw.split("\n");
                const contextLines = [];
                let inHeader = false;
                let sectionCount = 0;
                for (const line of lines) {
                    if (line.startsWith("## ") && sectionCount > 0)
                        break; // stop at second H2
                    if (line.startsWith("## "))
                        sectionCount++;
                    if (line.startsWith("# ")) {
                        inHeader = true;
                        continue;
                    } // skip H1
                    if (inHeader && line.trim() === "") {
                        // skip the blank line immediately after the H1
                        inHeader = false;
                        continue;
                    }
                    contextLines.push(line);
                    if (contextLines.join("\n").length > 800)
                        break;
                }
                productContext = contextLines.join("\n").trim();
                if (productContext.length > 0) {
                    console.log(`Product context: loaded from README.md (${productContext.length} chars)`);
                }
                break;
            }
            catch {
                // Not found, try next
            }
        }
    }
    // ── Step 6: Render product document ───────────────────────────────────────
    console.log("Rendering product document...");
    const content = await renderProductDocument({ intel, featureArcs, discrepancies, llmConfig, existingDocs, productContext });
    // ── Step 7: Write output ───────────────────────────────────────────────────
    const outputPath = options.output
        ? path.resolve(options.output)
        : path.join(layout.humanDir, "product-document.md");
    await fs.mkdir(path.dirname(outputPath), { recursive: true });
    await fs.writeFile(outputPath, content, "utf8");
    console.log(`Wrote ${outputPath}`);
    // ── Step 8: Update baseline (optional) ────────────────────────────────────
    if (options.updateBaseline) {
        const baseline = buildBaseline(intel);
        await fs.writeFile(baselinePath, JSON.stringify(baseline, null, 2), "utf8");
        console.log(`Wrote baseline ${baselinePath}`);
    }
    // ── Done ──────────────────────────────────────────────────────────────────
    if (discrepancies.summary.total_issues > 0) {
        const critical = discrepancies.summary.has_critical ? " (critical changes detected)" : "";
        console.log(` ⚠ ${discrepancies.summary.total_issues} discrepancy(s) found${critical}. Run \`specguard discrepancy\` for details.`);
    }
}
@@ -0,0 +1,120 @@
1
+ /**
2
+ * `specguard doc-html` — generate a self-contained Javadoc-style HTML viewer.
3
+ *
4
+ * Same data pipeline as `doc-generate` but outputs a single index.html with:
5
+ * - Fixed sidebar navigation (collapsible, searchable)
6
+ * - Mermaid diagrams rendered in-browser
7
+ * - Tables, collapsible sections, scroll-spy active states
8
+ * - No server or build step required — open directly in browser
9
+ */
10
+ import fs from "node:fs/promises";
11
+ import path from "node:path";
12
+ import yaml from "js-yaml";
13
+ import { loadCodebaseIntelligence } from "../extract/codebase-intel.js";
14
+ import { buildDiscrepancyReport } from "../extract/discrepancies.js";
15
+ import { loadExistingDocs } from "../extract/docs-loader.js";
16
+ import { renderHtmlDoc } from "../extract/html-doc.js";
17
+ import { getOutputLayout } from "../output-layout.js";
18
/**
 * Run the `doc-html` command: load codebase intelligence, optional feature
 * arcs, discrepancies, existing docs, the UX snapshot, and README-derived
 * product context, then render and write a self-contained multi-page HTML
 * documentation viewer.
 *
 * @param {{ specs: string, output?: string }} options
 * @returns {Promise<void>}
 * @throws {Error} when codebase-intelligence.json cannot be loaded
 *   (original failure attached as `cause`).
 */
export async function runDocHtml(options) {
    const specsDir = path.resolve(options.specs);
    const layout = getOutputLayout(specsDir);
    // ── Load codebase intelligence ────────────────────────────────────────────
    const intelPath = path.join(layout.machineDir, "codebase-intelligence.json");
    process.stdout.write("Loading codebase intelligence... ");
    const intel = await loadCodebaseIntelligence(intelPath).catch((err) => {
        console.log("failed");
        // Preserve the original error as `cause` for diagnosability.
        throw new Error(`Could not load ${intelPath}. Run \`specguard intel --specs ${options.specs}\` first.`, { cause: err });
    });
    console.log(`${intel.meta.counts.endpoints} endpoints, ${intel.meta.counts.models} models`);
    // ── Feature arcs (optional) ───────────────────────────────────────────────
    const arcsPath = path.join(layout.machineDir, "feature-arcs.json");
    let featureArcs = null;
    try {
        const raw = await fs.readFile(arcsPath, "utf8");
        featureArcs = JSON.parse(raw);
        const arcCount = Object.keys(featureArcs.arcs).length;
        console.log(`Feature arcs: ${arcCount} arc(s)`);
    }
    catch {
        // optional
    }
    // ── Discrepancy report ────────────────────────────────────────────────────
    const baselinePath = path.join(layout.machineDir, "product-document.baseline.json");
    process.stdout.write("Computing discrepancies... ");
    const discrepancies = await buildDiscrepancyReport({
        intel,
        baselinePath,
        featureSpecsDir: null,
    });
    console.log(discrepancies.summary.total_issues === 0
        ? "none"
        : `${discrepancies.summary.total_issues} issue(s)`);
    // ── Load existing docs ────────────────────────────────────────────────────
    process.stdout.write("Loading existing docs... ");
    const existingDocs = await loadExistingDocs(layout.machineDocsDir);
    const loadedKeys = Object.entries(existingDocs).filter(([, v]) => v != null).map(([k]) => k);
    console.log(loadedKeys.length > 0 ? loadedKeys.join(", ") : "none");
    // ── Load UX snapshot (for component graph) ───────────────────────────────
    process.stdout.write("Loading UX snapshot... ");
    let uxSnapshot = null;
    try {
        const uxPath = path.join(layout.machineDir, "ux.snapshot.yaml");
        const raw = await fs.readFile(uxPath, "utf8");
        uxSnapshot = yaml.load(raw);
        console.log(`${uxSnapshot.components?.length ?? 0} components, ${uxSnapshot.component_graph?.length ?? 0} edges`);
    }
    catch {
        console.log("not found");
    }
    // ── Load product context from README ──────────────────────────────────────
    let productContext = null;
    const readmeCandidates = [
        path.join(path.dirname(specsDir), "README.md"),
        path.join(path.dirname(specsDir), "readme.md"),
    ];
    for (const candidate of readmeCandidates) {
        try {
            const raw = await fs.readFile(candidate, "utf8");
            const lines = raw.split("\n");
            const contextLines = [];
            let sectionCount = 0;
            for (const line of lines) {
                if (line.startsWith("## ") && sectionCount > 0)
                    break; // stop at the second H2
                if (line.startsWith("## "))
                    sectionCount++;
                if (line.startsWith("# "))
                    continue; // skip H1
                contextLines.push(line);
                if (contextLines.join("\n").length > 1200)
                    break;
            }
            productContext = contextLines.join("\n").trim();
            break;
        }
        catch {
            // Not found
        }
    }
    // ── Render HTML (multi-page) ──────────────────────────────────────────────
    console.log("Rendering HTML viewer...");
    const files = renderHtmlDoc({ intel, featureArcs, discrepancies, existingDocs, uxSnapshot, productContext });
    // ── Write output files ────────────────────────────────────────────────────
    const outputDir = options.output
        ? path.resolve(options.output)
        : path.join(layout.humanDir, "docs");
    await fs.mkdir(outputDir, { recursive: true });
    let totalBytes = 0;
    for (const [filename, html] of Object.entries(files)) {
        const filePath = path.join(outputDir, filename);
        await fs.writeFile(filePath, html, "utf8");
        totalBytes += Buffer.byteLength(html, "utf8");
    }
    const totalKb = Math.round(totalBytes / 1024);
    const fileCount = Object.keys(files).length;
    console.log(`Wrote ${fileCount} files to ${outputDir}/ (${totalKb} KB total)`);
    console.log(` Open in browser: open "${path.join(outputDir, "index.html")}"`);
    // Fix: this loop previously printed a garbled literal (` $(unknown)`)
    // instead of the generated filename.
    for (const filename of Object.keys(files)) {
        console.log(` - ${filename}`);
    }
}
@@ -0,0 +1,88 @@
1
+ import fs from "node:fs/promises";
2
+ import path from "node:path";
3
+ import { computeProjectDrift } from "../extract/drift.js";
4
+ import { logResolvedProjectPaths, resolveProjectPaths } from "../project-discovery.js";
5
/**
 * Run the `drift` command: compute the project drift report, write it to
 * JSON, optionally persist a baseline and/or append a history entry, and
 * print a human-readable summary to stdout.
 *
 * @param {{ projectRoot?: string, backendRoot?: string, frontendRoot?: string,
 *           configPath?: string, output?: string,
 *           baseline?: boolean|string, history?: boolean|string }} options
 * @returns {Promise<void>}
 */
export async function runDrift(options) {
    const resolved = await resolveProjectPaths({
        projectRoot: options.projectRoot,
        backendRoot: options.backendRoot,
        frontendRoot: options.frontendRoot,
        configPath: options.configPath
    });
    logResolvedProjectPaths(resolved);
    const drift = await computeProjectDrift({
        backendRoot: resolved.backendRoot,
        frontendRoot: resolved.frontendRoot,
        configPath: options.configPath
    });
    const outputPath = path.resolve(options.output ?? "specs-out/drift.report.json");
    await fs.mkdir(path.dirname(outputPath), { recursive: true });
    await fs.writeFile(outputPath, JSON.stringify(drift, null, 2));
    // Shared by the baseline and history branches below (previously
    // duplicated in each branch).
    const config = resolved.config;
    const projectRoot = resolved.workspaceRoot;
    // Resolve a possibly-relative path against the workspace root.
    const resolveAgainstRoot = (p) => (path.isAbsolute(p) ? p : path.resolve(projectRoot, p));
    if (options.baseline) {
        // `--baseline` may be a flag (use configured/default path) or a path.
        const baselinePath = typeof options.baseline === "string"
            ? options.baseline
            : config.drift?.baselinePath || "specs-out/baseline.json";
        const resolvedBaseline = resolveAgainstRoot(baselinePath);
        const baselinePayload = {
            created_at: new Date().toISOString(),
            K_t: drift.K_t,
            drift
        };
        await fs.mkdir(path.dirname(resolvedBaseline), { recursive: true });
        await fs.writeFile(resolvedBaseline, JSON.stringify(baselinePayload, null, 2));
        console.log(`Wrote baseline ${resolvedBaseline}`);
    }
    if (options.history) {
        // `--history` may likewise be a flag or an explicit path.
        const historyPath = typeof options.history === "string"
            ? options.history
            : config.drift?.historyPath || "specs-out/drift.history.jsonl";
        const resolvedHistory = resolveAgainstRoot(historyPath);
        // One JSONL entry per run — appended, never rewritten.
        const entry = {
            timestamp: new Date().toISOString(),
            graph_level: drift.graph_level,
            D_t: drift.D_t,
            K_t: drift.K_t,
            delta: drift.delta,
            status: drift.status,
            metrics: drift.metrics,
            details: drift.details,
            capacity: drift.capacity,
            growth: drift.growth,
            scales: drift.scales
        };
        await fs.mkdir(path.dirname(resolvedHistory), { recursive: true });
        await fs.appendFile(resolvedHistory, `${JSON.stringify(entry)}\n`);
        console.log(`Appended history ${resolvedHistory}`);
    }
    console.log("SpecGuard Drift Report");
    console.log(`Status: ${drift.status}`);
    console.log(`D_t: ${drift.D_t.toFixed(4)}`);
    console.log(`K_t: ${drift.K_t.toFixed(4)}`);
    console.log(`Delta: ${drift.delta.toFixed(4)}`);
    console.log(`Entropy: ${drift.metrics.entropy.toFixed(4)} | Cross-Layer: ${drift.metrics.cross_layer_ratio.toFixed(4)} | Cycle Density: ${drift.metrics.cycle_density.toFixed(4)} | Modularity Gap: ${drift.metrics.modularity_gap.toFixed(4)}`);
    if (drift.scales.length > 1) {
        console.log("Scale Summary:");
        for (const scale of drift.scales) {
            console.log(`- ${scale.level}: ${scale.status} | Delta ${scale.delta.toFixed(4)} | Edges ${scale.details.edges}`);
        }
    }
    const totalCapacity = drift.capacity.total;
    if (totalCapacity) {
        const ratio = totalCapacity.ratio !== undefined ? totalCapacity.ratio.toFixed(2) : "n/a";
        const budget = totalCapacity.budget ?? 0;
        console.log(`Capacity: ${drift.capacity.status} | Used ${totalCapacity.used} / ${budget || "n/a"} | Ratio ${ratio}`);
    }
    else {
        console.log(`Capacity: ${drift.capacity.status}`);
    }
    console.log(`Growth: ${drift.growth.status} | ${drift.growth.edges_per_day.toFixed(2)} edges/day (${drift.growth.trend})`);
    console.log(`Wrote ${outputPath}`);
}
@@ -0,0 +1,16 @@
1
+ import path from "node:path";
2
+ import { extractProject } from "../extract/index.js";
3
+ import { runIntel } from "./intel.js";
4
/**
 * Run the `extract` command: extract the project's architecture and UX
 * snapshots, then rebuild codebase intelligence from the fresh output.
 *
 * @param {{ output: string }} options - options forwarded to `extractProject`;
 *   `output` is the specs output directory.
 * @returns {Promise<void>}
 */
export async function runExtract(options) {
    const { architecturePath, uxPath } = await extractProject(options);
    console.log(`Wrote ${architecturePath}`);
    console.log(`Wrote ${uxPath}`);
    // Auto-build codebase intelligence after every extract
    const specsDir = path.resolve(options.output);
    try {
        await runIntel({ specs: specsDir });
    }
    catch (err) {
        // Non-fatal — intel build failure should not break extract — but it
        // was previously swallowed silently, leaving stale/missing intel with
        // no indication why. Surface a warning instead.
        console.warn(`Warning: codebase intelligence build failed: ${err instanceof Error ? err.message : String(err)}`);
    }
}