@devinnn/docdrift 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,80 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.ensureDocdriftDir = ensureDocdriftDir;
7
+ exports.createCustomInstructionsFile = createCustomInstructionsFile;
8
+ exports.ensureGitignore = ensureGitignore;
9
+ exports.addGitHubWorkflow = addGitHubWorkflow;
10
+ exports.runOnboarding = runOnboarding;
11
+ const node_fs_1 = __importDefault(require("node:fs"));
12
+ const node_path_1 = __importDefault(require("node:path"));
13
+ const DOCDRIFT_DIR = ".docdrift";
14
+ const CUSTOM_INSTRUCTIONS_FILE = ".docdrift/DocDrift.md";
15
+ const GITIGNORE_BLOCK = `
16
+ # Docdrift run artifacts
17
+ .docdrift/evidence
18
+ .docdrift/*.log
19
+ .docdrift/state.json
20
+ .docdrift/run-output.json
21
+ `;
22
+ const WORKFLOW_CONTENT = "name: docdrift\n\non:\n push:\n branches: [\"main\"]\n pull_request:\n branches: [\"main\"]\n workflow_dispatch:\n\njobs:\n docdrift:\n runs-on: ubuntu-latest\n permissions:\n contents: write\n pull-requests: write\n issues: write\n steps:\n - uses: actions/checkout@v4\n with:\n fetch-depth: 0\n\n - uses: actions/setup-node@v4\n with:\n node-version: \"20\"\n\n - run: npm install\n\n - name: Determine SHAs\n id: shas\n run: |\n if [ \"${{ github.event_name }}\" = \"pull_request\" ]; then\n HEAD_SHA=\"${{ github.event.pull_request.head.sha }}\"\n BASE_SHA=\"${{ github.event.pull_request.base.sha }}\"\n else\n HEAD_SHA=\"${{ github.sha }}\"\n BASE_SHA=\"${{ github.event.before }}\"\n if [ -z \"$BASE_SHA\" ] || [ \"$BASE_SHA\" = \"0000000000000000000000000000000000000000\" ]; then\n BASE_SHA=\"$(git rev-parse HEAD^)\"\n fi\n fi\n echo \"head=${HEAD_SHA}\" >> $GITHUB_OUTPUT\n echo \"base=${BASE_SHA}\" >> $GITHUB_OUTPUT\n echo \"pr_number=${{ github.event.pull_request.number || '' }}\" >> $GITHUB_OUTPUT\n\n - name: Validate config\n run: npx docdrift validate\n\n - name: Run Doc Drift\n env:\n DEVIN_API_KEY: ${{ secrets.DEVIN_API_KEY }}\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n GITHUB_REPOSITORY: ${{ github.repository }}\n GITHUB_SHA: ${{ github.sha }}\n GITHUB_EVENT_NAME: ${{ github.event_name }}\n GITHUB_PR_NUMBER: ${{ steps.shas.outputs.pr_number }}\n run: |\n PR_ARGS=\"\"\n if [ -n \"$GITHUB_PR_NUMBER\" ]; then\n PR_ARGS=\"--trigger pull_request --pr-number $GITHUB_PR_NUMBER\"\n fi\n npx docdrift run --base ${{ steps.shas.outputs.base }} --head ${{ steps.shas.outputs.head }} $PR_ARGS\n\n - name: Upload artifacts\n if: always()\n uses: actions/upload-artifact@v4\n with:\n name: docdrift-artifacts\n path: |\n .docdrift/drift_report.json\n .docdrift/metrics.json\n .docdrift/run-output.json\n .docdrift/evidence/**\n .docdrift/state.json\n";
23
/**
 * Create the `.docdrift/` working directory under `cwd` if it is absent.
 * Idempotent: `recursive: true` makes an existing directory a no-op.
 */
function ensureDocdriftDir(cwd) {
    const targetDir = node_path_1.default.resolve(cwd, DOCDRIFT_DIR);
    node_fs_1.default.mkdirSync(targetDir, { recursive: true });
}
27
+ const CUSTOM_INSTRUCTIONS_TEMPLATE = `# DocDrift custom instructions
28
+
29
+ - **PR titles:** Start every pull request title with \`[docdrift]\`.
30
+ - Add project-specific guidance for Devin here (e.g. terminology, tone, what to avoid).
31
+ `;
32
/**
 * Write the DocDrift custom-instructions template to
 * `.docdrift/DocDrift.md` under `cwd`, creating the directory first.
 * Overwrites any existing file.
 */
function createCustomInstructionsFile(cwd) {
    ensureDocdriftDir(cwd);
    const destination = node_path_1.default.resolve(cwd, CUSTOM_INSTRUCTIONS_FILE);
    node_fs_1.default.writeFileSync(destination, CUSTOM_INSTRUCTIONS_TEMPLATE.trimStart(), "utf8");
}
37
+ const GITIGNORE_ENTRIES = [
38
+ ".docdrift/evidence",
39
+ ".docdrift/*.log",
40
+ ".docdrift/state.json",
41
+ ".docdrift/run-output.json",
42
+ ];
43
// True when every docdrift ignore entry already appears somewhere in `content`.
function hasGitignoreBlock(content) {
    for (const entry of GITIGNORE_ENTRIES) {
        if (!content.includes(entry))
            return false;
    }
    return true;
}
46
/**
 * Ensure `.gitignore` under `cwd` ignores docdrift run artifacts.
 * No-op when every entry is already present. Otherwise appends only the
 * MISSING entries (the previous version appended the whole block, which
 * duplicated any entries the user already had), and avoids writing a
 * leading blank line when creating a brand-new `.gitignore`.
 */
function ensureGitignore(cwd) {
    const gitignorePath = node_path_1.default.resolve(cwd, ".gitignore");
    let content = "";
    if (node_fs_1.default.existsSync(gitignorePath)) {
        content = node_fs_1.default.readFileSync(gitignorePath, "utf8");
        if (hasGitignoreBlock(content))
            return;
    }
    const missing = GITIGNORE_ENTRIES.filter((entry) => !content.includes(entry));
    const blockText = ["# Docdrift run artifacts", ...missing].join("\n") + "\n";
    // Terminate an unterminated final line before appending; a brand-new or
    // newline-terminated file needs no separator.
    const prefix = content === "" || content.endsWith("\n") ? "" : "\n";
    node_fs_1.default.writeFileSync(gitignorePath, content + prefix + blockText, "utf8");
}
57
/**
 * Install the docdrift GitHub Actions workflow at
 * `.github/workflows/docdrift.yml`, creating parent directories as needed.
 * Overwrites an existing workflow file.
 */
function addGitHubWorkflow(cwd) {
    const workflowsDir = node_path_1.default.resolve(cwd, ".github", "workflows");
    node_fs_1.default.mkdirSync(workflowsDir, { recursive: true });
    const destination = node_path_1.default.join(workflowsDir, "docdrift.yml");
    node_fs_1.default.writeFileSync(destination, WORKFLOW_CONTENT, "utf8");
}
63
/**
 * Run the onboarding steps selected in `choices` and report what was set up.
 * Always creates `.docdrift/`; optionally writes the custom-instructions
 * file, updates `.gitignore`, and installs the GitHub workflow.
 * @returns {{ created: string[] }} labels for everything that was created
 */
function runOnboarding(cwd, choices) {
    ensureDocdriftDir(cwd);
    const created = [".docdrift/"];
    if (choices.addCustomInstructions) {
        createCustomInstructionsFile(cwd);
        created.push("DocDrift.md");
    }
    if (choices.addGitignore) {
        ensureGitignore(cwd);
        created.push(".gitignore");
    }
    if (choices.addWorkflow) {
        addGitHubWorkflow(cwd);
        created.push(".github/workflows/docdrift.yml");
    }
    return { created };
}
@@ -0,0 +1,62 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.SYSTEM_PROMPT = void 0;
4
+ exports.SYSTEM_PROMPT = `You are a docdrift config expert. Given a repo fingerprint (file tree, package.json scripts, and detected paths), infer a partial docdrift.yaml configuration and a list of interactive choices for the user.
5
+
6
+ ## Docdrift config (simple mode)
7
+
8
+ Minimal valid config uses: version, openapi, docsite, pathMappings, devin, policy.
9
+
10
+ Example:
11
+ \`\`\`yaml
12
+ version: 1
13
+ openapi:
14
+ export: "npm run openapi:export"
15
+ generated: "openapi/generated.json"
16
+ published: "apps/docs-site/openapi/openapi.json"
17
+ docsite: "apps/docs-site"
18
+ pathMappings:
19
+ - match: "apps/api/**"
20
+ impacts: ["apps/docs-site/docs/**", "apps/docs-site/openapi/**"]
21
+ exclude: ["**/CHANGELOG*", "apps/docs-site/blog/**"]
22
+ requireHumanReview: []
23
+ devin:
24
+ apiVersion: v1
25
+ unlisted: true
26
+ maxAcuLimit: 2
27
+ tags: ["docdrift"]
28
+ policy:
29
+ prCaps: { maxPrsPerDay: 5, maxFilesTouched: 30 }
30
+ confidence: { autopatchThreshold: 0.8 }
31
+ allowlist: ["openapi/**", "apps/**"]
32
+ verification:
33
+ commands: ["npm run docs:gen", "npm run docs:build"]
34
+ slaDays: 7
35
+ slaLabel: docdrift
36
+ allowNewFiles: false
37
+ \`\`\`
38
+
39
+ ## Field rules
40
+
41
+ - openapi.export: Command to generate OpenAPI spec (e.g. "npm run openapi:export"). Prefer an existing script from root or workspace package.json.
42
+ - openapi.generated: Path where the export writes the spec (e.g. "openapi/generated.json").
43
+ - openapi.published: Path where the docsite consumes the spec (often under docsite, e.g. "apps/docs-site/openapi/openapi.json").
44
+ - docsite: Path to the docs site root (Docusaurus, Next.js docs, VitePress, MkDocs). Single string or array of strings.
45
+ - pathMappings: Array of { match, impacts }. match = glob for source/API code; impacts = globs for doc files that may need updates when match changes.
46
+ - policy.verification.commands: Commands to run after patching (e.g. "npm run docs:gen", "npm run docs:build"). Must exist in repo.
47
+ - exclude: Globs to never touch (e.g. blog, CHANGELOG).
48
+ - requireHumanReview: Globs that require human review when touched (e.g. guides).
49
+
50
+ ## Common patterns
51
+
52
+ - Docusaurus: docsite often has docusaurus.config.*; docs:gen may be "docusaurus -- gen-api-docs api"; openapi published path often under docsite/openapi/.
53
+ - Next/VitePress/MkDocs: docsite is the app root; look for docs/ or similar.
54
+
55
+ ## Output rules
56
+
57
+ 1. Infer suggestedConfig from the fingerprint. Only include fields you can confidently infer. Use existing paths and scripts from the fingerprint; do not invent paths that are not present.
58
+ 2. For each field where confidence is medium or low, OR where multiple valid options exist, add an entry to choices with: key (e.g. "openapi.export"), question, options (array of { value, label, recommended? }), defaultIndex, help?, warning?, confidence ("high"|"medium"|"low").
59
+ 3. Add to skipQuestions the keys for which you are highly confident so the CLI will not ask the user.
60
+ 4. Prefer fewer, high-quality choices. If truly uncertain, set confidence to "low" and provide 2–3 options.
61
+ 5. Do not suggest paths that do not exist in the fingerprint. Prefer existing package.json scripts for export and verification commands.
62
+ 6. suggestedConfig must be a valid partial docdrift config; policy.allowlist and policy.verification.commands are required if you include policy. devin.apiVersion must be "v1" if you include devin.`;
@@ -0,0 +1,155 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.buildRepoFingerprint = buildRepoFingerprint;
7
+ exports.fingerprintHash = fingerprintHash;
8
+ const node_crypto_1 = __importDefault(require("node:crypto"));
9
+ const node_fs_1 = __importDefault(require("node:fs"));
10
+ const node_path_1 = __importDefault(require("node:path"));
11
+ const IGNORE_DIRS = new Set(["node_modules", ".git", "dist", "build", "coverage", ".docdrift"]);
12
+ const DOC_HINTS = ["openapi", "swagger", "docusaurus", "mkdocs", "next", "vitepress"];
13
+ const MAX_TREE_DEPTH = 3;
14
/**
 * Recursively list directory contents into `tree`, keyed by path relative to
 * `root`, down to MAX_TREE_DEPTH levels. Hidden entries (except `.env`) and
 * IGNORE_DIRS members are omitted. Unreadable directories are skipped.
 *
 * @param {string} dir   directory to scan
 * @param {number} depth current recursion depth (0 at the root)
 * @param {Object<string, string[]>} tree out-param: relDir -> sorted names
 * @param {string} [root] base for the relative keys. New optional parameter
 *   (defaults to process.cwd(), preserving old call sites): previously keys
 *   were always computed against process.cwd(), which produced wrong keys
 *   whenever a different root directory was being scanned.
 */
function walkDir(dir, depth, tree, root = process.cwd()) {
    if (depth > MAX_TREE_DEPTH)
        return;
    let entries;
    try {
        entries = node_fs_1.default.readdirSync(dir, { withFileTypes: true });
    }
    catch {
        return; // unreadable directory: leave it out of the tree
    }
    const relDir = node_path_1.default.relative(root, dir) || ".";
    const names = [];
    for (const e of entries) {
        if (e.name.startsWith(".") && e.name !== ".env")
            continue; // hidden entries are not fingerprinted
        if (IGNORE_DIRS.has(e.name))
            continue;
        names.push(e.isDirectory() ? `${e.name}/` : e.name);
    }
    names.sort();
    tree[relDir] = names;
    for (const e of entries) {
        if (!e.isDirectory() || IGNORE_DIRS.has(e.name))
            continue;
        // BUG FIX: also skip hidden directories when recursing. They were
        // excluded from the listing above but still walked, so their
        // contents leaked into the tree.
        if (e.name.startsWith("."))
            continue;
        walkDir(node_path_1.default.join(dir, e.name), depth + 1, tree, root);
    }
}
41
/**
 * Collect files (paths relative to `cwd`) for which `test(relPath, baseName)`
 * returns true, walking at most 5 directory levels deep. Hidden entries
 * (except `.env`) and IGNORE_DIRS members are skipped; unreadable
 * directories are ignored.
 */
function findMatchingFiles(cwd, test) {
    const matches = [];
    const visit = (dir, depth) => {
        if (depth > 5)
            return;
        let entries;
        try {
            entries = node_fs_1.default.readdirSync(dir, { withFileTypes: true });
        }
        catch {
            return;
        }
        for (const entry of entries) {
            const hidden = entry.name.startsWith(".") && entry.name !== ".env";
            if (hidden || IGNORE_DIRS.has(entry.name))
                continue;
            const full = node_path_1.default.join(dir, entry.name);
            const rel = node_path_1.default.relative(cwd, full);
            if (entry.isFile()) {
                if (test(rel, entry.name))
                    matches.push(rel);
            }
            else if (entry.isDirectory()) {
                visit(full, depth + 1);
            }
        }
    };
    visit(cwd, 0);
    return matches;
}
69
/**
 * Find directories named exactly `name`, returned as paths relative to
 * `cwd`, scanning up to three levels deep (depth 0-2). All dot-prefixed
 * entries and IGNORE_DIRS members are skipped; unreadable directories are
 * ignored silently.
 */
function findDirsNamed(cwd, name) {
    const found = [];
    function explore(dir, depth) {
        if (depth > 2)
            return;
        let entries;
        try {
            entries = node_fs_1.default.readdirSync(dir, { withFileTypes: true });
        }
        catch {
            return;
        }
        for (const entry of entries) {
            if (entry.name.startsWith(".") || IGNORE_DIRS.has(entry.name))
                continue;
            if (!entry.isDirectory())
                continue;
            const full = node_path_1.default.join(dir, entry.name);
            if (entry.name === name)
                found.push(node_path_1.default.relative(cwd, full));
            explore(full, depth + 1);
        }
    }
    explore(cwd, 0);
    return found;
}
96
/**
 * Build a lightweight fingerprint of the repository at `cwd`:
 * - fileTree: shallow directory listing (via walkDir, up to MAX_TREE_DEPTH)
 * - rootPackage: root package.json scripts, doc-tooling dependency names
 *   (filtered by DOC_HINTS), and declared workspaces
 * - workspacePackages: path + scripts for each workspace member package
 * - foundPaths: detected OpenAPI/Swagger specs, docusaurus/mkdocs configs,
 *   and directories named "docs"
 * Malformed or unreadable package.json files are ignored (best effort).
 */
function buildRepoFingerprint(cwd = process.cwd()) {
    const fileTree = {};
    walkDir(cwd, 0, fileTree);
    let rootPackage = { scripts: {}, dependencies: [], workspaces: [] };
    const pkgPath = node_path_1.default.join(cwd, "package.json");
    if (node_fs_1.default.existsSync(pkgPath)) {
        try {
            const pkg = JSON.parse(node_fs_1.default.readFileSync(pkgPath, "utf8"));
            rootPackage.scripts = pkg.scripts || {};
            const deps = { ...pkg.dependencies, ...pkg.devDependencies };
            // Keep only dependency names that hint at documentation tooling.
            rootPackage.dependencies = Object.keys(deps || {}).filter((k) => DOC_HINTS.some((h) => k.toLowerCase().includes(h)));
            if (pkg.workspaces) {
                rootPackage.workspaces = Array.isArray(pkg.workspaces) ? pkg.workspaces : [pkg.workspaces];
            }
        }
        catch {
            // ignore unreadable/invalid root package.json
        }
    }
    const workspacePackages = [];
    if (rootPackage.workspaces?.length) {
        for (const w of rootPackage.workspaces) {
            // "packages/*" -> "packages"; only simple star globs are supported.
            const base = w.replace("/*", "").replace("*", "");
            const dir = node_path_1.default.join(cwd, base);
            if (!node_fs_1.default.existsSync(dir) || !node_fs_1.default.statSync(dir).isDirectory())
                continue;
            // BUG FIX: test the ORIGINAL pattern for a glob. `base` has had its
            // stars stripped, so the old `base.includes("*")` check was always
            // false for typical patterns and glob workspaces never enumerated
            // their member directories.
            const subdirs = w.includes("*")
                ? node_fs_1.default.readdirSync(dir, { withFileTypes: true }).filter((e) => e.isDirectory()).map((e) => node_path_1.default.join(dir, e.name))
                : [dir];
            for (const sub of subdirs) {
                const pj = node_path_1.default.join(sub, "package.json");
                if (!node_fs_1.default.existsSync(pj))
                    continue;
                try {
                    const pkg = JSON.parse(node_fs_1.default.readFileSync(pj, "utf8"));
                    workspacePackages.push({
                        path: node_path_1.default.relative(cwd, sub),
                        scripts: pkg.scripts || {},
                    });
                }
                catch {
                    // ignore unreadable/invalid workspace package.json
                }
            }
        }
    }
    const openapi = findMatchingFiles(cwd, (_, name) => /^openapi.*\.json$/i.test(name));
    const swagger = findMatchingFiles(cwd, (_, name) => /^swagger.*\.json$/i.test(name));
    const docusaurusConfig = findMatchingFiles(cwd, (_, name) => name.startsWith("docusaurus.config."));
    const mkdocs = findMatchingFiles(cwd, (_, name) => name === "mkdocs.yml");
    const docsDirs = findDirsNamed(cwd, "docs");
    return {
        fileTree,
        rootPackage,
        workspacePackages,
        foundPaths: { openapi, swagger, docusaurusConfig, mkdocs, docsDirs },
    };
}
152
/**
 * Deterministic sha256 (hex) of a repo fingerprint, independent of object
 * key insertion order at every depth.
 *
 * BUG FIX: the previous code passed `Object.keys(fingerprint).sort()` as a
 * JSON.stringify replacer. A replacer ARRAY filters keys at ALL nesting
 * levels, so every nested key (file paths, script names, ...) was silently
 * dropped and the hash was blind to most fingerprint changes. We now
 * canonicalize recursively by sorting keys, then hash the full payload.
 */
function fingerprintHash(fingerprint) {
    const canonicalize = (value) => {
        if (Array.isArray(value))
            return value.map(canonicalize);
        if (value && typeof value === "object") {
            const sorted = {};
            for (const key of Object.keys(value).sort())
                sorted[key] = canonicalize(value[key]);
            return sorted;
        }
        return value;
    };
    const canonical = JSON.stringify(canonicalize(fingerprint));
    return node_crypto_1.default.createHash("sha256").update(canonical).digest("hex");
}
@@ -0,0 +1,123 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ exports.detectFernSpecDrift = detectFernSpecDrift;
7
+ const node_fs_1 = __importDefault(require("node:fs"));
8
+ const node_path_1 = __importDefault(require("node:path"));
9
+ const fs_1 = require("../utils/fs");
10
+ const json_1 = require("../utils/json");
11
/**
 * Recursively read every YAML file (`.yml`/`.yaml`) under `dirPath`.
 * @returns map of file path -> file content; empty when `dirPath` does not
 *   exist or is not a directory.
 */
function readFernDefinitionDir(dirPath) {
    const collected = {};
    if (!node_fs_1.default.existsSync(dirPath) || !node_fs_1.default.statSync(dirPath).isDirectory()) {
        return collected;
    }
    for (const entry of node_fs_1.default.readdirSync(dirPath, { withFileTypes: true })) {
        const fullPath = node_path_1.default.join(dirPath, entry.name);
        if (entry.isDirectory()) {
            Object.assign(collected, readFernDefinitionDir(fullPath));
            continue;
        }
        const isYaml = entry.name.endsWith(".yml") || entry.name.endsWith(".yaml");
        if (entry.isFile() && isYaml) {
            collected[fullPath] = node_fs_1.default.readFileSync(fullPath, "utf8");
        }
    }
    return collected;
}
28
/**
 * Load the current Fern definition files for drift comparison.
 * Only `current.type === "local"` is supported; anything else throws.
 * @returns map of YAML file path -> content
 */
async function getCurrentContent(config) {
    const { current } = config;
    if (current.type === "local") {
        return readFernDefinitionDir(current.path);
    }
    throw new Error("Fern provider only supports local definition folder");
}
35
// Deterministic signature of a file map: entries sorted by path, then
// stable-stringified, so key insertion order never affects comparison.
function contentSignature(files) {
    const entries = Object.keys(files)
        .sort()
        .map((filePath) => ({ path: filePath, content: files[filePath] }));
    return (0, json_1.stableStringify)(entries);
}
39
/**
 * Detect drift between the working Fern definition folder (config.current)
 * and the published copy at config.published.
 *
 * Result shape: { hasDrift, summary, evidenceFiles, impactedDocs, signal? }.
 * Read failures and a missing published path are reported as drift with a
 * low-confidence "weak_evidence" signal instead of throwing.
 */
async function detectFernSpecDrift(config, evidenceDir) {
    if (config.format !== "fern") {
        // Guard: this detector only handles fern configs.
        return {
            hasDrift: false,
            summary: `Format ${config.format} is not fern`,
            evidenceFiles: [],
            impactedDocs: [],
        };
    }
    (0, fs_1.ensureDir)(evidenceDir);
    let currentFiles;
    try {
        currentFiles = await getCurrentContent(config);
    }
    catch (err) {
        // Could not read the definition: log it as evidence and flag
        // low-confidence drift for human follow-up.
        const msg = err instanceof Error ? err.message : String(err);
        const logPath = node_path_1.default.join(evidenceDir, "fern-export.log");
        node_fs_1.default.writeFileSync(logPath, msg, "utf8");
        return {
            hasDrift: true,
            summary: `Fern definition read failed: ${msg}`,
            evidenceFiles: [logPath],
            impactedDocs: [config.published],
            signal: {
                kind: "weak_evidence",
                tier: 2,
                confidence: 0.35,
                evidence: [logPath],
            },
        };
    }
    const publishedPath = config.published;
    let publishedSignature;
    if (node_fs_1.default.existsSync(publishedPath) && node_fs_1.default.statSync(publishedPath).isDirectory()) {
        // Published copy is a directory of YAML files: signature over contents.
        const publishedFiles = readFernDefinitionDir(publishedPath);
        publishedSignature = contentSignature(publishedFiles);
    }
    else if (node_fs_1.default.existsSync(publishedPath)) {
        // Published copy is a single file: its raw text stands in for the
        // signature. NOTE(review): this raw text is compared below against
        // contentSignature(currentFiles) output, which appears to never be
        // equal for a single file — confirm whether single-file published
        // specs are expected to always report drift.
        publishedSignature = node_fs_1.default.readFileSync(publishedPath, "utf8");
    }
    else {
        // Nothing published yet: report weak-evidence drift.
        return {
            hasDrift: true,
            summary: "Fern published path missing",
            evidenceFiles: [],
            impactedDocs: [config.published],
            signal: {
                kind: "weak_evidence",
                tier: 2,
                confidence: 0.35,
                evidence: [],
            },
        };
    }
    const currentSignature = contentSignature(currentFiles);
    if (currentSignature === publishedSignature) {
        return {
            hasDrift: false,
            summary: "No Fern definition drift detected",
            evidenceFiles: [],
            impactedDocs: [config.published],
        };
    }
    // Signatures differ: write a capped evidence excerpt and report
    // high-confidence (tier-1) drift.
    const summary = "Fern definition YAML changed.";
    const diffPath = node_path_1.default.join(evidenceDir, "fern.diff.txt");
    node_fs_1.default.writeFileSync(diffPath, [
        "# Fern Drift Summary",
        summary,
        "",
        "# Current definition signature (file list + content hash)",
        currentSignature.slice(0, 12000),
    ].join("\n"), "utf8");
    return {
        hasDrift: true,
        summary,
        evidenceFiles: [diffPath],
        impactedDocs: [config.published],
        signal: {
            kind: "fern_diff",
            tier: 1,
            confidence: 0.95,
            evidence: [diffPath],
        },
    };
}
@@ -0,0 +1,168 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
35
+ var __importDefault = (this && this.__importDefault) || function (mod) {
36
+ return (mod && mod.__esModule) ? mod : { "default": mod };
37
+ };
38
+ Object.defineProperty(exports, "__esModule", { value: true });
39
+ exports.detectGraphQLSpecDrift = detectGraphQLSpecDrift;
40
+ const node_fs_1 = __importDefault(require("node:fs"));
41
+ const node_path_1 = __importDefault(require("node:path"));
42
+ const fs_1 = require("../utils/fs");
43
+ const fetch_1 = require("../utils/fetch");
44
+ const GRAPHQL_INTROSPECTION_QUERY = `
45
+ query IntrospectionQuery {
46
+ __schema {
47
+ types { name kind }
48
+ queryType { name }
49
+ mutationType { name }
50
+ }
51
+ }
52
+ `;
53
/**
 * Normalize GraphQL schema text for drift comparison: drop `#` line
 * comments, then collapse every whitespace run to a single space, so purely
 * cosmetic edits do not register as schema drift.
 */
function normalizeGraphQLSchema(content) {
    const withoutComments = content.replace(/#[^\n]*/g, "");
    const collapsed = withoutComments.replace(/\s+/g, " ");
    return collapsed.trim();
}
60
/**
 * Fetch the current GraphQL schema according to `config.current`:
 * - "url": POST the introspection query, return pretty-printed `__schema`
 * - "local": read the schema file at `current.path`
 * - otherwise: run `current.command` and read `current.outputPath`
 * @throws Error when the source is unreachable, the file is missing, the
 *   export command fails, or introspection returns no `__schema`
 */
async function getCurrentContent(config) {
    const current = config.current;
    switch (current.type) {
        case "url": {
            const res = await (0, fetch_1.fetchSpecPost)(current.url, { query: GRAPHQL_INTROSPECTION_QUERY });
            const schema = JSON.parse(res)?.data?.__schema;
            if (!schema) {
                throw new Error("GraphQL introspection did not return __schema");
            }
            return JSON.stringify(schema, null, 2);
        }
        case "local": {
            if (!node_fs_1.default.existsSync(current.path)) {
                throw new Error(`GraphQL local path not found: ${current.path}`);
            }
            return node_fs_1.default.readFileSync(current.path, "utf8");
        }
        default: {
            // Lazy-load the exec helper only when an export command is needed.
            const { execCommand } = await Promise.resolve().then(() => __importStar(require("../utils/exec")));
            const result = await execCommand(current.command);
            if (result.exitCode !== 0) {
                throw new Error(`GraphQL export failed: ${result.stderr}`);
            }
            if (!node_fs_1.default.existsSync(current.outputPath)) {
                throw new Error(`GraphQL export did not create: ${current.outputPath}`);
            }
            return node_fs_1.default.readFileSync(current.outputPath, "utf8");
        }
    }
}
88
/**
 * Detect drift between the current GraphQL schema (per config.current) and
 * the published schema file at config.published, comparing normalized text.
 *
 * Result shape: { hasDrift, summary, evidenceFiles, impactedDocs, signal? }.
 * Export failures and a missing published file are reported as drift with a
 * low-confidence "weak_evidence" signal instead of throwing.
 */
async function detectGraphQLSpecDrift(config, evidenceDir) {
    if (config.format !== "graphql") {
        // Guard: this detector only handles graphql configs.
        return {
            hasDrift: false,
            summary: `Format ${config.format} is not graphql`,
            evidenceFiles: [],
            impactedDocs: [],
        };
    }
    (0, fs_1.ensureDir)(evidenceDir);
    // Low-confidence signal used for every failure-mode return below.
    const weakSignal = (evidence) => ({
        kind: "weak_evidence",
        tier: 2,
        confidence: 0.35,
        evidence,
    });
    let currentContent;
    try {
        currentContent = await getCurrentContent(config);
    }
    catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        const logPath = node_path_1.default.join(evidenceDir, "graphql-export.log");
        node_fs_1.default.writeFileSync(logPath, msg, "utf8");
        return {
            hasDrift: true,
            summary: `GraphQL current spec failed: ${msg}`,
            evidenceFiles: [logPath],
            impactedDocs: [config.published],
            signal: weakSignal([logPath]),
        };
    }
    if (!node_fs_1.default.existsSync(config.published)) {
        return {
            hasDrift: true,
            summary: "GraphQL published file missing",
            evidenceFiles: [],
            impactedDocs: [config.published],
            signal: weakSignal([]),
        };
    }
    const publishedContent = node_fs_1.default.readFileSync(config.published, "utf8");
    const normalizedCurrent = normalizeGraphQLSchema(currentContent);
    const normalizedPublished = normalizeGraphQLSchema(publishedContent);
    if (normalizedCurrent === normalizedPublished) {
        return {
            hasDrift: false,
            summary: "No GraphQL schema drift detected",
            evidenceFiles: [],
            impactedDocs: [config.published],
        };
    }
    // Schemas differ after normalization: write capped excerpts of both
    // sides as evidence and report high-confidence (tier-1) drift.
    const summary = "GraphQL schema changed (types or fields differ).";
    const diffPath = node_path_1.default.join(evidenceDir, "graphql.diff.txt");
    const report = [
        "# GraphQL Drift Summary",
        summary,
        "",
        "# Published (excerpt)",
        normalizedPublished.slice(0, 8000),
        "",
        "# Current (excerpt)",
        normalizedCurrent.slice(0, 8000),
    ].join("\n");
    node_fs_1.default.writeFileSync(diffPath, report, "utf8");
    return {
        hasDrift: true,
        summary,
        evidenceFiles: [diffPath],
        impactedDocs: [config.published],
        signal: {
            kind: "graphql_diff",
            tier: 1,
            confidence: 0.95,
            evidence: [diffPath],
        },
    };
}