@devinnn/docdrift 0.1.1 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -7,18 +7,8 @@ exports.buildDriftReport = buildDriftReport;
  const node_path_1 = __importDefault(require("node:path"));
  const fs_1 = require("../utils/fs");
  const git_1 = require("../utils/git");
- const docsCheck_1 = require("./docsCheck");
  const heuristics_1 = require("./heuristics");
- const openapi_1 = require("./openapi");
- function defaultRecommendation(mode, signals) {
-     if (!signals.length) {
-         return "NOOP";
-     }
-     if (mode === "autogen") {
-         return signals.some((s) => s.tier <= 1) ? "OPEN_PR" : "OPEN_ISSUE";
-     }
-     return "OPEN_ISSUE";
- }
+ const registry_1 = require("../spec-providers/registry");
  async function buildDriftReport(input) {
      const runInfo = {
          runId: `${Date.now()}`,
@@ -27,51 +17,122 @@ async function buildDriftReport(input) {
          headSha: input.headSha,
          trigger: input.trigger,
          timestamp: new Date().toISOString(),
+         prNumber: input.prNumber,
      };
      const evidenceRoot = node_path_1.default.resolve(".docdrift", "evidence", runInfo.runId);
      (0, fs_1.ensureDir)(evidenceRoot);
      const changedPaths = await (0, git_1.gitChangedPaths)(input.baseSha, input.headSha);
      const diffSummary = await (0, git_1.gitDiffSummary)(input.baseSha, input.headSha);
      const commits = await (0, git_1.gitCommitList)(input.baseSha, input.headSha);
-     const docsCheck = await (0, docsCheck_1.runDocsChecks)(input.config.policy.verification.commands, evidenceRoot);
-     const items = [];
-     const checkSummaries = [docsCheck.summary];
-     for (const docArea of input.config.docAreas) {
-         const signals = [];
-         const impactedDocs = new Set(docArea.patch.targets ?? []);
-         const summaries = [];
-         if (docsCheck.signal) {
-             signals.push(docsCheck.signal);
-             summaries.push(docsCheck.summary);
-         }
-         if (docArea.detect.openapi) {
-             const openapiResult = await (0, openapi_1.detectOpenApiDrift)(docArea, evidenceRoot);
-             if (openapiResult.signal) {
-                 signals.push(openapiResult.signal);
+     (0, fs_1.writeJsonFile)(node_path_1.default.join(evidenceRoot, "changeset.json"), {
+         changedPaths,
+         diffSummary,
+         commits,
+     });
+     const { config } = input;
+     const signals = [];
+     const impactedDocs = new Set();
+     const summaries = [];
+     const evidenceFiles = [];
+     // 1. Run all spec providers (parallel)
+     const providerResults = [];
+     if (config.specProviders.length > 0) {
+         const results = await Promise.all(config.specProviders.map(async (provider) => {
+             const detector = (0, registry_1.getSpecDetector)(provider.format);
+             return detector(provider, evidenceRoot);
+         }));
+         providerResults.push(...results);
+     }
+     const anySpecDrift = providerResults.some((r) => r.hasDrift && r.signal && r.signal.tier <= 1);
+     const allSpecFailedOrNoDrift = providerResults.length === 0 ||
+         providerResults.every((r) => !r.hasDrift || (r.signal?.tier ?? 2) > 1);
+     if (anySpecDrift) {
+         for (const r of providerResults) {
+             if (r.hasDrift && r.signal) {
+                 signals.push(r.signal);
+                 r.impactedDocs.forEach((d) => impactedDocs.add(d));
+                 summaries.push(r.summary);
+                 evidenceFiles.push(...r.evidenceFiles);
              }
-             openapiResult.impactedDocs.forEach((doc) => impactedDocs.add(doc));
-             summaries.push(openapiResult.summary);
          }
+     }
+     // 2. Path heuristics (always run for aggregation when we have docAreas)
+     for (const docArea of config.docAreas) {
          if (docArea.detect.paths?.length) {
              const heuristicResult = (0, heuristics_1.detectHeuristicImpacts)(docArea, changedPaths, evidenceRoot);
              if (heuristicResult.signal) {
                  signals.push(heuristicResult.signal);
+                 heuristicResult.impactedDocs.forEach((d) => impactedDocs.add(d));
+                 summaries.push(heuristicResult.summary);
+             }
+         }
+     }
+     const hasHeuristicMatch = signals.some((s) => s.kind === "heuristic_path_impact");
+     // 3. Gate logic
+     let runGate = "none";
+     if (anySpecDrift) {
+         runGate = "spec_drift";
+     }
+     else if (config.allowConceptualOnlyRun && hasHeuristicMatch) {
+         runGate = "conceptual_only";
+     }
+     else if (config.inferMode && (config.specProviders.length === 0 || allSpecFailedOrNoDrift)) {
+         runGate = "infer";
+         if (config.specProviders.length > 0) {
+             for (const r of providerResults) {
+                 if (r.signal) {
+                     signals.push(r.signal);
+                     r.impactedDocs.forEach((d) => impactedDocs.add(d));
+                     summaries.push(r.summary);
+                 }
              }
-             heuristicResult.impactedDocs.forEach((doc) => impactedDocs.add(doc));
-             summaries.push(heuristicResult.summary);
          }
-         if (!signals.length) {
-             continue;
+         if (signals.length === 0) {
+             signals.push({
+                 kind: "infer_mode",
+                 tier: 2,
+                 confidence: 0.6,
+                 evidence: [node_path_1.default.join(evidenceRoot, "changeset.json")],
+             });
+             changedPaths.forEach((p) => impactedDocs.add(p));
+             summaries.push("Infer mode: no spec drift; infer docs from file changes.");
          }
-         items.push({
-             docArea: docArea.name,
-             mode: docArea.mode,
-             signals,
-             impactedDocs: [...impactedDocs],
-             recommendedAction: defaultRecommendation(docArea.mode, signals),
-             summary: summaries.filter(Boolean).join(" | "),
-         });
      }
+     if (runGate === "none") {
+         const report = {
+             run: {
+                 repo: input.repo,
+                 baseSha: input.baseSha,
+                 headSha: input.headSha,
+                 trigger: input.trigger,
+                 timestamp: runInfo.timestamp,
+             },
+             items: [],
+         };
+         (0, fs_1.writeJsonFile)(node_path_1.default.resolve(".docdrift", "drift_report.json"), report);
+         return {
+             report,
+             aggregated: null,
+             changedPaths,
+             evidenceRoot,
+             runInfo,
+             hasOpenApiDrift: false,
+             runGate: "none",
+         };
+     }
+     const aggregated = {
+         signals,
+         impactedDocs: [...impactedDocs],
+         summary: summaries.filter(Boolean).join(" | "),
+     };
+     const item = {
+         docArea: "docsite",
+         mode: runGate === "conceptual_only" ? "conceptual" : "autogen",
+         signals: aggregated.signals,
+         impactedDocs: aggregated.impactedDocs,
+         recommendedAction: aggregated.signals.some((s) => s.tier <= 1) ? "OPEN_PR" : "OPEN_ISSUE",
+         summary: aggregated.summary,
+     };
      const report = {
          run: {
              repo: input.repo,
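
The rewritten builder no longer calls `detectOpenApiDrift` directly; it resolves a detector through `registry_1.getSpecDetector(provider.format)` and invokes it as `detector(provider, evidenceRoot)`. Below is a minimal sketch of a detector conforming to the result shape consumed in the hunk above; the shape (`hasDrift`, `signal`, `impactedDocs`, `summary`, `evidenceFiles`) is inferred from this diff, and the file names and signal values are placeholders.

```js
// Hypothetical detector sketch, not part of the package. The return shape is
// inferred from how buildDriftReport consumes provider results above.
const fs = require("node:fs");
const path = require("node:path");

async function detectExampleSpecDrift(provider, evidenceRoot) {
  const evidencePath = path.join(evidenceRoot, "example-spec.diff.txt");
  const drift = false; // compare generated vs. published spec here
  if (!drift) {
    return { hasDrift: false, impactedDocs: [], summary: "No drift", evidenceFiles: [] };
  }
  fs.writeFileSync(evidencePath, "diff details go here", "utf8");
  return {
    hasDrift: true,
    // tier <= 1 is what the gate above treats as real spec drift
    signal: { kind: "openapi_diff", tier: 1, confidence: 0.9, evidence: [evidencePath] },
    impactedDocs: ["docs/reference/openapi.json"],
    summary: "Example spec drift detected",
    evidenceFiles: [evidencePath],
  };
}
```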
@@ -80,13 +141,16 @@ async function buildDriftReport(input) {
              trigger: input.trigger,
              timestamp: runInfo.timestamp,
          },
-         items,
+         items: [item],
      };
      (0, fs_1.writeJsonFile)(node_path_1.default.resolve(".docdrift", "drift_report.json"), report);
-     (0, fs_1.writeJsonFile)(node_path_1.default.join(evidenceRoot, "changeset.json"), {
+     return {
+         report,
+         aggregated,
          changedPaths,
-         diffSummary,
-         commits,
-     });
-     return { report, changedPaths, evidenceRoot, runInfo, checkSummaries };
+         evidenceRoot,
+         runInfo,
+         hasOpenApiDrift: anySpecDrift,
+         runGate,
+     };
  }
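
The return shape also changed: `checkSummaries` is gone, and callers now receive `aggregated`, `hasOpenApiDrift`, and a `runGate` of `"none"`, `"spec_drift"`, `"conceptual_only"`, or `"infer"`. A hedged sketch of how a caller might branch on the new shape; the require path and all input values are assumptions, only the field names come from this diff.

```js
// Hypothetical caller; the require path is an assumption and the input values
// are placeholders. Field names match the diff above.
const { buildDriftReport } = require("@devinnn/docdrift"); // assumed entry point

async function checkDrift(config) {
  const result = await buildDriftReport({
    repo: "acme/api",
    baseSha: "<base-sha>",
    headSha: "<head-sha>",
    trigger: "pull_request",
    prNumber: 42,
    config,
  });
  if (result.runGate === "none") {
    return; // no spec drift, no heuristic match, infer mode not triggered
  }
  // aggregated carries the merged signals/impactedDocs/summary for the single
  // "docsite" item; hasOpenApiDrift mirrors the spec_drift gate.
  console.log(result.runGate, result.aggregated.summary);
}
```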
@@ -4,6 +4,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.detectOpenApiDrift = detectOpenApiDrift;
+ exports.detectOpenApiDriftFromNormalized = detectOpenApiDriftFromNormalized;
  const node_fs_1 = __importDefault(require("node:fs"));
  const node_path_1 = __importDefault(require("node:path"));
  const exec_1 = require("../utils/exec");
@@ -121,3 +122,85 @@ async function detectOpenApiDrift(docArea, evidenceDir) {
          },
      };
  }
+ /** Run OpenAPI drift detection from normalized config (simple openapi block). Used as gate. */
+ async function detectOpenApiDriftFromNormalized(config, evidenceDir) {
+     const openapi = config.openapi;
+     const exportLogPath = node_path_1.default.join(evidenceDir, "openapi-export.log");
+     const exportResult = await (0, exec_1.execCommand)(openapi.export);
+     node_fs_1.default.writeFileSync(exportLogPath, [
+         `$ ${openapi.export}`,
+         `exitCode: ${exportResult.exitCode}`,
+         "\n--- stdout ---",
+         exportResult.stdout,
+         "\n--- stderr ---",
+         exportResult.stderr,
+     ].join("\n"), "utf8");
+     if (exportResult.exitCode !== 0) {
+         return {
+             impactedDocs: [openapi.published],
+             evidenceFiles: [exportLogPath],
+             summary: "OpenAPI export command failed",
+             signal: {
+                 kind: "weak_evidence",
+                 tier: 2,
+                 confidence: 0.35,
+                 evidence: [exportLogPath],
+             },
+         };
+     }
+     if (!node_fs_1.default.existsSync(openapi.generated) || !node_fs_1.default.existsSync(openapi.published)) {
+         return {
+             impactedDocs: [openapi.generated, openapi.published],
+             evidenceFiles: [exportLogPath],
+             summary: "OpenAPI file(s) missing",
+             signal: {
+                 kind: "weak_evidence",
+                 tier: 2,
+                 confidence: 0.35,
+                 evidence: [exportLogPath],
+             },
+         };
+     }
+     const generatedRaw = node_fs_1.default.readFileSync(openapi.generated, "utf8");
+     const publishedRaw = node_fs_1.default.readFileSync(openapi.published, "utf8");
+     const generatedJson = JSON.parse(generatedRaw);
+     const publishedJson = JSON.parse(publishedRaw);
+     const normalizedGenerated = (0, json_1.stableStringify)(generatedJson);
+     const normalizedPublished = (0, json_1.stableStringify)(publishedJson);
+     if (normalizedGenerated === normalizedPublished) {
+         return {
+             impactedDocs: [openapi.published],
+             evidenceFiles: [exportLogPath],
+             summary: "No OpenAPI drift detected",
+         };
+     }
+     const summary = summarizeSpecDelta(publishedJson, generatedJson);
+     const diffPath = node_path_1.default.join(evidenceDir, "openapi.diff.txt");
+     node_fs_1.default.writeFileSync(diffPath, [
+         "# OpenAPI Drift Summary",
+         summary,
+         "",
+         "# Published (normalized)",
+         normalizedPublished,
+         "",
+         "# Generated (normalized)",
+         normalizedGenerated,
+     ].join("\n"), "utf8");
+     const impactedDocs = [
+         ...new Set([
+             openapi.published,
+             ...config.docAreas.flatMap((a) => a.patch.targets ?? []).filter(Boolean),
+         ]),
+     ].filter(Boolean);
+     return {
+         impactedDocs,
+         evidenceFiles: [exportLogPath, diffPath],
+         summary,
+         signal: {
+             kind: "openapi_diff",
+             tier: 1,
+             confidence: 0.95,
+             evidence: [diffPath],
+         },
+     };
+ }
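
The new `detectOpenApiDriftFromNormalized` expects a normalized config whose `openapi` block has an `export` shell command plus `generated` and `published` file paths, and it pulls additional impacted docs from `config.docAreas[*].patch.targets`. A minimal usage sketch under those assumptions; the require path, command, and paths below are placeholders.

```js
// Hypothetical invocation; all values are placeholders. The config shape is
// inferred from the fields the function reads above.
const { detectOpenApiDriftFromNormalized } = require("@devinnn/docdrift"); // assumed entry point

const config = {
  openapi: {
    export: "npm run export:openapi",         // command that writes the generated spec
    generated: "tmp/openapi.generated.json",  // output of the export command
    published: "docs/reference/openapi.json", // spec currently published in the docsite
  },
  docAreas: [{ patch: { targets: ["docs/reference/openapi.json"] } }],
};

detectOpenApiDriftFromNormalized(config, ".docdrift/evidence/example-run").then((result) => {
  // signal is only present for drift or a weak-evidence failure;
  // tier 1 / kind "openapi_diff" means the normalized specs differ.
  console.log(result.summary, result.evidenceFiles);
});
```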
@@ -2,6 +2,7 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.buildAutogenPrompt = buildAutogenPrompt;
  exports.buildConceptualPrompt = buildConceptualPrompt;
+ exports.buildWholeDocsitePrompt = buildWholeDocsitePrompt;
  function attachmentBlock(attachmentUrls) {
      return attachmentUrls.map((url, index) => `- ATTACHMENT ${index + 1}: ${url}`).join("\n");
  }
@@ -61,3 +62,89 @@ function buildConceptualPrompt(input) {
      }
      return base;
  }
+ /** Whole-docsite prompt for single-session runs */
+ function buildWholeDocsitePrompt(input) {
+     const excludeNote = input.config.exclude?.length > 0
+         ? `\n6) NEVER modify files matching these patterns: ${input.config.exclude.join(", ")}`
+         : "";
+     const requireReviewNote = input.config.requireHumanReview?.length > 0
+         ? `\n7) If you touch files under: ${input.config.requireHumanReview.join(", ")} — note it in the PR description (a follow-up issue will flag for human review).`
+         : "";
+     const allowNewFiles = input.config.policy.allowNewFiles ?? false;
+     const newFilesRule = allowNewFiles
+         ? "8) You MAY add new articles, create new folders, and change information architecture when warranted."
+         : "8) You may ONLY edit existing files. Do NOT create new files, new articles, or new folders. Do NOT change information architecture.";
+     const driftSummary = input.aggregated.summary?.trim();
+     const openapiPublished = input.config.openapi?.published;
+     const openapiGenerated = input.config.openapi?.generated;
+     const specLine = openapiPublished && openapiGenerated
+         ? `Update ${openapiPublished} to match the generated spec (${openapiGenerated}). The attachments contain the full diff.`
+         : "Update published docs to match the evidence (attachments).";
+     const driftBlock = driftSummary &&
+         [
+             "DRIFT DETECTED (you must fix this):",
+             "---",
+             driftSummary,
+             "---",
+             specLine,
+             "",
+         ].join("\n");
+     const inferBlock = input.runGate === "infer"
+         ? [
+             "INFER MODE: No API spec diff was available. These file changes may impact docs.",
+             "Infer what documentation might need updates from the changed files. Update or create docs as needed.",
+             "Do NOT invent APIs; only document what you can infer from the code changes.",
+             "",
+         ].join("\n")
+         : "";
+     const draftPrBlock = input.trigger === "pull_request" && input.prNumber
+         ? [
+             "",
+             "This run was triggered by an open API PR. Open a **draft** pull request.",
+             `In the PR description, link to the API PR (#${input.prNumber}) and state: "Merge the API PR first, then review this doc PR."`,
+             "Use a branch name like docdrift/pr-" + input.prNumber + " or docdrift/preview-<short-sha>.",
+             "",
+         ].join("\n")
+         : "";
+     const pathMappings = input.config.pathMappings ?? [];
+     const pathMappingsBlock = pathMappings.length > 0
+         ? [
+             "PATH MAPPINGS (when these code paths change, consider these docs for updates):",
+             ...pathMappings.map((p) => `- ${p.match} → ${p.impacts.join(", ")}`),
+             "",
+         ].join("\n")
+         : "";
+     const base = [
+         "You are Devin. Task: update the entire docsite to match the API and code changes.",
+         "",
+         driftBlock ?? "",
+         inferBlock,
+         pathMappingsBlock,
+         "EVIDENCE (attachments):",
+         input.attachmentUrls.map((url, i) => `- ATTACHMENT ${i + 1}: ${url}`).join("\n"),
+         "",
+         "Rules (hard):",
+         `1) Only modify files under: ${input.config.policy.allowlist.join(", ")}`,
+         "2) Make the smallest change that makes docs correct.",
+         "3) Update API reference (OpenAPI) and any impacted guides in one PR.",
+         "4) Run verification commands and record results:",
+         ...input.config.policy.verification.commands.map((c) => ` - ${c}`),
+         "5) Open exactly ONE pull request with a clear title and reviewer-friendly description." +
+             draftPrBlock,
+         `6) Docsite scope: ${input.config.docsite.join(", ")}` +
+             excludeNote +
+             requireReviewNote +
+             `\n${newFilesRule}`,
+         "",
+         "Structured Output:",
+         "- Maintain structured output in the provided JSON schema.",
+         "- Update it at: planning, editing, verifying, open-pr, blocked, done.",
+         "- If blocked, fill blocked.questions with concrete questions.",
+         "",
+         "Goal: Produce ONE PR that updates the whole docsite (API reference + guides) using only the evidence.",
+     ].join("\n");
+     if (input.config.devin.customInstructionContent) {
+         return base + "\n\n---\n\nCustom instructions:\n\n" + input.config.devin.customInstructionContent;
+     }
+     return base;
+ }
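
`buildWholeDocsitePrompt` assembles the whole prompt from fields on its input: `config.policy` (allowlist, allowNewFiles, verification commands), `config.docsite`, `config.exclude`, `config.requireHumanReview`, `config.pathMappings`, `config.openapi`, `config.devin`, plus `aggregated.summary`, `runGate`, `trigger`, `prNumber`, and `attachmentUrls`. A sketch of a minimal input that exercises it; every value is a placeholder and the shape is inferred from the field accesses in the function body above.

```js
// Hypothetical input; values are placeholders, shape inferred from the diff above.
const { buildWholeDocsitePrompt } = require("@devinnn/docdrift"); // assumed entry point

const prompt = buildWholeDocsitePrompt({
  trigger: "pull_request",
  prNumber: 42,
  runGate: "spec_drift",
  attachmentUrls: ["https://example.com/evidence/openapi.diff.txt"],
  aggregated: { summary: "PUT /widgets/{id} now requires a `sku` field" },
  config: {
    docsite: ["docs/"],
    exclude: [],
    requireHumanReview: ["docs/guides/"],
    pathMappings: [{ match: "src/api/**", impacts: ["docs/reference/"] }],
    openapi: { generated: "tmp/openapi.json", published: "docs/reference/openapi.json" },
    policy: {
      allowlist: ["docs/"],
      allowNewFiles: false,
      verification: { commands: ["npm run docs:build"] },
    },
    devin: { customInstructionContent: "" },
  },
});
```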
@@ -1,9 +1,15 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
+ exports.parseRepo = parseRepo;
  exports.postCommitComment = postCommitComment;
+ exports.postPrComment = postPrComment;
  exports.createIssue = createIssue;
  exports.renderRunComment = renderRunComment;
  exports.renderBlockedIssueBody = renderBlockedIssueBody;
+ exports.renderRequireHumanReviewIssueBody = renderRequireHumanReviewIssueBody;
+ exports.renderSlaIssueBody = renderSlaIssueBody;
+ exports.isPrOpen = isPrOpen;
+ exports.listOpenPrsWithLabel = listOpenPrsWithLabel;
  const rest_1 = require("@octokit/rest");
  function parseRepo(full) {
      const [owner, repo] = full.split("/");
@@ -23,6 +29,18 @@ async function postCommitComment(input) {
      });
      return response.data.html_url;
  }
+ /** Post a comment on a pull request (e.g. to link the doc drift PR when trigger is pull_request). */
+ async function postPrComment(input) {
+     const octokit = new rest_1.Octokit({ auth: input.token });
+     const { owner, repo } = parseRepo(input.repository);
+     const response = await octokit.issues.createComment({
+         owner,
+         repo,
+         issue_number: input.prNumber,
+         body: input.body,
+     });
+     return response.data.html_url;
+ }
  async function createIssue(input) {
      const octokit = new rest_1.Octokit({ auth: input.token });
      const { owner, repo } = parseRepo(input.repository);
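
Because `postPrComment` goes through `octokit.issues.createComment`, the comment lands on the PR's conversation thread (GitHub treats pull requests as issues for commenting). A small hedged usage sketch; token, repository, PR number, and body are placeholders.

```js
// Hypothetical call; values are placeholders and postPrComment is assumed to be
// required from this module.
async function linkDocsPr() {
  const commentUrl = await postPrComment({
    token: process.env.GITHUB_TOKEN,
    repository: "acme/api", // parseRepo splits this into owner/repo
    prNumber: 42,
    body: "docdrift opened a draft docs PR for this change.",
  });
  console.log(`Comment posted: ${commentUrl}`);
}
```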
@@ -84,3 +102,73 @@ function renderBlockedIssueBody(input) {
      }
      return lines.join("\n");
  }
+ function renderRequireHumanReviewIssueBody(input) {
+     const lines = [];
+     lines.push("## Why this issue");
+     lines.push("");
+     lines.push("This doc-drift PR touches paths that require human review (guides, prose, or other non-technical docs).");
+     lines.push("");
+     lines.push("## What to do");
+     lines.push("");
+     lines.push(`1. Review the PR: ${input.prUrl}`);
+     lines.push("2. Confirm the changes are correct or request modifications.");
+     lines.push("3. Merge or close the PR.");
+     lines.push("");
+     if (input.touchedPaths.length > 0) {
+         lines.push("## Touched paths (require review)");
+         lines.push("");
+         for (const p of input.touchedPaths.slice(0, 20)) {
+             lines.push(`- \`${p}\``);
+         }
+         if (input.touchedPaths.length > 20) {
+             lines.push(`- ... and ${input.touchedPaths.length - 20} more`);
+         }
+     }
+     return lines.join("\n");
+ }
+ function renderSlaIssueBody(input) {
+     const lines = [];
+     lines.push("## Why this issue");
+     lines.push("");
+     lines.push(`Doc-drift PR(s) have been open for ${input.slaDays}+ days. Docs may be out of sync.`);
+     lines.push("");
+     lines.push("## What to do");
+     lines.push("");
+     lines.push("Please review and merge or close the following PR(s):");
+     lines.push("");
+     for (const url of input.prUrls) {
+         lines.push(`- ${url}`);
+     }
+     lines.push("");
+     lines.push("If the PR is no longer needed, close it to resolve this reminder.");
+     return lines.join("\n");
+ }
+ /** Check if a PR is still open. URL format: https://github.com/owner/repo/pull/123 */
+ async function isPrOpen(token, prUrl) {
+     const match = prUrl.match(/github\.com[/]([^/]+)[/]([^/]+)[/]pull[/](\d+)/);
+     if (!match)
+         return { open: false };
+     const [, owner, repo, numStr] = match;
+     const number = parseInt(numStr ?? "0", 10);
+     if (!owner || !repo || !Number.isFinite(number))
+         return { open: false };
+     const octokit = new rest_1.Octokit({ auth: token });
+     const { data } = await octokit.pulls.get({ owner, repo, pull_number: number });
+     return { open: data.state === "open", number: data.number };
+ }
+ /** List open PRs with a given label */
+ async function listOpenPrsWithLabel(token, repository, label) {
+     const octokit = new rest_1.Octokit({ auth: token });
+     const { owner, repo } = parseRepo(repository);
+     const { data } = await octokit.pulls.list({
+         owner,
+         repo,
+         state: "open",
+         labels: label,
+     });
+     return data.map((pr) => ({
+         url: pr.html_url ?? "",
+         number: pr.number,
+         created_at: pr.created_at ?? "",
+     }));
+ }
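
Taken together, these helpers suggest an SLA reminder flow: list open doc-drift PRs by label, re-check that each is still open, and file a reminder issue rendered by `renderSlaIssueBody`. The sketch below is hypothetical: the label, SLA window, and the title/body field names passed to `createIssue` are assumptions, and since `octokit.pulls.list` has no `labels` parameter in the REST API, a caller that depends on label filtering may need to verify labels on the returned PRs itself.

```js
// Hypothetical SLA check; label, window, and createIssue's field names are assumptions.
async function remindStaleDocPrs(token, repository) {
  const prs = await listOpenPrsWithLabel(token, repository, "docdrift");
  const slaDays = 7;
  const cutoffMs = Date.now() - slaDays * 24 * 60 * 60 * 1000;
  const stale = [];
  for (const pr of prs) {
    if (new Date(pr.created_at).getTime() > cutoffMs) continue; // younger than the SLA window
    const { open } = await isPrOpen(token, pr.url);              // re-check state before reminding
    if (open) stale.push(pr.url);
  }
  if (stale.length === 0) return;
  await createIssue({
    token,
    repository,
    title: `docdrift: ${stale.length} doc PR(s) open past the ${slaDays}-day SLA`,
    body: renderSlaIssueBody({ slaDays, prUrls: stale }),
  });
}
```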