@devinnn/docdrift 0.1.3 → 0.1.6

This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
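For reference, the same comparison can be reproduced locally with npm's built-in diff command (assuming npm 7 or later); the package name and versions below are taken from the header above:

    npm diff --diff=@devinnn/docdrift@0.1.3 --diff=@devinnn/docdrift@0.1.6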
package/dist/src/cli.js CHANGED
@@ -1,5 +1,38 @@
  #!/usr/bin/env node
  "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+ })();
  var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
@@ -17,7 +50,23 @@ function getArg(args, flag) {
  async function main() {
  const [, , command, ...args] = process.argv;
  if (!command) {
- throw new Error("Usage: docdrift <validate|detect|run|status|sla-check> [options]\n detect|run: [--base SHA] [--head SHA] (defaults: merge-base with main..HEAD)");
+ throw new Error("Usage: docdrift <validate|detect|run|status|sla-check|setup|generate-yaml> [options]\n" +
+ " validate Validate docdrift.yaml (v2 config)\n" +
+ " detect Check for drift [--base SHA] [--head SHA]\n" +
+ " run Full run with Devin [--base SHA] [--head SHA]\n" +
+ " status Show run status [--since 24h]\n" +
+ " sla-check Check SLA for unmerged PRs\n" +
+ " setup Interactive setup (generates v2 docdrift.yaml)\n" +
+ " generate-yaml Generate config [--output path] [--force]");
+ }
+ if (command === "setup" || command === "generate-yaml") {
+ require("dotenv").config();
+ const { runSetup } = await Promise.resolve().then(() => __importStar(require("./setup")));
+ await runSetup({
+ outputPath: getArg(args, "--output") ?? "docdrift.yaml",
+ force: args.includes("--force"),
+ });
+ return;
  }
  switch (command) {
  case "validate": {
@@ -16,8 +16,7 @@ function normalizeConfig(config) {
  let exclude = config.exclude ?? [];
  let requireHumanReview = config.requireHumanReview ?? [];
  let docAreas = config.docAreas ?? [];
- const allowConceptualOnlyRun = config.allowConceptualOnlyRun ?? false;
- const inferMode = config.inferMode ?? true;
+ const mode = config.mode ?? "strict";
  if (config.specProviders && config.specProviders.length >= 1) {
  specProviders = config.specProviders;
  const firstOpenApi3 = specProviders.find((p) => p.format === "openapi3");
@@ -149,7 +148,6 @@ function normalizeConfig(config) {
  exclude,
  requireHumanReview,
  docAreas,
- allowConceptualOnlyRun,
- inferMode,
+ mode,
  };
  }
@@ -98,8 +98,8 @@ exports.docDriftConfigBaseSchema = zod_1.z.object({
  exclude: zod_1.z.array(zod_1.z.string().min(1)).optional().default([]),
  requireHumanReview: zod_1.z.array(zod_1.z.string().min(1)).optional().default([]),
  pathMappings: zod_1.z.array(exports.pathRuleSchema).optional().default([]),
- allowConceptualOnlyRun: zod_1.z.boolean().optional().default(false),
- inferMode: zod_1.z.boolean().optional().default(true),
+ /** strict: only run on spec drift. auto: also run when pathMappings match (no spec drift). */
+ mode: zod_1.z.enum(["strict", "auto"]).optional().default("strict"),
  devin: zod_1.z.object({
  apiVersion: zod_1.z.literal("v1"),
  unlisted: zod_1.z.boolean().default(true),
@@ -119,8 +119,8 @@ const docDriftConfigObjectSchema = zod_1.z.object({
  exclude: zod_1.z.array(zod_1.z.string().min(1)).optional().default([]),
  requireHumanReview: zod_1.z.array(zod_1.z.string().min(1)).optional().default([]),
  pathMappings: zod_1.z.array(exports.pathRuleSchema).optional().default([]),
- allowConceptualOnlyRun: zod_1.z.boolean().optional().default(false),
- inferMode: zod_1.z.boolean().optional().default(true),
+ /** strict: only run on spec drift. auto: also run when pathMappings match (no spec drift). */
+ mode: zod_1.z.enum(["strict", "auto"]).optional().default("strict"),
  devin: zod_1.z.object({
  apiVersion: zod_1.z.literal("v1"),
  unlisted: zod_1.z.boolean().default(true),
@@ -7,6 +7,7 @@ exports.buildDriftReport = buildDriftReport;
  const node_path_1 = __importDefault(require("node:path"));
  const fs_1 = require("../utils/fs");
  const git_1 = require("../utils/git");
+ const glob_1 = require("../utils/glob");
  const heuristics_1 = require("./heuristics");
  const registry_1 = require("../spec-providers/registry");
  async function buildDriftReport(input) {
@@ -68,15 +69,21 @@ async function buildDriftReport(input) {
  }
  }
  const hasHeuristicMatch = signals.some((s) => s.kind === "heuristic_path_impact");
+ const pathMappings = config.pathMappings ?? [];
+ const hasPathMappingMatch = pathMappings.length > 0 &&
+ changedPaths.some((p) => pathMappings.some((m) => (0, glob_1.matchesGlob)(m.match, p)));
  // 3. Gate logic
+ const isAuto = config.mode === "auto";
  let runGate = "none";
  if (anySpecDrift) {
  runGate = "spec_drift";
  }
- else if (config.allowConceptualOnlyRun && hasHeuristicMatch) {
+ else if (isAuto && hasHeuristicMatch) {
  runGate = "conceptual_only";
  }
- else if (config.inferMode && (config.specProviders.length === 0 || allSpecFailedOrNoDrift)) {
+ else if (isAuto &&
+ hasPathMappingMatch &&
+ (config.specProviders.length === 0 || allSpecFailedOrNoDrift)) {
  runGate = "infer";
  if (config.specProviders.length > 0) {
  for (const r of providerResults) {
@@ -97,15 +97,29 @@ function buildWholeDocsitePrompt(input) {
  "",
  ].join("\n")
  : "";
- const draftPrBlock = input.trigger === "pull_request" && input.prNumber
- ? [
+ const draftPrBlock = (() => {
+ if (input.trigger !== "pull_request" || !input.prNumber)
+ return "";
+ if (input.existingDocdriftPr) {
+ return [
+ "",
+ "CRITICAL: An existing doc-drift PR already exists for this API PR.",
+ `You MUST UPDATE that PR — do NOT create a new one.`,
+ `- Existing PR: #${input.existingDocdriftPr.number} (${input.existingDocdriftPr.url})`,
+ `- Branch to update: ${input.existingDocdriftPr.headRef}`,
+ "Checkout that branch, pull latest main, apply your doc changes, push. The existing PR will update.",
+ "Do NOT open a new pull request.",
+ "",
+ ].join("\n");
+ }
+ return [
  "",
  "This run was triggered by an open API PR. Open a **draft** pull request.",
  `In the PR description, link to the API PR (#${input.prNumber}) and state: "Merge the API PR first, then review this doc PR."`,
- "Use a branch name like docdrift/pr-" + input.prNumber + " or docdrift/preview-<short-sha>.",
+ "Use branch name docdrift/pr-" + input.prNumber + " (required for future runs to update this PR).",
  "",
- ].join("\n")
- : "";
+ ].join("\n");
+ })();
  const pathMappings = input.config.pathMappings ?? [];
  const pathMappingsBlock = pathMappings.length > 0
  ? [
@@ -91,12 +91,37 @@ async function devinListSessions(apiKey, params = {}) {
  }
  return [];
  }
+ const TERMINAL_STATUSES = [
+ "finished",
+ "blocked",
+ "error",
+ "cancelled",
+ "done",
+ "complete",
+ "completed",
+ "success",
+ "terminated",
+ ];
+ function hasPrUrl(session) {
+ if (typeof session.pull_request_url === "string" && session.pull_request_url)
+ return true;
+ if (typeof session.pr_url === "string" && session.pr_url)
+ return true;
+ const structured = (session.structured_output ?? session.data?.structured_output);
+ if (structured?.pr?.url)
+ return true;
+ return false;
+ }
  async function pollUntilTerminal(apiKey, sessionId, timeoutMs = 30 * 60_000) {
  const started = Date.now();
  while (Date.now() - started < timeoutMs) {
  const session = await devinGetSession(apiKey, sessionId);
  const status = String(session.status_enum ?? session.status ?? "UNKNOWN").toLowerCase();
- if (["finished", "blocked", "error", "cancelled", "done", "complete"].includes(status)) {
+ if (TERMINAL_STATUSES.includes(status)) {
+ return session;
+ }
+ // Session already produced a PR; stop polling so we don't timeout waiting for status to flip
+ if (hasPrUrl(session)) {
  return session;
  }
  await new Promise((resolve) => setTimeout(resolve, 5000));
@@ -10,6 +10,7 @@ exports.renderRequireHumanReviewIssueBody = renderRequireHumanReviewIssueBody;
  exports.renderSlaIssueBody = renderSlaIssueBody;
  exports.isPrOpen = isPrOpen;
  exports.listOpenPrsWithLabel = listOpenPrsWithLabel;
+ exports.findExistingDocdriftPrForSource = findExistingDocdriftPrForSource;
  const rest_1 = require("@octokit/rest");
  function parseRepo(full) {
  const [owner, repo] = full.split("/");
@@ -172,3 +173,26 @@ async function listOpenPrsWithLabel(token, repository, label) {
  created_at: pr.created_at ?? "",
  }));
  }
+ /** Find an existing open docdrift PR for a given source PR number.
+ * Looks for PRs from branch docdrift/pr-{sourcePrNumber} (Devin's convention).
+ * Returns the first match so we can instruct Devin to update it instead of creating a new one.
+ */
+ async function findExistingDocdriftPrForSource(token, repository, sourcePrNumber) {
+ const octokit = new rest_1.Octokit({ auth: token });
+ const { owner, repo } = parseRepo(repository);
+ const branchName = `docdrift/pr-${sourcePrNumber}`;
+ const { data } = await octokit.pulls.list({
+ owner,
+ repo,
+ state: "open",
+ head: branchName,
+ });
+ const pr = data[0];
+ if (!pr)
+ return null;
+ return {
+ number: pr.number,
+ url: pr.html_url ?? "",
+ headRef: pr.head?.ref ?? branchName,
+ };
+ }
package/dist/src/index.js CHANGED
@@ -36,7 +36,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  return (mod && mod.__esModule) ? mod : { "default": mod };
  };
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.STATE_PATH = void 0;
+ exports.runSetup = exports.STATE_PATH = void 0;
  exports.runDetect = runDetect;
  exports.runDocDrift = runDocDrift;
  exports.runValidate = runValidate;
@@ -97,6 +97,7 @@ async function executeSessionSingle(input) {
  runGate: input.runGate,
  trigger: input.trigger,
  prNumber: input.prNumber,
+ existingDocdriftPr: input.existingDocdriftPr,
  });
  const session = await (0, v1_1.devinCreateSession)(input.apiKey, {
  prompt,
@@ -256,6 +257,16 @@ async function runDocDrift(options) {
  }
  const bundle = await (0, bundle_1.buildEvidenceBundle)({ runInfo, item, evidenceRoot });
  const attachmentPaths = [...new Set([bundle.archivePath, ...bundle.attachmentPaths])];
+ let existingDocdriftPr;
+ if (githubToken && runInfo.trigger === "pull_request" && runInfo.prNumber) {
+ existingDocdriftPr = (await (0, client_1.findExistingDocdriftPrForSource)(githubToken, repo, runInfo.prNumber)) ?? undefined;
+ if (existingDocdriftPr) {
+ (0, log_1.logInfo)("Found existing docdrift PR for source PR; will instruct Devin to update it", {
+ existingPr: existingDocdriftPr.number,
+ headRef: existingDocdriftPr.headRef,
+ });
+ }
+ }
  let sessionOutcome = {
  outcome: "NO_CHANGE",
  summary: "Skipped Devin session",
@@ -276,6 +287,7 @@ async function runDocDrift(options) {
  runGate,
  trigger: runInfo.trigger,
  prNumber: runInfo.prNumber,
+ existingDocdriftPr,
  });
  metrics.timeToSessionTerminalMs.push(Date.now() - sessionStart);
  }
@@ -296,7 +308,7 @@ async function runDocDrift(options) {
  metrics.prsOpened += 1;
  state.lastDocDriftPrUrl = sessionOutcome.prUrl;
  state.lastDocDriftPrOpenedAt = new Date().toISOString();
- if (githubToken && runInfo.trigger === "pull_request" && runInfo.prNumber) {
+ if (githubToken && runInfo.trigger === "pull_request" && runInfo.prNumber && !existingDocdriftPr) {
  await (0, client_1.postPrComment)({
  token: githubToken,
  repository: repo,
@@ -322,19 +334,18 @@ async function runDocDrift(options) {
  }
  }
  else if (githubToken &&
- (decision.action === "OPEN_ISSUE" ||
- sessionOutcome.outcome === "BLOCKED" ||
- sessionOutcome.outcome === "NO_CHANGE")) {
+ sessionOutcome.outcome === "BLOCKED" &&
+ sessionOutcome.summary.includes("DEVIN_API_KEY")) {
  issueUrl = await (0, client_1.createIssue)({
  token: githubToken,
  repository: repo,
  issue: {
- title: "[docdrift] docsite: docs drift requires input",
+ title: "[docdrift] Configuration required set DEVIN_API_KEY",
  body: (0, client_1.renderBlockedIssueBody)({
  docArea: item.docArea,
- evidenceSummary: item.summary,
+ evidenceSummary: sessionOutcome.summary,
  questions: sessionOutcome.questions ?? [
- "Please confirm intended behavior and doc wording.",
+ "Set DEVIN_API_KEY in GitHub Actions secrets or environment.",
  ],
  sessionUrl: sessionOutcome.sessionUrl,
  }),
@@ -342,10 +353,11 @@ async function runDocDrift(options) {
  },
  });
  metrics.issuesOpened += 1;
- if (sessionOutcome.outcome !== "PR_OPENED") {
- sessionOutcome.outcome = "ISSUE_OPENED";
- }
  }
+ // Note: We do NOT create "docs drift requires input" issues for Devin-reported BLOCKED
+ // (evidence questions) or for OPEN_ISSUE/NO_CHANGE. Issues are only created for:
+ // (1) requireHumanReview when a PR touches those paths, (2) 7-day SLA reminders,
+ // and (3) DEVIN_API_KEY missing. See docdrift-yml.md.
  if (sessionOutcome.outcome === "BLOCKED") {
  metrics.blockedCount += 1;
  }
@@ -534,3 +546,5 @@ async function resolveBaseHead(baseArg, headArg) {
  return resolveDefaultBaseHead(headRef);
  }
  exports.STATE_PATH = node_path_1.default.resolve(".docdrift", "state.json");
+ var setup_1 = require("./setup");
+ Object.defineProperty(exports, "runSetup", { enumerable: true, get: function () { return setup_1.runSetup; } });
@@ -0,0 +1,217 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.inferConfigFromFingerprint = inferConfigFromFingerprint;
+ const gateway_1 = require("@ai-sdk/gateway");
+ const ai_1 = require("ai");
+ const node_fs_1 = __importDefault(require("node:fs"));
+ const node_path_1 = __importDefault(require("node:path"));
+ const zod_1 = require("zod");
+ const repo_fingerprint_1 = require("./repo-fingerprint");
+ const prompts_1 = require("./prompts");
+ const pathRuleSchema = zod_1.z.object({
+ match: zod_1.z.string().min(1),
+ impacts: zod_1.z.array(zod_1.z.string().min(1)).min(1),
+ });
+ const specProviderSchema = zod_1.z.object({
+ format: zod_1.z.enum(["openapi3", "swagger2", "graphql", "fern", "postman"]),
+ current: zod_1.z.object({
+ type: zod_1.z.literal("export"),
+ command: zod_1.z.string().min(1),
+ outputPath: zod_1.z.string().min(1),
+ }),
+ published: zod_1.z.string().min(1),
+ });
+ const InferenceSchema = zod_1.z.object({
+ suggestedConfig: zod_1.z.object({
+ version: zod_1.z.literal(2).optional(),
+ specProviders: zod_1.z.array(specProviderSchema).optional(),
+ docsite: zod_1.z.union([zod_1.z.string().min(1), zod_1.z.array(zod_1.z.string().min(1))]).optional(),
+ exclude: zod_1.z.array(zod_1.z.string().min(1)).optional(),
+ requireHumanReview: zod_1.z.array(zod_1.z.string().min(1)).optional(),
+ pathMappings: zod_1.z.array(pathRuleSchema).optional(),
+ mode: zod_1.z.enum(["strict", "auto"]).optional(),
+ devin: zod_1.z
+ .object({
+ apiVersion: zod_1.z.literal("v1"),
+ unlisted: zod_1.z.boolean().optional(),
+ maxAcuLimit: zod_1.z.number().optional(),
+ tags: zod_1.z.array(zod_1.z.string()).optional(),
+ customInstructions: zod_1.z.array(zod_1.z.string()).optional(),
+ })
+ .optional(),
+ policy: zod_1.z
+ .object({
+ prCaps: zod_1.z.object({ maxPrsPerDay: zod_1.z.number(), maxFilesTouched: zod_1.z.number() }).optional(),
+ confidence: zod_1.z.object({ autopatchThreshold: zod_1.z.number() }).optional(),
+ allowlist: zod_1.z.array(zod_1.z.string().min(1)).optional(),
+ verification: zod_1.z.object({ commands: zod_1.z.array(zod_1.z.string().min(1)) }).optional(),
+ slaDays: zod_1.z.number().optional(),
+ slaLabel: zod_1.z.string().optional(),
+ allowNewFiles: zod_1.z.boolean().optional(),
+ })
+ .optional(),
+ }),
+ choices: zod_1.z.array(zod_1.z.object({
+ key: zod_1.z.string(),
+ question: zod_1.z.string(),
+ options: zod_1.z.array(zod_1.z.object({
+ value: zod_1.z.string(),
+ label: zod_1.z.string(),
+ recommended: zod_1.z.boolean().optional(),
+ })),
+ defaultIndex: zod_1.z.number(),
+ help: zod_1.z.string().optional(),
+ warning: zod_1.z.string().optional(),
+ confidence: zod_1.z.enum(["high", "medium", "low"]),
+ })),
+ skipQuestions: zod_1.z.array(zod_1.z.string()).optional(),
+ });
+ const CACHE_DIR = ".docdrift";
+ const CACHE_FILE = "setup-cache.json";
+ function getCachePath(cwd) {
+ return node_path_1.default.resolve(cwd, CACHE_DIR, CACHE_FILE);
+ }
+ function readCache(cwd) {
+ const cachePath = getCachePath(cwd);
+ if (!node_fs_1.default.existsSync(cachePath))
+ return null;
+ try {
+ const raw = JSON.parse(node_fs_1.default.readFileSync(cachePath, "utf8"));
+ const parsed = InferenceSchema.safeParse(raw.inference);
+ if (!parsed.success)
+ return null;
+ return {
+ fingerprintHash: String(raw.fingerprintHash),
+ inference: parsed.data,
+ timestamp: Number(raw.timestamp) || 0,
+ };
+ }
+ catch {
+ return null;
+ }
+ }
+ function writeCache(cwd, fingerprintHash, inference) {
+ const dir = node_path_1.default.resolve(cwd, CACHE_DIR);
+ node_fs_1.default.mkdirSync(dir, { recursive: true });
+ node_fs_1.default.writeFileSync(getCachePath(cwd), JSON.stringify({ fingerprintHash, inference, timestamp: Date.now() }, null, 2), "utf8");
+ }
+ function heuristicInference(fingerprint) {
+ const scripts = fingerprint.rootPackage.scripts || {};
+ const scriptNames = Object.keys(scripts);
+ const openapiScriptName = scriptNames.find((s) => s === "openapi:export" || s === "openapi:generate");
+ const openapiExport = openapiScriptName ? `npm run ${openapiScriptName}` : "npm run openapi:export";
+ const firstOpenapi = fingerprint.foundPaths.openapi[0];
+ const firstDocsite = fingerprint.foundPaths.docusaurusConfig[0]
+ ? node_path_1.default.dirname(fingerprint.foundPaths.docusaurusConfig[0]).replace(/\\/g, "/")
+ : fingerprint.foundPaths.docsDirs[0]
+ ? node_path_1.default.dirname(fingerprint.foundPaths.docsDirs[0]).replace(/\\/g, "/")
+ : "apps/docs-site";
+ const published = firstOpenapi && firstOpenapi.includes(firstDocsite)
+ ? firstOpenapi
+ : `${firstDocsite}/openapi/openapi.json`;
+ const generated = firstOpenapi && !firstOpenapi.includes(firstDocsite)
+ ? firstOpenapi
+ : "openapi/generated.json";
+ const verificationCommands = [];
+ if (scripts["docs:gen"])
+ verificationCommands.push("npm run docs:gen");
+ if (scripts["docs:build"])
+ verificationCommands.push("npm run docs:build");
+ if (verificationCommands.length === 0)
+ verificationCommands.push("npm run build");
+ const treeKeys = Object.keys(fingerprint.fileTree);
+ const hasAppsApi = treeKeys.some((k) => k === "apps/api" || k.startsWith("apps/api/"));
+ const matchGlob = hasAppsApi ? "apps/api/**" : "**/api/**";
+ const allowlist = treeKeys.some((k) => k === "apps" || k.startsWith("apps/"))
+ ? ["openapi/**", "apps/**"]
+ : ["openapi/**", `${firstDocsite}/**`];
+ const requireHumanReview = fingerprint.foundPaths.docsDirs.length > 0
+ ? [`${firstDocsite}/docs/guides/**`]
+ : [];
+ return {
+ suggestedConfig: {
+ version: 2,
+ specProviders: [
+ {
+ format: "openapi3",
+ current: { type: "export", command: openapiExport, outputPath: generated },
+ published,
+ },
+ ],
+ docsite: firstDocsite,
+ exclude: ["**/CHANGELOG*", "**/blog/**"],
+ requireHumanReview,
+ pathMappings: [{ match: matchGlob, impacts: [`${firstDocsite}/docs/**`, `${firstDocsite}/openapi/**`] }],
+ mode: "strict",
+ devin: { apiVersion: "v1", unlisted: true, maxAcuLimit: 2, tags: ["docdrift"] },
+ policy: {
+ prCaps: { maxPrsPerDay: 5, maxFilesTouched: 30 },
+ confidence: { autopatchThreshold: 0.8 },
+ allowlist,
+ verification: { commands: verificationCommands },
+ slaDays: 7,
+ slaLabel: "docdrift",
+ allowNewFiles: false,
+ },
+ },
+ choices: [
+ {
+ key: "specProviders.0.current.command",
+ question: "OpenAPI export command",
+ options: [{ value: openapiExport, label: openapiExport, recommended: true }],
+ defaultIndex: 0,
+ help: "Use the npm script that generates the spec (e.g. npm run openapi:export).",
+ confidence: "medium",
+ },
+ {
+ key: "docsite",
+ question: "Docsite path",
+ options: [{ value: firstDocsite, label: firstDocsite, recommended: true }],
+ defaultIndex: 0,
+ confidence: "medium",
+ },
+ ],
+ skipQuestions: [],
+ };
+ }
+ async function inferConfigFromFingerprint(fingerprint, cwd = process.cwd()) {
+ const apiKey = process.env.AI_GATEWAY_API_KEY?.trim();
+ const hash = (0, repo_fingerprint_1.fingerprintHash)(fingerprint);
+ const cached = readCache(cwd);
+ if (cached && cached.fingerprintHash === hash)
+ return cached.inference;
+ if (!apiKey)
+ return heuristicInference(fingerprint);
+ const gateway = (0, gateway_1.createGateway)({
+ apiKey,
+ baseURL: "https://ai-gateway.vercel.sh/v1/ai",
+ });
+ const prompt = `Repo fingerprint:\n${JSON.stringify(fingerprint, null, 2)}`;
+ try {
+ const result = await (0, ai_1.generateText)({
+ model: gateway("anthropic/claude-opus-4.6"),
+ system: prompts_1.SYSTEM_PROMPT,
+ prompt,
+ experimental_output: ai_1.Output.object({
+ schema: InferenceSchema,
+ }),
+ maxRetries: 2,
+ abortSignal: AbortSignal.timeout(60_000),
+ });
+ const output = result.experimental_output;
+ if (!output)
+ throw new Error("No structured output");
+ const parsed = InferenceSchema.safeParse(output);
+ if (!parsed.success)
+ throw new Error(parsed.error.message);
+ const inference = parsed.data;
+ writeCache(cwd, hash, inference);
+ return inference;
+ }
+ catch {
+ return heuristicInference(fingerprint);
+ }
+ }
@@ -0,0 +1,132 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.buildConfigFromInference = buildConfigFromInference;
+ exports.writeConfig = writeConfig;
+ exports.validateGeneratedConfig = validateGeneratedConfig;
+ const node_fs_1 = __importDefault(require("node:fs"));
+ const node_path_1 = __importDefault(require("node:path"));
+ const js_yaml_1 = __importDefault(require("js-yaml"));
+ const schema_1 = require("../config/schema");
+ function deepMerge(target, source) {
+ const out = { ...target };
+ for (const key of Object.keys(source)) {
+ const s = source[key];
+ const t = out[key];
+ if (s != null && typeof s === "object" && !Array.isArray(s) && t != null && typeof t === "object" && !Array.isArray(t)) {
+ out[key] = deepMerge(t, s);
+ }
+ else if (s !== undefined) {
+ out[key] = s;
+ }
+ }
+ return out;
+ }
+ function applyOverrides(base, overrides) {
+ for (const [key, value] of Object.entries(overrides)) {
+ setByKey(base, key, value);
+ }
+ }
+ function setByKey(obj, key, value) {
+ const parts = key.split(".");
+ let cur = obj;
+ let i = 0;
+ while (i < parts.length - 1) {
+ const p = parts[i];
+ const nextPart = parts[i + 1];
+ const isArrayIndex = nextPart !== undefined && /^\d+$/.test(nextPart);
+ const existing = cur[p];
+ if (existing != null && typeof existing === "object") {
+ if (Array.isArray(existing) && isArrayIndex) {
+ const idx = parseInt(nextPart, 10);
+ let el = existing[idx];
+ if (el == null || typeof el !== "object") {
+ el = {};
+ existing[idx] = el;
+ }
+ cur = el;
+ i += 2;
+ continue;
+ }
+ if (!Array.isArray(existing)) {
+ cur = existing;
+ }
+ else {
+ cur[p] = isArrayIndex ? [] : {};
+ cur = cur[p];
+ }
+ }
+ else {
+ const next = isArrayIndex ? [] : {};
+ cur[p] = next;
+ cur = next;
+ }
+ i++;
+ }
+ cur[parts[parts.length - 1]] = value;
+ }
+ const DEFAULT_CONFIG = {
+ version: 2,
+ specProviders: [
+ {
+ format: "openapi3",
+ current: {
+ type: "export",
+ command: "npm run openapi:export",
+ outputPath: "openapi/generated.json",
+ },
+ published: "apps/docs-site/openapi/openapi.json",
+ },
+ ],
+ docsite: "apps/docs-site",
+ exclude: [],
+ requireHumanReview: [],
+ pathMappings: [],
+ mode: "strict",
+ devin: {
+ apiVersion: "v1",
+ unlisted: true,
+ maxAcuLimit: 2,
+ tags: ["docdrift"],
+ },
+ policy: {
+ prCaps: { maxPrsPerDay: 5, maxFilesTouched: 30 },
+ confidence: { autopatchThreshold: 0.8 },
+ allowlist: ["openapi/**", "apps/**"],
+ verification: { commands: ["npm run docs:gen", "npm run docs:build"] },
+ slaDays: 7,
+ slaLabel: "docdrift",
+ allowNewFiles: false,
+ },
+ };
+ function buildConfigFromInference(inference, formResult) {
+ const base = deepMerge({ ...DEFAULT_CONFIG }, inference.suggestedConfig);
+ applyOverrides(base, formResult.configOverrides);
+ return base;
+ }
+ function writeConfig(config, outputPath) {
+ const dir = node_path_1.default.dirname(outputPath);
+ node_fs_1.default.mkdirSync(dir, { recursive: true });
+ const yamlContent = [
+ "# yaml-language-server: $schema=./docdrift.schema.json",
+ js_yaml_1.default.dump(config, { lineWidth: 120, noRefs: true }),
+ ].join("\n");
+ node_fs_1.default.writeFileSync(outputPath, yamlContent, "utf8");
+ }
+ function validateGeneratedConfig(configPath) {
+ try {
+ const content = node_fs_1.default.readFileSync(configPath, "utf8");
+ const parsed = js_yaml_1.default.load(content);
+ const result = schema_1.docDriftConfigSchema.safeParse(parsed);
+ if (!result.success) {
+ const errors = result.error.errors.map((e) => `${e.path.join(".") || "root"}: ${e.message}`);
+ return { ok: false, errors };
+ }
+ return { ok: true, errors: [] };
+ }
+ catch (err) {
+ return { ok: false, errors: [err instanceof Error ? err.message : String(err)] };
+ }
+ }
@@ -0,0 +1,109 @@
+ "use strict";
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || (function () {
+ var ownKeys = function(o) {
+ ownKeys = Object.getOwnPropertyNames || function (o) {
+ var ar = [];
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+ return ar;
+ };
+ return ownKeys(o);
+ };
+ return function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+ __setModuleDefault(result, mod);
+ return result;
+ };
+ })();
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.runSetup = runSetup;
+ const node_path_1 = __importDefault(require("node:path"));
+ const repo_fingerprint_1 = require("./repo-fingerprint");
+ const ai_infer_1 = require("./ai-infer");
+ const interactive_form_1 = require("./interactive-form");
+ const generate_yaml_1 = require("./generate-yaml");
+ const onboard_1 = require("./onboard");
+ const index_1 = require("../index");
+ async function runSetup(options = {}) {
+ const cwd = options.cwd ?? process.cwd();
+ const outputPath = node_path_1.default.resolve(cwd, options.outputPath ?? "docdrift.yaml");
+ const configExists = await Promise.resolve().then(() => __importStar(require("node:fs"))).then((fs) => fs.existsSync(outputPath));
+ if (configExists && !options.force) {
+ const { confirm } = await Promise.resolve().then(() => __importStar(require("@inquirer/prompts")));
+ const overwrite = await confirm({
+ message: "Config already exists. Overwrite?",
+ default: false,
+ });
+ if (!overwrite) {
+ console.log("Setup cancelled.");
+ return;
+ }
+ }
+ process.stdout.write("Analyzing your repo…\n");
+ const fingerprint = (0, repo_fingerprint_1.buildRepoFingerprint)(cwd);
+ process.stdout.write("Generating suggestions…\n");
+ const inference = await (0, ai_infer_1.inferConfigFromFingerprint)(fingerprint, cwd);
+ const formResult = await (0, interactive_form_1.runInteractiveForm)(inference, cwd);
+ let config = (0, generate_yaml_1.buildConfigFromInference)(inference, formResult);
+ if (formResult.onboarding.addCustomInstructions) {
+ const devin = config.devin ?? {};
+ config.devin = {
+ ...devin,
+ customInstructions: [".docdrift/DocDrift.md"],
+ };
+ }
+ (0, generate_yaml_1.writeConfig)(config, outputPath);
+ const { created } = (0, onboard_1.runOnboarding)(cwd, formResult.onboarding);
+ const validation = (0, generate_yaml_1.validateGeneratedConfig)(outputPath);
+ if (!validation.ok) {
+ console.error("Config validation failed:\n" + validation.errors.join("\n"));
+ throw new Error("Generated config is invalid. Fix the errors above or edit docdrift.yaml manually.");
+ }
+ if (outputPath === node_path_1.default.resolve(cwd, "docdrift.yaml")) {
+ try {
+ await (0, index_1.runValidate)();
+ }
+ catch (err) {
+ console.error(err instanceof Error ? err.message : String(err));
+ throw err;
+ }
+ }
+ console.log("\ndocdrift setup complete\n");
+ console.log(" docdrift.yaml written and validated");
+ for (const item of created) {
+ if (item === ".docdrift/")
+ console.log(" .docdrift/ created");
+ else if (item === "DocDrift.md")
+ console.log(" DocDrift.md created (edit for custom instructions)");
+ else if (item === ".gitignore")
+ console.log(" .gitignore updated");
+ else if (item.endsWith("docdrift.yml"))
+ console.log(" " + item + " added");
+ }
+ console.log("\nNext steps:");
+ console.log(" 1. Set DEVIN_API_KEY (local: .env or export; CI: repo secrets)");
+ console.log(" 2. Set GITHUB_TOKEN in repo secrets for PR comments and issues");
+ console.log(" 3. Run: docdrift validate — verify config");
+ console.log(" 4. Run: docdrift detect — check for drift");
+ console.log(" 5. Run: docdrift run — create Devin session (requires DEVIN_API_KEY)");
+ }
@@ -0,0 +1,49 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.runInteractiveForm = runInteractiveForm;
+ const prompts_1 = require("@inquirer/prompts");
+ async function runInteractiveForm(inference, _cwd = process.cwd()) {
+ const configOverrides = {};
+ const skip = new Set(inference.skipQuestions ?? []);
+ for (const choice of inference.choices) {
+ if (skip.has(choice.key))
+ continue;
+ const options = choice.options;
+ if (options.length === 0)
+ continue;
+ const defaultOption = options[choice.defaultIndex] ?? options[0];
+ const choices = options.map((o, i) => ({
+ name: o.recommended ? `${o.label} (recommended)` : o.label,
+ value: o.value,
+ }));
+ const answer = await (0, prompts_1.select)({
+ message: choice.question,
+ choices,
+ default: defaultOption?.value,
+ });
+ configOverrides[choice.key] = answer;
+ }
+ const addCustomInstructions = await (0, prompts_1.confirm)({
+ message: "Add a custom instructions file for Devin? (PR titles, tone, project-specific guidance)",
+ default: true,
+ });
+ const addGitignore = await (0, prompts_1.confirm)({
+ message: "Add .docdrift artifact entries to .gitignore?",
+ default: true,
+ });
+ const addWorkflow = await (0, prompts_1.confirm)({
+ message: "Add GitHub Actions workflow for docdrift? (runs on push/PR to main)",
+ default: false,
+ });
+ const confirmed = await (0, prompts_1.confirm)({
+ message: "Write docdrift.yaml and complete setup? (will run validate)",
+ default: true,
+ });
+ if (!confirmed) {
+ throw new Error("Setup cancelled");
+ }
+ return {
+ configOverrides,
+ onboarding: { addCustomInstructions, addGitignore, addWorkflow },
+ };
+ }
@@ -0,0 +1,80 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ensureDocdriftDir = ensureDocdriftDir;
+ exports.createCustomInstructionsFile = createCustomInstructionsFile;
+ exports.ensureGitignore = ensureGitignore;
+ exports.addGitHubWorkflow = addGitHubWorkflow;
+ exports.runOnboarding = runOnboarding;
+ const node_fs_1 = __importDefault(require("node:fs"));
+ const node_path_1 = __importDefault(require("node:path"));
+ const DOCDRIFT_DIR = ".docdrift";
+ const CUSTOM_INSTRUCTIONS_FILE = ".docdrift/DocDrift.md";
+ const GITIGNORE_BLOCK = `
+ # Docdrift run artifacts
+ .docdrift/evidence
+ .docdrift/*.log
+ .docdrift/state.json
+ .docdrift/run-output.json
+ `;
+ const WORKFLOW_CONTENT = "name: docdrift\n\non:\n push:\n branches: [\"main\"]\n pull_request:\n branches: [\"main\"]\n workflow_dispatch:\n\njobs:\n docdrift:\n runs-on: ubuntu-latest\n permissions:\n contents: write\n pull-requests: write\n issues: write\n steps:\n - uses: actions/checkout@v4\n with:\n fetch-depth: 0\n\n - uses: actions/setup-node@v4\n with:\n node-version: \"20\"\n\n - run: npm install\n\n - name: Determine SHAs\n id: shas\n run: |\n if [ \"${{ github.event_name }}\" = \"pull_request\" ]; then\n HEAD_SHA=\"${{ github.event.pull_request.head.sha }}\"\n BASE_SHA=\"${{ github.event.pull_request.base.sha }}\"\n else\n HEAD_SHA=\"${{ github.sha }}\"\n BASE_SHA=\"${{ github.event.before }}\"\n if [ -z \"$BASE_SHA\" ] || [ \"$BASE_SHA\" = \"0000000000000000000000000000000000000000\" ]; then\n BASE_SHA=\"$(git rev-parse HEAD^)\"\n fi\n fi\n echo \"head=${HEAD_SHA}\" >> $GITHUB_OUTPUT\n echo \"base=${BASE_SHA}\" >> $GITHUB_OUTPUT\n echo \"pr_number=${{ github.event.pull_request.number || '' }}\" >> $GITHUB_OUTPUT\n\n - name: Validate config\n run: npx docdrift validate\n\n - name: Run Doc Drift\n env:\n DEVIN_API_KEY: ${{ secrets.DEVIN_API_KEY }}\n GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n GITHUB_REPOSITORY: ${{ github.repository }}\n GITHUB_SHA: ${{ github.sha }}\n GITHUB_EVENT_NAME: ${{ github.event_name }}\n GITHUB_PR_NUMBER: ${{ steps.shas.outputs.pr_number }}\n run: |\n PR_ARGS=\"\"\n if [ -n \"$GITHUB_PR_NUMBER\" ]; then\n PR_ARGS=\"--trigger pull_request --pr-number $GITHUB_PR_NUMBER\"\n fi\n npx docdrift run --base ${{ steps.shas.outputs.base }} --head ${{ steps.shas.outputs.head }} $PR_ARGS\n\n - name: Upload artifacts\n if: always()\n uses: actions/upload-artifact@v4\n with:\n name: docdrift-artifacts\n path: |\n .docdrift/drift_report.json\n .docdrift/metrics.json\n .docdrift/run-output.json\n .docdrift/evidence/**\n .docdrift/state.json\n";
+ function ensureDocdriftDir(cwd) {
+ const dir = node_path_1.default.resolve(cwd, DOCDRIFT_DIR);
+ node_fs_1.default.mkdirSync(dir, { recursive: true });
+ }
+ const CUSTOM_INSTRUCTIONS_TEMPLATE = `# DocDrift custom instructions
+
+ - **PR titles:** Start every pull request title with \`[docdrift]\`.
+ - Add project-specific guidance for Devin here (e.g. terminology, tone, what to avoid).
+ `;
+ function createCustomInstructionsFile(cwd) {
+ const filePath = node_path_1.default.resolve(cwd, CUSTOM_INSTRUCTIONS_FILE);
+ ensureDocdriftDir(cwd);
+ node_fs_1.default.writeFileSync(filePath, CUSTOM_INSTRUCTIONS_TEMPLATE.trimStart(), "utf8");
+ }
+ const GITIGNORE_ENTRIES = [
+ ".docdrift/evidence",
+ ".docdrift/*.log",
+ ".docdrift/state.json",
+ ".docdrift/run-output.json",
+ ];
+ function hasGitignoreBlock(content) {
+ return GITIGNORE_ENTRIES.every((e) => content.includes(e));
+ }
+ function ensureGitignore(cwd) {
+ const gitignorePath = node_path_1.default.resolve(cwd, ".gitignore");
+ let content = "";
+ if (node_fs_1.default.existsSync(gitignorePath)) {
+ content = node_fs_1.default.readFileSync(gitignorePath, "utf8");
+ if (hasGitignoreBlock(content))
+ return;
+ }
+ const toAppend = content.endsWith("\n") ? GITIGNORE_BLOCK.trimStart() : GITIGNORE_BLOCK;
+ node_fs_1.default.writeFileSync(gitignorePath, content + toAppend, "utf8");
+ }
+ function addGitHubWorkflow(cwd) {
+ const workflowsDir = node_path_1.default.resolve(cwd, ".github", "workflows");
+ node_fs_1.default.mkdirSync(workflowsDir, { recursive: true });
+ const workflowPath = node_path_1.default.join(workflowsDir, "docdrift.yml");
+ node_fs_1.default.writeFileSync(workflowPath, WORKFLOW_CONTENT, "utf8");
+ }
+ function runOnboarding(cwd, choices) {
+ const created = [];
+ ensureDocdriftDir(cwd);
+ created.push(".docdrift/");
+ if (choices.addCustomInstructions) {
+ createCustomInstructionsFile(cwd);
+ created.push("DocDrift.md");
+ }
+ if (choices.addGitignore) {
+ ensureGitignore(cwd);
+ created.push(".gitignore");
+ }
+ if (choices.addWorkflow) {
+ addGitHubWorkflow(cwd);
+ created.push(".github/workflows/docdrift.yml");
+ }
+ return { created };
+ }
@@ -0,0 +1,129 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.SYSTEM_PROMPT = void 0;
+ exports.SYSTEM_PROMPT = `You are a docdrift config expert. Given a repo fingerprint (file tree, package.json scripts, and detected paths), infer a partial docdrift.yaml configuration and a list of interactive choices for the user.
+
+ ## Docdrift config (v2)
+
+ Minimal valid config uses: version: 2, specProviders (or pathMappings only for path-only setups), docsite, devin, policy.
+
+ Example:
+ \`\`\`yaml
+ version: 2
+ specProviders:
+ - format: openapi3
+ current:
+ type: export
+ command: "npm run openapi:export"
+ outputPath: "openapi/generated.json"
+ published: "apps/docs-site/openapi/openapi.json"
+ docsite: "apps/docs-site"
+ pathMappings:
+ - match: "apps/api/**"
+ impacts: ["apps/docs-site/docs/**", "apps/docs-site/openapi/**"]
+ exclude: ["**/CHANGELOG*", "apps/docs-site/blog/**"]
+ requireHumanReview: []
+ mode: strict
+ devin:
+ apiVersion: v1
+ unlisted: true
+ maxAcuLimit: 2
+ tags: ["docdrift"]
+ policy:
+ prCaps: { maxPrsPerDay: 5, maxFilesTouched: 30 }
+ confidence: { autopatchThreshold: 0.8 }
+ allowlist: ["openapi/**", "apps/**"]
+ verification:
+ commands: ["npm run docs:gen", "npm run docs:build"]
+ slaDays: 7
+ slaLabel: docdrift
+ allowNewFiles: false
+ \`\`\`
+
+ ## Field rules
+
+ - version: Always use 2.
+ - specProviders: Array of spec sources. For OpenAPI: format "openapi3", current.type "export", current.command = npm script (e.g. "npm run openapi:export"), current.outputPath = where export writes (e.g. "openapi/generated.json"), published = docsite path (e.g. "apps/docs-site/openapi/openapi.json"). Never use raw script body; use "npm run <scriptName>".
+ - docsite: Path to the docs site root (Docusaurus, Next.js docs, VitePress, MkDocs). Single string or array of strings.
+ - pathMappings: Array of { match, impacts }. match = glob for source/API code; impacts = globs for doc files that may need updates when match changes.
+ - mode: "strict" (only run on spec drift) or "auto" (also run when pathMappings match without spec drift). Default: strict.
+ - policy.verification.commands: Commands to run after patching (e.g. "npm run docs:gen", "npm run docs:build"). Must exist in repo.
+ - exclude: Globs to never touch (e.g. blog, CHANGELOG).
+ - requireHumanReview: Globs that require human review when touched (e.g. guides).
+
+ ## Path-only config (no OpenAPI)
+
+ If no OpenAPI/spec found, use version: 2 with pathMappings only (no specProviders):
+ \`\`\`yaml
+ version: 2
+ docsite: "apps/docs-site"
+ pathMappings: [...]
+ mode: auto
+ \`\`\`
+
+ ## Common patterns
+
+ - Docusaurus: docsite often has docusaurus.config.*; docs:gen may be "docusaurus -- gen-api-docs api"; published path often under docsite/openapi/.
+ - Next/VitePress/MkDocs: docsite is the app root; look for docs/ or similar.
+
+ ## Output rules
+
+ 1. Infer suggestedConfig from the fingerprint. Use version: 2. Only include fields you can confidently infer. Use existing paths and scripts from the fingerprint; do not invent paths that are not present.
+ 2. For each field where confidence is medium or low, OR where multiple valid options exist, add an entry to choices with: key (e.g. "specProviders.0.current.command"), question, options (array of { value, label, recommended? }), defaultIndex, help?, warning?, confidence ("high"|"medium"|"low").
+ 3. Add to skipQuestions the keys for which you are highly confident so the CLI will not ask the user.
+ 4. Prefer fewer, high-quality choices. If truly uncertain, set confidence to "low" and provide 2–3 options.
+ 5. Do not suggest paths that do not exist in the fingerprint. Prefer existing package.json scripts for export and verification commands.
+ 6. suggestedConfig must be a valid partial docdrift config; policy.allowlist and policy.verification.commands are required if you include policy. devin.apiVersion must be "v1" if you include devin.
+
+ ## Example docdrift.yaml
+ # yaml-language-server: $schema=./docdrift.schema.json
+ version: 2
+
+ specProviders:
+ - format: openapi3
+ current:
+ type: export
+ command: "npm run openapi:export"
+ outputPath: "openapi/generated.json"
+ published: "apps/docs-site/openapi/openapi.json"
+
+ docsite: "apps/docs-site"
+ mode: strict
+
+ pathMappings:
+ - match: "apps/api/**"
+ impacts: ["apps/docs-site/docs/**", "apps/docs-site/openapi/**"]
+
+ exclude:
+ - "apps/docs-site/blog/**"
+ - "**/CHANGELOG*"
+
+ requireHumanReview:
+ - "apps/docs-site/docs/guides/**"
+
+ devin:
+ apiVersion: v1
+ unlisted: true
+ maxAcuLimit: 2
+ tags:
+ - docdrift
+ customInstructions:
+ - "DocDrift.md"
+
+ policy:
+ prCaps:
+ maxPrsPerDay: 5
+ maxFilesTouched: 30
+ confidence:
+ autopatchThreshold: 0.8
+ allowlist:
+ - "openapi/**"
+ - "apps/**"
+ verification:
+ commands:
+ - "npm run docs:gen"
+ - "npm run docs:build"
+ slaDays: 7
+ slaLabel: docdrift
+ allowNewFiles: false
+ `;
@@ -0,0 +1,155 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.buildRepoFingerprint = buildRepoFingerprint;
+ exports.fingerprintHash = fingerprintHash;
+ const node_crypto_1 = __importDefault(require("node:crypto"));
+ const node_fs_1 = __importDefault(require("node:fs"));
+ const node_path_1 = __importDefault(require("node:path"));
+ const IGNORE_DIRS = new Set(["node_modules", ".git", "dist", "build", "coverage", ".docdrift"]);
+ const DOC_HINTS = ["openapi", "swagger", "docusaurus", "mkdocs", "next", "vitepress"];
+ const MAX_TREE_DEPTH = 3;
+ function walkDir(dir, depth, tree) {
+ if (depth > MAX_TREE_DEPTH)
+ return;
+ let entries;
+ try {
+ entries = node_fs_1.default.readdirSync(dir, { withFileTypes: true });
+ }
+ catch {
+ return;
+ }
+ const relDir = node_path_1.default.relative(process.cwd(), dir) || ".";
+ const names = [];
+ for (const e of entries) {
+ if (e.name.startsWith(".") && e.name !== ".env")
+ continue;
+ if (IGNORE_DIRS.has(e.name))
+ continue;
+ names.push(e.isDirectory() ? `${e.name}/` : e.name);
+ }
+ names.sort();
+ tree[relDir] = names;
+ for (const e of entries) {
+ if (!e.isDirectory() || IGNORE_DIRS.has(e.name))
+ continue;
+ walkDir(node_path_1.default.join(dir, e.name), depth + 1, tree);
+ }
+ }
+ function findMatchingFiles(cwd, test) {
+ const out = [];
+ function walk(dir, depth) {
+ if (depth > 5)
+ return;
+ let entries;
+ try {
+ entries = node_fs_1.default.readdirSync(dir, { withFileTypes: true });
+ }
+ catch {
+ return;
+ }
+ for (const e of entries) {
+ if (e.name.startsWith(".") && e.name !== ".env")
+ continue;
+ if (IGNORE_DIRS.has(e.name))
+ continue;
+ const full = node_path_1.default.join(dir, e.name);
+ const rel = node_path_1.default.relative(cwd, full);
+ if (e.isFile() && test(rel, e.name))
+ out.push(rel);
+ else if (e.isDirectory())
+ walk(full, depth + 1);
+ }
+ }
+ walk(cwd, 0);
+ return out;
+ }
+ function findDirsNamed(cwd, name) {
+ const out = [];
+ function scan(dir, depth) {
+ if (depth > 2)
+ return;
+ let entries;
+ try {
+ entries = node_fs_1.default.readdirSync(dir, { withFileTypes: true });
+ }
+ catch {
+ return;
+ }
+ for (const e of entries) {
+ if (e.name.startsWith(".") || IGNORE_DIRS.has(e.name))
+ continue;
+ const full = node_path_1.default.join(dir, e.name);
+ const rel = node_path_1.default.relative(cwd, full);
+ if (e.isDirectory()) {
+ if (e.name === name)
+ out.push(rel);
+ scan(full, depth + 1);
+ }
+ }
+ }
+ scan(cwd, 0);
+ return out;
+ }
+ function buildRepoFingerprint(cwd = process.cwd()) {
+ const fileTree = {};
+ walkDir(cwd, 0, fileTree);
+ let rootPackage = { scripts: {}, dependencies: [], workspaces: [] };
+ const pkgPath = node_path_1.default.join(cwd, "package.json");
+ if (node_fs_1.default.existsSync(pkgPath)) {
+ try {
+ const pkg = JSON.parse(node_fs_1.default.readFileSync(pkgPath, "utf8"));
+ rootPackage.scripts = pkg.scripts || {};
+ const deps = { ...pkg.dependencies, ...pkg.devDependencies };
+ rootPackage.dependencies = Object.keys(deps || {}).filter((k) => DOC_HINTS.some((h) => k.toLowerCase().includes(h)));
+ if (pkg.workspaces) {
+ rootPackage.workspaces = Array.isArray(pkg.workspaces) ? pkg.workspaces : [pkg.workspaces];
+ }
+ }
+ catch {
+ // ignore
+ }
+ }
+ const workspacePackages = [];
+ if (rootPackage.workspaces?.length) {
+ for (const w of rootPackage.workspaces) {
+ const base = w.replace("/*", "").replace("*", "");
+ const dir = node_path_1.default.join(cwd, base);
+ if (!node_fs_1.default.existsSync(dir) || !node_fs_1.default.statSync(dir).isDirectory())
+ continue;
+ const subdirs = base.includes("*") ? node_fs_1.default.readdirSync(dir, { withFileTypes: true }).filter((e) => e.isDirectory()).map((e) => node_path_1.default.join(dir, e.name)) : [dir];
+ for (const sub of subdirs) {
+ const pj = node_path_1.default.join(sub, "package.json");
+ if (!node_fs_1.default.existsSync(pj))
+ continue;
+ try {
+ const pkg = JSON.parse(node_fs_1.default.readFileSync(pj, "utf8"));
+ workspacePackages.push({
+ path: node_path_1.default.relative(cwd, sub),
+ scripts: pkg.scripts || {},
+ });
+ }
+ catch {
+ // ignore
+ }
+ }
+ }
+ }
+ const openapi = findMatchingFiles(cwd, (_, name) => /^openapi.*\.json$/i.test(name));
+ const swagger = findMatchingFiles(cwd, (_, name) => /^swagger.*\.json$/i.test(name));
+ const docusaurusConfig = findMatchingFiles(cwd, (_, name) => name.startsWith("docusaurus.config."));
+ const mkdocs = findMatchingFiles(cwd, (_, name) => name === "mkdocs.yml");
+ const docsDirs = findDirsNamed(cwd, "docs");
+ return {
+ fileTree,
+ rootPackage,
+ workspacePackages,
+ foundPaths: { openapi, swagger, docusaurusConfig, mkdocs, docsDirs },
+ };
+ }
+ function fingerprintHash(fingerprint) {
+ const canonical = JSON.stringify(fingerprint, Object.keys(fingerprint).sort());
+ return node_crypto_1.default.createHash("sha256").update(canonical).digest("hex");
+ }
@@ -183,13 +183,13 @@
  },
  "default": []
  },
- "allowConceptualOnlyRun": {
- "type": "boolean",
- "default": false
- },
- "inferMode": {
- "type": "boolean",
- "default": true
+ "mode": {
+ "type": "string",
+ "enum": [
+ "strict",
+ "auto"
+ ],
+ "default": "strict"
  },
  "devin": {
  "type": "object",
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@devinnn/docdrift",
- "version": "0.1.3",
+ "version": "0.1.6",
  "private": false,
  "description": "Detect and remediate documentation drift with Devin sessions",
  "main": "dist/src/index.js",
@@ -42,7 +42,11 @@
  "prepublishOnly": "npm run build"
  },
  "dependencies": {
+ "@ai-sdk/gateway": "^1.0.0",
+ "@inquirer/prompts": "^7.2.0",
  "@octokit/rest": "^21.1.1",
+ "ai": "^4.0.0",
+ "dotenv": "^16.4.5",
  "fastify": "^5.2.1",
  "js-yaml": "^4.1.0",
  "zod": "^3.24.1"
@@ -57,4 +61,4 @@
  "vitest": "^3.0.5",
  "zod-to-json-schema": "^3.25.1"
  }
- }
+ }