@devinnn/docdrift 0.1.1 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -13
- package/dist/src/cli.js +5 -1
- package/dist/src/config/load.js +7 -0
- package/dist/src/config/normalize.js +68 -0
- package/dist/src/config/schema.js +45 -17
- package/dist/src/config/validate.js +3 -4
- package/dist/src/detect/index.js +54 -45
- package/dist/src/detect/openapi.js +83 -0
- package/dist/src/devin/prompts.js +43 -0
- package/dist/src/github/client.js +75 -0
- package/dist/src/index.js +239 -150
- package/dist/src/policy/engine.js +2 -1
- package/dist/src/utils/glob.js +13 -0
- package/package.json +1 -1
package/README.md
CHANGED
@@ -9,6 +9,7 @@ Docs that never lie: detect drift between merged code and docs, then open low-no
 - `detect --base <sha> --head <sha>`
 - `run --base <sha> --head <sha>`
 - `status --since 24h`
+- `sla-check` — Check for doc-drift PRs open 7+ days and open a reminder issue
 - GitHub Action: `/Users/cameronking/Desktop/sideproject/docdrift/.github/workflows/devin-doc-drift.yml`
 - Repo-local config: `/Users/cameronking/Desktop/sideproject/docdrift/docdrift.yaml`
 - Demo API + OpenAPI exporter + driftable docs
@@ -16,17 +17,18 @@ Docs that never lie: detect drift between merged code and docs, then open low-no
 
 ## Why this is low-noise
 
--
--
--
--
+- **Single session, single PR** — One Devin session handles the whole docsite (API reference + guides).
+- **Gate on API spec diff** — We only run when OpenAPI drift is detected; no session for docs-check-only failures.
+- **requireHumanReview** — When the PR touches guides/prose, we open an issue after the PR to direct attention.
+- **7-day SLA** — If a doc-drift PR is open 7+ days, we open a reminder issue (configurable `slaDays`; use `sla-check` CLI or cron workflow).
+- Confidence gating and allowlist/exclude enforcement.
 - Idempotency key prevents duplicate actions for same repo/SHAs/action.
 
-## Detection
+## Detection and gate
 
--
-- Tier 1: OpenAPI drift (`openapi/generated.json` vs
-- Tier 2:
+- **Gate:** We only run a Devin session when **OpenAPI drift** is detected. No drift → no session.
+- Tier 1: OpenAPI drift (`openapi/generated.json` vs published spec)
+- Tier 2: Heuristic path impacts from docAreas (e.g. `apps/api/src/auth/**` → guides)
 
 Output artifacts (under `.docdrift/`):
 
@@ -38,13 +40,13 @@ When you run docdrift as a package (e.g. `npx docdrift` or from another repo), a
 ## Core flow (`docdrift run`)
 
 1. Validate config and command availability.
-2. Build drift report.
+2. Build drift report. **Gate:** If no OpenAPI drift, exit (no session).
 3. Policy decision (`OPEN_PR | UPDATE_EXISTING_PR | OPEN_ISSUE | NOOP`).
-4. Build evidence bundle
-5.
-6.
+4. Build one aggregated evidence bundle for the whole docsite.
+5. One Devin session with whole-docsite prompt; poll to terminal status.
+6. If PR opened and touches `requireHumanReview` paths → create issue to direct attention.
 7. Surface result via GitHub commit comment; open issue on blocked/low-confidence paths.
-8. Persist state
+8. Persist state (including `lastDocDriftPrUrl` for SLA); write `.docdrift/metrics.json`.
 
 ## Where the docs are (this repo)
 
package/dist/src/cli.js
CHANGED
@@ -17,7 +17,7 @@ function getArg(args, flag) {
 async function main() {
     const [, , command, ...args] = process.argv;
     if (!command) {
-        throw new Error("Usage: docdrift <validate|detect|run|status> [options]");
+        throw new Error("Usage: docdrift <validate|detect|run|status|sla-check> [options]");
     }
     switch (command) {
         case "validate": {
@@ -49,6 +49,10 @@ async function main() {
             await (0, index_1.runStatus)(sinceHours);
             return;
         }
+        case "sla-check": {
+            await (0, index_1.runSlaCheck)();
+            return;
+        }
         default:
             throw new Error(`Unknown command: ${command}`);
     }
package/dist/src/config/load.js
CHANGED
@@ -4,9 +4,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.loadConfig = loadConfig;
+exports.loadNormalizedConfig = loadNormalizedConfig;
 const node_fs_1 = __importDefault(require("node:fs"));
 const node_path_1 = __importDefault(require("node:path"));
 const js_yaml_1 = __importDefault(require("js-yaml"));
+const normalize_1 = require("./normalize");
 const schema_1 = require("./schema");
 function loadConfig(configPath = "docdrift.yaml") {
     const resolved = node_path_1.default.resolve(configPath);
@@ -36,3 +38,8 @@ function loadConfig(configPath = "docdrift.yaml") {
     }
     return data;
 }
+/** Load and normalize config for use by detection/run (always has openapi, docsite, etc.) */
+function loadNormalizedConfig(configPath = "docdrift.yaml") {
+    const config = loadConfig(configPath);
+    return (0, normalize_1.normalizeConfig)(config);
+}
package/dist/src/config/normalize.js
ADDED
@@ -0,0 +1,68 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.normalizeConfig = normalizeConfig;
+const node_path_1 = __importDefault(require("node:path"));
+/**
+ * Produce a normalized config that the rest of the app consumes.
+ * Derives openapi/docsite/exclude/requireHumanReview from docAreas when using legacy config.
+ */
+function normalizeConfig(config) {
+    let openapi;
+    let docsite;
+    let exclude = config.exclude ?? [];
+    let requireHumanReview = config.requireHumanReview ?? [];
+    if (config.openapi && config.docsite) {
+        // Simple config
+        openapi = config.openapi;
+        docsite = Array.isArray(config.docsite) ? config.docsite : [config.docsite];
+    }
+    else if (config.docAreas && config.docAreas.length > 0) {
+        // Legacy: derive from docAreas
+        const firstOpenApiArea = config.docAreas.find((a) => a.detect.openapi);
+        if (!firstOpenApiArea?.detect.openapi) {
+            throw new Error("Legacy config requires at least one docArea with detect.openapi");
+        }
+        const o = firstOpenApiArea.detect.openapi;
+        openapi = {
+            export: o.exportCmd,
+            generated: o.generatedPath,
+            published: o.publishedPath,
+        };
+        const allPaths = [o.publishedPath];
+        for (const area of config.docAreas) {
+            area.patch.targets?.forEach((t) => allPaths.push(t));
+            area.detect.paths?.forEach((p) => p.impacts.forEach((i) => allPaths.push(i)));
+        }
+        const roots = new Set();
+        for (const p of allPaths) {
+            const parts = p.split("/").filter(Boolean);
+            if (parts.length >= 2)
+                roots.add(parts[0] + "/" + parts[1]);
+            else if (parts.length === 1)
+                roots.add(parts[0]);
+        }
+        docsite = roots.size > 0 ? [...roots] : [node_path_1.default.dirname(o.publishedPath) || "."];
+        // Derive requireHumanReview from areas with requireHumanConfirmation or conceptual mode
+        const reviewPaths = new Set();
+        for (const area of config.docAreas) {
+            if (area.patch.requireHumanConfirmation || area.mode === "conceptual") {
+                area.patch.targets?.forEach((t) => reviewPaths.add(t));
+                area.detect.paths?.forEach((p) => p.impacts.forEach((i) => reviewPaths.add(i)));
+            }
+        }
+        requireHumanReview = [...reviewPaths];
+    }
+    else {
+        throw new Error("Config must include (openapi + docsite) or docAreas");
+    }
+    return {
+        ...config,
+        openapi,
+        docsite,
+        exclude,
+        requireHumanReview,
+    };
+}
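Not part of the published diff: a minimal usage sketch of the new normalizer with a legacy `docAreas` config. The require path, command string, and file paths are illustrative assumptions; only the field names and the derivation rules come from the code above.

```js
// Usage sketch (assumed require path into the compiled dist output).
const { normalizeConfig } = require("@devinnn/docdrift/dist/src/config/normalize");

// Hypothetical legacy config: one autogen docArea with an OpenAPI detector.
const legacy = {
    docAreas: [
        {
            name: "api-reference",
            mode: "autogen",
            detect: {
                openapi: {
                    exportCmd: "npm run openapi:export",   // illustrative command
                    generatedPath: "openapi/generated.json",
                    publishedPath: "docs/api/openapi.json",
                },
            },
            patch: { targets: ["docs/api/reference.md"], requireHumanConfirmation: false },
        },
    ],
};

const normalized = normalizeConfig(legacy);
// normalized.openapi            -> { export, generated, published } taken from the first OpenAPI docArea
// normalized.docsite            -> ["docs/api"] (first two segments of publishedPath and patch targets)
// normalized.requireHumanReview -> [] (no area is conceptual or requires human confirmation)
// normalized.exclude            -> [] (config.exclude not set)
```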
package/dist/src/config/schema.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.docDriftConfigSchema = void 0;
+exports.docDriftConfigSchema = exports.openApiSimpleSchema = void 0;
 const zod_1 = require("zod");
 const pathRuleSchema = zod_1.z.object({
     match: zod_1.z.string().min(1),
@@ -11,6 +11,12 @@ const openApiDetectSchema = zod_1.z.object({
     generatedPath: zod_1.z.string().min(1),
     publishedPath: zod_1.z.string().min(1),
 });
+/** Simple config: short field names for openapi block */
+exports.openApiSimpleSchema = zod_1.z.object({
+    export: zod_1.z.string().min(1),
+    generated: zod_1.z.string().min(1),
+    published: zod_1.z.string().min(1),
+});
 const docAreaSchema = zod_1.z.object({
     name: zod_1.z.string().min(1),
     mode: zod_1.z.enum(["autogen", "conceptual"]),
@@ -30,8 +36,40 @@ const docAreaSchema = zod_1.z.object({
         requireHumanConfirmation: zod_1.z.boolean().optional().default(false),
     }),
 });
-
+const policySchema = zod_1.z.object({
+    prCaps: zod_1.z.object({
+        maxPrsPerDay: zod_1.z.number().int().positive().default(1),
+        maxFilesTouched: zod_1.z.number().int().positive().default(12),
+    }),
+    confidence: zod_1.z.object({
+        autopatchThreshold: zod_1.z.number().min(0).max(1).default(0.8),
+    }),
+    allowlist: zod_1.z.array(zod_1.z.string().min(1)).min(1),
+    verification: zod_1.z.object({
+        commands: zod_1.z.array(zod_1.z.string().min(1)).min(1),
+    }),
+    /** Days before opening SLA issue for unmerged doc-drift PRs. 0 = disabled. */
+    slaDays: zod_1.z.number().int().min(0).optional().default(7),
+    /** Label to identify doc-drift PRs for SLA check (only these PRs count). */
+    slaLabel: zod_1.z.string().min(1).optional().default("docdrift"),
+    /**
+     * If false (default): Devin may only edit existing files. No new articles, no new folders.
+     * If true: Devin may add new articles, create folders, change information architecture.
+     * Gives teams control to prevent doc sprawl; mainly applies to conceptual/guides.
+     */
+    allowNewFiles: zod_1.z.boolean().optional().default(false),
+});
+exports.docDriftConfigSchema = zod_1.z
+    .object({
     version: zod_1.z.literal(1),
+    /** Simple config: openapi block (API spec = gate for run) */
+    openapi: exports.openApiSimpleSchema.optional(),
+    /** Simple config: docsite root path(s) */
+    docsite: zod_1.z.union([zod_1.z.string().min(1), zod_1.z.array(zod_1.z.string().min(1))]).optional(),
+    /** Paths we never touch (glob patterns) */
+    exclude: zod_1.z.array(zod_1.z.string().min(1)).optional().default([]),
+    /** Paths that require human review when touched (we create issue post-PR) */
+    requireHumanReview: zod_1.z.array(zod_1.z.string().min(1)).optional().default([]),
     devin: zod_1.z.object({
         apiVersion: zod_1.z.literal("v1"),
         unlisted: zod_1.z.boolean().default(true),
@@ -40,18 +78,8 @@ exports.docDriftConfigSchema = zod_1.z.object({
         customInstructions: zod_1.z.array(zod_1.z.string().min(1)).optional(),
         customInstructionContent: zod_1.z.string().optional(),
     }),
-    policy:
-
-
-
-
-        confidence: zod_1.z.object({
-            autopatchThreshold: zod_1.z.number().min(0).max(1).default(0.8),
-        }),
-        allowlist: zod_1.z.array(zod_1.z.string().min(1)).min(1),
-        verification: zod_1.z.object({
-            commands: zod_1.z.array(zod_1.z.string().min(1)).min(1),
-        }),
-    }),
-    docAreas: zod_1.z.array(docAreaSchema).min(1),
-});
+    policy: policySchema,
+    /** Legacy: doc areas (optional when openapi+docsite present) */
+    docAreas: zod_1.z.array(docAreaSchema).optional().default([]),
+})
+    .refine((v) => (v.openapi && v.docsite) || v.docAreas.length >= 1, { message: "Config must include (openapi + docsite) or docAreas" });
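Not part of the published diff: a sketch of a config object in the new simple form that the reshaped schema is meant to accept. Values are placeholders, and the `devin` block may require fields this hunk does not show, so treat the parse result as illustrative.

```js
// Sketch only: field names mirror the schema above; values are placeholders.
const { docDriftConfigSchema } = require("@devinnn/docdrift/dist/src/config/schema");

const result = docDriftConfigSchema.safeParse({
    version: 1,
    openapi: {
        export: "npm run openapi:export",     // command that regenerates the spec
        generated: "openapi/generated.json",  // freshly exported spec
        published: "docs/api/openapi.json",   // spec the docsite ships
    },
    docsite: ["docs"],                        // string or string[] per the union above
    exclude: ["docs/legal/**"],
    requireHumanReview: ["docs/guides/**"],
    // The devin block likely needs more fields than this hunk shows; apiVersion/unlisted are the visible ones.
    devin: { apiVersion: "v1", unlisted: true },
    policy: {
        prCaps: { maxPrsPerDay: 1, maxFilesTouched: 12 },
        confidence: { autopatchThreshold: 0.8 },
        allowlist: ["docs/**", "openapi/**"],
        verification: { commands: ["npm run docs:check"] },
        // slaDays, slaLabel, allowNewFiles fall back to 7, "docdrift", false.
    },
    // docAreas omitted: the trailing .refine() accepts openapi + docsite on their own.
});
```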
package/dist/src/config/validate.js
CHANGED
@@ -13,9 +13,8 @@ async function validateRuntimeConfig(config) {
     }
     const commandSet = new Set([
         ...config.policy.verification.commands,
-        ...config.
-
-            .filter((value) => Boolean(value)),
+        ...(config.openapi ? [config.openapi.export] : []),
+        ...(config.docAreas ?? []).map((area) => area.detect.openapi?.exportCmd).filter((value) => Boolean(value)),
     ]);
     for (const command of commandSet) {
         const binary = commandBinary(command);
@@ -24,7 +23,7 @@ async function validateRuntimeConfig(config) {
             errors.push(`Command not found for '${command}' (binary: ${binary})`);
         }
     }
-    for (const area of config.docAreas) {
+    for (const area of config.docAreas ?? []) {
         if (area.mode === "autogen" && !area.patch.targets?.length) {
             warnings.push(`docArea '${area.name}' is autogen but has no patch.targets`);
         }
package/dist/src/detect/index.js
CHANGED
@@ -7,18 +7,8 @@ exports.buildDriftReport = buildDriftReport;
 const node_path_1 = __importDefault(require("node:path"));
 const fs_1 = require("../utils/fs");
 const git_1 = require("../utils/git");
-const docsCheck_1 = require("./docsCheck");
 const heuristics_1 = require("./heuristics");
 const openapi_1 = require("./openapi");
-function defaultRecommendation(mode, signals) {
-    if (!signals.length) {
-        return "NOOP";
-    }
-    if (mode === "autogen") {
-        return signals.some((s) => s.tier <= 1) ? "OPEN_PR" : "OPEN_ISSUE";
-    }
-    return "OPEN_ISSUE";
-}
 async function buildDriftReport(input) {
     const runInfo = {
         runId: `${Date.now()}`,
@@ -33,25 +23,40 @@ async function buildDriftReport(input) {
     const changedPaths = await (0, git_1.gitChangedPaths)(input.baseSha, input.headSha);
     const diffSummary = await (0, git_1.gitDiffSummary)(input.baseSha, input.headSha);
     const commits = await (0, git_1.gitCommitList)(input.baseSha, input.headSha);
-
-
-
+    (0, fs_1.writeJsonFile)(node_path_1.default.join(evidenceRoot, "changeset.json"), {
+        changedPaths,
+        diffSummary,
+        commits,
+    });
+    // Gate: run OpenAPI detection first. If no OpenAPI drift, exit (no session).
+    const openapiResult = await (0, openapi_1.detectOpenApiDriftFromNormalized)(input.config, evidenceRoot);
+    if (!openapiResult.signal) {
+        // No OpenAPI drift — gate closed. Return empty.
+        const report = {
+            run: {
+                repo: input.repo,
+                baseSha: input.baseSha,
+                headSha: input.headSha,
+                trigger: input.trigger,
+                timestamp: runInfo.timestamp,
+            },
+            items: [],
+        };
+        (0, fs_1.writeJsonFile)(node_path_1.default.resolve(".docdrift", "drift_report.json"), report);
+        return {
+            report,
+            aggregated: null,
+            changedPaths,
+            evidenceRoot,
+            runInfo,
+            hasOpenApiDrift: false,
+        };
+    }
+    // Gate passed: aggregate signals and impacted docs.
+    const signals = [openapiResult.signal];
+    const impactedDocs = new Set(openapiResult.impactedDocs);
+    const summaries = [openapiResult.summary];
     for (const docArea of input.config.docAreas) {
-        const signals = [];
-        const impactedDocs = new Set(docArea.patch.targets ?? []);
-        const summaries = [];
-        if (docsCheck.signal) {
-            signals.push(docsCheck.signal);
-            summaries.push(docsCheck.summary);
-        }
-        if (docArea.detect.openapi) {
-            const openapiResult = await (0, openapi_1.detectOpenApiDrift)(docArea, evidenceRoot);
-            if (openapiResult.signal) {
-                signals.push(openapiResult.signal);
-            }
-            openapiResult.impactedDocs.forEach((doc) => impactedDocs.add(doc));
-            summaries.push(openapiResult.summary);
-        }
         if (docArea.detect.paths?.length) {
             const heuristicResult = (0, heuristics_1.detectHeuristicImpacts)(docArea, changedPaths, evidenceRoot);
             if (heuristicResult.signal) {
@@ -60,18 +65,20 @@ async function buildDriftReport(input) {
             heuristicResult.impactedDocs.forEach((doc) => impactedDocs.add(doc));
             summaries.push(heuristicResult.summary);
         }
-        if (!signals.length) {
-            continue;
-        }
-        items.push({
-            docArea: docArea.name,
-            mode: docArea.mode,
-            signals,
-            impactedDocs: [...impactedDocs],
-            recommendedAction: defaultRecommendation(docArea.mode, signals),
-            summary: summaries.filter(Boolean).join(" | "),
-        });
     }
+    const aggregated = {
+        signals,
+        impactedDocs: [...impactedDocs],
+        summary: summaries.filter(Boolean).join(" | "),
+    };
+    const item = {
+        docArea: "docsite",
+        mode: "autogen",
+        signals: aggregated.signals,
+        impactedDocs: aggregated.impactedDocs,
+        recommendedAction: aggregated.signals.some((s) => s.tier <= 1) ? "OPEN_PR" : "OPEN_ISSUE",
+        summary: aggregated.summary,
+    };
     const report = {
         run: {
             repo: input.repo,
@@ -80,13 +87,15 @@ async function buildDriftReport(input) {
             trigger: input.trigger,
             timestamp: runInfo.timestamp,
         },
-        items,
+        items: [item],
     };
     (0, fs_1.writeJsonFile)(node_path_1.default.resolve(".docdrift", "drift_report.json"), report);
-
+    return {
+        report,
+        aggregated,
         changedPaths,
-
-
-
-
+        evidenceRoot,
+        runInfo,
+        hasOpenApiDrift: true,
+    };
 }
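Not part of the published diff: a sketch of how a caller is expected to consume the gate, mirroring `runDetect`/`runDocDrift` in `index.js` below. The module paths are assumptions; the call shape and the returned fields come from the code above.

```js
// Sketch: the gate is the only thing a caller needs to check before any Devin work.
const { loadNormalizedConfig } = require("@devinnn/docdrift/dist/src/config/load");
const { buildDriftReport } = require("@devinnn/docdrift/dist/src/detect");

async function detectOnly(baseSha, headSha) {
    const config = loadNormalizedConfig();
    const { report, aggregated, hasOpenApiDrift } = await buildDriftReport({
        config,
        repo: process.env.GITHUB_REPOSITORY ?? "local/docdrift",
        baseSha,
        headSha,
        trigger: "manual",
    });
    if (!hasOpenApiDrift) {
        // Gate closed: report.items is [] and aggregated is null, so no session is started.
        return { drift: false };
    }
    // Gate open: a single aggregated "docsite" item drives one Devin session.
    return { drift: true, item: report.items[0], impactedDocs: aggregated.impactedDocs };
}
```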
package/dist/src/detect/openapi.js
CHANGED
@@ -4,6 +4,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.detectOpenApiDrift = detectOpenApiDrift;
+exports.detectOpenApiDriftFromNormalized = detectOpenApiDriftFromNormalized;
 const node_fs_1 = __importDefault(require("node:fs"));
 const node_path_1 = __importDefault(require("node:path"));
 const exec_1 = require("../utils/exec");
@@ -121,3 +122,85 @@ async function detectOpenApiDrift(docArea, evidenceDir) {
         },
     };
 }
+/** Run OpenAPI drift detection from normalized config (simple openapi block). Used as gate. */
+async function detectOpenApiDriftFromNormalized(config, evidenceDir) {
+    const openapi = config.openapi;
+    const exportLogPath = node_path_1.default.join(evidenceDir, "openapi-export.log");
+    const exportResult = await (0, exec_1.execCommand)(openapi.export);
+    node_fs_1.default.writeFileSync(exportLogPath, [
+        `$ ${openapi.export}`,
+        `exitCode: ${exportResult.exitCode}`,
+        "\n--- stdout ---",
+        exportResult.stdout,
+        "\n--- stderr ---",
+        exportResult.stderr,
+    ].join("\n"), "utf8");
+    if (exportResult.exitCode !== 0) {
+        return {
+            impactedDocs: [openapi.published],
+            evidenceFiles: [exportLogPath],
+            summary: "OpenAPI export command failed",
+            signal: {
+                kind: "weak_evidence",
+                tier: 2,
+                confidence: 0.35,
+                evidence: [exportLogPath],
+            },
+        };
+    }
+    if (!node_fs_1.default.existsSync(openapi.generated) || !node_fs_1.default.existsSync(openapi.published)) {
+        return {
+            impactedDocs: [openapi.generated, openapi.published],
+            evidenceFiles: [exportLogPath],
+            summary: "OpenAPI file(s) missing",
+            signal: {
+                kind: "weak_evidence",
+                tier: 2,
+                confidence: 0.35,
+                evidence: [exportLogPath],
+            },
+        };
+    }
+    const generatedRaw = node_fs_1.default.readFileSync(openapi.generated, "utf8");
+    const publishedRaw = node_fs_1.default.readFileSync(openapi.published, "utf8");
+    const generatedJson = JSON.parse(generatedRaw);
+    const publishedJson = JSON.parse(publishedRaw);
+    const normalizedGenerated = (0, json_1.stableStringify)(generatedJson);
+    const normalizedPublished = (0, json_1.stableStringify)(publishedJson);
+    if (normalizedGenerated === normalizedPublished) {
+        return {
+            impactedDocs: [openapi.published],
+            evidenceFiles: [exportLogPath],
+            summary: "No OpenAPI drift detected",
+        };
+    }
+    const summary = summarizeSpecDelta(publishedJson, generatedJson);
+    const diffPath = node_path_1.default.join(evidenceDir, "openapi.diff.txt");
+    node_fs_1.default.writeFileSync(diffPath, [
+        "# OpenAPI Drift Summary",
+        summary,
+        "",
+        "# Published (normalized)",
+        normalizedPublished,
+        "",
+        "# Generated (normalized)",
+        normalizedGenerated,
+    ].join("\n"), "utf8");
+    const impactedDocs = [
+        ...new Set([
+            openapi.published,
+            ...config.docAreas.flatMap((a) => a.patch.targets ?? []).filter(Boolean),
+        ]),
+    ].filter(Boolean);
+    return {
+        impactedDocs,
+        evidenceFiles: [exportLogPath, diffPath],
+        summary,
+        signal: {
+            kind: "openapi_diff",
+            tier: 1,
+            confidence: 0.95,
+            evidence: [diffPath],
+        },
+    };
+}
package/dist/src/devin/prompts.js
CHANGED
@@ -2,6 +2,7 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.buildAutogenPrompt = buildAutogenPrompt;
 exports.buildConceptualPrompt = buildConceptualPrompt;
+exports.buildWholeDocsitePrompt = buildWholeDocsitePrompt;
 function attachmentBlock(attachmentUrls) {
     return attachmentUrls.map((url, index) => `- ATTACHMENT ${index + 1}: ${url}`).join("\n");
 }
@@ -61,3 +62,45 @@ function buildConceptualPrompt(input) {
     }
     return base;
 }
+/** Whole-docsite prompt for single-session runs */
+function buildWholeDocsitePrompt(input) {
+    const excludeNote = input.config.exclude?.length > 0
+        ? `\n6) NEVER modify files matching these patterns: ${input.config.exclude.join(", ")}`
+        : "";
+    const requireReviewNote = input.config.requireHumanReview?.length > 0
+        ? `\n7) If you touch files under: ${input.config.requireHumanReview.join(", ")} — note it in the PR description (a follow-up issue will flag for human review).`
+        : "";
+    const allowNewFiles = input.config.policy.allowNewFiles ?? false;
+    const newFilesRule = allowNewFiles
+        ? "8) You MAY add new articles, create new folders, and change information architecture when warranted."
+        : "8) You may ONLY edit existing files. Do NOT create new files, new articles, or new folders. Do NOT change information architecture.";
+    const base = [
+        "You are Devin. Task: update the entire docsite to match the API and code changes.",
+        "",
+        "EVIDENCE (attachments):",
+        input.attachmentUrls.map((url, i) => `- ATTACHMENT ${i + 1}: ${url}`).join("\n"),
+        "",
+        "Rules (hard):",
+        `1) Only modify files under: ${input.config.policy.allowlist.join(", ")}`,
+        "2) Make the smallest change that makes docs correct.",
+        "3) Update API reference (OpenAPI) and any impacted guides in one PR.",
+        "4) Run verification commands and record results:",
+        ...input.config.policy.verification.commands.map((c) => `  - ${c}`),
+        "5) Open exactly ONE pull request with a clear title and reviewer-friendly description.",
+        `6) Docsite scope: ${input.config.docsite.join(", ")}` +
+            excludeNote +
+            requireReviewNote +
+            `\n${newFilesRule}`,
+        "",
+        "Structured Output:",
+        "- Maintain structured output in the provided JSON schema.",
+        "- Update it at: planning, editing, verifying, open-pr, blocked, done.",
+        "- If blocked, fill blocked.questions with concrete questions.",
+        "",
+        "Goal: Produce ONE PR that updates the whole docsite (API reference + guides) using only the evidence.",
+    ].join("\n");
+    if (input.config.devin.customInstructionContent) {
+        return base + "\n\n---\n\nCustom instructions:\n\n" + input.config.devin.customInstructionContent;
+    }
+    return base;
+}
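Not part of the published diff: a sketch of the input shape `buildWholeDocsitePrompt` reads, based solely on the property accesses above. The require path and all values are illustrative.

```js
// Sketch of the inputs the whole-docsite prompt builder actually reads.
const { buildWholeDocsitePrompt } = require("@devinnn/docdrift/dist/src/devin/prompts");

const prompt = buildWholeDocsitePrompt({
    attachmentUrls: ["https://example.com/evidence/bundle.tar.gz"], // placeholder URL
    config: {
        docsite: ["docs"],
        exclude: ["docs/legal/**"],
        requireHumanReview: ["docs/guides/**"],
        devin: { customInstructionContent: undefined },
        policy: {
            allowlist: ["docs/**", "openapi/**"],
            verification: { commands: ["npm run docs:check"] },
            allowNewFiles: false, // default: edit existing files only, no new articles
        },
    },
});
// prompt is one string: evidence list, hard rules 1-8, structured-output steps, and the one-PR goal.
```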
package/dist/src/github/client.js
CHANGED
@@ -1,9 +1,14 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseRepo = parseRepo;
 exports.postCommitComment = postCommitComment;
 exports.createIssue = createIssue;
 exports.renderRunComment = renderRunComment;
 exports.renderBlockedIssueBody = renderBlockedIssueBody;
+exports.renderRequireHumanReviewIssueBody = renderRequireHumanReviewIssueBody;
+exports.renderSlaIssueBody = renderSlaIssueBody;
+exports.isPrOpen = isPrOpen;
+exports.listOpenPrsWithLabel = listOpenPrsWithLabel;
 const rest_1 = require("@octokit/rest");
 function parseRepo(full) {
     const [owner, repo] = full.split("/");
@@ -84,3 +89,73 @@ function renderBlockedIssueBody(input) {
     }
     return lines.join("\n");
 }
+function renderRequireHumanReviewIssueBody(input) {
+    const lines = [];
+    lines.push("## Why this issue");
+    lines.push("");
+    lines.push("This doc-drift PR touches paths that require human review (guides, prose, or other non-technical docs).");
+    lines.push("");
+    lines.push("## What to do");
+    lines.push("");
+    lines.push(`1. Review the PR: ${input.prUrl}`);
+    lines.push("2. Confirm the changes are correct or request modifications.");
+    lines.push("3. Merge or close the PR.");
+    lines.push("");
+    if (input.touchedPaths.length > 0) {
+        lines.push("## Touched paths (require review)");
+        lines.push("");
+        for (const p of input.touchedPaths.slice(0, 20)) {
+            lines.push(`- \`${p}\``);
+        }
+        if (input.touchedPaths.length > 20) {
+            lines.push(`- ... and ${input.touchedPaths.length - 20} more`);
+        }
+    }
+    return lines.join("\n");
+}
+function renderSlaIssueBody(input) {
+    const lines = [];
+    lines.push("## Why this issue");
+    lines.push("");
+    lines.push(`Doc-drift PR(s) have been open for ${input.slaDays}+ days. Docs may be out of sync.`);
+    lines.push("");
+    lines.push("## What to do");
+    lines.push("");
+    lines.push("Please review and merge or close the following PR(s):");
+    lines.push("");
+    for (const url of input.prUrls) {
+        lines.push(`- ${url}`);
+    }
+    lines.push("");
+    lines.push("If the PR is no longer needed, close it to resolve this reminder.");
+    return lines.join("\n");
+}
+/** Check if a PR is still open. URL format: https://github.com/owner/repo/pull/123 */
+async function isPrOpen(token, prUrl) {
+    const match = prUrl.match(/github\.com[/]([^/]+)[/]([^/]+)[/]pull[/](\d+)/);
+    if (!match)
+        return { open: false };
+    const [, owner, repo, numStr] = match;
+    const number = parseInt(numStr ?? "0", 10);
+    if (!owner || !repo || !Number.isFinite(number))
+        return { open: false };
+    const octokit = new rest_1.Octokit({ auth: token });
+    const { data } = await octokit.pulls.get({ owner, repo, pull_number: number });
+    return { open: data.state === "open", number: data.number };
+}
+/** List open PRs with a given label */
+async function listOpenPrsWithLabel(token, repository, label) {
+    const octokit = new rest_1.Octokit({ auth: token });
+    const { owner, repo } = parseRepo(repository);
+    const { data } = await octokit.pulls.list({
+        owner,
+        repo,
+        state: "open",
+        labels: label,
+    });
+    return data.map((pr) => ({
+        url: pr.html_url ?? "",
+        number: pr.number,
+        created_at: pr.created_at ?? "",
+    }));
+}
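Not part of the published diff: a sketch wiring the new SLA helpers together, roughly what `runSlaCheck` in `index.js` below does. The repository name, label, and require path are placeholders.

```js
// Sketch of the SLA helpers combined; the createIssue/renderSlaIssueBody call shapes come from the code above.
const { listOpenPrsWithLabel, renderSlaIssueBody, createIssue } = require("@devinnn/docdrift/dist/src/github/client");

async function remindAboutStaleDocPrs(token) {
    const slaDays = 7; // matches the policy.slaDays default
    const prs = await listOpenPrsWithLabel(token, "acme/docs-repo", "docdrift");
    const cutoff = Date.now() - slaDays * 24 * 60 * 60 * 1000;
    const stale = prs.filter((pr) => pr.created_at && Date.parse(pr.created_at) <= cutoff);
    if (stale.length === 0) {
        return null;
    }
    return createIssue({
        token,
        repository: "acme/docs-repo",
        issue: {
            title: "[docdrift] Docs out of sync — merge doc drift PR(s)",
            body: renderSlaIssueBody({ prUrls: stale.map((pr) => pr.url), slaDays }),
            labels: ["docdrift"],
        },
    });
}
```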
package/dist/src/index.js
CHANGED
@@ -7,6 +7,7 @@ exports.STATE_PATH = void 0;
 exports.runDetect = runDetect;
 exports.runDocDrift = runDocDrift;
 exports.runValidate = runValidate;
+exports.runSlaCheck = runSlaCheck;
 exports.runStatus = runStatus;
 exports.resolveTrigger = resolveTrigger;
 exports.parseDurationHours = parseDurationHours;
@@ -21,6 +22,7 @@ const engine_1 = require("./policy/engine");
 const state_1 = require("./policy/state");
 const log_1 = require("./utils/log");
 const prompts_1 = require("./devin/prompts");
+const glob_1 = require("./utils/glob");
 const schemas_1 = require("./devin/schemas");
 const v1_1 = require("./devin/v1");
 function parseStructured(session) {
@@ -48,29 +50,17 @@ function inferQuestions(structured) {
         "What are the exact user-visible semantics after this merge?",
     ];
 }
-async function
+async function executeSessionSingle(input) {
     const attachmentUrls = [];
     for (const attachmentPath of input.attachmentPaths) {
         const url = await (0, v1_1.devinUploadAttachment)(input.apiKey, attachmentPath);
         attachmentUrls.push(url);
     }
-    const prompt =
-
-
-
-
-            allowlist: input.config.policy.allowlist,
-            confidenceThreshold: input.config.policy.confidence.autopatchThreshold,
-            customAppend: input.config.devin.customInstructionContent ?? undefined,
-        })
-        : (0, prompts_1.buildConceptualPrompt)({
-            item: input.item,
-            attachmentUrls,
-            verificationCommands: input.config.policy.verification.commands,
-            allowlist: input.config.policy.allowlist,
-            confidenceThreshold: input.config.policy.confidence.autopatchThreshold,
-            customAppend: input.config.devin.customInstructionContent ?? undefined,
-        });
+    const prompt = (0, prompts_1.buildWholeDocsitePrompt)({
+        aggregated: input.aggregated,
+        config: input.config,
+        attachmentUrls,
+    });
     const session = await (0, v1_1.devinCreateSession)(input.apiKey, {
         prompt,
         unlisted: input.config.devin.unlisted,
@@ -132,14 +122,15 @@ async function runDetect(options) {
         throw new Error(`Config validation failed:\n${runtimeValidation.errors.join("\n")}`);
     }
     const repo = process.env.GITHUB_REPOSITORY ?? "local/docdrift";
-    const
-
+    const normalized = (0, load_1.loadNormalizedConfig)();
+    const { report, hasOpenApiDrift } = await (0, detect_1.buildDriftReport)({
+        config: normalized,
         repo,
         baseSha: options.baseSha,
        headSha: options.headSha,
         trigger: options.trigger ?? "manual",
     });
-    (0, log_1.logInfo)(`Drift items detected: ${report.items.length}`);
+    (0, log_1.logInfo)(`Drift items detected: ${report.items.length} (hasOpenApiDrift: ${hasOpenApiDrift})`);
     return { hasDrift: report.items.length > 0 };
 }
 async function runDocDrift(options) {
@@ -148,183 +139,233 @@ async function runDocDrift(options) {
     if (runtimeValidation.errors.length) {
         throw new Error(`Config validation failed:\n${runtimeValidation.errors.join("\n")}`);
     }
+    const normalized = (0, load_1.loadNormalizedConfig)();
     const repo = process.env.GITHUB_REPOSITORY ?? "local/docdrift";
     const commitSha = process.env.GITHUB_SHA ?? options.headSha;
     const githubToken = process.env.GITHUB_TOKEN;
     const devinApiKey = process.env.DEVIN_API_KEY;
-    const { report, runInfo, evidenceRoot } = await (0, detect_1.buildDriftReport)({
-        config,
+    const { report, aggregated, runInfo, evidenceRoot, hasOpenApiDrift } = await (0, detect_1.buildDriftReport)({
+        config: normalized,
         repo,
         baseSha: options.baseSha,
         headSha: options.headSha,
         trigger: options.trigger ?? "manual",
     });
-
+    // Gate: no OpenAPI drift — exit early, no session
+    if (!hasOpenApiDrift || report.items.length === 0) {
+        (0, log_1.logInfo)("No OpenAPI drift; skipping session");
+        return [];
+    }
+    const item = report.items[0];
+    const docAreaConfig = {
+        name: "docsite",
+        mode: "autogen",
+        owners: { reviewers: [] },
+        detect: { openapi: { exportCmd: normalized.openapi.export, generatedPath: normalized.openapi.generated, publishedPath: normalized.openapi.published }, paths: [] },
+        patch: { targets: [], requireHumanConfirmation: false },
+    };
     let state = (0, state_1.loadState)();
     const startedAt = Date.now();
     const results = [];
     const metrics = {
-        driftItemsDetected:
+        driftItemsDetected: 1,
         prsOpened: 0,
         issuesOpened: 0,
         blockedCount: 0,
         timeToSessionTerminalMs: [],
-        docAreaCounts: {},
+        docAreaCounts: { docsite: 1 },
         noiseRateProxy: 0,
     };
-
-
-
-
-
-
-
-
-
-
+    const decision = (0, engine_1.decidePolicy)({
+        item,
+        docAreaConfig,
+        config,
+        state,
+        repo,
+        baseSha: options.baseSha,
+        headSha: options.headSha,
+    });
+    if (decision.action === "NOOP") {
+        results.push({
+            docArea: item.docArea,
+            decision,
+            outcome: "NO_CHANGE",
+            summary: decision.reason,
+        });
+        (0, bundle_1.writeMetrics)(metrics);
+        return results;
+    }
+    if (decision.action === "UPDATE_EXISTING_PR") {
+        const existingPr = state.areaLatestPr["docsite"];
+        results.push({
+            docArea: item.docArea,
+            decision,
+            outcome: existingPr ? "NO_CHANGE" : "BLOCKED",
+            summary: existingPr ? `Bundled into existing PR: ${existingPr}` : "PR cap reached",
+            prUrl: existingPr,
+        });
+        state = (0, engine_1.applyDecisionToState)({
             state,
-
-
-
+            decision,
+            docArea: "docsite",
+            outcome: existingPr ? "NO_CHANGE" : "BLOCKED",
+            link: existingPr,
         });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        let sessionOutcome = {
-            outcome: "NO_CHANGE",
-            summary: "Skipped Devin session",
-            verification: config.policy.verification.commands.map((command) => ({
+        (0, state_1.saveState)(state);
+        (0, bundle_1.writeMetrics)(metrics);
+        return results;
+    }
+    const bundle = await (0, bundle_1.buildEvidenceBundle)({ runInfo, item, evidenceRoot });
+    const attachmentPaths = [...new Set([bundle.archivePath, ...bundle.attachmentPaths])];
+    let sessionOutcome = {
+        outcome: "NO_CHANGE",
+        summary: "Skipped Devin session",
+        verification: normalized.policy.verification.commands.map((command) => ({
+            command,
+            result: "not run",
+        })),
+    };
+    if (devinApiKey) {
+        const sessionStart = Date.now();
+        sessionOutcome = await executeSessionSingle({
+            apiKey: devinApiKey,
+            repository: repo,
+            item,
+            aggregated: aggregated,
+            attachmentPaths,
+            config: normalized,
+        });
+        metrics.timeToSessionTerminalMs.push(Date.now() - sessionStart);
+    }
+    else {
+        (0, log_1.logWarn)("DEVIN_API_KEY not set; running fallback behavior", { docArea: item.docArea });
+        sessionOutcome = {
+            outcome: "BLOCKED",
+            summary: "DEVIN_API_KEY missing; cannot start Devin session",
+            questions: ["Set DEVIN_API_KEY in environment or GitHub Actions secrets"],
+            verification: normalized.policy.verification.commands.map((command) => ({
                 command,
                 result: "not run",
             })),
         };
-
-
-
-
-
-
-
-
-        });
-        metrics.timeToSessionTerminalMs.push(Date.now() - sessionStart);
-    }
-    else {
-        (0, log_1.logWarn)("DEVIN_API_KEY not set; running fallback behavior", { docArea: item.docArea });
-        sessionOutcome = {
-            outcome: "BLOCKED",
-            summary: "DEVIN_API_KEY missing; cannot start Devin session",
-            questions: ["Set DEVIN_API_KEY in environment or GitHub Actions secrets"],
-            verification: config.policy.verification.commands.map((command) => ({
-                command,
-                result: "not run",
-            })),
-        };
-    }
-    let issueUrl;
-    if (githubToken &&
-        (decision.action === "OPEN_ISSUE" ||
-            sessionOutcome.outcome === "BLOCKED" ||
-            sessionOutcome.outcome === "NO_CHANGE")) {
+    }
+    let issueUrl;
+    if (sessionOutcome.outcome === "PR_OPENED" && sessionOutcome.prUrl) {
+        metrics.prsOpened += 1;
+        state.lastDocDriftPrUrl = sessionOutcome.prUrl;
+        state.lastDocDriftPrOpenedAt = new Date().toISOString();
+        const touchedRequireReview = (item.impactedDocs ?? []).filter((p) => normalized.requireHumanReview.some((glob) => (0, glob_1.matchesGlob)(glob, p)));
+        if (githubToken && touchedRequireReview.length > 0) {
            issueUrl = await (0, client_1.createIssue)({
                token: githubToken,
                repository: repo,
                issue: {
-                    title:
-                    body: (0, client_1.
-
-
-                    questions: sessionOutcome.questions ?? [
-                        "Please confirm intended behavior and doc wording.",
-                    ],
-                    sessionUrl: sessionOutcome.sessionUrl,
+                    title: "[docdrift] Docs out of sync — review doc drift PR",
+                    body: (0, client_1.renderRequireHumanReviewIssueBody)({
+                        prUrl: sessionOutcome.prUrl,
+                        touchedPaths: touchedRequireReview,
                    }),
                    labels: ["docdrift"],
                },
            });
            metrics.issuesOpened += 1;
-            sessionOutcome.outcome = "ISSUE_OPENED";
        }
-
-
-
-
-
+    }
+    else if (githubToken &&
+        (decision.action === "OPEN_ISSUE" ||
+            sessionOutcome.outcome === "BLOCKED" ||
+            sessionOutcome.outcome === "NO_CHANGE")) {
+        issueUrl = await (0, client_1.createIssue)({
+            token: githubToken,
+            repository: repo,
+            issue: {
+                title: "[docdrift] docsite: docs drift requires input",
+                body: (0, client_1.renderBlockedIssueBody)({
+                    docArea: item.docArea,
+                    evidenceSummary: item.summary,
+                    questions: sessionOutcome.questions ?? [
+                        "Please confirm intended behavior and doc wording.",
+                    ],
+                    sessionUrl: sessionOutcome.sessionUrl,
+                }),
+                labels: ["docdrift"],
+            },
+        });
+        metrics.issuesOpened += 1;
+        if (sessionOutcome.outcome !== "PR_OPENED") {
+            sessionOutcome.outcome = "ISSUE_OPENED";
        }
-
+    }
+    if (sessionOutcome.outcome === "BLOCKED") {
+        metrics.blockedCount += 1;
+    }
+    const result = {
+        docArea: item.docArea,
+        decision,
+        outcome: sessionOutcome.outcome,
+        summary: sessionOutcome.summary,
+        sessionUrl: sessionOutcome.sessionUrl,
+        prUrl: sessionOutcome.prUrl,
+        issueUrl,
+    };
+    results.push(result);
+    state = (0, engine_1.applyDecisionToState)({
+        state,
+        decision,
+        docArea: "docsite",
+        outcome: sessionOutcome.outcome,
+        link: sessionOutcome.prUrl ?? issueUrl,
+    });
+    if (sessionOutcome.outcome === "PR_OPENED" && sessionOutcome.prUrl) {
+        state.lastDocDriftPrUrl = sessionOutcome.prUrl;
+        state.lastDocDriftPrOpenedAt = new Date().toISOString();
+    }
+    (0, state_1.saveState)(state);
+    if (githubToken) {
+        const body = (0, client_1.renderRunComment)({
            docArea: item.docArea,
-            decision,
-            outcome: sessionOutcome.outcome,
            summary: sessionOutcome.summary,
+            decision: decision.action,
+            outcome: sessionOutcome.outcome,
            sessionUrl: sessionOutcome.sessionUrl,
            prUrl: sessionOutcome.prUrl,
            issueUrl,
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            validation: sessionOutcome.verification,
+        });
+        await (0, client_1.postCommitComment)({
+            token: githubToken,
+            repository: repo,
+            commitSha,
+            body,
+        });
+    }
+    const slaDays = normalized.policy.slaDays ?? 0;
+    if (githubToken && slaDays > 0 && state.lastDocDriftPrUrl && state.lastDocDriftPrOpenedAt) {
+        const openedAt = Date.parse(state.lastDocDriftPrOpenedAt);
+        const daysOld = (Date.now() - openedAt) / (24 * 60 * 60 * 1000);
+        const lastSla = state.lastSlaIssueOpenedAt ? Date.parse(state.lastSlaIssueOpenedAt) : 0;
+        const slaCooldown = 6 * 24 * 60 * 60 * 1000;
+        if (daysOld >= slaDays && Date.now() - lastSla > slaCooldown) {
+            const slaIssueUrl = await (0, client_1.createIssue)({
                token: githubToken,
                repository: repo,
-
-
+                issue: {
+                    title: "[docdrift] Docs out of sync — merge doc drift PR(s)",
+                    body: (0, client_1.renderSlaIssueBody)({
+                        prUrls: [state.lastDocDriftPrUrl],
+                        slaDays,
+                    }),
+                    labels: ["docdrift"],
+                },
            });
+            state.lastSlaIssueOpenedAt = new Date().toISOString();
+            (0, state_1.saveState)(state);
        }
-        state = (0, engine_1.applyDecisionToState)({
-            state,
-            decision,
-            docArea: item.docArea,
-            outcome: sessionOutcome.outcome,
-            link: sessionOutcome.prUrl ?? issueUrl,
-        });
    }
-
-    metrics.noiseRateProxy =
-        metrics.driftItemsDetected === 0
-            ? 0
-            : Number((metrics.prsOpened / metrics.driftItemsDetected).toFixed(4));
+    metrics.noiseRateProxy = metrics.prsOpened;
     (0, bundle_1.writeMetrics)(metrics);
     (0, log_1.logInfo)("Run complete", {
-        items:
+        items: 1,
         elapsedMs: Date.now() - startedAt,
     });
     return results;
@@ -338,6 +379,54 @@ async function runValidate() {
     runtimeValidation.warnings.forEach((warning) => (0, log_1.logWarn)(warning));
     (0, log_1.logInfo)("Config is valid");
 }
+async function runSlaCheck() {
+    const githubToken = process.env.GITHUB_TOKEN;
+    if (!githubToken) {
+        throw new Error("GITHUB_TOKEN is required for sla-check command");
+    }
+    const repo = process.env.GITHUB_REPOSITORY;
+    if (!repo) {
+        throw new Error("GITHUB_REPOSITORY is required for sla-check command");
+    }
+    const normalized = (0, load_1.loadNormalizedConfig)();
+    const slaDays = normalized.policy.slaDays ?? 0;
+    const slaLabel = normalized.policy.slaLabel ?? "docdrift";
+    if (slaDays <= 0) {
+        (0, log_1.logInfo)("SLA check disabled (slaDays <= 0)");
+        return { issueOpened: false };
+    }
+    const cutoff = new Date(Date.now() - slaDays * 24 * 60 * 60 * 1000);
+    const openPrs = await (0, client_1.listOpenPrsWithLabel)(githubToken, repo, slaLabel);
+    const stalePrs = openPrs.filter((pr) => {
+        const created = pr.created_at ? Date.parse(pr.created_at) : Date.now();
+        return Number.isFinite(created) && created <= cutoff.getTime();
+    });
+    if (stalePrs.length === 0) {
+        (0, log_1.logInfo)("No doc-drift PRs open longer than slaDays; nothing to do");
+        return { issueOpened: false };
+    }
+    let state = (0, state_1.loadState)();
+    const lastSla = state.lastSlaIssueOpenedAt ? Date.parse(state.lastSlaIssueOpenedAt) : 0;
+    const slaCooldown = 6 * 24 * 60 * 60 * 1000;
+    if (Date.now() - lastSla < slaCooldown) {
+        (0, log_1.logInfo)("SLA issue cooldown; skipping");
+        return { issueOpened: false };
+    }
+    const prUrls = stalePrs.map((p) => p.url).filter(Boolean);
+    await (0, client_1.createIssue)({
+        token: githubToken,
+        repository: repo,
+        issue: {
+            title: "[docdrift] Docs out of sync — merge doc drift PR(s)",
+            body: (0, client_1.renderSlaIssueBody)({ prUrls, slaDays }),
+            labels: ["docdrift"],
+        },
+    });
+    state.lastSlaIssueOpenedAt = new Date().toISOString();
+    (0, state_1.saveState)(state);
+    (0, log_1.logInfo)(`Opened SLA issue for ${prUrls.length} stale PR(s)`);
+    return { issueOpened: true };
+}
 async function runStatus(sinceHours = 24) {
     const apiKey = process.env.DEVIN_API_KEY;
     if (!apiKey) {
package/dist/src/policy/engine.js
CHANGED
@@ -21,7 +21,8 @@ function decidePolicy(input) {
     const capReached = prCountToday >= config.policy.prCaps.maxPrsPerDay;
     const areaDailyKey = `${today}:${item.docArea}`;
     const exceedsFileCap = item.impactedDocs.length > config.policy.prCaps.maxFilesTouched;
-    const
+    const exclude = "exclude" in config && Array.isArray(config.exclude) ? config.exclude : [];
+    const hasPathOutsideAllowlist = item.impactedDocs.some((filePath) => filePath && !(0, glob_1.isPathAllowedAndNotExcluded)(filePath, config.policy.allowlist, exclude));
     let action = "NOOP";
     let reason = "No action needed";
     if (hasPathOutsideAllowlist) {
package/dist/src/utils/glob.js
CHANGED
@@ -3,6 +3,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.globToRegExp = globToRegExp;
 exports.matchesGlob = matchesGlob;
 exports.isPathAllowed = isPathAllowed;
+exports.isPathExcluded = isPathExcluded;
+exports.isPathAllowedAndNotExcluded = isPathAllowedAndNotExcluded;
 function escapeRegex(input) {
     return input.replace(/[|\\{}()[\]^$+?.]/g, "\\$&");
 }
@@ -19,3 +21,14 @@ function matchesGlob(glob, value) {
 function isPathAllowed(path, allowlist) {
     return allowlist.some((glob) => matchesGlob(glob, path));
 }
+function isPathExcluded(path, exclude) {
+    if (!exclude?.length)
+        return false;
+    return exclude.some((glob) => matchesGlob(glob, path));
+}
+/** Path is allowed by allowlist AND not excluded */
+function isPathAllowedAndNotExcluded(path, allowlist, exclude = []) {
+    if (isPathExcluded(path, exclude))
+        return false;
+    return isPathAllowed(path, allowlist);
+}