@devinnn/docdrift 0.1.2 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/src/cli.js +11 -9
- package/dist/src/config/normalize.js +93 -6
- package/dist/src/config/schema.js +78 -18
- package/dist/src/config/validate.js +6 -2
- package/dist/src/detect/index.js +76 -21
- package/dist/src/devin/prompts.js +45 -1
- package/dist/src/github/client.js +13 -0
- package/dist/src/index.js +68 -10
- package/dist/src/spec-providers/fern.js +123 -0
- package/dist/src/spec-providers/graphql.js +168 -0
- package/dist/src/spec-providers/openapi.js +181 -0
- package/dist/src/spec-providers/postman.js +193 -0
- package/dist/src/spec-providers/registry.js +26 -0
- package/dist/src/spec-providers/swagger2.js +229 -0
- package/dist/src/spec-providers/types.js +2 -0
- package/dist/src/utils/fetch.js +87 -0
- package/dist/src/utils/git.js +20 -0
- package/docdrift.schema.json +438 -0
- package/package.json +9 -4
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
36
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
|
+
};
|
|
38
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
+
exports.detectPostmanSpecDrift = detectPostmanSpecDrift;
|
|
40
|
+
const node_fs_1 = __importDefault(require("node:fs"));
|
|
41
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
42
|
+
const fs_1 = require("../utils/fs");
|
|
43
|
+
const json_1 = require("../utils/json");
|
|
44
|
+
const fetch_1 = require("../utils/fetch");
|
|
45
|
+
/**
 * Collect "<METHOD> <url>" endpoint keys from a parsed Postman collection.
 * Handles string-valued requests (URL shorthand, implied GET) and recurses
 * into nested folder items.
 * @param {object|null|undefined} collection Parsed Postman collection JSON.
 * @returns {Set<string>} endpoint keys, e.g. "GET https://api.example.com/x".
 */
function extractEndpoints(collection) {
    const endpoints = new Set();
    function walk(items) {
        for (const item of items) {
            if (!item)
                continue;
            if (item.request) {
                // A string request is Postman shorthand for a GET to that URL.
                const req = typeof item.request === "string" ? { url: item.request, method: "GET" } : item.request;
                const url = req?.url?.raw ?? req?.url ?? "";
                const method = (req?.method ?? "GET").toUpperCase();
                if (url) {
                    endpoints.add(`${method} ${url}`);
                }
            }
            else if (item.item) {
                // Folder: recurse into its children.
                walk(item.item);
            }
        }
    }
    const items = collection?.item ?? [];
    // A single bare item (not wrapped in an array) is tolerated.
    walk(Array.isArray(items) ? items : [items]);
    return endpoints;
}
|
|
70
|
+
/**
 * Resolve the "current" Postman collection text for the configured source.
 * Supports url (remote fetch), local (file read), and export-command sources.
 * @throws {Error} when the path is missing or the export command fails.
 */
async function getCurrentContent(config) {
    const source = config.current;
    switch (source.type) {
        case "url":
            return (0, fetch_1.fetchSpec)(source.url);
        case "local": {
            if (!node_fs_1.default.existsSync(source.path)) {
                throw new Error(`Postman collection path not found: ${source.path}`);
            }
            return node_fs_1.default.readFileSync(source.path, "utf8");
        }
        default: {
            // Export source: run the configured command, then read its output file.
            const { execCommand } = await Promise.resolve().then(() => __importStar(require("../utils/exec")));
            const exported = await execCommand(source.command);
            if (exported.exitCode !== 0) {
                throw new Error(`Postman export failed: ${exported.stderr}`);
            }
            if (!node_fs_1.default.existsSync(source.outputPath)) {
                throw new Error(`Postman export did not create: ${source.outputPath}`);
            }
            return node_fs_1.default.readFileSync(source.outputPath, "utf8");
        }
    }
}
|
|
91
|
+
/**
 * Build a tier-2 "weak evidence" drift result for Postman checks.
 * Used when the comparison itself could not be completed
 * (fetch/export failure, missing published file, unparseable JSON).
 */
function weakPostmanSignalResult(summary, publishedPath, evidence) {
    return {
        hasDrift: true,
        summary,
        evidenceFiles: evidence,
        impactedDocs: [publishedPath],
        signal: {
            kind: "weak_evidence",
            tier: 2,
            confidence: 0.35,
            evidence,
        },
    };
}
/**
 * Detect drift between the current Postman collection and the published copy
 * by diffing their "<METHOD> <url>" endpoint sets.
 * Writes diff evidence into `evidenceDir` when drift is found.
 * @param {object} config Postman spec config (format, current, published).
 * @param {string} evidenceDir Directory where evidence files are written.
 * @returns {Promise<object>} drift result (hasDrift, summary, evidence, signal).
 */
async function detectPostmanSpecDrift(config, evidenceDir) {
    if (config.format !== "postman") {
        // Wrong detector for this config; report no drift and no impacted docs.
        return {
            hasDrift: false,
            summary: `Format ${config.format} is not postman`,
            evidenceFiles: [],
            impactedDocs: [],
        };
    }
    (0, fs_1.ensureDir)(evidenceDir);
    let currentContent;
    try {
        currentContent = await getCurrentContent(config);
    }
    catch (err) {
        // Could not obtain the current collection: log the failure as evidence.
        const msg = err instanceof Error ? err.message : String(err);
        const logPath = node_path_1.default.join(evidenceDir, "postman-export.log");
        node_fs_1.default.writeFileSync(logPath, msg, "utf8");
        return weakPostmanSignalResult(`Postman current spec failed: ${msg}`, config.published, [logPath]);
    }
    if (!node_fs_1.default.existsSync(config.published)) {
        return weakPostmanSignalResult("Postman published file missing", config.published, []);
    }
    const publishedRaw = node_fs_1.default.readFileSync(config.published, "utf8");
    let currentJson;
    let publishedJson;
    try {
        currentJson = JSON.parse(currentContent);
        publishedJson = JSON.parse(publishedRaw);
    }
    catch {
        return weakPostmanSignalResult("Postman collection invalid JSON", config.published, []);
    }
    const currentEndpoints = extractEndpoints(currentJson);
    const publishedEndpoints = extractEndpoints(publishedJson);
    const added = [...currentEndpoints].filter((e) => !publishedEndpoints.has(e)).sort();
    const removed = [...publishedEndpoints].filter((e) => !currentEndpoints.has(e)).sort();
    if (added.length === 0 && removed.length === 0) {
        return {
            hasDrift: false,
            summary: "No Postman collection drift detected",
            evidenceFiles: [],
            impactedDocs: [config.published],
        };
    }
    const lines = [];
    if (added.length) {
        lines.push(`Added endpoints (${added.length}):`);
        lines.push(...added.map((v) => `+ ${v}`));
    }
    if (removed.length) {
        lines.push(`Removed endpoints (${removed.length}):`);
        lines.push(...removed.map((v) => `- ${v}`));
    }
    const summary = lines.join("\n");
    const diffPath = node_path_1.default.join(evidenceDir, "postman.diff.txt");
    node_fs_1.default.writeFileSync(diffPath, ["# Postman Drift Summary", summary, "", "# Current endpoints", (0, json_1.stableStringify)([...currentEndpoints].sort()), "", "# Published endpoints", (0, json_1.stableStringify)([...publishedEndpoints].sort())].join("\n"), "utf8");
    // Tier-1 signal: a concrete endpoint diff is strong evidence of drift.
    return {
        hasDrift: true,
        summary,
        evidenceFiles: [diffPath],
        impactedDocs: [config.published],
        signal: {
            kind: "postman_diff",
            tier: 1,
            confidence: 0.95,
            evidence: [diffPath],
        },
    };
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.getSpecDetector = getSpecDetector;
|
|
4
|
+
exports.getSupportedFormats = getSupportedFormats;
|
|
5
|
+
const openapi_1 = require("./openapi");
|
|
6
|
+
const swagger2_1 = require("./swagger2");
|
|
7
|
+
const graphql_1 = require("./graphql");
|
|
8
|
+
const fern_1 = require("./fern");
|
|
9
|
+
const postman_1 = require("./postman");
|
|
10
|
+
// Maps each supported spec `format` string to its drift-detector implementation.
// NOTE(review): keys presumably must stay in sync with the format values accepted
// by the config schema — verify against config/schema.js.
const registry = {
    openapi3: openapi_1.detectOpenApiSpecDrift,
    swagger2: swagger2_1.detectSwagger2SpecDrift,
    graphql: graphql_1.detectGraphQLSpecDrift,
    fern: fern_1.detectFernSpecDrift,
    postman: postman_1.detectPostmanSpecDrift,
};
|
|
17
|
+
/**
 * Look up the drift detector registered for `format`.
 * @param {string} format One of the registry keys (e.g. "openapi3").
 * @returns {Function} the matching detector.
 * @throws {Error} when no detector is registered for the format.
 */
function getSpecDetector(format) {
    const found = registry[format];
    if (found) {
        return found;
    }
    throw new Error(`Unknown spec format: ${format}`);
}
|
|
24
|
+
/** List every spec format string that has a registered detector. */
function getSupportedFormats() {
    const formats = Object.keys(registry);
    return formats;
}
|
|
@@ -0,0 +1,229 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
36
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
|
+
};
|
|
38
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
+
exports.detectSwagger2SpecDrift = detectSwagger2SpecDrift;
|
|
40
|
+
const node_fs_1 = __importDefault(require("node:fs"));
|
|
41
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
42
|
+
const fs_1 = require("../utils/fs");
|
|
43
|
+
const json_1 = require("../utils/json");
|
|
44
|
+
const fetch_1 = require("../utils/fetch");
|
|
45
|
+
/**
 * Resolve a local JSON pointer reference (e.g. "#/definitions/Pet") against `spec`.
 * Returns null for non-local refs; returns undefined when a path segment is missing.
 */
function resolveRef(spec, ref) {
    if (!ref || !ref.startsWith("#/")) {
        return null;
    }
    // Walk the pointer segments, tolerating missing intermediate nodes.
    return ref
        .slice(2)
        .split("/")
        .reduce((node, segment) => node?.[segment], spec);
}
|
|
55
|
+
/**
 * Collect "<METHOD> <path>: <field>" keys for every top-level property of each
 * operation's 200-response schema in a Swagger 2 spec.
 * Local $refs are resolved against the whole spec, so references to any
 * top-level section (definitions, responses, parameters, ...) work — the
 * previous implementation only resolved refs into `paths`/`definitions`.
 * @param {object|null|undefined} spec Parsed Swagger 2 document.
 * @returns {Set<string>} response-field keys, e.g. "GET /pets: id".
 */
function getResponseFieldsSwagger2(spec) {
    const fields = new Set();
    const paths = spec?.paths ?? {};
    for (const [pathName, pathItem] of Object.entries(paths)) {
        const item = pathItem;
        for (const method of ["get", "post", "put", "patch", "delete"]) {
            const op = item[method];
            if (!op)
                continue;
            // Only the 200 response is inspected for documented fields.
            const res = op.responses?.["200"];
            if (!res)
                continue;
            let schema = res.schema;
            if (schema?.$ref) {
                // Resolve against the full spec; fall back to the raw schema on failure.
                schema = resolveRef(spec, schema.$ref) ?? schema;
            }
            const properties = schema?.properties ?? {};
            for (const key of Object.keys(properties)) {
                fields.add(`${method.toUpperCase()} ${pathName}: ${key}`);
            }
        }
    }
    return fields;
}
|
|
80
|
+
/**
 * Describe added/removed 200-response fields between two Swagger 2 specs
 * as a human-readable multi-line summary.
 */
function summarizeSwagger2Delta(previousSpec, currentSpec) {
    const before = getResponseFieldsSwagger2(previousSpec);
    const after = getResponseFieldsSwagger2(currentSpec);
    const added = [...after].filter((field) => !before.has(field)).sort();
    const removed = [...before].filter((field) => !after.has(field)).sort();
    const out = [];
    if (added.length > 0) {
        out.push(`Added response fields (${added.length}):`, ...added.map((value) => `+ ${value}`));
    }
    if (removed.length > 0) {
        out.push(`Removed response fields (${removed.length}):`, ...removed.map((value) => `- ${value}`));
    }
    if (out.length === 0) {
        // The specs differ somewhere, just not in 200-response top-level fields.
        return "Swagger 2 changed, but no top-level response field changes were detected in 200 responses.";
    }
    return out.join("\n");
}
|
|
99
|
+
/**
 * Obtain the current Swagger 2 spec text from a url, local-file, or
 * export-command source.
 * @throws {Error} when the path is missing or the export command fails.
 */
async function getCurrentContent(config) {
    const src = config.current;
    if (src.type === "url") {
        // Remote spec: download it.
        return (0, fetch_1.fetchSpec)(src.url);
    }
    if (src.type === "local") {
        // On-disk spec: read it directly.
        if (node_fs_1.default.existsSync(src.path)) {
            return node_fs_1.default.readFileSync(src.path, "utf8");
        }
        throw new Error(`Swagger 2 local path not found: ${src.path}`);
    }
    // export: run command then read outputPath
    const { execCommand } = await Promise.resolve().then(() => __importStar(require("../utils/exec")));
    const run = await execCommand(src.command);
    if (run.exitCode !== 0) {
        throw new Error(`Swagger 2 export failed: ${run.stderr}`);
    }
    if (!node_fs_1.default.existsSync(src.outputPath)) {
        throw new Error(`Swagger 2 export did not create: ${src.outputPath}`);
    }
    return node_fs_1.default.readFileSync(src.outputPath, "utf8");
}
|
|
121
|
+
/**
 * Build a tier-2 "weak evidence" drift result for Swagger 2 checks.
 * Used when the comparison itself could not be completed
 * (fetch/export failure, missing published file, unparseable JSON).
 */
function weakSwagger2SignalResult(summary, publishedPath, evidence) {
    return {
        hasDrift: true,
        summary,
        evidenceFiles: evidence,
        impactedDocs: [publishedPath],
        signal: {
            kind: "weak_evidence",
            tier: 2,
            confidence: 0.35,
            evidence,
        },
    };
}
/**
 * Detect drift between the current Swagger 2 spec and the published copy by
 * comparing their key-stable JSON serializations.
 * Writes a normalized diff into `evidenceDir` when drift is found.
 * @param {object} config Swagger 2 spec config (format, current, published).
 * @param {string} evidenceDir Directory where evidence files are written.
 * @returns {Promise<object>} drift result (hasDrift, summary, evidence, signal).
 */
async function detectSwagger2SpecDrift(config, evidenceDir) {
    if (config.format !== "swagger2") {
        // Wrong detector for this config; report no drift and no impacted docs.
        return {
            hasDrift: false,
            summary: `Format ${config.format} is not swagger2`,
            evidenceFiles: [],
            impactedDocs: [],
        };
    }
    (0, fs_1.ensureDir)(evidenceDir);
    let currentContent;
    try {
        currentContent = await getCurrentContent(config);
    }
    catch (err) {
        // Could not obtain the current spec: log the failure as evidence.
        const msg = err instanceof Error ? err.message : String(err);
        const logPath = node_path_1.default.join(evidenceDir, "swagger2-export.log");
        node_fs_1.default.writeFileSync(logPath, msg, "utf8");
        return weakSwagger2SignalResult(`Swagger 2 current spec failed: ${msg}`, config.published, [logPath]);
    }
    if (!node_fs_1.default.existsSync(config.published)) {
        return weakSwagger2SignalResult("Swagger 2 published file missing", config.published, []);
    }
    const publishedRaw = node_fs_1.default.readFileSync(config.published, "utf8");
    let currentJson;
    let publishedJson;
    try {
        currentJson = JSON.parse(currentContent);
        publishedJson = JSON.parse(publishedRaw);
    }
    catch {
        return weakSwagger2SignalResult("Swagger 2 invalid JSON", config.published, []);
    }
    if (currentJson.swagger !== "2.0") {
        // Not a Swagger 2 document — do not report drift for other spec flavors.
        return {
            hasDrift: false,
            summary: "Not a Swagger 2.0 spec",
            evidenceFiles: [],
            impactedDocs: [],
        };
    }
    const normalizedCurrent = (0, json_1.stableStringify)(currentJson);
    const normalizedPublished = (0, json_1.stableStringify)(publishedJson);
    if (normalizedCurrent === normalizedPublished) {
        return {
            hasDrift: false,
            summary: "No Swagger 2 drift detected",
            evidenceFiles: [],
            impactedDocs: [config.published],
        };
    }
    const summary = summarizeSwagger2Delta(publishedJson, currentJson);
    const diffPath = node_path_1.default.join(evidenceDir, "swagger2.diff.txt");
    node_fs_1.default.writeFileSync(diffPath, [
        "# Swagger 2 Drift Summary",
        summary,
        "",
        "# Published (normalized)",
        normalizedPublished,
        "",
        "# Current (normalized)",
        normalizedCurrent,
    ].join("\n"), "utf8");
    // Tier-1 signal: a concrete normalized diff is strong evidence of drift.
    return {
        hasDrift: true,
        summary,
        evidenceFiles: [diffPath],
        impactedDocs: [config.published],
        signal: {
            kind: "swagger2_diff",
            tier: 1,
            confidence: 0.95,
            evidence: [diffPath],
        },
    };
}
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.fetchSpec = fetchSpec;
|
|
7
|
+
exports.fetchSpecPost = fetchSpecPost;
|
|
8
|
+
const node_https_1 = __importDefault(require("node:https"));
|
|
9
|
+
const node_http_1 = __importDefault(require("node:http"));
|
|
10
|
+
const TIMEOUT_MS = 30_000;
|
|
11
|
+
/**
 * Build a proxy agent from HTTPS_PROXY/HTTP_PROXY, when configured.
 * Returns undefined when no proxy is set or the optional
 * https-proxy-agent dependency is not installed.
 */
function getAgent(_url) {
    const proxyUrl = process.env.HTTPS_PROXY || process.env.HTTP_PROXY;
    if (!proxyUrl) {
        return undefined;
    }
    try {
        const { HttpsProxyAgent } = require("https-proxy-agent");
        return new HttpsProxyAgent(proxyUrl);
    }
    catch {
        // Optional dependency not installed
        return undefined;
    }
}
|
|
24
|
+
/**
 * Fetch a URL and return the response body as string.
 * Respects HTTP_PROXY, HTTPS_PROXY; 30s timeout; follows redirects.
 * Rejects on HTTP status >= 400 (previously an error page body was returned
 * as if it were the spec) and limits redirect chains to avoid loops.
 * @param {string} url Absolute http(s) URL.
 * @param {number} [redirectsLeft=5] Remaining redirects allowed.
 * @returns {Promise<string>} response body decoded as UTF-8.
 */
async function fetchSpec(url, redirectsLeft = 5) {
    const parsed = new URL(url);
    const isHttps = parsed.protocol === "https:";
    const agent = getAgent(url);
    return new Promise((resolve, reject) => {
        const req = (isHttps ? node_https_1.default : node_http_1.default).get(url, { agent: agent ?? undefined, timeout: TIMEOUT_MS }, (res) => {
            const redirect = res.headers.location;
            if (redirect && [301, 302, 307, 308].includes(res.statusCode ?? 0)) {
                req.destroy();
                if (redirectsLeft <= 0) {
                    reject(new Error(`Too many redirects fetching ${url}`));
                    return;
                }
                // Follow the redirect, resolving relative Location headers.
                fetchSpec(new URL(redirect, url).href, redirectsLeft - 1).then(resolve).catch(reject);
                return;
            }
            if ((res.statusCode ?? 0) >= 400) {
                res.resume(); // drain the body so the socket is released
                reject(new Error(`Fetch failed with HTTP ${res.statusCode} for ${url}`));
                return;
            }
            const chunks = [];
            res.on("data", (chunk) => chunks.push(chunk));
            res.on("end", () => resolve(Buffer.concat(chunks).toString("utf8")));
            res.on("error", reject);
        });
        req.on("timeout", () => {
            req.destroy();
            reject(new Error(`Fetch timeout after ${TIMEOUT_MS}ms`));
        });
        req.on("error", reject);
    });
}
|
|
52
|
+
/**
 * POST to a URL with JSON body (e.g. GraphQL introspection).
 * Respects the configured proxy agent and the module-wide timeout;
 * resolves with the raw response body as a UTF-8 string.
 */
async function fetchSpecPost(url, body) {
    const target = new URL(url);
    const secure = target.protocol === "https:";
    const agent = getAgent(url);
    const payload = JSON.stringify(body);
    const transport = secure ? node_https_1.default : node_http_1.default;
    return new Promise((resolve, reject) => {
        const requestOptions = {
            hostname: target.hostname,
            port: target.port || (secure ? 443 : 80),
            path: target.pathname + target.search,
            method: "POST",
            headers: {
                "Content-Type": "application/json",
                "Content-Length": Buffer.byteLength(payload),
            },
            ...(agent && { agent }),
            timeout: TIMEOUT_MS,
        };
        const req = transport.request(requestOptions, (res) => {
            const parts = [];
            res.on("data", (part) => parts.push(part));
            res.on("end", () => resolve(Buffer.concat(parts).toString("utf8")));
            res.on("error", reject);
        });
        req.on("timeout", () => {
            req.destroy();
            reject(new Error(`Fetch timeout after ${TIMEOUT_MS}ms`));
        });
        req.on("error", reject);
        req.write(payload);
        req.end();
    });
}
|
package/dist/src/utils/git.js
CHANGED
|
@@ -1,9 +1,29 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.resolveDefaultBaseHead = resolveDefaultBaseHead;
|
|
3
4
|
exports.gitChangedPaths = gitChangedPaths;
|
|
4
5
|
exports.gitDiffSummary = gitDiffSummary;
|
|
5
6
|
exports.gitCommitList = gitCommitList;
|
|
6
7
|
const exec_1 = require("./exec");
|
|
8
|
+
/** Resolve default base/head when not provided. Uses GITHUB_* in CI, else merge-base(main, headRef)..headRef. */
async function resolveDefaultBaseHead(headRef = process.env.GITHUB_SHA ?? "HEAD") {
    const headSha = headRef;
    const ciBase = process.env.GITHUB_BASE_SHA;
    if (ciBase) {
        // CI supplied an explicit base commit.
        return { baseSha: ciBase, headSha };
    }
    // Try the merge-base with a conventional default branch.
    const candidates = ["origin/main", "origin/master", "main", "master"];
    for (const branch of candidates) {
        const mergeBase = await (0, exec_1.execCommand)(`git merge-base ${branch} ${headRef}`);
        const sha = mergeBase.stdout.trim();
        if (mergeBase.exitCode === 0 && sha) {
            return { baseSha: sha, headSha };
        }
    }
    // No default branch found: fall back to the parent commit, then head itself.
    const parent = await (0, exec_1.execCommand)(`git rev-parse ${headRef}^`);
    const parentSha = parent.stdout.trim();
    if (parent.exitCode === 0 && parentSha) {
        return { baseSha: parentSha, headSha };
    }
    return { baseSha: headSha, headSha };
}
|
|
7
27
|
async function gitChangedPaths(baseSha, headSha) {
|
|
8
28
|
const res = await (0, exec_1.execCommand)(`git diff --name-only ${baseSha} ${headSha}`);
|
|
9
29
|
if (res.exitCode !== 0) {
|