@doccov/api 0.5.0 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +6 -0
- package/api/index.ts +105 -26
- package/migrations/005_coverage_sdk_field_names.ts +41 -0
- package/package.json +2 -2
- package/src/index.ts +6 -0
- package/src/routes/coverage.ts +19 -32
- package/src/routes/demo.ts +323 -14
- package/src/routes/github-app.ts +6 -6
- package/src/routes/spec-v1.ts +165 -0
- package/src/routes/spec.ts +186 -0
- package/src/utils/github-checks.ts +231 -11
- package/src/utils/remote-analyzer.ts +12 -12
- package/src/utils/spec-cache.ts +131 -0
- package/src/utils/spec-diff-core.ts +406 -0
|
@@ -0,0 +1,406 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Core spec diff logic shared between routes.
|
|
3
|
+
* Provides full CLI-parity diff with breaking changes, member changes, etc.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { rm } from 'node:fs/promises';
|
|
7
|
+
import { tmpdir } from 'node:os';
|
|
8
|
+
import { join } from 'node:path';
|
|
9
|
+
import {
|
|
10
|
+
DocCov,
|
|
11
|
+
diffSpecWithDocs,
|
|
12
|
+
enrichSpec,
|
|
13
|
+
type MarkdownDocFile,
|
|
14
|
+
parseMarkdownFiles,
|
|
15
|
+
type SpecDiffWithDocs,
|
|
16
|
+
} from '@doccov/sdk';
|
|
17
|
+
import type { OpenPkg } from '@openpkg-ts/spec';
|
|
18
|
+
import { getTokenByInstallationId } from './github-app';
|
|
19
|
+
import { getCachedDiff, getCachedSpec, setCachedDiff, setCachedSpec } from './spec-cache';
|
|
20
|
+
|
|
21
|
+
/**
 * Source for fetching a spec: a GitHub repository location plus the
 * GitHub App installation whose token authorizes access to it.
 */
export interface SpecSource {
  /** Repository owner (user or organization). */
  owner: string;
  /** Repository name. */
  repo: string;
  /** Branch name, tag, or full 40-character commit SHA. */
  ref: string;
  /** GitHub App installation id used to mint an access token. */
  installationId: string;
}
|
|
30
|
+
|
|
31
|
+
/**
 * Options for diff computation.
 *
 * NOTE(review): `includeDocsImpact` and `markdownPatterns` are declared but
 * not consumed anywhere in this module — confirm whether callers/routes are
 * expected to act on them before computing the diff.
 */
export interface DiffOptions {
  /** Include docs impact analysis */
  includeDocsImpact?: boolean;
  /** Markdown file patterns to fetch */
  markdownPatterns?: string[];
  /** Direct markdown files (for upload mode); forwarded to `diffSpecWithDocs`. */
  markdownFiles?: MarkdownDocFile[];
}
|
|
42
|
+
|
|
43
|
+
/**
 * Result from diff computation, including resolved ref/SHA pairs and
 * whether the diff was served from cache.
 */
export interface DiffResult {
  /** Full diff payload produced by `diffSpecWithDocs` (or the diff cache). */
  diff: SpecDiffWithDocs;
  /** Base side: the ref as requested plus the SHA it resolved to. */
  base: { ref: string; sha: string };
  /** Head side: the ref as requested plus the SHA it resolved to. */
  head: { ref: string; sha: string };
  /** ISO-8601 timestamp of when this response object was built. */
  generatedAt: string;
  /** True when `diff` came from the diff cache rather than fresh computation. */
  cached: boolean;
}
|
|
53
|
+
|
|
54
|
+
/**
|
|
55
|
+
* Shallow clone a GitHub repo to a temp directory.
|
|
56
|
+
*/
|
|
57
|
+
async function cloneRepo(
|
|
58
|
+
owner: string,
|
|
59
|
+
repo: string,
|
|
60
|
+
ref: string,
|
|
61
|
+
authToken: string,
|
|
62
|
+
): Promise<string> {
|
|
63
|
+
const tmpDir = join(tmpdir(), `doccov-diff-${owner}-${repo}-${Date.now()}`);
|
|
64
|
+
|
|
65
|
+
const cloneUrl = `https://x-access-token:${authToken}@github.com/${owner}/${repo}.git`;
|
|
66
|
+
|
|
67
|
+
const proc = Bun.spawn(['git', 'clone', '--depth', '1', '--branch', ref, cloneUrl, tmpDir], {
|
|
68
|
+
stdout: 'pipe',
|
|
69
|
+
stderr: 'pipe',
|
|
70
|
+
});
|
|
71
|
+
|
|
72
|
+
const exitCode = await proc.exited;
|
|
73
|
+
|
|
74
|
+
if (exitCode !== 0) {
|
|
75
|
+
const stderr = await new Response(proc.stderr).text();
|
|
76
|
+
|
|
77
|
+
// Try fetching specific SHA if branch clone fails
|
|
78
|
+
if (ref.length === 40) {
|
|
79
|
+
const shallowProc = Bun.spawn(['git', 'clone', '--depth', '1', cloneUrl, tmpDir], {
|
|
80
|
+
stdout: 'pipe',
|
|
81
|
+
stderr: 'pipe',
|
|
82
|
+
});
|
|
83
|
+
await shallowProc.exited;
|
|
84
|
+
|
|
85
|
+
const fetchProc = Bun.spawn(['git', '-C', tmpDir, 'fetch', 'origin', ref], {
|
|
86
|
+
stdout: 'pipe',
|
|
87
|
+
stderr: 'pipe',
|
|
88
|
+
});
|
|
89
|
+
await fetchProc.exited;
|
|
90
|
+
|
|
91
|
+
const checkoutProc = Bun.spawn(['git', '-C', tmpDir, 'checkout', ref], {
|
|
92
|
+
stdout: 'pipe',
|
|
93
|
+
stderr: 'pipe',
|
|
94
|
+
});
|
|
95
|
+
const checkoutExit = await checkoutProc.exited;
|
|
96
|
+
|
|
97
|
+
if (checkoutExit !== 0) {
|
|
98
|
+
throw new Error(`Failed to checkout ${ref}: ${stderr}`);
|
|
99
|
+
}
|
|
100
|
+
} else {
|
|
101
|
+
throw new Error(`Failed to clone ${owner}/${repo}@${ref}: ${stderr}`);
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
return tmpDir;
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
/**
|
|
109
|
+
* Detect the entry point for a package.
|
|
110
|
+
*/
|
|
111
|
+
async function detectEntryPoint(repoDir: string): Promise<string | null> {
|
|
112
|
+
try {
|
|
113
|
+
const packageJsonPath = join(repoDir, 'package.json');
|
|
114
|
+
const packageJson = await Bun.file(packageJsonPath).json();
|
|
115
|
+
|
|
116
|
+
// Check exports first (modern packages)
|
|
117
|
+
if (packageJson.exports) {
|
|
118
|
+
const mainExport = packageJson.exports['.'];
|
|
119
|
+
if (typeof mainExport === 'string') {
|
|
120
|
+
return mainExport.replace(/^\.\//, '');
|
|
121
|
+
}
|
|
122
|
+
if (mainExport?.import) {
|
|
123
|
+
const importPath =
|
|
124
|
+
typeof mainExport.import === 'string' ? mainExport.import : mainExport.import.default;
|
|
125
|
+
if (importPath) return importPath.replace(/^\.\//, '');
|
|
126
|
+
}
|
|
127
|
+
if (mainExport?.types) {
|
|
128
|
+
return mainExport.types.replace(/^\.\//, '');
|
|
129
|
+
}
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
// Check types field
|
|
133
|
+
if (packageJson.types) {
|
|
134
|
+
return packageJson.types.replace(/^\.\//, '');
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
// Check main field
|
|
138
|
+
if (packageJson.main) {
|
|
139
|
+
const main = packageJson.main.replace(/^\.\//, '');
|
|
140
|
+
const tsMain = main.replace(/\.js$/, '.ts');
|
|
141
|
+
const tsxMain = main.replace(/\.js$/, '.tsx');
|
|
142
|
+
|
|
143
|
+
const tsFile = Bun.file(join(repoDir, tsMain));
|
|
144
|
+
if (await tsFile.exists()) return tsMain;
|
|
145
|
+
|
|
146
|
+
const tsxFile = Bun.file(join(repoDir, tsxMain));
|
|
147
|
+
if (await tsxFile.exists()) return tsxMain;
|
|
148
|
+
|
|
149
|
+
return main;
|
|
150
|
+
}
|
|
151
|
+
|
|
152
|
+
// Common fallbacks
|
|
153
|
+
const fallbacks = ['src/index.ts', 'src/index.tsx', 'index.ts', 'lib/index.ts'];
|
|
154
|
+
for (const fallback of fallbacks) {
|
|
155
|
+
const file = Bun.file(join(repoDir, fallback));
|
|
156
|
+
if (await file.exists()) return fallback;
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
return null;
|
|
160
|
+
} catch {
|
|
161
|
+
return null;
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
/**
|
|
166
|
+
* Resolve a git ref to a SHA
|
|
167
|
+
*/
|
|
168
|
+
async function resolveRefToSha(
|
|
169
|
+
owner: string,
|
|
170
|
+
repo: string,
|
|
171
|
+
ref: string,
|
|
172
|
+
authToken: string,
|
|
173
|
+
): Promise<string> {
|
|
174
|
+
// If already looks like a SHA, return as-is
|
|
175
|
+
if (/^[a-f0-9]{40}$/i.test(ref)) {
|
|
176
|
+
return ref;
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
// Resolve via GitHub API
|
|
180
|
+
const url = `https://api.github.com/repos/${owner}/${repo}/commits/${ref}`;
|
|
181
|
+
const res = await fetch(url, {
|
|
182
|
+
headers: {
|
|
183
|
+
Authorization: `Bearer ${authToken}`,
|
|
184
|
+
Accept: 'application/vnd.github.v3+json',
|
|
185
|
+
'User-Agent': 'DocCov',
|
|
186
|
+
},
|
|
187
|
+
});
|
|
188
|
+
|
|
189
|
+
if (!res.ok) {
|
|
190
|
+
throw new Error(`Failed to resolve ref ${ref}: ${res.status}`);
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
const data = (await res.json()) as { sha: string };
|
|
194
|
+
return data.sha;
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
/**
|
|
198
|
+
* Try to fetch existing openpkg.json from repo
|
|
199
|
+
*/
|
|
200
|
+
async function fetchExistingSpec(
|
|
201
|
+
owner: string,
|
|
202
|
+
repo: string,
|
|
203
|
+
ref: string,
|
|
204
|
+
authToken: string,
|
|
205
|
+
): Promise<OpenPkg | null> {
|
|
206
|
+
const url = `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/openpkg.json`;
|
|
207
|
+
|
|
208
|
+
try {
|
|
209
|
+
const res = await fetch(url, {
|
|
210
|
+
headers: {
|
|
211
|
+
Authorization: `Bearer ${authToken}`,
|
|
212
|
+
'User-Agent': 'DocCov',
|
|
213
|
+
},
|
|
214
|
+
});
|
|
215
|
+
|
|
216
|
+
if (res.ok) {
|
|
217
|
+
return (await res.json()) as OpenPkg;
|
|
218
|
+
}
|
|
219
|
+
} catch {
|
|
220
|
+
// Not found or error - will generate
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
return null;
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
/**
|
|
227
|
+
* Generate spec by cloning repo and running DocCov
|
|
228
|
+
*/
|
|
229
|
+
async function generateSpec(
|
|
230
|
+
owner: string,
|
|
231
|
+
repo: string,
|
|
232
|
+
ref: string,
|
|
233
|
+
authToken: string,
|
|
234
|
+
): Promise<OpenPkg> {
|
|
235
|
+
let tmpDir: string | null = null;
|
|
236
|
+
|
|
237
|
+
try {
|
|
238
|
+
tmpDir = await cloneRepo(owner, repo, ref, authToken);
|
|
239
|
+
|
|
240
|
+
const entryPoint = await detectEntryPoint(tmpDir);
|
|
241
|
+
if (!entryPoint) {
|
|
242
|
+
throw new Error(`No entry point found for ${owner}/${repo}`);
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
const entryPath = join(tmpDir, entryPoint);
|
|
246
|
+
|
|
247
|
+
const doccov = new DocCov({
|
|
248
|
+
resolveExternalTypes: false,
|
|
249
|
+
useCache: false,
|
|
250
|
+
});
|
|
251
|
+
|
|
252
|
+
const result = await doccov.analyzeFileWithDiagnostics(entryPath);
|
|
253
|
+
return result.spec;
|
|
254
|
+
} finally {
|
|
255
|
+
if (tmpDir) {
|
|
256
|
+
try {
|
|
257
|
+
await rm(tmpDir, { recursive: true, force: true });
|
|
258
|
+
} catch {
|
|
259
|
+
// Ignore cleanup errors
|
|
260
|
+
}
|
|
261
|
+
}
|
|
262
|
+
}
|
|
263
|
+
}
|
|
264
|
+
|
|
265
|
+
/**
|
|
266
|
+
* Get spec for a ref (checks cache -> existing openpkg.json -> generates)
|
|
267
|
+
*/
|
|
268
|
+
export async function getSpecForRef(source: SpecSource): Promise<{ spec: OpenPkg; sha: string }> {
|
|
269
|
+
const { owner, repo, ref, installationId } = source;
|
|
270
|
+
|
|
271
|
+
const token = await getTokenByInstallationId(installationId);
|
|
272
|
+
if (!token) {
|
|
273
|
+
throw new Error(`No token for installation ${installationId}`);
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
// Resolve ref to SHA for caching
|
|
277
|
+
const sha = await resolveRefToSha(owner, repo, ref, token);
|
|
278
|
+
|
|
279
|
+
// Check cache first
|
|
280
|
+
const cached = getCachedSpec(owner, repo, sha);
|
|
281
|
+
if (cached) {
|
|
282
|
+
return { spec: cached, sha };
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
// Try existing openpkg.json
|
|
286
|
+
let spec = await fetchExistingSpec(owner, repo, sha, token);
|
|
287
|
+
|
|
288
|
+
// Generate if not found
|
|
289
|
+
if (!spec) {
|
|
290
|
+
spec = await generateSpec(owner, repo, sha, token);
|
|
291
|
+
}
|
|
292
|
+
|
|
293
|
+
// Enrich and cache
|
|
294
|
+
const enriched = enrichSpec(spec) as OpenPkg;
|
|
295
|
+
setCachedSpec(owner, repo, sha, enriched);
|
|
296
|
+
|
|
297
|
+
return { spec: enriched, sha };
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
/**
|
|
301
|
+
* Compute full diff between two refs with CLI parity
|
|
302
|
+
*/
|
|
303
|
+
export async function computeFullDiff(
|
|
304
|
+
base: SpecSource,
|
|
305
|
+
head: SpecSource,
|
|
306
|
+
options: DiffOptions = {},
|
|
307
|
+
): Promise<DiffResult> {
|
|
308
|
+
// Get both specs in parallel
|
|
309
|
+
const [baseResult, headResult] = await Promise.all([getSpecForRef(base), getSpecForRef(head)]);
|
|
310
|
+
|
|
311
|
+
// Check diff cache
|
|
312
|
+
const cachedDiff = getCachedDiff(baseResult.sha, headResult.sha);
|
|
313
|
+
if (cachedDiff) {
|
|
314
|
+
return {
|
|
315
|
+
diff: cachedDiff,
|
|
316
|
+
base: { ref: base.ref, sha: baseResult.sha },
|
|
317
|
+
head: { ref: head.ref, sha: headResult.sha },
|
|
318
|
+
generatedAt: new Date().toISOString(),
|
|
319
|
+
cached: true,
|
|
320
|
+
};
|
|
321
|
+
}
|
|
322
|
+
|
|
323
|
+
// Compute diff
|
|
324
|
+
const diff = diffSpecWithDocs(baseResult.spec, headResult.spec, {
|
|
325
|
+
markdownFiles: options.markdownFiles,
|
|
326
|
+
});
|
|
327
|
+
|
|
328
|
+
// Cache result
|
|
329
|
+
setCachedDiff(baseResult.sha, headResult.sha, diff);
|
|
330
|
+
|
|
331
|
+
return {
|
|
332
|
+
diff,
|
|
333
|
+
base: { ref: base.ref, sha: baseResult.sha },
|
|
334
|
+
head: { ref: head.ref, sha: headResult.sha },
|
|
335
|
+
generatedAt: new Date().toISOString(),
|
|
336
|
+
cached: false,
|
|
337
|
+
};
|
|
338
|
+
}
|
|
339
|
+
|
|
340
|
+
/**
|
|
341
|
+
* Direct diff from uploaded specs (no GitHub access needed)
|
|
342
|
+
*/
|
|
343
|
+
export function diffSpecs(
|
|
344
|
+
baseSpec: OpenPkg,
|
|
345
|
+
headSpec: OpenPkg,
|
|
346
|
+
markdownFiles?: Array<{ path: string; content: string }>,
|
|
347
|
+
): SpecDiffWithDocs {
|
|
348
|
+
// Parse markdown files if provided
|
|
349
|
+
const parsedMarkdown = markdownFiles ? parseMarkdownFiles(markdownFiles) : undefined;
|
|
350
|
+
|
|
351
|
+
return diffSpecWithDocs(baseSpec, headSpec, {
|
|
352
|
+
markdownFiles: parsedMarkdown,
|
|
353
|
+
});
|
|
354
|
+
}
|
|
355
|
+
|
|
356
|
+
/**
|
|
357
|
+
* Format diff result for API response
|
|
358
|
+
*/
|
|
359
|
+
export function formatDiffResponse(result: DiffResult): {
|
|
360
|
+
breaking: string[];
|
|
361
|
+
nonBreaking: string[];
|
|
362
|
+
docsOnly: string[];
|
|
363
|
+
coverageDelta: number;
|
|
364
|
+
oldCoverage: number;
|
|
365
|
+
newCoverage: number;
|
|
366
|
+
driftIntroduced: number;
|
|
367
|
+
driftResolved: number;
|
|
368
|
+
newUndocumented: string[];
|
|
369
|
+
improvedExports: string[];
|
|
370
|
+
regressedExports: string[];
|
|
371
|
+
memberChanges?: unknown[];
|
|
372
|
+
categorizedBreaking?: unknown[];
|
|
373
|
+
docsImpact?: unknown;
|
|
374
|
+
base: { ref: string; sha: string };
|
|
375
|
+
head: { ref: string; sha: string };
|
|
376
|
+
generatedAt: string;
|
|
377
|
+
cached: boolean;
|
|
378
|
+
} {
|
|
379
|
+
const { diff } = result;
|
|
380
|
+
|
|
381
|
+
return {
|
|
382
|
+
// Core diff fields
|
|
383
|
+
breaking: diff.breaking,
|
|
384
|
+
nonBreaking: diff.nonBreaking,
|
|
385
|
+
docsOnly: diff.docsOnly,
|
|
386
|
+
coverageDelta: diff.coverageDelta,
|
|
387
|
+
oldCoverage: diff.oldCoverage,
|
|
388
|
+
newCoverage: diff.newCoverage,
|
|
389
|
+
driftIntroduced: diff.driftIntroduced,
|
|
390
|
+
driftResolved: diff.driftResolved,
|
|
391
|
+
newUndocumented: diff.newUndocumented,
|
|
392
|
+
improvedExports: diff.improvedExports,
|
|
393
|
+
regressedExports: diff.regressedExports,
|
|
394
|
+
|
|
395
|
+
// Extended fields
|
|
396
|
+
memberChanges: diff.memberChanges,
|
|
397
|
+
categorizedBreaking: diff.categorizedBreaking,
|
|
398
|
+
docsImpact: diff.docsImpact,
|
|
399
|
+
|
|
400
|
+
// Metadata
|
|
401
|
+
base: result.base,
|
|
402
|
+
head: result.head,
|
|
403
|
+
generatedAt: result.generatedAt,
|
|
404
|
+
cached: result.cached,
|
|
405
|
+
};
|
|
406
|
+
}
|