@doccov/api 0.5.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,6 +3,7 @@
  * Uses the /plan and /execute-stream endpoints from the Vercel API
  */

+ import { fetchGitHubContext, parseScanGitHubUrl } from '@doccov/sdk';
  import { Hono } from 'hono';
  import { streamSSE } from 'hono/streaming';
  import { anonymousRateLimit } from '../middleware/anonymous-rate-limit';
@@ -86,20 +87,85 @@ async function fetchNpmPackage(packageName: string): Promise<NpmPackageInfo> {
  }

  /**
- * Analysis result summary (matches SpecSummary from Vercel API)
+ * Analysis result summary (matches SDK CoverageSnapshot naming)
  */
  interface AnalysisSummary {
  packageName: string;
  version: string;
- coverage: number;
- exportCount: number;
- documentedCount: number;
- undocumentedCount: number;
+ coverageScore: number;
+ totalExports: number;
+ documentedExports: number;
  driftCount: number;
  topUndocumented: string[];
  topDrift: Array<{ name: string; issue: string }>;
  }

+ /**
+ * Workspace package info for monorepo detection
+ */
+ interface WorkspacePackageInfo {
+ name: string;
+ path: string;
+ private: boolean;
+ }
+
+ /**
+ * Resolve workspace patterns to actual package names via GitHub API.
+ * Fetches package.json from each directory to get real package names.
+ */
+ async function resolveGitHubPackages(
+ owner: string,
+ repo: string,
+ ref: string,
+ patterns: string[],
+ ): Promise<WorkspacePackageInfo[]> {
+ const packages: WorkspacePackageInfo[] = [];
+ const seen = new Set<string>();
+
+ for (const pattern of patterns) {
+ // Extract base directory from pattern: "packages/*" -> "packages"
+ const baseDir = pattern.replace(/\/?\*\*?$/, '');
+ if (!baseDir || baseDir.includes('*')) continue;
+
+ // List directories via GitHub API
+ const contentsUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${baseDir}?ref=${ref}`;
+ const contentsRes = await fetch(contentsUrl, {
+ headers: { 'User-Agent': 'DocCov', Accept: 'application/vnd.github.v3+json' },
+ });
+
+ if (!contentsRes.ok) continue;
+
+ const contents = (await contentsRes.json()) as Array<{ name: string; type: string }>;
+
+ // Fetch package.json from each subdirectory
+ for (const item of contents) {
+ if (item.type !== 'dir') continue;
+
+ const pkgPath = `${baseDir}/${item.name}`;
+ const pkgJsonUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${pkgPath}/package.json`;
+
+ try {
+ const pkgRes = await fetch(pkgJsonUrl);
+ if (!pkgRes.ok) continue;
+
+ const pkg = (await pkgRes.json()) as { name?: string; private?: boolean };
+ if (pkg.name && !seen.has(pkg.name)) {
+ seen.add(pkg.name);
+ packages.push({
+ name: pkg.name,
+ path: pkgPath,
+ private: pkg.private ?? false,
+ });
+ }
+ } catch {
+ // Skip invalid package.json
+ }
+ }
+ }
+
+ return packages.sort((a, b) => a.name.localeCompare(b.name));
+ }
+
  // GET /demo/analyze?package=lodash
  demoRoute.get('/analyze', async (c) => {
  const packageName = c.req.query('package');
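A minimal usage sketch for the new resolveGitHubPackages helper (not part of the published diff; it assumes the function above is in scope, and the owner, repo, and workspace patterns are hypothetical placeholders):

async function demoResolve() {
  // Hypothetical monorepo coordinates; any public repo with a packages/ directory would do.
  const packages = await resolveGitHubPackages('example-org', 'example-repo', 'main', [
    'packages/*',
    'apps/*',
  ]);

  // Each entry carries the real package.json name, its repo path, and its private flag.
  for (const pkg of packages) {
    console.log(`${pkg.name} -> ${pkg.path}${pkg.private ? ' (private)' : ''}`);
  }
}

demoResolve().catch(console.error);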
@@ -228,6 +294,250 @@ demoRoute.get('/analyze', async (c) => {
  exports: number;
  documented: number;
  undocumented: number;
+ driftCount: number;
+ topUndocumented: string[];
+ topDrift: Array<{ name: string; issue: string }>;
+ };
+ error?: string;
+ };
+
+ // Forward progress events
+ if (eventType === 'progress') {
+ await sendEvent('log', { message: eventData.message || eventData.stage });
+ } else if (eventType === 'step:start') {
+ await sendEvent('status', {
+ step: eventData.stepId === 'analyze' ? 'analyze' : 'build',
+ message: eventData.name || `Running ${eventData.stepId}...`,
+ });
+ } else if (eventType === 'step:complete' && eventData.stepId) {
+ await sendEvent('log', {
+ message: `${eventData.stepId} completed`,
+ });
+ } else if (eventType === 'complete' && eventData.summary) {
+ // Transform summary to our format (SDK-aligned field names)
+ const summary: AnalysisSummary = {
+ packageName: eventData.summary.name,
+ version: eventData.summary.version,
+ coverageScore: eventData.summary.coverage,
+ totalExports: eventData.summary.exports,
+ documentedExports: eventData.summary.documented,
+ driftCount: eventData.summary.driftCount ?? 0,
+ topUndocumented: eventData.summary.topUndocumented ?? [],
+ topDrift: eventData.summary.topDrift ?? [],
+ };
+
+ await sendEvent('log', {
+ message: `Found ${summary.totalExports} exports, ${summary.documentedExports} documented`,
+ });
+
+ await sendEvent('status', {
+ step: 'complete',
+ message: 'Analysis complete!',
+ });
+
+ await sendEvent('result', { data: summary });
+ return;
+ } else if (eventType === 'error') {
+ throw new Error(eventData.error || 'Execution failed');
+ }
+ } catch (parseError) {
+ // Ignore JSON parse errors for incomplete data
+ if (parseError instanceof SyntaxError) continue;
+ throw parseError;
+ }
+ }
+ }
+ }
+ }
+
+ // If we get here without a complete event, something went wrong
+ throw new Error('Execution completed without results');
+ } catch (err) {
+ const message = err instanceof Error ? err.message : 'Analysis failed';
+ await sendEvent('error', { message });
+ }
+ });
+ });
+
+ // POST /demo/detect - detect monorepo packages from GitHub URL
+ demoRoute.post('/detect', async (c) => {
+ const body = (await c.req.json()) as { url?: string };
+
+ if (!body.url) {
+ return c.json({ error: 'GitHub URL required' }, 400);
+ }
+
+ // Validate and parse GitHub URL
+ const parsed = parseScanGitHubUrl(body.url);
+ if (!parsed) {
+ return c.json({ error: 'Invalid GitHub URL' }, 400);
+ }
+
+ try {
+ // Fetch context from GitHub
+ const context = await fetchGitHubContext(body.url);
+
+ // If not a monorepo, return simple response
+ if (!context.workspace.isMonorepo) {
+ return c.json({
+ isMonorepo: false,
+ packageManager: context.packageManager,
+ owner: context.metadata.owner,
+ repo: context.metadata.repo,
+ ref: context.ref,
+ packages: [],
+ });
+ }
+
+ // Resolve actual package names from workspace patterns
+ const patterns = context.workspace.packages || ['packages/*'];
+ const packages = await resolveGitHubPackages(
+ context.metadata.owner,
+ context.metadata.repo,
+ context.ref,
+ patterns,
+ );
+
+ return c.json({
+ isMonorepo: true,
+ packageManager: context.packageManager,
+ owner: context.metadata.owner,
+ repo: context.metadata.repo,
+ ref: context.ref,
+ packages,
+ });
+ } catch (err) {
+ const message = err instanceof Error ? err.message : 'Detection failed';
+ return c.json({ error: message }, 500);
+ }
+ });
+
+ // GET /demo/analyze-repo?url=...&package=... - analyze GitHub repo directly
+ demoRoute.get('/analyze-repo', async (c) => {
+ const repoUrl = c.req.query('url');
+ const packageName = c.req.query('package');
+
+ if (!repoUrl) {
+ return c.json({ error: 'GitHub URL required' }, 400);
+ }
+
+ // Validate GitHub URL
+ const parsed = parseScanGitHubUrl(repoUrl);
+ if (!parsed) {
+ return c.json({ error: 'Invalid GitHub URL' }, 400);
+ }
+
+ return streamSSE(c, async (stream) => {
+ const sendEvent = async (
+ type: 'status' | 'log' | 'result' | 'error',
+ data: { step?: string; message?: string; data?: unknown },
+ ) => {
+ await stream.writeSSE({
+ data: JSON.stringify({ type, ...data }),
+ event: type === 'error' ? 'error' : type === 'result' ? 'complete' : 'progress',
+ });
+ };
+
+ try {
+ // Step 1: Log repo info
+ await sendEvent('status', {
+ step: 'repo',
+ message: `Analyzing ${parsed.owner}/${parsed.repo}...`,
+ });
+
+ await sendEvent('log', {
+ message: `Repository: ${repoUrl}`,
+ });
+
+ if (packageName) {
+ await sendEvent('log', {
+ message: `Package: ${packageName}`,
+ });
+ }
+
+ // Step 2: Generate build plan via /plan endpoint
+ await sendEvent('status', {
+ step: 'plan',
+ message: 'Generating build plan...',
+ });
+
+ const planResponse = await fetch(`${VERCEL_API_URL}/plan`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ url: repoUrl,
+ package: packageName,
+ }),
+ });
+
+ if (!planResponse.ok) {
+ const errorData = (await planResponse.json()) as { error?: string };
+ throw new Error(errorData.error || `Plan generation failed: ${planResponse.status}`);
+ }
+
+ const planData = (await planResponse.json()) as {
+ plan: unknown;
+ context: { isMonorepo: boolean; packageManager: string };
+ };
+
+ await sendEvent('log', {
+ message: `Build plan ready (${planData.context.packageManager}${planData.context.isMonorepo ? ', monorepo' : ''})`,
+ });
+
+ // Step 3: Execute build plan via /execute-stream endpoint
+ await sendEvent('status', {
+ step: 'build',
+ message: 'Building and analyzing...',
+ });
+
+ const executeResponse = await fetch(`${VERCEL_API_URL}/execute-stream`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ plan: planData.plan }),
+ });
+
+ if (!executeResponse.ok || !executeResponse.body) {
+ throw new Error(`Execution failed: ${executeResponse.status}`);
+ }
+
+ // Stream the execute-stream SSE events and forward relevant ones
+ const reader = executeResponse.body.getReader();
+ const decoder = new TextDecoder();
+ let buffer = '';
+
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+
+ buffer += decoder.decode(value, { stream: true });
+ const lines = buffer.split('\n');
+ buffer = lines.pop() || '';
+
+ for (const line of lines) {
+ if (line.startsWith('event:')) {
+ const eventType = line.slice(7).trim();
+
+ // Get the next data line
+ const dataLineIndex = lines.indexOf(line) + 1;
+ if (dataLineIndex < lines.length && lines[dataLineIndex].startsWith('data:')) {
+ const dataStr = lines[dataLineIndex].slice(5).trim();
+ try {
+ const eventData = JSON.parse(dataStr) as {
+ stage?: string;
+ message?: string;
+ stepId?: string;
+ name?: string;
+ success?: boolean;
+ summary?: {
+ name: string;
+ version: string;
+ coverage: number;
+ exports: number;
+ documented: number;
+ undocumented: number;
+ driftCount: number;
+ topUndocumented: string[];
+ topDrift: Array<{ name: string; issue: string }>;
  };
  error?: string;
  };
@@ -245,21 +555,20 @@ demoRoute.get('/analyze', async (c) => {
  message: `${eventData.stepId} completed`,
  });
  } else if (eventType === 'complete' && eventData.summary) {
- // Transform summary to our format
+ // Transform summary to our format (SDK-aligned field names)
  const summary: AnalysisSummary = {
  packageName: eventData.summary.name,
  version: eventData.summary.version,
- coverage: eventData.summary.coverage,
- exportCount: eventData.summary.exports,
- documentedCount: eventData.summary.documented,
- undocumentedCount: eventData.summary.undocumented,
- driftCount: 0,
- topUndocumented: [],
- topDrift: [],
+ coverageScore: eventData.summary.coverage,
+ totalExports: eventData.summary.exports,
+ documentedExports: eventData.summary.documented,
+ driftCount: eventData.summary.driftCount ?? 0,
+ topUndocumented: eventData.summary.topUndocumented ?? [],
+ topDrift: eventData.summary.topDrift ?? [],
  };

  await sendEvent('log', {
- message: `Found ${summary.exportCount} exports, ${summary.documentedCount} documented`,
+ message: `Found ${summary.totalExports} exports, ${summary.documentedExports} documented`,
  });

  await sendEvent('status', {
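A sketch of how a client might call the new POST /demo/detect endpoint added above (illustrative only; the base URL is an assumption, and the response shape mirrors the c.json payloads in the diff):

const API_BASE = 'https://doccov-api.example.com'; // hypothetical base URL

interface DetectResponse {
  isMonorepo: boolean;
  packageManager: string;
  owner: string;
  repo: string;
  ref: string;
  packages: Array<{ name: string; path: string; private: boolean }>;
}

async function detectPackages(repoUrl: string): Promise<DetectResponse> {
  // The endpoint expects a JSON body of the form { url: string }.
  const res = await fetch(`${API_BASE}/demo/detect`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url: repoUrl }),
  });
  if (!res.ok) {
    // Error responses carry { error: string } per the route above.
    const err = (await res.json()) as { error?: string };
    throw new Error(err.error ?? `detect failed: ${res.status}`);
  }
  return (await res.json()) as DetectResponse;
}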
@@ -280,7 +280,7 @@ async function handlePushEvent(payload: {
  const result = await analyzeRemoteRepo(installationId, owner.login, repo, sha);

  if (result) {
- console.log(`[webhook] Analysis complete: ${result.coveragePercent}% coverage`);
+ console.log(`[webhook] Analysis complete: ${result.coverageScore}% coverage`);

  // Create check run with analysis results
  await createCheckRun(installationId, owner.login, repo, sha, result);
@@ -289,7 +289,7 @@ async function handlePushEvent(payload: {
  await db
  .updateTable('projects')
  .set({
- coverageScore: result.coveragePercent,
+ coverageScore: result.coverageScore,
  driftCount: result.driftCount,
  updatedAt: new Date(),
  })
@@ -333,7 +333,7 @@ async function handlePullRequestEvent(payload: {
  return;
  }

- console.log(`[webhook] PR analysis complete: ${headResult.coveragePercent}% coverage`);
+ console.log(`[webhook] PR analysis complete: ${headResult.coverageScore}% coverage`);

  // Try to get baseline from database or analyze base
  let diff: ReturnType<typeof computeAnalysisDiff> | null = null;
@@ -349,9 +349,9 @@ async function handlePullRequestEvent(payload: {
  // Use cached baseline for speed
  diff = computeAnalysisDiff(
  {
- coveragePercent: project.coverageScore,
- documentedCount: 0,
- totalCount: 0,
+ coverageScore: project.coverageScore,
+ documentedExports: 0,
+ totalExports: 0,
  driftCount: project.driftCount ?? 0,
  qualityErrors: 0,
  qualityWarnings: 0,
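Taken together, the renames in these webhook hunks (coveragePercent → coverageScore, documentedCount → documentedExports, totalCount → totalExports) imply a baseline shape along these lines for the first argument of computeAnalysisDiff (a sketch only; the actual type is defined elsewhere in the package and not shown in this diff):

// Hypothetical name for the baseline object passed to computeAnalysisDiff.
interface AnalysisBaseline {
  coverageScore: number;
  documentedExports: number;
  totalExports: number;
  driftCount: number;
  qualityErrors: number;
  qualityWarnings: number;
}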
@@ -0,0 +1,165 @@
+ /**
+ * Spec routes (v1) - API key authenticated endpoints for programmatic access
+ *
+ * POST /v1/spec/diff - Compare two specs
+ */
+
+ import { Hono } from 'hono';
+ import { z } from 'zod';
+ import { db } from '../db/client';
+ import type { ApiKeyContext } from '../middleware/api-key-auth';
+ import {
+ computeFullDiff,
+ type DiffOptions,
+ diffSpecs,
+ formatDiffResponse,
+ } from '../utils/spec-diff-core';
+
+ type Env = {
+ Variables: ApiKeyContext;
+ };
+
+ export const specV1Route = new Hono<Env>();
+
+ // Request schemas
+ const GitHubDiffSchema = z.object({
+ mode: z.literal('github'),
+ owner: z.string().min(1),
+ repo: z.string().min(1),
+ base: z.string().min(1),
+ head: z.string().min(1),
+ includeDocsImpact: z.boolean().optional(),
+ });
+
+ const SpecsDiffSchema = z.object({
+ mode: z.literal('specs'),
+ baseSpec: z.object({}).passthrough(),
+ headSpec: z.object({}).passthrough(),
+ markdownFiles: z
+ .array(
+ z.object({
+ path: z.string(),
+ content: z.string(),
+ }),
+ )
+ .optional(),
+ });
+
+ const DiffRequestSchema = z.discriminatedUnion('mode', [GitHubDiffSchema, SpecsDiffSchema]);
+
+ /**
+ * POST /v1/spec/diff - Compare two specs
+ *
+ * Supports two modes:
+ * 1. GitHub refs: Clone and compare specs from GitHub refs
+ * 2. Direct specs: Compare uploaded spec objects
+ */
+ specV1Route.post('/diff', async (c) => {
+ const org = c.get('org');
+
+ // Parse and validate request body
+ let body: z.infer<typeof DiffRequestSchema>;
+ try {
+ const rawBody = await c.req.json();
+ body = DiffRequestSchema.parse(rawBody);
+ } catch (err) {
+ if (err instanceof z.ZodError) {
+ return c.json(
+ {
+ error: 'Invalid request',
+ details: err.errors,
+ },
+ 400,
+ );
+ }
+ return c.json({ error: 'Invalid JSON body' }, 400);
+ }
+
+ try {
+ if (body.mode === 'github') {
+ // GitHub mode: need to find installation for this org
+ const { owner, repo, base, head, includeDocsImpact } = body;
+
+ // Look up installation from org
+ const installation = await db
+ .selectFrom('github_installations')
+ .where('orgId', '=', org.id)
+ .select(['installationId'])
+ .executeTakeFirst();
+
+ if (!installation) {
+ return c.json(
+ {
+ error: 'No GitHub App installation found for this repository',
+ hint: 'Install the DocCov GitHub App to compare repos',
+ },
+ 403,
+ );
+ }
+
+ // Compute diff with timeout
+ const diffOptions: DiffOptions = {
+ includeDocsImpact,
+ };
+
+ const result = await Promise.race([
+ computeFullDiff(
+ { owner, repo, ref: base, installationId: installation.installationId },
+ { owner, repo, ref: head, installationId: installation.installationId },
+ diffOptions,
+ ),
+ new Promise<never>((_, reject) => setTimeout(() => reject(new Error('TIMEOUT')), 60_000)),
+ ]);
+
+ return c.json(formatDiffResponse(result));
+ }
+
+ // Specs mode: direct comparison
+ const { baseSpec, headSpec, markdownFiles } = body;
+
+ const diff = diffSpecs(
+ baseSpec as Parameters<typeof diffSpecs>[0],
+ headSpec as Parameters<typeof diffSpecs>[1],
+ markdownFiles,
+ );
+
+ return c.json({
+ // Core diff fields
+ breaking: diff.breaking,
+ nonBreaking: diff.nonBreaking,
+ docsOnly: diff.docsOnly,
+ coverageDelta: diff.coverageDelta,
+ oldCoverage: diff.oldCoverage,
+ newCoverage: diff.newCoverage,
+ driftIntroduced: diff.driftIntroduced,
+ driftResolved: diff.driftResolved,
+ newUndocumented: diff.newUndocumented,
+ improvedExports: diff.improvedExports,
+ regressedExports: diff.regressedExports,
+
+ // Extended fields
+ memberChanges: diff.memberChanges,
+ categorizedBreaking: diff.categorizedBreaking,
+ docsImpact: diff.docsImpact,
+
+ // Metadata
+ generatedAt: new Date().toISOString(),
+ cached: false,
+ });
+ } catch (err) {
+ if (err instanceof Error) {
+ if (err.message === 'TIMEOUT') {
+ return c.json({ error: 'Spec generation timed out' }, 408);
+ }
+ if (err.message.includes('not found') || err.message.includes('404')) {
+ return c.json({ error: 'Repository or ref not found' }, 404);
+ }
+ if (err.message.includes('No token')) {
+ return c.json({ error: 'GitHub App access required' }, 403);
+ }
+ }
+
+ console.error('Spec diff error:', err);
+ return c.json({ error: 'Failed to compute diff' }, 500);
+ }
+ });
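For reference, request bodies that satisfy the DiffRequestSchema above might look like the following (a sketch: the owner/repo values and spec payloads are placeholders, and the authentication header required by the API-key middleware is not shown in this diff):

// Mode 1: compare two GitHub refs (requires a DocCov GitHub App installation for the org).
const githubDiffRequest = {
  mode: 'github' as const,
  owner: 'example-org',   // hypothetical
  repo: 'example-repo',   // hypothetical
  base: 'main',
  head: 'feat/new-docs',
  includeDocsImpact: true,
};

// Mode 2: compare two uploaded spec objects directly.
const specsDiffRequest = {
  mode: 'specs' as const,
  baseSpec: { exports: [] },  // placeholder objects; any JSON object passes the passthrough schema
  headSpec: { exports: [] },
  markdownFiles: [{ path: 'README.md', content: '# Example' }],
};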