@doccov/api 0.2.0 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,217 @@
+ import { Writable } from 'node:stream';
+ import type { VercelRequest, VercelResponse } from '@vercel/node';
+ import { Sandbox } from '@vercel/sandbox';
+
+ export const config = {
+   runtime: 'nodejs',
+   maxDuration: 60, // Quick detection, 1 minute max
+ };
+
+ interface DetectRequestBody {
+   url: string;
+   ref?: string;
+ }
+
+ interface PackageInfo {
+   name: string;
+   path: string;
+   description?: string;
+ }
+
+ interface DetectResponse {
+   isMonorepo: boolean;
+   packageManager: 'npm' | 'pnpm' | 'bun' | 'yarn';
+   packages?: PackageInfo[];
+   defaultPackage?: string;
+   error?: string;
+ }
+
+ // Helper to capture stream output
+ function createCaptureStream(): { stream: Writable; getOutput: () => string } {
+   let output = '';
+   const stream = new Writable({
+     write(chunk, _encoding, callback) {
+       output += chunk.toString();
+       callback();
+     },
+   });
+   return { stream, getOutput: () => output };
+ }
+
+ export default async function handler(req: VercelRequest, res: VercelResponse) {
+   // CORS
+   res.setHeader('Access-Control-Allow-Origin', '*');
+   res.setHeader('Access-Control-Allow-Methods', 'POST, OPTIONS');
+   res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
+
+   if (req.method === 'OPTIONS') {
+     return res.status(200).end();
+   }
+
+   if (req.method !== 'POST') {
+     return res.status(405).json({ error: 'Method not allowed' });
+   }
+
+   const body = req.body as DetectRequestBody;
+
+   if (!body.url) {
+     return res.status(400).json({ error: 'url is required' });
+   }
+
+   try {
+     const result = await detectMonorepo(body.url, body.ref ?? 'main');
+     return res.status(200).json(result);
+   } catch (error) {
+     const message = error instanceof Error ? error.message : String(error);
+     return res.status(500).json({
+       isMonorepo: false,
+       packageManager: 'npm',
+       error: message,
+     } as DetectResponse);
+   }
+ }
+
+ async function detectMonorepo(url: string, _ref: string): Promise<DetectResponse> {
+   const sandbox = await Sandbox.create({
+     source: {
+       url,
+       type: 'git',
+     },
+     resources: { vcpus: 2 },
+     timeout: 60 * 1000, // 1 minute
+     runtime: 'node22',
+   });
+
+   try {
+     // List root files
+     const lsCapture = createCaptureStream();
+     await sandbox.runCommand({
+       cmd: 'ls',
+       args: ['-1'],
+       stdout: lsCapture.stream,
+     });
+     const files = lsCapture.getOutput();
+
+     // Detect package manager
+     let packageManager: DetectResponse['packageManager'] = 'npm';
+     if (files.includes('pnpm-lock.yaml')) {
+       packageManager = 'pnpm';
+     } else if (files.includes('bun.lock') || files.includes('bun.lockb')) {
+       packageManager = 'bun';
+     } else if (files.includes('yarn.lock')) {
+       packageManager = 'yarn';
+     }
+
+     // Read root package.json
+     const pkgCapture = createCaptureStream();
+     await sandbox.runCommand({
+       cmd: 'cat',
+       args: ['package.json'],
+       stdout: pkgCapture.stream,
+     });
+
+     let rootPkg: { workspaces?: string[] | { packages?: string[] }; name?: string } = {};
+     try {
+       rootPkg = JSON.parse(pkgCapture.getOutput());
+     } catch {
+       // Not a valid package.json
+     }
+
+     // Check for workspaces (npm/yarn/bun) or pnpm-workspace.yaml
+     let workspacePatterns: string[] = [];
+
+     if (rootPkg.workspaces) {
+       if (Array.isArray(rootPkg.workspaces)) {
+         workspacePatterns = rootPkg.workspaces;
+       } else if (rootPkg.workspaces.packages) {
+         workspacePatterns = rootPkg.workspaces.packages;
+       }
+     }
+
+     // Check pnpm-workspace.yaml
+     if (files.includes('pnpm-workspace.yaml')) {
+       const wsCapture = createCaptureStream();
+       await sandbox.runCommand({
+         cmd: 'cat',
+         args: ['pnpm-workspace.yaml'],
+         stdout: wsCapture.stream,
+       });
+       const wsContent = wsCapture.getOutput();
+       // Simple YAML parsing for packages array
+       const packagesMatch = wsContent.match(/packages:\s*\n((?:\s+-\s*.+\n?)+)/);
+       if (packagesMatch) {
+         const lines = packagesMatch[1].split('\n');
+         for (const line of lines) {
+           const match = line.match(/^\s+-\s*['"]?([^'"]+)['"]?\s*$/);
+           if (match) {
+             workspacePatterns.push(match[1]);
+           }
+         }
+       }
+     }
+
+     // Not a monorepo
+     if (workspacePatterns.length === 0) {
+       return {
+         isMonorepo: false,
+         packageManager,
+       };
+     }
+
+     // Find all packages
+     const packages: PackageInfo[] = [];
+
+     // Use find to locate package.json files in workspace dirs
+     const findCapture = createCaptureStream();
+     await sandbox.runCommand({
+       cmd: 'find',
+       args: ['.', '-name', 'package.json', '-maxdepth', '3', '-type', 'f'],
+       stdout: findCapture.stream,
+     });
+
+     const packagePaths = findCapture
+       .getOutput()
+       .trim()
+       .split('\n')
+       .filter((p) => p && p !== './package.json');
+
+     for (const pkgPath of packagePaths.slice(0, 30)) {
+       // Limit to 30 packages
+       const catCapture = createCaptureStream();
+       await sandbox.runCommand({
+         cmd: 'cat',
+         args: [pkgPath],
+         stdout: catCapture.stream,
+       });
+
+       try {
+         const pkg = JSON.parse(catCapture.getOutput()) as {
+           name?: string;
+           description?: string;
+           private?: boolean;
+         };
+         if (pkg.name && !pkg.private) {
+           packages.push({
+             name: pkg.name,
+             path: pkgPath.replace('./package.json', '.').replace('/package.json', ''),
+             description: pkg.description,
+           });
+         }
+       } catch {
+         // Skip invalid package.json
+       }
+     }
+
+     // Sort by name
+     packages.sort((a, b) => a.name.localeCompare(b.name));
+
+     return {
+       isMonorepo: true,
+       packageManager,
+       packages,
+       defaultPackage: packages[0]?.name,
+     };
+   } finally {
+     await sandbox.stop();
+   }
+ }
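
For orientation, a client call against this detection handler might look like the following sketch. The request body shape ({ url, ref }) and the DetectResponse fields come from the code above; the endpoint path and deployment origin are assumptions, not part of the package.

// Hypothetical client for the detection handler above (path and origin assumed).
async function detectRepo(url: string, ref = 'main') {
  const res = await fetch('https://doccov.example.com/api/detect', { // assumed URL
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url, ref }), // matches DetectRequestBody
  });
  // Shape mirrors DetectResponse from the handler above
  return (await res.json()) as {
    isMonorepo: boolean;
    packageManager: 'npm' | 'pnpm' | 'bun' | 'yarn';
    packages?: { name: string; path: string; description?: string }[];
    defaultPackage?: string;
    error?: string;
  };
}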
@@ -0,0 +1,362 @@
+ import { Writable } from 'node:stream';
+ import type { VercelRequest, VercelResponse } from '@vercel/node';
+ import { Sandbox } from '@vercel/sandbox';
+
+ export const config = {
+   runtime: 'nodejs',
+   maxDuration: 300,
+ };
+
+ interface JobEvent {
+   type: 'progress' | 'complete' | 'error';
+   stage?: string;
+   message?: string;
+   progress?: number;
+   result?: ScanResult;
+ }
+
+ interface ScanResult {
+   owner: string;
+   repo: string;
+   ref: string;
+   packageName?: string;
+   coverage: number;
+   exportCount: number;
+   typeCount: number;
+   driftCount: number;
+   undocumented: string[];
+   drift: Array<{
+     export: string;
+     type: string;
+     issue: string;
+   }>;
+ }
+
+ // Helper to capture stream output
+ function createCaptureStream(): { stream: Writable; getOutput: () => string } {
+   let output = '';
+   const stream = new Writable({
+     write(chunk, _encoding, callback) {
+       output += chunk.toString();
+       callback();
+     },
+   });
+   return { stream, getOutput: () => output };
+ }
+
+ export default async function handler(req: VercelRequest, res: VercelResponse) {
+   // CORS
+   res.setHeader('Access-Control-Allow-Origin', '*');
+   res.setHeader('Access-Control-Allow-Methods', 'GET, OPTIONS');
+   res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
+
+   if (req.method === 'OPTIONS') {
+     return res.status(200).end();
+   }
+
+   if (req.method !== 'GET') {
+     return res.status(405).json({ error: 'Method not allowed' });
+   }
+
+   // Get params from query string
+   const url = req.query.url as string;
+   const ref = (req.query.ref as string) || 'main';
+   const owner = req.query.owner as string;
+   const repo = req.query.repo as string;
+   const pkg = req.query.package as string | undefined;
+
+   if (!url || !owner || !repo) {
+     return res.status(400).json({ error: 'Missing required query params (url, owner, repo)' });
+   }
+
+   // Set SSE headers
+   res.setHeader('Content-Type', 'text/event-stream');
+   res.setHeader('Cache-Control', 'no-cache');
+   res.setHeader('Connection', 'keep-alive');
+
+   // Send initial comment
+   res.write(':ok\n\n');
+
+   // Helper to send SSE event
+   const sendEvent = (event: JobEvent) => {
+     const data = JSON.stringify(event);
+     res.write(`data: ${data}\n\n`);
+   };
+
+   // Run scan with streaming progress
+   await runScanWithProgress({ url, ref, owner, repo, package: pkg }, sendEvent);
+
+   res.end();
+ }
+
+ interface ScanOptions {
+   url: string;
+   ref: string;
+   owner: string;
+   repo: string;
+   package?: string;
+ }
+
+ async function runScanWithProgress(
+   options: ScanOptions,
+   sendEvent: (event: JobEvent) => void,
+ ): Promise<void> {
+   try {
+     sendEvent({
+       type: 'progress',
+       stage: 'cloning',
+       message: `Cloning ${options.owner}/${options.repo}...`,
+       progress: 5,
+     });
+
+     const sandbox = await Sandbox.create({
+       source: {
+         url: options.url,
+         type: 'git',
+       },
+       resources: { vcpus: 4 },
+       timeout: 5 * 60 * 1000,
+       runtime: 'node22',
+     });
+
+     try {
+       sendEvent({
+         type: 'progress',
+         stage: 'detecting',
+         message: 'Detecting project structure...',
+         progress: 10,
+       });
+
+       // Detect package manager
+       const lsCapture = createCaptureStream();
+       await sandbox.runCommand({
+         cmd: 'ls',
+         args: ['-1'],
+         stdout: lsCapture.stream,
+       });
+       const files = lsCapture.getOutput();
+
+       let installCmd: string;
+       let installArgs: string[];
+       let pm: 'pnpm' | 'bun' | 'npm' = 'npm';
+       let pmMessage = 'Detected npm project';
+
+       if (files.includes('pnpm-lock.yaml')) {
+         pm = 'pnpm';
+         installCmd = 'pnpm';
+         installArgs = ['install', '--frozen-lockfile'];
+         pmMessage = 'Detected pnpm monorepo';
+       } else if (files.includes('bun.lock') || files.includes('bun.lockb')) {
+         pm = 'bun';
+         installCmd = 'bun';
+         installArgs = ['install', '--frozen-lockfile'];
+         pmMessage = 'Detected bun project';
+       } else {
+         installCmd = 'npm';
+         installArgs = ['install', '--ignore-scripts', '--legacy-peer-deps'];
+       }
+
+       sendEvent({ type: 'progress', stage: 'detecting', message: pmMessage, progress: 15 });
+
+       // Install package manager if needed
+       if (pm === 'pnpm') {
+         sendEvent({
+           type: 'progress',
+           stage: 'installing',
+           message: 'Installing pnpm...',
+           progress: 18,
+         });
+         await sandbox.runCommand({ cmd: 'npm', args: ['install', '-g', 'pnpm'] });
+       } else if (pm === 'bun') {
+         sendEvent({
+           type: 'progress',
+           stage: 'installing',
+           message: 'Installing bun...',
+           progress: 18,
+         });
+         await sandbox.runCommand({ cmd: 'npm', args: ['install', '-g', 'bun'] });
+       }
+
+       // Install dependencies
+       sendEvent({
+         type: 'progress',
+         stage: 'installing',
+         message: 'Installing dependencies...',
+         progress: 20,
+       });
+
+       const installCapture = createCaptureStream();
+       const install = await sandbox.runCommand({
+         cmd: installCmd,
+         args: installArgs,
+         stdout: installCapture.stream,
+         stderr: installCapture.stream,
+       });
+
+       if (install.exitCode !== 0) {
+         throw new Error(`${installCmd} install failed: ${installCapture.getOutput().slice(-300)}`);
+       }
+
+       sendEvent({
+         type: 'progress',
+         stage: 'installing',
+         message: 'Dependencies installed',
+         progress: 40,
+       });
+
+       // Check for build script
+       const pkgCapture = createCaptureStream();
+       await sandbox.runCommand({
+         cmd: 'cat',
+         args: ['package.json'],
+         stdout: pkgCapture.stream,
+       });
+
+       try {
+         const pkgJson = JSON.parse(pkgCapture.getOutput()) as { scripts?: Record<string, string> };
+         const scripts = pkgJson.scripts ?? {};
+         const buildScript = scripts.build ? 'build' : scripts.compile ? 'compile' : null;
+
+         if (buildScript) {
+           sendEvent({
+             type: 'progress',
+             stage: 'building',
+             message: 'Running build...',
+             progress: 45,
+           });
+
+           const buildCapture = createCaptureStream();
+           const buildResult = await sandbox.runCommand({
+             cmd: pm === 'npm' ? 'npm' : pm,
+             args: pm === 'npm' ? ['run', buildScript] : [buildScript],
+             stdout: buildCapture.stream,
+             stderr: buildCapture.stream,
+           });
+
+           const buildMessage =
+             buildResult.exitCode === 0 ? 'Build complete' : 'Build failed (continuing)';
+           sendEvent({ type: 'progress', stage: 'building', message: buildMessage, progress: 55 });
+         }
+       } catch {
+         // Ignore package.json errors
+       }
+
+       // Install doccov CLI
+       sendEvent({
+         type: 'progress',
+         stage: 'analyzing',
+         message: 'Installing DocCov CLI...',
+         progress: 60,
+       });
+
+       const cliInstall = await sandbox.runCommand({
+         cmd: 'npm',
+         args: ['install', '-g', '@doccov/cli'],
+       });
+
+       if (cliInstall.exitCode !== 0) {
+         throw new Error('Failed to install @doccov/cli');
+       }
+
+       // Run generate
+       const specFile = '/tmp/spec.json';
+       const genArgs = ['generate', '--cwd', '.', '-o', specFile];
+       const analyzeMessage = options.package
+         ? `Analyzing ${options.package}...`
+         : 'Generating DocCov spec...';
+       if (options.package) {
+         genArgs.push('--package', options.package);
+       }
+
+       sendEvent({ type: 'progress', stage: 'analyzing', message: analyzeMessage, progress: 65 });
+
+       const genCapture = createCaptureStream();
+       const genResult = await sandbox.runCommand({
+         cmd: 'doccov',
+         args: genArgs,
+         stdout: genCapture.stream,
+         stderr: genCapture.stream,
+       });
+
+       const genOutput = genCapture.getOutput();
+       if (genResult.exitCode !== 0) {
+         throw new Error(`doccov generate failed: ${genOutput.slice(-300)}`);
+       }
+
+       sendEvent({
+         type: 'progress',
+         stage: 'extracting',
+         message: 'Extracting results...',
+         progress: 85,
+       });
+
+       // Extract summary
+       const extractScript = `
+         const fs = require('fs');
+         const spec = JSON.parse(fs.readFileSync('${specFile}', 'utf-8'));
+         const undocumented = [];
+         const drift = [];
+         for (const exp of spec.exports || []) {
+           const docs = exp.docs;
+           if (!docs) continue;
+           if ((docs.missing?.length || 0) > 0 || (docs.coverageScore || 0) < 100) {
+             undocumented.push(exp.name);
+           }
+           for (const d of docs.drift || []) {
+             drift.push({ export: exp.name, type: d.type, issue: d.issue });
+           }
+         }
+         console.log(JSON.stringify({
+           coverage: spec.docs?.coverageScore || 0,
+           exportCount: spec.exports?.length || 0,
+           typeCount: spec.types?.length || 0,
+           undocumented: undocumented.slice(0, 50),
+           drift: drift.slice(0, 20),
+           driftCount: drift.length,
+         }));
+       `.replace(/\n/g, ' ');
+
+       const nodeCapture = createCaptureStream();
+       const nodeResult = await sandbox.runCommand({
+         cmd: 'node',
+         args: ['-e', extractScript],
+         stdout: nodeCapture.stream,
+         stderr: nodeCapture.stream,
+       });
+
+       const summaryJson = nodeCapture.getOutput();
+       if (nodeResult.exitCode !== 0 || !summaryJson.trim()) {
+         throw new Error(`Failed to extract summary: ${summaryJson.slice(0, 300)}`);
+       }
+
+       const summary = JSON.parse(summaryJson.trim()) as {
+         coverage: number;
+         exportCount: number;
+         typeCount: number;
+         undocumented: string[];
+         drift: ScanResult['drift'];
+         driftCount: number;
+       };
+
+       const result: ScanResult = {
+         owner: options.owner,
+         repo: options.repo,
+         ref: options.ref,
+         packageName: options.package,
+         coverage: summary.coverage,
+         exportCount: summary.exportCount,
+         typeCount: summary.typeCount,
+         driftCount: summary.driftCount,
+         undocumented: summary.undocumented,
+         drift: summary.drift,
+       };
+
+       sendEvent({ type: 'complete', result });
+     } finally {
+       await sandbox.stop();
+     }
+   } catch (error) {
+     const message = error instanceof Error ? error.message : String(error);
+     sendEvent({ type: 'error', message });
+   }
+ }
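
The stream handler above emits server-sent events framed as data: lines containing JSON-encoded JobEvent objects. A browser-side consumer could look roughly like this sketch; the endpoint path and query values are assumptions, while the event payload mirrors JobEvent.

// Hypothetical SSE consumer for the scan-stream handler above.
type JobEvent = {
  type: 'progress' | 'complete' | 'error';
  stage?: string;
  message?: string;
  progress?: number;
  result?: unknown;
};

const params = new URLSearchParams({
  url: 'https://github.com/owner/repo', // assumed repository
  ref: 'main',
  owner: 'owner',
  repo: 'repo',
});
const source = new EventSource(`/api/scan-stream?${params}`); // assumed path

source.onmessage = (msg) => {
  const event = JSON.parse(msg.data) as JobEvent;
  if (event.type === 'progress') {
    console.log(`${event.stage}: ${event.message} (${event.progress}%)`);
  } else {
    console.log(event); // 'complete' carries a ScanResult, 'error' carries a message
    source.close();
  }
};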
package/api/scan.ts ADDED
@@ -0,0 +1,64 @@
+ import type { VercelRequest, VercelResponse } from '@vercel/node';
+
+ export const config = {
+   runtime: 'nodejs',
+   maxDuration: 10,
+ };
+
+ interface ScanRequestBody {
+   url: string;
+   ref?: string;
+   package?: string;
+ }
+
+ export default async function handler(req: VercelRequest, res: VercelResponse) {
+   // CORS
+   res.setHeader('Access-Control-Allow-Origin', '*');
+   res.setHeader('Access-Control-Allow-Methods', 'POST, OPTIONS');
+   res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
+
+   if (req.method === 'OPTIONS') {
+     return res.status(200).end();
+   }
+
+   if (req.method !== 'POST') {
+     return res.status(405).json({ error: 'Method not allowed' });
+   }
+
+   const body = req.body as ScanRequestBody;
+
+   if (!body.url) {
+     return res.status(400).json({ error: 'url is required' });
+   }
+
+   // Parse GitHub URL
+   const urlMatch = body.url.match(/github\.com\/([^/]+)\/([^/]+)/);
+   if (!urlMatch) {
+     return res.status(400).json({ error: 'Invalid GitHub URL' });
+   }
+
+   const [, owner, repoWithExt] = urlMatch;
+   const repo = repoWithExt.replace(/\.git$/, '');
+   const ref = body.ref ?? 'main';
+
+   // Generate a job ID
+   const jobId = `scan-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+
+   // Build stream URL with params encoded
+   const params = new URLSearchParams({
+     url: body.url,
+     ref,
+     owner,
+     repo,
+   });
+   if (body.package) {
+     params.set('package', body.package);
+   }
+
+   // Return job ID and stream URL with all params
+   return res.status(202).json({
+     jobId,
+     status: 'pending',
+     streamUrl: `/scan-stream?${params.toString()}`,
+   });
+ }
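
This handler only validates the GitHub URL and hands back a streamUrl; the implied flow is to POST here first and then subscribe to the returned URL for progress, as in the sketch below (the /api prefix and relative URL resolution are assumptions).

// Hypothetical two-step client: start a scan, then follow its SSE stream.
async function startScan(repoUrl: string, pkg?: string) {
  const res = await fetch('/api/scan', { // assumed path
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url: repoUrl, package: pkg }), // matches ScanRequestBody
  });
  const { jobId, streamUrl } = (await res.json()) as { jobId: string; streamUrl: string };
  console.log(`job ${jobId} accepted, streaming progress from ${streamUrl}`);
  return new EventSource(streamUrl); // consume events as in the scan-stream sketch above
}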
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@doccov/api",
-   "version": "0.2.0",
+   "version": "0.2.1",
    "description": "DocCov API - Badge endpoint and coverage services",
    "keywords": [
      "doccov",
@@ -27,14 +27,16 @@
      "format": "biome format --write src/"
    },
    "dependencies": {
-     "@openpkg-ts/spec": "^0.2.0",
+     "@openpkg-ts/spec": "^0.3.0",
      "@vercel/sandbox": "^1.0.3",
      "hono": "^4.0.0",
      "ms": "^2.1.3"
    },
    "devDependencies": {
      "@types/bun": "latest",
-     "@types/ms": "^0.7.34"
+     "@types/ms": "^0.7.34",
+     "@types/node": "^20.0.0",
+     "@vercel/node": "^3.0.0",
+     "typescript": "^5.0.0"
    }
  }
-
package/src/index.ts CHANGED
@@ -3,6 +3,7 @@ import { cors } from 'hono/cors';
  import { badgeRoute } from './routes/badge';
  import { leaderboardRoute } from './routes/leaderboard';
  import { scanRoute } from './routes/scan';
+ import { widgetRoute } from './routes/widget';

  const app = new Hono();

@@ -16,6 +17,7 @@ app.get('/', (c) => {
      version: '0.2.0',
      endpoints: {
        badge: '/badge/:owner/:repo',
+       widget: '/widget/:owner/:repo',
        leaderboard: '/leaderboard',
        scan: '/scan',
        health: '/health',
@@ -29,14 +31,9 @@ app.get('/health', (c) => {

  // Routes
  app.route('/badge', badgeRoute);
+ app.route('/widget', widgetRoute);
  app.route('/leaderboard', leaderboardRoute);
  app.route('/scan', scanRoute);

- const port = Number(process.env.PORT) || 3000;
-
- console.log(`DocCov API running on http://localhost:${port}`);
-
- export default {
-   port,
-   fetch: app.fetch,
- };
+ // Vercel serverless handler + Bun auto-serves this export
+ export default app;
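
The new default export works because a Hono instance exposes a fetch(request) handler, which is the contract Bun looks for when auto-serving a module's default export; platform adapters can wrap the same object. A minimal illustration of that pattern, independent of this package's routes:

// Sketch: exporting the Hono app itself gives the runtime a fetch handler.
import { Hono } from 'hono';

const app = new Hono();
app.get('/health', (c) => c.json({ status: 'ok' }));

// Bun auto-serves this export when the file is run directly;
// serverless platforms can wrap the same app via their adapters.
export default app;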