@doccov/api 0.3.3 → 0.3.5
This diff shows the content of publicly available package versions released to a supported registry, as they appear in that registry. It is provided for informational purposes only.
- package/.vercelignore +5 -0
- package/CHANGELOG.md +15 -0
- package/api/[...path].ts +35 -0
- package/bunup.config.ts +16 -0
- package/functions/execute-stream.ts +273 -0
- package/functions/execute.ts +204 -0
- package/functions/plan.ts +104 -0
- package/lib/plan-agent.ts +252 -0
- package/package.json +10 -6
- package/src/index.ts +9 -7
- package/src/routes/plan.ts +75 -0
- package/tsconfig.json +2 -2
- package/vercel.json +5 -3
- package/api/scan/detect.ts +0 -121
- package/api/scan-stream.ts +0 -460
- package/api/scan.ts +0 -63
- package/src/routes/scan.ts +0 -240
- package/src/sandbox-runner.ts +0 -82
- package/src/scan-worker.ts +0 -122
- package/src/stores/job-store.interface.ts +0 -25
package/api/scan-stream.ts
DELETED
@@ -1,460 +0,0 @@
-import { Writable } from 'node:stream';
-import {
-  detectBuildInfo,
-  detectMonorepo,
-  detectPackageManager,
-  getInstallCommand,
-  getPrimaryBuildScript,
-  SandboxFileSystem,
-  type ScanResult,
-} from '@doccov/sdk';
-import type { VercelRequest, VercelResponse } from '@vercel/node';
-import { Sandbox } from '@vercel/sandbox';
-
-export const config = {
-  runtime: 'nodejs',
-  maxDuration: 300,
-};
-
-interface JobEvent {
-  type: 'progress' | 'complete' | 'error';
-  stage?: string;
-  message?: string;
-  progress?: number;
-  result?: ScanResult;
-  availablePackages?: string[];
-}
-
-// Helper to capture stream output
-function createCaptureStream(): { stream: Writable; getOutput: () => string } {
-  let output = '';
-  const stream = new Writable({
-    write(chunk, _encoding, callback) {
-      output += chunk.toString();
-      callback();
-    },
-  });
-  return { stream, getOutput: () => output };
-}
-
-export default async function handler(req: VercelRequest, res: VercelResponse) {
-  // CORS
-  res.setHeader('Access-Control-Allow-Origin', '*');
-  res.setHeader('Access-Control-Allow-Methods', 'GET, OPTIONS');
-  res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
-
-  if (req.method === 'OPTIONS') {
-    return res.status(200).end();
-  }
-
-  if (req.method !== 'GET') {
-    return res.status(405).json({ error: 'Method not allowed' });
-  }
-
-  // Get params from query string
-  const url = req.query.url as string;
-  const ref = (req.query.ref as string) || 'main';
-  const owner = req.query.owner as string;
-  const repo = req.query.repo as string;
-  const pkg = req.query.package as string | undefined;
-
-  if (!url || !owner || !repo) {
-    return res.status(400).json({ error: 'Missing required query params (url, owner, repo)' });
-  }
-
-  // Set SSE headers
-  res.setHeader('Content-Type', 'text/event-stream');
-  res.setHeader('Cache-Control', 'no-cache');
-  res.setHeader('Connection', 'keep-alive');
-
-  // Send initial comment
-  res.write(':ok\n\n');
-
-  // Helper to send SSE event
-  const sendEvent = (event: JobEvent) => {
-    const data = JSON.stringify(event);
-    res.write(`data: ${data}\n\n`);
-  };
-
-  // Run scan with streaming progress
-  await runScanWithProgress({ url, ref, owner, repo, package: pkg }, sendEvent);
-
-  res.end();
-}
-
-interface ScanOptions {
-  url: string;
-  ref: string;
-  owner: string;
-  repo: string;
-  package?: string;
-}
-
-async function runScanWithProgress(
-  options: ScanOptions,
-  sendEvent: (event: JobEvent) => void,
-): Promise<void> {
-  try {
-    sendEvent({
-      type: 'progress',
-      stage: 'cloning',
-      message: `Cloning ${options.owner}/${options.repo}...`,
-      progress: 5,
-    });
-
-    const sandbox = await Sandbox.create({
-      source: {
-        url: options.url,
-        type: 'git',
-      },
-      resources: { vcpus: 4 },
-      timeout: 5 * 60 * 1000,
-      runtime: 'node22',
-    });
-
-    try {
-      // Create filesystem abstraction for SDK detection functions
-      const fs = new SandboxFileSystem(sandbox);
-
-      // Checkout specific ref if not main/master
-      if (options.ref && options.ref !== 'main' && options.ref !== 'master') {
-        sendEvent({
-          type: 'progress',
-          stage: 'cloning',
-          message: `Checking out ${options.ref}...`,
-          progress: 7,
-        });
-
-        const checkoutCapture = createCaptureStream();
-        const checkoutResult = await sandbox.runCommand({
-          cmd: 'git',
-          args: ['checkout', options.ref],
-          stdout: checkoutCapture.stream,
-          stderr: checkoutCapture.stream,
-        });
-
-        if (checkoutResult.exitCode !== 0) {
-          // Try fetching the ref first (might be a tag not fetched by shallow clone)
-          await sandbox.runCommand({
-            cmd: 'git',
-            args: [
-              'fetch',
-              '--depth',
-              '1',
-              'origin',
-              `refs/tags/${options.ref}:refs/tags/${options.ref}`,
-            ],
-          });
-          const retryResult = await sandbox.runCommand({
-            cmd: 'git',
-            args: ['checkout', options.ref],
-          });
-          if (retryResult.exitCode !== 0) {
-            throw new Error(`Failed to checkout ${options.ref}: ${checkoutCapture.getOutput()}`);
-          }
-        }
-      }
-
-      sendEvent({
-        type: 'progress',
-        stage: 'detecting',
-        message: 'Detecting project structure...',
-        progress: 10,
-      });
-
-      // Detect package manager using SDK
-      const pmInfo = await detectPackageManager(fs);
-      const pmMessage = pmInfo.lockfile
-        ? `Detected ${pmInfo.name} project`
-        : 'No lockfile detected';
-      sendEvent({ type: 'progress', stage: 'detecting', message: pmMessage, progress: 15 });
-
-      // Early monorepo detection - fail fast if monorepo without package param
-      if (!options.package) {
-        const mono = await detectMonorepo(fs);
-
-        if (mono.isMonorepo) {
-          sendEvent({
-            type: 'progress',
-            stage: 'detecting',
-            message: 'Monorepo detected, listing packages...',
-            progress: 17,
-          });
-
-          const availablePackages = mono.packages.filter((p) => !p.private).map((p) => p.name);
-
-          await sandbox.stop();
-          sendEvent({
-            type: 'error',
-            message: `Monorepo detected. Please specify a package to analyze using the 'package' query parameter.`,
-            availablePackages,
-          });
-          return;
-        }
-      }
-
-      // Install package manager if needed (npm and pnpm are pre-installed in node22)
-      if (pmInfo.name === 'bun') {
-        sendEvent({
-          type: 'progress',
-          stage: 'installing',
-          message: 'Installing bun...',
-          progress: 18,
-        });
-        await sandbox.runCommand({ cmd: 'npm', args: ['install', '-g', 'bun'] });
-      } else if (pmInfo.name === 'yarn') {
-        sendEvent({
-          type: 'progress',
-          stage: 'installing',
-          message: 'Installing yarn...',
-          progress: 18,
-        });
-        await sandbox.runCommand({ cmd: 'npm', args: ['install', '-g', 'yarn'] });
-      }
-
-      // Install dependencies with fallback chain
-      sendEvent({
-        type: 'progress',
-        stage: 'installing',
-        message: 'Installing dependencies...',
-        progress: 20,
-      });
-
-      let installed = false;
-      let activePm = pmInfo.name;
-      const installCapture = createCaptureStream();
-
-      // Try primary package manager using SDK's getInstallCommand
-      const primaryCmd = getInstallCommand(pmInfo);
-      const primaryResult = await sandbox.runCommand({
-        cmd: primaryCmd[0],
-        args: primaryCmd.slice(1),
-        stdout: installCapture.stream,
-        stderr: installCapture.stream,
-      });
-
-      if (primaryResult.exitCode === 0) {
-        installed = true;
-      } else {
-        const errorOutput = installCapture.getOutput();
-
-        // Check if it's a workspace:* protocol error - try bun fallback
-        if (errorOutput.includes('workspace:') || errorOutput.includes('EUNSUPPORTEDPROTOCOL')) {
-          sendEvent({
-            type: 'progress',
-            stage: 'installing',
-            message: 'Trying bun fallback for workspace protocol...',
-            progress: 25,
-          });
-
-          // Install bun if not already the primary
-          if (pmInfo.name !== 'bun') {
-            await sandbox.runCommand({ cmd: 'npm', args: ['install', '-g', 'bun'] });
-          }
-
-          const bunCapture = createCaptureStream();
-          const bunResult = await sandbox.runCommand({
-            cmd: 'bun',
-            args: ['install'],
-            stdout: bunCapture.stream,
-            stderr: bunCapture.stream,
-          });
-
-          if (bunResult.exitCode === 0) {
-            installed = true;
-            activePm = 'bun'; // Update pm for build step
-          }
-        }
-      }
-
-      if (installed) {
-        sendEvent({
-          type: 'progress',
-          stage: 'installing',
-          message: 'Dependencies installed',
-          progress: 40,
-        });
-      } else {
-        // Graceful degradation - continue with limited analysis
-        sendEvent({
-          type: 'progress',
-          stage: 'installing',
-          message: 'Install failed (continuing with limited analysis)',
-          progress: 40,
-        });
-      }
-
-      // Check for build script using SDK
-      const buildInfo = await detectBuildInfo(fs);
-      const buildScript = getPrimaryBuildScript(buildInfo);
-
-      if (buildScript) {
-        sendEvent({
-          type: 'progress',
-          stage: 'building',
-          message: 'Running build...',
-          progress: 45,
-        });
-
-        const buildCapture = createCaptureStream();
-        // Use activePm (may have changed to bun as fallback)
-        const buildCmd =
-          activePm === 'npm' || activePm === 'yarn'
-            ? [activePm, 'run', buildScript]
-            : [activePm, buildScript];
-
-        const buildResult = await sandbox.runCommand({
-          cmd: buildCmd[0],
-          args: buildCmd.slice(1),
-          stdout: buildCapture.stream,
-          stderr: buildCapture.stream,
-        });
-
-        const buildMessage =
-          buildResult.exitCode === 0 ? 'Build complete' : 'Build failed (continuing)';
-        sendEvent({ type: 'progress', stage: 'building', message: buildMessage, progress: 55 });
-      }
-
-      // Install doccov CLI
-      sendEvent({
-        type: 'progress',
-        stage: 'analyzing',
-        message: 'Installing DocCov CLI...',
-        progress: 60,
-      });
-
-      const cliInstall = await sandbox.runCommand({
-        cmd: 'npm',
-        args: ['install', '-g', '@doccov/cli'],
-      });
-
-      if (cliInstall.exitCode !== 0) {
-        throw new Error('Failed to install @doccov/cli');
-      }
-
-      // Run generate
-      const specFile = '/tmp/spec.json';
-      const genArgs = ['generate', '--cwd', '.', '-o', specFile];
-      const analyzeMessage = options.package
-        ? `Analyzing ${options.package}...`
-        : 'Generating DocCov spec...';
-      if (options.package) {
-        genArgs.push('--package', options.package);
-      }
-
-      sendEvent({ type: 'progress', stage: 'analyzing', message: analyzeMessage, progress: 65 });
-
-      const genCapture = createCaptureStream();
-      const genResult = await sandbox.runCommand({
-        cmd: 'doccov',
-        args: genArgs,
-        stdout: genCapture.stream,
-        stderr: genCapture.stream,
-      });
-
-      const genOutput = genCapture.getOutput();
-      if (genResult.exitCode !== 0) {
-        throw new Error(`doccov generate failed: ${genOutput.slice(-300)}`);
-      }
-
-      sendEvent({
-        type: 'progress',
-        stage: 'extracting',
-        message: 'Extracting results...',
-        progress: 85,
-      });
-
-      // Check if spec file was created
-      const checkFileCapture = createCaptureStream();
-      await sandbox.runCommand({
-        cmd: 'cat',
-        args: [specFile],
-        stdout: checkFileCapture.stream,
-        stderr: checkFileCapture.stream,
-      });
-      const specContent = checkFileCapture.getOutput();
-
-      if (!specContent.trim() || specContent.includes('No such file')) {
-        throw new Error(`Spec file not found or empty. Generate output: ${genOutput.slice(-500)}`);
-      }
-
-      // Extract summary with error handling
-      const extractScript = `
-        const fs = require('fs');
-        try {
-          if (!fs.existsSync('${specFile}')) {
-            console.error('Spec file not found: ${specFile}');
-            process.exit(1);
-          }
-          const content = fs.readFileSync('${specFile}', 'utf-8');
-          const spec = JSON.parse(content);
-          const undocumented = [];
-          const drift = [];
-          for (const exp of spec.exports || []) {
-            const docs = exp.docs;
-            if (!docs) continue;
-            if ((docs.missing?.length || 0) > 0 || (docs.coverageScore || 0) < 100) {
-              undocumented.push(exp.name);
-            }
-            for (const d of docs.drift || []) {
-              drift.push({ export: exp.name, type: d.type, issue: d.issue });
-            }
-          }
-          console.log(JSON.stringify({
-            coverage: spec.docs?.coverageScore || 0,
-            exportCount: spec.exports?.length || 0,
-            typeCount: spec.types?.length || 0,
-            undocumented: undocumented.slice(0, 50),
-            drift: drift.slice(0, 20),
-            driftCount: drift.length,
-          }));
-        } catch (e) {
-          console.error('Extract error:', e.message);
-          process.exit(1);
-        }
-      `.replace(/\n/g, ' ');
-
-      const nodeCapture = createCaptureStream();
-      const nodeResult = await sandbox.runCommand({
-        cmd: 'node',
-        args: ['-e', extractScript],
-        stdout: nodeCapture.stream,
-        stderr: nodeCapture.stream,
-      });
-
-      const summaryJson = nodeCapture.getOutput();
-      if (nodeResult.exitCode !== 0 || !summaryJson.trim()) {
-        throw new Error(`Failed to extract summary: ${summaryJson.slice(0, 300)}`);
-      }
-
-      const summary = JSON.parse(summaryJson.trim()) as {
-        coverage: number;
-        exportCount: number;
-        typeCount: number;
-        undocumented: string[];
-        drift: ScanResult['drift'];
-        driftCount: number;
-      };
-
-      const result: ScanResult = {
-        owner: options.owner,
-        repo: options.repo,
-        ref: options.ref,
-        packageName: options.package,
-        coverage: summary.coverage,
-        exportCount: summary.exportCount,
-        typeCount: summary.typeCount,
-        driftCount: summary.driftCount,
-        undocumented: summary.undocumented,
-        drift: summary.drift,
-      };
-
-      sendEvent({ type: 'complete', result });
-    } finally {
-      await sandbox.stop();
-    }
-  } catch (error) {
-    const message = error instanceof Error ? error.message : String(error);
-    sendEvent({ type: 'error', message });
-  }
-}
package/api/scan.ts
DELETED
@@ -1,63 +0,0 @@
-import { parseGitHubUrl } from '@doccov/sdk';
-import type { VercelRequest, VercelResponse } from '@vercel/node';
-
-export const config = {
-  runtime: 'nodejs',
-  maxDuration: 10,
-};
-
-interface ScanRequestBody {
-  url: string;
-  ref?: string;
-  package?: string;
-}
-
-export default async function handler(req: VercelRequest, res: VercelResponse) {
-  // CORS
-  res.setHeader('Access-Control-Allow-Origin', '*');
-  res.setHeader('Access-Control-Allow-Methods', 'POST, OPTIONS');
-  res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
-
-  if (req.method === 'OPTIONS') {
-    return res.status(200).end();
-  }
-
-  if (req.method !== 'POST') {
-    return res.status(405).json({ error: 'Method not allowed' });
-  }
-
-  const body = req.body as ScanRequestBody;
-
-  if (!body.url) {
-    return res.status(400).json({ error: 'url is required' });
-  }
-
-  // Parse GitHub URL using SDK
-  let parsed;
-  try {
-    parsed = parseGitHubUrl(body.url, body.ref ?? 'main');
-  } catch {
-    return res.status(400).json({ error: 'Invalid GitHub URL' });
-  }
-
-  // Generate a job ID
-  const jobId = `scan-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
-
-  // Build stream URL with params encoded
-  const params = new URLSearchParams({
-    url: body.url,
-    ref: parsed.ref,
-    owner: parsed.owner,
-    repo: parsed.repo,
-  });
-  if (body.package) {
-    params.set('package', body.package);
-  }
-
-  // Return job ID and stream URL with all params
-  return res.status(202).json({
-    jobId,
-    status: 'pending',
-    streamUrl: `/scan-stream?${params.toString()}`,
-  });
-}