@doccov/api 0.4.0 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +19 -0
- package/api/index.ts +105 -26
- package/migrations/005_coverage_sdk_field_names.ts +41 -0
- package/package.json +4 -4
- package/src/index.ts +36 -2
- package/src/middleware/anonymous-rate-limit.ts +131 -0
- package/src/routes/ai.ts +353 -0
- package/src/routes/badge.ts +122 -32
- package/src/routes/billing.ts +65 -0
- package/src/routes/coverage.ts +53 -48
- package/src/routes/demo.ts +606 -0
- package/src/routes/github-app.ts +368 -0
- package/src/routes/invites.ts +90 -0
- package/src/routes/orgs.ts +249 -0
- package/src/routes/spec-v1.ts +165 -0
- package/src/routes/spec.ts +186 -0
- package/src/utils/github-app.ts +196 -0
- package/src/utils/github-checks.ts +498 -0
- package/src/utils/remote-analyzer.ts +251 -0
- package/src/utils/spec-cache.ts +131 -0
- package/src/utils/spec-diff-core.ts +406 -0
- package/src/utils/github.ts +0 -5
|
@@ -0,0 +1,606 @@
|
|
|
1
|
+
/**
 * Demo route - public "Try it Now" endpoint for analyzing npm packages
 * Uses the /plan and /execute-stream endpoints from the Vercel API
 */

import { fetchGitHubContext, parseScanGitHubUrl } from '@doccov/sdk';
import { Hono } from 'hono';
import { streamSSE } from 'hono/streaming';
import { anonymousRateLimit } from '../middleware/anonymous-rate-limit';

export const demoRoute = new Hono();

// Vercel API URL (where /plan and /execute-stream live)
// Overridable via env for staging/local; falls back to the production deployment.
const VERCEL_API_URL = process.env.VERCEL_API_URL || 'https://api-khaki-phi.vercel.app';

// Rate limit: 5 analyses per hour per IP
// Registered before any route so it applies to every demo endpoint.
demoRoute.use(
  '*',
  anonymousRateLimit({
    windowMs: 60 * 60 * 1000, // 1 hour
    max: 5,
    message: 'Demo limit reached. Sign up for unlimited access.',
    upgradeUrl: 'https://doccov.com/pricing',
  }),
);
|
|
26
|
+
|
|
27
|
+
/**
 * npm package info from registry
 */
interface NpmPackageInfo {
  name: string; // published package name (as returned by the registry)
  version: string; // latest published version
  description?: string; // package description, when present
  repository?: string; // https GitHub repo URL; undefined when missing or not on github.com
}
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Fetch package info from npm registry
|
|
39
|
+
*/
|
|
40
|
+
async function fetchNpmPackage(packageName: string): Promise<NpmPackageInfo> {
|
|
41
|
+
const encodedName = encodeURIComponent(packageName);
|
|
42
|
+
const url = `https://registry.npmjs.org/${encodedName}/latest`;
|
|
43
|
+
|
|
44
|
+
const res = await fetch(url, {
|
|
45
|
+
headers: { Accept: 'application/json' },
|
|
46
|
+
});
|
|
47
|
+
|
|
48
|
+
if (!res.ok) {
|
|
49
|
+
if (res.status === 404) {
|
|
50
|
+
throw new Error(`Package "${packageName}" not found on npm`);
|
|
51
|
+
}
|
|
52
|
+
throw new Error(`npm registry error: ${res.status}`);
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
const data = (await res.json()) as {
|
|
56
|
+
name: string;
|
|
57
|
+
version: string;
|
|
58
|
+
description?: string;
|
|
59
|
+
repository?: string | { type?: string; url?: string };
|
|
60
|
+
};
|
|
61
|
+
|
|
62
|
+
// Extract and normalize GitHub URL from repository field
|
|
63
|
+
let repoUrl: string | undefined;
|
|
64
|
+
if (data.repository) {
|
|
65
|
+
if (typeof data.repository === 'string') {
|
|
66
|
+
repoUrl = data.repository;
|
|
67
|
+
} else if (data.repository.url) {
|
|
68
|
+
// Normalize: git+https://github.com/... or git://github.com/...
|
|
69
|
+
repoUrl = data.repository.url
|
|
70
|
+
.replace(/^git\+/, '')
|
|
71
|
+
.replace(/^git:\/\//, 'https://')
|
|
72
|
+
.replace(/\.git$/, '');
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
// Validate it's a GitHub URL
|
|
77
|
+
if (repoUrl && !repoUrl.includes('github.com')) {
|
|
78
|
+
repoUrl = undefined;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
return {
|
|
82
|
+
name: data.name,
|
|
83
|
+
version: data.version,
|
|
84
|
+
description: data.description,
|
|
85
|
+
repository: repoUrl,
|
|
86
|
+
};
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
/**
 * Analysis result summary (matches SDK CoverageSnapshot naming)
 */
interface AnalysisSummary {
  packageName: string; // analyzed package's name
  version: string; // analyzed package's version
  coverageScore: number; // coverage value forwarded from the upstream execute summary
  totalExports: number; // number of exported symbols found
  documentedExports: number; // exports that have documentation
  driftCount: number; // docs/code drift issues detected (0 when upstream omits it)
  topUndocumented: string[]; // sample of undocumented export names
  topDrift: Array<{ name: string; issue: string }>; // sample of drift issues
}
|
|
102
|
+
|
|
103
|
+
/**
 * Workspace package info for monorepo detection
 */
interface WorkspacePackageInfo {
  name: string; // package name from its package.json
  path: string; // repo-relative directory, e.g. "packages/foo"
  private: boolean; // package.json "private" flag (false when absent)
}
|
|
111
|
+
|
|
112
|
+
/**
|
|
113
|
+
* Resolve workspace patterns to actual package names via GitHub API.
|
|
114
|
+
* Fetches package.json from each directory to get real package names.
|
|
115
|
+
*/
|
|
116
|
+
async function resolveGitHubPackages(
|
|
117
|
+
owner: string,
|
|
118
|
+
repo: string,
|
|
119
|
+
ref: string,
|
|
120
|
+
patterns: string[],
|
|
121
|
+
): Promise<WorkspacePackageInfo[]> {
|
|
122
|
+
const packages: WorkspacePackageInfo[] = [];
|
|
123
|
+
const seen = new Set<string>();
|
|
124
|
+
|
|
125
|
+
for (const pattern of patterns) {
|
|
126
|
+
// Extract base directory from pattern: "packages/*" -> "packages"
|
|
127
|
+
const baseDir = pattern.replace(/\/?\*\*?$/, '');
|
|
128
|
+
if (!baseDir || baseDir.includes('*')) continue;
|
|
129
|
+
|
|
130
|
+
// List directories via GitHub API
|
|
131
|
+
const contentsUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${baseDir}?ref=${ref}`;
|
|
132
|
+
const contentsRes = await fetch(contentsUrl, {
|
|
133
|
+
headers: { 'User-Agent': 'DocCov', Accept: 'application/vnd.github.v3+json' },
|
|
134
|
+
});
|
|
135
|
+
|
|
136
|
+
if (!contentsRes.ok) continue;
|
|
137
|
+
|
|
138
|
+
const contents = (await contentsRes.json()) as Array<{ name: string; type: string }>;
|
|
139
|
+
|
|
140
|
+
// Fetch package.json from each subdirectory
|
|
141
|
+
for (const item of contents) {
|
|
142
|
+
if (item.type !== 'dir') continue;
|
|
143
|
+
|
|
144
|
+
const pkgPath = `${baseDir}/${item.name}`;
|
|
145
|
+
const pkgJsonUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${pkgPath}/package.json`;
|
|
146
|
+
|
|
147
|
+
try {
|
|
148
|
+
const pkgRes = await fetch(pkgJsonUrl);
|
|
149
|
+
if (!pkgRes.ok) continue;
|
|
150
|
+
|
|
151
|
+
const pkg = (await pkgRes.json()) as { name?: string; private?: boolean };
|
|
152
|
+
if (pkg.name && !seen.has(pkg.name)) {
|
|
153
|
+
seen.add(pkg.name);
|
|
154
|
+
packages.push({
|
|
155
|
+
name: pkg.name,
|
|
156
|
+
path: pkgPath,
|
|
157
|
+
private: pkg.private ?? false,
|
|
158
|
+
});
|
|
159
|
+
}
|
|
160
|
+
} catch {
|
|
161
|
+
// Skip invalid package.json
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
return packages.sort((a, b) => a.name.localeCompare(b.name));
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
// GET /demo/analyze?package=lodash
|
|
170
|
+
demoRoute.get('/analyze', async (c) => {
|
|
171
|
+
const packageName = c.req.query('package');
|
|
172
|
+
|
|
173
|
+
if (!packageName) {
|
|
174
|
+
return c.json({ error: 'Package name required' }, 400);
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
// Validate package name (basic sanitation)
|
|
178
|
+
if (!/^(@[\w-]+\/)?[\w.-]+$/.test(packageName)) {
|
|
179
|
+
return c.json({ error: 'Invalid package name format' }, 400);
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
return streamSSE(c, async (stream) => {
|
|
183
|
+
const sendEvent = async (
|
|
184
|
+
type: 'status' | 'log' | 'result' | 'error',
|
|
185
|
+
data: { step?: string; message?: string; data?: unknown },
|
|
186
|
+
) => {
|
|
187
|
+
await stream.writeSSE({
|
|
188
|
+
data: JSON.stringify({ type, ...data }),
|
|
189
|
+
event: type === 'error' ? 'error' : type === 'result' ? 'complete' : 'progress',
|
|
190
|
+
});
|
|
191
|
+
};
|
|
192
|
+
|
|
193
|
+
try {
|
|
194
|
+
// Step 1: Fetch from npm registry
|
|
195
|
+
await sendEvent('status', {
|
|
196
|
+
step: 'npm',
|
|
197
|
+
message: `Fetching ${packageName} from npm registry...`,
|
|
198
|
+
});
|
|
199
|
+
|
|
200
|
+
const npmInfo = await fetchNpmPackage(packageName);
|
|
201
|
+
|
|
202
|
+
await sendEvent('log', {
|
|
203
|
+
message: `Found ${npmInfo.name}@${npmInfo.version}`,
|
|
204
|
+
});
|
|
205
|
+
|
|
206
|
+
if (!npmInfo.repository) {
|
|
207
|
+
await sendEvent('error', {
|
|
208
|
+
message: 'No GitHub repository linked to this package',
|
|
209
|
+
});
|
|
210
|
+
return;
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
await sendEvent('log', {
|
|
214
|
+
message: `Repository: ${npmInfo.repository}`,
|
|
215
|
+
});
|
|
216
|
+
|
|
217
|
+
// Step 2: Generate build plan via /plan endpoint
|
|
218
|
+
await sendEvent('status', {
|
|
219
|
+
step: 'plan',
|
|
220
|
+
message: 'Generating build plan...',
|
|
221
|
+
});
|
|
222
|
+
|
|
223
|
+
const planResponse = await fetch(`${VERCEL_API_URL}/plan`, {
|
|
224
|
+
method: 'POST',
|
|
225
|
+
headers: { 'Content-Type': 'application/json' },
|
|
226
|
+
body: JSON.stringify({
|
|
227
|
+
url: npmInfo.repository,
|
|
228
|
+
package: packageName.startsWith('@') ? packageName : undefined,
|
|
229
|
+
}),
|
|
230
|
+
});
|
|
231
|
+
|
|
232
|
+
if (!planResponse.ok) {
|
|
233
|
+
const errorData = (await planResponse.json()) as { error?: string };
|
|
234
|
+
throw new Error(errorData.error || `Plan generation failed: ${planResponse.status}`);
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
const planData = (await planResponse.json()) as {
|
|
238
|
+
plan: unknown;
|
|
239
|
+
context: { isMonorepo: boolean; packageManager: string };
|
|
240
|
+
};
|
|
241
|
+
|
|
242
|
+
await sendEvent('log', {
|
|
243
|
+
message: `Build plan ready (${planData.context.packageManager}${planData.context.isMonorepo ? ', monorepo' : ''})`,
|
|
244
|
+
});
|
|
245
|
+
|
|
246
|
+
// Step 3: Execute build plan via /execute-stream endpoint
|
|
247
|
+
await sendEvent('status', {
|
|
248
|
+
step: 'build',
|
|
249
|
+
message: 'Building and analyzing...',
|
|
250
|
+
});
|
|
251
|
+
|
|
252
|
+
const executeResponse = await fetch(`${VERCEL_API_URL}/execute-stream`, {
|
|
253
|
+
method: 'POST',
|
|
254
|
+
headers: { 'Content-Type': 'application/json' },
|
|
255
|
+
body: JSON.stringify({ plan: planData.plan }),
|
|
256
|
+
});
|
|
257
|
+
|
|
258
|
+
if (!executeResponse.ok || !executeResponse.body) {
|
|
259
|
+
throw new Error(`Execution failed: ${executeResponse.status}`);
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
// Stream the execute-stream SSE events and forward relevant ones
|
|
263
|
+
const reader = executeResponse.body.getReader();
|
|
264
|
+
const decoder = new TextDecoder();
|
|
265
|
+
let buffer = '';
|
|
266
|
+
|
|
267
|
+
while (true) {
|
|
268
|
+
const { done, value } = await reader.read();
|
|
269
|
+
if (done) break;
|
|
270
|
+
|
|
271
|
+
buffer += decoder.decode(value, { stream: true });
|
|
272
|
+
const lines = buffer.split('\n');
|
|
273
|
+
buffer = lines.pop() || '';
|
|
274
|
+
|
|
275
|
+
for (const line of lines) {
|
|
276
|
+
if (line.startsWith('event:')) {
|
|
277
|
+
const eventType = line.slice(7).trim();
|
|
278
|
+
|
|
279
|
+
// Get the next data line
|
|
280
|
+
const dataLineIndex = lines.indexOf(line) + 1;
|
|
281
|
+
if (dataLineIndex < lines.length && lines[dataLineIndex].startsWith('data:')) {
|
|
282
|
+
const dataStr = lines[dataLineIndex].slice(5).trim();
|
|
283
|
+
try {
|
|
284
|
+
const eventData = JSON.parse(dataStr) as {
|
|
285
|
+
stage?: string;
|
|
286
|
+
message?: string;
|
|
287
|
+
stepId?: string;
|
|
288
|
+
name?: string;
|
|
289
|
+
success?: boolean;
|
|
290
|
+
summary?: {
|
|
291
|
+
name: string;
|
|
292
|
+
version: string;
|
|
293
|
+
coverage: number;
|
|
294
|
+
exports: number;
|
|
295
|
+
documented: number;
|
|
296
|
+
undocumented: number;
|
|
297
|
+
driftCount: number;
|
|
298
|
+
topUndocumented: string[];
|
|
299
|
+
topDrift: Array<{ name: string; issue: string }>;
|
|
300
|
+
};
|
|
301
|
+
error?: string;
|
|
302
|
+
};
|
|
303
|
+
|
|
304
|
+
// Forward progress events
|
|
305
|
+
if (eventType === 'progress') {
|
|
306
|
+
await sendEvent('log', { message: eventData.message || eventData.stage });
|
|
307
|
+
} else if (eventType === 'step:start') {
|
|
308
|
+
await sendEvent('status', {
|
|
309
|
+
step: eventData.stepId === 'analyze' ? 'analyze' : 'build',
|
|
310
|
+
message: eventData.name || `Running ${eventData.stepId}...`,
|
|
311
|
+
});
|
|
312
|
+
} else if (eventType === 'step:complete' && eventData.stepId) {
|
|
313
|
+
await sendEvent('log', {
|
|
314
|
+
message: `${eventData.stepId} completed`,
|
|
315
|
+
});
|
|
316
|
+
} else if (eventType === 'complete' && eventData.summary) {
|
|
317
|
+
// Transform summary to our format (SDK-aligned field names)
|
|
318
|
+
const summary: AnalysisSummary = {
|
|
319
|
+
packageName: eventData.summary.name,
|
|
320
|
+
version: eventData.summary.version,
|
|
321
|
+
coverageScore: eventData.summary.coverage,
|
|
322
|
+
totalExports: eventData.summary.exports,
|
|
323
|
+
documentedExports: eventData.summary.documented,
|
|
324
|
+
driftCount: eventData.summary.driftCount ?? 0,
|
|
325
|
+
topUndocumented: eventData.summary.topUndocumented ?? [],
|
|
326
|
+
topDrift: eventData.summary.topDrift ?? [],
|
|
327
|
+
};
|
|
328
|
+
|
|
329
|
+
await sendEvent('log', {
|
|
330
|
+
message: `Found ${summary.totalExports} exports, ${summary.documentedExports} documented`,
|
|
331
|
+
});
|
|
332
|
+
|
|
333
|
+
await sendEvent('status', {
|
|
334
|
+
step: 'complete',
|
|
335
|
+
message: 'Analysis complete!',
|
|
336
|
+
});
|
|
337
|
+
|
|
338
|
+
await sendEvent('result', { data: summary });
|
|
339
|
+
return;
|
|
340
|
+
} else if (eventType === 'error') {
|
|
341
|
+
throw new Error(eventData.error || 'Execution failed');
|
|
342
|
+
}
|
|
343
|
+
} catch (parseError) {
|
|
344
|
+
// Ignore JSON parse errors for incomplete data
|
|
345
|
+
if (parseError instanceof SyntaxError) continue;
|
|
346
|
+
throw parseError;
|
|
347
|
+
}
|
|
348
|
+
}
|
|
349
|
+
}
|
|
350
|
+
}
|
|
351
|
+
}
|
|
352
|
+
|
|
353
|
+
// If we get here without a complete event, something went wrong
|
|
354
|
+
throw new Error('Execution completed without results');
|
|
355
|
+
} catch (err) {
|
|
356
|
+
const message = err instanceof Error ? err.message : 'Analysis failed';
|
|
357
|
+
await sendEvent('error', { message });
|
|
358
|
+
}
|
|
359
|
+
});
|
|
360
|
+
});
|
|
361
|
+
|
|
362
|
+
// POST /demo/detect - detect monorepo packages from GitHub URL
|
|
363
|
+
demoRoute.post('/detect', async (c) => {
|
|
364
|
+
const body = (await c.req.json()) as { url?: string };
|
|
365
|
+
|
|
366
|
+
if (!body.url) {
|
|
367
|
+
return c.json({ error: 'GitHub URL required' }, 400);
|
|
368
|
+
}
|
|
369
|
+
|
|
370
|
+
// Validate and parse GitHub URL
|
|
371
|
+
const parsed = parseScanGitHubUrl(body.url);
|
|
372
|
+
if (!parsed) {
|
|
373
|
+
return c.json({ error: 'Invalid GitHub URL' }, 400);
|
|
374
|
+
}
|
|
375
|
+
|
|
376
|
+
try {
|
|
377
|
+
// Fetch context from GitHub
|
|
378
|
+
const context = await fetchGitHubContext(body.url);
|
|
379
|
+
|
|
380
|
+
// If not a monorepo, return simple response
|
|
381
|
+
if (!context.workspace.isMonorepo) {
|
|
382
|
+
return c.json({
|
|
383
|
+
isMonorepo: false,
|
|
384
|
+
packageManager: context.packageManager,
|
|
385
|
+
owner: context.metadata.owner,
|
|
386
|
+
repo: context.metadata.repo,
|
|
387
|
+
ref: context.ref,
|
|
388
|
+
packages: [],
|
|
389
|
+
});
|
|
390
|
+
}
|
|
391
|
+
|
|
392
|
+
// Resolve actual package names from workspace patterns
|
|
393
|
+
const patterns = context.workspace.packages || ['packages/*'];
|
|
394
|
+
const packages = await resolveGitHubPackages(
|
|
395
|
+
context.metadata.owner,
|
|
396
|
+
context.metadata.repo,
|
|
397
|
+
context.ref,
|
|
398
|
+
patterns,
|
|
399
|
+
);
|
|
400
|
+
|
|
401
|
+
return c.json({
|
|
402
|
+
isMonorepo: true,
|
|
403
|
+
packageManager: context.packageManager,
|
|
404
|
+
owner: context.metadata.owner,
|
|
405
|
+
repo: context.metadata.repo,
|
|
406
|
+
ref: context.ref,
|
|
407
|
+
packages,
|
|
408
|
+
});
|
|
409
|
+
} catch (err) {
|
|
410
|
+
const message = err instanceof Error ? err.message : 'Detection failed';
|
|
411
|
+
return c.json({ error: message }, 500);
|
|
412
|
+
}
|
|
413
|
+
});
|
|
414
|
+
|
|
415
|
+
// GET /demo/analyze-repo?url=...&package=... - analyze GitHub repo directly
|
|
416
|
+
demoRoute.get('/analyze-repo', async (c) => {
|
|
417
|
+
const repoUrl = c.req.query('url');
|
|
418
|
+
const packageName = c.req.query('package');
|
|
419
|
+
|
|
420
|
+
if (!repoUrl) {
|
|
421
|
+
return c.json({ error: 'GitHub URL required' }, 400);
|
|
422
|
+
}
|
|
423
|
+
|
|
424
|
+
// Validate GitHub URL
|
|
425
|
+
const parsed = parseScanGitHubUrl(repoUrl);
|
|
426
|
+
if (!parsed) {
|
|
427
|
+
return c.json({ error: 'Invalid GitHub URL' }, 400);
|
|
428
|
+
}
|
|
429
|
+
|
|
430
|
+
return streamSSE(c, async (stream) => {
|
|
431
|
+
const sendEvent = async (
|
|
432
|
+
type: 'status' | 'log' | 'result' | 'error',
|
|
433
|
+
data: { step?: string; message?: string; data?: unknown },
|
|
434
|
+
) => {
|
|
435
|
+
await stream.writeSSE({
|
|
436
|
+
data: JSON.stringify({ type, ...data }),
|
|
437
|
+
event: type === 'error' ? 'error' : type === 'result' ? 'complete' : 'progress',
|
|
438
|
+
});
|
|
439
|
+
};
|
|
440
|
+
|
|
441
|
+
try {
|
|
442
|
+
// Step 1: Log repo info
|
|
443
|
+
await sendEvent('status', {
|
|
444
|
+
step: 'repo',
|
|
445
|
+
message: `Analyzing ${parsed.owner}/${parsed.repo}...`,
|
|
446
|
+
});
|
|
447
|
+
|
|
448
|
+
await sendEvent('log', {
|
|
449
|
+
message: `Repository: ${repoUrl}`,
|
|
450
|
+
});
|
|
451
|
+
|
|
452
|
+
if (packageName) {
|
|
453
|
+
await sendEvent('log', {
|
|
454
|
+
message: `Package: ${packageName}`,
|
|
455
|
+
});
|
|
456
|
+
}
|
|
457
|
+
|
|
458
|
+
// Step 2: Generate build plan via /plan endpoint
|
|
459
|
+
await sendEvent('status', {
|
|
460
|
+
step: 'plan',
|
|
461
|
+
message: 'Generating build plan...',
|
|
462
|
+
});
|
|
463
|
+
|
|
464
|
+
const planResponse = await fetch(`${VERCEL_API_URL}/plan`, {
|
|
465
|
+
method: 'POST',
|
|
466
|
+
headers: { 'Content-Type': 'application/json' },
|
|
467
|
+
body: JSON.stringify({
|
|
468
|
+
url: repoUrl,
|
|
469
|
+
package: packageName,
|
|
470
|
+
}),
|
|
471
|
+
});
|
|
472
|
+
|
|
473
|
+
if (!planResponse.ok) {
|
|
474
|
+
const errorData = (await planResponse.json()) as { error?: string };
|
|
475
|
+
throw new Error(errorData.error || `Plan generation failed: ${planResponse.status}`);
|
|
476
|
+
}
|
|
477
|
+
|
|
478
|
+
const planData = (await planResponse.json()) as {
|
|
479
|
+
plan: unknown;
|
|
480
|
+
context: { isMonorepo: boolean; packageManager: string };
|
|
481
|
+
};
|
|
482
|
+
|
|
483
|
+
await sendEvent('log', {
|
|
484
|
+
message: `Build plan ready (${planData.context.packageManager}${planData.context.isMonorepo ? ', monorepo' : ''})`,
|
|
485
|
+
});
|
|
486
|
+
|
|
487
|
+
// Step 3: Execute build plan via /execute-stream endpoint
|
|
488
|
+
await sendEvent('status', {
|
|
489
|
+
step: 'build',
|
|
490
|
+
message: 'Building and analyzing...',
|
|
491
|
+
});
|
|
492
|
+
|
|
493
|
+
const executeResponse = await fetch(`${VERCEL_API_URL}/execute-stream`, {
|
|
494
|
+
method: 'POST',
|
|
495
|
+
headers: { 'Content-Type': 'application/json' },
|
|
496
|
+
body: JSON.stringify({ plan: planData.plan }),
|
|
497
|
+
});
|
|
498
|
+
|
|
499
|
+
if (!executeResponse.ok || !executeResponse.body) {
|
|
500
|
+
throw new Error(`Execution failed: ${executeResponse.status}`);
|
|
501
|
+
}
|
|
502
|
+
|
|
503
|
+
// Stream the execute-stream SSE events and forward relevant ones
|
|
504
|
+
const reader = executeResponse.body.getReader();
|
|
505
|
+
const decoder = new TextDecoder();
|
|
506
|
+
let buffer = '';
|
|
507
|
+
|
|
508
|
+
while (true) {
|
|
509
|
+
const { done, value } = await reader.read();
|
|
510
|
+
if (done) break;
|
|
511
|
+
|
|
512
|
+
buffer += decoder.decode(value, { stream: true });
|
|
513
|
+
const lines = buffer.split('\n');
|
|
514
|
+
buffer = lines.pop() || '';
|
|
515
|
+
|
|
516
|
+
for (const line of lines) {
|
|
517
|
+
if (line.startsWith('event:')) {
|
|
518
|
+
const eventType = line.slice(7).trim();
|
|
519
|
+
|
|
520
|
+
// Get the next data line
|
|
521
|
+
const dataLineIndex = lines.indexOf(line) + 1;
|
|
522
|
+
if (dataLineIndex < lines.length && lines[dataLineIndex].startsWith('data:')) {
|
|
523
|
+
const dataStr = lines[dataLineIndex].slice(5).trim();
|
|
524
|
+
try {
|
|
525
|
+
const eventData = JSON.parse(dataStr) as {
|
|
526
|
+
stage?: string;
|
|
527
|
+
message?: string;
|
|
528
|
+
stepId?: string;
|
|
529
|
+
name?: string;
|
|
530
|
+
success?: boolean;
|
|
531
|
+
summary?: {
|
|
532
|
+
name: string;
|
|
533
|
+
version: string;
|
|
534
|
+
coverage: number;
|
|
535
|
+
exports: number;
|
|
536
|
+
documented: number;
|
|
537
|
+
undocumented: number;
|
|
538
|
+
driftCount: number;
|
|
539
|
+
topUndocumented: string[];
|
|
540
|
+
topDrift: Array<{ name: string; issue: string }>;
|
|
541
|
+
};
|
|
542
|
+
error?: string;
|
|
543
|
+
};
|
|
544
|
+
|
|
545
|
+
// Forward progress events
|
|
546
|
+
if (eventType === 'progress') {
|
|
547
|
+
await sendEvent('log', { message: eventData.message || eventData.stage });
|
|
548
|
+
} else if (eventType === 'step:start') {
|
|
549
|
+
await sendEvent('status', {
|
|
550
|
+
step: eventData.stepId === 'analyze' ? 'analyze' : 'build',
|
|
551
|
+
message: eventData.name || `Running ${eventData.stepId}...`,
|
|
552
|
+
});
|
|
553
|
+
} else if (eventType === 'step:complete' && eventData.stepId) {
|
|
554
|
+
await sendEvent('log', {
|
|
555
|
+
message: `${eventData.stepId} completed`,
|
|
556
|
+
});
|
|
557
|
+
} else if (eventType === 'complete' && eventData.summary) {
|
|
558
|
+
// Transform summary to our format (SDK-aligned field names)
|
|
559
|
+
const summary: AnalysisSummary = {
|
|
560
|
+
packageName: eventData.summary.name,
|
|
561
|
+
version: eventData.summary.version,
|
|
562
|
+
coverageScore: eventData.summary.coverage,
|
|
563
|
+
totalExports: eventData.summary.exports,
|
|
564
|
+
documentedExports: eventData.summary.documented,
|
|
565
|
+
driftCount: eventData.summary.driftCount ?? 0,
|
|
566
|
+
topUndocumented: eventData.summary.topUndocumented ?? [],
|
|
567
|
+
topDrift: eventData.summary.topDrift ?? [],
|
|
568
|
+
};
|
|
569
|
+
|
|
570
|
+
await sendEvent('log', {
|
|
571
|
+
message: `Found ${summary.totalExports} exports, ${summary.documentedExports} documented`,
|
|
572
|
+
});
|
|
573
|
+
|
|
574
|
+
await sendEvent('status', {
|
|
575
|
+
step: 'complete',
|
|
576
|
+
message: 'Analysis complete!',
|
|
577
|
+
});
|
|
578
|
+
|
|
579
|
+
await sendEvent('result', { data: summary });
|
|
580
|
+
return;
|
|
581
|
+
} else if (eventType === 'error') {
|
|
582
|
+
throw new Error(eventData.error || 'Execution failed');
|
|
583
|
+
}
|
|
584
|
+
} catch (parseError) {
|
|
585
|
+
// Ignore JSON parse errors for incomplete data
|
|
586
|
+
if (parseError instanceof SyntaxError) continue;
|
|
587
|
+
throw parseError;
|
|
588
|
+
}
|
|
589
|
+
}
|
|
590
|
+
}
|
|
591
|
+
}
|
|
592
|
+
}
|
|
593
|
+
|
|
594
|
+
// If we get here without a complete event, something went wrong
|
|
595
|
+
throw new Error('Execution completed without results');
|
|
596
|
+
} catch (err) {
|
|
597
|
+
const message = err instanceof Error ? err.message : 'Analysis failed';
|
|
598
|
+
await sendEvent('error', { message });
|
|
599
|
+
}
|
|
600
|
+
});
|
|
601
|
+
});
|
|
602
|
+
|
|
603
|
+
// Health check
|
|
604
|
+
demoRoute.get('/health', (c) => {
|
|
605
|
+
return c.json({ status: 'ok' });
|
|
606
|
+
});
|