birdeatsbug-mcp-server 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +148 -0
  2. package/package.json +46 -0
  3. package/src/index.js +1100 -0
package/README.md ADDED
@@ -0,0 +1,148 @@
1
+ # birdeatsbug-mcp-server
2
+
3
+ An [MCP (Model Context Protocol)](https://modelcontextprotocol.io) server that connects AI assistants to **BirdeatsBug** bug reports — extracting console logs, network logs, and video frames for deep, AI-powered root-cause analysis.
4
+
5
+ ## What it does
6
+
7
+ - Launches headless Chromium, navigates to a BirdeatsBug report URL, and intercepts all runtime API responses
8
+ - Extracts structured **console logs**, **network requests**, and **video recording URLs**
9
+ - Downloads the bug recording and extracts **JPEG video frames** for visual AI analysis
10
+ - Merges errors and network failures into a **chronological event timeline**
11
+ - Auto-detects the likely bug moment and clusters frames around it
12
+
13
+ ## Tools
14
+
15
+ | Tool | Description |
16
+ |---|---|
17
+ | `inspect_birdeatsbug_report` | Metadata, access state, artifact counts |
18
+ | `get_birdeatsbug_console_logs` | Console entries (level, message, timestamp) |
19
+ | `get_birdeatsbug_network_logs` | Network requests (method, URL, status, duration) |
20
+ | `get_birdeatsbug_video_frames` | JPEG frames extracted from the bug recording for vision AI |
21
+ | `generate_birdeatsbug_report` | All-in-one: title, description, timeline, errors, failures, and video frames |
22
+
23
+ ## Installation
24
+
25
+ ### Option A — npx (no install needed)
26
+
27
+ ```bash
28
+ npx birdeatsbug-mcp-server
29
+ ```
30
+
31
+ ### Option B — global install
32
+
33
+ ```bash
34
+ npm install -g birdeatsbug-mcp-server
35
+ birdeatsbug-mcp-server
36
+ ```
37
+
38
+ > **Note:** On first run, the postinstall script installs the Chromium browser (~200 MB). This only happens once.
39
+
40
+ ---
41
+
42
+ ## VS Code (GitHub Copilot / Continue)
43
+
44
+ Add to your **global** MCP config at `~/Library/Application Support/Code/User/mcp.json` (macOS):
45
+
46
+ ```json
47
+ {
48
+ "servers": {
49
+ "birdeatsbug": {
50
+ "type": "stdio",
51
+ "command": "npx",
52
+ "args": ["-y", "birdeatsbug-mcp-server"]
53
+ }
54
+ }
55
+ }
56
+ ```
57
+
58
+ Or add to a workspace's `.vscode/mcp.json`:
59
+
60
+ ```json
61
+ {
62
+ "servers": {
63
+ "birdeatsbug": {
64
+ "type": "stdio",
65
+ "command": "npx",
66
+ "args": ["-y", "birdeatsbug-mcp-server"]
67
+ }
68
+ }
69
+ }
70
+ ```
71
+
72
+ ## Claude Desktop
73
+
74
+ Edit `~/Library/Application Support/Claude/claude_desktop_config.json`:
75
+
76
+ ```json
77
+ {
78
+ "mcpServers": {
79
+ "birdeatsbug": {
80
+ "command": "npx",
81
+ "args": ["-y", "birdeatsbug-mcp-server"]
82
+ }
83
+ }
84
+ }
85
+ ```
86
+
87
+ Restart Claude Desktop after saving.
88
+
89
+ ---
90
+
91
+ ## Usage
92
+
93
+ Each tool accepts a `url` pointing to any `app.birdeatsbug.com/...` report URL.
94
+
95
+ **Quick start — full analysis:**
96
+ > "Use generate_birdeatsbug_report on https://app.birdeatsbug.com/sessions/YOUR_SESSION_ID"
97
+
98
+ **Video frame analysis with focus on a specific moment:**
99
+ > "Use get_birdeatsbug_video_frames on https://app.birdeatsbug.com/sessions/YOUR_SESSION_ID with focusTimeSec 8"
100
+
101
+ ### `generate_birdeatsbug_report` parameters
102
+
103
+ | Parameter | Default | Description |
104
+ |---|---|---|
105
+ | `url` | required | BirdeatsBug report URL |
106
+ | `frameCount` | 16 | Number of video frames to extract (max 30) |
107
+ | `focusTimeSec` | auto | Timestamp (seconds) to cluster frames around. Auto-detected from first error if omitted |
108
+ | `focusWindowSec` | 4 | Window width (seconds) around the focus timestamp |
109
+ | `logLimit` | 100 | Max console/network log entries to include |
110
+
111
+ ---
112
+
113
+ ## Private sessions
114
+
115
+ Most sessions require login. To access private reports, generate a Playwright storage state with your logged-in cookies and pass it via environment variable:
116
+
117
+ ```bash
118
+ BIRDEATSBUG_STORAGE_STATE=/path/to/storageState.json npx birdeatsbug-mcp-server
119
+ ```
120
+
121
+ Or in your MCP config:
122
+
123
+ ```json
124
+ {
125
+ "servers": {
126
+ "birdeatsbug": {
127
+ "type": "stdio",
128
+ "command": "npx",
129
+ "args": ["-y", "birdeatsbug-mcp-server"],
130
+ "env": {
131
+ "BIRDEATSBUG_STORAGE_STATE": "/path/to/storageState.json"
132
+ }
133
+ }
134
+ }
135
+ }
136
+ ```
137
+
138
+ ---
139
+
140
+ ## Requirements
141
+
142
+ - Node.js >= 18
143
+ - Chromium (installed automatically via `playwright install chromium`)
144
+ - ffmpeg (bundled via `ffmpeg-static` — no system install needed)
145
+
146
+ ## License
147
+
148
+ MIT
package/package.json ADDED
@@ -0,0 +1,46 @@
1
+ {
2
+ "name": "birdeatsbug-mcp-server",
3
+ "version": "0.1.0",
4
+ "description": "MCP server for BirdeatsBug — extracts console logs, network logs, and video frames from bug reports for AI-powered root-cause analysis",
5
+ "type": "module",
6
+ "main": "src/index.js",
7
+ "bin": {
8
+ "birdeatsbug-mcp-server": "src/index.js"
9
+ },
10
+ "scripts": {
11
+ "start": "node src/index.js",
12
+ "dev": "node --watch src/index.js",
13
+ "postinstall": "playwright install chromium --with-deps 2>/dev/null || playwright install chromium"
14
+ },
15
+ "keywords": [
16
+ "mcp",
17
+ "model-context-protocol",
18
+ "birdeatsbug",
19
+ "bug-report",
20
+ "debugging",
21
+ "ai",
22
+ "playwright",
23
+ "video-frames",
24
+ "console-logs",
25
+ "network-logs"
26
+ ],
27
+ "author": "BhavikaTibrewal",
28
+ "license": "MIT",
29
+ "repository": {
30
+ "type": "git",
31
+ "url": "https://github.com/BhavikaTibrewal/birdeatsbug-mcp-server.git"
32
+ },
33
+ "homepage": "https://github.com/BhavikaTibrewal/birdeatsbug-mcp-server#readme",
34
+ "bugs": {
35
+ "url": "https://github.com/BhavikaTibrewal/birdeatsbug-mcp-server/issues"
36
+ },
37
+ "engines": {
38
+ "node": ">=18"
39
+ },
40
+ "dependencies": {
41
+ "@modelcontextprotocol/sdk": "^1.17.5",
42
+ "ffmpeg-static": "^5.3.0",
43
+ "playwright": "^1.52.0",
44
+ "zod": "^3.24.1"
45
+ }
46
+ }
package/src/index.js ADDED
@@ -0,0 +1,1100 @@
1
+ #!/usr/bin/env node
2
+ import { Server } from "@modelcontextprotocol/sdk/server/index.js";
3
+ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
4
+ import {
5
+ CallToolRequestSchema,
6
+ ListToolsRequestSchema,
7
+ } from "@modelcontextprotocol/sdk/types.js";
8
+ import { execFile } from "child_process";
9
+ import { mkdtemp, readdir, readFile, rm, writeFile } from "fs/promises";
10
+ import { createRequire } from "module";
11
+ import { tmpdir } from "os";
12
+ import { join } from "path";
13
+ import { chromium } from "playwright";
14
+ import { promisify } from "util";
15
+ import { z } from "zod";
16
+
17
+ const require = createRequire(import.meta.url);
18
+ const ffmpegPath = require("ffmpeg-static");
19
+ const execFileAsync = promisify(execFile);
20
+
21
// Shared input schema: any valid URL, constrained to birdeatsbug.com hosts.
const URLSchema = z.object({
  url: z
    .string()
    .url()
    .refine((value) => value.includes("birdeatsbug.com"), {
      message: "Must be a BirdeatsBug URL",
    }),
});

// URL plus an optional cap on how many log entries to return.
const LimitedURLSchema = URLSchema.extend({
  limit: z.number().int().positive().max(2000).optional(),
});

// Inputs for frame extraction: frame budget plus an optional focus window.
const VideoFramesSchema = URLSchema.extend({
  frameCount: z.number().int().positive().max(30).optional().default(16),
  focusTimeSec: z.number().nonnegative().optional(),
  focusWindowSec: z.number().positive().optional().default(4),
});

// Inputs for the all-in-one report tool (frames + capped logs).
const GenerateReportSchema = URLSchema.extend({
  frameCount: z.number().int().positive().max(30).optional().default(16),
  focusTimeSec: z.number().nonnegative().optional(),
  focusWindowSec: z.number().positive().optional().default(4),
  logLimit: z.number().int().positive().max(500).optional().default(100),
});

// Optional path to a Playwright storage-state file (logged-in cookies),
// needed to open private sessions.
const STORAGE_STATE_PATH = process.env.BIRDEATSBUG_STORAGE_STATE;
const NAVIGATION_TIMEOUT_MS = 45000;
// Wait for network to go quiet after initial page load
const NETWORK_IDLE_TIMEOUT_MS = 10000;
// Hard cap after networkidle to let deferred API calls land
const POST_IDLE_WAIT_MS = 2000;
53
+
54
// De-duplicate an array, dropping falsy entries first.
const unique = (values) => {
  const seen = new Set();
  for (const value of values) {
    if (value) seen.add(value);
  }
  return [...seen];
};
55
+
56
/**
 * Depth-first traversal of an arbitrary JSON-like value.
 * Invokes `visitor(node, path)` for every non-nullish node (including
 * primitives), then recurses into array elements and object values,
 * extending `path` with the index/key at each step.
 */
const walk = (node, visitor, path = []) => {
  if (node == null) return;

  visitor(node, path);

  if (Array.isArray(node)) {
    for (let i = 0; i < node.length; i += 1) {
      walk(node[i], visitor, [...path, i]);
    }
  } else if (typeof node === "object") {
    for (const key of Object.keys(node)) {
      walk(node[key], visitor, [...path, key]);
    }
  }
};
74
+
75
/**
 * Heuristic: does this value look like a single console-log entry?
 * Plain strings always qualify; objects qualify when at least one key
 * name contains a log-related token (message, level, stack, ...).
 */
const likelyConsoleEntry = (entry) => {
  if (typeof entry === "string") return true;
  if (entry === null || typeof entry !== "object") return false;

  const tokens = [
    "message",
    "level",
    "severity",
    "stack",
    "console",
    "text",
    "warn",
    "error",
    "log",
  ];
  return Object.keys(entry).some((key) => {
    const lower = key.toLowerCase();
    return tokens.some((token) => lower.includes(token));
  });
};
99
+
100
/**
 * Heuristic: does this value look like a network-request record?
 * Requires at least two distinct network-ish tokens to appear across the
 * object's key names (request, response, status, url, method, ...).
 */
const likelyNetworkEntry = (entry) => {
  if (!entry || typeof entry !== "object") return false;

  const lowerKeys = Object.keys(entry).map((key) => key.toLowerCase());
  const tokens = [
    "request",
    "response",
    "status",
    "url",
    "method",
    "headers",
    "network",
  ];

  let hits = 0;
  for (const token of tokens) {
    if (lowerKeys.some((key) => key.includes(token))) hits += 1;
  }
  return hits >= 2;
};
118
+
119
/**
 * Normalize a raw console payload (string or object) into a uniform
 * { level, message, timestamp, source?, raw } record.
 *
 * Missing fields fall back to level "info" and a JSON-serialized message.
 */
const normalizeConsole = (entry) => {
  if (typeof entry === "string") {
    return { level: "info", message: entry, timestamp: null, raw: entry };
  }

  if (!entry || typeof entry !== "object") {
    return {
      level: "info",
      message: String(entry),
      timestamp: null,
      raw: entry,
    };
  }

  return {
    level: entry.level || entry.type || entry.severity || "info",
    message:
      entry.message ||
      entry.text ||
      entry.msg ||
      entry.body ||
      (typeof entry.error === "string" ? entry.error : JSON.stringify(entry)),
    // ?? (not ||) so a legitimate numeric timestamp of 0 — e.g. a relative
    // offset at the start of the recording — is preserved instead of being
    // coerced to null.
    timestamp: entry.timestamp ?? entry.time ?? entry.createdAt ?? null,
    source: entry.source ?? entry.origin ?? null,
    raw: entry,
  };
};
146
+
147
/**
 * Normalize a raw network payload into a uniform
 * { method, url, status, durationMs, timestamp, raw } record.
 * Looks at both flat fields and nested request/response sub-objects.
 */
const normalizeNetwork = (entry) => {
  if (!entry || typeof entry !== "object") {
    return {
      method: null,
      url: String(entry),
      status: null,
      durationMs: null,
      timestamp: null,
      raw: entry,
    };
  }

  const request = entry.request || {};
  const response = entry.response || {};

  return {
    method: entry.method ?? request.method ?? null,
    url: entry.url ?? request.url ?? entry.endpoint ?? null,
    // ?? (not ||) preserves legitimate zero values that the old || chain
    // coerced to null: status 0 (blocked/failed request), a 0 ms duration,
    // and a timestamp at offset 0.
    status: entry.status ?? response.status ?? null,
    durationMs:
      entry.duration ?? entry.durationMs ?? entry.timing?.total ?? null,
    timestamp: entry.timestamp ?? entry.time ?? entry.startedDateTime ?? null,
    raw: entry,
  };
};
172
+
173
/**
 * Collect candidate video URLs from captured API payloads and the page's
 * visible text.
 *
 * Two signals are used:
 *  1. Any string containing an http(s) URL ending in .mp4/.m3u8/.webm
 *     (optionally with a query string).
 *  2. Any http(s) string stored under a "video"/"record" key path in an
 *     API payload.
 *
 * Returns a de-duplicated array of URL strings.
 */
const extractVideoUrls = ({ apiPayloads, documentText }) => {
  const urls = [];
  // Non-global regex for .test(): a shared /g regex is stateful via
  // lastIndex and can silently skip matches. The /g variant is used only
  // with String.prototype.match below, which ignores lastIndex.
  const videoExt =
    /https?:\/\/[^\s"'<>]+\.(?:mp4|m3u8|webm)(?:\?[^\s"'<>]*)?/i;
  const videoExtAll = new RegExp(videoExt.source, "gi");

  for (const payload of apiPayloads) {
    walk(payload, (value, path) => {
      if (typeof value !== "string") return;

      if (videoExt.test(value)) {
        urls.push(value);
        return;
      }

      // Any other URL is kept only when it lives under a video/record key.
      if (/https?:\/\//i.test(value)) {
        const keyPath = path.map(String).join(".").toLowerCase();
        if (keyPath.includes("video") || keyPath.includes("record")) {
          urls.push(value);
        }
      }
    });
  }

  urls.push(...(documentText.match(videoExtAll) || []));

  return unique(urls);
};
202
+
203
/**
 * Pull console-log entries out of captured API payloads, plus a DOM text
 * fallback for lines that look like rendered log output.
 * Result is capped at 5000 entries.
 */
const extractConsoleLogs = ({ apiPayloads, documentTextLines }) => {
  const fromApi = [];

  for (const payload of apiPayloads) {
    walk(payload, (value, path) => {
      if (!likelyConsoleEntry(value)) return;
      // Anything under a network-ish key belongs to extractNetworkLogs.
      const keyPath = path.map(String).join(".").toLowerCase();
      if (keyPath.includes("network")) return;
      fromApi.push(normalizeConsole(value));
    });
  }

  const looksLikeLogLine = (line) =>
    /(^|\s)(log|warning|warn|error|info)(\s|:)/i.test(line) ||
    line.includes("(in promise)");

  const fromDom = documentTextLines
    .filter(looksLikeLogLine)
    .map((line) => normalizeConsole(line));

  return [...fromApi, ...fromDom].slice(0, 5000);
};
231
+
232
/**
 * Pull network-request entries out of captured API payloads, plus a DOM
 * text fallback for lines shaped like "METHOD https://... [status]".
 * Result is capped at 5000 entries.
 */
const extractNetworkLogs = ({ apiPayloads, documentTextLines }) => {
  const fromApi = [];

  for (const payload of apiPayloads) {
    walk(payload, (value) => {
      if (!likelyNetworkEntry(value)) return;
      const normalized = normalizeNetwork(value);
      // Keep only entries carrying at least a URL or a status code.
      if (normalized.url || normalized.status) fromApi.push(normalized);
    });
  }

  // Hoisted out of the loop: quick filter, then a capturing parse.
  const methodLine = /\b(GET|POST|PUT|PATCH|DELETE)\s+https?:\/\//i;
  const methodCapture =
    /\b(GET|POST|PUT|PATCH|DELETE)\s+(https?:\/\/\S+)(?:\s+(\d{3}))?/i;

  const fromDom = documentTextLines
    .filter((line) => methodLine.test(line))
    .map((line) => {
      const match = line.match(methodCapture);
      return {
        method: match?.[1] || null,
        url: match?.[2] || null,
        status: match?.[3] ? Number(match[3]) : null,
        durationMs: null,
        timestamp: null,
        raw: line,
      };
    });

  return [...fromApi, ...fromDom].slice(0, 5000);
};
266
+
267
/**
 * Fetch lightweight HTTP metadata (size, content type, caching headers)
 * for each candidate video URL using HEAD requests, so the binary is
 * never downloaded. Per-URL failures are captured as
 * { url, ok: false, error } instead of rejecting the whole batch.
 */
const fetchVideoMetadata = (urls) =>
  Promise.all(
    urls.map(async (url) => {
      try {
        const res = await fetch(url, { method: "HEAD" });
        const headers = Object.fromEntries(res.headers.entries());
        const rawLength = headers["content-length"];

        // Surface CDN-specific diagnostics (x-*, cf-*) without copying
        // every header. (x-goog-* is already covered by the x- prefix.)
        const extraHeaders = {};
        for (const [key, value] of Object.entries(headers)) {
          if (key.startsWith("x-") || key.startsWith("cf-")) {
            extraHeaders[key] = value;
          }
        }

        return {
          url,
          ok: res.ok,
          status: res.status,
          contentType: headers["content-type"] || null,
          contentLength: rawLength ? Number(rawLength) : null,
          contentLengthHuman: rawLength
            ? `${(Number(rawLength) / 1024 / 1024).toFixed(2)} MB`
            : null,
          lastModified: headers["last-modified"] || null,
          etag: headers["etag"] || null,
          extraHeaders,
        };
      } catch (err) {
        return { url, ok: false, error: err.message };
      }
    }),
  );
304
+
305
// Predicate applied to every response URL captured in collectRuntimeData.
// Capture any JSON-returning network response from the page.
// Previously this was restricted to specific birdeatsbug.com subdomains which
// caused empty results when the app fetched data from other origins (e.g.
// Supabase, CDN presigned URLs, etc.).
// NOTE(review): this now accepts responses from ALL origins, so unrelated
// third-party JSON (analytics, etc.) is also captured and later filtered by
// the heuristic extractors.
const IS_BIRDEATSBUG_API = (_responseUrl) => true;
310
+
311
/**
 * Launch headless Chromium, open a BirdeatsBug report URL, and harvest
 * everything the page loads at runtime.
 *
 * Captures every JSON API response body, the page's visible text, the
 * report title/description, candidate video URLs, and normalized
 * console/network logs. The browser is always closed, even on error.
 *
 * @param {string} url - BirdeatsBug report URL to visit.
 * @returns {Promise<object>} aggregate extraction result (see return literal).
 */
const collectRuntimeData = async (url) => {
  const browser = await chromium.launch({ headless: true });
  // Reuse a logged-in session when BIRDEATSBUG_STORAGE_STATE is set.
  const context = await browser.newContext(
    STORAGE_STATE_PATH ? { storageState: STORAGE_STATE_PATH } : {},
  );
  const page = await context.newPage();

  // Collect pending body-read promises so we can await them all before extraction.
  const pendingBodyReads = [];
  const apiPayloads = [];
  const responseMeta = [];

  page.on("response", (response) => {
    const responseUrl = response.url();
    if (!IS_BIRDEATSBUG_API(responseUrl)) {
      return;
    }

    const status = response.status();
    const contentType = response.headers()["content-type"] || "";

    // Metadata is recorded for every captured response, JSON or not.
    const meta = { url: responseUrl, status, contentType };
    responseMeta.push(meta);

    if (!contentType.includes("application/json")) {
      return;
    }

    // Capture the promise — don't await inline to avoid blocking the event loop.
    const bodyPromise = response
      .json()
      .then((body) => {
        apiPayloads.push(body);
      })
      .catch(() => {
        // Body already consumed or not parseable — skip silently.
      });

    pendingBodyReads.push(bodyPromise);
  });

  try {
    await page.goto(url, {
      waitUntil: "domcontentloaded",
      timeout: NAVIGATION_TIMEOUT_MS,
    });

    // Wait for SPA API calls to settle (networkidle = no requests for 500ms).
    await page
      .waitForLoadState("networkidle", { timeout: NETWORK_IDLE_TIMEOUT_MS })
      .catch(() => {
        // Some pages never reach strict networkidle — continue anyway.
      });

    // Yield event-loop ticks so any trailing deferred API calls can register.
    await page.waitForTimeout(POST_IDLE_WAIT_MS);

    // Now await all enqueued body reads before we extract.
    await Promise.allSettled(pendingBodyReads);

    const documentText = await page
      .locator("body")
      .innerText()
      .catch(() => "");
    const documentTextLines = documentText
      .split("\n")
      .map((line) => line.trim())
      .filter(Boolean);

    // Login-wall banner rendered for private sessions without valid cookies.
    const privateGateDetected =
      /view this private session in your bird eats bug account/i.test(
        documentText,
      );

    // Extract report title and description from the page DOM.
    // BirdeatsBug renders these into visible headings / paragraphs.
    const title = await page
      .locator("h1")
      .first()
      .innerText()
      .catch(() => null);

    // Grab the first substantial paragraph that looks like a user description
    // (longer than 30 chars, not a navigation label).
    let description = null;
    try {
      const paragraphs = await page.locator("p").allInnerTexts();
      description = paragraphs.find((t) => t.trim().length > 30) || null;
    } catch {
      // ignore
    }

    // Also try meta og:description / twitter:description for SPAs that set them
    if (!description) {
      description = await page
        .locator('meta[property="og:description"], meta[name="description"]')
        .first()
        .getAttribute("content")
        .catch(() => null);
    }

    const videoUrls = extractVideoUrls({ apiPayloads, documentText });
    const consoleLogs = extractConsoleLogs({ apiPayloads, documentTextLines });
    const networkLogs = extractNetworkLogs({ apiPayloads, documentTextLines });
    const videoDetails = await fetchVideoMetadata(videoUrls);

    return {
      sourceUrl: url,
      fetchedAt: new Date().toISOString(),
      privateGateDetected,
      title: title || null,
      description: description || null,
      apiResponseCount: responseMeta.length,
      apiResponses: responseMeta,
      videoUrls,
      videoDetails,
      consoleLogs,
      networkLogs,
      metadata: {
        videoCount: videoUrls.length,
        consoleCount: consoleLogs.length,
        networkCount: networkLogs.length,
        usedStorageState: Boolean(STORAGE_STATE_PATH),
      },
    };
  } finally {
    // Always tear down the browser, even when extraction throws.
    await context.close();
    await browser.close();
  }
};
441
+
442
+ // ---------------------------------------------------------------------------
443
+ // Video frame extraction
444
+ // ---------------------------------------------------------------------------
445
+
446
/**
 * Download a video into memory as a Buffer.
 * @throws {Error} when the HTTP response is not 2xx.
 */
const downloadVideoBuffer = async (videoUrl) => {
  const res = await fetch(videoUrl);
  if (!res.ok) {
    const reason = `${res.status} ${res.statusText}`;
    throw new Error(`Failed to download video: ${reason}`);
  }
  const bytes = await res.arrayBuffer();
  return Buffer.from(bytes);
};
455
+
456
/**
 * Best-effort probe of a video's duration in seconds.
 *
 * Runs `ffmpeg -i <file>` (which exits non-zero without an output target)
 * and parses the "Duration: HH:MM:SS.ss" line ffmpeg prints to stderr.
 * Returns 0 when the duration cannot be determined.
 */
const probeVideoDuration = async (videoPath) => {
  try {
    // Non-zero exit is expected — only the stderr text matters. No -v flag
    // is passed because `-v error` would suppress the Duration line.
    const result = await execFileAsync(ffmpegPath, ["-i", videoPath], {
      timeout: 15000,
    }).catch((e) => e);
    const durationLine = (result.stderr || "").match(
      /Duration:\s*(\d+):(\d{2}):(\d{2}(?:\.\d+)?)/,
    );
    if (durationLine) {
      const [, hours, minutes, seconds] = durationLine;
      return (
        parseInt(hours, 10) * 3600 +
        parseInt(minutes, 10) * 60 +
        parseFloat(seconds)
      );
    }
  } catch {
    // Fall through — callers treat 0 as "unknown duration".
  }
  return 0;
};
482
+
483
/**
 * Choose capture timestamps (seconds) for frame extraction.
 *
 * Without a focus time, `count` timestamps are spread uniformly across
 * [0, duration - 0.1] (the 0.1 s trim dodges a possibly blank last
 * frame). With `focusTimeSec`, roughly 40% of frames cover the full
 * video for context and the remaining 60% cluster inside a window of
 * ±focusWindowSec/2 around the focus time. Results are rounded to 0.1 s,
 * thinned so consecutive sorted values are > 0.15 s apart, and capped at
 * `count`.
 */
const buildTimestamps = (duration, count, focusTimeSec, focusWindowSec = 4) => {
  const clampToVideo = (v) => Math.max(0, Math.min(duration - 0.1, v));
  const round1 = (v) => Math.round(v * 10) / 10;
  // n evenly spaced points from `from` to `to`, clamped and rounded.
  const spread = (n, from, to) =>
    Array.from({ length: n }, (_, i) =>
      round1(clampToVideo(from + (i / (n - 1 || 1)) * (to - from))),
    );

  if (focusTimeSec == null || duration <= 0) {
    return spread(count, 0, duration);
  }

  const baseCount = Math.max(1, Math.round(count * 0.4));
  const focusCount = count - baseCount;

  // Evenly spaced context frames across the whole video.
  const contextFrames = spread(baseCount, 0, duration);

  // Dense cluster inside the focus window.
  const halfWin = focusWindowSec / 2;
  const focusFrames = spread(
    focusCount,
    clampToVideo(focusTimeSec - halfWin),
    clampToVideo(focusTimeSec + halfWin),
  );

  const merged = [...new Set([...contextFrames, ...focusFrames])].sort(
    (a, b) => a - b,
  );
  const spaced = merged.filter(
    (ts, i) => i === 0 || ts - merged[i - 1] > 0.15,
  );

  return spaced.slice(0, count);
};
525
+
526
/**
 * Extract JPEG frames from an in-memory video buffer at computed
 * timestamps.
 *
 * Writes the buffer to a temp directory, probes the duration, builds the
 * timestamp list via buildTimestamps, then runs one ffmpeg invocation per
 * frame (`-ss` before `-i` for fast keyframe seeking). The temp directory
 * is always removed, even on failure.
 *
 * Returns an array of { frameIndex, timestampSec, base64, mimeType }.
 */
const extractFrames = async (
  videoBuffer,
  { count = 16, focusTimeSec = undefined, focusWindowSec = 4 } = {},
) => {
  const tmpDir = await mkdtemp(join(tmpdir(), "birdeatsbug-frames-"));
  const videoPath = join(tmpDir, "recording.webm");

  try {
    await writeFile(videoPath, videoBuffer);

    const duration = await probeVideoDuration(videoPath);
    const timestamps = buildTimestamps(
      duration,
      count,
      focusTimeSec,
      focusWindowSec,
    );

    // Extract each frame individually at its precise timestamp.
    // Run in parallel for speed; ffmpeg -ss before -i is fast (keyframe seek).
    await Promise.all(
      timestamps.map((ts, i) => {
        const outPath = join(
          tmpDir,
          `frame-${String(i + 1).padStart(3, "0")}.jpg`,
        );
        return execFileAsync(ffmpegPath, [
          "-ss",
          String(ts),
          "-i",
          videoPath,
          "-vframes",
          "1",
          "-q:v",
          "3",
          "-vf",
          "scale=1280:-2", // width 1280, height auto (kept divisible by 2)
          outPath,
          "-y",
        ]);
      }),
    );

    // Re-list the directory rather than trusting every invocation wrote a file.
    // NOTE(review): if ffmpeg succeeds but writes no file, the list shrinks and
    // timestamps[idx] may misalign with frames — confirm whether that can occur.
    const files = (await readdir(tmpDir))
      .filter((f) => f.startsWith("frame-") && f.endsWith(".jpg"))
      .sort();

    return await Promise.all(
      files.map(async (file, idx) => ({
        frameIndex: idx + 1,
        timestampSec: timestamps[idx] ?? null,
        base64: (await readFile(join(tmpDir, file))).toString("base64"),
        mimeType: "image/jpeg",
      })),
    );
  } finally {
    await rm(tmpDir, { recursive: true, force: true });
  }
};
589
+
590
/**
 * Merge console errors and network failures into one chronological list.
 *
 * Timestamps may be epoch numbers or date strings; entries whose
 * timestamp cannot be parsed sort after all timestamped entries, keeping
 * their original relative order (console events before network events).
 *
 * Each event: { time, kind, level?, method?, url?, status?, message?, summary }
 */
const buildTimeline = (consoleErrors, networkFailures) => {
  const toMs = (ts) => {
    if (ts == null) return null;
    const numeric = Number(ts);
    if (!Number.isNaN(numeric)) return numeric;
    const parsed = Date.parse(ts);
    return Number.isNaN(parsed) ? null : parsed;
  };

  const events = [];

  for (const e of consoleErrors) {
    const message = String(e.message ?? "");
    events.push({
      time: toMs(e.timestamp),
      kind: "console",
      level: e.level,
      message: message.slice(0, 200),
      summary: `[${e.level?.toUpperCase() ?? "ERROR"}] ${message.slice(0, 120)}`,
    });
  }

  for (const r of networkFailures) {
    events.push({
      time: toMs(r.timestamp),
      kind: "network",
      method: r.method,
      url: r.url,
      status: r.status,
      durationMs: r.durationMs,
      summary: `${r.method ?? "?"} ${r.url} → ${r.status}`,
    });
  }

  // Stable sort: timestamped events ascending, un-timestamped at the end.
  return events.sort((a, b) => {
    if (a.time != null && b.time != null) return a.time - b.time;
    if (a.time != null) return -1;
    if (b.time != null) return 1;
    return 0;
  });
};
636
+
637
/**
 * Render a merged timeline as a markdown table (one row per event).
 * Returns a placeholder sentence when the timeline is empty.
 */
const buildTimelineTable = (timeline) => {
  if (timeline.length === 0) return "_No errors or network failures recorded._";

  const formatTime = (ms) =>
    ms != null
      ? new Date(ms).toISOString().replace("T", " ").slice(0, 23)
      : "—";

  const formatDetails = (event) => {
    if (event.kind === "console") {
      const level = event.level?.toUpperCase() ?? "ERROR";
      return `**[${level}]** ${event.message?.slice(0, 120) ?? ""}`;
    }
    const duration = event.durationMs != null ? ` (${event.durationMs}ms)` : "";
    return `\`${event.method ?? "?"} ${String(event.url ?? "").slice(0, 80)}\` → **${event.status}**${duration}`;
  };

  const lines = ["| # | Time | Kind | Details |", "|---|------|------|---------|"];
  timeline.forEach((event, i) => {
    const kind = event.kind === "console" ? "🖥 console" : "🌐 network";
    lines.push(
      `| ${i + 1} | ${formatTime(event.time)} | ${kind} | ${formatDetails(event)} |`,
    );
  });
  return lines.join("\n");
};
663
+
664
/**
 * Build a markdown table summarising extracted frames.
 *
 * Zone is "🔍 focus" when a focus time is set and the frame's timestamp
 * falls within ±focusWindowSec/2 of it; "context" otherwise; "—" when no
 * focus time was given. Description cells are left blank on purpose so a
 * vision model can fill them in from the images that follow each row.
 */
const buildFrameTable = (frames, focusTimeSec, focusWindowSec = 4) => {
  const halfWin = focusWindowSec / 2;

  const zoneFor = (ts) => {
    if (focusTimeSec == null) return "—";
    const inWindow = ts != null && Math.abs(ts - focusTimeSec) <= halfWin;
    return inWindow ? "🔍 focus" : "context";
  };

  const lines = [
    "| Frame | Timestamp | Zone | Description |",
    "|-------|-----------|------|-------------|",
  ];
  for (const { frameIndex, timestampSec } of frames) {
    const ts = timestampSec != null ? `${timestampSec}s` : "—";
    lines.push(`| ${frameIndex} | ${ts} | ${zoneFor(timestampSec)} | |`);
  }
  return lines.join("\n");
};
692
+
693
// Wrap a JS value as an MCP text-content result (pretty-printed JSON).
const asTextContent = (obj) => {
  const text = JSON.stringify(obj, null, 2);
  return { content: [{ type: "text", text }] };
};
701
+
702
// MCP server instance (stdio transport is connected at startup); the
// tool-list and tool-call handlers are registered on it below.
const server = new Server(
  {
    name: "birdeatsbug-live-mcp",
    version: "0.1.0",
  },
  {
    capabilities: {
      tools: {},
    },
  },
);
713
+
714
// Advertise the five tools. The JSON-schema shapes here mirror the zod
// schemas used for runtime validation in the CallTool handler.
server.setRequestHandler(ListToolsRequestSchema, async () => ({
  tools: [
    {
      name: "inspect_birdeatsbug_report",
      description:
        "Fetch report page live and return extraction metadata, access state, and discovered artifact counts.",
      inputSchema: {
        type: "object",
        properties: {
          url: { type: "string", description: "BirdeatsBug report URL" },
        },
        required: ["url"],
      },
    },
    {
      name: "get_birdeatsbug_console_logs",
      description: "Fetch console logs live from a BirdeatsBug report URL.",
      inputSchema: {
        type: "object",
        properties: {
          url: { type: "string", description: "BirdeatsBug report URL" },
          limit: { type: "number", description: "Optional max logs to return" },
        },
        required: ["url"],
      },
    },
    {
      name: "get_birdeatsbug_network_logs",
      description: "Fetch network logs live from a BirdeatsBug report URL.",
      inputSchema: {
        type: "object",
        properties: {
          url: { type: "string", description: "BirdeatsBug report URL" },
          limit: {
            type: "number",
            description: "Optional max requests to return",
          },
        },
        required: ["url"],
      },
    },
    {
      name: "get_birdeatsbug_video_frames",
      description:
        "Download the bug recording and extract frames as images for AI visual analysis. Extracts evenly-spaced frames by default. When focusTimeSec is provided, 60% of frames cluster around that timestamp (±focusWindowSec/2) and 40% spread across the full video for context — ideal for zooming in on the moment a bug occurred.",
      inputSchema: {
        type: "object",
        properties: {
          url: { type: "string", description: "BirdeatsBug report URL" },
          frameCount: {
            type: "number",
            description: "Number of frames to extract (1-30, default 16)",
          },
          focusTimeSec: {
            type: "number",
            description:
              "Timestamp in seconds to cluster frames around (e.g. 6 to focus on the 6s mark)",
          },
          focusWindowSec: {
            type: "number",
            description:
              "Width in seconds of the focus window around focusTimeSec (default 4)",
          },
        },
        required: ["url"],
      },
    },
    {
      name: "generate_birdeatsbug_report",
      description:
        "Generate a comprehensive bug report from a BirdeatsBug URL. Fetches the report title, description, console logs, network logs (errors/failures highlighted), and video frames in a single pass, then returns all data together so an AI can produce a detailed root-cause analysis.",
      inputSchema: {
        type: "object",
        properties: {
          url: { type: "string", description: "BirdeatsBug report URL" },
          frameCount: {
            type: "number",
            description: "Number of video frames to extract (1-30, default 16)",
          },
          focusTimeSec: {
            type: "number",
            description:
              "Timestamp in seconds to cluster frames around for closer inspection of the bug moment",
          },
          focusWindowSec: {
            type: "number",
            description:
              "Width in seconds of the focus window around focusTimeSec (default 4)",
          },
          logLimit: {
            type: "number",
            description:
              "Max console/network log entries to include (default 100)",
          },
        },
        required: ["url"],
      },
    },
  ],
}));
814
+
815
+ server.setRequestHandler(CallToolRequestSchema, async (request) => {
816
+ const { name, arguments: args = {} } = request.params;
817
+
818
+ if (name === "inspect_birdeatsbug_report") {
819
+ const { url } = URLSchema.parse(args);
820
+ const data = await collectRuntimeData(url);
821
+
822
+ return asTextContent({
823
+ sourceUrl: data.sourceUrl,
824
+ fetchedAt: data.fetchedAt,
825
+ privateGateDetected: data.privateGateDetected,
826
+ metadata: data.metadata,
827
+ apiResponseCount: data.apiResponseCount,
828
+ apiResponsesSample: data.apiResponses.slice(0, 20),
829
+ });
830
+ }
831
+
832
+ if (name === "get_birdeatsbug_console_logs") {
833
+ const { url, limit } = LimitedURLSchema.parse(args);
834
+ const data = await collectRuntimeData(url);
835
+
836
+ return asTextContent({
837
+ sourceUrl: data.sourceUrl,
838
+ fetchedAt: data.fetchedAt,
839
+ privateGateDetected: data.privateGateDetected,
840
+ count: data.consoleLogs.length,
841
+ logs:
842
+ typeof limit === "number"
843
+ ? data.consoleLogs.slice(0, limit)
844
+ : data.consoleLogs,
845
+ });
846
+ }
847
+
848
+ if (name === "get_birdeatsbug_network_logs") {
849
+ const { url, limit } = LimitedURLSchema.parse(args);
850
+ const data = await collectRuntimeData(url);
851
+
852
+ return asTextContent({
853
+ sourceUrl: data.sourceUrl,
854
+ fetchedAt: data.fetchedAt,
855
+ privateGateDetected: data.privateGateDetected,
856
+ count: data.networkLogs.length,
857
+ requests:
858
+ typeof limit === "number"
859
+ ? data.networkLogs.slice(0, limit)
860
+ : data.networkLogs,
861
+ });
862
+ }
863
+
864
+ if (name === "get_birdeatsbug_video_frames") {
865
+ const { url, frameCount, focusTimeSec, focusWindowSec } =
866
+ VideoFramesSchema.parse(args);
867
+
868
+ const data = await collectRuntimeData(url);
869
+
870
+ if (data.privateGateDetected) {
871
+ return asTextContent({
872
+ error:
873
+ "This is a private session — set BIRDEATSBUG_STORAGE_STATE to access it.",
874
+ });
875
+ }
876
+
877
+ if (data.videoUrls.length === 0) {
878
+ return asTextContent({
879
+ error: "No video URL found for this report.",
880
+ metadata: data.metadata,
881
+ });
882
+ }
883
+
884
+ const videoUrl = data.videoUrls[0];
885
+ const videoBuffer = await downloadVideoBuffer(videoUrl);
886
+ const frames = await extractFrames(videoBuffer, {
887
+ count: frameCount,
888
+ focusTimeSec,
889
+ focusWindowSec,
890
+ });
891
+
892
+ return {
893
+ content: [
894
+ {
895
+ type: "text",
896
+ text: [
897
+ `## Video Frames (${frames.length} total)`,
898
+ focusTimeSec != null
899
+ ? `**Focus:** ±${focusWindowSec / 2}s around ${focusTimeSec}s — 60% of frames cluster here, 40% spread across the full recording.`
900
+ : "Frames distributed uniformly across the full recording.",
901
+ "",
902
+ buildFrameTable(frames, focusTimeSec, focusWindowSec),
903
+ "",
904
+ "---",
905
+ "```json",
906
+ JSON.stringify(
907
+ {
908
+ sourceUrl: data.sourceUrl,
909
+ fetchedAt: data.fetchedAt,
910
+ videoUrl,
911
+ totalFramesExtracted: frames.length,
912
+ focusTimeSec: focusTimeSec ?? null,
913
+ focusWindowSec: focusTimeSec != null ? focusWindowSec : null,
914
+ },
915
+ null,
916
+ 2,
917
+ ),
918
+ "```",
919
+ ].join("\n"),
920
+ },
921
+ ...frames.map(({ frameIndex, timestampSec, base64, mimeType }) => ({
922
+ type: "image",
923
+ data: base64,
924
+ mimeType,
925
+ description:
926
+ timestampSec !== null
927
+ ? `Frame ${frameIndex} @ ${timestampSec}s`
928
+ : `Frame ${frameIndex}`,
929
+ })),
930
+ ],
931
+ };
932
+ }
933
+
934
+ if (name === "generate_birdeatsbug_report") {
935
+ const { url, frameCount, focusTimeSec, focusWindowSec, logLimit } =
936
+ GenerateReportSchema.parse(args);
937
+
938
+ const data = await collectRuntimeData(url);
939
+
940
+ if (data.privateGateDetected) {
941
+ return asTextContent({
942
+ error:
943
+ "This is a private session — set BIRDEATSBUG_STORAGE_STATE to access it.",
944
+ });
945
+ }
946
+
947
+ const consoleLogs = data.consoleLogs.slice(0, logLimit);
948
+ const networkLogs = data.networkLogs.slice(0, logLimit);
949
+
950
+ const consoleErrors = consoleLogs.filter((e) =>
951
+ /error|exception|uncaught|fail/i.test(
952
+ String(e.level) + " " + String(e.message),
953
+ ),
954
+ );
955
+ const networkFailures = networkLogs.filter(
956
+ (r) => r.status && (r.status >= 400 || r.status === 0),
957
+ );
958
+
959
+ // Build the merged event timeline first so we can use it for auto-focus.
960
+ const timeline = buildTimeline(consoleErrors, networkFailures);
961
+
962
+ // Auto-detect focusTimeSec from the first timestamped error when not provided.
963
+ // Timeline entries whose .time is an epoch-ms value are converted to seconds
964
+ // relative to the first event in the timeline.
965
+ let resolvedFocusTimeSec = focusTimeSec;
966
+ if (resolvedFocusTimeSec == null && timeline.length > 0) {
967
+ const firstWithTime = timeline.find((e) => e.time != null);
968
+ const firstConsoleWithTime = timeline.find(
969
+ (e) => e.kind === "console" && e.time != null,
970
+ );
971
+ const anchor = firstWithTime?.time ?? null;
972
+ const errorTime = firstConsoleWithTime?.time ?? null;
973
+ if (anchor != null && errorTime != null) {
974
+ // Convert absolute ms timestamps to relative seconds from session start.
975
+ resolvedFocusTimeSec = Math.max(0, (errorTime - anchor) / 1000);
976
+ }
977
+ }
978
+
979
+ let frames = [];
980
+ let videoUrl = null;
981
+ if (data.videoUrls.length > 0) {
982
+ videoUrl = data.videoUrls[0];
983
+ const videoBuffer = await downloadVideoBuffer(videoUrl);
984
+ frames = await extractFrames(videoBuffer, {
985
+ count: frameCount,
986
+ focusTimeSec: resolvedFocusTimeSec,
987
+ focusWindowSec,
988
+ });
989
+ }
990
+
991
+ const reportMeta = {
992
+ sourceUrl: data.sourceUrl,
993
+ fetchedAt: data.fetchedAt,
994
+ title: data.title,
995
+ description: data.description,
996
+ metadata: data.metadata,
997
+ frameStrategy:
998
+ resolvedFocusTimeSec != null
999
+ ? `60% of frames within ±${focusWindowSec / 2}s of ${resolvedFocusTimeSec}s${focusTimeSec == null ? " (auto-detected from first error)" : ""}; 40% spread across full video`
1000
+ : "Uniform distribution across full recording",
1001
+ summary: {
1002
+ totalConsoleLogs: data.consoleLogs.length,
1003
+ consoleErrorCount: consoleErrors.length,
1004
+ totalNetworkRequests: data.networkLogs.length,
1005
+ networkFailureCount: networkFailures.length,
1006
+ videoFramesExtracted: frames.length,
1007
+ autoDetectedFocusTimeSec:
1008
+ focusTimeSec == null ? (resolvedFocusTimeSec ?? null) : null,
1009
+ },
1010
+ eventTimeline: timeline,
1011
+ consoleErrors,
1012
+ networkFailures,
1013
+ consoleLogs,
1014
+ networkLogs,
1015
+ videoUrl,
1016
+ };
1017
+
1018
+ return {
1019
+ content: [
1020
+ {
1021
+ type: "text",
1022
+ text: [
1023
+ `## Bug Report: ${data.title || data.sourceUrl}`,
1024
+ data.description ? `> ${data.description}` : "",
1025
+ "",
1026
+ "### Video Frames",
1027
+ frames.length > 0
1028
+ ? [
1029
+ resolvedFocusTimeSec != null
1030
+ ? `**Focus:** ±${focusWindowSec / 2}s around ${resolvedFocusTimeSec}s${focusTimeSec == null ? " _(auto-detected from first error)_" : ""} — 60% cluster here, 40% spread across recording.`
1031
+ : "Frames distributed uniformly across the full recording.",
1032
+ "",
1033
+ buildFrameTable(frames, resolvedFocusTimeSec, focusWindowSec),
1034
+ ].join("\n")
1035
+ : "No video found for this report.",
1036
+ "",
1037
+ "### Summary",
1038
+ `| Metric | Count |`,
1039
+ `|--------|-------|`,
1040
+ `| Console logs | ${data.consoleLogs.length} |`,
1041
+ `| Console errors | ${consoleErrors.length} |`,
1042
+ `| Network requests | ${data.networkLogs.length} |`,
1043
+ `| Network failures | ${networkFailures.length} |`,
1044
+ `| Video frames | ${frames.length} |`,
1045
+ "",
1046
+ "### Event Timeline",
1047
+ "_Errors and network failures merged in chronological order._",
1048
+ "",
1049
+ buildTimelineTable(timeline),
1050
+ "",
1051
+ "### Console Errors",
1052
+ consoleErrors.length > 0
1053
+ ? consoleErrors
1054
+ .map((e, i) => `${i + 1}. **[${e.level}]** ${e.message}`)
1055
+ .join("\n")
1056
+ : "None",
1057
+ "",
1058
+ "### Network Failures",
1059
+ networkFailures.length > 0
1060
+ ? networkFailures
1061
+ .map(
1062
+ (r, i) =>
1063
+ `${i + 1}. \`${r.method || "?"} ${r.url}\` → **${r.status}**`,
1064
+ )
1065
+ .join("\n")
1066
+ : "None",
1067
+ "",
1068
+ "---",
1069
+ "```json",
1070
+ JSON.stringify(reportMeta, null, 2),
1071
+ "```",
1072
+ ]
1073
+ .filter((line) => line !== null && line !== undefined)
1074
+ .join("\n"),
1075
+ },
1076
+ ...frames.map(({ frameIndex, timestampSec, base64, mimeType }) => ({
1077
+ type: "image",
1078
+ data: base64,
1079
+ mimeType,
1080
+ description:
1081
+ timestampSec !== null
1082
+ ? `Frame ${frameIndex} @ ${timestampSec}s`
1083
+ : `Frame ${frameIndex}`,
1084
+ })),
1085
+ ],
1086
+ };
1087
+ }
1088
+
1089
+ throw new Error(`Unknown tool: ${name}`);
1090
+ });
1091
+
1092
// Connect the MCP server to a stdio transport and begin serving requests.
const start = async () => {
  await server.connect(new StdioServerTransport());
};
1096
+
1097
// Boot the server; any startup failure is fatal and exits with a non-zero code.
start().catch((err) => {
  console.error("BirdeatsBug MCP server failed to start:", err);
  process.exit(1);
});