@debugg-ai/debugg-ai-mcp 2.5.0 → 2.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -7,16 +7,15 @@
|
|
|
7
7
|
* (c) MCP-side aggregates per-target HAR slices into NetworkSummary[].
|
|
8
8
|
*
|
|
9
9
|
* The backend "Page Probe" workflow template runs:
|
|
10
|
-
* browser.setup → loop[targets](
|
|
10
|
+
* browser.setup → loop[targets](browser.navigate → browser.capture) → done
|
|
11
11
|
*
|
|
12
|
-
* Each
|
|
12
|
+
* Each browser.capture node emits per-iteration outputData with consoleSlice
|
|
13
13
|
* + harSlice windowed to that URL's load span — that's what makes per-URL
|
|
14
14
|
* networkSummary attribution accurate.
|
|
15
15
|
*/
|
|
16
16
|
import { config } from '../config/index.js';
|
|
17
17
|
import { Logger } from '../utils/logger.js';
|
|
18
18
|
import { handleExternalServiceError } from '../utils/errors.js';
|
|
19
|
-
import { imageContentBlock } from '../utils/imageUtils.js';
|
|
20
19
|
import { DebuggAIServerClient } from '../services/index.js';
|
|
21
20
|
import { TunnelProvisionError } from '../services/tunnels.js';
|
|
22
21
|
import { tunnelManager } from '../services/ngrok/tunnelManager.js';
|
|
@@ -24,7 +23,8 @@ import { probeLocalPort, probeTunnelHealth } from '../utils/localReachability.js
|
|
|
24
23
|
import { extractLocalhostPort } from '../utils/urlParser.js';
|
|
25
24
|
import { buildContext, findExistingTunnel, ensureTunnel, sanitizeResponseUrls, touchTunnelById, } from '../utils/tunnelContext.js';
|
|
26
25
|
import { getCachedTemplateUuid, invalidateTemplateCache } from '../utils/handlerCaches.js';
|
|
27
|
-
import {
|
|
26
|
+
import { reaggregateByOriginPath, mapConsoleSlice } from '../utils/harSummarizer.js';
|
|
27
|
+
import { fetchImageAsBase64, imageContentBlock } from '../utils/imageUtils.js';
|
|
28
28
|
const logger = new Logger({ module: 'probePageHandler' });
|
|
29
29
|
const TEMPLATE_KEYWORD = 'page probe';
|
|
30
30
|
export async function probePageHandler(input, context, rawProgressCallback) {
|
|
@@ -155,13 +155,35 @@ export async function probePageHandler(input, context, rawProgressCallback) {
|
|
|
155
155
|
`Ensure the backend has a template matching "${TEMPLATE_KEYWORD}" seeded and accessible.`);
|
|
156
156
|
}
|
|
157
157
|
// ── Build contextData (camelCase; axiosTransport snake_cases on the wire) ──
|
|
158
|
+
// Backend's browser.setup node (shared with App Evaluation + Raw Crawl
|
|
159
|
+
// templates) requires `target_url` (singular). The Page Probe template
|
|
160
|
+
// currently uses that node as-is — the per-target loop primitive is
|
|
161
|
+
// pending. Send BOTH:
|
|
162
|
+
// - targetUrl: first target's tunneled URL (satisfies browser.setup
|
|
163
|
+
// today; will keep working when the loop wraps it later)
|
|
164
|
+
// - targets[]: the full per-URL config for when the loop primitive
|
|
165
|
+
// ships and iterates over them
|
|
166
|
+
const firstTargetUrl = targetContexts[0]?.targetUrl ?? input.targets[0].url;
|
|
158
167
|
const contextData = {
|
|
168
|
+
targetUrl: firstTargetUrl,
|
|
159
169
|
targets: input.targets.map((t, i) => ({
|
|
160
170
|
url: targetContexts[i].targetUrl ?? t.url,
|
|
161
|
-
|
|
171
|
+
// Send null (not undefined) for optional fields so the field exists
|
|
172
|
+
// in the target object even when the caller didn't pass one. Backend
|
|
173
|
+
// placeholder resolver was fixed in commit 154e1e69 to type-preserve
|
|
174
|
+
// null in single-placeholder substitutions, so null flows through.
|
|
175
|
+
waitForSelector: t.waitForSelector ?? null,
|
|
162
176
|
waitForLoadState: t.waitForLoadState,
|
|
163
177
|
timeoutMs: t.timeoutMs,
|
|
164
178
|
})),
|
|
179
|
+
// Backend's browser.capture template binds {{include_dom}} and
|
|
180
|
+
// {{include_screenshot}} from contextData (verified 2026-04-29).
|
|
181
|
+
// The MCP-facing schema keeps `includeHtml` / `captureScreenshots`
|
|
182
|
+
// for caller ergonomics; we just map them to what the template wants.
|
|
183
|
+
includeDom: input.includeHtml,
|
|
184
|
+
includeScreenshot: input.captureScreenshots,
|
|
185
|
+
// Keep the original keys too for any downstream node that reads them
|
|
186
|
+
// (cheap to send, future-proof against template field-name churn).
|
|
165
187
|
includeHtml: input.includeHtml,
|
|
166
188
|
captureScreenshots: input.captureScreenshots,
|
|
167
189
|
};
|
|
@@ -182,7 +204,7 @@ export async function probePageHandler(input, context, rawProgressCallback) {
|
|
|
182
204
|
}
|
|
183
205
|
if (!progressCallback)
|
|
184
206
|
return;
|
|
185
|
-
const completedNodes = (exec.nodeExecutions ?? []).filter(n => n.nodeType === '
|
|
207
|
+
const completedNodes = (exec.nodeExecutions ?? []).filter(n => n.nodeType === 'browser.capture' && n.status === 'success').length;
|
|
186
208
|
if (completedNodes !== lastCompleted) {
|
|
187
209
|
lastCompleted = completedNodes;
|
|
188
210
|
await progressCallback({
|
|
@@ -195,22 +217,35 @@ export async function probePageHandler(input, context, rawProgressCallback) {
|
|
|
195
217
|
// ── Format response ────────────────────────────────────────────────────
|
|
196
218
|
const duration = Date.now() - startTime;
|
|
197
219
|
const captureNodes = (finalExecution.nodeExecutions ?? [])
|
|
198
|
-
.filter(n => n.nodeType === '
|
|
220
|
+
.filter(n => n.nodeType === 'browser.capture')
|
|
199
221
|
.sort((a, b) => a.executionOrder - b.executionOrder);
|
|
200
222
|
const results = [];
|
|
201
|
-
const screenshotBlocks = [];
|
|
202
223
|
for (let i = 0; i < input.targets.length; i++) {
|
|
203
224
|
const target = input.targets[i];
|
|
204
225
|
const node = captureNodes[i];
|
|
205
226
|
const data = node?.outputData ?? {};
|
|
227
|
+
// Backend (post-154e1e69) emits browser.capture output_data with:
|
|
228
|
+
// captured_url, status_code, title, load_time_ms,
|
|
229
|
+
// console_slice (already per-capture, in {text, level, location, timestamp} shape),
|
|
230
|
+
// network_summary (already pre-aggregated by FULL URL,
|
|
231
|
+
// in {url, count, methods[], statuses{}, resource_types[]} shape),
|
|
232
|
+
// surfer_page_uuid (reference to SurferPage row for screenshot/title/visible_text),
|
|
233
|
+
// error
|
|
234
|
+
// axiosTransport snake→camel'd at the wire, so JS-side these are
|
|
235
|
+
// capturedUrl / consoleSlice / networkSummary / surferPageUuid / etc.
|
|
236
|
+
// Re-aggregate networkSummary by origin+pathname so refetch loops
|
|
237
|
+
// collapse (preserves the original client-feedback contract).
|
|
206
238
|
const result = {
|
|
207
239
|
url: target.url, // ORIGINAL caller URL — not the tunneled rewrite
|
|
208
|
-
finalUrl: typeof data.
|
|
240
|
+
finalUrl: typeof data.capturedUrl === 'string' ? data.capturedUrl
|
|
241
|
+
: typeof data.finalUrl === 'string' ? data.finalUrl
|
|
242
|
+
: typeof data.url === 'string' ? data.url
|
|
243
|
+
: target.url,
|
|
209
244
|
statusCode: typeof data.statusCode === 'number' ? data.statusCode : 0,
|
|
210
245
|
title: typeof data.title === 'string' ? data.title : null,
|
|
211
246
|
loadTimeMs: typeof data.loadTimeMs === 'number' ? data.loadTimeMs : 0,
|
|
212
|
-
consoleErrors:
|
|
213
|
-
networkSummary:
|
|
247
|
+
consoleErrors: mapConsoleSlice(Array.isArray(data.consoleSlice) ? data.consoleSlice : []),
|
|
248
|
+
networkSummary: reaggregateByOriginPath(Array.isArray(data.networkSummary) ? data.networkSummary : []),
|
|
214
249
|
};
|
|
215
250
|
if (input.includeHtml && typeof data.html === 'string') {
|
|
216
251
|
result.html = data.html;
|
|
@@ -218,10 +253,10 @@ export async function probePageHandler(input, context, rawProgressCallback) {
|
|
|
218
253
|
if (typeof data.error === 'string' && data.error) {
|
|
219
254
|
result.error = data.error;
|
|
220
255
|
}
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
screenshotBlocks.push(imageContentBlock(data.screenshotB64, 'image/png'));
|
|
256
|
+
if (typeof data.surferPageUuid === 'string' && data.surferPageUuid) {
|
|
257
|
+
result.surferPageUuid = data.surferPageUuid;
|
|
224
258
|
}
|
|
259
|
+
results.push(result);
|
|
225
260
|
}
|
|
226
261
|
const responsePayload = {
|
|
227
262
|
executionId: executionUuid,
|
|
@@ -244,12 +279,35 @@ export async function probePageHandler(input, context, rawProgressCallback) {
|
|
|
244
279
|
}
|
|
245
280
|
}
|
|
246
281
|
logger.toolComplete('probe_page', duration);
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
282
|
+
const responseContent = [
|
|
283
|
+
{ type: 'text', text: JSON.stringify(sanitizedPayload, null, 2) },
|
|
284
|
+
];
|
|
285
|
+
// Embed screenshots when captureScreenshots is true. The backend may return
|
|
286
|
+
// screenshotB64 or a URL-keyed field on browser.capture outputData.
|
|
287
|
+
if (input.captureScreenshots) {
|
|
288
|
+
const SCREENSHOT_URL_KEYS = ['screenshotB64', 'screenshot', 'screenshotUrl', 'screenshotUri', 'finalScreenshot'];
|
|
289
|
+
for (const node of captureNodes) {
|
|
290
|
+
const data = node?.outputData ?? {};
|
|
291
|
+
if (typeof data.screenshotB64 === 'string' && data.screenshotB64) {
|
|
292
|
+
responseContent.push(imageContentBlock(data.screenshotB64, 'image/png'));
|
|
293
|
+
}
|
|
294
|
+
else {
|
|
295
|
+
let screenshotUrl = null;
|
|
296
|
+
for (const key of SCREENSHOT_URL_KEYS) {
|
|
297
|
+
if (key !== 'screenshotB64' && typeof data[key] === 'string' && data[key]) {
|
|
298
|
+
screenshotUrl = data[key];
|
|
299
|
+
break;
|
|
300
|
+
}
|
|
301
|
+
}
|
|
302
|
+
if (screenshotUrl) {
|
|
303
|
+
const img = await fetchImageAsBase64(screenshotUrl).catch(() => null);
|
|
304
|
+
if (img)
|
|
305
|
+
responseContent.push(imageContentBlock(img.data, img.mimeType));
|
|
306
|
+
}
|
|
307
|
+
}
|
|
308
|
+
}
|
|
309
|
+
}
|
|
310
|
+
return { content: responseContent };
|
|
253
311
|
}
|
|
254
312
|
catch (error) {
|
|
255
313
|
const duration = Date.now() - startTime;
|
|
@@ -13,6 +13,7 @@ import { handleExternalServiceError } from '../utils/errors.js';
|
|
|
13
13
|
import { DebuggAIServerClient } from '../services/index.js';
|
|
14
14
|
import { config } from '../config/index.js';
|
|
15
15
|
import { toPaginationParams } from '../utils/pagination.js';
|
|
16
|
+
import { fetchImageAsBase64, imageContentBlock } from '../utils/imageUtils.js';
|
|
16
17
|
const logger = new Logger({ module: 'searchExecutionsHandler' });
|
|
17
18
|
function notFound(uuid) {
|
|
18
19
|
return {
|
|
@@ -38,7 +39,53 @@ export async function searchExecutionsHandler(input, _context) {
|
|
|
38
39
|
executions: [execution],
|
|
39
40
|
};
|
|
40
41
|
logger.toolComplete('search_executions', Date.now() - start);
|
|
41
|
-
|
|
42
|
+
const content = [
|
|
43
|
+
{ type: 'text', text: JSON.stringify(payload, null, 2) },
|
|
44
|
+
];
|
|
45
|
+
const SCREENSHOT_URL_KEYS = ['finalScreenshot', 'screenshot', 'screenshotUrl', 'screenshotUri'];
|
|
46
|
+
const GIF_KEYS = ['runGif', 'gifUrl', 'gif', 'videoUrl', 'recordingUrl'];
|
|
47
|
+
const nodes = execution.nodeExecutions ?? [];
|
|
48
|
+
const subworkflowNode = nodes.find((n) => n.nodeType === 'subworkflow.run');
|
|
49
|
+
let screenshotEmbedded = false;
|
|
50
|
+
let screenshotUrl = null;
|
|
51
|
+
let gifUrl = null;
|
|
52
|
+
const screenshotB64 = subworkflowNode?.outputData?.screenshotB64;
|
|
53
|
+
if (typeof screenshotB64 === 'string' && screenshotB64) {
|
|
54
|
+
content.push(imageContentBlock(screenshotB64, 'image/png'));
|
|
55
|
+
screenshotEmbedded = true;
|
|
56
|
+
}
|
|
57
|
+
for (const node of nodes) {
|
|
58
|
+
const data = node.outputData ?? {};
|
|
59
|
+
if (!screenshotEmbedded && !screenshotUrl) {
|
|
60
|
+
for (const key of SCREENSHOT_URL_KEYS) {
|
|
61
|
+
if (typeof data[key] === 'string' && data[key]) {
|
|
62
|
+
screenshotUrl = data[key];
|
|
63
|
+
break;
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
if (!gifUrl) {
|
|
68
|
+
for (const key of GIF_KEYS) {
|
|
69
|
+
if (typeof data[key] === 'string' && data[key]) {
|
|
70
|
+
gifUrl = data[key];
|
|
71
|
+
break;
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
if ((screenshotEmbedded || screenshotUrl) && gifUrl)
|
|
76
|
+
break;
|
|
77
|
+
}
|
|
78
|
+
if (!screenshotEmbedded && screenshotUrl) {
|
|
79
|
+
const img = await fetchImageAsBase64(screenshotUrl).catch(() => null);
|
|
80
|
+
if (img)
|
|
81
|
+
content.push(imageContentBlock(img.data, img.mimeType));
|
|
82
|
+
}
|
|
83
|
+
if (gifUrl) {
|
|
84
|
+
const gif = await fetchImageAsBase64(gifUrl).catch(() => null);
|
|
85
|
+
if (gif)
|
|
86
|
+
content.push(imageContentBlock(gif.data, 'image/gif'));
|
|
87
|
+
}
|
|
88
|
+
return { content };
|
|
42
89
|
}
|
|
43
90
|
catch (err) {
|
|
44
91
|
if (err?.statusCode === 404 || err?.response?.status === 404)
|
|
@@ -7,6 +7,94 @@
|
|
|
7
7
|
* Pure functions — no I/O, no async — so they can be reused by the future
|
|
8
8
|
* `summarize_execution` tool.
|
|
9
9
|
*/
|
|
10
|
+
/**
 * Collapse backend's full-URL-keyed network_summary entries into buckets
 * keyed by `origin + pathname` (query string dropped), so repeated fetches
 * of the same endpoint — e.g. `/api/poll?n=0..4` — surface as a single
 * entry with an aggregate count.
 *
 * Backend `browser.capture` (commit 154e1e69) already groups by full URL
 * with shape `{url, count, methods[], statuses{}, resource_types[]}`;
 * that granularity defeats refetch-loop detection, hence this cheap
 * MCP-side pass over the small pre-grouped list.
 *
 * @param {Array<{url?: string, count?: number, statuses?: Object<string, number>}>} entries
 * @returns {Array<{url: string, count: number, statuses: Object<string, number>, totalBytes: number}>}
 *   Sorted descending by count (hottest endpoint first). Non-array input
 *   and entries with missing/malformed URLs are silently dropped.
 */
export function reaggregateByOriginPath(entries) {
    if (!Array.isArray(entries))
        return [];
    const byPath = new Map();
    for (const entry of entries) {
        const rawUrl = entry?.url;
        if (typeof rawUrl !== 'string')
            continue;
        let origin;
        let pathname;
        try {
            ({ origin, pathname } = new URL(rawUrl));
        }
        catch {
            // malformed URL — skip
            continue;
        }
        const key = origin + pathname;
        const requestCount = typeof entry.count === 'number' ? entry.count : 0;
        const statusCounts = entry.statuses ?? {};
        const bucket = byPath.get(key);
        if (!bucket) {
            byPath.set(key, {
                url: key,
                count: requestCount,
                statuses: { ...statusCounts },
                totalBytes: 0, // Backend's pre-grouped shape doesn't expose response bytes; placeholder until we wire fetched-bytes.
            });
            continue;
        }
        bucket.count += requestCount;
        for (const [statusCode, n] of Object.entries(statusCounts)) {
            if (typeof n === 'number') {
                bucket.statuses[statusCode] = (bucket.statuses[statusCode] ?? 0) + n;
            }
        }
    }
    return Array.from(byPath.values()).sort((a, b) => b.count - a.count);
}
|
|
59
|
+
/**
 * Map backend's console_slice entry shape to MCP's ConsoleErrorEntry.
 * Backend shape: {text, level, location: {url, line}, timestamp}
 * MCP shape: {level, text, source?, lineNumber?, timestamp?}
 *
 * @param {Array<object>} entries - raw console_slice entries (already
 *   snake→camel'd by the transport layer)
 * @returns {Array<{level: string, text: string, source?: string, lineNumber?: number, timestamp?: number}>}
 *   Non-array input yields []; non-object entries are skipped.
 */
export function mapConsoleSlice(entries) {
    if (!Array.isArray(entries))
        return [];
    const out = [];
    for (const e of entries) {
        if (typeof e !== 'object' || e === null)
            continue;
        const entry = {
            level: typeof e.level === 'string' ? e.level : 'log',
            text: typeof e.text === 'string' ? e.text : '',
        };
        // Prefer the nested backend `location` object; fall back to flat
        // fields so entries already in MCP shape pass through unchanged.
        const loc = e.location ?? {};
        if (typeof loc.url === 'string' && loc.url)
            entry.source = loc.url;
        else if (typeof e.source === 'string' && e.source)
            entry.source = e.source;
        if (typeof loc.line === 'number')
            entry.lineNumber = loc.line;
        else if (typeof e.lineNumber === 'number')
            entry.lineNumber = e.lineNumber;
        // Backend timestamps are ISO strings; MCP type uses number (ms since epoch).
        // Coerce when possible; otherwise omit the field entirely.
        if (typeof e.timestamp === 'number') {
            entry.timestamp = e.timestamp;
        }
        else if (typeof e.timestamp === 'string') {
            const parsed = Date.parse(e.timestamp);
            // Number.isNaN over the coercing global isNaN — same result here
            // (Date.parse always returns a number), but the non-coercing
            // form is the safe idiom.
            if (!Number.isNaN(parsed))
                entry.timestamp = parsed;
        }
        out.push(entry);
    }
    return out;
}
|
|
10
98
|
/**
|
|
11
99
|
* Aggregate HAR `log.entries` into per-endpoint NetworkSummary[], sorted
|
|
12
100
|
* descending by request count (hottest endpoints first). Malformed entries
|