@debugg-ai/debugg-ai-mcp 2.1.4 → 2.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -403,6 +403,13 @@ async function testPageChangesHandlerInner(input, context, rawProgressCallback)
403
403
  if (surferNode?.outputData) {
404
404
  responsePayload.surferOutput = sanitizeResponseUrls(surferNode.outputData, ctx);
405
405
  }
406
+ // Backend release 2026-04-25: browser_session block on execution detail
407
+ // carries presigned S3 URLs for HAR + console log + recording. Pass through
408
+ // verbatim — sanitizeResponseUrls above only strips ngrok hosts so S3 URLs
409
+ // are preserved. Resolves client-feedback items #1 (network) + #7 (console).
410
+ if (finalExecution.browserSession) {
411
+ responsePayload.browserSession = finalExecution.browserSession;
412
+ }
406
413
  logger.toolComplete('check_app_in_browser', duration);
407
414
  // NOTE (bead 0bq): the final "Complete:" progress is emitted INSIDE
408
415
  // pollExecution's onUpdate when terminal status is detected — see the
@@ -196,6 +196,12 @@ export async function triggerCrawlHandler(input, context, rawProgressCallback) {
196
196
  responsePayload.resolvedEnvironmentId = executeResponse.resolvedEnvironmentId;
197
197
  if (executeResponse.resolvedCredentialId)
198
198
  responsePayload.resolvedCredentialId = executeResponse.resolvedCredentialId;
199
+ // Backend release 2026-04-25: browser_session block on execution detail.
200
+ // Crawl runs through the same browser pipeline, so the field is populated
201
+ // here too. Pass through verbatim (presigned S3 URLs).
202
+ if (finalExecution.browserSession) {
203
+ responsePayload.browserSession = finalExecution.browserSession;
204
+ }
199
205
  // Extract crawl metrics from surfer.crawl node (absent in older graph shapes)
200
206
  const crawlNode = nodes.find(n => n.nodeType === 'surfer.crawl');
201
207
  if (crawlNode?.outputData) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@debugg-ai/debugg-ai-mcp",
3
- "version": "2.1.4",
3
+ "version": "2.2.0",
4
4
  "description": "Zero-Config, Fully AI-Managed End-to-End Testing for all code gen platforms.",
5
5
  "type": "module",
6
6
  "bin": {