brave-real-browser-mcp-server 2.17.10 → 2.17.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,15 +4,7 @@ import { ExtensionManager } from './extension-manager.js';
4
4
  import * as path from 'path';
5
5
  import * as net from 'net';
6
6
  import { execSync, spawn } from 'child_process';
7
- import { config as dotenvConfig } from 'dotenv';
8
7
  import { BraveInstaller } from './brave-installer.js';
9
- // Load environment variables from .env file
10
- // Silence dotenv output
11
- const originalWrite = process.stdout.write;
12
- // @ts-ignore
13
- process.stdout.write = () => true;
14
- dotenvConfig();
15
- process.stdout.write = originalWrite;
16
8
  // Browser error categorization
17
9
  export var BrowserErrorType;
18
10
  (function (BrowserErrorType) {
@@ -0,0 +1,28 @@
1
import fs from 'fs';
import path from 'path';

// Debug log lives in the process working directory (NOT relative to this
// module), so it is writable regardless of where the package is installed.
// (Removed unused __filename/__dirname — LOG_FILE never used them.)
const LOG_FILE = path.join(process.cwd(), 'mcp-server-debug.log');

/**
 * Append a timestamped debug message (and optional data payload) to the
 * debug log file. If the file is not writable, falls back to stderr —
 * stderr is safe for MCP servers since stdout carries the protocol stream.
 *
 * @param {string} message - human-readable log line
 * @param {*} [data] - optional payload, pretty-printed as JSON
 */
export function logDebug(message, data) {
    const timestamp = new Date().toISOString();
    let logMessage = `[${timestamp}] ${message}`;
    // Explicit undefined check (not truthiness) so falsy payloads such as
    // 0, '' or false are still logged.
    if (data !== undefined) {
        try {
            logMessage += `\nData: ${JSON.stringify(data, null, 2)}`;
        }
        catch (e) {
            // JSON.stringify throws on circular structures and BigInt.
            logMessage += `\nData: [Circular or Non-Serializable]`;
        }
    }
    logMessage += '\n' + '-'.repeat(40) + '\n';
    try {
        fs.appendFileSync(LOG_FILE, logMessage);
    }
    catch (error) {
        // If we can't write to file, fallback to stderr (which is safe for MCP)
        console.error(`[DEBUG FAILED] ${message}`);
    }
}
@@ -280,86 +280,6 @@ ${JSON.stringify(videoData, null, 2)}
280
280
  };
281
281
  }, 'Failed to extract advanced video sources');
282
282
  }
283
- /**
284
- * Deobfuscate JavaScript - Attempt to decode obfuscated JavaScript
285
- */
286
- export async function handleDeobfuscateJS(args) {
287
- return await withErrorHandling(async () => {
288
- validateWorkflow('deobfuscate_js', {
289
- requireBrowser: true,
290
- requirePage: true,
291
- });
292
- const page = getCurrentPage();
293
- const deobfuscationResults = await page.evaluate(() => {
294
- const results = [];
295
- document.querySelectorAll('script').forEach((script, index) => {
296
- const content = script.textContent || '';
297
- if (content.length < 100)
298
- return;
299
- const analysis = {
300
- scriptIndex: index,
301
- obfuscationType: [],
302
- extractedData: {
303
- urls: [],
304
- domains: [],
305
- apiKeys: [],
306
- base64Strings: []
307
- }
308
- };
309
- // Detect obfuscation types
310
- if (content.includes('eval('))
311
- analysis.obfuscationType.push('eval');
312
- if (content.includes('atob('))
313
- analysis.obfuscationType.push('base64');
314
- if (content.match(/0x[0-9a-f]{4}/gi))
315
- analysis.obfuscationType.push('hex');
316
- if (content.match(/_0x[0-9a-f]+/gi))
317
- analysis.obfuscationType.push('identifier_obfuscation');
318
- if (content.includes('\\x'))
319
- analysis.obfuscationType.push('hex_escape');
320
- if (analysis.obfuscationType.length === 0)
321
- return;
322
- // Extract URLs
323
- const urlPattern = /https?:\/\/[^\s"'<>]+/gi;
324
- const urls = content.match(urlPattern);
325
- if (urls) {
326
- analysis.extractedData.urls = [...new Set(urls)];
327
- }
328
- // Extract base64 encoded strings
329
- const base64Pattern = /["']([A-Za-z0-9+/]{20,}={0,2})["']/g;
330
- let match;
331
- while ((match = base64Pattern.exec(content)) !== null) {
332
- try {
333
- const decoded = atob(match[1]);
334
- if (decoded.includes('http') || decoded.includes('video') || decoded.includes('.m3u8')) {
335
- analysis.extractedData.base64Strings.push({
336
- original: match[1].substring(0, 50) + '...',
337
- decoded: decoded.substring(0, 200)
338
- });
339
- }
340
- }
341
- catch (e) {
342
- // Not valid base64
343
- }
344
- }
345
- // Extract potential domains
346
- const domainPattern = /[a-z0-9][a-z0-9-]*\.(com|net|org|io|tv|online|xyz|cc)/gi;
347
- const domains = content.match(domainPattern);
348
- if (domains) {
349
- analysis.extractedData.domains = [...new Set(domains)];
350
- }
351
- results.push(analysis);
352
- });
353
- return results.filter(r => r.obfuscationType.length > 0);
354
- });
355
- return {
356
- content: [{
357
- type: 'text',
358
- text: `🔓 Deobfuscation Results:\n\nFound ${deobfuscationResults.length} obfuscated scripts\n\n${JSON.stringify(deobfuscationResults, null, 2)}`
359
- }]
360
- };
361
- }, 'Failed to deobfuscate JavaScript');
362
- }
363
283
  /**
364
284
  * Multi-Layer Redirect Tracer - Follow multiple redirect layers to find final video source
365
285
  */
@@ -0,0 +1,119 @@
1
// @ts-nocheck
import { getCurrentPage } from '../browser-manager.js';
import { withErrorHandling, sleep } from '../system-utils.js';
import { validateWorkflow } from '../workflow-validation.js';
/**
 * Deep Analysis Tool
 * Captures a comprehensive snapshot of the current page — network requests,
 * console output, a simplified DOM outline and (optionally) a screenshot —
 * recorded over a configurable time window.
 *
 * @param {object} args
 * @param {string} [args.url] - navigate here first (skipped if already on it)
 * @param {number} [args.duration=5000] - recording window in milliseconds
 * @param {boolean} [args.screenshots=true] - include a base64 WebP screenshot
 * @param {boolean} [args.network=true] - record outgoing requests
 * @param {boolean} [args.logs=true] - record console messages
 * @param {boolean} [args.dom=true] - include a simplified DOM snapshot
 */
export async function handleDeepAnalysis(args) {
    return await withErrorHandling(async () => {
        validateWorkflow('deep_analysis', {
            requireBrowser: true,
            requirePage: true,
        });
        const page = getCurrentPage();
        const { url, duration = 5000, screenshots = true, network = true, logs = true, dom = true } = args;
        // Navigate only if a target URL was provided and we are not already on it.
        if (url && page.url() !== url) {
            await page.goto(url, { waitUntil: 'domcontentloaded', timeout: 30000 });
        }
        // Storage for captured data
        const capturedData = {
            network: [],
            console: [],
            error: null
        };
        // Register listeners; each registration pushes a matching cleanup fn
        // so everything is detached before we snapshot.
        const listeners = [];
        if (network) {
            const netHandler = (req) => {
                capturedData.network.push({
                    type: 'request',
                    url: req.url(),
                    method: req.method(),
                    resource: req.resourceType(),
                    timestamp: Date.now()
                });
            };
            page.on('request', netHandler);
            listeners.push(() => page.off('request', netHandler));
        }
        if (logs) {
            const logHandler = (msg) => {
                capturedData.console.push({
                    type: msg.type(),
                    text: msg.text(),
                    timestamp: Date.now()
                });
            };
            page.on('console', logHandler);
            listeners.push(() => page.off('console', logHandler));
        }
        // Record for the requested window.
        await sleep(duration);
        // Cleanup Listeners
        listeners.forEach(cleanup => cleanup());
        // Assemble the snapshot.
        const result = {
            timestamp: new Date().toISOString(),
            url: page.url(),
            title: await page.title(),
            recordingDuration: duration,
            networkRequests: capturedData.network.length,
            consoleLogs: capturedData.console.length,
            data: {
                network: capturedData.network,
                console: capturedData.console
            }
        };
        if (dom) {
            result.data.dom = await page.evaluate(() => {
                // Simplified DOM snapshot.
                // FIX: regex was /\\s+/g, which matches a literal "\s" pair
                // instead of whitespace, so runs of spaces were never collapsed.
                const cleanText = (text) => text?.replace(/\s+/g, ' ').trim() || '';
                return {
                    title: document.title,
                    headings: Array.from(document.querySelectorAll('h1, h2, h3')).map(h => ({ tag: h.tagName, text: cleanText(h.textContent) })),
                    buttons: Array.from(document.querySelectorAll('button, a.btn, input[type="submit"]')).map(b => cleanText(b.textContent)),
                    links: Array.from(document.querySelectorAll('a')).slice(0, 50).map(a => ({ text: cleanText(a.textContent), href: a.href })),
                    inputs: Array.from(document.querySelectorAll('input, textarea, select')).map(i => ({ tag: i.tagName, type: i.type, id: i.id, placeholder: i.placeholder }))
                };
            });
        }
        if (screenshots) {
            result.data.screenshot = await page.screenshot({ encoding: 'base64', type: 'webp', quality: 50 });
        }
        // Fixed mojibake in the user-facing summary (emoji/bullets were
        // UTF-8 bytes mis-decoded as cp1252 in the original).
        const summary = `
🔍 Deep Analysis Report
═══════════════════════

📍 URL: ${result.url}
⏱️ Duration: ${duration}ms
📅 Time: ${result.timestamp}

📊 Statistics:
• Network Requests: ${result.networkRequests}
• Console Logs: ${result.consoleLogs}
${dom ? `• DOM Elements: ${result.data.dom.headings.length} headings, ${result.data.dom.buttons.length} buttons, ${result.data.dom.links.length} links` : ''}

${logs && result.data.console.length > 0 ? `
📝 Recent Console Logs (Last 5):
${result.data.console.slice(-5).map(l => `  [${l.type}] ${l.text}`).join('\n')}
` : ''}

${dom ? `
🏗️ Page Structure:
• Headings: ${result.data.dom.headings.map(h => h.text).join(', ')}
• Interactive: ${result.data.dom.buttons.length} buttons
` : ''}
`;
        return {
            content: [
                { type: 'text', text: summary },
                // FIX: MCP image content requires `mimeType`; `netType` is not
                // part of the content schema and clients would drop the image.
                ...(screenshots ? [{ type: 'image', data: result.data.screenshot, mimeType: 'image/webp' }] : [])
            ],
            // Return full dataset as JSON for programmatic use if needed (MCP usually just text/image)
            // We embed the summary logic here.
        };
    }, 'Deep Analysis Failed');
}
@@ -67,66 +67,6 @@ export async function handleBatchElementScraper(args) {
67
67
  };
68
68
  }, 'Failed to batch scrape elements');
69
69
  }
70
- /**
71
- * ⤏⤭āĨ€ elements ⤕āĨ‡ attributes (href, src, data-*) collect ā¤•ā¤°ā¤¤ā¤ž ā¤šāĨˆ
72
- */
73
- export async function handleAttributeHarvester(args) {
74
- return await withErrorHandling(async () => {
75
- validateWorkflow('attribute_harvester', {
76
- requireBrowser: true,
77
- requirePage: true,
78
- });
79
- const page = getCurrentPage();
80
- const selector = args.selector;
81
- const attributes = args.attributes || [];
82
- const maxElements = args.maxElements || 100;
83
- const attributeData = await page.evaluate(({ selector, attributes, maxElements }) => {
84
- const elements = document.querySelectorAll(selector);
85
- const result = {
86
- selector,
87
- count: Math.min(elements.length, maxElements),
88
- attributes: [],
89
- };
90
- let count = 0;
91
- elements.forEach((element, index) => {
92
- if (count >= maxElements)
93
- return;
94
- const attrs = {};
95
- if (attributes.length > 0) {
96
- // Extract specific attributes
97
- attributes.forEach((attr) => {
98
- const value = element.getAttribute(attr);
99
- if (value !== null) {
100
- attrs[attr] = value;
101
- }
102
- });
103
- }
104
- else {
105
- // Extract all attributes
106
- Array.from(element.attributes).forEach((attr) => {
107
- attrs[attr.name] = attr.value;
108
- });
109
- }
110
- if (Object.keys(attrs).length > 0) {
111
- result.attributes.push({
112
- element: index,
113
- attrs,
114
- });
115
- count++;
116
- }
117
- });
118
- return result;
119
- }, { selector, attributes, maxElements });
120
- return {
121
- content: [
122
- {
123
- type: 'text',
124
- text: `✅ Harvested attributes from ${attributeData.count} elements\n\n${JSON.stringify(attributeData, null, 2)}`,
125
- },
126
- ],
127
- };
128
- }, 'Failed to harvest attributes');
129
- }
130
70
  /**
131
71
  * Internal/external links classification ⤕āĨ‡ ā¤¸ā¤žā¤Ĩ collect ā¤•ā¤°ā¤¤ā¤ž ā¤šāĨˆ
132
72
  */