@fanboynz/network-scanner 2.0.57 → 2.0.58

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,13 +9,13 @@ jobs:
9
9
  contents: write
10
10
 
11
11
  steps:
12
- - uses: actions/checkout@v4
12
+ - uses: actions/checkout@v5
13
13
  with:
14
14
  token: ${{ secrets.GITHUB_TOKEN }}
15
15
  fetch-depth: 0
16
16
 
17
17
  - name: Setup Node.js
18
- uses: actions/setup-node@v4
18
+ uses: actions/setup-node@v5
19
19
  with:
20
20
  node-version: '20'
21
21
  registry-url: 'https://registry.npmjs.org'
@@ -38,4 +38,4 @@ jobs:
38
38
  - name: Push changes
39
39
  run: git push --follow-tags
40
40
  env:
41
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
41
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
package/CLAUDE.md ADDED
@@ -0,0 +1,65 @@
1
+ # Network Scanner (NWSS)
2
+
3
+ Puppeteer-based network scanner for analyzing web traffic, generating adblock filter rules, and identifying third-party requests. Features fingerprint spoofing, Cloudflare bypass, content analysis with curl/grep, VPN/proxy routing, and multiple output formats.
4
+
5
+ ## Project Structure
6
+
7
+ - `nwss.js` — Main entry point (~4,600 lines). CLI args, URL processing, orchestration.
8
+ - `config.json` — Default scan configuration (sites, filters, options).
9
+ - `lib/` — 28 focused, single-purpose modules:
10
+ - `fingerprint.js` — Bot detection evasion (device/GPU/timezone spoofing)
11
+ - `cloudflare.js` — Cloudflare challenge detection and solving
12
+ - `browserhealth.js` — Memory management and browser lifecycle
13
+ - `interaction.js` — Human-like mouse/scroll/typing simulation
14
+ - `smart-cache.js` — Multi-layer caching with persistence
15
+ - `nettools.js` — WHOIS/dig integration
16
+ - `output.js` — Multi-format rule output (adblock, dnsmasq, unbound, pihole, etc.)
17
+ - `proxy.js` — SOCKS5/HTTP proxy support
18
+ - `wireguard_vpn.js` / `openvpn_vpn.js` — VPN routing
19
+ - `adblock.js` — Adblock filter parsing and validation
20
+ - `validate_rules.js` — Domain and rule format validation
21
+ - `colorize.js` — Console output formatting and colors
22
+ - `domain-cache.js` — Domain detection cache for performance
23
+ - `post-processing.js` — Result cleanup and deduplication
24
+ - `redirect.js`, `referrer.js`, `cdp.js`, `curl.js`, `grep.js`, `compare.js`, `compress.js`, `dry-run.js`, `browserexit.js`, `clear_sitedata.js`, `flowproxy.js`, `ignore_similar.js`, `searchstring.js`
25
+ - `.github/workflows/npm-publish.yml` — Automated npm publishing
26
+ - `nwss.1` — Man page
27
+
28
+ ## Tech Stack
29
+
30
+ - **Node.js** >=20.0.0
31
+ - **puppeteer** >=20.0.0 — Headless browser automation
32
+ - **psl** — Public Suffix List for domain parsing
33
+ - **lru-cache** — LRU cache implementation
34
+ - **p-limit** — Concurrency limiting (dynamically imported)
35
+ - **eslint** — Linting (`npm run lint`)
36
+
37
+ ## Conventions
38
+
39
+ - Store modular functionality in `./lib/` with focused, single-purpose modules
40
+ - Use `messageColors` and `formatLogMessage` from `./lib/colorize` for consistent console output
41
+ - Implement timeout protection for all Puppeteer operations using `Promise.race` patterns
42
+ - Handle browser lifecycle with comprehensive cleanup in try-finally blocks
43
+ - Validate all external tool availability before use (grep, curl, whois, dig)
44
+ - Use `forceDebug` flag for detailed logging, `silentMode` for minimal output
45
+ - Use `Object.freeze` for constant configuration objects (TIMEOUTS, CACHE_LIMITS, CONCURRENCY_LIMITS)
46
+ - Use `fastTimeout(ms)` helper instead of `node:timers/promises` for Puppeteer 22.x compatibility
47
+
48
+ ## Running
49
+
50
+ ```bash
51
+ node nwss.js # Run with default config.json
52
+ node nwss.js config-custom.json # Run with custom config
53
+ node nwss.js --validate-config # Validate configuration
54
+ node nwss.js --dry-run # Preview without network calls
55
+ node nwss.js --headful # Launch with browser GUI
56
+ ```
57
+
58
+ ## Files to Ignore
59
+
60
+ - `node_modules/**`
61
+ - `logs/**`
62
+ - `sources/**`
63
+ - `.cache/**`
64
+ - `*.log`
65
+ - `*.gz`
package/lib/adblock.js CHANGED
@@ -51,11 +51,12 @@ function parseAdblockRules(filePath, options = {}) {
51
51
  caseSensitive = false
52
52
  } = options;
53
53
 
54
- if (!fs.existsSync(filePath)) {
54
+ let fileContent;
55
+ try {
56
+ fileContent = fs.readFileSync(filePath, 'utf-8');
57
+ } catch (err) {
55
58
  throw new Error(`Adblock rules file not found: ${filePath}`);
56
59
  }
57
-
58
- const fileContent = fs.readFileSync(filePath, 'utf-8');
59
60
  const lines = fileContent.split('\n');
60
61
 
61
62
  const rules = {
@@ -5,6 +5,7 @@
5
5
 
6
6
 
7
7
  const fs = require('fs');
8
+ const path = require('path');
8
9
  const { execSync } = require('child_process');
9
10
 
10
11
  // Constants for temp file cleanup
@@ -15,20 +16,55 @@ const CHROME_TEMP_PATHS = [
15
16
  ];
16
17
 
17
18
  const CHROME_TEMP_PATTERNS = [
18
- 'com.google.Chrome.*', // Google Chrome temp files (no leading dot)
19
- '.org.chromium.Chromium.*',
20
- 'puppeteer-*'
19
+ /^\.?com\.google\.Chrome\./,
20
+ /^\.?org\.chromium\.Chromium\./,
21
+ /^puppeteer-/
21
22
  ];
22
23
 
24
+ /**
25
+ * Count and remove matching Chrome/Puppeteer temp entries from a directory using fs
26
+ * @param {string} basePath - Directory to scan
27
+ * @param {boolean} forceDebug - Whether to output debug logs
28
+ * @returns {number} Number of items cleaned
29
+ */
30
+ function cleanTempDir(basePath, forceDebug) {
31
+ let entries;
32
+ try {
33
+ entries = fs.readdirSync(basePath);
34
+ } catch {
35
+ if (forceDebug) console.log(`[debug] [temp-cleanup] Cannot read ${basePath}`);
36
+ return 0;
37
+ }
38
+
39
+ let cleaned = 0;
40
+ for (const entry of entries) {
41
+ let matched = false;
42
+ for (const re of CHROME_TEMP_PATTERNS) {
43
+ if (re.test(entry)) { matched = true; break; }
44
+ }
45
+ if (!matched) continue;
46
+
47
+ try {
48
+ fs.rmSync(path.join(basePath, entry), { recursive: true, force: true });
49
+ cleaned++;
50
+ if (forceDebug) console.log(`[debug] [temp-cleanup] Removed ${basePath}/${entry}`);
51
+ } catch (rmErr) {
52
+ if (forceDebug) console.log(`[debug] [temp-cleanup] Failed to remove ${basePath}/${entry}: ${rmErr.message}`);
53
+ }
54
+ }
55
+
56
+ return cleaned;
57
+ }
58
+
23
59
  /**
24
60
  * Clean Chrome temporary files and directories
25
61
  * @param {Object} options - Cleanup options
26
62
  * @param {boolean} options.includeSnapTemp - Whether to clean snap temp directories
27
63
  * @param {boolean} options.forceDebug - Whether to output debug logs
28
64
  * @param {boolean} options.comprehensive - Whether to perform comprehensive cleanup of all temp locations
29
- * @returns {Promise<Object>} Cleanup results
65
+ * @returns {Object} Cleanup results
30
66
  */
31
- async function cleanupChromeTempFiles(options = {}) {
67
+ function cleanupChromeTempFiles(options = {}) {
32
68
  const {
33
69
  includeSnapTemp = false,
34
70
  forceDebug = false,
@@ -36,57 +72,20 @@ async function cleanupChromeTempFiles(options = {}) {
36
72
  } = options;
37
73
 
38
74
  try {
39
-
40
- // Base cleanup commands for standard temp directories
41
- const cleanupCommands = [
42
- 'rm -rf /tmp/com.google.Chrome.* 2>/dev/null || true',
43
- 'rm -rf /tmp/.com.google.Chrome.* 2>/dev/null || true',
44
- 'rm -rf /tmp/.org.chromium.Chromium.* 2>/dev/null || true',
45
- 'rm -rf /tmp/puppeteer-* 2>/dev/null || true',
46
- 'rm -rf /dev/shm/.com.google.Chrome.* 2>/dev/null || true',
47
- 'rm -rf /dev/shm/.org.chromium.Chromium.* 2>/dev/null || true'
48
- ];
49
-
50
- // Add snap-specific cleanup if requested
51
- if (includeSnapTemp || comprehensive) {
52
- cleanupCommands.push('rm -rf /dev/shm/com.google.Chrome.* 2>/dev/null || true');
53
- cleanupCommands.push(
54
- 'rm -rf /tmp/snap-private-tmp/snap.chromium/tmp/.org.chromium.Chromium.* 2>/dev/null || true',
55
- 'rm -rf /tmp/snap-private-tmp/snap.chromium/tmp/puppeteer-* 2>/dev/null || true'
56
- );
57
- }
75
+ const paths = comprehensive || includeSnapTemp
76
+ ? CHROME_TEMP_PATHS
77
+ : CHROME_TEMP_PATHS.slice(0, 2); // /tmp and /dev/shm only
58
78
 
59
79
  let totalCleaned = 0;
60
-
61
- for (const command of cleanupCommands) {
62
- try {
63
- // Extract glob pattern and count matches before deletion
64
- const globPattern = command.match(/rm -rf ([^ ]+)/)?.[1];
65
- if (!globPattern) continue;
66
- const fileCount = parseInt(execSync(`ls -1d ${globPattern} 2>/dev/null | wc -l || echo 0`, { stdio: 'pipe' }).toString().trim()) || 0;
67
-
68
- if (fileCount > 0) {
69
- execSync(command, { stdio: 'ignore' });
70
- totalCleaned += fileCount;
71
-
72
- if (forceDebug) {
73
- console.log(`[debug] [temp-cleanup] Cleaned ${fileCount} items from ${globPattern}`);
74
- }
75
- }
76
- } catch (cmdErr) {
77
- // Ignore individual command errors but log in debug mode
78
- if (forceDebug) {
79
- console.log(`[debug] [temp-cleanup] Cleanup command failed: ${command} (${cmdErr.message})`);
80
- }
81
- }
80
+ for (const basePath of paths) {
81
+ totalCleaned += cleanTempDir(basePath, forceDebug);
82
82
  }
83
83
 
84
84
  if (forceDebug) {
85
- console.log(`[debug] [temp-cleanup] Standard cleanup completed (${totalCleaned} items)`);
85
+ console.log(`[debug] [temp-cleanup] Cleanup completed (${totalCleaned} items)`);
86
86
  }
87
-
87
+
88
88
  return { success: true, itemsCleaned: totalCleaned };
89
-
90
89
  } catch (cleanupErr) {
91
90
  if (forceDebug) {
92
91
  console.log(`[debug] [temp-cleanup] Chrome cleanup error: ${cleanupErr.message}`);
@@ -96,72 +95,38 @@ async function cleanupChromeTempFiles(options = {}) {
96
95
  }
97
96
 
98
97
  /**
99
- * Comprehensive temp file cleanup that systematically checks all known Chrome temp locations
98
+ * Comprehensive temp file cleanup that checks all known Chrome temp locations
100
99
  * @param {Object} options - Cleanup options
101
100
  * @param {boolean} options.forceDebug - Whether to output debug logs
102
101
  * @param {boolean} options.verbose - Whether to show verbose output
103
- * @returns {Promise<Object>} Cleanup results
102
+ * @returns {Object} Cleanup results
104
103
  */
105
- async function comprehensiveChromeTempCleanup(options = {}) {
104
+ function comprehensiveChromeTempCleanup(options = {}) {
106
105
  const { forceDebug = false, verbose = false } = options;
107
-
106
+
108
107
  try {
109
- let totalCleaned = 0;
110
-
111
108
  if (verbose && !forceDebug) {
112
109
  console.log(`[temp-cleanup] Scanning Chrome/Puppeteer temporary files...`);
113
110
  }
114
-
111
+
112
+ let totalCleaned = 0;
115
113
  for (const basePath of CHROME_TEMP_PATHS) {
116
- // Check if the base path exists before trying to clean it
117
- try {
118
- const pathExists = fs.existsSync(basePath);
119
-
120
- if (!pathExists) {
121
- if (forceDebug) {
122
- console.log(`[debug] [temp-cleanup] Skipping non-existent path: ${basePath}`);
123
- }
124
- continue;
125
- }
126
-
127
- for (const pattern of CHROME_TEMP_PATTERNS) {
128
- const fullPattern = `${basePath}/${pattern}`;
129
-
130
- // Count items before deletion
131
- const countCommand = `ls -1d ${fullPattern} 2>/dev/null | wc -l || echo 0`;
132
- const itemCount = parseInt(execSync(countCommand, { stdio: 'pipe' }).toString().trim()) || 0;
133
-
134
- if (itemCount > 0) {
135
- const deleteCommand = `rm -rf ${fullPattern} 2>/dev/null || true`;
136
- execSync(deleteCommand, { stdio: 'ignore' });
137
- totalCleaned += itemCount;
138
-
139
- if (forceDebug) {
140
- console.log(`[debug] [temp-cleanup] Removed ${itemCount} items matching ${fullPattern}`);
141
- }
142
- }
143
- }
144
- } catch (pathErr) {
145
- if (forceDebug) {
146
- console.log(`[debug] [temp-cleanup] Error checking path ${basePath}: ${pathErr.message}`);
147
- }
148
- }
114
+ totalCleaned += cleanTempDir(basePath, forceDebug);
149
115
  }
150
-
116
+
151
117
  if (verbose && totalCleaned > 0) {
152
- console.log(`[temp-cleanup] ? Removed ${totalCleaned} temporary file(s)/folder(s)`);
118
+ console.log(`[temp-cleanup] Removed ${totalCleaned} temporary file(s)/folder(s)`);
153
119
  } else if (verbose && totalCleaned === 0) {
154
- console.log(`[temp-cleanup] ? Clean - no remaining temporary files`);
120
+ console.log(`[temp-cleanup] Clean - no remaining temporary files`);
155
121
  } else if (forceDebug) {
156
122
  console.log(`[debug] [temp-cleanup] Comprehensive cleanup completed (${totalCleaned} items)`);
157
123
  }
158
-
124
+
159
125
  return { success: true, itemsCleaned: totalCleaned };
160
-
161
126
  } catch (err) {
162
127
  const errorMsg = `Comprehensive temp file cleanup failed: ${err.message}`;
163
128
  if (verbose) {
164
- console.warn(`[temp-cleanup] ? ${errorMsg}`);
129
+ console.warn(`[temp-cleanup] ${errorMsg}`);
165
130
  } else if (forceDebug) {
166
131
  console.log(`[debug] [temp-cleanup] ${errorMsg}`);
167
132
  }
@@ -317,7 +282,7 @@ async function forceBrowserKill(browser, forceDebug = false) {
317
282
  }
318
283
 
319
284
  // Wait for graceful termination
320
- await new Promise(resolve => setTimeout(resolve, 3000));
285
+ await new Promise(resolve => setTimeout(resolve, 1000));
321
286
 
322
287
  // Force kill any remaining processes with SIGKILL
323
288
  for (const pid of pidsToKill) {
@@ -438,11 +438,12 @@ async function performRealtimeWindowCleanup(browserInstance, threshold = REALTIM
438
438
  let closedCount = 0;
439
439
  for (const page of safePagesToClose) {
440
440
  try {
441
- // Cache both page state and URL for this iteration
442
441
  const isPageClosed = page.isClosed();
443
- const pageUrl = page.url();
444
-
445
- if (!isPageClosed) {
442
+
443
+ // Re-check processing state — may have changed since safety check
444
+ const usage = pageUsageTracker.get(page);
445
+ if (!isPageClosed && !(usage && usage.isProcessing)) {
446
+ const pageUrl = page.url();
446
447
  await page.close();
447
448
  pageCreationTracker.delete(page); // Remove from tracker
448
449
  pageUsageTracker.delete(page);
@@ -569,6 +570,16 @@ function trackPageForRealtime(page) {
569
570
  updatePageUsage(page, false); // Initialize usage tracking
570
571
  }
571
572
 
573
+ /**
574
+ * Removes a page from all tracking Maps immediately.
575
+ * Call this before page.close() to prevent stale entries during concurrent execution.
576
+ * @param {import('puppeteer').Page} page - Page to untrack
577
+ */
578
+ function untrackPage(page) {
579
+ pageCreationTracker.delete(page);
580
+ pageUsageTracker.delete(page);
581
+ }
582
+
572
583
  /**
573
584
  * Purges stale entries from tracking Maps (pages that were closed without cleanup)
574
585
  * Should be called periodically to prevent memory leaks
@@ -1225,6 +1236,7 @@ module.exports = {
1225
1236
  isBrowserHealthy,
1226
1237
  isCriticalProtocolError,
1227
1238
  updatePageUsage,
1239
+ untrackPage,
1228
1240
  cleanupPageBeforeReload,
1229
1241
  purgeStaleTrackers
1230
1242
  };
package/lib/compare.js CHANGED
@@ -9,10 +9,6 @@ const path = require('path');
9
9
  */
10
10
  function loadComparisonRules(compareFilePath, forceDebug = false) {
11
11
  try {
12
- if (!fs.existsSync(compareFilePath)) {
13
- throw new Error(`Comparison file not found: ${compareFilePath}`);
14
- }
15
-
16
12
  const content = fs.readFileSync(compareFilePath, 'utf8');
17
13
  const lines = content.split('\n')
18
14
  .map(line => line.trim())
package/lib/compress.js CHANGED
@@ -24,9 +24,7 @@ async function compressFile(filePath, removeOriginal = true) {
24
24
  const handleError = (error) => {
25
25
  // Clean up partial compressed file on error
26
26
  try {
27
- if (fs.existsSync(compressedPath)) {
28
- fs.unlinkSync(compressedPath);
29
- }
27
+ fs.unlinkSync(compressedPath);
30
28
  } catch (cleanupErr) {
31
29
  // Ignore cleanup errors
32
30
  }
@@ -69,18 +67,11 @@ async function compressMultipleFiles(filePaths, removeOriginals = true) {
69
67
 
70
68
  for (const filePath of filePaths) {
71
69
  try {
72
- if (fs.existsSync(filePath)) {
73
- const compressedPath = await compressFile(filePath, removeOriginals);
74
- results.successful.push({
75
- original: filePath,
76
- compressed: compressedPath
77
- });
78
- } else {
79
- results.failed.push({
80
- path: filePath,
81
- error: 'File does not exist'
82
- });
83
- }
70
+ const compressedPath = await compressFile(filePath, removeOriginals);
71
+ results.successful.push({
72
+ original: filePath,
73
+ compressed: compressedPath
74
+ });
84
75
  } catch (error) {
85
76
  results.failed.push({
86
77
  path: filePath,
package/lib/dry-run.js CHANGED
@@ -436,7 +436,7 @@ function writeDryRunOutput(outputFile, dryRunOutput, silentMode = false) {
436
436
  // Ensure output directory exists
437
437
  const path = require('path');
438
438
  const outputDir = path.dirname(outputFile);
439
- if (outputDir !== '.' && !fs.existsSync(outputDir)) {
439
+ if (outputDir !== '.') {
440
440
  fs.mkdirSync(outputDir, { recursive: true });
441
441
  }
442
442
 
@@ -41,8 +41,6 @@ const USER_AGENT_COLLECTIONS = Object.freeze(new Map([
41
41
  ['safari', "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.6 Safari/605.1.15"]
42
42
  ]));
43
43
 
44
- // Timezone configuration with offsets
45
-
46
44
  // GPU pool — realistic vendor/renderer combos per OS (used for WebGL spoofing)
47
45
  const GPU_POOL = {
48
46
  windows: [
@@ -86,12 +84,20 @@ function selectGpuForUserAgent(userAgentString) {
86
84
  const pool = GPU_POOL[osKey];
87
85
  return pool[Math.floor(Math.random() * pool.length)];
88
86
  }
89
- const TIMEZONE_CONFIG = {
90
- 'America/New_York': { offset: 300, abbr: 'EST' },
91
- 'America/Los_Angeles': { offset: 480, abbr: 'PST' },
92
- 'Europe/London': { offset: 0, abbr: 'GMT' },
93
- 'America/Chicago': { offset: 360, abbr: 'CST' }
94
- };
87
+
88
+ /**
89
+ * Checks if an error is a session/protocol closed error (common during page navigation)
90
+ */
91
+ function isSessionClosedError(err) {
92
+ const msg = err.message;
93
+ return msg.includes('Session closed') ||
94
+ msg.includes('addScriptToEvaluateOnNewDocument timed out') ||
95
+ msg.includes('Target closed') ||
96
+ msg.includes('Protocol error') || err.name === 'ProtocolError' ||
97
+ msg.includes('detached Frame') || msg.includes('Navigating frame was detached') ||
98
+ msg.includes('Cannot find context') ||
99
+ msg.includes('Execution context was destroyed');
100
+ }
95
101
 
96
102
  /**
97
103
  * Safely defines a property with comprehensive error handling
@@ -406,11 +412,6 @@ async function applyUserAgentSpoofing(page, siteConfig, forceDebug, currentUrl)
406
412
  }
407
413
  }
408
414
 
409
- // Add cached descriptors helper for page context
410
- const CACHED_DESCRIPTORS = {
411
- getter: (fn) => ({ get: fn, enumerable: true, configurable: true })
412
- };
413
-
414
415
  // Add monomorphic spoofing functions for page context
415
416
  function spoofNavigatorProperties(navigator, properties) {
416
417
  for (const [prop, descriptor] of Object.entries(properties)) {
@@ -1689,15 +1690,31 @@ async function applyUserAgentSpoofing(page, siteConfig, forceDebug, currentUrl)
1689
1690
  if (debugEnabled) console.log('[fingerprint] toString protection applied to all spoofed functions');
1690
1691
  }, 'Function.prototype.toString bulk masking');
1691
1692
 
1693
+ // Trigger interaction-gated scripts (GTM, Monetag etc.) on page load
1694
+ safeExecute(() => {
1695
+ function triggerInteraction() {
1696
+ setTimeout(() => {
1697
+ const x = Math.floor(Math.random() * 800) + 100;
1698
+ const y = Math.floor(Math.random() * 400) + 100;
1699
+ window.dispatchEvent(new MouseEvent('mousemove', {
1700
+ clientX: x, clientY: y, bubbles: true, cancelable: true, view: window
1701
+ }));
1702
+ window.dispatchEvent(new Event('scroll', { bubbles: true }));
1703
+ document.dispatchEvent(new KeyboardEvent('keydown', {
1704
+ key: 'Tab', code: 'Tab', bubbles: true
1705
+ }));
1706
+ }, 50);
1707
+ }
1708
+ if (document.readyState === 'loading') {
1709
+ document.addEventListener('DOMContentLoaded', triggerInteraction, { once: true });
1710
+ } else {
1711
+ triggerInteraction();
1712
+ }
1713
+ }, 'interaction-gated script trigger');
1714
+
1692
1715
  }, ua, forceDebug, selectedGpu);
1693
1716
  } catch (stealthErr) {
1694
- if (stealthErr.message.includes('Session closed') ||
1695
- stealthErr.message.includes('addScriptToEvaluateOnNewDocument timed out') ||
1696
- stealthErr.message.includes('Target closed') ||
1697
- stealthErr.message.includes('Protocol error') || stealthErr.name === 'ProtocolError' ||
1698
- stealthErr.message.includes('detached Frame') || stealthErr.message.includes('Navigating frame was detached') ||
1699
- stealthErr.message.includes('Cannot find context') ||
1700
- stealthErr.message.includes('Execution context was destroyed')) {
1717
+ if (isSessionClosedError(stealthErr)) {
1701
1718
  if (forceDebug) console.log(`[debug] Page closed during stealth injection: ${currentUrl}`);
1702
1719
  return;
1703
1720
  }
@@ -1749,13 +1766,7 @@ async function applyBraveSpoofing(page, siteConfig, forceDebug, currentUrl) {
1749
1766
  }
1750
1767
  }, forceDebug);
1751
1768
  } catch (braveErr) {
1752
- if (braveErr.message.includes('Session closed') ||
1753
- braveErr.message.includes('addScriptToEvaluateOnNewDocument timed out') ||
1754
- braveErr.message.includes('Target closed') ||
1755
- braveErr.message.includes('Protocol error') || braveErr.name === 'ProtocolError' ||
1756
- braveErr.message.includes('detached Frame') || braveErr.message.includes('Navigating frame was detached') ||
1757
- braveErr.message.includes('Cannot find context') ||
1758
- braveErr.message.includes('Execution context was destroyed')) {
1769
+ if (isSessionClosedError(braveErr)) {
1759
1770
  if (forceDebug) console.log(`[debug] Page closed during Brave injection: ${currentUrl}`);
1760
1771
  return;
1761
1772
  }
@@ -1918,13 +1929,7 @@ async function applyFingerprintProtection(page, siteConfig, forceDebug, currentU
1918
1929
 
1919
1930
  }, { spoof, debugEnabled: forceDebug });
1920
1931
  } catch (err) {
1921
- if (err.message.includes('Session closed') ||
1922
- err.message.includes('addScriptToEvaluateOnNewDocument timed out') ||
1923
- err.message.includes('Target closed') ||
1924
- err.message.includes('Protocol error') || err.name === 'ProtocolError' ||
1925
- err.message.includes('detached Frame') || err.message.includes('Navigating frame was detached') ||
1926
- err.message.includes('Cannot find context') ||
1927
- err.message.includes('Execution context was destroyed')) {
1932
+ if (isSessionClosedError(err)) {
1928
1933
  if (forceDebug) console.log(`[debug] Page closed during fingerprint injection: ${currentUrl}`);
1929
1934
  return;
1930
1935
  }
@@ -2083,9 +2088,6 @@ async function applyAllFingerprintSpoofing(page, siteConfig, forceDebug, current
2083
2088
  }
2084
2089
  }
2085
2090
 
2086
- // Legacy compatibility function - maintained for backwards compatibility
2087
-
2088
-
2089
2091
  module.exports = {
2090
2092
  generateRealisticFingerprint,
2091
2093
  getRealisticScreenResolution,
package/lib/flowproxy.js CHANGED
@@ -41,6 +41,13 @@ const FAST_TIMEOUTS = {
41
41
  * Gets module version information
42
42
  * @returns {object} Version information object
43
43
  */
44
+ // Protocols to skip — FlowProxy only protects web traffic
45
+ const SKIP_PATTERNS = [
46
+ 'about:', 'chrome:', 'chrome-extension:', 'chrome-error:', 'chrome-search:',
47
+ 'devtools:', 'edge:', 'moz-extension:', 'safari-extension:', 'webkit:',
48
+ 'data:', 'blob:', 'javascript:', 'vbscript:', 'file:', 'ftp:', 'ftps:'
49
+ ];
50
+
44
51
  function getModuleInfo() {
45
52
  return {
46
53
  version: FLOWPROXY_MODULE_VERSION,
@@ -73,15 +80,8 @@ function shouldProcessUrl(url, forceDebug = false) {
73
80
  }
74
81
 
75
82
  // Skip browser-internal and special protocol URLs
76
- // These protocols are not relevant for FlowProxy protection
77
- const skipPatterns = [
78
- 'about:', 'chrome:', 'chrome-extension:', 'chrome-error:', 'chrome-search:',
79
- 'devtools:', 'edge:', 'moz-extension:', 'safari-extension:', 'webkit:',
80
- 'data:', 'blob:', 'javascript:', 'vbscript:', 'file:', 'ftp:', 'ftps:'
81
- ];
82
-
83
83
  const urlLower = url.toLowerCase();
84
- for (const pattern of skipPatterns) {
84
+ for (const pattern of SKIP_PATTERNS) {
85
85
  if (urlLower.startsWith(pattern)) {
86
86
  if (forceDebug) {
87
87
  console.log(`[flowproxy][url-validation] Skipping ${pattern} URL: ${url.substring(0, 100)}${url.length > 100 ? '...' : ''}`);
package/lib/grep.js CHANGED
@@ -28,7 +28,7 @@ const GREP_DEFAULTS = {
28
28
  * @param {object} options - Grep options
29
29
  * @returns {Promise<object>} Object with found boolean, matchedPattern, and allMatches array
30
30
  */
31
- async function grepContent(content, searchPatterns, options = {}) {
31
+ function grepContent(content, searchPatterns, options = {}) {
32
32
  const {
33
33
  ignoreCase = true,
34
34
  wholeWord = false,
@@ -374,11 +374,14 @@ async function humanLikeMouseMove(page, fromX, fromY, toX, toY, options = {}) {
374
374
  }
375
375
 
376
376
  for (let i = 0; i <= actualSteps; i++) {
377
+ // Bail out if page closed mid-movement
378
+ try { if (page.isClosed()) return; } catch { return; }
379
+
377
380
  const progress = i / actualSteps;
378
-
381
+
379
382
  // Apply easing curve for more natural movement
380
- const easedProgress = progress < 0.5
381
- ? 2 * progress * progress
383
+ const easedProgress = progress < 0.5
384
+ ? 2 * progress * progress
382
385
  : 1 - Math.pow(-2 * progress + 2, 2) / 2;
383
386
 
384
387
  // Calculate base position
@@ -390,7 +393,7 @@ async function humanLikeMouseMove(page, fromX, fromY, toX, toY, options = {}) {
390
393
  const curveIntensity = Math.sin((i / actualSteps) * Math.PI) * curve * distance * MOUSE_MOVEMENT.CURVE_INTENSITY_RATIO;
391
394
  const perpX = -(toY - fromY) / distance;
392
395
  const perpY = (toX - fromX) / distance;
393
-
396
+
394
397
  currentX += perpX * curveIntensity;
395
398
  currentY += perpY * curveIntensity;
396
399
  }
@@ -402,7 +405,7 @@ async function humanLikeMouseMove(page, fromX, fromY, toX, toY, options = {}) {
402
405
  }
403
406
 
404
407
  await page.mouse.move(currentX, currentY);
405
-
408
+
406
409
  // Variable delay between movements
407
410
  if (i < actualSteps) {
408
411
  const delay = Math.floor(Math.random() * (maxDelay - minDelay + 1)) + minDelay;
@@ -457,14 +460,16 @@ async function simulateScrolling(page, options = {}) {
457
460
 
458
461
  try {
459
462
  for (let i = 0; i < amount; i++) {
463
+ try { if (page.isClosed()) return; } catch { return; }
464
+
460
465
  const scrollDelta = direction === 'down' ? SCROLLING.SCROLL_DELTA : -SCROLLING.SCROLL_DELTA;
461
-
466
+
462
467
  // Smooth scrolling by breaking into smaller increments
463
468
  for (let j = 0; j < smoothness; j++) {
464
469
  await page.mouse.wheel({ deltaY: scrollDelta / smoothness });
465
470
  await fastTimeout(SCROLLING.SMOOTH_INCREMENT_DELAY);
466
471
  }
467
-
472
+
468
473
  if (i < amount - 1) {
469
474
  await fastTimeout(pauseBetween);
470
475
  }
@@ -547,6 +552,8 @@ async function interactWithElements(page, options = {}) {
547
552
 
548
553
  // Very short timeout since page should already be loaded
549
554
  await page.waitForSelector('body', { timeout: 1000 });
555
+ // Re-check after async wait — page may have closed during selector wait
556
+ if (page.isClosed()) return;
550
557
  } catch (bodyWaitErr) {
551
558
  if (options.forceDebug) {
552
559
  console.log(`[interaction] Page not ready for element interaction: ${bodyWaitErr.message}`);
@@ -677,7 +684,7 @@ async function performContentClicks(page, options = {}) {
677
684
  let lastY = minY + Math.floor(Math.random() * (maxY - minY));
678
685
 
679
686
  for (let i = 0; i < clicks; i++) {
680
- if (page.isClosed()) break;
687
+ try { if (page.isClosed()) break; } catch { break; }
681
688
 
682
689
  // Random position in content zone
683
690
  const targetX = minX + Math.floor(Math.random() * (maxX - minX));
@@ -813,11 +820,6 @@ function cleanupInteractionMemory(force = false) {
813
820
  cachedViewport = null;
814
821
  lastViewportCheck = 0;
815
822
  }
816
-
817
- // Force garbage collection if available (helps with memory pressure)
818
- if (global.gc) {
819
- global.gc();
820
- }
821
823
  }
822
824
 
823
825
  /**
@@ -915,7 +917,9 @@ async function performPageInteraction(page, currentUrl, options = {}, forceDebug
915
917
  const maxY = viewport.height;
916
918
 
917
919
  if (forceDebug) {
918
- console.log(`[interaction] Starting enhanced interaction simulation for ${new URL(currentUrl).hostname} (${intensity} intensity)`);
920
+ let hostname = currentUrl;
921
+ try { hostname = new URL(currentUrl).hostname; } catch {}
922
+ console.log(`[interaction] Starting enhanced interaction simulation for ${hostname} (${intensity} intensity)`);
919
923
  }
920
924
 
921
925
  // Configure intensity settings
@@ -1010,8 +1014,11 @@ async function performPageInteraction(page, currentUrl, options = {}, forceDebug
1010
1014
  try {
1011
1015
  const bodyElement = await page.$('body');
1012
1016
  if (bodyElement) {
1013
- await page.hover('body');
1014
- await bodyElement.dispose();
1017
+ try {
1018
+ await page.hover('body');
1019
+ } finally {
1020
+ await bodyElement.dispose();
1021
+ }
1015
1022
  }
1016
1023
  } catch (hoverErr) {
1017
1024
  // Silently handle hover failures - not critical
@@ -1037,35 +1044,6 @@ async function performPageInteraction(page, currentUrl, options = {}, forceDebug
1037
1044
  }
1038
1045
  }
1039
1046
 
1040
- /**
1041
- * Performs minimal interaction for very slow or problematic pages
1042
- * Only does basic mouse movement without body validation
1043
- */
1044
- async function performMinimalInteraction(page, currentUrl, options = {}, forceDebug = false) {
1045
- try {
1046
- if (page.isClosed()) return;
1047
-
1048
- const viewport = await getCachedViewport(page);
1049
- const maxX = viewport.width;
1050
- const maxY = viewport.height;
1051
-
1052
- if (forceDebug) {
1053
- console.log(`[interaction] Performing minimal interaction for slow page: ${new URL(currentUrl).hostname}`);
1054
- }
1055
-
1056
- // Just do basic mouse movement without body-dependent operations
1057
- const startPos = generateRandomCoordinates(maxX, maxY);
1058
- const endPos = generateRandomCoordinates(maxX, maxY);
1059
-
1060
- await page.mouse.move(startPos.x, startPos.y);
1061
- await fastTimeout(200);
1062
- await humanLikeMouseMove(page, startPos.x, startPos.y, endPos.x, endPos.y);
1063
-
1064
- } catch (minimalErr) {
1065
- // Even minimal interaction failed - page is truly broken
1066
- }
1067
- }
1068
-
1069
1047
  /**
1070
1048
  * Creates an optimized interaction configuration based on site characteristics
1071
1049
  *
@@ -5,7 +5,7 @@
5
5
  //
6
6
  // NOTE: Like wireguard_vpn.js, OpenVPN modifies system-level routing.
7
7
  // When running concurrent scans, all traffic routes through the active
8
- // VPN tunnel — not just the site that requested it. For isolated
8
+ // VPN tunnel — not just the site that requested it. For isolated
9
9
  // per-site VPN with concurrency, a SOCKS proxy approach is needed.
10
10
 
11
11
  const { execSync, spawn } = require('child_process');
@@ -77,13 +77,17 @@ function hasRootPrivileges() {
77
77
  * Detect if running inside WSL
78
78
  * @returns {boolean}
79
79
  */
80
- function isWSL() {
80
+ const _isWSL = (() => {
81
81
  try {
82
82
  const release = fs.readFileSync('/proc/version', 'utf8').toLowerCase();
83
83
  return release.includes('microsoft') || release.includes('wsl');
84
84
  } catch {
85
85
  return false;
86
86
  }
87
+ })();
88
+
89
+ function isWSL() {
90
+ return _isWSL;
87
91
  }
88
92
 
89
93
  /**
@@ -122,9 +126,7 @@ function checkTunDevice() {
122
126
  * Ensure temp directory exists with secure permissions
123
127
  */
124
128
  function ensureTempDir() {
125
- if (!fs.existsSync(TEMP_DIR)) {
126
- fs.mkdirSync(TEMP_DIR, { recursive: true, mode: 0o755 });
127
- }
129
+ fs.mkdirSync(TEMP_DIR, { recursive: true, mode: 0o755 });
128
130
  }
129
131
 
130
132
  /**
@@ -369,7 +371,7 @@ async function startConnection(configPath, vpnConfig, forceDebug = false) {
369
371
  const logPath = path.join(TEMP_DIR, `${connectionName}.log`);
370
372
 
371
373
  // Clean stale log
372
- try { if (fs.existsSync(logPath)) fs.unlinkSync(logPath); } catch {}
374
+ try { fs.unlinkSync(logPath); } catch {}
373
375
 
374
376
  // Pre-create log file writable by all so sudo openvpn can write and user can read
375
377
  try { fs.writeFileSync(logPath, '', { mode: 0o666 }); } catch {}
@@ -380,9 +382,8 @@ async function startConnection(configPath, vpnConfig, forceDebug = false) {
380
382
  console.log(formatLogMessage('debug', `[openvpn] Starting: openvpn ${args.join(' ')}`));
381
383
  }
382
384
 
383
- // Spawn OpenVPN — it daemonizes itself via --daemon, but we spawn
385
+ // Spawn OpenVPN — it daemonizes itself via --daemon, but we spawn
384
386
  // without --daemon so we can track the process directly
385
- const filteredArgs = args.filter(a => a !== '--daemon' && a !== connectionName);
386
387
  // Remove --daemon and its argument from args, run in foreground
387
388
  const fgArgs = [];
388
389
  for (let i = 0; i < args.length; i++) {
@@ -508,9 +509,7 @@ function cleanupConnectionFiles(connectionName) {
508
509
  ];
509
510
 
510
511
  for (const file of filesToClean) {
511
- try {
512
- if (fs.existsSync(file)) fs.unlinkSync(file);
513
- } catch {}
512
+ try { fs.unlinkSync(file); } catch {}
514
513
  }
515
514
  }
516
515
 
@@ -582,11 +581,9 @@ function getConnectionStatus(connectionName) {
582
581
 
583
582
  // Read last few lines of log
584
583
  try {
585
- if (fs.existsSync(info.logPath)) {
586
- const log = fs.readFileSync(info.logPath, 'utf8');
587
- const lines = log.trim().split('\n');
588
- status.lastLog = lines.slice(-3).join('\n');
589
- }
584
+ const log = fs.readFileSync(info.logPath, 'utf8');
585
+ const lines = log.trim().split('\n');
586
+ status.lastLog = lines.slice(-3).join('\n');
590
587
  } catch {}
591
588
 
592
589
  return status;
@@ -694,12 +691,12 @@ function validateOvpnConfig(ovpnConfig) {
694
691
 
695
692
  // Privilege check
696
693
  if (!hasRootPrivileges()) {
697
- result.warnings.push('OpenVPN requires root privileges — run with sudo');
694
+ result.warnings.push('OpenVPN requires root privileges — run with sudo');
698
695
  }
699
696
 
700
697
  // WSL checks
701
698
  if (isWSL()) {
702
- result.warnings.push('Running on WSL2 — ensure TUN module is loaded: sudo modprobe tun');
699
+ result.warnings.push('Running on WSL2 — ensure TUN module is loaded: sudo modprobe tun');
703
700
  const tunCheck = checkTunDevice();
704
701
  if (!tunCheck.available) {
705
702
  result.warnings.push(tunCheck.error);
@@ -862,9 +859,7 @@ function disconnectAll(forceDebug = false) {
862
859
  }
863
860
 
864
861
  // Clean up entire temp directory
865
- if (fs.existsSync(TEMP_DIR)) {
866
- try { fs.rmSync(TEMP_DIR, { recursive: true, force: true }); } catch {}
867
- }
862
+ try { fs.rmSync(TEMP_DIR, { recursive: true, force: true }); } catch {}
868
863
 
869
864
  if (forceDebug && results.tornDown > 0) {
870
865
  console.log(formatLogMessage('debug',
package/lib/output.js CHANGED
@@ -5,6 +5,9 @@ const { getTotalDomainsSkipped } = require('./domain-cache');
5
5
  const { loadComparisonRules, filterUniqueRules } = require('./compare');
6
6
  const { colorize, colors, messageColors, tags, formatLogMessage } = require('./colorize');
7
7
 
8
+ // Cache for compiled wildcard regex patterns in matchesIgnoreDomain
9
+ const wildcardRegexCache = new Map();
10
+
8
11
  /**
9
12
  * Check if domain matches any ignore patterns (supports wildcards)
10
13
  * @param {string} domain - Domain to check
@@ -37,11 +40,14 @@ function matchesIgnoreDomain(domain, ignorePatterns) {
37
40
  const baseDomain = pattern.slice(0, -2); // Remove ".*"
38
41
  return domain.startsWith(baseDomain + '.');
39
42
  } else {
40
- // Complex wildcard pattern
41
- const regexPattern = pattern
42
- .replace(/\./g, '\\.') // Escape dots
43
- .replace(/\*/g, '.*'); // Convert * to .*
44
- return new RegExp(`^${regexPattern}$`).test(domain);
43
+ // Complex wildcard pattern (cached)
44
+ if (!wildcardRegexCache.has(pattern)) {
45
+ const regexPattern = pattern
46
+ .replace(/\./g, '\\.') // Escape dots
47
+ .replace(/\*/g, '.*'); // Convert * to .*
48
+ wildcardRegexCache.set(pattern, new RegExp(`^${regexPattern}$`));
49
+ }
50
+ return wildcardRegexCache.get(pattern).test(domain);
45
51
  }
46
52
  } else {
47
53
  // Exact pattern matching
@@ -435,7 +441,7 @@ function writeOutput(lines, outputFile = null, silentMode = false) {
435
441
  if (outputFile) {
436
442
  // Ensure output directory exists
437
443
  const outputDir = path.dirname(outputFile);
438
- if (outputDir !== '.' && !fs.existsSync(outputDir)) {
444
+ if (outputDir !== '.') {
439
445
  fs.mkdirSync(outputDir, { recursive: true });
440
446
  }
441
447
 
@@ -1,5 +1,11 @@
1
1
  const { formatLogMessage } = require('./colorize');
2
2
 
3
+ // Pre-compiled regex constants for validation
4
+ const REGEX_LABEL = /^[a-zA-Z0-9-]+$/;
5
+ const REGEX_TLD = /^[a-zA-Z][a-zA-Z0-9]*$/;
6
+ const REGEX_IPv4 = /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/;
7
+ const REGEX_IPv6 = /^(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}$|^::$|^::1$|^(?:[0-9a-fA-F]{1,4}:)*::(?:[0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4}$/;
8
+
3
9
  /**
4
10
  * Enhanced domain validation function
5
11
  * @param {string} domain - The domain to validate
@@ -88,8 +94,7 @@ function isValidDomainLabel(label) {
88
94
  }
89
95
 
90
96
  // Label can only contain alphanumeric characters and hyphens
91
- const labelRegex = /^[a-zA-Z0-9-]+$/;
92
- if (!labelRegex.test(label)) {
97
+ if (!REGEX_LABEL.test(label)) {
93
98
  return false;
94
99
  }
95
100
 
@@ -115,8 +120,7 @@ function isValidTLD(tld) {
115
120
  // but still validate structure
116
121
 
117
122
  // TLD can contain letters and numbers, but must start with letter
118
- const tldRegex = /^[a-zA-Z][a-zA-Z0-9]*$/;
119
- if (!tldRegex.test(tld)) {
123
+ if (!REGEX_TLD.test(tld)) {
120
124
  return false;
121
125
  }
122
126
 
@@ -138,8 +142,7 @@ function isIPAddress(str) {
138
142
  * @returns {boolean} True if valid IPv4
139
143
  */
140
144
  function isIPv4(str) {
141
- const ipv4Regex = /^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$/;
142
- return ipv4Regex.test(str);
145
+ return REGEX_IPv4.test(str);
143
146
  }
144
147
 
145
148
  /**
@@ -148,9 +151,7 @@ function isIPv4(str) {
148
151
  * @returns {boolean} True if valid IPv6
149
152
  */
150
153
  function isIPv6(str) {
151
- // Simplified IPv6 regex - covers most common cases
152
- const ipv6Regex = /^(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}$|^::$|^::1$|^(?:[0-9a-fA-F]{1,4}:)*::(?:[0-9a-fA-F]{1,4}:)*[0-9a-fA-F]{1,4}$/;
153
- return ipv6Regex.test(str);
154
+ return REGEX_IPv6.test(str);
154
155
  }
155
156
 
156
157
  /**
@@ -467,15 +468,7 @@ function validateRulesetFile(filePath, options = {}) {
467
468
  } = options;
468
469
 
469
470
  const fs = require('fs');
470
-
471
- if (!fs.existsSync(filePath)) {
472
- return {
473
- isValid: false,
474
- error: `File not found: ${filePath}`,
475
- stats: { total: 0, valid: 0, invalid: 0, comments: 0 }
476
- };
477
- }
478
-
471
+
479
472
  let content;
480
473
  try {
481
474
  content = fs.readFileSync(filePath, 'utf8');
@@ -721,15 +714,7 @@ function cleanRulesetFile(filePath, outputPath = null, options = {}) {
721
714
 
722
715
  const fs = require('fs');
723
716
  const path = require('path');
724
-
725
- if (!fs.existsSync(filePath)) {
726
- return {
727
- success: false,
728
- error: `File not found: ${filePath}`,
729
- stats: { total: 0, valid: 0, invalid: 0, removed: 0, duplicates: 0 }
730
- };
731
- }
732
-
717
+
733
718
  let content;
734
719
  try {
735
720
  content = fs.readFileSync(filePath, 'utf8');
package/nwss.js CHANGED
@@ -4,6 +4,7 @@
4
4
  // const pLimit = require('p-limit'); // Will be dynamically imported
5
5
  const puppeteer = require('puppeteer');
6
6
  const fs = require('fs');
7
+ const os = require('os');
7
8
  const psl = require('psl');
8
9
  const path = require('path');
9
10
  const { createGrepHandler, validateGrepAvailability } = require('./lib/grep');
@@ -151,7 +152,7 @@ function detectPuppeteerVersion() {
151
152
  // Enhanced redirect handling
152
153
  const { navigateWithRedirectHandling, handleRedirectTimeout } = require('./lib/redirect');
153
154
  // Ensure web browser is working correctly
154
- const { monitorBrowserHealth, isBrowserHealthy, isQuicklyResponsive, performGroupWindowCleanup, performRealtimeWindowCleanup, trackPageForRealtime, updatePageUsage, cleanupPageBeforeReload, purgeStaleTrackers } = require('./lib/browserhealth');
155
+ const { monitorBrowserHealth, isBrowserHealthy, isQuicklyResponsive, performGroupWindowCleanup, performRealtimeWindowCleanup, trackPageForRealtime, updatePageUsage, untrackPage, cleanupPageBeforeReload, purgeStaleTrackers } = require('./lib/browserhealth');
155
156
 
156
157
  // --- Script Configuration & Constants ---
157
158
  const VERSION = '2.0.33'; // Script version
@@ -1375,7 +1376,7 @@ function setupFrameHandling(page, forceDebug) {
1375
1376
  */
1376
1377
  async function createBrowser(extraArgs = []) {
1377
1378
  // Create temporary user data directory that we can fully control and clean up
1378
- const tempUserDataDir = `/tmp/puppeteer-${Date.now()}-${Math.random().toString(36).substring(7)}`;
1379
+ const tempUserDataDir = path.join(os.tmpdir(), `puppeteer-${Date.now()}-${Math.random().toString(36).substring(7)}`);
1379
1380
  userDataDir = tempUserDataDir; // Store for cleanup tracking (use outer scope variable)
1380
1381
 
1381
1382
  // Try to find system Chrome installation to avoid Puppeteer downloads
@@ -1405,11 +1406,15 @@ function setupFrameHandling(page, forceDebug) {
1405
1406
  }
1406
1407
 
1407
1408
  const systemChromePaths = [
1409
+ // Linux / WSL
1408
1410
  '/usr/bin/google-chrome-stable',
1409
1411
  '/usr/bin/google-chrome',
1410
1412
  '/usr/bin/chromium-browser',
1411
1413
  '/usr/bin/chromium',
1412
- '/snap/bin/chromium'
1414
+ '/snap/bin/chromium',
1415
+ // macOS
1416
+ '/Applications/Google Chrome.app/Contents/MacOS/Google Chrome',
1417
+ '/Applications/Chromium.app/Contents/MacOS/Chromium'
1413
1418
  ];
1414
1419
  // V8 Optimization: Freeze the Chrome paths array since it's constant
1415
1420
  Object.freeze(systemChromePaths);
@@ -3894,6 +3899,7 @@ function setupFrameHandling(page, forceDebug) {
3894
3899
  }
3895
3900
 
3896
3901
  try {
3902
+ untrackPage(page);
3897
3903
  await page.close();
3898
3904
  if (forceDebug) console.log(formatLogMessage('debug', `Page closed for ${currentUrl}`));
3899
3905
  } catch (pageCloseErr) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@fanboynz/network-scanner",
3
- "version": "2.0.57",
3
+ "version": "2.0.58",
4
4
  "description": "A Puppeteer-based network scanner for analyzing web traffic, generating adblock filter rules, and identifying third-party requests. Features include fingerprint spoofing, Cloudflare bypass, content analysis with curl/grep, and multiple output formats.",
5
5
  "main": "nwss.js",
6
6
  "scripts": {
package/.clauderc DELETED
@@ -1,30 +0,0 @@
1
- {
2
- "description": "Network scanner that monitors website requests and generates blocking rules. Uses Puppeteer to load sites, intercepts network traffic, matches patterns, and outputs rules in various formats (adblock, dnsmasq, hosts file, etc.).",
3
-
4
- "conventions": [
5
- "Store modular functionality in ./lib/ directory with focused, single-purpose modules",
6
- "Use messageColors and formatLogMessage from ./lib/colorize for consistent console output",
7
- "Implement timeout protection for all Puppeteer operations using Promise.race patterns",
8
- "Handle browser lifecycle with comprehensive cleanup in try-finally blocks",
9
- "Validate all external tool availability before use (grep, curl, whois, dig)",
10
- "Use forceDebug flag for detailed logging, silentMode for minimal output"
11
- ],
12
-
13
- "files": {
14
- "important": [
15
- "nwss.js",
16
- "config.json",
17
- "lib/*.js",
18
- "*.md",
19
- "nwss.1"
20
- ],
21
- "ignore": [
22
- "node_modules/**",
23
- "logs/**",
24
- "sources/**",
25
- ".cache/**",
26
- "*.log",
27
- "*.gz"
28
- ]
29
- }
30
- }