jsana 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/jsana.js +102 -0
- package/lib/fetcher.js +108 -0
- package/lib/patterns.js +78 -0
- package/lib/pipeline.js +138 -0
- package/lib/progress.js +51 -0
- package/lib/reporter.js +30 -0
- package/lib/retry.js +17 -0
- package/lib/scanner.js +28 -0
- package/package.json +21 -0
package/bin/jsana.js
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { existsSync } from 'node:fs';
|
|
3
|
+
import { resolve } from 'node:path';
|
|
4
|
+
import { run } from '../lib/pipeline.js';
|
|
5
|
+
import { categories } from '../lib/patterns.js';
|
|
6
|
+
|
|
7
|
+
const HELP = `
|
|
8
|
+
jsana - JavaScript Security Analyzer for Bug Bounty
|
|
9
|
+
|
|
10
|
+
Usage:
|
|
11
|
+
jsana <urls-file> [options]
|
|
12
|
+
|
|
13
|
+
Options:
|
|
14
|
+
-o, --output <file> Output file (default: jsana-results.txt)
|
|
15
|
+
-j, --json NDJSON output
|
|
16
|
+
-c, --concurrency <n> Concurrent fetches (default: 50)
|
|
17
|
+
-r, --retries <n> Max retries per URL (default: 2)
|
|
18
|
+
-t, --timeout <ms> Request timeout in ms (default: 30000)
|
|
19
|
+
--category <name> Filter to specific categories (repeatable)
|
|
20
|
+
-h, --help Show this help
|
|
21
|
+
|
|
22
|
+
Categories:
|
|
23
|
+
${categories.join(', ')}
|
|
24
|
+
|
|
25
|
+
Examples:
|
|
26
|
+
jsana urls.txt
|
|
27
|
+
jsana urls.txt -o results.txt -c 100 --json
|
|
28
|
+
jsana urls.txt --category secret --category xss-sink
|
|
29
|
+
`;
|
|
30
|
+
|
|
31
|
+
/**
 * Parse CLI arguments into an options object.
 *
 * @param {string[]} argv - process.argv.slice(2)
 * @returns {{urlsFile: string|null, output: string, json: boolean,
 *            concurrency: number, retries: number, timeout: number,
 *            categories: string[], help: boolean}}
 *
 * Exits the process with status 1 on an unknown option, a flag with a
 * missing value, or a non-numeric value for -c/-r/-t. Previously a bad
 * numeric value produced NaN, which silently deadlocked the pipeline
 * (a Semaphore with NaN capacity never grants a slot).
 */
function parseArgs(argv) {
  const args = {
    urlsFile: null,
    output: 'jsana-results.txt',
    json: false,
    concurrency: 50,
    retries: 2,
    timeout: 30000,
    categories: [],
    help: false,
  };

  // A flag's value must exist; otherwise `undefined` would propagate.
  const requireValue = (flag, value) => {
    if (value === undefined) {
      process.stderr.write(`Missing value for ${flag}\n`);
      process.exit(1);
    }
    return value;
  };

  // Numeric flags must parse to a non-negative integer.
  const requireInt = (flag, value) => {
    const n = Number.parseInt(requireValue(flag, value), 10);
    if (!Number.isInteger(n) || n < 0) {
      process.stderr.write(`Invalid numeric value for ${flag}: ${value}\n`);
      process.exit(1);
    }
    return n;
  };

  let i = 0;
  while (i < argv.length) {
    const arg = argv[i];
    switch (arg) {
      case '-h': case '--help':
        args.help = true; break;
      case '-j': case '--json':
        args.json = true; break;
      case '-o': case '--output':
        args.output = requireValue(arg, argv[++i]); break;
      case '-c': case '--concurrency':
        args.concurrency = requireInt(arg, argv[++i]); break;
      case '-r': case '--retries':
        args.retries = requireInt(arg, argv[++i]); break;
      case '-t': case '--timeout':
        args.timeout = requireInt(arg, argv[++i]); break;
      case '--category':
        args.categories.push(requireValue(arg, argv[++i])); break;
      default:
        // First bare (non-dash) token is the urls file; anything else is an error.
        if (!arg.startsWith('-') && !args.urlsFile) {
          args.urlsFile = arg;
        } else {
          process.stderr.write(`Unknown option: ${arg}\n`);
          process.exit(1);
        }
    }
    i++;
  }

  return args;
}
|
|
74
|
+
|
|
75
|
+
// Entry point: parse flags, validate the input file, then hand off to
// the scan pipeline. Exits 0 for --help, 1 for usage/file errors.
async function main() {
  const args = parseArgs(process.argv.slice(2));

  // No file or explicit --help: print usage. Help exits clean; a bare
  // invocation is a usage error.
  if (args.help || !args.urlsFile) {
    process.stdout.write(HELP);
    process.exit(args.help ? 0 : 1);
  }

  const urlsFile = resolve(args.urlsFile);
  if (!existsSync(urlsFile)) {
    process.stderr.write(`Error: file not found: ${urlsFile}\n`);
    process.exit(1);
  }

  // Forward only the pipeline-relevant options.
  const { output, json, concurrency, retries, timeout, categories } = args;
  await run(urlsFile, { output, json, concurrency, retries, timeout, categories });
}

main().catch((err) => {
  process.stderr.write(`Fatal: ${err.message}\n`);
  process.exit(1);
});
|
package/lib/fetcher.js
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
import { Pool } from 'undici';
|
|
2
|
+
import { createBrotliDecompress, createGunzip, createInflate } from 'node:zlib';
|
|
3
|
+
import { PassThrough } from 'node:stream';
|
|
4
|
+
|
|
5
|
+
// Content types never worth scanning for JS patterns (media, archives,
// opaque binaries); matched against the lowercased Content-Type header.
const BINARY_TYPES = /^(image|audio|video|font|application\/(pdf|zip|gzip|octet-stream|x-tar|x-7z|x-rar))/;
// Hard cap on (decompressed) response body size before the fetch is aborted.
const MAX_RESPONSE_SIZE = 10 * 1024 * 1024; // 10 MB

// LRU pool cache: origin → { pool, lastUsed }
const poolCache = new Map();
// Maximum number of per-origin connection pools kept alive at once.
const MAX_POOLS = 500;
|
|
11
|
+
|
|
12
|
+
/**
 * Return (or lazily create) the undici Pool for `origin`, updating its
 * last-used timestamp for LRU bookkeeping. At most MAX_POOLS pools are
 * cached; the least-recently-used one is evicted when at capacity.
 *
 * @param {string} origin - e.g. "https://example.com"
 * @returns {Pool}
 */
function getPool(origin) {
  const entry = poolCache.get(origin);
  if (entry) {
    entry.lastUsed = Date.now();
    return entry.pool;
  }

  // Evict the least-recently-used pool if at capacity.
  if (poolCache.size >= MAX_POOLS) {
    let oldestKey = null;
    let oldestTime = Infinity;
    for (const [key, val] of poolCache) {
      if (val.lastUsed < oldestTime) {
        oldestTime = val.lastUsed;
        oldestKey = key;
      }
    }
    if (oldestKey) {
      // close() returns a promise that was previously left floating, so a
      // failed close surfaced as an unhandled rejection. Eviction is
      // best-effort cleanup — swallow close errors explicitly.
      poolCache.get(oldestKey).pool.close().catch(() => {});
      poolCache.delete(oldestKey);
    }
  }

  const pool = new Pool(origin, { connections: 6, pipelining: 1 });
  poolCache.set(origin, { pool, lastUsed: Date.now() });
  return pool;
}
|
|
38
|
+
|
|
39
|
+
// Close every cached connection pool and empty the cache. Close failures
// are collected (allSettled) rather than thrown so shutdown always
// completes.
export async function closeAllPools() {
  const closing = [...poolCache.values()].map(({ pool }) => pool.close());
  poolCache.clear();
  await Promise.allSettled(closing);
}
|
|
47
|
+
|
|
48
|
+
/**
 * Fetch a URL through the per-origin pool and return a Readable stream of
 * the response body, decompressed and capped at MAX_RESPONSE_SIZE.
 *
 * @param {string} rawUrl - absolute http(s) URL
 * @param {{timeout?: number}} [opts] - headers/body timeout in ms
 * @returns {Promise<import('node:stream').Readable|null>} null when the
 *   response is skipped (binary content type).
 * @throws {Error} on non-2xx/3xx status, or via stream 'error' events on
 *   network/decompression failures and oversized bodies.
 */
export async function fetchUrl(rawUrl, { timeout = 30000 } = {}) {
  const url = new URL(rawUrl);
  const pool = getPool(url.origin);

  const { statusCode, headers, body } = await pool.request({
    path: url.pathname + url.search,
    method: 'GET',
    headers: {
      'User-Agent': 'Mozilla/5.0 (compatible; jsana/1.0)',
      'Accept': '*/*',
      'Accept-Encoding': 'gzip, deflate, br',
    },
    headersTimeout: timeout,
    bodyTimeout: timeout,
  });

  // Skip non-success statuses; drain the body so the socket can be reused.
  if (statusCode < 200 || statusCode >= 400) {
    body.resume();
    throw new Error(`HTTP ${statusCode}`);
  }

  // Skip binary content types.
  const ct = (headers['content-type'] || '').toLowerCase();
  if (BINARY_TYPES.test(ct)) {
    body.resume();
    return null;
  }

  // Insert a decompressor matching the response's content-encoding.
  const encoding = (headers['content-encoding'] || '').toLowerCase();
  let stream;
  if (encoding === 'gzip' || encoding === 'x-gzip') {
    stream = body.pipe(createGunzip());
  } else if (encoding === 'br') {
    stream = body.pipe(createBrotliDecompress());
  } else if (encoding === 'deflate') {
    stream = body.pipe(createInflate());
  } else {
    stream = body;
  }

  // Enforce the size cap. Destroy the underlying network stream *before*
  // signalling the error so no further chunks keep arriving (the original
  // erred first and destroyed second).
  let received = 0;
  const limiter = new PassThrough({
    transform(chunk, _enc, cb) {
      received += chunk.length;
      if (received > MAX_RESPONSE_SIZE) {
        body.destroy();
        cb(new Error('Response too large'));
      } else {
        cb(null, chunk);
      }
    },
  });

  const out = stream.pipe(limiter);

  // .pipe() does NOT forward 'error' events, so without this a network or
  // decompression error would leave the returned stream silently hanging
  // (and the consumer's for-await loop stuck forever). Propagate failures
  // from both upstream stages into the stream we hand back.
  body.on('error', (err) => limiter.destroy(err));
  if (stream !== body) {
    stream.on('error', (err) => limiter.destroy(err));
  }

  return out;
}
|
package/lib/patterns.js
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
// All regex patterns compiled once at load time. Each pattern:
// { id, regex, category, severity, description }.
// None of the regexes carry the /g flag, so .test() is stateless and safe
// to reuse across lines.

const patterns = [
  // ── Dangerous Sinks (XSS) ──
  { id: 'xss-innerhtml', regex: /\.innerHTML\s*=/, category: 'xss-sink', severity: 'high', description: 'innerHTML assignment' },
  { id: 'xss-outerhtml', regex: /\.outerHTML\s*=/, category: 'xss-sink', severity: 'high', description: 'outerHTML assignment' },
  { id: 'xss-document-write', regex: /document\.write(ln)?\s*\(/, category: 'xss-sink', severity: 'high', description: 'document.write()' },
  // Fixed: the previous regex /[^a-zA-Z_$]eval\s*\(/ required a preceding
  // character, so `eval(` at the start of a line was never matched, and it
  // allowed digits before "eval". Anchor to line start or a non-identifier
  // character (`.` still matches, so window.eval is caught).
  { id: 'xss-eval', regex: /(?:^|[^a-zA-Z0-9_$])eval\s*\(/, category: 'xss-sink', severity: 'critical', description: 'eval()' },
  { id: 'xss-function-ctor', regex: /new\s+Function\s*\(/, category: 'xss-sink', severity: 'critical', description: 'new Function()' },
  { id: 'xss-settimeout-str', regex: /setTimeout\s*\(\s*['"`]/, category: 'xss-sink', severity: 'high', description: 'setTimeout with string' },
  { id: 'xss-setinterval-str', regex: /setInterval\s*\(\s*['"`]/, category: 'xss-sink', severity: 'high', description: 'setInterval with string' },
  { id: 'xss-insertadjacenthtml', regex: /\.insertAdjacentHTML\s*\(/, category: 'xss-sink', severity: 'high', description: 'insertAdjacentHTML()' },
  { id: 'xss-jquery-html', regex: /\.\$\s*\(.*\)\s*\.html\s*\(|\$\(.*\)\.html\s*\(/, category: 'xss-sink', severity: 'high', description: 'jQuery .html()' },
  { id: 'xss-jquery-append', regex: /\.\$\s*\(.*\)\s*\.append\s*\(|\$\(.*\)\.append\s*\(/, category: 'xss-sink', severity: 'medium', description: 'jQuery .append()' },
  { id: 'xss-react-dangerously', regex: /dangerouslySetInnerHTML/, category: 'xss-sink', severity: 'high', description: 'React dangerouslySetInnerHTML' },
  { id: 'xss-vue-vhtml', regex: /v-html\s*=/, category: 'xss-sink', severity: 'high', description: 'Vue v-html directive' },
  { id: 'xss-angular-bypass', regex: /bypassSecurityTrust(Html|Script|Style|Url|ResourceUrl)/, category: 'xss-sink', severity: 'critical', description: 'Angular bypassSecurityTrust*' },

  // ── User-Controlled Sources ──
  { id: 'source-location-hash', regex: /location\.hash/, category: 'source', severity: 'medium', description: 'location.hash' },
  { id: 'source-location-search', regex: /location\.search/, category: 'source', severity: 'medium', description: 'location.search' },
  { id: 'source-location-href', regex: /location\.href/, category: 'source', severity: 'medium', description: 'location.href' },
  { id: 'source-location-pathname', regex: /location\.pathname/, category: 'source', severity: 'low', description: 'location.pathname' },
  { id: 'source-document-url', regex: /document\.URL/, category: 'source', severity: 'medium', description: 'document.URL' },
  { id: 'source-document-referrer', regex: /document\.referrer/, category: 'source', severity: 'medium', description: 'document.referrer' },
  { id: 'source-document-cookie', regex: /document\.cookie/, category: 'source', severity: 'medium', description: 'document.cookie' },
  { id: 'source-window-name', regex: /window\.name/, category: 'source', severity: 'medium', description: 'window.name' },
  { id: 'source-urlsearchparams', regex: /new\s+URLSearchParams/, category: 'source', severity: 'low', description: 'URLSearchParams' },
  { id: 'source-localstorage', regex: /localStorage\.(getItem|setItem)\s*\(/, category: 'source', severity: 'low', description: 'localStorage access' },
  { id: 'source-sessionstorage', regex: /sessionStorage\.(getItem|setItem)\s*\(/, category: 'source', severity: 'low', description: 'sessionStorage access' },

  // ── API Endpoints & Secrets ──
  { id: 'secret-aws-key', regex: /AKIA[0-9A-Z]{16}/, category: 'secret', severity: 'critical', description: 'AWS Access Key ID' },
  { id: 'secret-jwt', regex: /eyJ[A-Za-z0-9_-]{10,}\.[A-Za-z0-9_-]{10,}\.?[A-Za-z0-9_-]*/, category: 'secret', severity: 'high', description: 'JWT token' },
  { id: 'secret-google-api', regex: /AIza[0-9A-Za-z\-_]{35}/, category: 'secret', severity: 'high', description: 'Google API key' },
  { id: 'secret-github-token', regex: /(ghp|gho|ghu|ghs|ghr)_[A-Za-z0-9_]{36,}/, category: 'secret', severity: 'critical', description: 'GitHub token' },
  { id: 'secret-generic-token', regex: /['"`](token|api[_-]?key|apikey|secret|password|passwd|authorization)\s*['"`]\s*[:=]\s*['"`][^'"`]{8,}['"`]/i, category: 'secret', severity: 'high', description: 'Generic secret/token in string' },
  { id: 'secret-bearer', regex: /['"` ]Bearer\s+[A-Za-z0-9_\-.~+/]{20,}/, category: 'secret', severity: 'high', description: 'Bearer token' },
  { id: 'api-fetch', regex: /fetch\s*\(\s*['"`]https?:\/\//, category: 'api', severity: 'info', description: 'fetch() call with URL' },
  { id: 'api-xhr-open', regex: /\.open\s*\(\s*['"`](GET|POST|PUT|DELETE|PATCH)['"`]/, category: 'api', severity: 'info', description: 'XHR open()' },
  { id: 'api-axios', regex: /axios\.(get|post|put|delete|patch|request)\s*\(/, category: 'api', severity: 'info', description: 'axios call' },
  { id: 'api-auth-header', regex: /['"`](Authorization|X-Api-Key|X-Auth-Token)['"`]\s*:/, category: 'api', severity: 'medium', description: 'Auth header' },
  { id: 'api-endpoint', regex: /['"`]\/api\/[a-zA-Z0-9_/\-{}]+['"`]/, category: 'api', severity: 'info', description: 'API endpoint path' },

  // ── Prototype Pollution ──
  { id: 'proto-proto', regex: /__proto__/, category: 'prototype-pollution', severity: 'high', description: '__proto__ access' },
  { id: 'proto-constructor', regex: /constructor\s*\[\s*['"`]prototype['"`]\s*\]|constructor\.prototype/, category: 'prototype-pollution', severity: 'high', description: 'constructor.prototype access' },
  { id: 'proto-merge', regex: /deepmerge|deep[_-]?extend|merge\s*\(|extend\s*\(|Object\.assign\s*\(\s*\{\s*\}/, category: 'prototype-pollution', severity: 'medium', description: 'Deep merge/extend function' },

  // ── Open Redirects ──
  { id: 'redirect-location-assign', regex: /location\s*=\s*[^=]|location\.assign\s*\(/, category: 'open-redirect', severity: 'high', description: 'location assignment/assign()' },
  { id: 'redirect-location-replace', regex: /location\.replace\s*\(/, category: 'open-redirect', severity: 'high', description: 'location.replace()' },
  { id: 'redirect-window-open', regex: /window\.open\s*\(/, category: 'open-redirect', severity: 'medium', description: 'window.open()' },
  { id: 'redirect-navigate', regex: /\.navigate\s*\(\s*['"`]?http/, category: 'open-redirect', severity: 'medium', description: '.navigate() with URL' },

  // ── PostMessage Issues ──
  { id: 'postmessage-listener', regex: /addEventListener\s*\(\s*['"`]message['"`]/, category: 'postmessage', severity: 'medium', description: 'message event listener' },
  { id: 'postmessage-send', regex: /\.postMessage\s*\(/, category: 'postmessage', severity: 'medium', description: 'postMessage() call' },

  // ── Debug / Admin ──
  { id: 'debug-todo', regex: /\/\/\s*(TODO|FIXME|HACK|XXX|BUG)\b/i, category: 'debug', severity: 'low', description: 'TODO/FIXME/HACK comment' },
  { id: 'debug-admin-path', regex: /['"`]\/?admin(\/[a-zA-Z0-9_/-]*)?['"`]/, category: 'debug', severity: 'medium', description: 'Admin path reference' },
  { id: 'debug-console-log', regex: /console\.(log|debug|info|warn|error)\s*\(/, category: 'debug', severity: 'low', description: 'Console logging' },
  { id: 'debug-debugger', regex: /\bdebugger\b/, category: 'debug', severity: 'low', description: 'debugger statement' },
  { id: 'debug-feature-flag', regex: /['"`](feature[_-]?flag|is[_-]?enabled|enable[_-]?feature|canary|experiment)['"`]/i, category: 'debug', severity: 'low', description: 'Feature flag reference' },
  { id: 'debug-sourcemap', regex: /\/\/[#@]\s*sourceMappingURL\s*=/, category: 'debug', severity: 'info', description: 'Source map reference' },
];

// Distinct category names, in first-appearance order.
const categories = [...new Set(patterns.map(p => p.category))];

/**
 * Return the patterns belonging to the given categories.
 *
 * @param {string[]|undefined} filterCategories - empty/undefined means "all"
 * @returns {Array<object>} matching pattern objects
 */
export function getPatterns(filterCategories) {
  if (!filterCategories || filterCategories.length === 0) return patterns;
  const set = new Set(filterCategories);
  return patterns.filter(p => set.has(p.category));
}

export { categories, patterns };
|
package/lib/pipeline.js
ADDED
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
import { createReadStream } from 'node:fs';
|
|
2
|
+
import { createInterface } from 'node:readline';
|
|
3
|
+
import { fetchUrl, closeAllPools } from './fetcher.js';
|
|
4
|
+
import { scan } from './scanner.js';
|
|
5
|
+
import { Reporter } from './reporter.js';
|
|
6
|
+
import { Progress, countLines } from './progress.js';
|
|
7
|
+
import { getPatterns } from './patterns.js';
|
|
8
|
+
import { withRetry } from './retry.js';
|
|
9
|
+
|
|
10
|
+
// Counting semaphore used to bound the number of concurrently running
// fetch tasks.
class Semaphore {
  constructor(max) {
    this.max = max;
    this.current = 0;
    this.queue = [];
  }

  // Resolves immediately while below capacity; otherwise resolves once a
  // running task releases its slot (waiters are served FIFO).
  acquire() {
    if (this.current >= this.max) {
      return new Promise((grant) => this.queue.push(grant));
    }
    this.current += 1;
    return Promise.resolve();
  }

  // Free a slot. If anyone is waiting, hand the slot straight to the
  // oldest waiter so the count never dips below the true usage.
  release() {
    this.current -= 1;
    const next = this.queue.shift();
    if (next) {
      this.current += 1;
      next();
    }
  }
}
|
|
32
|
+
|
|
33
|
+
/**
 * Orchestrate a full scan: read URLs line-by-line from `urlsFile`, fetch
 * each with bounded concurrency, scan the response stream against the
 * selected patterns, and write findings via the Reporter.
 *
 * @param {string} urlsFile - path to a newline-separated URL list
 * @param {object} opts - output/json/concurrency/retries/timeout/categories
 */
export async function run(urlsFile, opts) {
  const {
    output = 'jsana-results.txt',
    json = false,
    concurrency = 50,
    retries = 2,
    timeout = 30000,
    categories = [],
  } = opts;

  const patterns = getPatterns(categories);
  if (patterns.length === 0) {
    process.stderr.write('[jsana] No patterns matched the selected categories.\n');
    return;
  }

  // Pre-count lines for progress
  const totalLines = await countLines(urlsFile);
  const progress = new Progress(totalLines);
  const reporter = new Reporter(output, { json });
  const sem = new Semaphore(concurrency);

  let shuttingDown = false;
  const inFlight = new Set();

  // Graceful shutdown: the first SIGINT only stops *scheduling* new URLs;
  // already-started requests are still awaited below.
  const shutdown = () => {
    if (shuttingDown) return;
    shuttingDown = true;
    process.stderr.write('\n[jsana] Shutting down gracefully...\n');
  };
  process.on('SIGINT', shutdown);

  progress.start();

  const rl = createInterface({
    input: createReadStream(urlsFile, { encoding: 'utf8' }),
    crlfDelay: Infinity,
  });

  for await (const rawLine of rl) {
    if (shuttingDown) break;

    const line = rawLine.trim();
    // Skip empty lines and comments
    if (!line || line.startsWith('#')) {
      progress.tick();
      continue;
    }

    // Basic URL validation — only absolute http(s) URLs are fetched;
    // anything else counts as an error and is skipped.
    let url;
    try {
      url = new URL(line);
      if (url.protocol !== 'http:' && url.protocol !== 'https:') throw new Error('bad proto');
    } catch {
      progress.tick();
      progress.addError();
      continue;
    }

    // Backpressure: block the read loop until a concurrency slot is free.
    await sem.acquire();
    // Re-check after the (possibly long) wait so Ctrl-C is honored promptly.
    if (shuttingDown) { sem.release(); break; }

    const task = (async () => {
      try {
        const stream = await withRetry(
          () => fetchUrl(url.href, { timeout }),
          { maxRetries: retries }
        );

        if (!stream) return; // skipped (binary, etc.)

        let findingsInUrl = 0;
        for await (const finding of scan(stream, url.href, patterns)) {
          reporter.write(finding);
          findingsInUrl++;
        }
        progress.addFindings(findingsInUrl);
      } catch (err) {
        // Per-URL fetch/scan failures are counted but never abort the run.
        progress.addError();
      } finally {
        progress.tick();
        sem.release();
      }
    })();

    inFlight.add(task);
    task.finally(() => inFlight.delete(task));
  }

  // Wait for all in-flight tasks
  await Promise.allSettled([...inFlight]);

  progress.stop();
  await reporter.close();
  await closeAllPools();

  process.removeListener('SIGINT', shutdown);

  process.stderr.write(
    `[jsana] Done. ${progress.processed} URLs processed, ` +
    `${reporter.count} findings, ${progress.errors} errors. ` +
    `Output: ${output}\n`
  );
}
|
package/lib/progress.js
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import { createInterface } from 'node:readline';
|
|
2
|
+
import { createReadStream } from 'node:fs';
|
|
3
|
+
|
|
4
|
+
/**
 * Fast line count: stream through the file counting newline characters.
 *
 * Fixed: a final line without a trailing '\n' is now counted too — the
 * previous implementation counted only '\n' characters, so "a\nb" reported
 * 1 line and the progress total undercounted by one for files not ending
 * in a newline.
 *
 * Resolves with at least 1 so progress percentages never divide by zero
 * (matches the previous behavior for empty files).
 *
 * @param {string} filePath
 * @returns {Promise<number>}
 */
export async function countLines(filePath) {
  return new Promise((resolve, reject) => {
    let newlines = 0;
    let pendingLine = false; // true while characters since the last '\n' exist
    const s = createReadStream(filePath, { encoding: 'utf8', highWaterMark: 64 * 1024 });
    s.on('data', (chunk) => {
      for (let i = 0; i < chunk.length; i++) {
        if (chunk[i] === '\n') {
          newlines++;
          pendingLine = false;
        } else {
          pendingLine = true;
        }
      }
    });
    s.on('end', () => {
      const count = newlines + (pendingLine ? 1 : 0);
      resolve(Math.max(count, 1));
    });
    s.on('error', reject);
  });
}
|
|
18
|
+
|
|
19
|
+
/**
 * Terminal progress reporter: tracks processed/findings/errors counters
 * and redraws a single status line on stderr every 500 ms.
 */
export class Progress {
  constructor(total) {
    this.total = total;
    this.processed = 0;
    this.findings = 0;
    this.errors = 0;
    this.startTime = Date.now();
    this._interval = null;
  }

  // Begin periodic rendering.
  start() {
    this._render();
    this._interval = setInterval(() => this._render(), 500);
    // Don't let the render timer alone keep the process alive.
    this._interval.unref?.();
  }

  tick() { this.processed++; }
  addFindings(n) { this.findings += n; }
  addError() { this.errors++; }

  _render() {
    // Fixed: `elapsed` was a string from toFixed(), so `elapsed || 1`
    // never fell back ('0.0' is truthy) and an early render could divide
    // by zero and display "Infinity URLs/s". Compute with numbers and
    // clamp the denominator instead.
    const elapsedSec = (Date.now() - this.startTime) / 1000;
    const rate = (this.processed / Math.max(elapsedSec, 0.001)).toFixed(1);
    const pct = this.total > 0
      ? ((this.processed / this.total) * 100).toFixed(1)
      : '0.0';
    const line = `\r[jsana] ${this.processed}/${this.total} (${pct}%) | findings: ${this.findings} | errors: ${this.errors} | ${rate} URLs/s | ${elapsedSec.toFixed(1)}s`;
    process.stderr.write(line);
  }

  // Stop the timer and draw one final status line.
  stop() {
    if (this._interval) clearInterval(this._interval);
    this._render();
    process.stderr.write('\n');
  }
}
|
package/lib/reporter.js
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
import { createWriteStream } from 'node:fs';
|
|
2
|
+
|
|
3
|
+
/**
 * Streams findings to `outputPath` — NDJSON when `json` is true,
 * otherwise a human-readable text report.
 */
export class Reporter {
  constructor(outputPath, { json = false } = {}) {
    this.json = json;
    this.stream = createWriteStream(outputPath, { flags: 'w', encoding: 'utf8' });
    this.count = 0;
  }

  // Append one finding to the output and bump the running count.
  write(finding) {
    this.count++;
    if (this.json) {
      this.stream.write(JSON.stringify(finding) + '\n');
    } else {
      this.stream.write(
        `[${finding.severity.toUpperCase()}] [${finding.category}] ${finding.description}\n` +
        `  URL: ${finding.url}\n` +
        `  Line: ${finding.line}\n` +
        `  Code: ${finding.snippet}\n\n`
      );
    }
  }

  /**
   * Flush buffered data and close the stream.
   *
   * Fixed: the 'error' listener is now attached *before* end() is called.
   * Previously it was registered after, so an error emitted during the
   * final flush could fire with no listener attached — crashing the
   * process with an unhandled 'error' event instead of rejecting here.
   */
  async close() {
    return new Promise((resolve, reject) => {
      this.stream.on('error', reject);
      this.stream.end(() => resolve());
    });
  }
}
|
package/lib/retry.js
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
/**
 * Run `fn` with exponential-backoff retries plus jitter.
 *
 * `fn` receives the zero-based attempt number. After `maxRetries`
 * additional attempts fail, the final error is rethrown.
 *
 * @param {(attempt: number) => Promise<any>} fn
 * @param {{maxRetries?: number, baseDelay?: number}} [opts]
 */
export async function withRetry(fn, { maxRetries = 2, baseDelay = 500 } = {}) {
  let attempt = 0;
  for (;;) {
    try {
      return await fn(attempt);
    } catch (err) {
      // Out of retries: surface the last failure to the caller.
      if (attempt >= maxRetries) throw err;
      const backoff = baseDelay * 2 ** attempt + Math.random() * baseDelay;
      await new Promise((wake) => setTimeout(wake, backoff));
      attempt += 1;
    }
  }
}
|
package/lib/scanner.js
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import { createInterface } from 'node:readline';
|
|
2
|
+
|
|
3
|
+
/**
 * Async generator: read `stream` line by line and yield one Finding per
 * pattern match. Lines shorter than 3 characters are skipped as noise.
 *
 * @param {import('node:stream').Readable} stream - response body
 * @param {string} url - source URL, copied into each finding
 * @param {Array<object>} patterns - pattern objects with a `regex`
 */
export async function* scan(stream, url, patterns) {
  const reader = createInterface({ input: stream, crlfDelay: Infinity });
  let lineNum = 0;

  for await (const line of reader) {
    lineNum += 1;
    if (line.length < 3) continue;

    for (const pat of patterns) {
      if (!pat.regex.test(line)) continue;
      yield {
        url,
        line: lineNum,
        patternId: pat.id,
        category: pat.category,
        severity: pat.severity,
        description: pat.description,
        // Snippet: trimmed line, capped at 200 chars.
        snippet: line.trim().slice(0, 200),
      };
    }
  }
}
|
package/package.json
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "jsana",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "JavaScript Security Analyzer for Bug Bounty",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"keywords": ["security", "bug-bounty", "javascript", "scanner", "xss", "secrets", "recon"],
|
|
8
|
+
"bin": {
|
|
9
|
+
"jsana": "./bin/jsana.js"
|
|
10
|
+
},
|
|
11
|
+
"files": [
|
|
12
|
+
"bin/",
|
|
13
|
+
"lib/"
|
|
14
|
+
],
|
|
15
|
+
"engines": {
|
|
16
|
+
"node": ">=18.0.0"
|
|
17
|
+
},
|
|
18
|
+
"dependencies": {
|
|
19
|
+
"undici": "^6.21.1"
|
|
20
|
+
}
|
|
21
|
+
}
|