seabox 0.1.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.mocharc.json +6 -0
- package/LICENSE.MD +21 -0
- package/README.md +209 -0
- package/bin/seabox-rebuild.js +395 -0
- package/bin/seabox.js +81 -0
- package/lib/bindings.js +31 -0
- package/lib/blob.js +109 -0
- package/lib/bootstrap.js +655 -0
- package/lib/build.js +283 -0
- package/lib/config.js +117 -0
- package/lib/crypto-assets.js +160 -0
- package/lib/fetch-node.js +177 -0
- package/lib/index.js +27 -0
- package/lib/inject.js +81 -0
- package/lib/manifest.js +98 -0
- package/lib/obfuscate.js +73 -0
- package/lib/scanner.js +153 -0
- package/lib/unsign.js +184 -0
- package/package.json +47 -0
@@ -0,0 +1,177 @@
+/**
+ * fetch-node.js
+ * Download target Node.js binary for SEA injection.
+ */
+
+const fs = require('fs');
+const path = require('path');
+const https = require('https');
+const { pipeline } = require('stream');
+const { promisify } = require('util');
+
+const pipelineAsync = promisify(pipeline);
+
+/**
+ * Construct the Node.js download URL for a given target.
+ * @param {string} nodeVersion - e.g., "24.11.0"
+ * @param {string} platform - e.g., "win", "linux", "darwin"
+ * @param {string} arch - e.g., "x64", "arm64"
+ * @returns {string}
+ */
+function getNodeDownloadUrl(nodeVersion, platform, arch) {
+  const baseUrl = 'https://nodejs.org/dist';
+
+  // Map platform names to Node.js naming
+  const platformMap = {
+    win32: 'win',
+    linux: 'linux',
+    darwin: 'darwin'
+  };
+
+  const mappedPlatform = platformMap[platform] || platform;
+
+  // Construct filename
+  let filename;
+  if (mappedPlatform === 'win') {
+    filename = `node-v${nodeVersion}-${mappedPlatform}-${arch}.zip`;
+  } else {
+    filename = `node-v${nodeVersion}-${mappedPlatform}-${arch}.tar.gz`;
+  }
+
+  return `${baseUrl}/v${nodeVersion}/${filename}`;
+}
+
+/**
+ * Download a file from a URL to a local path.
+ * @param {string} url
+ * @param {string} outputPath
+ * @returns {Promise<void>}
+ */
+async function downloadFile(url, outputPath) {
+  return new Promise((resolve, reject) => {
+    https.get(url, response => {
+      if (response.statusCode === 302 || response.statusCode === 301) {
+        // Follow redirect
+        return downloadFile(response.headers.location, outputPath)
+          .then(resolve)
+          .catch(reject);
+      }
+
+      if (response.statusCode !== 200) {
+        reject(new Error(`HTTP ${response.statusCode}: ${url}`));
+        return;
+      }
+
+      const file = fs.createWriteStream(outputPath);
+      pipelineAsync(response, file)
+        .then(resolve)
+        .catch(reject);
+    }).on('error', reject);
+  });
+}
+
+/**
+ * Extract node.exe or node binary from downloaded archive.
+ * @param {string} archivePath - Path to .zip or .tar.gz
+ * @param {string} outputDir - Directory to extract to
+ * @param {string} platform - Platform identifier
+ * @returns {Promise<string>} - Path to extracted node binary
+ */
+async function extractNodeBinary(archivePath, outputDir, platform) {
+  const AdmZip = require('adm-zip');
+  const tar = require('tar');
+
+  if (platform === 'win32') {
+    // Extract from ZIP
+    const zip = new AdmZip(archivePath);
+    zip.extractAllTo(outputDir, true);
+
+    // Find node.exe in the extracted directory structure
+    const extracted = fs.readdirSync(outputDir);
+    for (const item of extracted) {
+      const itemPath = path.join(outputDir, item);
+      if (fs.statSync(itemPath).isDirectory()) {
+        const nodeExePath = path.join(itemPath, 'node.exe');
+        if (fs.existsSync(nodeExePath)) {
+          const finalPath = path.join(outputDir, 'node.exe');
+          fs.renameSync(nodeExePath, finalPath);
+          // Clean up extracted directory
+          fs.rmSync(itemPath, { recursive: true, force: true });
+          return finalPath;
+        }
+      }
+    }
+
+    throw new Error('node.exe not found in archive');
+  } else {
+    // Extract from tar.gz
+    await tar.extract({
+      file: archivePath,
+      cwd: outputDir,
+      filter: (p) => p.endsWith('/bin/node')
+    });
+
+    // Find the extracted node binary
+    const extracted = fs.readdirSync(outputDir);
+    for (const dir of extracted) {
+      const nodePath = path.join(outputDir, dir, 'bin', 'node');
+      if (fs.existsSync(nodePath)) {
+        const finalPath = path.join(outputDir, 'node');
+        fs.renameSync(nodePath, finalPath);
+        fs.chmodSync(finalPath, 0o755);
+        return finalPath;
+      }
+    }
+
+    throw new Error('node binary not found in archive');
+  }
+}
+
+/**
+ * Fetch and prepare a Node.js binary for SEA injection.
+ * @param {string} nodeVersion
+ * @param {string} platform
+ * @param {string} arch
+ * @param {string} cacheDir - Directory to cache downloads
+ * @returns {Promise<string>} - Path to the node binary
+ */
+async function fetchNodeBinary(nodeVersion, platform, arch, cacheDir) {
+  if (!fs.existsSync(cacheDir)) {
+    fs.mkdirSync(cacheDir, { recursive: true });
+  }
+
+  const binaryName = platform === 'win32' ? 'node.exe' : 'node';
+  const cachedBinary = path.join(cacheDir, `${nodeVersion}-${platform}-${arch}`, binaryName);
+
+  // Check cache
+  if (fs.existsSync(cachedBinary)) {
+    console.log(`✓ Using cached Node binary: ${cachedBinary}`);
+    return cachedBinary;
+  }
+
+  console.log(`Downloading Node.js v${nodeVersion} for ${platform}-${arch}...`);
+  const url = getNodeDownloadUrl(nodeVersion, platform, arch);
+  const archiveName = path.basename(url);
+  const archivePath = path.join(cacheDir, archiveName);
+
+  await downloadFile(url, archivePath);
+  console.log(`✓ Downloaded: ${archivePath}`);
+
+  const extractDir = path.join(cacheDir, `${nodeVersion}-${platform}-${arch}`);
+  fs.mkdirSync(extractDir, { recursive: true });
+
+  const binaryPath = await extractNodeBinary(archivePath, extractDir, platform);
+  console.log(`✓ Extracted Node binary: ${binaryPath}`);
+
+  // Clean up archive
+  fs.unlinkSync(archivePath);
+
+  return binaryPath;
+}
+
+module.exports = {
+  getNodeDownloadUrl,
+  downloadFile,
+  extractNodeBinary,
+  fetchNodeBinary
+};
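
As a rough usage sketch (the Node.js version, target triple, and cache directory are illustrative, and the relative require path assumes a script at the package root), fetchNodeBinary can be driven directly from a build script:

const path = require('path');
const { fetchNodeBinary } = require('./lib/fetch-node');

(async () => {
  // Illustrative target: any released Node.js version and supported platform/arch pair.
  const cacheDir = path.join(process.cwd(), '.seabox-cache');
  const nodeBinary = await fetchNodeBinary('22.11.0', 'linux', 'x64', cacheDir);
  console.log('Node binary ready at:', nodeBinary);
})();
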
package/lib/index.js
ADDED
@@ -0,0 +1,27 @@
+/**
+ * index.js
+ * Main entry point for seabox
+ */
+
+const { build } = require('./build');
+const { loadConfig, parseTarget } = require('./config');
+const { scanAssets, groupAssets } = require('./scanner');
+const { generateManifest, serializeManifest } = require('./manifest');
+const { createSeaConfig, writeSeaConfigJson, generateBlob } = require('./blob');
+const { fetchNodeBinary } = require('./fetch-node');
+const { injectBlob } = require('./inject');
+
+module.exports = {
+  build,
+  loadConfig,
+  parseTarget,
+  scanAssets,
+  groupAssets,
+  generateManifest,
+  serializeManifest,
+  createSeaConfig,
+  writeSeaConfigJson,
+  generateBlob,
+  fetchNodeBinary,
+  injectBlob
+};
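
Assuming the package's main field resolves to lib/index.js, the same pipeline helpers are reachable from the package root:

// Programmatic use of the pieces re-exported above (assumes `main` points at lib/index.js).
const { scanAssets, groupAssets, generateManifest, serializeManifest } = require('seabox');
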
package/lib/inject.js
ADDED
@@ -0,0 +1,81 @@
+/**
+ * inject.js
+ * Inject SEA blob into Node binary using postject.
+ */
+
+const fs = require('fs');
+const path = require('path');
+const { execFile } = require('child_process');
+const { promisify } = require('util');
+const { removeSignature } = require('./unsign');
+
+const execFileAsync = promisify(execFile);
+
+/**
+ * Inject a SEA blob into a Node.js binary using postject.
+ * @param {string} nodeBinaryPath - Path to the source Node binary
+ * @param {string} blobPath - Path to the SEA blob file
+ * @param {string} outputPath - Path for the output executable
+ * @param {string} platform - Target platform (win32, linux, darwin)
+ * @returns {Promise<void>}
+ */
+async function injectBlob(nodeBinaryPath, blobPath, outputPath, platform, verbose) {
+  // Copy node binary to output location
+  fs.copyFileSync(nodeBinaryPath, outputPath);
+
+  // Remove existing signature before postject injection
+  // The downloaded Node.js binary is signed, and postject will corrupt this signature
+  await removeSignature(outputPath, platform);
+
+  // Prepare postject command
+  const sentinel = 'NODE_SEA_BLOB';
+  const sentinelFuse = 'NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2';
+
+  const args = [
+    outputPath,
+    sentinel,
+    blobPath,
+    '--sentinel-fuse', sentinelFuse
+  ];
+
+  // Platform-specific postject options
+  if (platform === 'darwin') {
+    args.push('--macho-segment-name', 'NODE_SEA');
+  }
+
+  console.log(`Injecting SEA blob into: ${outputPath}`);
+
+  // Use cmd.exe on Windows to run npx
+  const isWindows = process.platform === 'win32';
+  const command = isWindows ? 'cmd.exe' : 'npx';
+  const cmdArgs = isWindows
+    ? ['/c', 'npx', 'postject', ...args]
+    : ['postject', ...args];
+
+  //console.log(`Command: ${command} ${cmdArgs.join(' ')}`);
+
+  try {
+    const { stdout, stderr } = await execFileAsync(command, cmdArgs);
+    if (stdout && verbose) console.log(stdout);
+    if (stderr && verbose) console.error(stderr);
+    console.log('✓ SEA blob injected successfully');
+  } catch (error) {
+    throw new Error(`Postject injection failed: ${error.message}`);
+  }
+
+  //console.log('\nNote: Executable is now ready for signing with your certificate');
+}
+
+/**
+ * Resolve the postject executable path.
+ * @returns {string}
+ */
+function resolvePostject() {
+  // Use npx to run postject
+  return 'npx';
+}
+
+module.exports = {
+  injectBlob,
+  resolvePostject
+};
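
A minimal sketch of calling injectBlob after the blob step; all paths below are illustrative (the blob would come from blob.js and the Node binary from fetch-node.js), and the relative require path assumes a script at the package root:

const { injectBlob } = require('./lib/inject');

(async () => {
  await injectBlob(
    '.seabox-cache/22.11.0-linux-x64/node', // source Node binary from fetch-node.js
    'dist/sea-prep.blob',                   // SEA blob produced by the blob step
    'dist/myapp',                           // output executable
    'linux',                                // target platform
    true                                    // verbose: print postject stdout/stderr
  );
})();
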
package/lib/manifest.js
ADDED
@@ -0,0 +1,98 @@
+/**
+ * manifest.js
+ * Generate runtime manifest with asset metadata and extraction rules.
+ */
+
+const path = require('path');
+
+/**
+ * @typedef {Object} BinaryManifestEntry
+ * @property {string} assetKey - Key in the SEA blob
+ * @property {string} fileName - Original filename
+ * @property {string} platform - Target platform (win32, linux, darwin, *)
+ * @property {string} arch - Target architecture (x64, arm64, *)
+ * @property {number} order - Extraction order priority (lower = earlier)
+ * @property {string} hash - SHA-256 hash for integrity check
+ */
+
+/**
+ * @typedef {Object} RuntimeManifest
+ * @property {string} appName - Application name
+ * @property {string} appVersion - Application version
+ * @property {string} platform - Target platform
+ * @property {string} arch - Target architecture
+ * @property {BinaryManifestEntry[]} binaries - Binary extraction rules
+ * @property {string[]} allAssetKeys - All embedded asset keys
+ */
+
+/**
+ * Generate a runtime manifest from scanned assets.
+ * @param {import('./scanner').AssetEntry[]} assets - All scanned assets
+ * @param {Object} config - SEA configuration
+ * @param {string} targetPlatform - Target platform (win32, linux, darwin)
+ * @param {string} targetArch - Target architecture (x64, arm64)
+ * @returns {RuntimeManifest}
+ */
+function generateManifest(assets, config, targetPlatform, targetArch) {
+  const binaries = assets
+    .filter(a => a.isBinary)
+    .map((asset, index) => {
+      const fileName = path.basename(asset.sourcePath);
+      return {
+        assetKey: asset.assetKey,
+        fileName,
+        platform: targetPlatform,
+        arch: targetArch,
+        order: inferExtractionOrder(fileName, index),
+        hash: asset.hash
+      };
+    });
+
+  return {
+    appName: config._packageName || 'app',
+    appVersion: config._packageVersion || '1.0.0',
+    platform: targetPlatform,
+    arch: targetArch,
+    binaries,
+    allAssetKeys: assets.map(a => a.assetKey)
+  };
+}
+
+/**
+ * Infer extraction order based on file type.
+ * Libraries (.dll, .so, .dylib) should extract before .node addons.
+ * @param {string} fileName
+ * @param {number} fallbackIndex - Fallback order if heuristic doesn't apply
+ * @returns {number}
+ */
+function inferExtractionOrder(fileName, fallbackIndex) {
+  const ext = path.extname(fileName).toLowerCase();
+
+  // Extract shared libraries first
+  if (['.dll', '.so', '.dylib'].includes(ext)) {
+    return 10;
+  }
+
+  // Then native addons
+  if (ext === '.node') {
+    return 20;
+  }
+
+  // Fallback for other binaries
+  return 100 + fallbackIndex;
+}
+
+/**
+ * Serialize manifest to JSON string for embedding.
+ * @param {RuntimeManifest} manifest
+ * @returns {string}
+ */
+function serializeManifest(manifest) {
+  return JSON.stringify(manifest, null, 2);
+}
+
+module.exports = {
+  generateManifest,
+  inferExtractionOrder,
+  serializeManifest
+};
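
The extraction-order heuristic can be read off directly from inferExtractionOrder; with illustrative file names:

const { inferExtractionOrder } = require('./lib/manifest');

inferExtractionOrder('libcrypto.dll', 0); // => 10  (shared libraries extract first)
inferExtractionOrder('binding.node', 1);  // => 20  (native addons next)
inferExtractionOrder('ffmpeg.exe', 2);    // => 102 (100 + fallback index for everything else)
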
package/lib/obfuscate.js
ADDED
@@ -0,0 +1,73 @@
+/**
+ * @file Obfuscate bootstrap code to protect encryption keys and decryption logic
+ */
+
+const JavaScriptObfuscator = require('javascript-obfuscator');
+
+/**
+ * Obfuscate bootstrap code with maximum protection settings
+ *
+ * @param {string} bootstrapCode - The bootstrap JavaScript code to obfuscate
+ * @returns {string} Obfuscated JavaScript code
+ */
+function obfuscateBootstrap(bootstrapCode) {
+  const obfuscationResult = JavaScriptObfuscator.obfuscate(bootstrapCode, {
+    // Maximum protection settings for encryption key and decryption logic
+
+    // String encoding
+    stringArray: true,
+    stringArrayThreshold: 1,
+    stringArrayEncoding: ['rc4'],
+    stringArrayIndexShift: true,
+    stringArrayRotate: true,
+    stringArrayShuffle: true,
+    stringArrayWrappersCount: 5,
+    stringArrayWrappersChainedCalls: true,
+    stringArrayWrappersParametersMaxCount: 5,
+    stringArrayWrappersType: 'function',
+
+    // Control flow
+    controlFlowFlattening: true,
+    controlFlowFlatteningThreshold: 1,
+    deadCodeInjection: true,
+    deadCodeInjectionThreshold: 0.4,
+
+    // Code transformations
+    transformObjectKeys: true,
+    splitStrings: true,
+    splitStringsChunkLength: 10,
+
+    // Identifiers
+    identifierNamesGenerator: 'hexadecimal',
+    identifiersPrefix: '',
+    renameGlobals: false, // Keep false - we need to preserve global scope
+    renameProperties: false, // Keep false - breaks sea.getAsset patching
+
+    // Self-defending
+    selfDefending: true,
+
+    // Compact output
+    compact: true,
+
+    // Additional obfuscation
+    numbersToExpressions: true,
+    simplify: true,
+
+    // Disable source maps (we don't want them)
+    sourceMap: false,
+
+    // Performance vs protection tradeoff
+    // (these settings prioritize protection over performance)
+    target: 'node',
+    ignoreImports: true,
+
+    // Comments removal
+    // (handled automatically by compact: true)
+  });
+
+  return obfuscationResult.getObfuscatedCode();
+}
+
+module.exports = {
+  obfuscateBootstrap
+};
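
A small sketch of the wrapper in isolation; the bootstrap snippet here is hypothetical, since in the real build the input comes from bootstrap.js:

const { obfuscateBootstrap } = require('./lib/obfuscate');

const bootstrap = "const key = 'not-a-real-key'; console.log(key);"; // hypothetical input
const protectedCode = obfuscateBootstrap(bootstrap);
// With RC4-encoded string arrays, control-flow flattening, and dead-code injection,
// the output is far larger and much harder to read than the input.
console.log(protectedCode.length > bootstrap.length); // true
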
package/lib/scanner.js
ADDED
@@ -0,0 +1,153 @@
+/**
+ * scanner.js
+ * Resolve glob patterns, collect assets, and identify binary artifacts.
+ */
+
+const fs = require('fs');
+const path = require('path');
+const { glob } = require('glob');
+const crypto = require('crypto');
+
+/**
+ * @typedef {Object} AssetEntry
+ * @property {string} sourcePath - Absolute path to the asset on disk
+ * @property {string} assetKey - Logical key in the SEA blob
+ * @property {boolean} isBinary - True if this is a binary artifact requiring extraction
+ * @property {string} [hash] - SHA-256 hash of the file
+ */
+
+/**
+ * Scan and resolve all assets from configuration.
+ * Supports negative glob patterns (prefixed with '!') for exclusions.
+ * @param {string[]} assetPatterns - Glob patterns from config (supports '!' prefix for exclusions)
+ * @param {string[]} [binaryPatterns] - Patterns identifying binaries to extract
+ * @param {string[]} [excludePatterns] - Legacy: Additional patterns to exclude (optional)
+ * @param {string} projectRoot - Project root directory
+ * @returns {Promise<AssetEntry[]>}
+ */
+async function scanAssets(assetPatterns, binaryPatterns = [], excludePatterns = [], projectRoot = process.cwd()) {
+  const assets = [];
+  const seenKeys = new Set();
+
+  // Separate positive and negative patterns
+  const includePatterns = [];
+  const negativePatterns = [];
+
+  for (const pattern of assetPatterns) {
+    if (pattern.startsWith('!')) {
+      // Negative pattern - add to exclusions (remove the '!' prefix)
+      negativePatterns.push(pattern.slice(1));
+    } else {
+      includePatterns.push(pattern);
+    }
+  }
+
+  // Combine negative patterns with legacy excludePatterns
+  const allExclusions = [...negativePatterns, ...excludePatterns];
+
+  // Process each include pattern
+  for (const pattern of includePatterns) {
+    const matches = await glob(pattern, {
+      cwd: projectRoot,
+      nodir: true,
+      absolute: false,
+      ignore: allExclusions
+    });
+
+    for (const match of matches) {
+      const sourcePath = path.resolve(projectRoot, match);
+      const assetKey = normalizeAssetKey(match);
+
+      // Skip duplicates
+      if (seenKeys.has(assetKey)) {
+        continue;
+      }
+      seenKeys.add(assetKey);
+
+      const isBinary = isBinaryAsset(match, binaryPatterns);
+      const hash = isBinary ? await computeHash(sourcePath) : undefined;
+
+      assets.push({
+        sourcePath,
+        assetKey,
+        isBinary,
+        hash
+      });
+    }
+  }
+
+  return assets;
+}
+
+/**
+ * Normalize a file path to a forward-slash asset key.
+ * @param {string} filePath
+ * @returns {string}
+ */
+function normalizeAssetKey(filePath) {
+  return filePath.replace(/\\/g, '/');
+}
+
+/**
+ * Check if an asset matches binary patterns.
+ * @param {string} filePath
+ * @param {string[]} binaryPatterns
+ * @returns {boolean}
+ */
+function isBinaryAsset(filePath, binaryPatterns) {
+  const ext = path.extname(filePath).toLowerCase();
+  const binaryExtensions = ['.node', '.dll', '.so', '.dylib'];
+
+  // Check explicit patterns first
+  for (const pattern of binaryPatterns) {
+    if (filePath.includes(pattern) || filePath.endsWith(pattern)) {
+      return true;
+    }
+  }
+
+  // Fall back to extension check
+  return binaryExtensions.includes(ext);
+}
+
+/**
+ * Compute SHA-256 hash of a file.
+ * @param {string} filePath
+ * @returns {Promise<string>}
+ */
+function computeHash(filePath) {
+  return new Promise((resolve, reject) => {
+    const hash = crypto.createHash('sha256');
+    const stream = fs.createReadStream(filePath);
+    stream.on('data', chunk => hash.update(chunk));
+    stream.on('end', () => resolve(hash.digest('hex')));
+    stream.on('error', reject);
+  });
+}
+
+/**
+ * Group assets by binary vs non-binary.
+ * @param {AssetEntry[]} assets
+ * @returns {{binaries: AssetEntry[], nonBinaries: AssetEntry[]}}
+ */
+function groupAssets(assets) {
+  const binaries = [];
+  const nonBinaries = [];
+
+  for (const asset of assets) {
+    if (asset.isBinary) {
+      binaries.push(asset);
+    } else {
+      nonBinaries.push(asset);
+    }
+  }
+
+  return { binaries, nonBinaries };
+}
+
+module.exports = {
+  scanAssets,
+  normalizeAssetKey,
+  isBinaryAsset,
+  computeHash,
+  groupAssets
+};
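
A minimal sketch of scanning and grouping assets; the glob patterns and project layout below are illustrative, and the relative require path assumes a script at the package root:

const { scanAssets, groupAssets } = require('./lib/scanner');

(async () => {
  const assets = await scanAssets(
    ['dist/**/*', '!dist/**/*.map'], // asset patterns; '!' entries become exclusions
    ['prebuilds/'],                  // extra patterns that force the binary/extract path
    [],                              // legacy excludePatterns
    process.cwd()
  );
  const { binaries, nonBinaries } = groupAssets(assets);
  console.log(`${binaries.length} binaries to extract, ${nonBinaries.length} embedded assets`);
})();
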