np-audit 0.0.1-beta
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +212 -0
- package/bin/npa.js +3 -0
- package/package.json +33 -0
- package/src/aware.js +183 -0
- package/src/cli.js +262 -0
- package/src/config.js +71 -0
- package/src/detector.js +235 -0
- package/src/fetcher.js +129 -0
- package/src/lockfile.js +107 -0
- package/src/output.js +92 -0
- package/src/scanner.js +331 -0
- package/src/tarball.js +117 -0
package/src/scanner.js
ADDED
|
@@ -0,0 +1,331 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const fs = require('fs');
|
|
4
|
+
const path = require('path');
|
|
5
|
+
const { parseLockfile } = require('./lockfile');
|
|
6
|
+
const { fetchTarball, buildTarballUrl, verifyIntegrity } = require('./fetcher');
|
|
7
|
+
const { parseTarGz, extractFile, getPackageJson } = require('./tarball');
|
|
8
|
+
const { detectObfuscation } = require('./detector');
|
|
9
|
+
const output = require('./output');
|
|
10
|
+
|
|
11
|
+
/**
 * Main scan orchestrator.
 *
 * Parses the lockfile (or resolves a single package spec), filters out
 * packages that cannot or should not be scanned, then fetches/analyzes
 * the remainder in parallel.
 *
 * @param {object} opts
 * @param {string} opts.cwd project root (location of lockfile / node_modules)
 * @param {object} opts.config scanner configuration
 * @param {boolean} opts.noDev skip devDependencies
 * @param {boolean} opts.verbose emit progress messages
 * @param {string|null} opts.singlePackage name for single-package mode
 * @returns {Promise<ScanResult[]>} results only for packages with install scripts
 */
async function scan(opts) {
  const { cwd, config, noDev, verbose, singlePackage } = opts;

  let packages;
  let lockfileVersion = 1;
  if (singlePackage) {
    packages = await resolveSinglePackage(singlePackage, config);
  } else {
    const parsed = parseLockfile(cwd);
    packages = parsed.packages;
    lockfileVersion = parsed.lockfileVersion;
  }

  // Apply skip filters
  packages = packages.filter(pkg => {
    if (noDev && pkg.dev) return false;
    // Bundled and linked packages have no fetchable registry tarball.
    if (pkg.inBundle || pkg.link) return false;
    if (config.skipPackages && config.skipPackages.includes(pkg.name)) return false;
    if (config.skipScopes) {
      for (const scope of config.skipScopes) {
        if (pkg.name.startsWith(scope + '/') || pkg.name === scope) return false;
      }
    }
    // v2/v3 lockfiles reliably report hasInstallScript — skip definitive negatives
    if (lockfileVersion >= 2 && pkg.hasInstallScript === false) return false;
    return true;
  });

  if (verbose) output.info(`Scanning ${packages.length} packages...`);

  // Parallel fetch + scan with concurrency limit.
  // FIX: an unset/zero `parallelFetches` previously produced zero workers in
  // mapWithConcurrency, so scan() silently returned no results. Default to 4.
  const concurrency = config.parallelFetches || 4;
  const results = await mapWithConcurrency(packages, concurrency, async (pkg) => {
    return scanPackage(pkg, cwd, config, verbose);
  });

  return results.filter(Boolean);
}
|
|
58
|
+
|
|
59
|
+
/**
 * Scan a single package for obfuscated install scripts.
 *
 * Prefers the copy already installed in node_modules; falls back to
 * downloading and parsing the registry tarball when the package is not
 * installed locally. All failure modes (fetch error, integrity mismatch,
 * unparsable tarball, missing package.json) are non-fatal and yield null
 * so the overall scan can continue.
 *
 * @param {object} pkg package descriptor (name, version, resolved, integrity, ...)
 * @param {string} cwd project root (location of node_modules)
 * @param {object} config scanner configuration (registry, timeout, thresholds)
 * @param {boolean} verbose emit progress messages
 * @returns {ScanResult|null} null if no install scripts found
 */
async function scanPackage(pkg, cwd, config, verbose) {
  let pkgJson = null;
  let source = 'registry';

  // Try local node_modules first
  const localPkgJson = tryReadLocalPackageJson(cwd, pkg);
  if (localPkgJson) {
    pkgJson = localPkgJson;
    source = 'local';
  }

  // If v2/v3 lockfile says no install script, skip unless we couldn't confirm locally
  if (source === 'local' && !hasInstallScripts(pkgJson)) {
    return null;
  }

  if (!pkgJson) {
    // v1 lockfile or package not installed — need to fetch
    // Without either a resolved URL or a version we cannot build a tarball URL.
    if (!pkg.resolved && !pkg.version) return null;

    const tarballUrl = pkg.resolved || buildTarballUrl(pkg.name, pkg.version, config.registry);

    let tarBuffer;
    try {
      if (verbose) output.info(`Fetching ${pkg.name}@${pkg.version}...`);
      tarBuffer = await fetchTarball(tarballUrl, { timeout: config.timeout });
    } catch (err) {
      // Network failures are non-fatal: warn and keep scanning other packages.
      output.warn(`Could not fetch ${pkg.name}@${pkg.version}: ${err.message}`);
      return null;
    }

    // A tarball failing its lockfile integrity hash cannot be trusted;
    // warn and bail rather than analyze potentially substituted content.
    if (!verifyIntegrity(tarBuffer, pkg.integrity)) {
      output.warn(`Integrity check failed for ${pkg.name}@${pkg.version} — skipping`);
      return null;
    }

    let files;
    try {
      files = parseTarGz(tarBuffer);
    } catch (err) {
      output.warn(`Could not parse tarball for ${pkg.name}@${pkg.version}: ${err.message}`);
      return null;
    }

    pkgJson = getPackageJson(files);
    if (!pkgJson) return null;

    if (!hasInstallScripts(pkgJson)) return null;

    // Analyze script files from tarball
    return analyzeScripts(pkg, pkgJson, files, config);
  }

  // Analyze from local node_modules
  return analyzeScriptsLocal(pkg, pkgJson, cwd, config);
}
|
|
119
|
+
|
|
120
|
+
/**
 * Analyze install scripts found in a tarball's file map.
 *
 * Inline shell commands are scored directly on the command string;
 * `node <file>` commands are resolved to the referenced file inside the
 * tarball and the file's source is scored instead.
 */
function analyzeScripts(pkg, pkgJson, files, config) {
  const scripts = getInstallScripts(pkgJson);
  if (!scripts.length) return null;

  const scriptResults = [];

  for (const { lifecycle, command } of scripts) {
    const scriptFile = extractScriptFileFromCommand(command);

    if (scriptFile) {
      const fileBuf = extractFile(files, scriptFile);
      if (!fileBuf) continue; // referenced file missing from the tarball
      const code = fileBuf.toString('utf8');
      scriptResults.push({ lifecycle, file: scriptFile, code, ...detectObfuscation(code, config) });
    } else {
      // Pure shell command — score the command text itself.
      scriptResults.push({ lifecycle, file: '(inline)', code: command, ...detectObfuscation(command, config) });
    }
  }

  if (!scriptResults.length) return null;

  // The package is judged by its worst script; findings are pooled.
  const score = Math.max(...scriptResults.map(r => r.score));
  return {
    pkg,
    scripts: scriptResults,
    score,
    findings: scriptResults.flatMap(r => r.findings),
    verdict: verdictFromScore(score, config),
  };
}
|
|
154
|
+
|
|
155
|
+
/**
 * Analyze install scripts of a package installed under node_modules.
 * Mirrors analyzeScripts(), but reads referenced script files from disk.
 */
function analyzeScriptsLocal(pkg, pkgJson, cwd, config) {
  const scripts = getInstallScripts(pkgJson);
  if (!scripts.length) return null;

  const pkgDir = findLocalPackageDir(cwd, pkg.name);
  const scriptResults = [];

  for (const { lifecycle, command } of scripts) {
    const scriptFile = extractScriptFileFromCommand(command);

    if (!scriptFile) {
      // Inline shell command — score the command text itself.
      scriptResults.push({ lifecycle, file: '(inline)', code: command, ...detectObfuscation(command, config) });
      continue;
    }

    if (!pkgDir) continue;
    const absolutePath = path.join(pkgDir, scriptFile);
    if (!fs.existsSync(absolutePath)) continue;

    let code;
    try {
      code = fs.readFileSync(absolutePath, 'utf8');
    } catch {
      continue; // unreadable (permissions, race with install) — skip quietly
    }

    scriptResults.push({ lifecycle, file: scriptFile, code, ...detectObfuscation(code, config) });
  }

  if (!scriptResults.length) return null;

  // Package verdict is driven by its highest-scoring script.
  const score = Math.max(...scriptResults.map(r => r.score));
  return {
    pkg,
    scripts: scriptResults,
    score,
    findings: scriptResults.flatMap(r => r.findings),
    verdict: verdictFromScore(score, config),
  };
}
|
|
191
|
+
|
|
192
|
+
// ─── Helpers ─────────────────────────────────────────────────────────────────
|
|
193
|
+
|
|
194
|
+
/**
 * True when a package.json declares any npm install-lifecycle script
 * (preinstall, install, or postinstall).
 * @param {object|null} pkgJson parsed package.json
 * @returns {boolean}
 */
function hasInstallScripts(pkgJson) {
  const scripts = pkgJson && pkgJson.scripts;
  if (!scripts) return false;
  return Boolean(scripts.preinstall || scripts.postinstall || scripts.install);
}
|
|
198
|
+
|
|
199
|
+
/**
 * Collect the install-lifecycle scripts declared in a package.json,
 * in lifecycle execution order (preinstall, install, postinstall).
 * @param {object|null} pkgJson parsed package.json
 * @returns {{lifecycle: string, command: string}[]}
 */
function getInstallScripts(pkgJson) {
  const scripts = (pkgJson && pkgJson.scripts) || {};
  return ['preinstall', 'install', 'postinstall']
    .filter(lifecycle => scripts[lifecycle])
    .map(lifecycle => ({ lifecycle, command: scripts[lifecycle] }));
}
|
|
207
|
+
|
|
208
|
+
/**
 * Extract the JS file path from a script command like "node ./install.js"
 * or "node scripts/setup". Returns null for pure shell commands and for
 * node invocations whose first argument is a flag (e.g. "node -e ...").
 * @param {string} command
 * @returns {string|null}
 */
function extractScriptFileFromCommand(command) {
  // Explicit .js/.mjs/.cjs argument to `node`.
  const explicit = /(?:^|\s)node\s+([^\s]+\.(?:js|mjs|cjs))/.exec(command);
  if (explicit) return explicit[1].replace(/^\.\//, '');

  // Any bare argument to `node`; append ".js" when it has no extension.
  const bare = /(?:^|\s)node\s+([^\s]+)(?:\s|$)/.exec(command);
  if (!bare) return null;

  const candidate = bare[1].replace(/^\.\//, '');
  if (candidate.startsWith('-')) return null;
  return candidate.includes('.') ? candidate : `${candidate}.js`;
}
|
|
222
|
+
|
|
223
|
+
/**
 * Read and parse node_modules/<name>/package.json, if present.
 * @param {string} cwd project root
 * @param {object} pkg package descriptor (only .name is used)
 * @returns {object|null} parsed manifest, or null when missing/unparsable
 */
function tryReadLocalPackageJson(cwd, pkg) {
  const dir = findLocalPackageDir(cwd, pkg.name);
  if (!dir) return null;
  try {
    const raw = fs.readFileSync(path.join(dir, 'package.json'), 'utf8');
    return JSON.parse(raw);
  } catch {
    // Missing or corrupt package.json — treat the package as not installed.
    return null;
  }
}
|
|
232
|
+
|
|
233
|
+
/**
 * Locate a package's directory under the project's node_modules.
 * @param {string} cwd project root
 * @param {string} name package name (scoped names resolve via path.join)
 * @returns {string|null} the directory path, or null when not installed
 */
function findLocalPackageDir(cwd, name) {
  const dir = path.join(cwd, 'node_modules', name);
  return fs.existsSync(dir) ? dir : null;
}
|
|
238
|
+
|
|
239
|
+
/**
 * Map a numeric obfuscation score onto a verdict using config thresholds.
 * @param {number} score highest script score for the package
 * @param {object} config expects numeric blockScore and warnScore
 * @returns {'BLOCK'|'WARN'|'OK'}
 */
function verdictFromScore(score, config) {
  if (score >= config.blockScore) return 'BLOCK';
  return score >= config.warnScore ? 'WARN' : 'OK';
}
|
|
244
|
+
|
|
245
|
+
/**
 * Resolve a single package (and its direct dependencies) via the npm registry.
 *
 * Dependency version ranges are NOT semver-resolved; each direct dependency
 * is listed at a best-effort pinned version derived from its declared range.
 * A full tree would require additional registry calls.
 *
 * @param {string} packageSpec e.g. "express", "express@4.18.0", "@scope/pkg@1.0.0"
 * @param {object} config scanner configuration (registry, timeout)
 * @returns {Promise<PackageDescriptor[]>} the package itself first, then its direct deps
 * @throws {Error} when registry metadata or the requested version is unavailable
 */
async function resolveSinglePackage(packageSpec, config) {
  // FIX: split on the LAST "@" so scoped specs like "@scope/pkg@1.2.3" parse
  // correctly. The old code split on the first "@" and treated any spec
  // starting with "@" as version-less, so "@scope/pkg@1.2.3" became the name.
  const at = packageSpec.lastIndexOf('@');
  const [name, version] = at > 0
    ? [packageSpec.slice(0, at), packageSpec.slice(at + 1)]
    : [packageSpec, 'latest'];

  const { fetchJSON } = require('./fetcher');
  let meta;
  try {
    meta = await fetchJSON(`${config.registry}/${encodeURIComponent(name)}`, { timeout: config.timeout });
  } catch (err) {
    throw new Error(`Could not fetch registry metadata for "${name}": ${err.message}`);
  }

  const resolvedVersion = version === 'latest'
    ? (meta['dist-tags'] && meta['dist-tags'].latest)
    : version;

  const versionData = meta.versions && meta.versions[resolvedVersion];
  if (!versionData) throw new Error(`Version "${resolvedVersion}" not found for "${name}"`);

  const packages = [];
  const seen = new Set();

  // Best-effort pin: strip leading range operators/whitespace from a range.
  // FIX: the old single-character strip left ">=1.0.0" as "=1.0.0".
  const pinned = (range) => range.replace(/^[\^~>=<\s]+/, '');

  function collectDeps(deps) {
    for (const [depName, range] of Object.entries(deps || {})) {
      if (seen.has(depName)) continue;
      seen.add(depName);
      // We don't resolve ranges here — just list direct deps; full tree would need more registry calls
      packages.push({
        name: depName,
        version: pinned(range),
        resolved: buildTarballUrl(depName, pinned(range), config.registry),
        integrity: '',
        hasInstallScript: false,
        dev: false,
        optional: false,
        inBundle: false,
        link: false,
      });
    }
  }

  // Include the package itself
  packages.unshift({
    name,
    version: resolvedVersion,
    resolved: versionData.dist && versionData.dist.tarball,
    integrity: (versionData.dist && versionData.dist.integrity) || '',
    hasInstallScript: !!(versionData.scripts &&
      (versionData.scripts.preinstall || versionData.scripts.postinstall || versionData.scripts.install)),
    dev: false,
    optional: false,
    inBundle: false,
    link: false,
  });

  collectDeps(versionData.dependencies);

  return packages;
}
|
|
311
|
+
|
|
312
|
+
/**
 * Map items through an async fn with bounded concurrency, preserving order.
 *
 * @param {Array} items
 * @param {number} limit max in-flight calls; non-numeric or <1 values fall back to 1
 * @param {(item: *) => Promise<*>} fn
 * @returns {Promise<Array>} results in the same order as items
 */
async function mapWithConcurrency(items, limit, fn) {
  const results = new Array(items.length);
  if (items.length === 0) return results;

  // FIX: guard against limit being undefined/0/NaN — the old code spawned
  // zero workers in that case and resolved with an array of `undefined`.
  const safeLimit = Number.isFinite(limit) && limit >= 1 ? Math.floor(limit) : 1;
  const workerCount = Math.min(safeLimit, items.length);

  let index = 0;
  // Each worker repeatedly claims the next unclaimed index; `index++` is
  // safe because JS is single-threaded between awaits.
  async function worker() {
    while (index < items.length) {
      const i = index++;
      results[i] = await fn(items[i]);
    }
  }

  await Promise.all(Array.from({ length: workerCount }, worker));
  return results;
}
|
|
330
|
+
|
|
331
|
+
// Public API: `scan` is the CLI entry point; the helpers are exported for reuse/tests.
module.exports = { scan, hasInstallScripts, extractScriptFileFromCommand, verdictFromScore };
|
package/src/tarball.js
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const zlib = require('zlib');
|
|
4
|
+
|
|
5
|
+
const BLOCK_SIZE = 512;
|
|
6
|
+
|
|
7
|
+
/**
 * Parse a tar.gz buffer and return a Map<normalizedPath, Buffer>.
 * Pure Node.js — no external dependencies.
 *
 * Handles GNU long-name entries (typeflag 'L'). POSIX pax extended headers
 * (typeflag 'x'/'g') are SKIPPED, not interpreted — so a pax `path` override
 * for a >100-char filename would be ignored; rare for npm tarballs, which
 * are shallow "package/..." trees. NOTE(review): confirm acceptable.
 *
 * @param {Buffer} gzipBuffer
 * @returns {Map<string, Buffer>} regular-file entries only (dirs/symlinks omitted)
 * @throws {Error} when gunzip fails (propagated from zlib)
 */
function parseTarGz(gzipBuffer) {
  const tar = zlib.gunzipSync(gzipBuffer);
  const files = new Map();

  let offset = 0;        // byte offset of the next 512-byte header block
  let pendingLongName = null; // name carried from a preceding 'L' entry

  while (offset + BLOCK_SIZE <= tar.length) {
    const header = tar.slice(offset, offset + BLOCK_SIZE);

    // Two consecutive zero blocks = EOF
    if (isZeroBlock(header)) {
      const next = tar.slice(offset + BLOCK_SIZE, offset + BLOCK_SIZE * 2);
      if (next.length === 0 || isZeroBlock(next)) break;
    }

    offset += BLOCK_SIZE;

    // ustar header layout: typeflag @156, name @0(100), prefix @345(155),
    // size @124(12, octal ASCII).
    const typeFlag = String.fromCharCode(header[156]) || '0';
    const rawName = readNullTerminated(header, 0, 100);
    const prefix = readNullTerminated(header, 345, 155);
    const sizeOctal = header.slice(124, 136).toString('ascii').trim();
    const size = parseInt(sizeOctal, 8) || 0;
    // File data is padded up to a whole number of 512-byte blocks.
    const dataBlocks = Math.ceil(size / BLOCK_SIZE);
    const dataEnd = offset + dataBlocks * BLOCK_SIZE;

    if (typeFlag === 'L') {
      // GNU long filename — data block contains the real name
      pendingLongName = tar.slice(offset, offset + size).toString('utf8').replace(/\0/g, '');
      offset = dataEnd;
      continue;
    }

    if (typeFlag === 'x' || typeFlag === 'g') {
      // POSIX extended header — skip (its attributes are not applied)
      pendingLongName = null;
      offset = dataEnd;
      continue;
    }

    // A pending GNU long name wins over the (truncated) header name;
    // otherwise join the ustar prefix field with the name field.
    let name = pendingLongName || (prefix ? `${prefix}/${rawName}` : rawName);
    pendingLongName = null;
    name = name.replace(/\0/g, '');

    // Only regular files ('0' or the legacy NUL typeflag) with content are kept.
    if ((typeFlag === '0' || typeFlag === '\0') && size > 0) {
      files.set(name, tar.slice(offset, offset + size));
    }

    offset = dataEnd;
  }

  return files;
}
|
|
67
|
+
|
|
68
|
+
/**
 * Look up a file in a parsed tarball map.
 *
 * Resolution order: exact path, then "package/<path>" (the npm tarball
 * convention), then any entry whose first path component, when stripped,
 * matches (some publishers use a root directory other than "package").
 *
 * @param {Map<string, Buffer>} files
 * @param {string} filePath
 * @returns {Buffer|null}
 */
function extractFile(files, filePath) {
  if (files.has(filePath)) return files.get(filePath);

  const npmConventional = `package/${filePath}`;
  if (files.has(npmConventional)) return files.get(npmConventional);

  for (const [entryPath, data] of files) {
    const withoutRoot = entryPath.replace(/^[^/]+\//, '');
    if (withoutRoot === filePath) return data;
  }

  return null;
}
|
|
87
|
+
|
|
88
|
+
/**
 * Parse package.json out of a tarball file map.
 * @param {Map<string, Buffer>} files
 * @returns {object|null} parsed manifest, or null when absent or invalid JSON
 */
function getPackageJson(files) {
  const raw = extractFile(files, 'package.json');
  if (!raw) return null;
  try {
    return JSON.parse(raw.toString('utf8'));
  } catch {
    // Malformed JSON — caller treats this as "no manifest available".
    return null;
  }
}
|
|
102
|
+
|
|
103
|
+
/**
 * True when the first BLOCK_SIZE bytes of buf are all zero (the tar
 * end-of-archive marker). Buffers shorter than BLOCK_SIZE never qualify,
 * since out-of-range reads yield undefined, which is !== 0.
 */
function isZeroBlock(buf) {
  let i = 0;
  while (i < BLOCK_SIZE) {
    if (buf[i] !== 0) return false;
    i += 1;
  }
  return true;
}
|
|
109
|
+
|
|
110
|
+
/**
 * Decode a NUL-terminated (or full-width) UTF-8 string field from a buffer.
 * @param {Buffer} buf
 * @param {number} start field offset
 * @param {number} maxLen field width in bytes
 * @returns {string} bytes from start up to the first NUL or the field end
 */
function readNullTerminated(buf, start, maxLen) {
  const limit = Math.min(start + maxLen, buf.length);
  let cursor = start;
  while (cursor < limit && buf[cursor] !== 0) cursor += 1;
  return buf.toString('utf8', start, cursor);
}
|
|
116
|
+
|
|
117
|
+
// Public tarball helpers; internal block utilities (isZeroBlock, readNullTerminated) stay private.
module.exports = { parseTarGz, extractFile, getPackageJson };
|