jpm-pkg 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +21 -0
- package/README.md +148 -0
- package/bin/jpm.js +252 -0
- package/package.json +52 -0
- package/src/commands/audit.js +56 -0
- package/src/commands/config.js +59 -0
- package/src/commands/info.js +78 -0
- package/src/commands/init.js +88 -0
- package/src/commands/install.js +139 -0
- package/src/commands/list.js +103 -0
- package/src/commands/publish.js +148 -0
- package/src/commands/run.js +72 -0
- package/src/commands/search.js +41 -0
- package/src/commands/uninstall.js +48 -0
- package/src/commands/update.js +63 -0
- package/src/commands/x.js +136 -0
- package/src/core/cache.js +117 -0
- package/src/core/installer.js +316 -0
- package/src/core/lockfile.js +128 -0
- package/src/core/package-json.js +133 -0
- package/src/core/registry.js +166 -0
- package/src/core/resolver.js +248 -0
- package/src/security/audit.js +100 -0
- package/src/security/integrity.js +70 -0
- package/src/utils/config.js +138 -0
- package/src/utils/env.js +31 -0
- package/src/utils/fs.js +154 -0
- package/src/utils/http.js +232 -0
- package/src/utils/logger.js +128 -0
- package/src/utils/lru-cache.js +66 -0
- package/src/utils/progress.js +142 -0
- package/src/utils/semver.js +279 -0
- package/src/utils/system.js +39 -0
- package/src/workspace/workspace.js +126 -0
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const PackageJSON = require('../core/package-json');
|
|
4
|
+
const registry = require('../core/registry');
|
|
5
|
+
const semver = require('../utils/semver');
|
|
6
|
+
const { Spinner } = require('../utils/progress');
|
|
7
|
+
const logger = require('../utils/logger');
|
|
8
|
+
|
|
9
|
+
/** jpm update [pkg] | jpm outdated */
|
|
10
|
+
module.exports = async function update(args, flags, command) {
|
|
11
|
+
const cwd = process.cwd();
|
|
12
|
+
const pkgJson = PackageJSON.fromDir(cwd);
|
|
13
|
+
const allDeps = pkgJson.allDeps();
|
|
14
|
+
|
|
15
|
+
const targets = args.length
|
|
16
|
+
? args.reduce((acc, n) => { if (allDeps[n]) acc[n] = allDeps[n]; return acc; }, {})
|
|
17
|
+
: allDeps;
|
|
18
|
+
|
|
19
|
+
const spinner = new Spinner(`Checking ${Object.keys(targets).length} packages for updates...`).start();
|
|
20
|
+
const rows = [];
|
|
21
|
+
|
|
22
|
+
await Promise.all(
|
|
23
|
+
Object.entries(targets).map(async ([name, range]) => {
|
|
24
|
+
try {
|
|
25
|
+
const versions = await registry.getVersions(name);
|
|
26
|
+
const latest = await registry.getLatest(name);
|
|
27
|
+
const current = semver.maxSatisfying(versions, range);
|
|
28
|
+
const wanted = semver.maxSatisfying(versions, range);
|
|
29
|
+
const isOutdated = latest && current && semver.gt(latest, current);
|
|
30
|
+
|
|
31
|
+
rows.push({
|
|
32
|
+
Package: name,
|
|
33
|
+
Current: current || 'n/a',
|
|
34
|
+
Wanted: wanted || 'n/a',
|
|
35
|
+
Latest: latest || 'n/a',
|
|
36
|
+
Status: isOutdated
|
|
37
|
+
? logger.c.yellow('outdated')
|
|
38
|
+
: logger.c.green('up to date'),
|
|
39
|
+
});
|
|
40
|
+
} catch {
|
|
41
|
+
rows.push({ Package: name, Current: '?', Wanted: '?', Latest: '?', Status: logger.c.red('error') });
|
|
42
|
+
}
|
|
43
|
+
})
|
|
44
|
+
);
|
|
45
|
+
|
|
46
|
+
spinner.succeed('Done');
|
|
47
|
+
|
|
48
|
+
if (command === 'outdated') {
|
|
49
|
+
const outdated = rows.filter(r => r.Status.includes('outdated'));
|
|
50
|
+
if (!outdated.length) { logger.success('All packages are up to date!'); return; }
|
|
51
|
+
logger.table(outdated, ['Package', 'Current', 'Wanted', 'Latest', 'Status']);
|
|
52
|
+
return;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
// Actually update
|
|
56
|
+
const outdatedPkgs = rows.filter(r => r.Status.includes('outdated'));
|
|
57
|
+
if (!outdatedPkgs.length) { logger.success('All packages are up to date!'); return; }
|
|
58
|
+
|
|
59
|
+
logger.info(`Updating ${outdatedPkgs.length} package(s)...`);
|
|
60
|
+
const installCmd = require('./install');
|
|
61
|
+
const pkgArgs = outdatedPkgs.map(p => `${p.Package}@${p.Latest}`);
|
|
62
|
+
await installCmd(pkgArgs, flags);
|
|
63
|
+
};
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
const path = require('node:path');
|
|
4
|
+
const { spawnSync } = require('node:child_process');
|
|
5
|
+
const Resolver = require('../core/resolver');
|
|
6
|
+
const Installer = require('../core/installer');
|
|
7
|
+
const { tempDir, rimraf } = require('../utils/fs');
|
|
8
|
+
const { Spinner } = require('../utils/progress');
|
|
9
|
+
const config = require('../utils/config');
|
|
10
|
+
const logger = require('../utils/logger');
|
|
11
|
+
|
|
12
|
+
/**
|
|
13
|
+
* jpm x <package>[@version] [args...]
|
|
14
|
+
*
|
|
15
|
+
* Equivalent to npx. Resolves, installs to a temp dir, and executes.
|
|
16
|
+
*/
|
|
17
|
+
module.exports = async function xCommand(args, flags) {
|
|
18
|
+
if (!args.length) {
|
|
19
|
+
logger.error('No package specified to execute.');
|
|
20
|
+
logger.info('Usage: jpm x <package>[@version] [args...]');
|
|
21
|
+
process.exit(1);
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
const pkgArg = args[0];
|
|
25
|
+
const execArgs = args.slice(1);
|
|
26
|
+
|
|
27
|
+
const lastAt = pkgArg.lastIndexOf('@');
|
|
28
|
+
// Handle scoped packages correctly: @scope/pkg@version
|
|
29
|
+
const hasVersion = lastAt > 0 && !pkgArg.startsWith('@') || (pkgArg.startsWith('@') && pkgArg.split('@').length > 2);
|
|
30
|
+
|
|
31
|
+
let name, range;
|
|
32
|
+
if (pkgArg.startsWith('@')) {
|
|
33
|
+
const parts = pkgArg.split('@');
|
|
34
|
+
name = '@' + parts[1];
|
|
35
|
+
range = parts[2] || 'latest';
|
|
36
|
+
} else {
|
|
37
|
+
name = hasVersion ? pkgArg.slice(0, lastAt) : pkgArg;
|
|
38
|
+
range = hasVersion ? pkgArg.slice(lastAt + 1) : 'latest';
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
const resolveSpinner = new Spinner(`Resolving ${name}@${range}...`).start();
|
|
42
|
+
|
|
43
|
+
try {
|
|
44
|
+
// 1. Resolve
|
|
45
|
+
const resolver = new Resolver();
|
|
46
|
+
const resolvedMap = await resolver.resolve({ [name]: range }, {}, {}, (count) => {
|
|
47
|
+
resolveSpinner.text = `Resolving... (${count} resolved)`;
|
|
48
|
+
});
|
|
49
|
+
resolveSpinner.succeed(`Resolved ${resolvedMap.size} packages`);
|
|
50
|
+
|
|
51
|
+
// 2. Install to temp
|
|
52
|
+
const tmp = tempDir('jpm-x-');
|
|
53
|
+
const installer = new Installer(tmp);
|
|
54
|
+
|
|
55
|
+
logger.info(`Installing to temporary directory...`);
|
|
56
|
+
await installer.installAll(resolvedMap, { flags });
|
|
57
|
+
|
|
58
|
+
// 3. Find the binary
|
|
59
|
+
const resolvedPkg = [...resolvedMap.values()].find(m => m.name === name);
|
|
60
|
+
if (!resolvedPkg) throw new Error(`Failed to find resolved metadata for ${name}`);
|
|
61
|
+
|
|
62
|
+
const bins = resolvedPkg.bin;
|
|
63
|
+
let binName;
|
|
64
|
+
|
|
65
|
+
if (typeof bins === 'string') {
|
|
66
|
+
binName = name.split('/').pop();
|
|
67
|
+
} else if (typeof bins === 'object' && bins !== null) {
|
|
68
|
+
const entries = Object.keys(bins);
|
|
69
|
+
if (entries.length === 0) throw new Error(`Package ${name} has no binaries.`);
|
|
70
|
+
binName = entries.find(k => k === name || k === name.split('/').pop()) || entries[0];
|
|
71
|
+
} else {
|
|
72
|
+
throw new Error(`Package ${name} does not define any binaries in package.json`);
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
const binPath = path.join(tmp, 'node_modules', '.bin', binName + (process.platform === 'win32' ? '.cmd' : ''));
|
|
76
|
+
|
|
77
|
+
// If the .cmd doesn't exist, try the raw JS file with node
|
|
78
|
+
let finalBin = binPath;
|
|
79
|
+
let finalArgs = execArgs;
|
|
80
|
+
let useNode = false;
|
|
81
|
+
|
|
82
|
+
const fs = require('node:fs');
|
|
83
|
+
if (!fs.existsSync(binPath)) {
|
|
84
|
+
// Fallback to finding the JS file
|
|
85
|
+
const relPath = typeof bins === 'string' ? bins : bins[binName];
|
|
86
|
+
finalBin = path.join(tmp, 'node_modules', name, relPath);
|
|
87
|
+
useNode = true;
|
|
88
|
+
}
|
|
89
|
+
|
|
90
|
+
logger.info(`Executing ${binName}...\n`);
|
|
91
|
+
|
|
92
|
+
const spawnCmd = useNode ? process.execPath : finalBin;
|
|
93
|
+
const spawnArgs = useNode ? [finalBin, ...execArgs] : execArgs;
|
|
94
|
+
|
|
95
|
+
const env = {
|
|
96
|
+
...process.env,
|
|
97
|
+
PATH: `${path.join(tmp, 'node_modules', '.bin')}${process.platform === 'win32' ? ';' : ':'}${process.env.PATH}`,
|
|
98
|
+
};
|
|
99
|
+
|
|
100
|
+
// On Windows, when shell: true is used, we need to be very careful with quoting
|
|
101
|
+
// if the path to node or the binary contains spaces (like C:\Program Files\...)
|
|
102
|
+
const isWin = process.platform === 'win32';
|
|
103
|
+
|
|
104
|
+
let result;
|
|
105
|
+
if (isWin && useNode) {
|
|
106
|
+
// Special handling for Windows + Node to avoid quoting hell with shell: true
|
|
107
|
+
result = spawnSync(`"${process.execPath}"`, [`"${finalBin}"`, ...execArgs.map(a => `"${a}"`)], {
|
|
108
|
+
cwd: process.cwd(),
|
|
109
|
+
stdio: 'inherit',
|
|
110
|
+
env,
|
|
111
|
+
shell: true,
|
|
112
|
+
windowsVerbatimArguments: true
|
|
113
|
+
});
|
|
114
|
+
} else {
|
|
115
|
+
result = spawnSync(useNode ? process.execPath : finalBin, useNode ? [finalBin, ...execArgs] : execArgs, {
|
|
116
|
+
cwd: process.cwd(),
|
|
117
|
+
stdio: 'inherit',
|
|
118
|
+
env,
|
|
119
|
+
shell: true
|
|
120
|
+
});
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
if (result.error) throw result.error;
|
|
124
|
+
|
|
125
|
+
// Cleanup on success if not in debug
|
|
126
|
+
if (result.status === 0 && config.loglevel !== 'debug') {
|
|
127
|
+
rimraf(tmp);
|
|
128
|
+
}
|
|
129
|
+
|
|
130
|
+
process.exit(result.status ?? 0);
|
|
131
|
+
|
|
132
|
+
} catch (err) {
|
|
133
|
+
resolveSpinner.fail(`Error: ${err.message}`);
|
|
134
|
+
process.exit(1);
|
|
135
|
+
}
|
|
136
|
+
};
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
'use strict';

const fs = require('node:fs');
const path = require('node:path');
const crypto = require('node:crypto');
const config = require('../utils/config');
const { mkdirp, rimraf } = require('../utils/fs');
const logger = require('../utils/logger');

/**
 * Disk cache at ~/.jpm/cache/<name>/<version>.tgz
 * Also stores metadata JSON alongside: <name>/<version>.json
 */

// Scoped names contain '/', which cannot appear inside a path segment.
const SCOPE_SEP = '__SCOPE__';

/** Encode a (possibly scoped) package name as one filesystem-safe segment. */
function encodeName(name) {
  return name.replace('/', SCOPE_SEP);
}

/** Root directory of the on-disk cache. */
function cacheRoot() {
  return config.cacheDir;
}

/** Absolute path of the cached tarball for name@version. */
function tgzPath(name, version) {
  return path.join(cacheRoot(), encodeName(name), `${version}.tgz`);
}

/** Absolute path of the cached metadata JSON for name@version. */
function metaPath(name, version) {
  return path.join(cacheRoot(), encodeName(name), `${version}.json`);
}

/**
 * Returns the cached tarball path for name@version, or null on a miss.
 *
 * @param {string} name - Package name (may be scoped).
 * @param {string} version - Exact version.
 * @returns {Promise<string|null>}
 */
async function get(name, version) {
  const p = tgzPath(name, version);
  if (fs.existsSync(p)) {
    logger.verbose(`cache hit ${name}@${version}`);
    return p;
  }
  return null;
}

/**
 * Copies a downloaded tarball into the cache.
 *
 * @param {string} srcTgz - Path of the tarball to store.
 */
async function set(name, version, srcTgz) {
  const dest = tgzPath(name, version);
  mkdirp(path.dirname(dest));
  fs.copyFileSync(srcTgz, dest);
  logger.verbose(`cache store ${name}@${version}`);
}

/** Writes package metadata JSON alongside the cached tarball. */
async function setMeta(name, version, meta) {
  const p = metaPath(name, version);
  mkdirp(path.dirname(p));
  fs.writeFileSync(p, JSON.stringify(meta, null, 2), 'utf8');
}

/** Reads cached metadata; returns null if missing or unparseable. */
async function getMeta(name, version) {
  const p = metaPath(name, version);
  try { return JSON.parse(fs.readFileSync(p, 'utf8')); }
  catch { return null; }
}

/** True if the tarball for name@version is present in the cache. */
function has(name, version) {
  return fs.existsSync(tgzPath(name, version));
}

/**
 * Clears cache entries: a single version, a whole package, or everything.
 *
 * @param {string} [name] - Package whose entries should be removed.
 * @param {string} [version] - Specific version to remove (requires name).
 */
function clear(name, version) {
  if (name && version) {
    rimraf(tgzPath(name, version));
    rimraf(metaPath(name, version));
  } else if (name) {
    rimraf(path.join(cacheRoot(), encodeName(name)));
  } else {
    // Clear entire cache
    rimraf(cacheRoot());
    logger.success('Cache cleared');
  }
}

/**
 * Walks the cache directory and reports totals.
 *
 * @returns {{packages: number, size: number, root?: string}}
 */
function stats() {
  const root = cacheRoot();
  if (!fs.existsSync(root)) return { packages: 0, size: 0 };

  let packages = 0;
  let size = 0;

  function walk(dir) {
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
      const full = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        walk(full);
      } else {
        const s = fs.statSync(full);
        size += s.size;
        if (entry.name.endsWith('.tgz')) packages++;
      }
    }
  }

  // Best-effort: a concurrently-modified cache must not crash stats.
  try { walk(root); } catch { }
  return { packages, size, root };
}

/**
 * Lists all cached tarballs as { name, version } records.
 *
 * @returns {Array<{name: string, version: string}>}
 */
function list() {
  const root = cacheRoot();
  if (!fs.existsSync(root)) return [];
  const result = [];
  for (const scopeOrName of fs.readdirSync(root)) {
    const dir = path.join(root, scopeOrName);
    if (!fs.statSync(dir).isDirectory()) continue;
    for (const file of fs.readdirSync(dir)) {
      if (file.endsWith('.tgz')) {
        // slice, not replace: replace('.tgz', '') strips the FIRST
        // occurrence and would corrupt a version containing '.tgz'
        // anywhere before the extension.
        const version = file.slice(0, -'.tgz'.length);
        const name = scopeOrName.replace(SCOPE_SEP, '/');
        result.push({ name, version });
      }
    }
  }
  return result;
}

module.exports = { get, set, getMeta, setMeta, has, clear, stats, list };
|
|
@@ -0,0 +1,316 @@
|
|
|
1
|
+
'use strict';

const fs = require('node:fs');
const path = require('node:path');
const crypto = require('node:crypto');
const tar = require('tar');
const registry = require('./registry');
const cache = require('./cache');
const { mkdirp, rimraf, tempDir, symlink } = require('../utils/fs');
const { MultiBar, Spinner } = require('../utils/progress');
const logger = require('../utils/logger');

// Shell fragments that commonly indicate malicious lifecycle scripts.
const SUSPICIOUS_PATTERNS = [
  /curl\s+.*?http/i,
  /wget\s+.*?http/i,
  /rm\s+-rf\s+\//,
  /sh\s+-c\s+.*?http/i,
  /python.*?import\s+socket/i,
];

const integrity = require('../security/integrity');

// Number of packages downloaded/extracted in parallel.
const CONCURRENCY = 6;

/**
 * Handles the actual filesystem installation of resolved packages.
 */
class Installer {
  /**
   * Creates an instance of the Installer.
   *
   * @param {string} projectRoot - The absolute path to the project root directory
   */
  constructor(projectRoot) {
    this.projectRoot = projectRoot;
    this.nodeModules = path.join(projectRoot, 'node_modules');
    this._multibar = new MultiBar();
    this._flags = {};
  }

  /**
   * Installs all packages specified in a resolved dependency map.
   *
   * @param {Map<string, object>} resolvedMap - The output of Resolver.resolve()
   * @param {object} [options={}] - Installation options
   * @param {boolean} [options.dryRun=false] - If true, only print what would be installed
   * @param {object} [options.flags={}] - CLI flags
   * @returns {Promise<void>}
   */
  async installAll(resolvedMap, options = {}) {
    this._flags = options.flags || {};
    if (options.dryRun) {
      logger.info('[dry-run] Would install:');
      for (const [key] of resolvedMap) logger.log(` + ${key}`);
      return;
    }

    mkdirp(this.nodeModules);

    const packages = [...resolvedMap.values()];
    this._installedInRun = []; // Track for rollback

    try {
      if (this._flags.fast) {
        logger.warn('BLIND INSTALL: Skipping all extraction and integrity checks.');
        await this._linkBins(packages);
        return;
      }

      await this._installBatch(packages);
      await this._linkBins(packages);
    } catch (err) {
      logger.error('Installation failed. Rolling back partial changes...');
      for (const name of this._installedInRun) {
        await this.uninstall(name);
      }
      throw err;
    } finally {
      // Previously only ran on the full success path, leaving the progress
      // UI active after a --fast return or a failed install; `finally`
      // guarantees it always stops.
      this._multibar.stop();
    }
  }

  /**
   * Installs packages in parallel batches to optimize network and disk I/O.
   * Spawns up to CONCURRENCY workers that drain a shared queue.
   *
   * @param {Object[]} packages - Array of package metadata objects to install
   * @protected
   */
  async _installBatch(packages) {
    const queue = [...packages];
    const workers = Array(Math.min(CONCURRENCY, queue.length)).fill(null).map(async () => {
      while (queue.length > 0) {
        const p = queue.shift();
        if (p) await this._installOne(p);
      }
    });
    await Promise.all(workers);
  }

  /**
   * Installs a single package into the node_modules directory.
   * Handles caching, downloading, integrity verification, and extraction.
   *
   * @param {Object} meta - The resolved metadata for the package
   * @protected
   */
  async _installOne(meta) {
    const { name, version, resolved, integrity: integrityHash, shasum } = meta;
    // An aliased dependency installs under its alias, not its real name.
    const installName = meta.alias || name;
    const destDir = this._destDir(installName);

    // Check if already installed (Incremental Install)
    if (this._isInstalled(installName, version)) {
      logger.verbose(`skip ${installName}@${version} (already installed)`);
      return;
    }

    // Check local cache for the tarball
    const cached = await cache.get(name, version);
    if (cached) {
      logger.verbose(`cache hit ${installName}@${version}`);
      // NOTE(review): the cache-hit path skips _checkScripts() and is not
      // recorded in _installedInRun for rollback — confirm both are intended.
      await this._extract(cached, destDir, installName, version);
      return;
    }

    if (!resolved) {
      logger.warn(`No tarball URL for ${installName}@${version}, skipping.`);
      return;
    }

    // Download to a temporary location
    const tmp = tempDir('jpm-dl-');
    const tgz = path.join(tmp, `${installName.replace('/', '-')}-${version}.tgz`);
    const dest = fs.createWriteStream(tgz);

    const barKey = `${installName}@${version}`;
    this._multibar.add(barKey, 100);

    try {
      await registry.downloadTarball(resolved, dest, (received, total) => {
        if (total) this._multibar.update(barKey, Math.round((received / total) * 100));
      });
      this._multibar.update(barKey, 100);
      this._multibar.remove(barKey);
    } catch (err) {
      this._multibar.remove(barKey);
      rimraf(tmp);
      throw new Error(`Failed to download ${installName}@${version}: ${err.message}`);
    }

    // Verify cryptographic integrity
    const ok = await integrity.verify(tgz, integrityHash, shasum);
    if (!ok) {
      rimraf(tmp);
      throw new Error(`Integrity check failed for ${installName}@${version}`);
    }

    // Update local cache
    await cache.set(name, version, tgz);

    // Extract contents to node_modules
    this._checkScripts(installName, version, meta);
    await this._extract(tgz, destDir, installName, version);
    rimraf(tmp);

    this._installedInRun.push(installName);
    logger.verbose(`installed ${installName}@${version}`);
  }

  /**
   * Extracts a tarball to the destination directory.
   *
   * @param {string} tgzPath - Path to the tarball file
   * @param {string} destDir - Target directory for extraction
   * @param {string} name - Package name for logging
   * @param {string} version - Package version for logging
   * @protected
   */
  async _extract(tgzPath, destDir, name, version) {
    rimraf(destDir);
    mkdirp(destDir);

    const absoluteDest = path.resolve(destDir);

    await tar.extract({
      file: tgzPath,
      cwd: destDir,
      strip: 1,
      filter: (p, stat) => {
        const fullPath = path.resolve(destDir, p);
        // A bare startsWith would also accept sibling paths that merely
        // share the prefix (e.g. /a/pkg-evil vs /a/pkg); require the
        // destination root itself or a path-separator boundary after it.
        if (fullPath !== absoluteDest && !fullPath.startsWith(absoluteDest + path.sep)) {
          logger.error(`Zip Slip security violation blocked: ${p} in ${name}@${version}`);
          return false;
        }
        return true;
      }
    });
  }

  /**
   * Resolves the absolute path for a package in node_modules, handling scopes.
   *
   * @param {string} name - The package name
   * @returns {string} Absolute path to the package directory
   * @protected
   */
  _destDir(name) {
    if (name.startsWith('@')) {
      const [scope, pkg] = name.split('/');
      return path.join(this.nodeModules, scope, pkg);
    }
    return path.join(this.nodeModules, name);
  }

  /**
   * Scans package scripts for known malicious or suspicious execution patterns.
   * Only warns — installation is not blocked.
   *
   * @param {string} name - Package name
   * @param {string} version - Package version
   * @param {Object} meta - Package metadata containing scripts
   * @returns {boolean} True if suspicious patterns were detected
   * @protected
   */
  _checkScripts(name, version, meta) {
    const scripts = meta.scripts || {};
    for (const [id, cmd] of Object.entries(scripts)) {
      // Only lifecycle scripts that run automatically at install time matter.
      if (['preinstall', 'postinstall', 'install'].includes(id)) {
        for (const pattern of SUSPICIOUS_PATTERNS) {
          if (pattern.test(cmd)) {
            logger.warn(`SUSPICIOUS SCRIPT detected in ${name}@${version}: "${id}": "${cmd}"`);
            logger.warn('Exercise caution when installing this package.');
            return true;
          }
        }
      }
    }
    return false;
  }

  /**
   * Verifies if a specific version of a package is already present in node_modules.
   *
   * @param {string} name - Package name
   * @param {string} version - Package version to verify
   * @returns {boolean} True if the exact version is installed
   * @protected
   */
  _isInstalled(name, version) {
    const pkgJson = path.join(this._destDir(name), 'package.json');
    if (!fs.existsSync(pkgJson)) return false;
    try {
      const pkg = JSON.parse(fs.readFileSync(pkgJson, 'utf8'));
      return pkg.version === version;
    } catch { return false; }
  }

  /**
   * Creates symbolic links for binary executables defined in package metadata.
   * Only object-form `bin` fields are linked here.
   *
   * @param {Object[]} packages - Array of resolved package metadata
   * @protected
   */
  async _linkBins(packages) {
    const binDir = path.join(this.nodeModules, '.bin');
    mkdirp(binDir);

    for (const meta of packages) {
      const { name, bin } = meta;
      const installName = meta.alias || name;
      // NOTE(review): string-form `bin` fields are skipped here — confirm
      // that callers (e.g. jpm x) handle that case themselves.
      if (!bin || typeof bin !== 'object') continue;
      for (const [binName, binPath] of Object.entries(bin)) {
        const src = path.join(this._destDir(installName), binPath);
        const dest = path.join(binDir, binName);

        // Security: Ensure binary source is within the package directory.
        // Require a separator boundary so sibling dirs sharing the prefix
        // (e.g. pkg vs pkg-evil) are not accepted.
        const resolvedSrc = path.resolve(src);
        const packageDir = path.resolve(this._destDir(installName));
        if (resolvedSrc !== packageDir && !resolvedSrc.startsWith(packageDir + path.sep)) {
          logger.warn(`Insecure binary path blocked for ${installName}: ${binPath}`);
          continue;
        }

        try {
          symlink(src, dest);
          fs.chmodSync(src, 0o755);
        } catch {
          // Best-effort: symlink/chmod failures (e.g. missing permissions
          // on some platforms) are intentionally ignored.
        }
      }
    }
  }

  /**
   * Remove a single package from node_modules
   */
  async uninstall(name) {
    const destDir = this._destDir(name);
    if (!fs.existsSync(destDir)) return false;
    rimraf(destDir);

    // Remove bin links
    const binDir = path.join(this.nodeModules, '.bin');
    if (fs.existsSync(binDir)) {
      for (const entry of fs.readdirSync(binDir)) {
        const linkPath = path.join(binDir, entry);
        try {
          const target = fs.readlinkSync(linkPath);
          if (target.includes(path.sep + name + path.sep)) fs.unlinkSync(linkPath);
        } catch {
          // Non-symlink entries or dangling links are skipped silently.
        }
      }
    }
    return true;
  }
}

module.exports = Installer;
|