@gjsify/cli 0.3.7 → 0.3.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/actions/build.js +28 -4
- package/lib/commands/dlx.d.ts +2 -0
- package/lib/commands/dlx.js +18 -1
- package/lib/config.js +57 -10
- package/lib/utils/install-backend-native.d.ts +2 -0
- package/lib/utils/install-backend-native.js +299 -0
- package/lib/utils/install-backend.d.ts +8 -0
- package/lib/utils/install-backend.js +14 -11
- package/package.json +9 -6
- package/src/actions/build.ts +27 -1
- package/src/commands/dlx.ts +23 -1
- package/src/config.ts +59 -12
- package/src/utils/install-backend-native.ts +363 -0
- package/src/utils/install-backend.ts +22 -13
package/lib/actions/build.js
CHANGED
|
@@ -6,6 +6,20 @@ import { getPnpPlugin } from "@gjsify/resolve-npm/pnp-relay";
|
|
|
6
6
|
import { dirname, extname } from "node:path";
|
|
7
7
|
import { chmod, readFile, writeFile } from "node:fs/promises";
|
|
8
8
|
const GJS_SHEBANG = "#!/usr/bin/env -S gjs -m\n";
|
|
9
|
+
/**
|
|
10
|
+
* `true` when `path` points at a location that's unsafe to use as a build
|
|
11
|
+
* outfile (would overwrite source). Currently catches:
|
|
12
|
+
* - any TypeScript extension (`.ts`, `.tsx`, `.mts`, `.cts`, `.mtsx`, `.ctsx`)
|
|
13
|
+
* - paths that live under a `src/` segment (relative or absolute)
|
|
14
|
+
*/
|
|
15
|
+
function isUnsafeDefaultOutput(path) {
|
|
16
|
+
if (/\.[cm]?tsx?$/i.test(path))
|
|
17
|
+
return true;
|
|
18
|
+
const norm = path.replace(/\\/g, "/");
|
|
19
|
+
if (/(?:^|\/)src\//.test(norm))
|
|
20
|
+
return true;
|
|
21
|
+
return false;
|
|
22
|
+
}
|
|
9
23
|
/**
|
|
10
24
|
* Resolve the gjsify-flavoured PnP plugin. Anchors the relay on this file's
|
|
11
25
|
* URL so transitive `@gjsify/*` polyfills (reached via @gjsify/cli's deps on
|
|
@@ -190,10 +204,20 @@ export class BuildAction {
|
|
|
190
204
|
!esbuild?.outfile &&
|
|
191
205
|
!esbuild?.outdir &&
|
|
192
206
|
(pgk?.main || pgk?.module)) {
|
|
193
|
-
esbuild
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
207
|
+
const candidate = esbuild?.format === "cjs"
|
|
208
|
+
? pgk.main || pgk.module
|
|
209
|
+
: pgk.module || pgk.main;
|
|
210
|
+
if (candidate && isUnsafeDefaultOutput(candidate)) {
|
|
211
|
+
// `package.json#main`/`module` commonly points at a TypeScript
|
|
212
|
+
// source (e.g. `src/index.ts` for TS-direct workflows). Falling
|
|
213
|
+
// back to that value would have esbuild OVERWRITE the source.
|
|
214
|
+
// Surface a clear error and require an explicit outfile/outdir
|
|
215
|
+
// instead of silently destroying the user's code.
|
|
216
|
+
throw new Error(`gjsify build: refusing to default --outfile to ${candidate} ` +
|
|
217
|
+
`(would overwrite a TypeScript source file). Pass --outfile/--outdir ` +
|
|
218
|
+
`explicitly, or set "gjsify.esbuild.outfile" in package.json.`);
|
|
219
|
+
}
|
|
220
|
+
esbuild.outfile = candidate;
|
|
197
221
|
}
|
|
198
222
|
const { consoleShim, globals } = this.configData;
|
|
199
223
|
const pluginOpts = {
|
package/lib/commands/dlx.d.ts
CHANGED
package/lib/commands/dlx.js
CHANGED
|
@@ -37,6 +37,16 @@ export const dlxCommand = {
|
|
|
37
37
|
description: 'Cache TTL in minutes. Defaults to 7 days. Use 0 to bypass cache.',
|
|
38
38
|
type: 'number',
|
|
39
39
|
default: 60 * 24 * 7,
|
|
40
|
+
})
|
|
41
|
+
.option('reinstall', {
|
|
42
|
+
description: 'Bypass the cache for this run (alias for --cache-max-age=0).',
|
|
43
|
+
type: 'boolean',
|
|
44
|
+
default: false,
|
|
45
|
+
})
|
|
46
|
+
.option('frozen', {
|
|
47
|
+
description: 'Use the project-local gjsify-lock.json verbatim — fail if missing or stale (no resolver pass).',
|
|
48
|
+
type: 'boolean',
|
|
49
|
+
default: false,
|
|
40
50
|
})
|
|
41
51
|
.option('verbose', {
|
|
42
52
|
description: 'Verbose logging (passes --loglevel verbose to npm).',
|
|
@@ -49,10 +59,12 @@ export const dlxCommand = {
|
|
|
49
59
|
}),
|
|
50
60
|
handler: async (args) => {
|
|
51
61
|
const parsed = parseSpec(args.spec);
|
|
62
|
+
const cacheMaxAge = args.reinstall ? 0 : args['cache-max-age'];
|
|
52
63
|
const { pkgDir, cachedPkgName } = await ensurePkgDir(parsed, {
|
|
53
64
|
verbose: args.verbose,
|
|
54
65
|
registry: args.registry,
|
|
55
|
-
cacheMaxAge
|
|
66
|
+
cacheMaxAge,
|
|
67
|
+
frozen: args.frozen,
|
|
56
68
|
});
|
|
57
69
|
// Bin / args disambiguation:
|
|
58
70
|
// gjsify dlx <pkg> → no bin, no args
|
|
@@ -86,6 +98,11 @@ async function ensurePkgDir(parsed, opts) {
|
|
|
86
98
|
specs: [parsed.spec],
|
|
87
99
|
verbose: opts.verbose,
|
|
88
100
|
registry: opts.registry,
|
|
101
|
+
// Cache-prepare dirs are scoped per cache key, so writing a lockfile
|
|
102
|
+
// there gives us reproducibility for repeated `gjsify dlx <pkg>` calls
|
|
103
|
+
// and lets `--frozen` short-circuit the resolver entirely.
|
|
104
|
+
lockfile: true,
|
|
105
|
+
frozen: opts.frozen,
|
|
89
106
|
});
|
|
90
107
|
const liveTarget = symlinkSwap(cacheDir, prepareDir);
|
|
91
108
|
return {
|
package/lib/config.js
CHANGED
|
@@ -1,5 +1,23 @@
|
|
|
1
1
|
import { APP_NAME } from './constants.js';
|
|
2
2
|
import { cosmiconfig } from 'cosmiconfig';
|
|
3
|
+
/** Default cosmiconfig search places for a given module name (matches cosmiconfig defaults). */
|
|
4
|
+
function defaultSearchPlaces(name) {
|
|
5
|
+
return [
|
|
6
|
+
'package.json',
|
|
7
|
+
`.${name}rc`,
|
|
8
|
+
`.${name}rc.json`,
|
|
9
|
+
`.${name}rc.yaml`,
|
|
10
|
+
`.${name}rc.yml`,
|
|
11
|
+
`.${name}rc.js`,
|
|
12
|
+
`.${name}rc.ts`,
|
|
13
|
+
`.${name}rc.mjs`,
|
|
14
|
+
`.${name}rc.cjs`,
|
|
15
|
+
`${name}.config.js`,
|
|
16
|
+
`${name}.config.ts`,
|
|
17
|
+
`${name}.config.mjs`,
|
|
18
|
+
`${name}.config.cjs`,
|
|
19
|
+
];
|
|
20
|
+
}
|
|
3
21
|
import { readPackageJSON, resolvePackageJSON } from 'pkg-types';
|
|
4
22
|
import { getTsconfig } from 'get-tsconfig';
|
|
5
23
|
/** Deep merge objects (replaces lodash.merge) */
|
|
@@ -34,17 +52,46 @@ export class Config {
|
|
|
34
52
|
}
|
|
35
53
|
/** Loads gjsify config file, e.g `.gjsifyrc.js` */
|
|
36
54
|
async load(searchFrom) {
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
55
|
+
// cosmiconfig's default first-match-wins behaviour silently drops one
|
|
56
|
+
// source when both `package.json#gjsify` and an explicit config file
|
|
57
|
+
// (`.gjsifyrc.js`, `gjsify.config.mjs`, ...) are present. Project hits
|
|
58
|
+
// this footgun: adding `gjsify.bin` to package.json (so `gjsify dlx`
|
|
59
|
+
// resolves the GJS bundle) silently disables `.gjsifyrc.js`. We
|
|
60
|
+
// explicitly load both sources and merge — package.json is the lower
|
|
61
|
+
// layer, the explicit file wins on key collisions.
|
|
62
|
+
//
|
|
63
|
+
// Run two searches:
|
|
64
|
+
// 1. Default (includes package.json) — for projects that only use
|
|
65
|
+
// package.json#gjsify and no separate file.
|
|
66
|
+
// 2. Explicit-file only (package.json excluded) — to find the
|
|
67
|
+
// `.gjsifyrc.*` / `gjsify.config.*` regardless of whether
|
|
68
|
+
// package.json#gjsify exists.
|
|
69
|
+
const fileExplorer = cosmiconfig(APP_NAME, {
|
|
70
|
+
...this.loadOptions,
|
|
71
|
+
searchPlaces: (this.loadOptions.searchPlaces ?? defaultSearchPlaces(APP_NAME))
|
|
72
|
+
.filter((p) => p !== 'package.json'),
|
|
73
|
+
});
|
|
74
|
+
const fileResult = await fileExplorer.search(searchFrom);
|
|
75
|
+
const merged = {};
|
|
76
|
+
try {
|
|
77
|
+
const pkg = await this.readPackageJSON(searchFrom);
|
|
78
|
+
if (isPlainObject(pkg?.gjsify))
|
|
79
|
+
merge(merged, pkg.gjsify);
|
|
80
|
+
}
|
|
81
|
+
catch {
|
|
82
|
+
// Missing or unreadable package.json — skip.
|
|
83
|
+
}
|
|
84
|
+
if (fileResult?.config && isPlainObject(fileResult.config)) {
|
|
85
|
+
merge(merged, fileResult.config);
|
|
86
|
+
}
|
|
87
|
+
merged.esbuild ||= {};
|
|
88
|
+
merged.library ||= {};
|
|
89
|
+
merged.typescript ||= {};
|
|
90
|
+
return {
|
|
91
|
+
config: merged,
|
|
92
|
+
filepath: fileResult?.filepath ?? '',
|
|
93
|
+
isEmpty: !fileResult && Object.keys(merged).length === 3, // only the three default-empty objects
|
|
42
94
|
};
|
|
43
|
-
configFile.config ||= {};
|
|
44
|
-
configFile.config.esbuild ||= {};
|
|
45
|
-
configFile.config.library ||= {};
|
|
46
|
-
configFile.config.typescript ||= {};
|
|
47
|
-
return configFile;
|
|
48
95
|
}
|
|
49
96
|
/** Loads package.json of the current project */
|
|
50
97
|
async readPackageJSON(dirPath) {
|
|
@@ -0,0 +1,299 @@
|
|
|
1
|
+
// Native install backend — GJS-runnable replacement for `npm install`.
|
|
2
|
+
//
|
|
3
|
+
// Pipeline: parse specs → resolve deps via @gjsify/npm-registry packuments and
|
|
4
|
+
// @gjsify/semver → download tarballs in parallel → extract into a flat
|
|
5
|
+
// node_modules/ via @gjsify/tar. Output layout matches `npm install` so the
|
|
6
|
+
// existing `runGjsBundle()` prebuild detection works without branching.
|
|
7
|
+
//
|
|
8
|
+
// Out of scope (deferred to Phase 4): lockfile, peerDependencies validation,
|
|
9
|
+
// lifecycle scripts, git/file specs.
|
|
10
|
+
import * as fs from "node:fs";
|
|
11
|
+
import * as path from "node:path";
|
|
12
|
+
import * as os from "node:os";
|
|
13
|
+
import { Range, SemVer, maxSatisfying, } from "@gjsify/semver";
|
|
14
|
+
import { DEFAULT_REGISTRY, fetchPackument, fetchTarball, parseNpmrc, } from "@gjsify/npm-registry";
|
|
15
|
+
import { extractTarball } from "@gjsify/tar";
|
|
16
|
+
const DEFAULT_CONCURRENCY = Number(process.env.GJSIFY_INSTALL_CONCURRENCY ?? "8") || 8;
|
|
17
|
+
const LOCKFILE_NAME = "gjsify-lock.json";
|
|
18
|
+
const LOCKFILE_VERSION = 1;
|
|
19
|
+
export async function installPackagesNative(opts) {
|
|
20
|
+
if (opts.specs.length === 0) {
|
|
21
|
+
throw new Error("installPackagesNative: empty specs list");
|
|
22
|
+
}
|
|
23
|
+
fs.mkdirSync(opts.prefix, { recursive: true });
|
|
24
|
+
const npmrc = await loadNpmrc(opts);
|
|
25
|
+
const log = makeLogger(opts.verbose ?? false);
|
|
26
|
+
const lockfilePath = path.join(opts.prefix, LOCKFILE_NAME);
|
|
27
|
+
const existingLock = readLockfile(lockfilePath);
|
|
28
|
+
let nodes;
|
|
29
|
+
if (existingLock && (opts.frozen || lockfileMatchesRequest(existingLock, opts.specs))) {
|
|
30
|
+
log("install: using lockfile (%d package(s))", Object.keys(existingLock.packages).length);
|
|
31
|
+
nodes = lockfileToNodes(existingLock);
|
|
32
|
+
}
|
|
33
|
+
else {
|
|
34
|
+
if (opts.frozen) {
|
|
35
|
+
throw new Error(`install: --frozen requested but ${lockfilePath} is missing or stale (specs differ)`);
|
|
36
|
+
}
|
|
37
|
+
log("install: resolving %d top-level spec(s) → %s", opts.specs.length, opts.prefix);
|
|
38
|
+
nodes = await resolveDeps(opts.specs, npmrc, log);
|
|
39
|
+
if (opts.lockfile) {
|
|
40
|
+
writeLockfile(lockfilePath, opts.specs, nodes);
|
|
41
|
+
log("install: wrote %s (%d entries)", LOCKFILE_NAME, nodes.length);
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
log("install: downloading %d tarball(s)", nodes.length);
|
|
45
|
+
await downloadAndExtractAll(nodes, opts.prefix, npmrc, log);
|
|
46
|
+
await linkBins(nodes, opts.prefix, log);
|
|
47
|
+
log("install: done");
|
|
48
|
+
}
|
|
49
|
+
async function resolveDeps(specs, npmrc, log) {
|
|
50
|
+
const packumentCache = new Map();
|
|
51
|
+
const fetchPkg = (name) => {
|
|
52
|
+
const cached = packumentCache.get(name);
|
|
53
|
+
if (cached)
|
|
54
|
+
return cached;
|
|
55
|
+
const fresh = fetchPackument(name, { npmrc });
|
|
56
|
+
packumentCache.set(name, fresh);
|
|
57
|
+
return fresh;
|
|
58
|
+
};
|
|
59
|
+
const resolved = new Map();
|
|
60
|
+
const queue = specs.map(parseSpec);
|
|
61
|
+
while (queue.length > 0) {
|
|
62
|
+
const spec = queue.shift();
|
|
63
|
+
if (resolved.has(spec.name)) {
|
|
64
|
+
// Single-version-per-name policy (npm v6 semantics). Phase 4 v2
|
|
65
|
+
// (when peer-dep validation lands) revisits this for duplication.
|
|
66
|
+
continue;
|
|
67
|
+
}
|
|
68
|
+
const packument = await fetchPkg(spec.name);
|
|
69
|
+
const version = pickVersion(packument, spec.range);
|
|
70
|
+
if (!version) {
|
|
71
|
+
throw new Error(`No version of ${spec.name} satisfies ${spec.range}`);
|
|
72
|
+
}
|
|
73
|
+
const v = packument.versions[version];
|
|
74
|
+
if (!v) {
|
|
75
|
+
throw new Error(`Packument for ${spec.name} promised ${version} but no entry exists`);
|
|
76
|
+
}
|
|
77
|
+
const node = {
|
|
78
|
+
name: spec.name,
|
|
79
|
+
version,
|
|
80
|
+
tarballUrl: v.dist.tarball,
|
|
81
|
+
integrity: v.dist.integrity,
|
|
82
|
+
dependencies: v.dependencies ?? {},
|
|
83
|
+
optionalDependencies: v.optionalDependencies ?? {},
|
|
84
|
+
bin: v.bin,
|
|
85
|
+
};
|
|
86
|
+
resolved.set(spec.name, node);
|
|
87
|
+
log("resolve: %s@%s ← %s", spec.name, version, spec.range);
|
|
88
|
+
for (const [depName, depRange] of Object.entries(node.dependencies)) {
|
|
89
|
+
if (!resolved.has(depName))
|
|
90
|
+
queue.push({ name: depName, range: depRange });
|
|
91
|
+
}
|
|
92
|
+
for (const [depName, depRange] of Object.entries(node.optionalDependencies)) {
|
|
93
|
+
if (!resolved.has(depName))
|
|
94
|
+
queue.push({ name: depName, range: depRange });
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
return Array.from(resolved.values());
|
|
98
|
+
}
|
|
99
|
+
function readLockfile(lockfilePath) {
|
|
100
|
+
if (!fs.existsSync(lockfilePath))
|
|
101
|
+
return null;
|
|
102
|
+
try {
|
|
103
|
+
const parsed = JSON.parse(fs.readFileSync(lockfilePath, "utf-8"));
|
|
104
|
+
if (parsed.lockfileVersion !== LOCKFILE_VERSION)
|
|
105
|
+
return null;
|
|
106
|
+
if (!parsed.packages || typeof parsed.packages !== "object")
|
|
107
|
+
return null;
|
|
108
|
+
return parsed;
|
|
109
|
+
}
|
|
110
|
+
catch {
|
|
111
|
+
return null;
|
|
112
|
+
}
|
|
113
|
+
}
|
|
114
|
+
function writeLockfile(lockfilePath, specs, nodes) {
|
|
115
|
+
const packages = {};
|
|
116
|
+
// Sort for deterministic output (diff-friendly).
|
|
117
|
+
const sorted = [...nodes].sort((a, b) => (a.name < b.name ? -1 : a.name > b.name ? 1 : 0));
|
|
118
|
+
for (const node of sorted) {
|
|
119
|
+
packages[node.name] = {
|
|
120
|
+
version: node.version,
|
|
121
|
+
resolved: node.tarballUrl,
|
|
122
|
+
integrity: node.integrity,
|
|
123
|
+
dependencies: Object.keys(node.dependencies).length > 0 ? node.dependencies : undefined,
|
|
124
|
+
bin: node.bin,
|
|
125
|
+
};
|
|
126
|
+
}
|
|
127
|
+
const lockfile = {
|
|
128
|
+
lockfileVersion: LOCKFILE_VERSION,
|
|
129
|
+
requested: [...specs],
|
|
130
|
+
packages,
|
|
131
|
+
};
|
|
132
|
+
fs.writeFileSync(lockfilePath, JSON.stringify(lockfile, null, 2) + "\n");
|
|
133
|
+
}
|
|
134
|
+
function lockfileToNodes(lockfile) {
|
|
135
|
+
return Object.entries(lockfile.packages).map(([name, entry]) => ({
|
|
136
|
+
name,
|
|
137
|
+
version: entry.version,
|
|
138
|
+
tarballUrl: entry.resolved,
|
|
139
|
+
integrity: entry.integrity,
|
|
140
|
+
dependencies: entry.dependencies ?? {},
|
|
141
|
+
optionalDependencies: {},
|
|
142
|
+
bin: entry.bin,
|
|
143
|
+
}));
|
|
144
|
+
}
|
|
145
|
+
function lockfileMatchesRequest(lockfile, specs) {
|
|
146
|
+
if (lockfile.requested.length !== specs.length)
|
|
147
|
+
return false;
|
|
148
|
+
const a = [...lockfile.requested].sort();
|
|
149
|
+
const b = [...specs].sort();
|
|
150
|
+
return a.every((v, i) => v === b[i]);
|
|
151
|
+
}
|
|
152
|
+
function parseSpec(raw) {
|
|
153
|
+
if (raw.startsWith("@")) {
|
|
154
|
+
const slash = raw.indexOf("/");
|
|
155
|
+
if (slash < 0)
|
|
156
|
+
throw new Error(`Invalid spec (scoped name without slash): ${raw}`);
|
|
157
|
+
const at = raw.indexOf("@", slash);
|
|
158
|
+
if (at < 0)
|
|
159
|
+
return { name: raw, range: "*" };
|
|
160
|
+
return { name: raw.slice(0, at), range: raw.slice(at + 1) || "*" };
|
|
161
|
+
}
|
|
162
|
+
const at = raw.indexOf("@");
|
|
163
|
+
if (at < 0)
|
|
164
|
+
return { name: raw, range: "*" };
|
|
165
|
+
return { name: raw.slice(0, at), range: raw.slice(at + 1) || "*" };
|
|
166
|
+
}
|
|
167
|
+
function pickVersion(packument, range) {
|
|
168
|
+
// dist-tag fast path: `latest`, `next`, ...
|
|
169
|
+
if (packument["dist-tags"][range])
|
|
170
|
+
return packument["dist-tags"][range];
|
|
171
|
+
// Validate range early so a typo fails loudly.
|
|
172
|
+
let parsedRange;
|
|
173
|
+
try {
|
|
174
|
+
parsedRange = new Range(range);
|
|
175
|
+
}
|
|
176
|
+
catch {
|
|
177
|
+
throw new Error(`Invalid version range for ${packument.name}: ${range}`);
|
|
178
|
+
}
|
|
179
|
+
const versions = Object.keys(packument.versions).filter((v) => {
|
|
180
|
+
try {
|
|
181
|
+
new SemVer(v);
|
|
182
|
+
return true;
|
|
183
|
+
}
|
|
184
|
+
catch {
|
|
185
|
+
return false;
|
|
186
|
+
}
|
|
187
|
+
});
|
|
188
|
+
return maxSatisfying(versions, parsedRange);
|
|
189
|
+
}
|
|
190
|
+
async function downloadAndExtractAll(nodes, prefix, npmrc, log) {
|
|
191
|
+
const queue = [...nodes];
|
|
192
|
+
const workers = [];
|
|
193
|
+
const concurrency = Math.max(1, Math.min(DEFAULT_CONCURRENCY, queue.length));
|
|
194
|
+
for (let i = 0; i < concurrency; i++) {
|
|
195
|
+
workers.push(worker());
|
|
196
|
+
}
|
|
197
|
+
await Promise.all(workers);
|
|
198
|
+
async function worker() {
|
|
199
|
+
while (queue.length > 0) {
|
|
200
|
+
const node = queue.shift();
|
|
201
|
+
if (!node)
|
|
202
|
+
return;
|
|
203
|
+
const dest = path.join(prefix, "node_modules", node.name);
|
|
204
|
+
log("fetch: %s@%s ← %s", node.name, node.version, node.tarballUrl);
|
|
205
|
+
const bytes = await fetchTarball(node.tarballUrl, {
|
|
206
|
+
npmrc,
|
|
207
|
+
integrity: node.integrity,
|
|
208
|
+
});
|
|
209
|
+
fs.rmSync(dest, { recursive: true, force: true });
|
|
210
|
+
fs.mkdirSync(dest, { recursive: true });
|
|
211
|
+
await extractTarball(bytes, dest);
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
async function linkBins(nodes, prefix, log) {
|
|
216
|
+
const binDir = path.join(prefix, "node_modules", ".bin");
|
|
217
|
+
let created = 0;
|
|
218
|
+
for (const node of nodes) {
|
|
219
|
+
if (!node.bin)
|
|
220
|
+
continue;
|
|
221
|
+
const map = normalizeBin(node.name, node.bin);
|
|
222
|
+
if (map.size === 0)
|
|
223
|
+
continue;
|
|
224
|
+
fs.mkdirSync(binDir, { recursive: true });
|
|
225
|
+
for (const [binName, binTarget] of map) {
|
|
226
|
+
const targetAbs = path.join(prefix, "node_modules", node.name, binTarget);
|
|
227
|
+
if (!fs.existsSync(targetAbs))
|
|
228
|
+
continue;
|
|
229
|
+
try {
|
|
230
|
+
fs.chmodSync(targetAbs, 0o755);
|
|
231
|
+
}
|
|
232
|
+
catch {
|
|
233
|
+
/* best effort */
|
|
234
|
+
}
|
|
235
|
+
const linkPath = path.join(binDir, binName);
|
|
236
|
+
fs.rmSync(linkPath, { force: true });
|
|
237
|
+
const rel = path.relative(binDir, targetAbs);
|
|
238
|
+
try {
|
|
239
|
+
fs.symlinkSync(rel, linkPath);
|
|
240
|
+
created++;
|
|
241
|
+
}
|
|
242
|
+
catch {
|
|
243
|
+
fs.copyFileSync(targetAbs, linkPath);
|
|
244
|
+
fs.chmodSync(linkPath, 0o755);
|
|
245
|
+
created++;
|
|
246
|
+
}
|
|
247
|
+
}
|
|
248
|
+
}
|
|
249
|
+
if (created > 0)
|
|
250
|
+
log("bin: linked %d entry(ies) under .bin/", created);
|
|
251
|
+
}
|
|
252
|
+
function normalizeBin(pkgName, bin) {
|
|
253
|
+
const out = new Map();
|
|
254
|
+
if (typeof bin === "string") {
|
|
255
|
+
// String form is shorthand for `{ <last-segment-of-pkgName>: <bin> }`.
|
|
256
|
+
const baseName = pkgName.startsWith("@")
|
|
257
|
+
? pkgName.slice(pkgName.indexOf("/") + 1)
|
|
258
|
+
: pkgName;
|
|
259
|
+
out.set(baseName, bin);
|
|
260
|
+
return out;
|
|
261
|
+
}
|
|
262
|
+
for (const [k, v] of Object.entries(bin))
|
|
263
|
+
out.set(k, v);
|
|
264
|
+
return out;
|
|
265
|
+
}
|
|
266
|
+
async function loadNpmrc(opts) {
|
|
267
|
+
const home = os.homedir();
|
|
268
|
+
const homeRc = path.join(home, ".npmrc");
|
|
269
|
+
let parsed = {
|
|
270
|
+
registry: opts.registry ?? DEFAULT_REGISTRY,
|
|
271
|
+
scopes: {},
|
|
272
|
+
authTokens: {},
|
|
273
|
+
basicAuth: {},
|
|
274
|
+
};
|
|
275
|
+
if (fs.existsSync(homeRc)) {
|
|
276
|
+
try {
|
|
277
|
+
parsed = parseNpmrc(fs.readFileSync(homeRc, "utf-8"));
|
|
278
|
+
}
|
|
279
|
+
catch (e) {
|
|
280
|
+
// Don't let a busted .npmrc prevent installs from anonymous registries.
|
|
281
|
+
console.warn(`gjsify install: ignoring malformed ${homeRc}: ${e.message}`);
|
|
282
|
+
}
|
|
283
|
+
}
|
|
284
|
+
if (opts.registry) {
|
|
285
|
+
parsed.registry = opts.registry;
|
|
286
|
+
}
|
|
287
|
+
return parsed;
|
|
288
|
+
}
|
|
289
|
+
function makeLogger(verbose) {
|
|
290
|
+
if (!verbose) {
|
|
291
|
+
return () => {
|
|
292
|
+
/* silent unless verbose */
|
|
293
|
+
};
|
|
294
|
+
}
|
|
295
|
+
return (fmt, ...args) => {
|
|
296
|
+
const msg = fmt.replace(/%s|%d/g, () => String(args.shift()));
|
|
297
|
+
process.stderr.write(`gjsify install: ${msg}\n`);
|
|
298
|
+
};
|
|
299
|
+
}
|
|
@@ -7,5 +7,13 @@ export interface InstallOptions {
|
|
|
7
7
|
verbose?: boolean;
|
|
8
8
|
/** Optional registry override (writes a temp `.npmrc` in prefix). */
|
|
9
9
|
registry?: string;
|
|
10
|
+
/**
|
|
11
|
+
* Native backend only: write `<prefix>/gjsify-lock.json` after a successful
|
|
12
|
+
* resolve. When the file exists on next call AND `frozen: true`, the
|
|
13
|
+
* resolver is skipped and downloads use the pinned tarball URL + integrity.
|
|
14
|
+
*/
|
|
15
|
+
lockfile?: boolean;
|
|
16
|
+
/** Use `<prefix>/gjsify-lock.json` as the source of truth — fail if missing. */
|
|
17
|
+
frozen?: boolean;
|
|
10
18
|
}
|
|
11
19
|
export declare function installPackages(opts: InstallOptions): Promise<void>;
|
|
@@ -1,24 +1,27 @@
|
|
|
1
|
-
// Install backend abstraction
|
|
1
|
+
// Install backend abstraction.
|
|
2
2
|
//
|
|
3
|
-
//
|
|
4
|
-
//
|
|
5
|
-
//
|
|
3
|
+
// Default: native backend (resolves packuments via @gjsify/npm-registry,
|
|
4
|
+
// extracts tarballs via @gjsify/tar — no Node, no npm required at runtime).
|
|
5
|
+
// Fallback: `npm install --no-package-lock --no-audit --no-fund --prefix <dir> <specs...>`,
|
|
6
|
+
// for parity with the legacy code path. Switched via
|
|
7
|
+
// `GJSIFY_INSTALL_BACKEND=native|npm`.
|
|
6
8
|
//
|
|
7
|
-
//
|
|
8
|
-
//
|
|
9
|
-
//
|
|
9
|
+
// `gjsify dlx` uses this seam — installing under a cache prefix, with no
|
|
10
|
+
// package.json update to the user's project. The native backend matches that
|
|
11
|
+
// workflow without ever shelling out to Node.
|
|
10
12
|
//
|
|
11
13
|
// `--no-package-lock` keeps the cache prepare dir hermetic; the cache key
|
|
12
14
|
// already covers reproducibility. `--no-audit --no-fund` cuts ~5s off cold runs.
|
|
13
15
|
import { spawn } from 'node:child_process';
|
|
14
16
|
import { writeFileSync } from 'node:fs';
|
|
15
17
|
import { join } from 'node:path';
|
|
16
|
-
const DEFAULT_BACKEND = process.env.GJSIFY_INSTALL_BACKEND ?? '
|
|
18
|
+
const DEFAULT_BACKEND = process.env.GJSIFY_INSTALL_BACKEND ?? 'native';
|
|
17
19
|
export async function installPackages(opts) {
|
|
18
|
-
if (DEFAULT_BACKEND === '
|
|
19
|
-
|
|
20
|
+
if (DEFAULT_BACKEND === 'npm') {
|
|
21
|
+
return installViaNpm(opts);
|
|
20
22
|
}
|
|
21
|
-
|
|
23
|
+
const { installPackagesNative } = await import('./install-backend-native.js');
|
|
24
|
+
return installPackagesNative(opts);
|
|
22
25
|
}
|
|
23
26
|
async function installViaNpm({ prefix, specs, verbose, registry }) {
|
|
24
27
|
if (specs.length === 0) {
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@gjsify/cli",
|
|
3
|
-
"version": "0.3.
|
|
3
|
+
"version": "0.3.9",
|
|
4
4
|
"description": "CLI for Gjsify",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "lib/index.js",
|
|
@@ -23,11 +23,14 @@
|
|
|
23
23
|
"cli"
|
|
24
24
|
],
|
|
25
25
|
"dependencies": {
|
|
26
|
-
"@gjsify/create-app": "^0.3.
|
|
27
|
-
"@gjsify/esbuild-plugin-gjsify": "^0.3.
|
|
28
|
-
"@gjsify/node-polyfills": "^0.3.
|
|
29
|
-
"@gjsify/
|
|
30
|
-
"@gjsify/
|
|
26
|
+
"@gjsify/create-app": "^0.3.9",
|
|
27
|
+
"@gjsify/esbuild-plugin-gjsify": "^0.3.9",
|
|
28
|
+
"@gjsify/node-polyfills": "^0.3.9",
|
|
29
|
+
"@gjsify/npm-registry": "^0.3.9",
|
|
30
|
+
"@gjsify/resolve-npm": "^0.3.9",
|
|
31
|
+
"@gjsify/semver": "^0.3.9",
|
|
32
|
+
"@gjsify/tar": "^0.3.9",
|
|
33
|
+
"@gjsify/web-polyfills": "^0.3.9",
|
|
31
34
|
"@yarnpkg/esbuild-plugin-pnp": "^3.0.0-rc.15",
|
|
32
35
|
"cosmiconfig": "^9.0.1",
|
|
33
36
|
"esbuild": "^0.28.0",
|
package/src/actions/build.ts
CHANGED
|
@@ -14,6 +14,19 @@ import { chmod, readFile, writeFile } from "node:fs/promises";
|
|
|
14
14
|
|
|
15
15
|
const GJS_SHEBANG = "#!/usr/bin/env -S gjs -m\n";
|
|
16
16
|
|
|
17
|
+
/**
|
|
18
|
+
* `true` when `path` points at a location that's unsafe to use as a build
|
|
19
|
+
* outfile (would overwrite source). Currently catches:
|
|
20
|
+
* - any TypeScript extension (`.ts`, `.tsx`, `.mts`, `.cts`, `.mtsx`, `.ctsx`)
|
|
21
|
+
* - paths that live under a `src/` segment (relative or absolute)
|
|
22
|
+
*/
|
|
23
|
+
function isUnsafeDefaultOutput(path: string): boolean {
|
|
24
|
+
if (/\.[cm]?tsx?$/i.test(path)) return true;
|
|
25
|
+
const norm = path.replace(/\\/g, "/");
|
|
26
|
+
if (/(?:^|\/)src\//.test(norm)) return true;
|
|
27
|
+
return false;
|
|
28
|
+
}
|
|
29
|
+
|
|
17
30
|
/**
|
|
18
31
|
* Resolve the gjsify-flavoured PnP plugin. Anchors the relay on this file's
|
|
19
32
|
* URL so transitive `@gjsify/*` polyfills (reached via @gjsify/cli's deps on
|
|
@@ -253,10 +266,23 @@ export class BuildAction {
|
|
|
253
266
|
!esbuild?.outdir &&
|
|
254
267
|
(pgk?.main || pgk?.module)
|
|
255
268
|
) {
|
|
256
|
-
|
|
269
|
+
const candidate =
|
|
257
270
|
esbuild?.format === "cjs"
|
|
258
271
|
? pgk.main || pgk.module
|
|
259
272
|
: pgk.module || pgk.main;
|
|
273
|
+
if (candidate && isUnsafeDefaultOutput(candidate)) {
|
|
274
|
+
// `package.json#main`/`module` commonly points at a TypeScript
|
|
275
|
+
// source (e.g. `src/index.ts` for TS-direct workflows). Falling
|
|
276
|
+
// back to that value would have esbuild OVERWRITE the source.
|
|
277
|
+
// Surface a clear error and require an explicit outfile/outdir
|
|
278
|
+
// instead of silently destroying the user's code.
|
|
279
|
+
throw new Error(
|
|
280
|
+
`gjsify build: refusing to default --outfile to ${candidate} ` +
|
|
281
|
+
`(would overwrite a TypeScript source file). Pass --outfile/--outdir ` +
|
|
282
|
+
`explicitly, or set "gjsify.esbuild.outfile" in package.json.`,
|
|
283
|
+
);
|
|
284
|
+
}
|
|
285
|
+
esbuild.outfile = candidate;
|
|
260
286
|
}
|
|
261
287
|
|
|
262
288
|
const { consoleShim, globals } = this.configData;
|
package/src/commands/dlx.ts
CHANGED
|
@@ -30,6 +30,8 @@ interface DlxOptions {
|
|
|
30
30
|
binOrArg?: string;
|
|
31
31
|
extraArgs?: string[];
|
|
32
32
|
'cache-max-age': number;
|
|
33
|
+
reinstall: boolean;
|
|
34
|
+
frozen: boolean;
|
|
33
35
|
verbose: boolean;
|
|
34
36
|
registry?: string;
|
|
35
37
|
}
|
|
@@ -62,6 +64,18 @@ export const dlxCommand: Command<any, DlxOptions> = {
|
|
|
62
64
|
type: 'number',
|
|
63
65
|
default: 60 * 24 * 7,
|
|
64
66
|
})
|
|
67
|
+
.option('reinstall', {
|
|
68
|
+
description:
|
|
69
|
+
'Bypass the cache for this run (alias for --cache-max-age=0).',
|
|
70
|
+
type: 'boolean',
|
|
71
|
+
default: false,
|
|
72
|
+
})
|
|
73
|
+
.option('frozen', {
|
|
74
|
+
description:
|
|
75
|
+
'Use the project-local gjsify-lock.json verbatim — fail if missing or stale (no resolver pass).',
|
|
76
|
+
type: 'boolean',
|
|
77
|
+
default: false,
|
|
78
|
+
})
|
|
65
79
|
.option('verbose', {
|
|
66
80
|
description: 'Verbose logging (passes --loglevel verbose to npm).',
|
|
67
81
|
type: 'boolean',
|
|
@@ -74,10 +88,12 @@ export const dlxCommand: Command<any, DlxOptions> = {
|
|
|
74
88
|
handler: async (args) => {
|
|
75
89
|
const parsed = parseSpec(args.spec);
|
|
76
90
|
|
|
91
|
+
const cacheMaxAge = args.reinstall ? 0 : args['cache-max-age'];
|
|
77
92
|
const { pkgDir, cachedPkgName } = await ensurePkgDir(parsed, {
|
|
78
93
|
verbose: args.verbose,
|
|
79
94
|
registry: args.registry,
|
|
80
|
-
cacheMaxAge
|
|
95
|
+
cacheMaxAge,
|
|
96
|
+
frozen: args.frozen,
|
|
81
97
|
});
|
|
82
98
|
|
|
83
99
|
// Bin / args disambiguation:
|
|
@@ -106,6 +122,7 @@ interface EnsureOpts {
|
|
|
106
122
|
verbose: boolean;
|
|
107
123
|
registry?: string;
|
|
108
124
|
cacheMaxAge: number;
|
|
125
|
+
frozen: boolean;
|
|
109
126
|
}
|
|
110
127
|
|
|
111
128
|
async function ensurePkgDir(
|
|
@@ -133,6 +150,11 @@ async function ensurePkgDir(
|
|
|
133
150
|
specs: [parsed.spec],
|
|
134
151
|
verbose: opts.verbose,
|
|
135
152
|
registry: opts.registry,
|
|
153
|
+
// Cache-prepare dirs are scoped per cache key, so writing a lockfile
|
|
154
|
+
// there gives us reproducibility for repeated `gjsify dlx <pkg>` calls
|
|
155
|
+
// and lets `--frozen` short-circuit the resolver entirely.
|
|
156
|
+
lockfile: true,
|
|
157
|
+
frozen: opts.frozen,
|
|
136
158
|
});
|
|
137
159
|
|
|
138
160
|
const liveTarget = symlinkSwap(cacheDir, prepareDir);
|
package/src/config.ts
CHANGED
|
@@ -1,5 +1,24 @@
|
|
|
1
1
|
import { APP_NAME } from './constants.js';
|
|
2
2
|
import { cosmiconfig, type Options as LoadOptions } from 'cosmiconfig';
|
|
3
|
+
|
|
4
|
+
/** Default cosmiconfig search places for a given module name (matches cosmiconfig defaults). */
|
|
5
|
+
function defaultSearchPlaces(name: string): string[] {
|
|
6
|
+
return [
|
|
7
|
+
'package.json',
|
|
8
|
+
`.${name}rc`,
|
|
9
|
+
`.${name}rc.json`,
|
|
10
|
+
`.${name}rc.yaml`,
|
|
11
|
+
`.${name}rc.yml`,
|
|
12
|
+
`.${name}rc.js`,
|
|
13
|
+
`.${name}rc.ts`,
|
|
14
|
+
`.${name}rc.mjs`,
|
|
15
|
+
`.${name}rc.cjs`,
|
|
16
|
+
`${name}.config.js`,
|
|
17
|
+
`${name}.config.ts`,
|
|
18
|
+
`${name}.config.mjs`,
|
|
19
|
+
`${name}.config.cjs`,
|
|
20
|
+
];
|
|
21
|
+
}
|
|
3
22
|
import { readPackageJSON, resolvePackageJSON } from 'pkg-types';
|
|
4
23
|
import { getTsconfig } from 'get-tsconfig';
|
|
5
24
|
|
|
@@ -40,20 +59,48 @@ export class Config {
|
|
|
40
59
|
}
|
|
41
60
|
|
|
42
61
|
/** Loads gjsify config file, e.g `.gjsifyrc.js` */
|
|
43
|
-
private async load(searchFrom?: string) {
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
62
|
+
private async load(searchFrom?: string) {
|
|
63
|
+
// cosmiconfig's default first-match-wins behaviour silently drops one
|
|
64
|
+
// source when both `package.json#gjsify` and an explicit config file
|
|
65
|
+
// (`.gjsifyrc.js`, `gjsify.config.mjs`, ...) are present. Project hits
|
|
66
|
+
// this footgun: adding `gjsify.bin` to package.json (so `gjsify dlx`
|
|
67
|
+
// resolves the GJS bundle) silently disables `.gjsifyrc.js`. We
|
|
68
|
+
// explicitly load both sources and merge — package.json is the lower
|
|
69
|
+
// layer, the explicit file wins on key collisions.
|
|
70
|
+
//
|
|
71
|
+
// Run two searches:
|
|
72
|
+
// 1. Default (includes package.json) — for projects that only use
|
|
73
|
+
// package.json#gjsify and no separate file.
|
|
74
|
+
// 2. Explicit-file only (package.json excluded) — to find the
|
|
75
|
+
// `.gjsifyrc.*` / `gjsify.config.*` regardless of whether
|
|
76
|
+
// package.json#gjsify exists.
|
|
77
|
+
const fileExplorer = cosmiconfig(APP_NAME, {
|
|
78
|
+
...this.loadOptions,
|
|
79
|
+
searchPlaces: (this.loadOptions.searchPlaces ?? defaultSearchPlaces(APP_NAME))
|
|
80
|
+
.filter((p) => p !== 'package.json'),
|
|
81
|
+
});
|
|
82
|
+
const fileResult = await fileExplorer.search(searchFrom) as CosmiconfigResult<ConfigData> | null;
|
|
83
|
+
|
|
84
|
+
const merged: ConfigData = {};
|
|
85
|
+
try {
|
|
86
|
+
const pkg = await this.readPackageJSON(searchFrom) as { gjsify?: ConfigData };
|
|
87
|
+
if (isPlainObject(pkg?.gjsify)) merge(merged, pkg.gjsify);
|
|
88
|
+
} catch {
|
|
89
|
+
// Missing or unreadable package.json — skip.
|
|
50
90
|
}
|
|
91
|
+
if (fileResult?.config && isPlainObject(fileResult.config)) {
|
|
92
|
+
merge(merged, fileResult.config);
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
merged.esbuild ||= {};
|
|
96
|
+
merged.library ||= {};
|
|
97
|
+
merged.typescript ||= {};
|
|
51
98
|
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
99
|
+
return {
|
|
100
|
+
config: merged,
|
|
101
|
+
filepath: fileResult?.filepath ?? '',
|
|
102
|
+
isEmpty: !fileResult && Object.keys(merged).length === 3, // only the three default-empty objects
|
|
103
|
+
};
|
|
57
104
|
}
|
|
58
105
|
|
|
59
106
|
/** Loads package.json of the current project */
|
|
@@ -0,0 +1,363 @@
|
|
|
1
|
+
// Native install backend — GJS-runnable replacement for `npm install`.
|
|
2
|
+
//
|
|
3
|
+
// Pipeline: parse specs → resolve deps via @gjsify/npm-registry packuments and
|
|
4
|
+
// @gjsify/semver → download tarballs in parallel → extract into a flat
|
|
5
|
+
// node_modules/ via @gjsify/tar. Output layout matches `npm install` so the
|
|
6
|
+
// existing `runGjsBundle()` prebuild detection works without branching.
|
|
7
|
+
//
|
|
8
|
+
// Out of scope (deferred to Phase 4): lockfile, peerDependencies validation,
|
|
9
|
+
// lifecycle scripts, git/file specs.
|
|
10
|
+
|
|
11
|
+
import * as fs from "node:fs";
|
|
12
|
+
import * as path from "node:path";
|
|
13
|
+
import * as os from "node:os";
|
|
14
|
+
|
|
15
|
+
import {
|
|
16
|
+
Range,
|
|
17
|
+
SemVer,
|
|
18
|
+
maxSatisfying,
|
|
19
|
+
} from "@gjsify/semver";
|
|
20
|
+
import {
|
|
21
|
+
DEFAULT_REGISTRY,
|
|
22
|
+
fetchPackument,
|
|
23
|
+
fetchTarball,
|
|
24
|
+
parseNpmrc,
|
|
25
|
+
type NpmrcConfig,
|
|
26
|
+
type Packument,
|
|
27
|
+
type PackumentVersion,
|
|
28
|
+
} from "@gjsify/npm-registry";
|
|
29
|
+
import { extractTarball } from "@gjsify/tar";
|
|
30
|
+
|
|
31
|
+
import type { InstallOptions } from "./install-backend.ts";
|
|
32
|
+
|
|
33
|
+
// Worker-pool width for tarball downloads; overridable via env. The trailing
// `|| 8` guards against NaN / 0 from a malformed env value.
const DEFAULT_CONCURRENCY = Number(process.env.GJSIFY_INSTALL_CONCURRENCY ?? "8") || 8;

/** A user-supplied spec split into package name and semver range / dist-tag. */
interface ParsedSpec {
  name: string;
  range: string;
}

/** One fully resolved package: pinned version plus what's needed to fetch it. */
interface ResolvedNode {
  name: string;
  version: string;
  /** Absolute tarball URL taken from the packument's `dist.tarball`. */
  tarballUrl: string;
  /** Subresource-integrity string (e.g. `sha512-…`) when the registry provides one. */
  integrity?: string;
  dependencies: Record<string, string>;
  optionalDependencies: Record<string, string>;
  /** `package.json#bin` — string shorthand or name → relative-path map. */
  bin?: string | Record<string, string>;
}

const LOCKFILE_NAME = "gjsify-lock.json";
// Bump when the on-disk shape changes; readers reject other versions.
const LOCKFILE_VERSION = 1;

/** Pinned state for a single package inside the lockfile. */
interface LockfileEntry {
  version: string;
  /** Tarball URL the version was resolved to. */
  resolved: string;
  integrity?: string;
  dependencies?: Record<string, string>;
  bin?: string | Record<string, string>;
}

/** On-disk shape of `gjsify-lock.json`. */
interface Lockfile {
  lockfileVersion: number;
  /** Top-level specs used to seed this lockfile (preserves user intent). */
  requested: string[];
  /** Pinned packages keyed by name. */
  packages: Record<string, LockfileEntry>;
}
|
|
68
|
+
|
|
69
|
+
export async function installPackagesNative(opts: InstallOptions): Promise<void> {
|
|
70
|
+
if (opts.specs.length === 0) {
|
|
71
|
+
throw new Error("installPackagesNative: empty specs list");
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
fs.mkdirSync(opts.prefix, { recursive: true });
|
|
75
|
+
const npmrc = await loadNpmrc(opts);
|
|
76
|
+
const log = makeLogger(opts.verbose ?? false);
|
|
77
|
+
|
|
78
|
+
const lockfilePath = path.join(opts.prefix, LOCKFILE_NAME);
|
|
79
|
+
const existingLock = readLockfile(lockfilePath);
|
|
80
|
+
|
|
81
|
+
let nodes: ResolvedNode[];
|
|
82
|
+
if (existingLock && (opts.frozen || lockfileMatchesRequest(existingLock, opts.specs))) {
|
|
83
|
+
log("install: using lockfile (%d package(s))", Object.keys(existingLock.packages).length);
|
|
84
|
+
nodes = lockfileToNodes(existingLock);
|
|
85
|
+
} else {
|
|
86
|
+
if (opts.frozen) {
|
|
87
|
+
throw new Error(
|
|
88
|
+
`install: --frozen requested but ${lockfilePath} is missing or stale (specs differ)`,
|
|
89
|
+
);
|
|
90
|
+
}
|
|
91
|
+
log("install: resolving %d top-level spec(s) → %s", opts.specs.length, opts.prefix);
|
|
92
|
+
nodes = await resolveDeps(opts.specs, npmrc, log);
|
|
93
|
+
if (opts.lockfile) {
|
|
94
|
+
writeLockfile(lockfilePath, opts.specs, nodes);
|
|
95
|
+
log("install: wrote %s (%d entries)", LOCKFILE_NAME, nodes.length);
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
log("install: downloading %d tarball(s)", nodes.length);
|
|
100
|
+
await downloadAndExtractAll(nodes, opts.prefix, npmrc, log);
|
|
101
|
+
await linkBins(nodes, opts.prefix, log);
|
|
102
|
+
log("install: done");
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
async function resolveDeps(
|
|
106
|
+
specs: string[],
|
|
107
|
+
npmrc: NpmrcConfig,
|
|
108
|
+
log: Logger,
|
|
109
|
+
): Promise<ResolvedNode[]> {
|
|
110
|
+
const packumentCache = new Map<string, Promise<Packument>>();
|
|
111
|
+
const fetchPkg = (name: string): Promise<Packument> => {
|
|
112
|
+
const cached = packumentCache.get(name);
|
|
113
|
+
if (cached) return cached;
|
|
114
|
+
const fresh = fetchPackument(name, { npmrc });
|
|
115
|
+
packumentCache.set(name, fresh);
|
|
116
|
+
return fresh;
|
|
117
|
+
};
|
|
118
|
+
|
|
119
|
+
const resolved = new Map<string, ResolvedNode>();
|
|
120
|
+
const queue: ParsedSpec[] = specs.map(parseSpec);
|
|
121
|
+
|
|
122
|
+
while (queue.length > 0) {
|
|
123
|
+
const spec = queue.shift() as ParsedSpec;
|
|
124
|
+
if (resolved.has(spec.name)) {
|
|
125
|
+
// Single-version-per-name policy (npm v6 semantics). Phase 4 v2
|
|
126
|
+
// (when peer-dep validation lands) revisits this for duplication.
|
|
127
|
+
continue;
|
|
128
|
+
}
|
|
129
|
+
const packument = await fetchPkg(spec.name);
|
|
130
|
+
const version = pickVersion(packument, spec.range);
|
|
131
|
+
if (!version) {
|
|
132
|
+
throw new Error(`No version of ${spec.name} satisfies ${spec.range}`);
|
|
133
|
+
}
|
|
134
|
+
const v = packument.versions[version];
|
|
135
|
+
if (!v) {
|
|
136
|
+
throw new Error(
|
|
137
|
+
`Packument for ${spec.name} promised ${version} but no entry exists`,
|
|
138
|
+
);
|
|
139
|
+
}
|
|
140
|
+
const node: ResolvedNode = {
|
|
141
|
+
name: spec.name,
|
|
142
|
+
version,
|
|
143
|
+
tarballUrl: v.dist.tarball,
|
|
144
|
+
integrity: v.dist.integrity,
|
|
145
|
+
dependencies: v.dependencies ?? {},
|
|
146
|
+
optionalDependencies: v.optionalDependencies ?? {},
|
|
147
|
+
bin: v.bin,
|
|
148
|
+
};
|
|
149
|
+
resolved.set(spec.name, node);
|
|
150
|
+
log("resolve: %s@%s ← %s", spec.name, version, spec.range);
|
|
151
|
+
|
|
152
|
+
for (const [depName, depRange] of Object.entries(node.dependencies)) {
|
|
153
|
+
if (!resolved.has(depName)) queue.push({ name: depName, range: depRange });
|
|
154
|
+
}
|
|
155
|
+
for (const [depName, depRange] of Object.entries(node.optionalDependencies)) {
|
|
156
|
+
if (!resolved.has(depName)) queue.push({ name: depName, range: depRange });
|
|
157
|
+
}
|
|
158
|
+
}
|
|
159
|
+
return Array.from(resolved.values());
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
function readLockfile(lockfilePath: string): Lockfile | null {
|
|
163
|
+
if (!fs.existsSync(lockfilePath)) return null;
|
|
164
|
+
try {
|
|
165
|
+
const parsed = JSON.parse(fs.readFileSync(lockfilePath, "utf-8")) as Lockfile;
|
|
166
|
+
if (parsed.lockfileVersion !== LOCKFILE_VERSION) return null;
|
|
167
|
+
if (!parsed.packages || typeof parsed.packages !== "object") return null;
|
|
168
|
+
return parsed;
|
|
169
|
+
} catch {
|
|
170
|
+
return null;
|
|
171
|
+
}
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
function writeLockfile(lockfilePath: string, specs: string[], nodes: ResolvedNode[]): void {
|
|
175
|
+
const packages: Record<string, LockfileEntry> = {};
|
|
176
|
+
// Sort for deterministic output (diff-friendly).
|
|
177
|
+
const sorted = [...nodes].sort((a, b) => (a.name < b.name ? -1 : a.name > b.name ? 1 : 0));
|
|
178
|
+
for (const node of sorted) {
|
|
179
|
+
packages[node.name] = {
|
|
180
|
+
version: node.version,
|
|
181
|
+
resolved: node.tarballUrl,
|
|
182
|
+
integrity: node.integrity,
|
|
183
|
+
dependencies:
|
|
184
|
+
Object.keys(node.dependencies).length > 0 ? node.dependencies : undefined,
|
|
185
|
+
bin: node.bin,
|
|
186
|
+
};
|
|
187
|
+
}
|
|
188
|
+
const lockfile: Lockfile = {
|
|
189
|
+
lockfileVersion: LOCKFILE_VERSION,
|
|
190
|
+
requested: [...specs],
|
|
191
|
+
packages,
|
|
192
|
+
};
|
|
193
|
+
fs.writeFileSync(lockfilePath, JSON.stringify(lockfile, null, 2) + "\n");
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
function lockfileToNodes(lockfile: Lockfile): ResolvedNode[] {
|
|
197
|
+
return Object.entries(lockfile.packages).map(([name, entry]) => ({
|
|
198
|
+
name,
|
|
199
|
+
version: entry.version,
|
|
200
|
+
tarballUrl: entry.resolved,
|
|
201
|
+
integrity: entry.integrity,
|
|
202
|
+
dependencies: entry.dependencies ?? {},
|
|
203
|
+
optionalDependencies: {},
|
|
204
|
+
bin: entry.bin,
|
|
205
|
+
}));
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
function lockfileMatchesRequest(lockfile: Lockfile, specs: string[]): boolean {
|
|
209
|
+
if (lockfile.requested.length !== specs.length) return false;
|
|
210
|
+
const a = [...lockfile.requested].sort();
|
|
211
|
+
const b = [...specs].sort();
|
|
212
|
+
return a.every((v, i) => v === b[i]);
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
function parseSpec(raw: string): ParsedSpec {
|
|
216
|
+
if (raw.startsWith("@")) {
|
|
217
|
+
const slash = raw.indexOf("/");
|
|
218
|
+
if (slash < 0) throw new Error(`Invalid spec (scoped name without slash): ${raw}`);
|
|
219
|
+
const at = raw.indexOf("@", slash);
|
|
220
|
+
if (at < 0) return { name: raw, range: "*" };
|
|
221
|
+
return { name: raw.slice(0, at), range: raw.slice(at + 1) || "*" };
|
|
222
|
+
}
|
|
223
|
+
const at = raw.indexOf("@");
|
|
224
|
+
if (at < 0) return { name: raw, range: "*" };
|
|
225
|
+
return { name: raw.slice(0, at), range: raw.slice(at + 1) || "*" };
|
|
226
|
+
}
|
|
227
|
+
|
|
228
|
+
function pickVersion(packument: Packument, range: string): string | null {
|
|
229
|
+
// dist-tag fast path: `latest`, `next`, ...
|
|
230
|
+
if (packument["dist-tags"][range]) return packument["dist-tags"][range];
|
|
231
|
+
|
|
232
|
+
// Validate range early so a typo fails loudly.
|
|
233
|
+
let parsedRange: Range;
|
|
234
|
+
try {
|
|
235
|
+
parsedRange = new Range(range);
|
|
236
|
+
} catch {
|
|
237
|
+
throw new Error(`Invalid version range for ${packument.name}: ${range}`);
|
|
238
|
+
}
|
|
239
|
+
|
|
240
|
+
const versions = Object.keys(packument.versions).filter((v) => {
|
|
241
|
+
try {
|
|
242
|
+
new SemVer(v);
|
|
243
|
+
return true;
|
|
244
|
+
} catch {
|
|
245
|
+
return false;
|
|
246
|
+
}
|
|
247
|
+
});
|
|
248
|
+
return maxSatisfying(versions, parsedRange);
|
|
249
|
+
}
|
|
250
|
+
|
|
251
|
+
async function downloadAndExtractAll(
|
|
252
|
+
nodes: ResolvedNode[],
|
|
253
|
+
prefix: string,
|
|
254
|
+
npmrc: NpmrcConfig,
|
|
255
|
+
log: Logger,
|
|
256
|
+
): Promise<void> {
|
|
257
|
+
const queue = [...nodes];
|
|
258
|
+
const workers: Array<Promise<void>> = [];
|
|
259
|
+
const concurrency = Math.max(1, Math.min(DEFAULT_CONCURRENCY, queue.length));
|
|
260
|
+
for (let i = 0; i < concurrency; i++) {
|
|
261
|
+
workers.push(worker());
|
|
262
|
+
}
|
|
263
|
+
await Promise.all(workers);
|
|
264
|
+
|
|
265
|
+
async function worker(): Promise<void> {
|
|
266
|
+
while (queue.length > 0) {
|
|
267
|
+
const node = queue.shift();
|
|
268
|
+
if (!node) return;
|
|
269
|
+
const dest = path.join(prefix, "node_modules", node.name);
|
|
270
|
+
log("fetch: %s@%s ← %s", node.name, node.version, node.tarballUrl);
|
|
271
|
+
const bytes = await fetchTarball(node.tarballUrl, {
|
|
272
|
+
npmrc,
|
|
273
|
+
integrity: node.integrity,
|
|
274
|
+
});
|
|
275
|
+
fs.rmSync(dest, { recursive: true, force: true });
|
|
276
|
+
fs.mkdirSync(dest, { recursive: true });
|
|
277
|
+
await extractTarball(bytes, dest);
|
|
278
|
+
}
|
|
279
|
+
}
|
|
280
|
+
}
|
|
281
|
+
|
|
282
|
+
async function linkBins(nodes: ResolvedNode[], prefix: string, log: Logger): Promise<void> {
|
|
283
|
+
const binDir = path.join(prefix, "node_modules", ".bin");
|
|
284
|
+
let created = 0;
|
|
285
|
+
for (const node of nodes) {
|
|
286
|
+
if (!node.bin) continue;
|
|
287
|
+
const map = normalizeBin(node.name, node.bin);
|
|
288
|
+
if (map.size === 0) continue;
|
|
289
|
+
fs.mkdirSync(binDir, { recursive: true });
|
|
290
|
+
for (const [binName, binTarget] of map) {
|
|
291
|
+
const targetAbs = path.join(prefix, "node_modules", node.name, binTarget);
|
|
292
|
+
if (!fs.existsSync(targetAbs)) continue;
|
|
293
|
+
try {
|
|
294
|
+
fs.chmodSync(targetAbs, 0o755);
|
|
295
|
+
} catch {
|
|
296
|
+
/* best effort */
|
|
297
|
+
}
|
|
298
|
+
const linkPath = path.join(binDir, binName);
|
|
299
|
+
fs.rmSync(linkPath, { force: true });
|
|
300
|
+
const rel = path.relative(binDir, targetAbs);
|
|
301
|
+
try {
|
|
302
|
+
fs.symlinkSync(rel, linkPath);
|
|
303
|
+
created++;
|
|
304
|
+
} catch {
|
|
305
|
+
fs.copyFileSync(targetAbs, linkPath);
|
|
306
|
+
fs.chmodSync(linkPath, 0o755);
|
|
307
|
+
created++;
|
|
308
|
+
}
|
|
309
|
+
}
|
|
310
|
+
}
|
|
311
|
+
if (created > 0) log("bin: linked %d entry(ies) under .bin/", created);
|
|
312
|
+
}
|
|
313
|
+
|
|
314
|
+
function normalizeBin(pkgName: string, bin: string | Record<string, string>): Map<string, string> {
|
|
315
|
+
const out = new Map<string, string>();
|
|
316
|
+
if (typeof bin === "string") {
|
|
317
|
+
// String form is shorthand for `{ <last-segment-of-pkgName>: <bin> }`.
|
|
318
|
+
const baseName = pkgName.startsWith("@")
|
|
319
|
+
? pkgName.slice(pkgName.indexOf("/") + 1)
|
|
320
|
+
: pkgName;
|
|
321
|
+
out.set(baseName, bin);
|
|
322
|
+
return out;
|
|
323
|
+
}
|
|
324
|
+
for (const [k, v] of Object.entries(bin)) out.set(k, v);
|
|
325
|
+
return out;
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
async function loadNpmrc(opts: InstallOptions): Promise<NpmrcConfig> {
|
|
329
|
+
const home = os.homedir();
|
|
330
|
+
const homeRc = path.join(home, ".npmrc");
|
|
331
|
+
let parsed: NpmrcConfig = {
|
|
332
|
+
registry: opts.registry ?? DEFAULT_REGISTRY,
|
|
333
|
+
scopes: {},
|
|
334
|
+
authTokens: {},
|
|
335
|
+
basicAuth: {},
|
|
336
|
+
};
|
|
337
|
+
if (fs.existsSync(homeRc)) {
|
|
338
|
+
try {
|
|
339
|
+
parsed = parseNpmrc(fs.readFileSync(homeRc, "utf-8"));
|
|
340
|
+
} catch (e) {
|
|
341
|
+
// Don't let a busted .npmrc prevent installs from anonymous registries.
|
|
342
|
+
console.warn(`gjsify install: ignoring malformed ${homeRc}: ${(e as Error).message}`);
|
|
343
|
+
}
|
|
344
|
+
}
|
|
345
|
+
if (opts.registry) {
|
|
346
|
+
parsed.registry = opts.registry;
|
|
347
|
+
}
|
|
348
|
+
return parsed;
|
|
349
|
+
}
|
|
350
|
+
|
|
351
|
+
type Logger = (fmt: string, ...args: unknown[]) => void;
|
|
352
|
+
|
|
353
|
+
function makeLogger(verbose: boolean): Logger {
|
|
354
|
+
if (!verbose) {
|
|
355
|
+
return () => {
|
|
356
|
+
/* silent unless verbose */
|
|
357
|
+
};
|
|
358
|
+
}
|
|
359
|
+
return (fmt, ...args) => {
|
|
360
|
+
const msg = fmt.replace(/%s|%d/g, () => String(args.shift()));
|
|
361
|
+
process.stderr.write(`gjsify install: ${msg}\n`);
|
|
362
|
+
};
|
|
363
|
+
}
|
|
@@ -1,12 +1,14 @@
|
|
|
1
|
-
// Install backend abstraction
|
|
1
|
+
// Install backend abstraction.
|
|
2
2
|
//
|
|
3
|
-
//
|
|
4
|
-
//
|
|
5
|
-
//
|
|
3
|
+
// Default: native backend (resolves packuments via @gjsify/npm-registry,
|
|
4
|
+
// extracts tarballs via @gjsify/tar — no Node, no npm required at runtime).
|
|
5
|
+
// Fallback: `npm install --no-package-lock --no-audit --no-fund --prefix <dir> <specs...>`,
|
|
6
|
+
// for parity with the legacy code path. Switched via
|
|
7
|
+
// `GJSIFY_INSTALL_BACKEND=native|npm`.
|
|
6
8
|
//
|
|
7
|
-
//
|
|
8
|
-
//
|
|
9
|
-
//
|
|
9
|
+
// `gjsify dlx` uses this seam — installing under a cache prefix, with no
|
|
10
|
+
// package.json update to the user's project. The native backend matches that
|
|
11
|
+
// workflow without ever shelling out to Node.
|
|
10
12
|
//
|
|
11
13
|
// `--no-package-lock` keeps the cache prepare dir hermetic; the cache key
|
|
12
14
|
// already covers reproducibility. `--no-audit --no-fund` cuts ~5s off cold runs.
|
|
@@ -24,17 +26,24 @@ export interface InstallOptions {
|
|
|
24
26
|
verbose?: boolean;
|
|
25
27
|
/** Optional registry override (writes a temp `.npmrc` in prefix). */
|
|
26
28
|
registry?: string;
|
|
29
|
+
/**
|
|
30
|
+
* Native backend only: write `<prefix>/gjsify-lock.json` after a successful
|
|
31
|
+
* resolve. When the file exists on next call AND `frozen: true`, the
|
|
32
|
+
* resolver is skipped and downloads use the pinned tarball URL + integrity.
|
|
33
|
+
*/
|
|
34
|
+
lockfile?: boolean;
|
|
35
|
+
/** Use `<prefix>/gjsify-lock.json` as the source of truth — fail if missing. */
|
|
36
|
+
frozen?: boolean;
|
|
27
37
|
}
|
|
28
38
|
|
|
29
|
-
const DEFAULT_BACKEND = process.env.GJSIFY_INSTALL_BACKEND ?? '
|
|
39
|
+
const DEFAULT_BACKEND = process.env.GJSIFY_INSTALL_BACKEND ?? 'native';
|
|
30
40
|
|
|
31
41
|
export async function installPackages(opts: InstallOptions): Promise<void> {
|
|
32
|
-
if (DEFAULT_BACKEND === '
|
|
33
|
-
|
|
34
|
-
'GJSIFY_INSTALL_BACKEND=native is reserved for the Phase 4 GJS-native resolver — not yet implemented.',
|
|
35
|
-
);
|
|
42
|
+
if (DEFAULT_BACKEND === 'npm') {
|
|
43
|
+
return installViaNpm(opts);
|
|
36
44
|
}
|
|
37
|
-
|
|
45
|
+
const { installPackagesNative } = await import('./install-backend-native.js');
|
|
46
|
+
return installPackagesNative(opts);
|
|
38
47
|
}
|
|
39
48
|
|
|
40
49
|
async function installViaNpm({ prefix, specs, verbose, registry }: InstallOptions): Promise<void> {
|