@yao-pkg/pkg 5.16.1 → 6.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,7 +1,3 @@
1
- **Disclaimer: `pkg` was created for use within containers and is not intended for use in serverless environments. For those using Vercel, this means that there is no requirement to use `pkg` in your projects as the benefits it provides are not applicable to the platform.**
2
-
3
- ![](https://res.cloudinary.com/zeit-inc/image/upload/v1509936789/repositories/pkg/pkg-repo-banner-new.png)
4
-
5
1
  [![Build Status](https://github.com/yao-pkg/pkg/actions/workflows/ci.yml/badge.svg)](https://github.com/yao-pkg/pkg/actions/workflows/ci.yml)
6
2
 
7
3
  This command line interface enables you to package your Node.js project into an executable that can be run even on devices without Node.js installed.
@@ -27,7 +23,7 @@ npm install -g @yao-pkg/pkg
27
23
  After installing it, run `pkg --help` without arguments to see list of options:
28
24
 
29
25
  ```console
30
- pkg [options] <input>
26
+ pkg [options] <input>
31
27
 
32
28
  Options:
33
29
 
@@ -44,30 +40,32 @@ pkg [options] <input>
44
40
  --public-packages force specified packages to be considered public
45
41
  --no-bytecode skip bytecode generation and include source files as plain js
46
42
  --no-native-build skip native addons build
47
- --no-signature skip signature of the final executable on macos
48
43
  --no-dict comma-separated list of packages names to ignore dictionaries. Use --no-dict * to disable all dictionaries
49
44
  -C, --compress [default=None] compression algorithm = Brotli or GZip
45
+ --sea (Experimental) compile given file using node's SEA feature. Requires node v20.0.0 or higher and only a single file is supported
50
46
 
51
47
  Examples:
52
48
 
53
- - Makes executables for Linux, macOS and Windows
49
+ Makes executables for Linux, macOS and Windows
54
50
  $ pkg index.js
55
- - Takes package.json from cwd and follows 'bin' entry
51
+ Takes package.json from cwd and follows 'bin' entry
56
52
  $ pkg .
57
- - Makes executable for particular target machine
58
- $ pkg -t node16-win-arm64 index.js
59
- - Makes executables for target machines of your choice
60
- $ pkg -t node16-linux,node18-linux,node16-win index.js
61
- - Bakes '--expose-gc' and '--max-heap-size=34' into executable
53
+ Makes executable for particular target machine
54
+ $ pkg -t node14-win-arm64 index.js
55
+ Makes executables for target machines of your choice
56
+ $ pkg -t node16-linux,node18-linux,node18-win index.js
57
+ Bakes '--expose-gc' and '--max-heap-size=34' into executable
62
58
  $ pkg --options "expose-gc,max-heap-size=34" index.js
63
- - Consider packageA and packageB to be public
59
+ Consider packageA and packageB to be public
64
60
  $ pkg --public-packages "packageA,packageB" index.js
65
- - Consider all packages to be public
61
+ Consider all packages to be public
66
62
  $ pkg --public-packages "*" index.js
67
- - Bakes '--expose-gc' into executable
63
+ Bakes '--expose-gc' into executable
68
64
  $ pkg --options expose-gc index.js
69
- - reduce size of the data packed inside the executable with GZip
65
+ reduce size of the data packed inside the executable with GZip
70
66
  $ pkg --compress GZip index.js
67
+ – compile the file using node's SEA feature. Creates executables for Linux, macOS and Windows
68
+ $ pkg --sea index.js
71
69
  ```
72
70
 
73
71
  The entrypoint of your project is a mandatory CLI argument. It may be:
package/lib-es5/help.js CHANGED
@@ -23,6 +23,7 @@ function help() {
23
23
  --no-native-build skip native addons build
24
24
  --no-dict comma-separated list of packages names to ignore dictionaries. Use --no-dict * to disable all dictionaries
25
25
  -C, --compress [default=None] compression algorithm = Brotli or GZip
26
+ --sea (Experimental) compile given file using node's SEA feature. Requires node v20.0.0 or higher and only a single file is supported
26
27
 
27
28
  ${colors_1.pc.dim('Examples:')}
28
29
 
@@ -44,6 +45,8 @@ function help() {
44
45
  ${colors_1.pc.cyan('$ pkg --options expose-gc index.js')}
45
46
  ${colors_1.pc.gray('–')} reduce size of the data packed inside the executable with GZip
46
47
  ${colors_1.pc.cyan('$ pkg --compress GZip index.js')}
48
+ ${colors_1.pc.gray('–')} compile the file using node's SEA feature. Creates executables for Linux, macOS and Windows
49
+ ${colors_1.pc.cyan('$ pkg --sea index.js')}
47
50
  `);
48
51
  }
49
52
  exports.default = help;
package/lib-es5/index.js CHANGED
@@ -23,6 +23,7 @@ const walker_1 = __importDefault(require("./walker"));
23
23
  const compress_type_1 = require("./compress_type");
24
24
  const mach_o_1 = require("./mach-o");
25
25
  const options_1 = __importDefault(require("./options"));
26
+ const sea_1 = __importDefault(require("./sea"));
26
27
  const { version } = JSON.parse((0, fs_1.readFileSync)(path_1.default.join(__dirname, '../package.json'), 'utf-8'));
27
28
  function isConfiguration(file) {
28
29
  return (0, common_1.isPackageJson)(file) || file.endsWith('.config.json');
@@ -163,6 +164,7 @@ async function exec(argv2) {
163
164
  'v',
164
165
  'version',
165
166
  'signature',
167
+ 'sea',
166
168
  ],
167
169
  string: [
168
170
  '_',
@@ -396,6 +398,13 @@ async function exec(argv2) {
396
398
  }
397
399
  }
398
400
  }
401
+ if (argv.sea) {
402
+ await (0, sea_1.default)(inputFin, {
403
+ targets,
404
+ signature: argv.signature,
405
+ });
406
+ return;
407
+ }
399
408
  // fetch targets
400
409
  const { bytecode } = argv;
401
410
  const nativeBuild = argv['native-build'];
package/lib-es5/mach-o.js CHANGED
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.signMachOExecutable = exports.patchMachOExecutable = void 0;
3
+ exports.signMachOExecutable = exports.removeMachOExecutableSignature = exports.patchMachOExecutable = void 0;
4
4
  const child_process_1 = require("child_process");
5
5
  function parseCStr(buf) {
6
6
  for (let i = 0; i < buf.length; i += 1) {
@@ -28,6 +28,11 @@ function patchCommand(type, buf, file) {
28
28
  buf.writeUInt32LE(strsizePatched, 12);
29
29
  }
30
30
  }
31
+ /**
32
+ * It would be nice to explain the purpose of this patching function
33
+ * @param file
34
+ * @returns
35
+ */
31
36
  function patchMachOExecutable(file) {
32
37
  const align = 8;
33
38
  const hsize = 32;
@@ -58,4 +63,10 @@ function signMachOExecutable(executable) {
58
63
  }
59
64
  }
60
65
  exports.signMachOExecutable = signMachOExecutable;
66
+ function removeMachOExecutableSignature(executable) {
67
+ (0, child_process_1.execFileSync)('codesign', ['--remove-signature', executable], {
68
+ stdio: 'inherit',
69
+ });
70
+ }
71
+ exports.removeMachOExecutableSignature = removeMachOExecutableSignature;
61
72
  //# sourceMappingURL=mach-o.js.map
package/lib-es5/sea.js ADDED
@@ -0,0 +1,268 @@
1
+ "use strict";
2
+ var __importDefault = (this && this.__importDefault) || function (mod) {
3
+ return (mod && mod.__esModule) ? mod : { "default": mod };
4
+ };
5
+ Object.defineProperty(exports, "__esModule", { value: true });
6
+ const child_process_1 = require("child_process");
7
+ const util_1 = __importDefault(require("util"));
8
+ const path_1 = require("path");
9
+ const promises_1 = require("fs/promises");
10
+ const fs_1 = require("fs");
11
+ const promises_2 = require("stream/promises");
12
+ const crypto_1 = require("crypto");
13
+ const os_1 = require("os");
14
+ const unzipper_1 = __importDefault(require("unzipper"));
15
+ const tar_1 = require("tar");
16
+ const log_1 = require("./log");
17
+ const mach_o_1 = require("./mach-o");
18
+ const exec = util_1.default.promisify(child_process_1.exec);
19
+ /** Returns stat of path when it exists, false otherwise */
20
+ const exists = async (path) => {
21
+ try {
22
+ return await (0, promises_1.stat)(path);
23
+ }
24
+ catch (_a) {
25
+ return false;
26
+ }
27
+ };
28
+ const defaultSeaConfig = {
29
+ disableExperimentalSEAWarning: true,
30
+ useSnapshot: false,
31
+ useCodeCache: false,
32
+ };
33
+ /** Download a file from a given URL and save it to `filePath` */
34
+ async function downloadFile(url, filePath) {
35
+ const response = await fetch(url);
36
+ if (!response.ok || !response.body) {
37
+ throw new Error(`Failed to download file from ${url}`);
38
+ }
39
+ const fileStream = (0, fs_1.createWriteStream)(filePath);
40
+ return (0, promises_2.pipeline)(response.body, fileStream);
41
+ }
42
+ /** Extract node executable from the archive */
43
+ async function extract(os, archivePath) {
44
+ const nodeDir = (0, path_1.basename)(archivePath, os === 'win' ? '.zip' : '.tar.gz');
45
+ const archiveDir = (0, path_1.dirname)(archivePath);
46
+ let nodePath = '';
47
+ if (os === 'win') {
48
+ // use unzipper to extract the archive
49
+ const { files } = await unzipper_1.default.Open.file(archivePath);
50
+ const nodeBinPath = `${nodeDir}/node.exe`;
51
+ const nodeBin = files.find((file) => file.path === nodeBinPath);
52
+ if (!nodeBin) {
53
+ throw new Error('Node executable not found in the archive');
54
+ }
55
+ nodePath = (0, path_1.join)(archiveDir, `${nodeDir}.exe`);
56
+ // extract the node executable
57
+ await (0, promises_2.pipeline)(nodeBin.stream(), (0, fs_1.createWriteStream)(nodePath));
58
+ }
59
+ else {
60
+ const nodeBinPath = `${nodeDir}/bin/node`;
61
+ // use tar to extract the archive
62
+ await (0, tar_1.extract)({
63
+ file: archivePath,
64
+ cwd: archiveDir,
65
+ filter: (path) => path === nodeBinPath,
66
+ });
67
+ // check if the node executable exists
68
+ nodePath = (0, path_1.join)(archiveDir, nodeBinPath);
69
+ }
70
+ // check if the node executable exists
71
+ if (!(await exists(nodePath))) {
72
+ throw new Error('Node executable not found in the archive');
73
+ }
74
+ return nodePath;
75
+ }
76
+ /** Verify the checksum of downloaded NodeJS archive */
77
+ async function verifyChecksum(filePath, checksumUrl, fileName) {
78
+ var _a;
79
+ const response = await fetch(checksumUrl);
80
+ if (!response.ok) {
81
+ throw new Error(`Failed to download checksum file from ${checksumUrl}`);
82
+ }
83
+ const checksums = await response.text();
84
+ const expectedChecksum = (_a = checksums
85
+ .split('\n')
86
+ .find((line) => line.includes(fileName))) === null || _a === void 0 ? void 0 : _a.split(' ')[0];
87
+ if (!expectedChecksum) {
88
+ throw new Error(`Checksum for ${fileName} not found`);
89
+ }
90
+ const fileBuffer = await (0, promises_1.readFile)(filePath);
91
+ const hashSum = (0, crypto_1.createHash)('sha256');
92
+ hashSum.update(fileBuffer);
93
+ const actualChecksum = hashSum.digest('hex');
94
+ if (actualChecksum !== expectedChecksum) {
95
+ throw new Error(`Checksum verification failed for ${fileName}`);
96
+ }
97
+ }
98
+ /** Get the node os based on target platform */
99
+ function getNodeOs(platform) {
100
+ const allowedOSs = ['darwin', 'linux', 'win'];
101
+ const platformsMap = {
102
+ macos: 'darwin',
103
+ };
104
+ const validatedPlatform = platformsMap[platform] || platform;
105
+ if (!allowedOSs.includes(validatedPlatform)) {
106
+ throw new Error(`Unsupported OS: ${platform}`);
107
+ }
108
+ return validatedPlatform;
109
+ }
110
+ /** Get the node arch based on target arch */
111
+ function getNodeArch(arch) {
112
+ const allowedArchs = ['x64', 'arm64', 'armv7l', 'ppc64', 's390x'];
113
+ if (!allowedArchs.includes(arch)) {
114
+ throw new Error(`Unsupported architecture: ${arch}`);
115
+ }
116
+ return arch;
117
+ }
118
+ /** Get latest node version based on the provided partial version */
119
+ async function getNodeVersion(nodeVersion) {
120
+ // validate nodeVersion using regex. Allowed formats: 16, 16.0, 16.0.0
121
+ const regex = /^\d{1,2}(\.\d{1,2}){0,2}$/;
122
+ if (!regex.test(nodeVersion)) {
123
+ throw new Error('Invalid node version format');
124
+ }
125
+ const parts = nodeVersion.split('.');
126
+ if (parts.length > 3) {
127
+ throw new Error('Invalid node version format');
128
+ }
129
+ if (parts.length === 3) {
130
+ return nodeVersion;
131
+ }
132
+ const response = await fetch('https://nodejs.org/dist/index.json');
133
+ if (!response.ok) {
134
+ throw new Error('Failed to fetch node versions');
135
+ }
136
+ const versions = await response.json();
137
+ const latestVersion = versions
138
+ .map((v) => v.version)
139
+ .find((v) => v.startsWith(`v${nodeVersion}`));
140
+ if (!latestVersion) {
141
+ throw new Error(`Node version ${nodeVersion} not found`);
142
+ }
143
+ return latestVersion;
144
+ }
145
+ /** Fetch, validate and extract nodejs binary. Returns a path to it */
146
+ async function getNodejsExecutable(target, opts) {
147
+ if (opts.nodePath) {
148
+ // check if the nodePath exists
149
+ if (!(await exists(opts.nodePath))) {
150
+ throw new Error(`Provided node executable path "${opts.nodePath}" does not exist`);
151
+ }
152
+ return opts.nodePath;
153
+ }
154
+ if (opts.useLocalNode) {
155
+ return process.execPath;
156
+ }
157
+ const nodeVersion = await getNodeVersion(target.nodeRange.replace('node', ''));
158
+ const os = getNodeOs(target.platform);
159
+ const arch = getNodeArch(target.arch);
160
+ const fileName = `node-${nodeVersion}-${os}-${arch}.${os === 'win' ? 'zip' : 'tar.gz'}`;
161
+ const url = `https://nodejs.org/dist/${nodeVersion}/${fileName}`;
162
+ const checksumUrl = `https://nodejs.org/dist/${nodeVersion}/SHASUMS256.txt`;
163
+ const downloadDir = (0, path_1.join)((0, os_1.homedir)(), '.pkg-cache', 'sea');
164
+ // Ensure the download directory exists
165
+ if (!(await exists(downloadDir))) {
166
+ await (0, promises_1.mkdir)(downloadDir, { recursive: true });
167
+ }
168
+ const filePath = (0, path_1.join)(downloadDir, fileName);
169
+ // skip download if file exists
170
+ if (!(await exists(filePath))) {
171
+ log_1.log.info(`Downloading nodejs executable from ${url}...`);
172
+ await downloadFile(url, filePath);
173
+ }
174
+ log_1.log.info(`Verifying checksum of ${fileName}`);
175
+ await verifyChecksum(filePath, checksumUrl, fileName);
176
+ log_1.log.info(`Extracting node binary from ${fileName}`);
177
+ const nodePath = await extract(os, filePath);
178
+ return nodePath;
179
+ }
180
+ /** Bake the blob into the executable */
181
+ async function bake(nodePath, target, blobPath) {
182
+ const outPath = (0, path_1.resolve)(process.cwd(), target.output);
183
+ log_1.log.info(`Creating executable for ${target.nodeRange}-${target.platform}-${target.arch}....`);
184
+ if (!(await exists((0, path_1.dirname)(outPath)))) {
185
+ log_1.log.error(`Output directory "${(0, path_1.dirname)(outPath)}" does not exist`);
186
+ return;
187
+ }
188
+ // check if executable_path exists
189
+ if (await exists(outPath)) {
190
+ log_1.log.warn(`Executable ${outPath} already exists, will be overwritten`);
191
+ }
192
+ // copy the executable as the output executable
193
+ await (0, promises_1.copyFile)(nodePath, outPath);
194
+ log_1.log.info(`Injecting the blob into ${outPath}...`);
195
+ if (target.platform === 'macos') {
196
+ (0, mach_o_1.removeMachOExecutableSignature)(outPath);
197
+ await exec(`npx postject "${outPath}" NODE_SEA_BLOB "${blobPath}" --sentinel-fuse NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2 --macho-segment-name NODE_SEA`);
198
+ }
199
+ else {
200
+ await exec(`npx postject "${outPath}" NODE_SEA_BLOB "${blobPath}" --sentinel-fuse NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2`);
201
+ }
202
+ }
203
+ /** Create NodeJS executable using sea */
204
+ async function sea(entryPoint, opts) {
205
+ entryPoint = (0, path_1.resolve)(process.cwd(), entryPoint);
206
+ if (!(await exists(entryPoint))) {
207
+ throw new Error(`Entrypoint path "${entryPoint}" does not exist`);
208
+ }
209
+ const nodeMajor = parseInt(process.version.slice(1).split('.')[0], 10);
210
+ // check node version, needs to be at least 20.0.0
211
+ if (nodeMajor < 20) {
212
+ throw new Error(`SEA support requires at least node v20.0.0, actual node version is ${process.version}`);
213
+ }
214
+ const nodePaths = await Promise.all(opts.targets.map((target) => getNodejsExecutable(target, opts)));
215
+ // create a temporary directory for the processing work
216
+ const tmpDir = (0, path_1.join)((0, os_1.tmpdir)(), 'pkg-sea', `${Date.now()}`);
217
+ await (0, promises_1.mkdir)(tmpDir, { recursive: true });
218
+ try {
219
+ // change working directory to the temp directory
220
+ process.chdir(tmpDir);
221
+ // docs: https://nodejs.org/api/single-executable-applications.html
222
+ const blobPath = (0, path_1.join)(tmpDir, 'sea-prep.blob');
223
+ const seaConfigFilePath = (0, path_1.join)(tmpDir, 'sea-config.json');
224
+ const seaConfig = Object.assign({ main: entryPoint, output: blobPath }, Object.assign(Object.assign({}, defaultSeaConfig), (opts.seaConfig || {})));
225
+ log_1.log.info('Creating sea-config.json file...');
226
+ await (0, promises_1.writeFile)(seaConfigFilePath, JSON.stringify(seaConfig));
227
+ log_1.log.info('Generating the blob...');
228
+ await exec(`node --experimental-sea-config "${seaConfigFilePath}"`);
229
+ await Promise.allSettled(nodePaths.map(async (nodePath, i) => {
230
+ const target = opts.targets[i];
231
+ await bake(nodePath, target, blobPath);
232
+ const output = target.output;
233
+ if (opts.signature && target.platform === 'macos') {
234
+ const buf = (0, mach_o_1.patchMachOExecutable)(await (0, promises_1.readFile)(output));
235
+ await (0, promises_1.writeFile)(output, buf);
236
+ try {
237
+ // sign executable ad-hoc to workaround the new mandatory signing requirement
238
+ // users can always replace the signature if necessary
239
+ (0, mach_o_1.signMachOExecutable)(output);
240
+ }
241
+ catch (_a) {
242
+ if (target.arch === 'arm64') {
243
+ log_1.log.warn('Unable to sign the macOS executable', [
244
+ 'Due to the mandatory code signing requirement, before the',
245
+ 'executable is distributed to end users, it must be signed.',
246
+ 'Otherwise, it will be immediately killed by kernel on launch.',
247
+ 'An ad-hoc signature is sufficient.',
248
+ 'To do that, run pkg on a Mac, or transfer the executable to a Mac',
249
+ 'and run "codesign --sign - <executable>", or (if you use Linux)',
250
+ 'install "ldid" utility to PATH and then run pkg again',
251
+ ]);
252
+ }
253
+ }
254
+ }
255
+ }));
256
+ }
257
+ catch (error) {
258
+ throw new Error(`Error while creating the executable: ${error}`);
259
+ }
260
+ finally {
261
+ // cleanup the temp directory
262
+ await (0, promises_1.rm)(tmpDir, { recursive: true }).catch(() => {
263
+ log_1.log.warn(`Failed to cleanup the temp directory ${tmpDir}`);
264
+ });
265
+ }
266
+ }
267
+ exports.default = sea;
268
+ //# sourceMappingURL=sea.js.map
package/lib-es5/walker.js CHANGED
@@ -235,13 +235,30 @@ function stepDetect(record, marker, derivatives) {
235
235
  throw (0, log_1.wasReported)(error.message);
236
236
  }
237
237
  }
238
- function findCommonJunctionPoint(file, realFile) {
238
+ /**
239
+ * Find a common junction point between a symlink and the real file path.
240
+ *
241
+ * @param {string} file The file path, including symlink(s).
242
+ * @param {string} realFile The real path to the file.
243
+ *
244
+ * @throws {Error} If no common junction point is found prior to hitting the
245
+ * filesystem root.
246
+ */
247
+ async function findCommonJunctionPoint(file, realFile) {
239
248
  // find common denominator => where the link changes
240
- while ((0, common_1.toNormalizedRealPath)(path_1.default.dirname(file)) === path_1.default.dirname(realFile)) {
249
+ while (true) {
250
+ const stats = await promises_1.default.lstat(file);
251
+ if (stats.isSymbolicLink()) {
252
+ return { file, realFile };
253
+ }
241
254
  file = path_1.default.dirname(file);
242
255
  realFile = path_1.default.dirname(realFile);
256
+ // If the directory is /, break out of the loop and log an error.
257
+ if (file === path_1.default.parse(file).root ||
258
+ realFile === path_1.default.parse(realFile).root) {
259
+ throw new Error('Reached root directory without finding a common junction point');
260
+ }
243
261
  }
244
- return { file, realFile };
245
262
  }
246
263
  class Walker {
247
264
  constructor() {
@@ -286,8 +303,8 @@ class Walker {
286
303
  log_1.log.debug(`${what} ${task.file} is added to queue.`);
287
304
  }
288
305
  }
289
- appendSymlink(file, realFile) {
290
- const a = findCommonJunctionPoint(file, realFile);
306
+ async appendSymlink(file, realFile) {
307
+ const a = await findCommonJunctionPoint(file, realFile);
291
308
  file = a.file;
292
309
  realFile = a.realFile;
293
310
  if (!this.symLinks[file]) {
@@ -337,7 +354,7 @@ class Walker {
337
354
  store: common_1.STORE_STAT,
338
355
  });
339
356
  }
340
- appendBlobOrContent(task) {
357
+ async appendBlobOrContent(task) {
341
358
  if (strictVerify) {
342
359
  (0, assert_1.default)(task.file === (0, common_1.normalizePath)(task.file));
343
360
  }
@@ -360,7 +377,7 @@ class Walker {
360
377
  return;
361
378
  }
362
379
  this.append(Object.assign(Object.assign({}, task), { file: realFile }));
363
- this.appendSymlink(task.file, realFile);
380
+ await this.appendSymlink(task.file, realFile);
364
381
  this.appendStat({
365
382
  file: task.file,
366
383
  store: common_1.STORE_STAT,
@@ -382,7 +399,7 @@ class Walker {
382
399
  script,
383
400
  ]);
384
401
  }
385
- this.appendBlobOrContent({
402
+ await this.appendBlobOrContent({
386
403
  file: (0, common_1.normalizePath)(script),
387
404
  marker,
388
405
  store: common_1.STORE_BLOB,
@@ -398,7 +415,7 @@ class Walker {
398
415
  log_1.log.debug(' Adding asset : .... ', asset);
399
416
  const stat = await promises_1.default.stat(asset);
400
417
  if (stat.isFile()) {
401
- this.appendBlobOrContent({
418
+ await this.appendBlobOrContent({
402
419
  file: (0, common_1.normalizePath)(asset),
403
420
  marker,
404
421
  store: common_1.STORE_CONTENT,
@@ -420,7 +437,7 @@ class Walker {
420
437
  // 2) non-source (non-js) files of top-level package are shipped as CONTENT
421
438
  // 3) parsing some js 'files' of non-top-level packages fails, hence all CONTENT
422
439
  if (marker.toplevel) {
423
- this.appendBlobOrContent({
440
+ await this.appendBlobOrContent({
424
441
  file,
425
442
  marker,
426
443
  store: (0, common_1.isDotJS)(file) ? common_1.STORE_BLOB : common_1.STORE_CONTENT,
@@ -428,7 +445,7 @@ class Walker {
428
445
  });
429
446
  }
430
447
  else {
431
- this.appendBlobOrContent({
448
+ await this.appendBlobOrContent({
432
449
  file,
433
450
  marker,
434
451
  store: common_1.STORE_CONTENT,
@@ -576,7 +593,7 @@ class Walker {
576
593
  ]);
577
594
  }
578
595
  if (stat && stat.isFile()) {
579
- this.appendBlobOrContent({
596
+ await this.appendBlobOrContent({
580
597
  file,
581
598
  marker,
582
599
  store: common_1.STORE_CONTENT,
@@ -662,14 +679,14 @@ class Walker {
662
679
  (0, assert_1.default)(newPackageForNewRecords.packageJson ===
663
680
  (0, common_1.normalizePath)(newPackageForNewRecords.packageJson));
664
681
  }
665
- this.appendBlobOrContent({
682
+ await this.appendBlobOrContent({
666
683
  file: newPackageForNewRecords.packageJson,
667
684
  marker: newPackageForNewRecords.marker,
668
685
  store: common_1.STORE_CONTENT,
669
686
  reason: record.file,
670
687
  });
671
688
  }
672
- this.appendBlobOrContent({
689
+ await this.appendBlobOrContent({
673
690
  file: newFile,
674
691
  marker: newPackageForNewRecords ? newPackageForNewRecords.marker : marker,
675
692
  store: common_1.STORE_BLOB,
@@ -709,7 +726,7 @@ class Walker {
709
726
  await this.stepDerivatives(record, marker, derivatives1);
710
727
  if (store === common_1.STORE_BLOB) {
711
728
  if (unlikelyJavascript(record.file) || (0, common_1.isDotNODE)(record.file)) {
712
- this.appendBlobOrContent({
729
+ await this.appendBlobOrContent({
713
730
  file: record.file,
714
731
  marker,
715
732
  store: common_1.STORE_CONTENT,
@@ -717,7 +734,7 @@ class Walker {
717
734
  return; // discard
718
735
  }
719
736
  if (marker.public || marker.hasDictionary) {
720
- this.appendBlobOrContent({
737
+ await this.appendBlobOrContent({
721
738
  file: record.file,
722
739
  marker,
723
740
  store: common_1.STORE_CONTENT,
@@ -847,14 +864,14 @@ class Walker {
847
864
  this.symLinks = {};
848
865
  await this.readDictionary(marker);
849
866
  entrypoint = (0, common_1.normalizePath)(entrypoint);
850
- this.appendBlobOrContent({
867
+ await this.appendBlobOrContent({
851
868
  file: entrypoint,
852
869
  marker,
853
870
  store: common_1.STORE_BLOB,
854
871
  });
855
872
  if (addition) {
856
873
  addition = (0, common_1.normalizePath)(addition);
857
- this.appendBlobOrContent({
874
+ await this.appendBlobOrContent({
858
875
  file: addition,
859
876
  marker,
860
877
  store: common_1.STORE_CONTENT,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@yao-pkg/pkg",
3
- "version": "5.16.1",
3
+ "version": "6.0.1",
4
4
  "description": "Package your Node.js project into an executable",
5
5
  "main": "lib-es5/index.js",
6
6
  "license": "MIT",
@@ -34,7 +34,9 @@
34
34
  "prebuild-install": "^7.1.1",
35
35
  "resolve": "^1.22.0",
36
36
  "stream-meter": "^1.0.4",
37
- "tinyglobby": "^0.2.9"
37
+ "tar": "^7.4.3",
38
+ "tinyglobby": "^0.2.9",
39
+ "unzipper": "^0.12.3"
38
40
  },
39
41
  "devDependencies": {
40
42
  "@babel/core": "^7.23.0",
@@ -46,8 +48,12 @@
46
48
  "@types/picomatch": "^3.0.1",
47
49
  "@types/resolve": "^1.20.2",
48
50
  "@types/stream-meter": "^0.0.22",
51
+ "@types/tar": "^6.1.13",
52
+ "@types/unzipper": "^0.10.10",
49
53
  "@typescript-eslint/eslint-plugin": "^6.7.4",
50
54
  "@typescript-eslint/parser": "^6.7.4",
55
+ "esbuild": "^0.24.0",
56
+ "esbuild-register": "^3.6.0",
51
57
  "eslint": "^8.50.0",
52
58
  "eslint-config-airbnb-base": "^15.0.0",
53
59
  "eslint-config-airbnb-typescript": "^17.1.0",
@@ -71,10 +77,9 @@
71
77
  "fix": "npm run lint:style -- -w && npm run lint:code -- --fix",
72
78
  "prepare": "npm run build",
73
79
  "prepublishOnly": "npm run lint",
74
- "test": "npm run build && npm run test:18 && npm run test:16 && npm run test:host",
80
+ "test": "npm run build && npm run test:host && npm run test:18 && npm run test:20",
75
81
  "test:20": "node test/test.js node20 no-npm",
76
82
  "test:18": "node test/test.js node18 no-npm",
77
- "test:16": "node test/test.js node16 no-npm",
78
83
  "test:host": "node test/test.js host only-npm",
79
84
  "release": "read -p 'GITHUB_TOKEN: ' GITHUB_TOKEN && export GITHUB_TOKEN=$GITHUB_TOKEN && release-it"
80
85
  },
@@ -137,5 +142,8 @@
137
142
  "publishConfig": {
138
143
  "access": "public"
139
144
  },
140
- "packageManager": "yarn@1.22.22"
145
+ "packageManager": "yarn@1.22.22",
146
+ "engines": {
147
+ "node": ">=18.0.0"
148
+ }
141
149
  }
@@ -22,7 +22,7 @@ const fs = require('fs');
22
22
  const { isRegExp } = require('util').types;
23
23
  const Module = require('module');
24
24
  const path = require('path');
25
- const { promisify, _extend } = require('util');
25
+ const { promisify } = require('util');
26
26
  const { Script } = require('vm');
27
27
  const { homedir } = require('os');
28
28
  const util = require('util');
@@ -2005,7 +2005,7 @@ function payloadFileSync(pointer) {
2005
2005
  args.splice(pos, 0, {});
2006
2006
  }
2007
2007
  const opts = args[pos];
2008
- if (!opts.env) opts.env = _extend({}, process.env);
2008
+ if (!opts.env) opts.env = { ...process.env };
2009
2009
  // see https://github.com/vercel/pkg/issues/897#issuecomment-1049370335
2010
2010
  if (opts.env.PKG_EXECPATH !== undefined) return;
2011
2011
  opts.env.PKG_EXECPATH = EXECPATH;