@socketsecurity/lib 5.7.0 → 5.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +48 -2
- package/README.md +190 -18
- package/dist/archives.d.ts +58 -0
- package/dist/archives.js +313 -0
- package/dist/arrays.js +2 -3
- package/dist/bin.js +100 -23
- package/dist/cache-with-ttl.js +21 -6
- package/dist/constants/agents.d.ts +0 -1
- package/dist/constants/agents.js +8 -8
- package/dist/constants/node.js +2 -1
- package/dist/cover/formatters.js +5 -3
- package/dist/dlx/detect.js +39 -13
- package/dist/dlx/package.js +10 -1
- package/dist/external/@npmcli/package-json.js +352 -824
- package/dist/external/adm-zip.js +2695 -0
- package/dist/external/debug.js +183 -7
- package/dist/external/external-pack.js +19 -1409
- package/dist/external/libnpmexec.js +2 -2
- package/dist/external/npm-pack.js +18777 -19997
- package/dist/external/pico-pack.js +29 -5
- package/dist/external/spdx-pack.js +41 -263
- package/dist/external/tar-fs.js +3053 -0
- package/dist/git.js +63 -23
- package/dist/github.js +7 -8
- package/dist/globs.js +20 -1
- package/dist/http-request.js +1 -1
- package/dist/memoization.js +22 -13
- package/dist/package-extensions.js +4 -2
- package/dist/packages/normalize.js +3 -0
- package/dist/process-lock.js +7 -5
- package/dist/releases/github.d.ts +40 -0
- package/dist/releases/github.js +122 -22
- package/dist/spawn.js +31 -6
- package/dist/spinner.js +1 -1
- package/dist/stdio/progress.js +2 -2
- package/package.json +38 -15
package/dist/archives.js
ADDED
|
@@ -0,0 +1,313 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/* Socket Lib - Built with esbuild */
|
|
3
|
+
var __create = Object.create;
|
|
4
|
+
var __defProp = Object.defineProperty;
|
|
5
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
6
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
7
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
8
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
9
|
+
var __export = (target, all) => {
|
|
10
|
+
for (var name in all)
|
|
11
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
12
|
+
};
|
|
13
|
+
var __copyProps = (to, from, except, desc) => {
|
|
14
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
15
|
+
for (let key of __getOwnPropNames(from))
|
|
16
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
17
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
18
|
+
}
|
|
19
|
+
return to;
|
|
20
|
+
};
|
|
21
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
22
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
23
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
24
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
25
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
26
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
27
|
+
mod
|
|
28
|
+
));
|
|
29
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
30
|
+
var archives_exports = {};
|
|
31
|
+
__export(archives_exports, {
|
|
32
|
+
detectArchiveFormat: () => detectArchiveFormat,
|
|
33
|
+
extractArchive: () => extractArchive,
|
|
34
|
+
extractTar: () => extractTar,
|
|
35
|
+
extractTarGz: () => extractTarGz,
|
|
36
|
+
extractZip: () => extractZip
|
|
37
|
+
});
|
|
38
|
+
module.exports = __toCommonJS(archives_exports);
|
|
39
|
+
var import_node_fs = require("node:fs");
|
|
40
|
+
var import_promises = require("node:stream/promises");
|
|
41
|
+
var import_node_zlib = require("node:zlib");
|
|
42
|
+
var import_adm_zip = __toESM(require("./external/adm-zip.js"));
|
|
43
|
+
var import_tar_fs = __toESM(require("./external/tar-fs.js"));
|
|
44
|
+
var import_fs = require("./fs.js");
|
|
45
|
+
var import_normalize = require("./paths/normalize.js");
|
|
46
|
+
// Memoized handle for Node's "path" module (loaded on first use).
let _path;
// @__NO_SIDE_EFFECTS__
/**
 * Lazily require Node's "path" module, caching it after the first call so
 * subsequent lookups avoid the require machinery.
 * @returns {typeof import("path")} The cached path module.
 */
function getPath() {
  return _path ?? (_path = require("path"));
}
|
|
54
|
+
// Default per-entry extraction cap: 100 MiB. Guards against oversized files
// and decompression bombs when callers don't pass maxFileSize.
const DEFAULT_MAX_FILE_SIZE = 100 * 1024 * 1024;
// Default whole-archive cap: 1 GiB of cumulative declared entry sizes.
const DEFAULT_MAX_TOTAL_SIZE = 1024 * 1024 * 1024;
|
|
56
|
+
/**
 * Ensure an extraction target path stays inside the base directory.
 * Throws on path-traversal attempts (e.g. "../" entries in an archive).
 *
 * @param {string} targetPath - Candidate destination path for an entry.
 * @param {string} baseDir - Directory extraction must stay within.
 * @param {string} entryName - Archive entry name, used in the error message.
 * @throws {Error} If the resolved target escapes the resolved base directory.
 */
function validatePathWithinBase(targetPath, baseDir, entryName) {
  const path = /* @__PURE__ */ getPath();
  const resolvedTarget = path.resolve(targetPath);
  const resolvedBase = path.resolve(baseDir);
  // Contained means: exactly the base dir, or a descendant of it. The
  // explicit separator suffix prevents "/base-evil" matching "/base".
  const isContained =
    resolvedTarget === resolvedBase ||
    resolvedTarget.startsWith(resolvedBase + path.sep);
  if (isContained) {
    return;
  }
  throw new Error(
    `Path traversal attempt detected: entry "${entryName}" would extract to "${resolvedTarget}" outside target directory "${resolvedBase}"`
  );
}
|
|
66
|
+
/**
 * Infer the archive format from a file path's extension (case-insensitive).
 *
 * @param {string} filePath - File path or name to inspect.
 * @returns {"tar.gz" | "tgz" | "tar" | "zip" | null} The detected format, or
 *   null when the extension is not a supported archive type.
 */
function detectArchiveFormat(filePath) {
  const lower = filePath.toLowerCase();
  // Suffix → format table; ".tar.gz" precedes ".tar" so the longer suffix is
  // reported as its own distinct format.
  const suffixTable = [
    [".tar.gz", "tar.gz"],
    [".tgz", "tgz"],
    [".tar", "tar"],
    [".zip", "zip"]
  ];
  for (const [suffix, format] of suffixTable) {
    if (lower.endsWith(suffix)) {
      return format;
    }
  }
  return null;
}
|
|
82
|
+
/**
 * Extract an uncompressed .tar archive into outputDir with safety guards.
 *
 * Guards are enforced while streaming via the tar-fs `map` hook:
 * - symlink/hardlink entries abort extraction (they could escape outputDir),
 * - any entry whose declared size exceeds `maxFileSize` aborts extraction,
 * - cumulative declared sizes above `maxTotalSize` abort extraction.
 *
 * @param {string} archivePath - Path to the .tar file.
 * @param {string} outputDir - Destination directory (created if missing).
 * @param {object} [options]
 * @param {number} [options.maxFileSize] - Per-entry byte cap (default 100 MiB).
 * @param {number} [options.maxTotalSize] - Total byte cap (default 1 GiB).
 * @param {number} [options.strip] - Leading path components to strip.
 * @throws {Error} When a guard trips or the stream pipeline fails.
 */
async function extractTar(archivePath, outputDir, options = {}) {
  const {
    maxFileSize = DEFAULT_MAX_FILE_SIZE,
    maxTotalSize = DEFAULT_MAX_TOTAL_SIZE,
    strip = 0
  } = options;
  const normalizedOutputDir = (0, import_normalize.normalizePath)(outputDir);
  await (0, import_fs.safeMkdir)(normalizedOutputDir);
  // Running total of declared entry sizes, compared against maxTotalSize.
  let totalExtractedSize = 0;
  // Once any guard trips, schedule exactly one destroy and pass remaining
  // headers through untouched.
  let destroyScheduled = false;
  const extractStream = import_tar_fs.default.extract(normalizedOutputDir, {
    map: (header) => {
      if (destroyScheduled) {
        return header;
      }
      // Reject symlinks and hardlinks outright for security.
      if (header.type === "symlink" || header.type === "link") {
        destroyScheduled = true;
        // Destroy is deferred to the next tick rather than thrown from the
        // map callback (NOTE(review): presumably to avoid destroying mid
        // header processing — confirm against tar-fs internals).
        process.nextTick(() => {
          extractStream.destroy(
            new Error(
              `Symlink detected in archive: ${header.name}. Symlinks are not supported for security reasons.`
            )
          );
        });
        return header;
      }
      // Per-entry size guard; header.size may be 0/undefined for non-file
      // entries, which the truthiness check skips.
      if (header.size && header.size > maxFileSize) {
        destroyScheduled = true;
        process.nextTick(() => {
          extractStream.destroy(
            new Error(
              `File size exceeds limit: ${header.name} (${header.size} bytes > ${maxFileSize} bytes)`
            )
          );
        });
        return header;
      }
      // Cumulative size guard across all entries seen so far.
      if (header.size) {
        totalExtractedSize += header.size;
        if (totalExtractedSize > maxTotalSize) {
          destroyScheduled = true;
          process.nextTick(() => {
            extractStream.destroy(
              new Error(
                `Total extracted size exceeds limit: ${totalExtractedSize} bytes > ${maxTotalSize} bytes`
              )
            );
          });
          return header;
        }
      }
      return header;
    },
    strip
  });
  // Swallow direct "error" events to avoid an unhandled 'error' crash; the
  // pipeline() below still rejects with the same error.
  extractStream.on("error", () => {
  });
  const readStream = (0, import_node_fs.createReadStream)(archivePath);
  try {
    await (0, import_promises.pipeline)(readStream, extractStream);
  } catch (error) {
    // Release the source file descriptor before propagating the failure.
    readStream.destroy();
    throw error;
  }
}
|
|
147
|
+
/**
 * Extract a gzip-compressed tarball (.tar.gz / .tgz) into outputDir.
 *
 * Identical to extractTar except a gunzip transform is inserted into the
 * pipeline. The same streaming guards apply via the tar-fs `map` hook:
 * - symlink/hardlink entries abort extraction,
 * - any entry whose declared size exceeds `maxFileSize` aborts extraction,
 * - cumulative declared sizes above `maxTotalSize` abort extraction.
 *
 * @param {string} archivePath - Path to the .tar.gz/.tgz file.
 * @param {string} outputDir - Destination directory (created if missing).
 * @param {object} [options]
 * @param {number} [options.maxFileSize] - Per-entry byte cap (default 100 MiB).
 * @param {number} [options.maxTotalSize] - Total byte cap (default 1 GiB).
 * @param {number} [options.strip] - Leading path components to strip.
 * @throws {Error} When a guard trips or the stream pipeline fails.
 */
async function extractTarGz(archivePath, outputDir, options = {}) {
  const {
    maxFileSize = DEFAULT_MAX_FILE_SIZE,
    maxTotalSize = DEFAULT_MAX_TOTAL_SIZE,
    strip = 0
  } = options;
  const normalizedOutputDir = (0, import_normalize.normalizePath)(outputDir);
  await (0, import_fs.safeMkdir)(normalizedOutputDir);
  // Running total of declared entry sizes, compared against maxTotalSize.
  let totalExtractedSize = 0;
  // Once any guard trips, schedule exactly one destroy and pass remaining
  // headers through untouched.
  let destroyScheduled = false;
  const extractStream = import_tar_fs.default.extract(normalizedOutputDir, {
    map: (header) => {
      if (destroyScheduled) {
        return header;
      }
      // Reject symlinks and hardlinks outright for security.
      if (header.type === "symlink" || header.type === "link") {
        destroyScheduled = true;
        // Destroy is deferred to the next tick rather than thrown from the
        // map callback (NOTE(review): presumably to avoid destroying mid
        // header processing — confirm against tar-fs internals).
        process.nextTick(() => {
          extractStream.destroy(
            new Error(
              `Symlink detected in archive: ${header.name}. Symlinks are not supported for security reasons.`
            )
          );
        });
        return header;
      }
      // Per-entry size guard; header.size may be 0/undefined for non-file
      // entries, which the truthiness check skips.
      if (header.size && header.size > maxFileSize) {
        destroyScheduled = true;
        process.nextTick(() => {
          extractStream.destroy(
            new Error(
              `File size exceeds limit: ${header.name} (${header.size} bytes > ${maxFileSize} bytes)`
            )
          );
        });
        return header;
      }
      // Cumulative size guard across all entries seen so far. Note the sizes
      // are the uncompressed sizes declared in the tar headers.
      if (header.size) {
        totalExtractedSize += header.size;
        if (totalExtractedSize > maxTotalSize) {
          destroyScheduled = true;
          process.nextTick(() => {
            extractStream.destroy(
              new Error(
                `Total extracted size exceeds limit: ${totalExtractedSize} bytes > ${maxTotalSize} bytes`
              )
            );
          });
          return header;
        }
      }
      return header;
    },
    strip
  });
  // Swallow direct "error" events to avoid an unhandled 'error' crash; the
  // pipeline() below still rejects with the same error.
  extractStream.on("error", () => {
  });
  const readStream = (0, import_node_fs.createReadStream)(archivePath);
  try {
    // Gunzip stage decompresses between the file read and the tar extractor.
    await (0, import_promises.pipeline)(readStream, (0, import_node_zlib.createGunzip)(), extractStream);
  } catch (error) {
    // Release the source file descriptor before propagating the failure.
    readStream.destroy();
    throw error;
  }
}
|
|
212
|
+
/**
 * Extract a .zip archive into outputDir with zip-bomb and path-traversal
 * guards. All entries are validated BEFORE anything is written to disk.
 *
 * @param {string} archivePath - Path to the .zip file.
 * @param {string} outputDir - Destination directory (created if missing).
 * @param {object} [options]
 * @param {number} [options.maxFileSize] - Per-entry uncompressed byte cap
 *   (default 100 MiB).
 * @param {number} [options.maxTotalSize] - Total uncompressed byte cap
 *   (default 1 GiB).
 * @param {number} [options.strip] - Leading path components to strip from
 *   each entry name.
 * @throws {Error} On size-limit violations or path-traversal entries.
 */
async function extractZip(archivePath, outputDir, options = {}) {
  const {
    maxFileSize = DEFAULT_MAX_FILE_SIZE,
    maxTotalSize = DEFAULT_MAX_TOTAL_SIZE,
    strip = 0
  } = options;
  const normalizedOutputDir = (0, import_normalize.normalizePath)(outputDir);
  await (0, import_fs.safeMkdir)(normalizedOutputDir);
  const zip = new import_adm_zip.default(archivePath);
  const path = /* @__PURE__ */ getPath();
  const entries = zip.getEntries();
  // Validation pass: enforce per-file and cumulative size limits and reject
  // path-traversal entries before any extraction happens.
  let totalExtractedSize = 0;
  for (const entry of entries) {
    if (entry.isDirectory) {
      continue;
    }
    const uncompressedSize = entry.header.size;
    if (uncompressedSize > maxFileSize) {
      throw new Error(
        `File size exceeds limit: ${entry.entryName} (${uncompressedSize} bytes > ${maxFileSize} bytes)`
      );
    }
    totalExtractedSize += uncompressedSize;
    if (totalExtractedSize > maxTotalSize) {
      throw new Error(
        `Total extracted size exceeds limit: ${totalExtractedSize} bytes > ${maxTotalSize} bytes`
      );
    }
    const parts = entry.entryName.split("/");
    if (parts.length <= strip) {
      // Entry is entirely consumed by the strip depth; it won't be extracted.
      continue;
    }
    const strippedPath = parts.slice(strip).join("/");
    const targetPath = path.join(normalizedOutputDir, strippedPath);
    validatePathWithinBase(targetPath, normalizedOutputDir, entry.entryName);
  }
  if (strip === 0) {
    // With strip 0 the stripped path equals the entry name, so the pass
    // above already validated every target; extract in bulk (overwrite on).
    zip.extractAllTo(normalizedOutputDir, true);
    return;
  }
  // strip > 0: adm-zip cannot strip paths itself, so compute each entry's
  // stripped destination, pre-create the target directories, then extract
  // entries individually.
  const dirsToCreate = /* @__PURE__ */ new Set();
  for (const entry of entries) {
    if (entry.isDirectory) {
      continue;
    }
    const parts = entry.entryName.split("/");
    if (parts.length <= strip) {
      continue;
    }
    const strippedPath = parts.slice(strip).join("/");
    const targetPath = path.join(normalizedOutputDir, strippedPath);
    dirsToCreate.add(path.dirname(targetPath));
  }
  await Promise.all(Array.from(dirsToCreate).map((dir) => (0, import_fs.safeMkdir)(dir)));
  for (const entry of entries) {
    if (entry.isDirectory) {
      continue;
    }
    const parts = entry.entryName.split("/");
    if (parts.length <= strip) {
      continue;
    }
    const strippedPath = parts.slice(strip).join("/");
    const targetPath = path.join(normalizedOutputDir, strippedPath);
    // maintainEntryPath=false drops the entry's internal directories so the
    // file lands directly in its stripped target directory; overwrite on.
    zip.extractEntryTo(entry, path.dirname(targetPath), false, true);
  }
}
|
|
287
|
+
/**
 * Detect an archive's format from its file extension and dispatch to the
 * matching extractor (extractZip, extractTar, or extractTarGz).
 *
 * @param {string} archivePath - Path to the archive file.
 * @param {string} outputDir - Destination directory.
 * @param {object} [options] - Forwarded to the underlying extractor.
 * @throws {Error} If the extension is not a supported archive format.
 */
async function extractArchive(archivePath, outputDir, options = {}) {
  const format = detectArchiveFormat(archivePath);
  if (!format) {
    const path = /* @__PURE__ */ getPath();
    const ext = path.extname(archivePath).toLowerCase();
    throw new Error(
      `Unsupported archive format${ext ? ` (extension: ${ext})` : ""}: ${archivePath}. Supported formats: .zip, .tar, .tar.gz, .tgz`
    );
  }
  if (format === "zip") {
    return await extractZip(archivePath, outputDir, options);
  }
  if (format === "tar") {
    return await extractTar(archivePath, outputDir, options);
  }
  // The remaining formats, "tar.gz" and "tgz", are both gzipped tarballs.
  return await extractTarGz(archivePath, outputDir, options);
}
|
|
306
|
+
// Annotate the CommonJS export names for ESM import in node:
// The `0 &&` prefix makes this assignment dead code at runtime; Node's
// static CJS named-export detection still reads the listed identifiers so
// `import { extractArchive } from ...` works from ESM consumers.
0 && (module.exports = {
  detectArchiveFormat,
  extractArchive,
  extractTar,
  extractTarGz,
  extractZip
});
|
package/dist/arrays.js
CHANGED
|
@@ -57,10 +57,9 @@ function arrayChunk(arr, size) {
|
|
|
57
57
|
throw new Error("Chunk size must be greater than 0");
|
|
58
58
|
}
|
|
59
59
|
const { length } = arr;
|
|
60
|
-
const actualChunkSize = Math.min(length, chunkSize);
|
|
61
60
|
const chunks = [];
|
|
62
|
-
for (let i = 0; i < length; i +=
|
|
63
|
-
chunks.push(arr.slice(i, i +
|
|
61
|
+
for (let i = 0; i < length; i += chunkSize) {
|
|
62
|
+
chunks.push(arr.slice(i, i + chunkSize));
|
|
64
63
|
}
|
|
65
64
|
return chunks;
|
|
66
65
|
}
|
package/dist/bin.js
CHANGED
|
@@ -50,6 +50,9 @@ var import_which = __toESM(require("./external/which"));
|
|
|
50
50
|
var import_fs = require("./fs");
|
|
51
51
|
var import_normalize = require("./paths/normalize");
|
|
52
52
|
var import_spawn = require("./spawn");
|
|
53
|
+
const binPathCache = /* @__PURE__ */ new Map();
|
|
54
|
+
const binPathAllCache = /* @__PURE__ */ new Map();
|
|
55
|
+
const voltaBinCache = /* @__PURE__ */ new Map();
|
|
53
56
|
let _fs;
|
|
54
57
|
// @__NO_SIDE_EFFECTS__
|
|
55
58
|
function getFs() {
|
|
@@ -68,7 +71,25 @@ function getPath() {
|
|
|
68
71
|
}
|
|
69
72
|
// @__NO_SIDE_EFFECTS__
|
|
70
73
|
async function execBin(binPath, args, options) {
|
|
71
|
-
|
|
74
|
+
let resolvedPath;
|
|
75
|
+
if ((0, import_normalize.isPath)(binPath)) {
|
|
76
|
+
resolvedPath = /* @__PURE__ */ resolveRealBinSync(binPath);
|
|
77
|
+
} else {
|
|
78
|
+
const cached = binPathCache.get(binPath);
|
|
79
|
+
if (cached) {
|
|
80
|
+
if ((/* @__PURE__ */ getFs()).existsSync(cached)) {
|
|
81
|
+
resolvedPath = cached;
|
|
82
|
+
} else {
|
|
83
|
+
binPathCache.delete(binPath);
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
if (!resolvedPath) {
|
|
87
|
+
resolvedPath = await whichReal(binPath);
|
|
88
|
+
if (typeof resolvedPath === "string") {
|
|
89
|
+
binPathCache.set(binPath, resolvedPath);
|
|
90
|
+
}
|
|
91
|
+
}
|
|
92
|
+
}
|
|
72
93
|
if (!resolvedPath) {
|
|
73
94
|
const error = new Error(
|
|
74
95
|
`Binary not found: ${binPath}
|
|
@@ -93,27 +114,23 @@ To resolve:
|
|
|
93
114
|
function findRealBin(binName, commonPaths = []) {
|
|
94
115
|
const fs = /* @__PURE__ */ getFs();
|
|
95
116
|
const path = /* @__PURE__ */ getPath();
|
|
96
|
-
for (const
|
|
97
|
-
if (fs.existsSync(
|
|
98
|
-
return
|
|
117
|
+
for (const binPath of commonPaths) {
|
|
118
|
+
if (fs.existsSync(binPath)) {
|
|
119
|
+
return binPath;
|
|
99
120
|
}
|
|
100
121
|
}
|
|
101
|
-
const
|
|
102
|
-
|
|
122
|
+
const allPaths = import_which.default.sync(binName, { all: true, nothrow: true }) || [];
|
|
123
|
+
const pathsArray = Array.isArray(allPaths) ? allPaths : typeof allPaths === "string" ? [allPaths] : [];
|
|
124
|
+
if (pathsArray.length === 0) {
|
|
125
|
+
return void 0;
|
|
126
|
+
}
|
|
127
|
+
for (const binPath of pathsArray) {
|
|
103
128
|
const binDir = path.dirname(binPath);
|
|
104
|
-
if (isShadowBinPath(binDir)) {
|
|
105
|
-
|
|
106
|
-
const pathsArray = Array.isArray(allPaths) ? allPaths : typeof allPaths === "string" ? [allPaths] : [];
|
|
107
|
-
for (const altPath of pathsArray) {
|
|
108
|
-
const altDir = path.dirname(altPath);
|
|
109
|
-
if (!isShadowBinPath(altDir)) {
|
|
110
|
-
return altPath;
|
|
111
|
-
}
|
|
112
|
-
}
|
|
129
|
+
if (!isShadowBinPath(binDir)) {
|
|
130
|
+
return binPath;
|
|
113
131
|
}
|
|
114
|
-
return binPath;
|
|
115
132
|
}
|
|
116
|
-
return
|
|
133
|
+
return pathsArray[0];
|
|
117
134
|
}
|
|
118
135
|
function findRealNpm() {
|
|
119
136
|
const fs = /* @__PURE__ */ getFs();
|
|
@@ -199,6 +216,14 @@ function resolveRealBinSync(binPath) {
|
|
|
199
216
|
const voltaIndex = basename === "node" ? -1 : /(?<=\/)\.volta\//i.exec(binPath)?.index ?? -1;
|
|
200
217
|
if (voltaIndex !== -1) {
|
|
201
218
|
const voltaPath = binPath.slice(0, voltaIndex);
|
|
219
|
+
const voltaCacheKey = `${voltaPath}:${basename}`;
|
|
220
|
+
const cachedVolta = voltaBinCache.get(voltaCacheKey);
|
|
221
|
+
if (cachedVolta) {
|
|
222
|
+
if (fs.existsSync(cachedVolta)) {
|
|
223
|
+
return cachedVolta;
|
|
224
|
+
}
|
|
225
|
+
voltaBinCache.delete(voltaCacheKey);
|
|
226
|
+
}
|
|
202
227
|
const voltaToolsPath = path.join(voltaPath, "tools");
|
|
203
228
|
const voltaImagePath = path.join(voltaToolsPath, "image");
|
|
204
229
|
const voltaUserPath = path.join(voltaToolsPath, "user");
|
|
@@ -247,11 +272,13 @@ function resolveRealBinSync(binPath) {
|
|
|
247
272
|
}
|
|
248
273
|
}
|
|
249
274
|
if (voltaBinPath) {
|
|
275
|
+
let resolvedVoltaPath = voltaBinPath;
|
|
250
276
|
try {
|
|
251
|
-
|
|
277
|
+
resolvedVoltaPath = (0, import_normalize.normalizePath)(fs.realpathSync.native(voltaBinPath));
|
|
252
278
|
} catch {
|
|
253
279
|
}
|
|
254
|
-
|
|
280
|
+
voltaBinCache.set(voltaCacheKey, resolvedVoltaPath);
|
|
281
|
+
return resolvedVoltaPath;
|
|
255
282
|
}
|
|
256
283
|
}
|
|
257
284
|
if (import_platform.WIN32) {
|
|
@@ -392,28 +419,78 @@ async function which(binName, options) {
|
|
|
392
419
|
}
|
|
393
420
|
}
|
|
394
421
|
async function whichReal(binName, options) {
|
|
422
|
+
const fs = /* @__PURE__ */ getFs();
|
|
395
423
|
const opts = { nothrow: true, ...options };
|
|
424
|
+
if (opts.all) {
|
|
425
|
+
const cachedAll = binPathAllCache.get(binName);
|
|
426
|
+
if (cachedAll && cachedAll.length > 0) {
|
|
427
|
+
if (fs.existsSync(cachedAll[0])) {
|
|
428
|
+
return cachedAll;
|
|
429
|
+
}
|
|
430
|
+
binPathAllCache.delete(binName);
|
|
431
|
+
}
|
|
432
|
+
} else {
|
|
433
|
+
const cached = binPathCache.get(binName);
|
|
434
|
+
if (cached) {
|
|
435
|
+
if (fs.existsSync(cached)) {
|
|
436
|
+
return cached;
|
|
437
|
+
}
|
|
438
|
+
binPathCache.delete(binName);
|
|
439
|
+
}
|
|
440
|
+
}
|
|
396
441
|
const result = await (0, import_which.default)(binName, opts);
|
|
397
442
|
if (opts?.all) {
|
|
398
443
|
const paths = Array.isArray(result) ? result : typeof result === "string" ? [result] : void 0;
|
|
399
|
-
|
|
444
|
+
if (paths?.length) {
|
|
445
|
+
const resolved2 = paths.map((p) => /* @__PURE__ */ resolveRealBinSync(p));
|
|
446
|
+
binPathAllCache.set(binName, resolved2);
|
|
447
|
+
return resolved2;
|
|
448
|
+
}
|
|
449
|
+
return paths;
|
|
400
450
|
}
|
|
401
451
|
if (!result) {
|
|
402
452
|
return void 0;
|
|
403
453
|
}
|
|
404
|
-
|
|
454
|
+
const resolved = /* @__PURE__ */ resolveRealBinSync(result);
|
|
455
|
+
binPathCache.set(binName, resolved);
|
|
456
|
+
return resolved;
|
|
405
457
|
}
|
|
406
458
|
function whichRealSync(binName, options) {
|
|
459
|
+
const fs = /* @__PURE__ */ getFs();
|
|
407
460
|
const opts = { nothrow: true, ...options };
|
|
461
|
+
if (opts.all) {
|
|
462
|
+
const cachedAll = binPathAllCache.get(binName);
|
|
463
|
+
if (cachedAll && cachedAll.length > 0) {
|
|
464
|
+
if (fs.existsSync(cachedAll[0])) {
|
|
465
|
+
return cachedAll;
|
|
466
|
+
}
|
|
467
|
+
binPathAllCache.delete(binName);
|
|
468
|
+
}
|
|
469
|
+
} else {
|
|
470
|
+
const cached = binPathCache.get(binName);
|
|
471
|
+
if (cached) {
|
|
472
|
+
if (fs.existsSync(cached)) {
|
|
473
|
+
return cached;
|
|
474
|
+
}
|
|
475
|
+
binPathCache.delete(binName);
|
|
476
|
+
}
|
|
477
|
+
}
|
|
408
478
|
const result = whichSync(binName, opts);
|
|
409
479
|
if (opts.all) {
|
|
410
480
|
const paths = Array.isArray(result) ? result : typeof result === "string" ? [result] : void 0;
|
|
411
|
-
|
|
481
|
+
if (paths?.length) {
|
|
482
|
+
const resolved2 = paths.map((p) => /* @__PURE__ */ resolveRealBinSync(p));
|
|
483
|
+
binPathAllCache.set(binName, resolved2);
|
|
484
|
+
return resolved2;
|
|
485
|
+
}
|
|
486
|
+
return paths;
|
|
412
487
|
}
|
|
413
488
|
if (!result) {
|
|
414
489
|
return void 0;
|
|
415
490
|
}
|
|
416
|
-
|
|
491
|
+
const resolved = /* @__PURE__ */ resolveRealBinSync(result);
|
|
492
|
+
binPathCache.set(binName, resolved);
|
|
493
|
+
return resolved;
|
|
417
494
|
}
|
|
418
495
|
function whichSync(binName, options) {
|
|
419
496
|
if ((0, import_normalize.isPath)(binName)) {
|
package/dist/cache-with-ttl.js
CHANGED
|
@@ -55,7 +55,8 @@ function createTtlCache(options) {
|
|
|
55
55
|
}
|
|
56
56
|
function isExpired(entry) {
|
|
57
57
|
const now = Date.now();
|
|
58
|
-
|
|
58
|
+
const maxFutureMs = 1e4;
|
|
59
|
+
if (entry.expiresAt > now + ttl + maxFutureMs) {
|
|
59
60
|
return true;
|
|
60
61
|
}
|
|
61
62
|
return now > entry.expiresAt;
|
|
@@ -114,7 +115,7 @@ function createTtlCache(options) {
|
|
|
114
115
|
memoCache.delete(key);
|
|
115
116
|
continue;
|
|
116
117
|
}
|
|
117
|
-
const originalKey = key.slice(
|
|
118
|
+
const originalKey = opts.prefix ? key.slice(opts.prefix.length + 1) : key;
|
|
118
119
|
results.set(originalKey, entry.data);
|
|
119
120
|
}
|
|
120
121
|
}
|
|
@@ -128,7 +129,7 @@ function createTtlCache(options) {
|
|
|
128
129
|
if (!matches(cacheEntry.key)) {
|
|
129
130
|
continue;
|
|
130
131
|
}
|
|
131
|
-
const originalKey = cacheEntry.key.slice(
|
|
132
|
+
const originalKey = opts.prefix ? cacheEntry.key.slice(opts.prefix.length + 1) : cacheEntry.key;
|
|
132
133
|
if (results.has(originalKey)) {
|
|
133
134
|
continue;
|
|
134
135
|
}
|
|
@@ -174,14 +175,28 @@ function createTtlCache(options) {
|
|
|
174
175
|
} catch {
|
|
175
176
|
}
|
|
176
177
|
}
|
|
178
|
+
const inflightRequests = /* @__PURE__ */ new Map();
|
|
177
179
|
async function getOrFetch(key, fetcher) {
|
|
178
180
|
const cached = await get(key);
|
|
179
181
|
if (cached !== void 0) {
|
|
180
182
|
return cached;
|
|
181
183
|
}
|
|
182
|
-
const
|
|
183
|
-
|
|
184
|
-
|
|
184
|
+
const fullKey = buildKey(key);
|
|
185
|
+
const existing = inflightRequests.get(fullKey);
|
|
186
|
+
if (existing) {
|
|
187
|
+
return await existing;
|
|
188
|
+
}
|
|
189
|
+
const promise = (async () => {
|
|
190
|
+
try {
|
|
191
|
+
const data = await fetcher();
|
|
192
|
+
await set(key, data);
|
|
193
|
+
return data;
|
|
194
|
+
} finally {
|
|
195
|
+
inflightRequests.delete(fullKey);
|
|
196
|
+
}
|
|
197
|
+
})();
|
|
198
|
+
inflightRequests.set(fullKey, promise);
|
|
199
|
+
return await promise;
|
|
185
200
|
}
|
|
186
201
|
async function deleteEntry(key) {
|
|
187
202
|
if (key.includes("*")) {
|
|
@@ -5,7 +5,6 @@ export declare const YARN = "yarn";
|
|
|
5
5
|
export declare const BUN = "bun";
|
|
6
6
|
export declare const VLT = "vlt";
|
|
7
7
|
export declare const NPX = "npx";
|
|
8
|
-
// NPM binary path - resolved at runtime using which.
|
|
9
8
|
export declare const NPM_BIN_PATH: string;
|
|
10
9
|
// NPM CLI entry point - resolved at runtime from npm bin location.
|
|
11
10
|
// NOTE: This is kept for backward compatibility but NPM_BIN_PATH should be used instead
|
package/dist/constants/agents.js
CHANGED
|
@@ -61,22 +61,22 @@ const YARN = "yarn";
|
|
|
61
61
|
const BUN = "bun";
|
|
62
62
|
const VLT = "vlt";
|
|
63
63
|
const NPX = "npx";
|
|
64
|
-
const
|
|
64
|
+
const _npmBinPath = /* @__PURE__ */ (() => {
|
|
65
65
|
try {
|
|
66
|
-
return import_which.default.sync("npm", { nothrow: true }) ||
|
|
66
|
+
return import_which.default.sync("npm", { nothrow: true }) || null;
|
|
67
67
|
} catch {
|
|
68
|
-
return
|
|
68
|
+
return null;
|
|
69
69
|
}
|
|
70
70
|
})();
|
|
71
|
+
const NPM_BIN_PATH = _npmBinPath || "npm";
|
|
71
72
|
const NPM_REAL_EXEC_PATH = /* @__PURE__ */ (() => {
|
|
72
73
|
try {
|
|
73
|
-
|
|
74
|
-
const path = require("path");
|
|
75
|
-
const npmBin = import_which.default.sync("npm", { nothrow: true });
|
|
76
|
-
if (!npmBin) {
|
|
74
|
+
if (!_npmBinPath) {
|
|
77
75
|
return void 0;
|
|
78
76
|
}
|
|
79
|
-
const
|
|
77
|
+
const { existsSync } = require("fs");
|
|
78
|
+
const path = require("path");
|
|
79
|
+
const npmDir = path.dirname(_npmBinPath);
|
|
80
80
|
const nodeModulesPath = path.join(
|
|
81
81
|
npmDir,
|
|
82
82
|
"..",
|
package/dist/constants/node.js
CHANGED
|
@@ -47,7 +47,8 @@ function getNodeVersion() {
|
|
|
47
47
|
return NODE_VERSION;
|
|
48
48
|
}
|
|
49
49
|
function getNodeMajorVersion() {
|
|
50
|
-
|
|
50
|
+
const major = NODE_VERSION.slice(1).split(".")[0] ?? "0";
|
|
51
|
+
return Number.parseInt(major, 10) || 0;
|
|
51
52
|
}
|
|
52
53
|
function getNodeMinorVersion() {
|
|
53
54
|
return Number.parseInt(NODE_VERSION.split(".")[1] ?? "0", 10);
|
package/dist/cover/formatters.js
CHANGED
|
@@ -77,7 +77,8 @@ function formatCoverage(options) {
|
|
|
77
77
|
{ count: 2 }
|
|
78
78
|
);
|
|
79
79
|
}
|
|
80
|
-
const
|
|
80
|
+
const overallValue = Number.parseFloat(overall);
|
|
81
|
+
const emoji = getCoverageEmoji(Number.isNaN(overallValue) ? 0 : overallValue);
|
|
81
82
|
output += `
|
|
82
83
|
Overall: ${overall}%${emoji}
|
|
83
84
|
`;
|
|
@@ -89,9 +90,10 @@ function calculateOverall(code, type) {
|
|
|
89
90
|
Number.parseFloat(code.branches.percent),
|
|
90
91
|
Number.parseFloat(code.functions.percent),
|
|
91
92
|
Number.parseFloat(code.lines.percent)
|
|
92
|
-
];
|
|
93
|
+
].map((val) => Number.isNaN(val) ? 0 : val);
|
|
93
94
|
if (type) {
|
|
94
|
-
|
|
95
|
+
const typePercent = Number.parseFloat(type.percent);
|
|
96
|
+
metrics.push(Number.isNaN(typePercent) ? 0 : typePercent);
|
|
95
97
|
}
|
|
96
98
|
const average = metrics.reduce((sum, val) => sum + val, 0) / metrics.length;
|
|
97
99
|
return average.toFixed(2);
|