@socketsecurity/lib 5.6.0 → 5.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/CHANGELOG.md +92 -2
  2. package/README.md +190 -18
  3. package/dist/archives.d.ts +58 -0
  4. package/dist/archives.js +313 -0
  5. package/dist/arrays.js +2 -3
  6. package/dist/cache-with-ttl.js +25 -6
  7. package/dist/constants/node.js +2 -1
  8. package/dist/cover/formatters.js +5 -3
  9. package/dist/dlx/binary.d.ts +20 -0
  10. package/dist/dlx/binary.js +115 -99
  11. package/dist/dlx/detect.d.ts +8 -8
  12. package/dist/dlx/detect.js +18 -18
  13. package/dist/dlx/manifest.d.ts +32 -31
  14. package/dist/dlx/manifest.js +114 -112
  15. package/dist/dlx/package.d.ts +55 -0
  16. package/dist/dlx/package.js +90 -80
  17. package/dist/env/ci.js +1 -2
  18. package/dist/env/rewire.d.ts +33 -22
  19. package/dist/env/rewire.js +20 -7
  20. package/dist/env/socket-cli.d.ts +24 -24
  21. package/dist/env/socket-cli.js +12 -12
  22. package/dist/env/temp-dir.d.ts +6 -6
  23. package/dist/env/temp-dir.js +4 -4
  24. package/dist/env/windows.d.ts +6 -6
  25. package/dist/env/windows.js +4 -4
  26. package/dist/external/@npmcli/package-json.js +352 -824
  27. package/dist/external/adm-zip.js +2695 -0
  28. package/dist/external/debug.js +183 -7
  29. package/dist/external/external-pack.js +19 -1409
  30. package/dist/external/libnpmexec.js +2 -2
  31. package/dist/external/npm-pack.js +18777 -19997
  32. package/dist/external/pico-pack.js +29 -5
  33. package/dist/external/spdx-pack.js +41 -263
  34. package/dist/external/tar-fs.js +3053 -0
  35. package/dist/git.js +22 -4
  36. package/dist/github.js +17 -9
  37. package/dist/globs.js +20 -1
  38. package/dist/http-request.js +1 -1
  39. package/dist/memoization.js +22 -13
  40. package/dist/package-extensions.js +4 -2
  41. package/dist/packages/normalize.js +3 -0
  42. package/dist/packages/specs.js +1 -1
  43. package/dist/process-lock.js +4 -2
  44. package/dist/releases/github.d.ts +55 -4
  45. package/dist/releases/github.js +203 -101
  46. package/dist/spawn.js +1 -1
  47. package/dist/spinner.js +1 -1
  48. package/dist/stdio/progress.js +2 -2
  49. package/package.json +38 -15
@@ -0,0 +1,313 @@
1
"use strict";
/* Socket Lib - Built with esbuild */
// ---------------------------------------------------------------------------
// esbuild-generated CommonJS interop boilerplate.
// ---------------------------------------------------------------------------
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install lazy getters on `target` for every named export in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties from `from` onto `to` as live getters, skipping
// `except` and any key `to` already defines.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed like an ES module.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Mark the exports object as an ES module and attach the export getters.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
// Public API of this module.
var archives_exports = {};
__export(archives_exports, {
  detectArchiveFormat: () => detectArchiveFormat,
  extractArchive: () => extractArchive,
  extractTar: () => extractTar,
  extractTarGz: () => extractTarGz,
  extractZip: () => extractZip
});
module.exports = __toCommonJS(archives_exports);
// Node builtins.
var import_node_fs = require("node:fs");
var import_promises = require("node:stream/promises");
var import_node_zlib = require("node:zlib");
// Vendored archive libraries bundled under ./external.
var import_adm_zip = __toESM(require("./external/adm-zip.js"));
var import_tar_fs = __toESM(require("./external/tar-fs.js"));
// Project-local helpers (safeMkdir, normalizePath).
var import_fs = require("./fs.js");
var import_normalize = require("./paths/normalize.js");
46
+ let _path;
47
+ // @__NO_SIDE_EFFECTS__
48
+ function getPath() {
49
+ if (_path === void 0) {
50
+ _path = require("path");
51
+ }
52
+ return _path;
53
+ }
54
// Default per-file extraction cap: 100 MiB.
const DEFAULT_MAX_FILE_SIZE = 100 * 1024 * 1024;
// Default aggregate cap across all entries: 1 GiB (zip/tar-bomb guard).
const DEFAULT_MAX_TOTAL_SIZE = 1024 * 1024 * 1024;
56
/**
 * Ensure `targetPath` resolves inside `baseDir`.
 * Guards archive extraction against path traversal ("zip-slip"):
 * throws when an entry would land outside the extraction root.
 * `entryName` is used only to build the error message.
 */
function validatePathWithinBase(targetPath, baseDir, entryName) {
  const path = /* @__PURE__ */ getPath();
  const resolvedBase = path.resolve(baseDir);
  const resolvedTarget = path.resolve(targetPath);
  const insideBase =
    resolvedTarget === resolvedBase ||
    resolvedTarget.startsWith(resolvedBase + path.sep);
  if (!insideBase) {
    throw new Error(
      `Path traversal attempt detected: entry "${entryName}" would extract to "${resolvedTarget}" outside target directory "${resolvedBase}"`
    );
  }
}
66
/**
 * Determine the archive format from a file path's extension.
 * Matching is case-insensitive. Returns one of "tar.gz", "tgz",
 * "tar", "zip", or null when the extension is unsupported.
 * ".tar.gz" is checked before ".tgz"/".tar" so the compound
 * extension wins.
 */
function detectArchiveFormat(filePath) {
  const lower = filePath.toLowerCase();
  const suffixes = [
    [".tar.gz", "tar.gz"],
    [".tgz", "tgz"],
    [".tar", "tar"],
    [".zip", "zip"]
  ];
  for (const [suffix, format] of suffixes) {
    if (lower.endsWith(suffix)) {
      return format;
    }
  }
  return null;
}
82
/**
 * Extract an uncompressed tar archive to `outputDir`.
 *
 * Security limits enforced while streaming:
 *  - symlink/hardlink entries abort the extraction,
 *  - per-file size capped at `options.maxFileSize` (default 100 MiB),
 *  - total size capped at `options.maxTotalSize` (default 1 GiB).
 * `options.strip` removes that many leading path components (tar-fs option).
 *
 * @param {string} archivePath - Path to the .tar file.
 * @param {string} outputDir - Directory to extract into (created if missing).
 * @param {object} [options] - { maxFileSize, maxTotalSize, strip }.
 * @throws On link entries, size-limit violations, or stream errors.
 */
async function extractTar(archivePath, outputDir, options = {}) {
  const {
    maxFileSize = DEFAULT_MAX_FILE_SIZE,
    maxTotalSize = DEFAULT_MAX_TOTAL_SIZE,
    strip = 0
  } = options;
  const normalizedOutputDir = (0, import_normalize.normalizePath)(outputDir);
  await (0, import_fs.safeMkdir)(normalizedOutputDir);
  let totalExtractedSize = 0;
  // Once a violation is seen, later map() calls pass headers through
  // unchecked — the stream is already being torn down.
  let destroyScheduled = false;
  const extractStream = import_tar_fs.default.extract(normalizedOutputDir, {
    map: (header) => {
      if (destroyScheduled) {
        return header;
      }
      // Reject link entries outright; a symlink could redirect later
      // writes outside the extraction root.
      if (header.type === "symlink" || header.type === "link") {
        destroyScheduled = true;
        // destroy() is deferred to the next tick so it does not run
        // re-entrantly from inside this header callback.
        process.nextTick(() => {
          extractStream.destroy(
            new Error(
              `Symlink detected in archive: ${header.name}. Symlinks are not supported for security reasons.`
            )
          );
        });
        return header;
      }
      if (header.size && header.size > maxFileSize) {
        destroyScheduled = true;
        process.nextTick(() => {
          extractStream.destroy(
            new Error(
              `File size exceeds limit: ${header.name} (${header.size} bytes > ${maxFileSize} bytes)`
            )
          );
        });
        return header;
      }
      if (header.size) {
        totalExtractedSize += header.size;
        if (totalExtractedSize > maxTotalSize) {
          destroyScheduled = true;
          process.nextTick(() => {
            extractStream.destroy(
              new Error(
                `Total extracted size exceeds limit: ${totalExtractedSize} bytes > ${maxTotalSize} bytes`
              )
            );
          });
          return header;
        }
      }
      return header;
    },
    strip
  });
  // No-op listener: prevents an unhandled 'error' event when destroy()
  // fires; pipeline() below still rejects with the same error.
  extractStream.on("error", () => {
  });
  const readStream = (0, import_node_fs.createReadStream)(archivePath);
  try {
    await (0, import_promises.pipeline)(readStream, extractStream);
  } catch (error) {
    // Ensure the source file handle is released before rethrowing.
    readStream.destroy();
    throw error;
  }
}
147
/**
 * Extract a gzip-compressed tar archive (.tar.gz / .tgz) to `outputDir`.
 * Identical to extractTar except the read stream is piped through
 * a gunzip transform first. Same security limits apply:
 *  - symlink/hardlink entries abort the extraction,
 *  - per-file size capped at `options.maxFileSize` (default 100 MiB),
 *  - total size capped at `options.maxTotalSize` (default 1 GiB).
 * `options.strip` removes that many leading path components (tar-fs option).
 *
 * @param {string} archivePath - Path to the .tar.gz/.tgz file.
 * @param {string} outputDir - Directory to extract into (created if missing).
 * @param {object} [options] - { maxFileSize, maxTotalSize, strip }.
 * @throws On link entries, size-limit violations, or stream errors.
 */
async function extractTarGz(archivePath, outputDir, options = {}) {
  const {
    maxFileSize = DEFAULT_MAX_FILE_SIZE,
    maxTotalSize = DEFAULT_MAX_TOTAL_SIZE,
    strip = 0
  } = options;
  const normalizedOutputDir = (0, import_normalize.normalizePath)(outputDir);
  await (0, import_fs.safeMkdir)(normalizedOutputDir);
  let totalExtractedSize = 0;
  // Once a violation is seen, later map() calls pass headers through
  // unchecked — the stream is already being torn down.
  let destroyScheduled = false;
  const extractStream = import_tar_fs.default.extract(normalizedOutputDir, {
    map: (header) => {
      if (destroyScheduled) {
        return header;
      }
      // Reject link entries outright; a symlink could redirect later
      // writes outside the extraction root.
      if (header.type === "symlink" || header.type === "link") {
        destroyScheduled = true;
        // destroy() is deferred to the next tick so it does not run
        // re-entrantly from inside this header callback.
        process.nextTick(() => {
          extractStream.destroy(
            new Error(
              `Symlink detected in archive: ${header.name}. Symlinks are not supported for security reasons.`
            )
          );
        });
        return header;
      }
      // NOTE: sizes are of the decompressed entries, so these caps also
      // bound the output of a highly-compressed ("bomb") archive.
      if (header.size && header.size > maxFileSize) {
        destroyScheduled = true;
        process.nextTick(() => {
          extractStream.destroy(
            new Error(
              `File size exceeds limit: ${header.name} (${header.size} bytes > ${maxFileSize} bytes)`
            )
          );
        });
        return header;
      }
      if (header.size) {
        totalExtractedSize += header.size;
        if (totalExtractedSize > maxTotalSize) {
          destroyScheduled = true;
          process.nextTick(() => {
            extractStream.destroy(
              new Error(
                `Total extracted size exceeds limit: ${totalExtractedSize} bytes > ${maxTotalSize} bytes`
              )
            );
          });
          return header;
        }
      }
      return header;
    },
    strip
  });
  // No-op listener: prevents an unhandled 'error' event when destroy()
  // fires; pipeline() below still rejects with the same error.
  extractStream.on("error", () => {
  });
  const readStream = (0, import_node_fs.createReadStream)(archivePath);
  try {
    // Decompress, then untar, with full backpressure handling.
    await (0, import_promises.pipeline)(readStream, (0, import_node_zlib.createGunzip)(), extractStream);
  } catch (error) {
    // Ensure the source file handle is released before rethrowing.
    readStream.destroy();
    throw error;
  }
}
212
/**
 * Extract a .zip archive to `outputDir`.
 *
 * All entries are validated BEFORE anything is written (adm-zip exposes the
 * full entry list up front, unlike the streaming tar path):
 *  - every entry — including directory entries, which extractAllTo also
 *    materializes — is checked for path traversal outside `outputDir`,
 *  - per-file uncompressed size capped at `options.maxFileSize`,
 *  - total uncompressed size capped at `options.maxTotalSize`.
 * `options.strip` removes that many leading path components from each
 * entry; entries with fewer components than `strip` are skipped.
 *
 * @param {string} archivePath - Path to the .zip file.
 * @param {string} outputDir - Directory to extract into (created if missing).
 * @param {object} [options] - { maxFileSize, maxTotalSize, strip }.
 * @throws On traversal attempts or size-limit violations.
 */
async function extractZip(archivePath, outputDir, options = {}) {
  const {
    maxFileSize = DEFAULT_MAX_FILE_SIZE,
    maxTotalSize = DEFAULT_MAX_TOTAL_SIZE,
    strip = 0
  } = options;
  const normalizedOutputDir = (0, import_normalize.normalizePath)(outputDir);
  await (0, import_fs.safeMkdir)(normalizedOutputDir);
  const zip = new import_adm_zip.default(archivePath);
  const path = /* @__PURE__ */ getPath();
  const entries = zip.getEntries();
  let totalExtractedSize = 0;
  // Validate every entry up front so a malicious archive writes nothing.
  // Directory entries must be traversal-checked too (the previous code
  // skipped them, yet extractAllTo creates them); only file entries count
  // toward the size limits.
  for (const entry of entries) {
    const parts = entry.entryName.split("/");
    if (parts.length <= strip) {
      continue;
    }
    const strippedPath = parts.slice(strip).join("/");
    const targetPath = path.join(normalizedOutputDir, strippedPath);
    validatePathWithinBase(targetPath, normalizedOutputDir, entry.entryName);
    if (entry.isDirectory) {
      continue;
    }
    const uncompressedSize = entry.header.size;
    if (uncompressedSize > maxFileSize) {
      throw new Error(
        `File size exceeds limit: ${entry.entryName} (${uncompressedSize} bytes > ${maxFileSize} bytes)`
      );
    }
    totalExtractedSize += uncompressedSize;
    if (totalExtractedSize > maxTotalSize) {
      throw new Error(
        `Total extracted size exceeds limit: ${totalExtractedSize} bytes > ${maxTotalSize} bytes`
      );
    }
  }
  if (strip === 0) {
    // Fast path: extract the whole (already-validated) archive in one call.
    zip.extractAllTo(normalizedOutputDir, true);
    return;
  }
  // strip > 0: compute each file's stripped destination, pre-create the
  // destination directories, then extract file entries one by one.
  const dirsToCreate = /* @__PURE__ */ new Set();
  for (const entry of entries) {
    if (entry.isDirectory) {
      continue;
    }
    const parts = entry.entryName.split("/");
    if (parts.length <= strip) {
      continue;
    }
    const strippedPath = parts.slice(strip).join("/");
    const targetPath = path.join(normalizedOutputDir, strippedPath);
    dirsToCreate.add(path.dirname(targetPath));
  }
  await Promise.all(Array.from(dirsToCreate).map((dir) => (0, import_fs.safeMkdir)(dir)));
  for (const entry of entries) {
    if (entry.isDirectory) {
      continue;
    }
    const parts = entry.entryName.split("/");
    if (parts.length <= strip) {
      continue;
    }
    const strippedPath = parts.slice(strip).join("/");
    const targetPath = path.join(normalizedOutputDir, strippedPath);
    // maintainEntryPath=false extracts the entry by its basename into
    // dirname(targetPath); overwrite=true matches extractAllTo above.
    zip.extractEntryTo(entry, path.dirname(targetPath), false, true);
  }
}
287
/**
 * Extract an archive, dispatching on the format detected from its
 * file extension. Supports .zip, .tar, .tar.gz, and .tgz; throws a
 * descriptive error for anything else.
 *
 * @param {string} archivePath - Path to the archive file.
 * @param {string} outputDir - Directory to extract into.
 * @param {object} [options] - Passed through to the format handler.
 */
async function extractArchive(archivePath, outputDir, options = {}) {
  const format = detectArchiveFormat(archivePath);
  if (format === null) {
    const path = /* @__PURE__ */ getPath();
    const ext = path.extname(archivePath).toLowerCase();
    throw new Error(
      `Unsupported archive format${ext ? ` (extension: ${ext})` : ""}: ${archivePath}. Supported formats: .zip, .tar, .tar.gz, .tgz`
    );
  }
  if (format === "zip") {
    return await extractZip(archivePath, outputDir, options);
  }
  if (format === "tar") {
    return await extractTar(archivePath, outputDir, options);
  }
  // "tar.gz" and "tgz" share the gzip-aware tar handler.
  return await extractTarGz(archivePath, outputDir, options);
}
306
// Annotate the CommonJS export names for ESM import in node:
// (Dead code by design: the `0 &&` guard means this never runs. esbuild
// emits it so Node's CJS named-export static analysis can expose these
// names to `import { ... }` consumers.)
0 && (module.exports = {
  detectArchiveFormat,
  extractArchive,
  extractTar,
  extractTarGz,
  extractZip
});
package/dist/arrays.js CHANGED
@@ -57,10 +57,9 @@ function arrayChunk(arr, size) {
57
57
  throw new Error("Chunk size must be greater than 0");
58
58
  }
59
59
  const { length } = arr;
60
- const actualChunkSize = Math.min(length, chunkSize);
61
60
  const chunks = [];
62
- for (let i = 0; i < length; i += actualChunkSize) {
63
- chunks.push(arr.slice(i, i + actualChunkSize));
61
+ for (let i = 0; i < length; i += chunkSize) {
62
+ chunks.push(arr.slice(i, i + chunkSize));
64
63
  }
65
64
  return chunks;
66
65
  }
@@ -54,7 +54,12 @@ function createTtlCache(options) {
54
54
  return `${opts.prefix}:${key}`;
55
55
  }
56
56
  function isExpired(entry) {
57
- return Date.now() > entry.expiresAt;
57
+ const now = Date.now();
58
+ const maxFutureMs = 1e4;
59
+ if (entry.expiresAt > now + ttl + maxFutureMs) {
60
+ return true;
61
+ }
62
+ return now > entry.expiresAt;
58
63
  }
59
64
  function createMatcher(pattern) {
60
65
  const fullPattern = buildKey(pattern);
@@ -110,7 +115,7 @@ function createTtlCache(options) {
110
115
  memoCache.delete(key);
111
116
  continue;
112
117
  }
113
- const originalKey = key.slice((opts.prefix?.length ?? 0) + 1);
118
+ const originalKey = opts.prefix ? key.slice(opts.prefix.length + 1) : key;
114
119
  results.set(originalKey, entry.data);
115
120
  }
116
121
  }
@@ -124,7 +129,7 @@ function createTtlCache(options) {
124
129
  if (!matches(cacheEntry.key)) {
125
130
  continue;
126
131
  }
127
- const originalKey = cacheEntry.key.slice((opts.prefix?.length ?? 0) + 1);
132
+ const originalKey = opts.prefix ? cacheEntry.key.slice(opts.prefix.length + 1) : cacheEntry.key;
128
133
  if (results.has(originalKey)) {
129
134
  continue;
130
135
  }
@@ -170,14 +175,28 @@ function createTtlCache(options) {
170
175
  } catch {
171
176
  }
172
177
  }
178
+ const inflightRequests = /* @__PURE__ */ new Map();
173
179
  async function getOrFetch(key, fetcher) {
174
180
  const cached = await get(key);
175
181
  if (cached !== void 0) {
176
182
  return cached;
177
183
  }
178
- const data = await fetcher();
179
- await set(key, data);
180
- return data;
184
+ const fullKey = buildKey(key);
185
+ const existing = inflightRequests.get(fullKey);
186
+ if (existing) {
187
+ return await existing;
188
+ }
189
+ const promise = (async () => {
190
+ try {
191
+ const data = await fetcher();
192
+ await set(key, data);
193
+ return data;
194
+ } finally {
195
+ inflightRequests.delete(fullKey);
196
+ }
197
+ })();
198
+ inflightRequests.set(fullKey, promise);
199
+ return await promise;
181
200
  }
182
201
  async function deleteEntry(key) {
183
202
  if (key.includes("*")) {
@@ -47,7 +47,8 @@ function getNodeVersion() {
47
47
  return NODE_VERSION;
48
48
  }
49
49
  function getNodeMajorVersion() {
50
- return Number.parseInt(NODE_VERSION.slice(1).split(".")[0] ?? "0", 10);
50
+ const major = NODE_VERSION.slice(1).split(".")[0] ?? "0";
51
+ return Number.parseInt(major, 10) || 0;
51
52
  }
52
53
  function getNodeMinorVersion() {
53
54
  return Number.parseInt(NODE_VERSION.split(".")[1] ?? "0", 10);
@@ -77,7 +77,8 @@ function formatCoverage(options) {
77
77
  { count: 2 }
78
78
  );
79
79
  }
80
- const emoji = getCoverageEmoji(Number.parseFloat(overall));
80
+ const overallValue = Number.parseFloat(overall);
81
+ const emoji = getCoverageEmoji(Number.isNaN(overallValue) ? 0 : overallValue);
81
82
  output += `
82
83
  Overall: ${overall}%${emoji}
83
84
  `;
@@ -89,9 +90,10 @@ function calculateOverall(code, type) {
89
90
  Number.parseFloat(code.branches.percent),
90
91
  Number.parseFloat(code.functions.percent),
91
92
  Number.parseFloat(code.lines.percent)
92
- ];
93
+ ].map((val) => Number.isNaN(val) ? 0 : val);
93
94
  if (type) {
94
- metrics.push(Number.parseFloat(type.percent));
95
+ const typePercent = Number.parseFloat(type.percent);
96
+ metrics.push(Number.isNaN(typePercent) ? 0 : typePercent);
95
97
  }
96
98
  const average = metrics.reduce((sum, val) => sum + val, 0) / metrics.length;
97
99
  return average.toFixed(2);
@@ -123,6 +123,12 @@ export declare function downloadBinary(options: Omit<DlxBinaryOptions, 'spawnOpt
123
123
  binaryPath: string;
124
124
  downloaded: boolean;
125
125
  }>;
126
+ /**
127
+ * Download a file from a URL with integrity checking and concurrent download protection.
128
+ * Uses processLock to prevent multiple processes from downloading the same binary simultaneously.
129
+ * Internal helper function for downloading binary files.
130
+ */
131
+ export declare function downloadBinaryFile(url: string, destPath: string, integrity?: string | undefined): Promise<string>;
126
132
  /**
127
133
  * Execute a cached binary without re-downloading.
128
134
  * Similar to executePackage from dlx-package.
@@ -141,6 +147,14 @@ export declare function executeBinary(binaryPath: string, args: readonly string[
141
147
  * Uses same directory as dlx-package for unified DLX storage.
142
148
  */
143
149
  export declare function getDlxCachePath(): string;
150
+ /**
151
+ * Get metadata file path for a cached binary.
152
+ */
153
+ export declare function getBinaryCacheMetadataPath(cacheEntryPath: string): string;
154
+ /**
155
+ * Check if a cached binary is still valid.
156
+ */
157
+ export declare function isBinaryCacheValid(cacheEntryPath: string, cacheTtl: number): Promise<boolean>;
144
158
  /**
145
159
  * Get information about cached binaries.
146
160
  */
@@ -151,3 +165,9 @@ export declare function listDlxCache(): Promise<Array<{
151
165
  size: number;
152
166
  url: string;
153
167
  }>>;
168
+ /**
169
+ * Write metadata for a cached binary.
170
+ * Uses unified schema shared with C++ decompressor and CLI dlxBinary.
171
+ * Schema documentation: See DlxMetadata interface in this file (exported).
172
+ */
173
+ export declare function writeBinaryCacheMetadata(cacheEntryPath: string, cacheKey: string, url: string, integrity: string, size: number): Promise<void>;