@socketsecurity/lib 3.1.3 → 3.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -5,6 +5,37 @@ All notable changes to this project will be documented in this file.
5
5
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
6
6
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7
7
 
8
+ ## [3.2.1](https://github.com/SocketDev/socket-lib/releases/tag/v3.2.1) - 2025-11-02
9
+
10
+ ### Changed
11
+
12
+ - **Logger/Spinner**: Use module-level constants to prevent duplicate and rogue spinner indicators
13
+ - Call `getDefaultLogger()` and `getDefaultSpinner()` once at module scope instead of repeated calls
14
+ - Prevents multiple spinner instances that can cause duplicate or lingering indicators in terminal output
15
+ - Applied in `src/dlx-manifest.ts`, `src/stdio/mask.ts`, and `src/spinner.ts`
16
+ - Follows DRY principle and aligns with socket-registry/socket-sdk-js patterns
17
+
18
+ ### Fixed
19
+
20
+ - **Scripts**: Fixed undefined logger variable in update script
21
+ - Replaced undefined `log` references with `_logger` throughout `scripts/update.mjs`
22
+ - Resolves ESLint errors that blocked test execution
23
+ - **Tests**: Improved stdout test stability by checking call delta instead of absolute counts
24
+ - Fixed flaky CI failures where spy call count was 101 instead of expected 100
25
+ - More robust approach handles potential state leakage between tests
26
+ - **Tests**: Removed unnecessary 10ms delay in cache-with-ttl test
27
+ - Cache with memoization enabled updates in-memory storage synchronously
28
+ - Delay was insufficient in CI and unnecessary given synchronous behavior
29
+ - Resolves flaky CI failures where cached values returned undefined
30
+
31
+ ## [3.2.0](https://github.com/SocketDev/socket-lib/releases/tag/v3.2.0) - 2025-11-02
32
+
33
+ ### Added
34
+
35
+ - **DLX**: Unified manifest for packages and binaries
36
+ - Centralized manifest system for tracking DLX-compatible packages
37
+ - Simplifies package and binary lookups for dependency-free execution
38
+
8
39
  ## [3.1.3](https://github.com/SocketDev/socket-lib/releases/tag/v3.1.3) - 2025-11-02
9
40
 
10
41
  ### Changed
@@ -41,6 +41,7 @@ var import_os = __toESM(require("os"));
41
41
  var import_path = __toESM(require("path"));
42
42
  var import_platform = require("#constants/platform");
43
43
  var import_dlx = require("./dlx");
44
+ var import_dlx_manifest = require("./dlx-manifest");
44
45
  var import_http_request = require("./http-request");
45
46
  var import_fs = require("./fs");
46
47
  var import_objects = require("./objects");
@@ -129,7 +130,7 @@ Check your internet connection or verify the URL is accessible.`,
129
130
  }
130
131
  );
131
132
  }
132
- async function writeMetadata(cacheEntryPath, cacheKey, url, checksum, size) {
133
+ async function writeMetadata(cacheEntryPath, cacheKey, url, binaryName, checksum, size) {
133
134
  const metaPath = getMetadataPath(cacheEntryPath);
134
135
  const metadata = {
135
136
  version: "1.0.0",
@@ -147,6 +148,21 @@ async function writeMetadata(cacheEntryPath, cacheKey, url, checksum, size) {
147
148
  };
148
149
  const fs = /* @__PURE__ */ getFs();
149
150
  await fs.promises.writeFile(metaPath, JSON.stringify(metadata, null, 2));
151
+ try {
152
+ const spec = `${url}:${binaryName}`;
153
+ await import_dlx_manifest.dlxManifest.setBinaryEntry(spec, cacheKey, {
154
+ checksum,
155
+ checksum_algorithm: "sha256",
156
+ platform: import_os.default.platform(),
157
+ arch: import_os.default.arch(),
158
+ size,
159
+ source: {
160
+ type: "download",
161
+ url
162
+ }
163
+ });
164
+ } catch {
165
+ }
150
166
  }
151
167
  async function cleanDlxCache(maxAge = (
152
168
  /*@__INLINE__*/
@@ -258,6 +274,7 @@ Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.`
258
274
  cacheEntryDir,
259
275
  cacheKey,
260
276
  url,
277
+ binaryName,
261
278
  computedChecksum || "",
262
279
  stats.size
263
280
  );
@@ -329,6 +346,7 @@ Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.`
329
346
  cacheEntryDir,
330
347
  cacheKey,
331
348
  url,
349
+ binaryName,
332
350
  computedChecksum || "",
333
351
  stats.size
334
352
  );
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../src/dlx-binary.ts"],
4
- "sourcesContent": ["/** @fileoverview DLX binary execution utilities for Socket ecosystem. */\n\nimport { createHash } from 'crypto'\n\nimport os from 'os'\n\nimport path from 'path'\n\nimport { WIN32 } from '#constants/platform'\n\nimport { generateCacheKey } from './dlx'\nimport { httpDownload } from './http-request'\nimport { isDir, readJson, safeDelete, safeMkdir } from './fs'\nimport { isObjectObject } from './objects'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nexport interface DlxBinaryOptions {\n /**\n * URL to download the binary from.\n */\n url: string\n\n /**\n * Optional name for the cached binary (defaults to URL hash).\n */\n name?: string | undefined\n\n /**\n * Expected checksum (sha256) for verification.\n */\n checksum?: string | undefined\n\n /**\n * Cache TTL in milliseconds (default: 7 days).\n */\n cacheTtl?: number | undefined\n\n /**\n * Force re-download even if cached.\n * Aligns with npm/npx --force flag.\n */\n force?: boolean | undefined\n\n /**\n * Skip confirmation prompts (auto-approve).\n * Aligns with npx --yes/-y flag.\n */\n yes?: boolean | undefined\n\n /**\n * Suppress output (quiet mode).\n * Aligns with npx --quiet/-q and pnpm --silent/-s flags.\n */\n quiet?: boolean | undefined\n\n /**\n * Additional spawn options.\n */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface 
DlxBinaryResult {\n /** Path to the cached binary. */\n binaryPath: string\n /** Whether the binary was newly downloaded. */\n downloaded: boolean\n /** The spawn promise for the running process. */\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Metadata structure for cached binaries (.dlx-metadata.json).\n * Unified schema shared across TypeScript (dlxBinary) and C++ (socket_macho_decompress).\n *\n * Core Fields (present in all implementations):\n * - version: Schema version (currently \"1.0.0\")\n * - cache_key: First 16 chars of SHA-512 hash (matches directory name)\n * - timestamp: Unix timestamp in milliseconds\n * - checksum: Full hash of cached binary (SHA-512 for C++, SHA-256 for TypeScript)\n * - checksum_algorithm: \"sha512\" or \"sha256\"\n * - platform: \"darwin\" | \"linux\" | \"win32\"\n * - arch: \"x64\" | \"arm64\"\n * - size: Size of cached binary in bytes\n * - source: Origin information\n * - type: \"download\" (from URL) or \"decompression\" (from embedded binary)\n * - url: Download URL (if type is \"download\")\n * - path: Source binary path (if type is \"decompression\")\n *\n * Extra Fields (implementation-specific):\n * - For C++ decompression:\n * - compressed_size: Size of compressed data in bytes\n * - compression_algorithm: Brotli level (numeric)\n * - compression_ratio: original_size / compressed_size\n *\n * Example (TypeScript download):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"a1b2c3d4e5f67890\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": \"sha256-abc123...\",\n * \"checksum_algorithm\": \"sha256\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 15000000,\n * \"source\": {\n * \"type\": \"download\",\n * \"url\": \"https://example.com/binary\"\n * }\n * }\n * ```\n *\n * Example (C++ decompression):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"0123456789abcdef\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": \"sha512-def456...\",\n * 
\"checksum_algorithm\": \"sha512\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 13000000,\n * \"source\": {\n * \"type\": \"decompression\",\n * \"path\": \"/usr/local/bin/socket\"\n * },\n * \"extra\": {\n * \"compressed_size\": 1700000,\n * \"compression_algorithm\": 3,\n * \"compression_ratio\": 7.647\n * }\n * }\n * ```\n *\n * @internal This interface documents the metadata file format.\n */\nexport interface DlxMetadata {\n version: string\n cache_key: string\n timestamp: number\n checksum: string\n checksum_algorithm: string\n platform: string\n arch: string\n size: number\n source?: {\n type: 'download' | 'decompression'\n url?: string\n path?: string\n }\n extra?: Record<string, unknown>\n}\n\n/**\n * Get metadata file path for a cached binary.\n */\nfunction getMetadataPath(cacheEntryPath: string): string {\n return path.join(cacheEntryPath, '.dlx-metadata.json')\n}\n\n/**\n * Check if a cached binary is still valid.\n */\nasync function isCacheValid(\n cacheEntryPath: string,\n cacheTtl: number,\n): Promise<boolean> {\n const fs = getFs()\n try {\n const metaPath = getMetadataPath(cacheEntryPath)\n if (!fs.existsSync(metaPath)) {\n return false\n }\n\n const metadata = await readJson(metaPath, { throws: false })\n if (!isObjectObject(metadata)) {\n return false\n }\n const now = Date.now()\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, cache is invalid\n if (typeof timestamp !== 'number' || timestamp <= 0) {\n return false\n }\n const age = now - timestamp\n\n return age < cacheTtl\n } catch {\n return false\n }\n}\n\n/**\n * Download a file from a URL with integrity checking and concurrent download protection.\n * Uses processLock to prevent multiple processes from downloading the same binary simultaneously.\n * Internal helper function for downloading binary files.\n */\nasync function downloadBinaryFile(\n url: string,\n destPath: string,\n checksum?: string | 
undefined,\n): Promise<string> {\n // Use process lock to prevent concurrent downloads.\n // Lock is placed in the cache entry directory as 'concurrency.lock'.\n const cacheEntryDir = path.dirname(destPath)\n const lockPath = path.join(cacheEntryDir, 'concurrency.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n const fs = getFs()\n // Check if file was downloaded while waiting for lock.\n if (fs.existsSync(destPath)) {\n const stats = await fs.promises.stat(destPath)\n if (stats.size > 0) {\n // File exists, compute and return checksum.\n const fileBuffer = await fs.promises.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n return hasher.digest('hex')\n }\n }\n\n // Download the file.\n try {\n await httpDownload(url, destPath)\n } catch (e) {\n throw new Error(\n `Failed to download binary from ${url}\\n` +\n `Destination: ${destPath}\\n` +\n 'Check your internet connection or verify the URL is accessible.',\n { cause: e },\n )\n }\n\n // Compute checksum of downloaded file.\n const fileBuffer = await fs.promises.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n const actualChecksum = hasher.digest('hex')\n\n // Verify checksum if provided.\n if (checksum && actualChecksum !== checksum) {\n // Clean up invalid file.\n await safeDelete(destPath)\n throw new Error(\n `Checksum mismatch: expected ${checksum}, got ${actualChecksum}`,\n )\n }\n\n // Make executable on POSIX systems.\n if (!WIN32) {\n await fs.promises.chmod(destPath, 0o755)\n }\n\n return actualChecksum\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Write metadata for a cached binary.\n * Uses unified schema shared with C++ decompressor and CLI dlxBinary.\n * Schema documentation: See DlxMetadata interface in this file (exported).\n * Core fields: version, cache_key, timestamp, checksum, checksum_algorithm, platform, arch, size, 
source\n * Note: This implementation uses SHA-256 checksums instead of SHA-512.\n */\nasync function writeMetadata(\n cacheEntryPath: string,\n cacheKey: string,\n url: string,\n checksum: string,\n size: number,\n): Promise<void> {\n const metaPath = getMetadataPath(cacheEntryPath)\n const metadata = {\n version: '1.0.0',\n cache_key: cacheKey,\n timestamp: Date.now(),\n checksum,\n checksum_algorithm: 'sha256',\n platform: os.platform(),\n arch: os.arch(),\n size,\n source: {\n type: 'download',\n url,\n },\n }\n const fs = getFs()\n await fs.promises.writeFile(metaPath, JSON.stringify(metadata, null, 2))\n}\n\n/**\n * Clean expired entries from the DLX cache.\n */\nexport async function cleanDlxCache(\n maxAge: number = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n): Promise<number> {\n const cacheDir = getDlxCachePath()\n const fs = getFs()\n\n if (!fs.existsSync(cacheDir)) {\n return 0\n }\n\n let cleaned = 0\n const now = Date.now()\n const entries = await fs.promises.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n const metaPath = getMetadataPath(entryPath)\n\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, treat as expired (age = infinity)\n const age =\n typeof timestamp === 'number' && timestamp > 0\n ? 
now - timestamp\n : Number.POSITIVE_INFINITY\n\n if (age > maxAge) {\n // Remove entire cache entry directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath, { force: true, recursive: true })\n cleaned += 1\n }\n } catch {\n // If we can't read metadata, check if directory is empty or corrupted.\n try {\n // eslint-disable-next-line no-await-in-loop\n const contents = await fs.promises.readdir(entryPath)\n if (!contents.length) {\n // Remove empty directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath)\n cleaned += 1\n }\n } catch {}\n }\n }\n\n return cleaned\n}\n\n/**\n * Download and execute a binary from a URL with caching.\n */\nexport async function dlxBinary(\n args: readonly string[] | string[],\n options?: DlxBinaryOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxBinaryResult> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force: userForce = false,\n name,\n spawnOptions,\n url,\n yes,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Map --yes flag to force behavior (auto-approve/skip prompts)\n const force = yes === true ? 
true : userForce\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n const fs = getFs()\n\n let downloaded = false\n let computedChecksum = checksum\n\n // Check if we need to download.\n if (\n !force &&\n fs.existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid, read the checksum from metadata.\n try {\n const metaPath = getMetadataPath(cacheEntryDir)\n const metadata = await readJson(metaPath, { throws: false })\n if (\n metadata &&\n typeof metadata === 'object' &&\n !Array.isArray(metadata) &&\n typeof (metadata as Record<string, unknown>)['checksum'] === 'string'\n ) {\n computedChecksum = (metadata as Record<string, unknown>)[\n 'checksum'\n ] as string\n } else {\n // If metadata is invalid, re-download.\n downloaded = true\n }\n } catch {\n // If we can't read metadata, re-download.\n downloaded = true\n }\n } else {\n downloaded = true\n }\n\n if (downloaded) {\n // Ensure cache directory exists before downloading.\n try {\n await safeMkdir(cacheEntryDir)\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new 
Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.promises.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n computedChecksum || '',\n stats.size,\n )\n }\n\n // Execute the binary.\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension (e.g.,\n // C:\\cache\\test.cmd becomes just \"test\"). Windows cmd.exe then searches for \"test\"\n // in directories listed in PATH, trying each extension from PATHEXT environment\n // variable (.COM, .EXE, .BAT, .CMD, etc.) until it finds a match.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH\n // (unlike system package managers like npm/pnpm/yarn which are already in PATH),\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n //\n // This approach is consistent with how other tools handle Windows command execution:\n // - npm's promise-spawn: uses which.sync() to find commands in PATH\n // - cross-spawn: spawns cmd.exe with escaped arguments\n // - Node.js spawn with shell: true: delegates to cmd.exe which uses PATH\n const finalSpawnOptions = needsShell\n ? 
{\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n const spawnPromise = spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n\n return {\n binaryPath,\n downloaded,\n spawnPromise,\n }\n}\n\n/**\n * Download a binary from a URL with caching (without execution).\n * Similar to downloadPackage from dlx-package.\n *\n * @returns Object containing the path to the cached binary and whether it was downloaded\n */\nexport async function downloadBinary(\n options: Omit<DlxBinaryOptions, 'spawnOptions'>,\n): Promise<{ binaryPath: string; downloaded: boolean }> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force = false,\n name,\n url,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n const fs = getFs()\n\n let downloaded = false\n\n // Check if we need to download.\n if (\n !force &&\n fs.existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid.\n downloaded = false\n } else {\n // Ensure cache directory exists before downloading.\n try {\n await safeMkdir(cacheEntryDir)\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n 
throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n const computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.promises.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n computedChecksum || '',\n stats.size,\n )\n downloaded = true\n }\n\n return {\n binaryPath,\n downloaded,\n }\n}\n\n/**\n * Execute a cached binary without re-downloading.\n * Similar to executePackage from dlx-package.\n * Binary must have been previously downloaded via downloadBinary or dlxBinary.\n *\n * @param binaryPath Path to the cached binary (from downloadBinary result)\n * @param args Arguments to pass to the binary\n * @param spawnOptions Spawn options for execution\n * @param spawnExtra Extra spawn configuration\n * @returns The spawn promise for the running process\n */\nexport function executeBinary(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension. 
Windows cmd.exe\n // then searches for the binary in directories listed in PATH.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH,\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n const cacheEntryDir = path.dirname(binaryPath)\n const finalSpawnOptions = needsShell\n ? {\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n\n return spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n}\n\n/**\n * Get the DLX binary cache directory path.\n * Returns normalized path for cross-platform compatibility.\n * Uses same directory as dlx-package for unified DLX storage.\n */\nexport function getDlxCachePath(): string {\n return getSocketDlxDir()\n}\n\n/**\n * Get information about cached binaries.\n */\nexport async function listDlxCache(): Promise<\n Array<{\n age: number\n arch: string\n checksum: string\n name: string\n platform: string\n size: number\n url: string\n }>\n> {\n const cacheDir = getDlxCachePath()\n const fs = getFs()\n\n if (!fs.existsSync(cacheDir)) {\n return []\n }\n\n const results = []\n const now = Date.now()\n const entries = await fs.promises.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n const metaPath = getMetadataPath(entryPath)\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n\n const metaObj = metadata as Record<string, unknown>\n\n // Get URL from unified schema (source.url) or legacy schema (url).\n // Allow empty URL for backward compatibility with partial metadata.\n const source = metaObj['source'] as Record<string, unknown> | 
undefined\n const url =\n (source?.['url'] as string) || (metaObj['url'] as string) || ''\n\n // Find the binary file in the directory.\n // eslint-disable-next-line no-await-in-loop\n const files = await fs.promises.readdir(entryPath)\n const binaryFile = files.find(f => !f.startsWith('.'))\n\n if (binaryFile) {\n const binaryPath = path.join(entryPath, binaryFile)\n // eslint-disable-next-line no-await-in-loop\n const binaryStats = await fs.promises.stat(binaryPath)\n\n results.push({\n age: now - ((metaObj['timestamp'] as number) || 0),\n arch: (metaObj['arch'] as string) || 'unknown',\n checksum: (metaObj['checksum'] as string) || '',\n name: binaryFile,\n platform: (metaObj['platform'] as string) || 'unknown',\n size: binaryStats.size,\n url,\n })\n }\n } catch {}\n }\n\n return results\n}\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAA2B;AAE3B,gBAAe;AAEf,kBAAiB;AAEjB,sBAAsB;AAEtB,iBAAiC;AACjC,0BAA6B;AAC7B,gBAAuD;AACvD,qBAA+B;AAC/B,IAAAA,eAA8B;AAC9B,mBAAgC;AAChC,0BAA4B;AAE5B,mBAAsB;AAEtB,IAAI;AAAA;AASJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AA+IA,SAAS,gBAAgB,gBAAgC;AACvD,SAAO,YAAAC,QAAK,KAAK,gBAAgB,oBAAoB;AACvD;AAKA,eAAe,aACb,gBACA,UACkB;AAClB,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,WAAW,gBAAgB,cAAc;AAC/C,QAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,QAAI,KAAC,+BAAe,QAAQ,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,YAAa,SAAqC,WAAW;AAEnE,QAAI,OAAO,cAAc,YAAY,aAAa,GAAG;AACnD,aAAO;AAAA,IACT;AACA,UAAM,MAAM,MAAM;AAElB,WAAO,MAAM;AAAA,EACf,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAOA,eAAe,mBACb,KACA,UACA,UACiB;AAGjB,QAAM,gBAAgB,YAAAA,QAAK,QAAQ,QAAQ;AAC3C,QAAM,WAAW,YAAAA,QAAK,KAAK,eAAe,kBAAkB;AAE5D,SAAO,MAAM,gCAAY;AAAA,IACvB;AAAA,IACA,YAAY;AACV,YAAM,KAAK,sBAAM;AAEjB,UAAI,GAAG,WAAW,QAAQ,GAAG;AAC3B,cAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,QAAQ;AAC7C,YAAI,MAAM,OAAO,GAAG;AAElB,gBAAMC,cAAa,MAAM,GAAG,SAAS,SAAS,QAAQ;AACtD,gBAAMC,cAAS,0BAAW,QAAQ;AAClC,UAAAA,QAAO,OAAOD,WAAU;AACxB,iBAAOC,QAAO,OAAO,KAAK;AAAA,QAC5B;AAAA,MACF;AAGA,UAAI;AACF,kBAAM,kCAAa,KAAK,QAAQ;AAAA,MAClC,SAAS,GAAG;AACV,cAAM,IAAI;AAAA,UACR,kCAAkC,GAAG;AAAA,eACnB,QAAQ;AAAA;AAAA,UAE1B,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AAGA,YAAM,aAAa,MAAM,GAAG,SAAS,SAAS,QAAQ;AACtD,YAAM,aAAS,0BAAW,QAAQ;AAClC,aAAO,OAAO,UAAU;AACxB,YAAM,iBAAiB,OAAO,OAAO,KAAK;AAG1C,UAAI,YAAY,mBAAmB,UAAU;AAE3C,kBAAM,sBAAW,QAAQ;AACzB,cAAM,IAAI;AAAA,UACR,+BAA+B,QAAQ,SAAS,cAAc;AAAA,QAChE;AAAA,MACF;AAGA,UAAI,CAAC,uBAAO;AACV,cAAM,GAAG,SAAS,MAAM,UAAU,GAAK;AAAA,MACzC;AAEA,aAAO;AAAA,IACT;AAAA,IACA;AAAA;AAAA,MAEE,SAAS;AAAA,MACT,iBAAiB;AAAA,IACnB;AAAA,EACF;AACF;AASA,eAAe,cACb,gBACA,UACA,KACA,UACA,MACe;AACf,QAAM,WAAW,gBAAgB,cAAc;AAC/C,QAAM,WAAW;AAAA,IACf,SAAS;AAAA,IACT,WAAW;AAAA,IACX,WAAW,KAAK,IAAI;AAAA,IACpB;A
AAA,IACA,oBAAoB;AAAA,IACpB,UAAU,UAAAC,QAAG,SAAS;AAAA,IACtB,MAAM,UAAAA,QAAG,KAAK;AAAA,IACd;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,GAAG,SAAS,UAAU,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AACzE;AAKA,eAAsB,cACpB;AAAA;AAAA,EAAiC,QAAQ,iBAAiB,EAAE;AAAA,GAC3C;AACjB,QAAM,WAAW,gBAAgB;AACjC,QAAM,KAAK,sBAAM;AAEjB,MAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,WAAO;AAAA,EACT;AAEA,MAAI,UAAU;AACd,QAAM,MAAM,KAAK,IAAI;AACrB,QAAM,UAAU,MAAM,GAAG,SAAS,QAAQ,QAAQ;AAElD,aAAW,SAAS,SAAS;AAC3B,UAAM,YAAY,YAAAH,QAAK,KAAK,UAAU,KAAK;AAC3C,UAAM,WAAW,gBAAgB,SAAS;AAE1C,QAAI;AAEF,UAAI,CAAE,UAAM,iBAAM,SAAS,GAAI;AAC7B;AAAA,MACF;AAGA,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,CAAC,YACD,OAAO,aAAa,YACpB,MAAM,QAAQ,QAAQ,GACtB;AACA;AAAA,MACF;AACA,YAAM,YAAa,SAAqC,WAAW;AAEnE,YAAM,MACJ,OAAO,cAAc,YAAY,YAAY,IACzC,MAAM,YACN,OAAO;AAEb,UAAI,MAAM,QAAQ;AAGhB,kBAAM,sBAAW,WAAW,EAAE,OAAO,MAAM,WAAW,KAAK,CAAC;AAC5D,mBAAW;AAAA,MACb;AAAA,IACF,QAAQ;AAEN,UAAI;AAEF,cAAM,WAAW,MAAM,GAAG,SAAS,QAAQ,SAAS;AACpD,YAAI,CAAC,SAAS,QAAQ;AAGpB,oBAAM,sBAAW,SAAS;AAC1B,qBAAW;AAAA,QACb;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,UACpB,MACA,SACA,YAC0B;AAC1B,QAAM;AAAA,IACJ;AAAA;AAAA,MAA2B,QAAQ,iBAAiB,EAAE;AAAA;AAAA,IACtD;AAAA,IACA,OAAO,YAAY;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAGlC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAGpC,QAAM,WAAW,gBAAgB;AACjC,QAAM,aAAa,QAAQ,UAAU,QAAQ,QAAQ,IAAI,UAAAG,QAAG,KAAK,CAAC;AAElE,QAAM,OAAO,GAAG,GAAG,IAAI,UAAU;AACjC,QAAM,eAAW,6BAAiB,IAAI;AACtC,QAAM,gBAAgB,YAAAH,QAAK,KAAK,UAAU,QAAQ;AAClD,QAAM,iBAAa,4BAAc,YAAAA,QAAK,KAAK,eAAe,UAAU,CAAC;AACrE,QAAM,KAAK,sBAAM;AAEjB,MAAI,aAAa;AACjB,MAAI,mBAAmB;AAGvB,MACE,CAAC,SACD,GAAG,WAAW,aAAa,KAC1B,MAAM,aAAa,eAAe,QAAQ,GAC3C;AAEA,QAAI;AACF,YAAM,WAAW,gBAAgB,aAAa;AAC9C,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,YACA,OAAO,aAAa,YACpB,CAAC,MAAM,QAAQ,QAAQ,KACvB,OAAQ,SAAqC,UAAU,MAAM,UAC7D;AACA,2BAAoB,SAClB,UACF;AAAA,MACF,OAAO;AAEL,qBAAa;AAAA,MACf;AAAA,IACF,QAAQ;AAEN,mBAAa;A
AAA,IACf;AAAA,EACF,OAAO;AACL,iBAAa;AAAA,EACf;AAEA,MAAI,YAAY;AAEd,QAAI;AACF,gBAAM,qBAAU,aAAa;AAAA,IAC/B,SAAS,GAAG;AACV,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,sDAAsD,aAAa;AAAA;AAAA,UAEnE,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,SAAS,SAAS;AACpB,cAAM,IAAI;AAAA,UACR,iEAAiE,aAAa;AAAA;AAAA,UAE9E,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,4CAA4C,aAAa;AAAA,QACzD,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AAGA,uBAAmB,MAAM,mBAAmB,KAAK,YAAY,QAAQ;AAGrE,UAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,UAAU;AAC/C,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,oBAAoB;AAAA,MACpB,MAAM;AAAA,IACR;AAAA,EACF;AAMA,QAAM,aAAa,yBAAS,sBAAsB,KAAK,UAAU;AAgBjE,QAAM,oBAAoB,aACtB;AAAA,IACE,GAAG;AAAA,IACH,KAAK;AAAA,MACH,GAAG,cAAc;AAAA,MACjB,MAAM,GAAG,aAAa,GAAG,YAAAA,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAM,KAAK,EAAE;AAAA,IACrE;AAAA,IACA,OAAO;AAAA,EACT,IACA;AACJ,QAAM,mBAAe,oBAAM,YAAY,MAAM,mBAAmB,UAAU;AAE1E,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAQA,eAAsB,eACpB,SACsD;AACtD,QAAM;AAAA,IACJ;AAAA;AAAA,MAA2B,QAAQ,iBAAiB,EAAE;AAAA;AAAA,IACtD;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EACF,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAGlC,QAAM,WAAW,gBAAgB;AACjC,QAAM,aAAa,QAAQ,UAAU,QAAQ,QAAQ,IAAI,UAAAG,QAAG,KAAK,CAAC;AAElE,QAAM,OAAO,GAAG,GAAG,IAAI,UAAU;AACjC,QAAM,eAAW,6BAAiB,IAAI;AACtC,QAAM,gBAAgB,YAAAH,QAAK,KAAK,UAAU,QAAQ;AAClD,QAAM,iBAAa,4BAAc,YAAAA,QAAK,KAAK,eAAe,UAAU,CAAC;AACrE,QAAM,KAAK,sBAAM;AAEjB,MAAI,aAAa;AAGjB,MACE,CAAC,SACD,GAAG,WAAW,aAAa,KAC1B,MAAM,aAAa,eAAe,QAAQ,GAC3C;AAEA,iBAAa;AAAA,EACf,OAAO;AAEL,QAAI;AACF,gBAAM,qBAAU,aAAa;AAAA,IAC/B,SAAS,GAAG;AACV,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,sDAAsD,aAAa;AAAA;AAAA,UAEnE,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,SAAS,SAAS;AACpB,cAAM,IAAI;AAAA,UACR,iEAAiE,aAAa;AAAA;AAAA,UAE9E,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,4CAA4C,aAAa;AAAA,QACzD,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AAGA,UAAM,mBAAmB,MAAM,mBAAmB,KAAK,YAAY,QAAQ;AAG3E,UAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,UAAU;AAC/C,UAAM;AAAA,MACJ;AAAA,MACA;
AAAA,MACA;AAAA,MACA,oBAAoB;AAAA,MACpB,MAAM;AAAA,IACR;AACA,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAaO,SAAS,cACd,YACA,MACA,cACA,YAC0B;AAI1B,QAAM,aAAa,yBAAS,sBAAsB,KAAK,UAAU;AASjE,QAAM,gBAAgB,YAAAA,QAAK,QAAQ,UAAU;AAC7C,QAAM,oBAAoB,aACtB;AAAA,IACE,GAAG;AAAA,IACH,KAAK;AAAA,MACH,GAAG,cAAc;AAAA,MACjB,MAAM,GAAG,aAAa,GAAG,YAAAA,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAM,KAAK,EAAE;AAAA,IACrE;AAAA,IACA,OAAO;AAAA,EACT,IACA;AAEJ,aAAO,oBAAM,YAAY,MAAM,mBAAmB,UAAU;AAC9D;AAOO,SAAS,kBAA0B;AACxC,aAAO,8BAAgB;AACzB;AAKA,eAAsB,eAUpB;AACA,QAAM,WAAW,gBAAgB;AACjC,QAAM,KAAK,sBAAM;AAEjB,MAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,UAAU,CAAC;AACjB,QAAM,MAAM,KAAK,IAAI;AACrB,QAAM,UAAU,MAAM,GAAG,SAAS,QAAQ,QAAQ;AAElD,aAAW,SAAS,SAAS;AAC3B,UAAM,YAAY,YAAAA,QAAK,KAAK,UAAU,KAAK;AAC3C,QAAI;AAEF,UAAI,CAAE,UAAM,iBAAM,SAAS,GAAI;AAC7B;AAAA,MACF;AAEA,YAAM,WAAW,gBAAgB,SAAS;AAE1C,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,CAAC,YACD,OAAO,aAAa,YACpB,MAAM,QAAQ,QAAQ,GACtB;AACA;AAAA,MACF;AAEA,YAAM,UAAU;AAIhB,YAAM,SAAS,QAAQ,QAAQ;AAC/B,YAAM,MACH,SAAS,KAAK,KAAiB,QAAQ,KAAK,KAAgB;AAI/D,YAAM,QAAQ,MAAM,GAAG,SAAS,QAAQ,SAAS;AACjD,YAAM,aAAa,MAAM,KAAK,OAAK,CAAC,EAAE,WAAW,GAAG,CAAC;AAErD,UAAI,YAAY;AACd,cAAM,aAAa,YAAAA,QAAK,KAAK,WAAW,UAAU;AAElD,cAAM,cAAc,MAAM,GAAG,SAAS,KAAK,UAAU;AAErD,gBAAQ,KAAK;AAAA,UACX,KAAK,OAAQ,QAAQ,WAAW,KAAgB;AAAA,UAChD,MAAO,QAAQ,MAAM,KAAgB;AAAA,UACrC,UAAW,QAAQ,UAAU,KAAgB;AAAA,UAC7C,MAAM;AAAA,UACN,UAAW,QAAQ,UAAU,KAAgB;AAAA,UAC7C,MAAM,YAAY;AAAA,UAClB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,QAAQ;AAAA,IAAC;AAAA,EACX;AAEA,SAAO;AACT;",
4
+ "sourcesContent": ["/** @fileoverview DLX binary execution utilities for Socket ecosystem. */\n\nimport { createHash } from 'crypto'\n\nimport os from 'os'\n\nimport path from 'path'\n\nimport { WIN32 } from '#constants/platform'\n\nimport { generateCacheKey } from './dlx'\nimport { dlxManifest } from './dlx-manifest'\nimport { httpDownload } from './http-request'\nimport { isDir, readJson, safeDelete, safeMkdir } from './fs'\nimport { isObjectObject } from './objects'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nexport interface DlxBinaryOptions {\n /**\n * URL to download the binary from.\n */\n url: string\n\n /**\n * Optional name for the cached binary (defaults to URL hash).\n */\n name?: string | undefined\n\n /**\n * Expected checksum (sha256) for verification.\n */\n checksum?: string | undefined\n\n /**\n * Cache TTL in milliseconds (default: 7 days).\n */\n cacheTtl?: number | undefined\n\n /**\n * Force re-download even if cached.\n * Aligns with npm/npx --force flag.\n */\n force?: boolean | undefined\n\n /**\n * Skip confirmation prompts (auto-approve).\n * Aligns with npx --yes/-y flag.\n */\n yes?: boolean | undefined\n\n /**\n * Suppress output (quiet mode).\n * Aligns with npx --quiet/-q and pnpm --silent/-s flags.\n */\n quiet?: boolean | undefined\n\n /**\n * Additional spawn options.\n */\n spawnOptions?: SpawnOptions | 
undefined\n}\n\nexport interface DlxBinaryResult {\n /** Path to the cached binary. */\n binaryPath: string\n /** Whether the binary was newly downloaded. */\n downloaded: boolean\n /** The spawn promise for the running process. */\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Metadata structure for cached binaries (.dlx-metadata.json).\n * Unified schema shared across TypeScript (dlxBinary) and C++ (socket_macho_decompress).\n *\n * Core Fields (present in all implementations):\n * - version: Schema version (currently \"1.0.0\")\n * - cache_key: First 16 chars of SHA-512 hash (matches directory name)\n * - timestamp: Unix timestamp in milliseconds\n * - checksum: Full hash of cached binary (SHA-512 for C++, SHA-256 for TypeScript)\n * - checksum_algorithm: \"sha512\" or \"sha256\"\n * - platform: \"darwin\" | \"linux\" | \"win32\"\n * - arch: \"x64\" | \"arm64\"\n * - size: Size of cached binary in bytes\n * - source: Origin information\n * - type: \"download\" (from URL) or \"decompression\" (from embedded binary)\n * - url: Download URL (if type is \"download\")\n * - path: Source binary path (if type is \"decompression\")\n *\n * Extra Fields (implementation-specific):\n * - For C++ decompression:\n * - compressed_size: Size of compressed data in bytes\n * - compression_algorithm: Brotli level (numeric)\n * - compression_ratio: original_size / compressed_size\n *\n * Example (TypeScript download):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"a1b2c3d4e5f67890\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": \"sha256-abc123...\",\n * \"checksum_algorithm\": \"sha256\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 15000000,\n * \"source\": {\n * \"type\": \"download\",\n * \"url\": \"https://example.com/binary\"\n * }\n * }\n * ```\n *\n * Example (C++ decompression):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"0123456789abcdef\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": 
\"sha512-def456...\",\n * \"checksum_algorithm\": \"sha512\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 13000000,\n * \"source\": {\n * \"type\": \"decompression\",\n * \"path\": \"/usr/local/bin/socket\"\n * },\n * \"extra\": {\n * \"compressed_size\": 1700000,\n * \"compression_algorithm\": 3,\n * \"compression_ratio\": 7.647\n * }\n * }\n * ```\n *\n * @internal This interface documents the metadata file format.\n */\nexport interface DlxMetadata {\n version: string\n cache_key: string\n timestamp: number\n checksum: string\n checksum_algorithm: string\n platform: string\n arch: string\n size: number\n source?: {\n type: 'download' | 'decompression'\n url?: string\n path?: string\n }\n extra?: Record<string, unknown>\n}\n\n/**\n * Get metadata file path for a cached binary.\n */\nfunction getMetadataPath(cacheEntryPath: string): string {\n return path.join(cacheEntryPath, '.dlx-metadata.json')\n}\n\n/**\n * Check if a cached binary is still valid.\n */\nasync function isCacheValid(\n cacheEntryPath: string,\n cacheTtl: number,\n): Promise<boolean> {\n const fs = getFs()\n try {\n const metaPath = getMetadataPath(cacheEntryPath)\n if (!fs.existsSync(metaPath)) {\n return false\n }\n\n const metadata = await readJson(metaPath, { throws: false })\n if (!isObjectObject(metadata)) {\n return false\n }\n const now = Date.now()\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, cache is invalid\n if (typeof timestamp !== 'number' || timestamp <= 0) {\n return false\n }\n const age = now - timestamp\n\n return age < cacheTtl\n } catch {\n return false\n }\n}\n\n/**\n * Download a file from a URL with integrity checking and concurrent download protection.\n * Uses processLock to prevent multiple processes from downloading the same binary simultaneously.\n * Internal helper function for downloading binary files.\n */\nasync function downloadBinaryFile(\n url: string,\n destPath: 
string,\n checksum?: string | undefined,\n): Promise<string> {\n // Use process lock to prevent concurrent downloads.\n // Lock is placed in the cache entry directory as 'concurrency.lock'.\n const cacheEntryDir = path.dirname(destPath)\n const lockPath = path.join(cacheEntryDir, 'concurrency.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n const fs = getFs()\n // Check if file was downloaded while waiting for lock.\n if (fs.existsSync(destPath)) {\n const stats = await fs.promises.stat(destPath)\n if (stats.size > 0) {\n // File exists, compute and return checksum.\n const fileBuffer = await fs.promises.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n return hasher.digest('hex')\n }\n }\n\n // Download the file.\n try {\n await httpDownload(url, destPath)\n } catch (e) {\n throw new Error(\n `Failed to download binary from ${url}\\n` +\n `Destination: ${destPath}\\n` +\n 'Check your internet connection or verify the URL is accessible.',\n { cause: e },\n )\n }\n\n // Compute checksum of downloaded file.\n const fileBuffer = await fs.promises.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n const actualChecksum = hasher.digest('hex')\n\n // Verify checksum if provided.\n if (checksum && actualChecksum !== checksum) {\n // Clean up invalid file.\n await safeDelete(destPath)\n throw new Error(\n `Checksum mismatch: expected ${checksum}, got ${actualChecksum}`,\n )\n }\n\n // Make executable on POSIX systems.\n if (!WIN32) {\n await fs.promises.chmod(destPath, 0o755)\n }\n\n return actualChecksum\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Write metadata for a cached binary.\n * Writes to both per-directory metadata file (for backward compatibility)\n * and global manifest (~/.socket/_dlx/.dlx-manifest.json).\n * Uses unified schema shared with C++ decompressor and CLI dlxBinary.\n * Schema 
documentation: See DlxMetadata interface in this file (exported).\n * Core fields: version, cache_key, timestamp, checksum, checksum_algorithm, platform, arch, size, source\n * Note: This implementation uses SHA-256 checksums instead of SHA-512.\n */\nasync function writeMetadata(\n cacheEntryPath: string,\n cacheKey: string,\n url: string,\n binaryName: string,\n checksum: string,\n size: number,\n): Promise<void> {\n // Write per-directory metadata file for backward compatibility.\n const metaPath = getMetadataPath(cacheEntryPath)\n const metadata = {\n version: '1.0.0',\n cache_key: cacheKey,\n timestamp: Date.now(),\n checksum,\n checksum_algorithm: 'sha256',\n platform: os.platform(),\n arch: os.arch(),\n size,\n source: {\n type: 'download',\n url,\n },\n }\n const fs = getFs()\n await fs.promises.writeFile(metaPath, JSON.stringify(metadata, null, 2))\n\n // Write to global manifest.\n try {\n const spec = `${url}:${binaryName}`\n await dlxManifest.setBinaryEntry(spec, cacheKey, {\n checksum,\n checksum_algorithm: 'sha256',\n platform: os.platform(),\n arch: os.arch(),\n size,\n source: {\n type: 'download',\n url,\n },\n })\n } catch {\n // Silently ignore manifest write errors - not critical.\n // The per-directory metadata is the source of truth for now.\n }\n}\n\n/**\n * Clean expired entries from the DLX cache.\n */\nexport async function cleanDlxCache(\n maxAge: number = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n): Promise<number> {\n const cacheDir = getDlxCachePath()\n const fs = getFs()\n\n if (!fs.existsSync(cacheDir)) {\n return 0\n }\n\n let cleaned = 0\n const now = Date.now()\n const entries = await fs.promises.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n const metaPath = getMetadataPath(entryPath)\n\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n // eslint-disable-next-line no-await-in-loop\n const 
metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, treat as expired (age = infinity)\n const age =\n typeof timestamp === 'number' && timestamp > 0\n ? now - timestamp\n : Number.POSITIVE_INFINITY\n\n if (age > maxAge) {\n // Remove entire cache entry directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath, { force: true, recursive: true })\n cleaned += 1\n }\n } catch {\n // If we can't read metadata, check if directory is empty or corrupted.\n try {\n // eslint-disable-next-line no-await-in-loop\n const contents = await fs.promises.readdir(entryPath)\n if (!contents.length) {\n // Remove empty directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath)\n cleaned += 1\n }\n } catch {}\n }\n }\n\n return cleaned\n}\n\n/**\n * Download and execute a binary from a URL with caching.\n */\nexport async function dlxBinary(\n args: readonly string[] | string[],\n options?: DlxBinaryOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxBinaryResult> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force: userForce = false,\n name,\n spawnOptions,\n url,\n yes,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Map --yes flag to force behavior (auto-approve/skip prompts)\n const force = yes === true ? 
true : userForce\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n const fs = getFs()\n\n let downloaded = false\n let computedChecksum = checksum\n\n // Check if we need to download.\n if (\n !force &&\n fs.existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid, read the checksum from metadata.\n try {\n const metaPath = getMetadataPath(cacheEntryDir)\n const metadata = await readJson(metaPath, { throws: false })\n if (\n metadata &&\n typeof metadata === 'object' &&\n !Array.isArray(metadata) &&\n typeof (metadata as Record<string, unknown>)['checksum'] === 'string'\n ) {\n computedChecksum = (metadata as Record<string, unknown>)[\n 'checksum'\n ] as string\n } else {\n // If metadata is invalid, re-download.\n downloaded = true\n }\n } catch {\n // If we can't read metadata, re-download.\n downloaded = true\n }\n } else {\n downloaded = true\n }\n\n if (downloaded) {\n // Ensure cache directory exists before downloading.\n try {\n await safeMkdir(cacheEntryDir)\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new 
Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.promises.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n binaryName,\n computedChecksum || '',\n stats.size,\n )\n }\n\n // Execute the binary.\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension (e.g.,\n // C:\\cache\\test.cmd becomes just \"test\"). Windows cmd.exe then searches for \"test\"\n // in directories listed in PATH, trying each extension from PATHEXT environment\n // variable (.COM, .EXE, .BAT, .CMD, etc.) until it finds a match.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH\n // (unlike system package managers like npm/pnpm/yarn which are already in PATH),\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n //\n // This approach is consistent with how other tools handle Windows command execution:\n // - npm's promise-spawn: uses which.sync() to find commands in PATH\n // - cross-spawn: spawns cmd.exe with escaped arguments\n // - Node.js spawn with shell: true: delegates to cmd.exe which uses PATH\n const finalSpawnOptions = needsShell\n ? 
{\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n const spawnPromise = spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n\n return {\n binaryPath,\n downloaded,\n spawnPromise,\n }\n}\n\n/**\n * Download a binary from a URL with caching (without execution).\n * Similar to downloadPackage from dlx-package.\n *\n * @returns Object containing the path to the cached binary and whether it was downloaded\n */\nexport async function downloadBinary(\n options: Omit<DlxBinaryOptions, 'spawnOptions'>,\n): Promise<{ binaryPath: string; downloaded: boolean }> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force = false,\n name,\n url,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n const fs = getFs()\n\n let downloaded = false\n\n // Check if we need to download.\n if (\n !force &&\n fs.existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid.\n downloaded = false\n } else {\n // Ensure cache directory exists before downloading.\n try {\n await safeMkdir(cacheEntryDir)\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n 
throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n const computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.promises.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n binaryName,\n computedChecksum || '',\n stats.size,\n )\n downloaded = true\n }\n\n return {\n binaryPath,\n downloaded,\n }\n}\n\n/**\n * Execute a cached binary without re-downloading.\n * Similar to executePackage from dlx-package.\n * Binary must have been previously downloaded via downloadBinary or dlxBinary.\n *\n * @param binaryPath Path to the cached binary (from downloadBinary result)\n * @param args Arguments to pass to the binary\n * @param spawnOptions Spawn options for execution\n * @param spawnExtra Extra spawn configuration\n * @returns The spawn promise for the running process\n */\nexport function executeBinary(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension. 
Windows cmd.exe\n // then searches for the binary in directories listed in PATH.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH,\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n const cacheEntryDir = path.dirname(binaryPath)\n const finalSpawnOptions = needsShell\n ? {\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n\n return spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n}\n\n/**\n * Get the DLX binary cache directory path.\n * Returns normalized path for cross-platform compatibility.\n * Uses same directory as dlx-package for unified DLX storage.\n */\nexport function getDlxCachePath(): string {\n return getSocketDlxDir()\n}\n\n/**\n * Get information about cached binaries.\n */\nexport async function listDlxCache(): Promise<\n Array<{\n age: number\n arch: string\n checksum: string\n name: string\n platform: string\n size: number\n url: string\n }>\n> {\n const cacheDir = getDlxCachePath()\n const fs = getFs()\n\n if (!fs.existsSync(cacheDir)) {\n return []\n }\n\n const results = []\n const now = Date.now()\n const entries = await fs.promises.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n const metaPath = getMetadataPath(entryPath)\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n\n const metaObj = metadata as Record<string, unknown>\n\n // Get URL from unified schema (source.url) or legacy schema (url).\n // Allow empty URL for backward compatibility with partial metadata.\n const source = metaObj['source'] as Record<string, unknown> | 
undefined\n const url =\n (source?.['url'] as string) || (metaObj['url'] as string) || ''\n\n // Find the binary file in the directory.\n // eslint-disable-next-line no-await-in-loop\n const files = await fs.promises.readdir(entryPath)\n const binaryFile = files.find(f => !f.startsWith('.'))\n\n if (binaryFile) {\n const binaryPath = path.join(entryPath, binaryFile)\n // eslint-disable-next-line no-await-in-loop\n const binaryStats = await fs.promises.stat(binaryPath)\n\n results.push({\n age: now - ((metaObj['timestamp'] as number) || 0),\n arch: (metaObj['arch'] as string) || 'unknown',\n checksum: (metaObj['checksum'] as string) || '',\n name: binaryFile,\n platform: (metaObj['platform'] as string) || 'unknown',\n size: binaryStats.size,\n url,\n })\n }\n } catch {}\n }\n\n return results\n}\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAA2B;AAE3B,gBAAe;AAEf,kBAAiB;AAEjB,sBAAsB;AAEtB,iBAAiC;AACjC,0BAA4B;AAC5B,0BAA6B;AAC7B,gBAAuD;AACvD,qBAA+B;AAC/B,IAAAA,eAA8B;AAC9B,mBAAgC;AAChC,0BAA4B;AAE5B,mBAAsB;AAEtB,IAAI;AAAA;AASJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AA+IA,SAAS,gBAAgB,gBAAgC;AACvD,SAAO,YAAAC,QAAK,KAAK,gBAAgB,oBAAoB;AACvD;AAKA,eAAe,aACb,gBACA,UACkB;AAClB,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,WAAW,gBAAgB,cAAc;AAC/C,QAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,QAAI,KAAC,+BAAe,QAAQ,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,YAAa,SAAqC,WAAW;AAEnE,QAAI,OAAO,cAAc,YAAY,aAAa,GAAG;AACnD,aAAO;AAAA,IACT;AACA,UAAM,MAAM,MAAM;AAElB,WAAO,MAAM;AAAA,EACf,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAOA,eAAe,mBACb,KACA,UACA,UACiB;AAGjB,QAAM,gBAAgB,YAAAA,QAAK,QAAQ,QAAQ;AAC3C,QAAM,WAAW,YAAAA,QAAK,KAAK,eAAe,kBAAkB;AAE5D,SAAO,MAAM,gCAAY;AAAA,IACvB;AAAA,IACA,YAAY;AACV,YAAM,KAAK,sBAAM;AAEjB,UAAI,GAAG,WAAW,QAAQ,GAAG;AAC3B,cAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,QAAQ;AAC7C,YAAI,MAAM,OAAO,GAAG;AAElB,gBAAMC,cAAa,MAAM,GAAG,SAAS,SAAS,QAAQ;AACtD,gBAAMC,cAAS,0BAAW,QAAQ;AAClC,UAAAA,QAAO,OAAOD,WAAU;AACxB,iBAAOC,QAAO,OAAO,KAAK;AAAA,QAC5B;AAAA,MACF;AAGA,UAAI;AACF,kBAAM,kCAAa,KAAK,QAAQ;AAAA,MAClC,SAAS,GAAG;AACV,cAAM,IAAI;AAAA,UACR,kCAAkC,GAAG;AAAA,eACnB,QAAQ;AAAA;AAAA,UAE1B,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AAGA,YAAM,aAAa,MAAM,GAAG,SAAS,SAAS,QAAQ;AACtD,YAAM,aAAS,0BAAW,QAAQ;AAClC,aAAO,OAAO,UAAU;AACxB,YAAM,iBAAiB,OAAO,OAAO,KAAK;AAG1C,UAAI,YAAY,mBAAmB,UAAU;AAE3C,kBAAM,sBAAW,QAAQ;AACzB,cAAM,IAAI;AAAA,UACR,+BAA+B,QAAQ,SAAS,cAAc;AAAA,QAChE;AAAA,MACF;AAGA,UAAI,CAAC,uBAAO;AACV,cAAM,GAAG,SAAS,MAAM,UAAU,GAAK;AAAA,MACzC;AAEA,aAAO;AAAA,IACT;AAAA,IACA;AAAA;AAAA,MAEE,SAAS;AAAA,MACT,iBAAiB;AAAA,IACnB;AAAA,EACF;AACF;AAWA,eAAe,cACb,gBACA,UACA,KACA,YACA,UACA,MACe;AAEf,QAAM,WAAW,gBAAgB,cAAc;AAC/C,QAAM,WAAW;AAAA,IACf,SAAS;AAAA,IACT,WAAW;AAAA,IACX,WAAW,KAAK
,IAAI;AAAA,IACpB;AAAA,IACA,oBAAoB;AAAA,IACpB,UAAU,UAAAC,QAAG,SAAS;AAAA,IACtB,MAAM,UAAAA,QAAG,KAAK;AAAA,IACd;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,GAAG,SAAS,UAAU,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AAGvE,MAAI;AACF,UAAM,OAAO,GAAG,GAAG,IAAI,UAAU;AACjC,UAAM,gCAAY,eAAe,MAAM,UAAU;AAAA,MAC/C;AAAA,MACA,oBAAoB;AAAA,MACpB,UAAU,UAAAA,QAAG,SAAS;AAAA,MACtB,MAAM,UAAAA,QAAG,KAAK;AAAA,MACd;AAAA,MACA,QAAQ;AAAA,QACN,MAAM;AAAA,QACN;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH,QAAQ;AAAA,EAGR;AACF;AAKA,eAAsB,cACpB;AAAA;AAAA,EAAiC,QAAQ,iBAAiB,EAAE;AAAA,GAC3C;AACjB,QAAM,WAAW,gBAAgB;AACjC,QAAM,KAAK,sBAAM;AAEjB,MAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,WAAO;AAAA,EACT;AAEA,MAAI,UAAU;AACd,QAAM,MAAM,KAAK,IAAI;AACrB,QAAM,UAAU,MAAM,GAAG,SAAS,QAAQ,QAAQ;AAElD,aAAW,SAAS,SAAS;AAC3B,UAAM,YAAY,YAAAH,QAAK,KAAK,UAAU,KAAK;AAC3C,UAAM,WAAW,gBAAgB,SAAS;AAE1C,QAAI;AAEF,UAAI,CAAE,UAAM,iBAAM,SAAS,GAAI;AAC7B;AAAA,MACF;AAGA,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,CAAC,YACD,OAAO,aAAa,YACpB,MAAM,QAAQ,QAAQ,GACtB;AACA;AAAA,MACF;AACA,YAAM,YAAa,SAAqC,WAAW;AAEnE,YAAM,MACJ,OAAO,cAAc,YAAY,YAAY,IACzC,MAAM,YACN,OAAO;AAEb,UAAI,MAAM,QAAQ;AAGhB,kBAAM,sBAAW,WAAW,EAAE,OAAO,MAAM,WAAW,KAAK,CAAC;AAC5D,mBAAW;AAAA,MACb;AAAA,IACF,QAAQ;AAEN,UAAI;AAEF,cAAM,WAAW,MAAM,GAAG,SAAS,QAAQ,SAAS;AACpD,YAAI,CAAC,SAAS,QAAQ;AAGpB,oBAAM,sBAAW,SAAS;AAC1B,qBAAW;AAAA,QACb;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,UACpB,MACA,SACA,YAC0B;AAC1B,QAAM;AAAA,IACJ;AAAA;AAAA,MAA2B,QAAQ,iBAAiB,EAAE;AAAA;AAAA,IACtD;AAAA,IACA,OAAO,YAAY;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAGlC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAGpC,QAAM,WAAW,gBAAgB;AACjC,QAAM,aAAa,QAAQ,UAAU,QAAQ,QAAQ,IAAI,UAAAG,QAAG,KAAK,CAAC;AAElE,QAAM,OAAO,GAAG,GAAG,IAAI,UAAU;AACjC,QAAM,eAAW,6BAAiB,IAAI;AACtC,QAAM,gBAAgB,YAAAH,QAAK,KAAK,UAAU,QAAQ;AAClD,QAAM,iBAAa,4BAAc,YAAAA,QAAK,KAAK,eAAe,UAAU,CAAC;AACrE,QAAM,KAAK,sBAAM;AAEjB,MAAI,aAAa;AACjB,MAAI,mBAAmB;AAGvB,MACE,CAAC,SACD,GAAG,WAAW,aAAa,KAC1B
,MAAM,aAAa,eAAe,QAAQ,GAC3C;AAEA,QAAI;AACF,YAAM,WAAW,gBAAgB,aAAa;AAC9C,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,YACA,OAAO,aAAa,YACpB,CAAC,MAAM,QAAQ,QAAQ,KACvB,OAAQ,SAAqC,UAAU,MAAM,UAC7D;AACA,2BAAoB,SAClB,UACF;AAAA,MACF,OAAO;AAEL,qBAAa;AAAA,MACf;AAAA,IACF,QAAQ;AAEN,mBAAa;AAAA,IACf;AAAA,EACF,OAAO;AACL,iBAAa;AAAA,EACf;AAEA,MAAI,YAAY;AAEd,QAAI;AACF,gBAAM,qBAAU,aAAa;AAAA,IAC/B,SAAS,GAAG;AACV,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,sDAAsD,aAAa;AAAA;AAAA,UAEnE,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,SAAS,SAAS;AACpB,cAAM,IAAI;AAAA,UACR,iEAAiE,aAAa;AAAA;AAAA,UAE9E,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,4CAA4C,aAAa;AAAA,QACzD,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AAGA,uBAAmB,MAAM,mBAAmB,KAAK,YAAY,QAAQ;AAGrE,UAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,UAAU;AAC/C,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,oBAAoB;AAAA,MACpB,MAAM;AAAA,IACR;AAAA,EACF;AAMA,QAAM,aAAa,yBAAS,sBAAsB,KAAK,UAAU;AAgBjE,QAAM,oBAAoB,aACtB;AAAA,IACE,GAAG;AAAA,IACH,KAAK;AAAA,MACH,GAAG,cAAc;AAAA,MACjB,MAAM,GAAG,aAAa,GAAG,YAAAA,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAM,KAAK,EAAE;AAAA,IACrE;AAAA,IACA,OAAO;AAAA,EACT,IACA;AACJ,QAAM,mBAAe,oBAAM,YAAY,MAAM,mBAAmB,UAAU;AAE1E,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAQA,eAAsB,eACpB,SACsD;AACtD,QAAM;AAAA,IACJ;AAAA;AAAA,MAA2B,QAAQ,iBAAiB,EAAE;AAAA;AAAA,IACtD;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EACF,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAGlC,QAAM,WAAW,gBAAgB;AACjC,QAAM,aAAa,QAAQ,UAAU,QAAQ,QAAQ,IAAI,UAAAG,QAAG,KAAK,CAAC;AAElE,QAAM,OAAO,GAAG,GAAG,IAAI,UAAU;AACjC,QAAM,eAAW,6BAAiB,IAAI;AACtC,QAAM,gBAAgB,YAAAH,QAAK,KAAK,UAAU,QAAQ;AAClD,QAAM,iBAAa,4BAAc,YAAAA,QAAK,KAAK,eAAe,UAAU,CAAC;AACrE,QAAM,KAAK,sBAAM;AAEjB,MAAI,aAAa;AAGjB,MACE,CAAC,SACD,GAAG,WAAW,aAAa,KAC1B,MAAM,aAAa,eAAe,QAAQ,GAC3C;AAEA,iBAAa;AAAA,EACf,OAAO;AAEL,QAAI;AACF,gBAAM,qBAAU,aAAa;AAAA,IAC/B,SAAS,GAAG;AACV,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,sDAAsD,aAAa;AAAA;AAAA,UAEnE,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,S
AAS,SAAS;AACpB,cAAM,IAAI;AAAA,UACR,iEAAiE,aAAa;AAAA;AAAA,UAE9E,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,4CAA4C,aAAa;AAAA,QACzD,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AAGA,UAAM,mBAAmB,MAAM,mBAAmB,KAAK,YAAY,QAAQ;AAG3E,UAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,UAAU;AAC/C,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,oBAAoB;AAAA,MACpB,MAAM;AAAA,IACR;AACA,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAaO,SAAS,cACd,YACA,MACA,cACA,YAC0B;AAI1B,QAAM,aAAa,yBAAS,sBAAsB,KAAK,UAAU;AASjE,QAAM,gBAAgB,YAAAA,QAAK,QAAQ,UAAU;AAC7C,QAAM,oBAAoB,aACtB;AAAA,IACE,GAAG;AAAA,IACH,KAAK;AAAA,MACH,GAAG,cAAc;AAAA,MACjB,MAAM,GAAG,aAAa,GAAG,YAAAA,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAM,KAAK,EAAE;AAAA,IACrE;AAAA,IACA,OAAO;AAAA,EACT,IACA;AAEJ,aAAO,oBAAM,YAAY,MAAM,mBAAmB,UAAU;AAC9D;AAOO,SAAS,kBAA0B;AACxC,aAAO,8BAAgB;AACzB;AAKA,eAAsB,eAUpB;AACA,QAAM,WAAW,gBAAgB;AACjC,QAAM,KAAK,sBAAM;AAEjB,MAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,UAAU,CAAC;AACjB,QAAM,MAAM,KAAK,IAAI;AACrB,QAAM,UAAU,MAAM,GAAG,SAAS,QAAQ,QAAQ;AAElD,aAAW,SAAS,SAAS;AAC3B,UAAM,YAAY,YAAAA,QAAK,KAAK,UAAU,KAAK;AAC3C,QAAI;AAEF,UAAI,CAAE,UAAM,iBAAM,SAAS,GAAI;AAC7B;AAAA,MACF;AAEA,YAAM,WAAW,gBAAgB,SAAS;AAE1C,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,CAAC,YACD,OAAO,aAAa,YACpB,MAAM,QAAQ,QAAQ,GACtB;AACA;AAAA,MACF;AAEA,YAAM,UAAU;AAIhB,YAAM,SAAS,QAAQ,QAAQ;AAC/B,YAAM,MACH,SAAS,KAAK,KAAiB,QAAQ,KAAK,KAAgB;AAI/D,YAAM,QAAQ,MAAM,GAAG,SAAS,QAAQ,SAAS;AACjD,YAAM,aAAa,MAAM,KAAK,OAAK,CAAC,EAAE,WAAW,GAAG,CAAC;AAErD,UAAI,YAAY;AACd,cAAM,aAAa,YAAAA,QAAK,KAAK,WAAW,UAAU;AAElD,cAAM,cAAc,MAAM,GAAG,SAAS,KAAK,UAAU;AAErD,gBAAQ,KAAK;AAAA,UACX,KAAK,OAAQ,QAAQ,WAAW,KAAgB;AAAA,UAChD,MAAO,QAAQ,MAAM,KAAgB;AAAA,UACrC,UAAW,QAAQ,UAAU,KAAgB;AAAA,UAC7C,MAAM;AAAA,UACN,UAAW,QAAQ,UAAU,KAAgB;AAAA,UAC7C,MAAM,YAAY;AAAA,UAClB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,QAAQ;AAAA,IAAC;AAAA,EACX;AAEA,SAAO;AACT;",
6
6
  "names": ["import_path", "path", "fileBuffer", "hasher", "os"]
7
7
  }
@@ -0,0 +1,116 @@
1
+ /**
2
+ * Details for npm package entries.
3
+ */
4
+ export interface PackageDetails {
5
+ installed_version: string;
6
+ size?: number;
7
+ update_check?: {
8
+ last_check: number;
9
+ last_notification: number;
10
+ latest_known: string;
11
+ };
12
+ }
13
+ /**
14
+ * Details for binary download entries.
15
+ */
16
+ export interface BinaryDetails {
17
+ checksum: string;
18
+ checksum_algorithm: 'sha256' | 'sha512';
19
+ platform: string;
20
+ arch: string;
21
+ size: number;
22
+ source: {
23
+ type: 'download';
24
+ url: string;
25
+ };
26
+ }
27
+ /**
28
+ * Unified manifest entry for all cached items (packages and binaries).
29
+ * Shared fields at root, type-specific fields in details.
30
+ */
31
+ export interface ManifestEntry {
32
+ type: 'package' | 'binary';
33
+ cache_key: string;
34
+ timestamp: number;
35
+ details: PackageDetails | BinaryDetails;
36
+ }
37
+ /**
38
+ * Type guard for package entries.
39
+ */
40
+ export declare function isPackageEntry(entry: ManifestEntry): entry is ManifestEntry & {
41
+ details: PackageDetails;
42
+ };
43
+ /**
44
+ * Type guard for binary entries.
45
+ */
46
+ export declare function isBinaryEntry(entry: ManifestEntry): entry is ManifestEntry & {
47
+ details: BinaryDetails;
48
+ };
49
+ /**
50
+ * Legacy store record format (deprecated, for migration).
51
+ */
52
+ export interface StoreRecord {
53
+ timestampFetch: number;
54
+ timestampNotification: number;
55
+ version: string;
56
+ }
57
+ export interface DlxManifestOptions {
58
+ /**
59
+ * Custom manifest file path (defaults to ~/.socket/_dlx/.dlx-manifest.json).
60
+ */
61
+ manifestPath?: string;
62
+ }
63
+ /**
64
+ * DLX manifest storage manager with atomic operations.
65
+ * Supports both legacy format (package name keys) and new unified manifest format (spec keys).
66
+ */
67
+ export declare class DlxManifest {
68
+ private readonly manifestPath;
69
+ private readonly lockPath;
70
+ constructor(options?: DlxManifestOptions);
71
+ /**
72
+ * Read the entire manifest file.
73
+ */
74
+ private readManifest;
75
+ /**
76
+ * Get a manifest entry by spec (e.g., "@socketsecurity/cli@^2.0.11").
77
+ */
78
+ getManifestEntry(spec: string): ManifestEntry | undefined;
79
+ /**
80
+ * Get cached update information for a package (legacy format).
81
+ * @deprecated Use getManifestEntry() for new code.
82
+ */
83
+ get(name: string): StoreRecord | undefined;
84
+ /**
85
+ * Set a package manifest entry.
86
+ */
87
+ setPackageEntry(spec: string, cacheKey: string, details: PackageDetails): Promise<void>;
88
+ /**
89
+ * Set a binary manifest entry.
90
+ */
91
+ setBinaryEntry(spec: string, cacheKey: string, details: BinaryDetails): Promise<void>;
92
+ private writeManifest;
93
+ /**
94
+ * Store update information for a package (legacy format).
95
+ * @deprecated Use setPackageEntry() for new code.
96
+ */
97
+ set(name: string, record: StoreRecord): Promise<void>;
98
+ /**
99
+ * Clear cached data for a specific entry.
100
+ */
101
+ clear(name: string): Promise<void>;
102
+ /**
103
+ * Clear all cached data.
104
+ */
105
+ clearAll(): Promise<void>;
106
+ /**
107
+ * Check if cached data is fresh based on TTL.
108
+ */
109
+ isFresh(record: StoreRecord | undefined, ttlMs: number): boolean;
110
+ /**
111
+ * Get all cached package names.
112
+ */
113
+ getAllPackages(): string[];
114
+ }
115
+ // Export singleton instance using default manifest location.
116
+ export declare const dlxManifest: DlxManifest;
@@ -0,0 +1,296 @@
1
/* Socket Lib - Built with esbuild */
// esbuild-generated CommonJS interop runtime (standard bundler helpers).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define a live (getter-based) enumerable export on `target` for each key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters, skipping `except` and
// keys already present on `to`; preserves the source property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed like an ES module namespace.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Mark an export namespace as an ES module and copy the exports onto it.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
// Export registry: the public names this CommonJS module exposes.
var dlx_manifest_exports = {};
__export(dlx_manifest_exports, {
  DlxManifest: () => DlxManifest,
  dlxManifest: () => dlxManifest,
  isBinaryEntry: () => isBinaryEntry,
  isPackageEntry: () => isPackageEntry
});
module.exports = __toCommonJS(dlx_manifest_exports);
var import_fs = require("fs");
var import_path = __toESM(require("path"));
var import_fs2 = require("./fs");
var import_logger = require("./logger");
var import_paths = require("./paths");
var import_process_lock = require("./process-lock");
// Module-level logger singleton: getDefaultLogger() is called once at module
// scope (per the v3.2.1 changelog, to avoid duplicate spinner/logger instances).
const logger = (0, import_logger.getDefaultLogger)();
// File name of the manifest inside the Socket DLX directory.
const MANIFEST_FILE_NAME = ".dlx-manifest.json";
45
/**
 * Type guard: true when a manifest entry is a package-type entry.
 */
function isPackageEntry(entry) {
  const { type } = entry;
  return type === "package";
}
48
/**
 * Type guard: true when a manifest entry is a binary-type entry.
 */
function isBinaryEntry(entry) {
  const isBinary = entry.type === "binary";
  return isBinary;
}
51
class DlxManifest {
  // Absolute path of the manifest JSON file.
  manifestPath;
  // Companion lock file path used to serialize concurrent writers.
  lockPath;

  /**
   * @param {object} [options]
   * @param {string} [options.manifestPath] - Custom manifest location;
   *   defaults to `<socket dlx dir>/.dlx-manifest.json`.
   */
  constructor(options = {}) {
    this.manifestPath = options.manifestPath ?? import_path.default.join((0, import_paths.getSocketDlxDir)(), MANIFEST_FILE_NAME);
    this.lockPath = `${this.manifestPath}.lock`;
  }

  /**
   * Read the entire manifest file.
   * @returns {object} Parsed manifest data; an empty null-prototype object
   *   when the file is missing, empty, or unreadable (failures are logged).
   */
  readManifest() {
    try {
      if (!(0, import_fs.existsSync)(this.manifestPath)) {
        return /* @__PURE__ */ Object.create(null);
      }
      const rawContent = (0, import_fs2.readFileUtf8Sync)(this.manifestPath);
      const content = (typeof rawContent === "string" ? rawContent : rawContent.toString("utf8")).trim();
      if (!content) {
        return /* @__PURE__ */ Object.create(null);
      }
      return JSON.parse(content);
    } catch (error) {
      logger.warn(
        `Failed to read manifest: ${error instanceof Error ? error.message : String(error)}`
      );
      return /* @__PURE__ */ Object.create(null);
    }
  }

  /**
   * Get a manifest entry by spec (e.g., "@socketsecurity/cli@^2.0.11").
   * Only returns new-format entries (objects carrying a `type` field).
   */
  getManifestEntry(spec) {
    const data = this.readManifest();
    const entry = data[spec];
    if (entry && "type" in entry) {
      return entry;
    }
    return void 0;
  }

  /**
   * Get cached update information for a package (legacy format — entries
   * WITHOUT a `type` field).
   * @deprecated Use getManifestEntry() for new code.
   */
  get(name) {
    const data = this.readManifest();
    const entry = data[name];
    if (entry && !("type" in entry)) {
      return entry;
    }
    return void 0;
  }

  /**
   * Set a package manifest entry.
   */
  async setPackageEntry(spec, cacheKey, details) {
    await this.#setEntry(spec, "package", cacheKey, details);
  }

  /**
   * Set a binary manifest entry.
   */
  async setBinaryEntry(spec, cacheKey, details) {
    await this.#setEntry(spec, "binary", cacheKey, details);
  }

  /**
   * Shared read-modify-write for typed entries, serialized by the process
   * lock. Extracted because setPackageEntry/setBinaryEntry were identical
   * except for the `type` value.
   */
  async #setEntry(spec, type, cacheKey, details) {
    await import_process_lock.processLock.withLock(this.lockPath, async () => {
      const data = this.readManifest();
      data[spec] = {
        type,
        cache_key: cacheKey,
        timestamp: Date.now(),
        details
      };
      await this.writeManifest(data);
    });
  }

  /**
   * Write the manifest file atomically.
   * FIX: the previous implementation wrote the temp file and then did a
   * second plain writeFileSync to the final path — non-atomic, and the temp
   * file served no purpose. Now the temp file is renamed into place, which
   * is an atomic replace on POSIX filesystems, so readers never observe a
   * partially written manifest.
   * @throws Rethrows write/rename errors after cleaning up the temp file.
   */
  async writeManifest(data) {
    const manifestDir = import_path.default.dirname(this.manifestPath);
    try {
      (0, import_fs2.safeMkdirSync)(manifestDir, { recursive: true });
    } catch (error) {
      // Best-effort: the write below will surface a real failure.
      logger.warn(
        `Failed to create manifest directory: ${error instanceof Error ? error.message : String(error)}`
      );
    }
    const content = JSON.stringify(data, null, 2);
    const tempPath = `${this.manifestPath}.tmp`;
    try {
      (0, import_fs.writeFileSync)(tempPath, content, "utf8");
      // Atomic replace of the manifest with the fully-written temp file.
      (0, import_fs.renameSync)(tempPath, this.manifestPath);
    } catch (error) {
      // Remove the orphaned temp file, then propagate the original error.
      try {
        if ((0, import_fs.existsSync)(tempPath)) {
          (0, import_fs.unlinkSync)(tempPath);
        }
      } catch {
      }
      throw error;
    }
  }

  /**
   * Store update information for a package (legacy format).
   * FIX: previously duplicated the entire read/mkdir/write-temp/cleanup
   * sequence inline (with the same non-atomic write bug); now reuses
   * readManifest()/writeManifest() under the lock.
   * @deprecated Use setPackageEntry() for new code.
   */
  async set(name, record) {
    await import_process_lock.processLock.withLock(this.lockPath, async () => {
      const data = this.readManifest();
      data[name] = record;
      await this.writeManifest(data);
    });
  }

  /**
   * Clear cached data for a specific entry. Failures are logged, not thrown.
   */
  async clear(name) {
    await import_process_lock.processLock.withLock(this.lockPath, async () => {
      try {
        if (!(0, import_fs.existsSync)(this.manifestPath)) {
          return;
        }
        const content = (0, import_fs.readFileSync)(this.manifestPath, "utf8");
        if (!content.trim()) {
          return;
        }
        const data = JSON.parse(content);
        delete data[name];
        // Route through writeManifest so the removal is applied atomically.
        await this.writeManifest(data);
      } catch (error) {
        logger.warn(
          `Failed to clear cache for ${name}: ${error instanceof Error ? error.message : String(error)}`
        );
      }
    });
  }

  /**
   * Clear all cached data by deleting the manifest file.
   * Failures are logged, not thrown.
   */
  async clearAll() {
    await import_process_lock.processLock.withLock(this.lockPath, async () => {
      try {
        if ((0, import_fs.existsSync)(this.manifestPath)) {
          (0, import_fs.unlinkSync)(this.manifestPath);
        }
      } catch (error) {
        logger.warn(
          `Failed to clear all cache: ${error instanceof Error ? error.message : String(error)}`
        );
      }
    });
  }

  /**
   * Check if cached data is fresh based on TTL.
   * NOTE(review): assumes legacy StoreRecord carries `timestampFetch`
   * (new-format entries use `timestamp`) — confirm against the type defs.
   */
  isFresh(record, ttlMs) {
    if (!record) {
      return false;
    }
    const age = Date.now() - record.timestampFetch;
    return age < ttlMs;
  }

  /**
   * Get all cached manifest keys (package names / specs).
   * @returns {string[]} Empty array when the file is missing/empty/unreadable.
   */
  getAllPackages() {
    try {
      if (!(0, import_fs.existsSync)(this.manifestPath)) {
        return [];
      }
      const rawContent = (0, import_fs2.readFileUtf8Sync)(this.manifestPath);
      const content = (typeof rawContent === "string" ? rawContent : rawContent.toString("utf8")).trim();
      if (!content) {
        return [];
      }
      const data = JSON.parse(content);
      return Object.keys(data);
    } catch (error) {
      logger.warn(
        `Failed to get package list: ${error instanceof Error ? error.message : String(error)}`
      );
      return [];
    }
  }
}
288
// Singleton instance using the default manifest location.
const dlxManifest = new DlxManifest();
// Annotate the CommonJS export names for ESM import in node:
// (dead code at runtime — `0 &&` — kept for static analysis by Node/bundlers).
0 && (module.exports = {
  DlxManifest,
  dlxManifest,
  isBinaryEntry,
  isPackageEntry
});
//# sourceMappingURL=dlx-manifest.js.map