@socketsecurity/lib 3.0.6 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
  The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
  and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+ ## [3.1.0](https://github.com/SocketDev/socket-lib/releases/tag/v3.1.0) - 2025-11-01
+
+ ### Changed
+
+ - **File system utilities**: `safeMkdir` and `safeMkdirSync` now default to `recursive: true`
+ - Nested directories are created by default, simplifying common usage patterns
+
  ## [3.0.6](https://github.com/SocketDev/socket-lib/releases/tag/v3.0.6) - 2025-11-01
 
  ### Added
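
The 3.1.0 entry above changes the default for `safeMkdir`/`safeMkdirSync`, and the hunks below show the library's own call sites dropping the explicit `{ recursive: true }` option. A minimal caller-side sketch of the difference, assuming the fs utilities are exposed to consumers under an import path like `@socketsecurity/lib/fs` (the import path is an assumption, not confirmed by this diff):

```ts
// Sketch only: the consumer import path below is assumed; the call shapes
// mirror the before/after forms visible in the dist diff hunks.
import { safeMkdir, safeMkdirSync } from '@socketsecurity/lib/fs'

async function ensureCacheDir(cacheEntryDir: string): Promise<void> {
  // <= 3.0.6: nested directory creation required an explicit option.
  // await safeMkdir(cacheEntryDir, { recursive: true })

  // >= 3.1.0: recursive: true is the default, so the bare call also
  // creates any missing parent directories.
  await safeMkdir(cacheEntryDir)
}

function ensureCacheDirSync(cacheEntryDir: string): void {
  // Per the changelog, the synchronous variant follows the same default.
  safeMkdirSync(cacheEntryDir)
}
```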
@@ -230,7 +230,7 @@ async function dlxBinary(args, options, spawnExtra) {
  }
  if (downloaded) {
  try {
- await (0, import_fs.safeMkdir)(cacheEntryDir, { recursive: true });
+ await (0, import_fs.safeMkdir)(cacheEntryDir);
  } catch (e) {
  const code = e.code;
  if (code === "EACCES" || code === "EPERM") {
@@ -301,7 +301,7 @@ async function downloadBinary(options) {
  downloaded = false;
  } else {
  try {
- await (0, import_fs.safeMkdir)(cacheEntryDir, { recursive: true });
+ await (0, import_fs.safeMkdir)(cacheEntryDir);
  } catch (e) {
  const code = e.code;
  if (code === "EACCES" || code === "EPERM") {
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../src/dlx-binary.ts"],
- "sourcesContent": ["/** @fileoverview DLX binary execution utilities for Socket ecosystem. */\n\nimport { createHash } from 'crypto'\n\nimport os from 'os'\n\nimport path from 'path'\n\nimport { WIN32 } from '#constants/platform'\n\nimport { generateCacheKey } from './dlx'\nimport { httpDownload } from './http-request'\nimport { isDir, readJson, safeDelete, safeMkdir } from './fs'\nimport { isObjectObject } from './objects'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nexport interface DlxBinaryOptions {\n /**\n * URL to download the binary from.\n */\n url: string\n\n /**\n * Optional name for the cached binary (defaults to URL hash).\n */\n name?: string | undefined\n\n /**\n * Expected checksum (sha256) for verification.\n */\n checksum?: string | undefined\n\n /**\n * Cache TTL in milliseconds (default: 7 days).\n */\n cacheTtl?: number | undefined\n\n /**\n * Force re-download even if cached.\n * Aligns with npm/npx --force flag.\n */\n force?: boolean | undefined\n\n /**\n * Skip confirmation prompts (auto-approve).\n * Aligns with npx --yes/-y flag.\n */\n yes?: boolean | undefined\n\n /**\n * Suppress output (quiet mode).\n * Aligns with npx --quiet/-q and pnpm --silent/-s flags.\n */\n quiet?: boolean | undefined\n\n /**\n * Additional spawn options.\n */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxBinaryResult {\n /** Path to the cached binary. */\n binaryPath: string\n /** Whether the binary was newly downloaded. */\n downloaded: boolean\n /** The spawn promise for the running process. 
*/\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Metadata structure for cached binaries (.dlx-metadata.json).\n * Unified schema shared across TypeScript (dlxBinary) and C++ (socket_macho_decompress).\n *\n * Core Fields (present in all implementations):\n * - version: Schema version (currently \"1.0.0\")\n * - cache_key: First 16 chars of SHA-512 hash (matches directory name)\n * - timestamp: Unix timestamp in milliseconds\n * - checksum: Full hash of cached binary (SHA-512 for C++, SHA-256 for TypeScript)\n * - checksum_algorithm: \"sha512\" or \"sha256\"\n * - platform: \"darwin\" | \"linux\" | \"win32\"\n * - arch: \"x64\" | \"arm64\"\n * - size: Size of cached binary in bytes\n * - source: Origin information\n * - type: \"download\" (from URL) or \"decompression\" (from embedded binary)\n * - url: Download URL (if type is \"download\")\n * - path: Source binary path (if type is \"decompression\")\n *\n * Extra Fields (implementation-specific):\n * - For C++ decompression:\n * - compressed_size: Size of compressed data in bytes\n * - compression_algorithm: Brotli level (numeric)\n * - compression_ratio: original_size / compressed_size\n *\n * Example (TypeScript download):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"a1b2c3d4e5f67890\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": \"sha256-abc123...\",\n * \"checksum_algorithm\": \"sha256\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 15000000,\n * \"source\": {\n * \"type\": \"download\",\n * \"url\": \"https://example.com/binary\"\n * }\n * }\n * ```\n *\n * Example (C++ decompression):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"0123456789abcdef\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": \"sha512-def456...\",\n * \"checksum_algorithm\": \"sha512\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 13000000,\n * \"source\": {\n * \"type\": \"decompression\",\n * \"path\": \"/usr/local/bin/socket\"\n * },\n * \"extra\": {\n * \"compressed_size\": 1700000,\n * \"compression_algorithm\": 3,\n * \"compression_ratio\": 7.647\n * }\n * }\n * ```\n *\n * @internal This interface documents the metadata file format.\n */\nexport interface DlxMetadata {\n version: string\n cache_key: string\n timestamp: number\n checksum: string\n checksum_algorithm: string\n platform: string\n arch: string\n size: number\n source?: {\n type: 'download' | 'decompression'\n url?: string\n path?: string\n }\n extra?: Record<string, unknown>\n}\n\n/**\n * Get metadata file path for a cached binary.\n */\nfunction getMetadataPath(cacheEntryPath: string): string {\n return path.join(cacheEntryPath, '.dlx-metadata.json')\n}\n\n/**\n * Check if a cached binary is still valid.\n */\nasync function isCacheValid(\n cacheEntryPath: string,\n cacheTtl: number,\n): Promise<boolean> {\n const fs = getFs()\n try {\n const metaPath = getMetadataPath(cacheEntryPath)\n if (!fs.existsSync(metaPath)) {\n return false\n }\n\n const metadata = await readJson(metaPath, { throws: false })\n if (!isObjectObject(metadata)) {\n return false\n }\n const now = Date.now()\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, cache is invalid\n if (typeof timestamp !== 'number' || timestamp <= 0) {\n return false\n }\n const age = now - timestamp\n\n return age < cacheTtl\n } catch {\n return false\n }\n}\n\n/**\n * Download a file from a URL with integrity checking and concurrent download protection.\n * Uses 
processLock to prevent multiple processes from downloading the same binary simultaneously.\n * Internal helper function for downloading binary files.\n */\nasync function downloadBinaryFile(\n url: string,\n destPath: string,\n checksum?: string | undefined,\n): Promise<string> {\n // Use process lock to prevent concurrent downloads.\n // Lock is placed in the cache entry directory as 'concurrency.lock'.\n const cacheEntryDir = path.dirname(destPath)\n const lockPath = path.join(cacheEntryDir, 'concurrency.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n const fs = getFs()\n // Check if file was downloaded while waiting for lock.\n if (fs.existsSync(destPath)) {\n const stats = await fs.promises.stat(destPath)\n if (stats.size > 0) {\n // File exists, compute and return checksum.\n const fileBuffer = await fs.promises.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n return hasher.digest('hex')\n }\n }\n\n // Download the file.\n try {\n await httpDownload(url, destPath)\n } catch (e) {\n throw new Error(\n `Failed to download binary from ${url}\\n` +\n `Destination: ${destPath}\\n` +\n 'Check your internet connection or verify the URL is accessible.',\n { cause: e },\n )\n }\n\n // Compute checksum of downloaded file.\n const fileBuffer = await fs.promises.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n const actualChecksum = hasher.digest('hex')\n\n // Verify checksum if provided.\n if (checksum && actualChecksum !== checksum) {\n // Clean up invalid file.\n await safeDelete(destPath)\n throw new Error(\n `Checksum mismatch: expected ${checksum}, got ${actualChecksum}`,\n )\n }\n\n // Make executable on POSIX systems.\n if (!WIN32) {\n await fs.promises.chmod(destPath, 0o755)\n }\n\n return actualChecksum\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Write metadata for a cached binary.\n * Uses unified schema shared with C++ decompressor and CLI dlxBinary.\n * Schema documentation: See DlxMetadata interface in this file (exported).\n * Core fields: version, cache_key, timestamp, checksum, checksum_algorithm, platform, arch, size, source\n * Note: This implementation uses SHA-256 checksums instead of SHA-512.\n */\nasync function writeMetadata(\n cacheEntryPath: string,\n cacheKey: string,\n url: string,\n checksum: string,\n size: number,\n): Promise<void> {\n const metaPath = getMetadataPath(cacheEntryPath)\n const metadata = {\n version: '1.0.0',\n cache_key: cacheKey,\n timestamp: Date.now(),\n checksum,\n checksum_algorithm: 'sha256',\n platform: os.platform(),\n arch: os.arch(),\n size,\n source: {\n type: 'download',\n url,\n },\n }\n const fs = getFs()\n await fs.promises.writeFile(metaPath, JSON.stringify(metadata, null, 2))\n}\n\n/**\n * Clean expired entries from the DLX cache.\n */\nexport async function cleanDlxCache(\n maxAge: number = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n): Promise<number> {\n const cacheDir = getDlxCachePath()\n const fs = getFs()\n\n if (!fs.existsSync(cacheDir)) {\n return 0\n }\n\n let cleaned = 0\n const now = Date.now()\n const entries = await fs.promises.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n const metaPath = getMetadataPath(entryPath)\n\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n // eslint-disable-next-line no-await-in-loop\n 
const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, treat as expired (age = infinity)\n const age =\n typeof timestamp === 'number' && timestamp > 0\n ? now - timestamp\n : Number.POSITIVE_INFINITY\n\n if (age > maxAge) {\n // Remove entire cache entry directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath, { force: true, recursive: true })\n cleaned += 1\n }\n } catch {\n // If we can't read metadata, check if directory is empty or corrupted.\n try {\n // eslint-disable-next-line no-await-in-loop\n const contents = await fs.promises.readdir(entryPath)\n if (!contents.length) {\n // Remove empty directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath)\n cleaned += 1\n }\n } catch {}\n }\n }\n\n return cleaned\n}\n\n/**\n * Download and execute a binary from a URL with caching.\n */\nexport async function dlxBinary(\n args: readonly string[] | string[],\n options?: DlxBinaryOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxBinaryResult> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force: userForce = false,\n name,\n spawnOptions,\n url,\n yes,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Map --yes flag to force behavior (auto-approve/skip prompts)\n const force = yes === true ? true : userForce\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n const fs = getFs()\n\n let downloaded = false\n let computedChecksum = checksum\n\n // Check if we need to download.\n if (\n !force &&\n fs.existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid, read the checksum from metadata.\n try {\n const metaPath = getMetadataPath(cacheEntryDir)\n const metadata = await readJson(metaPath, { throws: false })\n if (\n metadata &&\n typeof metadata === 'object' &&\n !Array.isArray(metadata) &&\n typeof (metadata as Record<string, unknown>)['checksum'] === 'string'\n ) {\n computedChecksum = (metadata as Record<string, unknown>)[\n 'checksum'\n ] as string\n } else {\n // If metadata is invalid, re-download.\n downloaded = true\n }\n } catch {\n // If we can't read metadata, re-download.\n downloaded = true\n }\n } else {\n downloaded = true\n }\n\n if (downloaded) {\n // Ensure cache directory exists before downloading.\n try {\n await safeMkdir(cacheEntryDir, { recursive: true })\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable 
location.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.promises.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n computedChecksum || '',\n stats.size,\n )\n }\n\n // Execute the binary.\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension (e.g.,\n // C:\\cache\\test.cmd becomes just \"test\"). Windows cmd.exe then searches for \"test\"\n // in directories listed in PATH, trying each extension from PATHEXT environment\n // variable (.COM, .EXE, .BAT, .CMD, etc.) until it finds a match.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH\n // (unlike system package managers like npm/pnpm/yarn which are already in PATH),\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n //\n // This approach is consistent with how other tools handle Windows command execution:\n // - npm's promise-spawn: uses which.sync() to find commands in PATH\n // - cross-spawn: spawns cmd.exe with escaped arguments\n // - Node.js spawn with shell: true: delegates to cmd.exe which uses PATH\n const finalSpawnOptions = needsShell\n ? 
{\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n const spawnPromise = spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n\n return {\n binaryPath,\n downloaded,\n spawnPromise,\n }\n}\n\n/**\n * Download a binary from a URL with caching (without execution).\n * Similar to downloadPackage from dlx-package.\n *\n * @returns Object containing the path to the cached binary and whether it was downloaded\n */\nexport async function downloadBinary(\n options: Omit<DlxBinaryOptions, 'spawnOptions'>,\n): Promise<{ binaryPath: string; downloaded: boolean }> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force = false,\n name,\n url,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n const fs = getFs()\n\n let downloaded = false\n\n // Check if we need to download.\n if (\n !force &&\n fs.existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid.\n downloaded = false\n } else {\n // Ensure cache directory exists before downloading.\n try {\n await safeMkdir(cacheEntryDir, { recursive: true })\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n const computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.promises.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n computedChecksum || '',\n stats.size,\n )\n downloaded = true\n }\n\n return {\n binaryPath,\n downloaded,\n }\n}\n\n/**\n * Execute a cached binary without re-downloading.\n * Similar to executePackage from dlx-package.\n * Binary must have been previously downloaded via downloadBinary or dlxBinary.\n *\n * @param binaryPath Path to the cached binary (from downloadBinary result)\n * @param args Arguments to pass to the binary\n * @param spawnOptions Spawn options for execution\n * @param spawnExtra Extra spawn configuration\n * @returns The spawn promise for the running process\n */\nexport function executeBinary(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: 
.exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension. Windows cmd.exe\n // then searches for the binary in directories listed in PATH.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH,\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n const cacheEntryDir = path.dirname(binaryPath)\n const finalSpawnOptions = needsShell\n ? {\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n\n return spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n}\n\n/**\n * Get the DLX binary cache directory path.\n * Returns normalized path for cross-platform compatibility.\n * Uses same directory as dlx-package for unified DLX storage.\n */\nexport function getDlxCachePath(): string {\n return getSocketDlxDir()\n}\n\n/**\n * Get information about cached binaries.\n */\nexport async function listDlxCache(): Promise<\n Array<{\n age: number\n arch: string\n checksum: string\n name: string\n platform: string\n size: number\n url: string\n }>\n> {\n const cacheDir = getDlxCachePath()\n const fs = getFs()\n\n if (!fs.existsSync(cacheDir)) {\n return []\n }\n\n const results = []\n const now = Date.now()\n const entries = await fs.promises.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n const metaPath = getMetadataPath(entryPath)\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n\n const metaObj = metadata as Record<string, unknown>\n\n // Get URL from unified schema (source.url) or legacy schema (url).\n // Allow empty URL for backward compatibility with partial metadata.\n const source = metaObj['source'] as Record<string, unknown> | undefined\n const url =\n (source?.['url'] as string) || (metaObj['url'] as string) || ''\n\n // Find the binary file in the directory.\n // eslint-disable-next-line no-await-in-loop\n const files = await fs.promises.readdir(entryPath)\n const binaryFile = files.find(f => !f.startsWith('.'))\n\n if (binaryFile) {\n const binaryPath = path.join(entryPath, binaryFile)\n // eslint-disable-next-line no-await-in-loop\n const binaryStats = await fs.promises.stat(binaryPath)\n\n results.push({\n age: now - ((metaObj['timestamp'] as number) || 0),\n arch: (metaObj['arch'] as string) || 'unknown',\n checksum: (metaObj['checksum'] as string) || '',\n name: binaryFile,\n platform: (metaObj['platform'] as string) || 'unknown',\n size: binaryStats.size,\n url,\n })\n }\n } catch {}\n }\n\n return results\n}\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAA2B;AAE3B,gBAAe;AAEf,kBAAiB;AAEjB,sBAAsB;AAEtB,iBAAiC;AACjC,0BAA6B;AAC7B,gBAAuD;AACvD,qBAA+B;AAC/B,IAAAA,eAA8B;AAC9B,mBAAgC;AAChC,0BAA4B;AAE5B,mBAAsB;AAEtB,IAAI;AAAA;AASJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AA+IA,SAAS,gBAAgB,gBAAgC;AACvD,SAAO,YAAAC,QAAK,KAAK,gBAAgB,oBAAoB;AACvD;AAKA,eAAe,aACb,gBACA,UACkB;AAClB,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,WAAW,gBAAgB,cAAc;AAC/C,QAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,QAAI,KAAC,+BAAe,QAAQ,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,YAAa,SAAqC,WAAW;AAEnE,QAAI,OAAO,cAAc,YAAY,aAAa,GAAG;AACnD,aAAO;AAAA,IACT;AACA,UAAM,MAAM,MAAM;AAElB,WAAO,MAAM;AAAA,EACf,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAOA,eAAe,mBACb,KACA,UACA,UACiB;AAGjB,QAAM,gBAAgB,YAAAA,QAAK,QAAQ,QAAQ;AAC3C,QAAM,WAAW,YAAAA,QAAK,KAAK,eAAe,kBAAkB;AAE5D,SAAO,MAAM,gCAAY;AAAA,IACvB;AAAA,IACA,YAAY;AACV,YAAM,KAAK,sBAAM;AAEjB,UAAI,GAAG,WAAW,QAAQ,GAAG;AAC3B,cAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,QAAQ;AAC7C,YAAI,MAAM,OAAO,GAAG;AAElB,gBAAMC,cAAa,MAAM,GAAG,SAAS,SAAS,QAAQ;AACtD,gBAAMC,cAAS,0BAAW,QAAQ;AAClC,UAAAA,QAAO,OAAOD,WAAU;AACxB,iBAAOC,QAAO,OAAO,KAAK;AAAA,QAC5B;AAAA,MACF;AAGA,UAAI;AACF,kBAAM,kCAAa,KAAK,QAAQ;AAAA,MAClC,SAAS,GAAG;AACV,cAAM,IAAI;AAAA,UACR,kCAAkC,GAAG;AAAA,eACnB,QAAQ;AAAA;AAAA,UAE1B,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AAGA,YAAM,aAAa,MAAM,GAAG,SAAS,SAAS,QAAQ;AACtD,YAAM,aAAS,0BAAW,QAAQ;AAClC,aAAO,OAAO,UAAU;AACxB,YAAM,iBAAiB,OAAO,OAAO,KAAK;AAG1C,UAAI,YAAY,mBAAmB,UAAU;AAE3C,kBAAM,sBAAW,QAAQ;AACzB,cAAM,IAAI;AAAA,UACR,+BAA+B,QAAQ,SAAS,cAAc;AAAA,QAChE;AAAA,MACF;AAGA,UAAI,CAAC,uBAAO;AACV,cAAM,GAAG,SAAS,MAAM,UAAU,GAAK;AAAA,MACzC;AAEA,aAAO;AAAA,IACT;AAAA,IACA;AAAA;AAAA,MAEE,SAAS;AAAA,MACT,iBAAiB;AAAA,IACnB;AAAA,EACF;AACF;AASA,eAAe,cACb,gBACA,UACA,KACA,UACA,MACe;AACf,QAAM,WAAW,gBAAgB,cAAc;AAC/C,QAAM,WAAW;AAAA,IACf,SAAS;AAAA,IACT,WAAW;AAAA,IACX,WAAW,KAAK,IAAI;AAAA,IACpB;AAAA,IACA,oBAAoB;AAAA,IACpB,UAAU,UAAAC,QAAG,SAAS;AAAA,IACtB,MAAM,UAAAA,QAAG,KAAK;AAAA,IACd;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,GAAG,SAAS,UAAU,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AACzE;AAKA,eAAsB,cACpB;AAAA;AAAA,EAAiC,QAAQ,iBAAiB,EAAE;AAAA,GAC3C;AACjB,QAAM,WAAW,gBAAgB;AACjC,QAAM,KAAK,sBAAM;AAEjB,MAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,WAAO;AAAA,EACT;AAEA,MAAI,UAAU;AACd,QAAM,MAAM,KAAK,IAAI;AACrB,QAAM,UAAU,MAAM,GAAG,SAAS,QAAQ,QAAQ;AAElD,aAAW,SAAS,SAAS;AAC3B,UAAM,YAAY,YAAAH,QAAK,KAAK,UAAU,KAAK;AAC3C,UAAM,WAAW,gBAAgB,SAAS;AAE1C,QAAI;AAEF,UAAI,CAAE,UAAM,iBAAM,SAAS,GAAI;AAC7B;AAAA,MACF;AAGA,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,CAAC,YACD,OAAO,aAAa,YACpB,MAAM,QAAQ,QAAQ,GACtB;AACA;AAAA,MACF;AACA,YAAM,YAAa,SAAqC,WAAW;AAEnE,YAAM,MACJ,OAAO,cAAc,YAAY,YAAY,IACzC,MAAM,YACN,OAAO;AAEb,UAAI,MAAM,QAAQ;AAGhB,kBAAM,sBAAW,WAAW,EAAE,OAAO,MAAM,WAAW,KAAK,CAAC;AAC5D,mBAAW;AAAA,MACb;AAAA,IACF,QAAQ;AAEN,UAAI;AAEF,cAAM,WAAW,MAAM,GAAG,SAAS,QAAQ,SAAS;AACpD,YAAI,CAAC,SAAS,QAAQ;AAGpB,oBAAM,sBAAW,SAAS;AAC1B,qBAAW;AAAA,QACb;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,UACpB,MACA,SACA,YAC0B;AAC1B,QAAM;AAAA,IACJ;AAAA;AAAA,MAA2B,QAAQ,iBAAiB,EAAE;AAAA;AAAA,IACtD;AAAA,IACA,OAAO,YAAY;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAGlC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAGpC,QAAM,WAAW,gBAAgB;AACjC,QAAM,aAAa,QAAQ,UAAU,QAAQ,QAAQ,IAAI,UAAAG,QAAG,KAAK,CAAC;AAElE,QAAM,OAAO,GAAG,GAAG,IAAI,UAAU;AACjC,QAAM,eAAW,6BAAiB,IAAI;AACtC,QAAM,gBAAgB,YAAAH,QAAK,KAAK,UAA
U,QAAQ;AAClD,QAAM,iBAAa,4BAAc,YAAAA,QAAK,KAAK,eAAe,UAAU,CAAC;AACrE,QAAM,KAAK,sBAAM;AAEjB,MAAI,aAAa;AACjB,MAAI,mBAAmB;AAGvB,MACE,CAAC,SACD,GAAG,WAAW,aAAa,KAC1B,MAAM,aAAa,eAAe,QAAQ,GAC3C;AAEA,QAAI;AACF,YAAM,WAAW,gBAAgB,aAAa;AAC9C,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,YACA,OAAO,aAAa,YACpB,CAAC,MAAM,QAAQ,QAAQ,KACvB,OAAQ,SAAqC,UAAU,MAAM,UAC7D;AACA,2BAAoB,SAClB,UACF;AAAA,MACF,OAAO;AAEL,qBAAa;AAAA,MACf;AAAA,IACF,QAAQ;AAEN,mBAAa;AAAA,IACf;AAAA,EACF,OAAO;AACL,iBAAa;AAAA,EACf;AAEA,MAAI,YAAY;AAEd,QAAI;AACF,gBAAM,qBAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAAA,IACpD,SAAS,GAAG;AACV,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,sDAAsD,aAAa;AAAA;AAAA,UAEnE,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,SAAS,SAAS;AACpB,cAAM,IAAI;AAAA,UACR,iEAAiE,aAAa;AAAA;AAAA,UAE9E,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,4CAA4C,aAAa;AAAA,QACzD,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AAGA,uBAAmB,MAAM,mBAAmB,KAAK,YAAY,QAAQ;AAGrE,UAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,UAAU;AAC/C,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,oBAAoB;AAAA,MACpB,MAAM;AAAA,IACR;AAAA,EACF;AAMA,QAAM,aAAa,yBAAS,sBAAsB,KAAK,UAAU;AAgBjE,QAAM,oBAAoB,aACtB;AAAA,IACE,GAAG;AAAA,IACH,KAAK;AAAA,MACH,GAAG,cAAc;AAAA,MACjB,MAAM,GAAG,aAAa,GAAG,YAAAA,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAM,KAAK,EAAE;AAAA,IACrE;AAAA,IACA,OAAO;AAAA,EACT,IACA;AACJ,QAAM,mBAAe,oBAAM,YAAY,MAAM,mBAAmB,UAAU;AAE1E,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAQA,eAAsB,eACpB,SACsD;AACtD,QAAM;AAAA,IACJ;AAAA;AAAA,MAA2B,QAAQ,iBAAiB,EAAE;AAAA;AAAA,IACtD;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EACF,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAGlC,QAAM,WAAW,gBAAgB;AACjC,QAAM,aAAa,QAAQ,UAAU,QAAQ,QAAQ,IAAI,UAAAG,QAAG,KAAK,CAAC;AAElE,QAAM,OAAO,GAAG,GAAG,IAAI,UAAU;AACjC,QAAM,eAAW,6BAAiB,IAAI;AACtC,QAAM,gBAAgB,YAAAH,QAAK,KAAK,UAAU,QAAQ;AAClD,QAAM,iBAAa,4BAAc,YAAAA,QAAK,KAAK,eAAe,UAAU,CAAC;AACrE,QAAM,KAAK,sBAAM;AAEjB,MAAI,aAAa;AAGjB,MACE,CAAC,SACD,GAAG,WAAW,aAAa,KAC1B,MAAM,aAAa,eAAe,QAAQ,GAC3C;AAEA,iBAAa;AAAA,EACf,OAAO;AAEL,QAAI;AACF,gBAAM,qBAAU,eAAe,EAAE,WAAW,KAAK,CAAC;AAAA,IACpD,SAAS,GAAG;AACV,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,sDAAsD,aAAa;AAAA;AAAA,UAEnE,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,SAAS,SAAS;AACpB,cAAM,IAAI;AAAA,UACR,iEAAiE,aAAa;AAAA;AAAA,UAE9E,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,4CAA4C,aAAa;AAAA,QACzD,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AAGA,UAAM,mBAAmB,MAAM,mBAAmB,KAAK,YAAY,QAAQ;AAG3E,UAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,UAAU;AAC/C,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,oBAAoB;AAAA,MACpB,MAAM;AAAA,IACR;AACA,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAaO,SAAS,cACd,YACA,MACA,cACA,YAC0B;AAI1B,QAAM,aAAa,yBAAS,sBAAsB,KAAK,UAAU;AASjE,QAAM,gBAAgB,YAAAA,QAAK,QAAQ,UAAU;AAC7C,QAAM,oBAAoB,aACtB;AAAA,IACE,GAAG;AAAA,IACH,KAAK;AAAA,MACH,GAAG,cAAc;AAAA,MACjB,MAAM,GAAG,aAAa,GAAG,YAAAA,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAM,KAAK,EAAE;AAAA,IACrE;AAAA,IACA,OAAO;AAAA,EACT,IACA;AAEJ,aAAO,oBAAM,YAAY,MAAM,mBAAmB,UAAU;AAC9D;AAOO,SAAS,kBAA0B;AACxC,aAAO,8BAAgB;AACzB;AAKA,eAAsB,eAUpB;AACA,QAAM,WAAW,gBAAgB;AACjC,QAAM,KAAK,sBAAM;AAEjB,MAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,UAAU,CAAC;AACjB,QAAM,MAAM,KAAK,IAAI;AACrB,QAAM,UAAU,MAAM,GAAG,SAAS,QAAQ,QAAQ;AAElD,aAAW,SAAS,SAAS;AAC3B,UAAM,YAAY,YAAAA,QAAK,KAAK,UAAU,KAAK;AAC3C,QAAI;AAEF,UAAI,CAAE,UAAM,iBAAM,SAAS,GAAI;AAC7B;AAAA,MACF;AAEA,YAAM,WAAW,gBAAgB,SAAS;AAE1C,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,CAAC,YACD,OAAO,aAAa,YACpB,MAAM,QAAQ,QAAQ,GACtB;AACA;AAAA,MACF;AAEA,YAAM,UAAU;AAIhB,YAAM,SAAS,QAAQ,QAAQ;AAC/B,YAAM,MACH,SAAS,KAAK,KAAiB
,QAAQ,KAAK,KAAgB;AAI/D,YAAM,QAAQ,MAAM,GAAG,SAAS,QAAQ,SAAS;AACjD,YAAM,aAAa,MAAM,KAAK,OAAK,CAAC,EAAE,WAAW,GAAG,CAAC;AAErD,UAAI,YAAY;AACd,cAAM,aAAa,YAAAA,QAAK,KAAK,WAAW,UAAU;AAElD,cAAM,cAAc,MAAM,GAAG,SAAS,KAAK,UAAU;AAErD,gBAAQ,KAAK;AAAA,UACX,KAAK,OAAQ,QAAQ,WAAW,KAAgB;AAAA,UAChD,MAAO,QAAQ,MAAM,KAAgB;AAAA,UACrC,UAAW,QAAQ,UAAU,KAAgB;AAAA,UAC7C,MAAM;AAAA,UACN,UAAW,QAAQ,UAAU,KAAgB;AAAA,UAC7C,MAAM,YAAY;AAAA,UAClB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,QAAQ;AAAA,IAAC;AAAA,EACX;AAEA,SAAO;AACT;",
+ "sourcesContent": ["/** @fileoverview DLX binary execution utilities for Socket ecosystem. */\n\nimport { createHash } from 'crypto'\n\nimport os from 'os'\n\nimport path from 'path'\n\nimport { WIN32 } from '#constants/platform'\n\nimport { generateCacheKey } from './dlx'\nimport { httpDownload } from './http-request'\nimport { isDir, readJson, safeDelete, safeMkdir } from './fs'\nimport { isObjectObject } from './objects'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nexport interface DlxBinaryOptions {\n /**\n * URL to download the binary from.\n */\n url: string\n\n /**\n * Optional name for the cached binary (defaults to URL hash).\n */\n name?: string | undefined\n\n /**\n * Expected checksum (sha256) for verification.\n */\n checksum?: string | undefined\n\n /**\n * Cache TTL in milliseconds (default: 7 days).\n */\n cacheTtl?: number | undefined\n\n /**\n * Force re-download even if cached.\n * Aligns with npm/npx --force flag.\n */\n force?: boolean | undefined\n\n /**\n * Skip confirmation prompts (auto-approve).\n * Aligns with npx --yes/-y flag.\n */\n yes?: boolean | undefined\n\n /**\n * Suppress output (quiet mode).\n * Aligns with npx --quiet/-q and pnpm --silent/-s flags.\n */\n quiet?: boolean | undefined\n\n /**\n * Additional spawn options.\n */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxBinaryResult {\n /** Path to the cached binary. */\n binaryPath: string\n /** Whether the binary was newly downloaded. */\n downloaded: boolean\n /** The spawn promise for the running process. 
*/\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Metadata structure for cached binaries (.dlx-metadata.json).\n * Unified schema shared across TypeScript (dlxBinary) and C++ (socket_macho_decompress).\n *\n * Core Fields (present in all implementations):\n * - version: Schema version (currently \"1.0.0\")\n * - cache_key: First 16 chars of SHA-512 hash (matches directory name)\n * - timestamp: Unix timestamp in milliseconds\n * - checksum: Full hash of cached binary (SHA-512 for C++, SHA-256 for TypeScript)\n * - checksum_algorithm: \"sha512\" or \"sha256\"\n * - platform: \"darwin\" | \"linux\" | \"win32\"\n * - arch: \"x64\" | \"arm64\"\n * - size: Size of cached binary in bytes\n * - source: Origin information\n * - type: \"download\" (from URL) or \"decompression\" (from embedded binary)\n * - url: Download URL (if type is \"download\")\n * - path: Source binary path (if type is \"decompression\")\n *\n * Extra Fields (implementation-specific):\n * - For C++ decompression:\n * - compressed_size: Size of compressed data in bytes\n * - compression_algorithm: Brotli level (numeric)\n * - compression_ratio: original_size / compressed_size\n *\n * Example (TypeScript download):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"a1b2c3d4e5f67890\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": \"sha256-abc123...\",\n * \"checksum_algorithm\": \"sha256\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 15000000,\n * \"source\": {\n * \"type\": \"download\",\n * \"url\": \"https://example.com/binary\"\n * }\n * }\n * ```\n *\n * Example (C++ decompression):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"0123456789abcdef\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": \"sha512-def456...\",\n * \"checksum_algorithm\": \"sha512\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 13000000,\n * \"source\": {\n * \"type\": \"decompression\",\n * \"path\": \"/usr/local/bin/socket\"\n * },\n * \"extra\": {\n * \"compressed_size\": 1700000,\n * \"compression_algorithm\": 3,\n * \"compression_ratio\": 7.647\n * }\n * }\n * ```\n *\n * @internal This interface documents the metadata file format.\n */\nexport interface DlxMetadata {\n version: string\n cache_key: string\n timestamp: number\n checksum: string\n checksum_algorithm: string\n platform: string\n arch: string\n size: number\n source?: {\n type: 'download' | 'decompression'\n url?: string\n path?: string\n }\n extra?: Record<string, unknown>\n}\n\n/**\n * Get metadata file path for a cached binary.\n */\nfunction getMetadataPath(cacheEntryPath: string): string {\n return path.join(cacheEntryPath, '.dlx-metadata.json')\n}\n\n/**\n * Check if a cached binary is still valid.\n */\nasync function isCacheValid(\n cacheEntryPath: string,\n cacheTtl: number,\n): Promise<boolean> {\n const fs = getFs()\n try {\n const metaPath = getMetadataPath(cacheEntryPath)\n if (!fs.existsSync(metaPath)) {\n return false\n }\n\n const metadata = await readJson(metaPath, { throws: false })\n if (!isObjectObject(metadata)) {\n return false\n }\n const now = Date.now()\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, cache is invalid\n if (typeof timestamp !== 'number' || timestamp <= 0) {\n return false\n }\n const age = now - timestamp\n\n return age < cacheTtl\n } catch {\n return false\n }\n}\n\n/**\n * Download a file from a URL with integrity checking and concurrent download protection.\n * Uses 
processLock to prevent multiple processes from downloading the same binary simultaneously.\n * Internal helper function for downloading binary files.\n */\nasync function downloadBinaryFile(\n url: string,\n destPath: string,\n checksum?: string | undefined,\n): Promise<string> {\n // Use process lock to prevent concurrent downloads.\n // Lock is placed in the cache entry directory as 'concurrency.lock'.\n const cacheEntryDir = path.dirname(destPath)\n const lockPath = path.join(cacheEntryDir, 'concurrency.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n const fs = getFs()\n // Check if file was downloaded while waiting for lock.\n if (fs.existsSync(destPath)) {\n const stats = await fs.promises.stat(destPath)\n if (stats.size > 0) {\n // File exists, compute and return checksum.\n const fileBuffer = await fs.promises.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n return hasher.digest('hex')\n }\n }\n\n // Download the file.\n try {\n await httpDownload(url, destPath)\n } catch (e) {\n throw new Error(\n `Failed to download binary from ${url}\\n` +\n `Destination: ${destPath}\\n` +\n 'Check your internet connection or verify the URL is accessible.',\n { cause: e },\n )\n }\n\n // Compute checksum of downloaded file.\n const fileBuffer = await fs.promises.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n const actualChecksum = hasher.digest('hex')\n\n // Verify checksum if provided.\n if (checksum && actualChecksum !== checksum) {\n // Clean up invalid file.\n await safeDelete(destPath)\n throw new Error(\n `Checksum mismatch: expected ${checksum}, got ${actualChecksum}`,\n )\n }\n\n // Make executable on POSIX systems.\n if (!WIN32) {\n await fs.promises.chmod(destPath, 0o755)\n }\n\n return actualChecksum\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Write metadata for a cached binary.\n * Uses unified schema shared with C++ decompressor and CLI dlxBinary.\n * Schema documentation: See DlxMetadata interface in this file (exported).\n * Core fields: version, cache_key, timestamp, checksum, checksum_algorithm, platform, arch, size, source\n * Note: This implementation uses SHA-256 checksums instead of SHA-512.\n */\nasync function writeMetadata(\n cacheEntryPath: string,\n cacheKey: string,\n url: string,\n checksum: string,\n size: number,\n): Promise<void> {\n const metaPath = getMetadataPath(cacheEntryPath)\n const metadata = {\n version: '1.0.0',\n cache_key: cacheKey,\n timestamp: Date.now(),\n checksum,\n checksum_algorithm: 'sha256',\n platform: os.platform(),\n arch: os.arch(),\n size,\n source: {\n type: 'download',\n url,\n },\n }\n const fs = getFs()\n await fs.promises.writeFile(metaPath, JSON.stringify(metadata, null, 2))\n}\n\n/**\n * Clean expired entries from the DLX cache.\n */\nexport async function cleanDlxCache(\n maxAge: number = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n): Promise<number> {\n const cacheDir = getDlxCachePath()\n const fs = getFs()\n\n if (!fs.existsSync(cacheDir)) {\n return 0\n }\n\n let cleaned = 0\n const now = Date.now()\n const entries = await fs.promises.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n const metaPath = getMetadataPath(entryPath)\n\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n // eslint-disable-next-line no-await-in-loop\n 
const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, treat as expired (age = infinity)\n const age =\n typeof timestamp === 'number' && timestamp > 0\n ? now - timestamp\n : Number.POSITIVE_INFINITY\n\n if (age > maxAge) {\n // Remove entire cache entry directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath, { force: true, recursive: true })\n cleaned += 1\n }\n } catch {\n // If we can't read metadata, check if directory is empty or corrupted.\n try {\n // eslint-disable-next-line no-await-in-loop\n const contents = await fs.promises.readdir(entryPath)\n if (!contents.length) {\n // Remove empty directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath)\n cleaned += 1\n }\n } catch {}\n }\n }\n\n return cleaned\n}\n\n/**\n * Download and execute a binary from a URL with caching.\n */\nexport async function dlxBinary(\n args: readonly string[] | string[],\n options?: DlxBinaryOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxBinaryResult> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force: userForce = false,\n name,\n spawnOptions,\n url,\n yes,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Map --yes flag to force behavior (auto-approve/skip prompts)\n const force = yes === true ? true : userForce\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n const fs = getFs()\n\n let downloaded = false\n let computedChecksum = checksum\n\n // Check if we need to download.\n if (\n !force &&\n fs.existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid, read the checksum from metadata.\n try {\n const metaPath = getMetadataPath(cacheEntryDir)\n const metadata = await readJson(metaPath, { throws: false })\n if (\n metadata &&\n typeof metadata === 'object' &&\n !Array.isArray(metadata) &&\n typeof (metadata as Record<string, unknown>)['checksum'] === 'string'\n ) {\n computedChecksum = (metadata as Record<string, unknown>)[\n 'checksum'\n ] as string\n } else {\n // If metadata is invalid, re-download.\n downloaded = true\n }\n } catch {\n // If we can't read metadata, re-download.\n downloaded = true\n }\n } else {\n downloaded = true\n }\n\n if (downloaded) {\n // Ensure cache directory exists before downloading.\n try {\n await safeMkdir(cacheEntryDir)\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: 
e },\n )\n }\n throw new Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.promises.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n computedChecksum || '',\n stats.size,\n )\n }\n\n // Execute the binary.\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension (e.g.,\n // C:\\cache\\test.cmd becomes just \"test\"). Windows cmd.exe then searches for \"test\"\n // in directories listed in PATH, trying each extension from PATHEXT environment\n // variable (.COM, .EXE, .BAT, .CMD, etc.) until it finds a match.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH\n // (unlike system package managers like npm/pnpm/yarn which are already in PATH),\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n //\n // This approach is consistent with how other tools handle Windows command execution:\n // - npm's promise-spawn: uses which.sync() to find commands in PATH\n // - cross-spawn: spawns cmd.exe with escaped arguments\n // - Node.js spawn with shell: true: delegates to cmd.exe which uses PATH\n const finalSpawnOptions = needsShell\n ? 
{\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n const spawnPromise = spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n\n return {\n binaryPath,\n downloaded,\n spawnPromise,\n }\n}\n\n/**\n * Download a binary from a URL with caching (without execution).\n * Similar to downloadPackage from dlx-package.\n *\n * @returns Object containing the path to the cached binary and whether it was downloaded\n */\nexport async function downloadBinary(\n options: Omit<DlxBinaryOptions, 'spawnOptions'>,\n): Promise<{ binaryPath: string; downloaded: boolean }> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force = false,\n name,\n url,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n const fs = getFs()\n\n let downloaded = false\n\n // Check if we need to download.\n if (\n !force &&\n fs.existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid.\n downloaded = false\n } else {\n // Ensure cache directory exists before downloading.\n try {\n await safeMkdir(cacheEntryDir)\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n const computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.promises.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n computedChecksum || '',\n stats.size,\n )\n downloaded = true\n }\n\n return {\n binaryPath,\n downloaded,\n }\n}\n\n/**\n * Execute a cached binary without re-downloading.\n * Similar to executePackage from dlx-package.\n * Binary must have been previously downloaded via downloadBinary or dlxBinary.\n *\n * @param binaryPath Path to the cached binary (from downloadBinary result)\n * @param args Arguments to pass to the binary\n * @param spawnOptions Spawn options for execution\n * @param spawnExtra Extra spawn configuration\n * @returns The spawn promise for the running process\n */\nexport function executeBinary(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual 
binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension. Windows cmd.exe\n // then searches for the binary in directories listed in PATH.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH,\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n const cacheEntryDir = path.dirname(binaryPath)\n const finalSpawnOptions = needsShell\n ? {\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n\n return spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n}\n\n/**\n * Get the DLX binary cache directory path.\n * Returns normalized path for cross-platform compatibility.\n * Uses same directory as dlx-package for unified DLX storage.\n */\nexport function getDlxCachePath(): string {\n return getSocketDlxDir()\n}\n\n/**\n * Get information about cached binaries.\n */\nexport async function listDlxCache(): Promise<\n Array<{\n age: number\n arch: string\n checksum: string\n name: string\n platform: string\n size: number\n url: string\n }>\n> {\n const cacheDir = getDlxCachePath()\n const fs = getFs()\n\n if (!fs.existsSync(cacheDir)) {\n return []\n }\n\n const results = []\n const now = Date.now()\n const entries = await fs.promises.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n const metaPath = getMetadataPath(entryPath)\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n\n const metaObj = metadata as Record<string, unknown>\n\n // Get URL from unified schema (source.url) or legacy schema (url).\n // Allow empty URL for backward compatibility with partial metadata.\n const source = metaObj['source'] as Record<string, unknown> | undefined\n const url =\n (source?.['url'] as string) || (metaObj['url'] as string) || ''\n\n // Find the binary file in the directory.\n // eslint-disable-next-line no-await-in-loop\n const files = await fs.promises.readdir(entryPath)\n const binaryFile = files.find(f => !f.startsWith('.'))\n\n if (binaryFile) {\n const binaryPath = path.join(entryPath, binaryFile)\n // eslint-disable-next-line no-await-in-loop\n const binaryStats = await fs.promises.stat(binaryPath)\n\n results.push({\n age: now - ((metaObj['timestamp'] as number) || 0),\n arch: (metaObj['arch'] as string) || 'unknown',\n checksum: (metaObj['checksum'] as string) || '',\n name: binaryFile,\n platform: (metaObj['platform'] as string) || 'unknown',\n size: binaryStats.size,\n url,\n })\n }\n } catch {}\n }\n\n return results\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAA2B;AAE3B,gBAAe;AAEf,kBAAiB;AAEjB,sBAAsB;AAEtB,iBAAiC;AACjC,0BAA6B;AAC7B,gBAAuD;AACvD,qBAA+B;AAC/B,IAAAA,eAA8B;AAC9B,mBAAgC;AAChC,0BAA4B;AAE5B,mBAAsB;AAEtB,IAAI;AAAA;AASJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AA+IA,SAAS,gBAAgB,gBAAgC;AACvD,SAAO,YAAAC,QAAK,KAAK,gBAAgB,oBAAoB;AACvD;AAKA,eAAe,aACb,gBACA,UACkB;AAClB,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,WAAW,gBAAgB,cAAc;AAC/C,QAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,aAAO;AAAA,IACT;AAEA,UAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,QAAI,KAAC,+BAAe,QAAQ,GAAG;AAC7B,aAAO;AAAA,IACT;AACA,UAAM,MAAM,KAAK,IAAI;AACrB,UAAM,YAAa,SAAqC,WAAW;AAEnE,QAAI,OAAO,cAAc,YAAY,aAAa,GAAG;AACnD,aAAO;AAAA,IACT;AACA,UAAM,MAAM,MAAM;AAElB,WAAO,MAAM;AAAA,EACf,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAOA,eAAe,mBACb,KACA,UACA,UACiB;AAGjB,QAAM,gBAAgB,YAAAA,QAAK,QAAQ,QAAQ;AAC3C,QAAM,WAAW,YAAAA,QAAK,KAAK,eAAe,kBAAkB;AAE5D,SAAO,MAAM,gCAAY;AAAA,IACvB;AAAA,IACA,YAAY;AACV,YAAM,KAAK,sBAAM;AAEjB,UAAI,GAAG,WAAW,QAAQ,GAAG;AAC3B,cAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,QAAQ;AAC7C,YAAI,MAAM,OAAO,GAAG;AAElB,gBAAMC,cAAa,MAAM,GAAG,SAAS,SAAS,QAAQ;AACtD,gBAAMC,cAAS,0BAAW,QAAQ;AAClC,UAAAA,QAAO,OAAOD,WAAU;AACxB,iBAAOC,QAAO,OAAO,KAAK;AAAA,QAC5B;AAAA,MACF;AAGA,UAAI;AACF,kBAAM,kCAAa,KAAK,QAAQ;AAAA,MAClC,SAAS,GAAG;AACV,cAAM,IAAI;AAAA,UACR,kCAAkC,GAAG;AAAA,eACnB,QAAQ;AAAA;AAAA,UAE1B,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AAGA,YAAM,aAAa,MAAM,GAAG,SAAS,SAAS,QAAQ;AACtD,YAAM,aAAS,0BAAW,QAAQ;AAClC,aAAO,OAAO,UAAU;AACxB,YAAM,iBAAiB,OAAO,OAAO,KAAK;AAG1C,UAAI,YAAY,mBAAmB,UAAU;AAE3C,kBAAM,sBAAW,QAAQ;AACzB,cAAM,IAAI;AAAA,UACR,+BAA+B,QAAQ,SAAS,cAAc;AAAA,QAChE;AAAA,MACF;AAGA,UAAI,CAAC,uBAAO;AACV,cAAM,GAAG,SAAS,MAAM,UAAU,GAAK;AAAA,MACzC;AAEA,aAAO;AAAA,IACT;AAAA,IACA;AAAA;AAAA,MAEE,SAAS;AAAA,MACT,iBAAiB;AAAA,IACnB;AAAA,EACF;AACF;AASA,eAAe,cACb,gBACA,UACA,KACA,UACA,MACe;AACf,QAAM,WAAW,gBAAgB,cAAc;AAC/C,QAAM,WAAW;AAAA,IACf,SAAS;AAAA,IACT,WAAW;AAAA,IACX,WAAW,KAAK,IAAI;AAAA,IACpB;AAAA,IACA,oBAAoB;AAAA,IACpB,UAAU,UAAAC,QAAG,SAAS;AAAA,IACtB,MAAM,UAAAA,QAAG,KAAK;AAAA,IACd;AAAA,IACA,QAAQ;AAAA,MACN,MAAM;AAAA,MACN;AAAA,IACF;AAAA,EACF;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,GAAG,SAAS,UAAU,UAAU,KAAK,UAAU,UAAU,MAAM,CAAC,CAAC;AACzE;AAKA,eAAsB,cACpB;AAAA;AAAA,EAAiC,QAAQ,iBAAiB,EAAE;AAAA,GAC3C;AACjB,QAAM,WAAW,gBAAgB;AACjC,QAAM,KAAK,sBAAM;AAEjB,MAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,WAAO;AAAA,EACT;AAEA,MAAI,UAAU;AACd,QAAM,MAAM,KAAK,IAAI;AACrB,QAAM,UAAU,MAAM,GAAG,SAAS,QAAQ,QAAQ;AAElD,aAAW,SAAS,SAAS;AAC3B,UAAM,YAAY,YAAAH,QAAK,KAAK,UAAU,KAAK;AAC3C,UAAM,WAAW,gBAAgB,SAAS;AAE1C,QAAI;AAEF,UAAI,CAAE,UAAM,iBAAM,SAAS,GAAI;AAC7B;AAAA,MACF;AAGA,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,CAAC,YACD,OAAO,aAAa,YACpB,MAAM,QAAQ,QAAQ,GACtB;AACA;AAAA,MACF;AACA,YAAM,YAAa,SAAqC,WAAW;AAEnE,YAAM,MACJ,OAAO,cAAc,YAAY,YAAY,IACzC,MAAM,YACN,OAAO;AAEb,UAAI,MAAM,QAAQ;AAGhB,kBAAM,sBAAW,WAAW,EAAE,OAAO,MAAM,WAAW,KAAK,CAAC;AAC5D,mBAAW;AAAA,MACb;AAAA,IACF,QAAQ;AAEN,UAAI;AAEF,cAAM,WAAW,MAAM,GAAG,SAAS,QAAQ,SAAS;AACpD,YAAI,CAAC,SAAS,QAAQ;AAGpB,oBAAM,sBAAW,SAAS;AAC1B,qBAAW;AAAA,QACb;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,UACpB,MACA,SACA,YAC0B;AAC1B,QAAM;AAAA,IACJ;AAAA;AAAA,MAA2B,QAAQ,iBAAiB,EAAE;AAAA;AAAA,IACtD;AAAA,IACA,OAAO,YAAY;AAAA,IACnB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAGlC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AAGpC,QAAM,WAAW,gBAAgB;AACjC,QAAM,aAAa,QAAQ,UAAU,QAAQ,QAAQ,IAAI,UAAAG,QAAG,KAAK,CAAC;AAElE,QAAM,OAAO,GAAG,GAAG,IAAI,UAAU;AACjC,QAAM,eAAW,6BAAiB,IAAI;AACtC,QAAM,gBAAgB,YAAAH,QAAK,KAAK,UAA
U,QAAQ;AAClD,QAAM,iBAAa,4BAAc,YAAAA,QAAK,KAAK,eAAe,UAAU,CAAC;AACrE,QAAM,KAAK,sBAAM;AAEjB,MAAI,aAAa;AACjB,MAAI,mBAAmB;AAGvB,MACE,CAAC,SACD,GAAG,WAAW,aAAa,KAC1B,MAAM,aAAa,eAAe,QAAQ,GAC3C;AAEA,QAAI;AACF,YAAM,WAAW,gBAAgB,aAAa;AAC9C,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,YACA,OAAO,aAAa,YACpB,CAAC,MAAM,QAAQ,QAAQ,KACvB,OAAQ,SAAqC,UAAU,MAAM,UAC7D;AACA,2BAAoB,SAClB,UACF;AAAA,MACF,OAAO;AAEL,qBAAa;AAAA,MACf;AAAA,IACF,QAAQ;AAEN,mBAAa;AAAA,IACf;AAAA,EACF,OAAO;AACL,iBAAa;AAAA,EACf;AAEA,MAAI,YAAY;AAEd,QAAI;AACF,gBAAM,qBAAU,aAAa;AAAA,IAC/B,SAAS,GAAG;AACV,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,sDAAsD,aAAa;AAAA;AAAA,UAEnE,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,SAAS,SAAS;AACpB,cAAM,IAAI;AAAA,UACR,iEAAiE,aAAa;AAAA;AAAA,UAE9E,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,4CAA4C,aAAa;AAAA,QACzD,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AAGA,uBAAmB,MAAM,mBAAmB,KAAK,YAAY,QAAQ;AAGrE,UAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,UAAU;AAC/C,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,oBAAoB;AAAA,MACpB,MAAM;AAAA,IACR;AAAA,EACF;AAMA,QAAM,aAAa,yBAAS,sBAAsB,KAAK,UAAU;AAgBjE,QAAM,oBAAoB,aACtB;AAAA,IACE,GAAG;AAAA,IACH,KAAK;AAAA,MACH,GAAG,cAAc;AAAA,MACjB,MAAM,GAAG,aAAa,GAAG,YAAAA,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAM,KAAK,EAAE;AAAA,IACrE;AAAA,IACA,OAAO;AAAA,EACT,IACA;AACJ,QAAM,mBAAe,oBAAM,YAAY,MAAM,mBAAmB,UAAU;AAE1E,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAQA,eAAsB,eACpB,SACsD;AACtD,QAAM;AAAA,IACJ;AAAA;AAAA,MAA2B,QAAQ,iBAAiB,EAAE;AAAA;AAAA,IACtD;AAAA,IACA,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EACF,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAGlC,QAAM,WAAW,gBAAgB;AACjC,QAAM,aAAa,QAAQ,UAAU,QAAQ,QAAQ,IAAI,UAAAG,QAAG,KAAK,CAAC;AAElE,QAAM,OAAO,GAAG,GAAG,IAAI,UAAU;AACjC,QAAM,eAAW,6BAAiB,IAAI;AACtC,QAAM,gBAAgB,YAAAH,QAAK,KAAK,UAAU,QAAQ;AAClD,QAAM,iBAAa,4BAAc,YAAAA,QAAK,KAAK,eAAe,UAAU,CAAC;AACrE,QAAM,KAAK,sBAAM;AAEjB,MAAI,aAAa;AAGjB,MACE,CAAC,SACD,GAAG,WAAW,aAAa,KAC1B,MAAM,aAAa,eAAe,QAAQ,GAC3C;AAEA,iBAAa;AAAA,EACf,OAAO;AAEL,QAAI;AACF,gBAAM,qBAAU,aAAa;AAAA,IAC/B,SAAS,GAAG;AACV,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,sDAAsD,aAAa;AAAA;AAAA,UAEnE,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,SAAS,SAAS;AACpB,cAAM,IAAI;AAAA,UACR,iEAAiE,aAAa;AAAA;AAAA,UAE9E,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM,IAAI;AAAA,QACR,4CAA4C,aAAa;AAAA,QACzD,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AAGA,UAAM,mBAAmB,MAAM,mBAAmB,KAAK,YAAY,QAAQ;AAG3E,UAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,UAAU;AAC/C,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,MACA,oBAAoB;AAAA,MACpB,MAAM;AAAA,IACR;AACA,iBAAa;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,EACF;AACF;AAaO,SAAS,cACd,YACA,MACA,cACA,YAC0B;AAI1B,QAAM,aAAa,yBAAS,sBAAsB,KAAK,UAAU;AASjE,QAAM,gBAAgB,YAAAA,QAAK,QAAQ,UAAU;AAC7C,QAAM,oBAAoB,aACtB;AAAA,IACE,GAAG;AAAA,IACH,KAAK;AAAA,MACH,GAAG,cAAc;AAAA,MACjB,MAAM,GAAG,aAAa,GAAG,YAAAA,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAM,KAAK,EAAE;AAAA,IACrE;AAAA,IACA,OAAO;AAAA,EACT,IACA;AAEJ,aAAO,oBAAM,YAAY,MAAM,mBAAmB,UAAU;AAC9D;AAOO,SAAS,kBAA0B;AACxC,aAAO,8BAAgB;AACzB;AAKA,eAAsB,eAUpB;AACA,QAAM,WAAW,gBAAgB;AACjC,QAAM,KAAK,sBAAM;AAEjB,MAAI,CAAC,GAAG,WAAW,QAAQ,GAAG;AAC5B,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,UAAU,CAAC;AACjB,QAAM,MAAM,KAAK,IAAI;AACrB,QAAM,UAAU,MAAM,GAAG,SAAS,QAAQ,QAAQ;AAElD,aAAW,SAAS,SAAS;AAC3B,UAAM,YAAY,YAAAA,QAAK,KAAK,UAAU,KAAK;AAC3C,QAAI;AAEF,UAAI,CAAE,UAAM,iBAAM,SAAS,GAAI;AAC7B;AAAA,MACF;AAEA,YAAM,WAAW,gBAAgB,SAAS;AAE1C,YAAM,WAAW,UAAM,oBAAS,UAAU,EAAE,QAAQ,MAAM,CAAC;AAC3D,UACE,CAAC,YACD,OAAO,aAAa,YACpB,MAAM,QAAQ,QAAQ,GACtB;AACA;AAAA,MACF;AAEA,YAAM,UAAU;AAIhB,YAAM,SAAS,QAAQ,QAAQ;AAC/B,YAAM,MACH,SAAS,KAAK,KAAiB,QAAQ,KAAK,KAAgB;AAI/D,YAAM,QAAQ,MAAM,GA
AG,SAAS,QAAQ,SAAS;AACjD,YAAM,aAAa,MAAM,KAAK,OAAK,CAAC,EAAE,WAAW,GAAG,CAAC;AAErD,UAAI,YAAY;AACd,cAAM,aAAa,YAAAA,QAAK,KAAK,WAAW,UAAU;AAElD,cAAM,cAAc,MAAM,GAAG,SAAS,KAAK,UAAU;AAErD,gBAAQ,KAAK;AAAA,UACX,KAAK,OAAQ,QAAQ,WAAW,KAAgB;AAAA,UAChD,MAAO,QAAQ,MAAM,KAAgB;AAAA,UACrC,UAAW,QAAQ,UAAU,KAAgB;AAAA,UAC7C,MAAM;AAAA,UACN,UAAW,QAAQ,UAAU,KAAgB;AAAA,UAC7C,MAAM,YAAY;AAAA,UAClB;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,QAAQ;AAAA,IAAC;AAAA,EACX;AAEA,SAAO;AACT;",
  "names": ["import_path", "path", "fileBuffer", "hasher", "os"]
  }
package/dist/dlx-package.js CHANGED
@@ -94,7 +94,7 @@ async function ensurePackageInstalled(packageName, packageSpec, force) {
  import_path.default.join(packageDir, "node_modules", packageName)
  );
  try {
- await (0, import_fs.safeMkdir)(packageDir, { recursive: true });
+ await (0, import_fs.safeMkdir)(packageDir);
  } catch (e) {
  const code = e.code;
  if (code === "EACCES" || code === "EPERM") {
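
The hunk above drops the explicit `{ recursive: true }` option at the `safeMkdir(packageDir)` call site, which only makes sense if recursive creation is now the default. A minimal consumer-side sketch of relying on that default follows; the `@socketsecurity/lib/fs` import path and the `ensureCacheEntryDir` helper are assumptions for illustration, not part of the diff.

```typescript
// Sketch only: shows the new default in use, assuming safeMkdir is
// exported from the package's fs module (the import path is an assumption).
import os from 'node:os'
import path from 'node:path'

import { safeMkdir } from '@socketsecurity/lib/fs'

async function ensureCacheEntryDir(cacheKey: string): Promise<string> {
  // Nested segments (~/.socket/_dlx/<cacheKey>) are created as needed;
  // passing { recursive: true } explicitly should still be accepted,
  // it is simply no longer required.
  const cacheEntryDir = path.join(os.homedir(), '.socket', '_dlx', cacheKey)
  await safeMkdir(cacheEntryDir)
  return cacheEntryDir
}
```
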
package/dist/dlx-package.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../src/dlx-package.ts"],
- "sourcesContent": ["/**\n * @fileoverview DLX package execution - Install and execute npm packages.\n *\n * This module provides functionality to install and execute npm packages\n * in the ~/.socket/_dlx directory, similar to npx but with Socket's own cache.\n *\n * Uses content-addressed storage like npm's _npx:\n * - Hash is generated from package spec (name@version)\n * - Each unique spec gets its own directory: ~/.socket/_dlx/<hash>/\n * - Allows caching multiple versions of the same package\n *\n * Concurrency protection:\n * - Uses process-lock to prevent concurrent installation corruption\n * - Lock file created at ~/.socket/_dlx/<hash>/concurrency.lock\n * - Uses npm npx's concurrency.lock naming convention (5s stale, 2s touching)\n * - Prevents multiple processes from corrupting the same package installation\n *\n * Version range handling:\n * - Exact versions (1.0.0) use cache if available\n * - Range versions (^1.0.0, ~1.0.0) auto-force to get latest within range\n * - User can override with explicit force: false\n *\n * Key difference from dlx-binary.ts:\n * - dlx-binary.ts: Downloads standalone binaries from URLs\n * - dlx-package.ts: Installs npm packages from registries\n *\n * Implementation:\n * - Uses pacote for package installation (no npm CLI required)\n * - Split into downloadPackage() and executePackage() for flexibility\n * - dlxPackage() combines both for convenience\n */\n\nimport path from 'path'\n\nimport { WIN32 } from './constants/platform'\nimport { getPacoteCachePath } from './constants/packages'\nimport { generateCacheKey } from './dlx'\nimport { readJsonSync, safeMkdir } from './fs'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nlet _npmPackageArg: typeof import('npm-package-arg') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNpmPackageArg() {\n if (_npmPackageArg === undefined) {\n _npmPackageArg = /*@__PURE__*/ require('./external/npm-package-arg')\n }\n return _npmPackageArg as typeof import('npm-package-arg')\n}\n\nlet _pacote: typeof import('pacote') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPacote() {\n if (_pacote === undefined) {\n _pacote = /*@__PURE__*/ require('./external/pacote')\n }\n return _pacote as typeof import('pacote')\n}\n\n/**\n * Regex to check if a version string contains range operators.\n * Matches any version with range operators: ~, ^, >, <, =, x, X, *, spaces, or ||.\n */\nconst rangeOperatorsRegExp = /[~^><=xX* ]|\\|\\|/\n\nexport interface DownloadPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary. */\n binaryPath: string\n /** Whether the package was newly installed. 
*/\n installed: boolean\n}\n\nexport interface DlxPackageOptions {\n /**\n * Package to install (e.g., '@cyclonedx/cdxgen@10.0.0').\n * Aligns with npx --package flag.\n */\n package: string\n\n /**\n * Binary name to execute (optional - auto-detected in most cases).\n *\n * Auto-detection logic:\n * 1. If package has only one binary, uses it automatically\n * 2. Tries user-provided binaryName\n * 3. Tries last segment of package name (e.g., 'cli' from '@socketsecurity/cli')\n * 4. Falls back to first binary\n *\n * Only needed when package has multiple binaries and auto-detection fails.\n *\n * @example\n * // Auto-detected (single binary)\n * { package: '@socketsecurity/cli' } // Finds 'socket' binary automatically\n *\n * // Explicit (multiple binaries)\n * { package: 'some-tool', binaryName: 'specific-tool' }\n */\n binaryName?: string | undefined\n\n /**\n * Force reinstallation even if package exists.\n * Aligns with npx --yes/-y flag behavior.\n */\n force?: boolean | undefined\n\n /**\n * Skip confirmation prompts (auto-approve).\n * Aligns with npx --yes/-y flag.\n */\n yes?: boolean | undefined\n\n /**\n * Suppress output (quiet mode).\n * Aligns with npx --quiet/-q and pnpm --silent/-s flags.\n */\n quiet?: boolean | undefined\n\n /**\n * Additional spawn options for the execution.\n */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary that was executed. */\n binaryPath: string\n /** Whether the package was newly installed. */\n installed: boolean\n /** The spawn promise for the running process. */\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Parse package spec into name and version using npm-package-arg.\n * Examples:\n * - 'lodash@4.17.21' \u2192 { name: 'lodash', version: '4.17.21' }\n * - '@scope/pkg@1.0.0' \u2192 { name: '@scope/pkg', version: '1.0.0' }\n * - 'lodash' \u2192 { name: 'lodash', version: undefined }\n */\nfunction parsePackageSpec(spec: string): {\n name: string\n version: string | undefined\n} {\n try {\n const npa = getNpmPackageArg()\n const parsed = npa(spec)\n\n // Extract version from different types of specs.\n // For registry specs, use fetchSpec (the version/range).\n // For git/file/etc, version will be undefined.\n const version =\n parsed.type === 'tag'\n ? parsed.fetchSpec\n : parsed.type === 'version' || parsed.type === 'range'\n ? 
parsed.fetchSpec\n : undefined\n\n return {\n name: parsed.name || spec,\n version,\n }\n } catch {\n // Fallback to simple parsing if npm-package-arg fails.\n const atIndex = spec.lastIndexOf('@')\n if (atIndex === -1 || spec.startsWith('@')) {\n // No version or scoped package without version.\n return { name: spec, version: undefined }\n }\n return {\n name: spec.slice(0, atIndex),\n version: spec.slice(atIndex + 1),\n }\n }\n}\n\n/**\n * Install package to ~/.socket/_dlx/<hash>/ if not already installed.\n * Uses pacote for installation (no npm CLI required).\n * Protected by process lock to prevent concurrent installation corruption.\n */\nasync function ensurePackageInstalled(\n packageName: string,\n packageSpec: string,\n force: boolean,\n): Promise<{ installed: boolean; packageDir: string }> {\n const cacheKey = generateCacheKey(packageSpec)\n const packageDir = normalizePath(path.join(getSocketDlxDir(), cacheKey))\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n\n // Ensure package directory exists before creating lock.\n // The lock directory will be created inside this directory.\n try {\n await safeMkdir(packageDir, { recursive: true })\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating package directory: ${packageDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create package directory on read-only filesystem: ${packageDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new Error(`Failed to create package directory: ${packageDir}`, {\n cause: e,\n })\n }\n\n // Use process lock to prevent concurrent installations.\n // Uses npm npx's concurrency.lock naming convention.\n const lockPath = path.join(packageDir, 'concurrency.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n const fs = getFs()\n // Double-check if already installed (unless force).\n // Another process may have installed while waiting for lock.\n if (!force && fs.existsSync(installedDir)) {\n // Verify package.json exists.\n const pkgJsonPath = path.join(installedDir, 'package.json')\n if (fs.existsSync(pkgJsonPath)) {\n return { installed: false, packageDir }\n }\n }\n\n // Use pacote to extract the package.\n // Pacote leverages npm cache when available but doesn't require npm CLI.\n const pacoteCachePath = getPacoteCachePath()\n try {\n await getPacote().extract(packageSpec, installedDir, {\n // Use consistent pacote cache path (respects npm cache locations when available).\n cache: pacoteCachePath || path.join(packageDir, '.cache'),\n })\n } catch (e) {\n const code = (e as any).code\n if (code === 'E404' || code === 'ETARGET') {\n throw new Error(\n `Package not found: ${packageSpec}\\n` +\n 'Verify the package exists on npm registry and check the version.\\n' +\n `Visit https://www.npmjs.com/package/${packageName} to see available versions.`,\n { cause: e },\n )\n }\n if (\n code === 'ENOTFOUND' ||\n code === 'ETIMEDOUT' ||\n code === 'EAI_AGAIN'\n ) {\n throw new Error(\n `Network error installing ${packageSpec}\\n` +\n 'Check your internet connection and try again.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to install package: ${packageSpec}\\n` +\n `Destination: ${installedDir}\\n` +\n 'Check npm registry connectivity or 
package name.',\n { cause: e },\n )\n }\n\n return { installed: true, packageDir }\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Resolve binary path with cross-platform wrapper support.\n * On Windows, checks for .cmd, .bat, .ps1, .exe wrappers in order.\n * On Unix, uses path directly.\n *\n * Aligns with npm/npx binary resolution strategy.\n */\nfunction resolveBinaryPath(basePath: string): string {\n const fs = getFs()\n\n if (!WIN32) {\n // Unix: use path directly\n return basePath\n }\n\n // Windows: check for wrappers in priority order\n // Order matches npm bin-links creation: .cmd, .ps1, .exe, then bare\n const extensions = ['.cmd', '.bat', '.ps1', '.exe', '']\n\n for (const ext of extensions) {\n const testPath = basePath + ext\n if (fs.existsSync(testPath)) {\n return testPath\n }\n }\n\n // Fallback to original path if no wrapper found\n return basePath\n}\n\n/**\n * Find the binary path for an installed package.\n * Intelligently handles packages with single or multiple binaries.\n * Resolves platform-specific wrappers (.cmd, .ps1, etc.) on Windows.\n */\nfunction findBinaryPath(\n packageDir: string,\n packageName: string,\n binaryName?: string,\n): string {\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n const pkgJsonPath = path.join(installedDir, 'package.json')\n\n // Read package.json to find bin entry.\n const pkgJson = readJsonSync(pkgJsonPath) as Record<string, unknown>\n const bin = pkgJson['bin']\n\n let binPath: string | undefined\n\n if (typeof bin === 'string') {\n // Single binary - use it directly.\n binPath = bin\n } else if (typeof bin === 'object' && bin !== null) {\n const binObj = bin as Record<string, string>\n const binKeys = Object.keys(binObj)\n\n // If only one binary, use it regardless of name.\n if (binKeys.length === 1) {\n binPath = binObj[binKeys[0]!]\n } else {\n // Multiple binaries - try to find the right one:\n // 1. User-provided binaryName\n // 2. Last segment of package name (e.g., 'cli' from '@socketsecurity/cli')\n // 3. Full package name without scope (e.g., 'cli' from '@socketsecurity/cli')\n // 4. 
First binary as fallback\n const lastSegment = packageName.split('/').pop()\n const candidates = [\n binaryName,\n lastSegment,\n packageName.replace(/^@[^/]+\\//, ''),\n ].filter(Boolean)\n\n for (const candidate of candidates) {\n if (candidate && binObj[candidate]) {\n binPath = binObj[candidate]\n break\n }\n }\n\n // Fallback to first binary if nothing matched.\n if (!binPath && binKeys.length > 0) {\n binPath = binObj[binKeys[0]!]\n }\n }\n }\n\n if (!binPath) {\n throw new Error(`No binary found for package \"${packageName}\"`)\n }\n\n const rawPath = normalizePath(path.join(installedDir, binPath))\n\n // Resolve platform-specific wrapper (Windows .cmd/.ps1/etc.)\n return resolveBinaryPath(rawPath)\n}\n\n/**\n * Execute a package via DLX - install if needed and run its binary.\n *\n * This is the Socket equivalent of npx/pnpm dlx/yarn dlx, but using\n * our own cache directory (~/.socket/_dlx) and installation logic.\n *\n * Auto-forces reinstall for version ranges to get latest within range.\n *\n * @example\n * ```typescript\n * // Download and execute cdxgen\n * const result = await dlxPackage(\n * ['--version'],\n * { package: '@cyclonedx/cdxgen@10.0.0' }\n * )\n * await result.spawnPromise\n * ```\n */\nexport async function dlxPackage(\n args: readonly string[] | string[],\n options?: DlxPackageOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxPackageResult> {\n // Download the package.\n const downloadResult = await downloadPackage(options!)\n\n // Execute the binary.\n const spawnPromise = executePackage(\n downloadResult.binaryPath,\n args,\n options?.spawnOptions,\n spawnExtra,\n )\n\n return {\n ...downloadResult,\n spawnPromise,\n }\n}\n\n/**\n * Download and install a package without executing it.\n * This is useful for self-update or when you need the package files\n * but don't want to run the binary immediately.\n *\n * @example\n * ```typescript\n * // Install @socketsecurity/cli without running it\n * const result = await downloadPackage({\n * package: '@socketsecurity/cli@1.2.0',\n * force: true\n * })\n * console.log('Installed to:', result.packageDir)\n * console.log('Binary at:', result.binaryPath)\n * ```\n */\nexport async function downloadPackage(\n options: DlxPackageOptions,\n): Promise<DownloadPackageResult> {\n const fs = getFs()\n const {\n binaryName,\n force: userForce,\n package: packageSpec,\n yes,\n } = {\n __proto__: null,\n ...options,\n } as DlxPackageOptions\n\n // Parse package spec.\n const { name: packageName, version: packageVersion } =\n parsePackageSpec(packageSpec)\n\n // Determine force behavior:\n // 1. Explicit force takes precedence\n // 2. --yes flag implies force (auto-approve/skip prompts)\n // 3. Version ranges auto-force to get latest\n const isVersionRange =\n packageVersion !== undefined && rangeOperatorsRegExp.test(packageVersion)\n const force =\n userForce !== undefined ? userForce : yes === true ? true : isVersionRange\n\n // Build full package spec for installation.\n const fullPackageSpec = packageVersion\n ? 
`${packageName}@${packageVersion}`\n : packageName\n\n // Ensure package is installed.\n const { installed, packageDir } = await ensurePackageInstalled(\n packageName,\n fullPackageSpec,\n force,\n )\n\n // Find binary path.\n const binaryPath = findBinaryPath(packageDir, packageName, binaryName)\n\n // Make binary executable on Unix systems.\n if (!WIN32 && fs.existsSync(binaryPath)) {\n try {\n fs.chmodSync(binaryPath, 0o755)\n } catch {\n // Ignore chmod errors.\n }\n }\n\n return {\n binaryPath,\n installed,\n packageDir,\n }\n}\n\n/**\n * Execute a package's binary with cross-platform shell handling.\n * The package must already be installed (use downloadPackage first).\n *\n * On Windows, script files (.bat, .cmd, .ps1) require shell: true.\n * Matches npm/npx execution behavior.\n *\n * @example\n * ```typescript\n * // Execute an already-installed package\n * const downloaded = await downloadPackage({ package: 'cowsay@1.5.0' })\n * const result = await executePackage(\n * downloaded.binaryPath,\n * ['Hello World'],\n * { stdio: 'inherit' }\n * )\n * ```\n */\nexport function executePackage(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true\n // because they are not executable on their own and must be run through cmd.exe.\n // .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n\n const finalOptions = needsShell\n ? {\n ...spawnOptions,\n shell: true,\n }\n : spawnOptions\n\n return spawn(binaryPath, args, finalOptions, spawnExtra)\n}\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAgCA,kBAAiB;AAEjB,sBAAsB;AACtB,sBAAmC;AACnC,iBAAiC;AACjC,gBAAwC;AACxC,IAAAA,eAA8B;AAC9B,mBAAgC;AAChC,0BAA4B;AAE5B,mBAAsB;AAEtB,IAAI;AAAA;AASJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,mBAAmB;AAC1B,MAAI,mBAAmB,QAAW;AAChC,qBAA+B,QAAQ,4BAA4B;AAAA,EACrE;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,YAAY;AACnB,MAAI,YAAY,QAAW;AACzB,cAAwB,QAAQ,mBAAmB;AAAA,EACrD;AACA,SAAO;AACT;AAMA,MAAM,uBAAuB;AAgF7B,SAAS,iBAAiB,MAGxB;AACA,MAAI;AACF,UAAM,MAAM,iCAAiB;AAC7B,UAAM,SAAS,IAAI,IAAI;AAKvB,UAAM,UACJ,OAAO,SAAS,QACZ,OAAO,YACP,OAAO,SAAS,aAAa,OAAO,SAAS,UAC3C,OAAO,YACP;AAER,WAAO;AAAA,MACL,MAAM,OAAO,QAAQ;AAAA,MACrB;AAAA,IACF;AAAA,EACF,QAAQ;AAEN,UAAM,UAAU,KAAK,YAAY,GAAG;AACpC,QAAI,YAAY,MAAM,KAAK,WAAW,GAAG,GAAG;AAE1C,aAAO,EAAE,MAAM,MAAM,SAAS,OAAU;AAAA,IAC1C;AACA,WAAO;AAAA,MACL,MAAM,KAAK,MAAM,GAAG,OAAO;AAAA,MAC3B,SAAS,KAAK,MAAM,UAAU,CAAC;AAAA,IACjC;AAAA,EACF;AACF;AAOA,eAAe,uBACb,aACA,aACA,OACqD;AACrD,QAAM,eAAW,6BAAiB,WAAW;AAC7C,QAAM,iBAAa,4BAAc,YAAAC,QAAK,SAAK,8BAAgB,GAAG,QAAQ,CAAC;AACvE,QAAM,mBAAe;AAAA,IACnB,YAAAA,QAAK,KAAK,YAAY,gBAAgB,WAAW;AAAA,EACnD;AAIA,MAAI;AACF,cAAM,qBAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AAAA,EACjD,SAAS,GAAG;AACV,UAAM,OAAQ,EAA4B;AAC1C,QAAI,SAAS,YAAY,SAAS,SAAS;AACzC,YAAM,IAAI;AAAA,QACR,iDAAiD,UAAU;AAAA;AAAA,QAE3D,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AACA,QAAI,SAAS,SAAS;AACpB,YAAM,IAAI;AAAA,QACR,4DAA4D,UAAU;AAAA;AAAA,QAEtE,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AACA,UAAM,IAAI,MAAM,uCAAuC,UAAU,IAAI;AAAA,MACnE,OAAO;AAAA,IACT,CAAC;AAAA,EACH;AAIA,QAAM,WAAW,YAAAA,QAAK,KAAK,YAAY,kBAAkB;AAEzD,SAAO,MAAM,gCAAY;AAAA,IACvB;AAAA,IACA,YAAY;AACV,YAAM,KAAK,sBAAM;AAGjB,UAAI,CAAC,SAAS,GAAG,WAAW,YAAY,GAAG;AAEzC,cAAM,cAAc,YAAAA,QAAK,KAAK,cAAc,cAAc;AAC1D,YAAI,GAAG,WAAW,WAAW,GAAG;AAC9B,iBAAO,EAAE,WAAW,OAAO,WAAW;AAAA,QACxC;AAAA,MACF;AAIA,YAAM,sBAAkB,oCAAmB;AAC3C,UAAI;AACF,eAAM,0BAAU,GAAE,QAAQ,aAAa,cAAc;AAAA;AAAA,UAEnD,OAAO,mBAAmB,YAAAA,QAAK,KAAK,YAAY,QAAQ;AAAA,QAC1D,CAAC;AAAA,MACH,SAAS,GAAG;AACV,cAAM,OAAQ,EAAU;AACxB,YAAI,SAAS,UAAU,SAAS,WAAW;AACzC,gBAAM,IAAI;AAAA,YACR,sBAAsB,WAAW;AAAA;AAAA,sCAEQ,WAAW;AAAA,YACpD,EAAE,OAAO,EAAE;AAAA,UACb;AAAA,QACF;AACA,YACE,SAAS,eACT,SAAS,eACT,SAAS,aACT;AACA,gBAAM,IAAI;AAAA,YACR,4BAA4B,WAAW;AAAA;AAAA,YAEvC,EAAE,OAAO,EAAE;AAAA,UACb;AAAA,QACF;AACA,cAAM,IAAI;AAAA,UACR,8BAA8B,WAAW;AAAA,eACvB,YAAY;AAAA;AAAA,UAE9B,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AAEA,aAAO,EAAE,WAAW,MAAM,WAAW;AAAA,IACvC;AAAA,IACA;AAAA;AAAA,MAEE,SAAS;AAAA,MACT,iBAAiB;AAAA,IACnB;AAAA,EACF;AACF;AASA,SAAS,kBAAkB,UAA0B;AACnD,QAAM,KAAK,sBAAM;AAEjB,MAAI,CAAC,uBAAO;AAEV,WAAO;AAAA,EACT;AAIA,QAAM,aAAa,CAAC,QAAQ,QAAQ,QAAQ,QAAQ,EAAE;AAEtD,aAAW,OAAO,YAAY;AAC5B,UAAM,WAAW,WAAW;AAC5B,QAAI,GAAG,WAAW,QAAQ,GAAG;AAC3B,aAAO;AAAA,IACT;AAAA,EACF;AAGA,SAAO;AACT;AAOA,SAAS,eACP,YACA,aACA,YACQ;AACR,QAAM,mBAAe;AAAA,IACnB,YAAAA,QAAK,KAAK,YAAY,gBAAgB,WAAW;AAAA,EACnD;AACA,QAAM,cAAc,YAAAA,QAAK,KAAK,cAAc,cAAc;AAG1D,QAAM,cAAU,wBAAa,WAAW;AACxC,QAAM,MAAM,QAAQ,KAAK;AAEzB,MAAI;AAEJ,MAAI,OAAO,QAAQ,UAAU;AAE3B,cAAU;AAAA,EACZ,WAAW,OAAO,QAAQ,YAAY,QAAQ,MAAM;AAClD,UAAM,SAAS;AACf,UAAM,UAAU,OAAO,KAAK,MAAM;AAGlC,QAAI,QAAQ,WAAW,GAAG;AACxB,gBAAU,OAAO,QAAQ,CAAC,CAAE;AAAA,IAC9B,OAAO;AAML,YAAM,cAAc,YAAY,MAAM,GAAG,EAAE,IAAI;AAC/C,YAAM,aAAa;AAAA,QACjB;AAAA,QACA;AAAA,QACA,YAAY,QAAQ,aAAa,EAAE;AAAA,MACrC,EAAE,OAAO,OAAO;AAEhB,iBAAW,aAAa,YAAY;AAClC,YAAI,aAAa,OAAO,SAAS,GAAG;AAClC,oBAAU,OAAO,SAAS;AAC1B;AAAA,QACF;AAAA,MACF;AAGA,UAAI,CAAC,WAAW,QAAQ,SAAS,GAAG;AAClC,kBAAU,OAAO,QAAQ,CAAC,CAAE;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,gCAAgC,WA
AW,GAAG;AAAA,EAChE;AAEA,QAAM,cAAU,4BAAc,YAAAA,QAAK,KAAK,cAAc,OAAO,CAAC;AAG9D,SAAO,kBAAkB,OAAO;AAClC;AAoBA,eAAsB,WACpB,MACA,SACA,YAC2B;AAE3B,QAAM,iBAAiB,MAAM,gBAAgB,OAAQ;AAGrD,QAAM,eAAe;AAAA,IACnB,eAAe;AAAA,IACf;AAAA,IACA,SAAS;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,EACF;AACF;AAkBA,eAAsB,gBACpB,SACgC;AAChC,QAAM,KAAK,sBAAM;AACjB,QAAM;AAAA,IACJ;AAAA,IACA,OAAO;AAAA,IACP,SAAS;AAAA,IACT;AAAA,EACF,IAAI;AAAA,IACF,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AAGA,QAAM,EAAE,MAAM,aAAa,SAAS,eAAe,IACjD,iBAAiB,WAAW;AAM9B,QAAM,iBACJ,mBAAmB,UAAa,qBAAqB,KAAK,cAAc;AAC1E,QAAM,QACJ,cAAc,SAAY,YAAY,QAAQ,OAAO,OAAO;AAG9D,QAAM,kBAAkB,iBACpB,GAAG,WAAW,IAAI,cAAc,KAChC;AAGJ,QAAM,EAAE,WAAW,WAAW,IAAI,MAAM;AAAA,IACtC;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,aAAa,eAAe,YAAY,aAAa,UAAU;AAGrE,MAAI,CAAC,yBAAS,GAAG,WAAW,UAAU,GAAG;AACvC,QAAI;AACF,SAAG,UAAU,YAAY,GAAK;AAAA,IAChC,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAoBO,SAAS,eACd,YACA,MACA,cACA,YAC0B;AAI1B,QAAM,aAAa,yBAAS,sBAAsB,KAAK,UAAU;AAEjE,QAAM,eAAe,aACjB;AAAA,IACE,GAAG;AAAA,IACH,OAAO;AAAA,EACT,IACA;AAEJ,aAAO,oBAAM,YAAY,MAAM,cAAc,UAAU;AACzD;",
+ "sourcesContent": ["/**\n * @fileoverview DLX package execution - Install and execute npm packages.\n *\n * This module provides functionality to install and execute npm packages\n * in the ~/.socket/_dlx directory, similar to npx but with Socket's own cache.\n *\n * Uses content-addressed storage like npm's _npx:\n * - Hash is generated from package spec (name@version)\n * - Each unique spec gets its own directory: ~/.socket/_dlx/<hash>/\n * - Allows caching multiple versions of the same package\n *\n * Concurrency protection:\n * - Uses process-lock to prevent concurrent installation corruption\n * - Lock file created at ~/.socket/_dlx/<hash>/concurrency.lock\n * - Uses npm npx's concurrency.lock naming convention (5s stale, 2s touching)\n * - Prevents multiple processes from corrupting the same package installation\n *\n * Version range handling:\n * - Exact versions (1.0.0) use cache if available\n * - Range versions (^1.0.0, ~1.0.0) auto-force to get latest within range\n * - User can override with explicit force: false\n *\n * Key difference from dlx-binary.ts:\n * - dlx-binary.ts: Downloads standalone binaries from URLs\n * - dlx-package.ts: Installs npm packages from registries\n *\n * Implementation:\n * - Uses pacote for package installation (no npm CLI required)\n * - Split into downloadPackage() and executePackage() for flexibility\n * - dlxPackage() combines both for convenience\n */\n\nimport path from 'path'\n\nimport { WIN32 } from './constants/platform'\nimport { getPacoteCachePath } from './constants/packages'\nimport { generateCacheKey } from './dlx'\nimport { readJsonSync, safeMkdir } from './fs'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nlet _npmPackageArg: typeof import('npm-package-arg') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNpmPackageArg() {\n if (_npmPackageArg === undefined) {\n _npmPackageArg = /*@__PURE__*/ require('./external/npm-package-arg')\n }\n return _npmPackageArg as typeof import('npm-package-arg')\n}\n\nlet _pacote: typeof import('pacote') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPacote() {\n if (_pacote === undefined) {\n _pacote = /*@__PURE__*/ require('./external/pacote')\n }\n return _pacote as typeof import('pacote')\n}\n\n/**\n * Regex to check if a version string contains range operators.\n * Matches any version with range operators: ~, ^, >, <, =, x, X, *, spaces, or ||.\n */\nconst rangeOperatorsRegExp = /[~^><=xX* ]|\\|\\|/\n\nexport interface DownloadPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary. */\n binaryPath: string\n /** Whether the package was newly installed. 
*/\n installed: boolean\n}\n\nexport interface DlxPackageOptions {\n /**\n * Package to install (e.g., '@cyclonedx/cdxgen@10.0.0').\n * Aligns with npx --package flag.\n */\n package: string\n\n /**\n * Binary name to execute (optional - auto-detected in most cases).\n *\n * Auto-detection logic:\n * 1. If package has only one binary, uses it automatically\n * 2. Tries user-provided binaryName\n * 3. Tries last segment of package name (e.g., 'cli' from '@socketsecurity/cli')\n * 4. Falls back to first binary\n *\n * Only needed when package has multiple binaries and auto-detection fails.\n *\n * @example\n * // Auto-detected (single binary)\n * { package: '@socketsecurity/cli' } // Finds 'socket' binary automatically\n *\n * // Explicit (multiple binaries)\n * { package: 'some-tool', binaryName: 'specific-tool' }\n */\n binaryName?: string | undefined\n\n /**\n * Force reinstallation even if package exists.\n * Aligns with npx --yes/-y flag behavior.\n */\n force?: boolean | undefined\n\n /**\n * Skip confirmation prompts (auto-approve).\n * Aligns with npx --yes/-y flag.\n */\n yes?: boolean | undefined\n\n /**\n * Suppress output (quiet mode).\n * Aligns with npx --quiet/-q and pnpm --silent/-s flags.\n */\n quiet?: boolean | undefined\n\n /**\n * Additional spawn options for the execution.\n */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary that was executed. */\n binaryPath: string\n /** Whether the package was newly installed. */\n installed: boolean\n /** The spawn promise for the running process. */\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Parse package spec into name and version using npm-package-arg.\n * Examples:\n * - 'lodash@4.17.21' \u2192 { name: 'lodash', version: '4.17.21' }\n * - '@scope/pkg@1.0.0' \u2192 { name: '@scope/pkg', version: '1.0.0' }\n * - 'lodash' \u2192 { name: 'lodash', version: undefined }\n */\nfunction parsePackageSpec(spec: string): {\n name: string\n version: string | undefined\n} {\n try {\n const npa = getNpmPackageArg()\n const parsed = npa(spec)\n\n // Extract version from different types of specs.\n // For registry specs, use fetchSpec (the version/range).\n // For git/file/etc, version will be undefined.\n const version =\n parsed.type === 'tag'\n ? parsed.fetchSpec\n : parsed.type === 'version' || parsed.type === 'range'\n ? 
parsed.fetchSpec\n : undefined\n\n return {\n name: parsed.name || spec,\n version,\n }\n } catch {\n // Fallback to simple parsing if npm-package-arg fails.\n const atIndex = spec.lastIndexOf('@')\n if (atIndex === -1 || spec.startsWith('@')) {\n // No version or scoped package without version.\n return { name: spec, version: undefined }\n }\n return {\n name: spec.slice(0, atIndex),\n version: spec.slice(atIndex + 1),\n }\n }\n}\n\n/**\n * Install package to ~/.socket/_dlx/<hash>/ if not already installed.\n * Uses pacote for installation (no npm CLI required).\n * Protected by process lock to prevent concurrent installation corruption.\n */\nasync function ensurePackageInstalled(\n packageName: string,\n packageSpec: string,\n force: boolean,\n): Promise<{ installed: boolean; packageDir: string }> {\n const cacheKey = generateCacheKey(packageSpec)\n const packageDir = normalizePath(path.join(getSocketDlxDir(), cacheKey))\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n\n // Ensure package directory exists before creating lock.\n // The lock directory will be created inside this directory.\n try {\n await safeMkdir(packageDir)\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating package directory: ${packageDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create package directory on read-only filesystem: ${packageDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new Error(`Failed to create package directory: ${packageDir}`, {\n cause: e,\n })\n }\n\n // Use process lock to prevent concurrent installations.\n // Uses npm npx's concurrency.lock naming convention.\n const lockPath = path.join(packageDir, 'concurrency.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n const fs = getFs()\n // Double-check if already installed (unless force).\n // Another process may have installed while waiting for lock.\n if (!force && fs.existsSync(installedDir)) {\n // Verify package.json exists.\n const pkgJsonPath = path.join(installedDir, 'package.json')\n if (fs.existsSync(pkgJsonPath)) {\n return { installed: false, packageDir }\n }\n }\n\n // Use pacote to extract the package.\n // Pacote leverages npm cache when available but doesn't require npm CLI.\n const pacoteCachePath = getPacoteCachePath()\n try {\n await getPacote().extract(packageSpec, installedDir, {\n // Use consistent pacote cache path (respects npm cache locations when available).\n cache: pacoteCachePath || path.join(packageDir, '.cache'),\n })\n } catch (e) {\n const code = (e as any).code\n if (code === 'E404' || code === 'ETARGET') {\n throw new Error(\n `Package not found: ${packageSpec}\\n` +\n 'Verify the package exists on npm registry and check the version.\\n' +\n `Visit https://www.npmjs.com/package/${packageName} to see available versions.`,\n { cause: e },\n )\n }\n if (\n code === 'ENOTFOUND' ||\n code === 'ETIMEDOUT' ||\n code === 'EAI_AGAIN'\n ) {\n throw new Error(\n `Network error installing ${packageSpec}\\n` +\n 'Check your internet connection and try again.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to install package: ${packageSpec}\\n` +\n `Destination: ${installedDir}\\n` +\n 'Check npm registry connectivity or package name.',\n { cause: 
e },\n )\n }\n\n return { installed: true, packageDir }\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Resolve binary path with cross-platform wrapper support.\n * On Windows, checks for .cmd, .bat, .ps1, .exe wrappers in order.\n * On Unix, uses path directly.\n *\n * Aligns with npm/npx binary resolution strategy.\n */\nfunction resolveBinaryPath(basePath: string): string {\n const fs = getFs()\n\n if (!WIN32) {\n // Unix: use path directly\n return basePath\n }\n\n // Windows: check for wrappers in priority order\n // Order matches npm bin-links creation: .cmd, .ps1, .exe, then bare\n const extensions = ['.cmd', '.bat', '.ps1', '.exe', '']\n\n for (const ext of extensions) {\n const testPath = basePath + ext\n if (fs.existsSync(testPath)) {\n return testPath\n }\n }\n\n // Fallback to original path if no wrapper found\n return basePath\n}\n\n/**\n * Find the binary path for an installed package.\n * Intelligently handles packages with single or multiple binaries.\n * Resolves platform-specific wrappers (.cmd, .ps1, etc.) on Windows.\n */\nfunction findBinaryPath(\n packageDir: string,\n packageName: string,\n binaryName?: string,\n): string {\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n const pkgJsonPath = path.join(installedDir, 'package.json')\n\n // Read package.json to find bin entry.\n const pkgJson = readJsonSync(pkgJsonPath) as Record<string, unknown>\n const bin = pkgJson['bin']\n\n let binPath: string | undefined\n\n if (typeof bin === 'string') {\n // Single binary - use it directly.\n binPath = bin\n } else if (typeof bin === 'object' && bin !== null) {\n const binObj = bin as Record<string, string>\n const binKeys = Object.keys(binObj)\n\n // If only one binary, use it regardless of name.\n if (binKeys.length === 1) {\n binPath = binObj[binKeys[0]!]\n } else {\n // Multiple binaries - try to find the right one:\n // 1. User-provided binaryName\n // 2. Last segment of package name (e.g., 'cli' from '@socketsecurity/cli')\n // 3. Full package name without scope (e.g., 'cli' from '@socketsecurity/cli')\n // 4. 
First binary as fallback\n const lastSegment = packageName.split('/').pop()\n const candidates = [\n binaryName,\n lastSegment,\n packageName.replace(/^@[^/]+\\//, ''),\n ].filter(Boolean)\n\n for (const candidate of candidates) {\n if (candidate && binObj[candidate]) {\n binPath = binObj[candidate]\n break\n }\n }\n\n // Fallback to first binary if nothing matched.\n if (!binPath && binKeys.length > 0) {\n binPath = binObj[binKeys[0]!]\n }\n }\n }\n\n if (!binPath) {\n throw new Error(`No binary found for package \"${packageName}\"`)\n }\n\n const rawPath = normalizePath(path.join(installedDir, binPath))\n\n // Resolve platform-specific wrapper (Windows .cmd/.ps1/etc.)\n return resolveBinaryPath(rawPath)\n}\n\n/**\n * Execute a package via DLX - install if needed and run its binary.\n *\n * This is the Socket equivalent of npx/pnpm dlx/yarn dlx, but using\n * our own cache directory (~/.socket/_dlx) and installation logic.\n *\n * Auto-forces reinstall for version ranges to get latest within range.\n *\n * @example\n * ```typescript\n * // Download and execute cdxgen\n * const result = await dlxPackage(\n * ['--version'],\n * { package: '@cyclonedx/cdxgen@10.0.0' }\n * )\n * await result.spawnPromise\n * ```\n */\nexport async function dlxPackage(\n args: readonly string[] | string[],\n options?: DlxPackageOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxPackageResult> {\n // Download the package.\n const downloadResult = await downloadPackage(options!)\n\n // Execute the binary.\n const spawnPromise = executePackage(\n downloadResult.binaryPath,\n args,\n options?.spawnOptions,\n spawnExtra,\n )\n\n return {\n ...downloadResult,\n spawnPromise,\n }\n}\n\n/**\n * Download and install a package without executing it.\n * This is useful for self-update or when you need the package files\n * but don't want to run the binary immediately.\n *\n * @example\n * ```typescript\n * // Install @socketsecurity/cli without running it\n * const result = await downloadPackage({\n * package: '@socketsecurity/cli@1.2.0',\n * force: true\n * })\n * console.log('Installed to:', result.packageDir)\n * console.log('Binary at:', result.binaryPath)\n * ```\n */\nexport async function downloadPackage(\n options: DlxPackageOptions,\n): Promise<DownloadPackageResult> {\n const fs = getFs()\n const {\n binaryName,\n force: userForce,\n package: packageSpec,\n yes,\n } = {\n __proto__: null,\n ...options,\n } as DlxPackageOptions\n\n // Parse package spec.\n const { name: packageName, version: packageVersion } =\n parsePackageSpec(packageSpec)\n\n // Determine force behavior:\n // 1. Explicit force takes precedence\n // 2. --yes flag implies force (auto-approve/skip prompts)\n // 3. Version ranges auto-force to get latest\n const isVersionRange =\n packageVersion !== undefined && rangeOperatorsRegExp.test(packageVersion)\n const force =\n userForce !== undefined ? userForce : yes === true ? true : isVersionRange\n\n // Build full package spec for installation.\n const fullPackageSpec = packageVersion\n ? 
`${packageName}@${packageVersion}`\n : packageName\n\n // Ensure package is installed.\n const { installed, packageDir } = await ensurePackageInstalled(\n packageName,\n fullPackageSpec,\n force,\n )\n\n // Find binary path.\n const binaryPath = findBinaryPath(packageDir, packageName, binaryName)\n\n // Make binary executable on Unix systems.\n if (!WIN32 && fs.existsSync(binaryPath)) {\n try {\n fs.chmodSync(binaryPath, 0o755)\n } catch {\n // Ignore chmod errors.\n }\n }\n\n return {\n binaryPath,\n installed,\n packageDir,\n }\n}\n\n/**\n * Execute a package's binary with cross-platform shell handling.\n * The package must already be installed (use downloadPackage first).\n *\n * On Windows, script files (.bat, .cmd, .ps1) require shell: true.\n * Matches npm/npx execution behavior.\n *\n * @example\n * ```typescript\n * // Execute an already-installed package\n * const downloaded = await downloadPackage({ package: 'cowsay@1.5.0' })\n * const result = await executePackage(\n * downloaded.binaryPath,\n * ['Hello World'],\n * { stdio: 'inherit' }\n * )\n * ```\n */\nexport function executePackage(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true\n // because they are not executable on their own and must be run through cmd.exe.\n // .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n\n const finalOptions = needsShell\n ? {\n ...spawnOptions,\n shell: true,\n }\n : spawnOptions\n\n return spawn(binaryPath, args, finalOptions, spawnExtra)\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAgCA,kBAAiB;AAEjB,sBAAsB;AACtB,sBAAmC;AACnC,iBAAiC;AACjC,gBAAwC;AACxC,IAAAA,eAA8B;AAC9B,mBAAgC;AAChC,0BAA4B;AAE5B,mBAAsB;AAEtB,IAAI;AAAA;AASJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,mBAAmB;AAC1B,MAAI,mBAAmB,QAAW;AAChC,qBAA+B,QAAQ,4BAA4B;AAAA,EACrE;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AAEJ,SAAS,YAAY;AACnB,MAAI,YAAY,QAAW;AACzB,cAAwB,QAAQ,mBAAmB;AAAA,EACrD;AACA,SAAO;AACT;AAMA,MAAM,uBAAuB;AAgF7B,SAAS,iBAAiB,MAGxB;AACA,MAAI;AACF,UAAM,MAAM,iCAAiB;AAC7B,UAAM,SAAS,IAAI,IAAI;AAKvB,UAAM,UACJ,OAAO,SAAS,QACZ,OAAO,YACP,OAAO,SAAS,aAAa,OAAO,SAAS,UAC3C,OAAO,YACP;AAER,WAAO;AAAA,MACL,MAAM,OAAO,QAAQ;AAAA,MACrB;AAAA,IACF;AAAA,EACF,QAAQ;AAEN,UAAM,UAAU,KAAK,YAAY,GAAG;AACpC,QAAI,YAAY,MAAM,KAAK,WAAW,GAAG,GAAG;AAE1C,aAAO,EAAE,MAAM,MAAM,SAAS,OAAU;AAAA,IAC1C;AACA,WAAO;AAAA,MACL,MAAM,KAAK,MAAM,GAAG,OAAO;AAAA,MAC3B,SAAS,KAAK,MAAM,UAAU,CAAC;AAAA,IACjC;AAAA,EACF;AACF;AAOA,eAAe,uBACb,aACA,aACA,OACqD;AACrD,QAAM,eAAW,6BAAiB,WAAW;AAC7C,QAAM,iBAAa,4BAAc,YAAAC,QAAK,SAAK,8BAAgB,GAAG,QAAQ,CAAC;AACvE,QAAM,mBAAe;AAAA,IACnB,YAAAA,QAAK,KAAK,YAAY,gBAAgB,WAAW;AAAA,EACnD;AAIA,MAAI;AACF,cAAM,qBAAU,UAAU;AAAA,EAC5B,SAAS,GAAG;AACV,UAAM,OAAQ,EAA4B;AAC1C,QAAI,SAAS,YAAY,SAAS,SAAS;AACzC,YAAM,IAAI;AAAA,QACR,iDAAiD,UAAU;AAAA;AAAA,QAE3D,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AACA,QAAI,SAAS,SAAS;AACpB,YAAM,IAAI;AAAA,QACR,4DAA4D,UAAU;AAAA;AAAA,QAEtE,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AACA,UAAM,IAAI,MAAM,uCAAuC,UAAU,IAAI;AAAA,MACnE,OAAO;AAAA,IACT,CAAC;AAAA,EACH;AAIA,QAAM,WAAW,YAAAA,QAAK,KAAK,YAAY,kBAAkB;AAEzD,SAAO,MAAM,gCAAY;AAAA,IACvB;AAAA,IACA,YAAY;AACV,YAAM,KAAK,sBAAM;AAGjB,UAAI,CAAC,SAAS,GAAG,WAAW,YAAY,GAAG;AAEzC,cAAM,cAAc,YAAAA,QAAK,KAAK,cAAc,cAAc;AAC1D,YAAI,GAAG,WAAW,WAAW,GAAG;AAC9B,iBAAO,EAAE,WAAW,OAAO,WAAW;AAAA,QACxC;AAAA,MACF;AAIA,YAAM,sBAAkB,oCAAmB;AAC3C,UAAI;AACF,eAAM,0BAAU,GAAE,QAAQ,aAAa,cAAc;AAAA;AAAA,UAEnD,OAAO,mBAAmB,YAAAA,QAAK,KAAK,YAAY,QAAQ;AAAA,QAC1D,CAAC;AAAA,MACH,SAAS,GAAG;AACV,cAAM,OAAQ,EAAU;AACxB,YAAI,SAAS,UAAU,SAAS,WAAW;AACzC,gBAAM,IAAI;AAAA,YACR,sBAAsB,WAAW;AAAA;AAAA,sCAEQ,WAAW;AAAA,YACpD,EAAE,OAAO,EAAE;AAAA,UACb;AAAA,QACF;AACA,YACE,SAAS,eACT,SAAS,eACT,SAAS,aACT;AACA,gBAAM,IAAI;AAAA,YACR,4BAA4B,WAAW;AAAA;AAAA,YAEvC,EAAE,OAAO,EAAE;AAAA,UACb;AAAA,QACF;AACA,cAAM,IAAI;AAAA,UACR,8BAA8B,WAAW;AAAA,eACvB,YAAY;AAAA;AAAA,UAE9B,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AAEA,aAAO,EAAE,WAAW,MAAM,WAAW;AAAA,IACvC;AAAA,IACA;AAAA;AAAA,MAEE,SAAS;AAAA,MACT,iBAAiB;AAAA,IACnB;AAAA,EACF;AACF;AASA,SAAS,kBAAkB,UAA0B;AACnD,QAAM,KAAK,sBAAM;AAEjB,MAAI,CAAC,uBAAO;AAEV,WAAO;AAAA,EACT;AAIA,QAAM,aAAa,CAAC,QAAQ,QAAQ,QAAQ,QAAQ,EAAE;AAEtD,aAAW,OAAO,YAAY;AAC5B,UAAM,WAAW,WAAW;AAC5B,QAAI,GAAG,WAAW,QAAQ,GAAG;AAC3B,aAAO;AAAA,IACT;AAAA,EACF;AAGA,SAAO;AACT;AAOA,SAAS,eACP,YACA,aACA,YACQ;AACR,QAAM,mBAAe;AAAA,IACnB,YAAAA,QAAK,KAAK,YAAY,gBAAgB,WAAW;AAAA,EACnD;AACA,QAAM,cAAc,YAAAA,QAAK,KAAK,cAAc,cAAc;AAG1D,QAAM,cAAU,wBAAa,WAAW;AACxC,QAAM,MAAM,QAAQ,KAAK;AAEzB,MAAI;AAEJ,MAAI,OAAO,QAAQ,UAAU;AAE3B,cAAU;AAAA,EACZ,WAAW,OAAO,QAAQ,YAAY,QAAQ,MAAM;AAClD,UAAM,SAAS;AACf,UAAM,UAAU,OAAO,KAAK,MAAM;AAGlC,QAAI,QAAQ,WAAW,GAAG;AACxB,gBAAU,OAAO,QAAQ,CAAC,CAAE;AAAA,IAC9B,OAAO;AAML,YAAM,cAAc,YAAY,MAAM,GAAG,EAAE,IAAI;AAC/C,YAAM,aAAa;AAAA,QACjB;AAAA,QACA;AAAA,QACA,YAAY,QAAQ,aAAa,EAAE;AAAA,MACrC,EAAE,OAAO,OAAO;AAEhB,iBAAW,aAAa,YAAY;AAClC,YAAI,aAAa,OAAO,SAAS,GAAG;AAClC,oBAAU,OAAO,SAAS;AAC1B;AAAA,QACF;AAAA,MACF;AAGA,UAAI,CAAC,WAAW,QAAQ,SAAS,GAAG;AAClC,kBAAU,OAAO,QAAQ,CAAC,CAAE;AAAA,MAC9B;AAAA,IACF;AAAA,EACF;AAEA,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,gCAAgC,WAAW,GAAG;AAAA,EAChE;A
AEA,QAAM,cAAU,4BAAc,YAAAA,QAAK,KAAK,cAAc,OAAO,CAAC;AAG9D,SAAO,kBAAkB,OAAO;AAClC;AAoBA,eAAsB,WACpB,MACA,SACA,YAC2B;AAE3B,QAAM,iBAAiB,MAAM,gBAAgB,OAAQ;AAGrD,QAAM,eAAe;AAAA,IACnB,eAAe;AAAA,IACf;AAAA,IACA,SAAS;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH;AAAA,EACF;AACF;AAkBA,eAAsB,gBACpB,SACgC;AAChC,QAAM,KAAK,sBAAM;AACjB,QAAM;AAAA,IACJ;AAAA,IACA,OAAO;AAAA,IACP,SAAS;AAAA,IACT;AAAA,EACF,IAAI;AAAA,IACF,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AAGA,QAAM,EAAE,MAAM,aAAa,SAAS,eAAe,IACjD,iBAAiB,WAAW;AAM9B,QAAM,iBACJ,mBAAmB,UAAa,qBAAqB,KAAK,cAAc;AAC1E,QAAM,QACJ,cAAc,SAAY,YAAY,QAAQ,OAAO,OAAO;AAG9D,QAAM,kBAAkB,iBACpB,GAAG,WAAW,IAAI,cAAc,KAChC;AAGJ,QAAM,EAAE,WAAW,WAAW,IAAI,MAAM;AAAA,IACtC;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,aAAa,eAAe,YAAY,aAAa,UAAU;AAGrE,MAAI,CAAC,yBAAS,GAAG,WAAW,UAAU,GAAG;AACvC,QAAI;AACF,SAAG,UAAU,YAAY,GAAK;AAAA,IAChC,QAAQ;AAAA,IAER;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAoBO,SAAS,eACd,YACA,MACA,cACA,YAC0B;AAI1B,QAAM,aAAa,yBAAS,sBAAsB,KAAK,UAAU;AAEjE,QAAM,eAAe,aACjB;AAAA,IACE,GAAG;AAAA,IACH,OAAO;AAAA,EACT,IACA;AAEJ,aAAO,oBAAM,YAAY,MAAM,cAAc,UAAU;AACzD;",
  "names": ["import_path", "path"]
  }
package/dist/dlx.js CHANGED
@@ -86,10 +86,10 @@ async function dlxDirExistsAsync() {
  }
  }
  async function ensureDlxDir() {
- await (0, import_fs.safeMkdir)((0, import_paths.getSocketDlxDir)(), { recursive: true });
+ await (0, import_fs.safeMkdir)((0, import_paths.getSocketDlxDir)());
  }
  function ensureDlxDirSync() {
- (0, import_fs.safeMkdirSync)((0, import_paths.getSocketDlxDir)(), { recursive: true });
+ (0, import_fs.safeMkdirSync)((0, import_paths.getSocketDlxDir)());
  }
  function getDlxInstalledPackageDir(packageName) {
  const path = /* @__PURE__ */ getPath();
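
`ensureDlxDir` and `ensureDlxDirSync` get the same simplification. In plain Node terms, the behavior the call sites now rely on is sketched below: recursive `mkdir` creates any missing parent directories and returns without error when the directory already exists. This is the equivalent Node behavior, not the library's implementation.

```typescript
import { mkdirSync } from 'node:fs'
import { mkdir } from 'node:fs/promises'

// Equivalent plain-Node behavior: with recursive: true, missing parents
// are created and an already-existing directory is not an error.
async function ensureDirAsync(dir: string): Promise<void> {
  await mkdir(dir, { recursive: true })
}

function ensureDirSync(dir: string): void {
  mkdirSync(dir, { recursive: true })
}
```
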
package/dist/dlx.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../src/dlx.ts"],
- "sourcesContent": ["/** @fileoverview DLX (execute package) utilities for Socket ecosystem shared installations. */\n\nimport { createHash } from 'crypto'\n\nimport { readDirNamesSync, safeDelete, safeMkdir, safeMkdirSync } from './fs'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { pEach } from './promises'\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\n/**\n * Generate a cache directory name using npm/npx approach.\n * Uses first 16 characters of SHA-512 hash (like npm/npx).\n *\n * Rationale for SHA-512 truncated (vs full SHA-256):\n * - Matches npm/npx ecosystem behavior\n * - Shorter paths for Windows MAX_PATH compatibility (260 chars)\n * - 16 hex chars = 64 bits = acceptable collision risk for local cache\n * - Collision probability ~1 in 18 quintillion with 1000 entries\n *\n * Input strategy (aligned with npx):\n * - npx uses package spec strings (e.g., '@scope/pkg@1.0.0', 'prettier@3.0.0')\n * - Caller provides complete spec string with version for accurate cache keying\n * - For package installs: Use PURL-style spec with version\n * Examples: 'npm:prettier@3.0.0', 'pypi:requests@2.31.0', 'gem:rails@7.0.0'\n * Note: Socket uses shorthand format without 'pkg:' prefix\n * (handled by @socketregistry/packageurl-js)\n * - For binary downloads: Use URL:name for uniqueness\n *\n * Reference: npm/cli v11.6.2 libnpmexec/lib/index.js#L233-L244\n * https://github.com/npm/cli/blob/v11.6.2/workspaces/libnpmexec/lib/index.js#L233-L244\n * Implementation: packages.map().sort().join('\\n') \u2192 SHA-512 \u2192 slice(0,16)\n * npx hashes the package spec (name@version), not just name\n */\nexport function generateCacheKey(spec: string): string {\n return createHash('sha512').update(spec).digest('hex').substring(0, 16)\n}\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path!\n}\n\n/**\n * Clear all DLX package installations.\n */\nexport async function clearDlx(): Promise<void> {\n const packages = await listDlxPackagesAsync()\n await pEach(packages, pkg => removeDlxPackage(pkg))\n}\n\n/**\n * Clear all DLX package installations synchronously.\n */\nexport function clearDlxSync(): void {\n const packages = listDlxPackages()\n for (const pkg of packages) {\n removeDlxPackageSync(pkg)\n }\n}\n\n/**\n * Check if the DLX directory exists.\n */\nexport function dlxDirExists(): boolean {\n const fs = getFs()\n return fs.existsSync(getSocketDlxDir())\n}\n\n/**\n * Check if the DLX directory exists asynchronously.\n */\nexport async function dlxDirExistsAsync(): Promise<boolean> {\n const fs = getFs()\n try {\n await fs.promises.access(getSocketDlxDir())\n return true\n } catch {\n return false\n }\n}\n\n/**\n * Ensure the DLX directory exists, creating it if necessary.\n */\nexport async function ensureDlxDir(): Promise<void> {\n await safeMkdir(getSocketDlxDir(), { 
recursive: true })\n}\n\n/**\n * Ensure the DLX directory exists synchronously, creating it if necessary.\n */\nexport function ensureDlxDirSync(): void {\n safeMkdirSync(getSocketDlxDir(), { recursive: true })\n}\n\n/**\n * Get the installed package directory within DLX node_modules.\n */\nexport function getDlxInstalledPackageDir(packageName: string): string {\n const path = getPath()\n return normalizePath(\n path.join(getDlxPackageNodeModulesDir(packageName), packageName),\n )\n}\n\n/**\n * Get the DLX installation directory for a specific package.\n */\nexport function getDlxPackageDir(packageName: string): string {\n const path = getPath()\n return normalizePath(path.join(getSocketDlxDir(), packageName))\n}\n\n/**\n * Get the package.json path for a DLX installed package.\n */\nexport function getDlxPackageJsonPath(packageName: string): string {\n const path = getPath()\n return normalizePath(\n path.join(getDlxInstalledPackageDir(packageName), 'package.json'),\n )\n}\n\n/**\n * Get the node_modules directory for a DLX package installation.\n */\nexport function getDlxPackageNodeModulesDir(packageName: string): string {\n const path = getPath()\n return normalizePath(path.join(getDlxPackageDir(packageName), 'node_modules'))\n}\n\n/**\n * Check if a file path is within the Socket DLX directory.\n * This is useful for determining if a binary or file is managed by Socket's DLX system.\n *\n * @param filePath - Absolute or relative path to check\n * @returns true if the path is within ~/.socket/_dlx/, false otherwise\n *\n * @example\n * ```typescript\n * isInSocketDlx('/home/user/.socket/_dlx/abc123/bin/socket') // true\n * isInSocketDlx('/usr/local/bin/socket') // false\n * isInSocketDlx(process.argv[0]) // Check if current binary is in DLX\n * ```\n */\nexport function isInSocketDlx(filePath: string): boolean {\n if (!filePath) {\n return false\n }\n\n const path = getPath()\n const dlxDir = getSocketDlxDir()\n const absolutePath = normalizePath(path.resolve(filePath))\n\n // Check if the absolute path starts with the DLX directory.\n // Both paths are normalized to use forward slashes for consistent comparison.\n return absolutePath.startsWith(`${dlxDir}/`)\n}\n\n/**\n * Check if a package is installed in DLX.\n */\nexport function isDlxPackageInstalled(packageName: string): boolean {\n const fs = getFs()\n return fs.existsSync(getDlxInstalledPackageDir(packageName))\n}\n\n/**\n * Check if a package is installed in DLX asynchronously.\n */\nexport async function isDlxPackageInstalledAsync(\n packageName: string,\n): Promise<boolean> {\n const fs = getFs()\n try {\n await fs.promises.access(getDlxInstalledPackageDir(packageName))\n return true\n } catch {\n return false\n }\n}\n\n/**\n * List all packages installed in DLX.\n */\nexport function listDlxPackages(): string[] {\n try {\n return readDirNamesSync(getSocketDlxDir(), { sort: true })\n } catch {\n return []\n }\n}\n\n/**\n * List all packages installed in DLX asynchronously.\n */\nexport async function listDlxPackagesAsync(): Promise<string[]> {\n const fs = getFs()\n try {\n const entries = await fs.promises.readdir(getSocketDlxDir(), {\n withFileTypes: true,\n })\n return entries\n .filter(e => e.isDirectory())\n .map(e => e.name)\n .sort()\n } catch {\n return []\n }\n}\n\n/**\n * Remove a DLX package installation.\n */\nexport async function removeDlxPackage(packageName: string): Promise<void> {\n const packageDir = getDlxPackageDir(packageName)\n try {\n await safeDelete(packageDir, { recursive: true, force: true })\n } 
catch (e) {\n throw new Error(`Failed to remove DLX package \"${packageName}\"`, {\n cause: e,\n })\n }\n}\n\n/**\n * Remove a DLX package installation synchronously.\n */\nexport function removeDlxPackageSync(packageName: string): void {\n const fs = getFs()\n const packageDir = getDlxPackageDir(packageName)\n try {\n fs.rmSync(packageDir, { recursive: true, force: true })\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied removing DLX package \"${packageName}\"\\n` +\n `Directory: ${packageDir}\\n` +\n 'To resolve:\\n' +\n ' 1. Check file/directory permissions\\n' +\n ' 2. Close any programs using files in this directory\\n' +\n ' 3. Try running with elevated privileges if necessary\\n' +\n ` 4. Manually remove: rm -rf \"${packageDir}\"`,\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot remove DLX package \"${packageName}\" from read-only filesystem\\n` +\n `Directory: ${packageDir}\\n` +\n 'The filesystem is mounted read-only.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to remove DLX package \"${packageName}\"\\n` +\n `Directory: ${packageDir}\\n` +\n 'Check permissions and ensure no programs are using this directory.',\n { cause: e },\n )\n }\n}\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAA2B;AAE3B,gBAAuE;AACvE,kBAA8B;AAC9B,mBAAgC;AAChC,sBAAsB;AAEtB,IAAI;AAAA;AASJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AA0BO,SAAS,iBAAiB,MAAsB;AACrD,aAAO,0BAAW,QAAQ,EAAE,OAAO,IAAI,EAAE,OAAO,KAAK,EAAE,UAAU,GAAG,EAAE;AACxE;AAEA,IAAI;AAAA;AAMJ,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAKA,eAAsB,WAA0B;AAC9C,QAAM,WAAW,MAAM,qBAAqB;AAC5C,YAAM,uBAAM,UAAU,SAAO,iBAAiB,GAAG,CAAC;AACpD;AAKO,SAAS,eAAqB;AACnC,QAAM,WAAW,gBAAgB;AACjC,aAAW,OAAO,UAAU;AAC1B,yBAAqB,GAAG;AAAA,EAC1B;AACF;AAKO,SAAS,eAAwB;AACtC,QAAM,KAAK,sBAAM;AACjB,SAAO,GAAG,eAAW,8BAAgB,CAAC;AACxC;AAKA,eAAsB,oBAAsC;AAC1D,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,GAAG,SAAS,WAAO,8BAAgB,CAAC;AAC1C,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,eAA8B;AAClD,YAAM,yBAAU,8BAAgB,GAAG,EAAE,WAAW,KAAK,CAAC;AACxD;AAKO,SAAS,mBAAyB;AACvC,mCAAc,8BAAgB,GAAG,EAAE,WAAW,KAAK,CAAC;AACtD;AAKO,SAAS,0BAA0B,aAA6B;AACrE,QAAM,OAAO,wBAAQ;AACrB,aAAO;AAAA,IACL,KAAK,KAAK,4BAA4B,WAAW,GAAG,WAAW;AAAA,EACjE;AACF;AAKO,SAAS,iBAAiB,aAA6B;AAC5D,QAAM,OAAO,wBAAQ;AACrB,aAAO,2BAAc,KAAK,SAAK,8BAAgB,GAAG,WAAW,CAAC;AAChE;AAKO,SAAS,sBAAsB,aAA6B;AACjE,QAAM,OAAO,wBAAQ;AACrB,aAAO;AAAA,IACL,KAAK,KAAK,0BAA0B,WAAW,GAAG,cAAc;AAAA,EAClE;AACF;AAKO,SAAS,4BAA4B,aAA6B;AACvE,QAAM,OAAO,wBAAQ;AACrB,aAAO,2BAAc,KAAK,KAAK,iBAAiB,WAAW,GAAG,cAAc,CAAC;AAC/E;AAgBO,SAAS,cAAc,UAA2B;AACvD,MAAI,CAAC,UAAU;AACb,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,wBAAQ;AACrB,QAAM,aAAS,8BAAgB;AAC/B,QAAM,mBAAe,2BAAc,KAAK,QAAQ,QAAQ,CAAC;AAIzD,SAAO,aAAa,WAAW,GAAG,MAAM,GAAG;AAC7C;AAKO,SAAS,sBAAsB,aAA8B;AAClE,QAAM,KAAK,sBAAM;AACjB,SAAO,GAAG,WAAW,0BAA0B,WAAW,CAAC;AAC7D;AAKA,eAAsB,2BACpB,aACkB;AAClB,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,GAAG,SAAS,OAAO,0BAA0B,WAAW,CAAC;AAC/D,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKO,SAAS,kBAA4B;AAC1C,MAAI;AACF,eAAO,gCAAiB,8BAAgB,GAAG,EAAE,MAAM,KAAK,CAAC;AAAA,EAC3D,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAKA,eAAsB,uBAA0C;AAC9D,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,UAAU,MAAM,GAAG,SAAS,YAAQ,8BAAgB,GAAG;AAAA,MAC3D,eAAe;AAAA,IACjB,CAAC;AACD,WAAO,QACJ,OAAO,OAAK,EAAE,YAAY,CAAC,EAC3B,IAAI,OAAK,EAAE,IAAI,EACf,KAAK;AAAA,EACV,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAKA,eAAsB,iBAAiB,aAAoC;AACzE,QAAM,aAAa,iBAAiB,WAAW;AAC/C,MAAI;AACF,cAAM,sBAAW,YAAY,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EAC/D,SAAS,GAAG;AACV,UAAM,IAAI,MAAM,iCAAiC,WAAW,KAAK;AAAA,MAC/D,OAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;AAKO,SAAS,qBAAqB,aAA2B;AAC9D,QAAM,KAAK,sBAAM;AACjB,QAAM,aAAa,iBAAiB,WAAW;AAC/C,MAAI;AACF,OAAG,OAAO,YAAY,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EACxD,SAAS,GAAG;AACV,UAAM,OAAQ,EAA4B;AAC1C,QAAI,SAAS,YAAY,SAAS,SAAS;AACzC,YAAM,IAAI;AAAA,QACR,2CAA2C,WAAW;AAAA,aACtC,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA,gCAKS,UAAU;AAAA,QAC7C,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AACA,QAAI,SAAS,SAAS;AACpB,YAAM,IAAI;AAAA,QACR,8BAA8B,WAAW;AAAA,aACzB,UAAU;AAAA;AAAA,QAE1B,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AACA,UAAM,IAAI;AAAA,MACR,iCAAiC,WAAW;AAAA,aAC5B,UAAU;AAAA;AAAA,MAE1B,EAAE,OAAO,EAAE;AAAA,IACb;AAAA,EACF;AACF;",
+ "sourcesContent": ["/** @fileoverview DLX (execute package) utilities for Socket ecosystem shared installations. */\n\nimport { createHash } from 'crypto'\n\nimport { readDirNamesSync, safeDelete, safeMkdir, safeMkdirSync } from './fs'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { pEach } from './promises'\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\n/**\n * Generate a cache directory name using npm/npx approach.\n * Uses first 16 characters of SHA-512 hash (like npm/npx).\n *\n * Rationale for SHA-512 truncated (vs full SHA-256):\n * - Matches npm/npx ecosystem behavior\n * - Shorter paths for Windows MAX_PATH compatibility (260 chars)\n * - 16 hex chars = 64 bits = acceptable collision risk for local cache\n * - Collision probability ~1 in 18 quintillion with 1000 entries\n *\n * Input strategy (aligned with npx):\n * - npx uses package spec strings (e.g., '@scope/pkg@1.0.0', 'prettier@3.0.0')\n * - Caller provides complete spec string with version for accurate cache keying\n * - For package installs: Use PURL-style spec with version\n * Examples: 'npm:prettier@3.0.0', 'pypi:requests@2.31.0', 'gem:rails@7.0.0'\n * Note: Socket uses shorthand format without 'pkg:' prefix\n * (handled by @socketregistry/packageurl-js)\n * - For binary downloads: Use URL:name for uniqueness\n *\n * Reference: npm/cli v11.6.2 libnpmexec/lib/index.js#L233-L244\n * https://github.com/npm/cli/blob/v11.6.2/workspaces/libnpmexec/lib/index.js#L233-L244\n * Implementation: packages.map().sort().join('\\n') \u2192 SHA-512 \u2192 slice(0,16)\n * npx hashes the package spec (name@version), not just name\n */\nexport function generateCacheKey(spec: string): string {\n return createHash('sha512').update(spec).digest('hex').substring(0, 16)\n}\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path!\n}\n\n/**\n * Clear all DLX package installations.\n */\nexport async function clearDlx(): Promise<void> {\n const packages = await listDlxPackagesAsync()\n await pEach(packages, pkg => removeDlxPackage(pkg))\n}\n\n/**\n * Clear all DLX package installations synchronously.\n */\nexport function clearDlxSync(): void {\n const packages = listDlxPackages()\n for (const pkg of packages) {\n removeDlxPackageSync(pkg)\n }\n}\n\n/**\n * Check if the DLX directory exists.\n */\nexport function dlxDirExists(): boolean {\n const fs = getFs()\n return fs.existsSync(getSocketDlxDir())\n}\n\n/**\n * Check if the DLX directory exists asynchronously.\n */\nexport async function dlxDirExistsAsync(): Promise<boolean> {\n const fs = getFs()\n try {\n await fs.promises.access(getSocketDlxDir())\n return true\n } catch {\n return false\n }\n}\n\n/**\n * Ensure the DLX directory exists, creating it if necessary.\n */\nexport async function ensureDlxDir(): Promise<void> {\n await safeMkdir(getSocketDlxDir())\n}\n\n/**\n 
* Ensure the DLX directory exists synchronously, creating it if necessary.\n */\nexport function ensureDlxDirSync(): void {\n safeMkdirSync(getSocketDlxDir())\n}\n\n/**\n * Get the installed package directory within DLX node_modules.\n */\nexport function getDlxInstalledPackageDir(packageName: string): string {\n const path = getPath()\n return normalizePath(\n path.join(getDlxPackageNodeModulesDir(packageName), packageName),\n )\n}\n\n/**\n * Get the DLX installation directory for a specific package.\n */\nexport function getDlxPackageDir(packageName: string): string {\n const path = getPath()\n return normalizePath(path.join(getSocketDlxDir(), packageName))\n}\n\n/**\n * Get the package.json path for a DLX installed package.\n */\nexport function getDlxPackageJsonPath(packageName: string): string {\n const path = getPath()\n return normalizePath(\n path.join(getDlxInstalledPackageDir(packageName), 'package.json'),\n )\n}\n\n/**\n * Get the node_modules directory for a DLX package installation.\n */\nexport function getDlxPackageNodeModulesDir(packageName: string): string {\n const path = getPath()\n return normalizePath(path.join(getDlxPackageDir(packageName), 'node_modules'))\n}\n\n/**\n * Check if a file path is within the Socket DLX directory.\n * This is useful for determining if a binary or file is managed by Socket's DLX system.\n *\n * @param filePath - Absolute or relative path to check\n * @returns true if the path is within ~/.socket/_dlx/, false otherwise\n *\n * @example\n * ```typescript\n * isInSocketDlx('/home/user/.socket/_dlx/abc123/bin/socket') // true\n * isInSocketDlx('/usr/local/bin/socket') // false\n * isInSocketDlx(process.argv[0]) // Check if current binary is in DLX\n * ```\n */\nexport function isInSocketDlx(filePath: string): boolean {\n if (!filePath) {\n return false\n }\n\n const path = getPath()\n const dlxDir = getSocketDlxDir()\n const absolutePath = normalizePath(path.resolve(filePath))\n\n // Check if the absolute path starts with the DLX directory.\n // Both paths are normalized to use forward slashes for consistent comparison.\n return absolutePath.startsWith(`${dlxDir}/`)\n}\n\n/**\n * Check if a package is installed in DLX.\n */\nexport function isDlxPackageInstalled(packageName: string): boolean {\n const fs = getFs()\n return fs.existsSync(getDlxInstalledPackageDir(packageName))\n}\n\n/**\n * Check if a package is installed in DLX asynchronously.\n */\nexport async function isDlxPackageInstalledAsync(\n packageName: string,\n): Promise<boolean> {\n const fs = getFs()\n try {\n await fs.promises.access(getDlxInstalledPackageDir(packageName))\n return true\n } catch {\n return false\n }\n}\n\n/**\n * List all packages installed in DLX.\n */\nexport function listDlxPackages(): string[] {\n try {\n return readDirNamesSync(getSocketDlxDir(), { sort: true })\n } catch {\n return []\n }\n}\n\n/**\n * List all packages installed in DLX asynchronously.\n */\nexport async function listDlxPackagesAsync(): Promise<string[]> {\n const fs = getFs()\n try {\n const entries = await fs.promises.readdir(getSocketDlxDir(), {\n withFileTypes: true,\n })\n return entries\n .filter(e => e.isDirectory())\n .map(e => e.name)\n .sort()\n } catch {\n return []\n }\n}\n\n/**\n * Remove a DLX package installation.\n */\nexport async function removeDlxPackage(packageName: string): Promise<void> {\n const packageDir = getDlxPackageDir(packageName)\n try {\n await safeDelete(packageDir, { recursive: true, force: true })\n } catch (e) {\n throw new Error(`Failed to remove DLX 
package \"${packageName}\"`, {\n cause: e,\n })\n }\n}\n\n/**\n * Remove a DLX package installation synchronously.\n */\nexport function removeDlxPackageSync(packageName: string): void {\n const fs = getFs()\n const packageDir = getDlxPackageDir(packageName)\n try {\n fs.rmSync(packageDir, { recursive: true, force: true })\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied removing DLX package \"${packageName}\"\\n` +\n `Directory: ${packageDir}\\n` +\n 'To resolve:\\n' +\n ' 1. Check file/directory permissions\\n' +\n ' 2. Close any programs using files in this directory\\n' +\n ' 3. Try running with elevated privileges if necessary\\n' +\n ` 4. Manually remove: rm -rf \"${packageDir}\"`,\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot remove DLX package \"${packageName}\" from read-only filesystem\\n` +\n `Directory: ${packageDir}\\n` +\n 'The filesystem is mounted read-only.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to remove DLX package \"${packageName}\"\\n` +\n `Directory: ${packageDir}\\n` +\n 'Check permissions and ensure no programs are using this directory.',\n { cause: e },\n )\n }\n}\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAA2B;AAE3B,gBAAuE;AACvE,kBAA8B;AAC9B,mBAAgC;AAChC,sBAAsB;AAEtB,IAAI;AAAA;AASJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAGrB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AA0BO,SAAS,iBAAiB,MAAsB;AACrD,aAAO,0BAAW,QAAQ,EAAE,OAAO,IAAI,EAAE,OAAO,KAAK,EAAE,UAAU,GAAG,EAAE;AACxE;AAEA,IAAI;AAAA;AAMJ,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAKA,eAAsB,WAA0B;AAC9C,QAAM,WAAW,MAAM,qBAAqB;AAC5C,YAAM,uBAAM,UAAU,SAAO,iBAAiB,GAAG,CAAC;AACpD;AAKO,SAAS,eAAqB;AACnC,QAAM,WAAW,gBAAgB;AACjC,aAAW,OAAO,UAAU;AAC1B,yBAAqB,GAAG;AAAA,EAC1B;AACF;AAKO,SAAS,eAAwB;AACtC,QAAM,KAAK,sBAAM;AACjB,SAAO,GAAG,eAAW,8BAAgB,CAAC;AACxC;AAKA,eAAsB,oBAAsC;AAC1D,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,GAAG,SAAS,WAAO,8BAAgB,CAAC;AAC1C,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,eAA8B;AAClD,YAAM,yBAAU,8BAAgB,CAAC;AACnC;AAKO,SAAS,mBAAyB;AACvC,mCAAc,8BAAgB,CAAC;AACjC;AAKO,SAAS,0BAA0B,aAA6B;AACrE,QAAM,OAAO,wBAAQ;AACrB,aAAO;AAAA,IACL,KAAK,KAAK,4BAA4B,WAAW,GAAG,WAAW;AAAA,EACjE;AACF;AAKO,SAAS,iBAAiB,aAA6B;AAC5D,QAAM,OAAO,wBAAQ;AACrB,aAAO,2BAAc,KAAK,SAAK,8BAAgB,GAAG,WAAW,CAAC;AAChE;AAKO,SAAS,sBAAsB,aAA6B;AACjE,QAAM,OAAO,wBAAQ;AACrB,aAAO;AAAA,IACL,KAAK,KAAK,0BAA0B,WAAW,GAAG,cAAc;AAAA,EAClE;AACF;AAKO,SAAS,4BAA4B,aAA6B;AACvE,QAAM,OAAO,wBAAQ;AACrB,aAAO,2BAAc,KAAK,KAAK,iBAAiB,WAAW,GAAG,cAAc,CAAC;AAC/E;AAgBO,SAAS,cAAc,UAA2B;AACvD,MAAI,CAAC,UAAU;AACb,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,wBAAQ;AACrB,QAAM,aAAS,8BAAgB;AAC/B,QAAM,mBAAe,2BAAc,KAAK,QAAQ,QAAQ,CAAC;AAIzD,SAAO,aAAa,WAAW,GAAG,MAAM,GAAG;AAC7C;AAKO,SAAS,sBAAsB,aAA8B;AAClE,QAAM,KAAK,sBAAM;AACjB,SAAO,GAAG,WAAW,0BAA0B,WAAW,CAAC;AAC7D;AAKA,eAAsB,2BACpB,aACkB;AAClB,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,GAAG,SAAS,OAAO,0BAA0B,WAAW,CAAC;AAC/D,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKO,SAAS,kBAA4B;AAC1C,MAAI;AACF,eAAO,gCAAiB,8BAAgB,GAAG,EAAE,MAAM,KAAK,CAAC;AAAA,EAC3D,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAKA,eAAsB,uBAA0C;AAC9D,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,UAAU,MAAM,GAAG,SAAS,YAAQ,8BAAgB,GAAG;AAAA,MAC3D,eAAe;AAAA,IACjB,CAAC;AACD,WAAO,QACJ,OAAO,OAAK,EAAE,YAAY,CAAC,EAC3B,IAAI,OAAK,EAAE,IAAI,EACf,KAAK;AAAA,EACV,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAKA,eAAsB,iBAAiB,aAAoC;AACzE,QAAM,aAAa,iBAAiB,WAAW;AAC/C,MAAI;AACF,cAAM,sBAAW,YAAY,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EAC/D,SAAS,GAAG;AACV,UAAM,IAAI,MAAM,iCAAiC,WAAW,KAAK;AAAA,MAC/D,OAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;AAKO,SAAS,qBAAqB,aAA2B;AAC9D,QAAM,KAAK,sBAAM;AACjB,QAAM,aAAa,iBAAiB,WAAW;AAC/C,MAAI;AACF,OAAG,OAAO,YAAY,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EACxD,SAAS,GAAG;AACV,UAAM,OAAQ,EAA4B;AAC1C,QAAI,SAAS,YAAY,SAAS,SAAS;AACzC,YAAM,IAAI;AAAA,QACR,2CAA2C,WAAW;AAAA,aACtC,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA,gCAKS,UAAU;AAAA,QAC7C,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AACA,QAAI,SAAS,SAAS;AACpB,YAAM,IAAI;AAAA,QACR,8BAA8B,WAAW;AAAA,aACzB,UAAU;AAAA;AAAA,QAE1B,EAAE,OAAO,EAAE;AAAA,MACb;AAAA,IACF;AACA,UAAM,IAAI;AAAA,MACR,iCAAiC,WAAW;AAAA,aAC5B,UAAU;AAAA;AAAA,MAE1B,EAAE,OAAO,EAAE;AAAA,IACb;AAAA,EACF;AACF;",
6
6
  "names": []
7
7
  }
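For reference, the cache-key derivation embedded in the `dlx.js.map` source above reduces to the following standalone sketch (the helper name is illustrative, not part of the package API):

```ts
import { createHash } from 'node:crypto'

// SHA-512 of the full package spec, truncated to the first 16 hex characters,
// mirroring generateCacheKey in src/dlx.ts as shown in the source map above.
function cacheKeySketch(spec: string): string {
  return createHash('sha512').update(spec).digest('hex').slice(0, 16)
}

cacheKeySketch('npm:prettier@3.0.0')   // 16-char hex key, stable per spec
cacheKeySketch('pypi:requests@2.31.0') // a different spec yields a different key
```

Truncating to 16 hex characters keeps cache directory paths short enough for Windows MAX_PATH limits while matching npm/npx behavior, per the rationale in the source comments above.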
package/dist/fs.d.ts CHANGED
@@ -653,21 +653,25 @@ export declare function safeDeleteSync(filepath: PathLike | PathLike[], options?
653
653
  * - Silently ignores EEXIST errors (directory already exists)
654
654
  * - Re-throws all other errors (permissions, invalid path, etc.)
655
655
  * - Works reliably in multi-process/concurrent scenarios
656
+ * - Defaults to recursive: true for convenient nested directory creation
656
657
  *
657
658
  * @param path - Directory path to create
658
- * @param options - Options including recursive and mode settings
659
+ * @param options - Options including recursive (default: true) and mode settings
659
660
  * @returns Promise that resolves when directory is created or already exists
660
661
  *
661
662
  * @example
662
663
  * ```ts
663
- * // Create a directory, no error if it exists
664
+ * // Create a directory recursively by default, no error if it exists
664
665
  * await safeMkdir('./config')
665
666
  *
666
- * // Create nested directories
667
- * await safeMkdir('./data/cache/temp', { recursive: true })
667
+ * // Create nested directories (recursive: true is the default)
668
+ * await safeMkdir('./data/cache/temp')
668
669
  *
669
670
  * // Create with specific permissions
670
671
  * await safeMkdir('./secure', { mode: 0o700 })
672
+ *
673
+ * // Explicitly disable recursive behavior
674
+ * await safeMkdir('./single-level', { recursive: false })
671
675
  * ```
672
676
  */
673
677
  /*@__NO_SIDE_EFFECTS__*/
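A minimal usage sketch of the updated async documentation above, assuming the `@socketsecurity/lib/fs` subpath export used in the package's own JSDoc examples:

```ts
import { safeMkdir } from '@socketsecurity/lib/fs'

async function prepareCache(): Promise<void> {
  // Nested directories are created in one call; recursive: true is now the default.
  await safeMkdir('./data/cache/temp')

  // Existing call sites that pass { recursive: true } explicitly keep working
  // and behave identically after the default change.
  await safeMkdir('./data/cache/temp', { recursive: true })
}
```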
@@ -681,20 +685,24 @@ export declare function safeMkdir(path: PathLike, options?: MakeDirectoryOptions
681
685
  * - Silently ignores EEXIST errors (directory already exists)
682
686
  * - Re-throws all other errors (permissions, invalid path, etc.)
683
687
  * - Works reliably in multi-process/concurrent scenarios
688
+ * - Defaults to recursive: true for convenient nested directory creation
684
689
  *
685
690
  * @param path - Directory path to create
686
- * @param options - Options including recursive and mode settings
691
+ * @param options - Options including recursive (default: true) and mode settings
687
692
  *
688
693
  * @example
689
694
  * ```ts
690
- * // Create a directory, no error if it exists
695
+ * // Create a directory recursively by default, no error if it exists
691
696
  * safeMkdirSync('./config')
692
697
  *
693
- * // Create nested directories
694
- * safeMkdirSync('./data/cache/temp', { recursive: true })
698
+ * // Create nested directories (recursive: true is the default)
699
+ * safeMkdirSync('./data/cache/temp')
695
700
  *
696
701
  * // Create with specific permissions
697
702
  * safeMkdirSync('./secure', { mode: 0o700 })
703
+ *
704
+ * // Explicitly disable recursive behavior
705
+ * safeMkdirSync('./single-level', { recursive: false })
698
706
  * ```
699
707
  */
700
708
  /*@__NO_SIDE_EFFECTS__*/
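The synchronous variant documented above follows the same default; a short sketch of the opt-out path, under the same assumed subpath import:

```ts
import { safeMkdirSync } from '@socketsecurity/lib/fs'

// recursive: true is the default here as well.
safeMkdirSync('./data/cache/temp')

// Opting out restores single-level creation; a missing parent is then
// reported as an error (only EEXIST is swallowed).
safeMkdirSync('./single-level', { recursive: false })

// Mode settings combine with the recursive default.
safeMkdirSync('./secure', { mode: 0o700 })
```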
package/dist/fs.js CHANGED
@@ -510,8 +510,9 @@ function safeDeleteSync(filepath, options) {
510
510
  // @__NO_SIDE_EFFECTS__
511
511
  async function safeMkdir(path, options) {
512
512
  const fs = /* @__PURE__ */ getFs();
513
+ const opts = { __proto__: null, recursive: true, ...options };
513
514
  try {
514
- await fs.promises.mkdir(path, options);
515
+ await fs.promises.mkdir(path, opts);
515
516
  } catch (e) {
516
517
  if (typeof e === "object" && e !== null && "code" in e && e.code !== "EEXIST") {
517
518
  throw e;
@@ -521,8 +522,9 @@ async function safeMkdir(path, options) {
521
522
  // @__NO_SIDE_EFFECTS__
522
523
  function safeMkdirSync(path, options) {
523
524
  const fs = /* @__PURE__ */ getFs();
525
+ const opts = { __proto__: null, recursive: true, ...options };
524
526
  try {
525
- fs.mkdirSync(path, options);
527
+ fs.mkdirSync(path, opts);
526
528
  } catch (e) {
527
529
  if (typeof e === "object" && e !== null && "code" in e && e.code !== "EEXIST") {
528
530
  throw e;
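The implementation change above is just an options merge; a behavior-only sketch of how a caller-supplied value overrides the new default (the type and function names here are illustrative, not the package's internals):

```ts
type MkdirOptionsSketch = { recursive?: boolean; mode?: number }

// Spread order lets an explicit caller value win over the recursive default.
// The published code additionally sets __proto__: null so the merged options
// object has no prototype.
function resolveMkdirOptions(options?: MkdirOptionsSketch): MkdirOptionsSketch {
  return { recursive: true, ...options }
}

resolveMkdirOptions()                     // { recursive: true }
resolveMkdirOptions({ recursive: false }) // { recursive: false }
resolveMkdirOptions({ mode: 0o700 })      // { recursive: true, mode: 0o700 }
```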
package/dist/fs.js.map CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../src/fs.ts"],
4
- "sourcesContent": ["/**\n * @fileoverview File system utilities with cross-platform path handling.\n * Provides enhanced fs operations, glob matching, and directory traversal functions.\n */\n\nimport type { Abortable } from 'events'\n\nimport type {\n Dirent,\n MakeDirectoryOptions,\n ObjectEncodingOptions,\n OpenMode,\n PathLike,\n StatSyncOptions,\n WriteFileOptions,\n} from 'fs'\n\nimport { getAbortSignal } from '#constants/process'\n\nimport { isArray } from './arrays'\n\nconst abortSignal = getAbortSignal()\n\nimport { defaultIgnore, getGlobMatcher } from './globs'\nimport type { JsonReviver } from './json'\nimport { jsonParse } from './json'\nimport { objectFreeze, type Remap } from './objects'\nimport { normalizePath, pathLikeToString } from './path'\nimport { registerCacheInvalidation } from './paths/rewire'\nimport { naturalCompare } from './sorts'\n\n/**\n * Supported text encodings for Node.js Buffers.\n * Includes ASCII, UTF-8/16, base64, binary, and hexadecimal encodings.\n */\nexport type BufferEncoding =\n | 'ascii'\n | 'utf8'\n | 'utf-8'\n | 'utf16le'\n | 'ucs2'\n | 'ucs-2'\n | 'base64'\n | 'base64url'\n | 'latin1'\n | 'binary'\n | 'hex'\n\n/**\n * Represents any valid JSON content type.\n */\nexport type JsonContent = unknown\n\n/**\n * Options for asynchronous `findUp` operations.\n */\nexport interface FindUpOptions {\n /**\n * Starting directory for the search.\n * @default process.cwd()\n */\n cwd?: string | undefined\n /**\n * Only match directories, not files.\n * @default false\n */\n onlyDirectories?: boolean | undefined\n /**\n * Only match files, not directories.\n * @default true\n */\n onlyFiles?: boolean | undefined\n /**\n * Abort signal to cancel the search operation.\n */\n signal?: AbortSignal | undefined\n}\n\n/**\n * Options for synchronous `findUpSync` operations.\n */\nexport interface FindUpSyncOptions {\n /**\n * Starting directory for the search.\n * @default process.cwd()\n */\n cwd?: string | undefined\n /**\n * Directory to stop searching at (inclusive).\n * When provided, search will stop at this directory even if the root hasn't been reached.\n */\n stopAt?: string | undefined\n /**\n * Only match directories, not files.\n * @default false\n */\n onlyDirectories?: boolean | undefined\n /**\n * Only match files, not directories.\n * @default true\n */\n onlyFiles?: boolean | undefined\n}\n\n/**\n * Options for checking if a directory is empty.\n */\nexport interface IsDirEmptyOptions {\n /**\n * Glob patterns for files to ignore when checking emptiness.\n * Files matching these patterns are not counted.\n * @default defaultIgnore\n */\n ignore?: string[] | readonly string[] | undefined\n}\n\n/**\n * Options for read operations with abort support.\n */\nexport interface ReadOptions extends Abortable {\n /**\n * Character encoding to use for reading.\n * @default 'utf8'\n */\n encoding?: BufferEncoding | string | undefined\n /**\n * File system flag for reading behavior.\n * @default 'r'\n */\n flag?: string | undefined\n}\n\n/**\n * Options for reading directories with filtering and sorting.\n */\nexport interface ReadDirOptions {\n /**\n * Glob patterns for directories to ignore.\n * @default undefined\n */\n ignore?: string[] | readonly string[] | undefined\n /**\n * Include empty directories in results.\n * When `false`, empty directories are filtered out.\n * @default true\n */\n includeEmpty?: boolean | undefined\n /**\n * Sort directory names alphabetically using natural sort order.\n * @default true\n */\n sort?: boolean | 
undefined\n}\n\n/**\n * Options for reading files with encoding and abort support.\n * Can be either an options object, an encoding string, or null.\n */\nexport type ReadFileOptions =\n | Remap<\n ObjectEncodingOptions &\n Abortable & {\n flag?: OpenMode | undefined\n }\n >\n | BufferEncoding\n | null\n\n/**\n * Options for reading and parsing JSON files.\n */\nexport type ReadJsonOptions = Remap<\n ReadFileOptions & {\n /**\n * Whether to throw errors on parse failure.\n * When `false`, returns `undefined` on error instead of throwing.\n * @default true\n */\n throws?: boolean | undefined\n /**\n * JSON reviver function to transform parsed values.\n * Same as the second parameter to `JSON.parse()`.\n */\n reviver?: Parameters<typeof JSON.parse>[1] | undefined\n }\n>\n\n/**\n * Options for file/directory removal operations.\n */\nexport interface RemoveOptions {\n /**\n * Force deletion even outside normally safe directories.\n * When `false`, prevents deletion outside temp, cacache, and ~/.socket.\n * @default true for safe directories, false otherwise\n */\n force?: boolean | undefined\n /**\n * Maximum number of retry attempts on failure.\n * @default 3\n */\n maxRetries?: number | undefined\n /**\n * Recursively delete directories and contents.\n * @default true\n */\n recursive?: boolean | undefined\n /**\n * Delay in milliseconds between retry attempts.\n * @default 200\n */\n retryDelay?: number | undefined\n /**\n * Abort signal to cancel the operation.\n */\n signal?: AbortSignal | undefined\n}\n\n/**\n * Options for safe read operations that don't throw on errors.\n */\nexport interface SafeReadOptions extends ReadOptions {\n /**\n * Default value to return on read failure.\n * If not provided, `undefined` is returned on error.\n */\n defaultValue?: unknown | undefined\n}\n\n/**\n * Options for write operations with encoding and mode control.\n */\nexport interface WriteOptions extends Abortable {\n /**\n * Character encoding for writing.\n * @default 'utf8'\n */\n encoding?: BufferEncoding | string | undefined\n /**\n * File mode (permissions) to set.\n * Uses standard Unix permission bits (e.g., 0o644).\n * @default 0o666 (read/write for all, respecting umask)\n */\n mode?: number | undefined\n /**\n * File system flag for write behavior.\n * @default 'w' (create or truncate)\n */\n flag?: string | undefined\n}\n\n/**\n * Options for writing JSON files with formatting control.\n */\nexport interface WriteJsonOptions extends WriteOptions {\n /**\n * End-of-line sequence to use.\n * @default '\\n'\n * @example\n * ```ts\n * // Windows-style line endings\n * writeJson('data.json', data, { EOL: '\\r\\n' })\n * ```\n */\n EOL?: string | undefined\n /**\n * Whether to add a final newline at end of file.\n * @default true\n */\n finalEOL?: boolean | undefined\n /**\n * JSON replacer function to transform values during stringification.\n * Same as the second parameter to `JSON.stringify()`.\n */\n replacer?: JsonReviver | undefined\n /**\n * Number of spaces for indentation, or string to use for indentation.\n * @default 2\n * @example\n * ```ts\n * // Use tabs instead of spaces\n * writeJson('data.json', data, { spaces: '\\t' })\n *\n * // Use 4 spaces for indentation\n * writeJson('data.json', data, { spaces: 4 })\n * ```\n */\n spaces?: number | string | undefined\n}\n\nconst defaultRemoveOptions = objectFreeze({\n __proto__: null,\n force: true,\n maxRetries: 3,\n recursive: true,\n retryDelay: 200,\n})\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module 
to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js path module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path as typeof import('path')\n}\n\n/**\n * Process directory entries and filter for directories.\n * Filters entries to include only directories, optionally excluding empty ones.\n * Applies ignore patterns and natural sorting.\n *\n * @param dirents - Directory entries from readdir\n * @param dirname - Parent directory path\n * @param options - Filtering and sorting options\n * @returns Array of directory names, optionally sorted\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction innerReadDirNames(\n dirents: Dirent[],\n dirname: string | undefined,\n options?: ReadDirOptions | undefined,\n): string[] {\n const {\n ignore,\n includeEmpty = true,\n sort = true,\n } = { __proto__: null, ...options } as ReadDirOptions\n const path = getPath()\n const names = dirents\n .filter(\n (d: Dirent) =>\n d.isDirectory() &&\n (includeEmpty ||\n !isDirEmptySync(path.join(dirname || d.parentPath, d.name), {\n ignore,\n })),\n )\n .map((d: Dirent) => d.name)\n return sort ? names.sort(naturalCompare) : names\n}\n\n/**\n * Stringify JSON with custom formatting options.\n * Formats JSON with configurable line endings and indentation.\n *\n * @param json - Value to stringify\n * @param EOL - End-of-line sequence\n * @param finalEOL - Whether to add final newline\n * @param replacer - JSON replacer function\n * @param spaces - Indentation spaces or string\n * @returns Formatted JSON string\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction stringify(\n json: unknown,\n EOL: string,\n finalEOL: boolean,\n replacer: JsonReviver | undefined,\n spaces: number | string = 2,\n): string {\n const EOF = finalEOL ? 
EOL : ''\n const str = JSON.stringify(json, replacer, spaces)\n return `${str.replace(/\\n/g, EOL)}${EOF}`\n}\n\n/**\n * Find a file or directory by traversing up parent directories.\n * Searches from the starting directory upward to the filesystem root.\n * Useful for finding configuration files or project roots.\n *\n * @param name - Filename(s) to search for\n * @param options - Search options including cwd and type filters\n * @returns Normalized absolute path if found, undefined otherwise\n *\n * @example\n * ```ts\n * // Find package.json starting from current directory\n * const pkgPath = await findUp('package.json')\n *\n * // Find any of multiple config files\n * const configPath = await findUp(['.config.js', '.config.json'])\n *\n * // Find a directory instead of file\n * const nodeModules = await findUp('node_modules', { onlyDirectories: true })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function findUp(\n name: string | string[] | readonly string[],\n options?: FindUpOptions | undefined,\n): Promise<string | undefined> {\n const { cwd = process.cwd(), signal = abortSignal } = {\n __proto__: null,\n ...options,\n } as FindUpOptions\n let { onlyDirectories = false, onlyFiles = true } = {\n __proto__: null,\n ...options,\n } as FindUpOptions\n if (onlyDirectories) {\n onlyFiles = false\n }\n if (onlyFiles) {\n onlyDirectories = false\n }\n const fs = getFs()\n const path = getPath()\n let dir = path.resolve(cwd)\n const { root } = path.parse(dir)\n const names = isArray(name) ? name : [name as string]\n while (dir && dir !== root) {\n for (const n of names) {\n if (signal?.aborted) {\n return undefined\n }\n const thePath = path.join(dir, n)\n try {\n // eslint-disable-next-line no-await-in-loop\n const stats = await fs.promises.stat(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n dir = path.dirname(dir)\n }\n return undefined\n}\n\n/**\n * Synchronously find a file or directory by traversing up parent directories.\n * Searches from the starting directory upward to the filesystem root or `stopAt` directory.\n * Useful for finding configuration files or project roots in synchronous contexts.\n *\n * @param name - Filename(s) to search for\n * @param options - Search options including cwd, stopAt, and type filters\n * @returns Normalized absolute path if found, undefined otherwise\n *\n * @example\n * ```ts\n * // Find package.json starting from current directory\n * const pkgPath = findUpSync('package.json')\n *\n * // Find .git directory but stop at home directory\n * const gitPath = findUpSync('.git', {\n * onlyDirectories: true,\n * stopAt: process.env.HOME\n * })\n *\n * // Find any of multiple config files\n * const configPath = findUpSync(['.eslintrc.js', '.eslintrc.json'])\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function findUpSync(\n name: string | string[] | readonly string[],\n options?: FindUpSyncOptions | undefined,\n) {\n const { cwd = process.cwd(), stopAt } = {\n __proto__: null,\n ...options,\n } as FindUpSyncOptions\n let { onlyDirectories = false, onlyFiles = true } = {\n __proto__: null,\n ...options,\n } as FindUpSyncOptions\n if (onlyDirectories) {\n onlyFiles = false\n }\n if (onlyFiles) {\n onlyDirectories = false\n }\n const fs = getFs()\n const path = getPath()\n let dir = path.resolve(cwd)\n const { root } = path.parse(dir)\n const stopDir = stopAt ? 
path.resolve(stopAt) : undefined\n const names = isArray(name) ? name : [name as string]\n while (dir && dir !== root) {\n // Check if we should stop at this directory.\n if (stopDir && dir === stopDir) {\n // Check current directory but don't go up.\n for (const n of names) {\n const thePath = path.join(dir, n)\n try {\n const stats = fs.statSync(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n return undefined\n }\n for (const n of names) {\n const thePath = path.join(dir, n)\n try {\n const stats = fs.statSync(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n dir = path.dirname(dir)\n }\n return undefined\n}\n\n/**\n * Check if a path is a directory asynchronously.\n * Returns `true` for directories, `false` for files or non-existent paths.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a directory, `false` otherwise\n *\n * @example\n * ```ts\n * if (await isDir('./src')) {\n * console.log('src is a directory')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function isDir(filepath: PathLike) {\n return !!(await safeStats(filepath))?.isDirectory()\n}\n\n/**\n * Check if a path is a directory synchronously.\n * Returns `true` for directories, `false` for files or non-existent paths.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a directory, `false` otherwise\n *\n * @example\n * ```ts\n * if (isDirSync('./src')) {\n * console.log('src is a directory')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isDirSync(filepath: PathLike) {\n return !!safeStatsSync(filepath)?.isDirectory()\n}\n\n/**\n * Check if a directory is empty synchronously.\n * A directory is considered empty if it contains no files after applying ignore patterns.\n * Uses glob patterns to filter ignored files.\n *\n * @param dirname - Directory path to check\n * @param options - Options including ignore patterns\n * @returns `true` if directory is empty (or doesn't exist), `false` otherwise\n *\n * @example\n * ```ts\n * // Check if directory is completely empty\n * isDirEmptySync('./build')\n *\n * // Check if directory is empty, ignoring .DS_Store files\n * isDirEmptySync('./cache', { ignore: ['.DS_Store'] })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isDirEmptySync(\n dirname: PathLike,\n options?: IsDirEmptyOptions | undefined,\n) {\n const { ignore = defaultIgnore } = {\n __proto__: null,\n ...options,\n } as IsDirEmptyOptions\n const fs = getFs()\n try {\n const files = fs.readdirSync(dirname)\n const { length } = files\n if (length === 0) {\n return true\n }\n const matcher = getGlobMatcher(\n ignore as string[],\n {\n cwd: pathLikeToString(dirname),\n } as { cwd?: string; dot?: boolean; ignore?: string[]; nocase?: boolean },\n )\n let ignoredCount = 0\n for (let i = 0; i < length; i += 1) {\n const file = files[i]\n if (file && matcher(file)) {\n ignoredCount += 1\n }\n }\n return ignoredCount === length\n } catch {\n // Return false for non-existent paths or other errors.\n return false\n }\n}\n\n/**\n * Check if a path is a symbolic link synchronously.\n * Uses `lstat` to check the link itself, not the target.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a symbolic link, `false` otherwise\n *\n * @example\n * ```ts\n * if 
(isSymLinkSync('./my-link')) {\n * console.log('Path is a symbolic link')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isSymLinkSync(filepath: PathLike) {\n const fs = getFs()\n try {\n return fs.lstatSync(filepath).isSymbolicLink()\n } catch {}\n return false\n}\n\n/**\n * Result of file readability validation.\n * Contains lists of valid and invalid file paths.\n */\nexport interface ValidateFilesResult {\n /**\n * File paths that passed validation and are readable.\n */\n validPaths: string[]\n /**\n * File paths that failed validation (unreadable, permission denied, or non-existent).\n * Common with Yarn Berry PnP virtual filesystem, pnpm symlinks, or filesystem race conditions.\n */\n invalidPaths: string[]\n}\n\n/**\n * Validate that file paths are readable before processing.\n * Filters out files from glob results that cannot be accessed (common with\n * Yarn Berry PnP virtual filesystem, pnpm content-addressable store symlinks,\n * or filesystem race conditions in CI/CD environments).\n *\n * This defensive pattern prevents ENOENT errors when files exist in glob\n * results but are not accessible via standard filesystem operations.\n *\n * @param filepaths - Array of file paths to validate\n * @returns Object with `validPaths` (readable) and `invalidPaths` (unreadable)\n *\n * @example\n * ```ts\n * import { validateFiles } from '@socketsecurity/lib/fs'\n *\n * const files = ['package.json', '.pnp.cjs/virtual-file.json']\n * const { validPaths, invalidPaths } = validateFiles(files)\n *\n * console.log(`Valid: ${validPaths.length}`)\n * console.log(`Invalid: ${invalidPaths.length}`)\n * ```\n *\n * @example\n * ```ts\n * // Typical usage in Socket CLI commands\n * const packagePaths = await getPackageFilesForScan(targets)\n * const { validPaths } = validateFiles(packagePaths)\n * await sdk.uploadManifestFiles(orgSlug, validPaths)\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function validateFiles(\n filepaths: string[] | readonly string[],\n): ValidateFilesResult {\n const fs = getFs()\n const validPaths: string[] = []\n const invalidPaths: string[] = []\n const { R_OK } = fs.constants\n\n for (const filepath of filepaths) {\n try {\n fs.accessSync(filepath, R_OK)\n validPaths.push(filepath)\n } catch {\n invalidPaths.push(filepath)\n }\n }\n\n return { __proto__: null, validPaths, invalidPaths } as ValidateFilesResult\n}\n\n/**\n * Read directory names asynchronously with filtering and sorting.\n * Returns only directory names (not files), with optional filtering for empty directories\n * and glob-based ignore patterns. 
Results are naturally sorted by default.\n *\n * @param dirname - Directory path to read\n * @param options - Options for filtering and sorting\n * @returns Array of directory names, empty array on error\n *\n * @example\n * ```ts\n * // Get all subdirectories, sorted naturally\n * const dirs = await readDirNames('./packages')\n *\n * // Get non-empty directories only\n * const nonEmpty = await readDirNames('./cache', { includeEmpty: false })\n *\n * // Get directories without sorting\n * const unsorted = await readDirNames('./src', { sort: false })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readDirNames(\n dirname: PathLike,\n options?: ReadDirOptions | undefined,\n) {\n const fs = getFs()\n try {\n return innerReadDirNames(\n await fs.promises.readdir(dirname, {\n __proto__: null,\n encoding: 'utf8',\n withFileTypes: true,\n } as ObjectEncodingOptions & { withFileTypes: true }),\n String(dirname),\n options,\n )\n } catch {}\n return []\n}\n\n/**\n * Read directory names synchronously with filtering and sorting.\n * Returns only directory names (not files), with optional filtering for empty directories\n * and glob-based ignore patterns. Results are naturally sorted by default.\n *\n * @param dirname - Directory path to read\n * @param options - Options for filtering and sorting\n * @returns Array of directory names, empty array on error\n *\n * @example\n * ```ts\n * // Get all subdirectories, sorted naturally\n * const dirs = readDirNamesSync('./packages')\n *\n * // Get non-empty directories only, ignoring node_modules\n * const nonEmpty = readDirNamesSync('./src', {\n * includeEmpty: false,\n * ignore: ['node_modules']\n * })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readDirNamesSync(dirname: PathLike, options?: ReadDirOptions) {\n const fs = getFs()\n try {\n return innerReadDirNames(\n fs.readdirSync(dirname, {\n __proto__: null,\n encoding: 'utf8',\n withFileTypes: true,\n } as ObjectEncodingOptions & { withFileTypes: true }),\n String(dirname),\n options,\n )\n } catch {}\n return []\n}\n\n/**\n * Read a file as binary data asynchronously.\n * Returns a Buffer without encoding the contents.\n * Useful for reading images, archives, or other binary formats.\n *\n * @param filepath - Path to file\n * @param options - Read options (encoding is forced to null for binary)\n * @returns Promise resolving to Buffer containing file contents\n *\n * @example\n * ```ts\n * // Read an image file\n * const imageBuffer = await readFileBinary('./image.png')\n *\n * // Read with abort signal\n * const buffer = await readFileBinary('./data.bin', { signal: abortSignal })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readFileBinary(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n // Don't specify encoding to get a Buffer.\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n encoding: null,\n })\n}\n\n/**\n * Read a file as UTF-8 text asynchronously.\n * Returns a string with the file contents decoded as UTF-8.\n * This is the most common way to read text files.\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and abort signal\n * @returns Promise resolving to string containing file contents\n *\n * @example\n * ```ts\n * // Read a text file\n * const content = await readFileUtf8('./README.md')\n *\n * // Read with custom encoding\n * const content = await readFileUtf8('./data.txt', { encoding: 'utf-8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readFileUtf8(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n encoding: 'utf8',\n })\n}\n\n/**\n * Read a file as binary data synchronously.\n * Returns a Buffer without encoding the contents.\n * Useful for reading images, archives, or other binary formats.\n *\n * @param filepath - Path to file\n * @param options - Read options (encoding is forced to null for binary)\n * @returns Buffer containing file contents\n *\n * @example\n * ```ts\n * // Read an image file\n * const imageBuffer = readFileBinarySync('./logo.png')\n *\n * // Read a compressed file\n * const gzipData = readFileBinarySync('./archive.gz')\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readFileBinarySync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n // Don't specify encoding to get a Buffer\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n return fs.readFileSync(filepath, {\n ...opts,\n encoding: null,\n } as ObjectEncodingOptions)\n}\n\n/**\n * Read a file as UTF-8 text synchronously.\n * Returns a string with the file contents decoded as UTF-8.\n * This is the most common way to read text files synchronously.\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding\n * @returns String containing file contents\n *\n * @example\n * ```ts\n * // Read a configuration file\n * const config = readFileUtf8Sync('./config.txt')\n *\n * // Read with custom options\n * const data = readFileUtf8Sync('./data.txt', { encoding: 'utf8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readFileUtf8Sync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n return fs.readFileSync(filepath, {\n ...opts,\n encoding: 'utf8',\n } as ObjectEncodingOptions)\n}\n\n/**\n * Read and parse a JSON file asynchronously.\n * Reads the file as UTF-8 text and parses it as JSON.\n * Optionally accepts a reviver function to transform parsed values.\n *\n * @param filepath - Path to JSON file\n * @param options - Read and parse options\n * @returns Promise resolving to parsed JSON value, or undefined if throws is false and an error occurs\n *\n * @example\n * ```ts\n * // Read and parse package.json\n * const pkg = await readJson('./package.json')\n *\n * // Read JSON with custom reviver\n * const data = await readJson('./data.json', {\n * reviver: (key, value) => {\n * if (key === 'date') return new Date(value)\n * return value\n * }\n * })\n *\n * // Don't throw on parse errors\n * const config = await readJson('./config.json', { throws: false })\n * if (config === undefined) {\n * console.log('Failed to parse config')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readJson(\n filepath: PathLike,\n options?: ReadJsonOptions | string | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { reviver, throws, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as unknown as ReadJsonOptions\n const shouldThrow = throws === undefined || !!throws\n const fs = getFs()\n let content = ''\n try {\n content = await fs.promises.readFile(filepath, {\n __proto__: null,\n encoding: 'utf8',\n ...fsOptions,\n } as unknown as Parameters<typeof fs.promises.readFile>[1] & {\n encoding: string\n })\n } catch (e) {\n if (shouldThrow) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'ENOENT') {\n throw new Error(\n `JSON file not found: ${filepath}\\n` +\n 'Ensure the file exists or create it with the expected structure.',\n { cause: e },\n )\n }\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied reading JSON file: ${filepath}\\n` +\n 'Check file permissions or run with appropriate access.',\n { cause: e },\n )\n }\n throw e\n }\n return undefined\n }\n return jsonParse(content, {\n filepath: String(filepath),\n reviver,\n throws: shouldThrow,\n })\n}\n\n/**\n * Read and parse a JSON file synchronously.\n * Reads the file as UTF-8 text and parses it as JSON.\n * Optionally accepts a reviver function to transform parsed values.\n *\n * @param filepath - Path to JSON file\n * @param options - Read and parse options\n * @returns Parsed JSON value, or undefined if throws is false and an error occurs\n *\n * @example\n * ```ts\n * // Read and parse tsconfig.json\n * const tsconfig = readJsonSync('./tsconfig.json')\n *\n * // Read JSON with custom reviver\n * const data = readJsonSync('./data.json', {\n * reviver: (key, value) => {\n * if (typeof value === 'string' && /^\\d{4}-\\d{2}-\\d{2}/.test(value)) {\n * return new Date(value)\n * }\n * return value\n * }\n * })\n *\n * // Don't throw on parse errors\n * const config = readJsonSync('./config.json', { throws: false })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readJsonSync(\n filepath: PathLike,\n options?: ReadJsonOptions | string | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const { reviver, throws, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as unknown as ReadJsonOptions\n const shouldThrow = throws === undefined || !!throws\n const fs = getFs()\n let content = ''\n try {\n content = fs.readFileSync(filepath, {\n __proto__: null,\n encoding: 'utf8',\n ...fsOptions,\n } as unknown as Parameters<typeof fs.readFileSync>[1] & {\n encoding: string\n })\n } catch (e) {\n if (shouldThrow) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'ENOENT') {\n throw new Error(\n `JSON file not found: ${filepath}\\n` +\n 'Ensure the file exists or create it with the expected structure.',\n { cause: e },\n )\n }\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied reading JSON file: ${filepath}\\n` +\n 'Check file permissions or run with appropriate access.',\n { cause: e },\n )\n }\n throw e\n }\n return undefined\n }\n return jsonParse(content, {\n filepath: String(filepath),\n reviver,\n throws: shouldThrow,\n })\n}\n\n// Cache for resolved allowed directories\nlet _cachedAllowedDirs: string[] | undefined\n\n/**\n * Get resolved allowed directories for safe deletion with lazy caching.\n * These directories are resolved once and cached for the process lifetime.\n */\nfunction getAllowedDirectories(): string[] {\n if (_cachedAllowedDirs === undefined) {\n const path = getPath()\n const {\n getOsTmpDir,\n getSocketCacacheDir,\n getSocketUserDir,\n } = /*@__PURE__*/ require('#lib/paths')\n\n _cachedAllowedDirs = [\n path.resolve(getOsTmpDir()),\n path.resolve(getSocketCacacheDir()),\n path.resolve(getSocketUserDir()),\n ]\n }\n return _cachedAllowedDirs\n}\n\n/**\n * Invalidate the cached allowed directories.\n * Called automatically by the paths/rewire module when paths are overridden in tests.\n *\n * @internal Used for test rewiring\n */\nexport function invalidatePathCache(): void {\n _cachedAllowedDirs = undefined\n}\n\n// Register cache invalidation with the rewire module\nregisterCacheInvalidation(invalidatePathCache)\n\n/**\n * Safely delete a file or directory asynchronously with built-in protections.\n * Uses `del` for safer deletion that prevents removing cwd and above by default.\n * Automatically uses force: true for temp directory, cacache, and ~/.socket subdirectories.\n *\n * @param filepath - Path or array of paths to delete (supports glob patterns)\n * @param options - Deletion options including force, retries, and recursion\n * @throws {Error} When attempting to delete protected paths without force option\n *\n * @example\n * ```ts\n * // Delete a single file\n * await safeDelete('./temp-file.txt')\n *\n * // Delete a directory recursively\n * await safeDelete('./build', { recursive: true })\n *\n * // Delete multiple paths\n * await safeDelete(['./dist', './coverage'])\n *\n * // Delete with custom retry settings\n * await safeDelete('./flaky-dir', { maxRetries: 5, retryDelay: 500 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeDelete(\n filepath: PathLike | PathLike[],\n options?: RemoveOptions | undefined,\n) {\n const del = /*@__PURE__*/ require('./external/del')\n const { deleteAsync } = del\n const opts = { __proto__: null, ...options } as RemoveOptions\n const patterns = isArray(filepath)\n ? 
filepath.map(pathLikeToString)\n : [pathLikeToString(filepath)]\n\n // Check if we're deleting within allowed directories.\n let shouldForce = opts.force !== false\n if (!shouldForce && patterns.length > 0) {\n const path = getPath()\n const allowedDirs = getAllowedDirectories()\n\n // Check if all patterns are within allowed directories.\n const allInAllowedDirs = patterns.every(pattern => {\n const resolvedPath = path.resolve(pattern)\n\n // Check each allowed directory\n for (const allowedDir of allowedDirs) {\n const isInAllowedDir =\n resolvedPath.startsWith(allowedDir + path.sep) ||\n resolvedPath === allowedDir\n const relativePath = path.relative(allowedDir, resolvedPath)\n const isGoingBackward = relativePath.startsWith('..')\n\n if (isInAllowedDir && !isGoingBackward) {\n return true\n }\n }\n\n return false\n })\n\n if (allInAllowedDirs) {\n shouldForce = true\n }\n }\n\n await deleteAsync(patterns, {\n concurrency: opts.maxRetries || defaultRemoveOptions.maxRetries,\n dryRun: false,\n force: shouldForce,\n onlyFiles: false,\n })\n}\n\n/**\n * Safely delete a file or directory synchronously with built-in protections.\n * Uses `del` for safer deletion that prevents removing cwd and above by default.\n * Automatically uses force: true for temp directory, cacache, and ~/.socket subdirectories.\n *\n * @param filepath - Path or array of paths to delete (supports glob patterns)\n * @param options - Deletion options including force, retries, and recursion\n * @throws {Error} When attempting to delete protected paths without force option\n *\n * @example\n * ```ts\n * // Delete a single file\n * safeDeleteSync('./temp-file.txt')\n *\n * // Delete a directory recursively\n * safeDeleteSync('./build', { recursive: true })\n *\n * // Delete multiple paths with globs\n * safeDeleteSync(['./dist/**', './coverage/**'])\n *\n * // Force delete a protected path (use with caution)\n * safeDeleteSync('./important', { force: true })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeDeleteSync(\n filepath: PathLike | PathLike[],\n options?: RemoveOptions | undefined,\n) {\n const del = /*@__PURE__*/ require('./external/del')\n const { deleteSync } = del\n const opts = { __proto__: null, ...options } as RemoveOptions\n const patterns = isArray(filepath)\n ? 
filepath.map(pathLikeToString)\n : [pathLikeToString(filepath)]\n\n // Check if we're deleting within allowed directories.\n let shouldForce = opts.force !== false\n if (!shouldForce && patterns.length > 0) {\n const path = getPath()\n const allowedDirs = getAllowedDirectories()\n\n // Check if all patterns are within allowed directories.\n const allInAllowedDirs = patterns.every(pattern => {\n const resolvedPath = path.resolve(pattern)\n\n // Check each allowed directory\n for (const allowedDir of allowedDirs) {\n const isInAllowedDir =\n resolvedPath.startsWith(allowedDir + path.sep) ||\n resolvedPath === allowedDir\n const relativePath = path.relative(allowedDir, resolvedPath)\n const isGoingBackward = relativePath.startsWith('..')\n\n if (isInAllowedDir && !isGoingBackward) {\n return true\n }\n }\n\n return false\n })\n\n if (allInAllowedDirs) {\n shouldForce = true\n }\n }\n\n deleteSync(patterns, {\n concurrency: opts.maxRetries || defaultRemoveOptions.maxRetries,\n dryRun: false,\n force: shouldForce,\n onlyFiles: false,\n })\n}\n\n/**\n * Safely create a directory asynchronously, ignoring EEXIST errors.\n * This function wraps fs.promises.mkdir and handles the race condition where\n * the directory might already exist, which is common in concurrent code.\n *\n * Unlike fs.promises.mkdir with recursive:true, this function:\n * - Silently ignores EEXIST errors (directory already exists)\n * - Re-throws all other errors (permissions, invalid path, etc.)\n * - Works reliably in multi-process/concurrent scenarios\n *\n * @param path - Directory path to create\n * @param options - Options including recursive and mode settings\n * @returns Promise that resolves when directory is created or already exists\n *\n * @example\n * ```ts\n * // Create a directory, no error if it exists\n * await safeMkdir('./config')\n *\n * // Create nested directories\n * await safeMkdir('./data/cache/temp', { recursive: true })\n *\n * // Create with specific permissions\n * await safeMkdir('./secure', { mode: 0o700 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeMkdir(\n path: PathLike,\n options?: MakeDirectoryOptions | undefined,\n): Promise<void> {\n const fs = getFs()\n try {\n await fs.promises.mkdir(path, options)\n } catch (e: unknown) {\n // Ignore EEXIST error - directory already exists.\n if (\n typeof e === 'object' &&\n e !== null &&\n 'code' in e &&\n e.code !== 'EEXIST'\n ) {\n throw e\n }\n }\n}\n\n/**\n * Safely create a directory synchronously, ignoring EEXIST errors.\n * This function wraps fs.mkdirSync and handles the race condition where\n * the directory might already exist, which is common in concurrent code.\n *\n * Unlike fs.mkdirSync with recursive:true, this function:\n * - Silently ignores EEXIST errors (directory already exists)\n * - Re-throws all other errors (permissions, invalid path, etc.)\n * - Works reliably in multi-process/concurrent scenarios\n *\n * @param path - Directory path to create\n * @param options - Options including recursive and mode settings\n *\n * @example\n * ```ts\n * // Create a directory, no error if it exists\n * safeMkdirSync('./config')\n *\n * // Create nested directories\n * safeMkdirSync('./data/cache/temp', { recursive: true })\n *\n * // Create with specific permissions\n * safeMkdirSync('./secure', { mode: 0o700 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeMkdirSync(\n path: PathLike,\n options?: MakeDirectoryOptions | undefined,\n): void {\n const fs = getFs()\n try {\n fs.mkdirSync(path, options)\n } 
catch (e: unknown) {\n // Ignore EEXIST error - directory already exists.\n if (\n typeof e === 'object' &&\n e !== null &&\n 'code' in e &&\n e.code !== 'EEXIST'\n ) {\n throw e\n }\n }\n}\n\n/**\n * Safely read a file asynchronously, returning undefined on error.\n * Useful when you want to attempt reading a file without handling errors explicitly.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and default value\n * @returns Promise resolving to file contents, or undefined on error\n *\n * @example\n * ```ts\n * // Try to read a file, get undefined if it doesn't exist\n * const content = await safeReadFile('./optional-config.txt')\n * if (content) {\n * console.log('Config found:', content)\n * }\n *\n * // Read with specific encoding\n * const data = await safeReadFile('./data.txt', { encoding: 'utf8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeReadFile(\n filepath: PathLike,\n options?: SafeReadOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n try {\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n } as Abortable)\n } catch {}\n return undefined\n}\n\n/**\n * Safely read a file synchronously, returning undefined on error.\n * Useful when you want to attempt reading a file without handling errors explicitly.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and default value\n * @returns File contents, or undefined on error\n *\n * @example\n * ```ts\n * // Try to read a config file\n * const config = safeReadFileSync('./config.txt')\n * if (config) {\n * console.log('Config loaded successfully')\n * }\n *\n * // Read binary file safely\n * const buffer = safeReadFileSync('./image.png', { encoding: null })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeReadFileSync(\n filepath: PathLike,\n options?: SafeReadOptions | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n try {\n return fs.readFileSync(filepath, {\n __proto__: null,\n ...opts,\n } as ObjectEncodingOptions)\n } catch {}\n return undefined\n}\n\n/**\n * Safely get file stats asynchronously, returning undefined on error.\n * Useful for checking file existence and properties without error handling.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to check\n * @returns Promise resolving to Stats object, or undefined on error\n *\n * @example\n * ```ts\n * // Check if file exists and get its stats\n * const stats = await safeStats('./file.txt')\n * if (stats) {\n * console.log('File size:', stats.size)\n * console.log('Modified:', stats.mtime)\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeStats(filepath: PathLike) {\n const fs = getFs()\n try {\n return await fs.promises.stat(filepath)\n } catch {}\n return undefined\n}\n\n/**\n * Safely get file stats synchronously, returning undefined on error.\n * Useful for checking file existence and properties without error handling.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to check\n * @param options - Read options (currently unused but kept for API consistency)\n * @returns Stats object, or undefined on error\n *\n * @example\n * ```ts\n * // Check if file exists and get its size\n * const stats = safeStatsSync('./file.txt')\n * if (stats) {\n * console.log('File size:', stats.size)\n * console.log('Is directory:', stats.isDirectory())\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeStatsSync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n try {\n return fs.statSync(filepath, {\n __proto__: null,\n throwIfNoEntry: false,\n ...opts,\n } as StatSyncOptions)\n } catch {}\n return undefined\n}\n\n/**\n * Generate a unique filepath by adding number suffix if the path exists.\n * Appends `-1`, `-2`, etc. 
before the file extension until a non-existent path is found.\n * Useful for creating files without overwriting existing ones.\n *\n * @param filepath - Desired file path\n * @returns Normalized unique filepath (original if it doesn't exist, or with number suffix)\n *\n * @example\n * ```ts\n * // If 'report.pdf' exists, returns 'report-1.pdf'\n * const uniquePath = uniqueSync('./report.pdf')\n *\n * // If 'data.json' and 'data-1.json' exist, returns 'data-2.json'\n * const path = uniqueSync('./data.json')\n *\n * // If 'backup' doesn't exist, returns 'backup' unchanged\n * const backupPath = uniqueSync('./backup')\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function uniqueSync(filepath: PathLike): string {\n const fs = getFs()\n const path = getPath()\n const filepathStr = String(filepath)\n\n // If the file doesn't exist, return as is\n if (!fs.existsSync(filepathStr)) {\n return normalizePath(filepathStr)\n }\n\n const dirname = path.dirname(filepathStr)\n const ext = path.extname(filepathStr)\n const basename = path.basename(filepathStr, ext)\n\n let counter = 1\n let uniquePath: string\n do {\n uniquePath = path.join(dirname, `${basename}-${counter}${ext}`)\n counter++\n } while (fs.existsSync(uniquePath))\n\n return normalizePath(uniquePath)\n}\n\n/**\n * Write JSON content to a file asynchronously with formatting.\n * Stringifies the value with configurable indentation and line endings.\n * Automatically adds a final newline by default for POSIX compliance.\n *\n * @param filepath - Path to write to\n * @param jsonContent - Value to stringify and write\n * @param options - Write options including formatting and encoding\n * @returns Promise that resolves when write completes\n *\n * @example\n * ```ts\n * // Write formatted JSON with default 2-space indentation\n * await writeJson('./data.json', { name: 'example', version: '1.0.0' })\n *\n * // Write with custom indentation\n * await writeJson('./config.json', config, { spaces: 4 })\n *\n * // Write with tabs instead of spaces\n * await writeJson('./data.json', data, { spaces: '\\t' })\n *\n * // Write without final newline\n * await writeJson('./inline.json', obj, { finalEOL: false })\n *\n * // Write with Windows line endings\n * await writeJson('./win.json', data, { EOL: '\\r\\n' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function writeJson(\n filepath: PathLike,\n jsonContent: unknown,\n options?: WriteJsonOptions | string,\n): Promise<void> {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { EOL, finalEOL, replacer, spaces, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as WriteJsonOptions\n const fs = getFs()\n const jsonString = stringify(\n jsonContent,\n EOL || '\\n',\n finalEOL !== undefined ? 
finalEOL : true,\n replacer,\n spaces,\n )\n await fs.promises.writeFile(filepath, jsonString, {\n encoding: 'utf8',\n ...fsOptions,\n __proto__: null,\n } as ObjectEncodingOptions)\n}\n\n/**\n * Write JSON content to a file synchronously with formatting.\n * Stringifies the value with configurable indentation and line endings.\n * Automatically adds a final newline by default for POSIX compliance.\n *\n * @param filepath - Path to write to\n * @param jsonContent - Value to stringify and write\n * @param options - Write options including formatting and encoding\n *\n * @example\n * ```ts\n * // Write formatted JSON with default 2-space indentation\n * writeJsonSync('./package.json', pkg)\n *\n * // Write with custom indentation\n * writeJsonSync('./tsconfig.json', tsconfig, { spaces: 4 })\n *\n * // Write with tabs for indentation\n * writeJsonSync('./data.json', data, { spaces: '\\t' })\n *\n * // Write compacted (no indentation)\n * writeJsonSync('./compact.json', data, { spaces: 0 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function writeJsonSync(\n filepath: PathLike,\n jsonContent: unknown,\n options?: WriteJsonOptions | string | undefined,\n): void {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { EOL, finalEOL, replacer, spaces, ...fsOptions } = {\n __proto__: null,\n ...opts,\n }\n const fs = getFs()\n const jsonString = stringify(\n jsonContent,\n EOL || '\\n',\n finalEOL !== undefined ? finalEOL : true,\n replacer,\n spaces,\n )\n fs.writeFileSync(filepath, jsonString, {\n encoding: 'utf8',\n ...fsOptions,\n __proto__: null,\n } as WriteFileOptions)\n}\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBA,qBAA+B;AAE/B,oBAAwB;AAIxB,mBAA8C;AAE9C,kBAA0B;AAC1B,qBAAyC;AACzC,kBAAgD;AAChD,oBAA0C;AAC1C,mBAA+B;AAR/B,MAAM,kBAAc,+BAAe;AA6QnC,MAAM,2BAAuB,6BAAa;AAAA,EACxC,WAAW;AAAA,EACX,OAAO;AAAA,EACP,YAAY;AAAA,EACZ,WAAW;AAAA,EACX,YAAY;AACd,CAAC;AAED,IAAI;AAAA;AASJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAErB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AASJ,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAAA;AAcA,SAAS,kBACP,SACA,SACA,SACU;AACV,QAAM;AAAA,IACJ;AAAA,IACA,eAAe;AAAA,IACf,OAAO;AAAA,EACT,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAClC,QAAM,OAAO,wBAAQ;AACrB,QAAM,QAAQ,QACX;AAAA,IACC,CAAC,MACC,EAAE,YAAY,MACb,gBACC,CAAC,+BAAe,KAAK,KAAK,WAAW,EAAE,YAAY,EAAE,IAAI,GAAG;AAAA,MAC1D;AAAA,IACF,CAAC;AAAA,EACP,EACC,IAAI,CAAC,MAAc,EAAE,IAAI;AAC5B,SAAO,OAAO,MAAM,KAAK,2BAAc,IAAI;AAC7C;AAAA;AAeA,SAAS,UACP,MACA,KACA,UACA,UACA,SAA0B,GAClB;AACR,QAAM,MAAM,WAAW,MAAM;AAC7B,QAAM,MAAM,KAAK,UAAU,MAAM,UAAU,MAAM;AACjD,SAAO,GAAG,IAAI,QAAQ,OAAO,GAAG,CAAC,GAAG,GAAG;AACzC;AAAA;AAwBA,eAAsB,OACpB,MACA,SAC6B;AAC7B,QAAM,EAAE,MAAM,QAAQ,IAAI,GAAG,SAAS,YAAY,IAAI;AAAA,IACpD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,MAAI,EAAE,kBAAkB,OAAO,YAAY,KAAK,IAAI;AAAA,IAClD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,MAAI,iBAAiB;AACnB,gBAAY;AAAA,EACd;AACA,MAAI,WAAW;AACb,sBAAkB;AAAA,EACpB;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,OAAO,wBAAQ;AACrB,MAAI,MAAM,KAAK,QAAQ,GAAG;AAC1B,QAAM,EAAE,KAAK,IAAI,KAAK,MAAM,GAAG;AAC/B,QAAM,YAAQ,uBAAQ,IAAI,IAAI,OAAO,CAAC,IAAc;AACpD,SAAO,OAAO,QAAQ,MAAM;AAC1B,eAAW,KAAK,OAAO;AACrB,UAAI,QAAQ,SAAS;AACnB,eAAO;AAAA,MACT;AACA,YAAM,UAAU,KAAK,KAAK,KAAK,CAAC;AAChC,UAAI;AAEF,cAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,OAAO;AAC5C,YAAI,CAAC,mBAAmB,MAAM,OAAO,GAAG;AACtC,qBAAO,2BAAc,OAAO;AAAA,QAC9B;AACA,YAAI,CAAC,aAAa,MAAM,YAAY,GAAG;AACrC,qBAAO,2BAAc,OAAO;AAAA,QAC9B;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AACA,UAAM,KAAK,QAAQ,GAAG;AAAA,EACxB;AACA,SAAO;AACT;AAAA;AA2BO,SAAS,WACd,MACA,SACA;AACA,QAAM,EAAE,MAAM,QAAQ,IAAI,GAAG,OAAO,IAAI;AAAA,IACtC,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,MAAI,EAAE,kBAAkB,OAAO,YAAY,KAAK,IAAI;AAAA,IAClD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,MAAI,iBAAiB;AACnB,gBAAY;AAAA,EACd;AACA,MAAI,WAAW;AACb,sBAAkB;AAAA,EACpB;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,OAAO,wBAAQ;AACrB,MAAI,MAAM,KAAK,QAAQ,GAAG;AAC1B,QAAM,EAAE,KAAK,IAAI,KAAK,MAAM,GAAG;AAC/B,QAAM,UAAU,SAAS,KAAK,QAAQ,MAAM,IAAI;AAChD,QAAM,YAAQ,uBAAQ,IAAI,IAAI,OAAO,CAAC,IAAc;AACpD,SAAO,OAAO,QAAQ,MAAM;AAE1B,QAAI,WAAW,QAAQ,SAAS;AAE9B,iBAAW,KAAK,OAAO;AACrB,cAAM,UAAU,KAAK,KAAK,KAAK,CAAC;AAChC,YAAI;AACF,gBAAM,QAAQ,GAAG,SAAS,OAAO;AACjC,cAAI,CAAC,mBAAmB,MAAM,OAAO,GAAG;AACtC,uBAAO,2BAAc,OAAO;AAAA,UAC9B;AACA,cAAI,CAAC,aAAa,MAAM,YAAY,GAAG;AACrC,uBAAO,2BAAc,OAAO;AAAA,UAC9B;AAAA,QACF,QAAQ;AAAA,QAAC;AAAA,MACX;AACA,aAAO;AAAA,IACT;AACA,eAAW,KAAK,OAAO;AACrB,YAAM,UAAU,KAAK,KAAK,KAAK,CAAC;AAChC,UAAI;AACF,cAAM,QAAQ,GAAG,SAAS,OAAO;AACjC,YAAI,CAAC,mBAAmB,MAAM,OAAO,GAAG;AACtC,qBAAO,2BAAc,OAAO;AAAA,QAC9B;AACA,YAAI,CAAC,aAAa,MAAM,YAAY,GAAG;AACrC,qBAAO,2BAAc,OAAO;AAAA,QAC9B;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AACA,UAAM,KAAK,QAAQ,GAAG;AAAA,EACxB;AACA,SAAO;AACT;AAAA;AAiBA,eAAsB,MAAM,UAAoB;AAC9C,SAAO,CAAC,EAAE,MAAM,0BAAU,QAAQ,IAAI,YAAY;AACpD;AAAA;AAiBO,SAAS,UAAU,UAAoB;AAC5C,SAAO,CAAC,EAAC,8BAAc,QAAQ,IAAG,YAAY;AAChD;AAAA;AAqBO,SAAS,eACd,SACA,SACA;AACA,QAAM,EAAE,SAAS,2BAAc,IAAI;AAAA,IACjC,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,QAAQ,GAAG,YAAY,OAAO;AACpC,UAAM,EAAE,OAAO,IAAI;AACnB,QAAI,WAAW,GAAG;AAChB,aAAO;AAAA,I
ACT;AACA,UAAM,cAAU;AAAA,MACd;AAAA,MACA;AAAA,QACE,SAAK,8BAAiB,OAAO;AAAA,MAC/B;AAAA,IACF;AACA,QAAI,eAAe;AACnB,aAAS,IAAI,GAAG,IAAI,QAAQ,KAAK,GAAG;AAClC,YAAM,OAAO,MAAM,CAAC;AACpB,UAAI,QAAQ,QAAQ,IAAI,GAAG;AACzB,wBAAgB;AAAA,MAClB;AAAA,IACF;AACA,WAAO,iBAAiB;AAAA,EAC1B,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAAA;AAiBO,SAAS,cAAc,UAAoB;AAChD,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO,GAAG,UAAU,QAAQ,EAAE,eAAe;AAAA,EAC/C,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;AAAA;AAkDO,SAAS,cACd,WACqB;AACrB,QAAM,KAAK,sBAAM;AACjB,QAAM,aAAuB,CAAC;AAC9B,QAAM,eAAyB,CAAC;AAChC,QAAM,EAAE,KAAK,IAAI,GAAG;AAEpB,aAAW,YAAY,WAAW;AAChC,QAAI;AACF,SAAG,WAAW,UAAU,IAAI;AAC5B,iBAAW,KAAK,QAAQ;AAAA,IAC1B,QAAQ;AACN,mBAAa,KAAK,QAAQ;AAAA,IAC5B;AAAA,EACF;AAEA,SAAO,EAAE,WAAW,MAAM,YAAY,aAAa;AACrD;AAAA;AAwBA,eAAsB,aACpB,SACA,SACA;AACA,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO;AAAA,MACL,MAAM,GAAG,SAAS,QAAQ,SAAS;AAAA,QACjC,WAAW;AAAA,QACX,UAAU;AAAA,QACV,eAAe;AAAA,MACjB,CAAoD;AAAA,MACpD,OAAO,OAAO;AAAA,MACd;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAAC;AACT,SAAO,CAAC;AACV;AAAA;AAwBO,SAAS,iBAAiB,SAAmB,SAA0B;AAC5E,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO;AAAA,MACL,GAAG,YAAY,SAAS;AAAA,QACtB,WAAW;AAAA,QACX,UAAU;AAAA,QACV,eAAe;AAAA,MACjB,CAAoD;AAAA,MACpD,OAAO,OAAO;AAAA,MACd;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAAC;AACT,SAAO,CAAC;AACV;AAAA;AAqBA,eAAsB,eACpB,UACA,SACA;AAEA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,SAAO,MAAM,GAAG,SAAS,SAAS,UAAU;AAAA,IAC1C,QAAQ;AAAA,IACR,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAAC;AACH;AAAA;AAqBA,eAAsB,aACpB,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,SAAO,MAAM,GAAG,SAAS,SAAS,UAAU;AAAA,IAC1C,QAAQ;AAAA,IACR,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAAC;AACH;AAAA;AAqBO,SAAS,mBACd,UACA,SACA;AAEA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,SAAO,GAAG,aAAa,UAAU;AAAA,IAC/B,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAA0B;AAC5B;AAAA;AAqBO,SAAS,iBACd,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,SAAO,GAAG,aAAa,UAAU;AAAA,IAC/B,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAA0B;AAC5B;AAAA;AAgCA,eAAsB,SACpB,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,EAAE,SAAS,QAAQ,GAAG,UAAU,IAAI;AAAA,IACxC,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,cAAc,WAAW,UAAa,CAAC,CAAC;AAC9C,QAAM,KAAK,sBAAM;AACjB,MAAI,UAAU;AACd,MAAI;AACF,cAAU,MAAM,GAAG,SAAS,SAAS,UAAU;AAAA,MAC7C,WAAW;AAAA,MACX,UAAU;AAAA,MACV,GAAG;AAAA,IACL,CAEC;AAAA,EACH,SAAS,GAAG;AACV,QAAI,aAAa;AACf,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,UAAU;AACrB,cAAM,IAAI;AAAA,UACR,wBAAwB,QAAQ;AAAA;AAAA,UAEhC,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,wCAAwC,QAAQ;AAAA;AAAA,UAEhD,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM;AAAA,IACR;AACA,WAAO;AAAA,EACT;AACA,aAAO,uBAAU,SAAS;AAAA,IACxB,UAAU,OAAO,QAAQ;AAAA,IACzB;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AACH;AAAA;AA+BO,SAAS,aACd,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,EAAE,SAAS,QAAQ,GAAG,UAAU,IAAI;AAAA,IACxC,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,cAAc,WAAW,UAAa,CAAC,CAAC;AAC9C,QAAM,KAAK,sBAAM;AACjB,MAAI,UAAU;AACd,MAAI;AACF,cAAU,GAAG,aAAa,UAAU;AAAA,MAClC,WAAW;AAAA,MACX,UAAU;AAAA,MACV,GAAG;AAAA,IACL,CAEC;AAAA,EACH,SAAS,GAAG;AACV,QAAI,aAAa;AACf,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,UAAU;AACrB,cAAM,IAAI;AAAA,UACR,wBAAwB,QAAQ;AAAA;AAAA,UAEhC,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,wCAAwC,QAAQ;AAAA;AAAA,UAEhD,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM;AAAA,IACR;AACA,WAAO;AAAA,EACT;AACA,aAAO,uBAAU,SAAS;AAAA,IACxB,UAAU,OAAO,QAAQ;AAAA,IACzB;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AACH;AAGA,IAAI;AAMJ,SAAS,wB
AAkC;AACzC,MAAI,uBAAuB,QAAW;AACpC,UAAM,OAAO,wBAAQ;AACrB,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAkB,QAAQ,YAAY;AAEtC,yBAAqB;AAAA,MACnB,KAAK,QAAQ,YAAY,CAAC;AAAA,MAC1B,KAAK,QAAQ,oBAAoB,CAAC;AAAA,MAClC,KAAK,QAAQ,iBAAiB,CAAC;AAAA,IACjC;AAAA,EACF;AACA,SAAO;AACT;AAQO,SAAS,sBAA4B;AAC1C,uBAAqB;AACvB;AAAA,IAGA,yCAA0B,mBAAmB;AAAA;AA2B7C,eAAsB,WACpB,UACA,SACA;AACA,QAAM,MAAoB,QAAQ,gBAAgB;AAClD,QAAM,EAAE,YAAY,IAAI;AACxB,QAAM,OAAO,EAAE,WAAW,MAAM,GAAG,QAAQ;AAC3C,QAAM,eAAW,uBAAQ,QAAQ,IAC7B,SAAS,IAAI,4BAAgB,IAC7B,KAAC,8BAAiB,QAAQ,CAAC;AAG/B,MAAI,cAAc,KAAK,UAAU;AACjC,MAAI,CAAC,eAAe,SAAS,SAAS,GAAG;AACvC,UAAM,OAAO,wBAAQ;AACrB,UAAM,cAAc,sBAAsB;AAG1C,UAAM,mBAAmB,SAAS,MAAM,aAAW;AACjD,YAAM,eAAe,KAAK,QAAQ,OAAO;AAGzC,iBAAW,cAAc,aAAa;AACpC,cAAM,iBACJ,aAAa,WAAW,aAAa,KAAK,GAAG,KAC7C,iBAAiB;AACnB,cAAM,eAAe,KAAK,SAAS,YAAY,YAAY;AAC3D,cAAM,kBAAkB,aAAa,WAAW,IAAI;AAEpD,YAAI,kBAAkB,CAAC,iBAAiB;AACtC,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO;AAAA,IACT,CAAC;AAED,QAAI,kBAAkB;AACpB,oBAAc;AAAA,IAChB;AAAA,EACF;AAEA,QAAM,YAAY,UAAU;AAAA,IAC1B,aAAa,KAAK,cAAc,qBAAqB;AAAA,IACrD,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,WAAW;AAAA,EACb,CAAC;AACH;AAAA;AA2BO,SAAS,eACd,UACA,SACA;AACA,QAAM,MAAoB,QAAQ,gBAAgB;AAClD,QAAM,EAAE,WAAW,IAAI;AACvB,QAAM,OAAO,EAAE,WAAW,MAAM,GAAG,QAAQ;AAC3C,QAAM,eAAW,uBAAQ,QAAQ,IAC7B,SAAS,IAAI,4BAAgB,IAC7B,KAAC,8BAAiB,QAAQ,CAAC;AAG/B,MAAI,cAAc,KAAK,UAAU;AACjC,MAAI,CAAC,eAAe,SAAS,SAAS,GAAG;AACvC,UAAM,OAAO,wBAAQ;AACrB,UAAM,cAAc,sBAAsB;AAG1C,UAAM,mBAAmB,SAAS,MAAM,aAAW;AACjD,YAAM,eAAe,KAAK,QAAQ,OAAO;AAGzC,iBAAW,cAAc,aAAa;AACpC,cAAM,iBACJ,aAAa,WAAW,aAAa,KAAK,GAAG,KAC7C,iBAAiB;AACnB,cAAM,eAAe,KAAK,SAAS,YAAY,YAAY;AAC3D,cAAM,kBAAkB,aAAa,WAAW,IAAI;AAEpD,YAAI,kBAAkB,CAAC,iBAAiB;AACtC,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO;AAAA,IACT,CAAC;AAED,QAAI,kBAAkB;AACpB,oBAAc;AAAA,IAChB;AAAA,EACF;AAEA,aAAW,UAAU;AAAA,IACnB,aAAa,KAAK,cAAc,qBAAqB;AAAA,IACrD,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,WAAW;AAAA,EACb,CAAC;AACH;AAAA;AA6BA,eAAsB,UACpB,MACA,SACe;AACf,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,GAAG,SAAS,MAAM,MAAM,OAAO;AAAA,EACvC,SAAS,GAAY;AAEnB,QACE,OAAO,MAAM,YACb,MAAM,QACN,UAAU,KACV,EAAE,SAAS,UACX;AACA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAAA;AA4BO,SAAS,cACd,MACA,SACM;AACN,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,OAAG,UAAU,MAAM,OAAO;AAAA,EAC5B,SAAS,GAAY;AAEnB,QACE,OAAO,MAAM,YACb,MAAM,QACN,UAAU,KACV,EAAE,SAAS,UACX;AACA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAAA;AAwBA,eAAsB,aACpB,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO,MAAM,GAAG,SAAS,SAAS,UAAU;AAAA,MAC1C,QAAQ;AAAA,MACR,GAAG;AAAA,IACL,CAAc;AAAA,EAChB,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;AAAA;AAwBO,SAAS,iBACd,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO,GAAG,aAAa,UAAU;AAAA,MAC/B,WAAW;AAAA,MACX,GAAG;AAAA,IACL,CAA0B;AAAA,EAC5B,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;AAAA;AAqBA,eAAsB,UAAU,UAAoB;AAClD,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO,MAAM,GAAG,SAAS,KAAK,QAAQ;AAAA,EACxC,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;AAAA;AAsBO,SAAS,cACd,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO,GAAG,SAAS,UAAU;AAAA,MAC3B,WAAW;AAAA,MACX,gBAAgB;AAAA,MAChB,GAAG;AAAA,IACL,CAAoB;AAAA,EACtB,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;AAAA;AAuBO,SAAS,WAAW,UAA4B;AACrD,QAAM,KAAK,sBAAM;AACjB,QAAM,OAAO,wBAAQ;AACrB,QAAM,cAAc,OAAO,QAAQ;AAGnC,MAAI,CAAC,GAAG,WAAW,WAAW,GAAG;AAC/B,eAAO,2BAAc,WAAW;AAAA,EAClC;AAEA,QAAM,UAAU,KAAK,QAAQ,WAAW;AACxC,QAAM,MAAM,KAAK,QAAQ,WAAW;AACpC,QAAM,WAAW,KAAK,SAAS,aAAa,GAAG;AAE/C,MAAI,UAAU;AACd,MAAI;AACJ,KAAG;AACD,iBAAa,KAAK,KAAK,SAAS,GAAG,QAAQ,IAAI,OAAO,GAAG,GAAG,EAAE;AAC9D;AAAA,EACF,SAAS,GAAG,WAAW,UAAU;AAEjC,aAAO,2BA
Ac,UAAU;AACjC;AAAA;AA+BA,eAAsB,UACpB,UACA,aACA,SACe;AACf,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,EAAE,KAAK,UAAU,UAAU,QAAQ,GAAG,UAAU,IAAI;AAAA,IACxD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,aAAa;AAAA,IACjB;AAAA,IACA,OAAO;AAAA,IACP,aAAa,SAAY,WAAW;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AACA,QAAM,GAAG,SAAS,UAAU,UAAU,YAAY;AAAA,IAChD,UAAU;AAAA,IACV,GAAG;AAAA,IACH,WAAW;AAAA,EACb,CAA0B;AAC5B;AAAA;AA2BO,SAAS,cACd,UACA,aACA,SACM;AACN,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,EAAE,KAAK,UAAU,UAAU,QAAQ,GAAG,UAAU,IAAI;AAAA,IACxD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,aAAa;AAAA,IACjB;AAAA,IACA,OAAO;AAAA,IACP,aAAa,SAAY,WAAW;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AACA,KAAG,cAAc,UAAU,YAAY;AAAA,IACrC,UAAU;AAAA,IACV,GAAG;AAAA,IACH,WAAW;AAAA,EACb,CAAqB;AACvB;",
4
+ "sourcesContent": ["/**\n * @fileoverview File system utilities with cross-platform path handling.\n * Provides enhanced fs operations, glob matching, and directory traversal functions.\n */\n\nimport type { Abortable } from 'events'\n\nimport type {\n Dirent,\n MakeDirectoryOptions,\n ObjectEncodingOptions,\n OpenMode,\n PathLike,\n StatSyncOptions,\n WriteFileOptions,\n} from 'fs'\n\nimport { getAbortSignal } from '#constants/process'\n\nimport { isArray } from './arrays'\n\nconst abortSignal = getAbortSignal()\n\nimport { defaultIgnore, getGlobMatcher } from './globs'\nimport type { JsonReviver } from './json'\nimport { jsonParse } from './json'\nimport { objectFreeze, type Remap } from './objects'\nimport { normalizePath, pathLikeToString } from './path'\nimport { registerCacheInvalidation } from './paths/rewire'\nimport { naturalCompare } from './sorts'\n\n/**\n * Supported text encodings for Node.js Buffers.\n * Includes ASCII, UTF-8/16, base64, binary, and hexadecimal encodings.\n */\nexport type BufferEncoding =\n | 'ascii'\n | 'utf8'\n | 'utf-8'\n | 'utf16le'\n | 'ucs2'\n | 'ucs-2'\n | 'base64'\n | 'base64url'\n | 'latin1'\n | 'binary'\n | 'hex'\n\n/**\n * Represents any valid JSON content type.\n */\nexport type JsonContent = unknown\n\n/**\n * Options for asynchronous `findUp` operations.\n */\nexport interface FindUpOptions {\n /**\n * Starting directory for the search.\n * @default process.cwd()\n */\n cwd?: string | undefined\n /**\n * Only match directories, not files.\n * @default false\n */\n onlyDirectories?: boolean | undefined\n /**\n * Only match files, not directories.\n * @default true\n */\n onlyFiles?: boolean | undefined\n /**\n * Abort signal to cancel the search operation.\n */\n signal?: AbortSignal | undefined\n}\n\n/**\n * Options for synchronous `findUpSync` operations.\n */\nexport interface FindUpSyncOptions {\n /**\n * Starting directory for the search.\n * @default process.cwd()\n */\n cwd?: string | undefined\n /**\n * Directory to stop searching at (inclusive).\n * When provided, search will stop at this directory even if the root hasn't been reached.\n */\n stopAt?: string | undefined\n /**\n * Only match directories, not files.\n * @default false\n */\n onlyDirectories?: boolean | undefined\n /**\n * Only match files, not directories.\n * @default true\n */\n onlyFiles?: boolean | undefined\n}\n\n/**\n * Options for checking if a directory is empty.\n */\nexport interface IsDirEmptyOptions {\n /**\n * Glob patterns for files to ignore when checking emptiness.\n * Files matching these patterns are not counted.\n * @default defaultIgnore\n */\n ignore?: string[] | readonly string[] | undefined\n}\n\n/**\n * Options for read operations with abort support.\n */\nexport interface ReadOptions extends Abortable {\n /**\n * Character encoding to use for reading.\n * @default 'utf8'\n */\n encoding?: BufferEncoding | string | undefined\n /**\n * File system flag for reading behavior.\n * @default 'r'\n */\n flag?: string | undefined\n}\n\n/**\n * Options for reading directories with filtering and sorting.\n */\nexport interface ReadDirOptions {\n /**\n * Glob patterns for directories to ignore.\n * @default undefined\n */\n ignore?: string[] | readonly string[] | undefined\n /**\n * Include empty directories in results.\n * When `false`, empty directories are filtered out.\n * @default true\n */\n includeEmpty?: boolean | undefined\n /**\n * Sort directory names alphabetically using natural sort order.\n * @default true\n */\n sort?: boolean | 
undefined\n}\n\n/**\n * Options for reading files with encoding and abort support.\n * Can be either an options object, an encoding string, or null.\n */\nexport type ReadFileOptions =\n | Remap<\n ObjectEncodingOptions &\n Abortable & {\n flag?: OpenMode | undefined\n }\n >\n | BufferEncoding\n | null\n\n/**\n * Options for reading and parsing JSON files.\n */\nexport type ReadJsonOptions = Remap<\n ReadFileOptions & {\n /**\n * Whether to throw errors on parse failure.\n * When `false`, returns `undefined` on error instead of throwing.\n * @default true\n */\n throws?: boolean | undefined\n /**\n * JSON reviver function to transform parsed values.\n * Same as the second parameter to `JSON.parse()`.\n */\n reviver?: Parameters<typeof JSON.parse>[1] | undefined\n }\n>\n\n/**\n * Options for file/directory removal operations.\n */\nexport interface RemoveOptions {\n /**\n * Force deletion even outside normally safe directories.\n * When `false`, prevents deletion outside temp, cacache, and ~/.socket.\n * @default true for safe directories, false otherwise\n */\n force?: boolean | undefined\n /**\n * Maximum number of retry attempts on failure.\n * @default 3\n */\n maxRetries?: number | undefined\n /**\n * Recursively delete directories and contents.\n * @default true\n */\n recursive?: boolean | undefined\n /**\n * Delay in milliseconds between retry attempts.\n * @default 200\n */\n retryDelay?: number | undefined\n /**\n * Abort signal to cancel the operation.\n */\n signal?: AbortSignal | undefined\n}\n\n/**\n * Options for safe read operations that don't throw on errors.\n */\nexport interface SafeReadOptions extends ReadOptions {\n /**\n * Default value to return on read failure.\n * If not provided, `undefined` is returned on error.\n */\n defaultValue?: unknown | undefined\n}\n\n/**\n * Options for write operations with encoding and mode control.\n */\nexport interface WriteOptions extends Abortable {\n /**\n * Character encoding for writing.\n * @default 'utf8'\n */\n encoding?: BufferEncoding | string | undefined\n /**\n * File mode (permissions) to set.\n * Uses standard Unix permission bits (e.g., 0o644).\n * @default 0o666 (read/write for all, respecting umask)\n */\n mode?: number | undefined\n /**\n * File system flag for write behavior.\n * @default 'w' (create or truncate)\n */\n flag?: string | undefined\n}\n\n/**\n * Options for writing JSON files with formatting control.\n */\nexport interface WriteJsonOptions extends WriteOptions {\n /**\n * End-of-line sequence to use.\n * @default '\\n'\n * @example\n * ```ts\n * // Windows-style line endings\n * writeJson('data.json', data, { EOL: '\\r\\n' })\n * ```\n */\n EOL?: string | undefined\n /**\n * Whether to add a final newline at end of file.\n * @default true\n */\n finalEOL?: boolean | undefined\n /**\n * JSON replacer function to transform values during stringification.\n * Same as the second parameter to `JSON.stringify()`.\n */\n replacer?: JsonReviver | undefined\n /**\n * Number of spaces for indentation, or string to use for indentation.\n * @default 2\n * @example\n * ```ts\n * // Use tabs instead of spaces\n * writeJson('data.json', data, { spaces: '\\t' })\n *\n * // Use 4 spaces for indentation\n * writeJson('data.json', data, { spaces: 4 })\n * ```\n */\n spaces?: number | string | undefined\n}\n\nconst defaultRemoveOptions = objectFreeze({\n __proto__: null,\n force: true,\n maxRetries: 3,\n recursive: true,\n retryDelay: 200,\n})\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module 
to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js path module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path as typeof import('path')\n}\n\n/**\n * Process directory entries and filter for directories.\n * Filters entries to include only directories, optionally excluding empty ones.\n * Applies ignore patterns and natural sorting.\n *\n * @param dirents - Directory entries from readdir\n * @param dirname - Parent directory path\n * @param options - Filtering and sorting options\n * @returns Array of directory names, optionally sorted\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction innerReadDirNames(\n dirents: Dirent[],\n dirname: string | undefined,\n options?: ReadDirOptions | undefined,\n): string[] {\n const {\n ignore,\n includeEmpty = true,\n sort = true,\n } = { __proto__: null, ...options } as ReadDirOptions\n const path = getPath()\n const names = dirents\n .filter(\n (d: Dirent) =>\n d.isDirectory() &&\n (includeEmpty ||\n !isDirEmptySync(path.join(dirname || d.parentPath, d.name), {\n ignore,\n })),\n )\n .map((d: Dirent) => d.name)\n return sort ? names.sort(naturalCompare) : names\n}\n\n/**\n * Stringify JSON with custom formatting options.\n * Formats JSON with configurable line endings and indentation.\n *\n * @param json - Value to stringify\n * @param EOL - End-of-line sequence\n * @param finalEOL - Whether to add final newline\n * @param replacer - JSON replacer function\n * @param spaces - Indentation spaces or string\n * @returns Formatted JSON string\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction stringify(\n json: unknown,\n EOL: string,\n finalEOL: boolean,\n replacer: JsonReviver | undefined,\n spaces: number | string = 2,\n): string {\n const EOF = finalEOL ? 
EOL : ''\n const str = JSON.stringify(json, replacer, spaces)\n return `${str.replace(/\\n/g, EOL)}${EOF}`\n}\n\n/**\n * Find a file or directory by traversing up parent directories.\n * Searches from the starting directory upward to the filesystem root.\n * Useful for finding configuration files or project roots.\n *\n * @param name - Filename(s) to search for\n * @param options - Search options including cwd and type filters\n * @returns Normalized absolute path if found, undefined otherwise\n *\n * @example\n * ```ts\n * // Find package.json starting from current directory\n * const pkgPath = await findUp('package.json')\n *\n * // Find any of multiple config files\n * const configPath = await findUp(['.config.js', '.config.json'])\n *\n * // Find a directory instead of file\n * const nodeModules = await findUp('node_modules', { onlyDirectories: true })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function findUp(\n name: string | string[] | readonly string[],\n options?: FindUpOptions | undefined,\n): Promise<string | undefined> {\n const { cwd = process.cwd(), signal = abortSignal } = {\n __proto__: null,\n ...options,\n } as FindUpOptions\n let { onlyDirectories = false, onlyFiles = true } = {\n __proto__: null,\n ...options,\n } as FindUpOptions\n if (onlyDirectories) {\n onlyFiles = false\n }\n if (onlyFiles) {\n onlyDirectories = false\n }\n const fs = getFs()\n const path = getPath()\n let dir = path.resolve(cwd)\n const { root } = path.parse(dir)\n const names = isArray(name) ? name : [name as string]\n while (dir && dir !== root) {\n for (const n of names) {\n if (signal?.aborted) {\n return undefined\n }\n const thePath = path.join(dir, n)\n try {\n // eslint-disable-next-line no-await-in-loop\n const stats = await fs.promises.stat(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n dir = path.dirname(dir)\n }\n return undefined\n}\n\n/**\n * Synchronously find a file or directory by traversing up parent directories.\n * Searches from the starting directory upward to the filesystem root or `stopAt` directory.\n * Useful for finding configuration files or project roots in synchronous contexts.\n *\n * @param name - Filename(s) to search for\n * @param options - Search options including cwd, stopAt, and type filters\n * @returns Normalized absolute path if found, undefined otherwise\n *\n * @example\n * ```ts\n * // Find package.json starting from current directory\n * const pkgPath = findUpSync('package.json')\n *\n * // Find .git directory but stop at home directory\n * const gitPath = findUpSync('.git', {\n * onlyDirectories: true,\n * stopAt: process.env.HOME\n * })\n *\n * // Find any of multiple config files\n * const configPath = findUpSync(['.eslintrc.js', '.eslintrc.json'])\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function findUpSync(\n name: string | string[] | readonly string[],\n options?: FindUpSyncOptions | undefined,\n) {\n const { cwd = process.cwd(), stopAt } = {\n __proto__: null,\n ...options,\n } as FindUpSyncOptions\n let { onlyDirectories = false, onlyFiles = true } = {\n __proto__: null,\n ...options,\n } as FindUpSyncOptions\n if (onlyDirectories) {\n onlyFiles = false\n }\n if (onlyFiles) {\n onlyDirectories = false\n }\n const fs = getFs()\n const path = getPath()\n let dir = path.resolve(cwd)\n const { root } = path.parse(dir)\n const stopDir = stopAt ? 
path.resolve(stopAt) : undefined\n const names = isArray(name) ? name : [name as string]\n while (dir && dir !== root) {\n // Check if we should stop at this directory.\n if (stopDir && dir === stopDir) {\n // Check current directory but don't go up.\n for (const n of names) {\n const thePath = path.join(dir, n)\n try {\n const stats = fs.statSync(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n return undefined\n }\n for (const n of names) {\n const thePath = path.join(dir, n)\n try {\n const stats = fs.statSync(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n dir = path.dirname(dir)\n }\n return undefined\n}\n\n/**\n * Check if a path is a directory asynchronously.\n * Returns `true` for directories, `false` for files or non-existent paths.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a directory, `false` otherwise\n *\n * @example\n * ```ts\n * if (await isDir('./src')) {\n * console.log('src is a directory')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function isDir(filepath: PathLike) {\n return !!(await safeStats(filepath))?.isDirectory()\n}\n\n/**\n * Check if a path is a directory synchronously.\n * Returns `true` for directories, `false` for files or non-existent paths.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a directory, `false` otherwise\n *\n * @example\n * ```ts\n * if (isDirSync('./src')) {\n * console.log('src is a directory')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isDirSync(filepath: PathLike) {\n return !!safeStatsSync(filepath)?.isDirectory()\n}\n\n/**\n * Check if a directory is empty synchronously.\n * A directory is considered empty if it contains no files after applying ignore patterns.\n * Uses glob patterns to filter ignored files.\n *\n * @param dirname - Directory path to check\n * @param options - Options including ignore patterns\n * @returns `true` if directory is empty (or doesn't exist), `false` otherwise\n *\n * @example\n * ```ts\n * // Check if directory is completely empty\n * isDirEmptySync('./build')\n *\n * // Check if directory is empty, ignoring .DS_Store files\n * isDirEmptySync('./cache', { ignore: ['.DS_Store'] })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isDirEmptySync(\n dirname: PathLike,\n options?: IsDirEmptyOptions | undefined,\n) {\n const { ignore = defaultIgnore } = {\n __proto__: null,\n ...options,\n } as IsDirEmptyOptions\n const fs = getFs()\n try {\n const files = fs.readdirSync(dirname)\n const { length } = files\n if (length === 0) {\n return true\n }\n const matcher = getGlobMatcher(\n ignore as string[],\n {\n cwd: pathLikeToString(dirname),\n } as { cwd?: string; dot?: boolean; ignore?: string[]; nocase?: boolean },\n )\n let ignoredCount = 0\n for (let i = 0; i < length; i += 1) {\n const file = files[i]\n if (file && matcher(file)) {\n ignoredCount += 1\n }\n }\n return ignoredCount === length\n } catch {\n // Return false for non-existent paths or other errors.\n return false\n }\n}\n\n/**\n * Check if a path is a symbolic link synchronously.\n * Uses `lstat` to check the link itself, not the target.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a symbolic link, `false` otherwise\n *\n * @example\n * ```ts\n * if 
(isSymLinkSync('./my-link')) {\n * console.log('Path is a symbolic link')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isSymLinkSync(filepath: PathLike) {\n const fs = getFs()\n try {\n return fs.lstatSync(filepath).isSymbolicLink()\n } catch {}\n return false\n}\n\n/**\n * Result of file readability validation.\n * Contains lists of valid and invalid file paths.\n */\nexport interface ValidateFilesResult {\n /**\n * File paths that passed validation and are readable.\n */\n validPaths: string[]\n /**\n * File paths that failed validation (unreadable, permission denied, or non-existent).\n * Common with Yarn Berry PnP virtual filesystem, pnpm symlinks, or filesystem race conditions.\n */\n invalidPaths: string[]\n}\n\n/**\n * Validate that file paths are readable before processing.\n * Filters out files from glob results that cannot be accessed (common with\n * Yarn Berry PnP virtual filesystem, pnpm content-addressable store symlinks,\n * or filesystem race conditions in CI/CD environments).\n *\n * This defensive pattern prevents ENOENT errors when files exist in glob\n * results but are not accessible via standard filesystem operations.\n *\n * @param filepaths - Array of file paths to validate\n * @returns Object with `validPaths` (readable) and `invalidPaths` (unreadable)\n *\n * @example\n * ```ts\n * import { validateFiles } from '@socketsecurity/lib/fs'\n *\n * const files = ['package.json', '.pnp.cjs/virtual-file.json']\n * const { validPaths, invalidPaths } = validateFiles(files)\n *\n * console.log(`Valid: ${validPaths.length}`)\n * console.log(`Invalid: ${invalidPaths.length}`)\n * ```\n *\n * @example\n * ```ts\n * // Typical usage in Socket CLI commands\n * const packagePaths = await getPackageFilesForScan(targets)\n * const { validPaths } = validateFiles(packagePaths)\n * await sdk.uploadManifestFiles(orgSlug, validPaths)\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function validateFiles(\n filepaths: string[] | readonly string[],\n): ValidateFilesResult {\n const fs = getFs()\n const validPaths: string[] = []\n const invalidPaths: string[] = []\n const { R_OK } = fs.constants\n\n for (const filepath of filepaths) {\n try {\n fs.accessSync(filepath, R_OK)\n validPaths.push(filepath)\n } catch {\n invalidPaths.push(filepath)\n }\n }\n\n return { __proto__: null, validPaths, invalidPaths } as ValidateFilesResult\n}\n\n/**\n * Read directory names asynchronously with filtering and sorting.\n * Returns only directory names (not files), with optional filtering for empty directories\n * and glob-based ignore patterns. 
Results are naturally sorted by default.\n *\n * @param dirname - Directory path to read\n * @param options - Options for filtering and sorting\n * @returns Array of directory names, empty array on error\n *\n * @example\n * ```ts\n * // Get all subdirectories, sorted naturally\n * const dirs = await readDirNames('./packages')\n *\n * // Get non-empty directories only\n * const nonEmpty = await readDirNames('./cache', { includeEmpty: false })\n *\n * // Get directories without sorting\n * const unsorted = await readDirNames('./src', { sort: false })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readDirNames(\n dirname: PathLike,\n options?: ReadDirOptions | undefined,\n) {\n const fs = getFs()\n try {\n return innerReadDirNames(\n await fs.promises.readdir(dirname, {\n __proto__: null,\n encoding: 'utf8',\n withFileTypes: true,\n } as ObjectEncodingOptions & { withFileTypes: true }),\n String(dirname),\n options,\n )\n } catch {}\n return []\n}\n\n/**\n * Read directory names synchronously with filtering and sorting.\n * Returns only directory names (not files), with optional filtering for empty directories\n * and glob-based ignore patterns. Results are naturally sorted by default.\n *\n * @param dirname - Directory path to read\n * @param options - Options for filtering and sorting\n * @returns Array of directory names, empty array on error\n *\n * @example\n * ```ts\n * // Get all subdirectories, sorted naturally\n * const dirs = readDirNamesSync('./packages')\n *\n * // Get non-empty directories only, ignoring node_modules\n * const nonEmpty = readDirNamesSync('./src', {\n * includeEmpty: false,\n * ignore: ['node_modules']\n * })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readDirNamesSync(dirname: PathLike, options?: ReadDirOptions) {\n const fs = getFs()\n try {\n return innerReadDirNames(\n fs.readdirSync(dirname, {\n __proto__: null,\n encoding: 'utf8',\n withFileTypes: true,\n } as ObjectEncodingOptions & { withFileTypes: true }),\n String(dirname),\n options,\n )\n } catch {}\n return []\n}\n\n/**\n * Read a file as binary data asynchronously.\n * Returns a Buffer without encoding the contents.\n * Useful for reading images, archives, or other binary formats.\n *\n * @param filepath - Path to file\n * @param options - Read options (encoding is forced to null for binary)\n * @returns Promise resolving to Buffer containing file contents\n *\n * @example\n * ```ts\n * // Read an image file\n * const imageBuffer = await readFileBinary('./image.png')\n *\n * // Read with abort signal\n * const buffer = await readFileBinary('./data.bin', { signal: abortSignal })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readFileBinary(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n // Don't specify encoding to get a Buffer.\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n encoding: null,\n })\n}\n\n/**\n * Read a file as UTF-8 text asynchronously.\n * Returns a string with the file contents decoded as UTF-8.\n * This is the most common way to read text files.\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and abort signal\n * @returns Promise resolving to string containing file contents\n *\n * @example\n * ```ts\n * // Read a text file\n * const content = await readFileUtf8('./README.md')\n *\n * // Read with custom encoding\n * const content = await readFileUtf8('./data.txt', { encoding: 'utf-8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readFileUtf8(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n encoding: 'utf8',\n })\n}\n\n/**\n * Read a file as binary data synchronously.\n * Returns a Buffer without encoding the contents.\n * Useful for reading images, archives, or other binary formats.\n *\n * @param filepath - Path to file\n * @param options - Read options (encoding is forced to null for binary)\n * @returns Buffer containing file contents\n *\n * @example\n * ```ts\n * // Read an image file\n * const imageBuffer = readFileBinarySync('./logo.png')\n *\n * // Read a compressed file\n * const gzipData = readFileBinarySync('./archive.gz')\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readFileBinarySync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n // Don't specify encoding to get a Buffer\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n return fs.readFileSync(filepath, {\n ...opts,\n encoding: null,\n } as ObjectEncodingOptions)\n}\n\n/**\n * Read a file as UTF-8 text synchronously.\n * Returns a string with the file contents decoded as UTF-8.\n * This is the most common way to read text files synchronously.\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding\n * @returns String containing file contents\n *\n * @example\n * ```ts\n * // Read a configuration file\n * const config = readFileUtf8Sync('./config.txt')\n *\n * // Read with custom options\n * const data = readFileUtf8Sync('./data.txt', { encoding: 'utf8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readFileUtf8Sync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n return fs.readFileSync(filepath, {\n ...opts,\n encoding: 'utf8',\n } as ObjectEncodingOptions)\n}\n\n/**\n * Read and parse a JSON file asynchronously.\n * Reads the file as UTF-8 text and parses it as JSON.\n * Optionally accepts a reviver function to transform parsed values.\n *\n * @param filepath - Path to JSON file\n * @param options - Read and parse options\n * @returns Promise resolving to parsed JSON value, or undefined if throws is false and an error occurs\n *\n * @example\n * ```ts\n * // Read and parse package.json\n * const pkg = await readJson('./package.json')\n *\n * // Read JSON with custom reviver\n * const data = await readJson('./data.json', {\n * reviver: (key, value) => {\n * if (key === 'date') return new Date(value)\n * return value\n * }\n * })\n *\n * // Don't throw on parse errors\n * const config = await readJson('./config.json', { throws: false })\n * if (config === undefined) {\n * console.log('Failed to parse config')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readJson(\n filepath: PathLike,\n options?: ReadJsonOptions | string | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { reviver, throws, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as unknown as ReadJsonOptions\n const shouldThrow = throws === undefined || !!throws\n const fs = getFs()\n let content = ''\n try {\n content = await fs.promises.readFile(filepath, {\n __proto__: null,\n encoding: 'utf8',\n ...fsOptions,\n } as unknown as Parameters<typeof fs.promises.readFile>[1] & {\n encoding: string\n })\n } catch (e) {\n if (shouldThrow) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'ENOENT') {\n throw new Error(\n `JSON file not found: ${filepath}\\n` +\n 'Ensure the file exists or create it with the expected structure.',\n { cause: e },\n )\n }\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied reading JSON file: ${filepath}\\n` +\n 'Check file permissions or run with appropriate access.',\n { cause: e },\n )\n }\n throw e\n }\n return undefined\n }\n return jsonParse(content, {\n filepath: String(filepath),\n reviver,\n throws: shouldThrow,\n })\n}\n\n/**\n * Read and parse a JSON file synchronously.\n * Reads the file as UTF-8 text and parses it as JSON.\n * Optionally accepts a reviver function to transform parsed values.\n *\n * @param filepath - Path to JSON file\n * @param options - Read and parse options\n * @returns Parsed JSON value, or undefined if throws is false and an error occurs\n *\n * @example\n * ```ts\n * // Read and parse tsconfig.json\n * const tsconfig = readJsonSync('./tsconfig.json')\n *\n * // Read JSON with custom reviver\n * const data = readJsonSync('./data.json', {\n * reviver: (key, value) => {\n * if (typeof value === 'string' && /^\\d{4}-\\d{2}-\\d{2}/.test(value)) {\n * return new Date(value)\n * }\n * return value\n * }\n * })\n *\n * // Don't throw on parse errors\n * const config = readJsonSync('./config.json', { throws: false })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readJsonSync(\n filepath: PathLike,\n options?: ReadJsonOptions | string | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const { reviver, throws, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as unknown as ReadJsonOptions\n const shouldThrow = throws === undefined || !!throws\n const fs = getFs()\n let content = ''\n try {\n content = fs.readFileSync(filepath, {\n __proto__: null,\n encoding: 'utf8',\n ...fsOptions,\n } as unknown as Parameters<typeof fs.readFileSync>[1] & {\n encoding: string\n })\n } catch (e) {\n if (shouldThrow) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'ENOENT') {\n throw new Error(\n `JSON file not found: ${filepath}\\n` +\n 'Ensure the file exists or create it with the expected structure.',\n { cause: e },\n )\n }\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied reading JSON file: ${filepath}\\n` +\n 'Check file permissions or run with appropriate access.',\n { cause: e },\n )\n }\n throw e\n }\n return undefined\n }\n return jsonParse(content, {\n filepath: String(filepath),\n reviver,\n throws: shouldThrow,\n })\n}\n\n// Cache for resolved allowed directories\nlet _cachedAllowedDirs: string[] | undefined\n\n/**\n * Get resolved allowed directories for safe deletion with lazy caching.\n * These directories are resolved once and cached for the process lifetime.\n */\nfunction getAllowedDirectories(): string[] {\n if (_cachedAllowedDirs === undefined) {\n const path = getPath()\n const {\n getOsTmpDir,\n getSocketCacacheDir,\n getSocketUserDir,\n } = /*@__PURE__*/ require('#lib/paths')\n\n _cachedAllowedDirs = [\n path.resolve(getOsTmpDir()),\n path.resolve(getSocketCacacheDir()),\n path.resolve(getSocketUserDir()),\n ]\n }\n return _cachedAllowedDirs\n}\n\n/**\n * Invalidate the cached allowed directories.\n * Called automatically by the paths/rewire module when paths are overridden in tests.\n *\n * @internal Used for test rewiring\n */\nexport function invalidatePathCache(): void {\n _cachedAllowedDirs = undefined\n}\n\n// Register cache invalidation with the rewire module\nregisterCacheInvalidation(invalidatePathCache)\n\n/**\n * Safely delete a file or directory asynchronously with built-in protections.\n * Uses `del` for safer deletion that prevents removing cwd and above by default.\n * Automatically uses force: true for temp directory, cacache, and ~/.socket subdirectories.\n *\n * @param filepath - Path or array of paths to delete (supports glob patterns)\n * @param options - Deletion options including force, retries, and recursion\n * @throws {Error} When attempting to delete protected paths without force option\n *\n * @example\n * ```ts\n * // Delete a single file\n * await safeDelete('./temp-file.txt')\n *\n * // Delete a directory recursively\n * await safeDelete('./build', { recursive: true })\n *\n * // Delete multiple paths\n * await safeDelete(['./dist', './coverage'])\n *\n * // Delete with custom retry settings\n * await safeDelete('./flaky-dir', { maxRetries: 5, retryDelay: 500 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeDelete(\n filepath: PathLike | PathLike[],\n options?: RemoveOptions | undefined,\n) {\n const del = /*@__PURE__*/ require('./external/del')\n const { deleteAsync } = del\n const opts = { __proto__: null, ...options } as RemoveOptions\n const patterns = isArray(filepath)\n ? 
filepath.map(pathLikeToString)\n : [pathLikeToString(filepath)]\n\n // Check if we're deleting within allowed directories.\n let shouldForce = opts.force !== false\n if (!shouldForce && patterns.length > 0) {\n const path = getPath()\n const allowedDirs = getAllowedDirectories()\n\n // Check if all patterns are within allowed directories.\n const allInAllowedDirs = patterns.every(pattern => {\n const resolvedPath = path.resolve(pattern)\n\n // Check each allowed directory\n for (const allowedDir of allowedDirs) {\n const isInAllowedDir =\n resolvedPath.startsWith(allowedDir + path.sep) ||\n resolvedPath === allowedDir\n const relativePath = path.relative(allowedDir, resolvedPath)\n const isGoingBackward = relativePath.startsWith('..')\n\n if (isInAllowedDir && !isGoingBackward) {\n return true\n }\n }\n\n return false\n })\n\n if (allInAllowedDirs) {\n shouldForce = true\n }\n }\n\n await deleteAsync(patterns, {\n concurrency: opts.maxRetries || defaultRemoveOptions.maxRetries,\n dryRun: false,\n force: shouldForce,\n onlyFiles: false,\n })\n}\n\n/**\n * Safely delete a file or directory synchronously with built-in protections.\n * Uses `del` for safer deletion that prevents removing cwd and above by default.\n * Automatically uses force: true for temp directory, cacache, and ~/.socket subdirectories.\n *\n * @param filepath - Path or array of paths to delete (supports glob patterns)\n * @param options - Deletion options including force, retries, and recursion\n * @throws {Error} When attempting to delete protected paths without force option\n *\n * @example\n * ```ts\n * // Delete a single file\n * safeDeleteSync('./temp-file.txt')\n *\n * // Delete a directory recursively\n * safeDeleteSync('./build', { recursive: true })\n *\n * // Delete multiple paths with globs\n * safeDeleteSync(['./dist/**', './coverage/**'])\n *\n * // Force delete a protected path (use with caution)\n * safeDeleteSync('./important', { force: true })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeDeleteSync(\n filepath: PathLike | PathLike[],\n options?: RemoveOptions | undefined,\n) {\n const del = /*@__PURE__*/ require('./external/del')\n const { deleteSync } = del\n const opts = { __proto__: null, ...options } as RemoveOptions\n const patterns = isArray(filepath)\n ? 
filepath.map(pathLikeToString)\n : [pathLikeToString(filepath)]\n\n // Check if we're deleting within allowed directories.\n let shouldForce = opts.force !== false\n if (!shouldForce && patterns.length > 0) {\n const path = getPath()\n const allowedDirs = getAllowedDirectories()\n\n // Check if all patterns are within allowed directories.\n const allInAllowedDirs = patterns.every(pattern => {\n const resolvedPath = path.resolve(pattern)\n\n // Check each allowed directory\n for (const allowedDir of allowedDirs) {\n const isInAllowedDir =\n resolvedPath.startsWith(allowedDir + path.sep) ||\n resolvedPath === allowedDir\n const relativePath = path.relative(allowedDir, resolvedPath)\n const isGoingBackward = relativePath.startsWith('..')\n\n if (isInAllowedDir && !isGoingBackward) {\n return true\n }\n }\n\n return false\n })\n\n if (allInAllowedDirs) {\n shouldForce = true\n }\n }\n\n deleteSync(patterns, {\n concurrency: opts.maxRetries || defaultRemoveOptions.maxRetries,\n dryRun: false,\n force: shouldForce,\n onlyFiles: false,\n })\n}\n\n/**\n * Safely create a directory asynchronously, ignoring EEXIST errors.\n * This function wraps fs.promises.mkdir and handles the race condition where\n * the directory might already exist, which is common in concurrent code.\n *\n * Unlike fs.promises.mkdir with recursive:true, this function:\n * - Silently ignores EEXIST errors (directory already exists)\n * - Re-throws all other errors (permissions, invalid path, etc.)\n * - Works reliably in multi-process/concurrent scenarios\n * - Defaults to recursive: true for convenient nested directory creation\n *\n * @param path - Directory path to create\n * @param options - Options including recursive (default: true) and mode settings\n * @returns Promise that resolves when directory is created or already exists\n *\n * @example\n * ```ts\n * // Create a directory recursively by default, no error if it exists\n * await safeMkdir('./config')\n *\n * // Create nested directories (recursive: true is the default)\n * await safeMkdir('./data/cache/temp')\n *\n * // Create with specific permissions\n * await safeMkdir('./secure', { mode: 0o700 })\n *\n * // Explicitly disable recursive behavior\n * await safeMkdir('./single-level', { recursive: false })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeMkdir(\n path: PathLike,\n options?: MakeDirectoryOptions | undefined,\n): Promise<void> {\n const fs = getFs()\n const opts = { __proto__: null, recursive: true, ...options }\n try {\n await fs.promises.mkdir(path, opts)\n } catch (e: unknown) {\n // Ignore EEXIST error - directory already exists.\n if (\n typeof e === 'object' &&\n e !== null &&\n 'code' in e &&\n e.code !== 'EEXIST'\n ) {\n throw e\n }\n }\n}\n\n/**\n * Safely create a directory synchronously, ignoring EEXIST errors.\n * This function wraps fs.mkdirSync and handles the race condition where\n * the directory might already exist, which is common in concurrent code.\n *\n * Unlike fs.mkdirSync with recursive:true, this function:\n * - Silently ignores EEXIST errors (directory already exists)\n * - Re-throws all other errors (permissions, invalid path, etc.)\n * - Works reliably in multi-process/concurrent scenarios\n * - Defaults to recursive: true for convenient nested directory creation\n *\n * @param path - Directory path to create\n * @param options - Options including recursive (default: true) and mode settings\n *\n * @example\n * ```ts\n * // Create a directory recursively by default, no error if it exists\n * 
safeMkdirSync('./config')\n *\n * // Create nested directories (recursive: true is the default)\n * safeMkdirSync('./data/cache/temp')\n *\n * // Create with specific permissions\n * safeMkdirSync('./secure', { mode: 0o700 })\n *\n * // Explicitly disable recursive behavior\n * safeMkdirSync('./single-level', { recursive: false })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeMkdirSync(\n path: PathLike,\n options?: MakeDirectoryOptions | undefined,\n): void {\n const fs = getFs()\n const opts = { __proto__: null, recursive: true, ...options }\n try {\n fs.mkdirSync(path, opts)\n } catch (e: unknown) {\n // Ignore EEXIST error - directory already exists.\n if (\n typeof e === 'object' &&\n e !== null &&\n 'code' in e &&\n e.code !== 'EEXIST'\n ) {\n throw e\n }\n }\n}\n\n/**\n * Safely read a file asynchronously, returning undefined on error.\n * Useful when you want to attempt reading a file without handling errors explicitly.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and default value\n * @returns Promise resolving to file contents, or undefined on error\n *\n * @example\n * ```ts\n * // Try to read a file, get undefined if it doesn't exist\n * const content = await safeReadFile('./optional-config.txt')\n * if (content) {\n * console.log('Config found:', content)\n * }\n *\n * // Read with specific encoding\n * const data = await safeReadFile('./data.txt', { encoding: 'utf8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeReadFile(\n filepath: PathLike,\n options?: SafeReadOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n try {\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n } as Abortable)\n } catch {}\n return undefined\n}\n\n/**\n * Safely read a file synchronously, returning undefined on error.\n * Useful when you want to attempt reading a file without handling errors explicitly.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and default value\n * @returns File contents, or undefined on error\n *\n * @example\n * ```ts\n * // Try to read a config file\n * const config = safeReadFileSync('./config.txt')\n * if (config) {\n * console.log('Config loaded successfully')\n * }\n *\n * // Read binary file safely\n * const buffer = safeReadFileSync('./image.png', { encoding: null })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeReadFileSync(\n filepath: PathLike,\n options?: SafeReadOptions | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n try {\n return fs.readFileSync(filepath, {\n __proto__: null,\n ...opts,\n } as ObjectEncodingOptions)\n } catch {}\n return undefined\n}\n\n/**\n * Safely get file stats asynchronously, returning undefined on error.\n * Useful for checking file existence and properties without error handling.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to check\n * @returns Promise resolving to Stats object, or undefined on error\n *\n * @example\n * ```ts\n * // Check if file exists and get its stats\n * const stats = await safeStats('./file.txt')\n * if (stats) {\n * console.log('File size:', stats.size)\n * console.log('Modified:', stats.mtime)\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeStats(filepath: PathLike) {\n const fs = getFs()\n try {\n return await fs.promises.stat(filepath)\n } catch {}\n return undefined\n}\n\n/**\n * Safely get file stats synchronously, returning undefined on error.\n * Useful for checking file existence and properties without error handling.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to check\n * @param options - Read options (currently unused but kept for API consistency)\n * @returns Stats object, or undefined on error\n *\n * @example\n * ```ts\n * // Check if file exists and get its size\n * const stats = safeStatsSync('./file.txt')\n * if (stats) {\n * console.log('File size:', stats.size)\n * console.log('Is directory:', stats.isDirectory())\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeStatsSync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n try {\n return fs.statSync(filepath, {\n __proto__: null,\n throwIfNoEntry: false,\n ...opts,\n } as StatSyncOptions)\n } catch {}\n return undefined\n}\n\n/**\n * Generate a unique filepath by adding number suffix if the path exists.\n * Appends `-1`, `-2`, etc. 
before the file extension until a non-existent path is found.\n * Useful for creating files without overwriting existing ones.\n *\n * @param filepath - Desired file path\n * @returns Normalized unique filepath (original if it doesn't exist, or with number suffix)\n *\n * @example\n * ```ts\n * // If 'report.pdf' exists, returns 'report-1.pdf'\n * const uniquePath = uniqueSync('./report.pdf')\n *\n * // If 'data.json' and 'data-1.json' exist, returns 'data-2.json'\n * const path = uniqueSync('./data.json')\n *\n * // If 'backup' doesn't exist, returns 'backup' unchanged\n * const backupPath = uniqueSync('./backup')\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function uniqueSync(filepath: PathLike): string {\n const fs = getFs()\n const path = getPath()\n const filepathStr = String(filepath)\n\n // If the file doesn't exist, return as is\n if (!fs.existsSync(filepathStr)) {\n return normalizePath(filepathStr)\n }\n\n const dirname = path.dirname(filepathStr)\n const ext = path.extname(filepathStr)\n const basename = path.basename(filepathStr, ext)\n\n let counter = 1\n let uniquePath: string\n do {\n uniquePath = path.join(dirname, `${basename}-${counter}${ext}`)\n counter++\n } while (fs.existsSync(uniquePath))\n\n return normalizePath(uniquePath)\n}\n\n/**\n * Write JSON content to a file asynchronously with formatting.\n * Stringifies the value with configurable indentation and line endings.\n * Automatically adds a final newline by default for POSIX compliance.\n *\n * @param filepath - Path to write to\n * @param jsonContent - Value to stringify and write\n * @param options - Write options including formatting and encoding\n * @returns Promise that resolves when write completes\n *\n * @example\n * ```ts\n * // Write formatted JSON with default 2-space indentation\n * await writeJson('./data.json', { name: 'example', version: '1.0.0' })\n *\n * // Write with custom indentation\n * await writeJson('./config.json', config, { spaces: 4 })\n *\n * // Write with tabs instead of spaces\n * await writeJson('./data.json', data, { spaces: '\\t' })\n *\n * // Write without final newline\n * await writeJson('./inline.json', obj, { finalEOL: false })\n *\n * // Write with Windows line endings\n * await writeJson('./win.json', data, { EOL: '\\r\\n' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function writeJson(\n filepath: PathLike,\n jsonContent: unknown,\n options?: WriteJsonOptions | string,\n): Promise<void> {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { EOL, finalEOL, replacer, spaces, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as WriteJsonOptions\n const fs = getFs()\n const jsonString = stringify(\n jsonContent,\n EOL || '\\n',\n finalEOL !== undefined ? 
finalEOL : true,\n replacer,\n spaces,\n )\n await fs.promises.writeFile(filepath, jsonString, {\n encoding: 'utf8',\n ...fsOptions,\n __proto__: null,\n } as ObjectEncodingOptions)\n}\n\n/**\n * Write JSON content to a file synchronously with formatting.\n * Stringifies the value with configurable indentation and line endings.\n * Automatically adds a final newline by default for POSIX compliance.\n *\n * @param filepath - Path to write to\n * @param jsonContent - Value to stringify and write\n * @param options - Write options including formatting and encoding\n *\n * @example\n * ```ts\n * // Write formatted JSON with default 2-space indentation\n * writeJsonSync('./package.json', pkg)\n *\n * // Write with custom indentation\n * writeJsonSync('./tsconfig.json', tsconfig, { spaces: 4 })\n *\n * // Write with tabs for indentation\n * writeJsonSync('./data.json', data, { spaces: '\\t' })\n *\n * // Write compacted (no indentation)\n * writeJsonSync('./compact.json', data, { spaces: 0 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function writeJsonSync(\n filepath: PathLike,\n jsonContent: unknown,\n options?: WriteJsonOptions | string | undefined,\n): void {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { EOL, finalEOL, replacer, spaces, ...fsOptions } = {\n __proto__: null,\n ...opts,\n }\n const fs = getFs()\n const jsonString = stringify(\n jsonContent,\n EOL || '\\n',\n finalEOL !== undefined ? finalEOL : true,\n replacer,\n spaces,\n )\n fs.writeFileSync(filepath, jsonString, {\n encoding: 'utf8',\n ...fsOptions,\n __proto__: null,\n } as WriteFileOptions)\n}\n"],
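The replacement `sourcesContent` above documents `safeMkdir` and `safeMkdirSync` as defaulting to `recursive: true` (their options are built as `{ __proto__: null, recursive: true, ...options }`) while still ignoring `EEXIST` and re-throwing other errors such as `EACCES` or `EPERM`. A minimal usage sketch following the JSDoc examples embedded above; the paths are illustrative:

```ts
import { safeMkdir, safeMkdirSync } from '@socketsecurity/lib/fs'

// Nested directories are created in a single call now that recursive: true
// is the default; EEXIST is swallowed, other errors still propagate.
await safeMkdir('./data/cache/temp')

// Opting out restores single-level creation, per the embedded JSDoc.
safeMkdirSync('./single-level', { recursive: false })
```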
5
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiBA,qBAA+B;AAE/B,oBAAwB;AAIxB,mBAA8C;AAE9C,kBAA0B;AAC1B,qBAAyC;AACzC,kBAAgD;AAChD,oBAA0C;AAC1C,mBAA+B;AAR/B,MAAM,kBAAc,+BAAe;AA6QnC,MAAM,2BAAuB,6BAAa;AAAA,EACxC,WAAW;AAAA,EACX,OAAO;AAAA,EACP,YAAY;AAAA,EACZ,WAAW;AAAA,EACX,YAAY;AACd,CAAC;AAED,IAAI;AAAA;AASJ,SAAS,QAAQ;AACf,MAAI,QAAQ,QAAW;AAErB,UAAoB,QAAQ,SAAS;AAAA,EACvC;AACA,SAAO;AACT;AAEA,IAAI;AAAA;AASJ,SAAS,UAAU;AACjB,MAAI,UAAU,QAAW;AAGvB,YAAsB,QAAQ,WAAW;AAAA,EAC3C;AACA,SAAO;AACT;AAAA;AAcA,SAAS,kBACP,SACA,SACA,SACU;AACV,QAAM;AAAA,IACJ;AAAA,IACA,eAAe;AAAA,IACf,OAAO;AAAA,EACT,IAAI,EAAE,WAAW,MAAM,GAAG,QAAQ;AAClC,QAAM,OAAO,wBAAQ;AACrB,QAAM,QAAQ,QACX;AAAA,IACC,CAAC,MACC,EAAE,YAAY,MACb,gBACC,CAAC,+BAAe,KAAK,KAAK,WAAW,EAAE,YAAY,EAAE,IAAI,GAAG;AAAA,MAC1D;AAAA,IACF,CAAC;AAAA,EACP,EACC,IAAI,CAAC,MAAc,EAAE,IAAI;AAC5B,SAAO,OAAO,MAAM,KAAK,2BAAc,IAAI;AAC7C;AAAA;AAeA,SAAS,UACP,MACA,KACA,UACA,UACA,SAA0B,GAClB;AACR,QAAM,MAAM,WAAW,MAAM;AAC7B,QAAM,MAAM,KAAK,UAAU,MAAM,UAAU,MAAM;AACjD,SAAO,GAAG,IAAI,QAAQ,OAAO,GAAG,CAAC,GAAG,GAAG;AACzC;AAAA;AAwBA,eAAsB,OACpB,MACA,SAC6B;AAC7B,QAAM,EAAE,MAAM,QAAQ,IAAI,GAAG,SAAS,YAAY,IAAI;AAAA,IACpD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,MAAI,EAAE,kBAAkB,OAAO,YAAY,KAAK,IAAI;AAAA,IAClD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,MAAI,iBAAiB;AACnB,gBAAY;AAAA,EACd;AACA,MAAI,WAAW;AACb,sBAAkB;AAAA,EACpB;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,OAAO,wBAAQ;AACrB,MAAI,MAAM,KAAK,QAAQ,GAAG;AAC1B,QAAM,EAAE,KAAK,IAAI,KAAK,MAAM,GAAG;AAC/B,QAAM,YAAQ,uBAAQ,IAAI,IAAI,OAAO,CAAC,IAAc;AACpD,SAAO,OAAO,QAAQ,MAAM;AAC1B,eAAW,KAAK,OAAO;AACrB,UAAI,QAAQ,SAAS;AACnB,eAAO;AAAA,MACT;AACA,YAAM,UAAU,KAAK,KAAK,KAAK,CAAC;AAChC,UAAI;AAEF,cAAM,QAAQ,MAAM,GAAG,SAAS,KAAK,OAAO;AAC5C,YAAI,CAAC,mBAAmB,MAAM,OAAO,GAAG;AACtC,qBAAO,2BAAc,OAAO;AAAA,QAC9B;AACA,YAAI,CAAC,aAAa,MAAM,YAAY,GAAG;AACrC,qBAAO,2BAAc,OAAO;AAAA,QAC9B;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AACA,UAAM,KAAK,QAAQ,GAAG;AAAA,EACxB;AACA,SAAO;AACT;AAAA;AA2BO,SAAS,WACd,MACA,SACA;AACA,QAAM,EAAE,MAAM,QAAQ,IAAI,GAAG,OAAO,IAAI;AAAA,IACtC,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,MAAI,EAAE,kBAAkB,OAAO,YAAY,KAAK,IAAI;AAAA,IAClD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,MAAI,iBAAiB;AACnB,gBAAY;AAAA,EACd;AACA,MAAI,WAAW;AACb,sBAAkB;AAAA,EACpB;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,OAAO,wBAAQ;AACrB,MAAI,MAAM,KAAK,QAAQ,GAAG;AAC1B,QAAM,EAAE,KAAK,IAAI,KAAK,MAAM,GAAG;AAC/B,QAAM,UAAU,SAAS,KAAK,QAAQ,MAAM,IAAI;AAChD,QAAM,YAAQ,uBAAQ,IAAI,IAAI,OAAO,CAAC,IAAc;AACpD,SAAO,OAAO,QAAQ,MAAM;AAE1B,QAAI,WAAW,QAAQ,SAAS;AAE9B,iBAAW,KAAK,OAAO;AACrB,cAAM,UAAU,KAAK,KAAK,KAAK,CAAC;AAChC,YAAI;AACF,gBAAM,QAAQ,GAAG,SAAS,OAAO;AACjC,cAAI,CAAC,mBAAmB,MAAM,OAAO,GAAG;AACtC,uBAAO,2BAAc,OAAO;AAAA,UAC9B;AACA,cAAI,CAAC,aAAa,MAAM,YAAY,GAAG;AACrC,uBAAO,2BAAc,OAAO;AAAA,UAC9B;AAAA,QACF,QAAQ;AAAA,QAAC;AAAA,MACX;AACA,aAAO;AAAA,IACT;AACA,eAAW,KAAK,OAAO;AACrB,YAAM,UAAU,KAAK,KAAK,KAAK,CAAC;AAChC,UAAI;AACF,cAAM,QAAQ,GAAG,SAAS,OAAO;AACjC,YAAI,CAAC,mBAAmB,MAAM,OAAO,GAAG;AACtC,qBAAO,2BAAc,OAAO;AAAA,QAC9B;AACA,YAAI,CAAC,aAAa,MAAM,YAAY,GAAG;AACrC,qBAAO,2BAAc,OAAO;AAAA,QAC9B;AAAA,MACF,QAAQ;AAAA,MAAC;AAAA,IACX;AACA,UAAM,KAAK,QAAQ,GAAG;AAAA,EACxB;AACA,SAAO;AACT;AAAA;AAiBA,eAAsB,MAAM,UAAoB;AAC9C,SAAO,CAAC,EAAE,MAAM,0BAAU,QAAQ,IAAI,YAAY;AACpD;AAAA;AAiBO,SAAS,UAAU,UAAoB;AAC5C,SAAO,CAAC,EAAC,8BAAc,QAAQ,IAAG,YAAY;AAChD;AAAA;AAqBO,SAAS,eACd,SACA,SACA;AACA,QAAM,EAAE,SAAS,2BAAc,IAAI;AAAA,IACjC,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,UAAM,QAAQ,GAAG,YAAY,OAAO;AACpC,UAAM,EAAE,OAAO,IAAI;AACnB,QAAI,WAAW,GAAG;AAChB,aAAO;AAAA,I
ACT;AACA,UAAM,cAAU;AAAA,MACd;AAAA,MACA;AAAA,QACE,SAAK,8BAAiB,OAAO;AAAA,MAC/B;AAAA,IACF;AACA,QAAI,eAAe;AACnB,aAAS,IAAI,GAAG,IAAI,QAAQ,KAAK,GAAG;AAClC,YAAM,OAAO,MAAM,CAAC;AACpB,UAAI,QAAQ,QAAQ,IAAI,GAAG;AACzB,wBAAgB;AAAA,MAClB;AAAA,IACF;AACA,WAAO,iBAAiB;AAAA,EAC1B,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAAA;AAiBO,SAAS,cAAc,UAAoB;AAChD,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO,GAAG,UAAU,QAAQ,EAAE,eAAe;AAAA,EAC/C,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;AAAA;AAkDO,SAAS,cACd,WACqB;AACrB,QAAM,KAAK,sBAAM;AACjB,QAAM,aAAuB,CAAC;AAC9B,QAAM,eAAyB,CAAC;AAChC,QAAM,EAAE,KAAK,IAAI,GAAG;AAEpB,aAAW,YAAY,WAAW;AAChC,QAAI;AACF,SAAG,WAAW,UAAU,IAAI;AAC5B,iBAAW,KAAK,QAAQ;AAAA,IAC1B,QAAQ;AACN,mBAAa,KAAK,QAAQ;AAAA,IAC5B;AAAA,EACF;AAEA,SAAO,EAAE,WAAW,MAAM,YAAY,aAAa;AACrD;AAAA;AAwBA,eAAsB,aACpB,SACA,SACA;AACA,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO;AAAA,MACL,MAAM,GAAG,SAAS,QAAQ,SAAS;AAAA,QACjC,WAAW;AAAA,QACX,UAAU;AAAA,QACV,eAAe;AAAA,MACjB,CAAoD;AAAA,MACpD,OAAO,OAAO;AAAA,MACd;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAAC;AACT,SAAO,CAAC;AACV;AAAA;AAwBO,SAAS,iBAAiB,SAAmB,SAA0B;AAC5E,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO;AAAA,MACL,GAAG,YAAY,SAAS;AAAA,QACtB,WAAW;AAAA,QACX,UAAU;AAAA,QACV,eAAe;AAAA,MACjB,CAAoD;AAAA,MACpD,OAAO,OAAO;AAAA,MACd;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAAC;AACT,SAAO,CAAC;AACV;AAAA;AAqBA,eAAsB,eACpB,UACA,SACA;AAEA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,SAAO,MAAM,GAAG,SAAS,SAAS,UAAU;AAAA,IAC1C,QAAQ;AAAA,IACR,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAAC;AACH;AAAA;AAqBA,eAAsB,aACpB,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,SAAO,MAAM,GAAG,SAAS,SAAS,UAAU;AAAA,IAC1C,QAAQ;AAAA,IACR,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAAC;AACH;AAAA;AAqBO,SAAS,mBACd,UACA,SACA;AAEA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,SAAO,GAAG,aAAa,UAAU;AAAA,IAC/B,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAA0B;AAC5B;AAAA;AAqBO,SAAS,iBACd,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,SAAO,GAAG,aAAa,UAAU;AAAA,IAC/B,GAAG;AAAA,IACH,UAAU;AAAA,EACZ,CAA0B;AAC5B;AAAA;AAgCA,eAAsB,SACpB,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,EAAE,SAAS,QAAQ,GAAG,UAAU,IAAI;AAAA,IACxC,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,cAAc,WAAW,UAAa,CAAC,CAAC;AAC9C,QAAM,KAAK,sBAAM;AACjB,MAAI,UAAU;AACd,MAAI;AACF,cAAU,MAAM,GAAG,SAAS,SAAS,UAAU;AAAA,MAC7C,WAAW;AAAA,MACX,UAAU;AAAA,MACV,GAAG;AAAA,IACL,CAEC;AAAA,EACH,SAAS,GAAG;AACV,QAAI,aAAa;AACf,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,UAAU;AACrB,cAAM,IAAI;AAAA,UACR,wBAAwB,QAAQ;AAAA;AAAA,UAEhC,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,wCAAwC,QAAQ;AAAA;AAAA,UAEhD,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM;AAAA,IACR;AACA,WAAO;AAAA,EACT;AACA,aAAO,uBAAU,SAAS;AAAA,IACxB,UAAU,OAAO,QAAQ;AAAA,IACzB;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AACH;AAAA;AA+BO,SAAS,aACd,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,EAAE,SAAS,QAAQ,GAAG,UAAU,IAAI;AAAA,IACxC,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,cAAc,WAAW,UAAa,CAAC,CAAC;AAC9C,QAAM,KAAK,sBAAM;AACjB,MAAI,UAAU;AACd,MAAI;AACF,cAAU,GAAG,aAAa,UAAU;AAAA,MAClC,WAAW;AAAA,MACX,UAAU;AAAA,MACV,GAAG;AAAA,IACL,CAEC;AAAA,EACH,SAAS,GAAG;AACV,QAAI,aAAa;AACf,YAAM,OAAQ,EAA4B;AAC1C,UAAI,SAAS,UAAU;AACrB,cAAM,IAAI;AAAA,UACR,wBAAwB,QAAQ;AAAA;AAAA,UAEhC,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,UAAI,SAAS,YAAY,SAAS,SAAS;AACzC,cAAM,IAAI;AAAA,UACR,wCAAwC,QAAQ;AAAA;AAAA,UAEhD,EAAE,OAAO,EAAE;AAAA,QACb;AAAA,MACF;AACA,YAAM;AAAA,IACR;AACA,WAAO;AAAA,EACT;AACA,aAAO,uBAAU,SAAS;AAAA,IACxB,UAAU,OAAO,QAAQ;AAAA,IACzB;AAAA,IACA,QAAQ;AAAA,EACV,CAAC;AACH;AAGA,IAAI;AAMJ,SAAS,wB
AAkC;AACzC,MAAI,uBAAuB,QAAW;AACpC,UAAM,OAAO,wBAAQ;AACrB,UAAM;AAAA,MACJ;AAAA,MACA;AAAA,MACA;AAAA,IACF,IAAkB,QAAQ,YAAY;AAEtC,yBAAqB;AAAA,MACnB,KAAK,QAAQ,YAAY,CAAC;AAAA,MAC1B,KAAK,QAAQ,oBAAoB,CAAC;AAAA,MAClC,KAAK,QAAQ,iBAAiB,CAAC;AAAA,IACjC;AAAA,EACF;AACA,SAAO;AACT;AAQO,SAAS,sBAA4B;AAC1C,uBAAqB;AACvB;AAAA,IAGA,yCAA0B,mBAAmB;AAAA;AA2B7C,eAAsB,WACpB,UACA,SACA;AACA,QAAM,MAAoB,QAAQ,gBAAgB;AAClD,QAAM,EAAE,YAAY,IAAI;AACxB,QAAM,OAAO,EAAE,WAAW,MAAM,GAAG,QAAQ;AAC3C,QAAM,eAAW,uBAAQ,QAAQ,IAC7B,SAAS,IAAI,4BAAgB,IAC7B,KAAC,8BAAiB,QAAQ,CAAC;AAG/B,MAAI,cAAc,KAAK,UAAU;AACjC,MAAI,CAAC,eAAe,SAAS,SAAS,GAAG;AACvC,UAAM,OAAO,wBAAQ;AACrB,UAAM,cAAc,sBAAsB;AAG1C,UAAM,mBAAmB,SAAS,MAAM,aAAW;AACjD,YAAM,eAAe,KAAK,QAAQ,OAAO;AAGzC,iBAAW,cAAc,aAAa;AACpC,cAAM,iBACJ,aAAa,WAAW,aAAa,KAAK,GAAG,KAC7C,iBAAiB;AACnB,cAAM,eAAe,KAAK,SAAS,YAAY,YAAY;AAC3D,cAAM,kBAAkB,aAAa,WAAW,IAAI;AAEpD,YAAI,kBAAkB,CAAC,iBAAiB;AACtC,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO;AAAA,IACT,CAAC;AAED,QAAI,kBAAkB;AACpB,oBAAc;AAAA,IAChB;AAAA,EACF;AAEA,QAAM,YAAY,UAAU;AAAA,IAC1B,aAAa,KAAK,cAAc,qBAAqB;AAAA,IACrD,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,WAAW;AAAA,EACb,CAAC;AACH;AAAA;AA2BO,SAAS,eACd,UACA,SACA;AACA,QAAM,MAAoB,QAAQ,gBAAgB;AAClD,QAAM,EAAE,WAAW,IAAI;AACvB,QAAM,OAAO,EAAE,WAAW,MAAM,GAAG,QAAQ;AAC3C,QAAM,eAAW,uBAAQ,QAAQ,IAC7B,SAAS,IAAI,4BAAgB,IAC7B,KAAC,8BAAiB,QAAQ,CAAC;AAG/B,MAAI,cAAc,KAAK,UAAU;AACjC,MAAI,CAAC,eAAe,SAAS,SAAS,GAAG;AACvC,UAAM,OAAO,wBAAQ;AACrB,UAAM,cAAc,sBAAsB;AAG1C,UAAM,mBAAmB,SAAS,MAAM,aAAW;AACjD,YAAM,eAAe,KAAK,QAAQ,OAAO;AAGzC,iBAAW,cAAc,aAAa;AACpC,cAAM,iBACJ,aAAa,WAAW,aAAa,KAAK,GAAG,KAC7C,iBAAiB;AACnB,cAAM,eAAe,KAAK,SAAS,YAAY,YAAY;AAC3D,cAAM,kBAAkB,aAAa,WAAW,IAAI;AAEpD,YAAI,kBAAkB,CAAC,iBAAiB;AACtC,iBAAO;AAAA,QACT;AAAA,MACF;AAEA,aAAO;AAAA,IACT,CAAC;AAED,QAAI,kBAAkB;AACpB,oBAAc;AAAA,IAChB;AAAA,EACF;AAEA,aAAW,UAAU;AAAA,IACnB,aAAa,KAAK,cAAc,qBAAqB;AAAA,IACrD,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,WAAW;AAAA,EACb,CAAC;AACH;AAAA;AAiCA,eAAsB,UACpB,MACA,SACe;AACf,QAAM,KAAK,sBAAM;AACjB,QAAM,OAAO,EAAE,WAAW,MAAM,WAAW,MAAM,GAAG,QAAQ;AAC5D,MAAI;AACF,UAAM,GAAG,SAAS,MAAM,MAAM,IAAI;AAAA,EACpC,SAAS,GAAY;AAEnB,QACE,OAAO,MAAM,YACb,MAAM,QACN,UAAU,KACV,EAAE,SAAS,UACX;AACA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAAA;AAgCO,SAAS,cACd,MACA,SACM;AACN,QAAM,KAAK,sBAAM;AACjB,QAAM,OAAO,EAAE,WAAW,MAAM,WAAW,MAAM,GAAG,QAAQ;AAC5D,MAAI;AACF,OAAG,UAAU,MAAM,IAAI;AAAA,EACzB,SAAS,GAAY;AAEnB,QACE,OAAO,MAAM,YACb,MAAM,QACN,UAAU,KACV,EAAE,SAAS,UACX;AACA,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAAA;AAwBA,eAAsB,aACpB,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO,MAAM,GAAG,SAAS,SAAS,UAAU;AAAA,MAC1C,QAAQ;AAAA,MACR,GAAG;AAAA,IACL,CAAc;AAAA,EAChB,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;AAAA;AAwBO,SAAS,iBACd,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO,GAAG,aAAa,UAAU;AAAA,MAC/B,WAAW;AAAA,MACX,GAAG;AAAA,IACL,CAA0B;AAAA,EAC5B,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;AAAA;AAqBA,eAAsB,UAAU,UAAoB;AAClD,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO,MAAM,GAAG,SAAS,KAAK,QAAQ;AAAA,EACxC,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;AAAA;AAsBO,SAAS,cACd,UACA,SACA;AACA,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,KAAK,sBAAM;AACjB,MAAI;AACF,WAAO,GAAG,SAAS,UAAU;AAAA,MAC3B,WAAW;AAAA,MACX,gBAAgB;AAAA,MAChB,GAAG;AAAA,IACL,CAAoB;AAAA,EACtB,QAAQ;AAAA,EAAC;AACT,SAAO;AACT;AAAA;AAuBO,SAAS,WAAW,UAA4B;AACrD,QAAM,KAAK,sBAAM;AACjB,QAAM,OAAO,wBAAQ;AACrB,QAAM,cAAc,OAAO,QAAQ;AAGnC,MAAI,CAAC,GAAG,WAAW,WAAW,GAAG;AAC/B,eAAO,2BAAc,WAAW;AAAA,EAClC;AAEA,QAAM,UAAU,KAAK,QAAQ,WAAW;AACxC,QAAM,MAAM,KAAK,QAAQ,WAAW;AACpC,QAAM,WAAW,KAAK,SAAS,aAAa,GAAG;AAE/C,MAAI,UAAU;AACd,MAAI;AACJ,KAAG;AACD,iBAA
a,KAAK,KAAK,SAAS,GAAG,QAAQ,IAAI,OAAO,GAAG,GAAG,EAAE;AAC9D;AAAA,EACF,SAAS,GAAG,WAAW,UAAU;AAEjC,aAAO,2BAAc,UAAU;AACjC;AAAA;AA+BA,eAAsB,UACpB,UACA,aACA,SACe;AACf,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,EAAE,KAAK,UAAU,UAAU,QAAQ,GAAG,UAAU,IAAI;AAAA,IACxD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,aAAa;AAAA,IACjB;AAAA,IACA,OAAO;AAAA,IACP,aAAa,SAAY,WAAW;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AACA,QAAM,GAAG,SAAS,UAAU,UAAU,YAAY;AAAA,IAChD,UAAU;AAAA,IACV,GAAG;AAAA,IACH,WAAW;AAAA,EACb,CAA0B;AAC5B;AAAA;AA2BO,SAAS,cACd,UACA,aACA,SACM;AACN,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,UAAU,QAAQ,IAAI;AACnE,QAAM,EAAE,KAAK,UAAU,UAAU,QAAQ,GAAG,UAAU,IAAI;AAAA,IACxD,WAAW;AAAA,IACX,GAAG;AAAA,EACL;AACA,QAAM,KAAK,sBAAM;AACjB,QAAM,aAAa;AAAA,IACjB;AAAA,IACA,OAAO;AAAA,IACP,aAAa,SAAY,WAAW;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AACA,KAAG,cAAc,UAAU,YAAY;AAAA,IACrC,UAAU;AAAA,IACV,GAAG;AAAA,IACH,WAAW;AAAA,EACb,CAAqB;AACvB;",
6
6
  "names": []
7
7
  }
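
The `sourcesContent` embedded in the source map above documents the package's JSON file helpers (`uniqueSync`, `writeJson`, `writeJsonSync`). The following is a minimal usage sketch based only on that embedded JSDoc; the `'@socketsecurity/lib/fs'` import specifier and the `saveReport` helper are assumptions for illustration, not confirmed package exports.

```ts
// Sketch only: the import path below is an assumed subpath export.
import { uniqueSync, writeJson, writeJsonSync } from '@socketsecurity/lib/fs'

async function saveReport(data: unknown): Promise<string> {
  // uniqueSync appends -1, -2, ... before the extension until the path is free:
  // an existing './report.json' yields './report-1.json', then './report-2.json'.
  const target = uniqueSync('./report.json')

  // writeJson stringifies with 2-space indentation by default and appends a
  // trailing newline (finalEOL: true) for POSIX-friendly output.
  await writeJson(target, data)
  return target
}

// The synchronous variant accepts the same formatting options, e.g. tab
// indentation, Windows line endings, or finalEOL: false to skip the newline.
writeJsonSync('./config.json', { retries: 3 }, { spaces: '\t', EOL: '\r\n' })
```
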
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@socketsecurity/lib",
3
- "version": "3.0.6",
3
+ "version": "3.1.0",
4
4
  "license": "MIT",
5
5
  "description": "Core utilities and infrastructure for Socket.dev security tools",
6
6
  "keywords": [