@socketsecurity/lib 2.10.3 → 2.10.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/fs.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../src/fs.ts"],
- "sourcesContent": ["/**\n * @fileoverview File system utilities with cross-platform path handling.\n * Provides enhanced fs operations, glob matching, and directory traversal functions.\n */\n\nimport type { Abortable } from 'node:events'\nimport type {\n Dirent,\n ObjectEncodingOptions,\n OpenMode,\n PathLike,\n StatSyncOptions,\n WriteFileOptions,\n} from 'node:fs'\n\nimport { getAbortSignal } from '#constants/process'\n\nimport { isArray } from './arrays'\n\nconst abortSignal = getAbortSignal()\n\nimport { defaultIgnore, getGlobMatcher } from './globs'\nimport type { JsonReviver } from './json'\nimport { jsonParse } from './json'\nimport { objectFreeze, type Remap } from './objects'\nimport { normalizePath, pathLikeToString } from './path'\nimport { registerCacheInvalidation } from './paths/rewire'\nimport { naturalCompare } from './sorts'\n\n/**\n * Supported text encodings for Node.js Buffers.\n * Includes ASCII, UTF-8/16, base64, binary, and hexadecimal encodings.\n */\nexport type BufferEncoding =\n | 'ascii'\n | 'utf8'\n | 'utf-8'\n | 'utf16le'\n | 'ucs2'\n | 'ucs-2'\n | 'base64'\n | 'base64url'\n | 'latin1'\n | 'binary'\n | 'hex'\n\n/**\n * Represents any valid JSON content type.\n */\nexport type JsonContent = unknown\n\n/**\n * Options for asynchronous `findUp` operations.\n */\nexport interface FindUpOptions {\n /**\n * Starting directory for the search.\n * @default process.cwd()\n */\n cwd?: string | undefined\n /**\n * Only match directories, not files.\n * @default false\n */\n onlyDirectories?: boolean | undefined\n /**\n * Only match files, not directories.\n * @default true\n */\n onlyFiles?: boolean | undefined\n /**\n * Abort signal to cancel the search operation.\n */\n signal?: AbortSignal | undefined\n}\n\n/**\n * Options for synchronous `findUpSync` operations.\n */\nexport interface FindUpSyncOptions {\n /**\n * Starting directory for the search.\n * @default process.cwd()\n */\n cwd?: string | undefined\n /**\n * Directory to stop searching at (inclusive).\n * When provided, search will stop at this directory even if the root hasn't been reached.\n */\n stopAt?: string | undefined\n /**\n * Only match directories, not files.\n * @default false\n */\n onlyDirectories?: boolean | undefined\n /**\n * Only match files, not directories.\n * @default true\n */\n onlyFiles?: boolean | undefined\n}\n\n/**\n * Options for checking if a directory is empty.\n */\nexport interface IsDirEmptyOptions {\n /**\n * Glob patterns for files to ignore when checking emptiness.\n * Files matching these patterns are not counted.\n * @default defaultIgnore\n */\n ignore?: string[] | readonly string[] | undefined\n}\n\n/**\n * Options for read operations with abort support.\n */\nexport interface ReadOptions extends Abortable {\n /**\n * Character encoding to use for reading.\n * @default 'utf8'\n */\n encoding?: BufferEncoding | string | undefined\n /**\n * File system flag for reading behavior.\n * @default 'r'\n */\n flag?: string | undefined\n}\n\n/**\n * Options for reading directories with filtering and sorting.\n */\nexport interface ReadDirOptions {\n /**\n * Glob patterns for directories to ignore.\n * @default undefined\n */\n ignore?: string[] | readonly string[] | undefined\n /**\n * Include empty directories in results.\n * When `false`, empty directories are filtered out.\n * @default true\n */\n includeEmpty?: boolean | undefined\n /**\n * Sort directory names alphabetically using natural sort order.\n * @default true\n */\n sort?: boolean | undefined\n}\n\n/**\n * 
Options for reading files with encoding and abort support.\n * Can be either an options object, an encoding string, or null.\n */\nexport type ReadFileOptions =\n | Remap<\n ObjectEncodingOptions &\n Abortable & {\n flag?: OpenMode | undefined\n }\n >\n | BufferEncoding\n | null\n\n/**\n * Options for reading and parsing JSON files.\n */\nexport type ReadJsonOptions = Remap<\n ReadFileOptions & {\n /**\n * Whether to throw errors on parse failure.\n * When `false`, returns `undefined` on error instead of throwing.\n * @default true\n */\n throws?: boolean | undefined\n /**\n * JSON reviver function to transform parsed values.\n * Same as the second parameter to `JSON.parse()`.\n */\n reviver?: Parameters<typeof JSON.parse>[1] | undefined\n }\n>\n\n/**\n * Options for file/directory removal operations.\n */\nexport interface RemoveOptions {\n /**\n * Force deletion even outside normally safe directories.\n * When `false`, prevents deletion outside temp, cacache, and ~/.socket.\n * @default true for safe directories, false otherwise\n */\n force?: boolean | undefined\n /**\n * Maximum number of retry attempts on failure.\n * @default 3\n */\n maxRetries?: number | undefined\n /**\n * Recursively delete directories and contents.\n * @default true\n */\n recursive?: boolean | undefined\n /**\n * Delay in milliseconds between retry attempts.\n * @default 200\n */\n retryDelay?: number | undefined\n /**\n * Abort signal to cancel the operation.\n */\n signal?: AbortSignal | undefined\n}\n\n/**\n * Options for safe read operations that don't throw on errors.\n */\nexport interface SafeReadOptions extends ReadOptions {\n /**\n * Default value to return on read failure.\n * If not provided, `undefined` is returned on error.\n */\n defaultValue?: unknown | undefined\n}\n\n/**\n * Options for write operations with encoding and mode control.\n */\nexport interface WriteOptions extends Abortable {\n /**\n * Character encoding for writing.\n * @default 'utf8'\n */\n encoding?: BufferEncoding | string | undefined\n /**\n * File mode (permissions) to set.\n * Uses standard Unix permission bits (e.g., 0o644).\n * @default 0o666 (read/write for all, respecting umask)\n */\n mode?: number | undefined\n /**\n * File system flag for write behavior.\n * @default 'w' (create or truncate)\n */\n flag?: string | undefined\n}\n\n/**\n * Options for writing JSON files with formatting control.\n */\nexport interface WriteJsonOptions extends WriteOptions {\n /**\n * End-of-line sequence to use.\n * @default '\\n'\n * @example\n * ```ts\n * // Windows-style line endings\n * writeJson('data.json', data, { EOL: '\\r\\n' })\n * ```\n */\n EOL?: string | undefined\n /**\n * Whether to add a final newline at end of file.\n * @default true\n */\n finalEOL?: boolean | undefined\n /**\n * JSON replacer function to transform values during stringification.\n * Same as the second parameter to `JSON.stringify()`.\n */\n replacer?: JsonReviver | undefined\n /**\n * Number of spaces for indentation, or string to use for indentation.\n * @default 2\n * @example\n * ```ts\n * // Use tabs instead of spaces\n * writeJson('data.json', data, { spaces: '\\t' })\n *\n * // Use 4 spaces for indentation\n * writeJson('data.json', data, { spaces: 4 })\n * ```\n */\n spaces?: number | string | undefined\n}\n\nconst defaultRemoveOptions = objectFreeze({\n __proto__: null,\n force: true,\n maxRetries: 3,\n recursive: true,\n retryDelay: 200,\n})\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module to avoid Webpack 
errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js path module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path as typeof import('path')\n}\n\n/**\n * Process directory entries and filter for directories.\n * Filters entries to include only directories, optionally excluding empty ones.\n * Applies ignore patterns and natural sorting.\n *\n * @param dirents - Directory entries from readdir\n * @param dirname - Parent directory path\n * @param options - Filtering and sorting options\n * @returns Array of directory names, optionally sorted\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction innerReadDirNames(\n dirents: Dirent[],\n dirname: string | undefined,\n options?: ReadDirOptions | undefined,\n): string[] {\n const {\n ignore,\n includeEmpty = true,\n sort = true,\n } = { __proto__: null, ...options } as ReadDirOptions\n const path = getPath()\n const names = dirents\n .filter(\n (d: Dirent) =>\n d.isDirectory() &&\n (includeEmpty ||\n !isDirEmptySync(path.join(dirname || d.parentPath, d.name), {\n ignore,\n })),\n )\n .map((d: Dirent) => d.name)\n return sort ? names.sort(naturalCompare) : names\n}\n\n/**\n * Stringify JSON with custom formatting options.\n * Formats JSON with configurable line endings and indentation.\n *\n * @param json - Value to stringify\n * @param EOL - End-of-line sequence\n * @param finalEOL - Whether to add final newline\n * @param replacer - JSON replacer function\n * @param spaces - Indentation spaces or string\n * @returns Formatted JSON string\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction stringify(\n json: unknown,\n EOL: string,\n finalEOL: boolean,\n replacer: JsonReviver | undefined,\n spaces: number | string = 2,\n): string {\n const EOF = finalEOL ? 
EOL : ''\n const str = JSON.stringify(json, replacer, spaces)\n return `${str.replace(/\\n/g, EOL)}${EOF}`\n}\n\n/**\n * Find a file or directory by traversing up parent directories.\n * Searches from the starting directory upward to the filesystem root.\n * Useful for finding configuration files or project roots.\n *\n * @param name - Filename(s) to search for\n * @param options - Search options including cwd and type filters\n * @returns Normalized absolute path if found, undefined otherwise\n *\n * @example\n * ```ts\n * // Find package.json starting from current directory\n * const pkgPath = await findUp('package.json')\n *\n * // Find any of multiple config files\n * const configPath = await findUp(['.config.js', '.config.json'])\n *\n * // Find a directory instead of file\n * const nodeModules = await findUp('node_modules', { onlyDirectories: true })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function findUp(\n name: string | string[] | readonly string[],\n options?: FindUpOptions | undefined,\n): Promise<string | undefined> {\n const { cwd = process.cwd(), signal = abortSignal } = {\n __proto__: null,\n ...options,\n } as FindUpOptions\n let { onlyDirectories = false, onlyFiles = true } = {\n __proto__: null,\n ...options,\n } as FindUpOptions\n if (onlyDirectories) {\n onlyFiles = false\n }\n if (onlyFiles) {\n onlyDirectories = false\n }\n const fs = getFs()\n const path = getPath()\n let dir = path.resolve(cwd)\n const { root } = path.parse(dir)\n const names = isArray(name) ? name : [name as string]\n while (dir && dir !== root) {\n for (const n of names) {\n if (signal?.aborted) {\n return undefined\n }\n const thePath = path.join(dir, n)\n try {\n // eslint-disable-next-line no-await-in-loop\n const stats = await fs.promises.stat(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n dir = path.dirname(dir)\n }\n return undefined\n}\n\n/**\n * Synchronously find a file or directory by traversing up parent directories.\n * Searches from the starting directory upward to the filesystem root or `stopAt` directory.\n * Useful for finding configuration files or project roots in synchronous contexts.\n *\n * @param name - Filename(s) to search for\n * @param options - Search options including cwd, stopAt, and type filters\n * @returns Normalized absolute path if found, undefined otherwise\n *\n * @example\n * ```ts\n * // Find package.json starting from current directory\n * const pkgPath = findUpSync('package.json')\n *\n * // Find .git directory but stop at home directory\n * const gitPath = findUpSync('.git', {\n * onlyDirectories: true,\n * stopAt: process.env.HOME\n * })\n *\n * // Find any of multiple config files\n * const configPath = findUpSync(['.eslintrc.js', '.eslintrc.json'])\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function findUpSync(\n name: string | string[] | readonly string[],\n options?: FindUpSyncOptions | undefined,\n) {\n const { cwd = process.cwd(), stopAt } = {\n __proto__: null,\n ...options,\n } as FindUpSyncOptions\n let { onlyDirectories = false, onlyFiles = true } = {\n __proto__: null,\n ...options,\n } as FindUpSyncOptions\n if (onlyDirectories) {\n onlyFiles = false\n }\n if (onlyFiles) {\n onlyDirectories = false\n }\n const fs = getFs()\n const path = getPath()\n let dir = path.resolve(cwd)\n const { root } = path.parse(dir)\n const stopDir = stopAt ? 
path.resolve(stopAt) : undefined\n const names = isArray(name) ? name : [name as string]\n while (dir && dir !== root) {\n // Check if we should stop at this directory.\n if (stopDir && dir === stopDir) {\n // Check current directory but don't go up.\n for (const n of names) {\n const thePath = path.join(dir, n)\n try {\n const stats = fs.statSync(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n return undefined\n }\n for (const n of names) {\n const thePath = path.join(dir, n)\n try {\n const stats = fs.statSync(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n dir = path.dirname(dir)\n }\n return undefined\n}\n\n/**\n * Check if a path is a directory asynchronously.\n * Returns `true` for directories, `false` for files or non-existent paths.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a directory, `false` otherwise\n *\n * @example\n * ```ts\n * if (await isDir('./src')) {\n * console.log('src is a directory')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function isDir(filepath: PathLike) {\n return !!(await safeStats(filepath))?.isDirectory()\n}\n\n/**\n * Check if a path is a directory synchronously.\n * Returns `true` for directories, `false` for files or non-existent paths.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a directory, `false` otherwise\n *\n * @example\n * ```ts\n * if (isDirSync('./src')) {\n * console.log('src is a directory')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isDirSync(filepath: PathLike) {\n return !!safeStatsSync(filepath)?.isDirectory()\n}\n\n/**\n * Check if a directory is empty synchronously.\n * A directory is considered empty if it contains no files after applying ignore patterns.\n * Uses glob patterns to filter ignored files.\n *\n * @param dirname - Directory path to check\n * @param options - Options including ignore patterns\n * @returns `true` if directory is empty (or doesn't exist), `false` otherwise\n *\n * @example\n * ```ts\n * // Check if directory is completely empty\n * isDirEmptySync('./build')\n *\n * // Check if directory is empty, ignoring .DS_Store files\n * isDirEmptySync('./cache', { ignore: ['.DS_Store'] })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isDirEmptySync(\n dirname: PathLike,\n options?: IsDirEmptyOptions | undefined,\n) {\n const { ignore = defaultIgnore } = {\n __proto__: null,\n ...options,\n } as IsDirEmptyOptions\n const fs = getFs()\n try {\n const files = fs.readdirSync(dirname)\n const { length } = files\n if (length === 0) {\n return true\n }\n const matcher = getGlobMatcher(\n ignore as string[],\n {\n cwd: pathLikeToString(dirname),\n } as { cwd?: string; dot?: boolean; ignore?: string[]; nocase?: boolean },\n )\n let ignoredCount = 0\n for (let i = 0; i < length; i += 1) {\n const file = files[i]\n if (file && matcher(file)) {\n ignoredCount += 1\n }\n }\n return ignoredCount === length\n } catch {\n // Return false for non-existent paths or other errors.\n return false\n }\n}\n\n/**\n * Check if a path is a symbolic link synchronously.\n * Uses `lstat` to check the link itself, not the target.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a symbolic link, `false` otherwise\n *\n * @example\n * ```ts\n * if 
(isSymLinkSync('./my-link')) {\n * console.log('Path is a symbolic link')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isSymLinkSync(filepath: PathLike) {\n const fs = getFs()\n try {\n return fs.lstatSync(filepath).isSymbolicLink()\n } catch {}\n return false\n}\n\n/**\n * Result of file readability validation.\n * Contains lists of valid and invalid file paths.\n */\nexport interface ValidateFilesResult {\n /**\n * File paths that passed validation and are readable.\n */\n validPaths: string[]\n /**\n * File paths that failed validation (unreadable, permission denied, or non-existent).\n * Common with Yarn Berry PnP virtual filesystem, pnpm symlinks, or filesystem race conditions.\n */\n invalidPaths: string[]\n}\n\n/**\n * Validate that file paths are readable before processing.\n * Filters out files from glob results that cannot be accessed (common with\n * Yarn Berry PnP virtual filesystem, pnpm content-addressable store symlinks,\n * or filesystem race conditions in CI/CD environments).\n *\n * This defensive pattern prevents ENOENT errors when files exist in glob\n * results but are not accessible via standard filesystem operations.\n *\n * @param filepaths - Array of file paths to validate\n * @returns Object with `validPaths` (readable) and `invalidPaths` (unreadable)\n *\n * @example\n * ```ts\n * import { validateFiles } from '@socketsecurity/lib/fs'\n *\n * const files = ['package.json', '.pnp.cjs/virtual-file.json']\n * const { validPaths, invalidPaths } = validateFiles(files)\n *\n * console.log(`Valid: ${validPaths.length}`)\n * console.log(`Invalid: ${invalidPaths.length}`)\n * ```\n *\n * @example\n * ```ts\n * // Typical usage in Socket CLI commands\n * const packagePaths = await getPackageFilesForScan(targets)\n * const { validPaths } = validateFiles(packagePaths)\n * await sdk.uploadManifestFiles(orgSlug, validPaths)\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function validateFiles(\n filepaths: string[] | readonly string[],\n): ValidateFilesResult {\n const fs = getFs()\n const validPaths: string[] = []\n const invalidPaths: string[] = []\n const { R_OK } = fs.constants\n\n for (const filepath of filepaths) {\n try {\n fs.accessSync(filepath, R_OK)\n validPaths.push(filepath)\n } catch {\n invalidPaths.push(filepath)\n }\n }\n\n return { __proto__: null, validPaths, invalidPaths } as ValidateFilesResult\n}\n\n/**\n * Read directory names asynchronously with filtering and sorting.\n * Returns only directory names (not files), with optional filtering for empty directories\n * and glob-based ignore patterns. 
Results are naturally sorted by default.\n *\n * @param dirname - Directory path to read\n * @param options - Options for filtering and sorting\n * @returns Array of directory names, empty array on error\n *\n * @example\n * ```ts\n * // Get all subdirectories, sorted naturally\n * const dirs = await readDirNames('./packages')\n *\n * // Get non-empty directories only\n * const nonEmpty = await readDirNames('./cache', { includeEmpty: false })\n *\n * // Get directories without sorting\n * const unsorted = await readDirNames('./src', { sort: false })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readDirNames(\n dirname: PathLike,\n options?: ReadDirOptions | undefined,\n) {\n const fs = getFs()\n try {\n return innerReadDirNames(\n await fs.promises.readdir(dirname, {\n __proto__: null,\n encoding: 'utf8',\n withFileTypes: true,\n } as ObjectEncodingOptions & { withFileTypes: true }),\n String(dirname),\n options,\n )\n } catch {}\n return []\n}\n\n/**\n * Read directory names synchronously with filtering and sorting.\n * Returns only directory names (not files), with optional filtering for empty directories\n * and glob-based ignore patterns. Results are naturally sorted by default.\n *\n * @param dirname - Directory path to read\n * @param options - Options for filtering and sorting\n * @returns Array of directory names, empty array on error\n *\n * @example\n * ```ts\n * // Get all subdirectories, sorted naturally\n * const dirs = readDirNamesSync('./packages')\n *\n * // Get non-empty directories only, ignoring node_modules\n * const nonEmpty = readDirNamesSync('./src', {\n * includeEmpty: false,\n * ignore: ['node_modules']\n * })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readDirNamesSync(dirname: PathLike, options?: ReadDirOptions) {\n const fs = getFs()\n try {\n return innerReadDirNames(\n fs.readdirSync(dirname, {\n __proto__: null,\n encoding: 'utf8',\n withFileTypes: true,\n } as ObjectEncodingOptions & { withFileTypes: true }),\n String(dirname),\n options,\n )\n } catch {}\n return []\n}\n\n/**\n * Read a file as binary data asynchronously.\n * Returns a Buffer without encoding the contents.\n * Useful for reading images, archives, or other binary formats.\n *\n * @param filepath - Path to file\n * @param options - Read options (encoding is forced to null for binary)\n * @returns Promise resolving to Buffer containing file contents\n *\n * @example\n * ```ts\n * // Read an image file\n * const imageBuffer = await readFileBinary('./image.png')\n *\n * // Read with abort signal\n * const buffer = await readFileBinary('./data.bin', { signal: abortSignal })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readFileBinary(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n // Don't specify encoding to get a Buffer.\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n encoding: null,\n })\n}\n\n/**\n * Read a file as UTF-8 text asynchronously.\n * Returns a string with the file contents decoded as UTF-8.\n * This is the most common way to read text files.\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and abort signal\n * @returns Promise resolving to string containing file contents\n *\n * @example\n * ```ts\n * // Read a text file\n * const content = await readFileUtf8('./README.md')\n *\n * // Read with custom encoding\n * const content = await readFileUtf8('./data.txt', { encoding: 'utf-8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readFileUtf8(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n encoding: 'utf8',\n })\n}\n\n/**\n * Read a file as binary data synchronously.\n * Returns a Buffer without encoding the contents.\n * Useful for reading images, archives, or other binary formats.\n *\n * @param filepath - Path to file\n * @param options - Read options (encoding is forced to null for binary)\n * @returns Buffer containing file contents\n *\n * @example\n * ```ts\n * // Read an image file\n * const imageBuffer = readFileBinarySync('./logo.png')\n *\n * // Read a compressed file\n * const gzipData = readFileBinarySync('./archive.gz')\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readFileBinarySync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n // Don't specify encoding to get a Buffer\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n return fs.readFileSync(filepath, {\n ...opts,\n encoding: null,\n } as ObjectEncodingOptions)\n}\n\n/**\n * Read a file as UTF-8 text synchronously.\n * Returns a string with the file contents decoded as UTF-8.\n * This is the most common way to read text files synchronously.\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding\n * @returns String containing file contents\n *\n * @example\n * ```ts\n * // Read a configuration file\n * const config = readFileUtf8Sync('./config.txt')\n *\n * // Read with custom options\n * const data = readFileUtf8Sync('./data.txt', { encoding: 'utf8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readFileUtf8Sync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n return fs.readFileSync(filepath, {\n ...opts,\n encoding: 'utf8',\n } as ObjectEncodingOptions)\n}\n\n/**\n * Read and parse a JSON file asynchronously.\n * Reads the file as UTF-8 text and parses it as JSON.\n * Optionally accepts a reviver function to transform parsed values.\n *\n * @param filepath - Path to JSON file\n * @param options - Read and parse options\n * @returns Promise resolving to parsed JSON value, or undefined if throws is false and an error occurs\n *\n * @example\n * ```ts\n * // Read and parse package.json\n * const pkg = await readJson('./package.json')\n *\n * // Read JSON with custom reviver\n * const data = await readJson('./data.json', {\n * reviver: (key, value) => {\n * if (key === 'date') return new Date(value)\n * return value\n * }\n * })\n *\n * // Don't throw on parse errors\n * const config = await readJson('./config.json', { throws: false })\n * if (config === undefined) {\n * console.log('Failed to parse config')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readJson(\n filepath: PathLike,\n options?: ReadJsonOptions | string | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { reviver, throws, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as unknown as ReadJsonOptions\n const shouldThrow = throws === undefined || !!throws\n const fs = getFs()\n let content = ''\n try {\n content = await fs.promises.readFile(filepath, {\n __proto__: null,\n encoding: 'utf8',\n ...fsOptions,\n } as unknown as Parameters<typeof fs.promises.readFile>[1] & {\n encoding: string\n })\n } catch (e) {\n if (shouldThrow) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'ENOENT') {\n throw new Error(\n `JSON file not found: ${filepath}\\n` +\n 'Ensure the file exists or create it with the expected structure.',\n { cause: e },\n )\n }\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied reading JSON file: ${filepath}\\n` +\n 'Check file permissions or run with appropriate access.',\n { cause: e },\n )\n }\n throw e\n }\n return undefined\n }\n return jsonParse(content, {\n filepath: String(filepath),\n reviver,\n throws: shouldThrow,\n })\n}\n\n/**\n * Read and parse a JSON file synchronously.\n * Reads the file as UTF-8 text and parses it as JSON.\n * Optionally accepts a reviver function to transform parsed values.\n *\n * @param filepath - Path to JSON file\n * @param options - Read and parse options\n * @returns Parsed JSON value, or undefined if throws is false and an error occurs\n *\n * @example\n * ```ts\n * // Read and parse tsconfig.json\n * const tsconfig = readJsonSync('./tsconfig.json')\n *\n * // Read JSON with custom reviver\n * const data = readJsonSync('./data.json', {\n * reviver: (key, value) => {\n * if (typeof value === 'string' && /^\\d{4}-\\d{2}-\\d{2}/.test(value)) {\n * return new Date(value)\n * }\n * return value\n * }\n * })\n *\n * // Don't throw on parse errors\n * const config = readJsonSync('./config.json', { throws: false })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readJsonSync(\n filepath: PathLike,\n options?: ReadJsonOptions | string | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const { reviver, throws, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as unknown as ReadJsonOptions\n const shouldThrow = throws === undefined || !!throws\n const fs = getFs()\n let content = ''\n try {\n content = fs.readFileSync(filepath, {\n __proto__: null,\n encoding: 'utf8',\n ...fsOptions,\n } as unknown as Parameters<typeof fs.readFileSync>[1] & {\n encoding: string\n })\n } catch (e) {\n if (shouldThrow) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'ENOENT') {\n throw new Error(\n `JSON file not found: ${filepath}\\n` +\n 'Ensure the file exists or create it with the expected structure.',\n { cause: e },\n )\n }\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied reading JSON file: ${filepath}\\n` +\n 'Check file permissions or run with appropriate access.',\n { cause: e },\n )\n }\n throw e\n }\n return undefined\n }\n return jsonParse(content, {\n filepath: String(filepath),\n reviver,\n throws: shouldThrow,\n })\n}\n\n// Cache for resolved allowed directories\nlet _cachedAllowedDirs: string[] | undefined\n\n/**\n * Get resolved allowed directories for safe deletion with lazy caching.\n * These directories are resolved once and cached for the process lifetime.\n */\nfunction getAllowedDirectories(): string[] {\n if (_cachedAllowedDirs === undefined) {\n const path = getPath()\n const {\n getOsTmpDir,\n getSocketCacacheDir,\n getSocketUserDir,\n } = /*@__PURE__*/ require('#lib/paths')\n\n _cachedAllowedDirs = [\n path.resolve(getOsTmpDir()),\n path.resolve(getSocketCacacheDir()),\n path.resolve(getSocketUserDir()),\n ]\n }\n return _cachedAllowedDirs\n}\n\n/**\n * Invalidate the cached allowed directories.\n * Called automatically by the paths/rewire module when paths are overridden in tests.\n *\n * @internal Used for test rewiring\n */\nexport function invalidatePathCache(): void {\n _cachedAllowedDirs = undefined\n}\n\n// Register cache invalidation with the rewire module\nregisterCacheInvalidation(invalidatePathCache)\n\n/**\n * Safely delete a file or directory asynchronously with built-in protections.\n * Uses `del` for safer deletion that prevents removing cwd and above by default.\n * Automatically uses force: true for temp directory, cacache, and ~/.socket subdirectories.\n *\n * @param filepath - Path or array of paths to delete (supports glob patterns)\n * @param options - Deletion options including force, retries, and recursion\n * @throws {Error} When attempting to delete protected paths without force option\n *\n * @example\n * ```ts\n * // Delete a single file\n * await safeDelete('./temp-file.txt')\n *\n * // Delete a directory recursively\n * await safeDelete('./build', { recursive: true })\n *\n * // Delete multiple paths\n * await safeDelete(['./dist', './coverage'])\n *\n * // Delete with custom retry settings\n * await safeDelete('./flaky-dir', { maxRetries: 5, retryDelay: 500 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeDelete(\n filepath: PathLike | PathLike[],\n options?: RemoveOptions | undefined,\n) {\n const del = /*@__PURE__*/ require('./external/del')\n const { deleteAsync } = del\n const opts = { __proto__: null, ...options } as RemoveOptions\n const patterns = isArray(filepath)\n ? 
filepath.map(pathLikeToString)\n : [pathLikeToString(filepath)]\n\n // Check if we're deleting within allowed directories.\n let shouldForce = opts.force !== false\n if (!shouldForce && patterns.length > 0) {\n const path = getPath()\n const allowedDirs = getAllowedDirectories()\n\n // Check if all patterns are within allowed directories.\n const allInAllowedDirs = patterns.every(pattern => {\n const resolvedPath = path.resolve(pattern)\n\n // Check each allowed directory\n for (const allowedDir of allowedDirs) {\n const isInAllowedDir =\n resolvedPath.startsWith(allowedDir + path.sep) ||\n resolvedPath === allowedDir\n const relativePath = path.relative(allowedDir, resolvedPath)\n const isGoingBackward = relativePath.startsWith('..')\n\n if (isInAllowedDir && !isGoingBackward) {\n return true\n }\n }\n\n return false\n })\n\n if (allInAllowedDirs) {\n shouldForce = true\n }\n }\n\n await deleteAsync(patterns, {\n concurrency: opts.maxRetries || defaultRemoveOptions.maxRetries,\n dryRun: false,\n force: shouldForce,\n onlyFiles: false,\n })\n}\n\n/**\n * Safely delete a file or directory synchronously with built-in protections.\n * Uses `del` for safer deletion that prevents removing cwd and above by default.\n * Automatically uses force: true for temp directory, cacache, and ~/.socket subdirectories.\n *\n * @param filepath - Path or array of paths to delete (supports glob patterns)\n * @param options - Deletion options including force, retries, and recursion\n * @throws {Error} When attempting to delete protected paths without force option\n *\n * @example\n * ```ts\n * // Delete a single file\n * safeDeleteSync('./temp-file.txt')\n *\n * // Delete a directory recursively\n * safeDeleteSync('./build', { recursive: true })\n *\n * // Delete multiple paths with globs\n * safeDeleteSync(['./dist/**', './coverage/**'])\n *\n * // Force delete a protected path (use with caution)\n * safeDeleteSync('./important', { force: true })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeDeleteSync(\n filepath: PathLike | PathLike[],\n options?: RemoveOptions | undefined,\n) {\n const del = /*@__PURE__*/ require('./external/del')\n const { deleteSync } = del\n const opts = { __proto__: null, ...options } as RemoveOptions\n const patterns = isArray(filepath)\n ? 
filepath.map(pathLikeToString)\n : [pathLikeToString(filepath)]\n\n // Check if we're deleting within allowed directories.\n let shouldForce = opts.force !== false\n if (!shouldForce && patterns.length > 0) {\n const path = getPath()\n const allowedDirs = getAllowedDirectories()\n\n // Check if all patterns are within allowed directories.\n const allInAllowedDirs = patterns.every(pattern => {\n const resolvedPath = path.resolve(pattern)\n\n // Check each allowed directory\n for (const allowedDir of allowedDirs) {\n const isInAllowedDir =\n resolvedPath.startsWith(allowedDir + path.sep) ||\n resolvedPath === allowedDir\n const relativePath = path.relative(allowedDir, resolvedPath)\n const isGoingBackward = relativePath.startsWith('..')\n\n if (isInAllowedDir && !isGoingBackward) {\n return true\n }\n }\n\n return false\n })\n\n if (allInAllowedDirs) {\n shouldForce = true\n }\n }\n\n deleteSync(patterns, {\n concurrency: opts.maxRetries || defaultRemoveOptions.maxRetries,\n dryRun: false,\n force: shouldForce,\n onlyFiles: false,\n })\n}\n\n/**\n * Safely read a file asynchronously, returning undefined on error.\n * Useful when you want to attempt reading a file without handling errors explicitly.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and default value\n * @returns Promise resolving to file contents, or undefined on error\n *\n * @example\n * ```ts\n * // Try to read a file, get undefined if it doesn't exist\n * const content = await safeReadFile('./optional-config.txt')\n * if (content) {\n * console.log('Config found:', content)\n * }\n *\n * // Read with specific encoding\n * const data = await safeReadFile('./data.txt', { encoding: 'utf8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeReadFile(\n filepath: PathLike,\n options?: SafeReadOptions | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n try {\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n } as Abortable)\n } catch {}\n return undefined\n}\n\n/**\n * Safely get file stats asynchronously, returning undefined on error.\n * Useful for checking file existence and properties without error handling.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to check\n * @returns Promise resolving to Stats object, or undefined on error\n *\n * @example\n * ```ts\n * // Check if file exists and get its stats\n * const stats = await safeStats('./file.txt')\n * if (stats) {\n * console.log('File size:', stats.size)\n * console.log('Modified:', stats.mtime)\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeStats(filepath: PathLike) {\n const fs = getFs()\n try {\n return await fs.promises.stat(filepath)\n } catch {}\n return undefined\n}\n\n/**\n * Safely get file stats synchronously, returning undefined on error.\n * Useful for checking file existence and properties without error handling.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to check\n * @param options - Read options (currently unused but kept for API consistency)\n * @returns Stats object, or undefined on error\n *\n * @example\n * ```ts\n * // Check if file exists and get its size\n * const stats = safeStatsSync('./file.txt')\n * if (stats) {\n * console.log('File size:', stats.size)\n * console.log('Is directory:', stats.isDirectory())\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeStatsSync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n try {\n return fs.statSync(filepath, {\n __proto__: null,\n throwIfNoEntry: false,\n ...opts,\n } as StatSyncOptions)\n } catch {}\n return undefined\n}\n\n/**\n * Safely read a file synchronously, returning undefined on error.\n * Useful when you want to attempt reading a file without handling errors explicitly.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and default value\n * @returns File contents, or undefined on error\n *\n * @example\n * ```ts\n * // Try to read a config file\n * const config = safeReadFileSync('./config.txt')\n * if (config) {\n * console.log('Config loaded successfully')\n * }\n *\n * // Read binary file safely\n * const buffer = safeReadFileSync('./image.png', { encoding: null })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeReadFileSync(\n filepath: PathLike,\n options?: SafeReadOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n try {\n return fs.readFileSync(filepath, {\n __proto__: null,\n ...opts,\n } as ObjectEncodingOptions)\n } catch {}\n return undefined\n}\n\n/**\n * Generate a unique filepath by adding number suffix if the path exists.\n * Appends `-1`, `-2`, etc. 
before the file extension until a non-existent path is found.\n * Useful for creating files without overwriting existing ones.\n *\n * @param filepath - Desired file path\n * @returns Normalized unique filepath (original if it doesn't exist, or with number suffix)\n *\n * @example\n * ```ts\n * // If 'report.pdf' exists, returns 'report-1.pdf'\n * const uniquePath = uniqueSync('./report.pdf')\n *\n * // If 'data.json' and 'data-1.json' exist, returns 'data-2.json'\n * const path = uniqueSync('./data.json')\n *\n * // If 'backup' doesn't exist, returns 'backup' unchanged\n * const backupPath = uniqueSync('./backup')\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function uniqueSync(filepath: PathLike): string {\n const fs = getFs()\n const path = getPath()\n const filepathStr = String(filepath)\n\n // If the file doesn't exist, return as is\n if (!fs.existsSync(filepathStr)) {\n return normalizePath(filepathStr)\n }\n\n const dirname = path.dirname(filepathStr)\n const ext = path.extname(filepathStr)\n const basename = path.basename(filepathStr, ext)\n\n let counter = 1\n let uniquePath: string\n do {\n uniquePath = path.join(dirname, `${basename}-${counter}${ext}`)\n counter++\n } while (fs.existsSync(uniquePath))\n\n return normalizePath(uniquePath)\n}\n\n/**\n * Write JSON content to a file asynchronously with formatting.\n * Stringifies the value with configurable indentation and line endings.\n * Automatically adds a final newline by default for POSIX compliance.\n *\n * @param filepath - Path to write to\n * @param jsonContent - Value to stringify and write\n * @param options - Write options including formatting and encoding\n * @returns Promise that resolves when write completes\n *\n * @example\n * ```ts\n * // Write formatted JSON with default 2-space indentation\n * await writeJson('./data.json', { name: 'example', version: '1.0.0' })\n *\n * // Write with custom indentation\n * await writeJson('./config.json', config, { spaces: 4 })\n *\n * // Write with tabs instead of spaces\n * await writeJson('./data.json', data, { spaces: '\\t' })\n *\n * // Write without final newline\n * await writeJson('./inline.json', obj, { finalEOL: false })\n *\n * // Write with Windows line endings\n * await writeJson('./win.json', data, { EOL: '\\r\\n' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function writeJson(\n filepath: PathLike,\n jsonContent: unknown,\n options?: WriteJsonOptions | string,\n): Promise<void> {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { EOL, finalEOL, replacer, spaces, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as WriteJsonOptions\n const fs = getFs()\n const jsonString = stringify(\n jsonContent,\n EOL || '\\n',\n finalEOL !== undefined ? 
finalEOL : true,\n replacer,\n spaces,\n )\n await fs.promises.writeFile(filepath, jsonString, {\n encoding: 'utf8',\n ...fsOptions,\n __proto__: null,\n } as ObjectEncodingOptions)\n}\n\n/**\n * Write JSON content to a file synchronously with formatting.\n * Stringifies the value with configurable indentation and line endings.\n * Automatically adds a final newline by default for POSIX compliance.\n *\n * @param filepath - Path to write to\n * @param jsonContent - Value to stringify and write\n * @param options - Write options including formatting and encoding\n *\n * @example\n * ```ts\n * // Write formatted JSON with default 2-space indentation\n * writeJsonSync('./package.json', pkg)\n *\n * // Write with custom indentation\n * writeJsonSync('./tsconfig.json', tsconfig, { spaces: 4 })\n *\n * // Write with tabs for indentation\n * writeJsonSync('./data.json', data, { spaces: '\\t' })\n *\n * // Write compacted (no indentation)\n * writeJsonSync('./compact.json', data, { spaces: 0 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function writeJsonSync(\n filepath: PathLike,\n jsonContent: unknown,\n options?: WriteJsonOptions | string | undefined,\n): void {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { EOL, finalEOL, replacer, spaces, ...fsOptions } = {\n __proto__: null,\n ...opts,\n }\n const fs = getFs()\n const jsonString = stringify(\n jsonContent,\n EOL || '\\n',\n finalEOL !== undefined ? finalEOL : true,\n replacer,\n spaces,\n )\n fs.writeFileSync(filepath, jsonString, {\n encoding: 'utf8',\n ...fsOptions,\n __proto__: null,\n } as WriteFileOptions)\n}\n"],
- "mappings": ";4ZAAA,IAAAA,GAAA,GAAAC,EAAAD,GAAA,YAAAE,EAAA,eAAAC,EAAA,wBAAAC,EAAA,UAAAC,EAAA,mBAAAC,EAAA,cAAAC,EAAA,kBAAAC,EAAA,iBAAAC,EAAA,qBAAAC,EAAA,mBAAAC,EAAA,uBAAAC,GAAA,iBAAAC,GAAA,qBAAAC,GAAA,aAAAC,GAAA,iBAAAC,GAAA,eAAAC,GAAA,mBAAAC,GAAA,iBAAAC,GAAA,qBAAAC,GAAA,cAAAC,EAAA,kBAAAC,EAAA,eAAAC,GAAA,kBAAAC,EAAA,cAAAC,GAAA,kBAAAC,KAAA,eAAAC,EAAA3B,IAeA,IAAA4B,EAA+B,8BAE/BC,EAAwB,oBAIxBC,EAA8C,mBAE9CC,EAA0B,kBAC1BC,EAAyC,qBACzCC,EAAgD,kBAChDC,EAA0C,0BAC1CC,EAA+B,mBAR/B,MAAMC,KAAc,kBAAe,EA6Q7BC,KAAuB,gBAAa,CACxC,UAAW,KACX,MAAO,GACP,WAAY,EACZ,UAAW,GACX,WAAY,GACd,CAAC,EAED,IAAIC,EASJ,SAASC,GAAQ,CACf,OAAID,IAAQ,SAGVA,EAAoB,QAAQ,SAAS,GAEhCA,CACT,CAEA,IAAIE,EASJ,SAASC,GAAU,CACjB,OAAID,IAAU,SAGZA,EAAsB,QAAQ,WAAW,GAEpCA,CACT,CAcA,SAASE,EACPC,EACAC,EACAC,EACU,CACV,KAAM,CACJ,OAAAC,EACA,aAAAC,EAAe,GACf,KAAAC,EAAO,EACT,EAAI,CAAE,UAAW,KAAM,GAAGH,CAAQ,EAC5BI,EAAOR,EAAQ,EACfS,EAAQP,EACX,OACEQ,GACCA,EAAE,YAAY,IACbJ,GACC,CAACzC,EAAe2C,EAAK,KAAKL,GAAWO,EAAE,WAAYA,EAAE,IAAI,EAAG,CAC1D,OAAAL,CACF,CAAC,EACP,EACC,IAAKK,GAAcA,EAAE,IAAI,EAC5B,OAAOH,EAAOE,EAAM,KAAK,gBAAc,EAAIA,CAC7C,CAeA,SAASE,EACPC,EACAC,EACAC,EACAC,EACAC,EAA0B,EAClB,CACR,MAAMC,EAAMH,EAAWD,EAAM,GAE7B,MAAO,GADK,KAAK,UAAUD,EAAMG,EAAUC,CAAM,EACnC,QAAQ,MAAOH,CAAG,CAAC,GAAGI,CAAG,EACzC,CAwBA,eAAsBxD,EACpByD,EACAd,EAC6B,CAC7B,KAAM,CAAE,IAAAe,EAAM,QAAQ,IAAI,EAAG,OAAAC,EAASzB,CAAY,EAAI,CACpD,UAAW,KACX,GAAGS,CACL,EACA,GAAI,CAAE,gBAAAiB,EAAkB,GAAO,UAAAC,EAAY,EAAK,EAAI,CAClD,UAAW,KACX,GAAGlB,CACL,EACIiB,IACFC,EAAY,IAEVA,IACFD,EAAkB,IAEpB,MAAME,EAAKzB,EAAM,EACXU,EAAOR,EAAQ,EACrB,IAAIwB,EAAMhB,EAAK,QAAQW,CAAG,EAC1B,KAAM,CAAE,KAAAM,CAAK,EAAIjB,EAAK,MAAMgB,CAAG,EACzBf,KAAQ,WAAQS,CAAI,EAAIA,EAAO,CAACA,CAAc,EACpD,KAAOM,GAAOA,IAAQC,GAAM,CAC1B,UAAWC,KAAKjB,EAAO,CACrB,GAAIW,GAAQ,QACV,OAEF,MAAMO,EAAUnB,EAAK,KAAKgB,EAAKE,CAAC,EAChC,GAAI,CAEF,MAAME,EAAQ,MAAML,EAAG,SAAS,KAAKI,CAAO,EAC5C,GAAI,CAACN,GAAmBO,EAAM,OAAO,EACnC,SAAO,iBAAcD,CAAO,EAE9B,GAAI,CAACL,GAAaM,EAAM,YAAY,EAClC,SAAO,iBAAcD,CAAO,CAEhC,MAAQ,CAAC,CACX,CACAH,EAAMhB,EAAK,QAAQgB,CAAG,CACxB,CAEF,CA2BO,SAAS9D,EACdwD,EACAd,EACA,CACA,KAAM,CAAE,IAAAe,EAAM,QAAQ,IAAI,EAAG,OAAAU,CAAO,EAAI,CACtC,UAAW,KACX,GAAGzB,CACL,EACA,GAAI,CAAE,gBAAAiB,EAAkB,GAAO,UAAAC,EAAY,EAAK,EAAI,CAClD,UAAW,KACX,GAAGlB,CACL,EACIiB,IACFC,EAAY,IAEVA,IACFD,EAAkB,IAEpB,MAAME,EAAKzB,EAAM,EACXU,EAAOR,EAAQ,EACrB,IAAIwB,EAAMhB,EAAK,QAAQW,CAAG,EAC1B,KAAM,CAAE,KAAAM,CAAK,EAAIjB,EAAK,MAAMgB,CAAG,EACzBM,EAAUD,EAASrB,EAAK,QAAQqB,CAAM,EAAI,OAC1CpB,KAAQ,WAAQS,CAAI,EAAIA,EAAO,CAACA,CAAc,EACpD,KAAOM,GAAOA,IAAQC,GAAM,CAE1B,GAAIK,GAAWN,IAAQM,EAAS,CAE9B,UAAWJ,KAAKjB,EAAO,CACrB,MAAMkB,EAAUnB,EAAK,KAAKgB,EAAKE,CAAC,EAChC,GAAI,CACF,MAAME,EAAQL,EAAG,SAASI,CAAO,EACjC,GAAI,CAACN,GAAmBO,EAAM,OAAO,EACnC,SAAO,iBAAcD,CAAO,EAE9B,GAAI,CAACL,GAAaM,EAAM,YAAY,EAClC,SAAO,iBAAcD,CAAO,CAEhC,MAAQ,CAAC,CACX,CACA,MACF,CACA,UAAWD,KAAKjB,EAAO,CACrB,MAAMkB,EAAUnB,EAAK,KAAKgB,EAAKE,CAAC,EAChC,GAAI,CACF,MAAME,EAAQL,EAAG,SAASI,CAAO,EACjC,GAAI,CAACN,GAAmBO,EAAM,OAAO,EACnC,SAAO,iBAAcD,CAAO,EAE9B,GAAI,CAACL,GAAaM,EAAM,YAAY,EAClC,SAAO,iBAAcD,CAAO,CAEhC,MAAQ,CAAC,CACX,CACAH,EAAMhB,EAAK,QAAQgB,CAAG,CACxB,CAEF,CAiBA,eAAsB5D,EAAMmE,EAAoB,CAC9C,MAAO,CAAC,EAAE,MAAMnD,EAAUmD,CAAQ,IAAI,YAAY,CACpD,CAiBO,SAASjE,EAAUiE,EAAoB,CAC5C,MAAO,CAAC,CAAClD,EAAckD,CAAQ,GAAG,YAAY,CAChD,CAqBO,SAASlE,EACdsC,EACAC,EACA,CACA,KAAM,CAAE,OAAAC,EAAS,eAAc,EAAI,CACjC,UAAW,KACX,GAAGD,CACL,EACMmB,EAAKzB,EAAM,EACjB,GAAI,CACF,MAAMkC,EAAQT,EAAG,YAAYpB,CAAO,EAC9B,CAAE,OAAA8B,CAAO,EAAID,EACnB,GAAIC,IAAW,EACb,MAAO,GAET,MAAMC,KAAU,kBACd7B,EACA,CACE,OAAK,oBAAiBF,CAAO,CAC/B,CACF,EACA,IAAIgC,EAAe,EACnB,QAASC,EAAI,EAAGA,EAAIH,EAAQG,GAAK,E
AAG,CAClC,MAAMC,EAAOL,EAAMI,CAAC,EAChBC,GAAQH,EAAQG,CAAI,IACtBF,GAAgB,EAEpB,CACA,OAAOA,IAAiBF,CAC1B,MAAQ,CAEN,MAAO,EACT,CACF,CAiBO,SAASlE,EAAcgE,EAAoB,CAChD,MAAMR,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAOyB,EAAG,UAAUQ,CAAQ,EAAE,eAAe,CAC/C,MAAQ,CAAC,CACT,MAAO,EACT,CAkDO,SAAShD,EACduD,EACqB,CACrB,MAAMf,EAAKzB,EAAM,EACXyC,EAAuB,CAAC,EACxBC,EAAyB,CAAC,EAC1B,CAAE,KAAAC,CAAK,EAAIlB,EAAG,UAEpB,UAAWQ,KAAYO,EACrB,GAAI,CACFf,EAAG,WAAWQ,EAAUU,CAAI,EAC5BF,EAAW,KAAKR,CAAQ,CAC1B,MAAQ,CACNS,EAAa,KAAKT,CAAQ,CAC5B,CAGF,MAAO,CAAE,UAAW,KAAM,WAAAQ,EAAY,aAAAC,CAAa,CACrD,CAwBA,eAAsBxE,EACpBmC,EACAC,EACA,CACA,MAAMmB,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAOG,EACL,MAAMsB,EAAG,SAAS,QAAQpB,EAAS,CACjC,UAAW,KACX,SAAU,OACV,cAAe,EACjB,CAAoD,EACpD,OAAOA,CAAO,EACdC,CACF,CACF,MAAQ,CAAC,CACT,MAAO,CAAC,CACV,CAwBO,SAASnC,EAAiBkC,EAAmBC,EAA0B,CAC5E,MAAMmB,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAOG,EACLsB,EAAG,YAAYpB,EAAS,CACtB,UAAW,KACX,SAAU,OACV,cAAe,EACjB,CAAoD,EACpD,OAAOA,CAAO,EACdC,CACF,CACF,MAAQ,CAAC,CACT,MAAO,CAAC,CACV,CAqBA,eAAsBlC,EACpB6D,EACA3B,EACA,CAEA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAEnE,OAAO,MADIN,EAAM,EACD,SAAS,SAASiC,EAAU,CAC1C,OAAQpC,EACR,GAAG+C,EACH,SAAU,IACZ,CAAC,CACH,CAqBA,eAAsBtE,GACpB2D,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAEnE,OAAO,MADIN,EAAM,EACD,SAAS,SAASiC,EAAU,CAC1C,OAAQpC,EACR,GAAG+C,EACH,SAAU,MACZ,CAAC,CACH,CAqBO,SAASvE,GACd4D,EACA3B,EACA,CAEA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAEnE,OADWN,EAAM,EACP,aAAaiC,EAAU,CAC/B,GAAGW,EACH,SAAU,IACZ,CAA0B,CAC5B,CAqBO,SAASrE,GACd0D,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAEnE,OADWN,EAAM,EACP,aAAaiC,EAAU,CAC/B,GAAGW,EACH,SAAU,MACZ,CAA0B,CAC5B,CAgCA,eAAsBpE,GACpByD,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7D,CAAE,QAAAuC,EAAS,OAAAC,EAAQ,GAAGC,CAAU,EAAI,CACxC,UAAW,KACX,GAAGH,CACL,EACMI,EAAcF,IAAW,QAAa,CAAC,CAACA,EACxCrB,EAAKzB,EAAM,EACjB,IAAIiD,EAAU,GACd,GAAI,CACFA,EAAU,MAAMxB,EAAG,SAAS,SAASQ,EAAU,CAC7C,UAAW,KACX,SAAU,OACV,GAAGc,CACL,CAEC,CACH,OAASG,EAAG,CACV,GAAIF,EAAa,CACf,MAAMG,EAAQD,EAA4B,KAC1C,MAAIC,IAAS,SACL,IAAI,MACR,wBAAwBlB,CAAQ;AAAA,kEAEhC,CAAE,MAAOiB,CAAE,CACb,EAEEC,IAAS,UAAYA,IAAS,QAC1B,IAAI,MACR,wCAAwClB,CAAQ;AAAA,wDAEhD,CAAE,MAAOiB,CAAE,CACb,EAEIA,CACR,CACA,MACF,CACA,SAAO,aAAUD,EAAS,CACxB,SAAU,OAAOhB,CAAQ,EACzB,QAAAY,EACA,OAAQG,CACV,CAAC,CACH,CA+BO,SAASvE,GACdwD,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7D,CAAE,QAAAuC,EAAS,OAAAC,EAAQ,GAAGC,CAAU,EAAI,CACxC,UAAW,KACX,GAAGH,CACL,EACMI,EAAcF,IAAW,QAAa,CAAC,CAACA,EACxCrB,EAAKzB,EAAM,EACjB,IAAIiD,EAAU,GACd,GAAI,CACFA,EAAUxB,EAAG,aAAaQ,EAAU,CAClC,UAAW,KACX,SAAU,OACV,GAAGc,CACL,CAEC,CACH,OAASG,EAAG,CACV,GAAIF,EAAa,CACf,MAAMG,EAAQD,EAA4B,KAC1C,MAAIC,IAAS,SACL,IAAI,MACR,wBAAwBlB,CAAQ;AAAA,kEAEhC,CAAE,MAAOiB,CAAE,CACb,EAEEC,IAAS,UAAYA,IAAS,QAC1B,IAAI,MACR,wCAAwClB,CAAQ;AAAA,wDAEhD,CAAE,MAAOiB,CAAE,CACb,EAEIA,CACR,CACA,MACF,CACA,SAAO,aAAUD,EAAS,CACxB,SAAU,OAAOhB,CAAQ,EACzB,QAAAY,EACA,OAAQG,CACV,CAAC,CACH,CAGA,IAAII,EAMJ,SAASC,GAAkC,CACzC,GAAID,IAAuB,OAAW,CACpC,MAAM1C,EAAOR,EAAQ,EACf,CACJ,YAAAoD,EACA,oBAAAC,EACA,iBAAAC,CACF,EAAkB,QAAQ,YAAY,EAEtCJ,EAAqB,CACnB1C,EAAK,QAAQ4C,EAAY,CAAC,EAC1B5C,EAAK,QAAQ6C,EAAoB,CAAC,EAClC7C,EAAK,QAAQ8C,EAAiB,CAAC,CACjC,CACF,CACA,OAAOJ,CACT,CAQO,SAASvF,GAA4B,CAC1CuF,EAAqB,MACvB,IAGA,6BAA0BvF,CAAmB,EA2B7C,eAAsBa,GACpBuD,EACA3B,EACA,CACA,MAAMmD,EAAoB,QAAQ,gBAAgB,EAC5C,CAAE,YAAAC,CAAY,EAAID,EAClBb,EAAO,CAAE,UAAW,KAAM,GAAGtC,CAAQ,EACrCqD,KAAW,WAAQ1B,CAAQ,EAC7BA,EAAS,IAAI,kBAAgB,EAC7B,IAAC,oBAAiBA,CAAQ,CAAC,EAG/B,IAAI2B,EAAchB,EAAK,QAAU,GACjC,GAAI,CAACgB,GAAeD,EAAS
,OAAS,EAAG,CACvC,MAAMjD,EAAOR,EAAQ,EACf2D,EAAcR,EAAsB,EAGjBM,EAAS,MAAMG,GAAW,CACjD,MAAMC,EAAerD,EAAK,QAAQoD,CAAO,EAGzC,UAAWE,KAAcH,EAAa,CACpC,MAAMI,EACJF,EAAa,WAAWC,EAAatD,EAAK,GAAG,GAC7CqD,IAAiBC,EAEbE,EADexD,EAAK,SAASsD,EAAYD,CAAY,EACtB,WAAW,IAAI,EAEpD,GAAIE,GAAkB,CAACC,EACrB,MAAO,EAEX,CAEA,MAAO,EACT,CAAC,IAGCN,EAAc,GAElB,CAEA,MAAMF,EAAYC,EAAU,CAC1B,YAAaf,EAAK,YAAc9C,EAAqB,WACrD,OAAQ,GACR,MAAO8D,EACP,UAAW,EACb,CAAC,CACH,CA2BO,SAASjF,GACdsD,EACA3B,EACA,CACA,MAAMmD,EAAoB,QAAQ,gBAAgB,EAC5C,CAAE,WAAAU,CAAW,EAAIV,EACjBb,EAAO,CAAE,UAAW,KAAM,GAAGtC,CAAQ,EACrCqD,KAAW,WAAQ1B,CAAQ,EAC7BA,EAAS,IAAI,kBAAgB,EAC7B,IAAC,oBAAiBA,CAAQ,CAAC,EAG/B,IAAI2B,EAAchB,EAAK,QAAU,GACjC,GAAI,CAACgB,GAAeD,EAAS,OAAS,EAAG,CACvC,MAAMjD,EAAOR,EAAQ,EACf2D,EAAcR,EAAsB,EAGjBM,EAAS,MAAMG,GAAW,CACjD,MAAMC,EAAerD,EAAK,QAAQoD,CAAO,EAGzC,UAAWE,KAAcH,EAAa,CACpC,MAAMI,EACJF,EAAa,WAAWC,EAAatD,EAAK,GAAG,GAC7CqD,IAAiBC,EAEbE,EADexD,EAAK,SAASsD,EAAYD,CAAY,EACtB,WAAW,IAAI,EAEpD,GAAIE,GAAkB,CAACC,EACrB,MAAO,EAEX,CAEA,MAAO,EACT,CAAC,IAGCN,EAAc,GAElB,CAEAO,EAAWR,EAAU,CACnB,YAAaf,EAAK,YAAc9C,EAAqB,WACrD,OAAQ,GACR,MAAO8D,EACP,UAAW,EACb,CAAC,CACH,CAwBA,eAAsBhF,GACpBqD,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7DmB,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAO,MAAMyB,EAAG,SAAS,SAASQ,EAAU,CAC1C,OAAQpC,EACR,GAAG+C,CACL,CAAc,CAChB,MAAQ,CAAC,CAEX,CAqBA,eAAsB9D,EAAUmD,EAAoB,CAClD,MAAMR,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAO,MAAMyB,EAAG,SAAS,KAAKQ,CAAQ,CACxC,MAAQ,CAAC,CAEX,CAsBO,SAASlD,EACdkD,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7DmB,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAOyB,EAAG,SAASQ,EAAU,CAC3B,UAAW,KACX,eAAgB,GAChB,GAAGW,CACL,CAAoB,CACtB,MAAQ,CAAC,CAEX,CAwBO,SAAS/D,GACdoD,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7DmB,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAOyB,EAAG,aAAaQ,EAAU,CAC/B,UAAW,KACX,GAAGW,CACL,CAA0B,CAC5B,MAAQ,CAAC,CAEX,CAuBO,SAAS5D,GAAWiD,EAA4B,CACrD,MAAMR,EAAKzB,EAAM,EACXU,EAAOR,EAAQ,EACfkE,EAAc,OAAOnC,CAAQ,EAGnC,GAAI,CAACR,EAAG,WAAW2C,CAAW,EAC5B,SAAO,iBAAcA,CAAW,EAGlC,MAAM/D,EAAUK,EAAK,QAAQ0D,CAAW,EAClCC,EAAM3D,EAAK,QAAQ0D,CAAW,EAC9BE,EAAW5D,EAAK,SAAS0D,EAAaC,CAAG,EAE/C,IAAIE,EAAU,EACVC,EACJ,GACEA,EAAa9D,EAAK,KAAKL,EAAS,GAAGiE,CAAQ,IAAIC,CAAO,GAAGF,CAAG,EAAE,EAC9DE,UACO9C,EAAG,WAAW+C,CAAU,GAEjC,SAAO,iBAAcA,CAAU,CACjC,CA+BA,eAAsBtF,GACpB+C,EACAwC,EACAnE,EACe,CACf,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7D,CAAE,IAAAS,EAAK,SAAAC,EAAU,SAAAC,EAAU,OAAAC,EAAQ,GAAG6B,CAAU,EAAI,CACxD,UAAW,KACX,GAAGH,CACL,EACMnB,EAAKzB,EAAM,EACX0E,EAAa7D,EACjB4D,EACA1D,GAAO;AAAA,EACPC,IAAa,OAAYA,EAAW,GACpCC,EACAC,CACF,EACA,MAAMO,EAAG,SAAS,UAAUQ,EAAUyC,EAAY,CAChD,SAAU,OACV,GAAG3B,EACH,UAAW,IACb,CAA0B,CAC5B,CA2BO,SAAS5D,GACd8C,EACAwC,EACAnE,EACM,CACN,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7D,CAAE,IAAAS,EAAK,SAAAC,EAAU,SAAAC,EAAU,OAAAC,EAAQ,GAAG6B,CAAU,EAAI,CACxD,UAAW,KACX,GAAGH,CACL,EACMnB,EAAKzB,EAAM,EACX0E,EAAa7D,EACjB4D,EACA1D,GAAO;AAAA,EACPC,IAAa,OAAYA,EAAW,GACpCC,EACAC,CACF,EACAO,EAAG,cAAcQ,EAAUyC,EAAY,CACrC,SAAU,OACV,GAAG3B,EACH,UAAW,IACb,CAAqB,CACvB",
- "names": ["fs_exports", "__export", "findUp", "findUpSync", "invalidatePathCache", "isDir", "isDirEmptySync", "isDirSync", "isSymLinkSync", "readDirNames", "readDirNamesSync", "readFileBinary", "readFileBinarySync", "readFileUtf8", "readFileUtf8Sync", "readJson", "readJsonSync", "safeDelete", "safeDeleteSync", "safeReadFile", "safeReadFileSync", "safeStats", "safeStatsSync", "uniqueSync", "validateFiles", "writeJson", "writeJsonSync", "__toCommonJS", "import_process", "import_arrays", "import_globs", "import_json", "import_objects", "import_path", "import_rewire", "import_sorts", "abortSignal", "defaultRemoveOptions", "_fs", "getFs", "_path", "getPath", "innerReadDirNames", "dirents", "dirname", "options", "ignore", "includeEmpty", "sort", "path", "names", "d", "stringify", "json", "EOL", "finalEOL", "replacer", "spaces", "EOF", "name", "cwd", "signal", "onlyDirectories", "onlyFiles", "fs", "dir", "root", "n", "thePath", "stats", "stopAt", "stopDir", "filepath", "files", "length", "matcher", "ignoredCount", "i", "file", "filepaths", "validPaths", "invalidPaths", "R_OK", "opts", "reviver", "throws", "fsOptions", "shouldThrow", "content", "e", "code", "_cachedAllowedDirs", "getAllowedDirectories", "getOsTmpDir", "getSocketCacacheDir", "getSocketUserDir", "del", "deleteAsync", "patterns", "shouldForce", "allowedDirs", "pattern", "resolvedPath", "allowedDir", "isInAllowedDir", "isGoingBackward", "deleteSync", "filepathStr", "ext", "basename", "counter", "uniquePath", "jsonContent", "jsonString"]
+ "sourcesContent": ["/**\n * @fileoverview File system utilities with cross-platform path handling.\n * Provides enhanced fs operations, glob matching, and directory traversal functions.\n */\n\nimport type { Abortable } from 'node:events'\n\nimport type {\n Dirent,\n MakeDirectoryOptions,\n ObjectEncodingOptions,\n OpenMode,\n PathLike,\n StatSyncOptions,\n WriteFileOptions,\n} from 'node:fs'\n\nimport { getAbortSignal } from '#constants/process'\n\nimport { isArray } from './arrays'\n\nconst abortSignal = getAbortSignal()\n\nimport { defaultIgnore, getGlobMatcher } from './globs'\nimport type { JsonReviver } from './json'\nimport { jsonParse } from './json'\nimport { objectFreeze, type Remap } from './objects'\nimport { normalizePath, pathLikeToString } from './path'\nimport { registerCacheInvalidation } from './paths/rewire'\nimport { naturalCompare } from './sorts'\n\n/**\n * Supported text encodings for Node.js Buffers.\n * Includes ASCII, UTF-8/16, base64, binary, and hexadecimal encodings.\n */\nexport type BufferEncoding =\n | 'ascii'\n | 'utf8'\n | 'utf-8'\n | 'utf16le'\n | 'ucs2'\n | 'ucs-2'\n | 'base64'\n | 'base64url'\n | 'latin1'\n | 'binary'\n | 'hex'\n\n/**\n * Represents any valid JSON content type.\n */\nexport type JsonContent = unknown\n\n/**\n * Options for asynchronous `findUp` operations.\n */\nexport interface FindUpOptions {\n /**\n * Starting directory for the search.\n * @default process.cwd()\n */\n cwd?: string | undefined\n /**\n * Only match directories, not files.\n * @default false\n */\n onlyDirectories?: boolean | undefined\n /**\n * Only match files, not directories.\n * @default true\n */\n onlyFiles?: boolean | undefined\n /**\n * Abort signal to cancel the search operation.\n */\n signal?: AbortSignal | undefined\n}\n\n/**\n * Options for synchronous `findUpSync` operations.\n */\nexport interface FindUpSyncOptions {\n /**\n * Starting directory for the search.\n * @default process.cwd()\n */\n cwd?: string | undefined\n /**\n * Directory to stop searching at (inclusive).\n * When provided, search will stop at this directory even if the root hasn't been reached.\n */\n stopAt?: string | undefined\n /**\n * Only match directories, not files.\n * @default false\n */\n onlyDirectories?: boolean | undefined\n /**\n * Only match files, not directories.\n * @default true\n */\n onlyFiles?: boolean | undefined\n}\n\n/**\n * Options for checking if a directory is empty.\n */\nexport interface IsDirEmptyOptions {\n /**\n * Glob patterns for files to ignore when checking emptiness.\n * Files matching these patterns are not counted.\n * @default defaultIgnore\n */\n ignore?: string[] | readonly string[] | undefined\n}\n\n/**\n * Options for read operations with abort support.\n */\nexport interface ReadOptions extends Abortable {\n /**\n * Character encoding to use for reading.\n * @default 'utf8'\n */\n encoding?: BufferEncoding | string | undefined\n /**\n * File system flag for reading behavior.\n * @default 'r'\n */\n flag?: string | undefined\n}\n\n/**\n * Options for reading directories with filtering and sorting.\n */\nexport interface ReadDirOptions {\n /**\n * Glob patterns for directories to ignore.\n * @default undefined\n */\n ignore?: string[] | readonly string[] | undefined\n /**\n * Include empty directories in results.\n * When `false`, empty directories are filtered out.\n * @default true\n */\n includeEmpty?: boolean | undefined\n /**\n * Sort directory names alphabetically using natural sort order.\n * @default true\n */\n sort?: boolean | 
undefined\n}\n\n/**\n * Options for reading files with encoding and abort support.\n * Can be either an options object, an encoding string, or null.\n */\nexport type ReadFileOptions =\n | Remap<\n ObjectEncodingOptions &\n Abortable & {\n flag?: OpenMode | undefined\n }\n >\n | BufferEncoding\n | null\n\n/**\n * Options for reading and parsing JSON files.\n */\nexport type ReadJsonOptions = Remap<\n ReadFileOptions & {\n /**\n * Whether to throw errors on parse failure.\n * When `false`, returns `undefined` on error instead of throwing.\n * @default true\n */\n throws?: boolean | undefined\n /**\n * JSON reviver function to transform parsed values.\n * Same as the second parameter to `JSON.parse()`.\n */\n reviver?: Parameters<typeof JSON.parse>[1] | undefined\n }\n>\n\n/**\n * Options for file/directory removal operations.\n */\nexport interface RemoveOptions {\n /**\n * Force deletion even outside normally safe directories.\n * When `false`, prevents deletion outside temp, cacache, and ~/.socket.\n * @default true for safe directories, false otherwise\n */\n force?: boolean | undefined\n /**\n * Maximum number of retry attempts on failure.\n * @default 3\n */\n maxRetries?: number | undefined\n /**\n * Recursively delete directories and contents.\n * @default true\n */\n recursive?: boolean | undefined\n /**\n * Delay in milliseconds between retry attempts.\n * @default 200\n */\n retryDelay?: number | undefined\n /**\n * Abort signal to cancel the operation.\n */\n signal?: AbortSignal | undefined\n}\n\n/**\n * Options for safe read operations that don't throw on errors.\n */\nexport interface SafeReadOptions extends ReadOptions {\n /**\n * Default value to return on read failure.\n * If not provided, `undefined` is returned on error.\n */\n defaultValue?: unknown | undefined\n}\n\n/**\n * Options for write operations with encoding and mode control.\n */\nexport interface WriteOptions extends Abortable {\n /**\n * Character encoding for writing.\n * @default 'utf8'\n */\n encoding?: BufferEncoding | string | undefined\n /**\n * File mode (permissions) to set.\n * Uses standard Unix permission bits (e.g., 0o644).\n * @default 0o666 (read/write for all, respecting umask)\n */\n mode?: number | undefined\n /**\n * File system flag for write behavior.\n * @default 'w' (create or truncate)\n */\n flag?: string | undefined\n}\n\n/**\n * Options for writing JSON files with formatting control.\n */\nexport interface WriteJsonOptions extends WriteOptions {\n /**\n * End-of-line sequence to use.\n * @default '\\n'\n * @example\n * ```ts\n * // Windows-style line endings\n * writeJson('data.json', data, { EOL: '\\r\\n' })\n * ```\n */\n EOL?: string | undefined\n /**\n * Whether to add a final newline at end of file.\n * @default true\n */\n finalEOL?: boolean | undefined\n /**\n * JSON replacer function to transform values during stringification.\n * Same as the second parameter to `JSON.stringify()`.\n */\n replacer?: JsonReviver | undefined\n /**\n * Number of spaces for indentation, or string to use for indentation.\n * @default 2\n * @example\n * ```ts\n * // Use tabs instead of spaces\n * writeJson('data.json', data, { spaces: '\\t' })\n *\n * // Use 4 spaces for indentation\n * writeJson('data.json', data, { spaces: 4 })\n * ```\n */\n spaces?: number | string | undefined\n}\n\nconst defaultRemoveOptions = objectFreeze({\n __proto__: null,\n force: true,\n maxRetries: 3,\n recursive: true,\n retryDelay: 200,\n})\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module 
to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js path module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path as typeof import('path')\n}\n\n/**\n * Process directory entries and filter for directories.\n * Filters entries to include only directories, optionally excluding empty ones.\n * Applies ignore patterns and natural sorting.\n *\n * @param dirents - Directory entries from readdir\n * @param dirname - Parent directory path\n * @param options - Filtering and sorting options\n * @returns Array of directory names, optionally sorted\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction innerReadDirNames(\n dirents: Dirent[],\n dirname: string | undefined,\n options?: ReadDirOptions | undefined,\n): string[] {\n const {\n ignore,\n includeEmpty = true,\n sort = true,\n } = { __proto__: null, ...options } as ReadDirOptions\n const path = getPath()\n const names = dirents\n .filter(\n (d: Dirent) =>\n d.isDirectory() &&\n (includeEmpty ||\n !isDirEmptySync(path.join(dirname || d.parentPath, d.name), {\n ignore,\n })),\n )\n .map((d: Dirent) => d.name)\n return sort ? names.sort(naturalCompare) : names\n}\n\n/**\n * Stringify JSON with custom formatting options.\n * Formats JSON with configurable line endings and indentation.\n *\n * @param json - Value to stringify\n * @param EOL - End-of-line sequence\n * @param finalEOL - Whether to add final newline\n * @param replacer - JSON replacer function\n * @param spaces - Indentation spaces or string\n * @returns Formatted JSON string\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction stringify(\n json: unknown,\n EOL: string,\n finalEOL: boolean,\n replacer: JsonReviver | undefined,\n spaces: number | string = 2,\n): string {\n const EOF = finalEOL ? 
EOL : ''\n const str = JSON.stringify(json, replacer, spaces)\n return `${str.replace(/\\n/g, EOL)}${EOF}`\n}\n\n/**\n * Find a file or directory by traversing up parent directories.\n * Searches from the starting directory upward to the filesystem root.\n * Useful for finding configuration files or project roots.\n *\n * @param name - Filename(s) to search for\n * @param options - Search options including cwd and type filters\n * @returns Normalized absolute path if found, undefined otherwise\n *\n * @example\n * ```ts\n * // Find package.json starting from current directory\n * const pkgPath = await findUp('package.json')\n *\n * // Find any of multiple config files\n * const configPath = await findUp(['.config.js', '.config.json'])\n *\n * // Find a directory instead of file\n * const nodeModules = await findUp('node_modules', { onlyDirectories: true })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function findUp(\n name: string | string[] | readonly string[],\n options?: FindUpOptions | undefined,\n): Promise<string | undefined> {\n const { cwd = process.cwd(), signal = abortSignal } = {\n __proto__: null,\n ...options,\n } as FindUpOptions\n let { onlyDirectories = false, onlyFiles = true } = {\n __proto__: null,\n ...options,\n } as FindUpOptions\n if (onlyDirectories) {\n onlyFiles = false\n }\n if (onlyFiles) {\n onlyDirectories = false\n }\n const fs = getFs()\n const path = getPath()\n let dir = path.resolve(cwd)\n const { root } = path.parse(dir)\n const names = isArray(name) ? name : [name as string]\n while (dir && dir !== root) {\n for (const n of names) {\n if (signal?.aborted) {\n return undefined\n }\n const thePath = path.join(dir, n)\n try {\n // eslint-disable-next-line no-await-in-loop\n const stats = await fs.promises.stat(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n dir = path.dirname(dir)\n }\n return undefined\n}\n\n/**\n * Synchronously find a file or directory by traversing up parent directories.\n * Searches from the starting directory upward to the filesystem root or `stopAt` directory.\n * Useful for finding configuration files or project roots in synchronous contexts.\n *\n * @param name - Filename(s) to search for\n * @param options - Search options including cwd, stopAt, and type filters\n * @returns Normalized absolute path if found, undefined otherwise\n *\n * @example\n * ```ts\n * // Find package.json starting from current directory\n * const pkgPath = findUpSync('package.json')\n *\n * // Find .git directory but stop at home directory\n * const gitPath = findUpSync('.git', {\n * onlyDirectories: true,\n * stopAt: process.env.HOME\n * })\n *\n * // Find any of multiple config files\n * const configPath = findUpSync(['.eslintrc.js', '.eslintrc.json'])\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function findUpSync(\n name: string | string[] | readonly string[],\n options?: FindUpSyncOptions | undefined,\n) {\n const { cwd = process.cwd(), stopAt } = {\n __proto__: null,\n ...options,\n } as FindUpSyncOptions\n let { onlyDirectories = false, onlyFiles = true } = {\n __proto__: null,\n ...options,\n } as FindUpSyncOptions\n if (onlyDirectories) {\n onlyFiles = false\n }\n if (onlyFiles) {\n onlyDirectories = false\n }\n const fs = getFs()\n const path = getPath()\n let dir = path.resolve(cwd)\n const { root } = path.parse(dir)\n const stopDir = stopAt ? 
path.resolve(stopAt) : undefined\n const names = isArray(name) ? name : [name as string]\n while (dir && dir !== root) {\n // Check if we should stop at this directory.\n if (stopDir && dir === stopDir) {\n // Check current directory but don't go up.\n for (const n of names) {\n const thePath = path.join(dir, n)\n try {\n const stats = fs.statSync(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n return undefined\n }\n for (const n of names) {\n const thePath = path.join(dir, n)\n try {\n const stats = fs.statSync(thePath)\n if (!onlyDirectories && stats.isFile()) {\n return normalizePath(thePath)\n }\n if (!onlyFiles && stats.isDirectory()) {\n return normalizePath(thePath)\n }\n } catch {}\n }\n dir = path.dirname(dir)\n }\n return undefined\n}\n\n/**\n * Check if a path is a directory asynchronously.\n * Returns `true` for directories, `false` for files or non-existent paths.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a directory, `false` otherwise\n *\n * @example\n * ```ts\n * if (await isDir('./src')) {\n * console.log('src is a directory')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function isDir(filepath: PathLike) {\n return !!(await safeStats(filepath))?.isDirectory()\n}\n\n/**\n * Check if a path is a directory synchronously.\n * Returns `true` for directories, `false` for files or non-existent paths.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a directory, `false` otherwise\n *\n * @example\n * ```ts\n * if (isDirSync('./src')) {\n * console.log('src is a directory')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isDirSync(filepath: PathLike) {\n return !!safeStatsSync(filepath)?.isDirectory()\n}\n\n/**\n * Check if a directory is empty synchronously.\n * A directory is considered empty if it contains no files after applying ignore patterns.\n * Uses glob patterns to filter ignored files.\n *\n * @param dirname - Directory path to check\n * @param options - Options including ignore patterns\n * @returns `true` if directory is empty (or doesn't exist), `false` otherwise\n *\n * @example\n * ```ts\n * // Check if directory is completely empty\n * isDirEmptySync('./build')\n *\n * // Check if directory is empty, ignoring .DS_Store files\n * isDirEmptySync('./cache', { ignore: ['.DS_Store'] })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isDirEmptySync(\n dirname: PathLike,\n options?: IsDirEmptyOptions | undefined,\n) {\n const { ignore = defaultIgnore } = {\n __proto__: null,\n ...options,\n } as IsDirEmptyOptions\n const fs = getFs()\n try {\n const files = fs.readdirSync(dirname)\n const { length } = files\n if (length === 0) {\n return true\n }\n const matcher = getGlobMatcher(\n ignore as string[],\n {\n cwd: pathLikeToString(dirname),\n } as { cwd?: string; dot?: boolean; ignore?: string[]; nocase?: boolean },\n )\n let ignoredCount = 0\n for (let i = 0; i < length; i += 1) {\n const file = files[i]\n if (file && matcher(file)) {\n ignoredCount += 1\n }\n }\n return ignoredCount === length\n } catch {\n // Return false for non-existent paths or other errors.\n return false\n }\n}\n\n/**\n * Check if a path is a symbolic link synchronously.\n * Uses `lstat` to check the link itself, not the target.\n *\n * @param filepath - Path to check\n * @returns `true` if path is a symbolic link, `false` otherwise\n *\n * @example\n * ```ts\n * if 
(isSymLinkSync('./my-link')) {\n * console.log('Path is a symbolic link')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function isSymLinkSync(filepath: PathLike) {\n const fs = getFs()\n try {\n return fs.lstatSync(filepath).isSymbolicLink()\n } catch {}\n return false\n}\n\n/**\n * Result of file readability validation.\n * Contains lists of valid and invalid file paths.\n */\nexport interface ValidateFilesResult {\n /**\n * File paths that passed validation and are readable.\n */\n validPaths: string[]\n /**\n * File paths that failed validation (unreadable, permission denied, or non-existent).\n * Common with Yarn Berry PnP virtual filesystem, pnpm symlinks, or filesystem race conditions.\n */\n invalidPaths: string[]\n}\n\n/**\n * Validate that file paths are readable before processing.\n * Filters out files from glob results that cannot be accessed (common with\n * Yarn Berry PnP virtual filesystem, pnpm content-addressable store symlinks,\n * or filesystem race conditions in CI/CD environments).\n *\n * This defensive pattern prevents ENOENT errors when files exist in glob\n * results but are not accessible via standard filesystem operations.\n *\n * @param filepaths - Array of file paths to validate\n * @returns Object with `validPaths` (readable) and `invalidPaths` (unreadable)\n *\n * @example\n * ```ts\n * import { validateFiles } from '@socketsecurity/lib/fs'\n *\n * const files = ['package.json', '.pnp.cjs/virtual-file.json']\n * const { validPaths, invalidPaths } = validateFiles(files)\n *\n * console.log(`Valid: ${validPaths.length}`)\n * console.log(`Invalid: ${invalidPaths.length}`)\n * ```\n *\n * @example\n * ```ts\n * // Typical usage in Socket CLI commands\n * const packagePaths = await getPackageFilesForScan(targets)\n * const { validPaths } = validateFiles(packagePaths)\n * await sdk.uploadManifestFiles(orgSlug, validPaths)\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function validateFiles(\n filepaths: string[] | readonly string[],\n): ValidateFilesResult {\n const fs = getFs()\n const validPaths: string[] = []\n const invalidPaths: string[] = []\n const { R_OK } = fs.constants\n\n for (const filepath of filepaths) {\n try {\n fs.accessSync(filepath, R_OK)\n validPaths.push(filepath)\n } catch {\n invalidPaths.push(filepath)\n }\n }\n\n return { __proto__: null, validPaths, invalidPaths } as ValidateFilesResult\n}\n\n/**\n * Read directory names asynchronously with filtering and sorting.\n * Returns only directory names (not files), with optional filtering for empty directories\n * and glob-based ignore patterns. 
Results are naturally sorted by default.\n *\n * @param dirname - Directory path to read\n * @param options - Options for filtering and sorting\n * @returns Array of directory names, empty array on error\n *\n * @example\n * ```ts\n * // Get all subdirectories, sorted naturally\n * const dirs = await readDirNames('./packages')\n *\n * // Get non-empty directories only\n * const nonEmpty = await readDirNames('./cache', { includeEmpty: false })\n *\n * // Get directories without sorting\n * const unsorted = await readDirNames('./src', { sort: false })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readDirNames(\n dirname: PathLike,\n options?: ReadDirOptions | undefined,\n) {\n const fs = getFs()\n try {\n return innerReadDirNames(\n await fs.promises.readdir(dirname, {\n __proto__: null,\n encoding: 'utf8',\n withFileTypes: true,\n } as ObjectEncodingOptions & { withFileTypes: true }),\n String(dirname),\n options,\n )\n } catch {}\n return []\n}\n\n/**\n * Read directory names synchronously with filtering and sorting.\n * Returns only directory names (not files), with optional filtering for empty directories\n * and glob-based ignore patterns. Results are naturally sorted by default.\n *\n * @param dirname - Directory path to read\n * @param options - Options for filtering and sorting\n * @returns Array of directory names, empty array on error\n *\n * @example\n * ```ts\n * // Get all subdirectories, sorted naturally\n * const dirs = readDirNamesSync('./packages')\n *\n * // Get non-empty directories only, ignoring node_modules\n * const nonEmpty = readDirNamesSync('./src', {\n * includeEmpty: false,\n * ignore: ['node_modules']\n * })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readDirNamesSync(dirname: PathLike, options?: ReadDirOptions) {\n const fs = getFs()\n try {\n return innerReadDirNames(\n fs.readdirSync(dirname, {\n __proto__: null,\n encoding: 'utf8',\n withFileTypes: true,\n } as ObjectEncodingOptions & { withFileTypes: true }),\n String(dirname),\n options,\n )\n } catch {}\n return []\n}\n\n/**\n * Read a file as binary data asynchronously.\n * Returns a Buffer without encoding the contents.\n * Useful for reading images, archives, or other binary formats.\n *\n * @param filepath - Path to file\n * @param options - Read options (encoding is forced to null for binary)\n * @returns Promise resolving to Buffer containing file contents\n *\n * @example\n * ```ts\n * // Read an image file\n * const imageBuffer = await readFileBinary('./image.png')\n *\n * // Read with abort signal\n * const buffer = await readFileBinary('./data.bin', { signal: abortSignal })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readFileBinary(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n // Don't specify encoding to get a Buffer.\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n encoding: null,\n })\n}\n\n/**\n * Read a file as UTF-8 text asynchronously.\n * Returns a string with the file contents decoded as UTF-8.\n * This is the most common way to read text files.\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and abort signal\n * @returns Promise resolving to string containing file contents\n *\n * @example\n * ```ts\n * // Read a text file\n * const content = await readFileUtf8('./README.md')\n *\n * // Read with custom encoding\n * const content = await readFileUtf8('./data.txt', { encoding: 'utf-8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readFileUtf8(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n encoding: 'utf8',\n })\n}\n\n/**\n * Read a file as binary data synchronously.\n * Returns a Buffer without encoding the contents.\n * Useful for reading images, archives, or other binary formats.\n *\n * @param filepath - Path to file\n * @param options - Read options (encoding is forced to null for binary)\n * @returns Buffer containing file contents\n *\n * @example\n * ```ts\n * // Read an image file\n * const imageBuffer = readFileBinarySync('./logo.png')\n *\n * // Read a compressed file\n * const gzipData = readFileBinarySync('./archive.gz')\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readFileBinarySync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n // Don't specify encoding to get a Buffer\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n return fs.readFileSync(filepath, {\n ...opts,\n encoding: null,\n } as ObjectEncodingOptions)\n}\n\n/**\n * Read a file as UTF-8 text synchronously.\n * Returns a string with the file contents decoded as UTF-8.\n * This is the most common way to read text files synchronously.\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding\n * @returns String containing file contents\n *\n * @example\n * ```ts\n * // Read a configuration file\n * const config = readFileUtf8Sync('./config.txt')\n *\n * // Read with custom options\n * const data = readFileUtf8Sync('./data.txt', { encoding: 'utf8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readFileUtf8Sync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n return fs.readFileSync(filepath, {\n ...opts,\n encoding: 'utf8',\n } as ObjectEncodingOptions)\n}\n\n/**\n * Read and parse a JSON file asynchronously.\n * Reads the file as UTF-8 text and parses it as JSON.\n * Optionally accepts a reviver function to transform parsed values.\n *\n * @param filepath - Path to JSON file\n * @param options - Read and parse options\n * @returns Promise resolving to parsed JSON value, or undefined if throws is false and an error occurs\n *\n * @example\n * ```ts\n * // Read and parse package.json\n * const pkg = await readJson('./package.json')\n *\n * // Read JSON with custom reviver\n * const data = await readJson('./data.json', {\n * reviver: (key, value) => {\n * if (key === 'date') return new Date(value)\n * return value\n * }\n * })\n *\n * // Don't throw on parse errors\n * const config = await readJson('./config.json', { throws: false })\n * if (config === undefined) {\n * console.log('Failed to parse config')\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function readJson(\n filepath: PathLike,\n options?: ReadJsonOptions | string | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { reviver, throws, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as unknown as ReadJsonOptions\n const shouldThrow = throws === undefined || !!throws\n const fs = getFs()\n let content = ''\n try {\n content = await fs.promises.readFile(filepath, {\n __proto__: null,\n encoding: 'utf8',\n ...fsOptions,\n } as unknown as Parameters<typeof fs.promises.readFile>[1] & {\n encoding: string\n })\n } catch (e) {\n if (shouldThrow) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'ENOENT') {\n throw new Error(\n `JSON file not found: ${filepath}\\n` +\n 'Ensure the file exists or create it with the expected structure.',\n { cause: e },\n )\n }\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied reading JSON file: ${filepath}\\n` +\n 'Check file permissions or run with appropriate access.',\n { cause: e },\n )\n }\n throw e\n }\n return undefined\n }\n return jsonParse(content, {\n filepath: String(filepath),\n reviver,\n throws: shouldThrow,\n })\n}\n\n/**\n * Read and parse a JSON file synchronously.\n * Reads the file as UTF-8 text and parses it as JSON.\n * Optionally accepts a reviver function to transform parsed values.\n *\n * @param filepath - Path to JSON file\n * @param options - Read and parse options\n * @returns Parsed JSON value, or undefined if throws is false and an error occurs\n *\n * @example\n * ```ts\n * // Read and parse tsconfig.json\n * const tsconfig = readJsonSync('./tsconfig.json')\n *\n * // Read JSON with custom reviver\n * const data = readJsonSync('./data.json', {\n * reviver: (key, value) => {\n * if (typeof value === 'string' && /^\\d{4}-\\d{2}-\\d{2}/.test(value)) {\n * return new Date(value)\n * }\n * return value\n * }\n * })\n *\n * // Don't throw on parse errors\n * const config = readJsonSync('./config.json', { throws: false })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function readJsonSync(\n filepath: PathLike,\n options?: ReadJsonOptions | string | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const { reviver, throws, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as unknown as ReadJsonOptions\n const shouldThrow = throws === undefined || !!throws\n const fs = getFs()\n let content = ''\n try {\n content = fs.readFileSync(filepath, {\n __proto__: null,\n encoding: 'utf8',\n ...fsOptions,\n } as unknown as Parameters<typeof fs.readFileSync>[1] & {\n encoding: string\n })\n } catch (e) {\n if (shouldThrow) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'ENOENT') {\n throw new Error(\n `JSON file not found: ${filepath}\\n` +\n 'Ensure the file exists or create it with the expected structure.',\n { cause: e },\n )\n }\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied reading JSON file: ${filepath}\\n` +\n 'Check file permissions or run with appropriate access.',\n { cause: e },\n )\n }\n throw e\n }\n return undefined\n }\n return jsonParse(content, {\n filepath: String(filepath),\n reviver,\n throws: shouldThrow,\n })\n}\n\n// Cache for resolved allowed directories\nlet _cachedAllowedDirs: string[] | undefined\n\n/**\n * Get resolved allowed directories for safe deletion with lazy caching.\n * These directories are resolved once and cached for the process lifetime.\n */\nfunction getAllowedDirectories(): string[] {\n if (_cachedAllowedDirs === undefined) {\n const path = getPath()\n const {\n getOsTmpDir,\n getSocketCacacheDir,\n getSocketUserDir,\n } = /*@__PURE__*/ require('#lib/paths')\n\n _cachedAllowedDirs = [\n path.resolve(getOsTmpDir()),\n path.resolve(getSocketCacacheDir()),\n path.resolve(getSocketUserDir()),\n ]\n }\n return _cachedAllowedDirs\n}\n\n/**\n * Invalidate the cached allowed directories.\n * Called automatically by the paths/rewire module when paths are overridden in tests.\n *\n * @internal Used for test rewiring\n */\nexport function invalidatePathCache(): void {\n _cachedAllowedDirs = undefined\n}\n\n// Register cache invalidation with the rewire module\nregisterCacheInvalidation(invalidatePathCache)\n\n/**\n * Safely delete a file or directory asynchronously with built-in protections.\n * Uses `del` for safer deletion that prevents removing cwd and above by default.\n * Automatically uses force: true for temp directory, cacache, and ~/.socket subdirectories.\n *\n * @param filepath - Path or array of paths to delete (supports glob patterns)\n * @param options - Deletion options including force, retries, and recursion\n * @throws {Error} When attempting to delete protected paths without force option\n *\n * @example\n * ```ts\n * // Delete a single file\n * await safeDelete('./temp-file.txt')\n *\n * // Delete a directory recursively\n * await safeDelete('./build', { recursive: true })\n *\n * // Delete multiple paths\n * await safeDelete(['./dist', './coverage'])\n *\n * // Delete with custom retry settings\n * await safeDelete('./flaky-dir', { maxRetries: 5, retryDelay: 500 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeDelete(\n filepath: PathLike | PathLike[],\n options?: RemoveOptions | undefined,\n) {\n const del = /*@__PURE__*/ require('./external/del')\n const { deleteAsync } = del\n const opts = { __proto__: null, ...options } as RemoveOptions\n const patterns = isArray(filepath)\n ? 
filepath.map(pathLikeToString)\n : [pathLikeToString(filepath)]\n\n // Check if we're deleting within allowed directories.\n let shouldForce = opts.force !== false\n if (!shouldForce && patterns.length > 0) {\n const path = getPath()\n const allowedDirs = getAllowedDirectories()\n\n // Check if all patterns are within allowed directories.\n const allInAllowedDirs = patterns.every(pattern => {\n const resolvedPath = path.resolve(pattern)\n\n // Check each allowed directory\n for (const allowedDir of allowedDirs) {\n const isInAllowedDir =\n resolvedPath.startsWith(allowedDir + path.sep) ||\n resolvedPath === allowedDir\n const relativePath = path.relative(allowedDir, resolvedPath)\n const isGoingBackward = relativePath.startsWith('..')\n\n if (isInAllowedDir && !isGoingBackward) {\n return true\n }\n }\n\n return false\n })\n\n if (allInAllowedDirs) {\n shouldForce = true\n }\n }\n\n await deleteAsync(patterns, {\n concurrency: opts.maxRetries || defaultRemoveOptions.maxRetries,\n dryRun: false,\n force: shouldForce,\n onlyFiles: false,\n })\n}\n\n/**\n * Safely delete a file or directory synchronously with built-in protections.\n * Uses `del` for safer deletion that prevents removing cwd and above by default.\n * Automatically uses force: true for temp directory, cacache, and ~/.socket subdirectories.\n *\n * @param filepath - Path or array of paths to delete (supports glob patterns)\n * @param options - Deletion options including force, retries, and recursion\n * @throws {Error} When attempting to delete protected paths without force option\n *\n * @example\n * ```ts\n * // Delete a single file\n * safeDeleteSync('./temp-file.txt')\n *\n * // Delete a directory recursively\n * safeDeleteSync('./build', { recursive: true })\n *\n * // Delete multiple paths with globs\n * safeDeleteSync(['./dist/**', './coverage/**'])\n *\n * // Force delete a protected path (use with caution)\n * safeDeleteSync('./important', { force: true })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeDeleteSync(\n filepath: PathLike | PathLike[],\n options?: RemoveOptions | undefined,\n) {\n const del = /*@__PURE__*/ require('./external/del')\n const { deleteSync } = del\n const opts = { __proto__: null, ...options } as RemoveOptions\n const patterns = isArray(filepath)\n ? 
filepath.map(pathLikeToString)\n : [pathLikeToString(filepath)]\n\n // Check if we're deleting within allowed directories.\n let shouldForce = opts.force !== false\n if (!shouldForce && patterns.length > 0) {\n const path = getPath()\n const allowedDirs = getAllowedDirectories()\n\n // Check if all patterns are within allowed directories.\n const allInAllowedDirs = patterns.every(pattern => {\n const resolvedPath = path.resolve(pattern)\n\n // Check each allowed directory\n for (const allowedDir of allowedDirs) {\n const isInAllowedDir =\n resolvedPath.startsWith(allowedDir + path.sep) ||\n resolvedPath === allowedDir\n const relativePath = path.relative(allowedDir, resolvedPath)\n const isGoingBackward = relativePath.startsWith('..')\n\n if (isInAllowedDir && !isGoingBackward) {\n return true\n }\n }\n\n return false\n })\n\n if (allInAllowedDirs) {\n shouldForce = true\n }\n }\n\n deleteSync(patterns, {\n concurrency: opts.maxRetries || defaultRemoveOptions.maxRetries,\n dryRun: false,\n force: shouldForce,\n onlyFiles: false,\n })\n}\n\n/**\n * Safely create a directory asynchronously, ignoring EEXIST errors.\n * This function wraps fs.promises.mkdir and handles the race condition where\n * the directory might already exist, which is common in concurrent code.\n *\n * Unlike fs.promises.mkdir with recursive:true, this function:\n * - Silently ignores EEXIST errors (directory already exists)\n * - Re-throws all other errors (permissions, invalid path, etc.)\n * - Works reliably in multi-process/concurrent scenarios\n *\n * @param path - Directory path to create\n * @param options - Options including recursive and mode settings\n * @returns Promise that resolves when directory is created or already exists\n *\n * @example\n * ```ts\n * // Create a directory, no error if it exists\n * await safeMkdir('./config')\n *\n * // Create nested directories\n * await safeMkdir('./data/cache/temp', { recursive: true })\n *\n * // Create with specific permissions\n * await safeMkdir('./secure', { mode: 0o700 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeMkdir(\n path: PathLike,\n options?: MakeDirectoryOptions | undefined,\n): Promise<void> {\n const fs = getFs()\n try {\n await fs.promises.mkdir(path, options)\n } catch (e: unknown) {\n // Ignore EEXIST error - directory already exists.\n if (\n typeof e === 'object' &&\n e !== null &&\n 'code' in e &&\n e.code !== 'EEXIST'\n ) {\n throw e\n }\n }\n}\n\n/**\n * Safely create a directory synchronously, ignoring EEXIST errors.\n * This function wraps fs.mkdirSync and handles the race condition where\n * the directory might already exist, which is common in concurrent code.\n *\n * Unlike fs.mkdirSync with recursive:true, this function:\n * - Silently ignores EEXIST errors (directory already exists)\n * - Re-throws all other errors (permissions, invalid path, etc.)\n * - Works reliably in multi-process/concurrent scenarios\n *\n * @param path - Directory path to create\n * @param options - Options including recursive and mode settings\n *\n * @example\n * ```ts\n * // Create a directory, no error if it exists\n * safeMkdirSync('./config')\n *\n * // Create nested directories\n * safeMkdirSync('./data/cache/temp', { recursive: true })\n *\n * // Create with specific permissions\n * safeMkdirSync('./secure', { mode: 0o700 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeMkdirSync(\n path: PathLike,\n options?: MakeDirectoryOptions | undefined,\n): void {\n const fs = getFs()\n try {\n fs.mkdirSync(path, options)\n } 
catch (e: unknown) {\n // Ignore EEXIST error - directory already exists.\n if (\n typeof e === 'object' &&\n e !== null &&\n 'code' in e &&\n e.code !== 'EEXIST'\n ) {\n throw e\n }\n }\n}\n\n/**\n * Safely read a file asynchronously, returning undefined on error.\n * Useful when you want to attempt reading a file without handling errors explicitly.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and default value\n * @returns Promise resolving to file contents, or undefined on error\n *\n * @example\n * ```ts\n * // Try to read a file, get undefined if it doesn't exist\n * const content = await safeReadFile('./optional-config.txt')\n * if (content) {\n * console.log('Config found:', content)\n * }\n *\n * // Read with specific encoding\n * const data = await safeReadFile('./data.txt', { encoding: 'utf8' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeReadFile(\n filepath: PathLike,\n options?: SafeReadOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n try {\n return await fs.promises.readFile(filepath, {\n signal: abortSignal,\n ...opts,\n } as Abortable)\n } catch {}\n return undefined\n}\n\n/**\n * Safely read a file synchronously, returning undefined on error.\n * Useful when you want to attempt reading a file without handling errors explicitly.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to file\n * @param options - Read options including encoding and default value\n * @returns File contents, or undefined on error\n *\n * @example\n * ```ts\n * // Try to read a config file\n * const config = safeReadFileSync('./config.txt')\n * if (config) {\n * console.log('Config loaded successfully')\n * }\n *\n * // Read binary file safely\n * const buffer = safeReadFileSync('./image.png', { encoding: null })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeReadFileSync(\n filepath: PathLike,\n options?: SafeReadOptions | undefined,\n) {\n const opts = typeof options === 'string' ? 
{ encoding: options } : options\n const fs = getFs()\n try {\n return fs.readFileSync(filepath, {\n __proto__: null,\n ...opts,\n } as ObjectEncodingOptions)\n } catch {}\n return undefined\n}\n\n/**\n * Safely get file stats asynchronously, returning undefined on error.\n * Useful for checking file existence and properties without error handling.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to check\n * @returns Promise resolving to Stats object, or undefined on error\n *\n * @example\n * ```ts\n * // Check if file exists and get its stats\n * const stats = await safeStats('./file.txt')\n * if (stats) {\n * console.log('File size:', stats.size)\n * console.log('Modified:', stats.mtime)\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function safeStats(filepath: PathLike) {\n const fs = getFs()\n try {\n return await fs.promises.stat(filepath)\n } catch {}\n return undefined\n}\n\n/**\n * Safely get file stats synchronously, returning undefined on error.\n * Useful for checking file existence and properties without error handling.\n * Returns undefined for any error (file not found, permission denied, etc.).\n *\n * @param filepath - Path to check\n * @param options - Read options (currently unused but kept for API consistency)\n * @returns Stats object, or undefined on error\n *\n * @example\n * ```ts\n * // Check if file exists and get its size\n * const stats = safeStatsSync('./file.txt')\n * if (stats) {\n * console.log('File size:', stats.size)\n * console.log('Is directory:', stats.isDirectory())\n * }\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function safeStatsSync(\n filepath: PathLike,\n options?: ReadFileOptions | undefined,\n) {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const fs = getFs()\n try {\n return fs.statSync(filepath, {\n __proto__: null,\n throwIfNoEntry: false,\n ...opts,\n } as StatSyncOptions)\n } catch {}\n return undefined\n}\n\n/**\n * Generate a unique filepath by adding number suffix if the path exists.\n * Appends `-1`, `-2`, etc. 
before the file extension until a non-existent path is found.\n * Useful for creating files without overwriting existing ones.\n *\n * @param filepath - Desired file path\n * @returns Normalized unique filepath (original if it doesn't exist, or with number suffix)\n *\n * @example\n * ```ts\n * // If 'report.pdf' exists, returns 'report-1.pdf'\n * const uniquePath = uniqueSync('./report.pdf')\n *\n * // If 'data.json' and 'data-1.json' exist, returns 'data-2.json'\n * const path = uniqueSync('./data.json')\n *\n * // If 'backup' doesn't exist, returns 'backup' unchanged\n * const backupPath = uniqueSync('./backup')\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function uniqueSync(filepath: PathLike): string {\n const fs = getFs()\n const path = getPath()\n const filepathStr = String(filepath)\n\n // If the file doesn't exist, return as is\n if (!fs.existsSync(filepathStr)) {\n return normalizePath(filepathStr)\n }\n\n const dirname = path.dirname(filepathStr)\n const ext = path.extname(filepathStr)\n const basename = path.basename(filepathStr, ext)\n\n let counter = 1\n let uniquePath: string\n do {\n uniquePath = path.join(dirname, `${basename}-${counter}${ext}`)\n counter++\n } while (fs.existsSync(uniquePath))\n\n return normalizePath(uniquePath)\n}\n\n/**\n * Write JSON content to a file asynchronously with formatting.\n * Stringifies the value with configurable indentation and line endings.\n * Automatically adds a final newline by default for POSIX compliance.\n *\n * @param filepath - Path to write to\n * @param jsonContent - Value to stringify and write\n * @param options - Write options including formatting and encoding\n * @returns Promise that resolves when write completes\n *\n * @example\n * ```ts\n * // Write formatted JSON with default 2-space indentation\n * await writeJson('./data.json', { name: 'example', version: '1.0.0' })\n *\n * // Write with custom indentation\n * await writeJson('./config.json', config, { spaces: 4 })\n *\n * // Write with tabs instead of spaces\n * await writeJson('./data.json', data, { spaces: '\\t' })\n *\n * // Write without final newline\n * await writeJson('./inline.json', obj, { finalEOL: false })\n *\n * // Write with Windows line endings\n * await writeJson('./win.json', data, { EOL: '\\r\\n' })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function writeJson(\n filepath: PathLike,\n jsonContent: unknown,\n options?: WriteJsonOptions | string,\n): Promise<void> {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { EOL, finalEOL, replacer, spaces, ...fsOptions } = {\n __proto__: null,\n ...opts,\n } as WriteJsonOptions\n const fs = getFs()\n const jsonString = stringify(\n jsonContent,\n EOL || '\\n',\n finalEOL !== undefined ? 
finalEOL : true,\n replacer,\n spaces,\n )\n await fs.promises.writeFile(filepath, jsonString, {\n encoding: 'utf8',\n ...fsOptions,\n __proto__: null,\n } as ObjectEncodingOptions)\n}\n\n/**\n * Write JSON content to a file synchronously with formatting.\n * Stringifies the value with configurable indentation and line endings.\n * Automatically adds a final newline by default for POSIX compliance.\n *\n * @param filepath - Path to write to\n * @param jsonContent - Value to stringify and write\n * @param options - Write options including formatting and encoding\n *\n * @example\n * ```ts\n * // Write formatted JSON with default 2-space indentation\n * writeJsonSync('./package.json', pkg)\n *\n * // Write with custom indentation\n * writeJsonSync('./tsconfig.json', tsconfig, { spaces: 4 })\n *\n * // Write with tabs for indentation\n * writeJsonSync('./data.json', data, { spaces: '\\t' })\n *\n * // Write compacted (no indentation)\n * writeJsonSync('./compact.json', data, { spaces: 0 })\n * ```\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function writeJsonSync(\n filepath: PathLike,\n jsonContent: unknown,\n options?: WriteJsonOptions | string | undefined,\n): void {\n const opts = typeof options === 'string' ? { encoding: options } : options\n const { EOL, finalEOL, replacer, spaces, ...fsOptions } = {\n __proto__: null,\n ...opts,\n }\n const fs = getFs()\n const jsonString = stringify(\n jsonContent,\n EOL || '\\n',\n finalEOL !== undefined ? finalEOL : true,\n replacer,\n spaces,\n )\n fs.writeFileSync(filepath, jsonString, {\n encoding: 'utf8',\n ...fsOptions,\n __proto__: null,\n } as WriteFileOptions)\n}\n"],
+ "mappings": ";4ZAAA,IAAAA,GAAA,GAAAC,EAAAD,GAAA,YAAAE,EAAA,eAAAC,EAAA,wBAAAC,EAAA,UAAAC,EAAA,mBAAAC,EAAA,cAAAC,EAAA,kBAAAC,EAAA,iBAAAC,EAAA,qBAAAC,EAAA,mBAAAC,EAAA,uBAAAC,GAAA,iBAAAC,GAAA,qBAAAC,GAAA,aAAAC,GAAA,iBAAAC,GAAA,eAAAC,GAAA,mBAAAC,GAAA,cAAAC,GAAA,kBAAAC,GAAA,iBAAAC,GAAA,qBAAAC,GAAA,cAAAC,EAAA,kBAAAC,EAAA,eAAAC,GAAA,kBAAAC,EAAA,cAAAC,GAAA,kBAAAC,KAAA,eAAAC,EAAA7B,IAiBA,IAAA8B,EAA+B,8BAE/BC,EAAwB,oBAIxBC,EAA8C,mBAE9CC,EAA0B,kBAC1BC,EAAyC,qBACzCC,EAAgD,kBAChDC,EAA0C,0BAC1CC,EAA+B,mBAR/B,MAAMC,KAAc,kBAAe,EA6Q7BC,KAAuB,gBAAa,CACxC,UAAW,KACX,MAAO,GACP,WAAY,EACZ,UAAW,GACX,WAAY,GACd,CAAC,EAED,IAAIC,EASJ,SAASC,GAAQ,CACf,OAAID,IAAQ,SAEVA,EAAoB,QAAQ,SAAS,GAEhCA,CACT,CAEA,IAAIE,EASJ,SAASC,GAAU,CACjB,OAAID,IAAU,SAGZA,EAAsB,QAAQ,WAAW,GAEpCA,CACT,CAcA,SAASE,EACPC,EACAC,EACAC,EACU,CACV,KAAM,CACJ,OAAAC,EACA,aAAAC,EAAe,GACf,KAAAC,EAAO,EACT,EAAI,CAAE,UAAW,KAAM,GAAGH,CAAQ,EAC5BI,EAAOR,EAAQ,EACfS,EAAQP,EACX,OACEQ,GACCA,EAAE,YAAY,IACbJ,GACC,CAAC3C,EAAe6C,EAAK,KAAKL,GAAWO,EAAE,WAAYA,EAAE,IAAI,EAAG,CAC1D,OAAAL,CACF,CAAC,EACP,EACC,IAAKK,GAAcA,EAAE,IAAI,EAC5B,OAAOH,EAAOE,EAAM,KAAK,gBAAc,EAAIA,CAC7C,CAeA,SAASE,EACPC,EACAC,EACAC,EACAC,EACAC,EAA0B,EAClB,CACR,MAAMC,EAAMH,EAAWD,EAAM,GAE7B,MAAO,GADK,KAAK,UAAUD,EAAMG,EAAUC,CAAM,EACnC,QAAQ,MAAOH,CAAG,CAAC,GAAGI,CAAG,EACzC,CAwBA,eAAsB1D,EACpB2D,EACAd,EAC6B,CAC7B,KAAM,CAAE,IAAAe,EAAM,QAAQ,IAAI,EAAG,OAAAC,EAASzB,CAAY,EAAI,CACpD,UAAW,KACX,GAAGS,CACL,EACA,GAAI,CAAE,gBAAAiB,EAAkB,GAAO,UAAAC,EAAY,EAAK,EAAI,CAClD,UAAW,KACX,GAAGlB,CACL,EACIiB,IACFC,EAAY,IAEVA,IACFD,EAAkB,IAEpB,MAAME,EAAKzB,EAAM,EACXU,EAAOR,EAAQ,EACrB,IAAIwB,EAAMhB,EAAK,QAAQW,CAAG,EAC1B,KAAM,CAAE,KAAAM,CAAK,EAAIjB,EAAK,MAAMgB,CAAG,EACzBf,KAAQ,WAAQS,CAAI,EAAIA,EAAO,CAACA,CAAc,EACpD,KAAOM,GAAOA,IAAQC,GAAM,CAC1B,UAAWC,KAAKjB,EAAO,CACrB,GAAIW,GAAQ,QACV,OAEF,MAAMO,EAAUnB,EAAK,KAAKgB,EAAKE,CAAC,EAChC,GAAI,CAEF,MAAME,EAAQ,MAAML,EAAG,SAAS,KAAKI,CAAO,EAC5C,GAAI,CAACN,GAAmBO,EAAM,OAAO,EACnC,SAAO,iBAAcD,CAAO,EAE9B,GAAI,CAACL,GAAaM,EAAM,YAAY,EAClC,SAAO,iBAAcD,CAAO,CAEhC,MAAQ,CAAC,CACX,CACAH,EAAMhB,EAAK,QAAQgB,CAAG,CACxB,CAEF,CA2BO,SAAShE,EACd0D,EACAd,EACA,CACA,KAAM,CAAE,IAAAe,EAAM,QAAQ,IAAI,EAAG,OAAAU,CAAO,EAAI,CACtC,UAAW,KACX,GAAGzB,CACL,EACA,GAAI,CAAE,gBAAAiB,EAAkB,GAAO,UAAAC,EAAY,EAAK,EAAI,CAClD,UAAW,KACX,GAAGlB,CACL,EACIiB,IACFC,EAAY,IAEVA,IACFD,EAAkB,IAEpB,MAAME,EAAKzB,EAAM,EACXU,EAAOR,EAAQ,EACrB,IAAIwB,EAAMhB,EAAK,QAAQW,CAAG,EAC1B,KAAM,CAAE,KAAAM,CAAK,EAAIjB,EAAK,MAAMgB,CAAG,EACzBM,EAAUD,EAASrB,EAAK,QAAQqB,CAAM,EAAI,OAC1CpB,KAAQ,WAAQS,CAAI,EAAIA,EAAO,CAACA,CAAc,EACpD,KAAOM,GAAOA,IAAQC,GAAM,CAE1B,GAAIK,GAAWN,IAAQM,EAAS,CAE9B,UAAWJ,KAAKjB,EAAO,CACrB,MAAMkB,EAAUnB,EAAK,KAAKgB,EAAKE,CAAC,EAChC,GAAI,CACF,MAAME,EAAQL,EAAG,SAASI,CAAO,EACjC,GAAI,CAACN,GAAmBO,EAAM,OAAO,EACnC,SAAO,iBAAcD,CAAO,EAE9B,GAAI,CAACL,GAAaM,EAAM,YAAY,EAClC,SAAO,iBAAcD,CAAO,CAEhC,MAAQ,CAAC,CACX,CACA,MACF,CACA,UAAWD,KAAKjB,EAAO,CACrB,MAAMkB,EAAUnB,EAAK,KAAKgB,EAAKE,CAAC,EAChC,GAAI,CACF,MAAME,EAAQL,EAAG,SAASI,CAAO,EACjC,GAAI,CAACN,GAAmBO,EAAM,OAAO,EACnC,SAAO,iBAAcD,CAAO,EAE9B,GAAI,CAACL,GAAaM,EAAM,YAAY,EAClC,SAAO,iBAAcD,CAAO,CAEhC,MAAQ,CAAC,CACX,CACAH,EAAMhB,EAAK,QAAQgB,CAAG,CACxB,CAEF,CAiBA,eAAsB9D,EAAMqE,EAAoB,CAC9C,MAAO,CAAC,EAAE,MAAMnD,EAAUmD,CAAQ,IAAI,YAAY,CACpD,CAiBO,SAASnE,EAAUmE,EAAoB,CAC5C,MAAO,CAAC,CAAClD,EAAckD,CAAQ,GAAG,YAAY,CAChD,CAqBO,SAASpE,EACdwC,EACAC,EACA,CACA,KAAM,CAAE,OAAAC,EAAS,eAAc,EAAI,CACjC,UAAW,KACX,GAAGD,CACL,EACMmB,EAAKzB,EAAM,EACjB,GAAI,CACF,MAAMkC,EAAQT,EAAG,YAAYpB,CAAO,EAC9B,CAAE,OAAA8B,CAAO,EAAID,EACnB,GAAIC,IAAW,EACb,MAAO,GAET,MAAMC,KAAU,kBACd7B,EACA,CACE,OAAK,oBAAiBF,CAAO,CAC/B,CACF,EACA,IAAIgC,EAAe,EACnB,QAASC,EAAI,
EAAGA,EAAIH,EAAQG,GAAK,EAAG,CAClC,MAAMC,EAAOL,EAAMI,CAAC,EAChBC,GAAQH,EAAQG,CAAI,IACtBF,GAAgB,EAEpB,CACA,OAAOA,IAAiBF,CAC1B,MAAQ,CAEN,MAAO,EACT,CACF,CAiBO,SAASpE,EAAckE,EAAoB,CAChD,MAAMR,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAOyB,EAAG,UAAUQ,CAAQ,EAAE,eAAe,CAC/C,MAAQ,CAAC,CACT,MAAO,EACT,CAkDO,SAAShD,EACduD,EACqB,CACrB,MAAMf,EAAKzB,EAAM,EACXyC,EAAuB,CAAC,EACxBC,EAAyB,CAAC,EAC1B,CAAE,KAAAC,CAAK,EAAIlB,EAAG,UAEpB,UAAWQ,KAAYO,EACrB,GAAI,CACFf,EAAG,WAAWQ,EAAUU,CAAI,EAC5BF,EAAW,KAAKR,CAAQ,CAC1B,MAAQ,CACNS,EAAa,KAAKT,CAAQ,CAC5B,CAGF,MAAO,CAAE,UAAW,KAAM,WAAAQ,EAAY,aAAAC,CAAa,CACrD,CAwBA,eAAsB1E,EACpBqC,EACAC,EACA,CACA,MAAMmB,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAOG,EACL,MAAMsB,EAAG,SAAS,QAAQpB,EAAS,CACjC,UAAW,KACX,SAAU,OACV,cAAe,EACjB,CAAoD,EACpD,OAAOA,CAAO,EACdC,CACF,CACF,MAAQ,CAAC,CACT,MAAO,CAAC,CACV,CAwBO,SAASrC,EAAiBoC,EAAmBC,EAA0B,CAC5E,MAAMmB,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAOG,EACLsB,EAAG,YAAYpB,EAAS,CACtB,UAAW,KACX,SAAU,OACV,cAAe,EACjB,CAAoD,EACpD,OAAOA,CAAO,EACdC,CACF,CACF,MAAQ,CAAC,CACT,MAAO,CAAC,CACV,CAqBA,eAAsBpC,EACpB+D,EACA3B,EACA,CAEA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAEnE,OAAO,MADIN,EAAM,EACD,SAAS,SAASiC,EAAU,CAC1C,OAAQpC,EACR,GAAG+C,EACH,SAAU,IACZ,CAAC,CACH,CAqBA,eAAsBxE,GACpB6D,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAEnE,OAAO,MADIN,EAAM,EACD,SAAS,SAASiC,EAAU,CAC1C,OAAQpC,EACR,GAAG+C,EACH,SAAU,MACZ,CAAC,CACH,CAqBO,SAASzE,GACd8D,EACA3B,EACA,CAEA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAEnE,OADWN,EAAM,EACP,aAAaiC,EAAU,CAC/B,GAAGW,EACH,SAAU,IACZ,CAA0B,CAC5B,CAqBO,SAASvE,GACd4D,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAEnE,OADWN,EAAM,EACP,aAAaiC,EAAU,CAC/B,GAAGW,EACH,SAAU,MACZ,CAA0B,CAC5B,CAgCA,eAAsBtE,GACpB2D,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7D,CAAE,QAAAuC,EAAS,OAAAC,EAAQ,GAAGC,CAAU,EAAI,CACxC,UAAW,KACX,GAAGH,CACL,EACMI,EAAcF,IAAW,QAAa,CAAC,CAACA,EACxCrB,EAAKzB,EAAM,EACjB,IAAIiD,EAAU,GACd,GAAI,CACFA,EAAU,MAAMxB,EAAG,SAAS,SAASQ,EAAU,CAC7C,UAAW,KACX,SAAU,OACV,GAAGc,CACL,CAEC,CACH,OAASG,EAAG,CACV,GAAIF,EAAa,CACf,MAAMG,EAAQD,EAA4B,KAC1C,MAAIC,IAAS,SACL,IAAI,MACR,wBAAwBlB,CAAQ;AAAA,kEAEhC,CAAE,MAAOiB,CAAE,CACb,EAEEC,IAAS,UAAYA,IAAS,QAC1B,IAAI,MACR,wCAAwClB,CAAQ;AAAA,wDAEhD,CAAE,MAAOiB,CAAE,CACb,EAEIA,CACR,CACA,MACF,CACA,SAAO,aAAUD,EAAS,CACxB,SAAU,OAAOhB,CAAQ,EACzB,QAAAY,EACA,OAAQG,CACV,CAAC,CACH,CA+BO,SAASzE,GACd0D,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7D,CAAE,QAAAuC,EAAS,OAAAC,EAAQ,GAAGC,CAAU,EAAI,CACxC,UAAW,KACX,GAAGH,CACL,EACMI,EAAcF,IAAW,QAAa,CAAC,CAACA,EACxCrB,EAAKzB,EAAM,EACjB,IAAIiD,EAAU,GACd,GAAI,CACFA,EAAUxB,EAAG,aAAaQ,EAAU,CAClC,UAAW,KACX,SAAU,OACV,GAAGc,CACL,CAEC,CACH,OAASG,EAAG,CACV,GAAIF,EAAa,CACf,MAAMG,EAAQD,EAA4B,KAC1C,MAAIC,IAAS,SACL,IAAI,MACR,wBAAwBlB,CAAQ;AAAA,kEAEhC,CAAE,MAAOiB,CAAE,CACb,EAEEC,IAAS,UAAYA,IAAS,QAC1B,IAAI,MACR,wCAAwClB,CAAQ;AAAA,wDAEhD,CAAE,MAAOiB,CAAE,CACb,EAEIA,CACR,CACA,MACF,CACA,SAAO,aAAUD,EAAS,CACxB,SAAU,OAAOhB,CAAQ,EACzB,QAAAY,EACA,OAAQG,CACV,CAAC,CACH,CAGA,IAAII,EAMJ,SAASC,GAAkC,CACzC,GAAID,IAAuB,OAAW,CACpC,MAAM1C,EAAOR,EAAQ,EACf,CACJ,YAAAoD,EACA,oBAAAC,EACA,iBAAAC,CACF,EAAkB,QAAQ,YAAY,EAEtCJ,EAAqB,CACnB1C,EAAK,QAAQ4C,EAAY,CAAC,EAC1B5C,EAAK,QAAQ6C,EAAoB,CAAC,EAClC7C,EAAK,QAAQ8C,EAAiB,CAAC,CACjC,CACF,CACA,OAAOJ,CACT,CAQO,SAASzF,GAA4B,CAC1CyF,EAAqB,MACvB,IAGA,6BAA0BzF,CAAmB,EA2B7C,eAAsBa,GACpByD,EACA3B,EACA,CACA,MAAMmD,EAAoB,QAAQ,gBAAgB,EAC5C,CAAE,YAAAC,CAAY,EAAID,EAClBb,EAAO,CAAE,UAAW,KAAM,GAAGtC,CAAQ,EACrCqD,KAAW,WAAQ1B,CAAQ,EAC7BA,EAAS,IAAI,kBAAgB,EAC7B,IAAC,oBAAiBA,CAAQ,CAAC,EAG/B,IAAI2B,EAAchB,EAAK,QAAU,GACj
C,GAAI,CAACgB,GAAeD,EAAS,OAAS,EAAG,CACvC,MAAMjD,EAAOR,EAAQ,EACf2D,EAAcR,EAAsB,EAGjBM,EAAS,MAAMG,GAAW,CACjD,MAAMC,EAAerD,EAAK,QAAQoD,CAAO,EAGzC,UAAWE,KAAcH,EAAa,CACpC,MAAMI,EACJF,EAAa,WAAWC,EAAatD,EAAK,GAAG,GAC7CqD,IAAiBC,EAEbE,EADexD,EAAK,SAASsD,EAAYD,CAAY,EACtB,WAAW,IAAI,EAEpD,GAAIE,GAAkB,CAACC,EACrB,MAAO,EAEX,CAEA,MAAO,EACT,CAAC,IAGCN,EAAc,GAElB,CAEA,MAAMF,EAAYC,EAAU,CAC1B,YAAaf,EAAK,YAAc9C,EAAqB,WACrD,OAAQ,GACR,MAAO8D,EACP,UAAW,EACb,CAAC,CACH,CA2BO,SAASnF,GACdwD,EACA3B,EACA,CACA,MAAMmD,EAAoB,QAAQ,gBAAgB,EAC5C,CAAE,WAAAU,CAAW,EAAIV,EACjBb,EAAO,CAAE,UAAW,KAAM,GAAGtC,CAAQ,EACrCqD,KAAW,WAAQ1B,CAAQ,EAC7BA,EAAS,IAAI,kBAAgB,EAC7B,IAAC,oBAAiBA,CAAQ,CAAC,EAG/B,IAAI2B,EAAchB,EAAK,QAAU,GACjC,GAAI,CAACgB,GAAeD,EAAS,OAAS,EAAG,CACvC,MAAMjD,EAAOR,EAAQ,EACf2D,EAAcR,EAAsB,EAGjBM,EAAS,MAAMG,GAAW,CACjD,MAAMC,EAAerD,EAAK,QAAQoD,CAAO,EAGzC,UAAWE,KAAcH,EAAa,CACpC,MAAMI,EACJF,EAAa,WAAWC,EAAatD,EAAK,GAAG,GAC7CqD,IAAiBC,EAEbE,EADexD,EAAK,SAASsD,EAAYD,CAAY,EACtB,WAAW,IAAI,EAEpD,GAAIE,GAAkB,CAACC,EACrB,MAAO,EAEX,CAEA,MAAO,EACT,CAAC,IAGCN,EAAc,GAElB,CAEAO,EAAWR,EAAU,CACnB,YAAaf,EAAK,YAAc9C,EAAqB,WACrD,OAAQ,GACR,MAAO8D,EACP,UAAW,EACb,CAAC,CACH,CA6BA,eAAsBlF,GACpBgC,EACAJ,EACe,CACf,MAAMmB,EAAKzB,EAAM,EACjB,GAAI,CACF,MAAMyB,EAAG,SAAS,MAAMf,EAAMJ,CAAO,CACvC,OAAS4C,EAAY,CAEnB,GACE,OAAOA,GAAM,UACbA,IAAM,MACN,SAAUA,GACVA,EAAE,OAAS,SAEX,MAAMA,CAEV,CACF,CA4BO,SAASvE,GACd+B,EACAJ,EACM,CACN,MAAMmB,EAAKzB,EAAM,EACjB,GAAI,CACFyB,EAAG,UAAUf,EAAMJ,CAAO,CAC5B,OAAS4C,EAAY,CAEnB,GACE,OAAOA,GAAM,UACbA,IAAM,MACN,SAAUA,GACVA,EAAE,OAAS,SAEX,MAAMA,CAEV,CACF,CAwBA,eAAsBtE,GACpBqD,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7DmB,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAO,MAAMyB,EAAG,SAAS,SAASQ,EAAU,CAC1C,OAAQpC,EACR,GAAG+C,CACL,CAAc,CAChB,MAAQ,CAAC,CAEX,CAwBO,SAAS/D,GACdoD,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7DmB,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAOyB,EAAG,aAAaQ,EAAU,CAC/B,UAAW,KACX,GAAGW,CACL,CAA0B,CAC5B,MAAQ,CAAC,CAEX,CAqBA,eAAsB9D,EAAUmD,EAAoB,CAClD,MAAMR,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAO,MAAMyB,EAAG,SAAS,KAAKQ,CAAQ,CACxC,MAAQ,CAAC,CAEX,CAsBO,SAASlD,EACdkD,EACA3B,EACA,CACA,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7DmB,EAAKzB,EAAM,EACjB,GAAI,CACF,OAAOyB,EAAG,SAASQ,EAAU,CAC3B,UAAW,KACX,eAAgB,GAChB,GAAGW,CACL,CAAoB,CACtB,MAAQ,CAAC,CAEX,CAuBO,SAAS5D,GAAWiD,EAA4B,CACrD,MAAMR,EAAKzB,EAAM,EACXU,EAAOR,EAAQ,EACfkE,EAAc,OAAOnC,CAAQ,EAGnC,GAAI,CAACR,EAAG,WAAW2C,CAAW,EAC5B,SAAO,iBAAcA,CAAW,EAGlC,MAAM/D,EAAUK,EAAK,QAAQ0D,CAAW,EAClCC,EAAM3D,EAAK,QAAQ0D,CAAW,EAC9BE,EAAW5D,EAAK,SAAS0D,EAAaC,CAAG,EAE/C,IAAIE,EAAU,EACVC,EACJ,GACEA,EAAa9D,EAAK,KAAKL,EAAS,GAAGiE,CAAQ,IAAIC,CAAO,GAAGF,CAAG,EAAE,EAC9DE,UACO9C,EAAG,WAAW+C,CAAU,GAEjC,SAAO,iBAAcA,CAAU,CACjC,CA+BA,eAAsBtF,GACpB+C,EACAwC,EACAnE,EACe,CACf,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7D,CAAE,IAAAS,EAAK,SAAAC,EAAU,SAAAC,EAAU,OAAAC,EAAQ,GAAG6B,CAAU,EAAI,CACxD,UAAW,KACX,GAAGH,CACL,EACMnB,EAAKzB,EAAM,EACX0E,EAAa7D,EACjB4D,EACA1D,GAAO;AAAA,EACPC,IAAa,OAAYA,EAAW,GACpCC,EACAC,CACF,EACA,MAAMO,EAAG,SAAS,UAAUQ,EAAUyC,EAAY,CAChD,SAAU,OACV,GAAG3B,EACH,UAAW,IACb,CAA0B,CAC5B,CA2BO,SAAS5D,GACd8C,EACAwC,EACAnE,EACM,CACN,MAAMsC,EAAO,OAAOtC,GAAY,SAAW,CAAE,SAAUA,CAAQ,EAAIA,EAC7D,CAAE,IAAAS,EAAK,SAAAC,EAAU,SAAAC,EAAU,OAAAC,EAAQ,GAAG6B,CAAU,EAAI,CACxD,UAAW,KACX,GAAGH,CACL,EACMnB,EAAKzB,EAAM,EACX0E,EAAa7D,EACjB4D,EACA1D,GAAO;AAAA,EACPC,IAAa,OAAYA,EAAW,GACpCC,EACAC,CACF,EACAO,EAAG,cAAcQ,EAAUyC,EAAY,CACrC,SAAU,OACV,GAAG3B,EACH,UAAW,IACb,CAAqB,CACvB",
+ "names": ["fs_exports", "__export", "findUp", "findUpSync", "invalidatePathCache", "isDir", "isDirEmptySync", "isDirSync", "isSymLinkSync", "readDirNames", "readDirNamesSync", "readFileBinary", "readFileBinarySync", "readFileUtf8", "readFileUtf8Sync", "readJson", "readJsonSync", "safeDelete", "safeDeleteSync", "safeMkdir", "safeMkdirSync", "safeReadFile", "safeReadFileSync", "safeStats", "safeStatsSync", "uniqueSync", "validateFiles", "writeJson", "writeJsonSync", "__toCommonJS", "import_process", "import_arrays", "import_globs", "import_json", "import_objects", "import_path", "import_rewire", "import_sorts", "abortSignal", "defaultRemoveOptions", "_fs", "getFs", "_path", "getPath", "innerReadDirNames", "dirents", "dirname", "options", "ignore", "includeEmpty", "sort", "path", "names", "d", "stringify", "json", "EOL", "finalEOL", "replacer", "spaces", "EOF", "name", "cwd", "signal", "onlyDirectories", "onlyFiles", "fs", "dir", "root", "n", "thePath", "stats", "stopAt", "stopDir", "filepath", "files", "length", "matcher", "ignoredCount", "i", "file", "filepaths", "validPaths", "invalidPaths", "R_OK", "opts", "reviver", "throws", "fsOptions", "shouldThrow", "content", "e", "code", "_cachedAllowedDirs", "getAllowedDirectories", "getOsTmpDir", "getSocketCacacheDir", "getSocketUserDir", "del", "deleteAsync", "patterns", "shouldForce", "allowedDirs", "pattern", "resolvedPath", "allowedDir", "isInAllowedDir", "isGoingBackward", "deleteSync", "filepathStr", "ext", "basename", "counter", "uniquePath", "jsonContent", "jsonString"]
  }
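The only functional change in `fs.js.map` for this release is the addition of `safeMkdir` and `safeMkdirSync` to the embedded source (and to the `names` array), along with the `MakeDirectoryOptions` type import they require. Per the added source, both wrap `mkdir`/`mkdirSync`, silently ignore `EEXIST`, and re-throw all other errors. A minimal usage sketch, assuming the `@socketsecurity/lib/fs` entry point used in the module's own examples:

```ts
import { safeMkdir, safeMkdirSync } from '@socketsecurity/lib/fs'

async function prepareDirs(): Promise<void> {
  // Create nested cache directories; EEXIST is swallowed, other errors re-throw.
  await safeMkdir('./data/cache/temp', { recursive: true })

  // Synchronous variant with restricted permissions; safe to call repeatedly
  // even if the directory already exists.
  safeMkdirSync('./secure', { mode: 0o700 })
}

void prepareDirs()
```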
package/dist/ipc.js.map CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../src/ipc.ts"],
- "sourcesContent": ["/**\n * IPC (Inter-Process Communication) Module\n * ==========================================\n *\n * This module provides secure inter-process communication utilities for Socket CLI\n * and related tools. It replaces environment variable passing with more secure and\n * scalable alternatives.\n *\n * ## Key Features:\n * - File-based stub communication for initial data handoff\n * - Node.js IPC channel support for real-time bidirectional messaging\n * - Automatic cleanup of temporary files\n * - Type-safe message validation with Zod schemas\n * - Timeout handling for reliability\n *\n * ## Use Cases:\n * 1. Passing API tokens between processes without exposing them in env vars\n * 2. Transferring large configuration objects that exceed env var size limits\n * 3. Bidirectional communication between parent and child processes\n * 4. Secure handshake protocols between Socket CLI components\n *\n * ## Security Considerations:\n * - Stub files are created with restricted permissions in OS temp directory\n * - Messages include timestamps for freshness validation\n * - Automatic cleanup prevents sensitive data persistence\n * - Unique IDs prevent message replay attacks\n *\n * @module ipc\n */\n\nimport crypto from 'node:crypto'\nimport { promises as fs } from 'node:fs'\nimport path from 'node:path'\n\nimport { safeDeleteSync } from './fs'\nimport { getOsTmpDir } from './paths'\nimport { z } from './zod'\n\n// Define BufferEncoding type for TypeScript compatibility.\ntype BufferEncoding = globalThis.BufferEncoding\n\n/**\n * Zod Schemas for Runtime Validation\n * ====================================\n * These schemas provide runtime type safety for IPC messages,\n * ensuring data integrity across process boundaries.\n */\n\n/**\n * Base IPC message schema - validates the core message structure.\n * All IPC messages must conform to this schema.\n */\nconst IpcMessageSchema = z.object({\n /** Unique identifier for message tracking and response correlation. */\n id: z.string().min(1),\n /** Unix timestamp for freshness validation and replay prevention. */\n timestamp: z.number().positive(),\n /** Message type identifier for routing and handling. */\n type: z.string().min(1),\n /** Payload data - can be any JSON-serializable value. */\n data: z.unknown(),\n})\n\n/**\n * IPC handshake schema - used for initial connection establishment.\n * The handshake includes version info and authentication tokens.\n * @internal Exported for testing purposes.\n */\nexport const IpcHandshakeSchema = IpcMessageSchema.extend({\n type: z.literal('handshake'),\n data: z.object({\n /** Protocol version for compatibility checking. */\n version: z.string(),\n /** Process ID for identification. */\n pid: z.number().int().positive(),\n /** Optional API token for authentication. */\n apiToken: z.string().optional(),\n /** Application name for multi-app support. */\n appName: z.string(),\n }),\n})\n\n/**\n * IPC stub file schema - validates the structure of stub files.\n * Stub files are used for passing data between processes via filesystem.\n */\nconst IpcStubSchema = z.object({\n /** Process ID that created the stub. */\n pid: z.number().int().positive(),\n /** Creation timestamp for age validation. */\n timestamp: z.number().positive(),\n /** The actual data payload. 
*/\n data: z.unknown(),\n})\n\n/**\n * TypeScript interfaces for IPC communication.\n * These types ensure type consistency across the IPC module.\n */\n\n/**\n * Base IPC message interface.\n * All IPC messages must conform to this structure.\n */\nexport interface IpcMessage<T = unknown> {\n /** Unique identifier for message tracking and response correlation. */\n id: string\n /** Unix timestamp for freshness validation and replay prevention. */\n timestamp: number\n /** Message type identifier for routing and handling. */\n type: string\n /** Payload data - can be any JSON-serializable value. */\n data: T\n}\n\n/**\n * IPC handshake message interface.\n * Used for initial connection establishment.\n */\nexport interface IpcHandshake\n extends IpcMessage<{\n /** Protocol version for compatibility checking. */\n version: string\n /** Process ID for identification. */\n pid: number\n /** Optional API token for authentication. */\n apiToken?: string\n /** Application name for multi-app support. */\n appName: string\n }> {\n type: 'handshake'\n}\n\n/**\n * IPC stub file interface.\n * Represents the structure of stub files used for filesystem-based IPC.\n */\nexport interface IpcStub {\n /** Process ID that created the stub. */\n pid: number\n /** Creation timestamp for age validation. */\n timestamp: number\n /** The actual data payload. */\n data: unknown\n}\n\n/**\n * Options for IPC communication\n */\nexport interface IpcOptions {\n /** Timeout in milliseconds for async operations. */\n timeout?: number\n /** Text encoding for message serialization. */\n encoding?: BufferEncoding\n}\n\n/**\n * Create a unique IPC channel identifier for message correlation.\n *\n * Generates a unique identifier that combines:\n * - A prefix for namespacing (defaults to 'socket')\n * - The current process ID for process identification\n * - A random hex string for uniqueness\n *\n * @param prefix - Optional prefix to namespace the channel ID\n * @returns A unique channel identifier string\n *\n * @example\n * ```typescript\n * const channelId = createIpcChannelId('socket-cli')\n * // Returns: 'socket-cli-12345-a1b2c3d4e5f6g7h8'\n * ```\n */\nexport function createIpcChannelId(prefix = 'socket'): string {\n return `${prefix}-${process.pid}-${crypto.randomBytes(8).toString('hex')}`\n}\n\n/**\n * Get the IPC stub path for a given application.\n *\n * This function generates a unique file path for IPC stub files that are used\n * to pass data between processes. 
The stub files are stored in a hidden directory\n * within the system's temporary folder.\n *\n * ## Path Structure:\n * - Base: System temp directory (e.g., /tmp on Unix, %TEMP% on Windows)\n * - Directory: `.socket-ipc/{appName}/`\n * - Filename: `stub-{pid}.json`\n *\n * ## Security Features:\n * - Files are isolated per application via appName parameter\n * - Process ID in filename prevents collisions between concurrent processes\n * - Temporary directory location ensures automatic cleanup on system restart\n *\n * @param appName - The application identifier (e.g., 'socket-cli', 'socket-dlx')\n * @returns Full path to the IPC stub file\n *\n * @example\n * ```typescript\n * const stubPath = getIpcStubPath('socket-cli')\n * // Returns: '/tmp/.socket-ipc/socket-cli/stub-12345.json' (Unix)\n * // Returns: 'C:\\\\Users\\\\Name\\\\AppData\\\\Local\\\\Temp\\\\.socket-ipc\\\\socket-cli\\\\stub-12345.json' (Windows)\n * ```\n *\n * @used Currently used by socket-cli for self-update and inter-process communication\n */\nexport function getIpcStubPath(appName: string): string {\n // Get the system's temporary directory - this is platform-specific.\n const tempDir = getOsTmpDir()\n\n // Create a hidden directory structure for Socket IPC files.\n // The dot prefix makes it hidden on Unix-like systems.\n const stubDir = path.join(tempDir, '.socket-ipc', appName)\n\n // Generate filename with process ID to ensure uniqueness.\n // The PID prevents conflicts when multiple processes run simultaneously.\n return path.join(stubDir, `stub-${process.pid}.json`)\n}\n\n/**\n * Ensure IPC directory exists for stub file creation.\n *\n * This helper function creates the directory structure needed for IPC stub files.\n * It's called before writing stub files to ensure the parent directories exist.\n *\n * @param filePath - Full path to the file that needs its directory created\n * @returns Promise that resolves when directory is created\n *\n * @internal Helper function used by writeIpcStub\n */\nasync function ensureIpcDirectory(filePath: string): Promise<void> {\n const dir = path.dirname(filePath)\n // Create directory recursively if it doesn't exist.\n await fs.mkdir(dir, { recursive: true })\n}\n\n/**\n * Write IPC data to a stub file for inter-process data transfer.\n *\n * This function creates a stub file containing data that needs to be passed\n * between processes. The stub file includes metadata like process ID and\n * timestamp for validation.\n *\n * ## File Structure:\n * ```json\n * {\n * \"pid\": 12345,\n * \"timestamp\": 1699564234567,\n * \"data\": { ... }\n * }\n * ```\n *\n * ## Use Cases:\n * - Passing API tokens to child processes\n * - Transferring configuration between Socket CLI components\n * - Sharing large data that exceeds environment variable limits\n *\n * @param appName - The application identifier\n * @param data - The data to write to the stub file\n * @returns Promise resolving to the stub file path\n *\n * @example\n * ```typescript\n * const stubPath = await writeIpcStub('socket-cli', {\n * apiToken: 'secret-token',\n * config: { ... 
}\n * })\n * // Pass stubPath to child process for reading\n * ```\n */\nexport async function writeIpcStub(\n appName: string,\n data: unknown,\n): Promise<string> {\n const stubPath = getIpcStubPath(appName)\n await ensureIpcDirectory(stubPath)\n\n // Create stub data with validation metadata.\n const ipcData: IpcStub = {\n data,\n pid: process.pid,\n timestamp: Date.now(),\n }\n\n // Validate data structure with Zod schema.\n const validated = IpcStubSchema.parse(ipcData)\n\n // Write with pretty printing for debugging.\n await fs.writeFile(stubPath, JSON.stringify(validated, null, 2), 'utf8')\n return stubPath\n}\n\n/**\n * Read IPC data from a stub file with automatic cleanup.\n *\n * This function reads data from an IPC stub file and validates its freshness.\n * Stale files (older than 5 minutes) are automatically cleaned up to prevent\n * accumulation of temporary files.\n *\n * ## Validation Steps:\n * 1. Read and parse JSON file\n * 2. Validate structure with Zod schema\n * 3. Check timestamp freshness\n * 4. Clean up if stale\n * 5. Return data if valid\n *\n * @param stubPath - Path to the stub file to read\n * @returns Promise resolving to the data or null if invalid/stale\n *\n * @example\n * ```typescript\n * const data = await readIpcStub('/tmp/.socket-ipc/socket-cli/stub-12345.json')\n * if (data) {\n * console.log('Received:', data)\n * }\n * ```\n *\n * @unused Reserved for future implementation\n */\nexport async function readIpcStub(stubPath: string): Promise<unknown> {\n try {\n const content = await fs.readFile(stubPath, 'utf8')\n const parsed = JSON.parse(content)\n // Validate structure with Zod schema.\n const validated = IpcStubSchema.parse(parsed)\n // Check age for freshness validation.\n const ageMs = Date.now() - validated.timestamp\n // 5 minutes.\n const maxAgeMs = 5 * 60 * 1000\n if (ageMs > maxAgeMs) {\n // Clean up stale file. IPC stubs are always in tmpdir, so use force: true.\n try {\n safeDeleteSync(stubPath, { force: true })\n } catch {\n // Ignore deletion errors\n }\n return null\n }\n return validated.data\n } catch {\n // Return null for any errors (file not found, invalid JSON, validation failure).\n return null\n }\n}\n\n/**\n * Clean up IPC stub files for an application.\n *\n * This maintenance function removes stale IPC stub files to prevent\n * accumulation in the temporary directory. 
It's designed to be called\n * periodically or on application startup.\n *\n * ## Cleanup Rules:\n * - Files older than 5 minutes are removed (checked via both filesystem mtime and JSON timestamp)\n * - Only stub files (stub-*.json) are processed\n * - Errors are silently ignored (best-effort cleanup)\n *\n * @param appName - The application identifier\n * @returns Promise that resolves when cleanup is complete\n *\n * @example\n * ```typescript\n * // Clean up on application startup\n * await cleanupIpcStubs('socket-cli')\n * ```\n *\n * @unused Reserved for future implementation\n */\nexport async function cleanupIpcStubs(appName: string): Promise<void> {\n const tempDir = getOsTmpDir()\n const stubDir = path.join(tempDir, '.socket-ipc', appName)\n try {\n const files = await fs.readdir(stubDir)\n const now = Date.now()\n // 5 minutes.\n const maxAgeMs = 5 * 60 * 1000\n // Process each file in parallel for efficiency.\n await Promise.all(\n files.map(async file => {\n if (file.startsWith('stub-') && file.endsWith('.json')) {\n const filePath = path.join(stubDir, file)\n try {\n // Check both filesystem mtime and JSON timestamp for more reliable detection\n const stats = await fs.stat(filePath)\n const mtimeAge = now - stats.mtimeMs\n let isStale = mtimeAge > maxAgeMs\n\n // Always check the timestamp inside the JSON file for accuracy\n // This is more reliable than filesystem mtime in some environments\n try {\n const content = await fs.readFile(filePath, 'utf8')\n const parsed = JSON.parse(content)\n const validated = IpcStubSchema.parse(parsed)\n const contentAge = now - validated.timestamp\n // File is stale if EITHER check indicates staleness\n isStale = isStale || contentAge > maxAgeMs\n } catch {\n // If we can't read/parse the file, rely on mtime check\n }\n\n if (isStale) {\n // IPC stubs are always in tmpdir, so we can use force: true to skip path checks\n safeDeleteSync(filePath, { force: true })\n }\n } catch {\n // Ignore errors for individual files.\n }\n }\n }),\n )\n } catch {\n // Directory might not exist, that's ok.\n }\n}\n\n/**\n * Send data through Node.js IPC channel.\n *\n * This function sends structured messages through the Node.js IPC channel\n * when available. The IPC channel must be established with stdio: ['pipe', 'pipe', 'pipe', 'ipc'].\n *\n * ## Requirements:\n * - Process must have been spawned with IPC channel enabled\n * - Message must be serializable to JSON\n * - Process.send() must be available\n *\n * @param process - The process object with IPC channel\n * @param message - The IPC message to send\n * @returns true if message was sent, false otherwise\n *\n * @example\n * ```typescript\n * const message = createIpcMessage('handshake', { version: '1.0.0' })\n * const sent = sendIpc(childProcess, message)\n * ```\n *\n * @unused Reserved for bidirectional communication implementation\n */\nexport function sendIpc(\n process: NodeJS.Process | unknown,\n message: IpcMessage,\n): boolean {\n if (\n process &&\n typeof process === 'object' &&\n 'send' in process &&\n typeof process.send === 'function'\n ) {\n try {\n // Validate message structure before sending.\n const validated = IpcMessageSchema.parse(message)\n return process.send(validated)\n } catch {\n return false\n }\n }\n return false\n}\n\n/**\n * Receive data through Node.js IPC channel.\n *\n * Sets up a listener for IPC messages with automatic validation and parsing.\n * Returns a cleanup function to remove the listener when no longer needed.\n *\n * ## Message Flow:\n * 1. 
Receive raw message from IPC channel\n * 2. Validate with parseIpcMessage\n * 3. Call handler if valid\n * 4. Ignore invalid messages\n *\n * @param handler - Function to call with valid IPC messages\n * @returns Cleanup function to remove the listener\n *\n * @example\n * ```typescript\n * const cleanup = onIpc((message) => {\n * console.log('Received:', message.type, message.data)\n * })\n * // Later...\n * cleanup() // Remove listener\n * ```\n *\n * @unused Reserved for bidirectional communication\n */\nexport function onIpc(handler: (message: IpcMessage) => void): () => void {\n const listener = (message: unknown) => {\n const parsed = parseIpcMessage(message)\n if (parsed) {\n handler(parsed)\n }\n }\n process.on('message', listener)\n // Return cleanup function for proper resource management.\n return () => {\n process.off('message', listener)\n }\n}\n\n/**\n * Create a promise that resolves when a specific IPC message is received.\n *\n * This utility function provides async/await support for IPC communication,\n * allowing you to wait for specific message types with timeout support.\n *\n * ## Features:\n * - Automatic timeout handling\n * - Type-safe message data\n * - Resource cleanup on completion\n * - Promise-based API\n *\n * @param messageType - The message type to wait for\n * @param options - Options including timeout configuration\n * @returns Promise resolving to the message data\n *\n * @example\n * ```typescript\n * try {\n * const response = await waitForIpc<ConfigData>('config-response', {\n * timeout: 5000 // 5 seconds\n * })\n * console.log('Config received:', response)\n * } catch (error) {\n * console.error('Timeout waiting for config')\n * }\n * ```\n *\n * @unused Reserved for request-response pattern implementation\n */\nexport function waitForIpc<T = unknown>(\n messageType: string,\n options: IpcOptions = {},\n): Promise<T> {\n const { timeout = 30_000 } = options\n return new Promise((resolve, reject) => {\n let cleanup: (() => void) | null = null\n let timeoutId: NodeJS.Timeout | null = null\n const handleTimeout = () => {\n if (cleanup) {\n cleanup()\n }\n reject(new Error(`IPC timeout waiting for message type: ${messageType}`))\n }\n const handleMessage = (message: IpcMessage) => {\n if (message.type === messageType) {\n if (timeoutId) {\n clearTimeout(timeoutId)\n }\n if (cleanup) {\n cleanup()\n }\n resolve(message.data as T)\n }\n }\n cleanup = onIpc(handleMessage)\n if (timeout > 0) {\n timeoutId = setTimeout(handleTimeout, timeout)\n }\n })\n}\n\n/**\n * Create an IPC message with proper structure and metadata.\n *\n * This factory function creates properly structured IPC messages with:\n * - Unique ID for tracking\n * - Timestamp for freshness\n * - Type for routing\n * - Data payload\n *\n * @param type - The message type identifier\n * @param data - The message payload\n * @returns A properly structured IPC message\n *\n * @example\n * ```typescript\n * const handshake = createIpcMessage('handshake', {\n * version: '1.0.0',\n * pid: process.pid,\n * appName: 'socket-cli'\n * })\n * ```\n *\n * @unused Reserved for future message creation needs\n */\nexport function createIpcMessage<T = unknown>(\n type: string,\n data: T,\n): IpcMessage<T> {\n return {\n id: crypto.randomBytes(16).toString('hex'),\n timestamp: Date.now(),\n type,\n data,\n }\n}\n\n/**\n * Check if process has IPC channel available.\n *\n * This utility checks whether a process object has the necessary\n * properties for IPC communication. 
Used to determine if IPC\n * messaging is possible before attempting to send.\n *\n * @param process - The process object to check\n * @returns true if IPC is available, false otherwise\n *\n * @example\n * ```typescript\n * if (hasIpcChannel(childProcess)) {\n * sendIpc(childProcess, message)\n * } else {\n * // Fall back to alternative communication method\n * }\n * ```\n *\n * @unused Reserved for IPC availability detection\n */\nexport function hasIpcChannel(process: unknown): boolean {\n return Boolean(\n process &&\n typeof process === 'object' &&\n 'send' in process &&\n typeof process.send === 'function' &&\n 'channel' in process &&\n process.channel !== undefined,\n )\n}\n\n/**\n * Safely parse and validate IPC messages.\n *\n * This function performs runtime validation of incoming messages\n * to ensure they conform to the IPC message structure. It uses\n * Zod schemas for robust validation.\n *\n * ## Validation Steps:\n * 1. Check if message is an object\n * 2. Validate required fields exist\n * 3. Validate field types\n * 4. Return typed message or null\n *\n * @param message - The raw message to parse\n * @returns Parsed IPC message or null if invalid\n *\n * @example\n * ```typescript\n * const parsed = parseIpcMessage(rawMessage)\n * if (parsed) {\n * handleMessage(parsed)\n * }\n * ```\n *\n * @unused Reserved for message validation needs\n */\nexport function parseIpcMessage(message: unknown): IpcMessage | null {\n try {\n // Use Zod schema for comprehensive validation.\n const validated = IpcMessageSchema.parse(message)\n return validated as IpcMessage\n } catch {\n // Return null for any validation failure.\n return null\n }\n}\n"],
5
- "mappings": ";6iBAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,wBAAAE,EAAA,oBAAAC,EAAA,uBAAAC,EAAA,qBAAAC,EAAA,mBAAAC,EAAA,kBAAAC,EAAA,UAAAC,EAAA,oBAAAC,EAAA,gBAAAC,EAAA,YAAAC,EAAA,eAAAC,EAAA,iBAAAC,IAAA,eAAAC,EAAAd,GA8BA,IAAAe,EAAmB,0BACnBC,EAA+B,mBAC/BC,EAAiB,wBAEjBC,EAA+B,gBAC/BC,EAA4B,mBAC5BC,EAAkB,iBAgBlB,MAAMC,EAAmB,IAAE,OAAO,CAEhC,GAAI,IAAE,OAAO,EAAE,IAAI,CAAC,EAEpB,UAAW,IAAE,OAAO,EAAE,SAAS,EAE/B,KAAM,IAAE,OAAO,EAAE,IAAI,CAAC,EAEtB,KAAM,IAAE,QAAQ,CAClB,CAAC,EAOYnB,EAAqBmB,EAAiB,OAAO,CACxD,KAAM,IAAE,QAAQ,WAAW,EAC3B,KAAM,IAAE,OAAO,CAEb,QAAS,IAAE,OAAO,EAElB,IAAK,IAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAE/B,SAAU,IAAE,OAAO,EAAE,SAAS,EAE9B,QAAS,IAAE,OAAO,CACpB,CAAC,CACH,CAAC,EAMKC,EAAgB,IAAE,OAAO,CAE7B,IAAK,IAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAE/B,UAAW,IAAE,OAAO,EAAE,SAAS,EAE/B,KAAM,IAAE,QAAQ,CAClB,CAAC,EAgFM,SAASlB,EAAmBmB,EAAS,SAAkB,CAC5D,MAAO,GAAGA,CAAM,IAAI,QAAQ,GAAG,IAAI,EAAAC,QAAO,YAAY,CAAC,EAAE,SAAS,KAAK,CAAC,EAC1E,CA+BO,SAASlB,EAAemB,EAAyB,CAEtD,MAAMC,KAAU,eAAY,EAItBC,EAAU,EAAAC,QAAK,KAAKF,EAAS,cAAeD,CAAO,EAIzD,OAAO,EAAAG,QAAK,KAAKD,EAAS,QAAQ,QAAQ,GAAG,OAAO,CACtD,CAaA,eAAeE,EAAmBC,EAAiC,CACjE,MAAMC,EAAM,EAAAH,QAAK,QAAQE,CAAQ,EAEjC,MAAM,EAAAE,SAAG,MAAMD,EAAK,CAAE,UAAW,EAAK,CAAC,CACzC,CAoCA,eAAsBlB,EACpBY,EACAQ,EACiB,CACjB,MAAMC,EAAW5B,EAAemB,CAAO,EACvC,MAAMI,EAAmBK,CAAQ,EAGjC,MAAMC,EAAmB,CACvB,KAAAF,EACA,IAAK,QAAQ,IACb,UAAW,KAAK,IAAI,CACtB,EAGMG,EAAYd,EAAc,MAAMa,CAAO,EAG7C,aAAM,EAAAH,SAAG,UAAUE,EAAU,KAAK,UAAUE,EAAW,KAAM,CAAC,EAAG,MAAM,EAChEF,CACT,CA6BA,eAAsBxB,EAAYwB,EAAoC,CACpE,GAAI,CACF,MAAMG,EAAU,MAAM,EAAAL,SAAG,SAASE,EAAU,MAAM,EAC5CI,EAAS,KAAK,MAAMD,CAAO,EAE3BD,EAAYd,EAAc,MAAMgB,CAAM,EAEtCC,EAAQ,KAAK,IAAI,EAAIH,EAAU,UAE/BI,EAAW,IAAS,IAC1B,GAAID,EAAQC,EAAU,CAEpB,GAAI,IACF,kBAAeN,EAAU,CAAE,MAAO,EAAK,CAAC,CAC1C,MAAQ,CAER,CACA,OAAO,IACT,CACA,OAAOE,EAAU,IACnB,MAAQ,CAEN,OAAO,IACT,CACF,CAyBA,eAAsBjC,EAAgBsB,EAAgC,CACpE,MAAMC,KAAU,eAAY,EACtBC,EAAU,EAAAC,QAAK,KAAKF,EAAS,cAAeD,CAAO,EACzD,GAAI,CACF,MAAMgB,EAAQ,MAAM,EAAAT,SAAG,QAAQL,CAAO,EAChCe,EAAM,KAAK,IAAI,EAEfF,EAAW,IAAS,IAE1B,MAAM,QAAQ,IACZC,EAAM,IAAI,MAAME,GAAQ,CACtB,GAAIA,EAAK,WAAW,OAAO,GAAKA,EAAK,SAAS,OAAO,EAAG,CACtD,MAAMb,EAAW,EAAAF,QAAK,KAAKD,EAASgB,CAAI,EACxC,GAAI,CAEF,MAAMC,EAAQ,MAAM,EAAAZ,SAAG,KAAKF,CAAQ,EAEpC,IAAIe,EADaH,EAAME,EAAM,QACJJ,EAIzB,GAAI,CACF,MAAMH,EAAU,MAAM,EAAAL,SAAG,SAASF,EAAU,MAAM,EAC5CQ,EAAS,KAAK,MAAMD,CAAO,EAC3BD,EAAYd,EAAc,MAAMgB,CAAM,EACtCQ,EAAaJ,EAAMN,EAAU,UAEnCS,EAAUA,GAAWC,EAAaN,CACpC,MAAQ,CAER,CAEIK,MAEF,kBAAef,EAAU,CAAE,MAAO,EAAK,CAAC,CAE5C,MAAQ,CAER,CACF,CACF,CAAC,CACH,CACF,MAAQ,CAER,CACF,CAyBO,SAASnB,EACdoC,EACAC,EACS,CACT,GACED,GACA,OAAOA,GAAY,UACnB,SAAUA,GACV,OAAOA,EAAQ,MAAS,WAExB,GAAI,CAEF,MAAMX,EAAYf,EAAiB,MAAM2B,CAAO,EAChD,OAAOD,EAAQ,KAAKX,CAAS,CAC/B,MAAQ,CACN,MAAO,EACT,CAEF,MAAO,EACT,CA4BO,SAAS5B,EAAMyC,EAAoD,CACxE,MAAMC,EAAYF,GAAqB,CACrC,MAAMV,EAAS7B,EAAgBuC,CAAO,EAClCV,GACFW,EAAQX,CAAM,CAElB,EACA,eAAQ,GAAG,UAAWY,CAAQ,EAEvB,IAAM,CACX,QAAQ,IAAI,UAAWA,CAAQ,CACjC,CACF,CAgCO,SAAStC,EACduC,EACAC,EAAsB,CAAC,EACX,CACZ,KAAM,CAAE,QAAAC,EAAU,GAAO,EAAID,EAC7B,OAAO,IAAI,QAAQ,CAACE,EAASC,IAAW,CACtC,IAAIC,EAA+B,KAC/BC,EAAmC,KACvC,MAAMC,EAAgB,IAAM,CACtBF,GACFA,EAAQ,EAEVD,EAAO,IAAI,MAAM,yCAAyCJ,CAAW,EAAE,CAAC,CAC1E,EAYAK,EAAUhD,EAXawC,GAAwB,CACzCA,EAAQ,OAASG,IACfM,GACF,aAAaA,CAAS,EAEpBD,GACFA,EAAQ,EAEVF,EAAQN,EAAQ,IAAS,EAE7B,CAC6B,EACzBK,EAAU,IACZI,EAAY,WAAWC,EAAeL,CAAO,EAEjD,CAAC,CACH,CA0BO,SAAShD,EACdsD,EACA1B,EACe,CACf,MAAO,CACL,GAAI,EAAAT,QAAO,YAAY,EAAE,EAAE,SAAS,KAAK,EACzC,UAAW,KAAK,IAAI,EACpB,KAAAmC,EACA,KAAA1B,CACF,CACF,CAuBO,SAAS1B,EAAcwC,EAA2B,CACvD,MAAO,GACLA,GACE,OAAOA,GAAY,UACnB,SAAUA,GACV,OAAOA,EAAQ,MAAS,YACxB,YAAaA,GACbA,
EAAQ,UAAY,OAE1B,CA4BO,SAAStC,EAAgBuC,EAAqC,CACnE,GAAI,CAGF,OADkB3B,EAAiB,MAAM2B,CAAO,CAElD,MAAQ,CAEN,OAAO,IACT,CACF",
4
+ "sourcesContent": ["/**\n * IPC (Inter-Process Communication) Module\n * ==========================================\n *\n * This module provides secure inter-process communication utilities for Socket CLI\n * and related tools. It replaces environment variable passing with more secure and\n * scalable alternatives.\n *\n * ## Key Features:\n * - File-based stub communication for initial data handoff\n * - Node.js IPC channel support for real-time bidirectional messaging\n * - Automatic cleanup of temporary files\n * - Type-safe message validation with Zod schemas\n * - Timeout handling for reliability\n *\n * ## Use Cases:\n * 1. Passing API tokens between processes without exposing them in env vars\n * 2. Transferring large configuration objects that exceed env var size limits\n * 3. Bidirectional communication between parent and child processes\n * 4. Secure handshake protocols between Socket CLI components\n *\n * ## Security Considerations:\n * - Stub files are created with restricted permissions in OS temp directory\n * - Messages include timestamps for freshness validation\n * - Automatic cleanup prevents sensitive data persistence\n * - Unique IDs prevent message replay attacks\n *\n * @module ipc\n */\n\nimport crypto from 'node:crypto'\n\nimport { promises as fs } from 'node:fs'\n\nimport path from 'node:path'\n\nimport { safeDeleteSync } from './fs'\nimport { getOsTmpDir } from './paths'\nimport { z } from './zod'\n\n// Define BufferEncoding type for TypeScript compatibility.\ntype BufferEncoding = globalThis.BufferEncoding\n\n/**\n * Zod Schemas for Runtime Validation\n * ====================================\n * These schemas provide runtime type safety for IPC messages,\n * ensuring data integrity across process boundaries.\n */\n\n/**\n * Base IPC message schema - validates the core message structure.\n * All IPC messages must conform to this schema.\n */\nconst IpcMessageSchema = z.object({\n /** Unique identifier for message tracking and response correlation. */\n id: z.string().min(1),\n /** Unix timestamp for freshness validation and replay prevention. */\n timestamp: z.number().positive(),\n /** Message type identifier for routing and handling. */\n type: z.string().min(1),\n /** Payload data - can be any JSON-serializable value. */\n data: z.unknown(),\n})\n\n/**\n * IPC handshake schema - used for initial connection establishment.\n * The handshake includes version info and authentication tokens.\n * @internal Exported for testing purposes.\n */\nexport const IpcHandshakeSchema = IpcMessageSchema.extend({\n type: z.literal('handshake'),\n data: z.object({\n /** Protocol version for compatibility checking. */\n version: z.string(),\n /** Process ID for identification. */\n pid: z.number().int().positive(),\n /** Optional API token for authentication. */\n apiToken: z.string().optional(),\n /** Application name for multi-app support. */\n appName: z.string(),\n }),\n})\n\n/**\n * IPC stub file schema - validates the structure of stub files.\n * Stub files are used for passing data between processes via filesystem.\n */\nconst IpcStubSchema = z.object({\n /** Process ID that created the stub. */\n pid: z.number().int().positive(),\n /** Creation timestamp for age validation. */\n timestamp: z.number().positive(),\n /** The actual data payload. 
*/\n data: z.unknown(),\n})\n\n/**\n * TypeScript interfaces for IPC communication.\n * These types ensure type consistency across the IPC module.\n */\n\n/**\n * Base IPC message interface.\n * All IPC messages must conform to this structure.\n */\nexport interface IpcMessage<T = unknown> {\n /** Unique identifier for message tracking and response correlation. */\n id: string\n /** Unix timestamp for freshness validation and replay prevention. */\n timestamp: number\n /** Message type identifier for routing and handling. */\n type: string\n /** Payload data - can be any JSON-serializable value. */\n data: T\n}\n\n/**\n * IPC handshake message interface.\n * Used for initial connection establishment.\n */\nexport interface IpcHandshake\n extends IpcMessage<{\n /** Protocol version for compatibility checking. */\n version: string\n /** Process ID for identification. */\n pid: number\n /** Optional API token for authentication. */\n apiToken?: string\n /** Application name for multi-app support. */\n appName: string\n }> {\n type: 'handshake'\n}\n\n/**\n * IPC stub file interface.\n * Represents the structure of stub files used for filesystem-based IPC.\n */\nexport interface IpcStub {\n /** Process ID that created the stub. */\n pid: number\n /** Creation timestamp for age validation. */\n timestamp: number\n /** The actual data payload. */\n data: unknown\n}\n\n/**\n * Options for IPC communication\n */\nexport interface IpcOptions {\n /** Timeout in milliseconds for async operations. */\n timeout?: number\n /** Text encoding for message serialization. */\n encoding?: BufferEncoding\n}\n\n/**\n * Create a unique IPC channel identifier for message correlation.\n *\n * Generates a unique identifier that combines:\n * - A prefix for namespacing (defaults to 'socket')\n * - The current process ID for process identification\n * - A random hex string for uniqueness\n *\n * @param prefix - Optional prefix to namespace the channel ID\n * @returns A unique channel identifier string\n *\n * @example\n * ```typescript\n * const channelId = createIpcChannelId('socket-cli')\n * // Returns: 'socket-cli-12345-a1b2c3d4e5f6g7h8'\n * ```\n */\nexport function createIpcChannelId(prefix = 'socket'): string {\n return `${prefix}-${process.pid}-${crypto.randomBytes(8).toString('hex')}`\n}\n\n/**\n * Get the IPC stub path for a given application.\n *\n * This function generates a unique file path for IPC stub files that are used\n * to pass data between processes. 
The stub files are stored in a hidden directory\n * within the system's temporary folder.\n *\n * ## Path Structure:\n * - Base: System temp directory (e.g., /tmp on Unix, %TEMP% on Windows)\n * - Directory: `.socket-ipc/{appName}/`\n * - Filename: `stub-{pid}.json`\n *\n * ## Security Features:\n * - Files are isolated per application via appName parameter\n * - Process ID in filename prevents collisions between concurrent processes\n * - Temporary directory location ensures automatic cleanup on system restart\n *\n * @param appName - The application identifier (e.g., 'socket-cli', 'socket-dlx')\n * @returns Full path to the IPC stub file\n *\n * @example\n * ```typescript\n * const stubPath = getIpcStubPath('socket-cli')\n * // Returns: '/tmp/.socket-ipc/socket-cli/stub-12345.json' (Unix)\n * // Returns: 'C:\\\\Users\\\\Name\\\\AppData\\\\Local\\\\Temp\\\\.socket-ipc\\\\socket-cli\\\\stub-12345.json' (Windows)\n * ```\n *\n * @used Currently used by socket-cli for self-update and inter-process communication\n */\nexport function getIpcStubPath(appName: string): string {\n // Get the system's temporary directory - this is platform-specific.\n const tempDir = getOsTmpDir()\n\n // Create a hidden directory structure for Socket IPC files.\n // The dot prefix makes it hidden on Unix-like systems.\n const stubDir = path.join(tempDir, '.socket-ipc', appName)\n\n // Generate filename with process ID to ensure uniqueness.\n // The PID prevents conflicts when multiple processes run simultaneously.\n return path.join(stubDir, `stub-${process.pid}.json`)\n}\n\n/**\n * Ensure IPC directory exists for stub file creation.\n *\n * This helper function creates the directory structure needed for IPC stub files.\n * It's called before writing stub files to ensure the parent directories exist.\n *\n * @param filePath - Full path to the file that needs its directory created\n * @returns Promise that resolves when directory is created\n *\n * @internal Helper function used by writeIpcStub\n */\nasync function ensureIpcDirectory(filePath: string): Promise<void> {\n const dir = path.dirname(filePath)\n // Create directory recursively if it doesn't exist.\n await fs.mkdir(dir, { recursive: true })\n}\n\n/**\n * Write IPC data to a stub file for inter-process data transfer.\n *\n * This function creates a stub file containing data that needs to be passed\n * between processes. The stub file includes metadata like process ID and\n * timestamp for validation.\n *\n * ## File Structure:\n * ```json\n * {\n * \"pid\": 12345,\n * \"timestamp\": 1699564234567,\n * \"data\": { ... }\n * }\n * ```\n *\n * ## Use Cases:\n * - Passing API tokens to child processes\n * - Transferring configuration between Socket CLI components\n * - Sharing large data that exceeds environment variable limits\n *\n * @param appName - The application identifier\n * @param data - The data to write to the stub file\n * @returns Promise resolving to the stub file path\n *\n * @example\n * ```typescript\n * const stubPath = await writeIpcStub('socket-cli', {\n * apiToken: 'secret-token',\n * config: { ... 
}\n * })\n * // Pass stubPath to child process for reading\n * ```\n */\nexport async function writeIpcStub(\n appName: string,\n data: unknown,\n): Promise<string> {\n const stubPath = getIpcStubPath(appName)\n await ensureIpcDirectory(stubPath)\n\n // Create stub data with validation metadata.\n const ipcData: IpcStub = {\n data,\n pid: process.pid,\n timestamp: Date.now(),\n }\n\n // Validate data structure with Zod schema.\n const validated = IpcStubSchema.parse(ipcData)\n\n // Write with pretty printing for debugging.\n await fs.writeFile(stubPath, JSON.stringify(validated, null, 2), 'utf8')\n return stubPath\n}\n\n/**\n * Read IPC data from a stub file with automatic cleanup.\n *\n * This function reads data from an IPC stub file and validates its freshness.\n * Stale files (older than 5 minutes) are automatically cleaned up to prevent\n * accumulation of temporary files.\n *\n * ## Validation Steps:\n * 1. Read and parse JSON file\n * 2. Validate structure with Zod schema\n * 3. Check timestamp freshness\n * 4. Clean up if stale\n * 5. Return data if valid\n *\n * @param stubPath - Path to the stub file to read\n * @returns Promise resolving to the data or null if invalid/stale\n *\n * @example\n * ```typescript\n * const data = await readIpcStub('/tmp/.socket-ipc/socket-cli/stub-12345.json')\n * if (data) {\n * console.log('Received:', data)\n * }\n * ```\n *\n * @unused Reserved for future implementation\n */\nexport async function readIpcStub(stubPath: string): Promise<unknown> {\n try {\n const content = await fs.readFile(stubPath, 'utf8')\n const parsed = JSON.parse(content)\n // Validate structure with Zod schema.\n const validated = IpcStubSchema.parse(parsed)\n // Check age for freshness validation.\n const ageMs = Date.now() - validated.timestamp\n // 5 minutes.\n const maxAgeMs = 5 * 60 * 1000\n if (ageMs > maxAgeMs) {\n // Clean up stale file. IPC stubs are always in tmpdir, so use force: true.\n try {\n safeDeleteSync(stubPath, { force: true })\n } catch {\n // Ignore deletion errors\n }\n return null\n }\n return validated.data\n } catch {\n // Return null for any errors (file not found, invalid JSON, validation failure).\n return null\n }\n}\n\n/**\n * Clean up IPC stub files for an application.\n *\n * This maintenance function removes stale IPC stub files to prevent\n * accumulation in the temporary directory. 
It's designed to be called\n * periodically or on application startup.\n *\n * ## Cleanup Rules:\n * - Files older than 5 minutes are removed (checked via both filesystem mtime and JSON timestamp)\n * - Only stub files (stub-*.json) are processed\n * - Errors are silently ignored (best-effort cleanup)\n *\n * @param appName - The application identifier\n * @returns Promise that resolves when cleanup is complete\n *\n * @example\n * ```typescript\n * // Clean up on application startup\n * await cleanupIpcStubs('socket-cli')\n * ```\n *\n * @unused Reserved for future implementation\n */\nexport async function cleanupIpcStubs(appName: string): Promise<void> {\n const tempDir = getOsTmpDir()\n const stubDir = path.join(tempDir, '.socket-ipc', appName)\n try {\n const files = await fs.readdir(stubDir)\n const now = Date.now()\n // 5 minutes.\n const maxAgeMs = 5 * 60 * 1000\n // Process each file in parallel for efficiency.\n await Promise.all(\n files.map(async file => {\n if (file.startsWith('stub-') && file.endsWith('.json')) {\n const filePath = path.join(stubDir, file)\n try {\n // Check both filesystem mtime and JSON timestamp for more reliable detection\n const stats = await fs.stat(filePath)\n const mtimeAge = now - stats.mtimeMs\n let isStale = mtimeAge > maxAgeMs\n\n // Always check the timestamp inside the JSON file for accuracy\n // This is more reliable than filesystem mtime in some environments\n try {\n const content = await fs.readFile(filePath, 'utf8')\n const parsed = JSON.parse(content)\n const validated = IpcStubSchema.parse(parsed)\n const contentAge = now - validated.timestamp\n // File is stale if EITHER check indicates staleness\n isStale = isStale || contentAge > maxAgeMs\n } catch {\n // If we can't read/parse the file, rely on mtime check\n }\n\n if (isStale) {\n // IPC stubs are always in tmpdir, so we can use force: true to skip path checks\n safeDeleteSync(filePath, { force: true })\n }\n } catch {\n // Ignore errors for individual files.\n }\n }\n }),\n )\n } catch {\n // Directory might not exist, that's ok.\n }\n}\n\n/**\n * Send data through Node.js IPC channel.\n *\n * This function sends structured messages through the Node.js IPC channel\n * when available. The IPC channel must be established with stdio: ['pipe', 'pipe', 'pipe', 'ipc'].\n *\n * ## Requirements:\n * - Process must have been spawned with IPC channel enabled\n * - Message must be serializable to JSON\n * - Process.send() must be available\n *\n * @param process - The process object with IPC channel\n * @param message - The IPC message to send\n * @returns true if message was sent, false otherwise\n *\n * @example\n * ```typescript\n * const message = createIpcMessage('handshake', { version: '1.0.0' })\n * const sent = sendIpc(childProcess, message)\n * ```\n *\n * @unused Reserved for bidirectional communication implementation\n */\nexport function sendIpc(\n process: NodeJS.Process | unknown,\n message: IpcMessage,\n): boolean {\n if (\n process &&\n typeof process === 'object' &&\n 'send' in process &&\n typeof process.send === 'function'\n ) {\n try {\n // Validate message structure before sending.\n const validated = IpcMessageSchema.parse(message)\n return process.send(validated)\n } catch {\n return false\n }\n }\n return false\n}\n\n/**\n * Receive data through Node.js IPC channel.\n *\n * Sets up a listener for IPC messages with automatic validation and parsing.\n * Returns a cleanup function to remove the listener when no longer needed.\n *\n * ## Message Flow:\n * 1. 
Receive raw message from IPC channel\n * 2. Validate with parseIpcMessage\n * 3. Call handler if valid\n * 4. Ignore invalid messages\n *\n * @param handler - Function to call with valid IPC messages\n * @returns Cleanup function to remove the listener\n *\n * @example\n * ```typescript\n * const cleanup = onIpc((message) => {\n * console.log('Received:', message.type, message.data)\n * })\n * // Later...\n * cleanup() // Remove listener\n * ```\n *\n * @unused Reserved for bidirectional communication\n */\nexport function onIpc(handler: (message: IpcMessage) => void): () => void {\n const listener = (message: unknown) => {\n const parsed = parseIpcMessage(message)\n if (parsed) {\n handler(parsed)\n }\n }\n process.on('message', listener)\n // Return cleanup function for proper resource management.\n return () => {\n process.off('message', listener)\n }\n}\n\n/**\n * Create a promise that resolves when a specific IPC message is received.\n *\n * This utility function provides async/await support for IPC communication,\n * allowing you to wait for specific message types with timeout support.\n *\n * ## Features:\n * - Automatic timeout handling\n * - Type-safe message data\n * - Resource cleanup on completion\n * - Promise-based API\n *\n * @param messageType - The message type to wait for\n * @param options - Options including timeout configuration\n * @returns Promise resolving to the message data\n *\n * @example\n * ```typescript\n * try {\n * const response = await waitForIpc<ConfigData>('config-response', {\n * timeout: 5000 // 5 seconds\n * })\n * console.log('Config received:', response)\n * } catch (error) {\n * console.error('Timeout waiting for config')\n * }\n * ```\n *\n * @unused Reserved for request-response pattern implementation\n */\nexport function waitForIpc<T = unknown>(\n messageType: string,\n options: IpcOptions = {},\n): Promise<T> {\n const { timeout = 30_000 } = options\n return new Promise((resolve, reject) => {\n let cleanup: (() => void) | null = null\n let timeoutId: NodeJS.Timeout | null = null\n const handleTimeout = () => {\n if (cleanup) {\n cleanup()\n }\n reject(new Error(`IPC timeout waiting for message type: ${messageType}`))\n }\n const handleMessage = (message: IpcMessage) => {\n if (message.type === messageType) {\n if (timeoutId) {\n clearTimeout(timeoutId)\n }\n if (cleanup) {\n cleanup()\n }\n resolve(message.data as T)\n }\n }\n cleanup = onIpc(handleMessage)\n if (timeout > 0) {\n timeoutId = setTimeout(handleTimeout, timeout)\n }\n })\n}\n\n/**\n * Create an IPC message with proper structure and metadata.\n *\n * This factory function creates properly structured IPC messages with:\n * - Unique ID for tracking\n * - Timestamp for freshness\n * - Type for routing\n * - Data payload\n *\n * @param type - The message type identifier\n * @param data - The message payload\n * @returns A properly structured IPC message\n *\n * @example\n * ```typescript\n * const handshake = createIpcMessage('handshake', {\n * version: '1.0.0',\n * pid: process.pid,\n * appName: 'socket-cli'\n * })\n * ```\n *\n * @unused Reserved for future message creation needs\n */\nexport function createIpcMessage<T = unknown>(\n type: string,\n data: T,\n): IpcMessage<T> {\n return {\n id: crypto.randomBytes(16).toString('hex'),\n timestamp: Date.now(),\n type,\n data,\n }\n}\n\n/**\n * Check if process has IPC channel available.\n *\n * This utility checks whether a process object has the necessary\n * properties for IPC communication. 
Used to determine if IPC\n * messaging is possible before attempting to send.\n *\n * @param process - The process object to check\n * @returns true if IPC is available, false otherwise\n *\n * @example\n * ```typescript\n * if (hasIpcChannel(childProcess)) {\n * sendIpc(childProcess, message)\n * } else {\n * // Fall back to alternative communication method\n * }\n * ```\n *\n * @unused Reserved for IPC availability detection\n */\nexport function hasIpcChannel(process: unknown): boolean {\n return Boolean(\n process &&\n typeof process === 'object' &&\n 'send' in process &&\n typeof process.send === 'function' &&\n 'channel' in process &&\n process.channel !== undefined,\n )\n}\n\n/**\n * Safely parse and validate IPC messages.\n *\n * This function performs runtime validation of incoming messages\n * to ensure they conform to the IPC message structure. It uses\n * Zod schemas for robust validation.\n *\n * ## Validation Steps:\n * 1. Check if message is an object\n * 2. Validate required fields exist\n * 3. Validate field types\n * 4. Return typed message or null\n *\n * @param message - The raw message to parse\n * @returns Parsed IPC message or null if invalid\n *\n * @example\n * ```typescript\n * const parsed = parseIpcMessage(rawMessage)\n * if (parsed) {\n * handleMessage(parsed)\n * }\n * ```\n *\n * @unused Reserved for message validation needs\n */\nexport function parseIpcMessage(message: unknown): IpcMessage | null {\n try {\n // Use Zod schema for comprehensive validation.\n const validated = IpcMessageSchema.parse(message)\n return validated as IpcMessage\n } catch {\n // Return null for any validation failure.\n return null\n }\n}\n"],
5
+ "mappings": ";6iBAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,wBAAAE,EAAA,oBAAAC,EAAA,uBAAAC,EAAA,qBAAAC,EAAA,mBAAAC,EAAA,kBAAAC,EAAA,UAAAC,EAAA,oBAAAC,EAAA,gBAAAC,EAAA,YAAAC,EAAA,eAAAC,EAAA,iBAAAC,IAAA,eAAAC,EAAAd,GA8BA,IAAAe,EAAmB,0BAEnBC,EAA+B,mBAE/BC,EAAiB,wBAEjBC,EAA+B,gBAC/BC,EAA4B,mBAC5BC,EAAkB,iBAgBlB,MAAMC,EAAmB,IAAE,OAAO,CAEhC,GAAI,IAAE,OAAO,EAAE,IAAI,CAAC,EAEpB,UAAW,IAAE,OAAO,EAAE,SAAS,EAE/B,KAAM,IAAE,OAAO,EAAE,IAAI,CAAC,EAEtB,KAAM,IAAE,QAAQ,CAClB,CAAC,EAOYnB,EAAqBmB,EAAiB,OAAO,CACxD,KAAM,IAAE,QAAQ,WAAW,EAC3B,KAAM,IAAE,OAAO,CAEb,QAAS,IAAE,OAAO,EAElB,IAAK,IAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAE/B,SAAU,IAAE,OAAO,EAAE,SAAS,EAE9B,QAAS,IAAE,OAAO,CACpB,CAAC,CACH,CAAC,EAMKC,EAAgB,IAAE,OAAO,CAE7B,IAAK,IAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAE/B,UAAW,IAAE,OAAO,EAAE,SAAS,EAE/B,KAAM,IAAE,QAAQ,CAClB,CAAC,EAgFM,SAASlB,EAAmBmB,EAAS,SAAkB,CAC5D,MAAO,GAAGA,CAAM,IAAI,QAAQ,GAAG,IAAI,EAAAC,QAAO,YAAY,CAAC,EAAE,SAAS,KAAK,CAAC,EAC1E,CA+BO,SAASlB,EAAemB,EAAyB,CAEtD,MAAMC,KAAU,eAAY,EAItBC,EAAU,EAAAC,QAAK,KAAKF,EAAS,cAAeD,CAAO,EAIzD,OAAO,EAAAG,QAAK,KAAKD,EAAS,QAAQ,QAAQ,GAAG,OAAO,CACtD,CAaA,eAAeE,EAAmBC,EAAiC,CACjE,MAAMC,EAAM,EAAAH,QAAK,QAAQE,CAAQ,EAEjC,MAAM,EAAAE,SAAG,MAAMD,EAAK,CAAE,UAAW,EAAK,CAAC,CACzC,CAoCA,eAAsBlB,EACpBY,EACAQ,EACiB,CACjB,MAAMC,EAAW5B,EAAemB,CAAO,EACvC,MAAMI,EAAmBK,CAAQ,EAGjC,MAAMC,EAAmB,CACvB,KAAAF,EACA,IAAK,QAAQ,IACb,UAAW,KAAK,IAAI,CACtB,EAGMG,EAAYd,EAAc,MAAMa,CAAO,EAG7C,aAAM,EAAAH,SAAG,UAAUE,EAAU,KAAK,UAAUE,EAAW,KAAM,CAAC,EAAG,MAAM,EAChEF,CACT,CA6BA,eAAsBxB,EAAYwB,EAAoC,CACpE,GAAI,CACF,MAAMG,EAAU,MAAM,EAAAL,SAAG,SAASE,EAAU,MAAM,EAC5CI,EAAS,KAAK,MAAMD,CAAO,EAE3BD,EAAYd,EAAc,MAAMgB,CAAM,EAEtCC,EAAQ,KAAK,IAAI,EAAIH,EAAU,UAE/BI,EAAW,IAAS,IAC1B,GAAID,EAAQC,EAAU,CAEpB,GAAI,IACF,kBAAeN,EAAU,CAAE,MAAO,EAAK,CAAC,CAC1C,MAAQ,CAER,CACA,OAAO,IACT,CACA,OAAOE,EAAU,IACnB,MAAQ,CAEN,OAAO,IACT,CACF,CAyBA,eAAsBjC,EAAgBsB,EAAgC,CACpE,MAAMC,KAAU,eAAY,EACtBC,EAAU,EAAAC,QAAK,KAAKF,EAAS,cAAeD,CAAO,EACzD,GAAI,CACF,MAAMgB,EAAQ,MAAM,EAAAT,SAAG,QAAQL,CAAO,EAChCe,EAAM,KAAK,IAAI,EAEfF,EAAW,IAAS,IAE1B,MAAM,QAAQ,IACZC,EAAM,IAAI,MAAME,GAAQ,CACtB,GAAIA,EAAK,WAAW,OAAO,GAAKA,EAAK,SAAS,OAAO,EAAG,CACtD,MAAMb,EAAW,EAAAF,QAAK,KAAKD,EAASgB,CAAI,EACxC,GAAI,CAEF,MAAMC,EAAQ,MAAM,EAAAZ,SAAG,KAAKF,CAAQ,EAEpC,IAAIe,EADaH,EAAME,EAAM,QACJJ,EAIzB,GAAI,CACF,MAAMH,EAAU,MAAM,EAAAL,SAAG,SAASF,EAAU,MAAM,EAC5CQ,EAAS,KAAK,MAAMD,CAAO,EAC3BD,EAAYd,EAAc,MAAMgB,CAAM,EACtCQ,EAAaJ,EAAMN,EAAU,UAEnCS,EAAUA,GAAWC,EAAaN,CACpC,MAAQ,CAER,CAEIK,MAEF,kBAAef,EAAU,CAAE,MAAO,EAAK,CAAC,CAE5C,MAAQ,CAER,CACF,CACF,CAAC,CACH,CACF,MAAQ,CAER,CACF,CAyBO,SAASnB,EACdoC,EACAC,EACS,CACT,GACED,GACA,OAAOA,GAAY,UACnB,SAAUA,GACV,OAAOA,EAAQ,MAAS,WAExB,GAAI,CAEF,MAAMX,EAAYf,EAAiB,MAAM2B,CAAO,EAChD,OAAOD,EAAQ,KAAKX,CAAS,CAC/B,MAAQ,CACN,MAAO,EACT,CAEF,MAAO,EACT,CA4BO,SAAS5B,EAAMyC,EAAoD,CACxE,MAAMC,EAAYF,GAAqB,CACrC,MAAMV,EAAS7B,EAAgBuC,CAAO,EAClCV,GACFW,EAAQX,CAAM,CAElB,EACA,eAAQ,GAAG,UAAWY,CAAQ,EAEvB,IAAM,CACX,QAAQ,IAAI,UAAWA,CAAQ,CACjC,CACF,CAgCO,SAAStC,EACduC,EACAC,EAAsB,CAAC,EACX,CACZ,KAAM,CAAE,QAAAC,EAAU,GAAO,EAAID,EAC7B,OAAO,IAAI,QAAQ,CAACE,EAASC,IAAW,CACtC,IAAIC,EAA+B,KAC/BC,EAAmC,KACvC,MAAMC,EAAgB,IAAM,CACtBF,GACFA,EAAQ,EAEVD,EAAO,IAAI,MAAM,yCAAyCJ,CAAW,EAAE,CAAC,CAC1E,EAYAK,EAAUhD,EAXawC,GAAwB,CACzCA,EAAQ,OAASG,IACfM,GACF,aAAaA,CAAS,EAEpBD,GACFA,EAAQ,EAEVF,EAAQN,EAAQ,IAAS,EAE7B,CAC6B,EACzBK,EAAU,IACZI,EAAY,WAAWC,EAAeL,CAAO,EAEjD,CAAC,CACH,CA0BO,SAAShD,EACdsD,EACA1B,EACe,CACf,MAAO,CACL,GAAI,EAAAT,QAAO,YAAY,EAAE,EAAE,SAAS,KAAK,EACzC,UAAW,KAAK,IAAI,EACpB,KAAAmC,EACA,KAAA1B,CACF,CACF,CAuBO,SAAS1B,EAAcwC,EAA2B,CACvD,MAAO,GACLA,GACE,OAAOA,GAAY,UACnB,SAAUA,GACV,OAAOA,EAAQ,MAAS,YACxB,YAAaA,GACbA,
EAAQ,UAAY,OAE1B,CA4BO,SAAStC,EAAgBuC,EAAqC,CACnE,GAAI,CAGF,OADkB3B,EAAiB,MAAM2B,CAAO,CAElD,MAAQ,CAEN,OAAO,IACT,CACF",
6
6
  "names": ["ipc_exports", "__export", "IpcHandshakeSchema", "cleanupIpcStubs", "createIpcChannelId", "createIpcMessage", "getIpcStubPath", "hasIpcChannel", "onIpc", "parseIpcMessage", "readIpcStub", "sendIpc", "waitForIpc", "writeIpcStub", "__toCommonJS", "import_node_crypto", "import_node_fs", "import_node_path", "import_fs", "import_paths", "import_zod", "IpcMessageSchema", "IpcStubSchema", "prefix", "crypto", "appName", "tempDir", "stubDir", "path", "ensureIpcDirectory", "filePath", "dir", "fs", "data", "stubPath", "ipcData", "validated", "content", "parsed", "ageMs", "maxAgeMs", "files", "now", "file", "stats", "isStale", "contentAge", "process", "message", "handler", "listener", "messageType", "options", "timeout", "resolve", "reject", "cleanup", "timeoutId", "handleTimeout", "type"]
7
7
  }
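The embedded ipc.ts source above documents a file-based stub handoff: writeIpcStub persists { pid, timestamp, data } to a stub file under the OS temp directory, and readIpcStub returns the payload only while the stub is fresh (under 5 minutes), otherwise null. A minimal sketch of that round trip follows; the '@socketsecurity/lib/ipc' import specifier and the single-process pairing are assumptions for illustration, since the real flow spans a parent and a child process.

```typescript
import { readIpcStub, writeIpcStub } from '@socketsecurity/lib/ipc' // import path is an assumption

async function handoffExample(): Promise<void> {
  // Parent process: writes .socket-ipc/<appName>/stub-<pid>.json in the OS temp dir.
  const stubPath = await writeIpcStub('socket-cli', { apiToken: 'example-token' })

  // Child process (normally receives stubPath via argv or an IPC handshake):
  // returns the original payload, or null if the stub is missing, invalid, or older than 5 minutes.
  const payload = await readIpcStub(stubPath)
  console.log(payload) // -> { apiToken: 'example-token' }
}

void handoffExample()
```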
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@socketsecurity/lib",
3
- "version": "2.10.3",
3
+ "version": "2.10.4",
4
4
  "license": "MIT",
5
5
  "description": "Core utilities and infrastructure for Socket.dev security tools",
6
6
  "keywords": [
@@ -579,7 +579,7 @@
579
579
  "@socketregistry/is-unicode-supported": "1.0.5",
580
580
  "@socketregistry/packageurl-js": "1.3.1",
581
581
  "@socketregistry/yocto-spinner": "1.0.19",
582
- "@types/node": "24.6.2",
582
+ "@types/node": "24.9.2",
583
583
  "@typescript/native-preview": "7.0.0-dev.20250920.1",
584
584
  "@vitest/coverage-v8": "4.0.3",
585
585
  "@vitest/ui": "4.0.3",