@socketsecurity/lib 2.8.0 → 2.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +10 -0
- package/dist/dlx-binary.js +1 -1
- package/dist/dlx-binary.js.map +3 -3
- package/dist/dlx-package.js +1 -1
- package/dist/dlx-package.js.map +3 -3
- package/dist/dlx.d.ts +25 -0
- package/dist/dlx.js +1 -1
- package/dist/dlx.js.map +3 -3
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -5,6 +5,16 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [2.8.1](https://github.com/SocketDev/socket-lib/releases/tag/v2.8.1) - 2025-10-29
+
+### Changed
+
+- **Consolidated DLX cache key generation**: Extracted `generateCacheKey` function to shared `dlx.ts` module
+  - Eliminates code duplication between `dlx-binary.ts` and `dlx-package.ts`
+  - Enables consistent cache key generation across the Socket ecosystem
+  - Exports function for use in dependent packages (e.g., socket-cli)
+  - Maintains SHA-512 truncated to 16 chars strategy from v2.8.0
+
 ## [2.8.0](https://github.com/SocketDev/socket-lib/releases/tag/v2.8.0) - 2025-10-29
 
 ### Changed
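For reference, the consolidation described above boils down to one small helper. The sketch below reconstructs it from the bundled sources embedded in the source maps further down; the actual declaration now lives in `dist/dlx.d.ts` / `dist/dlx.js`, and its exact export shape may differ slightly.

```ts
import { createHash } from 'node:crypto'

// Shared cache-key helper as reconstructed from the v2.8.0 bundled source of
// dlx-binary.ts: SHA-512 of the spec string, truncated to the first 16 hex
// characters (the same strategy npm/npx uses for its _npx cache directories).
export function generateCacheKey(spec: string): string {
  return createHash('sha512').update(spec).digest('hex').substring(0, 16)
}

// Illustrative inputs only - callers pass a package spec or a URL-based spec.
generateCacheKey('@cyclonedx/cdxgen@10.0.0')
generateCacheKey('https://example.com/tool:binary-linux-x64')
```

Per the changelog entry, dependent packages such as socket-cli can now import this single implementation instead of duplicating the hash logic.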
package/dist/dlx-binary.js
CHANGED
@@ -1,3 +1,3 @@
 /* Socket Lib - Built with esbuild */
-var
+
var H=Object.create;var x=Object.defineProperty;var L=Object.getOwnPropertyDescriptor;var z=Object.getOwnPropertyNames;var F=Object.getPrototypeOf,W=Object.prototype.hasOwnProperty;var Y=(t,n)=>{for(var e in n)x(t,e,{get:n[e],enumerable:!0})},O=(t,n,e,i)=>{if(n&&typeof n=="object"||typeof n=="function")for(let r of z(n))!W.call(t,r)&&r!==e&&x(t,r,{get:()=>n[r],enumerable:!(i=L(n,r))||i.enumerable});return t};var T=(t,n,e)=>(e=t!=null?H(F(t)):{},O(n||!t||!t.__esModule?x(e,"default",{value:t,enumerable:!0}):e,t)),q=t=>O(x({},"__esModule",{value:!0}),t);var Q={};Y(Q,{cleanDlxCache:()=>V,dlxBinary:()=>X,getDlxCachePath:()=>P,listDlxCache:()=>G});module.exports=q(Q);var _=require("node:crypto"),s=require("node:fs"),D=T(require("node:os")),l=T(require("node:path")),A=require("#constants/platform"),C=require("./dlx"),S=require("./download-lock"),o=require("./fs"),B=require("./objects"),R=require("./path"),$=require("./paths"),I=require("./spawn");function g(t){return l.default.join(t,".dlx-metadata.json")}async function J(t,n){try{const e=g(t);if(!(0,s.existsSync)(e))return!1;const i=await(0,o.readJson)(e,{throws:!1});if(!(0,B.isObjectObject)(i))return!1;const r=Date.now(),a=i.timestamp;return typeof a!="number"||a<=0?!1:r-a<n}catch{return!1}}async function K(t,n,e){await(0,S.downloadWithLock)(t,n,{staleTimeout:1e4,lockTimeout:12e4});const i=await s.promises.readFile(n),r=(0,_.createHash)("sha256");r.update(i);const a=r.digest("hex");if(e&&a!==e)throw await(0,o.safeDelete)(n),new Error(`Checksum mismatch: expected ${e}, got ${a}`);return A.WIN32||await s.promises.chmod(n,493),a}async function M(t,n,e){const i=g(t),r={arch:D.default.arch(),checksum:e,platform:D.default.platform(),timestamp:Date.now(),url:n,version:"1.0.0"};await s.promises.writeFile(i,JSON.stringify(r,null,2))}async function V(t=require("#constants/time").DLX_BINARY_CACHE_TTL){const n=P();if(!(0,s.existsSync)(n))return 0;let e=0;const i=Date.now(),r=await s.promises.readdir(n);for(const a of r){const c=l.default.join(n,a),m=g(c);try{if(!await(0,o.isDir)(c))continue;const f=await(0,o.readJson)(m,{throws:!1});if(!f||typeof f!="object"||Array.isArray(f))continue;const u=f.timestamp;(typeof u=="number"&&u>0?i-u:Number.POSITIVE_INFINITY)>t&&(await(0,o.safeDelete)(c,{force:!0,recursive:!0}),e+=1)}catch{try{(await s.promises.readdir(c)).length||(await(0,o.safeDelete)(c),e+=1)}catch{}}}return e}async function X(t,n,e){const{cacheTtl:i=require("#constants/time").DLX_BINARY_CACHE_TTL,checksum:r,force:a=!1,name:c,spawnOptions:m,url:f}={__proto__:null,...n},u=P(),p=c||`binary-${process.platform}-${D.default.arch()}`,k=`${f}:${p}`,d=(0,C.generateCacheKey)(k),h=l.default.join(u,d),b=(0,R.normalizePath)(l.default.join(h,p));let w=!1,j=r;if(!a&&(0,s.existsSync)(h)&&await J(h,i))try{const E=g(h),y=await(0,o.readJson)(E,{throws:!1});y&&typeof y=="object"&&!Array.isArray(y)&&typeof y.checksum=="string"?j=y.checksum:w=!0}catch{w=!0}else w=!0;w&&(await s.promises.mkdir(h,{recursive:!0}),j=await K(f,b,r),await M(h,f,j||""));const N=A.WIN32&&/\.(?:bat|cmd|ps1)$/i.test(b)?{...m,env:{...m?.env,PATH:`${h}${l.default.delimiter}${process.env.PATH||""}`},shell:!0}:m,v=(0,I.spawn)(b,t,N,e);return{binaryPath:b,downloaded:w,spawnPromise:v}}function P(){return(0,$.getSocketDlxDir)()}async function G(){const t=P();if(!(0,s.existsSync)(t))return[];const n=[],e=Date.now(),i=await s.promises.readdir(t);for(const r of i){const a=l.default.join(t,r);try{if(!await(0,o.isDir)(a))continue;const c=g(a),m=await(0,o.readJson)(c,{throws:!1});if(!m||typeof 
m!="object"||Array.isArray(m))continue;const u=(await s.promises.readdir(a)).find(p=>!p.startsWith("."));if(u){const p=l.default.join(a,u),k=await s.promises.stat(p),d=m;n.push({age:e-(d.timestamp||0),arch:d.arch||"unknown",checksum:d.checksum||"",name:u,platform:d.platform||"unknown",size:k.size,url:d.url||""})}}catch{}}return n}0&&(module.exports={cleanDlxCache,dlxBinary,getDlxCachePath,listDlxCache});
 //# sourceMappingURL=dlx-binary.js.map
package/dist/dlx-binary.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../src/dlx-binary.ts"],
-
"sourcesContent": ["/** @fileoverview DLX binary execution utilities for Socket ecosystem. */\n\nimport { createHash } from 'node:crypto'\nimport { existsSync, promises as fs } from 'node:fs'\nimport os from 'node:os'\nimport path from 'node:path'\n\nimport { WIN32 } from '#constants/platform'\n\nimport { downloadWithLock } from './download-lock'\nimport { isDir, readJson, safeDelete } from './fs'\nimport { isObjectObject } from './objects'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nexport interface DlxBinaryOptions {\n /** URL to download the binary from. */\n url: string\n /** Optional name for the cached binary (defaults to URL hash). */\n name?: string | undefined\n /** Expected checksum (sha256) for verification. */\n checksum?: string | undefined\n /** Cache TTL in milliseconds (default: 7 days). */\n cacheTtl?: number | undefined\n /** Force re-download even if cached. */\n force?: boolean | undefined\n /** Additional spawn options. */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxBinaryResult {\n /** Path to the cached binary. */\n binaryPath: string\n /** Whether the binary was newly downloaded. */\n downloaded: boolean\n /** The spawn promise for the running process. */\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Generate a cache directory name using npm/npx approach.\n * Uses first 16 characters of SHA-512 hash (like npm/npx).\n *\n * Rationale for SHA-512 truncated (vs full SHA-256):\n * - Matches npm/npx ecosystem behavior\n * - Shorter paths for Windows MAX_PATH compatibility (260 chars)\n * - 16 hex chars = 64 bits = acceptable collision risk for local cache\n * - Collision probability ~1 in 18 quintillion with 1000 entries\n *\n * Input strategy (aligned with npx):\n * - npx uses package spec strings (e.g., '@scope/pkg@1.0.0', 'prettier@3.0.0')\n * - Caller provides complete spec string with version for accurate cache keying\n * - For package installs: Use PURL-style spec with version\n * Examples: 'npm:prettier@3.0.0', 'pypi:requests@2.31.0', 'gem:rails@7.0.0'\n * Note: Socket uses shorthand format without 'pkg:' prefix\n * (handled by @socketregistry/packageurl-js)\n * - For binary downloads: Use URL for uniqueness\n *\n * Reference: npm/cli v11.6.2 libnpmexec/lib/index.js#L233-L244\n * https://github.com/npm/cli/blob/v11.6.2/workspaces/libnpmexec/lib/index.js#L233-L244\n * Implementation: packages.map().sort().join('\\n') \u2192 SHA-512 \u2192 slice(0,16)\n * npx hashes the package spec (name@version), not just name\n */\nfunction generateCacheKey(spec: string): string {\n return createHash('sha512').update(spec).digest('hex').substring(0, 16)\n}\n\n/**\n * Get metadata file path for a cached binary.\n */\nfunction getMetadataPath(cacheEntryPath: string): string {\n return path.join(cacheEntryPath, '.dlx-metadata.json')\n}\n\n/**\n * Check if a cached binary is still valid.\n */\nasync function isCacheValid(\n cacheEntryPath: string,\n cacheTtl: number,\n): Promise<boolean> {\n try {\n const metaPath = getMetadataPath(cacheEntryPath)\n if (!existsSync(metaPath)) {\n return false\n }\n\n const metadata = await readJson(metaPath, { throws: false })\n if (!isObjectObject(metadata)) {\n return false\n }\n const now = Date.now()\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, cache is invalid\n if (typeof timestamp !== 'number' || timestamp <= 0) {\n 
return false\n }\n const age = now - timestamp\n\n return age < cacheTtl\n } catch {\n return false\n }\n}\n\n/**\n * Download a file from a URL with integrity checking and concurrent download protection.\n * Uses downloadWithLock to prevent multiple processes from downloading the same binary simultaneously.\n */\nasync function downloadBinary(\n url: string,\n destPath: string,\n checksum?: string | undefined,\n): Promise<string> {\n // Use downloadWithLock to handle concurrent download protection.\n // This prevents corruption when multiple processes try to download the same binary.\n await downloadWithLock(url, destPath, {\n // Align with npm's npx locking strategy.\n staleTimeout: 10_000,\n // Allow up to 2 minutes for large binary downloads.\n lockTimeout: 120_000,\n })\n\n // Compute checksum of downloaded file.\n const fileBuffer = await fs.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n const actualChecksum = hasher.digest('hex')\n\n // Verify checksum if provided.\n if (checksum && actualChecksum !== checksum) {\n // Clean up invalid file.\n await safeDelete(destPath)\n throw new Error(\n `Checksum mismatch: expected ${checksum}, got ${actualChecksum}`,\n )\n }\n\n // Make executable on POSIX systems.\n if (!WIN32) {\n await fs.chmod(destPath, 0o755)\n }\n\n return actualChecksum\n}\n\n/**\n * Write metadata for a cached binary.\n */\nasync function writeMetadata(\n cacheEntryPath: string,\n url: string,\n checksum: string,\n): Promise<void> {\n const metaPath = getMetadataPath(cacheEntryPath)\n const metadata = {\n arch: os.arch(),\n checksum,\n platform: os.platform(),\n timestamp: Date.now(),\n url,\n version: '1.0.0',\n }\n await fs.writeFile(metaPath, JSON.stringify(metadata, null, 2))\n}\n\n/**\n * Clean expired entries from the DLX cache.\n */\nexport async function cleanDlxCache(\n maxAge: number = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n): Promise<number> {\n const cacheDir = getDlxCachePath()\n\n if (!existsSync(cacheDir)) {\n return 0\n }\n\n let cleaned = 0\n const now = Date.now()\n const entries = await fs.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n const metaPath = getMetadataPath(entryPath)\n\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, treat as expired (age = infinity)\n const age =\n typeof timestamp === 'number' && timestamp > 0\n ? 
now - timestamp\n : Number.POSITIVE_INFINITY\n\n if (age > maxAge) {\n // Remove entire cache entry directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath, { force: true, recursive: true })\n cleaned += 1\n }\n } catch {\n // If we can't read metadata, check if directory is empty or corrupted.\n try {\n // eslint-disable-next-line no-await-in-loop\n const contents = await fs.readdir(entryPath)\n if (!contents.length) {\n // Remove empty directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath)\n cleaned += 1\n }\n } catch {}\n }\n }\n\n return cleaned\n}\n\n/**\n * Download and execute a binary from a URL with caching.\n */\nexport async function dlxBinary(\n args: readonly string[] | string[],\n options?: DlxBinaryOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxBinaryResult> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force = false,\n name,\n spawnOptions,\n url,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n\n let downloaded = false\n let computedChecksum = checksum\n\n // Check if we need to download.\n if (\n !force &&\n existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid, read the checksum from metadata.\n try {\n const metaPath = getMetadataPath(cacheEntryDir)\n const metadata = await readJson(metaPath, { throws: false })\n if (\n metadata &&\n typeof metadata === 'object' &&\n !Array.isArray(metadata) &&\n typeof (metadata as Record<string, unknown>)['checksum'] === 'string'\n ) {\n computedChecksum = (metadata as Record<string, unknown>)[\n 'checksum'\n ] as string\n } else {\n // If metadata is invalid, re-download.\n downloaded = true\n }\n } catch {\n // If we can't read metadata, re-download.\n downloaded = true\n }\n } else {\n downloaded = true\n }\n\n if (downloaded) {\n // Ensure cache directory exists.\n await fs.mkdir(cacheEntryDir, { recursive: true })\n\n // Download the binary.\n computedChecksum = await downloadBinary(url, binaryPath, checksum)\n await writeMetadata(cacheEntryDir, url, computedChecksum || '')\n }\n\n // Execute the binary.\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension (e.g.,\n // C:\\cache\\test.cmd becomes just \"test\"). Windows cmd.exe then searches for \"test\"\n // in directories listed in PATH, trying each extension from PATHEXT environment\n // variable (.COM, .EXE, .BAT, .CMD, etc.) 
until it finds a match.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH\n // (unlike system package managers like npm/pnpm/yarn which are already in PATH),\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n //\n // This approach is consistent with how other tools handle Windows command execution:\n // - npm's promise-spawn: uses which.sync() to find commands in PATH\n // - cross-spawn: spawns cmd.exe with escaped arguments\n // - Node.js spawn with shell: true: delegates to cmd.exe which uses PATH\n const finalSpawnOptions = needsShell\n ? {\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n const spawnPromise = spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n\n return {\n binaryPath,\n downloaded,\n spawnPromise,\n }\n}\n\n/**\n * Get the DLX binary cache directory path.\n * Returns normalized path for cross-platform compatibility.\n * Uses same directory as dlx-package for unified DLX storage.\n */\nexport function getDlxCachePath(): string {\n return getSocketDlxDir()\n}\n\n/**\n * Get information about cached binaries.\n */\nexport async function listDlxCache(): Promise<\n Array<{\n age: number\n arch: string\n checksum: string\n name: string\n platform: string\n size: number\n url: string\n }>\n> {\n const cacheDir = getDlxCachePath()\n\n if (!existsSync(cacheDir)) {\n return []\n }\n\n const results = []\n const now = Date.now()\n const entries = await fs.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n const metaPath = getMetadataPath(entryPath)\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n\n // Find the binary file in the directory.\n // eslint-disable-next-line no-await-in-loop\n const files = await fs.readdir(entryPath)\n const binaryFile = files.find(f => !f.startsWith('.'))\n\n if (binaryFile) {\n const binaryPath = path.join(entryPath, binaryFile)\n // eslint-disable-next-line no-await-in-loop\n const binaryStats = await fs.stat(binaryPath)\n\n const metaObj = metadata as Record<string, unknown>\n results.push({\n age: now - ((metaObj['timestamp'] as number) || 0),\n arch: (metaObj['arch'] as string) || 'unknown',\n checksum: (metaObj['checksum'] as string) || '',\n name: binaryFile,\n platform: (metaObj['platform'] as string) || 'unknown',\n size: binaryStats.size,\n url: (metaObj['url'] as string) || '',\n })\n }\n } catch {}\n }\n\n return results\n}\n"],
-
"mappings": ";6iBAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,mBAAAE,EAAA,cAAAC,EAAA,oBAAAC,EAAA,iBAAAC,IAAA,eAAAC,EAAAN,GAEA,IAAAO,EAA2B,uBAC3BC,EAA2C,mBAC3CC,EAAe,sBACfC,EAAiB,wBAEjBC,EAAsB,+BAEtBC,EAAiC,2BACjCC,EAA4C,gBAC5CC,EAA+B,qBAC/BC,EAA8B,kBAC9BC,EAAgC,mBAEhCC,EAAsB,
-
"names": ["dlx_binary_exports", "__export", "cleanDlxCache", "dlxBinary", "getDlxCachePath", "listDlxCache", "__toCommonJS", "import_node_crypto", "import_node_fs", "import_node_os", "import_node_path", "import_platform", "import_download_lock", "import_fs", "import_objects", "import_path", "import_paths", "import_spawn", "
+
"sourcesContent": ["/** @fileoverview DLX binary execution utilities for Socket ecosystem. */\n\nimport { createHash } from 'node:crypto'\nimport { existsSync, promises as fs } from 'node:fs'\nimport os from 'node:os'\nimport path from 'node:path'\n\nimport { WIN32 } from '#constants/platform'\n\nimport { generateCacheKey } from './dlx'\nimport { downloadWithLock } from './download-lock'\nimport { isDir, readJson, safeDelete } from './fs'\nimport { isObjectObject } from './objects'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nexport interface DlxBinaryOptions {\n /** URL to download the binary from. */\n url: string\n /** Optional name for the cached binary (defaults to URL hash). */\n name?: string | undefined\n /** Expected checksum (sha256) for verification. */\n checksum?: string | undefined\n /** Cache TTL in milliseconds (default: 7 days). */\n cacheTtl?: number | undefined\n /** Force re-download even if cached. */\n force?: boolean | undefined\n /** Additional spawn options. */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxBinaryResult {\n /** Path to the cached binary. */\n binaryPath: string\n /** Whether the binary was newly downloaded. */\n downloaded: boolean\n /** The spawn promise for the running process. */\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Get metadata file path for a cached binary.\n */\nfunction getMetadataPath(cacheEntryPath: string): string {\n return path.join(cacheEntryPath, '.dlx-metadata.json')\n}\n\n/**\n * Check if a cached binary is still valid.\n */\nasync function isCacheValid(\n cacheEntryPath: string,\n cacheTtl: number,\n): Promise<boolean> {\n try {\n const metaPath = getMetadataPath(cacheEntryPath)\n if (!existsSync(metaPath)) {\n return false\n }\n\n const metadata = await readJson(metaPath, { throws: false })\n if (!isObjectObject(metadata)) {\n return false\n }\n const now = Date.now()\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, cache is invalid\n if (typeof timestamp !== 'number' || timestamp <= 0) {\n return false\n }\n const age = now - timestamp\n\n return age < cacheTtl\n } catch {\n return false\n }\n}\n\n/**\n * Download a file from a URL with integrity checking and concurrent download protection.\n * Uses downloadWithLock to prevent multiple processes from downloading the same binary simultaneously.\n */\nasync function downloadBinary(\n url: string,\n destPath: string,\n checksum?: string | undefined,\n): Promise<string> {\n // Use downloadWithLock to handle concurrent download protection.\n // This prevents corruption when multiple processes try to download the same binary.\n await downloadWithLock(url, destPath, {\n // Align with npm's npx locking strategy.\n staleTimeout: 10_000,\n // Allow up to 2 minutes for large binary downloads.\n lockTimeout: 120_000,\n })\n\n // Compute checksum of downloaded file.\n const fileBuffer = await fs.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n const actualChecksum = hasher.digest('hex')\n\n // Verify checksum if provided.\n if (checksum && actualChecksum !== checksum) {\n // Clean up invalid file.\n await safeDelete(destPath)\n throw new Error(\n `Checksum mismatch: expected ${checksum}, got ${actualChecksum}`,\n )\n }\n\n // Make executable on POSIX systems.\n if (!WIN32) {\n await fs.chmod(destPath, 0o755)\n }\n\n return 
actualChecksum\n}\n\n/**\n * Write metadata for a cached binary.\n */\nasync function writeMetadata(\n cacheEntryPath: string,\n url: string,\n checksum: string,\n): Promise<void> {\n const metaPath = getMetadataPath(cacheEntryPath)\n const metadata = {\n arch: os.arch(),\n checksum,\n platform: os.platform(),\n timestamp: Date.now(),\n url,\n version: '1.0.0',\n }\n await fs.writeFile(metaPath, JSON.stringify(metadata, null, 2))\n}\n\n/**\n * Clean expired entries from the DLX cache.\n */\nexport async function cleanDlxCache(\n maxAge: number = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n): Promise<number> {\n const cacheDir = getDlxCachePath()\n\n if (!existsSync(cacheDir)) {\n return 0\n }\n\n let cleaned = 0\n const now = Date.now()\n const entries = await fs.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n const metaPath = getMetadataPath(entryPath)\n\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, treat as expired (age = infinity)\n const age =\n typeof timestamp === 'number' && timestamp > 0\n ? now - timestamp\n : Number.POSITIVE_INFINITY\n\n if (age > maxAge) {\n // Remove entire cache entry directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath, { force: true, recursive: true })\n cleaned += 1\n }\n } catch {\n // If we can't read metadata, check if directory is empty or corrupted.\n try {\n // eslint-disable-next-line no-await-in-loop\n const contents = await fs.readdir(entryPath)\n if (!contents.length) {\n // Remove empty directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath)\n cleaned += 1\n }\n } catch {}\n }\n }\n\n return cleaned\n}\n\n/**\n * Download and execute a binary from a URL with caching.\n */\nexport async function dlxBinary(\n args: readonly string[] | string[],\n options?: DlxBinaryOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxBinaryResult> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force = false,\n name,\n spawnOptions,\n url,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n\n let downloaded = false\n let computedChecksum = checksum\n\n // Check if we need to download.\n if (\n !force &&\n existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid, read the checksum from metadata.\n try {\n const metaPath = getMetadataPath(cacheEntryDir)\n const metadata = await readJson(metaPath, { throws: false })\n if (\n metadata &&\n typeof metadata === 'object' &&\n !Array.isArray(metadata) &&\n typeof (metadata as Record<string, unknown>)['checksum'] === 'string'\n ) 
{\n computedChecksum = (metadata as Record<string, unknown>)[\n 'checksum'\n ] as string\n } else {\n // If metadata is invalid, re-download.\n downloaded = true\n }\n } catch {\n // If we can't read metadata, re-download.\n downloaded = true\n }\n } else {\n downloaded = true\n }\n\n if (downloaded) {\n // Ensure cache directory exists.\n await fs.mkdir(cacheEntryDir, { recursive: true })\n\n // Download the binary.\n computedChecksum = await downloadBinary(url, binaryPath, checksum)\n await writeMetadata(cacheEntryDir, url, computedChecksum || '')\n }\n\n // Execute the binary.\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension (e.g.,\n // C:\\cache\\test.cmd becomes just \"test\"). Windows cmd.exe then searches for \"test\"\n // in directories listed in PATH, trying each extension from PATHEXT environment\n // variable (.COM, .EXE, .BAT, .CMD, etc.) until it finds a match.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH\n // (unlike system package managers like npm/pnpm/yarn which are already in PATH),\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n //\n // This approach is consistent with how other tools handle Windows command execution:\n // - npm's promise-spawn: uses which.sync() to find commands in PATH\n // - cross-spawn: spawns cmd.exe with escaped arguments\n // - Node.js spawn with shell: true: delegates to cmd.exe which uses PATH\n const finalSpawnOptions = needsShell\n ? 
{\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n const spawnPromise = spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n\n return {\n binaryPath,\n downloaded,\n spawnPromise,\n }\n}\n\n/**\n * Get the DLX binary cache directory path.\n * Returns normalized path for cross-platform compatibility.\n * Uses same directory as dlx-package for unified DLX storage.\n */\nexport function getDlxCachePath(): string {\n return getSocketDlxDir()\n}\n\n/**\n * Get information about cached binaries.\n */\nexport async function listDlxCache(): Promise<\n Array<{\n age: number\n arch: string\n checksum: string\n name: string\n platform: string\n size: number\n url: string\n }>\n> {\n const cacheDir = getDlxCachePath()\n\n if (!existsSync(cacheDir)) {\n return []\n }\n\n const results = []\n const now = Date.now()\n const entries = await fs.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n const metaPath = getMetadataPath(entryPath)\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n\n // Find the binary file in the directory.\n // eslint-disable-next-line no-await-in-loop\n const files = await fs.readdir(entryPath)\n const binaryFile = files.find(f => !f.startsWith('.'))\n\n if (binaryFile) {\n const binaryPath = path.join(entryPath, binaryFile)\n // eslint-disable-next-line no-await-in-loop\n const binaryStats = await fs.stat(binaryPath)\n\n const metaObj = metadata as Record<string, unknown>\n results.push({\n age: now - ((metaObj['timestamp'] as number) || 0),\n arch: (metaObj['arch'] as string) || 'unknown',\n checksum: (metaObj['checksum'] as string) || '',\n name: binaryFile,\n platform: (metaObj['platform'] as string) || 'unknown',\n size: binaryStats.size,\n url: (metaObj['url'] as string) || '',\n })\n }\n } catch {}\n }\n\n return results\n}\n"],
+
"mappings": ";6iBAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,mBAAAE,EAAA,cAAAC,EAAA,oBAAAC,EAAA,iBAAAC,IAAA,eAAAC,EAAAN,GAEA,IAAAO,EAA2B,uBAC3BC,EAA2C,mBAC3CC,EAAe,sBACfC,EAAiB,wBAEjBC,EAAsB,+BAEtBC,EAAiC,iBACjCC,EAAiC,2BACjCC,EAA4C,gBAC5CC,EAA+B,qBAC/BC,EAA8B,kBAC9BC,EAAgC,mBAEhCC,EAAsB,mBA6BtB,SAASC,EAAgBC,EAAgC,CACvD,OAAO,EAAAC,QAAK,KAAKD,EAAgB,oBAAoB,CACvD,CAKA,eAAeE,EACbF,EACAG,EACkB,CAClB,GAAI,CACF,MAAMC,EAAWL,EAAgBC,CAAc,EAC/C,GAAI,IAAC,cAAWI,CAAQ,EACtB,MAAO,GAGT,MAAMC,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAC3D,GAAI,IAAC,kBAAeC,CAAQ,EAC1B,MAAO,GAET,MAAMC,EAAM,KAAK,IAAI,EACfC,EAAaF,EAAqC,UAExD,OAAI,OAAOE,GAAc,UAAYA,GAAa,EACzC,GAEGD,EAAMC,EAELJ,CACf,MAAQ,CACN,MAAO,EACT,CACF,CAMA,eAAeK,EACbC,EACAC,EACAC,EACiB,CAGjB,QAAM,oBAAiBF,EAAKC,EAAU,CAEpC,aAAc,IAEd,YAAa,IACf,CAAC,EAGD,MAAME,EAAa,MAAM,EAAAC,SAAG,SAASH,CAAQ,EACvCI,KAAS,cAAW,QAAQ,EAClCA,EAAO,OAAOF,CAAU,EACxB,MAAMG,EAAiBD,EAAO,OAAO,KAAK,EAG1C,GAAIH,GAAYI,IAAmBJ,EAEjC,cAAM,cAAWD,CAAQ,EACnB,IAAI,MACR,+BAA+BC,CAAQ,SAASI,CAAc,EAChE,EAIF,OAAK,SACH,MAAM,EAAAF,SAAG,MAAMH,EAAU,GAAK,EAGzBK,CACT,CAKA,eAAeC,EACbhB,EACAS,EACAE,EACe,CACf,MAAMP,EAAWL,EAAgBC,CAAc,EACzCK,EAAW,CACf,KAAM,EAAAY,QAAG,KAAK,EACd,SAAAN,EACA,SAAU,EAAAM,QAAG,SAAS,EACtB,UAAW,KAAK,IAAI,EACpB,IAAAR,EACA,QAAS,OACX,EACA,MAAM,EAAAI,SAAG,UAAUT,EAAU,KAAK,UAAUC,EAAU,KAAM,CAAC,CAAC,CAChE,CAKA,eAAsBvB,EACpBoC,EAAiC,QAAQ,iBAAiB,EAAE,qBAC3C,CACjB,MAAMC,EAAWnC,EAAgB,EAEjC,GAAI,IAAC,cAAWmC,CAAQ,EACtB,MAAO,GAGT,IAAIC,EAAU,EACd,MAAMd,EAAM,KAAK,IAAI,EACfe,EAAU,MAAM,EAAAR,SAAG,QAAQM,CAAQ,EAEzC,UAAWG,KAASD,EAAS,CAC3B,MAAME,EAAY,EAAAtB,QAAK,KAAKkB,EAAUG,CAAK,EACrClB,EAAWL,EAAgBwB,CAAS,EAE1C,GAAI,CAEF,GAAI,CAAE,QAAM,SAAMA,CAAS,EACzB,SAIF,MAAMlB,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAC3D,GACE,CAACC,GACD,OAAOA,GAAa,UACpB,MAAM,QAAQA,CAAQ,EAEtB,SAEF,MAAME,EAAaF,EAAqC,WAGtD,OAAOE,GAAc,UAAYA,EAAY,EACzCD,EAAMC,EACN,OAAO,mBAEHW,IAGR,QAAM,cAAWK,EAAW,CAAE,MAAO,GAAM,UAAW,EAAK,CAAC,EAC5DH,GAAW,EAEf,MAAQ,CAEN,GAAI,EAEe,MAAM,EAAAP,SAAG,QAAQU,CAAS,GAC7B,SAGZ,QAAM,cAAWA,CAAS,EAC1BH,GAAW,EAEf,MAAQ,CAAC,CACX,CACF,CAEA,OAAOA,CACT,CAKA,eAAsBrC,EACpByC,EACAC,EACAC,EAC0B,CAC1B,KAAM,CACJ,SAAAvB,EAA2B,QAAQ,iBAAiB,EAAE,qBACtD,SAAAQ,EACA,MAAAgB,EAAQ,GACR,KAAAC,EACA,aAAAC,EACA,IAAApB,CACF,EAAI,CAAE,UAAW,KAAM,GAAGgB,CAAQ,EAG5BN,EAAWnC,EAAgB,EAC3B8C,EAAaF,GAAQ,UAAU,QAAQ,QAAQ,IAAI,EAAAX,QAAG,KAAK,CAAC,GAE5Dc,EAAO,GAAGtB,CAAG,IAAIqB,CAAU,GAC3BE,KAAW,oBAAiBD,CAAI,EAChCE,EAAgB,EAAAhC,QAAK,KAAKkB,EAAUa,CAAQ,EAC5CE,KAAa,iBAAc,EAAAjC,QAAK,KAAKgC,EAAeH,CAAU,CAAC,EAErE,IAAIK,EAAa,GACbC,EAAmBzB,EAGvB,GACE,CAACgB,MACD,cAAWM,CAAa,GACvB,MAAM/B,EAAa+B,EAAe9B,CAAQ,EAG3C,GAAI,CACF,MAAMC,EAAWL,EAAgBkC,CAAa,EACxC5B,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAEzDC,GACA,OAAOA,GAAa,UACpB,CAAC,MAAM,QAAQA,CAAQ,GACvB,OAAQA,EAAqC,UAAgB,SAE7D+B,EAAoB/B,EAClB,SAIF8B,EAAa,EAEjB,MAAQ,CAENA,EAAa,EACf,MAEAA,EAAa,GAGXA,IAEF,MAAM,EAAAtB,SAAG,MAAMoB,EAAe,CAAE,UAAW,EAAK,CAAC,EAGjDG,EAAmB,MAAM5B,EAAeC,EAAKyB,EAAYvB,CAAQ,EACjE,MAAMK,EAAciB,EAAexB,EAAK2B,GAAoB,EAAE,GAuBhE,MAAMC,EAhBa,SAAS,sBAAsB,KAAKH,CAAU,EAiB7D,CACE,GAAGL,EACH,IAAK,CACH,GAAGA,GAAc,IACjB,KAAM,GAAGI,CAAa,GAAG,EAAAhC,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAW,EAAE,EACrE,EACA,MAAO,EACT,EACA4B,EACES,KAAe,SAAMJ,EAAYV,EAAMa,EAAmBX,CAAU,EAE1E,MAAO,CACL,WAAAQ,EACA,WAAAC,EACA,aAAAG,CACF,CACF,CAOO,SAAStD,GAA0B,CACxC,SAAO,mBAAgB,CACzB,CAKA,eAAsBC,GAUpB,CACA,MAAMkC,EAAWnC,EAAgB,EAEjC,GAAI,IAAC,cAAWmC,CAAQ,EACtB,MAAO,CAAC,EAGV,MAAMoB,EAAU,CAAC,EACXjC,EAAM,KAAK,IAAI,EACfe,EAAU,MAAM,EAAAR,SAAG,QAAQM,CAAQ,EAEzC,UAAWG,KAASD,EAAS,CAC3B,MAAME,EAAY,EAAAtB,QAAK,KAAKkB,EAAUG,CAAK,EAC3C,GAAI,CAEF,GAAI,CAAE,QAAM,SAAMC,CAAS,EACzB,SAGF,MA
AMnB,EAAWL,EAAgBwB,CAAS,EAEpClB,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAC3D,GACE,CAACC,GACD,OAAOA,GAAa,UACpB,MAAM,QAAQA,CAAQ,EAEtB,SAMF,MAAMmC,GADQ,MAAM,EAAA3B,SAAG,QAAQU,CAAS,GACf,KAAKkB,GAAK,CAACA,EAAE,WAAW,GAAG,CAAC,EAErD,GAAID,EAAY,CACd,MAAMN,EAAa,EAAAjC,QAAK,KAAKsB,EAAWiB,CAAU,EAE5CE,EAAc,MAAM,EAAA7B,SAAG,KAAKqB,CAAU,EAEtCS,EAAUtC,EAChBkC,EAAQ,KAAK,CACX,IAAKjC,GAAQqC,EAAQ,WAA2B,GAChD,KAAOA,EAAQ,MAAsB,UACrC,SAAWA,EAAQ,UAA0B,GAC7C,KAAMH,EACN,SAAWG,EAAQ,UAA0B,UAC7C,KAAMD,EAAY,KAClB,IAAMC,EAAQ,KAAqB,EACrC,CAAC,CACH,CACF,MAAQ,CAAC,CACX,CAEA,OAAOJ,CACT",
+
"names": ["dlx_binary_exports", "__export", "cleanDlxCache", "dlxBinary", "getDlxCachePath", "listDlxCache", "__toCommonJS", "import_node_crypto", "import_node_fs", "import_node_os", "import_node_path", "import_platform", "import_dlx", "import_download_lock", "import_fs", "import_objects", "import_path", "import_paths", "import_spawn", "getMetadataPath", "cacheEntryPath", "path", "isCacheValid", "cacheTtl", "metaPath", "metadata", "now", "timestamp", "downloadBinary", "url", "destPath", "checksum", "fileBuffer", "fs", "hasher", "actualChecksum", "writeMetadata", "os", "maxAge", "cacheDir", "cleaned", "entries", "entry", "entryPath", "args", "options", "spawnExtra", "force", "name", "spawnOptions", "binaryName", "spec", "cacheKey", "cacheEntryDir", "binaryPath", "downloaded", "computedChecksum", "finalSpawnOptions", "spawnPromise", "results", "binaryFile", "f", "binaryStats", "metaObj"]
 }
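The map above embeds the full TypeScript for `dlx-binary.ts`, which now imports `generateCacheKey` from `./dlx` and keys each cached binary on the string `${url}:${binaryName}`. Below is a self-contained sketch of the resulting cache layout, assuming the default `~/.socket/_dlx` location mentioned in the module docs; the URL is hypothetical.

```ts
import { createHash } from 'node:crypto'
import os from 'node:os'
import path from 'node:path'

// Same derivation as the shared helper sketched earlier.
const generateCacheKey = (spec: string): string =>
  createHash('sha512').update(spec).digest('hex').substring(0, 16)

// dlxBinary() builds its spec from the download URL plus the binary name
// (defaulting to `binary-<platform>-<arch>` when no name is given).
const url = 'https://example.com/releases/tool-v1.2.3' // hypothetical URL
const binaryName = `binary-${process.platform}-${os.arch()}`
const cacheKey = generateCacheKey(`${url}:${binaryName}`)

// Resulting layout: one directory per key, holding the binary and its
// .dlx-metadata.json (timestamp, checksum, platform, arch, url).
const cacheEntryDir = path.join(os.homedir(), '.socket', '_dlx', cacheKey)
const binaryPath = path.join(cacheEntryDir, binaryName)
const metadataPath = path.join(cacheEntryDir, '.dlx-metadata.json')
console.log({ binaryPath, metadataPath })
```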
package/dist/dlx-package.js
CHANGED
@@ -1,3 +1,3 @@
 /* Socket Lib - Built with esbuild */
-
var v=Object.create;var d=Object.defineProperty;var _=Object.getOwnPropertyDescriptor;var E=Object.getOwnPropertyNames;var I=Object.getPrototypeOf,J=Object.prototype.hasOwnProperty;var $=(n,e)=>{for(var t in e)d(n,t,{get:e[t],enumerable:!0})},m=(n,e,t,o)=>{if(e&&typeof e=="object"||typeof e=="function")for(let a of E(e))!J.call(n,a)&&a!==t&&d(n,a,{get:()=>e[a],enumerable:!(o=_(e,a))||o.enumerable});return n};var k=(n,e,t)=>(t=n!=null?v(I(n)):{},m(e||!n||!n.__esModule?d(t,"default",{value:n,enumerable:!0}):t,n)),N=n=>m(d({},"__esModule",{value:!0}),n);var
+
var v=Object.create;var d=Object.defineProperty;var _=Object.getOwnPropertyDescriptor;var E=Object.getOwnPropertyNames;var I=Object.getPrototypeOf,J=Object.prototype.hasOwnProperty;var $=(n,e)=>{for(var t in e)d(n,t,{get:e[t],enumerable:!0})},m=(n,e,t,o)=>{if(e&&typeof e=="object"||typeof e=="function")for(let a of E(e))!J.call(n,a)&&a!==t&&d(n,a,{get:()=>e[a],enumerable:!(o=_(e,a))||o.enumerable});return n};var k=(n,e,t)=>(t=n!=null?v(I(n)):{},m(e||!n||!n.__esModule?d(t,"default",{value:n,enumerable:!0}):t,n)),N=n=>m(d({},"__esModule",{value:!0}),n);var T={};$(T,{dlxPackage:()=>M,downloadPackage:()=>j,executePackage:()=>O});module.exports=N(T);var p=require("node:fs"),i=k(require("node:path")),P=require("./constants/platform"),h=require("./constants/packages"),w=require("./dlx"),y=k(require("./external/pacote")),x=require("./fs"),g=require("./path"),D=require("./paths"),b=require("./process-lock"),S=require("./spawn");const C=/[~^><=xX* ]|\|\|/;function A(n){if(n.startsWith("@")){const t=n.split("@");return t.length===3?{name:t[1],version:t[2]}:t.length===2?{name:`@${t[1]}`,version:void 0}:{name:`@${t[1]}`,version:t[2]}}const e=n.lastIndexOf("@");return e===-1?{name:n,version:void 0}:{name:n.slice(0,e),version:n.slice(e+1)}}async function K(n,e,t){const o=(0,w.generateCacheKey)(n),a=(0,g.normalizePath)(i.default.join((0,D.getSocketDlxDir)(),o)),c=(0,g.normalizePath)(i.default.join(a,"node_modules",e)),r=i.default.join(a,".lock");return await b.processLock.withLock(r,async()=>{if(!t&&(0,p.existsSync)(c)){const l=i.default.join(c,"package.json");if((0,p.existsSync)(l))return{installed:!1,packageDir:a}}await p.promises.mkdir(a,{recursive:!0});const s=(0,h.getPacoteCachePath)();return await y.default.extract(n,c,{cache:s||i.default.join(a,".cache")}),{installed:!0,packageDir:a}},{staleMs:5e3,touchIntervalMs:2e3})}function L(n,e,t){const o=(0,g.normalizePath)(i.default.join(n,"node_modules",e)),a=i.default.join(o,"package.json"),r=(0,x.readJsonSync)(a).bin;let s;if(typeof r=="string")s=r;else if(typeof r=="object"&&r!==null){const l=t||e.split("/").pop();s=r[l]}if(!s)throw new Error(`No binary found for package "${e}"`);return(0,g.normalizePath)(i.default.join(o,s))}async function M(n,e,t){const o=await j(e),a=O(o.binaryPath,n,e?.spawnOptions,t);return{...o,spawnPromise:a}}async function j(n){const{force:e,package:t}={__proto__:null,...n},{name:o,version:a}=A(t),c=a!==void 0&&C.test(a),r=e!==void 0?e:c,s=a?`${o}@${a}`:o,{installed:l,packageDir:u}=await K(s,o,r),f=L(u,o);if(!P.WIN32&&(0,p.existsSync)(f)){const{chmodSync:R}=require("node:fs");try{R(f,493)}catch{}}return{binaryPath:f,installed:l,packageDir:u}}function O(n,e,t,o){return(0,S.spawn)(n,e,t,o)}0&&(module.exports={dlxPackage,downloadPackage,executePackage});
 //# sourceMappingURL=dlx-package.js.map
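One behavioral detail visible when comparing the old and new bundles: in 2.8.0 this module hashed the package spec with its own SHA-256-based `generatePackageCacheKey`, while 2.8.1 delegates to the shared SHA-512-based `generateCacheKey`, so the same spec maps to a different cache directory name. A small sketch of the difference (a spec cached by 2.8.0 would simply be re-extracted under the new key):

```ts
import { createHash } from 'node:crypto'

const spec = '@cyclonedx/cdxgen@10.0.0'

// v2.8.0: local generatePackageCacheKey - SHA-256, first 16 hex chars.
const oldKey = createHash('sha256').update(spec).digest('hex').slice(0, 16)

// v2.8.1: shared generateCacheKey from dlx.ts - SHA-512, first 16 hex chars.
const newKey = createHash('sha512').update(spec).digest('hex').substring(0, 16)

console.log(oldKey === newKey) // false - cache directories differ across versions
```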
package/dist/dlx-package.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../src/dlx-package.ts"],
-
"sourcesContent": ["/**\n * @fileoverview DLX package execution - Install and execute npm packages.\n *\n * This module provides functionality to install and execute npm packages\n * in the ~/.socket/_dlx directory, similar to npx but with Socket's own cache.\n *\n * Uses content-addressed storage like npm's _npx:\n * - Hash is generated from package spec (name@version)\n * - Each unique spec gets its own directory: ~/.socket/_dlx/<hash>/\n * - Allows caching multiple versions of the same package\n *\n * Concurrency protection:\n * - Uses process-lock to prevent concurrent installation corruption\n * - Lock file created at ~/.socket/_dlx/<hash>/.lock\n * - Aligned with npm npx's concurrency.lock strategy (5s stale, 2s touching)\n * - Prevents multiple processes from corrupting the same package installation\n *\n * Version range handling:\n * - Exact versions (1.0.0) use cache if available\n * - Range versions (^1.0.0, ~1.0.0) auto-force to get latest within range\n * - User can override with explicit force: false\n *\n * Key difference from dlx-binary.ts:\n * - dlx-binary.ts: Downloads standalone binaries from URLs\n * - dlx-package.ts: Installs npm packages from registries\n *\n * Implementation:\n * - Uses pacote for package installation (no npm CLI required)\n * - Split into downloadPackage() and executePackage() for flexibility\n * - dlxPackage() combines both for convenience\n */\n\nimport { createHash } from 'node:crypto'\nimport { existsSync, promises as fs } from 'node:fs'\nimport path from 'node:path'\n\nimport pacote from './external/pacote'\nimport { WIN32 } from './constants/platform'\nimport { getPacoteCachePath } from './constants/packages'\nimport { readJsonSync } from './fs'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\n/**\n * Regex to check if a version string contains range operators.\n * Matches any version with range operators: ~, ^, >, <, =, x, X, *, spaces, or ||.\n */\nconst rangeOperatorsRegExp = /[~^><=xX* ]|\\|\\|/\n\nexport interface DownloadPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary. */\n binaryPath: string\n /** Whether the package was newly installed. */\n installed: boolean\n}\n\nexport interface DlxPackageOptions {\n /**\n * Package to install (e.g., '@cyclonedx/cdxgen@10.0.0').\n */\n package: string\n /**\n * Force reinstallation even if package exists.\n */\n force?: boolean | undefined\n /**\n * Additional spawn options for the execution.\n */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary that was executed. */\n binaryPath: string\n /** Whether the package was newly installed. */\n installed: boolean\n /** The spawn promise for the running process. 
*/\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Generate a cache key from package spec, similar to npm's _npx.\n * Uses first 16 hex characters of SHA256 hash.\n */\nfunction generatePackageCacheKey(packageSpec: string): string {\n return createHash('sha256').update(packageSpec).digest('hex').slice(0, 16)\n}\n\n/**\n * Parse package spec into name and version.\n * Examples:\n * - 'lodash@4.17.21' \u2192 { name: 'lodash', version: '4.17.21' }\n * - '@scope/pkg@1.0.0' \u2192 { name: '@scope/pkg', version: '1.0.0' }\n * - 'lodash' \u2192 { name: 'lodash', version: undefined }\n */\nfunction parsePackageSpec(spec: string): {\n name: string\n version: string | undefined\n} {\n // Handle scoped packages (@scope/name@version).\n if (spec.startsWith('@')) {\n const parts = spec.split('@')\n if (parts.length === 3) {\n // @scope@version -> Invalid, but handle gracefully.\n return { name: parts[1], version: parts[2] }\n }\n if (parts.length === 2) {\n // @scope/name with no version.\n return { name: `@${parts[1]}`, version: undefined }\n }\n // @scope/name@version.\n const scopeAndName = `@${parts[1]}`\n return { name: scopeAndName, version: parts[2] }\n }\n\n // Handle unscoped packages (name@version).\n const atIndex = spec.lastIndexOf('@')\n if (atIndex === -1) {\n return { name: spec, version: undefined }\n }\n\n return {\n name: spec.slice(0, atIndex),\n version: spec.slice(atIndex + 1),\n }\n}\n\n/**\n * Install package to ~/.socket/_dlx/<hash>/ if not already installed.\n * Uses pacote for installation (no npm CLI required).\n * Protected by process lock to prevent concurrent installation corruption.\n */\nasync function ensurePackageInstalled(\n packageSpec: string,\n packageName: string,\n force: boolean,\n): Promise<{ installed: boolean; packageDir: string }> {\n const cacheKey = generatePackageCacheKey(packageSpec)\n const packageDir = normalizePath(path.join(getSocketDlxDir(), cacheKey))\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n\n // Use process lock to prevent concurrent installations.\n // Similar to npm npx's concurrency.lock approach.\n const lockPath = path.join(packageDir, '.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n // Double-check if already installed (unless force).\n // Another process may have installed while waiting for lock.\n if (!force && existsSync(installedDir)) {\n // Verify package.json exists.\n const pkgJsonPath = path.join(installedDir, 'package.json')\n if (existsSync(pkgJsonPath)) {\n return { installed: false, packageDir }\n }\n }\n\n // Ensure package directory exists.\n await fs.mkdir(packageDir, { recursive: true })\n\n // Use pacote to extract the package.\n // Pacote leverages npm cache when available but doesn't require npm CLI.\n const pacoteCachePath = getPacoteCachePath()\n await pacote.extract(packageSpec, installedDir, {\n // Use consistent pacote cache path (respects npm cache locations when available).\n cache: pacoteCachePath || path.join(packageDir, '.cache'),\n })\n\n return { installed: true, packageDir }\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Find the binary path for an installed package.\n */\nfunction findBinaryPath(\n packageDir: string,\n packageName: string,\n binaryName?: string,\n): string {\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n const pkgJsonPath = path.join(installedDir, 'package.json')\n\n // Read 
package.json to find bin entry.\n const pkgJson = readJsonSync(pkgJsonPath) as Record<string, unknown>\n const bin = pkgJson['bin']\n\n let binPath: string | undefined\n\n if (typeof bin === 'string') {\n // Single binary.\n binPath = bin\n } else if (typeof bin === 'object' && bin !== null) {\n // Multiple binaries - use binaryName or package name.\n const binName = binaryName || packageName.split('/').pop()\n binPath = (bin as Record<string, string>)[binName!]\n }\n\n if (!binPath) {\n throw new Error(`No binary found for package \"${packageName}\"`)\n }\n\n return normalizePath(path.join(installedDir, binPath))\n}\n\n/**\n * Execute a package via DLX - install if needed and run its binary.\n *\n * This is the Socket equivalent of npx/pnpm dlx/yarn dlx, but using\n * our own cache directory (~/.socket/_dlx) and installation logic.\n *\n * Auto-forces reinstall for version ranges to get latest within range.\n *\n * @example\n * ```typescript\n * // Download and execute cdxgen\n * const result = await dlxPackage(\n * ['--version'],\n * { package: '@cyclonedx/cdxgen@10.0.0' }\n * )\n * await result.spawnPromise\n * ```\n */\nexport async function dlxPackage(\n args: readonly string[] | string[],\n options?: DlxPackageOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxPackageResult> {\n // Download the package.\n const downloadResult = await downloadPackage(options!)\n\n // Execute the binary.\n const spawnPromise = executePackage(\n downloadResult.binaryPath,\n args,\n options?.spawnOptions,\n spawnExtra,\n )\n\n return {\n ...downloadResult,\n spawnPromise,\n }\n}\n\n/**\n * Download and install a package without executing it.\n * This is useful for self-update or when you need the package files\n * but don't want to run the binary immediately.\n *\n * @example\n * ```typescript\n * // Install @socketsecurity/cli without running it\n * const result = await downloadPackage({\n * package: '@socketsecurity/cli@1.2.0',\n * force: true\n * })\n * console.log('Installed to:', result.packageDir)\n * console.log('Binary at:', result.binaryPath)\n * ```\n */\nexport async function downloadPackage(\n options: DlxPackageOptions,\n): Promise<DownloadPackageResult> {\n const { force: userForce, package: packageSpec } = {\n __proto__: null,\n ...options,\n } as DlxPackageOptions\n\n // Parse package spec.\n const { name: packageName, version: packageVersion } =\n parsePackageSpec(packageSpec)\n\n // Auto-force for version ranges to get latest within range.\n // User can still override with explicit force: false if they want cache.\n const isVersionRange =\n packageVersion !== undefined && rangeOperatorsRegExp.test(packageVersion)\n const force = userForce !== undefined ? userForce : isVersionRange\n\n // Build full package spec for installation.\n const fullPackageSpec = packageVersion\n ? 
`${packageName}@${packageVersion}`\n : packageName\n\n // Ensure package is installed.\n const { installed, packageDir } = await ensurePackageInstalled(\n fullPackageSpec,\n packageName,\n force,\n )\n\n // Find binary path.\n const binaryPath = findBinaryPath(packageDir, packageName)\n\n // Make binary executable on Unix systems.\n if (!WIN32 && existsSync(binaryPath)) {\n const { chmodSync } = require('node:fs')\n try {\n chmodSync(binaryPath, 0o755)\n } catch {\n // Ignore chmod errors.\n }\n }\n\n return {\n binaryPath,\n installed,\n packageDir,\n }\n}\n\n/**\n * Execute a package's binary.\n * The package must already be installed (use downloadPackage first).\n *\n * @example\n * ```typescript\n * // Execute an already-installed package\n * const downloaded = await downloadPackage({ package: 'cowsay@1.5.0' })\n * const result = await executePackage(\n * downloaded.binaryPath,\n * ['Hello World'],\n * { stdio: 'inherit' }\n * )\n * ```\n */\nexport function executePackage(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n return spawn(binaryPath, args, spawnOptions, spawnExtra)\n}\n"],
-
"mappings": ";6iBAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,gBAAAE,EAAA,oBAAAC,EAAA,mBAAAC,IAAA,eAAAC,EAAAL,GAgCA,IAAAM,
-
"names": ["dlx_package_exports", "__export", "dlxPackage", "downloadPackage", "executePackage", "__toCommonJS", "
+
"sourcesContent": ["/**\n * @fileoverview DLX package execution - Install and execute npm packages.\n *\n * This module provides functionality to install and execute npm packages\n * in the ~/.socket/_dlx directory, similar to npx but with Socket's own cache.\n *\n * Uses content-addressed storage like npm's _npx:\n * - Hash is generated from package spec (name@version)\n * - Each unique spec gets its own directory: ~/.socket/_dlx/<hash>/\n * - Allows caching multiple versions of the same package\n *\n * Concurrency protection:\n * - Uses process-lock to prevent concurrent installation corruption\n * - Lock file created at ~/.socket/_dlx/<hash>/.lock\n * - Aligned with npm npx's concurrency.lock strategy (5s stale, 2s touching)\n * - Prevents multiple processes from corrupting the same package installation\n *\n * Version range handling:\n * - Exact versions (1.0.0) use cache if available\n * - Range versions (^1.0.0, ~1.0.0) auto-force to get latest within range\n * - User can override with explicit force: false\n *\n * Key difference from dlx-binary.ts:\n * - dlx-binary.ts: Downloads standalone binaries from URLs\n * - dlx-package.ts: Installs npm packages from registries\n *\n * Implementation:\n * - Uses pacote for package installation (no npm CLI required)\n * - Split into downloadPackage() and executePackage() for flexibility\n * - dlxPackage() combines both for convenience\n */\n\nimport { existsSync, promises as fs } from 'node:fs'\nimport path from 'node:path'\n\nimport { WIN32 } from './constants/platform'\nimport { getPacoteCachePath } from './constants/packages'\nimport { generateCacheKey } from './dlx'\nimport pacote from './external/pacote'\nimport { readJsonSync } from './fs'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\n/**\n * Regex to check if a version string contains range operators.\n * Matches any version with range operators: ~, ^, >, <, =, x, X, *, spaces, or ||.\n */\nconst rangeOperatorsRegExp = /[~^><=xX* ]|\\|\\|/\n\nexport interface DownloadPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary. */\n binaryPath: string\n /** Whether the package was newly installed. */\n installed: boolean\n}\n\nexport interface DlxPackageOptions {\n /**\n * Package to install (e.g., '@cyclonedx/cdxgen@10.0.0').\n */\n package: string\n /**\n * Force reinstallation even if package exists.\n */\n force?: boolean | undefined\n /**\n * Additional spawn options for the execution.\n */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary that was executed. */\n binaryPath: string\n /** Whether the package was newly installed. */\n installed: boolean\n /** The spawn promise for the running process. 
*/\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Parse package spec into name and version.\n * Examples:\n * - 'lodash@4.17.21' \u2192 { name: 'lodash', version: '4.17.21' }\n * - '@scope/pkg@1.0.0' \u2192 { name: '@scope/pkg', version: '1.0.0' }\n * - 'lodash' \u2192 { name: 'lodash', version: undefined }\n */\nfunction parsePackageSpec(spec: string): {\n name: string\n version: string | undefined\n} {\n // Handle scoped packages (@scope/name@version).\n if (spec.startsWith('@')) {\n const parts = spec.split('@')\n if (parts.length === 3) {\n // @scope@version -> Invalid, but handle gracefully.\n return { name: parts[1], version: parts[2] }\n }\n if (parts.length === 2) {\n // @scope/name with no version.\n return { name: `@${parts[1]}`, version: undefined }\n }\n // @scope/name@version.\n const scopeAndName = `@${parts[1]}`\n return { name: scopeAndName, version: parts[2] }\n }\n\n // Handle unscoped packages (name@version).\n const atIndex = spec.lastIndexOf('@')\n if (atIndex === -1) {\n return { name: spec, version: undefined }\n }\n\n return {\n name: spec.slice(0, atIndex),\n version: spec.slice(atIndex + 1),\n }\n}\n\n/**\n * Install package to ~/.socket/_dlx/<hash>/ if not already installed.\n * Uses pacote for installation (no npm CLI required).\n * Protected by process lock to prevent concurrent installation corruption.\n */\nasync function ensurePackageInstalled(\n packageSpec: string,\n packageName: string,\n force: boolean,\n): Promise<{ installed: boolean; packageDir: string }> {\n const cacheKey = generateCacheKey(packageSpec)\n const packageDir = normalizePath(path.join(getSocketDlxDir(), cacheKey))\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n\n // Use process lock to prevent concurrent installations.\n // Similar to npm npx's concurrency.lock approach.\n const lockPath = path.join(packageDir, '.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n // Double-check if already installed (unless force).\n // Another process may have installed while waiting for lock.\n if (!force && existsSync(installedDir)) {\n // Verify package.json exists.\n const pkgJsonPath = path.join(installedDir, 'package.json')\n if (existsSync(pkgJsonPath)) {\n return { installed: false, packageDir }\n }\n }\n\n // Ensure package directory exists.\n await fs.mkdir(packageDir, { recursive: true })\n\n // Use pacote to extract the package.\n // Pacote leverages npm cache when available but doesn't require npm CLI.\n const pacoteCachePath = getPacoteCachePath()\n await pacote.extract(packageSpec, installedDir, {\n // Use consistent pacote cache path (respects npm cache locations when available).\n cache: pacoteCachePath || path.join(packageDir, '.cache'),\n })\n\n return { installed: true, packageDir }\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Find the binary path for an installed package.\n */\nfunction findBinaryPath(\n packageDir: string,\n packageName: string,\n binaryName?: string,\n): string {\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n const pkgJsonPath = path.join(installedDir, 'package.json')\n\n // Read package.json to find bin entry.\n const pkgJson = readJsonSync(pkgJsonPath) as Record<string, unknown>\n const bin = pkgJson['bin']\n\n let binPath: string | undefined\n\n if (typeof bin === 'string') {\n // Single binary.\n binPath = bin\n } else if (typeof bin === 'object' && bin !== 
null) {\n // Multiple binaries - use binaryName or package name.\n const binName = binaryName || packageName.split('/').pop()\n binPath = (bin as Record<string, string>)[binName!]\n }\n\n if (!binPath) {\n throw new Error(`No binary found for package \"${packageName}\"`)\n }\n\n return normalizePath(path.join(installedDir, binPath))\n}\n\n/**\n * Execute a package via DLX - install if needed and run its binary.\n *\n * This is the Socket equivalent of npx/pnpm dlx/yarn dlx, but using\n * our own cache directory (~/.socket/_dlx) and installation logic.\n *\n * Auto-forces reinstall for version ranges to get latest within range.\n *\n * @example\n * ```typescript\n * // Download and execute cdxgen\n * const result = await dlxPackage(\n * ['--version'],\n * { package: '@cyclonedx/cdxgen@10.0.0' }\n * )\n * await result.spawnPromise\n * ```\n */\nexport async function dlxPackage(\n args: readonly string[] | string[],\n options?: DlxPackageOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxPackageResult> {\n // Download the package.\n const downloadResult = await downloadPackage(options!)\n\n // Execute the binary.\n const spawnPromise = executePackage(\n downloadResult.binaryPath,\n args,\n options?.spawnOptions,\n spawnExtra,\n )\n\n return {\n ...downloadResult,\n spawnPromise,\n }\n}\n\n/**\n * Download and install a package without executing it.\n * This is useful for self-update or when you need the package files\n * but don't want to run the binary immediately.\n *\n * @example\n * ```typescript\n * // Install @socketsecurity/cli without running it\n * const result = await downloadPackage({\n * package: '@socketsecurity/cli@1.2.0',\n * force: true\n * })\n * console.log('Installed to:', result.packageDir)\n * console.log('Binary at:', result.binaryPath)\n * ```\n */\nexport async function downloadPackage(\n options: DlxPackageOptions,\n): Promise<DownloadPackageResult> {\n const { force: userForce, package: packageSpec } = {\n __proto__: null,\n ...options,\n } as DlxPackageOptions\n\n // Parse package spec.\n const { name: packageName, version: packageVersion } =\n parsePackageSpec(packageSpec)\n\n // Auto-force for version ranges to get latest within range.\n // User can still override with explicit force: false if they want cache.\n const isVersionRange =\n packageVersion !== undefined && rangeOperatorsRegExp.test(packageVersion)\n const force = userForce !== undefined ? userForce : isVersionRange\n\n // Build full package spec for installation.\n const fullPackageSpec = packageVersion\n ? 
`${packageName}@${packageVersion}`\n : packageName\n\n // Ensure package is installed.\n const { installed, packageDir } = await ensurePackageInstalled(\n fullPackageSpec,\n packageName,\n force,\n )\n\n // Find binary path.\n const binaryPath = findBinaryPath(packageDir, packageName)\n\n // Make binary executable on Unix systems.\n if (!WIN32 && existsSync(binaryPath)) {\n const { chmodSync } = require('node:fs')\n try {\n chmodSync(binaryPath, 0o755)\n } catch {\n // Ignore chmod errors.\n }\n }\n\n return {\n binaryPath,\n installed,\n packageDir,\n }\n}\n\n/**\n * Execute a package's binary.\n * The package must already be installed (use downloadPackage first).\n *\n * @example\n * ```typescript\n * // Execute an already-installed package\n * const downloaded = await downloadPackage({ package: 'cowsay@1.5.0' })\n * const result = await executePackage(\n * downloaded.binaryPath,\n * ['Hello World'],\n * { stdio: 'inherit' }\n * )\n * ```\n */\nexport function executePackage(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n return spawn(binaryPath, args, spawnOptions, spawnExtra)\n}\n"],
|
|
5
|
+
"mappings": ";6iBAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,gBAAAE,EAAA,oBAAAC,EAAA,mBAAAC,IAAA,eAAAC,EAAAL,GAgCA,IAAAM,EAA2C,mBAC3CC,EAAiB,wBAEjBC,EAAsB,gCACtBC,EAAmC,gCACnCC,EAAiC,iBACjCC,EAAmB,gCACnBC,EAA6B,gBAC7BC,EAA8B,kBAC9BC,EAAgC,mBAChCC,EAA4B,0BAE5BC,EAAsB,mBAMtB,MAAMC,EAAuB,mBA4C7B,SAASC,EAAiBC,EAGxB,CAEA,GAAIA,EAAK,WAAW,GAAG,EAAG,CACxB,MAAMC,EAAQD,EAAK,MAAM,GAAG,EAC5B,OAAIC,EAAM,SAAW,EAEZ,CAAE,KAAMA,EAAM,CAAC,EAAG,QAASA,EAAM,CAAC,CAAE,EAEzCA,EAAM,SAAW,EAEZ,CAAE,KAAM,IAAIA,EAAM,CAAC,CAAC,GAAI,QAAS,MAAU,EAI7C,CAAE,KADY,IAAIA,EAAM,CAAC,CAAC,GACJ,QAASA,EAAM,CAAC,CAAE,CACjD,CAGA,MAAMC,EAAUF,EAAK,YAAY,GAAG,EACpC,OAAIE,IAAY,GACP,CAAE,KAAMF,EAAM,QAAS,MAAU,EAGnC,CACL,KAAMA,EAAK,MAAM,EAAGE,CAAO,EAC3B,QAASF,EAAK,MAAME,EAAU,CAAC,CACjC,CACF,CAOA,eAAeC,EACbC,EACAC,EACAC,EACqD,CACrD,MAAMC,KAAW,oBAAiBH,CAAW,EACvCI,KAAa,iBAAc,EAAAC,QAAK,QAAK,mBAAgB,EAAGF,CAAQ,CAAC,EACjEG,KAAe,iBACnB,EAAAD,QAAK,KAAKD,EAAY,eAAgBH,CAAW,CACnD,EAIMM,EAAW,EAAAF,QAAK,KAAKD,EAAY,OAAO,EAE9C,OAAO,MAAM,cAAY,SACvBG,EACA,SAAY,CAGV,GAAI,CAACL,MAAS,cAAWI,CAAY,EAAG,CAEtC,MAAME,EAAc,EAAAH,QAAK,KAAKC,EAAc,cAAc,EAC1D,MAAI,cAAWE,CAAW,EACxB,MAAO,CAAE,UAAW,GAAO,WAAAJ,CAAW,CAE1C,CAGA,MAAM,EAAAK,SAAG,MAAML,EAAY,CAAE,UAAW,EAAK,CAAC,EAI9C,MAAMM,KAAkB,sBAAmB,EAC3C,aAAM,EAAAC,QAAO,QAAQX,EAAaM,EAAc,CAE9C,MAAOI,GAAmB,EAAAL,QAAK,KAAKD,EAAY,QAAQ,CAC1D,CAAC,EAEM,CAAE,UAAW,GAAM,WAAAA,CAAW,CACvC,EACA,CAEE,QAAS,IACT,gBAAiB,GACnB,CACF,CACF,CAKA,SAASQ,EACPR,EACAH,EACAY,EACQ,CACR,MAAMP,KAAe,iBACnB,EAAAD,QAAK,KAAKD,EAAY,eAAgBH,CAAW,CACnD,EACMO,EAAc,EAAAH,QAAK,KAAKC,EAAc,cAAc,EAIpDQ,KADU,gBAAaN,CAAW,EACpB,IAEpB,IAAIO,EAEJ,GAAI,OAAOD,GAAQ,SAEjBC,EAAUD,UACD,OAAOA,GAAQ,UAAYA,IAAQ,KAAM,CAElD,MAAME,EAAUH,GAAcZ,EAAY,MAAM,GAAG,EAAE,IAAI,EACzDc,EAAWD,EAA+BE,CAAQ,CACpD,CAEA,GAAI,CAACD,EACH,MAAM,IAAI,MAAM,gCAAgCd,CAAW,GAAG,EAGhE,SAAO,iBAAc,EAAAI,QAAK,KAAKC,EAAcS,CAAO,CAAC,CACvD,CAoBA,eAAsBpC,EACpBsC,EACAC,EACAC,EAC2B,CAE3B,MAAMC,EAAiB,MAAMxC,EAAgBsC,CAAQ,EAG/CG,EAAexC,EACnBuC,EAAe,WACfH,EACAC,GAAS,aACTC,CACF,EAEA,MAAO,CACL,GAAGC,EACH,aAAAC,CACF,CACF,CAkBA,eAAsBzC,EACpBsC,EACgC,CAChC,KAAM,CAAE,MAAOI,EAAW,QAAStB,CAAY,EAAI,CACjD,UAAW,KACX,GAAGkB,CACL,EAGM,CAAE,KAAMjB,EAAa,QAASsB,CAAe,EACjD5B,EAAiBK,CAAW,EAIxBwB,EACJD,IAAmB,QAAa7B,EAAqB,KAAK6B,CAAc,EACpErB,EAAQoB,IAAc,OAAYA,EAAYE,EAG9CC,EAAkBF,EACpB,GAAGtB,CAAW,IAAIsB,CAAc,GAChCtB,EAGE,CAAE,UAAAyB,EAAW,WAAAtB,CAAW,EAAI,MAAML,EACtC0B,EACAxB,EACAC,CACF,EAGMyB,EAAaf,EAAeR,EAAYH,CAAW,EAGzD,GAAI,CAAC,YAAS,cAAW0B,CAAU,EAAG,CACpC,KAAM,CAAE,UAAAC,CAAU,EAAI,QAAQ,SAAS,EACvC,GAAI,CACFA,EAAUD,EAAY,GAAK,CAC7B,MAAQ,CAER,CACF,CAEA,MAAO,CACL,WAAAA,EACA,UAAAD,EACA,WAAAtB,CACF,CACF,CAiBO,SAASvB,EACd8C,EACAV,EACAY,EACAV,EAC0B,CAC1B,SAAO,SAAMQ,EAAYV,EAAMY,EAAcV,CAAU,CACzD",
|
|
6
|
+
"names": ["dlx_package_exports", "__export", "dlxPackage", "downloadPackage", "executePackage", "__toCommonJS", "import_node_fs", "import_node_path", "import_platform", "import_packages", "import_dlx", "import_pacote", "import_fs", "import_path", "import_paths", "import_process_lock", "import_spawn", "rangeOperatorsRegExp", "parsePackageSpec", "spec", "parts", "atIndex", "ensurePackageInstalled", "packageSpec", "packageName", "force", "cacheKey", "packageDir", "path", "installedDir", "lockPath", "pkgJsonPath", "fs", "pacoteCachePath", "pacote", "findBinaryPath", "binaryName", "bin", "binPath", "binName", "args", "options", "spawnExtra", "downloadResult", "spawnPromise", "userForce", "packageVersion", "isVersionRange", "fullPackageSpec", "installed", "binaryPath", "chmodSync", "spawnOptions"]
|
|
7
7
|
}
|
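For orientation, the dlx-package source embedded in the map above exposes `dlxPackage`, `downloadPackage`, and `executePackage`. Below is a minimal sketch of that install-then-run flow; the import specifier is assumed from the dist filename (`dist/dlx-package.js`) and may differ from the package's actual exports map.

```typescript
// Assumed import path (based on dist/dlx-package.js); check the exports map.
import {
  dlxPackage,
  downloadPackage,
  executePackage,
} from '@socketsecurity/lib/dlx-package'

async function demo(): Promise<void> {
  // One-shot: install into ~/.socket/_dlx/<hash>/ if needed, then spawn the binary.
  const result = await dlxPackage(['--version'], {
    package: '@cyclonedx/cdxgen@10.0.0',
  })
  await result.spawnPromise

  // Two-step: fetch the files first, run the binary later (or not at all).
  const downloaded = await downloadPackage({ package: 'cowsay@1.5.0' })
  await executePackage(downloaded.binaryPath, ['Hello World'], {
    stdio: 'inherit',
  })

  // Note: a range spec such as 'cowsay@^1.5.0' auto-forces reinstall so the
  // latest matching version is picked up; pass force: false to prefer the cache.
}
```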
package/dist/dlx.d.ts
CHANGED
|
@@ -1,3 +1,28 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Generate a cache directory name using npm/npx approach.
|
|
3
|
+
* Uses first 16 characters of SHA-512 hash (like npm/npx).
|
|
4
|
+
*
|
|
5
|
+
* Rationale for SHA-512 truncated (vs full SHA-256):
|
|
6
|
+
* - Matches npm/npx ecosystem behavior
|
|
7
|
+
* - Shorter paths for Windows MAX_PATH compatibility (260 chars)
|
|
8
|
+
* - 16 hex chars = 64 bits = acceptable collision risk for local cache
|
|
9
|
+
* - Collision probability ~1 in 18 quintillion with 1000 entries
|
|
10
|
+
*
|
|
11
|
+
* Input strategy (aligned with npx):
|
|
12
|
+
* - npx uses package spec strings (e.g., '@scope/pkg@1.0.0', 'prettier@3.0.0')
|
|
13
|
+
* - Caller provides complete spec string with version for accurate cache keying
|
|
14
|
+
* - For package installs: Use PURL-style spec with version
|
|
15
|
+
* Examples: 'npm:prettier@3.0.0', 'pypi:requests@2.31.0', 'gem:rails@7.0.0'
|
|
16
|
+
* Note: Socket uses shorthand format without 'pkg:' prefix
|
|
17
|
+
* (handled by @socketregistry/packageurl-js)
|
|
18
|
+
* - For binary downloads: Use URL:name for uniqueness
|
|
19
|
+
*
|
|
20
|
+
* Reference: npm/cli v11.6.2 libnpmexec/lib/index.js#L233-L244
|
|
21
|
+
* https://github.com/npm/cli/blob/v11.6.2/workspaces/libnpmexec/lib/index.js#L233-L244
|
|
22
|
+
* Implementation: packages.map().sort().join('\n') → SHA-512 → slice(0,16)
|
|
23
|
+
* npx hashes the package spec (name@version), not just name
|
|
24
|
+
*/
|
|
25
|
+
export declare function generateCacheKey(spec: string): string;
|
|
1
26
|
/**
|
|
2
27
|
* Clear all DLX package installations.
|
|
3
28
|
*/
|
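The new `generateCacheKey` export declared above is implemented (see the dlx.ts source embedded in the map further down) as a SHA-512 hex digest of the spec truncated to 16 characters. A minimal equivalent sketch follows, with example inputs taken from the JSDoc; the URL in the binary example is a placeholder.

```typescript
import { createHash } from 'node:crypto'

// Same strategy as generateCacheKey: SHA-512 of the spec, hex-encoded,
// first 16 chars (64 bits), matching npm/npx cache-directory naming.
function cacheKey(spec: string): string {
  return createHash('sha512').update(spec).digest('hex').substring(0, 16)
}

// Package installs: ecosystem-prefixed spec with a pinned version.
cacheKey('npm:prettier@3.0.0') // 16 hex characters, stable for the same spec
// Binary downloads: URL plus name, so binaries sharing a URL stay distinct.
cacheKey('https://example.com/releases/tool.tgz:tool-linux-x64')
```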
package/dist/dlx.js
CHANGED
|
@@ -1,3 +1,3 @@
|
|
|
1
1
|
/* Socket Lib - Built with esbuild */
|
|
2
|
-
var
|
|
2
|
+
var g=Object.defineProperty;var k=Object.getOwnPropertyDescriptor;var v=Object.getOwnPropertyNames;var w=Object.prototype.hasOwnProperty;var S=(t,r)=>{for(var e in r)g(t,e,{get:r[e],enumerable:!0})},b=(t,r,e,i)=>{if(r&&typeof r=="object"||typeof r=="function")for(let s of v(r))!w.call(t,s)&&s!==e&&g(t,s,{get:()=>r[s],enumerable:!(i=k(r,s))||i.enumerable});return t};var j=t=>b(g({},"__esModule",{value:!0}),t);var H={};S(H,{clearDlx:()=>I,clearDlxSync:()=>q,dlxDirExists:()=>A,dlxDirExistsAsync:()=>F,ensureDlxDir:()=>L,ensureDlxDirSync:()=>X,generateCacheKey:()=>E,getDlxInstalledPackageDir:()=>l,getDlxPackageDir:()=>p,getDlxPackageJsonPath:()=>_,getDlxPackageNodeModulesDir:()=>h,isDlxPackageInstalled:()=>z,isDlxPackageInstalledAsync:()=>C,isInSocketDlx:()=>$,listDlxPackages:()=>d,listDlxPackagesAsync:()=>m,removeDlxPackage:()=>y,removeDlxPackageSync:()=>P});module.exports=j(H);var f=require("node:crypto"),o=require("node:fs"),u=require("./fs"),a=require("./path"),n=require("./paths"),D=require("./promises");function E(t){return(0,f.createHash)("sha512").update(t).digest("hex").substring(0,16)}let x;function c(){return x===void 0&&(x=require("node:path")),x}async function I(){const t=await m();await(0,D.pEach)(t,r=>y(r))}function q(){const t=d();for(const r of t)P(r)}function A(){return(0,o.existsSync)((0,n.getSocketDlxDir)())}async function F(){try{return await o.promises.access((0,n.getSocketDlxDir)()),!0}catch{return!1}}async function L(){await o.promises.mkdir((0,n.getSocketDlxDir)(),{recursive:!0})}function X(){const{mkdirSync:t}=require("node:fs");t((0,n.getSocketDlxDir)(),{recursive:!0})}function l(t){return(0,a.normalizePath)(c().join(h(t),t))}function p(t){return(0,a.normalizePath)(c().join((0,n.getSocketDlxDir)(),t))}function _(t){return(0,a.normalizePath)(c().join(l(t),"package.json"))}function h(t){return(0,a.normalizePath)(c().join(p(t),"node_modules"))}function $(t){if(!t)return!1;const r=c(),e=(0,n.getSocketDlxDir)();return r.resolve(t).startsWith(e+r.sep)}function z(t){return(0,o.existsSync)(l(t))}async function C(t){try{return await o.promises.access(l(t)),!0}catch{return!1}}function d(){try{return(0,u.readDirNamesSync)((0,n.getSocketDlxDir)(),{sort:!0})}catch{return[]}}async function m(){try{return(await o.promises.readdir((0,n.getSocketDlxDir)(),{withFileTypes:!0})).filter(r=>r.isDirectory()).map(r=>r.name).sort()}catch{return[]}}async function y(t){const r=p(t);try{await(0,u.safeDelete)(r,{recursive:!0,force:!0})}catch(e){throw new Error(`Failed to remove DLX package "${t}"`,{cause:e})}}function P(t){const{rmSync:r}=require("node:fs"),e=p(t);try{r(e,{recursive:!0,force:!0})}catch(i){throw new Error(`Failed to remove DLX package "${t}"`,{cause:i})}}0&&(module.exports={clearDlx,clearDlxSync,dlxDirExists,dlxDirExistsAsync,ensureDlxDir,ensureDlxDirSync,generateCacheKey,getDlxInstalledPackageDir,getDlxPackageDir,getDlxPackageJsonPath,getDlxPackageNodeModulesDir,isDlxPackageInstalled,isDlxPackageInstalledAsync,isInSocketDlx,listDlxPackages,listDlxPackagesAsync,removeDlxPackage,removeDlxPackageSync});
|
|
3
3
|
//# sourceMappingURL=dlx.js.map
|
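The rebuilt dlx.js above keeps the existing cache-management helpers alongside the new `generateCacheKey` export. A hedged usage sketch, with the import path again assumed from the dist filename:

```typescript
// Assumed import path (based on dist/dlx.js); check the package's exports map.
import {
  clearDlx,
  isInSocketDlx,
  listDlxPackagesAsync,
  removeDlxPackage,
} from '@socketsecurity/lib/dlx'

// Remove every cached entry except the ones we still want.
async function pruneDlxCache(keep: ReadonlySet<string>): Promise<void> {
  // Entries are directory names under ~/.socket/_dlx (i.e. the cache keys).
  for (const entry of await listDlxPackagesAsync()) {
    if (!keep.has(entry)) {
      // Throws with a `cause` if deletion fails.
      await removeDlxPackage(entry)
    }
  }
}

// Detect whether the current executable was launched from the DLX cache,
// and wipe the whole cache if a full reset is wanted.
async function resetIfManaged(): Promise<void> {
  if (isInSocketDlx(process.argv[0] ?? '')) {
    await clearDlx()
  }
}
```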
package/dist/dlx.js.map
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../src/dlx.ts"],
|
|
4
|
-
"sourcesContent": ["/** @fileoverview DLX (execute package) utilities for Socket ecosystem shared installations. */\n\nimport { existsSync, promises as fs } from 'node:fs'\n\nimport { readDirNamesSync, safeDelete } from './fs'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { pEach } from './promises'\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path!\n}\n\n/**\n * Clear all DLX package installations.\n */\nexport async function clearDlx(): Promise<void> {\n const packages = await listDlxPackagesAsync()\n await pEach(packages, pkg => removeDlxPackage(pkg))\n}\n\n/**\n * Clear all DLX package installations synchronously.\n */\nexport function clearDlxSync(): void {\n const packages = listDlxPackages()\n for (const pkg of packages) {\n removeDlxPackageSync(pkg)\n }\n}\n\n/**\n * Check if the DLX directory exists.\n */\nexport function dlxDirExists(): boolean {\n return existsSync(getSocketDlxDir())\n}\n\n/**\n * Check if the DLX directory exists asynchronously.\n */\nexport async function dlxDirExistsAsync(): Promise<boolean> {\n try {\n await fs.access(getSocketDlxDir())\n return true\n } catch {\n return false\n }\n}\n\n/**\n * Ensure the DLX directory exists, creating it if necessary.\n */\nexport async function ensureDlxDir(): Promise<void> {\n await fs.mkdir(getSocketDlxDir(), { recursive: true })\n}\n\n/**\n * Ensure the DLX directory exists synchronously, creating it if necessary.\n */\nexport function ensureDlxDirSync(): void {\n const { mkdirSync } = require('node:fs')\n mkdirSync(getSocketDlxDir(), { recursive: true })\n}\n\n/**\n * Get the installed package directory within DLX node_modules.\n */\nexport function getDlxInstalledPackageDir(packageName: string): string {\n const path = getPath()\n return normalizePath(\n path.join(getDlxPackageNodeModulesDir(packageName), packageName),\n )\n}\n\n/**\n * Get the DLX installation directory for a specific package.\n */\nexport function getDlxPackageDir(packageName: string): string {\n const path = getPath()\n return normalizePath(path.join(getSocketDlxDir(), packageName))\n}\n\n/**\n * Get the package.json path for a DLX installed package.\n */\nexport function getDlxPackageJsonPath(packageName: string): string {\n const path = getPath()\n return normalizePath(\n path.join(getDlxInstalledPackageDir(packageName), 'package.json'),\n )\n}\n\n/**\n * Get the node_modules directory for a DLX package installation.\n */\nexport function getDlxPackageNodeModulesDir(packageName: string): string {\n const path = getPath()\n return normalizePath(path.join(getDlxPackageDir(packageName), 'node_modules'))\n}\n\n/**\n * Check if a file path is within the Socket DLX directory.\n * This is useful for determining if a binary or file is managed by Socket's DLX system.\n *\n * @param filePath - Absolute or relative path to check\n * @returns true if the path is within ~/.socket/_dlx/, false otherwise\n *\n * @example\n * ```typescript\n * isInSocketDlx('/home/user/.socket/_dlx/abc123/bin/socket') // true\n * isInSocketDlx('/usr/local/bin/socket') // false\n * isInSocketDlx(process.argv[0]) // Check if current binary is in DLX\n * ```\n */\nexport function isInSocketDlx(filePath: string): boolean {\n if (!filePath) {\n return false\n 
}\n\n const path = getPath()\n const dlxDir = getSocketDlxDir()\n const absolutePath = path.resolve(filePath)\n\n // Check if the absolute path starts with the DLX directory.\n return absolutePath.startsWith(dlxDir + path.sep)\n}\n\n/**\n * Check if a package is installed in DLX.\n */\nexport function isDlxPackageInstalled(packageName: string): boolean {\n return existsSync(getDlxInstalledPackageDir(packageName))\n}\n\n/**\n * Check if a package is installed in DLX asynchronously.\n */\nexport async function isDlxPackageInstalledAsync(\n packageName: string,\n): Promise<boolean> {\n try {\n await fs.access(getDlxInstalledPackageDir(packageName))\n return true\n } catch {\n return false\n }\n}\n\n/**\n * List all packages installed in DLX.\n */\nexport function listDlxPackages(): string[] {\n try {\n return readDirNamesSync(getSocketDlxDir(), { sort: true })\n } catch {\n return []\n }\n}\n\n/**\n * List all packages installed in DLX asynchronously.\n */\nexport async function listDlxPackagesAsync(): Promise<string[]> {\n try {\n const entries = await fs.readdir(getSocketDlxDir(), {\n withFileTypes: true,\n })\n return entries\n .filter(e => e.isDirectory())\n .map(e => e.name)\n .sort()\n } catch {\n return []\n }\n}\n\n/**\n * Remove a DLX package installation.\n */\nexport async function removeDlxPackage(packageName: string): Promise<void> {\n const packageDir = getDlxPackageDir(packageName)\n try {\n await safeDelete(packageDir, { recursive: true, force: true })\n } catch (e) {\n throw new Error(`Failed to remove DLX package \"${packageName}\"`, {\n cause: e,\n })\n }\n}\n\n/**\n * Remove a DLX package installation synchronously.\n */\nexport function removeDlxPackageSync(packageName: string): void {\n const { rmSync } = require('node:fs')\n const packageDir = getDlxPackageDir(packageName)\n try {\n rmSync(packageDir, { recursive: true, force: true })\n } catch (e) {\n throw new Error(`Failed to remove DLX package \"${packageName}\"`, {\n cause: e,\n })\n }\n}\n"],
|
|
5
|
-
"mappings": ";4ZAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,cAAAE,EAAA,iBAAAC,EAAA,iBAAAC,EAAA,sBAAAC,EAAA,iBAAAC,EAAA,qBAAAC,EAAA,8BAAAC,EAAA,qBAAAC,EAAA,0BAAAC,EAAA,gCAAAC,EAAA,0BAAAC,EAAA,+BAAAC,EAAA,kBAAAC,EAAA,oBAAAC,EAAA,yBAAAC,EAAA,qBAAAC,EAAA,yBAAAC,IAAA,eAAAC,
|
|
6
|
-
"names": ["dlx_exports", "__export", "clearDlx", "clearDlxSync", "dlxDirExists", "dlxDirExistsAsync", "ensureDlxDir", "ensureDlxDirSync", "getDlxInstalledPackageDir", "getDlxPackageDir", "getDlxPackageJsonPath", "getDlxPackageNodeModulesDir", "isDlxPackageInstalled", "isDlxPackageInstalledAsync", "isInSocketDlx", "listDlxPackages", "listDlxPackagesAsync", "removeDlxPackage", "removeDlxPackageSync", "__toCommonJS", "import_node_fs", "import_fs", "import_path", "import_paths", "import_promises", "_path", "getPath", "packages", "pkg", "fs", "mkdirSync", "packageName", "filePath", "path", "dlxDir", "e", "packageDir", "rmSync"]
|
|
4
|
+
"sourcesContent": ["/** @fileoverview DLX (execute package) utilities for Socket ecosystem shared installations. */\n\nimport { createHash } from 'node:crypto'\nimport { existsSync, promises as fs } from 'node:fs'\n\nimport { readDirNamesSync, safeDelete } from './fs'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { pEach } from './promises'\n\n/**\n * Generate a cache directory name using npm/npx approach.\n * Uses first 16 characters of SHA-512 hash (like npm/npx).\n *\n * Rationale for SHA-512 truncated (vs full SHA-256):\n * - Matches npm/npx ecosystem behavior\n * - Shorter paths for Windows MAX_PATH compatibility (260 chars)\n * - 16 hex chars = 64 bits = acceptable collision risk for local cache\n * - Collision probability ~1 in 18 quintillion with 1000 entries\n *\n * Input strategy (aligned with npx):\n * - npx uses package spec strings (e.g., '@scope/pkg@1.0.0', 'prettier@3.0.0')\n * - Caller provides complete spec string with version for accurate cache keying\n * - For package installs: Use PURL-style spec with version\n * Examples: 'npm:prettier@3.0.0', 'pypi:requests@2.31.0', 'gem:rails@7.0.0'\n * Note: Socket uses shorthand format without 'pkg:' prefix\n * (handled by @socketregistry/packageurl-js)\n * - For binary downloads: Use URL:name for uniqueness\n *\n * Reference: npm/cli v11.6.2 libnpmexec/lib/index.js#L233-L244\n * https://github.com/npm/cli/blob/v11.6.2/workspaces/libnpmexec/lib/index.js#L233-L244\n * Implementation: packages.map().sort().join('\\n') \u2192 SHA-512 \u2192 slice(0,16)\n * npx hashes the package spec (name@version), not just name\n */\nexport function generateCacheKey(spec: string): string {\n return createHash('sha512').update(spec).digest('hex').substring(0, 16)\n}\n\nlet _path: typeof import('path') | undefined\n/**\n * Lazily load the path module to avoid Webpack errors.\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPath() {\n if (_path === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _path = /*@__PURE__*/ require('node:path')\n }\n return _path!\n}\n\n/**\n * Clear all DLX package installations.\n */\nexport async function clearDlx(): Promise<void> {\n const packages = await listDlxPackagesAsync()\n await pEach(packages, pkg => removeDlxPackage(pkg))\n}\n\n/**\n * Clear all DLX package installations synchronously.\n */\nexport function clearDlxSync(): void {\n const packages = listDlxPackages()\n for (const pkg of packages) {\n removeDlxPackageSync(pkg)\n }\n}\n\n/**\n * Check if the DLX directory exists.\n */\nexport function dlxDirExists(): boolean {\n return existsSync(getSocketDlxDir())\n}\n\n/**\n * Check if the DLX directory exists asynchronously.\n */\nexport async function dlxDirExistsAsync(): Promise<boolean> {\n try {\n await fs.access(getSocketDlxDir())\n return true\n } catch {\n return false\n }\n}\n\n/**\n * Ensure the DLX directory exists, creating it if necessary.\n */\nexport async function ensureDlxDir(): Promise<void> {\n await fs.mkdir(getSocketDlxDir(), { recursive: true })\n}\n\n/**\n * Ensure the DLX directory exists synchronously, creating it if necessary.\n */\nexport function ensureDlxDirSync(): void {\n const { mkdirSync } = require('node:fs')\n mkdirSync(getSocketDlxDir(), { recursive: true })\n}\n\n/**\n * Get the installed package directory within DLX node_modules.\n */\nexport function getDlxInstalledPackageDir(packageName: string): string {\n const path = getPath()\n return normalizePath(\n 
path.join(getDlxPackageNodeModulesDir(packageName), packageName),\n )\n}\n\n/**\n * Get the DLX installation directory for a specific package.\n */\nexport function getDlxPackageDir(packageName: string): string {\n const path = getPath()\n return normalizePath(path.join(getSocketDlxDir(), packageName))\n}\n\n/**\n * Get the package.json path for a DLX installed package.\n */\nexport function getDlxPackageJsonPath(packageName: string): string {\n const path = getPath()\n return normalizePath(\n path.join(getDlxInstalledPackageDir(packageName), 'package.json'),\n )\n}\n\n/**\n * Get the node_modules directory for a DLX package installation.\n */\nexport function getDlxPackageNodeModulesDir(packageName: string): string {\n const path = getPath()\n return normalizePath(path.join(getDlxPackageDir(packageName), 'node_modules'))\n}\n\n/**\n * Check if a file path is within the Socket DLX directory.\n * This is useful for determining if a binary or file is managed by Socket's DLX system.\n *\n * @param filePath - Absolute or relative path to check\n * @returns true if the path is within ~/.socket/_dlx/, false otherwise\n *\n * @example\n * ```typescript\n * isInSocketDlx('/home/user/.socket/_dlx/abc123/bin/socket') // true\n * isInSocketDlx('/usr/local/bin/socket') // false\n * isInSocketDlx(process.argv[0]) // Check if current binary is in DLX\n * ```\n */\nexport function isInSocketDlx(filePath: string): boolean {\n if (!filePath) {\n return false\n }\n\n const path = getPath()\n const dlxDir = getSocketDlxDir()\n const absolutePath = path.resolve(filePath)\n\n // Check if the absolute path starts with the DLX directory.\n return absolutePath.startsWith(dlxDir + path.sep)\n}\n\n/**\n * Check if a package is installed in DLX.\n */\nexport function isDlxPackageInstalled(packageName: string): boolean {\n return existsSync(getDlxInstalledPackageDir(packageName))\n}\n\n/**\n * Check if a package is installed in DLX asynchronously.\n */\nexport async function isDlxPackageInstalledAsync(\n packageName: string,\n): Promise<boolean> {\n try {\n await fs.access(getDlxInstalledPackageDir(packageName))\n return true\n } catch {\n return false\n }\n}\n\n/**\n * List all packages installed in DLX.\n */\nexport function listDlxPackages(): string[] {\n try {\n return readDirNamesSync(getSocketDlxDir(), { sort: true })\n } catch {\n return []\n }\n}\n\n/**\n * List all packages installed in DLX asynchronously.\n */\nexport async function listDlxPackagesAsync(): Promise<string[]> {\n try {\n const entries = await fs.readdir(getSocketDlxDir(), {\n withFileTypes: true,\n })\n return entries\n .filter(e => e.isDirectory())\n .map(e => e.name)\n .sort()\n } catch {\n return []\n }\n}\n\n/**\n * Remove a DLX package installation.\n */\nexport async function removeDlxPackage(packageName: string): Promise<void> {\n const packageDir = getDlxPackageDir(packageName)\n try {\n await safeDelete(packageDir, { recursive: true, force: true })\n } catch (e) {\n throw new Error(`Failed to remove DLX package \"${packageName}\"`, {\n cause: e,\n })\n }\n}\n\n/**\n * Remove a DLX package installation synchronously.\n */\nexport function removeDlxPackageSync(packageName: string): void {\n const { rmSync } = require('node:fs')\n const packageDir = getDlxPackageDir(packageName)\n try {\n rmSync(packageDir, { recursive: true, force: true })\n } catch (e) {\n throw new Error(`Failed to remove DLX package \"${packageName}\"`, {\n cause: e,\n })\n }\n}\n"],
|
|
5
|
+
"mappings": ";4ZAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,cAAAE,EAAA,iBAAAC,EAAA,iBAAAC,EAAA,sBAAAC,EAAA,iBAAAC,EAAA,qBAAAC,EAAA,qBAAAC,EAAA,8BAAAC,EAAA,qBAAAC,EAAA,0BAAAC,EAAA,gCAAAC,EAAA,0BAAAC,EAAA,+BAAAC,EAAA,kBAAAC,EAAA,oBAAAC,EAAA,yBAAAC,EAAA,qBAAAC,EAAA,yBAAAC,IAAA,eAAAC,EAAApB,GAEA,IAAAqB,EAA2B,uBAC3BC,EAA2C,mBAE3CC,EAA6C,gBAC7CC,EAA8B,kBAC9BC,EAAgC,mBAChCC,EAAsB,sBA0Bf,SAASlB,EAAiBmB,EAAsB,CACrD,SAAO,cAAW,QAAQ,EAAE,OAAOA,CAAI,EAAE,OAAO,KAAK,EAAE,UAAU,EAAG,EAAE,CACxE,CAEA,IAAIC,EAMJ,SAASC,GAAU,CACjB,OAAID,IAAU,SAGZA,EAAsB,QAAQ,WAAW,GAEpCA,CACT,CAKA,eAAsB1B,GAA0B,CAC9C,MAAM4B,EAAW,MAAMb,EAAqB,EAC5C,QAAM,SAAMa,EAAUC,GAAOb,EAAiBa,CAAG,CAAC,CACpD,CAKO,SAAS5B,GAAqB,CACnC,MAAM2B,EAAWd,EAAgB,EACjC,UAAWe,KAAOD,EAChBX,EAAqBY,CAAG,CAE5B,CAKO,SAAS3B,GAAwB,CACtC,SAAO,iBAAW,mBAAgB,CAAC,CACrC,CAKA,eAAsBC,GAAsC,CAC1D,GAAI,CACF,aAAM,EAAA2B,SAAG,UAAO,mBAAgB,CAAC,EAC1B,EACT,MAAQ,CACN,MAAO,EACT,CACF,CAKA,eAAsB1B,GAA8B,CAClD,MAAM,EAAA0B,SAAG,SAAM,mBAAgB,EAAG,CAAE,UAAW,EAAK,CAAC,CACvD,CAKO,SAASzB,GAAyB,CACvC,KAAM,CAAE,UAAA0B,CAAU,EAAI,QAAQ,SAAS,EACvCA,KAAU,mBAAgB,EAAG,CAAE,UAAW,EAAK,CAAC,CAClD,CAKO,SAASxB,EAA0ByB,EAA6B,CAErE,SAAO,iBADML,EAAQ,EAEd,KAAKjB,EAA4BsB,CAAW,EAAGA,CAAW,CACjE,CACF,CAKO,SAASxB,EAAiBwB,EAA6B,CAE5D,SAAO,iBADML,EAAQ,EACK,QAAK,mBAAgB,EAAGK,CAAW,CAAC,CAChE,CAKO,SAASvB,EAAsBuB,EAA6B,CAEjE,SAAO,iBADML,EAAQ,EAEd,KAAKpB,EAA0ByB,CAAW,EAAG,cAAc,CAClE,CACF,CAKO,SAAStB,EAA4BsB,EAA6B,CAEvE,SAAO,iBADML,EAAQ,EACK,KAAKnB,EAAiBwB,CAAW,EAAG,cAAc,CAAC,CAC/E,CAgBO,SAASnB,EAAcoB,EAA2B,CACvD,GAAI,CAACA,EACH,MAAO,GAGT,MAAMC,EAAOP,EAAQ,EACfQ,KAAS,mBAAgB,EAI/B,OAHqBD,EAAK,QAAQD,CAAQ,EAGtB,WAAWE,EAASD,EAAK,GAAG,CAClD,CAKO,SAASvB,EAAsBqB,EAA8B,CAClE,SAAO,cAAWzB,EAA0ByB,CAAW,CAAC,CAC1D,CAKA,eAAsBpB,EACpBoB,EACkB,CAClB,GAAI,CACF,aAAM,EAAAF,SAAG,OAAOvB,EAA0ByB,CAAW,CAAC,EAC/C,EACT,MAAQ,CACN,MAAO,EACT,CACF,CAKO,SAASlB,GAA4B,CAC1C,GAAI,CACF,SAAO,uBAAiB,mBAAgB,EAAG,CAAE,KAAM,EAAK,CAAC,CAC3D,MAAQ,CACN,MAAO,CAAC,CACV,CACF,CAKA,eAAsBC,GAA0C,CAC9D,GAAI,CAIF,OAHgB,MAAM,EAAAe,SAAG,WAAQ,mBAAgB,EAAG,CAClD,cAAe,EACjB,CAAC,GAEE,OAAOM,GAAKA,EAAE,YAAY,CAAC,EAC3B,IAAIA,GAAKA,EAAE,IAAI,EACf,KAAK,CACV,MAAQ,CACN,MAAO,CAAC,CACV,CACF,CAKA,eAAsBpB,EAAiBgB,EAAoC,CACzE,MAAMK,EAAa7B,EAAiBwB,CAAW,EAC/C,GAAI,CACF,QAAM,cAAWK,EAAY,CAAE,UAAW,GAAM,MAAO,EAAK,CAAC,CAC/D,OAAS,EAAG,CACV,MAAM,IAAI,MAAM,iCAAiCL,CAAW,IAAK,CAC/D,MAAO,CACT,CAAC,CACH,CACF,CAKO,SAASf,EAAqBe,EAA2B,CAC9D,KAAM,CAAE,OAAAM,CAAO,EAAI,QAAQ,SAAS,EAC9BD,EAAa7B,EAAiBwB,CAAW,EAC/C,GAAI,CACFM,EAAOD,EAAY,CAAE,UAAW,GAAM,MAAO,EAAK,CAAC,CACrD,OAASD,EAAG,CACV,MAAM,IAAI,MAAM,iCAAiCJ,CAAW,IAAK,CAC/D,MAAOI,CACT,CAAC,CACH,CACF",
|
|
6
|
+
"names": ["dlx_exports", "__export", "clearDlx", "clearDlxSync", "dlxDirExists", "dlxDirExistsAsync", "ensureDlxDir", "ensureDlxDirSync", "generateCacheKey", "getDlxInstalledPackageDir", "getDlxPackageDir", "getDlxPackageJsonPath", "getDlxPackageNodeModulesDir", "isDlxPackageInstalled", "isDlxPackageInstalledAsync", "isInSocketDlx", "listDlxPackages", "listDlxPackagesAsync", "removeDlxPackage", "removeDlxPackageSync", "__toCommonJS", "import_node_crypto", "import_node_fs", "import_fs", "import_path", "import_paths", "import_promises", "spec", "_path", "getPath", "packages", "pkg", "fs", "mkdirSync", "packageName", "filePath", "path", "dlxDir", "e", "packageDir", "rmSync"]
|
|
7
7
|
}
|
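Putting the two modules together, the cache key from dlx.ts determines where dlx-package.ts extracts a spec. The sketch below illustrates the resulting on-disk layout, assuming the ~/.socket/_dlx location documented above; the helper is illustrative only, not a library export, and the home-directory resolution is an approximation of `getSocketDlxDir()`.

```typescript
import { createHash } from 'node:crypto'
import os from 'node:os'
import path from 'node:path'

// Illustrative only: mirrors generateCacheKey plus the
// <dlxDir>/<cacheKey>/node_modules/<name> layout used by ensurePackageInstalled.
function expectedInstallDir(spec: string, packageName: string): string {
  const cacheKey = createHash('sha512').update(spec).digest('hex').slice(0, 16)
  // getSocketDlxDir() resolves to ~/.socket/_dlx in the library; approximated here.
  const dlxDir = path.join(os.homedir(), '.socket', '_dlx')
  return path.join(dlxDir, cacheKey, 'node_modules', packageName)
}

expectedInstallDir('cowsay@1.5.0', 'cowsay')
// => <home>/.socket/_dlx/<16-char hash>/node_modules/cowsay
```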