@socketsecurity/lib 2.10.3 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. package/CHANGELOG.md +38 -0
  2. package/README.md +231 -40
  3. package/dist/constants/platform.js +1 -1
  4. package/dist/constants/platform.js.map +3 -3
  5. package/dist/cover/code.js +1 -1
  6. package/dist/cover/code.js.map +3 -3
  7. package/dist/debug.js +2 -2
  8. package/dist/debug.js.map +3 -3
  9. package/dist/dlx-binary.d.ts +29 -6
  10. package/dist/dlx-binary.js +7 -7
  11. package/dist/dlx-binary.js.map +3 -3
  12. package/dist/dlx-package.d.ts +16 -1
  13. package/dist/dlx-package.js +7 -7
  14. package/dist/dlx-package.js.map +3 -3
  15. package/dist/dlx.js +4 -4
  16. package/dist/dlx.js.map +3 -3
  17. package/dist/env/rewire.js +1 -1
  18. package/dist/env/rewire.js.map +3 -3
  19. package/dist/env/socket-cli.d.ts +7 -0
  20. package/dist/env/socket-cli.js +1 -1
  21. package/dist/env/socket-cli.js.map +2 -2
  22. package/dist/external/yoctocolors-cjs.d.ts +14 -0
  23. package/dist/fs.d.ts +82 -27
  24. package/dist/fs.js +7 -7
  25. package/dist/fs.js.map +3 -3
  26. package/dist/git.js +1 -1
  27. package/dist/git.js.map +3 -3
  28. package/dist/http-request.js +1 -1
  29. package/dist/http-request.js.map +3 -3
  30. package/dist/ipc.js +1 -1
  31. package/dist/ipc.js.map +3 -3
  32. package/dist/links/index.d.ts +65 -0
  33. package/dist/links/index.js +3 -0
  34. package/dist/links/index.js.map +7 -0
  35. package/dist/logger.d.ts +21 -18
  36. package/dist/logger.js +1 -1
  37. package/dist/logger.js.map +3 -3
  38. package/dist/packages/isolation.js +1 -1
  39. package/dist/packages/isolation.js.map +3 -3
  40. package/dist/paths.js +1 -1
  41. package/dist/paths.js.map +2 -2
  42. package/dist/process-lock.js +2 -2
  43. package/dist/process-lock.js.map +3 -3
  44. package/dist/promises.d.ts +6 -21
  45. package/dist/promises.js +1 -1
  46. package/dist/promises.js.map +2 -2
  47. package/dist/prompts/index.d.ts +115 -0
  48. package/dist/prompts/index.js +3 -0
  49. package/dist/prompts/index.js.map +7 -0
  50. package/dist/spinner.d.ts +33 -23
  51. package/dist/spinner.js +1 -1
  52. package/dist/spinner.js.map +3 -3
  53. package/dist/stdio/mask.d.ts +2 -2
  54. package/dist/stdio/mask.js +4 -4
  55. package/dist/stdio/mask.js.map +3 -3
  56. package/dist/stdio/stdout.js +1 -1
  57. package/dist/stdio/stdout.js.map +3 -3
  58. package/dist/themes/context.d.ts +80 -0
  59. package/dist/themes/context.js +3 -0
  60. package/dist/themes/context.js.map +7 -0
  61. package/dist/themes/index.d.ts +53 -0
  62. package/dist/themes/index.js +3 -0
  63. package/dist/themes/index.js.map +7 -0
  64. package/dist/themes/themes.d.ts +49 -0
  65. package/dist/themes/themes.js +3 -0
  66. package/dist/themes/themes.js.map +7 -0
  67. package/dist/themes/types.d.ts +92 -0
  68. package/dist/themes/types.js +3 -0
  69. package/dist/themes/types.js.map +7 -0
  70. package/dist/themes/utils.d.ts +78 -0
  71. package/dist/themes/utils.js +3 -0
  72. package/dist/themes/utils.js.map +7 -0
  73. package/package.json +40 -8
  74. package/dist/download-lock.d.ts +0 -49
  75. package/dist/download-lock.js +0 -10
  76. package/dist/download-lock.js.map +0 -7
package/dist/dlx-binary.d.ts
@@ -1,17 +1,40 @@
  import type { SpawnExtra, SpawnOptions } from './spawn';
  import { spawn } from './spawn';
  export interface DlxBinaryOptions {
- /** URL to download the binary from. */
+ /**
+  * URL to download the binary from.
+  */
  url: string;
- /** Optional name for the cached binary (defaults to URL hash). */
+ /**
+  * Optional name for the cached binary (defaults to URL hash).
+  */
  name?: string | undefined;
- /** Expected checksum (sha256) for verification. */
+ /**
+  * Expected checksum (sha256) for verification.
+  */
  checksum?: string | undefined;
- /** Cache TTL in milliseconds (default: 7 days). */
+ /**
+  * Cache TTL in milliseconds (default: 7 days).
+  */
  cacheTtl?: number | undefined;
- /** Force re-download even if cached. */
+ /**
+  * Force re-download even if cached.
+  * Aligns with npm/npx --force flag.
+  */
  force?: boolean | undefined;
- /** Additional spawn options. */
+ /**
+  * Skip confirmation prompts (auto-approve).
+  * Aligns with npx --yes/-y flag.
+  */
+ yes?: boolean | undefined;
+ /**
+  * Suppress output (quiet mode).
+  * Aligns with npx --quiet/-q and pnpm --silent/-s flags.
+  */
+ quiet?: boolean | undefined;
+ /**
+  * Additional spawn options.
+  */
  spawnOptions?: SpawnOptions | undefined;
 }
 export interface DlxBinaryResult {
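
The new yes and quiet options shown above bring DlxBinaryOptions in line with the npx --yes/--quiet and pnpm --silent flags. A minimal usage sketch, assuming the module is exposed at a subpath like '@socketsecurity/lib/dlx-binary' (check the package's exports map); the download URL and binary name below are hypothetical:

    import { dlxBinary } from '@socketsecurity/lib/dlx-binary'

    async function main(): Promise<void> {
      // Download (with caching) and run a standalone binary, auto-approving
      // prompts and suppressing output via the options added in 3.0.0.
      const { binaryPath, downloaded, spawnPromise } = await dlxBinary(
        ['--version'],
        {
          url: 'https://example.com/releases/tool-linux-x64', // hypothetical URL
          name: 'tool-linux-x64',                              // hypothetical cache name
          yes: true,   // auto-approve, like `npx --yes`
          quiet: true, // suppress output, like `npx --quiet` / `pnpm --silent`
        },
      )
      await spawnPromise
      console.log(`Executed ${binaryPath} (freshly downloaded: ${downloaded})`)
    }

    main().catch(console.error)
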
package/dist/dlx-binary.js
@@ -1,9 +1,9 @@
  /* Socket Lib - Built with esbuild */
- var K=Object.create;var P=Object.defineProperty;var X=Object.getOwnPropertyDescriptor;var J=Object.getOwnPropertyNames;var Y=Object.getPrototypeOf,q=Object.prototype.hasOwnProperty;var V=(e,t)=>{for(var r in t)P(e,r,{get:t[r],enumerable:!0})},T=(e,t,r,i)=>{if(t&&typeof t=="object"||typeof t=="function")for(let n of J(t))!q.call(e,n)&&n!==r&&P(e,n,{get:()=>t[n],enumerable:!(i=X(t,n))||i.enumerable});return e};var j=(e,t,r)=>(r=e!=null?K(Y(e)):{},T(t||!e||!e.__esModule?P(r,"default",{value:e,enumerable:!0}):r,e)),W=e=>T(P({},"__esModule",{value:!0}),e);var et={};V(et,{cleanDlxCache:()=>U,dlxBinary:()=>G,downloadBinary:()=>Q,executeBinary:()=>Z,getDlxCachePath:()=>x,listDlxCache:()=>tt});module.exports=W(et);var S=require("node:crypto"),s=require("node:fs"),D=j(require("node:os")),f=j(require("node:path")),_=require("#constants/platform"),O=require("./dlx"),B=require("./http-request"),d=require("./fs"),v=require("./objects"),R=require("./path"),I=require("./paths"),N=require("./process-lock"),A=require("./spawn");function k(e){return f.default.join(e,".dlx-metadata.json")}async function F(e,t){try{const r=k(e);if(!(0,s.existsSync)(r))return!1;const i=await(0,d.readJson)(r,{throws:!1});if(!(0,v.isObjectObject)(i))return!1;const n=Date.now(),a=i.timestamp;return typeof a!="number"||a<=0?!1:n-a<t}catch{return!1}}async function L(e,t,r){const i=f.default.dirname(t),n=f.default.join(i,"concurrency.lock");return await N.processLock.withLock(n,async()=>{if((0,s.existsSync)(t)&&(await s.promises.stat(t)).size>0){const p=await s.promises.readFile(t),l=(0,S.createHash)("sha256");return l.update(p),l.digest("hex")}try{await(0,B.httpDownload)(e,t)}catch(o){throw new Error(`Failed to download binary from ${e}
- Destination: ${t}
- Check your internet connection or verify the URL is accessible.`,{cause:o})}const a=await s.promises.readFile(t),c=(0,S.createHash)("sha256");c.update(a);const u=c.digest("hex");if(r&&u!==r)throw await(0,d.safeDelete)(t),new Error(`Checksum mismatch: expected ${r}, got ${u}`);return _.WIN32||await s.promises.chmod(t,493),u},{staleMs:5e3,touchIntervalMs:2e3})}async function z(e,t,r,i,n){const a=k(e),c={version:"1.0.0",cache_key:t,timestamp:Date.now(),checksum:i,checksum_algorithm:"sha256",platform:D.default.platform(),arch:D.default.arch(),size:n,source:{type:"download",url:r}};await s.promises.writeFile(a,JSON.stringify(c,null,2))}async function U(e=require("#constants/time").DLX_BINARY_CACHE_TTL){const t=x();if(!(0,s.existsSync)(t))return 0;let r=0;const i=Date.now(),n=await s.promises.readdir(t);for(const a of n){const c=f.default.join(t,a),u=k(c);try{if(!await(0,d.isDir)(c))continue;const o=await(0,d.readJson)(u,{throws:!1});if(!o||typeof o!="object"||Array.isArray(o))continue;const p=o.timestamp;(typeof p=="number"&&p>0?i-p:Number.POSITIVE_INFINITY)>e&&(await(0,d.safeDelete)(c,{force:!0,recursive:!0}),r+=1)}catch{try{(await s.promises.readdir(c)).length||(await(0,d.safeDelete)(c),r+=1)}catch{}}}return r}async function G(e,t,r){const{cacheTtl:i=require("#constants/time").DLX_BINARY_CACHE_TTL,checksum:n,force:a=!1,name:c,spawnOptions:u,url:o}={__proto__:null,...t},p=x(),l=c||`binary-${process.platform}-${D.default.arch()}`,E=`${o}:${l}`,h=(0,O.generateCacheKey)(E),m=f.default.join(p,h),g=(0,R.normalizePath)(f.default.join(m,l));let w=!1,b=n;if(!a&&(0,s.existsSync)(m)&&await F(m,i))try{const $=k(m),y=await(0,d.readJson)($,{throws:!1});y&&typeof y=="object"&&!Array.isArray(y)&&typeof y.checksum=="string"?b=y.checksum:w=!0}catch{w=!0}else w=!0;if(w){try{await s.promises.mkdir(m,{recursive:!0})}catch(y){const C=y.code;throw C==="EACCES"||C==="EPERM"?new Error(`Permission denied creating binary cache directory: ${m}
- Please check directory permissions or run with appropriate access.`,{cause:y}):C==="EROFS"?new Error(`Cannot create binary cache directory on read-only filesystem: ${m}
- Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.`,{cause:y}):new Error(`Failed to create binary cache directory: ${m}`,{cause:y})}b=await L(o,g,n);const $=await s.promises.stat(g);await z(m,h,o,b||"",$.size)}const H=_.WIN32&&/\.(?:bat|cmd|ps1)$/i.test(g)?{...u,env:{...u?.env,PATH:`${m}${f.default.delimiter}${process.env.PATH||""}`},shell:!0}:u,M=(0,A.spawn)(g,e,H,r);return{binaryPath:g,downloaded:w,spawnPromise:M}}async function Q(e){const{cacheTtl:t=require("#constants/time").DLX_BINARY_CACHE_TTL,checksum:r,force:i=!1,name:n,url:a}={__proto__:null,...e},c=x(),u=n||`binary-${process.platform}-${D.default.arch()}`,o=`${a}:${u}`,p=(0,O.generateCacheKey)(o),l=f.default.join(c,p),E=(0,R.normalizePath)(f.default.join(l,u));let h=!1;if(!i&&(0,s.existsSync)(l)&&await F(l,t))h=!1;else{try{await s.promises.mkdir(l,{recursive:!0})}catch(w){const b=w.code;throw b==="EACCES"||b==="EPERM"?new Error(`Permission denied creating binary cache directory: ${l}
- Please check directory permissions or run with appropriate access.`,{cause:w}):b==="EROFS"?new Error(`Cannot create binary cache directory on read-only filesystem: ${l}
- Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.`,{cause:w}):new Error(`Failed to create binary cache directory: ${l}`,{cause:w})}const m=await L(a,E,r),g=await s.promises.stat(E);await z(l,p,a,m||"",g.size),h=!0}return{binaryPath:E,downloaded:h}}function Z(e,t,r,i){const n=_.WIN32&&/\.(?:bat|cmd|ps1)$/i.test(e),a=f.default.dirname(e),c=n?{...r,env:{...r?.env,PATH:`${a}${f.default.delimiter}${process.env.PATH||""}`},shell:!0}:r;return(0,A.spawn)(e,t,c,i)}function x(){return(0,I.getSocketDlxDir)()}async function tt(){const e=x();if(!(0,s.existsSync)(e))return[];const t=[],r=Date.now(),i=await s.promises.readdir(e);for(const n of i){const a=f.default.join(e,n);try{if(!await(0,d.isDir)(a))continue;const c=k(a),u=await(0,d.readJson)(c,{throws:!1});if(!u||typeof u!="object"||Array.isArray(u))continue;const o=u,l=o.source?.url||o.url||"",h=(await s.promises.readdir(a)).find(m=>!m.startsWith("."));if(h){const m=f.default.join(a,h),g=await s.promises.stat(m);t.push({age:r-(o.timestamp||0),arch:o.arch||"unknown",checksum:o.checksum||"",name:h,platform:o.platform||"unknown",size:g.size,url:l})}}catch{}}return t}0&&(module.exports={cleanDlxCache,dlxBinary,downloadBinary,executeBinary,getDlxCachePath,listDlxCache});
+ var Y=Object.create;var P=Object.defineProperty;var V=Object.getOwnPropertyDescriptor;var W=Object.getOwnPropertyNames;var U=Object.getPrototypeOf,G=Object.prototype.hasOwnProperty;var Q=(t,e)=>{for(var n in e)P(t,n,{get:e[n],enumerable:!0})},F=(t,e,n,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let r of W(e))!G.call(t,r)&&r!==n&&P(t,r,{get:()=>e[r],enumerable:!(s=V(e,r))||s.enumerable});return t};var I=(t,e,n)=>(n=t!=null?Y(U(t)):{},F(e||!t||!t.__esModule?P(n,"default",{value:t,enumerable:!0}):n,t)),Z=t=>F(P({},"__esModule",{value:!0}),t);var ae={};Q(ae,{cleanDlxCache:()=>ee,dlxBinary:()=>te,downloadBinary:()=>ne,executeBinary:()=>re,getDlxCachePath:()=>_,listDlxCache:()=>se});module.exports=Z(ae);var T=require("crypto"),k=I(require("os")),p=I(require("path")),$=require("#constants/platform"),j=require("./dlx"),N=require("./http-request"),f=require("./fs"),L=require("./objects"),B=require("./path"),z=require("./paths"),H=require("./process-lock"),v=require("./spawn");let A;function b(){return A===void 0&&(A=require("node:fs")),A}function S(t){return p.default.join(t,".dlx-metadata.json")}async function M(t,e){const n=b();try{const s=S(t);if(!n.existsSync(s))return!1;const r=await(0,f.readJson)(s,{throws:!1});if(!(0,L.isObjectObject)(r))return!1;const a=Date.now(),o=r.timestamp;return typeof o!="number"||o<=0?!1:a-o<e}catch{return!1}}async function q(t,e,n){const s=p.default.dirname(e),r=p.default.join(s,"concurrency.lock");return await H.processLock.withLock(r,async()=>{const a=b();if(a.existsSync(e)&&(await a.promises.stat(e)).size>0){const u=await a.promises.readFile(e),d=(0,T.createHash)("sha256");return d.update(u),d.digest("hex")}try{await(0,N.httpDownload)(t,e)}catch(i){throw new Error(`Failed to download binary from ${t}
+ Destination: ${e}
+ Check your internet connection or verify the URL is accessible.`,{cause:i})}const o=await a.promises.readFile(e),c=(0,T.createHash)("sha256");c.update(o);const m=c.digest("hex");if(n&&m!==n)throw await(0,f.safeDelete)(e),new Error(`Checksum mismatch: expected ${n}, got ${m}`);return $.WIN32||await a.promises.chmod(e,493),m},{staleMs:5e3,touchIntervalMs:2e3})}async function K(t,e,n,s,r){const a=S(t),o={version:"1.0.0",cache_key:e,timestamp:Date.now(),checksum:s,checksum_algorithm:"sha256",platform:k.default.platform(),arch:k.default.arch(),size:r,source:{type:"download",url:n}};await b().promises.writeFile(a,JSON.stringify(o,null,2))}async function ee(t=require("#constants/time").DLX_BINARY_CACHE_TTL){const e=_(),n=b();if(!n.existsSync(e))return 0;let s=0;const r=Date.now(),a=await n.promises.readdir(e);for(const o of a){const c=p.default.join(e,o),m=S(c);try{if(!await(0,f.isDir)(c))continue;const i=await(0,f.readJson)(m,{throws:!1});if(!i||typeof i!="object"||Array.isArray(i))continue;const u=i.timestamp;(typeof u=="number"&&u>0?r-u:Number.POSITIVE_INFINITY)>t&&(await(0,f.safeDelete)(c,{force:!0,recursive:!0}),s+=1)}catch{try{(await n.promises.readdir(c)).length||(await(0,f.safeDelete)(c),s+=1)}catch{}}}return s}async function te(t,e,n){const{cacheTtl:s=require("#constants/time").DLX_BINARY_CACHE_TTL,checksum:r,force:a=!1,name:o,spawnOptions:c,url:m,yes:i}={__proto__:null,...e},u=i===!0?!0:a,d=_(),x=o||`binary-${process.platform}-${k.default.arch()}`,w=`${m}:${x}`,g=(0,j.generateCacheKey)(w),l=p.default.join(d,g),h=(0,B.normalizePath)(p.default.join(l,x)),E=b();let D=!1,C=r;if(!u&&E.existsSync(l)&&await M(l,s))try{const O=S(l),y=await(0,f.readJson)(O,{throws:!1});y&&typeof y=="object"&&!Array.isArray(y)&&typeof y.checksum=="string"?C=y.checksum:D=!0}catch{D=!0}else D=!0;if(D){try{await(0,f.safeMkdir)(l,{recursive:!0})}catch(y){const R=y.code;throw R==="EACCES"||R==="EPERM"?new Error(`Permission denied creating binary cache directory: ${l}
+ Please check directory permissions or run with appropriate access.`,{cause:y}):R==="EROFS"?new Error(`Cannot create binary cache directory on read-only filesystem: ${l}
+ Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.`,{cause:y}):new Error(`Failed to create binary cache directory: ${l}`,{cause:y})}C=await q(m,h,r);const O=await E.promises.stat(h);await K(l,g,m,C||"",O.size)}const X=$.WIN32&&/\.(?:bat|cmd|ps1)$/i.test(h)?{...c,env:{...c?.env,PATH:`${l}${p.default.delimiter}${process.env.PATH||""}`},shell:!0}:c,J=(0,v.spawn)(h,t,X,n);return{binaryPath:h,downloaded:D,spawnPromise:J}}async function ne(t){const{cacheTtl:e=require("#constants/time").DLX_BINARY_CACHE_TTL,checksum:n,force:s=!1,name:r,url:a}={__proto__:null,...t},o=_(),c=r||`binary-${process.platform}-${k.default.arch()}`,m=`${a}:${c}`,i=(0,j.generateCacheKey)(m),u=p.default.join(o,i),d=(0,B.normalizePath)(p.default.join(u,c)),x=b();let w=!1;if(!s&&x.existsSync(u)&&await M(u,e))w=!1;else{try{await(0,f.safeMkdir)(u,{recursive:!0})}catch(h){const E=h.code;throw E==="EACCES"||E==="EPERM"?new Error(`Permission denied creating binary cache directory: ${u}
+ Please check directory permissions or run with appropriate access.`,{cause:h}):E==="EROFS"?new Error(`Cannot create binary cache directory on read-only filesystem: ${u}
+ Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.`,{cause:h}):new Error(`Failed to create binary cache directory: ${u}`,{cause:h})}const g=await q(a,d,n),l=await x.promises.stat(d);await K(u,i,a,g||"",l.size),w=!0}return{binaryPath:d,downloaded:w}}function re(t,e,n,s){const r=$.WIN32&&/\.(?:bat|cmd|ps1)$/i.test(t),a=p.default.dirname(t),o=r?{...n,env:{...n?.env,PATH:`${a}${p.default.delimiter}${process.env.PATH||""}`},shell:!0}:n;return(0,v.spawn)(t,e,o,s)}function _(){return(0,z.getSocketDlxDir)()}async function se(){const t=_(),e=b();if(!e.existsSync(t))return[];const n=[],s=Date.now(),r=await e.promises.readdir(t);for(const a of r){const o=p.default.join(t,a);try{if(!await(0,f.isDir)(o))continue;const c=S(o),m=await(0,f.readJson)(c,{throws:!1});if(!m||typeof m!="object"||Array.isArray(m))continue;const i=m,d=i.source?.url||i.url||"",w=(await e.promises.readdir(o)).find(g=>!g.startsWith("."));if(w){const g=p.default.join(o,w),l=await e.promises.stat(g);n.push({age:s-(i.timestamp||0),arch:i.arch||"unknown",checksum:i.checksum||"",name:w,platform:i.platform||"unknown",size:l.size,url:d})}}catch{}}return n}0&&(module.exports={cleanDlxCache,dlxBinary,downloadBinary,executeBinary,getDlxCachePath,listDlxCache});
  //# sourceMappingURL=dlx-binary.js.map
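
Aside from the new options, the rebuilt bundle above replaces the module-level require of node:fs with a lazily loaded, cached reference (the getFs helper visible in the updated source map below). A minimal sketch of that lazy-require pattern, with names mirroring the bundled helper for illustration only:

    // Cache the fs module after first use instead of loading it at import time.
    let _fs: typeof import('fs') | undefined

    function getFs(): typeof import('fs') {
      if (_fs === undefined) {
        _fs = require('node:fs')
      }
      return _fs as typeof import('fs')
    }

    // Call sites then use getFs().existsSync(...) or getFs().promises.stat(...)
    // rather than importing existsSync and promises at the top of the module.
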
package/dist/dlx-binary.js.map
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../src/dlx-binary.ts"],
- "sourcesContent": ["/** @fileoverview DLX binary execution utilities for Socket ecosystem. */\n\nimport { createHash } from 'node:crypto'\nimport { existsSync, promises as fs } from 'node:fs'\nimport os from 'node:os'\nimport path from 'node:path'\n\nimport { WIN32 } from '#constants/platform'\n\nimport { generateCacheKey } from './dlx'\nimport { httpDownload } from './http-request'\nimport { isDir, readJson, safeDelete } from './fs'\nimport { isObjectObject } from './objects'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nexport interface DlxBinaryOptions {\n /** URL to download the binary from. */\n url: string\n /** Optional name for the cached binary (defaults to URL hash). */\n name?: string | undefined\n /** Expected checksum (sha256) for verification. */\n checksum?: string | undefined\n /** Cache TTL in milliseconds (default: 7 days). */\n cacheTtl?: number | undefined\n /** Force re-download even if cached. */\n force?: boolean | undefined\n /** Additional spawn options. */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxBinaryResult {\n /** Path to the cached binary. */\n binaryPath: string\n /** Whether the binary was newly downloaded. */\n downloaded: boolean\n /** The spawn promise for the running process. */\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Metadata structure for cached binaries (.dlx-metadata.json).\n * Unified schema shared across TypeScript (dlxBinary) and C++ (socket_macho_decompress).\n *\n * Core Fields (present in all implementations):\n * - version: Schema version (currently \"1.0.0\")\n * - cache_key: First 16 chars of SHA-512 hash (matches directory name)\n * - timestamp: Unix timestamp in milliseconds\n * - checksum: Full hash of cached binary (SHA-512 for C++, SHA-256 for TypeScript)\n * - checksum_algorithm: \"sha512\" or \"sha256\"\n * - platform: \"darwin\" | \"linux\" | \"win32\"\n * - arch: \"x64\" | \"arm64\"\n * - size: Size of cached binary in bytes\n * - source: Origin information\n * - type: \"download\" (from URL) or \"decompression\" (from embedded binary)\n * - url: Download URL (if type is \"download\")\n * - path: Source binary path (if type is \"decompression\")\n *\n * Extra Fields (implementation-specific):\n * - For C++ decompression:\n * - compressed_size: Size of compressed data in bytes\n * - compression_algorithm: Brotli level (numeric)\n * - compression_ratio: original_size / compressed_size\n *\n * Example (TypeScript download):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"a1b2c3d4e5f67890\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": \"sha256-abc123...\",\n * \"checksum_algorithm\": \"sha256\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 15000000,\n * \"source\": {\n * \"type\": \"download\",\n * \"url\": \"https://example.com/binary\"\n * }\n * }\n * ```\n *\n * Example (C++ decompression):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"0123456789abcdef\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": \"sha512-def456...\",\n * \"checksum_algorithm\": \"sha512\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 13000000,\n * \"source\": {\n * \"type\": \"decompression\",\n * \"path\": \"/usr/local/bin/socket\"\n * },\n * \"extra\": {\n * \"compressed_size\": 1700000,\n * \"compression_algorithm\": 3,\n * 
\"compression_ratio\": 7.647\n * }\n * }\n * ```\n *\n * @internal This interface documents the metadata file format.\n */\nexport interface DlxMetadata {\n version: string\n cache_key: string\n timestamp: number\n checksum: string\n checksum_algorithm: string\n platform: string\n arch: string\n size: number\n source?: {\n type: 'download' | 'decompression'\n url?: string\n path?: string\n }\n extra?: Record<string, unknown>\n}\n\n/**\n * Get metadata file path for a cached binary.\n */\nfunction getMetadataPath(cacheEntryPath: string): string {\n return path.join(cacheEntryPath, '.dlx-metadata.json')\n}\n\n/**\n * Check if a cached binary is still valid.\n */\nasync function isCacheValid(\n cacheEntryPath: string,\n cacheTtl: number,\n): Promise<boolean> {\n try {\n const metaPath = getMetadataPath(cacheEntryPath)\n if (!existsSync(metaPath)) {\n return false\n }\n\n const metadata = await readJson(metaPath, { throws: false })\n if (!isObjectObject(metadata)) {\n return false\n }\n const now = Date.now()\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, cache is invalid\n if (typeof timestamp !== 'number' || timestamp <= 0) {\n return false\n }\n const age = now - timestamp\n\n return age < cacheTtl\n } catch {\n return false\n }\n}\n\n/**\n * Download a file from a URL with integrity checking and concurrent download protection.\n * Uses processLock to prevent multiple processes from downloading the same binary simultaneously.\n * Internal helper function for downloading binary files.\n */\nasync function downloadBinaryFile(\n url: string,\n destPath: string,\n checksum?: string | undefined,\n): Promise<string> {\n // Use process lock to prevent concurrent downloads.\n // Lock is placed in the cache entry directory as 'concurrency.lock'.\n const cacheEntryDir = path.dirname(destPath)\n const lockPath = path.join(cacheEntryDir, 'concurrency.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n // Check if file was downloaded while waiting for lock.\n if (existsSync(destPath)) {\n const stats = await fs.stat(destPath)\n if (stats.size > 0) {\n // File exists, compute and return checksum.\n const fileBuffer = await fs.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n return hasher.digest('hex')\n }\n }\n\n // Download the file.\n try {\n await httpDownload(url, destPath)\n } catch (e) {\n throw new Error(\n `Failed to download binary from ${url}\\n` +\n `Destination: ${destPath}\\n` +\n 'Check your internet connection or verify the URL is accessible.',\n { cause: e },\n )\n }\n\n // Compute checksum of downloaded file.\n const fileBuffer = await fs.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n const actualChecksum = hasher.digest('hex')\n\n // Verify checksum if provided.\n if (checksum && actualChecksum !== checksum) {\n // Clean up invalid file.\n await safeDelete(destPath)\n throw new Error(\n `Checksum mismatch: expected ${checksum}, got ${actualChecksum}`,\n )\n }\n\n // Make executable on POSIX systems.\n if (!WIN32) {\n await fs.chmod(destPath, 0o755)\n }\n\n return actualChecksum\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Write metadata for a cached binary.\n * Uses unified schema shared with C++ decompressor and CLI dlxBinary.\n * Schema documentation: See DlxMetadata interface in this file (exported).\n * Core fields: version, cache_key, timestamp, 
checksum, checksum_algorithm, platform, arch, size, source\n * Note: This implementation uses SHA-256 checksums instead of SHA-512.\n */\nasync function writeMetadata(\n cacheEntryPath: string,\n cacheKey: string,\n url: string,\n checksum: string,\n size: number,\n): Promise<void> {\n const metaPath = getMetadataPath(cacheEntryPath)\n const metadata = {\n version: '1.0.0',\n cache_key: cacheKey,\n timestamp: Date.now(),\n checksum,\n checksum_algorithm: 'sha256',\n platform: os.platform(),\n arch: os.arch(),\n size,\n source: {\n type: 'download',\n url,\n },\n }\n await fs.writeFile(metaPath, JSON.stringify(metadata, null, 2))\n}\n\n/**\n * Clean expired entries from the DLX cache.\n */\nexport async function cleanDlxCache(\n maxAge: number = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n): Promise<number> {\n const cacheDir = getDlxCachePath()\n\n if (!existsSync(cacheDir)) {\n return 0\n }\n\n let cleaned = 0\n const now = Date.now()\n const entries = await fs.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n const metaPath = getMetadataPath(entryPath)\n\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, treat as expired (age = infinity)\n const age =\n typeof timestamp === 'number' && timestamp > 0\n ? now - timestamp\n : Number.POSITIVE_INFINITY\n\n if (age > maxAge) {\n // Remove entire cache entry directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath, { force: true, recursive: true })\n cleaned += 1\n }\n } catch {\n // If we can't read metadata, check if directory is empty or corrupted.\n try {\n // eslint-disable-next-line no-await-in-loop\n const contents = await fs.readdir(entryPath)\n if (!contents.length) {\n // Remove empty directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath)\n cleaned += 1\n }\n } catch {}\n }\n }\n\n return cleaned\n}\n\n/**\n * Download and execute a binary from a URL with caching.\n */\nexport async function dlxBinary(\n args: readonly string[] | string[],\n options?: DlxBinaryOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxBinaryResult> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force = false,\n name,\n spawnOptions,\n url,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n\n let downloaded = false\n let computedChecksum = checksum\n\n // Check if we need to download.\n if (\n !force &&\n existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid, read the checksum from metadata.\n try {\n const metaPath = getMetadataPath(cacheEntryDir)\n const metadata = 
await readJson(metaPath, { throws: false })\n if (\n metadata &&\n typeof metadata === 'object' &&\n !Array.isArray(metadata) &&\n typeof (metadata as Record<string, unknown>)['checksum'] === 'string'\n ) {\n computedChecksum = (metadata as Record<string, unknown>)[\n 'checksum'\n ] as string\n } else {\n // If metadata is invalid, re-download.\n downloaded = true\n }\n } catch {\n // If we can't read metadata, re-download.\n downloaded = true\n }\n } else {\n downloaded = true\n }\n\n if (downloaded) {\n // Ensure cache directory exists before downloading.\n try {\n await fs.mkdir(cacheEntryDir, { recursive: true })\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n computedChecksum || '',\n stats.size,\n )\n }\n\n // Execute the binary.\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension (e.g.,\n // C:\\cache\\test.cmd becomes just \"test\"). Windows cmd.exe then searches for \"test\"\n // in directories listed in PATH, trying each extension from PATHEXT environment\n // variable (.COM, .EXE, .BAT, .CMD, etc.) until it finds a match.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH\n // (unlike system package managers like npm/pnpm/yarn which are already in PATH),\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n //\n // This approach is consistent with how other tools handle Windows command execution:\n // - npm's promise-spawn: uses which.sync() to find commands in PATH\n // - cross-spawn: spawns cmd.exe with escaped arguments\n // - Node.js spawn with shell: true: delegates to cmd.exe which uses PATH\n const finalSpawnOptions = needsShell\n ? 
{\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n const spawnPromise = spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n\n return {\n binaryPath,\n downloaded,\n spawnPromise,\n }\n}\n\n/**\n * Download a binary from a URL with caching (without execution).\n * Similar to downloadPackage from dlx-package.\n *\n * @returns Object containing the path to the cached binary and whether it was downloaded\n */\nexport async function downloadBinary(\n options: Omit<DlxBinaryOptions, 'spawnOptions'>,\n): Promise<{ binaryPath: string; downloaded: boolean }> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force = false,\n name,\n url,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n\n let downloaded = false\n\n // Check if we need to download.\n if (\n !force &&\n existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid.\n downloaded = false\n } else {\n // Ensure cache directory exists before downloading.\n try {\n await fs.mkdir(cacheEntryDir, { recursive: true })\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n const computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n computedChecksum || '',\n stats.size,\n )\n downloaded = true\n }\n\n return {\n binaryPath,\n downloaded,\n }\n}\n\n/**\n * Execute a cached binary without re-downloading.\n * Similar to executePackage from dlx-package.\n * Binary must have been previously downloaded via downloadBinary or dlxBinary.\n *\n * @param binaryPath Path to the cached binary (from downloadBinary result)\n * @param args Arguments to pass to the binary\n * @param spawnOptions Spawn options for execution\n * @param spawnExtra Extra spawn configuration\n * @returns The spawn promise for the running process\n */\nexport function executeBinary(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual binaries and 
don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension. Windows cmd.exe\n // then searches for the binary in directories listed in PATH.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH,\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n const cacheEntryDir = path.dirname(binaryPath)\n const finalSpawnOptions = needsShell\n ? {\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n\n return spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n}\n\n/**\n * Get the DLX binary cache directory path.\n * Returns normalized path for cross-platform compatibility.\n * Uses same directory as dlx-package for unified DLX storage.\n */\nexport function getDlxCachePath(): string {\n return getSocketDlxDir()\n}\n\n/**\n * Get information about cached binaries.\n */\nexport async function listDlxCache(): Promise<\n Array<{\n age: number\n arch: string\n checksum: string\n name: string\n platform: string\n size: number\n url: string\n }>\n> {\n const cacheDir = getDlxCachePath()\n\n if (!existsSync(cacheDir)) {\n return []\n }\n\n const results = []\n const now = Date.now()\n const entries = await fs.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n const metaPath = getMetadataPath(entryPath)\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n\n const metaObj = metadata as Record<string, unknown>\n\n // Get URL from unified schema (source.url) or legacy schema (url).\n // Allow empty URL for backward compatibility with partial metadata.\n const source = metaObj['source'] as Record<string, unknown> | undefined\n const url =\n (source?.['url'] as string) || (metaObj['url'] as string) || ''\n\n // Find the binary file in the directory.\n // eslint-disable-next-line no-await-in-loop\n const files = await fs.readdir(entryPath)\n const binaryFile = files.find(f => !f.startsWith('.'))\n\n if (binaryFile) {\n const binaryPath = path.join(entryPath, binaryFile)\n // eslint-disable-next-line no-await-in-loop\n const binaryStats = await fs.stat(binaryPath)\n\n results.push({\n age: now - ((metaObj['timestamp'] as number) || 0),\n arch: (metaObj['arch'] as string) || 'unknown',\n checksum: (metaObj['checksum'] as string) || '',\n name: binaryFile,\n platform: (metaObj['platform'] as string) || 'unknown',\n size: binaryStats.size,\n url,\n })\n }\n } catch {}\n }\n\n return results\n}\n"],
- "mappings": ";6iBAAA,IAAAA,GAAA,GAAAC,EAAAD,GAAA,mBAAAE,EAAA,cAAAC,EAAA,mBAAAC,EAAA,kBAAAC,EAAA,oBAAAC,EAAA,iBAAAC,KAAA,eAAAC,EAAAR,IAEA,IAAAS,EAA2B,uBAC3BC,EAA2C,mBAC3CC,EAAe,sBACfC,EAAiB,wBAEjBC,EAAsB,+BAEtBC,EAAiC,iBACjCC,EAA6B,0BAC7BC,EAA4C,gBAC5CC,EAA+B,qBAC/BC,EAA8B,kBAC9BC,EAAgC,mBAChCC,EAA4B,0BAE5BC,EAAsB,mBAiHtB,SAASC,EAAgBC,EAAgC,CACvD,OAAO,EAAAC,QAAK,KAAKD,EAAgB,oBAAoB,CACvD,CAKA,eAAeE,EACbF,EACAG,EACkB,CAClB,GAAI,CACF,MAAMC,EAAWL,EAAgBC,CAAc,EAC/C,GAAI,IAAC,cAAWI,CAAQ,EACtB,MAAO,GAGT,MAAMC,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAC3D,GAAI,IAAC,kBAAeC,CAAQ,EAC1B,MAAO,GAET,MAAMC,EAAM,KAAK,IAAI,EACfC,EAAaF,EAAqC,UAExD,OAAI,OAAOE,GAAc,UAAYA,GAAa,EACzC,GAEGD,EAAMC,EAELJ,CACf,MAAQ,CACN,MAAO,EACT,CACF,CAOA,eAAeK,EACbC,EACAC,EACAC,EACiB,CAGjB,MAAMC,EAAgB,EAAAX,QAAK,QAAQS,CAAQ,EACrCG,EAAW,EAAAZ,QAAK,KAAKW,EAAe,kBAAkB,EAE5D,OAAO,MAAM,cAAY,SACvBC,EACA,SAAY,CAEV,MAAI,cAAWH,CAAQ,IACP,MAAM,EAAAI,SAAG,KAAKJ,CAAQ,GAC1B,KAAO,EAAG,CAElB,MAAMK,EAAa,MAAM,EAAAD,SAAG,SAASJ,CAAQ,EACvCM,KAAS,cAAW,QAAQ,EAClC,OAAAA,EAAO,OAAOD,CAAU,EACjBC,EAAO,OAAO,KAAK,CAC5B,CAIF,GAAI,CACF,QAAM,gBAAaP,EAAKC,CAAQ,CAClC,OAASO,EAAG,CACV,MAAM,IAAI,MACR,kCAAkCR,CAAG;AAAA,eACnBC,CAAQ;AAAA,iEAE1B,CAAE,MAAOO,CAAE,CACb,CACF,CAGA,MAAMF,EAAa,MAAM,EAAAD,SAAG,SAASJ,CAAQ,EACvCM,KAAS,cAAW,QAAQ,EAClCA,EAAO,OAAOD,CAAU,EACxB,MAAMG,EAAiBF,EAAO,OAAO,KAAK,EAG1C,GAAIL,GAAYO,IAAmBP,EAEjC,cAAM,cAAWD,CAAQ,EACnB,IAAI,MACR,+BAA+BC,CAAQ,SAASO,CAAc,EAChE,EAIF,OAAK,SACH,MAAM,EAAAJ,SAAG,MAAMJ,EAAU,GAAK,EAGzBQ,CACT,EACA,CAEE,QAAS,IACT,gBAAiB,GACnB,CACF,CACF,CASA,eAAeC,EACbnB,EACAoB,EACAX,EACAE,EACAU,EACe,CACf,MAAMjB,EAAWL,EAAgBC,CAAc,EACzCK,EAAW,CACf,QAAS,QACT,UAAWe,EACX,UAAW,KAAK,IAAI,EACpB,SAAAT,EACA,mBAAoB,SACpB,SAAU,EAAAW,QAAG,SAAS,EACtB,KAAM,EAAAA,QAAG,KAAK,EACd,KAAAD,EACA,OAAQ,CACN,KAAM,WACN,IAAAZ,CACF,CACF,EACA,MAAM,EAAAK,SAAG,UAAUV,EAAU,KAAK,UAAUC,EAAU,KAAM,CAAC,CAAC,CAChE,CAKA,eAAsB1B,EACpB4C,EAAiC,QAAQ,iBAAiB,EAAE,qBAC3C,CACjB,MAAMC,EAAWzC,EAAgB,EAEjC,GAAI,IAAC,cAAWyC,CAAQ,EACtB,MAAO,GAGT,IAAIC,EAAU,EACd,MAAMnB,EAAM,KAAK,IAAI,EACfoB,EAAU,MAAM,EAAAZ,SAAG,QAAQU,CAAQ,EAEzC,UAAWG,KAASD,EAAS,CAC3B,MAAME,EAAY,EAAA3B,QAAK,KAAKuB,EAAUG,CAAK,EACrCvB,EAAWL,EAAgB6B,CAAS,EAE1C,GAAI,CAEF,GAAI,CAAE,QAAM,SAAMA,CAAS,EACzB,SAIF,MAAMvB,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAC3D,GACE,CAACC,GACD,OAAOA,GAAa,UACpB,MAAM,QAAQA,CAAQ,EAEtB,SAEF,MAAME,EAAaF,EAAqC,WAGtD,OAAOE,GAAc,UAAYA,EAAY,EACzCD,EAAMC,EACN,OAAO,mBAEHgB,IAGR,QAAM,cAAWK,EAAW,CAAE,MAAO,GAAM,UAAW,EAAK,CAAC,EAC5DH,GAAW,EAEf,MAAQ,CAEN,GAAI,EAEe,MAAM,EAAAX,SAAG,QAAQc,CAAS,GAC7B,SAGZ,QAAM,cAAWA,CAAS,EAC1BH,GAAW,EAEf,MAAQ,CAAC,CACX,CACF,CAEA,OAAOA,CACT,CAKA,eAAsB7C,EACpBiD,EACAC,EACAC,EAC0B,CAC1B,KAAM,CACJ,SAAA5B,EAA2B,QAAQ,iBAAiB,EAAE,qBACtD,SAAAQ,EACA,MAAAqB,EAAQ,GACR,KAAAC,EACA,aAAAC,EACA,IAAAzB,CACF,EAAI,CAAE,UAAW,KAAM,GAAGqB,CAAQ,EAG5BN,EAAWzC,EAAgB,EAC3BoD,EAAaF,GAAQ,UAAU,QAAQ,QAAQ,IAAI,EAAAX,QAAG,KAAK,CAAC,GAE5Dc,EAAO,GAAG3B,CAAG,IAAI0B,CAAU,GAC3Bf,KAAW,oBAAiBgB,CAAI,EAChCxB,EAAgB,EAAAX,QAAK,KAAKuB,EAAUJ,CAAQ,EAC5CiB,KAAa,iBAAc,EAAApC,QAAK,KAAKW,EAAeuB,CAAU,CAAC,EAErE,IAAIG,EAAa,GACbC,EAAmB5B,EAGvB,GACE,CAACqB,MACD,cAAWpB,CAAa,GACvB,MAAMV,EAAaU,EAAeT,CAAQ,EAG3C,GAAI,CACF,MAAMC,EAAWL,EAAgBa,CAAa,EACxCP,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAEzDC,GACA,OAAOA,GAAa,UACpB,CAAC,MAAM,QAAQA,CAAQ,GACvB,OAAQA,EAAqC,UAAgB,SAE7DkC,EAAoBlC,EAClB,SAIFiC,EAAa,EAEjB,MAAQ,CAENA,EAAa,EACf,MAEAA,EAAa,GAGf,GAAIA,EAAY,CAEd,GAAI,CACF,MAAM,EAAAxB,SAAG,MAAMF,EAAe,CAAE,UAAW,EAAK,CAAC,CACnD,OAASK,EAAG,CACV,MAAMuB,EAAQvB,EAA4B,KAC1C,MAAIuB,IAAS,UAAYA,IAAS,QAC1B,IAAI,MACR,sDAAsD5B,CAAa;AAAA,oEAEnE,CAAE,MAAOK,CAAE,CACb,
EAEEuB,IAAS,QACL,IAAI,MACR,iEAAiE5B,CAAa;AAAA,iFAE9E,CAAE,MAAOK,CAAE,CACb,EAEI,IAAI,MACR,4CAA4CL,CAAa,GACzD,CAAE,MAAOK,CAAE,CACb,CACF,CAGAsB,EAAmB,MAAM/B,EAAmBC,EAAK4B,EAAY1B,CAAQ,EAGrE,MAAM8B,EAAQ,MAAM,EAAA3B,SAAG,KAAKuB,CAAU,EACtC,MAAMlB,EACJP,EACAQ,EACAX,EACA8B,GAAoB,GACpBE,EAAM,IACR,CACF,CAsBA,MAAMC,EAhBa,SAAS,sBAAsB,KAAKL,CAAU,EAiB7D,CACE,GAAGH,EACH,IAAK,CACH,GAAGA,GAAc,IACjB,KAAM,GAAGtB,CAAa,GAAG,EAAAX,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAW,EAAE,EACrE,EACA,MAAO,EACT,EACAiC,EACES,KAAe,SAAMN,EAAYR,EAAMa,EAAmBX,CAAU,EAE1E,MAAO,CACL,WAAAM,EACA,WAAAC,EACA,aAAAK,CACF,CACF,CAQA,eAAsB9D,EACpBiD,EACsD,CACtD,KAAM,CACJ,SAAA3B,EAA2B,QAAQ,iBAAiB,EAAE,qBACtD,SAAAQ,EACA,MAAAqB,EAAQ,GACR,KAAAC,EACA,IAAAxB,CACF,EAAI,CAAE,UAAW,KAAM,GAAGqB,CAAQ,EAG5BN,EAAWzC,EAAgB,EAC3BoD,EAAaF,GAAQ,UAAU,QAAQ,QAAQ,IAAI,EAAAX,QAAG,KAAK,CAAC,GAE5Dc,EAAO,GAAG3B,CAAG,IAAI0B,CAAU,GAC3Bf,KAAW,oBAAiBgB,CAAI,EAChCxB,EAAgB,EAAAX,QAAK,KAAKuB,EAAUJ,CAAQ,EAC5CiB,KAAa,iBAAc,EAAApC,QAAK,KAAKW,EAAeuB,CAAU,CAAC,EAErE,IAAIG,EAAa,GAGjB,GACE,CAACN,MACD,cAAWpB,CAAa,GACvB,MAAMV,EAAaU,EAAeT,CAAQ,EAG3CmC,EAAa,OACR,CAEL,GAAI,CACF,MAAM,EAAAxB,SAAG,MAAMF,EAAe,CAAE,UAAW,EAAK,CAAC,CACnD,OAASK,EAAG,CACV,MAAMuB,EAAQvB,EAA4B,KAC1C,MAAIuB,IAAS,UAAYA,IAAS,QAC1B,IAAI,MACR,sDAAsD5B,CAAa;AAAA,oEAEnE,CAAE,MAAOK,CAAE,CACb,EAEEuB,IAAS,QACL,IAAI,MACR,iEAAiE5B,CAAa;AAAA,iFAE9E,CAAE,MAAOK,CAAE,CACb,EAEI,IAAI,MACR,4CAA4CL,CAAa,GACzD,CAAE,MAAOK,CAAE,CACb,CACF,CAGA,MAAMsB,EAAmB,MAAM/B,EAAmBC,EAAK4B,EAAY1B,CAAQ,EAGrE8B,EAAQ,MAAM,EAAA3B,SAAG,KAAKuB,CAAU,EACtC,MAAMlB,EACJP,EACAQ,EACAX,EACA8B,GAAoB,GACpBE,EAAM,IACR,EACAH,EAAa,EACf,CAEA,MAAO,CACL,WAAAD,EACA,WAAAC,CACF,CACF,CAaO,SAASxD,EACduD,EACAR,EACAK,EACAH,EAC0B,CAI1B,MAAMa,EAAa,SAAS,sBAAsB,KAAKP,CAAU,EAS3DzB,EAAgB,EAAAX,QAAK,QAAQoC,CAAU,EACvCK,EAAoBE,EACtB,CACE,GAAGV,EACH,IAAK,CACH,GAAGA,GAAc,IACjB,KAAM,GAAGtB,CAAa,GAAG,EAAAX,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAW,EAAE,EACrE,EACA,MAAO,EACT,EACAiC,EAEJ,SAAO,SAAMG,EAAYR,EAAMa,EAAmBX,CAAU,CAC9D,CAOO,SAAShD,GAA0B,CACxC,SAAO,mBAAgB,CACzB,CAKA,eAAsBC,IAUpB,CACA,MAAMwC,EAAWzC,EAAgB,EAEjC,GAAI,IAAC,cAAWyC,CAAQ,EACtB,MAAO,CAAC,EAGV,MAAMqB,EAAU,CAAC,EACXvC,EAAM,KAAK,IAAI,EACfoB,EAAU,MAAM,EAAAZ,SAAG,QAAQU,CAAQ,EAEzC,UAAWG,KAASD,EAAS,CAC3B,MAAME,EAAY,EAAA3B,QAAK,KAAKuB,EAAUG,CAAK,EAC3C,GAAI,CAEF,GAAI,CAAE,QAAM,SAAMC,CAAS,EACzB,SAGF,MAAMxB,EAAWL,EAAgB6B,CAAS,EAEpCvB,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAC3D,GACE,CAACC,GACD,OAAOA,GAAa,UACpB,MAAM,QAAQA,CAAQ,EAEtB,SAGF,MAAMyC,EAAUzC,EAKVI,EADSqC,EAAQ,QAEX,KAAsBA,EAAQ,KAAqB,GAKzDC,GADQ,MAAM,EAAAjC,SAAG,QAAQc,CAAS,GACf,KAAKoB,GAAK,CAACA,EAAE,WAAW,GAAG,CAAC,EAErD,GAAID,EAAY,CACd,MAAMV,EAAa,EAAApC,QAAK,KAAK2B,EAAWmB,CAAU,EAE5CE,EAAc,MAAM,EAAAnC,SAAG,KAAKuB,CAAU,EAE5CQ,EAAQ,KAAK,CACX,IAAKvC,GAAQwC,EAAQ,WAA2B,GAChD,KAAOA,EAAQ,MAAsB,UACrC,SAAWA,EAAQ,UAA0B,GAC7C,KAAMC,EACN,SAAWD,EAAQ,UAA0B,UAC7C,KAAMG,EAAY,KAClB,IAAAxC,CACF,CAAC,CACH,CACF,MAAQ,CAAC,CACX,CAEA,OAAOoC,CACT",
- "names": ["dlx_binary_exports", "__export", "cleanDlxCache", "dlxBinary", "downloadBinary", "executeBinary", "getDlxCachePath", "listDlxCache", "__toCommonJS", "import_node_crypto", "import_node_fs", "import_node_os", "import_node_path", "import_platform", "import_dlx", "import_http_request", "import_fs", "import_objects", "import_path", "import_paths", "import_process_lock", "import_spawn", "getMetadataPath", "cacheEntryPath", "path", "isCacheValid", "cacheTtl", "metaPath", "metadata", "now", "timestamp", "downloadBinaryFile", "url", "destPath", "checksum", "cacheEntryDir", "lockPath", "fs", "fileBuffer", "hasher", "e", "actualChecksum", "writeMetadata", "cacheKey", "size", "os", "maxAge", "cacheDir", "cleaned", "entries", "entry", "entryPath", "args", "options", "spawnExtra", "force", "name", "spawnOptions", "binaryName", "spec", "binaryPath", "downloaded", "computedChecksum", "code", "stats", "finalSpawnOptions", "spawnPromise", "needsShell", "results", "metaObj", "binaryFile", "f", "binaryStats"]
+ "sourcesContent": ["/** @fileoverview DLX binary execution utilities for Socket ecosystem. */\n\nimport { createHash } from 'crypto'\n\nimport os from 'os'\n\nimport path from 'path'\n\nimport { WIN32 } from '#constants/platform'\n\nimport { generateCacheKey } from './dlx'\nimport { httpDownload } from './http-request'\nimport { isDir, readJson, safeDelete, safeMkdir } from './fs'\nimport { isObjectObject } from './objects'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nexport interface DlxBinaryOptions {\n /**\n * URL to download the binary from.\n */\n url: string\n\n /**\n * Optional name for the cached binary (defaults to URL hash).\n */\n name?: string | undefined\n\n /**\n * Expected checksum (sha256) for verification.\n */\n checksum?: string | undefined\n\n /**\n * Cache TTL in milliseconds (default: 7 days).\n */\n cacheTtl?: number | undefined\n\n /**\n * Force re-download even if cached.\n * Aligns with npm/npx --force flag.\n */\n force?: boolean | undefined\n\n /**\n * Skip confirmation prompts (auto-approve).\n * Aligns with npx --yes/-y flag.\n */\n yes?: boolean | undefined\n\n /**\n * Suppress output (quiet mode).\n * Aligns with npx --quiet/-q and pnpm --silent/-s flags.\n */\n quiet?: boolean | undefined\n\n /**\n * Additional spawn options.\n */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxBinaryResult {\n /** Path to the cached binary. */\n binaryPath: string\n /** Whether the binary was newly downloaded. */\n downloaded: boolean\n /** The spawn promise for the running process. 
*/\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Metadata structure for cached binaries (.dlx-metadata.json).\n * Unified schema shared across TypeScript (dlxBinary) and C++ (socket_macho_decompress).\n *\n * Core Fields (present in all implementations):\n * - version: Schema version (currently \"1.0.0\")\n * - cache_key: First 16 chars of SHA-512 hash (matches directory name)\n * - timestamp: Unix timestamp in milliseconds\n * - checksum: Full hash of cached binary (SHA-512 for C++, SHA-256 for TypeScript)\n * - checksum_algorithm: \"sha512\" or \"sha256\"\n * - platform: \"darwin\" | \"linux\" | \"win32\"\n * - arch: \"x64\" | \"arm64\"\n * - size: Size of cached binary in bytes\n * - source: Origin information\n * - type: \"download\" (from URL) or \"decompression\" (from embedded binary)\n * - url: Download URL (if type is \"download\")\n * - path: Source binary path (if type is \"decompression\")\n *\n * Extra Fields (implementation-specific):\n * - For C++ decompression:\n * - compressed_size: Size of compressed data in bytes\n * - compression_algorithm: Brotli level (numeric)\n * - compression_ratio: original_size / compressed_size\n *\n * Example (TypeScript download):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"a1b2c3d4e5f67890\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": \"sha256-abc123...\",\n * \"checksum_algorithm\": \"sha256\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 15000000,\n * \"source\": {\n * \"type\": \"download\",\n * \"url\": \"https://example.com/binary\"\n * }\n * }\n * ```\n *\n * Example (C++ decompression):\n * ```json\n * {\n * \"version\": \"1.0.0\",\n * \"cache_key\": \"0123456789abcdef\",\n * \"timestamp\": 1730332800000,\n * \"checksum\": \"sha512-def456...\",\n * \"checksum_algorithm\": \"sha512\",\n * \"platform\": \"darwin\",\n * \"arch\": \"arm64\",\n * \"size\": 13000000,\n * \"source\": {\n * \"type\": \"decompression\",\n * \"path\": \"/usr/local/bin/socket\"\n * },\n * \"extra\": {\n * \"compressed_size\": 1700000,\n * \"compression_algorithm\": 3,\n * \"compression_ratio\": 7.647\n * }\n * }\n * ```\n *\n * @internal This interface documents the metadata file format.\n */\nexport interface DlxMetadata {\n version: string\n cache_key: string\n timestamp: number\n checksum: string\n checksum_algorithm: string\n platform: string\n arch: string\n size: number\n source?: {\n type: 'download' | 'decompression'\n url?: string\n path?: string\n }\n extra?: Record<string, unknown>\n}\n\n/**\n * Get metadata file path for a cached binary.\n */\nfunction getMetadataPath(cacheEntryPath: string): string {\n return path.join(cacheEntryPath, '.dlx-metadata.json')\n}\n\n/**\n * Check if a cached binary is still valid.\n */\nasync function isCacheValid(\n cacheEntryPath: string,\n cacheTtl: number,\n): Promise<boolean> {\n const fs = getFs()\n try {\n const metaPath = getMetadataPath(cacheEntryPath)\n if (!fs.existsSync(metaPath)) {\n return false\n }\n\n const metadata = await readJson(metaPath, { throws: false })\n if (!isObjectObject(metadata)) {\n return false\n }\n const now = Date.now()\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, cache is invalid\n if (typeof timestamp !== 'number' || timestamp <= 0) {\n return false\n }\n const age = now - timestamp\n\n return age < cacheTtl\n } catch {\n return false\n }\n}\n\n/**\n * Download a file from a URL with integrity checking and concurrent download protection.\n * Uses 
processLock to prevent multiple processes from downloading the same binary simultaneously.\n * Internal helper function for downloading binary files.\n */\nasync function downloadBinaryFile(\n url: string,\n destPath: string,\n checksum?: string | undefined,\n): Promise<string> {\n // Use process lock to prevent concurrent downloads.\n // Lock is placed in the cache entry directory as 'concurrency.lock'.\n const cacheEntryDir = path.dirname(destPath)\n const lockPath = path.join(cacheEntryDir, 'concurrency.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n const fs = getFs()\n // Check if file was downloaded while waiting for lock.\n if (fs.existsSync(destPath)) {\n const stats = await fs.promises.stat(destPath)\n if (stats.size > 0) {\n // File exists, compute and return checksum.\n const fileBuffer = await fs.promises.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n return hasher.digest('hex')\n }\n }\n\n // Download the file.\n try {\n await httpDownload(url, destPath)\n } catch (e) {\n throw new Error(\n `Failed to download binary from ${url}\\n` +\n `Destination: ${destPath}\\n` +\n 'Check your internet connection or verify the URL is accessible.',\n { cause: e },\n )\n }\n\n // Compute checksum of downloaded file.\n const fileBuffer = await fs.promises.readFile(destPath)\n const hasher = createHash('sha256')\n hasher.update(fileBuffer)\n const actualChecksum = hasher.digest('hex')\n\n // Verify checksum if provided.\n if (checksum && actualChecksum !== checksum) {\n // Clean up invalid file.\n await safeDelete(destPath)\n throw new Error(\n `Checksum mismatch: expected ${checksum}, got ${actualChecksum}`,\n )\n }\n\n // Make executable on POSIX systems.\n if (!WIN32) {\n await fs.promises.chmod(destPath, 0o755)\n }\n\n return actualChecksum\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Write metadata for a cached binary.\n * Uses unified schema shared with C++ decompressor and CLI dlxBinary.\n * Schema documentation: See DlxMetadata interface in this file (exported).\n * Core fields: version, cache_key, timestamp, checksum, checksum_algorithm, platform, arch, size, source\n * Note: This implementation uses SHA-256 checksums instead of SHA-512.\n */\nasync function writeMetadata(\n cacheEntryPath: string,\n cacheKey: string,\n url: string,\n checksum: string,\n size: number,\n): Promise<void> {\n const metaPath = getMetadataPath(cacheEntryPath)\n const metadata = {\n version: '1.0.0',\n cache_key: cacheKey,\n timestamp: Date.now(),\n checksum,\n checksum_algorithm: 'sha256',\n platform: os.platform(),\n arch: os.arch(),\n size,\n source: {\n type: 'download',\n url,\n },\n }\n const fs = getFs()\n await fs.promises.writeFile(metaPath, JSON.stringify(metadata, null, 2))\n}\n\n/**\n * Clean expired entries from the DLX cache.\n */\nexport async function cleanDlxCache(\n maxAge: number = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n): Promise<number> {\n const cacheDir = getDlxCachePath()\n const fs = getFs()\n\n if (!fs.existsSync(cacheDir)) {\n return 0\n }\n\n let cleaned = 0\n const now = Date.now()\n const entries = await fs.promises.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n const metaPath = getMetadataPath(entryPath)\n\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n // eslint-disable-next-line no-await-in-loop\n 
const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n const timestamp = (metadata as Record<string, unknown>)['timestamp']\n // If timestamp is missing or invalid, treat as expired (age = infinity)\n const age =\n typeof timestamp === 'number' && timestamp > 0\n ? now - timestamp\n : Number.POSITIVE_INFINITY\n\n if (age > maxAge) {\n // Remove entire cache entry directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath, { force: true, recursive: true })\n cleaned += 1\n }\n } catch {\n // If we can't read metadata, check if directory is empty or corrupted.\n try {\n // eslint-disable-next-line no-await-in-loop\n const contents = await fs.promises.readdir(entryPath)\n if (!contents.length) {\n // Remove empty directory.\n // eslint-disable-next-line no-await-in-loop\n await safeDelete(entryPath)\n cleaned += 1\n }\n } catch {}\n }\n }\n\n return cleaned\n}\n\n/**\n * Download and execute a binary from a URL with caching.\n */\nexport async function dlxBinary(\n args: readonly string[] | string[],\n options?: DlxBinaryOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxBinaryResult> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force: userForce = false,\n name,\n spawnOptions,\n url,\n yes,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Map --yes flag to force behavior (auto-approve/skip prompts)\n const force = yes === true ? true : userForce\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n const fs = getFs()\n\n let downloaded = false\n let computedChecksum = checksum\n\n // Check if we need to download.\n if (\n !force &&\n fs.existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid, read the checksum from metadata.\n try {\n const metaPath = getMetadataPath(cacheEntryDir)\n const metadata = await readJson(metaPath, { throws: false })\n if (\n metadata &&\n typeof metadata === 'object' &&\n !Array.isArray(metadata) &&\n typeof (metadata as Record<string, unknown>)['checksum'] === 'string'\n ) {\n computedChecksum = (metadata as Record<string, unknown>)[\n 'checksum'\n ] as string\n } else {\n // If metadata is invalid, re-download.\n downloaded = true\n }\n } catch {\n // If we can't read metadata, re-download.\n downloaded = true\n }\n } else {\n downloaded = true\n }\n\n if (downloaded) {\n // Ensure cache directory exists before downloading.\n try {\n await safeMkdir(cacheEntryDir, { recursive: true })\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable 
location.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.promises.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n computedChecksum || '',\n stats.size,\n )\n }\n\n // Execute the binary.\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension (e.g.,\n // C:\\cache\\test.cmd becomes just \"test\"). Windows cmd.exe then searches for \"test\"\n // in directories listed in PATH, trying each extension from PATHEXT environment\n // variable (.COM, .EXE, .BAT, .CMD, etc.) until it finds a match.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH\n // (unlike system package managers like npm/pnpm/yarn which are already in PATH),\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n //\n // This approach is consistent with how other tools handle Windows command execution:\n // - npm's promise-spawn: uses which.sync() to find commands in PATH\n // - cross-spawn: spawns cmd.exe with escaped arguments\n // - Node.js spawn with shell: true: delegates to cmd.exe which uses PATH\n const finalSpawnOptions = needsShell\n ? 
{\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n const spawnPromise = spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n\n return {\n binaryPath,\n downloaded,\n spawnPromise,\n }\n}\n\n/**\n * Download a binary from a URL with caching (without execution).\n * Similar to downloadPackage from dlx-package.\n *\n * @returns Object containing the path to the cached binary and whether it was downloaded\n */\nexport async function downloadBinary(\n options: Omit<DlxBinaryOptions, 'spawnOptions'>,\n): Promise<{ binaryPath: string; downloaded: boolean }> {\n const {\n cacheTtl = /*@__INLINE__*/ require('#constants/time').DLX_BINARY_CACHE_TTL,\n checksum,\n force = false,\n name,\n url,\n } = { __proto__: null, ...options } as DlxBinaryOptions\n\n // Generate cache paths similar to pnpm/npx structure.\n const cacheDir = getDlxCachePath()\n const binaryName = name || `binary-${process.platform}-${os.arch()}`\n // Create spec from URL and binary name for unique cache identity.\n const spec = `${url}:${binaryName}`\n const cacheKey = generateCacheKey(spec)\n const cacheEntryDir = path.join(cacheDir, cacheKey)\n const binaryPath = normalizePath(path.join(cacheEntryDir, binaryName))\n const fs = getFs()\n\n let downloaded = false\n\n // Check if we need to download.\n if (\n !force &&\n fs.existsSync(cacheEntryDir) &&\n (await isCacheValid(cacheEntryDir, cacheTtl))\n ) {\n // Binary is cached and valid.\n downloaded = false\n } else {\n // Ensure cache directory exists before downloading.\n try {\n await safeMkdir(cacheEntryDir, { recursive: true })\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating binary cache directory: ${cacheEntryDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create binary cache directory on read-only filesystem: ${cacheEntryDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to create binary cache directory: ${cacheEntryDir}`,\n { cause: e },\n )\n }\n\n // Download the binary.\n const computedChecksum = await downloadBinaryFile(url, binaryPath, checksum)\n\n // Get file size for metadata.\n const stats = await fs.promises.stat(binaryPath)\n await writeMetadata(\n cacheEntryDir,\n cacheKey,\n url,\n computedChecksum || '',\n stats.size,\n )\n downloaded = true\n }\n\n return {\n binaryPath,\n downloaded,\n }\n}\n\n/**\n * Execute a cached binary without re-downloading.\n * Similar to executePackage from dlx-package.\n * Binary must have been previously downloaded via downloadBinary or dlxBinary.\n *\n * @param binaryPath Path to the cached binary (from downloadBinary result)\n * @param args Arguments to pass to the binary\n * @param spawnOptions Spawn options for execution\n * @param spawnExtra Extra spawn configuration\n * @returns The spawn promise for the running process\n */\nexport function executeBinary(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true because\n // they are not executable on their own and must be run through cmd.exe.\n // Note: 
.exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n\n // Windows cmd.exe PATH resolution behavior:\n // When shell: true on Windows with .cmd/.bat/.ps1 files, spawn will automatically\n // strip the full path down to just the basename without extension. Windows cmd.exe\n // then searches for the binary in directories listed in PATH.\n //\n // Since our binaries are downloaded to a custom cache directory that's not in PATH,\n // we must prepend the cache directory to PATH so cmd.exe can locate the binary.\n const cacheEntryDir = path.dirname(binaryPath)\n const finalSpawnOptions = needsShell\n ? {\n ...spawnOptions,\n env: {\n ...spawnOptions?.env,\n PATH: `${cacheEntryDir}${path.delimiter}${process.env['PATH'] || ''}`,\n },\n shell: true,\n }\n : spawnOptions\n\n return spawn(binaryPath, args, finalSpawnOptions, spawnExtra)\n}\n\n/**\n * Get the DLX binary cache directory path.\n * Returns normalized path for cross-platform compatibility.\n * Uses same directory as dlx-package for unified DLX storage.\n */\nexport function getDlxCachePath(): string {\n return getSocketDlxDir()\n}\n\n/**\n * Get information about cached binaries.\n */\nexport async function listDlxCache(): Promise<\n Array<{\n age: number\n arch: string\n checksum: string\n name: string\n platform: string\n size: number\n url: string\n }>\n> {\n const cacheDir = getDlxCachePath()\n const fs = getFs()\n\n if (!fs.existsSync(cacheDir)) {\n return []\n }\n\n const results = []\n const now = Date.now()\n const entries = await fs.promises.readdir(cacheDir)\n\n for (const entry of entries) {\n const entryPath = path.join(cacheDir, entry)\n try {\n // eslint-disable-next-line no-await-in-loop\n if (!(await isDir(entryPath))) {\n continue\n }\n\n const metaPath = getMetadataPath(entryPath)\n // eslint-disable-next-line no-await-in-loop\n const metadata = await readJson(metaPath, { throws: false })\n if (\n !metadata ||\n typeof metadata !== 'object' ||\n Array.isArray(metadata)\n ) {\n continue\n }\n\n const metaObj = metadata as Record<string, unknown>\n\n // Get URL from unified schema (source.url) or legacy schema (url).\n // Allow empty URL for backward compatibility with partial metadata.\n const source = metaObj['source'] as Record<string, unknown> | undefined\n const url =\n (source?.['url'] as string) || (metaObj['url'] as string) || ''\n\n // Find the binary file in the directory.\n // eslint-disable-next-line no-await-in-loop\n const files = await fs.promises.readdir(entryPath)\n const binaryFile = files.find(f => !f.startsWith('.'))\n\n if (binaryFile) {\n const binaryPath = path.join(entryPath, binaryFile)\n // eslint-disable-next-line no-await-in-loop\n const binaryStats = await fs.promises.stat(binaryPath)\n\n results.push({\n age: now - ((metaObj['timestamp'] as number) || 0),\n arch: (metaObj['arch'] as string) || 'unknown',\n checksum: (metaObj['checksum'] as string) || '',\n name: binaryFile,\n platform: (metaObj['platform'] as string) || 'unknown',\n size: binaryStats.size,\n url,\n })\n }\n } catch {}\n }\n\n return results\n}\n"],
+ "mappings": ";6iBAAA,IAAAA,GAAA,GAAAC,EAAAD,GAAA,mBAAAE,GAAA,cAAAC,GAAA,mBAAAC,GAAA,kBAAAC,GAAA,oBAAAC,EAAA,iBAAAC,KAAA,eAAAC,EAAAR,IAEA,IAAAS,EAA2B,kBAE3BC,EAAe,iBAEfC,EAAiB,mBAEjBC,EAAsB,+BAEtBC,EAAiC,iBACjCC,EAA6B,0BAC7BC,EAAuD,gBACvDC,EAA+B,qBAC/BL,EAA8B,kBAC9BM,EAAgC,mBAChCC,EAA4B,0BAE5BC,EAAsB,mBAEtB,IAAIC,EASJ,SAASC,GAAQ,CACf,OAAID,IAAQ,SAGVA,EAAoB,QAAQ,SAAS,GAEhCA,CACT,CA+IA,SAASE,EAAgBC,EAAgC,CACvD,OAAO,EAAAC,QAAK,KAAKD,EAAgB,oBAAoB,CACvD,CAKA,eAAeE,EACbF,EACAG,EACkB,CAClB,MAAMC,EAAKN,EAAM,EACjB,GAAI,CACF,MAAMO,EAAWN,EAAgBC,CAAc,EAC/C,GAAI,CAACI,EAAG,WAAWC,CAAQ,EACzB,MAAO,GAGT,MAAMC,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAC3D,GAAI,IAAC,kBAAeC,CAAQ,EAC1B,MAAO,GAET,MAAMC,EAAM,KAAK,IAAI,EACfC,EAAaF,EAAqC,UAExD,OAAI,OAAOE,GAAc,UAAYA,GAAa,EACzC,GAEGD,EAAMC,EAELL,CACf,MAAQ,CACN,MAAO,EACT,CACF,CAOA,eAAeM,EACbC,EACAC,EACAC,EACiB,CAGjB,MAAMC,EAAgB,EAAAZ,QAAK,QAAQU,CAAQ,EACrCG,EAAW,EAAAb,QAAK,KAAKY,EAAe,kBAAkB,EAE5D,OAAO,MAAM,cAAY,SACvBC,EACA,SAAY,CACV,MAAMV,EAAKN,EAAM,EAEjB,GAAIM,EAAG,WAAWO,CAAQ,IACV,MAAMP,EAAG,SAAS,KAAKO,CAAQ,GACnC,KAAO,EAAG,CAElB,MAAMI,EAAa,MAAMX,EAAG,SAAS,SAASO,CAAQ,EAChDK,KAAS,cAAW,QAAQ,EAClC,OAAAA,EAAO,OAAOD,CAAU,EACjBC,EAAO,OAAO,KAAK,CAC5B,CAIF,GAAI,CACF,QAAM,gBAAaN,EAAKC,CAAQ,CAClC,OAASM,EAAG,CACV,MAAM,IAAI,MACR,kCAAkCP,CAAG;AAAA,eACnBC,CAAQ;AAAA,iEAE1B,CAAE,MAAOM,CAAE,CACb,CACF,CAGA,MAAMF,EAAa,MAAMX,EAAG,SAAS,SAASO,CAAQ,EAChDK,KAAS,cAAW,QAAQ,EAClCA,EAAO,OAAOD,CAAU,EACxB,MAAMG,EAAiBF,EAAO,OAAO,KAAK,EAG1C,GAAIJ,GAAYM,IAAmBN,EAEjC,cAAM,cAAWD,CAAQ,EACnB,IAAI,MACR,+BAA+BC,CAAQ,SAASM,CAAc,EAChE,EAIF,OAAK,SACH,MAAMd,EAAG,SAAS,MAAMO,EAAU,GAAK,EAGlCO,CACT,EACA,CAEE,QAAS,IACT,gBAAiB,GACnB,CACF,CACF,CASA,eAAeC,EACbnB,EACAoB,EACAV,EACAE,EACAS,EACe,CACf,MAAMhB,EAAWN,EAAgBC,CAAc,EACzCM,EAAW,CACf,QAAS,QACT,UAAWc,EACX,UAAW,KAAK,IAAI,EACpB,SAAAR,EACA,mBAAoB,SACpB,SAAU,EAAAU,QAAG,SAAS,EACtB,KAAM,EAAAA,QAAG,KAAK,EACd,KAAAD,EACA,OAAQ,CACN,KAAM,WACN,IAAAX,CACF,CACF,EAEA,MADWZ,EAAM,EACR,SAAS,UAAUO,EAAU,KAAK,UAAUC,EAAU,KAAM,CAAC,CAAC,CACzE,CAKA,eAAsB3B,GACpB4C,EAAiC,QAAQ,iBAAiB,EAAE,qBAC3C,CACjB,MAAMC,EAAWzC,EAAgB,EAC3BqB,EAAKN,EAAM,EAEjB,GAAI,CAACM,EAAG,WAAWoB,CAAQ,EACzB,MAAO,GAGT,IAAIC,EAAU,EACd,MAAMlB,EAAM,KAAK,IAAI,EACfmB,EAAU,MAAMtB,EAAG,SAAS,QAAQoB,CAAQ,EAElD,UAAWG,KAASD,EAAS,CAC3B,MAAME,EAAY,EAAA3B,QAAK,KAAKuB,EAAUG,CAAK,EACrCtB,EAAWN,EAAgB6B,CAAS,EAE1C,GAAI,CAEF,GAAI,CAAE,QAAM,SAAMA,CAAS,EACzB,SAIF,MAAMtB,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAC3D,GACE,CAACC,GACD,OAAOA,GAAa,UACpB,MAAM,QAAQA,CAAQ,EAEtB,SAEF,MAAME,EAAaF,EAAqC,WAGtD,OAAOE,GAAc,UAAYA,EAAY,EACzCD,EAAMC,EACN,OAAO,mBAEHe,IAGR,QAAM,cAAWK,EAAW,CAAE,MAAO,GAAM,UAAW,EAAK,CAAC,EAC5DH,GAAW,EAEf,MAAQ,CAEN,GAAI,EAEe,MAAMrB,EAAG,SAAS,QAAQwB,CAAS,GACtC,SAGZ,QAAM,cAAWA,CAAS,EAC1BH,GAAW,EAEf,MAAQ,CAAC,CACX,CACF,CAEA,OAAOA,CACT,CAKA,eAAsB7C,GACpBiD,EACAC,EACAC,EAC0B,CAC1B,KAAM,CACJ,SAAA5B,EAA2B,QAAQ,iBAAiB,EAAE,qBACtD,SAAAS,EACA,MAAOoB,EAAY,GACnB,KAAAC,EACA,aAAAC,EACA,IAAAxB,EACA,IAAAyB,CACF,EAAI,CAAE,UAAW,KAAM,GAAGL,CAAQ,EAG5BM,EAAQD,IAAQ,GAAO,GAAOH,EAG9BR,EAAWzC,EAAgB,EAC3BsD,EAAaJ,GAAQ,UAAU,QAAQ,QAAQ,IAAI,EAAAX,QAAG,KAAK,CAAC,GAE5DgB,EAAO,GAAG5B,CAAG,IAAI2B,CAAU,GAC3BjB,KAAW,oBAAiBkB,CAAI,EAChCzB,EAAgB,EAAAZ,QAAK,KAAKuB,EAAUJ,CAAQ,EAC5CmB,KAAa,iBAAc,EAAAtC,QAAK,KAAKY,EAAewB,CAAU,CAAC,EAC/DjC,EAAKN,EAAM,EAEjB,IAAI0C,EAAa,GACbC,EAAmB7B,EAGvB,GACE,CAACwB,GACDhC,EAAG,WAAWS,CAAa,GAC1B,MAAMX,EAAaW,EAAeV,CAAQ,EAG3C,GAAI,CACF,MAAME,EAAWN,EAAgBc,CAAa,EACxCP,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAEzDC,GACA,OAAOA,GAAa,UACpB,CAAC,MAAM,QAAQA,CAAQ,GACvB,OAAQA,EAAqC,UAAgB,SAE7DmC,EAAoBnC,EAClB,SAIFkC,EAAa,EAEjB,MAAQ,CAENA,EAAa,EACf,M
AEAA,EAAa,GAGf,GAAIA,EAAY,CAEd,GAAI,CACF,QAAM,aAAU3B,EAAe,CAAE,UAAW,EAAK,CAAC,CACpD,OAASI,EAAG,CACV,MAAMyB,EAAQzB,EAA4B,KAC1C,MAAIyB,IAAS,UAAYA,IAAS,QAC1B,IAAI,MACR,sDAAsD7B,CAAa;AAAA,oEAEnE,CAAE,MAAOI,CAAE,CACb,EAEEyB,IAAS,QACL,IAAI,MACR,iEAAiE7B,CAAa;AAAA,iFAE9E,CAAE,MAAOI,CAAE,CACb,EAEI,IAAI,MACR,4CAA4CJ,CAAa,GACzD,CAAE,MAAOI,CAAE,CACb,CACF,CAGAwB,EAAmB,MAAMhC,EAAmBC,EAAK6B,EAAY3B,CAAQ,EAGrE,MAAM+B,EAAQ,MAAMvC,EAAG,SAAS,KAAKmC,CAAU,EAC/C,MAAMpB,EACJN,EACAO,EACAV,EACA+B,GAAoB,GACpBE,EAAM,IACR,CACF,CAsBA,MAAMC,EAhBa,SAAS,sBAAsB,KAAKL,CAAU,EAiB7D,CACE,GAAGL,EACH,IAAK,CACH,GAAGA,GAAc,IACjB,KAAM,GAAGrB,CAAa,GAAG,EAAAZ,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAW,EAAE,EACrE,EACA,MAAO,EACT,EACAiC,EACEW,KAAe,SAAMN,EAAYV,EAAMe,EAAmBb,CAAU,EAE1E,MAAO,CACL,WAAAQ,EACA,WAAAC,EACA,aAAAK,CACF,CACF,CAQA,eAAsBhE,GACpBiD,EACsD,CACtD,KAAM,CACJ,SAAA3B,EAA2B,QAAQ,iBAAiB,EAAE,qBACtD,SAAAS,EACA,MAAAwB,EAAQ,GACR,KAAAH,EACA,IAAAvB,CACF,EAAI,CAAE,UAAW,KAAM,GAAGoB,CAAQ,EAG5BN,EAAWzC,EAAgB,EAC3BsD,EAAaJ,GAAQ,UAAU,QAAQ,QAAQ,IAAI,EAAAX,QAAG,KAAK,CAAC,GAE5DgB,EAAO,GAAG5B,CAAG,IAAI2B,CAAU,GAC3BjB,KAAW,oBAAiBkB,CAAI,EAChCzB,EAAgB,EAAAZ,QAAK,KAAKuB,EAAUJ,CAAQ,EAC5CmB,KAAa,iBAAc,EAAAtC,QAAK,KAAKY,EAAewB,CAAU,CAAC,EAC/DjC,EAAKN,EAAM,EAEjB,IAAI0C,EAAa,GAGjB,GACE,CAACJ,GACDhC,EAAG,WAAWS,CAAa,GAC1B,MAAMX,EAAaW,EAAeV,CAAQ,EAG3CqC,EAAa,OACR,CAEL,GAAI,CACF,QAAM,aAAU3B,EAAe,CAAE,UAAW,EAAK,CAAC,CACpD,OAASI,EAAG,CACV,MAAMyB,EAAQzB,EAA4B,KAC1C,MAAIyB,IAAS,UAAYA,IAAS,QAC1B,IAAI,MACR,sDAAsD7B,CAAa;AAAA,oEAEnE,CAAE,MAAOI,CAAE,CACb,EAEEyB,IAAS,QACL,IAAI,MACR,iEAAiE7B,CAAa;AAAA,iFAE9E,CAAE,MAAOI,CAAE,CACb,EAEI,IAAI,MACR,4CAA4CJ,CAAa,GACzD,CAAE,MAAOI,CAAE,CACb,CACF,CAGA,MAAMwB,EAAmB,MAAMhC,EAAmBC,EAAK6B,EAAY3B,CAAQ,EAGrE+B,EAAQ,MAAMvC,EAAG,SAAS,KAAKmC,CAAU,EAC/C,MAAMpB,EACJN,EACAO,EACAV,EACA+B,GAAoB,GACpBE,EAAM,IACR,EACAH,EAAa,EACf,CAEA,MAAO,CACL,WAAAD,EACA,WAAAC,CACF,CACF,CAaO,SAAS1D,GACdyD,EACAV,EACAK,EACAH,EAC0B,CAI1B,MAAMe,EAAa,SAAS,sBAAsB,KAAKP,CAAU,EAS3D1B,EAAgB,EAAAZ,QAAK,QAAQsC,CAAU,EACvCK,EAAoBE,EACtB,CACE,GAAGZ,EACH,IAAK,CACH,GAAGA,GAAc,IACjB,KAAM,GAAGrB,CAAa,GAAG,EAAAZ,QAAK,SAAS,GAAG,QAAQ,IAAI,MAAW,EAAE,EACrE,EACA,MAAO,EACT,EACAiC,EAEJ,SAAO,SAAMK,EAAYV,EAAMe,EAAmBb,CAAU,CAC9D,CAOO,SAAShD,GAA0B,CACxC,SAAO,mBAAgB,CACzB,CAKA,eAAsBC,IAUpB,CACA,MAAMwC,EAAWzC,EAAgB,EAC3BqB,EAAKN,EAAM,EAEjB,GAAI,CAACM,EAAG,WAAWoB,CAAQ,EACzB,MAAO,CAAC,EAGV,MAAMuB,EAAU,CAAC,EACXxC,EAAM,KAAK,IAAI,EACfmB,EAAU,MAAMtB,EAAG,SAAS,QAAQoB,CAAQ,EAElD,UAAWG,KAASD,EAAS,CAC3B,MAAME,EAAY,EAAA3B,QAAK,KAAKuB,EAAUG,CAAK,EAC3C,GAAI,CAEF,GAAI,CAAE,QAAM,SAAMC,CAAS,EACzB,SAGF,MAAMvB,EAAWN,EAAgB6B,CAAS,EAEpCtB,EAAW,QAAM,YAASD,EAAU,CAAE,OAAQ,EAAM,CAAC,EAC3D,GACE,CAACC,GACD,OAAOA,GAAa,UACpB,MAAM,QAAQA,CAAQ,EAEtB,SAGF,MAAM0C,EAAU1C,EAKVI,EADSsC,EAAQ,QAEX,KAAsBA,EAAQ,KAAqB,GAKzDC,GADQ,MAAM7C,EAAG,SAAS,QAAQwB,CAAS,GACxB,KAAKsB,GAAK,CAACA,EAAE,WAAW,GAAG,CAAC,EAErD,GAAID,EAAY,CACd,MAAMV,EAAa,EAAAtC,QAAK,KAAK2B,EAAWqB,CAAU,EAE5CE,EAAc,MAAM/C,EAAG,SAAS,KAAKmC,CAAU,EAErDQ,EAAQ,KAAK,CACX,IAAKxC,GAAQyC,EAAQ,WAA2B,GAChD,KAAOA,EAAQ,MAAsB,UACrC,SAAWA,EAAQ,UAA0B,GAC7C,KAAMC,EACN,SAAWD,EAAQ,UAA0B,UAC7C,KAAMG,EAAY,KAClB,IAAAzC,CACF,CAAC,CACH,CACF,MAAQ,CAAC,CACX,CAEA,OAAOqC,CACT",
+ "names": ["dlx_binary_exports", "__export", "cleanDlxCache", "dlxBinary", "downloadBinary", "executeBinary", "getDlxCachePath", "listDlxCache", "__toCommonJS", "import_crypto", "import_os", "import_path", "import_platform", "import_dlx", "import_http_request", "import_fs", "import_objects", "import_paths", "import_process_lock", "import_spawn", "_fs", "getFs", "getMetadataPath", "cacheEntryPath", "path", "isCacheValid", "cacheTtl", "fs", "metaPath", "metadata", "now", "timestamp", "downloadBinaryFile", "url", "destPath", "checksum", "cacheEntryDir", "lockPath", "fileBuffer", "hasher", "e", "actualChecksum", "writeMetadata", "cacheKey", "size", "os", "maxAge", "cacheDir", "cleaned", "entries", "entry", "entryPath", "args", "options", "spawnExtra", "userForce", "name", "spawnOptions", "yes", "force", "binaryName", "spec", "binaryPath", "downloaded", "computedChecksum", "code", "stats", "finalSpawnOptions", "spawnPromise", "needsShell", "results", "metaObj", "binaryFile", "f", "binaryStats"]
  }
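The embedded dlx-binary source above documents a download → verify → cache → execute flow exposed as dlxBinary, downloadBinary, executeBinary, cleanDlxCache, and listDlxCache. The sketch below shows how that surface might be driven from consuming code; the subpath import, download URL, and checksum are illustrative assumptions and are not taken from this diff.

```typescript
// Minimal usage sketch for the 3.0.0 dlx-binary API, based on the embedded
// source above. Import specifier, URL, and checksum are placeholders.
import {
  cleanDlxCache,
  dlxBinary,
  downloadBinary,
  executeBinary,
} from '@socketsecurity/lib/dlx-binary'

async function run(): Promise<void> {
  // One-shot: download (or reuse the cache entry) and spawn the binary.
  // `yes: true` maps to force behavior (auto-approve, skip prompts).
  const result = await dlxBinary(['--version'], {
    url: 'https://example.com/releases/tool-linux-x64', // placeholder URL
    name: 'tool-linux-x64',
    checksum: '<expected-sha256>', // compared against the downloaded file
    yes: true,
  })
  await result.spawnPromise
  console.log(result.binaryPath, 'downloaded this run:', result.downloaded)

  // Split flow: fetch once, execute later without re-downloading.
  const { binaryPath } = await downloadBinary({
    url: 'https://example.com/releases/tool-linux-x64', // placeholder URL
    name: 'tool-linux-x64',
  })
  await executeBinary(binaryPath, ['--help'], { stdio: 'inherit' })

  // Housekeeping: remove cache entries older than the default TTL.
  const removed = await cleanDlxCache()
  console.log(`removed ${removed} expired cache entries`)
}

run().catch(console.error)
```

Per the source comments, cache entries live under the Socket DLX directory, are keyed by a hash of `url:name`, and are guarded by a `concurrency.lock` so parallel processes do not download the same binary twice.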
@@ -11,6 +11,7 @@ export interface DownloadPackageResult {
  export interface DlxPackageOptions {
  /**
  * Package to install (e.g., '@cyclonedx/cdxgen@10.0.0').
+ * Aligns with npx --package flag.
  */
  package: string;
  /**
@@ -34,8 +35,19 @@ export interface DlxPackageOptions {
  binaryName?: string | undefined;
  /**
  * Force reinstallation even if package exists.
+ * Aligns with npx --yes/-y flag behavior.
  */
  force?: boolean | undefined;
+ /**
+ * Skip confirmation prompts (auto-approve).
+ * Aligns with npx --yes/-y flag.
+ */
+ yes?: boolean | undefined;
+ /**
+ * Suppress output (quiet mode).
+ * Aligns with npx --quiet/-q and pnpm --silent/-s flags.
+ */
+ quiet?: boolean | undefined;
  /**
  * Additional spawn options for the execution.
  */
@@ -88,9 +100,12 @@ export declare function dlxPackage(args: readonly string[] | string[], options?:
  */
  export declare function downloadPackage(options: DlxPackageOptions): Promise<DownloadPackageResult>;
  /**
- * Execute a package's binary.
+ * Execute a package's binary with cross-platform shell handling.
  * The package must already be installed (use downloadPackage first).
  *
+ * On Windows, script files (.bat, .cmd, .ps1) require shell: true.
+ * Matches npm/npx execution behavior.
+ *
  * @example
  * ```typescript
  * // Execute an already-installed package
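The hunks above add `yes` and `quiet` to DlxPackageOptions and document the Windows shell handling in executePackage. As a rough illustration of the updated surface (the subpath import is an assumption; the package specs mirror the examples in the embedded docs):

```typescript
// Usage sketch for the 3.0.0 dlx-package API. The import specifier is assumed;
// `quiet` is shown exactly as declared in the .d.ts hunk above.
import {
  dlxPackage,
  downloadPackage,
  executePackage,
} from '@socketsecurity/lib/dlx-package'

async function demo(): Promise<void> {
  // Install-and-run in one call, auto-approving prompts and muting output.
  const ran = await dlxPackage(['--version'], {
    package: '@cyclonedx/cdxgen@10.0.0',
    yes: true,
    quiet: true,
  })
  await ran.spawnPromise

  // Split flow: install first, execute later. On Windows, executePackage
  // now spawns .bat/.cmd/.ps1 wrappers with shell: true.
  const downloaded = await downloadPackage({ package: 'cowsay@1.5.0' })
  await executePackage(downloaded.binaryPath, ['Hello World'], {
    stdio: 'inherit',
  })
}

demo().catch(console.error)
```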
@@ -1,10 +1,10 @@
  /* Socket Lib - Built with esbuild */
- var v=Object.create;var y=Object.defineProperty;var _=Object.getOwnPropertyDescriptor;var $=Object.getOwnPropertyNames;var C=Object.getPrototypeOf,I=Object.prototype.hasOwnProperty;var T=(n,e)=>{for(var t in e)y(n,t,{get:e[t],enumerable:!0})},w=(n,e,t,a)=>{if(e&&typeof e=="object"||typeof e=="function")for(let r of $(e))!I.call(n,r)&&r!==t&&y(n,r,{get:()=>e[r],enumerable:!(a=_(e,r))||a.enumerable});return n};var A=(n,e,t)=>(t=n!=null?v(C(n)):{},w(e||!n||!n.__esModule?y(t,"default",{value:n,enumerable:!0}):t,n)),N=n=>w(y({},"__esModule",{value:!0}),n);var B={};T(B,{dlxPackage:()=>L,downloadPackage:()=>R,executePackage:()=>j});module.exports=N(B);var l=require("node:fs"),d=A(require("node:path")),P=require("./constants/platform"),E=require("./constants/packages"),x=require("./dlx"),b=require("./fs"),u=require("./path"),D=require("./paths"),O=require("./process-lock"),S=require("./spawn");let k;function J(){return k===void 0&&(k=require("./external/npm-package-arg")),k}let h;function F(){return h===void 0&&(h=require("./external/pacote")),h}const K=/[~^><=xX* ]|\|\|/;function M(n){try{const t=J()(n),a=t.type==="tag"||t.type==="version"||t.type==="range"?t.fetchSpec:void 0;return{name:t.name||n,version:a}}catch{const e=n.lastIndexOf("@");return e===-1||n.startsWith("@")?{name:n,version:void 0}:{name:n.slice(0,e),version:n.slice(e+1)}}}async function V(n,e,t){const a=(0,x.generateCacheKey)(e),r=(0,u.normalizePath)(d.default.join((0,D.getSocketDlxDir)(),a)),c=(0,u.normalizePath)(d.default.join(r,"node_modules",n));try{await l.promises.mkdir(r,{recursive:!0})}catch(i){const o=i.code;throw o==="EACCES"||o==="EPERM"?new Error(`Permission denied creating package directory: ${r}
- Please check directory permissions or run with appropriate access.`,{cause:i}):o==="EROFS"?new Error(`Cannot create package directory on read-only filesystem: ${r}
- Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.`,{cause:i}):new Error(`Failed to create package directory: ${r}`,{cause:i})}const p=d.default.join(r,"concurrency.lock");return await O.processLock.withLock(p,async()=>{if(!t&&(0,l.existsSync)(c)){const o=d.default.join(c,"package.json");if((0,l.existsSync)(o))return{installed:!1,packageDir:r}}const i=(0,E.getPacoteCachePath)();try{await F().extract(e,c,{cache:i||d.default.join(r,".cache")})}catch(o){const s=o.code;throw s==="E404"||s==="ETARGET"?new Error(`Package not found: ${e}
+ var $=Object.create;var y=Object.defineProperty;var C=Object.getOwnPropertyDescriptor;var I=Object.getOwnPropertyNames;var T=Object.getPrototypeOf,A=Object.prototype.hasOwnProperty;var N=(e,n)=>{for(var t in n)y(e,t,{get:n[t],enumerable:!0})},b=(e,n,t,a)=>{if(n&&typeof n=="object"||typeof n=="function")for(let r of I(n))!A.call(e,r)&&r!==t&&y(e,r,{get:()=>n[r],enumerable:!(a=C(n,r))||a.enumerable});return e};var F=(e,n,t)=>(t=e!=null?$(T(e)):{},b(n||!e||!e.__esModule?y(t,"default",{value:e,enumerable:!0}):t,e)),J=e=>b(y({},"__esModule",{value:!0}),e);var W={};N(W,{dlxPackage:()=>U,downloadPackage:()=>v,executePackage:()=>_});module.exports=J(W);var d=F(require("path")),m=require("./constants/platform"),D=require("./constants/packages"),S=require("./dlx"),h=require("./fs"),g=require("./path"),O=require("./paths"),R=require("./process-lock"),j=require("./spawn");let w;function E(){return w===void 0&&(w=require("node:fs")),w}let P;function M(){return P===void 0&&(P=require("./external/npm-package-arg")),P}let x;function q(){return x===void 0&&(x=require("./external/pacote")),x}const K=/[~^><=xX* ]|\|\|/;function V(e){try{const t=M()(e),a=t.type==="tag"||t.type==="version"||t.type==="range"?t.fetchSpec:void 0;return{name:t.name||e,version:a}}catch{const n=e.lastIndexOf("@");return n===-1||e.startsWith("@")?{name:e,version:void 0}:{name:e.slice(0,n),version:e.slice(n+1)}}}async function B(e,n,t){const a=(0,S.generateCacheKey)(n),r=(0,g.normalizePath)(d.default.join((0,O.getSocketDlxDir)(),a)),p=(0,g.normalizePath)(d.default.join(r,"node_modules",e));try{await(0,h.safeMkdir)(r,{recursive:!0})}catch(o){const f=o.code;throw f==="EACCES"||f==="EPERM"?new Error(`Permission denied creating package directory: ${r}
+ Please check directory permissions or run with appropriate access.`,{cause:o}):f==="EROFS"?new Error(`Cannot create package directory on read-only filesystem: ${r}
+ Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.`,{cause:o}):new Error(`Failed to create package directory: ${r}`,{cause:o})}const s=d.default.join(r,"concurrency.lock");return await R.processLock.withLock(s,async()=>{const o=E();if(!t&&o.existsSync(p)){const i=d.default.join(p,"package.json");if(o.existsSync(i))return{installed:!1,packageDir:r}}const f=(0,D.getPacoteCachePath)();try{await q().extract(n,p,{cache:f||d.default.join(r,".cache")})}catch(i){const c=i.code;throw c==="E404"||c==="ETARGET"?new Error(`Package not found: ${n}
  Verify the package exists on npm registry and check the version.
- Visit https://www.npmjs.com/package/${n} to see available versions.`,{cause:o}):s==="ENOTFOUND"||s==="ETIMEDOUT"||s==="EAI_AGAIN"?new Error(`Network error installing ${e}
- Check your internet connection and try again.`,{cause:o}):new Error(`Failed to install package: ${e}
- Destination: ${c}
- Check npm registry connectivity or package name.`,{cause:o})}return{installed:!0,packageDir:r}},{staleMs:5e3,touchIntervalMs:2e3})}function q(n,e,t){const a=(0,u.normalizePath)(d.default.join(n,"node_modules",e)),r=d.default.join(a,"package.json"),p=(0,b.readJsonSync)(r).bin;let i;if(typeof p=="string")i=p;else if(typeof p=="object"&&p!==null){const o=p,s=Object.keys(o);if(s.length===1)i=o[s[0]];else{const m=e.split("/").pop(),g=[t,m,e.replace(/^@[^/]+\//,"")].filter(Boolean);for(const f of g)if(f&&o[f]){i=o[f];break}!i&&s.length>0&&(i=o[s[0]])}}if(!i)throw new Error(`No binary found for package "${e}"`);return(0,u.normalizePath)(d.default.join(a,i))}async function L(n,e,t){const a=await R(e),r=j(a.binaryPath,n,e?.spawnOptions,t);return{...a,spawnPromise:r}}async function R(n){const{binaryName:e,force:t,package:a}={__proto__:null,...n},{name:r,version:c}=M(a),p=c!==void 0&&K.test(c),i=t!==void 0?t:p,o=c?`${r}@${c}`:r,{installed:s,packageDir:m}=await V(r,o,i),g=q(m,r,e);if(!P.WIN32&&(0,l.existsSync)(g)){const{chmodSync:f}=require("node:fs");try{f(g,493)}catch{}}return{binaryPath:g,installed:s,packageDir:m}}function j(n,e,t,a){return(0,S.spawn)(n,e,t,a)}0&&(module.exports={dlxPackage,downloadPackage,executePackage});
+ Visit https://www.npmjs.com/package/${e} to see available versions.`,{cause:i}):c==="ENOTFOUND"||c==="ETIMEDOUT"||c==="EAI_AGAIN"?new Error(`Network error installing ${n}
+ Check your internet connection and try again.`,{cause:i}):new Error(`Failed to install package: ${n}
+ Destination: ${p}
+ Check npm registry connectivity or package name.`,{cause:i})}return{installed:!0,packageDir:r}},{staleMs:5e3,touchIntervalMs:2e3})}function L(e){const n=E();if(!m.WIN32)return e;const t=[".cmd",".bat",".ps1",".exe",""];for(const a of t){const r=e+a;if(n.existsSync(r))return r}return e}function G(e,n,t){const a=(0,g.normalizePath)(d.default.join(e,"node_modules",n)),r=d.default.join(a,"package.json"),s=(0,h.readJsonSync)(r).bin;let o;if(typeof s=="string")o=s;else if(typeof s=="object"&&s!==null){const i=s,c=Object.keys(i);if(c.length===1)o=i[c[0]];else{const k=n.split("/").pop(),u=[t,k,n.replace(/^@[^/]+\//,"")].filter(Boolean);for(const l of u)if(l&&i[l]){o=i[l];break}!o&&c.length>0&&(o=i[c[0]])}}if(!o)throw new Error(`No binary found for package "${n}"`);const f=(0,g.normalizePath)(d.default.join(a,o));return L(f)}async function U(e,n,t){const a=await v(n),r=_(a.binaryPath,e,n?.spawnOptions,t);return{...a,spawnPromise:r}}async function v(e){const n=E(),{binaryName:t,force:a,package:r,yes:p}={__proto__:null,...e},{name:s,version:o}=V(r),f=o!==void 0&&K.test(o),i=a!==void 0?a:p===!0?!0:f,c=o?`${s}@${o}`:s,{installed:k,packageDir:u}=await B(s,c,i),l=G(u,s,t);if(!m.WIN32&&n.existsSync(l))try{n.chmodSync(l,493)}catch{}return{binaryPath:l,installed:k,packageDir:u}}function _(e,n,t,a){const p=m.WIN32&&/\.(?:bat|cmd|ps1)$/i.test(e)?{...t,shell:!0}:t;return(0,j.spawn)(e,n,p,a)}0&&(module.exports={dlxPackage,downloadPackage,executePackage});
  //# sourceMappingURL=dlx-package.js.map
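The behavioral change in this bundle is Windows binary resolution: resolved bin paths are probed for `.cmd`, `.bat`, `.ps1`, and `.exe` wrappers, and script wrappers are spawned with `shell: true`. The standalone sketch below restates that logic from the accompanying source map; it is a simplified paraphrase, not the library's exported API.

```typescript
import { existsSync } from 'node:fs'

const WIN32 = process.platform === 'win32'

// Paraphrase of the wrapper probe added in 3.0.0: on Windows, try the common
// npm bin-link extensions in order and fall back to the bare path.
function resolveBinaryPathSketch(basePath: string): string {
  if (!WIN32) {
    return basePath
  }
  for (const ext of ['.cmd', '.bat', '.ps1', '.exe', '']) {
    const candidate = basePath + ext
    if (existsSync(candidate)) {
      return candidate
    }
  }
  return basePath
}

// Script wrappers are not directly executable, so they need cmd.exe
// (shell: true); .exe files are real binaries and are spawned as-is.
function needsShell(binaryPath: string): boolean {
  return WIN32 && /\.(?:bat|cmd|ps1)$/i.test(binaryPath)
}
```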
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../src/dlx-package.ts"],
- "sourcesContent": ["/**\n * @fileoverview DLX package execution - Install and execute npm packages.\n *\n * This module provides functionality to install and execute npm packages\n * in the ~/.socket/_dlx directory, similar to npx but with Socket's own cache.\n *\n * Uses content-addressed storage like npm's _npx:\n * - Hash is generated from package spec (name@version)\n * - Each unique spec gets its own directory: ~/.socket/_dlx/<hash>/\n * - Allows caching multiple versions of the same package\n *\n * Concurrency protection:\n * - Uses process-lock to prevent concurrent installation corruption\n * - Lock file created at ~/.socket/_dlx/<hash>/concurrency.lock\n * - Uses npm npx's concurrency.lock naming convention (5s stale, 2s touching)\n * - Prevents multiple processes from corrupting the same package installation\n *\n * Version range handling:\n * - Exact versions (1.0.0) use cache if available\n * - Range versions (^1.0.0, ~1.0.0) auto-force to get latest within range\n * - User can override with explicit force: false\n *\n * Key difference from dlx-binary.ts:\n * - dlx-binary.ts: Downloads standalone binaries from URLs\n * - dlx-package.ts: Installs npm packages from registries\n *\n * Implementation:\n * - Uses pacote for package installation (no npm CLI required)\n * - Split into downloadPackage() and executePackage() for flexibility\n * - dlxPackage() combines both for convenience\n */\n\nimport { existsSync, promises as fs } from 'node:fs'\nimport path from 'node:path'\n\nimport { WIN32 } from './constants/platform'\nimport { getPacoteCachePath } from './constants/packages'\nimport { generateCacheKey } from './dlx'\nimport { readJsonSync } from './fs'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nlet _npmPackageArg: typeof import('npm-package-arg') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNpmPackageArg() {\n if (_npmPackageArg === undefined) {\n _npmPackageArg = /*@__PURE__*/ require('./external/npm-package-arg')\n }\n return _npmPackageArg as typeof import('npm-package-arg')\n}\n\nlet _pacote: typeof import('pacote') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPacote() {\n if (_pacote === undefined) {\n _pacote = /*@__PURE__*/ require('./external/pacote')\n }\n return _pacote as typeof import('pacote')\n}\n\n/**\n * Regex to check if a version string contains range operators.\n * Matches any version with range operators: ~, ^, >, <, =, x, X, *, spaces, or ||.\n */\nconst rangeOperatorsRegExp = /[~^><=xX* ]|\\|\\|/\n\nexport interface DownloadPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary. */\n binaryPath: string\n /** Whether the package was newly installed. */\n installed: boolean\n}\n\nexport interface DlxPackageOptions {\n /**\n * Package to install (e.g., '@cyclonedx/cdxgen@10.0.0').\n */\n package: string\n /**\n * Binary name to execute (optional - auto-detected in most cases).\n *\n * Auto-detection logic:\n * 1. If package has only one binary, uses it automatically\n * 2. Tries user-provided binaryName\n * 3. Tries last segment of package name (e.g., 'cli' from '@socketsecurity/cli')\n * 4. 
Falls back to first binary\n *\n * Only needed when package has multiple binaries and auto-detection fails.\n *\n * @example\n * // Auto-detected (single binary)\n * { package: '@socketsecurity/cli' } // Finds 'socket' binary automatically\n *\n * // Explicit (multiple binaries)\n * { package: 'some-tool', binaryName: 'specific-tool' }\n */\n binaryName?: string | undefined\n /**\n * Force reinstallation even if package exists.\n */\n force?: boolean | undefined\n /**\n * Additional spawn options for the execution.\n */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary that was executed. */\n binaryPath: string\n /** Whether the package was newly installed. */\n installed: boolean\n /** The spawn promise for the running process. */\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Parse package spec into name and version using npm-package-arg.\n * Examples:\n * - 'lodash@4.17.21' \u2192 { name: 'lodash', version: '4.17.21' }\n * - '@scope/pkg@1.0.0' \u2192 { name: '@scope/pkg', version: '1.0.0' }\n * - 'lodash' \u2192 { name: 'lodash', version: undefined }\n */\nfunction parsePackageSpec(spec: string): {\n name: string\n version: string | undefined\n} {\n try {\n const npa = getNpmPackageArg()\n const parsed = npa(spec)\n\n // Extract version from different types of specs.\n // For registry specs, use fetchSpec (the version/range).\n // For git/file/etc, version will be undefined.\n const version =\n parsed.type === 'tag'\n ? parsed.fetchSpec\n : parsed.type === 'version' || parsed.type === 'range'\n ? parsed.fetchSpec\n : undefined\n\n return {\n name: parsed.name || spec,\n version,\n }\n } catch {\n // Fallback to simple parsing if npm-package-arg fails.\n const atIndex = spec.lastIndexOf('@')\n if (atIndex === -1 || spec.startsWith('@')) {\n // No version or scoped package without version.\n return { name: spec, version: undefined }\n }\n return {\n name: spec.slice(0, atIndex),\n version: spec.slice(atIndex + 1),\n }\n }\n}\n\n/**\n * Install package to ~/.socket/_dlx/<hash>/ if not already installed.\n * Uses pacote for installation (no npm CLI required).\n * Protected by process lock to prevent concurrent installation corruption.\n */\nasync function ensurePackageInstalled(\n packageName: string,\n packageSpec: string,\n force: boolean,\n): Promise<{ installed: boolean; packageDir: string }> {\n const cacheKey = generateCacheKey(packageSpec)\n const packageDir = normalizePath(path.join(getSocketDlxDir(), cacheKey))\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n\n // Ensure package directory exists before creating lock.\n // The lock directory will be created inside this directory.\n try {\n await fs.mkdir(packageDir, { recursive: true })\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating package directory: ${packageDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create package directory on read-only filesystem: ${packageDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new Error(`Failed to create package directory: ${packageDir}`, {\n cause: e,\n })\n }\n\n // Use process lock to prevent concurrent 
installations.\n // Uses npm npx's concurrency.lock naming convention.\n const lockPath = path.join(packageDir, 'concurrency.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n // Double-check if already installed (unless force).\n // Another process may have installed while waiting for lock.\n if (!force && existsSync(installedDir)) {\n // Verify package.json exists.\n const pkgJsonPath = path.join(installedDir, 'package.json')\n if (existsSync(pkgJsonPath)) {\n return { installed: false, packageDir }\n }\n }\n\n // Use pacote to extract the package.\n // Pacote leverages npm cache when available but doesn't require npm CLI.\n const pacoteCachePath = getPacoteCachePath()\n try {\n await getPacote().extract(packageSpec, installedDir, {\n // Use consistent pacote cache path (respects npm cache locations when available).\n cache: pacoteCachePath || path.join(packageDir, '.cache'),\n })\n } catch (e) {\n const code = (e as any).code\n if (code === 'E404' || code === 'ETARGET') {\n throw new Error(\n `Package not found: ${packageSpec}\\n` +\n 'Verify the package exists on npm registry and check the version.\\n' +\n `Visit https://www.npmjs.com/package/${packageName} to see available versions.`,\n { cause: e },\n )\n }\n if (\n code === 'ENOTFOUND' ||\n code === 'ETIMEDOUT' ||\n code === 'EAI_AGAIN'\n ) {\n throw new Error(\n `Network error installing ${packageSpec}\\n` +\n 'Check your internet connection and try again.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to install package: ${packageSpec}\\n` +\n `Destination: ${installedDir}\\n` +\n 'Check npm registry connectivity or package name.',\n { cause: e },\n )\n }\n\n return { installed: true, packageDir }\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Find the binary path for an installed package.\n * Intelligently handles packages with single or multiple binaries.\n */\nfunction findBinaryPath(\n packageDir: string,\n packageName: string,\n binaryName?: string,\n): string {\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n const pkgJsonPath = path.join(installedDir, 'package.json')\n\n // Read package.json to find bin entry.\n const pkgJson = readJsonSync(pkgJsonPath) as Record<string, unknown>\n const bin = pkgJson['bin']\n\n let binPath: string | undefined\n\n if (typeof bin === 'string') {\n // Single binary - use it directly.\n binPath = bin\n } else if (typeof bin === 'object' && bin !== null) {\n const binObj = bin as Record<string, string>\n const binKeys = Object.keys(binObj)\n\n // If only one binary, use it regardless of name.\n if (binKeys.length === 1) {\n binPath = binObj[binKeys[0]!]\n } else {\n // Multiple binaries - try to find the right one:\n // 1. User-provided binaryName\n // 2. Last segment of package name (e.g., 'cli' from '@socketsecurity/cli')\n // 3. Full package name without scope (e.g., 'cli' from '@socketsecurity/cli')\n // 4. 
First binary as fallback\n const lastSegment = packageName.split('/').pop()\n const candidates = [\n binaryName,\n lastSegment,\n packageName.replace(/^@[^/]+\\//, ''),\n ].filter(Boolean)\n\n for (const candidate of candidates) {\n if (candidate && binObj[candidate]) {\n binPath = binObj[candidate]\n break\n }\n }\n\n // Fallback to first binary if nothing matched.\n if (!binPath && binKeys.length > 0) {\n binPath = binObj[binKeys[0]!]\n }\n }\n }\n\n if (!binPath) {\n throw new Error(`No binary found for package \"${packageName}\"`)\n }\n\n return normalizePath(path.join(installedDir, binPath))\n}\n\n/**\n * Execute a package via DLX - install if needed and run its binary.\n *\n * This is the Socket equivalent of npx/pnpm dlx/yarn dlx, but using\n * our own cache directory (~/.socket/_dlx) and installation logic.\n *\n * Auto-forces reinstall for version ranges to get latest within range.\n *\n * @example\n * ```typescript\n * // Download and execute cdxgen\n * const result = await dlxPackage(\n * ['--version'],\n * { package: '@cyclonedx/cdxgen@10.0.0' }\n * )\n * await result.spawnPromise\n * ```\n */\nexport async function dlxPackage(\n args: readonly string[] | string[],\n options?: DlxPackageOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxPackageResult> {\n // Download the package.\n const downloadResult = await downloadPackage(options!)\n\n // Execute the binary.\n const spawnPromise = executePackage(\n downloadResult.binaryPath,\n args,\n options?.spawnOptions,\n spawnExtra,\n )\n\n return {\n ...downloadResult,\n spawnPromise,\n }\n}\n\n/**\n * Download and install a package without executing it.\n * This is useful for self-update or when you need the package files\n * but don't want to run the binary immediately.\n *\n * @example\n * ```typescript\n * // Install @socketsecurity/cli without running it\n * const result = await downloadPackage({\n * package: '@socketsecurity/cli@1.2.0',\n * force: true\n * })\n * console.log('Installed to:', result.packageDir)\n * console.log('Binary at:', result.binaryPath)\n * ```\n */\nexport async function downloadPackage(\n options: DlxPackageOptions,\n): Promise<DownloadPackageResult> {\n const {\n binaryName,\n force: userForce,\n package: packageSpec,\n } = {\n __proto__: null,\n ...options,\n } as DlxPackageOptions\n\n // Parse package spec.\n const { name: packageName, version: packageVersion } =\n parsePackageSpec(packageSpec)\n\n // Auto-force for version ranges to get latest within range.\n // User can still override with explicit force: false if they want cache.\n const isVersionRange =\n packageVersion !== undefined && rangeOperatorsRegExp.test(packageVersion)\n const force = userForce !== undefined ? userForce : isVersionRange\n\n // Build full package spec for installation.\n const fullPackageSpec = packageVersion\n ? 
`${packageName}@${packageVersion}`\n : packageName\n\n // Ensure package is installed.\n const { installed, packageDir } = await ensurePackageInstalled(\n packageName,\n fullPackageSpec,\n force,\n )\n\n // Find binary path.\n const binaryPath = findBinaryPath(packageDir, packageName, binaryName)\n\n // Make binary executable on Unix systems.\n if (!WIN32 && existsSync(binaryPath)) {\n const { chmodSync } = require('node:fs')\n try {\n chmodSync(binaryPath, 0o755)\n } catch {\n // Ignore chmod errors.\n }\n }\n\n return {\n binaryPath,\n installed,\n packageDir,\n }\n}\n\n/**\n * Execute a package's binary.\n * The package must already be installed (use downloadPackage first).\n *\n * @example\n * ```typescript\n * // Execute an already-installed package\n * const downloaded = await downloadPackage({ package: 'cowsay@1.5.0' })\n * const result = await executePackage(\n * downloaded.binaryPath,\n * ['Hello World'],\n * { stdio: 'inherit' }\n * )\n * ```\n */\nexport function executePackage(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n return spawn(binaryPath, args, spawnOptions, spawnExtra)\n}\n"],
- "mappings": ";6iBAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,gBAAAE,EAAA,oBAAAC,EAAA,mBAAAC,IAAA,eAAAC,EAAAL,GAgCA,IAAAM,EAA2C,mBAC3CC,EAAiB,wBAEjBC,EAAsB,gCACtBC,EAAmC,gCACnCC,EAAiC,iBACjCC,EAA6B,gBAC7BC,EAA8B,kBAC9BC,EAAgC,mBAChCC,EAA4B,0BAE5BC,EAAsB,mBAEtB,IAAIC,EAEJ,SAASC,GAAmB,CAC1B,OAAID,IAAmB,SACrBA,EAA+B,QAAQ,4BAA4B,GAE9DA,CACT,CAEA,IAAIE,EAEJ,SAASC,GAAY,CACnB,OAAID,IAAY,SACdA,EAAwB,QAAQ,mBAAmB,GAE9CA,CACT,CAMA,MAAME,EAAuB,mBA+D7B,SAASC,EAAiBC,EAGxB,CACA,GAAI,CAEF,MAAMC,EADMN,EAAiB,EACVK,CAAI,EAKjBE,EACJD,EAAO,OAAS,OAEZA,EAAO,OAAS,WAAaA,EAAO,OAAS,QAD7CA,EAAO,UAGL,OAER,MAAO,CACL,KAAMA,EAAO,MAAQD,EACrB,QAAAE,CACF,CACF,MAAQ,CAEN,MAAMC,EAAUH,EAAK,YAAY,GAAG,EACpC,OAAIG,IAAY,IAAMH,EAAK,WAAW,GAAG,EAEhC,CAAE,KAAMA,EAAM,QAAS,MAAU,EAEnC,CACL,KAAMA,EAAK,MAAM,EAAGG,CAAO,EAC3B,QAASH,EAAK,MAAMG,EAAU,CAAC,CACjC,CACF,CACF,CAOA,eAAeC,EACbC,EACAC,EACAC,EACqD,CACrD,MAAMC,KAAW,oBAAiBF,CAAW,EACvCG,KAAa,iBAAc,EAAAC,QAAK,QAAK,mBAAgB,EAAGF,CAAQ,CAAC,EACjEG,KAAe,iBACnB,EAAAD,QAAK,KAAKD,EAAY,eAAgBJ,CAAW,CACnD,EAIA,GAAI,CACF,MAAM,EAAAO,SAAG,MAAMH,EAAY,CAAE,UAAW,EAAK,CAAC,CAChD,OAASI,EAAG,CACV,MAAMC,EAAQD,EAA4B,KAC1C,MAAIC,IAAS,UAAYA,IAAS,QAC1B,IAAI,MACR,iDAAiDL,CAAU;AAAA,oEAE3D,CAAE,MAAOI,CAAE,CACb,EAEEC,IAAS,QACL,IAAI,MACR,4DAA4DL,CAAU;AAAA,iFAEtE,CAAE,MAAOI,CAAE,CACb,EAEI,IAAI,MAAM,uCAAuCJ,CAAU,GAAI,CACnE,MAAOI,CACT,CAAC,CACH,CAIA,MAAME,EAAW,EAAAL,QAAK,KAAKD,EAAY,kBAAkB,EAEzD,OAAO,MAAM,cAAY,SACvBM,EACA,SAAY,CAGV,GAAI,CAACR,MAAS,cAAWI,CAAY,EAAG,CAEtC,MAAMK,EAAc,EAAAN,QAAK,KAAKC,EAAc,cAAc,EAC1D,MAAI,cAAWK,CAAW,EACxB,MAAO,CAAE,UAAW,GAAO,WAAAP,CAAW,CAE1C,CAIA,MAAMQ,KAAkB,sBAAmB,EAC3C,GAAI,CACF,MAAMpB,EAAU,EAAE,QAAQS,EAAaK,EAAc,CAEnD,MAAOM,GAAmB,EAAAP,QAAK,KAAKD,EAAY,QAAQ,CAC1D,CAAC,CACH,OAASI,EAAG,CACV,MAAMC,EAAQD,EAAU,KACxB,MAAIC,IAAS,QAAUA,IAAS,UACxB,IAAI,MACR,sBAAsBR,CAAW;AAAA;AAAA,sCAEQD,CAAW,8BACpD,CAAE,MAAOQ,CAAE,CACb,EAGAC,IAAS,aACTA,IAAS,aACTA,IAAS,YAEH,IAAI,MACR,4BAA4BR,CAAW;AAAA,+CAEvC,CAAE,MAAOO,CAAE,CACb,EAEI,IAAI,MACR,8BAA8BP,CAAW;AAAA,eACvBK,CAAY;AAAA,kDAE9B,CAAE,MAAOE,CAAE,CACb,CACF,CAEA,MAAO,CAAE,UAAW,GAAM,WAAAJ,CAAW,CACvC,EACA,CAEE,QAAS,IACT,gBAAiB,GACnB,CACF,CACF,CAMA,SAASS,EACPT,EACAJ,EACAc,EACQ,CACR,MAAMR,KAAe,iBACnB,EAAAD,QAAK,KAAKD,EAAY,eAAgBJ,CAAW,CACnD,EACMW,EAAc,EAAAN,QAAK,KAAKC,EAAc,cAAc,EAIpDS,KADU,gBAAaJ,CAAW,EACpB,IAEpB,IAAIK,EAEJ,GAAI,OAAOD,GAAQ,SAEjBC,EAAUD,UACD,OAAOA,GAAQ,UAAYA,IAAQ,KAAM,CAClD,MAAME,EAASF,EACTG,EAAU,OAAO,KAAKD,CAAM,EAGlC,GAAIC,EAAQ,SAAW,EACrBF,EAAUC,EAAOC,EAAQ,CAAC,CAAE,MACvB,CAML,MAAMC,EAAcnB,EAAY,MAAM,GAAG,EAAE,IAAI,EACzCoB,EAAa,CACjBN,EACAK,EACAnB,EAAY,QAAQ,YAAa,EAAE,CACrC,EAAE,OAAO,OAAO,EAEhB,UAAWqB,KAAaD,EACtB,GAAIC,GAAaJ,EAAOI,CAAS,EAAG,CAClCL,EAAUC,EAAOI,CAAS,EAC1B,KACF,CAIE,CAACL,GAAWE,EAAQ,OAAS,IAC/BF,EAAUC,EAAOC,EAAQ,CAAC,CAAE,EAEhC,CACF,CAEA,GAAI,CAACF,EACH,MAAM,IAAI,MAAM,gCAAgChB,CAAW,GAAG,EAGhE,SAAO,iBAAc,EAAAK,QAAK,KAAKC,EAAcU,CAAO,CAAC,CACvD,CAoBA,eAAsBzC,EACpB+C,EACAC,EACAC,EAC2B,CAE3B,MAAMC,EAAiB,MAAMjD,EAAgB+C,CAAQ,EAG/CG,EAAejD,EACnBgD,EAAe,WACfH,EACAC,GAAS,aACTC,CACF,EAEA,MAAO,CACL,GAAGC,EACH,aAAAC,CACF,CACF,CAkBA,eAAsBlD,EACpB+C,EACgC,CAChC,KAAM,CACJ,WAAAT,EACA,MAAOa,EACP,QAAS1B,CACX,EAAI,CACF,UAAW,KACX,GAAGsB,CACL,EAGM,CAAE,KAAMvB,EAAa,QAAS4B,CAAe,EACjDlC,EAAiBO,CAAW,EAIxB4B,EACJD,IAAmB,QAAanC,EAAqB,KAAKmC,CAAc,EACpE1B,EAAQyB,IAAc,OAAYA,EAAYE,EAG9CC,EAAkBF,EACpB,GAAG5B,CAAW,IAAI4B,CAAc,GAChC5B,EAGE,CAAE,UAAA+B,EAAW,WAAA3B,CAAW,EAAI,MAAML,EACtCC,EACA8B,EACA5B,CACF,EAGM8B,EAAanB,EAAeT,EAAYJ,EAAac,CAAU,EAGrE,GAAI,CAAC,YAAS,cAAWkB,CAAU,EAAG,CACpC,KAAM,CAAE,UAAAC,CAAU,EAAI,QAAQ,SAAS,EACvC,GAAI,CACFA,EAAUD,EAAY,GAAK,CAC7B,MAAQ,CAER,CACF,CAEA,MAAO,CACL,WAAAA
,EACA,UAAAD,EACA,WAAA3B,CACF,CACF,CAiBO,SAAS3B,EACduD,EACAV,EACAY,EACAV,EAC0B,CAC1B,SAAO,SAAMQ,EAAYV,EAAMY,EAAcV,CAAU,CACzD",
- "names": ["dlx_package_exports", "__export", "dlxPackage", "downloadPackage", "executePackage", "__toCommonJS", "import_node_fs", "import_node_path", "import_platform", "import_packages", "import_dlx", "import_fs", "import_path", "import_paths", "import_process_lock", "import_spawn", "_npmPackageArg", "getNpmPackageArg", "_pacote", "getPacote", "rangeOperatorsRegExp", "parsePackageSpec", "spec", "parsed", "version", "atIndex", "ensurePackageInstalled", "packageName", "packageSpec", "force", "cacheKey", "packageDir", "path", "installedDir", "fs", "e", "code", "lockPath", "pkgJsonPath", "pacoteCachePath", "findBinaryPath", "binaryName", "bin", "binPath", "binObj", "binKeys", "lastSegment", "candidates", "candidate", "args", "options", "spawnExtra", "downloadResult", "spawnPromise", "userForce", "packageVersion", "isVersionRange", "fullPackageSpec", "installed", "binaryPath", "chmodSync", "spawnOptions"]
+ "sourcesContent": ["/**\n * @fileoverview DLX package execution - Install and execute npm packages.\n *\n * This module provides functionality to install and execute npm packages\n * in the ~/.socket/_dlx directory, similar to npx but with Socket's own cache.\n *\n * Uses content-addressed storage like npm's _npx:\n * - Hash is generated from package spec (name@version)\n * - Each unique spec gets its own directory: ~/.socket/_dlx/<hash>/\n * - Allows caching multiple versions of the same package\n *\n * Concurrency protection:\n * - Uses process-lock to prevent concurrent installation corruption\n * - Lock file created at ~/.socket/_dlx/<hash>/concurrency.lock\n * - Uses npm npx's concurrency.lock naming convention (5s stale, 2s touching)\n * - Prevents multiple processes from corrupting the same package installation\n *\n * Version range handling:\n * - Exact versions (1.0.0) use cache if available\n * - Range versions (^1.0.0, ~1.0.0) auto-force to get latest within range\n * - User can override with explicit force: false\n *\n * Key difference from dlx-binary.ts:\n * - dlx-binary.ts: Downloads standalone binaries from URLs\n * - dlx-package.ts: Installs npm packages from registries\n *\n * Implementation:\n * - Uses pacote for package installation (no npm CLI required)\n * - Split into downloadPackage() and executePackage() for flexibility\n * - dlxPackage() combines both for convenience\n */\n\nimport path from 'path'\n\nimport { WIN32 } from './constants/platform'\nimport { getPacoteCachePath } from './constants/packages'\nimport { generateCacheKey } from './dlx'\nimport { readJsonSync, safeMkdir } from './fs'\nimport { normalizePath } from './path'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\nimport type { SpawnExtra, SpawnOptions } from './spawn'\nimport { spawn } from './spawn'\n\nlet _fs: typeof import('fs') | undefined\n/**\n * Lazily load the fs module to avoid Webpack errors.\n * Uses non-'node:' prefixed require to prevent Webpack bundling issues.\n *\n * @returns The Node.js fs module\n * @private\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getFs() {\n if (_fs === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _fs = /*@__PURE__*/ require('node:fs')\n }\n return _fs as typeof import('fs')\n}\n\nlet _npmPackageArg: typeof import('npm-package-arg') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getNpmPackageArg() {\n if (_npmPackageArg === undefined) {\n _npmPackageArg = /*@__PURE__*/ require('./external/npm-package-arg')\n }\n return _npmPackageArg as typeof import('npm-package-arg')\n}\n\nlet _pacote: typeof import('pacote') | undefined\n/*@__NO_SIDE_EFFECTS__*/\nfunction getPacote() {\n if (_pacote === undefined) {\n _pacote = /*@__PURE__*/ require('./external/pacote')\n }\n return _pacote as typeof import('pacote')\n}\n\n/**\n * Regex to check if a version string contains range operators.\n * Matches any version with range operators: ~, ^, >, <, =, x, X, *, spaces, or ||.\n */\nconst rangeOperatorsRegExp = /[~^><=xX* ]|\\|\\|/\n\nexport interface DownloadPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary. */\n binaryPath: string\n /** Whether the package was newly installed. 
*/\n installed: boolean\n}\n\nexport interface DlxPackageOptions {\n /**\n * Package to install (e.g., '@cyclonedx/cdxgen@10.0.0').\n * Aligns with npx --package flag.\n */\n package: string\n\n /**\n * Binary name to execute (optional - auto-detected in most cases).\n *\n * Auto-detection logic:\n * 1. If package has only one binary, uses it automatically\n * 2. Tries user-provided binaryName\n * 3. Tries last segment of package name (e.g., 'cli' from '@socketsecurity/cli')\n * 4. Falls back to first binary\n *\n * Only needed when package has multiple binaries and auto-detection fails.\n *\n * @example\n * // Auto-detected (single binary)\n * { package: '@socketsecurity/cli' } // Finds 'socket' binary automatically\n *\n * // Explicit (multiple binaries)\n * { package: 'some-tool', binaryName: 'specific-tool' }\n */\n binaryName?: string | undefined\n\n /**\n * Force reinstallation even if package exists.\n * Aligns with npx --yes/-y flag behavior.\n */\n force?: boolean | undefined\n\n /**\n * Skip confirmation prompts (auto-approve).\n * Aligns with npx --yes/-y flag.\n */\n yes?: boolean | undefined\n\n /**\n * Suppress output (quiet mode).\n * Aligns with npx --quiet/-q and pnpm --silent/-s flags.\n */\n quiet?: boolean | undefined\n\n /**\n * Additional spawn options for the execution.\n */\n spawnOptions?: SpawnOptions | undefined\n}\n\nexport interface DlxPackageResult {\n /** Path to the installed package directory. */\n packageDir: string\n /** Path to the binary that was executed. */\n binaryPath: string\n /** Whether the package was newly installed. */\n installed: boolean\n /** The spawn promise for the running process. */\n spawnPromise: ReturnType<typeof spawn>\n}\n\n/**\n * Parse package spec into name and version using npm-package-arg.\n * Examples:\n * - 'lodash@4.17.21' \u2192 { name: 'lodash', version: '4.17.21' }\n * - '@scope/pkg@1.0.0' \u2192 { name: '@scope/pkg', version: '1.0.0' }\n * - 'lodash' \u2192 { name: 'lodash', version: undefined }\n */\nfunction parsePackageSpec(spec: string): {\n name: string\n version: string | undefined\n} {\n try {\n const npa = getNpmPackageArg()\n const parsed = npa(spec)\n\n // Extract version from different types of specs.\n // For registry specs, use fetchSpec (the version/range).\n // For git/file/etc, version will be undefined.\n const version =\n parsed.type === 'tag'\n ? parsed.fetchSpec\n : parsed.type === 'version' || parsed.type === 'range'\n ? 
parsed.fetchSpec\n : undefined\n\n return {\n name: parsed.name || spec,\n version,\n }\n } catch {\n // Fallback to simple parsing if npm-package-arg fails.\n const atIndex = spec.lastIndexOf('@')\n if (atIndex === -1 || spec.startsWith('@')) {\n // No version or scoped package without version.\n return { name: spec, version: undefined }\n }\n return {\n name: spec.slice(0, atIndex),\n version: spec.slice(atIndex + 1),\n }\n }\n}\n\n/**\n * Install package to ~/.socket/_dlx/<hash>/ if not already installed.\n * Uses pacote for installation (no npm CLI required).\n * Protected by process lock to prevent concurrent installation corruption.\n */\nasync function ensurePackageInstalled(\n packageName: string,\n packageSpec: string,\n force: boolean,\n): Promise<{ installed: boolean; packageDir: string }> {\n const cacheKey = generateCacheKey(packageSpec)\n const packageDir = normalizePath(path.join(getSocketDlxDir(), cacheKey))\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n\n // Ensure package directory exists before creating lock.\n // The lock directory will be created inside this directory.\n try {\n await safeMkdir(packageDir, { recursive: true })\n } catch (e) {\n const code = (e as NodeJS.ErrnoException).code\n if (code === 'EACCES' || code === 'EPERM') {\n throw new Error(\n `Permission denied creating package directory: ${packageDir}\\n` +\n 'Please check directory permissions or run with appropriate access.',\n { cause: e },\n )\n }\n if (code === 'EROFS') {\n throw new Error(\n `Cannot create package directory on read-only filesystem: ${packageDir}\\n` +\n 'Ensure the filesystem is writable or set SOCKET_DLX_DIR to a writable location.',\n { cause: e },\n )\n }\n throw new Error(`Failed to create package directory: ${packageDir}`, {\n cause: e,\n })\n }\n\n // Use process lock to prevent concurrent installations.\n // Uses npm npx's concurrency.lock naming convention.\n const lockPath = path.join(packageDir, 'concurrency.lock')\n\n return await processLock.withLock(\n lockPath,\n async () => {\n const fs = getFs()\n // Double-check if already installed (unless force).\n // Another process may have installed while waiting for lock.\n if (!force && fs.existsSync(installedDir)) {\n // Verify package.json exists.\n const pkgJsonPath = path.join(installedDir, 'package.json')\n if (fs.existsSync(pkgJsonPath)) {\n return { installed: false, packageDir }\n }\n }\n\n // Use pacote to extract the package.\n // Pacote leverages npm cache when available but doesn't require npm CLI.\n const pacoteCachePath = getPacoteCachePath()\n try {\n await getPacote().extract(packageSpec, installedDir, {\n // Use consistent pacote cache path (respects npm cache locations when available).\n cache: pacoteCachePath || path.join(packageDir, '.cache'),\n })\n } catch (e) {\n const code = (e as any).code\n if (code === 'E404' || code === 'ETARGET') {\n throw new Error(\n `Package not found: ${packageSpec}\\n` +\n 'Verify the package exists on npm registry and check the version.\\n' +\n `Visit https://www.npmjs.com/package/${packageName} to see available versions.`,\n { cause: e },\n )\n }\n if (\n code === 'ENOTFOUND' ||\n code === 'ETIMEDOUT' ||\n code === 'EAI_AGAIN'\n ) {\n throw new Error(\n `Network error installing ${packageSpec}\\n` +\n 'Check your internet connection and try again.',\n { cause: e },\n )\n }\n throw new Error(\n `Failed to install package: ${packageSpec}\\n` +\n `Destination: ${installedDir}\\n` +\n 'Check npm registry connectivity or 
package name.',\n { cause: e },\n )\n }\n\n return { installed: true, packageDir }\n },\n {\n // Align with npm npx locking strategy.\n staleMs: 5000,\n touchIntervalMs: 2000,\n },\n )\n}\n\n/**\n * Resolve binary path with cross-platform wrapper support.\n * On Windows, checks for .cmd, .bat, .ps1, .exe wrappers in order.\n * On Unix, uses path directly.\n *\n * Aligns with npm/npx binary resolution strategy.\n */\nfunction resolveBinaryPath(basePath: string): string {\n const fs = getFs()\n\n if (!WIN32) {\n // Unix: use path directly\n return basePath\n }\n\n // Windows: check for wrappers in priority order\n // Order matches npm bin-links creation: .cmd, .ps1, .exe, then bare\n const extensions = ['.cmd', '.bat', '.ps1', '.exe', '']\n\n for (const ext of extensions) {\n const testPath = basePath + ext\n if (fs.existsSync(testPath)) {\n return testPath\n }\n }\n\n // Fallback to original path if no wrapper found\n return basePath\n}\n\n/**\n * Find the binary path for an installed package.\n * Intelligently handles packages with single or multiple binaries.\n * Resolves platform-specific wrappers (.cmd, .ps1, etc.) on Windows.\n */\nfunction findBinaryPath(\n packageDir: string,\n packageName: string,\n binaryName?: string,\n): string {\n const installedDir = normalizePath(\n path.join(packageDir, 'node_modules', packageName),\n )\n const pkgJsonPath = path.join(installedDir, 'package.json')\n\n // Read package.json to find bin entry.\n const pkgJson = readJsonSync(pkgJsonPath) as Record<string, unknown>\n const bin = pkgJson['bin']\n\n let binPath: string | undefined\n\n if (typeof bin === 'string') {\n // Single binary - use it directly.\n binPath = bin\n } else if (typeof bin === 'object' && bin !== null) {\n const binObj = bin as Record<string, string>\n const binKeys = Object.keys(binObj)\n\n // If only one binary, use it regardless of name.\n if (binKeys.length === 1) {\n binPath = binObj[binKeys[0]!]\n } else {\n // Multiple binaries - try to find the right one:\n // 1. User-provided binaryName\n // 2. Last segment of package name (e.g., 'cli' from '@socketsecurity/cli')\n // 3. Full package name without scope (e.g., 'cli' from '@socketsecurity/cli')\n // 4. 
First binary as fallback\n const lastSegment = packageName.split('/').pop()\n const candidates = [\n binaryName,\n lastSegment,\n packageName.replace(/^@[^/]+\\//, ''),\n ].filter(Boolean)\n\n for (const candidate of candidates) {\n if (candidate && binObj[candidate]) {\n binPath = binObj[candidate]\n break\n }\n }\n\n // Fallback to first binary if nothing matched.\n if (!binPath && binKeys.length > 0) {\n binPath = binObj[binKeys[0]!]\n }\n }\n }\n\n if (!binPath) {\n throw new Error(`No binary found for package \"${packageName}\"`)\n }\n\n const rawPath = normalizePath(path.join(installedDir, binPath))\n\n // Resolve platform-specific wrapper (Windows .cmd/.ps1/etc.)\n return resolveBinaryPath(rawPath)\n}\n\n/**\n * Execute a package via DLX - install if needed and run its binary.\n *\n * This is the Socket equivalent of npx/pnpm dlx/yarn dlx, but using\n * our own cache directory (~/.socket/_dlx) and installation logic.\n *\n * Auto-forces reinstall for version ranges to get latest within range.\n *\n * @example\n * ```typescript\n * // Download and execute cdxgen\n * const result = await dlxPackage(\n * ['--version'],\n * { package: '@cyclonedx/cdxgen@10.0.0' }\n * )\n * await result.spawnPromise\n * ```\n */\nexport async function dlxPackage(\n args: readonly string[] | string[],\n options?: DlxPackageOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): Promise<DlxPackageResult> {\n // Download the package.\n const downloadResult = await downloadPackage(options!)\n\n // Execute the binary.\n const spawnPromise = executePackage(\n downloadResult.binaryPath,\n args,\n options?.spawnOptions,\n spawnExtra,\n )\n\n return {\n ...downloadResult,\n spawnPromise,\n }\n}\n\n/**\n * Download and install a package without executing it.\n * This is useful for self-update or when you need the package files\n * but don't want to run the binary immediately.\n *\n * @example\n * ```typescript\n * // Install @socketsecurity/cli without running it\n * const result = await downloadPackage({\n * package: '@socketsecurity/cli@1.2.0',\n * force: true\n * })\n * console.log('Installed to:', result.packageDir)\n * console.log('Binary at:', result.binaryPath)\n * ```\n */\nexport async function downloadPackage(\n options: DlxPackageOptions,\n): Promise<DownloadPackageResult> {\n const fs = getFs()\n const {\n binaryName,\n force: userForce,\n package: packageSpec,\n yes,\n } = {\n __proto__: null,\n ...options,\n } as DlxPackageOptions\n\n // Parse package spec.\n const { name: packageName, version: packageVersion } =\n parsePackageSpec(packageSpec)\n\n // Determine force behavior:\n // 1. Explicit force takes precedence\n // 2. --yes flag implies force (auto-approve/skip prompts)\n // 3. Version ranges auto-force to get latest\n const isVersionRange =\n packageVersion !== undefined && rangeOperatorsRegExp.test(packageVersion)\n const force =\n userForce !== undefined ? userForce : yes === true ? true : isVersionRange\n\n // Build full package spec for installation.\n const fullPackageSpec = packageVersion\n ? 
`${packageName}@${packageVersion}`\n : packageName\n\n // Ensure package is installed.\n const { installed, packageDir } = await ensurePackageInstalled(\n packageName,\n fullPackageSpec,\n force,\n )\n\n // Find binary path.\n const binaryPath = findBinaryPath(packageDir, packageName, binaryName)\n\n // Make binary executable on Unix systems.\n if (!WIN32 && fs.existsSync(binaryPath)) {\n try {\n fs.chmodSync(binaryPath, 0o755)\n } catch {\n // Ignore chmod errors.\n }\n }\n\n return {\n binaryPath,\n installed,\n packageDir,\n }\n}\n\n/**\n * Execute a package's binary with cross-platform shell handling.\n * The package must already be installed (use downloadPackage first).\n *\n * On Windows, script files (.bat, .cmd, .ps1) require shell: true.\n * Matches npm/npx execution behavior.\n *\n * @example\n * ```typescript\n * // Execute an already-installed package\n * const downloaded = await downloadPackage({ package: 'cowsay@1.5.0' })\n * const result = await executePackage(\n * downloaded.binaryPath,\n * ['Hello World'],\n * { stdio: 'inherit' }\n * )\n * ```\n */\nexport function executePackage(\n binaryPath: string,\n args: readonly string[] | string[],\n spawnOptions?: SpawnOptions | undefined,\n spawnExtra?: SpawnExtra | undefined,\n): ReturnType<typeof spawn> {\n // On Windows, script files (.bat, .cmd, .ps1) require shell: true\n // because they are not executable on their own and must be run through cmd.exe.\n // .exe files are actual binaries and don't need shell mode.\n const needsShell = WIN32 && /\\.(?:bat|cmd|ps1)$/i.test(binaryPath)\n\n const finalOptions = needsShell\n ? {\n ...spawnOptions,\n shell: true,\n }\n : spawnOptions\n\n return spawn(binaryPath, args, finalOptions, spawnExtra)\n}\n"],
5
+ "mappings": ";6iBAAA,IAAAA,EAAA,GAAAC,EAAAD,EAAA,gBAAAE,EAAA,oBAAAC,EAAA,mBAAAC,IAAA,eAAAC,EAAAL,GAgCA,IAAAM,EAAiB,mBAEjBC,EAAsB,gCACtBC,EAAmC,gCACnCC,EAAiC,iBACjCC,EAAwC,gBACxCJ,EAA8B,kBAC9BK,EAAgC,mBAChCC,EAA4B,0BAE5BC,EAAsB,mBAEtB,IAAIC,EASJ,SAASC,GAAQ,CACf,OAAID,IAAQ,SAGVA,EAAoB,QAAQ,SAAS,GAEhCA,CACT,CAEA,IAAIE,EAEJ,SAASC,GAAmB,CAC1B,OAAID,IAAmB,SACrBA,EAA+B,QAAQ,4BAA4B,GAE9DA,CACT,CAEA,IAAIE,EAEJ,SAASC,GAAY,CACnB,OAAID,IAAY,SACdA,EAAwB,QAAQ,mBAAmB,GAE9CA,CACT,CAMA,MAAME,EAAuB,mBAgF7B,SAASC,EAAiBC,EAGxB,CACA,GAAI,CAEF,MAAMC,EADMN,EAAiB,EACVK,CAAI,EAKjBE,EACJD,EAAO,OAAS,OAEZA,EAAO,OAAS,WAAaA,EAAO,OAAS,QAD7CA,EAAO,UAGL,OAER,MAAO,CACL,KAAMA,EAAO,MAAQD,EACrB,QAAAE,CACF,CACF,MAAQ,CAEN,MAAMC,EAAUH,EAAK,YAAY,GAAG,EACpC,OAAIG,IAAY,IAAMH,EAAK,WAAW,GAAG,EAEhC,CAAE,KAAMA,EAAM,QAAS,MAAU,EAEnC,CACL,KAAMA,EAAK,MAAM,EAAGG,CAAO,EAC3B,QAASH,EAAK,MAAMG,EAAU,CAAC,CACjC,CACF,CACF,CAOA,eAAeC,EACbC,EACAC,EACAC,EACqD,CACrD,MAAMC,KAAW,oBAAiBF,CAAW,EACvCG,KAAa,iBAAc,EAAAC,QAAK,QAAK,mBAAgB,EAAGF,CAAQ,CAAC,EACjEG,KAAe,iBACnB,EAAAD,QAAK,KAAKD,EAAY,eAAgBJ,CAAW,CACnD,EAIA,GAAI,CACF,QAAM,aAAUI,EAAY,CAAE,UAAW,EAAK,CAAC,CACjD,OAASG,EAAG,CACV,MAAMC,EAAQD,EAA4B,KAC1C,MAAIC,IAAS,UAAYA,IAAS,QAC1B,IAAI,MACR,iDAAiDJ,CAAU;AAAA,oEAE3D,CAAE,MAAOG,CAAE,CACb,EAEEC,IAAS,QACL,IAAI,MACR,4DAA4DJ,CAAU;AAAA,iFAEtE,CAAE,MAAOG,CAAE,CACb,EAEI,IAAI,MAAM,uCAAuCH,CAAU,GAAI,CACnE,MAAOG,CACT,CAAC,CACH,CAIA,MAAME,EAAW,EAAAJ,QAAK,KAAKD,EAAY,kBAAkB,EAEzD,OAAO,MAAM,cAAY,SACvBK,EACA,SAAY,CACV,MAAMC,EAAKtB,EAAM,EAGjB,GAAI,CAACc,GAASQ,EAAG,WAAWJ,CAAY,EAAG,CAEzC,MAAMK,EAAc,EAAAN,QAAK,KAAKC,EAAc,cAAc,EAC1D,GAAII,EAAG,WAAWC,CAAW,EAC3B,MAAO,CAAE,UAAW,GAAO,WAAAP,CAAW,CAE1C,CAIA,MAAMQ,KAAkB,sBAAmB,EAC3C,GAAI,CACF,MAAMpB,EAAU,EAAE,QAAQS,EAAaK,EAAc,CAEnD,MAAOM,GAAmB,EAAAP,QAAK,KAAKD,EAAY,QAAQ,CAC1D,CAAC,CACH,OAASG,EAAG,CACV,MAAMC,EAAQD,EAAU,KACxB,MAAIC,IAAS,QAAUA,IAAS,UACxB,IAAI,MACR,sBAAsBP,CAAW;AAAA;AAAA,sCAEQD,CAAW,8BACpD,CAAE,MAAOO,CAAE,CACb,EAGAC,IAAS,aACTA,IAAS,aACTA,IAAS,YAEH,IAAI,MACR,4BAA4BP,CAAW;AAAA,+CAEvC,CAAE,MAAOM,CAAE,CACb,EAEI,IAAI,MACR,8BAA8BN,CAAW;AAAA,eACvBK,CAAY;AAAA,kDAE9B,CAAE,MAAOC,CAAE,CACb,CACF,CAEA,MAAO,CAAE,UAAW,GAAM,WAAAH,CAAW,CACvC,EACA,CAEE,QAAS,IACT,gBAAiB,GACnB,CACF,CACF,CASA,SAASS,EAAkBC,EAA0B,CACnD,MAAMJ,EAAKtB,EAAM,EAEjB,GAAI,CAAC,QAEH,OAAO0B,EAKT,MAAMC,EAAa,CAAC,OAAQ,OAAQ,OAAQ,OAAQ,EAAE,EAEtD,UAAWC,KAAOD,EAAY,CAC5B,MAAME,EAAWH,EAAWE,EAC5B,GAAIN,EAAG,WAAWO,CAAQ,EACxB,OAAOA,CAEX,CAGA,OAAOH,CACT,CAOA,SAASI,EACPd,EACAJ,EACAmB,EACQ,CACR,MAAMb,KAAe,iBACnB,EAAAD,QAAK,KAAKD,EAAY,eAAgBJ,CAAW,CACnD,EACMW,EAAc,EAAAN,QAAK,KAAKC,EAAc,cAAc,EAIpDc,KADU,gBAAaT,CAAW,EACpB,IAEpB,IAAIU,EAEJ,GAAI,OAAOD,GAAQ,SAEjBC,EAAUD,UACD,OAAOA,GAAQ,UAAYA,IAAQ,KAAM,CAClD,MAAME,EAASF,EACTG,EAAU,OAAO,KAAKD,CAAM,EAGlC,GAAIC,EAAQ,SAAW,EACrBF,EAAUC,EAAOC,EAAQ,CAAC,CAAE,MACvB,CAML,MAAMC,EAAcxB,EAAY,MAAM,GAAG,EAAE,IAAI,EACzCyB,EAAa,CACjBN,EACAK,EACAxB,EAAY,QAAQ,YAAa,EAAE,CACrC,EAAE,OAAO,OAAO,EAEhB,UAAW0B,KAAaD,EACtB,GAAIC,GAAaJ,EAAOI,CAAS,EAAG,CAClCL,EAAUC,EAAOI,CAAS,EAC1B,KACF,CAIE,CAACL,GAAWE,EAAQ,OAAS,IAC/BF,EAAUC,EAAOC,EAAQ,CAAC,CAAE,EAEhC,CACF,CAEA,GAAI,CAACF,EACH,MAAM,IAAI,MAAM,gCAAgCrB,CAAW,GAAG,EAGhE,MAAM2B,KAAU,iBAAc,EAAAtB,QAAK,KAAKC,EAAce,CAAO,CAAC,EAG9D,OAAOR,EAAkBc,CAAO,CAClC,CAoBA,eAAsBpD,EACpBqD,EACAC,EACAC,EAC2B,CAE3B,MAAMC,EAAiB,MAAMvD,EAAgBqD,CAAQ,EAG/CG,EAAevD,EACnBsD,EAAe,WACfH,EACAC,GAAS,aACTC,CACF,EAEA,MAAO,CACL,GAAGC,EACH,aAAAC,CACF,CACF,CAkBA,eAAsBxD,EACpBqD,EACgC,CAChC,MAAMnB,EAAKtB,EAAM,EACX,CACJ,WAAA+B,EACA,MAAOc,EACP,QAAShC,EACT,IAAAiC,CACF,EAAI,CACF,UAAW,KACX,GAAGL,CACL,EAGM,CAAE,KAAM7B,EAAa,QAASmC,CAAe,EACjDzC,EAAiBO,CAAW,EAMxBmC,E
ACJD,IAAmB,QAAa1C,EAAqB,KAAK0C,CAAc,EACpEjC,EACJ+B,IAAc,OAAYA,EAAYC,IAAQ,GAAO,GAAOE,EAGxDC,EAAkBF,EACpB,GAAGnC,CAAW,IAAImC,CAAc,GAChCnC,EAGE,CAAE,UAAAsC,EAAW,WAAAlC,CAAW,EAAI,MAAML,EACtCC,EACAqC,EACAnC,CACF,EAGMqC,EAAarB,EAAed,EAAYJ,EAAamB,CAAU,EAGrE,GAAI,CAAC,SAAST,EAAG,WAAW6B,CAAU,EACpC,GAAI,CACF7B,EAAG,UAAU6B,EAAY,GAAK,CAChC,MAAQ,CAER,CAGF,MAAO,CACL,WAAAA,EACA,UAAAD,EACA,WAAAlC,CACF,CACF,CAoBO,SAAS3B,EACd8D,EACAX,EACAY,EACAV,EAC0B,CAM1B,MAAMW,EAFa,SAAS,sBAAsB,KAAKF,CAAU,EAG7D,CACE,GAAGC,EACH,MAAO,EACT,EACAA,EAEJ,SAAO,SAAMD,EAAYX,EAAMa,EAAcX,CAAU,CACzD",
6
+ "names": ["dlx_package_exports", "__export", "dlxPackage", "downloadPackage", "executePackage", "__toCommonJS", "import_path", "import_platform", "import_packages", "import_dlx", "import_fs", "import_paths", "import_process_lock", "import_spawn", "_fs", "getFs", "_npmPackageArg", "getNpmPackageArg", "_pacote", "getPacote", "rangeOperatorsRegExp", "parsePackageSpec", "spec", "parsed", "version", "atIndex", "ensurePackageInstalled", "packageName", "packageSpec", "force", "cacheKey", "packageDir", "path", "installedDir", "e", "code", "lockPath", "fs", "pkgJsonPath", "pacoteCachePath", "resolveBinaryPath", "basePath", "extensions", "ext", "testPath", "findBinaryPath", "binaryName", "bin", "binPath", "binObj", "binKeys", "lastSegment", "candidates", "candidate", "rawPath", "args", "options", "spawnExtra", "downloadResult", "spawnPromise", "userForce", "yes", "packageVersion", "isVersionRange", "fullPackageSpec", "installed", "binaryPath", "spawnOptions", "finalOptions"]
7
7
  }
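The `dlx-package` source embedded in the map above documents an npx-style flow: `downloadPackage` installs a package spec into the `~/.socket/_dlx` cache and resolves its binary (including Windows `.cmd`/`.bat`/`.ps1`/`.exe` wrappers), `executePackage` spawns that binary (forcing `shell: true` for Windows script wrappers), and `dlxPackage` chains the two. A minimal sketch of the documented usage follows; the import specifier is an assumption, not confirmed by this diff, and the option/result shapes are taken from the typings visible in the embedded source.

```ts
// Sketch only: the import specifier is assumed; option and result shapes
// follow the typings embedded in the source map above (DlxPackageOptions,
// DlxPackageResult, DownloadPackageResult).
import { dlxPackage, downloadPackage } from '@socketsecurity/lib/dlx-package'

async function main(): Promise<void> {
  // Install into the ~/.socket/_dlx cache (if not already present) and run
  // the package's binary; spawnPromise resolves when the process exits.
  const run = await dlxPackage(['--version'], {
    package: '@cyclonedx/cdxgen@10.0.0',
  })
  await run.spawnPromise

  // Fetch without executing, e.g. for a self-update flow. Per the embedded
  // JSDoc, an explicit force, a `yes: true` auto-approve, or a version range
  // all trigger a reinstall so ranges resolve to the latest matching release.
  const { binaryPath, installed, packageDir } = await downloadPackage({
    package: '@socketsecurity/cli@1.2.0',
    force: true,
  })
  console.log(installed ? 'installed' : 'cache hit', packageDir, binaryPath)
}

main().catch(console.error)
```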
package/dist/dlx.js CHANGED
@@ -1,13 +1,13 @@
1
1
  /* Socket Lib - Built with esbuild */
2
- var f=Object.defineProperty;var k=Object.getOwnPropertyDescriptor;var v=Object.getOwnPropertyNames;var w=Object.prototype.hasOwnProperty;var E=(r,e)=>{for(var t in e)f(r,t,{get:e[t],enumerable:!0})},S=(r,e,t,i)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of v(e))!w.call(r,n)&&n!==t&&f(r,n,{get:()=>e[n],enumerable:!(i=k(e,n))||i.enumerable});return r};var b=r=>S(f({},"__esModule",{value:!0}),r);var J={};E(J,{clearDlx:()=>C,clearDlxSync:()=>j,dlxDirExists:()=>A,dlxDirExistsAsync:()=>F,ensureDlxDir:()=>I,ensureDlxDirSync:()=>L,generateCacheKey:()=>$,getDlxInstalledPackageDir:()=>l,getDlxPackageDir:()=>p,getDlxPackageJsonPath:()=>T,getDlxPackageNodeModulesDir:()=>m,isDlxPackageInstalled:()=>q,isDlxPackageInstalledAsync:()=>M,isInSocketDlx:()=>X,listDlxPackages:()=>y,listDlxPackagesAsync:()=>h,removeDlxPackage:()=>D,removeDlxPackageSync:()=>P});module.exports=b(J);var d=require("node:crypto"),s=require("node:fs"),u=require("./fs"),a=require("./path"),o=require("./paths"),x=require("./promises");function $(r){return(0,d.createHash)("sha512").update(r).digest("hex").substring(0,16)}let g;function c(){return g===void 0&&(g=require("node:path")),g}async function C(){const r=await h();await(0,x.pEach)(r,e=>D(e))}function j(){const r=y();for(const e of r)P(e)}function A(){return(0,s.existsSync)((0,o.getSocketDlxDir)())}async function F(){try{return await s.promises.access((0,o.getSocketDlxDir)()),!0}catch{return!1}}async function I(){await s.promises.mkdir((0,o.getSocketDlxDir)(),{recursive:!0})}function L(){const{mkdirSync:r}=require("node:fs");r((0,o.getSocketDlxDir)(),{recursive:!0})}function l(r){return(0,a.normalizePath)(c().join(m(r),r))}function p(r){return(0,a.normalizePath)(c().join((0,o.getSocketDlxDir)(),r))}function T(r){return(0,a.normalizePath)(c().join(l(r),"package.json"))}function m(r){return(0,a.normalizePath)(c().join(p(r),"node_modules"))}function X(r){if(!r)return!1;const e=c(),t=(0,o.getSocketDlxDir)();return e.resolve(r).startsWith(t+e.sep)}function q(r){return(0,s.existsSync)(l(r))}async function M(r){try{return await s.promises.access(l(r)),!0}catch{return!1}}function y(){try{return(0,u.readDirNamesSync)((0,o.getSocketDlxDir)(),{sort:!0})}catch{return[]}}async function h(){try{return(await s.promises.readdir((0,o.getSocketDlxDir)(),{withFileTypes:!0})).filter(e=>e.isDirectory()).map(e=>e.name).sort()}catch{return[]}}async function D(r){const e=p(r);try{await(0,u.safeDelete)(e,{recursive:!0,force:!0})}catch(t){throw new Error(`Failed to remove DLX package "${r}"`,{cause:t})}}function P(r){const{rmSync:e}=require("node:fs"),t=p(r);try{e(t,{recursive:!0,force:!0})}catch(i){const n=i.code;throw n==="EACCES"||n==="EPERM"?new Error(`Permission denied removing DLX package "${r}"
2
+ var p=Object.defineProperty;var v=Object.getOwnPropertyDescriptor;var w=Object.getOwnPropertyNames;var E=Object.prototype.hasOwnProperty;var S=(e,r)=>{for(var t in r)p(e,t,{get:r[t],enumerable:!0})},b=(e,r,t,s)=>{if(r&&typeof r=="object"||typeof r=="function")for(let n of w(r))!E.call(e,n)&&n!==t&&p(e,n,{get:()=>r[n],enumerable:!(s=v(r,n))||s.enumerable});return e};var $=e=>b(p({},"__esModule",{value:!0}),e);var J={};S(J,{clearDlx:()=>j,clearDlxSync:()=>F,dlxDirExists:()=>M,dlxDirExistsAsync:()=>A,ensureDlxDir:()=>I,ensureDlxDirSync:()=>L,generateCacheKey:()=>C,getDlxInstalledPackageDir:()=>f,getDlxPackageDir:()=>l,getDlxPackageJsonPath:()=>T,getDlxPackageNodeModulesDir:()=>m,isDlxPackageInstalled:()=>_,isDlxPackageInstalledAsync:()=>q,isInSocketDlx:()=>X,listDlxPackages:()=>h,listDlxPackagesAsync:()=>D,removeDlxPackage:()=>P,removeDlxPackageSync:()=>k});module.exports=$(J);var x=require("crypto"),i=require("./fs"),c=require("./path"),o=require("./paths"),y=require("./promises");let g;function a(){return g===void 0&&(g=require("node:fs")),g}function C(e){return(0,x.createHash)("sha512").update(e).digest("hex").substring(0,16)}let d;function u(){return d===void 0&&(d=require("node:path")),d}async function j(){const e=await D();await(0,y.pEach)(e,r=>P(r))}function F(){const e=h();for(const r of e)k(r)}function M(){return a().existsSync((0,o.getSocketDlxDir)())}async function A(){const e=a();try{return await e.promises.access((0,o.getSocketDlxDir)()),!0}catch{return!1}}async function I(){await(0,i.safeMkdir)((0,o.getSocketDlxDir)(),{recursive:!0})}function L(){(0,i.safeMkdirSync)((0,o.getSocketDlxDir)(),{recursive:!0})}function f(e){return(0,c.normalizePath)(u().join(m(e),e))}function l(e){return(0,c.normalizePath)(u().join((0,o.getSocketDlxDir)(),e))}function T(e){return(0,c.normalizePath)(u().join(f(e),"package.json"))}function m(e){return(0,c.normalizePath)(u().join(l(e),"node_modules"))}function X(e){if(!e)return!1;const r=u(),t=(0,o.getSocketDlxDir)();return r.resolve(e).startsWith(t+r.sep)}function _(e){return a().existsSync(f(e))}async function q(e){const r=a();try{return await r.promises.access(f(e)),!0}catch{return!1}}function h(){try{return(0,i.readDirNamesSync)((0,o.getSocketDlxDir)(),{sort:!0})}catch{return[]}}async function D(){const e=a();try{return(await e.promises.readdir((0,o.getSocketDlxDir)(),{withFileTypes:!0})).filter(t=>t.isDirectory()).map(t=>t.name).sort()}catch{return[]}}async function P(e){const r=l(e);try{await(0,i.safeDelete)(r,{recursive:!0,force:!0})}catch(t){throw new Error(`Failed to remove DLX package "${e}"`,{cause:t})}}function k(e){const r=a(),t=l(e);try{r.rmSync(t,{recursive:!0,force:!0})}catch(s){const n=s.code;throw n==="EACCES"||n==="EPERM"?new Error(`Permission denied removing DLX package "${e}"
3
3
  Directory: ${t}
4
4
  To resolve:
5
5
  1. Check file/directory permissions
6
6
  2. Close any programs using files in this directory
7
7
  3. Try running with elevated privileges if necessary
8
- 4. Manually remove: rm -rf "${t}"`,{cause:i}):n==="EROFS"?new Error(`Cannot remove DLX package "${r}" from read-only filesystem
8
+ 4. Manually remove: rm -rf "${t}"`,{cause:s}):n==="EROFS"?new Error(`Cannot remove DLX package "${e}" from read-only filesystem
9
9
  Directory: ${t}
10
- The filesystem is mounted read-only.`,{cause:i}):new Error(`Failed to remove DLX package "${r}"
10
+ The filesystem is mounted read-only.`,{cause:s}):new Error(`Failed to remove DLX package "${e}"
11
11
  Directory: ${t}
12
- Check permissions and ensure no programs are using this directory.`,{cause:i})}}0&&(module.exports={clearDlx,clearDlxSync,dlxDirExists,dlxDirExistsAsync,ensureDlxDir,ensureDlxDirSync,generateCacheKey,getDlxInstalledPackageDir,getDlxPackageDir,getDlxPackageJsonPath,getDlxPackageNodeModulesDir,isDlxPackageInstalled,isDlxPackageInstalledAsync,isInSocketDlx,listDlxPackages,listDlxPackagesAsync,removeDlxPackage,removeDlxPackageSync});
12
+ Check permissions and ensure no programs are using this directory.`,{cause:s})}}0&&(module.exports={clearDlx,clearDlxSync,dlxDirExists,dlxDirExistsAsync,ensureDlxDir,ensureDlxDirSync,generateCacheKey,getDlxInstalledPackageDir,getDlxPackageDir,getDlxPackageJsonPath,getDlxPackageNodeModulesDir,isDlxPackageInstalled,isDlxPackageInstalledAsync,isInSocketDlx,listDlxPackages,listDlxPackagesAsync,removeDlxPackage,removeDlxPackageSync});
13
13
  //# sourceMappingURL=dlx.js.map
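The rebuilt `dlx.js` above lazy-loads `node:fs` and routes directory creation through `safeMkdir`/`safeMkdirSync` from `./fs`; its export list covers the cache-directory helpers for the same `~/.socket/_dlx` store. A hedged sketch of how those helpers compose is below; the import path is assumed, and the semantics are inferred from the export names and minified bodies rather than from published typings.

```ts
// Sketch only: import path assumed; the helper names come from the module's
// export list in the build above, and behavior is inferred from the
// unminified identifiers, so treat this as illustrative.
import {
  clearDlx,
  dlxDirExistsAsync,
  listDlxPackagesAsync,
} from '@socketsecurity/lib/dlx'

async function reportAndReset(): Promise<void> {
  if (await dlxDirExistsAsync()) {
    // Entries are the cached directory names under the Socket DLX dir.
    console.log('cached dlx entries:', await listDlxPackagesAsync())
    // Remove every cached entry (clearDlx maps removeDlxPackage over them).
    await clearDlx()
  }
}

reportAndReset().catch(console.error)
```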