@socketsecurity/lib 3.1.3 → 3.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +31 -0
- package/dist/dlx-binary.js +19 -1
- package/dist/dlx-binary.js.map +2 -2
- package/dist/dlx-manifest.d.ts +116 -0
- package/dist/dlx-manifest.js +296 -0
- package/dist/dlx-manifest.js.map +7 -0
- package/dist/env/socket-cli.d.ts +14 -0
- package/dist/env/socket-cli.js +10 -0
- package/dist/env/socket-cli.js.map +2 -2
- package/dist/ipc.js +1 -1
- package/dist/ipc.js.map +1 -1
- package/dist/promises.js +5 -2
- package/dist/promises.js.map +2 -2
- package/dist/spinner.js +4 -5
- package/dist/spinner.js.map +2 -2
- package/dist/stdio/mask.js +5 -5
- package/dist/stdio/mask.js.map +2 -2
- package/package.json +8 -4
package/dist/dlx-manifest.js.map
ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../src/dlx-manifest.ts"],
+
"sourcesContent": ["/**\n * @fileoverview DLX manifest storage utilities.\n * Manages persistent caching of DLX package and binary metadata with TTL support\n * and atomic file operations.\n *\n * Key Functions:\n * - getManifestEntry: Retrieve manifest entry by spec\n * - setPackageEntry: Store npm package metadata\n * - setBinaryEntry: Store binary download metadata\n *\n * Features:\n * - TTL-based cache expiration\n * - Atomic file operations with locking\n * - JSON-based persistent storage\n * - Error-resistant implementation\n *\n * Storage Format:\n * - Stores in ~/.socket/_dlx/.dlx-manifest.json\n * - Per-spec manifest entries with timestamps\n * - Thread-safe operations using process lock utility\n *\n * Usage:\n * - Update check caching\n * - Binary metadata tracking\n * - Rate limiting registry requests\n */\n\nimport { existsSync, readFileSync, unlinkSync, writeFileSync } from 'fs'\nimport path from 'path'\n\nimport { readFileUtf8Sync, safeMkdirSync } from './fs'\nimport { getDefaultLogger } from './logger'\nimport { getSocketDlxDir } from './paths'\nimport { processLock } from './process-lock'\n\nconst logger = getDefaultLogger()\n\n/**\n * Manifest file name.\n */\nconst MANIFEST_FILE_NAME = '.dlx-manifest.json'\n\n/**\n * Details for npm package entries.\n */\nexport interface PackageDetails {\n installed_version: string\n size?: number\n update_check?: {\n last_check: number\n last_notification: number\n latest_known: string\n }\n}\n\n/**\n * Details for binary download entries.\n */\nexport interface BinaryDetails {\n checksum: string\n checksum_algorithm: 'sha256' | 'sha512'\n platform: string\n arch: string\n size: number\n source: {\n type: 'download'\n url: string\n }\n}\n\n/**\n * Unified manifest entry for all cached items (packages and binaries).\n * Shared fields at root, type-specific fields in details.\n */\nexport interface ManifestEntry {\n type: 'package' | 'binary'\n cache_key: string\n timestamp: number\n details: PackageDetails | BinaryDetails\n}\n\n/**\n * Type guard for package entries.\n */\nexport function isPackageEntry(\n entry: ManifestEntry,\n): entry is ManifestEntry & { details: PackageDetails } {\n return entry.type === 'package'\n}\n\n/**\n * Type guard for binary entries.\n */\nexport function isBinaryEntry(\n entry: ManifestEntry,\n): entry is ManifestEntry & { details: BinaryDetails } {\n return entry.type === 'binary'\n}\n\n/**\n * Legacy store record format (deprecated, for migration).\n */\nexport interface StoreRecord {\n timestampFetch: number\n timestampNotification: number\n version: string\n}\n\nexport interface DlxManifestOptions {\n /**\n * Custom manifest file path (defaults to ~/.socket/_dlx/.dlx-manifest.json).\n */\n manifestPath?: string\n}\n\n/**\n * DLX manifest storage manager with atomic operations.\n * Supports both legacy format (package name keys) and new unified manifest format (spec keys).\n */\nexport class DlxManifest {\n private readonly manifestPath: string\n private readonly lockPath: string\n\n constructor(options: DlxManifestOptions = {}) {\n this.manifestPath =\n options.manifestPath ?? path.join(getSocketDlxDir(), MANIFEST_FILE_NAME)\n this.lockPath = `${this.manifestPath}.lock`\n }\n\n /**\n * Read the entire manifest file.\n */\n private readManifest(): Record<string, ManifestEntry | StoreRecord> {\n try {\n if (!existsSync(this.manifestPath)) {\n return Object.create(null)\n }\n\n const rawContent = readFileUtf8Sync(this.manifestPath)\n const content = (\n typeof rawContent === 'string'\n ? 
rawContent\n : rawContent.toString('utf8')\n ).trim()\n\n if (!content) {\n return Object.create(null)\n }\n\n return JSON.parse(content) as Record<string, ManifestEntry | StoreRecord>\n } catch (error) {\n logger.warn(\n `Failed to read manifest: ${error instanceof Error ? error.message : String(error)}`,\n )\n return Object.create(null)\n }\n }\n\n /**\n * Get a manifest entry by spec (e.g., \"@socketsecurity/cli@^2.0.11\").\n */\n getManifestEntry(spec: string): ManifestEntry | undefined {\n const data = this.readManifest()\n const entry = data[spec]\n\n // Check if it's a new-format entry (has 'type' field).\n if (entry && 'type' in entry) {\n return entry as ManifestEntry\n }\n\n return undefined\n }\n\n /**\n * Get cached update information for a package (legacy format).\n * @deprecated Use getManifestEntry() for new code.\n */\n get(name: string): StoreRecord | undefined {\n const data = this.readManifest()\n const entry = data[name]\n\n // Return legacy format entries only.\n if (entry && !('type' in entry)) {\n return entry as StoreRecord\n }\n\n return undefined\n }\n\n /**\n * Set a package manifest entry.\n */\n async setPackageEntry(\n spec: string,\n cacheKey: string,\n details: PackageDetails,\n ): Promise<void> {\n await processLock.withLock(this.lockPath, async () => {\n const data = this.readManifest()\n\n data[spec] = {\n type: 'package',\n cache_key: cacheKey,\n timestamp: Date.now(),\n details,\n }\n\n await this.writeManifest(data)\n })\n }\n\n /**\n * Set a binary manifest entry.\n */\n async setBinaryEntry(\n spec: string,\n cacheKey: string,\n details: BinaryDetails,\n ): Promise<void> {\n await processLock.withLock(this.lockPath, async () => {\n const data = this.readManifest()\n\n data[spec] = {\n type: 'binary',\n cache_key: cacheKey,\n timestamp: Date.now(),\n details,\n }\n\n await this.writeManifest(data)\n })\n }\n\n /**\n * Write the manifest file atomically.\n */\n private async writeManifest(\n data: Record<string, ManifestEntry | StoreRecord>,\n ): Promise<void> {\n // Ensure directory exists.\n const manifestDir = path.dirname(this.manifestPath)\n try {\n safeMkdirSync(manifestDir, { recursive: true })\n } catch (error) {\n logger.warn(\n `Failed to create manifest directory: ${error instanceof Error ? error.message : String(error)}`,\n )\n }\n\n // Write atomically.\n const content = JSON.stringify(data, null, 2)\n const tempPath = `${this.manifestPath}.tmp`\n\n try {\n writeFileSync(tempPath, content, 'utf8')\n writeFileSync(this.manifestPath, content, 'utf8')\n\n // Clean up temp file.\n try {\n if (existsSync(tempPath)) {\n unlinkSync(tempPath)\n }\n } catch {\n // Cleanup failed, not critical.\n }\n } catch (error) {\n // Clean up temp file on error.\n try {\n if (existsSync(tempPath)) {\n unlinkSync(tempPath)\n }\n } catch {\n // Best effort cleanup.\n }\n throw error\n }\n }\n\n /**\n * Store update information for a package (legacy format).\n * @deprecated Use setPackageEntry() for new code.\n */\n async set(name: string, record: StoreRecord): Promise<void> {\n await processLock.withLock(this.lockPath, async () => {\n let data: Record<string, StoreRecord> = Object.create(null)\n\n // Read existing data.\n try {\n if (existsSync(this.manifestPath)) {\n const content = readFileSync(this.manifestPath, 'utf8')\n if (content.trim()) {\n data = JSON.parse(content) as Record<string, StoreRecord>\n }\n }\n } catch (error) {\n logger.warn(\n `Failed to read existing manifest: ${error instanceof Error ? 
error.message : String(error)}`,\n )\n }\n\n // Update record.\n data[name] = record\n\n // Ensure directory exists.\n const manifestDir = path.dirname(this.manifestPath)\n try {\n safeMkdirSync(manifestDir, { recursive: true })\n } catch (error) {\n logger.warn(\n `Failed to create manifest directory: ${error instanceof Error ? error.message : String(error)}`,\n )\n }\n\n // Write atomically.\n const content = JSON.stringify(data, null, 2)\n const tempPath = `${this.manifestPath}.tmp`\n\n try {\n writeFileSync(tempPath, content, 'utf8')\n writeFileSync(this.manifestPath, content, 'utf8')\n\n // Clean up temp file.\n try {\n if (existsSync(tempPath)) {\n unlinkSync(tempPath)\n }\n } catch {\n // Cleanup failed, not critical.\n }\n } catch (error) {\n // Clean up temp file on error.\n try {\n if (existsSync(tempPath)) {\n unlinkSync(tempPath)\n }\n } catch {\n // Best effort cleanup.\n }\n throw error\n }\n })\n }\n\n /**\n * Clear cached data for a specific entry.\n */\n async clear(name: string): Promise<void> {\n await processLock.withLock(this.lockPath, async () => {\n try {\n if (!existsSync(this.manifestPath)) {\n return\n }\n\n const content = readFileSync(this.manifestPath, 'utf8')\n if (!content.trim()) {\n return\n }\n\n const data = JSON.parse(content) as Record<string, StoreRecord>\n delete data[name]\n\n const updatedContent = JSON.stringify(data, null, 2)\n writeFileSync(this.manifestPath, updatedContent, 'utf8')\n } catch (error) {\n logger.warn(\n `Failed to clear cache for ${name}: ${error instanceof Error ? error.message : String(error)}`,\n )\n }\n })\n }\n\n /**\n * Clear all cached data.\n */\n async clearAll(): Promise<void> {\n await processLock.withLock(this.lockPath, async () => {\n try {\n if (existsSync(this.manifestPath)) {\n unlinkSync(this.manifestPath)\n }\n } catch (error) {\n logger.warn(\n `Failed to clear all cache: ${error instanceof Error ? error.message : String(error)}`,\n )\n }\n })\n }\n\n /**\n * Check if cached data is fresh based on TTL.\n */\n isFresh(record: StoreRecord | undefined, ttlMs: number): boolean {\n if (!record) {\n return false\n }\n\n const age = Date.now() - record.timestampFetch\n return age < ttlMs\n }\n\n /**\n * Get all cached package names.\n */\n getAllPackages(): string[] {\n try {\n if (!existsSync(this.manifestPath)) {\n return []\n }\n\n const rawContent = readFileUtf8Sync(this.manifestPath)\n const content = (\n typeof rawContent === 'string'\n ? rawContent\n : rawContent.toString('utf8')\n ).trim()\n if (!content) {\n return []\n }\n\n const data = JSON.parse(content) as Record<string, StoreRecord>\n return Object.keys(data)\n } catch (error) {\n logger.warn(\n `Failed to get package list: ${error instanceof Error ? error.message : String(error)}`,\n )\n return []\n }\n }\n}\n\n// Export singleton instance using default manifest location.\nexport const dlxManifest = new DlxManifest()\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA2BA,gBAAoE;AACpE,kBAAiB;AAEjB,IAAAA,aAAgD;AAChD,oBAAiC;AACjC,mBAAgC;AAChC,0BAA4B;AAE5B,MAAM,aAAS,gCAAiB;AAKhC,MAAM,qBAAqB;AA4CpB,SAAS,eACd,OACsD;AACtD,SAAO,MAAM,SAAS;AACxB;AAKO,SAAS,cACd,OACqD;AACrD,SAAO,MAAM,SAAS;AACxB;AAsBO,MAAM,YAAY;AAAA,EACN;AAAA,EACA;AAAA,EAEjB,YAAY,UAA8B,CAAC,GAAG;AAC5C,SAAK,eACH,QAAQ,gBAAgB,YAAAC,QAAK,SAAK,8BAAgB,GAAG,kBAAkB;AACzE,SAAK,WAAW,GAAG,KAAK,YAAY;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA,EAKQ,eAA4D;AAClE,QAAI;AACF,UAAI,KAAC,sBAAW,KAAK,YAAY,GAAG;AAClC,eAAO,uBAAO,OAAO,IAAI;AAAA,MAC3B;AAEA,YAAM,iBAAa,6BAAiB,KAAK,YAAY;AACrD,YAAM,WACJ,OAAO,eAAe,WAClB,aACA,WAAW,SAAS,MAAM,GAC9B,KAAK;AAEP,UAAI,CAAC,SAAS;AACZ,eAAO,uBAAO,OAAO,IAAI;AAAA,MAC3B;AAEA,aAAO,KAAK,MAAM,OAAO;AAAA,IAC3B,SAAS,OAAO;AACd,aAAO;AAAA,QACL,4BAA4B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACpF;AACA,aAAO,uBAAO,OAAO,IAAI;AAAA,IAC3B;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,iBAAiB,MAAyC;AACxD,UAAM,OAAO,KAAK,aAAa;AAC/B,UAAM,QAAQ,KAAK,IAAI;AAGvB,QAAI,SAAS,UAAU,OAAO;AAC5B,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,IAAI,MAAuC;AACzC,UAAM,OAAO,KAAK,aAAa;AAC/B,UAAM,QAAQ,KAAK,IAAI;AAGvB,QAAI,SAAS,EAAE,UAAU,QAAQ;AAC/B,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,gBACJ,MACA,UACA,SACe;AACf,UAAM,gCAAY,SAAS,KAAK,UAAU,YAAY;AACpD,YAAM,OAAO,KAAK,aAAa;AAE/B,WAAK,IAAI,IAAI;AAAA,QACX,MAAM;AAAA,QACN,WAAW;AAAA,QACX,WAAW,KAAK,IAAI;AAAA,QACpB;AAAA,MACF;AAEA,YAAM,KAAK,cAAc,IAAI;AAAA,IAC/B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eACJ,MACA,UACA,SACe;AACf,UAAM,gCAAY,SAAS,KAAK,UAAU,YAAY;AACpD,YAAM,OAAO,KAAK,aAAa;AAE/B,WAAK,IAAI,IAAI;AAAA,QACX,MAAM;AAAA,QACN,WAAW;AAAA,QACX,WAAW,KAAK,IAAI;AAAA,QACpB;AAAA,MACF;AAEA,YAAM,KAAK,cAAc,IAAI;AAAA,IAC/B,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,cACZ,MACe;AAEf,UAAM,cAAc,YAAAA,QAAK,QAAQ,KAAK,YAAY;AAClD,QAAI;AACF,oCAAc,aAAa,EAAE,WAAW,KAAK,CAAC;AAAA,IAChD,SAAS,OAAO;AACd,aAAO;AAAA,QACL,wCAAwC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MAChG;AAAA,IACF;AAGA,UAAM,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC;AAC5C,UAAM,WAAW,GAAG,KAAK,YAAY;AAErC,QAAI;AACF,mCAAc,UAAU,SAAS,MAAM;AACvC,mCAAc,KAAK,cAAc,SAAS,MAAM;AAGhD,UAAI;AACF,gBAAI,sBAAW,QAAQ,GAAG;AACxB,oCAAW,QAAQ;AAAA,QACrB;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF,SAAS,OAAO;AAEd,UAAI;AACF,gBAAI,sBAAW,QAAQ,GAAG;AACxB,oCAAW,QAAQ;AAAA,QACrB;AAAA,MACF,QAAQ;AAAA,MAER;AACA,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,IAAI,MAAc,QAAoC;AAC1D,UAAM,gCAAY,SAAS,KAAK,UAAU,YAAY;AACpD,UAAI,OAAoC,uBAAO,OAAO,IAAI;AAG1D,UAAI;AACF,gBAAI,sBAAW,KAAK,YAAY,GAAG;AACjC,gBAAMC,eAAU,wBAAa,KAAK,cAAc,MAAM;AACtD,cAAIA,SAAQ,KAAK,GAAG;AAClB,mBAAO,KAAK,MAAMA,QAAO;AAAA,UAC3B;AAAA,QACF;AAAA,MACF,SAAS,OAAO;AACd,eAAO;AAAA,UACL,qCAAqC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QAC7F;AAAA,MACF;AAGA,WAAK,IAAI,IAAI;AAGb,YAAM,cAAc,YAAAD,QAAK,QAAQ,KAAK,YAAY;AAClD,UAAI;AACF,sCAAc,aAAa,EAAE,WAAW,KAAK,CAAC;AAAA,MAChD,SAAS,OAAO;AACd,eAAO;AAAA,UACL,wCAAwC,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QAChG;AAAA,MACF;AAGA,YAAM,UAAU,KAAK,UAAU,MAAM,MAAM,CAAC;AAC5C,YAAM,WAAW,GAAG,KAAK,YAAY;AAErC,UAAI;AACF,qCAAc,UAAU,SAAS,MAAM;AACvC,qCAAc,KAAK,cAAc,SAAS,MAAM;AAGhD,YAAI;AACF,kBAAI,sBAAW,QAAQ,GAAG;AACxB,sCAAW,QAAQ;AAAA,UACrB;AAAA,QACF,QAAQ;AAAA,QAER;AAAA,MACF,SAAS,OAAO;AAEd,YAAI;AACF,kBAAI,sBAAW,QAAQ,GAAG;AACxB,sCAAW,QAAQ;AAAA,UACrB;AAAA,QACF,QAAQ;AAAA,QAER;AACA,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,MAAM,MAA6B;AACvC,UAAM,gCAAY,SAAS,KAAK,UAAU,YAAY;AACpD,UAAI;AACF,YAAI,KAAC,sBAAW,KAAK,YAAY,GAAG;AAClC;AAAA,QACF;AAEA,cAAM,cAAU,wBAAa,KAAK,cAAc,MAAM;AACtD,YAAI,CAAC,QAAQ,KAAK,GAAG;AA
CnB;AAAA,QACF;AAEA,cAAM,OAAO,KAAK,MAAM,OAAO;AAC/B,eAAO,KAAK,IAAI;AAEhB,cAAM,iBAAiB,KAAK,UAAU,MAAM,MAAM,CAAC;AACnD,qCAAc,KAAK,cAAc,gBAAgB,MAAM;AAAA,MACzD,SAAS,OAAO;AACd,eAAO;AAAA,UACL,6BAA6B,IAAI,KAAK,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QAC9F;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAA0B;AAC9B,UAAM,gCAAY,SAAS,KAAK,UAAU,YAAY;AACpD,UAAI;AACF,gBAAI,sBAAW,KAAK,YAAY,GAAG;AACjC,oCAAW,KAAK,YAAY;AAAA,QAC9B;AAAA,MACF,SAAS,OAAO;AACd,eAAO;AAAA,UACL,8BAA8B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,QACtF;AAAA,MACF;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,QAAQ,QAAiC,OAAwB;AAC/D,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,IACT;AAEA,UAAM,MAAM,KAAK,IAAI,IAAI,OAAO;AAChC,WAAO,MAAM;AAAA,EACf;AAAA;AAAA;AAAA;AAAA,EAKA,iBAA2B;AACzB,QAAI;AACF,UAAI,KAAC,sBAAW,KAAK,YAAY,GAAG;AAClC,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,iBAAa,6BAAiB,KAAK,YAAY;AACrD,YAAM,WACJ,OAAO,eAAe,WAClB,aACA,WAAW,SAAS,MAAM,GAC9B,KAAK;AACP,UAAI,CAAC,SAAS;AACZ,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,OAAO,KAAK,MAAM,OAAO;AAC/B,aAAO,OAAO,KAAK,IAAI;AAAA,IACzB,SAAS,OAAO;AACd,aAAO;AAAA,QACL,+BAA+B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA,MACvF;AACA,aAAO,CAAC;AAAA,IACV;AAAA,EACF;AACF;AAGO,MAAM,cAAc,IAAI,YAAY;",
+ "names": ["import_fs", "path", "content"]
+ }
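The full source of the new dlx-manifest module is embedded in the `sourcesContent` field above: it exports a `DlxManifest` class, a `dlxManifest` singleton, and type guards such as `isPackageEntry`. A minimal usage sketch based on that embedded source; the subpath import specifier is an assumption, since this diff does not show the package's `exports` map:

```typescript
import {
  dlxManifest,
  isPackageEntry,
  type PackageDetails,
} from '@socketsecurity/lib/dlx-manifest' // assumed subpath export

// Record an installed package under its spec; per the embedded source this is
// written to ~/.socket/_dlx/.dlx-manifest.json while holding a process lock.
async function recordInstall(spec: string, cacheKey: string, version: string) {
  const details: PackageDetails = { installed_version: version }
  await dlxManifest.setPackageEntry(spec, cacheKey, details)
}

// Read it back; the isPackageEntry guard narrows details to PackageDetails.
function installedVersion(spec: string): string | undefined {
  const entry = dlxManifest.getManifestEntry(spec)
  return entry && isPackageEntry(entry)
    ? entry.details.installed_version
    : undefined
}
```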
package/dist/env/socket-cli.d.ts
CHANGED
@@ -77,3 +77,17 @@ export declare function getSocketCliViewAllRisks(): boolean;
  * @returns GitHub token or undefined
  */
  export declare function getSocketCliGithubToken(): string | undefined;
+ /**
+ * Bootstrap package spec (e.g., @socketsecurity/cli@^2.0.11).
+ * Set by bootstrap wrappers (SEA/smol/npm) to pass package spec to CLI.
+ *
+ * @returns Bootstrap package spec or undefined
+ */
+ export declare function getSocketCliBootstrapSpec(): string | undefined;
+ /**
+ * Bootstrap cache directory path.
+ * Set by bootstrap wrappers to pass dlx cache location to CLI.
+ *
+ * @returns Bootstrap cache directory or undefined
+ */
+ export declare function getSocketCliBootstrapCacheDir(): string | undefined;
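The two declarations added above are thin getters over environment variables, as the compiled socket-cli.js diff below shows. A hedged sketch of how a bootstrap wrapper and the CLI might cooperate through them; the subpath import specifier is an assumption:

```typescript
import {
  getSocketCliBootstrapCacheDir,
  getSocketCliBootstrapSpec,
} from '@socketsecurity/lib/env/socket-cli' // assumed subpath export

// A bootstrap wrapper (SEA/smol/npm) sets SOCKET_CLI_BOOTSTRAP_SPEC and
// SOCKET_CLI_BOOTSTRAP_CACHE_DIR before spawning the CLI; the CLI reads them:
const spec = getSocketCliBootstrapSpec() // e.g. '@socketsecurity/cli@^2.0.11'
const cacheDir = getSocketCliBootstrapCacheDir() // e.g. a dlx cache path

if (spec) {
  console.log(`bootstrapped from ${spec}${cacheDir ? ` (cache: ${cacheDir})` : ''}`)
}
```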
package/dist/env/socket-cli.js
CHANGED
@@ -23,6 +23,8 @@ __export(socket_cli_exports, {
  getSocketCliApiProxy: () => getSocketCliApiProxy,
  getSocketCliApiTimeout: () => getSocketCliApiTimeout,
  getSocketCliApiToken: () => getSocketCliApiToken,
+ getSocketCliBootstrapCacheDir: () => getSocketCliBootstrapCacheDir,
+ getSocketCliBootstrapSpec: () => getSocketCliBootstrapSpec,
  getSocketCliConfig: () => getSocketCliConfig,
  getSocketCliFix: () => getSocketCliFix,
  getSocketCliGithubToken: () => getSocketCliGithubToken,
@@ -70,6 +72,12 @@ function getSocketCliViewAllRisks() {
  function getSocketCliGithubToken() {
  return (0, import_rewire.getEnvValue)("SOCKET_CLI_GITHUB_TOKEN") || (0, import_rewire.getEnvValue)("SOCKET_SECURITY_GITHUB_PAT") || (0, import_rewire.getEnvValue)("GITHUB_TOKEN");
  }
+ function getSocketCliBootstrapSpec() {
+ return (0, import_rewire.getEnvValue)("SOCKET_CLI_BOOTSTRAP_SPEC");
+ }
+ function getSocketCliBootstrapCacheDir() {
+ return (0, import_rewire.getEnvValue)("SOCKET_CLI_BOOTSTRAP_CACHE_DIR");
+ }
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  getSocketCliAcceptRisks,
@@ -77,6 +85,8 @@ function getSocketCliGithubToken() {
  getSocketCliApiProxy,
  getSocketCliApiTimeout,
  getSocketCliApiToken,
+ getSocketCliBootstrapCacheDir,
+ getSocketCliBootstrapSpec,
  getSocketCliConfig,
  getSocketCliFix,
  getSocketCliGithubToken,
package/dist/env/socket-cli.js.map
CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/env/socket-cli.ts"],
-
"sourcesContent": ["/**\n * @fileoverview Socket CLI environment variables.\n * Provides typed getters for SOCKET_CLI_* environment variables (excluding shadow).\n */\n\nimport { envAsBoolean, envAsNumber } from '#env/helpers'\nimport { getEnvValue } from '#env/rewire'\n\n/**\n * Whether to accept all Socket CLI risks (alternative name).\n *\n * @returns Whether to accept all risks\n */\nexport function getSocketCliAcceptRisks(): boolean {\n return envAsBoolean(getEnvValue('SOCKET_CLI_ACCEPT_RISKS'))\n}\n\n/**\n * Socket CLI API base URL (alternative name).\n * Checks SOCKET_CLI_API_BASE_URL first, then falls back to legacy SOCKET_SECURITY_API_BASE_URL.\n *\n * @returns API base URL or undefined\n */\nexport function getSocketCliApiBaseUrl(): string | undefined {\n return (\n getEnvValue('SOCKET_CLI_API_BASE_URL') ||\n getEnvValue('SOCKET_SECURITY_API_BASE_URL')\n )\n}\n\n/**\n * Proxy URL for Socket CLI API requests (alternative name).\n * Checks SOCKET_CLI_API_PROXY, SOCKET_SECURITY_API_PROXY, then standard proxy env vars.\n * Follows the same precedence as v1.x: HTTPS_PROXY \u2192 https_proxy \u2192 HTTP_PROXY \u2192 http_proxy.\n *\n * @returns API proxy URL or undefined\n */\nexport function getSocketCliApiProxy(): string | undefined {\n return (\n getEnvValue('SOCKET_CLI_API_PROXY') ||\n getEnvValue('SOCKET_SECURITY_API_PROXY') ||\n getEnvValue('HTTPS_PROXY') ||\n getEnvValue('https_proxy') ||\n getEnvValue('HTTP_PROXY') ||\n getEnvValue('http_proxy')\n )\n}\n\n/**\n * Timeout in milliseconds for Socket CLI API requests (alternative name).\n *\n * @returns API timeout in milliseconds\n */\nexport function getSocketCliApiTimeout(): number {\n return envAsNumber(getEnvValue('SOCKET_CLI_API_TIMEOUT'))\n}\n\n/**\n * Socket CLI API authentication token (alternative name).\n * Checks SOCKET_CLI_API_TOKEN, SOCKET_CLI_API_KEY, SOCKET_SECURITY_API_TOKEN, SOCKET_SECURITY_API_KEY.\n * Maintains full v1.x backward compatibility.\n *\n * @returns API token or undefined\n */\nexport function getSocketCliApiToken(): string | undefined {\n return (\n getEnvValue('SOCKET_CLI_API_TOKEN') ||\n getEnvValue('SOCKET_CLI_API_KEY') ||\n getEnvValue('SOCKET_SECURITY_API_TOKEN') ||\n getEnvValue('SOCKET_SECURITY_API_KEY')\n )\n}\n\n/**\n * Socket CLI configuration file path (alternative name).\n *\n * @returns Config file path or undefined\n */\nexport function getSocketCliConfig(): string | undefined {\n return getEnvValue('SOCKET_CLI_CONFIG')\n}\n\n/**\n * Controls Socket CLI fix mode.\n *\n * @returns Fix mode value or undefined\n */\nexport function getSocketCliFix(): string | undefined {\n return getEnvValue('SOCKET_CLI_FIX')\n}\n\n/**\n * Whether to skip Socket CLI API token requirement (alternative name).\n *\n * @returns Whether to skip API token requirement\n */\nexport function getSocketCliNoApiToken(): boolean {\n return envAsBoolean(getEnvValue('SOCKET_CLI_NO_API_TOKEN'))\n}\n\n/**\n * Controls Socket CLI optimization mode.\n *\n * @returns Whether optimization mode is enabled\n */\nexport function getSocketCliOptimize(): boolean {\n return envAsBoolean(getEnvValue('SOCKET_CLI_OPTIMIZE'))\n}\n\n/**\n * Socket CLI organization slug identifier (alternative name).\n * Checks SOCKET_CLI_ORG_SLUG first, then falls back to SOCKET_ORG_SLUG.\n *\n * @returns Organization slug or undefined\n */\nexport function getSocketCliOrgSlug(): string | undefined {\n return getEnvValue('SOCKET_CLI_ORG_SLUG') || getEnvValue('SOCKET_ORG_SLUG')\n}\n\n/**\n * Whether to view all Socket CLI risks (alternative name).\n *\n 
* @returns Whether to view all risks\n */\nexport function getSocketCliViewAllRisks(): boolean {\n return envAsBoolean(getEnvValue('SOCKET_CLI_VIEW_ALL_RISKS'))\n}\n\n/**\n * Socket CLI GitHub authentication token.\n * Checks SOCKET_CLI_GITHUB_TOKEN, SOCKET_SECURITY_GITHUB_PAT, then falls back to GITHUB_TOKEN.\n *\n * @returns GitHub token or undefined\n */\nexport function getSocketCliGithubToken(): string | undefined {\n return (\n getEnvValue('SOCKET_CLI_GITHUB_TOKEN') ||\n getEnvValue('SOCKET_SECURITY_GITHUB_PAT') ||\n getEnvValue('GITHUB_TOKEN')\n )\n}\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,qBAA0C;AAC1C,oBAA4B;AAOrB,SAAS,0BAAmC;AACjD,aAAO,iCAAa,2BAAY,yBAAyB,CAAC;AAC5D;AAQO,SAAS,yBAA6C;AAC3D,aACE,2BAAY,yBAAyB,SACrC,2BAAY,8BAA8B;AAE9C;AASO,SAAS,uBAA2C;AACzD,aACE,2BAAY,sBAAsB,SAClC,2BAAY,2BAA2B,SACvC,2BAAY,aAAa,SACzB,2BAAY,aAAa,SACzB,2BAAY,YAAY,SACxB,2BAAY,YAAY;AAE5B;AAOO,SAAS,yBAAiC;AAC/C,aAAO,gCAAY,2BAAY,wBAAwB,CAAC;AAC1D;AASO,SAAS,uBAA2C;AACzD,aACE,2BAAY,sBAAsB,SAClC,2BAAY,oBAAoB,SAChC,2BAAY,2BAA2B,SACvC,2BAAY,yBAAyB;AAEzC;AAOO,SAAS,qBAAyC;AACvD,aAAO,2BAAY,mBAAmB;AACxC;AAOO,SAAS,kBAAsC;AACpD,aAAO,2BAAY,gBAAgB;AACrC;AAOO,SAAS,yBAAkC;AAChD,aAAO,iCAAa,2BAAY,yBAAyB,CAAC;AAC5D;AAOO,SAAS,uBAAgC;AAC9C,aAAO,iCAAa,2BAAY,qBAAqB,CAAC;AACxD;AAQO,SAAS,sBAA0C;AACxD,aAAO,2BAAY,qBAAqB,SAAK,2BAAY,iBAAiB;AAC5E;AAOO,SAAS,2BAAoC;AAClD,aAAO,iCAAa,2BAAY,2BAA2B,CAAC;AAC9D;AAQO,SAAS,0BAA8C;AAC5D,aACE,2BAAY,yBAAyB,SACrC,2BAAY,4BAA4B,SACxC,2BAAY,cAAc;AAE9B;",
+
"sourcesContent": ["/**\n * @fileoverview Socket CLI environment variables.\n * Provides typed getters for SOCKET_CLI_* environment variables (excluding shadow).\n */\n\nimport { envAsBoolean, envAsNumber } from '#env/helpers'\nimport { getEnvValue } from '#env/rewire'\n\n/**\n * Whether to accept all Socket CLI risks (alternative name).\n *\n * @returns Whether to accept all risks\n */\nexport function getSocketCliAcceptRisks(): boolean {\n return envAsBoolean(getEnvValue('SOCKET_CLI_ACCEPT_RISKS'))\n}\n\n/**\n * Socket CLI API base URL (alternative name).\n * Checks SOCKET_CLI_API_BASE_URL first, then falls back to legacy SOCKET_SECURITY_API_BASE_URL.\n *\n * @returns API base URL or undefined\n */\nexport function getSocketCliApiBaseUrl(): string | undefined {\n return (\n getEnvValue('SOCKET_CLI_API_BASE_URL') ||\n getEnvValue('SOCKET_SECURITY_API_BASE_URL')\n )\n}\n\n/**\n * Proxy URL for Socket CLI API requests (alternative name).\n * Checks SOCKET_CLI_API_PROXY, SOCKET_SECURITY_API_PROXY, then standard proxy env vars.\n * Follows the same precedence as v1.x: HTTPS_PROXY \u2192 https_proxy \u2192 HTTP_PROXY \u2192 http_proxy.\n *\n * @returns API proxy URL or undefined\n */\nexport function getSocketCliApiProxy(): string | undefined {\n return (\n getEnvValue('SOCKET_CLI_API_PROXY') ||\n getEnvValue('SOCKET_SECURITY_API_PROXY') ||\n getEnvValue('HTTPS_PROXY') ||\n getEnvValue('https_proxy') ||\n getEnvValue('HTTP_PROXY') ||\n getEnvValue('http_proxy')\n )\n}\n\n/**\n * Timeout in milliseconds for Socket CLI API requests (alternative name).\n *\n * @returns API timeout in milliseconds\n */\nexport function getSocketCliApiTimeout(): number {\n return envAsNumber(getEnvValue('SOCKET_CLI_API_TIMEOUT'))\n}\n\n/**\n * Socket CLI API authentication token (alternative name).\n * Checks SOCKET_CLI_API_TOKEN, SOCKET_CLI_API_KEY, SOCKET_SECURITY_API_TOKEN, SOCKET_SECURITY_API_KEY.\n * Maintains full v1.x backward compatibility.\n *\n * @returns API token or undefined\n */\nexport function getSocketCliApiToken(): string | undefined {\n return (\n getEnvValue('SOCKET_CLI_API_TOKEN') ||\n getEnvValue('SOCKET_CLI_API_KEY') ||\n getEnvValue('SOCKET_SECURITY_API_TOKEN') ||\n getEnvValue('SOCKET_SECURITY_API_KEY')\n )\n}\n\n/**\n * Socket CLI configuration file path (alternative name).\n *\n * @returns Config file path or undefined\n */\nexport function getSocketCliConfig(): string | undefined {\n return getEnvValue('SOCKET_CLI_CONFIG')\n}\n\n/**\n * Controls Socket CLI fix mode.\n *\n * @returns Fix mode value or undefined\n */\nexport function getSocketCliFix(): string | undefined {\n return getEnvValue('SOCKET_CLI_FIX')\n}\n\n/**\n * Whether to skip Socket CLI API token requirement (alternative name).\n *\n * @returns Whether to skip API token requirement\n */\nexport function getSocketCliNoApiToken(): boolean {\n return envAsBoolean(getEnvValue('SOCKET_CLI_NO_API_TOKEN'))\n}\n\n/**\n * Controls Socket CLI optimization mode.\n *\n * @returns Whether optimization mode is enabled\n */\nexport function getSocketCliOptimize(): boolean {\n return envAsBoolean(getEnvValue('SOCKET_CLI_OPTIMIZE'))\n}\n\n/**\n * Socket CLI organization slug identifier (alternative name).\n * Checks SOCKET_CLI_ORG_SLUG first, then falls back to SOCKET_ORG_SLUG.\n *\n * @returns Organization slug or undefined\n */\nexport function getSocketCliOrgSlug(): string | undefined {\n return getEnvValue('SOCKET_CLI_ORG_SLUG') || getEnvValue('SOCKET_ORG_SLUG')\n}\n\n/**\n * Whether to view all Socket CLI risks (alternative name).\n *\n 
* @returns Whether to view all risks\n */\nexport function getSocketCliViewAllRisks(): boolean {\n return envAsBoolean(getEnvValue('SOCKET_CLI_VIEW_ALL_RISKS'))\n}\n\n/**\n * Socket CLI GitHub authentication token.\n * Checks SOCKET_CLI_GITHUB_TOKEN, SOCKET_SECURITY_GITHUB_PAT, then falls back to GITHUB_TOKEN.\n *\n * @returns GitHub token or undefined\n */\nexport function getSocketCliGithubToken(): string | undefined {\n return (\n getEnvValue('SOCKET_CLI_GITHUB_TOKEN') ||\n getEnvValue('SOCKET_SECURITY_GITHUB_PAT') ||\n getEnvValue('GITHUB_TOKEN')\n )\n}\n\n/**\n * Bootstrap package spec (e.g., @socketsecurity/cli@^2.0.11).\n * Set by bootstrap wrappers (SEA/smol/npm) to pass package spec to CLI.\n *\n * @returns Bootstrap package spec or undefined\n */\nexport function getSocketCliBootstrapSpec(): string | undefined {\n return getEnvValue('SOCKET_CLI_BOOTSTRAP_SPEC')\n}\n\n/**\n * Bootstrap cache directory path.\n * Set by bootstrap wrappers to pass dlx cache location to CLI.\n *\n * @returns Bootstrap cache directory or undefined\n */\nexport function getSocketCliBootstrapCacheDir(): string | undefined {\n return getEnvValue('SOCKET_CLI_BOOTSTRAP_CACHE_DIR')\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,qBAA0C;AAC1C,oBAA4B;AAOrB,SAAS,0BAAmC;AACjD,aAAO,iCAAa,2BAAY,yBAAyB,CAAC;AAC5D;AAQO,SAAS,yBAA6C;AAC3D,aACE,2BAAY,yBAAyB,SACrC,2BAAY,8BAA8B;AAE9C;AASO,SAAS,uBAA2C;AACzD,aACE,2BAAY,sBAAsB,SAClC,2BAAY,2BAA2B,SACvC,2BAAY,aAAa,SACzB,2BAAY,aAAa,SACzB,2BAAY,YAAY,SACxB,2BAAY,YAAY;AAE5B;AAOO,SAAS,yBAAiC;AAC/C,aAAO,gCAAY,2BAAY,wBAAwB,CAAC;AAC1D;AASO,SAAS,uBAA2C;AACzD,aACE,2BAAY,sBAAsB,SAClC,2BAAY,oBAAoB,SAChC,2BAAY,2BAA2B,SACvC,2BAAY,yBAAyB;AAEzC;AAOO,SAAS,qBAAyC;AACvD,aAAO,2BAAY,mBAAmB;AACxC;AAOO,SAAS,kBAAsC;AACpD,aAAO,2BAAY,gBAAgB;AACrC;AAOO,SAAS,yBAAkC;AAChD,aAAO,iCAAa,2BAAY,yBAAyB,CAAC;AAC5D;AAOO,SAAS,uBAAgC;AAC9C,aAAO,iCAAa,2BAAY,qBAAqB,CAAC;AACxD;AAQO,SAAS,sBAA0C;AACxD,aAAO,2BAAY,qBAAqB,SAAK,2BAAY,iBAAiB;AAC5E;AAOO,SAAS,2BAAoC;AAClD,aAAO,iCAAa,2BAAY,2BAA2B,CAAC;AAC9D;AAQO,SAAS,0BAA8C;AAC5D,aACE,2BAAY,yBAAyB,SACrC,2BAAY,4BAA4B,SACxC,2BAAY,cAAc;AAE9B;AAQO,SAAS,4BAAgD;AAC9D,aAAO,2BAAY,2BAA2B;AAChD;AAQO,SAAS,gCAAoD;AAClE,aAAO,2BAAY,gCAAgC;AACrD;",
  "names": []
  }
package/dist/ipc.js
CHANGED
@@ -129,7 +129,7 @@ async function cleanupIpcStubs(appName) {
  const files = await import_fs.promises.readdir(stubDir);
  const now = Date.now();
  const maxAgeMs = 5 * 60 * 1e3;
- await Promise.all(
+ await Promise.allSettled(
  files.map(async (file) => {
  if (file.startsWith("stub-") && file.endsWith(".json")) {
  const filePath = import_path.default.join(stubDir, file);
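The only change to ipc.js is that `cleanupIpcStubs` now awaits `Promise.allSettled` instead of `Promise.all` over the per-file cleanup tasks. A generic sketch (not the library's code) of the behavioral difference this buys:

```typescript
// With Promise.all, the first rejected task rejects the whole cleanup pass;
// with Promise.allSettled, every task runs to completion and the pass itself
// never rejects, so one bad stub file cannot abort cleanup of the others.
async function cleanupAll(tasks: Array<() => Promise<void>>) {
  const results = await Promise.allSettled(tasks.map(task => task()))
  const failed = results.filter(r => r.status === 'rejected').length
  return { total: results.length, failed }
}
```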
package/dist/ipc.js.map
CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../src/ipc.ts"],
-
"sourcesContent": ["/**\n * IPC (Inter-Process Communication) Module\n * ==========================================\n *\n * This module provides secure inter-process communication utilities for Socket CLI\n * and related tools. It replaces environment variable passing with more secure and\n * scalable alternatives.\n *\n * ## Key Features:\n * - File-based stub communication for initial data handoff\n * - Node.js IPC channel support for real-time bidirectional messaging\n * - Automatic cleanup of temporary files\n * - Type-safe message validation with Zod schemas\n * - Timeout handling for reliability\n *\n * ## Use Cases:\n * 1. Passing API tokens between processes without exposing them in env vars\n * 2. Transferring large configuration objects that exceed env var size limits\n * 3. Bidirectional communication between parent and child processes\n * 4. Secure handshake protocols between Socket CLI components\n *\n * ## Security Considerations:\n * - Stub files are created with restricted permissions in OS temp directory\n * - Messages include timestamps for freshness validation\n * - Automatic cleanup prevents sensitive data persistence\n * - Unique IDs prevent message replay attacks\n *\n * @module ipc\n */\n\nimport crypto from 'crypto'\n\nimport { promises as fs } from 'fs'\n\nimport path from 'path'\n\nimport { safeDeleteSync } from './fs'\nimport { getOsTmpDir } from './paths'\nimport { z } from './zod'\n\n// Define BufferEncoding type for TypeScript compatibility.\ntype BufferEncoding = globalThis.BufferEncoding\n\n/**\n * Zod Schemas for Runtime Validation\n * ====================================\n * These schemas provide runtime type safety for IPC messages,\n * ensuring data integrity across process boundaries.\n */\n\n/**\n * Base IPC message schema - validates the core message structure.\n * All IPC messages must conform to this schema.\n */\nconst IpcMessageSchema = z.object({\n /** Unique identifier for message tracking and response correlation. */\n id: z.string().min(1),\n /** Unix timestamp for freshness validation and replay prevention. */\n timestamp: z.number().positive(),\n /** Message type identifier for routing and handling. */\n type: z.string().min(1),\n /** Payload data - can be any JSON-serializable value. */\n data: z.unknown(),\n})\n\n/**\n * IPC handshake schema - used for initial connection establishment.\n * The handshake includes version info and authentication tokens.\n * @internal Exported for testing purposes.\n */\nexport const IpcHandshakeSchema = IpcMessageSchema.extend({\n type: z.literal('handshake'),\n data: z.object({\n /** Protocol version for compatibility checking. */\n version: z.string(),\n /** Process ID for identification. */\n pid: z.number().int().positive(),\n /** Optional API token for authentication. */\n apiToken: z.string().optional(),\n /** Application name for multi-app support. */\n appName: z.string(),\n }),\n})\n\n/**\n * IPC stub file schema - validates the structure of stub files.\n * Stub files are used for passing data between processes via filesystem.\n */\nconst IpcStubSchema = z.object({\n /** Process ID that created the stub. */\n pid: z.number().int().positive(),\n /** Creation timestamp for age validation. */\n timestamp: z.number().positive(),\n /** The actual data payload. 
*/\n data: z.unknown(),\n})\n\n/**\n * TypeScript interfaces for IPC communication.\n * These types ensure type consistency across the IPC module.\n */\n\n/**\n * Base IPC message interface.\n * All IPC messages must conform to this structure.\n */\nexport interface IpcMessage<T = unknown> {\n /** Unique identifier for message tracking and response correlation. */\n id: string\n /** Unix timestamp for freshness validation and replay prevention. */\n timestamp: number\n /** Message type identifier for routing and handling. */\n type: string\n /** Payload data - can be any JSON-serializable value. */\n data: T\n}\n\n/**\n * IPC handshake message interface.\n * Used for initial connection establishment.\n */\nexport interface IpcHandshake\n extends IpcMessage<{\n /** Protocol version for compatibility checking. */\n version: string\n /** Process ID for identification. */\n pid: number\n /** Optional API token for authentication. */\n apiToken?: string\n /** Application name for multi-app support. */\n appName: string\n }> {\n type: 'handshake'\n}\n\n/**\n * IPC stub file interface.\n * Represents the structure of stub files used for filesystem-based IPC.\n */\nexport interface IpcStub {\n /** Process ID that created the stub. */\n pid: number\n /** Creation timestamp for age validation. */\n timestamp: number\n /** The actual data payload. */\n data: unknown\n}\n\n/**\n * Options for IPC communication\n */\nexport interface IpcOptions {\n /** Timeout in milliseconds for async operations. */\n timeout?: number\n /** Text encoding for message serialization. */\n encoding?: BufferEncoding\n}\n\n/**\n * Create a unique IPC channel identifier for message correlation.\n *\n * Generates a unique identifier that combines:\n * - A prefix for namespacing (defaults to 'socket')\n * - The current process ID for process identification\n * - A random hex string for uniqueness\n *\n * @param prefix - Optional prefix to namespace the channel ID\n * @returns A unique channel identifier string\n *\n * @example\n * ```typescript\n * const channelId = createIpcChannelId('socket-cli')\n * // Returns: 'socket-cli-12345-a1b2c3d4e5f6g7h8'\n * ```\n */\nexport function createIpcChannelId(prefix = 'socket'): string {\n return `${prefix}-${process.pid}-${crypto.randomBytes(8).toString('hex')}`\n}\n\n/**\n * Get the IPC stub path for a given application.\n *\n * This function generates a unique file path for IPC stub files that are used\n * to pass data between processes. 
The stub files are stored in a hidden directory\n * within the system's temporary folder.\n *\n * ## Path Structure:\n * - Base: System temp directory (e.g., /tmp on Unix, %TEMP% on Windows)\n * - Directory: `.socket-ipc/{appName}/`\n * - Filename: `stub-{pid}.json`\n *\n * ## Security Features:\n * - Files are isolated per application via appName parameter\n * - Process ID in filename prevents collisions between concurrent processes\n * - Temporary directory location ensures automatic cleanup on system restart\n *\n * @param appName - The application identifier (e.g., 'socket-cli', 'socket-dlx')\n * @returns Full path to the IPC stub file\n *\n * @example\n * ```typescript\n * const stubPath = getIpcStubPath('socket-cli')\n * // Returns: '/tmp/.socket-ipc/socket-cli/stub-12345.json' (Unix)\n * // Returns: 'C:\\\\Users\\\\Name\\\\AppData\\\\Local\\\\Temp\\\\.socket-ipc\\\\socket-cli\\\\stub-12345.json' (Windows)\n * ```\n *\n * @used Currently used by socket-cli for self-update and inter-process communication\n */\nexport function getIpcStubPath(appName: string): string {\n // Get the system's temporary directory - this is platform-specific.\n const tempDir = getOsTmpDir()\n\n // Create a hidden directory structure for Socket IPC files.\n // The dot prefix makes it hidden on Unix-like systems.\n const stubDir = path.join(tempDir, '.socket-ipc', appName)\n\n // Generate filename with process ID to ensure uniqueness.\n // The PID prevents conflicts when multiple processes run simultaneously.\n return path.join(stubDir, `stub-${process.pid}.json`)\n}\n\n/**\n * Ensure IPC directory exists for stub file creation.\n *\n * This helper function creates the directory structure needed for IPC stub files.\n * It's called before writing stub files to ensure the parent directories exist.\n *\n * @param filePath - Full path to the file that needs its directory created\n * @returns Promise that resolves when directory is created\n *\n * @internal Helper function used by writeIpcStub\n */\nasync function ensureIpcDirectory(filePath: string): Promise<void> {\n const dir = path.dirname(filePath)\n // Create directory recursively if it doesn't exist.\n await fs.mkdir(dir, { recursive: true })\n}\n\n/**\n * Write IPC data to a stub file for inter-process data transfer.\n *\n * This function creates a stub file containing data that needs to be passed\n * between processes. The stub file includes metadata like process ID and\n * timestamp for validation.\n *\n * ## File Structure:\n * ```json\n * {\n * \"pid\": 12345,\n * \"timestamp\": 1699564234567,\n * \"data\": { ... }\n * }\n * ```\n *\n * ## Use Cases:\n * - Passing API tokens to child processes\n * - Transferring configuration between Socket CLI components\n * - Sharing large data that exceeds environment variable limits\n *\n * @param appName - The application identifier\n * @param data - The data to write to the stub file\n * @returns Promise resolving to the stub file path\n *\n * @example\n * ```typescript\n * const stubPath = await writeIpcStub('socket-cli', {\n * apiToken: 'secret-token',\n * config: { ... 
}\n * })\n * // Pass stubPath to child process for reading\n * ```\n */\nexport async function writeIpcStub(\n appName: string,\n data: unknown,\n): Promise<string> {\n const stubPath = getIpcStubPath(appName)\n await ensureIpcDirectory(stubPath)\n\n // Create stub data with validation metadata.\n const ipcData: IpcStub = {\n data,\n pid: process.pid,\n timestamp: Date.now(),\n }\n\n // Validate data structure with Zod schema.\n const validated = IpcStubSchema.parse(ipcData)\n\n // Write with pretty printing for debugging.\n await fs.writeFile(stubPath, JSON.stringify(validated, null, 2), 'utf8')\n return stubPath\n}\n\n/**\n * Read IPC data from a stub file with automatic cleanup.\n *\n * This function reads data from an IPC stub file and validates its freshness.\n * Stale files (older than 5 minutes) are automatically cleaned up to prevent\n * accumulation of temporary files.\n *\n * ## Validation Steps:\n * 1. Read and parse JSON file\n * 2. Validate structure with Zod schema\n * 3. Check timestamp freshness\n * 4. Clean up if stale\n * 5. Return data if valid\n *\n * @param stubPath - Path to the stub file to read\n * @returns Promise resolving to the data or null if invalid/stale\n *\n * @example\n * ```typescript\n * const data = await readIpcStub('/tmp/.socket-ipc/socket-cli/stub-12345.json')\n * if (data) {\n * console.log('Received:', data)\n * }\n * ```\n *\n * @unused Reserved for future implementation\n */\nexport async function readIpcStub(stubPath: string): Promise<unknown> {\n try {\n const content = await fs.readFile(stubPath, 'utf8')\n const parsed = JSON.parse(content)\n // Validate structure with Zod schema.\n const validated = IpcStubSchema.parse(parsed)\n // Check age for freshness validation.\n const ageMs = Date.now() - validated.timestamp\n // 5 minutes.\n const maxAgeMs = 5 * 60 * 1000\n if (ageMs > maxAgeMs) {\n // Clean up stale file. IPC stubs are always in tmpdir, so use force: true.\n try {\n safeDeleteSync(stubPath, { force: true })\n } catch {\n // Ignore deletion errors\n }\n return null\n }\n return validated.data\n } catch {\n // Return null for any errors (file not found, invalid JSON, validation failure).\n return null\n }\n}\n\n/**\n * Clean up IPC stub files for an application.\n *\n * This maintenance function removes stale IPC stub files to prevent\n * accumulation in the temporary directory. 
It's designed to be called\n * periodically or on application startup.\n *\n * ## Cleanup Rules:\n * - Files older than 5 minutes are removed (checked via both filesystem mtime and JSON timestamp)\n * - Only stub files (stub-*.json) are processed\n * - Errors are silently ignored (best-effort cleanup)\n *\n * @param appName - The application identifier\n * @returns Promise that resolves when cleanup is complete\n *\n * @example\n * ```typescript\n * // Clean up on application startup\n * await cleanupIpcStubs('socket-cli')\n * ```\n *\n * @unused Reserved for future implementation\n */\nexport async function cleanupIpcStubs(appName: string): Promise<void> {\n const tempDir = getOsTmpDir()\n const stubDir = path.join(tempDir, '.socket-ipc', appName)\n try {\n const files = await fs.readdir(stubDir)\n const now = Date.now()\n // 5 minutes.\n const maxAgeMs = 5 * 60 * 1000\n // Process each file in parallel for efficiency.\n await Promise.all(\n files.map(async file => {\n if (file.startsWith('stub-') && file.endsWith('.json')) {\n const filePath = path.join(stubDir, file)\n try {\n // Check both filesystem mtime and JSON timestamp for more reliable detection\n const stats = await fs.stat(filePath)\n const mtimeAge = now - stats.mtimeMs\n let isStale = mtimeAge > maxAgeMs\n\n // Always check the timestamp inside the JSON file for accuracy\n // This is more reliable than filesystem mtime in some environments\n try {\n const content = await fs.readFile(filePath, 'utf8')\n const parsed = JSON.parse(content)\n const validated = IpcStubSchema.parse(parsed)\n const contentAge = now - validated.timestamp\n // File is stale if EITHER check indicates staleness\n isStale = isStale || contentAge > maxAgeMs\n } catch {\n // If we can't read/parse the file, rely on mtime check\n }\n\n if (isStale) {\n // IPC stubs are always in tmpdir, so we can use force: true to skip path checks\n safeDeleteSync(filePath, { force: true })\n }\n } catch {\n // Ignore errors for individual files.\n }\n }\n }),\n )\n } catch {\n // Directory might not exist, that's ok.\n }\n}\n\n/**\n * Send data through Node.js IPC channel.\n *\n * This function sends structured messages through the Node.js IPC channel\n * when available. The IPC channel must be established with stdio: ['pipe', 'pipe', 'pipe', 'ipc'].\n *\n * ## Requirements:\n * - Process must have been spawned with IPC channel enabled\n * - Message must be serializable to JSON\n * - Process.send() must be available\n *\n * @param process - The process object with IPC channel\n * @param message - The IPC message to send\n * @returns true if message was sent, false otherwise\n *\n * @example\n * ```typescript\n * const message = createIpcMessage('handshake', { version: '1.0.0' })\n * const sent = sendIpc(childProcess, message)\n * ```\n *\n * @unused Reserved for bidirectional communication implementation\n */\nexport function sendIpc(\n process: NodeJS.Process | unknown,\n message: IpcMessage,\n): boolean {\n if (\n process &&\n typeof process === 'object' &&\n 'send' in process &&\n typeof process.send === 'function'\n ) {\n try {\n // Validate message structure before sending.\n const validated = IpcMessageSchema.parse(message)\n return process.send(validated)\n } catch {\n return false\n }\n }\n return false\n}\n\n/**\n * Receive data through Node.js IPC channel.\n *\n * Sets up a listener for IPC messages with automatic validation and parsing.\n * Returns a cleanup function to remove the listener when no longer needed.\n *\n * ## Message Flow:\n * 1. 
Receive raw message from IPC channel\n * 2. Validate with parseIpcMessage\n * 3. Call handler if valid\n * 4. Ignore invalid messages\n *\n * @param handler - Function to call with valid IPC messages\n * @returns Cleanup function to remove the listener\n *\n * @example\n * ```typescript\n * const cleanup = onIpc((message) => {\n * console.log('Received:', message.type, message.data)\n * })\n * // Later...\n * cleanup() // Remove listener\n * ```\n *\n * @unused Reserved for bidirectional communication\n */\nexport function onIpc(handler: (message: IpcMessage) => void): () => void {\n const listener = (message: unknown) => {\n const parsed = parseIpcMessage(message)\n if (parsed) {\n handler(parsed)\n }\n }\n process.on('message', listener)\n // Return cleanup function for proper resource management.\n return () => {\n process.off('message', listener)\n }\n}\n\n/**\n * Create a promise that resolves when a specific IPC message is received.\n *\n * This utility function provides async/await support for IPC communication,\n * allowing you to wait for specific message types with timeout support.\n *\n * ## Features:\n * - Automatic timeout handling\n * - Type-safe message data\n * - Resource cleanup on completion\n * - Promise-based API\n *\n * @param messageType - The message type to wait for\n * @param options - Options including timeout configuration\n * @returns Promise resolving to the message data\n *\n * @example\n * ```typescript\n * try {\n * const response = await waitForIpc<ConfigData>('config-response', {\n * timeout: 5000 // 5 seconds\n * })\n * console.log('Config received:', response)\n * } catch (error) {\n * console.error('Timeout waiting for config')\n * }\n * ```\n *\n * @unused Reserved for request-response pattern implementation\n */\nexport function waitForIpc<T = unknown>(\n messageType: string,\n options: IpcOptions = {},\n): Promise<T> {\n const { timeout = 30_000 } = options\n return new Promise((resolve, reject) => {\n let cleanup: (() => void) | null = null\n let timeoutId: NodeJS.Timeout | null = null\n const handleTimeout = () => {\n if (cleanup) {\n cleanup()\n }\n reject(new Error(`IPC timeout waiting for message type: ${messageType}`))\n }\n const handleMessage = (message: IpcMessage) => {\n if (message.type === messageType) {\n if (timeoutId) {\n clearTimeout(timeoutId)\n }\n if (cleanup) {\n cleanup()\n }\n resolve(message.data as T)\n }\n }\n cleanup = onIpc(handleMessage)\n if (timeout > 0) {\n timeoutId = setTimeout(handleTimeout, timeout)\n }\n })\n}\n\n/**\n * Create an IPC message with proper structure and metadata.\n *\n * This factory function creates properly structured IPC messages with:\n * - Unique ID for tracking\n * - Timestamp for freshness\n * - Type for routing\n * - Data payload\n *\n * @param type - The message type identifier\n * @param data - The message payload\n * @returns A properly structured IPC message\n *\n * @example\n * ```typescript\n * const handshake = createIpcMessage('handshake', {\n * version: '1.0.0',\n * pid: process.pid,\n * appName: 'socket-cli'\n * })\n * ```\n *\n * @unused Reserved for future message creation needs\n */\nexport function createIpcMessage<T = unknown>(\n type: string,\n data: T,\n): IpcMessage<T> {\n return {\n id: crypto.randomBytes(16).toString('hex'),\n timestamp: Date.now(),\n type,\n data,\n }\n}\n\n/**\n * Check if process has IPC channel available.\n *\n * This utility checks whether a process object has the necessary\n * properties for IPC communication. 
Used to determine if IPC\n * messaging is possible before attempting to send.\n *\n * @param process - The process object to check\n * @returns true if IPC is available, false otherwise\n *\n * @example\n * ```typescript\n * if (hasIpcChannel(childProcess)) {\n * sendIpc(childProcess, message)\n * } else {\n * // Fall back to alternative communication method\n * }\n * ```\n *\n * @unused Reserved for IPC availability detection\n */\nexport function hasIpcChannel(process: unknown): boolean {\n return Boolean(\n process &&\n typeof process === 'object' &&\n 'send' in process &&\n typeof process.send === 'function' &&\n 'channel' in process &&\n process.channel !== undefined,\n )\n}\n\n/**\n * Safely parse and validate IPC messages.\n *\n * This function performs runtime validation of incoming messages\n * to ensure they conform to the IPC message structure. It uses\n * Zod schemas for robust validation.\n *\n * ## Validation Steps:\n * 1. Check if message is an object\n * 2. Validate required fields exist\n * 3. Validate field types\n * 4. Return typed message or null\n *\n * @param message - The raw message to parse\n * @returns Parsed IPC message or null if invalid\n *\n * @example\n * ```typescript\n * const parsed = parseIpcMessage(rawMessage)\n * if (parsed) {\n * handleMessage(parsed)\n * }\n * ```\n *\n * @unused Reserved for message validation needs\n */\nexport function parseIpcMessage(message: unknown): IpcMessage | null {\n try {\n // Use Zod schema for comprehensive validation.\n const validated = IpcMessageSchema.parse(message)\n return validated as IpcMessage\n } catch {\n // Return null for any validation failure.\n return null\n }\n}\n"],
+
"sourcesContent": ["/**\n * IPC (Inter-Process Communication) Module\n * ==========================================\n *\n * This module provides secure inter-process communication utilities for Socket CLI\n * and related tools. It replaces environment variable passing with more secure and\n * scalable alternatives.\n *\n * ## Key Features:\n * - File-based stub communication for initial data handoff\n * - Node.js IPC channel support for real-time bidirectional messaging\n * - Automatic cleanup of temporary files\n * - Type-safe message validation with Zod schemas\n * - Timeout handling for reliability\n *\n * ## Use Cases:\n * 1. Passing API tokens between processes without exposing them in env vars\n * 2. Transferring large configuration objects that exceed env var size limits\n * 3. Bidirectional communication between parent and child processes\n * 4. Secure handshake protocols between Socket CLI components\n *\n * ## Security Considerations:\n * - Stub files are created with restricted permissions in OS temp directory\n * - Messages include timestamps for freshness validation\n * - Automatic cleanup prevents sensitive data persistence\n * - Unique IDs prevent message replay attacks\n *\n * @module ipc\n */\n\nimport crypto from 'crypto'\n\nimport { promises as fs } from 'fs'\n\nimport path from 'path'\n\nimport { safeDeleteSync } from './fs'\nimport { getOsTmpDir } from './paths'\nimport { z } from './zod'\n\n// Define BufferEncoding type for TypeScript compatibility.\ntype BufferEncoding = globalThis.BufferEncoding\n\n/**\n * Zod Schemas for Runtime Validation\n * ====================================\n * These schemas provide runtime type safety for IPC messages,\n * ensuring data integrity across process boundaries.\n */\n\n/**\n * Base IPC message schema - validates the core message structure.\n * All IPC messages must conform to this schema.\n */\nconst IpcMessageSchema = z.object({\n /** Unique identifier for message tracking and response correlation. */\n id: z.string().min(1),\n /** Unix timestamp for freshness validation and replay prevention. */\n timestamp: z.number().positive(),\n /** Message type identifier for routing and handling. */\n type: z.string().min(1),\n /** Payload data - can be any JSON-serializable value. */\n data: z.unknown(),\n})\n\n/**\n * IPC handshake schema - used for initial connection establishment.\n * The handshake includes version info and authentication tokens.\n * @internal Exported for testing purposes.\n */\nexport const IpcHandshakeSchema = IpcMessageSchema.extend({\n type: z.literal('handshake'),\n data: z.object({\n /** Protocol version for compatibility checking. */\n version: z.string(),\n /** Process ID for identification. */\n pid: z.number().int().positive(),\n /** Optional API token for authentication. */\n apiToken: z.string().optional(),\n /** Application name for multi-app support. */\n appName: z.string(),\n }),\n})\n\n/**\n * IPC stub file schema - validates the structure of stub files.\n * Stub files are used for passing data between processes via filesystem.\n */\nconst IpcStubSchema = z.object({\n /** Process ID that created the stub. */\n pid: z.number().int().positive(),\n /** Creation timestamp for age validation. */\n timestamp: z.number().positive(),\n /** The actual data payload. 
*/\n data: z.unknown(),\n})\n\n/**\n * TypeScript interfaces for IPC communication.\n * These types ensure type consistency across the IPC module.\n */\n\n/**\n * Base IPC message interface.\n * All IPC messages must conform to this structure.\n */\nexport interface IpcMessage<T = unknown> {\n /** Unique identifier for message tracking and response correlation. */\n id: string\n /** Unix timestamp for freshness validation and replay prevention. */\n timestamp: number\n /** Message type identifier for routing and handling. */\n type: string\n /** Payload data - can be any JSON-serializable value. */\n data: T\n}\n\n/**\n * IPC handshake message interface.\n * Used for initial connection establishment.\n */\nexport interface IpcHandshake\n extends IpcMessage<{\n /** Protocol version for compatibility checking. */\n version: string\n /** Process ID for identification. */\n pid: number\n /** Optional API token for authentication. */\n apiToken?: string\n /** Application name for multi-app support. */\n appName: string\n }> {\n type: 'handshake'\n}\n\n/**\n * IPC stub file interface.\n * Represents the structure of stub files used for filesystem-based IPC.\n */\nexport interface IpcStub {\n /** Process ID that created the stub. */\n pid: number\n /** Creation timestamp for age validation. */\n timestamp: number\n /** The actual data payload. */\n data: unknown\n}\n\n/**\n * Options for IPC communication\n */\nexport interface IpcOptions {\n /** Timeout in milliseconds for async operations. */\n timeout?: number\n /** Text encoding for message serialization. */\n encoding?: BufferEncoding\n}\n\n/**\n * Create a unique IPC channel identifier for message correlation.\n *\n * Generates a unique identifier that combines:\n * - A prefix for namespacing (defaults to 'socket')\n * - The current process ID for process identification\n * - A random hex string for uniqueness\n *\n * @param prefix - Optional prefix to namespace the channel ID\n * @returns A unique channel identifier string\n *\n * @example\n * ```typescript\n * const channelId = createIpcChannelId('socket-cli')\n * // Returns: 'socket-cli-12345-a1b2c3d4e5f6g7h8'\n * ```\n */\nexport function createIpcChannelId(prefix = 'socket'): string {\n return `${prefix}-${process.pid}-${crypto.randomBytes(8).toString('hex')}`\n}\n\n/**\n * Get the IPC stub path for a given application.\n *\n * This function generates a unique file path for IPC stub files that are used\n * to pass data between processes. 
The stub files are stored in a hidden directory\n * within the system's temporary folder.\n *\n * ## Path Structure:\n * - Base: System temp directory (e.g., /tmp on Unix, %TEMP% on Windows)\n * - Directory: `.socket-ipc/{appName}/`\n * - Filename: `stub-{pid}.json`\n *\n * ## Security Features:\n * - Files are isolated per application via appName parameter\n * - Process ID in filename prevents collisions between concurrent processes\n * - Temporary directory location ensures automatic cleanup on system restart\n *\n * @param appName - The application identifier (e.g., 'socket-cli', 'socket-dlx')\n * @returns Full path to the IPC stub file\n *\n * @example\n * ```typescript\n * const stubPath = getIpcStubPath('socket-cli')\n * // Returns: '/tmp/.socket-ipc/socket-cli/stub-12345.json' (Unix)\n * // Returns: 'C:\\\\Users\\\\Name\\\\AppData\\\\Local\\\\Temp\\\\.socket-ipc\\\\socket-cli\\\\stub-12345.json' (Windows)\n * ```\n *\n * @used Currently used by socket-cli for self-update and inter-process communication\n */\nexport function getIpcStubPath(appName: string): string {\n // Get the system's temporary directory - this is platform-specific.\n const tempDir = getOsTmpDir()\n\n // Create a hidden directory structure for Socket IPC files.\n // The dot prefix makes it hidden on Unix-like systems.\n const stubDir = path.join(tempDir, '.socket-ipc', appName)\n\n // Generate filename with process ID to ensure uniqueness.\n // The PID prevents conflicts when multiple processes run simultaneously.\n return path.join(stubDir, `stub-${process.pid}.json`)\n}\n\n/**\n * Ensure IPC directory exists for stub file creation.\n *\n * This helper function creates the directory structure needed for IPC stub files.\n * It's called before writing stub files to ensure the parent directories exist.\n *\n * @param filePath - Full path to the file that needs its directory created\n * @returns Promise that resolves when directory is created\n *\n * @internal Helper function used by writeIpcStub\n */\nasync function ensureIpcDirectory(filePath: string): Promise<void> {\n const dir = path.dirname(filePath)\n // Create directory recursively if it doesn't exist.\n await fs.mkdir(dir, { recursive: true })\n}\n\n/**\n * Write IPC data to a stub file for inter-process data transfer.\n *\n * This function creates a stub file containing data that needs to be passed\n * between processes. The stub file includes metadata like process ID and\n * timestamp for validation.\n *\n * ## File Structure:\n * ```json\n * {\n * \"pid\": 12345,\n * \"timestamp\": 1699564234567,\n * \"data\": { ... }\n * }\n * ```\n *\n * ## Use Cases:\n * - Passing API tokens to child processes\n * - Transferring configuration between Socket CLI components\n * - Sharing large data that exceeds environment variable limits\n *\n * @param appName - The application identifier\n * @param data - The data to write to the stub file\n * @returns Promise resolving to the stub file path\n *\n * @example\n * ```typescript\n * const stubPath = await writeIpcStub('socket-cli', {\n * apiToken: 'secret-token',\n * config: { ... 
}\n * })\n * // Pass stubPath to child process for reading\n * ```\n */\nexport async function writeIpcStub(\n appName: string,\n data: unknown,\n): Promise<string> {\n const stubPath = getIpcStubPath(appName)\n await ensureIpcDirectory(stubPath)\n\n // Create stub data with validation metadata.\n const ipcData: IpcStub = {\n data,\n pid: process.pid,\n timestamp: Date.now(),\n }\n\n // Validate data structure with Zod schema.\n const validated = IpcStubSchema.parse(ipcData)\n\n // Write with pretty printing for debugging.\n await fs.writeFile(stubPath, JSON.stringify(validated, null, 2), 'utf8')\n return stubPath\n}\n\n/**\n * Read IPC data from a stub file with automatic cleanup.\n *\n * This function reads data from an IPC stub file and validates its freshness.\n * Stale files (older than 5 minutes) are automatically cleaned up to prevent\n * accumulation of temporary files.\n *\n * ## Validation Steps:\n * 1. Read and parse JSON file\n * 2. Validate structure with Zod schema\n * 3. Check timestamp freshness\n * 4. Clean up if stale\n * 5. Return data if valid\n *\n * @param stubPath - Path to the stub file to read\n * @returns Promise resolving to the data or null if invalid/stale\n *\n * @example\n * ```typescript\n * const data = await readIpcStub('/tmp/.socket-ipc/socket-cli/stub-12345.json')\n * if (data) {\n * console.log('Received:', data)\n * }\n * ```\n *\n * @unused Reserved for future implementation\n */\nexport async function readIpcStub(stubPath: string): Promise<unknown> {\n try {\n const content = await fs.readFile(stubPath, 'utf8')\n const parsed = JSON.parse(content)\n // Validate structure with Zod schema.\n const validated = IpcStubSchema.parse(parsed)\n // Check age for freshness validation.\n const ageMs = Date.now() - validated.timestamp\n // 5 minutes.\n const maxAgeMs = 5 * 60 * 1000\n if (ageMs > maxAgeMs) {\n // Clean up stale file. IPC stubs are always in tmpdir, so use force: true.\n try {\n safeDeleteSync(stubPath, { force: true })\n } catch {\n // Ignore deletion errors\n }\n return null\n }\n return validated.data\n } catch {\n // Return null for any errors (file not found, invalid JSON, validation failure).\n return null\n }\n}\n\n/**\n * Clean up IPC stub files for an application.\n *\n * This maintenance function removes stale IPC stub files to prevent\n * accumulation in the temporary directory. 
It's designed to be called\n * periodically or on application startup.\n *\n * ## Cleanup Rules:\n * - Files older than 5 minutes are removed (checked via both filesystem mtime and JSON timestamp)\n * - Only stub files (stub-*.json) are processed\n * - Errors are silently ignored (best-effort cleanup)\n *\n * @param appName - The application identifier\n * @returns Promise that resolves when cleanup is complete\n *\n * @example\n * ```typescript\n * // Clean up on application startup\n * await cleanupIpcStubs('socket-cli')\n * ```\n *\n * @unused Reserved for future implementation\n */\nexport async function cleanupIpcStubs(appName: string): Promise<void> {\n const tempDir = getOsTmpDir()\n const stubDir = path.join(tempDir, '.socket-ipc', appName)\n try {\n const files = await fs.readdir(stubDir)\n const now = Date.now()\n // 5 minutes.\n const maxAgeMs = 5 * 60 * 1000\n // Process each file in parallel for efficiency.\n await Promise.allSettled(\n files.map(async file => {\n if (file.startsWith('stub-') && file.endsWith('.json')) {\n const filePath = path.join(stubDir, file)\n try {\n // Check both filesystem mtime and JSON timestamp for more reliable detection\n const stats = await fs.stat(filePath)\n const mtimeAge = now - stats.mtimeMs\n let isStale = mtimeAge > maxAgeMs\n\n // Always check the timestamp inside the JSON file for accuracy\n // This is more reliable than filesystem mtime in some environments\n try {\n const content = await fs.readFile(filePath, 'utf8')\n const parsed = JSON.parse(content)\n const validated = IpcStubSchema.parse(parsed)\n const contentAge = now - validated.timestamp\n // File is stale if EITHER check indicates staleness\n isStale = isStale || contentAge > maxAgeMs\n } catch {\n // If we can't read/parse the file, rely on mtime check\n }\n\n if (isStale) {\n // IPC stubs are always in tmpdir, so we can use force: true to skip path checks\n safeDeleteSync(filePath, { force: true })\n }\n } catch {\n // Ignore errors for individual files.\n }\n }\n }),\n )\n } catch {\n // Directory might not exist, that's ok.\n }\n}\n\n/**\n * Send data through Node.js IPC channel.\n *\n * This function sends structured messages through the Node.js IPC channel\n * when available. The IPC channel must be established with stdio: ['pipe', 'pipe', 'pipe', 'ipc'].\n *\n * ## Requirements:\n * - Process must have been spawned with IPC channel enabled\n * - Message must be serializable to JSON\n * - Process.send() must be available\n *\n * @param process - The process object with IPC channel\n * @param message - The IPC message to send\n * @returns true if message was sent, false otherwise\n *\n * @example\n * ```typescript\n * const message = createIpcMessage('handshake', { version: '1.0.0' })\n * const sent = sendIpc(childProcess, message)\n * ```\n *\n * @unused Reserved for bidirectional communication implementation\n */\nexport function sendIpc(\n process: NodeJS.Process | unknown,\n message: IpcMessage,\n): boolean {\n if (\n process &&\n typeof process === 'object' &&\n 'send' in process &&\n typeof process.send === 'function'\n ) {\n try {\n // Validate message structure before sending.\n const validated = IpcMessageSchema.parse(message)\n return process.send(validated)\n } catch {\n return false\n }\n }\n return false\n}\n\n/**\n * Receive data through Node.js IPC channel.\n *\n * Sets up a listener for IPC messages with automatic validation and parsing.\n * Returns a cleanup function to remove the listener when no longer needed.\n *\n * ## Message Flow:\n * 1. 
Receive raw message from IPC channel\n * 2. Validate with parseIpcMessage\n * 3. Call handler if valid\n * 4. Ignore invalid messages\n *\n * @param handler - Function to call with valid IPC messages\n * @returns Cleanup function to remove the listener\n *\n * @example\n * ```typescript\n * const cleanup = onIpc((message) => {\n * console.log('Received:', message.type, message.data)\n * })\n * // Later...\n * cleanup() // Remove listener\n * ```\n *\n * @unused Reserved for bidirectional communication\n */\nexport function onIpc(handler: (message: IpcMessage) => void): () => void {\n const listener = (message: unknown) => {\n const parsed = parseIpcMessage(message)\n if (parsed) {\n handler(parsed)\n }\n }\n process.on('message', listener)\n // Return cleanup function for proper resource management.\n return () => {\n process.off('message', listener)\n }\n}\n\n/**\n * Create a promise that resolves when a specific IPC message is received.\n *\n * This utility function provides async/await support for IPC communication,\n * allowing you to wait for specific message types with timeout support.\n *\n * ## Features:\n * - Automatic timeout handling\n * - Type-safe message data\n * - Resource cleanup on completion\n * - Promise-based API\n *\n * @param messageType - The message type to wait for\n * @param options - Options including timeout configuration\n * @returns Promise resolving to the message data\n *\n * @example\n * ```typescript\n * try {\n * const response = await waitForIpc<ConfigData>('config-response', {\n * timeout: 5000 // 5 seconds\n * })\n * console.log('Config received:', response)\n * } catch (error) {\n * console.error('Timeout waiting for config')\n * }\n * ```\n *\n * @unused Reserved for request-response pattern implementation\n */\nexport function waitForIpc<T = unknown>(\n messageType: string,\n options: IpcOptions = {},\n): Promise<T> {\n const { timeout = 30_000 } = options\n return new Promise((resolve, reject) => {\n let cleanup: (() => void) | null = null\n let timeoutId: NodeJS.Timeout | null = null\n const handleTimeout = () => {\n if (cleanup) {\n cleanup()\n }\n reject(new Error(`IPC timeout waiting for message type: ${messageType}`))\n }\n const handleMessage = (message: IpcMessage) => {\n if (message.type === messageType) {\n if (timeoutId) {\n clearTimeout(timeoutId)\n }\n if (cleanup) {\n cleanup()\n }\n resolve(message.data as T)\n }\n }\n cleanup = onIpc(handleMessage)\n if (timeout > 0) {\n timeoutId = setTimeout(handleTimeout, timeout)\n }\n })\n}\n\n/**\n * Create an IPC message with proper structure and metadata.\n *\n * This factory function creates properly structured IPC messages with:\n * - Unique ID for tracking\n * - Timestamp for freshness\n * - Type for routing\n * - Data payload\n *\n * @param type - The message type identifier\n * @param data - The message payload\n * @returns A properly structured IPC message\n *\n * @example\n * ```typescript\n * const handshake = createIpcMessage('handshake', {\n * version: '1.0.0',\n * pid: process.pid,\n * appName: 'socket-cli'\n * })\n * ```\n *\n * @unused Reserved for future message creation needs\n */\nexport function createIpcMessage<T = unknown>(\n type: string,\n data: T,\n): IpcMessage<T> {\n return {\n id: crypto.randomBytes(16).toString('hex'),\n timestamp: Date.now(),\n type,\n data,\n }\n}\n\n/**\n * Check if process has IPC channel available.\n *\n * This utility checks whether a process object has the necessary\n * properties for IPC communication. 
Used to determine if IPC\n * messaging is possible before attempting to send.\n *\n * @param process - The process object to check\n * @returns true if IPC is available, false otherwise\n *\n * @example\n * ```typescript\n * if (hasIpcChannel(childProcess)) {\n * sendIpc(childProcess, message)\n * } else {\n * // Fall back to alternative communication method\n * }\n * ```\n *\n * @unused Reserved for IPC availability detection\n */\nexport function hasIpcChannel(process: unknown): boolean {\n return Boolean(\n process &&\n typeof process === 'object' &&\n 'send' in process &&\n typeof process.send === 'function' &&\n 'channel' in process &&\n process.channel !== undefined,\n )\n}\n\n/**\n * Safely parse and validate IPC messages.\n *\n * This function performs runtime validation of incoming messages\n * to ensure they conform to the IPC message structure. It uses\n * Zod schemas for robust validation.\n *\n * ## Validation Steps:\n * 1. Check if message is an object\n * 2. Validate required fields exist\n * 3. Validate field types\n * 4. Return typed message or null\n *\n * @param message - The raw message to parse\n * @returns Parsed IPC message or null if invalid\n *\n * @example\n * ```typescript\n * const parsed = parseIpcMessage(rawMessage)\n * if (parsed) {\n * handleMessage(parsed)\n * }\n * ```\n *\n * @unused Reserved for message validation needs\n */\nexport function parseIpcMessage(message: unknown): IpcMessage | null {\n try {\n // Use Zod schema for comprehensive validation.\n const validated = IpcMessageSchema.parse(message)\n return validated as IpcMessage\n } catch {\n // Return null for any validation failure.\n return null\n }\n}\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA8BA,oBAAmB;AAEnB,gBAA+B;AAE/B,kBAAiB;AAEjB,IAAAA,aAA+B;AAC/B,mBAA4B;AAC5B,iBAAkB;AAgBlB,MAAM,mBAAmB,aAAE,OAAO;AAAA;AAAA,EAEhC,IAAI,aAAE,OAAO,EAAE,IAAI,CAAC;AAAA;AAAA,EAEpB,WAAW,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE/B,MAAM,aAAE,OAAO,EAAE,IAAI,CAAC;AAAA;AAAA,EAEtB,MAAM,aAAE,QAAQ;AAClB,CAAC;AAOM,MAAM,qBAAqB,iBAAiB,OAAO;AAAA,EACxD,MAAM,aAAE,QAAQ,WAAW;AAAA,EAC3B,MAAM,aAAE,OAAO;AAAA;AAAA,IAEb,SAAS,aAAE,OAAO;AAAA;AAAA,IAElB,KAAK,aAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA;AAAA,IAE/B,UAAU,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA,IAE9B,SAAS,aAAE,OAAO;AAAA,EACpB,CAAC;AACH,CAAC;AAMD,MAAM,gBAAgB,aAAE,OAAO;AAAA;AAAA,EAE7B,KAAK,aAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA;AAAA,EAE/B,WAAW,aAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE/B,MAAM,aAAE,QAAQ;AAClB,CAAC;AAgFM,SAAS,mBAAmB,SAAS,UAAkB;AAC5D,SAAO,GAAG,MAAM,IAAI,QAAQ,GAAG,IAAI,cAAAC,QAAO,YAAY,CAAC,EAAE,SAAS,KAAK,CAAC;AAC1E;AA+BO,SAAS,eAAe,SAAyB;AAEtD,QAAM,cAAU,0BAAY;AAI5B,QAAM,UAAU,YAAAC,QAAK,KAAK,SAAS,eAAe,OAAO;AAIzD,SAAO,YAAAA,QAAK,KAAK,SAAS,QAAQ,QAAQ,GAAG,OAAO;AACtD;AAaA,eAAe,mBAAmB,UAAiC;AACjE,QAAM,MAAM,YAAAA,QAAK,QAAQ,QAAQ;AAEjC,QAAM,UAAAC,SAAG,MAAM,KAAK,EAAE,WAAW,KAAK,CAAC;AACzC;AAoCA,eAAsB,aACpB,SACA,MACiB;AACjB,QAAM,WAAW,eAAe,OAAO;AACvC,QAAM,mBAAmB,QAAQ;AAGjC,QAAM,UAAmB;AAAA,IACvB;AAAA,IACA,KAAK,QAAQ;AAAA,IACb,WAAW,KAAK,IAAI;AAAA,EACtB;AAGA,QAAM,YAAY,cAAc,MAAM,OAAO;AAG7C,QAAM,UAAAA,SAAG,UAAU,UAAU,KAAK,UAAU,WAAW,MAAM,CAAC,GAAG,MAAM;AACvE,SAAO;AACT;AA6BA,eAAsB,YAAY,UAAoC;AACpE,MAAI;AACF,UAAM,UAAU,MAAM,UAAAA,SAAG,SAAS,UAAU,MAAM;AAClD,UAAM,SAAS,KAAK,MAAM,OAAO;AAEjC,UAAM,YAAY,cAAc,MAAM,MAAM;AAE5C,UAAM,QAAQ,KAAK,IAAI,IAAI,UAAU;AAErC,UAAM,WAAW,IAAI,KAAK;AAC1B,QAAI,QAAQ,UAAU;AAEpB,UAAI;AACF,uCAAe,UAAU,EAAE,OAAO,KAAK,CAAC;AAAA,MAC1C,QAAQ;AAAA,MAER;AACA,aAAO;AAAA,IACT;AACA,WAAO,UAAU;AAAA,EACnB,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;AAyBA,eAAsB,gBAAgB,SAAgC;AACpE,QAAM,cAAU,0BAAY;AAC5B,QAAM,UAAU,YAAAD,QAAK,KAAK,SAAS,eAAe,OAAO;AACzD,MAAI;AACF,UAAM,QAAQ,MAAM,UAAAC,SAAG,QAAQ,OAAO;AACtC,UAAM,MAAM,KAAK,IAAI;AAErB,UAAM,WAAW,IAAI,KAAK;AAE1B,UAAM,QAAQ;AAAA,MACZ,MAAM,IAAI,OAAM,SAAQ;AACtB,YAAI,KAAK,WAAW,OAAO,KAAK,KAAK,SAAS,OAAO,GAAG;AACtD,gBAAM,WAAW,YAAAD,QAAK,KAAK,SAAS,IAAI;AACxC,cAAI;AAEF,kBAAM,QAAQ,MAAM,UAAAC,SAAG,KAAK,QAAQ;AACpC,kBAAM,WAAW,MAAM,MAAM;AAC7B,gBAAI,UAAU,WAAW;AAIzB,gBAAI;AACF,oBAAM,UAAU,MAAM,UAAAA,SAAG,SAAS,UAAU,MAAM;AAClD,oBAAM,SAAS,KAAK,MAAM,OAAO;AACjC,oBAAM,YAAY,cAAc,MAAM,MAAM;AAC5C,oBAAM,aAAa,MAAM,UAAU;AAEnC,wBAAU,WAAW,aAAa;AAAA,YACpC,QAAQ;AAAA,YAER;AAEA,gBAAI,SAAS;AAEX,6CAAe,UAAU,EAAE,OAAO,KAAK,CAAC;AAAA,YAC1C;AAAA,UACF,QAAQ;AAAA,UAER;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH;AAAA,EACF,QAAQ;AAAA,EAER;AACF;AAyBO,SAAS,QACdC,UACA,SACS;AACT,MACEA,YACA,OAAOA,aAAY,YACnB,UAAUA,YACV,OAAOA,SAAQ,SAAS,YACxB;AACA,QAAI;AAEF,YAAM,YAAY,iBAAiB,MAAM,OAAO;AAChD,aAAOA,SAAQ,KAAK,SAAS;AAAA,IAC/B,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AA4BO,SAAS,MAAM,SAAoD;AACxE,QAAM,WAAW,CAAC,YAAqB;AACrC,UAAM,SAAS,gBAAgB,OAAO;AACtC,QAAI,QAAQ;AACV,cAAQ,MAAM;AAAA,IAChB;AAAA,EACF;AACA,UAAQ,GAAG,WAAW,QAAQ;AAE9B,SAAO,MAAM;AACX,YAAQ,IAAI,WAAW,QAAQ;AAAA,EACjC;AACF;AAgCO,SAAS,WACd,aACA,UAAsB,CAAC,GACX;AACZ,QAAM,EAAE,UAAU,IAAO,IAAI;AAC7B,SAAO,IAAI,QAAQ,CAAC,SAAS,WAAW;AACtC,QAAI,UAA+B;AACnC,QAAI,YAAmC;AACvC,UAAM,gBAAgB,MAAM;AAC1B,UAAI,SAAS;AACX,gBAAQ;AAAA,MACV;AACA,aAAO,IAAI,MAAM,yCAAyC,WAAW,EAAE,CAAC;AAAA,IAC1E;AACA,UAAM,gBAAgB,CAAC,YAAwB;AAC7C,UAAI,QAAQ,SAAS,aAAa;AAChC,YAAI,WAAW;AACb,uBAAa,SAAS;AAAA,QACxB;AACA,YAAI,SAAS;AACX,kBAAQ;AAAA,QACV;AACA,gBAAQ,QAAQ,IAAS;AAAA,MAC3B;AAAA,IACF;AACA,cAAU,MAAM,aAAa;AAC7B,QAAI,UAAU,GA
AG;AACf,kBAAY,WAAW,eAAe,OAAO;AAAA,IAC/C;AAAA,EACF,CAAC;AACH;AA0BO,SAAS,iBACd,MACA,MACe;AACf,SAAO;AAAA,IACL,IAAI,cAAAH,QAAO,YAAY,EAAE,EAAE,SAAS,KAAK;AAAA,IACzC,WAAW,KAAK,IAAI;AAAA,IACpB;AAAA,IACA;AAAA,EACF;AACF;AAuBO,SAAS,cAAcG,UAA2B;AACvD,SAAO;AAAA,IACLA,YACE,OAAOA,aAAY,YACnB,UAAUA,YACV,OAAOA,SAAQ,SAAS,cACxB,aAAaA,YACbA,SAAQ,YAAY;AAAA,EACxB;AACF;AA4BO,SAAS,gBAAgB,SAAqC;AACnE,MAAI;AAEF,UAAM,YAAY,iBAAiB,MAAM,OAAO;AAChD,WAAO;AAAA,EACT,QAAQ;AAEN,WAAO;AAAA,EACT;AACF;",
"names": ["import_fs", "crypto", "path", "fs", "process"]
}
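The ipc.ts source embedded in the map above documents a filesystem-based fallback for passing data between processes (writeIpcStub / readIpcStub, stub files under the system temp directory with a 5-minute freshness window) alongside the Node.js IPC channel helpers. A minimal sketch of how that stub flow is meant to be used follows; the '@socketsecurity/lib/ipc' import path and the SOCKET_IPC_STUB variable name are assumptions for illustration, not part of the package.

```typescript
// Sketch only: a parent process writes a stub file, a child process reads it back.
// Import path and the SOCKET_IPC_STUB env var name are assumptions.
import { spawn } from 'node:child_process'

import { readIpcStub, writeIpcStub } from '@socketsecurity/lib/ipc'

async function parent(): Promise<void> {
  // Writes {tmpdir}/.socket-ipc/socket-cli/stub-<pid>.json and returns the path.
  const stubPath = await writeIpcStub('socket-cli', { apiToken: 'example-token' })
  // Hand the child only the path, e.g. through an environment variable.
  spawn(process.execPath, ['child.js'], {
    env: { ...process.env, SOCKET_IPC_STUB: stubPath },
    stdio: 'inherit',
  })
}

async function child(): Promise<void> {
  const stubPath = process.env['SOCKET_IPC_STUB']
  if (!stubPath) {
    return
  }
  // Per the source above, returns null if the file is missing, malformed,
  // or older than 5 minutes, rather than throwing.
  const payload = await readIpcStub(stubPath)
  if (payload) {
    console.log('received IPC payload:', payload)
  }
}
```

Because readIpcStub returns null rather than throwing on missing, malformed, or stale files, callers should treat a null result as "no payload" and fall back accordingly.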
package/dist/promises.js
CHANGED
@@ -122,7 +122,7 @@ async function pEach(array, callbackFn, options) {
     if (signal?.aborted) {
       return;
     }
-    await Promise.all(
+    await Promise.allSettled(
       chunk.map(
         (item) => /* @__PURE__ */ pRetry((...args) => callbackFn(args[0]), {
           ...retries,
@@ -169,7 +169,7 @@ async function pFilterChunk(chunks, callbackFn, options) {
       filteredChunks[i] = [];
     } else {
       const chunk = chunks[i];
-      const predicateResults = await Promise.all(
+      const settled = await Promise.allSettled(
         chunk.map(
           (value) => /* @__PURE__ */ pRetry((...args) => callbackFn(args[0]), {
             ...retryOpts,
@@ -177,6 +177,9 @@ async function pFilterChunk(chunks, callbackFn, options) {
           })
         )
       );
+      const predicateResults = settled.map(
+        (r) => r.status === "fulfilled" ? r.value : false
+      );
       filteredChunks[i] = chunk.filter((_v, i2) => predicateResults[i2]);
     }
   }
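The hunks above switch pEach and pFilterChunk from Promise.all to Promise.allSettled: a single rejected callback no longer rejects the whole chunk (pEach resolves even if individual item callbacks ultimately fail), and in pFilterChunk a rejected predicate is now recorded as false instead of failing the entire filter. A minimal sketch of the observable difference, assuming pFilter is imported from the package's promises module (the import path is an assumption):

```typescript
// Sketch of the behavioral change shown above; '@socketsecurity/lib/promises' is an assumed path.
import { pFilter } from '@socketsecurity/lib/promises'

async function demo(): Promise<void> {
  const items = [1, 2, 3, 4]

  const kept = await pFilter(items, async n => {
    if (n === 3) {
      // In 3.1.3 this rejection would propagate out of Promise.all and fail the whole pFilter call;
      // with Promise.allSettled the rejection is treated as "do not keep this item".
      throw new Error('flaky check')
    }
    return n % 2 === 0
  })

  console.log(kept) // [2, 4] - the item whose predicate rejected is simply filtered out.
}

void demo()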
package/dist/promises.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../src/promises.ts"],
-
"sourcesContent": ["/**\n * @fileoverview Promise utilities including chunked iteration and timers.\n * Provides async control flow helpers and promise-based timing functions.\n */\n\nimport { UNDEFINED_TOKEN } from '#constants/core'\nimport { getAbortSignal } from '#constants/process'\n\nimport { arrayChunk } from './arrays'\n\nconst abortSignal = getAbortSignal()\n\n/**\n * Configuration options for retry behavior with exponential backoff.\n *\n * Controls how failed operations are retried, including timing, backoff strategy,\n * and callback hooks for observing or modifying retry behavior.\n */\nexport interface RetryOptions {\n /**\n * Arguments to pass to the callback function on each attempt.\n *\n * @default []\n */\n args?: unknown[] | undefined\n\n /**\n * Multiplier for exponential backoff (e.g., 2 doubles delay each retry).\n * Each retry waits `baseDelayMs * (backoffFactor ** attemptNumber)`.\n *\n * @default 2\n * @example\n * // With backoffFactor: 2, baseDelayMs: 100\n * // Retry 1: 100ms\n * // Retry 2: 200ms\n * // Retry 3: 400ms\n */\n backoffFactor?: number | undefined\n\n /**\n * Initial delay before the first retry (in milliseconds).\n * This is the base value for exponential backoff calculations.\n *\n * @default 200\n */\n baseDelayMs?: number | undefined\n\n // REMOVED: Deprecated `factor` option\n // Migration: Use `backoffFactor` instead\n\n /**\n * Whether to apply randomness to spread out retries and avoid thundering herd.\n * When `true`, adds random delay between 0 and current delay value.\n *\n * @default true\n * @example\n * // With jitter: true, delay: 100ms\n * // Actual wait: 100ms + random(0-100ms) = 100-200ms\n */\n jitter?: boolean | undefined\n\n /**\n * Upper limit for any backoff delay (in milliseconds).\n * Prevents exponential backoff from growing unbounded.\n *\n * @default 10000\n */\n maxDelayMs?: number | undefined\n\n // REMOVED: Deprecated `maxTimeout` option\n // Migration: Use `maxDelayMs` instead\n\n // REMOVED: Deprecated `minTimeout` option\n // Migration: Use `baseDelayMs` instead\n\n /**\n * Callback invoked on each retry attempt.\n * Can observe errors, customize delays, or cancel retries.\n *\n * @param attempt - The current attempt number (1-based: 1, 2, 3, ...)\n * @param error - The error that triggered this retry\n * @param delay - The calculated delay in milliseconds before next retry\n * @returns `false` to cancel retries (if `onRetryCancelOnFalse` is `true`),\n * a number to override the delay, or `undefined` to use calculated delay\n *\n * @example\n * // Log each retry\n * onRetry: (attempt, error, delay) => {\n * console.log(`Retry ${attempt} after ${delay}ms: ${error}`)\n * }\n *\n * @example\n * // Cancel retries for specific errors\n * onRetry: (attempt, error) => {\n * if (error instanceof ValidationError) return false\n * }\n *\n * @example\n * // Use custom delay\n * onRetry: (attempt) => attempt * 1000 // 1s, 2s, 3s, ...\n */\n onRetry?:\n | ((\n attempt: number,\n error: unknown,\n delay: number,\n ) => boolean | number | undefined)\n | undefined\n\n /**\n * Whether `onRetry` can cancel retries by returning `false`.\n * When `true`, returning `false` from `onRetry` stops retry attempts.\n *\n * @default false\n */\n onRetryCancelOnFalse?: boolean | undefined\n\n /**\n * Whether errors thrown by `onRetry` should propagate.\n * When `true`, exceptions in `onRetry` terminate the retry loop.\n * When `false`, exceptions in `onRetry` are silently caught.\n *\n * @default false\n */\n onRetryRethrow?: boolean | 
undefined\n\n /**\n * Number of retry attempts (0 = no retries, only initial attempt).\n * The callback is executed `retries + 1` times total (initial + retries).\n *\n * @default 0\n * @example\n * // retries: 0 -> 1 total attempt (no retries)\n * // retries: 3 -> 4 total attempts (1 initial + 3 retries)\n */\n retries?: number | undefined\n\n /**\n * AbortSignal to support cancellation of retry operations.\n * When aborted, immediately stops retrying and returns `undefined`.\n *\n * @default process abort signal\n * @example\n * const controller = new AbortController()\n * pRetry(fn, { signal: controller.signal })\n * // Later: controller.abort() to cancel\n */\n signal?: AbortSignal | undefined\n}\n\n/**\n * Configuration options for iteration functions with concurrency control.\n *\n * Controls how array operations are parallelized and retried.\n */\nexport interface IterationOptions {\n /**\n * The number of concurrent executions performed at one time.\n * Higher values increase parallelism but may overwhelm resources.\n *\n * @default 1\n * @example\n * // Process 5 items at a time\n * await pEach(items, processItem, { concurrency: 5 })\n */\n concurrency?: number | undefined\n\n /**\n * Retry configuration as a number (retry count) or full options object.\n * Applied to each individual item's callback execution.\n *\n * @default 0 (no retries)\n * @example\n * // Simple: retry each item up to 3 times\n * await pEach(items, fetchItem, { retries: 3 })\n *\n * @example\n * // Advanced: custom backoff for each item\n * await pEach(items, fetchItem, {\n * retries: {\n * retries: 3,\n * baseDelayMs: 1000,\n * backoffFactor: 2\n * }\n * })\n */\n retries?: number | RetryOptions | undefined\n\n /**\n * AbortSignal to support cancellation of the entire iteration.\n * When aborted, stops processing remaining items.\n *\n * @default process abort signal\n */\n signal?: AbortSignal | undefined\n}\n\nlet _timers: typeof import('node:timers/promises') | undefined\n/**\n * Get the timers/promises module.\n * Uses lazy loading to avoid Webpack bundling issues.\n *\n * @private\n * @returns The Node.js timers/promises module\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getTimers() {\n if (_timers === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _timers = /*@__PURE__*/ require('node:timers/promises')\n }\n return _timers as typeof import('node:timers/promises')\n}\n\n/**\n * Normalize options for iteration functions.\n *\n * Converts various option formats into a consistent structure with defaults applied.\n * Handles number shorthand for concurrency and ensures minimum values.\n *\n * @param options - Concurrency as number, or full options object, or undefined\n * @returns Normalized options with concurrency, retries, and signal\n *\n * @example\n * // Number shorthand for concurrency\n * normalizeIterationOptions(5)\n * // => { concurrency: 5, retries: {...}, signal: AbortSignal }\n *\n * @example\n * // Full options\n * normalizeIterationOptions({ concurrency: 3, retries: 2 })\n * // => { concurrency: 3, retries: {...}, signal: AbortSignal }\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function normalizeIterationOptions(\n options?: number | IterationOptions | undefined,\n): { concurrency: number; retries: RetryOptions; signal: AbortSignal } {\n // Handle number as concurrency shorthand\n const opts = typeof options === 'number' ? 
{ concurrency: options } : options\n\n const {\n // The number of concurrent executions performed at one time.\n concurrency = 1,\n // Retries as a number or options object.\n retries,\n // AbortSignal used to support cancellation.\n signal = abortSignal,\n } = { __proto__: null, ...opts } as IterationOptions\n\n // Ensure concurrency is at least 1\n const normalizedConcurrency = Math.max(1, concurrency)\n const retryOpts = resolveRetryOptions(retries)\n return {\n __proto__: null,\n concurrency: normalizedConcurrency,\n retries: normalizeRetryOptions({ signal, ...retryOpts }),\n signal,\n } as { concurrency: number; retries: RetryOptions; signal: AbortSignal }\n}\n\n/**\n * Normalize options for retry functionality.\n *\n * Converts various retry option formats into a complete configuration with all defaults.\n * Handles legacy property names (`factor`, `minTimeout`, `maxTimeout`) and merges them\n * with modern equivalents.\n *\n * @param options - Retry count as number, or full options object, or undefined\n * @returns Normalized retry options with all properties set\n *\n * @example\n * // Number shorthand\n * normalizeRetryOptions(3)\n * // => { retries: 3, baseDelayMs: 200, backoffFactor: 2, ... }\n *\n * @example\n * // Full options with defaults filled in\n * normalizeRetryOptions({ retries: 5, baseDelayMs: 500 })\n * // => { retries: 5, baseDelayMs: 500, backoffFactor: 2, jitter: true, ... }\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function normalizeRetryOptions(\n options?: number | RetryOptions | undefined,\n): RetryOptions {\n const resolved = resolveRetryOptions(options)\n const {\n // Arguments to pass to the callback function.\n args = [],\n // Multiplier for exponential backoff (e.g., 2 doubles delay each retry).\n backoffFactor = 2,\n // Initial delay before the first retry (in milliseconds).\n baseDelayMs = 200,\n // Whether to apply randomness to spread out retries.\n jitter = true,\n // Upper limit for any backoff delay (in milliseconds).\n maxDelayMs = 10_000,\n // Optional callback invoked on each retry attempt:\n // (attempt: number, error: unknown, delay: number) => void\n onRetry,\n // Whether onRetry can cancel retries by returning `false`.\n onRetryCancelOnFalse = false,\n // Whether onRetry will rethrow errors.\n onRetryRethrow = false,\n // Number of retry attempts (0 = no retries, only initial attempt).\n retries = 0,\n // AbortSignal used to support cancellation.\n signal = abortSignal,\n } = resolved\n return {\n args,\n backoffFactor,\n baseDelayMs,\n jitter,\n maxDelayMs,\n onRetry,\n onRetryCancelOnFalse,\n onRetryRethrow,\n retries,\n signal,\n } as RetryOptions\n}\n\n/**\n * Resolve retry options from various input formats.\n *\n * Converts shorthand and partial options into a base configuration that can be\n * further normalized. 
This is an internal helper for option processing.\n *\n * @param options - Retry count as number, or partial options object, or undefined\n * @returns Resolved retry options with defaults for basic properties\n *\n * @example\n * resolveRetryOptions(3)\n * // => { retries: 3, minTimeout: 200, maxTimeout: 10000, factor: 2 }\n *\n * @example\n * resolveRetryOptions({ retries: 5, maxTimeout: 5000 })\n * // => { retries: 5, minTimeout: 200, maxTimeout: 5000, factor: 2 }\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function resolveRetryOptions(\n options?: number | RetryOptions | undefined,\n): RetryOptions {\n const defaults = {\n __proto__: null,\n retries: 0,\n baseDelayMs: 200,\n maxDelayMs: 10_000,\n backoffFactor: 2,\n }\n\n if (typeof options === 'number') {\n return { ...defaults, retries: options }\n }\n\n return options ? { ...defaults, ...options } : defaults\n}\n\n/**\n * Execute an async function for each array element with concurrency control.\n *\n * Processes array items in parallel batches (chunks) with configurable concurrency.\n * Each item's callback can be retried independently on failure. Similar to\n * `Promise.all(array.map(fn))` but with controlled parallelism.\n *\n * @template T - The type of array elements\n * @param array - The array to iterate over\n * @param callbackFn - Async function to execute for each item\n * @param options - Concurrency as number, or full iteration options, or undefined\n * @returns Promise that resolves when all items are processed\n *\n * @example\n * // Process items serially (concurrency: 1)\n * await pEach(urls, async (url) => {\n * await fetch(url)\n * })\n *\n * @example\n * // Process 5 items at a time\n * await pEach(files, async (file) => {\n * await processFile(file)\n * }, 5)\n *\n * @example\n * // With retries and cancellation\n * const controller = new AbortController()\n * await pEach(tasks, async (task) => {\n * await executeTask(task)\n * }, {\n * concurrency: 3,\n * retries: 2,\n * signal: controller.signal\n * })\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function pEach<T>(\n array: T[],\n callbackFn: (item: T) => Promise<unknown>,\n options?: number | IterationOptions | undefined,\n): Promise<void> {\n const iterOpts = normalizeIterationOptions(options)\n const { concurrency, retries, signal } = iterOpts\n\n // Process items with concurrency control.\n const chunks = arrayChunk(array, concurrency)\n for (const chunk of chunks) {\n if (signal?.aborted) {\n return\n }\n // Process each item in the chunk concurrently.\n // eslint-disable-next-line no-await-in-loop\n await Promise.all(\n chunk.map((item: T) =>\n pRetry((...args: unknown[]) => callbackFn(args[0] as T), {\n ...retries,\n args: [item],\n signal,\n }),\n ),\n )\n }\n}\n\n/**\n * Filter an array asynchronously with concurrency control.\n *\n * Tests each element with an async predicate function, processing items in parallel\n * batches. Returns a new array with only items that pass the test. 
Similar to\n * `array.filter()` but for async predicates with controlled concurrency.\n *\n * @template T - The type of array elements\n * @param array - The array to filter\n * @param callbackFn - Async predicate function returning true to keep item\n * @param options - Concurrency as number, or full iteration options, or undefined\n * @returns Promise resolving to filtered array\n *\n * @example\n * // Filter serially\n * const activeUsers = await pFilter(users, async (user) => {\n * return await isUserActive(user.id)\n * })\n *\n * @example\n * // Filter with concurrency\n * const validFiles = await pFilter(filePaths, async (path) => {\n * try {\n * await fs.access(path)\n * return true\n * } catch {\n * return false\n * }\n * }, 10)\n *\n * @example\n * // With retries for flaky checks\n * const reachable = await pFilter(endpoints, async (url) => {\n * const response = await fetch(url)\n * return response.ok\n * }, {\n * concurrency: 5,\n * retries: 2\n * })\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function pFilter<T>(\n array: T[],\n callbackFn: (item: T) => Promise<boolean>,\n options?: number | IterationOptions | undefined,\n): Promise<T[]> {\n const iterOpts = normalizeIterationOptions(options)\n return (\n await pFilterChunk(\n arrayChunk(array, iterOpts.concurrency),\n callbackFn,\n iterOpts.retries,\n )\n ).flat()\n}\n\n/**\n * Process array in chunks with an async callback.\n *\n * Divides the array into fixed-size chunks and processes each chunk sequentially\n * with the callback. Useful for batch operations like bulk database inserts or\n * API calls with payload size limits.\n *\n * @template T - The type of array elements\n * @param array - The array to process in chunks\n * @param callbackFn - Async function to execute for each chunk\n * @param options - Chunk size and retry options\n * @returns Promise that resolves when all chunks are processed\n *\n * @example\n * // Insert records in batches of 100\n * await pEachChunk(records, async (chunk) => {\n * await db.batchInsert(chunk)\n * }, { chunkSize: 100 })\n *\n * @example\n * // Upload files in batches with retries\n * await pEachChunk(files, async (batch) => {\n * await uploadBatch(batch)\n * }, {\n * chunkSize: 50,\n * retries: 3,\n * baseDelayMs: 1000\n * })\n *\n * @example\n * // Process with cancellation support\n * const controller = new AbortController()\n * await pEachChunk(items, async (chunk) => {\n * await processChunk(chunk)\n * }, {\n * chunkSize: 25,\n * signal: controller.signal\n * })\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function pEachChunk<T>(\n array: T[],\n callbackFn: (chunk: T[]) => Promise<unknown>,\n options?: (RetryOptions & { chunkSize?: number | undefined }) | undefined,\n): Promise<void> {\n const { chunkSize = 100, ...retryOpts } = options || {}\n const chunks = arrayChunk(array, chunkSize)\n const normalizedRetryOpts = normalizeRetryOptions(retryOpts)\n const { signal } = normalizedRetryOpts\n for (const chunk of chunks) {\n if (signal?.aborted) {\n return\n }\n // eslint-disable-next-line no-await-in-loop\n await pRetry((...args: unknown[]) => callbackFn(args[0] as T[]), {\n ...normalizedRetryOpts,\n args: [chunk],\n })\n }\n}\n\n/**\n * Filter chunked arrays with an async predicate.\n *\n * Internal helper for `pFilter`. 
Processes pre-chunked arrays, applying the\n * predicate to each element within each chunk with retry support.\n *\n * @template T - The type of array elements\n * @param chunks - Pre-chunked array (array of arrays)\n * @param callbackFn - Async predicate function\n * @param options - Retry count as number, or full retry options, or undefined\n * @returns Promise resolving to array of filtered chunks\n *\n * @example\n * const chunks = [[1, 2], [3, 4], [5, 6]]\n * const filtered = await pFilterChunk(chunks, async (n) => n % 2 === 0)\n * // => [[2], [4], [6]]\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function pFilterChunk<T>(\n chunks: T[][],\n callbackFn: (value: T) => Promise<boolean>,\n options?: number | RetryOptions | undefined,\n): Promise<T[][]> {\n const retryOpts = normalizeRetryOptions(options)\n const { signal } = retryOpts\n const { length } = chunks\n const filteredChunks = Array(length)\n for (let i = 0; i < length; i += 1) {\n // Process each chunk, filtering based on the callback function.\n if (signal?.aborted) {\n filteredChunks[i] = []\n } else {\n const chunk = chunks[i] as T[]\n // eslint-disable-next-line no-await-in-loop\n const predicateResults = await Promise.all(\n chunk.map(value =>\n pRetry((...args: unknown[]) => callbackFn(args[0] as T), {\n ...retryOpts,\n args: [value],\n }),\n ),\n )\n filteredChunks[i] = chunk.filter((_v, i) => predicateResults[i])\n }\n }\n return filteredChunks\n}\n\n/**\n * Retry an async function with exponential backoff.\n *\n * Attempts to execute a function multiple times with increasing delays between attempts.\n * Implements exponential backoff with optional jitter to prevent thundering herd problems.\n * Supports custom retry logic via `onRetry` callback.\n *\n * The delay calculation follows: `min(baseDelayMs * (backoffFactor ** attempt), maxDelayMs)`\n * With jitter: adds random value between 0 and calculated delay.\n *\n * @template T - The return type of the callback function\n * @param callbackFn - Async function to retry\n * @param options - Retry count as number, or full retry options, or undefined\n * @returns Promise resolving to callback result, or `undefined` if aborted\n *\n * @throws {Error} The last error if all retry attempts fail\n *\n * @example\n * // Simple retry: 3 attempts with default backoff\n * const data = await pRetry(async () => {\n * return await fetchData()\n * }, 3)\n *\n * @example\n * // Custom backoff strategy\n * const result = await pRetry(async () => {\n * return await unreliableOperation()\n * }, {\n * retries: 5,\n * baseDelayMs: 1000, // Start at 1 second\n * backoffFactor: 2, // Double each time\n * maxDelayMs: 30000, // Cap at 30 seconds\n * jitter: true // Add randomness\n * })\n * // Delays: ~1s, ~2s, ~4s, ~8s, ~16s (each \u00B1 random jitter)\n *\n * @example\n * // With custom retry logic\n * const data = await pRetry(async () => {\n * return await apiCall()\n * }, {\n * retries: 3,\n * onRetry: (attempt, error, delay) => {\n * console.log(`Attempt ${attempt} failed: ${error}`)\n * console.log(`Waiting ${delay}ms before retry...`)\n *\n * // Cancel retries for client errors (4xx)\n * if (error.statusCode >= 400 && error.statusCode < 500) {\n * return false\n * }\n *\n * // Use longer delay for rate limit errors\n * if (error.statusCode === 429) {\n * return 60000 // Wait 1 minute\n * }\n * },\n * onRetryCancelOnFalse: true\n * })\n *\n * @example\n * // With cancellation support\n * const controller = new AbortController()\n * setTimeout(() => controller.abort(), 5000) // Cancel 
after 5s\n *\n * const result = await pRetry(async ({ signal }) => {\n * return await longRunningTask(signal)\n * }, {\n * retries: 10,\n * signal: controller.signal\n * })\n * // Returns undefined if aborted\n *\n * @example\n * // Pass arguments to callback\n * const result = await pRetry(\n * async (url, options) => {\n * return await fetch(url, options)\n * },\n * {\n * retries: 3,\n * args: ['https://api.example.com', { method: 'POST' }]\n * }\n * )\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function pRetry<T>(\n callbackFn: (...args: unknown[]) => Promise<T>,\n options?: number | RetryOptions | undefined,\n): Promise<T | undefined> {\n const {\n args,\n backoffFactor,\n baseDelayMs,\n jitter,\n maxDelayMs,\n onRetry,\n onRetryCancelOnFalse,\n onRetryRethrow,\n retries,\n signal,\n } = normalizeRetryOptions(options)\n if (signal?.aborted) {\n return undefined\n }\n if (retries === 0) {\n return await callbackFn(...(args || []), { signal })\n }\n\n const timers = getTimers()\n\n let attempts = retries as number\n let delay = baseDelayMs as number\n let error: unknown = UNDEFINED_TOKEN\n\n while (attempts-- >= 0) {\n // Check abort before attempt.\n if (signal?.aborted) {\n return undefined\n }\n\n try {\n // eslint-disable-next-line no-await-in-loop\n return await callbackFn(...(args || []), { signal })\n } catch (e) {\n if (error === UNDEFINED_TOKEN) {\n error = e\n }\n if (attempts < 0) {\n break\n }\n let waitTime = delay\n if (jitter) {\n // Add randomness: Pick a value between 0 and `delay`.\n waitTime += Math.floor(Math.random() * delay)\n }\n // Clamp wait time to max delay.\n waitTime = Math.min(waitTime, maxDelayMs as number)\n if (typeof onRetry === 'function') {\n try {\n const result = onRetry((retries as number) - attempts, e, waitTime)\n if (result === false && onRetryCancelOnFalse) {\n break\n }\n // If onRetry returns a number, use it as the custom delay.\n if (typeof result === 'number' && result >= 0) {\n waitTime = Math.min(result, maxDelayMs as number)\n }\n } catch (e) {\n if (onRetryRethrow) {\n throw e\n }\n }\n }\n\n try {\n // eslint-disable-next-line no-await-in-loop\n await timers.setTimeout(waitTime, undefined, { signal })\n } catch {\n // setTimeout was aborted.\n return undefined\n }\n\n // Check abort again after delay.\n if (signal?.aborted) {\n return undefined\n }\n\n // Exponentially increase the delay for the next attempt, capping at maxDelayMs.\n delay = Math.min(delay * (backoffFactor as number), maxDelayMs as number)\n }\n }\n if (error !== UNDEFINED_TOKEN) {\n throw error\n }\n return undefined\n}\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,kBAAgC;AAChC,qBAA+B;AAE/B,oBAA2B;AAE3B,MAAM,kBAAc,+BAAe;AA2LnC,IAAI;AAAA;AASJ,SAAS,YAAY;AACnB,MAAI,YAAY,QAAW;AAGzB,cAAwB,QAAQ,sBAAsB;AAAA,EACxD;AACA,SAAO;AACT;AAAA;AAsBO,SAAS,0BACd,SACqE;AAErE,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,aAAa,QAAQ,IAAI;AAEtE,QAAM;AAAA;AAAA,IAEJ,cAAc;AAAA;AAAA,IAEd;AAAA;AAAA,IAEA,SAAS;AAAA,EACX,IAAI,EAAE,WAAW,MAAM,GAAG,KAAK;AAG/B,QAAM,wBAAwB,KAAK,IAAI,GAAG,WAAW;AACrD,QAAM,YAAY,oCAAoB,OAAO;AAC7C,SAAO;AAAA,IACL,WAAW;AAAA,IACX,aAAa;AAAA,IACb,SAAS,sCAAsB,EAAE,QAAQ,GAAG,UAAU,CAAC;AAAA,IACvD;AAAA,EACF;AACF;AAAA;AAuBO,SAAS,sBACd,SACc;AACd,QAAM,WAAW,oCAAoB,OAAO;AAC5C,QAAM;AAAA;AAAA,IAEJ,OAAO,CAAC;AAAA;AAAA,IAER,gBAAgB;AAAA;AAAA,IAEhB,cAAc;AAAA;AAAA,IAEd,SAAS;AAAA;AAAA,IAET,aAAa;AAAA;AAAA;AAAA,IAGb;AAAA;AAAA,IAEA,uBAAuB;AAAA;AAAA,IAEvB,iBAAiB;AAAA;AAAA,IAEjB,UAAU;AAAA;AAAA,IAEV,SAAS;AAAA,EACX,IAAI;AACJ,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAAA;AAoBO,SAAS,oBACd,SACc;AACd,QAAM,WAAW;AAAA,IACf,WAAW;AAAA,IACX,SAAS;AAAA,IACT,aAAa;AAAA,IACb,YAAY;AAAA,IACZ,eAAe;AAAA,EACjB;AAEA,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO,EAAE,GAAG,UAAU,SAAS,QAAQ;AAAA,EACzC;AAEA,SAAO,UAAU,EAAE,GAAG,UAAU,GAAG,QAAQ,IAAI;AACjD;AAAA;AAuCA,eAAsB,MACpB,OACA,YACA,SACe;AACf,QAAM,WAAW,0CAA0B,OAAO;AAClD,QAAM,EAAE,aAAa,SAAS,OAAO,IAAI;AAGzC,QAAM,aAAS,0BAAW,OAAO,WAAW;AAC5C,aAAW,SAAS,QAAQ;AAC1B,QAAI,QAAQ,SAAS;AACnB;AAAA,IACF;AAGA,UAAM,QAAQ;AAAA,MACZ,MAAM;AAAA,QAAI,CAAC,SACT,uBAAO,IAAI,SAAoB,WAAW,KAAK,CAAC,CAAM,GAAG;AAAA,UACvD,GAAG;AAAA,UACH,MAAM,CAAC,IAAI;AAAA,UACX;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAAA;AA2CA,eAAsB,QACpB,OACA,YACA,SACc;AACd,QAAM,WAAW,0CAA0B,OAAO;AAClD,UACE,MAAM;AAAA,QACJ,0BAAW,OAAO,SAAS,WAAW;AAAA,IACtC;AAAA,IACA,SAAS;AAAA,EACX,GACA,KAAK;AACT;AAAA;AA0CA,eAAsB,WACpB,OACA,YACA,SACe;AACf,QAAM,EAAE,YAAY,KAAK,GAAG,UAAU,IAAI,WAAW,CAAC;AACtD,QAAM,aAAS,0BAAW,OAAO,SAAS;AAC1C,QAAM,sBAAsB,sCAAsB,SAAS;AAC3D,QAAM,EAAE,OAAO,IAAI;AACnB,aAAW,SAAS,QAAQ;AAC1B,QAAI,QAAQ,SAAS;AACnB;AAAA,IACF;AAEA,UAAM,uBAAO,IAAI,SAAoB,WAAW,KAAK,CAAC,CAAQ,GAAG;AAAA,MAC/D,GAAG;AAAA,MACH,MAAM,CAAC,KAAK;AAAA,IACd,CAAC;AAAA,EACH;AACF;AAAA;AAoBA,eAAsB,aACpB,QACA,YACA,SACgB;AAChB,QAAM,YAAY,sCAAsB,OAAO;AAC/C,QAAM,EAAE,OAAO,IAAI;AACnB,QAAM,EAAE,OAAO,IAAI;AACnB,QAAM,iBAAiB,MAAM,MAAM;AACnC,WAAS,IAAI,GAAG,IAAI,QAAQ,KAAK,GAAG;AAElC,QAAI,QAAQ,SAAS;AACnB,qBAAe,CAAC,IAAI,CAAC;AAAA,IACvB,OAAO;AACL,YAAM,QAAQ,OAAO,CAAC;AAEtB,YAAM,
+
"sourcesContent": ["/**\n * @fileoverview Promise utilities including chunked iteration and timers.\n * Provides async control flow helpers and promise-based timing functions.\n */\n\nimport { UNDEFINED_TOKEN } from '#constants/core'\nimport { getAbortSignal } from '#constants/process'\n\nimport { arrayChunk } from './arrays'\n\nconst abortSignal = getAbortSignal()\n\n/**\n * Configuration options for retry behavior with exponential backoff.\n *\n * Controls how failed operations are retried, including timing, backoff strategy,\n * and callback hooks for observing or modifying retry behavior.\n */\nexport interface RetryOptions {\n /**\n * Arguments to pass to the callback function on each attempt.\n *\n * @default []\n */\n args?: unknown[] | undefined\n\n /**\n * Multiplier for exponential backoff (e.g., 2 doubles delay each retry).\n * Each retry waits `baseDelayMs * (backoffFactor ** attemptNumber)`.\n *\n * @default 2\n * @example\n * // With backoffFactor: 2, baseDelayMs: 100\n * // Retry 1: 100ms\n * // Retry 2: 200ms\n * // Retry 3: 400ms\n */\n backoffFactor?: number | undefined\n\n /**\n * Initial delay before the first retry (in milliseconds).\n * This is the base value for exponential backoff calculations.\n *\n * @default 200\n */\n baseDelayMs?: number | undefined\n\n // REMOVED: Deprecated `factor` option\n // Migration: Use `backoffFactor` instead\n\n /**\n * Whether to apply randomness to spread out retries and avoid thundering herd.\n * When `true`, adds random delay between 0 and current delay value.\n *\n * @default true\n * @example\n * // With jitter: true, delay: 100ms\n * // Actual wait: 100ms + random(0-100ms) = 100-200ms\n */\n jitter?: boolean | undefined\n\n /**\n * Upper limit for any backoff delay (in milliseconds).\n * Prevents exponential backoff from growing unbounded.\n *\n * @default 10000\n */\n maxDelayMs?: number | undefined\n\n // REMOVED: Deprecated `maxTimeout` option\n // Migration: Use `maxDelayMs` instead\n\n // REMOVED: Deprecated `minTimeout` option\n // Migration: Use `baseDelayMs` instead\n\n /**\n * Callback invoked on each retry attempt.\n * Can observe errors, customize delays, or cancel retries.\n *\n * @param attempt - The current attempt number (1-based: 1, 2, 3, ...)\n * @param error - The error that triggered this retry\n * @param delay - The calculated delay in milliseconds before next retry\n * @returns `false` to cancel retries (if `onRetryCancelOnFalse` is `true`),\n * a number to override the delay, or `undefined` to use calculated delay\n *\n * @example\n * // Log each retry\n * onRetry: (attempt, error, delay) => {\n * console.log(`Retry ${attempt} after ${delay}ms: ${error}`)\n * }\n *\n * @example\n * // Cancel retries for specific errors\n * onRetry: (attempt, error) => {\n * if (error instanceof ValidationError) return false\n * }\n *\n * @example\n * // Use custom delay\n * onRetry: (attempt) => attempt * 1000 // 1s, 2s, 3s, ...\n */\n onRetry?:\n | ((\n attempt: number,\n error: unknown,\n delay: number,\n ) => boolean | number | undefined)\n | undefined\n\n /**\n * Whether `onRetry` can cancel retries by returning `false`.\n * When `true`, returning `false` from `onRetry` stops retry attempts.\n *\n * @default false\n */\n onRetryCancelOnFalse?: boolean | undefined\n\n /**\n * Whether errors thrown by `onRetry` should propagate.\n * When `true`, exceptions in `onRetry` terminate the retry loop.\n * When `false`, exceptions in `onRetry` are silently caught.\n *\n * @default false\n */\n onRetryRethrow?: boolean | 
undefined\n\n /**\n * Number of retry attempts (0 = no retries, only initial attempt).\n * The callback is executed `retries + 1` times total (initial + retries).\n *\n * @default 0\n * @example\n * // retries: 0 -> 1 total attempt (no retries)\n * // retries: 3 -> 4 total attempts (1 initial + 3 retries)\n */\n retries?: number | undefined\n\n /**\n * AbortSignal to support cancellation of retry operations.\n * When aborted, immediately stops retrying and returns `undefined`.\n *\n * @default process abort signal\n * @example\n * const controller = new AbortController()\n * pRetry(fn, { signal: controller.signal })\n * // Later: controller.abort() to cancel\n */\n signal?: AbortSignal | undefined\n}\n\n/**\n * Configuration options for iteration functions with concurrency control.\n *\n * Controls how array operations are parallelized and retried.\n */\nexport interface IterationOptions {\n /**\n * The number of concurrent executions performed at one time.\n * Higher values increase parallelism but may overwhelm resources.\n *\n * @default 1\n * @example\n * // Process 5 items at a time\n * await pEach(items, processItem, { concurrency: 5 })\n */\n concurrency?: number | undefined\n\n /**\n * Retry configuration as a number (retry count) or full options object.\n * Applied to each individual item's callback execution.\n *\n * @default 0 (no retries)\n * @example\n * // Simple: retry each item up to 3 times\n * await pEach(items, fetchItem, { retries: 3 })\n *\n * @example\n * // Advanced: custom backoff for each item\n * await pEach(items, fetchItem, {\n * retries: {\n * retries: 3,\n * baseDelayMs: 1000,\n * backoffFactor: 2\n * }\n * })\n */\n retries?: number | RetryOptions | undefined\n\n /**\n * AbortSignal to support cancellation of the entire iteration.\n * When aborted, stops processing remaining items.\n *\n * @default process abort signal\n */\n signal?: AbortSignal | undefined\n}\n\nlet _timers: typeof import('node:timers/promises') | undefined\n/**\n * Get the timers/promises module.\n * Uses lazy loading to avoid Webpack bundling issues.\n *\n * @private\n * @returns The Node.js timers/promises module\n */\n/*@__NO_SIDE_EFFECTS__*/\nfunction getTimers() {\n if (_timers === undefined) {\n // Use non-'node:' prefixed require to avoid Webpack errors.\n\n _timers = /*@__PURE__*/ require('node:timers/promises')\n }\n return _timers as typeof import('node:timers/promises')\n}\n\n/**\n * Normalize options for iteration functions.\n *\n * Converts various option formats into a consistent structure with defaults applied.\n * Handles number shorthand for concurrency and ensures minimum values.\n *\n * @param options - Concurrency as number, or full options object, or undefined\n * @returns Normalized options with concurrency, retries, and signal\n *\n * @example\n * // Number shorthand for concurrency\n * normalizeIterationOptions(5)\n * // => { concurrency: 5, retries: {...}, signal: AbortSignal }\n *\n * @example\n * // Full options\n * normalizeIterationOptions({ concurrency: 3, retries: 2 })\n * // => { concurrency: 3, retries: {...}, signal: AbortSignal }\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function normalizeIterationOptions(\n options?: number | IterationOptions | undefined,\n): { concurrency: number; retries: RetryOptions; signal: AbortSignal } {\n // Handle number as concurrency shorthand\n const opts = typeof options === 'number' ? 
{ concurrency: options } : options\n\n const {\n // The number of concurrent executions performed at one time.\n concurrency = 1,\n // Retries as a number or options object.\n retries,\n // AbortSignal used to support cancellation.\n signal = abortSignal,\n } = { __proto__: null, ...opts } as IterationOptions\n\n // Ensure concurrency is at least 1\n const normalizedConcurrency = Math.max(1, concurrency)\n const retryOpts = resolveRetryOptions(retries)\n return {\n __proto__: null,\n concurrency: normalizedConcurrency,\n retries: normalizeRetryOptions({ signal, ...retryOpts }),\n signal,\n } as { concurrency: number; retries: RetryOptions; signal: AbortSignal }\n}\n\n/**\n * Normalize options for retry functionality.\n *\n * Converts various retry option formats into a complete configuration with all defaults.\n * Handles legacy property names (`factor`, `minTimeout`, `maxTimeout`) and merges them\n * with modern equivalents.\n *\n * @param options - Retry count as number, or full options object, or undefined\n * @returns Normalized retry options with all properties set\n *\n * @example\n * // Number shorthand\n * normalizeRetryOptions(3)\n * // => { retries: 3, baseDelayMs: 200, backoffFactor: 2, ... }\n *\n * @example\n * // Full options with defaults filled in\n * normalizeRetryOptions({ retries: 5, baseDelayMs: 500 })\n * // => { retries: 5, baseDelayMs: 500, backoffFactor: 2, jitter: true, ... }\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function normalizeRetryOptions(\n options?: number | RetryOptions | undefined,\n): RetryOptions {\n const resolved = resolveRetryOptions(options)\n const {\n // Arguments to pass to the callback function.\n args = [],\n // Multiplier for exponential backoff (e.g., 2 doubles delay each retry).\n backoffFactor = 2,\n // Initial delay before the first retry (in milliseconds).\n baseDelayMs = 200,\n // Whether to apply randomness to spread out retries.\n jitter = true,\n // Upper limit for any backoff delay (in milliseconds).\n maxDelayMs = 10_000,\n // Optional callback invoked on each retry attempt:\n // (attempt: number, error: unknown, delay: number) => void\n onRetry,\n // Whether onRetry can cancel retries by returning `false`.\n onRetryCancelOnFalse = false,\n // Whether onRetry will rethrow errors.\n onRetryRethrow = false,\n // Number of retry attempts (0 = no retries, only initial attempt).\n retries = 0,\n // AbortSignal used to support cancellation.\n signal = abortSignal,\n } = resolved\n return {\n args,\n backoffFactor,\n baseDelayMs,\n jitter,\n maxDelayMs,\n onRetry,\n onRetryCancelOnFalse,\n onRetryRethrow,\n retries,\n signal,\n } as RetryOptions\n}\n\n/**\n * Resolve retry options from various input formats.\n *\n * Converts shorthand and partial options into a base configuration that can be\n * further normalized. 
This is an internal helper for option processing.\n *\n * @param options - Retry count as number, or partial options object, or undefined\n * @returns Resolved retry options with defaults for basic properties\n *\n * @example\n * resolveRetryOptions(3)\n * // => { retries: 3, minTimeout: 200, maxTimeout: 10000, factor: 2 }\n *\n * @example\n * resolveRetryOptions({ retries: 5, maxTimeout: 5000 })\n * // => { retries: 5, minTimeout: 200, maxTimeout: 5000, factor: 2 }\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport function resolveRetryOptions(\n options?: number | RetryOptions | undefined,\n): RetryOptions {\n const defaults = {\n __proto__: null,\n retries: 0,\n baseDelayMs: 200,\n maxDelayMs: 10_000,\n backoffFactor: 2,\n }\n\n if (typeof options === 'number') {\n return { ...defaults, retries: options }\n }\n\n return options ? { ...defaults, ...options } : defaults\n}\n\n/**\n * Execute an async function for each array element with concurrency control.\n *\n * Processes array items in parallel batches (chunks) with configurable concurrency.\n * Each item's callback can be retried independently on failure. Similar to\n * `Promise.all(array.map(fn))` but with controlled parallelism.\n *\n * @template T - The type of array elements\n * @param array - The array to iterate over\n * @param callbackFn - Async function to execute for each item\n * @param options - Concurrency as number, or full iteration options, or undefined\n * @returns Promise that resolves when all items are processed\n *\n * @example\n * // Process items serially (concurrency: 1)\n * await pEach(urls, async (url) => {\n * await fetch(url)\n * })\n *\n * @example\n * // Process 5 items at a time\n * await pEach(files, async (file) => {\n * await processFile(file)\n * }, 5)\n *\n * @example\n * // With retries and cancellation\n * const controller = new AbortController()\n * await pEach(tasks, async (task) => {\n * await executeTask(task)\n * }, {\n * concurrency: 3,\n * retries: 2,\n * signal: controller.signal\n * })\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function pEach<T>(\n array: T[],\n callbackFn: (item: T) => Promise<unknown>,\n options?: number | IterationOptions | undefined,\n): Promise<void> {\n const iterOpts = normalizeIterationOptions(options)\n const { concurrency, retries, signal } = iterOpts\n\n // Process items with concurrency control.\n const chunks = arrayChunk(array, concurrency)\n for (const chunk of chunks) {\n if (signal?.aborted) {\n return\n }\n // Process each item in the chunk concurrently.\n // eslint-disable-next-line no-await-in-loop\n await Promise.allSettled(\n chunk.map((item: T) =>\n pRetry((...args: unknown[]) => callbackFn(args[0] as T), {\n ...retries,\n args: [item],\n signal,\n }),\n ),\n )\n }\n}\n\n/**\n * Filter an array asynchronously with concurrency control.\n *\n * Tests each element with an async predicate function, processing items in parallel\n * batches. Returns a new array with only items that pass the test. 
Similar to\n * `array.filter()` but for async predicates with controlled concurrency.\n *\n * @template T - The type of array elements\n * @param array - The array to filter\n * @param callbackFn - Async predicate function returning true to keep item\n * @param options - Concurrency as number, or full iteration options, or undefined\n * @returns Promise resolving to filtered array\n *\n * @example\n * // Filter serially\n * const activeUsers = await pFilter(users, async (user) => {\n * return await isUserActive(user.id)\n * })\n *\n * @example\n * // Filter with concurrency\n * const validFiles = await pFilter(filePaths, async (path) => {\n * try {\n * await fs.access(path)\n * return true\n * } catch {\n * return false\n * }\n * }, 10)\n *\n * @example\n * // With retries for flaky checks\n * const reachable = await pFilter(endpoints, async (url) => {\n * const response = await fetch(url)\n * return response.ok\n * }, {\n * concurrency: 5,\n * retries: 2\n * })\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function pFilter<T>(\n array: T[],\n callbackFn: (item: T) => Promise<boolean>,\n options?: number | IterationOptions | undefined,\n): Promise<T[]> {\n const iterOpts = normalizeIterationOptions(options)\n return (\n await pFilterChunk(\n arrayChunk(array, iterOpts.concurrency),\n callbackFn,\n iterOpts.retries,\n )\n ).flat()\n}\n\n/**\n * Process array in chunks with an async callback.\n *\n * Divides the array into fixed-size chunks and processes each chunk sequentially\n * with the callback. Useful for batch operations like bulk database inserts or\n * API calls with payload size limits.\n *\n * @template T - The type of array elements\n * @param array - The array to process in chunks\n * @param callbackFn - Async function to execute for each chunk\n * @param options - Chunk size and retry options\n * @returns Promise that resolves when all chunks are processed\n *\n * @example\n * // Insert records in batches of 100\n * await pEachChunk(records, async (chunk) => {\n * await db.batchInsert(chunk)\n * }, { chunkSize: 100 })\n *\n * @example\n * // Upload files in batches with retries\n * await pEachChunk(files, async (batch) => {\n * await uploadBatch(batch)\n * }, {\n * chunkSize: 50,\n * retries: 3,\n * baseDelayMs: 1000\n * })\n *\n * @example\n * // Process with cancellation support\n * const controller = new AbortController()\n * await pEachChunk(items, async (chunk) => {\n * await processChunk(chunk)\n * }, {\n * chunkSize: 25,\n * signal: controller.signal\n * })\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function pEachChunk<T>(\n array: T[],\n callbackFn: (chunk: T[]) => Promise<unknown>,\n options?: (RetryOptions & { chunkSize?: number | undefined }) | undefined,\n): Promise<void> {\n const { chunkSize = 100, ...retryOpts } = options || {}\n const chunks = arrayChunk(array, chunkSize)\n const normalizedRetryOpts = normalizeRetryOptions(retryOpts)\n const { signal } = normalizedRetryOpts\n for (const chunk of chunks) {\n if (signal?.aborted) {\n return\n }\n // eslint-disable-next-line no-await-in-loop\n await pRetry((...args: unknown[]) => callbackFn(args[0] as T[]), {\n ...normalizedRetryOpts,\n args: [chunk],\n })\n }\n}\n\n/**\n * Filter chunked arrays with an async predicate.\n *\n * Internal helper for `pFilter`. 
Processes pre-chunked arrays, applying the\n * predicate to each element within each chunk with retry support.\n *\n * @template T - The type of array elements\n * @param chunks - Pre-chunked array (array of arrays)\n * @param callbackFn - Async predicate function\n * @param options - Retry count as number, or full retry options, or undefined\n * @returns Promise resolving to array of filtered chunks\n *\n * @example\n * const chunks = [[1, 2], [3, 4], [5, 6]]\n * const filtered = await pFilterChunk(chunks, async (n) => n % 2 === 0)\n * // => [[2], [4], [6]]\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function pFilterChunk<T>(\n chunks: T[][],\n callbackFn: (value: T) => Promise<boolean>,\n options?: number | RetryOptions | undefined,\n): Promise<T[][]> {\n const retryOpts = normalizeRetryOptions(options)\n const { signal } = retryOpts\n const { length } = chunks\n const filteredChunks = Array(length)\n for (let i = 0; i < length; i += 1) {\n // Process each chunk, filtering based on the callback function.\n if (signal?.aborted) {\n filteredChunks[i] = []\n } else {\n const chunk = chunks[i] as T[]\n // eslint-disable-next-line no-await-in-loop\n const settled = await Promise.allSettled(\n chunk.map(value =>\n pRetry((...args: unknown[]) => callbackFn(args[0] as T), {\n ...retryOpts,\n args: [value],\n }),\n ),\n )\n const predicateResults = settled.map(r =>\n r.status === 'fulfilled' ? r.value : false,\n )\n filteredChunks[i] = chunk.filter((_v, i) => predicateResults[i])\n }\n }\n return filteredChunks\n}\n\n/**\n * Retry an async function with exponential backoff.\n *\n * Attempts to execute a function multiple times with increasing delays between attempts.\n * Implements exponential backoff with optional jitter to prevent thundering herd problems.\n * Supports custom retry logic via `onRetry` callback.\n *\n * The delay calculation follows: `min(baseDelayMs * (backoffFactor ** attempt), maxDelayMs)`\n * With jitter: adds random value between 0 and calculated delay.\n *\n * @template T - The return type of the callback function\n * @param callbackFn - Async function to retry\n * @param options - Retry count as number, or full retry options, or undefined\n * @returns Promise resolving to callback result, or `undefined` if aborted\n *\n * @throws {Error} The last error if all retry attempts fail\n *\n * @example\n * // Simple retry: 3 attempts with default backoff\n * const data = await pRetry(async () => {\n * return await fetchData()\n * }, 3)\n *\n * @example\n * // Custom backoff strategy\n * const result = await pRetry(async () => {\n * return await unreliableOperation()\n * }, {\n * retries: 5,\n * baseDelayMs: 1000, // Start at 1 second\n * backoffFactor: 2, // Double each time\n * maxDelayMs: 30000, // Cap at 30 seconds\n * jitter: true // Add randomness\n * })\n * // Delays: ~1s, ~2s, ~4s, ~8s, ~16s (each \u00B1 random jitter)\n *\n * @example\n * // With custom retry logic\n * const data = await pRetry(async () => {\n * return await apiCall()\n * }, {\n * retries: 3,\n * onRetry: (attempt, error, delay) => {\n * console.log(`Attempt ${attempt} failed: ${error}`)\n * console.log(`Waiting ${delay}ms before retry...`)\n *\n * // Cancel retries for client errors (4xx)\n * if (error.statusCode >= 400 && error.statusCode < 500) {\n * return false\n * }\n *\n * // Use longer delay for rate limit errors\n * if (error.statusCode === 429) {\n * return 60000 // Wait 1 minute\n * }\n * },\n * onRetryCancelOnFalse: true\n * })\n *\n * @example\n * // With cancellation support\n * const 
controller = new AbortController()\n * setTimeout(() => controller.abort(), 5000) // Cancel after 5s\n *\n * const result = await pRetry(async ({ signal }) => {\n * return await longRunningTask(signal)\n * }, {\n * retries: 10,\n * signal: controller.signal\n * })\n * // Returns undefined if aborted\n *\n * @example\n * // Pass arguments to callback\n * const result = await pRetry(\n * async (url, options) => {\n * return await fetch(url, options)\n * },\n * {\n * retries: 3,\n * args: ['https://api.example.com', { method: 'POST' }]\n * }\n * )\n */\n/*@__NO_SIDE_EFFECTS__*/\nexport async function pRetry<T>(\n callbackFn: (...args: unknown[]) => Promise<T>,\n options?: number | RetryOptions | undefined,\n): Promise<T | undefined> {\n const {\n args,\n backoffFactor,\n baseDelayMs,\n jitter,\n maxDelayMs,\n onRetry,\n onRetryCancelOnFalse,\n onRetryRethrow,\n retries,\n signal,\n } = normalizeRetryOptions(options)\n if (signal?.aborted) {\n return undefined\n }\n if (retries === 0) {\n return await callbackFn(...(args || []), { signal })\n }\n\n const timers = getTimers()\n\n let attempts = retries as number\n let delay = baseDelayMs as number\n let error: unknown = UNDEFINED_TOKEN\n\n while (attempts-- >= 0) {\n // Check abort before attempt.\n if (signal?.aborted) {\n return undefined\n }\n\n try {\n // eslint-disable-next-line no-await-in-loop\n return await callbackFn(...(args || []), { signal })\n } catch (e) {\n if (error === UNDEFINED_TOKEN) {\n error = e\n }\n if (attempts < 0) {\n break\n }\n let waitTime = delay\n if (jitter) {\n // Add randomness: Pick a value between 0 and `delay`.\n waitTime += Math.floor(Math.random() * delay)\n }\n // Clamp wait time to max delay.\n waitTime = Math.min(waitTime, maxDelayMs as number)\n if (typeof onRetry === 'function') {\n try {\n const result = onRetry((retries as number) - attempts, e, waitTime)\n if (result === false && onRetryCancelOnFalse) {\n break\n }\n // If onRetry returns a number, use it as the custom delay.\n if (typeof result === 'number' && result >= 0) {\n waitTime = Math.min(result, maxDelayMs as number)\n }\n } catch (e) {\n if (onRetryRethrow) {\n throw e\n }\n }\n }\n\n try {\n // eslint-disable-next-line no-await-in-loop\n await timers.setTimeout(waitTime, undefined, { signal })\n } catch {\n // setTimeout was aborted.\n return undefined\n }\n\n // Check abort again after delay.\n if (signal?.aborted) {\n return undefined\n }\n\n // Exponentially increase the delay for the next attempt, capping at maxDelayMs.\n delay = Math.min(delay * (backoffFactor as number), maxDelayMs as number)\n }\n }\n if (error !== UNDEFINED_TOKEN) {\n throw error\n }\n return undefined\n}\n"],
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAKA,kBAAgC;AAChC,qBAA+B;AAE/B,oBAA2B;AAE3B,MAAM,kBAAc,+BAAe;AA2LnC,IAAI;AAAA;AASJ,SAAS,YAAY;AACnB,MAAI,YAAY,QAAW;AAGzB,cAAwB,QAAQ,sBAAsB;AAAA,EACxD;AACA,SAAO;AACT;AAAA;AAsBO,SAAS,0BACd,SACqE;AAErE,QAAM,OAAO,OAAO,YAAY,WAAW,EAAE,aAAa,QAAQ,IAAI;AAEtE,QAAM;AAAA;AAAA,IAEJ,cAAc;AAAA;AAAA,IAEd;AAAA;AAAA,IAEA,SAAS;AAAA,EACX,IAAI,EAAE,WAAW,MAAM,GAAG,KAAK;AAG/B,QAAM,wBAAwB,KAAK,IAAI,GAAG,WAAW;AACrD,QAAM,YAAY,oCAAoB,OAAO;AAC7C,SAAO;AAAA,IACL,WAAW;AAAA,IACX,aAAa;AAAA,IACb,SAAS,sCAAsB,EAAE,QAAQ,GAAG,UAAU,CAAC;AAAA,IACvD;AAAA,EACF;AACF;AAAA;AAuBO,SAAS,sBACd,SACc;AACd,QAAM,WAAW,oCAAoB,OAAO;AAC5C,QAAM;AAAA;AAAA,IAEJ,OAAO,CAAC;AAAA;AAAA,IAER,gBAAgB;AAAA;AAAA,IAEhB,cAAc;AAAA;AAAA,IAEd,SAAS;AAAA;AAAA,IAET,aAAa;AAAA;AAAA;AAAA,IAGb;AAAA;AAAA,IAEA,uBAAuB;AAAA;AAAA,IAEvB,iBAAiB;AAAA;AAAA,IAEjB,UAAU;AAAA;AAAA,IAEV,SAAS;AAAA,EACX,IAAI;AACJ,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAAA;AAoBO,SAAS,oBACd,SACc;AACd,QAAM,WAAW;AAAA,IACf,WAAW;AAAA,IACX,SAAS;AAAA,IACT,aAAa;AAAA,IACb,YAAY;AAAA,IACZ,eAAe;AAAA,EACjB;AAEA,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO,EAAE,GAAG,UAAU,SAAS,QAAQ;AAAA,EACzC;AAEA,SAAO,UAAU,EAAE,GAAG,UAAU,GAAG,QAAQ,IAAI;AACjD;AAAA;AAuCA,eAAsB,MACpB,OACA,YACA,SACe;AACf,QAAM,WAAW,0CAA0B,OAAO;AAClD,QAAM,EAAE,aAAa,SAAS,OAAO,IAAI;AAGzC,QAAM,aAAS,0BAAW,OAAO,WAAW;AAC5C,aAAW,SAAS,QAAQ;AAC1B,QAAI,QAAQ,SAAS;AACnB;AAAA,IACF;AAGA,UAAM,QAAQ;AAAA,MACZ,MAAM;AAAA,QAAI,CAAC,SACT,uBAAO,IAAI,SAAoB,WAAW,KAAK,CAAC,CAAM,GAAG;AAAA,UACvD,GAAG;AAAA,UACH,MAAM,CAAC,IAAI;AAAA,UACX;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAAA;AA2CA,eAAsB,QACpB,OACA,YACA,SACc;AACd,QAAM,WAAW,0CAA0B,OAAO;AAClD,UACE,MAAM;AAAA,QACJ,0BAAW,OAAO,SAAS,WAAW;AAAA,IACtC;AAAA,IACA,SAAS;AAAA,EACX,GACA,KAAK;AACT;AAAA;AA0CA,eAAsB,WACpB,OACA,YACA,SACe;AACf,QAAM,EAAE,YAAY,KAAK,GAAG,UAAU,IAAI,WAAW,CAAC;AACtD,QAAM,aAAS,0BAAW,OAAO,SAAS;AAC1C,QAAM,sBAAsB,sCAAsB,SAAS;AAC3D,QAAM,EAAE,OAAO,IAAI;AACnB,aAAW,SAAS,QAAQ;AAC1B,QAAI,QAAQ,SAAS;AACnB;AAAA,IACF;AAEA,UAAM,uBAAO,IAAI,SAAoB,WAAW,KAAK,CAAC,CAAQ,GAAG;AAAA,MAC/D,GAAG;AAAA,MACH,MAAM,CAAC,KAAK;AAAA,IACd,CAAC;AAAA,EACH;AACF;AAAA;AAoBA,eAAsB,aACpB,QACA,YACA,SACgB;AAChB,QAAM,YAAY,sCAAsB,OAAO;AAC/C,QAAM,EAAE,OAAO,IAAI;AACnB,QAAM,EAAE,OAAO,IAAI;AACnB,QAAM,iBAAiB,MAAM,MAAM;AACnC,WAAS,IAAI,GAAG,IAAI,QAAQ,KAAK,GAAG;AAElC,QAAI,QAAQ,SAAS;AACnB,qBAAe,CAAC,IAAI,CAAC;AAAA,IACvB,OAAO;AACL,YAAM,QAAQ,OAAO,CAAC;AAEtB,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,MAAM;AAAA,UAAI,WACR,uBAAO,IAAI,SAAoB,WAAW,KAAK,CAAC,CAAM,GAAG;AAAA,YACvD,GAAG;AAAA,YACH,MAAM,CAAC,KAAK;AAAA,UACd,CAAC;AAAA,QACH;AAAA,MACF;AACA,YAAM,mBAAmB,QAAQ;AAAA,QAAI,OACnC,EAAE,WAAW,cAAc,EAAE,QAAQ;AAAA,MACvC;AACA,qBAAe,CAAC,IAAI,MAAM,OAAO,CAAC,IAAIA,OAAM,iBAAiBA,EAAC,CAAC;AAAA,IACjE;AAAA,EACF;AACA,SAAO;AACT;AAAA;AAuFA,eAAsB,OACpB,YACA,SACwB;AACxB,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,sCAAsB,OAAO;AACjC,MAAI,QAAQ,SAAS;AACnB,WAAO;AAAA,EACT;AACA,MAAI,YAAY,GAAG;AACjB,WAAO,MAAM,WAAW,GAAI,QAAQ,CAAC,GAAI,EAAE,OAAO,CAAC;AAAA,EACrD;AAEA,QAAM,SAAS,0BAAU;AAEzB,MAAI,WAAW;AACf,MAAI,QAAQ;AACZ,MAAI,QAAiB;AAErB,SAAO,cAAc,GAAG;AAEtB,QAAI,QAAQ,SAAS;AACnB,aAAO;AAAA,IACT;AAEA,QAAI;AAEF,aAAO,MAAM,WAAW,GAAI,QAAQ,CAAC,GAAI,EAAE,OAAO,CAAC;AAAA,IACrD,SAAS,GAAG;AACV,UAAI,UAAU,6BAAiB;AAC7B,gBAAQ;AAAA,MACV;AACA,UAAI,WAAW,GAAG;AAChB;AAAA,MACF;AACA,UAAI,WAAW;AACf,UAAI,QAAQ;AAEV,oBAAY,KAAK,MAAM,KAAK,OAAO,IAAI,KAAK;AAAA,MAC9C;AAEA,iBAAW,KAAK,IAAI,UAAU,UAAoB;AAClD,UAAI,OAAO,YAAY,Y
AAY;AACjC,YAAI;AACF,gBAAM,SAAS,QAAS,UAAqB,UAAU,GAAG,QAAQ;AAClE,cAAI,WAAW,SAAS,sBAAsB;AAC5C;AAAA,UACF;AAEA,cAAI,OAAO,WAAW,YAAY,UAAU,GAAG;AAC7C,uBAAW,KAAK,IAAI,QAAQ,UAAoB;AAAA,UAClD;AAAA,QACF,SAASC,IAAG;AACV,cAAI,gBAAgB;AAClB,kBAAMA;AAAA,UACR;AAAA,QACF;AAAA,MACF;AAEA,UAAI;AAEF,cAAM,OAAO,WAAW,UAAU,QAAW,EAAE,OAAO,CAAC;AAAA,MACzD,QAAQ;AAEN,eAAO;AAAA,MACT;AAGA,UAAI,QAAQ,SAAS;AACnB,eAAO;AAAA,MACT;AAGA,cAAQ,KAAK,IAAI,QAAS,eAA0B,UAAoB;AAAA,IAC1E;AAAA,EACF;AACA,MAAI,UAAU,6BAAiB;AAC7B,UAAM;AAAA,EACR;AACA,SAAO;AACT;",
"names": ["i", "e"]
}
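For orientation, the promises source embedded in the source map above documents a retry helper (pRetry) whose wait time grows as min(baseDelayMs * backoffFactor ** attempt, maxDelayMs), plus chunked iteration helpers (pEach, pFilter) that run async callbacks with bounded concurrency and per-item retries. The sketch below is illustrative only and is not part of the diff: the @socketsecurity/lib/promises subpath import, the fetchJson helper, and the example URLs are assumptions inferred from the dist layout, not confirmed by the package's exports map.

// Hypothetical usage sketch; the subpath import is an assumption based on
// dist/promises.js and may not match the package's actual exports map.
import { pEach, pRetry } from '@socketsecurity/lib/promises'

async function fetchJson(url: string): Promise<unknown> {
  // Retries up to 3 times; waits roughly 1s, 2s, 4s (plus jitter, capped at maxDelayMs),
  // matching the backoff formula described in the embedded JSDoc.
  return await pRetry(
    async () => {
      const res = await fetch(url)
      if (!res.ok) {
        throw new Error(`HTTP ${res.status}`)
      }
      return await res.json()
    },
    { retries: 3, baseDelayMs: 1_000, backoffFactor: 2, maxDelayMs: 4_000, jitter: true },
  )
}

async function main(): Promise<void> {
  // Placeholder URLs; pEach processes them two at a time and retries each item once on failure.
  const urls = ['https://example.com/a', 'https://example.com/b', 'https://example.com/c']
  await pEach(
    urls,
    async url => {
      await fetchJson(url)
    },
    { concurrency: 2, retries: 1 },
  )
}

main().catch(console.error)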
package/dist/spinner.js
CHANGED
@@ -132,6 +132,7 @@ function Spinner(options) {
 const YoctoCtor = import_yocto_spinner.default;
 const tempInstance = YoctoCtor({});
 const YoctoSpinnerClass = tempInstance.constructor;
+const logger = (0, import_logger.getDefaultLogger)();
 _Spinner = class SpinnerClass extends YoctoSpinnerClass {
 #baseText = "";
 #indentation = "";
@@ -260,7 +261,6 @@ function Spinner(options) {
 } else {
 super[methodName](normalized);
 }
-const logger = (0, import_logger.getDefaultLogger)();
 if (methodName === "stop") {
 if (wasSpinning && normalized) {
 logger[import_logger.lastWasBlankSymbol]((0, import_strings.isBlankString)(normalized));
@@ -319,7 +319,6 @@ function Spinner(options) {
 extras = args;
 text = "";
 }
-const logger = (0, import_logger.getDefaultLogger)();
 logger.error(`${import_logger.LOG_SYMBOLS[symbolType]} ${text}`, ...extras);
 return this;
 }
@@ -491,7 +490,6 @@ function Spinner(options) {
 * @returns This spinner for chaining
 */
 log(...args) {
-const logger = (0, import_logger.getDefaultLogger)();
 logger.log(...args);
 return this;
 }
@@ -606,7 +604,6 @@ function Spinner(options) {
 */
 step(text, ...extras) {
 if (typeof text === "string") {
-const logger = (0, import_logger.getDefaultLogger)();
 logger.error("");
 logger.error(text, ...extras);
 }
@@ -630,7 +627,6 @@ function Spinner(options) {
 */
 substep(text, ...extras) {
 if (typeof text === "string") {
-const logger = (0, import_logger.getDefaultLogger)();
 logger.error(` ${text}`, ...extras);
 }
 return this;
@@ -653,6 +649,9 @@ function Spinner(options) {
 * ```
 */
 stop(...args) {
+if (!args.length || !args[0]) {
+super.text = "";
+}
 this.#baseText = "";
 this.#progress = void 0;
 if (this.#shimmer) {