@soda-gql/common 0.7.0 → 0.8.2

This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (43)
  1. package/dist/{canonical-id-BFnyQGST.mjs → canonical-id-BJahCcrS.mjs} +20 -2
  2. package/dist/canonical-id-BJahCcrS.mjs.map +1 -0
  3. package/dist/{canonical-id-BFcryTw5.cjs → canonical-id-CgMNOZyn.cjs} +26 -2
  4. package/dist/canonical-id-CgMNOZyn.cjs.map +1 -0
  5. package/dist/canonical-id.cjs +4 -3
  6. package/dist/canonical-id.d.cts +2 -2
  7. package/dist/canonical-id.d.mts +2 -2
  8. package/dist/canonical-id.mjs +2 -2
  9. package/dist/{index-DaAp2rNj.d.cts → index-B424kKYS.d.mts} +23 -1
  10. package/dist/index-B424kKYS.d.mts.map +1 -0
  11. package/dist/{index-C4t2Wbzs.d.mts → index-CPpVc8Id.d.mts} +11 -2
  12. package/dist/index-CPpVc8Id.d.mts.map +1 -0
  13. package/dist/{index-BG7Aiges.d.cts → index-Cm2Zwk9m.d.cts} +11 -2
  14. package/dist/index-Cm2Zwk9m.d.cts.map +1 -0
  15. package/dist/{index-BedBpKbv.d.mts → index-D1tzB3W5.d.cts} +23 -1
  16. package/dist/index-D1tzB3W5.d.cts.map +1 -0
  17. package/dist/index.cjs +5 -4
  18. package/dist/index.d.cts +3 -3
  19. package/dist/index.d.mts +3 -3
  20. package/dist/index.mjs +3 -3
  21. package/dist/{portable-Dbo3u2CQ.mjs → portable-BT3ahkQN.mjs} +133 -3
  22. package/dist/portable-BT3ahkQN.mjs.map +1 -0
  23. package/dist/{portable-C_7gJWmz.cjs → portable-cJqkfeHw.cjs} +133 -3
  24. package/dist/portable-cJqkfeHw.cjs.map +1 -0
  25. package/dist/portable.cjs +1 -1
  26. package/dist/portable.d.cts +1 -1
  27. package/dist/portable.d.mts +1 -1
  28. package/dist/portable.mjs +1 -1
  29. package/dist/{utils-CmLf7LU5.cjs → utils-CsTwS1dw.cjs} +2 -2
  30. package/dist/{utils-CmLf7LU5.cjs.map → utils-CsTwS1dw.cjs.map} +1 -1
  31. package/dist/utils.cjs +1 -1
  32. package/dist/{zod-CynYgOoN.cjs → zod-C_6JfuYV.cjs} +2 -2
  33. package/dist/{zod-CynYgOoN.cjs.map → zod-C_6JfuYV.cjs.map} +1 -1
  34. package/dist/zod.cjs +1 -1
  35. package/package.json +1 -1
  36. package/dist/canonical-id-BFcryTw5.cjs.map +0 -1
  37. package/dist/canonical-id-BFnyQGST.mjs.map +0 -1
  38. package/dist/index-BG7Aiges.d.cts.map +0 -1
  39. package/dist/index-BedBpKbv.d.mts.map +0 -1
  40. package/dist/index-C4t2Wbzs.d.mts.map +0 -1
  41. package/dist/index-DaAp2rNj.d.cts.map +0 -1
  42. package/dist/portable-C_7gJWmz.cjs.map +0 -1
  43. package/dist/portable-Dbo3u2CQ.mjs.map +0 -1
package/dist/{canonical-id-BFnyQGST.mjs → canonical-id-BJahCcrS.mjs} CHANGED
@@ -14,6 +14,24 @@ const createCanonicalId = (filePath, astPath) => {
  const idParts = [normalized, astPath];
  return idParts.join(canonicalIdSeparator);
  };
+ /**
+ * Parse a canonical ID into its components.
+ * @param canonicalId - The canonical ID to parse (e.g., "/app/src/user.ts::userFragment")
+ * @returns An object with filePath and astPath
+ */
+ const parseCanonicalId = (canonicalId) => {
+ const idx = canonicalId.indexOf(canonicalIdSeparator);
+ if (idx === -1) {
+ return {
+ filePath: canonicalId,
+ astPath: ""
+ };
+ }
+ return {
+ filePath: canonicalId.slice(0, idx),
+ astPath: canonicalId.slice(idx + canonicalIdSeparator.length)
+ };
+ };

  //#endregion
  //#region packages/common/src/canonical-id/path-tracker.ts
@@ -151,5 +169,5 @@ const buildAstPath = (stack) => {
  };

  //#endregion
- export { CanonicalIdSchema as a, createPathTracker as i, createCanonicalTracker as n, createCanonicalId as o, createOccurrenceTracker as r, buildAstPath as t };
- //# sourceMappingURL=canonical-id-BFnyQGST.mjs.map
+ export { CanonicalIdSchema as a, createPathTracker as i, createCanonicalTracker as n, createCanonicalId as o, createOccurrenceTracker as r, parseCanonicalId as s, buildAstPath as t };
+ //# sourceMappingURL=canonical-id-BJahCcrS.mjs.map
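parseCanonicalId is the inverse of createCanonicalId: it splits on the first "::" separator, so an astPath that itself contains "::" survives the round trip, and an input with no separator comes back with an empty astPath. A minimal usage sketch; the file path and astPath are illustrative, and the import assumes the package root entry, which re-exports both helpers as shown further down in this diff:

    import { createCanonicalId, parseCanonicalId } from "@soda-gql/common";

    // Hypothetical inputs; createCanonicalId throws unless the path is absolute.
    const id = createCanonicalId("/app/src/user.ts", "UserCard.useQuery.def");
    // id === "/app/src/user.ts::UserCard.useQuery.def" on a POSIX system

    const { filePath, astPath } = parseCanonicalId(id);
    // filePath === "/app/src/user.ts", astPath === "UserCard.useQuery.def"

    parseCanonicalId("/app/src/user.ts");
    // => { filePath: "/app/src/user.ts", astPath: "" }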
@@ -0,0 +1 @@
+ {"version":3,"file":"canonical-id-BJahCcrS.mjs","names":["CanonicalIdSchema: z.ZodType<CanonicalId>","z","scopeStack: ScopeFrame[]","frame: ScopeFrame","exportBinding: string | undefined"],"sources":["../src/canonical-id/canonical-id.ts","../src/canonical-id/path-tracker.ts"],"sourcesContent":["import { isAbsolute, resolve } from \"node:path\";\nimport z from \"zod\";\nimport { normalizePath } from \"../utils\";\n\nexport type CanonicalId = string & { readonly __brand: \"CanonicalId\" };\n\nconst canonicalIdSeparator = \"::\" as const;\n\nexport const CanonicalIdSchema: z.ZodType<CanonicalId> = z.string() as unknown as z.ZodType<CanonicalId>;\n\n// Type-safe schema for CanonicalId - validates as string but types as branded\nexport const createCanonicalId = (filePath: string, astPath: string): CanonicalId => {\n if (!isAbsolute(filePath)) {\n throw new Error(\"[INTERNAL] CANONICAL_ID_REQUIRES_ABSOLUTE_PATH\");\n }\n\n const resolved = resolve(filePath);\n const normalized = normalizePath(resolved);\n\n // Create a 2-part ID: {absPath}::{astPath}\n // astPath uniquely identifies the definition's location in the AST (e.g., \"MyComponent.useQuery.def\")\n const idParts = [normalized, astPath];\n\n return idParts.join(canonicalIdSeparator) as CanonicalId;\n};\n\n/**\n * Parse a canonical ID into its components.\n * @param canonicalId - The canonical ID to parse (e.g., \"/app/src/user.ts::userFragment\")\n * @returns An object with filePath and astPath\n */\nexport const parseCanonicalId = (\n canonicalId: CanonicalId | string,\n): {\n filePath: string;\n astPath: string;\n} => {\n const idx = canonicalId.indexOf(canonicalIdSeparator);\n if (idx === -1) {\n return { filePath: canonicalId, astPath: \"\" };\n }\n return {\n filePath: canonicalId.slice(0, idx),\n astPath: canonicalId.slice(idx + canonicalIdSeparator.length),\n };\n};\n","/**\n * Canonical path tracker for AST traversal.\n *\n * This module provides a stateful helper that tracks scope information during\n * AST traversal to generate canonical IDs. It's designed to integrate with\n * existing plugin visitor patterns (Babel, SWC, TypeScript) without requiring\n * a separate AST traversal.\n *\n * Usage pattern:\n * 1. Plugin creates tracker at file/program entry\n * 2. Plugin calls enterScope/exitScope during its traversal\n * 3. Plugin calls registerDefinition when discovering GQL definitions\n * 4. 
Tracker provides canonical ID information\n */\n\nimport type { CanonicalId } from \"./canonical-id\";\nimport { createCanonicalId } from \"./canonical-id\";\n\n/**\n * Scope frame for tracking AST path segments\n */\nexport type ScopeFrame = {\n /** Name segment (e.g., \"MyComponent\", \"useQuery\", \"arrow#1\") */\n readonly nameSegment: string;\n /** Kind of scope */\n readonly kind: \"function\" | \"class\" | \"variable\" | \"property\" | \"method\" | \"expression\";\n /** Occurrence index for disambiguation */\n readonly occurrence: number;\n};\n\n/**\n * Opaque handle for scope tracking\n */\nexport type ScopeHandle = {\n readonly __brand: \"ScopeHandle\";\n readonly depth: number;\n};\n\n/**\n * Canonical path tracker interface\n */\nexport interface CanonicalPathTracker {\n /**\n * Enter a new scope during traversal\n * @param options Scope information\n * @returns Handle to use when exiting the scope\n */\n enterScope(options: { segment: string; kind: ScopeFrame[\"kind\"]; stableKey?: string }): ScopeHandle;\n\n /**\n * Exit a scope during traversal\n * @param handle Handle returned from enterScope\n */\n exitScope(handle: ScopeHandle): void;\n\n /**\n * Register a definition discovered during traversal\n * @returns Definition metadata including astPath and canonical ID information\n */\n registerDefinition(): {\n astPath: string;\n isTopLevel: boolean;\n exportBinding?: string;\n };\n\n /**\n * Resolve a canonical ID from an astPath\n * @param astPath AST path string\n * @returns Canonical ID\n */\n resolveCanonicalId(astPath: string): CanonicalId;\n\n /**\n * Register an export binding\n * @param local Local variable name\n * @param exported Exported name\n */\n registerExportBinding(local: string, exported: string): void;\n\n /**\n * Get current scope depth\n * @returns Current depth (0 = top level)\n */\n currentDepth(): number;\n}\n\n/**\n * Build AST path from scope stack (internal helper)\n */\nconst _buildAstPath = (stack: readonly ScopeFrame[]): string => {\n return stack.map((frame) => frame.nameSegment).join(\".\");\n};\n\n/**\n * Create a canonical path tracker\n *\n * @param options Configuration options\n * @returns Tracker instance\n *\n * @example\n * ```typescript\n * // In a Babel plugin\n * const tracker = createCanonicalTracker({ filePath: state.filename });\n *\n * const visitor = {\n * FunctionDeclaration: {\n * enter(path) {\n * const handle = tracker.enterScope({\n * segment: path.node.id.name,\n * kind: 'function'\n * });\n * },\n * exit(path) {\n * tracker.exitScope(handle);\n * }\n * }\n * };\n * ```\n */\nexport const createCanonicalTracker = (options: {\n filePath: string;\n getExportName?: (localName: string) => string | undefined;\n}): CanonicalPathTracker => {\n const { filePath, getExportName } = options;\n\n // Scope stack\n const scopeStack: ScopeFrame[] = [];\n\n // Occurrence counters for disambiguating duplicate names\n const occurrenceCounters = new Map<string, number>();\n\n // Used paths for ensuring uniqueness\n const usedPaths = new Set<string>();\n\n // Export bindings map\n const exportBindings = new Map<string, string>();\n\n const getNextOccurrence = (key: string): number => {\n const current = occurrenceCounters.get(key) ?? 
0;\n occurrenceCounters.set(key, current + 1);\n return current;\n };\n\n const ensureUniquePath = (basePath: string): string => {\n let path = basePath;\n let suffix = 0;\n while (usedPaths.has(path)) {\n suffix++;\n path = `${basePath}$${suffix}`;\n }\n usedPaths.add(path);\n return path;\n };\n\n return {\n enterScope({ segment, kind, stableKey }): ScopeHandle {\n const key = stableKey ?? `${kind}:${segment}`;\n const occurrence = getNextOccurrence(key);\n\n const frame: ScopeFrame = {\n nameSegment: segment,\n kind,\n occurrence,\n };\n\n scopeStack.push(frame);\n\n return {\n __brand: \"ScopeHandle\",\n depth: scopeStack.length - 1,\n } as ScopeHandle;\n },\n\n exitScope(handle: ScopeHandle): void {\n // Validate handle depth matches current stack\n if (handle.depth !== scopeStack.length - 1) {\n throw new Error(`[INTERNAL] Invalid scope exit: expected depth ${scopeStack.length - 1}, got ${handle.depth}`);\n }\n scopeStack.pop();\n },\n\n registerDefinition(): {\n astPath: string;\n isTopLevel: boolean;\n exportBinding?: string;\n } {\n const basePath = _buildAstPath(scopeStack);\n const astPath = ensureUniquePath(basePath);\n const isTopLevel = scopeStack.length === 0;\n\n // Check export binding if provided\n let exportBinding: string | undefined;\n if (getExportName && isTopLevel) {\n // For top-level definitions, try to get export name\n // This is a simplified version - real logic depends on how the definition is bound\n exportBinding = undefined;\n }\n\n return {\n astPath,\n isTopLevel,\n exportBinding,\n };\n },\n\n resolveCanonicalId(astPath: string): CanonicalId {\n return createCanonicalId(filePath, astPath);\n },\n\n registerExportBinding(local: string, exported: string): void {\n exportBindings.set(local, exported);\n },\n\n currentDepth(): number {\n return scopeStack.length;\n },\n };\n};\n\n/**\n * Helper to create occurrence tracker (for backward compatibility)\n */\nexport const createOccurrenceTracker = (): {\n getNextOccurrence: (key: string) => number;\n} => {\n const occurrenceCounters = new Map<string, number>();\n\n return {\n getNextOccurrence(key: string): number {\n const current = occurrenceCounters.get(key) ?? 
0;\n occurrenceCounters.set(key, current + 1);\n return current;\n },\n };\n};\n\n/**\n * Helper to create path tracker (for backward compatibility)\n */\nexport const createPathTracker = (): {\n ensureUniquePath: (basePath: string) => string;\n} => {\n const usedPaths = new Set<string>();\n\n return {\n ensureUniquePath(basePath: string): string {\n let path = basePath;\n let suffix = 0;\n while (usedPaths.has(path)) {\n suffix++;\n path = `${basePath}$${suffix}`;\n }\n usedPaths.add(path);\n return path;\n },\n };\n};\n\n/**\n * Build AST path from scope stack (for backward compatibility)\n */\nexport const buildAstPath = (stack: readonly ScopeFrame[]): string => {\n return stack.map((frame) => frame.nameSegment).join(\".\");\n};\n"],"mappings":";;;;;AAMA,MAAM,uBAAuB;AAE7B,MAAaA,oBAA4CC,IAAE,QAAQ;AAGnE,MAAa,qBAAqB,UAAkB,YAAiC;AACnF,KAAI,CAAC,WAAW,SAAS,EAAE;AACzB,QAAM,IAAI,MAAM,iDAAiD;;CAGnE,MAAM,WAAW,QAAQ,SAAS;CAClC,MAAM,aAAa,cAAc,SAAS;CAI1C,MAAM,UAAU,CAAC,YAAY,QAAQ;AAErC,QAAO,QAAQ,KAAK,qBAAqB;;;;;;;AAQ3C,MAAa,oBACX,gBAIG;CACH,MAAM,MAAM,YAAY,QAAQ,qBAAqB;AACrD,KAAI,QAAQ,CAAC,GAAG;AACd,SAAO;GAAE,UAAU;GAAa,SAAS;GAAI;;AAE/C,QAAO;EACL,UAAU,YAAY,MAAM,GAAG,IAAI;EACnC,SAAS,YAAY,MAAM,MAAM,qBAAqB,OAAO;EAC9D;;;;;;;;AC6CH,MAAM,iBAAiB,UAAyC;AAC9D,QAAO,MAAM,KAAK,UAAU,MAAM,YAAY,CAAC,KAAK,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6B1D,MAAa,0BAA0B,YAGX;CAC1B,MAAM,EAAE,UAAU,kBAAkB;CAGpC,MAAMC,aAA2B,EAAE;CAGnC,MAAM,qBAAqB,IAAI,KAAqB;CAGpD,MAAM,YAAY,IAAI,KAAa;CAGnC,MAAM,iBAAiB,IAAI,KAAqB;CAEhD,MAAM,qBAAqB,QAAwB;EACjD,MAAM,UAAU,mBAAmB,IAAI,IAAI,IAAI;AAC/C,qBAAmB,IAAI,KAAK,UAAU,EAAE;AACxC,SAAO;;CAGT,MAAM,oBAAoB,aAA6B;EACrD,IAAI,OAAO;EACX,IAAI,SAAS;AACb,SAAO,UAAU,IAAI,KAAK,EAAE;AAC1B;AACA,UAAO,GAAG,SAAS,GAAG;;AAExB,YAAU,IAAI,KAAK;AACnB,SAAO;;AAGT,QAAO;EACL,WAAW,EAAE,SAAS,MAAM,aAA0B;GACpD,MAAM,MAAM,aAAa,GAAG,KAAK,GAAG;GACpC,MAAM,aAAa,kBAAkB,IAAI;GAEzC,MAAMC,QAAoB;IACxB,aAAa;IACb;IACA;IACD;AAED,cAAW,KAAK,MAAM;AAEtB,UAAO;IACL,SAAS;IACT,OAAO,WAAW,SAAS;IAC5B;;EAGH,UAAU,QAA2B;AAEnC,OAAI,OAAO,UAAU,WAAW,SAAS,GAAG;AAC1C,UAAM,IAAI,MAAM,iDAAiD,WAAW,SAAS,EAAE,QAAQ,OAAO,QAAQ;;AAEhH,cAAW,KAAK;;EAGlB,qBAIE;GACA,MAAM,WAAW,cAAc,WAAW;GAC1C,MAAM,UAAU,iBAAiB,SAAS;GAC1C,MAAM,aAAa,WAAW,WAAW;GAGzC,IAAIC;AACJ,OAAI,iBAAiB,YAAY;AAG/B,oBAAgB;;AAGlB,UAAO;IACL;IACA;IACA;IACD;;EAGH,mBAAmB,SAA8B;AAC/C,UAAO,kBAAkB,UAAU,QAAQ;;EAG7C,sBAAsB,OAAe,UAAwB;AAC3D,kBAAe,IAAI,OAAO,SAAS;;EAGrC,eAAuB;AACrB,UAAO,WAAW;;EAErB;;;;;AAMH,MAAa,gCAER;CACH,MAAM,qBAAqB,IAAI,KAAqB;AAEpD,QAAO,EACL,kBAAkB,KAAqB;EACrC,MAAM,UAAU,mBAAmB,IAAI,IAAI,IAAI;AAC/C,qBAAmB,IAAI,KAAK,UAAU,EAAE;AACxC,SAAO;IAEV;;;;;AAMH,MAAa,0BAER;CACH,MAAM,YAAY,IAAI,KAAa;AAEnC,QAAO,EACL,iBAAiB,UAA0B;EACzC,IAAI,OAAO;EACX,IAAI,SAAS;AACb,SAAO,UAAU,IAAI,KAAK,EAAE;AAC1B;AACA,UAAO,GAAG,SAAS,GAAG;;AAExB,YAAU,IAAI,KAAK;AACnB,SAAO;IAEV;;;;;AAMH,MAAa,gBAAgB,UAAyC;AACpE,QAAO,MAAM,KAAK,UAAU,MAAM,YAAY,CAAC,KAAK,IAAI"}
package/dist/{canonical-id-BFcryTw5.cjs → canonical-id-CgMNOZyn.cjs} CHANGED
@@ -25,7 +25,7 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  }) : target, mod));

  //#endregion
- const require_utils = require('./utils-CmLf7LU5.cjs');
+ const require_utils = require('./utils-CsTwS1dw.cjs');
  let node_path = require("node:path");
  let zod = require("zod");
  zod = __toESM(zod);
@@ -42,6 +42,24 @@ const createCanonicalId = (filePath, astPath) => {
  const idParts = [normalized, astPath];
  return idParts.join(canonicalIdSeparator);
  };
+ /**
+ * Parse a canonical ID into its components.
+ * @param canonicalId - The canonical ID to parse (e.g., "/app/src/user.ts::userFragment")
+ * @returns An object with filePath and astPath
+ */
+ const parseCanonicalId = (canonicalId) => {
+ const idx = canonicalId.indexOf(canonicalIdSeparator);
+ if (idx === -1) {
+ return {
+ filePath: canonicalId,
+ astPath: ""
+ };
+ }
+ return {
+ filePath: canonicalId.slice(0, idx),
+ astPath: canonicalId.slice(idx + canonicalIdSeparator.length)
+ };
+ };

  //#endregion
  //#region packages/common/src/canonical-id/path-tracker.ts
@@ -221,4 +239,10 @@ Object.defineProperty(exports, 'createPathTracker', {
  return createPathTracker;
  }
  });
- //# sourceMappingURL=canonical-id-BFcryTw5.cjs.map
+ Object.defineProperty(exports, 'parseCanonicalId', {
+ enumerable: true,
+ get: function () {
+ return parseCanonicalId;
+ }
+ });
+ //# sourceMappingURL=canonical-id-CgMNOZyn.cjs.map
@@ -0,0 +1 @@
+ {"version":3,"file":"canonical-id-CgMNOZyn.cjs","names":["CanonicalIdSchema: z.ZodType<CanonicalId>","z","normalizePath","scopeStack: ScopeFrame[]","frame: ScopeFrame","exportBinding: string | undefined"],"sources":["../src/canonical-id/canonical-id.ts","../src/canonical-id/path-tracker.ts"],"sourcesContent":["import { isAbsolute, resolve } from \"node:path\";\nimport z from \"zod\";\nimport { normalizePath } from \"../utils\";\n\nexport type CanonicalId = string & { readonly __brand: \"CanonicalId\" };\n\nconst canonicalIdSeparator = \"::\" as const;\n\nexport const CanonicalIdSchema: z.ZodType<CanonicalId> = z.string() as unknown as z.ZodType<CanonicalId>;\n\n// Type-safe schema for CanonicalId - validates as string but types as branded\nexport const createCanonicalId = (filePath: string, astPath: string): CanonicalId => {\n if (!isAbsolute(filePath)) {\n throw new Error(\"[INTERNAL] CANONICAL_ID_REQUIRES_ABSOLUTE_PATH\");\n }\n\n const resolved = resolve(filePath);\n const normalized = normalizePath(resolved);\n\n // Create a 2-part ID: {absPath}::{astPath}\n // astPath uniquely identifies the definition's location in the AST (e.g., \"MyComponent.useQuery.def\")\n const idParts = [normalized, astPath];\n\n return idParts.join(canonicalIdSeparator) as CanonicalId;\n};\n\n/**\n * Parse a canonical ID into its components.\n * @param canonicalId - The canonical ID to parse (e.g., \"/app/src/user.ts::userFragment\")\n * @returns An object with filePath and astPath\n */\nexport const parseCanonicalId = (\n canonicalId: CanonicalId | string,\n): {\n filePath: string;\n astPath: string;\n} => {\n const idx = canonicalId.indexOf(canonicalIdSeparator);\n if (idx === -1) {\n return { filePath: canonicalId, astPath: \"\" };\n }\n return {\n filePath: canonicalId.slice(0, idx),\n astPath: canonicalId.slice(idx + canonicalIdSeparator.length),\n };\n};\n","/**\n * Canonical path tracker for AST traversal.\n *\n * This module provides a stateful helper that tracks scope information during\n * AST traversal to generate canonical IDs. It's designed to integrate with\n * existing plugin visitor patterns (Babel, SWC, TypeScript) without requiring\n * a separate AST traversal.\n *\n * Usage pattern:\n * 1. Plugin creates tracker at file/program entry\n * 2. Plugin calls enterScope/exitScope during its traversal\n * 3. Plugin calls registerDefinition when discovering GQL definitions\n * 4. 
Tracker provides canonical ID information\n */\n\nimport type { CanonicalId } from \"./canonical-id\";\nimport { createCanonicalId } from \"./canonical-id\";\n\n/**\n * Scope frame for tracking AST path segments\n */\nexport type ScopeFrame = {\n /** Name segment (e.g., \"MyComponent\", \"useQuery\", \"arrow#1\") */\n readonly nameSegment: string;\n /** Kind of scope */\n readonly kind: \"function\" | \"class\" | \"variable\" | \"property\" | \"method\" | \"expression\";\n /** Occurrence index for disambiguation */\n readonly occurrence: number;\n};\n\n/**\n * Opaque handle for scope tracking\n */\nexport type ScopeHandle = {\n readonly __brand: \"ScopeHandle\";\n readonly depth: number;\n};\n\n/**\n * Canonical path tracker interface\n */\nexport interface CanonicalPathTracker {\n /**\n * Enter a new scope during traversal\n * @param options Scope information\n * @returns Handle to use when exiting the scope\n */\n enterScope(options: { segment: string; kind: ScopeFrame[\"kind\"]; stableKey?: string }): ScopeHandle;\n\n /**\n * Exit a scope during traversal\n * @param handle Handle returned from enterScope\n */\n exitScope(handle: ScopeHandle): void;\n\n /**\n * Register a definition discovered during traversal\n * @returns Definition metadata including astPath and canonical ID information\n */\n registerDefinition(): {\n astPath: string;\n isTopLevel: boolean;\n exportBinding?: string;\n };\n\n /**\n * Resolve a canonical ID from an astPath\n * @param astPath AST path string\n * @returns Canonical ID\n */\n resolveCanonicalId(astPath: string): CanonicalId;\n\n /**\n * Register an export binding\n * @param local Local variable name\n * @param exported Exported name\n */\n registerExportBinding(local: string, exported: string): void;\n\n /**\n * Get current scope depth\n * @returns Current depth (0 = top level)\n */\n currentDepth(): number;\n}\n\n/**\n * Build AST path from scope stack (internal helper)\n */\nconst _buildAstPath = (stack: readonly ScopeFrame[]): string => {\n return stack.map((frame) => frame.nameSegment).join(\".\");\n};\n\n/**\n * Create a canonical path tracker\n *\n * @param options Configuration options\n * @returns Tracker instance\n *\n * @example\n * ```typescript\n * // In a Babel plugin\n * const tracker = createCanonicalTracker({ filePath: state.filename });\n *\n * const visitor = {\n * FunctionDeclaration: {\n * enter(path) {\n * const handle = tracker.enterScope({\n * segment: path.node.id.name,\n * kind: 'function'\n * });\n * },\n * exit(path) {\n * tracker.exitScope(handle);\n * }\n * }\n * };\n * ```\n */\nexport const createCanonicalTracker = (options: {\n filePath: string;\n getExportName?: (localName: string) => string | undefined;\n}): CanonicalPathTracker => {\n const { filePath, getExportName } = options;\n\n // Scope stack\n const scopeStack: ScopeFrame[] = [];\n\n // Occurrence counters for disambiguating duplicate names\n const occurrenceCounters = new Map<string, number>();\n\n // Used paths for ensuring uniqueness\n const usedPaths = new Set<string>();\n\n // Export bindings map\n const exportBindings = new Map<string, string>();\n\n const getNextOccurrence = (key: string): number => {\n const current = occurrenceCounters.get(key) ?? 
0;\n occurrenceCounters.set(key, current + 1);\n return current;\n };\n\n const ensureUniquePath = (basePath: string): string => {\n let path = basePath;\n let suffix = 0;\n while (usedPaths.has(path)) {\n suffix++;\n path = `${basePath}$${suffix}`;\n }\n usedPaths.add(path);\n return path;\n };\n\n return {\n enterScope({ segment, kind, stableKey }): ScopeHandle {\n const key = stableKey ?? `${kind}:${segment}`;\n const occurrence = getNextOccurrence(key);\n\n const frame: ScopeFrame = {\n nameSegment: segment,\n kind,\n occurrence,\n };\n\n scopeStack.push(frame);\n\n return {\n __brand: \"ScopeHandle\",\n depth: scopeStack.length - 1,\n } as ScopeHandle;\n },\n\n exitScope(handle: ScopeHandle): void {\n // Validate handle depth matches current stack\n if (handle.depth !== scopeStack.length - 1) {\n throw new Error(`[INTERNAL] Invalid scope exit: expected depth ${scopeStack.length - 1}, got ${handle.depth}`);\n }\n scopeStack.pop();\n },\n\n registerDefinition(): {\n astPath: string;\n isTopLevel: boolean;\n exportBinding?: string;\n } {\n const basePath = _buildAstPath(scopeStack);\n const astPath = ensureUniquePath(basePath);\n const isTopLevel = scopeStack.length === 0;\n\n // Check export binding if provided\n let exportBinding: string | undefined;\n if (getExportName && isTopLevel) {\n // For top-level definitions, try to get export name\n // This is a simplified version - real logic depends on how the definition is bound\n exportBinding = undefined;\n }\n\n return {\n astPath,\n isTopLevel,\n exportBinding,\n };\n },\n\n resolveCanonicalId(astPath: string): CanonicalId {\n return createCanonicalId(filePath, astPath);\n },\n\n registerExportBinding(local: string, exported: string): void {\n exportBindings.set(local, exported);\n },\n\n currentDepth(): number {\n return scopeStack.length;\n },\n };\n};\n\n/**\n * Helper to create occurrence tracker (for backward compatibility)\n */\nexport const createOccurrenceTracker = (): {\n getNextOccurrence: (key: string) => number;\n} => {\n const occurrenceCounters = new Map<string, number>();\n\n return {\n getNextOccurrence(key: string): number {\n const current = occurrenceCounters.get(key) ?? 
0;\n occurrenceCounters.set(key, current + 1);\n return current;\n },\n };\n};\n\n/**\n * Helper to create path tracker (for backward compatibility)\n */\nexport const createPathTracker = (): {\n ensureUniquePath: (basePath: string) => string;\n} => {\n const usedPaths = new Set<string>();\n\n return {\n ensureUniquePath(basePath: string): string {\n let path = basePath;\n let suffix = 0;\n while (usedPaths.has(path)) {\n suffix++;\n path = `${basePath}$${suffix}`;\n }\n usedPaths.add(path);\n return path;\n },\n };\n};\n\n/**\n * Build AST path from scope stack (for backward compatibility)\n */\nexport const buildAstPath = (stack: readonly ScopeFrame[]): string => {\n return stack.map((frame) => frame.nameSegment).join(\".\");\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAMA,MAAM,uBAAuB;AAE7B,MAAaA,oBAA4CC,YAAE,QAAQ;AAGnE,MAAa,qBAAqB,UAAkB,YAAiC;AACnF,KAAI,2BAAY,SAAS,EAAE;AACzB,QAAM,IAAI,MAAM,iDAAiD;;CAGnE,MAAM,kCAAmB,SAAS;CAClC,MAAM,aAAaC,4BAAc,SAAS;CAI1C,MAAM,UAAU,CAAC,YAAY,QAAQ;AAErC,QAAO,QAAQ,KAAK,qBAAqB;;;;;;;AAQ3C,MAAa,oBACX,gBAIG;CACH,MAAM,MAAM,YAAY,QAAQ,qBAAqB;AACrD,KAAI,QAAQ,CAAC,GAAG;AACd,SAAO;GAAE,UAAU;GAAa,SAAS;GAAI;;AAE/C,QAAO;EACL,UAAU,YAAY,MAAM,GAAG,IAAI;EACnC,SAAS,YAAY,MAAM,MAAM,qBAAqB,OAAO;EAC9D;;;;;;;;AC6CH,MAAM,iBAAiB,UAAyC;AAC9D,QAAO,MAAM,KAAK,UAAU,MAAM,YAAY,CAAC,KAAK,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6B1D,MAAa,0BAA0B,YAGX;CAC1B,MAAM,EAAE,UAAU,kBAAkB;CAGpC,MAAMC,aAA2B,EAAE;CAGnC,MAAM,qBAAqB,IAAI,KAAqB;CAGpD,MAAM,YAAY,IAAI,KAAa;CAGnC,MAAM,iBAAiB,IAAI,KAAqB;CAEhD,MAAM,qBAAqB,QAAwB;EACjD,MAAM,UAAU,mBAAmB,IAAI,IAAI,IAAI;AAC/C,qBAAmB,IAAI,KAAK,UAAU,EAAE;AACxC,SAAO;;CAGT,MAAM,oBAAoB,aAA6B;EACrD,IAAI,OAAO;EACX,IAAI,SAAS;AACb,SAAO,UAAU,IAAI,KAAK,EAAE;AAC1B;AACA,UAAO,GAAG,SAAS,GAAG;;AAExB,YAAU,IAAI,KAAK;AACnB,SAAO;;AAGT,QAAO;EACL,WAAW,EAAE,SAAS,MAAM,aAA0B;GACpD,MAAM,MAAM,aAAa,GAAG,KAAK,GAAG;GACpC,MAAM,aAAa,kBAAkB,IAAI;GAEzC,MAAMC,QAAoB;IACxB,aAAa;IACb;IACA;IACD;AAED,cAAW,KAAK,MAAM;AAEtB,UAAO;IACL,SAAS;IACT,OAAO,WAAW,SAAS;IAC5B;;EAGH,UAAU,QAA2B;AAEnC,OAAI,OAAO,UAAU,WAAW,SAAS,GAAG;AAC1C,UAAM,IAAI,MAAM,iDAAiD,WAAW,SAAS,EAAE,QAAQ,OAAO,QAAQ;;AAEhH,cAAW,KAAK;;EAGlB,qBAIE;GACA,MAAM,WAAW,cAAc,WAAW;GAC1C,MAAM,UAAU,iBAAiB,SAAS;GAC1C,MAAM,aAAa,WAAW,WAAW;GAGzC,IAAIC;AACJ,OAAI,iBAAiB,YAAY;AAG/B,oBAAgB;;AAGlB,UAAO;IACL;IACA;IACA;IACD;;EAGH,mBAAmB,SAA8B;AAC/C,UAAO,kBAAkB,UAAU,QAAQ;;EAG7C,sBAAsB,OAAe,UAAwB;AAC3D,kBAAe,IAAI,OAAO,SAAS;;EAGrC,eAAuB;AACrB,UAAO,WAAW;;EAErB;;;;;AAMH,MAAa,gCAER;CACH,MAAM,qBAAqB,IAAI,KAAqB;AAEpD,QAAO,EACL,kBAAkB,KAAqB;EACrC,MAAM,UAAU,mBAAmB,IAAI,IAAI,IAAI;AAC/C,qBAAmB,IAAI,KAAK,UAAU,EAAE;AACxC,SAAO;IAEV;;;;;AAMH,MAAa,0BAER;CACH,MAAM,YAAY,IAAI,KAAa;AAEnC,QAAO,EACL,iBAAiB,UAA0B;EACzC,IAAI,OAAO;EACX,IAAI,SAAS;AACb,SAAO,UAAU,IAAI,KAAK,EAAE;AAC1B;AACA,UAAO,GAAG,SAAS,GAAG;;AAExB,YAAU,IAAI,KAAK;AACnB,SAAO;IAEV;;;;;AAMH,MAAa,gBAAgB,UAAyC;AACpE,QAAO,MAAM,KAAK,UAAU,MAAM,YAAY,CAAC,KAAK,IAAI"}
package/dist/canonical-id.cjs CHANGED
@@ -1,9 +1,10 @@
- const require_canonical_id = require('./canonical-id-BFcryTw5.cjs');
- require('./utils-CmLf7LU5.cjs');
+ const require_canonical_id = require('./canonical-id-CgMNOZyn.cjs');
+ require('./utils-CsTwS1dw.cjs');

  exports.CanonicalIdSchema = require_canonical_id.CanonicalIdSchema;
  exports.buildAstPath = require_canonical_id.buildAstPath;
  exports.createCanonicalId = require_canonical_id.createCanonicalId;
  exports.createCanonicalTracker = require_canonical_id.createCanonicalTracker;
  exports.createOccurrenceTracker = require_canonical_id.createOccurrenceTracker;
- exports.createPathTracker = require_canonical_id.createPathTracker;
+ exports.createPathTracker = require_canonical_id.createPathTracker;
+ exports.parseCanonicalId = require_canonical_id.parseCanonicalId;
package/dist/canonical-id.d.cts CHANGED
@@ -1,2 +1,2 @@
- import { a as createCanonicalTracker, c as CanonicalId, i as buildAstPath, l as CanonicalIdSchema, n as ScopeFrame, o as createOccurrenceTracker, r as ScopeHandle, s as createPathTracker, t as CanonicalPathTracker, u as createCanonicalId } from "./index-BG7Aiges.cjs";
- export { CanonicalId, CanonicalIdSchema, CanonicalPathTracker, ScopeFrame, ScopeHandle, buildAstPath, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker };
+ import { a as createCanonicalTracker, c as CanonicalId, d as parseCanonicalId, i as buildAstPath, l as CanonicalIdSchema, n as ScopeFrame, o as createOccurrenceTracker, r as ScopeHandle, s as createPathTracker, t as CanonicalPathTracker, u as createCanonicalId } from "./index-Cm2Zwk9m.cjs";
+ export { CanonicalId, CanonicalIdSchema, CanonicalPathTracker, ScopeFrame, ScopeHandle, buildAstPath, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker, parseCanonicalId };
package/dist/canonical-id.d.mts CHANGED
@@ -1,2 +1,2 @@
- import { a as createCanonicalTracker, c as CanonicalId, i as buildAstPath, l as CanonicalIdSchema, n as ScopeFrame, o as createOccurrenceTracker, r as ScopeHandle, s as createPathTracker, t as CanonicalPathTracker, u as createCanonicalId } from "./index-C4t2Wbzs.mjs";
- export { CanonicalId, CanonicalIdSchema, CanonicalPathTracker, ScopeFrame, ScopeHandle, buildAstPath, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker };
+ import { a as createCanonicalTracker, c as CanonicalId, d as parseCanonicalId, i as buildAstPath, l as CanonicalIdSchema, n as ScopeFrame, o as createOccurrenceTracker, r as ScopeHandle, s as createPathTracker, t as CanonicalPathTracker, u as createCanonicalId } from "./index-CPpVc8Id.mjs";
+ export { CanonicalId, CanonicalIdSchema, CanonicalPathTracker, ScopeFrame, ScopeHandle, buildAstPath, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker, parseCanonicalId };
package/dist/canonical-id.mjs CHANGED
@@ -1,4 +1,4 @@
  import "./utils-DLEgAn7q.mjs";
- import { a as CanonicalIdSchema, i as createPathTracker, n as createCanonicalTracker, o as createCanonicalId, r as createOccurrenceTracker, t as buildAstPath } from "./canonical-id-BFnyQGST.mjs";
+ import { a as CanonicalIdSchema, i as createPathTracker, n as createCanonicalTracker, o as createCanonicalId, r as createOccurrenceTracker, s as parseCanonicalId, t as buildAstPath } from "./canonical-id-BJahCcrS.mjs";
 
- export { CanonicalIdSchema, buildAstPath, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker };
+ export { CanonicalIdSchema, buildAstPath, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker, parseCanonicalId };
package/dist/{index-DaAp2rNj.d.cts → index-B424kKYS.d.mts} CHANGED
@@ -5,6 +5,11 @@
  interface PortableFS {
  readFile(path: string): Promise<string>;
  writeFile(path: string, content: string): Promise<void>;
+ /**
+ * Write a file atomically using temp file + rename pattern.
+ * This prevents partial/corrupt writes on crash.
+ */
+ writeFileAtomic(path: string, content: string): Promise<void>;
  exists(path: string): Promise<boolean>;
  stat(path: string): Promise<{
  mtime: Date;
@@ -14,6 +19,23 @@ interface PortableFS {
  mkdir(path: string, options?: {
  recursive?: boolean;
  }): Promise<void>;
+ /**
+ * Write a file synchronously and atomically using temp file + rename pattern.
+ * Safe for use in beforeExit handlers.
+ */
+ writeFileSyncAtomic(path: string, content: string): void;
+ /**
+ * Remove a file. Does not throw if file doesn't exist.
+ */
+ unlink(path: string): Promise<void>;
+ /**
+ * Remove a file synchronously. Does not throw if file doesn't exist.
+ */
+ unlinkSync(path: string): void;
+ /**
+ * Read a file synchronously.
+ */
+ readFileSync(path: string): string;
  }
  declare function createPortableFS(): PortableFS;
  declare function getPortableFS(): PortableFS;
@@ -85,4 +107,4 @@ interface SpawnResult {
  declare function spawn(options: SpawnOptions): Promise<SpawnResult>;
  //#endregion
  export { resetPortableForTests as a, HashAlgorithm as c, createPortableHasher as d, getPortableHasher as f, getPortableFS as g, createPortableFS as h, once as i, PortableHasher as l, __resetPortableFSForTests as m, SpawnResult as n, runtime as o, PortableFS as p, spawn as r, generateId as s, SpawnOptions as t, __resetPortableHasherForTests as u };
- //# sourceMappingURL=index-DaAp2rNj.d.cts.map
+ //# sourceMappingURL=index-B424kKYS.d.mts.map
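The new PortableFS members center on atomic writes: writeFileAtomic writes the content to a temporary sibling file and then renames it over the target, so a crash mid-write leaves either the old file or the complete new one, never a partial file. A hedged sketch of typical use through getPortableFS; the cache path is made up for illustration:

    import { getPortableFS } from "@soda-gql/common";

    const persistCache = async (payload: unknown): Promise<void> => {
      const fs = getPortableFS();
      // Atomic replace of the target file; the Node implementation (see the
      // portable chunk diff below) also creates the parent directory first.
      await fs.writeFileAtomic("/tmp/soda-gql/cache.json", JSON.stringify(payload));
    };

    const dropCache = async (): Promise<void> => {
      // unlink is documented not to throw when the file is already gone.
      await getPortableFS().unlink("/tmp/soda-gql/cache.json");
    };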
@@ -0,0 +1 @@
+ {"version":3,"file":"index-B424kKYS.d.mts","names":[],"sources":["../src/portable/fs.ts","../src/portable/hash.ts","../src/portable/id.ts","../src/portable/runtime.ts","../src/portable/spawn.ts"],"sourcesContent":[],"mappings":";;AAMA;;AAE4C,UAF3B,UAAA,CAE2B;EAKM,QAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EANxB,OAMwB,CAAA,MAAA,CAAA;EAC1B,SAAA,CAAA,IAAA,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,CAAA,EANoB,OAMpB,CAAA,IAAA,CAAA;EACe;;;;EAWf,eAAA,CAAA,IAAA,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,CAAA,EAb0B,OAa1B,CAAA,IAAA,CAAA;EAAO,MAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EAZP,OAYO,CAAA,OAAA,CAAA;EAmEf,IAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EA9EM,OA8EU,CAAA;IA2OhB,KAAA,EAzTuB,IAyTvB;IAWA,IAAA,EAAA,MAAA;;4CAnU4B;6BCR3B;IAFL,SAAA,CAAA,EAAa,OAAA;EAER,CAAA,CAAA,EDSyC,OCTzC,CAAA,IAAA,CAAc;EAWf;AAqChB;AAWA;;;;ACzDA;;wBFgBwB;;AGtBxB;AASA;EAiBgB,UAAA,CAAA,IAAA,EAAA,MAAA,CAAqB,EAAA,IAAA;;;;ECxBpB,YAAA,CAAA,IAAY,EAAA,MAAA,CAAA,EAGrB,MAAM;AAGd;AAMsB,iBJ2EN,gBAAA,CAAA,CI3EW,EJ2ES,UI3ET;AAAU,iBJsTrB,aAAA,CAAA,CItTqB,EJsTJ,UItTI;;;;;iBJiUrB,yBAAA,CAAA;;;;AA7UhB;;AAE4C,KCFhC,aAAA,GDEgC,QAAA,GAAA,QAAA;AAKM,UCLjC,cAAA,CDKiC;EAC1B,IAAA,CAAA,OAAA,EAAA,MAAA,EAAA,SAAA,CAAA,ECLY,aDKZ,CAAA,EAAA,MAAA;;AACF,iBCIN,oBAAA,CAAA,CDJM,ECIkB,cDJlB;AACsB,iBCwC5B,iBAAA,CAAA,CDxC4B,ECwCP,cDxCO;;;;AA6E5C;AA2OgB,iBCrQA,6BAAA,CAAA,CDqQ2B,EAAA,IAAA;;;;AAlU3C;;;;;;AASsB,iBELN,UAAA,CAAA,CFKM,EAAA,MAAA;;;;AATtB;;AAE4C,cGJ/B,OHI+B,EAAA;EAKM,SAAA,KAAA,EAAA,OAAA;EAC1B,SAAA,MAAA,EAAA,OAAA;EACe,SAAA,iBAAA,EAAA,OAAA;CAAjB;;;;AAWS,iBGbf,IHae,CAAA,CAAA,CAAA,CAAA,EAAA,EAAA,GAAA,GGbG,CHaH,CAAA,EAAA,GAAA,GGba,CHab;AAmE/B;AA2OA;AAWA;;iBGrTgB,qBAAA,CAAA;;;;AHxBhB;;AAE4C,UIF3B,YAAA,CJE2B;EAKM,GAAA,EAAA,MAAA,EAAA;EAC1B,GAAA,CAAA,EAAA,MAAA;EACe,GAAA,CAAA,EIN/B,MJM+B,CAAA,MAAA,EAAA,MAAA,CAAA;;AACK,UIJ3B,WAAA,CJI2B;EACc,MAAA,EAAA,MAAA;EASlC,MAAA,EAAA,MAAA;EAAO,QAAA,EAAA,MAAA;AAmE/B;AA2OgB,iBItTM,KAAA,CJsTO,OAAI,EItTI,YJsTM,CAAA,EItTS,OJsTT,CItTiB,WJsTjB,CAAA"}
package/dist/{index-C4t2Wbzs.d.mts → index-CPpVc8Id.d.mts} CHANGED
@@ -6,6 +6,15 @@ type CanonicalId = string & {
  };
  declare const CanonicalIdSchema: z$1.ZodType<CanonicalId>;
  declare const createCanonicalId: (filePath: string, astPath: string) => CanonicalId;
+ /**
+ * Parse a canonical ID into its components.
+ * @param canonicalId - The canonical ID to parse (e.g., "/app/src/user.ts::userFragment")
+ * @returns An object with filePath and astPath
+ */
+ declare const parseCanonicalId: (canonicalId: CanonicalId | string) => {
+ filePath: string;
+ astPath: string;
+ };
  //#endregion
  //#region packages/common/src/canonical-id/path-tracker.d.ts

@@ -120,5 +129,5 @@ declare const createPathTracker: () => {
  */
  declare const buildAstPath: (stack: readonly ScopeFrame[]) => string;
  //#endregion
- export { createCanonicalTracker as a, CanonicalId as c, buildAstPath as i, CanonicalIdSchema as l, ScopeFrame as n, createOccurrenceTracker as o, ScopeHandle as r, createPathTracker as s, CanonicalPathTracker as t, createCanonicalId as u };
- //# sourceMappingURL=index-C4t2Wbzs.d.mts.map
+ export { createCanonicalTracker as a, CanonicalId as c, parseCanonicalId as d, buildAstPath as i, CanonicalIdSchema as l, ScopeFrame as n, createOccurrenceTracker as o, ScopeHandle as r, createPathTracker as s, CanonicalPathTracker as t, createCanonicalId as u };
+ //# sourceMappingURL=index-CPpVc8Id.d.mts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index-CPpVc8Id.d.mts","names":[],"sources":["../src/canonical-id/canonical-id.ts","../src/canonical-id/path-tracker.ts"],"sourcesContent":[],"mappings":";;;KAIY,WAAA;;AAAZ,CAAA;AAIa,cAAA,iBAA6B,EAAV,GAAA,CAAE,OAAF,CAAU,WAAD,CAAA;AAG5B,cAAA,iBAAyD,EAAA,CAAA,QAarE,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,EAAA,GAbqE,WAarE;AAOD;;;;ACVA;AAYY,cDFC,gBCEU,EAAA,CAAA,WAAA,EDDR,WCCQ,GAAA,MAAA,EAAA,GAAA;EAQN,QAAA,EAAA,MAAA;EAM8B,OAAA,EAAA,MAAA;CAA2C;;;;;;;AAwE7E,KAlGD,UAAA,GAkGC;EAuGA;EAiBA,SAAA,WAiBZ,EAAA,MAAA;EAKY;;;;;;;;KApOD,WAAA;;;;;;;UAQK,oBAAA;;;;;;;;UAM8B;;MAA2C;;;;;oBAMtE;;;;;;;;;;;;;;;uCAiBmB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;cAiD1B;;;MAGT;;;;cAoGS;;;;;;cAiBA;;;;;;cAsBA,+BAAgC"}
package/dist/{index-BG7Aiges.d.cts → index-Cm2Zwk9m.d.cts} CHANGED
@@ -6,6 +6,15 @@ type CanonicalId = string & {
  };
  declare const CanonicalIdSchema: z$1.ZodType<CanonicalId>;
  declare const createCanonicalId: (filePath: string, astPath: string) => CanonicalId;
+ /**
+ * Parse a canonical ID into its components.
+ * @param canonicalId - The canonical ID to parse (e.g., "/app/src/user.ts::userFragment")
+ * @returns An object with filePath and astPath
+ */
+ declare const parseCanonicalId: (canonicalId: CanonicalId | string) => {
+ filePath: string;
+ astPath: string;
+ };
  //#endregion
  //#region packages/common/src/canonical-id/path-tracker.d.ts

@@ -120,5 +129,5 @@ declare const createPathTracker: () => {
  */
  declare const buildAstPath: (stack: readonly ScopeFrame[]) => string;
  //#endregion
- export { createCanonicalTracker as a, CanonicalId as c, buildAstPath as i, CanonicalIdSchema as l, ScopeFrame as n, createOccurrenceTracker as o, ScopeHandle as r, createPathTracker as s, CanonicalPathTracker as t, createCanonicalId as u };
- //# sourceMappingURL=index-BG7Aiges.d.cts.map
+ export { createCanonicalTracker as a, CanonicalId as c, parseCanonicalId as d, buildAstPath as i, CanonicalIdSchema as l, ScopeFrame as n, createOccurrenceTracker as o, ScopeHandle as r, createPathTracker as s, CanonicalPathTracker as t, createCanonicalId as u };
+ //# sourceMappingURL=index-Cm2Zwk9m.d.cts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index-Cm2Zwk9m.d.cts","names":[],"sources":["../src/canonical-id/canonical-id.ts","../src/canonical-id/path-tracker.ts"],"sourcesContent":[],"mappings":";;;KAIY,WAAA;;AAAZ,CAAA;AAIa,cAAA,iBAA6B,EAAV,GAAA,CAAE,OAAF,CAAU,WAAD,CAAA;AAG5B,cAAA,iBAAyD,EAAA,CAAA,QAarE,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,EAAA,GAbqE,WAarE;AAOD;;;;ACVA;AAYY,cDFC,gBCEU,EAAA,CAAA,WAAA,EDDR,WCCQ,GAAA,MAAA,EAAA,GAAA;EAQN,QAAA,EAAA,MAAA;EAM8B,OAAA,EAAA,MAAA;CAA2C;;;;;;;AAwE7E,KAlGD,UAAA,GAkGC;EAuGA;EAiBA,SAAA,WAiBZ,EAAA,MAAA;EAKY;;;;;;;;KApOD,WAAA;;;;;;;UAQK,oBAAA;;;;;;;;UAM8B;;MAA2C;;;;;oBAMtE;;;;;;;;;;;;;;;uCAiBmB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;cAiD1B;;;MAGT;;;;cAoGS;;;;;;cAiBA;;;;;;cAsBA,+BAAgC"}
package/dist/{index-BedBpKbv.d.mts → index-D1tzB3W5.d.cts} CHANGED
@@ -5,6 +5,11 @@
  interface PortableFS {
  readFile(path: string): Promise<string>;
  writeFile(path: string, content: string): Promise<void>;
+ /**
+ * Write a file atomically using temp file + rename pattern.
+ * This prevents partial/corrupt writes on crash.
+ */
+ writeFileAtomic(path: string, content: string): Promise<void>;
  exists(path: string): Promise<boolean>;
  stat(path: string): Promise<{
  mtime: Date;
@@ -14,6 +19,23 @@ interface PortableFS {
  mkdir(path: string, options?: {
  recursive?: boolean;
  }): Promise<void>;
+ /**
+ * Write a file synchronously and atomically using temp file + rename pattern.
+ * Safe for use in beforeExit handlers.
+ */
+ writeFileSyncAtomic(path: string, content: string): void;
+ /**
+ * Remove a file. Does not throw if file doesn't exist.
+ */
+ unlink(path: string): Promise<void>;
+ /**
+ * Remove a file synchronously. Does not throw if file doesn't exist.
+ */
+ unlinkSync(path: string): void;
+ /**
+ * Read a file synchronously.
+ */
+ readFileSync(path: string): string;
  }
  declare function createPortableFS(): PortableFS;
  declare function getPortableFS(): PortableFS;
@@ -85,4 +107,4 @@ interface SpawnResult {
  declare function spawn(options: SpawnOptions): Promise<SpawnResult>;
  //#endregion
  export { resetPortableForTests as a, HashAlgorithm as c, createPortableHasher as d, getPortableHasher as f, getPortableFS as g, createPortableFS as h, once as i, PortableHasher as l, __resetPortableFSForTests as m, SpawnResult as n, runtime as o, PortableFS as p, spawn as r, generateId as s, SpawnOptions as t, __resetPortableHasherForTests as u };
- //# sourceMappingURL=index-BedBpKbv.d.mts.map
+ //# sourceMappingURL=index-D1tzB3W5.d.cts.map
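writeFileSyncAtomic is the synchronous counterpart, which is what makes it usable in process exit hooks where awaiting a promise is not an option, as its doc comment notes. A sketch of the kind of flush-on-exit handler that implies; the state file path and payload are hypothetical:

    import { getPortableFS } from "@soda-gql/common";

    const fs = getPortableFS();

    process.on("beforeExit", () => {
      // Synchronous temp-file + rename: the state file on disk is always either
      // the previous snapshot or a complete new one.
      fs.writeFileSyncAtomic("/tmp/soda-gql/state.json", JSON.stringify({ savedAt: Date.now() }));
    });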
@@ -0,0 +1 @@
+ {"version":3,"file":"index-D1tzB3W5.d.cts","names":[],"sources":["../src/portable/fs.ts","../src/portable/hash.ts","../src/portable/id.ts","../src/portable/runtime.ts","../src/portable/spawn.ts"],"sourcesContent":[],"mappings":";;AAMA;;AAE4C,UAF3B,UAAA,CAE2B;EAKM,QAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EANxB,OAMwB,CAAA,MAAA,CAAA;EAC1B,SAAA,CAAA,IAAA,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,CAAA,EANoB,OAMpB,CAAA,IAAA,CAAA;EACe;;;;EAWf,eAAA,CAAA,IAAA,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,CAAA,EAb0B,OAa1B,CAAA,IAAA,CAAA;EAAO,MAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EAZP,OAYO,CAAA,OAAA,CAAA;EAmEf,IAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EA9EM,OA8EU,CAAA;IA2OhB,KAAA,EAzTuB,IAyTvB;IAWA,IAAA,EAAA,MAAA;;4CAnU4B;6BCR3B;IAFL,SAAA,CAAA,EAAa,OAAA;EAER,CAAA,CAAA,EDSyC,OCTzC,CAAA,IAAA,CAAc;EAWf;AAqChB;AAWA;;;;ACzDA;;wBFgBwB;;AGtBxB;AASA;EAiBgB,UAAA,CAAA,IAAA,EAAA,MAAA,CAAqB,EAAA,IAAA;;;;ECxBpB,YAAA,CAAA,IAAY,EAAA,MAAA,CAAA,EAGrB,MAAM;AAGd;AAMsB,iBJ2EN,gBAAA,CAAA,CI3EW,EJ2ES,UI3ET;AAAU,iBJsTrB,aAAA,CAAA,CItTqB,EJsTJ,UItTI;;;;;iBJiUrB,yBAAA,CAAA;;;;AA7UhB;;AAE4C,KCFhC,aAAA,GDEgC,QAAA,GAAA,QAAA;AAKM,UCLjC,cAAA,CDKiC;EAC1B,IAAA,CAAA,OAAA,EAAA,MAAA,EAAA,SAAA,CAAA,ECLY,aDKZ,CAAA,EAAA,MAAA;;AACF,iBCIN,oBAAA,CAAA,CDJM,ECIkB,cDJlB;AACsB,iBCwC5B,iBAAA,CAAA,CDxC4B,ECwCP,cDxCO;;;;AA6E5C;AA2OgB,iBCrQA,6BAAA,CAAA,CDqQ2B,EAAA,IAAA;;;;AAlU3C;;;;;;AASsB,iBELN,UAAA,CAAA,CFKM,EAAA,MAAA;;;;AATtB;;AAE4C,cGJ/B,OHI+B,EAAA;EAKM,SAAA,KAAA,EAAA,OAAA;EAC1B,SAAA,MAAA,EAAA,OAAA;EACe,SAAA,iBAAA,EAAA,OAAA;CAAjB;;;;AAWS,iBGbf,IHae,CAAA,CAAA,CAAA,CAAA,EAAA,EAAA,GAAA,GGbG,CHaH,CAAA,EAAA,GAAA,GGba,CHab;AAmE/B;AA2OA;AAWA;;iBGrTgB,qBAAA,CAAA;;;;AHxBhB;;AAE4C,UIF3B,YAAA,CJE2B;EAKM,GAAA,EAAA,MAAA,EAAA;EAC1B,GAAA,CAAA,EAAA,MAAA;EACe,GAAA,CAAA,EIN/B,MJM+B,CAAA,MAAA,EAAA,MAAA,CAAA;;AACK,UIJ3B,WAAA,CJI2B;EACc,MAAA,EAAA,MAAA;EASlC,MAAA,EAAA,MAAA;EAAO,QAAA,EAAA,MAAA;AAmE/B;AA2OgB,iBItTM,KAAA,CJsTO,OAAI,EItTI,YJsTM,CAAA,EItTS,OJsTT,CItTiB,WJsTjB,CAAA"}
package/dist/index.cjs CHANGED
@@ -1,7 +1,7 @@
- const require_canonical_id = require('./canonical-id-BFcryTw5.cjs');
- const require_utils = require('./utils-CmLf7LU5.cjs');
- const require_portable = require('./portable-C_7gJWmz.cjs');
- const require_zod = require('./zod-CynYgOoN.cjs');
+ const require_canonical_id = require('./canonical-id-CgMNOZyn.cjs');
+ const require_utils = require('./utils-CsTwS1dw.cjs');
+ const require_portable = require('./portable-cJqkfeHw.cjs');
+ const require_zod = require('./zod-C_6JfuYV.cjs');
  let neverthrow = require("neverthrow");

  //#region packages/common/src/scheduler/types.ts
@@ -286,6 +286,7 @@ exports.isExternalSpecifier = require_utils.isExternalSpecifier;
  exports.isRelativeSpecifier = require_utils.isRelativeSpecifier;
  exports.normalizePath = require_utils.normalizePath;
  exports.once = require_portable.once;
+ exports.parseCanonicalId = require_canonical_id.parseCanonicalId;
  exports.resetPortableForTests = require_portable.resetPortableForTests;
  exports.resolveRelativeImportWithExistenceCheck = require_utils.resolveRelativeImportWithExistenceCheck;
  exports.resolveRelativeImportWithReferences = require_utils.resolveRelativeImportWithReferences;
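For CommonJS consumers the new helper is reachable from the package root as well, as the export above shows. A minimal require-based sketch (assumes Node typings for require; the canonical ID string is illustrative):

    const { parseCanonicalId } = require("@soda-gql/common");

    const { filePath, astPath } = parseCanonicalId("/app/src/user.ts::UserCard.useQuery.def");
    // filePath === "/app/src/user.ts", astPath === "UserCard.useQuery.def"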
package/dist/index.d.cts CHANGED
@@ -1,5 +1,5 @@
- import { a as createCanonicalTracker, c as CanonicalId, i as buildAstPath, l as CanonicalIdSchema, n as ScopeFrame, o as createOccurrenceTracker, r as ScopeHandle, s as createPathTracker, t as CanonicalPathTracker, u as createCanonicalId } from "./index-BG7Aiges.cjs";
- import { a as resetPortableForTests, c as HashAlgorithm, d as createPortableHasher, f as getPortableHasher, g as getPortableFS, h as createPortableFS, i as once, l as PortableHasher, m as __resetPortableFSForTests, n as SpawnResult, o as runtime, p as PortableFS, r as spawn, s as generateId, t as SpawnOptions, u as __resetPortableHasherForTests } from "./index-DaAp2rNj.cjs";
+ import { a as createCanonicalTracker, c as CanonicalId, d as parseCanonicalId, i as buildAstPath, l as CanonicalIdSchema, n as ScopeFrame, o as createOccurrenceTracker, r as ScopeHandle, s as createPathTracker, t as CanonicalPathTracker, u as createCanonicalId } from "./index-Cm2Zwk9m.cjs";
+ import { a as resetPortableForTests, c as HashAlgorithm, d as createPortableHasher, f as getPortableHasher, g as getPortableFS, h as createPortableFS, i as once, l as PortableHasher, m as __resetPortableFSForTests, n as SpawnResult, o as runtime, p as PortableFS, r as spawn, s as generateId, t as SpawnOptions, u as __resetPortableHasherForTests } from "./index-D1tzB3W5.cjs";
  import { a as resolveRelativeImportWithExistenceCheck, i as normalizePath, n as isExternalSpecifier, o as resolveRelativeImportWithReferences, r as isRelativeSpecifier, s as cachedFn, t as MODULE_EXTENSION_CANDIDATES } from "./index-LaXfl_e_.cjs";
  import { n as ShapeFor, r as defineSchemaFor, t as SchemaFor } from "./index-LHYortIn.cjs";
  import { Result } from "neverthrow";
@@ -216,5 +216,5 @@ declare const Effects: {
  */
  declare const createSyncScheduler: () => SyncScheduler;
  //#endregion
- export { type AsyncScheduler, CanonicalId, CanonicalIdSchema, CanonicalPathTracker, DeferEffect, Effect, type EffectGenerator, type EffectGeneratorFn, type EffectResult, Effects, HashAlgorithm, MODULE_EXTENSION_CANDIDATES, ParallelEffect, PortableFS, PortableHasher, PureEffect, type SchedulerError, SchemaFor, ScopeFrame, ScopeHandle, ShapeFor, SpawnOptions, SpawnResult, type SyncScheduler, YieldEffect, __resetPortableFSForTests, __resetPortableHasherForTests, buildAstPath, cachedFn, createAsyncScheduler, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker, createPortableFS, createPortableHasher, createSchedulerError, createSyncScheduler, defineSchemaFor, generateId, getPortableFS, getPortableHasher, isExternalSpecifier, isRelativeSpecifier, normalizePath, once, resetPortableForTests, resolveRelativeImportWithExistenceCheck, resolveRelativeImportWithReferences, runtime, spawn };
+ export { type AsyncScheduler, CanonicalId, CanonicalIdSchema, CanonicalPathTracker, DeferEffect, Effect, type EffectGenerator, type EffectGeneratorFn, type EffectResult, Effects, HashAlgorithm, MODULE_EXTENSION_CANDIDATES, ParallelEffect, PortableFS, PortableHasher, PureEffect, type SchedulerError, SchemaFor, ScopeFrame, ScopeHandle, ShapeFor, SpawnOptions, SpawnResult, type SyncScheduler, YieldEffect, __resetPortableFSForTests, __resetPortableHasherForTests, buildAstPath, cachedFn, createAsyncScheduler, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker, createPortableFS, createPortableHasher, createSchedulerError, createSyncScheduler, defineSchemaFor, generateId, getPortableFS, getPortableHasher, isExternalSpecifier, isRelativeSpecifier, normalizePath, once, parseCanonicalId, resetPortableForTests, resolveRelativeImportWithExistenceCheck, resolveRelativeImportWithReferences, runtime, spawn };
  //# sourceMappingURL=index.d.cts.map
package/dist/index.d.mts CHANGED
@@ -1,5 +1,5 @@
- import { a as createCanonicalTracker, c as CanonicalId, i as buildAstPath, l as CanonicalIdSchema, n as ScopeFrame, o as createOccurrenceTracker, r as ScopeHandle, s as createPathTracker, t as CanonicalPathTracker, u as createCanonicalId } from "./index-C4t2Wbzs.mjs";
- import { a as resetPortableForTests, c as HashAlgorithm, d as createPortableHasher, f as getPortableHasher, g as getPortableFS, h as createPortableFS, i as once, l as PortableHasher, m as __resetPortableFSForTests, n as SpawnResult, o as runtime, p as PortableFS, r as spawn, s as generateId, t as SpawnOptions, u as __resetPortableHasherForTests } from "./index-BedBpKbv.mjs";
+ import { a as createCanonicalTracker, c as CanonicalId, d as parseCanonicalId, i as buildAstPath, l as CanonicalIdSchema, n as ScopeFrame, o as createOccurrenceTracker, r as ScopeHandle, s as createPathTracker, t as CanonicalPathTracker, u as createCanonicalId } from "./index-CPpVc8Id.mjs";
+ import { a as resetPortableForTests, c as HashAlgorithm, d as createPortableHasher, f as getPortableHasher, g as getPortableFS, h as createPortableFS, i as once, l as PortableHasher, m as __resetPortableFSForTests, n as SpawnResult, o as runtime, p as PortableFS, r as spawn, s as generateId, t as SpawnOptions, u as __resetPortableHasherForTests } from "./index-B424kKYS.mjs";
  import { a as resolveRelativeImportWithExistenceCheck, i as normalizePath, n as isExternalSpecifier, o as resolveRelativeImportWithReferences, r as isRelativeSpecifier, s as cachedFn, t as MODULE_EXTENSION_CANDIDATES } from "./index-Dv8spPt0.mjs";
  import { n as ShapeFor, r as defineSchemaFor, t as SchemaFor } from "./index-Dit86qkX.mjs";
  import { Result } from "neverthrow";
@@ -216,5 +216,5 @@ declare const Effects: {
  */
  declare const createSyncScheduler: () => SyncScheduler;
  //#endregion
- export { type AsyncScheduler, CanonicalId, CanonicalIdSchema, CanonicalPathTracker, DeferEffect, Effect, type EffectGenerator, type EffectGeneratorFn, type EffectResult, Effects, HashAlgorithm, MODULE_EXTENSION_CANDIDATES, ParallelEffect, PortableFS, PortableHasher, PureEffect, type SchedulerError, SchemaFor, ScopeFrame, ScopeHandle, ShapeFor, SpawnOptions, SpawnResult, type SyncScheduler, YieldEffect, __resetPortableFSForTests, __resetPortableHasherForTests, buildAstPath, cachedFn, createAsyncScheduler, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker, createPortableFS, createPortableHasher, createSchedulerError, createSyncScheduler, defineSchemaFor, generateId, getPortableFS, getPortableHasher, isExternalSpecifier, isRelativeSpecifier, normalizePath, once, resetPortableForTests, resolveRelativeImportWithExistenceCheck, resolveRelativeImportWithReferences, runtime, spawn };
+ export { type AsyncScheduler, CanonicalId, CanonicalIdSchema, CanonicalPathTracker, DeferEffect, Effect, type EffectGenerator, type EffectGeneratorFn, type EffectResult, Effects, HashAlgorithm, MODULE_EXTENSION_CANDIDATES, ParallelEffect, PortableFS, PortableHasher, PureEffect, type SchedulerError, SchemaFor, ScopeFrame, ScopeHandle, ShapeFor, SpawnOptions, SpawnResult, type SyncScheduler, YieldEffect, __resetPortableFSForTests, __resetPortableHasherForTests, buildAstPath, cachedFn, createAsyncScheduler, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker, createPortableFS, createPortableHasher, createSchedulerError, createSyncScheduler, defineSchemaFor, generateId, getPortableFS, getPortableHasher, isExternalSpecifier, isRelativeSpecifier, normalizePath, once, parseCanonicalId, resetPortableForTests, resolveRelativeImportWithExistenceCheck, resolveRelativeImportWithReferences, runtime, spawn };
  //# sourceMappingURL=index.d.mts.map
package/dist/index.mjs CHANGED
@@ -1,6 +1,6 @@
- import { a as getPortableHasher, c as getPortableFS, d as runtime, i as createPortableHasher, l as once, n as generateId, o as __resetPortableFSForTests, r as __resetPortableHasherForTests, s as createPortableFS, t as spawn, u as resetPortableForTests } from "./portable-Dbo3u2CQ.mjs";
+ import { a as getPortableHasher, c as getPortableFS, d as runtime, i as createPortableHasher, l as once, n as generateId, o as __resetPortableFSForTests, r as __resetPortableHasherForTests, s as createPortableFS, t as spawn, u as resetPortableForTests } from "./portable-BT3ahkQN.mjs";
  import { a as resolveRelativeImportWithExistenceCheck, i as normalizePath, n as isExternalSpecifier, o as resolveRelativeImportWithReferences, r as isRelativeSpecifier, s as cachedFn, t as MODULE_EXTENSION_CANDIDATES } from "./utils-DLEgAn7q.mjs";
- import { a as CanonicalIdSchema, i as createPathTracker, n as createCanonicalTracker, o as createCanonicalId, r as createOccurrenceTracker, t as buildAstPath } from "./canonical-id-BFnyQGST.mjs";
+ import { a as CanonicalIdSchema, i as createPathTracker, n as createCanonicalTracker, o as createCanonicalId, r as createOccurrenceTracker, s as parseCanonicalId, t as buildAstPath } from "./canonical-id-BJahCcrS.mjs";
  import { t as defineSchemaFor } from "./zod-DeSimXdI.mjs";
  import { err, ok } from "neverthrow";
 
@@ -257,5 +257,5 @@ const isSchedulerError = (error) => {
  };
 
  //#endregion
- export { CanonicalIdSchema, DeferEffect, Effect, Effects, MODULE_EXTENSION_CANDIDATES, ParallelEffect, PureEffect, YieldEffect, __resetPortableFSForTests, __resetPortableHasherForTests, buildAstPath, cachedFn, createAsyncScheduler, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker, createPortableFS, createPortableHasher, createSchedulerError, createSyncScheduler, defineSchemaFor, generateId, getPortableFS, getPortableHasher, isExternalSpecifier, isRelativeSpecifier, normalizePath, once, resetPortableForTests, resolveRelativeImportWithExistenceCheck, resolveRelativeImportWithReferences, runtime, spawn };
+ export { CanonicalIdSchema, DeferEffect, Effect, Effects, MODULE_EXTENSION_CANDIDATES, ParallelEffect, PureEffect, YieldEffect, __resetPortableFSForTests, __resetPortableHasherForTests, buildAstPath, cachedFn, createAsyncScheduler, createCanonicalId, createCanonicalTracker, createOccurrenceTracker, createPathTracker, createPortableFS, createPortableHasher, createSchedulerError, createSyncScheduler, defineSchemaFor, generateId, getPortableFS, getPortableHasher, isExternalSpecifier, isRelativeSpecifier, normalizePath, once, parseCanonicalId, resetPortableForTests, resolveRelativeImportWithExistenceCheck, resolveRelativeImportWithReferences, runtime, spawn };
  //# sourceMappingURL=index.mjs.map
package/dist/{portable-Dbo3u2CQ.mjs → portable-BT3ahkQN.mjs} CHANGED
@@ -42,6 +42,26 @@ const getNodeFS = once(async () => {
  const fs = await import("node:fs/promises");
  return fs;
  });
+ let nodeFsSync = null;
+ const getNodeFSSync = () => {
+ if (!nodeFsSync) {
+ nodeFsSync = __require("node:fs");
+ }
+ return nodeFsSync;
+ };
+ let pathModule = null;
+ const getPathModule = () => {
+ if (!pathModule) {
+ pathModule = __require("node:path");
+ }
+ return pathModule;
+ };
+ /**
+ * Generate a unique temp file path for atomic write.
+ */
+ const getTempPath = (targetPath) => {
+ return `${targetPath}.${process.pid}.${Date.now()}.tmp`;
+ };
  function createPortableFS() {
  if (runtime.isBun) {
  return {
@@ -52,6 +72,20 @@ function createPortableFS() {
  async writeFile(path, content) {
  await Bun.write(path, content);
  },
+ async writeFileAtomic(path, content) {
+ const tempPath = getTempPath(path);
+ try {
+ await Bun.write(tempPath, content);
+ const nodeFS = await getNodeFS();
+ await nodeFS.rename(tempPath, path);
+ } catch (error) {
+ try {
+ const nodeFS = await getNodeFS();
+ await nodeFS.unlink(tempPath);
+ } catch {}
+ throw error;
+ }
+ },
  async exists(path) {
  const nodeFS = await getNodeFS();
  try {
@@ -78,6 +112,46 @@ function createPortableFS() {
  async mkdir(path, options) {
  const nodeFS = await getNodeFS();
  await nodeFS.mkdir(path, options);
+ },
+ writeFileSyncAtomic(path, content) {
+ const fsSync = getNodeFSSync();
+ const pathMod = getPathModule();
+ const tempPath = getTempPath(path);
+ const dir = pathMod.dirname(path);
+ fsSync.mkdirSync(dir, { recursive: true });
+ try {
+ fsSync.writeFileSync(tempPath, content, "utf-8");
+ fsSync.renameSync(tempPath, path);
+ } catch (error) {
+ try {
+ fsSync.unlinkSync(tempPath);
+ } catch {}
+ throw error;
+ }
+ },
+ async unlink(path) {
+ const nodeFS = await getNodeFS();
+ try {
+ await nodeFS.unlink(path);
+ } catch (error) {
+ if (error.code !== "ENOENT") {
+ throw error;
+ }
+ }
+ },
+ unlinkSync(path) {
+ const fsSync = getNodeFSSync();
+ try {
+ fsSync.unlinkSync(path);
+ } catch (error) {
+ if (error.code !== "ENOENT") {
+ throw error;
+ }
+ }
+ },
+ readFileSync(path) {
+ const fsSync = getNodeFSSync();
+ return fsSync.readFileSync(path, "utf-8");
  }
  };
  }
@@ -88,11 +162,27 @@ function createPortableFS() {
  },
  async writeFile(path, content) {
  const nodeFS = await getNodeFS();
- const pathModule = await import("node:path");
- const dir = pathModule.dirname(path);
+ const pathModule$1 = await import("node:path");
+ const dir = pathModule$1.dirname(path);
  await nodeFS.mkdir(dir, { recursive: true });
  await nodeFS.writeFile(path, content, "utf-8");
  },
+ async writeFileAtomic(path, content) {
+ const nodeFS = await getNodeFS();
+ const pathMod = await import("node:path");
+ const dir = pathMod.dirname(path);
+ const tempPath = getTempPath(path);
+ try {
+ await nodeFS.mkdir(dir, { recursive: true });
+ await nodeFS.writeFile(tempPath, content, "utf-8");
+ await nodeFS.rename(tempPath, path);
+ } catch (error) {
+ try {
+ await nodeFS.unlink(tempPath);
+ } catch {}
+ throw error;
+ }
+ },
  async exists(path) {
  const nodeFS = await getNodeFS();
  try {
@@ -117,6 +207,46 @@ function createPortableFS() {
  async mkdir(path, options) {
  const nodeFS = await getNodeFS();
  await nodeFS.mkdir(path, options);
+ },
+ writeFileSyncAtomic(path, content) {
+ const fsSync = getNodeFSSync();
+ const pathMod = getPathModule();
+ const tempPath = getTempPath(path);
+ const dir = pathMod.dirname(path);
+ fsSync.mkdirSync(dir, { recursive: true });
+ try {
+ fsSync.writeFileSync(tempPath, content, "utf-8");
+ fsSync.renameSync(tempPath, path);
+ } catch (error) {
+ try {
+ fsSync.unlinkSync(tempPath);
+ } catch {}
+ throw error;
+ }
+ },
+ async unlink(path) {
+ const nodeFS = await getNodeFS();
+ try {
+ await nodeFS.unlink(path);
+ } catch (error) {
+ if (error.code !== "ENOENT") {
+ throw error;
+ }
+ }
+ },
+ unlinkSync(path) {
+ const fsSync = getNodeFSSync();
+ try {
+ fsSync.unlinkSync(path);
+ } catch (error) {
+ if (error.code !== "ENOENT") {
+ throw error;
+ }
+ }
+ },
+ readFileSync(path) {
+ const fsSync = getNodeFSSync();
+ return fsSync.readFileSync(path, "utf-8");
  }
  };
  }
@@ -258,4 +388,4 @@ async function spawn(options) {

  //#endregion
  export { getPortableHasher as a, getPortableFS as c, runtime as d, createPortableHasher as i, once as l, generateId as n, __resetPortableFSForTests as o, __resetPortableHasherForTests as r, createPortableFS as s, spawn as t, resetPortableForTests as u };
- //# sourceMappingURL=portable-Dbo3u2CQ.mjs.map
+ //# sourceMappingURL=portable-BT3ahkQN.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"portable-BT3ahkQN.mjs","names":["result: T | undefined","nodeFsSync: FSSync | null","pathModule: { dirname: (path: string) => string } | null","pathModule","fsInstance: PortableFS | null","crypto","hasherInstance: PortableHasher | null","crypto","execOptions: {\n cwd?: string;\n env?: NodeJS.ProcessEnv;\n encoding: BufferEncoding;\n }","error: unknown"],"sources":["../src/portable/runtime.ts","../src/portable/fs.ts","../src/portable/hash.ts","../src/portable/id.ts","../src/portable/spawn.ts"],"sourcesContent":["/**\n * Runtime detection utilities for portable API implementation\n */\n\nexport const runtime = {\n isBun: typeof Bun !== \"undefined\",\n isNode: typeof process !== \"undefined\" && typeof Bun === \"undefined\",\n supportsWebCrypto: typeof crypto !== \"undefined\" && typeof crypto.subtle !== \"undefined\",\n} as const;\n\n/**\n * Helper to cache module imports to avoid repeated dynamic imports\n */\nexport function once<T>(fn: () => T): () => T {\n let result: T | undefined;\n let called = false;\n\n return () => {\n if (!called) {\n result = fn();\n called = true;\n }\n return result as T;\n };\n}\n\n/**\n * Reset runtime state for testing purposes only\n * @internal\n */\nexport function resetPortableForTests(): void {\n // This is a marker function that portable modules can use\n // to reset their singleton state in tests\n}\n","/**\n * Portable filesystem API that works on both Bun and Node.js\n */\n\nimport { once, runtime } from \"./runtime\";\n\nexport interface PortableFS {\n readFile(path: string): Promise<string>;\n writeFile(path: string, content: string): Promise<void>;\n /**\n * Write a file atomically using temp file + rename pattern.\n * This prevents partial/corrupt writes on crash.\n */\n writeFileAtomic(path: string, content: string): Promise<void>;\n exists(path: string): Promise<boolean>;\n stat(path: string): Promise<{ mtime: Date; size: number }>;\n rename(oldPath: string, newPath: string): Promise<void>;\n mkdir(path: string, options?: { recursive?: boolean }): Promise<void>;\n /**\n * Write a file synchronously and atomically using temp file + rename pattern.\n * Safe for use in beforeExit handlers.\n */\n writeFileSyncAtomic(path: string, content: string): void;\n /**\n * Remove a file. Does not throw if file doesn't exist.\n */\n unlink(path: string): Promise<void>;\n /**\n * Remove a file synchronously. 
Does not throw if file doesn't exist.\n */\n unlinkSync(path: string): void;\n /**\n * Read a file synchronously.\n */\n readFileSync(path: string): string;\n}\n\ninterface FSPromises {\n readFile: (path: string, encoding: string) => Promise<string>;\n writeFile: (path: string, content: string, encoding: string) => Promise<void>;\n access: (path: string) => Promise<void>;\n stat: (path: string) => Promise<{\n mtime: Date;\n size: number;\n isDirectory: () => boolean;\n }>;\n rename: (oldPath: string, newPath: string) => Promise<void>;\n mkdir: (path: string, options?: { recursive?: boolean }) => Promise<void>;\n unlink: (path: string) => Promise<void>;\n}\n\ninterface FSSync {\n writeFileSync: (path: string, content: string, encoding: string) => void;\n renameSync: (oldPath: string, newPath: string) => void;\n unlinkSync: (path: string) => void;\n readFileSync: (path: string, encoding: string) => string;\n mkdirSync: (path: string, options?: { recursive?: boolean }) => void;\n}\n\n// Cache the fs/promises import\nconst getNodeFS = once(async (): Promise<FSPromises> => {\n const fs = await import(\"node:fs/promises\");\n return fs as FSPromises;\n});\n\n// Cache the sync fs import\nlet nodeFsSync: FSSync | null = null;\nconst getNodeFSSync = (): FSSync => {\n if (!nodeFsSync) {\n // Use require for sync loading\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n nodeFsSync = require(\"node:fs\") as FSSync;\n }\n return nodeFsSync;\n};\n\n// Cache path module\nlet pathModule: { dirname: (path: string) => string } | null = null;\nconst getPathModule = (): { dirname: (path: string) => string } => {\n if (!pathModule) {\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n pathModule = require(\"node:path\") as { dirname: (path: string) => string };\n }\n return pathModule;\n};\n\n/**\n * Generate a unique temp file path for atomic write.\n */\nconst getTempPath = (targetPath: string): string => {\n return `${targetPath}.${process.pid}.${Date.now()}.tmp`;\n};\n\nexport function createPortableFS(): PortableFS {\n if (runtime.isBun) {\n return {\n async readFile(path) {\n const file = Bun.file(path);\n return await file.text();\n },\n\n async writeFile(path, content) {\n // Bun.write auto-creates parent directories\n await Bun.write(path, content);\n },\n\n async writeFileAtomic(path, content) {\n const tempPath = getTempPath(path);\n try {\n await Bun.write(tempPath, content);\n const nodeFS = await getNodeFS();\n await nodeFS.rename(tempPath, path);\n } catch (error) {\n // Clean up temp file on failure\n try {\n const nodeFS = await getNodeFS();\n await nodeFS.unlink(tempPath);\n } catch {\n // Ignore cleanup errors\n }\n throw error;\n }\n },\n\n async exists(path) {\n // Bun.file().exists() only works for files, use fs.stat for both files and dirs\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.stat(path);\n return true;\n } catch {\n return false;\n }\n },\n\n async stat(path) {\n const file = Bun.file(path);\n const size = file.size;\n // Bun doesn't expose mtime directly, use Node fs.stat\n const nodeFS = await getNodeFS();\n const { mtime } = await nodeFS.stat(path);\n return { mtime, size };\n },\n\n async rename(oldPath, newPath) {\n const nodeFS = await getNodeFS();\n await nodeFS.rename(oldPath, newPath);\n },\n\n async mkdir(path, options) {\n const nodeFS = await getNodeFS();\n await nodeFS.mkdir(path, options);\n },\n\n writeFileSyncAtomic(path, content) {\n const fsSync = getNodeFSSync();\n const pathMod = getPathModule();\n const 
tempPath = getTempPath(path);\n\n // Ensure parent directory exists\n const dir = pathMod.dirname(path);\n fsSync.mkdirSync(dir, { recursive: true });\n\n try {\n fsSync.writeFileSync(tempPath, content, \"utf-8\");\n fsSync.renameSync(tempPath, path);\n } catch (error) {\n // Clean up temp file on failure\n try {\n fsSync.unlinkSync(tempPath);\n } catch {\n // Ignore cleanup errors\n }\n throw error;\n }\n },\n\n async unlink(path) {\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.unlink(path);\n } catch (error) {\n // Ignore ENOENT (file not found)\n if ((error as NodeJS.ErrnoException).code !== \"ENOENT\") {\n throw error;\n }\n }\n },\n\n unlinkSync(path) {\n const fsSync = getNodeFSSync();\n try {\n fsSync.unlinkSync(path);\n } catch (error) {\n // Ignore ENOENT (file not found)\n if ((error as NodeJS.ErrnoException).code !== \"ENOENT\") {\n throw error;\n }\n }\n },\n\n readFileSync(path) {\n const fsSync = getNodeFSSync();\n return fsSync.readFileSync(path, \"utf-8\");\n },\n };\n }\n\n // Node.js implementation\n return {\n async readFile(path) {\n const nodeFS = await getNodeFS();\n return await nodeFS.readFile(path, \"utf-8\");\n },\n\n async writeFile(path, content) {\n const nodeFS = await getNodeFS();\n // Auto-create parent directories like Bun.write does\n const pathModule = await import(\"node:path\");\n const dir = pathModule.dirname(path);\n await nodeFS.mkdir(dir, { recursive: true });\n await nodeFS.writeFile(path, content, \"utf-8\");\n },\n\n async writeFileAtomic(path, content) {\n const nodeFS = await getNodeFS();\n const pathMod = await import(\"node:path\");\n const dir = pathMod.dirname(path);\n const tempPath = getTempPath(path);\n\n try {\n await nodeFS.mkdir(dir, { recursive: true });\n await nodeFS.writeFile(tempPath, content, \"utf-8\");\n await nodeFS.rename(tempPath, path);\n } catch (error) {\n // Clean up temp file on failure\n try {\n await nodeFS.unlink(tempPath);\n } catch {\n // Ignore cleanup errors\n }\n throw error;\n }\n },\n\n async exists(path) {\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.access(path);\n return true;\n } catch {\n return false;\n }\n },\n\n async stat(path) {\n const nodeFS = await getNodeFS();\n const stats = await nodeFS.stat(path);\n return { mtime: stats.mtime, size: stats.size };\n },\n\n async rename(oldPath, newPath) {\n const nodeFS = await getNodeFS();\n await nodeFS.rename(oldPath, newPath);\n },\n\n async mkdir(path, options) {\n const nodeFS = await getNodeFS();\n await nodeFS.mkdir(path, options);\n },\n\n writeFileSyncAtomic(path, content) {\n const fsSync = getNodeFSSync();\n const pathMod = getPathModule();\n const tempPath = getTempPath(path);\n\n // Ensure parent directory exists\n const dir = pathMod.dirname(path);\n fsSync.mkdirSync(dir, { recursive: true });\n\n try {\n fsSync.writeFileSync(tempPath, content, \"utf-8\");\n fsSync.renameSync(tempPath, path);\n } catch (error) {\n // Clean up temp file on failure\n try {\n fsSync.unlinkSync(tempPath);\n } catch {\n // Ignore cleanup errors\n }\n throw error;\n }\n },\n\n async unlink(path) {\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.unlink(path);\n } catch (error) {\n // Ignore ENOENT (file not found)\n if ((error as NodeJS.ErrnoException).code !== \"ENOENT\") {\n throw error;\n }\n }\n },\n\n unlinkSync(path) {\n const fsSync = getNodeFSSync();\n try {\n fsSync.unlinkSync(path);\n } catch (error) {\n // Ignore ENOENT (file not found)\n if ((error as NodeJS.ErrnoException).code !== \"ENOENT\") {\n throw error;\n }\n 
}\n },\n\n readFileSync(path) {\n const fsSync = getNodeFSSync();\n return fsSync.readFileSync(path, \"utf-8\");\n },\n };\n}\n\n// Singleton to avoid recreating instances\nlet fsInstance: PortableFS | null = null;\n\nexport function getPortableFS(): PortableFS {\n if (!fsInstance) {\n fsInstance = createPortableFS();\n }\n return fsInstance;\n}\n\n/**\n * Reset the filesystem singleton for testing\n * @internal\n */\nexport function __resetPortableFSForTests(): void {\n fsInstance = null;\n}\n","/**\n * Portable hashing API that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\nexport type HashAlgorithm = \"sha256\" | \"xxhash\";\n\nexport interface PortableHasher {\n hash(content: string, algorithm?: HashAlgorithm): string;\n}\n\n/**\n * Pads a hex string to the specified length\n */\nfunction padHex(hex: string, length: number): string {\n return hex.padStart(length, \"0\");\n}\n\nexport function createPortableHasher(): PortableHasher {\n if (runtime.isBun) {\n return {\n hash(content, algorithm = \"xxhash\") {\n if (algorithm === \"sha256\") {\n const hasher = new Bun.CryptoHasher(\"sha256\");\n hasher.update(content);\n return hasher.digest(\"hex\");\n }\n // xxhash - Bun.hash returns a number\n const hashNum = Bun.hash(content);\n // Convert to hex and pad to 16 chars for consistency\n return padHex(hashNum.toString(16), 16);\n },\n };\n }\n\n // Node.js implementation\n return {\n hash(content, algorithm = \"xxhash\") {\n if (algorithm === \"sha256\") {\n const crypto = require(\"node:crypto\");\n return crypto.createHash(\"sha256\").update(content).digest(\"hex\");\n }\n // xxhash fallback: use sha256 for now (can add xxhash package later if needed)\n // This ensures consistent behavior across runtimes\n const crypto = require(\"node:crypto\");\n const sha256Hash = crypto.createHash(\"sha256\").update(content).digest(\"hex\");\n // Take first 16 chars to match xxhash output length\n return sha256Hash.substring(0, 16);\n },\n };\n}\n\n// Singleton to avoid recreating instances\nlet hasherInstance: PortableHasher | null = null;\n\nexport function getPortableHasher(): PortableHasher {\n if (!hasherInstance) {\n hasherInstance = createPortableHasher();\n }\n return hasherInstance;\n}\n\n/**\n * Reset the hasher singleton for testing\n * @internal\n */\nexport function __resetPortableHasherForTests(): void {\n hasherInstance = null;\n}\n","/**\n * Portable ID generation that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\n/**\n * Generate a unique ID\n * Uses UUIDv7 on Bun (monotonic), falls back to randomUUID on Node.js\n */\nexport function generateId(): string {\n if (runtime.isBun && typeof Bun !== \"undefined\" && typeof Bun.randomUUIDv7 === \"function\") {\n return Bun.randomUUIDv7();\n }\n\n // Node.js fallback: use crypto.randomUUID\n const crypto = require(\"node:crypto\");\n return crypto.randomUUID();\n}\n","/**\n * Portable subprocess spawning that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\nexport interface SpawnOptions {\n cmd: string[];\n cwd?: string;\n env?: Record<string, string>;\n}\n\nexport interface SpawnResult {\n stdout: string;\n stderr: string;\n exitCode: number;\n}\n\nexport async function spawn(options: SpawnOptions): Promise<SpawnResult> {\n if (runtime.isBun) {\n const proc = Bun.spawn(options.cmd, {\n cwd: options.cwd,\n env: options.env,\n stdout: \"pipe\",\n stderr: \"pipe\",\n });\n\n const [stdout, stderr] = await Promise.all([new 
Response(proc.stdout).text(), new Response(proc.stderr).text()]);\n\n const exitCode = await proc.exited;\n\n return { stdout, stderr, exitCode };\n }\n\n // Node.js implementation\n const { execFile } = await import(\"node:child_process\");\n const { promisify } = await import(\"node:util\");\n const execFilePromise = promisify(execFile);\n\n const [command, ...args] = options.cmd;\n if (!command) {\n return {\n stdout: \"\",\n stderr: \"Error: No command provided\",\n exitCode: 1,\n };\n }\n\n try {\n const execOptions: {\n cwd?: string;\n env?: NodeJS.ProcessEnv;\n encoding: BufferEncoding;\n } = {\n encoding: \"utf-8\",\n };\n\n if (options.cwd) {\n execOptions.cwd = options.cwd;\n }\n if (options.env) {\n execOptions.env = options.env as NodeJS.ProcessEnv;\n }\n\n const { stdout, stderr } = await execFilePromise(command, args, execOptions);\n return {\n stdout: stdout || \"\",\n stderr: stderr || \"\",\n exitCode: 0,\n };\n } catch (error: unknown) {\n const err = error as {\n stdout?: string;\n stderr?: string;\n code?: number;\n };\n return {\n stdout: err.stdout || \"\",\n stderr: err.stderr || \"\",\n exitCode: err.code || 1,\n };\n }\n}\n"],"mappings":";;;;;;;;;;AAIA,MAAa,UAAU;CACrB,OAAO,OAAO,QAAQ;CACtB,QAAQ,OAAO,YAAY,eAAe,OAAO,QAAQ;CACzD,mBAAmB,OAAO,WAAW,eAAe,OAAO,OAAO,WAAW;CAC9E;;;;AAKD,SAAgB,KAAQ,IAAsB;CAC5C,IAAIA;CACJ,IAAI,SAAS;AAEb,cAAa;AACX,MAAI,CAAC,QAAQ;AACX,YAAS,IAAI;AACb,YAAS;;AAEX,SAAO;;;;;;;AAQX,SAAgB,wBAA8B;;;;;;;AC8B9C,MAAM,YAAY,KAAK,YAAiC;CACtD,MAAM,KAAK,MAAM,OAAO;AACxB,QAAO;EACP;AAGF,IAAIC,aAA4B;AAChC,MAAM,sBAA8B;AAClC,KAAI,CAAC,YAAY;AAGf,yBAAqB,UAAU;;AAEjC,QAAO;;AAIT,IAAIC,aAA2D;AAC/D,MAAM,sBAA6D;AACjE,KAAI,CAAC,YAAY;AAEf,yBAAqB,YAAY;;AAEnC,QAAO;;;;;AAMT,MAAM,eAAe,eAA+B;AAClD,QAAO,GAAG,WAAW,GAAG,QAAQ,IAAI,GAAG,KAAK,KAAK,CAAC;;AAGpD,SAAgB,mBAA+B;AAC7C,KAAI,QAAQ,OAAO;AACjB,SAAO;GACL,MAAM,SAAS,MAAM;IACnB,MAAM,OAAO,IAAI,KAAK,KAAK;AAC3B,WAAO,MAAM,KAAK,MAAM;;GAG1B,MAAM,UAAU,MAAM,SAAS;AAE7B,UAAM,IAAI,MAAM,MAAM,QAAQ;;GAGhC,MAAM,gBAAgB,MAAM,SAAS;IACnC,MAAM,WAAW,YAAY,KAAK;AAClC,QAAI;AACF,WAAM,IAAI,MAAM,UAAU,QAAQ;KAClC,MAAM,SAAS,MAAM,WAAW;AAChC,WAAM,OAAO,OAAO,UAAU,KAAK;aAC5B,OAAO;AAEd,SAAI;MACF,MAAM,SAAS,MAAM,WAAW;AAChC,YAAM,OAAO,OAAO,SAAS;aACvB;AAGR,WAAM;;;GAIV,MAAM,OAAO,MAAM;IAEjB,MAAM,SAAS,MAAM,WAAW;AAChC,QAAI;AACF,WAAM,OAAO,KAAK,KAAK;AACvB,YAAO;YACD;AACN,YAAO;;;GAIX,MAAM,KAAK,MAAM;IACf,MAAM,OAAO,IAAI,KAAK,KAAK;IAC3B,MAAM,OAAO,KAAK;IAElB,MAAM,SAAS,MAAM,WAAW;IAChC,MAAM,EAAE,UAAU,MAAM,OAAO,KAAK,KAAK;AACzC,WAAO;KAAE;KAAO;KAAM;;GAGxB,MAAM,OAAO,SAAS,SAAS;IAC7B,MAAM,SAAS,MAAM,WAAW;AAChC,UAAM,OAAO,OAAO,SAAS,QAAQ;;GAGvC,MAAM,MAAM,MAAM,SAAS;IACzB,MAAM,SAAS,MAAM,WAAW;AAChC,UAAM,OAAO,MAAM,MAAM,QAAQ;;GAGnC,oBAAoB,MAAM,SAAS;IACjC,MAAM,SAAS,eAAe;IAC9B,MAAM,UAAU,eAAe;IAC/B,MAAM,WAAW,YAAY,KAAK;IAGlC,MAAM,MAAM,QAAQ,QAAQ,KAAK;AACjC,WAAO,UAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAE1C,QAAI;AACF,YAAO,cAAc,UAAU,SAAS,QAAQ;AAChD,YAAO,WAAW,UAAU,KAAK;aAC1B,OAAO;AAEd,SAAI;AACF,aAAO,WAAW,SAAS;aACrB;AAGR,WAAM;;;GAIV,MAAM,OAAO,MAAM;IACjB,MAAM,SAAS,MAAM,WAAW;AAChC,QAAI;AACF,WAAM,OAAO,OAAO,KAAK;aAClB,OAAO;AAEd,SAAK,MAAgC,SAAS,UAAU;AACtD,YAAM;;;;GAKZ,WAAW,MAAM;IACf,MAAM,SAAS,eAAe;AAC9B,QAAI;AACF,YAAO,WAAW,KAAK;aAChB,OAAO;AAEd,SAAK,MAAgC,SAAS,UAAU;AACtD,YAAM;;;;GAKZ,aAAa,MAAM;IACjB,MAAM,SAAS,eAAe;AAC9B,WAAO,OAAO,aAAa,MAAM,QAAQ;;GAE5C;;AAIH,QAAO;EACL,MAAM,SAAS,MAAM;GACnB,MAAM,SAAS,MAAM,WAAW;AAChC,UAAO,MAAM,OAAO,SAAS,MAAM,QAAQ;;EAG7C,MAAM,UAAU,MAAM,SAAS;GAC7B,MAAM,SAAS,MAAM,WAAW;GAEhC,MAAMC,eAAa,MAAM,OAAO;GAChC,MAAM,MAAMA,aAAW,QAAQ,KAAK;AACpC,SAAM,OAAO,MAAM,KAAK,EAAE,WAAW,MAAM,CAAC;AAC5C,SAAM,OAAO,UAAU,MAAM,SAAS,QAAQ;
;EAGhD,MAAM,gBAAgB,MAAM,SAAS;GACnC,MAAM,SAAS,MAAM,WAAW;GAChC,MAAM,UAAU,MAAM,OAAO;GAC7B,MAAM,MAAM,QAAQ,QAAQ,KAAK;GACjC,MAAM,WAAW,YAAY,KAAK;AAElC,OAAI;AACF,UAAM,OAAO,MAAM,KAAK,EAAE,WAAW,MAAM,CAAC;AAC5C,UAAM,OAAO,UAAU,UAAU,SAAS,QAAQ;AAClD,UAAM,OAAO,OAAO,UAAU,KAAK;YAC5B,OAAO;AAEd,QAAI;AACF,WAAM,OAAO,OAAO,SAAS;YACvB;AAGR,UAAM;;;EAIV,MAAM,OAAO,MAAM;GACjB,MAAM,SAAS,MAAM,WAAW;AAChC,OAAI;AACF,UAAM,OAAO,OAAO,KAAK;AACzB,WAAO;WACD;AACN,WAAO;;;EAIX,MAAM,KAAK,MAAM;GACf,MAAM,SAAS,MAAM,WAAW;GAChC,MAAM,QAAQ,MAAM,OAAO,KAAK,KAAK;AACrC,UAAO;IAAE,OAAO,MAAM;IAAO,MAAM,MAAM;IAAM;;EAGjD,MAAM,OAAO,SAAS,SAAS;GAC7B,MAAM,SAAS,MAAM,WAAW;AAChC,SAAM,OAAO,OAAO,SAAS,QAAQ;;EAGvC,MAAM,MAAM,MAAM,SAAS;GACzB,MAAM,SAAS,MAAM,WAAW;AAChC,SAAM,OAAO,MAAM,MAAM,QAAQ;;EAGnC,oBAAoB,MAAM,SAAS;GACjC,MAAM,SAAS,eAAe;GAC9B,MAAM,UAAU,eAAe;GAC/B,MAAM,WAAW,YAAY,KAAK;GAGlC,MAAM,MAAM,QAAQ,QAAQ,KAAK;AACjC,UAAO,UAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAE1C,OAAI;AACF,WAAO,cAAc,UAAU,SAAS,QAAQ;AAChD,WAAO,WAAW,UAAU,KAAK;YAC1B,OAAO;AAEd,QAAI;AACF,YAAO,WAAW,SAAS;YACrB;AAGR,UAAM;;;EAIV,MAAM,OAAO,MAAM;GACjB,MAAM,SAAS,MAAM,WAAW;AAChC,OAAI;AACF,UAAM,OAAO,OAAO,KAAK;YAClB,OAAO;AAEd,QAAK,MAAgC,SAAS,UAAU;AACtD,WAAM;;;;EAKZ,WAAW,MAAM;GACf,MAAM,SAAS,eAAe;AAC9B,OAAI;AACF,WAAO,WAAW,KAAK;YAChB,OAAO;AAEd,QAAK,MAAgC,SAAS,UAAU;AACtD,WAAM;;;;EAKZ,aAAa,MAAM;GACjB,MAAM,SAAS,eAAe;AAC9B,UAAO,OAAO,aAAa,MAAM,QAAQ;;EAE5C;;AAIH,IAAIC,aAAgC;AAEpC,SAAgB,gBAA4B;AAC1C,KAAI,CAAC,YAAY;AACf,eAAa,kBAAkB;;AAEjC,QAAO;;;;;;AAOT,SAAgB,4BAAkC;AAChD,cAAa;;;;;;;;;;;ACrUf,SAAS,OAAO,KAAa,QAAwB;AACnD,QAAO,IAAI,SAAS,QAAQ,IAAI;;AAGlC,SAAgB,uBAAuC;AACrD,KAAI,QAAQ,OAAO;AACjB,SAAO,EACL,KAAK,SAAS,YAAY,UAAU;AAClC,OAAI,cAAc,UAAU;IAC1B,MAAM,SAAS,IAAI,IAAI,aAAa,SAAS;AAC7C,WAAO,OAAO,QAAQ;AACtB,WAAO,OAAO,OAAO,MAAM;;GAG7B,MAAM,UAAU,IAAI,KAAK,QAAQ;AAEjC,UAAO,OAAO,QAAQ,SAAS,GAAG,EAAE,GAAG;KAE1C;;AAIH,QAAO,EACL,KAAK,SAAS,YAAY,UAAU;AAClC,MAAI,cAAc,UAAU;GAC1B,MAAMC,qBAAiB,cAAc;AACrC,UAAOA,SAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;;EAIlE,MAAMA,qBAAiB,cAAc;EACrC,MAAM,aAAaA,SAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;AAE5E,SAAO,WAAW,UAAU,GAAG,GAAG;IAErC;;AAIH,IAAIC,iBAAwC;AAE5C,SAAgB,oBAAoC;AAClD,KAAI,CAAC,gBAAgB;AACnB,mBAAiB,sBAAsB;;AAEzC,QAAO;;;;;;AAOT,SAAgB,gCAAsC;AACpD,kBAAiB;;;;;;;;;;;;AC1DnB,SAAgB,aAAqB;AACnC,KAAI,QAAQ,SAAS,OAAO,QAAQ,eAAe,OAAO,IAAI,iBAAiB,YAAY;AACzF,SAAO,IAAI,cAAc;;CAI3B,MAAMC,qBAAiB,cAAc;AACrC,QAAOA,SAAO,YAAY;;;;;;;;ACC5B,eAAsB,MAAM,SAA6C;AACvE,KAAI,QAAQ,OAAO;EACjB,MAAM,OAAO,IAAI,MAAM,QAAQ,KAAK;GAClC,KAAK,QAAQ;GACb,KAAK,QAAQ;GACb,QAAQ;GACR,QAAQ;GACT,CAAC;EAEF,MAAM,CAAC,QAAQ,UAAU,MAAM,QAAQ,IAAI,CAAC,IAAI,SAAS,KAAK,OAAO,CAAC,MAAM,EAAE,IAAI,SAAS,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC;EAEhH,MAAM,WAAW,MAAM,KAAK;AAE5B,SAAO;GAAE;GAAQ;GAAQ;GAAU;;CAIrC,MAAM,EAAE,aAAa,MAAM,OAAO;CAClC,MAAM,EAAE,cAAc,MAAM,OAAO;CACnC,MAAM,kBAAkB,UAAU,SAAS;CAE3C,MAAM,CAAC,SAAS,GAAG,QAAQ,QAAQ;AACnC,KAAI,CAAC,SAAS;AACZ,SAAO;GACL,QAAQ;GACR,QAAQ;GACR,UAAU;GACX;;AAGH,KAAI;EACF,MAAMC,cAIF,EACF,UAAU,SACX;AAED,MAAI,QAAQ,KAAK;AACf,eAAY,MAAM,QAAQ;;AAE5B,MAAI,QAAQ,KAAK;AACf,eAAY,MAAM,QAAQ;;EAG5B,MAAM,EAAE,QAAQ,WAAW,MAAM,gBAAgB,SAAS,MAAM,YAAY;AAC5E,SAAO;GACL,QAAQ,UAAU;GAClB,QAAQ,UAAU;GAClB,UAAU;GACX;UACMC,OAAgB;EACvB,MAAM,MAAM;AAKZ,SAAO;GACL,QAAQ,IAAI,UAAU;GACtB,QAAQ,IAAI,UAAU;GACtB,UAAU,IAAI,QAAQ;GACvB"}
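Both runtime branches of the ESM build gain writeFileAtomic, which writes to a `<target>.<pid>.<timestamp>.tmp` sibling, renames it over the target, and removes the temp file if anything fails. For readers who want the pattern without the portable wrapper, here is a small stand-alone Node.js sketch of the same idea (writeAtomically is an illustrative name, not part of the package):

    import { mkdir, rename, unlink, writeFile } from "node:fs/promises";
    import { dirname } from "node:path";

    // Temp-file + rename pattern, mirroring the writeFileAtomic hunks above.
    async function writeAtomically(target: string, content: string): Promise<void> {
      const tempPath = `${target}.${process.pid}.${Date.now()}.tmp`;
      try {
        await mkdir(dirname(target), { recursive: true });
        await writeFile(tempPath, content, "utf-8");
        await rename(tempPath, target); // atomic replace on the same filesystem
      } catch (error) {
        await unlink(tempPath).catch(() => {}); // best-effort cleanup of the temp file
        throw error;
      }
    }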
@@ -37,6 +37,26 @@ const getNodeFS = once(async () => {
37
37
  const fs = await import("node:fs/promises");
38
38
  return fs;
39
39
  });
40
+ let nodeFsSync = null;
41
+ const getNodeFSSync = () => {
42
+ if (!nodeFsSync) {
43
+ nodeFsSync = require("node:fs");
44
+ }
45
+ return nodeFsSync;
46
+ };
47
+ let pathModule = null;
48
+ const getPathModule = () => {
49
+ if (!pathModule) {
50
+ pathModule = require("node:path");
51
+ }
52
+ return pathModule;
53
+ };
54
+ /**
55
+ * Generate a unique temp file path for atomic write.
56
+ */
57
+ const getTempPath = (targetPath) => {
58
+ return `${targetPath}.${process.pid}.${Date.now()}.tmp`;
59
+ };
40
60
  function createPortableFS() {
41
61
  if (runtime.isBun) {
42
62
  return {
@@ -47,6 +67,20 @@ function createPortableFS() {
47
67
  async writeFile(path, content) {
48
68
  await Bun.write(path, content);
49
69
  },
70
+ async writeFileAtomic(path, content) {
71
+ const tempPath = getTempPath(path);
72
+ try {
73
+ await Bun.write(tempPath, content);
74
+ const nodeFS = await getNodeFS();
75
+ await nodeFS.rename(tempPath, path);
76
+ } catch (error) {
77
+ try {
78
+ const nodeFS = await getNodeFS();
79
+ await nodeFS.unlink(tempPath);
80
+ } catch {}
81
+ throw error;
82
+ }
83
+ },
50
84
  async exists(path) {
51
85
  const nodeFS = await getNodeFS();
52
86
  try {
@@ -73,6 +107,46 @@ function createPortableFS() {
73
107
  async mkdir(path, options) {
74
108
  const nodeFS = await getNodeFS();
75
109
  await nodeFS.mkdir(path, options);
110
+ },
111
+ writeFileSyncAtomic(path, content) {
112
+ const fsSync = getNodeFSSync();
113
+ const pathMod = getPathModule();
114
+ const tempPath = getTempPath(path);
115
+ const dir = pathMod.dirname(path);
116
+ fsSync.mkdirSync(dir, { recursive: true });
117
+ try {
118
+ fsSync.writeFileSync(tempPath, content, "utf-8");
119
+ fsSync.renameSync(tempPath, path);
120
+ } catch (error) {
121
+ try {
122
+ fsSync.unlinkSync(tempPath);
123
+ } catch {}
124
+ throw error;
125
+ }
126
+ },
127
+ async unlink(path) {
128
+ const nodeFS = await getNodeFS();
129
+ try {
130
+ await nodeFS.unlink(path);
131
+ } catch (error) {
132
+ if (error.code !== "ENOENT") {
133
+ throw error;
134
+ }
135
+ }
136
+ },
137
+ unlinkSync(path) {
138
+ const fsSync = getNodeFSSync();
139
+ try {
140
+ fsSync.unlinkSync(path);
141
+ } catch (error) {
142
+ if (error.code !== "ENOENT") {
143
+ throw error;
144
+ }
145
+ }
146
+ },
147
+ readFileSync(path) {
148
+ const fsSync = getNodeFSSync();
149
+ return fsSync.readFileSync(path, "utf-8");
76
150
  }
77
151
  };
78
152
  }
@@ -83,11 +157,27 @@ function createPortableFS() {
83
157
  },
84
158
  async writeFile(path, content) {
85
159
  const nodeFS = await getNodeFS();
86
- const pathModule = await import("node:path");
87
- const dir = pathModule.dirname(path);
160
+ const pathModule$1 = await import("node:path");
161
+ const dir = pathModule$1.dirname(path);
88
162
  await nodeFS.mkdir(dir, { recursive: true });
89
163
  await nodeFS.writeFile(path, content, "utf-8");
90
164
  },
165
+ async writeFileAtomic(path, content) {
166
+ const nodeFS = await getNodeFS();
167
+ const pathMod = await import("node:path");
168
+ const dir = pathMod.dirname(path);
169
+ const tempPath = getTempPath(path);
170
+ try {
171
+ await nodeFS.mkdir(dir, { recursive: true });
172
+ await nodeFS.writeFile(tempPath, content, "utf-8");
173
+ await nodeFS.rename(tempPath, path);
174
+ } catch (error) {
175
+ try {
176
+ await nodeFS.unlink(tempPath);
177
+ } catch {}
178
+ throw error;
179
+ }
180
+ },
91
181
  async exists(path) {
92
182
  const nodeFS = await getNodeFS();
93
183
  try {
@@ -112,6 +202,46 @@ function createPortableFS() {
112
202
  async mkdir(path, options) {
113
203
  const nodeFS = await getNodeFS();
114
204
  await nodeFS.mkdir(path, options);
205
+ },
206
+ writeFileSyncAtomic(path, content) {
207
+ const fsSync = getNodeFSSync();
208
+ const pathMod = getPathModule();
209
+ const tempPath = getTempPath(path);
210
+ const dir = pathMod.dirname(path);
211
+ fsSync.mkdirSync(dir, { recursive: true });
212
+ try {
213
+ fsSync.writeFileSync(tempPath, content, "utf-8");
214
+ fsSync.renameSync(tempPath, path);
215
+ } catch (error) {
216
+ try {
217
+ fsSync.unlinkSync(tempPath);
218
+ } catch {}
219
+ throw error;
220
+ }
221
+ },
222
+ async unlink(path) {
223
+ const nodeFS = await getNodeFS();
224
+ try {
225
+ await nodeFS.unlink(path);
226
+ } catch (error) {
227
+ if (error.code !== "ENOENT") {
228
+ throw error;
229
+ }
230
+ }
231
+ },
232
+ unlinkSync(path) {
233
+ const fsSync = getNodeFSSync();
234
+ try {
235
+ fsSync.unlinkSync(path);
236
+ } catch (error) {
237
+ if (error.code !== "ENOENT") {
238
+ throw error;
239
+ }
240
+ }
241
+ },
242
+ readFileSync(path) {
243
+ const fsSync = getNodeFSSync();
244
+ return fsSync.readFileSync(path, "utf-8");
115
245
  }
116
246
  };
117
247
  }
@@ -318,4 +448,4 @@ Object.defineProperty(exports, 'spawn', {
318
448
  return spawn;
319
449
  }
320
450
  });
321
- //# sourceMappingURL=portable-C_7gJWmz.cjs.map
451
+ //# sourceMappingURL=portable-cJqkfeHw.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"portable-cJqkfeHw.cjs","names":["result: T | undefined","nodeFsSync: FSSync | null","pathModule: { dirname: (path: string) => string } | null","pathModule","fsInstance: PortableFS | null","crypto","hasherInstance: PortableHasher | null","crypto","execOptions: {\n cwd?: string;\n env?: NodeJS.ProcessEnv;\n encoding: BufferEncoding;\n }","error: unknown"],"sources":["../src/portable/runtime.ts","../src/portable/fs.ts","../src/portable/hash.ts","../src/portable/id.ts","../src/portable/spawn.ts"],"sourcesContent":["/**\n * Runtime detection utilities for portable API implementation\n */\n\nexport const runtime = {\n isBun: typeof Bun !== \"undefined\",\n isNode: typeof process !== \"undefined\" && typeof Bun === \"undefined\",\n supportsWebCrypto: typeof crypto !== \"undefined\" && typeof crypto.subtle !== \"undefined\",\n} as const;\n\n/**\n * Helper to cache module imports to avoid repeated dynamic imports\n */\nexport function once<T>(fn: () => T): () => T {\n let result: T | undefined;\n let called = false;\n\n return () => {\n if (!called) {\n result = fn();\n called = true;\n }\n return result as T;\n };\n}\n\n/**\n * Reset runtime state for testing purposes only\n * @internal\n */\nexport function resetPortableForTests(): void {\n // This is a marker function that portable modules can use\n // to reset their singleton state in tests\n}\n","/**\n * Portable filesystem API that works on both Bun and Node.js\n */\n\nimport { once, runtime } from \"./runtime\";\n\nexport interface PortableFS {\n readFile(path: string): Promise<string>;\n writeFile(path: string, content: string): Promise<void>;\n /**\n * Write a file atomically using temp file + rename pattern.\n * This prevents partial/corrupt writes on crash.\n */\n writeFileAtomic(path: string, content: string): Promise<void>;\n exists(path: string): Promise<boolean>;\n stat(path: string): Promise<{ mtime: Date; size: number }>;\n rename(oldPath: string, newPath: string): Promise<void>;\n mkdir(path: string, options?: { recursive?: boolean }): Promise<void>;\n /**\n * Write a file synchronously and atomically using temp file + rename pattern.\n * Safe for use in beforeExit handlers.\n */\n writeFileSyncAtomic(path: string, content: string): void;\n /**\n * Remove a file. Does not throw if file doesn't exist.\n */\n unlink(path: string): Promise<void>;\n /**\n * Remove a file synchronously. 
Does not throw if file doesn't exist.\n */\n unlinkSync(path: string): void;\n /**\n * Read a file synchronously.\n */\n readFileSync(path: string): string;\n}\n\ninterface FSPromises {\n readFile: (path: string, encoding: string) => Promise<string>;\n writeFile: (path: string, content: string, encoding: string) => Promise<void>;\n access: (path: string) => Promise<void>;\n stat: (path: string) => Promise<{\n mtime: Date;\n size: number;\n isDirectory: () => boolean;\n }>;\n rename: (oldPath: string, newPath: string) => Promise<void>;\n mkdir: (path: string, options?: { recursive?: boolean }) => Promise<void>;\n unlink: (path: string) => Promise<void>;\n}\n\ninterface FSSync {\n writeFileSync: (path: string, content: string, encoding: string) => void;\n renameSync: (oldPath: string, newPath: string) => void;\n unlinkSync: (path: string) => void;\n readFileSync: (path: string, encoding: string) => string;\n mkdirSync: (path: string, options?: { recursive?: boolean }) => void;\n}\n\n// Cache the fs/promises import\nconst getNodeFS = once(async (): Promise<FSPromises> => {\n const fs = await import(\"node:fs/promises\");\n return fs as FSPromises;\n});\n\n// Cache the sync fs import\nlet nodeFsSync: FSSync | null = null;\nconst getNodeFSSync = (): FSSync => {\n if (!nodeFsSync) {\n // Use require for sync loading\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n nodeFsSync = require(\"node:fs\") as FSSync;\n }\n return nodeFsSync;\n};\n\n// Cache path module\nlet pathModule: { dirname: (path: string) => string } | null = null;\nconst getPathModule = (): { dirname: (path: string) => string } => {\n if (!pathModule) {\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n pathModule = require(\"node:path\") as { dirname: (path: string) => string };\n }\n return pathModule;\n};\n\n/**\n * Generate a unique temp file path for atomic write.\n */\nconst getTempPath = (targetPath: string): string => {\n return `${targetPath}.${process.pid}.${Date.now()}.tmp`;\n};\n\nexport function createPortableFS(): PortableFS {\n if (runtime.isBun) {\n return {\n async readFile(path) {\n const file = Bun.file(path);\n return await file.text();\n },\n\n async writeFile(path, content) {\n // Bun.write auto-creates parent directories\n await Bun.write(path, content);\n },\n\n async writeFileAtomic(path, content) {\n const tempPath = getTempPath(path);\n try {\n await Bun.write(tempPath, content);\n const nodeFS = await getNodeFS();\n await nodeFS.rename(tempPath, path);\n } catch (error) {\n // Clean up temp file on failure\n try {\n const nodeFS = await getNodeFS();\n await nodeFS.unlink(tempPath);\n } catch {\n // Ignore cleanup errors\n }\n throw error;\n }\n },\n\n async exists(path) {\n // Bun.file().exists() only works for files, use fs.stat for both files and dirs\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.stat(path);\n return true;\n } catch {\n return false;\n }\n },\n\n async stat(path) {\n const file = Bun.file(path);\n const size = file.size;\n // Bun doesn't expose mtime directly, use Node fs.stat\n const nodeFS = await getNodeFS();\n const { mtime } = await nodeFS.stat(path);\n return { mtime, size };\n },\n\n async rename(oldPath, newPath) {\n const nodeFS = await getNodeFS();\n await nodeFS.rename(oldPath, newPath);\n },\n\n async mkdir(path, options) {\n const nodeFS = await getNodeFS();\n await nodeFS.mkdir(path, options);\n },\n\n writeFileSyncAtomic(path, content) {\n const fsSync = getNodeFSSync();\n const pathMod = getPathModule();\n const 
tempPath = getTempPath(path);\n\n // Ensure parent directory exists\n const dir = pathMod.dirname(path);\n fsSync.mkdirSync(dir, { recursive: true });\n\n try {\n fsSync.writeFileSync(tempPath, content, \"utf-8\");\n fsSync.renameSync(tempPath, path);\n } catch (error) {\n // Clean up temp file on failure\n try {\n fsSync.unlinkSync(tempPath);\n } catch {\n // Ignore cleanup errors\n }\n throw error;\n }\n },\n\n async unlink(path) {\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.unlink(path);\n } catch (error) {\n // Ignore ENOENT (file not found)\n if ((error as NodeJS.ErrnoException).code !== \"ENOENT\") {\n throw error;\n }\n }\n },\n\n unlinkSync(path) {\n const fsSync = getNodeFSSync();\n try {\n fsSync.unlinkSync(path);\n } catch (error) {\n // Ignore ENOENT (file not found)\n if ((error as NodeJS.ErrnoException).code !== \"ENOENT\") {\n throw error;\n }\n }\n },\n\n readFileSync(path) {\n const fsSync = getNodeFSSync();\n return fsSync.readFileSync(path, \"utf-8\");\n },\n };\n }\n\n // Node.js implementation\n return {\n async readFile(path) {\n const nodeFS = await getNodeFS();\n return await nodeFS.readFile(path, \"utf-8\");\n },\n\n async writeFile(path, content) {\n const nodeFS = await getNodeFS();\n // Auto-create parent directories like Bun.write does\n const pathModule = await import(\"node:path\");\n const dir = pathModule.dirname(path);\n await nodeFS.mkdir(dir, { recursive: true });\n await nodeFS.writeFile(path, content, \"utf-8\");\n },\n\n async writeFileAtomic(path, content) {\n const nodeFS = await getNodeFS();\n const pathMod = await import(\"node:path\");\n const dir = pathMod.dirname(path);\n const tempPath = getTempPath(path);\n\n try {\n await nodeFS.mkdir(dir, { recursive: true });\n await nodeFS.writeFile(tempPath, content, \"utf-8\");\n await nodeFS.rename(tempPath, path);\n } catch (error) {\n // Clean up temp file on failure\n try {\n await nodeFS.unlink(tempPath);\n } catch {\n // Ignore cleanup errors\n }\n throw error;\n }\n },\n\n async exists(path) {\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.access(path);\n return true;\n } catch {\n return false;\n }\n },\n\n async stat(path) {\n const nodeFS = await getNodeFS();\n const stats = await nodeFS.stat(path);\n return { mtime: stats.mtime, size: stats.size };\n },\n\n async rename(oldPath, newPath) {\n const nodeFS = await getNodeFS();\n await nodeFS.rename(oldPath, newPath);\n },\n\n async mkdir(path, options) {\n const nodeFS = await getNodeFS();\n await nodeFS.mkdir(path, options);\n },\n\n writeFileSyncAtomic(path, content) {\n const fsSync = getNodeFSSync();\n const pathMod = getPathModule();\n const tempPath = getTempPath(path);\n\n // Ensure parent directory exists\n const dir = pathMod.dirname(path);\n fsSync.mkdirSync(dir, { recursive: true });\n\n try {\n fsSync.writeFileSync(tempPath, content, \"utf-8\");\n fsSync.renameSync(tempPath, path);\n } catch (error) {\n // Clean up temp file on failure\n try {\n fsSync.unlinkSync(tempPath);\n } catch {\n // Ignore cleanup errors\n }\n throw error;\n }\n },\n\n async unlink(path) {\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.unlink(path);\n } catch (error) {\n // Ignore ENOENT (file not found)\n if ((error as NodeJS.ErrnoException).code !== \"ENOENT\") {\n throw error;\n }\n }\n },\n\n unlinkSync(path) {\n const fsSync = getNodeFSSync();\n try {\n fsSync.unlinkSync(path);\n } catch (error) {\n // Ignore ENOENT (file not found)\n if ((error as NodeJS.ErrnoException).code !== \"ENOENT\") {\n throw error;\n }\n 
}\n },\n\n readFileSync(path) {\n const fsSync = getNodeFSSync();\n return fsSync.readFileSync(path, \"utf-8\");\n },\n };\n}\n\n// Singleton to avoid recreating instances\nlet fsInstance: PortableFS | null = null;\n\nexport function getPortableFS(): PortableFS {\n if (!fsInstance) {\n fsInstance = createPortableFS();\n }\n return fsInstance;\n}\n\n/**\n * Reset the filesystem singleton for testing\n * @internal\n */\nexport function __resetPortableFSForTests(): void {\n fsInstance = null;\n}\n","/**\n * Portable hashing API that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\nexport type HashAlgorithm = \"sha256\" | \"xxhash\";\n\nexport interface PortableHasher {\n hash(content: string, algorithm?: HashAlgorithm): string;\n}\n\n/**\n * Pads a hex string to the specified length\n */\nfunction padHex(hex: string, length: number): string {\n return hex.padStart(length, \"0\");\n}\n\nexport function createPortableHasher(): PortableHasher {\n if (runtime.isBun) {\n return {\n hash(content, algorithm = \"xxhash\") {\n if (algorithm === \"sha256\") {\n const hasher = new Bun.CryptoHasher(\"sha256\");\n hasher.update(content);\n return hasher.digest(\"hex\");\n }\n // xxhash - Bun.hash returns a number\n const hashNum = Bun.hash(content);\n // Convert to hex and pad to 16 chars for consistency\n return padHex(hashNum.toString(16), 16);\n },\n };\n }\n\n // Node.js implementation\n return {\n hash(content, algorithm = \"xxhash\") {\n if (algorithm === \"sha256\") {\n const crypto = require(\"node:crypto\");\n return crypto.createHash(\"sha256\").update(content).digest(\"hex\");\n }\n // xxhash fallback: use sha256 for now (can add xxhash package later if needed)\n // This ensures consistent behavior across runtimes\n const crypto = require(\"node:crypto\");\n const sha256Hash = crypto.createHash(\"sha256\").update(content).digest(\"hex\");\n // Take first 16 chars to match xxhash output length\n return sha256Hash.substring(0, 16);\n },\n };\n}\n\n// Singleton to avoid recreating instances\nlet hasherInstance: PortableHasher | null = null;\n\nexport function getPortableHasher(): PortableHasher {\n if (!hasherInstance) {\n hasherInstance = createPortableHasher();\n }\n return hasherInstance;\n}\n\n/**\n * Reset the hasher singleton for testing\n * @internal\n */\nexport function __resetPortableHasherForTests(): void {\n hasherInstance = null;\n}\n","/**\n * Portable ID generation that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\n/**\n * Generate a unique ID\n * Uses UUIDv7 on Bun (monotonic), falls back to randomUUID on Node.js\n */\nexport function generateId(): string {\n if (runtime.isBun && typeof Bun !== \"undefined\" && typeof Bun.randomUUIDv7 === \"function\") {\n return Bun.randomUUIDv7();\n }\n\n // Node.js fallback: use crypto.randomUUID\n const crypto = require(\"node:crypto\");\n return crypto.randomUUID();\n}\n","/**\n * Portable subprocess spawning that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\nexport interface SpawnOptions {\n cmd: string[];\n cwd?: string;\n env?: Record<string, string>;\n}\n\nexport interface SpawnResult {\n stdout: string;\n stderr: string;\n exitCode: number;\n}\n\nexport async function spawn(options: SpawnOptions): Promise<SpawnResult> {\n if (runtime.isBun) {\n const proc = Bun.spawn(options.cmd, {\n cwd: options.cwd,\n env: options.env,\n stdout: \"pipe\",\n stderr: \"pipe\",\n });\n\n const [stdout, stderr] = await Promise.all([new 
Response(proc.stdout).text(), new Response(proc.stderr).text()]);\n\n const exitCode = await proc.exited;\n\n return { stdout, stderr, exitCode };\n }\n\n // Node.js implementation\n const { execFile } = await import(\"node:child_process\");\n const { promisify } = await import(\"node:util\");\n const execFilePromise = promisify(execFile);\n\n const [command, ...args] = options.cmd;\n if (!command) {\n return {\n stdout: \"\",\n stderr: \"Error: No command provided\",\n exitCode: 1,\n };\n }\n\n try {\n const execOptions: {\n cwd?: string;\n env?: NodeJS.ProcessEnv;\n encoding: BufferEncoding;\n } = {\n encoding: \"utf-8\",\n };\n\n if (options.cwd) {\n execOptions.cwd = options.cwd;\n }\n if (options.env) {\n execOptions.env = options.env as NodeJS.ProcessEnv;\n }\n\n const { stdout, stderr } = await execFilePromise(command, args, execOptions);\n return {\n stdout: stdout || \"\",\n stderr: stderr || \"\",\n exitCode: 0,\n };\n } catch (error: unknown) {\n const err = error as {\n stdout?: string;\n stderr?: string;\n code?: number;\n };\n return {\n stdout: err.stdout || \"\",\n stderr: err.stderr || \"\",\n exitCode: err.code || 1,\n };\n }\n}\n"],"mappings":";;;;;AAIA,MAAa,UAAU;CACrB,OAAO,OAAO,QAAQ;CACtB,QAAQ,OAAO,YAAY,eAAe,OAAO,QAAQ;CACzD,mBAAmB,OAAO,WAAW,eAAe,OAAO,OAAO,WAAW;CAC9E;;;;AAKD,SAAgB,KAAQ,IAAsB;CAC5C,IAAIA;CACJ,IAAI,SAAS;AAEb,cAAa;AACX,MAAI,CAAC,QAAQ;AACX,YAAS,IAAI;AACb,YAAS;;AAEX,SAAO;;;;;;;AAQX,SAAgB,wBAA8B;;;;;;;AC8B9C,MAAM,YAAY,KAAK,YAAiC;CACtD,MAAM,KAAK,MAAM,OAAO;AACxB,QAAO;EACP;AAGF,IAAIC,aAA4B;AAChC,MAAM,sBAA8B;AAClC,KAAI,CAAC,YAAY;AAGf,eAAa,QAAQ,UAAU;;AAEjC,QAAO;;AAIT,IAAIC,aAA2D;AAC/D,MAAM,sBAA6D;AACjE,KAAI,CAAC,YAAY;AAEf,eAAa,QAAQ,YAAY;;AAEnC,QAAO;;;;;AAMT,MAAM,eAAe,eAA+B;AAClD,QAAO,GAAG,WAAW,GAAG,QAAQ,IAAI,GAAG,KAAK,KAAK,CAAC;;AAGpD,SAAgB,mBAA+B;AAC7C,KAAI,QAAQ,OAAO;AACjB,SAAO;GACL,MAAM,SAAS,MAAM;IACnB,MAAM,OAAO,IAAI,KAAK,KAAK;AAC3B,WAAO,MAAM,KAAK,MAAM;;GAG1B,MAAM,UAAU,MAAM,SAAS;AAE7B,UAAM,IAAI,MAAM,MAAM,QAAQ;;GAGhC,MAAM,gBAAgB,MAAM,SAAS;IACnC,MAAM,WAAW,YAAY,KAAK;AAClC,QAAI;AACF,WAAM,IAAI,MAAM,UAAU,QAAQ;KAClC,MAAM,SAAS,MAAM,WAAW;AAChC,WAAM,OAAO,OAAO,UAAU,KAAK;aAC5B,OAAO;AAEd,SAAI;MACF,MAAM,SAAS,MAAM,WAAW;AAChC,YAAM,OAAO,OAAO,SAAS;aACvB;AAGR,WAAM;;;GAIV,MAAM,OAAO,MAAM;IAEjB,MAAM,SAAS,MAAM,WAAW;AAChC,QAAI;AACF,WAAM,OAAO,KAAK,KAAK;AACvB,YAAO;YACD;AACN,YAAO;;;GAIX,MAAM,KAAK,MAAM;IACf,MAAM,OAAO,IAAI,KAAK,KAAK;IAC3B,MAAM,OAAO,KAAK;IAElB,MAAM,SAAS,MAAM,WAAW;IAChC,MAAM,EAAE,UAAU,MAAM,OAAO,KAAK,KAAK;AACzC,WAAO;KAAE;KAAO;KAAM;;GAGxB,MAAM,OAAO,SAAS,SAAS;IAC7B,MAAM,SAAS,MAAM,WAAW;AAChC,UAAM,OAAO,OAAO,SAAS,QAAQ;;GAGvC,MAAM,MAAM,MAAM,SAAS;IACzB,MAAM,SAAS,MAAM,WAAW;AAChC,UAAM,OAAO,MAAM,MAAM,QAAQ;;GAGnC,oBAAoB,MAAM,SAAS;IACjC,MAAM,SAAS,eAAe;IAC9B,MAAM,UAAU,eAAe;IAC/B,MAAM,WAAW,YAAY,KAAK;IAGlC,MAAM,MAAM,QAAQ,QAAQ,KAAK;AACjC,WAAO,UAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAE1C,QAAI;AACF,YAAO,cAAc,UAAU,SAAS,QAAQ;AAChD,YAAO,WAAW,UAAU,KAAK;aAC1B,OAAO;AAEd,SAAI;AACF,aAAO,WAAW,SAAS;aACrB;AAGR,WAAM;;;GAIV,MAAM,OAAO,MAAM;IACjB,MAAM,SAAS,MAAM,WAAW;AAChC,QAAI;AACF,WAAM,OAAO,OAAO,KAAK;aAClB,OAAO;AAEd,SAAK,MAAgC,SAAS,UAAU;AACtD,YAAM;;;;GAKZ,WAAW,MAAM;IACf,MAAM,SAAS,eAAe;AAC9B,QAAI;AACF,YAAO,WAAW,KAAK;aAChB,OAAO;AAEd,SAAK,MAAgC,SAAS,UAAU;AACtD,YAAM;;;;GAKZ,aAAa,MAAM;IACjB,MAAM,SAAS,eAAe;AAC9B,WAAO,OAAO,aAAa,MAAM,QAAQ;;GAE5C;;AAIH,QAAO;EACL,MAAM,SAAS,MAAM;GACnB,MAAM,SAAS,MAAM,WAAW;AAChC,UAAO,MAAM,OAAO,SAAS,MAAM,QAAQ;;EAG7C,MAAM,UAAU,MAAM,SAAS;GAC7B,MAAM,SAAS,MAAM,WAAW;GAEhC,MAAMC,eAAa,MAAM,OAAO;GAChC,MAAM,MAAMA,aAAW,QAAQ,KAAK;AACpC,SAAM,OAAO,MAAM,KAAK,EAAE,WAAW,MAAM,CAAC;AAC5C,SAAM,OAAO,UAAU,MAAM,SAAS,QAAQ
;;EAGhD,MAAM,gBAAgB,MAAM,SAAS;GACnC,MAAM,SAAS,MAAM,WAAW;GAChC,MAAM,UAAU,MAAM,OAAO;GAC7B,MAAM,MAAM,QAAQ,QAAQ,KAAK;GACjC,MAAM,WAAW,YAAY,KAAK;AAElC,OAAI;AACF,UAAM,OAAO,MAAM,KAAK,EAAE,WAAW,MAAM,CAAC;AAC5C,UAAM,OAAO,UAAU,UAAU,SAAS,QAAQ;AAClD,UAAM,OAAO,OAAO,UAAU,KAAK;YAC5B,OAAO;AAEd,QAAI;AACF,WAAM,OAAO,OAAO,SAAS;YACvB;AAGR,UAAM;;;EAIV,MAAM,OAAO,MAAM;GACjB,MAAM,SAAS,MAAM,WAAW;AAChC,OAAI;AACF,UAAM,OAAO,OAAO,KAAK;AACzB,WAAO;WACD;AACN,WAAO;;;EAIX,MAAM,KAAK,MAAM;GACf,MAAM,SAAS,MAAM,WAAW;GAChC,MAAM,QAAQ,MAAM,OAAO,KAAK,KAAK;AACrC,UAAO;IAAE,OAAO,MAAM;IAAO,MAAM,MAAM;IAAM;;EAGjD,MAAM,OAAO,SAAS,SAAS;GAC7B,MAAM,SAAS,MAAM,WAAW;AAChC,SAAM,OAAO,OAAO,SAAS,QAAQ;;EAGvC,MAAM,MAAM,MAAM,SAAS;GACzB,MAAM,SAAS,MAAM,WAAW;AAChC,SAAM,OAAO,MAAM,MAAM,QAAQ;;EAGnC,oBAAoB,MAAM,SAAS;GACjC,MAAM,SAAS,eAAe;GAC9B,MAAM,UAAU,eAAe;GAC/B,MAAM,WAAW,YAAY,KAAK;GAGlC,MAAM,MAAM,QAAQ,QAAQ,KAAK;AACjC,UAAO,UAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAE1C,OAAI;AACF,WAAO,cAAc,UAAU,SAAS,QAAQ;AAChD,WAAO,WAAW,UAAU,KAAK;YAC1B,OAAO;AAEd,QAAI;AACF,YAAO,WAAW,SAAS;YACrB;AAGR,UAAM;;;EAIV,MAAM,OAAO,MAAM;GACjB,MAAM,SAAS,MAAM,WAAW;AAChC,OAAI;AACF,UAAM,OAAO,OAAO,KAAK;YAClB,OAAO;AAEd,QAAK,MAAgC,SAAS,UAAU;AACtD,WAAM;;;;EAKZ,WAAW,MAAM;GACf,MAAM,SAAS,eAAe;AAC9B,OAAI;AACF,WAAO,WAAW,KAAK;YAChB,OAAO;AAEd,QAAK,MAAgC,SAAS,UAAU;AACtD,WAAM;;;;EAKZ,aAAa,MAAM;GACjB,MAAM,SAAS,eAAe;AAC9B,UAAO,OAAO,aAAa,MAAM,QAAQ;;EAE5C;;AAIH,IAAIC,aAAgC;AAEpC,SAAgB,gBAA4B;AAC1C,KAAI,CAAC,YAAY;AACf,eAAa,kBAAkB;;AAEjC,QAAO;;;;;;AAOT,SAAgB,4BAAkC;AAChD,cAAa;;;;;;;;;;;ACrUf,SAAS,OAAO,KAAa,QAAwB;AACnD,QAAO,IAAI,SAAS,QAAQ,IAAI;;AAGlC,SAAgB,uBAAuC;AACrD,KAAI,QAAQ,OAAO;AACjB,SAAO,EACL,KAAK,SAAS,YAAY,UAAU;AAClC,OAAI,cAAc,UAAU;IAC1B,MAAM,SAAS,IAAI,IAAI,aAAa,SAAS;AAC7C,WAAO,OAAO,QAAQ;AACtB,WAAO,OAAO,OAAO,MAAM;;GAG7B,MAAM,UAAU,IAAI,KAAK,QAAQ;AAEjC,UAAO,OAAO,QAAQ,SAAS,GAAG,EAAE,GAAG;KAE1C;;AAIH,QAAO,EACL,KAAK,SAAS,YAAY,UAAU;AAClC,MAAI,cAAc,UAAU;GAC1B,MAAMC,WAAS,QAAQ,cAAc;AACrC,UAAOA,SAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;;EAIlE,MAAMA,WAAS,QAAQ,cAAc;EACrC,MAAM,aAAaA,SAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;AAE5E,SAAO,WAAW,UAAU,GAAG,GAAG;IAErC;;AAIH,IAAIC,iBAAwC;AAE5C,SAAgB,oBAAoC;AAClD,KAAI,CAAC,gBAAgB;AACnB,mBAAiB,sBAAsB;;AAEzC,QAAO;;;;;;AAOT,SAAgB,gCAAsC;AACpD,kBAAiB;;;;;;;;;;;;AC1DnB,SAAgB,aAAqB;AACnC,KAAI,QAAQ,SAAS,OAAO,QAAQ,eAAe,OAAO,IAAI,iBAAiB,YAAY;AACzF,SAAO,IAAI,cAAc;;CAI3B,MAAMC,WAAS,QAAQ,cAAc;AACrC,QAAOA,SAAO,YAAY;;;;;;;;ACC5B,eAAsB,MAAM,SAA6C;AACvE,KAAI,QAAQ,OAAO;EACjB,MAAM,OAAO,IAAI,MAAM,QAAQ,KAAK;GAClC,KAAK,QAAQ;GACb,KAAK,QAAQ;GACb,QAAQ;GACR,QAAQ;GACT,CAAC;EAEF,MAAM,CAAC,QAAQ,UAAU,MAAM,QAAQ,IAAI,CAAC,IAAI,SAAS,KAAK,OAAO,CAAC,MAAM,EAAE,IAAI,SAAS,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC;EAEhH,MAAM,WAAW,MAAM,KAAK;AAE5B,SAAO;GAAE;GAAQ;GAAQ;GAAU;;CAIrC,MAAM,EAAE,aAAa,MAAM,OAAO;CAClC,MAAM,EAAE,cAAc,MAAM,OAAO;CACnC,MAAM,kBAAkB,UAAU,SAAS;CAE3C,MAAM,CAAC,SAAS,GAAG,QAAQ,QAAQ;AACnC,KAAI,CAAC,SAAS;AACZ,SAAO;GACL,QAAQ;GACR,QAAQ;GACR,UAAU;GACX;;AAGH,KAAI;EACF,MAAMC,cAIF,EACF,UAAU,SACX;AAED,MAAI,QAAQ,KAAK;AACf,eAAY,MAAM,QAAQ;;AAE5B,MAAI,QAAQ,KAAK;AACf,eAAY,MAAM,QAAQ;;EAG5B,MAAM,EAAE,QAAQ,WAAW,MAAM,gBAAgB,SAAS,MAAM,YAAY;AAC5E,SAAO;GACL,QAAQ,UAAU;GAClB,QAAQ,UAAU;GAClB,UAAU;GACX;UACMC,OAAgB;EACvB,MAAM,MAAM;AAKZ,SAAO;GACL,QAAQ,IAAI,UAAU;GACtB,QAAQ,IAAI,UAAU;GACtB,UAAU,IAAI,QAAQ;GACvB"}
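The CommonJS build picks up the same additions, including writeFileSyncAtomic, whose doc comment in the embedded sources marks it as safe for beforeExit handlers, where asynchronous work can no longer be awaited. A hedged usage sketch, assuming the portable entry is exposed as a subpath export (the exit-hook wiring and file path are illustrative):

    import { getPortableFS } from "@soda-gql/common/portable";

    const fs = getPortableFS();
    const cache = new Map<string, string>();

    process.on("beforeExit", () => {
      // Synchronous and atomic, so a crash mid-write cannot leave a truncated file behind.
      fs.writeFileSyncAtomic("/tmp/soda-gql-cache.json", JSON.stringify([...cache.entries()]));
    });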
package/dist/portable.cjs CHANGED
@@ -1,4 +1,4 @@
1
- const require_portable = require('./portable-C_7gJWmz.cjs');
1
+ const require_portable = require('./portable-cJqkfeHw.cjs');
2
2
 
3
3
  exports.__resetPortableFSForTests = require_portable.__resetPortableFSForTests;
4
4
  exports.__resetPortableHasherForTests = require_portable.__resetPortableHasherForTests;
@@ -1,2 +1,2 @@
1
- import { a as resetPortableForTests, c as HashAlgorithm, d as createPortableHasher, f as getPortableHasher, g as getPortableFS, h as createPortableFS, i as once, l as PortableHasher, m as __resetPortableFSForTests, n as SpawnResult, o as runtime, p as PortableFS, r as spawn, s as generateId, t as SpawnOptions, u as __resetPortableHasherForTests } from "./index-DaAp2rNj.cjs";
1
+ import { a as resetPortableForTests, c as HashAlgorithm, d as createPortableHasher, f as getPortableHasher, g as getPortableFS, h as createPortableFS, i as once, l as PortableHasher, m as __resetPortableFSForTests, n as SpawnResult, o as runtime, p as PortableFS, r as spawn, s as generateId, t as SpawnOptions, u as __resetPortableHasherForTests } from "./index-D1tzB3W5.cjs";
2
2
  export { HashAlgorithm, PortableFS, PortableHasher, SpawnOptions, SpawnResult, __resetPortableFSForTests, __resetPortableHasherForTests, createPortableFS, createPortableHasher, generateId, getPortableFS, getPortableHasher, once, resetPortableForTests, runtime, spawn };
@@ -1,2 +1,2 @@
1
- import { a as resetPortableForTests, c as HashAlgorithm, d as createPortableHasher, f as getPortableHasher, g as getPortableFS, h as createPortableFS, i as once, l as PortableHasher, m as __resetPortableFSForTests, n as SpawnResult, o as runtime, p as PortableFS, r as spawn, s as generateId, t as SpawnOptions, u as __resetPortableHasherForTests } from "./index-BedBpKbv.mjs";
1
+ import { a as resetPortableForTests, c as HashAlgorithm, d as createPortableHasher, f as getPortableHasher, g as getPortableFS, h as createPortableFS, i as once, l as PortableHasher, m as __resetPortableFSForTests, n as SpawnResult, o as runtime, p as PortableFS, r as spawn, s as generateId, t as SpawnOptions, u as __resetPortableHasherForTests } from "./index-B424kKYS.mjs";
2
2
  export { HashAlgorithm, PortableFS, PortableHasher, SpawnOptions, SpawnResult, __resetPortableFSForTests, __resetPortableHasherForTests, createPortableFS, createPortableHasher, generateId, getPortableFS, getPortableHasher, once, resetPortableForTests, runtime, spawn };
package/dist/portable.mjs CHANGED
@@ -1,3 +1,3 @@
1
- import { a as getPortableHasher, c as getPortableFS, d as runtime, i as createPortableHasher, l as once, n as generateId, o as __resetPortableFSForTests, r as __resetPortableHasherForTests, s as createPortableFS, t as spawn, u as resetPortableForTests } from "./portable-Dbo3u2CQ.mjs";
1
+ import { a as getPortableHasher, c as getPortableFS, d as runtime, i as createPortableHasher, l as once, n as generateId, o as __resetPortableFSForTests, r as __resetPortableHasherForTests, s as createPortableFS, t as spawn, u as resetPortableForTests } from "./portable-BT3ahkQN.mjs";
2
2
 
3
3
  export { __resetPortableFSForTests, __resetPortableHasherForTests, createPortableFS, createPortableHasher, generateId, getPortableFS, getPortableHasher, once, resetPortableForTests, runtime, spawn };
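The new unlink and unlinkSync methods swallow ENOENT, so deleting a file that may already be gone is not an error, while any other failure still propagates. A short sketch of that contract (the paths are illustrative and the subpath import is an assumption):

    import { getPortableFS } from "@soda-gql/common/portable";

    const fs = getPortableFS();

    // A missing file is fine: the ENOENT branch shown above turns this into a no-op.
    await fs.unlink("/tmp/might-not-exist.lock");

    // Anything else (e.g. EACCES) is rethrown and must be handled by the caller.
    try {
      fs.unlinkSync("/readonly/locked.file");
    } catch (error) {
      console.error("unlink failed for a reason other than ENOENT:", error);
    }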
@@ -1,4 +1,4 @@
1
- const require_canonical_id = require('./canonical-id-BFcryTw5.cjs');
1
+ const require_canonical_id = require('./canonical-id-CgMNOZyn.cjs');
2
2
  let node_path = require("node:path");
3
3
  let node_fs = require("node:fs");
4
4
 
@@ -145,4 +145,4 @@ Object.defineProperty(exports, 'resolveRelativeImportWithReferences', {
145
145
  return resolveRelativeImportWithReferences;
146
146
  }
147
147
  });
148
- //# sourceMappingURL=utils-CmLf7LU5.cjs.map
148
+ //# sourceMappingURL=utils-CsTwS1dw.cjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"utils-CmLf7LU5.cjs","names":["cached: { value: T } | null"],"sources":["../src/utils/cached-fn.ts","../src/utils/path.ts"],"sourcesContent":["export const cachedFn = <T>(fn: () => T) => {\n let cached: { value: T } | null = null;\n\n const ensure = () => (cached ??= { value: fn() }).value;\n ensure.clear = () => {\n cached = null;\n };\n\n return ensure;\n};\n","import { existsSync, statSync } from \"node:fs\";\nimport { dirname, join, normalize, resolve } from \"node:path\";\n\n/**\n * File extensions to try when resolving module specifiers.\n * Ordered to match TypeScript's module resolution order.\n * @see https://www.typescriptlang.org/docs/handbook/module-resolution.html\n */\nexport const MODULE_EXTENSION_CANDIDATES = [\".ts\", \".tsx\", \".mts\", \".cts\", \".js\", \".mjs\", \".cjs\", \".jsx\"] as const;\n\n/**\n * Normalize path to use forward slashes (cross-platform).\n * Ensures consistent path handling across platforms.\n */\nexport const normalizePath = (value: string): string => normalize(value).replace(/\\\\/g, \"/\");\n\n/**\n * Resolve a relative import specifier to an absolute file path.\n * Tries the specifier as-is, with extensions, and as a directory with index files.\n *\n * @param from - Absolute path to the importing file\n * @param specifier - Relative module specifier (must start with '.')\n * @returns Absolute POSIX path to the resolved file, or null if not found\n */\nexport const resolveRelativeImportWithExistenceCheck = ({\n filePath,\n specifier,\n}: {\n filePath: string;\n specifier: string;\n}): string | null => {\n const base = resolve(dirname(filePath), specifier);\n\n // Try with extensions first (most common case)\n // This handles cases like \"./constants\" resolving to \"./constants.ts\"\n // even when a \"./constants\" directory exists\n for (const ext of MODULE_EXTENSION_CANDIDATES) {\n const candidate = `${base}${ext}`;\n if (existsSync(candidate)) {\n return normalizePath(candidate);\n }\n }\n\n // Try as directory with index files\n for (const ext of MODULE_EXTENSION_CANDIDATES) {\n const candidate = join(base, `index${ext}`);\n if (existsSync(candidate)) {\n return normalizePath(candidate);\n }\n }\n\n // Try exact path last (only if it's a file, not directory)\n if (existsSync(base)) {\n try {\n const stat = statSync(base);\n if (stat.isFile()) {\n return normalizePath(base);\n }\n } catch {\n // Ignore stat errors\n }\n }\n\n return null;\n};\n\n/**\n * Resolve a relative import specifier to an absolute file path.\n * Tries the specifier as-is, with extensions, and as a directory with index files.\n *\n * @param from - Absolute path to the importing file\n * @param specifier - Relative module specifier (must start with '.')\n * @returns Absolute POSIX path to the resolved file, or null if not found\n */\nexport const resolveRelativeImportWithReferences = <_>({\n filePath,\n specifier,\n references,\n}: {\n filePath: string;\n specifier: string;\n references: Map<string, _> | Set<string>;\n}): string | null => {\n const base = resolve(dirname(filePath), specifier);\n\n // Try exact path first\n if (references.has(base)) {\n return normalizePath(base);\n }\n\n // Try with extensions\n for (const ext of MODULE_EXTENSION_CANDIDATES) {\n const candidate = `${base}${ext}`;\n if (references.has(candidate)) {\n return normalizePath(candidate);\n }\n }\n\n // Try as directory with index files\n for (const ext of MODULE_EXTENSION_CANDIDATES) {\n const candidate = join(base, `index${ext}`);\n if (references.has(candidate)) {\n 
return normalizePath(candidate);\n }\n }\n\n return null;\n};\n\n/**\n * Check if a module specifier is relative (starts with '.' or '..')\n */\nexport const isRelativeSpecifier = (specifier: string): boolean => specifier.startsWith(\"./\") || specifier.startsWith(\"../\");\n\n/**\n * Check if a module specifier is external (package name, not relative)\n */\nexport const isExternalSpecifier = (specifier: string): boolean => !isRelativeSpecifier(specifier);\n"],"mappings":";;;;;AAAA,MAAa,YAAe,OAAgB;CAC1C,IAAIA,SAA8B;CAElC,MAAM,gBAAgB,WAAW,EAAE,OAAO,IAAI,EAAE,EAAE;AAClD,QAAO,cAAc;AACnB,WAAS;;AAGX,QAAO;;;;;;;;;;ACAT,MAAa,8BAA8B;CAAC;CAAO;CAAQ;CAAQ;CAAQ;CAAO;CAAQ;CAAQ;CAAO;;;;;AAMzG,MAAa,iBAAiB,mCAAoC,MAAM,CAAC,QAAQ,OAAO,IAAI;;;;;;;;;AAU5F,MAAa,2CAA2C,EACtD,UACA,gBAImB;CACnB,MAAM,qDAAuB,SAAS,EAAE,UAAU;AAKlD,MAAK,MAAM,OAAO,6BAA6B;EAC7C,MAAM,YAAY,GAAG,OAAO;AAC5B,8BAAe,UAAU,EAAE;AACzB,UAAO,cAAc,UAAU;;;AAKnC,MAAK,MAAM,OAAO,6BAA6B;EAC7C,MAAM,gCAAiB,MAAM,QAAQ,MAAM;AAC3C,8BAAe,UAAU,EAAE;AACzB,UAAO,cAAc,UAAU;;;AAKnC,6BAAe,KAAK,EAAE;AACpB,MAAI;GACF,MAAM,6BAAgB,KAAK;AAC3B,OAAI,KAAK,QAAQ,EAAE;AACjB,WAAO,cAAc,KAAK;;UAEtB;;AAKV,QAAO;;;;;;;;;;AAWT,MAAa,uCAA0C,EACrD,UACA,WACA,iBAKmB;CACnB,MAAM,qDAAuB,SAAS,EAAE,UAAU;AAGlD,KAAI,WAAW,IAAI,KAAK,EAAE;AACxB,SAAO,cAAc,KAAK;;AAI5B,MAAK,MAAM,OAAO,6BAA6B;EAC7C,MAAM,YAAY,GAAG,OAAO;AAC5B,MAAI,WAAW,IAAI,UAAU,EAAE;AAC7B,UAAO,cAAc,UAAU;;;AAKnC,MAAK,MAAM,OAAO,6BAA6B;EAC7C,MAAM,gCAAiB,MAAM,QAAQ,MAAM;AAC3C,MAAI,WAAW,IAAI,UAAU,EAAE;AAC7B,UAAO,cAAc,UAAU;;;AAInC,QAAO;;;;;AAMT,MAAa,uBAAuB,cAA+B,UAAU,WAAW,KAAK,IAAI,UAAU,WAAW,MAAM;;;;AAK5H,MAAa,uBAAuB,cAA+B,CAAC,oBAAoB,UAAU"}
1
+ {"version":3,"file":"utils-CsTwS1dw.cjs","names":["cached: { value: T } | null"],"sources":["../src/utils/cached-fn.ts","../src/utils/path.ts"],"sourcesContent":["export const cachedFn = <T>(fn: () => T) => {\n let cached: { value: T } | null = null;\n\n const ensure = () => (cached ??= { value: fn() }).value;\n ensure.clear = () => {\n cached = null;\n };\n\n return ensure;\n};\n","import { existsSync, statSync } from \"node:fs\";\nimport { dirname, join, normalize, resolve } from \"node:path\";\n\n/**\n * File extensions to try when resolving module specifiers.\n * Ordered to match TypeScript's module resolution order.\n * @see https://www.typescriptlang.org/docs/handbook/module-resolution.html\n */\nexport const MODULE_EXTENSION_CANDIDATES = [\".ts\", \".tsx\", \".mts\", \".cts\", \".js\", \".mjs\", \".cjs\", \".jsx\"] as const;\n\n/**\n * Normalize path to use forward slashes (cross-platform).\n * Ensures consistent path handling across platforms.\n */\nexport const normalizePath = (value: string): string => normalize(value).replace(/\\\\/g, \"/\");\n\n/**\n * Resolve a relative import specifier to an absolute file path.\n * Tries the specifier as-is, with extensions, and as a directory with index files.\n *\n * @param from - Absolute path to the importing file\n * @param specifier - Relative module specifier (must start with '.')\n * @returns Absolute POSIX path to the resolved file, or null if not found\n */\nexport const resolveRelativeImportWithExistenceCheck = ({\n filePath,\n specifier,\n}: {\n filePath: string;\n specifier: string;\n}): string | null => {\n const base = resolve(dirname(filePath), specifier);\n\n // Try with extensions first (most common case)\n // This handles cases like \"./constants\" resolving to \"./constants.ts\"\n // even when a \"./constants\" directory exists\n for (const ext of MODULE_EXTENSION_CANDIDATES) {\n const candidate = `${base}${ext}`;\n if (existsSync(candidate)) {\n return normalizePath(candidate);\n }\n }\n\n // Try as directory with index files\n for (const ext of MODULE_EXTENSION_CANDIDATES) {\n const candidate = join(base, `index${ext}`);\n if (existsSync(candidate)) {\n return normalizePath(candidate);\n }\n }\n\n // Try exact path last (only if it's a file, not directory)\n if (existsSync(base)) {\n try {\n const stat = statSync(base);\n if (stat.isFile()) {\n return normalizePath(base);\n }\n } catch {\n // Ignore stat errors\n }\n }\n\n return null;\n};\n\n/**\n * Resolve a relative import specifier to an absolute file path.\n * Tries the specifier as-is, with extensions, and as a directory with index files.\n *\n * @param from - Absolute path to the importing file\n * @param specifier - Relative module specifier (must start with '.')\n * @returns Absolute POSIX path to the resolved file, or null if not found\n */\nexport const resolveRelativeImportWithReferences = <_>({\n filePath,\n specifier,\n references,\n}: {\n filePath: string;\n specifier: string;\n references: Map<string, _> | Set<string>;\n}): string | null => {\n const base = resolve(dirname(filePath), specifier);\n\n // Try exact path first\n if (references.has(base)) {\n return normalizePath(base);\n }\n\n // Try with extensions\n for (const ext of MODULE_EXTENSION_CANDIDATES) {\n const candidate = `${base}${ext}`;\n if (references.has(candidate)) {\n return normalizePath(candidate);\n }\n }\n\n // Try as directory with index files\n for (const ext of MODULE_EXTENSION_CANDIDATES) {\n const candidate = join(base, `index${ext}`);\n if (references.has(candidate)) {\n 
return normalizePath(candidate);\n }\n }\n\n return null;\n};\n\n/**\n * Check if a module specifier is relative (starts with '.' or '..')\n */\nexport const isRelativeSpecifier = (specifier: string): boolean => specifier.startsWith(\"./\") || specifier.startsWith(\"../\");\n\n/**\n * Check if a module specifier is external (package name, not relative)\n */\nexport const isExternalSpecifier = (specifier: string): boolean => !isRelativeSpecifier(specifier);\n"],"mappings":";;;;;AAAA,MAAa,YAAe,OAAgB;CAC1C,IAAIA,SAA8B;CAElC,MAAM,gBAAgB,WAAW,EAAE,OAAO,IAAI,EAAE,EAAE;AAClD,QAAO,cAAc;AACnB,WAAS;;AAGX,QAAO;;;;;;;;;;ACAT,MAAa,8BAA8B;CAAC;CAAO;CAAQ;CAAQ;CAAQ;CAAO;CAAQ;CAAQ;CAAO;;;;;AAMzG,MAAa,iBAAiB,mCAAoC,MAAM,CAAC,QAAQ,OAAO,IAAI;;;;;;;;;AAU5F,MAAa,2CAA2C,EACtD,UACA,gBAImB;CACnB,MAAM,qDAAuB,SAAS,EAAE,UAAU;AAKlD,MAAK,MAAM,OAAO,6BAA6B;EAC7C,MAAM,YAAY,GAAG,OAAO;AAC5B,8BAAe,UAAU,EAAE;AACzB,UAAO,cAAc,UAAU;;;AAKnC,MAAK,MAAM,OAAO,6BAA6B;EAC7C,MAAM,gCAAiB,MAAM,QAAQ,MAAM;AAC3C,8BAAe,UAAU,EAAE;AACzB,UAAO,cAAc,UAAU;;;AAKnC,6BAAe,KAAK,EAAE;AACpB,MAAI;GACF,MAAM,6BAAgB,KAAK;AAC3B,OAAI,KAAK,QAAQ,EAAE;AACjB,WAAO,cAAc,KAAK;;UAEtB;;AAKV,QAAO;;;;;;;;;;AAWT,MAAa,uCAA0C,EACrD,UACA,WACA,iBAKmB;CACnB,MAAM,qDAAuB,SAAS,EAAE,UAAU;AAGlD,KAAI,WAAW,IAAI,KAAK,EAAE;AACxB,SAAO,cAAc,KAAK;;AAI5B,MAAK,MAAM,OAAO,6BAA6B;EAC7C,MAAM,YAAY,GAAG,OAAO;AAC5B,MAAI,WAAW,IAAI,UAAU,EAAE;AAC7B,UAAO,cAAc,UAAU;;;AAKnC,MAAK,MAAM,OAAO,6BAA6B;EAC7C,MAAM,gCAAiB,MAAM,QAAQ,MAAM;AAC3C,MAAI,WAAW,IAAI,UAAU,EAAE;AAC7B,UAAO,cAAc,UAAU;;;AAInC,QAAO;;;;;AAMT,MAAa,uBAAuB,cAA+B,UAAU,WAAW,KAAK,IAAI,UAAU,WAAW,MAAM;;;;AAK5H,MAAa,uBAAuB,cAA+B,CAAC,oBAAoB,UAAU"}
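The new utils chunk above bundles the module-resolution helpers from src/utils/path.ts (normalizePath, resolveRelativeImportWithExistenceCheck, resolveRelativeImportWithReferences, isRelativeSpecifier, isExternalSpecifier). A minimal usage sketch of those helpers, assuming they are re-exported through the package's utils entry point (the exact import path is an assumption and is not shown in this hunk):

```typescript
// Sketch only: exercises the helpers whose source is embedded in the map above.
// The "@soda-gql/common/utils" subpath and the file paths are assumptions.
import {
  isRelativeSpecifier,
  isExternalSpecifier,
  resolveRelativeImportWithExistenceCheck,
} from "@soda-gql/common/utils";

isRelativeSpecifier("./constants");  // true
isRelativeSpecifier("../shared");    // true
isExternalSpecifier("zod");          // true (not a relative specifier)

// Tries "./constants" + each of .ts/.tsx/.mts/.cts/.js/.mjs/.cjs/.jsx,
// then "./constants/index.*", then the bare path if it is an existing file.
const resolved = resolveRelativeImportWithExistenceCheck({
  filePath: "/repo/src/app.ts",   // hypothetical importing file
  specifier: "./constants",
});
// resolved is a forward-slash absolute path such as "/repo/src/constants.ts"
// when a matching file exists on disk, otherwise null.
```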
package/dist/utils.cjs CHANGED
@@ -1,4 +1,4 @@
1
- const require_utils = require('./utils-CmLf7LU5.cjs');
1
+ const require_utils = require('./utils-CsTwS1dw.cjs');
2
2
 
3
3
  exports.MODULE_EXTENSION_CANDIDATES = require_utils.MODULE_EXTENSION_CANDIDATES;
4
4
  exports.cachedFn = require_utils.cachedFn;
package/dist/{zod-CynYgOoN.cjs → zod-C_6JfuYV.cjs} RENAMED
@@ -1,4 +1,4 @@
1
- const require_canonical_id = require('./canonical-id-BFcryTw5.cjs');
1
+ const require_canonical_id = require('./canonical-id-CgMNOZyn.cjs');
2
2
  let zod = require("zod");
3
3
 
4
4
  //#region packages/common/src/zod/schema-helper.ts
@@ -13,4 +13,4 @@ Object.defineProperty(exports, 'defineSchemaFor', {
13
13
  return defineSchemaFor;
14
14
  }
15
15
  });
16
- //# sourceMappingURL=zod-CynYgOoN.cjs.map
16
+ //# sourceMappingURL=zod-C_6JfuYV.cjs.map
package/dist/{zod-CynYgOoN.cjs.map → zod-C_6JfuYV.cjs.map} RENAMED
@@ -1 +1 @@
1
- {"version":3,"file":"zod-CynYgOoN.cjs","names":["z"],"sources":["../src/zod/schema-helper.ts"],"sourcesContent":["import { z } from \"zod\";\n\n// biome-ignore lint/suspicious/noExplicitAny: abstract type\nexport type SchemaFor<TOutput> = z.ZodType<TOutput, any, any>;\n\nexport type ShapeFor<TOutput extends object> = { [K in keyof TOutput]-?: SchemaFor<TOutput[K]> };\n\nexport function defineSchemaFor<TOutput extends object>() {\n return <TShape extends ShapeFor<NoInfer<TOutput>>>(shape: TShape & { [K in Exclude<keyof TShape, keyof TOutput>]: never }) =>\n z.object(shape).strict();\n}\n"],"mappings":";;;;AAOA,SAAgB,kBAA0C;AACxD,SAAmD,UACjDA,MAAE,OAAO,MAAM,CAAC,QAAQ"}
1
+ {"version":3,"file":"zod-C_6JfuYV.cjs","names":["z"],"sources":["../src/zod/schema-helper.ts"],"sourcesContent":["import { z } from \"zod\";\n\n// biome-ignore lint/suspicious/noExplicitAny: abstract type\nexport type SchemaFor<TOutput> = z.ZodType<TOutput, any, any>;\n\nexport type ShapeFor<TOutput extends object> = { [K in keyof TOutput]-?: SchemaFor<TOutput[K]> };\n\nexport function defineSchemaFor<TOutput extends object>() {\n return <TShape extends ShapeFor<NoInfer<TOutput>>>(shape: TShape & { [K in Exclude<keyof TShape, keyof TOutput>]: never }) =>\n z.object(shape).strict();\n}\n"],"mappings":";;;;AAOA,SAAgB,kBAA0C;AACxD,SAAmD,UACjDA,MAAE,OAAO,MAAM,CAAC,QAAQ"}
package/dist/zod.cjs CHANGED
@@ -1,3 +1,3 @@
1
- const require_zod = require('./zod-CynYgOoN.cjs');
1
+ const require_zod = require('./zod-C_6JfuYV.cjs');
2
2
 
3
3
  exports.defineSchemaFor = require_zod.defineSchemaFor;
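For context, the renamed zod chunk and dist/zod.cjs above re-export defineSchemaFor, which builds a strict zod object schema whose shape is checked against an existing TypeScript type (every key required, unknown keys rejected). A minimal sketch, assuming a "@soda-gql/common/zod" subpath export that mirrors dist/zod.cjs:

```typescript
// Sketch only: usage of defineSchemaFor as defined in schema-helper.ts above.
// The subpath import and the User type are assumptions for illustration.
import { z } from "zod";
import { defineSchemaFor } from "@soda-gql/common/zod";

type User = { id: string; name: string };

// The outer call fixes the output type; the inner call takes one zod schema
// per key of User and returns z.object(shape).strict().
const UserSchema = defineSchemaFor<User>()({
  id: z.string(),
  name: z.string(),
});

UserSchema.parse({ id: "1", name: "Ada" });              // ok
// UserSchema.parse({ id: "1", name: "Ada", extra: 1 }); // throws: .strict()
```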
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@soda-gql/common",
3
- "version": "0.7.0",
3
+ "version": "0.8.2",
4
4
  "description": "Shared utilities for soda-gql packages",
5
5
  "type": "module",
6
6
  "private": false,
package/dist/canonical-id-BFcryTw5.cjs.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"canonical-id-BFcryTw5.cjs","names":["CanonicalIdSchema: z.ZodType<CanonicalId>","z","normalizePath","scopeStack: ScopeFrame[]","frame: ScopeFrame","exportBinding: string | undefined"],"sources":["../src/canonical-id/canonical-id.ts","../src/canonical-id/path-tracker.ts"],"sourcesContent":["import { isAbsolute, resolve } from \"node:path\";\nimport z from \"zod\";\nimport { normalizePath } from \"../utils\";\n\nexport type CanonicalId = string & { readonly __brand: \"CanonicalId\" };\n\nconst canonicalIdSeparator = \"::\" as const;\n\nexport const CanonicalIdSchema: z.ZodType<CanonicalId> = z.string() as unknown as z.ZodType<CanonicalId>;\n\n// Type-safe schema for CanonicalId - validates as string but types as branded\nexport const createCanonicalId = (filePath: string, astPath: string): CanonicalId => {\n if (!isAbsolute(filePath)) {\n throw new Error(\"[INTERNAL] CANONICAL_ID_REQUIRES_ABSOLUTE_PATH\");\n }\n\n const resolved = resolve(filePath);\n const normalized = normalizePath(resolved);\n\n // Create a 2-part ID: {absPath}::{astPath}\n // astPath uniquely identifies the definition's location in the AST (e.g., \"MyComponent.useQuery.def\")\n const idParts = [normalized, astPath];\n\n return idParts.join(canonicalIdSeparator) as CanonicalId;\n};\n","/**\n * Canonical path tracker for AST traversal.\n *\n * This module provides a stateful helper that tracks scope information during\n * AST traversal to generate canonical IDs. It's designed to integrate with\n * existing plugin visitor patterns (Babel, SWC, TypeScript) without requiring\n * a separate AST traversal.\n *\n * Usage pattern:\n * 1. Plugin creates tracker at file/program entry\n * 2. Plugin calls enterScope/exitScope during its traversal\n * 3. Plugin calls registerDefinition when discovering GQL definitions\n * 4. 
Tracker provides canonical ID information\n */\n\nimport type { CanonicalId } from \"./canonical-id\";\nimport { createCanonicalId } from \"./canonical-id\";\n\n/**\n * Scope frame for tracking AST path segments\n */\nexport type ScopeFrame = {\n /** Name segment (e.g., \"MyComponent\", \"useQuery\", \"arrow#1\") */\n readonly nameSegment: string;\n /** Kind of scope */\n readonly kind: \"function\" | \"class\" | \"variable\" | \"property\" | \"method\" | \"expression\";\n /** Occurrence index for disambiguation */\n readonly occurrence: number;\n};\n\n/**\n * Opaque handle for scope tracking\n */\nexport type ScopeHandle = {\n readonly __brand: \"ScopeHandle\";\n readonly depth: number;\n};\n\n/**\n * Canonical path tracker interface\n */\nexport interface CanonicalPathTracker {\n /**\n * Enter a new scope during traversal\n * @param options Scope information\n * @returns Handle to use when exiting the scope\n */\n enterScope(options: { segment: string; kind: ScopeFrame[\"kind\"]; stableKey?: string }): ScopeHandle;\n\n /**\n * Exit a scope during traversal\n * @param handle Handle returned from enterScope\n */\n exitScope(handle: ScopeHandle): void;\n\n /**\n * Register a definition discovered during traversal\n * @returns Definition metadata including astPath and canonical ID information\n */\n registerDefinition(): {\n astPath: string;\n isTopLevel: boolean;\n exportBinding?: string;\n };\n\n /**\n * Resolve a canonical ID from an astPath\n * @param astPath AST path string\n * @returns Canonical ID\n */\n resolveCanonicalId(astPath: string): CanonicalId;\n\n /**\n * Register an export binding\n * @param local Local variable name\n * @param exported Exported name\n */\n registerExportBinding(local: string, exported: string): void;\n\n /**\n * Get current scope depth\n * @returns Current depth (0 = top level)\n */\n currentDepth(): number;\n}\n\n/**\n * Build AST path from scope stack (internal helper)\n */\nconst _buildAstPath = (stack: readonly ScopeFrame[]): string => {\n return stack.map((frame) => frame.nameSegment).join(\".\");\n};\n\n/**\n * Create a canonical path tracker\n *\n * @param options Configuration options\n * @returns Tracker instance\n *\n * @example\n * ```typescript\n * // In a Babel plugin\n * const tracker = createCanonicalTracker({ filePath: state.filename });\n *\n * const visitor = {\n * FunctionDeclaration: {\n * enter(path) {\n * const handle = tracker.enterScope({\n * segment: path.node.id.name,\n * kind: 'function'\n * });\n * },\n * exit(path) {\n * tracker.exitScope(handle);\n * }\n * }\n * };\n * ```\n */\nexport const createCanonicalTracker = (options: {\n filePath: string;\n getExportName?: (localName: string) => string | undefined;\n}): CanonicalPathTracker => {\n const { filePath, getExportName } = options;\n\n // Scope stack\n const scopeStack: ScopeFrame[] = [];\n\n // Occurrence counters for disambiguating duplicate names\n const occurrenceCounters = new Map<string, number>();\n\n // Used paths for ensuring uniqueness\n const usedPaths = new Set<string>();\n\n // Export bindings map\n const exportBindings = new Map<string, string>();\n\n const getNextOccurrence = (key: string): number => {\n const current = occurrenceCounters.get(key) ?? 
0;\n occurrenceCounters.set(key, current + 1);\n return current;\n };\n\n const ensureUniquePath = (basePath: string): string => {\n let path = basePath;\n let suffix = 0;\n while (usedPaths.has(path)) {\n suffix++;\n path = `${basePath}$${suffix}`;\n }\n usedPaths.add(path);\n return path;\n };\n\n return {\n enterScope({ segment, kind, stableKey }): ScopeHandle {\n const key = stableKey ?? `${kind}:${segment}`;\n const occurrence = getNextOccurrence(key);\n\n const frame: ScopeFrame = {\n nameSegment: segment,\n kind,\n occurrence,\n };\n\n scopeStack.push(frame);\n\n return {\n __brand: \"ScopeHandle\",\n depth: scopeStack.length - 1,\n } as ScopeHandle;\n },\n\n exitScope(handle: ScopeHandle): void {\n // Validate handle depth matches current stack\n if (handle.depth !== scopeStack.length - 1) {\n throw new Error(`[INTERNAL] Invalid scope exit: expected depth ${scopeStack.length - 1}, got ${handle.depth}`);\n }\n scopeStack.pop();\n },\n\n registerDefinition(): {\n astPath: string;\n isTopLevel: boolean;\n exportBinding?: string;\n } {\n const basePath = _buildAstPath(scopeStack);\n const astPath = ensureUniquePath(basePath);\n const isTopLevel = scopeStack.length === 0;\n\n // Check export binding if provided\n let exportBinding: string | undefined;\n if (getExportName && isTopLevel) {\n // For top-level definitions, try to get export name\n // This is a simplified version - real logic depends on how the definition is bound\n exportBinding = undefined;\n }\n\n return {\n astPath,\n isTopLevel,\n exportBinding,\n };\n },\n\n resolveCanonicalId(astPath: string): CanonicalId {\n return createCanonicalId(filePath, astPath);\n },\n\n registerExportBinding(local: string, exported: string): void {\n exportBindings.set(local, exported);\n },\n\n currentDepth(): number {\n return scopeStack.length;\n },\n };\n};\n\n/**\n * Helper to create occurrence tracker (for backward compatibility)\n */\nexport const createOccurrenceTracker = (): {\n getNextOccurrence: (key: string) => number;\n} => {\n const occurrenceCounters = new Map<string, number>();\n\n return {\n getNextOccurrence(key: string): number {\n const current = occurrenceCounters.get(key) ?? 
0;\n occurrenceCounters.set(key, current + 1);\n return current;\n },\n };\n};\n\n/**\n * Helper to create path tracker (for backward compatibility)\n */\nexport const createPathTracker = (): {\n ensureUniquePath: (basePath: string) => string;\n} => {\n const usedPaths = new Set<string>();\n\n return {\n ensureUniquePath(basePath: string): string {\n let path = basePath;\n let suffix = 0;\n while (usedPaths.has(path)) {\n suffix++;\n path = `${basePath}$${suffix}`;\n }\n usedPaths.add(path);\n return path;\n },\n };\n};\n\n/**\n * Build AST path from scope stack (for backward compatibility)\n */\nexport const buildAstPath = (stack: readonly ScopeFrame[]): string => {\n return stack.map((frame) => frame.nameSegment).join(\".\");\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAMA,MAAM,uBAAuB;AAE7B,MAAaA,oBAA4CC,YAAE,QAAQ;AAGnE,MAAa,qBAAqB,UAAkB,YAAiC;AACnF,KAAI,2BAAY,SAAS,EAAE;AACzB,QAAM,IAAI,MAAM,iDAAiD;;CAGnE,MAAM,kCAAmB,SAAS;CAClC,MAAM,aAAaC,4BAAc,SAAS;CAI1C,MAAM,UAAU,CAAC,YAAY,QAAQ;AAErC,QAAO,QAAQ,KAAK,qBAAqB;;;;;;;;ACkE3C,MAAM,iBAAiB,UAAyC;AAC9D,QAAO,MAAM,KAAK,UAAU,MAAM,YAAY,CAAC,KAAK,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6B1D,MAAa,0BAA0B,YAGX;CAC1B,MAAM,EAAE,UAAU,kBAAkB;CAGpC,MAAMC,aAA2B,EAAE;CAGnC,MAAM,qBAAqB,IAAI,KAAqB;CAGpD,MAAM,YAAY,IAAI,KAAa;CAGnC,MAAM,iBAAiB,IAAI,KAAqB;CAEhD,MAAM,qBAAqB,QAAwB;EACjD,MAAM,UAAU,mBAAmB,IAAI,IAAI,IAAI;AAC/C,qBAAmB,IAAI,KAAK,UAAU,EAAE;AACxC,SAAO;;CAGT,MAAM,oBAAoB,aAA6B;EACrD,IAAI,OAAO;EACX,IAAI,SAAS;AACb,SAAO,UAAU,IAAI,KAAK,EAAE;AAC1B;AACA,UAAO,GAAG,SAAS,GAAG;;AAExB,YAAU,IAAI,KAAK;AACnB,SAAO;;AAGT,QAAO;EACL,WAAW,EAAE,SAAS,MAAM,aAA0B;GACpD,MAAM,MAAM,aAAa,GAAG,KAAK,GAAG;GACpC,MAAM,aAAa,kBAAkB,IAAI;GAEzC,MAAMC,QAAoB;IACxB,aAAa;IACb;IACA;IACD;AAED,cAAW,KAAK,MAAM;AAEtB,UAAO;IACL,SAAS;IACT,OAAO,WAAW,SAAS;IAC5B;;EAGH,UAAU,QAA2B;AAEnC,OAAI,OAAO,UAAU,WAAW,SAAS,GAAG;AAC1C,UAAM,IAAI,MAAM,iDAAiD,WAAW,SAAS,EAAE,QAAQ,OAAO,QAAQ;;AAEhH,cAAW,KAAK;;EAGlB,qBAIE;GACA,MAAM,WAAW,cAAc,WAAW;GAC1C,MAAM,UAAU,iBAAiB,SAAS;GAC1C,MAAM,aAAa,WAAW,WAAW;GAGzC,IAAIC;AACJ,OAAI,iBAAiB,YAAY;AAG/B,oBAAgB;;AAGlB,UAAO;IACL;IACA;IACA;IACD;;EAGH,mBAAmB,SAA8B;AAC/C,UAAO,kBAAkB,UAAU,QAAQ;;EAG7C,sBAAsB,OAAe,UAAwB;AAC3D,kBAAe,IAAI,OAAO,SAAS;;EAGrC,eAAuB;AACrB,UAAO,WAAW;;EAErB;;;;;AAMH,MAAa,gCAER;CACH,MAAM,qBAAqB,IAAI,KAAqB;AAEpD,QAAO,EACL,kBAAkB,KAAqB;EACrC,MAAM,UAAU,mBAAmB,IAAI,IAAI,IAAI;AAC/C,qBAAmB,IAAI,KAAK,UAAU,EAAE;AACxC,SAAO;IAEV;;;;;AAMH,MAAa,0BAER;CACH,MAAM,YAAY,IAAI,KAAa;AAEnC,QAAO,EACL,iBAAiB,UAA0B;EACzC,IAAI,OAAO;EACX,IAAI,SAAS;AACb,SAAO,UAAU,IAAI,KAAK,EAAE;AAC1B;AACA,UAAO,GAAG,SAAS,GAAG;;AAExB,YAAU,IAAI,KAAK;AACnB,SAAO;IAEV;;;;;AAMH,MAAa,gBAAgB,UAAyC;AACpE,QAAO,MAAM,KAAK,UAAU,MAAM,YAAY,CAAC,KAAK,IAAI"}
package/dist/canonical-id-BFnyQGST.mjs.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"canonical-id-BFnyQGST.mjs","names":["CanonicalIdSchema: z.ZodType<CanonicalId>","z","scopeStack: ScopeFrame[]","frame: ScopeFrame","exportBinding: string | undefined"],"sources":["../src/canonical-id/canonical-id.ts","../src/canonical-id/path-tracker.ts"],"sourcesContent":["import { isAbsolute, resolve } from \"node:path\";\nimport z from \"zod\";\nimport { normalizePath } from \"../utils\";\n\nexport type CanonicalId = string & { readonly __brand: \"CanonicalId\" };\n\nconst canonicalIdSeparator = \"::\" as const;\n\nexport const CanonicalIdSchema: z.ZodType<CanonicalId> = z.string() as unknown as z.ZodType<CanonicalId>;\n\n// Type-safe schema for CanonicalId - validates as string but types as branded\nexport const createCanonicalId = (filePath: string, astPath: string): CanonicalId => {\n if (!isAbsolute(filePath)) {\n throw new Error(\"[INTERNAL] CANONICAL_ID_REQUIRES_ABSOLUTE_PATH\");\n }\n\n const resolved = resolve(filePath);\n const normalized = normalizePath(resolved);\n\n // Create a 2-part ID: {absPath}::{astPath}\n // astPath uniquely identifies the definition's location in the AST (e.g., \"MyComponent.useQuery.def\")\n const idParts = [normalized, astPath];\n\n return idParts.join(canonicalIdSeparator) as CanonicalId;\n};\n","/**\n * Canonical path tracker for AST traversal.\n *\n * This module provides a stateful helper that tracks scope information during\n * AST traversal to generate canonical IDs. It's designed to integrate with\n * existing plugin visitor patterns (Babel, SWC, TypeScript) without requiring\n * a separate AST traversal.\n *\n * Usage pattern:\n * 1. Plugin creates tracker at file/program entry\n * 2. Plugin calls enterScope/exitScope during its traversal\n * 3. Plugin calls registerDefinition when discovering GQL definitions\n * 4. 
Tracker provides canonical ID information\n */\n\nimport type { CanonicalId } from \"./canonical-id\";\nimport { createCanonicalId } from \"./canonical-id\";\n\n/**\n * Scope frame for tracking AST path segments\n */\nexport type ScopeFrame = {\n /** Name segment (e.g., \"MyComponent\", \"useQuery\", \"arrow#1\") */\n readonly nameSegment: string;\n /** Kind of scope */\n readonly kind: \"function\" | \"class\" | \"variable\" | \"property\" | \"method\" | \"expression\";\n /** Occurrence index for disambiguation */\n readonly occurrence: number;\n};\n\n/**\n * Opaque handle for scope tracking\n */\nexport type ScopeHandle = {\n readonly __brand: \"ScopeHandle\";\n readonly depth: number;\n};\n\n/**\n * Canonical path tracker interface\n */\nexport interface CanonicalPathTracker {\n /**\n * Enter a new scope during traversal\n * @param options Scope information\n * @returns Handle to use when exiting the scope\n */\n enterScope(options: { segment: string; kind: ScopeFrame[\"kind\"]; stableKey?: string }): ScopeHandle;\n\n /**\n * Exit a scope during traversal\n * @param handle Handle returned from enterScope\n */\n exitScope(handle: ScopeHandle): void;\n\n /**\n * Register a definition discovered during traversal\n * @returns Definition metadata including astPath and canonical ID information\n */\n registerDefinition(): {\n astPath: string;\n isTopLevel: boolean;\n exportBinding?: string;\n };\n\n /**\n * Resolve a canonical ID from an astPath\n * @param astPath AST path string\n * @returns Canonical ID\n */\n resolveCanonicalId(astPath: string): CanonicalId;\n\n /**\n * Register an export binding\n * @param local Local variable name\n * @param exported Exported name\n */\n registerExportBinding(local: string, exported: string): void;\n\n /**\n * Get current scope depth\n * @returns Current depth (0 = top level)\n */\n currentDepth(): number;\n}\n\n/**\n * Build AST path from scope stack (internal helper)\n */\nconst _buildAstPath = (stack: readonly ScopeFrame[]): string => {\n return stack.map((frame) => frame.nameSegment).join(\".\");\n};\n\n/**\n * Create a canonical path tracker\n *\n * @param options Configuration options\n * @returns Tracker instance\n *\n * @example\n * ```typescript\n * // In a Babel plugin\n * const tracker = createCanonicalTracker({ filePath: state.filename });\n *\n * const visitor = {\n * FunctionDeclaration: {\n * enter(path) {\n * const handle = tracker.enterScope({\n * segment: path.node.id.name,\n * kind: 'function'\n * });\n * },\n * exit(path) {\n * tracker.exitScope(handle);\n * }\n * }\n * };\n * ```\n */\nexport const createCanonicalTracker = (options: {\n filePath: string;\n getExportName?: (localName: string) => string | undefined;\n}): CanonicalPathTracker => {\n const { filePath, getExportName } = options;\n\n // Scope stack\n const scopeStack: ScopeFrame[] = [];\n\n // Occurrence counters for disambiguating duplicate names\n const occurrenceCounters = new Map<string, number>();\n\n // Used paths for ensuring uniqueness\n const usedPaths = new Set<string>();\n\n // Export bindings map\n const exportBindings = new Map<string, string>();\n\n const getNextOccurrence = (key: string): number => {\n const current = occurrenceCounters.get(key) ?? 
0;\n occurrenceCounters.set(key, current + 1);\n return current;\n };\n\n const ensureUniquePath = (basePath: string): string => {\n let path = basePath;\n let suffix = 0;\n while (usedPaths.has(path)) {\n suffix++;\n path = `${basePath}$${suffix}`;\n }\n usedPaths.add(path);\n return path;\n };\n\n return {\n enterScope({ segment, kind, stableKey }): ScopeHandle {\n const key = stableKey ?? `${kind}:${segment}`;\n const occurrence = getNextOccurrence(key);\n\n const frame: ScopeFrame = {\n nameSegment: segment,\n kind,\n occurrence,\n };\n\n scopeStack.push(frame);\n\n return {\n __brand: \"ScopeHandle\",\n depth: scopeStack.length - 1,\n } as ScopeHandle;\n },\n\n exitScope(handle: ScopeHandle): void {\n // Validate handle depth matches current stack\n if (handle.depth !== scopeStack.length - 1) {\n throw new Error(`[INTERNAL] Invalid scope exit: expected depth ${scopeStack.length - 1}, got ${handle.depth}`);\n }\n scopeStack.pop();\n },\n\n registerDefinition(): {\n astPath: string;\n isTopLevel: boolean;\n exportBinding?: string;\n } {\n const basePath = _buildAstPath(scopeStack);\n const astPath = ensureUniquePath(basePath);\n const isTopLevel = scopeStack.length === 0;\n\n // Check export binding if provided\n let exportBinding: string | undefined;\n if (getExportName && isTopLevel) {\n // For top-level definitions, try to get export name\n // This is a simplified version - real logic depends on how the definition is bound\n exportBinding = undefined;\n }\n\n return {\n astPath,\n isTopLevel,\n exportBinding,\n };\n },\n\n resolveCanonicalId(astPath: string): CanonicalId {\n return createCanonicalId(filePath, astPath);\n },\n\n registerExportBinding(local: string, exported: string): void {\n exportBindings.set(local, exported);\n },\n\n currentDepth(): number {\n return scopeStack.length;\n },\n };\n};\n\n/**\n * Helper to create occurrence tracker (for backward compatibility)\n */\nexport const createOccurrenceTracker = (): {\n getNextOccurrence: (key: string) => number;\n} => {\n const occurrenceCounters = new Map<string, number>();\n\n return {\n getNextOccurrence(key: string): number {\n const current = occurrenceCounters.get(key) ?? 
0;\n occurrenceCounters.set(key, current + 1);\n return current;\n },\n };\n};\n\n/**\n * Helper to create path tracker (for backward compatibility)\n */\nexport const createPathTracker = (): {\n ensureUniquePath: (basePath: string) => string;\n} => {\n const usedPaths = new Set<string>();\n\n return {\n ensureUniquePath(basePath: string): string {\n let path = basePath;\n let suffix = 0;\n while (usedPaths.has(path)) {\n suffix++;\n path = `${basePath}$${suffix}`;\n }\n usedPaths.add(path);\n return path;\n },\n };\n};\n\n/**\n * Build AST path from scope stack (for backward compatibility)\n */\nexport const buildAstPath = (stack: readonly ScopeFrame[]): string => {\n return stack.map((frame) => frame.nameSegment).join(\".\");\n};\n"],"mappings":";;;;;AAMA,MAAM,uBAAuB;AAE7B,MAAaA,oBAA4CC,IAAE,QAAQ;AAGnE,MAAa,qBAAqB,UAAkB,YAAiC;AACnF,KAAI,CAAC,WAAW,SAAS,EAAE;AACzB,QAAM,IAAI,MAAM,iDAAiD;;CAGnE,MAAM,WAAW,QAAQ,SAAS;CAClC,MAAM,aAAa,cAAc,SAAS;CAI1C,MAAM,UAAU,CAAC,YAAY,QAAQ;AAErC,QAAO,QAAQ,KAAK,qBAAqB;;;;;;;;ACkE3C,MAAM,iBAAiB,UAAyC;AAC9D,QAAO,MAAM,KAAK,UAAU,MAAM,YAAY,CAAC,KAAK,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6B1D,MAAa,0BAA0B,YAGX;CAC1B,MAAM,EAAE,UAAU,kBAAkB;CAGpC,MAAMC,aAA2B,EAAE;CAGnC,MAAM,qBAAqB,IAAI,KAAqB;CAGpD,MAAM,YAAY,IAAI,KAAa;CAGnC,MAAM,iBAAiB,IAAI,KAAqB;CAEhD,MAAM,qBAAqB,QAAwB;EACjD,MAAM,UAAU,mBAAmB,IAAI,IAAI,IAAI;AAC/C,qBAAmB,IAAI,KAAK,UAAU,EAAE;AACxC,SAAO;;CAGT,MAAM,oBAAoB,aAA6B;EACrD,IAAI,OAAO;EACX,IAAI,SAAS;AACb,SAAO,UAAU,IAAI,KAAK,EAAE;AAC1B;AACA,UAAO,GAAG,SAAS,GAAG;;AAExB,YAAU,IAAI,KAAK;AACnB,SAAO;;AAGT,QAAO;EACL,WAAW,EAAE,SAAS,MAAM,aAA0B;GACpD,MAAM,MAAM,aAAa,GAAG,KAAK,GAAG;GACpC,MAAM,aAAa,kBAAkB,IAAI;GAEzC,MAAMC,QAAoB;IACxB,aAAa;IACb;IACA;IACD;AAED,cAAW,KAAK,MAAM;AAEtB,UAAO;IACL,SAAS;IACT,OAAO,WAAW,SAAS;IAC5B;;EAGH,UAAU,QAA2B;AAEnC,OAAI,OAAO,UAAU,WAAW,SAAS,GAAG;AAC1C,UAAM,IAAI,MAAM,iDAAiD,WAAW,SAAS,EAAE,QAAQ,OAAO,QAAQ;;AAEhH,cAAW,KAAK;;EAGlB,qBAIE;GACA,MAAM,WAAW,cAAc,WAAW;GAC1C,MAAM,UAAU,iBAAiB,SAAS;GAC1C,MAAM,aAAa,WAAW,WAAW;GAGzC,IAAIC;AACJ,OAAI,iBAAiB,YAAY;AAG/B,oBAAgB;;AAGlB,UAAO;IACL;IACA;IACA;IACD;;EAGH,mBAAmB,SAA8B;AAC/C,UAAO,kBAAkB,UAAU,QAAQ;;EAG7C,sBAAsB,OAAe,UAAwB;AAC3D,kBAAe,IAAI,OAAO,SAAS;;EAGrC,eAAuB;AACrB,UAAO,WAAW;;EAErB;;;;;AAMH,MAAa,gCAER;CACH,MAAM,qBAAqB,IAAI,KAAqB;AAEpD,QAAO,EACL,kBAAkB,KAAqB;EACrC,MAAM,UAAU,mBAAmB,IAAI,IAAI,IAAI;AAC/C,qBAAmB,IAAI,KAAK,UAAU,EAAE;AACxC,SAAO;IAEV;;;;;AAMH,MAAa,0BAER;CACH,MAAM,YAAY,IAAI,KAAa;AAEnC,QAAO,EACL,iBAAiB,UAA0B;EACzC,IAAI,OAAO;EACX,IAAI,SAAS;AACb,SAAO,UAAU,IAAI,KAAK,EAAE;AAC1B;AACA,UAAO,GAAG,SAAS,GAAG;;AAExB,YAAU,IAAI,KAAK;AACnB,SAAO;IAEV;;;;;AAMH,MAAa,gBAAgB,UAAyC;AACpE,QAAO,MAAM,KAAK,UAAU,MAAM,YAAY,CAAC,KAAK,IAAI"}
package/dist/index-BG7Aiges.d.cts.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"index-BG7Aiges.d.cts","names":[],"sources":["../src/canonical-id/canonical-id.ts","../src/canonical-id/path-tracker.ts"],"sourcesContent":[],"mappings":";;;KAIY,WAAA;;AAAZ,CAAA;AAIa,cAAA,iBAA6B,EAAV,GAAA,CAAE,OAAF,CAAU,WAAD,CAAA;AAG5B,cAAA,iBAAyD,EAAA,CAAA,QAarE,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,EAAA,GAbqE,WAarE;;;;;;AC+FD;AAuGa,KAzMD,UAAA,GAyMC;EAiBA;EAsBA,SAAA,WAEZ,EAAA,MAF4C;;;;;;;;;KApOjC,WAAA;;;;;;;UAQK,oBAAA;;;;;;;;UAM8B;;MAA2C;;;;;oBAMtE;;;;;;;;;;;;;;;uCAiBmB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;cAiD1B;;;MAGT;;;;cAoGS;;;;;;cAiBA;;;;;;cAsBA,+BAAgC"}
package/dist/index-BedBpKbv.d.mts.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"index-BedBpKbv.d.mts","names":[],"sources":["../src/portable/fs.ts","../src/portable/hash.ts","../src/portable/id.ts","../src/portable/runtime.ts","../src/portable/spawn.ts"],"sourcesContent":[],"mappings":";;AAMA;;AAE4C,UAF3B,UAAA,CAE2B;EACpB,QAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EAFE,OAEF,CAAA,MAAA,CAAA;EACe,SAAA,CAAA,IAAA,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,CAAA,EAFK,OAEL,CAAA,IAAA,CAAA;EAAjB,MAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EADE,OACF,CAAA,OAAA,CAAA;EACsB,IAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EADtB,OACsB,CAAA;IACc,KAAA,EAFnB,IAEmB;IAAO,IAAA,EAAA,MAAA;EAsBjD,CAAA,CAAA;EA4FA,MAAA,CAAA,OAAA,EAAa,MAAA,EAAA,OAAI,EAAA,MAAU,CAAA,EAnHC,OAmHD,CAAA,IAAA,CAAA;EAW3B,KAAA,CAAA,IAAA,EAAA,MAAA,EAAA,QAAyB,EAAA;;MA7HiB;;ACN9C,iBD4BI,gBAAA,CAAA,CC5BS,ED4BW,UC5BX;AAER,iBDsHD,aAAA,CAAA,CCrHoB,EDqHH,UCrHgB;AAUjD;AAqCA;AAWA;;iBDsEgB,yBAAA,CAAA;;;;AAnIhB;;AAE4C,KCFhC,aAAA,GDEgC,QAAA,GAAA,QAAA;AACpB,UCDP,cAAA,CDCO;EACe,IAAA,CAAA,OAAA,EAAA,MAAA,EAAA,SAAA,CAAA,ECDH,aDCG,CAAA,EAAA,MAAA;;AACK,iBCQ5B,oBAAA,CAAA,CDR4B,ECQJ,cDRI;AACc,iBC4C1C,iBAAA,CAAA,CD5C0C,EC4CrB,cD5CqB;;AAsB1D;AA4FA;AAWA;iBCtEgB,6BAAA,CAAA;;;;AD7DhB;;;;;;AAK4C,iBED5B,UAAA,CAAA,CFC4B,EAAA,MAAA;;;;AAL5C;;AAE4C,cGJ/B,OHI+B,EAAA;EACpB,SAAA,KAAA,EAAA,OAAA;EACe,SAAA,MAAA,EAAA,OAAA;EAAjB,SAAA,iBAAA,EAAA,OAAA;CACsB;;;AAuB5C;AA4FgB,iBGjHA,IHiHa,CAAA,CAAA,CAAA,CAAI,EAAA,EAAA,GAAA,GGjHC,CHiHS,CAAA,EAAA,GAAA,GGjHC,CHiHD;AAW3C;;;;ACnIY,iBEwBI,qBAAA,CAAA,CFxBS,EAAA,IAAA;;;;ADAzB;;AAE4C,UIF3B,YAAA,CJE2B;EACpB,GAAA,EAAA,MAAA,EAAA;EACe,GAAA,CAAA,EAAA,MAAA;EAAjB,GAAA,CAAA,EIDd,MJCc,CAAA,MAAA,EAAA,MAAA,CAAA;;AAEoC,UIAzC,WAAA,CJAyC;EAAO,MAAA,EAAA,MAAA;EAsBjD,MAAA,EAAA,MAAA;EA4FA,QAAA,EAAA,MAAa;AAW7B;iBIvHsB,KAAA,UAAe,eAAe,QAAQ"}
package/dist/index-C4t2Wbzs.d.mts.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"index-C4t2Wbzs.d.mts","names":[],"sources":["../src/canonical-id/canonical-id.ts","../src/canonical-id/path-tracker.ts"],"sourcesContent":[],"mappings":";;;KAIY,WAAA;;AAAZ,CAAA;AAIa,cAAA,iBAA6B,EAAV,GAAA,CAAE,OAAF,CAAU,WAAD,CAAA;AAG5B,cAAA,iBAAyD,EAAA,CAAA,QAarE,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,EAAA,GAbqE,WAarE;;;;;;AC+FD;AAuGa,KAzMD,UAAA,GAyMC;EAiBA;EAsBA,SAAA,WAEZ,EAAA,MAF4C;;;;;;;;;KApOjC,WAAA;;;;;;;UAQK,oBAAA;;;;;;;;UAM8B;;MAA2C;;;;;oBAMtE;;;;;;;;;;;;;;;uCAiBmB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;cAiD1B;;;MAGT;;;;cAoGS;;;;;;cAiBA;;;;;;cAsBA,+BAAgC"}
package/dist/index-DaAp2rNj.d.cts.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"index-DaAp2rNj.d.cts","names":[],"sources":["../src/portable/fs.ts","../src/portable/hash.ts","../src/portable/id.ts","../src/portable/runtime.ts","../src/portable/spawn.ts"],"sourcesContent":[],"mappings":";;AAMA;;AAE4C,UAF3B,UAAA,CAE2B;EACpB,QAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EAFE,OAEF,CAAA,MAAA,CAAA;EACe,SAAA,CAAA,IAAA,EAAA,MAAA,EAAA,OAAA,EAAA,MAAA,CAAA,EAFK,OAEL,CAAA,IAAA,CAAA;EAAjB,MAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EADE,OACF,CAAA,OAAA,CAAA;EACsB,IAAA,CAAA,IAAA,EAAA,MAAA,CAAA,EADtB,OACsB,CAAA;IACc,KAAA,EAFnB,IAEmB;IAAO,IAAA,EAAA,MAAA;EAsBjD,CAAA,CAAA;EA4FA,MAAA,CAAA,OAAA,EAAa,MAAA,EAAA,OAAI,EAAA,MAAU,CAAA,EAnHC,OAmHD,CAAA,IAAA,CAAA;EAW3B,KAAA,CAAA,IAAA,EAAA,MAAA,EAAA,QAAyB,EAAA;;MA7HiB;;ACN9C,iBD4BI,gBAAA,CAAA,CC5BS,ED4BW,UC5BX;AAER,iBDsHD,aAAA,CAAA,CCrHoB,EDqHH,UCrHgB;AAUjD;AAqCA;AAWA;;iBDsEgB,yBAAA,CAAA;;;;AAnIhB;;AAE4C,KCFhC,aAAA,GDEgC,QAAA,GAAA,QAAA;AACpB,UCDP,cAAA,CDCO;EACe,IAAA,CAAA,OAAA,EAAA,MAAA,EAAA,SAAA,CAAA,ECDH,aDCG,CAAA,EAAA,MAAA;;AACK,iBCQ5B,oBAAA,CAAA,CDR4B,ECQJ,cDRI;AACc,iBC4C1C,iBAAA,CAAA,CD5C0C,EC4CrB,cD5CqB;;AAsB1D;AA4FA;AAWA;iBCtEgB,6BAAA,CAAA;;;;AD7DhB;;;;;;AAK4C,iBED5B,UAAA,CAAA,CFC4B,EAAA,MAAA;;;;AAL5C;;AAE4C,cGJ/B,OHI+B,EAAA;EACpB,SAAA,KAAA,EAAA,OAAA;EACe,SAAA,MAAA,EAAA,OAAA;EAAjB,SAAA,iBAAA,EAAA,OAAA;CACsB;;;AAuB5C;AA4FgB,iBGjHA,IHiHa,CAAA,CAAA,CAAI,CAAA,EAAA,EAAA,GAAA,GGjHC,CHiHS,CAAA,EAAA,GAAA,GGjHC,CHiHD;AAW3C;;;;ACnIY,iBEwBI,qBAAA,CAAA,CFxBS,EAAA,IAAA;;;;ADAzB;;AAE4C,UIF3B,YAAA,CJE2B;EACpB,GAAA,EAAA,MAAA,EAAA;EACe,GAAA,CAAA,EAAA,MAAA;EAAjB,GAAA,CAAA,EIDd,MJCc,CAAA,MAAA,EAAA,MAAA,CAAA;;AAEoC,UIAzC,WAAA,CJAyC;EAAO,MAAA,EAAA,MAAA;EAsBjD,MAAA,EAAA,MAAA;EA4FA,QAAA,EAAA,MAAa;AAW7B;iBIvHsB,KAAA,UAAe,eAAe,QAAQ"}
package/dist/portable-C_7gJWmz.cjs.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"portable-C_7gJWmz.cjs","names":["result: T | undefined","fsInstance: PortableFS | null","crypto","hasherInstance: PortableHasher | null","crypto","execOptions: {\n cwd?: string;\n env?: NodeJS.ProcessEnv;\n encoding: BufferEncoding;\n }","error: unknown"],"sources":["../src/portable/runtime.ts","../src/portable/fs.ts","../src/portable/hash.ts","../src/portable/id.ts","../src/portable/spawn.ts"],"sourcesContent":["/**\n * Runtime detection utilities for portable API implementation\n */\n\nexport const runtime = {\n isBun: typeof Bun !== \"undefined\",\n isNode: typeof process !== \"undefined\" && typeof Bun === \"undefined\",\n supportsWebCrypto: typeof crypto !== \"undefined\" && typeof crypto.subtle !== \"undefined\",\n} as const;\n\n/**\n * Helper to cache module imports to avoid repeated dynamic imports\n */\nexport function once<T>(fn: () => T): () => T {\n let result: T | undefined;\n let called = false;\n\n return () => {\n if (!called) {\n result = fn();\n called = true;\n }\n return result as T;\n };\n}\n\n/**\n * Reset runtime state for testing purposes only\n * @internal\n */\nexport function resetPortableForTests(): void {\n // This is a marker function that portable modules can use\n // to reset their singleton state in tests\n}\n","/**\n * Portable filesystem API that works on both Bun and Node.js\n */\n\nimport { once, runtime } from \"./runtime\";\n\nexport interface PortableFS {\n readFile(path: string): Promise<string>;\n writeFile(path: string, content: string): Promise<void>;\n exists(path: string): Promise<boolean>;\n stat(path: string): Promise<{ mtime: Date; size: number }>;\n rename(oldPath: string, newPath: string): Promise<void>;\n mkdir(path: string, options?: { recursive?: boolean }): Promise<void>;\n}\n\ninterface FSPromises {\n readFile: (path: string, encoding: string) => Promise<string>;\n writeFile: (path: string, content: string, encoding: string) => Promise<void>;\n access: (path: string) => Promise<void>;\n stat: (path: string) => Promise<{\n mtime: Date;\n size: number;\n isDirectory: () => boolean;\n }>;\n rename: (oldPath: string, newPath: string) => Promise<void>;\n mkdir: (path: string, options?: { recursive?: boolean }) => Promise<void>;\n}\n\n// Cache the fs/promises import\nconst getNodeFS = once(async (): Promise<FSPromises> => {\n const fs = await import(\"node:fs/promises\");\n return fs as FSPromises;\n});\n\nexport function createPortableFS(): PortableFS {\n if (runtime.isBun) {\n return {\n async readFile(path) {\n const file = Bun.file(path);\n return await file.text();\n },\n\n async writeFile(path, content) {\n // Bun.write auto-creates parent directories\n await Bun.write(path, content);\n },\n\n async exists(path) {\n // Bun.file().exists() only works for files, use fs.stat for both files and dirs\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.stat(path);\n return true;\n } catch {\n return false;\n }\n },\n\n async stat(path) {\n const file = Bun.file(path);\n const size = file.size;\n // Bun doesn't expose mtime directly, use Node fs.stat\n const nodeFS = await getNodeFS();\n const { mtime } = await nodeFS.stat(path);\n return { mtime, size };\n },\n\n async rename(oldPath, newPath) {\n const nodeFS = await getNodeFS();\n await nodeFS.rename(oldPath, newPath);\n },\n\n async mkdir(path, options) {\n const nodeFS = await getNodeFS();\n await nodeFS.mkdir(path, options);\n },\n };\n }\n\n // Node.js implementation\n return {\n async readFile(path) {\n const nodeFS = await getNodeFS();\n return await 
nodeFS.readFile(path, \"utf-8\");\n },\n\n async writeFile(path, content) {\n const nodeFS = await getNodeFS();\n // Auto-create parent directories like Bun.write does\n const pathModule = await import(\"node:path\");\n const dir = pathModule.dirname(path);\n await nodeFS.mkdir(dir, { recursive: true });\n await nodeFS.writeFile(path, content, \"utf-8\");\n },\n\n async exists(path) {\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.access(path);\n return true;\n } catch {\n return false;\n }\n },\n\n async stat(path) {\n const nodeFS = await getNodeFS();\n const stats = await nodeFS.stat(path);\n return { mtime: stats.mtime, size: stats.size };\n },\n\n async rename(oldPath, newPath) {\n const nodeFS = await getNodeFS();\n await nodeFS.rename(oldPath, newPath);\n },\n\n async mkdir(path, options) {\n const nodeFS = await getNodeFS();\n await nodeFS.mkdir(path, options);\n },\n };\n}\n\n// Singleton to avoid recreating instances\nlet fsInstance: PortableFS | null = null;\n\nexport function getPortableFS(): PortableFS {\n if (!fsInstance) {\n fsInstance = createPortableFS();\n }\n return fsInstance;\n}\n\n/**\n * Reset the filesystem singleton for testing\n * @internal\n */\nexport function __resetPortableFSForTests(): void {\n fsInstance = null;\n}\n","/**\n * Portable hashing API that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\nexport type HashAlgorithm = \"sha256\" | \"xxhash\";\n\nexport interface PortableHasher {\n hash(content: string, algorithm?: HashAlgorithm): string;\n}\n\n/**\n * Pads a hex string to the specified length\n */\nfunction padHex(hex: string, length: number): string {\n return hex.padStart(length, \"0\");\n}\n\nexport function createPortableHasher(): PortableHasher {\n if (runtime.isBun) {\n return {\n hash(content, algorithm = \"xxhash\") {\n if (algorithm === \"sha256\") {\n const hasher = new Bun.CryptoHasher(\"sha256\");\n hasher.update(content);\n return hasher.digest(\"hex\");\n }\n // xxhash - Bun.hash returns a number\n const hashNum = Bun.hash(content);\n // Convert to hex and pad to 16 chars for consistency\n return padHex(hashNum.toString(16), 16);\n },\n };\n }\n\n // Node.js implementation\n return {\n hash(content, algorithm = \"xxhash\") {\n if (algorithm === \"sha256\") {\n const crypto = require(\"node:crypto\");\n return crypto.createHash(\"sha256\").update(content).digest(\"hex\");\n }\n // xxhash fallback: use sha256 for now (can add xxhash package later if needed)\n // This ensures consistent behavior across runtimes\n const crypto = require(\"node:crypto\");\n const sha256Hash = crypto.createHash(\"sha256\").update(content).digest(\"hex\");\n // Take first 16 chars to match xxhash output length\n return sha256Hash.substring(0, 16);\n },\n };\n}\n\n// Singleton to avoid recreating instances\nlet hasherInstance: PortableHasher | null = null;\n\nexport function getPortableHasher(): PortableHasher {\n if (!hasherInstance) {\n hasherInstance = createPortableHasher();\n }\n return hasherInstance;\n}\n\n/**\n * Reset the hasher singleton for testing\n * @internal\n */\nexport function __resetPortableHasherForTests(): void {\n hasherInstance = null;\n}\n","/**\n * Portable ID generation that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\n/**\n * Generate a unique ID\n * Uses UUIDv7 on Bun (monotonic), falls back to randomUUID on Node.js\n */\nexport function generateId(): string {\n if (runtime.isBun && typeof Bun !== \"undefined\" && typeof Bun.randomUUIDv7 === \"function\") 
{\n return Bun.randomUUIDv7();\n }\n\n // Node.js fallback: use crypto.randomUUID\n const crypto = require(\"node:crypto\");\n return crypto.randomUUID();\n}\n","/**\n * Portable subprocess spawning that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\nexport interface SpawnOptions {\n cmd: string[];\n cwd?: string;\n env?: Record<string, string>;\n}\n\nexport interface SpawnResult {\n stdout: string;\n stderr: string;\n exitCode: number;\n}\n\nexport async function spawn(options: SpawnOptions): Promise<SpawnResult> {\n if (runtime.isBun) {\n const proc = Bun.spawn(options.cmd, {\n cwd: options.cwd,\n env: options.env,\n stdout: \"pipe\",\n stderr: \"pipe\",\n });\n\n const [stdout, stderr] = await Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]);\n\n const exitCode = await proc.exited;\n\n return { stdout, stderr, exitCode };\n }\n\n // Node.js implementation\n const { execFile } = await import(\"node:child_process\");\n const { promisify } = await import(\"node:util\");\n const execFilePromise = promisify(execFile);\n\n const [command, ...args] = options.cmd;\n if (!command) {\n return {\n stdout: \"\",\n stderr: \"Error: No command provided\",\n exitCode: 1,\n };\n }\n\n try {\n const execOptions: {\n cwd?: string;\n env?: NodeJS.ProcessEnv;\n encoding: BufferEncoding;\n } = {\n encoding: \"utf-8\",\n };\n\n if (options.cwd) {\n execOptions.cwd = options.cwd;\n }\n if (options.env) {\n execOptions.env = options.env as NodeJS.ProcessEnv;\n }\n\n const { stdout, stderr } = await execFilePromise(command, args, execOptions);\n return {\n stdout: stdout || \"\",\n stderr: stderr || \"\",\n exitCode: 0,\n };\n } catch (error: unknown) {\n const err = error as {\n stdout?: string;\n stderr?: string;\n code?: number;\n };\n return {\n stdout: err.stdout || \"\",\n stderr: err.stderr || \"\",\n exitCode: err.code || 1,\n };\n 
}\n}\n"],"mappings":";;;;;AAIA,MAAa,UAAU;CACrB,OAAO,OAAO,QAAQ;CACtB,QAAQ,OAAO,YAAY,eAAe,OAAO,QAAQ;CACzD,mBAAmB,OAAO,WAAW,eAAe,OAAO,OAAO,WAAW;CAC9E;;;;AAKD,SAAgB,KAAQ,IAAsB;CAC5C,IAAIA;CACJ,IAAI,SAAS;AAEb,cAAa;AACX,MAAI,CAAC,QAAQ;AACX,YAAS,IAAI;AACb,YAAS;;AAEX,SAAO;;;;;;;AAQX,SAAgB,wBAA8B;;;;;;;ACD9C,MAAM,YAAY,KAAK,YAAiC;CACtD,MAAM,KAAK,MAAM,OAAO;AACxB,QAAO;EACP;AAEF,SAAgB,mBAA+B;AAC7C,KAAI,QAAQ,OAAO;AACjB,SAAO;GACL,MAAM,SAAS,MAAM;IACnB,MAAM,OAAO,IAAI,KAAK,KAAK;AAC3B,WAAO,MAAM,KAAK,MAAM;;GAG1B,MAAM,UAAU,MAAM,SAAS;AAE7B,UAAM,IAAI,MAAM,MAAM,QAAQ;;GAGhC,MAAM,OAAO,MAAM;IAEjB,MAAM,SAAS,MAAM,WAAW;AAChC,QAAI;AACF,WAAM,OAAO,KAAK,KAAK;AACvB,YAAO;YACD;AACN,YAAO;;;GAIX,MAAM,KAAK,MAAM;IACf,MAAM,OAAO,IAAI,KAAK,KAAK;IAC3B,MAAM,OAAO,KAAK;IAElB,MAAM,SAAS,MAAM,WAAW;IAChC,MAAM,EAAE,UAAU,MAAM,OAAO,KAAK,KAAK;AACzC,WAAO;KAAE;KAAO;KAAM;;GAGxB,MAAM,OAAO,SAAS,SAAS;IAC7B,MAAM,SAAS,MAAM,WAAW;AAChC,UAAM,OAAO,OAAO,SAAS,QAAQ;;GAGvC,MAAM,MAAM,MAAM,SAAS;IACzB,MAAM,SAAS,MAAM,WAAW;AAChC,UAAM,OAAO,MAAM,MAAM,QAAQ;;GAEpC;;AAIH,QAAO;EACL,MAAM,SAAS,MAAM;GACnB,MAAM,SAAS,MAAM,WAAW;AAChC,UAAO,MAAM,OAAO,SAAS,MAAM,QAAQ;;EAG7C,MAAM,UAAU,MAAM,SAAS;GAC7B,MAAM,SAAS,MAAM,WAAW;GAEhC,MAAM,aAAa,MAAM,OAAO;GAChC,MAAM,MAAM,WAAW,QAAQ,KAAK;AACpC,SAAM,OAAO,MAAM,KAAK,EAAE,WAAW,MAAM,CAAC;AAC5C,SAAM,OAAO,UAAU,MAAM,SAAS,QAAQ;;EAGhD,MAAM,OAAO,MAAM;GACjB,MAAM,SAAS,MAAM,WAAW;AAChC,OAAI;AACF,UAAM,OAAO,OAAO,KAAK;AACzB,WAAO;WACD;AACN,WAAO;;;EAIX,MAAM,KAAK,MAAM;GACf,MAAM,SAAS,MAAM,WAAW;GAChC,MAAM,QAAQ,MAAM,OAAO,KAAK,KAAK;AACrC,UAAO;IAAE,OAAO,MAAM;IAAO,MAAM,MAAM;IAAM;;EAGjD,MAAM,OAAO,SAAS,SAAS;GAC7B,MAAM,SAAS,MAAM,WAAW;AAChC,SAAM,OAAO,OAAO,SAAS,QAAQ;;EAGvC,MAAM,MAAM,MAAM,SAAS;GACzB,MAAM,SAAS,MAAM,WAAW;AAChC,SAAM,OAAO,MAAM,MAAM,QAAQ;;EAEpC;;AAIH,IAAIC,aAAgC;AAEpC,SAAgB,gBAA4B;AAC1C,KAAI,CAAC,YAAY;AACf,eAAa,kBAAkB;;AAEjC,QAAO;;;;;;AAOT,SAAgB,4BAAkC;AAChD,cAAa;;;;;;;;;;;AC3Hf,SAAS,OAAO,KAAa,QAAwB;AACnD,QAAO,IAAI,SAAS,QAAQ,IAAI;;AAGlC,SAAgB,uBAAuC;AACrD,KAAI,QAAQ,OAAO;AACjB,SAAO,EACL,KAAK,SAAS,YAAY,UAAU;AAClC,OAAI,cAAc,UAAU;IAC1B,MAAM,SAAS,IAAI,IAAI,aAAa,SAAS;AAC7C,WAAO,OAAO,QAAQ;AACtB,WAAO,OAAO,OAAO,MAAM;;GAG7B,MAAM,UAAU,IAAI,KAAK,QAAQ;AAEjC,UAAO,OAAO,QAAQ,SAAS,GAAG,EAAE,GAAG;KAE1C;;AAIH,QAAO,EACL,KAAK,SAAS,YAAY,UAAU;AAClC,MAAI,cAAc,UAAU;GAC1B,MAAMC,WAAS,QAAQ,cAAc;AACrC,UAAOA,SAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;;EAIlE,MAAMA,WAAS,QAAQ,cAAc;EACrC,MAAM,aAAaA,SAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;AAE5E,SAAO,WAAW,UAAU,GAAG,GAAG;IAErC;;AAIH,IAAIC,iBAAwC;AAE5C,SAAgB,oBAAoC;AAClD,KAAI,CAAC,gBAAgB;AACnB,mBAAiB,sBAAsB;;AAEzC,QAAO;;;;;;AAOT,SAAgB,gCAAsC;AACpD,kBAAiB;;;;;;;;;;;;AC1DnB,SAAgB,aAAqB;AACnC,KAAI,QAAQ,SAAS,OAAO,QAAQ,eAAe,OAAO,IAAI,iBAAiB,YAAY;AACzF,SAAO,IAAI,cAAc;;CAI3B,MAAMC,WAAS,QAAQ,cAAc;AACrC,QAAOA,SAAO,YAAY;;;;;;;;ACC5B,eAAsB,MAAM,SAA6C;AACvE,KAAI,QAAQ,OAAO;EACjB,MAAM,OAAO,IAAI,MAAM,QAAQ,KAAK;GAClC,KAAK,QAAQ;GACb,KAAK,QAAQ;GACb,QAAQ;GACR,QAAQ;GACT,CAAC;EAEF,MAAM,CAAC,QAAQ,UAAU,MAAM,QAAQ,IAAI,CAAC,IAAI,SAAS,KAAK,OAAO,CAAC,MAAM,EAAE,IAAI,SAAS,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC;EAEhH,MAAM,WAAW,MAAM,KAAK;AAE5B,SAAO;GAAE;GAAQ;GAAQ;GAAU;;CAIrC,MAAM,EAAE,aAAa,MAAM,OAAO;CAClC,MAAM,EAAE,cAAc,MAAM,OAAO;CACnC,MAAM,kBAAkB,UAAU,SAAS;CAE3C,MAAM,CAAC,SAAS,GAAG,QAAQ,QAAQ;AACnC,KAAI,CAAC,SAAS;AACZ,SAAO;GACL,QAAQ;GACR,QAAQ;GACR,UAAU;GACX;;AAGH,KAAI;EACF,MAAMC,cAIF,EACF,UAAU,SACX;AAED,MAAI,QAAQ,KAAK;AACf,eAAY,MAAM,QAAQ;;AAE5B,MAAI,QAAQ,KAAK;AACf,eAAY,MAAM,QAAQ;;EAG5B,MAAM,EAAE,QAAQ,WAAW,MAAM,gBAAgB,SAAS,MAAM,YAAY;AAC5E,SAAO;GACL,QAAQ,UAAU;GAClB,QAAQ,UAAU;GAClB,UAAU;GACX;UACMC,OAAgB;EACvB,MAAM,MAAM;AAKZ,SAAO;GACL,QAAQ,IAAI,UAAU;GACtB,QAAQ,IAAI,UAAU;GACtB,UAAU,IAAI,QA
AQ;GACvB"}
package/dist/portable-Dbo3u2CQ.mjs.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"portable-Dbo3u2CQ.mjs","names":["result: T | undefined","fsInstance: PortableFS | null","crypto","hasherInstance: PortableHasher | null","crypto","execOptions: {\n cwd?: string;\n env?: NodeJS.ProcessEnv;\n encoding: BufferEncoding;\n }","error: unknown"],"sources":["../src/portable/runtime.ts","../src/portable/fs.ts","../src/portable/hash.ts","../src/portable/id.ts","../src/portable/spawn.ts"],"sourcesContent":["/**\n * Runtime detection utilities for portable API implementation\n */\n\nexport const runtime = {\n isBun: typeof Bun !== \"undefined\",\n isNode: typeof process !== \"undefined\" && typeof Bun === \"undefined\",\n supportsWebCrypto: typeof crypto !== \"undefined\" && typeof crypto.subtle !== \"undefined\",\n} as const;\n\n/**\n * Helper to cache module imports to avoid repeated dynamic imports\n */\nexport function once<T>(fn: () => T): () => T {\n let result: T | undefined;\n let called = false;\n\n return () => {\n if (!called) {\n result = fn();\n called = true;\n }\n return result as T;\n };\n}\n\n/**\n * Reset runtime state for testing purposes only\n * @internal\n */\nexport function resetPortableForTests(): void {\n // This is a marker function that portable modules can use\n // to reset their singleton state in tests\n}\n","/**\n * Portable filesystem API that works on both Bun and Node.js\n */\n\nimport { once, runtime } from \"./runtime\";\n\nexport interface PortableFS {\n readFile(path: string): Promise<string>;\n writeFile(path: string, content: string): Promise<void>;\n exists(path: string): Promise<boolean>;\n stat(path: string): Promise<{ mtime: Date; size: number }>;\n rename(oldPath: string, newPath: string): Promise<void>;\n mkdir(path: string, options?: { recursive?: boolean }): Promise<void>;\n}\n\ninterface FSPromises {\n readFile: (path: string, encoding: string) => Promise<string>;\n writeFile: (path: string, content: string, encoding: string) => Promise<void>;\n access: (path: string) => Promise<void>;\n stat: (path: string) => Promise<{\n mtime: Date;\n size: number;\n isDirectory: () => boolean;\n }>;\n rename: (oldPath: string, newPath: string) => Promise<void>;\n mkdir: (path: string, options?: { recursive?: boolean }) => Promise<void>;\n}\n\n// Cache the fs/promises import\nconst getNodeFS = once(async (): Promise<FSPromises> => {\n const fs = await import(\"node:fs/promises\");\n return fs as FSPromises;\n});\n\nexport function createPortableFS(): PortableFS {\n if (runtime.isBun) {\n return {\n async readFile(path) {\n const file = Bun.file(path);\n return await file.text();\n },\n\n async writeFile(path, content) {\n // Bun.write auto-creates parent directories\n await Bun.write(path, content);\n },\n\n async exists(path) {\n // Bun.file().exists() only works for files, use fs.stat for both files and dirs\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.stat(path);\n return true;\n } catch {\n return false;\n }\n },\n\n async stat(path) {\n const file = Bun.file(path);\n const size = file.size;\n // Bun doesn't expose mtime directly, use Node fs.stat\n const nodeFS = await getNodeFS();\n const { mtime } = await nodeFS.stat(path);\n return { mtime, size };\n },\n\n async rename(oldPath, newPath) {\n const nodeFS = await getNodeFS();\n await nodeFS.rename(oldPath, newPath);\n },\n\n async mkdir(path, options) {\n const nodeFS = await getNodeFS();\n await nodeFS.mkdir(path, options);\n },\n };\n }\n\n // Node.js implementation\n return {\n async readFile(path) {\n const nodeFS = await getNodeFS();\n return await 
nodeFS.readFile(path, \"utf-8\");\n },\n\n async writeFile(path, content) {\n const nodeFS = await getNodeFS();\n // Auto-create parent directories like Bun.write does\n const pathModule = await import(\"node:path\");\n const dir = pathModule.dirname(path);\n await nodeFS.mkdir(dir, { recursive: true });\n await nodeFS.writeFile(path, content, \"utf-8\");\n },\n\n async exists(path) {\n const nodeFS = await getNodeFS();\n try {\n await nodeFS.access(path);\n return true;\n } catch {\n return false;\n }\n },\n\n async stat(path) {\n const nodeFS = await getNodeFS();\n const stats = await nodeFS.stat(path);\n return { mtime: stats.mtime, size: stats.size };\n },\n\n async rename(oldPath, newPath) {\n const nodeFS = await getNodeFS();\n await nodeFS.rename(oldPath, newPath);\n },\n\n async mkdir(path, options) {\n const nodeFS = await getNodeFS();\n await nodeFS.mkdir(path, options);\n },\n };\n}\n\n// Singleton to avoid recreating instances\nlet fsInstance: PortableFS | null = null;\n\nexport function getPortableFS(): PortableFS {\n if (!fsInstance) {\n fsInstance = createPortableFS();\n }\n return fsInstance;\n}\n\n/**\n * Reset the filesystem singleton for testing\n * @internal\n */\nexport function __resetPortableFSForTests(): void {\n fsInstance = null;\n}\n","/**\n * Portable hashing API that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\nexport type HashAlgorithm = \"sha256\" | \"xxhash\";\n\nexport interface PortableHasher {\n hash(content: string, algorithm?: HashAlgorithm): string;\n}\n\n/**\n * Pads a hex string to the specified length\n */\nfunction padHex(hex: string, length: number): string {\n return hex.padStart(length, \"0\");\n}\n\nexport function createPortableHasher(): PortableHasher {\n if (runtime.isBun) {\n return {\n hash(content, algorithm = \"xxhash\") {\n if (algorithm === \"sha256\") {\n const hasher = new Bun.CryptoHasher(\"sha256\");\n hasher.update(content);\n return hasher.digest(\"hex\");\n }\n // xxhash - Bun.hash returns a number\n const hashNum = Bun.hash(content);\n // Convert to hex and pad to 16 chars for consistency\n return padHex(hashNum.toString(16), 16);\n },\n };\n }\n\n // Node.js implementation\n return {\n hash(content, algorithm = \"xxhash\") {\n if (algorithm === \"sha256\") {\n const crypto = require(\"node:crypto\");\n return crypto.createHash(\"sha256\").update(content).digest(\"hex\");\n }\n // xxhash fallback: use sha256 for now (can add xxhash package later if needed)\n // This ensures consistent behavior across runtimes\n const crypto = require(\"node:crypto\");\n const sha256Hash = crypto.createHash(\"sha256\").update(content).digest(\"hex\");\n // Take first 16 chars to match xxhash output length\n return sha256Hash.substring(0, 16);\n },\n };\n}\n\n// Singleton to avoid recreating instances\nlet hasherInstance: PortableHasher | null = null;\n\nexport function getPortableHasher(): PortableHasher {\n if (!hasherInstance) {\n hasherInstance = createPortableHasher();\n }\n return hasherInstance;\n}\n\n/**\n * Reset the hasher singleton for testing\n * @internal\n */\nexport function __resetPortableHasherForTests(): void {\n hasherInstance = null;\n}\n","/**\n * Portable ID generation that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\n/**\n * Generate a unique ID\n * Uses UUIDv7 on Bun (monotonic), falls back to randomUUID on Node.js\n */\nexport function generateId(): string {\n if (runtime.isBun && typeof Bun !== \"undefined\" && typeof Bun.randomUUIDv7 === \"function\") 
{\n return Bun.randomUUIDv7();\n }\n\n // Node.js fallback: use crypto.randomUUID\n const crypto = require(\"node:crypto\");\n return crypto.randomUUID();\n}\n","/**\n * Portable subprocess spawning that works on both Bun and Node.js\n */\n\nimport { runtime } from \"./runtime\";\n\nexport interface SpawnOptions {\n cmd: string[];\n cwd?: string;\n env?: Record<string, string>;\n}\n\nexport interface SpawnResult {\n stdout: string;\n stderr: string;\n exitCode: number;\n}\n\nexport async function spawn(options: SpawnOptions): Promise<SpawnResult> {\n if (runtime.isBun) {\n const proc = Bun.spawn(options.cmd, {\n cwd: options.cwd,\n env: options.env,\n stdout: \"pipe\",\n stderr: \"pipe\",\n });\n\n const [stdout, stderr] = await Promise.all([new Response(proc.stdout).text(), new Response(proc.stderr).text()]);\n\n const exitCode = await proc.exited;\n\n return { stdout, stderr, exitCode };\n }\n\n // Node.js implementation\n const { execFile } = await import(\"node:child_process\");\n const { promisify } = await import(\"node:util\");\n const execFilePromise = promisify(execFile);\n\n const [command, ...args] = options.cmd;\n if (!command) {\n return {\n stdout: \"\",\n stderr: \"Error: No command provided\",\n exitCode: 1,\n };\n }\n\n try {\n const execOptions: {\n cwd?: string;\n env?: NodeJS.ProcessEnv;\n encoding: BufferEncoding;\n } = {\n encoding: \"utf-8\",\n };\n\n if (options.cwd) {\n execOptions.cwd = options.cwd;\n }\n if (options.env) {\n execOptions.env = options.env as NodeJS.ProcessEnv;\n }\n\n const { stdout, stderr } = await execFilePromise(command, args, execOptions);\n return {\n stdout: stdout || \"\",\n stderr: stderr || \"\",\n exitCode: 0,\n };\n } catch (error: unknown) {\n const err = error as {\n stdout?: string;\n stderr?: string;\n code?: number;\n };\n return {\n stdout: err.stdout || \"\",\n stderr: err.stderr || \"\",\n exitCode: err.code || 1,\n };\n 
}\n}\n"],"mappings":";;;;;;;;;;AAIA,MAAa,UAAU;CACrB,OAAO,OAAO,QAAQ;CACtB,QAAQ,OAAO,YAAY,eAAe,OAAO,QAAQ;CACzD,mBAAmB,OAAO,WAAW,eAAe,OAAO,OAAO,WAAW;CAC9E;;;;AAKD,SAAgB,KAAQ,IAAsB;CAC5C,IAAIA;CACJ,IAAI,SAAS;AAEb,cAAa;AACX,MAAI,CAAC,QAAQ;AACX,YAAS,IAAI;AACb,YAAS;;AAEX,SAAO;;;;;;;AAQX,SAAgB,wBAA8B;;;;;;;ACD9C,MAAM,YAAY,KAAK,YAAiC;CACtD,MAAM,KAAK,MAAM,OAAO;AACxB,QAAO;EACP;AAEF,SAAgB,mBAA+B;AAC7C,KAAI,QAAQ,OAAO;AACjB,SAAO;GACL,MAAM,SAAS,MAAM;IACnB,MAAM,OAAO,IAAI,KAAK,KAAK;AAC3B,WAAO,MAAM,KAAK,MAAM;;GAG1B,MAAM,UAAU,MAAM,SAAS;AAE7B,UAAM,IAAI,MAAM,MAAM,QAAQ;;GAGhC,MAAM,OAAO,MAAM;IAEjB,MAAM,SAAS,MAAM,WAAW;AAChC,QAAI;AACF,WAAM,OAAO,KAAK,KAAK;AACvB,YAAO;YACD;AACN,YAAO;;;GAIX,MAAM,KAAK,MAAM;IACf,MAAM,OAAO,IAAI,KAAK,KAAK;IAC3B,MAAM,OAAO,KAAK;IAElB,MAAM,SAAS,MAAM,WAAW;IAChC,MAAM,EAAE,UAAU,MAAM,OAAO,KAAK,KAAK;AACzC,WAAO;KAAE;KAAO;KAAM;;GAGxB,MAAM,OAAO,SAAS,SAAS;IAC7B,MAAM,SAAS,MAAM,WAAW;AAChC,UAAM,OAAO,OAAO,SAAS,QAAQ;;GAGvC,MAAM,MAAM,MAAM,SAAS;IACzB,MAAM,SAAS,MAAM,WAAW;AAChC,UAAM,OAAO,MAAM,MAAM,QAAQ;;GAEpC;;AAIH,QAAO;EACL,MAAM,SAAS,MAAM;GACnB,MAAM,SAAS,MAAM,WAAW;AAChC,UAAO,MAAM,OAAO,SAAS,MAAM,QAAQ;;EAG7C,MAAM,UAAU,MAAM,SAAS;GAC7B,MAAM,SAAS,MAAM,WAAW;GAEhC,MAAM,aAAa,MAAM,OAAO;GAChC,MAAM,MAAM,WAAW,QAAQ,KAAK;AACpC,SAAM,OAAO,MAAM,KAAK,EAAE,WAAW,MAAM,CAAC;AAC5C,SAAM,OAAO,UAAU,MAAM,SAAS,QAAQ;;EAGhD,MAAM,OAAO,MAAM;GACjB,MAAM,SAAS,MAAM,WAAW;AAChC,OAAI;AACF,UAAM,OAAO,OAAO,KAAK;AACzB,WAAO;WACD;AACN,WAAO;;;EAIX,MAAM,KAAK,MAAM;GACf,MAAM,SAAS,MAAM,WAAW;GAChC,MAAM,QAAQ,MAAM,OAAO,KAAK,KAAK;AACrC,UAAO;IAAE,OAAO,MAAM;IAAO,MAAM,MAAM;IAAM;;EAGjD,MAAM,OAAO,SAAS,SAAS;GAC7B,MAAM,SAAS,MAAM,WAAW;AAChC,SAAM,OAAO,OAAO,SAAS,QAAQ;;EAGvC,MAAM,MAAM,MAAM,SAAS;GACzB,MAAM,SAAS,MAAM,WAAW;AAChC,SAAM,OAAO,MAAM,MAAM,QAAQ;;EAEpC;;AAIH,IAAIC,aAAgC;AAEpC,SAAgB,gBAA4B;AAC1C,KAAI,CAAC,YAAY;AACf,eAAa,kBAAkB;;AAEjC,QAAO;;;;;;AAOT,SAAgB,4BAAkC;AAChD,cAAa;;;;;;;;;;;AC3Hf,SAAS,OAAO,KAAa,QAAwB;AACnD,QAAO,IAAI,SAAS,QAAQ,IAAI;;AAGlC,SAAgB,uBAAuC;AACrD,KAAI,QAAQ,OAAO;AACjB,SAAO,EACL,KAAK,SAAS,YAAY,UAAU;AAClC,OAAI,cAAc,UAAU;IAC1B,MAAM,SAAS,IAAI,IAAI,aAAa,SAAS;AAC7C,WAAO,OAAO,QAAQ;AACtB,WAAO,OAAO,OAAO,MAAM;;GAG7B,MAAM,UAAU,IAAI,KAAK,QAAQ;AAEjC,UAAO,OAAO,QAAQ,SAAS,GAAG,EAAE,GAAG;KAE1C;;AAIH,QAAO,EACL,KAAK,SAAS,YAAY,UAAU;AAClC,MAAI,cAAc,UAAU;GAC1B,MAAMC,qBAAiB,cAAc;AACrC,UAAOA,SAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;;EAIlE,MAAMA,qBAAiB,cAAc;EACrC,MAAM,aAAaA,SAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;AAE5E,SAAO,WAAW,UAAU,GAAG,GAAG;IAErC;;AAIH,IAAIC,iBAAwC;AAE5C,SAAgB,oBAAoC;AAClD,KAAI,CAAC,gBAAgB;AACnB,mBAAiB,sBAAsB;;AAEzC,QAAO;;;;;;AAOT,SAAgB,gCAAsC;AACpD,kBAAiB;;;;;;;;;;;;AC1DnB,SAAgB,aAAqB;AACnC,KAAI,QAAQ,SAAS,OAAO,QAAQ,eAAe,OAAO,IAAI,iBAAiB,YAAY;AACzF,SAAO,IAAI,cAAc;;CAI3B,MAAMC,qBAAiB,cAAc;AACrC,QAAOA,SAAO,YAAY;;;;;;;;ACC5B,eAAsB,MAAM,SAA6C;AACvE,KAAI,QAAQ,OAAO;EACjB,MAAM,OAAO,IAAI,MAAM,QAAQ,KAAK;GAClC,KAAK,QAAQ;GACb,KAAK,QAAQ;GACb,QAAQ;GACR,QAAQ;GACT,CAAC;EAEF,MAAM,CAAC,QAAQ,UAAU,MAAM,QAAQ,IAAI,CAAC,IAAI,SAAS,KAAK,OAAO,CAAC,MAAM,EAAE,IAAI,SAAS,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC;EAEhH,MAAM,WAAW,MAAM,KAAK;AAE5B,SAAO;GAAE;GAAQ;GAAQ;GAAU;;CAIrC,MAAM,EAAE,aAAa,MAAM,OAAO;CAClC,MAAM,EAAE,cAAc,MAAM,OAAO;CACnC,MAAM,kBAAkB,UAAU,SAAS;CAE3C,MAAM,CAAC,SAAS,GAAG,QAAQ,QAAQ;AACnC,KAAI,CAAC,SAAS;AACZ,SAAO;GACL,QAAQ;GACR,QAAQ;GACR,UAAU;GACX;;AAGH,KAAI;EACF,MAAMC,cAIF,EACF,UAAU,SACX;AAED,MAAI,QAAQ,KAAK;AACf,eAAY,MAAM,QAAQ;;AAE5B,MAAI,QAAQ,KAAK;AACf,eAAY,MAAM,QAAQ;;EAG5B,MAAM,EAAE,QAAQ,WAAW,MAAM,gBAAgB,SAAS,MAAM,YAAY;AAC5E,SAAO;GACL,QAAQ,UAAU;GAClB,QAAQ,UAAU;GAClB,UAAU;GACX;UACMC,OAAgB;EACvB,MAAM,MAAM;AAKZ,SAAO;GACL,QAAQ,IAAI,UAAU;GACtB,QAAQ,IAAI,UAAU;GACtB,UAAU,IAAI,QAAQ;G
ACvB"}