@intlayer/chokidar 7.0.6 → 7.0.8-canary.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. package/dist/cjs/_virtual/_utils_asset.cjs +0 -3
  2. package/dist/cjs/buildIntlayerDictionary/buildIntlayerDictionary.cjs +0 -2
  3. package/dist/cjs/buildIntlayerDictionary/buildIntlayerDictionary.cjs.map +1 -1
  4. package/dist/cjs/buildIntlayerDictionary/processContentDeclaration.cjs +0 -1
  5. package/dist/cjs/buildIntlayerDictionary/processContentDeclaration.cjs.map +1 -1
  6. package/dist/cjs/buildIntlayerDictionary/writeDynamicDictionary.cjs +0 -4
  7. package/dist/cjs/buildIntlayerDictionary/writeDynamicDictionary.cjs.map +1 -1
  8. package/dist/cjs/buildIntlayerDictionary/writeFetchDictionary.cjs +0 -3
  9. package/dist/cjs/buildIntlayerDictionary/writeFetchDictionary.cjs.map +1 -1
  10. package/dist/cjs/buildIntlayerDictionary/writeMergedDictionary.cjs +0 -4
  11. package/dist/cjs/buildIntlayerDictionary/writeMergedDictionary.cjs.map +1 -1
  12. package/dist/cjs/buildIntlayerDictionary/writeRemoteDictionary.cjs +0 -3
  13. package/dist/cjs/buildIntlayerDictionary/writeRemoteDictionary.cjs.map +1 -1
  14. package/dist/cjs/buildIntlayerDictionary/writeUnmergedDictionary.cjs +0 -4
  15. package/dist/cjs/buildIntlayerDictionary/writeUnmergedDictionary.cjs.map +1 -1
  16. package/dist/cjs/cleanOutputDir.cjs +0 -2
  17. package/dist/cjs/cleanOutputDir.cjs.map +1 -1
  18. package/dist/cjs/createDictionaryEntryPoint/createDictionaryEntryPoint.cjs +0 -3
  19. package/dist/cjs/createDictionaryEntryPoint/createDictionaryEntryPoint.cjs.map +1 -1
  20. package/dist/cjs/createDictionaryEntryPoint/generateDictionaryListContent.cjs +0 -2
  21. package/dist/cjs/createDictionaryEntryPoint/generateDictionaryListContent.cjs.map +1 -1
  22. package/dist/cjs/createDictionaryEntryPoint/getBuiltDictionariesPath.cjs +0 -2
  23. package/dist/cjs/createDictionaryEntryPoint/getBuiltDictionariesPath.cjs.map +1 -1
  24. package/dist/cjs/createDictionaryEntryPoint/getBuiltDynamicDictionariesPath.cjs +0 -2
  25. package/dist/cjs/createDictionaryEntryPoint/getBuiltDynamicDictionariesPath.cjs.map +1 -1
  26. package/dist/cjs/createDictionaryEntryPoint/getBuiltFetchDictionariesPath.cjs +0 -2
  27. package/dist/cjs/createDictionaryEntryPoint/getBuiltFetchDictionariesPath.cjs.map +1 -1
  28. package/dist/cjs/createDictionaryEntryPoint/getBuiltRemoteDictionariesPath.cjs +0 -2
  29. package/dist/cjs/createDictionaryEntryPoint/getBuiltRemoteDictionariesPath.cjs.map +1 -1
  30. package/dist/cjs/createDictionaryEntryPoint/getBuiltUnmergedDictionariesPath.cjs +0 -2
  31. package/dist/cjs/createDictionaryEntryPoint/getBuiltUnmergedDictionariesPath.cjs.map +1 -1
  32. package/dist/cjs/createType/createModuleAugmentation.cjs +1 -4
  33. package/dist/cjs/createType/createModuleAugmentation.cjs.map +1 -1
  34. package/dist/cjs/createType/createType.cjs +0 -3
  35. package/dist/cjs/createType/createType.cjs.map +1 -1
  36. package/dist/cjs/fetchDistantDictionaries.cjs +0 -2
  37. package/dist/cjs/fetchDistantDictionaries.cjs.map +1 -1
  38. package/dist/cjs/filterInvalidDictionaries.cjs +0 -1
  39. package/dist/cjs/filterInvalidDictionaries.cjs.map +1 -1
  40. package/dist/cjs/handleAdditionalContentDeclarationFile.cjs +0 -1
  41. package/dist/cjs/handleAdditionalContentDeclarationFile.cjs.map +1 -1
  42. package/dist/cjs/handleContentDeclarationFileChange.cjs +3 -1
  43. package/dist/cjs/handleContentDeclarationFileChange.cjs.map +1 -1
  44. package/dist/cjs/handleUnlinkedContentDeclarationFile.cjs +0 -1
  45. package/dist/cjs/handleUnlinkedContentDeclarationFile.cjs.map +1 -1
  46. package/dist/cjs/listGitFiles.cjs +0 -3
  47. package/dist/cjs/listGitFiles.cjs.map +1 -1
  48. package/dist/cjs/loadDictionaries/getIntlayerBundle.cjs +0 -4
  49. package/dist/cjs/loadDictionaries/getIntlayerBundle.cjs.map +1 -1
  50. package/dist/cjs/loadDictionaries/loadContentDeclaration.cjs +0 -3
  51. package/dist/cjs/loadDictionaries/loadContentDeclaration.cjs.map +1 -1
  52. package/dist/cjs/loadDictionaries/loadDictionaries.cjs +0 -1
  53. package/dist/cjs/loadDictionaries/loadDictionaries.cjs.map +1 -1
  54. package/dist/cjs/loadDictionaries/loadLocalDictionaries.cjs +0 -2
  55. package/dist/cjs/loadDictionaries/loadLocalDictionaries.cjs.map +1 -1
  56. package/dist/cjs/loadDictionaries/loadRemoteDictionaries.cjs +0 -3
  57. package/dist/cjs/loadDictionaries/loadRemoteDictionaries.cjs.map +1 -1
  58. package/dist/cjs/loadDictionaries/log.cjs +0 -1
  59. package/dist/cjs/loadDictionaries/log.cjs.map +1 -1
  60. package/dist/cjs/prepareIntlayer.cjs +0 -2
  61. package/dist/cjs/prepareIntlayer.cjs.map +1 -1
  62. package/dist/cjs/reduceDictionaryContent/reduceDictionaryContent.cjs +0 -1
  63. package/dist/cjs/reduceDictionaryContent/reduceDictionaryContent.cjs.map +1 -1
  64. package/dist/cjs/utils/chunkJSON.cjs +7 -0
  65. package/dist/cjs/utils/chunkJSON.cjs.map +1 -1
  66. package/dist/cjs/utils/formatter.cjs +0 -4
  67. package/dist/cjs/utils/formatter.cjs.map +1 -1
  68. package/dist/cjs/utils/runOnce.cjs +0 -2
  69. package/dist/cjs/utils/runOnce.cjs.map +1 -1
  70. package/dist/cjs/utils/runParallel/bin.cjs +0 -1
  71. package/dist/cjs/utils/runParallel/bin.cjs.map +1 -1
  72. package/dist/cjs/utils/runParallel/index.cjs +0 -1
  73. package/dist/cjs/utils/runParallel/index.cjs.map +1 -1
  74. package/dist/cjs/utils/runParallel/runTask.cjs +0 -1
  75. package/dist/cjs/utils/runParallel/runTask.cjs.map +1 -1
  76. package/dist/cjs/utils/runParallel/spawnPosix.cjs +0 -1
  77. package/dist/cjs/utils/runParallel/spawnPosix.cjs.map +1 -1
  78. package/dist/cjs/utils/runParallel/spawnWin32.cjs +0 -1
  79. package/dist/cjs/utils/runParallel/spawnWin32.cjs.map +1 -1
  80. package/dist/cjs/watcher.cjs +1 -3
  81. package/dist/cjs/watcher.cjs.map +1 -1
  82. package/dist/cjs/writeConfiguration/index.cjs +0 -2
  83. package/dist/cjs/writeConfiguration/index.cjs.map +1 -1
  84. package/dist/cjs/writeContentDeclaration/detectFormatCommand.cjs +0 -1
  85. package/dist/cjs/writeContentDeclaration/detectFormatCommand.cjs.map +1 -1
  86. package/dist/cjs/writeContentDeclaration/processContentDeclarationContent.cjs +0 -5
  87. package/dist/cjs/writeContentDeclaration/processContentDeclarationContent.cjs.map +1 -1
  88. package/dist/cjs/writeContentDeclaration/transformJSFile.cjs +0 -3
  89. package/dist/cjs/writeContentDeclaration/transformJSFile.cjs.map +1 -1
  90. package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs +0 -4
  91. package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs.map +1 -1
  92. package/dist/cjs/writeContentDeclaration/writeJSFile.cjs +0 -5
  93. package/dist/cjs/writeContentDeclaration/writeJSFile.cjs.map +1 -1
  94. package/dist/cjs/writeFileIfChanged.cjs +0 -1
  95. package/dist/cjs/writeFileIfChanged.cjs.map +1 -1
  96. package/dist/esm/createType/createModuleAugmentation.mjs +1 -1
  97. package/dist/esm/createType/createModuleAugmentation.mjs.map +1 -1
  98. package/dist/esm/handleContentDeclarationFileChange.mjs +3 -0
  99. package/dist/esm/handleContentDeclarationFileChange.mjs.map +1 -1
  100. package/dist/esm/utils/chunkJSON.mjs +7 -0
  101. package/dist/esm/utils/chunkJSON.mjs.map +1 -1
  102. package/dist/esm/watcher.mjs +1 -0
  103. package/dist/esm/watcher.mjs.map +1 -1
  104. package/dist/types/buildIntlayerDictionary/buildIntlayerDictionary.d.ts +2 -2
  105. package/dist/types/buildIntlayerDictionary/writeDynamicDictionary.d.ts +3 -3
  106. package/dist/types/buildIntlayerDictionary/writeMergedDictionary.d.ts +2 -2
  107. package/dist/types/buildIntlayerDictionary/writeRemoteDictionary.d.ts +2 -2
  108. package/dist/types/createDictionaryEntryPoint/createDictionaryEntryPoint.d.ts +2 -2
  109. package/dist/types/createDictionaryEntryPoint/generateDictionaryListContent.d.ts +2 -2
  110. package/dist/types/createDictionaryEntryPoint/generateDictionaryListContent.d.ts.map +1 -1
  111. package/dist/types/fetchDistantDictionaries.d.ts +1 -0
  112. package/dist/types/fetchDistantDictionaries.d.ts.map +1 -1
  113. package/dist/types/handleContentDeclarationFileChange.d.ts.map +1 -1
  114. package/dist/types/index.d.ts +4 -0
  115. package/dist/types/loadDictionaries/loadRemoteDictionaries.d.ts +2 -2
  116. package/package.json +22 -22
package/dist/cjs/utils/chunkJSON.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"chunkJSON.cjs","names":["output: string[]","getChunk","patches: Patch[]","testPatch: SetPatch","rootType: RootType","sourceString: string","chunks: JsonChunk[]","currentChunk: JsonChunk","root: any","parts: string[]","output: any[]"],"sources":["../../../src/utils/chunkJSON.ts"],"sourcesContent":["/**\n * Split & reassemble JSON by character budget.\n * - Measures serialized size using JSON.stringify(..).length (characters).\n * - Ensures each chunk is itself valid JSON.\n * - Very large strings are split into safe pieces using getChunk and re-concatenated on assemble.\n * - Protects against circular structures (JSON can't serialize those anyway).\n */\n\nimport { getChunk } from './getChunk';\n\ntype JSONPrimitive = string | number | boolean | null;\ntype JSONValue = JSONPrimitive | JSONObject | JSONArray;\n\nexport type JSONObject = {\n [k: string]: JSONValue;\n};\n\ntype JSONArray = JSONValue[];\n\ntype Path = Array<string | number>;\n\ntype SetPatch = { op: 'set'; path: Path; value: JSONValue };\ntype StrAppendPatch = {\n op: 'str-append';\n path: Path;\n value: string; // part of a longer string\n index: number;\n total: number;\n};\ntype Patch = SetPatch | StrAppendPatch;\n\ntype RootType = 'object' | 'array';\n\nexport type JsonChunk = {\n schemaVersion: 1;\n index: number;\n total: number;\n rootType: RootType;\n checksum: string; // hash of the full original JSON string\n entries: Patch[];\n};\n\nconst isObject = (val: unknown): val is Record<string, unknown> => {\n return typeof val === 'object' && val !== null && !Array.isArray(val);\n};\n\nconst computeDjb2 = (str: string): string => {\n // Simple 32-bit hash; deterministic & fast\n let hash = 5381;\n\n for (let i = 0; i < str.length; i++) {\n hash = ((hash << 5) + hash) ^ str.charCodeAt(i);\n }\n\n // convert to unsigned hex\n return (hash >>> 0).toString(16).padStart(8, '0');\n};\n\nconst setAtPath = (root: any, path: Path, value: JSONValue) => {\n let current = root;\n\n for (let i = 0; i < path.length - 1; i++) {\n const key = path[i];\n const nextKey = path[i + 1];\n const isNextIndex = typeof nextKey === 'number';\n\n if (typeof key === 'number') {\n if (!Array.isArray(current)) {\n throw new Error(`Expected array at path segment ${i}`);\n }\n\n if (current[key] === undefined) {\n current[key] = isNextIndex ? [] : {};\n }\n\n current = current[key];\n } else {\n if (!isObject(current)) {\n throw new Error(`Expected object at path segment ${i}`);\n }\n\n if (!(key in current)) {\n (current as any)[key] = isNextIndex ? 
[] : {};\n }\n\n current = (current as any)[key];\n }\n }\n\n const last = path[path.length - 1];\n\n if (typeof last === 'number') {\n if (!Array.isArray(current)) {\n throw new Error(`Expected array at final segment`);\n }\n\n current[last] = value as any;\n } else {\n if (!isObject(current)) {\n throw new Error(`Expected object at final segment`);\n }\n\n (current as any)[last] = value as any;\n }\n};\n\nconst pathKey = (path: Path): string => {\n // stable key for grouping string parts\n return JSON.stringify(path);\n};\n\n/**\n * Split a string into parts using getChunk with a charLength budget per part.\n */\nconst splitStringByBudget = (\n str: string,\n maxCharsPerPart: number\n): string[] => {\n if (maxCharsPerPart <= 0) {\n throw new Error('maxChars must be > 0');\n }\n\n const output: string[] = [];\n let offset = 0;\n\n while (offset < str.length) {\n const part = getChunk(str, {\n charStart: offset,\n charLength: maxCharsPerPart,\n });\n\n if (!part) break;\n\n output.push(part);\n offset += part.length;\n }\n\n return output;\n};\n\n/**\n * Flatten JSON into patches (leaf writes). Strings too large to fit in a single\n * chunk are yielded as multiple str-append patches.\n */\nconst flattenToPatches = (\n value: JSONValue,\n maxCharsPerChunk: number,\n path: Path = [],\n seen = new WeakSet<object>()\n): Patch[] => {\n // Conservative per-entry cap so a single entry fits into an empty chunk with envelope overhead.\n // (Envelope ~ a few hundred chars; we keep a comfortable margin.)\n const maxStringPiece = Math.max(\n 1,\n Math.floor((maxCharsPerChunk - 400) * 0.8)\n );\n\n const patches: Patch[] = [];\n\n const walk = (currentValue: JSONValue, currentPath: Path) => {\n if (typeof currentValue === 'string') {\n // If the serialized patch wouldn't fit, split the string into multiple parts\n // and encode as a split-node sentinel with numbered keys.\n const testPatch: SetPatch = {\n op: 'set',\n path: currentPath,\n value: currentValue,\n };\n const testLen = JSON.stringify(testPatch).length + 150; // margin\n\n if (testLen <= maxCharsPerChunk) {\n patches.push(testPatch);\n return;\n }\n\n // Use getChunk-based splitting to produce stable parts\n const parts = splitStringByBudget(currentValue, maxStringPiece);\n\n // Emit split-node metadata and parts as individual leaf writes\n patches.push({\n op: 'set',\n path: [...currentPath, '__splittedType'],\n value: 'string',\n });\n patches.push({\n op: 'set',\n path: [...currentPath, '__total'],\n value: parts.length,\n });\n\n for (let i = 0; i < parts.length; i++) {\n patches.push({\n op: 'set',\n path: [...currentPath, String(i + 1)],\n value: parts[i],\n });\n }\n\n return;\n }\n\n if (currentValue === null || typeof currentValue !== 'object') {\n patches.push({ op: 'set', path: currentPath, value: currentValue });\n return;\n }\n\n if (seen.has(currentValue as object)) {\n throw new Error('Cannot serialize circular structures to JSON.');\n }\n\n seen.add(currentValue as object);\n\n if (Array.isArray(currentValue)) {\n for (let i = 0; i < currentValue.length; i++) {\n walk(currentValue[i] as JSONValue, [...currentPath, i]);\n }\n } else {\n for (const key of Object.keys(currentValue)) {\n walk((currentValue as JSONObject)[key], [...currentPath, key]);\n }\n }\n\n seen.delete(currentValue as object);\n };\n\n walk(value, path);\n return patches;\n};\n\n/**\n * Split JSON into chunks constrained by character count of serialized chunk.\n */\nexport const chunkJSON = (\n value: JSONObject | JSONArray,\n maxChars: number\n): 
JsonChunk[] => {\n if (!isObject(value) && !Array.isArray(value)) {\n throw new Error('Root must be an object or array.');\n }\n\n if (maxChars < 500) {\n // You can lower this if you truly need; recommended to keep some envelope headroom.\n throw new Error('maxChars is too small. Use at least 500 characters.');\n }\n\n const rootType: RootType = Array.isArray(value) ? 'array' : 'object';\n let sourceString: string;\n\n try {\n sourceString = JSON.stringify(value);\n } catch {\n // Provide a deterministic error message for circular refs\n throw new Error('Cannot serialize circular structures to JSON.');\n }\n\n const checksum = computeDjb2(sourceString);\n const allPatches = flattenToPatches(value as JSONValue, maxChars);\n\n const chunks: JsonChunk[] = [];\n let currentChunk: JsonChunk = {\n schemaVersion: 1,\n index: 0, // provisional\n total: 0, // provisional\n rootType,\n checksum,\n entries: [],\n };\n\n const emptyEnvelopeSize = JSON.stringify({\n ...currentChunk,\n entries: [],\n }).length;\n\n const tryFlush = () => {\n if (currentChunk.entries.length > 0) {\n chunks.push(currentChunk);\n currentChunk = {\n schemaVersion: 1,\n index: 0,\n total: 0,\n rootType,\n checksum,\n entries: [],\n };\n }\n };\n\n for (const patch of allPatches) {\n // Would adding this patch exceed maxChars?\n const withPatchSize =\n emptyEnvelopeSize +\n JSON.stringify(currentChunk.entries).length + // current entries array\n (currentChunk.entries.length ? 1 : 0) + // possible comma\n JSON.stringify(patch).length;\n\n if (withPatchSize <= maxChars) {\n currentChunk.entries.push(patch);\n } else {\n // Start a new chunk if current has items\n if (currentChunk.entries.length > 0) {\n tryFlush();\n }\n\n // Ensure single patch fits into an empty chunk\n const singleSize = emptyEnvelopeSize + JSON.stringify([patch]).length;\n\n if (singleSize > maxChars) {\n // This should only happen for massive strings, which we pre-split;\n // if it happens for a non-string, we cannot split further.\n throw new Error(\n 'A single entry exceeds maxChars and cannot be split. Reduce entry size or increase maxChars.'\n );\n }\n\n currentChunk.entries.push(patch);\n }\n }\n\n tryFlush();\n\n // Ensure at least one chunk exists (even for empty root)\n if (chunks.length === 0) {\n chunks.push({\n schemaVersion: 1,\n index: 0,\n total: 0, // provisional\n rootType,\n checksum,\n entries: [],\n });\n }\n\n // Finalize indices & totals\n const totalChunks = chunks.length;\n\n chunks.forEach((chunk, index) => {\n chunk.index = index;\n chunk.total = totalChunks;\n });\n\n return chunks;\n};\n\n/**\n * Reassemble JSON from chunks.\n * - Validates checksums and indices.\n * - Applies 'set' patches and merges string pieces from 'str-append'.\n */\n/**\n * Reconstruct content from a single chunk without validation.\n * Useful for processing individual chunks in a pipeline where you don't have all chunks yet.\n * Note: This will only reconstruct the partial content contained in this chunk.\n */\nexport const reconstructFromSingleChunk = (\n chunk: JsonChunk\n): JSONObject | JSONArray => {\n const root: any = chunk.rootType === 'array' ? 
[] : {};\n\n // Apply all 'set' patches from this chunk\n for (const entry of chunk.entries) {\n if (entry.op === 'set') {\n setAtPath(root, entry.path, entry.value);\n }\n }\n\n // Reconcile split-node sentinels for strings/arrays\n // When reconstructing from a single chunk, we may have incomplete split nodes\n const reconcileSplitNodes = (node: any): any => {\n if (node === null || typeof node !== 'object') return node;\n\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n node[i] = reconcileSplitNodes(node[i]);\n }\n return node;\n }\n\n // string split-node\n if ((node as any)['__splittedType'] === 'string') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total <= 0) {\n // Invalid split node, return as-is\n return node;\n }\n\n const parts: string[] = [];\n let hasAllParts = true;\n\n for (let i = 1; i <= total; i++) {\n const piece = (node as any)[String(i)];\n\n if (typeof piece !== 'string') {\n hasAllParts = false;\n break;\n }\n\n parts.push(piece);\n }\n\n // Only reconstruct if we have all parts, otherwise return the node as-is\n if (hasAllParts) {\n return parts.join('');\n }\n\n return node;\n }\n\n // array split-node (optional support)\n if ((node as any)['__splittedType'] === 'array') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total < 0) {\n // Invalid split node, return as-is\n return node;\n }\n\n const output: any[] = [];\n let hasAllParts = true;\n\n for (let i = 1; i <= total; i++) {\n const slice = (node as any)[String(i)];\n\n if (!Array.isArray(slice)) {\n hasAllParts = false;\n break;\n }\n\n for (let j = 0; j < slice.length; j++) {\n output.push(reconcileSplitNodes(slice[j]));\n }\n }\n\n // Only reconstruct if we have all parts\n if (hasAllParts) {\n return output;\n }\n\n return node;\n }\n\n // walk normal object\n for (const key of Object.keys(node)) {\n node[key] = reconcileSplitNodes(node[key]);\n }\n\n return node;\n };\n\n return reconcileSplitNodes(root);\n};\n\nexport const assembleJSON = (chunks: JsonChunk[]): JSONObject | JSONArray => {\n if (!chunks || chunks.length === 0) {\n throw new Error('No chunks provided.');\n }\n\n // Basic validation & sort\n const sorted = [...chunks].sort((a, b) => a.index - b.index);\n const { checksum, rootType } = sorted[0];\n const schemaVersion = 1;\n\n for (let i = 0; i < sorted.length; i++) {\n const chunk = sorted[i];\n\n if (chunk.schemaVersion !== schemaVersion) {\n console.error('Unsupported schemaVersion.', {\n cause: chunk,\n schemaVersion,\n });\n throw new Error('Unsupported schemaVersion.');\n }\n\n if (chunk.rootType !== rootType) {\n console.error('Chunks rootType mismatch.', {\n cause: chunk,\n rootType,\n });\n throw new Error('Chunks rootType mismatch.');\n }\n\n if (chunk.checksum !== checksum) {\n console.error('Chunks checksum mismatch (different source objects?).', {\n cause: chunk,\n checksum,\n });\n throw new Error('Chunks checksum mismatch (different source objects?).');\n }\n\n if (chunk.index !== i) {\n console.error('Chunk indices are not contiguous or sorted.', {\n cause: chunk,\n index: chunk.index,\n i,\n });\n throw new Error('Chunk indices are not contiguous or sorted.');\n }\n\n // Defer total check until after reconstruction to prefer more specific errors\n }\n\n const root: any = rootType === 'array' ? 
[] : {};\n\n // Collect string parts by path\n const stringParts = new Map<\n string,\n { path: Path; total: number; received: StrAppendPatch[] }\n >();\n\n const applySet = (patch: SetPatch) =>\n setAtPath(root, patch.path, patch.value);\n\n for (const chunk of sorted) {\n for (const entry of chunk.entries) {\n if (entry.op === 'set') {\n applySet(entry);\n } else {\n const key = pathKey(entry.path);\n const record = stringParts.get(key) ?? {\n path: entry.path,\n total: entry.total,\n received: [],\n };\n\n if (record.total !== entry.total) {\n throw new Error('Inconsistent string part totals for a path.');\n }\n\n record.received.push(entry);\n stringParts.set(key, record);\n }\n }\n }\n\n // Stitch strings\n for (const { path, total, received } of stringParts.values()) {\n if (received.length !== total) {\n throw new Error('Missing string parts for a path; incomplete chunk set.');\n }\n\n received.sort((a, b) => a.index - b.index);\n const fullString = received.map((part) => part.value).join('');\n setAtPath(root, path, fullString);\n }\n\n // Reconcile split-node sentinels for strings/arrays after all patches applied\n const reconcileSplitNodes = (node: any): any => {\n if (node === null || typeof node !== 'object') return node;\n\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n node[i] = reconcileSplitNodes(node[i]);\n }\n return node;\n }\n\n // string split-node\n if ((node as any)['__splittedType'] === 'string') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total <= 0) {\n throw new Error('Invalid split-node total for a path.');\n }\n\n const parts: string[] = [];\n\n for (let i = 1; i <= total; i++) {\n const piece = (node as any)[String(i)];\n\n if (typeof piece !== 'string') {\n throw new Error(\n 'Missing string parts for a path; incomplete chunk set.'\n );\n }\n\n parts.push(piece);\n }\n\n return parts.join('');\n }\n\n // array split-node (optional support)\n if ((node as any)['__splittedType'] === 'array') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total < 0) {\n throw new Error('Invalid split-node total for a path.');\n }\n\n const output: any[] = [];\n\n for (let i = 1; i <= total; i++) {\n const slice = (node as any)[String(i)];\n\n if (!Array.isArray(slice)) {\n throw new Error(\n 'Missing string parts for a path; incomplete chunk set.'\n );\n }\n\n for (let j = 0; j < slice.length; j++) {\n output.push(reconcileSplitNodes(slice[j]));\n }\n }\n\n return output;\n }\n\n // walk normal object\n for (const key of Object.keys(node)) {\n node[key] = reconcileSplitNodes(node[key]);\n }\n\n return node;\n };\n\n const reconciled = reconcileSplitNodes(root);\n\n // Now validate totals match provided count\n for (let i = 0; i < sorted.length; i++) {\n const chunk = sorted[i];\n\n if (chunk.total !== sorted.length) {\n throw new Error(\n `Chunk total does not match provided count. 
Expected ${sorted.length}, but chunk ${i} has total=${chunk.total}`\n );\n }\n }\n\n return reconciled;\n};\n\n/* -------------------------------------------\n * Example usage\n * -------------------------------------------\nconst big: JSONObject = {\n title: \"Document\",\n content: \"…a very very long text…\",\n items: Array.from({ length: 2000 }, (_, i) => ({ id: i, label: `Item ${i}` }))\n};\n\n// Split to ~16k-char chunks\nconst chunks = chunkJSON(big, 16_000);\n\n// Send each `chunks[i]` as JSON to your backend.\n// Later, reassemble:\nconst restored = assembleJSON(chunks);\nconsole.log(JSON.stringify(restored) === JSON.stringify(big)); // true\n*/\n"],"mappings":";;;AA0CA,MAAM,YAAY,QAAiD;AACjE,QAAO,OAAO,QAAQ,YAAY,QAAQ,QAAQ,CAAC,MAAM,QAAQ,IAAI;;AAGvE,MAAM,eAAe,QAAwB;CAE3C,IAAI,OAAO;AAEX,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,SAAS,QAAQ,KAAK,OAAQ,IAAI,WAAW,EAAE;AAIjD,SAAQ,SAAS,GAAG,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI;;AAGnD,MAAM,aAAa,MAAW,MAAY,UAAqB;CAC7D,IAAI,UAAU;AAEd,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK;EACxC,MAAM,MAAM,KAAK;EAEjB,MAAM,cAAc,OADJ,KAAK,IAAI,OACc;AAEvC,MAAI,OAAO,QAAQ,UAAU;AAC3B,OAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kCAAkC,IAAI;AAGxD,OAAI,QAAQ,SAAS,OACnB,SAAQ,OAAO,cAAc,EAAE,GAAG,EAAE;AAGtC,aAAU,QAAQ;SACb;AACL,OAAI,CAAC,SAAS,QAAQ,CACpB,OAAM,IAAI,MAAM,mCAAmC,IAAI;AAGzD,OAAI,EAAE,OAAO,SACX,CAAC,QAAgB,OAAO,cAAc,EAAE,GAAG,EAAE;AAG/C,aAAW,QAAgB;;;CAI/B,MAAM,OAAO,KAAK,KAAK,SAAS;AAEhC,KAAI,OAAO,SAAS,UAAU;AAC5B,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kCAAkC;AAGpD,UAAQ,QAAQ;QACX;AACL,MAAI,CAAC,SAAS,QAAQ,CACpB,OAAM,IAAI,MAAM,mCAAmC;AAGrD,EAAC,QAAgB,QAAQ;;;AAI7B,MAAM,WAAW,SAAuB;AAEtC,QAAO,KAAK,UAAU,KAAK;;;;;AAM7B,MAAM,uBACJ,KACA,oBACa;AACb,KAAI,mBAAmB,EACrB,OAAM,IAAI,MAAM,uBAAuB;CAGzC,MAAMA,SAAmB,EAAE;CAC3B,IAAI,SAAS;AAEb,QAAO,SAAS,IAAI,QAAQ;EAC1B,MAAM,OAAOC,gCAAS,KAAK;GACzB,WAAW;GACX,YAAY;GACb,CAAC;AAEF,MAAI,CAAC,KAAM;AAEX,SAAO,KAAK,KAAK;AACjB,YAAU,KAAK;;AAGjB,QAAO;;;;;;AAOT,MAAM,oBACJ,OACA,kBACA,OAAa,EAAE,EACf,uBAAO,IAAI,SAAiB,KAChB;CAGZ,MAAM,iBAAiB,KAAK,IAC1B,GACA,KAAK,OAAO,mBAAmB,OAAO,GAAI,CAC3C;CAED,MAAMC,UAAmB,EAAE;CAE3B,MAAM,QAAQ,cAAyB,gBAAsB;AAC3D,MAAI,OAAO,iBAAiB,UAAU;GAGpC,MAAMC,YAAsB;IAC1B,IAAI;IACJ,MAAM;IACN,OAAO;IACR;AAGD,OAFgB,KAAK,UAAU,UAAU,CAAC,SAAS,OAEpC,kBAAkB;AAC/B,YAAQ,KAAK,UAAU;AACvB;;GAIF,MAAM,QAAQ,oBAAoB,cAAc,eAAe;AAG/D,WAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,iBAAiB;IACxC,OAAO;IACR,CAAC;AACF,WAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,UAAU;IACjC,OAAO,MAAM;IACd,CAAC;AAEF,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,SAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,OAAO,IAAI,EAAE,CAAC;IACrC,OAAO,MAAM;IACd,CAAC;AAGJ;;AAGF,MAAI,iBAAiB,QAAQ,OAAO,iBAAiB,UAAU;AAC7D,WAAQ,KAAK;IAAE,IAAI;IAAO,MAAM;IAAa,OAAO;IAAc,CAAC;AACnE;;AAGF,MAAI,KAAK,IAAI,aAAuB,CAClC,OAAM,IAAI,MAAM,gDAAgD;AAGlE,OAAK,IAAI,aAAuB;AAEhC,MAAI,MAAM,QAAQ,aAAa,CAC7B,MAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,IACvC,MAAK,aAAa,IAAiB,CAAC,GAAG,aAAa,EAAE,CAAC;MAGzD,MAAK,MAAM,OAAO,OAAO,KAAK,aAAa,CACzC,MAAM,aAA4B,MAAM,CAAC,GAAG,aAAa,IAAI,CAAC;AAIlE,OAAK,OAAO,aAAuB;;AAGrC,MAAK,OAAO,KAAK;AACjB,QAAO;;;;;AAMT,MAAa,aACX,OACA,aACgB;AAChB,KAAI,CAAC,SAAS,MAAM,IAAI,CAAC,MAAM,QAAQ,MAAM,CAC3C,OAAM,IAAI,MAAM,mCAAmC;AAGrD,KAAI,WAAW,IAEb,OAAM,IAAI,MAAM,sDAAsD;CAGxE,MAAMC,WAAqB,MAAM,QAAQ,MAAM,GAAG,UAAU;CAC5D,IAAIC;AAEJ,KAAI;AACF,iBAAe,KAAK,UAAU,MAAM;SAC9B;AAEN,QAAM,IAAI,MAAM,gDAAgD;;CAGlE,MAAM,WAAW,YAAY,aAAa;CAC1C,MAAM,aAAa,iBAAiB,OAAoB,SAAS;CAEjE,MAAMC,SAAsB,EAAE;CAC9B,IAAIC,eAA0B;EAC5B,eAAe;EACf,OAAO;EACP,OAAO;EACP;EACA;EACA,SAAS,EAAE;EACZ;CAED,MAAM,oBAAoB,KAAK,UAAU;EACvC,GAAG;EACH,SAAS,EAAE;EACZ,CAAC,CAAC;CAEH,MAAM,iBA
AiB;AACrB,MAAI,aAAa,QAAQ,SAAS,GAAG;AACnC,UAAO,KAAK,aAAa;AACzB,kBAAe;IACb,eAAe;IACf,OAAO;IACP,OAAO;IACP;IACA;IACA,SAAS,EAAE;IACZ;;;AAIL,MAAK,MAAM,SAAS,WAQlB,KALE,oBACA,KAAK,UAAU,aAAa,QAAQ,CAAC,UACpC,aAAa,QAAQ,SAAS,IAAI,KACnC,KAAK,UAAU,MAAM,CAAC,UAEH,SACnB,cAAa,QAAQ,KAAK,MAAM;MAC3B;AAEL,MAAI,aAAa,QAAQ,SAAS,EAChC,WAAU;AAMZ,MAFmB,oBAAoB,KAAK,UAAU,CAAC,MAAM,CAAC,CAAC,SAE9C,SAGf,OAAM,IAAI,MACR,+FACD;AAGH,eAAa,QAAQ,KAAK,MAAM;;AAIpC,WAAU;AAGV,KAAI,OAAO,WAAW,EACpB,QAAO,KAAK;EACV,eAAe;EACf,OAAO;EACP,OAAO;EACP;EACA;EACA,SAAS,EAAE;EACZ,CAAC;CAIJ,MAAM,cAAc,OAAO;AAE3B,QAAO,SAAS,OAAO,UAAU;AAC/B,QAAM,QAAQ;AACd,QAAM,QAAQ;GACd;AAEF,QAAO;;;;;;;;;;;;AAaT,MAAa,8BACX,UAC2B;CAC3B,MAAMC,OAAY,MAAM,aAAa,UAAU,EAAE,GAAG,EAAE;AAGtD,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,WAAU,MAAM,MAAM,MAAM,MAAM,MAAM;CAM5C,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EAExC,QAAO;GAGT,MAAMC,QAAkB,EAAE;GAC1B,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,UAAU;AAC7B,mBAAc;AACd;;AAGF,UAAM,KAAK,MAAM;;AAInB,OAAI,YACF,QAAO,MAAM,KAAK,GAAG;AAGvB,UAAO;;AAIT,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EAEvC,QAAO;GAGT,MAAMC,SAAgB,EAAE;GACxB,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,EAAE;AACzB,mBAAc;AACd;;AAGF,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAK9C,OAAI,YACF,QAAO;AAGT,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;AAGT,QAAO,oBAAoB,KAAK;;AAGlC,MAAa,gBAAgB,WAAgD;AAC3E,KAAI,CAAC,UAAU,OAAO,WAAW,EAC/B,OAAM,IAAI,MAAM,sBAAsB;CAIxC,MAAM,SAAS,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;CAC5D,MAAM,EAAE,UAAU,aAAa,OAAO;CACtC,MAAM,gBAAgB;AAEtB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,kBAAkB,eAAe;AACzC,WAAQ,MAAM,8BAA8B;IAC1C,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,6BAA6B;;AAG/C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,6BAA6B;IACzC,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,4BAA4B;;AAG9C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,yDAAyD;IACrE,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,wDAAwD;;AAG1E,MAAI,MAAM,UAAU,GAAG;AACrB,WAAQ,MAAM,+CAA+C;IAC3D,OAAO;IACP,OAAO,MAAM;IACb;IACD,CAAC;AACF,SAAM,IAAI,MAAM,8CAA8C;;;CAMlE,MAAMF,OAAY,aAAa,UAAU,EAAE,GAAG,EAAE;CAGhD,MAAM,8BAAc,IAAI,KAGrB;CAEH,MAAM,YAAY,UAChB,UAAU,MAAM,MAAM,MAAM,MAAM,MAAM;AAE1C,MAAK,MAAM,SAAS,OAClB,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,UAAS,MAAM;MACV;EACL,MAAM,MAAM,QAAQ,MAAM,KAAK;EAC/B,MAAM,SAAS,YAAY,IAAI,IAAI,IAAI;GACrC,MAAM,MAAM;GACZ,OAAO,MAAM;GACb,UAAU,EAAE;GACb;AAED,MAAI,OAAO,UAAU,MAAM,MACzB,OAAM,IAAI,MAAM,8CAA8C;AAGhE,SAAO,SAAS,KAAK,MAAM;AAC3B,cAAY,IAAI,KAAK,OAAO;;AAMlC,MAAK,MAAM,EAAE,MAAM,OAAO,cAAc,YAAY,QAAQ,EAAE;AAC5D,MAAI,SAAS,WAAW,MACtB,OAAM,IAAI,MAAM,yDAAyD;AAG3E,WAAS,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAE1C,YAAU,MAAM,MADG,SAAS,KAAK,SAAS,KAAK,MAAM,CAAC,KAAK,GAAG,CAC7B;;CAInC,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EACxC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAMC,QAAkB,EAAE;AAE1B,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,SACnB,OAAM,IAAI,MACR,yDACD;AAGH,UAAM,KAAK,MAAM;;AAGn
B,UAAO,MAAM,KAAK,GAAG;;AAIvB,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EACvC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAMC,SAAgB,EAAE;AAExB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,CACvB,OAAM,IAAI,MACR,yDACD;AAGH,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAI9C,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;CAGT,MAAM,aAAa,oBAAoB,KAAK;AAG5C,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,UAAU,OAAO,OACzB,OAAM,IAAI,MACR,uDAAuD,OAAO,OAAO,cAAc,EAAE,aAAa,MAAM,QACzG;;AAIL,QAAO"}
+ {"version":3,"file":"chunkJSON.cjs","names":["output: string[]","getChunk","patches: Patch[]","testPatch: SetPatch","rootType: RootType","sourceString: string","chunks: JsonChunk[]","currentChunk: JsonChunk","root: any","parts: string[]","output: any[]"],"sources":["../../../src/utils/chunkJSON.ts"],"sourcesContent":["/**\n * Split & reassemble JSON by character budget.\n * - Measures serialized size using JSON.stringify(..).length (characters).\n * - Ensures each chunk is itself valid JSON.\n * - Very large strings are split into safe pieces using getChunk and re-concatenated on assemble.\n * - Protects against circular structures (JSON can't serialize those anyway).\n */\n\nimport { getChunk } from './getChunk';\n\ntype JSONPrimitive = string | number | boolean | null;\ntype JSONValue = JSONPrimitive | JSONObject | JSONArray;\n\nexport type JSONObject = {\n [k: string]: JSONValue;\n};\n\ntype JSONArray = JSONValue[];\n\ntype Path = Array<string | number>;\n\ntype SetPatch = { op: 'set'; path: Path; value: JSONValue };\ntype StrAppendPatch = {\n op: 'str-append';\n path: Path;\n value: string; // part of a longer string\n index: number;\n total: number;\n};\ntype Patch = SetPatch | StrAppendPatch;\n\ntype RootType = 'object' | 'array';\n\nexport type JsonChunk = {\n schemaVersion: 1;\n index: number;\n total: number;\n rootType: RootType;\n checksum: string; // hash of the full original JSON string\n entries: Patch[];\n};\n\nconst isObject = (val: unknown): val is Record<string, unknown> => {\n return typeof val === 'object' && val !== null && !Array.isArray(val);\n};\n\nconst computeDjb2 = (str: string): string => {\n // Simple 32-bit hash; deterministic & fast\n let hash = 5381;\n\n for (let i = 0; i < str.length; i++) {\n hash = ((hash << 5) + hash) ^ str.charCodeAt(i);\n }\n\n // convert to unsigned hex\n return (hash >>> 0).toString(16).padStart(8, '0');\n};\n\nconst setAtPath = (root: any, path: Path, value: JSONValue) => {\n let current = root;\n\n for (let i = 0; i < path.length - 1; i++) {\n const key = path[i];\n const nextKey = path[i + 1];\n const isNextIndex = typeof nextKey === 'number';\n\n if (typeof key === 'number') {\n if (!Array.isArray(current)) {\n throw new Error(`Expected array at path segment ${i}`);\n }\n\n if (current[key] === undefined) {\n current[key] = isNextIndex ? [] : {};\n }\n\n current = current[key];\n } else {\n if (!isObject(current)) {\n throw new Error(`Expected object at path segment ${i}`);\n }\n\n if (!(key in current)) {\n (current as any)[key] = isNextIndex ? 
[] : {};\n }\n\n current = (current as any)[key];\n }\n }\n\n const last = path[path.length - 1];\n\n if (typeof last === 'number') {\n if (!Array.isArray(current)) {\n throw new Error(`Expected array at final segment`);\n }\n\n current[last] = value as any;\n } else {\n if (!isObject(current)) {\n throw new Error(`Expected object at final segment`);\n }\n\n (current as any)[last] = value as any;\n }\n};\n\nconst pathKey = (path: Path): string => {\n // stable key for grouping string parts\n return JSON.stringify(path);\n};\n\n/**\n * Split a string into parts using getChunk with a charLength budget per part.\n */\nconst splitStringByBudget = (\n str: string,\n maxCharsPerPart: number\n): string[] => {\n if (maxCharsPerPart <= 0) {\n throw new Error('maxChars must be > 0');\n }\n\n const output: string[] = [];\n let offset = 0;\n\n while (offset < str.length) {\n const part = getChunk(str, {\n charStart: offset,\n charLength: maxCharsPerPart,\n });\n\n if (!part) break;\n\n output.push(part);\n offset += part.length;\n }\n\n return output;\n};\n\n/**\n * Flatten JSON into patches (leaf writes). Strings too large to fit in a single\n * chunk are yielded as multiple str-append patches.\n */\nconst flattenToPatches = (\n value: JSONValue,\n maxCharsPerChunk: number,\n path: Path = [],\n seen = new WeakSet<object>()\n): Patch[] => {\n // Conservative per-entry cap so a single entry fits into an empty chunk with envelope overhead.\n // (Envelope ~ a few hundred chars; we keep a comfortable margin.)\n const maxStringPiece = Math.max(\n 1,\n Math.floor((maxCharsPerChunk - 400) * 0.8)\n );\n\n const patches: Patch[] = [];\n\n const walk = (currentValue: JSONValue, currentPath: Path) => {\n if (typeof currentValue === 'string') {\n // If the serialized patch wouldn't fit, split the string into multiple parts\n // and encode as a split-node sentinel with numbered keys.\n const testPatch: SetPatch = {\n op: 'set',\n path: currentPath,\n value: currentValue,\n };\n const testLen = JSON.stringify(testPatch).length + 150; // margin\n\n if (testLen <= maxCharsPerChunk) {\n patches.push(testPatch);\n return;\n }\n\n // Use getChunk-based splitting to produce stable parts\n const parts = splitStringByBudget(currentValue, maxStringPiece);\n\n // Emit split-node metadata and parts as individual leaf writes\n patches.push({\n op: 'set',\n path: [...currentPath, '__splittedType'],\n value: 'string',\n });\n patches.push({\n op: 'set',\n path: [...currentPath, '__total'],\n value: parts.length,\n });\n\n for (let i = 0; i < parts.length; i++) {\n patches.push({\n op: 'set',\n path: [...currentPath, String(i + 1)],\n value: parts[i],\n });\n }\n\n return;\n }\n\n if (currentValue === null || typeof currentValue !== 'object') {\n patches.push({ op: 'set', path: currentPath, value: currentValue });\n return;\n }\n\n if (seen.has(currentValue as object)) {\n throw new Error('Cannot serialize circular structures to JSON.');\n }\n\n seen.add(currentValue as object);\n\n if (Array.isArray(currentValue)) {\n for (let i = 0; i < currentValue.length; i++) {\n walk(currentValue[i] as JSONValue, [...currentPath, i]);\n }\n } else {\n for (const key of Object.keys(currentValue)) {\n walk((currentValue as JSONObject)[key], [...currentPath, key]);\n }\n }\n\n seen.delete(currentValue as object);\n };\n\n walk(value, path);\n return patches;\n};\n\n/**\n * Split JSON into chunks constrained by character count of serialized chunk.\n */\nexport const chunkJSON = (\n value: JSONObject | JSONArray,\n maxChars: number\n): 
JsonChunk[] => {\n if (!isObject(value) && !Array.isArray(value)) {\n throw new Error('Root must be an object or array.');\n }\n\n if (maxChars < 500) {\n // You can lower this if you truly need; recommended to keep some envelope headroom.\n throw new Error('maxChars is too small. Use at least 500 characters.');\n }\n\n const rootType: RootType = Array.isArray(value) ? 'array' : 'object';\n let sourceString: string;\n\n try {\n sourceString = JSON.stringify(value);\n } catch {\n // Provide a deterministic error message for circular refs\n throw new Error('Cannot serialize circular structures to JSON.');\n }\n\n const checksum = computeDjb2(sourceString);\n const allPatches = flattenToPatches(value as JSONValue, maxChars);\n\n const chunks: JsonChunk[] = [];\n let currentChunk: JsonChunk = {\n schemaVersion: 1,\n index: 0, // provisional\n total: 0, // provisional\n rootType,\n checksum,\n entries: [],\n };\n\n const emptyEnvelopeSize = JSON.stringify({\n ...currentChunk,\n entries: [],\n }).length;\n\n const tryFlush = () => {\n if (currentChunk.entries.length > 0) {\n chunks.push(currentChunk);\n currentChunk = {\n schemaVersion: 1,\n index: 0,\n total: 0,\n rootType,\n checksum,\n entries: [],\n };\n }\n };\n\n for (const patch of allPatches) {\n // Would adding this patch exceed maxChars?\n const withPatchSize =\n emptyEnvelopeSize +\n JSON.stringify(currentChunk.entries).length + // current entries array\n (currentChunk.entries.length ? 1 : 0) + // possible comma\n JSON.stringify(patch).length;\n\n if (withPatchSize <= maxChars) {\n currentChunk.entries.push(patch);\n } else {\n // Start a new chunk if current has items\n if (currentChunk.entries.length > 0) {\n tryFlush();\n }\n\n // Ensure single patch fits into an empty chunk\n const singleSize = emptyEnvelopeSize + JSON.stringify([patch]).length;\n\n if (singleSize > maxChars) {\n // This should only happen for massive strings, which we pre-split;\n // if it happens for a non-string, we cannot split further.\n throw new Error(\n 'A single entry exceeds maxChars and cannot be split. Reduce entry size or increase maxChars.'\n );\n }\n\n currentChunk.entries.push(patch);\n }\n }\n\n tryFlush();\n\n // Ensure at least one chunk exists (even for empty root)\n if (chunks.length === 0) {\n chunks.push({\n schemaVersion: 1,\n index: 0,\n total: 0, // provisional\n rootType,\n checksum,\n entries: [],\n });\n }\n\n // Finalize indices & totals\n const totalChunks = chunks.length;\n\n chunks.forEach((chunk, index) => {\n chunk.index = index;\n chunk.total = totalChunks;\n });\n\n return chunks;\n};\n\n/**\n * Reassemble JSON from chunks.\n * - Validates checksums and indices.\n * - Applies 'set' patches and merges string pieces from 'str-append'.\n */\n/**\n * Reconstruct content from a single chunk without validation.\n * Useful for processing individual chunks in a pipeline where you don't have all chunks yet.\n * Note: This will only reconstruct the partial content contained in this chunk.\n */\nexport const reconstructFromSingleChunk = (\n chunk: JsonChunk\n): JSONObject | JSONArray => {\n const root: any = chunk.rootType === 'array' ? 
[] : {};\n\n // Apply all 'set' patches from this chunk\n for (const entry of chunk.entries) {\n if (entry.op === 'set') {\n setAtPath(root, entry.path, entry.value);\n }\n }\n\n // Reconcile split-node sentinels for strings/arrays\n // When reconstructing from a single chunk, we may have incomplete split nodes\n const reconcileSplitNodes = (node: any): any => {\n if (node === null || typeof node !== 'object') return node;\n\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n node[i] = reconcileSplitNodes(node[i]);\n }\n return node;\n }\n\n // string split-node\n if ((node as any)['__splittedType'] === 'string') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total <= 0) {\n // Invalid split node, return as-is\n return node;\n }\n\n const parts: string[] = [];\n let hasAllParts = true;\n\n for (let i = 1; i <= total; i++) {\n const piece = (node as any)[String(i)];\n\n if (typeof piece !== 'string') {\n hasAllParts = false;\n break;\n }\n\n parts.push(piece);\n }\n\n // Only reconstruct if we have all parts, otherwise return the node as-is\n if (hasAllParts) {\n return parts.join('');\n }\n\n return node;\n }\n\n // array split-node (optional support)\n if ((node as any)['__splittedType'] === 'array') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total < 0) {\n // Invalid split node, return as-is\n return node;\n }\n\n const output: any[] = [];\n let hasAllParts = true;\n\n for (let i = 1; i <= total; i++) {\n const slice = (node as any)[String(i)];\n\n if (!Array.isArray(slice)) {\n hasAllParts = false;\n break;\n }\n\n for (let j = 0; j < slice.length; j++) {\n output.push(reconcileSplitNodes(slice[j]));\n }\n }\n\n // Only reconstruct if we have all parts\n if (hasAllParts) {\n return output;\n }\n\n return node;\n }\n\n // walk normal object\n for (const key of Object.keys(node)) {\n node[key] = reconcileSplitNodes(node[key]);\n }\n\n return node;\n };\n\n return reconcileSplitNodes(root);\n};\n\nexport const assembleJSON = (chunks: JsonChunk[]): JSONObject | JSONArray => {\n if (!chunks || chunks.length === 0) {\n throw new Error('No chunks provided.');\n }\n\n // Basic validation & sort\n const sorted = [...chunks].sort((a, b) => a.index - b.index);\n const { checksum, rootType } = sorted[0];\n const schemaVersion = 1;\n\n for (let i = 0; i < sorted.length; i++) {\n const chunk = sorted[i];\n\n if (chunk.schemaVersion !== schemaVersion) {\n console.error('Unsupported schemaVersion.', {\n cause: chunk,\n schemaVersion,\n });\n throw new Error('Unsupported schemaVersion.');\n }\n\n if (chunk.rootType !== rootType) {\n console.error('Chunks rootType mismatch.', {\n cause: chunk,\n rootType,\n });\n throw new Error('Chunks rootType mismatch.');\n }\n\n if (chunk.checksum !== checksum) {\n console.error('Chunks checksum mismatch (different source objects?).', {\n cause: chunk,\n checksum,\n });\n throw new Error('Chunks checksum mismatch (different source objects?).');\n }\n\n if (chunk.index !== i) {\n console.error('Chunk indices are not contiguous or sorted.', {\n cause: chunk,\n index: chunk.index,\n i,\n });\n throw new Error('Chunk indices are not contiguous or sorted.');\n }\n\n // Defer total check until after reconstruction to prefer more specific errors\n }\n\n const root: any = rootType === 'array' ? 
[] : {};\n\n // Collect string parts by path\n const stringParts = new Map<\n string,\n { path: Path; total: number; received: StrAppendPatch[] }\n >();\n\n const applySet = (patch: SetPatch) =>\n setAtPath(root, patch.path, patch.value);\n\n for (const chunk of sorted) {\n for (const entry of chunk.entries) {\n if (entry.op === 'set') {\n applySet(entry);\n } else {\n const key = pathKey(entry.path);\n const record = stringParts.get(key) ?? {\n path: entry.path,\n total: entry.total,\n received: [],\n };\n\n if (record.total !== entry.total) {\n throw new Error('Inconsistent string part totals for a path.');\n }\n\n record.received.push(entry);\n stringParts.set(key, record);\n }\n }\n }\n\n // Stitch strings\n for (const { path, total, received } of stringParts.values()) {\n if (received.length !== total) {\n throw new Error('Missing string parts for a path; incomplete chunk set.');\n }\n\n received.sort((a, b) => a.index - b.index);\n const fullString = received.map((part) => part.value).join('');\n setAtPath(root, path, fullString);\n }\n\n // Reconcile split-node sentinels for strings/arrays after all patches applied\n const reconcileSplitNodes = (node: any): any => {\n if (node === null || typeof node !== 'object') return node;\n\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n node[i] = reconcileSplitNodes(node[i]);\n }\n return node;\n }\n\n // string split-node\n if ((node as any)['__splittedType'] === 'string') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total <= 0) {\n throw new Error('Invalid split-node total for a path.');\n }\n\n const parts: string[] = [];\n\n for (let i = 1; i <= total; i++) {\n const piece = (node as any)[String(i)];\n\n if (typeof piece !== 'string') {\n throw new Error(\n 'Missing string parts for a path; incomplete chunk set.'\n );\n }\n\n parts.push(piece);\n }\n\n return parts.join('');\n }\n\n // array split-node (optional support)\n if ((node as any)['__splittedType'] === 'array') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total < 0) {\n throw new Error('Invalid split-node total for a path.');\n }\n\n const output: any[] = [];\n\n for (let i = 1; i <= total; i++) {\n const slice = (node as any)[String(i)];\n\n if (!Array.isArray(slice)) {\n throw new Error(\n 'Missing string parts for a path; incomplete chunk set.'\n );\n }\n\n for (let j = 0; j < slice.length; j++) {\n output.push(reconcileSplitNodes(slice[j]));\n }\n }\n\n return output;\n }\n\n // walk normal object\n for (const key of Object.keys(node)) {\n node[key] = reconcileSplitNodes(node[key]);\n }\n\n return node;\n };\n\n const reconciled = reconcileSplitNodes(root);\n\n // Now validate totals match provided count\n for (let i = 0; i < sorted.length; i++) {\n const chunk = sorted[i];\n\n if (chunk.total !== sorted.length) {\n throw new Error(\n `Chunk total does not match provided count. 
Expected ${sorted.length}, but chunk ${i} has total=${chunk.total}`\n );\n }\n }\n\n return reconciled;\n};\n\n/* -------------------------------------------\n * Example usage\n * -------------------------------------------\nconst big: JSONObject = {\n title: \"Document\",\n content: \"…a very very long text…\",\n items: Array.from({ length: 2000 }, (_, i) => ({ id: i, label: `Item ${i}` }))\n};\n\n// Split to ~16k-char chunks\nconst chunks = chunkJSON(big, 16_000);\n\n// Send each `chunks[i]` as JSON to your backend.\n// Later, reassemble:\nconst restored = assembleJSON(chunks);\nconsole.log(JSON.stringify(restored) === JSON.stringify(big)); // true\n*/\n"],"mappings":";;;;;;;;;;AA0CA,MAAM,YAAY,QAAiD;AACjE,QAAO,OAAO,QAAQ,YAAY,QAAQ,QAAQ,CAAC,MAAM,QAAQ,IAAI;;AAGvE,MAAM,eAAe,QAAwB;CAE3C,IAAI,OAAO;AAEX,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,SAAS,QAAQ,KAAK,OAAQ,IAAI,WAAW,EAAE;AAIjD,SAAQ,SAAS,GAAG,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI;;AAGnD,MAAM,aAAa,MAAW,MAAY,UAAqB;CAC7D,IAAI,UAAU;AAEd,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK;EACxC,MAAM,MAAM,KAAK;EAEjB,MAAM,cAAc,OADJ,KAAK,IAAI,OACc;AAEvC,MAAI,OAAO,QAAQ,UAAU;AAC3B,OAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kCAAkC,IAAI;AAGxD,OAAI,QAAQ,SAAS,OACnB,SAAQ,OAAO,cAAc,EAAE,GAAG,EAAE;AAGtC,aAAU,QAAQ;SACb;AACL,OAAI,CAAC,SAAS,QAAQ,CACpB,OAAM,IAAI,MAAM,mCAAmC,IAAI;AAGzD,OAAI,EAAE,OAAO,SACX,CAAC,QAAgB,OAAO,cAAc,EAAE,GAAG,EAAE;AAG/C,aAAW,QAAgB;;;CAI/B,MAAM,OAAO,KAAK,KAAK,SAAS;AAEhC,KAAI,OAAO,SAAS,UAAU;AAC5B,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kCAAkC;AAGpD,UAAQ,QAAQ;QACX;AACL,MAAI,CAAC,SAAS,QAAQ,CACpB,OAAM,IAAI,MAAM,mCAAmC;AAGrD,EAAC,QAAgB,QAAQ;;;AAI7B,MAAM,WAAW,SAAuB;AAEtC,QAAO,KAAK,UAAU,KAAK;;;;;AAM7B,MAAM,uBACJ,KACA,oBACa;AACb,KAAI,mBAAmB,EACrB,OAAM,IAAI,MAAM,uBAAuB;CAGzC,MAAMA,SAAmB,EAAE;CAC3B,IAAI,SAAS;AAEb,QAAO,SAAS,IAAI,QAAQ;EAC1B,MAAM,OAAOC,gCAAS,KAAK;GACzB,WAAW;GACX,YAAY;GACb,CAAC;AAEF,MAAI,CAAC,KAAM;AAEX,SAAO,KAAK,KAAK;AACjB,YAAU,KAAK;;AAGjB,QAAO;;;;;;AAOT,MAAM,oBACJ,OACA,kBACA,OAAa,EAAE,EACf,uBAAO,IAAI,SAAiB,KAChB;CAGZ,MAAM,iBAAiB,KAAK,IAC1B,GACA,KAAK,OAAO,mBAAmB,OAAO,GAAI,CAC3C;CAED,MAAMC,UAAmB,EAAE;CAE3B,MAAM,QAAQ,cAAyB,gBAAsB;AAC3D,MAAI,OAAO,iBAAiB,UAAU;GAGpC,MAAMC,YAAsB;IAC1B,IAAI;IACJ,MAAM;IACN,OAAO;IACR;AAGD,OAFgB,KAAK,UAAU,UAAU,CAAC,SAAS,OAEpC,kBAAkB;AAC/B,YAAQ,KAAK,UAAU;AACvB;;GAIF,MAAM,QAAQ,oBAAoB,cAAc,eAAe;AAG/D,WAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,iBAAiB;IACxC,OAAO;IACR,CAAC;AACF,WAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,UAAU;IACjC,OAAO,MAAM;IACd,CAAC;AAEF,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,SAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,OAAO,IAAI,EAAE,CAAC;IACrC,OAAO,MAAM;IACd,CAAC;AAGJ;;AAGF,MAAI,iBAAiB,QAAQ,OAAO,iBAAiB,UAAU;AAC7D,WAAQ,KAAK;IAAE,IAAI;IAAO,MAAM;IAAa,OAAO;IAAc,CAAC;AACnE;;AAGF,MAAI,KAAK,IAAI,aAAuB,CAClC,OAAM,IAAI,MAAM,gDAAgD;AAGlE,OAAK,IAAI,aAAuB;AAEhC,MAAI,MAAM,QAAQ,aAAa,CAC7B,MAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,IACvC,MAAK,aAAa,IAAiB,CAAC,GAAG,aAAa,EAAE,CAAC;MAGzD,MAAK,MAAM,OAAO,OAAO,KAAK,aAAa,CACzC,MAAM,aAA4B,MAAM,CAAC,GAAG,aAAa,IAAI,CAAC;AAIlE,OAAK,OAAO,aAAuB;;AAGrC,MAAK,OAAO,KAAK;AACjB,QAAO;;;;;AAMT,MAAa,aACX,OACA,aACgB;AAChB,KAAI,CAAC,SAAS,MAAM,IAAI,CAAC,MAAM,QAAQ,MAAM,CAC3C,OAAM,IAAI,MAAM,mCAAmC;AAGrD,KAAI,WAAW,IAEb,OAAM,IAAI,MAAM,sDAAsD;CAGxE,MAAMC,WAAqB,MAAM,QAAQ,MAAM,GAAG,UAAU;CAC5D,IAAIC;AAEJ,KAAI;AACF,iBAAe,KAAK,UAAU,MAAM;SAC9B;AAEN,QAAM,IAAI,MAAM,gDAAgD;;CAGlE,MAAM,WAAW,YAAY,aAAa;CAC1C,MAAM,aAAa,iBAAiB,OAAoB,SAAS;CAEjE,MAAMC,SAAsB,EAAE;CAC9B,IAAIC,eAA0B;EAC5B,eAAe;EACf,OAAO;EACP,OAAO;EACP;EACA;EACA,SAAS,EAAE;EACZ;CAED,MAAM,oBAAoB,KAAK,UAAU;EACvC,GAAG;EACH,SAAS,EAAE;EACZ,CAAC,CAAC;CAEH,M
AAM,iBAAiB;AACrB,MAAI,aAAa,QAAQ,SAAS,GAAG;AACnC,UAAO,KAAK,aAAa;AACzB,kBAAe;IACb,eAAe;IACf,OAAO;IACP,OAAO;IACP;IACA;IACA,SAAS,EAAE;IACZ;;;AAIL,MAAK,MAAM,SAAS,WAQlB,KALE,oBACA,KAAK,UAAU,aAAa,QAAQ,CAAC,UACpC,aAAa,QAAQ,SAAS,IAAI,KACnC,KAAK,UAAU,MAAM,CAAC,UAEH,SACnB,cAAa,QAAQ,KAAK,MAAM;MAC3B;AAEL,MAAI,aAAa,QAAQ,SAAS,EAChC,WAAU;AAMZ,MAFmB,oBAAoB,KAAK,UAAU,CAAC,MAAM,CAAC,CAAC,SAE9C,SAGf,OAAM,IAAI,MACR,+FACD;AAGH,eAAa,QAAQ,KAAK,MAAM;;AAIpC,WAAU;AAGV,KAAI,OAAO,WAAW,EACpB,QAAO,KAAK;EACV,eAAe;EACf,OAAO;EACP,OAAO;EACP;EACA;EACA,SAAS,EAAE;EACZ,CAAC;CAIJ,MAAM,cAAc,OAAO;AAE3B,QAAO,SAAS,OAAO,UAAU;AAC/B,QAAM,QAAQ;AACd,QAAM,QAAQ;GACd;AAEF,QAAO;;;;;;;;;;;;AAaT,MAAa,8BACX,UAC2B;CAC3B,MAAMC,OAAY,MAAM,aAAa,UAAU,EAAE,GAAG,EAAE;AAGtD,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,WAAU,MAAM,MAAM,MAAM,MAAM,MAAM;CAM5C,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EAExC,QAAO;GAGT,MAAMC,QAAkB,EAAE;GAC1B,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,UAAU;AAC7B,mBAAc;AACd;;AAGF,UAAM,KAAK,MAAM;;AAInB,OAAI,YACF,QAAO,MAAM,KAAK,GAAG;AAGvB,UAAO;;AAIT,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EAEvC,QAAO;GAGT,MAAMC,SAAgB,EAAE;GACxB,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,EAAE;AACzB,mBAAc;AACd;;AAGF,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAK9C,OAAI,YACF,QAAO;AAGT,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;AAGT,QAAO,oBAAoB,KAAK;;AAGlC,MAAa,gBAAgB,WAAgD;AAC3E,KAAI,CAAC,UAAU,OAAO,WAAW,EAC/B,OAAM,IAAI,MAAM,sBAAsB;CAIxC,MAAM,SAAS,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;CAC5D,MAAM,EAAE,UAAU,aAAa,OAAO;CACtC,MAAM,gBAAgB;AAEtB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,kBAAkB,eAAe;AACzC,WAAQ,MAAM,8BAA8B;IAC1C,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,6BAA6B;;AAG/C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,6BAA6B;IACzC,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,4BAA4B;;AAG9C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,yDAAyD;IACrE,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,wDAAwD;;AAG1E,MAAI,MAAM,UAAU,GAAG;AACrB,WAAQ,MAAM,+CAA+C;IAC3D,OAAO;IACP,OAAO,MAAM;IACb;IACD,CAAC;AACF,SAAM,IAAI,MAAM,8CAA8C;;;CAMlE,MAAMF,OAAY,aAAa,UAAU,EAAE,GAAG,EAAE;CAGhD,MAAM,8BAAc,IAAI,KAGrB;CAEH,MAAM,YAAY,UAChB,UAAU,MAAM,MAAM,MAAM,MAAM,MAAM;AAE1C,MAAK,MAAM,SAAS,OAClB,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,UAAS,MAAM;MACV;EACL,MAAM,MAAM,QAAQ,MAAM,KAAK;EAC/B,MAAM,SAAS,YAAY,IAAI,IAAI,IAAI;GACrC,MAAM,MAAM;GACZ,OAAO,MAAM;GACb,UAAU,EAAE;GACb;AAED,MAAI,OAAO,UAAU,MAAM,MACzB,OAAM,IAAI,MAAM,8CAA8C;AAGhE,SAAO,SAAS,KAAK,MAAM;AAC3B,cAAY,IAAI,KAAK,OAAO;;AAMlC,MAAK,MAAM,EAAE,MAAM,OAAO,cAAc,YAAY,QAAQ,EAAE;AAC5D,MAAI,SAAS,WAAW,MACtB,OAAM,IAAI,MAAM,yDAAyD;AAG3E,WAAS,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAE1C,YAAU,MAAM,MADG,SAAS,KAAK,SAAS,KAAK,MAAM,CAAC,KAAK,GAAG,CAC7B;;CAInC,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EACxC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAMC,QAAkB,EAAE;AAE1B,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,SACnB,OAAM,IAAI,MACR,yDACD;AAGH,UAAM,KAAK,MAA
M;;AAGnB,UAAO,MAAM,KAAK,GAAG;;AAIvB,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EACvC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAMC,SAAgB,EAAE;AAExB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,CACvB,OAAM,IAAI,MACR,yDACD;AAGH,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAI9C,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;CAGT,MAAM,aAAa,oBAAoB,KAAK;AAG5C,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,UAAU,OAAO,OACzB,OAAM,IAAI,MACR,uDAAuD,OAAO,OAAO,cAAc,EAAE,aAAa,MAAM,QACzG;;AAIL,QAAO"}
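The `chunkJSON` utility whose source is embedded in this map splits a JSON value into chunks that each stay under a character budget and are themselves valid JSON, then reassembles them with checksum and index validation. The round-trip below is adapted from the commented example at the bottom of `src/utils/chunkJSON.ts`; the relative import path is an assumption, since the util is package-internal:

```typescript
// Round-trip sketch adapted from the example comment embedded in
// src/utils/chunkJSON.ts; the import path is assumed (package-internal util).
import { assembleJSON, chunkJSON, type JSONObject } from './utils/chunkJSON';

const big: JSONObject = {
  title: 'Document',
  content: 'a very very long text '.repeat(2_000), // long enough to be split
  items: Array.from({ length: 2_000 }, (_, i) => ({ id: i, label: `Item ${i}` })),
};

// Split to ~16k-char chunks; each chunk carries a schemaVersion, index/total,
// rootType, and a DJB2 checksum of the full serialized source.
const chunks = chunkJSON(big, 16_000);

// Send each `chunks[i]` independently, then reassemble on the other side.
const restored = assembleJSON(chunks);
console.log(JSON.stringify(restored) === JSON.stringify(big)); // true
```

The `reconstructFromSingleChunk` export covers the pipeline case where only one chunk is at hand: it applies that chunk's patches without validation and leaves incomplete split-string nodes as-is.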
package/dist/cjs/utils/formatter.cjs
@@ -1,14 +1,10 @@
  const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
  let __intlayer_config = require("@intlayer/config");
- __intlayer_config = require_rolldown_runtime.__toESM(__intlayer_config);
  let node_path = require("node:path");
- node_path = require_rolldown_runtime.__toESM(node_path);
  let __intlayer_config_built = require("@intlayer/config/built");
  __intlayer_config_built = require_rolldown_runtime.__toESM(__intlayer_config_built);
  let __intlayer_core = require("@intlayer/core");
- __intlayer_core = require_rolldown_runtime.__toESM(__intlayer_core);
  let __intlayer_types = require("@intlayer/types");
- __intlayer_types = require_rolldown_runtime.__toESM(__intlayer_types);
 
  //#region src/utils/formatter.ts
  const formatPath = (path, color) => [path].flat().map((path$1) => path$1.startsWith("/") ? (0, node_path.relative)(__intlayer_config_built.default.content.baseDir, path$1) : path$1).map((relativePath) => color === false ? relativePath : (0, __intlayer_config.colorizePath)(relativePath, color)).join(`, `);
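The only substantive change in this file (and the matching one in `runOnce.cjs` below) is that rolldown's `__toESM` interop wrapper is no longer applied to modules the bundle only reads by named property (`node_path.relative`, `__intlayer_config.colorizePath`); the wrapper is kept where a `default` export is consumed (`__intlayer_config_built.default`, `__intlayer_core_package_json.default`). A minimal sketch of what a `__toESM`-style helper does, assuming semantics like esbuild's helper of the same name (this is not the actual rolldown runtime source):

```typescript
// Hypothetical __toESM-style interop helper: it re-exposes a CommonJS
// exports object so that ESM-style consumers can read `.default` on it.
const toESM = (mod: any, isNodeMode?: boolean): any => {
  // A genuine ESM namespace (marked __esModule) already has the right shape.
  if (mod && mod.__esModule && !isNodeMode) return mod;
  // Otherwise wrap it: named properties pass through by copy, and `default`
  // points at the whole CJS exports object.
  return { ...mod, default: mod };
};
```

Skipping the wrapper for namespace-only consumers avoids an object copy per import; a call like `(0, node_path.relative)(...)` behaves identically either way.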
package/dist/cjs/utils/formatter.cjs.map
@@ -1 +1 @@
- {"version":3,"file":"formatter.cjs","names":["path","configuration","ANSIColors","locale","Locales"],"sources":["../../../src/utils/formatter.ts"],"sourcesContent":["import { relative } from 'node:path';\nimport { ANSIColors, colorize, colorizePath } from '@intlayer/config';\nimport configuration from '@intlayer/config/built';\nimport { getLocaleName } from '@intlayer/core';\nimport { Locales, type LocalesValues } from '@intlayer/types';\n\nexport const formatPath = (\n path: string | string[],\n color?: ANSIColors | false\n) =>\n [path]\n .flat()\n .map((path) =>\n path.startsWith('/')\n ? relative(configuration.content.baseDir, path)\n : path\n )\n .map((relativePath) =>\n color === false ? relativePath : colorizePath(relativePath, color)\n )\n .join(`, `);\n\nexport const formatLocale = (\n locale: LocalesValues | LocalesValues[],\n color: ANSIColors | false = ANSIColors.GREEN\n) =>\n [locale]\n .flat()\n .map((locale) => `${getLocaleName(locale, Locales.ENGLISH)} (${locale})`)\n .map((text) => (color === false ? text : colorize(text, color)))\n .join(`, `);\n"],"mappings":";;;;;;;;;;;;;AAMA,MAAa,cACX,MACA,UAEA,CAAC,KAAK,CACH,MAAM,CACN,KAAK,WACJA,OAAK,WAAW,IAAI,2BACPC,gCAAc,QAAQ,SAASD,OAAK,GAC7CA,OACL,CACA,KAAK,iBACJ,UAAU,QAAQ,mDAA4B,cAAc,MAAM,CACnE,CACA,KAAK,KAAK;AAEf,MAAa,gBACX,QACA,QAA4BE,6BAAW,UAEvC,CAAC,OAAO,CACL,MAAM,CACN,KAAK,aAAW,sCAAiBC,UAAQC,yBAAQ,QAAQ,CAAC,IAAID,SAAO,GAAG,CACxE,KAAK,SAAU,UAAU,QAAQ,uCAAgB,MAAM,MAAM,CAAE,CAC/D,KAAK,KAAK"}
+ {"version":3,"file":"formatter.cjs","names":["path","configuration","ANSIColors","locale","Locales"],"sources":["../../../src/utils/formatter.ts"],"sourcesContent":["import { relative } from 'node:path';\nimport { ANSIColors, colorize, colorizePath } from '@intlayer/config';\nimport configuration from '@intlayer/config/built';\nimport { getLocaleName } from '@intlayer/core';\nimport { Locales, type LocalesValues } from '@intlayer/types';\n\nexport const formatPath = (\n path: string | string[],\n color?: ANSIColors | false\n) =>\n [path]\n .flat()\n .map((path) =>\n path.startsWith('/')\n ? relative(configuration.content.baseDir, path)\n : path\n )\n .map((relativePath) =>\n color === false ? relativePath : colorizePath(relativePath, color)\n )\n .join(`, `);\n\nexport const formatLocale = (\n locale: LocalesValues | LocalesValues[],\n color: ANSIColors | false = ANSIColors.GREEN\n) =>\n [locale]\n .flat()\n .map((locale) => `${getLocaleName(locale, Locales.ENGLISH)} (${locale})`)\n .map((text) => (color === false ? text : colorize(text, color)))\n .join(`, `);\n"],"mappings":";;;;;;;;;AAMA,MAAa,cACX,MACA,UAEA,CAAC,KAAK,CACH,MAAM,CACN,KAAK,WACJA,OAAK,WAAW,IAAI,2BACPC,gCAAc,QAAQ,SAASD,OAAK,GAC7CA,OACL,CACA,KAAK,iBACJ,UAAU,QAAQ,mDAA4B,cAAc,MAAM,CACnE,CACA,KAAK,KAAK;AAEf,MAAa,gBACX,QACA,QAA4BE,6BAAW,UAEvC,CAAC,OAAO,CACL,MAAM,CACN,KAAK,aAAW,sCAAiBC,UAAQC,yBAAQ,QAAQ,CAAC,IAAID,SAAO,GAAG,CACxE,KAAK,SAAU,UAAU,QAAQ,uCAAgB,MAAM,MAAM,CAAE,CAC/D,KAAK,KAAK"}
@@ -1,8 +1,6 @@
  const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
  let node_path = require("node:path");
- node_path = require_rolldown_runtime.__toESM(node_path);
  let node_fs_promises = require("node:fs/promises");
- node_fs_promises = require_rolldown_runtime.__toESM(node_fs_promises);
  let __intlayer_core_package_json = require("@intlayer/core/package.json");
  __intlayer_core_package_json = require_rolldown_runtime.__toESM(__intlayer_core_package_json);
 
@@ -1 +1 @@
- {"version":3,"file":"runOnce.cjs","names":["cachedVersion: string | undefined","packageJson","err: any","data: SentinelData"],"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":["import { mkdir, readFile, stat, unlink, writeFile } from 'node:fs/promises';\nimport { dirname } from 'node:path';\nimport packageJson from '@intlayer/core/package.json' with { type: 'json' };\n\ntype RunOnceOptions = {\n /**\n * The function to execute when the sentinel is not found or is older than the cache timeout.\n */\n onIsCached?: () => void | Promise<void>;\n /**\n * The time window in milliseconds during which the sentinel is considered valid.\n *\n * @default 60000 = 1 minute\n */\n cacheTimeoutMs?: number;\n /**\n * If true, the callback will always run. If undefined, the callback will run only if the sentinel is older than the cache timeout.\n *\n * @default false\n */\n forceRun?: boolean;\n};\n\nconst DEFAULT_RUN_ONCE_OPTIONS = {\n cacheTimeoutMs: 60 * 1000, // 1 minute in milliseconds,\n} satisfies RunOnceOptions;\n\ntype SentinelData = {\n version: string;\n timestamp: number;\n};\n\n/**\n * Ensures a callback function runs only once within a specified time window across multiple processes.\n * Uses a sentinel file to coordinate execution and prevent duplicate work.\n *\n * @param sentinelFilePath - Path to the sentinel file used for coordination\n * @param callback - The function to execute (should be async)\n * @param options - The options for the runOnce function\n *\n * @example\n * ```typescript\n * await runPrepareIntlayerOnce(\n * '/tmp/intlayer-sentinel',\n * async () => {\n * // Your initialization logic here\n * await prepareIntlayer();\n * },\n * 30 * 1000 // 30 seconds cache\n * );\n * ```\n *\n * @throws {Error} When there are unexpected filesystem errors\n */\nexport const runOnce = async (\n sentinelFilePath: string,\n callback: () => void | Promise<void>,\n options?: RunOnceOptions\n) => {\n const { onIsCached, cacheTimeoutMs, forceRun } = {\n ...DEFAULT_RUN_ONCE_OPTIONS,\n ...(options ?? {}),\n };\n const currentTimestamp = Date.now();\n\n try {\n // Check if sentinel file exists and get its stats\n const sentinelStats = await stat(sentinelFilePath);\n const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();\n\n // Determine if we should rebuild based on cache age, force flag, or version mismatch\n let shouldRebuild = Boolean(forceRun) || sentinelAge > cacheTimeoutMs!;\n\n if (!shouldRebuild) {\n try {\n const raw = await readFile(sentinelFilePath, 'utf8');\n let cachedVersion: string | undefined;\n try {\n const parsed = JSON.parse(raw) as Partial<SentinelData>;\n cachedVersion = parsed.version;\n } catch {\n // Legacy format (timestamp only). 
Force a rebuild once to write versioned sentinel.\n cachedVersion = undefined;\n }\n\n if (!cachedVersion || cachedVersion !== packageJson.version) {\n shouldRebuild = true;\n }\n } catch {\n // If we cannot read the file, err on the safe side and rebuild\n shouldRebuild = true;\n }\n }\n\n if (shouldRebuild) {\n try {\n await unlink(sentinelFilePath);\n } catch (err: any) {\n if (err.code !== 'ENOENT') throw err;\n }\n // Fall through to create new sentinel and rebuild\n } else {\n await onIsCached?.();\n // Sentinel is recent and versions match, no need to rebuild\n return;\n }\n } catch (err: any) {\n if (err.code === 'ENOENT') {\n // File doesn't exist, continue to create it\n } else {\n throw err; // unexpected FS error\n }\n }\n\n try {\n // Ensure the directory exists before writing the file\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n\n // O_EXCL ensures only the *first* process can create the file\n const data: SentinelData = {\n version: packageJson.version,\n timestamp: currentTimestamp,\n };\n await writeFile(sentinelFilePath, JSON.stringify(data), { flag: 'wx' });\n } catch (err: any) {\n if (err.code === 'EEXIST') {\n // Another process already created it → we're done\n return;\n }\n throw err; // unexpected FS error\n }\n\n await callback();\n};\n"],"mappings":";;;;;;;;;AAuBA,MAAM,2BAA2B,EAC/B,gBAAgB,KAAK,KACtB;;;;;;;;;;;;;;;;;;;;;;;AA6BD,MAAa,UAAU,OACrB,kBACA,UACA,YACG;CACH,MAAM,EAAE,YAAY,gBAAgB,aAAa;EAC/C,GAAG;EACH,GAAI,WAAW,EAAE;EAClB;CACD,MAAM,mBAAmB,KAAK,KAAK;AAEnC,KAAI;EAGF,MAAM,cAAc,oBADE,iCAAW,iBAAiB,EACG,MAAM,SAAS;EAGpE,IAAI,gBAAgB,QAAQ,SAAS,IAAI,cAAc;AAEvD,MAAI,CAAC,cACH,KAAI;GACF,MAAM,MAAM,qCAAe,kBAAkB,OAAO;GACpD,IAAIA;AACJ,OAAI;AAEF,oBADe,KAAK,MAAM,IAAI,CACP;WACjB;AAEN,oBAAgB;;AAGlB,OAAI,CAAC,iBAAiB,kBAAkBC,qCAAY,QAClD,iBAAgB;UAEZ;AAEN,mBAAgB;;AAIpB,MAAI,cACF,KAAI;AACF,sCAAa,iBAAiB;WACvBC,KAAU;AACjB,OAAI,IAAI,SAAS,SAAU,OAAM;;OAG9B;AACL,SAAM,cAAc;AAEpB;;UAEKA,KAAU;AACjB,MAAI,IAAI,SAAS,UAAU,OAGzB,OAAM;;AAIV,KAAI;AAEF,2DAAoB,iBAAiB,EAAE,EAAE,WAAW,MAAM,CAAC;EAG3D,MAAMC,OAAqB;GACzB,SAASF,qCAAY;GACrB,WAAW;GACZ;AACD,wCAAgB,kBAAkB,KAAK,UAAU,KAAK,EAAE,EAAE,MAAM,MAAM,CAAC;UAChEC,KAAU;AACjB,MAAI,IAAI,SAAS,SAEf;AAEF,QAAM;;AAGR,OAAM,UAAU"}
+ {"version":3,"file":"runOnce.cjs","names":["cachedVersion: string | undefined","packageJson","err: any","data: SentinelData"],"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":["import { mkdir, readFile, stat, unlink, writeFile } from 'node:fs/promises';\nimport { dirname } from 'node:path';\nimport packageJson from '@intlayer/core/package.json' with { type: 'json' };\n\ntype RunOnceOptions = {\n /**\n * The function to execute when the sentinel is not found or is older than the cache timeout.\n */\n onIsCached?: () => void | Promise<void>;\n /**\n * The time window in milliseconds during which the sentinel is considered valid.\n *\n * @default 60000 = 1 minute\n */\n cacheTimeoutMs?: number;\n /**\n * If true, the callback will always run. If undefined, the callback will run only if the sentinel is older than the cache timeout.\n *\n * @default false\n */\n forceRun?: boolean;\n};\n\nconst DEFAULT_RUN_ONCE_OPTIONS = {\n cacheTimeoutMs: 60 * 1000, // 1 minute in milliseconds,\n} satisfies RunOnceOptions;\n\ntype SentinelData = {\n version: string;\n timestamp: number;\n};\n\n/**\n * Ensures a callback function runs only once within a specified time window across multiple processes.\n * Uses a sentinel file to coordinate execution and prevent duplicate work.\n *\n * @param sentinelFilePath - Path to the sentinel file used for coordination\n * @param callback - The function to execute (should be async)\n * @param options - The options for the runOnce function\n *\n * @example\n * ```typescript\n * await runPrepareIntlayerOnce(\n * '/tmp/intlayer-sentinel',\n * async () => {\n * // Your initialization logic here\n * await prepareIntlayer();\n * },\n * 30 * 1000 // 30 seconds cache\n * );\n * ```\n *\n * @throws {Error} When there are unexpected filesystem errors\n */\nexport const runOnce = async (\n sentinelFilePath: string,\n callback: () => void | Promise<void>,\n options?: RunOnceOptions\n) => {\n const { onIsCached, cacheTimeoutMs, forceRun } = {\n ...DEFAULT_RUN_ONCE_OPTIONS,\n ...(options ?? {}),\n };\n const currentTimestamp = Date.now();\n\n try {\n // Check if sentinel file exists and get its stats\n const sentinelStats = await stat(sentinelFilePath);\n const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();\n\n // Determine if we should rebuild based on cache age, force flag, or version mismatch\n let shouldRebuild = Boolean(forceRun) || sentinelAge > cacheTimeoutMs!;\n\n if (!shouldRebuild) {\n try {\n const raw = await readFile(sentinelFilePath, 'utf8');\n let cachedVersion: string | undefined;\n try {\n const parsed = JSON.parse(raw) as Partial<SentinelData>;\n cachedVersion = parsed.version;\n } catch {\n // Legacy format (timestamp only). 
Force a rebuild once to write versioned sentinel.\n cachedVersion = undefined;\n }\n\n if (!cachedVersion || cachedVersion !== packageJson.version) {\n shouldRebuild = true;\n }\n } catch {\n // If we cannot read the file, err on the safe side and rebuild\n shouldRebuild = true;\n }\n }\n\n if (shouldRebuild) {\n try {\n await unlink(sentinelFilePath);\n } catch (err: any) {\n if (err.code !== 'ENOENT') throw err;\n }\n // Fall through to create new sentinel and rebuild\n } else {\n await onIsCached?.();\n // Sentinel is recent and versions match, no need to rebuild\n return;\n }\n } catch (err: any) {\n if (err.code === 'ENOENT') {\n // File doesn't exist, continue to create it\n } else {\n throw err; // unexpected FS error\n }\n }\n\n try {\n // Ensure the directory exists before writing the file\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n\n // O_EXCL ensures only the *first* process can create the file\n const data: SentinelData = {\n version: packageJson.version,\n timestamp: currentTimestamp,\n };\n await writeFile(sentinelFilePath, JSON.stringify(data), { flag: 'wx' });\n } catch (err: any) {\n if (err.code === 'EEXIST') {\n // Another process already created it → we're done\n return;\n }\n throw err; // unexpected FS error\n }\n\n await callback();\n};\n"],"mappings":";;;;;;;AAuBA,MAAM,2BAA2B,EAC/B,gBAAgB,KAAK,KACtB;;;;;;;;;;;;;;;;;;;;;;;AA6BD,MAAa,UAAU,OACrB,kBACA,UACA,YACG;CACH,MAAM,EAAE,YAAY,gBAAgB,aAAa;EAC/C,GAAG;EACH,GAAI,WAAW,EAAE;EAClB;CACD,MAAM,mBAAmB,KAAK,KAAK;AAEnC,KAAI;EAGF,MAAM,cAAc,oBADE,iCAAW,iBAAiB,EACG,MAAM,SAAS;EAGpE,IAAI,gBAAgB,QAAQ,SAAS,IAAI,cAAc;AAEvD,MAAI,CAAC,cACH,KAAI;GACF,MAAM,MAAM,qCAAe,kBAAkB,OAAO;GACpD,IAAIA;AACJ,OAAI;AAEF,oBADe,KAAK,MAAM,IAAI,CACP;WACjB;AAEN,oBAAgB;;AAGlB,OAAI,CAAC,iBAAiB,kBAAkBC,qCAAY,QAClD,iBAAgB;UAEZ;AAEN,mBAAgB;;AAIpB,MAAI,cACF,KAAI;AACF,sCAAa,iBAAiB;WACvBC,KAAU;AACjB,OAAI,IAAI,SAAS,SAAU,OAAM;;OAG9B;AACL,SAAM,cAAc;AAEpB;;UAEKA,KAAU;AACjB,MAAI,IAAI,SAAS,UAAU,OAGzB,OAAM;;AAIV,KAAI;AAEF,2DAAoB,iBAAiB,EAAE,EAAE,WAAW,MAAM,CAAC;EAG3D,MAAMC,OAAqB;GACzB,SAASF,qCAAY;GACrB,WAAW;GACZ;AACD,wCAAgB,kBAAkB,KAAK,UAAU,KAAK,EAAE,EAAE,MAAM,MAAM,CAAC;UAChEC,KAAU;AACjB,MAAI,IAAI,SAAS,SAEf;AAEF,QAAM;;AAGR,OAAM,UAAU"}
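Note that the JSDoc example embedded in the runOnce source above passes a bare number as the third argument (and names the function runPrepareIntlayerOnce), while the exported signature takes an options object. A sketch consistent with the signature shown (the sentinel path is illustrative):

  // Runs the callback at most once per 30s across processes; concurrent
  // processes inside the window hit the sentinel and call onIsCached instead.
  await runOnce(
    '/tmp/intlayer-prepare.sentinel',
    async () => {
      // expensive one-time work, e.g. rebuilding dictionaries
    },
    { cacheTimeoutMs: 30_000, onIsCached: () => console.log('cache hit') }
  );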
@@ -1,6 +1,5 @@
  const require_rolldown_runtime = require('../../_virtual/rolldown_runtime.cjs');
  let node_child_process = require("node:child_process");
- node_child_process = require_rolldown_runtime.__toESM(node_child_process);
 
  //#region src/utils/runParallel/bin.ts
  const stripStderr = (stderr) => {
@@ -1 +1 @@
- {"version":3,"file":"bin.cjs","names":["normalizedOptions: SpawnOptions | undefined","normalizedDone: BinCallback","ch: ChildProcess"],"sources":["../../../../src/utils/runParallel/bin.ts"],"sourcesContent":["import type { ChildProcess } from 'node:child_process';\nimport { type SpawnOptions, spawn } from 'node:child_process';\n\ntype BinCallback = (err: Error | null, stdout?: string, code?: number) => void;\n\nconst stripStderr = (stderr: string | undefined): string | undefined => {\n if (!stderr) return;\n let cleaned = stderr.trim();\n // Strip bogus screen size error.\n // See https://github.com/microsoft/vscode/issues/98590\n const regex = /your \\d+x\\d+ screen size is bogus\\. expect trouble/gi;\n cleaned = cleaned.replace(regex, '');\n\n return cleaned.trim() || undefined;\n};\n\n/**\n * Spawn a binary and read its stdout.\n * @param cmd The name of the binary to spawn.\n * @param args The arguments for the binary.\n * @param options Optional option for the spawn function.\n * @param done Callback function.\n */\nexport const run = (\n cmd: string,\n args: string[],\n options: SpawnOptions | BinCallback | undefined,\n done?: BinCallback\n): void => {\n let normalizedOptions: SpawnOptions | undefined;\n let normalizedDone: BinCallback;\n\n if (typeof options === 'function') {\n normalizedDone = options;\n normalizedOptions = undefined;\n } else {\n normalizedDone = done!;\n normalizedOptions = options;\n }\n\n let executed = false;\n const ch: ChildProcess = spawn(cmd, args, normalizedOptions ?? {});\n let stdout = '';\n let stderr = '';\n\n if (ch.stdout) {\n ch.stdout.on('data', (d: Buffer) => {\n stdout += d.toString();\n });\n }\n\n if (ch.stderr) {\n ch.stderr.on('data', (d: Buffer) => {\n stderr += d.toString();\n });\n }\n\n ch.on('error', (err: Error) => {\n if (executed) return;\n executed = true;\n normalizedDone(new Error(String(err)));\n });\n\n ch.on('close', (code: number | null) => {\n if (executed) return;\n executed = true;\n\n const cleanedStderr = stripStderr(stderr);\n if (cleanedStderr) {\n return normalizedDone(new Error(cleanedStderr));\n }\n\n normalizedDone(null, stdout, code ?? undefined);\n });\n};\n"],"mappings":";;;;;AAKA,MAAM,eAAe,WAAmD;AACtE,KAAI,CAAC,OAAQ;CACb,IAAI,UAAU,OAAO,MAAM;AAI3B,WAAU,QAAQ,QADJ,wDACmB,GAAG;AAEpC,QAAO,QAAQ,MAAM,IAAI;;;;;;;;;AAU3B,MAAa,OACX,KACA,MACA,SACA,SACS;CACT,IAAIA;CACJ,IAAIC;AAEJ,KAAI,OAAO,YAAY,YAAY;AACjC,mBAAiB;AACjB,sBAAoB;QACf;AACL,mBAAiB;AACjB,sBAAoB;;CAGtB,IAAI,WAAW;CACf,MAAMC,mCAAyB,KAAK,MAAM,qBAAqB,EAAE,CAAC;CAClE,IAAI,SAAS;CACb,IAAI,SAAS;AAEb,KAAI,GAAG,OACL,IAAG,OAAO,GAAG,SAAS,MAAc;AAClC,YAAU,EAAE,UAAU;GACtB;AAGJ,KAAI,GAAG,OACL,IAAG,OAAO,GAAG,SAAS,MAAc;AAClC,YAAU,EAAE,UAAU;GACtB;AAGJ,IAAG,GAAG,UAAU,QAAe;AAC7B,MAAI,SAAU;AACd,aAAW;AACX,iBAAe,IAAI,MAAM,OAAO,IAAI,CAAC,CAAC;GACtC;AAEF,IAAG,GAAG,UAAU,SAAwB;AACtC,MAAI,SAAU;AACd,aAAW;EAEX,MAAM,gBAAgB,YAAY,OAAO;AACzC,MAAI,cACF,QAAO,eAAe,IAAI,MAAM,cAAc,CAAC;AAGjD,iBAAe,MAAM,QAAQ,QAAQ,OAAU;GAC/C"}
+ {"version":3,"file":"bin.cjs","names":["normalizedOptions: SpawnOptions | undefined","normalizedDone: BinCallback","ch: ChildProcess"],"sources":["../../../../src/utils/runParallel/bin.ts"],"sourcesContent":["import type { ChildProcess } from 'node:child_process';\nimport { type SpawnOptions, spawn } from 'node:child_process';\n\ntype BinCallback = (err: Error | null, stdout?: string, code?: number) => void;\n\nconst stripStderr = (stderr: string | undefined): string | undefined => {\n if (!stderr) return;\n let cleaned = stderr.trim();\n // Strip bogus screen size error.\n // See https://github.com/microsoft/vscode/issues/98590\n const regex = /your \\d+x\\d+ screen size is bogus\\. expect trouble/gi;\n cleaned = cleaned.replace(regex, '');\n\n return cleaned.trim() || undefined;\n};\n\n/**\n * Spawn a binary and read its stdout.\n * @param cmd The name of the binary to spawn.\n * @param args The arguments for the binary.\n * @param options Optional option for the spawn function.\n * @param done Callback function.\n */\nexport const run = (\n cmd: string,\n args: string[],\n options: SpawnOptions | BinCallback | undefined,\n done?: BinCallback\n): void => {\n let normalizedOptions: SpawnOptions | undefined;\n let normalizedDone: BinCallback;\n\n if (typeof options === 'function') {\n normalizedDone = options;\n normalizedOptions = undefined;\n } else {\n normalizedDone = done!;\n normalizedOptions = options;\n }\n\n let executed = false;\n const ch: ChildProcess = spawn(cmd, args, normalizedOptions ?? {});\n let stdout = '';\n let stderr = '';\n\n if (ch.stdout) {\n ch.stdout.on('data', (d: Buffer) => {\n stdout += d.toString();\n });\n }\n\n if (ch.stderr) {\n ch.stderr.on('data', (d: Buffer) => {\n stderr += d.toString();\n });\n }\n\n ch.on('error', (err: Error) => {\n if (executed) return;\n executed = true;\n normalizedDone(new Error(String(err)));\n });\n\n ch.on('close', (code: number | null) => {\n if (executed) return;\n executed = true;\n\n const cleanedStderr = stripStderr(stderr);\n if (cleanedStderr) {\n return normalizedDone(new Error(cleanedStderr));\n }\n\n normalizedDone(null, stdout, code ?? undefined);\n });\n};\n"],"mappings":";;;;AAKA,MAAM,eAAe,WAAmD;AACtE,KAAI,CAAC,OAAQ;CACb,IAAI,UAAU,OAAO,MAAM;AAI3B,WAAU,QAAQ,QADJ,wDACmB,GAAG;AAEpC,QAAO,QAAQ,MAAM,IAAI;;;;;;;;;AAU3B,MAAa,OACX,KACA,MACA,SACA,SACS;CACT,IAAIA;CACJ,IAAIC;AAEJ,KAAI,OAAO,YAAY,YAAY;AACjC,mBAAiB;AACjB,sBAAoB;QACf;AACL,mBAAiB;AACjB,sBAAoB;;CAGtB,IAAI,WAAW;CACf,MAAMC,mCAAyB,KAAK,MAAM,qBAAqB,EAAE,CAAC;CAClE,IAAI,SAAS;CACb,IAAI,SAAS;AAEb,KAAI,GAAG,OACL,IAAG,OAAO,GAAG,SAAS,MAAc;AAClC,YAAU,EAAE,UAAU;GACtB;AAGJ,KAAI,GAAG,OACL,IAAG,OAAO,GAAG,SAAS,MAAc;AAClC,YAAU,EAAE,UAAU;GACtB;AAGJ,IAAG,GAAG,UAAU,QAAe;AAC7B,MAAI,SAAU;AACd,aAAW;AACX,iBAAe,IAAI,MAAM,OAAO,IAAI,CAAC,CAAC;GACtC;AAEF,IAAG,GAAG,UAAU,SAAwB;AACtC,MAAI,SAAU;AACd,aAAW;EAEX,MAAM,gBAAgB,YAAY,OAAO;AACzC,MAAI,cACF,QAAO,eAAe,IAAI,MAAM,cAAc,CAAC;AAGjD,iBAAe,MAAM,QAAQ,QAAQ,OAAU;GAC/C"}
@@ -2,7 +2,6 @@ const require_rolldown_runtime = require('../../_virtual/rolldown_runtime.cjs');
  const require_utils_runParallel_spawnPosix = require('./spawnPosix.cjs');
  const require_utils_runParallel_spawnWin32 = require('./spawnWin32.cjs');
  let node_path = require("node:path");
- node_path = require_rolldown_runtime.__toESM(node_path);
 
  //#region src/utils/runParallel/index.ts
  /**
@@ -1 +1 @@
- {"version":3,"file":"index.cjs","names":["delimiter","spawnWin32","spawnPosix","signalHandlers: { event: string; handler: (...args: any[]) => void }[]"],"sources":["../../../../src/utils/runParallel/index.ts"],"sourcesContent":["import { delimiter, join } from 'node:path';\nimport { spawnPosix } from './spawnPosix';\nimport { spawnWin32 } from './spawnWin32';\n\nexport type ParallelHandle = {\n kill: () => void;\n result: Promise<any>;\n commandText: string;\n};\n\n/**\n * Start a cross-platform parallel process using npm-run-all approach.\n * Accepts either a single string (e.g., 'next start') or an array of tokens (e.g., ['next', 'start']).\n */\nexport const runParallel = (proc?: string | string[]): ParallelHandle => {\n if (!proc || (Array.isArray(proc) && proc.length === 0))\n throw new Error('Invalid command');\n\n const commandText = Array.isArray(proc) ? proc.join(' ') : proc;\n\n const isArray = Array.isArray(proc);\n const command = isArray ? (proc as string[])[0] : commandText;\n const args = isArray ? (proc as string[]).slice(1) : [];\n\n // Ensure local binaries (node_modules/.bin) are resolvable\n const cwdBin = join(process.cwd(), 'node_modules', '.bin');\n const PATH_KEY =\n Object.keys(process.env).find((key) => key.toLowerCase() === 'path') ??\n 'PATH';\n\n const extendedPath = [cwdBin, process.env[PATH_KEY] ?? '']\n .filter(Boolean)\n .join(delimiter);\n\n const childEnv = {\n ...process.env,\n [PATH_KEY]: extendedPath,\n } as NodeJS.ProcessEnv;\n\n const isWin = process.platform === 'win32';\n const spawnFunc = isWin ? spawnWin32 : spawnPosix;\n\n // Spawn options\n const spawnOptions = {\n cwd: process.cwd(),\n stdio: 'inherit' as const,\n env: childEnv,\n shell: false,\n };\n\n // Spawn the child process\n const child = isArray\n ? // If provided as a single string element that includes spaces, treat it like a shell command\n args.length === 0 && /\\s/.test(command)\n ? isWin\n ? spawnFunc(\n process.env.ComSpec ?? 'cmd.exe',\n ['/d', '/s', '/c', command],\n spawnOptions\n )\n : spawnFunc(\n process.env.SHELL ?? '/bin/sh',\n ['-c', command],\n spawnOptions\n )\n : spawnFunc(command, args, spawnOptions)\n : isWin\n ? spawnFunc(\n process.env.ComSpec ?? 'cmd.exe',\n ['/d', '/s', '/c', commandText],\n spawnOptions\n )\n : spawnFunc(\n process.env.SHELL ?? '/bin/sh',\n ['-c', commandText],\n spawnOptions\n );\n\n const result = new Promise<void>((resolve, reject) => {\n child.on('error', (err) => {\n try {\n console.error(\n `[runParallel] Failed to start: ${err?.message ?? 
String(err)}`\n );\n } catch {}\n cleanupHandlers();\n reject(err);\n });\n\n child.on('exit', (code, signal) => {\n cleanupHandlers();\n\n // Treat common termination signals as graceful exits, not failures\n const gracefulSignals = ['SIGINT', 'SIGTERM', 'SIGQUIT', 'SIGHUP'];\n if (code === 0 || (signal && gracefulSignals.includes(signal))) {\n resolve();\n } else {\n reject(\n Object.assign(new Error('Parallel process failed'), { code, signal })\n );\n }\n });\n });\n\n const cleanup = () => {\n try {\n child.kill('SIGTERM');\n } catch {\n // Best effort\n }\n };\n\n const signalHandlers: { event: string; handler: (...args: any[]) => void }[] =\n [\n { event: 'SIGINT', handler: cleanup },\n { event: 'SIGTERM', handler: cleanup },\n { event: 'SIGQUIT', handler: cleanup },\n { event: 'SIGHUP', handler: cleanup },\n ];\n\n // Register signal handlers\n signalHandlers.forEach(({ event, handler }) => {\n process.on(event as any, handler as any);\n });\n\n const cleanupHandlers = () => {\n signalHandlers.forEach(({ event, handler }) => {\n process.off(event as any, handler as any);\n });\n };\n\n const kill = () => {\n try {\n child.kill('SIGTERM');\n } catch {\n // Best effort\n }\n };\n\n return { kill, result, commandText };\n};\n"],"mappings":";;;;;;;;;;;AAcA,MAAa,eAAe,SAA6C;AACvE,KAAI,CAAC,QAAS,MAAM,QAAQ,KAAK,IAAI,KAAK,WAAW,EACnD,OAAM,IAAI,MAAM,kBAAkB;CAEpC,MAAM,cAAc,MAAM,QAAQ,KAAK,GAAG,KAAK,KAAK,IAAI,GAAG;CAE3D,MAAM,UAAU,MAAM,QAAQ,KAAK;CACnC,MAAM,UAAU,UAAW,KAAkB,KAAK;CAClD,MAAM,OAAO,UAAW,KAAkB,MAAM,EAAE,GAAG,EAAE;CAGvD,MAAM,6BAAc,QAAQ,KAAK,EAAE,gBAAgB,OAAO;CAC1D,MAAM,WACJ,OAAO,KAAK,QAAQ,IAAI,CAAC,MAAM,QAAQ,IAAI,aAAa,KAAK,OAAO,IACpE;CAEF,MAAM,eAAe,CAAC,QAAQ,QAAQ,IAAI,aAAa,GAAG,CACvD,OAAO,QAAQ,CACf,KAAKA,oBAAU;CAElB,MAAM,WAAW;EACf,GAAG,QAAQ;GACV,WAAW;EACb;CAED,MAAM,QAAQ,QAAQ,aAAa;CACnC,MAAM,YAAY,QAAQC,kDAAaC;CAGvC,MAAM,eAAe;EACnB,KAAK,QAAQ,KAAK;EAClB,OAAO;EACP,KAAK;EACL,OAAO;EACR;CAGD,MAAM,QAAQ,UAEV,KAAK,WAAW,KAAK,KAAK,KAAK,QAAQ,GACrC,QACE,UACE,QAAQ,IAAI,WAAW,WACvB;EAAC;EAAM;EAAM;EAAM;EAAQ,EAC3B,aACD,GACD,UACE,QAAQ,IAAI,SAAS,WACrB,CAAC,MAAM,QAAQ,EACf,aACD,GACH,UAAU,SAAS,MAAM,aAAa,GACxC,QACE,UACE,QAAQ,IAAI,WAAW,WACvB;EAAC;EAAM;EAAM;EAAM;EAAY,EAC/B,aACD,GACD,UACE,QAAQ,IAAI,SAAS,WACrB,CAAC,MAAM,YAAY,EACnB,aACD;CAEP,MAAM,SAAS,IAAI,SAAe,SAAS,WAAW;AACpD,QAAM,GAAG,UAAU,QAAQ;AACzB,OAAI;AACF,YAAQ,MACN,kCAAkC,KAAK,WAAW,OAAO,IAAI,GAC9D;WACK;AACR,oBAAiB;AACjB,UAAO,IAAI;IACX;AAEF,QAAM,GAAG,SAAS,MAAM,WAAW;AACjC,oBAAiB;AAIjB,OAAI,SAAS,KAAM,UADK;IAAC;IAAU;IAAW;IAAW;IAAS,CACrB,SAAS,OAAO,CAC3D,UAAS;OAET,QACE,OAAO,uBAAO,IAAI,MAAM,0BAA0B,EAAE;IAAE;IAAM;IAAQ,CAAC,CACtE;IAEH;GACF;CAEF,MAAM,gBAAgB;AACpB,MAAI;AACF,SAAM,KAAK,UAAU;UACf;;CAKV,MAAMC,iBACJ;EACE;GAAE,OAAO;GAAU,SAAS;GAAS;EACrC;GAAE,OAAO;GAAW,SAAS;GAAS;EACtC;GAAE,OAAO;GAAW,SAAS;GAAS;EACtC;GAAE,OAAO;GAAU,SAAS;GAAS;EACtC;AAGH,gBAAe,SAAS,EAAE,OAAO,cAAc;AAC7C,UAAQ,GAAG,OAAc,QAAe;GACxC;CAEF,MAAM,wBAAwB;AAC5B,iBAAe,SAAS,EAAE,OAAO,cAAc;AAC7C,WAAQ,IAAI,OAAc,QAAe;IACzC;;CAGJ,MAAM,aAAa;AACjB,MAAI;AACF,SAAM,KAAK,UAAU;UACf;;AAKV,QAAO;EAAE;EAAM;EAAQ;EAAa"}
+ {"version":3,"file":"index.cjs","names":["delimiter","spawnWin32","spawnPosix","signalHandlers: { event: string; handler: (...args: any[]) => void }[]"],"sources":["../../../../src/utils/runParallel/index.ts"],"sourcesContent":["import { delimiter, join } from 'node:path';\nimport { spawnPosix } from './spawnPosix';\nimport { spawnWin32 } from './spawnWin32';\n\nexport type ParallelHandle = {\n kill: () => void;\n result: Promise<any>;\n commandText: string;\n};\n\n/**\n * Start a cross-platform parallel process using npm-run-all approach.\n * Accepts either a single string (e.g., 'next start') or an array of tokens (e.g., ['next', 'start']).\n */\nexport const runParallel = (proc?: string | string[]): ParallelHandle => {\n if (!proc || (Array.isArray(proc) && proc.length === 0))\n throw new Error('Invalid command');\n\n const commandText = Array.isArray(proc) ? proc.join(' ') : proc;\n\n const isArray = Array.isArray(proc);\n const command = isArray ? (proc as string[])[0] : commandText;\n const args = isArray ? (proc as string[]).slice(1) : [];\n\n // Ensure local binaries (node_modules/.bin) are resolvable\n const cwdBin = join(process.cwd(), 'node_modules', '.bin');\n const PATH_KEY =\n Object.keys(process.env).find((key) => key.toLowerCase() === 'path') ??\n 'PATH';\n\n const extendedPath = [cwdBin, process.env[PATH_KEY] ?? '']\n .filter(Boolean)\n .join(delimiter);\n\n const childEnv = {\n ...process.env,\n [PATH_KEY]: extendedPath,\n } as NodeJS.ProcessEnv;\n\n const isWin = process.platform === 'win32';\n const spawnFunc = isWin ? spawnWin32 : spawnPosix;\n\n // Spawn options\n const spawnOptions = {\n cwd: process.cwd(),\n stdio: 'inherit' as const,\n env: childEnv,\n shell: false,\n };\n\n // Spawn the child process\n const child = isArray\n ? // If provided as a single string element that includes spaces, treat it like a shell command\n args.length === 0 && /\\s/.test(command)\n ? isWin\n ? spawnFunc(\n process.env.ComSpec ?? 'cmd.exe',\n ['/d', '/s', '/c', command],\n spawnOptions\n )\n : spawnFunc(\n process.env.SHELL ?? '/bin/sh',\n ['-c', command],\n spawnOptions\n )\n : spawnFunc(command, args, spawnOptions)\n : isWin\n ? spawnFunc(\n process.env.ComSpec ?? 'cmd.exe',\n ['/d', '/s', '/c', commandText],\n spawnOptions\n )\n : spawnFunc(\n process.env.SHELL ?? '/bin/sh',\n ['-c', commandText],\n spawnOptions\n );\n\n const result = new Promise<void>((resolve, reject) => {\n child.on('error', (err) => {\n try {\n console.error(\n `[runParallel] Failed to start: ${err?.message ?? 
String(err)}`\n );\n } catch {}\n cleanupHandlers();\n reject(err);\n });\n\n child.on('exit', (code, signal) => {\n cleanupHandlers();\n\n // Treat common termination signals as graceful exits, not failures\n const gracefulSignals = ['SIGINT', 'SIGTERM', 'SIGQUIT', 'SIGHUP'];\n if (code === 0 || (signal && gracefulSignals.includes(signal))) {\n resolve();\n } else {\n reject(\n Object.assign(new Error('Parallel process failed'), { code, signal })\n );\n }\n });\n });\n\n const cleanup = () => {\n try {\n child.kill('SIGTERM');\n } catch {\n // Best effort\n }\n };\n\n const signalHandlers: { event: string; handler: (...args: any[]) => void }[] =\n [\n { event: 'SIGINT', handler: cleanup },\n { event: 'SIGTERM', handler: cleanup },\n { event: 'SIGQUIT', handler: cleanup },\n { event: 'SIGHUP', handler: cleanup },\n ];\n\n // Register signal handlers\n signalHandlers.forEach(({ event, handler }) => {\n process.on(event as any, handler as any);\n });\n\n const cleanupHandlers = () => {\n signalHandlers.forEach(({ event, handler }) => {\n process.off(event as any, handler as any);\n });\n };\n\n const kill = () => {\n try {\n child.kill('SIGTERM');\n } catch {\n // Best effort\n }\n };\n\n return { kill, result, commandText };\n};\n"],"mappings":";;;;;;;;;;AAcA,MAAa,eAAe,SAA6C;AACvE,KAAI,CAAC,QAAS,MAAM,QAAQ,KAAK,IAAI,KAAK,WAAW,EACnD,OAAM,IAAI,MAAM,kBAAkB;CAEpC,MAAM,cAAc,MAAM,QAAQ,KAAK,GAAG,KAAK,KAAK,IAAI,GAAG;CAE3D,MAAM,UAAU,MAAM,QAAQ,KAAK;CACnC,MAAM,UAAU,UAAW,KAAkB,KAAK;CAClD,MAAM,OAAO,UAAW,KAAkB,MAAM,EAAE,GAAG,EAAE;CAGvD,MAAM,6BAAc,QAAQ,KAAK,EAAE,gBAAgB,OAAO;CAC1D,MAAM,WACJ,OAAO,KAAK,QAAQ,IAAI,CAAC,MAAM,QAAQ,IAAI,aAAa,KAAK,OAAO,IACpE;CAEF,MAAM,eAAe,CAAC,QAAQ,QAAQ,IAAI,aAAa,GAAG,CACvD,OAAO,QAAQ,CACf,KAAKA,oBAAU;CAElB,MAAM,WAAW;EACf,GAAG,QAAQ;GACV,WAAW;EACb;CAED,MAAM,QAAQ,QAAQ,aAAa;CACnC,MAAM,YAAY,QAAQC,kDAAaC;CAGvC,MAAM,eAAe;EACnB,KAAK,QAAQ,KAAK;EAClB,OAAO;EACP,KAAK;EACL,OAAO;EACR;CAGD,MAAM,QAAQ,UAEV,KAAK,WAAW,KAAK,KAAK,KAAK,QAAQ,GACrC,QACE,UACE,QAAQ,IAAI,WAAW,WACvB;EAAC;EAAM;EAAM;EAAM;EAAQ,EAC3B,aACD,GACD,UACE,QAAQ,IAAI,SAAS,WACrB,CAAC,MAAM,QAAQ,EACf,aACD,GACH,UAAU,SAAS,MAAM,aAAa,GACxC,QACE,UACE,QAAQ,IAAI,WAAW,WACvB;EAAC;EAAM;EAAM;EAAM;EAAY,EAC/B,aACD,GACD,UACE,QAAQ,IAAI,SAAS,WACrB,CAAC,MAAM,YAAY,EACnB,aACD;CAEP,MAAM,SAAS,IAAI,SAAe,SAAS,WAAW;AACpD,QAAM,GAAG,UAAU,QAAQ;AACzB,OAAI;AACF,YAAQ,MACN,kCAAkC,KAAK,WAAW,OAAO,IAAI,GAC9D;WACK;AACR,oBAAiB;AACjB,UAAO,IAAI;IACX;AAEF,QAAM,GAAG,SAAS,MAAM,WAAW;AACjC,oBAAiB;AAIjB,OAAI,SAAS,KAAM,UADK;IAAC;IAAU;IAAW;IAAW;IAAS,CACrB,SAAS,OAAO,CAC3D,UAAS;OAET,QACE,OAAO,uBAAO,IAAI,MAAM,0BAA0B,EAAE;IAAE;IAAM;IAAQ,CAAC,CACtE;IAEH;GACF;CAEF,MAAM,gBAAgB;AACpB,MAAI;AACF,SAAM,KAAK,UAAU;UACf;;CAKV,MAAMC,iBACJ;EACE;GAAE,OAAO;GAAU,SAAS;GAAS;EACrC;GAAE,OAAO;GAAW,SAAS;GAAS;EACtC;GAAE,OAAO;GAAW,SAAS;GAAS;EACtC;GAAE,OAAO;GAAU,SAAS;GAAS;EACtC;AAGH,gBAAe,SAAS,EAAE,OAAO,cAAc;AAC7C,UAAQ,GAAG,OAAc,QAAe;GACxC;CAEF,MAAM,wBAAwB;AAC5B,iBAAe,SAAS,EAAE,OAAO,cAAc;AAC7C,WAAQ,IAAI,OAAc,QAAe;IACzC;;CAGJ,MAAM,aAAa;AACjB,MAAI;AACF,SAAM,KAAK,UAAU;UACf;;AAKV,QAAO;EAAE;EAAM;EAAQ;EAAa"}
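For context, runParallel above returns a handle rather than a bare promise. A minimal usage sketch (the command is illustrative):

  // `result` resolves on exit code 0 or a graceful signal
  // (SIGINT/SIGTERM/SIGQUIT/SIGHUP) and rejects otherwise;
  // kill() sends SIGTERM on demand.
  const handle = runParallel(['next', 'start']);
  setTimeout(() => handle.kill(), 60_000); // optional manual shutdown
  await handle.result;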
@@ -1,6 +1,5 @@
  const require_rolldown_runtime = require('../../_virtual/rolldown_runtime.cjs');
  let node_stream = require("node:stream");
- node_stream = require_rolldown_runtime.__toESM(node_stream);
 
  //#region src/utils/runParallel/runTask.ts
  /**
@@ -1 +1 @@
- {"version":3,"file":"runTask.cjs","names":["signals: Record<string, number>","Writable","results: TaskResult[]","queue: TaskQueueItem[]","promises: TaskPromise[]","error: Error | null"],"sources":["../../../../src/utils/runParallel/runTask.ts"],"sourcesContent":["//------------------------------------------------------------------------------\n// Requirements\n//------------------------------------------------------------------------------\n\nimport { Writable } from 'node:stream';\n\n//------------------------------------------------------------------------------\n// Helpers\n//------------------------------------------------------------------------------\n\n/**\n * Remove the given value from the array.\n */\nconst remove = <T>(array: T[], x: T): void => {\n const index = array.indexOf(x);\n if (index !== -1) {\n array.splice(index, 1);\n }\n};\n\nconst signals: Record<string, number> = {\n // Signal name mappings to their respective standard numeric codes.\n // See: https://man7.org/linux/man-pages/man7/signal.7.html\n\n SIGABRT: 6, // Abort signal from abort(3)\n SIGALRM: 14, // Timer signal from alarm(2)\n SIGBUS: 10, // Bus error (bad memory access)\n SIGCHLD: 20, // Child stopped or terminated\n SIGCONT: 19, // Continue if stopped\n SIGFPE: 8, // Floating point exception\n SIGHUP: 1, // Hangup detected on controlling terminal or death of controlling process\n SIGILL: 4, // Illegal Instruction\n SIGINT: 2, // Interrupt from keyboard (Ctrl+C)\n SIGKILL: 9, // Kill signal (cannot be caught or ignored)\n SIGPIPE: 13, // Broken pipe: write to pipe with no readers\n SIGQUIT: 3, // Quit from keyboard (Ctrl+\\)\n SIGSEGV: 11, // Invalid memory reference (segmentation fault)\n SIGSTOP: 17, // Stop process (cannot be caught or ignored)\n SIGTERM: 15, // Termination signal\n SIGTRAP: 5, // Trace/breakpoint trap\n SIGTSTP: 18, // Stop typed at tty (Ctrl+Z)\n SIGTTIN: 21, // tty input for background process\n SIGTTOU: 22, // tty output for background process\n SIGUSR1: 30, // User-defined signal 1\n SIGUSR2: 31, // User-defined signal 2\n};\n\n/**\n * Converts a signal name to a number.\n */\nconst convert = (signal: string): number => {\n return signals[signal] || 0;\n};\n\n/**\n * Simple in-memory writable stream\n */\nclass MemoryStream extends Writable {\n private chunks: Buffer[] = [];\n\n _write(\n chunk: Buffer,\n _encoding: string,\n callback: (error?: Error | null) => void\n ): void {\n this.chunks.push(chunk);\n callback();\n }\n\n toString(): string {\n return Buffer.concat(this.chunks).toString('utf8');\n }\n}\n\n//------------------------------------------------------------------------------\n// Types\n//------------------------------------------------------------------------------\n\ninterface TaskResult {\n name: string;\n code: number | null;\n signal?: string | null;\n}\n\ninterface TaskQueueItem {\n name: string;\n index: number;\n}\n\ninterface RunTaskOptions {\n stdout: NodeJS.WritableStream;\n stderr?: NodeJS.WritableStream;\n aggregateOutput?: boolean;\n continueOnError?: boolean;\n race?: boolean;\n maxParallel?: number;\n}\n\ninterface TaskPromise extends Promise<TaskResult> {\n abort?: () => void;\n}\n\n//------------------------------------------------------------------------------\n// Public Interface\n//------------------------------------------------------------------------------\n\n/**\n * Run npm-scripts of given names in parallel.\n *\n * If a npm-script exited with a non-zero code, this aborts other all npm-scripts.\n *\n * Note: This is a simplified version 
for our use case.\n * The full implementation would require the actual runTask function from npm-run-all.\n */\nexport const runTasks = (\n tasks: string[],\n options: RunTaskOptions\n): Promise<TaskResult[]> => {\n return new Promise((resolve, reject) => {\n if (tasks.length === 0) {\n resolve([]);\n return;\n }\n\n const results: TaskResult[] = tasks.map((task) => ({\n name: task,\n code: undefined as any,\n }));\n const queue: TaskQueueItem[] = tasks.map((task, index) => ({\n name: task,\n index,\n }));\n const promises: TaskPromise[] = [];\n let error: Error | null = null;\n let aborted = false;\n\n /**\n * Done.\n */\n const done = (): void => {\n if (error == null) {\n resolve(results);\n } else {\n reject(error);\n }\n };\n\n /**\n * Aborts all tasks.\n */\n const abort = (): void => {\n if (aborted) {\n return;\n }\n aborted = true;\n\n if (promises.length === 0) {\n done();\n } else {\n for (const p of promises) {\n p.abort?.();\n }\n Promise.all(promises).then(done, reject);\n }\n };\n\n /**\n * Runs a next task.\n */\n const next = (): void => {\n if (aborted) {\n return;\n }\n if (queue.length === 0) {\n if (promises.length === 0) {\n done();\n }\n return;\n }\n\n const originalOutputStream = options.stdout;\n const optionsClone = { ...options };\n const writer = new MemoryStream();\n\n if (options.aggregateOutput) {\n optionsClone.stdout = writer as any;\n }\n\n const task = queue.shift()!;\n\n // Note: This requires the actual runTask implementation from npm-run-all\n // For now, this is a placeholder that would need to be implemented\n const promise = Promise.resolve({\n name: task.name,\n code: 0,\n signal: null,\n }) as TaskPromise;\n\n promises.push(promise);\n promise.then(\n (result) => {\n remove(promises, promise);\n if (aborted) {\n return;\n }\n\n if (options.aggregateOutput) {\n originalOutputStream.write(writer.toString());\n }\n\n // Check if the task failed as a result of a signal, and\n // amend the exit code as a result.\n if (\n result.code === null &&\n result.signal !== null &&\n result.signal !== undefined\n ) {\n // An exit caused by a signal must return a status code\n // of 128 plus the value of the signal code.\n // Ref: https://nodejs.org/api/process.html#process_exit_codes\n result.code = 128 + convert(result.signal);\n }\n\n // Save the result.\n results[task.index].code = result.code;\n\n // Aborts all tasks if it's an error.\n if (result.code) {\n error = new Error(\n `Task ${result.name} failed with code ${result.code}`\n );\n if (!options.continueOnError) {\n abort();\n return;\n }\n }\n\n // Aborts all tasks if options.race is true.\n if (options.race && !result.code) {\n abort();\n return;\n }\n\n // Call the next task.\n next();\n },\n (thisError: Error) => {\n remove(promises, promise);\n if (!options.continueOnError || options.race) {\n error = thisError;\n abort();\n return;\n }\n next();\n }\n );\n };\n\n const max = options.maxParallel;\n const end =\n typeof max === 'number' && max > 0\n ? 
Math.min(tasks.length, max)\n : tasks.length;\n for (let i = 0; i < end; ++i) {\n next();\n }\n });\n};\n"],"mappings":";;;;;;;;AAaA,MAAM,UAAa,OAAY,MAAe;CAC5C,MAAM,QAAQ,MAAM,QAAQ,EAAE;AAC9B,KAAI,UAAU,GACZ,OAAM,OAAO,OAAO,EAAE;;AAI1B,MAAMA,UAAkC;CAItC,SAAS;CACT,SAAS;CACT,QAAQ;CACR,SAAS;CACT,SAAS;CACT,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACV;;;;AAKD,MAAM,WAAW,WAA2B;AAC1C,QAAO,QAAQ,WAAW;;;;;AAM5B,IAAM,eAAN,cAA2BC,qBAAS;CAClC,AAAQ,SAAmB,EAAE;CAE7B,OACE,OACA,WACA,UACM;AACN,OAAK,OAAO,KAAK,MAAM;AACvB,YAAU;;CAGZ,WAAmB;AACjB,SAAO,OAAO,OAAO,KAAK,OAAO,CAAC,SAAS,OAAO;;;;;;;;;;;AA4CtD,MAAa,YACX,OACA,YAC0B;AAC1B,QAAO,IAAI,SAAS,SAAS,WAAW;AACtC,MAAI,MAAM,WAAW,GAAG;AACtB,WAAQ,EAAE,CAAC;AACX;;EAGF,MAAMC,UAAwB,MAAM,KAAK,UAAU;GACjD,MAAM;GACN,MAAM;GACP,EAAE;EACH,MAAMC,QAAyB,MAAM,KAAK,MAAM,WAAW;GACzD,MAAM;GACN;GACD,EAAE;EACH,MAAMC,WAA0B,EAAE;EAClC,IAAIC,QAAsB;EAC1B,IAAI,UAAU;;;;EAKd,MAAM,aAAmB;AACvB,OAAI,SAAS,KACX,SAAQ,QAAQ;OAEhB,QAAO,MAAM;;;;;EAOjB,MAAM,cAAoB;AACxB,OAAI,QACF;AAEF,aAAU;AAEV,OAAI,SAAS,WAAW,EACtB,OAAM;QACD;AACL,SAAK,MAAM,KAAK,SACd,GAAE,SAAS;AAEb,YAAQ,IAAI,SAAS,CAAC,KAAK,MAAM,OAAO;;;;;;EAO5C,MAAM,aAAmB;AACvB,OAAI,QACF;AAEF,OAAI,MAAM,WAAW,GAAG;AACtB,QAAI,SAAS,WAAW,EACtB,OAAM;AAER;;GAGF,MAAM,uBAAuB,QAAQ;GACrC,MAAM,eAAe,EAAE,GAAG,SAAS;GACnC,MAAM,SAAS,IAAI,cAAc;AAEjC,OAAI,QAAQ,gBACV,cAAa,SAAS;GAGxB,MAAM,OAAO,MAAM,OAAO;GAI1B,MAAM,UAAU,QAAQ,QAAQ;IAC9B,MAAM,KAAK;IACX,MAAM;IACN,QAAQ;IACT,CAAC;AAEF,YAAS,KAAK,QAAQ;AACtB,WAAQ,MACL,WAAW;AACV,WAAO,UAAU,QAAQ;AACzB,QAAI,QACF;AAGF,QAAI,QAAQ,gBACV,sBAAqB,MAAM,OAAO,UAAU,CAAC;AAK/C,QACE,OAAO,SAAS,QAChB,OAAO,WAAW,QAClB,OAAO,WAAW,OAKlB,QAAO,OAAO,MAAM,QAAQ,OAAO,OAAO;AAI5C,YAAQ,KAAK,OAAO,OAAO,OAAO;AAGlC,QAAI,OAAO,MAAM;AACf,6BAAQ,IAAI,MACV,QAAQ,OAAO,KAAK,oBAAoB,OAAO,OAChD;AACD,SAAI,CAAC,QAAQ,iBAAiB;AAC5B,aAAO;AACP;;;AAKJ,QAAI,QAAQ,QAAQ,CAAC,OAAO,MAAM;AAChC,YAAO;AACP;;AAIF,UAAM;OAEP,cAAqB;AACpB,WAAO,UAAU,QAAQ;AACzB,QAAI,CAAC,QAAQ,mBAAmB,QAAQ,MAAM;AAC5C,aAAQ;AACR,YAAO;AACP;;AAEF,UAAM;KAET;;EAGH,MAAM,MAAM,QAAQ;EACpB,MAAM,MACJ,OAAO,QAAQ,YAAY,MAAM,IAC7B,KAAK,IAAI,MAAM,QAAQ,IAAI,GAC3B,MAAM;AACZ,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,EAAE,EACzB,OAAM;GAER"}
+ {"version":3,"file":"runTask.cjs","names":["signals: Record<string, number>","Writable","results: TaskResult[]","queue: TaskQueueItem[]","promises: TaskPromise[]","error: Error | null"],"sources":["../../../../src/utils/runParallel/runTask.ts"],"sourcesContent":["//------------------------------------------------------------------------------\n// Requirements\n//------------------------------------------------------------------------------\n\nimport { Writable } from 'node:stream';\n\n//------------------------------------------------------------------------------\n// Helpers\n//------------------------------------------------------------------------------\n\n/**\n * Remove the given value from the array.\n */\nconst remove = <T>(array: T[], x: T): void => {\n const index = array.indexOf(x);\n if (index !== -1) {\n array.splice(index, 1);\n }\n};\n\nconst signals: Record<string, number> = {\n // Signal name mappings to their respective standard numeric codes.\n // See: https://man7.org/linux/man-pages/man7/signal.7.html\n\n SIGABRT: 6, // Abort signal from abort(3)\n SIGALRM: 14, // Timer signal from alarm(2)\n SIGBUS: 10, // Bus error (bad memory access)\n SIGCHLD: 20, // Child stopped or terminated\n SIGCONT: 19, // Continue if stopped\n SIGFPE: 8, // Floating point exception\n SIGHUP: 1, // Hangup detected on controlling terminal or death of controlling process\n SIGILL: 4, // Illegal Instruction\n SIGINT: 2, // Interrupt from keyboard (Ctrl+C)\n SIGKILL: 9, // Kill signal (cannot be caught or ignored)\n SIGPIPE: 13, // Broken pipe: write to pipe with no readers\n SIGQUIT: 3, // Quit from keyboard (Ctrl+\\)\n SIGSEGV: 11, // Invalid memory reference (segmentation fault)\n SIGSTOP: 17, // Stop process (cannot be caught or ignored)\n SIGTERM: 15, // Termination signal\n SIGTRAP: 5, // Trace/breakpoint trap\n SIGTSTP: 18, // Stop typed at tty (Ctrl+Z)\n SIGTTIN: 21, // tty input for background process\n SIGTTOU: 22, // tty output for background process\n SIGUSR1: 30, // User-defined signal 1\n SIGUSR2: 31, // User-defined signal 2\n};\n\n/**\n * Converts a signal name to a number.\n */\nconst convert = (signal: string): number => {\n return signals[signal] || 0;\n};\n\n/**\n * Simple in-memory writable stream\n */\nclass MemoryStream extends Writable {\n private chunks: Buffer[] = [];\n\n _write(\n chunk: Buffer,\n _encoding: string,\n callback: (error?: Error | null) => void\n ): void {\n this.chunks.push(chunk);\n callback();\n }\n\n toString(): string {\n return Buffer.concat(this.chunks).toString('utf8');\n }\n}\n\n//------------------------------------------------------------------------------\n// Types\n//------------------------------------------------------------------------------\n\ninterface TaskResult {\n name: string;\n code: number | null;\n signal?: string | null;\n}\n\ninterface TaskQueueItem {\n name: string;\n index: number;\n}\n\ninterface RunTaskOptions {\n stdout: NodeJS.WritableStream;\n stderr?: NodeJS.WritableStream;\n aggregateOutput?: boolean;\n continueOnError?: boolean;\n race?: boolean;\n maxParallel?: number;\n}\n\ninterface TaskPromise extends Promise<TaskResult> {\n abort?: () => void;\n}\n\n//------------------------------------------------------------------------------\n// Public Interface\n//------------------------------------------------------------------------------\n\n/**\n * Run npm-scripts of given names in parallel.\n *\n * If a npm-script exited with a non-zero code, this aborts other all npm-scripts.\n *\n * Note: This is a simplified version 
for our use case.\n * The full implementation would require the actual runTask function from npm-run-all.\n */\nexport const runTasks = (\n tasks: string[],\n options: RunTaskOptions\n): Promise<TaskResult[]> => {\n return new Promise((resolve, reject) => {\n if (tasks.length === 0) {\n resolve([]);\n return;\n }\n\n const results: TaskResult[] = tasks.map((task) => ({\n name: task,\n code: undefined as any,\n }));\n const queue: TaskQueueItem[] = tasks.map((task, index) => ({\n name: task,\n index,\n }));\n const promises: TaskPromise[] = [];\n let error: Error | null = null;\n let aborted = false;\n\n /**\n * Done.\n */\n const done = (): void => {\n if (error == null) {\n resolve(results);\n } else {\n reject(error);\n }\n };\n\n /**\n * Aborts all tasks.\n */\n const abort = (): void => {\n if (aborted) {\n return;\n }\n aborted = true;\n\n if (promises.length === 0) {\n done();\n } else {\n for (const p of promises) {\n p.abort?.();\n }\n Promise.all(promises).then(done, reject);\n }\n };\n\n /**\n * Runs a next task.\n */\n const next = (): void => {\n if (aborted) {\n return;\n }\n if (queue.length === 0) {\n if (promises.length === 0) {\n done();\n }\n return;\n }\n\n const originalOutputStream = options.stdout;\n const optionsClone = { ...options };\n const writer = new MemoryStream();\n\n if (options.aggregateOutput) {\n optionsClone.stdout = writer as any;\n }\n\n const task = queue.shift()!;\n\n // Note: This requires the actual runTask implementation from npm-run-all\n // For now, this is a placeholder that would need to be implemented\n const promise = Promise.resolve({\n name: task.name,\n code: 0,\n signal: null,\n }) as TaskPromise;\n\n promises.push(promise);\n promise.then(\n (result) => {\n remove(promises, promise);\n if (aborted) {\n return;\n }\n\n if (options.aggregateOutput) {\n originalOutputStream.write(writer.toString());\n }\n\n // Check if the task failed as a result of a signal, and\n // amend the exit code as a result.\n if (\n result.code === null &&\n result.signal !== null &&\n result.signal !== undefined\n ) {\n // An exit caused by a signal must return a status code\n // of 128 plus the value of the signal code.\n // Ref: https://nodejs.org/api/process.html#process_exit_codes\n result.code = 128 + convert(result.signal);\n }\n\n // Save the result.\n results[task.index].code = result.code;\n\n // Aborts all tasks if it's an error.\n if (result.code) {\n error = new Error(\n `Task ${result.name} failed with code ${result.code}`\n );\n if (!options.continueOnError) {\n abort();\n return;\n }\n }\n\n // Aborts all tasks if options.race is true.\n if (options.race && !result.code) {\n abort();\n return;\n }\n\n // Call the next task.\n next();\n },\n (thisError: Error) => {\n remove(promises, promise);\n if (!options.continueOnError || options.race) {\n error = thisError;\n abort();\n return;\n }\n next();\n }\n );\n };\n\n const max = options.maxParallel;\n const end =\n typeof max === 'number' && max > 0\n ? 
Math.min(tasks.length, max)\n : tasks.length;\n for (let i = 0; i < end; ++i) {\n next();\n }\n });\n};\n"],"mappings":";;;;;;;AAaA,MAAM,UAAa,OAAY,MAAe;CAC5C,MAAM,QAAQ,MAAM,QAAQ,EAAE;AAC9B,KAAI,UAAU,GACZ,OAAM,OAAO,OAAO,EAAE;;AAI1B,MAAMA,UAAkC;CAItC,SAAS;CACT,SAAS;CACT,QAAQ;CACR,SAAS;CACT,SAAS;CACT,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACT,SAAS;CACV;;;;AAKD,MAAM,WAAW,WAA2B;AAC1C,QAAO,QAAQ,WAAW;;;;;AAM5B,IAAM,eAAN,cAA2BC,qBAAS;CAClC,AAAQ,SAAmB,EAAE;CAE7B,OACE,OACA,WACA,UACM;AACN,OAAK,OAAO,KAAK,MAAM;AACvB,YAAU;;CAGZ,WAAmB;AACjB,SAAO,OAAO,OAAO,KAAK,OAAO,CAAC,SAAS,OAAO;;;;;;;;;;;AA4CtD,MAAa,YACX,OACA,YAC0B;AAC1B,QAAO,IAAI,SAAS,SAAS,WAAW;AACtC,MAAI,MAAM,WAAW,GAAG;AACtB,WAAQ,EAAE,CAAC;AACX;;EAGF,MAAMC,UAAwB,MAAM,KAAK,UAAU;GACjD,MAAM;GACN,MAAM;GACP,EAAE;EACH,MAAMC,QAAyB,MAAM,KAAK,MAAM,WAAW;GACzD,MAAM;GACN;GACD,EAAE;EACH,MAAMC,WAA0B,EAAE;EAClC,IAAIC,QAAsB;EAC1B,IAAI,UAAU;;;;EAKd,MAAM,aAAmB;AACvB,OAAI,SAAS,KACX,SAAQ,QAAQ;OAEhB,QAAO,MAAM;;;;;EAOjB,MAAM,cAAoB;AACxB,OAAI,QACF;AAEF,aAAU;AAEV,OAAI,SAAS,WAAW,EACtB,OAAM;QACD;AACL,SAAK,MAAM,KAAK,SACd,GAAE,SAAS;AAEb,YAAQ,IAAI,SAAS,CAAC,KAAK,MAAM,OAAO;;;;;;EAO5C,MAAM,aAAmB;AACvB,OAAI,QACF;AAEF,OAAI,MAAM,WAAW,GAAG;AACtB,QAAI,SAAS,WAAW,EACtB,OAAM;AAER;;GAGF,MAAM,uBAAuB,QAAQ;GACrC,MAAM,eAAe,EAAE,GAAG,SAAS;GACnC,MAAM,SAAS,IAAI,cAAc;AAEjC,OAAI,QAAQ,gBACV,cAAa,SAAS;GAGxB,MAAM,OAAO,MAAM,OAAO;GAI1B,MAAM,UAAU,QAAQ,QAAQ;IAC9B,MAAM,KAAK;IACX,MAAM;IACN,QAAQ;IACT,CAAC;AAEF,YAAS,KAAK,QAAQ;AACtB,WAAQ,MACL,WAAW;AACV,WAAO,UAAU,QAAQ;AACzB,QAAI,QACF;AAGF,QAAI,QAAQ,gBACV,sBAAqB,MAAM,OAAO,UAAU,CAAC;AAK/C,QACE,OAAO,SAAS,QAChB,OAAO,WAAW,QAClB,OAAO,WAAW,OAKlB,QAAO,OAAO,MAAM,QAAQ,OAAO,OAAO;AAI5C,YAAQ,KAAK,OAAO,OAAO,OAAO;AAGlC,QAAI,OAAO,MAAM;AACf,6BAAQ,IAAI,MACV,QAAQ,OAAO,KAAK,oBAAoB,OAAO,OAChD;AACD,SAAI,CAAC,QAAQ,iBAAiB;AAC5B,aAAO;AACP;;;AAKJ,QAAI,QAAQ,QAAQ,CAAC,OAAO,MAAM;AAChC,YAAO;AACP;;AAIF,UAAM;OAEP,cAAqB;AACpB,WAAO,UAAU,QAAQ;AACzB,QAAI,CAAC,QAAQ,mBAAmB,QAAQ,MAAM;AAC5C,aAAQ;AACR,YAAO;AACP;;AAEF,UAAM;KAET;;EAGH,MAAM,MAAM,QAAQ;EACpB,MAAM,MACJ,OAAO,QAAQ,YAAY,MAAM,IAC7B,KAAK,IAAI,MAAM,QAAQ,IAAI,GAC3B,MAAM;AACZ,OAAK,IAAI,IAAI,GAAG,IAAI,KAAK,EAAE,EACzB,OAAM;GAER"}
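Note that the runTasks source above marks its per-task execution as a placeholder (each task resolves immediately with code 0), so as shipped every task reports success. The intended call shape, per the RunTaskOptions interface shown:

  // Illustrative: run up to two tasks at a time, keep going on failures,
  // and inspect per-task exit codes afterwards.
  const results = await runTasks(['build', 'lint', 'test'], {
    stdout: process.stdout,
    continueOnError: true,
    maxParallel: 2,
  });
  for (const r of results) console.log(`${r.name} → ${r.code}`);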
@@ -1,7 +1,6 @@
  const require_rolldown_runtime = require('../../_virtual/rolldown_runtime.cjs');
  const require_utils_runParallel_pidTree = require('./pidTree.cjs');
  let node_child_process = require("node:child_process");
- node_child_process = require_rolldown_runtime.__toESM(node_child_process);
 
  //#region src/utils/runParallel/spawnPosix.ts
  /**
@@ -1 +1 @@
- {"version":3,"file":"spawnPosix.cjs","names":[],"sources":["../../../../src/utils/runParallel/spawnPosix.ts"],"sourcesContent":["//------------------------------------------------------------------------------\n// Requirements\n//------------------------------------------------------------------------------\n\nimport type { SpawnOptions } from 'node:child_process';\nimport { type ChildProcess, spawn as nodeSpawn } from 'node:child_process';\nimport { list } from './pidTree';\n\n//------------------------------------------------------------------------------\n// Helpers\n//------------------------------------------------------------------------------\n\n/**\n * Kills the new process and its sub processes.\n */\nconst createKillHandler = (\n child: ChildProcess\n): ((signal?: NodeJS.Signals | number) => boolean) => {\n return (signal?: NodeJS.Signals | number): boolean => {\n if (!child.pid) return false;\n\n // Try using list if available\n try {\n list(child.pid, { root: true }, (err: Error | null, pids?: number[]) => {\n if (err) {\n // Fallback to process group kill\n try {\n process.kill(-child.pid!, signal ?? 'SIGTERM');\n } catch {\n // ignore\n }\n return;\n }\n\n if (!pids) {\n // Fallback to process group kill if no pids returned\n try {\n process.kill(-child.pid!, signal ?? 'SIGTERM');\n } catch {\n // ignore\n }\n return;\n }\n\n for (const pid of pids) {\n try {\n process.kill(pid, signal ?? 'SIGTERM');\n } catch (_err) {\n // ignore.\n }\n }\n });\n } catch {\n // pidtree not available, use process group kill\n try {\n process.kill(-child.pid, signal ?? 'SIGTERM');\n } catch {\n // ignore\n }\n }\n\n return true;\n };\n};\n\n//------------------------------------------------------------------------------\n// Public Interface\n//------------------------------------------------------------------------------\n\n/**\n * Launches a new process with the given command.\n * This is almost same as `child_process.spawn`.\n *\n * This returns a `ChildProcess` instance.\n * `kill` method of the instance kills the new process and its sub processes.\n *\n * @param command - The command to run.\n * @param args - List of string arguments.\n * @param options - Options.\n * @returns A ChildProcess instance of new process.\n * @private\n */\nexport const spawnPosix = (\n command: string,\n args: string[],\n options: SpawnOptions\n): ChildProcess => {\n const child = nodeSpawn(command, args, options);\n child.kill = createKillHandler(child);\n\n return child;\n};\n"],"mappings":";;;;;;;;;AAeA,MAAM,qBACJ,UACoD;AACpD,SAAQ,WAA8C;AACpD,MAAI,CAAC,MAAM,IAAK,QAAO;AAGvB,MAAI;AACF,0CAAK,MAAM,KAAK,EAAE,MAAM,MAAM,GAAG,KAAmB,SAAoB;AACtE,QAAI,KAAK;AAEP,SAAI;AACF,cAAQ,KAAK,CAAC,MAAM,KAAM,UAAU,UAAU;aACxC;AAGR;;AAGF,QAAI,CAAC,MAAM;AAET,SAAI;AACF,cAAQ,KAAK,CAAC,MAAM,KAAM,UAAU,UAAU;aACxC;AAGR;;AAGF,SAAK,MAAM,OAAO,KAChB,KAAI;AACF,aAAQ,KAAK,KAAK,UAAU,UAAU;aAC/B,MAAM;KAIjB;UACI;AAEN,OAAI;AACF,YAAQ,KAAK,CAAC,MAAM,KAAK,UAAU,UAAU;WACvC;;AAKV,SAAO;;;;;;;;;;;;;;;;AAqBX,MAAa,cACX,SACA,MACA,YACiB;CACjB,MAAM,sCAAkB,SAAS,MAAM,QAAQ;AAC/C,OAAM,OAAO,kBAAkB,MAAM;AAErC,QAAO"}
+ {"version":3,"file":"spawnPosix.cjs","names":[],"sources":["../../../../src/utils/runParallel/spawnPosix.ts"],"sourcesContent":["//------------------------------------------------------------------------------\n// Requirements\n//------------------------------------------------------------------------------\n\nimport type { SpawnOptions } from 'node:child_process';\nimport { type ChildProcess, spawn as nodeSpawn } from 'node:child_process';\nimport { list } from './pidTree';\n\n//------------------------------------------------------------------------------\n// Helpers\n//------------------------------------------------------------------------------\n\n/**\n * Kills the new process and its sub processes.\n */\nconst createKillHandler = (\n child: ChildProcess\n): ((signal?: NodeJS.Signals | number) => boolean) => {\n return (signal?: NodeJS.Signals | number): boolean => {\n if (!child.pid) return false;\n\n // Try using list if available\n try {\n list(child.pid, { root: true }, (err: Error | null, pids?: number[]) => {\n if (err) {\n // Fallback to process group kill\n try {\n process.kill(-child.pid!, signal ?? 'SIGTERM');\n } catch {\n // ignore\n }\n return;\n }\n\n if (!pids) {\n // Fallback to process group kill if no pids returned\n try {\n process.kill(-child.pid!, signal ?? 'SIGTERM');\n } catch {\n // ignore\n }\n return;\n }\n\n for (const pid of pids) {\n try {\n process.kill(pid, signal ?? 'SIGTERM');\n } catch (_err) {\n // ignore.\n }\n }\n });\n } catch {\n // pidtree not available, use process group kill\n try {\n process.kill(-child.pid, signal ?? 'SIGTERM');\n } catch {\n // ignore\n }\n }\n\n return true;\n };\n};\n\n//------------------------------------------------------------------------------\n// Public Interface\n//------------------------------------------------------------------------------\n\n/**\n * Launches a new process with the given command.\n * This is almost same as `child_process.spawn`.\n *\n * This returns a `ChildProcess` instance.\n * `kill` method of the instance kills the new process and its sub processes.\n *\n * @param command - The command to run.\n * @param args - List of string arguments.\n * @param options - Options.\n * @returns A ChildProcess instance of new process.\n * @private\n */\nexport const spawnPosix = (\n command: string,\n args: string[],\n options: SpawnOptions\n): ChildProcess => {\n const child = nodeSpawn(command, args, options);\n child.kill = createKillHandler(child);\n\n return child;\n};\n"],"mappings":";;;;;;;;AAeA,MAAM,qBACJ,UACoD;AACpD,SAAQ,WAA8C;AACpD,MAAI,CAAC,MAAM,IAAK,QAAO;AAGvB,MAAI;AACF,0CAAK,MAAM,KAAK,EAAE,MAAM,MAAM,GAAG,KAAmB,SAAoB;AACtE,QAAI,KAAK;AAEP,SAAI;AACF,cAAQ,KAAK,CAAC,MAAM,KAAM,UAAU,UAAU;aACxC;AAGR;;AAGF,QAAI,CAAC,MAAM;AAET,SAAI;AACF,cAAQ,KAAK,CAAC,MAAM,KAAM,UAAU,UAAU;aACxC;AAGR;;AAGF,SAAK,MAAM,OAAO,KAChB,KAAI;AACF,aAAQ,KAAK,KAAK,UAAU,UAAU;aAC/B,MAAM;KAIjB;UACI;AAEN,OAAI;AACF,YAAQ,KAAK,CAAC,MAAM,KAAK,UAAU,UAAU;WACvC;;AAKV,SAAO;;;;;;;;;;;;;;;;AAqBX,MAAa,cACX,SACA,MACA,YACiB;CACjB,MAAM,sCAAkB,SAAS,MAAM,QAAQ;AAC/C,OAAM,OAAO,kBAAkB,MAAM;AAErC,QAAO"}
@@ -1,6 +1,5 @@
  const require_rolldown_runtime = require('../../_virtual/rolldown_runtime.cjs');
  let node_child_process = require("node:child_process");
- node_child_process = require_rolldown_runtime.__toESM(node_child_process);
 
  //#region src/utils/runParallel/spawnWin32.ts
  /**
@@ -1 +1 @@
- {"version":3,"file":"spawnWin32.cjs","names":[],"sources":["../../../../src/utils/runParallel/spawnWin32.ts"],"sourcesContent":["//------------------------------------------------------------------------------\n// Requirements\n//------------------------------------------------------------------------------\n\nimport type { SpawnOptions } from 'node:child_process';\nimport { type ChildProcess, spawn as nodeSpawn } from 'node:child_process';\n\n//------------------------------------------------------------------------------\n// Helpers\n//------------------------------------------------------------------------------\n\n/**\n * Kills the new process and its sub processes forcibly.\n */\nconst createKillHandler = (child: ChildProcess) => {\n return (): boolean => {\n if (!child.pid) return false;\n\n try {\n nodeSpawn('taskkill', ['/F', '/T', '/PID', String(child.pid)], {\n stdio: 'ignore',\n });\n } catch {\n // ignore\n }\n\n return true;\n };\n};\n\n//------------------------------------------------------------------------------\n// Public Interface\n//------------------------------------------------------------------------------\n\n/**\n * Launches a new process with the given command.\n * This is almost same as `child_process.spawn`.\n *\n * This returns a `ChildProcess` instance.\n * `kill` method of the instance kills the new process and its sub processes forcibly.\n *\n * @param command - The command to run.\n * @param args - List of string arguments.\n * @param options - Options.\n * @returns A ChildProcess instance of new process.\n * @private\n */\nexport const spawnWin32 = (\n command: string,\n args: string[],\n options: SpawnOptions\n): ChildProcess => {\n const child = nodeSpawn(command, args, options);\n child.kill = createKillHandler(child);\n\n return child;\n};\n"],"mappings":";;;;;;;;AAcA,MAAM,qBAAqB,UAAwB;AACjD,cAAsB;AACpB,MAAI,CAAC,MAAM,IAAK,QAAO;AAEvB,MAAI;AACF,iCAAU,YAAY;IAAC;IAAM;IAAM;IAAQ,OAAO,MAAM,IAAI;IAAC,EAAE,EAC7D,OAAO,UACR,CAAC;UACI;AAIR,SAAO;;;;;;;;;;;;;;;;AAqBX,MAAa,cACX,SACA,MACA,YACiB;CACjB,MAAM,sCAAkB,SAAS,MAAM,QAAQ;AAC/C,OAAM,OAAO,kBAAkB,MAAM;AAErC,QAAO"}
+ {"version":3,"file":"spawnWin32.cjs","names":[],"sources":["../../../../src/utils/runParallel/spawnWin32.ts"],"sourcesContent":["//------------------------------------------------------------------------------\n// Requirements\n//------------------------------------------------------------------------------\n\nimport type { SpawnOptions } from 'node:child_process';\nimport { type ChildProcess, spawn as nodeSpawn } from 'node:child_process';\n\n//------------------------------------------------------------------------------\n// Helpers\n//------------------------------------------------------------------------------\n\n/**\n * Kills the new process and its sub processes forcibly.\n */\nconst createKillHandler = (child: ChildProcess) => {\n return (): boolean => {\n if (!child.pid) return false;\n\n try {\n nodeSpawn('taskkill', ['/F', '/T', '/PID', String(child.pid)], {\n stdio: 'ignore',\n });\n } catch {\n // ignore\n }\n\n return true;\n };\n};\n\n//------------------------------------------------------------------------------\n// Public Interface\n//------------------------------------------------------------------------------\n\n/**\n * Launches a new process with the given command.\n * This is almost same as `child_process.spawn`.\n *\n * This returns a `ChildProcess` instance.\n * `kill` method of the instance kills the new process and its sub processes forcibly.\n *\n * @param command - The command to run.\n * @param args - List of string arguments.\n * @param options - Options.\n * @returns A ChildProcess instance of new process.\n * @private\n */\nexport const spawnWin32 = (\n command: string,\n args: string[],\n options: SpawnOptions\n): ChildProcess => {\n const child = nodeSpawn(command, args, options);\n child.kill = createKillHandler(child);\n\n return child;\n};\n"],"mappings":";;;;;;;AAcA,MAAM,qBAAqB,UAAwB;AACjD,cAAsB;AACpB,MAAI,CAAC,MAAM,IAAK,QAAO;AAEvB,MAAI;AACF,iCAAU,YAAY;IAAC;IAAM;IAAM;IAAQ,OAAO,MAAM,IAAI;IAAC,EAAE,EAC7D,OAAO,UACR,CAAC;UACI;AAIR,SAAO;;;;;;;;;;;;;;;;AAqBX,MAAa,cACX,SACA,MACA,YACiB;CACjB,MAAM,sCAAkB,SAAS,MAAM,QAAQ;AAC/C,OAAM,OAAO,kBAAkB,MAAM;AAErC,QAAO"}
@@ -4,13 +4,11 @@ const require_handleContentDeclarationFileChange = require('./handleContentDecla
  const require_handleUnlinkedContentDeclarationFile = require('./handleUnlinkedContentDeclarationFile.cjs');
  const require_prepareIntlayer = require('./prepareIntlayer.cjs');
  let __intlayer_config = require("@intlayer/config");
- __intlayer_config = require_rolldown_runtime.__toESM(__intlayer_config);
  let node_path = require("node:path");
- node_path = require_rolldown_runtime.__toESM(node_path);
  let chokidar = require("chokidar");
- chokidar = require_rolldown_runtime.__toESM(chokidar);
 
  //#region src/watcher.ts
+ /** @ts-ignore remove error Module '"chokidar"' has no exported member 'ChokidarOptions'. */
  const recentlyAddedFiles = /* @__PURE__ */ new Set();
  const watch = (options) => {
  const configuration = options?.configuration ?? (0, __intlayer_config.getConfiguration)(options?.configOptions);
@@ -1 +1 @@
- {"version":3,"file":"watcher.cjs","names":["handleAdditionalContentDeclarationFile","handleContentDeclarationFileChange","handleUnlinkedContentDeclarationFile","prepareIntlayer"],"sources":["../../src/watcher.ts"],"sourcesContent":["import { basename } from 'node:path';\nimport {\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\n/** @ts-ignore remove error Module '\"chokidar\"' has no exported member 'ChokidarOptions'. */\nimport { type ChokidarOptions, watch as chokidarWatch } from 'chokidar';\nimport { handleAdditionalContentDeclarationFile } from './handleAdditionalContentDeclarationFile';\nimport { handleContentDeclarationFileChange } from './handleContentDeclarationFileChange';\nimport { handleUnlinkedContentDeclarationFile } from './handleUnlinkedContentDeclarationFile';\nimport { prepareIntlayer } from './prepareIntlayer';\n\nconst recentlyAddedFiles = new Set<string>();\n\ntype WatchOptions = ChokidarOptions & {\n configuration?: IntlayerConfig;\n configOptions?: GetConfigurationOptions;\n skipPrepare?: boolean;\n};\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const configuration =\n options?.configuration ?? getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const { watch: isWatchMode, watchedFilesPatternWithPath } =\n configuration.content;\n\n /** @ts-ignore remove error Expected 0-1 arguments, but got 2. */\n return chokidarWatch(watchedFilesPatternWithPath, {\n persistent: isWatchMode, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n awaitWriteFinish: {\n stabilityThreshold: 1000,\n pollInterval: 100,\n },\n ignored: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.intlayer/**',\n ],\n ...options,\n })\n .on('add', async (filePath) => {\n const fileName = basename(filePath);\n recentlyAddedFiles.add(fileName);\n\n await handleAdditionalContentDeclarationFile(filePath, configuration);\n\n setTimeout(() => recentlyAddedFiles.delete(fileName), 1000); // Allow time for unlink to trigger if it's a move\n })\n .on(\n 'change',\n async (filePath) =>\n await handleContentDeclarationFileChange(filePath, configuration)\n )\n .on('unlink', async (filePath) => {\n setTimeout(async () => {\n const fileName = basename(filePath);\n\n if (recentlyAddedFiles.has(fileName)) {\n // The file was moved, so ignore unlink\n return;\n }\n\n await handleUnlinkedContentDeclarationFile(filePath, configuration);\n }, 300); // Allow time for unlink to trigger if it's a move\n })\n .on('error', async (error) => {\n appLogger(`Watcher error: ${error}`, {\n level: 'error',\n });\n\n appLogger('Restarting watcher');\n\n await prepareIntlayer(configuration);\n });\n};\n\nexport const buildAndWatchIntlayer = async (options?: WatchOptions) => {\n const { skipPrepare, ...rest } = options ?? {};\n const configuration =\n options?.configuration ?? 
getConfiguration(options?.configOptions);\n\n if (!options?.skipPrepare) {\n await prepareIntlayer(configuration, { forceRun: true });\n }\n\n if (configuration.content.watch || options?.persistent) {\n const appLogger = getAppLogger(configuration);\n\n appLogger('Watching Intlayer content declarations');\n // Start watching (assuming watch is also async)\n watch({ ...rest, configuration });\n }\n};\n"],"mappings":";;;;;;;;;;;;;AAcA,MAAM,qCAAqB,IAAI,KAAa;AAS5C,MAAa,SAAS,YAA2B;CAC/C,MAAM,gBACJ,SAAS,yDAAkC,SAAS,cAAc;CACpE,MAAM,gDAAyB,cAAc;CAE7C,MAAM,EAAE,OAAO,aAAa,gCAC1B,cAAc;;AAGhB,4BAAqB,6BAA6B;EAChD,YAAY;EACZ,eAAe;EACf,kBAAkB;GAChB,oBAAoB;GACpB,cAAc;GACf;EACD,SAAS;GACP;GACA;GACA;GACA;GACD;EACD,GAAG;EACJ,CAAC,CACC,GAAG,OAAO,OAAO,aAAa;EAC7B,MAAM,mCAAoB,SAAS;AACnC,qBAAmB,IAAI,SAAS;AAEhC,QAAMA,sFAAuC,UAAU,cAAc;AAErE,mBAAiB,mBAAmB,OAAO,SAAS,EAAE,IAAK;GAC3D,CACD,GACC,UACA,OAAO,aACL,MAAMC,8EAAmC,UAAU,cAAc,CACpE,CACA,GAAG,UAAU,OAAO,aAAa;AAChC,aAAW,YAAY;GACrB,MAAM,mCAAoB,SAAS;AAEnC,OAAI,mBAAmB,IAAI,SAAS,CAElC;AAGF,SAAMC,kFAAqC,UAAU,cAAc;KAClE,IAAI;GACP,CACD,GAAG,SAAS,OAAO,UAAU;AAC5B,YAAU,kBAAkB,SAAS,EACnC,OAAO,SACR,CAAC;AAEF,YAAU,qBAAqB;AAE/B,QAAMC,wCAAgB,cAAc;GACpC;;AAGN,MAAa,wBAAwB,OAAO,YAA2B;CACrE,MAAM,EAAE,YAAa,GAAG,SAAS,WAAW,EAAE;CAC9C,MAAM,gBACJ,SAAS,yDAAkC,SAAS,cAAc;AAEpE,KAAI,CAAC,SAAS,YACZ,OAAMA,wCAAgB,eAAe,EAAE,UAAU,MAAM,CAAC;AAG1D,KAAI,cAAc,QAAQ,SAAS,SAAS,YAAY;AAGtD,sCAF+B,cAAc,CAEnC,yCAAyC;AAEnD,QAAM;GAAE,GAAG;GAAM;GAAe,CAAC"}
+ {"version":3,"file":"watcher.cjs","names":["handleAdditionalContentDeclarationFile","handleContentDeclarationFileChange","handleUnlinkedContentDeclarationFile","prepareIntlayer"],"sources":["../../src/watcher.ts"],"sourcesContent":["import { basename } from 'node:path';\nimport {\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\n/** @ts-ignore remove error Module '\"chokidar\"' has no exported member 'ChokidarOptions'. */\nimport { type ChokidarOptions, watch as chokidarWatch } from 'chokidar';\nimport { handleAdditionalContentDeclarationFile } from './handleAdditionalContentDeclarationFile';\nimport { handleContentDeclarationFileChange } from './handleContentDeclarationFileChange';\nimport { handleUnlinkedContentDeclarationFile } from './handleUnlinkedContentDeclarationFile';\nimport { prepareIntlayer } from './prepareIntlayer';\n\nconst recentlyAddedFiles = new Set<string>();\n\ntype WatchOptions = ChokidarOptions & {\n configuration?: IntlayerConfig;\n configOptions?: GetConfigurationOptions;\n skipPrepare?: boolean;\n};\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const configuration =\n options?.configuration ?? getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const { watch: isWatchMode, watchedFilesPatternWithPath } =\n configuration.content;\n\n /** @ts-ignore remove error Expected 0-1 arguments, but got 2. */\n return chokidarWatch(watchedFilesPatternWithPath, {\n persistent: isWatchMode, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n awaitWriteFinish: {\n stabilityThreshold: 1000,\n pollInterval: 100,\n },\n ignored: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.intlayer/**',\n ],\n ...options,\n })\n .on('add', async (filePath) => {\n const fileName = basename(filePath);\n recentlyAddedFiles.add(fileName);\n\n await handleAdditionalContentDeclarationFile(filePath, configuration);\n\n setTimeout(() => recentlyAddedFiles.delete(fileName), 1000); // Allow time for unlink to trigger if it's a move\n })\n .on(\n 'change',\n async (filePath) =>\n await handleContentDeclarationFileChange(filePath, configuration)\n )\n .on('unlink', async (filePath) => {\n setTimeout(async () => {\n const fileName = basename(filePath);\n\n if (recentlyAddedFiles.has(fileName)) {\n // The file was moved, so ignore unlink\n return;\n }\n\n await handleUnlinkedContentDeclarationFile(filePath, configuration);\n }, 300); // Allow time for unlink to trigger if it's a move\n })\n .on('error', async (error) => {\n appLogger(`Watcher error: ${error}`, {\n level: 'error',\n });\n\n appLogger('Restarting watcher');\n\n await prepareIntlayer(configuration);\n });\n};\n\nexport const buildAndWatchIntlayer = async (options?: WatchOptions) => {\n const { skipPrepare, ...rest } = options ?? {};\n const configuration =\n options?.configuration ?? 
getConfiguration(options?.configOptions);\n\n if (!options?.skipPrepare) {\n await prepareIntlayer(configuration, { forceRun: true });\n }\n\n if (configuration.content.watch || options?.persistent) {\n const appLogger = getAppLogger(configuration);\n\n appLogger('Watching Intlayer content declarations');\n // Start watching (assuming watch is also async)\n watch({ ...rest, configuration });\n }\n};\n"],"mappings":";;;;;;;;;;;AAcA,MAAM,qCAAqB,IAAI,KAAa;AAS5C,MAAa,SAAS,YAA2B;CAC/C,MAAM,gBACJ,SAAS,yDAAkC,SAAS,cAAc;CACpE,MAAM,gDAAyB,cAAc;CAE7C,MAAM,EAAE,OAAO,aAAa,gCAC1B,cAAc;;AAGhB,4BAAqB,6BAA6B;EAChD,YAAY;EACZ,eAAe;EACf,kBAAkB;GAChB,oBAAoB;GACpB,cAAc;GACf;EACD,SAAS;GACP;GACA;GACA;GACA;GACD;EACD,GAAG;EACJ,CAAC,CACC,GAAG,OAAO,OAAO,aAAa;EAC7B,MAAM,mCAAoB,SAAS;AACnC,qBAAmB,IAAI,SAAS;AAEhC,QAAMA,sFAAuC,UAAU,cAAc;AAErE,mBAAiB,mBAAmB,OAAO,SAAS,EAAE,IAAK;GAC3D,CACD,GACC,UACA,OAAO,aACL,MAAMC,8EAAmC,UAAU,cAAc,CACpE,CACA,GAAG,UAAU,OAAO,aAAa;AAChC,aAAW,YAAY;GACrB,MAAM,mCAAoB,SAAS;AAEnC,OAAI,mBAAmB,IAAI,SAAS,CAElC;AAGF,SAAMC,kFAAqC,UAAU,cAAc;KAClE,IAAI;GACP,CACD,GAAG,SAAS,OAAO,UAAU;AAC5B,YAAU,kBAAkB,SAAS,EACnC,OAAO,SACR,CAAC;AAEF,YAAU,qBAAqB;AAE/B,QAAMC,wCAAgB,cAAc;GACpC;;AAGN,MAAa,wBAAwB,OAAO,YAA2B;CACrE,MAAM,EAAE,YAAa,GAAG,SAAS,WAAW,EAAE;CAC9C,MAAM,gBACJ,SAAS,yDAAkC,SAAS,cAAc;AAEpE,KAAI,CAAC,SAAS,YACZ,OAAMA,wCAAgB,eAAe,EAAE,UAAU,MAAM,CAAC;AAG1D,KAAI,cAAc,QAAQ,SAAS,SAAS,YAAY;AAGtD,sCAF+B,cAAc,CAEnC,yCAAyC;AAEnD,QAAM;GAAE,GAAG;GAAM;GAAe,CAAC"}
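The watcher source embedded above debounces add/unlink pairs so a file move is not misread as a deletion: the add handler records the file's basename in recentlyAddedFiles for one second, and the unlink handler waits 300 ms and bails out if that name was just added. A reduced sketch of the pattern, assuming a chokidar-style event emitter (onAdd/onUnlink are placeholder handler names):

import { basename } from 'node:path';

const recentlyAdded = new Set<string>();

const onAdd = (filePath: string) => {
  const name = basename(filePath);
  recentlyAdded.add(name);
  // Keep the name around long enough for a paired unlink to see it.
  setTimeout(() => recentlyAdded.delete(name), 1000);
};

const onUnlink = (filePath: string) => {
  // Delay handling so a move (unlink immediately followed by add) is recognized.
  setTimeout(() => {
    if (recentlyAdded.has(basename(filePath))) return; // it was a move, ignore
    // ...handle a genuine deletion here
  }, 300);
};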
@@ -1,9 +1,7 @@
  const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
  const require_writeJsonIfChanged = require('../writeJsonIfChanged.cjs');
  let node_path = require("node:path");
- node_path = require_rolldown_runtime.__toESM(node_path);
  let node_fs_promises = require("node:fs/promises");
- node_fs_promises = require_rolldown_runtime.__toESM(node_fs_promises);
 
  //#region src/writeConfiguration/index.ts
  const writeConfiguration = async (configuration) => {
@@ -1 +1 @@
- {"version":3,"file":"index.cjs","names":["writeJsonIfChanged"],"sources":["../../../src/writeConfiguration/index.ts"],"sourcesContent":["import { mkdir } from 'node:fs/promises';\nimport { join } from 'node:path';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { writeJsonIfChanged } from '../writeJsonIfChanged';\n\nexport const writeConfiguration = async (configuration: IntlayerConfig) => {\n const { content } = configuration;\n const { configDir } = content;\n\n // Ensure target directory exists\n // configDir is expected to be the directory where configuration.json will live\n await mkdir(configDir, { recursive: true });\n\n const configFilePath = join(configDir, 'configuration.json');\n\n await writeJsonIfChanged(configFilePath, configuration);\n};\n"],"mappings":";;;;;;;;AAKA,MAAa,qBAAqB,OAAO,kBAAkC;CACzE,MAAM,EAAE,YAAY;CACpB,MAAM,EAAE,cAAc;AAItB,mCAAY,WAAW,EAAE,WAAW,MAAM,CAAC;AAI3C,OAAMA,kEAFsB,WAAW,qBAAqB,EAEnB,cAAc"}
+ {"version":3,"file":"index.cjs","names":["writeJsonIfChanged"],"sources":["../../../src/writeConfiguration/index.ts"],"sourcesContent":["import { mkdir } from 'node:fs/promises';\nimport { join } from 'node:path';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { writeJsonIfChanged } from '../writeJsonIfChanged';\n\nexport const writeConfiguration = async (configuration: IntlayerConfig) => {\n const { content } = configuration;\n const { configDir } = content;\n\n // Ensure target directory exists\n // configDir is expected to be the directory where configuration.json will live\n await mkdir(configDir, { recursive: true });\n\n const configFilePath = join(configDir, 'configuration.json');\n\n await writeJsonIfChanged(configFilePath, configuration);\n};\n"],"mappings":";;;;;;AAKA,MAAa,qBAAqB,OAAO,kBAAkC;CACzE,MAAM,EAAE,YAAY;CACpB,MAAM,EAAE,cAAc;AAItB,mCAAY,WAAW,EAAE,WAAW,MAAM,CAAC;AAI3C,OAAMA,kEAFsB,WAAW,qBAAqB,EAEnB,cAAc"}
@@ -1,6 +1,5 @@
  const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
  let __intlayer_config = require("@intlayer/config");
- __intlayer_config = require_rolldown_runtime.__toESM(__intlayer_config);
 
  //#region src/writeContentDeclaration/detectFormatCommand.ts
  const detectFormatCommand = (configuration) => {
@@ -1 +1 @@
- {"version":3,"file":"detectFormatCommand.cjs","names":[],"sources":["../../../src/writeContentDeclaration/detectFormatCommand.ts"],"sourcesContent":["import { getProjectRequire } from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\n\nexport const detectFormatCommand = (configuration: IntlayerConfig) => {\n const { formatCommand } = configuration.content;\n const projectRequire = getProjectRequire();\n\n if (formatCommand) {\n return formatCommand;\n }\n\n // Try Prettier\n try {\n projectRequire.resolve('prettier');\n\n return 'prettier --write \"{{file}}\" --log-level silent';\n } catch (_error) {\n // Prettier not found, continue to next option\n }\n\n // Try Biome\n try {\n projectRequire.resolve('biome');\n\n return 'biome format \"{{file}}\" --write --log-level none';\n } catch (_error) {\n // Biome not found, continue to next option\n }\n\n // Try ESLint\n try {\n projectRequire.resolve('eslint');\n\n return 'eslint --fix \"{{file}}\" --quiet';\n } catch (_error) {\n // ESLint not found, no formatter available\n }\n\n // No formatter found\n return undefined;\n};\n"],"mappings":";;;;;AAGA,MAAa,uBAAuB,kBAAkC;CACpE,MAAM,EAAE,kBAAkB,cAAc;CACxC,MAAM,2DAAoC;AAE1C,KAAI,cACF,QAAO;AAIT,KAAI;AACF,iBAAe,QAAQ,WAAW;AAElC,SAAO;UACA,QAAQ;AAKjB,KAAI;AACF,iBAAe,QAAQ,QAAQ;AAE/B,SAAO;UACA,QAAQ;AAKjB,KAAI;AACF,iBAAe,QAAQ,SAAS;AAEhC,SAAO;UACA,QAAQ"}
+ {"version":3,"file":"detectFormatCommand.cjs","names":[],"sources":["../../../src/writeContentDeclaration/detectFormatCommand.ts"],"sourcesContent":["import { getProjectRequire } from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\n\nexport const detectFormatCommand = (configuration: IntlayerConfig) => {\n const { formatCommand } = configuration.content;\n const projectRequire = getProjectRequire();\n\n if (formatCommand) {\n return formatCommand;\n }\n\n // Try Prettier\n try {\n projectRequire.resolve('prettier');\n\n return 'prettier --write \"{{file}}\" --log-level silent';\n } catch (_error) {\n // Prettier not found, continue to next option\n }\n\n // Try Biome\n try {\n projectRequire.resolve('biome');\n\n return 'biome format \"{{file}}\" --write --log-level none';\n } catch (_error) {\n // Biome not found, continue to next option\n }\n\n // Try ESLint\n try {\n projectRequire.resolve('eslint');\n\n return 'eslint --fix \"{{file}}\" --quiet';\n } catch (_error) {\n // ESLint not found, no formatter available\n }\n\n // No formatter found\n return undefined;\n};\n"],"mappings":";;;;AAGA,MAAa,uBAAuB,kBAAkC;CACpE,MAAM,EAAE,kBAAkB,cAAc;CACxC,MAAM,2DAAoC;AAE1C,KAAI,cACF,QAAO;AAIT,KAAI;AACF,iBAAe,QAAQ,WAAW;AAElC,SAAO;UACA,QAAQ;AAKjB,KAAI;AACF,iBAAe,QAAQ,QAAQ;AAE/B,SAAO;UACA,QAAQ;AAKjB,KAAI;AACF,iBAAe,QAAQ,SAAS;AAEhC,SAAO;UACA,QAAQ"}
@@ -1,14 +1,9 @@
  const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
  let node_fs = require("node:fs");
- node_fs = require_rolldown_runtime.__toESM(node_fs);
  let __intlayer_config = require("@intlayer/config");
- __intlayer_config = require_rolldown_runtime.__toESM(__intlayer_config);
  let node_path = require("node:path");
- node_path = require_rolldown_runtime.__toESM(node_path);
  let __intlayer_core = require("@intlayer/core");
- __intlayer_core = require_rolldown_runtime.__toESM(__intlayer_core);
  let __intlayer_types = require("@intlayer/types");
- __intlayer_types = require_rolldown_runtime.__toESM(__intlayer_types);
 
  //#region src/writeContentDeclaration/processContentDeclarationContent.ts
  /**
@@ -1 +1 @@
- {"version":3,"file":"processContentDeclarationContent.cjs","names":["writeFilePlugin: Plugins","NodeType","x","markdownFilePlugin: Plugins","deepTransformNode","insertionFilePlugin: Plugins"],"sources":["../../../src/writeContentDeclaration/processContentDeclarationContent.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { colorizePath, x } from '@intlayer/config';\nimport {\n deepTransformNode,\n type FileContent,\n type FileContentConstructor,\n type InsertionContent,\n type InsertionContentConstructor,\n type MarkdownContent,\n type MarkdownContentConstructor,\n type Plugins,\n} from '@intlayer/core';\nimport { type Dictionary, NodeType } from '@intlayer/types';\n\n/**\n * Write file plugin\n */\n\nconst writeFilePlugin: Plugins = {\n id: 'write-file-plugin',\n canHandle: (node) =>\n typeof node === 'object' && node?.nodeType === NodeType.File,\n transform: (node: FileContent) => {\n const fileContent = node.content;\n const filePath = node.fixedPath;\n\n if (typeof fileContent !== 'string') {\n throw new Error('File content must be a string');\n }\n\n if (typeof filePath !== 'string') {\n throw new Error('File path must be a string');\n }\n\n // Write the file to the file system\n try {\n const absoluteFilePath = join(process.cwd(), filePath);\n\n // Create the file directory if it doesn't exist\n const fileDirectory = dirname(absoluteFilePath);\n\n if (!existsSync(fileDirectory)) {\n mkdirSync(fileDirectory, { recursive: true });\n }\n\n // Write the file\n writeFileSync(absoluteFilePath, fileContent);\n } catch (error) {\n throw new Error(\n `${x} Error writing file to ${colorizePath(filePath)}: ${error}`\n );\n }\n\n const transformedFileContent: FileContentConstructor = {\n nodeType: NodeType.File,\n [NodeType.File]: node.file,\n };\n\n return transformedFileContent;\n },\n};\n\n/**\n * Markdown file plugin\n */\n\nconst markdownFilePlugin: Plugins = {\n id: 'markdown-file-plugin',\n canHandle: (node) =>\n typeof node === 'object' && node?.nodeType === NodeType.Markdown,\n transform: (node: MarkdownContent, props, deepTransformNode) => {\n const simplifiedMarkdownNode: MarkdownContentConstructor = {\n nodeType: NodeType.Markdown,\n [NodeType.Markdown]: deepTransformNode(node.markdown, props),\n };\n\n return simplifiedMarkdownNode;\n },\n};\n\n/**\n * Insertion file plugin\n */\n\nconst insertionFilePlugin: Plugins = {\n id: 'insertion-file-plugin',\n canHandle: (node) =>\n typeof node === 'object' && node?.nodeType === NodeType.Insertion,\n transform: (node: InsertionContent, props, deepTransformNode) => {\n const simplifiedInsertionNode: InsertionContentConstructor = {\n nodeType: NodeType.Insertion,\n [NodeType.Insertion]: deepTransformNode(node.insertion, props),\n };\n\n return simplifiedInsertionNode;\n },\n};\n\nexport const processContentDeclarationContent = async (\n dictionary: Dictionary\n) =>\n deepTransformNode(dictionary, {\n dictionaryKey: dictionary.key,\n keyPath: [],\n plugins: [writeFilePlugin, markdownFilePlugin, insertionFilePlugin],\n 
});\n"],"mappings":";;;;;;;;;;;;;;;;AAmBA,MAAMA,kBAA2B;CAC/B,IAAI;CACJ,YAAY,SACV,OAAO,SAAS,YAAY,MAAM,aAAaC,0BAAS;CAC1D,YAAY,SAAsB;EAChC,MAAM,cAAc,KAAK;EACzB,MAAM,WAAW,KAAK;AAEtB,MAAI,OAAO,gBAAgB,SACzB,OAAM,IAAI,MAAM,gCAAgC;AAGlD,MAAI,OAAO,aAAa,SACtB,OAAM,IAAI,MAAM,6BAA6B;AAI/C,MAAI;GACF,MAAM,uCAAwB,QAAQ,KAAK,EAAE,SAAS;GAGtD,MAAM,uCAAwB,iBAAiB;AAE/C,OAAI,yBAAY,cAAc,CAC5B,wBAAU,eAAe,EAAE,WAAW,MAAM,CAAC;AAI/C,8BAAc,kBAAkB,YAAY;WACrC,OAAO;AACd,SAAM,IAAI,MACR,GAAGC,oBAAE,6DAAsC,SAAS,CAAC,IAAI,QAC1D;;AAQH,SALuD;GACrD,UAAUD,0BAAS;IAClBA,0BAAS,OAAO,KAAK;GACvB;;CAIJ;;;;AAMD,MAAME,qBAA8B;CAClC,IAAI;CACJ,YAAY,SACV,OAAO,SAAS,YAAY,MAAM,aAAaF,0BAAS;CAC1D,YAAY,MAAuB,OAAO,wBAAsB;AAM9D,SAL2D;GACzD,UAAUA,0BAAS;IAClBA,0BAAS,WAAWG,oBAAkB,KAAK,UAAU,MAAM;GAC7D;;CAIJ;;;;AAMD,MAAMC,sBAA+B;CACnC,IAAI;CACJ,YAAY,SACV,OAAO,SAAS,YAAY,MAAM,aAAaJ,0BAAS;CAC1D,YAAY,MAAwB,OAAO,wBAAsB;AAM/D,SAL6D;GAC3D,UAAUA,0BAAS;IAClBA,0BAAS,YAAYG,oBAAkB,KAAK,WAAW,MAAM;GAC/D;;CAIJ;AAED,MAAa,mCAAmC,OAC9C,sDAEkB,YAAY;CAC5B,eAAe,WAAW;CAC1B,SAAS,EAAE;CACX,SAAS;EAAC;EAAiB;EAAoB;EAAoB;CACpE,CAAC"}
+ {"version":3,"file":"processContentDeclarationContent.cjs","names":["writeFilePlugin: Plugins","NodeType","x","markdownFilePlugin: Plugins","deepTransformNode","insertionFilePlugin: Plugins"],"sources":["../../../src/writeContentDeclaration/processContentDeclarationContent.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'node:fs';\nimport { dirname, join } from 'node:path';\nimport { colorizePath, x } from '@intlayer/config';\nimport {\n deepTransformNode,\n type FileContent,\n type FileContentConstructor,\n type InsertionContent,\n type InsertionContentConstructor,\n type MarkdownContent,\n type MarkdownContentConstructor,\n type Plugins,\n} from '@intlayer/core';\nimport { type Dictionary, NodeType } from '@intlayer/types';\n\n/**\n * Write file plugin\n */\n\nconst writeFilePlugin: Plugins = {\n id: 'write-file-plugin',\n canHandle: (node) =>\n typeof node === 'object' && node?.nodeType === NodeType.File,\n transform: (node: FileContent) => {\n const fileContent = node.content;\n const filePath = node.fixedPath;\n\n if (typeof fileContent !== 'string') {\n throw new Error('File content must be a string');\n }\n\n if (typeof filePath !== 'string') {\n throw new Error('File path must be a string');\n }\n\n // Write the file to the file system\n try {\n const absoluteFilePath = join(process.cwd(), filePath);\n\n // Create the file directory if it doesn't exist\n const fileDirectory = dirname(absoluteFilePath);\n\n if (!existsSync(fileDirectory)) {\n mkdirSync(fileDirectory, { recursive: true });\n }\n\n // Write the file\n writeFileSync(absoluteFilePath, fileContent);\n } catch (error) {\n throw new Error(\n `${x} Error writing file to ${colorizePath(filePath)}: ${error}`\n );\n }\n\n const transformedFileContent: FileContentConstructor = {\n nodeType: NodeType.File,\n [NodeType.File]: node.file,\n };\n\n return transformedFileContent;\n },\n};\n\n/**\n * Markdown file plugin\n */\n\nconst markdownFilePlugin: Plugins = {\n id: 'markdown-file-plugin',\n canHandle: (node) =>\n typeof node === 'object' && node?.nodeType === NodeType.Markdown,\n transform: (node: MarkdownContent, props, deepTransformNode) => {\n const simplifiedMarkdownNode: MarkdownContentConstructor = {\n nodeType: NodeType.Markdown,\n [NodeType.Markdown]: deepTransformNode(node.markdown, props),\n };\n\n return simplifiedMarkdownNode;\n },\n};\n\n/**\n * Insertion file plugin\n */\n\nconst insertionFilePlugin: Plugins = {\n id: 'insertion-file-plugin',\n canHandle: (node) =>\n typeof node === 'object' && node?.nodeType === NodeType.Insertion,\n transform: (node: InsertionContent, props, deepTransformNode) => {\n const simplifiedInsertionNode: InsertionContentConstructor = {\n nodeType: NodeType.Insertion,\n [NodeType.Insertion]: deepTransformNode(node.insertion, props),\n };\n\n return simplifiedInsertionNode;\n },\n};\n\nexport const processContentDeclarationContent = async (\n dictionary: Dictionary\n) =>\n deepTransformNode(dictionary, {\n dictionaryKey: dictionary.key,\n keyPath: [],\n plugins: [writeFilePlugin, markdownFilePlugin, insertionFilePlugin],\n 
});\n"],"mappings":";;;;;;;;;;;AAmBA,MAAMA,kBAA2B;CAC/B,IAAI;CACJ,YAAY,SACV,OAAO,SAAS,YAAY,MAAM,aAAaC,0BAAS;CAC1D,YAAY,SAAsB;EAChC,MAAM,cAAc,KAAK;EACzB,MAAM,WAAW,KAAK;AAEtB,MAAI,OAAO,gBAAgB,SACzB,OAAM,IAAI,MAAM,gCAAgC;AAGlD,MAAI,OAAO,aAAa,SACtB,OAAM,IAAI,MAAM,6BAA6B;AAI/C,MAAI;GACF,MAAM,uCAAwB,QAAQ,KAAK,EAAE,SAAS;GAGtD,MAAM,uCAAwB,iBAAiB;AAE/C,OAAI,yBAAY,cAAc,CAC5B,wBAAU,eAAe,EAAE,WAAW,MAAM,CAAC;AAI/C,8BAAc,kBAAkB,YAAY;WACrC,OAAO;AACd,SAAM,IAAI,MACR,GAAGC,oBAAE,6DAAsC,SAAS,CAAC,IAAI,QAC1D;;AAQH,SALuD;GACrD,UAAUD,0BAAS;IAClBA,0BAAS,OAAO,KAAK;GACvB;;CAIJ;;;;AAMD,MAAME,qBAA8B;CAClC,IAAI;CACJ,YAAY,SACV,OAAO,SAAS,YAAY,MAAM,aAAaF,0BAAS;CAC1D,YAAY,MAAuB,OAAO,wBAAsB;AAM9D,SAL2D;GACzD,UAAUA,0BAAS;IAClBA,0BAAS,WAAWG,oBAAkB,KAAK,UAAU,MAAM;GAC7D;;CAIJ;;;;AAMD,MAAMC,sBAA+B;CACnC,IAAI;CACJ,YAAY,SACV,OAAO,SAAS,YAAY,MAAM,aAAaJ,0BAAS;CAC1D,YAAY,MAAwB,OAAO,wBAAsB;AAM/D,SAL6D;GAC3D,UAAUA,0BAAS;IAClBA,0BAAS,YAAYG,oBAAkB,KAAK,WAAW,MAAM;GAC/D;;CAIJ;AAED,MAAa,mCAAmC,OAC9C,sDAEkB,YAAY;CAC5B,eAAe,WAAW;CAC1B,SAAS,EAAE;CACX,SAAS;EAAC;EAAiB;EAAoB;EAAoB;CACpE,CAAC"}
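The three plugins embedded above share one contract: canHandle matches a node by its nodeType tag, and transform rewrites it, with markdown and insertion nodes recursing into their inner content via the deepTransformNode callback. A minimal illustration of that contract, with simplified local types standing in for @intlayer/core's (the type shapes here are assumptions):

// Simplified stand-ins for the plugin types used by deepTransformNode.
type TransformProps = { keyPath: string[] };
type DeepTransform = (node: unknown, props: TransformProps) => unknown;

type Plugin = {
  id: string;
  canHandle: (node: unknown) => boolean;
  transform: (node: any, props: TransformProps, recurse: DeepTransform) => unknown;
};

// A plugin that matches { nodeType: 'markdown', markdown: ... } nodes
// and keeps transforming their inner content.
const markdownPlugin: Plugin = {
  id: 'markdown-plugin',
  canHandle: (node) =>
    typeof node === 'object' && (node as any)?.nodeType === 'markdown',
  transform: (node, props, recurse) => ({
    nodeType: 'markdown',
    markdown: recurse(node.markdown, props),
  }),
};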
@@ -1,10 +1,7 @@
  const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
  let __intlayer_core = require("@intlayer/core");
- __intlayer_core = require_rolldown_runtime.__toESM(__intlayer_core);
  let __intlayer_types = require("@intlayer/types");
- __intlayer_types = require_rolldown_runtime.__toESM(__intlayer_types);
  let ts_morph = require("ts-morph");
- ts_morph = require_rolldown_runtime.__toESM(ts_morph);
 
  //#region src/writeContentDeclaration/transformJSFile.ts
  /**
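transformJSFile, whose hunk starts here, pulls in ts-morph to rewrite content declaration files via the TypeScript AST rather than string manipulation; only the compiled prelude is visible in this diff. As a general illustration of the approach, ts-morph lets you parse a file, edit a declaration, and write the result back (the path './example.content.ts' and the identifier 'pageContent' below are hypothetical):

import { Project } from 'ts-morph';

// Parse a declaration file, replace the initializer of an exported
// const, and persist the edited file back to disk.
const project = new Project();
const sourceFile = project.addSourceFileAtPath('./example.content.ts');

const declaration = sourceFile.getVariableDeclarationOrThrow('pageContent');
declaration.setInitializer('{ key: "page", content: {} }');

sourceFile.saveSync();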