@intlayer/chokidar 7.5.13 → 7.5.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93)
  1. package/dist/cjs/buildIntlayerDictionary/buildIntlayerDictionary.cjs.map +1 -1
  2. package/dist/cjs/buildIntlayerDictionary/writeDynamicDictionary.cjs.map +1 -1
  3. package/dist/cjs/buildIntlayerDictionary/writeFetchDictionary.cjs.map +1 -1
  4. package/dist/cjs/cleanOutputDir.cjs.map +1 -1
  5. package/dist/cjs/cleanRemovedContentDeclaration.cjs.map +1 -1
  6. package/dist/cjs/createDictionaryEntryPoint/generateDictionaryListContent.cjs.map +1 -1
  7. package/dist/cjs/createType/createModuleAugmentation.cjs.map +1 -1
  8. package/dist/cjs/createType/createType.cjs.map +1 -1
  9. package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs.map +1 -1
  10. package/dist/cjs/handleContentDeclarationFileChange.cjs.map +1 -1
  11. package/dist/cjs/handleContentDeclarationFileMoved.cjs.map +1 -1
  12. package/dist/cjs/init/index.cjs.map +1 -1
  13. package/dist/cjs/listGitFiles.cjs.map +1 -1
  14. package/dist/cjs/loadDictionaries/loadContentDeclaration.cjs.map +1 -1
  15. package/dist/cjs/loadDictionaries/loadDictionaries.cjs.map +1 -1
  16. package/dist/cjs/loadDictionaries/loadLocalDictionaries.cjs.map +1 -1
  17. package/dist/cjs/loadDictionaries/loadRemoteDictionaries.cjs.map +1 -1
  18. package/dist/cjs/loadDictionaries/log.cjs.map +1 -1
  19. package/dist/cjs/reduceDictionaryContent/applyMask.cjs.map +1 -1
  20. package/dist/cjs/transformFiles/transformFiles.cjs.map +1 -1
  21. package/dist/cjs/utils/chunkJSON.cjs.map +1 -1
  22. package/dist/cjs/utils/pLimit.cjs.map +1 -1
  23. package/dist/cjs/utils/parallelizeGlobal.cjs.map +1 -1
  24. package/dist/cjs/utils/reduceObjectFormat.cjs.map +1 -1
  25. package/dist/cjs/utils/resolveObjectPromises.cjs.map +1 -1
  26. package/dist/cjs/utils/runOnce.cjs +13 -6
  27. package/dist/cjs/utils/runOnce.cjs.map +1 -1
  28. package/dist/cjs/utils/runParallel/bin.cjs.map +1 -1
  29. package/dist/cjs/utils/runParallel/index.cjs.map +1 -1
  30. package/dist/cjs/utils/runParallel/pidTree.cjs.map +1 -1
  31. package/dist/cjs/utils/runParallel/ps.cjs.map +1 -1
  32. package/dist/cjs/utils/runParallel/runTask.cjs.map +1 -1
  33. package/dist/cjs/utils/runParallel/wmic.cjs.map +1 -1
  34. package/dist/cjs/watcher.cjs.map +1 -1
  35. package/dist/cjs/writeContentDeclaration/detectFormatCommand.cjs.map +1 -1
  36. package/dist/cjs/writeContentDeclaration/processContentDeclarationContent.cjs.map +1 -1
  37. package/dist/cjs/writeContentDeclaration/transformJSFile.cjs.map +1 -1
  38. package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs.map +1 -1
  39. package/dist/esm/buildIntlayerDictionary/buildIntlayerDictionary.mjs.map +1 -1
  40. package/dist/esm/buildIntlayerDictionary/writeDynamicDictionary.mjs.map +1 -1
  41. package/dist/esm/buildIntlayerDictionary/writeFetchDictionary.mjs.map +1 -1
  42. package/dist/esm/cleanOutputDir.mjs.map +1 -1
  43. package/dist/esm/cleanRemovedContentDeclaration.mjs.map +1 -1
  44. package/dist/esm/createDictionaryEntryPoint/generateDictionaryListContent.mjs.map +1 -1
  45. package/dist/esm/createType/createModuleAugmentation.mjs.map +1 -1
  46. package/dist/esm/createType/createType.mjs.map +1 -1
  47. package/dist/esm/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs.map +1 -1
  48. package/dist/esm/handleContentDeclarationFileChange.mjs.map +1 -1
  49. package/dist/esm/handleContentDeclarationFileMoved.mjs.map +1 -1
  50. package/dist/esm/init/index.mjs.map +1 -1
  51. package/dist/esm/listGitFiles.mjs.map +1 -1
  52. package/dist/esm/loadDictionaries/loadContentDeclaration.mjs.map +1 -1
  53. package/dist/esm/loadDictionaries/loadDictionaries.mjs.map +1 -1
  54. package/dist/esm/loadDictionaries/loadLocalDictionaries.mjs.map +1 -1
  55. package/dist/esm/loadDictionaries/loadRemoteDictionaries.mjs.map +1 -1
  56. package/dist/esm/loadDictionaries/log.mjs.map +1 -1
  57. package/dist/esm/reduceDictionaryContent/applyMask.mjs.map +1 -1
  58. package/dist/esm/transformFiles/transformFiles.mjs.map +1 -1
  59. package/dist/esm/utils/chunkJSON.mjs.map +1 -1
  60. package/dist/esm/utils/pLimit.mjs.map +1 -1
  61. package/dist/esm/utils/parallelizeGlobal.mjs.map +1 -1
  62. package/dist/esm/utils/reduceObjectFormat.mjs.map +1 -1
  63. package/dist/esm/utils/resolveObjectPromises.mjs.map +1 -1
  64. package/dist/esm/utils/runOnce.mjs +13 -6
  65. package/dist/esm/utils/runOnce.mjs.map +1 -1
  66. package/dist/esm/utils/runParallel/bin.mjs.map +1 -1
  67. package/dist/esm/utils/runParallel/index.mjs.map +1 -1
  68. package/dist/esm/utils/runParallel/pidTree.mjs.map +1 -1
  69. package/dist/esm/utils/runParallel/ps.mjs.map +1 -1
  70. package/dist/esm/utils/runParallel/runTask.mjs.map +1 -1
  71. package/dist/esm/utils/runParallel/wmic.mjs.map +1 -1
  72. package/dist/esm/watcher.mjs.map +1 -1
  73. package/dist/esm/writeContentDeclaration/detectFormatCommand.mjs.map +1 -1
  74. package/dist/esm/writeContentDeclaration/processContentDeclarationContent.mjs.map +1 -1
  75. package/dist/esm/writeContentDeclaration/transformJSFile.mjs.map +1 -1
  76. package/dist/esm/writeContentDeclaration/writeContentDeclaration.mjs.map +1 -1
  77. package/dist/types/buildIntlayerDictionary/buildIntlayerDictionary.d.ts +2 -2
  78. package/dist/types/buildIntlayerDictionary/buildIntlayerDictionary.d.ts.map +1 -1
  79. package/dist/types/buildIntlayerDictionary/writeDynamicDictionary.d.ts +3 -3
  80. package/dist/types/buildIntlayerDictionary/writeDynamicDictionary.d.ts.map +1 -1
  81. package/dist/types/buildIntlayerDictionary/writeFetchDictionary.d.ts +3 -3
  82. package/dist/types/buildIntlayerDictionary/writeFetchDictionary.d.ts.map +1 -1
  83. package/dist/types/buildIntlayerDictionary/writeMergedDictionary.d.ts +2 -2
  84. package/dist/types/buildIntlayerDictionary/writeMergedDictionary.d.ts.map +1 -1
  85. package/dist/types/buildIntlayerDictionary/writeRemoteDictionary.d.ts +2 -2
  86. package/dist/types/buildIntlayerDictionary/writeRemoteDictionary.d.ts.map +1 -1
  87. package/dist/types/createDictionaryEntryPoint/createDictionaryEntryPoint.d.ts +2 -2
  88. package/dist/types/createDictionaryEntryPoint/generateDictionaryListContent.d.ts +2 -2
  89. package/dist/types/formatDictionary.d.ts +15 -15
  90. package/dist/types/loadDictionaries/loadRemoteDictionaries.d.ts +2 -2
  91. package/dist/types/loadDictionaries/loadRemoteDictionaries.d.ts.map +1 -1
  92. package/dist/types/utils/runOnce.d.ts.map +1 -1
  93. package/package.json +13 -13
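
The `.map` hunks below all show the same change: in 7.5.13 the generated sourcemaps recorded TypeScript declaration annotations (for example `"out: any"` or `"lines: string[]"`) in their `names` array, while in 7.5.14 `names` is empty and the corresponding `mappings` segments drop their name index. A minimal sketch of how to observe this, assuming two extracted package versions side by side (the directory layout is a hypothetical placeholder, not part of the package):

```ts
import { readFileSync } from 'node:fs';

type SourceMap = {
  version: number;
  file: string;
  names: string[];
  mappings: string;
};

const load = (path: string): SourceMap =>
  JSON.parse(readFileSync(path, 'utf8'));

// Hypothetical paths: the two versions unpacked next to each other.
const before = load('7.5.13/package/dist/esm/reduceDictionaryContent/applyMask.mjs.map');
const after = load('7.5.14/package/dist/esm/reduceDictionaryContent/applyMask.mjs.map');

console.log(before.names); // ["out: any"] in 7.5.13
console.log(after.names);  // [] in 7.5.14

// With the name index gone, the matching mappings segments shrink from
// five VLQ fields to four (e.g. "MAAMA" becomes "MAAM" in the hunks below).
```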
package/dist/esm/loadDictionaries/loadRemoteDictionaries.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"loadRemoteDictionaries.mjs","names":["distantDictionaryUpdateTimeStamp: Record<\n DictionaryId,\n { key: DictionaryKey; updatedAt: number }\n > | null","local: Dictionary | undefined","cachedDictionaries: Dictionary[]","distantDictionaries: Dictionary[]"],"sources":["../../../src/loadDictionaries/loadRemoteDictionaries.ts"],"sourcesContent":["import { getIntlayerAPIProxy } from '@intlayer/api';\n// @ts-ignore @intlayer/backend is not build yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport { getConfiguration } from '@intlayer/config';\nimport { getRemoteDictionaries } from '@intlayer/remote-dictionaries-entry';\nimport type { Dictionary, DictionaryId, DictionaryKey } from '@intlayer/types';\nimport { fetchDistantDictionaries } from '../fetchDistantDictionaries';\nimport type { DictionariesStatus } from '../loadDictionaries/loadDictionaries';\nimport { sortAlphabetically } from '../utils/sortAlphabetically';\n\nexport const formatDistantDictionaries = (\n dictionaries: (DictionaryAPI | Dictionary)[]\n): Dictionary[] =>\n dictionaries.map((dict) => ({\n ...dict,\n localId: `${dict.key}::remote::${dict.id}`,\n location: 'remote' as const,\n }));\n\nexport const loadRemoteDictionaries = async (\n configuration = getConfiguration(),\n onStatusUpdate?: (status: DictionariesStatus[]) => void,\n options?: {\n onStartRemoteCheck?: () => void;\n onStopRemoteCheck?: () => void;\n onError?: (error: Error) => void;\n }\n): Promise<Dictionary[]> => {\n const { editor } = configuration;\n const remoteDictionariesRecord = getRemoteDictionaries(configuration);\n\n const hasRemoteDictionaries = Boolean(editor.clientId && editor.clientSecret);\n\n if (!hasRemoteDictionaries) return [];\n\n try {\n options?.onStartRemoteCheck?.();\n\n const intlayerAPI = getIntlayerAPIProxy(undefined, configuration);\n\n // Get the list of dictionary keys\n const getDictionariesKeysResult =\n await intlayerAPI.dictionary.getDictionariesUpdateTimestamp();\n\n const distantDictionaryUpdateTimeStamp: Record<\n DictionaryId,\n { key: DictionaryKey; updatedAt: number }\n > | null = getDictionariesKeysResult.data;\n\n if (!distantDictionaryUpdateTimeStamp) {\n throw new Error('No distant dictionaries found');\n }\n\n const dictionariesIdToFetch = Object.entries(\n distantDictionaryUpdateTimeStamp\n ).filter(([dictionaryId, data]) => {\n // If remote doesn't provide updatedAt, fetch to be safe\n if (!data.updatedAt) return true;\n\n // If no local cache exists, fetch\n const local: Dictionary | undefined = remoteDictionariesRecord[\n data.key\n ]?.find((dictionary) => dictionary.id === dictionaryId);\n if (!local) return true;\n\n const localUpdatedAtRaw = (local as any)?.updatedAt as\n | number\n | string\n | undefined;\n\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? localUpdatedAtRaw\n : localUpdatedAtRaw\n ? new Date(localUpdatedAtRaw).getTime()\n : undefined;\n\n // If local timestamp missing or older than remote, fetch\n if (typeof localUpdatedAt !== 'number') return true;\n\n return data.updatedAt > localUpdatedAt;\n });\n\n const flatRemoteDictionariesRecord: DictionaryAPI[] = Object.values(\n remoteDictionariesRecord\n ).flat();\n\n const cachedDictionaries: Dictionary[] =\n flatRemoteDictionariesRecord.filter((dictionary) => {\n const remoteUpdatedAt =\n distantDictionaryUpdateTimeStamp[dictionary.id!].updatedAt;\n\n const localUpdatedAtRaw = dictionary.updatedAt;\n\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? 
localUpdatedAtRaw\n : localUpdatedAtRaw\n ? new Date(localUpdatedAtRaw).getTime()\n : undefined;\n\n // Consider as cached/imported when local exists and is up-to-date or newer\n return (\n typeof localUpdatedAt === 'number' &&\n typeof remoteUpdatedAt === 'number' &&\n localUpdatedAt >= remoteUpdatedAt\n );\n });\n\n // Report cached as already imported\n if (cachedDictionaries.length > 0) {\n onStatusUpdate?.(\n cachedDictionaries.map((dictionary) => ({\n dictionaryKey: dictionary.key,\n type: 'remote',\n status: 'imported',\n }))\n );\n }\n\n const orderedDistantDictionaryKeys = dictionariesIdToFetch\n .map(([, data]) => data.key)\n .sort(sortAlphabetically);\n\n // Report pending for keys to be fetched so totals are visible immediately\n if (orderedDistantDictionaryKeys.length > 0) {\n onStatusUpdate?.(\n orderedDistantDictionaryKeys.map((key) => ({\n dictionaryKey: key,\n type: 'remote',\n status: 'pending',\n }))\n );\n }\n\n const distantDictionariesData = await fetchDistantDictionaries(\n {\n dictionaryKeys: orderedDistantDictionaryKeys,\n },\n onStatusUpdate\n );\n\n const distantDictionaries: Dictionary[] = formatDistantDictionaries(\n distantDictionariesData\n );\n\n return [...cachedDictionaries, ...distantDictionaries];\n } catch (error) {\n options?.onError?.(error as Error);\n return [];\n } finally {\n options?.onStopRemoteCheck?.();\n }\n};\n"],"mappings":";;;;;;;AAUA,MAAa,6BACX,iBAEA,aAAa,KAAK,UAAU;CAC1B,GAAG;CACH,SAAS,GAAG,KAAK,IAAI,YAAY,KAAK;CACtC,UAAU;CACX,EAAE;AAEL,MAAa,yBAAyB,OACpC,gBAAgB,kBAAkB,EAClC,gBACA,YAK0B;CAC1B,MAAM,EAAE,WAAW;CACnB,MAAM,2BAA2B,sBAAsB,cAAc;AAIrE,KAAI,CAF0B,QAAQ,OAAO,YAAY,OAAO,aAAa,CAEjD,QAAO,EAAE;AAErC,KAAI;AACF,WAAS,sBAAsB;EAQ/B,MAAMA,oCAFJ,MAJkB,oBAAoB,QAAW,cAAc,CAI7C,WAAW,gCAAgC,EAK1B;AAErC,MAAI,CAAC,iCACH,OAAM,IAAI,MAAM,gCAAgC;EAGlD,MAAM,wBAAwB,OAAO,QACnC,iCACD,CAAC,QAAQ,CAAC,cAAc,UAAU;AAEjC,OAAI,CAAC,KAAK,UAAW,QAAO;GAG5B,MAAMC,QAAgC,yBACpC,KAAK,MACJ,MAAM,eAAe,WAAW,OAAO,aAAa;AACvD,OAAI,CAAC,MAAO,QAAO;GAEnB,MAAM,oBAAqB,OAAe;GAK1C,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AAGR,OAAI,OAAO,mBAAmB,SAAU,QAAO;AAE/C,UAAO,KAAK,YAAY;IACxB;EAMF,MAAMC,qBAJgD,OAAO,OAC3D,yBACD,CAAC,MAAM,CAGuB,QAAQ,eAAe;GAClD,MAAM,kBACJ,iCAAiC,WAAW,IAAK;GAEnD,MAAM,oBAAoB,WAAW;GAErC,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AAGR,UACE,OAAO,mBAAmB,YAC1B,OAAO,oBAAoB,YAC3B,kBAAkB;IAEpB;AAGJ,MAAI,mBAAmB,SAAS,EAC9B,kBACE,mBAAmB,KAAK,gBAAgB;GACtC,eAAe,WAAW;GAC1B,MAAM;GACN,QAAQ;GACT,EAAE,CACJ;EAGH,MAAM,+BAA+B,sBAClC,KAAK,GAAG,UAAU,KAAK,IAAI,CAC3B,KAAK,mBAAmB;AAG3B,MAAI,6BAA6B,SAAS,EACxC,kBACE,6BAA6B,KAAK,SAAS;GACzC,eAAe;GACf,MAAM;GACN,QAAQ;GACT,EAAE,CACJ;EAUH,MAAMC,sBAAoC,0BAPV,MAAM,yBACpC,EACE,gBAAgB,8BACjB,EACD,eACD,CAIA;AAED,SAAO,CAAC,GAAG,oBAAoB,GAAG,oBAAoB;UAC/C,OAAO;AACd,WAAS,UAAU,MAAe;AAClC,SAAO,EAAE;WACD;AACR,WAAS,qBAAqB"}
+ {"version":3,"file":"loadRemoteDictionaries.mjs","names":[],"sources":["../../../src/loadDictionaries/loadRemoteDictionaries.ts"],"sourcesContent":["import { getIntlayerAPIProxy } from '@intlayer/api';\n// @ts-ignore @intlayer/backend is not build yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport { getConfiguration } from '@intlayer/config';\nimport { getRemoteDictionaries } from '@intlayer/remote-dictionaries-entry';\nimport type { Dictionary, DictionaryId, DictionaryKey } from '@intlayer/types';\nimport { fetchDistantDictionaries } from '../fetchDistantDictionaries';\nimport type { DictionariesStatus } from '../loadDictionaries/loadDictionaries';\nimport { sortAlphabetically } from '../utils/sortAlphabetically';\n\nexport const formatDistantDictionaries = (\n dictionaries: (DictionaryAPI | Dictionary)[]\n): Dictionary[] =>\n dictionaries.map((dict) => ({\n ...dict,\n localId: `${dict.key}::remote::${dict.id}`,\n location: 'remote' as const,\n }));\n\nexport const loadRemoteDictionaries = async (\n configuration = getConfiguration(),\n onStatusUpdate?: (status: DictionariesStatus[]) => void,\n options?: {\n onStartRemoteCheck?: () => void;\n onStopRemoteCheck?: () => void;\n onError?: (error: Error) => void;\n }\n): Promise<Dictionary[]> => {\n const { editor } = configuration;\n const remoteDictionariesRecord = getRemoteDictionaries(configuration);\n\n const hasRemoteDictionaries = Boolean(editor.clientId && editor.clientSecret);\n\n if (!hasRemoteDictionaries) return [];\n\n try {\n options?.onStartRemoteCheck?.();\n\n const intlayerAPI = getIntlayerAPIProxy(undefined, configuration);\n\n // Get the list of dictionary keys\n const getDictionariesKeysResult =\n await intlayerAPI.dictionary.getDictionariesUpdateTimestamp();\n\n const distantDictionaryUpdateTimeStamp: Record<\n DictionaryId,\n { key: DictionaryKey; updatedAt: number }\n > | null = getDictionariesKeysResult.data;\n\n if (!distantDictionaryUpdateTimeStamp) {\n throw new Error('No distant dictionaries found');\n }\n\n const dictionariesIdToFetch = Object.entries(\n distantDictionaryUpdateTimeStamp\n ).filter(([dictionaryId, data]) => {\n // If remote doesn't provide updatedAt, fetch to be safe\n if (!data.updatedAt) return true;\n\n // If no local cache exists, fetch\n const local: Dictionary | undefined = remoteDictionariesRecord[\n data.key\n ]?.find((dictionary) => dictionary.id === dictionaryId);\n if (!local) return true;\n\n const localUpdatedAtRaw = (local as any)?.updatedAt as\n | number\n | string\n | undefined;\n\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? localUpdatedAtRaw\n : localUpdatedAtRaw\n ? new Date(localUpdatedAtRaw).getTime()\n : undefined;\n\n // If local timestamp missing or older than remote, fetch\n if (typeof localUpdatedAt !== 'number') return true;\n\n return data.updatedAt > localUpdatedAt;\n });\n\n const flatRemoteDictionariesRecord: DictionaryAPI[] = Object.values(\n remoteDictionariesRecord\n ).flat();\n\n const cachedDictionaries: Dictionary[] =\n flatRemoteDictionariesRecord.filter((dictionary) => {\n const remoteUpdatedAt =\n distantDictionaryUpdateTimeStamp[dictionary.id!].updatedAt;\n\n const localUpdatedAtRaw = dictionary.updatedAt;\n\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? localUpdatedAtRaw\n : localUpdatedAtRaw\n ? 
new Date(localUpdatedAtRaw).getTime()\n : undefined;\n\n // Consider as cached/imported when local exists and is up-to-date or newer\n return (\n typeof localUpdatedAt === 'number' &&\n typeof remoteUpdatedAt === 'number' &&\n localUpdatedAt >= remoteUpdatedAt\n );\n });\n\n // Report cached as already imported\n if (cachedDictionaries.length > 0) {\n onStatusUpdate?.(\n cachedDictionaries.map((dictionary) => ({\n dictionaryKey: dictionary.key,\n type: 'remote',\n status: 'imported',\n }))\n );\n }\n\n const orderedDistantDictionaryKeys = dictionariesIdToFetch\n .map(([, data]) => data.key)\n .sort(sortAlphabetically);\n\n // Report pending for keys to be fetched so totals are visible immediately\n if (orderedDistantDictionaryKeys.length > 0) {\n onStatusUpdate?.(\n orderedDistantDictionaryKeys.map((key) => ({\n dictionaryKey: key,\n type: 'remote',\n status: 'pending',\n }))\n );\n }\n\n const distantDictionariesData = await fetchDistantDictionaries(\n {\n dictionaryKeys: orderedDistantDictionaryKeys,\n },\n onStatusUpdate\n );\n\n const distantDictionaries: Dictionary[] = formatDistantDictionaries(\n distantDictionariesData\n );\n\n return [...cachedDictionaries, ...distantDictionaries];\n } catch (error) {\n options?.onError?.(error as Error);\n return [];\n } finally {\n options?.onStopRemoteCheck?.();\n }\n};\n"],"mappings":";;;;;;;AAUA,MAAa,6BACX,iBAEA,aAAa,KAAK,UAAU;CAC1B,GAAG;CACH,SAAS,GAAG,KAAK,IAAI,YAAY,KAAK;CACtC,UAAU;CACX,EAAE;AAEL,MAAa,yBAAyB,OACpC,gBAAgB,kBAAkB,EAClC,gBACA,YAK0B;CAC1B,MAAM,EAAE,WAAW;CACnB,MAAM,2BAA2B,sBAAsB,cAAc;AAIrE,KAAI,CAF0B,QAAQ,OAAO,YAAY,OAAO,aAAa,CAEjD,QAAO,EAAE;AAErC,KAAI;AACF,WAAS,sBAAsB;EAQ/B,MAAM,oCAFJ,MAJkB,oBAAoB,QAAW,cAAc,CAI7C,WAAW,gCAAgC,EAK1B;AAErC,MAAI,CAAC,iCACH,OAAM,IAAI,MAAM,gCAAgC;EAGlD,MAAM,wBAAwB,OAAO,QACnC,iCACD,CAAC,QAAQ,CAAC,cAAc,UAAU;AAEjC,OAAI,CAAC,KAAK,UAAW,QAAO;GAG5B,MAAM,QAAgC,yBACpC,KAAK,MACJ,MAAM,eAAe,WAAW,OAAO,aAAa;AACvD,OAAI,CAAC,MAAO,QAAO;GAEnB,MAAM,oBAAqB,OAAe;GAK1C,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AAGR,OAAI,OAAO,mBAAmB,SAAU,QAAO;AAE/C,UAAO,KAAK,YAAY;IACxB;EAMF,MAAM,qBAJgD,OAAO,OAC3D,yBACD,CAAC,MAAM,CAGuB,QAAQ,eAAe;GAClD,MAAM,kBACJ,iCAAiC,WAAW,IAAK;GAEnD,MAAM,oBAAoB,WAAW;GAErC,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AAGR,UACE,OAAO,mBAAmB,YAC1B,OAAO,oBAAoB,YAC3B,kBAAkB;IAEpB;AAGJ,MAAI,mBAAmB,SAAS,EAC9B,kBACE,mBAAmB,KAAK,gBAAgB;GACtC,eAAe,WAAW;GAC1B,MAAM;GACN,QAAQ;GACT,EAAE,CACJ;EAGH,MAAM,+BAA+B,sBAClC,KAAK,GAAG,UAAU,KAAK,IAAI,CAC3B,KAAK,mBAAmB;AAG3B,MAAI,6BAA6B,SAAS,EACxC,kBACE,6BAA6B,KAAK,SAAS;GACzC,eAAe;GACf,MAAM;GACN,QAAQ;GACT,EAAE,CACJ;EAUH,MAAM,sBAAoC,0BAPV,MAAM,yBACpC,EACE,gBAAgB,8BACjB,EACD,eACD,CAIA;AAED,SAAO,CAAC,GAAG,oBAAoB,GAAG,oBAAoB;UAC/C,OAAO;AACd,WAAS,UAAU,MAAe;AAClC,SAAO,EAAE;WACD;AACR,WAAS,qBAAqB"}
package/dist/esm/loadDictionaries/log.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"log.mjs","names":["lines: string[]"],"sources":["../../../src/loadDictionaries/log.ts"],"sourcesContent":["import configuration from '@intlayer/config/built';\nimport {\n ANSIColors,\n colorize,\n extractErrorMessage,\n spinnerFrames,\n v,\n x,\n} from '@intlayer/config/client';\nimport type { DictionariesStatus } from './loadDictionaries';\n\nexport class DictionariesLogger {\n private statuses: DictionariesStatus[] = [];\n private spinnerTimer: NodeJS.Timeout | null = null;\n private spinnerIndex = 0;\n private renderedLines = 0;\n private readonly spinnerFrames = spinnerFrames;\n private isFinished = false;\n private readonly prefix: string;\n private lastRenderedState: string = '';\n private remoteCheckInProgress = false;\n private expectRemote = false;\n private remoteError: string | undefined;\n private pluginTotal = 0;\n private pluginDone = 0;\n private pluginError: string | undefined;\n\n constructor() {\n this.prefix = configuration?.log?.prefix ?? '';\n }\n\n setExpectRemote(expect: boolean) {\n this.expectRemote = expect;\n }\n\n startRemoteCheck() {\n if (this.isFinished) return;\n this.remoteCheckInProgress = true;\n this.startSpinner();\n this.render();\n }\n\n stopRemoteCheck() {\n this.remoteCheckInProgress = false;\n }\n\n update(newStatuses: DictionariesStatus[]) {\n if (this.isFinished) return;\n for (const status of newStatuses) {\n const index = this.statuses.findIndex(\n (s) =>\n s.dictionaryKey === status.dictionaryKey && s.type === status.type\n );\n if (index >= 0) {\n this.statuses[index] = status;\n } else {\n this.statuses.push(status);\n }\n }\n\n // If we expect remote fetch later, avoid rendering a local-only line first\n const { remoteTotal } = this.computeProgress();\n if (this.expectRemote && !this.remoteCheckInProgress && remoteTotal === 0) {\n // Do not start spinner or render yet; wait until remote check starts\n return;\n }\n\n this.startSpinner();\n this.render();\n }\n\n finish() {\n this.isFinished = true;\n this.stopSpinner();\n // Render final state and keep it visible\n this.render();\n }\n\n private startSpinner() {\n if (this.spinnerTimer || this.isFinished) return;\n this.spinnerTimer = setInterval(() => {\n this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerFrames.length;\n this.render();\n }, 100);\n }\n\n private stopSpinner() {\n if (!this.spinnerTimer) return;\n clearInterval(this.spinnerTimer);\n this.spinnerTimer = null;\n }\n\n public setRemoteError = (error?: Error) => {\n this.remoteError = extractErrorMessage(error);\n // Avoid rendering a transient remote-only line while the remote check flag is still true\n // Ensure local + remote are rendered together after a failure\n this.stopRemoteCheck();\n this.render();\n };\n\n setPluginTotal(total: number) {\n if (this.isFinished) return;\n this.pluginTotal = total;\n if (total > 0) {\n this.startSpinner();\n }\n this.render();\n }\n\n setPluginDone(done: number) {\n if (this.isFinished) return;\n this.pluginDone = done;\n this.render();\n }\n\n setPluginError(error?: Error) {\n if (this.isFinished) return;\n this.pluginError = extractErrorMessage(error);\n this.render();\n }\n\n private render() {\n const {\n localTotal,\n localDone,\n remoteTotal,\n remoteDone,\n pluginTotal,\n pluginDone,\n } = this.computeProgress();\n\n const frame = this.spinnerFrames[this.spinnerIndex];\n const clock = colorize(frame, ANSIColors.BLUE);\n const lines: string[] = [];\n\n const isLocalDone = localDone === localTotal;\n const isRemoteDone = remoteDone === remoteTotal;\n 
const isPluginDone = pluginDone === pluginTotal;\n\n const suppressLocalWhileCheckingRemote =\n this.expectRemote && this.remoteCheckInProgress && remoteTotal === 0;\n\n if (!suppressLocalWhileCheckingRemote) {\n if (isLocalDone) {\n lines.push(\n `${this.prefix} ${v} Local content: ${colorize(`${localDone}`, ANSIColors.GREEN)}${colorize(`/${localTotal}`, ANSIColors.GREY)}`\n );\n } else {\n lines.push(\n `${this.prefix} ${clock} Local content: ${colorize(`${localDone}`, ANSIColors.BLUE)}${colorize(`/${localTotal}`, ANSIColors.GREY)}`\n );\n }\n }\n\n // Single remote line: show error, check, or progress counts\n if (remoteTotal > 0 || this.remoteCheckInProgress || this.remoteError) {\n if (this.remoteError) {\n lines.push(\n `${this.prefix} ${x} Remote content: ${colorize(\n this.remoteError,\n ANSIColors.RED\n )}`\n );\n } else if (remoteTotal === 0) {\n lines.push(\n `${this.prefix} ${clock} Remote content: ${colorize('Check server', ANSIColors.BLUE)}`\n );\n } else if (isRemoteDone) {\n lines.push(\n `${this.prefix} ${v} Remote content: ${colorize(`${remoteDone}`, ANSIColors.GREEN)}${colorize(`/${remoteTotal}`, ANSIColors.GREY)}`\n );\n } else {\n lines.push(\n `${this.prefix} ${clock} Remote content: ${colorize(`${remoteDone}`, ANSIColors.BLUE)}${colorize(`/${remoteTotal}`, ANSIColors.GREY)}`\n );\n }\n }\n\n // Plugin line: show error or progress counts\n if (pluginTotal > 0 || this.pluginError) {\n if (this.pluginError) {\n lines.push(\n `${this.prefix} ${x} Plugin content: ${colorize(\n this.pluginError,\n ANSIColors.RED\n )}`\n );\n } else if (isPluginDone) {\n lines.push(\n `${this.prefix} ${v} Plugin content: ${colorize(`${pluginDone}`, ANSIColors.GREEN)}${colorize(`/${pluginTotal}`, ANSIColors.GREY)}`\n );\n } else {\n lines.push(\n `${this.prefix} ${clock} Plugin content: ${colorize(`${pluginDone}`, ANSIColors.BLUE)}${colorize(`/${pluginTotal}`, ANSIColors.GREY)}`\n );\n }\n }\n\n // Check if the state has changed to avoid duplicate rendering\n const currentState = lines.join('\\n');\n if (currentState === this.lastRenderedState) {\n return;\n }\n this.lastRenderedState = currentState;\n\n if (this.renderedLines > 0) {\n process.stdout.write(`\\x1b[${this.renderedLines}F`);\n }\n\n const totalLinesToClear = Math.max(this.renderedLines, lines.length);\n for (let i = 0; i < totalLinesToClear; i++) {\n process.stdout.write('\\x1b[2K');\n const line = lines[i];\n if (line !== undefined) {\n process.stdout.write(line);\n }\n process.stdout.write('\\n');\n }\n\n this.renderedLines = lines.length;\n }\n\n private computeProgress() {\n const localKeys = new Set(\n this.statuses\n .filter((s) => s.type === 'local')\n .map((s) => s.dictionaryKey)\n );\n\n const localDoneKeys = new Set(\n this.statuses\n .filter(\n (s) =>\n s.type === 'local' && (s.status === 'built' || s.status === 'error')\n )\n .map((s) => s.dictionaryKey)\n );\n\n const remoteKeys = new Set(\n this.statuses\n .filter((s) => s.type === 'remote')\n .map((s) => s.dictionaryKey)\n );\n\n const remoteDoneKeys = new Set(\n this.statuses\n .filter(\n (s) =>\n s.type === 'remote' &&\n (s.status === 'fetched' ||\n s.status === 'imported' ||\n s.status === 'error')\n )\n .map((s) => s.dictionaryKey)\n );\n\n return {\n localTotal: localKeys.size,\n localDone: localDoneKeys.size,\n remoteTotal: remoteKeys.size,\n remoteDone: remoteDoneKeys.size,\n pluginTotal: this.pluginTotal,\n pluginDone: this.pluginDone,\n } as const;\n 
}\n}\n"],"mappings":";;;;AAWA,IAAa,qBAAb,MAAgC;CAC9B,AAAQ,WAAiC,EAAE;CAC3C,AAAQ,eAAsC;CAC9C,AAAQ,eAAe;CACvB,AAAQ,gBAAgB;CACxB,AAAiB,gBAAgB;CACjC,AAAQ,aAAa;CACrB,AAAiB;CACjB,AAAQ,oBAA4B;CACpC,AAAQ,wBAAwB;CAChC,AAAQ,eAAe;CACvB,AAAQ;CACR,AAAQ,cAAc;CACtB,AAAQ,aAAa;CACrB,AAAQ;CAER,cAAc;AACZ,OAAK,SAAS,eAAe,KAAK,UAAU;;CAG9C,gBAAgB,QAAiB;AAC/B,OAAK,eAAe;;CAGtB,mBAAmB;AACjB,MAAI,KAAK,WAAY;AACrB,OAAK,wBAAwB;AAC7B,OAAK,cAAc;AACnB,OAAK,QAAQ;;CAGf,kBAAkB;AAChB,OAAK,wBAAwB;;CAG/B,OAAO,aAAmC;AACxC,MAAI,KAAK,WAAY;AACrB,OAAK,MAAM,UAAU,aAAa;GAChC,MAAM,QAAQ,KAAK,SAAS,WACzB,MACC,EAAE,kBAAkB,OAAO,iBAAiB,EAAE,SAAS,OAAO,KACjE;AACD,OAAI,SAAS,EACX,MAAK,SAAS,SAAS;OAEvB,MAAK,SAAS,KAAK,OAAO;;EAK9B,MAAM,EAAE,gBAAgB,KAAK,iBAAiB;AAC9C,MAAI,KAAK,gBAAgB,CAAC,KAAK,yBAAyB,gBAAgB,EAEtE;AAGF,OAAK,cAAc;AACnB,OAAK,QAAQ;;CAGf,SAAS;AACP,OAAK,aAAa;AAClB,OAAK,aAAa;AAElB,OAAK,QAAQ;;CAGf,AAAQ,eAAe;AACrB,MAAI,KAAK,gBAAgB,KAAK,WAAY;AAC1C,OAAK,eAAe,kBAAkB;AACpC,QAAK,gBAAgB,KAAK,eAAe,KAAK,KAAK,cAAc;AACjE,QAAK,QAAQ;KACZ,IAAI;;CAGT,AAAQ,cAAc;AACpB,MAAI,CAAC,KAAK,aAAc;AACxB,gBAAc,KAAK,aAAa;AAChC,OAAK,eAAe;;CAGtB,AAAO,kBAAkB,UAAkB;AACzC,OAAK,cAAc,oBAAoB,MAAM;AAG7C,OAAK,iBAAiB;AACtB,OAAK,QAAQ;;CAGf,eAAe,OAAe;AAC5B,MAAI,KAAK,WAAY;AACrB,OAAK,cAAc;AACnB,MAAI,QAAQ,EACV,MAAK,cAAc;AAErB,OAAK,QAAQ;;CAGf,cAAc,MAAc;AAC1B,MAAI,KAAK,WAAY;AACrB,OAAK,aAAa;AAClB,OAAK,QAAQ;;CAGf,eAAe,OAAe;AAC5B,MAAI,KAAK,WAAY;AACrB,OAAK,cAAc,oBAAoB,MAAM;AAC7C,OAAK,QAAQ;;CAGf,AAAQ,SAAS;EACf,MAAM,EACJ,YACA,WACA,aACA,YACA,aACA,eACE,KAAK,iBAAiB;EAE1B,MAAM,QAAQ,KAAK,cAAc,KAAK;EACtC,MAAM,QAAQ,SAAS,OAAO,WAAW,KAAK;EAC9C,MAAMA,QAAkB,EAAE;EAE1B,MAAM,cAAc,cAAc;EAClC,MAAM,eAAe,eAAe;EACpC,MAAM,eAAe,eAAe;AAKpC,MAAI,EAFF,KAAK,gBAAgB,KAAK,yBAAyB,gBAAgB,GAGnE,KAAI,YACF,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,EAAE,kBAAkB,SAAS,GAAG,aAAa,WAAW,MAAM,GAAG,SAAS,IAAI,cAAc,WAAW,KAAK,GAC/H;MAED,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,MAAM,kBAAkB,SAAS,GAAG,aAAa,WAAW,KAAK,GAAG,SAAS,IAAI,cAAc,WAAW,KAAK,GAClI;AAKL,MAAI,cAAc,KAAK,KAAK,yBAAyB,KAAK,YACxD,KAAI,KAAK,YACP,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,EAAE,mBAAmB,SACrC,KAAK,aACL,WAAW,IACZ,GACF;WACQ,gBAAgB,EACzB,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,MAAM,mBAAmB,SAAS,gBAAgB,WAAW,KAAK,GACrF;WACQ,aACT,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,EAAE,mBAAmB,SAAS,GAAG,cAAc,WAAW,MAAM,GAAG,SAAS,IAAI,eAAe,WAAW,KAAK,GAClI;MAED,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,MAAM,mBAAmB,SAAS,GAAG,cAAc,WAAW,KAAK,GAAG,SAAS,IAAI,eAAe,WAAW,KAAK,GACrI;AAKL,MAAI,cAAc,KAAK,KAAK,YAC1B,KAAI,KAAK,YACP,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,EAAE,mBAAmB,SACrC,KAAK,aACL,WAAW,IACZ,GACF;WACQ,aACT,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,EAAE,mBAAmB,SAAS,GAAG,cAAc,WAAW,MAAM,GAAG,SAAS,IAAI,eAAe,WAAW,KAAK,GAClI;MAED,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,MAAM,mBAAmB,SAAS,GAAG,cAAc,WAAW,KAAK,GAAG,SAAS,IAAI,eAAe,WAAW,KAAK,GACrI;EAKL,MAAM,eAAe,MAAM,KAAK,KAAK;AACrC,MAAI,iBAAiB,KAAK,kBACxB;AAEF,OAAK,oBAAoB;AAEzB,MAAI,KAAK,gBAAgB,EACvB,SAAQ,OAAO,MAAM,QAAQ,KAAK,cAAc,GAAG;EAGrD,MAAM,oBAAoB,KAAK,IAAI,KAAK,eAAe,MAAM,OAAO;AACpE,OAAK,IAAI,IAAI,GAAG,IAAI,mBAAmB,KAAK;AAC1C,WAAQ,OAAO,MAAM,UAAU;GAC/B,MAAM,OAAO,MAAM;AACnB,OAAI,SAAS,OACX,SAAQ,OAAO,MAAM,KAAK;AAE5B,WAAQ,OAAO,MAAM,KAAK;;AAG5B,OAAK,gBAAgB,MAAM;;CAG7B,AAAQ,kBAAkB;EACxB,MAAM,YAAY,IAAI,IACpB,KAAK,SACF,QAAQ,MAAM,EAAE,SAAS,QAAQ,CACjC,KAAK,MAAM,EAAE,cAAc,CAC/B;EAED,MAAM,gBAAgB,IAAI,IACxB,KAAK,SACF,QACE,MACC,EAAE,SAAS,YAAY,EAAE,WAAW,WAAW,EAAE,WAAW,SAC/D,CACA,KAAK,MAAM,EAAE,cAAc,CAC/B;EAED,MAAM,aAAa,IAAI,IACrB,KAAK,SACF,QAAQ,MAAM,EAAE,SAAS,SAAS,CAClC,KAAK,MAAM,EAAE,cAAc,CAC/B;EAED,MAAM,iBAAiB,IAAI,IACzB,KAAK,SACF,QACE,MACC,EAAE,SAAS,aACV,EAAE,WAAW,aACZ,EAAE,WAAW,cACb,EAAE,WAAW,SAClB,CACA,KAAK,MAAM,EAAE,cAAc,CAC/B;AAED,SAAO;GACL,YAAY,UAAU;G
ACtB,WAAW,cAAc;GACzB,aAAa,WAAW;GACxB,YAAY,eAAe;GAC3B,aAAa,KAAK;GAClB,YAAY,KAAK;GAClB"}
+ {"version":3,"file":"log.mjs","names":[],"sources":["../../../src/loadDictionaries/log.ts"],"sourcesContent":["import configuration from '@intlayer/config/built';\nimport {\n ANSIColors,\n colorize,\n extractErrorMessage,\n spinnerFrames,\n v,\n x,\n} from '@intlayer/config/client';\nimport type { DictionariesStatus } from './loadDictionaries';\n\nexport class DictionariesLogger {\n private statuses: DictionariesStatus[] = [];\n private spinnerTimer: NodeJS.Timeout | null = null;\n private spinnerIndex = 0;\n private renderedLines = 0;\n private readonly spinnerFrames = spinnerFrames;\n private isFinished = false;\n private readonly prefix: string;\n private lastRenderedState: string = '';\n private remoteCheckInProgress = false;\n private expectRemote = false;\n private remoteError: string | undefined;\n private pluginTotal = 0;\n private pluginDone = 0;\n private pluginError: string | undefined;\n\n constructor() {\n this.prefix = configuration?.log?.prefix ?? '';\n }\n\n setExpectRemote(expect: boolean) {\n this.expectRemote = expect;\n }\n\n startRemoteCheck() {\n if (this.isFinished) return;\n this.remoteCheckInProgress = true;\n this.startSpinner();\n this.render();\n }\n\n stopRemoteCheck() {\n this.remoteCheckInProgress = false;\n }\n\n update(newStatuses: DictionariesStatus[]) {\n if (this.isFinished) return;\n for (const status of newStatuses) {\n const index = this.statuses.findIndex(\n (s) =>\n s.dictionaryKey === status.dictionaryKey && s.type === status.type\n );\n if (index >= 0) {\n this.statuses[index] = status;\n } else {\n this.statuses.push(status);\n }\n }\n\n // If we expect remote fetch later, avoid rendering a local-only line first\n const { remoteTotal } = this.computeProgress();\n if (this.expectRemote && !this.remoteCheckInProgress && remoteTotal === 0) {\n // Do not start spinner or render yet; wait until remote check starts\n return;\n }\n\n this.startSpinner();\n this.render();\n }\n\n finish() {\n this.isFinished = true;\n this.stopSpinner();\n // Render final state and keep it visible\n this.render();\n }\n\n private startSpinner() {\n if (this.spinnerTimer || this.isFinished) return;\n this.spinnerTimer = setInterval(() => {\n this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerFrames.length;\n this.render();\n }, 100);\n }\n\n private stopSpinner() {\n if (!this.spinnerTimer) return;\n clearInterval(this.spinnerTimer);\n this.spinnerTimer = null;\n }\n\n public setRemoteError = (error?: Error) => {\n this.remoteError = extractErrorMessage(error);\n // Avoid rendering a transient remote-only line while the remote check flag is still true\n // Ensure local + remote are rendered together after a failure\n this.stopRemoteCheck();\n this.render();\n };\n\n setPluginTotal(total: number) {\n if (this.isFinished) return;\n this.pluginTotal = total;\n if (total > 0) {\n this.startSpinner();\n }\n this.render();\n }\n\n setPluginDone(done: number) {\n if (this.isFinished) return;\n this.pluginDone = done;\n this.render();\n }\n\n setPluginError(error?: Error) {\n if (this.isFinished) return;\n this.pluginError = extractErrorMessage(error);\n this.render();\n }\n\n private render() {\n const {\n localTotal,\n localDone,\n remoteTotal,\n remoteDone,\n pluginTotal,\n pluginDone,\n } = this.computeProgress();\n\n const frame = this.spinnerFrames[this.spinnerIndex];\n const clock = colorize(frame, ANSIColors.BLUE);\n const lines: string[] = [];\n\n const isLocalDone = localDone === localTotal;\n const isRemoteDone = remoteDone === remoteTotal;\n const 
isPluginDone = pluginDone === pluginTotal;\n\n const suppressLocalWhileCheckingRemote =\n this.expectRemote && this.remoteCheckInProgress && remoteTotal === 0;\n\n if (!suppressLocalWhileCheckingRemote) {\n if (isLocalDone) {\n lines.push(\n `${this.prefix} ${v} Local content: ${colorize(`${localDone}`, ANSIColors.GREEN)}${colorize(`/${localTotal}`, ANSIColors.GREY)}`\n );\n } else {\n lines.push(\n `${this.prefix} ${clock} Local content: ${colorize(`${localDone}`, ANSIColors.BLUE)}${colorize(`/${localTotal}`, ANSIColors.GREY)}`\n );\n }\n }\n\n // Single remote line: show error, check, or progress counts\n if (remoteTotal > 0 || this.remoteCheckInProgress || this.remoteError) {\n if (this.remoteError) {\n lines.push(\n `${this.prefix} ${x} Remote content: ${colorize(\n this.remoteError,\n ANSIColors.RED\n )}`\n );\n } else if (remoteTotal === 0) {\n lines.push(\n `${this.prefix} ${clock} Remote content: ${colorize('Check server', ANSIColors.BLUE)}`\n );\n } else if (isRemoteDone) {\n lines.push(\n `${this.prefix} ${v} Remote content: ${colorize(`${remoteDone}`, ANSIColors.GREEN)}${colorize(`/${remoteTotal}`, ANSIColors.GREY)}`\n );\n } else {\n lines.push(\n `${this.prefix} ${clock} Remote content: ${colorize(`${remoteDone}`, ANSIColors.BLUE)}${colorize(`/${remoteTotal}`, ANSIColors.GREY)}`\n );\n }\n }\n\n // Plugin line: show error or progress counts\n if (pluginTotal > 0 || this.pluginError) {\n if (this.pluginError) {\n lines.push(\n `${this.prefix} ${x} Plugin content: ${colorize(\n this.pluginError,\n ANSIColors.RED\n )}`\n );\n } else if (isPluginDone) {\n lines.push(\n `${this.prefix} ${v} Plugin content: ${colorize(`${pluginDone}`, ANSIColors.GREEN)}${colorize(`/${pluginTotal}`, ANSIColors.GREY)}`\n );\n } else {\n lines.push(\n `${this.prefix} ${clock} Plugin content: ${colorize(`${pluginDone}`, ANSIColors.BLUE)}${colorize(`/${pluginTotal}`, ANSIColors.GREY)}`\n );\n }\n }\n\n // Check if the state has changed to avoid duplicate rendering\n const currentState = lines.join('\\n');\n if (currentState === this.lastRenderedState) {\n return;\n }\n this.lastRenderedState = currentState;\n\n if (this.renderedLines > 0) {\n process.stdout.write(`\\x1b[${this.renderedLines}F`);\n }\n\n const totalLinesToClear = Math.max(this.renderedLines, lines.length);\n for (let i = 0; i < totalLinesToClear; i++) {\n process.stdout.write('\\x1b[2K');\n const line = lines[i];\n if (line !== undefined) {\n process.stdout.write(line);\n }\n process.stdout.write('\\n');\n }\n\n this.renderedLines = lines.length;\n }\n\n private computeProgress() {\n const localKeys = new Set(\n this.statuses\n .filter((s) => s.type === 'local')\n .map((s) => s.dictionaryKey)\n );\n\n const localDoneKeys = new Set(\n this.statuses\n .filter(\n (s) =>\n s.type === 'local' && (s.status === 'built' || s.status === 'error')\n )\n .map((s) => s.dictionaryKey)\n );\n\n const remoteKeys = new Set(\n this.statuses\n .filter((s) => s.type === 'remote')\n .map((s) => s.dictionaryKey)\n );\n\n const remoteDoneKeys = new Set(\n this.statuses\n .filter(\n (s) =>\n s.type === 'remote' &&\n (s.status === 'fetched' ||\n s.status === 'imported' ||\n s.status === 'error')\n )\n .map((s) => s.dictionaryKey)\n );\n\n return {\n localTotal: localKeys.size,\n localDone: localDoneKeys.size,\n remoteTotal: remoteKeys.size,\n remoteDone: remoteDoneKeys.size,\n pluginTotal: this.pluginTotal,\n pluginDone: this.pluginDone,\n } as const;\n 
}\n}\n"],"mappings":";;;;AAWA,IAAa,qBAAb,MAAgC;CAC9B,AAAQ,WAAiC,EAAE;CAC3C,AAAQ,eAAsC;CAC9C,AAAQ,eAAe;CACvB,AAAQ,gBAAgB;CACxB,AAAiB,gBAAgB;CACjC,AAAQ,aAAa;CACrB,AAAiB;CACjB,AAAQ,oBAA4B;CACpC,AAAQ,wBAAwB;CAChC,AAAQ,eAAe;CACvB,AAAQ;CACR,AAAQ,cAAc;CACtB,AAAQ,aAAa;CACrB,AAAQ;CAER,cAAc;AACZ,OAAK,SAAS,eAAe,KAAK,UAAU;;CAG9C,gBAAgB,QAAiB;AAC/B,OAAK,eAAe;;CAGtB,mBAAmB;AACjB,MAAI,KAAK,WAAY;AACrB,OAAK,wBAAwB;AAC7B,OAAK,cAAc;AACnB,OAAK,QAAQ;;CAGf,kBAAkB;AAChB,OAAK,wBAAwB;;CAG/B,OAAO,aAAmC;AACxC,MAAI,KAAK,WAAY;AACrB,OAAK,MAAM,UAAU,aAAa;GAChC,MAAM,QAAQ,KAAK,SAAS,WACzB,MACC,EAAE,kBAAkB,OAAO,iBAAiB,EAAE,SAAS,OAAO,KACjE;AACD,OAAI,SAAS,EACX,MAAK,SAAS,SAAS;OAEvB,MAAK,SAAS,KAAK,OAAO;;EAK9B,MAAM,EAAE,gBAAgB,KAAK,iBAAiB;AAC9C,MAAI,KAAK,gBAAgB,CAAC,KAAK,yBAAyB,gBAAgB,EAEtE;AAGF,OAAK,cAAc;AACnB,OAAK,QAAQ;;CAGf,SAAS;AACP,OAAK,aAAa;AAClB,OAAK,aAAa;AAElB,OAAK,QAAQ;;CAGf,AAAQ,eAAe;AACrB,MAAI,KAAK,gBAAgB,KAAK,WAAY;AAC1C,OAAK,eAAe,kBAAkB;AACpC,QAAK,gBAAgB,KAAK,eAAe,KAAK,KAAK,cAAc;AACjE,QAAK,QAAQ;KACZ,IAAI;;CAGT,AAAQ,cAAc;AACpB,MAAI,CAAC,KAAK,aAAc;AACxB,gBAAc,KAAK,aAAa;AAChC,OAAK,eAAe;;CAGtB,AAAO,kBAAkB,UAAkB;AACzC,OAAK,cAAc,oBAAoB,MAAM;AAG7C,OAAK,iBAAiB;AACtB,OAAK,QAAQ;;CAGf,eAAe,OAAe;AAC5B,MAAI,KAAK,WAAY;AACrB,OAAK,cAAc;AACnB,MAAI,QAAQ,EACV,MAAK,cAAc;AAErB,OAAK,QAAQ;;CAGf,cAAc,MAAc;AAC1B,MAAI,KAAK,WAAY;AACrB,OAAK,aAAa;AAClB,OAAK,QAAQ;;CAGf,eAAe,OAAe;AAC5B,MAAI,KAAK,WAAY;AACrB,OAAK,cAAc,oBAAoB,MAAM;AAC7C,OAAK,QAAQ;;CAGf,AAAQ,SAAS;EACf,MAAM,EACJ,YACA,WACA,aACA,YACA,aACA,eACE,KAAK,iBAAiB;EAE1B,MAAM,QAAQ,KAAK,cAAc,KAAK;EACtC,MAAM,QAAQ,SAAS,OAAO,WAAW,KAAK;EAC9C,MAAM,QAAkB,EAAE;EAE1B,MAAM,cAAc,cAAc;EAClC,MAAM,eAAe,eAAe;EACpC,MAAM,eAAe,eAAe;AAKpC,MAAI,EAFF,KAAK,gBAAgB,KAAK,yBAAyB,gBAAgB,GAGnE,KAAI,YACF,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,EAAE,kBAAkB,SAAS,GAAG,aAAa,WAAW,MAAM,GAAG,SAAS,IAAI,cAAc,WAAW,KAAK,GAC/H;MAED,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,MAAM,kBAAkB,SAAS,GAAG,aAAa,WAAW,KAAK,GAAG,SAAS,IAAI,cAAc,WAAW,KAAK,GAClI;AAKL,MAAI,cAAc,KAAK,KAAK,yBAAyB,KAAK,YACxD,KAAI,KAAK,YACP,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,EAAE,mBAAmB,SACrC,KAAK,aACL,WAAW,IACZ,GACF;WACQ,gBAAgB,EACzB,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,MAAM,mBAAmB,SAAS,gBAAgB,WAAW,KAAK,GACrF;WACQ,aACT,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,EAAE,mBAAmB,SAAS,GAAG,cAAc,WAAW,MAAM,GAAG,SAAS,IAAI,eAAe,WAAW,KAAK,GAClI;MAED,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,MAAM,mBAAmB,SAAS,GAAG,cAAc,WAAW,KAAK,GAAG,SAAS,IAAI,eAAe,WAAW,KAAK,GACrI;AAKL,MAAI,cAAc,KAAK,KAAK,YAC1B,KAAI,KAAK,YACP,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,EAAE,mBAAmB,SACrC,KAAK,aACL,WAAW,IACZ,GACF;WACQ,aACT,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,EAAE,mBAAmB,SAAS,GAAG,cAAc,WAAW,MAAM,GAAG,SAAS,IAAI,eAAe,WAAW,KAAK,GAClI;MAED,OAAM,KACJ,GAAG,KAAK,OAAO,GAAG,MAAM,mBAAmB,SAAS,GAAG,cAAc,WAAW,KAAK,GAAG,SAAS,IAAI,eAAe,WAAW,KAAK,GACrI;EAKL,MAAM,eAAe,MAAM,KAAK,KAAK;AACrC,MAAI,iBAAiB,KAAK,kBACxB;AAEF,OAAK,oBAAoB;AAEzB,MAAI,KAAK,gBAAgB,EACvB,SAAQ,OAAO,MAAM,QAAQ,KAAK,cAAc,GAAG;EAGrD,MAAM,oBAAoB,KAAK,IAAI,KAAK,eAAe,MAAM,OAAO;AACpE,OAAK,IAAI,IAAI,GAAG,IAAI,mBAAmB,KAAK;AAC1C,WAAQ,OAAO,MAAM,UAAU;GAC/B,MAAM,OAAO,MAAM;AACnB,OAAI,SAAS,OACX,SAAQ,OAAO,MAAM,KAAK;AAE5B,WAAQ,OAAO,MAAM,KAAK;;AAG5B,OAAK,gBAAgB,MAAM;;CAG7B,AAAQ,kBAAkB;EACxB,MAAM,YAAY,IAAI,IACpB,KAAK,SACF,QAAQ,MAAM,EAAE,SAAS,QAAQ,CACjC,KAAK,MAAM,EAAE,cAAc,CAC/B;EAED,MAAM,gBAAgB,IAAI,IACxB,KAAK,SACF,QACE,MACC,EAAE,SAAS,YAAY,EAAE,WAAW,WAAW,EAAE,WAAW,SAC/D,CACA,KAAK,MAAM,EAAE,cAAc,CAC/B;EAED,MAAM,aAAa,IAAI,IACrB,KAAK,SACF,QAAQ,MAAM,EAAE,SAAS,SAAS,CAClC,KAAK,MAAM,EAAE,cAAc,CAC/B;EAED,MAAM,iBAAiB,IAAI,IACzB,KAAK,SACF,QACE,MACC,EAAE,SAAS,aACV,EAAE,WAAW,aACZ,EAAE,WAAW,cACb,EAAE,WAAW,SAClB,CACA,KAAK,MAAM,EAAE,cAAc,CAC/B;AAED,SAAO;GACL,YAAY,UAAU;GA
CtB,WAAW,cAAc;GACzB,aAAa,WAAW;GACxB,YAAY,eAAe;GAC3B,aAAa,KAAK;GAClB,YAAY,KAAK;GAClB"}
package/dist/esm/reduceDictionaryContent/applyMask.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"applyMask.mjs","names":["out: any"],"sources":["../../../src/reduceDictionaryContent/applyMask.ts"],"sourcesContent":["import type { Dictionary } from '@intlayer/types';\n\nexport const applyMask = (full: Dictionary, mask: any): Dictionary => {\n // the mask \"true\" → we don't filter\n if (mask === true) {\n return full;\n }\n\n // arrays\n if (Array.isArray(mask) && Array.isArray(full)) {\n return mask.map((m, i) => applyMask(full[i], m)) as any;\n }\n\n // handle node with nodeType property\n if (full && typeof full === 'object' && 'nodeType' in full) {\n if (mask && typeof mask === 'object') {\n return full; // Keep the full object with nodeType intact\n }\n return full;\n }\n\n // generic object\n if (mask && typeof mask === 'object' && full && typeof full === 'object') {\n const out: any = {};\n const maskEntries = Object.entries(mask);\n const allChildrenAreArrays = maskEntries.every(([, value]) =>\n Array.isArray(value)\n );\n\n for (const [k, m] of maskEntries) {\n const fullValue = (full as any)[k];\n\n // If this child is an array, decide preservation rules.\n // - Preserve when all children at this level are arrays in the mask\n // - Also preserve when the array is an array of translation nodes\n if (Array.isArray(m) && Array.isArray(fullValue)) {\n const isTranslationNode = (val: unknown): boolean =>\n !!val && typeof val === 'object' && 'nodeType' in (val as any);\n const isArrayOfTranslationNodes = fullValue.every((item: any) =>\n isTranslationNode(item)\n );\n\n if (!allChildrenAreArrays && !isArrayOfTranslationNodes) {\n continue; // skip incidental arrays when mixed with non-arrays\n }\n }\n\n out[k] = applyMask(fullValue, m);\n }\n return out;\n }\n\n // unexpected case: we return the original value\n return full;\n};\n"],"mappings":";AAEA,MAAa,aAAa,MAAkB,SAA0B;AAEpE,KAAI,SAAS,KACX,QAAO;AAIT,KAAI,MAAM,QAAQ,KAAK,IAAI,MAAM,QAAQ,KAAK,CAC5C,QAAO,KAAK,KAAK,GAAG,MAAM,UAAU,KAAK,IAAI,EAAE,CAAC;AAIlD,KAAI,QAAQ,OAAO,SAAS,YAAY,cAAc,MAAM;AAC1D,MAAI,QAAQ,OAAO,SAAS,SAC1B,QAAO;AAET,SAAO;;AAIT,KAAI,QAAQ,OAAO,SAAS,YAAY,QAAQ,OAAO,SAAS,UAAU;EACxE,MAAMA,MAAW,EAAE;EACnB,MAAM,cAAc,OAAO,QAAQ,KAAK;EACxC,MAAM,uBAAuB,YAAY,OAAO,GAAG,WACjD,MAAM,QAAQ,MAAM,CACrB;AAED,OAAK,MAAM,CAAC,GAAG,MAAM,aAAa;GAChC,MAAM,YAAa,KAAa;AAKhC,OAAI,MAAM,QAAQ,EAAE,IAAI,MAAM,QAAQ,UAAU,EAAE;IAChD,MAAM,qBAAqB,QACzB,CAAC,CAAC,OAAO,OAAO,QAAQ,YAAY,cAAe;IACrD,MAAM,4BAA4B,UAAU,OAAO,SACjD,kBAAkB,KAAK,CACxB;AAED,QAAI,CAAC,wBAAwB,CAAC,0BAC5B;;AAIJ,OAAI,KAAK,UAAU,WAAW,EAAE;;AAElC,SAAO;;AAIT,QAAO"}
+ {"version":3,"file":"applyMask.mjs","names":[],"sources":["../../../src/reduceDictionaryContent/applyMask.ts"],"sourcesContent":["import type { Dictionary } from '@intlayer/types';\n\nexport const applyMask = (full: Dictionary, mask: any): Dictionary => {\n // the mask \"true\" → we don't filter\n if (mask === true) {\n return full;\n }\n\n // arrays\n if (Array.isArray(mask) && Array.isArray(full)) {\n return mask.map((m, i) => applyMask(full[i], m)) as any;\n }\n\n // handle node with nodeType property\n if (full && typeof full === 'object' && 'nodeType' in full) {\n if (mask && typeof mask === 'object') {\n return full; // Keep the full object with nodeType intact\n }\n return full;\n }\n\n // generic object\n if (mask && typeof mask === 'object' && full && typeof full === 'object') {\n const out: any = {};\n const maskEntries = Object.entries(mask);\n const allChildrenAreArrays = maskEntries.every(([, value]) =>\n Array.isArray(value)\n );\n\n for (const [k, m] of maskEntries) {\n const fullValue = (full as any)[k];\n\n // If this child is an array, decide preservation rules.\n // - Preserve when all children at this level are arrays in the mask\n // - Also preserve when the array is an array of translation nodes\n if (Array.isArray(m) && Array.isArray(fullValue)) {\n const isTranslationNode = (val: unknown): boolean =>\n !!val && typeof val === 'object' && 'nodeType' in (val as any);\n const isArrayOfTranslationNodes = fullValue.every((item: any) =>\n isTranslationNode(item)\n );\n\n if (!allChildrenAreArrays && !isArrayOfTranslationNodes) {\n continue; // skip incidental arrays when mixed with non-arrays\n }\n }\n\n out[k] = applyMask(fullValue, m);\n }\n return out;\n }\n\n // unexpected case: we return the original value\n return full;\n};\n"],"mappings":";AAEA,MAAa,aAAa,MAAkB,SAA0B;AAEpE,KAAI,SAAS,KACX,QAAO;AAIT,KAAI,MAAM,QAAQ,KAAK,IAAI,MAAM,QAAQ,KAAK,CAC5C,QAAO,KAAK,KAAK,GAAG,MAAM,UAAU,KAAK,IAAI,EAAE,CAAC;AAIlD,KAAI,QAAQ,OAAO,SAAS,YAAY,cAAc,MAAM;AAC1D,MAAI,QAAQ,OAAO,SAAS,SAC1B,QAAO;AAET,SAAO;;AAIT,KAAI,QAAQ,OAAO,SAAS,YAAY,QAAQ,OAAO,SAAS,UAAU;EACxE,MAAM,MAAW,EAAE;EACnB,MAAM,cAAc,OAAO,QAAQ,KAAK;EACxC,MAAM,uBAAuB,YAAY,OAAO,GAAG,WACjD,MAAM,QAAQ,MAAM,CACrB;AAED,OAAK,MAAM,CAAC,GAAG,MAAM,aAAa;GAChC,MAAM,YAAa,KAAa;AAKhC,OAAI,MAAM,QAAQ,EAAE,IAAI,MAAM,QAAQ,UAAU,EAAE;IAChD,MAAM,qBAAqB,QACzB,CAAC,CAAC,OAAO,OAAO,QAAQ,YAAY,cAAe;IACrD,MAAM,4BAA4B,UAAU,OAAO,SACjD,kBAAkB,KAAK,CACxB;AAED,QAAI,CAAC,wBAAwB,CAAC,0BAC5B;;AAIJ,OAAI,KAAK,UAAU,WAAW,EAAE;;AAElC,SAAO;;AAIT,QAAO"}
package/dist/esm/transformFiles/transformFiles.mjs.map
@@ -1 +1 @@
- {"version":3,"file":"transformFiles.mjs","names":["dictionary: Dictionary","multilingualContent: Record<string, TranslationNode>","extractedContent: Record<string, string>","replacements: TsReplacement[]","sourceFile: SourceFile","v","extractedContent: Record<string, string> | null","error: any"],"sources":["../../../src/transformFiles/transformFiles.ts"],"sourcesContent":["import { execSync } from 'node:child_process';\nimport fs from 'node:fs/promises';\nimport { basename, dirname, extname, join, relative, resolve } from 'node:path';\nimport {\n ANSIColors,\n camelCaseToKebabCase,\n colorizePath,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary, IntlayerConfig } from '@intlayer/types';\nimport { Node, Project, type SourceFile, SyntaxKind } from 'ts-morph';\nimport { writeContentDeclaration } from '../writeContentDeclaration';\nimport { detectFormatCommand } from '../writeContentDeclaration/detectFormatCommand';\nimport { extractDictionaryKey } from './extractDictionaryKey';\n\n// ==========================================\n// 1. Shared Utilities (exported for reuse in babel plugin)\n// ==========================================\n\n/**\n * Attributes that should be extracted for localization\n */\nexport const ATTRIBUTES_TO_EXTRACT = [\n 'title',\n 'placeholder',\n 'alt',\n 'aria-label',\n 'label',\n];\n\n/**\n * Default function to determine if a string should be extracted for localization\n */\nexport const shouldExtract = (text: string): boolean => {\n const trimmed = text.trim();\n if (!trimmed) return false;\n if (!trimmed.includes(' ')) return false;\n // Starts with Capital letter\n if (!/^[A-Z]/.test(trimmed)) return false;\n // Filter out template logic identifiers (simple check)\n if (trimmed.startsWith('{') || trimmed.startsWith('v-')) return false;\n return true;\n};\n\n/**\n * Generate a unique key from text for use as a dictionary key\n */\nexport const generateKey = (\n text: string,\n existingKeys: Set<string>\n): string => {\n const maxWords = 5;\n let key = text\n .replace(/\\s+/g, ' ')\n .replace(/_+/g, ' ')\n .replace(/-+/g, ' ')\n .replace(/[^a-zA-Z0-9 ]/g, '')\n .trim()\n .split(' ')\n .filter(Boolean)\n .slice(0, maxWords)\n .map((word, index) =>\n index === 0\n ? word.toLowerCase()\n : word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()\n )\n .join('');\n\n if (!key) key = 'content';\n if (existingKeys.has(key)) {\n let i = 1;\n while (existingKeys.has(`${key}${i}`)) i++;\n key = `${key}${i}`;\n }\n return key;\n};\n\n/**\n * Translation node structure used in multilingual dictionaries\n */\ntype TranslationNode = {\n nodeType: 'translation';\n translation: Record<string, string>;\n};\n\nconst writeContentHelper = async (\n extractedContent: Record<string, string>,\n componentKey: string,\n filePath: string,\n configuration: IntlayerConfig,\n outputDir?: string\n) => {\n const { defaultLocale } = configuration.internationalization;\n const { baseDir, fileExtensions } = configuration.content;\n\n const isPerLocaleFile = configuration?.dictionary?.locale;\n\n const dirName = outputDir ? 
resolve(outputDir) : dirname(filePath);\n const ext = extname(filePath);\n const baseName = basename(filePath, ext);\n const contentBaseName = baseName.charAt(0).toLowerCase() + baseName.slice(1);\n\n const contentFilePath = join(\n dirName,\n `${contentBaseName}.${fileExtensions[0]}`\n );\n const relativeContentFilePath = relative(baseDir, contentFilePath);\n\n let dictionary: Dictionary;\n\n if (isPerLocaleFile) {\n // Per-locale format: simple string content with locale property\n dictionary = {\n key: componentKey,\n content: extractedContent,\n locale: defaultLocale,\n filePath: relativeContentFilePath,\n };\n } else {\n // Multilingual format: content wrapped in translation nodes, no locale property\n const multilingualContent: Record<string, TranslationNode> = {};\n for (const [key, value] of Object.entries(extractedContent)) {\n multilingualContent[key] = {\n nodeType: 'translation',\n translation: {\n [defaultLocale]: value,\n },\n };\n }\n\n dictionary = {\n key: componentKey,\n content: multilingualContent,\n filePath: relativeContentFilePath,\n };\n }\n\n const relativeDir = relative(baseDir, dirName);\n await writeContentDeclaration(dictionary, configuration, {\n newDictionariesPath: relativeDir,\n });\n\n return contentFilePath;\n};\n\ntype TsReplacement = {\n node: Node;\n key: string;\n type: 'jsx-text' | 'jsx-attribute' | 'string-literal';\n};\n\nconst extractTsContent = (\n sourceFile: SourceFile,\n existingKeys: Set<string>\n): {\n extractedContent: Record<string, string>;\n replacements: TsReplacement[];\n} => {\n const extractedContent: Record<string, string> = {};\n const replacements: TsReplacement[] = [];\n\n sourceFile.forEachDescendant((node) => {\n // 1. JSX Text\n if (Node.isJsxText(node)) {\n const text = node.getText();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.replace(/\\s+/g, ' ').trim();\n replacements.push({ node, key, type: 'jsx-text' });\n }\n }\n\n // 2. JSX Attributes\n else if (Node.isJsxAttribute(node)) {\n const name = node.getNameNode().getText();\n if (ATTRIBUTES_TO_EXTRACT.includes(name)) {\n const initializer = node.getInitializer();\n if (Node.isStringLiteral(initializer)) {\n const text = initializer.getLiteralValue();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'jsx-attribute' });\n }\n }\n }\n }\n\n // 3. 
String Literals (Variables, Arrays, etc.)\n else if (Node.isStringLiteral(node)) {\n const text = node.getLiteralValue();\n if (shouldExtract(text)) {\n const parent = node.getParent();\n\n // Skip if inside ImportDeclaration\n if (\n parent?.getKindName() === 'ImportDeclaration' ||\n parent?.getKindName() === 'ImportSpecifier' ||\n parent?.getKindName() === 'ModuleSpecifier'\n ) {\n return;\n }\n\n // Skip if it's a JSX Attribute value (handled above)\n if (Node.isJsxAttribute(parent)) return;\n\n // Skip console.log\n if (Node.isCallExpression(parent)) {\n const expression = parent.getExpression();\n if (expression.getText().includes('console.log')) return;\n }\n\n // Skip Object Keys: { key: \"value\" } -> \"key\" is PropertyAssignment name if not computed\n if (Node.isPropertyAssignment(parent)) {\n if (parent.getNameNode() === node) return; // It's the key\n }\n\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'string-literal' });\n }\n }\n });\n\n return { extractedContent, replacements };\n};\n\n// ==========================================\n// 2. React (TS-Morph) Strategy\n// ==========================================\n\nconst processReactFile = async (\n filePath: string,\n componentKey: string,\n packageName: string,\n project: Project,\n save: boolean = true\n) => {\n let sourceFile: SourceFile;\n try {\n sourceFile = project.addSourceFileAtPath(filePath);\n } catch {\n sourceFile = project.getSourceFileOrThrow(filePath);\n }\n\n const existingKeys = new Set<string>();\n const { extractedContent, replacements } = extractTsContent(\n sourceFile,\n existingKeys\n );\n\n if (Object.keys(extractedContent).length === 0) return null;\n\n for (const { node, key, type } of replacements) {\n if (type === 'jsx-text' && Node.isJsxText(node)) {\n node.replaceWithText(`{content.${key}}`);\n } else if (type === 'jsx-attribute' && Node.isJsxAttribute(node)) {\n node.setInitializer(`{content.${key}.value}`);\n } else if (type === 'string-literal' && Node.isStringLiteral(node)) {\n // For React/JS variables, we usually want the value\n node.replaceWithText(`content.${key}.value`);\n }\n }\n\n // Inject hook\n const importDecl = sourceFile.getImportDeclaration(\n (d) => d.getModuleSpecifierValue() === packageName\n );\n if (!importDecl) {\n sourceFile.addImportDeclaration({\n namedImports: ['useIntlayer'],\n moduleSpecifier: packageName,\n });\n } else if (\n !importDecl.getNamedImports().some((n) => n.getName() === 'useIntlayer')\n ) {\n importDecl.addNamedImport('useIntlayer');\n }\n\n // Insert hook at start of component\n sourceFile.getFunctions().forEach((f) => {\n f.getBody()\n ?.asKind(SyntaxKind.Block)\n ?.insertStatements(0, `const content = useIntlayer(\"${componentKey}\");`);\n });\n\n // Also handle const/arrow components\n sourceFile.getVariableDeclarations().forEach((v) => {\n const init = v.getInitializer();\n if (Node.isArrowFunction(init) || Node.isFunctionExpression(init)) {\n const body = init.getBody();\n if (Node.isBlock(body)) {\n // Heuristic: check if it returns JSX or uses hooks\n if (\n body.getText().includes('return') ||\n body.getText().includes('use')\n ) {\n body.insertStatements(\n 0,\n `const content = useIntlayer(\"${componentKey}\");`\n );\n }\n }\n }\n });\n\n if (save) {\n await sourceFile.save();\n }\n return extractedContent;\n};\n\n// ==========================================\n// 5. 
Main Dispatcher\n// ==========================================\n\nexport type PackageName =\n | 'next-intlayer'\n | 'react-intlayer'\n | 'vue-intlayer'\n | 'svelte-intlayer'\n | 'preact-intlayer'\n | 'solid-intlayer'\n | 'angular-intlayer'\n | 'express-intlayer';\n\nexport type ExtractIntlayerOptions = {\n configOptions?: GetConfigurationOptions;\n outputDir?: string;\n codeOnly?: boolean;\n declarationOnly?: boolean;\n};\n\nexport const extractIntlayer = async (\n filePath: string,\n packageName: PackageName,\n options?: ExtractIntlayerOptions,\n project?: Project\n) => {\n const saveComponent = !options?.declarationOnly;\n const writeContent = !options?.codeOnly;\n\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n const { baseDir } = configuration.content;\n\n // Setup Project for TS/React files if needed\n const _project =\n project || new Project({ skipAddingFilesFromTsConfig: true });\n\n const baseName = extractDictionaryKey(\n filePath,\n (await fs.readFile(filePath)).toString()\n );\n const componentKey = camelCaseToKebabCase(baseName);\n const ext = extname(filePath);\n\n let extractedContent: Record<string, string> | null = null;\n\n if (ext === '.vue') {\n try {\n const { processVueFile } = await import('@intlayer/vue-transformer');\n extractedContent = await processVueFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/vue-transformer', ANSIColors.YELLOW)} to process Vue files.`\n );\n }\n throw error;\n }\n } else if (ext === '.svelte') {\n try {\n const { processSvelteFile } = (await import(\n '@intlayer/svelte-transformer'\n )) as any;\n extractedContent = await processSvelteFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/svelte-transformer', ANSIColors.YELLOW)} to process Svelte files.`\n );\n }\n throw error;\n }\n } else if (['.tsx', '.jsx', '.ts', '.js'].includes(ext)) {\n extractedContent = await processReactFile(\n filePath,\n componentKey,\n packageName,\n _project,\n saveComponent\n );\n }\n\n if (!extractedContent) {\n appLogger(`No extractable text found in ${baseName}`);\n return;\n }\n\n // Shared Write Logic\n if (writeContent) {\n const contentFilePath = await writeContentHelper(\n extractedContent,\n componentKey,\n filePath,\n configuration,\n options?.outputDir\n );\n\n const relativeContentFilePath = relative(\n configuration.content.baseDir,\n contentFilePath\n );\n appLogger(`Created content file: ${colorizePath(relativeContentFilePath)}`);\n }\n\n // Optional: Format\n if (saveComponent) {\n try {\n const formatCommand = detectFormatCommand(configuration);\n if (formatCommand) {\n execSync(formatCommand.replace('{{file}}', filePath), {\n stdio: 'ignore', // Silent\n cwd: baseDir,\n });\n }\n } catch {\n // Ignore format errors\n }\n\n appLogger(\n `Updated component: ${colorizePath(relative(baseDir, filePath))}`\n );\n }\n};\n\nexport const transformFiles = async (\n filePaths: string[],\n packageName: PackageName,\n options?: ExtractIntlayerOptions\n) 
=> {\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const project = new Project({\n skipAddingFilesFromTsConfig: true,\n });\n\n for (const filePath of filePaths) {\n try {\n await extractIntlayer(filePath, packageName, options, project);\n } catch (error) {\n appLogger(`Failed to transform ${filePath}: ${(error as Error).message}`);\n }\n }\n};\n"],"mappings":";;;;;;;;;;;;;AAwBA,MAAa,wBAAwB;CACnC;CACA;CACA;CACA;CACA;CACD;;;;AAKD,MAAa,iBAAiB,SAA0B;CACtD,MAAM,UAAU,KAAK,MAAM;AAC3B,KAAI,CAAC,QAAS,QAAO;AACrB,KAAI,CAAC,QAAQ,SAAS,IAAI,CAAE,QAAO;AAEnC,KAAI,CAAC,SAAS,KAAK,QAAQ,CAAE,QAAO;AAEpC,KAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,WAAW,KAAK,CAAE,QAAO;AAChE,QAAO;;;;;AAMT,MAAa,eACX,MACA,iBACW;CAEX,IAAI,MAAM,KACP,QAAQ,QAAQ,IAAI,CACpB,QAAQ,OAAO,IAAI,CACnB,QAAQ,OAAO,IAAI,CACnB,QAAQ,kBAAkB,GAAG,CAC7B,MAAM,CACN,MAAM,IAAI,CACV,OAAO,QAAQ,CACf,MAAM,GATQ,EASI,CAClB,KAAK,MAAM,UACV,UAAU,IACN,KAAK,aAAa,GAClB,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE,CAAC,aAAa,CAC/D,CACA,KAAK,GAAG;AAEX,KAAI,CAAC,IAAK,OAAM;AAChB,KAAI,aAAa,IAAI,IAAI,EAAE;EACzB,IAAI,IAAI;AACR,SAAO,aAAa,IAAI,GAAG,MAAM,IAAI,CAAE;AACvC,QAAM,GAAG,MAAM;;AAEjB,QAAO;;AAWT,MAAM,qBAAqB,OACzB,kBACA,cACA,UACA,eACA,cACG;CACH,MAAM,EAAE,kBAAkB,cAAc;CACxC,MAAM,EAAE,SAAS,mBAAmB,cAAc;CAElD,MAAM,kBAAkB,eAAe,YAAY;CAEnD,MAAM,UAAU,YAAY,QAAQ,UAAU,GAAG,QAAQ,SAAS;CAElE,MAAM,WAAW,SAAS,UADd,QAAQ,SAAS,CACW;CAGxC,MAAM,kBAAkB,KACtB,SACA,GAJsB,SAAS,OAAO,EAAE,CAAC,aAAa,GAAG,SAAS,MAAM,EAAE,CAIvD,GAAG,eAAe,KACtC;CACD,MAAM,0BAA0B,SAAS,SAAS,gBAAgB;CAElE,IAAIA;AAEJ,KAAI,gBAEF,cAAa;EACX,KAAK;EACL,SAAS;EACT,QAAQ;EACR,UAAU;EACX;MACI;EAEL,MAAMC,sBAAuD,EAAE;AAC/D,OAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,iBAAiB,CACzD,qBAAoB,OAAO;GACzB,UAAU;GACV,aAAa,GACV,gBAAgB,OAClB;GACF;AAGH,eAAa;GACX,KAAK;GACL,SAAS;GACT,UAAU;GACX;;CAGH,MAAM,cAAc,SAAS,SAAS,QAAQ;AAC9C,OAAM,wBAAwB,YAAY,eAAe,EACvD,qBAAqB,aACtB,CAAC;AAEF,QAAO;;AAST,MAAM,oBACJ,YACA,iBAIG;CACH,MAAMC,mBAA2C,EAAE;CACnD,MAAMC,eAAgC,EAAE;AAExC,YAAW,mBAAmB,SAAS;AAErC,MAAI,KAAK,UAAU,KAAK,EAAE;GACxB,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,QAAQ,QAAQ,IAAI,CAAC,MAAM;AACxD,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAY,CAAC;;aAK7C,KAAK,eAAe,KAAK,EAAE;GAClC,MAAM,OAAO,KAAK,aAAa,CAAC,SAAS;AACzC,OAAI,sBAAsB,SAAS,KAAK,EAAE;IACxC,MAAM,cAAc,KAAK,gBAAgB;AACzC,QAAI,KAAK,gBAAgB,YAAY,EAAE;KACrC,MAAM,OAAO,YAAY,iBAAiB;AAC1C,SAAI,cAAc,KAAK,EAAE;MACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,mBAAa,IAAI,IAAI;AACrB,uBAAiB,OAAO,KAAK,MAAM;AACnC,mBAAa,KAAK;OAAE;OAAM;OAAK,MAAM;OAAiB,CAAC;;;;aAOtD,KAAK,gBAAgB,KAAK,EAAE;GACnC,MAAM,OAAO,KAAK,iBAAiB;AACnC,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,SAAS,KAAK,WAAW;AAG/B,QACE,QAAQ,aAAa,KAAK,uBAC1B,QAAQ,aAAa,KAAK,qBAC1B,QAAQ,aAAa,KAAK,kBAE1B;AAIF,QAAI,KAAK,eAAe,OAAO,CAAE;AAGjC,QAAI,KAAK,iBAAiB,OAAO,EAE/B;SADmB,OAAO,eAAe,CAC1B,SAAS,CAAC,SAAS,cAAc,CAAE;;AAIpD,QAAI,KAAK,qBAAqB,OAAO,EACnC;SAAI,OAAO,aAAa,KAAK,KAAM;;IAGrC,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,MAAM;AACnC,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAkB,CAAC;;;GAG5D;AAEF,QAAO;EAAE;EAAkB;EAAc;;AAO3C,MAAM,mBAAmB,OACvB,UACA,cACA,aACA,SACA,OAAgB,SACb;CACH,IAAIC;AACJ,KAAI;AACF,eAAa,QAAQ,oBAAoB,SAAS;SAC5C;AACN,eAAa,QAAQ,qBAAqB,SAAS;;CAIrD,MAAM,EAAE,kBAAkB,iBAAiB,iBACzC,4BAFmB,IAAI,KAAa,CAIrC;AAED,KAAI,OAAO,KAAK,iBAAiB,CAAC,WAAW,EAAG,QAAO;AAEvD,MAAK,MAAM,EAAE,MAAM,KAAK,UAAU,aAChC,KAAI,SAAS,cAAc,KAAK,UAAU,KAAK,CAC7C,MAAK,gBAAgB,YAAY,IAAI,GAAG;UAC/B,SAAS,mBAAmB,KAAK,eAAe,KAAK,CAC9D,MAAK,eAAe,YAAY,IAAI,SAAS;UACpC,SAAS,oBAAoB,KAAK,gBAAgB,KAAK,CAEhE,MAAK,gBAAg
B,WAAW,IAAI,QAAQ;CAKhD,MAAM,aAAa,WAAW,sBAC3B,MAAM,EAAE,yBAAyB,KAAK,YACxC;AACD,KAAI,CAAC,WACH,YAAW,qBAAqB;EAC9B,cAAc,CAAC,cAAc;EAC7B,iBAAiB;EAClB,CAAC;UAEF,CAAC,WAAW,iBAAiB,CAAC,MAAM,MAAM,EAAE,SAAS,KAAK,cAAc,CAExE,YAAW,eAAe,cAAc;AAI1C,YAAW,cAAc,CAAC,SAAS,MAAM;AACvC,IAAE,SAAS,EACP,OAAO,WAAW,MAAM,EACxB,iBAAiB,GAAG,gCAAgC,aAAa,KAAK;GAC1E;AAGF,YAAW,yBAAyB,CAAC,SAAS,QAAM;EAClD,MAAM,OAAOC,IAAE,gBAAgB;AAC/B,MAAI,KAAK,gBAAgB,KAAK,IAAI,KAAK,qBAAqB,KAAK,EAAE;GACjE,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAI,KAAK,QAAQ,KAAK,EAEpB;QACE,KAAK,SAAS,CAAC,SAAS,SAAS,IACjC,KAAK,SAAS,CAAC,SAAS,MAAM,CAE9B,MAAK,iBACH,GACA,gCAAgC,aAAa,KAC9C;;;GAIP;AAEF,KAAI,KACF,OAAM,WAAW,MAAM;AAEzB,QAAO;;AAwBT,MAAa,kBAAkB,OAC7B,UACA,aACA,SACA,YACG;CACH,MAAM,gBAAgB,CAAC,SAAS;CAChC,MAAM,eAAe,CAAC,SAAS;CAE/B,MAAM,gBAAgB,iBAAiB,SAAS,cAAc;CAC9D,MAAM,YAAY,aAAa,cAAc;CAC7C,MAAM,EAAE,YAAY,cAAc;CAGlC,MAAM,WACJ,WAAW,IAAI,QAAQ,EAAE,6BAA6B,MAAM,CAAC;CAE/D,MAAM,WAAW,qBACf,WACC,MAAM,GAAG,SAAS,SAAS,EAAE,UAAU,CACzC;CACD,MAAM,eAAe,qBAAqB,SAAS;CACnD,MAAM,MAAM,QAAQ,SAAS;CAE7B,IAAIC,mBAAkD;AAEtD,KAAI,QAAQ,OACV,KAAI;EACF,MAAM,EAAE,mBAAmB,MAAM,OAAO;AACxC,qBAAmB,MAAM,eACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACMC,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,kBAAkB,aAAa,6BAA6B,WAAW,OAAO,CAAC,wBAChF;AAEH,QAAM;;UAEC,QAAQ,UACjB,KAAI;EACF,MAAM,EAAE,sBAAuB,MAAM,OACnC;AAEF,qBAAmB,MAAM,kBACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACMA,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,kBAAkB,aAAa,gCAAgC,WAAW,OAAO,CAAC,2BACnF;AAEH,QAAM;;UAEC;EAAC;EAAQ;EAAQ;EAAO;EAAM,CAAC,SAAS,IAAI,CACrD,oBAAmB,MAAM,iBACvB,UACA,cACA,aACA,UACA,cACD;AAGH,KAAI,CAAC,kBAAkB;AACrB,YAAU,gCAAgC,WAAW;AACrD;;AAIF,KAAI,cAAc;EAChB,MAAM,kBAAkB,MAAM,mBAC5B,kBACA,cACA,UACA,eACA,SAAS,UACV;AAMD,YAAU,yBAAyB,aAJH,SAC9B,cAAc,QAAQ,SACtB,gBACD,CACuE,GAAG;;AAI7E,KAAI,eAAe;AACjB,MAAI;GACF,MAAM,gBAAgB,oBAAoB,cAAc;AACxD,OAAI,cACF,UAAS,cAAc,QAAQ,YAAY,SAAS,EAAE;IACpD,OAAO;IACP,KAAK;IACN,CAAC;UAEE;AAIR,YACE,sBAAsB,aAAa,SAAS,SAAS,SAAS,CAAC,GAChE;;;AAIL,MAAa,iBAAiB,OAC5B,WACA,aACA,YACG;CAEH,MAAM,YAAY,aADI,iBAAiB,SAAS,cAAc,CACjB;CAE7C,MAAM,UAAU,IAAI,QAAQ,EAC1B,6BAA6B,MAC9B,CAAC;AAEF,MAAK,MAAM,YAAY,UACrB,KAAI;AACF,QAAM,gBAAgB,UAAU,aAAa,SAAS,QAAQ;UACvD,OAAO;AACd,YAAU,uBAAuB,SAAS,IAAK,MAAgB,UAAU"}
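For context on the extraction heuristic carried in the `sourcesContent` above: `shouldExtract` is the default filter deciding which string literals get localized. Below is a minimal, self-contained restatement (the function body is copied from the embedded source) together with a few illustrative inputs; the sample strings are my own, not from the package:

```ts
// Copied from the embedded sourcesContent: only multi-word,
// capitalized, non-template strings are extracted.
const shouldExtract = (text: string): boolean => {
  const trimmed = text.trim();
  if (!trimmed) return false;
  if (!trimmed.includes(' ')) return false;
  // Starts with a capital letter
  if (!/^[A-Z]/.test(trimmed)) return false;
  // Filter out template logic identifiers (simple check)
  if (trimmed.startsWith('{') || trimmed.startsWith('v-')) return false;
  return true;
};

// Illustrative inputs (hypothetical, for demonstration only):
shouldExtract('Welcome back');        // true:  multi-word, capitalized
shouldExtract('submit');              // false: single word
shouldExtract('{{ count }} items');   // false: template expression
shouldExtract('v-if condition');      // false: Vue directive prefix
```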
+ {"version":3,"file":"transformFiles.mjs","names":["v"],"sources":["../../../src/transformFiles/transformFiles.ts"],"sourcesContent":["import { execSync } from 'node:child_process';\nimport fs from 'node:fs/promises';\nimport { basename, dirname, extname, join, relative, resolve } from 'node:path';\nimport {\n ANSIColors,\n camelCaseToKebabCase,\n colorizePath,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary, IntlayerConfig } from '@intlayer/types';\nimport { Node, Project, type SourceFile, SyntaxKind } from 'ts-morph';\nimport { writeContentDeclaration } from '../writeContentDeclaration';\nimport { detectFormatCommand } from '../writeContentDeclaration/detectFormatCommand';\nimport { extractDictionaryKey } from './extractDictionaryKey';\n\n// ==========================================\n// 1. Shared Utilities (exported for reuse in babel plugin)\n// ==========================================\n\n/**\n * Attributes that should be extracted for localization\n */\nexport const ATTRIBUTES_TO_EXTRACT = [\n 'title',\n 'placeholder',\n 'alt',\n 'aria-label',\n 'label',\n];\n\n/**\n * Default function to determine if a string should be extracted for localization\n */\nexport const shouldExtract = (text: string): boolean => {\n const trimmed = text.trim();\n if (!trimmed) return false;\n if (!trimmed.includes(' ')) return false;\n // Starts with Capital letter\n if (!/^[A-Z]/.test(trimmed)) return false;\n // Filter out template logic identifiers (simple check)\n if (trimmed.startsWith('{') || trimmed.startsWith('v-')) return false;\n return true;\n};\n\n/**\n * Generate a unique key from text for use as a dictionary key\n */\nexport const generateKey = (\n text: string,\n existingKeys: Set<string>\n): string => {\n const maxWords = 5;\n let key = text\n .replace(/\\s+/g, ' ')\n .replace(/_+/g, ' ')\n .replace(/-+/g, ' ')\n .replace(/[^a-zA-Z0-9 ]/g, '')\n .trim()\n .split(' ')\n .filter(Boolean)\n .slice(0, maxWords)\n .map((word, index) =>\n index === 0\n ? word.toLowerCase()\n : word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()\n )\n .join('');\n\n if (!key) key = 'content';\n if (existingKeys.has(key)) {\n let i = 1;\n while (existingKeys.has(`${key}${i}`)) i++;\n key = `${key}${i}`;\n }\n return key;\n};\n\n/**\n * Translation node structure used in multilingual dictionaries\n */\ntype TranslationNode = {\n nodeType: 'translation';\n translation: Record<string, string>;\n};\n\nconst writeContentHelper = async (\n extractedContent: Record<string, string>,\n componentKey: string,\n filePath: string,\n configuration: IntlayerConfig,\n outputDir?: string\n) => {\n const { defaultLocale } = configuration.internationalization;\n const { baseDir, fileExtensions } = configuration.content;\n\n const isPerLocaleFile = configuration?.dictionary?.locale;\n\n const dirName = outputDir ? 
resolve(outputDir) : dirname(filePath);\n const ext = extname(filePath);\n const baseName = basename(filePath, ext);\n const contentBaseName = baseName.charAt(0).toLowerCase() + baseName.slice(1);\n\n const contentFilePath = join(\n dirName,\n `${contentBaseName}.${fileExtensions[0]}`\n );\n const relativeContentFilePath = relative(baseDir, contentFilePath);\n\n let dictionary: Dictionary;\n\n if (isPerLocaleFile) {\n // Per-locale format: simple string content with locale property\n dictionary = {\n key: componentKey,\n content: extractedContent,\n locale: defaultLocale,\n filePath: relativeContentFilePath,\n };\n } else {\n // Multilingual format: content wrapped in translation nodes, no locale property\n const multilingualContent: Record<string, TranslationNode> = {};\n for (const [key, value] of Object.entries(extractedContent)) {\n multilingualContent[key] = {\n nodeType: 'translation',\n translation: {\n [defaultLocale]: value,\n },\n };\n }\n\n dictionary = {\n key: componentKey,\n content: multilingualContent,\n filePath: relativeContentFilePath,\n };\n }\n\n const relativeDir = relative(baseDir, dirName);\n await writeContentDeclaration(dictionary, configuration, {\n newDictionariesPath: relativeDir,\n });\n\n return contentFilePath;\n};\n\ntype TsReplacement = {\n node: Node;\n key: string;\n type: 'jsx-text' | 'jsx-attribute' | 'string-literal';\n};\n\nconst extractTsContent = (\n sourceFile: SourceFile,\n existingKeys: Set<string>\n): {\n extractedContent: Record<string, string>;\n replacements: TsReplacement[];\n} => {\n const extractedContent: Record<string, string> = {};\n const replacements: TsReplacement[] = [];\n\n sourceFile.forEachDescendant((node) => {\n // 1. JSX Text\n if (Node.isJsxText(node)) {\n const text = node.getText();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.replace(/\\s+/g, ' ').trim();\n replacements.push({ node, key, type: 'jsx-text' });\n }\n }\n\n // 2. JSX Attributes\n else if (Node.isJsxAttribute(node)) {\n const name = node.getNameNode().getText();\n if (ATTRIBUTES_TO_EXTRACT.includes(name)) {\n const initializer = node.getInitializer();\n if (Node.isStringLiteral(initializer)) {\n const text = initializer.getLiteralValue();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'jsx-attribute' });\n }\n }\n }\n }\n\n // 3. 
String Literals (Variables, Arrays, etc.)\n else if (Node.isStringLiteral(node)) {\n const text = node.getLiteralValue();\n if (shouldExtract(text)) {\n const parent = node.getParent();\n\n // Skip if inside ImportDeclaration\n if (\n parent?.getKindName() === 'ImportDeclaration' ||\n parent?.getKindName() === 'ImportSpecifier' ||\n parent?.getKindName() === 'ModuleSpecifier'\n ) {\n return;\n }\n\n // Skip if it's a JSX Attribute value (handled above)\n if (Node.isJsxAttribute(parent)) return;\n\n // Skip console.log\n if (Node.isCallExpression(parent)) {\n const expression = parent.getExpression();\n if (expression.getText().includes('console.log')) return;\n }\n\n // Skip Object Keys: { key: \"value\" } -> \"key\" is PropertyAssignment name if not computed\n if (Node.isPropertyAssignment(parent)) {\n if (parent.getNameNode() === node) return; // It's the key\n }\n\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'string-literal' });\n }\n }\n });\n\n return { extractedContent, replacements };\n};\n\n// ==========================================\n// 2. React (TS-Morph) Strategy\n// ==========================================\n\nconst processReactFile = async (\n filePath: string,\n componentKey: string,\n packageName: string,\n project: Project,\n save: boolean = true\n) => {\n let sourceFile: SourceFile;\n try {\n sourceFile = project.addSourceFileAtPath(filePath);\n } catch {\n sourceFile = project.getSourceFileOrThrow(filePath);\n }\n\n const existingKeys = new Set<string>();\n const { extractedContent, replacements } = extractTsContent(\n sourceFile,\n existingKeys\n );\n\n if (Object.keys(extractedContent).length === 0) return null;\n\n for (const { node, key, type } of replacements) {\n if (type === 'jsx-text' && Node.isJsxText(node)) {\n node.replaceWithText(`{content.${key}}`);\n } else if (type === 'jsx-attribute' && Node.isJsxAttribute(node)) {\n node.setInitializer(`{content.${key}.value}`);\n } else if (type === 'string-literal' && Node.isStringLiteral(node)) {\n // For React/JS variables, we usually want the value\n node.replaceWithText(`content.${key}.value`);\n }\n }\n\n // Inject hook\n const importDecl = sourceFile.getImportDeclaration(\n (d) => d.getModuleSpecifierValue() === packageName\n );\n if (!importDecl) {\n sourceFile.addImportDeclaration({\n namedImports: ['useIntlayer'],\n moduleSpecifier: packageName,\n });\n } else if (\n !importDecl.getNamedImports().some((n) => n.getName() === 'useIntlayer')\n ) {\n importDecl.addNamedImport('useIntlayer');\n }\n\n // Insert hook at start of component\n sourceFile.getFunctions().forEach((f) => {\n f.getBody()\n ?.asKind(SyntaxKind.Block)\n ?.insertStatements(0, `const content = useIntlayer(\"${componentKey}\");`);\n });\n\n // Also handle const/arrow components\n sourceFile.getVariableDeclarations().forEach((v) => {\n const init = v.getInitializer();\n if (Node.isArrowFunction(init) || Node.isFunctionExpression(init)) {\n const body = init.getBody();\n if (Node.isBlock(body)) {\n // Heuristic: check if it returns JSX or uses hooks\n if (\n body.getText().includes('return') ||\n body.getText().includes('use')\n ) {\n body.insertStatements(\n 0,\n `const content = useIntlayer(\"${componentKey}\");`\n );\n }\n }\n }\n });\n\n if (save) {\n await sourceFile.save();\n }\n return extractedContent;\n};\n\n// ==========================================\n// 5. 
Main Dispatcher\n// ==========================================\n\nexport type PackageName =\n | 'next-intlayer'\n | 'react-intlayer'\n | 'vue-intlayer'\n | 'svelte-intlayer'\n | 'preact-intlayer'\n | 'solid-intlayer'\n | 'angular-intlayer'\n | 'express-intlayer';\n\nexport type ExtractIntlayerOptions = {\n configOptions?: GetConfigurationOptions;\n outputDir?: string;\n codeOnly?: boolean;\n declarationOnly?: boolean;\n};\n\nexport const extractIntlayer = async (\n filePath: string,\n packageName: PackageName,\n options?: ExtractIntlayerOptions,\n project?: Project\n) => {\n const saveComponent = !options?.declarationOnly;\n const writeContent = !options?.codeOnly;\n\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n const { baseDir } = configuration.content;\n\n // Setup Project for TS/React files if needed\n const _project =\n project || new Project({ skipAddingFilesFromTsConfig: true });\n\n const baseName = extractDictionaryKey(\n filePath,\n (await fs.readFile(filePath)).toString()\n );\n const componentKey = camelCaseToKebabCase(baseName);\n const ext = extname(filePath);\n\n let extractedContent: Record<string, string> | null = null;\n\n if (ext === '.vue') {\n try {\n const { processVueFile } = await import('@intlayer/vue-transformer');\n extractedContent = await processVueFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/vue-transformer', ANSIColors.YELLOW)} to process Vue files.`\n );\n }\n throw error;\n }\n } else if (ext === '.svelte') {\n try {\n const { processSvelteFile } = (await import(\n '@intlayer/svelte-transformer'\n )) as any;\n extractedContent = await processSvelteFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/svelte-transformer', ANSIColors.YELLOW)} to process Svelte files.`\n );\n }\n throw error;\n }\n } else if (['.tsx', '.jsx', '.ts', '.js'].includes(ext)) {\n extractedContent = await processReactFile(\n filePath,\n componentKey,\n packageName,\n _project,\n saveComponent\n );\n }\n\n if (!extractedContent) {\n appLogger(`No extractable text found in ${baseName}`);\n return;\n }\n\n // Shared Write Logic\n if (writeContent) {\n const contentFilePath = await writeContentHelper(\n extractedContent,\n componentKey,\n filePath,\n configuration,\n options?.outputDir\n );\n\n const relativeContentFilePath = relative(\n configuration.content.baseDir,\n contentFilePath\n );\n appLogger(`Created content file: ${colorizePath(relativeContentFilePath)}`);\n }\n\n // Optional: Format\n if (saveComponent) {\n try {\n const formatCommand = detectFormatCommand(configuration);\n if (formatCommand) {\n execSync(formatCommand.replace('{{file}}', filePath), {\n stdio: 'ignore', // Silent\n cwd: baseDir,\n });\n }\n } catch {\n // Ignore format errors\n }\n\n appLogger(\n `Updated component: ${colorizePath(relative(baseDir, filePath))}`\n );\n }\n};\n\nexport const transformFiles = async (\n filePaths: string[],\n packageName: PackageName,\n options?: ExtractIntlayerOptions\n) 
=> {\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const project = new Project({\n skipAddingFilesFromTsConfig: true,\n });\n\n for (const filePath of filePaths) {\n try {\n await extractIntlayer(filePath, packageName, options, project);\n } catch (error) {\n appLogger(`Failed to transform ${filePath}: ${(error as Error).message}`);\n }\n }\n};\n"],"mappings":";;;;;;;;;;;;;AAwBA,MAAa,wBAAwB;CACnC;CACA;CACA;CACA;CACA;CACD;;;;AAKD,MAAa,iBAAiB,SAA0B;CACtD,MAAM,UAAU,KAAK,MAAM;AAC3B,KAAI,CAAC,QAAS,QAAO;AACrB,KAAI,CAAC,QAAQ,SAAS,IAAI,CAAE,QAAO;AAEnC,KAAI,CAAC,SAAS,KAAK,QAAQ,CAAE,QAAO;AAEpC,KAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,WAAW,KAAK,CAAE,QAAO;AAChE,QAAO;;;;;AAMT,MAAa,eACX,MACA,iBACW;CAEX,IAAI,MAAM,KACP,QAAQ,QAAQ,IAAI,CACpB,QAAQ,OAAO,IAAI,CACnB,QAAQ,OAAO,IAAI,CACnB,QAAQ,kBAAkB,GAAG,CAC7B,MAAM,CACN,MAAM,IAAI,CACV,OAAO,QAAQ,CACf,MAAM,GATQ,EASI,CAClB,KAAK,MAAM,UACV,UAAU,IACN,KAAK,aAAa,GAClB,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE,CAAC,aAAa,CAC/D,CACA,KAAK,GAAG;AAEX,KAAI,CAAC,IAAK,OAAM;AAChB,KAAI,aAAa,IAAI,IAAI,EAAE;EACzB,IAAI,IAAI;AACR,SAAO,aAAa,IAAI,GAAG,MAAM,IAAI,CAAE;AACvC,QAAM,GAAG,MAAM;;AAEjB,QAAO;;AAWT,MAAM,qBAAqB,OACzB,kBACA,cACA,UACA,eACA,cACG;CACH,MAAM,EAAE,kBAAkB,cAAc;CACxC,MAAM,EAAE,SAAS,mBAAmB,cAAc;CAElD,MAAM,kBAAkB,eAAe,YAAY;CAEnD,MAAM,UAAU,YAAY,QAAQ,UAAU,GAAG,QAAQ,SAAS;CAElE,MAAM,WAAW,SAAS,UADd,QAAQ,SAAS,CACW;CAGxC,MAAM,kBAAkB,KACtB,SACA,GAJsB,SAAS,OAAO,EAAE,CAAC,aAAa,GAAG,SAAS,MAAM,EAAE,CAIvD,GAAG,eAAe,KACtC;CACD,MAAM,0BAA0B,SAAS,SAAS,gBAAgB;CAElE,IAAI;AAEJ,KAAI,gBAEF,cAAa;EACX,KAAK;EACL,SAAS;EACT,QAAQ;EACR,UAAU;EACX;MACI;EAEL,MAAM,sBAAuD,EAAE;AAC/D,OAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,iBAAiB,CACzD,qBAAoB,OAAO;GACzB,UAAU;GACV,aAAa,GACV,gBAAgB,OAClB;GACF;AAGH,eAAa;GACX,KAAK;GACL,SAAS;GACT,UAAU;GACX;;CAGH,MAAM,cAAc,SAAS,SAAS,QAAQ;AAC9C,OAAM,wBAAwB,YAAY,eAAe,EACvD,qBAAqB,aACtB,CAAC;AAEF,QAAO;;AAST,MAAM,oBACJ,YACA,iBAIG;CACH,MAAM,mBAA2C,EAAE;CACnD,MAAM,eAAgC,EAAE;AAExC,YAAW,mBAAmB,SAAS;AAErC,MAAI,KAAK,UAAU,KAAK,EAAE;GACxB,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,QAAQ,QAAQ,IAAI,CAAC,MAAM;AACxD,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAY,CAAC;;aAK7C,KAAK,eAAe,KAAK,EAAE;GAClC,MAAM,OAAO,KAAK,aAAa,CAAC,SAAS;AACzC,OAAI,sBAAsB,SAAS,KAAK,EAAE;IACxC,MAAM,cAAc,KAAK,gBAAgB;AACzC,QAAI,KAAK,gBAAgB,YAAY,EAAE;KACrC,MAAM,OAAO,YAAY,iBAAiB;AAC1C,SAAI,cAAc,KAAK,EAAE;MACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,mBAAa,IAAI,IAAI;AACrB,uBAAiB,OAAO,KAAK,MAAM;AACnC,mBAAa,KAAK;OAAE;OAAM;OAAK,MAAM;OAAiB,CAAC;;;;aAOtD,KAAK,gBAAgB,KAAK,EAAE;GACnC,MAAM,OAAO,KAAK,iBAAiB;AACnC,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,SAAS,KAAK,WAAW;AAG/B,QACE,QAAQ,aAAa,KAAK,uBAC1B,QAAQ,aAAa,KAAK,qBAC1B,QAAQ,aAAa,KAAK,kBAE1B;AAIF,QAAI,KAAK,eAAe,OAAO,CAAE;AAGjC,QAAI,KAAK,iBAAiB,OAAO,EAE/B;SADmB,OAAO,eAAe,CAC1B,SAAS,CAAC,SAAS,cAAc,CAAE;;AAIpD,QAAI,KAAK,qBAAqB,OAAO,EACnC;SAAI,OAAO,aAAa,KAAK,KAAM;;IAGrC,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,MAAM;AACnC,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAkB,CAAC;;;GAG5D;AAEF,QAAO;EAAE;EAAkB;EAAc;;AAO3C,MAAM,mBAAmB,OACvB,UACA,cACA,aACA,SACA,OAAgB,SACb;CACH,IAAI;AACJ,KAAI;AACF,eAAa,QAAQ,oBAAoB,SAAS;SAC5C;AACN,eAAa,QAAQ,qBAAqB,SAAS;;CAIrD,MAAM,EAAE,kBAAkB,iBAAiB,iBACzC,4BAFmB,IAAI,KAAa,CAIrC;AAED,KAAI,OAAO,KAAK,iBAAiB,CAAC,WAAW,EAAG,QAAO;AAEvD,MAAK,MAAM,EAAE,MAAM,KAAK,UAAU,aAChC,KAAI,SAAS,cAAc,KAAK,UAAU,KAAK,CAC7C,MAAK,gBAAgB,YAAY,IAAI,GAAG;UAC/B,SAAS,mBAAmB,KAAK,eAAe,KAAK,CAC9D,MAAK,eAAe,YAAY,IAAI,SAAS;UACpC,SAAS,oBAAoB,KAAK,gBAAgB,KAAK,CAEhE,MAAK,gBAAgB,WAA
W,IAAI,QAAQ;CAKhD,MAAM,aAAa,WAAW,sBAC3B,MAAM,EAAE,yBAAyB,KAAK,YACxC;AACD,KAAI,CAAC,WACH,YAAW,qBAAqB;EAC9B,cAAc,CAAC,cAAc;EAC7B,iBAAiB;EAClB,CAAC;UAEF,CAAC,WAAW,iBAAiB,CAAC,MAAM,MAAM,EAAE,SAAS,KAAK,cAAc,CAExE,YAAW,eAAe,cAAc;AAI1C,YAAW,cAAc,CAAC,SAAS,MAAM;AACvC,IAAE,SAAS,EACP,OAAO,WAAW,MAAM,EACxB,iBAAiB,GAAG,gCAAgC,aAAa,KAAK;GAC1E;AAGF,YAAW,yBAAyB,CAAC,SAAS,QAAM;EAClD,MAAM,OAAOA,IAAE,gBAAgB;AAC/B,MAAI,KAAK,gBAAgB,KAAK,IAAI,KAAK,qBAAqB,KAAK,EAAE;GACjE,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAI,KAAK,QAAQ,KAAK,EAEpB;QACE,KAAK,SAAS,CAAC,SAAS,SAAS,IACjC,KAAK,SAAS,CAAC,SAAS,MAAM,CAE9B,MAAK,iBACH,GACA,gCAAgC,aAAa,KAC9C;;;GAIP;AAEF,KAAI,KACF,OAAM,WAAW,MAAM;AAEzB,QAAO;;AAwBT,MAAa,kBAAkB,OAC7B,UACA,aACA,SACA,YACG;CACH,MAAM,gBAAgB,CAAC,SAAS;CAChC,MAAM,eAAe,CAAC,SAAS;CAE/B,MAAM,gBAAgB,iBAAiB,SAAS,cAAc;CAC9D,MAAM,YAAY,aAAa,cAAc;CAC7C,MAAM,EAAE,YAAY,cAAc;CAGlC,MAAM,WACJ,WAAW,IAAI,QAAQ,EAAE,6BAA6B,MAAM,CAAC;CAE/D,MAAM,WAAW,qBACf,WACC,MAAM,GAAG,SAAS,SAAS,EAAE,UAAU,CACzC;CACD,MAAM,eAAe,qBAAqB,SAAS;CACnD,MAAM,MAAM,QAAQ,SAAS;CAE7B,IAAI,mBAAkD;AAEtD,KAAI,QAAQ,OACV,KAAI;EACF,MAAM,EAAE,mBAAmB,MAAM,OAAO;AACxC,qBAAmB,MAAM,eACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACM,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,kBAAkB,aAAa,6BAA6B,WAAW,OAAO,CAAC,wBAChF;AAEH,QAAM;;UAEC,QAAQ,UACjB,KAAI;EACF,MAAM,EAAE,sBAAuB,MAAM,OACnC;AAEF,qBAAmB,MAAM,kBACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACM,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,kBAAkB,aAAa,gCAAgC,WAAW,OAAO,CAAC,2BACnF;AAEH,QAAM;;UAEC;EAAC;EAAQ;EAAQ;EAAO;EAAM,CAAC,SAAS,IAAI,CACrD,oBAAmB,MAAM,iBACvB,UACA,cACA,aACA,UACA,cACD;AAGH,KAAI,CAAC,kBAAkB;AACrB,YAAU,gCAAgC,WAAW;AACrD;;AAIF,KAAI,cAAc;EAChB,MAAM,kBAAkB,MAAM,mBAC5B,kBACA,cACA,UACA,eACA,SAAS,UACV;AAMD,YAAU,yBAAyB,aAJH,SAC9B,cAAc,QAAQ,SACtB,gBACD,CACuE,GAAG;;AAI7E,KAAI,eAAe;AACjB,MAAI;GACF,MAAM,gBAAgB,oBAAoB,cAAc;AACxD,OAAI,cACF,UAAS,cAAc,QAAQ,YAAY,SAAS,EAAE;IACpD,OAAO;IACP,KAAK;IACN,CAAC;UAEE;AAIR,YACE,sBAAsB,aAAa,SAAS,SAAS,SAAS,CAAC,GAChE;;;AAIL,MAAa,iBAAiB,OAC5B,WACA,aACA,YACG;CAEH,MAAM,YAAY,aADI,iBAAiB,SAAS,cAAc,CACjB;CAE7C,MAAM,UAAU,IAAI,QAAQ,EAC1B,6BAA6B,MAC9B,CAAC;AAEF,MAAK,MAAM,YAAY,UACrB,KAAI;AACF,QAAM,gBAAgB,UAAU,aAAa,SAAS,QAAQ;UACvD,OAAO;AACd,YAAU,uBAAuB,SAAS,IAAK,MAAgB,UAAU"}
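The companion `generateKey` helper (exported by the same module per the embedded source) derives camelCase dictionary keys and disambiguates collisions with a numeric suffix. A short usage sketch, assuming the relative import path shown (illustrative only); note that callers maintain the `existingKeys` set themselves, as `extractTsContent` does in the source:

```ts
import { generateKey } from './transformFiles'; // illustrative path

const keys = new Set<string>();

// Keys are camelCase and capped at the first five words.
const k1 = generateKey('Welcome to our brand new dashboard!', keys); // 'welcomeToOurBrandNew'
keys.add(k1);

// Same five-word prefix collides and receives a numeric suffix.
const k2 = generateKey('Welcome to our brand new settings page', keys); // 'welcomeToOurBrandNew1'
keys.add(k2);

// Nothing alphanumeric survives sanitization -> fallback key.
const k3 = generateKey('***', keys); // 'content'
```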
@@ -1 +1 @@
- {"version":3,"file":"chunkJSON.mjs","names":["output: string[]","patches: Patch[]","testPatch: SetPatch","rootType: RootType","sourceString: string","chunks: JsonChunk[]","currentChunk: JsonChunk","root: any","parts: string[]","output: any[]"],"sources":["../../../src/utils/chunkJSON.ts"],"sourcesContent":["/**\n * Split & reassemble JSON by character budget.\n * - Measures serialized size using JSON.stringify(..).length (characters).\n * - Ensures each chunk is itself valid JSON.\n * - Very large strings are split into safe pieces using getChunk and re-concatenated on assemble.\n * - Protects against circular structures (JSON can't serialize those anyway).\n */\n\nimport { getChunk } from './getChunk';\n\ntype JSONPrimitive = string | number | boolean | null;\ntype JSONValue = JSONPrimitive | JSONObject | JSONArray;\n\nexport type JSONObject = {\n [k: string]: JSONValue;\n};\n\ntype JSONArray = JSONValue[];\n\ntype Path = Array<string | number>;\n\ntype SetPatch = { op: 'set'; path: Path; value: JSONValue };\ntype StrAppendPatch = {\n op: 'str-append';\n path: Path;\n value: string; // part of a longer string\n index: number;\n total: number;\n};\ntype Patch = SetPatch | StrAppendPatch;\n\ntype RootType = 'object' | 'array';\n\nexport type JsonChunk = {\n schemaVersion: 1;\n index: number;\n total: number;\n rootType: RootType;\n checksum: string; // hash of the full original JSON string\n entries: Patch[];\n};\n\nconst isObject = (val: unknown): val is Record<string, unknown> => {\n return typeof val === 'object' && val !== null && !Array.isArray(val);\n};\n\nconst computeDjb2 = (str: string): string => {\n // Simple 32-bit hash; deterministic & fast\n let hash = 5381;\n\n for (let i = 0; i < str.length; i++) {\n hash = ((hash << 5) + hash) ^ str.charCodeAt(i);\n }\n\n // convert to unsigned hex\n return (hash >>> 0).toString(16).padStart(8, '0');\n};\n\nconst setAtPath = (root: any, path: Path, value: JSONValue) => {\n let current = root;\n\n for (let i = 0; i < path.length - 1; i++) {\n const key = path[i];\n const nextKey = path[i + 1];\n const isNextIndex = typeof nextKey === 'number';\n\n if (typeof key === 'number') {\n if (!Array.isArray(current)) {\n throw new Error(`Expected array at path segment ${i}`);\n }\n\n if (current[key] === undefined) {\n current[key] = isNextIndex ? [] : {};\n }\n\n current = current[key];\n } else {\n if (!isObject(current)) {\n throw new Error(`Expected object at path segment ${i}`);\n }\n\n if (!(key in current)) {\n (current as any)[key] = isNextIndex ? 
[] : {};\n }\n\n current = (current as any)[key];\n }\n }\n\n const last = path[path.length - 1];\n\n if (typeof last === 'number') {\n if (!Array.isArray(current)) {\n throw new Error(`Expected array at final segment`);\n }\n\n current[last] = value as any;\n } else {\n if (!isObject(current)) {\n throw new Error(`Expected object at final segment`);\n }\n\n (current as any)[last] = value as any;\n }\n};\n\nconst pathKey = (path: Path): string => {\n // stable key for grouping string parts\n return JSON.stringify(path);\n};\n\n/**\n * Split a string into parts using getChunk with a charLength budget per part.\n */\nconst splitStringByBudget = (\n str: string,\n maxCharsPerPart: number\n): string[] => {\n if (maxCharsPerPart <= 0) {\n throw new Error('maxChars must be > 0');\n }\n\n const output: string[] = [];\n let offset = 0;\n\n while (offset < str.length) {\n const part = getChunk(str, {\n charStart: offset,\n charLength: maxCharsPerPart,\n });\n\n if (!part) break;\n\n output.push(part);\n offset += part.length;\n }\n\n return output;\n};\n\n/**\n * Flatten JSON into patches (leaf writes). Strings too large to fit in a single\n * chunk are yielded as multiple str-append patches.\n */\nconst flattenToPatches = (\n value: JSONValue,\n maxCharsPerChunk: number,\n path: Path = [],\n seen = new WeakSet<object>()\n): Patch[] => {\n // Conservative per-entry cap so a single entry fits into an empty chunk with envelope overhead.\n // (Envelope ~ a few hundred chars; we keep a comfortable margin.)\n const maxStringPiece = Math.max(\n 1,\n Math.floor((maxCharsPerChunk - 400) * 0.8)\n );\n\n const patches: Patch[] = [];\n\n const walk = (currentValue: JSONValue, currentPath: Path) => {\n if (typeof currentValue === 'string') {\n // If the serialized patch wouldn't fit, split the string into multiple parts\n // and encode as a split-node sentinel with numbered keys.\n const testPatch: SetPatch = {\n op: 'set',\n path: currentPath,\n value: currentValue,\n };\n const testLen = JSON.stringify(testPatch).length + 150; // margin\n\n if (testLen <= maxCharsPerChunk) {\n patches.push(testPatch);\n return;\n }\n\n // Use getChunk-based splitting to produce stable parts\n const parts = splitStringByBudget(currentValue, maxStringPiece);\n\n // Emit split-node metadata and parts as individual leaf writes\n patches.push({\n op: 'set',\n path: [...currentPath, '__splittedType'],\n value: 'string',\n });\n patches.push({\n op: 'set',\n path: [...currentPath, '__total'],\n value: parts.length,\n });\n\n for (let i = 0; i < parts.length; i++) {\n patches.push({\n op: 'set',\n path: [...currentPath, String(i + 1)],\n value: parts[i],\n });\n }\n\n return;\n }\n\n if (currentValue === null || typeof currentValue !== 'object') {\n patches.push({ op: 'set', path: currentPath, value: currentValue });\n return;\n }\n\n if (seen.has(currentValue as object)) {\n throw new Error('Cannot serialize circular structures to JSON.');\n }\n\n seen.add(currentValue as object);\n\n if (Array.isArray(currentValue)) {\n for (let i = 0; i < currentValue.length; i++) {\n walk(currentValue[i] as JSONValue, [...currentPath, i]);\n }\n } else {\n for (const key of Object.keys(currentValue)) {\n walk((currentValue as JSONObject)[key], [...currentPath, key]);\n }\n }\n\n seen.delete(currentValue as object);\n };\n\n walk(value, path);\n return patches;\n};\n\n/**\n * Split JSON into chunks constrained by character count of serialized chunk.\n */\nexport const chunkJSON = (\n value: JSONObject | JSONArray,\n maxChars: number\n): 
JsonChunk[] => {\n if (!isObject(value) && !Array.isArray(value)) {\n throw new Error('Root must be an object or array.');\n }\n\n if (maxChars < 500) {\n // You can lower this if you truly need; recommended to keep some envelope headroom.\n throw new Error('maxChars is too small. Use at least 500 characters.');\n }\n\n const rootType: RootType = Array.isArray(value) ? 'array' : 'object';\n let sourceString: string;\n\n try {\n sourceString = JSON.stringify(value);\n } catch {\n // Provide a deterministic error message for circular refs\n throw new Error('Cannot serialize circular structures to JSON.');\n }\n\n const checksum = computeDjb2(sourceString);\n const allPatches = flattenToPatches(value as JSONValue, maxChars);\n\n const chunks: JsonChunk[] = [];\n let currentChunk: JsonChunk = {\n schemaVersion: 1,\n index: 0, // provisional\n total: 0, // provisional\n rootType,\n checksum,\n entries: [],\n };\n\n const emptyEnvelopeSize = JSON.stringify({\n ...currentChunk,\n entries: [],\n }).length;\n\n const tryFlush = () => {\n if (currentChunk.entries.length > 0) {\n chunks.push(currentChunk);\n currentChunk = {\n schemaVersion: 1,\n index: 0,\n total: 0,\n rootType,\n checksum,\n entries: [],\n };\n }\n };\n\n for (const patch of allPatches) {\n // Would adding this patch exceed maxChars?\n const withPatchSize =\n emptyEnvelopeSize +\n JSON.stringify(currentChunk.entries).length + // current entries array\n (currentChunk.entries.length ? 1 : 0) + // possible comma\n JSON.stringify(patch).length;\n\n if (withPatchSize <= maxChars) {\n currentChunk.entries.push(patch);\n } else {\n // Start a new chunk if current has items\n if (currentChunk.entries.length > 0) {\n tryFlush();\n }\n\n // Ensure single patch fits into an empty chunk\n const singleSize = emptyEnvelopeSize + JSON.stringify([patch]).length;\n\n if (singleSize > maxChars) {\n // This should only happen for massive strings, which we pre-split;\n // if it happens for a non-string, we cannot split further.\n throw new Error(\n 'A single entry exceeds maxChars and cannot be split. Reduce entry size or increase maxChars.'\n );\n }\n\n currentChunk.entries.push(patch);\n }\n }\n\n tryFlush();\n\n // Ensure at least one chunk exists (even for empty root)\n if (chunks.length === 0) {\n chunks.push({\n schemaVersion: 1,\n index: 0,\n total: 0, // provisional\n rootType,\n checksum,\n entries: [],\n });\n }\n\n // Finalize indices & totals\n const totalChunks = chunks.length;\n\n chunks.forEach((chunk, index) => {\n chunk.index = index;\n chunk.total = totalChunks;\n });\n\n return chunks;\n};\n\n/**\n * Reassemble JSON from chunks.\n * - Validates checksums and indices.\n * - Applies 'set' patches and merges string pieces from 'str-append'.\n */\n/**\n * Reconstruct content from a single chunk without validation.\n * Useful for processing individual chunks in a pipeline where you don't have all chunks yet.\n * Note: This will only reconstruct the partial content contained in this chunk.\n */\nexport const reconstructFromSingleChunk = (\n chunk: JsonChunk\n): JSONObject | JSONArray => {\n const root: any = chunk.rootType === 'array' ? 
[] : {};\n\n // Apply all 'set' patches from this chunk\n for (const entry of chunk.entries) {\n if (entry.op === 'set') {\n setAtPath(root, entry.path, entry.value);\n }\n }\n\n // Reconcile split-node sentinels for strings/arrays\n // When reconstructing from a single chunk, we may have incomplete split nodes\n const reconcileSplitNodes = (node: any): any => {\n if (node === null || typeof node !== 'object') return node;\n\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n node[i] = reconcileSplitNodes(node[i]);\n }\n return node;\n }\n\n // string split-node\n if ((node as any)['__splittedType'] === 'string') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total <= 0) {\n // Invalid split node, return as-is\n return node;\n }\n\n const parts: string[] = [];\n let hasAllParts = true;\n\n for (let i = 1; i <= total; i++) {\n const piece = (node as any)[String(i)];\n\n if (typeof piece !== 'string') {\n hasAllParts = false;\n break;\n }\n\n parts.push(piece);\n }\n\n // Only reconstruct if we have all parts, otherwise return the node as-is\n if (hasAllParts) {\n return parts.join('');\n }\n\n return node;\n }\n\n // array split-node (optional support)\n if ((node as any)['__splittedType'] === 'array') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total < 0) {\n // Invalid split node, return as-is\n return node;\n }\n\n const output: any[] = [];\n let hasAllParts = true;\n\n for (let i = 1; i <= total; i++) {\n const slice = (node as any)[String(i)];\n\n if (!Array.isArray(slice)) {\n hasAllParts = false;\n break;\n }\n\n for (let j = 0; j < slice.length; j++) {\n output.push(reconcileSplitNodes(slice[j]));\n }\n }\n\n // Only reconstruct if we have all parts\n if (hasAllParts) {\n return output;\n }\n\n return node;\n }\n\n // walk normal object\n for (const key of Object.keys(node)) {\n node[key] = reconcileSplitNodes(node[key]);\n }\n\n return node;\n };\n\n return reconcileSplitNodes(root);\n};\n\nexport const assembleJSON = (chunks: JsonChunk[]): JSONObject | JSONArray => {\n if (!chunks || chunks.length === 0) {\n throw new Error('No chunks provided.');\n }\n\n // Basic validation & sort\n const sorted = [...chunks].sort((a, b) => a.index - b.index);\n const { checksum, rootType } = sorted[0];\n const schemaVersion = 1;\n\n for (let i = 0; i < sorted.length; i++) {\n const chunk = sorted[i];\n\n if (chunk.schemaVersion !== schemaVersion) {\n console.error('Unsupported schemaVersion.', {\n cause: chunk,\n schemaVersion,\n });\n throw new Error('Unsupported schemaVersion.');\n }\n\n if (chunk.rootType !== rootType) {\n console.error('Chunks rootType mismatch.', {\n cause: chunk,\n rootType,\n });\n throw new Error('Chunks rootType mismatch.');\n }\n\n if (chunk.checksum !== checksum) {\n console.error('Chunks checksum mismatch (different source objects?).', {\n cause: chunk,\n checksum,\n });\n throw new Error('Chunks checksum mismatch (different source objects?).');\n }\n\n if (chunk.index !== i) {\n console.error('Chunk indices are not contiguous or sorted.', {\n cause: chunk,\n index: chunk.index,\n i,\n });\n throw new Error('Chunk indices are not contiguous or sorted.');\n }\n\n // Defer total check until after reconstruction to prefer more specific errors\n }\n\n const root: any = rootType === 'array' ? 
[] : {};\n\n // Collect string parts by path\n const stringParts = new Map<\n string,\n { path: Path; total: number; received: StrAppendPatch[] }\n >();\n\n const applySet = (patch: SetPatch) =>\n setAtPath(root, patch.path, patch.value);\n\n for (const chunk of sorted) {\n for (const entry of chunk.entries) {\n if (entry.op === 'set') {\n applySet(entry);\n } else {\n const key = pathKey(entry.path);\n const record = stringParts.get(key) ?? {\n path: entry.path,\n total: entry.total,\n received: [],\n };\n\n if (record.total !== entry.total) {\n throw new Error('Inconsistent string part totals for a path.');\n }\n\n record.received.push(entry);\n stringParts.set(key, record);\n }\n }\n }\n\n // Stitch strings\n for (const { path, total, received } of stringParts.values()) {\n if (received.length !== total) {\n throw new Error('Missing string parts for a path; incomplete chunk set.');\n }\n\n received.sort((a, b) => a.index - b.index);\n const fullString = received.map((part) => part.value).join('');\n setAtPath(root, path, fullString);\n }\n\n // Reconcile split-node sentinels for strings/arrays after all patches applied\n const reconcileSplitNodes = (node: any): any => {\n if (node === null || typeof node !== 'object') return node;\n\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n node[i] = reconcileSplitNodes(node[i]);\n }\n return node;\n }\n\n // string split-node\n if ((node as any)['__splittedType'] === 'string') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total <= 0) {\n throw new Error('Invalid split-node total for a path.');\n }\n\n const parts: string[] = [];\n\n for (let i = 1; i <= total; i++) {\n const piece = (node as any)[String(i)];\n\n if (typeof piece !== 'string') {\n throw new Error(\n 'Missing string parts for a path; incomplete chunk set.'\n );\n }\n\n parts.push(piece);\n }\n\n return parts.join('');\n }\n\n // array split-node (optional support)\n if ((node as any)['__splittedType'] === 'array') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total < 0) {\n throw new Error('Invalid split-node total for a path.');\n }\n\n const output: any[] = [];\n\n for (let i = 1; i <= total; i++) {\n const slice = (node as any)[String(i)];\n\n if (!Array.isArray(slice)) {\n throw new Error(\n 'Missing string parts for a path; incomplete chunk set.'\n );\n }\n\n for (let j = 0; j < slice.length; j++) {\n output.push(reconcileSplitNodes(slice[j]));\n }\n }\n\n return output;\n }\n\n // walk normal object\n for (const key of Object.keys(node)) {\n node[key] = reconcileSplitNodes(node[key]);\n }\n\n return node;\n };\n\n const reconciled = reconcileSplitNodes(root);\n\n // Now validate totals match provided count\n for (let i = 0; i < sorted.length; i++) {\n const chunk = sorted[i];\n\n if (chunk.total !== sorted.length) {\n throw new Error(\n `Chunk total does not match provided count. 
Expected ${sorted.length}, but chunk ${i} has total=${chunk.total}`\n );\n }\n }\n\n return reconciled;\n};\n\n/* -------------------------------------------\n * Example usage\n * -------------------------------------------\nconst big: JSONObject = {\n title: \"Document\",\n content: \"…a very very long text…\",\n items: Array.from({ length: 2000 }, (_, i) => ({ id: i, label: `Item ${i}` }))\n};\n\n// Split to ~16k-char chunks\nconst chunks = chunkJSON(big, 16_000);\n\n// Send each `chunks[i]` as JSON to your backend.\n// Later, reassemble:\nconst restored = assembleJSON(chunks);\nconsole.log(JSON.stringify(restored) === JSON.stringify(big)); // true\n*/\n"],"mappings":";;;;;;;;;;AA0CA,MAAM,YAAY,QAAiD;AACjE,QAAO,OAAO,QAAQ,YAAY,QAAQ,QAAQ,CAAC,MAAM,QAAQ,IAAI;;AAGvE,MAAM,eAAe,QAAwB;CAE3C,IAAI,OAAO;AAEX,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,SAAS,QAAQ,KAAK,OAAQ,IAAI,WAAW,EAAE;AAIjD,SAAQ,SAAS,GAAG,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI;;AAGnD,MAAM,aAAa,MAAW,MAAY,UAAqB;CAC7D,IAAI,UAAU;AAEd,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK;EACxC,MAAM,MAAM,KAAK;EAEjB,MAAM,cAAc,OADJ,KAAK,IAAI,OACc;AAEvC,MAAI,OAAO,QAAQ,UAAU;AAC3B,OAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kCAAkC,IAAI;AAGxD,OAAI,QAAQ,SAAS,OACnB,SAAQ,OAAO,cAAc,EAAE,GAAG,EAAE;AAGtC,aAAU,QAAQ;SACb;AACL,OAAI,CAAC,SAAS,QAAQ,CACpB,OAAM,IAAI,MAAM,mCAAmC,IAAI;AAGzD,OAAI,EAAE,OAAO,SACX,CAAC,QAAgB,OAAO,cAAc,EAAE,GAAG,EAAE;AAG/C,aAAW,QAAgB;;;CAI/B,MAAM,OAAO,KAAK,KAAK,SAAS;AAEhC,KAAI,OAAO,SAAS,UAAU;AAC5B,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kCAAkC;AAGpD,UAAQ,QAAQ;QACX;AACL,MAAI,CAAC,SAAS,QAAQ,CACpB,OAAM,IAAI,MAAM,mCAAmC;AAGrD,EAAC,QAAgB,QAAQ;;;AAI7B,MAAM,WAAW,SAAuB;AAEtC,QAAO,KAAK,UAAU,KAAK;;;;;AAM7B,MAAM,uBACJ,KACA,oBACa;AACb,KAAI,mBAAmB,EACrB,OAAM,IAAI,MAAM,uBAAuB;CAGzC,MAAMA,SAAmB,EAAE;CAC3B,IAAI,SAAS;AAEb,QAAO,SAAS,IAAI,QAAQ;EAC1B,MAAM,OAAO,SAAS,KAAK;GACzB,WAAW;GACX,YAAY;GACb,CAAC;AAEF,MAAI,CAAC,KAAM;AAEX,SAAO,KAAK,KAAK;AACjB,YAAU,KAAK;;AAGjB,QAAO;;;;;;AAOT,MAAM,oBACJ,OACA,kBACA,OAAa,EAAE,EACf,uBAAO,IAAI,SAAiB,KAChB;CAGZ,MAAM,iBAAiB,KAAK,IAC1B,GACA,KAAK,OAAO,mBAAmB,OAAO,GAAI,CAC3C;CAED,MAAMC,UAAmB,EAAE;CAE3B,MAAM,QAAQ,cAAyB,gBAAsB;AAC3D,MAAI,OAAO,iBAAiB,UAAU;GAGpC,MAAMC,YAAsB;IAC1B,IAAI;IACJ,MAAM;IACN,OAAO;IACR;AAGD,OAFgB,KAAK,UAAU,UAAU,CAAC,SAAS,OAEpC,kBAAkB;AAC/B,YAAQ,KAAK,UAAU;AACvB;;GAIF,MAAM,QAAQ,oBAAoB,cAAc,eAAe;AAG/D,WAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,iBAAiB;IACxC,OAAO;IACR,CAAC;AACF,WAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,UAAU;IACjC,OAAO,MAAM;IACd,CAAC;AAEF,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,SAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,OAAO,IAAI,EAAE,CAAC;IACrC,OAAO,MAAM;IACd,CAAC;AAGJ;;AAGF,MAAI,iBAAiB,QAAQ,OAAO,iBAAiB,UAAU;AAC7D,WAAQ,KAAK;IAAE,IAAI;IAAO,MAAM;IAAa,OAAO;IAAc,CAAC;AACnE;;AAGF,MAAI,KAAK,IAAI,aAAuB,CAClC,OAAM,IAAI,MAAM,gDAAgD;AAGlE,OAAK,IAAI,aAAuB;AAEhC,MAAI,MAAM,QAAQ,aAAa,CAC7B,MAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,IACvC,MAAK,aAAa,IAAiB,CAAC,GAAG,aAAa,EAAE,CAAC;MAGzD,MAAK,MAAM,OAAO,OAAO,KAAK,aAAa,CACzC,MAAM,aAA4B,MAAM,CAAC,GAAG,aAAa,IAAI,CAAC;AAIlE,OAAK,OAAO,aAAuB;;AAGrC,MAAK,OAAO,KAAK;AACjB,QAAO;;;;;AAMT,MAAa,aACX,OACA,aACgB;AAChB,KAAI,CAAC,SAAS,MAAM,IAAI,CAAC,MAAM,QAAQ,MAAM,CAC3C,OAAM,IAAI,MAAM,mCAAmC;AAGrD,KAAI,WAAW,IAEb,OAAM,IAAI,MAAM,sDAAsD;CAGxE,MAAMC,WAAqB,MAAM,QAAQ,MAAM,GAAG,UAAU;CAC5D,IAAIC;AAEJ,KAAI;AACF,iBAAe,KAAK,UAAU,MAAM;SAC9B;AAEN,QAAM,IAAI,MAAM,gDAAgD;;CAGlE,MAAM,WAAW,YAAY,aAAa;CAC1C,MAAM,aAAa,iBAAiB,OAAoB,SAAS;CAEjE,MAAMC,SAAsB,EAAE;CAC9B,IAAIC,eAA0B;EAC5B,eAAe;EACf,OAAO;EACP,OAAO;EACP;EACA;EACA,SAAS,EAAE;EACZ;CAED,MAAM,oBAAoB,KAAK,UAAU;EACvC,GAAG;EACH,SAAS,EAAE;EACZ,CAAC,CAAC;CAEH,MAA
M,iBAAiB;AACrB,MAAI,aAAa,QAAQ,SAAS,GAAG;AACnC,UAAO,KAAK,aAAa;AACzB,kBAAe;IACb,eAAe;IACf,OAAO;IACP,OAAO;IACP;IACA;IACA,SAAS,EAAE;IACZ;;;AAIL,MAAK,MAAM,SAAS,WAQlB,KALE,oBACA,KAAK,UAAU,aAAa,QAAQ,CAAC,UACpC,aAAa,QAAQ,SAAS,IAAI,KACnC,KAAK,UAAU,MAAM,CAAC,UAEH,SACnB,cAAa,QAAQ,KAAK,MAAM;MAC3B;AAEL,MAAI,aAAa,QAAQ,SAAS,EAChC,WAAU;AAMZ,MAFmB,oBAAoB,KAAK,UAAU,CAAC,MAAM,CAAC,CAAC,SAE9C,SAGf,OAAM,IAAI,MACR,+FACD;AAGH,eAAa,QAAQ,KAAK,MAAM;;AAIpC,WAAU;AAGV,KAAI,OAAO,WAAW,EACpB,QAAO,KAAK;EACV,eAAe;EACf,OAAO;EACP,OAAO;EACP;EACA;EACA,SAAS,EAAE;EACZ,CAAC;CAIJ,MAAM,cAAc,OAAO;AAE3B,QAAO,SAAS,OAAO,UAAU;AAC/B,QAAM,QAAQ;AACd,QAAM,QAAQ;GACd;AAEF,QAAO;;;;;;;;;;;;AAaT,MAAa,8BACX,UAC2B;CAC3B,MAAMC,OAAY,MAAM,aAAa,UAAU,EAAE,GAAG,EAAE;AAGtD,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,WAAU,MAAM,MAAM,MAAM,MAAM,MAAM;CAM5C,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EAExC,QAAO;GAGT,MAAMC,QAAkB,EAAE;GAC1B,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,UAAU;AAC7B,mBAAc;AACd;;AAGF,UAAM,KAAK,MAAM;;AAInB,OAAI,YACF,QAAO,MAAM,KAAK,GAAG;AAGvB,UAAO;;AAIT,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EAEvC,QAAO;GAGT,MAAMC,SAAgB,EAAE;GACxB,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,EAAE;AACzB,mBAAc;AACd;;AAGF,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAK9C,OAAI,YACF,QAAO;AAGT,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;AAGT,QAAO,oBAAoB,KAAK;;AAGlC,MAAa,gBAAgB,WAAgD;AAC3E,KAAI,CAAC,UAAU,OAAO,WAAW,EAC/B,OAAM,IAAI,MAAM,sBAAsB;CAIxC,MAAM,SAAS,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;CAC5D,MAAM,EAAE,UAAU,aAAa,OAAO;CACtC,MAAM,gBAAgB;AAEtB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,kBAAkB,eAAe;AACzC,WAAQ,MAAM,8BAA8B;IAC1C,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,6BAA6B;;AAG/C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,6BAA6B;IACzC,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,4BAA4B;;AAG9C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,yDAAyD;IACrE,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,wDAAwD;;AAG1E,MAAI,MAAM,UAAU,GAAG;AACrB,WAAQ,MAAM,+CAA+C;IAC3D,OAAO;IACP,OAAO,MAAM;IACb;IACD,CAAC;AACF,SAAM,IAAI,MAAM,8CAA8C;;;CAMlE,MAAMF,OAAY,aAAa,UAAU,EAAE,GAAG,EAAE;CAGhD,MAAM,8BAAc,IAAI,KAGrB;CAEH,MAAM,YAAY,UAChB,UAAU,MAAM,MAAM,MAAM,MAAM,MAAM;AAE1C,MAAK,MAAM,SAAS,OAClB,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,UAAS,MAAM;MACV;EACL,MAAM,MAAM,QAAQ,MAAM,KAAK;EAC/B,MAAM,SAAS,YAAY,IAAI,IAAI,IAAI;GACrC,MAAM,MAAM;GACZ,OAAO,MAAM;GACb,UAAU,EAAE;GACb;AAED,MAAI,OAAO,UAAU,MAAM,MACzB,OAAM,IAAI,MAAM,8CAA8C;AAGhE,SAAO,SAAS,KAAK,MAAM;AAC3B,cAAY,IAAI,KAAK,OAAO;;AAMlC,MAAK,MAAM,EAAE,MAAM,OAAO,cAAc,YAAY,QAAQ,EAAE;AAC5D,MAAI,SAAS,WAAW,MACtB,OAAM,IAAI,MAAM,yDAAyD;AAG3E,WAAS,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAE1C,YAAU,MAAM,MADG,SAAS,KAAK,SAAS,KAAK,MAAM,CAAC,KAAK,GAAG,CAC7B;;CAInC,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EACxC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAMC,QAAkB,EAAE;AAE1B,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,SACnB,OAAM,IAAI,MACR,yDACD;AAGH,UAAM,KAAK,MAAM;
;AAGnB,UAAO,MAAM,KAAK,GAAG;;AAIvB,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EACvC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAMC,SAAgB,EAAE;AAExB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,CACvB,OAAM,IAAI,MACR,yDACD;AAGH,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAI9C,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;CAGT,MAAM,aAAa,oBAAoB,KAAK;AAG5C,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,UAAU,OAAO,OACzB,OAAM,IAAI,MACR,uDAAuD,OAAO,OAAO,cAAc,EAAE,aAAa,MAAM,QACzG;;AAIL,QAAO"}
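One detail of the chunking scheme in `chunkJSON.ts` worth making concrete: when a string is too large for a single chunk, `flattenToPatches` does not emit it as one `set` patch. Instead it rewrites the node into a split-node sentinel with 1-based numbered parts, which the reconcile pass later joins back into a single string. A sketch of the intermediate shape, with hypothetical path and part contents:

```ts
// Hypothetical intermediate node at some path, e.g. ['content', 'body'],
// after flattenToPatches has split an oversized string:
const splitNode = {
  __splittedType: 'string', // sentinel marker emitted by flattenToPatches
  __total: 3,               // number of parts to expect
  '1': 'first piece of the long string ',
  '2': 'second piece of the long string ',
  '3': 'third piece of the long string',
};
// assembleJSON's reconcileSplitNodes pass replaces this node with
// parts '1' + '2' + '3' joined, and throws if any part is missing;
// reconstructFromSingleChunk instead leaves an incomplete node as-is.
```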
+ {"version":3,"file":"chunkJSON.mjs","names":[],"sources":["../../../src/utils/chunkJSON.ts"],"sourcesContent":["/**\n * Split & reassemble JSON by character budget.\n * - Measures serialized size using JSON.stringify(..).length (characters).\n * - Ensures each chunk is itself valid JSON.\n * - Very large strings are split into safe pieces using getChunk and re-concatenated on assemble.\n * - Protects against circular structures (JSON can't serialize those anyway).\n */\n\nimport { getChunk } from './getChunk';\n\ntype JSONPrimitive = string | number | boolean | null;\ntype JSONValue = JSONPrimitive | JSONObject | JSONArray;\n\nexport type JSONObject = {\n [k: string]: JSONValue;\n};\n\ntype JSONArray = JSONValue[];\n\ntype Path = Array<string | number>;\n\ntype SetPatch = { op: 'set'; path: Path; value: JSONValue };\ntype StrAppendPatch = {\n op: 'str-append';\n path: Path;\n value: string; // part of a longer string\n index: number;\n total: number;\n};\ntype Patch = SetPatch | StrAppendPatch;\n\ntype RootType = 'object' | 'array';\n\nexport type JsonChunk = {\n schemaVersion: 1;\n index: number;\n total: number;\n rootType: RootType;\n checksum: string; // hash of the full original JSON string\n entries: Patch[];\n};\n\nconst isObject = (val: unknown): val is Record<string, unknown> => {\n return typeof val === 'object' && val !== null && !Array.isArray(val);\n};\n\nconst computeDjb2 = (str: string): string => {\n // Simple 32-bit hash; deterministic & fast\n let hash = 5381;\n\n for (let i = 0; i < str.length; i++) {\n hash = ((hash << 5) + hash) ^ str.charCodeAt(i);\n }\n\n // convert to unsigned hex\n return (hash >>> 0).toString(16).padStart(8, '0');\n};\n\nconst setAtPath = (root: any, path: Path, value: JSONValue) => {\n let current = root;\n\n for (let i = 0; i < path.length - 1; i++) {\n const key = path[i];\n const nextKey = path[i + 1];\n const isNextIndex = typeof nextKey === 'number';\n\n if (typeof key === 'number') {\n if (!Array.isArray(current)) {\n throw new Error(`Expected array at path segment ${i}`);\n }\n\n if (current[key] === undefined) {\n current[key] = isNextIndex ? [] : {};\n }\n\n current = current[key];\n } else {\n if (!isObject(current)) {\n throw new Error(`Expected object at path segment ${i}`);\n }\n\n if (!(key in current)) {\n (current as any)[key] = isNextIndex ? [] : {};\n }\n\n current = (current as any)[key];\n }\n }\n\n const last = path[path.length - 1];\n\n if (typeof last === 'number') {\n if (!Array.isArray(current)) {\n throw new Error(`Expected array at final segment`);\n }\n\n current[last] = value as any;\n } else {\n if (!isObject(current)) {\n throw new Error(`Expected object at final segment`);\n }\n\n (current as any)[last] = value as any;\n }\n};\n\nconst pathKey = (path: Path): string => {\n // stable key for grouping string parts\n return JSON.stringify(path);\n};\n\n/**\n * Split a string into parts using getChunk with a charLength budget per part.\n */\nconst splitStringByBudget = (\n str: string,\n maxCharsPerPart: number\n): string[] => {\n if (maxCharsPerPart <= 0) {\n throw new Error('maxChars must be > 0');\n }\n\n const output: string[] = [];\n let offset = 0;\n\n while (offset < str.length) {\n const part = getChunk(str, {\n charStart: offset,\n charLength: maxCharsPerPart,\n });\n\n if (!part) break;\n\n output.push(part);\n offset += part.length;\n }\n\n return output;\n};\n\n/**\n * Flatten JSON into patches (leaf writes). 
Strings too large to fit in a single\n * chunk are yielded as multiple str-append patches.\n */\nconst flattenToPatches = (\n value: JSONValue,\n maxCharsPerChunk: number,\n path: Path = [],\n seen = new WeakSet<object>()\n): Patch[] => {\n // Conservative per-entry cap so a single entry fits into an empty chunk with envelope overhead.\n // (Envelope ~ a few hundred chars; we keep a comfortable margin.)\n const maxStringPiece = Math.max(\n 1,\n Math.floor((maxCharsPerChunk - 400) * 0.8)\n );\n\n const patches: Patch[] = [];\n\n const walk = (currentValue: JSONValue, currentPath: Path) => {\n if (typeof currentValue === 'string') {\n // If the serialized patch wouldn't fit, split the string into multiple parts\n // and encode as a split-node sentinel with numbered keys.\n const testPatch: SetPatch = {\n op: 'set',\n path: currentPath,\n value: currentValue,\n };\n const testLen = JSON.stringify(testPatch).length + 150; // margin\n\n if (testLen <= maxCharsPerChunk) {\n patches.push(testPatch);\n return;\n }\n\n // Use getChunk-based splitting to produce stable parts\n const parts = splitStringByBudget(currentValue, maxStringPiece);\n\n // Emit split-node metadata and parts as individual leaf writes\n patches.push({\n op: 'set',\n path: [...currentPath, '__splittedType'],\n value: 'string',\n });\n patches.push({\n op: 'set',\n path: [...currentPath, '__total'],\n value: parts.length,\n });\n\n for (let i = 0; i < parts.length; i++) {\n patches.push({\n op: 'set',\n path: [...currentPath, String(i + 1)],\n value: parts[i],\n });\n }\n\n return;\n }\n\n if (currentValue === null || typeof currentValue !== 'object') {\n patches.push({ op: 'set', path: currentPath, value: currentValue });\n return;\n }\n\n if (seen.has(currentValue as object)) {\n throw new Error('Cannot serialize circular structures to JSON.');\n }\n\n seen.add(currentValue as object);\n\n if (Array.isArray(currentValue)) {\n for (let i = 0; i < currentValue.length; i++) {\n walk(currentValue[i] as JSONValue, [...currentPath, i]);\n }\n } else {\n for (const key of Object.keys(currentValue)) {\n walk((currentValue as JSONObject)[key], [...currentPath, key]);\n }\n }\n\n seen.delete(currentValue as object);\n };\n\n walk(value, path);\n return patches;\n};\n\n/**\n * Split JSON into chunks constrained by character count of serialized chunk.\n */\nexport const chunkJSON = (\n value: JSONObject | JSONArray,\n maxChars: number\n): JsonChunk[] => {\n if (!isObject(value) && !Array.isArray(value)) {\n throw new Error('Root must be an object or array.');\n }\n\n if (maxChars < 500) {\n // You can lower this if you truly need; recommended to keep some envelope headroom.\n throw new Error('maxChars is too small. Use at least 500 characters.');\n }\n\n const rootType: RootType = Array.isArray(value) ? 
'array' : 'object';\n let sourceString: string;\n\n try {\n sourceString = JSON.stringify(value);\n } catch {\n // Provide a deterministic error message for circular refs\n throw new Error('Cannot serialize circular structures to JSON.');\n }\n\n const checksum = computeDjb2(sourceString);\n const allPatches = flattenToPatches(value as JSONValue, maxChars);\n\n const chunks: JsonChunk[] = [];\n let currentChunk: JsonChunk = {\n schemaVersion: 1,\n index: 0, // provisional\n total: 0, // provisional\n rootType,\n checksum,\n entries: [],\n };\n\n const emptyEnvelopeSize = JSON.stringify({\n ...currentChunk,\n entries: [],\n }).length;\n\n const tryFlush = () => {\n if (currentChunk.entries.length > 0) {\n chunks.push(currentChunk);\n currentChunk = {\n schemaVersion: 1,\n index: 0,\n total: 0,\n rootType,\n checksum,\n entries: [],\n };\n }\n };\n\n for (const patch of allPatches) {\n // Would adding this patch exceed maxChars?\n const withPatchSize =\n emptyEnvelopeSize +\n JSON.stringify(currentChunk.entries).length + // current entries array\n (currentChunk.entries.length ? 1 : 0) + // possible comma\n JSON.stringify(patch).length;\n\n if (withPatchSize <= maxChars) {\n currentChunk.entries.push(patch);\n } else {\n // Start a new chunk if current has items\n if (currentChunk.entries.length > 0) {\n tryFlush();\n }\n\n // Ensure single patch fits into an empty chunk\n const singleSize = emptyEnvelopeSize + JSON.stringify([patch]).length;\n\n if (singleSize > maxChars) {\n // This should only happen for massive strings, which we pre-split;\n // if it happens for a non-string, we cannot split further.\n throw new Error(\n 'A single entry exceeds maxChars and cannot be split. Reduce entry size or increase maxChars.'\n );\n }\n\n currentChunk.entries.push(patch);\n }\n }\n\n tryFlush();\n\n // Ensure at least one chunk exists (even for empty root)\n if (chunks.length === 0) {\n chunks.push({\n schemaVersion: 1,\n index: 0,\n total: 0, // provisional\n rootType,\n checksum,\n entries: [],\n });\n }\n\n // Finalize indices & totals\n const totalChunks = chunks.length;\n\n chunks.forEach((chunk, index) => {\n chunk.index = index;\n chunk.total = totalChunks;\n });\n\n return chunks;\n};\n\n/**\n * Reassemble JSON from chunks.\n * - Validates checksums and indices.\n * - Applies 'set' patches and merges string pieces from 'str-append'.\n */\n/**\n * Reconstruct content from a single chunk without validation.\n * Useful for processing individual chunks in a pipeline where you don't have all chunks yet.\n * Note: This will only reconstruct the partial content contained in this chunk.\n */\nexport const reconstructFromSingleChunk = (\n chunk: JsonChunk\n): JSONObject | JSONArray => {\n const root: any = chunk.rootType === 'array' ? 
[] : {};\n\n // Apply all 'set' patches from this chunk\n for (const entry of chunk.entries) {\n if (entry.op === 'set') {\n setAtPath(root, entry.path, entry.value);\n }\n }\n\n // Reconcile split-node sentinels for strings/arrays\n // When reconstructing from a single chunk, we may have incomplete split nodes\n const reconcileSplitNodes = (node: any): any => {\n if (node === null || typeof node !== 'object') return node;\n\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n node[i] = reconcileSplitNodes(node[i]);\n }\n return node;\n }\n\n // string split-node\n if ((node as any)['__splittedType'] === 'string') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total <= 0) {\n // Invalid split node, return as-is\n return node;\n }\n\n const parts: string[] = [];\n let hasAllParts = true;\n\n for (let i = 1; i <= total; i++) {\n const piece = (node as any)[String(i)];\n\n if (typeof piece !== 'string') {\n hasAllParts = false;\n break;\n }\n\n parts.push(piece);\n }\n\n // Only reconstruct if we have all parts, otherwise return the node as-is\n if (hasAllParts) {\n return parts.join('');\n }\n\n return node;\n }\n\n // array split-node (optional support)\n if ((node as any)['__splittedType'] === 'array') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total < 0) {\n // Invalid split node, return as-is\n return node;\n }\n\n const output: any[] = [];\n let hasAllParts = true;\n\n for (let i = 1; i <= total; i++) {\n const slice = (node as any)[String(i)];\n\n if (!Array.isArray(slice)) {\n hasAllParts = false;\n break;\n }\n\n for (let j = 0; j < slice.length; j++) {\n output.push(reconcileSplitNodes(slice[j]));\n }\n }\n\n // Only reconstruct if we have all parts\n if (hasAllParts) {\n return output;\n }\n\n return node;\n }\n\n // walk normal object\n for (const key of Object.keys(node)) {\n node[key] = reconcileSplitNodes(node[key]);\n }\n\n return node;\n };\n\n return reconcileSplitNodes(root);\n};\n\nexport const assembleJSON = (chunks: JsonChunk[]): JSONObject | JSONArray => {\n if (!chunks || chunks.length === 0) {\n throw new Error('No chunks provided.');\n }\n\n // Basic validation & sort\n const sorted = [...chunks].sort((a, b) => a.index - b.index);\n const { checksum, rootType } = sorted[0];\n const schemaVersion = 1;\n\n for (let i = 0; i < sorted.length; i++) {\n const chunk = sorted[i];\n\n if (chunk.schemaVersion !== schemaVersion) {\n console.error('Unsupported schemaVersion.', {\n cause: chunk,\n schemaVersion,\n });\n throw new Error('Unsupported schemaVersion.');\n }\n\n if (chunk.rootType !== rootType) {\n console.error('Chunks rootType mismatch.', {\n cause: chunk,\n rootType,\n });\n throw new Error('Chunks rootType mismatch.');\n }\n\n if (chunk.checksum !== checksum) {\n console.error('Chunks checksum mismatch (different source objects?).', {\n cause: chunk,\n checksum,\n });\n throw new Error('Chunks checksum mismatch (different source objects?).');\n }\n\n if (chunk.index !== i) {\n console.error('Chunk indices are not contiguous or sorted.', {\n cause: chunk,\n index: chunk.index,\n i,\n });\n throw new Error('Chunk indices are not contiguous or sorted.');\n }\n\n // Defer total check until after reconstruction to prefer more specific errors\n }\n\n const root: any = rootType === 'array' ? 
[] : {};\n\n // Collect string parts by path\n const stringParts = new Map<\n string,\n { path: Path; total: number; received: StrAppendPatch[] }\n >();\n\n const applySet = (patch: SetPatch) =>\n setAtPath(root, patch.path, patch.value);\n\n for (const chunk of sorted) {\n for (const entry of chunk.entries) {\n if (entry.op === 'set') {\n applySet(entry);\n } else {\n const key = pathKey(entry.path);\n const record = stringParts.get(key) ?? {\n path: entry.path,\n total: entry.total,\n received: [],\n };\n\n if (record.total !== entry.total) {\n throw new Error('Inconsistent string part totals for a path.');\n }\n\n record.received.push(entry);\n stringParts.set(key, record);\n }\n }\n }\n\n // Stitch strings\n for (const { path, total, received } of stringParts.values()) {\n if (received.length !== total) {\n throw new Error('Missing string parts for a path; incomplete chunk set.');\n }\n\n received.sort((a, b) => a.index - b.index);\n const fullString = received.map((part) => part.value).join('');\n setAtPath(root, path, fullString);\n }\n\n // Reconcile split-node sentinels for strings/arrays after all patches applied\n const reconcileSplitNodes = (node: any): any => {\n if (node === null || typeof node !== 'object') return node;\n\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n node[i] = reconcileSplitNodes(node[i]);\n }\n return node;\n }\n\n // string split-node\n if ((node as any)['__splittedType'] === 'string') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total <= 0) {\n throw new Error('Invalid split-node total for a path.');\n }\n\n const parts: string[] = [];\n\n for (let i = 1; i <= total; i++) {\n const piece = (node as any)[String(i)];\n\n if (typeof piece !== 'string') {\n throw new Error(\n 'Missing string parts for a path; incomplete chunk set.'\n );\n }\n\n parts.push(piece);\n }\n\n return parts.join('');\n }\n\n // array split-node (optional support)\n if ((node as any)['__splittedType'] === 'array') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total < 0) {\n throw new Error('Invalid split-node total for a path.');\n }\n\n const output: any[] = [];\n\n for (let i = 1; i <= total; i++) {\n const slice = (node as any)[String(i)];\n\n if (!Array.isArray(slice)) {\n throw new Error(\n 'Missing string parts for a path; incomplete chunk set.'\n );\n }\n\n for (let j = 0; j < slice.length; j++) {\n output.push(reconcileSplitNodes(slice[j]));\n }\n }\n\n return output;\n }\n\n // walk normal object\n for (const key of Object.keys(node)) {\n node[key] = reconcileSplitNodes(node[key]);\n }\n\n return node;\n };\n\n const reconciled = reconcileSplitNodes(root);\n\n // Now validate totals match provided count\n for (let i = 0; i < sorted.length; i++) {\n const chunk = sorted[i];\n\n if (chunk.total !== sorted.length) {\n throw new Error(\n `Chunk total does not match provided count. 
Expected ${sorted.length}, but chunk ${i} has total=${chunk.total}`\n );\n }\n }\n\n return reconciled;\n};\n\n/* -------------------------------------------\n * Example usage\n * -------------------------------------------\nconst big: JSONObject = {\n title: \"Document\",\n content: \"…a very very long text…\",\n items: Array.from({ length: 2000 }, (_, i) => ({ id: i, label: `Item ${i}` }))\n};\n\n// Split to ~16k-char chunks\nconst chunks = chunkJSON(big, 16_000);\n\n// Send each `chunks[i]` as JSON to your backend.\n// Later, reassemble:\nconst restored = assembleJSON(chunks);\nconsole.log(JSON.stringify(restored) === JSON.stringify(big)); // true\n*/\n"],"mappings":";;;;;;;;;;AA0CA,MAAM,YAAY,QAAiD;AACjE,QAAO,OAAO,QAAQ,YAAY,QAAQ,QAAQ,CAAC,MAAM,QAAQ,IAAI;;AAGvE,MAAM,eAAe,QAAwB;CAE3C,IAAI,OAAO;AAEX,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,SAAS,QAAQ,KAAK,OAAQ,IAAI,WAAW,EAAE;AAIjD,SAAQ,SAAS,GAAG,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI;;AAGnD,MAAM,aAAa,MAAW,MAAY,UAAqB;CAC7D,IAAI,UAAU;AAEd,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK;EACxC,MAAM,MAAM,KAAK;EAEjB,MAAM,cAAc,OADJ,KAAK,IAAI,OACc;AAEvC,MAAI,OAAO,QAAQ,UAAU;AAC3B,OAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kCAAkC,IAAI;AAGxD,OAAI,QAAQ,SAAS,OACnB,SAAQ,OAAO,cAAc,EAAE,GAAG,EAAE;AAGtC,aAAU,QAAQ;SACb;AACL,OAAI,CAAC,SAAS,QAAQ,CACpB,OAAM,IAAI,MAAM,mCAAmC,IAAI;AAGzD,OAAI,EAAE,OAAO,SACX,CAAC,QAAgB,OAAO,cAAc,EAAE,GAAG,EAAE;AAG/C,aAAW,QAAgB;;;CAI/B,MAAM,OAAO,KAAK,KAAK,SAAS;AAEhC,KAAI,OAAO,SAAS,UAAU;AAC5B,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kCAAkC;AAGpD,UAAQ,QAAQ;QACX;AACL,MAAI,CAAC,SAAS,QAAQ,CACpB,OAAM,IAAI,MAAM,mCAAmC;AAGrD,EAAC,QAAgB,QAAQ;;;AAI7B,MAAM,WAAW,SAAuB;AAEtC,QAAO,KAAK,UAAU,KAAK;;;;;AAM7B,MAAM,uBACJ,KACA,oBACa;AACb,KAAI,mBAAmB,EACrB,OAAM,IAAI,MAAM,uBAAuB;CAGzC,MAAM,SAAmB,EAAE;CAC3B,IAAI,SAAS;AAEb,QAAO,SAAS,IAAI,QAAQ;EAC1B,MAAM,OAAO,SAAS,KAAK;GACzB,WAAW;GACX,YAAY;GACb,CAAC;AAEF,MAAI,CAAC,KAAM;AAEX,SAAO,KAAK,KAAK;AACjB,YAAU,KAAK;;AAGjB,QAAO;;;;;;AAOT,MAAM,oBACJ,OACA,kBACA,OAAa,EAAE,EACf,uBAAO,IAAI,SAAiB,KAChB;CAGZ,MAAM,iBAAiB,KAAK,IAC1B,GACA,KAAK,OAAO,mBAAmB,OAAO,GAAI,CAC3C;CAED,MAAM,UAAmB,EAAE;CAE3B,MAAM,QAAQ,cAAyB,gBAAsB;AAC3D,MAAI,OAAO,iBAAiB,UAAU;GAGpC,MAAM,YAAsB;IAC1B,IAAI;IACJ,MAAM;IACN,OAAO;IACR;AAGD,OAFgB,KAAK,UAAU,UAAU,CAAC,SAAS,OAEpC,kBAAkB;AAC/B,YAAQ,KAAK,UAAU;AACvB;;GAIF,MAAM,QAAQ,oBAAoB,cAAc,eAAe;AAG/D,WAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,iBAAiB;IACxC,OAAO;IACR,CAAC;AACF,WAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,UAAU;IACjC,OAAO,MAAM;IACd,CAAC;AAEF,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,SAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,OAAO,IAAI,EAAE,CAAC;IACrC,OAAO,MAAM;IACd,CAAC;AAGJ;;AAGF,MAAI,iBAAiB,QAAQ,OAAO,iBAAiB,UAAU;AAC7D,WAAQ,KAAK;IAAE,IAAI;IAAO,MAAM;IAAa,OAAO;IAAc,CAAC;AACnE;;AAGF,MAAI,KAAK,IAAI,aAAuB,CAClC,OAAM,IAAI,MAAM,gDAAgD;AAGlE,OAAK,IAAI,aAAuB;AAEhC,MAAI,MAAM,QAAQ,aAAa,CAC7B,MAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,IACvC,MAAK,aAAa,IAAiB,CAAC,GAAG,aAAa,EAAE,CAAC;MAGzD,MAAK,MAAM,OAAO,OAAO,KAAK,aAAa,CACzC,MAAM,aAA4B,MAAM,CAAC,GAAG,aAAa,IAAI,CAAC;AAIlE,OAAK,OAAO,aAAuB;;AAGrC,MAAK,OAAO,KAAK;AACjB,QAAO;;;;;AAMT,MAAa,aACX,OACA,aACgB;AAChB,KAAI,CAAC,SAAS,MAAM,IAAI,CAAC,MAAM,QAAQ,MAAM,CAC3C,OAAM,IAAI,MAAM,mCAAmC;AAGrD,KAAI,WAAW,IAEb,OAAM,IAAI,MAAM,sDAAsD;CAGxE,MAAM,WAAqB,MAAM,QAAQ,MAAM,GAAG,UAAU;CAC5D,IAAI;AAEJ,KAAI;AACF,iBAAe,KAAK,UAAU,MAAM;SAC9B;AAEN,QAAM,IAAI,MAAM,gDAAgD;;CAGlE,MAAM,WAAW,YAAY,aAAa;CAC1C,MAAM,aAAa,iBAAiB,OAAoB,SAAS;CAEjE,MAAM,SAAsB,EAAE;CAC9B,IAAI,eAA0B;EAC5B,eAAe;EACf,OAAO;EACP,OAAO;EACP;EACA;EACA,SAAS,EAAE;EACZ;CAED,MAAM,oBAAoB,KAAK,UAAU;EACvC,GAAG;EACH,SAAS,EAAE;EACZ,CAAC,CAAC;CAEH,MAAM,iBAAi
B;AACrB,MAAI,aAAa,QAAQ,SAAS,GAAG;AACnC,UAAO,KAAK,aAAa;AACzB,kBAAe;IACb,eAAe;IACf,OAAO;IACP,OAAO;IACP;IACA;IACA,SAAS,EAAE;IACZ;;;AAIL,MAAK,MAAM,SAAS,WAQlB,KALE,oBACA,KAAK,UAAU,aAAa,QAAQ,CAAC,UACpC,aAAa,QAAQ,SAAS,IAAI,KACnC,KAAK,UAAU,MAAM,CAAC,UAEH,SACnB,cAAa,QAAQ,KAAK,MAAM;MAC3B;AAEL,MAAI,aAAa,QAAQ,SAAS,EAChC,WAAU;AAMZ,MAFmB,oBAAoB,KAAK,UAAU,CAAC,MAAM,CAAC,CAAC,SAE9C,SAGf,OAAM,IAAI,MACR,+FACD;AAGH,eAAa,QAAQ,KAAK,MAAM;;AAIpC,WAAU;AAGV,KAAI,OAAO,WAAW,EACpB,QAAO,KAAK;EACV,eAAe;EACf,OAAO;EACP,OAAO;EACP;EACA;EACA,SAAS,EAAE;EACZ,CAAC;CAIJ,MAAM,cAAc,OAAO;AAE3B,QAAO,SAAS,OAAO,UAAU;AAC/B,QAAM,QAAQ;AACd,QAAM,QAAQ;GACd;AAEF,QAAO;;;;;;;;;;;;AAaT,MAAa,8BACX,UAC2B;CAC3B,MAAM,OAAY,MAAM,aAAa,UAAU,EAAE,GAAG,EAAE;AAGtD,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,WAAU,MAAM,MAAM,MAAM,MAAM,MAAM;CAM5C,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EAExC,QAAO;GAGT,MAAM,QAAkB,EAAE;GAC1B,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,UAAU;AAC7B,mBAAc;AACd;;AAGF,UAAM,KAAK,MAAM;;AAInB,OAAI,YACF,QAAO,MAAM,KAAK,GAAG;AAGvB,UAAO;;AAIT,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EAEvC,QAAO;GAGT,MAAM,SAAgB,EAAE;GACxB,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,EAAE;AACzB,mBAAc;AACd;;AAGF,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAK9C,OAAI,YACF,QAAO;AAGT,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;AAGT,QAAO,oBAAoB,KAAK;;AAGlC,MAAa,gBAAgB,WAAgD;AAC3E,KAAI,CAAC,UAAU,OAAO,WAAW,EAC/B,OAAM,IAAI,MAAM,sBAAsB;CAIxC,MAAM,SAAS,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;CAC5D,MAAM,EAAE,UAAU,aAAa,OAAO;CACtC,MAAM,gBAAgB;AAEtB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,kBAAkB,eAAe;AACzC,WAAQ,MAAM,8BAA8B;IAC1C,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,6BAA6B;;AAG/C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,6BAA6B;IACzC,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,4BAA4B;;AAG9C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,yDAAyD;IACrE,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,wDAAwD;;AAG1E,MAAI,MAAM,UAAU,GAAG;AACrB,WAAQ,MAAM,+CAA+C;IAC3D,OAAO;IACP,OAAO,MAAM;IACb;IACD,CAAC;AACF,SAAM,IAAI,MAAM,8CAA8C;;;CAMlE,MAAM,OAAY,aAAa,UAAU,EAAE,GAAG,EAAE;CAGhD,MAAM,8BAAc,IAAI,KAGrB;CAEH,MAAM,YAAY,UAChB,UAAU,MAAM,MAAM,MAAM,MAAM,MAAM;AAE1C,MAAK,MAAM,SAAS,OAClB,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,UAAS,MAAM;MACV;EACL,MAAM,MAAM,QAAQ,MAAM,KAAK;EAC/B,MAAM,SAAS,YAAY,IAAI,IAAI,IAAI;GACrC,MAAM,MAAM;GACZ,OAAO,MAAM;GACb,UAAU,EAAE;GACb;AAED,MAAI,OAAO,UAAU,MAAM,MACzB,OAAM,IAAI,MAAM,8CAA8C;AAGhE,SAAO,SAAS,KAAK,MAAM;AAC3B,cAAY,IAAI,KAAK,OAAO;;AAMlC,MAAK,MAAM,EAAE,MAAM,OAAO,cAAc,YAAY,QAAQ,EAAE;AAC5D,MAAI,SAAS,WAAW,MACtB,OAAM,IAAI,MAAM,yDAAyD;AAG3E,WAAS,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAE1C,YAAU,MAAM,MADG,SAAS,KAAK,SAAS,KAAK,MAAM,CAAC,KAAK,GAAG,CAC7B;;CAInC,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EACxC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAM,QAAkB,EAAE;AAE1B,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,SACnB,OAAM,IAAI,MACR,yDACD;AAGH,UAAM,KAAK,MAAM;;AAGnB,UAAO,
MAAM,KAAK,GAAG;;AAIvB,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EACvC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAM,SAAgB,EAAE;AAExB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,CACvB,OAAM,IAAI,MACR,yDACD;AAGH,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAI9C,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;CAGT,MAAM,aAAa,oBAAoB,KAAK;AAG5C,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,UAAU,OAAO,OACzB,OAAM,IAAI,MACR,uDAAuD,OAAO,OAAO,cAAc,EAAE,aAAa,MAAM,QACzG;;AAIL,QAAO"}
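For reference, the `chunkJSON`/`assembleJSON` pair shown in this map splits a JSON value into size-bounded chunks (each carrying `schemaVersion`, `index`, `total`, `rootType`, `checksum`, and `entries`) and stitches them back together with checksum and index validation. A minimal round-trip sketch, adapted from the example block embedded in the source; the import path is hypothetical:

```typescript
// Hypothetical import path; the diff only shows the compiled utils/chunkJSON output.
import { chunkJSON, assembleJSON } from './utils/chunkJSON';

const big = {
  title: 'Document',
  content: '…a very very long text…',
  items: Array.from({ length: 2000 }, (_, i) => ({ id: i, label: `Item ${i}` })),
};

// Split so each serialized chunk stays under ~16k characters.
const chunks = chunkJSON(big, 16_000);

// Send each `chunks[i]` independently, then reassemble on the other side.
const restored = assembleJSON(chunks);
console.log(JSON.stringify(restored) === JSON.stringify(big)); // true
```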
@@ -1 +1 @@
- {"version":3,"file":"pLimit.mjs","names":["#head","#tail","#size","idleWaiters: Array<() => void>"],"sources":["../../../src/utils/pLimit.ts"],"sourcesContent":["/*\nHow it works:\n`this.#head` is an instance of `Node` which keeps track of its current value and nests another instance of `Node` that keeps the value that comes after it. When a value is provided to `.enqueue()`, the code needs to iterate through `this.#head`, going deeper and deeper to find the last value. However, iterating through every single item is slow. This problem is solved by saving a reference to the last value as `this.#tail` so that it can reference it to add a new value.\n*/\n\nclass Node<T> {\n value: T;\n next: Node<T> | undefined;\n\n constructor(value: T) {\n this.value = value;\n }\n}\n\nexport class Queue<T> {\n #head: Node<T> | undefined;\n #tail: Node<T> | undefined;\n #size!: number;\n\n constructor() {\n this.clear();\n }\n\n enqueue(value: T): void {\n const node = new Node(value);\n\n if (this.#head) {\n this.#tail!.next = node;\n this.#tail = node;\n } else {\n this.#head = node;\n this.#tail = node;\n }\n\n this.#size++;\n }\n\n dequeue(): T | undefined {\n const current = this.#head;\n if (!current) {\n return;\n }\n\n this.#head = current.next;\n this.#size--;\n return current.value;\n }\n\n peek(): T | undefined {\n if (!this.#head) {\n return;\n }\n\n return this.#head.value;\n\n // TODO: Node.js 18.\n // return this.#head?.value;\n }\n\n clear(): void {\n this.#head = undefined;\n this.#tail = undefined;\n this.#size = 0;\n }\n\n get size(): number {\n return this.#size;\n }\n\n *[Symbol.iterator](): Generator<T, void, unknown> {\n let current = this.#head;\n\n while (current) {\n yield current.value;\n current = current.next;\n }\n }\n\n *drain(): Generator<T | undefined, void, unknown> {\n while (this.#head) {\n yield this.dequeue();\n }\n }\n}\n\nexport const pLimit = (concurrency: number) => {\n validateConcurrency(concurrency);\n\n const queue = new Queue<() => void>();\n let activeCount = 0;\n\n const idleWaiters: Array<() => void> = [];\n const notifyIdleIfNeeded = (): void => {\n if (activeCount === 0 && queue.size === 0) {\n while (idleWaiters.length) idleWaiters.pop()?.();\n }\n };\n\n const resumeNext = (): void => {\n // Process the next queued function if we're under the concurrency limit\n if (activeCount < concurrency && queue.size > 0) {\n activeCount++;\n queue.dequeue()?.();\n }\n };\n\n const next = (): void => {\n activeCount--;\n resumeNext();\n };\n\n const run = async (\n fn: (...args: any[]) => Promise<any>,\n resolve: (value: Promise<any>) => void,\n arguments_: any[]\n ): Promise<void> => {\n // Execute the function and capture the result promise\n const result = (async () => fn(...arguments_))();\n\n // Resolve immediately with the promise (don't wait for completion)\n resolve(result);\n\n // Wait for the function to complete (success or failure)\n // We catch errors here to prevent unhandled rejections,\n // but the original promise rejection is preserved for the caller\n try {\n await result;\n } catch {}\n\n // Decrement active count and process next queued function\n next();\n };\n\n const enqueue = (\n fn: (...args: any[]) => Promise<any>,\n resolve: (value: Promise<any>) => void,\n arguments_: any[]\n ): void => {\n // Queue the internal resolve function instead of the run function\n // to preserve the asynchronous execution context.\n new Promise<void>((internalResolve) => {\n queue.enqueue(internalResolve);\n }).then(run.bind(undefined, fn, resolve, 
arguments_));\n\n // Start processing immediately if we haven't reached the concurrency limit\n if (activeCount < concurrency) {\n resumeNext();\n }\n };\n\n const generator = (\n fn: (...args: any[]) => Promise<any>,\n ...arguments_: any[]\n ): Promise<any> =>\n new Promise<any>((resolve) => {\n enqueue(fn, resolve, arguments_);\n });\n\n Object.defineProperties(generator, {\n activeCount: {\n get: () => activeCount,\n },\n pendingCount: {\n get: () => queue.size,\n },\n clearQueue: {\n value() {\n queue.clear();\n notifyIdleIfNeeded();\n },\n },\n concurrency: {\n get: () => concurrency,\n\n set(newConcurrency: number) {\n validateConcurrency(newConcurrency);\n concurrency = newConcurrency;\n\n queueMicrotask(() => {\n while (activeCount < concurrency && queue.size > 0) {\n resumeNext();\n }\n });\n },\n },\n map: {\n async value<T, R>(\n array: T[],\n fn: (value: T, index: number) => Promise<R>\n ): Promise<R[]> {\n const promises = array.map((value, index) => this(fn, value, index));\n return Promise.all(promises);\n },\n },\n onIdle: {\n /**\n * Resolves when `activeCount === 0` and the queue is empty.\n * Use this to wait for completion without holding a list of Promises.\n */\n value(): Promise<void> {\n if (activeCount === 0 && queue.size === 0) return Promise.resolve();\n return new Promise<void>((resolve) => idleWaiters.push(resolve));\n },\n },\n });\n\n return generator;\n};\n\nconst validateConcurrency = (concurrency: number): void => {\n if (\n !(\n (Number.isInteger(concurrency) ||\n concurrency === Number.POSITIVE_INFINITY) &&\n concurrency > 0\n )\n ) {\n throw new TypeError('Expected `concurrency` to be a number from 1 and up');\n }\n};\n"],"mappings":";AAKA,IAAM,OAAN,MAAc;CACZ;CACA;CAEA,YAAY,OAAU;AACpB,OAAK,QAAQ;;;AAIjB,IAAa,QAAb,MAAsB;CACpB;CACA;CACA;CAEA,cAAc;AACZ,OAAK,OAAO;;CAGd,QAAQ,OAAgB;EACtB,MAAM,OAAO,IAAI,KAAK,MAAM;AAE5B,MAAI,MAAKA,MAAO;AACd,SAAKC,KAAO,OAAO;AACnB,SAAKA,OAAQ;SACR;AACL,SAAKD,OAAQ;AACb,SAAKC,OAAQ;;AAGf,QAAKC;;CAGP,UAAyB;EACvB,MAAM,UAAU,MAAKF;AACrB,MAAI,CAAC,QACH;AAGF,QAAKA,OAAQ,QAAQ;AACrB,QAAKE;AACL,SAAO,QAAQ;;CAGjB,OAAsB;AACpB,MAAI,CAAC,MAAKF,KACR;AAGF,SAAO,MAAKA,KAAM;;CAMpB,QAAc;AACZ,QAAKA,OAAQ;AACb,QAAKC,OAAQ;AACb,QAAKC,OAAQ;;CAGf,IAAI,OAAe;AACjB,SAAO,MAAKA;;CAGd,EAAE,OAAO,YAAyC;EAChD,IAAI,UAAU,MAAKF;AAEnB,SAAO,SAAS;AACd,SAAM,QAAQ;AACd,aAAU,QAAQ;;;CAItB,CAAC,QAAiD;AAChD,SAAO,MAAKA,KACV,OAAM,KAAK,SAAS;;;AAK1B,MAAa,UAAU,gBAAwB;AAC7C,qBAAoB,YAAY;CAEhC,MAAM,QAAQ,IAAI,OAAmB;CACrC,IAAI,cAAc;CAElB,MAAMG,cAAiC,EAAE;CACzC,MAAM,2BAAiC;AACrC,MAAI,gBAAgB,KAAK,MAAM,SAAS,EACtC,QAAO,YAAY,OAAQ,aAAY,KAAK,IAAI;;CAIpD,MAAM,mBAAyB;AAE7B,MAAI,cAAc,eAAe,MAAM,OAAO,GAAG;AAC/C;AACA,SAAM,SAAS,IAAI;;;CAIvB,MAAM,aAAmB;AACvB;AACA,cAAY;;CAGd,MAAM,MAAM,OACV,IACA,SACA,eACkB;EAElB,MAAM,UAAU,YAAY,GAAG,GAAG,WAAW,GAAG;AAGhD,UAAQ,OAAO;AAKf,MAAI;AACF,SAAM;UACA;AAGR,QAAM;;CAGR,MAAM,WACJ,IACA,SACA,eACS;AAGT,MAAI,SAAe,oBAAoB;AACrC,SAAM,QAAQ,gBAAgB;IAC9B,CAAC,KAAK,IAAI,KAAK,QAAW,IAAI,SAAS,WAAW,CAAC;AAGrD,MAAI,cAAc,YAChB,aAAY;;CAIhB,MAAM,aACJ,IACA,GAAG,eAEH,IAAI,SAAc,YAAY;AAC5B,UAAQ,IAAI,SAAS,WAAW;GAChC;AAEJ,QAAO,iBAAiB,WAAW;EACjC,aAAa,EACX,WAAW,aACZ;EACD,cAAc,EACZ,WAAW,MAAM,MAClB;EACD,YAAY,EACV,QAAQ;AACN,SAAM,OAAO;AACb,uBAAoB;KAEvB;EACD,aAAa;GACX,WAAW;GAEX,IAAI,gBAAwB;AAC1B,wBAAoB,eAAe;AACnC,kBAAc;AAEd,yBAAqB;AACnB,YAAO,cAAc,eAAe,MAAM,OAAO,EAC/C,aAAY;MAEd;;GAEL;EACD,KAAK,EACH,MAAM,MACJ,OACA,IACc;GACd,MAAM,WAAW,MAAM,KAAK,OAAO,UAAU,KAAK,IAAI,OAAO,MAAM,CAAC;AACpE,UAAO,QAAQ,IAAI,SAAS;KAE/B;EACD,QAAQ,EAKN,QAAuB;AACrB,OAAI,gBAAgB,KAAK,MAAM,SAAS,EAAG,QAAO,QAAQ,SAAS;AACnE,UAAO,IAAI,SAAe,YAAY,YAAY
,KAAK,QAAQ,CAAC;KAEnE;EACF,CAAC;AAEF,QAAO;;AAGT,MAAM,uBAAuB,gBAA8B;AACzD,KACE,GACG,OAAO,UAAU,YAAY,IAC5B,gBAAgB,OAAO,sBACzB,cAAc,GAGhB,OAAM,IAAI,UAAU,sDAAsD"}
+ {"version":3,"file":"pLimit.mjs","names":["#head","#tail","#size"],"sources":["../../../src/utils/pLimit.ts"],"sourcesContent":["/*\nHow it works:\n`this.#head` is an instance of `Node` which keeps track of its current value and nests another instance of `Node` that keeps the value that comes after it. When a value is provided to `.enqueue()`, the code needs to iterate through `this.#head`, going deeper and deeper to find the last value. However, iterating through every single item is slow. This problem is solved by saving a reference to the last value as `this.#tail` so that it can reference it to add a new value.\n*/\n\nclass Node<T> {\n value: T;\n next: Node<T> | undefined;\n\n constructor(value: T) {\n this.value = value;\n }\n}\n\nexport class Queue<T> {\n #head: Node<T> | undefined;\n #tail: Node<T> | undefined;\n #size!: number;\n\n constructor() {\n this.clear();\n }\n\n enqueue(value: T): void {\n const node = new Node(value);\n\n if (this.#head) {\n this.#tail!.next = node;\n this.#tail = node;\n } else {\n this.#head = node;\n this.#tail = node;\n }\n\n this.#size++;\n }\n\n dequeue(): T | undefined {\n const current = this.#head;\n if (!current) {\n return;\n }\n\n this.#head = current.next;\n this.#size--;\n return current.value;\n }\n\n peek(): T | undefined {\n if (!this.#head) {\n return;\n }\n\n return this.#head.value;\n\n // TODO: Node.js 18.\n // return this.#head?.value;\n }\n\n clear(): void {\n this.#head = undefined;\n this.#tail = undefined;\n this.#size = 0;\n }\n\n get size(): number {\n return this.#size;\n }\n\n *[Symbol.iterator](): Generator<T, void, unknown> {\n let current = this.#head;\n\n while (current) {\n yield current.value;\n current = current.next;\n }\n }\n\n *drain(): Generator<T | undefined, void, unknown> {\n while (this.#head) {\n yield this.dequeue();\n }\n }\n}\n\nexport const pLimit = (concurrency: number) => {\n validateConcurrency(concurrency);\n\n const queue = new Queue<() => void>();\n let activeCount = 0;\n\n const idleWaiters: Array<() => void> = [];\n const notifyIdleIfNeeded = (): void => {\n if (activeCount === 0 && queue.size === 0) {\n while (idleWaiters.length) idleWaiters.pop()?.();\n }\n };\n\n const resumeNext = (): void => {\n // Process the next queued function if we're under the concurrency limit\n if (activeCount < concurrency && queue.size > 0) {\n activeCount++;\n queue.dequeue()?.();\n }\n };\n\n const next = (): void => {\n activeCount--;\n resumeNext();\n };\n\n const run = async (\n fn: (...args: any[]) => Promise<any>,\n resolve: (value: Promise<any>) => void,\n arguments_: any[]\n ): Promise<void> => {\n // Execute the function and capture the result promise\n const result = (async () => fn(...arguments_))();\n\n // Resolve immediately with the promise (don't wait for completion)\n resolve(result);\n\n // Wait for the function to complete (success or failure)\n // We catch errors here to prevent unhandled rejections,\n // but the original promise rejection is preserved for the caller\n try {\n await result;\n } catch {}\n\n // Decrement active count and process next queued function\n next();\n };\n\n const enqueue = (\n fn: (...args: any[]) => Promise<any>,\n resolve: (value: Promise<any>) => void,\n arguments_: any[]\n ): void => {\n // Queue the internal resolve function instead of the run function\n // to preserve the asynchronous execution context.\n new Promise<void>((internalResolve) => {\n queue.enqueue(internalResolve);\n }).then(run.bind(undefined, fn, resolve, arguments_));\n\n // Start processing 
immediately if we haven't reached the concurrency limit\n if (activeCount < concurrency) {\n resumeNext();\n }\n };\n\n const generator = (\n fn: (...args: any[]) => Promise<any>,\n ...arguments_: any[]\n ): Promise<any> =>\n new Promise<any>((resolve) => {\n enqueue(fn, resolve, arguments_);\n });\n\n Object.defineProperties(generator, {\n activeCount: {\n get: () => activeCount,\n },\n pendingCount: {\n get: () => queue.size,\n },\n clearQueue: {\n value() {\n queue.clear();\n notifyIdleIfNeeded();\n },\n },\n concurrency: {\n get: () => concurrency,\n\n set(newConcurrency: number) {\n validateConcurrency(newConcurrency);\n concurrency = newConcurrency;\n\n queueMicrotask(() => {\n while (activeCount < concurrency && queue.size > 0) {\n resumeNext();\n }\n });\n },\n },\n map: {\n async value<T, R>(\n array: T[],\n fn: (value: T, index: number) => Promise<R>\n ): Promise<R[]> {\n const promises = array.map((value, index) => this(fn, value, index));\n return Promise.all(promises);\n },\n },\n onIdle: {\n /**\n * Resolves when `activeCount === 0` and the queue is empty.\n * Use this to wait for completion without holding a list of Promises.\n */\n value(): Promise<void> {\n if (activeCount === 0 && queue.size === 0) return Promise.resolve();\n return new Promise<void>((resolve) => idleWaiters.push(resolve));\n },\n },\n });\n\n return generator;\n};\n\nconst validateConcurrency = (concurrency: number): void => {\n if (\n !(\n (Number.isInteger(concurrency) ||\n concurrency === Number.POSITIVE_INFINITY) &&\n concurrency > 0\n )\n ) {\n throw new TypeError('Expected `concurrency` to be a number from 1 and up');\n }\n};\n"],"mappings":";AAKA,IAAM,OAAN,MAAc;CACZ;CACA;CAEA,YAAY,OAAU;AACpB,OAAK,QAAQ;;;AAIjB,IAAa,QAAb,MAAsB;CACpB;CACA;CACA;CAEA,cAAc;AACZ,OAAK,OAAO;;CAGd,QAAQ,OAAgB;EACtB,MAAM,OAAO,IAAI,KAAK,MAAM;AAE5B,MAAI,MAAKA,MAAO;AACd,SAAKC,KAAO,OAAO;AACnB,SAAKA,OAAQ;SACR;AACL,SAAKD,OAAQ;AACb,SAAKC,OAAQ;;AAGf,QAAKC;;CAGP,UAAyB;EACvB,MAAM,UAAU,MAAKF;AACrB,MAAI,CAAC,QACH;AAGF,QAAKA,OAAQ,QAAQ;AACrB,QAAKE;AACL,SAAO,QAAQ;;CAGjB,OAAsB;AACpB,MAAI,CAAC,MAAKF,KACR;AAGF,SAAO,MAAKA,KAAM;;CAMpB,QAAc;AACZ,QAAKA,OAAQ;AACb,QAAKC,OAAQ;AACb,QAAKC,OAAQ;;CAGf,IAAI,OAAe;AACjB,SAAO,MAAKA;;CAGd,EAAE,OAAO,YAAyC;EAChD,IAAI,UAAU,MAAKF;AAEnB,SAAO,SAAS;AACd,SAAM,QAAQ;AACd,aAAU,QAAQ;;;CAItB,CAAC,QAAiD;AAChD,SAAO,MAAKA,KACV,OAAM,KAAK,SAAS;;;AAK1B,MAAa,UAAU,gBAAwB;AAC7C,qBAAoB,YAAY;CAEhC,MAAM,QAAQ,IAAI,OAAmB;CACrC,IAAI,cAAc;CAElB,MAAM,cAAiC,EAAE;CACzC,MAAM,2BAAiC;AACrC,MAAI,gBAAgB,KAAK,MAAM,SAAS,EACtC,QAAO,YAAY,OAAQ,aAAY,KAAK,IAAI;;CAIpD,MAAM,mBAAyB;AAE7B,MAAI,cAAc,eAAe,MAAM,OAAO,GAAG;AAC/C;AACA,SAAM,SAAS,IAAI;;;CAIvB,MAAM,aAAmB;AACvB;AACA,cAAY;;CAGd,MAAM,MAAM,OACV,IACA,SACA,eACkB;EAElB,MAAM,UAAU,YAAY,GAAG,GAAG,WAAW,GAAG;AAGhD,UAAQ,OAAO;AAKf,MAAI;AACF,SAAM;UACA;AAGR,QAAM;;CAGR,MAAM,WACJ,IACA,SACA,eACS;AAGT,MAAI,SAAe,oBAAoB;AACrC,SAAM,QAAQ,gBAAgB;IAC9B,CAAC,KAAK,IAAI,KAAK,QAAW,IAAI,SAAS,WAAW,CAAC;AAGrD,MAAI,cAAc,YAChB,aAAY;;CAIhB,MAAM,aACJ,IACA,GAAG,eAEH,IAAI,SAAc,YAAY;AAC5B,UAAQ,IAAI,SAAS,WAAW;GAChC;AAEJ,QAAO,iBAAiB,WAAW;EACjC,aAAa,EACX,WAAW,aACZ;EACD,cAAc,EACZ,WAAW,MAAM,MAClB;EACD,YAAY,EACV,QAAQ;AACN,SAAM,OAAO;AACb,uBAAoB;KAEvB;EACD,aAAa;GACX,WAAW;GAEX,IAAI,gBAAwB;AAC1B,wBAAoB,eAAe;AACnC,kBAAc;AAEd,yBAAqB;AACnB,YAAO,cAAc,eAAe,MAAM,OAAO,EAC/C,aAAY;MAEd;;GAEL;EACD,KAAK,EACH,MAAM,MACJ,OACA,IACc;GACd,MAAM,WAAW,MAAM,KAAK,OAAO,UAAU,KAAK,IAAI,OAAO,MAAM,CAAC;AACpE,UAAO,QAAQ,IAAI,SAAS;KAE/B;EACD,QAAQ,EAKN,QAAuB;AACrB,OAAI,gBAAgB,KAAK,MAAM,SAAS,EAAG,QAAO,QAAQ,SAAS;AACnE,UAAO,IAAI,SAAe,YAAY,YAAY,KAAK,QAAQ,CAAC;KAEnE;EACF,CAAC;AAEF,QA
AO;;AAGT,MAAM,uBAAuB,gBAA8B;AACzD,KACE,GACG,OAAO,UAAU,YAAY,IAC5B,gBAAgB,OAAO,sBACzB,cAAc,GAGhB,OAAM,IAAI,UAAU,sDAAsD"}
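The `pLimit` implementation above exposes extra controls (`activeCount`, `pendingCount`, `clearQueue`, a writable `concurrency`, `map`, and `onIdle`) via `Object.defineProperties`, so they are not reflected in the returned function's type. A small usage sketch under that assumption; the import path is hypothetical:

```typescript
import { pLimit } from './utils/pLimit'; // hypothetical path

const limit = pLimit(2); // at most two callbacks in flight

for (let n = 1; n <= 5; n++) {
  // Excess calls queue; each returned promise settles with the callback's result.
  void limit(async () => {
    await new Promise((resolve) => setTimeout(resolve, 10 * n));
    console.log(`task ${n} done`);
  });
}

// onIdle() resolves once activeCount === 0 and the queue is empty,
// letting callers await drain without holding every task promise.
await (limit as any).onIdle();
```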
@@ -1 +1 @@
- {"version":3,"file":"parallelizeGlobal.mjs","names":["_globalLimiter: ReturnType<typeof pLimit> | null","_taskLimiter: ReturnType<typeof pLimit> | null"],"sources":["../../../src/utils/parallelizeGlobal.ts"],"sourcesContent":["// === keep your Queue, Node, pLimit, validateConcurrency exactly as-is ===\n\nimport { pLimit } from './pLimit';\n\n// --- NEW: single, shared limiter for the whole process ---\nlet _globalLimiter: ReturnType<typeof pLimit> | null = null;\n\n/** Get (and optionally configure) the single, shared limiter. */\nexport const getGlobalLimiter = (concurrency?: number) => {\n if (!_globalLimiter) {\n _globalLimiter = pLimit(concurrency ?? 10);\n } else if (typeof concurrency === 'number') {\n (_globalLimiter as any).concurrency = concurrency;\n }\n return _globalLimiter!;\n};\n\n// --- NEW: task limiter for gating task starts ---\nlet _taskLimiter: ReturnType<typeof pLimit> | null = null;\n\n/** Limits how many *tasks* run concurrently (independent from AI calls). */\nexport const getTaskLimiter = (concurrency?: number) => {\n if (!_taskLimiter) {\n _taskLimiter = pLimit(concurrency ?? 5);\n } else if (typeof concurrency === 'number') {\n (_taskLimiter as any).concurrency = concurrency;\n }\n return _taskLimiter!;\n};\n\n// --- REPLACE your existing `parallelize` with this version ---\nexport const parallelizeGlobal = async <T, R>(\n items: T[],\n callback: (item: T, index: number) => Promise<R> = async (item) =>\n item as unknown as Promise<R>,\n options?: {\n /** Share a single limiter across the app. If omitted, uses global limiter. */\n limiter?: ReturnType<typeof pLimit>;\n /** If provided and no limiter is passed, configure the global limiter. */\n concurrency?: number;\n /**\n * Whether to wrap *each* callback run in the limiter.\n * - For orchestration (outer loops): set `false` (don't burn limiter slots).\n * - For atomic work (AI calls, writes): leave `true` (default).\n */\n wrapInLimiter?: boolean;\n }\n): Promise<R[]> => {\n const limiter = options?.limiter ?? getGlobalLimiter(options?.concurrency);\n const wrap = options?.wrapInLimiter ?? true;\n\n const run = wrap\n ? <U>(fn: () => Promise<U>) => limiter(fn)\n : <U>(fn: () => Promise<U>) => fn();\n\n const promises = items.map((item, index) => run(() => callback(item, index)));\n return Promise.all(promises);\n};\n"],"mappings":";;;AAKA,IAAIA,iBAAmD;;AAGvD,MAAa,oBAAoB,gBAAyB;AACxD,KAAI,CAAC,eACH,kBAAiB,OAAO,eAAe,GAAG;UACjC,OAAO,gBAAgB,SAChC,CAAC,eAAuB,cAAc;AAExC,QAAO;;AAIT,IAAIC,eAAiD;;AAGrD,MAAa,kBAAkB,gBAAyB;AACtD,KAAI,CAAC,aACH,gBAAe,OAAO,eAAe,EAAE;UAC9B,OAAO,gBAAgB,SAChC,CAAC,aAAqB,cAAc;AAEtC,QAAO;;AAIT,MAAa,oBAAoB,OAC/B,OACA,WAAmD,OAAO,SACxD,MACF,YAYiB;CACjB,MAAM,UAAU,SAAS,WAAW,iBAAiB,SAAS,YAAY;CAG1E,MAAM,MAFO,SAAS,iBAAiB,QAG/B,OAAyB,QAAQ,GAAG,IACpC,OAAyB,IAAI;CAErC,MAAM,WAAW,MAAM,KAAK,MAAM,UAAU,UAAU,SAAS,MAAM,MAAM,CAAC,CAAC;AAC7E,QAAO,QAAQ,IAAI,SAAS"}
+ {"version":3,"file":"parallelizeGlobal.mjs","names":[],"sources":["../../../src/utils/parallelizeGlobal.ts"],"sourcesContent":["// === keep your Queue, Node, pLimit, validateConcurrency exactly as-is ===\n\nimport { pLimit } from './pLimit';\n\n// --- NEW: single, shared limiter for the whole process ---\nlet _globalLimiter: ReturnType<typeof pLimit> | null = null;\n\n/** Get (and optionally configure) the single, shared limiter. */\nexport const getGlobalLimiter = (concurrency?: number) => {\n if (!_globalLimiter) {\n _globalLimiter = pLimit(concurrency ?? 10);\n } else if (typeof concurrency === 'number') {\n (_globalLimiter as any).concurrency = concurrency;\n }\n return _globalLimiter!;\n};\n\n// --- NEW: task limiter for gating task starts ---\nlet _taskLimiter: ReturnType<typeof pLimit> | null = null;\n\n/** Limits how many *tasks* run concurrently (independent from AI calls). */\nexport const getTaskLimiter = (concurrency?: number) => {\n if (!_taskLimiter) {\n _taskLimiter = pLimit(concurrency ?? 5);\n } else if (typeof concurrency === 'number') {\n (_taskLimiter as any).concurrency = concurrency;\n }\n return _taskLimiter!;\n};\n\n// --- REPLACE your existing `parallelize` with this version ---\nexport const parallelizeGlobal = async <T, R>(\n items: T[],\n callback: (item: T, index: number) => Promise<R> = async (item) =>\n item as unknown as Promise<R>,\n options?: {\n /** Share a single limiter across the app. If omitted, uses global limiter. */\n limiter?: ReturnType<typeof pLimit>;\n /** If provided and no limiter is passed, configure the global limiter. */\n concurrency?: number;\n /**\n * Whether to wrap *each* callback run in the limiter.\n * - For orchestration (outer loops): set `false` (don't burn limiter slots).\n * - For atomic work (AI calls, writes): leave `true` (default).\n */\n wrapInLimiter?: boolean;\n }\n): Promise<R[]> => {\n const limiter = options?.limiter ?? getGlobalLimiter(options?.concurrency);\n const wrap = options?.wrapInLimiter ?? true;\n\n const run = wrap\n ? <U>(fn: () => Promise<U>) => limiter(fn)\n : <U>(fn: () => Promise<U>) => fn();\n\n const promises = items.map((item, index) => run(() => callback(item, index)));\n return Promise.all(promises);\n};\n"],"mappings":";;;AAKA,IAAI,iBAAmD;;AAGvD,MAAa,oBAAoB,gBAAyB;AACxD,KAAI,CAAC,eACH,kBAAiB,OAAO,eAAe,GAAG;UACjC,OAAO,gBAAgB,SAChC,CAAC,eAAuB,cAAc;AAExC,QAAO;;AAIT,IAAI,eAAiD;;AAGrD,MAAa,kBAAkB,gBAAyB;AACtD,KAAI,CAAC,aACH,gBAAe,OAAO,eAAe,EAAE;UAC9B,OAAO,gBAAgB,SAChC,CAAC,aAAqB,cAAc;AAEtC,QAAO;;AAIT,MAAa,oBAAoB,OAC/B,OACA,WAAmD,OAAO,SACxD,MACF,YAYiB;CACjB,MAAM,UAAU,SAAS,WAAW,iBAAiB,SAAS,YAAY;CAG1E,MAAM,MAFO,SAAS,iBAAiB,QAG/B,OAAyB,QAAQ,GAAG,IACpC,OAAyB,IAAI;CAErC,MAAM,WAAW,MAAM,KAAK,MAAM,UAAU,UAAU,SAAS,MAAM,MAAM,CAAC,CAAC;AAC7E,QAAO,QAAQ,IAAI,SAAS"}
@@ -1 +1 @@
- {"version":3,"file":"reduceObjectFormat.mjs","names":["result: Record<string, Recursive>"],"sources":["../../../src/utils/reduceObjectFormat.ts"],"sourcesContent":["type Primitive = string | number | boolean | null | undefined;\n\ntype Recursive = Primitive | { [key: string]: Recursive } | Array<Recursive>;\n\n/**\n * Reduce an object to only the shape provided by a format object.\n * Values are always taken from the source object; the format is used only for structure.\n *\n * Examples:\n * reduceObjectFormat({ a: 1, b: 2 }, { a: 0 }) => { a: 1 }\n * reduceObjectFormat({ a: { x: 1, y: 2 } }, { a: { x: 0 } }) => { a: { x: 1 } }\n */\nexport const reduceObjectFormat = (\n source: Recursive,\n format: Recursive\n): Recursive => {\n // If the format is an array, reduce each element by its counterpart in source\n if (Array.isArray(format)) {\n const sourceArray = Array.isArray(source) ? source : [];\n return format.map((formatItem, index) =>\n reduceObjectFormat(sourceArray[index], formatItem)\n );\n }\n\n // If the format is an object (and not null), pick matching keys and recurse\n if (typeof format === 'object' && format !== null) {\n const result: Record<string, Recursive> = {};\n const sourceObject =\n typeof source === 'object' && source !== null && !Array.isArray(source)\n ? (source as Record<string, Recursive>)\n : ({} as Record<string, Recursive>);\n\n for (const key of Object.keys(format)) {\n const nextSource = sourceObject[key];\n const nextFormat = (format as Record<string, Recursive>)[key];\n result[key] = reduceObjectFormat(nextSource, nextFormat);\n }\n return result;\n }\n\n // For primitives in the format, simply return the source value (can be undefined)\n return source as Primitive;\n};\n"],"mappings":";;;;;;;;;AAYA,MAAa,sBACX,QACA,WACc;AAEd,KAAI,MAAM,QAAQ,OAAO,EAAE;EACzB,MAAM,cAAc,MAAM,QAAQ,OAAO,GAAG,SAAS,EAAE;AACvD,SAAO,OAAO,KAAK,YAAY,UAC7B,mBAAmB,YAAY,QAAQ,WAAW,CACnD;;AAIH,KAAI,OAAO,WAAW,YAAY,WAAW,MAAM;EACjD,MAAMA,SAAoC,EAAE;EAC5C,MAAM,eACJ,OAAO,WAAW,YAAY,WAAW,QAAQ,CAAC,MAAM,QAAQ,OAAO,GAClE,SACA,EAAE;AAET,OAAK,MAAM,OAAO,OAAO,KAAK,OAAO,EAAE;GACrC,MAAM,aAAa,aAAa;GAChC,MAAM,aAAc,OAAqC;AACzD,UAAO,OAAO,mBAAmB,YAAY,WAAW;;AAE1D,SAAO;;AAIT,QAAO"}
+ {"version":3,"file":"reduceObjectFormat.mjs","names":[],"sources":["../../../src/utils/reduceObjectFormat.ts"],"sourcesContent":["type Primitive = string | number | boolean | null | undefined;\n\ntype Recursive = Primitive | { [key: string]: Recursive } | Array<Recursive>;\n\n/**\n * Reduce an object to only the shape provided by a format object.\n * Values are always taken from the source object; the format is used only for structure.\n *\n * Examples:\n * reduceObjectFormat({ a: 1, b: 2 }, { a: 0 }) => { a: 1 }\n * reduceObjectFormat({ a: { x: 1, y: 2 } }, { a: { x: 0 } }) => { a: { x: 1 } }\n */\nexport const reduceObjectFormat = (\n source: Recursive,\n format: Recursive\n): Recursive => {\n // If the format is an array, reduce each element by its counterpart in source\n if (Array.isArray(format)) {\n const sourceArray = Array.isArray(source) ? source : [];\n return format.map((formatItem, index) =>\n reduceObjectFormat(sourceArray[index], formatItem)\n );\n }\n\n // If the format is an object (and not null), pick matching keys and recurse\n if (typeof format === 'object' && format !== null) {\n const result: Record<string, Recursive> = {};\n const sourceObject =\n typeof source === 'object' && source !== null && !Array.isArray(source)\n ? (source as Record<string, Recursive>)\n : ({} as Record<string, Recursive>);\n\n for (const key of Object.keys(format)) {\n const nextSource = sourceObject[key];\n const nextFormat = (format as Record<string, Recursive>)[key];\n result[key] = reduceObjectFormat(nextSource, nextFormat);\n }\n return result;\n }\n\n // For primitives in the format, simply return the source value (can be undefined)\n return source as Primitive;\n};\n"],"mappings":";;;;;;;;;AAYA,MAAa,sBACX,QACA,WACc;AAEd,KAAI,MAAM,QAAQ,OAAO,EAAE;EACzB,MAAM,cAAc,MAAM,QAAQ,OAAO,GAAG,SAAS,EAAE;AACvD,SAAO,OAAO,KAAK,YAAY,UAC7B,mBAAmB,YAAY,QAAQ,WAAW,CACnD;;AAIH,KAAI,OAAO,WAAW,YAAY,WAAW,MAAM;EACjD,MAAM,SAAoC,EAAE;EAC5C,MAAM,eACJ,OAAO,WAAW,YAAY,WAAW,QAAQ,CAAC,MAAM,QAAQ,OAAO,GAClE,SACA,EAAE;AAET,OAAK,MAAM,OAAO,OAAO,KAAK,OAAO,EAAE;GACrC,MAAM,aAAa,aAAa;GAChC,MAAM,aAAc,OAAqC;AACzD,UAAO,OAAO,mBAAmB,YAAY,WAAW;;AAE1D,SAAO;;AAIT,QAAO"}
@@ -1 +1 @@
- {"version":3,"file":"resolveObjectPromises.mjs","names":["result: Record<string, any>"],"sources":["../../../src/utils/resolveObjectPromises.ts"],"sourcesContent":["/**\n * A more \"unified\" approach where each type (function, array, object, primitive)\n * is handled inside the main recursive body.\n */\nexport const resolveObjectPromises = async <T = unknown>(\n entry: any\n): Promise<T> => {\n // Check if entry is a Promise (Thenable)\n if (entry && typeof entry.then === 'function') {\n const awaited = await entry;\n return resolveObjectPromises(awaited);\n }\n\n // If entry is a function, invoke it and process the result\n if (typeof entry === 'function') {\n const result = entry();\n return resolveObjectPromises(result);\n }\n\n if (Array.isArray(entry)) {\n return Promise.all(\n entry.map(async (item) => resolveObjectPromises(item))\n ) as unknown as T;\n }\n\n // Handle plain objects (but not arrays)\n if (entry && typeof entry === 'object') {\n // Arrays are handled in the `Array.isArray` branch above, so we know `entry` is a plain object here.\n\n const result: Record<string, any> = {};\n\n // Iterate over keys **sequentially** to make sure the insertion order of the\n // resulting object matches the original key order. Using `Promise.all` here\n // could lead to out-of-order insertions when asynchronous resolutions\n // finish at different times.\n for (const key of Object.keys(entry)) {\n result[key] = await resolveObjectPromises(entry[key]);\n }\n\n return result as T;\n }\n\n return entry as T;\n};\n"],"mappings":";;;;;AAIA,MAAa,wBAAwB,OACnC,UACe;AAEf,KAAI,SAAS,OAAO,MAAM,SAAS,WAEjC,QAAO,sBADS,MAAM,MACe;AAIvC,KAAI,OAAO,UAAU,WAEnB,QAAO,sBADQ,OAAO,CACc;AAGtC,KAAI,MAAM,QAAQ,MAAM,CACtB,QAAO,QAAQ,IACb,MAAM,IAAI,OAAO,SAAS,sBAAsB,KAAK,CAAC,CACvD;AAIH,KAAI,SAAS,OAAO,UAAU,UAAU;EAGtC,MAAMA,SAA8B,EAAE;AAMtC,OAAK,MAAM,OAAO,OAAO,KAAK,MAAM,CAClC,QAAO,OAAO,MAAM,sBAAsB,MAAM,KAAK;AAGvD,SAAO;;AAGT,QAAO"}
+ {"version":3,"file":"resolveObjectPromises.mjs","names":[],"sources":["../../../src/utils/resolveObjectPromises.ts"],"sourcesContent":["/**\n * A more \"unified\" approach where each type (function, array, object, primitive)\n * is handled inside the main recursive body.\n */\nexport const resolveObjectPromises = async <T = unknown>(\n entry: any\n): Promise<T> => {\n // Check if entry is a Promise (Thenable)\n if (entry && typeof entry.then === 'function') {\n const awaited = await entry;\n return resolveObjectPromises(awaited);\n }\n\n // If entry is a function, invoke it and process the result\n if (typeof entry === 'function') {\n const result = entry();\n return resolveObjectPromises(result);\n }\n\n if (Array.isArray(entry)) {\n return Promise.all(\n entry.map(async (item) => resolveObjectPromises(item))\n ) as unknown as T;\n }\n\n // Handle plain objects (but not arrays)\n if (entry && typeof entry === 'object') {\n // Arrays are handled in the `Array.isArray` branch above, so we know `entry` is a plain object here.\n\n const result: Record<string, any> = {};\n\n // Iterate over keys **sequentially** to make sure the insertion order of the\n // resulting object matches the original key order. Using `Promise.all` here\n // could lead to out-of-order insertions when asynchronous resolutions\n // finish at different times.\n for (const key of Object.keys(entry)) {\n result[key] = await resolveObjectPromises(entry[key]);\n }\n\n return result as T;\n }\n\n return entry as T;\n};\n"],"mappings":";;;;;AAIA,MAAa,wBAAwB,OACnC,UACe;AAEf,KAAI,SAAS,OAAO,MAAM,SAAS,WAEjC,QAAO,sBADS,MAAM,MACe;AAIvC,KAAI,OAAO,UAAU,WAEnB,QAAO,sBADQ,OAAO,CACc;AAGtC,KAAI,MAAM,QAAQ,MAAM,CACtB,QAAO,QAAQ,IACb,MAAM,IAAI,OAAO,SAAS,sBAAsB,KAAK,CAAC,CACvD;AAIH,KAAI,SAAS,OAAO,UAAU,UAAU;EAGtC,MAAM,SAA8B,EAAE;AAMtC,OAAK,MAAM,OAAO,OAAO,KAAK,MAAM,CAClC,QAAO,OAAO,MAAM,sBAAsB,MAAM,KAAK;AAGvD,SAAO;;AAGT,QAAO"}
@@ -5,15 +5,22 @@ import packageJson from "@intlayer/core/package.json" with { type: "json" };
  //#region src/utils/runOnce.ts
  const DEFAULT_RUN_ONCE_OPTIONS = { cacheTimeoutMs: 60 * 1e3 };
  const writeSentinelFile = async (sentinelFilePath, currentTimestamp) => {
+   const data = {
+     version: packageJson.version,
+     timestamp: currentTimestamp
+   };
    try {
      await mkdir(dirname(sentinelFilePath), { recursive: true });
-     const data = {
-       version: packageJson.version,
-       timestamp: currentTimestamp
-     };
      await writeFile(sentinelFilePath, JSON.stringify(data), { flag: "wx" });
    } catch (err) {
      if (err.code === "EEXIST") return;
+     if (err.code === "ENOENT") try {
+       await mkdir(dirname(sentinelFilePath), { recursive: true });
+       await writeFile(sentinelFilePath, JSON.stringify(data), { flag: "wx" });
+       return;
+     } catch (retryErr) {
+       if (retryErr.code === "EEXIST") return;
+     }
      throw err;
    }
  };
@@ -70,10 +77,10 @@ const runOnce = async (sentinelFilePath, callback, options) => {
    } catch (err) {
      if (err.code === "ENOENT") {} else throw err;
    }
-   writeSentinelFile(sentinelFilePath, currentTimestamp);
+   await writeSentinelFile(sentinelFilePath, currentTimestamp);
    try {
      await callback();
-     writeSentinelFile(sentinelFilePath, currentTimestamp);
+     await writeSentinelFile(sentinelFilePath, currentTimestamp);
    } catch {
      try {
        await unlink(sentinelFilePath);
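The two hunks above close a startup race: `writeSentinelFile` was previously fire-and-forget, so a second process could pass the `stat` check before the sentinel landed; awaiting both writes closes that window, and hoisting `data` plus the ENOENT retry handles the sentinel directory being removed (e.g. by `cleanOutputDir`) between `mkdir` and `writeFile`. Typical usage, adapted from the JSDoc in the embedded source (the sentinel path and callback body are illustrative):

```typescript
import { runOnce } from './utils/runOnce'; // hypothetical path

await runOnce(
  '/tmp/intlayer-sentinel',
  async () => {
    // Expensive initialization; only the first process in the window runs this.
  },
  {
    cacheTimeoutMs: 30 * 1000, // sentinel stays valid for 30 seconds
    onIsCached: () => console.log('skipped: sentinel is fresh'),
  }
);
```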
@@ -1 +1 @@
- {"version":3,"file":"runOnce.mjs","names":["data: SentinelData","err: any","cachedVersion: string | undefined"],"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":["import { mkdir, readFile, stat, unlink, writeFile } from 'node:fs/promises';\nimport { dirname } from 'node:path';\nimport packageJson from '@intlayer/core/package.json' with { type: 'json' };\n\ntype RunOnceOptions = {\n /**\n * The function to execute when the sentinel is not found or is older than the cache timeout.\n */\n onIsCached?: () => void | Promise<void>;\n /**\n * The time window in milliseconds during which the sentinel is considered valid.\n *\n * @default 60000 = 1 minute\n */\n cacheTimeoutMs?: number;\n /**\n * If true, the callback will always run. If undefined, the callback will run only if the sentinel is older than the cache timeout.\n *\n * @default false\n */\n forceRun?: boolean;\n};\n\nconst DEFAULT_RUN_ONCE_OPTIONS = {\n cacheTimeoutMs: 60 * 1000, // 1 minute in milliseconds,\n} satisfies RunOnceOptions;\n\ntype SentinelData = {\n version: string;\n timestamp: number;\n};\n\nconst writeSentinelFile = async (\n sentinelFilePath: string,\n currentTimestamp: number\n) => {\n try {\n // Ensure the directory exists before writing the file\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n\n // O_EXCL ensures only the *first* process can create the file\n const data: SentinelData = {\n version: packageJson.version,\n timestamp: currentTimestamp,\n };\n await writeFile(sentinelFilePath, JSON.stringify(data), { flag: 'wx' });\n } catch (err: any) {\n if (err.code === 'EEXIST') {\n // Another process already created it → we're done\n return;\n }\n throw err; // unexpected FS error\n }\n};\n\n/**\n * Ensures a callback function runs only once within a specified time window across multiple processes.\n * Uses a sentinel file to coordinate execution and prevent duplicate work.\n *\n * @param sentinelFilePath - Path to the sentinel file used for coordination\n * @param callback - The function to execute (should be async)\n * @param options - The options for the runOnce function\n *\n * @example\n * ```typescript\n * await runPrepareIntlayerOnce(\n * '/tmp/intlayer-sentinel',\n * async () => {\n * // Your initialization logic here\n * await prepareIntlayer();\n * },\n * 30 * 1000 // 30 seconds cache\n * );\n * ```\n *\n * @throws {Error} When there are unexpected filesystem errors\n */\nexport const runOnce = async (\n sentinelFilePath: string,\n callback: () => void | Promise<void>,\n options?: RunOnceOptions\n) => {\n const { onIsCached, cacheTimeoutMs, forceRun } = {\n ...DEFAULT_RUN_ONCE_OPTIONS,\n ...(options ?? {}),\n };\n const currentTimestamp = Date.now();\n\n try {\n // Check if sentinel file exists and get its stats\n const sentinelStats = await stat(sentinelFilePath);\n const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();\n\n // Determine if we should rebuild based on cache age, force flag, or version mismatch\n let shouldRebuild = Boolean(forceRun) || sentinelAge > cacheTimeoutMs!;\n\n if (!shouldRebuild) {\n try {\n const raw = await readFile(sentinelFilePath, 'utf8');\n let cachedVersion: string | undefined;\n try {\n const parsed = JSON.parse(raw) as Partial<SentinelData>;\n cachedVersion = parsed.version;\n } catch {\n // Legacy format (timestamp only). 
Force a rebuild once to write versioned sentinel.\n cachedVersion = undefined;\n }\n\n if (!cachedVersion || cachedVersion !== packageJson.version) {\n shouldRebuild = true;\n }\n } catch {\n // If we cannot read the file, err on the safe side and rebuild\n shouldRebuild = true;\n }\n }\n\n if (shouldRebuild) {\n try {\n await unlink(sentinelFilePath);\n } catch {}\n // Fall through to create new sentinel and rebuild\n } else {\n await onIsCached?.();\n // Sentinel is recent and versions match, no need to rebuild\n return;\n }\n } catch (err: any) {\n if (err.code === 'ENOENT') {\n // File doesn't exist, continue to create it\n } else {\n throw err; // unexpected FS error\n }\n }\n\n // Write sentinel file before to block parallel processes\n writeSentinelFile(sentinelFilePath, currentTimestamp);\n\n try {\n await callback();\n\n // Write sentinel file after to ensure the first one has not been removed with cleanOutputDir\n writeSentinelFile(sentinelFilePath, currentTimestamp);\n } catch {\n try {\n await unlink(sentinelFilePath); // Remove sentinel file if an error occurs\n } catch {}\n }\n};\n"],"mappings":";;;;;AAuBA,MAAM,2BAA2B,EAC/B,gBAAgB,KAAK,KACtB;AAOD,MAAM,oBAAoB,OACxB,kBACA,qBACG;AACH,KAAI;AAEF,QAAM,MAAM,QAAQ,iBAAiB,EAAE,EAAE,WAAW,MAAM,CAAC;EAG3D,MAAMA,OAAqB;GACzB,SAAS,YAAY;GACrB,WAAW;GACZ;AACD,QAAM,UAAU,kBAAkB,KAAK,UAAU,KAAK,EAAE,EAAE,MAAM,MAAM,CAAC;UAChEC,KAAU;AACjB,MAAI,IAAI,SAAS,SAEf;AAEF,QAAM;;;;;;;;;;;;;;;;;;;;;;;;;AA0BV,MAAa,UAAU,OACrB,kBACA,UACA,YACG;CACH,MAAM,EAAE,YAAY,gBAAgB,aAAa;EAC/C,GAAG;EACH,GAAI,WAAW,EAAE;EAClB;CACD,MAAM,mBAAmB,KAAK,KAAK;AAEnC,KAAI;EAGF,MAAM,cAAc,oBADE,MAAM,KAAK,iBAAiB,EACG,MAAM,SAAS;EAGpE,IAAI,gBAAgB,QAAQ,SAAS,IAAI,cAAc;AAEvD,MAAI,CAAC,cACH,KAAI;GACF,MAAM,MAAM,MAAM,SAAS,kBAAkB,OAAO;GACpD,IAAIC;AACJ,OAAI;AAEF,oBADe,KAAK,MAAM,IAAI,CACP;WACjB;AAEN,oBAAgB;;AAGlB,OAAI,CAAC,iBAAiB,kBAAkB,YAAY,QAClD,iBAAgB;UAEZ;AAEN,mBAAgB;;AAIpB,MAAI,cACF,KAAI;AACF,SAAM,OAAO,iBAAiB;UACxB;OAEH;AACL,SAAM,cAAc;AAEpB;;UAEKD,KAAU;AACjB,MAAI,IAAI,SAAS,UAAU,OAGzB,OAAM;;AAKV,mBAAkB,kBAAkB,iBAAiB;AAErD,KAAI;AACF,QAAM,UAAU;AAGhB,oBAAkB,kBAAkB,iBAAiB;SAC/C;AACN,MAAI;AACF,SAAM,OAAO,iBAAiB;UACxB"}
+ {"version":3,"file":"runOnce.mjs","names":[],"sources":["../../../src/utils/runOnce.ts"],"sourcesContent":["import { mkdir, readFile, stat, unlink, writeFile } from 'node:fs/promises';\nimport { dirname } from 'node:path';\nimport packageJson from '@intlayer/core/package.json' with { type: 'json' };\n\ntype RunOnceOptions = {\n /**\n * The function to execute when the sentinel is not found or is older than the cache timeout.\n */\n onIsCached?: () => void | Promise<void>;\n /**\n * The time window in milliseconds during which the sentinel is considered valid.\n *\n * @default 60000 = 1 minute\n */\n cacheTimeoutMs?: number;\n /**\n * If true, the callback will always run. If undefined, the callback will run only if the sentinel is older than the cache timeout.\n *\n * @default false\n */\n forceRun?: boolean;\n};\n\nconst DEFAULT_RUN_ONCE_OPTIONS = {\n cacheTimeoutMs: 60 * 1000, // 1 minute in milliseconds,\n} satisfies RunOnceOptions;\n\ntype SentinelData = {\n version: string;\n timestamp: number;\n};\n\nconst writeSentinelFile = async (\n sentinelFilePath: string,\n currentTimestamp: number\n) => {\n // O_EXCL ensures only the *first* process can create the file\n const data: SentinelData = {\n version: packageJson.version,\n timestamp: currentTimestamp,\n };\n\n try {\n // Ensure the directory exists before writing the file\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n\n await writeFile(sentinelFilePath, JSON.stringify(data), { flag: 'wx' });\n } catch (err: any) {\n if (err.code === 'EEXIST') {\n // Another process already created it → we're done\n return;\n }\n // Optimization: If ENOENT occurs on write despite mkdir (race condition with external deletion), retry once.\n if (err.code === 'ENOENT') {\n try {\n await mkdir(dirname(sentinelFilePath), { recursive: true });\n await writeFile(sentinelFilePath, JSON.stringify(data), { flag: 'wx' });\n return;\n } catch (retryErr: any) {\n if (retryErr.code === 'EEXIST') return;\n }\n }\n throw err; // unexpected FS error\n }\n};\n\n/**\n * Ensures a callback function runs only once within a specified time window across multiple processes.\n * Uses a sentinel file to coordinate execution and prevent duplicate work.\n *\n * @param sentinelFilePath - Path to the sentinel file used for coordination\n * @param callback - The function to execute (should be async)\n * @param options - The options for the runOnce function\n *\n * @example\n * ```typescript\n * await runPrepareIntlayerOnce(\n * '/tmp/intlayer-sentinel',\n * async () => {\n * // Your initialization logic here\n * await prepareIntlayer();\n * },\n * 30 * 1000 // 30 seconds cache\n * );\n * ```\n *\n * @throws {Error} When there are unexpected filesystem errors\n */\nexport const runOnce = async (\n sentinelFilePath: string,\n callback: () => void | Promise<void>,\n options?: RunOnceOptions\n) => {\n const { onIsCached, cacheTimeoutMs, forceRun } = {\n ...DEFAULT_RUN_ONCE_OPTIONS,\n ...(options ?? 
{}),\n };\n const currentTimestamp = Date.now();\n\n try {\n // Check if sentinel file exists and get its stats\n const sentinelStats = await stat(sentinelFilePath);\n const sentinelAge = currentTimestamp - sentinelStats.mtime.getTime();\n\n // Determine if we should rebuild based on cache age, force flag, or version mismatch\n let shouldRebuild = Boolean(forceRun) || sentinelAge > cacheTimeoutMs!;\n\n if (!shouldRebuild) {\n try {\n const raw = await readFile(sentinelFilePath, 'utf8');\n let cachedVersion: string | undefined;\n try {\n const parsed = JSON.parse(raw) as Partial<SentinelData>;\n cachedVersion = parsed.version;\n } catch {\n // Legacy format (timestamp only). Force a rebuild once to write versioned sentinel.\n cachedVersion = undefined;\n }\n\n if (!cachedVersion || cachedVersion !== packageJson.version) {\n shouldRebuild = true;\n }\n } catch {\n // If we cannot read the file, err on the safe side and rebuild\n shouldRebuild = true;\n }\n }\n\n if (shouldRebuild) {\n try {\n await unlink(sentinelFilePath);\n } catch {}\n // Fall through to create new sentinel and rebuild\n } else {\n await onIsCached?.();\n // Sentinel is recent and versions match, no need to rebuild\n return;\n }\n } catch (err: any) {\n if (err.code === 'ENOENT') {\n // File doesn't exist, continue to create it\n } else {\n throw err; // unexpected FS error\n }\n }\n\n // Write sentinel file before to block parallel processes\n // Added await here\n await writeSentinelFile(sentinelFilePath, currentTimestamp);\n\n try {\n await callback();\n\n // Write sentinel file after to ensure the first one has not been removed with cleanOutputDir\n // Added await here\n await writeSentinelFile(sentinelFilePath, currentTimestamp);\n } catch {\n try {\n await unlink(sentinelFilePath); // Remove sentinel file if an error occurs\n } catch {}\n }\n};\n"],"mappings":";;;;;AAuBA,MAAM,2BAA2B,EAC/B,gBAAgB,KAAK,KACtB;AAOD,MAAM,oBAAoB,OACxB,kBACA,qBACG;CAEH,MAAM,OAAqB;EACzB,SAAS,YAAY;EACrB,WAAW;EACZ;AAED,KAAI;AAEF,QAAM,MAAM,QAAQ,iBAAiB,EAAE,EAAE,WAAW,MAAM,CAAC;AAE3D,QAAM,UAAU,kBAAkB,KAAK,UAAU,KAAK,EAAE,EAAE,MAAM,MAAM,CAAC;UAChE,KAAU;AACjB,MAAI,IAAI,SAAS,SAEf;AAGF,MAAI,IAAI,SAAS,SACf,KAAI;AACF,SAAM,MAAM,QAAQ,iBAAiB,EAAE,EAAE,WAAW,MAAM,CAAC;AAC3D,SAAM,UAAU,kBAAkB,KAAK,UAAU,KAAK,EAAE,EAAE,MAAM,MAAM,CAAC;AACvE;WACO,UAAe;AACtB,OAAI,SAAS,SAAS,SAAU;;AAGpC,QAAM;;;;;;;;;;;;;;;;;;;;;;;;;AA0BV,MAAa,UAAU,OACrB,kBACA,UACA,YACG;CACH,MAAM,EAAE,YAAY,gBAAgB,aAAa;EAC/C,GAAG;EACH,GAAI,WAAW,EAAE;EAClB;CACD,MAAM,mBAAmB,KAAK,KAAK;AAEnC,KAAI;EAGF,MAAM,cAAc,oBADE,MAAM,KAAK,iBAAiB,EACG,MAAM,SAAS;EAGpE,IAAI,gBAAgB,QAAQ,SAAS,IAAI,cAAc;AAEvD,MAAI,CAAC,cACH,KAAI;GACF,MAAM,MAAM,MAAM,SAAS,kBAAkB,OAAO;GACpD,IAAI;AACJ,OAAI;AAEF,oBADe,KAAK,MAAM,IAAI,CACP;WACjB;AAEN,oBAAgB;;AAGlB,OAAI,CAAC,iBAAiB,kBAAkB,YAAY,QAClD,iBAAgB;UAEZ;AAEN,mBAAgB;;AAIpB,MAAI,cACF,KAAI;AACF,SAAM,OAAO,iBAAiB;UACxB;OAEH;AACL,SAAM,cAAc;AAEpB;;UAEK,KAAU;AACjB,MAAI,IAAI,SAAS,UAAU,OAGzB,OAAM;;AAMV,OAAM,kBAAkB,kBAAkB,iBAAiB;AAE3D,KAAI;AACF,QAAM,UAAU;AAIhB,QAAM,kBAAkB,kBAAkB,iBAAiB;SACrD;AACN,MAAI;AACF,SAAM,OAAO,iBAAiB;UACxB"}
@@ -1 +1 @@
- {"version":3,"file":"bin.mjs","names":["normalizedOptions: SpawnOptions | undefined","normalizedDone: BinCallback","ch: ChildProcess"],"sources":["../../../../src/utils/runParallel/bin.ts"],"sourcesContent":["import type { ChildProcess } from 'node:child_process';\nimport { type SpawnOptions, spawn } from 'node:child_process';\n\ntype BinCallback = (err: Error | null, stdout?: string, code?: number) => void;\n\nconst stripStderr = (stderr: string | undefined): string | undefined => {\n if (!stderr) return;\n let cleaned = stderr.trim();\n // Strip bogus screen size error.\n // See https://github.com/microsoft/vscode/issues/98590\n const regex = /your \\d+x\\d+ screen size is bogus\\. expect trouble/gi;\n cleaned = cleaned.replace(regex, '');\n\n return cleaned.trim() || undefined;\n};\n\n/**\n * Spawn a binary and read its stdout.\n * @param cmd The name of the binary to spawn.\n * @param args The arguments for the binary.\n * @param options Optional option for the spawn function.\n * @param done Callback function.\n */\nexport const run = (\n cmd: string,\n args: string[],\n options: SpawnOptions | BinCallback | undefined,\n done?: BinCallback\n): void => {\n let normalizedOptions: SpawnOptions | undefined;\n let normalizedDone: BinCallback;\n\n if (typeof options === 'function') {\n normalizedDone = options;\n normalizedOptions = undefined;\n } else {\n normalizedDone = done!;\n normalizedOptions = options;\n }\n\n let executed = false;\n const ch: ChildProcess = spawn(cmd, args, normalizedOptions ?? {});\n let stdout = '';\n let stderr = '';\n\n if (ch.stdout) {\n ch.stdout.on('data', (d: Buffer) => {\n stdout += d.toString();\n });\n }\n\n if (ch.stderr) {\n ch.stderr.on('data', (d: Buffer) => {\n stderr += d.toString();\n });\n }\n\n ch.on('error', (err: Error) => {\n if (executed) return;\n executed = true;\n normalizedDone(new Error(String(err)));\n });\n\n ch.on('close', (code: number | null) => {\n if (executed) return;\n executed = true;\n\n const cleanedStderr = stripStderr(stderr);\n if (cleanedStderr) {\n return normalizedDone(new Error(cleanedStderr));\n }\n\n normalizedDone(null, stdout, code ?? undefined);\n });\n};\n"],"mappings":";;;AAKA,MAAM,eAAe,WAAmD;AACtE,KAAI,CAAC,OAAQ;CACb,IAAI,UAAU,OAAO,MAAM;AAI3B,WAAU,QAAQ,QADJ,wDACmB,GAAG;AAEpC,QAAO,QAAQ,MAAM,IAAI;;;;;;;;;AAU3B,MAAa,OACX,KACA,MACA,SACA,SACS;CACT,IAAIA;CACJ,IAAIC;AAEJ,KAAI,OAAO,YAAY,YAAY;AACjC,mBAAiB;AACjB,sBAAoB;QACf;AACL,mBAAiB;AACjB,sBAAoB;;CAGtB,IAAI,WAAW;CACf,MAAMC,KAAmB,MAAM,KAAK,MAAM,qBAAqB,EAAE,CAAC;CAClE,IAAI,SAAS;CACb,IAAI,SAAS;AAEb,KAAI,GAAG,OACL,IAAG,OAAO,GAAG,SAAS,MAAc;AAClC,YAAU,EAAE,UAAU;GACtB;AAGJ,KAAI,GAAG,OACL,IAAG,OAAO,GAAG,SAAS,MAAc;AAClC,YAAU,EAAE,UAAU;GACtB;AAGJ,IAAG,GAAG,UAAU,QAAe;AAC7B,MAAI,SAAU;AACd,aAAW;AACX,iBAAe,IAAI,MAAM,OAAO,IAAI,CAAC,CAAC;GACtC;AAEF,IAAG,GAAG,UAAU,SAAwB;AACtC,MAAI,SAAU;AACd,aAAW;EAEX,MAAM,gBAAgB,YAAY,OAAO;AACzC,MAAI,cACF,QAAO,eAAe,IAAI,MAAM,cAAc,CAAC;AAGjD,iBAAe,MAAM,QAAQ,QAAQ,OAAU;GAC/C"}
+ {"version":3,"file":"bin.mjs","names":[],"sources":["../../../../src/utils/runParallel/bin.ts"],"sourcesContent":["import type { ChildProcess } from 'node:child_process';\nimport { type SpawnOptions, spawn } from 'node:child_process';\n\ntype BinCallback = (err: Error | null, stdout?: string, code?: number) => void;\n\nconst stripStderr = (stderr: string | undefined): string | undefined => {\n if (!stderr) return;\n let cleaned = stderr.trim();\n // Strip bogus screen size error.\n // See https://github.com/microsoft/vscode/issues/98590\n const regex = /your \\d+x\\d+ screen size is bogus\\. expect trouble/gi;\n cleaned = cleaned.replace(regex, '');\n\n return cleaned.trim() || undefined;\n};\n\n/**\n * Spawn a binary and read its stdout.\n * @param cmd The name of the binary to spawn.\n * @param args The arguments for the binary.\n * @param options Optional option for the spawn function.\n * @param done Callback function.\n */\nexport const run = (\n cmd: string,\n args: string[],\n options: SpawnOptions | BinCallback | undefined,\n done?: BinCallback\n): void => {\n let normalizedOptions: SpawnOptions | undefined;\n let normalizedDone: BinCallback;\n\n if (typeof options === 'function') {\n normalizedDone = options;\n normalizedOptions = undefined;\n } else {\n normalizedDone = done!;\n normalizedOptions = options;\n }\n\n let executed = false;\n const ch: ChildProcess = spawn(cmd, args, normalizedOptions ?? {});\n let stdout = '';\n let stderr = '';\n\n if (ch.stdout) {\n ch.stdout.on('data', (d: Buffer) => {\n stdout += d.toString();\n });\n }\n\n if (ch.stderr) {\n ch.stderr.on('data', (d: Buffer) => {\n stderr += d.toString();\n });\n }\n\n ch.on('error', (err: Error) => {\n if (executed) return;\n executed = true;\n normalizedDone(new Error(String(err)));\n });\n\n ch.on('close', (code: number | null) => {\n if (executed) return;\n executed = true;\n\n const cleanedStderr = stripStderr(stderr);\n if (cleanedStderr) {\n return normalizedDone(new Error(cleanedStderr));\n }\n\n normalizedDone(null, stdout, code ?? undefined);\n });\n};\n"],"mappings":";;;AAKA,MAAM,eAAe,WAAmD;AACtE,KAAI,CAAC,OAAQ;CACb,IAAI,UAAU,OAAO,MAAM;AAI3B,WAAU,QAAQ,QADJ,wDACmB,GAAG;AAEpC,QAAO,QAAQ,MAAM,IAAI;;;;;;;;;AAU3B,MAAa,OACX,KACA,MACA,SACA,SACS;CACT,IAAI;CACJ,IAAI;AAEJ,KAAI,OAAO,YAAY,YAAY;AACjC,mBAAiB;AACjB,sBAAoB;QACf;AACL,mBAAiB;AACjB,sBAAoB;;CAGtB,IAAI,WAAW;CACf,MAAM,KAAmB,MAAM,KAAK,MAAM,qBAAqB,EAAE,CAAC;CAClE,IAAI,SAAS;CACb,IAAI,SAAS;AAEb,KAAI,GAAG,OACL,IAAG,OAAO,GAAG,SAAS,MAAc;AAClC,YAAU,EAAE,UAAU;GACtB;AAGJ,KAAI,GAAG,OACL,IAAG,OAAO,GAAG,SAAS,MAAc;AAClC,YAAU,EAAE,UAAU;GACtB;AAGJ,IAAG,GAAG,UAAU,QAAe;AAC7B,MAAI,SAAU;AACd,aAAW;AACX,iBAAe,IAAI,MAAM,OAAO,IAAI,CAAC,CAAC;GACtC;AAEF,IAAG,GAAG,UAAU,SAAwB;AACtC,MAAI,SAAU;AACd,aAAW;EAEX,MAAM,gBAAgB,YAAY,OAAO;AACzC,MAAI,cACF,QAAO,eAAe,IAAI,MAAM,cAAc,CAAC;AAGjD,iBAAe,MAAM,QAAQ,QAAQ,OAAU;GAC/C"}
@@ -1 +1 @@
- {"version":3,"file":"index.mjs","names":["signalHandlers: { event: string; handler: (...args: any[]) => void }[]"],"sources":["../../../../src/utils/runParallel/index.ts"],"sourcesContent":["import { delimiter, join } from 'node:path';\nimport { spawnPosix } from './spawnPosix';\nimport { spawnWin32 } from './spawnWin32';\n\nexport type ParallelHandle = {\n kill: () => void;\n result: Promise<any>;\n commandText: string;\n};\n\n/**\n * Start a cross-platform parallel process using npm-run-all approach.\n * Accepts either a single string (e.g., 'next start') or an array of tokens (e.g., ['next', 'start']).\n */\nexport const runParallel = (proc?: string | string[]): ParallelHandle => {\n if (!proc || (Array.isArray(proc) && proc.length === 0))\n throw new Error('Invalid command');\n\n const commandText = Array.isArray(proc) ? proc.join(' ') : proc;\n\n const isArray = Array.isArray(proc);\n const command = isArray ? (proc as string[])[0] : commandText;\n const args = isArray ? (proc as string[]).slice(1) : [];\n\n // Ensure local binaries (node_modules/.bin) are resolvable\n const cwdBin = join(process.cwd(), 'node_modules', '.bin');\n const PATH_KEY =\n Object.keys(process.env).find((key) => key.toLowerCase() === 'path') ??\n 'PATH';\n\n const extendedPath = [cwdBin, process.env[PATH_KEY] ?? '']\n .filter(Boolean)\n .join(delimiter);\n\n const childEnv = {\n ...process.env,\n [PATH_KEY]: extendedPath,\n } as NodeJS.ProcessEnv;\n\n const isWin = process.platform === 'win32';\n const spawnFunc = isWin ? spawnWin32 : spawnPosix;\n\n // Spawn options\n const spawnOptions = {\n cwd: process.cwd(),\n stdio: 'inherit' as const,\n env: childEnv,\n shell: isWin,\n };\n\n // Spawn the child process\n const child = isArray\n ? // If provided as a single string element that includes spaces, treat it like a shell command\n args.length === 0 && /\\s/.test(command)\n ? isWin\n ? spawnFunc(\n process.env.ComSpec ?? 'cmd.exe',\n ['/d', '/s', '/c', command],\n spawnOptions\n )\n : spawnFunc(\n process.env.SHELL ?? '/bin/sh',\n ['-c', command],\n spawnOptions\n )\n : spawnFunc(command, args, spawnOptions)\n : isWin\n ? spawnFunc(\n process.env.ComSpec ?? 'cmd.exe',\n ['/d', '/s', '/c', commandText],\n spawnOptions\n )\n : spawnFunc(\n process.env.SHELL ?? '/bin/sh',\n ['-c', commandText],\n spawnOptions\n );\n\n const result = new Promise<void>((resolve, reject) => {\n child.on('error', (err) => {\n try {\n console.error(\n `[runParallel] Failed to start: ${err?.message ?? 
String(err)}`\n );\n } catch {}\n cleanupHandlers();\n reject(err);\n });\n\n child.on('exit', (code, signal) => {\n cleanupHandlers();\n\n // Treat common termination signals as graceful exits, not failures\n const gracefulSignals = ['SIGINT', 'SIGTERM', 'SIGQUIT', 'SIGHUP'];\n if (code === 0 || (signal && gracefulSignals.includes(signal))) {\n resolve();\n } else {\n reject(\n Object.assign(new Error('Parallel process failed'), { code, signal })\n );\n }\n });\n });\n\n const cleanup = () => {\n try {\n child.kill('SIGTERM');\n } catch {\n // Best effort\n }\n };\n\n const signalHandlers: { event: string; handler: (...args: any[]) => void }[] =\n [\n { event: 'SIGINT', handler: cleanup },\n { event: 'SIGTERM', handler: cleanup },\n { event: 'SIGQUIT', handler: cleanup },\n { event: 'SIGHUP', handler: cleanup },\n ];\n\n // Register signal handlers\n signalHandlers.forEach(({ event, handler }) => {\n process.on(event as any, handler as any);\n });\n\n const cleanupHandlers = () => {\n signalHandlers.forEach(({ event, handler }) => {\n process.off(event as any, handler as any);\n });\n };\n\n const kill = () => {\n try {\n child.kill('SIGTERM');\n } catch {\n // Best effort\n }\n };\n\n return { kill, result, commandText };\n};\n"],"mappings":";;;;;;;;;AAcA,MAAa,eAAe,SAA6C;AACvE,KAAI,CAAC,QAAS,MAAM,QAAQ,KAAK,IAAI,KAAK,WAAW,EACnD,OAAM,IAAI,MAAM,kBAAkB;CAEpC,MAAM,cAAc,MAAM,QAAQ,KAAK,GAAG,KAAK,KAAK,IAAI,GAAG;CAE3D,MAAM,UAAU,MAAM,QAAQ,KAAK;CACnC,MAAM,UAAU,UAAW,KAAkB,KAAK;CAClD,MAAM,OAAO,UAAW,KAAkB,MAAM,EAAE,GAAG,EAAE;CAGvD,MAAM,SAAS,KAAK,QAAQ,KAAK,EAAE,gBAAgB,OAAO;CAC1D,MAAM,WACJ,OAAO,KAAK,QAAQ,IAAI,CAAC,MAAM,QAAQ,IAAI,aAAa,KAAK,OAAO,IACpE;CAEF,MAAM,eAAe,CAAC,QAAQ,QAAQ,IAAI,aAAa,GAAG,CACvD,OAAO,QAAQ,CACf,KAAK,UAAU;CAElB,MAAM,WAAW;EACf,GAAG,QAAQ;GACV,WAAW;EACb;CAED,MAAM,QAAQ,QAAQ,aAAa;CACnC,MAAM,YAAY,QAAQ,aAAa;CAGvC,MAAM,eAAe;EACnB,KAAK,QAAQ,KAAK;EAClB,OAAO;EACP,KAAK;EACL,OAAO;EACR;CAGD,MAAM,QAAQ,UAEV,KAAK,WAAW,KAAK,KAAK,KAAK,QAAQ,GACrC,QACE,UACE,QAAQ,IAAI,WAAW,WACvB;EAAC;EAAM;EAAM;EAAM;EAAQ,EAC3B,aACD,GACD,UACE,QAAQ,IAAI,SAAS,WACrB,CAAC,MAAM,QAAQ,EACf,aACD,GACH,UAAU,SAAS,MAAM,aAAa,GACxC,QACE,UACE,QAAQ,IAAI,WAAW,WACvB;EAAC;EAAM;EAAM;EAAM;EAAY,EAC/B,aACD,GACD,UACE,QAAQ,IAAI,SAAS,WACrB,CAAC,MAAM,YAAY,EACnB,aACD;CAEP,MAAM,SAAS,IAAI,SAAe,WAAS,WAAW;AACpD,QAAM,GAAG,UAAU,QAAQ;AACzB,OAAI;AACF,YAAQ,MACN,kCAAkC,KAAK,WAAW,OAAO,IAAI,GAC9D;WACK;AACR,oBAAiB;AACjB,UAAO,IAAI;IACX;AAEF,QAAM,GAAG,SAAS,MAAM,WAAW;AACjC,oBAAiB;AAIjB,OAAI,SAAS,KAAM,UADK;IAAC;IAAU;IAAW;IAAW;IAAS,CACrB,SAAS,OAAO,CAC3D,YAAS;OAET,QACE,OAAO,uBAAO,IAAI,MAAM,0BAA0B,EAAE;IAAE;IAAM;IAAQ,CAAC,CACtE;IAEH;GACF;CAEF,MAAM,gBAAgB;AACpB,MAAI;AACF,SAAM,KAAK,UAAU;UACf;;CAKV,MAAMA,iBACJ;EACE;GAAE,OAAO;GAAU,SAAS;GAAS;EACrC;GAAE,OAAO;GAAW,SAAS;GAAS;EACtC;GAAE,OAAO;GAAW,SAAS;GAAS;EACtC;GAAE,OAAO;GAAU,SAAS;GAAS;EACtC;AAGH,gBAAe,SAAS,EAAE,OAAO,cAAc;AAC7C,UAAQ,GAAG,OAAc,QAAe;GACxC;CAEF,MAAM,wBAAwB;AAC5B,iBAAe,SAAS,EAAE,OAAO,cAAc;AAC7C,WAAQ,IAAI,OAAc,QAAe;IACzC;;CAGJ,MAAM,aAAa;AACjB,MAAI;AACF,SAAM,KAAK,UAAU;UACf;;AAKV,QAAO;EAAE;EAAM;EAAQ;EAAa"}
+ {"version":3,"file":"index.mjs","names":[],"sources":["../../../../src/utils/runParallel/index.ts"],"sourcesContent":["import { delimiter, join } from 'node:path';\nimport { spawnPosix } from './spawnPosix';\nimport { spawnWin32 } from './spawnWin32';\n\nexport type ParallelHandle = {\n kill: () => void;\n result: Promise<any>;\n commandText: string;\n};\n\n/**\n * Start a cross-platform parallel process using npm-run-all approach.\n * Accepts either a single string (e.g., 'next start') or an array of tokens (e.g., ['next', 'start']).\n */\nexport const runParallel = (proc?: string | string[]): ParallelHandle => {\n if (!proc || (Array.isArray(proc) && proc.length === 0))\n throw new Error('Invalid command');\n\n const commandText = Array.isArray(proc) ? proc.join(' ') : proc;\n\n const isArray = Array.isArray(proc);\n const command = isArray ? (proc as string[])[0] : commandText;\n const args = isArray ? (proc as string[]).slice(1) : [];\n\n // Ensure local binaries (node_modules/.bin) are resolvable\n const cwdBin = join(process.cwd(), 'node_modules', '.bin');\n const PATH_KEY =\n Object.keys(process.env).find((key) => key.toLowerCase() === 'path') ??\n 'PATH';\n\n const extendedPath = [cwdBin, process.env[PATH_KEY] ?? '']\n .filter(Boolean)\n .join(delimiter);\n\n const childEnv = {\n ...process.env,\n [PATH_KEY]: extendedPath,\n } as NodeJS.ProcessEnv;\n\n const isWin = process.platform === 'win32';\n const spawnFunc = isWin ? spawnWin32 : spawnPosix;\n\n // Spawn options\n const spawnOptions = {\n cwd: process.cwd(),\n stdio: 'inherit' as const,\n env: childEnv,\n shell: isWin,\n };\n\n // Spawn the child process\n const child = isArray\n ? // If provided as a single string element that includes spaces, treat it like a shell command\n args.length === 0 && /\\s/.test(command)\n ? isWin\n ? spawnFunc(\n process.env.ComSpec ?? 'cmd.exe',\n ['/d', '/s', '/c', command],\n spawnOptions\n )\n : spawnFunc(\n process.env.SHELL ?? '/bin/sh',\n ['-c', command],\n spawnOptions\n )\n : spawnFunc(command, args, spawnOptions)\n : isWin\n ? spawnFunc(\n process.env.ComSpec ?? 'cmd.exe',\n ['/d', '/s', '/c', commandText],\n spawnOptions\n )\n : spawnFunc(\n process.env.SHELL ?? '/bin/sh',\n ['-c', commandText],\n spawnOptions\n );\n\n const result = new Promise<void>((resolve, reject) => {\n child.on('error', (err) => {\n try {\n console.error(\n `[runParallel] Failed to start: ${err?.message ?? 
String(err)}`\n );\n } catch {}\n cleanupHandlers();\n reject(err);\n });\n\n child.on('exit', (code, signal) => {\n cleanupHandlers();\n\n // Treat common termination signals as graceful exits, not failures\n const gracefulSignals = ['SIGINT', 'SIGTERM', 'SIGQUIT', 'SIGHUP'];\n if (code === 0 || (signal && gracefulSignals.includes(signal))) {\n resolve();\n } else {\n reject(\n Object.assign(new Error('Parallel process failed'), { code, signal })\n );\n }\n });\n });\n\n const cleanup = () => {\n try {\n child.kill('SIGTERM');\n } catch {\n // Best effort\n }\n };\n\n const signalHandlers: { event: string; handler: (...args: any[]) => void }[] =\n [\n { event: 'SIGINT', handler: cleanup },\n { event: 'SIGTERM', handler: cleanup },\n { event: 'SIGQUIT', handler: cleanup },\n { event: 'SIGHUP', handler: cleanup },\n ];\n\n // Register signal handlers\n signalHandlers.forEach(({ event, handler }) => {\n process.on(event as any, handler as any);\n });\n\n const cleanupHandlers = () => {\n signalHandlers.forEach(({ event, handler }) => {\n process.off(event as any, handler as any);\n });\n };\n\n const kill = () => {\n try {\n child.kill('SIGTERM');\n } catch {\n // Best effort\n }\n };\n\n return { kill, result, commandText };\n};\n"],"mappings":";;;;;;;;;AAcA,MAAa,eAAe,SAA6C;AACvE,KAAI,CAAC,QAAS,MAAM,QAAQ,KAAK,IAAI,KAAK,WAAW,EACnD,OAAM,IAAI,MAAM,kBAAkB;CAEpC,MAAM,cAAc,MAAM,QAAQ,KAAK,GAAG,KAAK,KAAK,IAAI,GAAG;CAE3D,MAAM,UAAU,MAAM,QAAQ,KAAK;CACnC,MAAM,UAAU,UAAW,KAAkB,KAAK;CAClD,MAAM,OAAO,UAAW,KAAkB,MAAM,EAAE,GAAG,EAAE;CAGvD,MAAM,SAAS,KAAK,QAAQ,KAAK,EAAE,gBAAgB,OAAO;CAC1D,MAAM,WACJ,OAAO,KAAK,QAAQ,IAAI,CAAC,MAAM,QAAQ,IAAI,aAAa,KAAK,OAAO,IACpE;CAEF,MAAM,eAAe,CAAC,QAAQ,QAAQ,IAAI,aAAa,GAAG,CACvD,OAAO,QAAQ,CACf,KAAK,UAAU;CAElB,MAAM,WAAW;EACf,GAAG,QAAQ;GACV,WAAW;EACb;CAED,MAAM,QAAQ,QAAQ,aAAa;CACnC,MAAM,YAAY,QAAQ,aAAa;CAGvC,MAAM,eAAe;EACnB,KAAK,QAAQ,KAAK;EAClB,OAAO;EACP,KAAK;EACL,OAAO;EACR;CAGD,MAAM,QAAQ,UAEV,KAAK,WAAW,KAAK,KAAK,KAAK,QAAQ,GACrC,QACE,UACE,QAAQ,IAAI,WAAW,WACvB;EAAC;EAAM;EAAM;EAAM;EAAQ,EAC3B,aACD,GACD,UACE,QAAQ,IAAI,SAAS,WACrB,CAAC,MAAM,QAAQ,EACf,aACD,GACH,UAAU,SAAS,MAAM,aAAa,GACxC,QACE,UACE,QAAQ,IAAI,WAAW,WACvB;EAAC;EAAM;EAAM;EAAM;EAAY,EAC/B,aACD,GACD,UACE,QAAQ,IAAI,SAAS,WACrB,CAAC,MAAM,YAAY,EACnB,aACD;CAEP,MAAM,SAAS,IAAI,SAAe,WAAS,WAAW;AACpD,QAAM,GAAG,UAAU,QAAQ;AACzB,OAAI;AACF,YAAQ,MACN,kCAAkC,KAAK,WAAW,OAAO,IAAI,GAC9D;WACK;AACR,oBAAiB;AACjB,UAAO,IAAI;IACX;AAEF,QAAM,GAAG,SAAS,MAAM,WAAW;AACjC,oBAAiB;AAIjB,OAAI,SAAS,KAAM,UADK;IAAC;IAAU;IAAW;IAAW;IAAS,CACrB,SAAS,OAAO,CAC3D,YAAS;OAET,QACE,OAAO,uBAAO,IAAI,MAAM,0BAA0B,EAAE;IAAE;IAAM;IAAQ,CAAC,CACtE;IAEH;GACF;CAEF,MAAM,gBAAgB;AACpB,MAAI;AACF,SAAM,KAAK,UAAU;UACf;;CAKV,MAAM,iBACJ;EACE;GAAE,OAAO;GAAU,SAAS;GAAS;EACrC;GAAE,OAAO;GAAW,SAAS;GAAS;EACtC;GAAE,OAAO;GAAW,SAAS;GAAS;EACtC;GAAE,OAAO;GAAU,SAAS;GAAS;EACtC;AAGH,gBAAe,SAAS,EAAE,OAAO,cAAc;AAC7C,UAAQ,GAAG,OAAc,QAAe;GACxC;CAEF,MAAM,wBAAwB;AAC5B,iBAAe,SAAS,EAAE,OAAO,cAAc;AAC7C,WAAQ,IAAI,OAAc,QAAe;IACzC;;CAGJ,MAAM,aAAa;AACjB,MAAI;AACF,SAAM,KAAK,UAAU;UACf;;AAKV,QAAO;EAAE;EAAM;EAAQ;EAAa"}
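index.mjs.map receives the same treatment: the lone `names` entry for `signalHandlers` is removed and its reference disappears from `mappings` (`MAAMA,` becomes `MAAM,`), while `sourcesContent` stays byte-for-byte identical. That source exposes `runParallel`, which returns a `ParallelHandle` of `{ kill, result, commandText }`. A minimal ESM sketch of driving it, again with a hypothetical import path and command:

import { runParallel } from '@intlayer/chokidar/dist/esm/utils/runParallel/index.mjs';

// A plain string command runs through the platform shell ($SHELL -c on
// POSIX, cmd.exe /d /s /c on Windows), per the embedded source.
const handle = runParallel('echo hello');
console.log(`started: ${handle.commandText}`);

try {
  // Resolves on exit code 0 or a graceful termination signal; rejects otherwise.
  await handle.result;
  console.log('child exited cleanly');
} catch (err) {
  console.error('child failed:', err);
}

// handle.kill() sends SIGTERM (best effort); SIGTERM is in the graceful-signal
// list, so `result` would still resolve rather than reject.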