@intlayer/chokidar 7.0.7 → 7.0.8-canary.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/_virtual/_utils_asset.cjs +0 -3
- package/dist/cjs/buildIntlayerDictionary/buildIntlayerDictionary.cjs +0 -2
- package/dist/cjs/buildIntlayerDictionary/buildIntlayerDictionary.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/processContentDeclaration.cjs +0 -1
- package/dist/cjs/buildIntlayerDictionary/processContentDeclaration.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/writeDynamicDictionary.cjs +0 -4
- package/dist/cjs/buildIntlayerDictionary/writeDynamicDictionary.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/writeFetchDictionary.cjs +0 -3
- package/dist/cjs/buildIntlayerDictionary/writeFetchDictionary.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/writeMergedDictionary.cjs +0 -4
- package/dist/cjs/buildIntlayerDictionary/writeMergedDictionary.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/writeRemoteDictionary.cjs +0 -3
- package/dist/cjs/buildIntlayerDictionary/writeRemoteDictionary.cjs.map +1 -1
- package/dist/cjs/buildIntlayerDictionary/writeUnmergedDictionary.cjs +0 -4
- package/dist/cjs/buildIntlayerDictionary/writeUnmergedDictionary.cjs.map +1 -1
- package/dist/cjs/cleanOutputDir.cjs +0 -2
- package/dist/cjs/cleanOutputDir.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/createDictionaryEntryPoint.cjs +0 -3
- package/dist/cjs/createDictionaryEntryPoint/createDictionaryEntryPoint.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/generateDictionaryListContent.cjs +0 -2
- package/dist/cjs/createDictionaryEntryPoint/generateDictionaryListContent.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/getBuiltDictionariesPath.cjs +0 -2
- package/dist/cjs/createDictionaryEntryPoint/getBuiltDictionariesPath.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/getBuiltDynamicDictionariesPath.cjs +0 -2
- package/dist/cjs/createDictionaryEntryPoint/getBuiltDynamicDictionariesPath.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/getBuiltFetchDictionariesPath.cjs +0 -2
- package/dist/cjs/createDictionaryEntryPoint/getBuiltFetchDictionariesPath.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/getBuiltRemoteDictionariesPath.cjs +0 -2
- package/dist/cjs/createDictionaryEntryPoint/getBuiltRemoteDictionariesPath.cjs.map +1 -1
- package/dist/cjs/createDictionaryEntryPoint/getBuiltUnmergedDictionariesPath.cjs +0 -2
- package/dist/cjs/createDictionaryEntryPoint/getBuiltUnmergedDictionariesPath.cjs.map +1 -1
- package/dist/cjs/createType/createModuleAugmentation.cjs +0 -3
- package/dist/cjs/createType/createModuleAugmentation.cjs.map +1 -1
- package/dist/cjs/createType/createType.cjs +0 -3
- package/dist/cjs/createType/createType.cjs.map +1 -1
- package/dist/cjs/fetchDistantDictionaries.cjs +0 -2
- package/dist/cjs/fetchDistantDictionaries.cjs.map +1 -1
- package/dist/cjs/filterInvalidDictionaries.cjs +0 -1
- package/dist/cjs/filterInvalidDictionaries.cjs.map +1 -1
- package/dist/cjs/handleAdditionalContentDeclarationFile.cjs +0 -1
- package/dist/cjs/handleAdditionalContentDeclarationFile.cjs.map +1 -1
- package/dist/cjs/handleContentDeclarationFileChange.cjs +3 -1
- package/dist/cjs/handleContentDeclarationFileChange.cjs.map +1 -1
- package/dist/cjs/handleUnlinkedContentDeclarationFile.cjs +0 -1
- package/dist/cjs/handleUnlinkedContentDeclarationFile.cjs.map +1 -1
- package/dist/cjs/listGitFiles.cjs +0 -3
- package/dist/cjs/listGitFiles.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/getIntlayerBundle.cjs +0 -4
- package/dist/cjs/loadDictionaries/getIntlayerBundle.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/loadContentDeclaration.cjs +0 -3
- package/dist/cjs/loadDictionaries/loadContentDeclaration.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/loadDictionaries.cjs +0 -1
- package/dist/cjs/loadDictionaries/loadDictionaries.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/loadLocalDictionaries.cjs +0 -2
- package/dist/cjs/loadDictionaries/loadLocalDictionaries.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/loadRemoteDictionaries.cjs +0 -3
- package/dist/cjs/loadDictionaries/loadRemoteDictionaries.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/log.cjs +0 -1
- package/dist/cjs/loadDictionaries/log.cjs.map +1 -1
- package/dist/cjs/prepareIntlayer.cjs +0 -2
- package/dist/cjs/prepareIntlayer.cjs.map +1 -1
- package/dist/cjs/reduceDictionaryContent/reduceDictionaryContent.cjs +0 -1
- package/dist/cjs/reduceDictionaryContent/reduceDictionaryContent.cjs.map +1 -1
- package/dist/cjs/utils/chunkJSON.cjs +7 -0
- package/dist/cjs/utils/chunkJSON.cjs.map +1 -1
- package/dist/cjs/utils/formatter.cjs +0 -4
- package/dist/cjs/utils/formatter.cjs.map +1 -1
- package/dist/cjs/utils/runOnce.cjs +0 -2
- package/dist/cjs/utils/runOnce.cjs.map +1 -1
- package/dist/cjs/utils/runParallel/bin.cjs +0 -1
- package/dist/cjs/utils/runParallel/bin.cjs.map +1 -1
- package/dist/cjs/utils/runParallel/index.cjs +0 -1
- package/dist/cjs/utils/runParallel/index.cjs.map +1 -1
- package/dist/cjs/utils/runParallel/runTask.cjs +0 -1
- package/dist/cjs/utils/runParallel/runTask.cjs.map +1 -1
- package/dist/cjs/utils/runParallel/spawnPosix.cjs +0 -1
- package/dist/cjs/utils/runParallel/spawnPosix.cjs.map +1 -1
- package/dist/cjs/utils/runParallel/spawnWin32.cjs +0 -1
- package/dist/cjs/utils/runParallel/spawnWin32.cjs.map +1 -1
- package/dist/cjs/watcher.cjs +1 -3
- package/dist/cjs/watcher.cjs.map +1 -1
- package/dist/cjs/writeConfiguration/index.cjs +0 -2
- package/dist/cjs/writeConfiguration/index.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/detectFormatCommand.cjs +0 -1
- package/dist/cjs/writeContentDeclaration/detectFormatCommand.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/processContentDeclarationContent.cjs +0 -5
- package/dist/cjs/writeContentDeclaration/processContentDeclarationContent.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/transformJSFile.cjs +0 -3
- package/dist/cjs/writeContentDeclaration/transformJSFile.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs +0 -4
- package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs.map +1 -1
- package/dist/cjs/writeContentDeclaration/writeJSFile.cjs +0 -5
- package/dist/cjs/writeContentDeclaration/writeJSFile.cjs.map +1 -1
- package/dist/cjs/writeFileIfChanged.cjs +0 -1
- package/dist/cjs/writeFileIfChanged.cjs.map +1 -1
- package/dist/esm/handleContentDeclarationFileChange.mjs +3 -0
- package/dist/esm/handleContentDeclarationFileChange.mjs.map +1 -1
- package/dist/esm/utils/chunkJSON.mjs +7 -0
- package/dist/esm/utils/chunkJSON.mjs.map +1 -1
- package/dist/esm/watcher.mjs +1 -0
- package/dist/esm/watcher.mjs.map +1 -1
- package/dist/types/buildIntlayerDictionary/buildIntlayerDictionary.d.ts +2 -2
- package/dist/types/buildIntlayerDictionary/writeDynamicDictionary.d.ts +3 -3
- package/dist/types/buildIntlayerDictionary/writeFetchDictionary.d.ts +3 -3
- package/dist/types/buildIntlayerDictionary/writeMergedDictionary.d.ts +2 -2
- package/dist/types/buildIntlayerDictionary/writeRemoteDictionary.d.ts +2 -2
- package/dist/types/createDictionaryEntryPoint/createDictionaryEntryPoint.d.ts +2 -2
- package/dist/types/createDictionaryEntryPoint/generateDictionaryListContent.d.ts +2 -2
- package/dist/types/fetchDistantDictionaries.d.ts +1 -0
- package/dist/types/fetchDistantDictionaries.d.ts.map +1 -1
- package/dist/types/handleContentDeclarationFileChange.d.ts.map +1 -1
- package/dist/types/index.d.ts +4 -0
- package/dist/types/loadDictionaries/loadRemoteDictionaries.d.ts +2 -2
- package/dist/types/utils/chunkJSON.d.ts.map +1 -1
- package/package.json +22 -22
package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs

```diff
@@ -3,13 +3,9 @@ const require_utils_getFormatFromExtension = require('../utils/getFormatFromExtension.cjs');
 const require_writeContentDeclaration_processContentDeclarationContent = require('./processContentDeclarationContent.cjs');
 const require_writeContentDeclaration_writeJSFile = require('./writeJSFile.cjs');
 let node_path = require("node:path");
-node_path = require_rolldown_runtime.__toESM(node_path);
 let __intlayer_core = require("@intlayer/core");
-__intlayer_core = require_rolldown_runtime.__toESM(__intlayer_core);
 let __intlayer_unmerged_dictionaries_entry = require("@intlayer/unmerged-dictionaries-entry");
-__intlayer_unmerged_dictionaries_entry = require_rolldown_runtime.__toESM(__intlayer_unmerged_dictionaries_entry);
 let node_fs_promises = require("node:fs/promises");
-node_fs_promises = require_rolldown_runtime.__toESM(node_fs_promises);
 let deep_equal = require("deep-equal");
 deep_equal = require_rolldown_runtime.__toESM(deep_equal);
 
```
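This hunk, and the two .cjs hunks that follow, drop the `require_rolldown_runtime.__toESM(...)` interop wrappers around modules that are only accessed by named property (Node built-ins and the @intlayer workspace packages), while `deep-equal`, which is consumed as a CommonJS default export, keeps its wrapper. A minimal sketch of why that is safe, assuming the Rolldown helper behaves like the analogous esbuild/Rollup interop shim (the real `rolldown_runtime` body may differ):

```ts
// Hedged sketch of a __toESM-style interop helper; assumed behavior,
// not the actual rolldown_runtime implementation.
const __toESM = (mod: any): any =>
  mod?.__esModule ? mod : { ...mod, default: mod };

// Namespace-style access works on the raw CJS exports object, so the
// wrapper is dead code for Node built-ins:
const node_path = require("node:path");
node_path.join("a", "b"); // fine without __toESM

// Default-import access still needs the shim, which is why deep-equal
// (used as `import deepEqual from 'deep-equal'`) keeps it:
const deep_equal = __toESM(require("deep-equal"));
console.log(deep_equal.default({ a: 1 }, { a: 1 })); // true
```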
package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs.map

[sourcemap diff omitted: the single-line map is regenerated; the embedded sourcesContent is unchanged and only the mappings field differs.]
package/dist/cjs/writeContentDeclaration/writeJSFile.cjs

```diff
@@ -4,15 +4,10 @@ const require_utils_getFormatFromExtension = require('../utils/getFormatFromExtension.cjs');
 const require_writeContentDeclaration_transformJSFile = require('./transformJSFile.cjs');
 const require_writeContentDeclaration_detectFormatCommand = require('./detectFormatCommand.cjs');
 let node_fs = require("node:fs");
-node_fs = require_rolldown_runtime.__toESM(node_fs);
 let __intlayer_config = require("@intlayer/config");
-__intlayer_config = require_rolldown_runtime.__toESM(__intlayer_config);
 let node_path = require("node:path");
-node_path = require_rolldown_runtime.__toESM(node_path);
 let node_fs_promises = require("node:fs/promises");
-node_fs_promises = require_rolldown_runtime.__toESM(node_fs_promises);
 let node_child_process = require("node:child_process");
-node_child_process = require_rolldown_runtime.__toESM(node_child_process);
 
 //#region src/writeContentDeclaration/writeJSFile.ts
 /**
```
package/dist/cjs/writeContentDeclaration/writeJSFile.cjs.map

[sourcemap diff omitted: the single-line map is regenerated; the embedded sourcesContent is unchanged and only the mappings field differs.]
package/dist/cjs/writeFileIfChanged.cjs

```diff
@@ -1,6 +1,5 @@
 const require_rolldown_runtime = require('./_virtual/rolldown_runtime.cjs');
 let node_fs_promises = require("node:fs/promises");
-node_fs_promises = require_rolldown_runtime.__toESM(node_fs_promises);
 
 //#region src/writeFileIfChanged.ts
 const writeFileIfChanged = async (path, dataOrStream, { encoding = "utf8" } = {}) => {
```
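For context, the embedded source shows `writeFileIfChanged` currently writes unconditionally — the streaming hash-compare path is commented out with the note that it grew build time from 3s to 7s — and always resolves `true` ("changed"). A reduced sketch of that contract, not the package export itself:

```ts
import { writeFile } from "node:fs/promises";

// Reduced sketch mirroring the diffed signature and its current
// compare-disabled behavior; not the actual package export.
const writeFileIfChanged = async (
  path: string,
  dataOrStream: string,
  { encoding = "utf8" }: { encoding?: BufferEncoding } = {}
): Promise<boolean> => {
  await writeFile(path, dataOrStream, { encoding });
  return true; // always reports "changed" while hashing is disabled
};

(async () => {
  console.log(await writeFileIfChanged("/tmp/out.json", "{}\n")); // true
})();
```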
package/dist/cjs/writeFileIfChanged.cjs.map

[sourcemap diff omitted: the single-line map is regenerated; the embedded sourcesContent is unchanged and only the mappings field differs.]
package/dist/esm/handleContentDeclarationFileChange.mjs

```diff
@@ -2,6 +2,7 @@ import { formatPath } from "./utils/formatter.mjs";
 import { buildDictionary } from "./buildIntlayerDictionary/buildIntlayerDictionary.mjs";
 import { getBuiltDictionariesPath } from "./createDictionaryEntryPoint/getBuiltDictionariesPath.mjs";
 import { createDictionaryEntryPoint } from "./createDictionaryEntryPoint/createDictionaryEntryPoint.mjs";
+import { createModuleAugmentation } from "./createType/createModuleAugmentation.mjs";
 import { createTypes } from "./createType/createType.mjs";
 import { loadLocalDictionaries } from "./loadDictionaries/loadLocalDictionaries.mjs";
 import { getAppLogger } from "@intlayer/config";
```
```diff
@@ -19,6 +20,8 @@ const handleContentDeclarationFileChange = async (filePath, config) => {
 		await createDictionaryEntryPoint(config);
 		appLogger("Dictionary list built", { isVerbose: true });
 	}
+	createModuleAugmentation(config);
+	appLogger("Module augmentation built", { isVerbose: true });
 	for await (const plugin of config.plugins ?? []) {
 		const { unmergedDictionaries, mergedDictionaries } = dictionariesOutput;
 		await plugin.afterBuild?.({
```
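Net effect of the two .mjs hunks: on a content-declaration change, module augmentation is now regenerated unconditionally, after the types step and the (conditional) dictionary entry point, and before plugin post-processing. A hedged sketch of the updated ordering, using the real step names from the diff wired to stub implementations:

```ts
// Sketch of the updated rebuild pipeline; the step names come from the
// diff, the stub bodies and `rebuild` wrapper are illustration only.
type Config = { plugins?: { afterBuild?: (ctx: unknown) => Promise<void> }[] };

const createTypes = (_paths: string[], _c: Config) => {};
const createDictionaryEntryPoint = async (_c: Config) => {};
const createModuleAugmentation = (_c: Config) => {}; // new in 7.0.8-canary.0

const rebuild = async (updatedPaths: string[], allPaths: string[], config: Config) => {
  createTypes(updatedPaths, config); // 1. regenerate TypeScript types
  if (updatedPaths.some((p) => !allPaths.includes(p))) {
    await createDictionaryEntryPoint(config); // 2. only when new dictionaries appear
  }
  createModuleAugmentation(config); // 3. now runs on every change (this release)
  for (const plugin of config.plugins ?? []) {
    await plugin.afterBuild?.({ configuration: config }); // 4. plugin post-processing
  }
};
```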
package/dist/esm/handleContentDeclarationFileChange.mjs.map

[sourcemap diff omitted: the single-line map is regenerated; the embedded sourcesContent now imports createModuleAugmentation from './createType/createModuleAugmentation' and calls createModuleAugmentation(config) (logging "Module augmentation built") after the dictionary-list step.]
package/dist/esm/utils/chunkJSON.mjs

```diff
@@ -1,6 +1,13 @@
 import { getChunk } from "./getChunk.mjs";
 
 //#region src/utils/chunkJSON.ts
+/**
+ * Split & reassemble JSON by character budget.
+ * - Measures serialized size using JSON.stringify(..).length (characters).
+ * - Ensures each chunk is itself valid JSON.
+ * - Very large strings are split into safe pieces using getChunk and re-concatenated on assemble.
+ * - Protects against circular structures (JSON can't serialize those anyway).
+ */
 const isObject = (val) => {
 	return typeof val === "object" && val !== null && !Array.isArray(val);
 };
```
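The new header comment is lifted verbatim from src/utils/chunkJSON.ts, whose embedded source also carries a round-trip usage example, reproduced here. `chunkJSON`, `assembleJSON`, and the `JSONObject` type are the module's actual exports, but the import specifier below is a hypothetical illustration:

```ts
// Hypothetical import path; the exports themselves come from utils/chunkJSON.
import { assembleJSON, chunkJSON, type JSONObject } from "@intlayer/chokidar";

const big: JSONObject = {
  title: "Document",
  content: "…a very very long text…",
  items: Array.from({ length: 2000 }, (_, i) => ({ id: i, label: `Item ${i}` })),
};

// Split into chunks whose serialized size stays within ~16k characters,
// each one valid standalone JSON:
const chunks = chunkJSON(big, 16_000);

// Send each `chunks[i]` independently, then reassemble:
const restored = assembleJSON(chunks);
console.log(JSON.stringify(restored) === JSON.stringify(big)); // true
```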
package/dist/esm/utils/chunkJSON.mjs.map

[sourcemap diff truncated in this extract: the single-line map is regenerated, but only the removed side survives here; the embedded sourcesContent documents the chunkJSON / assembleJSON split-and-reassemble contract shown above.]
B;AACrB,MAAI,aAAa,QAAQ,SAAS,GAAG;AACnC,UAAO,KAAK,aAAa;AACzB,kBAAe;IACb,eAAe;IACf,OAAO;IACP,OAAO;IACP;IACA;IACA,SAAS,EAAE;IACZ;;;AAIL,MAAK,MAAM,SAAS,WAQlB,KALE,oBACA,KAAK,UAAU,aAAa,QAAQ,CAAC,UACpC,aAAa,QAAQ,SAAS,IAAI,KACnC,KAAK,UAAU,MAAM,CAAC,UAEH,SACnB,cAAa,QAAQ,KAAK,MAAM;MAC3B;AAEL,MAAI,aAAa,QAAQ,SAAS,EAChC,WAAU;AAMZ,MAFmB,oBAAoB,KAAK,UAAU,CAAC,MAAM,CAAC,CAAC,SAE9C,SAGf,OAAM,IAAI,MACR,+FACD;AAGH,eAAa,QAAQ,KAAK,MAAM;;AAIpC,WAAU;AAGV,KAAI,OAAO,WAAW,EACpB,QAAO,KAAK;EACV,eAAe;EACf,OAAO;EACP,OAAO;EACP;EACA;EACA,SAAS,EAAE;EACZ,CAAC;CAIJ,MAAM,cAAc,OAAO;AAE3B,QAAO,SAAS,OAAO,UAAU;AAC/B,QAAM,QAAQ;AACd,QAAM,QAAQ;GACd;AAEF,QAAO;;;;;;;;;;;;AAaT,MAAa,8BACX,UAC2B;CAC3B,MAAMC,OAAY,MAAM,aAAa,UAAU,EAAE,GAAG,EAAE;AAGtD,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,WAAU,MAAM,MAAM,MAAM,MAAM,MAAM;CAM5C,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EAExC,QAAO;GAGT,MAAMC,QAAkB,EAAE;GAC1B,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,UAAU;AAC7B,mBAAc;AACd;;AAGF,UAAM,KAAK,MAAM;;AAInB,OAAI,YACF,QAAO,MAAM,KAAK,GAAG;AAGvB,UAAO;;AAIT,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EAEvC,QAAO;GAGT,MAAMC,SAAgB,EAAE;GACxB,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,EAAE;AACzB,mBAAc;AACd;;AAGF,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAK9C,OAAI,YACF,QAAO;AAGT,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;AAGT,QAAO,oBAAoB,KAAK;;AAGlC,MAAa,gBAAgB,WAAgD;AAC3E,KAAI,CAAC,UAAU,OAAO,WAAW,EAC/B,OAAM,IAAI,MAAM,sBAAsB;CAIxC,MAAM,SAAS,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;CAC5D,MAAM,EAAE,UAAU,aAAa,OAAO;CACtC,MAAM,gBAAgB;AAEtB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,kBAAkB,eAAe;AACzC,WAAQ,MAAM,8BAA8B;IAC1C,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,6BAA6B;;AAG/C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,6BAA6B;IACzC,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,4BAA4B;;AAG9C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,yDAAyD;IACrE,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,wDAAwD;;AAG1E,MAAI,MAAM,UAAU,GAAG;AACrB,WAAQ,MAAM,+CAA+C;IAC3D,OAAO;IACP,OAAO,MAAM;IACb;IACD,CAAC;AACF,SAAM,IAAI,MAAM,8CAA8C;;;CAMlE,MAAMF,OAAY,aAAa,UAAU,EAAE,GAAG,EAAE;CAGhD,MAAM,8BAAc,IAAI,KAGrB;CAEH,MAAM,YAAY,UAChB,UAAU,MAAM,MAAM,MAAM,MAAM,MAAM;AAE1C,MAAK,MAAM,SAAS,OAClB,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,UAAS,MAAM;MACV;EACL,MAAM,MAAM,QAAQ,MAAM,KAAK;EAC/B,MAAM,SAAS,YAAY,IAAI,IAAI,IAAI;GACrC,MAAM,MAAM;GACZ,OAAO,MAAM;GACb,UAAU,EAAE;GACb;AAED,MAAI,OAAO,UAAU,MAAM,MACzB,OAAM,IAAI,MAAM,8CAA8C;AAGhE,SAAO,SAAS,KAAK,MAAM;AAC3B,cAAY,IAAI,KAAK,OAAO;;AAMlC,MAAK,MAAM,EAAE,MAAM,OAAO,cAAc,YAAY,QAAQ,EAAE;AAC5D,MAAI,SAAS,WAAW,MACtB,OAAM,IAAI,MAAM,yDAAyD;AAG3E,WAAS,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAE1C,YAAU,MAAM,MADG,SAAS,KAAK,SAAS,KAAK,MAAM,CAAC,KAAK,GAAG,CAC7B;;CAInC,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EACxC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAMC,QAAkB,EAAE;AAE1B,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,SACnB,OAAM,IAAI,MACR,yDACD;AAGH,UAAM,KAAK,MAAM;;AAGnB,
UAAO,MAAM,KAAK,GAAG;;AAIvB,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EACvC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAMC,SAAgB,EAAE;AAExB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,CACvB,OAAM,IAAI,MACR,yDACD;AAGH,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAI9C,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;CAGT,MAAM,aAAa,oBAAoB,KAAK;AAG5C,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,UAAU,OAAO,OACzB,OAAM,IAAI,MACR,uDAAuD,OAAO,OAAO,cAAc,EAAE,aAAa,MAAM,QACzG;;AAIL,QAAO"}
+
{"version":3,"file":"chunkJSON.mjs","names":["output: string[]","patches: Patch[]","testPatch: SetPatch","rootType: RootType","sourceString: string","chunks: JsonChunk[]","currentChunk: JsonChunk","root: any","parts: string[]","output: any[]"],"sources":["../../../src/utils/chunkJSON.ts"],"sourcesContent":["/**\n * Split & reassemble JSON by character budget.\n * - Measures serialized size using JSON.stringify(..).length (characters).\n * - Ensures each chunk is itself valid JSON.\n * - Very large strings are split into safe pieces using getChunk and re-concatenated on assemble.\n * - Protects against circular structures (JSON can't serialize those anyway).\n */\n\nimport { getChunk } from './getChunk';\n\ntype JSONPrimitive = string | number | boolean | null;\ntype JSONValue = JSONPrimitive | JSONObject | JSONArray;\n\nexport type JSONObject = {\n [k: string]: JSONValue;\n};\n\ntype JSONArray = JSONValue[];\n\ntype Path = Array<string | number>;\n\ntype SetPatch = { op: 'set'; path: Path; value: JSONValue };\ntype StrAppendPatch = {\n op: 'str-append';\n path: Path;\n value: string; // part of a longer string\n index: number;\n total: number;\n};\ntype Patch = SetPatch | StrAppendPatch;\n\ntype RootType = 'object' | 'array';\n\nexport type JsonChunk = {\n schemaVersion: 1;\n index: number;\n total: number;\n rootType: RootType;\n checksum: string; // hash of the full original JSON string\n entries: Patch[];\n};\n\nconst isObject = (val: unknown): val is Record<string, unknown> => {\n return typeof val === 'object' && val !== null && !Array.isArray(val);\n};\n\nconst computeDjb2 = (str: string): string => {\n // Simple 32-bit hash; deterministic & fast\n let hash = 5381;\n\n for (let i = 0; i < str.length; i++) {\n hash = ((hash << 5) + hash) ^ str.charCodeAt(i);\n }\n\n // convert to unsigned hex\n return (hash >>> 0).toString(16).padStart(8, '0');\n};\n\nconst setAtPath = (root: any, path: Path, value: JSONValue) => {\n let current = root;\n\n for (let i = 0; i < path.length - 1; i++) {\n const key = path[i];\n const nextKey = path[i + 1];\n const isNextIndex = typeof nextKey === 'number';\n\n if (typeof key === 'number') {\n if (!Array.isArray(current)) {\n throw new Error(`Expected array at path segment ${i}`);\n }\n\n if (current[key] === undefined) {\n current[key] = isNextIndex ? [] : {};\n }\n\n current = current[key];\n } else {\n if (!isObject(current)) {\n throw new Error(`Expected object at path segment ${i}`);\n }\n\n if (!(key in current)) {\n (current as any)[key] = isNextIndex ? 
[] : {};\n }\n\n current = (current as any)[key];\n }\n }\n\n const last = path[path.length - 1];\n\n if (typeof last === 'number') {\n if (!Array.isArray(current)) {\n throw new Error(`Expected array at final segment`);\n }\n\n current[last] = value as any;\n } else {\n if (!isObject(current)) {\n throw new Error(`Expected object at final segment`);\n }\n\n (current as any)[last] = value as any;\n }\n};\n\nconst pathKey = (path: Path): string => {\n // stable key for grouping string parts\n return JSON.stringify(path);\n};\n\n/**\n * Split a string into parts using getChunk with a charLength budget per part.\n */\nconst splitStringByBudget = (\n str: string,\n maxCharsPerPart: number\n): string[] => {\n if (maxCharsPerPart <= 0) {\n throw new Error('maxChars must be > 0');\n }\n\n const output: string[] = [];\n let offset = 0;\n\n while (offset < str.length) {\n const part = getChunk(str, {\n charStart: offset,\n charLength: maxCharsPerPart,\n });\n\n if (!part) break;\n\n output.push(part);\n offset += part.length;\n }\n\n return output;\n};\n\n/**\n * Flatten JSON into patches (leaf writes). Strings too large to fit in a single\n * chunk are yielded as multiple str-append patches.\n */\nconst flattenToPatches = (\n value: JSONValue,\n maxCharsPerChunk: number,\n path: Path = [],\n seen = new WeakSet<object>()\n): Patch[] => {\n // Conservative per-entry cap so a single entry fits into an empty chunk with envelope overhead.\n // (Envelope ~ a few hundred chars; we keep a comfortable margin.)\n const maxStringPiece = Math.max(\n 1,\n Math.floor((maxCharsPerChunk - 400) * 0.8)\n );\n\n const patches: Patch[] = [];\n\n const walk = (currentValue: JSONValue, currentPath: Path) => {\n if (typeof currentValue === 'string') {\n // If the serialized patch wouldn't fit, split the string into multiple parts\n // and encode as a split-node sentinel with numbered keys.\n const testPatch: SetPatch = {\n op: 'set',\n path: currentPath,\n value: currentValue,\n };\n const testLen = JSON.stringify(testPatch).length + 150; // margin\n\n if (testLen <= maxCharsPerChunk) {\n patches.push(testPatch);\n return;\n }\n\n // Use getChunk-based splitting to produce stable parts\n const parts = splitStringByBudget(currentValue, maxStringPiece);\n\n // Emit split-node metadata and parts as individual leaf writes\n patches.push({\n op: 'set',\n path: [...currentPath, '__splittedType'],\n value: 'string',\n });\n patches.push({\n op: 'set',\n path: [...currentPath, '__total'],\n value: parts.length,\n });\n\n for (let i = 0; i < parts.length; i++) {\n patches.push({\n op: 'set',\n path: [...currentPath, String(i + 1)],\n value: parts[i],\n });\n }\n\n return;\n }\n\n if (currentValue === null || typeof currentValue !== 'object') {\n patches.push({ op: 'set', path: currentPath, value: currentValue });\n return;\n }\n\n if (seen.has(currentValue as object)) {\n throw new Error('Cannot serialize circular structures to JSON.');\n }\n\n seen.add(currentValue as object);\n\n if (Array.isArray(currentValue)) {\n for (let i = 0; i < currentValue.length; i++) {\n walk(currentValue[i] as JSONValue, [...currentPath, i]);\n }\n } else {\n for (const key of Object.keys(currentValue)) {\n walk((currentValue as JSONObject)[key], [...currentPath, key]);\n }\n }\n\n seen.delete(currentValue as object);\n };\n\n walk(value, path);\n return patches;\n};\n\n/**\n * Split JSON into chunks constrained by character count of serialized chunk.\n */\nexport const chunkJSON = (\n value: JSONObject | JSONArray,\n maxChars: number\n): 
JsonChunk[] => {\n if (!isObject(value) && !Array.isArray(value)) {\n throw new Error('Root must be an object or array.');\n }\n\n if (maxChars < 500) {\n // You can lower this if you truly need; recommended to keep some envelope headroom.\n throw new Error('maxChars is too small. Use at least 500 characters.');\n }\n\n const rootType: RootType = Array.isArray(value) ? 'array' : 'object';\n let sourceString: string;\n\n try {\n sourceString = JSON.stringify(value);\n } catch {\n // Provide a deterministic error message for circular refs\n throw new Error('Cannot serialize circular structures to JSON.');\n }\n\n const checksum = computeDjb2(sourceString);\n const allPatches = flattenToPatches(value as JSONValue, maxChars);\n\n const chunks: JsonChunk[] = [];\n let currentChunk: JsonChunk = {\n schemaVersion: 1,\n index: 0, // provisional\n total: 0, // provisional\n rootType,\n checksum,\n entries: [],\n };\n\n const emptyEnvelopeSize = JSON.stringify({\n ...currentChunk,\n entries: [],\n }).length;\n\n const tryFlush = () => {\n if (currentChunk.entries.length > 0) {\n chunks.push(currentChunk);\n currentChunk = {\n schemaVersion: 1,\n index: 0,\n total: 0,\n rootType,\n checksum,\n entries: [],\n };\n }\n };\n\n for (const patch of allPatches) {\n // Would adding this patch exceed maxChars?\n const withPatchSize =\n emptyEnvelopeSize +\n JSON.stringify(currentChunk.entries).length + // current entries array\n (currentChunk.entries.length ? 1 : 0) + // possible comma\n JSON.stringify(patch).length;\n\n if (withPatchSize <= maxChars) {\n currentChunk.entries.push(patch);\n } else {\n // Start a new chunk if current has items\n if (currentChunk.entries.length > 0) {\n tryFlush();\n }\n\n // Ensure single patch fits into an empty chunk\n const singleSize = emptyEnvelopeSize + JSON.stringify([patch]).length;\n\n if (singleSize > maxChars) {\n // This should only happen for massive strings, which we pre-split;\n // if it happens for a non-string, we cannot split further.\n throw new Error(\n 'A single entry exceeds maxChars and cannot be split. Reduce entry size or increase maxChars.'\n );\n }\n\n currentChunk.entries.push(patch);\n }\n }\n\n tryFlush();\n\n // Ensure at least one chunk exists (even for empty root)\n if (chunks.length === 0) {\n chunks.push({\n schemaVersion: 1,\n index: 0,\n total: 0, // provisional\n rootType,\n checksum,\n entries: [],\n });\n }\n\n // Finalize indices & totals\n const totalChunks = chunks.length;\n\n chunks.forEach((chunk, index) => {\n chunk.index = index;\n chunk.total = totalChunks;\n });\n\n return chunks;\n};\n\n/**\n * Reassemble JSON from chunks.\n * - Validates checksums and indices.\n * - Applies 'set' patches and merges string pieces from 'str-append'.\n */\n/**\n * Reconstruct content from a single chunk without validation.\n * Useful for processing individual chunks in a pipeline where you don't have all chunks yet.\n * Note: This will only reconstruct the partial content contained in this chunk.\n */\nexport const reconstructFromSingleChunk = (\n chunk: JsonChunk\n): JSONObject | JSONArray => {\n const root: any = chunk.rootType === 'array' ? 
[] : {};\n\n // Apply all 'set' patches from this chunk\n for (const entry of chunk.entries) {\n if (entry.op === 'set') {\n setAtPath(root, entry.path, entry.value);\n }\n }\n\n // Reconcile split-node sentinels for strings/arrays\n // When reconstructing from a single chunk, we may have incomplete split nodes\n const reconcileSplitNodes = (node: any): any => {\n if (node === null || typeof node !== 'object') return node;\n\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n node[i] = reconcileSplitNodes(node[i]);\n }\n return node;\n }\n\n // string split-node\n if ((node as any)['__splittedType'] === 'string') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total <= 0) {\n // Invalid split node, return as-is\n return node;\n }\n\n const parts: string[] = [];\n let hasAllParts = true;\n\n for (let i = 1; i <= total; i++) {\n const piece = (node as any)[String(i)];\n\n if (typeof piece !== 'string') {\n hasAllParts = false;\n break;\n }\n\n parts.push(piece);\n }\n\n // Only reconstruct if we have all parts, otherwise return the node as-is\n if (hasAllParts) {\n return parts.join('');\n }\n\n return node;\n }\n\n // array split-node (optional support)\n if ((node as any)['__splittedType'] === 'array') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total < 0) {\n // Invalid split node, return as-is\n return node;\n }\n\n const output: any[] = [];\n let hasAllParts = true;\n\n for (let i = 1; i <= total; i++) {\n const slice = (node as any)[String(i)];\n\n if (!Array.isArray(slice)) {\n hasAllParts = false;\n break;\n }\n\n for (let j = 0; j < slice.length; j++) {\n output.push(reconcileSplitNodes(slice[j]));\n }\n }\n\n // Only reconstruct if we have all parts\n if (hasAllParts) {\n return output;\n }\n\n return node;\n }\n\n // walk normal object\n for (const key of Object.keys(node)) {\n node[key] = reconcileSplitNodes(node[key]);\n }\n\n return node;\n };\n\n return reconcileSplitNodes(root);\n};\n\nexport const assembleJSON = (chunks: JsonChunk[]): JSONObject | JSONArray => {\n if (!chunks || chunks.length === 0) {\n throw new Error('No chunks provided.');\n }\n\n // Basic validation & sort\n const sorted = [...chunks].sort((a, b) => a.index - b.index);\n const { checksum, rootType } = sorted[0];\n const schemaVersion = 1;\n\n for (let i = 0; i < sorted.length; i++) {\n const chunk = sorted[i];\n\n if (chunk.schemaVersion !== schemaVersion) {\n console.error('Unsupported schemaVersion.', {\n cause: chunk,\n schemaVersion,\n });\n throw new Error('Unsupported schemaVersion.');\n }\n\n if (chunk.rootType !== rootType) {\n console.error('Chunks rootType mismatch.', {\n cause: chunk,\n rootType,\n });\n throw new Error('Chunks rootType mismatch.');\n }\n\n if (chunk.checksum !== checksum) {\n console.error('Chunks checksum mismatch (different source objects?).', {\n cause: chunk,\n checksum,\n });\n throw new Error('Chunks checksum mismatch (different source objects?).');\n }\n\n if (chunk.index !== i) {\n console.error('Chunk indices are not contiguous or sorted.', {\n cause: chunk,\n index: chunk.index,\n i,\n });\n throw new Error('Chunk indices are not contiguous or sorted.');\n }\n\n // Defer total check until after reconstruction to prefer more specific errors\n }\n\n const root: any = rootType === 'array' ? 
[] : {};\n\n // Collect string parts by path\n const stringParts = new Map<\n string,\n { path: Path; total: number; received: StrAppendPatch[] }\n >();\n\n const applySet = (patch: SetPatch) =>\n setAtPath(root, patch.path, patch.value);\n\n for (const chunk of sorted) {\n for (const entry of chunk.entries) {\n if (entry.op === 'set') {\n applySet(entry);\n } else {\n const key = pathKey(entry.path);\n const record = stringParts.get(key) ?? {\n path: entry.path,\n total: entry.total,\n received: [],\n };\n\n if (record.total !== entry.total) {\n throw new Error('Inconsistent string part totals for a path.');\n }\n\n record.received.push(entry);\n stringParts.set(key, record);\n }\n }\n }\n\n // Stitch strings\n for (const { path, total, received } of stringParts.values()) {\n if (received.length !== total) {\n throw new Error('Missing string parts for a path; incomplete chunk set.');\n }\n\n received.sort((a, b) => a.index - b.index);\n const fullString = received.map((part) => part.value).join('');\n setAtPath(root, path, fullString);\n }\n\n // Reconcile split-node sentinels for strings/arrays after all patches applied\n const reconcileSplitNodes = (node: any): any => {\n if (node === null || typeof node !== 'object') return node;\n\n if (Array.isArray(node)) {\n for (let i = 0; i < node.length; i++) {\n node[i] = reconcileSplitNodes(node[i]);\n }\n return node;\n }\n\n // string split-node\n if ((node as any)['__splittedType'] === 'string') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total <= 0) {\n throw new Error('Invalid split-node total for a path.');\n }\n\n const parts: string[] = [];\n\n for (let i = 1; i <= total; i++) {\n const piece = (node as any)[String(i)];\n\n if (typeof piece !== 'string') {\n throw new Error(\n 'Missing string parts for a path; incomplete chunk set.'\n );\n }\n\n parts.push(piece);\n }\n\n return parts.join('');\n }\n\n // array split-node (optional support)\n if ((node as any)['__splittedType'] === 'array') {\n const total = (node as any)['__total'];\n\n if (typeof total !== 'number' || total < 0) {\n throw new Error('Invalid split-node total for a path.');\n }\n\n const output: any[] = [];\n\n for (let i = 1; i <= total; i++) {\n const slice = (node as any)[String(i)];\n\n if (!Array.isArray(slice)) {\n throw new Error(\n 'Missing string parts for a path; incomplete chunk set.'\n );\n }\n\n for (let j = 0; j < slice.length; j++) {\n output.push(reconcileSplitNodes(slice[j]));\n }\n }\n\n return output;\n }\n\n // walk normal object\n for (const key of Object.keys(node)) {\n node[key] = reconcileSplitNodes(node[key]);\n }\n\n return node;\n };\n\n const reconciled = reconcileSplitNodes(root);\n\n // Now validate totals match provided count\n for (let i = 0; i < sorted.length; i++) {\n const chunk = sorted[i];\n\n if (chunk.total !== sorted.length) {\n throw new Error(\n `Chunk total does not match provided count. 
Expected ${sorted.length}, but chunk ${i} has total=${chunk.total}`\n );\n }\n }\n\n return reconciled;\n};\n\n/* -------------------------------------------\n * Example usage\n * -------------------------------------------\nconst big: JSONObject = {\n title: \"Document\",\n content: \"…a very very long text…\",\n items: Array.from({ length: 2000 }, (_, i) => ({ id: i, label: `Item ${i}` }))\n};\n\n// Split to ~16k-char chunks\nconst chunks = chunkJSON(big, 16_000);\n\n// Send each `chunks[i]` as JSON to your backend.\n// Later, reassemble:\nconst restored = assembleJSON(chunks);\nconsole.log(JSON.stringify(restored) === JSON.stringify(big)); // true\n*/\n"],"mappings":";;;;;;;;;;AA0CA,MAAM,YAAY,QAAiD;AACjE,QAAO,OAAO,QAAQ,YAAY,QAAQ,QAAQ,CAAC,MAAM,QAAQ,IAAI;;AAGvE,MAAM,eAAe,QAAwB;CAE3C,IAAI,OAAO;AAEX,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,QAAQ,IAC9B,SAAS,QAAQ,KAAK,OAAQ,IAAI,WAAW,EAAE;AAIjD,SAAQ,SAAS,GAAG,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI;;AAGnD,MAAM,aAAa,MAAW,MAAY,UAAqB;CAC7D,IAAI,UAAU;AAEd,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,SAAS,GAAG,KAAK;EACxC,MAAM,MAAM,KAAK;EAEjB,MAAM,cAAc,OADJ,KAAK,IAAI,OACc;AAEvC,MAAI,OAAO,QAAQ,UAAU;AAC3B,OAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kCAAkC,IAAI;AAGxD,OAAI,QAAQ,SAAS,OACnB,SAAQ,OAAO,cAAc,EAAE,GAAG,EAAE;AAGtC,aAAU,QAAQ;SACb;AACL,OAAI,CAAC,SAAS,QAAQ,CACpB,OAAM,IAAI,MAAM,mCAAmC,IAAI;AAGzD,OAAI,EAAE,OAAO,SACX,CAAC,QAAgB,OAAO,cAAc,EAAE,GAAG,EAAE;AAG/C,aAAW,QAAgB;;;CAI/B,MAAM,OAAO,KAAK,KAAK,SAAS;AAEhC,KAAI,OAAO,SAAS,UAAU;AAC5B,MAAI,CAAC,MAAM,QAAQ,QAAQ,CACzB,OAAM,IAAI,MAAM,kCAAkC;AAGpD,UAAQ,QAAQ;QACX;AACL,MAAI,CAAC,SAAS,QAAQ,CACpB,OAAM,IAAI,MAAM,mCAAmC;AAGrD,EAAC,QAAgB,QAAQ;;;AAI7B,MAAM,WAAW,SAAuB;AAEtC,QAAO,KAAK,UAAU,KAAK;;;;;AAM7B,MAAM,uBACJ,KACA,oBACa;AACb,KAAI,mBAAmB,EACrB,OAAM,IAAI,MAAM,uBAAuB;CAGzC,MAAMA,SAAmB,EAAE;CAC3B,IAAI,SAAS;AAEb,QAAO,SAAS,IAAI,QAAQ;EAC1B,MAAM,OAAO,SAAS,KAAK;GACzB,WAAW;GACX,YAAY;GACb,CAAC;AAEF,MAAI,CAAC,KAAM;AAEX,SAAO,KAAK,KAAK;AACjB,YAAU,KAAK;;AAGjB,QAAO;;;;;;AAOT,MAAM,oBACJ,OACA,kBACA,OAAa,EAAE,EACf,uBAAO,IAAI,SAAiB,KAChB;CAGZ,MAAM,iBAAiB,KAAK,IAC1B,GACA,KAAK,OAAO,mBAAmB,OAAO,GAAI,CAC3C;CAED,MAAMC,UAAmB,EAAE;CAE3B,MAAM,QAAQ,cAAyB,gBAAsB;AAC3D,MAAI,OAAO,iBAAiB,UAAU;GAGpC,MAAMC,YAAsB;IAC1B,IAAI;IACJ,MAAM;IACN,OAAO;IACR;AAGD,OAFgB,KAAK,UAAU,UAAU,CAAC,SAAS,OAEpC,kBAAkB;AAC/B,YAAQ,KAAK,UAAU;AACvB;;GAIF,MAAM,QAAQ,oBAAoB,cAAc,eAAe;AAG/D,WAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,iBAAiB;IACxC,OAAO;IACR,CAAC;AACF,WAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,UAAU;IACjC,OAAO,MAAM;IACd,CAAC;AAEF,QAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,SAAQ,KAAK;IACX,IAAI;IACJ,MAAM,CAAC,GAAG,aAAa,OAAO,IAAI,EAAE,CAAC;IACrC,OAAO,MAAM;IACd,CAAC;AAGJ;;AAGF,MAAI,iBAAiB,QAAQ,OAAO,iBAAiB,UAAU;AAC7D,WAAQ,KAAK;IAAE,IAAI;IAAO,MAAM;IAAa,OAAO;IAAc,CAAC;AACnE;;AAGF,MAAI,KAAK,IAAI,aAAuB,CAClC,OAAM,IAAI,MAAM,gDAAgD;AAGlE,OAAK,IAAI,aAAuB;AAEhC,MAAI,MAAM,QAAQ,aAAa,CAC7B,MAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,IACvC,MAAK,aAAa,IAAiB,CAAC,GAAG,aAAa,EAAE,CAAC;MAGzD,MAAK,MAAM,OAAO,OAAO,KAAK,aAAa,CACzC,MAAM,aAA4B,MAAM,CAAC,GAAG,aAAa,IAAI,CAAC;AAIlE,OAAK,OAAO,aAAuB;;AAGrC,MAAK,OAAO,KAAK;AACjB,QAAO;;;;;AAMT,MAAa,aACX,OACA,aACgB;AAChB,KAAI,CAAC,SAAS,MAAM,IAAI,CAAC,MAAM,QAAQ,MAAM,CAC3C,OAAM,IAAI,MAAM,mCAAmC;AAGrD,KAAI,WAAW,IAEb,OAAM,IAAI,MAAM,sDAAsD;CAGxE,MAAMC,WAAqB,MAAM,QAAQ,MAAM,GAAG,UAAU;CAC5D,IAAIC;AAEJ,KAAI;AACF,iBAAe,KAAK,UAAU,MAAM;SAC9B;AAEN,QAAM,IAAI,MAAM,gDAAgD;;CAGlE,MAAM,WAAW,YAAY,aAAa;CAC1C,MAAM,aAAa,iBAAiB,OAAoB,SAAS;CAEjE,MAAMC,SAAsB,EAAE;CAC9B,IAAIC,eAA0B;EAC5B,eAAe;EACf,OAAO;EACP,OAAO;EACP;EACA;EACA,SAAS,EAAE;EACZ;CAED,MAAM,oBAAoB,KAAK,UAAU;EACvC,GAAG;EACH,SAAS,EAAE;EACZ,CAAC,CAAC;CAEH,MAA
M,iBAAiB;AACrB,MAAI,aAAa,QAAQ,SAAS,GAAG;AACnC,UAAO,KAAK,aAAa;AACzB,kBAAe;IACb,eAAe;IACf,OAAO;IACP,OAAO;IACP;IACA;IACA,SAAS,EAAE;IACZ;;;AAIL,MAAK,MAAM,SAAS,WAQlB,KALE,oBACA,KAAK,UAAU,aAAa,QAAQ,CAAC,UACpC,aAAa,QAAQ,SAAS,IAAI,KACnC,KAAK,UAAU,MAAM,CAAC,UAEH,SACnB,cAAa,QAAQ,KAAK,MAAM;MAC3B;AAEL,MAAI,aAAa,QAAQ,SAAS,EAChC,WAAU;AAMZ,MAFmB,oBAAoB,KAAK,UAAU,CAAC,MAAM,CAAC,CAAC,SAE9C,SAGf,OAAM,IAAI,MACR,+FACD;AAGH,eAAa,QAAQ,KAAK,MAAM;;AAIpC,WAAU;AAGV,KAAI,OAAO,WAAW,EACpB,QAAO,KAAK;EACV,eAAe;EACf,OAAO;EACP,OAAO;EACP;EACA;EACA,SAAS,EAAE;EACZ,CAAC;CAIJ,MAAM,cAAc,OAAO;AAE3B,QAAO,SAAS,OAAO,UAAU;AAC/B,QAAM,QAAQ;AACd,QAAM,QAAQ;GACd;AAEF,QAAO;;;;;;;;;;;;AAaT,MAAa,8BACX,UAC2B;CAC3B,MAAMC,OAAY,MAAM,aAAa,UAAU,EAAE,GAAG,EAAE;AAGtD,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,WAAU,MAAM,MAAM,MAAM,MAAM,MAAM;CAM5C,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EAExC,QAAO;GAGT,MAAMC,QAAkB,EAAE;GAC1B,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,UAAU;AAC7B,mBAAc;AACd;;AAGF,UAAM,KAAK,MAAM;;AAInB,OAAI,YACF,QAAO,MAAM,KAAK,GAAG;AAGvB,UAAO;;AAIT,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EAEvC,QAAO;GAGT,MAAMC,SAAgB,EAAE;GACxB,IAAI,cAAc;AAElB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,EAAE;AACzB,mBAAc;AACd;;AAGF,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAK9C,OAAI,YACF,QAAO;AAGT,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;AAGT,QAAO,oBAAoB,KAAK;;AAGlC,MAAa,gBAAgB,WAAgD;AAC3E,KAAI,CAAC,UAAU,OAAO,WAAW,EAC/B,OAAM,IAAI,MAAM,sBAAsB;CAIxC,MAAM,SAAS,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;CAC5D,MAAM,EAAE,UAAU,aAAa,OAAO;CACtC,MAAM,gBAAgB;AAEtB,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,kBAAkB,eAAe;AACzC,WAAQ,MAAM,8BAA8B;IAC1C,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,6BAA6B;;AAG/C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,6BAA6B;IACzC,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,4BAA4B;;AAG9C,MAAI,MAAM,aAAa,UAAU;AAC/B,WAAQ,MAAM,yDAAyD;IACrE,OAAO;IACP;IACD,CAAC;AACF,SAAM,IAAI,MAAM,wDAAwD;;AAG1E,MAAI,MAAM,UAAU,GAAG;AACrB,WAAQ,MAAM,+CAA+C;IAC3D,OAAO;IACP,OAAO,MAAM;IACb;IACD,CAAC;AACF,SAAM,IAAI,MAAM,8CAA8C;;;CAMlE,MAAMF,OAAY,aAAa,UAAU,EAAE,GAAG,EAAE;CAGhD,MAAM,8BAAc,IAAI,KAGrB;CAEH,MAAM,YAAY,UAChB,UAAU,MAAM,MAAM,MAAM,MAAM,MAAM;AAE1C,MAAK,MAAM,SAAS,OAClB,MAAK,MAAM,SAAS,MAAM,QACxB,KAAI,MAAM,OAAO,MACf,UAAS,MAAM;MACV;EACL,MAAM,MAAM,QAAQ,MAAM,KAAK;EAC/B,MAAM,SAAS,YAAY,IAAI,IAAI,IAAI;GACrC,MAAM,MAAM;GACZ,OAAO,MAAM;GACb,UAAU,EAAE;GACb;AAED,MAAI,OAAO,UAAU,MAAM,MACzB,OAAM,IAAI,MAAM,8CAA8C;AAGhE,SAAO,SAAS,KAAK,MAAM;AAC3B,cAAY,IAAI,KAAK,OAAO;;AAMlC,MAAK,MAAM,EAAE,MAAM,OAAO,cAAc,YAAY,QAAQ,EAAE;AAC5D,MAAI,SAAS,WAAW,MACtB,OAAM,IAAI,MAAM,yDAAyD;AAG3E,WAAS,MAAM,GAAG,MAAM,EAAE,QAAQ,EAAE,MAAM;AAE1C,YAAU,MAAM,MADG,SAAS,KAAK,SAAS,KAAK,MAAM,CAAC,KAAK,GAAG,CAC7B;;CAInC,MAAM,uBAAuB,SAAmB;AAC9C,MAAI,SAAS,QAAQ,OAAO,SAAS,SAAU,QAAO;AAEtD,MAAI,MAAM,QAAQ,KAAK,EAAE;AACvB,QAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,IAC/B,MAAK,KAAK,oBAAoB,KAAK,GAAG;AAExC,UAAO;;AAIT,MAAK,KAAa,sBAAsB,UAAU;GAChD,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,SAAS,EACxC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAMC,QAAkB,EAAE;AAE1B,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,OAAO,UAAU,SACnB,OAAM,IAAI,MACR,yDACD;AAGH,UAAM,KAAK,MAAM;
;AAGnB,UAAO,MAAM,KAAK,GAAG;;AAIvB,MAAK,KAAa,sBAAsB,SAAS;GAC/C,MAAM,QAAS,KAAa;AAE5B,OAAI,OAAO,UAAU,YAAY,QAAQ,EACvC,OAAM,IAAI,MAAM,uCAAuC;GAGzD,MAAMC,SAAgB,EAAE;AAExB,QAAK,IAAI,IAAI,GAAG,KAAK,OAAO,KAAK;IAC/B,MAAM,QAAS,KAAa,OAAO,EAAE;AAErC,QAAI,CAAC,MAAM,QAAQ,MAAM,CACvB,OAAM,IAAI,MACR,yDACD;AAGH,SAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,QAAO,KAAK,oBAAoB,MAAM,GAAG,CAAC;;AAI9C,UAAO;;AAIT,OAAK,MAAM,OAAO,OAAO,KAAK,KAAK,CACjC,MAAK,OAAO,oBAAoB,KAAK,KAAK;AAG5C,SAAO;;CAGT,MAAM,aAAa,oBAAoB,KAAK;AAG5C,MAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;EACtC,MAAM,QAAQ,OAAO;AAErB,MAAI,MAAM,UAAU,OAAO,OACzB,OAAM,IAAI,MACR,uDAAuD,OAAO,OAAO,cAAc,EAAE,aAAa,MAAM,QACzG;;AAIL,QAAO"}
|
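The map above embeds the full source of src/utils/chunkJSON.ts: chunkJSON flattens a JSON tree into leaf-level 'set' patches (pre-splitting oversized strings through getChunk), packs the patches into JsonChunk envelopes whose serialized size respects the maxChars budget, and stamps every envelope with a djb2 checksum of the source document so assembleJSON can refuse to mix chunk sets. A minimal round-trip sketch against the exported API; the payload object here is invented for illustration:

import { assembleJSON, chunkJSON, type JSONObject } from '@intlayer/chokidar';

// Hypothetical payload, too large to ship in a single message.
const payload: JSONObject = {
  description: 'x'.repeat(50_000),
  items: Array.from({ length: 500 }, (_, i) => ({ id: i })),
};

// Every chunk is itself valid JSON, sized against the 2_000-character budget.
const chunks = chunkJSON(payload, 2_000);
console.log(Math.max(...chunks.map((c) => JSON.stringify(c).length)));

// Chunks may arrive out of order: assembleJSON sorts by `index` and uses the
// per-chunk `checksum` to reject chunks produced from a different source.
const restored = assembleJSON([...chunks].reverse());
console.log(JSON.stringify(restored) === JSON.stringify(payload)); // true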
package/dist/esm/watcher.mjs
CHANGED
|
@@ -7,6 +7,7 @@ import { basename } from "node:path";
 import { watch as watch$1 } from "chokidar";
 
 //#region src/watcher.ts
+/** @ts-ignore remove error Module '"chokidar"' has no exported member 'ChokidarOptions'. */
 const recentlyAddedFiles = /* @__PURE__ */ new Set();
 const watch = (options) => {
 const configuration = options?.configuration ?? getConfiguration(options?.configOptions);
package/dist/esm/watcher.mjs.map
CHANGED
|
@@ -1 +1 @@
-
{"version":3,"file":"watcher.mjs","names":["chokidarWatch"],"sources":["../../src/watcher.ts"],"sourcesContent":["import { basename } from 'node:path';\nimport {\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\n/** @ts-ignore remove error Module '\"chokidar\"' has no exported member 'ChokidarOptions'. */\nimport { type ChokidarOptions, watch as chokidarWatch } from 'chokidar';\nimport { handleAdditionalContentDeclarationFile } from './handleAdditionalContentDeclarationFile';\nimport { handleContentDeclarationFileChange } from './handleContentDeclarationFileChange';\nimport { handleUnlinkedContentDeclarationFile } from './handleUnlinkedContentDeclarationFile';\nimport { prepareIntlayer } from './prepareIntlayer';\n\nconst recentlyAddedFiles = new Set<string>();\n\ntype WatchOptions = ChokidarOptions & {\n configuration?: IntlayerConfig;\n configOptions?: GetConfigurationOptions;\n skipPrepare?: boolean;\n};\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const configuration =\n options?.configuration ?? getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const { watch: isWatchMode, watchedFilesPatternWithPath } =\n configuration.content;\n\n /** @ts-ignore remove error Expected 0-1 arguments, but got 2. */\n return chokidarWatch(watchedFilesPatternWithPath, {\n persistent: isWatchMode, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n awaitWriteFinish: {\n stabilityThreshold: 1000,\n pollInterval: 100,\n },\n ignored: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.intlayer/**',\n ],\n ...options,\n })\n .on('add', async (filePath) => {\n const fileName = basename(filePath);\n recentlyAddedFiles.add(fileName);\n\n await handleAdditionalContentDeclarationFile(filePath, configuration);\n\n setTimeout(() => recentlyAddedFiles.delete(fileName), 1000); // Allow time for unlink to trigger if it's a move\n })\n .on(\n 'change',\n async (filePath) =>\n await handleContentDeclarationFileChange(filePath, configuration)\n )\n .on('unlink', async (filePath) => {\n setTimeout(async () => {\n const fileName = basename(filePath);\n\n if (recentlyAddedFiles.has(fileName)) {\n // The file was moved, so ignore unlink\n return;\n }\n\n await handleUnlinkedContentDeclarationFile(filePath, configuration);\n }, 300); // Allow time for unlink to trigger if it's a move\n })\n .on('error', async (error) => {\n appLogger(`Watcher error: ${error}`, {\n level: 'error',\n });\n\n appLogger('Restarting watcher');\n\n await prepareIntlayer(configuration);\n });\n};\n\nexport const buildAndWatchIntlayer = async (options?: WatchOptions) => {\n const { skipPrepare, ...rest } = options ?? {};\n const configuration =\n options?.configuration ?? getConfiguration(options?.configOptions);\n\n if (!options?.skipPrepare) {\n await prepareIntlayer(configuration, { forceRun: true });\n }\n\n if (configuration.content.watch || options?.persistent) {\n const appLogger = getAppLogger(configuration);\n\n appLogger('Watching Intlayer content declarations');\n // Start watching (assuming watch is also async)\n watch({ ...rest, configuration });\n }\n};\n"],"mappings":"
+
{"version":3,"file":"watcher.mjs","names":["chokidarWatch"],"sources":["../../src/watcher.ts"],"sourcesContent":["import { basename } from 'node:path';\nimport {\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { IntlayerConfig } from '@intlayer/types';\n/** @ts-ignore remove error Module '\"chokidar\"' has no exported member 'ChokidarOptions'. */\nimport { type ChokidarOptions, watch as chokidarWatch } from 'chokidar';\nimport { handleAdditionalContentDeclarationFile } from './handleAdditionalContentDeclarationFile';\nimport { handleContentDeclarationFileChange } from './handleContentDeclarationFileChange';\nimport { handleUnlinkedContentDeclarationFile } from './handleUnlinkedContentDeclarationFile';\nimport { prepareIntlayer } from './prepareIntlayer';\n\nconst recentlyAddedFiles = new Set<string>();\n\ntype WatchOptions = ChokidarOptions & {\n configuration?: IntlayerConfig;\n configOptions?: GetConfigurationOptions;\n skipPrepare?: boolean;\n};\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const configuration =\n options?.configuration ?? getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const { watch: isWatchMode, watchedFilesPatternWithPath } =\n configuration.content;\n\n /** @ts-ignore remove error Expected 0-1 arguments, but got 2. */\n return chokidarWatch(watchedFilesPatternWithPath, {\n persistent: isWatchMode, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n awaitWriteFinish: {\n stabilityThreshold: 1000,\n pollInterval: 100,\n },\n ignored: [\n '**/node_modules/**',\n '**/dist/**',\n '**/build/**',\n '**/.intlayer/**',\n ],\n ...options,\n })\n .on('add', async (filePath) => {\n const fileName = basename(filePath);\n recentlyAddedFiles.add(fileName);\n\n await handleAdditionalContentDeclarationFile(filePath, configuration);\n\n setTimeout(() => recentlyAddedFiles.delete(fileName), 1000); // Allow time for unlink to trigger if it's a move\n })\n .on(\n 'change',\n async (filePath) =>\n await handleContentDeclarationFileChange(filePath, configuration)\n )\n .on('unlink', async (filePath) => {\n setTimeout(async () => {\n const fileName = basename(filePath);\n\n if (recentlyAddedFiles.has(fileName)) {\n // The file was moved, so ignore unlink\n return;\n }\n\n await handleUnlinkedContentDeclarationFile(filePath, configuration);\n }, 300); // Allow time for unlink to trigger if it's a move\n })\n .on('error', async (error) => {\n appLogger(`Watcher error: ${error}`, {\n level: 'error',\n });\n\n appLogger('Restarting watcher');\n\n await prepareIntlayer(configuration);\n });\n};\n\nexport const buildAndWatchIntlayer = async (options?: WatchOptions) => {\n const { skipPrepare, ...rest } = options ?? {};\n const configuration =\n options?.configuration ?? 
getConfiguration(options?.configOptions);\n\n if (!options?.skipPrepare) {\n await prepareIntlayer(configuration, { forceRun: true });\n }\n\n if (configuration.content.watch || options?.persistent) {\n const appLogger = getAppLogger(configuration);\n\n appLogger('Watching Intlayer content declarations');\n // Start watching (assuming watch is also async)\n watch({ ...rest, configuration });\n }\n};\n"],"mappings":";;;;;;;;;;AAcA,MAAM,qCAAqB,IAAI,KAAa;AAS5C,MAAa,SAAS,YAA2B;CAC/C,MAAM,gBACJ,SAAS,iBAAiB,iBAAiB,SAAS,cAAc;CACpE,MAAM,YAAY,aAAa,cAAc;CAE7C,MAAM,EAAE,OAAO,aAAa,gCAC1B,cAAc;;AAGhB,QAAOA,QAAc,6BAA6B;EAChD,YAAY;EACZ,eAAe;EACf,kBAAkB;GAChB,oBAAoB;GACpB,cAAc;GACf;EACD,SAAS;GACP;GACA;GACA;GACA;GACD;EACD,GAAG;EACJ,CAAC,CACC,GAAG,OAAO,OAAO,aAAa;EAC7B,MAAM,WAAW,SAAS,SAAS;AACnC,qBAAmB,IAAI,SAAS;AAEhC,QAAM,uCAAuC,UAAU,cAAc;AAErE,mBAAiB,mBAAmB,OAAO,SAAS,EAAE,IAAK;GAC3D,CACD,GACC,UACA,OAAO,aACL,MAAM,mCAAmC,UAAU,cAAc,CACpE,CACA,GAAG,UAAU,OAAO,aAAa;AAChC,aAAW,YAAY;GACrB,MAAM,WAAW,SAAS,SAAS;AAEnC,OAAI,mBAAmB,IAAI,SAAS,CAElC;AAGF,SAAM,qCAAqC,UAAU,cAAc;KAClE,IAAI;GACP,CACD,GAAG,SAAS,OAAO,UAAU;AAC5B,YAAU,kBAAkB,SAAS,EACnC,OAAO,SACR,CAAC;AAEF,YAAU,qBAAqB;AAE/B,QAAM,gBAAgB,cAAc;GACpC;;AAGN,MAAa,wBAAwB,OAAO,YAA2B;CACrE,MAAM,EAAE,YAAa,GAAG,SAAS,WAAW,EAAE;CAC9C,MAAM,gBACJ,SAAS,iBAAiB,iBAAiB,SAAS,cAAc;AAEpE,KAAI,CAAC,SAAS,YACZ,OAAM,gBAAgB,eAAe,EAAE,UAAU,MAAM,CAAC;AAG1D,KAAI,cAAc,QAAQ,SAAS,SAAS,YAAY;AAGtD,EAFkB,aAAa,cAAc,CAEnC,yCAAyC;AAEnD,QAAM;GAAE,GAAG;GAAM;GAAe,CAAC"}
|
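The map carries the full watcher.ts source, including buildAndWatchIntlayer: it runs prepareIntlayer once with forceRun (unless skipPrepare is set) and then starts the watcher whenever configuration.content.watch or the persistent option is truthy. A usage sketch of the exported entry point under those semantics:

import { buildAndWatchIntlayer } from '@intlayer/chokidar';

// Build dictionaries once, then keep watching content declarations.
// `persistent` is a chokidar option and also forces watch mode here;
// `skipPrepare: true` would skip the initial prepareIntlayer pass.
await buildAndWatchIntlayer({ persistent: true });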
|
@@ -1,14 +1,14 @@
 import { UnmergedDictionaryOutput } from "./writeUnmergedDictionary.js";
 import { MergedDictionaryOutput } from "./writeMergedDictionary.js";
 import { LocalizedDictionaryOutput } from "./writeDynamicDictionary.js";
-import * as
+import * as _intlayer_types8 from "@intlayer/types";
 import { Dictionary } from "@intlayer/types";
 
 //#region src/buildIntlayerDictionary/buildIntlayerDictionary.d.ts
 /**
  * This function transpile the bundled code to to make dictionaries as JSON files
  */
-declare const buildDictionary: (localDictionariesEntries: Dictionary[], configuration?:
+declare const buildDictionary: (localDictionariesEntries: Dictionary[], configuration?: _intlayer_types8.IntlayerConfig, formats?: ("cjs" | "esm")[], importOtherDictionaries?: boolean) => Promise<{
 unmergedDictionaries: UnmergedDictionaryOutput;
 mergedDictionaries: MergedDictionaryOutput;
 dynamicDictionaries: LocalizedDictionaryOutput;
|
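This hunk shows the substance of the 7.0.7 → 7.0.8-canary.0 change for the type declarations: the old build's `import * as` and `configuration?:` lines break off mid-token (whether truncated in the shipped file or by the diff viewer), while the canary build emits the complete namespace import and the full buildDictionary signature. Per that repaired signature, a call now type-checks end to end; the entry below is a hypothetical dictionary, and the `importOtherDictionaries` semantics are named per the signature only, not shown in this diff:

import { buildDictionary } from '@intlayer/chokidar';
import type { Dictionary } from '@intlayer/types';

// Hypothetical content declaration; real entries come from loadDictionaries.
const entries = [{ key: 'home', content: { title: 'Welcome' } }] as Dictionary[];

// Omit `configuration` to use defaults and build ESM output only.
const { unmergedDictionaries, mergedDictionaries, dynamicDictionaries } =
  await buildDictionary(entries, undefined, ['esm'], true);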
@@ -1,5 +1,5 @@
 import { MergedDictionaryOutput } from "./writeMergedDictionary.js";
-import * as
+import * as _intlayer_types2 from "@intlayer/types";
 import { Dictionary, Locale } from "@intlayer/types";
 
 //#region src/buildIntlayerDictionary/writeDynamicDictionary.d.ts
|
@@ -12,7 +12,7 @@ type LocalizedDictionaryOutput = Record<string, LocalizedDictionaryResult>;
 /**
  * This function generates the content of the dictionary list file
  */
-declare const generateDictionaryEntryPoint: (localizedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?:
+declare const generateDictionaryEntryPoint: (localizedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?: _intlayer_types2.IntlayerConfig) => string;
 /**
  * Write the localized dictionaries to the dictionariesDir
  * @param mergedDictionaries - The merged dictionaries
|
@@ -29,7 +29,7 @@ declare const generateDictionaryEntryPoint: (localizedDictionariesPathsRecord: L
 * // { key: 'home', content: { ... } },
 * ```
 */
-declare const writeDynamicDictionary: (mergedDictionaries: MergedDictionaryOutput, configuration?:
+declare const writeDynamicDictionary: (mergedDictionaries: MergedDictionaryOutput, configuration?: _intlayer_types2.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<LocalizedDictionaryOutput>;
 //#endregion
 export { DictionaryResult, LocalizedDictionaryOutput, LocalizedDictionaryResult, generateDictionaryEntryPoint, writeDynamicDictionary };
 //# sourceMappingURL=writeDynamicDictionary.d.ts.map
|
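Reflowed for readability, the repaired writeDynamicDictionary declaration from the hunk above pairs the merged build output with an optional project configuration and an optional list of output formats:

declare const writeDynamicDictionary: (
  mergedDictionaries: MergedDictionaryOutput,
  configuration?: _intlayer_types2.IntlayerConfig,
  formats?: ("cjs" | "esm")[]
) => Promise<LocalizedDictionaryOutput>;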
|
@@ -1,11 +1,11 @@
 import { LocalizedDictionaryOutput, LocalizedDictionaryResult } from "./writeDynamicDictionary.js";
-import * as
+import * as _intlayer_types0 from "@intlayer/types";
 
 //#region src/buildIntlayerDictionary/writeFetchDictionary.d.ts
 /**
  * This function generates the content of the dictionary list file
  */
-declare const generateDictionaryEntryPoint: (localedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?:
+declare const generateDictionaryEntryPoint: (localedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?: _intlayer_types0.IntlayerConfig) => string;
 /**
  * Write the localized dictionaries to the dictionariesDir
  * @param mergedDictionaries - The merged dictionaries
|
@@ -22,7 +22,7 @@ declare const generateDictionaryEntryPoint: (localedDictionariesPathsRecord: Loc
 * // { key: 'home', content: { ... } },
 * ```
 */
-declare const writeFetchDictionary: (dynamicDictionaries: LocalizedDictionaryOutput, configuration?:
+declare const writeFetchDictionary: (dynamicDictionaries: LocalizedDictionaryOutput, configuration?: _intlayer_types0.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<LocalizedDictionaryOutput>;
 //#endregion
 export { generateDictionaryEntryPoint, writeFetchDictionary };
 //# sourceMappingURL=writeFetchDictionary.d.ts.map
|
|
@@ -1,5 +1,5 @@
 import { UnmergedDictionaryOutput } from "./writeUnmergedDictionary.js";
-import * as
+import * as _intlayer_types4 from "@intlayer/types";
 import { Dictionary } from "@intlayer/types";
 
 //#region src/buildIntlayerDictionary/writeMergedDictionary.d.ts
|
|
@@ -24,7 +24,7 @@ type MergedDictionaryOutput = Record<string, MergedDictionaryResult>;
 * // { key: 'home', content: { ... } },
 * ```
 */
-declare const writeMergedDictionaries: (groupedDictionaries: UnmergedDictionaryOutput, configuration?:
+declare const writeMergedDictionaries: (groupedDictionaries: UnmergedDictionaryOutput, configuration?: _intlayer_types4.IntlayerConfig) => Promise<MergedDictionaryOutput>;
 //#endregion
 export { MergedDictionaryOutput, MergedDictionaryResult, writeMergedDictionaries };
 //# sourceMappingURL=writeMergedDictionary.d.ts.map
|
|
@@ -1,4 +1,4 @@
-import * as
+import * as _intlayer_types7 from "@intlayer/types";
 import { Dictionary } from "@intlayer/types";
 
 //#region src/buildIntlayerDictionary/writeRemoteDictionary.d.ts
|
|
@@ -19,7 +19,7 @@ import { Dictionary } from "@intlayer/types";
 * // { key: 'home', content: { ... } },
 * ```
 */
-declare const writeRemoteDictionary: (remoteDictionaries: Dictionary[], configuration?:
+declare const writeRemoteDictionary: (remoteDictionaries: Dictionary[], configuration?: _intlayer_types7.IntlayerConfig) => Promise<Dictionary[]>;
 //#endregion
 export { writeRemoteDictionary };
 //# sourceMappingURL=writeRemoteDictionary.d.ts.map
|
|
@@ -1,10 +1,10 @@
-import * as
+import * as _intlayer_types0 from "@intlayer/types";
 
 //#region src/createDictionaryEntryPoint/createDictionaryEntryPoint.d.ts
 /**
  * This function generates a list of dictionaries in the main directory
  */
-declare const createDictionaryEntryPoint: (configuration?:
+declare const createDictionaryEntryPoint: (configuration?: _intlayer_types0.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<void>;
 //#endregion
 export { createDictionaryEntryPoint };
 //# sourceMappingURL=createDictionaryEntryPoint.d.ts.map
|
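createDictionaryEntryPoint is part of the package's public index, so the repaired optional parameters are directly visible to consumers. A sketch per the signature above; omitting `configuration` presumably falls back to the resolved project configuration, as the watcher does:

import { createDictionaryEntryPoint } from '@intlayer/chokidar';

// Regenerate the dictionary entry files for both module systems.
await createDictionaryEntryPoint(undefined, ['cjs', 'esm']);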
|
@@ -1,10 +1,10 @@
-import * as
+import * as _intlayer_types6 from "@intlayer/types";
 
 //#region src/createDictionaryEntryPoint/generateDictionaryListContent.d.ts
 /**
  * This function generates the content of the dictionary list file
  */
-declare const generateDictionaryListContent: (dictionaries: string[], functionName: string, format?: "cjs" | "esm", configuration?:
+declare const generateDictionaryListContent: (dictionaries: string[], functionName: string, format?: "cjs" | "esm", configuration?: _intlayer_types6.IntlayerConfig) => string;
 //#endregion
 export { generateDictionaryListContent };
 //# sourceMappingURL=generateDictionaryListContent.d.ts.map
|
|
@@ -1 +1 @@
-{"version":3,"file":"fetchDistantDictionaries.d.ts","names":[],"sources":["../../src/fetchDistantDictionaries.ts"],"sourcesContent":[],"mappings":"
+{"version":3,"file":"fetchDistantDictionaries.d.ts","names":[],"sources":["../../src/fetchDistantDictionaries.ts"],"sourcesContent":[],"mappings":";;;;;KAOK,+BAAA;;;EAAA,SAAA,CAAA,EAAA,MAAA;AASL,CAAA;;;;AAGG,cAHU,wBAGV,EAAA,CAAA,OAAA,EAFQ,+BAER,EAAA,cAAA,CAAA,EAAA,CAAA,MAAA,EADyB,kBACzB,EAAA,EAAA,GAAA,IAAA,EAAA,GAAA,OAAA,CAAQ,aAAR,EAAA,CAAA"}
|
|
@@ -1 +1 @@
-{"version":3,"file":"handleContentDeclarationFileChange.d.ts","names":[],"sources":["../../src/handleContentDeclarationFileChange.ts"],"sourcesContent":[],"mappings":";;;
+{"version":3,"file":"handleContentDeclarationFileChange.d.ts","names":[],"sources":["../../src/handleContentDeclarationFileChange.ts"],"sourcesContent":[],"mappings":";;;cAUa,+DAEH,mBAAc"}
|
package/dist/types/index.d.ts
CHANGED
|
@@ -7,12 +7,15 @@ import { getBuiltDynamicDictionariesPath } from "./createDictionaryEntryPoint/ge
 import { getBuiltFetchDictionariesPath } from "./createDictionaryEntryPoint/getBuiltFetchDictionariesPath.js";
 import { getBuiltRemoteDictionariesPath } from "./createDictionaryEntryPoint/getBuiltRemoteDictionariesPath.js";
 import { getBuiltUnmergedDictionariesPath } from "./createDictionaryEntryPoint/getBuiltUnmergedDictionariesPath.js";
+import "./createDictionaryEntryPoint/index.js";
 import { createModuleAugmentation } from "./createType/createModuleAugmentation.js";
 import { createTypes } from "./createType/createType.js";
+import "./createType/index.js";
 import { loadDictionaries } from "./loadDictionaries/loadDictionaries.js";
 import { loadContentDeclarations } from "./loadDictionaries/loadContentDeclaration.js";
 import { loadLocalDictionaries } from "./loadDictionaries/loadLocalDictionaries.js";
 import { loadRemoteDictionaries } from "./loadDictionaries/loadRemoteDictionaries.js";
+import "./loadDictionaries/index.js";
 import { fetchDistantDictionaries } from "./fetchDistantDictionaries.js";
 import { isInvalidDictionary } from "./filterInvalidDictionaries.js";
 import { Extension, Format, getExtensionFromFormat, getFormatFromExtension } from "./utils/getFormatFromExtension.js";
|
|
@@ -45,6 +48,7 @@ import { DictionaryStatus } from "./writeContentDeclaration/dictionaryStatus.js"
 import { transformJSFile } from "./writeContentDeclaration/transformJSFile.js";
 import { writeContentDeclaration } from "./writeContentDeclaration/writeContentDeclaration.js";
 import { writeJSFile } from "./writeContentDeclaration/writeJSFile.js";
+import "./writeContentDeclaration/index.js";
 import { detectFormatCommand } from "./writeContentDeclaration/detectFormatCommand.js";
 import { processContentDeclarationContent } from "./writeContentDeclaration/processContentDeclarationContent.js";
 export { type DictionaryStatus, type DiffMode, type Extension, type Format, type JSONObject, type JsonChunk, type ListGitFilesOptions, type ListGitLinesOptions, type ParallelHandle, assembleJSON, buildAndWatchIntlayer, buildDictionary, chunkJSON, cleanOutputDir, createDictionaryEntryPoint, createModuleAugmentation, createTypes, detectExportedComponentName, detectFormatCommand, fetchDistantDictionaries, formatLocale, formatPath, generateDictionaryListContent, getBuiltDictionariesPath, getBuiltDynamicDictionariesPath, getBuiltFetchDictionariesPath, getBuiltRemoteDictionariesPath, getBuiltUnmergedDictionariesPath, getChunk, getContentDeclarationFileTemplate, getExtensionFromFormat, getFileHash, getFormatFromExtension, getGlobalLimiter, getTaskLimiter, handleAdditionalContentDeclarationFile, handleContentDeclarationFileChange, handleUnlinkedContentDeclarationFile, isInvalidDictionary, kebabCaseToCamelCase, listDictionaries, listGitFiles, listGitLines, loadContentDeclarations, loadDictionaries, loadLocalDictionaries, loadRemoteDictionaries, pLimit, parallelize, parallelizeGlobal, prepareIntlayer, processContentDeclarationContent, reconstructFromSingleChunk, reduceDictionaryContent, reduceObjectFormat, resolveObjectPromises, runOnce, runParallel, sortAlphabetically, splitTextByLines, transformJSFile, verifyIdenticObjectFormat, watch, writeContentDeclaration, writeJSFile };
|
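Beyond the repaired signatures, index.d.ts gains four bare imports ("./createDictionaryEntryPoint/index.js" and friends). In a declaration file a side-effect import carries no bindings but still forces the compiler to load the target's declarations, which is how module augmentations become visible to consumers. A minimal sketch of the mechanism; the file names and the augmented field are hypothetical:

// augmentation.d.ts: contributes a module augmentation, exports no bindings.
declare module '@intlayer/types' {
  interface IntlayerConfig {
    experimentalFlag?: boolean; // hypothetical field, for illustration only
  }
}

// index.d.ts: without this bare import the augmentation is never loaded,
// and `experimentalFlag` does not exist on IntlayerConfig for consumers.
import './augmentation.js';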
|
@@ -1,11 +1,11 @@
 import { DictionariesStatus } from "./loadDictionaries.js";
-import * as
+import * as _intlayer_types5 from "@intlayer/types";
 import { Dictionary } from "@intlayer/types";
 import { DictionaryAPI } from "@intlayer/backend";
 
 //#region src/loadDictionaries/loadRemoteDictionaries.d.ts
 declare const formatDistantDictionaries: (dictionaries: (DictionaryAPI | Dictionary)[]) => Dictionary[];
-declare const loadRemoteDictionaries: (configuration?:
+declare const loadRemoteDictionaries: (configuration?: _intlayer_types5.IntlayerConfig, onStatusUpdate?: (status: DictionariesStatus[]) => void, options?: {
 onStartRemoteCheck?: () => void;
 onStopRemoteCheck?: () => void;
 onError?: (error: Error) => void;
|
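loadRemoteDictionaries is exported from the package index, and the repaired declaration exposes its progress hooks. A sketch per the visible parameter list (the hunk is cut off before the return type, so the result is not used here):

import { loadRemoteDictionaries } from '@intlayer/chokidar';

// Fetch distant dictionaries, reporting progress through the callbacks.
await loadRemoteDictionaries(
  undefined, // fall back to the project configuration
  (statuses) => console.log(`${statuses.length} dictionary statuses`),
  {
    onStartRemoteCheck: () => console.log('checking remote dictionaries...'),
    onStopRemoteCheck: () => console.log('remote check finished'),
    onError: (error) => console.error(error.message),
  }
);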
|
@@ -1 +1 @@
-
{"version":3,"file":"chunkJSON.d.ts","names":[],"sources":["../../../src/utils/chunkJSON.ts"],"sourcesContent":[],"mappings":";;;AAUkB;;;;;AAGlB,KAHK,aAAA,GAGiB,MAAA,GACP,MAAA,
+
{"version":3,"file":"chunkJSON.d.ts","names":[],"sources":["../../../src/utils/chunkJSON.ts"],"sourcesContent":[],"mappings":";;;AAUkB;;;;;AAGlB,KAHK,aAAA,GAGiB,MAAA,GACP,MAAA,GAAA,OAAS,GAAA,IAAA;AACtB,KAJG,SAAA,GAAY,aAMA,GANgB,UAMP,GANoB,SAMpB;AAErB,KANO,UAAA,GAMA;EAEP,CAAA,CAAA,EAAA,MAAA,CAAQ,EAPE,SAOF;AAA4C,CAAA;AAG7C,KAPP,SAAA,GAAY,SAYJ,EAAA;AAAyB,KAVjC,IAAA,GAAO,KAYC,CAAA,MAAA,GAAA,MAAA,CAAA;AAEb,KAZK,QAAA,GAYgB;EAuMR,EAAA,EAAA,KAAA;EACJ,IAAA,EApN0B,IAoN1B;EAAa,KAAA,EApN0B,SAoN1B;CAEnB;KArNE,cAAA,GAqNO;EAsHC,EAAA,EAAA,YAAA;EACJ,IAAA,EA1UD,IA0UC;EACN,KAAA,EAAA,MAAA;EAAa,KAAA,EAAA,MAAA;EA+Ff,KAAA,EAAA,MAAA;AAED,CAAA;KAvaK,KAAA,GAAQ,QAuawB,GAvab,cAuaa;KArahC,QAAA,GAqa8C,QAAA,GAAA,OAAA;AAAa,KAnapD,SAAA,GAmaoD;EAkL/D,aAAA,EAAA,CAAA;;;YAjlBW;;WAED;;;;;cAiME,mBACJ,aAAa,gCAEnB;;;;;;;;;;;cAsHU,oCACJ,cACN,aAAa;cAiGH,uBAAwB,gBAAc,aAAa"}
|
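Every map-file hunk in this diff changes the "mappings" field (the old values are cut off by the diff viewer, so their exact content is unknowable from this rendering). When auditing a published build for maps that carry no usable segments, a quick check using only Node's standard library; the path below is illustrative:

import { readFileSync } from 'node:fs';

// A mappings string made only of `;` (generated-line separators) and `,`
// contains no VLQ segments, i.e. nothing maps back to the original source.
const hasRealMappings = (mapPath: string): boolean => {
  const map = JSON.parse(readFileSync(mapPath, 'utf8'));
  return /[^;,]/.test(map.mappings ?? '');
};

console.log(hasRealMappings('node_modules/@intlayer/chokidar/dist/esm/utils/chunkJSON.mjs.map'));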