@intlayer/chokidar 7.3.0 → 7.3.2-canary.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. package/README.md +0 -2
  2. package/dist/cjs/index.cjs +3 -0
  3. package/dist/cjs/transformFiles/index.cjs +3 -0
  4. package/dist/cjs/transformFiles/transformFiles.cjs +12 -0
  5. package/dist/cjs/transformFiles/transformFiles.cjs.map +1 -1
  6. package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs +1 -3
  7. package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs.map +1 -1
  8. package/dist/esm/index.mjs +2 -2
  9. package/dist/esm/transformFiles/index.mjs +2 -2
  10. package/dist/esm/transformFiles/transformFiles.mjs +10 -1
  11. package/dist/esm/transformFiles/transformFiles.mjs.map +1 -1
  12. package/dist/esm/writeContentDeclaration/writeContentDeclaration.mjs +1 -3
  13. package/dist/esm/writeContentDeclaration/writeContentDeclaration.mjs.map +1 -1
  14. package/dist/types/buildIntlayerDictionary/writeDynamicDictionary.d.ts +3 -3
  15. package/dist/types/buildIntlayerDictionary/writeFetchDictionary.d.ts +3 -3
  16. package/dist/types/buildIntlayerDictionary/writeMergedDictionary.d.ts +2 -2
  17. package/dist/types/buildIntlayerDictionary/writeRemoteDictionary.d.ts +2 -2
  18. package/dist/types/createDictionaryEntryPoint/createDictionaryEntryPoint.d.ts +2 -2
  19. package/dist/types/createDictionaryEntryPoint/generateDictionaryListContent.d.ts +2 -2
  20. package/dist/types/createDictionaryEntryPoint/generateDictionaryListContent.d.ts.map +1 -1
  21. package/dist/types/index.d.ts +2 -2
  22. package/dist/types/loadDictionaries/loadRemoteDictionaries.d.ts +2 -2
  23. package/dist/types/transformFiles/index.d.ts +2 -2
  24. package/dist/types/transformFiles/transformFiles.d.ts +14 -1
  25. package/dist/types/transformFiles/transformFiles.d.ts.map +1 -1
  26. package/package.json +12 -11
package/README.md CHANGED
@@ -24,8 +24,6 @@
  <a href="https://github.com/aymericzip/intlayer/blob/main/LICENSE" target="_blank" rel="noopener noreferrer nofollow"><img src="https://img.shields.io/github/license/aymericzip/intlayer?style=for-the-badge&labelColor=000000&color=FFFFFF&logoColor=000000&cacheSeconds=86400" alt="license"/></a>
  <a href="https://github.com/aymericzip/intlayer/commits/main" target="_blank" rel="noopener noreferrer nofollow"><img src="https://img.shields.io/github/last-commit/aymericzip/intlayer?style=for-the-badge&labelColor=000000&color=FFFFFF&logoColor=000000&cacheSeconds=86400" alt="last commit"/>
  </a>
- <a href="https://bountyhub.dev/bounties?repo=intlayer" target="_blank" rel="noopener noreferrer nofollow"><img src="https://img.shields.io/badge/Bounties-on%20BountyHub-yellow?style=for-the-badge&labelColor=000000&color=FFFFFF&logoColor=000000&cacheSeconds=86400" alt="Bounties on BountyHub"/>
- </a>
  </p>

  ![Watch the video](https://github.com/aymericzip/intlayer/blob/main/docs/assets/demo_video.gif)
package/dist/cjs/index.cjs CHANGED
@@ -47,6 +47,7 @@ const require_utils_runParallel_index = require('./utils/runParallel/index.cjs')
  const require_utils_verifyIdenticObjectFormat = require('./utils/verifyIdenticObjectFormat.cjs');
  const require_watcher = require('./watcher.cjs');

+ exports.ATTRIBUTES_TO_EXTRACT = require_transformFiles_transformFiles.ATTRIBUTES_TO_EXTRACT;
  exports.assembleJSON = require_utils_chunkJSON.assembleJSON;
  exports.buildAndWatchIntlayer = require_watcher.buildAndWatchIntlayer;
  exports.buildDictionary = require_buildIntlayerDictionary_buildIntlayerDictionary.buildDictionary;
@@ -63,6 +64,7 @@ exports.fetchDistantDictionaries = require_fetchDistantDictionaries.fetchDistant
  exports.formatLocale = require_utils_formatter.formatLocale;
  exports.formatPath = require_utils_formatter.formatPath;
  exports.generateDictionaryListContent = require_createDictionaryEntryPoint_generateDictionaryListContent.generateDictionaryListContent;
+ exports.generateKey = require_transformFiles_transformFiles.generateKey;
  exports.getBuiltDictionariesPath = require_createDictionaryEntryPoint_getBuiltDictionariesPath.getBuiltDictionariesPath;
  exports.getBuiltDynamicDictionariesPath = require_createDictionaryEntryPoint_getBuiltDynamicDictionariesPath.getBuiltDynamicDictionariesPath;
  exports.getBuiltFetchDictionariesPath = require_createDictionaryEntryPoint_getBuiltFetchDictionariesPath.getBuiltFetchDictionariesPath;
@@ -97,6 +99,7 @@ exports.reduceObjectFormat = require_utils_reduceObjectFormat.reduceObjectFormat
  exports.resolveObjectPromises = require_utils_resolveObjectPromises.resolveObjectPromises;
  exports.runOnce = require_utils_runOnce.runOnce;
  exports.runParallel = require_utils_runParallel_index.runParallel;
+ exports.shouldExtract = require_transformFiles_transformFiles.shouldExtract;
  exports.sortAlphabetically = require_utils_sortAlphabetically.sortAlphabetically;
  exports.splitTextByLines = require_utils_splitTextByLine.splitTextByLines;
  exports.transformFiles = require_transformFiles_transformFiles.transformFiles;
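The three additions above surface helpers that were previously internal to transformFiles. Based on the export names and signatures visible in this diff, a consumer of the canary build could use them roughly as follows; the import style, the file being transformed, and the 'react-intlayer' target are illustrative, not taken from the package documentation.

```ts
// Illustrative sketch: the names come from the exports added in this diff.
import {
  ATTRIBUTES_TO_EXTRACT,
  generateKey,
  shouldExtract,
  transformFiles,
} from '@intlayer/chokidar';

// The attribute allow-list used during extraction is now part of the public API.
console.log(ATTRIBUTES_TO_EXTRACT); // ["title", "placeholder", "alt", "aria-label", "label"]

// shouldExtract/generateKey can be reused by custom tooling. Note that
// generateKey only reads existingKeys; the caller adds the key afterwards.
const existingKeys = new Set<string>();
if (shouldExtract('Welcome to the dashboard')) {
  const key = generateKey('Welcome to the dashboard', existingKeys); // "welcomeToTheDashboard"
  existingKeys.add(key);
}

// transformFiles keeps its existing signature (filePaths, packageName, options?).
await transformFiles(['src/App.tsx'], 'react-intlayer');
```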
package/dist/cjs/transformFiles/index.cjs CHANGED
@@ -1,6 +1,9 @@
  const require_transformFiles_extractDictionaryKey = require('./extractDictionaryKey.cjs');
  const require_transformFiles_transformFiles = require('./transformFiles.cjs');

+ exports.ATTRIBUTES_TO_EXTRACT = require_transformFiles_transformFiles.ATTRIBUTES_TO_EXTRACT;
  exports.extractDictionaryKey = require_transformFiles_extractDictionaryKey.extractDictionaryKey;
  exports.extractIntlayer = require_transformFiles_transformFiles.extractIntlayer;
+ exports.generateKey = require_transformFiles_transformFiles.generateKey;
+ exports.shouldExtract = require_transformFiles_transformFiles.shouldExtract;
  exports.transformFiles = require_transformFiles_transformFiles.transformFiles;
package/dist/cjs/transformFiles/transformFiles.cjs CHANGED
@@ -10,6 +10,9 @@ let node_child_process = require("node:child_process");
  let ts_morph = require("ts-morph");

  //#region src/transformFiles/transformFiles.ts
+ /**
+ * Attributes that should be extracted for localization
+ */
  const ATTRIBUTES_TO_EXTRACT = [
  "title",
  "placeholder",
@@ -17,6 +20,9 @@ const ATTRIBUTES_TO_EXTRACT = [
  "aria-label",
  "label"
  ];
+ /**
+ * Default function to determine if a string should be extracted for localization
+ */
  const shouldExtract = (text) => {
  const trimmed = text.trim();
  if (!trimmed) return false;
@@ -25,6 +31,9 @@ const shouldExtract = (text) => {
  if (trimmed.startsWith("{") || trimmed.startsWith("v-")) return false;
  return true;
  };
+ /**
+ * Generate a unique key from text for use as a dictionary key
+ */
  const generateKey = (text, existingKeys) => {
  let key = text.replace(/\s+/g, " ").replace(/_+/g, " ").replace(/-+/g, " ").replace(/[^a-zA-Z0-9 ]/g, "").trim().split(" ").filter(Boolean).slice(0, 5).map((word, index) => index === 0 ? word.toLowerCase() : word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()).join("");
  if (!key) key = "content";
@@ -213,6 +222,9 @@ const transformFiles = async (filePaths, packageName, options) => {
  };

  //#endregion
+ exports.ATTRIBUTES_TO_EXTRACT = ATTRIBUTES_TO_EXTRACT;
  exports.extractIntlayer = extractIntlayer;
+ exports.generateKey = generateKey;
+ exports.shouldExtract = shouldExtract;
  exports.transformFiles = transformFiles;
  //# sourceMappingURL=transformFiles.cjs.map
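The JSDoc comments are the only change inside the helpers themselves; the hunks above otherwise just export ATTRIBUTES_TO_EXTRACT, shouldExtract, and generateKey alongside extractIntlayer and transformFiles. For reference, here is how the two heuristics behave for a few inputs, derived by reading the implementation shown in this diff rather than from any published documentation.

```ts
import { generateKey, shouldExtract } from '@intlayer/chokidar';

// shouldExtract: keep multi-word strings that start with a capital letter.
shouldExtract('Save changes');  // true
shouldExtract('Save');          // false – single word (no space)
shouldExtract('save changes');  // false – no leading capital
shouldExtract('   ');           // false – empty after trimming

// generateKey: camelCase of the first five words, with a numeric suffix on
// collision and a "content" fallback when nothing alphanumeric remains.
const existingKeys = new Set<string>(['saveChanges']);
generateKey('Save changes', existingKeys);                          // "saveChanges1"
generateKey('Delete the selected items right now!', existingKeys);  // "deleteTheSelectedItemsRight"
generateKey('***', existingKeys);                                   // "content"
```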
package/dist/cjs/transformFiles/transformFiles.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"transformFiles.cjs","names":["writeContentDeclaration","extractedContent: Record<string, string>","replacements: TsReplacement[]","Node","sourceFile: SourceFile","SyntaxKind","Project","extractDictionaryKey","fs","extractedContent: Record<string, string> | null","error: any","ANSIColors","detectFormatCommand"],"sources":["../../../src/transformFiles/transformFiles.ts"],"sourcesContent":["import { execSync } from 'node:child_process';\nimport fs from 'node:fs/promises';\nimport { basename, dirname, extname, join, relative, resolve } from 'node:path';\nimport {\n ANSIColors,\n camelCaseToKebabCase,\n colorizePath,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary, IntlayerConfig } from '@intlayer/types';\nimport { Node, Project, type SourceFile, SyntaxKind } from 'ts-morph';\nimport { writeContentDeclaration } from '../writeContentDeclaration';\nimport { detectFormatCommand } from '../writeContentDeclaration/detectFormatCommand';\nimport { extractDictionaryKey } from './extractDictionaryKey';\n\n// ==========================================\n// 1. Shared Utilities\n// ==========================================\n\nconst ATTRIBUTES_TO_EXTRACT = [\n 'title',\n 'placeholder',\n 'alt',\n 'aria-label',\n 'label',\n];\n\nconst shouldExtract = (text: string): boolean => {\n const trimmed = text.trim();\n if (!trimmed) return false;\n if (!trimmed.includes(' ')) return false;\n // Starts with Capital letter\n if (!/^[A-Z]/.test(trimmed)) return false;\n // Filter out template logic identifiers (simple check)\n if (trimmed.startsWith('{') || trimmed.startsWith('v-')) return false;\n return true;\n};\n\nconst generateKey = (text: string, existingKeys: Set<string>): string => {\n const maxWords = 5;\n let key = text\n .replace(/\\s+/g, ' ')\n .replace(/_+/g, ' ')\n .replace(/-+/g, ' ')\n .replace(/[^a-zA-Z0-9 ]/g, '')\n .trim()\n .split(' ')\n .filter(Boolean)\n .slice(0, maxWords)\n .map((word, index) =>\n index === 0\n ? word.toLowerCase()\n : word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()\n )\n .join('');\n\n if (!key) key = 'content';\n if (existingKeys.has(key)) {\n let i = 1;\n while (existingKeys.has(`${key}${i}`)) i++;\n key = `${key}${i}`;\n }\n return key;\n};\n\nconst writeContentHelper = async (\n extractedContent: Record<string, string>,\n componentKey: string,\n filePath: string,\n configuration: IntlayerConfig,\n outputDir?: string\n) => {\n const { defaultLocale } = configuration.internationalization;\n const { baseDir } = configuration.content;\n\n const dirName = outputDir ? 
resolve(outputDir) : dirname(filePath);\n const ext = extname(filePath);\n const baseName = basename(filePath, ext);\n const contentBaseName = baseName.charAt(0).toLowerCase() + baseName.slice(1);\n\n const contentFilePath = join(dirName, `${contentBaseName}.content.ts`);\n const relativeContentFilePath = relative(baseDir, contentFilePath);\n\n const dictionary: Dictionary = {\n key: componentKey,\n content: extractedContent,\n locale: defaultLocale,\n filePath: relativeContentFilePath,\n };\n\n const relativeDir = relative(baseDir, dirName);\n await writeContentDeclaration(dictionary, configuration, {\n newDictionariesPath: relativeDir,\n });\n\n return contentFilePath;\n};\n\ntype TsReplacement = {\n node: Node;\n key: string;\n type: 'jsx-text' | 'jsx-attribute' | 'string-literal';\n};\n\nconst extractTsContent = (\n sourceFile: SourceFile,\n existingKeys: Set<string>\n): {\n extractedContent: Record<string, string>;\n replacements: TsReplacement[];\n} => {\n const extractedContent: Record<string, string> = {};\n const replacements: TsReplacement[] = [];\n\n sourceFile.forEachDescendant((node) => {\n // 1. JSX Text\n if (Node.isJsxText(node)) {\n const text = node.getText();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.replace(/\\s+/g, ' ').trim();\n replacements.push({ node, key, type: 'jsx-text' });\n }\n }\n\n // 2. JSX Attributes\n else if (Node.isJsxAttribute(node)) {\n const name = node.getNameNode().getText();\n if (ATTRIBUTES_TO_EXTRACT.includes(name)) {\n const initializer = node.getInitializer();\n if (Node.isStringLiteral(initializer)) {\n const text = initializer.getLiteralValue();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'jsx-attribute' });\n }\n }\n }\n }\n\n // 3. String Literals (Variables, Arrays, etc.)\n else if (Node.isStringLiteral(node)) {\n const text = node.getLiteralValue();\n if (shouldExtract(text)) {\n const parent = node.getParent();\n\n // Skip if inside ImportDeclaration\n if (\n parent?.getKindName() === 'ImportDeclaration' ||\n parent?.getKindName() === 'ImportSpecifier' ||\n parent?.getKindName() === 'ModuleSpecifier'\n ) {\n return;\n }\n\n // Skip if it's a JSX Attribute value (handled above)\n if (Node.isJsxAttribute(parent)) return;\n\n // Skip console.log\n if (Node.isCallExpression(parent)) {\n const expression = parent.getExpression();\n if (expression.getText().includes('console.log')) return;\n }\n\n // Skip Object Keys: { key: \"value\" } -> \"key\" is PropertyAssignment name if not computed\n if (Node.isPropertyAssignment(parent)) {\n if (parent.getNameNode() === node) return; // It's the key\n }\n\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'string-literal' });\n }\n }\n });\n\n return { extractedContent, replacements };\n};\n\n// ==========================================\n// 2. 
React (TS-Morph) Strategy\n// ==========================================\n\nconst processReactFile = async (\n filePath: string,\n componentKey: string,\n packageName: string,\n project: Project,\n save: boolean = true\n) => {\n let sourceFile: SourceFile;\n try {\n sourceFile = project.addSourceFileAtPath(filePath);\n } catch {\n sourceFile = project.getSourceFileOrThrow(filePath);\n }\n\n const existingKeys = new Set<string>();\n const { extractedContent, replacements } = extractTsContent(\n sourceFile,\n existingKeys\n );\n\n if (Object.keys(extractedContent).length === 0) return null;\n\n for (const { node, key, type } of replacements) {\n if (type === 'jsx-text' && Node.isJsxText(node)) {\n node.replaceWithText(`{content.${key}}`);\n } else if (type === 'jsx-attribute' && Node.isJsxAttribute(node)) {\n node.setInitializer(`{content.${key}.value}`);\n } else if (type === 'string-literal' && Node.isStringLiteral(node)) {\n // For React/JS variables, we usually want the value\n node.replaceWithText(`content.${key}.value`);\n }\n }\n\n // Inject hook\n const importDecl = sourceFile.getImportDeclaration(\n (d) => d.getModuleSpecifierValue() === packageName\n );\n if (!importDecl) {\n sourceFile.addImportDeclaration({\n namedImports: ['useIntlayer'],\n moduleSpecifier: packageName,\n });\n } else if (\n !importDecl.getNamedImports().some((n) => n.getName() === 'useIntlayer')\n ) {\n importDecl.addNamedImport('useIntlayer');\n }\n\n // Insert hook at start of component\n sourceFile.getFunctions().forEach((f) => {\n f.getBody()\n ?.asKind(SyntaxKind.Block)\n ?.insertStatements(0, `const content = useIntlayer(\"${componentKey}\");`);\n });\n\n // Also handle const/arrow components\n sourceFile.getVariableDeclarations().forEach((v) => {\n const init = v.getInitializer();\n if (Node.isArrowFunction(init) || Node.isFunctionExpression(init)) {\n const body = init.getBody();\n if (Node.isBlock(body)) {\n // Heuristic: check if it returns JSX or uses hooks\n if (\n body.getText().includes('return') ||\n body.getText().includes('use')\n ) {\n body.insertStatements(\n 0,\n `const content = useIntlayer(\"${componentKey}\");`\n );\n }\n }\n }\n });\n\n if (save) {\n await sourceFile.save();\n }\n return extractedContent;\n};\n\n// ==========================================\n// 5. 
Main Dispatcher\n// ==========================================\n\nexport type PackageName =\n | 'next-intlayer'\n | 'react-intlayer'\n | 'vue-intlayer'\n | 'svelte-intlayer'\n | 'preact-intlayer'\n | 'solid-intlayer'\n | 'angular-intlayer'\n | 'express-intlayer';\n\nexport type ExtractIntlayerOptions = {\n configOptions?: GetConfigurationOptions;\n outputDir?: string;\n codeOnly?: boolean;\n declarationOnly?: boolean;\n};\n\nexport const extractIntlayer = async (\n filePath: string,\n packageName: PackageName,\n options?: ExtractIntlayerOptions,\n project?: Project\n) => {\n const saveComponent = !options?.declarationOnly;\n const writeContent = !options?.codeOnly;\n\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n const { baseDir } = configuration.content;\n\n // Setup Project for TS/React files if needed\n const _project =\n project || new Project({ skipAddingFilesFromTsConfig: true });\n\n const baseName = extractDictionaryKey(\n filePath,\n (await fs.readFile(filePath)).toString()\n );\n const componentKey = camelCaseToKebabCase(baseName);\n const ext = extname(filePath);\n\n let extractedContent: Record<string, string> | null = null;\n\n if (ext === '.vue') {\n try {\n const { processVueFile } = (await import(\n '@intlayer/vue-transformer'\n )) as any;\n extractedContent = await processVueFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/vue-transformer', ANSIColors.YELLOW)} to process Vue files.`\n );\n }\n throw error;\n }\n } else if (ext === '.svelte') {\n try {\n const { processSvelteFile } = (await import(\n '@intlayer/svelte-transformer'\n )) as any;\n extractedContent = await processSvelteFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/svelte-transformer', ANSIColors.YELLOW)} to process Svelte files.`\n );\n }\n throw error;\n }\n } else if (['.tsx', '.jsx', '.ts', '.js'].includes(ext)) {\n extractedContent = await processReactFile(\n filePath,\n componentKey,\n packageName,\n _project,\n saveComponent\n );\n }\n\n if (!extractedContent) {\n appLogger(`No extractable text found in ${baseName}`);\n return;\n }\n\n // Shared Write Logic\n if (writeContent) {\n const contentFilePath = await writeContentHelper(\n extractedContent,\n componentKey,\n filePath,\n configuration,\n options?.outputDir\n );\n\n const relativeContentFilePath = relative(\n configuration.content.baseDir,\n contentFilePath\n );\n appLogger(`Created content file: ${colorizePath(relativeContentFilePath)}`);\n }\n\n // Optional: Format\n if (saveComponent) {\n try {\n const formatCommand = detectFormatCommand(configuration);\n if (formatCommand) {\n execSync(formatCommand.replace('{{file}}', filePath), {\n stdio: 'ignore', // Silent\n cwd: baseDir,\n });\n }\n } catch {\n // Ignore format errors\n }\n\n appLogger(\n `Updated component: ${colorizePath(relative(baseDir, filePath))}`\n );\n }\n};\n\nexport const transformFiles = async (\n filePaths: string[],\n packageName: PackageName,\n options?: 
ExtractIntlayerOptions\n) => {\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const project = new Project({\n skipAddingFilesFromTsConfig: true,\n });\n\n for (const filePath of filePaths) {\n try {\n await extractIntlayer(filePath, packageName, options, project);\n } catch (error) {\n appLogger(`Failed to transform ${filePath}: ${(error as Error).message}`);\n }\n }\n};\n"],"mappings":";;;;;;;;;;;;AAqBA,MAAM,wBAAwB;CAC5B;CACA;CACA;CACA;CACA;CACD;AAED,MAAM,iBAAiB,SAA0B;CAC/C,MAAM,UAAU,KAAK,MAAM;AAC3B,KAAI,CAAC,QAAS,QAAO;AACrB,KAAI,CAAC,QAAQ,SAAS,IAAI,CAAE,QAAO;AAEnC,KAAI,CAAC,SAAS,KAAK,QAAQ,CAAE,QAAO;AAEpC,KAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,WAAW,KAAK,CAAE,QAAO;AAChE,QAAO;;AAGT,MAAM,eAAe,MAAc,iBAAsC;CAEvE,IAAI,MAAM,KACP,QAAQ,QAAQ,IAAI,CACpB,QAAQ,OAAO,IAAI,CACnB,QAAQ,OAAO,IAAI,CACnB,QAAQ,kBAAkB,GAAG,CAC7B,MAAM,CACN,MAAM,IAAI,CACV,OAAO,QAAQ,CACf,MAAM,GATQ,EASI,CAClB,KAAK,MAAM,UACV,UAAU,IACN,KAAK,aAAa,GAClB,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE,CAAC,aAAa,CAC/D,CACA,KAAK,GAAG;AAEX,KAAI,CAAC,IAAK,OAAM;AAChB,KAAI,aAAa,IAAI,IAAI,EAAE;EACzB,IAAI,IAAI;AACR,SAAO,aAAa,IAAI,GAAG,MAAM,IAAI,CAAE;AACvC,QAAM,GAAG,MAAM;;AAEjB,QAAO;;AAGT,MAAM,qBAAqB,OACzB,kBACA,cACA,UACA,eACA,cACG;CACH,MAAM,EAAE,kBAAkB,cAAc;CACxC,MAAM,EAAE,YAAY,cAAc;CAElC,MAAM,UAAU,mCAAoB,UAAU,0BAAW,SAAS;CAElE,MAAM,mCAAoB,iCADN,SAAS,CACW;CAGxC,MAAM,sCAAuB,SAAS,GAFd,SAAS,OAAO,EAAE,CAAC,aAAa,GAAG,SAAS,MAAM,EAAE,CAEnB,aAAa;AAWtE,OAAMA,gFARyB;EAC7B,KAAK;EACL,SAAS;EACT,QAAQ;EACR,kCANuC,SAAS,gBAAgB;EAOjE,EAGyC,eAAe,EACvD,6CAF2B,SAAS,QAAQ,EAG7C,CAAC;AAEF,QAAO;;AAST,MAAM,oBACJ,YACA,iBAIG;CACH,MAAMC,mBAA2C,EAAE;CACnD,MAAMC,eAAgC,EAAE;AAExC,YAAW,mBAAmB,SAAS;AAErC,MAAIC,cAAK,UAAU,KAAK,EAAE;GACxB,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,QAAQ,QAAQ,IAAI,CAAC,MAAM;AACxD,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAY,CAAC;;aAK7CA,cAAK,eAAe,KAAK,EAAE;GAClC,MAAM,OAAO,KAAK,aAAa,CAAC,SAAS;AACzC,OAAI,sBAAsB,SAAS,KAAK,EAAE;IACxC,MAAM,cAAc,KAAK,gBAAgB;AACzC,QAAIA,cAAK,gBAAgB,YAAY,EAAE;KACrC,MAAM,OAAO,YAAY,iBAAiB;AAC1C,SAAI,cAAc,KAAK,EAAE;MACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,mBAAa,IAAI,IAAI;AACrB,uBAAiB,OAAO,KAAK,MAAM;AACnC,mBAAa,KAAK;OAAE;OAAM;OAAK,MAAM;OAAiB,CAAC;;;;aAOtDA,cAAK,gBAAgB,KAAK,EAAE;GACnC,MAAM,OAAO,KAAK,iBAAiB;AACnC,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,SAAS,KAAK,WAAW;AAG/B,QACE,QAAQ,aAAa,KAAK,uBAC1B,QAAQ,aAAa,KAAK,qBAC1B,QAAQ,aAAa,KAAK,kBAE1B;AAIF,QAAIA,cAAK,eAAe,OAAO,CAAE;AAGjC,QAAIA,cAAK,iBAAiB,OAAO,EAE/B;SADmB,OAAO,eAAe,CAC1B,SAAS,CAAC,SAAS,cAAc,CAAE;;AAIpD,QAAIA,cAAK,qBAAqB,OAAO,EACnC;SAAI,OAAO,aAAa,KAAK,KAAM;;IAGrC,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,MAAM;AACnC,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAkB,CAAC;;;GAG5D;AAEF,QAAO;EAAE;EAAkB;EAAc;;AAO3C,MAAM,mBAAmB,OACvB,UACA,cACA,aACA,SACA,OAAgB,SACb;CACH,IAAIC;AACJ,KAAI;AACF,eAAa,QAAQ,oBAAoB,SAAS;SAC5C;AACN,eAAa,QAAQ,qBAAqB,SAAS;;CAIrD,MAAM,EAAE,kBAAkB,iBAAiB,iBACzC,4BAFmB,IAAI,KAAa,CAIrC;AAED,KAAI,OAAO,KAAK,iBAAiB,CAAC,WAAW,EAAG,QAAO;AAEvD,MAAK,MAAM,EAAE,MAAM,KAAK,UAAU,aAChC,KAAI,SAAS,cAAcD,cAAK,UAAU,KAAK,CAC7C,MAAK,gBAAgB,YAAY,IAAI,GAAG;UAC/B,SAAS,mBAAmBA,cAAK,eAAe,KAAK,CAC9D,MAAK,eAAe,YAAY,IAAI,SAAS;UACpC,SAAS,oBAAoBA,cAAK,gBAAgB,KAAK,CAEhE,MAAK,gBAAgB,WAAW,IAAI,QAAQ;CAKhD,MAAM,aAAa,WAAW,sBAC3B,MAAM,EAAE,yBAAyB,KAAK,YACxC;AACD,KAAI,CAAC,WACH,YAAW,qBAAqB;EAC9B,cAAc,CAAC,cAAc;EAC7B,iBAAiB;EAClB,CAAC;UAEF,CAAC,WAAW,iBAAiB,CAAC,MAAM,MAAM,EAAE,SAAS,KAAK,cAAc,CAExE,YAAW,eAAe,cAAc;AAI1C,YAAW,cAAc,CAAC,SAAS,MAAM;AACvC,IAAE,SAAS,EACP,OAAOE,oBA
AW,MAAM,EACxB,iBAAiB,GAAG,gCAAgC,aAAa,KAAK;GAC1E;AAGF,YAAW,yBAAyB,CAAC,SAAS,MAAM;EAClD,MAAM,OAAO,EAAE,gBAAgB;AAC/B,MAAIF,cAAK,gBAAgB,KAAK,IAAIA,cAAK,qBAAqB,KAAK,EAAE;GACjE,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAIA,cAAK,QAAQ,KAAK,EAEpB;QACE,KAAK,SAAS,CAAC,SAAS,SAAS,IACjC,KAAK,SAAS,CAAC,SAAS,MAAM,CAE9B,MAAK,iBACH,GACA,gCAAgC,aAAa,KAC9C;;;GAIP;AAEF,KAAI,KACF,OAAM,WAAW,MAAM;AAEzB,QAAO;;AAwBT,MAAa,kBAAkB,OAC7B,UACA,aACA,SACA,YACG;CACH,MAAM,gBAAgB,CAAC,SAAS;CAChC,MAAM,eAAe,CAAC,SAAS;CAE/B,MAAM,wDAAiC,SAAS,cAAc;CAC9D,MAAM,gDAAyB,cAAc;CAC7C,MAAM,EAAE,YAAY,cAAc;CAGlC,MAAM,WACJ,WAAW,IAAIG,iBAAQ,EAAE,6BAA6B,MAAM,CAAC;CAE/D,MAAM,WAAWC,iEACf,WACC,MAAMC,yBAAG,SAAS,SAAS,EAAE,UAAU,CACzC;CACD,MAAM,2DAAoC,SAAS;CACnD,MAAM,6BAAc,SAAS;CAE7B,IAAIC,mBAAkD;AAEtD,KAAI,QAAQ,OACV,KAAI;EACF,MAAM,EAAE,mBAAoB,MAAM,OAChC;AAEF,qBAAmB,MAAM,eACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACMC,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,sDAA+B,6BAA6BC,6BAAW,OAAO,CAAC,wBAChF;AAEH,QAAM;;UAEC,QAAQ,UACjB,KAAI;EACF,MAAM,EAAE,sBAAuB,MAAM,OACnC;AAEF,qBAAmB,MAAM,kBACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACMD,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,sDAA+B,gCAAgCC,6BAAW,OAAO,CAAC,2BACnF;AAEH,QAAM;;UAEC;EAAC;EAAQ;EAAQ;EAAO;EAAM,CAAC,SAAS,IAAI,CACrD,oBAAmB,MAAM,iBACvB,UACA,cACA,aACA,UACA,cACD;AAGH,KAAI,CAAC,kBAAkB;AACrB,YAAU,gCAAgC,WAAW;AACrD;;AAIF,KAAI,cAAc;EAChB,MAAM,kBAAkB,MAAM,mBAC5B,kBACA,cACA,UACA,eACA,SAAS,UACV;AAMD,YAAU,qFAHR,cAAc,QAAQ,SACtB,gBACD,CACuE,GAAG;;AAI7E,KAAI,eAAe;AACjB,MAAI;GACF,MAAM,gBAAgBC,wEAAoB,cAAc;AACxD,OAAI,cACF,kCAAS,cAAc,QAAQ,YAAY,SAAS,EAAE;IACpD,OAAO;IACP,KAAK;IACN,CAAC;UAEE;AAIR,YACE,kFAA4C,SAAS,SAAS,CAAC,GAChE;;;AAIL,MAAa,iBAAiB,OAC5B,WACA,aACA,YACG;CAEH,MAAM,wFADiC,SAAS,cAAc,CACjB;CAE7C,MAAM,UAAU,IAAIN,iBAAQ,EAC1B,6BAA6B,MAC9B,CAAC;AAEF,MAAK,MAAM,YAAY,UACrB,KAAI;AACF,QAAM,gBAAgB,UAAU,aAAa,SAAS,QAAQ;UACvD,OAAO;AACd,YAAU,uBAAuB,SAAS,IAAK,MAAgB,UAAU"}
+ {"version":3,"file":"transformFiles.cjs","names":["writeContentDeclaration","extractedContent: Record<string, string>","replacements: TsReplacement[]","Node","sourceFile: SourceFile","SyntaxKind","Project","extractDictionaryKey","fs","extractedContent: Record<string, string> | null","error: any","ANSIColors","detectFormatCommand"],"sources":["../../../src/transformFiles/transformFiles.ts"],"sourcesContent":["import { execSync } from 'node:child_process';\nimport fs from 'node:fs/promises';\nimport { basename, dirname, extname, join, relative, resolve } from 'node:path';\nimport {\n ANSIColors,\n camelCaseToKebabCase,\n colorizePath,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary, IntlayerConfig } from '@intlayer/types';\nimport { Node, Project, type SourceFile, SyntaxKind } from 'ts-morph';\nimport { writeContentDeclaration } from '../writeContentDeclaration';\nimport { detectFormatCommand } from '../writeContentDeclaration/detectFormatCommand';\nimport { extractDictionaryKey } from './extractDictionaryKey';\n\n// ==========================================\n// 1. Shared Utilities (exported for reuse in babel plugin)\n// ==========================================\n\n/**\n * Attributes that should be extracted for localization\n */\nexport const ATTRIBUTES_TO_EXTRACT = [\n 'title',\n 'placeholder',\n 'alt',\n 'aria-label',\n 'label',\n];\n\n/**\n * Default function to determine if a string should be extracted for localization\n */\nexport const shouldExtract = (text: string): boolean => {\n const trimmed = text.trim();\n if (!trimmed) return false;\n if (!trimmed.includes(' ')) return false;\n // Starts with Capital letter\n if (!/^[A-Z]/.test(trimmed)) return false;\n // Filter out template logic identifiers (simple check)\n if (trimmed.startsWith('{') || trimmed.startsWith('v-')) return false;\n return true;\n};\n\n/**\n * Generate a unique key from text for use as a dictionary key\n */\nexport const generateKey = (\n text: string,\n existingKeys: Set<string>\n): string => {\n const maxWords = 5;\n let key = text\n .replace(/\\s+/g, ' ')\n .replace(/_+/g, ' ')\n .replace(/-+/g, ' ')\n .replace(/[^a-zA-Z0-9 ]/g, '')\n .trim()\n .split(' ')\n .filter(Boolean)\n .slice(0, maxWords)\n .map((word, index) =>\n index === 0\n ? word.toLowerCase()\n : word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()\n )\n .join('');\n\n if (!key) key = 'content';\n if (existingKeys.has(key)) {\n let i = 1;\n while (existingKeys.has(`${key}${i}`)) i++;\n key = `${key}${i}`;\n }\n return key;\n};\n\nconst writeContentHelper = async (\n extractedContent: Record<string, string>,\n componentKey: string,\n filePath: string,\n configuration: IntlayerConfig,\n outputDir?: string\n) => {\n const { defaultLocale } = configuration.internationalization;\n const { baseDir } = configuration.content;\n\n const dirName = outputDir ? 
resolve(outputDir) : dirname(filePath);\n const ext = extname(filePath);\n const baseName = basename(filePath, ext);\n const contentBaseName = baseName.charAt(0).toLowerCase() + baseName.slice(1);\n\n const contentFilePath = join(dirName, `${contentBaseName}.content.ts`);\n const relativeContentFilePath = relative(baseDir, contentFilePath);\n\n const dictionary: Dictionary = {\n key: componentKey,\n content: extractedContent,\n locale: defaultLocale,\n filePath: relativeContentFilePath,\n };\n\n const relativeDir = relative(baseDir, dirName);\n await writeContentDeclaration(dictionary, configuration, {\n newDictionariesPath: relativeDir,\n });\n\n return contentFilePath;\n};\n\ntype TsReplacement = {\n node: Node;\n key: string;\n type: 'jsx-text' | 'jsx-attribute' | 'string-literal';\n};\n\nconst extractTsContent = (\n sourceFile: SourceFile,\n existingKeys: Set<string>\n): {\n extractedContent: Record<string, string>;\n replacements: TsReplacement[];\n} => {\n const extractedContent: Record<string, string> = {};\n const replacements: TsReplacement[] = [];\n\n sourceFile.forEachDescendant((node) => {\n // 1. JSX Text\n if (Node.isJsxText(node)) {\n const text = node.getText();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.replace(/\\s+/g, ' ').trim();\n replacements.push({ node, key, type: 'jsx-text' });\n }\n }\n\n // 2. JSX Attributes\n else if (Node.isJsxAttribute(node)) {\n const name = node.getNameNode().getText();\n if (ATTRIBUTES_TO_EXTRACT.includes(name)) {\n const initializer = node.getInitializer();\n if (Node.isStringLiteral(initializer)) {\n const text = initializer.getLiteralValue();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'jsx-attribute' });\n }\n }\n }\n }\n\n // 3. String Literals (Variables, Arrays, etc.)\n else if (Node.isStringLiteral(node)) {\n const text = node.getLiteralValue();\n if (shouldExtract(text)) {\n const parent = node.getParent();\n\n // Skip if inside ImportDeclaration\n if (\n parent?.getKindName() === 'ImportDeclaration' ||\n parent?.getKindName() === 'ImportSpecifier' ||\n parent?.getKindName() === 'ModuleSpecifier'\n ) {\n return;\n }\n\n // Skip if it's a JSX Attribute value (handled above)\n if (Node.isJsxAttribute(parent)) return;\n\n // Skip console.log\n if (Node.isCallExpression(parent)) {\n const expression = parent.getExpression();\n if (expression.getText().includes('console.log')) return;\n }\n\n // Skip Object Keys: { key: \"value\" } -> \"key\" is PropertyAssignment name if not computed\n if (Node.isPropertyAssignment(parent)) {\n if (parent.getNameNode() === node) return; // It's the key\n }\n\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'string-literal' });\n }\n }\n });\n\n return { extractedContent, replacements };\n};\n\n// ==========================================\n// 2. 
React (TS-Morph) Strategy\n// ==========================================\n\nconst processReactFile = async (\n filePath: string,\n componentKey: string,\n packageName: string,\n project: Project,\n save: boolean = true\n) => {\n let sourceFile: SourceFile;\n try {\n sourceFile = project.addSourceFileAtPath(filePath);\n } catch {\n sourceFile = project.getSourceFileOrThrow(filePath);\n }\n\n const existingKeys = new Set<string>();\n const { extractedContent, replacements } = extractTsContent(\n sourceFile,\n existingKeys\n );\n\n if (Object.keys(extractedContent).length === 0) return null;\n\n for (const { node, key, type } of replacements) {\n if (type === 'jsx-text' && Node.isJsxText(node)) {\n node.replaceWithText(`{content.${key}}`);\n } else if (type === 'jsx-attribute' && Node.isJsxAttribute(node)) {\n node.setInitializer(`{content.${key}.value}`);\n } else if (type === 'string-literal' && Node.isStringLiteral(node)) {\n // For React/JS variables, we usually want the value\n node.replaceWithText(`content.${key}.value`);\n }\n }\n\n // Inject hook\n const importDecl = sourceFile.getImportDeclaration(\n (d) => d.getModuleSpecifierValue() === packageName\n );\n if (!importDecl) {\n sourceFile.addImportDeclaration({\n namedImports: ['useIntlayer'],\n moduleSpecifier: packageName,\n });\n } else if (\n !importDecl.getNamedImports().some((n) => n.getName() === 'useIntlayer')\n ) {\n importDecl.addNamedImport('useIntlayer');\n }\n\n // Insert hook at start of component\n sourceFile.getFunctions().forEach((f) => {\n f.getBody()\n ?.asKind(SyntaxKind.Block)\n ?.insertStatements(0, `const content = useIntlayer(\"${componentKey}\");`);\n });\n\n // Also handle const/arrow components\n sourceFile.getVariableDeclarations().forEach((v) => {\n const init = v.getInitializer();\n if (Node.isArrowFunction(init) || Node.isFunctionExpression(init)) {\n const body = init.getBody();\n if (Node.isBlock(body)) {\n // Heuristic: check if it returns JSX or uses hooks\n if (\n body.getText().includes('return') ||\n body.getText().includes('use')\n ) {\n body.insertStatements(\n 0,\n `const content = useIntlayer(\"${componentKey}\");`\n );\n }\n }\n }\n });\n\n if (save) {\n await sourceFile.save();\n }\n return extractedContent;\n};\n\n// ==========================================\n// 5. 
Main Dispatcher\n// ==========================================\n\nexport type PackageName =\n | 'next-intlayer'\n | 'react-intlayer'\n | 'vue-intlayer'\n | 'svelte-intlayer'\n | 'preact-intlayer'\n | 'solid-intlayer'\n | 'angular-intlayer'\n | 'express-intlayer';\n\nexport type ExtractIntlayerOptions = {\n configOptions?: GetConfigurationOptions;\n outputDir?: string;\n codeOnly?: boolean;\n declarationOnly?: boolean;\n};\n\nexport const extractIntlayer = async (\n filePath: string,\n packageName: PackageName,\n options?: ExtractIntlayerOptions,\n project?: Project\n) => {\n const saveComponent = !options?.declarationOnly;\n const writeContent = !options?.codeOnly;\n\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n const { baseDir } = configuration.content;\n\n // Setup Project for TS/React files if needed\n const _project =\n project || new Project({ skipAddingFilesFromTsConfig: true });\n\n const baseName = extractDictionaryKey(\n filePath,\n (await fs.readFile(filePath)).toString()\n );\n const componentKey = camelCaseToKebabCase(baseName);\n const ext = extname(filePath);\n\n let extractedContent: Record<string, string> | null = null;\n\n if (ext === '.vue') {\n try {\n const { processVueFile } = (await import(\n '@intlayer/vue-transformer'\n )) as any;\n extractedContent = await processVueFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/vue-transformer', ANSIColors.YELLOW)} to process Vue files.`\n );\n }\n throw error;\n }\n } else if (ext === '.svelte') {\n try {\n const { processSvelteFile } = (await import(\n '@intlayer/svelte-transformer'\n )) as any;\n extractedContent = await processSvelteFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/svelte-transformer', ANSIColors.YELLOW)} to process Svelte files.`\n );\n }\n throw error;\n }\n } else if (['.tsx', '.jsx', '.ts', '.js'].includes(ext)) {\n extractedContent = await processReactFile(\n filePath,\n componentKey,\n packageName,\n _project,\n saveComponent\n );\n }\n\n if (!extractedContent) {\n appLogger(`No extractable text found in ${baseName}`);\n return;\n }\n\n // Shared Write Logic\n if (writeContent) {\n const contentFilePath = await writeContentHelper(\n extractedContent,\n componentKey,\n filePath,\n configuration,\n options?.outputDir\n );\n\n const relativeContentFilePath = relative(\n configuration.content.baseDir,\n contentFilePath\n );\n appLogger(`Created content file: ${colorizePath(relativeContentFilePath)}`);\n }\n\n // Optional: Format\n if (saveComponent) {\n try {\n const formatCommand = detectFormatCommand(configuration);\n if (formatCommand) {\n execSync(formatCommand.replace('{{file}}', filePath), {\n stdio: 'ignore', // Silent\n cwd: baseDir,\n });\n }\n } catch {\n // Ignore format errors\n }\n\n appLogger(\n `Updated component: ${colorizePath(relative(baseDir, filePath))}`\n );\n }\n};\n\nexport const transformFiles = async (\n filePaths: string[],\n packageName: PackageName,\n options?: 
ExtractIntlayerOptions\n) => {\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const project = new Project({\n skipAddingFilesFromTsConfig: true,\n });\n\n for (const filePath of filePaths) {\n try {\n await extractIntlayer(filePath, packageName, options, project);\n } catch (error) {\n appLogger(`Failed to transform ${filePath}: ${(error as Error).message}`);\n }\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;AAwBA,MAAa,wBAAwB;CACnC;CACA;CACA;CACA;CACA;CACD;;;;AAKD,MAAa,iBAAiB,SAA0B;CACtD,MAAM,UAAU,KAAK,MAAM;AAC3B,KAAI,CAAC,QAAS,QAAO;AACrB,KAAI,CAAC,QAAQ,SAAS,IAAI,CAAE,QAAO;AAEnC,KAAI,CAAC,SAAS,KAAK,QAAQ,CAAE,QAAO;AAEpC,KAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,WAAW,KAAK,CAAE,QAAO;AAChE,QAAO;;;;;AAMT,MAAa,eACX,MACA,iBACW;CAEX,IAAI,MAAM,KACP,QAAQ,QAAQ,IAAI,CACpB,QAAQ,OAAO,IAAI,CACnB,QAAQ,OAAO,IAAI,CACnB,QAAQ,kBAAkB,GAAG,CAC7B,MAAM,CACN,MAAM,IAAI,CACV,OAAO,QAAQ,CACf,MAAM,GATQ,EASI,CAClB,KAAK,MAAM,UACV,UAAU,IACN,KAAK,aAAa,GAClB,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE,CAAC,aAAa,CAC/D,CACA,KAAK,GAAG;AAEX,KAAI,CAAC,IAAK,OAAM;AAChB,KAAI,aAAa,IAAI,IAAI,EAAE;EACzB,IAAI,IAAI;AACR,SAAO,aAAa,IAAI,GAAG,MAAM,IAAI,CAAE;AACvC,QAAM,GAAG,MAAM;;AAEjB,QAAO;;AAGT,MAAM,qBAAqB,OACzB,kBACA,cACA,UACA,eACA,cACG;CACH,MAAM,EAAE,kBAAkB,cAAc;CACxC,MAAM,EAAE,YAAY,cAAc;CAElC,MAAM,UAAU,mCAAoB,UAAU,0BAAW,SAAS;CAElE,MAAM,mCAAoB,iCADN,SAAS,CACW;CAGxC,MAAM,sCAAuB,SAAS,GAFd,SAAS,OAAO,EAAE,CAAC,aAAa,GAAG,SAAS,MAAM,EAAE,CAEnB,aAAa;AAWtE,OAAMA,gFARyB;EAC7B,KAAK;EACL,SAAS;EACT,QAAQ;EACR,kCANuC,SAAS,gBAAgB;EAOjE,EAGyC,eAAe,EACvD,6CAF2B,SAAS,QAAQ,EAG7C,CAAC;AAEF,QAAO;;AAST,MAAM,oBACJ,YACA,iBAIG;CACH,MAAMC,mBAA2C,EAAE;CACnD,MAAMC,eAAgC,EAAE;AAExC,YAAW,mBAAmB,SAAS;AAErC,MAAIC,cAAK,UAAU,KAAK,EAAE;GACxB,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,QAAQ,QAAQ,IAAI,CAAC,MAAM;AACxD,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAY,CAAC;;aAK7CA,cAAK,eAAe,KAAK,EAAE;GAClC,MAAM,OAAO,KAAK,aAAa,CAAC,SAAS;AACzC,OAAI,sBAAsB,SAAS,KAAK,EAAE;IACxC,MAAM,cAAc,KAAK,gBAAgB;AACzC,QAAIA,cAAK,gBAAgB,YAAY,EAAE;KACrC,MAAM,OAAO,YAAY,iBAAiB;AAC1C,SAAI,cAAc,KAAK,EAAE;MACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,mBAAa,IAAI,IAAI;AACrB,uBAAiB,OAAO,KAAK,MAAM;AACnC,mBAAa,KAAK;OAAE;OAAM;OAAK,MAAM;OAAiB,CAAC;;;;aAOtDA,cAAK,gBAAgB,KAAK,EAAE;GACnC,MAAM,OAAO,KAAK,iBAAiB;AACnC,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,SAAS,KAAK,WAAW;AAG/B,QACE,QAAQ,aAAa,KAAK,uBAC1B,QAAQ,aAAa,KAAK,qBAC1B,QAAQ,aAAa,KAAK,kBAE1B;AAIF,QAAIA,cAAK,eAAe,OAAO,CAAE;AAGjC,QAAIA,cAAK,iBAAiB,OAAO,EAE/B;SADmB,OAAO,eAAe,CAC1B,SAAS,CAAC,SAAS,cAAc,CAAE;;AAIpD,QAAIA,cAAK,qBAAqB,OAAO,EACnC;SAAI,OAAO,aAAa,KAAK,KAAM;;IAGrC,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,MAAM;AACnC,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAkB,CAAC;;;GAG5D;AAEF,QAAO;EAAE;EAAkB;EAAc;;AAO3C,MAAM,mBAAmB,OACvB,UACA,cACA,aACA,SACA,OAAgB,SACb;CACH,IAAIC;AACJ,KAAI;AACF,eAAa,QAAQ,oBAAoB,SAAS;SAC5C;AACN,eAAa,QAAQ,qBAAqB,SAAS;;CAIrD,MAAM,EAAE,kBAAkB,iBAAiB,iBACzC,4BAFmB,IAAI,KAAa,CAIrC;AAED,KAAI,OAAO,KAAK,iBAAiB,CAAC,WAAW,EAAG,QAAO;AAEvD,MAAK,MAAM,EAAE,MAAM,KAAK,UAAU,aAChC,KAAI,SAAS,cAAcD,cAAK,UAAU,KAAK,CAC7C,MAAK,gBAAgB,YAAY,IAAI,GAAG;UAC/B,SAAS,mBAAmBA,cAAK,eAAe,KAAK,CAC9D,MAAK,eAAe,YAAY,IAAI,SAAS;UACpC,SAAS,oBAAoBA,cAAK,gBAAgB,KAAK,CAEhE,MAAK,gBAAgB,WAAW,IAAI,QAAQ;CAKhD,MAAM,aAAa,WAAW,sBAC3B,MAAM,EAAE,yBAAyB,KAAK,YACxC;AACD,KAAI,CAAC,WACH,YAAW,qBAAqB;EAC9B,cAAc,CAAC,cAAc;EAC7B,iBAAiB;EAClB,CAAC;UAEF,CAAC,WAAW,iBAAiB,CAAC,MAAM,MAAM,EAAE,SAAS,KAAK,cAAc,CAExE,YAAW,eAAe,cAAc;AAI1C,YAAW,cAAc,CAAC,SAAS,MAAM;AACvC,IAAE,SAAS,EACP,OA
AOE,oBAAW,MAAM,EACxB,iBAAiB,GAAG,gCAAgC,aAAa,KAAK;GAC1E;AAGF,YAAW,yBAAyB,CAAC,SAAS,MAAM;EAClD,MAAM,OAAO,EAAE,gBAAgB;AAC/B,MAAIF,cAAK,gBAAgB,KAAK,IAAIA,cAAK,qBAAqB,KAAK,EAAE;GACjE,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAIA,cAAK,QAAQ,KAAK,EAEpB;QACE,KAAK,SAAS,CAAC,SAAS,SAAS,IACjC,KAAK,SAAS,CAAC,SAAS,MAAM,CAE9B,MAAK,iBACH,GACA,gCAAgC,aAAa,KAC9C;;;GAIP;AAEF,KAAI,KACF,OAAM,WAAW,MAAM;AAEzB,QAAO;;AAwBT,MAAa,kBAAkB,OAC7B,UACA,aACA,SACA,YACG;CACH,MAAM,gBAAgB,CAAC,SAAS;CAChC,MAAM,eAAe,CAAC,SAAS;CAE/B,MAAM,wDAAiC,SAAS,cAAc;CAC9D,MAAM,gDAAyB,cAAc;CAC7C,MAAM,EAAE,YAAY,cAAc;CAGlC,MAAM,WACJ,WAAW,IAAIG,iBAAQ,EAAE,6BAA6B,MAAM,CAAC;CAE/D,MAAM,WAAWC,iEACf,WACC,MAAMC,yBAAG,SAAS,SAAS,EAAE,UAAU,CACzC;CACD,MAAM,2DAAoC,SAAS;CACnD,MAAM,6BAAc,SAAS;CAE7B,IAAIC,mBAAkD;AAEtD,KAAI,QAAQ,OACV,KAAI;EACF,MAAM,EAAE,mBAAoB,MAAM,OAChC;AAEF,qBAAmB,MAAM,eACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACMC,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,sDAA+B,6BAA6BC,6BAAW,OAAO,CAAC,wBAChF;AAEH,QAAM;;UAEC,QAAQ,UACjB,KAAI;EACF,MAAM,EAAE,sBAAuB,MAAM,OACnC;AAEF,qBAAmB,MAAM,kBACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACMD,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,sDAA+B,gCAAgCC,6BAAW,OAAO,CAAC,2BACnF;AAEH,QAAM;;UAEC;EAAC;EAAQ;EAAQ;EAAO;EAAM,CAAC,SAAS,IAAI,CACrD,oBAAmB,MAAM,iBACvB,UACA,cACA,aACA,UACA,cACD;AAGH,KAAI,CAAC,kBAAkB;AACrB,YAAU,gCAAgC,WAAW;AACrD;;AAIF,KAAI,cAAc;EAChB,MAAM,kBAAkB,MAAM,mBAC5B,kBACA,cACA,UACA,eACA,SAAS,UACV;AAMD,YAAU,qFAHR,cAAc,QAAQ,SACtB,gBACD,CACuE,GAAG;;AAI7E,KAAI,eAAe;AACjB,MAAI;GACF,MAAM,gBAAgBC,wEAAoB,cAAc;AACxD,OAAI,cACF,kCAAS,cAAc,QAAQ,YAAY,SAAS,EAAE;IACpD,OAAO;IACP,KAAK;IACN,CAAC;UAEE;AAIR,YACE,kFAA4C,SAAS,SAAS,CAAC,GAChE;;;AAIL,MAAa,iBAAiB,OAC5B,WACA,aACA,YACG;CAEH,MAAM,wFADiC,SAAS,cAAc,CACjB;CAE7C,MAAM,UAAU,IAAIN,iBAAQ,EAC1B,6BAA6B,MAC9B,CAAC;AAEF,MAAK,MAAM,YAAY,UACrB,KAAI;AACF,QAAM,gBAAgB,UAAU,aAAa,SAAS,QAAQ;UACvD,OAAO;AACd,YAAU,uBAAuB,SAAS,IAAK,MAAgB,UAAU"}
package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs CHANGED
@@ -105,9 +105,7 @@ const writeFileWithDirectories = async (absoluteFilePath, dictionary, configurat
  await require_writeContentDeclaration_writeJSFile.writeJSFile(absoluteFilePath, dictionary, configuration);
  try {
  await (0, node_fs_promises.rm)((0, node_path.join)(configuration.content.cacheDir, "intlayer-prepared.lock"), { recursive: true });
- } catch (error) {
- if (error && typeof error === "object" && "code" in error && error.code !== "ENOENT") throw error;
- }
+ } catch {}
  };

  //#endregion
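The removed branch used to rethrow anything other than ENOENT when deleting the prepared-cache sentinel; after this change every failure is swallowed, so cache invalidation becomes strictly best-effort. A minimal sketch of the resulting behavior, with a hypothetical wrapper name and an illustrative cacheDir value:

```ts
import { rm } from 'node:fs/promises';
import { join } from 'node:path';

// Hypothetical wrapper mirroring the updated tail of writeFileWithDirectories:
// the sentinel is removed so the next build re-prepares Intlayer, and any
// error (missing file, permissions, ...) is now ignored.
const invalidatePreparedCache = async (cacheDir: string): Promise<void> => {
  try {
    await rm(join(cacheDir, 'intlayer-prepared.lock'), { recursive: true });
  } catch {
    // The old code rethrew non-ENOENT errors here; the new code ignores them all.
  }
};

await invalidatePreparedCache('.intlayer/cache'); // illustrative path
```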
package/dist/cjs/writeContentDeclaration/writeContentDeclaration.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"writeContentDeclaration.cjs","names":["processContentDeclarationContent","pluginFormatResult: any","result: Dictionary","getFormatFromExtension","extension","writeJSFile"],"sources":["../../../src/writeContentDeclaration/writeContentDeclaration.ts"],"sourcesContent":["import { mkdir, rm, writeFile } from 'node:fs/promises';\nimport { dirname, extname, join, resolve } from 'node:path';\nimport { isDeepStrictEqual } from 'node:util';\nimport {\n getFilteredLocalesDictionary,\n getPerLocaleDictionary,\n} from '@intlayer/core';\nimport type {\n Dictionary,\n IntlayerConfig,\n Locale,\n LocalesValues,\n} from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport type { DictionaryStatus } from './dictionaryStatus';\nimport { processContentDeclarationContent } from './processContentDeclarationContent';\nimport { writeJSFile } from './writeJSFile';\n\nconst formatContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n localeList?: LocalesValues[]\n) => {\n /**\n * Clean Markdown, Insertion, File, etc. node metadata\n */\n const processedDictionary =\n await processContentDeclarationContent(dictionary);\n\n let content = processedDictionary.content;\n\n /**\n * Filter locales content\n */\n\n if (dictionary.locale) {\n content = getPerLocaleDictionary(\n processedDictionary,\n dictionary.locale\n ).content;\n } else if (localeList) {\n content = getFilteredLocalesDictionary(\n processedDictionary,\n localeList\n ).content;\n }\n\n let pluginFormatResult: any = {\n ...dictionary,\n content,\n } satisfies Dictionary;\n\n /**\n * Format the dictionary with the plugins\n */\n\n for await (const plugin of configuration.plugins ?? []) {\n if (plugin.formatOutput) {\n const formattedResult = await plugin.formatOutput?.({\n dictionary: pluginFormatResult,\n configuration,\n });\n\n if (formattedResult) {\n pluginFormatResult = formattedResult;\n }\n }\n }\n\n const isDictionaryFormat =\n pluginFormatResult.content && pluginFormatResult.key;\n\n if (!isDictionaryFormat) return pluginFormatResult;\n\n let result: Dictionary = {\n key: dictionary.key,\n id: dictionary.id,\n title: dictionary.title,\n description: dictionary.description,\n tags: dictionary.tags,\n locale: dictionary.locale,\n fill: dictionary.fill,\n filled: dictionary.filled,\n priority: dictionary.priority,\n live: dictionary.live,\n version: dictionary.version,\n content,\n };\n\n /**\n * Add $schema to JSON dictionaries\n */\n const extension = (\n dictionary.filePath ? 
extname(dictionary.filePath) : '.json'\n ) as Extension;\n const format = getFormatFromExtension(extension);\n\n if (\n format === 'json' &&\n pluginFormatResult.content &&\n pluginFormatResult.key\n ) {\n result = {\n $schema: 'https://intlayer.org/schema.json',\n ...result,\n };\n }\n\n return result;\n};\n\ntype WriteContentDeclarationOptions = {\n newDictionariesPath?: string;\n localeList?: LocalesValues[];\n fallbackLocale?: Locale;\n};\n\nconst defaultOptions = {\n newDictionariesPath: 'intlayer-dictionaries',\n} satisfies WriteContentDeclarationOptions;\n\nexport const writeContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n options?: WriteContentDeclarationOptions\n): Promise<{ status: DictionaryStatus; path: string }> => {\n const { content } = configuration;\n const { baseDir } = content;\n const { newDictionariesPath, localeList } = {\n ...defaultOptions,\n ...options,\n };\n\n const newDictionaryLocationPath = join(baseDir, newDictionariesPath);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(configuration);\n const unmergedDictionaries = unmergedDictionariesRecord[\n dictionary.key\n ] as Dictionary[];\n\n const existingDictionary = unmergedDictionaries?.find(\n (el) => el.localId === dictionary.localId\n );\n\n const formattedContentDeclaration = await formatContentDeclaration(\n dictionary,\n configuration,\n localeList\n );\n\n if (existingDictionary?.filePath) {\n // Compare existing dictionary content with new dictionary content\n const isSameContent = isDeepStrictEqual(existingDictionary, dictionary);\n\n const filePath = resolve(\n configuration.content.baseDir,\n existingDictionary.filePath\n );\n\n // Up to date, nothing to do\n if (isSameContent) {\n return {\n status: 'up-to-date',\n path: filePath,\n };\n }\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'updated', path: filePath };\n }\n\n if (dictionary.filePath) {\n const filePath = resolve(\n configuration.content.baseDir,\n dictionary.filePath\n );\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'created', path: filePath };\n }\n\n // No existing dictionary, write to new location\n const contentDeclarationPath = join(\n newDictionaryLocationPath,\n `${dictionary.key}.content.json`\n );\n\n await writeFileWithDirectories(\n contentDeclarationPath,\n formattedContentDeclaration,\n configuration\n );\n\n return {\n status: 'imported',\n path: contentDeclarationPath,\n };\n};\n\nconst writeFileWithDirectories = async (\n absoluteFilePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n // Extract the directory from the file path\n const dir = dirname(absoluteFilePath);\n\n // Create the directory recursively\n await mkdir(dir, { recursive: true });\n\n const extension = extname(absoluteFilePath);\n const acceptedExtensions = configuration.content.fileExtensions.map(\n (extension) => extname(extension)\n );\n\n if (!acceptedExtensions.includes(extension)) {\n throw new Error(\n `Invalid file extension: ${extension}, file: ${absoluteFilePath}`\n );\n }\n\n if (extension === '.json') {\n const jsonDictionary = JSON.stringify(dictionary, null, 2);\n\n // Write the file\n await writeFile(absoluteFilePath, `${jsonDictionary}\\n`); // Add a new line at the end of the file to avoid formatting issues with VSCode\n\n return;\n }\n\n await writeJSFile(absoluteFilePath, dictionary, 
configuration);\n\n // remove the cache as content has changed\n // Will force a new preparation of the intlayer on next build\n try {\n const sentinelPath = join(\n configuration.content.cacheDir,\n 'intlayer-prepared.lock'\n );\n await rm(sentinelPath, { recursive: true });\n } catch (error) {\n if (\n error &&\n typeof error === 'object' &&\n 'code' in error &&\n (error as { code: string }).code !== 'ENOENT'\n ) {\n throw error;\n }\n }\n};\n"],"mappings":";;;;;;;;;;;AAsBA,MAAM,2BAA2B,OAC/B,YACA,eACA,eACG;;;;CAIH,MAAM,sBACJ,MAAMA,kGAAiC,WAAW;CAEpD,IAAI,UAAU,oBAAoB;;;;AAMlC,KAAI,WAAW,OACb,uDACE,qBACA,WAAW,OACZ,CAAC;UACO,WACT,6DACE,qBACA,WACD,CAAC;CAGJ,IAAIC,qBAA0B;EAC5B,GAAG;EACH;EACD;;;;AAMD,YAAW,MAAM,UAAU,cAAc,WAAW,EAAE,CACpD,KAAI,OAAO,cAAc;EACvB,MAAM,kBAAkB,MAAM,OAAO,eAAe;GAClD,YAAY;GACZ;GACD,CAAC;AAEF,MAAI,gBACF,sBAAqB;;AAQ3B,KAAI,EAFF,mBAAmB,WAAW,mBAAmB,KAE1B,QAAO;CAEhC,IAAIC,SAAqB;EACvB,KAAK,WAAW;EAChB,IAAI,WAAW;EACf,OAAO,WAAW;EAClB,aAAa,WAAW;EACxB,MAAM,WAAW;EACjB,QAAQ,WAAW;EACnB,MAAM,WAAW;EACjB,QAAQ,WAAW;EACnB,UAAU,WAAW;EACrB,MAAM,WAAW;EACjB,SAAS,WAAW;EACpB;EACD;AAUD,KAFeC,4DAFb,WAAW,kCAAmB,WAAW,SAAS,GAAG,QAEP,KAGnC,UACX,mBAAmB,WACnB,mBAAmB,IAEnB,UAAS;EACP,SAAS;EACT,GAAG;EACJ;AAGH,QAAO;;AAST,MAAM,iBAAiB,EACrB,qBAAqB,yBACtB;AAED,MAAa,0BAA0B,OACrC,YACA,eACA,YACwD;CACxD,MAAM,EAAE,YAAY;CACpB,MAAM,EAAE,YAAY;CACpB,MAAM,EAAE,qBAAqB,eAAe;EAC1C,GAAG;EACH,GAAG;EACJ;CAED,MAAM,gDAAiC,SAAS,oBAAoB;CAOpE,MAAM,yFALqD,cAAc,CAEvE,WAAW,MAGoC,MAC9C,OAAO,GAAG,YAAY,WAAW,QACnC;CAED,MAAM,8BAA8B,MAAM,yBACxC,YACA,eACA,WACD;AAED,KAAI,oBAAoB,UAAU;EAEhC,MAAM,iDAAkC,oBAAoB,WAAW;EAEvE,MAAM,kCACJ,cAAc,QAAQ,SACtB,mBAAmB,SACpB;AAGD,MAAI,cACF,QAAO;GACL,QAAQ;GACR,MAAM;GACP;AAGH,QAAM,yBACJ,UACA,6BACA,cACD;AAED,SAAO;GAAE,QAAQ;GAAW,MAAM;GAAU;;AAG9C,KAAI,WAAW,UAAU;EACvB,MAAM,kCACJ,cAAc,QAAQ,SACtB,WAAW,SACZ;AACD,QAAM,yBACJ,UACA,6BACA,cACD;AAED,SAAO;GAAE,QAAQ;GAAW,MAAM;GAAU;;CAI9C,MAAM,6CACJ,2BACA,GAAG,WAAW,IAAI,eACnB;AAED,OAAM,yBACJ,wBACA,6BACA,cACD;AAED,QAAO;EACL,QAAQ;EACR,MAAM;EACP;;AAGH,MAAM,2BAA2B,OAC/B,kBACA,YACA,kBACkB;AAKlB,0DAHoB,iBAAiB,EAGpB,EAAE,WAAW,MAAM,CAAC;CAErC,MAAM,mCAAoB,iBAAiB;AAK3C,KAAI,CAJuB,cAAc,QAAQ,eAAe,KAC7D,uCAAsBC,YAAU,CAClC,CAEuB,SAAS,UAAU,CACzC,OAAM,IAAI,MACR,2BAA2B,UAAU,UAAU,mBAChD;AAGH,KAAI,cAAc,SAAS;AAIzB,wCAAgB,kBAAkB,GAHX,KAAK,UAAU,YAAY,MAAM,EAAE,CAGN,IAAI;AAExD;;AAGF,OAAMC,wDAAY,kBAAkB,YAAY,cAAc;AAI9D,KAAI;AAKF,qDAHE,cAAc,QAAQ,UACtB,yBACD,EACsB,EAAE,WAAW,MAAM,CAAC;UACpC,OAAO;AACd,MACE,SACA,OAAO,UAAU,YACjB,UAAU,SACT,MAA2B,SAAS,SAErC,OAAM"}
+ {"version":3,"file":"writeContentDeclaration.cjs","names":["processContentDeclarationContent","pluginFormatResult: any","result: Dictionary","getFormatFromExtension","extension","writeJSFile"],"sources":["../../../src/writeContentDeclaration/writeContentDeclaration.ts"],"sourcesContent":["import { mkdir, rm, writeFile } from 'node:fs/promises';\nimport { dirname, extname, join, resolve } from 'node:path';\nimport { isDeepStrictEqual } from 'node:util';\nimport {\n getFilteredLocalesDictionary,\n getPerLocaleDictionary,\n} from '@intlayer/core';\nimport type {\n Dictionary,\n IntlayerConfig,\n Locale,\n LocalesValues,\n} from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport type { DictionaryStatus } from './dictionaryStatus';\nimport { processContentDeclarationContent } from './processContentDeclarationContent';\nimport { writeJSFile } from './writeJSFile';\n\nconst formatContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n localeList?: LocalesValues[]\n) => {\n /**\n * Clean Markdown, Insertion, File, etc. node metadata\n */\n const processedDictionary =\n await processContentDeclarationContent(dictionary);\n\n let content = processedDictionary.content;\n\n /**\n * Filter locales content\n */\n\n if (dictionary.locale) {\n content = getPerLocaleDictionary(\n processedDictionary,\n dictionary.locale\n ).content;\n } else if (localeList) {\n content = getFilteredLocalesDictionary(\n processedDictionary,\n localeList\n ).content;\n }\n\n let pluginFormatResult: any = {\n ...dictionary,\n content,\n } satisfies Dictionary;\n\n /**\n * Format the dictionary with the plugins\n */\n\n for await (const plugin of configuration.plugins ?? []) {\n if (plugin.formatOutput) {\n const formattedResult = await plugin.formatOutput?.({\n dictionary: pluginFormatResult,\n configuration,\n });\n\n if (formattedResult) {\n pluginFormatResult = formattedResult;\n }\n }\n }\n\n const isDictionaryFormat =\n pluginFormatResult.content && pluginFormatResult.key;\n\n if (!isDictionaryFormat) return pluginFormatResult;\n\n let result: Dictionary = {\n key: dictionary.key,\n id: dictionary.id,\n title: dictionary.title,\n description: dictionary.description,\n tags: dictionary.tags,\n locale: dictionary.locale,\n fill: dictionary.fill,\n filled: dictionary.filled,\n priority: dictionary.priority,\n live: dictionary.live,\n version: dictionary.version,\n content,\n };\n\n /**\n * Add $schema to JSON dictionaries\n */\n const extension = (\n dictionary.filePath ? 
extname(dictionary.filePath) : '.json'\n ) as Extension;\n const format = getFormatFromExtension(extension);\n\n if (\n format === 'json' &&\n pluginFormatResult.content &&\n pluginFormatResult.key\n ) {\n result = {\n $schema: 'https://intlayer.org/schema.json',\n ...result,\n };\n }\n\n return result;\n};\n\ntype WriteContentDeclarationOptions = {\n newDictionariesPath?: string;\n localeList?: LocalesValues[];\n fallbackLocale?: Locale;\n};\n\nconst defaultOptions = {\n newDictionariesPath: 'intlayer-dictionaries',\n} satisfies WriteContentDeclarationOptions;\n\nexport const writeContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n options?: WriteContentDeclarationOptions\n): Promise<{ status: DictionaryStatus; path: string }> => {\n const { content } = configuration;\n const { baseDir } = content;\n const { newDictionariesPath, localeList } = {\n ...defaultOptions,\n ...options,\n };\n\n const newDictionaryLocationPath = join(baseDir, newDictionariesPath);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(configuration);\n const unmergedDictionaries = unmergedDictionariesRecord[\n dictionary.key\n ] as Dictionary[];\n\n const existingDictionary = unmergedDictionaries?.find(\n (el) => el.localId === dictionary.localId\n );\n\n const formattedContentDeclaration = await formatContentDeclaration(\n dictionary,\n configuration,\n localeList\n );\n\n if (existingDictionary?.filePath) {\n // Compare existing dictionary content with new dictionary content\n const isSameContent = isDeepStrictEqual(existingDictionary, dictionary);\n\n const filePath = resolve(\n configuration.content.baseDir,\n existingDictionary.filePath\n );\n\n // Up to date, nothing to do\n if (isSameContent) {\n return {\n status: 'up-to-date',\n path: filePath,\n };\n }\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'updated', path: filePath };\n }\n\n if (dictionary.filePath) {\n const filePath = resolve(\n configuration.content.baseDir,\n dictionary.filePath\n );\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'created', path: filePath };\n }\n\n // No existing dictionary, write to new location\n const contentDeclarationPath = join(\n newDictionaryLocationPath,\n `${dictionary.key}.content.json`\n );\n\n await writeFileWithDirectories(\n contentDeclarationPath,\n formattedContentDeclaration,\n configuration\n );\n\n return {\n status: 'imported',\n path: contentDeclarationPath,\n };\n};\n\nconst writeFileWithDirectories = async (\n absoluteFilePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n // Extract the directory from the file path\n const dir = dirname(absoluteFilePath);\n\n // Create the directory recursively\n await mkdir(dir, { recursive: true });\n\n const extension = extname(absoluteFilePath);\n const acceptedExtensions = configuration.content.fileExtensions.map(\n (extension) => extname(extension)\n );\n\n if (!acceptedExtensions.includes(extension)) {\n throw new Error(\n `Invalid file extension: ${extension}, file: ${absoluteFilePath}`\n );\n }\n\n if (extension === '.json') {\n const jsonDictionary = JSON.stringify(dictionary, null, 2);\n\n // Write the file\n await writeFile(absoluteFilePath, `${jsonDictionary}\\n`); // Add a new line at the end of the file to avoid formatting issues with VSCode\n\n return;\n }\n\n await writeJSFile(absoluteFilePath, dictionary, 
configuration);\n\n // remove the cache as content has changed\n // Will force a new preparation of the intlayer on next build\n try {\n const sentinelPath = join(\n configuration.content.cacheDir,\n 'intlayer-prepared.lock'\n );\n await rm(sentinelPath, { recursive: true });\n } catch {}\n};\n"],"mappings":";;;;;;;;;;;AAsBA,MAAM,2BAA2B,OAC/B,YACA,eACA,eACG;;;;CAIH,MAAM,sBACJ,MAAMA,kGAAiC,WAAW;CAEpD,IAAI,UAAU,oBAAoB;;;;AAMlC,KAAI,WAAW,OACb,uDACE,qBACA,WAAW,OACZ,CAAC;UACO,WACT,6DACE,qBACA,WACD,CAAC;CAGJ,IAAIC,qBAA0B;EAC5B,GAAG;EACH;EACD;;;;AAMD,YAAW,MAAM,UAAU,cAAc,WAAW,EAAE,CACpD,KAAI,OAAO,cAAc;EACvB,MAAM,kBAAkB,MAAM,OAAO,eAAe;GAClD,YAAY;GACZ;GACD,CAAC;AAEF,MAAI,gBACF,sBAAqB;;AAQ3B,KAAI,EAFF,mBAAmB,WAAW,mBAAmB,KAE1B,QAAO;CAEhC,IAAIC,SAAqB;EACvB,KAAK,WAAW;EAChB,IAAI,WAAW;EACf,OAAO,WAAW;EAClB,aAAa,WAAW;EACxB,MAAM,WAAW;EACjB,QAAQ,WAAW;EACnB,MAAM,WAAW;EACjB,QAAQ,WAAW;EACnB,UAAU,WAAW;EACrB,MAAM,WAAW;EACjB,SAAS,WAAW;EACpB;EACD;AAUD,KAFeC,4DAFb,WAAW,kCAAmB,WAAW,SAAS,GAAG,QAEP,KAGnC,UACX,mBAAmB,WACnB,mBAAmB,IAEnB,UAAS;EACP,SAAS;EACT,GAAG;EACJ;AAGH,QAAO;;AAST,MAAM,iBAAiB,EACrB,qBAAqB,yBACtB;AAED,MAAa,0BAA0B,OACrC,YACA,eACA,YACwD;CACxD,MAAM,EAAE,YAAY;CACpB,MAAM,EAAE,YAAY;CACpB,MAAM,EAAE,qBAAqB,eAAe;EAC1C,GAAG;EACH,GAAG;EACJ;CAED,MAAM,gDAAiC,SAAS,oBAAoB;CAOpE,MAAM,yFALqD,cAAc,CAEvE,WAAW,MAGoC,MAC9C,OAAO,GAAG,YAAY,WAAW,QACnC;CAED,MAAM,8BAA8B,MAAM,yBACxC,YACA,eACA,WACD;AAED,KAAI,oBAAoB,UAAU;EAEhC,MAAM,iDAAkC,oBAAoB,WAAW;EAEvE,MAAM,kCACJ,cAAc,QAAQ,SACtB,mBAAmB,SACpB;AAGD,MAAI,cACF,QAAO;GACL,QAAQ;GACR,MAAM;GACP;AAGH,QAAM,yBACJ,UACA,6BACA,cACD;AAED,SAAO;GAAE,QAAQ;GAAW,MAAM;GAAU;;AAG9C,KAAI,WAAW,UAAU;EACvB,MAAM,kCACJ,cAAc,QAAQ,SACtB,WAAW,SACZ;AACD,QAAM,yBACJ,UACA,6BACA,cACD;AAED,SAAO;GAAE,QAAQ;GAAW,MAAM;GAAU;;CAI9C,MAAM,6CACJ,2BACA,GAAG,WAAW,IAAI,eACnB;AAED,OAAM,yBACJ,wBACA,6BACA,cACD;AAED,QAAO;EACL,QAAQ;EACR,MAAM;EACP;;AAGH,MAAM,2BAA2B,OAC/B,kBACA,YACA,kBACkB;AAKlB,0DAHoB,iBAAiB,EAGpB,EAAE,WAAW,MAAM,CAAC;CAErC,MAAM,mCAAoB,iBAAiB;AAK3C,KAAI,CAJuB,cAAc,QAAQ,eAAe,KAC7D,uCAAsBC,YAAU,CAClC,CAEuB,SAAS,UAAU,CACzC,OAAM,IAAI,MACR,2BAA2B,UAAU,UAAU,mBAChD;AAGH,KAAI,cAAc,SAAS;AAIzB,wCAAgB,kBAAkB,GAHX,KAAK,UAAU,YAAY,MAAM,EAAE,CAGN,IAAI;AAExD;;AAGF,OAAMC,wDAAY,kBAAkB,YAAY,cAAc;AAI9D,KAAI;AAKF,qDAHE,cAAc,QAAQ,UACtB,yBACD,EACsB,EAAE,WAAW,MAAM,CAAC;SACrC"}
@@ -37,7 +37,7 @@ import { getExtensionFromFormat, getFormatFromExtension } from "./utils/getForma
37
37
  import { detectFormatCommand } from "./writeContentDeclaration/detectFormatCommand.mjs";
38
38
  import { writeJSFile } from "./writeContentDeclaration/writeJSFile.mjs";
39
39
  import { writeContentDeclaration } from "./writeContentDeclaration/writeContentDeclaration.mjs";
40
- import { extractIntlayer, transformFiles } from "./transformFiles/transformFiles.mjs";
40
+ import { ATTRIBUTES_TO_EXTRACT, extractIntlayer, generateKey, shouldExtract, transformFiles } from "./transformFiles/transformFiles.mjs";
41
41
  import { splitTextByLines } from "./utils/splitTextByLine.mjs";
42
42
  import { getChunk } from "./utils/getChunk.mjs";
43
43
  import { assembleJSON, chunkJSON, reconstructFromSingleChunk } from "./utils/chunkJSON.mjs";
@@ -47,4 +47,4 @@ import { runParallel } from "./utils/runParallel/index.mjs";
47
47
  import { verifyIdenticObjectFormat } from "./utils/verifyIdenticObjectFormat.mjs";
48
48
  import { buildAndWatchIntlayer, watch } from "./watcher.mjs";
49
49
 
50
- export { assembleJSON, buildAndWatchIntlayer, buildDictionary, chunkJSON, cleanOutputDir, createDictionaryEntryPoint, createModuleAugmentation, createTypes, detectExportedComponentName, detectFormatCommand, extractDictionaryKey, extractIntlayer, fetchDistantDictionaries, formatLocale, formatPath, generateDictionaryListContent, getBuiltDictionariesPath, getBuiltDynamicDictionariesPath, getBuiltFetchDictionariesPath, getBuiltRemoteDictionariesPath, getBuiltUnmergedDictionariesPath, getChunk, getContentDeclarationFileTemplate, getExtensionFromFormat, getFileHash, getFormatFromExtension, getGlobalLimiter, getTaskLimiter, handleAdditionalContentDeclarationFile, handleContentDeclarationFileChange, handleUnlinkedContentDeclarationFile, isInvalidDictionary, listDictionaries, listDictionariesWithStats, listGitFiles, listGitLines, loadContentDeclarations, loadDictionaries, loadLocalDictionaries, loadRemoteDictionaries, pLimit, parallelize, parallelizeGlobal, prepareIntlayer, reconstructFromSingleChunk, reduceDictionaryContent, reduceObjectFormat, resolveObjectPromises, runOnce, runParallel, sortAlphabetically, splitTextByLines, transformFiles, transformJSFile, verifyIdenticObjectFormat, watch, writeContentDeclaration, writeJSFile };
50
+ export { ATTRIBUTES_TO_EXTRACT, assembleJSON, buildAndWatchIntlayer, buildDictionary, chunkJSON, cleanOutputDir, createDictionaryEntryPoint, createModuleAugmentation, createTypes, detectExportedComponentName, detectFormatCommand, extractDictionaryKey, extractIntlayer, fetchDistantDictionaries, formatLocale, formatPath, generateDictionaryListContent, generateKey, getBuiltDictionariesPath, getBuiltDynamicDictionariesPath, getBuiltFetchDictionariesPath, getBuiltRemoteDictionariesPath, getBuiltUnmergedDictionariesPath, getChunk, getContentDeclarationFileTemplate, getExtensionFromFormat, getFileHash, getFormatFromExtension, getGlobalLimiter, getTaskLimiter, handleAdditionalContentDeclarationFile, handleContentDeclarationFileChange, handleUnlinkedContentDeclarationFile, isInvalidDictionary, listDictionaries, listDictionariesWithStats, listGitFiles, listGitLines, loadContentDeclarations, loadDictionaries, loadLocalDictionaries, loadRemoteDictionaries, pLimit, parallelize, parallelizeGlobal, prepareIntlayer, reconstructFromSingleChunk, reduceDictionaryContent, reduceObjectFormat, resolveObjectPromises, runOnce, runParallel, shouldExtract, sortAlphabetically, splitTextByLines, transformFiles, transformJSFile, verifyIdenticObjectFormat, watch, writeContentDeclaration, writeJSFile };
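
The entry-point hunk above promotes the extraction helpers (`ATTRIBUTES_TO_EXTRACT`, `generateKey`, `shouldExtract`) to public named exports alongside the existing `extractIntlayer` and `transformFiles`. A minimal consumer sketch, assuming the package root resolves to this `dist/esm/index.mjs` build; the import specifier, the pre-seeded key, and `isLocalizableAttribute` are illustrative, not part of the package:

```ts
// Hypothetical consumer of the newly exported helpers. The import specifier
// assumes the package root re-exports this dist/esm/index.mjs build.
import {
  ATTRIBUTES_TO_EXTRACT,
  generateKey,
  shouldExtract,
} from '@intlayer/chokidar';

// Keep only attribute values that the extractor itself would pick up.
const isLocalizableAttribute = (name: string, value: string): boolean =>
  ATTRIBUTES_TO_EXTRACT.includes(name) && shouldExtract(value);

// Derive a dictionary key, deduplicated against keys already in use.
const existingKeys = new Set<string>(['welcomeToOurSite']);
const key = generateKey('Welcome to our site', existingKeys); // "welcomeToOurSite1"
existingKeys.add(key);

console.log(isLocalizableAttribute('aria-label', 'Close the dialog'), key);
```
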
@@ -1,4 +1,4 @@
1
1
  import { extractDictionaryKey } from "./extractDictionaryKey.mjs";
2
- import { extractIntlayer, transformFiles } from "./transformFiles.mjs";
2
+ import { ATTRIBUTES_TO_EXTRACT, extractIntlayer, generateKey, shouldExtract, transformFiles } from "./transformFiles.mjs";
3
3
 
4
- export { extractDictionaryKey, extractIntlayer, transformFiles };
4
+ export { ATTRIBUTES_TO_EXTRACT, extractDictionaryKey, extractIntlayer, generateKey, shouldExtract, transformFiles };
@@ -8,6 +8,9 @@ import { execSync } from "node:child_process";
8
8
  import { Node, Project, SyntaxKind } from "ts-morph";
9
9
 
10
10
  //#region src/transformFiles/transformFiles.ts
11
+ /**
12
+ * Attributes that should be extracted for localization
13
+ */
11
14
  const ATTRIBUTES_TO_EXTRACT = [
12
15
  "title",
13
16
  "placeholder",
@@ -15,6 +18,9 @@ const ATTRIBUTES_TO_EXTRACT = [
15
18
  "aria-label",
16
19
  "label"
17
20
  ];
21
+ /**
22
+ * Default function to determine if a string should be extracted for localization
23
+ */
18
24
  const shouldExtract = (text) => {
19
25
  const trimmed = text.trim();
20
26
  if (!trimmed) return false;
@@ -23,6 +29,9 @@ const shouldExtract = (text) => {
23
29
  if (trimmed.startsWith("{") || trimmed.startsWith("v-")) return false;
24
30
  return true;
25
31
  };
32
+ /**
33
+ * Generate a unique key from text for use as a dictionary key
34
+ */
26
35
  const generateKey = (text, existingKeys) => {
27
36
  let key = text.replace(/\s+/g, " ").replace(/_+/g, " ").replace(/-+/g, " ").replace(/[^a-zA-Z0-9 ]/g, "").trim().split(" ").filter(Boolean).slice(0, 5).map((word, index) => index === 0 ? word.toLowerCase() : word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()).join("");
28
37
  if (!key) key = "content";
@@ -211,5 +220,5 @@ const transformFiles = async (filePaths, packageName, options) => {
211
220
  };
212
221
 
213
222
  //#endregion
214
- export { extractIntlayer, transformFiles };
223
+ export { ATTRIBUTES_TO_EXTRACT, extractIntlayer, generateKey, shouldExtract, transformFiles };
215
224
  //# sourceMappingURL=transformFiles.mjs.map
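
The hunks above document and export the two heuristics that drive extraction: `shouldExtract` accepts only multi-word strings that start with a capital letter and do not look like template syntax, and `generateKey` camel-cases the first five words of the text, falls back to `content` when nothing survives, and appends a numeric suffix on collisions. A few worked calls; the expected results are derived from the source shown in this hunk, while consumption through the package entry is an assumption, as in the previous sketch:

```ts
// Worked calls against the heuristics shown above.
import { generateKey, shouldExtract } from '@intlayer/chokidar';

shouldExtract('Save your changes');   // true  (multi-word, starts with a capital)
shouldExtract('submit');              // false (single word)
shouldExtract('{count} items left');  // false (does not start with a capital letter)

const keys = new Set<string>();
const key = generateKey('Save your changes before leaving this page now', keys);
// Only the first five words are kept and camel-cased:
// key === "saveYourChangesBeforeLeaving"
keys.add(key);
```
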
@@ -1 +1 @@
1
- {"version":3,"file":"transformFiles.mjs","names":["extractedContent: Record<string, string>","replacements: TsReplacement[]","sourceFile: SourceFile","extractedContent: Record<string, string> | null","error: any"],"sources":["../../../src/transformFiles/transformFiles.ts"],"sourcesContent":["import { execSync } from 'node:child_process';\nimport fs from 'node:fs/promises';\nimport { basename, dirname, extname, join, relative, resolve } from 'node:path';\nimport {\n ANSIColors,\n camelCaseToKebabCase,\n colorizePath,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary, IntlayerConfig } from '@intlayer/types';\nimport { Node, Project, type SourceFile, SyntaxKind } from 'ts-morph';\nimport { writeContentDeclaration } from '../writeContentDeclaration';\nimport { detectFormatCommand } from '../writeContentDeclaration/detectFormatCommand';\nimport { extractDictionaryKey } from './extractDictionaryKey';\n\n// ==========================================\n// 1. Shared Utilities\n// ==========================================\n\nconst ATTRIBUTES_TO_EXTRACT = [\n 'title',\n 'placeholder',\n 'alt',\n 'aria-label',\n 'label',\n];\n\nconst shouldExtract = (text: string): boolean => {\n const trimmed = text.trim();\n if (!trimmed) return false;\n if (!trimmed.includes(' ')) return false;\n // Starts with Capital letter\n if (!/^[A-Z]/.test(trimmed)) return false;\n // Filter out template logic identifiers (simple check)\n if (trimmed.startsWith('{') || trimmed.startsWith('v-')) return false;\n return true;\n};\n\nconst generateKey = (text: string, existingKeys: Set<string>): string => {\n const maxWords = 5;\n let key = text\n .replace(/\\s+/g, ' ')\n .replace(/_+/g, ' ')\n .replace(/-+/g, ' ')\n .replace(/[^a-zA-Z0-9 ]/g, '')\n .trim()\n .split(' ')\n .filter(Boolean)\n .slice(0, maxWords)\n .map((word, index) =>\n index === 0\n ? word.toLowerCase()\n : word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()\n )\n .join('');\n\n if (!key) key = 'content';\n if (existingKeys.has(key)) {\n let i = 1;\n while (existingKeys.has(`${key}${i}`)) i++;\n key = `${key}${i}`;\n }\n return key;\n};\n\nconst writeContentHelper = async (\n extractedContent: Record<string, string>,\n componentKey: string,\n filePath: string,\n configuration: IntlayerConfig,\n outputDir?: string\n) => {\n const { defaultLocale } = configuration.internationalization;\n const { baseDir } = configuration.content;\n\n const dirName = outputDir ? 
resolve(outputDir) : dirname(filePath);\n const ext = extname(filePath);\n const baseName = basename(filePath, ext);\n const contentBaseName = baseName.charAt(0).toLowerCase() + baseName.slice(1);\n\n const contentFilePath = join(dirName, `${contentBaseName}.content.ts`);\n const relativeContentFilePath = relative(baseDir, contentFilePath);\n\n const dictionary: Dictionary = {\n key: componentKey,\n content: extractedContent,\n locale: defaultLocale,\n filePath: relativeContentFilePath,\n };\n\n const relativeDir = relative(baseDir, dirName);\n await writeContentDeclaration(dictionary, configuration, {\n newDictionariesPath: relativeDir,\n });\n\n return contentFilePath;\n};\n\ntype TsReplacement = {\n node: Node;\n key: string;\n type: 'jsx-text' | 'jsx-attribute' | 'string-literal';\n};\n\nconst extractTsContent = (\n sourceFile: SourceFile,\n existingKeys: Set<string>\n): {\n extractedContent: Record<string, string>;\n replacements: TsReplacement[];\n} => {\n const extractedContent: Record<string, string> = {};\n const replacements: TsReplacement[] = [];\n\n sourceFile.forEachDescendant((node) => {\n // 1. JSX Text\n if (Node.isJsxText(node)) {\n const text = node.getText();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.replace(/\\s+/g, ' ').trim();\n replacements.push({ node, key, type: 'jsx-text' });\n }\n }\n\n // 2. JSX Attributes\n else if (Node.isJsxAttribute(node)) {\n const name = node.getNameNode().getText();\n if (ATTRIBUTES_TO_EXTRACT.includes(name)) {\n const initializer = node.getInitializer();\n if (Node.isStringLiteral(initializer)) {\n const text = initializer.getLiteralValue();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'jsx-attribute' });\n }\n }\n }\n }\n\n // 3. String Literals (Variables, Arrays, etc.)\n else if (Node.isStringLiteral(node)) {\n const text = node.getLiteralValue();\n if (shouldExtract(text)) {\n const parent = node.getParent();\n\n // Skip if inside ImportDeclaration\n if (\n parent?.getKindName() === 'ImportDeclaration' ||\n parent?.getKindName() === 'ImportSpecifier' ||\n parent?.getKindName() === 'ModuleSpecifier'\n ) {\n return;\n }\n\n // Skip if it's a JSX Attribute value (handled above)\n if (Node.isJsxAttribute(parent)) return;\n\n // Skip console.log\n if (Node.isCallExpression(parent)) {\n const expression = parent.getExpression();\n if (expression.getText().includes('console.log')) return;\n }\n\n // Skip Object Keys: { key: \"value\" } -> \"key\" is PropertyAssignment name if not computed\n if (Node.isPropertyAssignment(parent)) {\n if (parent.getNameNode() === node) return; // It's the key\n }\n\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'string-literal' });\n }\n }\n });\n\n return { extractedContent, replacements };\n};\n\n// ==========================================\n// 2. 
React (TS-Morph) Strategy\n// ==========================================\n\nconst processReactFile = async (\n filePath: string,\n componentKey: string,\n packageName: string,\n project: Project,\n save: boolean = true\n) => {\n let sourceFile: SourceFile;\n try {\n sourceFile = project.addSourceFileAtPath(filePath);\n } catch {\n sourceFile = project.getSourceFileOrThrow(filePath);\n }\n\n const existingKeys = new Set<string>();\n const { extractedContent, replacements } = extractTsContent(\n sourceFile,\n existingKeys\n );\n\n if (Object.keys(extractedContent).length === 0) return null;\n\n for (const { node, key, type } of replacements) {\n if (type === 'jsx-text' && Node.isJsxText(node)) {\n node.replaceWithText(`{content.${key}}`);\n } else if (type === 'jsx-attribute' && Node.isJsxAttribute(node)) {\n node.setInitializer(`{content.${key}.value}`);\n } else if (type === 'string-literal' && Node.isStringLiteral(node)) {\n // For React/JS variables, we usually want the value\n node.replaceWithText(`content.${key}.value`);\n }\n }\n\n // Inject hook\n const importDecl = sourceFile.getImportDeclaration(\n (d) => d.getModuleSpecifierValue() === packageName\n );\n if (!importDecl) {\n sourceFile.addImportDeclaration({\n namedImports: ['useIntlayer'],\n moduleSpecifier: packageName,\n });\n } else if (\n !importDecl.getNamedImports().some((n) => n.getName() === 'useIntlayer')\n ) {\n importDecl.addNamedImport('useIntlayer');\n }\n\n // Insert hook at start of component\n sourceFile.getFunctions().forEach((f) => {\n f.getBody()\n ?.asKind(SyntaxKind.Block)\n ?.insertStatements(0, `const content = useIntlayer(\"${componentKey}\");`);\n });\n\n // Also handle const/arrow components\n sourceFile.getVariableDeclarations().forEach((v) => {\n const init = v.getInitializer();\n if (Node.isArrowFunction(init) || Node.isFunctionExpression(init)) {\n const body = init.getBody();\n if (Node.isBlock(body)) {\n // Heuristic: check if it returns JSX or uses hooks\n if (\n body.getText().includes('return') ||\n body.getText().includes('use')\n ) {\n body.insertStatements(\n 0,\n `const content = useIntlayer(\"${componentKey}\");`\n );\n }\n }\n }\n });\n\n if (save) {\n await sourceFile.save();\n }\n return extractedContent;\n};\n\n// ==========================================\n// 5. 
Main Dispatcher\n// ==========================================\n\nexport type PackageName =\n | 'next-intlayer'\n | 'react-intlayer'\n | 'vue-intlayer'\n | 'svelte-intlayer'\n | 'preact-intlayer'\n | 'solid-intlayer'\n | 'angular-intlayer'\n | 'express-intlayer';\n\nexport type ExtractIntlayerOptions = {\n configOptions?: GetConfigurationOptions;\n outputDir?: string;\n codeOnly?: boolean;\n declarationOnly?: boolean;\n};\n\nexport const extractIntlayer = async (\n filePath: string,\n packageName: PackageName,\n options?: ExtractIntlayerOptions,\n project?: Project\n) => {\n const saveComponent = !options?.declarationOnly;\n const writeContent = !options?.codeOnly;\n\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n const { baseDir } = configuration.content;\n\n // Setup Project for TS/React files if needed\n const _project =\n project || new Project({ skipAddingFilesFromTsConfig: true });\n\n const baseName = extractDictionaryKey(\n filePath,\n (await fs.readFile(filePath)).toString()\n );\n const componentKey = camelCaseToKebabCase(baseName);\n const ext = extname(filePath);\n\n let extractedContent: Record<string, string> | null = null;\n\n if (ext === '.vue') {\n try {\n const { processVueFile } = (await import(\n '@intlayer/vue-transformer'\n )) as any;\n extractedContent = await processVueFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/vue-transformer', ANSIColors.YELLOW)} to process Vue files.`\n );\n }\n throw error;\n }\n } else if (ext === '.svelte') {\n try {\n const { processSvelteFile } = (await import(\n '@intlayer/svelte-transformer'\n )) as any;\n extractedContent = await processSvelteFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/svelte-transformer', ANSIColors.YELLOW)} to process Svelte files.`\n );\n }\n throw error;\n }\n } else if (['.tsx', '.jsx', '.ts', '.js'].includes(ext)) {\n extractedContent = await processReactFile(\n filePath,\n componentKey,\n packageName,\n _project,\n saveComponent\n );\n }\n\n if (!extractedContent) {\n appLogger(`No extractable text found in ${baseName}`);\n return;\n }\n\n // Shared Write Logic\n if (writeContent) {\n const contentFilePath = await writeContentHelper(\n extractedContent,\n componentKey,\n filePath,\n configuration,\n options?.outputDir\n );\n\n const relativeContentFilePath = relative(\n configuration.content.baseDir,\n contentFilePath\n );\n appLogger(`Created content file: ${colorizePath(relativeContentFilePath)}`);\n }\n\n // Optional: Format\n if (saveComponent) {\n try {\n const formatCommand = detectFormatCommand(configuration);\n if (formatCommand) {\n execSync(formatCommand.replace('{{file}}', filePath), {\n stdio: 'ignore', // Silent\n cwd: baseDir,\n });\n }\n } catch {\n // Ignore format errors\n }\n\n appLogger(\n `Updated component: ${colorizePath(relative(baseDir, filePath))}`\n );\n }\n};\n\nexport const transformFiles = async (\n filePaths: string[],\n packageName: PackageName,\n options?: 
ExtractIntlayerOptions\n) => {\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const project = new Project({\n skipAddingFilesFromTsConfig: true,\n });\n\n for (const filePath of filePaths) {\n try {\n await extractIntlayer(filePath, packageName, options, project);\n } catch (error) {\n appLogger(`Failed to transform ${filePath}: ${(error as Error).message}`);\n }\n }\n};\n"],"mappings":";;;;;;;;;;AAqBA,MAAM,wBAAwB;CAC5B;CACA;CACA;CACA;CACA;CACD;AAED,MAAM,iBAAiB,SAA0B;CAC/C,MAAM,UAAU,KAAK,MAAM;AAC3B,KAAI,CAAC,QAAS,QAAO;AACrB,KAAI,CAAC,QAAQ,SAAS,IAAI,CAAE,QAAO;AAEnC,KAAI,CAAC,SAAS,KAAK,QAAQ,CAAE,QAAO;AAEpC,KAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,WAAW,KAAK,CAAE,QAAO;AAChE,QAAO;;AAGT,MAAM,eAAe,MAAc,iBAAsC;CAEvE,IAAI,MAAM,KACP,QAAQ,QAAQ,IAAI,CACpB,QAAQ,OAAO,IAAI,CACnB,QAAQ,OAAO,IAAI,CACnB,QAAQ,kBAAkB,GAAG,CAC7B,MAAM,CACN,MAAM,IAAI,CACV,OAAO,QAAQ,CACf,MAAM,GATQ,EASI,CAClB,KAAK,MAAM,UACV,UAAU,IACN,KAAK,aAAa,GAClB,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE,CAAC,aAAa,CAC/D,CACA,KAAK,GAAG;AAEX,KAAI,CAAC,IAAK,OAAM;AAChB,KAAI,aAAa,IAAI,IAAI,EAAE;EACzB,IAAI,IAAI;AACR,SAAO,aAAa,IAAI,GAAG,MAAM,IAAI,CAAE;AACvC,QAAM,GAAG,MAAM;;AAEjB,QAAO;;AAGT,MAAM,qBAAqB,OACzB,kBACA,cACA,UACA,eACA,cACG;CACH,MAAM,EAAE,kBAAkB,cAAc;CACxC,MAAM,EAAE,YAAY,cAAc;CAElC,MAAM,UAAU,YAAY,QAAQ,UAAU,GAAG,QAAQ,SAAS;CAElE,MAAM,WAAW,SAAS,UADd,QAAQ,SAAS,CACW;CAGxC,MAAM,kBAAkB,KAAK,SAAS,GAFd,SAAS,OAAO,EAAE,CAAC,aAAa,GAAG,SAAS,MAAM,EAAE,CAEnB,aAAa;AAWtE,OAAM,wBARyB;EAC7B,KAAK;EACL,SAAS;EACT,QAAQ;EACR,UAN8B,SAAS,SAAS,gBAAgB;EAOjE,EAGyC,eAAe,EACvD,qBAFkB,SAAS,SAAS,QAAQ,EAG7C,CAAC;AAEF,QAAO;;AAST,MAAM,oBACJ,YACA,iBAIG;CACH,MAAMA,mBAA2C,EAAE;CACnD,MAAMC,eAAgC,EAAE;AAExC,YAAW,mBAAmB,SAAS;AAErC,MAAI,KAAK,UAAU,KAAK,EAAE;GACxB,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,QAAQ,QAAQ,IAAI,CAAC,MAAM;AACxD,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAY,CAAC;;aAK7C,KAAK,eAAe,KAAK,EAAE;GAClC,MAAM,OAAO,KAAK,aAAa,CAAC,SAAS;AACzC,OAAI,sBAAsB,SAAS,KAAK,EAAE;IACxC,MAAM,cAAc,KAAK,gBAAgB;AACzC,QAAI,KAAK,gBAAgB,YAAY,EAAE;KACrC,MAAM,OAAO,YAAY,iBAAiB;AAC1C,SAAI,cAAc,KAAK,EAAE;MACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,mBAAa,IAAI,IAAI;AACrB,uBAAiB,OAAO,KAAK,MAAM;AACnC,mBAAa,KAAK;OAAE;OAAM;OAAK,MAAM;OAAiB,CAAC;;;;aAOtD,KAAK,gBAAgB,KAAK,EAAE;GACnC,MAAM,OAAO,KAAK,iBAAiB;AACnC,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,SAAS,KAAK,WAAW;AAG/B,QACE,QAAQ,aAAa,KAAK,uBAC1B,QAAQ,aAAa,KAAK,qBAC1B,QAAQ,aAAa,KAAK,kBAE1B;AAIF,QAAI,KAAK,eAAe,OAAO,CAAE;AAGjC,QAAI,KAAK,iBAAiB,OAAO,EAE/B;SADmB,OAAO,eAAe,CAC1B,SAAS,CAAC,SAAS,cAAc,CAAE;;AAIpD,QAAI,KAAK,qBAAqB,OAAO,EACnC;SAAI,OAAO,aAAa,KAAK,KAAM;;IAGrC,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,MAAM;AACnC,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAkB,CAAC;;;GAG5D;AAEF,QAAO;EAAE;EAAkB;EAAc;;AAO3C,MAAM,mBAAmB,OACvB,UACA,cACA,aACA,SACA,OAAgB,SACb;CACH,IAAIC;AACJ,KAAI;AACF,eAAa,QAAQ,oBAAoB,SAAS;SAC5C;AACN,eAAa,QAAQ,qBAAqB,SAAS;;CAIrD,MAAM,EAAE,kBAAkB,iBAAiB,iBACzC,4BAFmB,IAAI,KAAa,CAIrC;AAED,KAAI,OAAO,KAAK,iBAAiB,CAAC,WAAW,EAAG,QAAO;AAEvD,MAAK,MAAM,EAAE,MAAM,KAAK,UAAU,aAChC,KAAI,SAAS,cAAc,KAAK,UAAU,KAAK,CAC7C,MAAK,gBAAgB,YAAY,IAAI,GAAG;UAC/B,SAAS,mBAAmB,KAAK,eAAe,KAAK,CAC9D,MAAK,eAAe,YAAY,IAAI,SAAS;UACpC,SAAS,oBAAoB,KAAK,gBAAgB,KAAK,CAEhE,MAAK,gBAAgB,WAAW,IAAI,QAAQ;CAKhD,MAAM,aAAa,WAAW,sBAC3B,MAAM,EAAE,yBAAyB,KAAK,YACxC;AACD,KAAI,CAAC,WACH,YAAW,qBAAqB;EAC9B,cAAc,CAAC,cAAc;EAC7B,iBAAiB;EAClB,CAAC;UAEF,CAAC,WAAW,iBAAiB,CAAC,MAAM,MAAM,EAAE,SAAS,KAAK,cAAc,CAExE,YAAW,eAAe,cAAc;AAI1C,YAAW,cAAc,CAAC,SAAS,MAAM;AACvC,IAAE,SAAS
,EACP,OAAO,WAAW,MAAM,EACxB,iBAAiB,GAAG,gCAAgC,aAAa,KAAK;GAC1E;AAGF,YAAW,yBAAyB,CAAC,SAAS,MAAM;EAClD,MAAM,OAAO,EAAE,gBAAgB;AAC/B,MAAI,KAAK,gBAAgB,KAAK,IAAI,KAAK,qBAAqB,KAAK,EAAE;GACjE,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAI,KAAK,QAAQ,KAAK,EAEpB;QACE,KAAK,SAAS,CAAC,SAAS,SAAS,IACjC,KAAK,SAAS,CAAC,SAAS,MAAM,CAE9B,MAAK,iBACH,GACA,gCAAgC,aAAa,KAC9C;;;GAIP;AAEF,KAAI,KACF,OAAM,WAAW,MAAM;AAEzB,QAAO;;AAwBT,MAAa,kBAAkB,OAC7B,UACA,aACA,SACA,YACG;CACH,MAAM,gBAAgB,CAAC,SAAS;CAChC,MAAM,eAAe,CAAC,SAAS;CAE/B,MAAM,gBAAgB,iBAAiB,SAAS,cAAc;CAC9D,MAAM,YAAY,aAAa,cAAc;CAC7C,MAAM,EAAE,YAAY,cAAc;CAGlC,MAAM,WACJ,WAAW,IAAI,QAAQ,EAAE,6BAA6B,MAAM,CAAC;CAE/D,MAAM,WAAW,qBACf,WACC,MAAM,GAAG,SAAS,SAAS,EAAE,UAAU,CACzC;CACD,MAAM,eAAe,qBAAqB,SAAS;CACnD,MAAM,MAAM,QAAQ,SAAS;CAE7B,IAAIC,mBAAkD;AAEtD,KAAI,QAAQ,OACV,KAAI;EACF,MAAM,EAAE,mBAAoB,MAAM,OAChC;AAEF,qBAAmB,MAAM,eACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACMC,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,kBAAkB,aAAa,6BAA6B,WAAW,OAAO,CAAC,wBAChF;AAEH,QAAM;;UAEC,QAAQ,UACjB,KAAI;EACF,MAAM,EAAE,sBAAuB,MAAM,OACnC;AAEF,qBAAmB,MAAM,kBACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACMA,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,kBAAkB,aAAa,gCAAgC,WAAW,OAAO,CAAC,2BACnF;AAEH,QAAM;;UAEC;EAAC;EAAQ;EAAQ;EAAO;EAAM,CAAC,SAAS,IAAI,CACrD,oBAAmB,MAAM,iBACvB,UACA,cACA,aACA,UACA,cACD;AAGH,KAAI,CAAC,kBAAkB;AACrB,YAAU,gCAAgC,WAAW;AACrD;;AAIF,KAAI,cAAc;EAChB,MAAM,kBAAkB,MAAM,mBAC5B,kBACA,cACA,UACA,eACA,SAAS,UACV;AAMD,YAAU,yBAAyB,aAJH,SAC9B,cAAc,QAAQ,SACtB,gBACD,CACuE,GAAG;;AAI7E,KAAI,eAAe;AACjB,MAAI;GACF,MAAM,gBAAgB,oBAAoB,cAAc;AACxD,OAAI,cACF,UAAS,cAAc,QAAQ,YAAY,SAAS,EAAE;IACpD,OAAO;IACP,KAAK;IACN,CAAC;UAEE;AAIR,YACE,sBAAsB,aAAa,SAAS,SAAS,SAAS,CAAC,GAChE;;;AAIL,MAAa,iBAAiB,OAC5B,WACA,aACA,YACG;CAEH,MAAM,YAAY,aADI,iBAAiB,SAAS,cAAc,CACjB;CAE7C,MAAM,UAAU,IAAI,QAAQ,EAC1B,6BAA6B,MAC9B,CAAC;AAEF,MAAK,MAAM,YAAY,UACrB,KAAI;AACF,QAAM,gBAAgB,UAAU,aAAa,SAAS,QAAQ;UACvD,OAAO;AACd,YAAU,uBAAuB,SAAS,IAAK,MAAgB,UAAU"}
1
+ {"version":3,"file":"transformFiles.mjs","names":["extractedContent: Record<string, string>","replacements: TsReplacement[]","sourceFile: SourceFile","extractedContent: Record<string, string> | null","error: any"],"sources":["../../../src/transformFiles/transformFiles.ts"],"sourcesContent":["import { execSync } from 'node:child_process';\nimport fs from 'node:fs/promises';\nimport { basename, dirname, extname, join, relative, resolve } from 'node:path';\nimport {\n ANSIColors,\n camelCaseToKebabCase,\n colorizePath,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary, IntlayerConfig } from '@intlayer/types';\nimport { Node, Project, type SourceFile, SyntaxKind } from 'ts-morph';\nimport { writeContentDeclaration } from '../writeContentDeclaration';\nimport { detectFormatCommand } from '../writeContentDeclaration/detectFormatCommand';\nimport { extractDictionaryKey } from './extractDictionaryKey';\n\n// ==========================================\n// 1. Shared Utilities (exported for reuse in babel plugin)\n// ==========================================\n\n/**\n * Attributes that should be extracted for localization\n */\nexport const ATTRIBUTES_TO_EXTRACT = [\n 'title',\n 'placeholder',\n 'alt',\n 'aria-label',\n 'label',\n];\n\n/**\n * Default function to determine if a string should be extracted for localization\n */\nexport const shouldExtract = (text: string): boolean => {\n const trimmed = text.trim();\n if (!trimmed) return false;\n if (!trimmed.includes(' ')) return false;\n // Starts with Capital letter\n if (!/^[A-Z]/.test(trimmed)) return false;\n // Filter out template logic identifiers (simple check)\n if (trimmed.startsWith('{') || trimmed.startsWith('v-')) return false;\n return true;\n};\n\n/**\n * Generate a unique key from text for use as a dictionary key\n */\nexport const generateKey = (\n text: string,\n existingKeys: Set<string>\n): string => {\n const maxWords = 5;\n let key = text\n .replace(/\\s+/g, ' ')\n .replace(/_+/g, ' ')\n .replace(/-+/g, ' ')\n .replace(/[^a-zA-Z0-9 ]/g, '')\n .trim()\n .split(' ')\n .filter(Boolean)\n .slice(0, maxWords)\n .map((word, index) =>\n index === 0\n ? word.toLowerCase()\n : word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()\n )\n .join('');\n\n if (!key) key = 'content';\n if (existingKeys.has(key)) {\n let i = 1;\n while (existingKeys.has(`${key}${i}`)) i++;\n key = `${key}${i}`;\n }\n return key;\n};\n\nconst writeContentHelper = async (\n extractedContent: Record<string, string>,\n componentKey: string,\n filePath: string,\n configuration: IntlayerConfig,\n outputDir?: string\n) => {\n const { defaultLocale } = configuration.internationalization;\n const { baseDir } = configuration.content;\n\n const dirName = outputDir ? 
resolve(outputDir) : dirname(filePath);\n const ext = extname(filePath);\n const baseName = basename(filePath, ext);\n const contentBaseName = baseName.charAt(0).toLowerCase() + baseName.slice(1);\n\n const contentFilePath = join(dirName, `${contentBaseName}.content.ts`);\n const relativeContentFilePath = relative(baseDir, contentFilePath);\n\n const dictionary: Dictionary = {\n key: componentKey,\n content: extractedContent,\n locale: defaultLocale,\n filePath: relativeContentFilePath,\n };\n\n const relativeDir = relative(baseDir, dirName);\n await writeContentDeclaration(dictionary, configuration, {\n newDictionariesPath: relativeDir,\n });\n\n return contentFilePath;\n};\n\ntype TsReplacement = {\n node: Node;\n key: string;\n type: 'jsx-text' | 'jsx-attribute' | 'string-literal';\n};\n\nconst extractTsContent = (\n sourceFile: SourceFile,\n existingKeys: Set<string>\n): {\n extractedContent: Record<string, string>;\n replacements: TsReplacement[];\n} => {\n const extractedContent: Record<string, string> = {};\n const replacements: TsReplacement[] = [];\n\n sourceFile.forEachDescendant((node) => {\n // 1. JSX Text\n if (Node.isJsxText(node)) {\n const text = node.getText();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.replace(/\\s+/g, ' ').trim();\n replacements.push({ node, key, type: 'jsx-text' });\n }\n }\n\n // 2. JSX Attributes\n else if (Node.isJsxAttribute(node)) {\n const name = node.getNameNode().getText();\n if (ATTRIBUTES_TO_EXTRACT.includes(name)) {\n const initializer = node.getInitializer();\n if (Node.isStringLiteral(initializer)) {\n const text = initializer.getLiteralValue();\n if (shouldExtract(text)) {\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'jsx-attribute' });\n }\n }\n }\n }\n\n // 3. String Literals (Variables, Arrays, etc.)\n else if (Node.isStringLiteral(node)) {\n const text = node.getLiteralValue();\n if (shouldExtract(text)) {\n const parent = node.getParent();\n\n // Skip if inside ImportDeclaration\n if (\n parent?.getKindName() === 'ImportDeclaration' ||\n parent?.getKindName() === 'ImportSpecifier' ||\n parent?.getKindName() === 'ModuleSpecifier'\n ) {\n return;\n }\n\n // Skip if it's a JSX Attribute value (handled above)\n if (Node.isJsxAttribute(parent)) return;\n\n // Skip console.log\n if (Node.isCallExpression(parent)) {\n const expression = parent.getExpression();\n if (expression.getText().includes('console.log')) return;\n }\n\n // Skip Object Keys: { key: \"value\" } -> \"key\" is PropertyAssignment name if not computed\n if (Node.isPropertyAssignment(parent)) {\n if (parent.getNameNode() === node) return; // It's the key\n }\n\n const key = generateKey(text, existingKeys);\n existingKeys.add(key);\n extractedContent[key] = text.trim();\n replacements.push({ node, key, type: 'string-literal' });\n }\n }\n });\n\n return { extractedContent, replacements };\n};\n\n// ==========================================\n// 2. 
React (TS-Morph) Strategy\n// ==========================================\n\nconst processReactFile = async (\n filePath: string,\n componentKey: string,\n packageName: string,\n project: Project,\n save: boolean = true\n) => {\n let sourceFile: SourceFile;\n try {\n sourceFile = project.addSourceFileAtPath(filePath);\n } catch {\n sourceFile = project.getSourceFileOrThrow(filePath);\n }\n\n const existingKeys = new Set<string>();\n const { extractedContent, replacements } = extractTsContent(\n sourceFile,\n existingKeys\n );\n\n if (Object.keys(extractedContent).length === 0) return null;\n\n for (const { node, key, type } of replacements) {\n if (type === 'jsx-text' && Node.isJsxText(node)) {\n node.replaceWithText(`{content.${key}}`);\n } else if (type === 'jsx-attribute' && Node.isJsxAttribute(node)) {\n node.setInitializer(`{content.${key}.value}`);\n } else if (type === 'string-literal' && Node.isStringLiteral(node)) {\n // For React/JS variables, we usually want the value\n node.replaceWithText(`content.${key}.value`);\n }\n }\n\n // Inject hook\n const importDecl = sourceFile.getImportDeclaration(\n (d) => d.getModuleSpecifierValue() === packageName\n );\n if (!importDecl) {\n sourceFile.addImportDeclaration({\n namedImports: ['useIntlayer'],\n moduleSpecifier: packageName,\n });\n } else if (\n !importDecl.getNamedImports().some((n) => n.getName() === 'useIntlayer')\n ) {\n importDecl.addNamedImport('useIntlayer');\n }\n\n // Insert hook at start of component\n sourceFile.getFunctions().forEach((f) => {\n f.getBody()\n ?.asKind(SyntaxKind.Block)\n ?.insertStatements(0, `const content = useIntlayer(\"${componentKey}\");`);\n });\n\n // Also handle const/arrow components\n sourceFile.getVariableDeclarations().forEach((v) => {\n const init = v.getInitializer();\n if (Node.isArrowFunction(init) || Node.isFunctionExpression(init)) {\n const body = init.getBody();\n if (Node.isBlock(body)) {\n // Heuristic: check if it returns JSX or uses hooks\n if (\n body.getText().includes('return') ||\n body.getText().includes('use')\n ) {\n body.insertStatements(\n 0,\n `const content = useIntlayer(\"${componentKey}\");`\n );\n }\n }\n }\n });\n\n if (save) {\n await sourceFile.save();\n }\n return extractedContent;\n};\n\n// ==========================================\n// 5. 
Main Dispatcher\n// ==========================================\n\nexport type PackageName =\n | 'next-intlayer'\n | 'react-intlayer'\n | 'vue-intlayer'\n | 'svelte-intlayer'\n | 'preact-intlayer'\n | 'solid-intlayer'\n | 'angular-intlayer'\n | 'express-intlayer';\n\nexport type ExtractIntlayerOptions = {\n configOptions?: GetConfigurationOptions;\n outputDir?: string;\n codeOnly?: boolean;\n declarationOnly?: boolean;\n};\n\nexport const extractIntlayer = async (\n filePath: string,\n packageName: PackageName,\n options?: ExtractIntlayerOptions,\n project?: Project\n) => {\n const saveComponent = !options?.declarationOnly;\n const writeContent = !options?.codeOnly;\n\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n const { baseDir } = configuration.content;\n\n // Setup Project for TS/React files if needed\n const _project =\n project || new Project({ skipAddingFilesFromTsConfig: true });\n\n const baseName = extractDictionaryKey(\n filePath,\n (await fs.readFile(filePath)).toString()\n );\n const componentKey = camelCaseToKebabCase(baseName);\n const ext = extname(filePath);\n\n let extractedContent: Record<string, string> | null = null;\n\n if (ext === '.vue') {\n try {\n const { processVueFile } = (await import(\n '@intlayer/vue-transformer'\n )) as any;\n extractedContent = await processVueFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/vue-transformer', ANSIColors.YELLOW)} to process Vue files.`\n );\n }\n throw error;\n }\n } else if (ext === '.svelte') {\n try {\n const { processSvelteFile } = (await import(\n '@intlayer/svelte-transformer'\n )) as any;\n extractedContent = await processSvelteFile(\n filePath,\n componentKey,\n packageName,\n {\n generateKey,\n shouldExtract,\n extractTsContent,\n },\n saveComponent\n );\n } catch (error: any) {\n if (\n error.code === 'ERR_MODULE_NOT_FOUND' ||\n error.message?.includes('Cannot find module')\n ) {\n throw new Error(\n `Please install ${colorizePath('@intlayer/svelte-transformer', ANSIColors.YELLOW)} to process Svelte files.`\n );\n }\n throw error;\n }\n } else if (['.tsx', '.jsx', '.ts', '.js'].includes(ext)) {\n extractedContent = await processReactFile(\n filePath,\n componentKey,\n packageName,\n _project,\n saveComponent\n );\n }\n\n if (!extractedContent) {\n appLogger(`No extractable text found in ${baseName}`);\n return;\n }\n\n // Shared Write Logic\n if (writeContent) {\n const contentFilePath = await writeContentHelper(\n extractedContent,\n componentKey,\n filePath,\n configuration,\n options?.outputDir\n );\n\n const relativeContentFilePath = relative(\n configuration.content.baseDir,\n contentFilePath\n );\n appLogger(`Created content file: ${colorizePath(relativeContentFilePath)}`);\n }\n\n // Optional: Format\n if (saveComponent) {\n try {\n const formatCommand = detectFormatCommand(configuration);\n if (formatCommand) {\n execSync(formatCommand.replace('{{file}}', filePath), {\n stdio: 'ignore', // Silent\n cwd: baseDir,\n });\n }\n } catch {\n // Ignore format errors\n }\n\n appLogger(\n `Updated component: ${colorizePath(relative(baseDir, filePath))}`\n );\n }\n};\n\nexport const transformFiles = async (\n filePaths: string[],\n packageName: PackageName,\n options?: 
ExtractIntlayerOptions\n) => {\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const project = new Project({\n skipAddingFilesFromTsConfig: true,\n });\n\n for (const filePath of filePaths) {\n try {\n await extractIntlayer(filePath, packageName, options, project);\n } catch (error) {\n appLogger(`Failed to transform ${filePath}: ${(error as Error).message}`);\n }\n }\n};\n"],"mappings":";;;;;;;;;;;;;AAwBA,MAAa,wBAAwB;CACnC;CACA;CACA;CACA;CACA;CACD;;;;AAKD,MAAa,iBAAiB,SAA0B;CACtD,MAAM,UAAU,KAAK,MAAM;AAC3B,KAAI,CAAC,QAAS,QAAO;AACrB,KAAI,CAAC,QAAQ,SAAS,IAAI,CAAE,QAAO;AAEnC,KAAI,CAAC,SAAS,KAAK,QAAQ,CAAE,QAAO;AAEpC,KAAI,QAAQ,WAAW,IAAI,IAAI,QAAQ,WAAW,KAAK,CAAE,QAAO;AAChE,QAAO;;;;;AAMT,MAAa,eACX,MACA,iBACW;CAEX,IAAI,MAAM,KACP,QAAQ,QAAQ,IAAI,CACpB,QAAQ,OAAO,IAAI,CACnB,QAAQ,OAAO,IAAI,CACnB,QAAQ,kBAAkB,GAAG,CAC7B,MAAM,CACN,MAAM,IAAI,CACV,OAAO,QAAQ,CACf,MAAM,GATQ,EASI,CAClB,KAAK,MAAM,UACV,UAAU,IACN,KAAK,aAAa,GAClB,KAAK,OAAO,EAAE,CAAC,aAAa,GAAG,KAAK,MAAM,EAAE,CAAC,aAAa,CAC/D,CACA,KAAK,GAAG;AAEX,KAAI,CAAC,IAAK,OAAM;AAChB,KAAI,aAAa,IAAI,IAAI,EAAE;EACzB,IAAI,IAAI;AACR,SAAO,aAAa,IAAI,GAAG,MAAM,IAAI,CAAE;AACvC,QAAM,GAAG,MAAM;;AAEjB,QAAO;;AAGT,MAAM,qBAAqB,OACzB,kBACA,cACA,UACA,eACA,cACG;CACH,MAAM,EAAE,kBAAkB,cAAc;CACxC,MAAM,EAAE,YAAY,cAAc;CAElC,MAAM,UAAU,YAAY,QAAQ,UAAU,GAAG,QAAQ,SAAS;CAElE,MAAM,WAAW,SAAS,UADd,QAAQ,SAAS,CACW;CAGxC,MAAM,kBAAkB,KAAK,SAAS,GAFd,SAAS,OAAO,EAAE,CAAC,aAAa,GAAG,SAAS,MAAM,EAAE,CAEnB,aAAa;AAWtE,OAAM,wBARyB;EAC7B,KAAK;EACL,SAAS;EACT,QAAQ;EACR,UAN8B,SAAS,SAAS,gBAAgB;EAOjE,EAGyC,eAAe,EACvD,qBAFkB,SAAS,SAAS,QAAQ,EAG7C,CAAC;AAEF,QAAO;;AAST,MAAM,oBACJ,YACA,iBAIG;CACH,MAAMA,mBAA2C,EAAE;CACnD,MAAMC,eAAgC,EAAE;AAExC,YAAW,mBAAmB,SAAS;AAErC,MAAI,KAAK,UAAU,KAAK,EAAE;GACxB,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,QAAQ,QAAQ,IAAI,CAAC,MAAM;AACxD,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAY,CAAC;;aAK7C,KAAK,eAAe,KAAK,EAAE;GAClC,MAAM,OAAO,KAAK,aAAa,CAAC,SAAS;AACzC,OAAI,sBAAsB,SAAS,KAAK,EAAE;IACxC,MAAM,cAAc,KAAK,gBAAgB;AACzC,QAAI,KAAK,gBAAgB,YAAY,EAAE;KACrC,MAAM,OAAO,YAAY,iBAAiB;AAC1C,SAAI,cAAc,KAAK,EAAE;MACvB,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,mBAAa,IAAI,IAAI;AACrB,uBAAiB,OAAO,KAAK,MAAM;AACnC,mBAAa,KAAK;OAAE;OAAM;OAAK,MAAM;OAAiB,CAAC;;;;aAOtD,KAAK,gBAAgB,KAAK,EAAE;GACnC,MAAM,OAAO,KAAK,iBAAiB;AACnC,OAAI,cAAc,KAAK,EAAE;IACvB,MAAM,SAAS,KAAK,WAAW;AAG/B,QACE,QAAQ,aAAa,KAAK,uBAC1B,QAAQ,aAAa,KAAK,qBAC1B,QAAQ,aAAa,KAAK,kBAE1B;AAIF,QAAI,KAAK,eAAe,OAAO,CAAE;AAGjC,QAAI,KAAK,iBAAiB,OAAO,EAE/B;SADmB,OAAO,eAAe,CAC1B,SAAS,CAAC,SAAS,cAAc,CAAE;;AAIpD,QAAI,KAAK,qBAAqB,OAAO,EACnC;SAAI,OAAO,aAAa,KAAK,KAAM;;IAGrC,MAAM,MAAM,YAAY,MAAM,aAAa;AAC3C,iBAAa,IAAI,IAAI;AACrB,qBAAiB,OAAO,KAAK,MAAM;AACnC,iBAAa,KAAK;KAAE;KAAM;KAAK,MAAM;KAAkB,CAAC;;;GAG5D;AAEF,QAAO;EAAE;EAAkB;EAAc;;AAO3C,MAAM,mBAAmB,OACvB,UACA,cACA,aACA,SACA,OAAgB,SACb;CACH,IAAIC;AACJ,KAAI;AACF,eAAa,QAAQ,oBAAoB,SAAS;SAC5C;AACN,eAAa,QAAQ,qBAAqB,SAAS;;CAIrD,MAAM,EAAE,kBAAkB,iBAAiB,iBACzC,4BAFmB,IAAI,KAAa,CAIrC;AAED,KAAI,OAAO,KAAK,iBAAiB,CAAC,WAAW,EAAG,QAAO;AAEvD,MAAK,MAAM,EAAE,MAAM,KAAK,UAAU,aAChC,KAAI,SAAS,cAAc,KAAK,UAAU,KAAK,CAC7C,MAAK,gBAAgB,YAAY,IAAI,GAAG;UAC/B,SAAS,mBAAmB,KAAK,eAAe,KAAK,CAC9D,MAAK,eAAe,YAAY,IAAI,SAAS;UACpC,SAAS,oBAAoB,KAAK,gBAAgB,KAAK,CAEhE,MAAK,gBAAgB,WAAW,IAAI,QAAQ;CAKhD,MAAM,aAAa,WAAW,sBAC3B,MAAM,EAAE,yBAAyB,KAAK,YACxC;AACD,KAAI,CAAC,WACH,YAAW,qBAAqB;EAC9B,cAAc,CAAC,cAAc;EAC7B,iBAAiB;EAClB,CAAC;UAEF,CAAC,WAAW,iBAAiB,CAAC,MAAM,MAAM,EAAE,SAAS,KAAK,cAAc,CAExE,YAAW,eAAe,cAAc;AAI1C,YAAW,cAAc,CAAC,SAAS,MAAM;AACvC,IA
AE,SAAS,EACP,OAAO,WAAW,MAAM,EACxB,iBAAiB,GAAG,gCAAgC,aAAa,KAAK;GAC1E;AAGF,YAAW,yBAAyB,CAAC,SAAS,MAAM;EAClD,MAAM,OAAO,EAAE,gBAAgB;AAC/B,MAAI,KAAK,gBAAgB,KAAK,IAAI,KAAK,qBAAqB,KAAK,EAAE;GACjE,MAAM,OAAO,KAAK,SAAS;AAC3B,OAAI,KAAK,QAAQ,KAAK,EAEpB;QACE,KAAK,SAAS,CAAC,SAAS,SAAS,IACjC,KAAK,SAAS,CAAC,SAAS,MAAM,CAE9B,MAAK,iBACH,GACA,gCAAgC,aAAa,KAC9C;;;GAIP;AAEF,KAAI,KACF,OAAM,WAAW,MAAM;AAEzB,QAAO;;AAwBT,MAAa,kBAAkB,OAC7B,UACA,aACA,SACA,YACG;CACH,MAAM,gBAAgB,CAAC,SAAS;CAChC,MAAM,eAAe,CAAC,SAAS;CAE/B,MAAM,gBAAgB,iBAAiB,SAAS,cAAc;CAC9D,MAAM,YAAY,aAAa,cAAc;CAC7C,MAAM,EAAE,YAAY,cAAc;CAGlC,MAAM,WACJ,WAAW,IAAI,QAAQ,EAAE,6BAA6B,MAAM,CAAC;CAE/D,MAAM,WAAW,qBACf,WACC,MAAM,GAAG,SAAS,SAAS,EAAE,UAAU,CACzC;CACD,MAAM,eAAe,qBAAqB,SAAS;CACnD,MAAM,MAAM,QAAQ,SAAS;CAE7B,IAAIC,mBAAkD;AAEtD,KAAI,QAAQ,OACV,KAAI;EACF,MAAM,EAAE,mBAAoB,MAAM,OAChC;AAEF,qBAAmB,MAAM,eACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACMC,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,kBAAkB,aAAa,6BAA6B,WAAW,OAAO,CAAC,wBAChF;AAEH,QAAM;;UAEC,QAAQ,UACjB,KAAI;EACF,MAAM,EAAE,sBAAuB,MAAM,OACnC;AAEF,qBAAmB,MAAM,kBACvB,UACA,cACA,aACA;GACE;GACA;GACA;GACD,EACD,cACD;UACMA,OAAY;AACnB,MACE,MAAM,SAAS,0BACf,MAAM,SAAS,SAAS,qBAAqB,CAE7C,OAAM,IAAI,MACR,kBAAkB,aAAa,gCAAgC,WAAW,OAAO,CAAC,2BACnF;AAEH,QAAM;;UAEC;EAAC;EAAQ;EAAQ;EAAO;EAAM,CAAC,SAAS,IAAI,CACrD,oBAAmB,MAAM,iBACvB,UACA,cACA,aACA,UACA,cACD;AAGH,KAAI,CAAC,kBAAkB;AACrB,YAAU,gCAAgC,WAAW;AACrD;;AAIF,KAAI,cAAc;EAChB,MAAM,kBAAkB,MAAM,mBAC5B,kBACA,cACA,UACA,eACA,SAAS,UACV;AAMD,YAAU,yBAAyB,aAJH,SAC9B,cAAc,QAAQ,SACtB,gBACD,CACuE,GAAG;;AAI7E,KAAI,eAAe;AACjB,MAAI;GACF,MAAM,gBAAgB,oBAAoB,cAAc;AACxD,OAAI,cACF,UAAS,cAAc,QAAQ,YAAY,SAAS,EAAE;IACpD,OAAO;IACP,KAAK;IACN,CAAC;UAEE;AAIR,YACE,sBAAsB,aAAa,SAAS,SAAS,SAAS,CAAC,GAChE;;;AAIL,MAAa,iBAAiB,OAC5B,WACA,aACA,YACG;CAEH,MAAM,YAAY,aADI,iBAAiB,SAAS,cAAc,CACjB;CAE7C,MAAM,UAAU,IAAI,QAAQ,EAC1B,6BAA6B,MAC9B,CAAC;AAEF,MAAK,MAAM,YAAY,UACrB,KAAI;AACF,QAAM,gBAAgB,UAAU,aAAa,SAAS,QAAQ;UACvD,OAAO;AACd,YAAU,uBAAuB,SAAS,IAAK,MAAgB,UAAU"}
@@ -104,9 +104,7 @@ const writeFileWithDirectories = async (absoluteFilePath, dictionary, configurat
104
104
  await writeJSFile(absoluteFilePath, dictionary, configuration);
105
105
  try {
106
106
  await rm(join(configuration.content.cacheDir, "intlayer-prepared.lock"), { recursive: true });
107
- } catch (error) {
108
- if (error && typeof error === "object" && "code" in error && error.code !== "ENOENT") throw error;
109
- }
107
+ } catch {}
110
108
  };
111
109
 
112
110
  //#endregion
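
The hunk above also loosens the cache-invalidation step that runs after a JS/TS content declaration is written: removing the `intlayer-prepared.lock` sentinel from the cache directory now ignores every error, where 7.3.0 re-threw anything other than `ENOENT`. A minimal sketch of that sentinel pattern, with an illustrative `cacheDir` value and a hypothetical wrapper name:

```ts
// Minimal sketch of the sentinel-based invalidation performed above; the
// cacheDir value is illustrative, not the package default.
import { rm } from 'node:fs/promises';
import { join } from 'node:path';

const invalidatePreparedCache = async (cacheDir: string): Promise<void> => {
  try {
    // Deleting the lock file forces intlayer to re-run its preparation step
    // on the next build.
    await rm(join(cacheDir, 'intlayer-prepared.lock'), { recursive: true });
  } catch {
    // 7.3.2-canary.0: ignore every failure (the sentinel may not exist yet).
    // 7.3.0 re-threw any error whose code was not ENOENT.
  }
};

await invalidatePreparedCache('.intlayer/cache');
```
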
@@ -1 +1 @@
1
- {"version":3,"file":"writeContentDeclaration.mjs","names":["pluginFormatResult: any","result: Dictionary","extension"],"sources":["../../../src/writeContentDeclaration/writeContentDeclaration.ts"],"sourcesContent":["import { mkdir, rm, writeFile } from 'node:fs/promises';\nimport { dirname, extname, join, resolve } from 'node:path';\nimport { isDeepStrictEqual } from 'node:util';\nimport {\n getFilteredLocalesDictionary,\n getPerLocaleDictionary,\n} from '@intlayer/core';\nimport type {\n Dictionary,\n IntlayerConfig,\n Locale,\n LocalesValues,\n} from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport type { DictionaryStatus } from './dictionaryStatus';\nimport { processContentDeclarationContent } from './processContentDeclarationContent';\nimport { writeJSFile } from './writeJSFile';\n\nconst formatContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n localeList?: LocalesValues[]\n) => {\n /**\n * Clean Markdown, Insertion, File, etc. node metadata\n */\n const processedDictionary =\n await processContentDeclarationContent(dictionary);\n\n let content = processedDictionary.content;\n\n /**\n * Filter locales content\n */\n\n if (dictionary.locale) {\n content = getPerLocaleDictionary(\n processedDictionary,\n dictionary.locale\n ).content;\n } else if (localeList) {\n content = getFilteredLocalesDictionary(\n processedDictionary,\n localeList\n ).content;\n }\n\n let pluginFormatResult: any = {\n ...dictionary,\n content,\n } satisfies Dictionary;\n\n /**\n * Format the dictionary with the plugins\n */\n\n for await (const plugin of configuration.plugins ?? []) {\n if (plugin.formatOutput) {\n const formattedResult = await plugin.formatOutput?.({\n dictionary: pluginFormatResult,\n configuration,\n });\n\n if (formattedResult) {\n pluginFormatResult = formattedResult;\n }\n }\n }\n\n const isDictionaryFormat =\n pluginFormatResult.content && pluginFormatResult.key;\n\n if (!isDictionaryFormat) return pluginFormatResult;\n\n let result: Dictionary = {\n key: dictionary.key,\n id: dictionary.id,\n title: dictionary.title,\n description: dictionary.description,\n tags: dictionary.tags,\n locale: dictionary.locale,\n fill: dictionary.fill,\n filled: dictionary.filled,\n priority: dictionary.priority,\n live: dictionary.live,\n version: dictionary.version,\n content,\n };\n\n /**\n * Add $schema to JSON dictionaries\n */\n const extension = (\n dictionary.filePath ? 
extname(dictionary.filePath) : '.json'\n ) as Extension;\n const format = getFormatFromExtension(extension);\n\n if (\n format === 'json' &&\n pluginFormatResult.content &&\n pluginFormatResult.key\n ) {\n result = {\n $schema: 'https://intlayer.org/schema.json',\n ...result,\n };\n }\n\n return result;\n};\n\ntype WriteContentDeclarationOptions = {\n newDictionariesPath?: string;\n localeList?: LocalesValues[];\n fallbackLocale?: Locale;\n};\n\nconst defaultOptions = {\n newDictionariesPath: 'intlayer-dictionaries',\n} satisfies WriteContentDeclarationOptions;\n\nexport const writeContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n options?: WriteContentDeclarationOptions\n): Promise<{ status: DictionaryStatus; path: string }> => {\n const { content } = configuration;\n const { baseDir } = content;\n const { newDictionariesPath, localeList } = {\n ...defaultOptions,\n ...options,\n };\n\n const newDictionaryLocationPath = join(baseDir, newDictionariesPath);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(configuration);\n const unmergedDictionaries = unmergedDictionariesRecord[\n dictionary.key\n ] as Dictionary[];\n\n const existingDictionary = unmergedDictionaries?.find(\n (el) => el.localId === dictionary.localId\n );\n\n const formattedContentDeclaration = await formatContentDeclaration(\n dictionary,\n configuration,\n localeList\n );\n\n if (existingDictionary?.filePath) {\n // Compare existing dictionary content with new dictionary content\n const isSameContent = isDeepStrictEqual(existingDictionary, dictionary);\n\n const filePath = resolve(\n configuration.content.baseDir,\n existingDictionary.filePath\n );\n\n // Up to date, nothing to do\n if (isSameContent) {\n return {\n status: 'up-to-date',\n path: filePath,\n };\n }\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'updated', path: filePath };\n }\n\n if (dictionary.filePath) {\n const filePath = resolve(\n configuration.content.baseDir,\n dictionary.filePath\n );\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'created', path: filePath };\n }\n\n // No existing dictionary, write to new location\n const contentDeclarationPath = join(\n newDictionaryLocationPath,\n `${dictionary.key}.content.json`\n );\n\n await writeFileWithDirectories(\n contentDeclarationPath,\n formattedContentDeclaration,\n configuration\n );\n\n return {\n status: 'imported',\n path: contentDeclarationPath,\n };\n};\n\nconst writeFileWithDirectories = async (\n absoluteFilePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n // Extract the directory from the file path\n const dir = dirname(absoluteFilePath);\n\n // Create the directory recursively\n await mkdir(dir, { recursive: true });\n\n const extension = extname(absoluteFilePath);\n const acceptedExtensions = configuration.content.fileExtensions.map(\n (extension) => extname(extension)\n );\n\n if (!acceptedExtensions.includes(extension)) {\n throw new Error(\n `Invalid file extension: ${extension}, file: ${absoluteFilePath}`\n );\n }\n\n if (extension === '.json') {\n const jsonDictionary = JSON.stringify(dictionary, null, 2);\n\n // Write the file\n await writeFile(absoluteFilePath, `${jsonDictionary}\\n`); // Add a new line at the end of the file to avoid formatting issues with VSCode\n\n return;\n }\n\n await writeJSFile(absoluteFilePath, dictionary, 
configuration);\n\n // remove the cache as content has changed\n // Will force a new preparation of the intlayer on next build\n try {\n const sentinelPath = join(\n configuration.content.cacheDir,\n 'intlayer-prepared.lock'\n );\n await rm(sentinelPath, { recursive: true });\n } catch (error) {\n if (\n error &&\n typeof error === 'object' &&\n 'code' in error &&\n (error as { code: string }).code !== 'ENOENT'\n ) {\n throw error;\n }\n }\n};\n"],"mappings":";;;;;;;;;;AAsBA,MAAM,2BAA2B,OAC/B,YACA,eACA,eACG;;;;CAIH,MAAM,sBACJ,MAAM,iCAAiC,WAAW;CAEpD,IAAI,UAAU,oBAAoB;;;;AAMlC,KAAI,WAAW,OACb,WAAU,uBACR,qBACA,WAAW,OACZ,CAAC;UACO,WACT,WAAU,6BACR,qBACA,WACD,CAAC;CAGJ,IAAIA,qBAA0B;EAC5B,GAAG;EACH;EACD;;;;AAMD,YAAW,MAAM,UAAU,cAAc,WAAW,EAAE,CACpD,KAAI,OAAO,cAAc;EACvB,MAAM,kBAAkB,MAAM,OAAO,eAAe;GAClD,YAAY;GACZ;GACD,CAAC;AAEF,MAAI,gBACF,sBAAqB;;AAQ3B,KAAI,EAFF,mBAAmB,WAAW,mBAAmB,KAE1B,QAAO;CAEhC,IAAIC,SAAqB;EACvB,KAAK,WAAW;EAChB,IAAI,WAAW;EACf,OAAO,WAAW;EAClB,aAAa,WAAW;EACxB,MAAM,WAAW;EACjB,QAAQ,WAAW;EACnB,MAAM,WAAW;EACjB,QAAQ,WAAW;EACnB,UAAU,WAAW;EACrB,MAAM,WAAW;EACjB,SAAS,WAAW;EACpB;EACD;AAUD,KAFe,uBAFb,WAAW,WAAW,QAAQ,WAAW,SAAS,GAAG,QAEP,KAGnC,UACX,mBAAmB,WACnB,mBAAmB,IAEnB,UAAS;EACP,SAAS;EACT,GAAG;EACJ;AAGH,QAAO;;AAST,MAAM,iBAAiB,EACrB,qBAAqB,yBACtB;AAED,MAAa,0BAA0B,OACrC,YACA,eACA,YACwD;CACxD,MAAM,EAAE,YAAY;CACpB,MAAM,EAAE,YAAY;CACpB,MAAM,EAAE,qBAAqB,eAAe;EAC1C,GAAG;EACH,GAAG;EACJ;CAED,MAAM,4BAA4B,KAAK,SAAS,oBAAoB;CAOpE,MAAM,qBAL6B,wBAAwB,cAAc,CAEvE,WAAW,MAGoC,MAC9C,OAAO,GAAG,YAAY,WAAW,QACnC;CAED,MAAM,8BAA8B,MAAM,yBACxC,YACA,eACA,WACD;AAED,KAAI,oBAAoB,UAAU;EAEhC,MAAM,gBAAgB,kBAAkB,oBAAoB,WAAW;EAEvE,MAAM,WAAW,QACf,cAAc,QAAQ,SACtB,mBAAmB,SACpB;AAGD,MAAI,cACF,QAAO;GACL,QAAQ;GACR,MAAM;GACP;AAGH,QAAM,yBACJ,UACA,6BACA,cACD;AAED,SAAO;GAAE,QAAQ;GAAW,MAAM;GAAU;;AAG9C,KAAI,WAAW,UAAU;EACvB,MAAM,WAAW,QACf,cAAc,QAAQ,SACtB,WAAW,SACZ;AACD,QAAM,yBACJ,UACA,6BACA,cACD;AAED,SAAO;GAAE,QAAQ;GAAW,MAAM;GAAU;;CAI9C,MAAM,yBAAyB,KAC7B,2BACA,GAAG,WAAW,IAAI,eACnB;AAED,OAAM,yBACJ,wBACA,6BACA,cACD;AAED,QAAO;EACL,QAAQ;EACR,MAAM;EACP;;AAGH,MAAM,2BAA2B,OAC/B,kBACA,YACA,kBACkB;AAKlB,OAAM,MAHM,QAAQ,iBAAiB,EAGpB,EAAE,WAAW,MAAM,CAAC;CAErC,MAAM,YAAY,QAAQ,iBAAiB;AAK3C,KAAI,CAJuB,cAAc,QAAQ,eAAe,KAC7D,gBAAc,QAAQC,YAAU,CAClC,CAEuB,SAAS,UAAU,CACzC,OAAM,IAAI,MACR,2BAA2B,UAAU,UAAU,mBAChD;AAGH,KAAI,cAAc,SAAS;AAIzB,QAAM,UAAU,kBAAkB,GAHX,KAAK,UAAU,YAAY,MAAM,EAAE,CAGN,IAAI;AAExD;;AAGF,OAAM,YAAY,kBAAkB,YAAY,cAAc;AAI9D,KAAI;AAKF,QAAM,GAJe,KACnB,cAAc,QAAQ,UACtB,yBACD,EACsB,EAAE,WAAW,MAAM,CAAC;UACpC,OAAO;AACd,MACE,SACA,OAAO,UAAU,YACjB,UAAU,SACT,MAA2B,SAAS,SAErC,OAAM"}
1
+ {"version":3,"file":"writeContentDeclaration.mjs","names":["pluginFormatResult: any","result: Dictionary","extension"],"sources":["../../../src/writeContentDeclaration/writeContentDeclaration.ts"],"sourcesContent":["import { mkdir, rm, writeFile } from 'node:fs/promises';\nimport { dirname, extname, join, resolve } from 'node:path';\nimport { isDeepStrictEqual } from 'node:util';\nimport {\n getFilteredLocalesDictionary,\n getPerLocaleDictionary,\n} from '@intlayer/core';\nimport type {\n Dictionary,\n IntlayerConfig,\n Locale,\n LocalesValues,\n} from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport {\n type Extension,\n getFormatFromExtension,\n} from '../utils/getFormatFromExtension';\nimport type { DictionaryStatus } from './dictionaryStatus';\nimport { processContentDeclarationContent } from './processContentDeclarationContent';\nimport { writeJSFile } from './writeJSFile';\n\nconst formatContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n localeList?: LocalesValues[]\n) => {\n /**\n * Clean Markdown, Insertion, File, etc. node metadata\n */\n const processedDictionary =\n await processContentDeclarationContent(dictionary);\n\n let content = processedDictionary.content;\n\n /**\n * Filter locales content\n */\n\n if (dictionary.locale) {\n content = getPerLocaleDictionary(\n processedDictionary,\n dictionary.locale\n ).content;\n } else if (localeList) {\n content = getFilteredLocalesDictionary(\n processedDictionary,\n localeList\n ).content;\n }\n\n let pluginFormatResult: any = {\n ...dictionary,\n content,\n } satisfies Dictionary;\n\n /**\n * Format the dictionary with the plugins\n */\n\n for await (const plugin of configuration.plugins ?? []) {\n if (plugin.formatOutput) {\n const formattedResult = await plugin.formatOutput?.({\n dictionary: pluginFormatResult,\n configuration,\n });\n\n if (formattedResult) {\n pluginFormatResult = formattedResult;\n }\n }\n }\n\n const isDictionaryFormat =\n pluginFormatResult.content && pluginFormatResult.key;\n\n if (!isDictionaryFormat) return pluginFormatResult;\n\n let result: Dictionary = {\n key: dictionary.key,\n id: dictionary.id,\n title: dictionary.title,\n description: dictionary.description,\n tags: dictionary.tags,\n locale: dictionary.locale,\n fill: dictionary.fill,\n filled: dictionary.filled,\n priority: dictionary.priority,\n live: dictionary.live,\n version: dictionary.version,\n content,\n };\n\n /**\n * Add $schema to JSON dictionaries\n */\n const extension = (\n dictionary.filePath ? 
extname(dictionary.filePath) : '.json'\n ) as Extension;\n const format = getFormatFromExtension(extension);\n\n if (\n format === 'json' &&\n pluginFormatResult.content &&\n pluginFormatResult.key\n ) {\n result = {\n $schema: 'https://intlayer.org/schema.json',\n ...result,\n };\n }\n\n return result;\n};\n\ntype WriteContentDeclarationOptions = {\n newDictionariesPath?: string;\n localeList?: LocalesValues[];\n fallbackLocale?: Locale;\n};\n\nconst defaultOptions = {\n newDictionariesPath: 'intlayer-dictionaries',\n} satisfies WriteContentDeclarationOptions;\n\nexport const writeContentDeclaration = async (\n dictionary: Dictionary,\n configuration: IntlayerConfig,\n options?: WriteContentDeclarationOptions\n): Promise<{ status: DictionaryStatus; path: string }> => {\n const { content } = configuration;\n const { baseDir } = content;\n const { newDictionariesPath, localeList } = {\n ...defaultOptions,\n ...options,\n };\n\n const newDictionaryLocationPath = join(baseDir, newDictionariesPath);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(configuration);\n const unmergedDictionaries = unmergedDictionariesRecord[\n dictionary.key\n ] as Dictionary[];\n\n const existingDictionary = unmergedDictionaries?.find(\n (el) => el.localId === dictionary.localId\n );\n\n const formattedContentDeclaration = await formatContentDeclaration(\n dictionary,\n configuration,\n localeList\n );\n\n if (existingDictionary?.filePath) {\n // Compare existing dictionary content with new dictionary content\n const isSameContent = isDeepStrictEqual(existingDictionary, dictionary);\n\n const filePath = resolve(\n configuration.content.baseDir,\n existingDictionary.filePath\n );\n\n // Up to date, nothing to do\n if (isSameContent) {\n return {\n status: 'up-to-date',\n path: filePath,\n };\n }\n\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'updated', path: filePath };\n }\n\n if (dictionary.filePath) {\n const filePath = resolve(\n configuration.content.baseDir,\n dictionary.filePath\n );\n await writeFileWithDirectories(\n filePath,\n formattedContentDeclaration,\n configuration\n );\n\n return { status: 'created', path: filePath };\n }\n\n // No existing dictionary, write to new location\n const contentDeclarationPath = join(\n newDictionaryLocationPath,\n `${dictionary.key}.content.json`\n );\n\n await writeFileWithDirectories(\n contentDeclarationPath,\n formattedContentDeclaration,\n configuration\n );\n\n return {\n status: 'imported',\n path: contentDeclarationPath,\n };\n};\n\nconst writeFileWithDirectories = async (\n absoluteFilePath: string,\n dictionary: Dictionary,\n configuration: IntlayerConfig\n): Promise<void> => {\n // Extract the directory from the file path\n const dir = dirname(absoluteFilePath);\n\n // Create the directory recursively\n await mkdir(dir, { recursive: true });\n\n const extension = extname(absoluteFilePath);\n const acceptedExtensions = configuration.content.fileExtensions.map(\n (extension) => extname(extension)\n );\n\n if (!acceptedExtensions.includes(extension)) {\n throw new Error(\n `Invalid file extension: ${extension}, file: ${absoluteFilePath}`\n );\n }\n\n if (extension === '.json') {\n const jsonDictionary = JSON.stringify(dictionary, null, 2);\n\n // Write the file\n await writeFile(absoluteFilePath, `${jsonDictionary}\\n`); // Add a new line at the end of the file to avoid formatting issues with VSCode\n\n return;\n }\n\n await writeJSFile(absoluteFilePath, dictionary, 
configuration);\n\n // remove the cache as content has changed\n // Will force a new preparation of the intlayer on next build\n try {\n const sentinelPath = join(\n configuration.content.cacheDir,\n 'intlayer-prepared.lock'\n );\n await rm(sentinelPath, { recursive: true });\n } catch {}\n};\n"],"mappings":";;;;;;;;;;AAsBA,MAAM,2BAA2B,OAC/B,YACA,eACA,eACG;;;;CAIH,MAAM,sBACJ,MAAM,iCAAiC,WAAW;CAEpD,IAAI,UAAU,oBAAoB;;;;AAMlC,KAAI,WAAW,OACb,WAAU,uBACR,qBACA,WAAW,OACZ,CAAC;UACO,WACT,WAAU,6BACR,qBACA,WACD,CAAC;CAGJ,IAAIA,qBAA0B;EAC5B,GAAG;EACH;EACD;;;;AAMD,YAAW,MAAM,UAAU,cAAc,WAAW,EAAE,CACpD,KAAI,OAAO,cAAc;EACvB,MAAM,kBAAkB,MAAM,OAAO,eAAe;GAClD,YAAY;GACZ;GACD,CAAC;AAEF,MAAI,gBACF,sBAAqB;;AAQ3B,KAAI,EAFF,mBAAmB,WAAW,mBAAmB,KAE1B,QAAO;CAEhC,IAAIC,SAAqB;EACvB,KAAK,WAAW;EAChB,IAAI,WAAW;EACf,OAAO,WAAW;EAClB,aAAa,WAAW;EACxB,MAAM,WAAW;EACjB,QAAQ,WAAW;EACnB,MAAM,WAAW;EACjB,QAAQ,WAAW;EACnB,UAAU,WAAW;EACrB,MAAM,WAAW;EACjB,SAAS,WAAW;EACpB;EACD;AAUD,KAFe,uBAFb,WAAW,WAAW,QAAQ,WAAW,SAAS,GAAG,QAEP,KAGnC,UACX,mBAAmB,WACnB,mBAAmB,IAEnB,UAAS;EACP,SAAS;EACT,GAAG;EACJ;AAGH,QAAO;;AAST,MAAM,iBAAiB,EACrB,qBAAqB,yBACtB;AAED,MAAa,0BAA0B,OACrC,YACA,eACA,YACwD;CACxD,MAAM,EAAE,YAAY;CACpB,MAAM,EAAE,YAAY;CACpB,MAAM,EAAE,qBAAqB,eAAe;EAC1C,GAAG;EACH,GAAG;EACJ;CAED,MAAM,4BAA4B,KAAK,SAAS,oBAAoB;CAOpE,MAAM,qBAL6B,wBAAwB,cAAc,CAEvE,WAAW,MAGoC,MAC9C,OAAO,GAAG,YAAY,WAAW,QACnC;CAED,MAAM,8BAA8B,MAAM,yBACxC,YACA,eACA,WACD;AAED,KAAI,oBAAoB,UAAU;EAEhC,MAAM,gBAAgB,kBAAkB,oBAAoB,WAAW;EAEvE,MAAM,WAAW,QACf,cAAc,QAAQ,SACtB,mBAAmB,SACpB;AAGD,MAAI,cACF,QAAO;GACL,QAAQ;GACR,MAAM;GACP;AAGH,QAAM,yBACJ,UACA,6BACA,cACD;AAED,SAAO;GAAE,QAAQ;GAAW,MAAM;GAAU;;AAG9C,KAAI,WAAW,UAAU;EACvB,MAAM,WAAW,QACf,cAAc,QAAQ,SACtB,WAAW,SACZ;AACD,QAAM,yBACJ,UACA,6BACA,cACD;AAED,SAAO;GAAE,QAAQ;GAAW,MAAM;GAAU;;CAI9C,MAAM,yBAAyB,KAC7B,2BACA,GAAG,WAAW,IAAI,eACnB;AAED,OAAM,yBACJ,wBACA,6BACA,cACD;AAED,QAAO;EACL,QAAQ;EACR,MAAM;EACP;;AAGH,MAAM,2BAA2B,OAC/B,kBACA,YACA,kBACkB;AAKlB,OAAM,MAHM,QAAQ,iBAAiB,EAGpB,EAAE,WAAW,MAAM,CAAC;CAErC,MAAM,YAAY,QAAQ,iBAAiB;AAK3C,KAAI,CAJuB,cAAc,QAAQ,eAAe,KAC7D,gBAAc,QAAQC,YAAU,CAClC,CAEuB,SAAS,UAAU,CACzC,OAAM,IAAI,MACR,2BAA2B,UAAU,UAAU,mBAChD;AAGH,KAAI,cAAc,SAAS;AAIzB,QAAM,UAAU,kBAAkB,GAHX,KAAK,UAAU,YAAY,MAAM,EAAE,CAGN,IAAI;AAExD;;AAGF,OAAM,YAAY,kBAAkB,YAAY,cAAc;AAI9D,KAAI;AAKF,QAAM,GAJe,KACnB,cAAc,QAAQ,UACtB,yBACD,EACsB,EAAE,WAAW,MAAM,CAAC;SACrC"}
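The `sourcesContent` embedded in the map above shows the new cache-invalidation step in `writeContentDeclaration`: after a JS/TS content declaration is rewritten, the `intlayer-prepared.lock` sentinel under the configured `cacheDir` is removed so the next build re-prepares Intlayer. A minimal standalone sketch of that pattern follows; the `configuration` object and cache path are assumptions for illustration only.

```ts
// Minimal sketch of the sentinel-removal pattern shown in the embedded source.
// The `configuration` shape and cacheDir value below are assumptions for illustration.
import { rm } from 'node:fs/promises';
import { join } from 'node:path';

const configuration = {
  content: { cacheDir: './.intlayer/cache' }, // hypothetical cache directory
};

// Deleting the sentinel forces Intlayer to re-prepare dictionaries on the next build.
// Errors (for example, the sentinel not existing yet) are deliberately ignored,
// mirroring the empty catch block in the source.
try {
  const sentinelPath = join(
    configuration.content.cacheDir,
    'intlayer-prepared.lock'
  );
  await rm(sentinelPath, { recursive: true });
} catch {}
```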
@@ -1,5 +1,5 @@
  import { MergedDictionaryOutput } from "./writeMergedDictionary.js";
- import * as _intlayer_types0 from "@intlayer/types";
+ import * as _intlayer_types7 from "@intlayer/types";
  import { Dictionary, Locale } from "@intlayer/types";
 
  //#region src/buildIntlayerDictionary/writeDynamicDictionary.d.ts
@@ -12,7 +12,7 @@ type LocalizedDictionaryOutput = Record<string, LocalizedDictionaryResult>;
  /**
  * This function generates the content of the dictionary list file
  */
- declare const generateDictionaryEntryPoint: (localizedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?: _intlayer_types0.IntlayerConfig) => string;
+ declare const generateDictionaryEntryPoint: (localizedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?: _intlayer_types7.IntlayerConfig) => string;
  /**
  * Write the localized dictionaries to the dictionariesDir
  * @param mergedDictionaries - The merged dictionaries
@@ -29,7 +29,7 @@ declare const generateDictionaryEntryPoint: (localizedDictionariesPathsRecord: L
  * // { key: 'home', content: { ... } },
  * ```
  */
- declare const writeDynamicDictionary: (mergedDictionaries: MergedDictionaryOutput, configuration?: _intlayer_types0.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<LocalizedDictionaryOutput>;
+ declare const writeDynamicDictionary: (mergedDictionaries: MergedDictionaryOutput, configuration?: _intlayer_types7.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<LocalizedDictionaryOutput>;
  //#endregion
  export { DictionaryResult, LocalizedDictionaryOutput, LocalizedDictionaryResult, generateDictionaryEntryPoint, writeDynamicDictionary };
  //# sourceMappingURL=writeDynamicDictionary.d.ts.map
@@ -1,11 +1,11 @@
  import { LocalizedDictionaryOutput, LocalizedDictionaryResult } from "./writeDynamicDictionary.js";
- import * as _intlayer_types3 from "@intlayer/types";
+ import * as _intlayer_types5 from "@intlayer/types";
 
  //#region src/buildIntlayerDictionary/writeFetchDictionary.d.ts
  /**
  * This function generates the content of the dictionary list file
  */
- declare const generateDictionaryEntryPoint: (localedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?: _intlayer_types3.IntlayerConfig) => string;
+ declare const generateDictionaryEntryPoint: (localedDictionariesPathsRecord: LocalizedDictionaryResult, format?: "cjs" | "esm", configuration?: _intlayer_types5.IntlayerConfig) => string;
  /**
  * Write the localized dictionaries to the dictionariesDir
  * @param mergedDictionaries - The merged dictionaries
@@ -22,7 +22,7 @@ declare const generateDictionaryEntryPoint: (localedDictionariesPathsRecord: Loc
  * // { key: 'home', content: { ... } },
  * ```
  */
- declare const writeFetchDictionary: (dynamicDictionaries: LocalizedDictionaryOutput, configuration?: _intlayer_types3.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<LocalizedDictionaryOutput>;
+ declare const writeFetchDictionary: (dynamicDictionaries: LocalizedDictionaryOutput, configuration?: _intlayer_types5.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<LocalizedDictionaryOutput>;
  //#endregion
  export { generateDictionaryEntryPoint, writeFetchDictionary };
  //# sourceMappingURL=writeFetchDictionary.d.ts.map
@@ -1,5 +1,5 @@
  import { UnmergedDictionaryOutput } from "./writeUnmergedDictionary.js";
- import * as _intlayer_types2 from "@intlayer/types";
+ import * as _intlayer_types3 from "@intlayer/types";
  import { Dictionary } from "@intlayer/types";
 
  //#region src/buildIntlayerDictionary/writeMergedDictionary.d.ts
@@ -24,7 +24,7 @@ type MergedDictionaryOutput = Record<string, MergedDictionaryResult>;
  * // { key: 'home', content: { ... } },
  * ```
  */
- declare const writeMergedDictionaries: (groupedDictionaries: UnmergedDictionaryOutput, configuration?: _intlayer_types2.IntlayerConfig) => Promise<MergedDictionaryOutput>;
+ declare const writeMergedDictionaries: (groupedDictionaries: UnmergedDictionaryOutput, configuration?: _intlayer_types3.IntlayerConfig) => Promise<MergedDictionaryOutput>;
  //#endregion
  export { MergedDictionaryOutput, MergedDictionaryResult, writeMergedDictionaries };
  //# sourceMappingURL=writeMergedDictionary.d.ts.map
@@ -1,4 +1,4 @@
- import * as _intlayer_types7 from "@intlayer/types";
+ import * as _intlayer_types2 from "@intlayer/types";
  import { Dictionary } from "@intlayer/types";
 
  //#region src/buildIntlayerDictionary/writeRemoteDictionary.d.ts
@@ -23,7 +23,7 @@ type RemoteDictionaryOutput = Record<string, RemoteDictionaryResult>;
  * // { key: 'home', content: { ... } },
  * ```
  */
- declare const writeRemoteDictionary: (remoteDictionaries: Dictionary[], configuration?: _intlayer_types7.IntlayerConfig) => Promise<RemoteDictionaryOutput>;
+ declare const writeRemoteDictionary: (remoteDictionaries: Dictionary[], configuration?: _intlayer_types2.IntlayerConfig) => Promise<RemoteDictionaryOutput>;
  //#endregion
  export { RemoteDictionaryOutput, RemoteDictionaryResult, writeRemoteDictionary };
  //# sourceMappingURL=writeRemoteDictionary.d.ts.map
@@ -1,10 +1,10 @@
- import * as _intlayer_types5 from "@intlayer/types";
+ import * as _intlayer_types0 from "@intlayer/types";
 
  //#region src/createDictionaryEntryPoint/createDictionaryEntryPoint.d.ts
  /**
  * This function generates a list of dictionaries in the main directory
  */
- declare const createDictionaryEntryPoint: (configuration?: _intlayer_types5.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<void>;
+ declare const createDictionaryEntryPoint: (configuration?: _intlayer_types0.IntlayerConfig, formats?: ("cjs" | "esm")[]) => Promise<void>;
  //#endregion
  export { createDictionaryEntryPoint };
  //# sourceMappingURL=createDictionaryEntryPoint.d.ts.map
@@ -1,10 +1,10 @@
- import * as _intlayer_types6 from "@intlayer/types";
+ import * as _intlayer_types4 from "@intlayer/types";
 
  //#region src/createDictionaryEntryPoint/generateDictionaryListContent.d.ts
  /**
  * This function generates the content of the dictionary list file
  */
- declare const generateDictionaryListContent: (dictionaries: string[], functionName: string, format?: "cjs" | "esm", configuration?: _intlayer_types6.IntlayerConfig) => string;
+ declare const generateDictionaryListContent: (dictionaries: string[], functionName: string, format?: "cjs" | "esm", configuration?: _intlayer_types4.IntlayerConfig) => string;
  //#endregion
  export { generateDictionaryListContent };
  //# sourceMappingURL=generateDictionaryListContent.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"generateDictionaryListContent.d.ts","names":[],"sources":["../../../src/createDictionaryEntryPoint/generateDictionaryListContent.ts"],"sourcesContent":[],"mappings":";;;;;;AAOa,cAAA,6BA+CZ,EAAA,CAAA,YA3CC,EAAA,MAAA,EAAA,EAAA,YAAkC,EAAA,MAAA,EAAA,MAAA,CAAA,EAAA,KAAA,GAAA,KAAA,EAAA,aAAA,CAAA,EA2CnC,gBAAA,CA3CC,cAAkC,EAAA,GAAA,MAAA"}
+ {"version":3,"file":"generateDictionaryListContent.d.ts","names":[],"sources":["../../../src/createDictionaryEntryPoint/generateDictionaryListContent.ts"],"sourcesContent":[],"mappings":";;;;;;AAOa,cAAA,6BA+CZ,EAAA,CAAA,YAAA,EA3CC,MAAA,EAAA,EAAA,YAAkC,EAAA,MAAA,EAAA,MAAA,CAAA,EAAA,KAAA,GAAA,KAAA,EAAA,aAAA,CAAA,EA2CnC,gBAAA,CA3CC,cAAkC,EAAA,GAAA,MAAA"}
@@ -28,7 +28,7 @@ import { DiffMode, ListGitFilesOptions, ListGitLinesOptions, listGitFiles, listG
  import { prepareIntlayer } from "./prepareIntlayer.js";
  import { reduceDictionaryContent } from "./reduceDictionaryContent/reduceDictionaryContent.js";
  import { extractDictionaryKey } from "./transformFiles/extractDictionaryKey.js";
- import { PackageName, extractIntlayer, transformFiles } from "./transformFiles/transformFiles.js";
+ import { ATTRIBUTES_TO_EXTRACT, PackageName, extractIntlayer, generateKey, shouldExtract, transformFiles } from "./transformFiles/transformFiles.js";
  import "./transformFiles/index.js";
  import { JSONObject, JsonChunk, assembleJSON, chunkJSON, reconstructFromSingleChunk } from "./utils/chunkJSON.js";
  import { formatLocale, formatPath } from "./utils/formatter.js";
@@ -52,4 +52,4 @@ import { writeContentDeclaration } from "./writeContentDeclaration/writeContentD
  import { writeJSFile } from "./writeContentDeclaration/writeJSFile.js";
  import "./writeContentDeclaration/index.js";
  import { detectFormatCommand } from "./writeContentDeclaration/detectFormatCommand.js";
- export { type DictionaryStatus, type DiffMode, type Extension, type Format, type JSONObject, type JsonChunk, type ListGitFilesOptions, type ListGitLinesOptions, type PackageName, type ParallelHandle, assembleJSON, buildAndWatchIntlayer, buildDictionary, chunkJSON, cleanOutputDir, createDictionaryEntryPoint, createModuleAugmentation, createTypes, detectExportedComponentName, detectFormatCommand, extractDictionaryKey, extractIntlayer, fetchDistantDictionaries, formatLocale, formatPath, generateDictionaryListContent, getBuiltDictionariesPath, getBuiltDynamicDictionariesPath, getBuiltFetchDictionariesPath, getBuiltRemoteDictionariesPath, getBuiltUnmergedDictionariesPath, getChunk, getContentDeclarationFileTemplate, getExtensionFromFormat, getFileHash, getFormatFromExtension, getGlobalLimiter, getTaskLimiter, handleAdditionalContentDeclarationFile, handleContentDeclarationFileChange, handleUnlinkedContentDeclarationFile, isInvalidDictionary, listDictionaries, listDictionariesWithStats, listGitFiles, listGitLines, loadContentDeclarations, loadDictionaries, loadLocalDictionaries, loadRemoteDictionaries, pLimit, parallelize, parallelizeGlobal, prepareIntlayer, reconstructFromSingleChunk, reduceDictionaryContent, reduceObjectFormat, resolveObjectPromises, runOnce, runParallel, sortAlphabetically, splitTextByLines, transformFiles, transformJSFile, verifyIdenticObjectFormat, watch, writeContentDeclaration, writeJSFile };
+ export { ATTRIBUTES_TO_EXTRACT, type DictionaryStatus, type DiffMode, type Extension, type Format, type JSONObject, type JsonChunk, type ListGitFilesOptions, type ListGitLinesOptions, type PackageName, type ParallelHandle, assembleJSON, buildAndWatchIntlayer, buildDictionary, chunkJSON, cleanOutputDir, createDictionaryEntryPoint, createModuleAugmentation, createTypes, detectExportedComponentName, detectFormatCommand, extractDictionaryKey, extractIntlayer, fetchDistantDictionaries, formatLocale, formatPath, generateDictionaryListContent, generateKey, getBuiltDictionariesPath, getBuiltDynamicDictionariesPath, getBuiltFetchDictionariesPath, getBuiltRemoteDictionariesPath, getBuiltUnmergedDictionariesPath, getChunk, getContentDeclarationFileTemplate, getExtensionFromFormat, getFileHash, getFormatFromExtension, getGlobalLimiter, getTaskLimiter, handleAdditionalContentDeclarationFile, handleContentDeclarationFileChange, handleUnlinkedContentDeclarationFile, isInvalidDictionary, listDictionaries, listDictionariesWithStats, listGitFiles, listGitLines, loadContentDeclarations, loadDictionaries, loadLocalDictionaries, loadRemoteDictionaries, pLimit, parallelize, parallelizeGlobal, prepareIntlayer, reconstructFromSingleChunk, reduceDictionaryContent, reduceObjectFormat, resolveObjectPromises, runOnce, runParallel, shouldExtract, sortAlphabetically, splitTextByLines, transformFiles, transformJSFile, verifyIdenticObjectFormat, watch, writeContentDeclaration, writeJSFile };
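With the `index.d.ts` change above, the extraction helpers become part of the package's root exports alongside `transformFiles`. A minimal consumer-side sketch, assuming the runtime re-exports match these declarations; the sample strings are illustrative only.

```ts
// Sketch only: assumes the runtime entry point re-exports match the declarations above.
import {
  ATTRIBUTES_TO_EXTRACT,
  generateKey,
  shouldExtract,
} from '@intlayer/chokidar';

// Hypothetical strings, used purely for illustration.
const candidates = ['Welcome back', 'btn-primary', 'Submit your order'];

const existingKeys = new Set<string>();

for (const text of candidates) {
  // shouldExtract reports whether a string is worth extracting for localization.
  if (shouldExtract(text)) {
    // generateKey derives a dictionary key that is unique against existingKeys;
    // whether it also registers the key is not visible from the declaration,
    // so we add it defensively here.
    const key = generateKey(text, existingKeys);
    existingKeys.add(key);
    console.log(key, '=>', text);
  }
}

// Attributes that should be extracted for localization, per the doc comment above.
console.log('Extractable attributes:', ATTRIBUTES_TO_EXTRACT);
```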
@@ -1,11 +1,11 @@
  import { DictionariesStatus } from "./loadDictionaries.js";
- import * as _intlayer_types8 from "@intlayer/types";
+ import * as _intlayer_types1 from "@intlayer/types";
  import { Dictionary } from "@intlayer/types";
  import { DictionaryAPI } from "@intlayer/backend";
 
  //#region src/loadDictionaries/loadRemoteDictionaries.d.ts
  declare const formatDistantDictionaries: (dictionaries: (DictionaryAPI | Dictionary)[]) => Dictionary[];
- declare const loadRemoteDictionaries: (configuration?: _intlayer_types8.IntlayerConfig, onStatusUpdate?: (status: DictionariesStatus[]) => void, options?: {
+ declare const loadRemoteDictionaries: (configuration?: _intlayer_types1.IntlayerConfig, onStatusUpdate?: (status: DictionariesStatus[]) => void, options?: {
  onStartRemoteCheck?: () => void;
  onStopRemoteCheck?: () => void;
  onError?: (error: Error) => void;
@@ -1,3 +1,3 @@
  import { extractDictionaryKey } from "./extractDictionaryKey.js";
- import { ExtractIntlayerOptions, PackageName, extractIntlayer, transformFiles } from "./transformFiles.js";
- export { ExtractIntlayerOptions, PackageName, extractDictionaryKey, extractIntlayer, transformFiles };
+ import { ATTRIBUTES_TO_EXTRACT, ExtractIntlayerOptions, PackageName, extractIntlayer, generateKey, shouldExtract, transformFiles } from "./transformFiles.js";
+ export { ATTRIBUTES_TO_EXTRACT, ExtractIntlayerOptions, PackageName, extractDictionaryKey, extractIntlayer, generateKey, shouldExtract, transformFiles };
@@ -2,6 +2,19 @@ import { GetConfigurationOptions } from "@intlayer/config";
  import { Project } from "ts-morph";
 
  //#region src/transformFiles/transformFiles.d.ts
+
+ /**
+ * Attributes that should be extracted for localization
+ */
+ declare const ATTRIBUTES_TO_EXTRACT: string[];
+ /**
+ * Default function to determine if a string should be extracted for localization
+ */
+ declare const shouldExtract: (text: string) => boolean;
+ /**
+ * Generate a unique key from text for use as a dictionary key
+ */
+ declare const generateKey: (text: string, existingKeys: Set<string>) => string;
  type PackageName = 'next-intlayer' | 'react-intlayer' | 'vue-intlayer' | 'svelte-intlayer' | 'preact-intlayer' | 'solid-intlayer' | 'angular-intlayer' | 'express-intlayer';
  type ExtractIntlayerOptions = {
  configOptions?: GetConfigurationOptions;
@@ -12,5 +25,5 @@ type ExtractIntlayerOptions = {
  declare const extractIntlayer: (filePath: string, packageName: PackageName, options?: ExtractIntlayerOptions, project?: Project) => Promise<void>;
  declare const transformFiles: (filePaths: string[], packageName: PackageName, options?: ExtractIntlayerOptions) => Promise<void>;
  //#endregion
- export { ExtractIntlayerOptions, PackageName, extractIntlayer, transformFiles };
+ export { ATTRIBUTES_TO_EXTRACT, ExtractIntlayerOptions, PackageName, extractIntlayer, generateKey, shouldExtract, transformFiles };
  //# sourceMappingURL=transformFiles.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"transformFiles.d.ts","names":[],"sources":["../../../src/transformFiles/transformFiles.ts"],"sourcesContent":[],"mappings":";;;;KAkRY,WAAA;KAUA,sBAAA;EAVA,aAAA,CAAW,EAWL,uBAXK;EAUX,SAAA,CAAA,EAAA,MAAA;EAOC,QAAA,CAAA,EAAA,OAkIZ;EAhIc,eAAA,CAAA,EAAA,OAAA;CACH;AACA,cAJC,eAID,EAAA,CAAA,QAAA,EAAA,MAAA,EAAA,WAAA,EAFG,WAEH,EAAA,OAAA,CAAA,EADA,sBACA,EAAA,OAAA,CAAA,EAAA,OAAA,EAAA,GAAO,OAAP,CAAA,IAAA,CAAA;AAAO,cAgIN,cAhIM,EAAA,CAAA,SAAA,EAAA,MAAA,EAAA,EAAA,WAAA,EAkIJ,WAlII,EAAA,OAAA,CAAA,EAmIP,sBAnIO,EAAA,GAmIe,OAnIf,CAAA,IAAA,CAAA"}
+ {"version":3,"file":"transformFiles.d.ts","names":[],"sources":["../../../src/transformFiles/transformFiles.ts"],"sourcesContent":[],"mappings":";;;;;;;AAwBA;AAWa,cAXA,qBAoBZ,EAAA,MAAA,EAAA;AAKD;AA6OA;AAUA;AAOa,cA5QA,aA8YZ,EAAA,CAAA,IAAA,EAAA,MAAA,EAAA,GAAA,OAAA;;;;AA9HkB,cAlQN,WAkQM,EAAA,CAAA,IAAA,EAAA,MAAA,EAAA,YAAA,EAhQH,GAgQG,CAAA,MAAA,CAAA,EAAA,GAAA,MAAA;AAAA,KArBP,WAAA,GAqBO,eAAA,GAAA,gBAAA,GAAA,cAAA,GAAA,iBAAA,GAAA,iBAAA,GAAA,gBAAA,GAAA,kBAAA,GAAA,kBAAA;AAgIN,KA3ID,sBAAA,GA8JX;EAjBc,aAAA,CAAA,EA5IG,uBA4IH;EACH,SAAA,CAAA,EAAA,MAAA;EAAsB,QAAA,CAAA,EAAA,OAAA;EAAA,eAAA,CAAA,EAAA,OAAA;;cAvIrB,iDAEE,uBACH,kCACA,YAAO;cAgIN,mDAEE,uBACH,2BAAsB"}
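The `transformFiles.d.ts` hunk above documents the transformer entry points next to the new helpers. A sketch of how they might be invoked, based only on the declared signatures; the file paths and package name are illustrative.

```ts
// Sketch based only on the signatures declared above; file paths and the package
// name are illustrative. The optional ExtractIntlayerOptions argument is omitted.
import {
  extractIntlayer,
  transformFiles,
  type PackageName,
} from '@intlayer/chokidar';

const packageName: PackageName = 'react-intlayer';

// Transform a batch of source files in one call...
await transformFiles(['./src/components/Hero.tsx'], packageName);

// ...or a single file; per the declaration, a shared ts-morph Project can also
// be passed as the fourth argument of extractIntlayer.
await extractIntlayer('./src/components/Footer.tsx', packageName);
```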
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@intlayer/chokidar",
- "version": "7.3.0",
+ "version": "7.3.2-canary.0",
  "private": false,
  "description": "Uses chokidar to scan and build Intlayer declaration files into dictionaries based on Intlayer configuration.",
  "keywords": [
@@ -57,6 +57,7 @@
  "./package.json"
  ],
  "scripts": {
+ "_prepublish": "cp -f ../../../README.md ./README.md",
  "build": "tsdown --config tsdown.config.ts",
  "build:ci": "tsdown --config tsdown.config.ts",
  "clean": "rimraf ./dist .turbo",
@@ -65,7 +66,7 @@
  "format:fix": "biome format --write .",
  "lint": "biome lint .",
  "lint:fix": "biome lint --write .",
- "prepublish": "cp -f ../../../README.md ./README.md",
+ "prepublish": "echo prepublish temporally disabled to avoid rewrite readme",
  "publish": "bun publish || true",
  "publish:canary": "bun publish --access public --tag canary || true",
  "publish:latest": "bun publish --access public --tag latest || true",
@@ -74,13 +75,13 @@
  "typecheck": "tsc --noEmit --project tsconfig.types.json"
  },
  "dependencies": {
- "@intlayer/api": "7.3.0",
- "@intlayer/config": "7.3.0",
- "@intlayer/core": "7.3.0",
- "@intlayer/dictionaries-entry": "7.3.0",
- "@intlayer/remote-dictionaries-entry": "7.3.0",
- "@intlayer/types": "7.3.0",
- "@intlayer/unmerged-dictionaries-entry": "7.3.0",
+ "@intlayer/api": "7.3.2-canary.0",
+ "@intlayer/config": "7.3.2-canary.0",
+ "@intlayer/core": "7.3.2-canary.0",
+ "@intlayer/dictionaries-entry": "7.3.2-canary.0",
+ "@intlayer/remote-dictionaries-entry": "7.3.2-canary.0",
+ "@intlayer/types": "7.3.2-canary.0",
+ "@intlayer/unmerged-dictionaries-entry": "7.3.2-canary.0",
  "chokidar": "3.6.0",
  "crypto-js": "4.2.0",
  "deepmerge": "4.3.1",
@@ -100,8 +101,8 @@
  "vitest": "4.0.13"
  },
  "peerDependencies": {
- "@intlayer/svelte-transformer": "7.3.0",
- "@intlayer/vue-transformer": "7.3.0"
+ "@intlayer/svelte-transformer": "7.3.2-canary.0",
+ "@intlayer/vue-transformer": "7.3.2-canary.0"
  },
  "peerDependenciesMeta": {
  "@intlayer/svelte-transformer": {