@intlayer/chokidar 2.0.0 → 2.0.2
This diff compares publicly available versions of the package as published to the supported registries. It is provided for informational purposes only and reflects the changes between package versions exactly as they appear in their respective public registries.
- package/dist/cjs/chokidar/watcher.cjs +11 -5
- package/dist/cjs/chokidar/watcher.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertContentDeclarationInto18nDictionaries.cjs +4 -2
- package/dist/cjs/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertContentDeclarationInto18nDictionaries.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.cjs +3 -3
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.d.ts +2 -2
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.d.ts +2 -2
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.cjs +8 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.d.ts +3 -3
- package/dist/cjs/transpiler/dictionary_to_type/createType.cjs +42 -76
- package/dist/cjs/transpiler/dictionary_to_type/createType.cjs.map +1 -1
- package/dist/cjs/transpiler/dictionary_to_type/createType.d.ts +3 -38
- package/dist/cjs/transpiler/dictionary_to_type/index.d.ts +1 -2
- package/dist/esm/chokidar/watcher.mjs +11 -5
- package/dist/esm/chokidar/watcher.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertContentDeclarationInto18nDictionaries.mjs +4 -2
- package/dist/esm/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertContentDeclarationInto18nDictionaries.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.mjs +3 -3
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.d.mts +2 -2
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.d.mts +2 -2
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.d.mts +3 -3
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.mjs +8 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.mjs.map +1 -1
- package/dist/esm/transpiler/dictionary_to_type/createType.d.mts +3 -38
- package/dist/esm/transpiler/dictionary_to_type/createType.mjs +44 -75
- package/dist/esm/transpiler/dictionary_to_type/createType.mjs.map +1 -1
- package/dist/esm/transpiler/dictionary_to_type/index.d.mts +1 -2
- package/package.json +12 -13
- package/src/chokidar/index.ts +0 -1
- package/src/chokidar/watcher.ts +0 -75
- package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts +0 -95
- package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertContentDeclarationInto18nDictionaries.ts +0 -91
- package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertPluralsValues.ts +0 -22
- package/src/transpiler/declaration_file_to_dictionary/i18n_dictionary/index.ts +0 -1
- package/src/transpiler/declaration_file_to_dictionary/index.ts +0 -21
- package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts +0 -70
- package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts +0 -60
- package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/index.ts +0 -3
- package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts +0 -114
- package/src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.ts +0 -65
- package/src/transpiler/dictionary_to_main/createDictionaryList.ts +0 -65
- package/src/transpiler/dictionary_to_main/index.ts +0 -1
- package/src/transpiler/dictionary_to_type/createModuleAugmentation.ts +0 -102
- package/src/transpiler/dictionary_to_type/createType.ts +0 -162
- package/src/transpiler/dictionary_to_type/index.ts +0 -2
- package/src/utils.ts +0 -26
@@ -52,12 +52,15 @@ const watch = (options) => {
  ...options
  }).on("ready", async () => {
  const dictionariesPaths = await (0, import_declaration_file_to_dictionary.buildDictionary)(files);
- console.info("Building
- (0, import_dictionary_to_type.createTypes)(dictionariesPaths);
+ console.info("Building TypeScript types...");
+ await (0, import_dictionary_to_type.createTypes)(dictionariesPaths);
+ console.info("TypeScript types built");
  console.info("Building Intlayer module augmentation...");
  (0, import_dictionary_to_type.createModuleAugmentation)();
+ console.info("Intlayer module augmentation built");
  console.info("Building Intlayer dictionary list...");
  (0, import_createDictionaryList.createDictionaryList)();
+ console.info("Intlayer dictionary list built");
  const relativeDictionariesPath = dictionariesPaths.map(
  (dictionary) => (0, import_path.relative)(baseDir, dictionary)
  );
@@ -68,16 +71,19 @@ const watch = (options) => {
  console.info("Additional file detected: ", (0, import_path.relative)(baseDir, filePath));
  const dictionaries = await (0, import_declaration_file_to_dictionary.buildDictionary)(filePath);
  console.info("Building TypeScript types...");
- (0, import_dictionary_to_type.createTypes)(dictionaries);
- console.info("
+ await (0, import_dictionary_to_type.createTypes)(dictionaries);
+ console.info("TypeScript types built");
+ console.info("Building Intlayer module augmentation...");
  (0, import_dictionary_to_type.createModuleAugmentation)();
+ console.info("Intlayer module augmentation built");
  console.info("Building main...");
  (0, import_createDictionaryList.createDictionaryList)();
  }).on("change", async (filePath) => {
  console.info("Change detected: ", (0, import_path.relative)(baseDir, filePath));
  const dictionaries = await (0, import_declaration_file_to_dictionary.buildDictionary)(filePath);
  console.info("Building TypeScript types...");
- (0, import_dictionary_to_type.createTypes)(dictionaries);
+ await (0, import_dictionary_to_type.createTypes)(dictionaries);
+ console.info("TypeScript types built");
  }).on("error", (error) => {
  console.error("Watcher error:", error);
  });
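Taken together, the watcher change in 2.0.2 is behavioral as well as cosmetic: createTypes is now asynchronous and is awaited in every chokidar handler, and each build step logs a completion message. The rewritten watcher.ts is embedded in full in the updated source map below; condensed to the ready handler, the new flow reads roughly as follows (a sketch assembled from that embedded source, not a verbatim file from the package):

```ts
import { relative } from 'path';
import { getConfiguration } from '@intlayer/config';
import chokidar, { type WatchOptions } from 'chokidar';
import { sync } from 'glob';
import { buildDictionary } from '../transpiler/declaration_file_to_dictionary/index';
import { createDictionaryList } from '../transpiler/dictionary_to_main/createDictionaryList';
import { createTypes, createModuleAugmentation } from '../transpiler/dictionary_to_type/index';

export const watch = (options?: WatchOptions) => {
  const { content } = getConfiguration({ verbose: true });
  const { watchedFilesPatternWithPath, baseDir } = content;
  const files: string[] = sync(watchedFilesPatternWithPath);

  return chokidar
    .watch(watchedFilesPatternWithPath, { persistent: true, ignoreInitial: true, ...options })
    .on('ready', async () => {
      const dictionariesPaths = await buildDictionary(files);

      console.info('Building TypeScript types...');
      await createTypes(dictionariesPaths); // now awaited: types exist before the next step runs
      console.info('TypeScript types built');

      console.info('Building Intlayer module augmentation...');
      createModuleAugmentation();
      console.info('Intlayer module augmentation built');

      console.info('Building Intlayer dictionary list...');
      createDictionaryList();
      console.info('Intlayer dictionary list built');

      console.info('Dictionaries:', dictionariesPaths.map((d) => relative(baseDir, d)));
    });
};
```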
@@ -1 +1 @@
-
{"version":3,"sources":["../../../src/chokidar/watcher.ts"],"sourcesContent":["import { relative } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport chokidar, { type WatchOptions } from 'chokidar';\nimport { sync } from 'glob';\nimport { buildDictionary } from '../transpiler/declaration_file_to_dictionary/index';\nimport { createDictionaryList } from '../transpiler/dictionary_to_main/createDictionaryList';\nimport {\n createTypes,\n createModuleAugmentation,\n} from '../transpiler/dictionary_to_type/index';\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const { content } = getConfiguration({\n verbose: true,\n });\n\n const { watchedFilesPatternWithPath, baseDir } = content;\n\n const files: string[] = sync(watchedFilesPatternWithPath);\n\n return chokidar\n .watch(watchedFilesPatternWithPath, {\n persistent: true, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n ...options,\n })\n .on('ready', async () => {\n const dictionariesPaths = await buildDictionary(files);\n\n console.info('Building
+
{"version":3,"sources":["../../../src/chokidar/watcher.ts"],"sourcesContent":["import { relative } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport chokidar, { type WatchOptions } from 'chokidar';\nimport { sync } from 'glob';\nimport { buildDictionary } from '../transpiler/declaration_file_to_dictionary/index';\nimport { createDictionaryList } from '../transpiler/dictionary_to_main/createDictionaryList';\nimport {\n createTypes,\n createModuleAugmentation,\n} from '../transpiler/dictionary_to_type/index';\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const { content } = getConfiguration({\n verbose: true,\n });\n\n const { watchedFilesPatternWithPath, baseDir } = content;\n\n const files: string[] = sync(watchedFilesPatternWithPath);\n\n return chokidar\n .watch(watchedFilesPatternWithPath, {\n persistent: true, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n ...options,\n })\n .on('ready', async () => {\n const dictionariesPaths = await buildDictionary(files);\n\n console.info('Building TypeScript types...');\n await createTypes(dictionariesPaths);\n console.info('TypeScript types built');\n\n console.info('Building Intlayer module augmentation...');\n createModuleAugmentation();\n console.info('Intlayer module augmentation built');\n\n console.info('Building Intlayer dictionary list...');\n createDictionaryList();\n console.info('Intlayer dictionary list built');\n\n const relativeDictionariesPath = dictionariesPaths.map((dictionary) =>\n relative(baseDir, dictionary)\n );\n\n console.info('Dictionaries:', relativeDictionariesPath);\n })\n .on('unlink', (filePath) => {\n // Process the file with the functionToRun\n console.info('Removed file detected: ', relative(baseDir, filePath));\n })\n .on('add', async (filePath) => {\n // Process the file with the functionToRun\n console.info('Additional file detected: ', relative(baseDir, filePath));\n const dictionaries = await buildDictionary(filePath);\n\n console.info('Building TypeScript types...');\n await createTypes(dictionaries);\n console.info('TypeScript types built');\n\n console.info('Building Intlayer module augmentation...');\n createModuleAugmentation();\n console.info('Intlayer module augmentation built');\n\n console.info('Building main...');\n createDictionaryList();\n })\n .on('change', async (filePath) => {\n // Process the file with the functionToRun\n console.info('Change detected: ', relative(baseDir, filePath));\n const dictionaries = await buildDictionary(filePath);\n\n console.info('Building TypeScript types...');\n await createTypes(dictionaries);\n console.info('TypeScript types built');\n })\n .on('error', (error) => {\n console.error('Watcher error:', error);\n 
});\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAyB;AACzB,oBAAiC;AACjC,sBAA4C;AAC5C,kBAAqB;AACrB,4CAAgC;AAChC,kCAAqC;AACrC,gCAGO;AAGA,MAAM,QAAQ,CAAC,YAA2B;AAC/C,QAAM,EAAE,QAAQ,QAAI,gCAAiB;AAAA,IACnC,SAAS;AAAA,EACX,CAAC;AAED,QAAM,EAAE,6BAA6B,QAAQ,IAAI;AAEjD,QAAM,YAAkB,kBAAK,2BAA2B;AAExD,SAAO,gBAAAA,QACJ,MAAM,6BAA6B;AAAA,IAClC,YAAY;AAAA;AAAA,IACZ,eAAe;AAAA;AAAA,IACf,GAAG;AAAA,EACL,CAAC,EACA,GAAG,SAAS,YAAY;AACvB,UAAM,oBAAoB,UAAM,uDAAgB,KAAK;AAErD,YAAQ,KAAK,8BAA8B;AAC3C,cAAM,uCAAY,iBAAiB;AACnC,YAAQ,KAAK,wBAAwB;AAErC,YAAQ,KAAK,0CAA0C;AACvD,4DAAyB;AACzB,YAAQ,KAAK,oCAAoC;AAEjD,YAAQ,KAAK,sCAAsC;AACnD,0DAAqB;AACrB,YAAQ,KAAK,gCAAgC;AAE7C,UAAM,2BAA2B,kBAAkB;AAAA,MAAI,CAAC,mBACtD,sBAAS,SAAS,UAAU;AAAA,IAC9B;AAEA,YAAQ,KAAK,iBAAiB,wBAAwB;AAAA,EACxD,CAAC,EACA,GAAG,UAAU,CAAC,aAAa;AAE1B,YAAQ,KAAK,+BAA2B,sBAAS,SAAS,QAAQ,CAAC;AAAA,EACrE,CAAC,EACA,GAAG,OAAO,OAAO,aAAa;AAE7B,YAAQ,KAAK,kCAA8B,sBAAS,SAAS,QAAQ,CAAC;AACtE,UAAM,eAAe,UAAM,uDAAgB,QAAQ;AAEnD,YAAQ,KAAK,8BAA8B;AAC3C,cAAM,uCAAY,YAAY;AAC9B,YAAQ,KAAK,wBAAwB;AAErC,YAAQ,KAAK,0CAA0C;AACvD,4DAAyB;AACzB,YAAQ,KAAK,oCAAoC;AAEjD,YAAQ,KAAK,kBAAkB;AAC/B,0DAAqB;AAAA,EACvB,CAAC,EACA,GAAG,UAAU,OAAO,aAAa;AAEhC,YAAQ,KAAK,yBAAqB,sBAAS,SAAS,QAAQ,CAAC;AAC7D,UAAM,eAAe,UAAM,uDAAgB,QAAQ;AAEnD,YAAQ,KAAK,8BAA8B;AAC3C,cAAM,uCAAY,YAAY;AAC9B,YAAQ,KAAK,wBAAwB;AAAA,EACvC,CAAC,EACA,GAAG,SAAS,CAAC,UAAU;AACtB,YAAQ,MAAM,kBAAkB,KAAK;AAAA,EACvC,CAAC;AACL;","names":["chokidar"]}
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type {
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/buildI18nDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { DeclarationContent } from '@intlayer/core';\nimport {\n processContentDeclaration,\n extractObjectsWithId,\n} from '../intlayer_dictionary/index';\nimport {\n type I18nDictionariesOutput,\n createI18nDictionaries,\n} from './convertContentDeclarationInto18nDictionaries';\n\nconst { content } = getConfiguration();\nconst { i18nDictionariesDir } = content;\n\ntype DictionariesDeclaration = Record<string, I18nDictionariesOutput>;\n\n/**\n * This function writes the dictionaries to the file system\n */\nconst writeDictionary = async (\n dictionariesDeclaration: DictionariesDeclaration\n) => {\n const resultDictionariesPaths: string[] = [];\n\n for (const [nameSpace, localContent] of Object.entries(\n dictionariesDeclaration\n )) {\n for await (const [locale, content] of Object.entries(localContent)) {\n const contentString = JSON.stringify(content);\n\n const outputFileName = `${nameSpace}.json`;\n const resultDirPath = resolve(i18nDictionariesDir, locale);\n const resultFilePath = resolve(resultDirPath, outputFileName);\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resultDirPath, { recursive: true });\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile content declaration to i18n dictionaries\n */\nexport const buildI18nDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: DeclarationContent[] = extractObjectsWithId(result);\n\n // Create dictionaries for each nested content and format them\n const dictionariesDeclaration: DictionariesDeclaration =\n nestedContent.reduce((acc, content) => {\n const id: string = content.id;\n const i18Content = createI18nDictionaries(content);\n\n return {\n ...acc,\n [id]: i18Content,\n };\n }, {});\n\n // Write the dictionaries to the file system\n const dictionariesPaths: string[] = await writeDictionary(\n dictionariesDeclaration\n );\n\n // Add the paths to the result\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return 
resultDictionariesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAiC;AACjC,kBAAwB;AACxB,oBAAiC;AAEjC,iCAGO;AACP,0DAGO;AAEP,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,oBAAoB,IAAI;AAOhC,MAAM,kBAAkB,OACtB,4BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,aAAW,CAAC,WAAW,YAAY,KAAK,OAAO;AAAA,IAC7C;AAAA,EACF,GAAG;AACD,qBAAiB,CAAC,QAAQA,QAAO,KAAK,OAAO,QAAQ,YAAY,GAAG;AAClE,YAAM,gBAAgB,KAAK,UAAUA,QAAO;AAE5C,YAAM,iBAAiB,GAAG,SAAS;AACnC,YAAM,oBAAgB,qBAAQ,qBAAqB,MAAM;AACzD,YAAM,qBAAiB,qBAAQ,eAAe,cAAc;AAG5D,gBAAM,uBAAM,eAAe,EAAE,WAAW,KAAK,CAAC;AAG9C,gBAAM,2BAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,gBAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,MACxD,CAAC;AAED,8BAAwB,KAAK,cAAc;AAAA,IAC7C;AAAA,EACF;AAEA,SAAO;AACT;AAKO,MAAM,sBAAsB,OACjC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAEA,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,UAAM,sDAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,oBAAsC,iDAAqB,MAAM;AAGvE,UAAM,0BACJ,cAAc,OAAO,CAAC,KAAKA,aAAY;AACrC,YAAM,KAAaA,SAAQ;AAC3B,YAAM,iBAAa,4EAAuBA,QAAO;AAEjD,aAAO;AAAA,QACL,GAAG;AAAA,QACH,CAAC,EAAE,GAAG;AAAA,MACR;AAAA,IACF,GAAG,CAAC,CAAC;AAGP,UAAM,oBAA8B,MAAM;AAAA,MACxC;AAAA,IACF;AAGA,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
@@ -33,14 +33,16 @@ const buildDictionary = (content, locale) => {
  // Translation node
  content && content.nodeType === import_core.NodeType.Translation
  ) {
- const result = content[locale];
+ const result = content[import_core.NodeType.Translation][locale];
  return buildDictionary(result, locale);
  } else if (
  // Translation node
  content && content.nodeType === import_core.NodeType.Enumeration
  ) {
  const plurals = {};
- Object.keys(
+ Object.keys(
+ content[import_core.NodeType.Enumeration]
+ ).forEach((quantity) => {
  const letterNumber = (0, import_convertPluralsValues.convertPluralsValues)(quantity);
  const value = content[quantity];
  plurals[`${letterNumber}_${letterNumber}`] = buildDictionary(
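The lookup change above implies a change in the content node shape: per-locale translations (and per-quantity enumerations) now live under a key named after the node type rather than directly on the node. A minimal sketch of the shape this diff implies; the literal keys 'translation' and the example values are assumptions for illustration, not taken from @intlayer/core:

```ts
// Hypothetical node shape, inferred from the lookups in the hunk above.
type TranslationNode = {
  nodeType: 'translation';
  translation: Record<string, string>; // per-locale values
};

const greeting: TranslationNode = {
  nodeType: 'translation',
  translation: { en: 'Hello', fr: 'Bonjour' },
};

// 2.0.0 read the locale straight off the node:
//   (greeting as Record<string, unknown>)['fr']  -> undefined with this shape
// 2.0.2 goes through the node-type key first:
const value = greeting.translation['fr']; // 'Bonjour'
```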
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertContentDeclarationInto18nDictionaries.ts"],"sourcesContent":["import { getConfiguration, type Locales } from '@intlayer/config';\nimport {\n NodeType,\n type TranslationContent,\n type Content,\n type TypedNode,\n type EnumerationContent,\n} from '@intlayer/core';\nimport { convertPluralsValues } from './convertPluralsValues';\n\ntype Dictionary = Record<string, unknown>;\nexport type I18nDictionariesOutput = Partial<Record<Locales, Dictionary>>;\n\nconst {\n internationalization: { locales },\n} = getConfiguration();\n\nconst isReactNode = (node: Record<string, unknown>): boolean =>\n typeof node?.key !== 'undefined' &&\n typeof node?.props !== 'undefined' &&\n typeof node?.type !== 'undefined';\n\n// Build dictionary for a specific locale\nconst buildDictionary = (content: Dictionary, locale: Locales): unknown => {\n if (\n // Translation node\n content &&\n (content as TypedNode).nodeType === NodeType.Translation\n ) {\n const result = (content as TranslationContent<unknown>)[locale];\n\n return buildDictionary(result as Dictionary, locale);\n } else if (\n // Translation node\n content &&\n (content as TypedNode).nodeType === NodeType.Enumeration\n ) {\n const plurals: Record<string, unknown> = {};\n\n Object.keys(content).forEach((quantity) => {\n const letterNumber = convertPluralsValues(quantity);\n\n const value = (content as EnumerationContent<unknown>)[\n quantity as keyof EnumerationContent<unknown>\n ];\n\n plurals[`${letterNumber}_${letterNumber}`] = buildDictionary(\n value as Dictionary,\n locale\n );\n });\n\n return plurals;\n } else if (\n // React element node\n isReactNode(content as Record<string, unknown>)\n ) {\n return JSON.stringify(content);\n } else if (\n // Nested object\n typeof content === 'object'\n ) {\n const result: Record<string, unknown> = {};\n\n Object.keys(content).forEach((dictionaryValue) => {\n result[dictionaryValue] = buildDictionary(\n content[dictionaryValue] as Dictionary,\n locale\n );\n });\n\n return result;\n }\n\n return content;\n};\n\nexport const createI18nDictionaries = (\n content: Content\n): I18nDictionariesOutput => {\n // Map dictionaries for each locale\n const result: I18nDictionariesOutput = locales.reduce(\n (acc, locale) => ({\n ...acc,\n [locale]: buildDictionary(content, locale),\n }),\n {}\n );\n\n return result;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAA+C;AAC/C,kBAMO;AACP,kCAAqC;AAKrC,MAAM;AAAA,EACJ,sBAAsB,EAAE,QAAQ;AAClC,QAAI,gCAAiB;AAErB,MAAM,cAAc,CAAC,SACnB,OAAO,MAAM,QAAQ,eACrB,OAAO,MAAM,UAAU,eACvB,OAAO,MAAM,SAAS;AAGxB,MAAM,kBAAkB,CAAC,SAAqB,WAA6B;AACzE;AAAA;AAAA,IAEE,WACC,QAAsB,aAAa,qBAAS;AAAA,IAC7C;AACA,UAAM,SAAU,
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18n_dictionary/convertContentDeclarationInto18nDictionaries.ts"],"sourcesContent":["import { getConfiguration, type Locales } from '@intlayer/config';\nimport {\n NodeType,\n type TranslationContent,\n type Content,\n type TypedNode,\n type EnumerationContent,\n} from '@intlayer/core';\nimport { convertPluralsValues } from './convertPluralsValues';\n\ntype Dictionary = Record<string, unknown>;\nexport type I18nDictionariesOutput = Partial<Record<Locales, Dictionary>>;\n\nconst {\n internationalization: { locales },\n} = getConfiguration();\n\nconst isReactNode = (node: Record<string, unknown>): boolean =>\n typeof node?.key !== 'undefined' &&\n typeof node?.props !== 'undefined' &&\n typeof node?.type !== 'undefined';\n\n// Build dictionary for a specific locale\nconst buildDictionary = (content: Dictionary, locale: Locales): unknown => {\n if (\n // Translation node\n content &&\n (content as TypedNode).nodeType === NodeType.Translation\n ) {\n const result = (content as TranslationContent<unknown>)[\n NodeType.Translation\n ][locale];\n\n return buildDictionary(result as Dictionary, locale);\n } else if (\n // Translation node\n content &&\n (content as TypedNode).nodeType === NodeType.Enumeration\n ) {\n const plurals: Record<string, unknown> = {};\n\n Object.keys(\n (content as EnumerationContent<unknown>)[NodeType.Enumeration]\n ).forEach((quantity) => {\n const letterNumber = convertPluralsValues(quantity);\n\n const value = (content as EnumerationContent<unknown>)[\n quantity as keyof EnumerationContent<unknown>\n ];\n\n plurals[`${letterNumber}_${letterNumber}`] = buildDictionary(\n value as Dictionary,\n locale\n );\n });\n\n return plurals;\n } else if (\n // React element node\n isReactNode(content as Record<string, unknown>)\n ) {\n return JSON.stringify(content);\n } else if (\n // Nested object\n typeof content === 'object'\n ) {\n const result: Record<string, unknown> = {};\n\n Object.keys(content).forEach((dictionaryValue) => {\n result[dictionaryValue] = buildDictionary(\n content[dictionaryValue] as Dictionary,\n locale\n );\n });\n\n return result;\n }\n\n return content;\n};\n\nexport const createI18nDictionaries = (\n content: Content\n): I18nDictionariesOutput => {\n // Map dictionaries for each locale\n const result: I18nDictionariesOutput = locales.reduce(\n (acc, locale) => ({\n ...acc,\n [locale]: buildDictionary(content, locale),\n }),\n {}\n );\n\n return 
result;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAA+C;AAC/C,kBAMO;AACP,kCAAqC;AAKrC,MAAM;AAAA,EACJ,sBAAsB,EAAE,QAAQ;AAClC,QAAI,gCAAiB;AAErB,MAAM,cAAc,CAAC,SACnB,OAAO,MAAM,QAAQ,eACrB,OAAO,MAAM,UAAU,eACvB,OAAO,MAAM,SAAS;AAGxB,MAAM,kBAAkB,CAAC,SAAqB,WAA6B;AACzE;AAAA;AAAA,IAEE,WACC,QAAsB,aAAa,qBAAS;AAAA,IAC7C;AACA,UAAM,SAAU,QACd,qBAAS,WACX,EAAE,MAAM;AAER,WAAO,gBAAgB,QAAsB,MAAM;AAAA,EACrD;AAAA;AAAA,IAEE,WACC,QAAsB,aAAa,qBAAS;AAAA,IAC7C;AACA,UAAM,UAAmC,CAAC;AAE1C,WAAO;AAAA,MACJ,QAAwC,qBAAS,WAAW;AAAA,IAC/D,EAAE,QAAQ,CAAC,aAAa;AACtB,YAAM,mBAAe,kDAAqB,QAAQ;AAElD,YAAM,QAAS,QACb,QACF;AAEA,cAAQ,GAAG,YAAY,IAAI,YAAY,EAAE,IAAI;AAAA,QAC3C;AAAA,QACA;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAAA;AAAA,IAEE,YAAY,OAAkC;AAAA,IAC9C;AACA,WAAO,KAAK,UAAU,OAAO;AAAA,EAC/B;AAAA;AAAA,IAEE,OAAO,YAAY;AAAA,IACnB;AACA,UAAM,SAAkC,CAAC;AAEzC,WAAO,KAAK,OAAO,EAAE,QAAQ,CAAC,oBAAoB;AAChD,aAAO,eAAe,IAAI;AAAA,QACxB,QAAQ,eAAe;AAAA,QACvB;AAAA,MACF;AAAA,IACF,CAAC;AAED,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAEO,MAAM,yBAAyB,CACpC,YAC2B;AAE3B,QAAM,SAAiC,QAAQ;AAAA,IAC7C,CAAC,KAAK,YAAY;AAAA,MAChB,GAAG;AAAA,MACH,CAAC,MAAM,GAAG,gBAAgB,SAAS,MAAM;AAAA,IAC3C;AAAA,IACA,CAAC;AAAA,EACH;AAEA,SAAO;AACT;","names":[]}
@@ -30,9 +30,9 @@ const { content } = (0, import_config.getConfiguration)();
  const { dictionariesDir } = content;
  const writeDictionary = async (dictionaries) => {
  const resultDictionariesPaths = [];
- for await (const
- const contentString = JSON.stringify(
- const id =
+ for await (const dictionaryContent of dictionaries) {
+ const contentString = JSON.stringify(dictionaryContent);
+ const id = dictionaryContent.id;
  const outputFileName = `${id}.json`;
  const resultFilePath = (0, import_path.resolve)(dictionariesDir, outputFileName);
  await (0, import_promises.writeFile)(resultFilePath, contentString, "utf8").catch((err) => {
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type {
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/buildIntlayerDictionary.ts"],"sourcesContent":["import { mkdir, writeFile } from 'fs/promises';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { DeclarationContent } from '@intlayer/core';\nimport { extractObjectsWithId } from './extractNestedJSON';\nimport { processContentDeclaration } from './processContentDeclaration';\n\nconst { content } = getConfiguration();\nconst { dictionariesDir } = content;\n\nconst writeDictionary = async (dictionaries: DeclarationContent[]) => {\n const resultDictionariesPaths: string[] = [];\n\n for await (const dictionaryContent of dictionaries) {\n const contentString = JSON.stringify(dictionaryContent);\n\n const id = dictionaryContent.id;\n const outputFileName = `${id}.json`;\n const resultFilePath = resolve(dictionariesDir, outputFileName);\n\n // Create the json file\n await writeFile(resultFilePath, contentString, 'utf8').catch((err) => {\n console.error(`Error creating ${outputFileName}:`, err);\n });\n\n resultDictionariesPaths.push(resultFilePath);\n }\n\n return resultDictionariesPaths;\n};\n\n/**\n * This function transpile the bundled code to to make dictionaries as JSON files\n */\nexport const buildIntlayerDictionary = async (\n contentDeclarationsPaths: string[] | string\n) => {\n const resultDictionariesPaths: string[] = [];\n\n if (typeof contentDeclarationsPaths === 'string') {\n contentDeclarationsPaths = [contentDeclarationsPaths];\n }\n\n // Create the dictionaries folder if it doesn't exist\n await mkdir(resolve(dictionariesDir), { recursive: true });\n\n for await (const contentDeclarationPath of contentDeclarationsPaths) {\n const result = await processContentDeclaration(contentDeclarationPath);\n\n if (!result) {\n continue;\n }\n\n const nestedContent: DeclarationContent[] = extractObjectsWithId(result);\n\n const contentWithFilePath: DeclarationContent[] = nestedContent.map(\n (content) => ({\n ...content,\n filePath: contentDeclarationPath,\n })\n );\n\n const dictionariesPaths: string[] =\n await writeDictionary(contentWithFilePath);\n\n resultDictionariesPaths.push(...dictionariesPaths);\n }\n\n return resultDictionariesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAiC;AACjC,kBAAwB;AACxB,oBAAiC;AAEjC,+BAAqC;AACrC,uCAA0C;AAE1C,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,gBAAgB,IAAI;AAE5B,MAAM,kBAAkB,OAAO,iBAAuC;AACpE,QAAM,0BAAoC,CAAC;AAE3C,mBAAiB,qBAAqB,cAAc;AAClD,UAAM,gBAAgB,KAAK,UAAU,iBAAiB;AAEtD,UAAM,KAAK,kBAAkB;AAC7B,UAAM,iBAAiB,GAAG,EAAE;AAC5B,UAAM,qBAAiB,qBAAQ,iBAAiB,cAAc;AAG9D,cAAM,2BAAU,gBAAgB,eAAe,MAAM,EAAE,MAAM,CAAC,QAAQ;AACpE,cAAQ,MAAM,kBAAkB,cAAc,KAAK,GAAG;AAAA,IACxD,CAAC;AAED,4BAAwB,KAAK,cAAc;AAAA,EAC7C;AAEA,SAAO;AACT;AAKO,MAAM,0BAA0B,OACrC,6BACG;AACH,QAAM,0BAAoC,CAAC;AAE3C,MAAI,OAAO,6BAA6B,UAAU;AAChD,+BAA2B,CAAC,wBAAwB;AAAA,EACtD;AAGA,YAAM,2BAAM,qBAAQ,eAAe,GAAG,EAAE,WAAW,KAAK,CAAC;AAEzD,mBAAiB,0BAA0B,0BAA0B;AACnE,UAAM,SAAS,UAAM,4DAA0B,sBAAsB;AAErE,QAAI,CAAC,QAAQ;AACX;AAAA,IACF;AAEA,UAAM,oBAAsC,+CAAqB,MAAM;AAEvE,UAAM,sBAA4C,cAAc;AAAA,MAC9D,CAACA,cAAa;AAAA,QACZ,GAAGA;AAAA,QACH,UAAU;AAAA,MACZ;AAAA,IACF;AAEA,UAAM,oBACJ,MAAM,gBAAgB,mBAAmB;AAE3C,4BAAwB,KAAK,GAAG,iBAAiB;AAAA,EACnD;AAEA,SAAO;AACT;","names":["content"]}
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts"],"sourcesContent":["import type { Content,
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/extractNestedJSON.ts"],"sourcesContent":["import type { Content, DeclarationContent } from '@intlayer/core';\n\n/**\n *\n * This function extracts all nested objects with an 'id' field from the input object and returns them as an array\n *\n * Example:\n *\n * const input = {\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * },\n * };\n * const result = extractObjectsWithId(input);\n * console.log(result);\n *\n * Output:\n *\n * [{\n * id: '1',\n * name: 'John Doe',\n * address: {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }\n * },\n * {\n * id: '2',\n * street: '123 Main St',\n * city: 'Springfield',\n * state: 'IL'\n * }]\n *\n */\nexport const extractObjectsWithId = (\n input: DeclarationContent\n): DeclarationContent[] => {\n // Function to recursively search and extract nested objects with an 'id'\n const search = (obj: Content, results: DeclarationContent[]): void => {\n if (obj && typeof obj === 'object') {\n if (Object.hasOwn(obj, 'id')) {\n results.push(obj as DeclarationContent);\n }\n for (const key of Object.keys(obj)) {\n if (typeof obj[key] === 'object') {\n search(obj[key] as Content, results);\n }\n }\n }\n };\n\n const results: DeclarationContent[] = [];\n search(input, results);\n return results;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAyCO,MAAM,uBAAuB,CAClC,UACyB;AAEzB,QAAM,SAAS,CAAC,KAAcA,aAAwC;AACpE,QAAI,OAAO,OAAO,QAAQ,UAAU;AAClC,UAAI,OAAO,OAAO,KAAK,IAAI,GAAG;AAC5B,QAAAA,SAAQ,KAAK,GAAyB;AAAA,MACxC;AACA,iBAAW,OAAO,OAAO,KAAK,GAAG,GAAG;AAClC,YAAI,OAAO,IAAI,GAAG,MAAM,UAAU;AAChC,iBAAO,IAAI,GAAG,GAAcA,QAAO;AAAA,QACrC;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,UAAgC,CAAC;AACvC,SAAO,OAAO,OAAO;AACrB,SAAO;AACT;","names":["results"]}
@@ -1,4 +1,4 @@
- import {
+ import { DeclarationContent } from '@intlayer/core';
  
  /**
  *
@@ -39,6 +39,6 @@ import { ContentModule } from '@intlayer/core';
  * }]
  *
  */
- declare const extractObjectsWithId: (input:
+ declare const extractObjectsWithId: (input: DeclarationContent) => DeclarationContent[];
  
  export { extractObjectsWithId };
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-var-requires */\nimport { createRequire } from 'module';\nimport { type Context, runInNewContext } from 'vm';\nimport type {
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/loadContentDeclaration.ts"],"sourcesContent":["/* eslint-disable @typescript-eslint/no-var-requires */\nimport { createRequire } from 'module';\nimport { type Context, runInNewContext } from 'vm';\nimport type { DeclarationContent } from '@intlayer/core';\nimport { type BuildOptions, buildSync, type BuildResult } from 'esbuild';\n\nconst isESModule = typeof import.meta.url === 'string';\n\nconst sandboxContext: Context = {\n exports: {\n default: {},\n },\n module: {\n exports: {},\n },\n console,\n require: isESModule ? createRequire(import.meta.url) : require,\n};\n\nconst transformationOption: BuildOptions = {\n loader: {\n '.js': 'js',\n '.jsx': 'jsx',\n '.mjs': 'js',\n '.ts': 'ts',\n '.tsx': 'tsx',\n '.cjs': 'js',\n '.json': 'json',\n },\n format: 'cjs', // Output format as commonjs\n target: 'es2017',\n packages: 'external',\n write: false,\n bundle: true,\n};\n\nconst filterValidContentDeclaration = (\n contentDeclaration: DeclarationContent\n): DeclarationContent => {\n // @TODO Implement filtering of valid content declaration\n return contentDeclaration;\n};\n\n/**\n * Load the content declaration from the given path\n *\n * Accepts JSON, JS, MJS and TS files as configuration\n */\nexport const loadContentDeclaration = (\n contentDeclarationFilePath: string\n): DeclarationContent | undefined => {\n let contentDeclaration: DeclarationContent | undefined = undefined;\n\n const fileExtension = contentDeclarationFilePath.split('.').pop() ?? '';\n\n try {\n if (fileExtension === 'json') {\n // Assume JSON\n return require(contentDeclarationFilePath);\n }\n\n // Rest is JS, MJS or TS\n\n const moduleResult: BuildResult = buildSync({\n entryPoints: [contentDeclarationFilePath],\n\n ...transformationOption,\n });\n\n const moduleResultString = moduleResult.outputFiles?.[0].text;\n\n if (!moduleResultString) {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n runInNewContext(moduleResultString, sandboxContext);\n\n if (\n sandboxContext.exports.default &&\n Object.keys(sandboxContext.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.exports.default;\n } else if (\n sandboxContext.module.exports.defaults &&\n Object.keys(sandboxContext.module.exports.defaults).length > 0\n ) {\n // CommonJS\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports.default &&\n Object.keys(sandboxContext.module.exports.default).length > 0\n ) {\n // ES Module\n contentDeclaration = sandboxContext.module.exports.default;\n } else if (\n sandboxContext.module.exports &&\n Object.keys(sandboxContext.module.exports).length > 0\n ) {\n // Other\n contentDeclaration = sandboxContext.module.exports;\n }\n\n if (typeof contentDeclaration === 'undefined') {\n console.error('Configuration file could not be loaded.');\n return undefined;\n }\n\n return filterValidContentDeclaration(contentDeclaration);\n } catch (error) {\n console.error('Error:', error);\n 
}\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAA8B;AAC9B,gBAA8C;AAE9C,qBAA+D;AAJ/D;AAMA,MAAM,aAAa,OAAO,YAAY,QAAQ;AAE9C,MAAM,iBAA0B;AAAA,EAC9B,SAAS;AAAA,IACP,SAAS,CAAC;AAAA,EACZ;AAAA,EACA,QAAQ;AAAA,IACN,SAAS,CAAC;AAAA,EACZ;AAAA,EACA;AAAA,EACA,SAAS,iBAAa,6BAAc,YAAY,GAAG,IAAI;AACzD;AAEA,MAAM,uBAAqC;AAAA,EACzC,QAAQ;AAAA,IACN,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,SAAS;AAAA,EACX;AAAA,EACA,QAAQ;AAAA;AAAA,EACR,QAAQ;AAAA,EACR,UAAU;AAAA,EACV,OAAO;AAAA,EACP,QAAQ;AACV;AAEA,MAAM,gCAAgC,CACpC,uBACuB;AAEvB,SAAO;AACT;AAOO,MAAM,yBAAyB,CACpC,+BACmC;AACnC,MAAI,qBAAqD;AAEzD,QAAM,gBAAgB,2BAA2B,MAAM,GAAG,EAAE,IAAI,KAAK;AAErE,MAAI;AACF,QAAI,kBAAkB,QAAQ;AAE5B,aAAO,QAAQ,0BAA0B;AAAA,IAC3C;AAIA,UAAM,mBAA4B,0BAAU;AAAA,MAC1C,aAAa,CAAC,0BAA0B;AAAA,MAExC,GAAG;AAAA,IACL,CAAC;AAED,UAAM,qBAAqB,aAAa,cAAc,CAAC,EAAE;AAEzD,QAAI,CAAC,oBAAoB;AACvB,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,mCAAgB,oBAAoB,cAAc;AAElD,QACE,eAAe,QAAQ,WACvB,OAAO,KAAK,eAAe,QAAQ,OAAO,EAAE,SAAS,GACrD;AAEA,2BAAqB,eAAe,QAAQ;AAAA,IAC9C,WACE,eAAe,OAAO,QAAQ,YAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,QAAQ,EAAE,SAAS,GAC7D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,QAAQ,WAC9B,OAAO,KAAK,eAAe,OAAO,QAAQ,OAAO,EAAE,SAAS,GAC5D;AAEA,2BAAqB,eAAe,OAAO,QAAQ;AAAA,IACrD,WACE,eAAe,OAAO,WACtB,OAAO,KAAK,eAAe,OAAO,OAAO,EAAE,SAAS,GACpD;AAEA,2BAAqB,eAAe,OAAO;AAAA,IAC7C;AAEA,QAAI,OAAO,uBAAuB,aAAa;AAC7C,cAAQ,MAAM,yCAAyC;AACvD,aAAO;AAAA,IACT;AAEA,WAAO,8BAA8B,kBAAkB;AAAA,EACzD,SAAS,OAAO;AACd,YAAQ,MAAM,UAAU,KAAK;AAAA,EAC/B;AACF;","names":[]}
@@ -1,10 +1,10 @@
- import {
+ import { DeclarationContent } from '@intlayer/core';
  
  /**
  * Load the content declaration from the given path
  *
  * Accepts JSON, JS, MJS and TS files as configuration
  */
- declare const loadContentDeclaration: (contentDeclarationFilePath: string) =>
+ declare const loadContentDeclaration: (contentDeclarationFilePath: string) => DeclarationContent | undefined;
  
  export { loadContentDeclaration };
@@ -29,7 +29,14 @@ const processFunctionResults = async (entry) => {
  const result = {};
  for (const key of Object.keys(entry)) {
  const field = entry?.[key];
-
+ const isArray = Array.isArray(field);
+ if (typeof field === "object" && isArray) {
+ result[key] = await Promise.all(
+ field.map(async (el) => {
+ return await processFunctionResults(el);
+ })
+ );
+ } else if (typeof field === "object") {
  result[key] = await processFunctionResults(
  field
  );
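The added branch changes how arrays inside a content declaration are processed: array fields are now mapped element by element and remain arrays, whereas in 2.0.0 an array fell through the generic object branch and came back as an object keyed by "0", "1", and so on. An illustrative input of the kind this handles; the field names are invented for the example, not taken from the package:

```ts
// Illustrative content declaration showing what the new array branch handles.
const entry = {
  id: 'nav',                          // plain value: copied through unchanged
  title: async () => 'My site',       // function field: awaited and replaced by its result
  links: [                            // array field: now processed element by element
    { label: 'Home', href: () => '/' },
    { label: 'Docs', href: () => '/docs' },
  ],
};
// 2.0.2: `links` stays an array of resolved objects.
// 2.0.0: the same input came back as { '0': {...}, '1': {...} }.
```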
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.ts"],"sourcesContent":["import { resolve } from 'path';\nimport type {\n Content,\n
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/intlayer_dictionary/processContentDeclaration.ts"],"sourcesContent":["import { resolve } from 'path';\nimport type {\n Content,\n DeclarationContent,\n FlatContent,\n FlatContentValue,\n} from '@intlayer/core';\nimport { loadContentDeclaration } from './loadContentDeclaration';\n\n/**\n * Function to replace function and async function fields with their results in the object\n */\nconst processFunctionResults = async (entry: Content): Promise<FlatContent> => {\n if (entry && typeof entry === 'object') {\n const promises: Promise<void>[] = [];\n const result: FlatContent = {};\n\n for (const key of Object.keys(entry)) {\n const field = entry?.[key];\n const isArray = Array.isArray(field);\n\n if (typeof field === 'object' && isArray) {\n result[key] = (await Promise.all(\n field.map(async (el) => {\n return await processFunctionResults(el as Content);\n })\n )) as unknown as FlatContentValue;\n } else if (typeof field === 'object') {\n result[key] = (await processFunctionResults(\n field as Content\n )) as FlatContentValue;\n } else if (typeof field === 'function') {\n // Wait for the function to resolve if it's an async function\n const promise = (async () => {\n // Execute the function and await the result if it's a Promise\n const value = await field();\n\n result[key] = value as FlatContentValue;\n })();\n promises.push(promise);\n } else {\n result[key] = field as FlatContentValue;\n }\n }\n\n // Wait for all async operations to complete\n await Promise.all(promises);\n\n return result;\n }\n\n return entry;\n};\n\n/**\n * Function to load, process the module and return the Intlayer DeclarationContent from the module file\n */\nexport const processContentDeclaration = async (file: string) => {\n try {\n const functionPath = resolve(file);\n const entry = loadContentDeclaration(functionPath);\n\n if (!entry) {\n console.error('No entry found in module:', functionPath);\n return;\n }\n\n return (await processFunctionResults(entry)) as DeclarationContent;\n } catch (error) {\n console.error('Error processing module:', error);\n }\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAOxB,oCAAuC;AAKvC,MAAM,yBAAyB,OAAO,UAAyC;AAC7E,MAAI,SAAS,OAAO,UAAU,UAAU;AACtC,UAAM,WAA4B,CAAC;AACnC,UAAM,SAAsB,CAAC;AAE7B,eAAW,OAAO,OAAO,KAAK,KAAK,GAAG;AACpC,YAAM,QAAQ,QAAQ,GAAG;AACzB,YAAM,UAAU,MAAM,QAAQ,KAAK;AAEnC,UAAI,OAAO,UAAU,YAAY,SAAS;AACxC,eAAO,GAAG,IAAK,MAAM,QAAQ;AAAA,UAC3B,MAAM,IAAI,OAAO,OAAO;AACtB,mBAAO,MAAM,uBAAuB,EAAa;AAAA,UACnD,CAAC;AAAA,QACH;AAAA,MACF,WAAW,OAAO,UAAU,UAAU;AACpC,eAAO,GAAG,IAAK,MAAM;AAAA,UACnB;AAAA,QACF;AAAA,MACF,WAAW,OAAO,UAAU,YAAY;AAEtC,cAAM,WAAW,YAAY;AAE3B,gBAAM,QAAQ,MAAM,MAAM;AAE1B,iBAAO,GAAG,IAAI;AAAA,QAChB,GAAG;AACH,iBAAS,KAAK,OAAO;AAAA,MACvB,OAAO;AACL,eAAO,GAAG,IAAI;AAAA,MAChB;AAAA,IACF;AAGA,UAAM,QAAQ,IAAI,QAAQ;AAE1B,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKO,MAAM,4BAA4B,OAAO,SAAiB;AAC/D,MAAI;AACF,UAAM,mBAAe,qBAAQ,IAAI;AACjC,UAAM,YAAQ,sDAAuB,YAAY;AAEjD,QAAI,CAAC,OAAO;AACV,cAAQ,MAAM,6BAA6B,YAAY;AACvD;AAAA,IACF;AAEA,WAAQ,MAAM,uBAAuB,KAAK;AAAA,EAC5C,SAAS,OAAO;AACd,YAAQ,MAAM,4BAA4B,KAAK;AAAA,EACjD;AACF;","names":[]}
@@ -1,8 +1,8 @@
- import {
+ import { DeclarationContent } from '@intlayer/core';
  
  /**
- * Function to load, process the module and return the Intlayer
+ * Function to load, process the module and return the Intlayer DeclarationContent from the module file
  */
- declare const processContentDeclaration: (file: string) => Promise<
+ declare const processContentDeclaration: (file: string) => Promise<DeclarationContent | undefined>;
  
  export { processContentDeclaration };
@@ -19,98 +19,65 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  var createType_exports = {};
  __export(createType_exports, {
  createTypes: () => createTypes,
- generateTypeScriptType: () => generateTypeScriptType
- generateTypeScriptTypeContent: () => generateTypeScriptTypeContent
+ generateTypeScriptType: () => generateTypeScriptType
  });
  module.exports = __toCommonJS(createType_exports);
  var import_fs = require("fs");
  var import_module = require("module");
  var import_path = require("path");
  var import_config = require("@intlayer/config");
- var
- var import_createModuleAugmentation = require('./createModuleAugmentation.cjs');
+ var import_quicktype_core = require("quicktype-core");
  const import_meta = {};
- const { content
+ const { content } = (0, import_config.getConfiguration)();
  const { typesDir } = content;
  const isESModule = typeof import_meta.url === "string";
  const requireFunction = isESModule ? (0, import_module.createRequire)(import_meta.url) : require;
- const
-
-
- typeDefinition += `export type ${typeName} = {
- `;
- typeDefinition += generateTypeScriptTypeContent(obj);
- typeDefinition += "};\n\n";
- return typeDefinition;
- };
- const isReactNode = (node) => typeof node?.key !== "undefined" && typeof node?.props !== "undefined";
- const generateTypeScriptTypeContent = (obj) => {
- if (typeof obj !== "object" || obj === null) {
- return `${typeof obj}`;
- }
- const isReactNodeValue = isReactNode(obj);
- if (isReactNodeValue) {
- return `JSX.Element`;
+ const kebabCaseToCammelCase = (name) => name.split(/[\s\-_]+/).map((word, index) => {
+ if (index === 0) {
+ return word;
  }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- const isArray = Array.isArray(value);
- if (isArray) {
- const arrayType = generateTypeScriptTypeContent(value);
- typeDefinition += ` ${key}: ${arrayType}[],
- `;
- } else {
- const nestedType = generateTypeScriptTypeContent(value);
- typeDefinition += ` ${key}: {${nestedType}},
- `;
- }
- } else if (
- // Check if the value is an 'id'
- typeof value === "string" && key === "id"
- ) {
- const tsType = `"${value}"`;
- typeDefinition += ` ${key}: ${tsType},
- `;
- } else {
- const tsType = typeof value;
- typeDefinition += ` ${key}: ${tsType},
- `;
+ return word.charAt(0).toUpperCase() + word.slice(1);
+ }).join("");
+ const generateTypeScriptType = async (typeName, jsonString) => {
+ const { lines } = await quicktypeJSON(typeName, jsonString);
+ const linesString = lines.join("\n");
+ return linesString;
+ };
+ const quicktypeJSON = async (typeName, jsonString) => {
+ const jsonInput = (0, import_quicktype_core.jsonInputForTargetLanguage)("typescript");
+ await jsonInput.addSource({
+ name: typeName,
+ samples: [jsonString]
+ });
+ const inputData = new import_quicktype_core.InputData();
+ inputData.addInput(jsonInput);
+ return await (0, import_quicktype_core.quicktype)({
+ inputData,
+ lang: "typescript",
+ alphabetizeProperties: true,
+ rendererOptions: {
+ "just-types": "true",
+ "explicit-unions": "true",
+ "acronym-style": "camel",
+ "prefer-types": "true",
+ readonly: "false"
  }
- }
- return typeDefinition;
+ });
  };
- const createTypes = (dictionariesPaths) => {
+ const createTypes = async (dictionariesPaths) => {
  const resultTypesPaths = [];
  if (!(0, import_fs.existsSync)(typesDir)) {
  (0, import_fs.mkdirSync)(typesDir, { recursive: true });
  }
  for (const dictionaryPath of dictionariesPaths) {
- const
- const dictionaryName =
- const
+ const dictionary = requireFunction(dictionaryPath);
+ const dictionaryName = dictionary.id;
+ const dictionaryNameCamelCase = kebabCaseToCammelCase(dictionaryName) + "Content";
+ const dictionaryContentString = JSON.stringify(dictionary);
+ const typeDefinition = await generateTypeScriptType(
+ dictionaryNameCamelCase,
+ dictionaryContentString
+ );
  const outputPath = (0, import_path.resolve)(typesDir, `${dictionaryName}.d.ts`);
  (0, import_fs.writeFileSync)(outputPath, typeDefinition);
  resultTypesPaths.push(outputPath);
@@ -120,7 +87,6 @@ const createTypes = (dictionariesPaths) => {
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  createTypes,
- generateTypeScriptType
- generateTypeScriptTypeContent
+ generateTypeScriptType
  });
  //# sourceMappingURL=createType.cjs.map
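The two hunks above show that 2.0.2 drops the hand-rolled type generator (generateTypeScriptTypeContent) and instead feeds the serialized dictionary JSON to quicktype-core, which is why createTypes became async. A condensed TypeScript sketch of that new path, mirroring the compiled CJS above; the dictionary content and output file name here are illustrative:

```ts
import { writeFileSync } from 'fs';
import {
  quicktype,
  InputData,
  jsonInputForTargetLanguage,
} from 'quicktype-core';

// Generate a TypeScript type definition from a JSON sample via quicktype-core.
const generateTypeScriptType = async (typeName: string, jsonString: string) => {
  const jsonInput = jsonInputForTargetLanguage('typescript');
  await jsonInput.addSource({ name: typeName, samples: [jsonString] });

  const inputData = new InputData();
  inputData.addInput(jsonInput);

  const { lines } = await quicktype({
    inputData,
    lang: 'typescript',
    alphabetizeProperties: true,
    rendererOptions: {
      'just-types': 'true',
      'explicit-unions': 'true',
      'acronym-style': 'camel',
      'prefer-types': 'true',
      readonly: 'false',
    },
  });

  return lines.join('\n');
};

// Usage: turn a built dictionary (JSON) into a .d.ts, as createTypes does per dictionary.
const dictionary = { id: 'my-dictionary', title: 'Hello' }; // illustrative content
generateTypeScriptType('MyDictionaryContent', JSON.stringify(dictionary)).then(
  (typeDefinition) => writeFileSync('my-dictionary.d.ts', typeDefinition)
);
```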
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createType.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { createRequire } from 'module';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport
+
{"version":3,"sources":["../../../../src/transpiler/dictionary_to_type/createType.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { createRequire } from 'module';\nimport { resolve } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/core';\nimport {\n quicktype,\n InputData,\n jsonInputForTargetLanguage,\n} from 'quicktype-core';\n\nconst { content } = getConfiguration();\nconst { typesDir } = content;\n\nconst isESModule = typeof import.meta.url === 'string';\nconst requireFunction = isESModule ? createRequire(import.meta.url) : require;\n\nconst kebabCaseToCammelCase = (name: string): string =>\n name\n .split(/[\\s\\-_]+/) // Regular expression to match space, hyphen, or underscore\n .map((word, index) => {\n if (index === 0) {\n return word; // Return the first word as is\n }\n return word.charAt(0).toUpperCase() + word.slice(1); // Capitalize the first letter of subsequent words\n })\n .join(''); // Join all the segments into one string\n\nexport const generateTypeScriptType = async (\n typeName: string,\n jsonString: string\n) => {\n const { lines } = await quicktypeJSON(typeName, jsonString);\n\n const linesString: string = lines.join('\\n');\n\n return linesString;\n};\n\nconst quicktypeJSON = async (typeName: string, jsonString: string) => {\n const jsonInput = jsonInputForTargetLanguage('typescript');\n\n // We could add multiple samples for the same desired\n // type, or many sources for other types. Here we're\n // just making one type from one piece of sample JSON.\n\n await jsonInput.addSource({\n name: typeName,\n samples: [jsonString],\n });\n\n const inputData = new InputData();\n inputData.addInput(jsonInput);\n\n return await quicktype({\n inputData,\n lang: 'typescript',\n alphabetizeProperties: true,\n rendererOptions: {\n 'just-types': 'true',\n 'explicit-unions': 'true',\n 'acronym-style': 'camel',\n 'prefer-types': 'true',\n readonly: 'false',\n },\n });\n};\n\n/**\n * This function generates a TypeScript type definition from a JSON object\n */\nexport const createTypes = async (\n dictionariesPaths: string[]\n): Promise<string[]> => {\n const resultTypesPaths: string[] = [];\n\n // Create type folders if they don't exist\n if (!existsSync(typesDir)) {\n mkdirSync(typesDir, { recursive: true });\n }\n\n for (const dictionaryPath of dictionariesPaths) {\n const dictionary: Dictionary = requireFunction(dictionaryPath);\n const dictionaryName: string = dictionary.id;\n const dictionaryNameCamelCase: string =\n kebabCaseToCammelCase(dictionaryName) + 'Content';\n\n const dictionaryContentString: string = JSON.stringify(dictionary);\n\n const typeDefinition: string = await generateTypeScriptType(\n dictionaryNameCamelCase,\n dictionaryContentString\n );\n\n const outputPath: string = resolve(typesDir, `${dictionaryName}.d.ts`);\n\n writeFileSync(outputPath, typeDefinition);\n\n resultTypesPaths.push(outputPath);\n }\n\n return 
resultTypesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAAqD;AACrD,oBAA8B;AAC9B,kBAAwB;AACxB,oBAAiC;AAEjC,4BAIO;AATP;AAWA,MAAM,EAAE,QAAQ,QAAI,gCAAiB;AACrC,MAAM,EAAE,SAAS,IAAI;AAErB,MAAM,aAAa,OAAO,YAAY,QAAQ;AAC9C,MAAM,kBAAkB,iBAAa,6BAAc,YAAY,GAAG,IAAI;AAEtE,MAAM,wBAAwB,CAAC,SAC7B,KACG,MAAM,UAAU,EAChB,IAAI,CAAC,MAAM,UAAU;AACpB,MAAI,UAAU,GAAG;AACf,WAAO;AAAA,EACT;AACA,SAAO,KAAK,OAAO,CAAC,EAAE,YAAY,IAAI,KAAK,MAAM,CAAC;AACpD,CAAC,EACA,KAAK,EAAE;AAEL,MAAM,yBAAyB,OACpC,UACA,eACG;AACH,QAAM,EAAE,MAAM,IAAI,MAAM,cAAc,UAAU,UAAU;AAE1D,QAAM,cAAsB,MAAM,KAAK,IAAI;AAE3C,SAAO;AACT;AAEA,MAAM,gBAAgB,OAAO,UAAkB,eAAuB;AACpE,QAAM,gBAAY,kDAA2B,YAAY;AAMzD,QAAM,UAAU,UAAU;AAAA,IACxB,MAAM;AAAA,IACN,SAAS,CAAC,UAAU;AAAA,EACtB,CAAC;AAED,QAAM,YAAY,IAAI,gCAAU;AAChC,YAAU,SAAS,SAAS;AAE5B,SAAO,UAAM,iCAAU;AAAA,IACrB;AAAA,IACA,MAAM;AAAA,IACN,uBAAuB;AAAA,IACvB,iBAAiB;AAAA,MACf,cAAc;AAAA,MACd,mBAAmB;AAAA,MACnB,iBAAiB;AAAA,MACjB,gBAAgB;AAAA,MAChB,UAAU;AAAA,IACZ;AAAA,EACF,CAAC;AACH;AAKO,MAAM,cAAc,OACzB,sBACsB;AACtB,QAAM,mBAA6B,CAAC;AAGpC,MAAI,KAAC,sBAAW,QAAQ,GAAG;AACzB,6BAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAAA,EACzC;AAEA,aAAW,kBAAkB,mBAAmB;AAC9C,UAAM,aAAyB,gBAAgB,cAAc;AAC7D,UAAM,iBAAyB,WAAW;AAC1C,UAAM,0BACJ,sBAAsB,cAAc,IAAI;AAE1C,UAAM,0BAAkC,KAAK,UAAU,UAAU;AAEjE,UAAM,iBAAyB,MAAM;AAAA,MACnC;AAAA,MACA;AAAA,IACF;AAEA,UAAM,iBAAqB,qBAAQ,UAAU,GAAG,cAAc,OAAO;AAErE,iCAAc,YAAY,cAAc;AAExC,qBAAiB,KAAK,UAAU;AAAA,EAClC;AAEA,SAAO;AACT;","names":[]}
@@ -1,42 +1,7 @@
-
-
- /**
- *
- * This function generates a TypeScript type definition from a JSON object
- *
- * Example:
- *
- * const input = {
- * id: '1',
- * name: 'John Doe',
- * address: {
- * id: '2',
- * street: '123 Main St',
- * city: 'Springfield',
- * }
- * };
- *
- * const result = generateTypeScriptType(input, 'RootObject');
- * console.log(result);
- *
- * Output:
- *
- * type RootObject = {
- * id: '1',
- * name: string,
- * address: {
- * id: '2',
- * street: string,
- * city: string,
- * },
- * };
- *
- */
- declare const generateTypeScriptType: (obj: ContentModule) => string;
- declare const generateTypeScriptTypeContent: (obj: Content) => string;
+ declare const generateTypeScriptType: (typeName: string, jsonString: string) => Promise<string>;
  /**
  * This function generates a TypeScript type definition from a JSON object
  */
- declare const createTypes: (dictionariesPaths: string[]) => string[]
+ declare const createTypes: (dictionariesPaths: string[]) => Promise<string[]>;
  
- export { createTypes, generateTypeScriptType
+ export { createTypes, generateTypeScriptType };
@@ -1,3 +1,2 @@
  export { createModuleAugmentation, getTypeName } from './createModuleAugmentation.js';
- export { createTypes, generateTypeScriptType
- import '@intlayer/core';
+ export { createTypes, generateTypeScriptType } from './createType.js';
@@ -22,12 +22,15 @@ const watch = (options) => {
  ...options
  }).on("ready", async () => {
  const dictionariesPaths = await buildDictionary(files);
- console.info("Building
- createTypes(dictionariesPaths);
+ console.info("Building TypeScript types...");
+ await createTypes(dictionariesPaths);
+ console.info("TypeScript types built");
  console.info("Building Intlayer module augmentation...");
  createModuleAugmentation();
+ console.info("Intlayer module augmentation built");
  console.info("Building Intlayer dictionary list...");
  createDictionaryList();
+ console.info("Intlayer dictionary list built");
  const relativeDictionariesPath = dictionariesPaths.map(
  (dictionary) => relative(baseDir, dictionary)
  );
@@ -38,16 +41,19 @@ const watch = (options) => {
  console.info("Additional file detected: ", relative(baseDir, filePath));
  const dictionaries = await buildDictionary(filePath);
  console.info("Building TypeScript types...");
- createTypes(dictionaries);
- console.info("
+ await createTypes(dictionaries);
+ console.info("TypeScript types built");
+ console.info("Building Intlayer module augmentation...");
  createModuleAugmentation();
+ console.info("Intlayer module augmentation built");
  console.info("Building main...");
  createDictionaryList();
  }).on("change", async (filePath) => {
  console.info("Change detected: ", relative(baseDir, filePath));
  const dictionaries = await buildDictionary(filePath);
  console.info("Building TypeScript types...");
- createTypes(dictionaries);
+ await createTypes(dictionaries);
+ console.info("TypeScript types built");
  }).on("error", (error) => {
  console.error("Watcher error:", error);
  });
@@ -1 +1 @@
-
{"version":3,"sources":["../../../src/chokidar/watcher.ts"],"sourcesContent":["import { relative } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport chokidar, { type WatchOptions } from 'chokidar';\nimport { sync } from 'glob';\nimport { buildDictionary } from '../transpiler/declaration_file_to_dictionary/index';\nimport { createDictionaryList } from '../transpiler/dictionary_to_main/createDictionaryList';\nimport {\n createTypes,\n createModuleAugmentation,\n} from '../transpiler/dictionary_to_type/index';\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const { content } = getConfiguration({\n verbose: true,\n });\n\n const { watchedFilesPatternWithPath, baseDir } = content;\n\n const files: string[] = sync(watchedFilesPatternWithPath);\n\n return chokidar\n .watch(watchedFilesPatternWithPath, {\n persistent: true, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n ...options,\n })\n .on('ready', async () => {\n const dictionariesPaths = await buildDictionary(files);\n\n console.info('Building
+
{"version":3,"sources":["../../../src/chokidar/watcher.ts"],"sourcesContent":["import { relative } from 'path';\nimport { getConfiguration } from '@intlayer/config';\nimport chokidar, { type WatchOptions } from 'chokidar';\nimport { sync } from 'glob';\nimport { buildDictionary } from '../transpiler/declaration_file_to_dictionary/index';\nimport { createDictionaryList } from '../transpiler/dictionary_to_main/createDictionaryList';\nimport {\n createTypes,\n createModuleAugmentation,\n} from '../transpiler/dictionary_to_type/index';\n\n// Initialize chokidar watcher (non-persistent)\nexport const watch = (options?: WatchOptions) => {\n const { content } = getConfiguration({\n verbose: true,\n });\n\n const { watchedFilesPatternWithPath, baseDir } = content;\n\n const files: string[] = sync(watchedFilesPatternWithPath);\n\n return chokidar\n .watch(watchedFilesPatternWithPath, {\n persistent: true, // Make the watcher persistent\n ignoreInitial: true, // Process existing files\n ...options,\n })\n .on('ready', async () => {\n const dictionariesPaths = await buildDictionary(files);\n\n console.info('Building TypeScript types...');\n await createTypes(dictionariesPaths);\n console.info('TypeScript types built');\n\n console.info('Building Intlayer module augmentation...');\n createModuleAugmentation();\n console.info('Intlayer module augmentation built');\n\n console.info('Building Intlayer dictionary list...');\n createDictionaryList();\n console.info('Intlayer dictionary list built');\n\n const relativeDictionariesPath = dictionariesPaths.map((dictionary) =>\n relative(baseDir, dictionary)\n );\n\n console.info('Dictionaries:', relativeDictionariesPath);\n })\n .on('unlink', (filePath) => {\n // Process the file with the functionToRun\n console.info('Removed file detected: ', relative(baseDir, filePath));\n })\n .on('add', async (filePath) => {\n // Process the file with the functionToRun\n console.info('Additional file detected: ', relative(baseDir, filePath));\n const dictionaries = await buildDictionary(filePath);\n\n console.info('Building TypeScript types...');\n await createTypes(dictionaries);\n console.info('TypeScript types built');\n\n console.info('Building Intlayer module augmentation...');\n createModuleAugmentation();\n console.info('Intlayer module augmentation built');\n\n console.info('Building main...');\n createDictionaryList();\n })\n .on('change', async (filePath) => {\n // Process the file with the functionToRun\n console.info('Change detected: ', relative(baseDir, filePath));\n const dictionaries = await buildDictionary(filePath);\n\n console.info('Building TypeScript types...');\n await createTypes(dictionaries);\n console.info('TypeScript types built');\n })\n .on('error', (error) => {\n console.error('Watcher error:', error);\n 
});\n};\n"],"mappings":"AAAA,SAAS,gBAAgB;AACzB,SAAS,wBAAwB;AACjC,OAAO,cAAqC;AAC5C,SAAS,YAAY;AACrB,SAAS,uBAAuB;AAChC,SAAS,4BAA4B;AACrC;AAAA,EACE;AAAA,EACA;AAAA,OACK;AAGA,MAAM,QAAQ,CAAC,YAA2B;AAC/C,QAAM,EAAE,QAAQ,IAAI,iBAAiB;AAAA,IACnC,SAAS;AAAA,EACX,CAAC;AAED,QAAM,EAAE,6BAA6B,QAAQ,IAAI;AAEjD,QAAM,QAAkB,KAAK,2BAA2B;AAExD,SAAO,SACJ,MAAM,6BAA6B;AAAA,IAClC,YAAY;AAAA;AAAA,IACZ,eAAe;AAAA;AAAA,IACf,GAAG;AAAA,EACL,CAAC,EACA,GAAG,SAAS,YAAY;AACvB,UAAM,oBAAoB,MAAM,gBAAgB,KAAK;AAErD,YAAQ,KAAK,8BAA8B;AAC3C,UAAM,YAAY,iBAAiB;AACnC,YAAQ,KAAK,wBAAwB;AAErC,YAAQ,KAAK,0CAA0C;AACvD,6BAAyB;AACzB,YAAQ,KAAK,oCAAoC;AAEjD,YAAQ,KAAK,sCAAsC;AACnD,yBAAqB;AACrB,YAAQ,KAAK,gCAAgC;AAE7C,UAAM,2BAA2B,kBAAkB;AAAA,MAAI,CAAC,eACtD,SAAS,SAAS,UAAU;AAAA,IAC9B;AAEA,YAAQ,KAAK,iBAAiB,wBAAwB;AAAA,EACxD,CAAC,EACA,GAAG,UAAU,CAAC,aAAa;AAE1B,YAAQ,KAAK,2BAA2B,SAAS,SAAS,QAAQ,CAAC;AAAA,EACrE,CAAC,EACA,GAAG,OAAO,OAAO,aAAa;AAE7B,YAAQ,KAAK,8BAA8B,SAAS,SAAS,QAAQ,CAAC;AACtE,UAAM,eAAe,MAAM,gBAAgB,QAAQ;AAEnD,YAAQ,KAAK,8BAA8B;AAC3C,UAAM,YAAY,YAAY;AAC9B,YAAQ,KAAK,wBAAwB;AAErC,YAAQ,KAAK,0CAA0C;AACvD,6BAAyB;AACzB,YAAQ,KAAK,oCAAoC;AAEjD,YAAQ,KAAK,kBAAkB;AAC/B,yBAAqB;AAAA,EACvB,CAAC,EACA,GAAG,UAAU,OAAO,aAAa;AAEhC,YAAQ,KAAK,qBAAqB,SAAS,SAAS,QAAQ,CAAC;AAC7D,UAAM,eAAe,MAAM,gBAAgB,QAAQ;AAEnD,YAAQ,KAAK,8BAA8B;AAC3C,UAAM,YAAY,YAAY;AAC9B,YAAQ,KAAK,wBAAwB;AAAA,EACvC,CAAC,EACA,GAAG,SAAS,CAAC,UAAU;AACtB,YAAQ,MAAM,kBAAkB,KAAK;AAAA,EACvC,CAAC;AACL;","names":[]}