@intlayer/chokidar 5.5.11 → 5.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/cjs/fetchDistantDictionaries.cjs +1 -1
- package/dist/cjs/fetchDistantDictionaries.cjs.map +1 -1
- package/dist/cjs/fetchDistantDictionaryKeys.cjs +1 -1
- package/dist/cjs/fetchDistantDictionaryKeys.cjs.map +1 -1
- package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs +2 -2
- package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs.map +1 -1
- package/dist/cjs/index.cjs +16 -2
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/listGitFiles.cjs +69 -2
- package/dist/cjs/listGitFiles.cjs.map +1 -1
- package/dist/cjs/loadDictionaries/loadDictionaries.cjs +18 -5
- package/dist/cjs/loadDictionaries/loadDictionaries.cjs.map +1 -1
- package/dist/cjs/log.cjs +3 -3
- package/dist/cjs/log.cjs.map +1 -1
- package/dist/cjs/prepareIntlayer.cjs +4 -2
- package/dist/cjs/prepareIntlayer.cjs.map +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.cjs +1 -1
- package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.cjs.map +1 -1
- package/dist/cjs/transpiler/dictionary_to_main/generateDictionaryListContent.cjs +2 -2
- package/dist/cjs/transpiler/dictionary_to_main/generateDictionaryListContent.cjs.map +1 -1
- package/dist/cjs/transpiler/dictionary_to_type/createModuleAugmentation.cjs +4 -3
- package/dist/cjs/transpiler/dictionary_to_type/createModuleAugmentation.cjs.map +1 -1
- package/dist/cjs/{utils.cjs → utils/getFileHash.cjs} +6 -22
- package/dist/cjs/utils/getFileHash.cjs.map +1 -0
- package/dist/cjs/utils/kebabCaseToCamelCase.cjs +39 -0
- package/dist/cjs/utils/kebabCaseToCamelCase.cjs.map +1 -0
- package/dist/cjs/utils/runOnce.cjs +58 -0
- package/dist/cjs/utils/runOnce.cjs.map +1 -0
- package/dist/cjs/utils/sortAlphabetically.cjs +29 -0
- package/dist/cjs/utils/sortAlphabetically.cjs.map +1 -0
- package/dist/esm/fetchDistantDictionaries.mjs +2 -2
- package/dist/esm/fetchDistantDictionaries.mjs.map +1 -1
- package/dist/esm/fetchDistantDictionaryKeys.mjs +1 -1
- package/dist/esm/fetchDistantDictionaryKeys.mjs.map +1 -1
- package/dist/esm/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs +1 -1
- package/dist/esm/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.mjs.map +1 -1
- package/dist/esm/index.mjs +12 -2
- package/dist/esm/index.mjs.map +1 -1
- package/dist/esm/listGitFiles.mjs +67 -1
- package/dist/esm/listGitFiles.mjs.map +1 -1
- package/dist/esm/loadDictionaries/loadDictionaries.mjs +17 -4
- package/dist/esm/loadDictionaries/loadDictionaries.mjs.map +1 -1
- package/dist/esm/log.mjs +2 -2
- package/dist/esm/log.mjs.map +1 -1
- package/dist/esm/prepareIntlayer.mjs +4 -2
- package/dist/esm/prepareIntlayer.mjs.map +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.mjs +1 -1
- package/dist/esm/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.mjs.map +1 -1
- package/dist/esm/transpiler/dictionary_to_main/generateDictionaryListContent.mjs +1 -1
- package/dist/esm/transpiler/dictionary_to_main/generateDictionaryListContent.mjs.map +1 -1
- package/dist/esm/transpiler/dictionary_to_type/createModuleAugmentation.mjs +2 -1
- package/dist/esm/transpiler/dictionary_to_type/createModuleAugmentation.mjs.map +1 -1
- package/dist/esm/utils/getFileHash.mjs +9 -0
- package/dist/esm/utils/getFileHash.mjs.map +1 -0
- package/dist/esm/{utils.mjs → utils/kebabCaseToCamelCase.mjs} +2 -10
- package/dist/esm/utils/kebabCaseToCamelCase.mjs.map +1 -0
- package/dist/esm/utils/runOnce.mjs +34 -0
- package/dist/esm/utils/runOnce.mjs.map +1 -0
- package/dist/esm/utils/sortAlphabetically.mjs +5 -0
- package/dist/esm/utils/sortAlphabetically.mjs.map +1 -0
- package/dist/types/index.d.ts +6 -2
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/listGitFiles.d.ts +6 -0
- package/dist/types/listGitFiles.d.ts.map +1 -1
- package/dist/types/loadDictionaries/loadDictionaries.d.ts.map +1 -1
- package/dist/types/log.d.ts.map +1 -1
- package/dist/types/prepareIntlayer.d.ts +1 -1
- package/dist/types/prepareIntlayer.d.ts.map +1 -1
- package/dist/types/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.d.ts.map +1 -1
- package/dist/types/transpiler/dictionary_to_type/createModuleAugmentation.d.ts.map +1 -1
- package/dist/types/utils/getFileHash.d.ts +2 -0
- package/dist/types/utils/getFileHash.d.ts.map +1 -0
- package/dist/types/utils/kebabCaseToCamelCase.d.ts +2 -0
- package/dist/types/utils/kebabCaseToCamelCase.d.ts.map +1 -0
- package/dist/types/utils/runOnce.d.ts +24 -0
- package/dist/types/utils/runOnce.d.ts.map +1 -0
- package/dist/types/utils/sortAlphabetically.d.ts +2 -0
- package/dist/types/utils/sortAlphabetically.d.ts.map +1 -0
- package/package.json +13 -13
- package/dist/cjs/utils.cjs.map +0 -1
- package/dist/esm/utils.mjs.map +0 -1
- package/dist/types/utils.d.ts +0 -4
- package/dist/types/utils.d.ts.map +0 -1
package/README.md
CHANGED
@@ -33,7 +33,7 @@
 <a href="https://www.facebook.com/intlayer" target="blank"><img align="center"
 src="https://img.shields.io/badge/facebook-4267B2.svg?style=for-the-badge&logo=facebook&logoColor=white"
 alt="Intlayer Facebook" height="30"/></a>
-<a href="https://www.instagram.com/
+<a href="https://www.instagram.com/intlayer/" target="blank"><img align="center"
 src="https://img.shields.io/badge/instagram-%23E4405F.svg?style=for-the-badge&logo=Instagram&logoColor=white"
 alt="Intlayer Instagram" height="30"/></a>
 <a href="https://x.com/Intlayer183096" target="blank"><img align="center"
package/dist/cjs/fetchDistantDictionaries.cjs
CHANGED
@@ -40,7 +40,7 @@ const fetchDistantDictionaries = async (options) => {
   const appLogger = (0, import_config.getAppLogger)(config);
   try {
     const { clientId, clientSecret } = config.editor;
-    const authAPI = (0, import_api.
+    const authAPI = (0, import_api.getOAuthAPI)(config);
     const dictionaryAPI = (0, import_api.getDictionaryAPI)(void 0, config);
     if (!clientId || !clientSecret) {
       throw new Error(
package/dist/cjs/fetchDistantDictionaries.cjs.map
CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/fetchDistantDictionaries.ts"], ...} (previous single-line sourcemap, truncated in the diff view)
+ {"version":3,"sources":["../../src/fetchDistantDictionaries.ts"], ...} (regenerated single-line sourcemap; its embedded sourcesContent now imports getOAuthAPI and getDictionaryAPI from '@intlayer/api' and calls authAPI.getOAuth2AccessToken())
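Note: the change above splits the previous aggregated client into a dedicated OAuth client and a dictionary client. A minimal sketch of the new call pattern, reconstructed from the TypeScript source embedded in the updated sourcemap (the dictionary key below is a placeholder and the original error handling is omitted):

  import { getDictionaryAPI, getOAuthAPI } from '@intlayer/api';
  import { getConfiguration } from '@intlayer/config';

  const config = getConfiguration();

  // The auth client and the dictionary client are now obtained separately.
  const authAPI = getOAuthAPI(config);
  const dictionaryAPI = getDictionaryAPI(undefined, config);

  // The access token is requested from the OAuth client directly...
  const oAuth2TokenResult = await authAPI.getOAuth2AccessToken();
  const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;

  // ...and forwarded as a Bearer header when fetching a dictionary.
  const getDictionaryResult = await dictionaryAPI.getDictionary('my-dictionary-key', undefined, {
    ...(oAuth2AccessToken && {
      headers: { Authorization: `Bearer ${oAuth2AccessToken}` },
    }),
  });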
package/dist/cjs/fetchDistantDictionaryKeys.cjs
CHANGED
@@ -31,7 +31,7 @@ const fetchDistantDictionaryKeys = async (configuration = (0, import_config.getC
     );
   }
   const intlayerAPI = (0, import_api.getIntlayerAPI)(void 0, configuration);
-  const oAuth2TokenResult = await intlayerAPI.
+  const oAuth2TokenResult = await intlayerAPI.oAuth.getOAuth2AccessToken();
   const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;
   const getDictionariesKeysResult = await intlayerAPI.dictionary.getDictionariesKeys({
     ...oAuth2AccessToken && {
package/dist/cjs/fetchDistantDictionaryKeys.cjs.map
CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/fetchDistantDictionaryKeys.ts"], ...} (previous single-line sourcemap, truncated in the diff view)
+ {"version":3,"sources":["../../src/fetchDistantDictionaryKeys.ts"], ...} (regenerated single-line sourcemap; the embedded source now calls intlayerAPI.oAuth.getOAuth2AccessToken())
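Note: the aggregated client keeps the same method but namespaces it under oAuth. A short sketch based on the source embedded in the updated sourcemap:

  import { getIntlayerAPI } from '@intlayer/api';
  import { getConfiguration } from '@intlayer/config';

  const configuration = getConfiguration();
  const intlayerAPI = getIntlayerAPI(undefined, configuration);

  // 5.7.0 calls the namespaced method instead of a top-level one.
  const oAuth2TokenResult = await intlayerAPI.oAuth.getOAuth2AccessToken();
  const oAuth2AccessToken = oAuth2TokenResult.data?.accessToken;

  // Dictionary keys are then listed with the Bearer token when available.
  const getDictionariesKeysResult = await intlayerAPI.dictionary.getDictionariesKeys({
    ...(oAuth2AccessToken && {
      headers: { Authorization: `Bearer ${oAuth2AccessToken}` },
    }),
  });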
package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs
CHANGED
@@ -24,7 +24,7 @@ module.exports = __toCommonJS(getContentDeclarationFileTemplate_exports);
 var import_promises = require("fs/promises");
 var import_path = require("path");
 var import_url = require("url");
-var
+var import_kebabCaseToCamelCase = require('../utils/kebabCaseToCamelCase.cjs');
 const import_meta = {};
 const getContentDeclarationFileTemplate = async (key, format, fileParams = {}) => {
   const dirname = __dirname ?? (0, import_url.fileURLToPath)(import_meta.url);
@@ -35,7 +35,7 @@ const getContentDeclarationFileTemplate = async (key, format, fileParams = {}) =
     fileTemplate = "./cjsTemplate.md";
   }
   const fileContent = await (0, import_promises.readFile)((0, import_path.join)(dirname, fileTemplate), "utf-8");
-  const camelCaseKey = (0,
+  const camelCaseKey = (0, import_kebabCaseToCamelCase.kebabCaseToCamelCase)(key);
   const nonCapitalizedCamelCaseKey = camelCaseKey.charAt(0).toLowerCase() + camelCaseKey.slice(1);
   const fileParmsString = Object.entries(fileParams).filter(([, value]) => value !== void 0).map(([key2, value]) => {
     if (typeof value === "object") {
package/dist/cjs/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.cjs.map
CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.ts"], ...} (previous single-line sourcemap, truncated in the diff view)
+ {"version":3,"sources":["../../../src/getContentDeclarationFileTemplate/getContentDeclarationFileTemplate.ts"], ...} (regenerated single-line sourcemap; the embedded source now imports kebabCaseToCamelCase from '../utils/kebabCaseToCamelCase' instead of '../utils')
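Note: kebabCaseToCamelCase moves from the '../utils' barrel into its own module and is re-exported from the package root (see the index.cjs diff below). An illustrative use, mirroring how the template code applies it; the sample key is hypothetical and the exact casing behaviour is defined in ./utils/kebabCaseToCamelCase:

  import { kebabCaseToCamelCase } from '@intlayer/chokidar';

  const camelCaseKey = kebabCaseToCamelCase('app-navigation-menu');
  // The template code lower-cases the first character itself:
  const nonCapitalizedCamelCaseKey =
    camelCaseKey.charAt(0).toLowerCase() + camelCaseKey.slice(1);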
package/dist/cjs/index.cjs
CHANGED
@@ -29,12 +29,14 @@ __export(index_exports, {
   generateDictionaryListContent: () => import_dictionary_to_main.generateDictionaryListContent,
   getBuiltDictionariesPath: () => import_getBuiltDictionariesPath.getBuiltDictionariesPath,
   getBuiltUnmergedDictionariesPath: () => import_getBuiltUnmergedDictionariesPath.getBuiltUnmergedDictionariesPath,
-  getFileHash: () =>
+  getFileHash: () => import_getFileHash.getFileHash,
   getFilteredLocalesContent: () => import_getFilteredLocalesContent.getFilteredLocalesContent,
   handleAdditionalContentDeclarationFile: () => import_watcher.handleAdditionalContentDeclarationFile,
   handleContentDeclarationFileChange: () => import_watcher.handleContentDeclarationFileChange,
+  kebabCaseToCamelCase: () => import_kebabCaseToCamelCase.kebabCaseToCamelCase,
   listDictionaries: () => import_listDictionariesPath.listDictionaries,
   listGitFiles: () => import_listGitFiles.listGitFiles,
+  listGitLines: () => import_listGitFiles.listGitLines,
   loadDictionaries: () => import_loadDictionaries.loadDictionaries,
   loadDistantDictionaries: () => import_loadDictionaries.loadDistantDictionaries,
   loadLocalDictionaries: () => import_loadDictionaries.loadLocalDictionaries,
@@ -43,6 +45,9 @@ __export(index_exports, {
   prepareIntlayer: () => import_prepareIntlayer.prepareIntlayer,
   processPerLocaleDictionary: () => import_processPerLocaleDictionary.processPerLocaleDictionary,
   reduceDictionaryContent: () => import_reduceDictionaryContent.reduceDictionaryContent,
+  resolveObjectPromises: () => import_resolveObjectPromises.resolveObjectPromises,
+  runOnce: () => import_runOnce.runOnce,
+  sortAlphabetically: () => import_sortAlphabetically.sortAlphabetically,
   watch: () => import_watcher.watch,
   writeContentDeclaration: () => import_writeContentDeclaration.writeContentDeclaration
 });
@@ -66,7 +71,11 @@ var import_reduceDictionaryContent = require('./reduceDictionaryContent/reduceDi
 var import_declaration_file_to_dictionary = require('./transpiler/declaration_file_to_dictionary/index.cjs');
 var import_dictionary_to_main = require('./transpiler/dictionary_to_main/index.cjs');
 var import_createModuleAugmentation = require('./transpiler/dictionary_to_type/createModuleAugmentation.cjs');
-var
+var import_getFileHash = require('./utils/getFileHash.cjs');
+var import_kebabCaseToCamelCase = require('./utils/kebabCaseToCamelCase.cjs');
+var import_resolveObjectPromises = require('./utils/resolveObjectPromises.cjs');
+var import_runOnce = require('./utils/runOnce.cjs');
+var import_sortAlphabetically = require('./utils/sortAlphabetically.cjs');
 var import_writeContentDeclaration = require('./writeContentDeclaration/index.cjs');
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
@@ -85,8 +94,10 @@ var import_writeContentDeclaration = require('./writeContentDeclaration/index.cj
   getFilteredLocalesContent,
   handleAdditionalContentDeclarationFile,
   handleContentDeclarationFileChange,
+  kebabCaseToCamelCase,
   listDictionaries,
   listGitFiles,
+  listGitLines,
   loadDictionaries,
   loadDistantDictionaries,
   loadLocalDictionaries,
@@ -95,6 +106,9 @@ var import_writeContentDeclaration = require('./writeContentDeclaration/index.cj
   prepareIntlayer,
   processPerLocaleDictionary,
   reduceDictionaryContent,
+  resolveObjectPromises,
+  runOnce,
+  sortAlphabetically,
   watch,
   writeContentDeclaration
 });
package/dist/cjs/index.cjs.map
CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/index.ts"], ...} (previous single-line sourcemap, truncated in the diff view)
+ {"version":3,"sources":["../../src/index.ts"], ...} (regenerated single-line sourcemap; the embedded src/index.ts now also re-exports listGitLines, kebabCaseToCamelCase, resolveObjectPromises, runOnce and sortAlphabetically, and imports getFileHash from './utils/getFileHash')
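Note: after this change the package entry point re-exports the relocated and newly added utilities, so consumers can import them directly from the root:

  import {
    getFileHash,
    kebabCaseToCamelCase,
    listGitFiles,
    listGitLines,
    resolveObjectPromises,
    runOnce,
    sortAlphabetically,
  } from '@intlayer/chokidar';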
package/dist/cjs/listGitFiles.cjs
CHANGED
@@ -28,11 +28,13 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var listGitFiles_exports = {};
 __export(listGitFiles_exports, {
-  listGitFiles: () => listGitFiles
+  listGitFiles: () => listGitFiles,
+  listGitLines: () => listGitLines
 });
 module.exports = __toCommonJS(listGitFiles_exports);
 var import_config = require("@intlayer/config");
 var import_built = __toESM(require("@intlayer/config/built"));
+var import_fs = require("fs");
 var import_path = require("path");
 var import_simple_git = __toESM(require("simple-git"));
 const getGitRootDir = async () => {
@@ -93,8 +95,73 @@ const listGitFiles = async ({
     console.warn("Failed to get changes list:", error);
   }
 };
+const listGitLines = async (filePath, {
+  mode,
+  baseRef = "origin/main",
+  currentRef = "HEAD"
+  // HEAD points to the current branch's latest commit
+}) => {
+  const git = (0, import_simple_git.default)();
+  const changedLines = /* @__PURE__ */ new Set();
+  const collectLinesFromDiff = (diffOutput) => {
+    const hunkRegex = /@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/g;
+    let match;
+    while ((match = hunkRegex.exec(diffOutput)) !== null) {
+      const oldCount = match[2] ? Number(match[2]) : 1;
+      const newStart = Number(match[3]);
+      const newCount = match[4] ? Number(match[4]) : 1;
+      if (newCount > 0) {
+        for (let i = 0; i < newCount; i++) {
+          changedLines.add(newStart + i);
+        }
+      }
+      if (oldCount > 0 && newCount === 0) {
+        if (newStart > 1) {
+          changedLines.add(newStart - 1);
+        }
+        changedLines.add(newStart);
+      }
+    }
+  };
+  if (mode.includes("untracked")) {
+    const status = await git.status();
+    const isUntracked = status.not_added.includes(filePath);
+    if (isUntracked) {
+      try {
+        const content = (0, import_fs.readFileSync)(filePath, "utf-8");
+        content.split("\n").forEach((_, idx) => changedLines.add(idx + 1));
+      } catch {
+      }
+    }
+  }
+  if (mode.includes("uncommitted")) {
+    const diffOutput = await git.diff(["--unified=0", "HEAD", "--", filePath]);
+    collectLinesFromDiff(diffOutput);
+  }
+  if (mode.includes("unpushed")) {
+    const diffOutput = await git.diff([
+      "--unified=0",
+      "@{push}...HEAD",
+      "--",
+      filePath
+    ]);
+    collectLinesFromDiff(diffOutput);
+  }
+  if (mode.includes("gitDiff")) {
+    await git.fetch(baseRef);
+    const diffOutput = await git.diff([
+      "--unified=0",
+      `${baseRef}...${currentRef}`,
+      "--",
+      filePath
+    ]);
+    collectLinesFromDiff(diffOutput);
+  }
+  return Array.from(changedLines).sort((a, b) => a - b);
+};
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
-  listGitFiles
+  listGitFiles,
+  listGitLines
 });
 //# sourceMappingURL=listGitFiles.cjs.map
package/dist/cjs/listGitFiles.cjs.map
CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/listGitFiles.ts"], ...} (previous single-line sourcemap)
+ {"version":3,"sources":["../../src/listGitFiles.ts"], ...} (regenerated single-line sourcemap; the embedded listGitFiles.ts now also defines ListGitLinesOptions and the listGitLines helper shown above, plus a readFileSync import from 'fs')
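Note: listGitLines is the main functional addition of this release. Given a file path and the same DiffMode values used by listGitFiles, it returns the sorted, deduplicated 1-based line numbers touched by untracked, uncommitted, unpushed, or branch-diff changes. A minimal usage sketch (the file path and the resulting numbers are illustrative):

  import { listGitLines } from '@intlayer/chokidar';

  // Lines changed in the working tree plus lines that differ from origin/main.
  const changedLines = await listGitLines('src/home.content.ts', {
    mode: ['uncommitted', 'gitDiff'],
    baseRef: 'origin/main',
    currentRef: 'HEAD',
  });
  // e.g. [3, 4, 12, 27]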
package/dist/cjs/loadDictionaries/loadDictionaries.cjs
CHANGED
@@ -24,7 +24,7 @@ module.exports = __toCommonJS(loadDictionaries_exports);
 var import_config = require("@intlayer/config");
 var import_fetchDistantDictionaryKeys = require('../fetchDistantDictionaryKeys.cjs');
 var import_log = require('../log.cjs');
-var
+var import_sortAlphabetically = require('../utils/sortAlphabetically.cjs');
 var import_loadContentDeclaration = require('./loadContentDeclaration.cjs');
 var import_loadDistantDictionaries = require('./loadDistantDictionaries.cjs');
 const loadDictionaries = async (contentDeclarationsPaths, configuration = (0, import_config.getConfiguration)(), projectRequire = import_config.ESMxCJSRequire) => {
@@ -37,10 +37,23 @@ const loadDictionaries = async (contentDeclarationsPaths, configuration = (0, im
       files,
       projectRequire
     );
-    const
+    const filteredLocalDictionaries = localDictionaries.filter((dict) => {
+      const hasKey = Boolean(dict.key);
+      const hasContent = Boolean(dict.content);
+      if (!hasContent) {
+        console.error(
+          "Content declaration has no exported content",
+          dict.filePath
+        );
+      } else if (!hasKey) {
+        console.error("Content declaration has no key", dict.filePath);
+      }
+      return hasKey && hasContent;
+    });
+    const localDictionaryKeys = filteredLocalDictionaries.map((dict) => dict.key).filter(Boolean);
     import_log.logger.init(localDictionaryKeys, []);
     import_log.logger.updateStatus(
-
+      filteredLocalDictionaries.map((dict) => ({
        dictionaryKey: dict.key,
        type: "local",
        status: { status: "built" }
@@ -51,7 +64,7 @@ const loadDictionaries = async (contentDeclarationsPaths, configuration = (0, im
     if (editor.clientId && editor.clientSecret) {
       try {
         distantDictionaryKeys = await (0, import_fetchDistantDictionaryKeys.fetchDistantDictionaryKeys)();
-        const orderedDistantDictionaryKeys = distantDictionaryKeys.sort(
+        const orderedDistantDictionaryKeys = distantDictionaryKeys.sort(import_sortAlphabetically.sortAlphabetically);
         import_log.logger.addDictionaryKeys("distant", orderedDistantDictionaryKeys);
         distantDictionaries = await (0, import_loadDistantDictionaries.loadDistantDictionaries)({
           dictionaryKeys: orderedDistantDictionaryKeys
@@ -63,7 +76,7 @@ const loadDictionaries = async (contentDeclarationsPaths, configuration = (0, im
       }
     }
     import_log.logger.stop();
-    return [...
+    return [...filteredLocalDictionaries, ...distantDictionaries];
   } catch (error) {
     import_log.logger.stop();
     throw error;
package/dist/cjs/loadDictionaries/loadDictionaries.cjs.map
CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../../src/loadDictionaries/loadDictionaries.ts"], ...} (previous single-line sourcemap, truncated in the diff view)
+ {"version":3,"sources":["../../../src/loadDictionaries/loadDictionaries.ts"], ...} (regenerated single-line sourcemap; the embedded source now imports sortAlphabetically from '../utils/sortAlphabetically' and filters out local dictionaries that lack a key or exported content before logging and merging)
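Note: loadDictionaries now drops local content declarations that export no content or declare no key, logging the offending filePath instead of passing the broken entry along. The predicate is equivalent to the following TypeScript, adapted from the source embedded in the updated sourcemap:

  import type { Dictionary } from '@intlayer/core';

  const keepDeclaration = (dict: Dictionary): boolean => {
    const hasKey = Boolean(dict.key);
    const hasContent = Boolean(dict.content);

    if (!hasContent) {
      console.error('Content declaration has no exported content', dict.filePath);
    } else if (!hasKey) {
      console.error('Content declaration has no key', dict.filePath);
    }

    return hasKey && hasContent;
  };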
package/dist/cjs/log.cjs
CHANGED
@@ -31,9 +31,9 @@ __export(log_exports, {
   logger: () => logger
 });
 module.exports = __toCommonJS(log_exports);
-var import_readline = __toESM(require("readline"));
 var import_config = require("@intlayer/config");
-var
+var import_readline = __toESM(require("readline"));
+var import_sortAlphabetically = require('./utils/sortAlphabetically.cjs');
 const LINE_DETECTOR = "\u200B\u200B\u200B";
 const SPINNER_FRAMES = ["\u280B", "\u2819", "\u2839", "\u2838", "\u283C", "\u2834", "\u2826", "\u2827", "\u2807", "\u280F"];
 const RESET = "\x1B[0m";
@@ -67,7 +67,7 @@ class Logger {
     const allKeys = Array.from(
       new Set(
         [...localDictionariesKeys, ...distantDictionariesKeys].sort(
-
+          import_sortAlphabetically.sortAlphabetically
         )
       )
     );
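Note: the logger now pulls sortAlphabetically from its dedicated ./utils/sortAlphabetically module rather than the old ./utils barrel. Assuming the plain ascending string comparison its name implies, usage looks like:

  import { sortAlphabetically } from '@intlayer/chokidar';

  const ordered = ['ui', 'app', 'footer'].sort(sortAlphabetically);
  // expected: ['app', 'footer', 'ui']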
package/dist/cjs/log.cjs.map
CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../../src/log.ts"], ...} (previous single-line sourcemap; its embedded log.ts imported sortAlphabetically from './utils')
% SPINNER_FRAMES.length];\n colorStart = BLUE;\n colorEnd = RESET;\n } else if (state.status === 'error') {\n colorStart = RED;\n colorEnd = RESET;\n } else if (state.status === 'imported' || state.status === 'built') {\n colorStart = GREEN;\n colorEnd = RESET;\n } else {\n colorStart = GREY;\n colorEnd = RESET;\n }\n\n // Format the status block\n const statusBlock = `${GREY_DARK}[${state.type}: ${colorStart}${icon} ${state.status}${GREY_DARK}]${colorEnd}`;\n\n return `${colorStart}${statusBlock}${colorEnd}`;\n });\n\n return `${this.config?.log.prefix}- ${paddedKey} ${states.join(' ')}`;\n }\n\n // Replace logUpdate calls with your custom methods\n private updateAllStatusLines() {\n const terminalHeight = process.stdout.rows;\n const maxVisibleLines = terminalHeight - 1;\n\n const lines = this.dictionariesStatuses.map((statusObj) => {\n statusObj.state.forEach((state) => {\n if (state.status === 'fetching') {\n // Update spinner frame\n state.spinnerFrameIndex =\n (state.spinnerFrameIndex! + 1) % SPINNER_FRAMES.length;\n }\n });\n return this.getStatusLine(statusObj);\n });\n\n let content;\n\n if (lines.length > maxVisibleLines) {\n const visibleLines = lines.slice(0, maxVisibleLines - 5);\n const summary = `${this.config?.log.prefix}... and ${lines.length - visibleLines.length} more`;\n content = LINE_DETECTOR + visibleLines.join('\\n') + '\\n' + summary;\n } else {\n content = lines.join('\\n');\n }\n\n this.updateOutput(content);\n }\n\n public getStatuses() {\n return this.dictionariesStatuses;\n }\n}\n\nexport const logger = Logger.getInstance();\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAqB;AACrB,oBAAsD;AACtD,mBAAmC;AAgBnC,MAAM,gBAAgB;AACtB,MAAM,iBAAiB,CAAC,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,QAAG;AAGxE,MAAM,QAAQ;AACd,MAAM,QAAQ;AACd,MAAM,MAAM;AACZ,MAAM,OAAO;AACb,MAAM,OAAO;AACb,MAAM,YAAY;AAElB,MAAM,OAAO;AAAA,EACH,uBAA6C,CAAC;AAAA,EAC9C,eAAsC;AAAA,EACtC,yBAAiC;AAAA,EACjC,oBAA4B;AAAA,EAC5B,iBAAyB;AAAA,EACzB,sBAMG;AAAA,EACH,aAAqB;AAAA,EACrB,aAAsB;AAAA,EACtB;AAAA;AAAA,EAGR,OAAe;AAAA,EAEP,cAAc;AAAA,EAAC;AAAA,EAEvB,OAAc,cAAsB;AAClC,QAAI,CAAC,OAAO,UAAU;AACpB,aAAO,WAAW,IAAI,OAAO;AAAA,IAC/B;AACA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEO,KACL,uBACA,yBACA,oBAAgC,gCAAiB,GACjD;AACA,SAAK,SAAS;AAEd,UAAM,UAAU,MAAM;AAAA,MACpB,IAAI;AAAA,QACF,CAAC,GAAG,uBAAuB,GAAG,uBAAuB,EAAE;AAAA,UACrD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,SAAK,yBAAyB,QAAQ;AAAA,MACpC,CAAC,KAAK,QAAS,IAAI,SAAS,MAAM,IAAI,SAAS;AAAA,MAC/C;AAAA,IACF;AAEA,SAAK,uBAAuB,QAAQ,IAAI,CAAC,kBAAkB;AACzD,YAAM,SAAkB,CAAC;AAEzB,UAAI,sBAAsB,SAAS,aAAa,GAAG;AACjD,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB,CAAC;AAAA,MACH;AACA,UAAI,wBAAwB,SAAS,aAAa,GAAG;AACnD,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,QACL;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF,CAAC;AAGD,SAAK,aAAa;AAGlB,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA,EAGO,kBACL,MACA,gBACA;AACA,eAAW,iBAAiB,gBAAgB;AAE1C,UAAI,cAAc,SAAS,KAAK,wBAAwB;AACtD,aAAK,yBAAyB,cAAc;AAAA,MAC9C;AAEA,UAAI,YAAY,KAAK,qBAAqB;AAAA,QACxC,CAAC,OAAO,GAAG,kBAAkB;AAAA,MAC/B;AAEA,UAAI,CAAC,WAAW;AAGd,oBAAY;AAAA,UACV;AAAA,UACA,OAAO,CAAC;AAAA,QACV;AACA,aAAK,qBAAqB,KAAK,SAAS;AAExC,cAAM,WAAkB;AAAA,UACtB;AAAA,UACA,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB;AACA,kBAAU,MAAM,KAAK,QAAQ;AAAA,MAC/B,OAAO;AACL,cAAM,gBAAgB,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AACjE,YAAI,CAAC,eAAe;AAGlB,gBAAM,WAAkB;AAAA,YACtB;AAAA,YACA,QAAQ;AAAA,YACR,mBAAmB;AAAA,UACrB;AACA,oBAAU,MAAM,KAAK,QAAQ;AAAA,QAC/B;AAAA,MACF;AAAA,IACF;AAGA,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEQ,eAAe;AACrB,QAAI,CAAC,KA
AK,cAAc;AACtB,WAAK,eAAe,YAAY,MAAM;AACpC,aAAK,qBAAqB;AAAA,MAC5B,GAAG,GAAG;AAAA,IACR;AAAA,EACF;AAAA,EAEQ,cAAc;AACpB,QAAI,KAAK,cAAc;AACrB,oBAAc,KAAK,YAAY;AAC/B,WAAK,eAAe;AAAA,IACtB;AAAA,EACF;AAAA;AAAA,EAGQ,aAAa,SAAiB;AACpC,QAAI,KAAK,QAAQ,IAAI,SAAS,UAAW;AAGzC,QAAI,CAAC,KAAK,qBAAqB;AAC7B,WAAK,sBAAsB,QAAQ,OAAO,MAAM,KAAK,QAAQ,MAAM;AACnE,WAAK,aAAa;AAElB,YAAM,QAAQ,CACZ,OACA,UACA,aACG;AACH,cAAM,MAAM,OAAO,UAAU,WAAW,QAAQ,MAAM,SAAS;AAC/D,cAAM,YAAY,IAAI,MAAM,KAAK,KAAK,CAAC,GAAG;AAG1C,YAAI,CAAC,KAAK,YAAY;AACpB,eAAK,cAAc;AAAA,QACrB;AAEA,eAAO,KAAK,oBAAqB,OAAO,UAAU,QAAQ;AAAA,MAC5D;AAEA,cAAQ,OAAO,QAAQ;AAAA,IACzB;AAGA,SAAK,aAAa;AAGlB,UAAM,eAAe,QAAQ,MAAM,IAAI;AACvC,UAAM,sBAAsB,aAAa,QAAQ,cAAc,KAAK,CAAC;AAErE,QAAI,sBAAsB,GAAG;AAE3B,WAAK,iBAAiB;AAAA,IACxB,OAAO;AACL,WAAK,iBAAiB;AAAA,IACxB;AAGA,UAAM,qBACJ,KAAK,oBAAoB,KAAK,iBAAiB,KAAK;AAGtD,oBAAAA,QAAS,WAAW,QAAQ,QAAQ,GAAG,CAAC,kBAAkB;AAG1D,oBAAAA,QAAS,gBAAgB,QAAQ,MAAM;AAGvC,iBAAa,QAAQ,CAAC,SAAS;AAC7B,cAAQ,OAAO,MAAM,GAAG,IAAI;AAAA,CAAU;AAAA,IACxC,CAAC;AAGD,SAAK,oBAAoB,aAAa;AAGtC,SAAK,aAAa;AAClB,SAAK,aAAa;AAAA,EACpB;AAAA,EAEO,OAAO;AACZ,SAAK,YAAY;AAAA,EACnB;AAAA,EAEO,aACL,cAKA;AACA,eAAW,EAAE,eAAe,MAAM,OAAO,KAAK,cAAc;AAC1D,YAAM,YAAY,KAAK,qBAAqB;AAAA,QAC1C,CAAC,OAAO,GAAG,kBAAkB;AAAA,MAC/B;AACA,UAAI,WAAW;AACb,cAAM,QAAQ,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AACzD,YAAI,OAAO;AAET,iBAAO,OAAO,OAAO,MAAM;AAAA,QAC7B,OAAO;AAEL,cAAI,OAAO,WAAW,QAAW;AAC/B,mBAAO,SAAS;AAAA,UAClB;AACA,gBAAM,WAAkB;AAAA,YACtB;AAAA,YACA,QAAQ,OAAO;AAAA,YACf,MAAM,OAAO,QAAQ;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,cAAc,OAAO;AAAA,YACrB,mBAAmB,OAAO,qBAAqB;AAAA,UACjD;AACA,oBAAU,MAAM,KAAK,QAAQ;AAAA,QAC/B;AAAA,MACF,OAAO;AAEL,cAAM,WAAkB;AAAA,UACtB;AAAA,UACA,QAAQ,OAAO,UAAU;AAAA,UACzB,MAAM,OAAO,QAAQ;AAAA,UACrB,OAAO,OAAO;AAAA,UACd,cAAc,OAAO;AAAA,UACrB,mBAAmB,OAAO,qBAAqB;AAAA,QACjD;AACA,aAAK,qBAAqB,KAAK;AAAA,UAC7B;AAAA,UACA,OAAO,CAAC,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEQ,cAAc,QAAwB;AAC5C,UAAM,cAAsC;AAAA,MAC1C,SAAS;AAAA,MACT,UAAU;AAAA;AAAA,MACV,OAAO;AAAA,MACP,UAAU;AAAA,MACV,OAAO;AAAA,IACT;AACA,WAAO,YAAY,MAAM,KAAK;AAAA,EAChC;AAAA,EAEQ,cAAc,WAAuC;AAC3D,UAAM,aACJ,KAAK,yBAAyB,UAAU,cAAc;AACxD,UAAM,YAAY,GAAG,UAAU,aAAa,GAAG,IAAI,OAAO,UAAU,CAAC;AAErE,UAAM,SAAS,UAAU,MAAM,IAAI,CAAC,UAAU;AAC5C,UAAI,aAAa;AACjB,UAAI,WAAW;AACf,UAAI,OAAO,KAAK,cAAc,MAAM,MAAM;AAC1C,UAAI,MAAM,WAAW,YAAY;AAE/B,eAAO,eAAe,MAAM,oBAAqB,eAAe,MAAM;AACtE,qBAAa;AACb,mBAAW;AAAA,MACb,WAAW,MAAM,WAAW,SAAS;AACnC,qBAAa;AACb,mBAAW;AAAA,MACb,WAAW,MAAM,WAAW,cAAc,MAAM,WAAW,SAAS;AAClE,qBAAa;AACb,mBAAW;AAAA,MACb,OAAO;AACL,qBAAa;AACb,mBAAW;AAAA,MACb;AAGA,YAAM,cAAc,GAAG,SAAS,IAAI,MAAM,IAAI,KAAK,UAAU,GAAG,IAAI,IAAI,MAAM,MAAM,GAAG,SAAS,IAAI,QAAQ;AAE5G,aAAO,GAAG,UAAU,GAAG,WAAW,GAAG,QAAQ;AAAA,IAC/C,CAAC;AAED,WAAO,GAAG,KAAK,QAAQ,IAAI,MAAM,KAAK,SAAS,IAAI,OAAO,KAAK,GAAG,CAAC;AAAA,EACrE;AAAA;AAAA,EAGQ,uBAAuB;AAC7B,UAAM,iBAAiB,QAAQ,OAAO;AACtC,UAAM,kBAAkB,iBAAiB;AAEzC,UAAM,QAAQ,KAAK,qBAAqB,IAAI,CAAC,cAAc;AACzD,gBAAU,MAAM,QAAQ,CAAC,UAAU;AACjC,YAAI,MAAM,WAAW,YAAY;AAE/B,gBAAM,qBACH,MAAM,oBAAqB,KAAK,eAAe;AAAA,QACpD;AAAA,MACF,CAAC;AACD,aAAO,KAAK,cAAc,SAAS;AAAA,IACrC,CAAC;AAED,QAAI;AAEJ,QAAI,MAAM,SAAS,iBAAiB;AAClC,YAAM,eAAe,MAAM,MAAM,GAAG,kBAAkB,CAAC;AACvD,YAAM,UAAU,GAAG,KAAK,QAAQ,IAAI,MAAM,WAAW,MAAM,SAAS,aAAa,MAAM;AACvF,gBAAU,gBAAgB,aAAa,KAAK,IAAI,IAAI,OAAO;AAAA,IAC7D,OAAO;AACL,gBAAU,MAAM,KAAK,IAAI;AAAA,IAC3B;AAEA,SAAK,aAAa,OAAO;AAAA,EAC3B;AAAA,EAEO,cAAc;AACnB,WAAO,KAAK;AAAA,EACd;AACF;AAEO,MAAM,SAAS,OAAO,YAAY;","names":["readline"]}
+
{"version":3,"sources":["../../src/log.ts"],"sourcesContent":["import { type IntlayerConfig, getConfiguration } from '@intlayer/config';\nimport readline from 'readline';\nimport { sortAlphabetically } from './utils/sortAlphabetically';\n\nexport type State = {\n type: 'local' | 'distant';\n status: 'pending' | 'fetching' | 'fetched' | 'error' | 'imported' | 'built';\n icon?: string;\n error?: Error;\n errorMessage?: string;\n spinnerFrameIndex?: number;\n};\n\nexport type DictionariesStatus = {\n dictionaryKey: string;\n state: State[];\n};\n\nconst LINE_DETECTOR = '\\u200B\\u200B\\u200B'; // Three zero-width spaces\nconst SPINNER_FRAMES = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];\n\n// ANSI color codes\nconst RESET = '\\x1b[0m';\nconst GREEN = '\\x1b[32m';\nconst RED = '\\x1b[31m';\nconst BLUE = '\\x1b[34m';\nconst GREY = '\\x1b[90m';\nconst GREY_DARK = '\\x1b[90m';\n\nclass Logger {\n private dictionariesStatuses: DictionariesStatus[] = [];\n private spinnerTimer: NodeJS.Timeout | null = null;\n private maxDictionaryKeyLength: number = 0;\n private previousLineCount: number = 0;\n private lineDifCounter: number = 0;\n private originalStdoutWrite:\n | ((\n chunk: string | Uint8Array, // `chunk` can be either a string or a Uint8Array\n encoding?: BufferEncoding, // `encoding` is optional and should be a BufferEncoding\n callback?: (err?: Error | null) => void // `callback` is optional and a function\n ) => boolean)\n | null = null;\n private extraLines: number = 0;\n private isUpdating: boolean = false;\n private config?: IntlayerConfig;\n\n // Singleton instance\n private static instance: Logger;\n\n private constructor() {}\n\n public static getInstance(): Logger {\n if (!Logger.instance) {\n Logger.instance = new Logger();\n }\n return Logger.instance;\n }\n\n public init(\n localDictionariesKeys: string[],\n distantDictionariesKeys: string[],\n configuration: IntlayerConfig = getConfiguration()\n ) {\n this.config = configuration;\n\n const allKeys = Array.from(\n new Set(\n [...localDictionariesKeys, ...distantDictionariesKeys].sort(\n sortAlphabetically\n )\n )\n );\n\n this.maxDictionaryKeyLength = allKeys.reduce(\n (max, key) => (key.length > max ? 
key.length : max),\n 0\n );\n\n this.dictionariesStatuses = allKeys.map((dictionaryKey) => {\n const states: State[] = [];\n\n if (localDictionariesKeys.includes(dictionaryKey)) {\n states.push({\n type: 'local',\n status: 'pending',\n spinnerFrameIndex: 0,\n });\n }\n if (distantDictionariesKeys.includes(dictionaryKey)) {\n states.push({\n type: 'distant',\n status: 'pending',\n spinnerFrameIndex: 0,\n });\n }\n\n return {\n dictionaryKey,\n state: states,\n };\n });\n\n // Start spinner timer\n this.startSpinner();\n\n // Update all status lines (this will output the initial statuses)\n this.updateAllStatusLines();\n }\n\n // New method to add dictionary keys after initialization\n public addDictionaryKeys(\n type: 'local' | 'distant',\n dictionaryKeys: string[]\n ) {\n for (const dictionaryKey of dictionaryKeys) {\n // Update maxDictionaryKeyLength if the new key is longer\n if (dictionaryKey.length > this.maxDictionaryKeyLength) {\n this.maxDictionaryKeyLength = dictionaryKey.length;\n }\n\n let statusObj = this.dictionariesStatuses.find(\n (ds) => ds.dictionaryKey === dictionaryKey\n );\n\n if (!statusObj) {\n // If the dictionaryKey doesn't exist yet, create a new DictionariesStatus\n\n statusObj = {\n dictionaryKey,\n state: [],\n };\n this.dictionariesStatuses.push(statusObj);\n\n const newState: State = {\n type,\n status: 'pending',\n spinnerFrameIndex: 0,\n };\n statusObj.state.push(newState);\n } else {\n const existingState = statusObj.state.find((s) => s.type === type);\n if (!existingState) {\n // Add new state for the type\n\n const newState: State = {\n type,\n status: 'pending',\n spinnerFrameIndex: 0,\n };\n statusObj.state.push(newState);\n }\n }\n }\n\n // Call updateAllStatusLines() to refresh the output\n this.updateAllStatusLines();\n }\n\n private startSpinner() {\n if (!this.spinnerTimer) {\n this.spinnerTimer = setInterval(() => {\n this.updateAllStatusLines();\n }, 100); // Update every 100ms\n }\n }\n\n private stopSpinner() {\n if (this.spinnerTimer) {\n clearInterval(this.spinnerTimer);\n this.spinnerTimer = null;\n }\n }\n\n // Method to update the terminal output\n private updateOutput(content: string) {\n if (this.config?.log.mode !== 'verbose') return;\n\n // Monkey-patch process.stdout.write to keep track of extra lines\n if (!this.originalStdoutWrite) {\n this.originalStdoutWrite = process.stdout.write.bind(process.stdout);\n this.extraLines = 0;\n\n const write = (\n chunk: string | Uint8Array, // `chunk` can be either a string or a Uint8Array\n encoding?: BufferEncoding, // `encoding` is optional and should be a BufferEncoding\n callback?: (err?: Error | null) => void // `callback` is optional and a function\n ) => {\n const str = typeof chunk === 'string' ? chunk : chunk.toString();\n const newLines = (str.match(/\\n/g) ?? 
[]).length;\n\n // If the write is not initiated by Logger's updateOutput method\n if (!this.isUpdating) {\n this.extraLines += newLines;\n }\n\n return this.originalStdoutWrite!(chunk, encoding, callback);\n };\n\n process.stdout.write = write as typeof process.stdout.write;\n }\n\n // Set a flag to indicate that updateOutput is running\n this.isUpdating = true;\n\n // Adjust lineDifCounter if LINE_DETECTOR is not the first line\n const contentLines = content.split('\\n');\n const indexOfLineDetector = contentLines.indexOf(LINE_DETECTOR.trim());\n\n if (indexOfLineDetector > 0) {\n // LINE_DETECTOR is not at the first line\n this.lineDifCounter = indexOfLineDetector;\n } else {\n this.lineDifCounter = 0;\n }\n\n // Calculate total lines to move up\n const totalLinesToMoveUp =\n this.previousLineCount + this.lineDifCounter + this.extraLines;\n\n // Move cursor up by totalLinesToMoveUp\n readline.moveCursor(process.stdout, 0, -totalLinesToMoveUp);\n\n // Clear all lines downwards\n readline.clearScreenDown(process.stdout);\n\n // Write the updated content\n contentLines.forEach((line) => {\n process.stdout.write(`${line}\\x1b[K\\n`);\n });\n\n // Update previousLineCount\n this.previousLineCount = contentLines.length;\n\n // Reset extraLines counter and updating flag\n this.extraLines = 0;\n this.isUpdating = false;\n }\n\n public stop() {\n this.stopSpinner();\n }\n\n public updateStatus(\n dictionaries: {\n dictionaryKey: string;\n type: 'local' | 'distant';\n status: Partial<State>;\n }[]\n ) {\n for (const { dictionaryKey, type, status } of dictionaries) {\n const statusObj = this.dictionariesStatuses.find(\n (ds) => ds.dictionaryKey === dictionaryKey\n );\n if (statusObj) {\n const state = statusObj.state.find((s) => s.type === type);\n if (state) {\n // Update existing state\n Object.assign(state, status);\n } else {\n // If the state for this type doesn't exist yet, add it\n if (status.status === undefined) {\n status.status = 'pending'; // Provide default status\n }\n const newState: State = {\n type,\n status: status.status,\n icon: status.icon ?? '',\n error: status.error,\n errorMessage: status.errorMessage,\n spinnerFrameIndex: status.spinnerFrameIndex ?? 0,\n };\n statusObj.state.push(newState);\n }\n } else {\n // If the status object doesn't exist, create it\n const newState: State = {\n type,\n status: status.status ?? 'pending',\n icon: status.icon ?? '',\n error: status.error,\n errorMessage: status.errorMessage,\n spinnerFrameIndex: status.spinnerFrameIndex ?? 0,\n };\n this.dictionariesStatuses.push({\n dictionaryKey,\n state: [newState],\n });\n }\n }\n\n // Update the display after status change\n this.updateAllStatusLines();\n }\n\n private getStatusIcon(status: string): string {\n const statusIcons: Record<string, string> = {\n pending: '⏲',\n fetching: '', // Spinner handled separately\n built: '✔',\n imported: '✔',\n error: '✖',\n };\n return statusIcons[status] ?? '';\n }\n\n private getStatusLine(statusObj: DictionariesStatus): string {\n const keyPadding =\n this.maxDictionaryKeyLength - statusObj.dictionaryKey.length;\n const paddedKey = `${statusObj.dictionaryKey}${' '.repeat(keyPadding)}`;\n\n const states = statusObj.state.map((state) => {\n let colorStart = '';\n let colorEnd = '';\n let icon = this.getStatusIcon(state.status);\n if (state.status === 'fetching') {\n // Use spinner frame\n icon = SPINNER_FRAMES[state.spinnerFrameIndex! 
% SPINNER_FRAMES.length];\n colorStart = BLUE;\n colorEnd = RESET;\n } else if (state.status === 'error') {\n colorStart = RED;\n colorEnd = RESET;\n } else if (state.status === 'imported' || state.status === 'built') {\n colorStart = GREEN;\n colorEnd = RESET;\n } else {\n colorStart = GREY;\n colorEnd = RESET;\n }\n\n // Format the status block\n const statusBlock = `${GREY_DARK}[${state.type}: ${colorStart}${icon} ${state.status}${GREY_DARK}]${colorEnd}`;\n\n return `${colorStart}${statusBlock}${colorEnd}`;\n });\n\n return `${this.config?.log.prefix}- ${paddedKey} ${states.join(' ')}`;\n }\n\n // Replace logUpdate calls with your custom methods\n private updateAllStatusLines() {\n const terminalHeight = process.stdout.rows;\n const maxVisibleLines = terminalHeight - 1;\n\n const lines = this.dictionariesStatuses.map((statusObj) => {\n statusObj.state.forEach((state) => {\n if (state.status === 'fetching') {\n // Update spinner frame\n state.spinnerFrameIndex =\n (state.spinnerFrameIndex! + 1) % SPINNER_FRAMES.length;\n }\n });\n return this.getStatusLine(statusObj);\n });\n\n let content;\n\n if (lines.length > maxVisibleLines) {\n const visibleLines = lines.slice(0, maxVisibleLines - 5);\n const summary = `${this.config?.log.prefix}... and ${lines.length - visibleLines.length} more`;\n content = LINE_DETECTOR + visibleLines.join('\\n') + '\\n' + summary;\n } else {\n content = lines.join('\\n');\n }\n\n this.updateOutput(content);\n }\n\n public getStatuses() {\n return this.dictionariesStatuses;\n }\n}\n\nexport const logger = Logger.getInstance();\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAsD;AACtD,sBAAqB;AACrB,gCAAmC;AAgBnC,MAAM,gBAAgB;AACtB,MAAM,iBAAiB,CAAC,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,UAAK,QAAG;AAGxE,MAAM,QAAQ;AACd,MAAM,QAAQ;AACd,MAAM,MAAM;AACZ,MAAM,OAAO;AACb,MAAM,OAAO;AACb,MAAM,YAAY;AAElB,MAAM,OAAO;AAAA,EACH,uBAA6C,CAAC;AAAA,EAC9C,eAAsC;AAAA,EACtC,yBAAiC;AAAA,EACjC,oBAA4B;AAAA,EAC5B,iBAAyB;AAAA,EACzB,sBAMG;AAAA,EACH,aAAqB;AAAA,EACrB,aAAsB;AAAA,EACtB;AAAA;AAAA,EAGR,OAAe;AAAA,EAEP,cAAc;AAAA,EAAC;AAAA,EAEvB,OAAc,cAAsB;AAClC,QAAI,CAAC,OAAO,UAAU;AACpB,aAAO,WAAW,IAAI,OAAO;AAAA,IAC/B;AACA,WAAO,OAAO;AAAA,EAChB;AAAA,EAEO,KACL,uBACA,yBACA,oBAAgC,gCAAiB,GACjD;AACA,SAAK,SAAS;AAEd,UAAM,UAAU,MAAM;AAAA,MACpB,IAAI;AAAA,QACF,CAAC,GAAG,uBAAuB,GAAG,uBAAuB,EAAE;AAAA,UACrD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,SAAK,yBAAyB,QAAQ;AAAA,MACpC,CAAC,KAAK,QAAS,IAAI,SAAS,MAAM,IAAI,SAAS;AAAA,MAC/C;AAAA,IACF;AAEA,SAAK,uBAAuB,QAAQ,IAAI,CAAC,kBAAkB;AACzD,YAAM,SAAkB,CAAC;AAEzB,UAAI,sBAAsB,SAAS,aAAa,GAAG;AACjD,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB,CAAC;AAAA,MACH;AACA,UAAI,wBAAwB,SAAS,aAAa,GAAG;AACnD,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,QACL;AAAA,QACA,OAAO;AAAA,MACT;AAAA,IACF,CAAC;AAGD,SAAK,aAAa;AAGlB,SAAK,qBAAqB;AAAA,EAC5B;AAAA;AAAA,EAGO,kBACL,MACA,gBACA;AACA,eAAW,iBAAiB,gBAAgB;AAE1C,UAAI,cAAc,SAAS,KAAK,wBAAwB;AACtD,aAAK,yBAAyB,cAAc;AAAA,MAC9C;AAEA,UAAI,YAAY,KAAK,qBAAqB;AAAA,QACxC,CAAC,OAAO,GAAG,kBAAkB;AAAA,MAC/B;AAEA,UAAI,CAAC,WAAW;AAGd,oBAAY;AAAA,UACV;AAAA,UACA,OAAO,CAAC;AAAA,QACV;AACA,aAAK,qBAAqB,KAAK,SAAS;AAExC,cAAM,WAAkB;AAAA,UACtB;AAAA,UACA,QAAQ;AAAA,UACR,mBAAmB;AAAA,QACrB;AACA,kBAAU,MAAM,KAAK,QAAQ;AAAA,MAC/B,OAAO;AACL,cAAM,gBAAgB,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AACjE,YAAI,CAAC,eAAe;AAGlB,gBAAM,WAAkB;AAAA,YACtB;AAAA,YACA,QAAQ;AAAA,YACR,mBAAmB;AAAA,UACrB;AACA,oBAAU,MAAM,KAAK,QAAQ;AAAA,QAC/B;AAAA,MACF;AAAA,IACF;AAGA,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEQ,eAAe;AACrB,QAAI,CAAC,KA
AK,cAAc;AACtB,WAAK,eAAe,YAAY,MAAM;AACpC,aAAK,qBAAqB;AAAA,MAC5B,GAAG,GAAG;AAAA,IACR;AAAA,EACF;AAAA,EAEQ,cAAc;AACpB,QAAI,KAAK,cAAc;AACrB,oBAAc,KAAK,YAAY;AAC/B,WAAK,eAAe;AAAA,IACtB;AAAA,EACF;AAAA;AAAA,EAGQ,aAAa,SAAiB;AACpC,QAAI,KAAK,QAAQ,IAAI,SAAS,UAAW;AAGzC,QAAI,CAAC,KAAK,qBAAqB;AAC7B,WAAK,sBAAsB,QAAQ,OAAO,MAAM,KAAK,QAAQ,MAAM;AACnE,WAAK,aAAa;AAElB,YAAM,QAAQ,CACZ,OACA,UACA,aACG;AACH,cAAM,MAAM,OAAO,UAAU,WAAW,QAAQ,MAAM,SAAS;AAC/D,cAAM,YAAY,IAAI,MAAM,KAAK,KAAK,CAAC,GAAG;AAG1C,YAAI,CAAC,KAAK,YAAY;AACpB,eAAK,cAAc;AAAA,QACrB;AAEA,eAAO,KAAK,oBAAqB,OAAO,UAAU,QAAQ;AAAA,MAC5D;AAEA,cAAQ,OAAO,QAAQ;AAAA,IACzB;AAGA,SAAK,aAAa;AAGlB,UAAM,eAAe,QAAQ,MAAM,IAAI;AACvC,UAAM,sBAAsB,aAAa,QAAQ,cAAc,KAAK,CAAC;AAErE,QAAI,sBAAsB,GAAG;AAE3B,WAAK,iBAAiB;AAAA,IACxB,OAAO;AACL,WAAK,iBAAiB;AAAA,IACxB;AAGA,UAAM,qBACJ,KAAK,oBAAoB,KAAK,iBAAiB,KAAK;AAGtD,oBAAAA,QAAS,WAAW,QAAQ,QAAQ,GAAG,CAAC,kBAAkB;AAG1D,oBAAAA,QAAS,gBAAgB,QAAQ,MAAM;AAGvC,iBAAa,QAAQ,CAAC,SAAS;AAC7B,cAAQ,OAAO,MAAM,GAAG,IAAI;AAAA,CAAU;AAAA,IACxC,CAAC;AAGD,SAAK,oBAAoB,aAAa;AAGtC,SAAK,aAAa;AAClB,SAAK,aAAa;AAAA,EACpB;AAAA,EAEO,OAAO;AACZ,SAAK,YAAY;AAAA,EACnB;AAAA,EAEO,aACL,cAKA;AACA,eAAW,EAAE,eAAe,MAAM,OAAO,KAAK,cAAc;AAC1D,YAAM,YAAY,KAAK,qBAAqB;AAAA,QAC1C,CAAC,OAAO,GAAG,kBAAkB;AAAA,MAC/B;AACA,UAAI,WAAW;AACb,cAAM,QAAQ,UAAU,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,IAAI;AACzD,YAAI,OAAO;AAET,iBAAO,OAAO,OAAO,MAAM;AAAA,QAC7B,OAAO;AAEL,cAAI,OAAO,WAAW,QAAW;AAC/B,mBAAO,SAAS;AAAA,UAClB;AACA,gBAAM,WAAkB;AAAA,YACtB;AAAA,YACA,QAAQ,OAAO;AAAA,YACf,MAAM,OAAO,QAAQ;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,cAAc,OAAO;AAAA,YACrB,mBAAmB,OAAO,qBAAqB;AAAA,UACjD;AACA,oBAAU,MAAM,KAAK,QAAQ;AAAA,QAC/B;AAAA,MACF,OAAO;AAEL,cAAM,WAAkB;AAAA,UACtB;AAAA,UACA,QAAQ,OAAO,UAAU;AAAA,UACzB,MAAM,OAAO,QAAQ;AAAA,UACrB,OAAO,OAAO;AAAA,UACd,cAAc,OAAO;AAAA,UACrB,mBAAmB,OAAO,qBAAqB;AAAA,QACjD;AACA,aAAK,qBAAqB,KAAK;AAAA,UAC7B;AAAA,UACA,OAAO,CAAC,QAAQ;AAAA,QAClB,CAAC;AAAA,MACH;AAAA,IACF;AAGA,SAAK,qBAAqB;AAAA,EAC5B;AAAA,EAEQ,cAAc,QAAwB;AAC5C,UAAM,cAAsC;AAAA,MAC1C,SAAS;AAAA,MACT,UAAU;AAAA;AAAA,MACV,OAAO;AAAA,MACP,UAAU;AAAA,MACV,OAAO;AAAA,IACT;AACA,WAAO,YAAY,MAAM,KAAK;AAAA,EAChC;AAAA,EAEQ,cAAc,WAAuC;AAC3D,UAAM,aACJ,KAAK,yBAAyB,UAAU,cAAc;AACxD,UAAM,YAAY,GAAG,UAAU,aAAa,GAAG,IAAI,OAAO,UAAU,CAAC;AAErE,UAAM,SAAS,UAAU,MAAM,IAAI,CAAC,UAAU;AAC5C,UAAI,aAAa;AACjB,UAAI,WAAW;AACf,UAAI,OAAO,KAAK,cAAc,MAAM,MAAM;AAC1C,UAAI,MAAM,WAAW,YAAY;AAE/B,eAAO,eAAe,MAAM,oBAAqB,eAAe,MAAM;AACtE,qBAAa;AACb,mBAAW;AAAA,MACb,WAAW,MAAM,WAAW,SAAS;AACnC,qBAAa;AACb,mBAAW;AAAA,MACb,WAAW,MAAM,WAAW,cAAc,MAAM,WAAW,SAAS;AAClE,qBAAa;AACb,mBAAW;AAAA,MACb,OAAO;AACL,qBAAa;AACb,mBAAW;AAAA,MACb;AAGA,YAAM,cAAc,GAAG,SAAS,IAAI,MAAM,IAAI,KAAK,UAAU,GAAG,IAAI,IAAI,MAAM,MAAM,GAAG,SAAS,IAAI,QAAQ;AAE5G,aAAO,GAAG,UAAU,GAAG,WAAW,GAAG,QAAQ;AAAA,IAC/C,CAAC;AAED,WAAO,GAAG,KAAK,QAAQ,IAAI,MAAM,KAAK,SAAS,IAAI,OAAO,KAAK,GAAG,CAAC;AAAA,EACrE;AAAA;AAAA,EAGQ,uBAAuB;AAC7B,UAAM,iBAAiB,QAAQ,OAAO;AACtC,UAAM,kBAAkB,iBAAiB;AAEzC,UAAM,QAAQ,KAAK,qBAAqB,IAAI,CAAC,cAAc;AACzD,gBAAU,MAAM,QAAQ,CAAC,UAAU;AACjC,YAAI,MAAM,WAAW,YAAY;AAE/B,gBAAM,qBACH,MAAM,oBAAqB,KAAK,eAAe;AAAA,QACpD;AAAA,MACF,CAAC;AACD,aAAO,KAAK,cAAc,SAAS;AAAA,IACrC,CAAC;AAED,QAAI;AAEJ,QAAI,MAAM,SAAS,iBAAiB;AAClC,YAAM,eAAe,MAAM,MAAM,GAAG,kBAAkB,CAAC;AACvD,YAAM,UAAU,GAAG,KAAK,QAAQ,IAAI,MAAM,WAAW,MAAM,SAAS,aAAa,MAAM;AACvF,gBAAU,gBAAgB,aAAa,KAAK,IAAI,IAAI,OAAO;AAAA,IAC7D,OAAO;AACL,gBAAU,MAAM,KAAK,IAAI;AAAA,IAC3B;AAEA,SAAK,aAAa,OAAO;AAAA,EAC3B;AAAA,EAEO,cAAc;AACnB,WAAO,KAAK;AAAA,EACd;AACF;AAEO,MAAM,SAAS,OAAO,YAAY;","names":["readline"]}
package/dist/cjs/prepareIntlayer.cjs
CHANGED
@@ -29,9 +29,11 @@ var import_declaration_file_to_dictionary = require('./transpiler/declaration_fi
 var import_createDictionaryEntryPoint = require('./transpiler/dictionary_to_main/createDictionaryEntryPoint.cjs');
 var import_dictionary_to_type = require('./transpiler/dictionary_to_type/index.cjs');
 var import_writeConfiguration = require('./writeConfiguration/index.cjs');
-const prepareIntlayer = async (configuration = (0, import_config.getConfiguration)(), projectRequire = import_config.ESMxCJSRequire) => {
+const prepareIntlayer = async (configuration = (0, import_config.getConfiguration)(), projectRequire = import_config.ESMxCJSRequire, clean = false) => {
   const appLogger = (0, import_config.getAppLogger)(configuration);
-
+  if (clean) {
+    (0, import_cleanOutputDir.cleanOutputDir)(configuration);
+  }
   const files = (0, import_listDictionariesPath.listDictionaries)(configuration);
   const dictionaries = await (0, import_loadDictionaries.loadDictionaries)(
     files,
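This hunk adds a third parameter, clean = false, and gates the previously unconditional cleanOutputDir(configuration) call behind if (clean) { ... }, so the output directory is no longer wiped on every run. A minimal usage sketch, assuming prepareIntlayer is re-exported from the package entry point as its TypeScript source suggests:

```ts
import { ESMxCJSRequire, getConfiguration } from '@intlayer/config';
// Assumption: prepareIntlayer is re-exported from the package root.
import { prepareIntlayer } from '@intlayer/chokidar';

const configuration = getConfiguration();

// New default: the output directory is kept between builds.
await prepareIntlayer(configuration, ESMxCJSRequire);

// Pass the new clean flag to restore the pre-5.7 behavior of clearing it first.
await prepareIntlayer(configuration, ESMxCJSRequire, true);
```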
package/dist/cjs/prepareIntlayer.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/prepareIntlayer.ts"],"sourcesContent":["import {\n ESMxCJSRequire,\n type IntlayerConfig,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport { cleanOutputDir } from './cleanOutputDir';\nimport { listDictionaries } from './listDictionariesPath';\nimport { loadDictionaries } from './loadDictionaries/loadDictionaries';\nimport { buildDictionary } from './transpiler/declaration_file_to_dictionary/index';\nimport { createDictionaryEntryPoint } from './transpiler/dictionary_to_main/createDictionaryEntryPoint';\nimport {\n createModuleAugmentation,\n createTypes,\n} from './transpiler/dictionary_to_type/index';\nimport { writeConfiguration } from './writeConfiguration';\n\nexport const prepareIntlayer = async (\n configuration: IntlayerConfig = getConfiguration(),\n projectRequire = ESMxCJSRequire\n) => {\n const appLogger = getAppLogger(configuration);\n\n cleanOutputDir(configuration);\n\n const files: string[] = listDictionaries(configuration);\n\n const dictionaries = await loadDictionaries(\n files,\n configuration,\n projectRequire\n );\n\n // Build locale dictionaries\n const dictionariesOutput = await buildDictionary(dictionaries, configuration);\n\n const dictionariesPaths = Object.values(\n dictionariesOutput?.mergedDictionaries ?? {}\n ).map((dictionary) => dictionary.dictionaryPath);\n\n createTypes(dictionariesPaths);\n\n createDictionaryEntryPoint(configuration);\n\n appLogger('Dictionaries built');\n\n createModuleAugmentation(configuration);\n\n appLogger('Module augmentation built', {\n isVerbose: true,\n });\n\n writeConfiguration(configuration);\n\n appLogger('Configuration written', {\n isVerbose: true,\n });\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAKO;AACP,4BAA+B;AAC/B,kCAAiC;AACjC,8BAAiC;AACjC,4CAAgC;AAChC,wCAA2C;AAC3C,gCAGO;AACP,gCAAmC;AAE5B,MAAM,kBAAkB,OAC7B,oBAAgC,gCAAiB,GACjD,iBAAiB,
+
{"version":3,"sources":["../../src/prepareIntlayer.ts"],"sourcesContent":["import {\n ESMxCJSRequire,\n type IntlayerConfig,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport { cleanOutputDir } from './cleanOutputDir';\nimport { listDictionaries } from './listDictionariesPath';\nimport { loadDictionaries } from './loadDictionaries/loadDictionaries';\nimport { buildDictionary } from './transpiler/declaration_file_to_dictionary/index';\nimport { createDictionaryEntryPoint } from './transpiler/dictionary_to_main/createDictionaryEntryPoint';\nimport {\n createModuleAugmentation,\n createTypes,\n} from './transpiler/dictionary_to_type/index';\nimport { writeConfiguration } from './writeConfiguration';\n\nexport const prepareIntlayer = async (\n configuration: IntlayerConfig = getConfiguration(),\n projectRequire = ESMxCJSRequire,\n clean = false\n) => {\n const appLogger = getAppLogger(configuration);\n\n if (clean) {\n cleanOutputDir(configuration);\n }\n\n const files: string[] = listDictionaries(configuration);\n\n const dictionaries = await loadDictionaries(\n files,\n configuration,\n projectRequire\n );\n\n // Build locale dictionaries\n const dictionariesOutput = await buildDictionary(dictionaries, configuration);\n\n const dictionariesPaths = Object.values(\n dictionariesOutput?.mergedDictionaries ?? {}\n ).map((dictionary) => dictionary.dictionaryPath);\n\n createTypes(dictionariesPaths);\n\n createDictionaryEntryPoint(configuration);\n\n appLogger('Dictionaries built');\n\n createModuleAugmentation(configuration);\n\n appLogger('Module augmentation built', {\n isVerbose: true,\n });\n\n writeConfiguration(configuration);\n\n appLogger('Configuration written', {\n isVerbose: true,\n });\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAKO;AACP,4BAA+B;AAC/B,kCAAiC;AACjC,8BAAiC;AACjC,4CAAgC;AAChC,wCAA2C;AAC3C,gCAGO;AACP,gCAAmC;AAE5B,MAAM,kBAAkB,OAC7B,oBAAgC,gCAAiB,GACjD,iBAAiB,8BACjB,QAAQ,UACL;AACH,QAAM,gBAAY,4BAAa,aAAa;AAE5C,MAAI,OAAO;AACT,8CAAe,aAAa;AAAA,EAC9B;AAEA,QAAM,YAAkB,8CAAiB,aAAa;AAEtD,QAAM,eAAe,UAAM;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAGA,QAAM,qBAAqB,UAAM,uDAAgB,cAAc,aAAa;AAE5E,QAAM,oBAAoB,OAAO;AAAA,IAC/B,oBAAoB,sBAAsB,CAAC;AAAA,EAC7C,EAAE,IAAI,CAAC,eAAe,WAAW,cAAc;AAE/C,6CAAY,iBAAiB;AAE7B,oEAA2B,aAAa;AAExC,YAAU,oBAAoB;AAE9B,0DAAyB,aAAa;AAEtC,YAAU,6BAA6B;AAAA,IACrC,WAAW;AAAA,EACb,CAAC;AAED,oDAAmB,aAAa;AAEhC,YAAU,yBAAyB;AAAA,IACjC,WAAW;AAAA,EACb,CAAC;AACH;","names":[]}
package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.cjs
CHANGED
@@ -21,9 +21,9 @@ __export(buildI18nextDictionary_exports, {
   buildI18nDictionary: () => buildI18nDictionary
 });
 module.exports = __toCommonJS(buildI18nextDictionary_exports);
+var import_config = require("@intlayer/config");
 var import_convertContentDeclarationInto18nDictionaries = require('./convertContentDeclarationInto18nDictionaries.cjs');
 var import_writeDictionary = require('./writeDictionary.cjs');
-var import_config = require("@intlayer/config");
 const buildI18nDictionary = async (contentDeclarations, configuration = (0, import_config.getConfiguration)()) => {
   const dictionariesDeclaration = contentDeclarations.reduce((acc, dictionary) => {
     const { key, content } = dictionary;
package/dist/cjs/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.ts"],"sourcesContent":["import type { Dictionary } from '@intlayer/core';\nimport { createI18nextDictionaries } from './convertContentDeclarationInto18nDictionaries';\nimport {\n writeDictionary,\n type DictionariesDeclaration,\n} from './writeDictionary';\
+
{"version":3,"sources":["../../../../../src/transpiler/declaration_file_to_dictionary/i18next_dictionary/buildI18nextDictionary.ts"],"sourcesContent":["import { getConfiguration } from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/core';\nimport { createI18nextDictionaries } from './convertContentDeclarationInto18nDictionaries';\nimport {\n writeDictionary,\n type DictionariesDeclaration,\n} from './writeDictionary';\n\n/**\n * This function transpile content declaration to i18n dictionaries\n */\nexport const buildI18nDictionary = async (\n contentDeclarations: Dictionary[],\n configuration = getConfiguration()\n) => {\n // Create dictionaries for each nested content and format them\n const dictionariesDeclaration: DictionariesDeclaration =\n contentDeclarations.reduce((acc, dictionary) => {\n const { key, content } = dictionary;\n const i18Content = createI18nextDictionaries(content, configuration);\n\n return {\n ...acc,\n [key]: i18Content,\n };\n }, {});\n\n // Write the dictionaries to the file system\n const dictionariesPaths: string[] = await writeDictionary(\n dictionariesDeclaration,\n configuration\n );\n\n return dictionariesPaths;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAiC;AAEjC,0DAA0C;AAC1C,6BAGO;AAKA,MAAM,sBAAsB,OACjC,qBACA,oBAAgB,gCAAiB,MAC9B;AAEH,QAAM,0BACJ,oBAAoB,OAAO,CAAC,KAAK,eAAe;AAC9C,UAAM,EAAE,KAAK,QAAQ,IAAI;AACzB,UAAM,iBAAa,+EAA0B,SAAS,aAAa;AAEnE,WAAO;AAAA,MACL,GAAG;AAAA,MACH,CAAC,GAAG,GAAG;AAAA,IACT;AAAA,EACF,GAAG,CAAC,CAAC;AAGP,QAAM,oBAA8B,UAAM;AAAA,IACxC;AAAA,IACA;AAAA,EACF;AAEA,SAAO;AACT;","names":[]}
package/dist/cjs/transpiler/dictionary_to_main/generateDictionaryListContent.cjs
CHANGED
@@ -23,7 +23,7 @@ __export(generateDictionaryListContent_exports, {
 module.exports = __toCommonJS(generateDictionaryListContent_exports);
 var import_config = require("@intlayer/config");
 var import_path = require("path");
-var
+var import_getFileHash = require('../../utils/getFileHash.cjs');
 const generateDictionaryListContent = (dictionaries, format = "esm", configuration = (0, import_config.getConfiguration)()) => {
   const { mainDir } = configuration.content;
   let content = "";
@@ -31,7 +31,7 @@ const generateDictionaryListContent = (dictionaries, format = "esm", configurati
     relativePath: (0, import_config.normalizePath)((0, import_path.relative)(mainDir, dictionaryPath)),
     id: (0, import_path.basename)(dictionaryPath, (0, import_path.extname)(dictionaryPath)),
     // Get the base name as the dictionary id
-    hash: `_${(0,
+    hash: `_${(0, import_getFileHash.getFileHash)(dictionaryPath)}`
     // Get the hash of the dictionary to avoid conflicts
   }));
   dictionariesRef.forEach((dictionary) => {
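generateDictionaryListContent now pulls getFileHash from the dedicated '../../utils/getFileHash' module in place of the former './utils' barrel import. The helper's implementation is not included in this hunk, so the sketch below only illustrates the role it plays: deriving a short, stable suffix from the dictionary path so each generated import binding is unique.

```ts
import { createHash } from 'crypto';

// Hypothetical stand-in for utils/getFileHash; the real implementation is not
// shown in this diff, so assume some short digest of the file path.
const getFileHash = (filePath: string): string =>
  createHash('sha256').update(filePath).digest('hex').slice(0, 12);

// As in the generated dictionary list: `_<hash>` becomes the identifier
// bound to each imported dictionary JSON.
const dictionaryPath = './.intlayer/dictionary/home.json';
const binding = `_${getFileHash(dictionaryPath)}`;
console.log(binding); // e.g. "_a1b2c3d4e5f6"
```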
package/dist/cjs/transpiler/dictionary_to_main/generateDictionaryListContent.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../../../src/transpiler/dictionary_to_main/generateDictionaryListContent.ts"],"sourcesContent":["import { getConfiguration, normalizePath } from '@intlayer/config';\nimport { basename, extname, relative } from 'path';\nimport { getFileHash } from '../../utils';\n\n/**\n * This function generates the content of the dictionary list file\n */\nexport const generateDictionaryListContent = (\n dictionaries: string[],\n format: 'cjs' | 'esm' = 'esm',\n configuration = getConfiguration()\n): string => {\n const { mainDir } = configuration.content;\n\n let content = '';\n\n const dictionariesRef = dictionaries.map((dictionaryPath) => ({\n relativePath: normalizePath(relative(mainDir, dictionaryPath)),\n id: basename(dictionaryPath, extname(dictionaryPath)), // Get the base name as the dictionary id\n hash: `_${getFileHash(dictionaryPath)}`, // Get the hash of the dictionary to avoid conflicts\n }));\n\n // Import all dictionaries\n dictionariesRef.forEach((dictionary) => {\n if (format === 'esm')\n content += `import ${dictionary.hash} from '${dictionary.relativePath}' with { type: 'json' };\\n`;\n if (format === 'cjs')\n content += `const ${dictionary.hash} = require('${dictionary.relativePath}');\\n`;\n });\n\n content += '\\n';\n\n // Format Dictionary Map\n const formattedDictionaryMap: string = dictionariesRef\n .map((dictionary) => ` \"${dictionary.id}\": ${dictionary.hash}`)\n .join(',\\n');\n\n if (format === 'esm')\n content += `export default {\\n${formattedDictionaryMap}\\n};\\n`;\n if (format === 'cjs')\n content += `module.exports = {\\n${formattedDictionaryMap}\\n};\\n`;\n\n return content;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAgD;AAChD,kBAA4C;AAC5C,
+
{"version":3,"sources":["../../../../src/transpiler/dictionary_to_main/generateDictionaryListContent.ts"],"sourcesContent":["import { getConfiguration, normalizePath } from '@intlayer/config';\nimport { basename, extname, relative } from 'path';\nimport { getFileHash } from '../../utils/getFileHash';\n\n/**\n * This function generates the content of the dictionary list file\n */\nexport const generateDictionaryListContent = (\n dictionaries: string[],\n format: 'cjs' | 'esm' = 'esm',\n configuration = getConfiguration()\n): string => {\n const { mainDir } = configuration.content;\n\n let content = '';\n\n const dictionariesRef = dictionaries.map((dictionaryPath) => ({\n relativePath: normalizePath(relative(mainDir, dictionaryPath)),\n id: basename(dictionaryPath, extname(dictionaryPath)), // Get the base name as the dictionary id\n hash: `_${getFileHash(dictionaryPath)}`, // Get the hash of the dictionary to avoid conflicts\n }));\n\n // Import all dictionaries\n dictionariesRef.forEach((dictionary) => {\n if (format === 'esm')\n content += `import ${dictionary.hash} from '${dictionary.relativePath}' with { type: 'json' };\\n`;\n if (format === 'cjs')\n content += `const ${dictionary.hash} = require('${dictionary.relativePath}');\\n`;\n });\n\n content += '\\n';\n\n // Format Dictionary Map\n const formattedDictionaryMap: string = dictionariesRef\n .map((dictionary) => ` \"${dictionary.id}\": ${dictionary.hash}`)\n .join(',\\n');\n\n if (format === 'esm')\n content += `export default {\\n${formattedDictionaryMap}\\n};\\n`;\n if (format === 'cjs')\n content += `module.exports = {\\n${formattedDictionaryMap}\\n};\\n`;\n\n return content;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAgD;AAChD,kBAA4C;AAC5C,yBAA4B;AAKrB,MAAM,gCAAgC,CAC3C,cACA,SAAwB,OACxB,oBAAgB,gCAAiB,MACtB;AACX,QAAM,EAAE,QAAQ,IAAI,cAAc;AAElC,MAAI,UAAU;AAEd,QAAM,kBAAkB,aAAa,IAAI,CAAC,oBAAoB;AAAA,IAC5D,kBAAc,iCAAc,sBAAS,SAAS,cAAc,CAAC;AAAA,IAC7D,QAAI,sBAAS,oBAAgB,qBAAQ,cAAc,CAAC;AAAA;AAAA,IACpD,MAAM,QAAI,gCAAY,cAAc,CAAC;AAAA;AAAA,EACvC,EAAE;AAGF,kBAAgB,QAAQ,CAAC,eAAe;AACtC,QAAI,WAAW;AACb,iBAAW,UAAU,WAAW,IAAI,UAAU,WAAW,YAAY;AAAA;AACvE,QAAI,WAAW;AACb,iBAAW,SAAS,WAAW,IAAI,eAAe,WAAW,YAAY;AAAA;AAAA,EAC7E,CAAC;AAED,aAAW;AAGX,QAAM,yBAAiC,gBACpC,IAAI,CAAC,eAAe,MAAM,WAAW,EAAE,MAAM,WAAW,IAAI,EAAE,EAC9D,KAAK,KAAK;AAEb,MAAI,WAAW;AACb,eAAW;AAAA,EAAqB,sBAAsB;AAAA;AAAA;AACxD,MAAI,WAAW;AACb,eAAW;AAAA,EAAuB,sBAAsB;AAAA;AAAA;AAE1D,SAAO;AACT;","names":[]}