@intlayer/cli 7.5.0-canary.1 → 7.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/IntlayerEventListener.cjs +8 -8
- package/dist/cjs/IntlayerEventListener.cjs.map +1 -1
- package/dist/cjs/auth/login.cjs +16 -16
- package/dist/cjs/auth/login.cjs.map +1 -1
- package/dist/cjs/build.cjs +5 -5
- package/dist/cjs/build.cjs.map +1 -1
- package/dist/cjs/cli.cjs +3 -3
- package/dist/cjs/cli.cjs.map +1 -1
- package/dist/cjs/config.cjs +3 -3
- package/dist/cjs/config.cjs.map +1 -1
- package/dist/cjs/editor.cjs +2 -2
- package/dist/cjs/editor.cjs.map +1 -1
- package/dist/cjs/fill/fill.cjs +12 -12
- package/dist/cjs/fill/fill.cjs.map +1 -1
- package/dist/cjs/fill/listTranslationsTasks.cjs +15 -15
- package/dist/cjs/fill/listTranslationsTasks.cjs.map +1 -1
- package/dist/cjs/fill/translateDictionary.cjs +36 -36
- package/dist/cjs/fill/translateDictionary.cjs.map +1 -1
- package/dist/cjs/fill/writeFill.cjs +11 -11
- package/dist/cjs/fill/writeFill.cjs.map +1 -1
- package/dist/cjs/getTargetDictionary.cjs +6 -6
- package/dist/cjs/getTargetDictionary.cjs.map +1 -1
- package/dist/cjs/listContentDeclaration.cjs +9 -9
- package/dist/cjs/listContentDeclaration.cjs.map +1 -1
- package/dist/cjs/liveSync.cjs +16 -16
- package/dist/cjs/liveSync.cjs.map +1 -1
- package/dist/cjs/pull.cjs +14 -14
- package/dist/cjs/pull.cjs.map +1 -1
- package/dist/cjs/push/pullLog.cjs +6 -6
- package/dist/cjs/push/pullLog.cjs.map +1 -1
- package/dist/cjs/push/push.cjs +23 -23
- package/dist/cjs/push/push.cjs.map +1 -1
- package/dist/cjs/pushConfig.cjs +5 -5
- package/dist/cjs/pushConfig.cjs.map +1 -1
- package/dist/cjs/pushLog.cjs +6 -6
- package/dist/cjs/pushLog.cjs.map +1 -1
- package/dist/cjs/reviewDoc.cjs +13 -13
- package/dist/cjs/reviewDoc.cjs.map +1 -1
- package/dist/cjs/reviewDocBlockAware.cjs +19 -19
- package/dist/cjs/reviewDocBlockAware.cjs.map +1 -1
- package/dist/cjs/test/listMissingTranslations.cjs +9 -9
- package/dist/cjs/test/listMissingTranslations.cjs.map +1 -1
- package/dist/cjs/test/test.cjs +22 -22
- package/dist/cjs/test/test.cjs.map +1 -1
- package/dist/cjs/transform.cjs +8 -8
- package/dist/cjs/transform.cjs.map +1 -1
- package/dist/cjs/translateDoc.cjs +29 -29
- package/dist/cjs/translateDoc.cjs.map +1 -1
- package/dist/cjs/utils/calculateChunks.cjs +4 -4
- package/dist/cjs/utils/calculateChunks.cjs.map +1 -1
- package/dist/cjs/utils/checkAccess.cjs +13 -13
- package/dist/cjs/utils/checkAccess.cjs.map +1 -1
- package/dist/cjs/utils/chunkInference.cjs +4 -4
- package/dist/cjs/utils/chunkInference.cjs.map +1 -1
- package/dist/cjs/utils/mapChunksBetweenFiles.cjs +4 -4
- package/dist/cjs/utils/mapChunksBetweenFiles.cjs.map +1 -1
- package/dist/cjs/utils/setupAI.cjs +5 -5
- package/dist/cjs/utils/setupAI.cjs.map +1 -1
- package/dist/cjs/watch.cjs +26 -5
- package/dist/cjs/watch.cjs.map +1 -1
- package/dist/esm/auth/login.mjs +1 -1
- package/dist/esm/auth/login.mjs.map +1 -1
- package/dist/esm/watch.mjs +23 -2
- package/dist/esm/watch.mjs.map +1 -1
- package/dist/types/pull.d.ts.map +1 -1
- package/package.json +12 -12
package/dist/cjs/listContentDeclaration.cjs
CHANGED
@@ -1,34 +1,34 @@
 const require_rolldown_runtime = require('./_virtual/rolldown_runtime.cjs');
-let
-let
+let _intlayer_chokidar = require("@intlayer/chokidar");
+let _intlayer_config = require("@intlayer/config");
 let node_path = require("node:path");
-let
+let _intlayer_unmerged_dictionaries_entry = require("@intlayer/unmerged-dictionaries-entry");
 
 //#region src/listContentDeclaration.ts
 const listContentDeclarationRows = (options) => {
-const config = (0,
-const unmergedDictionariesRecord = (0,
+const config = (0, _intlayer_config.getConfiguration)(options?.configOptions);
+const unmergedDictionariesRecord = (0, _intlayer_unmerged_dictionaries_entry.getUnmergedDictionaries)(config);
 return Object.values(unmergedDictionariesRecord).flat().map((dictionary) => ({
 key: dictionary.key ?? "",
 path: (0, node_path.relative)(config.content.baseDir, dictionary.filePath ?? "Remote")
 }));
 };
 const listContentDeclaration = (options) => {
-const appLogger = (0,
+const appLogger = (0, _intlayer_config.getAppLogger)((0, _intlayer_config.getConfiguration)(options?.configOptions), { config: { prefix: "" } });
 const rows = listContentDeclarationRows(options);
 const lines = rows.map((row) => [
-(0,
+(0, _intlayer_config.colon)(` - ${(0, _intlayer_config.colorizeKey)(row.key)}`, {
 colSize: rows.map((row$1) => row$1.key.length),
 maxSize: 60
 }),
 " - ",
-(0,
+(0, _intlayer_chokidar.formatPath)(row.path)
 ].join(""));
 appLogger(`Content declaration files:`);
 lines.forEach((line) => {
 appLogger(line, { level: "info" });
 });
-appLogger(`Total content declaration files: ${(0,
+appLogger(`Total content declaration files: ${(0, _intlayer_config.colorizeNumber)(rows.length)}`);
 };
 
 //#endregion
package/dist/cjs/listContentDeclaration.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"listContentDeclaration.cjs","names":["row"],"sources":["../../src/listContentDeclaration.ts"],"sourcesContent":["import { relative } from 'node:path';\nimport { formatPath } from '@intlayer/chokidar';\nimport {\n colon,\n colorizeKey,\n colorizeNumber,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\n\ntype ListContentDeclarationOptions = {\n configOptions?: GetConfigurationOptions;\n};\n\nexport const listContentDeclarationRows = (\n options?: ListContentDeclarationOptions\n) => {\n const config = getConfiguration(options?.configOptions);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(config);\n\n const rows = Object.values(unmergedDictionariesRecord)\n .flat()\n .map((dictionary) => ({\n key: dictionary.key ?? '',\n path: relative(config.content.baseDir, dictionary.filePath ?? 'Remote'),\n }));\n return rows;\n};\n\nexport const listContentDeclaration = (\n options?: ListContentDeclarationOptions\n) => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config, {\n config: {\n prefix: '',\n },\n });\n\n const rows = listContentDeclarationRows(options);\n\n const lines = rows.map((row) =>\n [\n colon(` - ${colorizeKey(row.key)}`, {\n colSize: rows.map((row) => row.key.length),\n maxSize: 60,\n }),\n ' - ',\n formatPath(row.path),\n ].join('')\n );\n\n appLogger(`Content declaration files:`);\n\n lines.forEach((line) => {\n appLogger(line, {\n level: 'info',\n });\n });\n\n appLogger(`Total content declaration files: ${colorizeNumber(rows.length)}`);\n};\n"],"mappings":";;;;;;;AAgBA,MAAa,8BACX,YACG;CACH,MAAM,
+
{"version":3,"file":"listContentDeclaration.cjs","names":["row"],"sources":["../../src/listContentDeclaration.ts"],"sourcesContent":["import { relative } from 'node:path';\nimport { formatPath } from '@intlayer/chokidar';\nimport {\n colon,\n colorizeKey,\n colorizeNumber,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\n\ntype ListContentDeclarationOptions = {\n configOptions?: GetConfigurationOptions;\n};\n\nexport const listContentDeclarationRows = (\n options?: ListContentDeclarationOptions\n) => {\n const config = getConfiguration(options?.configOptions);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(config);\n\n const rows = Object.values(unmergedDictionariesRecord)\n .flat()\n .map((dictionary) => ({\n key: dictionary.key ?? '',\n path: relative(config.content.baseDir, dictionary.filePath ?? 'Remote'),\n }));\n return rows;\n};\n\nexport const listContentDeclaration = (\n options?: ListContentDeclarationOptions\n) => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config, {\n config: {\n prefix: '',\n },\n });\n\n const rows = listContentDeclarationRows(options);\n\n const lines = rows.map((row) =>\n [\n colon(` - ${colorizeKey(row.key)}`, {\n colSize: rows.map((row) => row.key.length),\n maxSize: 60,\n }),\n ' - ',\n formatPath(row.path),\n ].join('')\n );\n\n appLogger(`Content declaration files:`);\n\n lines.forEach((line) => {\n appLogger(line, {\n level: 'info',\n });\n });\n\n appLogger(`Total content declaration files: ${colorizeNumber(rows.length)}`);\n};\n"],"mappings":";;;;;;;AAgBA,MAAa,8BACX,YACG;CACH,MAAM,gDAA0B,SAAS,cAAc;CAEvD,MAAM,gGAAqD,OAAO;AAQlE,QANa,OAAO,OAAO,2BAA2B,CACnD,MAAM,CACN,KAAK,gBAAgB;EACpB,KAAK,WAAW,OAAO;EACvB,8BAAe,OAAO,QAAQ,SAAS,WAAW,YAAY,SAAS;EACxE,EAAE;;AAIP,MAAa,0BACX,YACG;CAEH,MAAM,sFAD0B,SAAS,cAAc,EAChB,EACrC,QAAQ,EACN,QAAQ,IACT,EACF,CAAC;CAEF,MAAM,OAAO,2BAA2B,QAAQ;CAEhD,MAAM,QAAQ,KAAK,KAAK,QACtB;8BACQ,wCAAkB,IAAI,IAAI,IAAI;GAClC,SAAS,KAAK,KAAK,UAAQA,MAAI,IAAI,OAAO;GAC1C,SAAS;GACV,CAAC;EACF;qCACW,IAAI,KAAK;EACrB,CAAC,KAAK,GAAG,CACX;AAED,WAAU,6BAA6B;AAEvC,OAAM,SAAS,SAAS;AACtB,YAAU,MAAM,EACd,OAAO,QACR,CAAC;GACF;AAEF,WAAU,yEAAmD,KAAK,OAAO,GAAG"}
package/dist/cjs/liveSync.cjs
CHANGED
@@ -1,27 +1,27 @@
 const require_rolldown_runtime = require('./_virtual/rolldown_runtime.cjs');
 const require_IntlayerEventListener = require('./IntlayerEventListener.cjs');
-let
-let
+let _intlayer_chokidar = require("@intlayer/chokidar");
+let _intlayer_config = require("@intlayer/config");
 let node_http = require("node:http");
-let
-let
-let
-let
-
+let _intlayer_unmerged_dictionaries_entry = require("@intlayer/unmerged-dictionaries-entry");
+let _intlayer_core = require("@intlayer/core");
+let _intlayer_dictionaries_entry = require("@intlayer/dictionaries-entry");
+let _intlayer_config_package_json = require("@intlayer/config/package.json");
+_intlayer_config_package_json = require_rolldown_runtime.__toESM(_intlayer_config_package_json);
 
 //#region src/liveSync.ts
 const writeDictionary = async (dictionary, configuration) => {
-(0,
-await (0,
+(0, _intlayer_config.getAppLogger)(configuration)(`Writing dictionary ${dictionary.key}`);
+await (0, _intlayer_chokidar.buildDictionary)([dictionary], configuration);
 };
 const liveSync = async (options) => {
-const configuration = (0,
-const appLogger = (0,
+const configuration = (0, _intlayer_config.getConfiguration)(options?.configOptions);
+const appLogger = (0, _intlayer_config.getAppLogger)(configuration);
 const { liveSyncPort, liveSyncURL } = configuration.editor;
 let parallelProcess = null;
 let eventListener = null;
 if (options?.with) {
-parallelProcess = (0,
+parallelProcess = (0, _intlayer_chokidar.runParallel)(options.with);
 parallelProcess.result.catch(() => {});
 }
 if (configuration.editor.liveSync && configuration.editor.backendURL && configuration.editor.clientId && configuration.editor.clientSecret) {
@@ -63,12 +63,12 @@ const liveSync = async (options) => {
 "Access-Control-Allow-Methods": "GET, POST, PUT, DELETE, OPTIONS",
 "Access-Control-Allow-Headers": "Content-Type, Authorization"
 });
-const dictionaries = (0,
+const dictionaries = (0, _intlayer_dictionaries_entry.getDictionaries)(configuration);
 if (req.url.startsWith("/dictionaries/")) {
 const [key, locale] = decodeURIComponent(req.url).slice(14).split("/");
 const dictionary = dictionaries[key] ?? null;
 if (locale) {
-const sourceLocaleContent = (0,
+const sourceLocaleContent = (0, _intlayer_core.getLocalizedContent)(dictionary.content, locale, {
 dictionaryKey: key,
 keyPath: []
 });
@@ -89,7 +89,7 @@ const liveSync = async (options) => {
 "Access-Control-Allow-Methods": "GET, POST, PUT, DELETE, OPTIONS",
 "Access-Control-Allow-Headers": "Content-Type, Authorization"
 });
-const unmergedDictionaries = (0,
+const unmergedDictionaries = (0, _intlayer_unmerged_dictionaries_entry.getUnmergedDictionaries)(configuration);
 if (req.url.startsWith("/unmerged_dictionaries/")) {
 const one = unmergedDictionaries[decodeURIComponent(req.url.slice(23))] ?? null;
 res.end(JSON.stringify(one));
@@ -122,7 +122,7 @@ const liveSync = async (options) => {
 };
 server.listen(liveSyncPort, () => {
 console.log(`
-\x1b[1;90mINTLAYER v${
+\x1b[1;90mINTLAYER v${_intlayer_config_package_json.default.version}\x1b[0m
 
 Live server running at: \x1b[90m${liveSyncURL}\x1b[0m
 - Backend URL: \x1b[90m${configuration.editor.backendURL ?? "-"}\x1b[0m
package/dist/cjs/liveSync.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"liveSync.cjs","names":["parallelProcess: ParallelHandle | null","eventListener: IntlayerEventListener | null","IntlayerEventListener","packageJson"],"sources":["../../src/liveSync.ts"],"sourcesContent":["import { createServer } from 'node:http';\n// @ts-ignore: @intlayer/backend is not built yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport {\n buildDictionary,\n type ParallelHandle,\n runParallel,\n} from '@intlayer/chokidar';\nimport type { GetConfigurationOptions } from '@intlayer/config';\nimport { getAppLogger, getConfiguration } from '@intlayer/config';\nimport packageJson from '@intlayer/config/package.json';\nimport { getLocalizedContent } from '@intlayer/core';\nimport { getDictionaries } from '@intlayer/dictionaries-entry';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport { IntlayerEventListener } from './IntlayerEventListener';\n\ntype LiveSyncOptions = {\n with?: string | string[];\n configOptions?: GetConfigurationOptions;\n};\n\nconst writeDictionary = async (\n dictionary: DictionaryAPI,\n configuration: IntlayerConfig\n) => {\n const appLogger = getAppLogger(configuration);\n appLogger(`Writing dictionary ${dictionary.key}`);\n await buildDictionary([dictionary], configuration);\n};\n\nexport const liveSync = async (options?: LiveSyncOptions) => {\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const { liveSyncPort, liveSyncURL } = configuration.editor;\n\n let parallelProcess: ParallelHandle | null = null;\n let eventListener: IntlayerEventListener | null = null;\n let _isHotReloadConnected = false;\n let _connectionStatus = 'disconnected'; // 'connected', 'connecting', 'reconnecting', 'disconnected', 'error'\n\n // Start the parallel process if provided\n if (options?.with) {\n parallelProcess = runParallel(options.with);\n // Handle the promise to avoid unhandled rejection\n parallelProcess.result.catch(() => {\n // Parallel process failed or was terminated\n });\n }\n\n // Initialize the event listener for hot reload if configured\n if (\n configuration.editor.liveSync &&\n configuration.editor.backendURL &&\n configuration.editor.clientId &&\n configuration.editor.clientSecret\n ) {\n eventListener = new IntlayerEventListener(configuration);\n _connectionStatus = 'connecting';\n\n // Set up connection callbacks\n eventListener.onConnectionOpen = () => {\n _connectionStatus = 'connected';\n _isHotReloadConnected = true;\n appLogger('Live sync connection established');\n };\n\n eventListener.onConnectionError = (error) => {\n _connectionStatus = 'error';\n _isHotReloadConnected = false;\n const errorEvent = error as any;\n appLogger(\n `Live sync connection error: ${errorEvent.message ?? 
'Unknown error'}`,\n {\n level: 'warn',\n }\n );\n\n // If this is a \"terminated: other side closed\" error, it's likely a server restart\n if (\n errorEvent.message?.includes('terminated') ||\n errorEvent.message?.includes('closed')\n ) {\n appLogger(\n 'Server connection was terminated, automatic reconnection will be attempted...',\n {\n level: 'info',\n }\n );\n _connectionStatus = 'reconnecting';\n }\n };\n\n // Set up dictionary change callbacks\n eventListener.onDictionaryAdded = (dictionary) =>\n writeDictionary(dictionary, configuration);\n eventListener.onDictionaryChange = (dictionary) =>\n writeDictionary(dictionary, configuration);\n eventListener.onDictionaryDeleted = (dictionary) =>\n writeDictionary(dictionary, configuration);\n\n try {\n await eventListener.initialize();\n } catch (error) {\n _connectionStatus = 'error';\n _isHotReloadConnected = false;\n appLogger('Failed to initialize IntlayerEventListener:', {\n level: 'error',\n });\n appLogger(\n `Error: ${error instanceof Error ? error.message : String(error)}`,\n {\n level: 'error',\n }\n );\n }\n } else if (!configuration.editor.liveSync) {\n appLogger(\n 'Hot reload is disabled. Please enable it in the configuration (editor.liveSync).'\n );\n } else if (\n !configuration.editor.clientId ||\n !configuration.editor.clientSecret\n ) {\n appLogger(\n 'Missing client credentials for hot reload. Please configure clientId and clientSecret'\n );\n }\n\n const server = createServer(async (req, res) => {\n // Handle CORS preflight requests\n if (req.method === 'OPTIONS') {\n res.writeHead(200, {\n 'Access-Control-Allow-Origin': '*',\n 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',\n 'Access-Control-Allow-Headers': 'Content-Type, Authorization',\n });\n\n res.end();\n return;\n }\n\n if (req.url?.startsWith('/dictionaries')) {\n res.writeHead(200, {\n 'Content-Type': 'application/json; charset=utf-8',\n 'Cache-Control': 'no-store',\n 'Access-Control-Allow-Origin': '*',\n 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',\n 'Access-Control-Allow-Headers': 'Content-Type, Authorization',\n });\n const dictionaries = getDictionaries(configuration);\n\n const prefix = '/dictionaries/';\n if (req.url.startsWith(prefix)) {\n const [key, locale] = decodeURIComponent(req.url)\n .slice(prefix.length)\n .split('/');\n\n const dictionary = dictionaries[key] ?? null;\n\n if (locale) {\n // @ts-ignore Type instantiation is excessively deep and possibly infinite\n const sourceLocaleContent = getLocalizedContent(\n dictionary.content,\n locale,\n {\n dictionaryKey: key,\n keyPath: [],\n }\n );\n\n res.end(JSON.stringify(sourceLocaleContent));\n return;\n }\n\n res.end(JSON.stringify(dictionary));\n return;\n }\n\n res.end(JSON.stringify(dictionaries));\n return;\n }\n\n if (req.url?.startsWith('/unmerged_dictionaries')) {\n res.writeHead(200, {\n 'Content-Type': 'application/json; charset=utf-8',\n 'Cache-Control': 'no-store',\n 'Access-Control-Allow-Origin': '*',\n 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',\n 'Access-Control-Allow-Headers': 'Content-Type, Authorization',\n });\n const unmergedDictionaries = getUnmergedDictionaries(configuration);\n\n const prefix = '/unmerged_dictionaries/';\n if (req.url.startsWith(prefix)) {\n const key = decodeURIComponent(req.url.slice(prefix.length));\n const one = unmergedDictionaries[key] ?? 
null;\n\n res.end(JSON.stringify(one));\n return;\n }\n\n res.end(JSON.stringify(unmergedDictionaries));\n return;\n }\n\n if (req.url === '/configuration') {\n res.writeHead(200, {\n 'Content-Type': 'application/json; charset=utf-8',\n 'Cache-Control': 'no-store',\n 'Access-Control-Allow-Origin': '*',\n 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',\n 'Access-Control-Allow-Headers': 'Content-Type, Authorization',\n });\n res.end(JSON.stringify(configuration));\n return;\n }\n\n if (req.url === '/health') {\n res.writeHead(200, {\n 'Content-Type': 'application/json; charset=utf-8',\n });\n res.end(JSON.stringify({ status: 'ok' }));\n return;\n }\n\n res.end('Not found');\n return;\n });\n\n const getLiveSyncParam = () => {\n if (!configuration.editor.liveSync) return '\\x1b[31m✗ Disabled\\x1b[0m';\n\n return '\\x1b[32m✓ Enabled\\x1b[0m';\n };\n server.listen(liveSyncPort, () => {\n console.log(`\n \\x1b[1;90mINTLAYER v${packageJson.version}\\x1b[0m\n \n Live server running at: \\x1b[90m${liveSyncURL}\\x1b[0m\n - Backend URL: \\x1b[90m${configuration.editor.backendURL ?? '-'}\\x1b[0m\n - Live sync: ${getLiveSyncParam()}\n - Parallel process: ${options?.with ? `\\x1b[90m${Array.isArray(options.with) ? options.with.join(' ') : options.with}\\x1b[0m` : '-'}\n - Access key: ${configuration.editor.clientId ?? '-'}\n `);\n });\n\n // Cleanup function to terminate child process and event listener when the main process exits\n const cleanup = () => {\n // Clean up event listener\n if (eventListener) {\n appLogger('Closing SSE connection...');\n eventListener.cleanup();\n }\n\n if (parallelProcess) {\n parallelProcess.kill();\n }\n\n server.close(() => {\n appLogger('Live sync server stopped');\n process.exit(0);\n });\n };\n\n // Handle process termination signals\n process.on('SIGINT', cleanup);\n process.on('SIGTERM', cleanup);\n process.on('exit', 
cleanup);\n};\n"],"mappings":";;;;;;;;;;;;AAsBA,MAAM,kBAAkB,OACtB,YACA,kBACG;AAEH,qCAD+B,cAAc,CACnC,sBAAsB,WAAW,MAAM;AACjD,gDAAsB,CAAC,WAAW,EAAE,cAAc;;AAGpD,MAAa,WAAW,OAAO,YAA8B;CAC3D,MAAM,wDAAiC,SAAS,cAAc;CAC9D,MAAM,gDAAyB,cAAc;CAE7C,MAAM,EAAE,cAAc,gBAAgB,cAAc;CAEpD,IAAIA,kBAAyC;CAC7C,IAAIC,gBAA8C;AAKlD,KAAI,SAAS,MAAM;AACjB,yDAA8B,QAAQ,KAAK;AAE3C,kBAAgB,OAAO,YAAY,GAEjC;;AAIJ,KACE,cAAc,OAAO,YACrB,cAAc,OAAO,cACrB,cAAc,OAAO,YACrB,cAAc,OAAO,cACrB;AACA,kBAAgB,IAAIC,oDAAsB,cAAc;AAIxD,gBAAc,yBAAyB;AAGrC,aAAU,mCAAmC;;AAG/C,gBAAc,qBAAqB,UAAU;GAG3C,MAAM,aAAa;AACnB,aACE,+BAA+B,WAAW,WAAW,mBACrD,EACE,OAAO,QACR,CACF;AAGD,OACE,WAAW,SAAS,SAAS,aAAa,IAC1C,WAAW,SAAS,SAAS,SAAS,CAEtC,WACE,iFACA,EACE,OAAO,QACR,CACF;;AAML,gBAAc,qBAAqB,eACjC,gBAAgB,YAAY,cAAc;AAC5C,gBAAc,sBAAsB,eAClC,gBAAgB,YAAY,cAAc;AAC5C,gBAAc,uBAAuB,eACnC,gBAAgB,YAAY,cAAc;AAE5C,MAAI;AACF,SAAM,cAAc,YAAY;WACzB,OAAO;AAGd,aAAU,+CAA+C,EACvD,OAAO,SACR,CAAC;AACF,aACE,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,IAChE,EACE,OAAO,SACR,CACF;;YAEM,CAAC,cAAc,OAAO,SAC/B,WACE,mFACD;UAED,CAAC,cAAc,OAAO,YACtB,CAAC,cAAc,OAAO,aAEtB,WACE,wFACD;CAGH,MAAM,qCAAsB,OAAO,KAAK,QAAQ;AAE9C,MAAI,IAAI,WAAW,WAAW;AAC5B,OAAI,UAAU,KAAK;IACjB,+BAA+B;IAC/B,gCAAgC;IAChC,gCAAgC;IACjC,CAAC;AAEF,OAAI,KAAK;AACT;;AAGF,MAAI,IAAI,KAAK,WAAW,gBAAgB,EAAE;AACxC,OAAI,UAAU,KAAK;IACjB,gBAAgB;IAChB,iBAAiB;IACjB,+BAA+B;IAC/B,gCAAgC;IAChC,gCAAgC;IACjC,CAAC;GACF,MAAM,kEAA+B,cAAc;AAGnD,OAAI,IAAI,IAAI,WADG,iBACe,EAAE;IAC9B,MAAM,CAAC,KAAK,UAAU,mBAAmB,IAAI,IAAI,CAC9C,MAAM,GAAc,CACpB,MAAM,IAAI;IAEb,MAAM,aAAa,aAAa,QAAQ;AAExC,QAAI,QAAQ;KAEV,MAAM,+DACJ,WAAW,SACX,QACA;MACE,eAAe;MACf,SAAS,EAAE;MACZ,CACF;AAED,SAAI,IAAI,KAAK,UAAU,oBAAoB,CAAC;AAC5C;;AAGF,QAAI,IAAI,KAAK,UAAU,WAAW,CAAC;AACnC;;AAGF,OAAI,IAAI,KAAK,UAAU,aAAa,CAAC;AACrC;;AAGF,MAAI,IAAI,KAAK,WAAW,yBAAyB,EAAE;AACjD,OAAI,UAAU,KAAK;IACjB,gBAAgB;IAChB,iBAAiB;IACjB,+BAA+B;IAC/B,gCAAgC;IAChC,gCAAgC;IACjC,CAAC;GACF,MAAM,2FAA+C,cAAc;AAGnE,OAAI,IAAI,IAAI,WADG,0BACe,EAAE;IAE9B,MAAM,MAAM,qBADA,mBAAmB,IAAI,IAAI,MAAM,GAAc,CAAC,KACnB;AAEzC,QAAI,IAAI,KAAK,UAAU,IAAI,CAAC;AAC5B;;AAGF,OAAI,IAAI,KAAK,UAAU,qBAAqB,CAAC;AAC7C;;AAGF,MAAI,IAAI,QAAQ,kBAAkB;AAChC,OAAI,UAAU,KAAK;IACjB,gBAAgB;IAChB,iBAAiB;IACjB,+BAA+B;IAC/B,gCAAgC;IAChC,gCAAgC;IACjC,CAAC;AACF,OAAI,IAAI,KAAK,UAAU,cAAc,CAAC;AACtC;;AAGF,MAAI,IAAI,QAAQ,WAAW;AACzB,OAAI,UAAU,KAAK,EACjB,gBAAgB,mCACjB,CAAC;AACF,OAAI,IAAI,KAAK,UAAU,EAAE,QAAQ,MAAM,CAAC,CAAC;AACzC;;AAGF,MAAI,IAAI,YAAY;GAEpB;CAEF,MAAM,yBAAyB;AAC7B,MAAI,CAAC,cAAc,OAAO,SAAU,QAAO;AAE3C,SAAO;;AAET,QAAO,OAAO,oBAAoB;AAChC,UAAQ,IAAI;4BACYC,uCAAY,QAAQ;;iDAEC,YAAY;iDACZ,cAAc,OAAO,cAAc,IAAI;yCAC/C,kBAAkB,CAAC;yCACnB,SAAS,OAAO,WAAW,MAAM,QAAQ,QAAQ,KAAK,GAAG,QAAQ,KAAK,KAAK,IAAI,GAAG,QAAQ,KAAK,WAAW,IAAI;yCAC9G,cAAc,OAAO,YAAY,IAAI;QACtE;GACJ;CAGF,MAAM,gBAAgB;AAEpB,MAAI,eAAe;AACjB,aAAU,4BAA4B;AACtC,iBAAc,SAAS;;AAGzB,MAAI,gBACF,iBAAgB,MAAM;AAGxB,SAAO,YAAY;AACjB,aAAU,2BAA2B;AACrC,WAAQ,KAAK,EAAE;IACf;;AAIJ,SAAQ,GAAG,UAAU,QAAQ;AAC7B,SAAQ,GAAG,WAAW,QAAQ;AAC9B,SAAQ,GAAG,QAAQ,QAAQ"}
+
{"version":3,"file":"liveSync.cjs","names":["parallelProcess: ParallelHandle | null","eventListener: IntlayerEventListener | null","IntlayerEventListener","packageJson"],"sources":["../../src/liveSync.ts"],"sourcesContent":["import { createServer } from 'node:http';\n// @ts-ignore: @intlayer/backend is not built yet\nimport type { DictionaryAPI } from '@intlayer/backend';\nimport {\n buildDictionary,\n type ParallelHandle,\n runParallel,\n} from '@intlayer/chokidar';\nimport type { GetConfigurationOptions } from '@intlayer/config';\nimport { getAppLogger, getConfiguration } from '@intlayer/config';\nimport packageJson from '@intlayer/config/package.json';\nimport { getLocalizedContent } from '@intlayer/core';\nimport { getDictionaries } from '@intlayer/dictionaries-entry';\nimport type { IntlayerConfig } from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport { IntlayerEventListener } from './IntlayerEventListener';\n\ntype LiveSyncOptions = {\n with?: string | string[];\n configOptions?: GetConfigurationOptions;\n};\n\nconst writeDictionary = async (\n dictionary: DictionaryAPI,\n configuration: IntlayerConfig\n) => {\n const appLogger = getAppLogger(configuration);\n appLogger(`Writing dictionary ${dictionary.key}`);\n await buildDictionary([dictionary], configuration);\n};\n\nexport const liveSync = async (options?: LiveSyncOptions) => {\n const configuration = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(configuration);\n\n const { liveSyncPort, liveSyncURL } = configuration.editor;\n\n let parallelProcess: ParallelHandle | null = null;\n let eventListener: IntlayerEventListener | null = null;\n let _isHotReloadConnected = false;\n let _connectionStatus = 'disconnected'; // 'connected', 'connecting', 'reconnecting', 'disconnected', 'error'\n\n // Start the parallel process if provided\n if (options?.with) {\n parallelProcess = runParallel(options.with);\n // Handle the promise to avoid unhandled rejection\n parallelProcess.result.catch(() => {\n // Parallel process failed or was terminated\n });\n }\n\n // Initialize the event listener for hot reload if configured\n if (\n configuration.editor.liveSync &&\n configuration.editor.backendURL &&\n configuration.editor.clientId &&\n configuration.editor.clientSecret\n ) {\n eventListener = new IntlayerEventListener(configuration);\n _connectionStatus = 'connecting';\n\n // Set up connection callbacks\n eventListener.onConnectionOpen = () => {\n _connectionStatus = 'connected';\n _isHotReloadConnected = true;\n appLogger('Live sync connection established');\n };\n\n eventListener.onConnectionError = (error) => {\n _connectionStatus = 'error';\n _isHotReloadConnected = false;\n const errorEvent = error as any;\n appLogger(\n `Live sync connection error: ${errorEvent.message ?? 
'Unknown error'}`,\n {\n level: 'warn',\n }\n );\n\n // If this is a \"terminated: other side closed\" error, it's likely a server restart\n if (\n errorEvent.message?.includes('terminated') ||\n errorEvent.message?.includes('closed')\n ) {\n appLogger(\n 'Server connection was terminated, automatic reconnection will be attempted...',\n {\n level: 'info',\n }\n );\n _connectionStatus = 'reconnecting';\n }\n };\n\n // Set up dictionary change callbacks\n eventListener.onDictionaryAdded = (dictionary) =>\n writeDictionary(dictionary, configuration);\n eventListener.onDictionaryChange = (dictionary) =>\n writeDictionary(dictionary, configuration);\n eventListener.onDictionaryDeleted = (dictionary) =>\n writeDictionary(dictionary, configuration);\n\n try {\n await eventListener.initialize();\n } catch (error) {\n _connectionStatus = 'error';\n _isHotReloadConnected = false;\n appLogger('Failed to initialize IntlayerEventListener:', {\n level: 'error',\n });\n appLogger(\n `Error: ${error instanceof Error ? error.message : String(error)}`,\n {\n level: 'error',\n }\n );\n }\n } else if (!configuration.editor.liveSync) {\n appLogger(\n 'Hot reload is disabled. Please enable it in the configuration (editor.liveSync).'\n );\n } else if (\n !configuration.editor.clientId ||\n !configuration.editor.clientSecret\n ) {\n appLogger(\n 'Missing client credentials for hot reload. Please configure clientId and clientSecret'\n );\n }\n\n const server = createServer(async (req, res) => {\n // Handle CORS preflight requests\n if (req.method === 'OPTIONS') {\n res.writeHead(200, {\n 'Access-Control-Allow-Origin': '*',\n 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',\n 'Access-Control-Allow-Headers': 'Content-Type, Authorization',\n });\n\n res.end();\n return;\n }\n\n if (req.url?.startsWith('/dictionaries')) {\n res.writeHead(200, {\n 'Content-Type': 'application/json; charset=utf-8',\n 'Cache-Control': 'no-store',\n 'Access-Control-Allow-Origin': '*',\n 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',\n 'Access-Control-Allow-Headers': 'Content-Type, Authorization',\n });\n const dictionaries = getDictionaries(configuration);\n\n const prefix = '/dictionaries/';\n if (req.url.startsWith(prefix)) {\n const [key, locale] = decodeURIComponent(req.url)\n .slice(prefix.length)\n .split('/');\n\n const dictionary = dictionaries[key] ?? null;\n\n if (locale) {\n // @ts-ignore Type instantiation is excessively deep and possibly infinite\n const sourceLocaleContent = getLocalizedContent(\n dictionary.content,\n locale,\n {\n dictionaryKey: key,\n keyPath: [],\n }\n );\n\n res.end(JSON.stringify(sourceLocaleContent));\n return;\n }\n\n res.end(JSON.stringify(dictionary));\n return;\n }\n\n res.end(JSON.stringify(dictionaries));\n return;\n }\n\n if (req.url?.startsWith('/unmerged_dictionaries')) {\n res.writeHead(200, {\n 'Content-Type': 'application/json; charset=utf-8',\n 'Cache-Control': 'no-store',\n 'Access-Control-Allow-Origin': '*',\n 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',\n 'Access-Control-Allow-Headers': 'Content-Type, Authorization',\n });\n const unmergedDictionaries = getUnmergedDictionaries(configuration);\n\n const prefix = '/unmerged_dictionaries/';\n if (req.url.startsWith(prefix)) {\n const key = decodeURIComponent(req.url.slice(prefix.length));\n const one = unmergedDictionaries[key] ?? 
null;\n\n res.end(JSON.stringify(one));\n return;\n }\n\n res.end(JSON.stringify(unmergedDictionaries));\n return;\n }\n\n if (req.url === '/configuration') {\n res.writeHead(200, {\n 'Content-Type': 'application/json; charset=utf-8',\n 'Cache-Control': 'no-store',\n 'Access-Control-Allow-Origin': '*',\n 'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',\n 'Access-Control-Allow-Headers': 'Content-Type, Authorization',\n });\n res.end(JSON.stringify(configuration));\n return;\n }\n\n if (req.url === '/health') {\n res.writeHead(200, {\n 'Content-Type': 'application/json; charset=utf-8',\n });\n res.end(JSON.stringify({ status: 'ok' }));\n return;\n }\n\n res.end('Not found');\n return;\n });\n\n const getLiveSyncParam = () => {\n if (!configuration.editor.liveSync) return '\\x1b[31m✗ Disabled\\x1b[0m';\n\n return '\\x1b[32m✓ Enabled\\x1b[0m';\n };\n server.listen(liveSyncPort, () => {\n console.log(`\n \\x1b[1;90mINTLAYER v${packageJson.version}\\x1b[0m\n \n Live server running at: \\x1b[90m${liveSyncURL}\\x1b[0m\n - Backend URL: \\x1b[90m${configuration.editor.backendURL ?? '-'}\\x1b[0m\n - Live sync: ${getLiveSyncParam()}\n - Parallel process: ${options?.with ? `\\x1b[90m${Array.isArray(options.with) ? options.with.join(' ') : options.with}\\x1b[0m` : '-'}\n - Access key: ${configuration.editor.clientId ?? '-'}\n `);\n });\n\n // Cleanup function to terminate child process and event listener when the main process exits\n const cleanup = () => {\n // Clean up event listener\n if (eventListener) {\n appLogger('Closing SSE connection...');\n eventListener.cleanup();\n }\n\n if (parallelProcess) {\n parallelProcess.kill();\n }\n\n server.close(() => {\n appLogger('Live sync server stopped');\n process.exit(0);\n });\n };\n\n // Handle process termination signals\n process.on('SIGINT', cleanup);\n process.on('SIGTERM', cleanup);\n process.on('exit', 
cleanup);\n};\n"],"mappings":";;;;;;;;;;;;AAsBA,MAAM,kBAAkB,OACtB,YACA,kBACG;AAEH,oCAD+B,cAAc,CACnC,sBAAsB,WAAW,MAAM;AACjD,+CAAsB,CAAC,WAAW,EAAE,cAAc;;AAGpD,MAAa,WAAW,OAAO,YAA8B;CAC3D,MAAM,uDAAiC,SAAS,cAAc;CAC9D,MAAM,+CAAyB,cAAc;CAE7C,MAAM,EAAE,cAAc,gBAAgB,cAAc;CAEpD,IAAIA,kBAAyC;CAC7C,IAAIC,gBAA8C;AAKlD,KAAI,SAAS,MAAM;AACjB,wDAA8B,QAAQ,KAAK;AAE3C,kBAAgB,OAAO,YAAY,GAEjC;;AAIJ,KACE,cAAc,OAAO,YACrB,cAAc,OAAO,cACrB,cAAc,OAAO,YACrB,cAAc,OAAO,cACrB;AACA,kBAAgB,IAAIC,oDAAsB,cAAc;AAIxD,gBAAc,yBAAyB;AAGrC,aAAU,mCAAmC;;AAG/C,gBAAc,qBAAqB,UAAU;GAG3C,MAAM,aAAa;AACnB,aACE,+BAA+B,WAAW,WAAW,mBACrD,EACE,OAAO,QACR,CACF;AAGD,OACE,WAAW,SAAS,SAAS,aAAa,IAC1C,WAAW,SAAS,SAAS,SAAS,CAEtC,WACE,iFACA,EACE,OAAO,QACR,CACF;;AAML,gBAAc,qBAAqB,eACjC,gBAAgB,YAAY,cAAc;AAC5C,gBAAc,sBAAsB,eAClC,gBAAgB,YAAY,cAAc;AAC5C,gBAAc,uBAAuB,eACnC,gBAAgB,YAAY,cAAc;AAE5C,MAAI;AACF,SAAM,cAAc,YAAY;WACzB,OAAO;AAGd,aAAU,+CAA+C,EACvD,OAAO,SACR,CAAC;AACF,aACE,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM,IAChE,EACE,OAAO,SACR,CACF;;YAEM,CAAC,cAAc,OAAO,SAC/B,WACE,mFACD;UAED,CAAC,cAAc,OAAO,YACtB,CAAC,cAAc,OAAO,aAEtB,WACE,wFACD;CAGH,MAAM,qCAAsB,OAAO,KAAK,QAAQ;AAE9C,MAAI,IAAI,WAAW,WAAW;AAC5B,OAAI,UAAU,KAAK;IACjB,+BAA+B;IAC/B,gCAAgC;IAChC,gCAAgC;IACjC,CAAC;AAEF,OAAI,KAAK;AACT;;AAGF,MAAI,IAAI,KAAK,WAAW,gBAAgB,EAAE;AACxC,OAAI,UAAU,KAAK;IACjB,gBAAgB;IAChB,iBAAiB;IACjB,+BAA+B;IAC/B,gCAAgC;IAChC,gCAAgC;IACjC,CAAC;GACF,MAAM,iEAA+B,cAAc;AAGnD,OAAI,IAAI,IAAI,WADG,iBACe,EAAE;IAC9B,MAAM,CAAC,KAAK,UAAU,mBAAmB,IAAI,IAAI,CAC9C,MAAM,GAAc,CACpB,MAAM,IAAI;IAEb,MAAM,aAAa,aAAa,QAAQ;AAExC,QAAI,QAAQ;KAEV,MAAM,8DACJ,WAAW,SACX,QACA;MACE,eAAe;MACf,SAAS,EAAE;MACZ,CACF;AAED,SAAI,IAAI,KAAK,UAAU,oBAAoB,CAAC;AAC5C;;AAGF,QAAI,IAAI,KAAK,UAAU,WAAW,CAAC;AACnC;;AAGF,OAAI,IAAI,KAAK,UAAU,aAAa,CAAC;AACrC;;AAGF,MAAI,IAAI,KAAK,WAAW,yBAAyB,EAAE;AACjD,OAAI,UAAU,KAAK;IACjB,gBAAgB;IAChB,iBAAiB;IACjB,+BAA+B;IAC/B,gCAAgC;IAChC,gCAAgC;IACjC,CAAC;GACF,MAAM,0FAA+C,cAAc;AAGnE,OAAI,IAAI,IAAI,WADG,0BACe,EAAE;IAE9B,MAAM,MAAM,qBADA,mBAAmB,IAAI,IAAI,MAAM,GAAc,CAAC,KACnB;AAEzC,QAAI,IAAI,KAAK,UAAU,IAAI,CAAC;AAC5B;;AAGF,OAAI,IAAI,KAAK,UAAU,qBAAqB,CAAC;AAC7C;;AAGF,MAAI,IAAI,QAAQ,kBAAkB;AAChC,OAAI,UAAU,KAAK;IACjB,gBAAgB;IAChB,iBAAiB;IACjB,+BAA+B;IAC/B,gCAAgC;IAChC,gCAAgC;IACjC,CAAC;AACF,OAAI,IAAI,KAAK,UAAU,cAAc,CAAC;AACtC;;AAGF,MAAI,IAAI,QAAQ,WAAW;AACzB,OAAI,UAAU,KAAK,EACjB,gBAAgB,mCACjB,CAAC;AACF,OAAI,IAAI,KAAK,UAAU,EAAE,QAAQ,MAAM,CAAC,CAAC;AACzC;;AAGF,MAAI,IAAI,YAAY;GAEpB;CAEF,MAAM,yBAAyB;AAC7B,MAAI,CAAC,cAAc,OAAO,SAAU,QAAO;AAE3C,SAAO;;AAET,QAAO,OAAO,oBAAoB;AAChC,UAAQ,IAAI;4BACYC,sCAAY,QAAQ;;iDAEC,YAAY;iDACZ,cAAc,OAAO,cAAc,IAAI;yCAC/C,kBAAkB,CAAC;yCACnB,SAAS,OAAO,WAAW,MAAM,QAAQ,QAAQ,KAAK,GAAG,QAAQ,KAAK,KAAK,IAAI,GAAG,QAAQ,KAAK,WAAW,IAAI;yCAC9G,cAAc,OAAO,YAAY,IAAI;QACtE;GACJ;CAGF,MAAM,gBAAgB;AAEpB,MAAI,eAAe;AACjB,aAAU,4BAA4B;AACtC,iBAAc,SAAS;;AAGzB,MAAI,gBACF,iBAAgB,MAAM;AAGxB,SAAO,YAAY;AACjB,aAAU,2BAA2B;AACrC,WAAQ,KAAK,EAAE;IACf;;AAIJ,SAAQ,GAAG,UAAU,QAAQ;AAC7B,SAAQ,GAAG,WAAW,QAAQ;AAC9B,SAAQ,GAAG,QAAQ,QAAQ"}
package/dist/cjs/pull.cjs
CHANGED
@@ -1,9 +1,9 @@
 const require_rolldown_runtime = require('./_virtual/rolldown_runtime.cjs');
 const require_utils_checkAccess = require('./utils/checkAccess.cjs');
 const require_push_pullLog = require('./push/pullLog.cjs');
-let
-let
-let
+let _intlayer_api = require("@intlayer/api");
+let _intlayer_chokidar = require("@intlayer/chokidar");
+let _intlayer_config = require("@intlayer/config");
 let node_path = require("node:path");
 let node_fs = require("node:fs");
 
@@ -13,17 +13,17 @@ let node_fs = require("node:fs");
 * with progress indicators and concurrency control.
 */
 const pull = async (options) => {
-const appLogger = (0,
+const appLogger = (0, _intlayer_config.getAppLogger)(options?.configOptions?.override);
 try {
-const config = (0,
+const config = (0, _intlayer_config.getConfiguration)(options?.configOptions);
 if (!await require_utils_checkAccess.checkCMSAuth(config)) return;
-const intlayerAPI = (0,
+const intlayerAPI = (0, _intlayer_api.getIntlayerAPIProxy)(void 0, config);
 const getDictionariesUpdateTimestampResult = await intlayerAPI.dictionary.getDictionariesUpdateTimestamp();
 if (!getDictionariesUpdateTimestampResult.data) throw new Error("No distant dictionaries found");
 let distantDictionariesUpdateTimeStamp = getDictionariesUpdateTimestampResult.data;
 if (options?.dictionaries) distantDictionariesUpdateTimeStamp = Object.fromEntries(Object.entries(distantDictionariesUpdateTimeStamp).filter(([key]) => options.dictionaries?.includes(key)));
 const remoteDictionariesPath = (0, node_path.join)(config.content.mainDir, "remote_dictionaries.cjs");
-const requireFunction = config.build?.require ?? (0,
+const requireFunction = config.build?.require ?? (0, _intlayer_config.getProjectRequire)();
 const remoteDictionariesRecord = (0, node_fs.existsSync)(remoteDictionariesPath) ? requireFunction(remoteDictionariesPath) : {};
 const entries = Object.entries(distantDictionariesUpdateTimeStamp);
 const keysToFetch = entries.filter(([key, remoteUpdatedAt]) => {
@@ -72,7 +72,7 @@ const pull = async (options) => {
 if (isCached) sourceDictionary = remoteDictionariesRecord[statusObj.dictionaryKey];
 if (!sourceDictionary) sourceDictionary = (await intlayerAPI.dictionary.getDictionary(statusObj.dictionaryKey)).data;
 if (!sourceDictionary) throw new Error(`Dictionary ${statusObj.dictionaryKey} not found on remote`);
-const { status } = await (0,
+const { status } = await (0, _intlayer_chokidar.writeContentDeclaration)(sourceDictionary, config, options);
 statusObj.status = status;
 logger.update([{
 dictionaryKey: statusObj.dictionaryKey,
@@ -89,7 +89,7 @@ const pull = async (options) => {
 }]);
 }
 };
-await (0,
+await (0, _intlayer_chokidar.parallelize)(dictionariesStatuses, processDictionary, 5);
 logger.finish();
 const iconFor = (status) => {
 switch (status) {
@@ -108,17 +108,17 @@ const pull = async (options) => {
 case "fetched":
 case "imported":
 case "updated":
-case "up-to-date": return
+case "up-to-date": return _intlayer_config.ANSIColors.GREEN;
 case "reimported in JSON":
-case "new content file": return
-case "error": return
-default: return
+case "new content file": return _intlayer_config.ANSIColors.YELLOW;
+case "error": return _intlayer_config.ANSIColors.RED;
+default: return _intlayer_config.ANSIColors.BLUE;
 }
 };
 for (const s of dictionariesStatuses) {
 const icon = iconFor(s.status);
 const color = colorFor(s.status);
-appLogger(` - ${s.dictionaryKey} ${
+appLogger(` - ${s.dictionaryKey} ${_intlayer_config.ANSIColors.GREY}[${color}${icon} ${s.status}${_intlayer_config.ANSIColors.GREY}]${_intlayer_config.ANSIColors.RESET}`);
 }
 for (const statusObj of dictionariesStatuses) if (statusObj.errorMessage) appLogger(statusObj.errorMessage, { level: "error" });
 } catch (error) {
package/dist/cjs/pull.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"pull.cjs","names":["checkCMSAuth","distantDictionariesUpdateTimeStamp: Record<string, number>","remoteDictionariesRecord: Record<string, any>","dictionariesStatuses: DictionariesStatus[]","PullLogger","successfullyFetchedDictionaries: Dictionary[]","sourceDictionary: Dictionary | undefined","ANSIColors"],"sources":["../../src/pull.ts"],"sourcesContent":["import { existsSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { getIntlayerAPIProxy } from '@intlayer/api';\nimport {\n type DictionaryStatus,\n parallelize,\n writeContentDeclaration,\n} from '@intlayer/chokidar';\nimport {\n ANSIColors,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n getProjectRequire,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/types';\nimport { PullLogger, type PullStatus } from './push/pullLog';\nimport { checkCMSAuth } from './utils/checkAccess';\n\ntype PullOptions = {\n dictionaries?: string[];\n newDictionariesPath?: string;\n configOptions?: GetConfigurationOptions;\n};\n\ntype DictionariesStatus = {\n dictionaryKey: string;\n status: DictionaryStatus | 'pending' | 'fetching' | 'error';\n error?: Error;\n errorMessage?: string;\n};\n\n/**\n * Fetch distant dictionaries and write them locally,\n * with progress indicators and concurrency control.\n */\nexport const pull = async (options?: PullOptions): Promise<void> => {\n const appLogger = getAppLogger(options?.configOptions?.override);\n\n try {\n const config = getConfiguration(options?.configOptions);\n\n const hasCMSAuth = await checkCMSAuth(config);\n\n if (!hasCMSAuth) return;\n\n const intlayerAPI = getIntlayerAPIProxy(undefined, config);\n\n // Get remote update timestamps map\n const getDictionariesUpdateTimestampResult =\n await intlayerAPI.dictionary.getDictionariesUpdateTimestamp();\n\n if (!getDictionariesUpdateTimestampResult.data) {\n throw new Error('No distant dictionaries found');\n }\n\n let distantDictionariesUpdateTimeStamp: Record<string, number> =\n getDictionariesUpdateTimestampResult.data;\n\n // Optional filtering by requested dictionaries\n if (options?.dictionaries) {\n distantDictionariesUpdateTimeStamp = Object.fromEntries(\n Object.entries(distantDictionariesUpdateTimeStamp).filter(([key]) =>\n options.dictionaries?.includes(key)\n )\n );\n }\n\n // Load local cached remote dictionaries (if any)\n const remoteDictionariesPath = join(\n config.content.mainDir,\n 'remote_dictionaries.cjs'\n );\n const requireFunction = config.build?.require ?? getProjectRequire();\n const remoteDictionariesRecord: Record<string, any> = existsSync(\n remoteDictionariesPath\n )\n ? (requireFunction(remoteDictionariesPath) as any)\n : {};\n\n // Determine which keys need fetching by comparing updatedAt with local cache\n const entries = Object.entries(distantDictionariesUpdateTimeStamp);\n const keysToFetch = entries\n .filter(([key, remoteUpdatedAt]) => {\n if (!remoteUpdatedAt) return true;\n const local = (remoteDictionariesRecord as any)[key];\n if (!local) return true;\n const localUpdatedAtRaw = (local as any)?.updatedAt as\n | number\n | string\n | undefined;\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? localUpdatedAtRaw\n : localUpdatedAtRaw\n ? 
new Date(localUpdatedAtRaw).getTime()\n : undefined;\n if (typeof localUpdatedAt !== 'number') return true;\n return remoteUpdatedAt > localUpdatedAt;\n })\n .map(([key]) => key);\n\n const cachedKeys = entries\n .filter(([key, remoteUpdatedAt]) => {\n const local = (remoteDictionariesRecord as any)[key];\n const localUpdatedAtRaw = (local as any)?.updatedAt as\n | number\n | string\n | undefined;\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? localUpdatedAtRaw\n : localUpdatedAtRaw\n ? new Date(localUpdatedAtRaw).getTime()\n : undefined;\n return (\n typeof localUpdatedAt === 'number' &&\n typeof remoteUpdatedAt === 'number' &&\n localUpdatedAt >= remoteUpdatedAt\n );\n })\n .map(([key]) => key);\n\n // Check if dictionaries list is empty\n if (entries.length === 0) {\n appLogger('No dictionaries to fetch', {\n level: 'error',\n });\n return;\n }\n\n appLogger('Fetching dictionaries:');\n\n // Prepare dictionaries statuses\n const dictionariesStatuses: DictionariesStatus[] = [\n ...cachedKeys.map((dictionaryKey) => ({\n dictionaryKey,\n status: 'imported' as DictionaryStatus,\n })),\n ...keysToFetch.map((dictionaryKey) => ({\n dictionaryKey,\n status: 'pending' as const,\n })),\n ];\n\n // Initialize aggregated logger\n const logger = new PullLogger();\n logger.update(\n dictionariesStatuses.map<PullStatus>((s) => ({\n dictionaryKey: s.dictionaryKey,\n status: s.status,\n }))\n );\n\n const successfullyFetchedDictionaries: Dictionary[] = [];\n\n const processDictionary = async (\n statusObj: DictionariesStatus\n ): Promise<void> => {\n const isCached =\n statusObj.status === 'imported' || statusObj.status === 'up-to-date';\n\n if (!isCached) {\n statusObj.status = 'fetching';\n logger.update([\n { dictionaryKey: statusObj.dictionaryKey, status: 'fetching' },\n ]);\n }\n\n try {\n let sourceDictionary: Dictionary | undefined;\n\n if (isCached) {\n sourceDictionary = remoteDictionariesRecord[\n statusObj.dictionaryKey\n ] as Dictionary | undefined;\n }\n\n if (!sourceDictionary) {\n // Fetch the dictionary\n const getDictionaryResult =\n await intlayerAPI.dictionary.getDictionary(statusObj.dictionaryKey);\n\n sourceDictionary = getDictionaryResult.data as Dictionary | undefined;\n }\n\n if (!sourceDictionary) {\n throw new Error(\n `Dictionary ${statusObj.dictionaryKey} not found on remote`\n );\n }\n\n // Now, write the dictionary to local file\n const { status } = await writeContentDeclaration(\n sourceDictionary,\n config,\n options\n );\n\n statusObj.status = status;\n logger.update([{ dictionaryKey: statusObj.dictionaryKey, status }]);\n\n successfullyFetchedDictionaries.push(sourceDictionary);\n } catch (error) {\n statusObj.status = 'error';\n statusObj.error = error as Error;\n statusObj.errorMessage = `Error fetching dictionary ${statusObj.dictionaryKey}: ${error}`;\n logger.update([\n { dictionaryKey: statusObj.dictionaryKey, status: 'error' },\n ]);\n }\n };\n\n // Process dictionaries in parallel with concurrency limit\n await parallelize(dictionariesStatuses, processDictionary, 5);\n\n // Stop the logger and render final state\n logger.finish();\n\n // Per-dictionary summary\n const iconFor = (status: DictionariesStatus['status']) => {\n switch (status) {\n case 'fetched':\n case 'imported':\n case 'updated':\n case 'up-to-date':\n case 'reimported in JSON':\n case 'new content file':\n return '✔';\n case 'error':\n return '✖';\n default:\n return '⏲';\n }\n };\n\n const colorFor = (status: DictionariesStatus['status']) => {\n switch (status) {\n case 
'fetched':\n case 'imported':\n case 'updated':\n case 'up-to-date':\n return ANSIColors.GREEN;\n case 'reimported in JSON':\n case 'new content file':\n return ANSIColors.YELLOW;\n case 'error':\n return ANSIColors.RED;\n default:\n return ANSIColors.BLUE;\n }\n };\n\n for (const s of dictionariesStatuses) {\n const icon = iconFor(s.status);\n const color = colorFor(s.status);\n appLogger(\n ` - ${s.dictionaryKey} ${ANSIColors.GREY}[${color}${icon} ${s.status}${ANSIColors.GREY}]${ANSIColors.RESET}`\n );\n }\n\n // Output any error messages\n for (const statusObj of dictionariesStatuses) {\n if (statusObj.errorMessage) {\n appLogger(statusObj.errorMessage, {\n level: 'error',\n });\n }\n }\n } catch (error) {\n appLogger(error, {\n level: 'error',\n });\n }\n};\n"],"mappings":";;;;;;;;;;;;;;AAoCA,MAAa,OAAO,OAAO,YAAyC;CAClE,MAAM,gDAAyB,SAAS,eAAe,SAAS;AAEhE,KAAI;EACF,MAAM,iDAA0B,SAAS,cAAc;AAIvD,MAAI,CAFe,MAAMA,uCAAa,OAAO,CAE5B;EAEjB,MAAM,sDAAkC,QAAW,OAAO;EAG1D,MAAM,uCACJ,MAAM,YAAY,WAAW,gCAAgC;AAE/D,MAAI,CAAC,qCAAqC,KACxC,OAAM,IAAI,MAAM,gCAAgC;EAGlD,IAAIC,qCACF,qCAAqC;AAGvC,MAAI,SAAS,aACX,sCAAqC,OAAO,YAC1C,OAAO,QAAQ,mCAAmC,CAAC,QAAQ,CAAC,SAC1D,QAAQ,cAAc,SAAS,IAAI,CACpC,CACF;EAIH,MAAM,6CACJ,OAAO,QAAQ,SACf,0BACD;EACD,MAAM,kBAAkB,OAAO,OAAO,qDAA8B;EACpE,MAAMC,mDACJ,uBACD,GACI,gBAAgB,uBAAuB,GACxC,EAAE;EAGN,MAAM,UAAU,OAAO,QAAQ,mCAAmC;EAClE,MAAM,cAAc,QACjB,QAAQ,CAAC,KAAK,qBAAqB;AAClC,OAAI,CAAC,gBAAiB,QAAO;GAC7B,MAAM,QAAS,yBAAiC;AAChD,OAAI,CAAC,MAAO,QAAO;GACnB,MAAM,oBAAqB,OAAe;GAI1C,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AACR,OAAI,OAAO,mBAAmB,SAAU,QAAO;AAC/C,UAAO,kBAAkB;IACzB,CACD,KAAK,CAAC,SAAS,IAAI;EAEtB,MAAM,aAAa,QAChB,QAAQ,CAAC,KAAK,qBAAqB;GAElC,MAAM,oBADS,yBAAiC,MACN;GAI1C,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AACR,UACE,OAAO,mBAAmB,YAC1B,OAAO,oBAAoB,YAC3B,kBAAkB;IAEpB,CACD,KAAK,CAAC,SAAS,IAAI;AAGtB,MAAI,QAAQ,WAAW,GAAG;AACxB,aAAU,4BAA4B,EACpC,OAAO,SACR,CAAC;AACF;;AAGF,YAAU,yBAAyB;EAGnC,MAAMC,uBAA6C,CACjD,GAAG,WAAW,KAAK,mBAAmB;GACpC;GACA,QAAQ;GACT,EAAE,EACH,GAAG,YAAY,KAAK,mBAAmB;GACrC;GACA,QAAQ;GACT,EAAE,CACJ;EAGD,MAAM,SAAS,IAAIC,iCAAY;AAC/B,SAAO,OACL,qBAAqB,KAAiB,OAAO;GAC3C,eAAe,EAAE;GACjB,QAAQ,EAAE;GACX,EAAE,CACJ;EAED,MAAMC,kCAAgD,EAAE;EAExD,MAAM,oBAAoB,OACxB,cACkB;GAClB,MAAM,WACJ,UAAU,WAAW,cAAc,UAAU,WAAW;AAE1D,OAAI,CAAC,UAAU;AACb,cAAU,SAAS;AACnB,WAAO,OAAO,CACZ;KAAE,eAAe,UAAU;KAAe,QAAQ;KAAY,CAC/D,CAAC;;AAGJ,OAAI;IACF,IAAIC;AAEJ,QAAI,SACF,oBAAmB,yBACjB,UAAU;AAId,QAAI,CAAC,iBAKH,qBAFE,MAAM,YAAY,WAAW,cAAc,UAAU,cAAc,EAE9B;AAGzC,QAAI,CAAC,iBACH,OAAM,IAAI,MACR,cAAc,UAAU,cAAc,sBACvC;IAIH,MAAM,EAAE,WAAW,uDACjB,kBACA,QACA,QACD;AAED,cAAU,SAAS;AACnB,WAAO,OAAO,CAAC;KAAE,eAAe,UAAU;KAAe;KAAQ,CAAC,CAAC;AAEnE,oCAAgC,KAAK,iBAAiB;YAC/C,OAAO;AACd,cAAU,SAAS;AACnB,cAAU,QAAQ;AAClB,cAAU,eAAe,6BAA6B,UAAU,cAAc,IAAI;AAClF,WAAO,OAAO,CACZ;KAAE,eAAe,UAAU;KAAe,QAAQ;KAAS,CAC5D,CAAC;;;AAKN,6CAAkB,sBAAsB,mBAAmB,EAAE;AAG7D,SAAO,QAAQ;EAGf,MAAM,WAAW,WAAyC;AACxD,WAAQ,QAAR;IACE,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK,mBACH,QAAO;IACT,KAAK,QACH,QAAO;IACT,QACE,QAAO;;;EAIb,MAAM,YAAY,WAAyC;AACzD,WAAQ,QAAR;IACE,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK,aACH,QAAOC,6BAAW;IACpB,KAAK;IACL,KAAK,mBACH,QAAOA,6BAAW;IACpB,KAAK,QACH,QAAOA,6BAAW;IACpB,QACE,QAAOA,6BAAW;;;AAIxB,OAAK,MAAM,KAAK,sBAAsB;GACpC,MAAM,OAAO,QAAQ,EAAE,OAAO;GAC9B,MAAM,QAAQ,SAAS,EAAE,OAAO;AAChC,aACE,MAAM,EAAE,cAAc,GAAGA,6BAAW,KAAK,GAAG,QAAQ,KAAK,GAAG,EAAE,SAASA,6BAAW,KAAK,GAAGA,6BAAW,QACtG;;AAIH,OAAK,MAAM,aAAa,qBACtB,KAAI,UAAU,aACZ,WAAU,UAAU,cAAc,EAChC,OAAO,SACR,CAAC;UAGC,OAAO;AACd,YAAU,OAAO,EACf,OAAO,S
ACR,CAAC"}
+
{"version":3,"file":"pull.cjs","names":["checkCMSAuth","distantDictionariesUpdateTimeStamp: Record<string, number>","remoteDictionariesRecord: Record<string, any>","dictionariesStatuses: DictionariesStatus[]","PullLogger","successfullyFetchedDictionaries: Dictionary[]","sourceDictionary: Dictionary | undefined","ANSIColors"],"sources":["../../src/pull.ts"],"sourcesContent":["import { existsSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { getIntlayerAPIProxy } from '@intlayer/api';\nimport {\n type DictionaryStatus,\n parallelize,\n writeContentDeclaration,\n} from '@intlayer/chokidar';\nimport {\n ANSIColors,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n getProjectRequire,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/types';\nimport { PullLogger, type PullStatus } from './push/pullLog';\nimport { checkCMSAuth } from './utils/checkAccess';\n\ntype PullOptions = {\n dictionaries?: string[];\n newDictionariesPath?: string;\n configOptions?: GetConfigurationOptions;\n};\n\ntype DictionariesStatus = {\n dictionaryKey: string;\n status: DictionaryStatus | 'pending' | 'fetching' | 'error';\n error?: Error;\n errorMessage?: string;\n};\n\n/**\n * Fetch distant dictionaries and write them locally,\n * with progress indicators and concurrency control.\n */\nexport const pull = async (options?: PullOptions): Promise<void> => {\n const appLogger = getAppLogger(options?.configOptions?.override);\n\n try {\n const config = getConfiguration(options?.configOptions);\n\n const hasCMSAuth = await checkCMSAuth(config);\n\n if (!hasCMSAuth) return;\n\n const intlayerAPI = getIntlayerAPIProxy(undefined, config);\n\n // Get remote update timestamps map\n const getDictionariesUpdateTimestampResult =\n await intlayerAPI.dictionary.getDictionariesUpdateTimestamp();\n\n if (!getDictionariesUpdateTimestampResult.data) {\n throw new Error('No distant dictionaries found');\n }\n\n let distantDictionariesUpdateTimeStamp: Record<string, number> =\n getDictionariesUpdateTimestampResult.data;\n\n // Optional filtering by requested dictionaries\n if (options?.dictionaries) {\n distantDictionariesUpdateTimeStamp = Object.fromEntries(\n Object.entries(distantDictionariesUpdateTimeStamp).filter(([key]) =>\n options.dictionaries?.includes(key)\n )\n );\n }\n\n // Load local cached remote dictionaries (if any)\n const remoteDictionariesPath = join(\n config.content.mainDir,\n 'remote_dictionaries.cjs'\n );\n const requireFunction = config.build?.require ?? getProjectRequire();\n const remoteDictionariesRecord: Record<string, any> = existsSync(\n remoteDictionariesPath\n )\n ? (requireFunction(remoteDictionariesPath) as any)\n : {};\n\n // Determine which keys need fetching by comparing updatedAt with local cache\n const entries = Object.entries(distantDictionariesUpdateTimeStamp);\n const keysToFetch = entries\n .filter(([key, remoteUpdatedAt]) => {\n if (!remoteUpdatedAt) return true;\n const local = (remoteDictionariesRecord as any)[key];\n if (!local) return true;\n const localUpdatedAtRaw = (local as any)?.updatedAt as\n | number\n | string\n | undefined;\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? localUpdatedAtRaw\n : localUpdatedAtRaw\n ? 
new Date(localUpdatedAtRaw).getTime()\n : undefined;\n if (typeof localUpdatedAt !== 'number') return true;\n return remoteUpdatedAt > localUpdatedAt;\n })\n .map(([key]) => key);\n\n const cachedKeys = entries\n .filter(([key, remoteUpdatedAt]) => {\n const local = (remoteDictionariesRecord as any)[key];\n const localUpdatedAtRaw = (local as any)?.updatedAt as\n | number\n | string\n | undefined;\n const localUpdatedAt =\n typeof localUpdatedAtRaw === 'number'\n ? localUpdatedAtRaw\n : localUpdatedAtRaw\n ? new Date(localUpdatedAtRaw).getTime()\n : undefined;\n return (\n typeof localUpdatedAt === 'number' &&\n typeof remoteUpdatedAt === 'number' &&\n localUpdatedAt >= remoteUpdatedAt\n );\n })\n .map(([key]) => key);\n\n // Check if dictionaries list is empty\n if (entries.length === 0) {\n appLogger('No dictionaries to fetch', {\n level: 'error',\n });\n return;\n }\n\n appLogger('Fetching dictionaries:');\n\n // Prepare dictionaries statuses\n const dictionariesStatuses: DictionariesStatus[] = [\n ...cachedKeys.map((dictionaryKey) => ({\n dictionaryKey,\n status: 'imported' as DictionaryStatus,\n })),\n ...keysToFetch.map((dictionaryKey) => ({\n dictionaryKey,\n status: 'pending' as const,\n })),\n ];\n\n // Initialize aggregated logger\n const logger = new PullLogger();\n logger.update(\n dictionariesStatuses.map<PullStatus>((s) => ({\n dictionaryKey: s.dictionaryKey,\n status: s.status,\n }))\n );\n\n const successfullyFetchedDictionaries: Dictionary[] = [];\n\n const processDictionary = async (\n statusObj: DictionariesStatus\n ): Promise<void> => {\n const isCached =\n statusObj.status === 'imported' || statusObj.status === 'up-to-date';\n\n if (!isCached) {\n statusObj.status = 'fetching';\n logger.update([\n { dictionaryKey: statusObj.dictionaryKey, status: 'fetching' },\n ]);\n }\n\n try {\n let sourceDictionary: Dictionary | undefined;\n\n if (isCached) {\n sourceDictionary = remoteDictionariesRecord[\n statusObj.dictionaryKey\n ] as Dictionary | undefined;\n }\n\n if (!sourceDictionary) {\n // Fetch the dictionary\n const getDictionaryResult =\n await intlayerAPI.dictionary.getDictionary(statusObj.dictionaryKey);\n\n sourceDictionary = getDictionaryResult.data as Dictionary | undefined;\n }\n\n if (!sourceDictionary) {\n throw new Error(\n `Dictionary ${statusObj.dictionaryKey} not found on remote`\n );\n }\n\n // Now, write the dictionary to local file\n const { status } = await writeContentDeclaration(\n sourceDictionary,\n config,\n options\n );\n\n statusObj.status = status;\n logger.update([{ dictionaryKey: statusObj.dictionaryKey, status }]);\n\n successfullyFetchedDictionaries.push(sourceDictionary);\n } catch (error) {\n statusObj.status = 'error';\n statusObj.error = error as Error;\n statusObj.errorMessage = `Error fetching dictionary ${statusObj.dictionaryKey}: ${error}`;\n logger.update([\n { dictionaryKey: statusObj.dictionaryKey, status: 'error' },\n ]);\n }\n };\n\n // Process dictionaries in parallel with concurrency limit\n await parallelize(dictionariesStatuses, processDictionary, 5);\n\n // Stop the logger and render final state\n logger.finish();\n\n // Per-dictionary summary\n const iconFor = (status: DictionariesStatus['status']) => {\n switch (status) {\n case 'fetched':\n case 'imported':\n case 'updated':\n case 'up-to-date':\n case 'reimported in JSON':\n case 'new content file':\n return '✔';\n case 'error':\n return '✖';\n default:\n return '⏲';\n }\n };\n\n const colorFor = (status: DictionariesStatus['status']) => {\n switch (status) {\n case 
'fetched':\n case 'imported':\n case 'updated':\n case 'up-to-date':\n return ANSIColors.GREEN;\n case 'reimported in JSON':\n case 'new content file':\n return ANSIColors.YELLOW;\n case 'error':\n return ANSIColors.RED;\n default:\n return ANSIColors.BLUE;\n }\n };\n\n for (const s of dictionariesStatuses) {\n const icon = iconFor(s.status);\n const color = colorFor(s.status);\n appLogger(\n ` - ${s.dictionaryKey} ${ANSIColors.GREY}[${color}${icon} ${s.status}${ANSIColors.GREY}]${ANSIColors.RESET}`\n );\n }\n\n // Output any error messages\n for (const statusObj of dictionariesStatuses) {\n if (statusObj.errorMessage) {\n appLogger(statusObj.errorMessage, {\n level: 'error',\n });\n }\n }\n } catch (error) {\n appLogger(error, {\n level: 'error',\n });\n }\n};\n"],"mappings":";;;;;;;;;;;;;;AAoCA,MAAa,OAAO,OAAO,YAAyC;CAClE,MAAM,+CAAyB,SAAS,eAAe,SAAS;AAEhE,KAAI;EACF,MAAM,gDAA0B,SAAS,cAAc;AAIvD,MAAI,CAFe,MAAMA,uCAAa,OAAO,CAE5B;EAEjB,MAAM,qDAAkC,QAAW,OAAO;EAG1D,MAAM,uCACJ,MAAM,YAAY,WAAW,gCAAgC;AAE/D,MAAI,CAAC,qCAAqC,KACxC,OAAM,IAAI,MAAM,gCAAgC;EAGlD,IAAIC,qCACF,qCAAqC;AAGvC,MAAI,SAAS,aACX,sCAAqC,OAAO,YAC1C,OAAO,QAAQ,mCAAmC,CAAC,QAAQ,CAAC,SAC1D,QAAQ,cAAc,SAAS,IAAI,CACpC,CACF;EAIH,MAAM,6CACJ,OAAO,QAAQ,SACf,0BACD;EACD,MAAM,kBAAkB,OAAO,OAAO,oDAA8B;EACpE,MAAMC,mDACJ,uBACD,GACI,gBAAgB,uBAAuB,GACxC,EAAE;EAGN,MAAM,UAAU,OAAO,QAAQ,mCAAmC;EAClE,MAAM,cAAc,QACjB,QAAQ,CAAC,KAAK,qBAAqB;AAClC,OAAI,CAAC,gBAAiB,QAAO;GAC7B,MAAM,QAAS,yBAAiC;AAChD,OAAI,CAAC,MAAO,QAAO;GACnB,MAAM,oBAAqB,OAAe;GAI1C,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AACR,OAAI,OAAO,mBAAmB,SAAU,QAAO;AAC/C,UAAO,kBAAkB;IACzB,CACD,KAAK,CAAC,SAAS,IAAI;EAEtB,MAAM,aAAa,QAChB,QAAQ,CAAC,KAAK,qBAAqB;GAElC,MAAM,oBADS,yBAAiC,MACN;GAI1C,MAAM,iBACJ,OAAO,sBAAsB,WACzB,oBACA,oBACE,IAAI,KAAK,kBAAkB,CAAC,SAAS,GACrC;AACR,UACE,OAAO,mBAAmB,YAC1B,OAAO,oBAAoB,YAC3B,kBAAkB;IAEpB,CACD,KAAK,CAAC,SAAS,IAAI;AAGtB,MAAI,QAAQ,WAAW,GAAG;AACxB,aAAU,4BAA4B,EACpC,OAAO,SACR,CAAC;AACF;;AAGF,YAAU,yBAAyB;EAGnC,MAAMC,uBAA6C,CACjD,GAAG,WAAW,KAAK,mBAAmB;GACpC;GACA,QAAQ;GACT,EAAE,EACH,GAAG,YAAY,KAAK,mBAAmB;GACrC;GACA,QAAQ;GACT,EAAE,CACJ;EAGD,MAAM,SAAS,IAAIC,iCAAY;AAC/B,SAAO,OACL,qBAAqB,KAAiB,OAAO;GAC3C,eAAe,EAAE;GACjB,QAAQ,EAAE;GACX,EAAE,CACJ;EAED,MAAMC,kCAAgD,EAAE;EAExD,MAAM,oBAAoB,OACxB,cACkB;GAClB,MAAM,WACJ,UAAU,WAAW,cAAc,UAAU,WAAW;AAE1D,OAAI,CAAC,UAAU;AACb,cAAU,SAAS;AACnB,WAAO,OAAO,CACZ;KAAE,eAAe,UAAU;KAAe,QAAQ;KAAY,CAC/D,CAAC;;AAGJ,OAAI;IACF,IAAIC;AAEJ,QAAI,SACF,oBAAmB,yBACjB,UAAU;AAId,QAAI,CAAC,iBAKH,qBAFE,MAAM,YAAY,WAAW,cAAc,UAAU,cAAc,EAE9B;AAGzC,QAAI,CAAC,iBACH,OAAM,IAAI,MACR,cAAc,UAAU,cAAc,sBACvC;IAIH,MAAM,EAAE,WAAW,sDACjB,kBACA,QACA,QACD;AAED,cAAU,SAAS;AACnB,WAAO,OAAO,CAAC;KAAE,eAAe,UAAU;KAAe;KAAQ,CAAC,CAAC;AAEnE,oCAAgC,KAAK,iBAAiB;YAC/C,OAAO;AACd,cAAU,SAAS;AACnB,cAAU,QAAQ;AAClB,cAAU,eAAe,6BAA6B,UAAU,cAAc,IAAI;AAClF,WAAO,OAAO,CACZ;KAAE,eAAe,UAAU;KAAe,QAAQ;KAAS,CAC5D,CAAC;;;AAKN,4CAAkB,sBAAsB,mBAAmB,EAAE;AAG7D,SAAO,QAAQ;EAGf,MAAM,WAAW,WAAyC;AACxD,WAAQ,QAAR;IACE,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK,mBACH,QAAO;IACT,KAAK,QACH,QAAO;IACT,QACE,QAAO;;;EAIb,MAAM,YAAY,WAAyC;AACzD,WAAQ,QAAR;IACE,KAAK;IACL,KAAK;IACL,KAAK;IACL,KAAK,aACH,QAAOC,4BAAW;IACpB,KAAK;IACL,KAAK,mBACH,QAAOA,4BAAW;IACpB,KAAK,QACH,QAAOA,4BAAW;IACpB,QACE,QAAOA,4BAAW;;;AAIxB,OAAK,MAAM,KAAK,sBAAsB;GACpC,MAAM,OAAO,QAAQ,EAAE,OAAO;GAC9B,MAAM,QAAQ,SAAS,EAAE,OAAO;AAChC,aACE,MAAM,EAAE,cAAc,GAAGA,4BAAW,KAAK,GAAG,QAAQ,KAAK,GAAG,EAAE,SAASA,4BAAW,KAAK,GAAGA,4BAAW,QACtG;;AAIH,OAAK,MAAM,aAAa,qBACtB,KAAI,UAAU,aACZ,WAAU,UAAU,cAAc,EAChC,OAAO,SACR,CAAC;UAGC,OAAO;AACd,YAAU,OAAO,EACf,OAAO,S
ACR,CAAC"}
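The sourcesContent embedded above contains the full src/pull.ts implementation: before fetching, it compares each remote dictionary's update timestamp against the locally cached remote_dictionaries.cjs record and only re-fetches entries whose remote updatedAt is newer. The following is a minimal TypeScript sketch of that comparison, using a simplified CachedDictionary shape that stands in for the real cached entries (the names here are illustrative, not part of the package API).

// Minimal sketch of the cache check performed in pull.ts (assumed shapes, not the package's API).
type CachedDictionary = { updatedAt?: number | string };

const toMillis = (value: number | string | undefined): number | undefined => {
  if (typeof value === 'number') return value;
  return value ? new Date(value).getTime() : undefined;
};

// Returns true when the remote copy is newer than (or missing from) the local cache.
const needsFetch = (
  remoteUpdatedAt: number | undefined,
  cached: CachedDictionary | undefined
): boolean => {
  if (!remoteUpdatedAt || !cached) return true;
  const localUpdatedAt = toMillis(cached.updatedAt);
  if (typeof localUpdatedAt !== 'number') return true;
  return remoteUpdatedAt > localUpdatedAt;
};

console.log(needsFetch(1700000001000, { updatedAt: 1700000000000 })); // true: remote is newer
console.log(needsFetch(1700000000000, { updatedAt: '2099-01-01' }));  // false: cache is up to date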
package/dist/cjs/push/pullLog.cjs
CHANGED
@@ -1,5 +1,5 @@
 const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
-let
+let _intlayer_config = require("@intlayer/config");
 
 //#region src/push/pullLog.ts
 var PullLogger = class {
@@ -7,12 +7,12 @@ var PullLogger = class {
 spinnerTimer = null;
 spinnerIndex = 0;
 renderedLines = 0;
-spinnerFrames =
+spinnerFrames = _intlayer_config.spinnerFrames;
 isFinished = false;
 prefix;
 lastRenderedState = "";
 constructor() {
-this.prefix = (0,
+this.prefix = (0, _intlayer_config.getConfiguration)().log.prefix;
 }
 update(newStatuses) {
 if (this.isFinished) return;
@@ -49,10 +49,10 @@ var PullLogger = class {
 const progressLabel = `dictionaries: ${done}/${total}`;
 const details = [];
 if (success > 0) details.push(`ok: ${success}`);
-if (errors > 0) details.push((0,
+if (errors > 0) details.push((0, _intlayer_config.colorize)(`errors: ${errors}`, _intlayer_config.ANSIColors.RED));
 const suffix = details.length > 0 ? ` (${details.join(", ")})` : "";
-if (isDone) lines.push(`${this.prefix} ${(0,
-else lines.push(`${this.prefix} ${(0,
+if (isDone) lines.push(`${this.prefix} ${(0, _intlayer_config.colorize)("✔", _intlayer_config.ANSIColors.GREEN)} fetched ${progressLabel}${suffix}`);
+else lines.push(`${this.prefix} ${(0, _intlayer_config.colorize)(frame, _intlayer_config.ANSIColors.BLUE)} fetching ${progressLabel}${suffix}`);
 const currentState = lines.join("\n");
 if (currentState === this.lastRenderedState) return;
 this.lastRenderedState = currentState;
@@ -1 +1 @@
-
{"version":3,"file":"pullLog.cjs","names":["spinnerFrames","lines: string[]","details: string[]","ANSIColors"],"sources":["../../../src/push/pullLog.ts"],"sourcesContent":["import type { DictionaryStatus } from '@intlayer/chokidar';\nimport {\n ANSIColors,\n colorize,\n getConfiguration,\n spinnerFrames,\n} from '@intlayer/config';\n\nexport type PullStatus = {\n dictionaryKey: string;\n status: DictionaryStatus | 'pending' | 'fetching';\n errorMessage?: string;\n};\n\nexport class PullLogger {\n private statuses: PullStatus[] = [];\n private spinnerTimer: NodeJS.Timeout | null = null;\n private spinnerIndex = 0;\n private renderedLines = 0;\n private readonly spinnerFrames = spinnerFrames;\n private isFinished = false;\n private readonly prefix: string;\n private lastRenderedState: string = '';\n\n constructor() {\n const configuration = getConfiguration();\n this.prefix = configuration.log.prefix;\n }\n\n update(newStatuses: PullStatus[]) {\n if (this.isFinished) return;\n for (const status of newStatuses) {\n const index = this.statuses.findIndex(\n (s) => s.dictionaryKey === status.dictionaryKey\n );\n if (index >= 0) {\n this.statuses[index] = status;\n } else {\n this.statuses.push(status);\n }\n }\n\n this.startSpinner();\n this.render();\n }\n\n finish() {\n this.isFinished = true;\n this.stopSpinner();\n this.render();\n }\n\n private startSpinner() {\n if (this.spinnerTimer || this.isFinished) return;\n this.spinnerTimer = setInterval(() => {\n this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerFrames.length;\n this.render();\n }, 100);\n }\n\n private stopSpinner() {\n if (!this.spinnerTimer) return;\n clearInterval(this.spinnerTimer);\n this.spinnerTimer = null;\n }\n\n private render() {\n const { total, done, success, errors } = this.computeProgress();\n\n const frame = this.spinnerFrames[this.spinnerIndex];\n const lines: string[] = [];\n\n const isDone = done === total;\n const progressLabel = `dictionaries: ${done}/${total}`;\n const details: string[] = [];\n if (success > 0) details.push(`ok: ${success}`);\n if (errors > 0) details.push(colorize(`errors: ${errors}`, ANSIColors.RED));\n\n const suffix = details.length > 0 ? 
` (${details.join(', ')})` : '';\n\n if (isDone) {\n lines.push(\n `${this.prefix} ${colorize('✔', ANSIColors.GREEN)} fetched ${progressLabel}${suffix}`\n );\n } else {\n lines.push(\n `${this.prefix} ${colorize(frame, ANSIColors.BLUE)} fetching ${progressLabel}${suffix}`\n );\n }\n\n const currentState = lines.join('\\n');\n if (currentState === this.lastRenderedState) {\n return;\n }\n this.lastRenderedState = currentState;\n\n if (this.renderedLines > 0) {\n process.stdout.write(`\\x1b[${this.renderedLines}F`);\n }\n\n const totalLinesToClear = Math.max(this.renderedLines, lines.length);\n for (let i = 0; i < totalLinesToClear; i++) {\n process.stdout.write('\\x1b[2K');\n const line = lines[i];\n if (line !== undefined) {\n process.stdout.write(line);\n }\n process.stdout.write('\\n');\n }\n\n this.renderedLines = lines.length;\n }\n\n private computeProgress() {\n const keys = new Set(this.statuses.map((s) => s.dictionaryKey));\n\n const doneSet = new Set<DictionaryStatus | 'error'>([\n 'fetched',\n 'imported',\n 'updated',\n 'up-to-date',\n 'reimported in JSON',\n 'new content file',\n 'error',\n ] as const);\n\n const successesSet = new Set<DictionaryStatus>([\n 'fetched',\n 'imported',\n 'updated',\n 'up-to-date',\n 'reimported in JSON',\n 'new content file',\n ] as const);\n\n const done = this.statuses.filter((s) =>\n doneSet.has(s.status as any)\n ).length;\n const success = this.statuses.filter((s) =>\n successesSet.has(s.status as any)\n ).length;\n const errors = this.statuses.filter((s) => s.status === 'error').length;\n\n return {\n total: keys.size,\n done,\n success,\n errors,\n } as const;\n }\n}\n"],"mappings":";;;;AAcA,IAAa,aAAb,MAAwB;CACtB,AAAQ,WAAyB,EAAE;CACnC,AAAQ,eAAsC;CAC9C,AAAQ,eAAe;CACvB,AAAQ,gBAAgB;CACxB,AAAiB,gBAAgBA;CACjC,AAAQ,aAAa;CACrB,AAAiB;CACjB,AAAQ,oBAA4B;CAEpC,cAAc;AAEZ,OAAK,
+
{"version":3,"file":"pullLog.cjs","names":["spinnerFrames","lines: string[]","details: string[]","ANSIColors"],"sources":["../../../src/push/pullLog.ts"],"sourcesContent":["import type { DictionaryStatus } from '@intlayer/chokidar';\nimport {\n ANSIColors,\n colorize,\n getConfiguration,\n spinnerFrames,\n} from '@intlayer/config';\n\nexport type PullStatus = {\n dictionaryKey: string;\n status: DictionaryStatus | 'pending' | 'fetching';\n errorMessage?: string;\n};\n\nexport class PullLogger {\n private statuses: PullStatus[] = [];\n private spinnerTimer: NodeJS.Timeout | null = null;\n private spinnerIndex = 0;\n private renderedLines = 0;\n private readonly spinnerFrames = spinnerFrames;\n private isFinished = false;\n private readonly prefix: string;\n private lastRenderedState: string = '';\n\n constructor() {\n const configuration = getConfiguration();\n this.prefix = configuration.log.prefix;\n }\n\n update(newStatuses: PullStatus[]) {\n if (this.isFinished) return;\n for (const status of newStatuses) {\n const index = this.statuses.findIndex(\n (s) => s.dictionaryKey === status.dictionaryKey\n );\n if (index >= 0) {\n this.statuses[index] = status;\n } else {\n this.statuses.push(status);\n }\n }\n\n this.startSpinner();\n this.render();\n }\n\n finish() {\n this.isFinished = true;\n this.stopSpinner();\n this.render();\n }\n\n private startSpinner() {\n if (this.spinnerTimer || this.isFinished) return;\n this.spinnerTimer = setInterval(() => {\n this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerFrames.length;\n this.render();\n }, 100);\n }\n\n private stopSpinner() {\n if (!this.spinnerTimer) return;\n clearInterval(this.spinnerTimer);\n this.spinnerTimer = null;\n }\n\n private render() {\n const { total, done, success, errors } = this.computeProgress();\n\n const frame = this.spinnerFrames[this.spinnerIndex];\n const lines: string[] = [];\n\n const isDone = done === total;\n const progressLabel = `dictionaries: ${done}/${total}`;\n const details: string[] = [];\n if (success > 0) details.push(`ok: ${success}`);\n if (errors > 0) details.push(colorize(`errors: ${errors}`, ANSIColors.RED));\n\n const suffix = details.length > 0 ? 
` (${details.join(', ')})` : '';\n\n if (isDone) {\n lines.push(\n `${this.prefix} ${colorize('✔', ANSIColors.GREEN)} fetched ${progressLabel}${suffix}`\n );\n } else {\n lines.push(\n `${this.prefix} ${colorize(frame, ANSIColors.BLUE)} fetching ${progressLabel}${suffix}`\n );\n }\n\n const currentState = lines.join('\\n');\n if (currentState === this.lastRenderedState) {\n return;\n }\n this.lastRenderedState = currentState;\n\n if (this.renderedLines > 0) {\n process.stdout.write(`\\x1b[${this.renderedLines}F`);\n }\n\n const totalLinesToClear = Math.max(this.renderedLines, lines.length);\n for (let i = 0; i < totalLinesToClear; i++) {\n process.stdout.write('\\x1b[2K');\n const line = lines[i];\n if (line !== undefined) {\n process.stdout.write(line);\n }\n process.stdout.write('\\n');\n }\n\n this.renderedLines = lines.length;\n }\n\n private computeProgress() {\n const keys = new Set(this.statuses.map((s) => s.dictionaryKey));\n\n const doneSet = new Set<DictionaryStatus | 'error'>([\n 'fetched',\n 'imported',\n 'updated',\n 'up-to-date',\n 'reimported in JSON',\n 'new content file',\n 'error',\n ] as const);\n\n const successesSet = new Set<DictionaryStatus>([\n 'fetched',\n 'imported',\n 'updated',\n 'up-to-date',\n 'reimported in JSON',\n 'new content file',\n ] as const);\n\n const done = this.statuses.filter((s) =>\n doneSet.has(s.status as any)\n ).length;\n const success = this.statuses.filter((s) =>\n successesSet.has(s.status as any)\n ).length;\n const errors = this.statuses.filter((s) => s.status === 'error').length;\n\n return {\n total: keys.size,\n done,\n success,\n errors,\n } as const;\n }\n}\n"],"mappings":";;;;AAcA,IAAa,aAAb,MAAwB;CACtB,AAAQ,WAAyB,EAAE;CACnC,AAAQ,eAAsC;CAC9C,AAAQ,eAAe;CACvB,AAAQ,gBAAgB;CACxB,AAAiB,gBAAgBA;CACjC,AAAQ,aAAa;CACrB,AAAiB;CACjB,AAAQ,oBAA4B;CAEpC,cAAc;AAEZ,OAAK,iDADmC,CACZ,IAAI;;CAGlC,OAAO,aAA2B;AAChC,MAAI,KAAK,WAAY;AACrB,OAAK,MAAM,UAAU,aAAa;GAChC,MAAM,QAAQ,KAAK,SAAS,WACzB,MAAM,EAAE,kBAAkB,OAAO,cACnC;AACD,OAAI,SAAS,EACX,MAAK,SAAS,SAAS;OAEvB,MAAK,SAAS,KAAK,OAAO;;AAI9B,OAAK,cAAc;AACnB,OAAK,QAAQ;;CAGf,SAAS;AACP,OAAK,aAAa;AAClB,OAAK,aAAa;AAClB,OAAK,QAAQ;;CAGf,AAAQ,eAAe;AACrB,MAAI,KAAK,gBAAgB,KAAK,WAAY;AAC1C,OAAK,eAAe,kBAAkB;AACpC,QAAK,gBAAgB,KAAK,eAAe,KAAK,KAAK,cAAc;AACjE,QAAK,QAAQ;KACZ,IAAI;;CAGT,AAAQ,cAAc;AACpB,MAAI,CAAC,KAAK,aAAc;AACxB,gBAAc,KAAK,aAAa;AAChC,OAAK,eAAe;;CAGtB,AAAQ,SAAS;EACf,MAAM,EAAE,OAAO,MAAM,SAAS,WAAW,KAAK,iBAAiB;EAE/D,MAAM,QAAQ,KAAK,cAAc,KAAK;EACtC,MAAMC,QAAkB,EAAE;EAE1B,MAAM,SAAS,SAAS;EACxB,MAAM,gBAAgB,iBAAiB,KAAK,GAAG;EAC/C,MAAMC,UAAoB,EAAE;AAC5B,MAAI,UAAU,EAAG,SAAQ,KAAK,OAAO,UAAU;AAC/C,MAAI,SAAS,EAAG,SAAQ,oCAAc,WAAW,UAAUC,4BAAW,IAAI,CAAC;EAE3E,MAAM,SAAS,QAAQ,SAAS,IAAI,KAAK,QAAQ,KAAK,KAAK,CAAC,KAAK;AAEjE,MAAI,OACF,OAAM,KACJ,GAAG,KAAK,OAAO,kCAAY,KAAKA,4BAAW,MAAM,CAAC,WAAW,gBAAgB,SAC9E;MAED,OAAM,KACJ,GAAG,KAAK,OAAO,kCAAY,OAAOA,4BAAW,KAAK,CAAC,YAAY,gBAAgB,SAChF;EAGH,MAAM,eAAe,MAAM,KAAK,KAAK;AACrC,MAAI,iBAAiB,KAAK,kBACxB;AAEF,OAAK,oBAAoB;AAEzB,MAAI,KAAK,gBAAgB,EACvB,SAAQ,OAAO,MAAM,QAAQ,KAAK,cAAc,GAAG;EAGrD,MAAM,oBAAoB,KAAK,IAAI,KAAK,eAAe,MAAM,OAAO;AACpE,OAAK,IAAI,IAAI,GAAG,IAAI,mBAAmB,KAAK;AAC1C,WAAQ,OAAO,MAAM,UAAU;GAC/B,MAAM,OAAO,MAAM;AACnB,OAAI,SAAS,OACX,SAAQ,OAAO,MAAM,KAAK;AAE5B,WAAQ,OAAO,MAAM,KAAK;;AAG5B,OAAK,gBAAgB,MAAM;;CAG7B,AAAQ,kBAAkB;EACxB,MAAM,OAAO,IAAI,IAAI,KAAK,SAAS,KAAK,MAAM,EAAE,cAAc,CAAC;EAE/D,MAAM,UAAU,IAAI,IAAgC;GAClD;GACA;GACA;GACA;GACA;GACA;GACA;GACD,CAAU;EAEX,MAAM,eAAe,IAAI,IAAsB;GAC7C;GACA;GACA;GACA;GACA;GACA;GACD,CAAU;EAEX,MAAM,OAAO,KAAK,SAAS,QAAQ,MACjC,QAAQ,IAAI,EAAE,OAAc,CAC7B,CAAC;EACF,MAAM,UAAU,KAAK
,SAAS,QAAQ,MACpC,aAAa,IAAI,EAAE,OAAc,CAClC,CAAC;EACF,MAAM,SAAS,KAAK,SAAS,QAAQ,MAAM,EAAE,WAAW,QAAQ,CAAC;AAEjE,SAAO;GACL,OAAO,KAAK;GACZ;GACA;GACA;GACD"}
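The PullLogger class diffed above re-renders its aggregated progress block in place: it moves the cursor up with the ANSI ESC[nF sequence, clears each line with ESC[2K, and rewrites the spinner/progress text on every tick. Below is a standalone TypeScript sketch of that rendering loop; the frame characters, labels, and timing are illustrative and not taken from @intlayer/config.

// Simplified sketch of in-place terminal progress rendering (illustrative values).
const spinnerFrames = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];

let renderedLines = 0;

const render = (lines: string[]): void => {
  // Jump back to the top of the previously rendered block (ESC[nF = cursor up n lines, column 1).
  if (renderedLines > 0) process.stdout.write(`\x1b[${renderedLines}F`);
  // Clear each stale line (ESC[2K) and write the new content over it.
  const totalLinesToClear = Math.max(renderedLines, lines.length);
  for (let i = 0; i < totalLinesToClear; i++) {
    process.stdout.write('\x1b[2K');
    const line = lines[i];
    if (line !== undefined) process.stdout.write(line);
    process.stdout.write('\n');
  }
  renderedLines = lines.length;
};

// Usage: tick a spinner next to an aggregated progress label, then print the final state.
let frameIndex = 0;
const timer = setInterval(() => {
  frameIndex = (frameIndex + 1) % spinnerFrames.length;
  render([`${spinnerFrames[frameIndex]} fetching dictionaries: 3/5`]);
}, 100);
setTimeout(() => {
  clearInterval(timer);
  render(['✔ fetched dictionaries: 5/5']);
}, 1000);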
package/dist/cjs/push/push.cjs
CHANGED
@@ -1,11 +1,11 @@
 const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
 const require_utils_checkAccess = require('../utils/checkAccess.cjs');
 const require_pushLog = require('../pushLog.cjs');
-let
-let
-let
+let _intlayer_api = require("@intlayer/api");
+let _intlayer_chokidar = require("@intlayer/chokidar");
+let _intlayer_config = require("@intlayer/config");
 let node_path = require("node:path");
-let
+let _intlayer_unmerged_dictionaries_entry = require("@intlayer/unmerged-dictionaries-entry");
 let node_fs_promises = require("node:fs/promises");
 node_fs_promises = require_rolldown_runtime.__toESM(node_fs_promises);
 let node_readline = require("node:readline");
@@ -15,19 +15,19 @@ node_readline = require_rolldown_runtime.__toESM(node_readline);
 const statusIconsAndColors = {
 pushed: {
 icon: "✔",
-color:
+color: _intlayer_config.ANSIColors.GREEN
 },
 modified: {
 icon: "✔",
-color:
+color: _intlayer_config.ANSIColors.GREEN
 },
 error: {
 icon: "✖",
-color:
+color: _intlayer_config.ANSIColors.RED
 },
 default: {
 icon: "⏲",
-color:
+color: _intlayer_config.ANSIColors.BLUE
 }
 };
 const getIconAndColor = (status) => {
@@ -37,14 +37,14 @@ const getIconAndColor = (status) => {
 * Get all local dictionaries and push them simultaneously.
 */
 const push = async (options) => {
-const config = (0,
-const appLogger = (0,
-if (options?.build === true) await (0,
-else if (typeof options?.build === "undefined") await (0,
+const config = (0, _intlayer_config.getConfiguration)(options?.configOptions);
+const appLogger = (0, _intlayer_config.getAppLogger)(config, { config: { prefix: "" } });
+if (options?.build === true) await (0, _intlayer_chokidar.prepareIntlayer)(config, { forceRun: true });
+else if (typeof options?.build === "undefined") await (0, _intlayer_chokidar.prepareIntlayer)(config);
 try {
 if (!await require_utils_checkAccess.checkCMSAuth(config)) return;
-const intlayerAPI = (0,
-const unmergedDictionariesRecord = (0,
+const intlayerAPI = (0, _intlayer_api.getIntlayerAPIProxy)(void 0, config);
+const unmergedDictionariesRecord = (0, _intlayer_unmerged_dictionaries_entry.getUnmergedDictionaries)(config);
 let dictionaries = Object.values(unmergedDictionariesRecord).flat();
 const existingDictionariesKeys = Object.keys(unmergedDictionariesRecord);
 if (options?.dictionaries) {
@@ -53,7 +53,7 @@ const push = async (options) => {
 dictionaries = dictionaries.filter((dictionary) => options.dictionaries?.includes(dictionary.key));
 }
 if (options?.gitOptions) {
-const gitFiles = await (0,
+const gitFiles = await (0, _intlayer_chokidar.listGitFiles)(options.gitOptions);
 dictionaries = dictionaries.filter((dictionary) => gitFiles.includes((0, node_path.join)(config.content.baseDir, dictionary.filePath ?? "")));
 }
 if (dictionaries.length === 0) {
@@ -85,7 +85,7 @@ const push = async (options) => {
 for (const remoteDictionaryData of allDictionaries) {
 const localDictionary = unmergedDictionariesRecord[remoteDictionaryData.key]?.find((dictionary) => dictionary.localId === remoteDictionaryData.localId);
 if (!localDictionary) continue;
-await (0,
+await (0, _intlayer_chokidar.writeContentDeclaration)({
 ...localDictionary,
 id: remoteDictionaryData.id
 }, config);
@@ -115,11 +115,11 @@ const push = async (options) => {
 }]);
 }
 };
-await (0,
+await (0, _intlayer_chokidar.parallelize)(dictionariesStatuses, processDictionary, 5);
 logger.finish();
 for (const dictionaryStatus of dictionariesStatuses) {
 const { icon, color } = getIconAndColor(dictionaryStatus.status);
-appLogger(` - ${dictionaryStatus.dictionary.key} ${
+appLogger(` - ${dictionaryStatus.dictionary.key} ${_intlayer_config.ANSIColors.GREY}[${color}${icon} ${dictionaryStatus.status}${_intlayer_config.ANSIColors.GREY}]${_intlayer_config.ANSIColors.RESET}`);
 }
 for (const statusObj of dictionariesStatuses) if (statusObj.errorMessage) appLogger(statusObj.errorMessage, { level: "error" });
 const deleteOption = options?.deleteLocaleDictionary;
@@ -147,7 +147,7 @@ const askUser = (question) => {
 });
 };
 const deleteLocalDictionaries = async (dictionariesToDelete, options) => {
-const appLogger = (0,
+const appLogger = (0, _intlayer_config.getAppLogger)((0, _intlayer_config.getConfiguration)(options?.configOptions), { config: { prefix: "" } });
 const filePathsSet = /* @__PURE__ */ new Set();
 for (const dictionary of dictionariesToDelete) {
 const { filePath } = dictionary;
@@ -161,11 +161,11 @@ const deleteLocalDictionaries = async (dictionariesToDelete, options) => {
 const stats = await node_fs_promises.lstat(filePath);
 if (stats.isFile()) {
 await node_fs_promises.unlink(filePath);
-appLogger(`Deleted file ${(0,
-} else if (stats.isDirectory()) appLogger(`Path is a directory ${(0,
-else appLogger(`Unknown file type for ${(0,
+appLogger(`Deleted file ${(0, _intlayer_chokidar.formatPath)(filePath)}`, {});
+} else if (stats.isDirectory()) appLogger(`Path is a directory ${(0, _intlayer_chokidar.formatPath)(filePath)}, skipping.`, {});
+else appLogger(`Unknown file type for ${(0, _intlayer_chokidar.formatPath)(filePath)}, skipping.`, {});
 } catch (err) {
-appLogger(`Error deleting ${(0,
+appLogger(`Error deleting ${(0, _intlayer_chokidar.formatPath)(filePath)}: ${err}`, { level: "error" });
 }
 };
 
@@ -1 +1 @@
-
{"version":3,"file":"push.cjs","names":["ANSIColors","checkCMSAuth","dictionaries: Dictionary[]","existingDictionariesKeys: string[]","dictionariesStatuses: DictionariesStatus[]","PushLogger","successfullyPushedDictionaries: Dictionary[]","readline","filePathsSet: Set<string>","fsPromises"],"sources":["../../../src/push/push.ts"],"sourcesContent":["import * as fsPromises from 'node:fs/promises';\nimport { join } from 'node:path';\nimport * as readline from 'node:readline';\nimport { getIntlayerAPIProxy } from '@intlayer/api';\nimport {\n formatPath,\n type ListGitFilesOptions,\n listGitFiles,\n parallelize,\n prepareIntlayer,\n writeContentDeclaration,\n} from '@intlayer/chokidar';\nimport {\n ANSIColors,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport { PushLogger, type PushStatus } from '../pushLog';\nimport { checkCMSAuth } from '../utils/checkAccess';\n\ntype PushOptions = {\n deleteLocaleDictionary?: boolean;\n keepLocaleDictionary?: boolean;\n dictionaries?: string[];\n gitOptions?: ListGitFilesOptions;\n configOptions?: GetConfigurationOptions;\n build?: boolean;\n};\n\ntype DictionariesStatus = {\n dictionary: Dictionary;\n status: 'pending' | 'pushing' | 'modified' | 'pushed' | 'unknown' | 'error';\n error?: Error;\n errorMessage?: string;\n};\n\n// Print per-dictionary summary similar to loadDictionaries\nconst statusIconsAndColors = {\n pushed: { icon: '✔', color: ANSIColors.GREEN },\n modified: { icon: '✔', color: ANSIColors.GREEN },\n error: { icon: '✖', color: ANSIColors.RED },\n default: { icon: '⏲', color: ANSIColors.BLUE },\n};\n\nconst getIconAndColor = (status: DictionariesStatus['status']) => {\n return (\n statusIconsAndColors[status as keyof typeof statusIconsAndColors] ??\n statusIconsAndColors.default\n );\n};\n\n/**\n * Get all local dictionaries and push them simultaneously.\n */\nexport const push = async (options?: PushOptions): Promise<void> => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config, {\n config: {\n prefix: '',\n },\n });\n\n if (options?.build === true) {\n await prepareIntlayer(config, { forceRun: true });\n } else if (typeof options?.build === 'undefined') {\n await prepareIntlayer(config);\n }\n\n try {\n const hasCMSAuth = await checkCMSAuth(config);\n\n if (!hasCMSAuth) return;\n\n const intlayerAPI = getIntlayerAPIProxy(undefined, config);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(config);\n let dictionaries: Dictionary[] = Object.values(\n unmergedDictionariesRecord\n ).flat();\n const existingDictionariesKeys: string[] = Object.keys(\n unmergedDictionariesRecord\n );\n\n if (options?.dictionaries) {\n // Check if the provided dictionaries exist\n const noneExistingDictionariesOption = options.dictionaries.filter(\n (dictionaryId) => !existingDictionariesKeys.includes(dictionaryId)\n );\n\n if (noneExistingDictionariesOption.length > 0) {\n appLogger(\n `The following dictionaries do not exist: ${noneExistingDictionariesOption.join(\n ', '\n )} and have been ignored.`,\n {\n level: 'error',\n }\n );\n }\n\n // Filter the dictionaries from the provided list of IDs\n dictionaries = dictionaries.filter((dictionary) =>\n options.dictionaries?.includes(dictionary.key)\n );\n }\n\n if (options?.gitOptions) {\n const gitFiles = await listGitFiles(options.gitOptions);\n\n dictionaries = 
dictionaries.filter((dictionary) =>\n gitFiles.includes(\n join(config.content.baseDir, dictionary.filePath ?? '')\n )\n );\n }\n\n // Check if the dictionaries list is empty\n if (dictionaries.length === 0) {\n appLogger('No local dictionaries found', {\n level: 'error',\n });\n return;\n }\n\n appLogger('Pushing dictionaries:');\n\n // Prepare dictionaries statuses\n const dictionariesStatuses: DictionariesStatus[] = dictionaries.map(\n (dictionary) => ({\n dictionary,\n status: 'pending',\n })\n );\n\n // Initialize aggregated logger similar to loadDictionaries\n const logger = new PushLogger();\n logger.update(\n dictionariesStatuses.map<PushStatus>((s) => ({\n dictionaryKey: s.dictionary.key,\n status: 'pending',\n }))\n );\n\n const successfullyPushedDictionaries: Dictionary[] = [];\n\n const processDictionary = async (\n statusObj: DictionariesStatus\n ): Promise<void> => {\n statusObj.status = 'pushing';\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'pushing' },\n ]);\n\n try {\n const pushResult = await intlayerAPI.dictionary.pushDictionaries([\n statusObj.dictionary,\n ]);\n\n const updatedDictionaries = pushResult.data?.updatedDictionaries ?? [];\n const newDictionaries = pushResult.data?.newDictionaries ?? [];\n\n const allDictionaries = [...updatedDictionaries, ...newDictionaries];\n\n for (const remoteDictionaryData of allDictionaries) {\n const localDictionary = unmergedDictionariesRecord[\n remoteDictionaryData.key\n ]?.find(\n (dictionary) => dictionary.localId === remoteDictionaryData.localId\n );\n\n if (!localDictionary) continue;\n\n await writeContentDeclaration(\n { ...localDictionary, id: remoteDictionaryData.id },\n config\n );\n }\n\n if (\n updatedDictionaries.some(\n (dictionary) => dictionary.key === statusObj.dictionary.key\n )\n ) {\n statusObj.status = 'modified';\n successfullyPushedDictionaries.push(statusObj.dictionary);\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'modified' },\n ]);\n } else if (\n newDictionaries.some(\n (dictionary) => dictionary.key === statusObj.dictionary.key\n )\n ) {\n statusObj.status = 'pushed';\n successfullyPushedDictionaries.push(statusObj.dictionary);\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'pushed' },\n ]);\n } else {\n statusObj.status = 'unknown';\n }\n } catch (error) {\n statusObj.status = 'error';\n statusObj.error = error as Error;\n statusObj.errorMessage = `Error pushing dictionary ${statusObj.dictionary.key}: ${error}`;\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'error' },\n ]);\n }\n };\n\n // Process dictionaries in parallel with a concurrency limit (reuse parallelize)\n await parallelize(dictionariesStatuses, processDictionary, 5);\n\n // Stop the logger and render final state\n logger.finish();\n\n for (const dictionaryStatus of dictionariesStatuses) {\n const { icon, color } = getIconAndColor(dictionaryStatus.status);\n appLogger(\n ` - ${dictionaryStatus.dictionary.key} ${ANSIColors.GREY}[${color}${icon} ${dictionaryStatus.status}${ANSIColors.GREY}]${ANSIColors.RESET}`\n );\n }\n\n // Output any error messages\n for (const statusObj of dictionariesStatuses) {\n if (statusObj.errorMessage) {\n appLogger(statusObj.errorMessage, {\n level: 'error',\n });\n }\n }\n\n // Handle delete or keep options\n const deleteOption = options?.deleteLocaleDictionary;\n const keepOption = options?.keepLocaleDictionary;\n\n if (deleteOption && keepOption) {\n throw new Error(\n 'Cannot specify both 
--deleteLocaleDictionary and --keepLocaleDictionary options.'\n );\n }\n\n if (deleteOption) {\n // Delete only the successfully pushed dictionaries\n await deleteLocalDictionaries(successfullyPushedDictionaries, options);\n } else if (keepOption) {\n // Do nothing, keep the local dictionaries\n } else {\n // Ask the user\n const answer = await askUser(\n 'Do you want to delete the local dictionaries that were successfully pushed? (yes/no): '\n );\n if (answer.toLowerCase() === 'yes' || answer.toLowerCase() === 'y') {\n await deleteLocalDictionaries(successfullyPushedDictionaries, options);\n }\n }\n } catch (error) {\n appLogger(error, {\n level: 'error',\n });\n }\n};\n\nconst askUser = (question: string): Promise<string> => {\n const rl = readline.createInterface({\n input: process.stdin,\n output: process.stdout,\n });\n return new Promise((resolve) => {\n rl.question(question, (answer: string) => {\n rl.close();\n resolve(answer);\n });\n });\n};\n\nconst deleteLocalDictionaries = async (\n dictionariesToDelete: Dictionary[],\n options?: PushOptions\n): Promise<void> => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config, {\n config: {\n prefix: '',\n },\n });\n\n // Use a Set to collect all unique file paths\n const filePathsSet: Set<string> = new Set();\n\n for (const dictionary of dictionariesToDelete) {\n const { filePath } = dictionary;\n\n if (!filePath) {\n appLogger(`Dictionary ${dictionary.key} does not have a file path`, {\n level: 'error',\n });\n continue;\n }\n\n filePathsSet.add(filePath);\n }\n\n for (const filePath of filePathsSet) {\n try {\n const stats = await fsPromises.lstat(filePath);\n\n if (stats.isFile()) {\n await fsPromises.unlink(filePath);\n appLogger(`Deleted file ${formatPath(filePath)}`, {});\n } else if (stats.isDirectory()) {\n appLogger(`Path is a directory ${formatPath(filePath)}, skipping.`, {});\n } else {\n appLogger(\n `Unknown file type for ${formatPath(filePath)}, skipping.`,\n {}\n );\n }\n } catch (err) {\n appLogger(`Error deleting ${formatPath(filePath)}: ${err}`, {\n level: 'error',\n });\n }\n 
}\n};\n"],"mappings":";;;;;;;;;;;;;;AAwCA,MAAM,uBAAuB;CAC3B,QAAQ;EAAE,MAAM;EAAK,OAAOA,6BAAW;EAAO;CAC9C,UAAU;EAAE,MAAM;EAAK,OAAOA,6BAAW;EAAO;CAChD,OAAO;EAAE,MAAM;EAAK,OAAOA,6BAAW;EAAK;CAC3C,SAAS;EAAE,MAAM;EAAK,OAAOA,6BAAW;EAAM;CAC/C;AAED,MAAM,mBAAmB,WAAyC;AAChE,QACE,qBAAqB,WACrB,qBAAqB;;;;;AAOzB,MAAa,OAAO,OAAO,YAAyC;CAClE,MAAM,iDAA0B,SAAS,cAAc;CACvD,MAAM,gDAAyB,QAAQ,EACrC,QAAQ,EACN,QAAQ,IACT,EACF,CAAC;AAEF,KAAI,SAAS,UAAU,KACrB,gDAAsB,QAAQ,EAAE,UAAU,MAAM,CAAC;UACxC,OAAO,SAAS,UAAU,YACnC,gDAAsB,OAAO;AAG/B,KAAI;AAGF,MAAI,CAFe,MAAMC,uCAAa,OAAO,CAE5B;EAEjB,MAAM,sDAAkC,QAAW,OAAO;EAE1D,MAAM,iGAAqD,OAAO;EAClE,IAAIC,eAA6B,OAAO,OACtC,2BACD,CAAC,MAAM;EACR,MAAMC,2BAAqC,OAAO,KAChD,2BACD;AAED,MAAI,SAAS,cAAc;GAEzB,MAAM,iCAAiC,QAAQ,aAAa,QACzD,iBAAiB,CAAC,yBAAyB,SAAS,aAAa,CACnE;AAED,OAAI,+BAA+B,SAAS,EAC1C,WACE,4CAA4C,+BAA+B,KACzE,KACD,CAAC,0BACF,EACE,OAAO,SACR,CACF;AAIH,kBAAe,aAAa,QAAQ,eAClC,QAAQ,cAAc,SAAS,WAAW,IAAI,CAC/C;;AAGH,MAAI,SAAS,YAAY;GACvB,MAAM,WAAW,4CAAmB,QAAQ,WAAW;AAEvD,kBAAe,aAAa,QAAQ,eAClC,SAAS,6BACF,OAAO,QAAQ,SAAS,WAAW,YAAY,GAAG,CACxD,CACF;;AAIH,MAAI,aAAa,WAAW,GAAG;AAC7B,aAAU,+BAA+B,EACvC,OAAO,SACR,CAAC;AACF;;AAGF,YAAU,wBAAwB;EAGlC,MAAMC,uBAA6C,aAAa,KAC7D,gBAAgB;GACf;GACA,QAAQ;GACT,EACF;EAGD,MAAM,SAAS,IAAIC,4BAAY;AAC/B,SAAO,OACL,qBAAqB,KAAiB,OAAO;GAC3C,eAAe,EAAE,WAAW;GAC5B,QAAQ;GACT,EAAE,CACJ;EAED,MAAMC,iCAA+C,EAAE;EAEvD,MAAM,oBAAoB,OACxB,cACkB;AAClB,aAAU,SAAS;AACnB,UAAO,OAAO,CACZ;IAAE,eAAe,UAAU,WAAW;IAAK,QAAQ;IAAW,CAC/D,CAAC;AAEF,OAAI;IACF,MAAM,aAAa,MAAM,YAAY,WAAW,iBAAiB,CAC/D,UAAU,WACX,CAAC;IAEF,MAAM,sBAAsB,WAAW,MAAM,uBAAuB,EAAE;IACtE,MAAM,kBAAkB,WAAW,MAAM,mBAAmB,EAAE;IAE9D,MAAM,kBAAkB,CAAC,GAAG,qBAAqB,GAAG,gBAAgB;AAEpE,SAAK,MAAM,wBAAwB,iBAAiB;KAClD,MAAM,kBAAkB,2BACtB,qBAAqB,MACpB,MACA,eAAe,WAAW,YAAY,qBAAqB,QAC7D;AAED,SAAI,CAAC,gBAAiB;AAEtB,4DACE;MAAE,GAAG;MAAiB,IAAI,qBAAqB;MAAI,EACnD,OACD;;AAGH,QACE,oBAAoB,MACjB,eAAe,WAAW,QAAQ,UAAU,WAAW,IACzD,EACD;AACA,eAAU,SAAS;AACnB,oCAA+B,KAAK,UAAU,WAAW;AACzD,YAAO,OAAO,CACZ;MAAE,eAAe,UAAU,WAAW;MAAK,QAAQ;MAAY,CAChE,CAAC;eAEF,gBAAgB,MACb,eAAe,WAAW,QAAQ,UAAU,WAAW,IACzD,EACD;AACA,eAAU,SAAS;AACnB,oCAA+B,KAAK,UAAU,WAAW;AACzD,YAAO,OAAO,CACZ;MAAE,eAAe,UAAU,WAAW;MAAK,QAAQ;MAAU,CAC9D,CAAC;UAEF,WAAU,SAAS;YAEd,OAAO;AACd,cAAU,SAAS;AACnB,cAAU,QAAQ;AAClB,cAAU,eAAe,4BAA4B,UAAU,WAAW,IAAI,IAAI;AAClF,WAAO,OAAO,CACZ;KAAE,eAAe,UAAU,WAAW;KAAK,QAAQ;KAAS,CAC7D,CAAC;;;AAKN,6CAAkB,sBAAsB,mBAAmB,EAAE;AAG7D,SAAO,QAAQ;AAEf,OAAK,MAAM,oBAAoB,sBAAsB;GACnD,MAAM,EAAE,MAAM,UAAU,gBAAgB,iBAAiB,OAAO;AAChE,aACE,MAAM,iBAAiB,WAAW,IAAI,GAAGN,6BAAW,KAAK,GAAG,QAAQ,KAAK,GAAG,iBAAiB,SAASA,6BAAW,KAAK,GAAGA,6BAAW,QACrI;;AAIH,OAAK,MAAM,aAAa,qBACtB,KAAI,UAAU,aACZ,WAAU,UAAU,cAAc,EAChC,OAAO,SACR,CAAC;EAKN,MAAM,eAAe,SAAS;EAC9B,MAAM,aAAa,SAAS;AAE5B,MAAI,gBAAgB,WAClB,OAAM,IAAI,MACR,mFACD;AAGH,MAAI,aAEF,OAAM,wBAAwB,gCAAgC,QAAQ;WAC7D,YAAY,QAEhB;GAEL,MAAM,SAAS,MAAM,QACnB,yFACD;AACD,OAAI,OAAO,aAAa,KAAK,SAAS,OAAO,aAAa,KAAK,IAC7D,OAAM,wBAAwB,gCAAgC,QAAQ;;UAGnE,OAAO;AACd,YAAU,OAAO,EACf,OAAO,SACR,CAAC;;;AAIN,MAAM,WAAW,aAAsC;CACrD,MAAM,KAAKO,cAAS,gBAAgB;EAClC,OAAO,QAAQ;EACf,QAAQ,QAAQ;EACjB,CAAC;AACF,QAAO,IAAI,SAAS,YAAY;AAC9B,KAAG,SAAS,WAAW,WAAmB;AACxC,MAAG,OAAO;AACV,WAAQ,OAAO;IACf;GACF;;AAGJ,MAAM,0BAA0B,OAC9B,sBACA,YACkB;CAElB,MAAM,wFAD0B,SAAS,cAAc,EAChB,EACrC,QAAQ,EACN,QAAQ,IACT,EACF,CAAC;CAGF,MAAMC,+BAA4B,IAAI,KAAK;AAE3C,MAAK,MAAM,cAAc,sBAAsB;EAC7C,MAAM,EAAE,aAAa;AAErB,MAAI,CAAC,UAAU;AACb,aAAU,cAAc,WAAW,IAAI,6BAA6B,EAClE,OAAO,SACR,CAAC;AACF;;AAGF,eAAa,IAAI,SAAS;;AAG5B,MAAK,MAAM,YAAY,aACrB,KAAI;EACF,MAAM,QAAQ,MAAMC,iBAAW,MAAM,SAAS;AAE9C,MAAI,MAAM,QAAQ,EAAE;AAClB,SAAMA,iBAAW,OAAO,SAAS;AACjC,aAAU,oDAA2B,SAAS,IAAI,EAAE,C
AAC;aAC5C,MAAM,aAAa,CAC5B,WAAU,2DAAkC,SAAS,CAAC,cAAc,EAAE,CAAC;MAEvE,WACE,6DAAoC,SAAS,CAAC,cAC9C,EAAE,CACH;UAEI,KAAK;AACZ,YAAU,sDAA6B,SAAS,CAAC,IAAI,OAAO,EAC1D,OAAO,SACR,CAAC"}
+
{"version":3,"file":"push.cjs","names":["ANSIColors","checkCMSAuth","dictionaries: Dictionary[]","existingDictionariesKeys: string[]","dictionariesStatuses: DictionariesStatus[]","PushLogger","successfullyPushedDictionaries: Dictionary[]","readline","filePathsSet: Set<string>","fsPromises"],"sources":["../../../src/push/push.ts"],"sourcesContent":["import * as fsPromises from 'node:fs/promises';\nimport { join } from 'node:path';\nimport * as readline from 'node:readline';\nimport { getIntlayerAPIProxy } from '@intlayer/api';\nimport {\n formatPath,\n type ListGitFilesOptions,\n listGitFiles,\n parallelize,\n prepareIntlayer,\n writeContentDeclaration,\n} from '@intlayer/chokidar';\nimport {\n ANSIColors,\n type GetConfigurationOptions,\n getAppLogger,\n getConfiguration,\n} from '@intlayer/config';\nimport type { Dictionary } from '@intlayer/types';\nimport { getUnmergedDictionaries } from '@intlayer/unmerged-dictionaries-entry';\nimport { PushLogger, type PushStatus } from '../pushLog';\nimport { checkCMSAuth } from '../utils/checkAccess';\n\ntype PushOptions = {\n deleteLocaleDictionary?: boolean;\n keepLocaleDictionary?: boolean;\n dictionaries?: string[];\n gitOptions?: ListGitFilesOptions;\n configOptions?: GetConfigurationOptions;\n build?: boolean;\n};\n\ntype DictionariesStatus = {\n dictionary: Dictionary;\n status: 'pending' | 'pushing' | 'modified' | 'pushed' | 'unknown' | 'error';\n error?: Error;\n errorMessage?: string;\n};\n\n// Print per-dictionary summary similar to loadDictionaries\nconst statusIconsAndColors = {\n pushed: { icon: '✔', color: ANSIColors.GREEN },\n modified: { icon: '✔', color: ANSIColors.GREEN },\n error: { icon: '✖', color: ANSIColors.RED },\n default: { icon: '⏲', color: ANSIColors.BLUE },\n};\n\nconst getIconAndColor = (status: DictionariesStatus['status']) => {\n return (\n statusIconsAndColors[status as keyof typeof statusIconsAndColors] ??\n statusIconsAndColors.default\n );\n};\n\n/**\n * Get all local dictionaries and push them simultaneously.\n */\nexport const push = async (options?: PushOptions): Promise<void> => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config, {\n config: {\n prefix: '',\n },\n });\n\n if (options?.build === true) {\n await prepareIntlayer(config, { forceRun: true });\n } else if (typeof options?.build === 'undefined') {\n await prepareIntlayer(config);\n }\n\n try {\n const hasCMSAuth = await checkCMSAuth(config);\n\n if (!hasCMSAuth) return;\n\n const intlayerAPI = getIntlayerAPIProxy(undefined, config);\n\n const unmergedDictionariesRecord = getUnmergedDictionaries(config);\n let dictionaries: Dictionary[] = Object.values(\n unmergedDictionariesRecord\n ).flat();\n const existingDictionariesKeys: string[] = Object.keys(\n unmergedDictionariesRecord\n );\n\n if (options?.dictionaries) {\n // Check if the provided dictionaries exist\n const noneExistingDictionariesOption = options.dictionaries.filter(\n (dictionaryId) => !existingDictionariesKeys.includes(dictionaryId)\n );\n\n if (noneExistingDictionariesOption.length > 0) {\n appLogger(\n `The following dictionaries do not exist: ${noneExistingDictionariesOption.join(\n ', '\n )} and have been ignored.`,\n {\n level: 'error',\n }\n );\n }\n\n // Filter the dictionaries from the provided list of IDs\n dictionaries = dictionaries.filter((dictionary) =>\n options.dictionaries?.includes(dictionary.key)\n );\n }\n\n if (options?.gitOptions) {\n const gitFiles = await listGitFiles(options.gitOptions);\n\n dictionaries = 
dictionaries.filter((dictionary) =>\n gitFiles.includes(\n join(config.content.baseDir, dictionary.filePath ?? '')\n )\n );\n }\n\n // Check if the dictionaries list is empty\n if (dictionaries.length === 0) {\n appLogger('No local dictionaries found', {\n level: 'error',\n });\n return;\n }\n\n appLogger('Pushing dictionaries:');\n\n // Prepare dictionaries statuses\n const dictionariesStatuses: DictionariesStatus[] = dictionaries.map(\n (dictionary) => ({\n dictionary,\n status: 'pending',\n })\n );\n\n // Initialize aggregated logger similar to loadDictionaries\n const logger = new PushLogger();\n logger.update(\n dictionariesStatuses.map<PushStatus>((s) => ({\n dictionaryKey: s.dictionary.key,\n status: 'pending',\n }))\n );\n\n const successfullyPushedDictionaries: Dictionary[] = [];\n\n const processDictionary = async (\n statusObj: DictionariesStatus\n ): Promise<void> => {\n statusObj.status = 'pushing';\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'pushing' },\n ]);\n\n try {\n const pushResult = await intlayerAPI.dictionary.pushDictionaries([\n statusObj.dictionary,\n ]);\n\n const updatedDictionaries = pushResult.data?.updatedDictionaries ?? [];\n const newDictionaries = pushResult.data?.newDictionaries ?? [];\n\n const allDictionaries = [...updatedDictionaries, ...newDictionaries];\n\n for (const remoteDictionaryData of allDictionaries) {\n const localDictionary = unmergedDictionariesRecord[\n remoteDictionaryData.key\n ]?.find(\n (dictionary) => dictionary.localId === remoteDictionaryData.localId\n );\n\n if (!localDictionary) continue;\n\n await writeContentDeclaration(\n { ...localDictionary, id: remoteDictionaryData.id },\n config\n );\n }\n\n if (\n updatedDictionaries.some(\n (dictionary) => dictionary.key === statusObj.dictionary.key\n )\n ) {\n statusObj.status = 'modified';\n successfullyPushedDictionaries.push(statusObj.dictionary);\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'modified' },\n ]);\n } else if (\n newDictionaries.some(\n (dictionary) => dictionary.key === statusObj.dictionary.key\n )\n ) {\n statusObj.status = 'pushed';\n successfullyPushedDictionaries.push(statusObj.dictionary);\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'pushed' },\n ]);\n } else {\n statusObj.status = 'unknown';\n }\n } catch (error) {\n statusObj.status = 'error';\n statusObj.error = error as Error;\n statusObj.errorMessage = `Error pushing dictionary ${statusObj.dictionary.key}: ${error}`;\n logger.update([\n { dictionaryKey: statusObj.dictionary.key, status: 'error' },\n ]);\n }\n };\n\n // Process dictionaries in parallel with a concurrency limit (reuse parallelize)\n await parallelize(dictionariesStatuses, processDictionary, 5);\n\n // Stop the logger and render final state\n logger.finish();\n\n for (const dictionaryStatus of dictionariesStatuses) {\n const { icon, color } = getIconAndColor(dictionaryStatus.status);\n appLogger(\n ` - ${dictionaryStatus.dictionary.key} ${ANSIColors.GREY}[${color}${icon} ${dictionaryStatus.status}${ANSIColors.GREY}]${ANSIColors.RESET}`\n );\n }\n\n // Output any error messages\n for (const statusObj of dictionariesStatuses) {\n if (statusObj.errorMessage) {\n appLogger(statusObj.errorMessage, {\n level: 'error',\n });\n }\n }\n\n // Handle delete or keep options\n const deleteOption = options?.deleteLocaleDictionary;\n const keepOption = options?.keepLocaleDictionary;\n\n if (deleteOption && keepOption) {\n throw new Error(\n 'Cannot specify both 
--deleteLocaleDictionary and --keepLocaleDictionary options.'\n );\n }\n\n if (deleteOption) {\n // Delete only the successfully pushed dictionaries\n await deleteLocalDictionaries(successfullyPushedDictionaries, options);\n } else if (keepOption) {\n // Do nothing, keep the local dictionaries\n } else {\n // Ask the user\n const answer = await askUser(\n 'Do you want to delete the local dictionaries that were successfully pushed? (yes/no): '\n );\n if (answer.toLowerCase() === 'yes' || answer.toLowerCase() === 'y') {\n await deleteLocalDictionaries(successfullyPushedDictionaries, options);\n }\n }\n } catch (error) {\n appLogger(error, {\n level: 'error',\n });\n }\n};\n\nconst askUser = (question: string): Promise<string> => {\n const rl = readline.createInterface({\n input: process.stdin,\n output: process.stdout,\n });\n return new Promise((resolve) => {\n rl.question(question, (answer: string) => {\n rl.close();\n resolve(answer);\n });\n });\n};\n\nconst deleteLocalDictionaries = async (\n dictionariesToDelete: Dictionary[],\n options?: PushOptions\n): Promise<void> => {\n const config = getConfiguration(options?.configOptions);\n const appLogger = getAppLogger(config, {\n config: {\n prefix: '',\n },\n });\n\n // Use a Set to collect all unique file paths\n const filePathsSet: Set<string> = new Set();\n\n for (const dictionary of dictionariesToDelete) {\n const { filePath } = dictionary;\n\n if (!filePath) {\n appLogger(`Dictionary ${dictionary.key} does not have a file path`, {\n level: 'error',\n });\n continue;\n }\n\n filePathsSet.add(filePath);\n }\n\n for (const filePath of filePathsSet) {\n try {\n const stats = await fsPromises.lstat(filePath);\n\n if (stats.isFile()) {\n await fsPromises.unlink(filePath);\n appLogger(`Deleted file ${formatPath(filePath)}`, {});\n } else if (stats.isDirectory()) {\n appLogger(`Path is a directory ${formatPath(filePath)}, skipping.`, {});\n } else {\n appLogger(\n `Unknown file type for ${formatPath(filePath)}, skipping.`,\n {}\n );\n }\n } catch (err) {\n appLogger(`Error deleting ${formatPath(filePath)}: ${err}`, {\n level: 'error',\n });\n }\n 
}\n};\n"],"mappings":";;;;;;;;;;;;;;AAwCA,MAAM,uBAAuB;CAC3B,QAAQ;EAAE,MAAM;EAAK,OAAOA,4BAAW;EAAO;CAC9C,UAAU;EAAE,MAAM;EAAK,OAAOA,4BAAW;EAAO;CAChD,OAAO;EAAE,MAAM;EAAK,OAAOA,4BAAW;EAAK;CAC3C,SAAS;EAAE,MAAM;EAAK,OAAOA,4BAAW;EAAM;CAC/C;AAED,MAAM,mBAAmB,WAAyC;AAChE,QACE,qBAAqB,WACrB,qBAAqB;;;;;AAOzB,MAAa,OAAO,OAAO,YAAyC;CAClE,MAAM,gDAA0B,SAAS,cAAc;CACvD,MAAM,+CAAyB,QAAQ,EACrC,QAAQ,EACN,QAAQ,IACT,EACF,CAAC;AAEF,KAAI,SAAS,UAAU,KACrB,+CAAsB,QAAQ,EAAE,UAAU,MAAM,CAAC;UACxC,OAAO,SAAS,UAAU,YACnC,+CAAsB,OAAO;AAG/B,KAAI;AAGF,MAAI,CAFe,MAAMC,uCAAa,OAAO,CAE5B;EAEjB,MAAM,qDAAkC,QAAW,OAAO;EAE1D,MAAM,gGAAqD,OAAO;EAClE,IAAIC,eAA6B,OAAO,OACtC,2BACD,CAAC,MAAM;EACR,MAAMC,2BAAqC,OAAO,KAChD,2BACD;AAED,MAAI,SAAS,cAAc;GAEzB,MAAM,iCAAiC,QAAQ,aAAa,QACzD,iBAAiB,CAAC,yBAAyB,SAAS,aAAa,CACnE;AAED,OAAI,+BAA+B,SAAS,EAC1C,WACE,4CAA4C,+BAA+B,KACzE,KACD,CAAC,0BACF,EACE,OAAO,SACR,CACF;AAIH,kBAAe,aAAa,QAAQ,eAClC,QAAQ,cAAc,SAAS,WAAW,IAAI,CAC/C;;AAGH,MAAI,SAAS,YAAY;GACvB,MAAM,WAAW,2CAAmB,QAAQ,WAAW;AAEvD,kBAAe,aAAa,QAAQ,eAClC,SAAS,6BACF,OAAO,QAAQ,SAAS,WAAW,YAAY,GAAG,CACxD,CACF;;AAIH,MAAI,aAAa,WAAW,GAAG;AAC7B,aAAU,+BAA+B,EACvC,OAAO,SACR,CAAC;AACF;;AAGF,YAAU,wBAAwB;EAGlC,MAAMC,uBAA6C,aAAa,KAC7D,gBAAgB;GACf;GACA,QAAQ;GACT,EACF;EAGD,MAAM,SAAS,IAAIC,4BAAY;AAC/B,SAAO,OACL,qBAAqB,KAAiB,OAAO;GAC3C,eAAe,EAAE,WAAW;GAC5B,QAAQ;GACT,EAAE,CACJ;EAED,MAAMC,iCAA+C,EAAE;EAEvD,MAAM,oBAAoB,OACxB,cACkB;AAClB,aAAU,SAAS;AACnB,UAAO,OAAO,CACZ;IAAE,eAAe,UAAU,WAAW;IAAK,QAAQ;IAAW,CAC/D,CAAC;AAEF,OAAI;IACF,MAAM,aAAa,MAAM,YAAY,WAAW,iBAAiB,CAC/D,UAAU,WACX,CAAC;IAEF,MAAM,sBAAsB,WAAW,MAAM,uBAAuB,EAAE;IACtE,MAAM,kBAAkB,WAAW,MAAM,mBAAmB,EAAE;IAE9D,MAAM,kBAAkB,CAAC,GAAG,qBAAqB,GAAG,gBAAgB;AAEpE,SAAK,MAAM,wBAAwB,iBAAiB;KAClD,MAAM,kBAAkB,2BACtB,qBAAqB,MACpB,MACA,eAAe,WAAW,YAAY,qBAAqB,QAC7D;AAED,SAAI,CAAC,gBAAiB;AAEtB,2DACE;MAAE,GAAG;MAAiB,IAAI,qBAAqB;MAAI,EACnD,OACD;;AAGH,QACE,oBAAoB,MACjB,eAAe,WAAW,QAAQ,UAAU,WAAW,IACzD,EACD;AACA,eAAU,SAAS;AACnB,oCAA+B,KAAK,UAAU,WAAW;AACzD,YAAO,OAAO,CACZ;MAAE,eAAe,UAAU,WAAW;MAAK,QAAQ;MAAY,CAChE,CAAC;eAEF,gBAAgB,MACb,eAAe,WAAW,QAAQ,UAAU,WAAW,IACzD,EACD;AACA,eAAU,SAAS;AACnB,oCAA+B,KAAK,UAAU,WAAW;AACzD,YAAO,OAAO,CACZ;MAAE,eAAe,UAAU,WAAW;MAAK,QAAQ;MAAU,CAC9D,CAAC;UAEF,WAAU,SAAS;YAEd,OAAO;AACd,cAAU,SAAS;AACnB,cAAU,QAAQ;AAClB,cAAU,eAAe,4BAA4B,UAAU,WAAW,IAAI,IAAI;AAClF,WAAO,OAAO,CACZ;KAAE,eAAe,UAAU,WAAW;KAAK,QAAQ;KAAS,CAC7D,CAAC;;;AAKN,4CAAkB,sBAAsB,mBAAmB,EAAE;AAG7D,SAAO,QAAQ;AAEf,OAAK,MAAM,oBAAoB,sBAAsB;GACnD,MAAM,EAAE,MAAM,UAAU,gBAAgB,iBAAiB,OAAO;AAChE,aACE,MAAM,iBAAiB,WAAW,IAAI,GAAGN,4BAAW,KAAK,GAAG,QAAQ,KAAK,GAAG,iBAAiB,SAASA,4BAAW,KAAK,GAAGA,4BAAW,QACrI;;AAIH,OAAK,MAAM,aAAa,qBACtB,KAAI,UAAU,aACZ,WAAU,UAAU,cAAc,EAChC,OAAO,SACR,CAAC;EAKN,MAAM,eAAe,SAAS;EAC9B,MAAM,aAAa,SAAS;AAE5B,MAAI,gBAAgB,WAClB,OAAM,IAAI,MACR,mFACD;AAGH,MAAI,aAEF,OAAM,wBAAwB,gCAAgC,QAAQ;WAC7D,YAAY,QAEhB;GAEL,MAAM,SAAS,MAAM,QACnB,yFACD;AACD,OAAI,OAAO,aAAa,KAAK,SAAS,OAAO,aAAa,KAAK,IAC7D,OAAM,wBAAwB,gCAAgC,QAAQ;;UAGnE,OAAO;AACd,YAAU,OAAO,EACf,OAAO,SACR,CAAC;;;AAIN,MAAM,WAAW,aAAsC;CACrD,MAAM,KAAKO,cAAS,gBAAgB;EAClC,OAAO,QAAQ;EACf,QAAQ,QAAQ;EACjB,CAAC;AACF,QAAO,IAAI,SAAS,YAAY;AAC9B,KAAG,SAAS,WAAW,WAAmB;AACxC,MAAG,OAAO;AACV,WAAQ,OAAO;IACf;GACF;;AAGJ,MAAM,0BAA0B,OAC9B,sBACA,YACkB;CAElB,MAAM,sFAD0B,SAAS,cAAc,EAChB,EACrC,QAAQ,EACN,QAAQ,IACT,EACF,CAAC;CAGF,MAAMC,+BAA4B,IAAI,KAAK;AAE3C,MAAK,MAAM,cAAc,sBAAsB;EAC7C,MAAM,EAAE,aAAa;AAErB,MAAI,CAAC,UAAU;AACb,aAAU,cAAc,WAAW,IAAI,6BAA6B,EAClE,OAAO,SACR,CAAC;AACF;;AAGF,eAAa,IAAI,SAAS;;AAG5B,MAAK,MAAM,YAAY,aACrB,KAAI;EACF,MAAM,QAAQ,MAAMC,iBAAW,MAAM,SAAS;AAE9C,MAAI,MAAM,QAAQ,EAAE;AAClB,SAAMA,iBAAW,OAAO,SAAS;AACjC,aAAU,mDAA2B,SAAS,IAAI,EAAE,C
AAC;aAC5C,MAAM,aAAa,CAC5B,WAAU,0DAAkC,SAAS,CAAC,cAAc,EAAE,CAAC;MAEvE,WACE,4DAAoC,SAAS,CAAC,cAC9C,EAAE,CACH;UAEI,KAAK;AACZ,YAAU,qDAA6B,SAAS,CAAC,IAAI,OAAO,EAC1D,OAAO,SACR,CAAC"}
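As the diff above shows, push.cjs hands its per-dictionary work to (0, _intlayer_chokidar.parallelize)(dictionariesStatuses, processDictionary, 5), i.e. a worker pool capped at five concurrent pushes. The TypeScript sketch below illustrates that general concurrency-limited pattern; it is an assumption about the shape of such a helper, not the @intlayer/chokidar implementation.

// Sketch of a concurrency-limited worker pool (illustrative only).
const parallelizeSketch = async <T>(
  items: T[],
  worker: (item: T) => Promise<void>,
  limit: number
): Promise<void> => {
  let nextIndex = 0;
  // Each runner repeatedly claims the next unprocessed item; claiming is synchronous,
  // so no two runners can take the same index.
  const runner = async (): Promise<void> => {
    while (nextIndex < items.length) {
      const item = items[nextIndex];
      nextIndex += 1;
      if (item !== undefined) await worker(item);
    }
  };
  await Promise.all(
    Array.from({ length: Math.min(limit, items.length) }, () => runner())
  );
};

// Usage mirroring the call in push.cjs (names taken from the diff above):
// await parallelizeSketch(dictionariesStatuses, processDictionary, 5);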