@red-hat-developer-hub/translations-cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/bin/translations-cli +34 -0
- package/dist/commands/clean.cjs.js +130 -0
- package/dist/commands/deploy.cjs.js +53 -0
- package/dist/commands/download.cjs.js +304 -0
- package/dist/commands/generate.cjs.js +1294 -0
- package/dist/commands/index.cjs.js +146 -0
- package/dist/commands/init.cjs.js +115 -0
- package/dist/commands/list.cjs.js +178 -0
- package/dist/commands/setupMemsource.cjs.js +338 -0
- package/dist/commands/status.cjs.js +40 -0
- package/dist/commands/sync.cjs.js +226 -0
- package/dist/commands/upload.cjs.js +506 -0
- package/dist/index.cjs.js +30 -0
- package/dist/lib/errors.cjs.js +36 -0
- package/dist/lib/i18n/analyzeStatus.cjs.js +79 -0
- package/dist/lib/i18n/config.cjs.js +256 -0
- package/dist/lib/i18n/deployTranslations.cjs.js +1213 -0
- package/dist/lib/i18n/extractKeys.cjs.js +418 -0
- package/dist/lib/i18n/formatReport.cjs.js +138 -0
- package/dist/lib/i18n/generateFiles.cjs.js +94 -0
- package/dist/lib/i18n/loadFile.cjs.js +93 -0
- package/dist/lib/i18n/mergeFiles.cjs.js +126 -0
- package/dist/lib/i18n/uploadCache.cjs.js +83 -0
- package/dist/lib/i18n/validateFile.cjs.js +189 -0
- package/dist/lib/paths.cjs.js +51 -0
- package/dist/lib/utils/exec.cjs.js +41 -0
- package/dist/lib/utils/translationUtils.cjs.js +33 -0
- package/dist/lib/version.cjs.js +38 -0
- package/package.json +65 -0
package/CHANGELOG.md
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
#!/usr/bin/env node
/*
 * Copyright Red Hat, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Launcher shim: dispatch to the built bundle or to the TS/JS sources
// depending on how this package is being run.
// FIX: `require('fs')` was previously invoked inline twice; hoist it once.
const fs = require('fs');
const path = require('node:path');

// Figure out whether we're running inside the backstage repo or as an installed dependency
/* eslint-disable-next-line no-restricted-syntax */
const isLocal = fs.existsSync(path.resolve(__dirname, '../src'));
const hasDist = fs.existsSync(path.resolve(__dirname, '../dist/index.cjs.js'));

// Prefer built version if available, otherwise use source with transform
if (hasDist) {
  require('..');
} else if (isLocal) {
  // Register the on-the-fly transform so the untranspiled sources can run.
  require('@backstage/cli/config/nodeTransform.cjs');
  require('../src');
} else {
  require('..');
}
|
|
34
|
+
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var path = require('node:path');
|
|
4
|
+
var chalk = require('chalk');
|
|
5
|
+
var fs = require('fs-extra');
|
|
6
|
+
|
|
7
|
+
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
|
|
8
|
+
|
|
9
|
+
var path__default = /*#__PURE__*/_interopDefaultCompat(path);
|
|
10
|
+
var chalk__default = /*#__PURE__*/_interopDefaultCompat(chalk);
|
|
11
|
+
var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
|
|
12
|
+
|
|
13
|
+
// Scan the i18n directory for temporary artifacts: dotfiles, *.tmp and
// *.cache entries. Resolves to an empty list when the directory is absent.
async function findI18nTempFiles(i18nDir) {
  const dirExists = await fs__default.default.pathExists(i18nDir);
  if (!dirExists) {
    return [];
  }
  const entries = await fs__default.default.readdir(i18nDir);
  const isTempEntry = (entry) =>
    entry.startsWith(".") || entry.endsWith(".tmp") || entry.endsWith(".cache");
  return entries.filter(isTempEntry);
}
|
|
22
|
+
// Build the list of cleanup tasks ({ name, path, files }) for the i18n,
// cache, and backup directories. Directories that do not exist (or the i18n
// dir when it holds no temp files) are omitted. All fs probes run in parallel.
async function collectCleanupTasks(i18nDir, cacheDir, backupDir) {
  const [i18nTempFiles, cacheExists, backupExists] = await Promise.all([
    findI18nTempFiles(i18nDir),
    fs__default.default.pathExists(cacheDir),
    fs__default.default.pathExists(backupDir)
  ]);
  // Helper producing a task for an existing directory (all of its entries).
  const describeDir = async (name, dirPath) => ({
    name,
    path: dirPath,
    files: await fs__default.default.readdir(dirPath)
  });
  const i18nTask =
    i18nTempFiles.length > 0
      ? { name: "i18n directory", path: i18nDir, files: i18nTempFiles }
      : null;
  const tasks = await Promise.all([
    Promise.resolve(i18nTask),
    cacheExists ? describeDir("cache directory", cacheDir) : Promise.resolve(null),
    backupExists ? describeDir("backup directory", backupDir) : Promise.resolve(null)
  ]);
  return tasks.filter((task) => task !== null);
}
|
|
50
|
+
// Show the user exactly which files are about to be deleted, grouped by task.
function displayCleanupPreview(cleanupTasks) {
  console.log(chalk__default.default.yellow("\u{1F4CB} Files to be cleaned:"));
  cleanupTasks.forEach((task) => {
    console.log(chalk__default.default.gray(` ${task.name}: ${task.files.length} files`));
    task.files.forEach((file) => {
      console.log(chalk__default.default.gray(` - ${file}`));
    });
  });
}
|
|
59
|
+
// Delete every file listed in the cleanup tasks, continuing past individual
// failures. Returns the number of files actually removed.
async function performCleanup(cleanupTasks) {
  let removedCount = 0;
  for (const task of cleanupTasks) {
    console.log(chalk__default.default.yellow(`\u{1F9F9} Cleaning ${task.name}...`));
    for (const entry of task.files) {
      const target = path__default.default.join(task.path, entry);
      try {
        await fs__default.default.remove(target);
        removedCount += 1;
      } catch (error) {
        // Non-fatal: warn and keep going so one stubborn file does not block the rest.
        console.warn(
          chalk__default.default.yellow(`\u26A0\uFE0F Could not remove ${target}: ${error}`)
        );
      }
    }
  }
  return removedCount;
}
|
|
77
|
+
// After cleanup, drop any task directory that ended up empty. Failures are
// deliberately swallowed — leaving an empty directory behind is harmless.
async function removeEmptyDirectories(cleanupTasks) {
  for (const task of cleanupTasks) {
    const leftover = await fs__default.default.readdir(task.path).catch(() => []);
    if (leftover.length !== 0) {
      continue;
    }
    try {
      await fs__default.default.remove(task.path);
      console.log(chalk__default.default.gray(` Removed empty directory: ${task.path}`));
    } catch {
      // best-effort only
    }
  }
}
|
|
89
|
+
// Print the final cleanup summary (counts of files removed and dirs visited).
function displaySummary(totalCleaned, directoriesProcessed) {
  const palette = chalk__default.default;
  console.log(palette.green(`\u2705 Cleanup completed successfully!`));
  console.log(palette.gray(` Files cleaned: ${totalCleaned}`));
  console.log(palette.gray(` Directories processed: ${directoriesProcessed}`));
}
|
|
94
|
+
// Entry point for the `clean` command: previews temp/cache/backup files and,
// only when `force` is set, deletes them and prunes directories left empty.
// Without `force` it prints the preview plus a confirmation hint and exits.
async function cleanCommand(opts) {
  console.log(chalk__default.default.blue("\u{1F9F9} Cleaning up temporary i18n files and caches..."));
  const {
    i18nDir = "i18n",
    cacheDir = ".i18n-cache",
    backupDir = ".i18n-backup",
    force = false
  } = opts;
  try {
    const cleanupTasks = await collectCleanupTasks(i18nDir, cacheDir, backupDir);
    if (cleanupTasks.length === 0) {
      console.log(chalk__default.default.yellow("\u2728 No temporary files found to clean"));
      return;
    }
    displayCleanupPreview(cleanupTasks);
    if (!force) {
      // Dry-run path: show what would be deleted and how to confirm.
      console.log(
        chalk__default.default.yellow("\u26A0\uFE0F This will permanently delete the above files.")
      );
      console.log(chalk__default.default.yellow(" Use --force to skip this confirmation."));
      return;
    }
    const totalCleaned = await performCleanup(cleanupTasks);
    await removeEmptyDirectories(cleanupTasks);
    displaySummary(totalCleaned, cleanupTasks.length);
  } catch (error) {
    console.error(chalk__default.default.red("\u274C Error during cleanup:"), error);
    throw error;
  }
}

exports.cleanCommand = cleanCommand;
//# sourceMappingURL=clean.cjs.js.map
|
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var chalk = require('chalk');
|
|
4
|
+
var fs = require('fs-extra');
|
|
5
|
+
var deployTranslations = require('../lib/i18n/deployTranslations.cjs.js');
|
|
6
|
+
|
|
7
|
+
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
|
|
8
|
+
|
|
9
|
+
var chalk__default = /*#__PURE__*/_interopDefaultCompat(chalk);
|
|
10
|
+
var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
|
|
11
|
+
|
|
12
|
+
// Entry point for the `deploy` command: verifies the download directory
// contains translation JSON files, then delegates to deployTranslations to
// write them into the application's language files.
async function deployCommand(opts) {
  console.log(
    chalk__default.default.blue(
      "\u{1F680} Deploying translated strings to application language files..."
    )
  );
  const { sourceDir = "i18n/downloads" } = opts;
  try {
    const sourceDirStr = String(sourceDir || "i18n/downloads");
    const repoRoot = process.cwd();
    const sourceExists = await fs__default.default.pathExists(sourceDirStr);
    if (!sourceExists) {
      throw new Error(`Source directory not found: ${sourceDirStr}`);
    }
    const entries = await fs__default.default.readdir(sourceDirStr);
    const jsonFiles = entries.filter((f) => f.endsWith(".json"));
    if (jsonFiles.length === 0) {
      // Nothing to deploy — point the user at the download step instead of failing.
      console.log(
        chalk__default.default.yellow(`\u26A0\uFE0F No translation JSON files found in ${sourceDirStr}`)
      );
      console.log(
        chalk__default.default.gray(
          " Make sure you have downloaded translations first using:"
        )
      );
      console.log(chalk__default.default.gray(" translations-cli i18n download"));
      return;
    }
    console.log(
      chalk__default.default.yellow(
        `\u{1F4C1} Found ${jsonFiles.length} translation file(s) to deploy`
      )
    );
    await deployTranslations.deployTranslations(sourceDirStr, repoRoot);
    console.log(chalk__default.default.green(`\u2705 Deployment completed successfully!`));
  } catch (error) {
    console.error(chalk__default.default.red("\u274C Error deploying translations:"), error.message);
    throw error;
  }
}

exports.deployCommand = deployCommand;
//# sourceMappingURL=deploy.cjs.js.map
|
|
@@ -0,0 +1,304 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var chalk = require('chalk');
|
|
4
|
+
var fs = require('fs-extra');
|
|
5
|
+
var path = require('node:path');
|
|
6
|
+
var config = require('../lib/i18n/config.cjs.js');
|
|
7
|
+
var exec = require('../lib/utils/exec.cjs.js');
|
|
8
|
+
|
|
9
|
+
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
|
|
10
|
+
|
|
11
|
+
var chalk__default = /*#__PURE__*/_interopDefaultCompat(chalk);
|
|
12
|
+
var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
|
|
13
|
+
var path__default = /*#__PURE__*/_interopDefaultCompat(path);
|
|
14
|
+
|
|
15
|
+
// Assemble the memsource CLI argument vector that downloads one job's
// target-language file into `outputDir`.
function buildDownloadJobArgs(projectId, jobId, outputDir) {
  const args = ["job", "download"];
  args.push("--project-id", projectId);
  args.push("--job-id", jobId);
  args.push("--type", "target");
  args.push("--output-dir", outputDir);
  return args;
}
|
|
29
|
+
// Assemble the memsource CLI argument vector that lists a project's jobs in JSON.
function buildListJobsArgs(projectId) {
  const base = ["job", "list"];
  return base.concat(["--project-id", projectId, "--format", "json"]);
}
|
|
32
|
+
// Locate the newest file in `outputDir` whose name ends with
// `-<targetLang>.json` or `-<targetLang>-C.json` (case-insensitive).
// Returns the bare filename, or null when nothing matches or the directory
// cannot be read.
// NOTE(review): `targetLang` is interpolated into a RegExp unescaped — assumed
// to be a plain language code like "fr"; verify against callers.
function findDownloadedFile(outputDir, targetLang) {
  try {
    const candidates = fs__default.default.readdirSync(outputDir);
    const suffixPattern = new RegExp(`-${targetLang}(?:-C)?\\.json$`, "i");
    const matches = candidates.filter((name) => suffixPattern.test(name));
    if (matches.length === 0) {
      return null;
    }
    // Rank matches by modification time, newest first.
    const withTimes = matches.map((name) => ({
      name,
      mtime: fs__default.default
        .statSync(path__default.default.join(outputDir, name))
        .mtime.getTime()
    }));
    withTimes.sort((a, b) => b.mtime - a.mtime);
    return withTimes[0].name;
  } catch {
    return null;
  }
}
|
|
50
|
+
// Rename a downloaded translation file to the canonical
// `<repo>-<date>-<targetLang>.json` form, dropping the trailing
// source/target language pair and optional "-C" suffix the TMS appends,
// and dropping a "-reference" marker when present.
// Returns the final filename; the original name when it matches no known
// pattern; or null when the file cannot be located at all.
function renameDownloadedFile(outputDir, originalFilename, targetLang) {
  try {
    const originalPath = path__default.default.join(outputDir, originalFilename);
    if (!fs__default.default.existsSync(originalPath)) {
      // The TMS sometimes reports a different name than it writes; scan the
      // directory for a plausible match and retry once with that name.
      const foundFile = findDownloadedFile(outputDir, targetLang);
      if (foundFile) {
        return renameDownloadedFile(outputDir, foundFile, targetLang);
      }
      return null;
    }
    let cleanFilename = null;
    // FIX: test the more specific "-reference-" pattern first. Previously the
    // generic pattern ran first and — because its repo group ([a-z-]+) also
    // matches hyphens — always consumed "-reference" into the repo name,
    // leaving the reference-stripping branch unreachable dead code.
    const referenceMatch = originalFilename.match(
      /^([a-z-]+)-reference-(\d{4}-\d{2}-\d{2})-([a-z]{2})-([a-z]{2})(?:-C)?\.json$/i
    );
    if (referenceMatch) {
      const [, repo, date] = referenceMatch;
      cleanFilename = `${repo}-${date}-${targetLang}.json`;
    } else {
      const genericMatch = originalFilename.match(
        /^([a-z-]+)-(\d{4}-\d{2}-\d{2})-([a-z]{2})-([a-z]{2})(?:-C)?\.json$/i
      );
      if (genericMatch) {
        const [, repo, date] = genericMatch;
        cleanFilename = `${repo}-${date}-${targetLang}.json`;
      }
    }
    if (!cleanFilename) {
      // Unknown naming scheme — leave the file as-is.
      return originalFilename;
    }
    const cleanPath = path__default.default.join(outputDir, cleanFilename);
    if (originalPath !== cleanPath) {
      fs__default.default.moveSync(originalPath, cleanPath, { overwrite: true });
    }
    return cleanFilename;
  } catch (error) {
    // Renaming is cosmetic; on failure keep the original name and warn.
    console.warn(
      chalk__default.default.yellow(
        `\u26A0\uFE0F Warning: Could not rename file ${originalFilename}: ${error.message}`
      )
    );
    return originalFilename;
  }
}
|
|
93
|
+
// Download a single job's translated file via the memsource CLI and normalize
// its filename. Returns { jobId, filename, lang } on success, or null when
// the download fails or the job is absent from the project's job list.
async function downloadJob(projectId, jobId, outputDir) {
  try {
    // Fetch the translated ("target") file into outputDir.
    const cmdArgs = buildDownloadJobArgs(projectId, jobId, outputDir);
    exec.safeExecSyncOrThrow("memsource", cmdArgs, {
      stdio: "pipe",
      env: { ...process.env }
    });
    // Re-list the project's jobs to learn this job's reported filename and
    // target language (the download command itself does not return them).
    const jobInfoArgs = buildListJobsArgs(projectId);
    const jobListOutput = exec.safeExecSyncOrThrow("memsource", jobInfoArgs, {
      encoding: "utf-8",
      env: { ...process.env }
    });
    const jobs = JSON.parse(jobListOutput);
    // NOTE(review): assumes the CLI may return either a single job object or
    // an array; normalize to an array before searching.
    const jobArray = Array.isArray(jobs) ? jobs : [jobs];
    const job = jobArray.find((j) => j.uid === jobId);
    if (job) {
      const originalFilename = job.filename;
      let actualFile = originalFilename;
      // The file on disk may not match the reported name; fall back to
      // scanning outputDir for a file matching the job's target language.
      if (!fs__default.default.existsSync(path__default.default.join(outputDir, originalFilename))) {
        const foundFile = findDownloadedFile(outputDir, job.target_lang);
        if (foundFile) {
          actualFile = foundFile;
        }
      }
      // Normalize the on-disk filename; fall back to the unrenamed file.
      const cleanFilename = renameDownloadedFile(
        outputDir,
        actualFile,
        job.target_lang
      );
      return {
        jobId,
        filename: cleanFilename || actualFile,
        lang: job.target_lang
      };
    }
    return null;
  } catch (error) {
    // Best-effort: warn and report failure instead of aborting the batch.
    console.warn(
      chalk__default.default.yellow(
        `\u26A0\uFE0F Warning: Could not download job ${jobId}: ${error.message}`
      )
    );
    return null;
  }
}
|
|
138
|
+
// Ensure the memsource CLI is on PATH and the auth token is exported.
// Throws with a setup hint when either prerequisite is missing.
function validateMemsourcePrerequisites() {
  const cliAvailable = exec.commandExists("memsource");
  if (!cliAvailable) {
    throw new Error(
      "memsource CLI not found. Please ensure memsource-cli is installed and ~/.memsourcerc is sourced."
    );
  }
  const token = process.env.MEMSOURCE_TOKEN;
  if (!token) {
    throw new Error(
      "MEMSOURCE_TOKEN not found. Please source ~/.memsourcerc first: source ~/.memsourcerc"
    );
  }
}
|
|
150
|
+
// Download an explicit list of job UIDs sequentially, logging each success.
// Failed downloads are skipped (downloadJob warns and returns null).
async function downloadSpecificJobs(projectId, jobIds, outputDir) {
  console.log(
    chalk__default.default.yellow(`\u{1F4E5} Downloading ${jobIds.length} specific job(s)...`)
  );
  const results = [];
  for (const jobId of jobIds) {
    const downloaded = await downloadJob(projectId, jobId, outputDir);
    if (!downloaded) {
      continue;
    }
    results.push(downloaded);
    console.log(
      chalk__default.default.green(
        `\u2705 Downloaded job ${downloaded.jobId}: ${downloaded.filename} (${downloaded.lang})`
      )
    );
  }
  return results;
}
|
|
168
|
+
// List the project's jobs via the memsource CLI and narrow them by status
// and/or target language. A statusFilter of "ALL" (or falsy) keeps every
// status; an empty/absent languages list keeps every language.
function listJobs(projectId, languages, statusFilter) {
  const listOutput = exec.safeExecSyncOrThrow("memsource", buildListJobsArgs(projectId), {
    encoding: "utf-8",
    env: { ...process.env }
  });
  const parsed = JSON.parse(listOutput);
  // A single job may come back as a bare object; normalize to an array.
  let selected = Array.isArray(parsed) ? parsed : [parsed];
  if (statusFilter && statusFilter !== "ALL") {
    selected = selected.filter((job) => job.status === statusFilter);
  }
  if (languages && languages.length > 0) {
    const wanted = new Set(languages);
    selected = selected.filter((job) => wanted.has(job.target_lang));
  }
  return selected;
}
|
|
190
|
+
// List jobs matching the language/status filters and download each one in
// turn. Listing failures are wrapped in a clearer error; per-job download
// failures are handled (and logged) inside downloadJob.
async function downloadFilteredJobs(projectId, outputDir, languages, statusFilter) {
  console.log(chalk__default.default.yellow("\u{1F4CB} Listing available jobs..."));
  try {
    const jobsToDownload = listJobs(projectId, languages, statusFilter);
    const statusDisplay = statusFilter === "ALL" ? "all statuses" : statusFilter || "COMPLETED";
    console.log(
      chalk__default.default.yellow(
        `\u{1F4E5} Found ${jobsToDownload.length} job(s) with status "${statusDisplay}" to download...`
      )
    );
    if (jobsToDownload.length === 0) {
      console.log(
        chalk__default.default.yellow(
          '\u{1F4A1} Tip: Use "i18n list" to see all available jobs and their UIDs.'
        )
      );
      return [];
    }
    const results = [];
    for (const job of jobsToDownload) {
      const downloaded = await downloadJob(projectId, job.uid, outputDir);
      if (!downloaded) {
        continue;
      }
      results.push(downloaded);
      // Completed jobs get a check mark; partially-translated ones a warning.
      const statusIcon = job.status === "COMPLETED" ? "\u2705" : "\u26A0\uFE0F";
      console.log(
        chalk__default.default.green(
          `${statusIcon} Downloaded: ${downloaded.filename} (${downloaded.lang}) [${job.status}]`
        )
      );
    }
    return results;
  } catch (error) {
    throw new Error(`Failed to list jobs: ${error.message}`);
  }
}
|
|
226
|
+
// Top-level download driver: validates CLI/token prerequisites, ensures the
// output directory exists, then downloads either the explicit job IDs or the
// filtered set of project jobs.
async function downloadWithMemsourceCLI(projectId, outputDir, jobIds, languages, statusFilter) {
  validateMemsourcePrerequisites();
  await fs__default.default.ensureDir(outputDir);
  const hasExplicitJobs = Boolean(jobIds && jobIds.length > 0);
  return hasExplicitJobs
    ? downloadSpecificJobs(projectId, jobIds, outputDir)
    : downloadFilteredJobs(projectId, outputDir, languages, statusFilter);
}
|
|
234
|
+
// CLI entry point for `i18n download`: merges file/env config with CLI flags,
// validates the project id and MEMSOURCE_TOKEN (exiting the process with a
// setup guide when missing), then downloads jobs and prints a summary.
// Throws (after logging) when the download itself fails.
async function downloadCommand(opts) {
  console.log(chalk__default.default.blue("\u{1F4E5} Downloading translated strings from TMS..."));
  // Config precedence is resolved by mergeConfigWithOptions; NOTE(review):
  // presumably CLI flags override .i18n.config.json values — confirm in config.cjs.js.
  const config$1 = await config.loadI18nConfig();
  const mergedOpts = await config.mergeConfigWithOptions(config$1, opts);
  const {
    projectId,
    outputDir = "i18n/downloads",
    languages,
    jobIds,
    status,
    includeIncomplete
  } = mergedOpts;
  // Default to completed jobs only; --include-incomplete (or status=ALL)
  // widens the filter to every status.
  let statusFilter = status || "COMPLETED";
  if (includeIncomplete || statusFilter === "ALL") {
    statusFilter = "ALL";
  }
  if (!projectId) {
    // Hard requirement: without a project id nothing can be downloaded.
    console.error(chalk__default.default.red("\u274C Missing required TMS configuration:"));
    console.error("");
    console.error(chalk__default.default.yellow(" \u2717 Project ID"));
    console.error(
      chalk__default.default.gray(
        " Set via: --project-id <id> or I18N_TMS_PROJECT_ID or .i18n.config.json"
      )
    );
    console.error("");
    console.error(chalk__default.default.blue("\u{1F4CB} Quick Setup Guide:"));
    console.error(chalk__default.default.gray(" 1. Run: translations-cli i18n init"));
    console.error(chalk__default.default.gray(" 2. Edit .i18n.config.json to add Project ID"));
    console.error(
      chalk__default.default.gray(" 3. Source ~/.memsourcerc: source ~/.memsourcerc")
    );
    process.exit(1);
  }
  if (!process.env.MEMSOURCE_TOKEN) {
    // Checked again (beyond validateMemsourcePrerequisites) so the user gets
    // a friendly message before any CLI invocation is attempted.
    console.error(chalk__default.default.red("\u274C MEMSOURCE_TOKEN not found"));
    console.error(chalk__default.default.yellow(" Please source ~/.memsourcerc first:"));
    console.error(chalk__default.default.gray(" source ~/.memsourcerc"));
    process.exit(1);
  }
  try {
    // Flags arrive as comma-separated strings; split into trimmed arrays.
    const jobIdArray = jobIds && typeof jobIds === "string" ? jobIds.split(",").map((id) => id.trim()) : void 0;
    const languageArray = languages && typeof languages === "string" ? languages.split(",").map((lang) => lang.trim()) : void 0;
    const downloadResults = await downloadWithMemsourceCLI(
      projectId,
      String(outputDir),
      jobIdArray,
      languageArray,
      statusFilter
    );
    console.log(chalk__default.default.green(`\u2705 Download completed successfully!`));
    console.log(chalk__default.default.gray(` Output directory: ${outputDir}`));
    console.log(chalk__default.default.gray(` Files downloaded: ${downloadResults.length}`));
    if (downloadResults.length > 0) {
      console.log(chalk__default.default.blue("\u{1F4C1} Downloaded files:"));
      for (const result of downloadResults) {
        console.log(
          chalk__default.default.gray(
            ` ${result.filename} (${result.lang}) - Job ID: ${result.jobId}`
          )
        );
      }
    }
  } catch (error) {
    console.error(chalk__default.default.red("\u274C Error downloading from TMS:"), error.message);
    throw error;
  }
}

exports.downloadCommand = downloadCommand;
//# sourceMappingURL=download.cjs.js.map
|