gtx-cli 2.5.17 → 2.5.19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/api/collectUserEditDiffs.js +14 -0
- package/dist/api/downloadFileBatch.js +8 -2
- package/dist/cli/commands/translate.js +4 -0
- package/dist/fs/config/downloadedVersions.d.ts +1 -0
- package/dist/state/recentDownloads.d.ts +10 -1
- package/dist/state/recentDownloads.js +9 -1
- package/dist/utils/addExplicitAnchorIds.js +80 -23
- package/dist/utils/persistPostprocessHashes.d.ts +12 -0
- package/dist/utils/persistPostprocessHashes.js +39 -0
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # gtx-cli
 
+## 2.5.19
+
+### Patch Changes
+
+- [#853](https://github.com/generaltranslation/gt/pull/853) [`02abd0a`](https://github.com/generaltranslation/gt/commit/02abd0a970a09c514744982f06169f385dfdd972) Thanks [@fernando-aviles](https://github.com/fernando-aviles)! - Including hash of post-processed files in `gt-lock.json` to avoid unnecessary saves when calling `save-local`
+
+## 2.5.18
+
+### Patch Changes
+
+- [#851](https://github.com/generaltranslation/gt/pull/851) [`cf5f0e3`](https://github.com/generaltranslation/gt/commit/cf5f0e3f1537c304b7ea5703714ffb4956a7f6f4) Thanks [@fernando-aviles](https://github.com/fernando-aviles)! - Skip anchor ID fallback in codeblocks to avoid adding anchors to comments
+
 ## 2.5.17
 
 ### Patch Changes
package/dist/api/collectUserEditDiffs.js
CHANGED

@@ -6,6 +6,7 @@ import { getGitUnifiedDiff } from '../utils/gitDiff.js';
 import { gt } from '../utils/gt.js';
 import os from 'node:os';
 import { randomUUID } from 'node:crypto';
+import { hashStringSync } from '../utils/hash.js';
 /**
  * Collects local user edits by diffing the latest downloaded server translation version
  * against the current local translation file, and submits the diffs upstream.
@@ -32,6 +33,19 @@ export async function collectAndSendUserEditDiffs(files, settings) {
         const downloadedVersion = downloadedVersions.entries?.[uploadedFile.branchId]?.[uploadedFile.fileId]?.[uploadedFile.versionId]?.[locale];
         if (!downloadedVersion)
             continue;
+        // Skip if local file matches the last postprocessed content hash
+        if (downloadedVersion.postProcessHash) {
+            try {
+                const localContent = await fs.promises.readFile(outputPath, 'utf8');
+                const localHash = hashStringSync(localContent);
+                if (localHash === downloadedVersion.postProcessHash) {
+                    continue;
+                }
+            }
+            catch {
+                // If hash check fails, fall through to diff
+            }
+        }
         candidates.push({
             branchId: uploadedFile.branchId,
             fileName: uploadedFile.fileName,
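In effect, the guard above treats a translation file as locally edited only when its current content no longer hashes to the `postProcessHash` recorded after the last download. A minimal sketch of that decision in isolation (illustrative only, not the package's code; it assumes `hashStringSync` from `dist/utils/hash.js` is a deterministic content hash, which is how the diff above uses it):

```js
// Illustrative helper mirroring the new check, written as if it lived next to
// hash.js inside dist/utils/. The path and hash passed in at the bottom are hypothetical.
import * as fs from 'node:fs';
import { hashStringSync } from './hash.js';

function isLocallyEdited(filePath, postProcessHash) {
    if (!postProcessHash) {
        return true; // no recorded baseline: treat the file as edited
    }
    try {
        const localHash = hashStringSync(fs.readFileSync(filePath, 'utf8'));
        return localHash !== postProcessHash; // unchanged files can be skipped
    }
    catch {
        return true; // unreadable file: fall through to normal diffing
    }
}

isLocallyEdited('es/docs/guide.mdx', '9f2c…'); // hypothetical call
```

Files the CLI wrote and that were never touched afterwards hash to the recorded value and are skipped, which is what the 2.5.19 changelog entry means by avoiding unnecessary saves when calling `save-local`.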
package/dist/api/downloadFileBatch.js
CHANGED

@@ -115,8 +115,14 @@ export async function downloadFileBatch(fileTracker, files, options, forceDownlo
         }
         // Write the file to disk
         await fs.promises.writeFile(outputPath, data);
-        // Track as downloaded
-        recordDownloaded(outputPath);
+        // Track as downloaded with metadata for downstream postprocessing
+        recordDownloaded(outputPath, {
+            branchId,
+            fileId,
+            versionId,
+            locale,
+            inputPath,
+        });
         result.successful.push(requestedFile);
         if (branchId && fileId && versionId && locale) {
             ensureNestedObject(downloadedVersions.entries, [
package/dist/cli/commands/translate.js
CHANGED

@@ -9,6 +9,8 @@ import processOpenApi from '../../utils/processOpenApi.js';
 import { noFilesError, noVersionIdError } from '../../console/index.js';
 import localizeStaticImports from '../../utils/localizeStaticImports.js';
 import { logErrorAndExit } from '../../console/logging.js';
+import { getDownloadedMeta } from '../../state/recentDownloads.js';
+import { persistPostProcessHashes } from '../../utils/persistPostprocessHashes.js';
 // Downloads translations that were completed
 export async function handleTranslate(options, settings, fileVersionData, jobData, branchData) {
     if (fileVersionData) {
@@ -65,4 +67,6 @@ export async function postProcessTranslations(settings, includeFiles) {
     if (settings.options?.copyFiles) {
         await copyFile(settings);
     }
+    // Record postprocessed content hashes for newly downloaded files
+    persistPostProcessHashes(settings, includeFiles, getDownloadedMeta());
 }
package/dist/state/recentDownloads.d.ts
CHANGED

@@ -1,3 +1,12 @@
-export declare function recordDownloaded(filePath: string): void;
+type DownloadMeta = {
+    branchId: string;
+    fileId: string;
+    versionId: string;
+    locale: string;
+    inputPath?: string;
+};
+export declare function recordDownloaded(filePath: string, meta?: DownloadMeta): void;
 export declare function getDownloaded(): Set<string>;
+export declare function getDownloadedMeta(): Map<string, DownloadMeta>;
 export declare function clearDownloaded(): void;
+export {};
package/dist/state/recentDownloads.js
CHANGED

@@ -1,10 +1,18 @@
 const recent = new Set();
-export function recordDownloaded(filePath) {
+const recentMeta = new Map();
+export function recordDownloaded(filePath, meta) {
     recent.add(filePath);
+    if (meta) {
+        recentMeta.set(filePath, meta);
+    }
 }
 export function getDownloaded() {
     return recent;
 }
+export function getDownloadedMeta() {
+    return recentMeta;
+}
 export function clearDownloaded() {
     recent.clear();
+    recentMeta.clear();
 }
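A quick usage sketch of this in-memory tracker (illustrative only; the path and IDs are made up, and the relative import assumes a module sitting next to recentDownloads.js in dist/state/):

```js
import { recordDownloaded, getDownloaded, getDownloadedMeta, clearDownloaded } from './recentDownloads.js';

// Record a downloaded file together with the metadata downloadFileBatch now passes.
recordDownloaded('/tmp/project/es/docs/guide.mdx', {
    branchId: 'main', // hypothetical values
    fileId: 'docs/guide.mdx',
    versionId: 'v1',
    locale: 'es',
    inputPath: 'docs/guide.mdx',
});

getDownloaded().has('/tmp/project/es/docs/guide.mdx');     // true
getDownloadedMeta().get('/tmp/project/es/docs/guide.mdx'); // { branchId: 'main', ... }

clearDownloaded(); // clears both the path Set and the metadata Map
```

downloadFileBatch populates the tracker as shown above, and postProcessTranslations later reads it back through getDownloadedMeta() before persisting hashes.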
package/dist/utils/addExplicitAnchorIds.js
CHANGED

@@ -30,6 +30,53 @@ function extractHeadingText(heading) {
     });
     return text;
 }
+/**
+ * Simple line-by-line heading extractor that skips fenced code blocks.
+ * Used as a fallback when MDX parsing fails.
+ */
+function extractHeadingsWithFallback(mdxContent) {
+    const headings = [];
+    const lines = mdxContent.split('\n');
+    let position = 0;
+    let inFence = false;
+    let fenceChar = null;
+    for (const line of lines) {
+        const fenceMatch = line.match(/^(\s*)(`{3,}|~{3,})/);
+        if (fenceMatch) {
+            const fenceString = fenceMatch[2];
+            if (!inFence) {
+                inFence = true;
+                fenceChar = fenceString;
+            }
+            else if (fenceChar &&
+                fenceString[0] === fenceChar[0] &&
+                fenceString.length >= fenceChar.length) {
+                inFence = false;
+                fenceChar = null;
+            }
+            continue;
+        }
+        if (inFence) {
+            continue;
+        }
+        const headingMatch = line.match(/^(#{1,6})\s+(.*)$/);
+        if (!headingMatch) {
+            continue;
+        }
+        const hashes = headingMatch[1];
+        const rawText = headingMatch[2];
+        const { cleanedText, explicitId } = parseHeadingContent(rawText);
+        if (cleanedText || explicitId) {
+            headings.push({
+                text: cleanedText,
+                level: hashes.length,
+                slug: explicitId ?? generateSlug(cleanedText),
+                position: position++,
+            });
+        }
+    }
+    return headings;
+}
 function parseHeadingContent(text) {
     // Support both {#id} and escaped \{#id\} forms
     const anchorMatch = text.match(/(\\\{#([^}]+)\\\}|\{#([^}]+)\})\s*$/);
@@ -67,25 +114,8 @@ export function extractHeadingInfo(mdxContent) {
     }
     catch (error) {
         console.warn(`Failed to parse MDX content: ${error instanceof Error ? error.message : String(error)}`);
-        // Fallback:
-
-        const headingRegex = /^(#{1,6})\s+(.*)$/gm;
-        let position = 0;
-        let match;
-        while ((match = headingRegex.exec(mdxContent)) !== null) {
-            const hashes = match[1];
-            const rawText = match[2];
-            const { cleanedText, explicitId } = parseHeadingContent(rawText);
-            if (cleanedText || explicitId) {
-                fallbackHeadings.push({
-                    text: cleanedText,
-                    level: hashes.length,
-                    slug: explicitId ?? generateSlug(cleanedText),
-                    position: position++,
-                });
-            }
-        }
-        return fallbackHeadings;
+        // Fallback: line-by-line extraction skipping fenced code blocks
+        return extractHeadingsWithFallback(mdxContent);
     }
     let position = 0;
     visit(processedAst, 'heading', (heading) => {
@@ -270,25 +300,52 @@ function applyInlineIds(translatedContent, idMappings, escapeAnchors) {
  */
 function applyInlineIdsStringFallback(translatedContent, idMappings, escapeAnchors) {
     let headingIndex = 0;
-
+    let inFence = false;
+    let fenceChar = null;
+    const processedLines = translatedContent.split('\n').map((line) => {
+        const fenceMatch = line.match(/^(\s*)(`{3,}|~{3,})/);
+        if (fenceMatch) {
+            const fenceString = fenceMatch[2];
+            if (!inFence) {
+                inFence = true;
+                fenceChar = fenceString;
+            }
+            else if (fenceChar &&
+                fenceString[0] === fenceChar[0] &&
+                fenceString.length >= fenceChar.length) {
+                inFence = false;
+                fenceChar = null;
+            }
+            return line;
+        }
+        if (inFence) {
+            return line;
+        }
+        const headingMatch = line.match(/^(#{1,6}\s+)(.*)$/);
+        if (!headingMatch) {
+            return line;
+        }
+        const prefix = headingMatch[1];
+        const text = headingMatch[2];
         const id = idMappings.get(headingIndex++);
         if (!id) {
-            return
+            return line;
         }
         const hasEscaped = /\\\{#[^}]+\\\}\s*$/.test(text);
         const hasUnescaped = /\{#[^}]+\}\s*$/.test(text);
         if (hasEscaped) {
-            return
+            return line;
         }
         if (hasUnescaped) {
            if (!escapeAnchors) {
-                return
+                return line;
            }
            return `${prefix}${text.replace(/\{#([^}]+)\}\s*$/, '\\\\{#$1\\\\}')}`;
         }
         const suffix = escapeAnchors ? ` \\{#${id}\\}` : ` {#${id}}`;
         return `${prefix}${text}${suffix}`;
     });
+    return processedLines.join('\n');
 }
 /**
  * Wraps headings in divs with IDs (Mintlify approach)
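To see what the fence tracking buys (the 2.5.18 changelog entry above), consider an MDX document that fails to parse and contains a shell comment inside a code block. The input below is illustrative only; it uses tilde fences so the snippet stays readable, and the fence regex above accepts both backtick and tilde fences of length three or more:

```js
// Illustrative input, not taken from the package.
const mdx = [
    '## Install {#install}',
    '',
    '~~~bash',
    '# install the CLI globally   <- a comment, not a heading',
    'npm install -g gtx-cli',
    '~~~',
    '',
    '## Usage',
].join('\n');
// Old fallback: the /^(#{1,6})\s+(.*)$/gm regex also matched the comment line,
// so it could be picked up as a heading and receive an anchor ID.
// New fallback (extractHeadingsWithFallback / applyInlineIdsStringFallback):
// lines between fences are skipped, so only "Install" and "Usage" count as headings.
```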
package/dist/utils/persistPostprocessHashes.d.ts
ADDED

@@ -0,0 +1,12 @@
+import type { Settings } from '../types/index.js';
+type DownloadMeta = {
+    branchId: string;
+    fileId: string;
+    versionId: string;
+    locale: string;
+};
+/**
+ * Persist postprocessed content hashes for recently downloaded files into gt-lock.json.
+ */
+export declare function persistPostProcessHashes(settings: Settings, includeFiles: Set<string> | undefined, downloadedMeta: Map<string, DownloadMeta>): void;
+export {};
package/dist/utils/persistPostprocessHashes.js
ADDED

@@ -0,0 +1,39 @@
+import * as fs from 'node:fs';
+import { ensureNestedObject, getDownloadedVersions, saveDownloadedVersions, } from '../fs/config/downloadedVersions.js';
+import { hashStringSync } from './hash.js';
+/**
+ * Persist postprocessed content hashes for recently downloaded files into gt-lock.json.
+ */
+export function persistPostProcessHashes(settings, includeFiles, downloadedMeta) {
+    if (!includeFiles || includeFiles.size === 0 || downloadedMeta.size === 0) {
+        return;
+    }
+    const downloadedVersions = getDownloadedVersions(settings.configDirectory);
+    let lockUpdated = false;
+    for (const filePath of includeFiles) {
+        const meta = downloadedMeta.get(filePath);
+        if (!meta)
+            continue;
+        if (!fs.existsSync(filePath))
+            continue;
+        const content = fs.readFileSync(filePath, 'utf8');
+        const hash = hashStringSync(content);
+        ensureNestedObject(downloadedVersions.entries, [
+            meta.branchId,
+            meta.fileId,
+            meta.versionId,
+            meta.locale,
+        ]);
+        const existing = downloadedVersions.entries[meta.branchId][meta.fileId][meta.versionId][meta.locale] || {};
+        if (existing.postProcessHash !== hash) {
+            downloadedVersions.entries[meta.branchId][meta.fileId][meta.versionId][meta.locale] = {
+                ...existing,
+                postProcessHash: hash,
+            };
+            lockUpdated = true;
+        }
+    }
+    if (lockUpdated) {
+        saveDownloadedVersions(settings.configDirectory, downloadedVersions);
+    }
+}
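For orientation, the lock entry this function maintains looks roughly like the sketch below. The nesting mirrors the code above (branchId → fileId → versionId → locale); the IDs and the hash value are placeholders, and any fields already recorded at download time are preserved by the spread but omitted here:

```js
// Hypothetical shape of a gt-lock.json entry after persistPostProcessHashes runs.
const downloadedVersions = {
    entries: {
        main: {                  // branchId
            'docs/guide.mdx': {  // fileId
                v1: {            // versionId
                    es: {        // locale
                        postProcessHash: '9f2c…', // hash of the postprocessed file content
                    },
                },
            },
        },
    },
};
```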