glotto 2.9.0 → 3.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +150 -54
- package/esm/cli.js +155 -43
- package/esm/deno.d.ts +5 -1
- package/esm/deno.js +18 -7
- package/esm/src/config.d.ts +4 -0
- package/esm/src/config.d.ts.map +1 -0
- package/esm/src/config.js +95 -0
- package/esm/src/contants.d.ts +6 -2
- package/esm/src/contants.d.ts.map +1 -1
- package/esm/src/contants.js +34 -15
- package/esm/src/diff.d.ts +4 -0
- package/esm/src/diff.d.ts.map +1 -0
- package/esm/src/diff.js +53 -0
- package/esm/src/file.d.ts +5 -9
- package/esm/src/file.d.ts.map +1 -1
- package/esm/src/file.js +14 -103
- package/esm/src/providers/anthropic.d.ts +6 -11
- package/esm/src/providers/anthropic.d.ts.map +1 -1
- package/esm/src/providers/anthropic.js +21 -107
- package/esm/src/providers/gemini.d.ts +6 -11
- package/esm/src/providers/gemini.d.ts.map +1 -1
- package/esm/src/providers/gemini.js +20 -113
- package/esm/src/providers/openai.d.ts +6 -11
- package/esm/src/providers/openai.d.ts.map +1 -1
- package/esm/src/providers/openai.js +17 -108
- package/esm/src/translator.d.ts +15 -0
- package/esm/src/translator.d.ts.map +1 -0
- package/esm/src/translator.js +284 -0
- package/esm/src/types.d.ts +58 -13
- package/esm/src/types.d.ts.map +1 -1
- package/esm/src/utilites.d.ts +3 -10
- package/esm/src/utilites.d.ts.map +1 -1
- package/esm/src/utilites.js +41 -131
- package/package.json +20 -6
- package/schema/glotto.schema.json +87 -0
- package/script/cli.js +153 -41
- package/script/deno.d.ts +5 -1
- package/script/deno.js +18 -7
- package/script/src/config.d.ts +4 -0
- package/script/src/config.d.ts.map +1 -0
- package/script/src/config.js +132 -0
- package/script/src/contants.d.ts +6 -2
- package/script/src/contants.d.ts.map +1 -1
- package/script/src/contants.js +35 -16
- package/script/src/diff.d.ts +4 -0
- package/script/src/diff.d.ts.map +1 -0
- package/script/src/diff.js +57 -0
- package/script/src/file.d.ts +5 -9
- package/script/src/file.d.ts.map +1 -1
- package/script/src/file.js +19 -113
- package/script/src/providers/anthropic.d.ts +6 -11
- package/script/src/providers/anthropic.d.ts.map +1 -1
- package/script/src/providers/anthropic.js +20 -106
- package/script/src/providers/gemini.d.ts +6 -11
- package/script/src/providers/gemini.d.ts.map +1 -1
- package/script/src/providers/gemini.js +19 -112
- package/script/src/providers/openai.d.ts +6 -11
- package/script/src/providers/openai.d.ts.map +1 -1
- package/script/src/providers/openai.js +16 -107
- package/script/src/translator.d.ts +15 -0
- package/script/src/translator.d.ts.map +1 -0
- package/script/src/translator.js +294 -0
- package/script/src/types.d.ts +58 -13
- package/script/src/types.d.ts.map +1 -1
- package/script/src/utilites.d.ts +3 -10
- package/script/src/utilites.d.ts.map +1 -1
- package/script/src/utilites.js +44 -138
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common16.d.ts +0 -23
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common16.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common16.js +0 -51
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common32.d.ts +0 -35
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common32.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common32.js +0 -192
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common64.d.ts +0 -35
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common64.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common64.js +0 -113
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common_detach.d.ts +0 -4
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common_detach.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_common_detach.js +0 -13
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_types.d.ts +0 -9
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_types.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_types.js +0 -2
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_validate_binary_like.d.ts +0 -2
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_validate_binary_like.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/_validate_binary_like.js +0 -26
- package/esm/deps/jsr.io/@std/encoding/1.0.10/ascii85.d.ts +0 -61
- package/esm/deps/jsr.io/@std/encoding/1.0.10/ascii85.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/ascii85.js +0 -152
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base32.d.ts +0 -40
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base32.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base32.js +0 -87
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base58.d.ts +0 -40
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base58.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base58.js +0 -131
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base64.d.ts +0 -40
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base64.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base64.js +0 -82
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base64url.d.ts +0 -40
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base64url.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/base64url.js +0 -72
- package/esm/deps/jsr.io/@std/encoding/1.0.10/hex.d.ts +0 -39
- package/esm/deps/jsr.io/@std/encoding/1.0.10/hex.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/hex.js +0 -87
- package/esm/deps/jsr.io/@std/encoding/1.0.10/mod.d.ts +0 -98
- package/esm/deps/jsr.io/@std/encoding/1.0.10/mod.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/mod.js +0 -99
- package/esm/deps/jsr.io/@std/encoding/1.0.10/varint.d.ts +0 -120
- package/esm/deps/jsr.io/@std/encoding/1.0.10/varint.d.ts.map +0 -1
- package/esm/deps/jsr.io/@std/encoding/1.0.10/varint.js +0 -205
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common16.d.ts +0 -23
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common16.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common16.js +0 -57
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common32.d.ts +0 -35
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common32.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common32.js +0 -198
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common64.d.ts +0 -35
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common64.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common64.js +0 -119
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common_detach.d.ts +0 -4
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common_detach.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/_common_detach.js +0 -16
- package/script/deps/jsr.io/@std/encoding/1.0.10/_types.d.ts +0 -9
- package/script/deps/jsr.io/@std/encoding/1.0.10/_types.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/_types.js +0 -3
- package/script/deps/jsr.io/@std/encoding/1.0.10/_validate_binary_like.d.ts +0 -2
- package/script/deps/jsr.io/@std/encoding/1.0.10/_validate_binary_like.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/_validate_binary_like.js +0 -29
- package/script/deps/jsr.io/@std/encoding/1.0.10/ascii85.d.ts +0 -61
- package/script/deps/jsr.io/@std/encoding/1.0.10/ascii85.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/ascii85.js +0 -156
- package/script/deps/jsr.io/@std/encoding/1.0.10/base32.d.ts +0 -40
- package/script/deps/jsr.io/@std/encoding/1.0.10/base32.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/base32.js +0 -91
- package/script/deps/jsr.io/@std/encoding/1.0.10/base58.d.ts +0 -40
- package/script/deps/jsr.io/@std/encoding/1.0.10/base58.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/base58.js +0 -135
- package/script/deps/jsr.io/@std/encoding/1.0.10/base64.d.ts +0 -40
- package/script/deps/jsr.io/@std/encoding/1.0.10/base64.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/base64.js +0 -86
- package/script/deps/jsr.io/@std/encoding/1.0.10/base64url.d.ts +0 -40
- package/script/deps/jsr.io/@std/encoding/1.0.10/base64url.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/base64url.js +0 -76
- package/script/deps/jsr.io/@std/encoding/1.0.10/hex.d.ts +0 -39
- package/script/deps/jsr.io/@std/encoding/1.0.10/hex.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/hex.js +0 -91
- package/script/deps/jsr.io/@std/encoding/1.0.10/mod.d.ts +0 -98
- package/script/deps/jsr.io/@std/encoding/1.0.10/mod.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/mod.js +0 -115
- package/script/deps/jsr.io/@std/encoding/1.0.10/varint.d.ts +0 -120
- package/script/deps/jsr.io/@std/encoding/1.0.10/varint.d.ts.map +0 -1
- package/script/deps/jsr.io/@std/encoding/1.0.10/varint.js +0 -211
|
@@ -1,29 +1,7 @@
|
|
|
1
1
|
import { GoogleGenAI } from '@google/genai';
|
|
2
|
-
import {
|
|
3
|
-
import { delay, formatBytes, generatePrompts, isValidJson, stripJsonMarkdown } from '../utilites.js';
|
|
4
|
-
import { logger } from '../logger.js';
|
|
5
|
-
import { mergeInputs, writeTemp } from '../file.js';
|
|
6
|
-
import { BASE_RETRY_DELAY_MS, DEFAULT_MODELS, INTER_CHUNK_DELAY_MS, MAX_RETRIES } from '../contants.js';
|
|
2
|
+
import { DEFAULT_MODELS } from '../contants.js';
|
|
7
3
|
class Gemini {
|
|
8
|
-
constructor(key,
|
|
9
|
-
Object.defineProperty(this, "chunks", {
|
|
10
|
-
enumerable: true,
|
|
11
|
-
configurable: true,
|
|
12
|
-
writable: true,
|
|
13
|
-
value: void 0
|
|
14
|
-
});
|
|
15
|
-
Object.defineProperty(this, "from", {
|
|
16
|
-
enumerable: true,
|
|
17
|
-
configurable: true,
|
|
18
|
-
writable: true,
|
|
19
|
-
value: void 0
|
|
20
|
-
});
|
|
21
|
-
Object.defineProperty(this, "to", {
|
|
22
|
-
enumerable: true,
|
|
23
|
-
configurable: true,
|
|
24
|
-
writable: true,
|
|
25
|
-
value: void 0
|
|
26
|
-
});
|
|
4
|
+
constructor(key, modelName, options = { noLimit: false, noTimeout: false }) {
|
|
27
5
|
Object.defineProperty(this, "genAI", {
|
|
28
6
|
enumerable: true,
|
|
29
7
|
configurable: true,
|
|
@@ -36,101 +14,30 @@ class Gemini {
|
|
|
36
14
|
writable: true,
|
|
37
15
|
value: void 0
|
|
38
16
|
});
|
|
39
|
-
this.chunks = chunks;
|
|
40
|
-
this.from = from;
|
|
41
|
-
this.to = to;
|
|
42
17
|
this.model = modelName ?? DEFAULT_MODELS.gemini;
|
|
43
18
|
this.genAI = new GoogleGenAI({
|
|
44
19
|
apiKey: key,
|
|
20
|
+
httpOptions: options.noTimeout ? { timeout: 2_147_483_647 } : undefined,
|
|
45
21
|
});
|
|
46
22
|
}
|
|
47
|
-
async
|
|
48
|
-
const
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
const { text } = await this.genAI.models.generateContent({
|
|
64
|
-
model: this.model,
|
|
65
|
-
contents: [systemPrompt, userPrompt, part],
|
|
66
|
-
config: {
|
|
67
|
-
systemInstruction: systemPrompt,
|
|
68
|
-
candidateCount: 1,
|
|
69
|
-
responseMimeType: 'application/json',
|
|
70
|
-
responseLogprobs: false,
|
|
71
|
-
temperature: 0.3,
|
|
72
|
-
topP: 0.7,
|
|
73
|
-
topK: 20,
|
|
74
|
-
maxOutputTokens: Number.POSITIVE_INFINITY,
|
|
75
|
-
},
|
|
76
|
-
});
|
|
77
|
-
if (!text) {
|
|
78
|
-
logger.error(`${chunkLabel} Empty response from Gemini AI`);
|
|
79
|
-
continue;
|
|
80
|
-
}
|
|
81
|
-
const cleanedText = stripJsonMarkdown(text);
|
|
82
|
-
const tempJsonFileName = `chunk_${chunk.index + 1}.json`;
|
|
83
|
-
await writeTemp(this.to, tempJsonFileName, cleanedText);
|
|
84
|
-
if (!isValidJson(cleanedText)) {
|
|
85
|
-
if (attempt === MAX_RETRIES) {
|
|
86
|
-
logger.error(`${chunkLabel} Failed after ${MAX_RETRIES + 1} attempts: Invalid JSON response`);
|
|
87
|
-
return null;
|
|
88
|
-
}
|
|
89
|
-
logger.error(`${chunkLabel} Invalid JSON response, saved to ${tempJsonFileName}`);
|
|
90
|
-
continue;
|
|
91
|
-
}
|
|
92
|
-
logger.info(`${chunkLabel} Translated successfully (${formatBytes(chunk.byteSize)}, ${chunk.keyCount} keys)`);
|
|
93
|
-
return cleanedText;
|
|
94
|
-
}
|
|
95
|
-
catch (error) {
|
|
96
|
-
const isLastAttempt = attempt === MAX_RETRIES;
|
|
97
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
98
|
-
if (isLastAttempt) {
|
|
99
|
-
logger.error(`${chunkLabel} Failed after ${MAX_RETRIES + 1} attempts: ${errorMessage}`);
|
|
100
|
-
return null;
|
|
101
|
-
}
|
|
102
|
-
logger.warn(`${chunkLabel} Attempt ${attempt + 1} failed: ${errorMessage}`);
|
|
103
|
-
}
|
|
104
|
-
}
|
|
105
|
-
return null;
|
|
106
|
-
}
|
|
107
|
-
async translate() {
|
|
108
|
-
const { systemPrompt, userPrompt } = generatePrompts(this.from, this.to);
|
|
109
|
-
const totalChunks = this.chunks.length;
|
|
110
|
-
logger.info(`Starting translation: ${totalChunks} chunk(s) to process`);
|
|
111
|
-
const results = [];
|
|
112
|
-
const failedChunks = [];
|
|
113
|
-
for (let i = 0; i < totalChunks; i++) {
|
|
114
|
-
const chunk = this.chunks[i];
|
|
115
|
-
if (i > 0) {
|
|
116
|
-
logger.info(`Waiting ${INTER_CHUNK_DELAY_MS}ms before next chunk (rate limit protection)...`);
|
|
117
|
-
await delay(INTER_CHUNK_DELAY_MS);
|
|
118
|
-
}
|
|
119
|
-
const result = await this.translateChunk(chunk, systemPrompt, userPrompt);
|
|
120
|
-
if (result) {
|
|
121
|
-
results.push(result);
|
|
122
|
-
}
|
|
123
|
-
else {
|
|
124
|
-
failedChunks.push(chunk.index + 1);
|
|
125
|
-
}
|
|
126
|
-
}
|
|
127
|
-
if (failedChunks.length > 0) {
|
|
128
|
-
throw new Error(`Translation failed for chunk(s): ${failedChunks.join(', ')}. Check temp files for partial results.`);
|
|
23
|
+
async translate(prompt) {
|
|
24
|
+
const response = await this.genAI.models.generateContent({
|
|
25
|
+
model: this.model,
|
|
26
|
+
contents: prompt,
|
|
27
|
+
config: {
|
|
28
|
+
candidateCount: 1,
|
|
29
|
+
temperature: 0.2,
|
|
30
|
+
},
|
|
31
|
+
});
|
|
32
|
+
const text = response.text ?? '';
|
|
33
|
+
let usage;
|
|
34
|
+
if (response.usageMetadata) {
|
|
35
|
+
usage = {
|
|
36
|
+
inputTokens: response.usageMetadata.promptTokenCount ?? 0,
|
|
37
|
+
outputTokens: response.usageMetadata.candidatesTokenCount ?? 0,
|
|
38
|
+
};
|
|
129
39
|
}
|
|
130
|
-
|
|
131
|
-
const jsonInputs = results.map((r) => JSON.parse(r));
|
|
132
|
-
const mergedContent = mergeInputs(jsonInputs);
|
|
133
|
-
return JSON.stringify(mergedContent, null, 2);
|
|
40
|
+
return { text, usage };
|
|
134
41
|
}
|
|
135
42
|
}
|
|
136
43
|
export default Gemini;
|
|
@@ -1,14 +1,9 @@
|
|
|
1
|
-
import
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
client: OpenAI;
|
|
8
|
-
model: string;
|
|
9
|
-
constructor(key: string, chunks: ChunkInfo[], from: string, to: string, baseUrl?: string, modelName?: string);
|
|
10
|
-
private translateChunk;
|
|
11
|
-
translate(): Promise<string>;
|
|
1
|
+
import type { TextTranslator, TranslateOptions, TranslateResult } from '../types.js';
|
|
2
|
+
declare class OpenAIModel implements TextTranslator {
|
|
3
|
+
private client;
|
|
4
|
+
private model;
|
|
5
|
+
constructor(key: string, baseUrl?: string, modelName?: string, options?: TranslateOptions);
|
|
6
|
+
translate(prompt: string): Promise<TranslateResult>;
|
|
12
7
|
}
|
|
13
8
|
export default OpenAIModel;
|
|
14
9
|
//# sourceMappingURL=openai.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/src/providers/openai.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/src/providers/openai.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,cAAc,EAAE,gBAAgB,EAAE,eAAe,EAAkB,MAAM,aAAa,CAAC;AAGrG,cAAM,WAAY,YAAW,cAAc;IACzC,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,KAAK,CAAS;gBAGpB,GAAG,EAAE,MAAM,EACX,OAAO,CAAC,EAAE,MAAM,EAChB,SAAS,CAAC,EAAE,MAAM,EAClB,OAAO,GAAE,gBAAuD;IAU5D,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC;CAgB1D;AAED,eAAe,WAAW,CAAC"}
|
|
@@ -1,29 +1,7 @@
|
|
|
1
1
|
import OpenAI from 'openai';
|
|
2
|
-
import {
|
|
3
|
-
import { logger } from '../logger.js';
|
|
4
|
-
import { mergeInputs, writeTemp } from '../file.js';
|
|
5
|
-
import { BASE_RETRY_DELAY_MS, DEFAULT_MODELS, INTER_CHUNK_DELAY_MS, MAX_RETRIES } from '../contants.js';
|
|
6
|
-
const decoder = new TextDecoder();
|
|
2
|
+
import { DEFAULT_MODELS } from '../contants.js';
|
|
7
3
|
class OpenAIModel {
|
|
8
|
-
constructor(key,
|
|
9
|
-
Object.defineProperty(this, "chunks", {
|
|
10
|
-
enumerable: true,
|
|
11
|
-
configurable: true,
|
|
12
|
-
writable: true,
|
|
13
|
-
value: void 0
|
|
14
|
-
});
|
|
15
|
-
Object.defineProperty(this, "from", {
|
|
16
|
-
enumerable: true,
|
|
17
|
-
configurable: true,
|
|
18
|
-
writable: true,
|
|
19
|
-
value: void 0
|
|
20
|
-
});
|
|
21
|
-
Object.defineProperty(this, "to", {
|
|
22
|
-
enumerable: true,
|
|
23
|
-
configurable: true,
|
|
24
|
-
writable: true,
|
|
25
|
-
value: void 0
|
|
26
|
-
});
|
|
4
|
+
constructor(key, baseUrl, modelName, options = { noLimit: false, noTimeout: false }) {
|
|
27
5
|
Object.defineProperty(this, "client", {
|
|
28
6
|
enumerable: true,
|
|
29
7
|
configurable: true,
|
|
@@ -36,97 +14,28 @@ class OpenAIModel {
|
|
|
36
14
|
writable: true,
|
|
37
15
|
value: void 0
|
|
38
16
|
});
|
|
39
|
-
this.chunks = chunks;
|
|
40
|
-
this.from = from;
|
|
41
|
-
this.to = to;
|
|
42
17
|
this.model = modelName ?? DEFAULT_MODELS.openai;
|
|
43
18
|
this.client = new OpenAI({
|
|
44
19
|
apiKey: key,
|
|
45
20
|
baseURL: baseUrl,
|
|
21
|
+
timeout: options.noTimeout ? 2_147_483_647 : undefined,
|
|
46
22
|
});
|
|
47
23
|
}
|
|
48
|
-
async
|
|
49
|
-
const
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
{
|
|
62
|
-
role: 'user',
|
|
63
|
-
content: [
|
|
64
|
-
{
|
|
65
|
-
type: 'input_text',
|
|
66
|
-
text: `${systemPrompt}\n\n${userPrompt}\n\nSOURCE_JSON:\n${sourceJson}`,
|
|
67
|
-
},
|
|
68
|
-
],
|
|
69
|
-
},
|
|
70
|
-
],
|
|
71
|
-
});
|
|
72
|
-
const text = response.output_text;
|
|
73
|
-
if (!text) {
|
|
74
|
-
logger.error(`${chunkLabel} Empty response from OpenAI`);
|
|
75
|
-
continue;
|
|
76
|
-
}
|
|
77
|
-
const cleanedText = stripJsonMarkdown(text);
|
|
78
|
-
const tempJsonFileName = `chunk_${chunk.index + 1}.json`;
|
|
79
|
-
await writeTemp(this.to, tempJsonFileName, cleanedText);
|
|
80
|
-
if (!isValidJson(cleanedText)) {
|
|
81
|
-
if (attempt === MAX_RETRIES) {
|
|
82
|
-
logger.error(`${chunkLabel} Failed after ${MAX_RETRIES + 1} attempts: Invalid JSON response`);
|
|
83
|
-
return null;
|
|
84
|
-
}
|
|
85
|
-
logger.error(`${chunkLabel} Invalid JSON response, saved to ${tempJsonFileName}`);
|
|
86
|
-
continue;
|
|
87
|
-
}
|
|
88
|
-
logger.info(`${chunkLabel} Translated successfully (${formatBytes(chunk.byteSize)}, ${chunk.keyCount} keys)`);
|
|
89
|
-
return cleanedText;
|
|
90
|
-
}
|
|
91
|
-
catch (error) {
|
|
92
|
-
const isLastAttempt = attempt === MAX_RETRIES;
|
|
93
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
94
|
-
if (isLastAttempt) {
|
|
95
|
-
logger.error(`${chunkLabel} Failed after ${MAX_RETRIES + 1} attempts: ${errorMessage}`);
|
|
96
|
-
return null;
|
|
97
|
-
}
|
|
98
|
-
logger.warn(`${chunkLabel} Attempt ${attempt + 1} failed: ${errorMessage}`);
|
|
99
|
-
}
|
|
100
|
-
}
|
|
101
|
-
return null;
|
|
102
|
-
}
|
|
103
|
-
async translate() {
|
|
104
|
-
const { systemPrompt, userPrompt } = generatePrompts(this.from, this.to);
|
|
105
|
-
const totalChunks = this.chunks.length;
|
|
106
|
-
logger.info(`Starting translation: ${totalChunks} chunk(s) to process`);
|
|
107
|
-
const results = [];
|
|
108
|
-
const failedChunks = [];
|
|
109
|
-
for (let i = 0; i < totalChunks; i++) {
|
|
110
|
-
const chunk = this.chunks[i];
|
|
111
|
-
if (i > 0) {
|
|
112
|
-
logger.info(`Waiting ${INTER_CHUNK_DELAY_MS}ms before next chunk (rate limit protection)...`);
|
|
113
|
-
await delay(INTER_CHUNK_DELAY_MS);
|
|
114
|
-
}
|
|
115
|
-
const result = await this.translateChunk(chunk, systemPrompt, userPrompt);
|
|
116
|
-
if (result) {
|
|
117
|
-
results.push(result);
|
|
118
|
-
}
|
|
119
|
-
else {
|
|
120
|
-
failedChunks.push(chunk.index + 1);
|
|
121
|
-
}
|
|
122
|
-
}
|
|
123
|
-
if (failedChunks.length > 0) {
|
|
124
|
-
throw new Error(`Translation failed for chunk(s): ${failedChunks.join(', ')}. Check temp files for partial results.`);
|
|
24
|
+
async translate(prompt) {
|
|
25
|
+
const completion = await this.client.chat.completions.create({
|
|
26
|
+
model: this.model,
|
|
27
|
+
messages: [{ role: 'user', content: prompt }],
|
|
28
|
+
temperature: 0.2,
|
|
29
|
+
});
|
|
30
|
+
const text = completion.choices[0]?.message?.content ?? '';
|
|
31
|
+
let usage;
|
|
32
|
+
if (completion.usage) {
|
|
33
|
+
usage = {
|
|
34
|
+
inputTokens: completion.usage.prompt_tokens ?? 0,
|
|
35
|
+
outputTokens: completion.usage.completion_tokens ?? 0,
|
|
36
|
+
};
|
|
125
37
|
}
|
|
126
|
-
|
|
127
|
-
const jsonInputs = results.map((r) => JSON.parse(r));
|
|
128
|
-
const mergedContent = mergeInputs(jsonInputs);
|
|
129
|
-
return JSON.stringify(mergedContent, null, 2);
|
|
38
|
+
return { text, usage };
|
|
130
39
|
}
|
|
131
40
|
}
|
|
132
41
|
export default OpenAIModel;
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import type { Batch, JsonValue, Leaf, Path, TextTranslator, TranslateOptions, TranslateUsage } from './types.js';
|
|
2
|
+
export declare function extractLeaves(data: JsonValue): Leaf[];
|
|
3
|
+
export declare function groupIntoBatches(leaves: Leaf[], maxBytes: number): Batch[];
|
|
4
|
+
export declare function buildBatchPrompt(from: string, to: string, leaves: Leaf[]): string;
|
|
5
|
+
export declare function buildSinglePrompt(from: string, to: string, value: string): string;
|
|
6
|
+
export declare function decodeResponse(text: string): Map<number, string>;
|
|
7
|
+
export declare function setPath(root: JsonValue, path: Path, value: JsonValue): void;
|
|
8
|
+
export declare function reconstruct(allLeaves: Leaf[], translations: Map<number, string>): JsonValue;
|
|
9
|
+
export type RunBatchesResult = {
|
|
10
|
+
translations: Map<number, string>;
|
|
11
|
+
usage: TranslateUsage;
|
|
12
|
+
calls: number;
|
|
13
|
+
};
|
|
14
|
+
export declare function runBatches(batches: Batch[], translator: TextTranslator, from: string, to: string, options: TranslateOptions): Promise<RunBatchesResult>;
|
|
15
|
+
//# sourceMappingURL=translator.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"translator.d.ts","sourceRoot":"","sources":["../../src/src/translator.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,cAAc,EAAE,gBAAgB,EAAE,cAAc,EAAE,MAAM,YAAY,CAAC;AAWjH,wBAAgB,aAAa,CAAC,IAAI,EAAE,SAAS,GAAG,IAAI,EAAE,CAmCrD;AAgBD,wBAAgB,gBAAgB,CAAC,MAAM,EAAE,IAAI,EAAE,EAAE,QAAQ,EAAE,MAAM,GAAG,KAAK,EAAE,CA0B1E;AAED,wBAAgB,gBAAgB,CAAC,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,MAAM,CAkBjF;AAED,wBAAgB,iBAAiB,CAAC,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,GAAG,MAAM,CAMjF;AAQD,wBAAgB,cAAc,CAAC,IAAI,EAAE,MAAM,GAAG,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAWhE;AAED,wBAAgB,OAAO,CAAC,IAAI,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,SAAS,GAAG,IAAI,CAY3E;AAED,wBAAgB,WAAW,CAAC,SAAS,EAAE,IAAI,EAAE,EAAE,YAAY,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,SAAS,CAe3F;AAED,MAAM,MAAM,gBAAgB,GAAG;IAC7B,YAAY,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAClC,KAAK,EAAE,cAAc,CAAC;IACtB,KAAK,EAAE,MAAM,CAAC;CACf,CAAC;AA6IF,wBAAsB,UAAU,CAC9B,OAAO,EAAE,KAAK,EAAE,EAChB,UAAU,EAAE,cAAc,EAC1B,IAAI,EAAE,MAAM,EACZ,EAAE,EAAE,MAAM,EACV,OAAO,EAAE,gBAAgB,GACxB,OAAO,CAAC,gBAAgB,CAAC,CAwC3B"}
|
|
@@ -0,0 +1,284 @@
|
|
|
1
|
+
import { logger } from './logger.js';
|
|
2
|
+
import { delay, formatBytes } from './utilites.js';
|
|
3
|
+
import { BASE_RETRY_DELAY_MS, INTER_BATCH_DELAY_MS, INTER_LEAF_DELAY_MS, MAX_RETRIES, PER_LEAF_FALLBACK_RATIO } from './contants.js';
|
|
4
|
+
const encoder = new TextEncoder();
|
|
5
|
+
function byteSize(s) {
|
|
6
|
+
return encoder.encode(s).byteLength;
|
|
7
|
+
}
|
|
8
|
+
export function extractLeaves(data) {
|
|
9
|
+
const out = [];
|
|
10
|
+
let nextId = 0;
|
|
11
|
+
function walk(node, path) {
|
|
12
|
+
if (node === null || typeof node !== 'object') {
|
|
13
|
+
const isString = typeof node === 'string';
|
|
14
|
+
out.push({
|
|
15
|
+
id: ++nextId,
|
|
16
|
+
path,
|
|
17
|
+
value: node,
|
|
18
|
+
translatable: isString && node.trim().length > 0,
|
|
19
|
+
});
|
|
20
|
+
return;
|
|
21
|
+
}
|
|
22
|
+
if (Array.isArray(node)) {
|
|
23
|
+
if (node.length === 0) {
|
|
24
|
+
out.push({ id: ++nextId, path, value: node, translatable: false });
|
|
25
|
+
return;
|
|
26
|
+
}
|
|
27
|
+
node.forEach((item, i) => walk(item, [...path, i]));
|
|
28
|
+
return;
|
|
29
|
+
}
|
|
30
|
+
const entries = Object.entries(node);
|
|
31
|
+
if (entries.length === 0) {
|
|
32
|
+
out.push({ id: ++nextId, path, value: node, translatable: false });
|
|
33
|
+
return;
|
|
34
|
+
}
|
|
35
|
+
for (const [k, v] of entries) {
|
|
36
|
+
walk(v, [...path, k]);
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
walk(data, []);
|
|
40
|
+
return out;
|
|
41
|
+
}
|
|
42
|
+
function tagOpen(id) {
|
|
43
|
+
return `≪${id}≫`;
|
|
44
|
+
}
|
|
45
|
+
function tagClose(id) {
|
|
46
|
+
return `≪/${id}≫`;
|
|
47
|
+
}
|
|
48
|
+
const TAG_OVERHEAD_CHARS = 8;
|
|
49
|
+
function wrapEntry(leaf) {
|
|
50
|
+
return `${tagOpen(leaf.id)}${String(leaf.value)}${tagClose(leaf.id)}`;
|
|
51
|
+
}
|
|
52
|
+
export function groupIntoBatches(leaves, maxBytes) {
|
|
53
|
+
const translatable = leaves.filter((l) => l.translatable);
|
|
54
|
+
const batches = [];
|
|
55
|
+
let current = [];
|
|
56
|
+
let currentSize = 0;
|
|
57
|
+
for (const leaf of translatable) {
|
|
58
|
+
const valueBytes = byteSize(String(leaf.value));
|
|
59
|
+
const idChars = String(leaf.id).length;
|
|
60
|
+
const overhead = TAG_OVERHEAD_CHARS + idChars * 2 + 1;
|
|
61
|
+
const itemSize = valueBytes + overhead;
|
|
62
|
+
if (current.length > 0 && currentSize + itemSize > maxBytes) {
|
|
63
|
+
batches.push({ index: batches.length, leaves: current, byteSize: currentSize });
|
|
64
|
+
current = [];
|
|
65
|
+
currentSize = 0;
|
|
66
|
+
}
|
|
67
|
+
current.push(leaf);
|
|
68
|
+
currentSize += itemSize;
|
|
69
|
+
}
|
|
70
|
+
if (current.length > 0) {
|
|
71
|
+
batches.push({ index: batches.length, leaves: current, byteSize: currentSize });
|
|
72
|
+
}
|
|
73
|
+
return batches;
|
|
74
|
+
}
|
|
75
|
+
export function buildBatchPrompt(from, to, leaves) {
|
|
76
|
+
const entries = leaves.map(wrapEntry).join('\n');
|
|
77
|
+
return `Translate each tagged entry below from ${from} to ${to}.
|
|
78
|
+
|
|
79
|
+
Tag format:
|
|
80
|
+
- Every entry is wrapped exactly as ≪N≫value≪/N≫, where N is the entry id (a positive integer).
|
|
81
|
+
- The opening marker is the character ≪ (U+226A), then the id, then ≫ (U+226B). The closing marker is ≪, /, the same id, then ≫.
|
|
82
|
+
- Copy these markers verbatim. Do not put spaces inside them. Do not change them to <<>>, «», <tag>, [n], or any other notation.
|
|
83
|
+
- The id must match the input id of the same entry — never renumber, merge, split, or reorder entries.
|
|
84
|
+
|
|
85
|
+
What to write back:
|
|
86
|
+
- One ≪N≫translated_value≪/N≫ block per input entry, in the same order, separated by single newlines.
|
|
87
|
+
- Translate only the text that sits between ≪N≫ and ≪/N≫.
|
|
88
|
+
- Preserve every variable ({{name}}, {name}, __VAR__, $t(...), %s, %d), HTML tag, markdown token, escape sequence, and special character inside the value.
|
|
89
|
+
- Output nothing else: no headings, no prose, no commentary, no code fences, no language labels.
|
|
90
|
+
|
|
91
|
+
Now translate the following entries:
|
|
92
|
+
${entries}`;
|
|
93
|
+
}
|
|
94
|
+
export function buildSinglePrompt(from, to, value) {
|
|
95
|
+
return `You are a professional ${from} to ${to} translator. Your goal is to accurately convey the meaning and nuances of the original ${from} text while adhering to ${to} grammar, vocabulary, and cultural sensitivities. Preserve every variable ({{name}}, {name}, __VAR__, $t(...), %s, %d), HTML tag, markdown token, escape sequence, and special character exactly as given.
|
|
96
|
+
Produce only the ${to} translation, without any additional explanations, quotes, labels, or commentary. Please translate the following ${from} text into ${to}:
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
${value}`;
|
|
100
|
+
}
|
|
101
|
+
const TAG_PATTERNS = [
|
|
102
|
+
/≪\s*(\d+)\s*≫([\s\S]*?)≪\s*\/\s*\1\s*≫/g,
|
|
103
|
+
/<<\s*(\d+)\s*>>([\s\S]*?)<<\s*\/\s*\1\s*>>/g,
|
|
104
|
+
/«\s*(\d+)\s*»([\s\S]*?)«\s*\/\s*\1\s*»/g,
|
|
105
|
+
];
|
|
106
|
+
export function decodeResponse(text) {
|
|
107
|
+
const map = new Map();
|
|
108
|
+
for (const pattern of TAG_PATTERNS) {
|
|
109
|
+
for (const match of text.matchAll(pattern)) {
|
|
110
|
+
const id = parseInt(match[1], 10);
|
|
111
|
+
if (!map.has(id)) {
|
|
112
|
+
map.set(id, match[2].trim());
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
return map;
|
|
117
|
+
}
|
|
118
|
+
export function setPath(root, path, value) {
|
|
119
|
+
let node = root;
|
|
120
|
+
for (let i = 0; i < path.length - 1; i++) {
|
|
121
|
+
const key = path[i];
|
|
122
|
+
const nextKey = path[i + 1];
|
|
123
|
+
const container = node;
|
|
124
|
+
if (container[key] === undefined) {
|
|
125
|
+
container[key] = typeof nextKey === 'number' ? [] : {};
|
|
126
|
+
}
|
|
127
|
+
node = container[key];
|
|
128
|
+
}
|
|
129
|
+
node[path[path.length - 1]] = value;
|
|
130
|
+
}
|
|
131
|
+
export function reconstruct(allLeaves, translations) {
|
|
132
|
+
if (allLeaves.length === 0) {
|
|
133
|
+
return {};
|
|
134
|
+
}
|
|
135
|
+
const firstStep = allLeaves[0].path[0];
|
|
136
|
+
if (firstStep === undefined) {
|
|
137
|
+
const only = allLeaves[0];
|
|
138
|
+
return only.translatable ? translations.get(only.id) ?? only.value : only.value;
|
|
139
|
+
}
|
|
140
|
+
const root = typeof firstStep === 'number' ? [] : {};
|
|
141
|
+
for (const leaf of allLeaves) {
|
|
142
|
+
const value = leaf.translatable && translations.has(leaf.id) ? translations.get(leaf.id) : leaf.value;
|
|
143
|
+
setPath(root, leaf.path, value);
|
|
144
|
+
}
|
|
145
|
+
return root;
|
|
146
|
+
}
|
|
147
|
+
function accumulateUsage(target, source) {
|
|
148
|
+
if (!source) {
|
|
149
|
+
return;
|
|
150
|
+
}
|
|
151
|
+
target.inputTokens += source.inputTokens;
|
|
152
|
+
target.outputTokens += source.outputTokens;
|
|
153
|
+
}
|
|
154
|
+
async function translateOne(translator, from, to, value, usage, callCounter) {
|
|
155
|
+
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
|
|
156
|
+
if (attempt > 0) {
|
|
157
|
+
await delay(BASE_RETRY_DELAY_MS * Math.pow(2, attempt - 1));
|
|
158
|
+
}
|
|
159
|
+
try {
|
|
160
|
+
callCounter.count += 1;
|
|
161
|
+
const response = await translator.translate(buildSinglePrompt(from, to, value));
|
|
162
|
+
accumulateUsage(usage, response.usage);
|
|
163
|
+
const trimmed = response.text.trim();
|
|
164
|
+
if (trimmed.length > 0) {
|
|
165
|
+
return trimmed;
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
catch {
|
|
169
|
+
if (attempt === MAX_RETRIES) {
|
|
170
|
+
return null;
|
|
171
|
+
}
|
|
172
|
+
}
|
|
173
|
+
}
|
|
174
|
+
return null;
|
|
175
|
+
}
|
|
176
|
+
/**
 * Attempt to translate a batch of leaves in bulk, retrying with exponential
 * backoff up to MAX_RETRIES extra attempts. Successful translations are
 * written into `translations`; returns the leaves still missing a
 * translation (empty array on full success). The caller is expected to run
 * the per-entry fallback on whatever is returned.
 *
 * @param {Array} batchLeaves - Leaves to translate in this batch.
 * @param {{translate: (prompt: string) => Promise<{text: string, usage?: object}>}} translator - Provider client.
 * @param {string} from - Source language.
 * @param {string} to - Target language.
 * @param {string} label - Log prefix identifying this batch.
 * @param {Map<string, string>} translations - Output map, keyed by leaf id (mutated).
 * @param {{inputTokens: number, outputTokens: number}} usage - Running token total (mutated).
 * @param {{count: number}} callCounter - Running API-call counter (mutated).
 * @returns {Promise<Array>} Leaves that still have no translation.
 */
async function runBatchAttempts(batchLeaves, translator, from, to, label, translations, usage, callCounter) {
    // Work on a copy: `remaining` shrinks to just the unresolved leaves
    // across retries, so each retry only re-sends what is still missing.
    let remaining = [...batchLeaves];
    // Tracks whether we have yet seen a decodable response; used to decide
    // early bail-out to per-entry mode (see ratio check below). NOTE(review):
    // an empty response or a thrown error does NOT clear this flag, so the
    // ratio check can also fire on a later attempt that is the first to
    // return decodable output.
    let firstAttempt = true;
    for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
        if (attempt > 0) {
            // Exponential backoff: BASE, 2*BASE, 4*BASE, ...
            const backoffMs = BASE_RETRY_DELAY_MS * Math.pow(2, attempt - 1);
            logger.warn(`${label} Retry ${attempt}/${MAX_RETRIES} after ${backoffMs}ms backoff (${remaining.length} entries)...`);
            await delay(backoffMs);
        }
        try {
            const prompt = buildBatchPrompt(from, to, remaining);
            callCounter.count += 1;
            const response = await translator.translate(prompt);
            accumulateUsage(usage, response.usage);
            const text = response.text;
            // Soft failure: blank output. Retry unless out of attempts.
            if (!text || text.trim().length === 0) {
                if (attempt === MAX_RETRIES) {
                    return remaining;
                }
                logger.warn(`${label} Attempt ${attempt + 1} returned empty response`);
                continue;
            }
            // Partition the batch: leaves the model answered vs. still missing.
            const decoded = decodeResponse(text);
            const stillMissing = [];
            for (const leaf of remaining) {
                const value = decoded.get(leaf.id);
                if (!value || value.trim().length === 0) {
                    stillMissing.push(leaf);
                }
                else {
                    translations.set(leaf.id, value);
                }
            }
            if (stillMissing.length === 0) {
                return [];
            }
            // If the very first decodable attempt misses most of the batch,
            // this model likely can't handle batch prompts — hand the rest to
            // the per-entry fallback immediately instead of burning retries.
            if (firstAttempt && stillMissing.length >= remaining.length * PER_LEAF_FALLBACK_RATIO) {
                logger.warn(`${label} Batch mode unreliable for this model (${stillMissing.length}/${remaining.length} missing on first attempt), switching to per-entry mode`);
                return stillMissing;
            }
            logger.warn(`${label} ${stillMissing.length}/${remaining.length} entries missing, will retry`);
            firstAttempt = false;
            remaining = stillMissing;
        }
        catch (error) {
            const message = error instanceof Error ? error.message : String(error);
            if (attempt === MAX_RETRIES) {
                logger.warn(`${label} Batch attempt failed after retries: ${message}`);
                return remaining;
            }
            logger.warn(`${label} Attempt ${attempt + 1} failed: ${message}`);
        }
    }
    // Defensive: reachable only if the final loop iteration neither returned
    // nor threw (e.g. partial success on the last attempt).
    return remaining;
}
|
|
231
|
+
/**
 * Last-resort path: translate each remaining leaf with its own request,
 * pacing calls unless rate limiting is disabled.
 *
 * @param {Array} leaves - Leaves the batch attempts could not translate.
 * @param {{translate: (prompt: string) => Promise<{text: string, usage?: object}>}} translator - Provider client.
 * @param {string} from - Source language.
 * @param {string} to - Target language.
 * @param {{noLimit?: boolean}} options - `noLimit` skips the inter-call delay.
 * @param {string} label - Log prefix identifying the originating batch.
 * @param {Map<string, string>} translations - Output map, keyed by leaf id (mutated).
 * @param {{inputTokens: number, outputTokens: number}} usage - Running token total (mutated).
 * @param {{count: number}} callCounter - Running API-call counter (mutated).
 * @returns {Promise<Array>} Leaves that still failed after per-entry retries.
 */
async function runPerLeafFallback(leaves, translator, from, to, options, label, translations, usage, callCounter) {
    logger.info(`${label} Per-entry fallback for ${leaves.length} entries`);
    const failed = [];
    let isFirst = true;
    for (const leaf of leaves) {
        // Throttle between calls (but never before the first one).
        if (!isFirst && !options.noLimit) {
            await delay(INTER_LEAF_DELAY_MS);
        }
        isFirst = false;
        const translated = await translateOne(translator, from, to, String(leaf.value), usage, callCounter);
        if (translated) {
            translations.set(leaf.id, translated);
        }
        else {
            logger.warn(`${label} Per-entry failed for id=${leaf.id}`);
            failed.push(leaf);
        }
    }
    return failed;
}
|
|
250
|
+
/**
 * Translate every batch sequentially: bulk attempts first, then a per-entry
 * fallback for whatever the batch mode could not resolve.
 *
 * @param {Array<{index: number, leaves: Array, byteSize: number}>} batches - Prepared batches.
 * @param {{translate: (prompt: string) => Promise<{text: string, usage?: object}>}} translator - Provider client.
 * @param {string} from - Source language.
 * @param {string} to - Target language.
 * @param {{noLimit?: boolean}} options - `noLimit` disables inter-batch pacing.
 * @returns {Promise<{translations: Map<string, string>, usage: {inputTokens: number, outputTokens: number}, calls: number}>}
 * @throws {Error} When any entries remain untranslated after all fallbacks.
 */
export async function runBatches(batches, translator, from, to, options) {
    const translations = new Map();
    const failedIds = [];
    const usage = { inputTokens: 0, outputTokens: 0 };
    const callCounter = { count: 0 };
    for (const [position, batch] of batches.entries()) {
        const label = `[Batch ${batch.index + 1}/${batches.length}]`;
        // Pace batches to avoid provider rate limits (skipped before the first).
        if (position > 0 && !options.noLimit) {
            logger.info(`Waiting ${INTER_BATCH_DELAY_MS}ms before next batch (rate limit protection)...`);
            await delay(INTER_BATCH_DELAY_MS);
        }
        const missing = await runBatchAttempts(batch.leaves, translator, from, to, label, translations, usage, callCounter);
        const stillFailed = missing.length > 0
            ? await runPerLeafFallback(missing, translator, from, to, options, label, translations, usage, callCounter)
            : [];
        if (stillFailed.length === 0) {
            const succeeded = batch.leaves.length - stillFailed.length;
            logger.info(`${label} Translated successfully (${formatBytes(batch.byteSize)}, ${succeeded} entries)`);
        }
        else {
            logger.error(`${label} ${stillFailed.length}/${batch.leaves.length} entries failed permanently`);
            failedIds.push(...stillFailed.map((leaf) => leaf.id));
        }
    }
    // Fail loudly with a bounded id preview rather than returning partial output.
    if (failedIds.length > 0) {
        const preview = failedIds.slice(0, 20).join(', ');
        const more = failedIds.length > 20 ? `, …(+${failedIds.length - 20})` : '';
        throw new Error(`Translation failed for ${failedIds.length} entry/entries (ids: ${preview}${more})`);
    }
    return { translations, usage, calls: callCounter.count };
}
|