@beg4660/translator 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,58 @@
1
+ # @beg4660/translator
2
+
3
+ Gemini(클라우드) / Ollama(로컬) 중 원하는 Provider로 **번역 API 호출**을 할 수 있는 작은 TypeScript 라이브러리입니다.
4
+
5
+ ## Install
6
+
7
+ ```bash
8
+ npm i @beg4660/translator
9
+ ```
10
+
11
+ Node 18+ 필요합니다.
12
+
13
+ ## Usage
14
+
15
+ ### Gemini
16
+
17
+ ```ts
18
+ import { GeminiProvider, TranslatorClient } from "@beg4660/translator";
19
+
20
+ const provider = new GeminiProvider({
21
+ apiKey: process.env.GEMINI_API_KEY!,
22
+ model: "gemini-1.5-flash"
23
+ });
24
+
25
+ const client = new TranslatorClient(provider);
26
+ const result = await client.translate({
27
+ text: "Hello, how are you?",
28
+ targetLang: "Korean"
29
+ });
30
+
31
+ console.log(result.text);
32
+ ```
33
+
34
+ ### Ollama
35
+
36
+ ```ts
37
+ import { OllamaProvider, TranslatorClient } from "@beg4660/translator";
38
+
39
+ const provider = new OllamaProvider({
40
+ host: "http://127.0.0.1:11434",
41
+ model: "llama3"
42
+ });
43
+
44
+ const client = new TranslatorClient(provider);
45
+ const result = await client.translate({
46
+ text: "오늘 회의는 3시에 시작합니다.",
47
+ targetLang: "English",
48
+ tone: "formal"
49
+ });
50
+
51
+ console.log(result.text);
52
+ ```
53
+
54
+ ## Notes
55
+
56
+ - Gemini API Key는 앱/확장 쪽에서는 `vscode.SecretStorage` 같은 안전한 저장소를 권장합니다.
57
+ - Ollama는 기본적으로 `http://127.0.0.1:11434` 를 사용합니다.
58
+
package/dist/index.cjs ADDED
@@ -0,0 +1,117 @@
1
+ 'use strict';
2
+
3
+ var generativeAi = require('@google/generative-ai');
4
+
5
+ // src/prompt.ts
6
// Renders a glossary map as `- "src" => "dst"` bullet lines; "" when absent or empty.
function formatGlossary(glossary) {
  const pairs = glossary ? Object.entries(glossary) : [];
  const bullets = [];
  for (const [src, dst] of pairs) {
    bullets.push(`- "${src}" => "${dst}"`);
  }
  return bullets.join("\n");
}
// Builds the single instruction prompt string sent to the LLM provider.
// Source language falls back to "auto", tone to "neutral".
function buildTranslatePrompt(opts) {
  const trimmedSource = opts.sourceLang?.trim();
  const source = trimmedSource ? trimmedSource : "auto";
  const tone = opts.tone ?? "neutral";
  const glossary = formatGlossary(opts.glossary);
  const glossaryRule = glossary
    ? "- Use this glossary strictly (source => target):\n" + glossary
    : "- No glossary provided.";
  const lines = [
    "You are a professional translation engine.",
    "Translate the user's text precisely and naturally.",
    "",
    "Rules:",
    "- Output ONLY the translated text. No quotes, no markdown, no explanations.",
    "- Preserve line breaks.",
    "- Keep code blocks, URLs, file paths, and identifiers unchanged.",
    "- If the input is already in the target language, return it unchanged.",
    glossaryRule,
    "",
    `Source language: ${source}`,
    `Target language: ${opts.targetLang}`,
    `Tone: ${tone}`,
    "",
    "Text to translate:",
    opts.text
  ];
  return lines.join("\n");
}
36
+
37
+ // src/client.ts
38
// Provider-agnostic translation client: validates input, builds the prompt,
// delegates to the provider, and normalizes the returned text.
var TranslatorClient = class {
  provider;
  constructor(provider) {
    this.provider = provider;
  }
  // Throws when text/targetLang are missing; otherwise returns
  // { text, provider, model, raw }.
  async translate(opts) {
    if (!opts?.text) throw new Error("translate: text is required");
    if (!opts?.targetLang) throw new Error("translate: targetLang is required");
    const out = await this.provider.generate(buildTranslatePrompt(opts));
    return {
      text: cleanupText(out.text),
      provider: this.provider.kind,
      model: this.provider.model,
      raw: out.raw
    };
  }
};
// Normalizes CRLF to LF, strips per-line trailing spaces/tabs, and trims the end.
function cleanupText(s) {
  const unixNewlines = s.split("\r\n").join("\n");
  return unixNewlines.replace(/[ \t]+\n/g, "\n").trimEnd();
}
59
// Cloud provider backed by the Google Generative AI SDK; apiKey is mandatory.
var GeminiProvider = class {
  kind = "gemini";
  model;
  client;
  constructor(opts) {
    if (!opts?.apiKey) throw new Error("GeminiProvider: apiKey is required");
    this.model = opts.model ?? "gemini-1.5-flash";
    this.client = new generativeAi.GoogleGenerativeAI(opts.apiKey);
  }
  // Sends the prompt and returns the flattened text plus the raw SDK response.
  async generate(prompt) {
    const handle = this.client.getGenerativeModel({ model: this.model });
    const response = await handle.generateContent(prompt);
    return { text: response.response.text(), raw: response };
  }
};
75
+
76
+ // src/providers/ollama.ts
77
// Local provider talking to an Ollama server over HTTP; model is mandatory.
// The host defaults to http://127.0.0.1:11434 and trailing slashes are dropped.
var OllamaProvider = class {
  kind = "ollama";
  model;
  host;
  constructor(opts) {
    if (!opts?.model) throw new Error("OllamaProvider: model is required");
    this.model = opts.model;
    const base = opts.host ?? "http://127.0.0.1:11434";
    this.host = base.replace(/\/+$/, "");
  }
  // POSTs a non-streaming /api/generate request and returns the response text.
  async generate(prompt) {
    const payload = JSON.stringify({ model: this.model, prompt, stream: false });
    const res = await fetch(`${this.host}/api/generate`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: payload
    });
    if (!res.ok) {
      const detail = await safeReadText(res);
      const suffix = detail ? ` - ${detail}` : "";
      throw new Error(`OllamaProvider: HTTP ${res.status} ${res.statusText}${suffix}`);
    }
    const json = await res.json();
    return { text: (json.response ?? "").toString(), raw: json };
  }
};
105
// Best-effort read of a response body for error messages; yields "" on failure.
async function safeReadText(res) {
  let text = "";
  try {
    text = await res.text();
  } catch {
    // ignore: body may be unreadable or already consumed
  }
  return text;
}
112
+
113
exports.GeminiProvider = GeminiProvider;
exports.OllamaProvider = OllamaProvider;
exports.TranslatorClient = TranslatorClient;
// Fix: the build emitted the sourceMappingURL directive twice; keep one.
//# sourceMappingURL=index.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/prompt.ts","../src/client.ts","../src/providers/gemini.ts","../src/providers/ollama.ts"],"names":["GoogleGenerativeAI"],"mappings":";;;;;AAEA,SAAS,eAAe,QAAA,EAA2C;AACjE,EAAA,IAAI,CAAC,UAAU,OAAO,EAAA;AACtB,EAAA,MAAM,OAAA,GAAU,MAAA,CAAO,OAAA,CAAQ,QAAQ,CAAA;AACvC,EAAA,IAAI,OAAA,CAAQ,MAAA,KAAW,CAAA,EAAG,OAAO,EAAA;AACjC,EAAA,OAAO,OAAA,CAAQ,GAAA,CAAI,CAAC,CAAC,GAAG,CAAC,CAAA,KAAM,CAAA,GAAA,EAAM,CAAC,CAAA,MAAA,EAAS,CAAC,CAAA,CAAA,CAAG,CAAA,CAAE,KAAK,IAAI,CAAA;AAChE;AAEO,SAAS,qBAAqB,IAAA,EAAgC;AACnE,EAAA,MAAM,MAAA,GAAS,KAAK,UAAA,EAAY,IAAA,KAAS,IAAA,CAAK,UAAA,CAAW,MAAK,GAAI,MAAA;AAClE,EAAA,MAAM,IAAA,GAAO,KAAK,IAAA,IAAQ,SAAA;AAC1B,EAAA,MAAM,QAAA,GAAW,cAAA,CAAe,IAAA,CAAK,QAAQ,CAAA;AAE7C,EAAA,OAAO;AAAA,IACL,4CAAA;AAAA,IACA,oDAAA;AAAA,IACA,EAAA;AAAA,IACA,QAAA;AAAA,IACA,6EAAA;AAAA,IACA,yBAAA;AAAA,IACA,kEAAA;AAAA,IACA,wEAAA;AAAA,IACA,QAAA,GACI,CAAA;AAAA,EAAqD,QAAQ,CAAA,CAAA,GAC7D,yBAAA;AAAA,IACJ,EAAA;AAAA,IACA,oBAAoB,MAAM,CAAA,CAAA;AAAA,IAC1B,CAAA,iBAAA,EAAoB,KAAK,UAAU,CAAA,CAAA;AAAA,IACnC,SAAS,IAAI,CAAA,CAAA;AAAA,IACb,EAAA;AAAA,IACA,oBAAA;AAAA,IACA,IAAA,CAAK;AAAA,GACP,CAAE,KAAK,IAAI,CAAA;AACb;;;AC/BO,IAAM,mBAAN,MAAuB;AAAA,EAC5B,YAA6B,QAAA,EAAuB;AAAvB,IAAA,IAAA,CAAA,QAAA,GAAA,QAAA;AAAA,EAAwB;AAAA,EAAxB,QAAA;AAAA,EAE7B,MAAM,UAAU,IAAA,EAAkD;AAChE,IAAA,IAAI,CAAC,IAAA,EAAM,IAAA,EAAM,MAAM,IAAI,MAAM,6BAA6B,CAAA;AAC9D,IAAA,IAAI,CAAC,IAAA,EAAM,UAAA,EAAY,MAAM,IAAI,MAAM,mCAAmC,CAAA;AAE1E,IAAA,MAAM,MAAA,GAAS,qBAAqB,IAAI,CAAA;AACxC,IAAA,MAAM,GAAA,GAAM,MAAM,IAAA,CAAK,QAAA,CAAS,SAAS,MAAM,CAAA;AAE/C,IAAA,OAAO;AAAA,MACL,IAAA,EAAM,WAAA,CAAY,GAAA,CAAI,IAAI,CAAA;AAAA,MAC1B,QAAA,EAAU,KAAK,QAAA,CAAS,IAAA;AAAA,MACxB,KAAA,EAAO,KAAK,QAAA,CAAS,KAAA;AAAA,MACrB,KAAK,GAAA,CAAI;AAAA,KACX;AAAA,EACF;AACF;AAEA,SAAS,YAAY,CAAA,EAAmB;AAEtC,EAAA,OAAO,CAAA,CAAE,QAAQ,OAAA,EAAS,IAAI,EAAE,OAAA,CAAQ,WAAA,EAAa,IAAI,CAAA,CAAE,OAAA,EAAQ;AACrE;ACtBO,IAAM,iBAAN,MAA4C;AAAA,EACxC,IAAA,GAAO,QAAA;AAAA,EACP,KAAA;AAAA,EACQ,MAAA;AAAA,EAEjB,YAAY,IAAA,EAA6B;AACvC,IAAA,IAAI,CAAC,IAAA,EAAM,MAAA,EAAQ,MAA
M,IAAI,MAAM,oCAAoC,CAAA;AACvE,IAAA,IAAA,CAAK,KAAA,GAAQ,KAAK,KAAA,IAAS,kBAAA;AAC3B,IAAA,IAAA,CAAK,MAAA,GAAS,IAAIA,+BAAA,CAAmB,IAAA,CAAK,MAAM,CAAA;AAAA,EAClD;AAAA,EAEA,MAAM,SAAS,MAAA,EAA0D;AACvE,IAAA,MAAM,KAAA,GAAQ,KAAK,MAAA,CAAO,kBAAA,CAAmB,EAAE,KAAA,EAAO,IAAA,CAAK,OAAO,CAAA;AAClE,IAAA,MAAM,GAAA,GAAM,MAAM,KAAA,CAAM,eAAA,CAAgB,MAAM,CAAA;AAC9C,IAAA,MAAM,IAAA,GAAO,GAAA,CAAI,QAAA,CAAS,IAAA,EAAK;AAC/B,IAAA,OAAO,EAAE,IAAA,EAAM,GAAA,EAAK,GAAA,EAAI;AAAA,EAC1B;AACF;;;ACLO,IAAM,iBAAN,MAA4C;AAAA,EACxC,IAAA,GAAO,QAAA;AAAA,EACP,KAAA;AAAA,EACQ,IAAA;AAAA,EAEjB,YAAY,IAAA,EAA6B;AACvC,IAAA,IAAI,CAAC,IAAA,EAAM,KAAA,EAAO,MAAM,IAAI,MAAM,mCAAmC,CAAA;AACrE,IAAA,IAAA,CAAK,QAAQ,IAAA,CAAK,KAAA;AAClB,IAAA,IAAA,CAAK,QAAQ,IAAA,CAAK,IAAA,IAAQ,wBAAA,EAA0B,OAAA,CAAQ,QAAQ,EAAE,CAAA;AAAA,EACxE;AAAA,EAEA,MAAM,SAAS,MAAA,EAA0D;AACvE,IAAA,MAAM,IAAA,GAA8B;AAAA,MAClC,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,MAAA;AAAA,MACA,MAAA,EAAQ;AAAA,KACV;AAEA,IAAA,MAAM,MAAM,MAAM,KAAA,CAAM,CAAA,EAAG,IAAA,CAAK,IAAI,CAAA,aAAA,CAAA,EAAiB;AAAA,MACnD,MAAA,EAAQ,MAAA;AAAA,MACR,OAAA,EAAS,EAAE,cAAA,EAAgB,kBAAA,EAAmB;AAAA,MAC9C,IAAA,EAAM,IAAA,CAAK,SAAA,CAAU,IAAI;AAAA,KAC1B,CAAA;AAED,IAAA,IAAI,CAAC,IAAI,EAAA,EAAI;AACX,MAAA,MAAM,GAAA,GAAM,MAAM,YAAA,CAAa,GAAG,CAAA;AAClC,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,qBAAA,EAAwB,GAAA,CAAI,MAAM,CAAA,CAAA,EAAI,GAAA,CAAI,UAAU,CAAA,EAAG,GAAA,GAAM,CAAA,GAAA,EAAM,GAAG,CAAA,CAAA,GAAK,EAAE,CAAA,CAAE,CAAA;AAAA,IACjG;AAEA,IAAA,MAAM,IAAA,GAAQ,MAAM,GAAA,CAAI,IAAA,EAAK;AAC7B,IAAA,OAAO,EAAE,OAAO,IAAA,CAAK,QAAA,IAAY,IAAI,QAAA,EAAS,EAAG,KAAK,IAAA,EAAK;AAAA,EAC7D;AACF;AAEA,eAAe,aAAa,GAAA,EAAgC;AAC1D,EAAA,IAAI;AACF,IAAA,OAAO,MAAM,IAAI,IAAA,EAAK;AAAA,EACxB,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,EAAA;AAAA,EACT;AACF","file":"index.cjs","sourcesContent":["import type { TranslateOptions } from \"./types.js\";\r\n\r\nfunction formatGlossary(glossary?: Record<string, string>): string {\r\n if (!glossary) return \"\";\r\n const entries = Object.entries(glossary);\r\n if (entries.length === 0) return \"\";\r\n return entries.map(([k, v]) => `- 
\"${k}\" => \"${v}\"`).join(\"\\n\");\r\n}\r\n\r\nexport function buildTranslatePrompt(opts: TranslateOptions): string {\r\n const source = opts.sourceLang?.trim() ? opts.sourceLang.trim() : \"auto\";\r\n const tone = opts.tone ?? \"neutral\";\r\n const glossary = formatGlossary(opts.glossary);\r\n\r\n return [\r\n \"You are a professional translation engine.\",\r\n \"Translate the user's text precisely and naturally.\",\r\n \"\",\r\n \"Rules:\",\r\n \"- Output ONLY the translated text. No quotes, no markdown, no explanations.\",\r\n \"- Preserve line breaks.\",\r\n \"- Keep code blocks, URLs, file paths, and identifiers unchanged.\",\r\n \"- If the input is already in the target language, return it unchanged.\",\r\n glossary\r\n ? `- Use this glossary strictly (source => target):\\n${glossary}`\r\n : \"- No glossary provided.\",\r\n \"\",\r\n `Source language: ${source}`,\r\n `Target language: ${opts.targetLang}`,\r\n `Tone: ${tone}`,\r\n \"\",\r\n \"Text to translate:\",\r\n opts.text\r\n ].join(\"\\n\");\r\n}\r\n\r\n","import { buildTranslatePrompt } from \"./prompt.js\";\r\nimport type { LLMProvider, TranslateOptions, TranslateResult } from \"./types.js\";\r\n\r\nexport class TranslatorClient {\r\n constructor(private readonly provider: LLMProvider) {}\r\n\r\n async translate(opts: TranslateOptions): Promise<TranslateResult> {\r\n if (!opts?.text) throw new Error(\"translate: text is required\");\r\n if (!opts?.targetLang) throw new Error(\"translate: targetLang is required\");\r\n\r\n const prompt = buildTranslatePrompt(opts);\r\n const out = await this.provider.generate(prompt);\r\n\r\n return {\r\n text: cleanupText(out.text),\r\n provider: this.provider.kind,\r\n model: this.provider.model,\r\n raw: out.raw\r\n };\r\n }\r\n}\r\n\r\nfunction cleanupText(s: string): string {\r\n // Keep it conservative: just normalize trailing whitespace/newlines.\r\n return s.replace(/\\r\\n/g, \"\\n\").replace(/[ \\t]+\\n/g, \"\\n\").trimEnd();\r\n}\r\n\r\n","import { 
GoogleGenerativeAI } from \"@google/generative-ai\";\r\nimport type { GeminiProviderOptions, LLMProvider } from \"../types.js\";\r\n\r\nexport class GeminiProvider implements LLMProvider {\r\n readonly kind = \"gemini\" as const;\r\n readonly model: string;\r\n private readonly client: GoogleGenerativeAI;\r\n\r\n constructor(opts: GeminiProviderOptions) {\r\n if (!opts?.apiKey) throw new Error(\"GeminiProvider: apiKey is required\");\r\n this.model = opts.model ?? \"gemini-1.5-flash\";\r\n this.client = new GoogleGenerativeAI(opts.apiKey);\r\n }\r\n\r\n async generate(prompt: string): Promise<{ text: string; raw?: unknown }> {\r\n const model = this.client.getGenerativeModel({ model: this.model });\r\n const res = await model.generateContent(prompt);\r\n const text = res.response.text();\r\n return { text, raw: res };\r\n }\r\n}\r\n\r\n","import type { LLMProvider, OllamaProviderOptions } from \"../types.js\";\r\n\r\ntype OllamaGenerateRequest = {\r\n model: string;\r\n prompt: string;\r\n stream?: boolean;\r\n options?: Record<string, unknown>;\r\n};\r\n\r\ntype OllamaGenerateResponse = {\r\n response?: string;\r\n done?: boolean;\r\n model?: string;\r\n};\r\n\r\nexport class OllamaProvider implements LLMProvider {\r\n readonly kind = \"ollama\" as const;\r\n readonly model: string;\r\n private readonly host: string;\r\n\r\n constructor(opts: OllamaProviderOptions) {\r\n if (!opts?.model) throw new Error(\"OllamaProvider: model is required\");\r\n this.model = opts.model;\r\n this.host = (opts.host ?? 
\"http://127.0.0.1:11434\").replace(/\\/+$/, \"\");\r\n }\r\n\r\n async generate(prompt: string): Promise<{ text: string; raw?: unknown }> {\r\n const body: OllamaGenerateRequest = {\r\n model: this.model,\r\n prompt,\r\n stream: false\r\n };\r\n\r\n const res = await fetch(`${this.host}/api/generate`, {\r\n method: \"POST\",\r\n headers: { \"content-type\": \"application/json\" },\r\n body: JSON.stringify(body)\r\n });\r\n\r\n if (!res.ok) {\r\n const msg = await safeReadText(res);\r\n throw new Error(`OllamaProvider: HTTP ${res.status} ${res.statusText}${msg ? ` - ${msg}` : \"\"}`);\r\n }\r\n\r\n const json = (await res.json()) as OllamaGenerateResponse;\r\n return { text: (json.response ?? \"\").toString(), raw: json };\r\n }\r\n}\r\n\r\nasync function safeReadText(res: Response): Promise<string> {\r\n try {\r\n return await res.text();\r\n } catch {\r\n return \"\";\r\n }\r\n}\r\n\r\n"]}
@@ -0,0 +1,61 @@
1
/** Identifier of the backing LLM service. */
type ProviderKind = "gemini" | "ollama";
/** Tone hint injected verbatim into the translation prompt. */
type TranslateTone = "neutral" | "formal" | "informal" | "friendly" | "technical";
/** Input accepted by {@link TranslatorClient.translate}. */
interface TranslateOptions {
    /** Text to translate (required, non-empty). */
    text: string;
    /** Target language name, e.g. "Korean" or "English" (required). */
    targetLang: string;
    /** Source language; when omitted the prompt asks for auto-detection. */
    sourceLang?: string;
    /** Desired tone; defaults to "neutral". */
    tone?: TranslateTone;
    /** Term mapping (source => target) the model is told to follow strictly. */
    glossary?: Record<string, string>;
}
/** Result returned by {@link TranslatorClient.translate}. */
interface TranslateResult {
    /** Normalized translated text. */
    text: string;
    /** Which provider produced the result. */
    provider: ProviderKind;
    /** Model name reported by the provider, when known. */
    model?: string;
    /** Raw provider response, kept for debugging. */
    raw?: unknown;
}
/** Minimal contract a translation backend must implement. */
interface LLMProvider {
    readonly kind: ProviderKind;
    readonly model?: string;
    /** Sends a fully-built prompt and returns the generated text. */
    generate(prompt: string): Promise<{
        text: string;
        raw?: unknown;
    }>;
}
/** Options for {@link GeminiProvider}. */
interface GeminiProviderOptions {
    /** Google Generative AI API key (required). */
    apiKey: string;
    /** Model name; defaults to "gemini-1.5-flash". */
    model?: string;
}
/** Options for {@link OllamaProvider}. */
interface OllamaProviderOptions {
    /** Server base URL; defaults to "http://127.0.0.1:11434". */
    host?: string;
    /** Ollama model name (required). */
    model: string;
}

/** Provider-agnostic translation client. */
declare class TranslatorClient {
    private readonly provider;
    constructor(provider: LLMProvider);
    /** Builds the prompt, calls the provider, and normalizes the output text. */
    translate(opts: TranslateOptions): Promise<TranslateResult>;
}

/** Cloud provider backed by Google Generative AI. */
declare class GeminiProvider implements LLMProvider {
    readonly kind: "gemini";
    readonly model: string;
    private readonly client;
    constructor(opts: GeminiProviderOptions);
    generate(prompt: string): Promise<{
        text: string;
        raw?: unknown;
    }>;
}

/** Local provider talking to an Ollama server over HTTP. */
declare class OllamaProvider implements LLMProvider {
    readonly kind: "ollama";
    readonly model: string;
    private readonly host;
    constructor(opts: OllamaProviderOptions);
    generate(prompt: string): Promise<{
        text: string;
        raw?: unknown;
    }>;
}

export { GeminiProvider, type GeminiProviderOptions, type LLMProvider, OllamaProvider, type OllamaProviderOptions, type ProviderKind, type TranslateOptions, type TranslateResult, type TranslateTone, TranslatorClient };
@@ -0,0 +1,61 @@
1
/** Identifier of the backing LLM service. */
type ProviderKind = "gemini" | "ollama";
/** Tone hint injected verbatim into the translation prompt. */
type TranslateTone = "neutral" | "formal" | "informal" | "friendly" | "technical";
/** Input accepted by {@link TranslatorClient.translate}. */
interface TranslateOptions {
    /** Text to translate (required, non-empty). */
    text: string;
    /** Target language name, e.g. "Korean" or "English" (required). */
    targetLang: string;
    /** Source language; when omitted the prompt asks for auto-detection. */
    sourceLang?: string;
    /** Desired tone; defaults to "neutral". */
    tone?: TranslateTone;
    /** Term mapping (source => target) the model is told to follow strictly. */
    glossary?: Record<string, string>;
}
/** Result returned by {@link TranslatorClient.translate}. */
interface TranslateResult {
    /** Normalized translated text. */
    text: string;
    /** Which provider produced the result. */
    provider: ProviderKind;
    /** Model name reported by the provider, when known. */
    model?: string;
    /** Raw provider response, kept for debugging. */
    raw?: unknown;
}
/** Minimal contract a translation backend must implement. */
interface LLMProvider {
    readonly kind: ProviderKind;
    readonly model?: string;
    /** Sends a fully-built prompt and returns the generated text. */
    generate(prompt: string): Promise<{
        text: string;
        raw?: unknown;
    }>;
}
/** Options for {@link GeminiProvider}. */
interface GeminiProviderOptions {
    /** Google Generative AI API key (required). */
    apiKey: string;
    /** Model name; defaults to "gemini-1.5-flash". */
    model?: string;
}
/** Options for {@link OllamaProvider}. */
interface OllamaProviderOptions {
    /** Server base URL; defaults to "http://127.0.0.1:11434". */
    host?: string;
    /** Ollama model name (required). */
    model: string;
}

/** Provider-agnostic translation client. */
declare class TranslatorClient {
    private readonly provider;
    constructor(provider: LLMProvider);
    /** Builds the prompt, calls the provider, and normalizes the output text. */
    translate(opts: TranslateOptions): Promise<TranslateResult>;
}

/** Cloud provider backed by Google Generative AI. */
declare class GeminiProvider implements LLMProvider {
    readonly kind: "gemini";
    readonly model: string;
    private readonly client;
    constructor(opts: GeminiProviderOptions);
    generate(prompt: string): Promise<{
        text: string;
        raw?: unknown;
    }>;
}

/** Local provider talking to an Ollama server over HTTP. */
declare class OllamaProvider implements LLMProvider {
    readonly kind: "ollama";
    readonly model: string;
    private readonly host;
    constructor(opts: OllamaProviderOptions);
    generate(prompt: string): Promise<{
        text: string;
        raw?: unknown;
    }>;
}

export { GeminiProvider, type GeminiProviderOptions, type LLMProvider, OllamaProvider, type OllamaProviderOptions, type ProviderKind, type TranslateOptions, type TranslateResult, type TranslateTone, TranslatorClient };
package/dist/index.js ADDED
@@ -0,0 +1,113 @@
1
+ import { GoogleGenerativeAI } from '@google/generative-ai';
2
+
3
+ // src/prompt.ts
4
// Renders a glossary map as `- "src" => "dst"` bullet lines; "" when absent or empty.
function formatGlossary(glossary) {
  const pairs = glossary ? Object.entries(glossary) : [];
  const bullets = [];
  for (const [src, dst] of pairs) {
    bullets.push(`- "${src}" => "${dst}"`);
  }
  return bullets.join("\n");
}
// Builds the single instruction prompt string sent to the LLM provider.
// Source language falls back to "auto", tone to "neutral".
function buildTranslatePrompt(opts) {
  const trimmedSource = opts.sourceLang?.trim();
  const source = trimmedSource ? trimmedSource : "auto";
  const tone = opts.tone ?? "neutral";
  const glossary = formatGlossary(opts.glossary);
  const glossaryRule = glossary
    ? "- Use this glossary strictly (source => target):\n" + glossary
    : "- No glossary provided.";
  const lines = [
    "You are a professional translation engine.",
    "Translate the user's text precisely and naturally.",
    "",
    "Rules:",
    "- Output ONLY the translated text. No quotes, no markdown, no explanations.",
    "- Preserve line breaks.",
    "- Keep code blocks, URLs, file paths, and identifiers unchanged.",
    "- If the input is already in the target language, return it unchanged.",
    glossaryRule,
    "",
    `Source language: ${source}`,
    `Target language: ${opts.targetLang}`,
    `Tone: ${tone}`,
    "",
    "Text to translate:",
    opts.text
  ];
  return lines.join("\n");
}
34
+
35
+ // src/client.ts
36
// Provider-agnostic translation client: validates input, builds the prompt,
// delegates to the provider, and normalizes the returned text.
var TranslatorClient = class {
  provider;
  constructor(provider) {
    this.provider = provider;
  }
  // Throws when text/targetLang are missing; otherwise returns
  // { text, provider, model, raw }.
  async translate(opts) {
    if (!opts?.text) throw new Error("translate: text is required");
    if (!opts?.targetLang) throw new Error("translate: targetLang is required");
    const out = await this.provider.generate(buildTranslatePrompt(opts));
    return {
      text: cleanupText(out.text),
      provider: this.provider.kind,
      model: this.provider.model,
      raw: out.raw
    };
  }
};
// Normalizes CRLF to LF, strips per-line trailing spaces/tabs, and trims the end.
function cleanupText(s) {
  const unixNewlines = s.split("\r\n").join("\n");
  return unixNewlines.replace(/[ \t]+\n/g, "\n").trimEnd();
}
57
// Cloud provider backed by the Google Generative AI SDK; apiKey is mandatory.
var GeminiProvider = class {
  kind = "gemini";
  model;
  client;
  constructor(opts) {
    if (!opts?.apiKey) throw new Error("GeminiProvider: apiKey is required");
    this.model = opts.model ?? "gemini-1.5-flash";
    this.client = new GoogleGenerativeAI(opts.apiKey);
  }
  // Sends the prompt and returns the flattened text plus the raw SDK response.
  async generate(prompt) {
    const handle = this.client.getGenerativeModel({ model: this.model });
    const response = await handle.generateContent(prompt);
    return { text: response.response.text(), raw: response };
  }
};
73
+
74
+ // src/providers/ollama.ts
75
// Local provider talking to an Ollama server over HTTP; model is mandatory.
// The host defaults to http://127.0.0.1:11434 and trailing slashes are dropped.
var OllamaProvider = class {
  kind = "ollama";
  model;
  host;
  constructor(opts) {
    if (!opts?.model) throw new Error("OllamaProvider: model is required");
    this.model = opts.model;
    const base = opts.host ?? "http://127.0.0.1:11434";
    this.host = base.replace(/\/+$/, "");
  }
  // POSTs a non-streaming /api/generate request and returns the response text.
  async generate(prompt) {
    const payload = JSON.stringify({ model: this.model, prompt, stream: false });
    const res = await fetch(`${this.host}/api/generate`, {
      method: "POST",
      headers: { "content-type": "application/json" },
      body: payload
    });
    if (!res.ok) {
      const detail = await safeReadText(res);
      const suffix = detail ? ` - ${detail}` : "";
      throw new Error(`OllamaProvider: HTTP ${res.status} ${res.statusText}${suffix}`);
    }
    const json = await res.json();
    return { text: (json.response ?? "").toString(), raw: json };
  }
};
103
// Best-effort read of a response body for error messages; yields "" on failure.
async function safeReadText(res) {
  let text = "";
  try {
    text = await res.text();
  } catch {
    // ignore: body may be unreadable or already consumed
  }
  return text;
}
110
+
111
+ export { GeminiProvider, OllamaProvider, TranslatorClient };
112
+ //# sourceMappingURL=index.js.map
113
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/prompt.ts","../src/client.ts","../src/providers/gemini.ts","../src/providers/ollama.ts"],"names":[],"mappings":";;;AAEA,SAAS,eAAe,QAAA,EAA2C;AACjE,EAAA,IAAI,CAAC,UAAU,OAAO,EAAA;AACtB,EAAA,MAAM,OAAA,GAAU,MAAA,CAAO,OAAA,CAAQ,QAAQ,CAAA;AACvC,EAAA,IAAI,OAAA,CAAQ,MAAA,KAAW,CAAA,EAAG,OAAO,EAAA;AACjC,EAAA,OAAO,OAAA,CAAQ,GAAA,CAAI,CAAC,CAAC,GAAG,CAAC,CAAA,KAAM,CAAA,GAAA,EAAM,CAAC,CAAA,MAAA,EAAS,CAAC,CAAA,CAAA,CAAG,CAAA,CAAE,KAAK,IAAI,CAAA;AAChE;AAEO,SAAS,qBAAqB,IAAA,EAAgC;AACnE,EAAA,MAAM,MAAA,GAAS,KAAK,UAAA,EAAY,IAAA,KAAS,IAAA,CAAK,UAAA,CAAW,MAAK,GAAI,MAAA;AAClE,EAAA,MAAM,IAAA,GAAO,KAAK,IAAA,IAAQ,SAAA;AAC1B,EAAA,MAAM,QAAA,GAAW,cAAA,CAAe,IAAA,CAAK,QAAQ,CAAA;AAE7C,EAAA,OAAO;AAAA,IACL,4CAAA;AAAA,IACA,oDAAA;AAAA,IACA,EAAA;AAAA,IACA,QAAA;AAAA,IACA,6EAAA;AAAA,IACA,yBAAA;AAAA,IACA,kEAAA;AAAA,IACA,wEAAA;AAAA,IACA,QAAA,GACI,CAAA;AAAA,EAAqD,QAAQ,CAAA,CAAA,GAC7D,yBAAA;AAAA,IACJ,EAAA;AAAA,IACA,oBAAoB,MAAM,CAAA,CAAA;AAAA,IAC1B,CAAA,iBAAA,EAAoB,KAAK,UAAU,CAAA,CAAA;AAAA,IACnC,SAAS,IAAI,CAAA,CAAA;AAAA,IACb,EAAA;AAAA,IACA,oBAAA;AAAA,IACA,IAAA,CAAK;AAAA,GACP,CAAE,KAAK,IAAI,CAAA;AACb;;;AC/BO,IAAM,mBAAN,MAAuB;AAAA,EAC5B,YAA6B,QAAA,EAAuB;AAAvB,IAAA,IAAA,CAAA,QAAA,GAAA,QAAA;AAAA,EAAwB;AAAA,EAAxB,QAAA;AAAA,EAE7B,MAAM,UAAU,IAAA,EAAkD;AAChE,IAAA,IAAI,CAAC,IAAA,EAAM,IAAA,EAAM,MAAM,IAAI,MAAM,6BAA6B,CAAA;AAC9D,IAAA,IAAI,CAAC,IAAA,EAAM,UAAA,EAAY,MAAM,IAAI,MAAM,mCAAmC,CAAA;AAE1E,IAAA,MAAM,MAAA,GAAS,qBAAqB,IAAI,CAAA;AACxC,IAAA,MAAM,GAAA,GAAM,MAAM,IAAA,CAAK,QAAA,CAAS,SAAS,MAAM,CAAA;AAE/C,IAAA,OAAO;AAAA,MACL,IAAA,EAAM,WAAA,CAAY,GAAA,CAAI,IAAI,CAAA;AAAA,MAC1B,QAAA,EAAU,KAAK,QAAA,CAAS,IAAA;AAAA,MACxB,KAAA,EAAO,KAAK,QAAA,CAAS,KAAA;AAAA,MACrB,KAAK,GAAA,CAAI;AAAA,KACX;AAAA,EACF;AACF;AAEA,SAAS,YAAY,CAAA,EAAmB;AAEtC,EAAA,OAAO,CAAA,CAAE,QAAQ,OAAA,EAAS,IAAI,EAAE,OAAA,CAAQ,WAAA,EAAa,IAAI,CAAA,CAAE,OAAA,EAAQ;AACrE;ACtBO,IAAM,iBAAN,MAA4C;AAAA,EACxC,IAAA,GAAO,QAAA;AAAA,EACP,KAAA;AAAA,EACQ,MAAA;AAAA,EAEjB,YAAY,IAAA,EAA6B;AACvC,IAAA,IAAI,CAAC,IAAA,EAAM,MAAA,EAAQ,MAAM,IAAI,MAAM,oCAAoC,CAA
A;AACvE,IAAA,IAAA,CAAK,KAAA,GAAQ,KAAK,KAAA,IAAS,kBAAA;AAC3B,IAAA,IAAA,CAAK,MAAA,GAAS,IAAI,kBAAA,CAAmB,IAAA,CAAK,MAAM,CAAA;AAAA,EAClD;AAAA,EAEA,MAAM,SAAS,MAAA,EAA0D;AACvE,IAAA,MAAM,KAAA,GAAQ,KAAK,MAAA,CAAO,kBAAA,CAAmB,EAAE,KAAA,EAAO,IAAA,CAAK,OAAO,CAAA;AAClE,IAAA,MAAM,GAAA,GAAM,MAAM,KAAA,CAAM,eAAA,CAAgB,MAAM,CAAA;AAC9C,IAAA,MAAM,IAAA,GAAO,GAAA,CAAI,QAAA,CAAS,IAAA,EAAK;AAC/B,IAAA,OAAO,EAAE,IAAA,EAAM,GAAA,EAAK,GAAA,EAAI;AAAA,EAC1B;AACF;;;ACLO,IAAM,iBAAN,MAA4C;AAAA,EACxC,IAAA,GAAO,QAAA;AAAA,EACP,KAAA;AAAA,EACQ,IAAA;AAAA,EAEjB,YAAY,IAAA,EAA6B;AACvC,IAAA,IAAI,CAAC,IAAA,EAAM,KAAA,EAAO,MAAM,IAAI,MAAM,mCAAmC,CAAA;AACrE,IAAA,IAAA,CAAK,QAAQ,IAAA,CAAK,KAAA;AAClB,IAAA,IAAA,CAAK,QAAQ,IAAA,CAAK,IAAA,IAAQ,wBAAA,EAA0B,OAAA,CAAQ,QAAQ,EAAE,CAAA;AAAA,EACxE;AAAA,EAEA,MAAM,SAAS,MAAA,EAA0D;AACvE,IAAA,MAAM,IAAA,GAA8B;AAAA,MAClC,OAAO,IAAA,CAAK,KAAA;AAAA,MACZ,MAAA;AAAA,MACA,MAAA,EAAQ;AAAA,KACV;AAEA,IAAA,MAAM,MAAM,MAAM,KAAA,CAAM,CAAA,EAAG,IAAA,CAAK,IAAI,CAAA,aAAA,CAAA,EAAiB;AAAA,MACnD,MAAA,EAAQ,MAAA;AAAA,MACR,OAAA,EAAS,EAAE,cAAA,EAAgB,kBAAA,EAAmB;AAAA,MAC9C,IAAA,EAAM,IAAA,CAAK,SAAA,CAAU,IAAI;AAAA,KAC1B,CAAA;AAED,IAAA,IAAI,CAAC,IAAI,EAAA,EAAI;AACX,MAAA,MAAM,GAAA,GAAM,MAAM,YAAA,CAAa,GAAG,CAAA;AAClC,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,qBAAA,EAAwB,GAAA,CAAI,MAAM,CAAA,CAAA,EAAI,GAAA,CAAI,UAAU,CAAA,EAAG,GAAA,GAAM,CAAA,GAAA,EAAM,GAAG,CAAA,CAAA,GAAK,EAAE,CAAA,CAAE,CAAA;AAAA,IACjG;AAEA,IAAA,MAAM,IAAA,GAAQ,MAAM,GAAA,CAAI,IAAA,EAAK;AAC7B,IAAA,OAAO,EAAE,OAAO,IAAA,CAAK,QAAA,IAAY,IAAI,QAAA,EAAS,EAAG,KAAK,IAAA,EAAK;AAAA,EAC7D;AACF;AAEA,eAAe,aAAa,GAAA,EAAgC;AAC1D,EAAA,IAAI;AACF,IAAA,OAAO,MAAM,IAAI,IAAA,EAAK;AAAA,EACxB,CAAA,CAAA,MAAQ;AACN,IAAA,OAAO,EAAA;AAAA,EACT;AACF","file":"index.js","sourcesContent":["import type { TranslateOptions } from \"./types.js\";\r\n\r\nfunction formatGlossary(glossary?: Record<string, string>): string {\r\n if (!glossary) return \"\";\r\n const entries = Object.entries(glossary);\r\n if (entries.length === 0) return \"\";\r\n return entries.map(([k, v]) => `- \"${k}\" => 
\"${v}\"`).join(\"\\n\");\r\n}\r\n\r\nexport function buildTranslatePrompt(opts: TranslateOptions): string {\r\n const source = opts.sourceLang?.trim() ? opts.sourceLang.trim() : \"auto\";\r\n const tone = opts.tone ?? \"neutral\";\r\n const glossary = formatGlossary(opts.glossary);\r\n\r\n return [\r\n \"You are a professional translation engine.\",\r\n \"Translate the user's text precisely and naturally.\",\r\n \"\",\r\n \"Rules:\",\r\n \"- Output ONLY the translated text. No quotes, no markdown, no explanations.\",\r\n \"- Preserve line breaks.\",\r\n \"- Keep code blocks, URLs, file paths, and identifiers unchanged.\",\r\n \"- If the input is already in the target language, return it unchanged.\",\r\n glossary\r\n ? `- Use this glossary strictly (source => target):\\n${glossary}`\r\n : \"- No glossary provided.\",\r\n \"\",\r\n `Source language: ${source}`,\r\n `Target language: ${opts.targetLang}`,\r\n `Tone: ${tone}`,\r\n \"\",\r\n \"Text to translate:\",\r\n opts.text\r\n ].join(\"\\n\");\r\n}\r\n\r\n","import { buildTranslatePrompt } from \"./prompt.js\";\r\nimport type { LLMProvider, TranslateOptions, TranslateResult } from \"./types.js\";\r\n\r\nexport class TranslatorClient {\r\n constructor(private readonly provider: LLMProvider) {}\r\n\r\n async translate(opts: TranslateOptions): Promise<TranslateResult> {\r\n if (!opts?.text) throw new Error(\"translate: text is required\");\r\n if (!opts?.targetLang) throw new Error(\"translate: targetLang is required\");\r\n\r\n const prompt = buildTranslatePrompt(opts);\r\n const out = await this.provider.generate(prompt);\r\n\r\n return {\r\n text: cleanupText(out.text),\r\n provider: this.provider.kind,\r\n model: this.provider.model,\r\n raw: out.raw\r\n };\r\n }\r\n}\r\n\r\nfunction cleanupText(s: string): string {\r\n // Keep it conservative: just normalize trailing whitespace/newlines.\r\n return s.replace(/\\r\\n/g, \"\\n\").replace(/[ \\t]+\\n/g, \"\\n\").trimEnd();\r\n}\r\n\r\n","import { 
GoogleGenerativeAI } from \"@google/generative-ai\";\r\nimport type { GeminiProviderOptions, LLMProvider } from \"../types.js\";\r\n\r\nexport class GeminiProvider implements LLMProvider {\r\n readonly kind = \"gemini\" as const;\r\n readonly model: string;\r\n private readonly client: GoogleGenerativeAI;\r\n\r\n constructor(opts: GeminiProviderOptions) {\r\n if (!opts?.apiKey) throw new Error(\"GeminiProvider: apiKey is required\");\r\n this.model = opts.model ?? \"gemini-1.5-flash\";\r\n this.client = new GoogleGenerativeAI(opts.apiKey);\r\n }\r\n\r\n async generate(prompt: string): Promise<{ text: string; raw?: unknown }> {\r\n const model = this.client.getGenerativeModel({ model: this.model });\r\n const res = await model.generateContent(prompt);\r\n const text = res.response.text();\r\n return { text, raw: res };\r\n }\r\n}\r\n\r\n","import type { LLMProvider, OllamaProviderOptions } from \"../types.js\";\r\n\r\ntype OllamaGenerateRequest = {\r\n model: string;\r\n prompt: string;\r\n stream?: boolean;\r\n options?: Record<string, unknown>;\r\n};\r\n\r\ntype OllamaGenerateResponse = {\r\n response?: string;\r\n done?: boolean;\r\n model?: string;\r\n};\r\n\r\nexport class OllamaProvider implements LLMProvider {\r\n readonly kind = \"ollama\" as const;\r\n readonly model: string;\r\n private readonly host: string;\r\n\r\n constructor(opts: OllamaProviderOptions) {\r\n if (!opts?.model) throw new Error(\"OllamaProvider: model is required\");\r\n this.model = opts.model;\r\n this.host = (opts.host ?? 
\"http://127.0.0.1:11434\").replace(/\\/+$/, \"\");\r\n }\r\n\r\n async generate(prompt: string): Promise<{ text: string; raw?: unknown }> {\r\n const body: OllamaGenerateRequest = {\r\n model: this.model,\r\n prompt,\r\n stream: false\r\n };\r\n\r\n const res = await fetch(`${this.host}/api/generate`, {\r\n method: \"POST\",\r\n headers: { \"content-type\": \"application/json\" },\r\n body: JSON.stringify(body)\r\n });\r\n\r\n if (!res.ok) {\r\n const msg = await safeReadText(res);\r\n throw new Error(`OllamaProvider: HTTP ${res.status} ${res.statusText}${msg ? ` - ${msg}` : \"\"}`);\r\n }\r\n\r\n const json = (await res.json()) as OllamaGenerateResponse;\r\n return { text: (json.response ?? \"\").toString(), raw: json };\r\n }\r\n}\r\n\r\nasync function safeReadText(res: Response): Promise<string> {\r\n try {\r\n return await res.text();\r\n } catch {\r\n return \"\";\r\n }\r\n}\r\n\r\n"]}
package/package.json ADDED
@@ -0,0 +1,42 @@
1
+ {
2
+ "name": "@beg4660/translator",
3
+ "version": "0.1.0",
4
+ "description": "Unified Gemini/Ollama translation client",
5
+ "license": "MIT",
6
+ "repository": {
7
+ "type": "git",
8
+ "url": ""
9
+ },
10
+ "type": "module",
11
+ "main": "./dist/index.cjs",
12
+ "module": "./dist/index.js",
13
+ "types": "./dist/index.d.ts",
14
+ "exports": {
15
+ ".": {
16
+ "types": "./dist/index.d.ts",
17
+ "import": "./dist/index.js",
18
+ "require": "./dist/index.cjs"
19
+ }
20
+ },
21
+ "files": [
22
+ "dist"
23
+ ],
24
+ "sideEffects": false,
25
+ "engines": {
26
+ "node": ">=18"
27
+ },
28
+ "scripts": {
29
+ "build": "tsup",
30
+ "dev": "tsup --watch",
31
+ "chat:ollama": "npm run build && node ./scripts/chat-ollama.mjs",
32
+ "typecheck": "tsc -p tsconfig.json --noEmit",
33
+ "prepublishOnly": "npm run build"
34
+ },
35
+ "dependencies": {
36
+ "@google/generative-ai": "^0.24.1"
37
+ },
38
+ "devDependencies": {
39
+ "tsup": "^8.5.0",
40
+ "typescript": "^5.8.3"
41
+ }
42
+ }