@nogataka/imgen 0.1.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -10,25 +10,32 @@ npm install -g @nogataka/imgen
 
  ## Setup
 
- ### Environment variables (recommended)
+ ### Environment variables
 
  ```bash
  export AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com"
  export AZURE_OPENAI_API_KEY="your-api-key"
  export AZURE_OPENAI_DEPLOYMENT_NAME="gpt-5.1"
  export AZURE_OPENAI_DEPLOYMENT_NAME_IMAGE="gpt-image-1.5"
- export AZURE_OPENAI_API_VERSION="2024-02-15-preview"
- export AZURE_OPENAI_IMAGE_API_VERSION="2025-04-01-preview"
+ export AZURE_OPENAI_API_VERSION="2024-02-15-preview" # optional
+ export AZURE_OPENAI_IMAGE_API_VERSION="2025-04-01-preview" # optional
  ```
 
- ### Interactive setup
+ ### .env file
+
+ Instead of environment variables, you can provide the settings in a `.env` file. Search order:
+
+ 1. `cwd/.env` (current working directory)
+ 2. `~/.imgen/.env`
 
  ```bash
- imgen configure
+ # .env
+ AZURE_OPENAI_ENDPOINT=https://your-resource.openai.azure.com
+ AZURE_OPENAI_API_KEY=your-api-key
+ AZURE_OPENAI_DEPLOYMENT_NAME=gpt-5.1
+ AZURE_OPENAI_DEPLOYMENT_NAME_IMAGE=gpt-image-1.5
  ```
 
- Lets you configure the Azure OpenAI connection details and defaults interactively. Settings are saved to `~/.imgen/config.json`.
-
  If environment variables are set, they take precedence.
 
  ## Usage
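The hunk above replaces the interactive `imgen configure` flow with `.env` file support, and process environment variables still win over `.env` values. A minimal sketch of that resolution order, mirroring the bundled `getAzureConfig()` that appears later in this diff (the helper name `resolveAzureSetting` is illustrative, not part of the package):

```typescript
// Sketch: process.env wins over values parsed from a .env file, and the
// API versions fall back to defaults when neither source sets them.
type EnvMap = Record<string, string | undefined>;

function resolveAzureSetting(name: string, envFileVars: EnvMap, fallback?: string): string | undefined {
  return process.env[name] ?? envFileVars[name] ?? fallback;
}

// envFileVars would come from parsing cwd/.env or ~/.imgen/.env
const envFileVars: EnvMap = { AZURE_OPENAI_ENDPOINT: "https://from-dotenv.openai.azure.com" };
const endpoint = resolveAzureSetting("AZURE_OPENAI_ENDPOINT", envFileVars);
const apiVersion = resolveAzureSetting("AZURE_OPENAI_API_VERSION", envFileVars, "2024-02-15-preview");
```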
@@ -54,7 +61,7 @@ imgen image gen "商品写真" -p builtin:landscape
  | `--output <path>` | `-o` | Output file or directory | current directory |
  | `--json` | | Output in JSON format | - |
  | `--dry-run` | | Check the settings without calling the API | - |
- | `--debug` | `-d` | Enable debug logging | - |
+ | `--debug` | `-d` | Show debug information | - |
 
  ### Image editing
 
@@ -95,11 +102,7 @@ imgen image explain chart.png -c "Q4 sales report" -f json -o description.json
 
  Supported languages: `ja` (Japanese), `en` (English), `zh` (Chinese), `ko` (Korean), `es` (Spanish), `fr` (French), `de` (German), `it` (Italian), `ru` (Russian), `vi` (Vietnamese)
 
- ## Presets
-
- Frequently used settings can be saved and recalled as presets.
-
- ### Built-in presets
+ ## Built-in presets
 
  | Name | Size | Quality | Use case |
  |------|--------|------|------|
@@ -109,32 +112,95 @@ imgen image explain chart.png -c "Q4 sales report" -f json -o description.json
  | `builtin:draft` | 1024x1024 | low | Drafts, prototypes |
  | `builtin:photo` | 1536x1024 | high | Product photos |
 
- ### Custom presets
+ ## Configuration precedence
+
+ 1. CLI options (highest priority)
+ 2. Preset values (when `-p` is given)
+ 3. Default values
+
+ ## Using imgen as an SDK
+
+ imgen can be used not only as a CLI but also as a Node.js library. Other applications, such as slide generators or LLM tools, can call its image generation, editing, and explanation features.
 
  ```bash
- imgen preset save myhd -s 1536x1024 -q high -f png
- imgen preset list
- imgen preset delete myhd
+ npm install @nogataka/imgen
  ```
 
- Presets are saved to `~/.imgen/presets.json`.
+ ### Authentication settings
 
- ## Logs
+ `getAzureConfig()` resolves the configuration in the following order:
 
- Operation logs are saved to `~/.imgen/logs/` in JSON Lines format.
+ 1. **Environment variables** (highest priority)
+ 2. **`.env` file** (`cwd/.env` → `~/.imgen/.env`)
+
+ If your application already loads a `.env` file (dotenv, Next.js, etc.), simply add the entries there:
 
  ```bash
- imgen log                  # last 20 entries
- imgen log -n 50 -l debug   # last 50 entries (DEBUG and above)
- imgen log -l error         # errors only
+ # your application's .env
+ AZURE_OPENAI_ENDPOINT=https://your-resource.openai.azure.com
+ AZURE_OPENAI_API_KEY=your-api-key
+ AZURE_OPENAI_DEPLOYMENT_NAME=gpt-5.1
+ AZURE_OPENAI_DEPLOYMENT_NAME_IMAGE=gpt-image-1.5
  ```
 
- ## Configuration precedence
+ ### Usage example
+
+ ```typescript
+ import {
+   AzureImageClient,
+   AzureChatClient,
+   getAzureConfig,
+   saveFileWithUniqueNameIfExists,
+ } from "@nogataka/imgen/sdk";
+
+ // Load the Azure OpenAI configuration (environment variables or .env file)
+ const config = await getAzureConfig();
+
+ // Image generation
+ const imageClient = new AzureImageClient(config);
+ const imageBytes = await imageClient.generateImage("A beach at sunset", {
+   size: "1536x1024",
+   quality: "high",
+ });
+ const savedPath = await saveFileWithUniqueNameIfExists("sunset.png", imageBytes);
+
+ // Prompt expansion and file name generation
+ const chatClient = new AzureChatClient(config);
+ const prompt = await chatClient.generatePrompt("A cute cat mascot");
+ const fileName = await chatClient.generateFileName("A cute cat mascot");
+
+ // Image editing
+ import * as fs from "node:fs/promises";
+ const photo = Buffer.from(await fs.readFile("photo.jpg"));
+ const edited = await imageClient.editImage(photo, "Change the background to a blue sky");
+
+ // Image explanation
+ import { readImageFile } from "@nogataka/imgen/sdk";
+ const imgData = await readImageFile("screenshot.png");
+ const explanation = await chatClient.generateExplanation(imgData, "ja");
+ ```
 
- 1. CLI options (highest priority)
- 2. Preset values (when `-p` is given)
- 3. Configuration file (`~/.imgen/config.json`)
- 4. Default values
+ ### Example: embedding as an LLM tool
+
+ ```typescript
+ import { AzureImageClient, AzureChatClient, getAzureConfig, saveFileWithUniqueNameIfExists } from "@nogataka/imgen/sdk";
+
+ const generateImageTool = {
+   name: "generate_image",
+   description: "Generate an image from text",
+   execute: async ({ theme }: { theme: string }) => {
+     const config = await getAzureConfig();
+     const chat = new AzureChatClient(config);
+     const image = new AzureImageClient(config);
+
+     const prompt = await chat.generatePrompt(theme);
+     const fileName = await chat.generateFileName(theme);
+     const bytes = await image.generateImage(prompt, { size: "1024x1024", quality: "high" });
+     const path = await saveFileWithUniqueNameIfExists(`${fileName}.png`, bytes);
+     return { path };
+   },
+ };
+ ```
 
  ## Development
 
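The hunk above documents the SDK entry point and the configuration precedence (CLI option > preset value > default). A minimal sketch of applying that order through the exported `getPreset` helper shown in the bundled code below; `cliSize` and `cliQuality` stand in for parsed command-line flags and are illustrative:

```typescript
import { getPreset } from "@nogataka/imgen/sdk";

// Hypothetical values parsed from CLI flags; undefined means "flag not given".
const cliSize: string | undefined = undefined;
const cliQuality: string | undefined = "low";

const preset = getPreset("builtin:photo"); // { size: "1536x1024", quality: "high" }

// CLI option > preset value > default value
const size = cliSize ?? preset?.size ?? "1024x1024";
const quality = cliQuality ?? preset?.quality ?? "high";
console.log({ size, quality }); // { size: "1536x1024", quality: "low" }
```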
@@ -0,0 +1,371 @@
+ // src/utils/config.ts
+ import * as fs from "fs/promises";
+ import * as os from "os";
+ import * as path from "path";
+ function parseEnvFile(content) {
+   const result = {};
+   for (const line of content.split("\n")) {
+     const trimmed = line.trim();
+     if (!trimmed || trimmed.startsWith("#")) continue;
+     const eqIndex = trimmed.indexOf("=");
+     if (eqIndex === -1) continue;
+     const key = trimmed.slice(0, eqIndex).trim();
+     let value = trimmed.slice(eqIndex + 1).trim();
+     if (value.startsWith('"') && value.endsWith('"') || value.startsWith("'") && value.endsWith("'")) {
+       value = value.slice(1, -1);
+     }
+     result[key] = value;
+   }
+   return result;
+ }
+ async function loadEnvVars() {
+   const candidates = [path.join(process.cwd(), ".env"), path.join(os.homedir(), ".imgen", ".env")];
+   for (const envPath of candidates) {
+     try {
+       const content = await fs.readFile(envPath, "utf-8");
+       return parseEnvFile(content);
+     } catch {
+     }
+   }
+   return {};
+ }
+ async function getAzureConfig() {
+   const envVars = await loadEnvVars();
+   const endpoint = process.env.AZURE_OPENAI_ENDPOINT || envVars.AZURE_OPENAI_ENDPOINT;
+   const apiKey = process.env.AZURE_OPENAI_API_KEY || envVars.AZURE_OPENAI_API_KEY;
+   const deploymentName = process.env.AZURE_OPENAI_DEPLOYMENT_NAME || envVars.AZURE_OPENAI_DEPLOYMENT_NAME;
+   const imageDeploymentName = process.env.AZURE_OPENAI_DEPLOYMENT_NAME_IMAGE || envVars.AZURE_OPENAI_DEPLOYMENT_NAME_IMAGE;
+   const apiVersion = process.env.AZURE_OPENAI_API_VERSION || envVars.AZURE_OPENAI_API_VERSION || "2024-02-15-preview";
+   const imageApiVersion = process.env.AZURE_OPENAI_IMAGE_API_VERSION || envVars.AZURE_OPENAI_IMAGE_API_VERSION || "2025-04-01-preview";
+   if (!endpoint || !apiKey || !deploymentName || !imageDeploymentName) {
+     throw new Error(
+       "Azure OpenAI の設定が見つかりません。環境変数または .env ファイルで設定してください。"
+     );
+   }
+   return { endpoint, apiKey, deploymentName, imageDeploymentName, apiVersion, imageApiVersion };
+ }
+
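When none of the required values resolve, `getAzureConfig()` throws the error above (roughly: "Azure OpenAI configuration not found; set it via environment variables or a .env file"). A minimal sketch of handling that at an application boundary (the wrapper name and exit behavior are illustrative, not part of the package):

```typescript
import { getAzureConfig } from "@nogataka/imgen/sdk";

async function loadConfigOrExit() {
  try {
    return await getAzureConfig();
  } catch (error) {
    // The SDK throws a plain Error when endpoint, API key, or deployment names are unset.
    console.error(error instanceof Error ? error.message : String(error));
    process.exit(1);
  }
}
```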
+ // src/utils/file.ts
+ import * as fs2 from "fs/promises";
+ async function generateUniqueFilePath(outputPath, maxRetries = 3) {
+   let finalPath = outputPath;
+   let retryCount = 0;
+   while (retryCount < maxRetries) {
+     try {
+       await fs2.stat(finalPath);
+       const baseName = finalPath.slice(0, finalPath.lastIndexOf("."));
+       const ext = finalPath.slice(finalPath.lastIndexOf("."));
+       const rand = Math.floor(Math.random() * 1e4).toString().padStart(4, "0");
+       finalPath = `${baseName}-${rand}${ext}`;
+       retryCount++;
+     } catch (error) {
+       if (error.code === "ENOENT") {
+         return finalPath;
+       }
+       throw error;
+     }
+   }
+   throw new Error(
+     `ファイル名の生成に失敗しました。${maxRetries}回試行しましたが、すべて既存のファイル名と衝突しています。`
+   );
+ }
+ async function saveFileWithUniqueNameIfExists(outputPath, data, maxRetries = 3) {
+   const finalPath = await generateUniqueFilePath(outputPath, maxRetries);
+   await fs2.writeFile(finalPath, data);
+   return finalPath;
+ }
+ async function loadContextFile(contextPath) {
+   if (!contextPath) return "";
+   try {
+     return await fs2.readFile(contextPath, "utf-8");
+   } catch (error) {
+     if (error.code === "ENOENT") {
+       throw new Error(`コンテキストファイルが見つかりません: ${contextPath}`);
+     }
+     if (error instanceof Error) {
+       throw new Error(`コンテキストファイルの読み込みに失敗しました: ${error.message}`);
+     }
+     throw new Error(`コンテキストファイルの読み込みに失敗しました: ${String(error)}`);
+   }
+ }
+ async function fileExists(filePath) {
+   try {
+     await fs2.access(filePath);
+     return true;
+   } catch {
+     return false;
+   }
+ }
+
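A usage sketch of the collision handling above: if the target file already exists, the helper retries with a random four-digit suffix before writing (the file name and byte contents here are illustrative):

```typescript
import { fileExists, saveFileWithUniqueNameIfExists } from "@nogataka/imgen/sdk";

// Write bytes without clobbering an existing file; the returned path may carry
// a random four-digit suffix (e.g. report-0421.png) when report.png already exists.
const bytes = new Uint8Array([0x89, 0x50, 0x4e, 0x47]); // placeholder data
const written = await saveFileWithUniqueNameIfExists("report.png", bytes);
console.log(written, await fileExists(written)); // e.g. "report-0421.png" true
```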
+ // src/utils/azure-chat.ts
+ import { AzureOpenAI } from "openai";
+ var AzureChatClient = class {
+   client;
+   deploymentName;
+   constructor(config) {
+     this.client = new AzureOpenAI({
+       endpoint: config.endpoint,
+       apiKey: config.apiKey,
+       apiVersion: config.apiVersion,
+       deployment: config.deploymentName
+     });
+     this.deploymentName = config.deploymentName;
+   }
+   /**
+    * Generates a detailed image-generation prompt from a short theme description.
+    * Optionally accepts additional context to guide prompt generation.
+    */
+   async generatePrompt(theme, context = "") {
+     if (!theme) throw new Error("テーマが空です");
+     const prompt = `
+ Generate a detailed image generation prompt based on the following information.
+
+ Theme: ${theme}
+ ${context ? `Context:
+ ${context}
+ ` : ""}
+ Please generate a prompt that meets the following criteria:
+ 1. Include specific and detailed descriptions
+ 2. Clearly specify the image style and atmosphere
+ 3. Include all necessary elements
+ 4. Output in English
+ 5. Focus on visual elements and composition
+ 6. Include lighting and color descriptions
+ 7. Specify the mood and emotional tone
+ 8. Limit the output to approximately 1500 characters
+
+ Prompt:
+ `;
+     try {
+       const response = await this.client.chat.completions.create({
+         model: this.deploymentName,
+         messages: [{ role: "user", content: prompt }]
+       });
+       return response.choices[0]?.message?.content ?? "";
+     } catch {
+       throw new Error("プロンプトの生成に失敗しました");
+     }
+   }
+   /**
+    * Generates a sanitized file name (lowercase alphanumeric + hyphens only) from a theme.
+    */
+   async generateFileName(theme, maxLength = 40) {
+     if (!theme) throw new Error("テーマが空です");
+     try {
+       const response = await this.client.chat.completions.create({
+         model: this.deploymentName,
+         messages: [
+           {
+             role: "user",
+             content: `以下のテーマから画像のファイル名を生成してください。英小文字とハイフンのみ、${maxLength}文字以内。拡張子なし。
+
+ テーマ: ${theme}
+
+ ファイル名:`
+           }
+         ]
+       });
+       let fileName = (response.choices[0]?.message?.content ?? "").trim();
+       fileName = fileName.toLowerCase().replace(/[^a-z0-9-]/g, "").replace(/-+/g, "-").replace(/^-|-$/g, "");
+       if (fileName.length > maxLength) fileName = fileName.substring(0, maxLength);
+       return fileName || "image";
+     } catch {
+       throw new Error("ファイル名の生成に失敗しました");
+     }
+   }
+   /**
+    * Generates a detailed explanation of an image using multimodal (vision) input.
+    * The explanation language is controlled by the `lang` parameter.
+    */
+   async generateExplanation(imageData, lang = "ja", context) {
+     try {
+       const response = await this.client.chat.completions.create({
+         model: this.deploymentName,
+         messages: [
+           {
+             role: "user",
+             content: [
+               {
+                 type: "image_url",
+                 image_url: { url: `data:${imageData.mimeType};base64,${imageData.data}` }
+               },
+               {
+                 type: "text",
+                 text: `この画像について、${lang}で詳細な説明を生成してください。${context ? `
+
+ コンテキスト情報:
+ ${context}` : ""}`
+               }
+             ]
+           }
+         ]
+       });
+       return response.choices[0]?.message?.content ?? "";
+     } catch {
+       throw new Error("画像の説明生成に失敗しました");
+     }
+   }
+ };
+
+ // src/utils/azure-image.ts
+ import { AzureOpenAI as AzureOpenAI2 } from "openai";
+ var AzureImageClient = class {
+   client;
+   config;
+   constructor(config) {
+     this.config = config;
+     this.client = new AzureOpenAI2({
+       endpoint: config.endpoint,
+       apiKey: config.apiKey,
+       apiVersion: config.imageApiVersion,
+       deployment: config.imageDeploymentName
+     });
+   }
+   /**
+    * Generates an image from a text prompt using the Azure OpenAI SDK.
+    * Returns raw image bytes as a Uint8Array.
+    */
+   async generateImage(prompt, options) {
+     const { size = "1024x1024", quality = "high" } = options;
+     try {
+       const response = await this.client.images.generate({
+         model: this.config.imageDeploymentName,
+         prompt,
+         n: 1,
+         size,
+         quality,
+         output_format: "png"
+       });
+       if (!response.data || response.data.length === 0 || !response.data[0].b64_json) {
+         throw new Error("画像データが見つかりません");
+       }
+       const b64 = response.data[0].b64_json;
+       const binary = atob(b64);
+       const bytes = new Uint8Array(binary.length);
+       for (let i = 0; i < binary.length; i++) {
+         bytes[i] = binary.charCodeAt(i);
+       }
+       return bytes;
+     } catch (error) {
+       if (error instanceof Error && error.message === "画像データが見つかりません") throw error;
+       throw new Error(
+         `画像生成に失敗しました: ${error instanceof Error ? error.message : String(error)}`
+       );
+     }
+   }
+   /**
+    * Edits an existing image using the Azure OpenAI REST API.
+    * Uses fetch + FormData because the SDK's image editing support is unreliable.
+    * Returns raw image bytes as a Uint8Array.
+    */
+   async editImage(imageBuffer, prompt, options = {}) {
+     const { size = "1024x1024" } = options;
+     const url = `${this.config.endpoint}/openai/deployments/${this.config.imageDeploymentName}/images/edits?api-version=${this.config.imageApiVersion}`;
+     const blob = new Blob([imageBuffer], { type: "image/png" });
+     const formData = new FormData();
+     formData.append("image", blob, "image.png");
+     formData.append("prompt", prompt);
+     formData.append("size", size);
+     try {
+       const response = await fetch(url, {
+         method: "POST",
+         headers: { "api-key": this.config.apiKey },
+         body: formData
+       });
+       if (!response.ok) {
+         const errorText = await response.text();
+         throw new Error(`Azure API error (${response.status}): ${errorText}`);
+       }
+       const json = await response.json();
+       if (!json.data || json.data.length === 0 || !json.data[0].b64_json) {
+         throw new Error("画像データが見つかりません");
+       }
+       const b64 = json.data[0].b64_json;
+       const binary = atob(b64);
+       const bytes = new Uint8Array(binary.length);
+       for (let i = 0; i < binary.length; i++) {
+         bytes[i] = binary.charCodeAt(i);
+       }
+       return bytes;
+     } catch (error) {
+       if (error instanceof Error && error.message === "画像データが見つかりません") throw error;
+       throw new Error(
+         `画像編集に失敗しました: ${error instanceof Error ? error.message : String(error)}`
+       );
+     }
+   }
+ };
+
+ // src/lang.ts
+ var LANGUAGE_DESCRIPTIONS = {
+   ja: "日本語",
+   en: "英語",
+   zh: "中国語",
+   ko: "韓国語",
+   es: "スペイン語",
+   fr: "フランス語",
+   de: "ドイツ語",
+   it: "イタリア語",
+   ru: "ロシア語",
+   vi: "ベトナム語"
+ };
+
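`LANGUAGE_DESCRIPTIONS` maps each supported language code to its Japanese label. A minimal sketch, not part of the package, of using it to validate a code before calling `generateExplanation`:

```typescript
import { LANGUAGE_DESCRIPTIONS } from "@nogataka/imgen/sdk";

// Reject unsupported language codes before requesting an explanation.
function assertSupportedLang(lang: string): void {
  if (!(lang in LANGUAGE_DESCRIPTIONS)) {
    const supported = Object.keys(LANGUAGE_DESCRIPTIONS).join(", ");
    throw new Error(`Unsupported language "${lang}". Supported codes: ${supported}`);
  }
}

assertSupportedLang("ja"); // ok
// assertSupportedLang("pt"); // would throw
```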
+ // src/utils/image.ts
+ import * as fs3 from "fs/promises";
+ async function readImageFile(filePath) {
+   try {
+     const buffer = await fs3.readFile(filePath);
+     return {
+       data: buffer.toString("base64"),
+       mimeType: getMimeType(filePath)
+     };
+   } catch (error) {
+     if (error instanceof Error && error.message.startsWith("サポートされていない")) {
+       throw error;
+     }
+     throw new Error(
+       `画像ファイルの読み込みに失敗しました: ${error instanceof Error ? error.message : String(error)}`
+     );
+   }
+ }
+ function getMimeType(filePath) {
+   const ext = filePath.toLowerCase().split(".").pop();
+   const map = {
+     jpg: "image/jpeg",
+     jpeg: "image/jpeg",
+     png: "image/png",
+     gif: "image/gif",
+     webp: "image/webp"
+   };
+   if (ext && map[ext]) return map[ext];
+   throw new Error(`サポートされていないファイル形式です: .${ext}`);
+ }
+
+ // src/utils/preset.ts
+ var BUILTIN_PRESETS = {
+   "builtin:square": { size: "1024x1024", quality: "high" },
+   "builtin:landscape": { size: "1536x1024", quality: "high" },
+   "builtin:portrait": { size: "1024x1536", quality: "high" },
+   "builtin:draft": { size: "1024x1024", quality: "low" },
+   "builtin:photo": { size: "1536x1024", quality: "high" }
+ };
+ function getPreset(name) {
+   if (name.startsWith("builtin:") && BUILTIN_PRESETS[name]) {
+     return BUILTIN_PRESETS[name];
+   }
+   return BUILTIN_PRESETS[name] || null;
+ }
+
+ export {
+   getAzureConfig,
+   saveFileWithUniqueNameIfExists,
+   loadContextFile,
+   fileExists,
+   AzureChatClient,
+   AzureImageClient,
+   LANGUAGE_DESCRIPTIONS,
+   readImageFile,
+   getMimeType,
+   BUILTIN_PRESETS,
+   getPreset
+ };
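
The exported `loadContextFile` and the optional `context` parameter of `generatePrompt` are not exercised in the README examples above. A minimal sketch of combining them (the path `notes.md` is illustrative):

```typescript
import { AzureChatClient, getAzureConfig, loadContextFile } from "@nogataka/imgen/sdk";

// Feed an optional context file into prompt generation.
// loadContextFile returns "" for an empty path and throws if the file is missing.
const config = await getAzureConfig();
const chat = new AzureChatClient(config);

const context = await loadContextFile("notes.md"); // illustrative path
const prompt = await chat.generatePrompt("A cute cat mascot", context);
console.log(prompt);
```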