@tombcato/ai-selector-core 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +354 -13
- package/dist/index.js +784 -19
- package/dist/index.js.map +1 -1
- package/dist/index.umd.js +2 -0
- package/dist/index.umd.js.map +1 -0
- package/package.json +6 -4
- package/src/styles.css +2 -0
- package/dist/api.d.ts +0 -14
- package/dist/api.d.ts.map +0 -1
- package/dist/api.js +0 -139
- package/dist/api.js.map +0 -1
- package/dist/config.d.ts +0 -18
- package/dist/config.d.ts.map +0 -1
- package/dist/config.js +0 -85
- package/dist/config.js.map +0 -1
- package/dist/i18n.d.ts +0 -53
- package/dist/i18n.d.ts.map +0 -1
- package/dist/i18n.js +0 -51
- package/dist/i18n.js.map +0 -1
- package/dist/icons.d.ts +0 -10
- package/dist/icons.d.ts.map +0 -1
- package/dist/icons.js +0 -22
- package/dist/icons.js.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/models.d.ts +0 -11
- package/dist/models.d.ts.map +0 -1
- package/dist/models.js +0 -199
- package/dist/models.js.map +0 -1
- package/dist/providers.d.ts +0 -42
- package/dist/providers.d.ts.map +0 -1
- package/dist/providers.js +0 -229
- package/dist/providers.js.map +0 -1
- package/dist/storage.d.ts +0 -31
- package/dist/storage.d.ts.map +0 -1
- package/dist/storage.js +0 -65
- package/dist/storage.js.map +0 -1
- package/dist/strategies.d.ts +0 -54
- package/dist/strategies.d.ts.map +0 -1
- package/dist/strategies.js +0 -184
- package/dist/strategies.js.map +0 -1
- package/dist/types.d.ts +0 -122
- package/dist/types.d.ts.map +0 -1
- package/dist/types.js +0 -6
- package/dist/types.js.map +0 -1
- package/dist/utils.d.ts +0 -2
- package/dist/utils.d.ts.map +0 -1
- package/dist/utils.js +0 -4
- package/dist/utils.js.map +0 -1
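In 0.1.3 the previously separate dist modules listed above (api, config, i18n, icons, models, providers, storage, strategies, types, utils) are deleted and their contents are consolidated into the single ES bundle dist/index.js, with a new UMD build (dist/index.umd.js) added alongside it. For orientation, consumers would now import everything from the package root rather than from the removed per-file paths; the line below is an illustrative sketch based on the export block in the bundled source, not documentation shipped with the package.

  // Illustrative only: the former sub-modules are all re-exported from the single entry point.
  import { PROVIDERS, PROVIDER_ID, getStrategy, sendDirectChat } from "@tombcato/ai-selector-core";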
package/dist/index.js
CHANGED
@@ -1,19 +1,784 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const K = {
+  zh: {
+    save: "保存配置",
+    saved: "保存成功",
+    providerLabel: "Provider",
+    modelLabel: "Model",
+    selectProvider: "选择 Provider...",
+    customBaseUrl: "自定义 Base URL",
+    apiKeyLabel: "API Key",
+    apiKeyPlaceholder: "输入 API Key...",
+    testConnection: "测试模型连接",
+    testing: "测试中...",
+    testSuccess: "连接成功",
+    testFailed: "测试连通性失败",
+    selectModel: "选择 Model...",
+    searchModel: "搜索或自定义模型...",
+    useCustom: "使用自定义",
+    noModels: "暂无模型数据",
+    apiKeyTip: "输入 API Key 后可获取完整模型列表",
+    fetchModelsFailed: "拉取模型列表失败,已使用离线模型列表",
+    refreshingModels: "列表刷新中...",
+    modelListUpdated: "模型列表已刷新",
+    preview: "配置预览",
+    unselected: "(未选择)"
+  },
+  en: {
+    save: "Save Config",
+    saved: "Saved",
+    providerLabel: "Provider",
+    modelLabel: "Model",
+    selectProvider: "Select Provider...",
+    customBaseUrl: "Custom Base URL",
+    apiKeyLabel: "API Key",
+    apiKeyPlaceholder: "Enter API Key...",
+    testConnection: "Test Model Connection",
+    testing: "Testing...",
+    testSuccess: "Connection Successful",
+    testFailed: "Connection Failed",
+    selectModel: "Select Model...",
+    searchModel: "Search or custom models...",
+    useCustom: "Use custom",
+    noModels: "No models found",
+    apiKeyTip: "Enter API Key to fetch full model list",
+    fetchModelsFailed: "Failed to fetch model list, using offline models",
+    refreshingModels: "Refreshing models...",
+    modelListUpdated: "Model list updated",
+    preview: "Config Preview",
+    unselected: "(Unselected)"
+  }
+}, m = "https://registry.npmmirror.com/@lobehub/icons-static-svg/1.77.0/files/icons", n = {
+  OPENAI: "openai",
+  ANTHROPIC: "anthropic",
+  GEMINI: "gemini",
+  DEEPSEEK: "deepseek",
+  OPENROUTER: "openrouter",
+  GROQ: "groq",
+  MISTRAL: "mistral",
+  MOONSHOT: "moonshot",
+  QWEN: "qwen",
+  ZHIPU: "zhipu",
+  SILICONFLOW: "siliconflow",
+  XAI: "xai",
+  TOGETHER: "together",
+  FIREWORKS: "fireworks",
+  DEEPINFRA: "deepinfra",
+  PERPLEXITY: "perplexity",
+  COHERE: "cohere",
+  OLLAMA: "ollama",
+  DOUBAO: "doubao",
+  MINIMAX: "minimax"
+}, w = {
+  [n.OPENAI]: {
+    id: n.OPENAI,
+    name: "OpenAI",
+    baseUrl: "https://api.openai.com/v1",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/openai.svg`
+  },
+  [n.ANTHROPIC]: {
+    id: n.ANTHROPIC,
+    name: "Anthropic (Claude)",
+    baseUrl: "https://api.anthropic.com/v1",
+    needsApiKey: !0,
+    apiFormat: "anthropic",
+    supportsModelsApi: !1,
+    icon: `${m}/anthropic.svg`
+  },
+  [n.GEMINI]: {
+    id: n.GEMINI,
+    name: "Google Gemini",
+    baseUrl: "https://generativelanguage.googleapis.com/v1beta",
+    needsApiKey: !0,
+    apiFormat: "gemini",
+    supportsModelsApi: !0,
+    icon: `${m}/gemini.svg`
+  },
+  [n.OPENROUTER]: {
+    id: n.OPENROUTER,
+    name: "OpenRouter",
+    baseUrl: "https://openrouter.ai/api/v1",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/openrouter.svg`
+  },
+  [n.DEEPSEEK]: {
+    id: n.DEEPSEEK,
+    name: "DeepSeek",
+    baseUrl: "https://api.deepseek.com",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/deepseek.svg`
+  },
+  [n.MOONSHOT]: {
+    id: n.MOONSHOT,
+    name: "Moonshot (Kimi)",
+    baseUrl: "https://api.moonshot.cn/v1",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/moonshot.svg`
+  },
+  [n.QWEN]: {
+    id: n.QWEN,
+    name: "通义千问 (Qwen)",
+    baseUrl: "https://dashscope.aliyuncs.com/compatible-mode/v1",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/qwen.svg`
+  },
+  [n.ZHIPU]: {
+    id: n.ZHIPU,
+    name: "智谱 AI (GLM)",
+    baseUrl: "https://open.bigmodel.cn/api/paas/v4",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/zhipu.svg`
+  },
+  [n.SILICONFLOW]: {
+    id: n.SILICONFLOW,
+    name: "硅基流动 (siliconflow)",
+    baseUrl: "https://api.siliconflow.cn/v1",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/siliconcloud.svg`
+  },
+  [n.DOUBAO]: {
+    id: n.DOUBAO,
+    name: "火山方舟 (Doubao)",
+    baseUrl: "https://ark.cn-beijing.volces.com/api/v3",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/doubao.svg`
+  },
+  [n.MINIMAX]: {
+    id: n.MINIMAX,
+    name: "MiniMax",
+    baseUrl: "https://api.minimax.io/v1",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !1,
+    icon: `${m}/minimax.svg`
+  },
+  [n.XAI]: {
+    id: n.XAI,
+    name: "xAI (Grok)",
+    baseUrl: "https://api.x.ai/v1",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/grok.svg`
+  },
+  [n.GROQ]: {
+    id: n.GROQ,
+    name: "Groq",
+    baseUrl: "https://api.groq.com/openai/v1",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/groq.svg`
+  },
+  [n.MISTRAL]: {
+    id: n.MISTRAL,
+    name: "Mistral AI",
+    baseUrl: "https://api.mistral.ai/v1",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/mistral.svg`
+  },
+  [n.TOGETHER]: {
+    id: n.TOGETHER,
+    name: "Together AI",
+    baseUrl: "https://api.together.xyz/v1",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/together.svg`
+  },
+  [n.FIREWORKS]: {
+    id: n.FIREWORKS,
+    name: "Fireworks AI",
+    baseUrl: "https://api.fireworks.ai/inference/v1",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/fireworks.svg`
+  },
+  [n.DEEPINFRA]: {
+    id: n.DEEPINFRA,
+    name: "DeepInfra",
+    baseUrl: "https://api.deepinfra.com/v1/openai",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/deepinfra.svg`
+  },
+  [n.PERPLEXITY]: {
+    id: n.PERPLEXITY,
+    name: "Perplexity",
+    baseUrl: "https://api.perplexity.ai",
+    needsApiKey: !0,
+    apiFormat: "openai",
+    supportsModelsApi: !1,
+    icon: `${m}/perplexity.svg`
+  },
+  [n.COHERE]: {
+    id: n.COHERE,
+    name: "Cohere",
+    baseUrl: "https://api.cohere.com/v2",
+    needsApiKey: !0,
+    apiFormat: "cohere",
+    supportsModelsApi: !0,
+    icon: `${m}/cohere.svg`
+  },
+  [n.OLLAMA]: {
+    id: n.OLLAMA,
+    name: "Ollama (Local)",
+    baseUrl: "http://localhost:11434/v1",
+    needsApiKey: !1,
+    apiFormat: "openai",
+    supportsModelsApi: !0,
+    icon: `${m}/ollama.svg`
+  }
+};
+function U(e) {
+  return w[e];
+}
+function I() {
+  return Object.values(w);
+}
+function D(e) {
+  return I().filter((a) => a.apiFormat === e);
+}
+const E = {
+  [n.OPENAI]: [
+    { id: "gpt-5.2-pro", name: "GPT-5.2 Pro" },
+    { id: "gpt-5.2", name: "GPT-5.2" },
+    { id: "gpt-5", name: "GPT-5" },
+    { id: "gpt-5-mini", name: "GPT-5 Mini" },
+    { id: "gpt-5-nano", name: "GPT-5 Nano" },
+    { id: "gpt-4.1", name: "GPT-4.1" }
+  ],
+  [n.ANTHROPIC]: [
+    { id: "claude-opus-4.5-20251101", name: "Claude Opus 4.5" },
+    { id: "claude-opus-4.5-20251101-thinking", name: "Claude Opus 4.5 Thinking" },
+    { id: "claude-sonnet-4.5-20250929", name: "Claude Sonnet 4.5" },
+    { id: "claude-sonnet-4.5-20250929-thinking", name: "Claude Sonnet 4.5 Thinking" },
+    { id: "claude-haiku-4.5-20251001", name: "Claude Haiku 4.5" },
+    { id: "claude-haiku-4.5-20251001-thinking", name: "Claude Haiku 4.5 Thinking" },
+    { id: "claude-opus-4.1-20250805", name: "Claude Opus 4.1" },
+    { id: "claude-opus-4.1-20250805-thinking", name: "Claude Opus 4.1 Thinking" }
+  ],
+  [n.GEMINI]: [
+    { id: "gemini-3-flash-preview", name: "Gemini 3 Flash Preview" },
+    { id: "gemini-3-pro-preview", name: "Gemini 3 Pro Preview" },
+    { id: "gemini-2.5-pro", name: "Gemini 2.5 Pro" },
+    { id: "gemini-2.5-flash", name: "Gemini 2.5 Flash" },
+    { id: "gemini-2.5-flash-lite", name: "Gemini 2.5 Flash-Lite" }
+  ],
+  [n.DEEPSEEK]: [
+    { id: "deepseek-chat", name: "Deepseek Chat" },
+    { id: "deepseek-reasoner", name: "Deepseek Reasoner" }
+  ],
+  [n.MISTRAL]: [
+    { id: "mistral-large-latest", name: "Mistral Large" },
+    { id: "mistral-medium-latest", name: "Mistral Medium" },
+    { id: "mistral-small-latest", name: "Mistral Small" },
+    { id: "codestral-latest", name: "Codestral" },
+    { id: "devstral-latest", name: "Devstral" },
+    { id: "magistral-medium-latest", name: "Magistral Medium" },
+    { id: "pixtral-large-latest", name: "Pixtral Large" },
+    { id: "pixtral-12b-latest", name: "Pixtral 12B" },
+    { id: "ministral-8b-latest", name: "Ministral 8B" },
+    { id: "ministral-3b-latest", name: "Ministral 3B" }
+  ],
+  [n.GROQ]: [
+    { id: "canopylabs/orpheus-v1-english", name: "Orpheus V1 English" },
+    { id: "canopylabs/orpheus-arabic-saudi", name: "Orpheus Arabic Saudi" },
+    { id: "moonshotai/kimi-k2-instruct", name: "Kimi K2 Instruct" },
+    { id: "groq/compound-mini", name: "Compound Mini" },
+    { id: "llama-3.3-70b-versatile", name: "Llama 3.3 70B Versatile" },
+    { id: "openai/gpt-oss-120b", name: "Gpt Oss 120B" },
+    { id: "qwen/qwen3-32b", name: "Qwen3 32B" },
+    { id: "groq/compound", name: "Compound" },
+    { id: "meta-llama/llama-4-maverick-17b-128e-instruct", name: "Llama 4 Maverick 17B 128E Instruct" }
+  ],
+  [n.XAI]: [
+    { id: "grok-4", name: "Grok 4" },
+    { id: "grok-4-fast", name: "Grok 4 Fast" },
+    { id: "grok-3-latest", name: "Grok 3" },
+    { id: "grok-3-fast", name: "Grok 3 Fast" },
+    { id: "grok-3-mini-latest", name: "Grok 3 Mini" },
+    { id: "grok-3-mini-fast", name: "Grok 3 Mini Fast" },
+    { id: "grok-vision-beta", name: "Grok Vision (Beta)" }
+  ],
+  [n.TOGETHER]: [
+    { id: "Qwen/Qwen2.5-72B-Instruct-Turbo", name: "Qwen2.5 72B Instruct Turbo" },
+    { id: "Qwen/Qwen3-235B-A22B-Thinking-2507", name: "Qwen3 235B A22B Thinking 2507" },
+    { id: "deepseek-ai/DeepSeek-R1", name: "Deepseek R1" },
+    { id: "deepseek-ai/DeepSeek-V3.1", name: "Deepseek V3.1" },
+    { id: "meta-llama/Llama-3.2-3B-Instruct-Turbo", name: "Llama 3.2 3B Instruct Turbo" },
+    { id: "meta-llama/Llama-3.3-70B-Instruct-Turbo", name: "Llama 3.3 70B Instruct Turbo" },
+    { id: "meta-llama/Llama-3.3-70B-Instruct-Turbo-Free", name: "Llama 3.3 70B Instruct Turbo Free" }
+  ],
+  [n.FIREWORKS]: [
+    { id: "accounts/fireworks/models/llama4-scout-instruct-basic", name: "Llama4 Scout Instruct Basic" },
+    { id: "accounts/fireworks/models/qwen3-vl-235b-a22b-thinking", name: "Qwen3 Vl 235B A22B Thinking" },
+    { id: "accounts/fireworks/models/deepseek-v3p2", name: "Deepseek V3P2" },
+    { id: "accounts/fireworks/models/qwen3-vl-30b-a3b-thinking", name: "Qwen3 Vl 30B A3B Thinking" },
+    { id: "accounts/fireworks/models/qwen3-8b", name: "Qwen3 8B" },
+    { id: "accounts/fireworks/models/qwen3-vl-30b-a3b-instruct", name: "Qwen3 Vl 30B A3B Instruct" },
+    { id: "accounts/fireworks/models/qwen2p5-vl-32b-instruct", name: "Qwen2P5 Vl 32B Instruct" },
+    { id: "accounts/fireworks/models/llama4-maverick-instruct-basic", name: "Llama4 Maverick Instruct Basic" },
+    { id: "accounts/fireworks/models/qwen3-235b-a22b-thinking-2507", name: "Qwen3 235B A22B Thinking 2507" },
+    { id: "accounts/fireworks/models/qwen3-coder-480b-a35b-instruct", name: "Qwen3 Coder 480B A35B Instruct" }
+  ],
+  [n.DEEPINFRA]: [
+    { id: "meta-llama/Llama-3.2-11B-Vision-Instruct", name: "Llama 3.2 11B Vision Instruct" },
+    { id: "Qwen/Qwen3-32B", name: "Qwen3 32B" },
+    { id: "NousResearch/Hermes-3-Llama-3.1-70B", name: "Hermes 3 Llama 3.1 70B" },
+    { id: "Qwen/Qwen2.5-72B-Instruct", name: "Qwen2.5 72B Instruct" },
+    { id: "deepseek-ai/DeepSeek-V3-0324", name: "Deepseek V3 0324" },
+    { id: "Qwen/Qwen3-VL-235B-A22B-Instruct", name: "Qwen3 Vl 235B A22B Instruct" },
+    { id: "meta-llama/Llama-3.2-3B-Instruct", name: "Llama 3.2 3B Instruct" },
+    { id: "Qwen/Qwen2.5-VL-32B-Instruct", name: "Qwen2.5 Vl 32B Instruct" },
+    { id: "meta-llama/Meta-Llama-3.1-8B-Instruct", name: "Meta Llama 3.1 8B Instruct" },
+    { id: "Qwen/Qwen3-Coder-480B-A35B-Instruct-Turbo", name: "Qwen3 Coder 480B A35B Instruct Turbo" }
+  ],
+  [n.OPENROUTER]: [
+    { id: "google/gemini-3-flash-preview", name: "Gemini 3 Flash Preview" },
+    { id: "mistralai/mistral-small-creative", name: "Mistral Small Creative" },
+    { id: "openai/gpt-5.2-chat", name: "Gpt 5.2 Chat" },
+    { id: "openai/gpt-5.2-pro", name: "Gpt 5.2 Pro" },
+    { id: "openai/gpt-5.2", name: "Gpt 5.2" },
+    { id: "mistralai/devstral-2512", name: "Devstral 2512" },
+    { id: "openai/gpt-5.1-codex-max", name: "Gpt 5.1 Codex Max" },
+    { id: "mistralai/ministral-14b-2512", name: "Ministral 14B 2512" },
+    { id: "mistralai/ministral-8b-2512", name: "Ministral 8B 2512" },
+    { id: "mistralai/ministral-3b-2512", name: "Ministral 3B 2512" },
+    { id: "mistralai/mistral-large-2512", name: "Mistral Large 2512" },
+    { id: "deepseek/deepseek-v3.2-speciale", name: "Deepseek V3.2 Speciale" }
+  ],
+  [n.PERPLEXITY]: [
+    { id: "codellama-34b-instruct", name: "Codellama 34B Instruct" },
+    { id: "codellama-70b-instruct", name: "Codellama 70B Instruct" },
+    { id: "llama-2-70b-chat", name: "Llama 2 70B Chat" },
+    { id: "llama-3.1-70b-instruct", name: "Llama 3.1 70B Instruct" },
+    { id: "llama-3.1-8b-instruct", name: "Llama 3.1 8B Instruct" },
+    { id: "llama-3.1-sonar-huge-128k-online", name: "Llama 3.1 Sonar Huge 128K Online" },
+    { id: "llama-3.1-sonar-large-128k-chat", name: "Llama 3.1 Sonar Large 128K Chat" },
+    { id: "llama-3.1-sonar-large-128k-online", name: "Llama 3.1 Sonar Large 128K Online" },
+    { id: "llama-3.1-sonar-small-128k-chat", name: "Llama 3.1 Sonar Small 128K Chat" }
+  ],
+  [n.COHERE]: [
+    { id: "command-a-vision-07-2025", name: "Command A Vision 07 2025" },
+    { id: "command-a-reasoning-08-2025", name: "Command A Reasoning 08 2025" },
+    { id: "command-r-08-2024", name: "Command R 08 2024" },
+    { id: "command-r7b-arabic-02-2025", name: "Command R7B Arabic 02 2025" },
+    { id: "command-r7b-12-2024", name: "Command R7B 12 2024" }
+  ],
+  [n.MOONSHOT]: [
+    { id: "moonshot-v1-128k", name: "Moonshot 128K" },
+    { id: "moonshot-v1-32k", name: "Moonshot 32K" },
+    { id: "moonshot-v1-8k", name: "Moonshot 8K" },
+    { id: "kimi-k2-0711-chat", name: "Kimi K2 Chat" },
+    { id: "moonshot-v1-auto", name: "Moonshot Auto" }
+  ],
+  [n.QWEN]: [
+    { id: "qwen-flash", name: "Qwen Flash" },
+    { id: "qwen3-vl-plus-2025-12-19", name: "Qwen3 Vl Plus 2025 12 19" },
+    { id: "qwen3-tts-vd-realtime-2025-12-16", name: "Qwen3 Tts Vd Realtime 2025 12 16" },
+    { id: "qwen-image-edit-plus-2025-12-15", name: "Qwen Image Edit Plus 2025 12 15" },
+    { id: "qwen3-omni-flash-2025-12-01", name: "Qwen3 Omni Flash 2025 12 01" },
+    { id: "qwen3-omni-flash-realtime-2025-12-01", name: "Qwen3 Omni Flash Realtime 2025 12 01" },
+    { id: "qwen3-livetranslate-flash-2025-12-01", name: "Qwen3 Livetranslate Flash 2025 12 01" },
+    { id: "qwen3-livetranslate-flash", name: "Qwen3 Livetranslate Flash" },
+    { id: "qwen-plus-2025-12-01", name: "Qwen Plus 2025 12 01" },
+    { id: "qwen3-tts-vc-realtime-2025-11-27", name: "Qwen3 Tts Vc Realtime 2025 11 27" }
+  ],
+  [n.ZHIPU]: [
+    { id: "glm-4.5", name: "Glm 4.5" },
+    { id: "glm-4.5-air", name: "Glm 4.5 Air" },
+    { id: "glm-4.6", name: "Glm 4.6" },
+    { id: "glm-4.7", name: "Glm 4.7" }
+  ],
+  [n.SILICONFLOW]: [
+    { id: "deepseek-ai/DeepSeek-V3.2", name: "Deepseek V3.2" },
+    { id: "deepseek-ai/DeepSeek-V3.1-Terminus", name: "Deepseek V3.1 Terminus" },
+    { id: "deepseek-ai/DeepSeek-R1", name: "Deepseek R1" },
+    { id: "deepseek-ai/DeepSeek-V3", name: "Deepseek V3" },
+    { id: "zai-org/GLM-4.6V", name: "Glm 4.6V" },
+    { id: "zai-org/GLM-4.6", name: "Glm 4.6" },
+    { id: "Pro/zai-org/GLM-4.7", name: "Glm 4.7" },
+    { id: "Qwen/Qwen3-VL-32B-Instruct", name: "Qwen3 Vl 32B Instruct" }
+  ],
+  [n.OLLAMA]: [
+    { id: "llama3.3", name: "Llama 3.3" },
+    { id: "llama3.2", name: "Llama 3.2" },
+    { id: "qwq", name: "QwQ (推理)" },
+    { id: "qwen3:32b", name: "Qwen3 32B" },
+    { id: "deepseek-r1:32b", name: "DeepSeek R1 32B" },
+    { id: "deepseek-coder-v2", name: "DeepSeek Coder V2" },
+    { id: "gemma3:27b", name: "Gemma 3 27B" },
+    { id: "mistral:7b", name: "Mistral 7B" },
+    { id: "phi4", name: "Phi 4" },
+    { id: "codellama", name: "Code Llama" }
+  ],
+  [n.DOUBAO]: [
+    { id: "doubao-seed-1-8-251215", name: "Doubao 1.8" },
+    { id: "doubao-seed-1-6-251015", name: "Doubao 1.6" },
+    { id: "doubao-seed-1-6-lite-251015", name: "Doubao 1.6 Lite" },
+    { id: "doubao-seed-1-6-flash-250828", name: "Doubao 1.6 Flash" },
+    { id: "doubao-seed-1-6-thinking-250615", name: "Doubao 1.6 Thinking" }
+  ],
+  [n.MINIMAX]: [
+    { id: "MiniMax-M2.1", name: "MiniMax M2.1" },
+    { id: "MiniMax-M2.1-lightning", name: "MiniMax M2.1 Lightning" },
+    { id: "MiniMax-M2", name: "MiniMax M2" }
+  ]
+};
+function v(e) {
+  return E[e] || [];
+}
+const O = (e) => Array.isArray(e == null ? void 0 : e.data) ? e.data.filter((a) => a.id).map((a) => {
+  const t = a.name || (a.id.split("/").pop() ?? "").replace(/[-_]/g, " ").replace(/\b\w/g, (o) => o.toUpperCase());
+  return {
+    id: a.id,
+    name: t,
+    created: a.created || 0
+  };
+}).sort((a, t) => (t.created || 0) - (a.created || 0)) : [], y = {
+  format: "openai",
+  getModelsEndpoint: (e) => `${e}/models`,
+  getChatEndpoint: (e) => `${e}/chat/completions`,
+  buildHeaders: (e) => ({
+    "Content-Type": "application/json",
+    Authorization: `Bearer ${e}`
+  }),
+  buildChatPayload: (e, a, t) => ({
+    model: e,
+    messages: a,
+    max_tokens: t
+  }),
+  parseChatResponse: (e) => {
+    var a, t, o;
+    return ((o = (t = (a = e.choices) == null ? void 0 : a[0]) == null ? void 0 : t.message) == null ? void 0 : o.content) || "";
+  }
+}, P = {
+  format: "anthropic",
+  getChatEndpoint: (e) => `${e}/messages`,
+  buildHeaders: (e) => ({
+    "Content-Type": "application/json",
+    "x-api-key": e,
+    "anthropic-version": "2023-06-01"
+  }),
+  buildChatPayload: (e, a, t) => ({
+    model: e,
+    messages: a,
+    max_tokens: t
+  }),
+  parseChatResponse: (e) => {
+    var a, t;
+    return ((t = (a = e.content) == null ? void 0 : a[0]) == null ? void 0 : t.text) || "";
+  }
+}, T = {
+  format: "gemini",
+  getModelsEndpoint: (e, a) => `${e}/models?key=${a}`,
+  getChatEndpoint: (e, a, t) => `${e}/models/${t}:generateContent?key=${a}`,
+  buildHeaders: () => ({
+    "Content-Type": "application/json"
+  }),
+  buildChatPayload: (e, a, t) => ({
+    contents: a.map((r) => ({
+      role: r.role === "assistant" ? "model" : "user",
+      parts: [{ text: r.content }]
+    })),
+    generationConfig: { maxOutputTokens: t }
+  }),
+  parseChatResponse: (e) => {
+    var a, t, o, r, s;
+    return ((s = (r = (o = (t = (a = e.candidates) == null ? void 0 : a[0]) == null ? void 0 : t.content) == null ? void 0 : o.parts) == null ? void 0 : r[0]) == null ? void 0 : s.text) || "";
+  },
+  // Gemini 返回格式: { models: [{ name: "models/gemini-pro", ... }] }
+  parseModelsResponse: (e) => Array.isArray(e.models) ? e.models.filter((a) => {
+    var t;
+    return (t = a.supportedGenerationMethods) == null ? void 0 : t.includes("generateContent");
+  }).map((a) => ({
+    id: a.name.replace("models/", ""),
+    // "models/gemini-pro" -> "gemini-pro"
+    name: a.displayName || a.name.replace("models/", ""),
+    created: a.created || 0
+  })).sort((a, t) => (t.created || 0) - (a.created || 0)) : []
+}, C = {
+  format: "cohere",
+  getModelsEndpoint: (e) => `${e}/models`,
+  getChatEndpoint: (e) => `${e}/chat`,
+  buildHeaders: (e) => ({
+    "Content-Type": "application/json",
+    Authorization: `Bearer ${e}`
+  }),
+  buildChatPayload: (e, a, t) => {
+    const o = a[a.length - 1], r = a.slice(0, -1).map((s) => ({
+      role: s.role === "assistant" ? "CHATBOT" : "USER",
+      message: s.content
+    }));
+    return {
+      model: e,
+      message: o.content,
+      chat_history: r,
+      max_tokens: t
+    };
+  },
+  parseChatResponse: (e) => e.text || ""
+}, B = {
+  openai: y,
+  anthropic: P,
+  gemini: T,
+  cohere: C
+};
+function k(e) {
+  return B[e] || y;
+}
+async function L(e) {
+  var h;
+  const { apiFormat: a, baseUrl: t, apiKey: o, model: r, messages: s, maxTokens: u = 2048 } = e, l = k(a), i = l.getChatEndpoint(t, o, r), d = l.buildHeaders(o), p = l.buildChatPayload(r, s, u), g = performance.now();
+  try {
+    const c = await fetch(i, {
+      method: "POST",
+      headers: d,
+      body: JSON.stringify(p)
+    }), f = Math.round(performance.now() - g);
+    if (!c.ok)
+      return {
+        success: !1,
+        message: ((h = (await c.json().catch(() => ({}))).error) == null ? void 0 : h.message) || `HTTP ${c.status}: ${c.statusText}`,
+        latencyMs: f
+      };
+    const M = await c.json();
+    return {
+      success: !0,
+      content: l.parseChatResponse(M),
+      latencyMs: f
+    };
+  } catch (c) {
+    return {
+      success: !1,
+      message: c instanceof Error ? c.message : "网络错误"
+    };
+  }
+}
+async function G(e) {
+  const a = await L({
+    apiFormat: e.apiFormat,
+    baseUrl: e.baseUrl,
+    apiKey: e.apiKey,
+    model: e.model,
+    messages: [{ role: "user", content: "Hi" }],
+    maxTokens: 5
+    // 最小 token 数,节省成本
+  });
+  return {
+    success: a.success,
+    latencyMs: a.latencyMs,
+    message: a.success ? void 0 : a.message
+  };
+}
+async function Q(e) {
+  const { provider: a, apiKey: t, model: o, baseUrl: r, proxyUrl: s } = e, u = r || a.baseUrl, l = Date.now();
+  try {
+    if (s) {
+      const b = await (await fetch(`${s}/test`, {
+        method: "POST",
+        headers: { "Content-Type": "application/json" },
+        body: JSON.stringify({
+          provider_id: a.id,
+          api_key: t,
+          model: o || "",
+          base_url: r || a.baseUrl,
+          api_format: a.apiFormat
+        })
+      })).json();
+      return {
+        success: b.success,
+        latencyMs: b.latency_ms || Date.now() - l,
+        message: b.message
+      };
+    }
+    const i = k(a.apiFormat), d = o || "";
+    if (!d)
+      return {
+        success: !1,
+        latencyMs: 0,
+        message: "请先选择模型 (Please select a model)"
+      };
+    const p = i.buildHeaders(t), g = i.buildChatPayload(d, [{ role: "user", content: "Hi" }], 5), h = i.getChatEndpoint(u, t, d), c = await fetch(h, {
+      method: "POST",
+      headers: p,
+      body: JSON.stringify(g)
+    }), f = Date.now() - l;
+    if (c.ok)
+      return { success: !0, latencyMs: f, message: "连接成功" };
+    {
+      const M = await c.text();
+      return {
+        success: !1,
+        latencyMs: f,
+        message: `HTTP ${c.status}: ${M.slice(0, 200)}`
+      };
+    }
+  } catch (i) {
+    return {
+      success: !1,
+      latencyMs: Date.now() - l,
+      message: i instanceof Error ? i.message : String(i)
+    };
+  }
+}
+async function N(e) {
+  var u;
+  const { provider: a, apiKey: t, baseUrl: o, proxyUrl: r, fallbackToStatic: s = !0 } = e;
+  if (r)
+    try {
+      const i = await (await fetch(`${r}/models`, {
+        method: "POST",
+        headers: { "Content-Type": "application/json" },
+        body: JSON.stringify({
+          provider_id: a.id,
+          api_key: t || void 0,
+          base_url: o || a.baseUrl
+        })
+      })).json();
+      if (i.success && ((u = i.models) == null ? void 0 : u.length) > 0)
+        return i.models;
+    } catch (l) {
+      if (console.warn("Failed to fetch models via proxy:", l), !s) throw l;
+    }
+  if (!r && a.supportsModelsApi)
+    try {
+      const l = k(a.apiFormat);
+      if (l.getModelsEndpoint) {
+        const i = l.getModelsEndpoint(o || a.baseUrl, t || ""), d = l.buildHeaders(t || ""), p = await fetch(i, {
+          method: "GET",
+          headers: d
+        });
+        if (p.ok) {
+          const g = await p.json();
+          return (l.parseModelsResponse || O)(g);
+        } else if (!s) throw new Error(`HTTP ${p.status}`);
+      }
+    } catch (l) {
+      if (console.warn("Failed to fetch models directly:", l), !s) throw l;
+    }
+  if (!s && a.supportsModelsApi)
+    throw new Error("Failed to fetch models");
+  return v(a.id);
+}
+const A = "ai_provider_config", S = {
+  get: (e) => typeof window > "u" ? null : localStorage.getItem(e),
+  set: (e, a) => {
+    typeof window > "u" || localStorage.setItem(e, a);
+  },
+  remove: (e) => {
+    typeof window > "u" || localStorage.removeItem(e);
+  }
+};
+function x(e = S, a = {}) {
+  const t = a.serialize || JSON.stringify, o = a.deserialize || JSON.parse;
+  return {
+    /**
+     * Save AI config
+     */
+    save(r) {
+      try {
+        const s = t(r);
+        e.set(A, s);
+      } catch (s) {
+        console.error("Failed to save config:", s);
+      }
+    },
+    /**
+     * Load AI config
+     */
+    load() {
+      const r = e.get(A);
+      if (!r) return null;
+      try {
+        return o(r);
+      } catch (s) {
+        return console.error("Failed to load config:", s), null;
+      }
+    },
+    /**
+     * Clear AI config
+     */
+    clear() {
+      e.remove(A);
+    }
+  };
+}
+function R(e) {
+  if (!e)
+    return {
+      providers: Object.values(w),
+      getModels: (i) => v(i)
+    };
+  const { mode: a, include: t, exclude: o, custom: r } = e;
+  let s = [];
+  if (a === "default") {
+    let i = Object.values(w);
+    t && t.length > 0 && (i = i.filter((d) => t.includes(d.id))), o && o.length > 0 && (i = i.filter((d) => !o.includes(d.id))), s = [...i];
+  }
+  const u = {};
+  if (r)
+    for (const [i, d] of Object.entries(r)) {
+      const p = {
+        id: i,
+        name: d.name,
+        baseUrl: d.baseUrl,
+        needsApiKey: d.needsApiKey,
+        apiFormat: d.apiFormat,
+        supportsModelsApi: d.supportsModelsApi ?? !1,
+        icon: d.icon
+      }, g = s.findIndex((h) => h.id === i);
+      g >= 0 ? s[g] = {
+        ...s[g],
+        ...p,
+        icon: p.icon || s[g].icon
+      } : s.push(p), d.models && d.models.length > 0 && (u[i] = d.models);
+    }
+  return { providers: s, getModels: (i) => u[i] ? u[i] : v(i) };
+}
+function H(e, a) {
+  const { providers: t } = R(a);
+  return t.find((o) => o.id === e) || null;
+}
+export {
+  K as I18N,
+  w as PROVIDERS,
+  n as PROVIDER_ID,
+  E as STATIC_MODELS,
+  x as createConfigStorage,
+  N as fetchModels,
+  I as getAllProviders,
+  U as getProvider,
+  H as getProviderFromConfig,
+  D as getProvidersByFormat,
+  v as getStaticModels,
+  k as getStrategy,
+  S as localStorageAdapter,
+  R as resolveProviderConfig,
+  L as sendDirectChat,
+  B as strategyRegistry,
+  Q as testConnection,
+  G as testDirectConnection
+};
+//# sourceMappingURL=index.js.map
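For context, a minimal consumption sketch of the new public API follows (TypeScript). The function names and option keys (provider, apiKey, fallbackToStatic, apiFormat, baseUrl, model) are read from the export block and the destructured parameters visible in the bundled source above; the shape of the object passed to save() is an assumption, since the package's documentation is not part of this diff.

  // Sketch only: pick an OpenAI model, verify the key, and persist the choice.
  import {
    PROVIDER_ID,
    getProvider,
    fetchModels,
    testDirectConnection,
    createConfigStorage,
  } from "@tombcato/ai-selector-core";

  async function pickOpenAIModel(apiKey: string) {
    const provider = getProvider(PROVIDER_ID.OPENAI);

    // Falls back to the bundled STATIC_MODELS list when the live /models request fails.
    const models = await fetchModels({ provider, apiKey, fallbackToStatic: true });
    const model = models[0]?.id ?? "gpt-5-mini";

    // Internally sends a 5-token "Hi" completion and reports success and latency.
    const result = await testDirectConnection({
      apiFormat: provider.apiFormat,
      baseUrl: provider.baseUrl,
      apiKey,
      model,
    });

    if (result.success) {
      // Assumed config shape; createConfigStorage() serializes whatever object it is given
      // and stores it in localStorage under "ai_provider_config".
      createConfigStorage().save({ providerId: provider.id, apiKey, model });
    }
    return result;
  }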