@tombcato/ai-selector-core 0.1.3 → 0.1.5
This diff compares the publicly available contents of two package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- package/LICENSE +21 -0
- package/dist/index.d.ts +1 -1
- package/dist/index.js +169 -164
- package/dist/index.js.map +1 -1
- package/dist/index.umd.js +1 -1
- package/dist/index.umd.js.map +1 -1
- package/package.json +35 -35
- package/src/api.ts +1 -1
- package/src/strategies.ts +39 -21
- package/src/styles.css +7 -0
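For orientation, the dist/index.js diff below shows the bundle now exporting the provider-id constant map as PROVIDER_ID alongside PROVIDERS, STATIC_MODELS, getStaticModels, getStrategy and fetchModels. A minimal consumer sketch follows; the export names are taken from the export block in the diff, while the call shapes are inferred from the minified code and are illustrative only, not authoritative:

// Sketch only: names come from the export block in the diff below; exact
// TypeScript signatures may differ from the package's published typings.
import { PROVIDER_ID, PROVIDERS, getStaticModels, fetchModels } from "@tombcato/ai-selector-core";

async function listOpenAIModels(apiKey: string) {
  // 0.1.5 keys PROVIDERS and STATIC_MODELS by the PROVIDER_ID constants
  // instead of repeating string literals.
  const provider = PROVIDERS[PROVIDER_ID.OPENAI];

  // Bundled fallback list for a provider id.
  const staticModels = getStaticModels(PROVIDER_ID.OPENAI);

  // Per the diff, fetchModels falls back to the static list unless
  // fallbackToStatic is explicitly set to false.
  const models = await fetchModels({ provider, apiKey });
  return { provider, staticModels, models };
}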
package/dist/index.js
CHANGED
@@ -47,7 +47,7 @@ const K = {
 preview: "Config Preview",
 unselected: "(Unselected)"
 }
-}, m = "https://registry.npmmirror.com/@lobehub/icons-static-svg/1.77.0/files/icons",
+}, m = "https://registry.npmmirror.com/@lobehub/icons-static-svg/1.77.0/files/icons", t = {
 OPENAI: "openai",
 ANTHROPIC: "anthropic",
 GEMINI: "gemini",
@@ -69,8 +69,8 @@ const K = {
 DOUBAO: "doubao",
 MINIMAX: "minimax"
 }, w = {
-[
-id:
+[t.OPENAI]: {
+id: t.OPENAI,
 name: "OpenAI",
 baseUrl: "https://api.openai.com/v1",
 needsApiKey: !0,
@@ -78,8 +78,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/openai.svg`
 },
-[
-id:
+[t.ANTHROPIC]: {
+id: t.ANTHROPIC,
 name: "Anthropic (Claude)",
 baseUrl: "https://api.anthropic.com/v1",
 needsApiKey: !0,
@@ -87,8 +87,8 @@ const K = {
 supportsModelsApi: !1,
 icon: `${m}/anthropic.svg`
 },
-[
-id:
+[t.GEMINI]: {
+id: t.GEMINI,
 name: "Google Gemini",
 baseUrl: "https://generativelanguage.googleapis.com/v1beta",
 needsApiKey: !0,
@@ -96,8 +96,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/gemini.svg`
 },
-[
-id:
+[t.OPENROUTER]: {
+id: t.OPENROUTER,
 name: "OpenRouter",
 baseUrl: "https://openrouter.ai/api/v1",
 needsApiKey: !0,
@@ -105,8 +105,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/openrouter.svg`
 },
-[
-id:
+[t.DEEPSEEK]: {
+id: t.DEEPSEEK,
 name: "DeepSeek",
 baseUrl: "https://api.deepseek.com",
 needsApiKey: !0,
@@ -114,8 +114,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/deepseek.svg`
 },
-[
-id:
+[t.MOONSHOT]: {
+id: t.MOONSHOT,
 name: "Moonshot (Kimi)",
 baseUrl: "https://api.moonshot.cn/v1",
 needsApiKey: !0,
@@ -123,8 +123,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/moonshot.svg`
 },
-[
-id:
+[t.QWEN]: {
+id: t.QWEN,
 name: "通义千问 (Qwen)",
 baseUrl: "https://dashscope.aliyuncs.com/compatible-mode/v1",
 needsApiKey: !0,
@@ -132,8 +132,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/qwen.svg`
 },
-[
-id:
+[t.ZHIPU]: {
+id: t.ZHIPU,
 name: "智谱 AI (GLM)",
 baseUrl: "https://open.bigmodel.cn/api/paas/v4",
 needsApiKey: !0,
@@ -141,8 +141,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/zhipu.svg`
 },
-[
-id:
+[t.SILICONFLOW]: {
+id: t.SILICONFLOW,
 name: "硅基流动 (siliconflow)",
 baseUrl: "https://api.siliconflow.cn/v1",
 needsApiKey: !0,
@@ -150,8 +150,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/siliconcloud.svg`
 },
-[
-id:
+[t.DOUBAO]: {
+id: t.DOUBAO,
 name: "火山方舟 (Doubao)",
 baseUrl: "https://ark.cn-beijing.volces.com/api/v3",
 needsApiKey: !0,
@@ -159,8 +159,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/doubao.svg`
 },
-[
-id:
+[t.MINIMAX]: {
+id: t.MINIMAX,
 name: "MiniMax",
 baseUrl: "https://api.minimax.io/v1",
 needsApiKey: !0,
@@ -168,8 +168,8 @@ const K = {
 supportsModelsApi: !1,
 icon: `${m}/minimax.svg`
 },
-[
-id:
+[t.XAI]: {
+id: t.XAI,
 name: "xAI (Grok)",
 baseUrl: "https://api.x.ai/v1",
 needsApiKey: !0,
@@ -177,8 +177,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/grok.svg`
 },
-[
-id:
+[t.GROQ]: {
+id: t.GROQ,
 name: "Groq",
 baseUrl: "https://api.groq.com/openai/v1",
 needsApiKey: !0,
@@ -186,8 +186,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/groq.svg`
 },
-[
-id:
+[t.MISTRAL]: {
+id: t.MISTRAL,
 name: "Mistral AI",
 baseUrl: "https://api.mistral.ai/v1",
 needsApiKey: !0,
@@ -195,8 +195,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/mistral.svg`
 },
-[
-id:
+[t.TOGETHER]: {
+id: t.TOGETHER,
 name: "Together AI",
 baseUrl: "https://api.together.xyz/v1",
 needsApiKey: !0,
@@ -204,8 +204,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/together.svg`
 },
-[
-id:
+[t.FIREWORKS]: {
+id: t.FIREWORKS,
 name: "Fireworks AI",
 baseUrl: "https://api.fireworks.ai/inference/v1",
 needsApiKey: !0,
@@ -213,8 +213,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/fireworks.svg`
 },
-[
-id:
+[t.DEEPINFRA]: {
+id: t.DEEPINFRA,
 name: "DeepInfra",
 baseUrl: "https://api.deepinfra.com/v1/openai",
 needsApiKey: !0,
@@ -222,8 +222,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/deepinfra.svg`
 },
-[
-id:
+[t.PERPLEXITY]: {
+id: t.PERPLEXITY,
 name: "Perplexity",
 baseUrl: "https://api.perplexity.ai",
 needsApiKey: !0,
@@ -231,8 +231,8 @@ const K = {
 supportsModelsApi: !1,
 icon: `${m}/perplexity.svg`
 },
-[
-id:
+[t.COHERE]: {
+id: t.COHERE,
 name: "Cohere",
 baseUrl: "https://api.cohere.com/v2",
 needsApiKey: !0,
@@ -240,8 +240,8 @@ const K = {
 supportsModelsApi: !0,
 icon: `${m}/cohere.svg`
 },
-[
-id:
+[t.OLLAMA]: {
+id: t.OLLAMA,
 name: "Ollama (Local)",
 baseUrl: "http://localhost:11434/v1",
 needsApiKey: !1,
@@ -260,7 +260,7 @@ function D(e) {
 return I().filter((a) => a.apiFormat === e);
 }
 const E = {
-[
+[t.OPENAI]: [
 { id: "gpt-5.2-pro", name: "GPT-5.2 Pro" },
 { id: "gpt-5.2", name: "GPT-5.2" },
 { id: "gpt-5", name: "GPT-5" },
@@ -268,7 +268,7 @@ const E = {
 { id: "gpt-5-nano", name: "GPT-5 Nano" },
 { id: "gpt-4.1", name: "GPT-4.1" }
 ],
-[
+[t.ANTHROPIC]: [
 { id: "claude-opus-4.5-20251101", name: "Claude Opus 4.5" },
 { id: "claude-opus-4.5-20251101-thinking", name: "Claude Opus 4.5 Thinking" },
 { id: "claude-sonnet-4.5-20250929", name: "Claude Sonnet 4.5" },
@@ -278,18 +278,18 @@ const E = {
 { id: "claude-opus-4.1-20250805", name: "Claude Opus 4.1" },
 { id: "claude-opus-4.1-20250805-thinking", name: "Claude Opus 4.1 Thinking" }
 ],
-[
+[t.GEMINI]: [
 { id: "gemini-3-flash-preview", name: "Gemini 3 Flash Preview" },
 { id: "gemini-3-pro-preview", name: "Gemini 3 Pro Preview" },
 { id: "gemini-2.5-pro", name: "Gemini 2.5 Pro" },
 { id: "gemini-2.5-flash", name: "Gemini 2.5 Flash" },
 { id: "gemini-2.5-flash-lite", name: "Gemini 2.5 Flash-Lite" }
 ],
-[
+[t.DEEPSEEK]: [
 { id: "deepseek-chat", name: "Deepseek Chat" },
 { id: "deepseek-reasoner", name: "Deepseek Reasoner" }
 ],
-[
+[t.MISTRAL]: [
 { id: "mistral-large-latest", name: "Mistral Large" },
 { id: "mistral-medium-latest", name: "Mistral Medium" },
 { id: "mistral-small-latest", name: "Mistral Small" },
@@ -301,7 +301,7 @@ const E = {
 { id: "ministral-8b-latest", name: "Ministral 8B" },
 { id: "ministral-3b-latest", name: "Ministral 3B" }
 ],
-[
+[t.GROQ]: [
 { id: "canopylabs/orpheus-v1-english", name: "Orpheus V1 English" },
 { id: "canopylabs/orpheus-arabic-saudi", name: "Orpheus Arabic Saudi" },
 { id: "moonshotai/kimi-k2-instruct", name: "Kimi K2 Instruct" },
@@ -312,7 +312,7 @@ const E = {
 { id: "groq/compound", name: "Compound" },
 { id: "meta-llama/llama-4-maverick-17b-128e-instruct", name: "Llama 4 Maverick 17B 128E Instruct" }
 ],
-[
+[t.XAI]: [
 { id: "grok-4", name: "Grok 4" },
 { id: "grok-4-fast", name: "Grok 4 Fast" },
 { id: "grok-3-latest", name: "Grok 3" },
@@ -321,7 +321,7 @@ const E = {
 { id: "grok-3-mini-fast", name: "Grok 3 Mini Fast" },
 { id: "grok-vision-beta", name: "Grok Vision (Beta)" }
 ],
-[
+[t.TOGETHER]: [
 { id: "Qwen/Qwen2.5-72B-Instruct-Turbo", name: "Qwen2.5 72B Instruct Turbo" },
 { id: "Qwen/Qwen3-235B-A22B-Thinking-2507", name: "Qwen3 235B A22B Thinking 2507" },
 { id: "deepseek-ai/DeepSeek-R1", name: "Deepseek R1" },
@@ -330,7 +330,7 @@ const E = {
 { id: "meta-llama/Llama-3.3-70B-Instruct-Turbo", name: "Llama 3.3 70B Instruct Turbo" },
 { id: "meta-llama/Llama-3.3-70B-Instruct-Turbo-Free", name: "Llama 3.3 70B Instruct Turbo Free" }
 ],
-[
+[t.FIREWORKS]: [
 { id: "accounts/fireworks/models/llama4-scout-instruct-basic", name: "Llama4 Scout Instruct Basic" },
 { id: "accounts/fireworks/models/qwen3-vl-235b-a22b-thinking", name: "Qwen3 Vl 235B A22B Thinking" },
 { id: "accounts/fireworks/models/deepseek-v3p2", name: "Deepseek V3P2" },
@@ -342,7 +342,7 @@ const E = {
 { id: "accounts/fireworks/models/qwen3-235b-a22b-thinking-2507", name: "Qwen3 235B A22B Thinking 2507" },
 { id: "accounts/fireworks/models/qwen3-coder-480b-a35b-instruct", name: "Qwen3 Coder 480B A35B Instruct" }
 ],
-[
+[t.DEEPINFRA]: [
 { id: "meta-llama/Llama-3.2-11B-Vision-Instruct", name: "Llama 3.2 11B Vision Instruct" },
 { id: "Qwen/Qwen3-32B", name: "Qwen3 32B" },
 { id: "NousResearch/Hermes-3-Llama-3.1-70B", name: "Hermes 3 Llama 3.1 70B" },
@@ -354,7 +354,7 @@ const E = {
 { id: "meta-llama/Meta-Llama-3.1-8B-Instruct", name: "Meta Llama 3.1 8B Instruct" },
 { id: "Qwen/Qwen3-Coder-480B-A35B-Instruct-Turbo", name: "Qwen3 Coder 480B A35B Instruct Turbo" }
 ],
-[
+[t.OPENROUTER]: [
 { id: "google/gemini-3-flash-preview", name: "Gemini 3 Flash Preview" },
 { id: "mistralai/mistral-small-creative", name: "Mistral Small Creative" },
 { id: "openai/gpt-5.2-chat", name: "Gpt 5.2 Chat" },
@@ -368,7 +368,7 @@ const E = {
 { id: "mistralai/mistral-large-2512", name: "Mistral Large 2512" },
 { id: "deepseek/deepseek-v3.2-speciale", name: "Deepseek V3.2 Speciale" }
 ],
-[
+[t.PERPLEXITY]: [
 { id: "codellama-34b-instruct", name: "Codellama 34B Instruct" },
 { id: "codellama-70b-instruct", name: "Codellama 70B Instruct" },
 { id: "llama-2-70b-chat", name: "Llama 2 70B Chat" },
@@ -379,21 +379,21 @@ const E = {
 { id: "llama-3.1-sonar-large-128k-online", name: "Llama 3.1 Sonar Large 128K Online" },
 { id: "llama-3.1-sonar-small-128k-chat", name: "Llama 3.1 Sonar Small 128K Chat" }
 ],
-[
+[t.COHERE]: [
 { id: "command-a-vision-07-2025", name: "Command A Vision 07 2025" },
 { id: "command-a-reasoning-08-2025", name: "Command A Reasoning 08 2025" },
 { id: "command-r-08-2024", name: "Command R 08 2024" },
 { id: "command-r7b-arabic-02-2025", name: "Command R7B Arabic 02 2025" },
 { id: "command-r7b-12-2024", name: "Command R7B 12 2024" }
 ],
-[
+[t.MOONSHOT]: [
 { id: "moonshot-v1-128k", name: "Moonshot 128K" },
 { id: "moonshot-v1-32k", name: "Moonshot 32K" },
 { id: "moonshot-v1-8k", name: "Moonshot 8K" },
 { id: "kimi-k2-0711-chat", name: "Kimi K2 Chat" },
 { id: "moonshot-v1-auto", name: "Moonshot Auto" }
 ],
-[
+[t.QWEN]: [
 { id: "qwen-flash", name: "Qwen Flash" },
 { id: "qwen3-vl-plus-2025-12-19", name: "Qwen3 Vl Plus 2025 12 19" },
 { id: "qwen3-tts-vd-realtime-2025-12-16", name: "Qwen3 Tts Vd Realtime 2025 12 16" },
@@ -405,13 +405,13 @@ const E = {
 { id: "qwen-plus-2025-12-01", name: "Qwen Plus 2025 12 01" },
 { id: "qwen3-tts-vc-realtime-2025-11-27", name: "Qwen3 Tts Vc Realtime 2025 11 27" }
 ],
-[
+[t.ZHIPU]: [
 { id: "glm-4.5", name: "Glm 4.5" },
 { id: "glm-4.5-air", name: "Glm 4.5 Air" },
 { id: "glm-4.6", name: "Glm 4.6" },
 { id: "glm-4.7", name: "Glm 4.7" }
 ],
-[
+[t.SILICONFLOW]: [
 { id: "deepseek-ai/DeepSeek-V3.2", name: "Deepseek V3.2" },
 { id: "deepseek-ai/DeepSeek-V3.1-Terminus", name: "Deepseek V3.1 Terminus" },
 { id: "deepseek-ai/DeepSeek-R1", name: "Deepseek R1" },
@@ -421,7 +421,7 @@ const E = {
 { id: "Pro/zai-org/GLM-4.7", name: "Glm 4.7" },
 { id: "Qwen/Qwen3-VL-32B-Instruct", name: "Qwen3 Vl 32B Instruct" }
 ],
-[
+[t.OLLAMA]: [
 { id: "llama3.3", name: "Llama 3.3" },
 { id: "llama3.2", name: "Llama 3.2" },
 { id: "qwq", name: "QwQ (推理)" },
@@ -433,30 +433,33 @@ const E = {
 { id: "phi4", name: "Phi 4" },
 { id: "codellama", name: "Code Llama" }
 ],
-[
+[t.DOUBAO]: [
 { id: "doubao-seed-1-8-251215", name: "Doubao 1.8" },
 { id: "doubao-seed-1-6-251015", name: "Doubao 1.6" },
 { id: "doubao-seed-1-6-lite-251015", name: "Doubao 1.6 Lite" },
 { id: "doubao-seed-1-6-flash-250828", name: "Doubao 1.6 Flash" },
 { id: "doubao-seed-1-6-thinking-250615", name: "Doubao 1.6 Thinking" }
 ],
-[
+[t.MINIMAX]: [
 { id: "MiniMax-M2.1", name: "MiniMax M2.1" },
 { id: "MiniMax-M2.1-lightning", name: "MiniMax M2.1 Lightning" },
 { id: "MiniMax-M2", name: "MiniMax M2" }
 ]
 };
-function
+function y(e) {
 return E[e] || [];
 }
 const O = (e) => Array.isArray(e == null ? void 0 : e.data) ? e.data.filter((a) => a.id).map((a) => {
-const
+const n = a.name || (a.id.split("/").pop() ?? "").replace(/[-_]/g, " ").replace(/\b\w/g, (i) => i.toUpperCase());
 return {
 id: a.id,
-name:
+name: n,
 created: a.created || 0
 };
-}).sort((a,
+}).sort((a, n) => {
+const i = (n.created || 0) - (a.created || 0);
+return i !== 0 ? i : (n.id || "").localeCompare(a.id || "");
+}) : [], k = {
 format: "openai",
 getModelsEndpoint: (e) => `${e}/models`,
 getChatEndpoint: (e) => `${e}/chat/completions`,
@@ -464,14 +467,16 @@ const O = (e) => Array.isArray(e == null ? void 0 : e.data) ? e.data.filter((a)
 "Content-Type": "application/json",
 Authorization: `Bearer ${e}`
 }),
-buildChatPayload: (e, a,
-
-
-
-
+buildChatPayload: (e, a, n) => {
+const i = {
+model: e,
+messages: a
+};
+return n && (i.max_completion_tokens = n), i;
+},
 parseChatResponse: (e) => {
-var a,
-return ((
+var a, n, i;
+return ((i = (n = (a = e.choices) == null ? void 0 : a[0]) == null ? void 0 : n.message) == null ? void 0 : i.content) || "";
 }
 }, P = {
 format: "anthropic",
@@ -481,44 +486,46 @@ const O = (e) => Array.isArray(e == null ? void 0 : e.data) ? e.data.filter((a)
|
|
|
481
486
|
"x-api-key": e,
|
|
482
487
|
"anthropic-version": "2023-06-01"
|
|
483
488
|
}),
|
|
484
|
-
buildChatPayload: (e, a,
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
+
buildChatPayload: (e, a, n) => {
|
|
490
|
+
const i = {
|
|
491
|
+
model: e,
|
|
492
|
+
messages: a
|
|
493
|
+
};
|
|
494
|
+
return n && (i.max_tokens = n), i;
|
|
495
|
+
},
|
|
489
496
|
parseChatResponse: (e) => {
|
|
490
|
-
var a,
|
|
491
|
-
return ((
|
|
497
|
+
var a, n;
|
|
498
|
+
return ((n = (a = e.content) == null ? void 0 : a[0]) == null ? void 0 : n.text) || "";
|
|
492
499
|
}
|
|
493
|
-
},
|
|
500
|
+
}, C = {
|
|
494
501
|
format: "gemini",
|
|
495
502
|
getModelsEndpoint: (e, a) => `${e}/models?key=${a}`,
|
|
496
|
-
getChatEndpoint: (e, a,
|
|
503
|
+
getChatEndpoint: (e, a, n) => `${e}/models/${n}:generateContent?key=${a}`,
|
|
497
504
|
buildHeaders: () => ({
|
|
498
505
|
"Content-Type": "application/json"
|
|
499
506
|
}),
|
|
500
|
-
buildChatPayload: (e, a,
|
|
501
|
-
contents: a.map((
|
|
502
|
-
role:
|
|
503
|
-
parts: [{ text:
|
|
504
|
-
}))
|
|
505
|
-
generationConfig
|
|
506
|
-
}
|
|
507
|
+
buildChatPayload: (e, a, n) => {
|
|
508
|
+
const r = { contents: a.map((s) => ({
|
|
509
|
+
role: s.role === "assistant" ? "model" : "user",
|
|
510
|
+
parts: [{ text: s.content }]
|
|
511
|
+
})) };
|
|
512
|
+
return n && (r.generationConfig = { maxOutputTokens: n }), r;
|
|
513
|
+
},
|
|
507
514
|
parseChatResponse: (e) => {
|
|
508
|
-
var a,
|
|
509
|
-
return ((s = (r = (
|
|
515
|
+
var a, n, i, r, s;
|
|
516
|
+
return ((s = (r = (i = (n = (a = e.candidates) == null ? void 0 : a[0]) == null ? void 0 : n.content) == null ? void 0 : i.parts) == null ? void 0 : r[0]) == null ? void 0 : s.text) || "";
|
|
510
517
|
},
|
|
511
518
|
// Gemini 返回格式: { models: [{ name: "models/gemini-pro", ... }] }
|
|
512
519
|
parseModelsResponse: (e) => Array.isArray(e.models) ? e.models.filter((a) => {
|
|
513
|
-
var
|
|
514
|
-
return (
|
|
520
|
+
var n;
|
|
521
|
+
return (n = a.supportedGenerationMethods) == null ? void 0 : n.includes("generateContent");
|
|
515
522
|
}).map((a) => ({
|
|
516
523
|
id: a.name.replace("models/", ""),
|
|
517
524
|
// "models/gemini-pro" -> "gemini-pro"
|
|
518
525
|
name: a.displayName || a.name.replace("models/", ""),
|
|
519
526
|
created: a.created || 0
|
|
520
|
-
})).sort((a,
|
|
521
|
-
},
|
|
527
|
+
})).sort((a, n) => (n.id || "").localeCompare(a.id || "")) : []
|
|
528
|
+
}, T = {
|
|
522
529
|
format: "cohere",
|
|
523
530
|
getModelsEndpoint: (e) => `${e}/models`,
|
|
524
531
|
getChatEndpoint: (e) => `${e}/chat`,
|
|
@@ -526,53 +533,52 @@ const O = (e) => Array.isArray(e == null ? void 0 : e.data) ? e.data.filter((a)
 "Content-Type": "application/json",
 Authorization: `Bearer ${e}`
 }),
-buildChatPayload: (e, a,
-const
-role:
-message:
-}))
-return {
+buildChatPayload: (e, a, n) => {
+const i = a[a.length - 1], r = a.slice(0, -1).map((c) => ({
+role: c.role === "assistant" ? "CHATBOT" : "USER",
+message: c.content
+})), s = {
 model: e,
-message:
-chat_history: r
-max_tokens: t
+message: i.content,
+chat_history: r
 };
+return n && (s.max_tokens = n), s;
 },
 parseChatResponse: (e) => e.text || ""
 }, B = {
-openai:
+openai: k,
 anthropic: P,
-gemini:
-cohere:
+gemini: C,
+cohere: T
 };
-function
-return B[e] ||
+function v(e) {
+return B[e] || k;
 }
 async function L(e) {
 var h;
-const { apiFormat: a, baseUrl:
+const { apiFormat: a, baseUrl: n, apiKey: i, model: r, messages: s, maxTokens: c } = e, l = v(a), o = l.getChatEndpoint(n, i, r), d = l.buildHeaders(i), u = l.buildChatPayload(r, s, c), g = performance.now();
 try {
-const
+const p = await fetch(o, {
 method: "POST",
 headers: d,
-body: JSON.stringify(
+body: JSON.stringify(u)
 }), f = Math.round(performance.now() - g);
-if (!
+if (!p.ok)
 return {
 success: !1,
-message: ((h = (await
+message: ((h = (await p.json().catch(() => ({}))).error) == null ? void 0 : h.message) || `HTTP ${p.status}: ${p.statusText}`,
 latencyMs: f
 };
-const M = await
+const M = await p.json();
 return {
 success: !0,
 content: l.parseChatResponse(M),
 latencyMs: f
 };
-} catch (
+} catch (p) {
 return {
 success: !1,
-message:
+message: p instanceof Error ? p.message : "网络错误"
 };
 }
 }
@@ -582,9 +588,8 @@ async function G(e) {
 baseUrl: e.baseUrl,
 apiKey: e.apiKey,
 model: e.model,
-messages: [{ role: "user", content: "Hi" }]
-maxTokens: 5
-// 最小 token 数,节省成本
+messages: [{ role: "user", content: "Hi" }]
+// maxTokens: 5, // 不设置 maxTokens 以兼容 o1 等不支持该参数的模型
 });
 return {
 success: a.success,
@@ -593,7 +598,7 @@ async function G(e) {
 };
 }
 async function Q(e) {
-const { provider: a, apiKey:
+const { provider: a, apiKey: n, model: i, baseUrl: r, proxyUrl: s } = e, c = r || a.baseUrl, l = Date.now();
 try {
 if (s) {
 const b = await (await fetch(`${s}/test`, {
@@ -601,8 +606,8 @@ async function Q(e) {
 headers: { "Content-Type": "application/json" },
 body: JSON.stringify({
 provider_id: a.id,
-api_key:
-model:
+api_key: n,
+model: i || "",
 base_url: r || a.baseUrl,
 api_format: a.apiFormat
 })
@@ -613,74 +618,74 @@ async function Q(e) {
 message: b.message
 };
 }
-const
+const o = v(a.apiFormat), d = i || "";
 if (!d)
 return {
 success: !1,
 latencyMs: 0,
 message: "请先选择模型 (Please select a model)"
 };
-const
+const u = o.buildHeaders(n), g = o.buildChatPayload(d, [{ role: "user", content: "Hi" }]), h = o.getChatEndpoint(c, n, d), p = await fetch(h, {
 method: "POST",
-headers:
+headers: u,
 body: JSON.stringify(g)
 }), f = Date.now() - l;
-if (
+if (p.ok)
 return { success: !0, latencyMs: f, message: "连接成功" };
 {
-const M = await
+const M = await p.text();
 return {
 success: !1,
 latencyMs: f,
-message: `HTTP ${
+message: `HTTP ${p.status}: ${M.slice(0, 200)}`
 };
 }
-} catch (
+} catch (o) {
 return {
 success: !1,
 latencyMs: Date.now() - l,
-message:
+message: o instanceof Error ? o.message : String(o)
 };
 }
 }
 async function N(e) {
-var
-const { provider: a, apiKey:
+var c;
+const { provider: a, apiKey: n, baseUrl: i, proxyUrl: r, fallbackToStatic: s = !0 } = e;
 if (r)
 try {
-const
+const o = await (await fetch(`${r}/models`, {
 method: "POST",
 headers: { "Content-Type": "application/json" },
 body: JSON.stringify({
 provider_id: a.id,
-api_key:
-base_url:
+api_key: n || void 0,
+base_url: i || a.baseUrl
 })
 })).json();
-if (
-return
+if (o.success && ((c = o.models) == null ? void 0 : c.length) > 0)
+return o.models;
 } catch (l) {
 if (console.warn("Failed to fetch models via proxy:", l), !s) throw l;
 }
 if (!r && a.supportsModelsApi)
 try {
-const l =
+const l = v(a.apiFormat);
 if (l.getModelsEndpoint) {
-const
+const o = l.getModelsEndpoint(i || a.baseUrl, n || ""), d = l.buildHeaders(n || ""), u = await fetch(o, {
 method: "GET",
 headers: d
 });
-if (
-const g = await
+if (u.ok) {
+const g = await u.json();
 return (l.parseModelsResponse || O)(g);
-} else if (!s) throw new Error(`HTTP ${
+} else if (!s) throw new Error(`HTTP ${u.status}`);
 }
 } catch (l) {
 if (console.warn("Failed to fetch models directly:", l), !s) throw l;
 }
 if (!s && a.supportsModelsApi)
 throw new Error("Failed to fetch models");
-return
+return y(a.id);
 }
 const A = "ai_provider_config", S = {
 get: (e) => typeof window > "u" ? null : localStorage.getItem(e),
@@ -692,14 +697,14 @@ const A = "ai_provider_config", S = {
 }
 };
 function x(e = S, a = {}) {
-const
+const n = a.serialize || JSON.stringify, i = a.deserialize || JSON.parse;
 return {
 /**
 * Save AI config
 */
 save(r) {
 try {
-const s =
+const s = n(r);
 e.set(A, s);
 } catch (s) {
 console.error("Failed to save config:", s);
@@ -712,7 +717,7 @@ function x(e = S, a = {}) {
 const r = e.get(A);
 if (!r) return null;
 try {
-return
+return i(r);
 } catch (s) {
 return console.error("Failed to load config:", s), null;
 }
@@ -729,42 +734,42 @@ function R(e) {
 if (!e)
 return {
 providers: Object.values(w),
-getModels: (
+getModels: (o) => y(o)
 };
-const { mode: a, include:
+const { mode: a, include: n, exclude: i, custom: r } = e;
 let s = [];
 if (a === "default") {
-let
-
+let o = Object.values(w);
+n && n.length > 0 && (o = o.filter((d) => n.includes(d.id))), i && i.length > 0 && (o = o.filter((d) => !i.includes(d.id))), s = [...o];
 }
-const
+const c = {};
 if (r)
-for (const [
-const
-id:
+for (const [o, d] of Object.entries(r)) {
+const u = {
+id: o,
 name: d.name,
 baseUrl: d.baseUrl,
 needsApiKey: d.needsApiKey,
 apiFormat: d.apiFormat,
 supportsModelsApi: d.supportsModelsApi ?? !1,
 icon: d.icon
-}, g = s.findIndex((h) => h.id ===
+}, g = s.findIndex((h) => h.id === o);
 g >= 0 ? s[g] = {
 ...s[g],
-...
-icon:
-} : s.push(
+...u,
+icon: u.icon || s[g].icon
+} : s.push(u), d.models && d.models.length > 0 && (c[o] = d.models);
 }
-return { providers: s, getModels: (
+return { providers: s, getModels: (o) => c[o] ? c[o] : y(o) };
 }
 function H(e, a) {
-const { providers:
-return
+const { providers: n } = R(a);
+return n.find((i) => i.id === e) || null;
 }
 export {
 K as I18N,
 w as PROVIDERS,
-
+t as PROVIDER_ID,
 E as STATIC_MODELS,
 x as createConfigStorage,
 N as fetchModels,
@@ -772,8 +777,8 @@ export {
 U as getProvider,
 H as getProviderFromConfig,
 D as getProvidersByFormat,
-
-
+y as getStaticModels,
+v as getStrategy,
 S as localStorageAdapter,
 R as resolveProviderConfig,
 L as sendDirectChat,