ai-chat-ui-kit 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.eslintrc.cjs +74 -0
- package/.github/actions/screenshot/action.yml +35 -0
- package/.github/workflows/pages.yml +46 -0
- package/README.md +285 -0
- package/docs/README.md +176 -0
- package/docs/api/components.md +344 -0
- package/docs/api/core.md +349 -0
- package/docs/chat-style-1-minimal.html +78 -0
- package/docs/chat-style-2-neon.html +74 -0
- package/docs/chat-style-3-glass.html +73 -0
- package/docs/chat-style-4-terminal.html +84 -0
- package/docs/chat-style-5-gradient.html +69 -0
- package/docs/chat-style-6-corporate.html +116 -0
- package/docs/examples/basic-chat.md +291 -0
- package/docs/examples/custom-plugins.md +431 -0
- package/docs/examples/multi-model.md +466 -0
- package/docs/guide/api-adapters.md +431 -0
- package/docs/guide/getting-started.md +244 -0
- package/docs/guide/headless-mode.md +508 -0
- package/docs/guide/plugins.md +416 -0
- package/docs/guide/themes.md +327 -0
- package/docs/index.html +256 -0
- package/docs/theme-preview-1-minimal.html +74 -0
- package/docs/theme-preview-2-neon.html +73 -0
- package/docs/theme-preview-3-glass.html +77 -0
- package/docs/theme-preview-4-terminal.html +86 -0
- package/docs/theme-preview-5-gradient.html +79 -0
- package/docs/theme-preview-6-corporate.html +71 -0
- package/examples/index.html +414 -0
- package/examples/react-app/App.tsx +131 -0
- package/examples/react-app/index.html +12 -0
- package/examples/react-app/main.tsx +15 -0
- package/examples/react-app/package.json +24 -0
- package/examples/vue-app/index.html +12 -0
- package/examples/vue-app/package.json +22 -0
- package/examples/vue-app/src/App.vue +145 -0
- package/examples/vue-app/src/main.ts +9 -0
- package/package.json +44 -0
- package/packages/components/package.json +25 -0
- package/packages/components/src/chat/chat.css +80 -0
- package/packages/components/src/chat/chat.ts +236 -0
- package/packages/components/src/index.ts +36 -0
- package/packages/components/src/input/input.css +52 -0
- package/packages/components/src/input/input.ts +116 -0
- package/packages/components/src/markdown/markdown.css +118 -0
- package/packages/components/src/markdown/markdown.ts +229 -0
- package/packages/components/src/message/message.css +56 -0
- package/packages/components/src/message/message.ts +72 -0
- package/packages/components/src/styles/global.css +43 -0
- package/packages/components/src/tool-call/tool-call.css +98 -0
- package/packages/components/src/tool-call/tool-call.ts +171 -0
- package/packages/components/src/types.ts +55 -0
- package/packages/components/src/utils/helpers.ts +128 -0
- package/packages/components/tsconfig.json +25 -0
- package/packages/components/tsup.config.ts +18 -0
- package/packages/core/package.json +47 -0
- package/packages/core/pnpm-lock.yaml +2032 -0
- package/packages/core/pnpm-workspace.yaml +2 -0
- package/packages/core/src/api/adapters.ts +717 -0
- package/packages/core/src/api/base.ts +210 -0
- package/packages/core/src/api/index.ts +54 -0
- package/packages/core/src/index.ts +93 -0
- package/packages/core/src/parser/latex.ts +274 -0
- package/packages/core/src/parser/markdown.test.ts +58 -0
- package/packages/core/src/parser/markdown.ts +206 -0
- package/packages/core/src/parser/mermaid.ts +276 -0
- package/packages/core/src/plugins/PluginManager.ts +232 -0
- package/packages/core/src/plugins/builtin.ts +406 -0
- package/packages/core/src/store/ChatStore.ts +163 -0
- package/packages/core/src/store/ModelConfigStore.ts +136 -0
- package/packages/core/src/store/ToolCallStore.ts +164 -0
- package/packages/core/src/store/base.ts +75 -0
- package/packages/core/src/types/index.ts +133 -0
- package/packages/core/tsup.config.ts +18 -0
- package/packages/themes/package.json +33 -0
- package/packages/themes/src/corporate/index.ts +52 -0
- package/packages/themes/src/corporate/theme.css +228 -0
- package/packages/themes/src/glass/index.ts +52 -0
- package/packages/themes/src/glass/theme.css +237 -0
- package/packages/themes/src/gradient/index.ts +53 -0
- package/packages/themes/src/gradient/theme.css +218 -0
- package/packages/themes/src/index.ts +13 -0
- package/packages/themes/src/minimal/index.ts +52 -0
- package/packages/themes/src/minimal/theme.css +198 -0
- package/packages/themes/src/neon/index.ts +52 -0
- package/packages/themes/src/neon/theme.css +233 -0
- package/packages/themes/src/terminal/index.ts +52 -0
- package/packages/themes/src/terminal/theme.css +235 -0
- package/packages/themes/src/types.ts +10 -0
- package/packages/themes/src/vite-env.d.ts +9 -0
- package/packages/themes/tsup.config.ts +21 -0
- package/pnpm-workspace.yaml +4 -0
- package/tsconfig.json +27 -0
- package/vite.config.ts +25 -0
- package/vitest.config.ts +28 -0
|
@@ -0,0 +1,466 @@
|
|
|
1
|
+
# 多模型支持示例
|
|
2
|
+
|
|
3
|
+
本示例将指导您如何切换不同的 AI 模型(如 GPT-4、Claude、本地模型等)。
|
|
4
|
+
|
|
5
|
+
## 基本思路
|
|
6
|
+
|
|
7
|
+
通过 API 适配器,您可以轻松切换不同的 AI 模型。
|
|
8
|
+
|
|
9
|
+
## 示例 1:切换 OpenAI 模型
|
|
10
|
+
|
|
11
|
+
### 创建多模型适配器
|
|
12
|
+
|
|
13
|
+
```typescript
|
|
14
|
+
// adapters/multi-model.ts
|
|
15
|
+
import { APIAdapter, ChatRequest, ChatResponse } from '@ai-chat/core';
|
|
16
|
+
|
|
17
|
+
export class MultiModelAdapter implements APIAdapter {
|
|
18
|
+
private apiKey: string;
|
|
19
|
+
private baseURL: string;
|
|
20
|
+
private currentModel: string;
|
|
21
|
+
private availableModels: string[];
|
|
22
|
+
|
|
23
|
+
constructor(config: { apiKey: string; baseURL?: string }) {
|
|
24
|
+
this.apiKey = config.apiKey;
|
|
25
|
+
this.baseURL = config.baseURL || 'https://api.openai.com/v1';
|
|
26
|
+
this.currentModel = 'gpt-3.5-turbo';
|
|
27
|
+
this.availableModels = [
|
|
28
|
+
'gpt-3.5-turbo',
|
|
29
|
+
'gpt-4',
|
|
30
|
+
'gpt-4-turbo',
|
|
31
|
+
'gpt-4o',
|
|
32
|
+
];
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
// 切换模型
|
|
36
|
+
setModel(model: string): void {
|
|
37
|
+
if (!this.availableModels.includes(model)) {
|
|
38
|
+
throw new Error(`Model ${model} not available`);
|
|
39
|
+
}
|
|
40
|
+
this.currentModel = model;
|
|
41
|
+
console.log(`Switched to model: ${model}`);
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
// 获取当前模型
|
|
45
|
+
getModel(): string {
|
|
46
|
+
return this.currentModel;
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
// 获取可用模型列表
|
|
50
|
+
getAvailableModels(): string[] {
|
|
51
|
+
return [...this.availableModels];
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
async sendMessage(request: ChatRequest): Promise<ChatResponse> {
|
|
55
|
+
const model = request.model || this.currentModel;
|
|
56
|
+
|
|
57
|
+
const response = await fetch(`${this.baseURL}/chat/completions`, {
|
|
58
|
+
method: 'POST',
|
|
59
|
+
headers: {
|
|
60
|
+
'Content-Type': 'application/json',
|
|
61
|
+
'Authorization': `Bearer ${this.apiKey}`,
|
|
62
|
+
},
|
|
63
|
+
body: JSON.stringify({
|
|
64
|
+
model,
|
|
65
|
+
messages: [
|
|
66
|
+
...(request.history || []).map(msg => ({
|
|
67
|
+
role: msg.role,
|
|
68
|
+
content: msg.content,
|
|
69
|
+
})),
|
|
70
|
+
{ role: 'user', content: request.message },
|
|
71
|
+
],
|
|
72
|
+
temperature: request.temperature,
|
|
73
|
+
max_tokens: request.maxTokens,
|
|
74
|
+
}),
|
|
75
|
+
});
|
|
76
|
+
|
|
77
|
+
if (!response.ok) {
|
|
78
|
+
throw new Error(`OpenAI API error: ${response.statusText}`);
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
const data = await response.json();
|
|
82
|
+
|
|
83
|
+
return {
|
|
84
|
+
content: data.choices[0].message.content,
|
|
85
|
+
role: 'assistant',
|
|
86
|
+
usage: {
|
|
87
|
+
promptTokens: data.usage.prompt_tokens,
|
|
88
|
+
completionTokens: data.usage.completion_tokens,
|
|
89
|
+
totalTokens: data.usage.total_tokens,
|
|
90
|
+
},
|
|
91
|
+
};
|
|
92
|
+
}
|
|
93
|
+
}
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
### 使用多模型适配器
|
|
97
|
+
|
|
98
|
+
```typescript
|
|
99
|
+
// main.ts
|
|
100
|
+
import { createChatStore } from '@ai-chat/core';
|
|
101
|
+
import { MultiModelAdapter } from './adapters/multi-model';
|
|
102
|
+
|
|
103
|
+
// 创建适配器实例
|
|
104
|
+
const adapter = new MultiModelAdapter({
|
|
105
|
+
apiKey: process.env.OPENAI_API_KEY,
|
|
106
|
+
});
|
|
107
|
+
|
|
108
|
+
// 创建 store
|
|
109
|
+
const chatStore = createChatStore({
|
|
110
|
+
async onMessage(message: string) {
|
|
111
|
+
const response = await adapter.sendMessage({
|
|
112
|
+
message,
|
|
113
|
+
history: chatStore.getMessages(),
|
|
114
|
+
});
|
|
115
|
+
return response.content;
|
|
116
|
+
},
|
|
117
|
+
});
|
|
118
|
+
|
|
119
|
+
// 切换模型
|
|
120
|
+
adapter.setModel('gpt-4'); // 切换到 GPT-4
|
|
121
|
+
adapter.setModel('gpt-3.5-turbo'); // 切换回 GPT-3.5
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
### React 中切换模型
|
|
125
|
+
|
|
126
|
+
```tsx
|
|
127
|
+
// ModelSelector.tsx
|
|
128
|
+
import React, { useState } from 'react';
|
|
129
|
+
import { MultiModelAdapter } from './adapters/multi-model';
|
|
130
|
+
|
|
131
|
+
interface ModelSelectorProps {
|
|
132
|
+
adapter: MultiModelAdapter;
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
const ModelSelector: React.FC<ModelSelectorProps> = ({ adapter }) => {
|
|
136
|
+
const [currentModel, setCurrentModel] = useState(adapter.getModel());
|
|
137
|
+
const models = adapter.getAvailableModels();
|
|
138
|
+
|
|
139
|
+
const handleModelChange = (model: string) => {
|
|
140
|
+
adapter.setModel(model);
|
|
141
|
+
setCurrentModel(model);
|
|
142
|
+
};
|
|
143
|
+
|
|
144
|
+
return (
|
|
145
|
+
<div style={styles.container}>
|
|
146
|
+
<label style={styles.label}>选择模型:</label>
|
|
147
|
+
<select
|
|
148
|
+
style={styles.select}
|
|
149
|
+
value={currentModel}
|
|
150
|
+
onChange={(e) => handleModelChange(e.target.value)}
|
|
151
|
+
>
|
|
152
|
+
{models.map((model) => (
|
|
153
|
+
<option key={model} value={model}>
|
|
154
|
+
{model}
|
|
155
|
+
</option>
|
|
156
|
+
))}
|
|
157
|
+
</select>
|
|
158
|
+
</div>
|
|
159
|
+
);
|
|
160
|
+
};
|
|
161
|
+
|
|
162
|
+
const styles = {
|
|
163
|
+
container: {
|
|
164
|
+
padding: '8px 16px',
|
|
165
|
+
backgroundColor: '#f5f5f5',
|
|
166
|
+
borderBottom: '1px solid #e8e8e8',
|
|
167
|
+
},
|
|
168
|
+
label: {
|
|
169
|
+
marginRight: '8px',
|
|
170
|
+
fontSize: '14px',
|
|
171
|
+
},
|
|
172
|
+
select: {
|
|
173
|
+
padding: '4px 8px',
|
|
174
|
+
borderRadius: '4px',
|
|
175
|
+
border: '1px solid #d9d9d9',
|
|
176
|
+
fontSize: '14px',
|
|
177
|
+
},
|
|
178
|
+
};
|
|
179
|
+
|
|
180
|
+
export default ModelSelector;
|
|
181
|
+
```
|
|
182
|
+
|
|
183
|
+
```tsx
|
|
184
|
+
// ChatApp.tsx
|
|
185
|
+
import React, { useEffect, useRef } from 'react';
|
|
186
|
+
import '@ai-chat/components';
|
|
187
|
+
import '@ai-chat/themes/default';
|
|
188
|
+
import { createChatStore } from '@ai-chat/core';
|
|
189
|
+
import { MultiModelAdapter } from './adapters/multi-model';
|
|
190
|
+
import ModelSelector from './ModelSelector';
|
|
191
|
+
|
|
192
|
+
const ChatApp: React.FC = () => {
|
|
193
|
+
const chatRef = useRef<HTMLElement>(null);
|
|
194
|
+
const adapterRef = useRef<MultiModelAdapter>();
|
|
195
|
+
|
|
196
|
+
useEffect(() => {
|
|
197
|
+
// 创建适配器
|
|
198
|
+
adapterRef.current = new MultiModelAdapter({
|
|
199
|
+
apiKey: process.env.OPENAI_API_KEY,
|
|
200
|
+
});
|
|
201
|
+
|
|
202
|
+
if (chatRef.current) {
|
|
203
|
+
(chatRef.current as any).setMessageHandler(async (message: string) => {
|
|
204
|
+
const response = await adapterRef.current!.sendMessage({
|
|
205
|
+
message,
|
|
206
|
+
history: (chatRef.current as any).getMessages(),
|
|
207
|
+
});
|
|
208
|
+
return response.content;
|
|
209
|
+
});
|
|
210
|
+
}
|
|
211
|
+
}, []);
|
|
212
|
+
|
|
213
|
+
return (
|
|
214
|
+
<div style={{ height: '100vh' }}>
|
|
215
|
+
<ModelSelector adapter={adapterRef.current!} />
|
|
216
|
+
<ai-chat ref={chatRef}></ai-chat>
|
|
217
|
+
</div>
|
|
218
|
+
);
|
|
219
|
+
};
|
|
220
|
+
|
|
221
|
+
export default ChatApp;
|
|
222
|
+
```
|
|
223
|
+
|
|
224
|
+
## 示例 2:混合使用多个 AI 服务
|
|
225
|
+
|
|
226
|
+
您可以在不同场景下使用不同的 AI 模型。
|
|
227
|
+
|
|
228
|
+
### 创建路由适配器
|
|
229
|
+
|
|
230
|
+
```typescript
|
|
231
|
+
// adapters/router.ts
|
|
232
|
+
import { APIAdapter, ChatRequest, ChatResponse } from '@ai-chat/core';
|
|
233
|
+
|
|
234
|
+
export class ModelRouterAdapter implements APIAdapter {
|
|
235
|
+
private adapters: Map<string, APIAdapter> = new Map();
|
|
236
|
+
private rules: Array<{ pattern: RegExp; model: string }> = [];
|
|
237
|
+
|
|
238
|
+
// 注册适配器
|
|
239
|
+
register(model: string, adapter: APIAdapter): void {
|
|
240
|
+
this.adapters.set(model, adapter);
|
|
241
|
+
}
|
|
242
|
+
|
|
243
|
+
// 添加路由规则
|
|
244
|
+
addRule(pattern: RegExp, model: string): void {
|
|
245
|
+
this.rules.push({ pattern, model });
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
// 根据消息内容路由到合适的模型
|
|
249
|
+
private route(message: string): string {
|
|
250
|
+
for (const rule of this.rules) {
|
|
251
|
+
if (rule.pattern.test(message)) {
|
|
252
|
+
return rule.model;
|
|
253
|
+
}
|
|
254
|
+
}
|
|
255
|
+
return 'default'; // 默认模型
|
|
256
|
+
}
|
|
257
|
+
|
|
258
|
+
async sendMessage(request: ChatRequest): Promise<ChatResponse> {
|
|
259
|
+
// 1. 如果 request 指定了模型,使用指定模型
|
|
260
|
+
if (request.model) {
|
|
261
|
+
const adapter = this.adapters.get(request.model);
|
|
262
|
+
if (!adapter) {
|
|
263
|
+
throw new Error(`Model ${request.model} not found`);
|
|
264
|
+
}
|
|
265
|
+
return adapter.sendMessage(request);
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
// 2. 否则根据路由规则选择模型
|
|
269
|
+
const model = this.route(request.message);
|
|
270
|
+
const adapter = this.adapters.get(model);
|
|
271
|
+
if (!adapter) {
|
|
272
|
+
throw new Error(`Model ${model} not found`);
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
return adapter.sendMessage({ ...request, model });
|
|
276
|
+
}
|
|
277
|
+
}
|
|
278
|
+
```
|
|
279
|
+
|
|
280
|
+
### 使用路由适配器
|
|
281
|
+
|
|
282
|
+
```typescript
|
|
283
|
+
import { ModelRouterAdapter } from './adapters/router';
|
|
284
|
+
import { OpenAIAdapter } from './adapters/openai';
|
|
285
|
+
import { ClaudeAdapter } from './adapters/claude';
|
|
286
|
+
|
|
287
|
+
// 创建路由适配器
|
|
288
|
+
const router = new ModelRouterAdapter();
|
|
289
|
+
|
|
290
|
+
// 注册适配器
|
|
291
|
+
router.register('gpt-4', new OpenAIAdapter({
|
|
292
|
+
apiKey: process.env.OPENAI_API_KEY,
|
|
293
|
+
model: 'gpt-4',
|
|
294
|
+
}));
|
|
295
|
+
|
|
296
|
+
router.register('claude', new ClaudeAdapter({
|
|
297
|
+
apiKey: process.env.CLAUDE_API_KEY,
|
|
298
|
+
model: 'claude-3-opus-20240229',
|
|
299
|
+
}));
|
|
300
|
+
|
|
301
|
+
router.register('default', new OpenAIAdapter({
|
|
302
|
+
apiKey: process.env.OPENAI_API_KEY,
|
|
303
|
+
model: 'gpt-3.5-turbo',
|
|
304
|
+
}));
|
|
305
|
+
|
|
306
|
+
// 添加路由规则
|
|
307
|
+
router.addRule(/代码|编程|algorithm/i, 'gpt-4'); // 编程问题用 GPT-4
|
|
308
|
+
router.addRule(/写作|文章|总结/i, 'claude'); // 写作问题用 Claude
|
|
309
|
+
|
|
310
|
+
// 使用
|
|
311
|
+
const chatStore = createChatStore({
|
|
312
|
+
async onMessage(message: string) {
|
|
313
|
+
const response = await router.sendMessage({ message });
|
|
314
|
+
return response.content;
|
|
315
|
+
},
|
|
316
|
+
});
|
|
317
|
+
```
|
|
318
|
+
|
|
319
|
+
## 示例 3:本地模型支持
|
|
320
|
+
|
|
321
|
+
使用 Ollama 或 LM Studio 运行本地模型。
|
|
322
|
+
|
|
323
|
+
### 创建本地模型适配器
|
|
324
|
+
|
|
325
|
+
```typescript
|
|
326
|
+
// adapters/local-model.ts
|
|
327
|
+
import { APIAdapter, ChatRequest, ChatResponse } from '@ai-chat/core';
|
|
328
|
+
|
|
329
|
+
export class LocalModelAdapter implements APIAdapter {
|
|
330
|
+
private baseURL: string;
|
|
331
|
+
private model: string;
|
|
332
|
+
|
|
333
|
+
constructor(config: { baseURL?: string; model?: string }) {
|
|
334
|
+
this.baseURL = config.baseURL || 'http://localhost:11434'; // Ollama 默认端口
|
|
335
|
+
this.model = config.model || 'llama3';
|
|
336
|
+
}
|
|
337
|
+
|
|
338
|
+
async sendMessage(request: ChatRequest): Promise<ChatResponse> {
|
|
339
|
+
// Ollama API 格式
|
|
340
|
+
const response = await fetch(`${this.baseURL}/api/chat`, {
|
|
341
|
+
method: 'POST',
|
|
342
|
+
headers: { 'Content-Type': 'application/json' },
|
|
343
|
+
body: JSON.stringify({
|
|
344
|
+
model: this.model,
|
|
345
|
+
messages: [
|
|
346
|
+
...(request.history || []).map(msg => ({
|
|
347
|
+
role: msg.role,
|
|
348
|
+
content: msg.content,
|
|
349
|
+
})),
|
|
350
|
+
{ role: 'user', content: request.message },
|
|
351
|
+
],
|
|
352
|
+
stream: false,
|
|
353
|
+
}),
|
|
354
|
+
});
|
|
355
|
+
|
|
356
|
+
if (!response.ok) {
|
|
357
|
+
throw new Error(`Local model API error: ${response.statusText}`);
|
|
358
|
+
}
|
|
359
|
+
|
|
360
|
+
const data = await response.json();
|
|
361
|
+
|
|
362
|
+
return {
|
|
363
|
+
content: data.message.content,
|
|
364
|
+
role: 'assistant',
|
|
365
|
+
};
|
|
366
|
+
}
|
|
367
|
+
}
|
|
368
|
+
```
|
|
369
|
+
|
|
370
|
+
### 使用本地模型
|
|
371
|
+
|
|
372
|
+
```typescript
|
|
373
|
+
import { LocalModelAdapter } from './adapters/local-model';
|
|
374
|
+
|
|
375
|
+
// 使用 Ollama
|
|
376
|
+
const localAdapter = new LocalModelAdapter({
|
|
377
|
+
baseURL: 'http://localhost:11434',
|
|
378
|
+
model: 'llama3',
|
|
379
|
+
});
|
|
380
|
+
|
|
381
|
+
// 使用 LM Studio
|
|
382
|
+
const lmStudioAdapter = new LocalModelAdapter({
|
|
383
|
+
baseURL: 'http://localhost:1234/v1', // LM Studio 默认端口
|
|
384
|
+
model: 'local-model',
|
|
385
|
+
});
|
|
386
|
+
|
|
387
|
+
const chatStore = createChatStore({
|
|
388
|
+
async onMessage(message: string) {
|
|
389
|
+
const response = await localAdapter.sendMessage({ message });
|
|
390
|
+
return response.content;
|
|
391
|
+
},
|
|
392
|
+
});
|
|
393
|
+
```
|
|
394
|
+
|
|
395
|
+
## 示例 4:fallback 机制
|
|
396
|
+
|
|
397
|
+
当主模型不可用时,自动 fallback 到备用模型。
|
|
398
|
+
|
|
399
|
+
### 创建 Fallback 适配器
|
|
400
|
+
|
|
401
|
+
```typescript
|
|
402
|
+
// adapters/fallback.ts
|
|
403
|
+
import { APIAdapter, ChatRequest, ChatResponse } from '@ai-chat/core';
|
|
404
|
+
|
|
405
|
+
export class FallbackAdapter implements APIAdapter {
|
|
406
|
+
private adapters: APIAdapter[];
|
|
407
|
+
private errors: Map<number, number> = new Map(); // 适配器索引 -> 错误计数
|
|
408
|
+
private maxErrors: number;
|
|
409
|
+
|
|
410
|
+
constructor(adapters: APIAdapter[], maxErrors: number = 3) {
|
|
411
|
+
this.adapters = adapters;
|
|
412
|
+
this.maxErrors = maxErrors;
|
|
413
|
+
}
|
|
414
|
+
|
|
415
|
+
async sendMessage(request: ChatRequest): Promise<ChatResponse> {
|
|
416
|
+
// 按优先级尝试所有适配器
|
|
417
|
+
for (let i = 0; i < this.adapters.length; i++) {
|
|
418
|
+
// 检查错误计数
|
|
419
|
+
if ((this.errors.get(i) || 0) >= this.maxErrors) {
|
|
420
|
+
console.warn(`Adapter ${i} has too many errors, skipping...`);
|
|
421
|
+
continue;
|
|
422
|
+
}
|
|
423
|
+
|
|
424
|
+
try {
|
|
425
|
+
const response = await this.adapters[i].sendMessage(request);
|
|
426
|
+
// 成功,重置错误计数
|
|
427
|
+
this.errors.set(i, 0);
|
|
428
|
+
return response;
|
|
429
|
+
} catch (error) {
|
|
430
|
+
console.error(`Adapter ${i} failed:`, error);
|
|
431
|
+
// 增加错误计数
|
|
432
|
+
this.errors.set(i, (this.errors.get(i) || 0) + 1);
|
|
433
|
+
// 继续尝试下一个适配器
|
|
434
|
+
}
|
|
435
|
+
}
|
|
436
|
+
|
|
437
|
+
throw new Error('All adapters failed');
|
|
438
|
+
}
|
|
439
|
+
}
|
|
440
|
+
```
|
|
441
|
+
|
|
442
|
+
### 使用 Fallback 适配器
|
|
443
|
+
|
|
444
|
+
```typescript
|
|
445
|
+
import { FallbackAdapter } from './adapters/fallback';
|
|
446
|
+
import { OpenAIAdapter } from './adapters/openai';
|
|
447
|
+
import { LocalModelAdapter } from './adapters/local-model';
|
|
448
|
+
|
|
449
|
+
const fallbackAdapter = new FallbackAdapter([
|
|
450
|
+
new OpenAIAdapter({ apiKey: process.env.OPENAI_API_KEY }), // 优先使用 OpenAI
|
|
451
|
+
new LocalModelAdapter(), // OpenAI 失败时,使用本地模型
|
|
452
|
+
]);
|
|
453
|
+
|
|
454
|
+
const chatStore = createChatStore({
|
|
455
|
+
async onMessage(message: string) {
|
|
456
|
+
const response = await fallbackAdapter.sendMessage({ message });
|
|
457
|
+
return response.content;
|
|
458
|
+
},
|
|
459
|
+
});
|
|
460
|
+
```
|
|
461
|
+
|
|
462
|
+
## 下一步
|
|
463
|
+
|
|
464
|
+
- [自定义插件](../guide/plugins.md)
|
|
465
|
+
- [基本聊天](./basic-chat.md)
|
|
466
|
+
- [API 适配器指南](../guide/api-adapters.md)
|