@opentiny/tiny-robot-kit 0.2.0-alpha.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +164 -0
- package/dist/index.d.mts +351 -0
- package/dist/index.d.ts +351 -0
- package/dist/index.js +561 -0
- package/dist/index.mjs +524 -0
- package/package.json +26 -0
- package/src/client.ts +101 -0
- package/src/error.ts +100 -0
- package/src/index.ts +10 -0
- package/src/providers/base.ts +62 -0
- package/src/providers/openai.ts +134 -0
- package/src/types.ts +163 -0
- package/src/utils.ts +125 -0
- package/src/vue/index.ts +1 -0
- package/src/vue/useMessage.ts +227 -0
- package/tsconfig.json +21 -0
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,351 @@
|
|
|
1
|
+
import { Ref, Reactive } from 'vue';
|
|
2
|
+
|
|
3
|
+
/**
 * Message role type — the author of a chat message.
 */
type MessageRole = 'system' | 'user' | 'assistant';
/**
 * A single chat message.
 */
interface ChatMessage {
    role: MessageRole;
    content: string;
    /** Optional participant name attached to the message. */
    name?: string;
}
/**
 * Chat history: an ordered list of messages.
 */
type ChatHistory = ChatMessage[];
/**
 * Options for a chat-completion request.
 * Field names follow the OpenAI-style wire format (snake_case).
 */
interface ChatCompletionOptions {
    model?: string;
    temperature?: number;
    top_p?: number;
    n?: number;
    /** When true, the response is delivered as a stream of chunks. */
    stream?: boolean;
    max_tokens?: number;
    /** Allows the caller to cancel the in-flight request. */
    signal?: AbortSignal;
}
/**
 * Chat-completion request payload.
 */
interface ChatCompletionRequest {
    messages: ChatMessage[];
    /** Per-request options; presumably merged over the config defaults — confirm in provider implementation. */
    options?: ChatCompletionOptions;
}
|
|
38
|
+
/**
 * Message carried inside a (non-streaming) completion choice.
 */
interface ChatCompletionResponseMessage {
    role: MessageRole;
    content: string;
}
/**
 * A single completion choice in a response.
 */
interface ChatCompletionResponseChoice {
    index: number;
    message: ChatCompletionResponseMessage;
    finish_reason: string;
}
/**
 * Token-usage accounting for a completion response.
 */
interface ChatCompletionResponseUsage {
    prompt_tokens: number;
    completion_tokens: number;
    total_tokens: number;
}
/**
 * Full (non-streaming) chat-completion response.
 * Shape mirrors the OpenAI chat-completions response object.
 */
interface ChatCompletionResponse {
    id: string;
    object: string;
    /** Creation timestamp (Unix seconds in the OpenAI format — confirm for custom providers). */
    created: number;
    model: string;
    choices: ChatCompletionResponseChoice[];
    usage: ChatCompletionResponseUsage;
}
|
|
72
|
+
/**
 * Incremental delta within a streaming completion chunk.
 * Both fields are optional: a chunk may carry only a role, only content, or neither.
 */
interface ChatCompletionStreamResponseDelta {
    content?: string;
    role?: MessageRole;
}
/**
 * A single choice within a streaming completion chunk.
 */
interface ChatCompletionStreamResponseChoice {
    index: number;
    delta: ChatCompletionStreamResponseDelta;
    /** Null while the stream is still producing; set on the final chunk. */
    finish_reason: string | null;
}
/**
 * One chunk of a streaming chat-completion response.
 */
interface ChatCompletionStreamResponse {
    id: string;
    object: string;
    created: number;
    model: string;
    choices: ChatCompletionStreamResponseChoice[];
}
|
|
97
|
+
/**
 * Supported AI model provider identifiers.
 */
type AIProvider = 'openai' | 'deepseek' | 'custom';
/**
 * AI model configuration.
 */
interface AIModelConfig {
    provider: AIProvider;
    apiKey?: string;
    /** Base URL of the provider API endpoint. */
    apiUrl?: string;
    apiVersion?: string;
    /** Model used when a request does not specify one. */
    defaultModel?: string;
    /** Default request options; presumably overridable per request — confirm in client implementation. */
    defaultOptions?: ChatCompletionOptions;
}
/**
 * Error categories surfaced by the adapter.
 */
declare enum ErrorType {
    NETWORK_ERROR = "network_error",
    AUTHENTICATION_ERROR = "authentication_error",
    RATE_LIMIT_ERROR = "rate_limit_error",
    SERVER_ERROR = "server_error",
    MODEL_ERROR = "model_error",
    TIMEOUT_ERROR = "timeout_error",
    UNKNOWN_ERROR = "unknown_error"
}
/**
 * Normalized error reported by the adapter.
 */
interface AIAdapterError {
    type: ErrorType;
    message: string;
    /** HTTP status code, when the failure came from an HTTP response. */
    statusCode?: number;
    /** The underlying error object, preserved for debugging. */
    originalError?: object;
}
|
|
133
|
+
/**
 * Streaming response event types.
 */
declare enum StreamEventType {
    DATA = "data",
    ERROR = "error",
    DONE = "done"
}
/**
 * Callbacks for consuming a streaming response.
 */
interface StreamHandler {
    /** Invoked for each incoming chunk. */
    onData: (data: ChatCompletionStreamResponse) => void;
    /** Invoked when the stream fails with a normalized adapter error. */
    onError: (error: AIAdapterError) => void;
    /** Invoked once the stream has completed. */
    onDone: () => void;
}
|
|
149
|
+
|
|
150
|
+
/**
 * AI client.
 * Selects the appropriate provider from the configuration and dispatches
 * chat requests to it.
 */
declare class AIClient {
    private provider;
    private config;
    /**
     * @param config AI model configuration
     */
    constructor(config: AIModelConfig);
    /**
     * Creates the provider instance matching the configured provider type.
     */
    private createProvider;
    /**
     * Sends a chat request and resolves with the full response.
     * @param request chat request parameters
     * @returns the chat-completion response
     */
    chat(request: ChatCompletionRequest): Promise<ChatCompletionResponse>;
    /**
     * Sends a streaming chat request; chunks, errors, and completion are
     * delivered through the handler callbacks.
     * @param request chat request parameters
     * @param handler stream event handler
     */
    chatStream(request: ChatCompletionRequest, handler: StreamHandler): Promise<void>;
    /**
     * Returns the current configuration.
     */
    getConfig(): AIModelConfig;
    /**
     * Updates the configuration with the given partial config.
     * NOTE(review): takes Partial<AIModelConfig>, unlike
     * BaseModelProvider.updateConfig which takes a full config.
     * @param config new (partial) AI model configuration
     */
    updateConfig(config: Partial<AIModelConfig>): void;
}
|
|
195
|
+
|
|
196
|
+
/**
 * Abstract base class for model providers.
 * Concrete providers implement the chat and chatStream request paths.
 */
declare abstract class BaseModelProvider {
    protected config: AIModelConfig;
    /**
     * @param config AI model configuration
     */
    constructor(config: AIModelConfig);
    /**
     * Sends a chat request and resolves with the full response.
     * @param request chat request parameters
     * @returns the chat-completion response
     */
    abstract chat(request: ChatCompletionRequest): Promise<ChatCompletionResponse>;
    /**
     * Sends a streaming chat request; responses are delivered through the handler.
     * @param request chat request parameters
     * @param handler stream event handler
     */
    abstract chatStream(request: ChatCompletionRequest, handler: StreamHandler): Promise<void>;
    /**
     * Replaces the current configuration.
     * @param config new AI model configuration
     */
    updateConfig(config: AIModelConfig): void;
    /**
     * Returns the current configuration.
     */
    getConfig(): AIModelConfig;
    /**
     * Validates request parameters; presumably throws on invalid input — confirm in implementation.
     * @param request chat request parameters
     */
    protected validateRequest(request: ChatCompletionRequest): void;
}
|
|
233
|
+
|
|
234
|
+
/**
 * OpenAI provider.
 * Talks to the OpenAI (or OpenAI-compatible) chat-completions API.
 */
declare class OpenAIProvider extends BaseModelProvider {
    private baseURL;
    private apiKey;
    private defaultModel;
    /**
     * @param config AI model configuration
     */
    constructor(config: AIModelConfig);
    /**
     * Sends a chat request and resolves with the full response.
     * @param request chat request parameters
     * @returns the chat-completion response
     */
    chat(request: ChatCompletionRequest): Promise<ChatCompletionResponse>;
    /**
     * Sends a streaming chat request; responses are delivered through the handler.
     * @param request chat request parameters
     * @param handler stream event handler
     */
    chatStream(request: ChatCompletionRequest, handler: StreamHandler): Promise<void>;
    /**
     * Replaces the current configuration.
     * @param config new AI model configuration
     */
    updateConfig(config: AIModelConfig): void;
}
|
|
265
|
+
|
|
266
|
+
/**
 * Utility helpers.
 */

/**
 * Normalizes mixed input (plain strings or ChatMessage objects) into the
 * standard ChatMessage[] shape. Plain strings are presumably wrapped as
 * user-role messages — confirm against the implementation in src/utils.ts.
 * @param messages array of messages or raw strings
 * @returns messages in standard ChatMessage format
 */
declare function formatMessages(messages: Array<ChatMessage | string>): ChatMessage[];
/**
 * Extracts the text content from a chat-completion response.
 * @param response the chat-completion response
 * @returns the extracted text content
 */
declare function extractTextFromResponse(response: ChatCompletionResponse): string;
|
|
284
|
+
|
|
285
|
+
/**
 * useMessage composable support types:
 * message management and request-state control.
 */

declare enum STATUS {
    INIT = "init", // initial state
    PROCESSING = "processing", // request sent, no response yet (show loading indicator)
    STREAMING = "streaming", // streaming chunks are arriving
    FINISHED = "finished", // request completed
    ABORTED = "aborted", // request aborted by the user
    ERROR = "error"
}
/** Statuses during which a response is still being generated — presumably PROCESSING/STREAMING; confirm in useMessage.ts. */
declare const GeneratingStatus: STATUS[];
/** Terminal statuses after which no more output will arrive. */
declare const FinalStatus: STATUS[];
/**
 * Message request state.
 */
interface MessageState {
    status: STATUS;
    /** Error message when status is ERROR; otherwise null. */
    errorMsg: string | null;
}
/**
 * Options accepted by useMessage.
 */
interface UseMessageOptions {
    /** AI client instance. */
    client: AIClient;
    /** Whether to use streaming responses by default. */
    useStreamByDefault?: boolean;
    /** Error message template. */
    errorMessage?: string;
    /** Initial message list. */
    initialMessages?: ChatMessage[];
}
/**
 * Value returned by useMessage.
 */
interface UseMessageReturn {
    /** Reactive message list. */
    messages: Ref<ChatMessage[]>;
    /** Message request state. */
    messageState: Reactive<MessageState>;
    /** Current input text. */
    inputMessage: Ref<string>;
    /** Whether streaming responses are used. */
    useStream: Ref<boolean>;
    /** Sends a message; defaults to the current input when content is omitted — confirm clearInput semantics in implementation. */
    sendMessage: (content?: string, clearInput?: boolean) => Promise<void>;
    /** Clears all messages. */
    clearMessages: () => void;
    /** Appends a message to the list. */
    addMessage: (message: ChatMessage) => void;
    /** Aborts the in-flight request. */
    abortRequest: () => void;
    /** Retries the request for the message at the given index. */
    retryRequest: (msgIndex: number) => Promise<void>;
}
|
|
342
|
+
/**
 * useMessage composable.
 * Provides message-list management and request-state control for chat UIs
 * built on an AIClient.
 *
 * @param options useMessage options
 * @returns reactive message state and control functions
 */
declare function useMessage(options: UseMessageOptions): UseMessageReturn;
|
|
350
|
+
|
|
351
|
+
export { type AIAdapterError, AIClient, type AIModelConfig, type AIProvider, BaseModelProvider, type ChatCompletionOptions, type ChatCompletionRequest, type ChatCompletionResponse, type ChatCompletionResponseChoice, type ChatCompletionResponseMessage, type ChatCompletionResponseUsage, type ChatCompletionStreamResponse, type ChatCompletionStreamResponseChoice, type ChatCompletionStreamResponseDelta, type ChatHistory, type ChatMessage, ErrorType, FinalStatus, GeneratingStatus, type MessageRole, type MessageState, OpenAIProvider, STATUS, StreamEventType, type StreamHandler, type UseMessageOptions, type UseMessageReturn, extractTextFromResponse, formatMessages, useMessage };
|