@kevisual/ai 0.0.19 → 0.0.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai-provider-browser.d.ts +14 -7
- package/dist/ai-provider-browser.js +2295 -1620
- package/dist/ai-provider.d.ts +14 -7
- package/dist/ai-provider.js +21 -21
- package/package.json +15 -14
- package/src/jimeng/core.ts +113 -43
- package/src/provider/chat-adapter/mimo.ts +10 -0
- package/src/provider/chat-adapter/ollama.ts +0 -5
- package/src/provider/chat-adapter/siliconflow.ts +0 -4
- package/src/provider/core/chat.ts +28 -12
- package/src/provider/core/type.ts +7 -4
@@ -5,8 +5,8 @@ type ChatMessage = {
     role?: 'user' | 'assistant' | 'system' | 'tool';
     content: string;
 };
-type ChatMessageOptions = {
-    messages
+type ChatMessageOptions<T = {}> = {
+    messages?: ChatMessage[];
     /**
      * Model name
      */
@@ -46,7 +46,7 @@ type ChatMessageOptions = {
     stream?: boolean;
     /**
      * Whether thinking is enabled
-     * If the conversation is Qianwen's server-side API, the default is
+     * If the conversation is Qianwen's server-side API, the default is false
      */
     enable_thinking?: boolean;
     response_format?: 'text' | 'json' | 'xml' | 'html';
@@ -54,7 +54,8 @@ type ChatMessageOptions = {
      * Tool call parameters
      */
     tool_calls?: any;
-};
+    [key: string]: any;
+} & T;
 type Choice = {
     finish_reason: 'stop' | 'length' | 'tool_calls' | 'content_filter' | 'function_call';
     index: number;
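
These first hunks make `messages` optional and turn `ChatMessageOptions` into an open-ended generic: an index signature plus a `& T` extension point. A minimal sketch of what that shape allows, with the type restated locally from the declarations above; `QwenExtra`, `qwen-plus`, and the option values are illustrative assumptions, not names from the package:

```ts
// Sketch only: the types are restated locally; QwenExtra and the values are hypothetical.
type ChatMessage = {
  role?: 'user' | 'assistant' | 'system' | 'tool';
  content: string;
};

type ChatMessageOptions<T = {}> = {
  messages?: ChatMessage[];
  stream?: boolean;
  enable_thinking?: boolean;
  [key: string]: any; // extra provider-specific keys are now accepted
} & T;                // ...and can also be typed explicitly via T

// A provider-specific extension slotted in through the new generic parameter.
type QwenExtra = { enable_search?: boolean };

const options: ChatMessageOptions<QwenExtra> = {
  messages: [{ role: 'user', content: 'Hello' }],
  model: 'qwen-plus',    // allowed by the index signature (hypothetical model name)
  enable_thinking: false,
  enable_search: true,   // typed via QwenExtra
};
```
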
@@ -161,6 +162,7 @@ type EmbeddingMessageComplete = {
     usage: Usage;
 };
 interface BaseChatInterface {
+    chat(options: ChatMessageOptions): Promise<ChatMessageComplete>;
     chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete>;
 }
 interface Usage {
@@ -305,8 +307,10 @@ declare class BaseChat implements BaseChatInterface, Usage {
     /**
      * Chat
      */
+    chat(options: ChatMessageOptions): Promise<ChatMessageComplete>;
     chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete>;
-    chatStream(
+    chatStream(options: ChatMessageOptions): AsyncGenerator<ChatMessageComplete>;
+    chatStream(messages: ChatMessage[], options?: ChatMessageOptions): AsyncGenerator<ChatMessageComplete>;
     /**
      * Simple question interface
      * @param message
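
The interface and class hunks above add options-object overloads next to the existing `(messages, options)` signatures and type `chatStream` as an `AsyncGenerator<ChatMessageComplete>`. A hedged sketch of how calls against these declarations might look; it assumes `BaseChat` (or a subclass such as `Ollama`) is importable from the package entry point, and the model name is a placeholder:

```ts
// Sketch only: the import path and the `model` value are assumptions.
import type { BaseChat } from '@kevisual/ai';

async function demo(chat: BaseChat) {
  // New overload: a single options object, with messages carried inside it.
  const result = await chat.chat({
    messages: [{ role: 'user', content: 'Hello' }],
    model: 'qwen-plus', // placeholder model name
  });
  console.log(result);

  // chatStream gets the same options-object overload and is declared as an
  // AsyncGenerator, so it can be consumed with for-await.
  for await (const chunk of chat.chatStream({
    messages: [{ role: 'user', content: 'Hello' }],
    stream: true,
  })) {
    console.log(chunk); // incremental ChatMessageComplete chunks
  }
}
```
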
@@ -323,6 +327,11 @@ declare class BaseChat implements BaseChatInterface, Usage {
         total_tokens: number;
         completion_tokens: number;
     };
+    setChatUsage(usage: {
+        prompt_tokens?: number;
+        total_tokens?: number;
+        completion_tokens?: number;
+    }): void;
     getHeaders(headers?: Record<string, string>): {
         'Content-Type': string;
         Authorization: string;
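
The new `setChatUsage` declaration takes an all-optional usage object. A small sketch of the kind of call it permits; that it records these counters on the instance is inferred from the surrounding usage fields, not documented here:

```ts
// Sketch only: `chat` stands for any constructed BaseChat subclass; the counters
// are example numbers, and the recording behavior is inferred rather than documented.
declare const chat: import('@kevisual/ai').BaseChat;

// Partial updates are possible because every field in the parameter is optional.
chat.setChatUsage({ prompt_tokens: 12, completion_tokens: 48 });
chat.setChatUsage({ total_tokens: 60 });
```
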
@@ -362,7 +371,6 @@ type OllamaModel = {
 declare class Ollama extends BaseChat {
     static BASE_URL: string;
     constructor(options: OllamaOptions$1);
-    chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete>;
     /**
      * Get the model list
      * @returns
@@ -400,7 +408,6 @@ declare class SiliconFlow extends BaseChat {
     static BASE_URL: string;
     constructor(options: SiliconFlowOptions);
     getUsageInfo(): Promise<SiliconFlowUsageResponse>;
-    chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete>;
 }
 
 type OllamaOptions = BaseChatOptions;