@kevisual/ai 0.0.4 → 0.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai-provider-browser.d.ts +434 -0
- package/dist/ai-provider-browser.js +24652 -0
- package/dist/ai-provider.d.ts +208 -4
- package/dist/ai-provider.js +1496 -89
- package/package.json +13 -3
|
@@ -0,0 +1,434 @@
|
|
|
1
|
+
import * as openai_resources_index_mjs from 'openai/resources/index.mjs';
|
|
2
|
+
import OpenAI, { OpenAI as OpenAI$1 } from 'openai';
|
|
3
|
+
import * as openai_resources_embeddings_mjs from 'openai/resources/embeddings.mjs';
|
|
4
|
+
import * as _kevisual_permission from '@kevisual/permission';
|
|
5
|
+
import { Permission } from '@kevisual/permission';
|
|
6
|
+
|
|
7
|
+
/** A single chat message in the OpenAI chat-completions format. */
type ChatMessage = OpenAI.Chat.Completions.ChatCompletionMessageParam;
/** Optional per-request overrides for a chat-completion call. */
type ChatMessageOptions = Partial<OpenAI.Chat.Completions.ChatCompletionCreateParams>;
/** Non-streaming chat-completion response. */
type ChatMessageComplete = OpenAI.Chat.Completions.ChatCompletion;
// NOTE(review): aliased to ChatCompletion, not ChatCompletionChunk — looks
// surprising for a "stream" type; confirm against the implementation.
type ChatMessageStream = OpenAI.Chat.Completions.ChatCompletion;
/** Request parameters for creating embeddings. */
type EmbeddingMessage = Partial<OpenAI.Embeddings.EmbeddingCreateParams>;
/** Response returned by the embeddings endpoint. */
type EmbeddingMessageComplete = OpenAI.Embeddings.CreateEmbeddingResponse;
|
|
13
|
+
/** Minimal contract every chat provider implements. */
interface BaseChatInterface {
    chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete>;
}
|
|
16
|
+
/** Token-usage counters accumulated across chat completions. */
interface BaseChatUsageInterface {
    /**
     * Prompt tokens.
     */
    prompt_tokens: number;
    /**
     * Total tokens.
     */
    total_tokens: number;
    /**
     * Completion tokens.
     */
    completion_tokens: number;
}
|
|
30
|
+
type ChatStream = AsyncGenerator<ChatMessageComplete, void, unknown>;
|
|
31
|
+
|
|
32
|
+
/** Common construction options shared by all chat providers; `T` adds provider-specific extras. */
type BaseChatOptions<T = Record<string, any>> = {
    /**
     * Default baseURL.
     */
    baseURL?: string;
    /**
     * Default model.
     */
    model?: string;
    /**
     * Default apiKey.
     */
    apiKey: string;
    /**
     * Whether this is used in a browser.
     */
    isBrowser?: boolean;
    /**
     * Whether to stream output; defaults to false.
     */
    stream?: boolean;
} & T;
|
|
54
|
+
declare const getIsBrowser: () => any;
|
|
55
|
+
/** Base chat client wrapping the OpenAI SDK; tracks token usage across calls. */
declare class BaseChat implements BaseChatInterface, BaseChatUsageInterface {
    /**
     * Default baseURL.
     */
    baseURL: string;
    /**
     * Default model.
     */
    model: string;
    /**
     * Default apiKey.
     */
    apiKey: string;
    /**
     * Whether this is used in a browser.
     */
    isBrowser: boolean;
    /**
     * OpenAI SDK client instance.
     */
    openai: OpenAI$1;
    // Usage counters required by BaseChatUsageInterface.
    prompt_tokens: number;
    total_tokens: number;
    completion_tokens: number;
    constructor(options: BaseChatOptions);
    /**
     * Chat.
     */
    chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete>;
    /** Streaming variant of {@link chat}; resolves to an async generator. */
    chatStream(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatStream>;
    /**
     * Test — presumably issues a sample request to verify connectivity; confirm in impl.
     */
    test(): Promise<OpenAI$1.Chat.Completions.ChatCompletion>;
    /**
     * Get accumulated chat usage.
     * @returns
     */
    getChatUsage(): {
        prompt_tokens: number;
        total_tokens: number;
        completion_tokens: number;
    };
    /** Build request headers (Content-Type + Authorization), merged with `headers`. */
    getHeaders(headers?: Record<string, string>): {
        'Content-Type': string;
        Authorization: string;
    };
    /**
     * Generate embeddings (internal).
     * @param text
     * @returns
     */
    generateEmbeddingCore(text: string | string[], options?: EmbeddingMessage): Promise<EmbeddingMessageComplete>;
}
|
|
109
|
+
|
|
110
|
+
/**
 * Consumes a ChatStream to completion. Usage:
 * for await (const chunk of chatStream) {
 *   console.log(chunk);
 * }
 * @param chatStream
 */
declare const readStream: (chatStream: ChatStream) => Promise<void>;
|
|
117
|
+
|
|
118
|
+
type OllamaOptions$1 = Partial<BaseChatOptions>;
/** Model entry shape returned by the Ollama model-list endpoints. */
type OllamaModel = {
    name: string;
    model: string;
    modified_at: string;
    size: number;
    digest: string;
    details: {
        parent_model: string;
        format: string;
        family: string;
        families: string[];
        parameter_size: string;
        quantization_level: string;
    };
};
/** Chat provider for an Ollama server. */
declare class Ollama extends BaseChat {
    static BASE_URL: string;
    constructor(options: OllamaOptions$1);
    chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<openai_resources_index_mjs.ChatCompletion>;
    /**
     * Get the model list.
     * @returns
     */
    listModels(): Promise<{
        models: OllamaModel[];
    }>;
    /** List currently running models. */
    listRunModels(): Promise<{
        models: OllamaModel[];
    }>;
}
|
|
149
|
+
|
|
150
|
+
type SiliconFlowOptions = Partial<BaseChatOptions>;
/** Account payload returned by the SiliconFlow usage/user-info endpoint. */
type SiliconFlowUsageData = {
    id: string;
    name: string;
    image: string;
    email: string;
    isAdmin: boolean;
    balance: string;
    status: 'normal' | 'suspended' | 'expired' | string;
    introduce: string;
    role: string;
    chargeBalance: string;
    totalBalance: string;
    category: string;
};
/** Envelope wrapping {@link SiliconFlowUsageData}. */
type SiliconFlowUsageResponse = {
    code: number;
    message: string;
    status: boolean;
    data: SiliconFlowUsageData;
};
/** Chat provider for the SiliconFlow API. */
declare class SiliconFlow extends BaseChat {
    static BASE_URL: string;
    constructor(options: SiliconFlowOptions);
    /** Fetch account usage/balance information. */
    getUsageInfo(): Promise<SiliconFlowUsageResponse>;
    chat(messages: OpenAI$1.Chat.Completions.ChatCompletionMessageParam[], options?: Partial<OpenAI$1.Chat.Completions.ChatCompletionCreateParams>): Promise<OpenAI$1.Chat.Completions.ChatCompletion>;
}
|
|
177
|
+
|
|
178
|
+
// NOTE(review): alias name looks like a copy-paste leftover from the Ollama
// module — it configures Custom, not Ollama; confirm before renaming.
type OllamaOptions = BaseChatOptions;
/**
 * Custom model.
 */
declare class Custom extends BaseChat {
    static BASE_URL: string;
    constructor(options: OllamaOptions);
}
|
|
186
|
+
|
|
187
|
+
type VolcesOptions = Partial<BaseChatOptions>;
/** Chat provider for the Volces (Volcano Engine) API. */
declare class Volces extends BaseChat {
    static BASE_URL: string;
    constructor(options: VolcesOptions);
}
|
|
192
|
+
|
|
193
|
+
type DeepSeekOptions = Partial<BaseChatOptions>;
/** Chat provider for the DeepSeek API. */
declare class DeepSeek extends BaseChat {
    static BASE_URL: string;
    constructor(options: DeepSeekOptions);
}
|
|
198
|
+
|
|
199
|
+
type ModelScopeOptions = Partial<BaseChatOptions>;
/** Chat provider for the ModelScope API. */
declare class ModelScope extends BaseChat {
    static BASE_URL: string;
    constructor(options: ModelScopeOptions);
}
|
|
204
|
+
|
|
205
|
+
// Re-exported provider aliases.
declare const OllamaProvider: typeof Ollama;
declare const SiliconFlowProvider: typeof SiliconFlow;
declare const CustomProvider: typeof Custom;
declare const VolcesProvider: typeof Volces;
declare const DeepSeekProvider: typeof DeepSeek;
declare const ModelScopeProvider: typeof ModelScope;
/** Registry mapping provider names to their chat classes. */
declare const ChatProviderMap: {
    Ollama: typeof Ollama;
    SiliconFlow: typeof SiliconFlow;
    Custom: typeof Custom;
    Volces: typeof Volces;
    DeepSeek: typeof DeepSeek;
    ModelScope: typeof ModelScope;
    BaseChat: typeof BaseChat;
};
/** Config selecting a provider by name plus its credentials. */
type ProviderManagerConfig = {
    provider: string;
    model: string;
    apiKey: string;
    baseURL?: string;
};
/** Facade that instantiates a provider from config and forwards chat calls to it. */
declare class ProviderManager {
    provider: BaseChat;
    constructor(config: ProviderManagerConfig);
    static createProvider(config: ProviderManagerConfig): Promise<BaseChat>;
    chat(messages: ChatMessage[]): Promise<openai_resources_index_mjs.ChatCompletion>;
}
|
|
232
|
+
|
|
233
|
+
/** Options for {@link KnowledgeBase}: embedding model plus text-splitting/batching knobs. */
type KnowledgeOptions<T = Record<string, any>> = BaseChatOptions<{
    embeddingModel?: string;
    splitSize?: number;
    splitOverlap?: number;
    batchSize?: number;
} & T>;
/**
 * Knowledge-base construction:
 * 1. Embedding generate
 * 2. retriever
 * 3. reranker
 */
declare class KnowledgeBase extends BaseChat {
    embeddingModel: string;
    splitSize: number;
    splitOverlap: number;
    batchSize: number;
    constructor(options: KnowledgeOptions);
    /**
     * Generate embeddings.
     * @param text
     * @returns
     */
    generateEmbedding(text: string | string[]): Promise<{
        code: number;
        data: openai_resources_embeddings_mjs.Embedding[];
        message?: undefined;
    } | {
        code: any;
        message: string;
        data?: undefined;
    }>;
    /**
     * Generate embeddings in batches.
     * @param text
     * @returns
     */
    generateEmbeddingBatch(textArray: string[]): Promise<number[][]>;
    /**
     * Split long text and generate an embedding per chunk.
     * @param text
     * @returns
     */
    splitLongText(text: string): Promise<{
        text: string;
        embedding: number[];
    }[]>;
}
|
|
281
|
+
|
|
282
|
+
/** Knowledge base backed by SiliconFlow, adding rerank support. */
declare class SiliconFlowKnowledge extends KnowledgeBase {
    static BASE_URL: string;
    constructor(options: KnowledgeOptions);
    /** Rerank documents against a query. */
    rerank(data: RerankOptions): Promise<any>;
}
/** Parameters for a rerank request. */
type RerankOptions = {
    model: string;
    query: string;
    documents: string[];
    top_n?: number;
    return_documents?: boolean;
    max_chunks_per_doc?: number;
    overlap_tokens?: number;
};
|
|
296
|
+
|
|
297
|
+
/** One model entry in the AI configuration. */
type AIModel = {
    /**
     * Provider.
     */
    provider: string;
    /**
     * Model name.
     */
    model: string;
    /**
     * Model group.
     */
    group: string;
    /**
     * Daily request-rate limit.
     */
    dayLimit?: number;
    /**
     * Total token limit.
     */
    tokenLimit?: number;
};
/** API-key entry, grouped to match {@link AIModel.group}. */
type SecretKey = {
    /**
     * Group.
     */
    group: string;
    /**
     * API key.
     */
    apiKey: string;
    /**
     * Decryption key.
     */
    decryptKey?: string;
};
/** Top-level AI configuration: models, secret keys, permission and filter rules. */
type AIConfig = {
    title?: string;
    description?: string;
    models: AIModel[];
    secretKeys: SecretKey[];
    permission?: Permission;
    filter?: {
        objectKey: string;
        type: 'array' | 'object';
        operate: 'removeAttribute' | 'remove';
        attribute: string[];
    }[];
};
|
|
346
|
+
|
|
347
|
+
/** AES-encrypt plain text with the given secret key. */
declare function encryptAES(plainText: string, secretKey: string): string;
/** AES-decrypt cipher text with the given secret key. */
declare function decryptAES(cipherText: string, secretKey: string): string;
/** Selector used to look up a provider entry in the config. */
type GetProviderOpts = {
    model: string;
    group: string;
    decryptKey?: string;
};
/** Resolved provider configuration for a (group, model) selection. */
type ProviderResult = {
    provider: string;
    model: string;
    group: string;
    apiKey: string;
    dayLimit?: number;
    tokenLimit?: number;
    baseURL?: string;
    /**
     * Decryption key.
     */
    decryptKey?: string;
};
/** Parses an {@link AIConfig}: resolves providers/keys and en/decrypts secrets. */
declare class AIConfigParser {
    private config;
    result: ProviderResult;
    constructor(config: AIConfig);
    /**
     * Get the model configuration.
     * @param opts
     * @returns
     */
    getProvider(opts: GetProviderOpts): ProviderResult;
    /**
     * Get the decryption key, optionally caching through the supplied callbacks.
     * @param opts
     * @returns
     */
    getSecretKey(opts?: {
        getCache?: (key: string) => Promise<string>;
        setCache?: (key: string, value: string) => Promise<void>;
        providerResult?: ProviderResult;
    }): Promise<string>;
    /**
     * Encrypt.
     * @param plainText
     * @param secretKey
     * @returns
     */
    encrypt(plainText: string, secretKey: string): string;
    /**
     * Decrypt.
     * @param cipherText
     * @param secretKey
     * @returns
     */
    decrypt(cipherText: string, secretKey: string): string;
    /**
     * Get the selectable model configurations.
     * @returns
     */
    getSelectOpts(): {
        group: string;
        apiKey: string;
        decryptKey?: string;
        provider: string;
        model: string;
        dayLimit?: number;
        tokenLimit?: number;
    }[];
    // NOTE(review): in the non-keepSecret branch `apiKey`/`decryptKey` are typed
    // `any` — presumably redacted values; confirm against the implementation.
    getConfig(keepSecret?: boolean, config?: AIConfig): AIConfig | {
        secretKeys: {
            apiKey: any;
            decryptKey: any;
            group: string;
        }[];
        title?: string;
        description?: string;
        models?: AIModel[];
        permission?: _kevisual_permission.Permission;
        filter?: {
            objectKey: string;
            type: "array" | "object";
            operate: "removeAttribute" | "remove";
            attribute: string[];
        }[];
    };
}
|
|
432
|
+
|
|
433
|
+
export { AIConfigParser, BaseChat, ChatProviderMap, CustomProvider, DeepSeekProvider, KnowledgeBase, ModelScopeProvider, OllamaProvider, ProviderManager, SiliconFlowKnowledge, SiliconFlowProvider, VolcesProvider, decryptAES, encryptAES, getIsBrowser, readStream };
|
|
434
|
+
export type { AIConfig, AIModel, BaseChatInterface, BaseChatOptions, BaseChatUsageInterface, ChatMessage, ChatMessageComplete, ChatMessageOptions, ChatMessageStream, ChatStream, EmbeddingMessage, EmbeddingMessageComplete, GetProviderOpts, KnowledgeOptions, ProviderResult, RerankOptions, SecretKey };
|