@kevisual/ai 0.0.5 → 0.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/ai-provider-browser.d.ts +442 -0
- package/dist/ai-provider-browser.js +24664 -0
- package/dist/ai-provider.d.ts +13 -3
- package/dist/ai-provider.js +25 -3
- package/package.json +16 -2
- package/src/modules/logger.ts +6 -0
- package/src/provider/chat-adapter/custom.ts +14 -0
- package/src/provider/chat-adapter/dashscope.ts +10 -0
- package/src/provider/chat-adapter/deepseek.ts +10 -0
- package/src/provider/chat-adapter/model-scope.ts +11 -0
- package/src/provider/chat-adapter/ollama.ts +47 -0
- package/src/provider/chat-adapter/siliconflow.ts +39 -0
- package/src/provider/chat-adapter/volces.ts +10 -0
- package/src/provider/chat.ts +67 -0
- package/src/provider/core/chat.ts +152 -0
- package/src/provider/core/index.ts +27 -0
- package/src/provider/core/text-regex.ts +105 -0
- package/src/provider/core/type.ts +29 -0
- package/src/provider/index.ts +5 -0
- package/src/provider/knowledge-adapter/knowledge-base.ts +107 -0
- package/src/provider/knowledge-adapter/knowledge.ts +7 -0
- package/src/provider/knowledge-adapter/siliconflow.ts +24 -0
- package/src/provider/knowledge.ts +6 -0
- package/src/provider/media/index.ts +1 -0
- package/src/provider/media/video/siliconflow.ts +37 -0
- package/src/provider/utils/ai-config-type.ts +52 -0
- package/src/provider/utils/chunk.ts +86 -0
- package/src/provider/utils/index.ts +2 -0
- package/src/provider/utils/parse-config.ts +192 -0
- package/src/provider/utils/token.ts +34 -0
- package/src/test/chunks/01-get.ts +65 -0
- package/src/test/encrypt/index.ts +9 -0
- package/src/test/func-call/curl.sh +35 -0
- package/src/test/func-call/demo.ts +116 -0
- package/src/test/model-scope/index.ts +26 -0
- package/src/test/ollama-knowledge.ts +37 -0
- package/src/test/ollama.ts +86 -0
- package/src/test/provider/index.ts +7 -0
- package/src/test/siliconflow/common.ts +15 -0
- package/src/test/siliconflow/get.ts +22 -0
- package/src/test/siliconflow/knowledge/create.ts +18 -0
- package/src/test/siliconflow/knowledge/qwen.md +232 -0
- package/src/test/siliconflow/rerank/fc.ts +28 -0
- package/src/test/siliconflow/rerank/index.ts +34 -0
- package/src/test/siliconflow/videos/index.ts +100 -0
- package/src/utils/json.ts +12 -0
package/dist/ai-provider.d.ts
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import * as openai_resources_index_mjs from 'openai/resources/index.mjs';
|
|
2
2
|
import OpenAI, { OpenAI as OpenAI$1 } from 'openai';
|
|
3
3
|
import * as openai_resources_embeddings_mjs from 'openai/resources/embeddings.mjs';
|
|
4
|
+
import * as _kevisual_permission from '@kevisual/permission';
|
|
4
5
|
import { Permission } from '@kevisual/permission';
|
|
5
6
|
|
|
6
7
|
type ChatMessage = OpenAI.Chat.Completions.ChatCompletionMessageParam;
|
|
@@ -32,7 +33,7 @@ type BaseChatOptions<T = Record<string, any>> = {
|
|
|
32
33
|
/**
|
|
33
34
|
* 默认baseURL
|
|
34
35
|
*/
|
|
35
|
-
baseURL
|
|
36
|
+
baseURL?: string;
|
|
36
37
|
/**
|
|
37
38
|
* 默认模型
|
|
38
39
|
*/
|
|
@@ -50,6 +51,7 @@ type BaseChatOptions<T = Record<string, any>> = {
|
|
|
50
51
|
*/
|
|
51
52
|
stream?: boolean;
|
|
52
53
|
} & T;
|
|
54
|
+
declare const getIsBrowser: () => any;
|
|
53
55
|
declare class BaseChat implements BaseChatInterface, BaseChatUsageInterface {
|
|
54
56
|
/**
|
|
55
57
|
* 默认baseURL
|
|
@@ -200,12 +202,19 @@ declare class ModelScope extends BaseChat {
|
|
|
200
202
|
constructor(options: ModelScopeOptions);
|
|
201
203
|
}
|
|
202
204
|
|
|
205
|
+
type BailianOptions = Partial<BaseChatOptions>;
|
|
206
|
+
declare class BailianChat extends BaseChat {
|
|
207
|
+
static BASE_URL: string;
|
|
208
|
+
constructor(options: BailianOptions);
|
|
209
|
+
}
|
|
210
|
+
|
|
203
211
|
declare const OllamaProvider: typeof Ollama;
|
|
204
212
|
declare const SiliconFlowProvider: typeof SiliconFlow;
|
|
205
213
|
declare const CustomProvider: typeof Custom;
|
|
206
214
|
declare const VolcesProvider: typeof Volces;
|
|
207
215
|
declare const DeepSeekProvider: typeof DeepSeek;
|
|
208
216
|
declare const ModelScopeProvider: typeof ModelScope;
|
|
217
|
+
declare const BailianProvider: typeof BailianChat;
|
|
209
218
|
declare const ChatProviderMap: {
|
|
210
219
|
Ollama: typeof Ollama;
|
|
211
220
|
SiliconFlow: typeof SiliconFlow;
|
|
@@ -214,6 +223,7 @@ declare const ChatProviderMap: {
|
|
|
214
223
|
DeepSeek: typeof DeepSeek;
|
|
215
224
|
ModelScope: typeof ModelScope;
|
|
216
225
|
BaseChat: typeof BaseChat;
|
|
226
|
+
Bailian: typeof BailianChat;
|
|
217
227
|
};
|
|
218
228
|
type ProviderManagerConfig = {
|
|
219
229
|
provider: string;
|
|
@@ -418,7 +428,7 @@ declare class AIConfigParser {
|
|
|
418
428
|
title?: string;
|
|
419
429
|
description?: string;
|
|
420
430
|
models?: AIModel[];
|
|
421
|
-
permission?: Permission;
|
|
431
|
+
permission?: _kevisual_permission.Permission;
|
|
422
432
|
filter?: {
|
|
423
433
|
objectKey: string;
|
|
424
434
|
type: "array" | "object";
|
|
@@ -428,5 +438,5 @@ declare class AIConfigParser {
|
|
|
428
438
|
};
|
|
429
439
|
}
|
|
430
440
|
|
|
431
|
-
export { AIConfigParser, BaseChat, ChatProviderMap, CustomProvider, DeepSeekProvider, KnowledgeBase, ModelScopeProvider, OllamaProvider, ProviderManager, SiliconFlowKnowledge, SiliconFlowProvider, VolcesProvider, decryptAES, encryptAES, readStream };
|
|
441
|
+
export { AIConfigParser, BailianProvider, BaseChat, ChatProviderMap, CustomProvider, DeepSeekProvider, KnowledgeBase, ModelScopeProvider, OllamaProvider, ProviderManager, SiliconFlowKnowledge, SiliconFlowProvider, VolcesProvider, decryptAES, encryptAES, getIsBrowser, readStream };
|
|
432
442
|
export type { AIConfig, AIModel, BaseChatInterface, BaseChatOptions, BaseChatUsageInterface, ChatMessage, ChatMessageComplete, ChatMessageOptions, ChatMessageStream, ChatStream, EmbeddingMessage, EmbeddingMessageComplete, GetProviderOpts, KnowledgeOptions, ProviderResult, RerankOptions, SecretKey };
|
package/dist/ai-provider.js
CHANGED
|
@@ -13279,6 +13279,14 @@ var _deployments_endpoints = new Set([
|
|
|
13279
13279
|
]);
|
|
13280
13280
|
|
|
13281
13281
|
// src/provider/core/chat.ts
|
|
13282
|
+
var getIsBrowser = () => {
|
|
13283
|
+
try {
|
|
13284
|
+
return false;
|
|
13285
|
+
} catch (e2) {
|
|
13286
|
+
return false;
|
|
13287
|
+
}
|
|
13288
|
+
};
|
|
13289
|
+
|
|
13282
13290
|
class BaseChat {
|
|
13283
13291
|
baseURL;
|
|
13284
13292
|
model;
|
|
@@ -13292,7 +13300,8 @@ class BaseChat {
|
|
|
13292
13300
|
this.baseURL = options.baseURL;
|
|
13293
13301
|
this.model = options.model;
|
|
13294
13302
|
this.apiKey = options.apiKey;
|
|
13295
|
-
|
|
13303
|
+
const DEFAULT_IS_BROWSER = getIsBrowser();
|
|
13304
|
+
this.isBrowser = options.isBrowser ?? DEFAULT_IS_BROWSER;
|
|
13296
13305
|
this.openai = new OpenAI({
|
|
13297
13306
|
apiKey: this.apiKey,
|
|
13298
13307
|
baseURL: this.baseURL,
|
|
@@ -13437,6 +13446,15 @@ class ModelScope extends BaseChat {
|
|
|
13437
13446
|
}
|
|
13438
13447
|
}
|
|
13439
13448
|
|
|
13449
|
+
// src/provider/chat-adapter/dashscope.ts
|
|
13450
|
+
class BailianChat extends BaseChat {
|
|
13451
|
+
static BASE_URL = "https://bailian.aliyuncs.com/compatible-mode/v1/";
|
|
13452
|
+
constructor(options) {
|
|
13453
|
+
const baseURL = options.baseURL || BailianChat.BASE_URL;
|
|
13454
|
+
super({ ...options, baseURL });
|
|
13455
|
+
}
|
|
13456
|
+
}
|
|
13457
|
+
|
|
13440
13458
|
// src/provider/chat.ts
|
|
13441
13459
|
var OllamaProvider = Ollama;
|
|
13442
13460
|
var SiliconFlowProvider = SiliconFlow;
|
|
@@ -13444,6 +13462,7 @@ var CustomProvider = Custom;
|
|
|
13444
13462
|
var VolcesProvider = Volces;
|
|
13445
13463
|
var DeepSeekProvider = DeepSeek;
|
|
13446
13464
|
var ModelScopeProvider = ModelScope;
|
|
13465
|
+
var BailianProvider = BailianChat;
|
|
13447
13466
|
var ChatProviderMap = {
|
|
13448
13467
|
Ollama: OllamaProvider,
|
|
13449
13468
|
SiliconFlow: SiliconFlowProvider,
|
|
@@ -13451,7 +13470,8 @@ var ChatProviderMap = {
|
|
|
13451
13470
|
Volces: VolcesProvider,
|
|
13452
13471
|
DeepSeek: DeepSeekProvider,
|
|
13453
13472
|
ModelScope: ModelScopeProvider,
|
|
13454
|
-
BaseChat
|
|
13473
|
+
BaseChat,
|
|
13474
|
+
Bailian: BailianProvider
|
|
13455
13475
|
};
|
|
13456
13476
|
|
|
13457
13477
|
class ProviderManager {
|
|
@@ -13550,7 +13570,7 @@ class KnowledgeBase extends BaseChat {
|
|
|
13550
13570
|
|
|
13551
13571
|
// src/provider/knowledge-adapter/siliconflow.ts
|
|
13552
13572
|
class SiliconFlowKnowledge extends KnowledgeBase {
|
|
13553
|
-
static BASE_URL = "https://api.siliconflow.
|
|
13573
|
+
static BASE_URL = "https://api.siliconflow.cn/v1";
|
|
13554
13574
|
constructor(options) {
|
|
13555
13575
|
super({ ...options, baseURL: options?.baseURL ?? SiliconFlowKnowledge.BASE_URL });
|
|
13556
13576
|
}
|
|
@@ -13660,6 +13680,7 @@ class AIConfigParser {
|
|
|
13660
13680
|
}
|
|
13661
13681
|
export {
|
|
13662
13682
|
readStream,
|
|
13683
|
+
getIsBrowser,
|
|
13663
13684
|
encryptAES,
|
|
13664
13685
|
decryptAES,
|
|
13665
13686
|
VolcesProvider,
|
|
@@ -13673,5 +13694,6 @@ export {
|
|
|
13673
13694
|
CustomProvider,
|
|
13674
13695
|
ChatProviderMap,
|
|
13675
13696
|
BaseChat,
|
|
13697
|
+
BailianProvider,
|
|
13676
13698
|
AIConfigParser
|
|
13677
13699
|
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@kevisual/ai",
|
|
3
|
-
"version": "0.0.5",
|
|
3
|
+
"version": "0.0.7",
|
|
4
4
|
"description": "后面需要把ai-center的provider模块提取出去",
|
|
5
5
|
"main": "index.js",
|
|
6
6
|
"basename": "/root/ai-center-services",
|
|
@@ -11,6 +11,7 @@
|
|
|
11
11
|
},
|
|
12
12
|
"files": [
|
|
13
13
|
"dist",
|
|
14
|
+
"src",
|
|
14
15
|
"types"
|
|
15
16
|
],
|
|
16
17
|
"scripts": {
|
|
@@ -20,7 +21,11 @@
|
|
|
20
21
|
"clean": "rm -rf dist",
|
|
21
22
|
"pub": "envision pack -p -u"
|
|
22
23
|
},
|
|
23
|
-
"keywords": [
|
|
24
|
+
"keywords": [
|
|
25
|
+
"kevisual",
|
|
26
|
+
"ai",
|
|
27
|
+
"tools"
|
|
28
|
+
],
|
|
24
29
|
"author": "abearxiong <xiongxiao@xiongxiao.me> (https://www.xiongxiao.me)",
|
|
25
30
|
"license": "MIT",
|
|
26
31
|
"packageManager": "pnpm@10.11.0",
|
|
@@ -32,11 +37,20 @@
|
|
|
32
37
|
"exports": {
|
|
33
38
|
".": {
|
|
34
39
|
"import": "./dist/ai-provider.js",
|
|
40
|
+
"node": "./dist/ai-provider.js",
|
|
41
|
+
"browser": "./dist/ai-provider-browser.js",
|
|
35
42
|
"types": "./dist/ai-provider.d.ts"
|
|
36
43
|
},
|
|
37
44
|
"./ai-provider": {
|
|
38
45
|
"import": "./dist/ai-provider.js",
|
|
39
46
|
"types": "./dist/ai-provider.d.ts"
|
|
47
|
+
},
|
|
48
|
+
"./browser": {
|
|
49
|
+
"import": "./dist/ai-provider-browser.js",
|
|
50
|
+
"types": "./dist/ai-provider-browser.d.ts"
|
|
51
|
+
},
|
|
52
|
+
"./src/**/*": {
|
|
53
|
+
"import": "./src/**/*"
|
|
40
54
|
}
|
|
41
55
|
},
|
|
42
56
|
"devDependencies": {
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import { BaseChat, BaseChatOptions } from '../core/chat.ts';
|
|
2
|
+
|
|
3
|
+
export type OllamaOptions = BaseChatOptions;
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* 自定义模型
|
|
7
|
+
*/
|
|
8
|
+
export class Custom extends BaseChat {
|
|
9
|
+
static BASE_URL = 'https://api.deepseek.com/v1/';
|
|
10
|
+
constructor(options: OllamaOptions) {
|
|
11
|
+
const baseURL = options.baseURL || Custom.BASE_URL;
|
|
12
|
+
super({ ...(options as BaseChatOptions), baseURL: baseURL });
|
|
13
|
+
}
|
|
14
|
+
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { BaseChat, BaseChatOptions } from '../core/chat.ts';
|
|
2
|
+
|
|
3
|
+
export type BailianOptions = Partial<BaseChatOptions>;
|
|
4
|
+
export class BailianChat extends BaseChat {
|
|
5
|
+
static BASE_URL = 'https://bailian.aliyuncs.com/compatible-mode/v1/';
|
|
6
|
+
constructor(options: BailianOptions) {
|
|
7
|
+
const baseURL = options.baseURL || BailianChat.BASE_URL;
|
|
8
|
+
super({ ...(options as BaseChatOptions), baseURL: baseURL });
|
|
9
|
+
}
|
|
10
|
+
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { BaseChat, BaseChatOptions } from '../core/chat.ts';
|
|
2
|
+
|
|
3
|
+
export type DeepSeekOptions = Partial<BaseChatOptions>;
|
|
4
|
+
export class DeepSeek extends BaseChat {
|
|
5
|
+
static BASE_URL = 'https://api.deepseek.com/v1/';
|
|
6
|
+
constructor(options: DeepSeekOptions) {
|
|
7
|
+
const baseURL = options.baseURL || DeepSeek.BASE_URL;
|
|
8
|
+
super({ ...(options as BaseChatOptions), baseURL: baseURL });
|
|
9
|
+
}
|
|
10
|
+
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
// https://api-inference.modelscope.cn/v1/
|
|
2
|
+
import { BaseChat, BaseChatOptions } from '../core/chat.ts';
|
|
3
|
+
|
|
4
|
+
export type ModelScopeOptions = Partial<BaseChatOptions>;
|
|
5
|
+
export class ModelScope extends BaseChat {
|
|
6
|
+
static BASE_URL = 'https://api-inference.modelscope.cn/v1/';
|
|
7
|
+
constructor(options: ModelScopeOptions) {
|
|
8
|
+
const baseURL = options.baseURL || ModelScope.BASE_URL;
|
|
9
|
+
super({ ...options, baseURL: baseURL } as any);
|
|
10
|
+
}
|
|
11
|
+
}
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
import { BaseChat, BaseChatOptions } from '../core/index.ts';
|
|
2
|
+
import type { ChatMessage, ChatMessageOptions } from '../core/index.ts';
|
|
3
|
+
|
|
4
|
+
export type OllamaOptions = Partial<BaseChatOptions>;
|
|
5
|
+
|
|
6
|
+
type OllamaModel = {
|
|
7
|
+
name: string;
|
|
8
|
+
model: string;
|
|
9
|
+
modified_at: string;
|
|
10
|
+
|
|
11
|
+
size: number;
|
|
12
|
+
digest: string;
|
|
13
|
+
details: {
|
|
14
|
+
parent_model: string;
|
|
15
|
+
format: string; // example: gguf
|
|
16
|
+
family: string; // example qwen
|
|
17
|
+
families: string[];
|
|
18
|
+
parameter_size: string;
|
|
19
|
+
quantization_level: string; // example: Q4_K_M Q4_0
|
|
20
|
+
};
|
|
21
|
+
};
|
|
22
|
+
export class Ollama extends BaseChat {
|
|
23
|
+
static BASE_URL = 'http://localhost:11434/v1';
|
|
24
|
+
constructor(options: OllamaOptions) {
|
|
25
|
+
const baseURL = options.baseURL || Ollama.BASE_URL;
|
|
26
|
+
super({ ...(options as BaseChatOptions), baseURL: baseURL });
|
|
27
|
+
}
|
|
28
|
+
async chat(messages: ChatMessage[], options?: ChatMessageOptions) {
|
|
29
|
+
const res = await super.chat(messages, options);
|
|
30
|
+
console.log('thunk', this.getChatUsage());
|
|
31
|
+
return res;
|
|
32
|
+
}
|
|
33
|
+
/**
|
|
34
|
+
* 获取模型列表
|
|
35
|
+
* @returns
|
|
36
|
+
*/
|
|
37
|
+
async listModels(): Promise<{ models: OllamaModel[] }> {
|
|
38
|
+
const _url = new URL(this.baseURL);
|
|
39
|
+
const tagsURL = new URL('/api/tags', _url);
|
|
40
|
+
return this.openai.get(tagsURL.toString());
|
|
41
|
+
}
|
|
42
|
+
async listRunModels(): Promise<{ models: OllamaModel[] }> {
|
|
43
|
+
const _url = new URL(this.baseURL);
|
|
44
|
+
const tagsURL = new URL('/api/ps', _url);
|
|
45
|
+
return this.openai.get(tagsURL.toString());
|
|
46
|
+
}
|
|
47
|
+
}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
import { BaseChat, BaseChatOptions } from '../core/chat.ts';
|
|
2
|
+
import { OpenAI } from 'openai';
|
|
3
|
+
|
|
4
|
+
export type SiliconFlowOptions = Partial<BaseChatOptions>;
|
|
5
|
+
|
|
6
|
+
type SiliconFlowUsageData = {
|
|
7
|
+
id: string;
|
|
8
|
+
name: string;
|
|
9
|
+
image: string;
|
|
10
|
+
email: string;
|
|
11
|
+
isAdmin: boolean;
|
|
12
|
+
balance: string;
|
|
13
|
+
status: 'normal' | 'suspended' | 'expired' | string; // 状态
|
|
14
|
+
introduce: string;
|
|
15
|
+
role: string;
|
|
16
|
+
chargeBalance: string;
|
|
17
|
+
totalBalance: string;
|
|
18
|
+
category: string;
|
|
19
|
+
};
|
|
20
|
+
type SiliconFlowUsageResponse = {
|
|
21
|
+
code: number;
|
|
22
|
+
message: string;
|
|
23
|
+
status: boolean;
|
|
24
|
+
data: SiliconFlowUsageData;
|
|
25
|
+
};
|
|
26
|
+
export class SiliconFlow extends BaseChat {
|
|
27
|
+
static BASE_URL = 'https://api.siliconflow.cn/v1';
|
|
28
|
+
constructor(options: SiliconFlowOptions) {
|
|
29
|
+
const baseURL = options.baseURL || SiliconFlow.BASE_URL;
|
|
30
|
+
super({ ...(options as BaseChatOptions), baseURL: baseURL });
|
|
31
|
+
}
|
|
32
|
+
async getUsageInfo(): Promise<SiliconFlowUsageResponse> {
|
|
33
|
+
return this.openai.get('/user/info');
|
|
34
|
+
}
|
|
35
|
+
async chat(messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[], options?: Partial<OpenAI.Chat.Completions.ChatCompletionCreateParams>) {
|
|
36
|
+
const res = await super.chat(messages, options);
|
|
37
|
+
return res;
|
|
38
|
+
}
|
|
39
|
+
}
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
import { BaseChat, BaseChatOptions } from '../core/chat.ts';
|
|
2
|
+
|
|
3
|
+
export type VolcesOptions = Partial<BaseChatOptions>;
|
|
4
|
+
export class Volces extends BaseChat {
|
|
5
|
+
static BASE_URL = 'https://ark.cn-beijing.volces.com/api/v3/';
|
|
6
|
+
constructor(options: VolcesOptions) {
|
|
7
|
+
const baseURL = options.baseURL || Volces.BASE_URL;
|
|
8
|
+
super({ ...(options as BaseChatOptions), baseURL: baseURL });
|
|
9
|
+
}
|
|
10
|
+
}
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
export * from './core/index.ts';
|
|
2
|
+
import { BaseChat } from './core/chat.ts';
|
|
3
|
+
|
|
4
|
+
import { Ollama } from './chat-adapter/ollama.ts';
|
|
5
|
+
import { SiliconFlow } from './chat-adapter/siliconflow.ts';
|
|
6
|
+
import { Custom } from './chat-adapter/custom.ts';
|
|
7
|
+
import { Volces } from './chat-adapter/volces.ts';
|
|
8
|
+
import { DeepSeek } from './chat-adapter/deepseek.ts';
|
|
9
|
+
import { ModelScope } from './chat-adapter/model-scope.ts';
|
|
10
|
+
import { BailianChat } from './chat-adapter/dashscope.ts';
|
|
11
|
+
|
|
12
|
+
import { ChatMessage } from './core/type.ts';
|
|
13
|
+
|
|
14
|
+
export const OllamaProvider = Ollama;
|
|
15
|
+
export const SiliconFlowProvider = SiliconFlow;
|
|
16
|
+
export const CustomProvider = Custom;
|
|
17
|
+
export const VolcesProvider = Volces;
|
|
18
|
+
export const DeepSeekProvider = DeepSeek;
|
|
19
|
+
export const ModelScopeProvider = ModelScope;
|
|
20
|
+
export const BailianProvider = BailianChat;
|
|
21
|
+
|
|
22
|
+
export const ChatProviderMap = {
|
|
23
|
+
Ollama: OllamaProvider,
|
|
24
|
+
SiliconFlow: SiliconFlowProvider,
|
|
25
|
+
Custom: CustomProvider,
|
|
26
|
+
Volces: VolcesProvider,
|
|
27
|
+
DeepSeek: DeepSeekProvider,
|
|
28
|
+
ModelScope: ModelScopeProvider,
|
|
29
|
+
BaseChat: BaseChat,
|
|
30
|
+
Bailian: BailianProvider,
|
|
31
|
+
};
|
|
32
|
+
|
|
33
|
+
type ProviderManagerConfig = {
|
|
34
|
+
provider: string;
|
|
35
|
+
model: string;
|
|
36
|
+
apiKey: string;
|
|
37
|
+
baseURL?: string;
|
|
38
|
+
};
|
|
39
|
+
export class ProviderManager {
|
|
40
|
+
provider: BaseChat;
|
|
41
|
+
constructor(config: ProviderManagerConfig) {
|
|
42
|
+
const { provider, model, apiKey, baseURL } = config;
|
|
43
|
+
const Provider = ChatProviderMap[provider] as typeof BaseChat;
|
|
44
|
+
if (!Provider) {
|
|
45
|
+
throw new Error(`Provider ${provider} not found`);
|
|
46
|
+
}
|
|
47
|
+
const providerConfig = {
|
|
48
|
+
model,
|
|
49
|
+
apiKey,
|
|
50
|
+
baseURL,
|
|
51
|
+
};
|
|
52
|
+
if (!providerConfig.baseURL) {
|
|
53
|
+
delete providerConfig.baseURL;
|
|
54
|
+
}
|
|
55
|
+
this.provider = new Provider(providerConfig);
|
|
56
|
+
}
|
|
57
|
+
static async createProvider(config: ProviderManagerConfig) {
|
|
58
|
+
if (!config.baseURL) {
|
|
59
|
+
delete config.baseURL;
|
|
60
|
+
}
|
|
61
|
+
const pm = new ProviderManager(config);
|
|
62
|
+
return pm.provider;
|
|
63
|
+
}
|
|
64
|
+
async chat(messages: ChatMessage[]) {
|
|
65
|
+
return this.provider.chat(messages);
|
|
66
|
+
}
|
|
67
|
+
}
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
import { OpenAI } from 'openai';
|
|
2
|
+
import type {
|
|
3
|
+
BaseChatInterface,
|
|
4
|
+
ChatMessageComplete,
|
|
5
|
+
ChatMessage,
|
|
6
|
+
ChatMessageOptions,
|
|
7
|
+
BaseChatUsageInterface,
|
|
8
|
+
ChatStream,
|
|
9
|
+
EmbeddingMessage,
|
|
10
|
+
EmbeddingMessageComplete,
|
|
11
|
+
} from './type.ts';
|
|
12
|
+
|
|
13
|
+
export type BaseChatOptions<T = Record<string, any>> = {
|
|
14
|
+
/**
|
|
15
|
+
* 默认baseURL
|
|
16
|
+
*/
|
|
17
|
+
baseURL?: string;
|
|
18
|
+
/**
|
|
19
|
+
* 默认模型
|
|
20
|
+
*/
|
|
21
|
+
model?: string;
|
|
22
|
+
/**
|
|
23
|
+
* 默认apiKey
|
|
24
|
+
*/
|
|
25
|
+
apiKey: string;
|
|
26
|
+
/**
|
|
27
|
+
* 是否在浏览器中使用
|
|
28
|
+
*/
|
|
29
|
+
isBrowser?: boolean;
|
|
30
|
+
/**
|
|
31
|
+
* 是否流式输出, 默认 false
|
|
32
|
+
*/
|
|
33
|
+
stream?: boolean;
|
|
34
|
+
} & T;
|
|
35
|
+
export const getIsBrowser = () => {
|
|
36
|
+
try {
|
|
37
|
+
// @ts-ignore
|
|
38
|
+
return IS_BROWSER;
|
|
39
|
+
} catch (e) {
|
|
40
|
+
return false;
|
|
41
|
+
}
|
|
42
|
+
};
|
|
43
|
+
export class BaseChat implements BaseChatInterface, BaseChatUsageInterface {
|
|
44
|
+
/**
|
|
45
|
+
* 默认baseURL
|
|
46
|
+
*/
|
|
47
|
+
baseURL: string;
|
|
48
|
+
/**
|
|
49
|
+
* 默认模型
|
|
50
|
+
*/
|
|
51
|
+
model: string;
|
|
52
|
+
/**
|
|
53
|
+
* 默认apiKey
|
|
54
|
+
*/
|
|
55
|
+
apiKey: string;
|
|
56
|
+
/**
|
|
57
|
+
* 是否在浏览器中使用
|
|
58
|
+
*/
|
|
59
|
+
isBrowser: boolean;
|
|
60
|
+
/**
|
|
61
|
+
* openai实例
|
|
62
|
+
*/
|
|
63
|
+
openai: OpenAI;
|
|
64
|
+
|
|
65
|
+
prompt_tokens: number;
|
|
66
|
+
total_tokens: number;
|
|
67
|
+
completion_tokens: number;
|
|
68
|
+
|
|
69
|
+
constructor(options: BaseChatOptions) {
|
|
70
|
+
this.baseURL = options.baseURL;
|
|
71
|
+
this.model = options.model;
|
|
72
|
+
this.apiKey = options.apiKey;
|
|
73
|
+
// @ts-ignore
|
|
74
|
+
const DEFAULT_IS_BROWSER = getIsBrowser();
|
|
75
|
+
this.isBrowser = options.isBrowser ?? DEFAULT_IS_BROWSER;
|
|
76
|
+
this.openai = new OpenAI({
|
|
77
|
+
apiKey: this.apiKey,
|
|
78
|
+
baseURL: this.baseURL,
|
|
79
|
+
dangerouslyAllowBrowser: this.isBrowser,
|
|
80
|
+
});
|
|
81
|
+
}
|
|
82
|
+
/**
|
|
83
|
+
* 聊天
|
|
84
|
+
*/
|
|
85
|
+
async chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete> {
|
|
86
|
+
const createParams: OpenAI.Chat.Completions.ChatCompletionCreateParams = {
|
|
87
|
+
model: this.model,
|
|
88
|
+
messages,
|
|
89
|
+
...options,
|
|
90
|
+
stream: false,
|
|
91
|
+
};
|
|
92
|
+
const res = (await this.openai.chat.completions.create(createParams)) as ChatMessageComplete;
|
|
93
|
+
this.prompt_tokens = res.usage?.prompt_tokens ?? 0;
|
|
94
|
+
this.total_tokens = res.usage?.total_tokens ?? 0;
|
|
95
|
+
this.completion_tokens = res.usage?.completion_tokens ?? 0;
|
|
96
|
+
return res;
|
|
97
|
+
}
|
|
98
|
+
async chatStream(messages: ChatMessage[], options?: ChatMessageOptions) {
|
|
99
|
+
const createParams: OpenAI.Chat.Completions.ChatCompletionCreateParams = {
|
|
100
|
+
model: this.model,
|
|
101
|
+
messages,
|
|
102
|
+
...options,
|
|
103
|
+
stream: true,
|
|
104
|
+
};
|
|
105
|
+
if (createParams.response_format) {
|
|
106
|
+
throw new Error('response_format is not supported in stream mode');
|
|
107
|
+
}
|
|
108
|
+
return this.openai.chat.completions.create(createParams) as unknown as ChatStream;
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
/**
|
|
112
|
+
* 测试
|
|
113
|
+
*/
|
|
114
|
+
test() {
|
|
115
|
+
return this.chat([{ role: 'user', content: 'Hello, world!' }]);
|
|
116
|
+
}
|
|
117
|
+
/**
|
|
118
|
+
* 获取聊天使用情况
|
|
119
|
+
* @returns
|
|
120
|
+
*/
|
|
121
|
+
getChatUsage() {
|
|
122
|
+
return {
|
|
123
|
+
prompt_tokens: this.prompt_tokens,
|
|
124
|
+
total_tokens: this.total_tokens,
|
|
125
|
+
completion_tokens: this.completion_tokens,
|
|
126
|
+
};
|
|
127
|
+
}
|
|
128
|
+
getHeaders(headers?: Record<string, string>) {
|
|
129
|
+
return {
|
|
130
|
+
'Content-Type': 'application/json',
|
|
131
|
+
Authorization: `Bearer ${this.apiKey}`,
|
|
132
|
+
...headers,
|
|
133
|
+
};
|
|
134
|
+
}
|
|
135
|
+
/**
|
|
136
|
+
* 生成embedding 内部
|
|
137
|
+
* @param text
|
|
138
|
+
* @returns
|
|
139
|
+
*/
|
|
140
|
+
async generateEmbeddingCore(text: string | string[], options?: EmbeddingMessage): Promise<EmbeddingMessageComplete> {
|
|
141
|
+
const embeddingModel = options?.model || this.model;
|
|
142
|
+
const res = await this.openai.embeddings.create({
|
|
143
|
+
model: embeddingModel,
|
|
144
|
+
input: text,
|
|
145
|
+
encoding_format: 'float',
|
|
146
|
+
...options,
|
|
147
|
+
});
|
|
148
|
+
this.prompt_tokens += res.usage.prompt_tokens;
|
|
149
|
+
this.total_tokens += res.usage.total_tokens;
|
|
150
|
+
return res;
|
|
151
|
+
}
|
|
152
|
+
}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { ChatStream } from './type.ts';
|
|
2
|
+
|
|
3
|
+
// export type { BaseChat, BaseChatOptions } from './chat.ts';
|
|
4
|
+
export * from './chat.ts'
|
|
5
|
+
// export {
|
|
6
|
+
// ChatMessage,
|
|
7
|
+
// ChatMessageOptions, //
|
|
8
|
+
// ChatMessageComplete,
|
|
9
|
+
// ChatMessageStream,
|
|
10
|
+
// BaseChatInterface,
|
|
11
|
+
// BaseChatUsageInterface,
|
|
12
|
+
// ChatStream,
|
|
13
|
+
// EmbeddingMessage,
|
|
14
|
+
// EmbeddingMessageComplete,
|
|
15
|
+
// } from './type.ts';
|
|
16
|
+
export * from './type.ts'
|
|
17
|
+
/**
|
|
18
|
+
* for await (const chunk of chatStream) {
|
|
19
|
+
* console.log(chunk);
|
|
20
|
+
* }
|
|
21
|
+
* @param chatStream
|
|
22
|
+
*/
|
|
23
|
+
export const readStream = async (chatStream: ChatStream) => {
|
|
24
|
+
for await (const chunk of chatStream) {
|
|
25
|
+
console.log(chunk);
|
|
26
|
+
}
|
|
27
|
+
};
|