@kevisual/ai 0.0.6 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/dist/ai-provider-browser.d.ts +9 -1
  2. package/dist/ai-provider-browser.js +13 -1
  3. package/dist/ai-provider.d.ts +9 -1
  4. package/dist/ai-provider.js +13 -1
  5. package/package.json +9 -5
  6. package/src/modules/logger.ts +6 -0
  7. package/src/provider/chat-adapter/custom.ts +14 -0
  8. package/src/provider/chat-adapter/dashscope.ts +10 -0
  9. package/src/provider/chat-adapter/deepseek.ts +10 -0
  10. package/src/provider/chat-adapter/model-scope.ts +11 -0
  11. package/src/provider/chat-adapter/ollama.ts +47 -0
  12. package/src/provider/chat-adapter/siliconflow.ts +39 -0
  13. package/src/provider/chat-adapter/volces.ts +10 -0
  14. package/src/provider/chat.ts +67 -0
  15. package/src/provider/core/chat.ts +152 -0
  16. package/src/provider/core/index.ts +27 -0
  17. package/src/provider/core/text-regex.ts +105 -0
  18. package/src/provider/core/type.ts +29 -0
  19. package/src/provider/index.ts +5 -0
  20. package/src/provider/knowledge-adapter/knowledge-base.ts +107 -0
  21. package/src/provider/knowledge-adapter/knowledge.ts +7 -0
  22. package/src/provider/knowledge-adapter/siliconflow.ts +24 -0
  23. package/src/provider/knowledge.ts +6 -0
  24. package/src/provider/media/index.ts +1 -0
  25. package/src/provider/media/video/siliconflow.ts +37 -0
  26. package/src/provider/utils/ai-config-type.ts +52 -0
  27. package/src/provider/utils/chunk.ts +86 -0
  28. package/src/provider/utils/index.ts +2 -0
  29. package/src/provider/utils/parse-config.ts +192 -0
  30. package/src/provider/utils/token.ts +34 -0
  31. package/src/test/chunks/01-get.ts +65 -0
  32. package/src/test/encrypt/index.ts +9 -0
  33. package/src/test/func-call/curl.sh +35 -0
  34. package/src/test/func-call/demo.ts +116 -0
  35. package/src/test/model-scope/index.ts +26 -0
  36. package/src/test/ollama-knowledge.ts +37 -0
  37. package/src/test/ollama.ts +86 -0
  38. package/src/test/provider/index.ts +7 -0
  39. package/src/test/siliconflow/common.ts +15 -0
  40. package/src/test/siliconflow/get.ts +22 -0
  41. package/src/test/siliconflow/knowledge/create.ts +18 -0
  42. package/src/test/siliconflow/knowledge/qwen.md +232 -0
  43. package/src/test/siliconflow/rerank/fc.ts +28 -0
  44. package/src/test/siliconflow/rerank/index.ts +34 -0
  45. package/src/test/siliconflow/videos/index.ts +100 -0
  46. package/src/utils/json.ts +12 -0
@@ -202,12 +202,19 @@ declare class ModelScope extends BaseChat {
   constructor(options: ModelScopeOptions);
 }
 
+type BailianOptions = Partial<BaseChatOptions>;
+declare class BailianChat extends BaseChat {
+  static BASE_URL: string;
+  constructor(options: BailianOptions);
+}
+
 declare const OllamaProvider: typeof Ollama;
 declare const SiliconFlowProvider: typeof SiliconFlow;
 declare const CustomProvider: typeof Custom;
 declare const VolcesProvider: typeof Volces;
 declare const DeepSeekProvider: typeof DeepSeek;
 declare const ModelScopeProvider: typeof ModelScope;
+declare const BailianProvider: typeof BailianChat;
 declare const ChatProviderMap: {
   Ollama: typeof Ollama;
   SiliconFlow: typeof SiliconFlow;
@@ -216,6 +223,7 @@ declare const ChatProviderMap: {
   DeepSeek: typeof DeepSeek;
   ModelScope: typeof ModelScope;
   BaseChat: typeof BaseChat;
+  Bailian: typeof BailianChat;
 };
 type ProviderManagerConfig = {
   provider: string;
@@ -430,5 +438,5 @@ declare class AIConfigParser {
   };
 }
 
-export { AIConfigParser, BaseChat, ChatProviderMap, CustomProvider, DeepSeekProvider, KnowledgeBase, ModelScopeProvider, OllamaProvider, ProviderManager, SiliconFlowKnowledge, SiliconFlowProvider, VolcesProvider, decryptAES, encryptAES, getIsBrowser, readStream };
+export { AIConfigParser, BailianProvider, BaseChat, ChatProviderMap, CustomProvider, DeepSeekProvider, KnowledgeBase, ModelScopeProvider, OllamaProvider, ProviderManager, SiliconFlowKnowledge, SiliconFlowProvider, VolcesProvider, decryptAES, encryptAES, getIsBrowser, readStream };
 export type { AIConfig, AIModel, BaseChatInterface, BaseChatOptions, BaseChatUsageInterface, ChatMessage, ChatMessageComplete, ChatMessageOptions, ChatMessageStream, ChatStream, EmbeddingMessage, EmbeddingMessageComplete, GetProviderOpts, KnowledgeOptions, ProviderResult, RerankOptions, SecretKey };
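For orientation (not part of the diff): a minimal usage sketch of the newly exported Bailian provider, assembled from the declarations above. The import specifier, API key, and model id below are placeholders.

import { BailianProvider, ChatProviderMap } from '@kevisual/ai'; // import path assumed

const bailian = new BailianProvider({
  apiKey: process.env.BAILIAN_API_KEY!, // placeholder credential
  model: 'qwen-plus',                   // placeholder model id
  // baseURL omitted: BailianChat.BASE_URL is used as the default
});

// The same class is also registered in the provider map under the key "Bailian":
const SameBailianClass = ChatProviderMap.Bailian;

const res = await bailian.chat([{ role: 'user', content: 'Hello' }]);
console.log(res.choices[0]?.message?.content);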
@@ -24411,6 +24411,15 @@ class ModelScope extends BaseChat {
   }
 }
 
+// src/provider/chat-adapter/dashscope.ts
+class BailianChat extends BaseChat {
+  static BASE_URL = "https://bailian.aliyuncs.com/compatible-mode/v1/";
+  constructor(options) {
+    const baseURL = options.baseURL || BailianChat.BASE_URL;
+    super({ ...options, baseURL });
+  }
+}
+
 // src/provider/chat.ts
 var OllamaProvider = Ollama;
 var SiliconFlowProvider = SiliconFlow;
@@ -24418,6 +24427,7 @@ var CustomProvider = Custom;
 var VolcesProvider = Volces;
 var DeepSeekProvider = DeepSeek;
 var ModelScopeProvider = ModelScope;
+var BailianProvider = BailianChat;
 var ChatProviderMap = {
   Ollama: OllamaProvider,
   SiliconFlow: SiliconFlowProvider,
@@ -24425,7 +24435,8 @@ var ChatProviderMap = {
   Volces: VolcesProvider,
   DeepSeek: DeepSeekProvider,
   ModelScope: ModelScopeProvider,
-  BaseChat
+  BaseChat,
+  Bailian: BailianProvider
 };
 
 class ProviderManager {
@@ -24648,5 +24659,6 @@ export {
   CustomProvider,
   ChatProviderMap,
   BaseChat,
+  BailianProvider,
   AIConfigParser
 };
@@ -202,12 +202,19 @@ declare class ModelScope extends BaseChat {
   constructor(options: ModelScopeOptions);
 }
 
+type BailianOptions = Partial<BaseChatOptions>;
+declare class BailianChat extends BaseChat {
+  static BASE_URL: string;
+  constructor(options: BailianOptions);
+}
+
 declare const OllamaProvider: typeof Ollama;
 declare const SiliconFlowProvider: typeof SiliconFlow;
 declare const CustomProvider: typeof Custom;
 declare const VolcesProvider: typeof Volces;
 declare const DeepSeekProvider: typeof DeepSeek;
 declare const ModelScopeProvider: typeof ModelScope;
+declare const BailianProvider: typeof BailianChat;
 declare const ChatProviderMap: {
   Ollama: typeof Ollama;
   SiliconFlow: typeof SiliconFlow;
@@ -216,6 +223,7 @@ declare const ChatProviderMap: {
   DeepSeek: typeof DeepSeek;
   ModelScope: typeof ModelScope;
   BaseChat: typeof BaseChat;
+  Bailian: typeof BailianChat;
 };
 type ProviderManagerConfig = {
   provider: string;
@@ -430,5 +438,5 @@ declare class AIConfigParser {
   };
 }
 
-export { AIConfigParser, BaseChat, ChatProviderMap, CustomProvider, DeepSeekProvider, KnowledgeBase, ModelScopeProvider, OllamaProvider, ProviderManager, SiliconFlowKnowledge, SiliconFlowProvider, VolcesProvider, decryptAES, encryptAES, getIsBrowser, readStream };
+export { AIConfigParser, BailianProvider, BaseChat, ChatProviderMap, CustomProvider, DeepSeekProvider, KnowledgeBase, ModelScopeProvider, OllamaProvider, ProviderManager, SiliconFlowKnowledge, SiliconFlowProvider, VolcesProvider, decryptAES, encryptAES, getIsBrowser, readStream };
 export type { AIConfig, AIModel, BaseChatInterface, BaseChatOptions, BaseChatUsageInterface, ChatMessage, ChatMessageComplete, ChatMessageOptions, ChatMessageStream, ChatStream, EmbeddingMessage, EmbeddingMessageComplete, GetProviderOpts, KnowledgeOptions, ProviderResult, RerankOptions, SecretKey };
@@ -13446,6 +13446,15 @@ class ModelScope extends BaseChat {
   }
 }
 
+// src/provider/chat-adapter/dashscope.ts
+class BailianChat extends BaseChat {
+  static BASE_URL = "https://bailian.aliyuncs.com/compatible-mode/v1/";
+  constructor(options) {
+    const baseURL = options.baseURL || BailianChat.BASE_URL;
+    super({ ...options, baseURL });
+  }
+}
+
 // src/provider/chat.ts
 var OllamaProvider = Ollama;
 var SiliconFlowProvider = SiliconFlow;
@@ -13453,6 +13462,7 @@ var CustomProvider = Custom;
 var VolcesProvider = Volces;
 var DeepSeekProvider = DeepSeek;
 var ModelScopeProvider = ModelScope;
+var BailianProvider = BailianChat;
 var ChatProviderMap = {
   Ollama: OllamaProvider,
   SiliconFlow: SiliconFlowProvider,
@@ -13460,7 +13470,8 @@ var ChatProviderMap = {
   Volces: VolcesProvider,
   DeepSeek: DeepSeekProvider,
   ModelScope: ModelScopeProvider,
-  BaseChat
+  BaseChat,
+  Bailian: BailianProvider
 };
 
 class ProviderManager {
@@ -13683,5 +13694,6 @@ export {
   CustomProvider,
   ChatProviderMap,
   BaseChat,
+  BailianProvider,
   AIConfigParser
 };
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@kevisual/ai",
-  "version": "0.0.6",
+  "version": "0.0.7",
   "description": "后面需要把ai-center的provider模块提取出去",
   "main": "index.js",
   "basename": "/root/ai-center-services",
@@ -11,6 +11,7 @@
   },
   "files": [
     "dist",
+    "src",
     "types"
   ],
   "scripts": {
@@ -20,7 +21,11 @@
     "clean": "rm -rf dist",
     "pub": "envision pack -p -u"
   },
-  "keywords": [],
+  "keywords": [
+    "kevisual",
+    "ai",
+    "tools"
+  ],
   "author": "abearxiong <xiongxiao@xiongxiao.me> (https://www.xiongxiao.me)",
   "license": "MIT",
   "packageManager": "pnpm@10.11.0",
@@ -44,9 +49,8 @@
       "import": "./dist/ai-provider-browser.js",
       "types": "./dist/ai-provider-browser.d.ts"
     },
-    "./ai-provider-browser": {
-      "import": "./dist/ai-provider-browser.js",
-      "types": "./dist/ai-provider-browser.d.ts"
+    "./src/**/*": {
+      "import": "./src/**/*"
     }
   },
   "devDependencies": {
package/src/modules/logger.ts ADDED
@@ -0,0 +1,6 @@
+import { Logger } from '@kevisual/logger';
+
+export const logger = new Logger({
+  level: process?.env?.LOG_LEVEL || 'info',
+  showTime: true,
+});
package/src/provider/chat-adapter/custom.ts ADDED
@@ -0,0 +1,14 @@
+import { BaseChat, BaseChatOptions } from '../core/chat.ts';
+
+export type OllamaOptions = BaseChatOptions;
+
+/**
+ * 自定义模型
+ */
+export class Custom extends BaseChat {
+  static BASE_URL = 'https://api.deepseek.com/v1/';
+  constructor(options: OllamaOptions) {
+    const baseURL = options.baseURL || Custom.BASE_URL;
+    super({ ...(options as BaseChatOptions), baseURL: baseURL });
+  }
+}
package/src/provider/chat-adapter/dashscope.ts ADDED
@@ -0,0 +1,10 @@
+import { BaseChat, BaseChatOptions } from '../core/chat.ts';
+
+export type BailianOptions = Partial<BaseChatOptions>;
+export class BailianChat extends BaseChat {
+  static BASE_URL = 'https://bailian.aliyuncs.com/compatible-mode/v1/';
+  constructor(options: BailianOptions) {
+    const baseURL = options.baseURL || BailianChat.BASE_URL;
+    super({ ...(options as BaseChatOptions), baseURL: baseURL });
+  }
+}
package/src/provider/chat-adapter/deepseek.ts ADDED
@@ -0,0 +1,10 @@
+import { BaseChat, BaseChatOptions } from '../core/chat.ts';
+
+export type DeepSeekOptions = Partial<BaseChatOptions>;
+export class DeepSeek extends BaseChat {
+  static BASE_URL = 'https://api.deepseek.com/v1/';
+  constructor(options: DeepSeekOptions) {
+    const baseURL = options.baseURL || DeepSeek.BASE_URL;
+    super({ ...(options as BaseChatOptions), baseURL: baseURL });
+  }
+}
package/src/provider/chat-adapter/model-scope.ts ADDED
@@ -0,0 +1,11 @@
+// https://api-inference.modelscope.cn/v1/
+import { BaseChat, BaseChatOptions } from '../core/chat.ts';
+
+export type ModelScopeOptions = Partial<BaseChatOptions>;
+export class ModelScope extends BaseChat {
+  static BASE_URL = 'https://api-inference.modelscope.cn/v1/';
+  constructor(options: ModelScopeOptions) {
+    const baseURL = options.baseURL || ModelScope.BASE_URL;
+    super({ ...options, baseURL: baseURL } as any);
+  }
+}
package/src/provider/chat-adapter/ollama.ts ADDED
@@ -0,0 +1,47 @@
+import { BaseChat, BaseChatOptions } from '../core/index.ts';
+import type { ChatMessage, ChatMessageOptions } from '../core/index.ts';
+
+export type OllamaOptions = Partial<BaseChatOptions>;
+
+type OllamaModel = {
+  name: string;
+  model: string;
+  modified_at: string;
+
+  size: number;
+  digest: string;
+  details: {
+    parent_model: string;
+    format: string; // example: gguf
+    family: string; // example qwen
+    families: string[];
+    parameter_size: string;
+    quantization_level: string; // example: Q4_K_M Q4_0
+  };
+};
+export class Ollama extends BaseChat {
+  static BASE_URL = 'http://localhost:11434/v1';
+  constructor(options: OllamaOptions) {
+    const baseURL = options.baseURL || Ollama.BASE_URL;
+    super({ ...(options as BaseChatOptions), baseURL: baseURL });
+  }
+  async chat(messages: ChatMessage[], options?: ChatMessageOptions) {
+    const res = await super.chat(messages, options);
+    console.log('thunk', this.getChatUsage());
+    return res;
+  }
+  /**
+   * 获取模型列表
+   * @returns
+   */
+  async listModels(): Promise<{ models: OllamaModel[] }> {
+    const _url = new URL(this.baseURL);
+    const tagsURL = new URL('/api/tags', _url);
+    return this.openai.get(tagsURL.toString());
+  }
+  async listRunModels(): Promise<{ models: OllamaModel[] }> {
+    const _url = new URL(this.baseURL);
+    const tagsURL = new URL('/api/ps', _url);
+    return this.openai.get(tagsURL.toString());
+  }
+}
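A hedged sketch (not part of the diff) of the Ollama helpers added above: listModels hits /api/tags and listRunModels hits /api/ps on the configured host. The import path, key, and model tag below are assumptions.

import { OllamaProvider } from '@kevisual/ai'; // import path assumed

const ollama = new OllamaProvider({
  apiKey: 'ollama',    // placeholder: a local Ollama server typically ignores the key
  model: 'qwen2.5:7b', // placeholder local model tag
  // baseURL defaults to Ollama.BASE_URL (http://localhost:11434/v1)
});

const { models } = await ollama.listModels();
console.log(models.map((m) => `${m.name} (${m.details.parameter_size})`));

const running = await ollama.listRunModels();
console.log(running.models.length, 'model(s) currently loaded');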
package/src/provider/chat-adapter/siliconflow.ts ADDED
@@ -0,0 +1,39 @@
+import { BaseChat, BaseChatOptions } from '../core/chat.ts';
+import { OpenAI } from 'openai';
+
+export type SiliconFlowOptions = Partial<BaseChatOptions>;
+
+type SiliconFlowUsageData = {
+  id: string;
+  name: string;
+  image: string;
+  email: string;
+  isAdmin: boolean;
+  balance: string;
+  status: 'normal' | 'suspended' | 'expired' | string; // 状态
+  introduce: string;
+  role: string;
+  chargeBalance: string;
+  totalBalance: string;
+  category: string;
+};
+type SiliconFlowUsageResponse = {
+  code: number;
+  message: string;
+  status: boolean;
+  data: SiliconFlowUsageData;
+};
+export class SiliconFlow extends BaseChat {
+  static BASE_URL = 'https://api.siliconflow.cn/v1';
+  constructor(options: SiliconFlowOptions) {
+    const baseURL = options.baseURL || SiliconFlow.BASE_URL;
+    super({ ...(options as BaseChatOptions), baseURL: baseURL });
+  }
+  async getUsageInfo(): Promise<SiliconFlowUsageResponse> {
+    return this.openai.get('/user/info');
+  }
+  async chat(messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[], options?: Partial<OpenAI.Chat.Completions.ChatCompletionCreateParams>) {
+    const res = await super.chat(messages, options);
+    return res;
+  }
+}
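Similarly, a small sketch (not part of the diff) of the account-usage helper added above; the import path, key, and model id are placeholders.

import { SiliconFlowProvider } from '@kevisual/ai'; // import path assumed

const sf = new SiliconFlowProvider({
  apiKey: process.env.SILICONFLOW_API_KEY!, // placeholder credential
  model: 'Qwen/Qwen2.5-7B-Instruct',        // placeholder model id
});

const usage = await sf.getUsageInfo(); // GET /user/info against api.siliconflow.cn
if (usage.status) {
  console.log('total balance:', usage.data.totalBalance);
}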
package/src/provider/chat-adapter/volces.ts ADDED
@@ -0,0 +1,10 @@
+import { BaseChat, BaseChatOptions } from '../core/chat.ts';
+
+export type VolcesOptions = Partial<BaseChatOptions>;
+export class Volces extends BaseChat {
+  static BASE_URL = 'https://ark.cn-beijing.volces.com/api/v3/';
+  constructor(options: VolcesOptions) {
+    const baseURL = options.baseURL || Volces.BASE_URL;
+    super({ ...(options as BaseChatOptions), baseURL: baseURL });
+  }
+}
package/src/provider/chat.ts ADDED
@@ -0,0 +1,67 @@
+export * from './core/index.ts';
+import { BaseChat } from './core/chat.ts';
+
+import { Ollama } from './chat-adapter/ollama.ts';
+import { SiliconFlow } from './chat-adapter/siliconflow.ts';
+import { Custom } from './chat-adapter/custom.ts';
+import { Volces } from './chat-adapter/volces.ts';
+import { DeepSeek } from './chat-adapter/deepseek.ts';
+import { ModelScope } from './chat-adapter/model-scope.ts';
+import { BailianChat } from './chat-adapter/dashscope.ts';
+
+import { ChatMessage } from './core/type.ts';
+
+export const OllamaProvider = Ollama;
+export const SiliconFlowProvider = SiliconFlow;
+export const CustomProvider = Custom;
+export const VolcesProvider = Volces;
+export const DeepSeekProvider = DeepSeek;
+export const ModelScopeProvider = ModelScope;
+export const BailianProvider = BailianChat;
+
+export const ChatProviderMap = {
+  Ollama: OllamaProvider,
+  SiliconFlow: SiliconFlowProvider,
+  Custom: CustomProvider,
+  Volces: VolcesProvider,
+  DeepSeek: DeepSeekProvider,
+  ModelScope: ModelScopeProvider,
+  BaseChat: BaseChat,
+  Bailian: BailianProvider,
+};
+
+type ProviderManagerConfig = {
+  provider: string;
+  model: string;
+  apiKey: string;
+  baseURL?: string;
+};
+export class ProviderManager {
+  provider: BaseChat;
+  constructor(config: ProviderManagerConfig) {
+    const { provider, model, apiKey, baseURL } = config;
+    const Provider = ChatProviderMap[provider] as typeof BaseChat;
+    if (!Provider) {
+      throw new Error(`Provider ${provider} not found`);
+    }
+    const providerConfig = {
+      model,
+      apiKey,
+      baseURL,
+    };
+    if (!providerConfig.baseURL) {
+      delete providerConfig.baseURL;
+    }
+    this.provider = new Provider(providerConfig);
+  }
+  static async createProvider(config: ProviderManagerConfig) {
+    if (!config.baseURL) {
+      delete config.baseURL;
+    }
+    const pm = new ProviderManager(config);
+    return pm.provider;
+  }
+  async chat(messages: ChatMessage[]) {
+    return this.provider.chat(messages);
+  }
+}
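A minimal sketch (not part of the diff) of the ProviderManager entry point defined above; the provider key must match a ChatProviderMap entry, and the model/apiKey values here are placeholders.

import { ProviderManager } from '@kevisual/ai'; // import path assumed

const provider = await ProviderManager.createProvider({
  provider: 'Bailian',             // any key of ChatProviderMap, e.g. 'Ollama', 'DeepSeek', ...
  model: 'qwen-plus',              // placeholder model id
  apiKey: process.env.AI_API_KEY!, // placeholder credential
  // baseURL omitted: the adapter's static BASE_URL is used
});

const res = await provider.chat([{ role: 'user', content: 'ping' }]);
console.log(res.choices[0]?.message?.content);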
package/src/provider/core/chat.ts ADDED
@@ -0,0 +1,152 @@
+import { OpenAI } from 'openai';
+import type {
+  BaseChatInterface,
+  ChatMessageComplete,
+  ChatMessage,
+  ChatMessageOptions,
+  BaseChatUsageInterface,
+  ChatStream,
+  EmbeddingMessage,
+  EmbeddingMessageComplete,
+} from './type.ts';
+
+export type BaseChatOptions<T = Record<string, any>> = {
+  /**
+   * 默认baseURL
+   */
+  baseURL?: string;
+  /**
+   * 默认模型
+   */
+  model?: string;
+  /**
+   * 默认apiKey
+   */
+  apiKey: string;
+  /**
+   * 是否在浏览器中使用
+   */
+  isBrowser?: boolean;
+  /**
+   * 是否流式输出, 默认 false
+   */
+  stream?: boolean;
+} & T;
+export const getIsBrowser = () => {
+  try {
+    // @ts-ignore
+    return IS_BROWSER;
+  } catch (e) {
+    return false;
+  }
+};
+export class BaseChat implements BaseChatInterface, BaseChatUsageInterface {
+  /**
+   * 默认baseURL
+   */
+  baseURL: string;
+  /**
+   * 默认模型
+   */
+  model: string;
+  /**
+   * 默认apiKey
+   */
+  apiKey: string;
+  /**
+   * 是否在浏览器中使用
+   */
+  isBrowser: boolean;
+  /**
+   * openai实例
+   */
+  openai: OpenAI;
+
+  prompt_tokens: number;
+  total_tokens: number;
+  completion_tokens: number;
+
+  constructor(options: BaseChatOptions) {
+    this.baseURL = options.baseURL;
+    this.model = options.model;
+    this.apiKey = options.apiKey;
+    // @ts-ignore
+    const DEFAULT_IS_BROWSER = getIsBrowser();
+    this.isBrowser = options.isBrowser ?? DEFAULT_IS_BROWSER;
+    this.openai = new OpenAI({
+      apiKey: this.apiKey,
+      baseURL: this.baseURL,
+      dangerouslyAllowBrowser: this.isBrowser,
+    });
+  }
+  /**
+   * 聊天
+   */
+  async chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete> {
+    const createParams: OpenAI.Chat.Completions.ChatCompletionCreateParams = {
+      model: this.model,
+      messages,
+      ...options,
+      stream: false,
+    };
+    const res = (await this.openai.chat.completions.create(createParams)) as ChatMessageComplete;
+    this.prompt_tokens = res.usage?.prompt_tokens ?? 0;
+    this.total_tokens = res.usage?.total_tokens ?? 0;
+    this.completion_tokens = res.usage?.completion_tokens ?? 0;
+    return res;
+  }
+  async chatStream(messages: ChatMessage[], options?: ChatMessageOptions) {
+    const createParams: OpenAI.Chat.Completions.ChatCompletionCreateParams = {
+      model: this.model,
+      messages,
+      ...options,
+      stream: true,
+    };
+    if (createParams.response_format) {
+      throw new Error('response_format is not supported in stream mode');
+    }
+    return this.openai.chat.completions.create(createParams) as unknown as ChatStream;
+  }
+
+  /**
+   * 测试
+   */
+  test() {
+    return this.chat([{ role: 'user', content: 'Hello, world!' }]);
+  }
+  /**
+   * 获取聊天使用情况
+   * @returns
+   */
+  getChatUsage() {
+    return {
+      prompt_tokens: this.prompt_tokens,
+      total_tokens: this.total_tokens,
+      completion_tokens: this.completion_tokens,
+    };
+  }
+  getHeaders(headers?: Record<string, string>) {
+    return {
+      'Content-Type': 'application/json',
+      Authorization: `Bearer ${this.apiKey}`,
+      ...headers,
+    };
+  }
+  /**
+   * 生成embedding 内部
+   * @param text
+   * @returns
+   */
+  async generateEmbeddingCore(text: string | string[], options?: EmbeddingMessage): Promise<EmbeddingMessageComplete> {
+    const embeddingModel = options?.model || this.model;
+    const res = await this.openai.embeddings.create({
+      model: embeddingModel,
+      input: text,
+      encoding_format: 'float',
+      ...options,
+    });
+    this.prompt_tokens += res.usage.prompt_tokens;
+    this.total_tokens += res.usage.total_tokens;
+    return res;
+  }
+}
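To illustrate the BaseChat surface above (not part of the diff), a hedged example against a generic OpenAI-compatible endpoint; every constructor value here is a placeholder.

import { BaseChat } from '@kevisual/ai'; // import path assumed

const chat = new BaseChat({
  baseURL: 'https://api.example.com/v1/', // placeholder endpoint
  apiKey: 'sk-xxx',                       // placeholder key
  model: 'example-model',                 // placeholder model
});

const res = await chat.chat([{ role: 'user', content: 'Hello, world!' }]);
console.log(res.choices[0]?.message?.content);
// Token counters captured from the last non-stream completion:
console.log(chat.getChatUsage()); // { prompt_tokens, total_tokens, completion_tokens }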
package/src/provider/core/index.ts ADDED
@@ -0,0 +1,27 @@
+import { ChatStream } from './type.ts';
+
+// export type { BaseChat, BaseChatOptions } from './chat.ts';
+export * from './chat.ts'
+// export {
+//   ChatMessage,
+//   ChatMessageOptions, //
+//   ChatMessageComplete,
+//   ChatMessageStream,
+//   BaseChatInterface,
+//   BaseChatUsageInterface,
+//   ChatStream,
+//   EmbeddingMessage,
+//   EmbeddingMessageComplete,
+// } from './type.ts';
+export * from './type.ts'
+/**
+ * for await (const chunk of chatStream) {
+ *   console.log(chunk);
+ * }
+ * @param chatStream
+ */
+export const readStream = async (chatStream: ChatStream) => {
+  for await (const chunk of chatStream) {
+    console.log(chunk);
+  }
+};
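Finally, a short sketch (not part of the diff) pairing chatStream with the readStream helper defined in this file; constructor values and the import path are placeholders.

import { BaseChat, readStream } from '@kevisual/ai'; // import path assumed

const chat = new BaseChat({
  baseURL: 'https://api.example.com/v1/', // placeholder endpoint
  apiKey: 'sk-xxx',                       // placeholder key
  model: 'example-model',                 // placeholder model
});

const stream = await chat.chatStream([{ role: 'user', content: 'Write a haiku about diffs' }]);
// readStream iterates the ChatStream and logs each chunk as it arrives:
await readStream(stream);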