@kevisual/ai 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,230 @@
1
+ import * as openai_resources_index_mjs from 'openai/resources/index.mjs';
2
+ import OpenAI, { OpenAI as OpenAI$1 } from 'openai';
3
+
4
+ type ChatMessage = OpenAI.Chat.Completions.ChatCompletionMessageParam;
5
+ type ChatMessageOptions = Partial<OpenAI.Chat.Completions.ChatCompletionCreateParams>;
6
+ type ChatMessageComplete = OpenAI.Chat.Completions.ChatCompletion;
7
+ type ChatMessageStream = OpenAI.Chat.Completions.ChatCompletion;
8
+ type EmbeddingMessage = Partial<OpenAI.Embeddings.EmbeddingCreateParams>;
9
+ type EmbeddingMessageComplete = OpenAI.Embeddings.CreateEmbeddingResponse;
10
/** Minimal contract every chat provider in this package implements. */
interface BaseChatInterface {
    chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete>;
}
13
/** Token-usage counters exposed by a chat client. */
interface BaseChatUsageInterface {
    /**
     * Prompt tokens
     */
    prompt_tokens: number;
    /**
     * Total tokens
     */
    total_tokens: number;
    /**
     * Completion tokens
     */
    completion_tokens: number;
}
27
+ type ChatStream = AsyncGenerator<ChatMessageComplete, void, unknown>;
28
+
29
/**
 * Construction options shared by all chat providers; `T` allows a provider
 * to mix in extra fields.
 */
type BaseChatOptions<T = Record<string, any>> = {
    /**
     * Default baseURL
     */
    baseURL: string;
    /**
     * Default model
     */
    model?: string;
    /**
     * Default apiKey
     */
    apiKey: string;
    /**
     * Whether the client is used in a browser
     */
    isBrowser?: boolean;
    /**
     * Whether to stream output; defaults to false
     */
    stream?: boolean;
} & T;
51
/**
 * Base OpenAI-compatible chat client. Wraps an OpenAI SDK instance and
 * tracks token usage; concrete providers (Ollama, SiliconFlow, …) extend it.
 */
declare class BaseChat implements BaseChatInterface, BaseChatUsageInterface {
    /**
     * Default baseURL
     */
    baseURL: string;
    /**
     * Default model
     */
    model: string;
    /**
     * Default apiKey
     */
    apiKey: string;
    /**
     * Whether the client is used in a browser
     */
    isBrowser: boolean;
    /**
     * Underlying OpenAI SDK instance
     */
    openai: OpenAI$1;
    // Usage counters from BaseChatUsageInterface.
    prompt_tokens: number;
    total_tokens: number;
    completion_tokens: number;
    constructor(options: BaseChatOptions);
    /**
     * Chat: send the messages and resolve with the full completion.
     */
    chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatMessageComplete>;
    /** Like {@link chat}, but resolves with an async stream of payloads. */
    chatStream(messages: ChatMessage[], options?: ChatMessageOptions): Promise<ChatStream>;
    /**
     * Test — presumably a connectivity smoke test; returns a completion.
     */
    test(): Promise<OpenAI$1.Chat.Completions.ChatCompletion>;
    /**
     * Get the accumulated chat token-usage counters.
     * @returns
     */
    getChatUsage(): {
        prompt_tokens: number;
        total_tokens: number;
        completion_tokens: number;
    };
    /** Build Content-Type + Authorization headers, merged with any extras. */
    getHeaders(headers?: Record<string, string>): {
        'Content-Type': string;
        Authorization: string;
    };
    /**
     * Generate an embedding (internal)
     * @param text
     * @returns
     */
    generateEmbeddingCore(text: string | string[], options?: EmbeddingMessage): Promise<EmbeddingMessageComplete>;
}
105
+
106
/**
 * Drains a chat stream, consuming every yielded payload. Usage:
 * for await (const chunk of chatStream) {
 *   console.log(chunk);
 * }
 * @param chatStream
 */
declare const readStream: (chatStream: ChatStream) => Promise<void>;
113
+
114
/** Ollama accepts partial options (it has its own BASE_URL default). */
type OllamaOptions$1 = Partial<BaseChatOptions>;
/** One entry in Ollama's model listing (shape mirrors Ollama's REST API). */
type OllamaModel = {
    name: string;
    model: string;
    modified_at: string;
    size: number;
    digest: string;
    details: {
        parent_model: string;
        format: string;
        family: string;
        families: string[];
        parameter_size: string;
        quantization_level: string;
    };
};
130
/** Chat provider backed by an Ollama server. */
declare class Ollama extends BaseChat {
    // Default Ollama endpoint URL.
    static BASE_URL: string;
    constructor(options: OllamaOptions$1);
    chat(messages: ChatMessage[], options?: ChatMessageOptions): Promise<openai_resources_index_mjs.ChatCompletion>;
    /**
     * List available models
     * @returns
     */
    listModels(): Promise<{
        models: OllamaModel[];
    }>;
    // Presumably lists currently running/loaded models — confirm against impl.
    listRunModels(): Promise<{
        models: OllamaModel[];
    }>;
}
145
+
146
/** SiliconFlow accepts partial options (it has its own BASE_URL default). */
type SiliconFlowOptions = Partial<BaseChatOptions>;
/** Account/usage payload returned by SiliconFlow's user-info endpoint. */
type SiliconFlowUsageData = {
    id: string;
    name: string;
    image: string;
    email: string;
    isAdmin: boolean;
    balance: string;
    status: 'normal' | 'suspended' | 'expired' | string;
    introduce: string;
    role: string;
    chargeBalance: string;
    totalBalance: string;
    category: string;
};
/** Envelope around {@link SiliconFlowUsageData}. */
type SiliconFlowUsageResponse = {
    code: number;
    message: string;
    status: boolean;
    data: SiliconFlowUsageData;
};
167
/** Chat provider for the SiliconFlow API. */
declare class SiliconFlow extends BaseChat {
    // Default SiliconFlow endpoint URL.
    static BASE_URL: string;
    constructor(options: SiliconFlowOptions);
    /** Fetch account balance / usage info for the configured API key. */
    getUsageInfo(): Promise<SiliconFlowUsageResponse>;
    chat(messages: OpenAI$1.Chat.Completions.ChatCompletionMessageParam[], options?: Partial<OpenAI$1.Chat.Completions.ChatCompletionCreateParams>): Promise<OpenAI$1.Chat.Completions.ChatCompletion>;
}
173
+
174
+ type OllamaOptions = BaseChatOptions;
175
+ /**
176
+ * 自定义模型
177
+ */
178
+ declare class Custom extends BaseChat {
179
+ static BASE_URL: string;
180
+ constructor(options: OllamaOptions);
181
+ }
182
+
183
/** Volces accepts partial options (it has its own BASE_URL default). */
type VolcesOptions = Partial<BaseChatOptions>;
/** Chat provider for the Volces (Volcengine) API. */
declare class Volces extends BaseChat {
    // Default Volces endpoint URL.
    static BASE_URL: string;
    constructor(options: VolcesOptions);
}
188
+
189
/** DeepSeek accepts partial options (it has its own BASE_URL default). */
type DeepSeekOptions = Partial<BaseChatOptions>;
/** Chat provider for the DeepSeek API. */
declare class DeepSeek extends BaseChat {
    // Default DeepSeek endpoint URL.
    static BASE_URL: string;
    constructor(options: DeepSeekOptions);
}
194
+
195
/** ModelScope accepts partial options (it has its own BASE_URL default). */
type ModelScopeOptions = Partial<BaseChatOptions>;
/** Chat provider for the ModelScope API. */
declare class ModelScope extends BaseChat {
    // Default ModelScope endpoint URL.
    static BASE_URL: string;
    constructor(options: ModelScopeOptions);
}
200
+
201
// Re-exported aliases so each provider class is also available under a
// `<Name>Provider` name.
declare const OllamaProvider: typeof Ollama;
declare const SiliconFlowProvider: typeof SiliconFlow;
declare const CustomProvider: typeof Custom;
declare const VolcesProvider: typeof Volces;
declare const DeepSeekProvider: typeof DeepSeek;
declare const ModelScopeProvider: typeof ModelScope;
/** Lookup table from provider name to provider class (used by ProviderManager). */
declare const ProviderMap: {
    Ollama: typeof Ollama;
    SiliconFlow: typeof SiliconFlow;
    Custom: typeof Custom;
    Volces: typeof Volces;
    DeepSeek: typeof DeepSeek;
    ModelScope: typeof ModelScope;
    BaseChat: typeof BaseChat;
};
216
/** Config for ProviderManager: which provider to build and with what credentials. */
type ProviderManagerConfig = {
    // Provider name — presumably a key of ProviderMap; confirm against impl.
    provider: string;
    model: string;
    apiKey: string;
    baseURL?: string;
};
/** Thin facade that instantiates a provider from config and forwards chat calls. */
declare class ProviderManager {
    /** The constructed provider instance. */
    provider: BaseChat;
    constructor(config: ProviderManagerConfig);
    /** Build a provider instance from config without constructing a manager. */
    static createProvider(config: ProviderManagerConfig): Promise<BaseChat>;
    chat(messages: ChatMessage[]): Promise<openai_resources_index_mjs.ChatCompletion>;
}
228
+
229
// Runtime exports (classes, provider aliases, helpers).
export { BaseChat, CustomProvider, DeepSeekProvider, ModelScopeProvider, OllamaProvider, ProviderManager, ProviderMap, SiliconFlowProvider, VolcesProvider, readStream };
// Type-only exports.
export type { BaseChatInterface, BaseChatOptions, BaseChatUsageInterface, ChatMessage, ChatMessageComplete, ChatMessageOptions, ChatMessageStream, ChatStream, EmbeddingMessage, EmbeddingMessageComplete };