@weisiren000/oiiai 0.1.4 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +359 -27
- package/dist/index.d.mts +1647 -17
- package/dist/index.d.ts +1647 -17
- package/dist/index.js +2898 -1308
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2886 -1307
- package/dist/index.mjs.map +1 -1
- package/package.json +7 -2
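
Most of the change is new compiled code in `dist/index.js`: provider adapters (OpenRouter, Gemini, Groq, HuggingFace, ModelScope, DeepSeek, Poe, Nova), a `ProviderRegistry`, a shared `HttpProviderClient`, typed errors, and a fluent chat-session layer, all newly exported from the package root. As orientation before the raw diff, here is a minimal TypeScript sketch of how the 0.2.0 surface might be used; it is inferred only from the exports and option names visible in the compiled diff below (not from documentation), and the model id is a placeholder.

```ts
// Sketch only: names taken from the dist diff (ProviderRegistry, HttpProviderClient,
// adapter.buildChatRequest, reasoning.effort). Treat as an assumption, not official docs.
import { ProviderRegistry, HttpProviderClient } from "@weisiren000/oiiai";

// Look up one of the built-in adapters registered in 0.2.0.
const adapter = ProviderRegistry.getAdapter("openrouter");

// Build an OpenAI-compatible request body; { effort: "high" } maps to
// { effort: "high", max_tokens: 16384 } per EFFORT_TOKEN_MAP in the diff.
const body = adapter.buildChatRequest({
  model: "openai/gpt-4o-mini", // hypothetical model id
  messages: [{ role: "user", content: "Hello" }],
  temperature: 0.7,
  reasoning: { effort: "high" },
});

// Send it through the shared HTTP client (timeout defaults to 30000 ms).
const client = new HttpProviderClient({
  apiKey: process.env.OPENROUTER_API_KEY ?? "",
  baseUrl: "https://openrouter.ai/api/v1",
});
const data = await client.chat("/chat/completions", body);
console.log(data);
```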
package/dist/index.js
CHANGED
@@ -3,6 +3,9 @@ var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __esm = (fn, res) => function __init() {
+  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
+};
 var __export = (target, all) => {
   for (var name in all)
     __defProp(target, name, { get: all[name], enumerable: true });
@@ -17,23 +20,2238 @@ var __copyProps = (to, from, except, desc) => {
 };
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 
+// src/providers/__types__.ts
+var EFFORT_TOKEN_MAP;
+var init_types = __esm({
+  "src/providers/__types__.ts"() {
+    "use strict";
+    EFFORT_TOKEN_MAP = {
+      off: 0,
+      low: 1024,
+      medium: 4096,
+      high: 16384
+    };
+  }
+});
+
+// src/utils/request-builder.ts
+var RequestBuilder;
+var init_request_builder = __esm({
+  "src/utils/request-builder.ts"() {
+    "use strict";
+    init_types();
+    RequestBuilder = class {
+      /**
+       * 构建聊天请求的基础参数
+       * 生成标准化的 OpenAI 兼容格式请求体
+       *
+       * @param options - 聊天选项
+       * @param stream - 是否为流式请求
+       * @returns 请求体对象
+       *
+       * @example
+       * ```ts
+       * const body = RequestBuilder.buildChatBody({
+       *   model: 'gpt-4',
+       *   messages: [{ role: 'user', content: 'Hello' }],
+       *   temperature: 0.7
+       * });
+       * ```
+       */
+      static buildChatBody(options, stream = false) {
+        const { model, messages, temperature = 0.7, maxTokens } = options;
+        const body = {
+          model,
+          messages,
+          temperature,
+          stream
+        };
+        if (maxTokens !== void 0) {
+          body.max_tokens = maxTokens;
+        }
+        return body;
+      }
+      /**
+       * 构建 OpenRouter 格式的 reasoning 参数
+       *
+       * @param config - 推理配置
+       * @returns OpenRouter 格式的 reasoning 参数,或 undefined
+       *
+       * @example
+       * ```ts
+       * const reasoning = RequestBuilder.buildOpenRouterReasoning({ effort: 'high' });
+       * // => { effort: 'high', max_tokens: 16384 }
+       * ```
+       */
+      static buildOpenRouterReasoning(config) {
+        if (!config) return void 0;
+        if (config.effort === "off") return void 0;
+        const param = {};
+        if (config.effort) {
+          param.effort = config.effort;
+        }
+        if (config.budgetTokens !== void 0) {
+          param.max_tokens = config.budgetTokens;
+        } else if (config.effort && EFFORT_TOKEN_MAP[config.effort]) {
+          param.max_tokens = EFFORT_TOKEN_MAP[config.effort];
+        }
+        if (config.exclude !== void 0) {
+          param.exclude = config.exclude;
+        }
+        return Object.keys(param).length > 0 ? param : void 0;
+      }
+      /**
+       * 构建 Gemini 格式的 reasoning 参数
+       * Gemini 2.5+ 模型使用 reasoning_effort 控制思考
+       *
+       * @param config - 推理配置
+       * @returns Gemini 格式的参数对象
+       *
+       * @example
+       * ```ts
+       * const params = RequestBuilder.buildGeminiReasoning({ effort: 'high' });
+       * // => { reasoning_effort: 'high' }
+       * ```
+       */
+      static buildGeminiReasoning(config) {
+        if (!config || !config.effort || config.effort === "off") {
+          return {};
+        }
+        return {
+          reasoning_effort: config.effort
+        };
+      }
+      /**
+       * 构建 Groq 格式的 reasoning 参数
+       * Groq 使用 reasoning_format 参数控制推理输出
+       *
+       * @param config - 推理配置
+       * @returns Groq 格式的参数对象
+       *
+       * @example
+       * ```ts
+       * const params = RequestBuilder.buildGroqReasoning({ effort: 'high' });
+       * // => { reasoning_format: 'parsed' }
+       * ```
+       */
+      static buildGroqReasoning(config) {
+        if (!config) {
+          return {};
+        }
+        if (config.effort === "off") {
+          return { include_reasoning: false };
+        }
+        if (config.effort) {
+          return { reasoning_format: "parsed" };
+        }
+        return {};
+      }
+      /**
+       * 构建 DeepSeek 格式的 reasoning 参数
+       * DeepSeek 使用 thinking 参数启用思考模式
+       *
+       * @param config - 推理配置
+       * @returns DeepSeek 格式的参数对象
+       */
+      static buildDeepSeekReasoning(config) {
+        if (!config || !config.effort || config.effort === "off") {
+          return {};
+        }
+        return {
+          thinking: { type: "enabled" }
+        };
+      }
+      /**
+       * 构建 Nova 格式的 reasoning 参数
+       * Nova 使用 reasoningConfig 控制 extended thinking
+       *
+       * @param config - 推理配置
+       * @returns Nova 格式的参数对象
+       */
+      static buildNovaReasoning(config) {
+        if (!config || !config.effort || config.effort === "off") {
+          return {};
+        }
+        return {
+          reasoningConfig: {
+            type: "enabled",
+            maxReasoningEffort: config.effort
+          }
+        };
+      }
+      /**
+       * 构建 HTTP 请求头
+       *
+       * @param apiKey - API 密钥
+       * @param additionalHeaders - 额外的请求头
+       * @returns 请求头对象
+       *
+       * @example
+       * ```ts
+       * const headers = RequestBuilder.buildHeaders('sk-xxx', {
+       *   'X-Custom-Header': 'value'
+       * });
+       * ```
+       */
+      static buildHeaders(apiKey, additionalHeaders) {
+        return {
+          "Content-Type": "application/json",
+          Authorization: `Bearer ${apiKey}`,
+          ...additionalHeaders
+        };
+      }
+    };
+  }
+});
+
+// src/client/types.ts
+var ProviderError, APIError, NetworkError, TimeoutError;
+var init_types2 = __esm({
+  "src/client/types.ts"() {
+    "use strict";
+    ProviderError = class extends Error {
+      constructor(message, code, provider, cause) {
+        super(message);
+        this.code = code;
+        this.provider = provider;
+        this.cause = cause;
+        this.name = "ProviderError";
+      }
+    };
+    APIError = class extends ProviderError {
+      constructor(message, provider, statusCode, responseBody) {
+        super(message, "API_ERROR", provider);
+        this.statusCode = statusCode;
+        this.responseBody = responseBody;
+        this.name = "APIError";
+      }
+    };
+    NetworkError = class extends ProviderError {
+      constructor(message, provider, cause) {
+        super(message, "NETWORK_ERROR", provider, cause);
+        this.name = "NetworkError";
+      }
+    };
+    TimeoutError = class extends ProviderError {
+      constructor(message, provider, timeoutMs) {
+        super(message, "TIMEOUT_ERROR", provider);
+        this.timeoutMs = timeoutMs;
+        this.name = "TimeoutError";
+      }
+    };
+  }
+});
+
+// src/client/http-provider-client.ts
+var http_provider_client_exports = {};
+__export(http_provider_client_exports, {
+  HttpProviderClient: () => HttpProviderClient
+});
+var DEFAULT_TIMEOUT, HttpProviderClient;
+var init_http_provider_client = __esm({
+  "src/client/http-provider-client.ts"() {
+    "use strict";
+    init_request_builder();
+    init_types2();
+    DEFAULT_TIMEOUT = 3e4;
+    HttpProviderClient = class {
+      config;
+      /**
+       * 创建 HTTP Provider 客户端实例
+       *
+       * @param config - 客户端配置
+       *
+       * @example
+       * ```ts
+       * const client = new HttpProviderClient({
+       *   apiKey: 'sk-xxx',
+       *   baseUrl: 'https://api.openai.com/v1',
+       *   timeout: 60000
+       * });
+       * ```
+       */
+      constructor(config) {
+        this.config = {
+          ...config,
+          timeout: config.timeout ?? DEFAULT_TIMEOUT
+        };
+      }
+      /**
+       * 获取 Provider 名称(从 baseUrl 推断)
+       */
+      getProviderName() {
+        try {
+          const url = new URL(this.config.baseUrl);
+          return url.hostname;
+        } catch {
+          return "unknown";
+        }
+      }
+      /**
+       * 构建完整的请求 URL
+       */
+      buildUrl(endpoint) {
+        const baseUrl = this.config.baseUrl.replace(/\/$/, "");
+        const path = endpoint.startsWith("/") ? endpoint : `/${endpoint}`;
+        return `${baseUrl}${path}`;
+      }
+      /**
+       * 构建请求头
+       */
+      buildHeaders() {
+        return RequestBuilder.buildHeaders(this.config.apiKey, this.config.headers);
+      }
+      /**
+       * 创建带超时的 AbortController
+       */
+      createAbortController() {
+        const controller = new AbortController();
+        const timeoutId = setTimeout(() => {
+          controller.abort();
+        }, this.config.timeout ?? DEFAULT_TIMEOUT);
+        return { controller, timeoutId };
+      }
+      /**
+       * 处理 HTTP 错误响应
+       */
+      async handleErrorResponse(response) {
+        const provider = this.getProviderName();
+        let responseBody;
+        try {
+          responseBody = await response.text();
+        } catch {
+        }
+        let message;
+        switch (response.status) {
+          case 400:
+            message = "\u8BF7\u6C42\u53C2\u6570\u9519\u8BEF";
+            break;
+          case 401:
+            message = "API \u5BC6\u94A5\u65E0\u6548\u6216\u5DF2\u8FC7\u671F";
+            break;
+          case 403:
+            message = "\u6CA1\u6709\u6743\u9650\u8BBF\u95EE\u6B64\u8D44\u6E90";
+            break;
+          case 404:
+            message = "\u8BF7\u6C42\u7684\u8D44\u6E90\u4E0D\u5B58\u5728";
+            break;
+          case 429:
+            message = "\u8BF7\u6C42\u8FC7\u4E8E\u9891\u7E41\uFF0C\u8BF7\u7A0D\u540E\u91CD\u8BD5";
+            break;
+          case 500:
+            message = "\u670D\u52A1\u5668\u5185\u90E8\u9519\u8BEF";
+            break;
+          case 502:
+            message = "\u7F51\u5173\u9519\u8BEF";
+            break;
+          case 503:
+            message = "\u670D\u52A1\u6682\u65F6\u4E0D\u53EF\u7528";
+            break;
+          default:
+            message = `HTTP \u9519\u8BEF: ${response.status} ${response.statusText}`;
+        }
+        throw new APIError(message, provider, response.status, responseBody);
+      }
+      /**
+       * 发送聊天请求(非流式)
+       *
+       * @param endpoint - API 端点路径
+       * @param body - 请求体
+       * @returns 响应数据
+       */
+      async chat(endpoint, body) {
+        const url = this.buildUrl(endpoint);
+        const headers = this.buildHeaders();
+        const { controller, timeoutId } = this.createAbortController();
+        const provider = this.getProviderName();
+        try {
+          const response = await fetch(url, {
+            method: "POST",
+            headers,
+            body: JSON.stringify(body),
+            signal: controller.signal
+          });
+          clearTimeout(timeoutId);
+          if (!response.ok) {
+            await this.handleErrorResponse(response);
+          }
+          const data = await response.json();
+          return data;
+        } catch (error) {
+          clearTimeout(timeoutId);
+          if (error instanceof APIError) {
+            throw error;
+          }
+          if (error instanceof Error && error.name === "AbortError") {
+            throw new TimeoutError(
+              `\u8BF7\u6C42\u8D85\u65F6\uFF08${this.config.timeout}ms\uFF09`,
+              provider,
+              this.config.timeout ?? DEFAULT_TIMEOUT
+            );
+          }
+          if (error instanceof TypeError) {
+            throw new NetworkError("\u7F51\u7EDC\u8FDE\u63A5\u5931\u8D25\uFF0C\u8BF7\u68C0\u67E5\u7F51\u7EDC\u8BBE\u7F6E", provider, error);
+          }
+          throw new NetworkError(
+            error instanceof Error ? error.message : "\u672A\u77E5\u9519\u8BEF",
+            provider,
+            error instanceof Error ? error : void 0
+          );
+        }
+      }
+      /**
+       * 发送流式聊天请求
+       *
+       * @param endpoint - API 端点路径
+       * @param body - 请求体
+       * @returns fetch Response 对象
+       */
+      async chatStream(endpoint, body) {
+        const url = this.buildUrl(endpoint);
+        const headers = this.buildHeaders();
+        const { controller, timeoutId } = this.createAbortController();
+        const provider = this.getProviderName();
+        try {
+          const response = await fetch(url, {
+            method: "POST",
+            headers,
+            body: JSON.stringify(body),
+            signal: controller.signal
+          });
+          clearTimeout(timeoutId);
+          if (!response.ok) {
+            await this.handleErrorResponse(response);
+          }
+          return response;
+        } catch (error) {
+          clearTimeout(timeoutId);
+          if (error instanceof APIError) {
+            throw error;
+          }
+          if (error instanceof Error && error.name === "AbortError") {
+            throw new TimeoutError(
+              `\u8BF7\u6C42\u8D85\u65F6\uFF08${this.config.timeout}ms\uFF09`,
+              provider,
+              this.config.timeout ?? DEFAULT_TIMEOUT
+            );
+          }
+          if (error instanceof TypeError) {
+            throw new NetworkError("\u7F51\u7EDC\u8FDE\u63A5\u5931\u8D25\uFF0C\u8BF7\u68C0\u67E5\u7F51\u7EDC\u8BBE\u7F6E", provider, error);
+          }
+          throw new NetworkError(
+            error instanceof Error ? error.message : "\u672A\u77E5\u9519\u8BEF",
+            provider,
+            error instanceof Error ? error : void 0
+          );
+        }
+      }
+    };
+  }
+});
+
+// src/utils/stream-processor.ts
+var stream_processor_exports = {};
+__export(stream_processor_exports, {
+  StreamProcessor: () => StreamProcessor
+});
+var StreamProcessor;
+var init_stream_processor = __esm({
+  "src/utils/stream-processor.ts"() {
+    "use strict";
+    StreamProcessor = class _StreamProcessor {
+      /**
+       * 从响应内容中提取文本
+       * 支持字符串和数组格式的 content
+       *
+       * @param content - 响应内容,可以是字符串、数组或其他类型
+       * @returns 提取的文本内容
+       *
+       * @example
+       * ```ts
+       * // 字符串格式
+       * StreamProcessor.extractTextContent('Hello') // => 'Hello'
+       *
+       * // 数组格式
+       * StreamProcessor.extractTextContent([
+       *   { type: 'text', text: 'Hello' },
+       *   { type: 'text', text: ' World' }
+       * ]) // => 'Hello World'
+       * ```
+       */
+      static extractTextContent(content) {
+        if (typeof content === "string") {
+          return content;
+        }
+        if (Array.isArray(content)) {
+          return content.filter(
+            (item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
+          ).map((item) => item.text).join("");
+        }
+        return "";
+      }
+      /**
+       * 解析 SSE 数据行
+       *
+       * @param line - SSE 数据行(如 "data: {...}")
+       * @returns 解析后的 JSON 对象,或 null(如果是 [DONE] 或无效数据)
+       *
+       * @example
+       * ```ts
+       * StreamProcessor.parseSSELine('data: {"content": "Hello"}')
+       * // => { content: 'Hello' }
+       *
+       * StreamProcessor.parseSSELine('data: [DONE]')
+       * // => null
+       * ```
+       */
+      static parseSSELine(line) {
+        const trimmed = line.trim();
+        if (!trimmed || trimmed === "data: [DONE]") {
+          return null;
+        }
+        if (!trimmed.startsWith("data: ")) {
+          return null;
+        }
+        try {
+          const jsonStr = trimmed.slice(6);
+          return JSON.parse(jsonStr);
+        } catch {
+          return null;
+        }
+      }
+      /**
+       * 创建流式响应处理器
+       * 处理 SSE 格式的流式响应,提取并生成 StreamChunk
+       *
+       * @param response - fetch Response 对象
+       * @param deltaExtractor - 从 delta 中提取 StreamChunk 的函数
+       * @returns AsyncGenerator<StreamChunk>
+       *
+       * @example
+       * ```ts
+       * const response = await fetch(url, { ... });
+       * const extractor = (delta) => {
+       *   if (delta.content) {
+       *     return { type: 'content', text: delta.content };
+       *   }
+       *   return null;
+       * };
+       *
+       * for await (const chunk of StreamProcessor.processStream(response, extractor)) {
+       *   console.log(chunk.type, chunk.text);
+       * }
+       * ```
+       */
+      static async *processStream(response, deltaExtractor) {
+        const reader = response.body?.getReader();
+        if (!reader) {
+          throw new Error("No response body");
+        }
+        const decoder = new TextDecoder();
+        let buffer = "";
+        try {
+          while (true) {
+            const { done, value } = await reader.read();
+            if (done) break;
+            buffer += decoder.decode(value, { stream: true });
+            const lines = buffer.split("\n");
+            buffer = lines.pop() ?? "";
+            for (const line of lines) {
+              const data = _StreamProcessor.parseSSELine(line);
+              if (!data) continue;
+              const choices = data.choices;
+              const delta = choices?.[0]?.delta;
+              if (!delta) continue;
+              const chunk = deltaExtractor(delta);
+              if (chunk) {
+                yield chunk;
+              }
+            }
+          }
+        } finally {
+          reader.releaseLock();
+        }
+      }
+      /**
+       * 创建默认的 delta 提取器
+       * 支持 reasoning_content、reasoning、thoughts 和 content 字段
+       *
+       * @returns DeltaExtractor 函数
+       */
+      static createDefaultExtractor() {
+        return (delta) => {
+          const reasoningContent = delta.reasoning_content ?? delta.reasoning ?? delta.thoughts;
+          if (reasoningContent) {
+            return {
+              type: "reasoning",
+              text: _StreamProcessor.extractTextContent(reasoningContent)
+            };
+          }
+          if (delta.content) {
+            return {
+              type: "content",
+              text: _StreamProcessor.extractTextContent(delta.content)
+            };
+          }
+          return null;
+        };
+      }
+    };
+  }
+});
+
+// src/fluent/chat-session.ts
+var chat_session_exports = {};
+__export(chat_session_exports, {
+  ChatSessionImpl: () => ChatSessionImpl
+});
+var ChatSessionImpl;
+var init_chat_session = __esm({
+  "src/fluent/chat-session.ts"() {
+    "use strict";
+    ChatSessionImpl = class {
+      /** 预设实例引用 */
+      preset;
+      /** 模型 ID */
+      model;
+      /** 会话配置 */
+      options;
+      /** 对话历史 */
+      history = [];
+      /**
+       * 创建对话会话
+       * @param preset - 预设实例
+       * @param model - 模型 ID
+       * @param options - 会话配置
+       */
+      constructor(preset, model, options) {
+        this.preset = preset;
+        this.model = model;
+        this.options = options ?? {};
+        if (this.options.system) {
+          this.history.push({
+            role: "system",
+            content: this.options.system
+          });
+        }
+      }
+      /**
+       * 发送消息并获取响应(非流式)
+       * @param message - 用户消息
+       * @returns 助手响应内容
+       */
+      async send(message) {
+        this.history.push({
+          role: "user",
+          content: message
+        });
+        try {
+          const response = await this.preset.ask(this.model, message, {
+            system: this.buildSystemContext(),
+            temperature: this.options.temperature,
+            maxTokens: this.options.maxTokens,
+            reasoning: this.options.reasoning
+          });
+          this.history.push({
+            role: "assistant",
+            content: response
+          });
+          return response;
+        } catch (error) {
+          this.history.pop();
+          throw error;
+        }
+      }
+      /**
+       * 发送消息并获取流式响应
+       * @param message - 用户消息
+       * @returns 流式数据块生成器
+       */
+      async *sendStream(message) {
+        this.history.push({
+          role: "user",
+          content: message
+        });
+        let responseContent = "";
+        try {
+          const stream = this.preset.stream(this.model, message, {
+            system: this.buildSystemContext(),
+            temperature: this.options.temperature,
+            maxTokens: this.options.maxTokens,
+            reasoning: this.options.reasoning
+          });
+          for await (const chunk of stream) {
+            if (chunk.type === "content") {
+              responseContent += chunk.text;
+            }
+            yield chunk;
+          }
+          this.history.push({
+            role: "assistant",
+            content: responseContent
+          });
+        } catch (error) {
+          this.history.pop();
+          throw error;
+        }
+      }
+      /**
+       * 获取对话历史
+       * @returns 按发送顺序排列的消息列表
+       */
+      getHistory() {
+        return [...this.history];
+      }
+      /**
+       * 清空对话历史
+       */
+      clearHistory() {
+        this.history = [];
+        if (this.options.system) {
+          this.history.push({
+            role: "system",
+            content: this.options.system
+          });
+        }
+      }
+      /**
+       * 构建系统上下文
+       * 将对话历史转换为系统提示词的一部分
+       * @returns 系统上下文字符串
+       */
+      buildSystemContext() {
+        const conversationHistory = this.history.filter(
+          (msg, index) => msg.role !== "system" && index < this.history.length - 1
+        );
+        if (conversationHistory.length === 0) {
+          return this.options.system;
+        }
+        const historyContext = conversationHistory.map((msg) => `${msg.role === "user" ? "\u7528\u6237" : "\u52A9\u624B"}: ${msg.content}`).join("\n");
+        const baseSystem = this.options.system ?? "";
+        return `${baseSystem}
+
+\u4EE5\u4E0B\u662F\u4E4B\u524D\u7684\u5BF9\u8BDD\u5386\u53F2\uFF1A
+${historyContext}`.trim();
+      }
+    };
+  }
+});
+
 // src/index.ts
 var index_exports = {};
 __export(index_exports, {
+  APIError: () => APIError,
+  BaseAdapter: () => BaseAdapter,
   BaseProvider: () => BaseProvider,
+  CONFIG_DEFAULTS: () => CONFIG_DEFAULTS,
+  ConfigManager: () => ConfigManager,
+  ConfigValidator: () => ConfigValidator,
+  ConfigurationError: () => ConfigurationError,
+  DeepSeekAdapter: () => DeepSeekAdapter,
   EFFORT_TOKEN_MAP: () => EFFORT_TOKEN_MAP,
+  FluentValidationError: () => ValidationError,
+  GeminiAdapter: () => GeminiAdapter,
   GeminiProvider: () => GeminiProvider,
+  GroqAdapter: () => GroqAdapter,
   GroqProvider: () => GroqProvider,
+  HttpProviderClient: () => HttpProviderClient,
+  HuggingFaceAdapter: () => HuggingFaceAdapter,
   HuggingFaceProvider: () => HuggingFaceProvider,
+  ModelScopeAdapter: () => ModelScopeAdapter,
   ModelScopeProvider: () => ModelScopeProvider,
+  NetworkError: () => NetworkError,
+  NovaAdapter: () => NovaAdapter,
+  OpenRouterAdapter: () => OpenRouterAdapter,
   OpenRouterProvider: () => OpenRouterProvider,
+  PoeAdapter: () => PoeAdapter,
+  ProviderError: () => ProviderError,
+  ProviderRegistry: () => ProviderRegistry,
+  RegistryError: () => RegistryError,
+  RequestBuilder: () => RequestBuilder,
+  StreamProcessor: () => StreamProcessor,
+  TimeoutError: () => TimeoutError,
+  VALID_PROVIDERS: () => VALID_PROVIDERS,
   ai: () => ai,
-
+  createBuilder: () => createBuilder,
+  createBuiltInAdapters: () => createBuiltInAdapters,
+  createProvider: () => createProvider,
+  deepseek: () => deepseek,
+  gemini: () => gemini,
+  groq: () => groq,
+  huggingface: () => huggingface,
+  modelscope: () => modelscope,
+  nova: () => nova,
+  oiiai: () => oiiai,
+  openrouter: () => openrouter,
+  poe: () => poe
 });
 module.exports = __toCommonJS(index_exports);
 
-// src/
-var
+// src/adapters/types.ts
+var BaseAdapter = class {
+  /**
+   * 创建 Provider 客户端
+   * 默认实现:需要在运行时导入 HttpProviderClient 以避免循环依赖
+   */
+  createClient(config) {
+    const { HttpProviderClient: HttpProviderClient2 } = (init_http_provider_client(), __toCommonJS(http_provider_client_exports));
+    return new HttpProviderClient2(config);
+  }
+  /**
+   * 构建聊天请求体
+   * 默认实现:构建 OpenAI 兼容格式的请求体
+   */
+  buildChatRequest(options, stream = false) {
+    const {
+      model,
+      messages,
+      temperature = 0.7,
+      maxTokens,
+      reasoning
+    } = options;
+    const body = {
+      model,
+      messages,
+      temperature,
+      stream
+    };
+    if (maxTokens !== void 0) {
+      body.max_tokens = maxTokens;
+    }
+    const reasoningParams = this.buildReasoningParams(reasoning);
+    Object.assign(body, reasoningParams);
+    return body;
+  }
+  /**
+   * 构建 reasoning 参数
+   * 默认实现:返回空对象,子类应覆盖此方法
+   */
+  buildReasoningParams(_config) {
+    return {};
+  }
+  /**
+   * 解析聊天响应
+   * 默认实现:解析 OpenAI 兼容格式的响应
+   */
+  parseChatResponse(response, model) {
+    const choices = response.choices;
+    const choice = choices?.[0];
+    if (!choice) {
+      throw new Error("No response from model");
+    }
+    const msg = choice.message;
+    const reasoningContent = msg?.reasoning_content ?? msg?.reasoning ?? null;
+    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
+    const usage = response.usage;
+    return {
+      content: StreamProcessor2.extractTextContent(msg?.content),
+      reasoning: reasoningContent ? StreamProcessor2.extractTextContent(reasoningContent) : null,
+      model: response.model ?? model,
+      usage: {
+        promptTokens: usage?.prompt_tokens ?? usage?.promptTokens ?? 0,
+        completionTokens: usage?.completion_tokens ?? usage?.completionTokens ?? 0,
+        totalTokens: usage?.total_tokens ?? usage?.totalTokens ?? 0
+      },
+      finishReason: choice.finish_reason ?? choice.finishReason ?? null
+    };
+  }
+  /**
+   * 从 delta 中提取 StreamChunk
+   * 默认实现:支持 reasoning_content、reasoning、thoughts 和 content 字段
+   */
+  extractStreamChunk(delta) {
+    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
+    const reasoningContent = delta.reasoning_content ?? delta.reasoning ?? delta.thoughts;
+    if (reasoningContent) {
+      return {
+        type: "reasoning",
+        text: StreamProcessor2.extractTextContent(reasoningContent)
+      };
+    }
+    if (delta.content) {
+      return {
+        type: "content",
+        text: StreamProcessor2.extractTextContent(delta.content)
+      };
+    }
+    return null;
+  }
+  /**
+   * 获取 API 端点 URL
+   * 默认实现:返回 /chat/completions 端点
+   */
+  getEndpointUrl(baseUrl) {
+    return `${baseUrl}/chat/completions`;
+  }
+};
+
+// src/adapters/openrouter-adapter.ts
+init_request_builder();
+var DEFAULT_BASE_URL = "https://openrouter.ai/api/v1";
+var OpenRouterAdapter = class extends BaseAdapter {
+  name = "openrouter";
+  defaultBaseUrl = DEFAULT_BASE_URL;
+  /**
+   * 构建聊天请求体
+   * OpenRouter 使用 OpenAI 兼容格式,但有特殊的 reasoning 参数
+   */
+  buildChatRequest(options, stream = false) {
+    const {
+      model,
+      messages,
+      temperature = 0.7,
+      maxTokens,
+      reasoning
+    } = options;
+    const body = {
+      model,
+      messages,
+      temperature,
+      stream
+    };
+    if (maxTokens !== void 0) {
+      body.max_tokens = maxTokens;
+    }
+    const reasoningParam = this.buildReasoningParams(reasoning);
+    if (reasoningParam && Object.keys(reasoningParam).length > 0) {
+      body.reasoning = reasoningParam;
+    }
+    return body;
+  }
+  /**
+   * 构建 OpenRouter 格式的 reasoning 参数
+   *
+   * OpenRouter reasoning 参数格式:
+   * {
+   *   effort: 'low' | 'medium' | 'high',
+   *   max_tokens: number,
+   *   exclude: boolean
+   * }
+   */
+  buildReasoningParams(config) {
+    return RequestBuilder.buildOpenRouterReasoning(config) ?? {};
+  }
+  /**
+   * 获取 API 端点 URL
+   * OpenRouter 使用标准的 /chat/completions 端点
+   */
+  getEndpointUrl(baseUrl) {
+    return `${baseUrl}/chat/completions`;
+  }
+};
+
+// src/adapters/gemini-adapter.ts
+init_request_builder();
+var DEFAULT_BASE_URL2 = "https://generativelanguage.googleapis.com/v1beta/openai";
+var GeminiAdapter = class extends BaseAdapter {
+  name = "gemini";
+  defaultBaseUrl = DEFAULT_BASE_URL2;
+  /**
+   * 构建聊天请求体
+   * Gemini 使用 OpenAI 兼容格式,reasoning_effort 直接放在请求体中
+   */
+  buildChatRequest(options, stream = false) {
+    const {
+      model,
+      messages,
+      temperature = 0.7,
+      maxTokens,
+      reasoning
+    } = options;
+    const body = {
+      model,
+      messages,
+      temperature,
+      stream
+    };
+    if (maxTokens !== void 0) {
+      body.max_tokens = maxTokens;
+    }
+    const reasoningParams = this.buildReasoningParams(reasoning);
+    Object.assign(body, reasoningParams);
+    return body;
+  }
+  /**
+   * 构建 Gemini 格式的 reasoning 参数
+   *
+   * Gemini 2.5+ 模型使用 reasoning_effort 参数:
+   * - 'low': 快速思考
+   * - 'medium': 平衡模式
+   * - 'high': 深度思考
+   */
+  buildReasoningParams(config) {
+    return RequestBuilder.buildGeminiReasoning(config);
+  }
+  /**
+   * 从 delta 中提取 StreamChunk
+   * Gemini 可能使用 reasoning_content 或 thoughts 字段
+   */
+  extractStreamChunk(delta) {
+    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
+    const reasoningContent = delta.reasoning_content ?? delta.thoughts;
+    if (reasoningContent) {
+      return {
+        type: "reasoning",
+        text: StreamProcessor2.extractTextContent(reasoningContent)
+      };
+    }
+    if (delta.content) {
+      return {
+        type: "content",
+        text: StreamProcessor2.extractTextContent(delta.content)
+      };
+    }
+    return null;
+  }
+  /**
+   * 获取 API 端点 URL
+   */
+  getEndpointUrl(baseUrl) {
+    return `${baseUrl}/chat/completions`;
+  }
+};
+
+// src/adapters/groq-adapter.ts
+init_request_builder();
+var DEFAULT_BASE_URL3 = "https://api.groq.com/openai/v1";
+var GroqAdapter = class extends BaseAdapter {
+  name = "groq";
+  defaultBaseUrl = DEFAULT_BASE_URL3;
+  /**
+   * 构建聊天请求体
+   * Groq 使用 OpenAI 兼容格式,但有一些特殊参数
+   */
+  buildChatRequest(options, stream = false) {
+    const { model, messages, temperature = 1, maxTokens, reasoning } = options;
+    const body = {
+      model,
+      messages,
+      temperature,
+      stream,
+      top_p: 1
+    };
+    if (maxTokens !== void 0) {
+      body.max_completion_tokens = maxTokens;
+    }
+    const reasoningParams = this.buildReasoningParams(reasoning);
+    Object.assign(body, reasoningParams);
+    return body;
+  }
+  /**
+   * 构建 Groq 格式的 reasoning 参数
+   *
+   * Groq 使用 reasoning_format 参数:
+   * - 'raw': 原始格式
+   * - 'parsed': 解析格式(推荐)
+   *
+   * 注意:不能同时使用 include_reasoning 和 reasoning_format
+   */
+  buildReasoningParams(config) {
+    return RequestBuilder.buildGroqReasoning(config);
+  }
+  /**
+   * 从 delta 中提取 StreamChunk
+   * Groq 使用 reasoning_content 或 reasoning 字段
+   */
+  extractStreamChunk(delta) {
+    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
+    const reasoningContent = delta.reasoning_content ?? delta.reasoning;
+    if (reasoningContent) {
+      return {
+        type: "reasoning",
+        text: StreamProcessor2.extractTextContent(reasoningContent)
+      };
+    }
+    if (delta.content) {
+      return {
+        type: "content",
+        text: StreamProcessor2.extractTextContent(delta.content)
+      };
+    }
+    return null;
+  }
+  /**
+   * 获取 API 端点 URL
+   */
+  getEndpointUrl(baseUrl) {
+    return `${baseUrl}/chat/completions`;
+  }
+};
+
+// src/adapters/huggingface-adapter.ts
+var DEFAULT_BASE_URL4 = "https://router.huggingface.co/v1";
+var HuggingFaceAdapter = class extends BaseAdapter {
+  name = "huggingface";
+  defaultBaseUrl = DEFAULT_BASE_URL4;
+  /**
+   * 构建聊天请求体
+   * HuggingFace 使用标准 OpenAI 兼容格式
+   */
+  buildChatRequest(options, stream = false) {
+    const {
+      model,
+      messages,
+      temperature = 0.7,
+      maxTokens,
+      reasoning
+    } = options;
+    const body = {
+      model,
+      messages,
+      temperature,
+      stream
+    };
+    if (maxTokens !== void 0) {
+      body.max_tokens = maxTokens;
+    }
+    const reasoningParams = this.buildReasoningParams(reasoning);
+    Object.assign(body, reasoningParams);
+    return body;
+  }
+  /**
+   * 构建 HuggingFace 格式的 reasoning 参数
+   * HuggingFace 使用 reasoning_effort 参数(取决于具体模型是否支持)
+   */
+  buildReasoningParams(config) {
+    if (!config || !config.effort || config.effort === "off") {
+      return {};
+    }
+    return {
+      reasoning_effort: config.effort
+    };
+  }
+  /**
+   * 从 delta 中提取 StreamChunk
+   */
+  extractStreamChunk(delta) {
+    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
+    if (delta.reasoning_content) {
+      return {
+        type: "reasoning",
+        text: StreamProcessor2.extractTextContent(delta.reasoning_content)
+      };
+    }
+    if (delta.content) {
+      return {
+        type: "content",
+        text: StreamProcessor2.extractTextContent(delta.content)
+      };
+    }
+    return null;
+  }
+  /**
+   * 获取 API 端点 URL
+   */
+  getEndpointUrl(baseUrl) {
+    return `${baseUrl}/chat/completions`;
+  }
+};
+
+// src/adapters/modelscope-adapter.ts
+var DEFAULT_BASE_URL5 = "https://api-inference.modelscope.cn/v1";
+var ModelScopeAdapter = class extends BaseAdapter {
+  name = "modelscope";
+  defaultBaseUrl = DEFAULT_BASE_URL5;
+  /**
+   * 构建聊天请求体
+   * ModelScope 使用 OpenAI 兼容格式
+   */
+  buildChatRequest(options, stream = false) {
+    const {
+      model,
+      messages,
+      temperature = 0.7,
+      maxTokens,
+      reasoning
+    } = options;
+    const body = {
+      model,
+      messages,
+      temperature,
+      stream
+    };
+    if (maxTokens !== void 0) {
+      body.max_tokens = maxTokens;
+    }
+    const reasoningParams = this.buildReasoningParams(reasoning);
+    Object.assign(body, reasoningParams);
+    return body;
+  }
+  /**
+   * 构建 ModelScope 格式的 reasoning 参数
+   * ModelScope 使用 enable_thinking 参数控制思考模式
+   */
+  buildReasoningParams(config) {
+    if (!config || !config.effort) {
+      return {};
+    }
+    if (config.effort === "off") {
+      return { enable_thinking: false };
+    }
+    return { enable_thinking: true };
+  }
+  /**
+   * 从 delta 中提取 StreamChunk
+   */
+  extractStreamChunk(delta) {
+    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
+    if (delta.reasoning_content) {
+      return {
+        type: "reasoning",
+        text: StreamProcessor2.extractTextContent(delta.reasoning_content)
+      };
+    }
+    if (delta.content) {
+      return {
+        type: "content",
+        text: StreamProcessor2.extractTextContent(delta.content)
+      };
+    }
+    return null;
+  }
+  /**
+   * 获取 API 端点 URL
+   */
+  getEndpointUrl(baseUrl) {
+    return `${baseUrl}/chat/completions`;
+  }
+};
+
+// src/adapters/deepseek-adapter.ts
+init_request_builder();
+var DEFAULT_BASE_URL6 = "https://api.deepseek.com";
+var DeepSeekAdapter = class extends BaseAdapter {
+  name = "deepseek";
+  defaultBaseUrl = DEFAULT_BASE_URL6;
+  /**
+   * 构建聊天请求体
+   * DeepSeek 使用 OpenAI 兼容格式
+   */
+  buildChatRequest(options, stream = false) {
+    const {
+      model,
+      messages,
+      temperature = 0.7,
+      maxTokens,
+      reasoning
+    } = options;
+    const body = {
+      model,
+      messages,
+      temperature,
+      stream
+    };
+    if (maxTokens !== void 0) {
+      body.max_tokens = maxTokens;
+    }
+    const reasoningParams = this.buildReasoningParams(reasoning);
+    Object.assign(body, reasoningParams);
+    return body;
+  }
+  /**
+   * 构建 DeepSeek 格式的 reasoning 参数
+   * DeepSeek 使用 thinking 参数启用思考模式
+   */
+  buildReasoningParams(config) {
+    return RequestBuilder.buildDeepSeekReasoning(config);
+  }
+  /**
+   * 从 delta 中提取 StreamChunk
+   * DeepSeek R1 使用 reasoning_content 返回思考过程
+   */
+  extractStreamChunk(delta) {
+    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
+    if (delta.reasoning_content) {
+      return {
+        type: "reasoning",
+        text: StreamProcessor2.extractTextContent(delta.reasoning_content)
+      };
+    }
+    if (delta.content) {
+      return {
+        type: "content",
+        text: StreamProcessor2.extractTextContent(delta.content)
+      };
+    }
+    return null;
+  }
+  /**
+   * 获取 API 端点 URL
+   */
+  getEndpointUrl(baseUrl) {
+    return `${baseUrl}/chat/completions`;
+  }
+};
+
+// src/adapters/poe-adapter.ts
+init_types();
+var DEFAULT_BASE_URL7 = "https://api.poe.com/v1";
+function extractThinkingFromContent(content) {
+  const thinkMatch = content.match(/<think>([\s\S]*?)<\/think>/);
+  if (thinkMatch) {
+    const thinking = thinkMatch[1].trim();
+    const cleanContent = content.replace(/<think>[\s\S]*?<\/think>/, "").trim();
+    return { thinking, content: cleanContent };
+  }
+  const thinkingMatch = content.match(
+    /^\*Thinking\.{0,3}\*\s*\n((?:>.*(?:\n|$))+)/
+  );
+  if (thinkingMatch) {
+    const thinking = thinkingMatch[1].split("\n").map((line) => line.replace(/^>\s?/, "")).join("\n").trim();
+    const cleanContent = content.replace(thinkingMatch[0], "").trim();
+    return { thinking, content: cleanContent };
+  }
+  return { thinking: "", content };
+}
+var PoeAdapter = class extends BaseAdapter {
+  name = "poe";
+  defaultBaseUrl = DEFAULT_BASE_URL7;
+  /**
+   * 构建聊天请求体
+   * Poe 使用 OpenAI 兼容格式,通过 extra_body 传递自定义参数
+   */
+  buildChatRequest(options, stream = false) {
+    const {
+      model,
+      messages,
+      temperature = 0.7,
+      maxTokens,
+      reasoning
+    } = options;
+    const body = {
+      model,
+      messages,
+      temperature,
+      stream
+    };
+    if (maxTokens !== void 0) {
+      body.max_tokens = maxTokens;
+    }
+    const reasoningParams = this.buildReasoningParams(reasoning);
+    Object.assign(body, reasoningParams);
+    return body;
+  }
+  /**
+   * 构建 Poe 格式的 reasoning 参数
+   * Poe 通过 extra_body 传递 reasoning_effort 和 thinking_budget
+   */
+  buildReasoningParams(config) {
+    if (!config || config.effort === "off") {
+      return {};
+    }
+    const params = {};
+    if (config.effort) {
+      params.reasoning_effort = config.effort;
+    }
+    if (config.budgetTokens !== void 0) {
+      params.thinking_budget = config.budgetTokens;
+    } else if (config.effort && EFFORT_TOKEN_MAP[config.effort]) {
+      params.thinking_budget = EFFORT_TOKEN_MAP[config.effort];
+    }
+    return params;
+  }
+  /**
+   * 解析聊天响应
+   * Poe 可能返回 reasoning_content,或者需要从 <think> 标签提取
+   */
+  parseChatResponse(response, model) {
+    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
+    const choices = response.choices;
+    const choice = choices?.[0];
+    if (!choice) {
+      throw new Error("No response from model");
+    }
+    const msg = choice.message;
+    let reasoningContent = msg?.reasoning_content ?? null;
+    let contentText = StreamProcessor2.extractTextContent(msg?.content);
+    if (!reasoningContent && contentText) {
+      const extracted = extractThinkingFromContent(contentText);
+      if (extracted.thinking) {
+        reasoningContent = extracted.thinking;
+        contentText = extracted.content;
+      }
+    }
+    const usage = response.usage;
+    return {
+      content: contentText,
+      reasoning: reasoningContent ? StreamProcessor2.extractTextContent(reasoningContent) : null,
+      model: response.model ?? model,
+      usage: {
+        promptTokens: usage?.prompt_tokens ?? usage?.promptTokens ?? 0,
+        completionTokens: usage?.completion_tokens ?? usage?.completionTokens ?? 0,
+        totalTokens: usage?.total_tokens ?? usage?.totalTokens ?? 0
+      },
+      finishReason: choice.finish_reason ?? choice.finishReason ?? null
+    };
+  }
+  /**
+   * 从 delta 中提取 StreamChunk
+   * Poe 的流式响应处理比较复杂,需要处理多种思考格式
+   */
+  extractStreamChunk(delta) {
+    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
+    if (delta.reasoning_content) {
+      return {
+        type: "reasoning",
+        text: StreamProcessor2.extractTextContent(delta.reasoning_content)
+      };
+    }
+    if (delta.content) {
+      return {
+        type: "content",
+        text: StreamProcessor2.extractTextContent(delta.content)
+      };
+    }
+    return null;
+  }
+  /**
+   * 获取 API 端点 URL
+   */
+  getEndpointUrl(baseUrl) {
+    return `${baseUrl}/chat/completions`;
+  }
+};
+
+// src/adapters/nova-adapter.ts
+init_request_builder();
+var DEFAULT_BASE_URL8 = "https://api.nova.amazon.com/v1";
+var NovaAdapter = class extends BaseAdapter {
+  name = "nova";
+  defaultBaseUrl = DEFAULT_BASE_URL8;
+  /**
+   * 构建聊天请求体
+   * Nova 使用 OpenAI 兼容格式
+   */
+  buildChatRequest(options, stream = false) {
+    const {
+      model,
+      messages,
+      temperature = 0.7,
+      maxTokens,
+      reasoning
+    } = options;
+    const body = {
+      model,
+      messages,
+      temperature,
+      stream
+    };
+    if (maxTokens !== void 0) {
+      body.max_tokens = maxTokens;
+    }
+    const reasoningParams = this.buildReasoningParams(reasoning);
+    Object.assign(body, reasoningParams);
+    return body;
+  }
+  /**
+   * 构建 Nova 格式的 reasoning 参数
+   * Nova 使用 reasoningConfig 控制 extended thinking
+   */
+  buildReasoningParams(config) {
+    return RequestBuilder.buildNovaReasoning(config);
+  }
+  /**
+   * 从 delta 中提取 StreamChunk
+   * Nova 返回 reasoning_content 作为思考过程
+   */
+  extractStreamChunk(delta) {
+    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
+    if (delta.reasoning_content) {
+      return {
+        type: "reasoning",
+        text: StreamProcessor2.extractTextContent(delta.reasoning_content)
+      };
+    }
+    if (delta.content) {
+      return {
+        type: "content",
+        text: StreamProcessor2.extractTextContent(delta.content)
+      };
+    }
+    return null;
+  }
+  /**
+   * 获取 API 端点 URL
+   */
+  getEndpointUrl(baseUrl) {
+    return `${baseUrl}/chat/completions`;
+  }
+};
+
|
1481
|
+
// src/adapters/index.ts
|
|
1482
|
+
function createBuiltInAdapters() {
|
|
1483
|
+
const adapters = /* @__PURE__ */ new Map();
|
|
1484
|
+
adapters.set("openrouter", new OpenRouterAdapter());
|
|
1485
|
+
adapters.set("gemini", new GeminiAdapter());
|
|
1486
|
+
adapters.set("groq", new GroqAdapter());
|
|
1487
|
+
adapters.set("huggingface", new HuggingFaceAdapter());
|
|
1488
|
+
adapters.set("modelscope", new ModelScopeAdapter());
|
|
1489
|
+
adapters.set("deepseek", new DeepSeekAdapter());
|
|
1490
|
+
adapters.set("poe", new PoeAdapter());
|
|
1491
|
+
adapters.set("nova", new NovaAdapter());
|
|
1492
|
+
return adapters;
|
|
1493
|
+
}
|
|
1494
|
+
|
|
1495
|
+
// src/registry/provider-registry.ts
|
|
1496
|
+
var RegistryError = class extends Error {
|
|
1497
|
+
constructor(message, provider, code = "REGISTRY_ERROR") {
|
|
1498
|
+
super(message);
|
|
1499
|
+
this.provider = provider;
|
|
1500
|
+
this.code = code;
|
|
1501
|
+
this.name = "RegistryError";
|
|
1502
|
+
}
|
|
1503
|
+
};
|
|
1504
|
+
var ProviderRegistry = class {
|
|
1505
|
+
/** 适配器映射表 */
|
|
1506
|
+
static adapters = /* @__PURE__ */ new Map();
|
|
1507
|
+
/** 是否已初始化内置适配器 */
|
|
1508
|
+
static initialized = false;
|
|
1509
|
+
/**
|
|
1510
|
+
* 注册 Provider 适配器
|
|
1511
|
+
*
|
|
1512
|
+
* @param adapter - 要注册的适配器实例
|
|
1513
|
+
* @throws RegistryError 如果适配器无效
|
|
1514
|
+
*
|
|
1515
|
+
* @example
|
|
1516
|
+
* ```typescript
|
|
1517
|
+
* const myAdapter = new MyCustomAdapter();
|
|
1518
|
+
* ProviderRegistry.register(myAdapter);
|
|
1519
|
+
* ```
|
|
1520
|
+
*/
|
|
1521
|
+
static register(adapter) {
|
|
1522
|
+
if (!adapter) {
|
|
1523
|
+
throw new RegistryError("\u9002\u914D\u5668\u4E0D\u80FD\u4E3A\u7A7A", void 0, "INVALID_ADAPTER");
|
|
1524
|
+
}
|
|
1525
|
+
if (!adapter.name) {
|
|
1526
|
+
throw new RegistryError(
|
|
1527
|
+
"\u9002\u914D\u5668\u5FC5\u987B\u6709 name \u5C5E\u6027",
|
|
1528
|
+
void 0,
|
|
1529
|
+
"INVALID_ADAPTER"
|
|
1530
|
+
);
|
|
1531
|
+
}
|
|
1532
|
+
this.adapters.set(adapter.name, adapter);
|
|
1533
|
+
}
|
|
1534
|
+
/**
|
|
1535
|
+
* 获取 Provider 适配器
|
|
1536
|
+
*
|
|
1537
|
+
* @param type - Provider 类型
|
|
1538
|
+
* @returns 对应的适配器实例
|
|
1539
|
+
* @throws RegistryError 如果 Provider 未注册
|
|
1540
|
+
*
|
|
1541
|
+
* @example
|
|
1542
|
+
* ```typescript
|
|
1543
|
+
* const adapter = ProviderRegistry.getAdapter('openrouter');
|
|
1544
|
+
* const client = adapter.createClient(config);
|
|
1545
|
+
* ```
|
|
1546
|
+
*/
|
|
1547
|
+
static getAdapter(type) {
|
|
1548
|
+
this.initializeBuiltIn();
|
|
1549
|
+
const adapter = this.adapters.get(type);
|
|
1550
|
+
if (!adapter) {
|
|
1551
|
+
const supported = this.listSupported();
|
|
1552
|
+
throw new RegistryError(
|
|
1553
|
+
`Provider "${type}" \u672A\u6CE8\u518C\u3002\u53EF\u7528\u7684 Provider: ${supported.join(", ")}`,
|
|
1554
|
+
type,
|
|
1555
|
+
"PROVIDER_NOT_FOUND"
|
|
1556
|
+
);
|
|
1557
|
+
}
|
|
1558
|
+
return adapter;
|
|
1559
|
+
}
|
|
1560
|
+
/**
|
|
1561
|
+
* 检查 Provider 是否已注册
|
|
1562
|
+
*
|
|
1563
|
+
* @param type - Provider 类型
|
|
1564
|
+
* @returns 是否已注册
|
|
1565
|
+
*
|
|
1566
|
+
* @example
|
|
1567
|
+
* ```typescript
|
|
1568
|
+
* if (ProviderRegistry.hasAdapter('gemini')) {
|
|
1569
|
+
* console.log('Gemini 已注册');
|
|
1570
|
+
* }
|
|
1571
|
+
* ```
|
|
1572
|
+
*/
|
|
1573
|
+
static hasAdapter(type) {
|
|
1574
|
+
this.initializeBuiltIn();
|
|
1575
|
+
return this.adapters.has(type);
|
|
1576
|
+
}
|
|
1577
|
+
/**
|
|
1578
|
+
* 获取所有已注册的 Provider 类型
|
|
1579
|
+
*
|
|
1580
|
+
* @returns Provider 类型数组
|
|
1581
|
+
*
|
|
1582
|
+
* @example
|
|
1583
|
+
* ```typescript
|
|
1584
|
+
* const providers = ProviderRegistry.listSupported();
|
|
1585
|
+
* console.log('支持的 Provider:', providers);
|
|
1586
|
+
* ```
|
|
1587
|
+
*/
|
|
1588
|
+
static listSupported() {
|
|
1589
|
+
this.initializeBuiltIn();
|
|
1590
|
+
return Array.from(this.adapters.keys());
|
|
1591
|
+
}
|
|
1592
|
+
/**
|
|
1593
|
+
* 从配置文件加载并注册 Provider
|
|
1594
|
+
*
|
|
1595
|
+
* @param config - 注册表配置
|
|
1596
|
+
* @throws RegistryError 如果配置无效或加载失败
|
|
1597
|
+
*
|
|
1598
|
+
* @example
|
|
1599
|
+
* ```typescript
|
|
1600
|
+
* const config: RegistryConfig = {
|
|
1601
|
+
* providers: {
|
|
1602
|
+
* 'custom-provider': {
|
|
1603
|
+
* adapter: './my-adapter',
|
|
1604
|
+
* config: {
|
|
1605
|
+
* apiKey: 'xxx',
|
|
1606
|
+
* baseUrl: 'https://api.example.com'
|
|
1607
|
+
* }
|
|
1608
|
+
* }
|
|
1609
|
+
* }
|
|
1610
|
+
* };
|
|
1611
|
+
* ProviderRegistry.loadFromConfig(config);
|
|
1612
|
+
* ```
|
|
1613
|
+
*/
|
|
1614
|
+
static loadFromConfig(config) {
|
|
1615
|
+
if (!config || !config.providers) {
|
|
1616
|
+
throw new RegistryError(
|
|
1617
|
+
"\u914D\u7F6E\u65E0\u6548\uFF1A\u7F3A\u5C11 providers \u5B57\u6BB5",
|
|
1618
|
+
void 0,
|
|
1619
|
+
"INVALID_CONFIG"
|
|
1620
|
+
);
|
|
1621
|
+
}
|
|
1622
|
+
this.initializeBuiltIn();
|
|
1623
|
+
for (const [providerName, providerConfig] of Object.entries(
|
|
1624
|
+
config.providers
|
|
1625
|
+
)) {
|
|
1626
|
+
if (providerConfig.adapter) {
|
|
1627
|
+
try {
|
|
1628
|
+
const CustomAdapter = require(providerConfig.adapter);
|
|
1629
|
+
const AdapterClass = CustomAdapter.default || CustomAdapter;
|
|
1630
|
+
const adapter = new AdapterClass();
|
|
1631
|
+
if (typeof adapter.name !== "string" || typeof adapter.createClient !== "function") {
|
|
1632
|
+
throw new RegistryError(
|
|
1633
|
+
`\u81EA\u5B9A\u4E49\u9002\u914D\u5668 "${providerConfig.adapter}" \u672A\u6B63\u786E\u5B9E\u73B0 ProviderAdapter \u63A5\u53E3`,
|
|
1634
|
+
providerName,
|
|
1635
|
+
"INVALID_ADAPTER"
|
|
1636
|
+
);
|
|
1637
|
+
}
|
|
1638
|
+
this.adapters.set(providerName, adapter);
|
|
1639
|
+
} catch (error) {
|
|
1640
|
+
if (error instanceof RegistryError) {
|
|
1641
|
+
throw error;
|
|
1642
|
+
}
|
|
1643
|
+
throw new RegistryError(
|
|
1644
|
+
`\u52A0\u8F7D\u81EA\u5B9A\u4E49\u9002\u914D\u5668\u5931\u8D25: ${providerConfig.adapter}`,
|
|
1645
|
+
providerName,
|
|
1646
|
+
"ADAPTER_LOAD_ERROR"
|
|
1647
|
+
);
|
|
1648
|
+
}
|
|
1649
|
+
} else if (!this.adapters.has(providerName)) {
|
|
1650
|
+
throw new RegistryError(
|
|
1651
|
+
`Provider "${providerName}" \u672A\u6CE8\u518C\u4E14\u672A\u6307\u5B9A\u81EA\u5B9A\u4E49\u9002\u914D\u5668`,
|
|
1652
|
+
providerName,
|
|
1653
|
+
"PROVIDER_NOT_FOUND"
|
|
1654
|
+
);
|
|
1655
|
+
}
|
|
1656
|
+
}
|
|
1657
|
+
}
|
|
1658
|
+
/**
|
|
1659
|
+
* 初始化内置 Provider
|
|
1660
|
+
* 在首次使用时自动调用
|
|
1661
|
+
*
|
|
1662
|
+
* @example
|
|
1663
|
+
* ```typescript
|
|
1664
|
+
* // 通常不需要手动调用,会在首次使用时自动初始化
|
|
1665
|
+
* ProviderRegistry.initializeBuiltIn();
|
|
1666
|
+
* ```
|
|
1667
|
+
*/
|
|
1668
|
+
static initializeBuiltIn() {
|
|
1669
|
+
if (this.initialized) {
|
|
1670
|
+
return;
|
|
1671
|
+
}
|
|
1672
|
+
const builtInAdapters = createBuiltInAdapters();
|
|
1673
|
+
for (const [type, adapter] of builtInAdapters) {
|
|
1674
|
+
if (!this.adapters.has(type)) {
|
|
1675
|
+
this.adapters.set(type, adapter);
|
|
1676
|
+
}
|
|
1677
|
+
}
|
|
1678
|
+
this.initialized = true;
|
|
1679
|
+
}
|
|
1680
|
+
/**
|
|
1681
|
+
* 重置注册表(主要用于测试)
|
|
1682
|
+
* 清除所有已注册的适配器并重置初始化状态
|
|
1683
|
+
*/
|
|
1684
|
+
static reset() {
|
|
1685
|
+
this.adapters.clear();
|
|
1686
|
+
this.initialized = false;
|
|
1687
|
+
}
|
|
1688
|
+
/**
|
|
1689
|
+
* 获取适配器数量(主要用于测试)
|
|
1690
|
+
*
|
|
1691
|
+
* @returns 已注册的适配器数量
|
|
1692
|
+
*/
|
|
1693
|
+
static get size() {
|
|
1694
|
+
this.initializeBuiltIn();
|
|
1695
|
+
return this.adapters.size;
|
|
1696
|
+
}
|
|
1697
|
+
};
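The registry above is keyed by provider name and lazily registers the built-in adapters on first access. A minimal usage sketch follows; it assumes `ProviderRegistry` is re-exported from the package entry point (the export list is not visible in this diff), and the `"./my-adapter"` module path is purely hypothetical.

```ts
import { ProviderRegistry } from "@weisiren000/oiiai"; // assumed export surface

// Built-in adapters are registered lazily, so these calls need no setup.
console.log(ProviderRegistry.listSupported()); // e.g. ["openrouter", "gemini", ...]

if (ProviderRegistry.hasAdapter("openrouter")) {
  const adapter = ProviderRegistry.getAdapter("openrouter");
  console.log(adapter.name); // "openrouter"
}

// loadFromConfig wires up a custom adapter loaded via require().
// "./my-adapter" is a hypothetical CommonJS module exporting an adapter class
// with a string `name` and a `createClient` function, as validated above.
ProviderRegistry.loadFromConfig({
  providers: {
    "my-provider": { adapter: "./my-adapter", config: { apiKey: "xxx" } },
  },
});
```

Unknown provider names throw a `RegistryError` carrying the code `PROVIDER_NOT_FOUND`, so lookups fail loudly rather than returning `undefined`.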
+
+// src/config/types.ts
+var CONFIG_DEFAULTS = {
+  /** 默认超时时间(毫秒) */
+  timeout: 3e4,
+  /** 默认重试次数 */
+  retries: 3,
+  /** 默认功能开关 */
+  features: {
+    streaming: true,
+    reasoning: false
+  }
+};
+var VALID_PROVIDERS = [
+  "openrouter",
+  "gemini",
+  "groq",
+  "huggingface",
+  "modelscope",
+  "deepseek",
+  "poe",
+  "nova"
+];
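`CONFIG_DEFAULTS` and `VALID_PROVIDERS` drive the defaulting and validation that follow. A small consumer-side sketch of using the same list as a type guard; the `ProviderType` alias and the `AI_PROVIDER` variable name are local assumptions, not something this diff exports.

```ts
// Local assumption: mirror of the provider list shown above.
const SUPPORTED_PROVIDERS = [
  "openrouter", "gemini", "groq", "huggingface",
  "modelscope", "deepseek", "poe", "nova",
] as const;

type ProviderType = (typeof SUPPORTED_PROVIDERS)[number];

function isProviderType(value: string): value is ProviderType {
  return (SUPPORTED_PROVIDERS as readonly string[]).includes(value);
}

const fromEnv = process.env.AI_PROVIDER ?? "openrouter";
if (isProviderType(fromEnv)) {
  // Defaults shown above: 30000 ms timeout, 3 retries, streaming on, reasoning off.
  console.log(`using ${fromEnv}`);
}
```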
+
+// src/utils/config-validator.ts
+var VALID_PROVIDERS2 = [
+  "openrouter",
+  "gemini",
+  "groq",
+  "huggingface",
+  "modelscope",
+  "deepseek",
+  "poe",
+  "nova"
+];
+var ConfigValidator = class _ConfigValidator {
+  /**
+   * 验证 Provider 配置
+   *
+   * @param config - 要验证的配置对象
+   * @returns 验证结果
+   *
+   * @example
+   * ```ts
+   * const result = ConfigValidator.validate({
+   *   provider: 'openrouter',
+   *   credentials: { apiKey: 'sk-xxx' }
+   * });
+   *
+   * if (!result.valid) {
+   *   console.error(result.errors);
+   * }
+   * ```
+   */
+  static validate(config) {
+    const errors = [];
+    if (!config || typeof config !== "object") {
+      return {
+        valid: false,
+        errors: [
+          {
+            field: "",
+            message: "\u914D\u7F6E\u5FC5\u987B\u662F\u4E00\u4E2A\u5BF9\u8C61",
+            code: "INVALID_CONFIG_TYPE"
+          }
+        ]
+      };
+    }
+    const cfg = config;
+    if (!cfg.provider) {
+      errors.push({
+        field: "provider",
+        message: "provider \u5B57\u6BB5\u662F\u5FC5\u586B\u7684",
+        code: "MISSING_PROVIDER"
+      });
+    } else if (typeof cfg.provider !== "string") {
+      errors.push({
+        field: "provider",
+        message: "provider \u5FC5\u987B\u662F\u5B57\u7B26\u4E32",
+        code: "INVALID_PROVIDER_TYPE"
+      });
+    } else if (!VALID_PROVIDERS2.includes(cfg.provider)) {
+      errors.push({
+        field: "provider",
+        message: `\u65E0\u6548\u7684 provider: ${cfg.provider}\uFF0C\u6709\u6548\u503C\u4E3A: ${VALID_PROVIDERS2.join(", ")}`,
+        code: "INVALID_PROVIDER"
+      });
+    }
+    if (!cfg.credentials) {
+      errors.push({
+        field: "credentials",
+        message: "credentials \u5B57\u6BB5\u662F\u5FC5\u586B\u7684",
+        code: "MISSING_CREDENTIALS"
+      });
+    } else if (typeof cfg.credentials !== "object") {
+      errors.push({
+        field: "credentials",
+        message: "credentials \u5FC5\u987B\u662F\u4E00\u4E2A\u5BF9\u8C61",
+        code: "INVALID_CREDENTIALS_TYPE"
+      });
+    } else {
+      const creds = cfg.credentials;
+      if (!creds.apiKey) {
+        errors.push({
+          field: "credentials.apiKey",
+          message: "apiKey \u5B57\u6BB5\u662F\u5FC5\u586B\u7684",
+          code: "MISSING_API_KEY"
+        });
+      } else if (typeof creds.apiKey !== "string") {
+        errors.push({
+          field: "credentials.apiKey",
+          message: "apiKey \u5FC5\u987B\u662F\u5B57\u7B26\u4E32",
+          code: "INVALID_API_KEY_TYPE"
+        });
+      } else if (creds.apiKey.trim() === "") {
+        errors.push({
+          field: "credentials.apiKey",
+          message: "apiKey \u4E0D\u80FD\u4E3A\u7A7A",
+          code: "EMPTY_API_KEY"
+        });
+      }
+      if (creds.baseUrl !== void 0) {
+        const urlResult = _ConfigValidator.validateUrl(creds.baseUrl);
+        if (!urlResult.valid) {
+          errors.push(
+            ...urlResult.errors.map((e) => ({
+              ...e,
+              field: "credentials.baseUrl"
+            }))
+          );
+        }
+      }
+    }
+    if (cfg.options !== void 0) {
+      if (typeof cfg.options !== "object") {
+        errors.push({
+          field: "options",
+          message: "options \u5FC5\u987B\u662F\u4E00\u4E2A\u5BF9\u8C61",
+          code: "INVALID_OPTIONS_TYPE"
+        });
+      } else {
+        const opts = cfg.options;
+        if (opts.timeout !== void 0) {
+          if (typeof opts.timeout !== "number" || opts.timeout <= 0) {
+            errors.push({
+              field: "options.timeout",
+              message: "timeout \u5FC5\u987B\u662F\u6B63\u6570",
+              code: "INVALID_TIMEOUT"
+            });
+          }
+        }
+        if (opts.retries !== void 0) {
+          if (typeof opts.retries !== "number" || opts.retries < 0 || !Number.isInteger(opts.retries)) {
+            errors.push({
+              field: "options.retries",
+              message: "retries \u5FC5\u987B\u662F\u975E\u8D1F\u6574\u6570",
+              code: "INVALID_RETRIES"
+            });
+          }
+        }
+      }
+    }
+    return {
+      valid: errors.length === 0,
+      errors
+    };
+  }
+  /**
+   * 验证 API Key 格式
+   * 不同 Provider 可能有不同的 API Key 格式要求
+   *
+   * @param apiKey - API 密钥
+   * @param provider - Provider 类型
+   * @returns 验证结果
+   */
+  static validateApiKey(apiKey, provider) {
+    const errors = [];
+    if (!apiKey || typeof apiKey !== "string") {
+      errors.push({
+        field: "apiKey",
+        message: "apiKey \u5FC5\u987B\u662F\u975E\u7A7A\u5B57\u7B26\u4E32",
+        code: "INVALID_API_KEY"
+      });
+      return { valid: false, errors };
+    }
+    const trimmed = apiKey.trim();
+    if (trimmed === "") {
+      errors.push({
+        field: "apiKey",
+        message: "apiKey \u4E0D\u80FD\u4E3A\u7A7A",
+        code: "EMPTY_API_KEY"
+      });
+      return { valid: false, errors };
+    }
+    switch (provider) {
+      case "openrouter":
+        if (!trimmed.startsWith("sk-")) {
+          errors.push({
+            field: "apiKey",
+            message: "OpenRouter API Key \u5E94\u4EE5 sk- \u5F00\u5934",
+            code: "INVALID_API_KEY_FORMAT"
+          });
+        }
+        break;
+      case "gemini":
+        if (!trimmed.startsWith("AI")) {
+          errors.push({
+            field: "apiKey",
+            message: "Gemini API Key \u683C\u5F0F\u53EF\u80FD\u4E0D\u6B63\u786E",
+            code: "INVALID_API_KEY_FORMAT"
+          });
+        }
+        break;
+      // 其他 Provider 暂不做特定格式验证
+      default:
+        break;
+    }
+    return {
+      valid: errors.length === 0,
+      errors
+    };
+  }
+  /**
+   * 验证 URL 格式
+   *
+   * @param url - 要验证的 URL
+   * @returns 验证结果
+   */
+  static validateUrl(url) {
+    const errors = [];
+    if (typeof url !== "string") {
+      errors.push({
+        field: "url",
+        message: "URL \u5FC5\u987B\u662F\u5B57\u7B26\u4E32",
+        code: "INVALID_URL_TYPE"
+      });
+      return { valid: false, errors };
+    }
+    const trimmed = url.trim();
+    if (trimmed === "") {
+      errors.push({
+        field: "url",
+        message: "URL \u4E0D\u80FD\u4E3A\u7A7A",
+        code: "EMPTY_URL"
+      });
+      return { valid: false, errors };
+    }
+    try {
+      const parsed = new URL(trimmed);
+      if (!["http:", "https:"].includes(parsed.protocol)) {
+        errors.push({
+          field: "url",
+          message: "URL \u5FC5\u987B\u4F7F\u7528 http \u6216 https \u534F\u8BAE",
+          code: "INVALID_URL_PROTOCOL"
+        });
+      }
+    } catch {
+      errors.push({
+        field: "url",
+        message: "URL \u683C\u5F0F\u65E0\u6548",
+        code: "INVALID_URL_FORMAT"
+      });
+    }
+    return {
+      valid: errors.length === 0,
+      errors
+    };
+  }
+};
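`ConfigValidator` never throws; it accumulates `{ field, message, code }` entries and reports `valid` only when that list is empty. A usage sketch, assuming the class is exported from the package entry point; the key and URL values are illustrative:

```ts
import { ConfigValidator } from "@weisiren000/oiiai"; // assumed export surface

const result = ConfigValidator.validate({
  provider: "openrouter",
  credentials: { apiKey: "sk-xxx", baseUrl: "https://openrouter.ai/api/v1" },
  options: { timeout: 60000, retries: 2 },
});

if (!result.valid) {
  for (const e of result.errors) {
    console.error(`${e.code} at ${e.field}: ${e.message}`);
  }
}

// The field-level helpers return the same { valid, errors } shape.
console.log(ConfigValidator.validateUrl("ftp://example.com").errors[0]?.code);
// "INVALID_URL_PROTOCOL" (only http/https pass)
console.log(ConfigValidator.validateApiKey("AIzaSy-example", "gemini").valid);
```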
+
+// src/config/config-manager.ts
+var ConfigManager = class _ConfigManager {
+  /**
+   * 验证配置
+   * 检查配置是否符合 UnifiedProviderConfig 格式要求
+   *
+   * @param config - 要验证的配置对象
+   * @returns 验证结果
+   *
+   * @example
+   * ```ts
+   * const result = ConfigManager.validate({
+   *   provider: 'openrouter',
+   *   credentials: { apiKey: 'sk-xxx' }
+   * });
+   *
+   * if (!result.valid) {
+   *   console.error(result.errors);
+   * }
+   * ```
+   */
+  static validate(config) {
+    return ConfigValidator.validate(config);
+  }
+  /**
+   * 应用默认值
+   * 为缺失的可选字段填充默认值
+   *
+   * @param config - 部分配置对象
+   * @returns 填充默认值后的完整配置
+   *
+   * @example
+   * ```ts
+   * const fullConfig = ConfigManager.applyDefaults({
+   *   provider: 'openrouter',
+   *   credentials: { apiKey: 'sk-xxx' }
+   * });
+   * // fullConfig.options.timeout === 30000
+   * // fullConfig.options.retries === 3
+   * ```
+   */
+  static applyDefaults(config) {
+    if (!config.provider || !config.credentials?.apiKey) {
+      throw new Error("\u914D\u7F6E\u7F3A\u5C11\u5FC5\u586B\u5B57\u6BB5: provider \u548C credentials.apiKey");
+    }
+    return {
+      provider: config.provider,
+      adapter: config.adapter,
+      credentials: {
+        apiKey: config.credentials.apiKey,
+        baseUrl: config.credentials.baseUrl
+      },
+      options: {
+        timeout: config.options?.timeout ?? CONFIG_DEFAULTS.timeout,
+        retries: config.options?.retries ?? CONFIG_DEFAULTS.retries,
+        headers: config.options?.headers ?? {}
+      },
+      features: {
+        streaming: config.features?.streaming ?? CONFIG_DEFAULTS.features.streaming,
+        reasoning: config.features?.reasoning ?? CONFIG_DEFAULTS.features.reasoning
+      }
+    };
+  }
+  /**
+   * 合并环境变量
+   * 将 ${ENV_VAR} 格式的占位符替换为实际环境变量值
+   *
+   * @param config - 包含环境变量占位符的配置
+   * @returns 替换后的配置
+   *
+   * @example
+   * ```ts
+   * // 假设 process.env.OPENROUTER_API_KEY = 'sk-xxx'
+   * const config = ConfigManager.mergeWithEnv({
+   *   provider: 'openrouter',
+   *   credentials: { apiKey: '${OPENROUTER_API_KEY}' }
+   * });
+   * // config.credentials.apiKey === 'sk-xxx'
+   * ```
+   */
+  static mergeWithEnv(config) {
+    const result = JSON.parse(JSON.stringify(config));
+    const replaceEnvVars = (obj) => {
+      for (const key of Object.keys(obj)) {
+        const value = obj[key];
+        if (typeof value === "string") {
+          obj[key] = _ConfigManager.replaceEnvPlaceholders(value);
+        } else if (value && typeof value === "object" && !Array.isArray(value)) {
+          replaceEnvVars(value);
+        }
+      }
+    };
+    replaceEnvVars(result);
+    return result;
+  }
+  /**
+   * 替换字符串中的环境变量占位符
+   * 支持 ${ENV_VAR} 格式
+   *
+   * @param str - 包含占位符的字符串
+   * @returns 替换后的字符串
+   */
+  static replaceEnvPlaceholders(str) {
+    const envVarPattern = /\$\{([^}]+)\}/g;
+    return str.replace(envVarPattern, (match, envVarName) => {
+      const envValue = process.env[envVarName];
+      return envValue !== void 0 ? envValue : match;
+    });
+  }
+  /**
+   * 从旧格式配置转换为新格式
+   * 保持向后兼容性
+   *
+   * @param config - 旧格式的 Provider 配置
+   * @returns 新格式的统一配置
+   *
+   * @example
+   * ```ts
+   * const newConfig = ConfigManager.fromLegacyConfig({
+   *   provider: 'openrouter',
+   *   apiKey: 'sk-xxx',
+   *   baseUrl: 'https://api.example.com'
+   * });
+   * // newConfig.credentials.apiKey === 'sk-xxx'
+   * // newConfig.credentials.baseUrl === 'https://api.example.com'
+   * ```
+   */
+  static fromLegacyConfig(config) {
+    if (!config.provider) {
+      throw new Error("\u65E7\u683C\u5F0F\u914D\u7F6E\u7F3A\u5C11 provider \u5B57\u6BB5");
+    }
+    if (!config.apiKey) {
+      throw new Error("\u65E7\u683C\u5F0F\u914D\u7F6E\u7F3A\u5C11 apiKey \u5B57\u6BB5");
+    }
+    if (!VALID_PROVIDERS.includes(config.provider)) {
+      throw new Error(
+        `\u65E0\u6548\u7684 provider: ${config.provider}\uFF0C\u6709\u6548\u503C\u4E3A: ${VALID_PROVIDERS.join(", ")}`
+      );
+    }
+    return _ConfigManager.applyDefaults({
+      provider: config.provider,
+      credentials: {
+        apiKey: config.apiKey,
+        baseUrl: config.baseUrl
+      }
+    });
+  }
+  /**
+   * 检查配置是否为旧格式
+   *
+   * @param config - 要检查的配置对象
+   * @returns 是否为旧格式
+   */
+  static isLegacyConfig(config) {
+    if (!config || typeof config !== "object") {
+      return false;
+    }
+    const cfg = config;
+    return typeof cfg.provider === "string" && typeof cfg.apiKey === "string" && cfg.credentials === void 0;
+  }
+  /**
+   * 智能转换配置
+   * 自动检测配置格式并转换为统一格式
+   *
+   * @param config - 任意格式的配置
+   * @returns 统一格式的配置
+   */
+  static normalize(config) {
+    if (_ConfigManager.isLegacyConfig(config)) {
+      return _ConfigManager.fromLegacyConfig(config);
+    }
+    return _ConfigManager.applyDefaults(
+      config
+    );
+  }
+  /**
+   * 获取指定 Provider 的默认基础 URL
+   *
+   * @param provider - Provider 类型
+   * @returns 默认基础 URL
+   */
+  static getDefaultBaseUrl(provider) {
+    const defaultUrls = {
+      openrouter: "https://openrouter.ai/api/v1",
+      gemini: "https://generativelanguage.googleapis.com/v1beta",
+      groq: "https://api.groq.com/openai/v1",
+      huggingface: "https://api-inference.huggingface.co",
+      modelscope: "https://dashscope.aliyuncs.com/compatible-mode/v1",
+      deepseek: "https://api.deepseek.com/v1",
+      poe: "https://api.poe.com/bot",
+      nova: "https://bedrock-runtime.us-east-1.amazonaws.com"
+    };
+    return defaultUrls[provider];
+  }
+};
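`ConfigManager` stitches the pieces together: `${ENV_VAR}` placeholders are resolved first, then `normalize` upgrades legacy `{ provider, apiKey }` configs and fills in the defaults shown above. A sketch under the same assumption that the class is exported from the entry point:

```ts
import { ConfigManager } from "@weisiren000/oiiai"; // assumed export surface

// 1. Resolve ${ENV_VAR} placeholders (unset variables are left untouched).
const withEnv = ConfigManager.mergeWithEnv({
  provider: "openrouter",
  credentials: { apiKey: "${OPENROUTER_API_KEY}" },
});

// 2. Fill defaults: timeout 30000, retries 3, streaming on, reasoning off.
const unified = ConfigManager.applyDefaults(withEnv);
console.log(unified.options.timeout); // 30000

// 3. Legacy flat configs are detected and upgraded automatically.
const upgraded = ConfigManager.normalize({
  provider: "deepseek",
  apiKey: "sk-legacy",
});
console.log(upgraded.credentials.apiKey); // "sk-legacy"
console.log(ConfigManager.getDefaultBaseUrl("groq")); // "https://api.groq.com/openai/v1"
```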
+
+// src/providers/__factory__.ts
+var AdapterBasedProvider = class {
+  constructor(adapter, apiKey, baseUrl) {
+    this.adapter = adapter;
+    this.apiKey = apiKey;
+    this.baseUrl = baseUrl;
+    this.name = adapter.name;
+  }
+  name;
+  /**
+   * 获取客户端实例
+   */
+  getClient() {
+    return this.adapter.createClient({
+      apiKey: this.apiKey,
+      baseUrl: this.baseUrl ?? this.adapter.defaultBaseUrl
+    });
+  }
+  /**
+   * 发送聊天请求(非流式)
+   */
+  async chat(options) {
+    const client = this.getClient();
+    const baseUrl = this.baseUrl ?? this.adapter.defaultBaseUrl;
+    const endpoint = this.adapter.getEndpointUrl(baseUrl);
+    const endpointPath = endpoint.replace(baseUrl, "");
+    const body = this.adapter.buildChatRequest(options, false);
+    const response = await client.chat(endpointPath, body);
+    return this.adapter.parseChatResponse(response, options.model);
+  }
+  /**
+   * 发送流式聊天请求
+   */
+  async *chatStream(options) {
+    const client = this.getClient();
+    const baseUrl = this.baseUrl ?? this.adapter.defaultBaseUrl;
+    const endpoint = this.adapter.getEndpointUrl(baseUrl);
+    const endpointPath = endpoint.replace(baseUrl, "");
+    const body = this.adapter.buildChatRequest(options, true);
+    const response = await client.chatStream(endpointPath, body);
+    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
+    yield* StreamProcessor2.processStream(
+      response,
+      (delta) => this.adapter.extractStreamChunk(delta)
+    );
+  }
+  /**
+   * 简单对话:单轮问答
+   */
+  async ask(model, question, options) {
+    const result = await this.chat({
+      model,
+      messages: [{ role: "user", content: question }],
+      ...options
+    });
+    return result.content;
+  }
+  /**
+   * 带系统提示的对话
+   */
+  async askWithSystem(model, systemPrompt, userMessage, options) {
+    const result = await this.chat({
+      model,
+      messages: [
+        { role: "system", content: systemPrompt },
+        { role: "user", content: userMessage }
+      ],
+      ...options
+    });
+    return result.content;
+  }
+};
+function createProvider(config) {
+  const unifiedConfig = ConfigManager.fromLegacyConfig(config);
+  const adapter = ProviderRegistry.getAdapter(unifiedConfig.provider);
+  return new AdapterBasedProvider(
+    adapter,
+    unifiedConfig.credentials.apiKey,
+    unifiedConfig.credentials.baseUrl
+  );
+}
+var ai = {
+  openrouter: (apiKey, baseUrl) => createProvider({ provider: "openrouter", apiKey, baseUrl }),
+  gemini: (apiKey, baseUrl) => createProvider({ provider: "gemini", apiKey, baseUrl }),
+  groq: (apiKey, baseUrl) => createProvider({ provider: "groq", apiKey, baseUrl }),
+  huggingface: (apiKey, baseUrl) => createProvider({ provider: "huggingface", apiKey, baseUrl }),
+  modelscope: (apiKey, baseUrl) => createProvider({ provider: "modelscope", apiKey, baseUrl }),
+  deepseek: (apiKey, baseUrl) => createProvider({ provider: "deepseek", apiKey, baseUrl }),
+  poe: (apiKey, baseUrl) => createProvider({ provider: "poe", apiKey, baseUrl }),
+  nova: (apiKey, baseUrl) => createProvider({ provider: "nova", apiKey, baseUrl })
+};
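The `ai` factory is the convenience entry point: each method builds a legacy-style config, normalizes it through ConfigManager, looks the adapter up in the registry, and wraps it in an AdapterBasedProvider. A usage sketch follows; the model id and environment variable name are illustrative, and the chunk shape mirrors the `{ type, text }` objects used elsewhere in this file.

```ts
import { ai } from "@weisiren000/oiiai"; // assumed export surface

async function main() {
  const provider = ai.openrouter(process.env.OPENROUTER_API_KEY ?? "");

  // Single-turn helper built on top of chat(); returns result.content.
  const answer = await provider.ask("openai/gpt-4o-mini", "Say hi in one word.");
  console.log(answer);

  // Streaming goes through the adapter's extractStreamChunk + StreamProcessor.
  for await (const chunk of provider.chatStream({
    model: "openai/gpt-4o-mini",
    messages: [{ role: "user", content: "Count to three." }],
  })) {
    if (chunk.type === "content") process.stdout.write(chunk.text);
  }
}

main().catch(console.error);
```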
 
 // src/providers/__model-detection__.ts
 var THINKING_MODEL_PATTERNS = [
@@ -335,7 +2553,11 @@ var BaseProvider = class {
    * 3. 如果 content 为空,智能降级(提取结论或返回 reasoning)
    */
   async ask(model, question, options) {
-    const {
+    const {
+      fallback,
+      autoAdjust = this.autoAdjustEnabled,
+      ...chatOptions
+    } = options ?? {};
     let finalOptions = {
       model,
       messages: [{ role: "user", content: question }],
@@ -361,7 +2583,11 @@ var BaseProvider = class {
    * 3. 如果 content 为空,智能降级(提取结论或返回 reasoning)
    */
   async askWithSystem(model, systemPrompt, userMessage, options) {
-    const {
+    const {
+      fallback,
+      autoAdjust = this.autoAdjustEnabled,
+      ...chatOptions
+    } = options ?? {};
     let finalOptions = {
       model,
       messages: [
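The hunks above change `ask` and `askWithSystem` so that `fallback` and `autoAdjust` are peeled off the options before the rest flows into the chat request, and the replacement `askWithScenario` added further down in this diff maps a scenario name to a recommended reasoning config via ModelDetection before deferring to `ask`. A sketch of how those options are meant to be used; it assumes `OpenRouterProvider` is exported from the entry point and the model id is illustrative only.

```ts
import { OpenRouterProvider } from "@weisiren000/oiiai"; // assumed export surface

const provider = new OpenRouterProvider(process.env.OPENROUTER_API_KEY ?? "");

// fallback / autoAdjust are stripped off before the request is built;
// everything else (temperature, maxTokens, reasoning, ...) becomes chat options.
const answer = await provider.ask("deepseek/deepseek-r1", "Why is the sky blue?", {
  autoAdjust: true,
  reasoning: { effort: "low" },
  maxTokens: 512,
});

// Scenario presets ('simple' by default, plus 'reasoning' and 'fast' per the
// comments above) pick a recommended thinking config for the model.
const quick = await provider.askWithScenario("deepseek/deepseek-r1", "2 + 2 = ?", "fast");
console.log(answer, quick);
```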
@@ -392,1432 +2618,796 @@ var BaseProvider = class {
|
|
|
392
2618
|
* - 'reasoning': 逻辑推理
|
|
393
2619
|
* - 'fast': 快速回答(关闭思考)
|
|
394
2620
|
*/
|
|
395
|
-
async askWithScenario(model, question, scenario = "simple", options) {
|
|
396
|
-
const recommendedConfig = ModelDetection.getRecommendedConfig(
|
|
397
|
-
|
|
398
|
-
|
|
399
|
-
|
|
400
|
-
|
|
401
|
-
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
// src/providers/__types__.ts
|
|
405
|
-
var EFFORT_TOKEN_MAP = {
|
|
406
|
-
off: 0,
|
|
407
|
-
low: 1024,
|
|
408
|
-
medium: 4096,
|
|
409
|
-
high: 16384
|
|
410
|
-
};
|
|
411
|
-
|
|
412
|
-
// src/providers/openrouter.ts
|
|
413
|
-
function extractTextContent(content) {
|
|
414
|
-
if (typeof content === "string") {
|
|
415
|
-
return content;
|
|
416
|
-
}
|
|
417
|
-
if (Array.isArray(content)) {
|
|
418
|
-
return content.filter(
|
|
419
|
-
(item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
|
|
420
|
-
).map((item) => item.text).join("");
|
|
421
|
-
}
|
|
422
|
-
return "";
|
|
423
|
-
}
|
|
424
|
-
function buildReasoningParam(config) {
|
|
425
|
-
if (!config) return void 0;
|
|
426
|
-
if (config.effort === "off") return void 0;
|
|
427
|
-
const param = {};
|
|
428
|
-
if (config.effort) {
|
|
429
|
-
param.effort = config.effort;
|
|
430
|
-
}
|
|
431
|
-
if (config.budgetTokens !== void 0) {
|
|
432
|
-
param.max_tokens = config.budgetTokens;
|
|
433
|
-
} else if (config.effort && EFFORT_TOKEN_MAP[config.effort]) {
|
|
434
|
-
param.max_tokens = EFFORT_TOKEN_MAP[config.effort];
|
|
435
|
-
}
|
|
436
|
-
if (config.exclude !== void 0) {
|
|
437
|
-
param.exclude = config.exclude;
|
|
438
|
-
}
|
|
439
|
-
return Object.keys(param).length > 0 ? param : void 0;
|
|
440
|
-
}
|
|
441
|
-
var OpenRouterProvider = class extends BaseProvider {
|
|
442
|
-
name = "openrouter";
|
|
443
|
-
client;
|
|
444
|
-
constructor(apiKey) {
|
|
445
|
-
super();
|
|
446
|
-
this.client = new import_sdk.OpenRouter({ apiKey });
|
|
447
|
-
}
|
|
448
|
-
/**
|
|
449
|
-
* 发送聊天请求(非流式)
|
|
450
|
-
*/
|
|
451
|
-
async chat(options) {
|
|
452
|
-
const {
|
|
453
|
-
model,
|
|
454
|
-
messages,
|
|
455
|
-
temperature = 0.7,
|
|
456
|
-
maxTokens,
|
|
457
|
-
reasoning
|
|
458
|
-
} = options;
|
|
459
|
-
const reasoningParam = buildReasoningParam(reasoning);
|
|
460
|
-
const requestParams = {
|
|
461
|
-
model,
|
|
462
|
-
messages,
|
|
463
|
-
temperature,
|
|
464
|
-
maxTokens,
|
|
465
|
-
stream: false
|
|
466
|
-
};
|
|
467
|
-
if (reasoningParam) {
|
|
468
|
-
requestParams.reasoning = reasoningParam;
|
|
469
|
-
}
|
|
470
|
-
const result = await this.client.chat.send(requestParams);
|
|
471
|
-
const choice = result.choices[0];
|
|
472
|
-
if (!choice) {
|
|
473
|
-
throw new Error("No response from model");
|
|
474
|
-
}
|
|
475
|
-
const msg = choice.message;
|
|
476
|
-
const reasoningContent = msg.reasoning_content ?? msg.reasoning ?? null;
|
|
477
|
-
return {
|
|
478
|
-
content: extractTextContent(msg.content),
|
|
479
|
-
reasoning: reasoningContent ? extractTextContent(reasoningContent) : null,
|
|
480
|
-
model: result.model,
|
|
481
|
-
usage: {
|
|
482
|
-
promptTokens: result.usage?.promptTokens ?? 0,
|
|
483
|
-
completionTokens: result.usage?.completionTokens ?? 0,
|
|
484
|
-
totalTokens: result.usage?.totalTokens ?? 0
|
|
485
|
-
},
|
|
486
|
-
finishReason: choice.finishReason
|
|
487
|
-
};
|
|
488
|
-
}
|
|
489
|
-
/**
|
|
490
|
-
* 发送流式聊天请求
|
|
491
|
-
*/
|
|
492
|
-
async *chatStream(options) {
|
|
493
|
-
const {
|
|
494
|
-
model,
|
|
495
|
-
messages,
|
|
496
|
-
temperature = 0.7,
|
|
497
|
-
maxTokens,
|
|
498
|
-
reasoning
|
|
499
|
-
} = options;
|
|
500
|
-
const reasoningParam = buildReasoningParam(reasoning);
|
|
501
|
-
const requestParams = {
|
|
502
|
-
model,
|
|
503
|
-
messages,
|
|
504
|
-
temperature,
|
|
505
|
-
maxTokens,
|
|
506
|
-
stream: true
|
|
507
|
-
};
|
|
508
|
-
if (reasoningParam) {
|
|
509
|
-
requestParams.reasoning = reasoningParam;
|
|
510
|
-
}
|
|
511
|
-
const stream = await this.client.chat.send(
|
|
512
|
-
requestParams
|
|
513
|
-
);
|
|
514
|
-
for await (const chunk of stream) {
|
|
515
|
-
const delta = chunk.choices?.[0]?.delta;
|
|
516
|
-
if (!delta) continue;
|
|
517
|
-
const reasoningContent = delta.reasoning_content ?? delta.reasoning;
|
|
518
|
-
if (reasoningContent) {
|
|
519
|
-
yield { type: "reasoning", text: extractTextContent(reasoningContent) };
|
|
520
|
-
}
|
|
521
|
-
if (delta.content) {
|
|
522
|
-
yield { type: "content", text: extractTextContent(delta.content) };
|
|
523
|
-
}
|
|
524
|
-
}
|
|
525
|
-
}
|
|
526
|
-
/**
|
|
527
|
-
* 获取可用模型列表
|
|
528
|
-
*/
|
|
529
|
-
async listModels() {
|
|
530
|
-
const result = await this.client.models.list();
|
|
531
|
-
return (result.data ?? []).map((m) => ({
|
|
532
|
-
id: m.id,
|
|
533
|
-
canonicalSlug: m.canonical_slug ?? m.id,
|
|
534
|
-
name: m.name,
|
|
535
|
-
description: m.description ?? "",
|
|
536
|
-
created: m.created ?? 0,
|
|
537
|
-
pricing: {
|
|
538
|
-
prompt: m.pricing?.prompt ?? "0",
|
|
539
|
-
completion: m.pricing?.completion ?? "0",
|
|
540
|
-
request: m.pricing?.request ?? "0",
|
|
541
|
-
image: m.pricing?.image ?? "0"
|
|
542
|
-
},
|
|
543
|
-
contextLength: m.context_length ?? 0,
|
|
544
|
-
architecture: {
|
|
545
|
-
modality: m.architecture?.modality ?? "",
|
|
546
|
-
inputModalities: m.architecture?.input_modalities ?? [],
|
|
547
|
-
outputModalities: m.architecture?.output_modalities ?? [],
|
|
548
|
-
tokenizer: m.architecture?.tokenizer ?? "",
|
|
549
|
-
instructType: m.architecture?.instruct_type ?? ""
|
|
550
|
-
},
|
|
551
|
-
supportedParameters: m.supported_parameters ?? []
|
|
552
|
-
}));
|
|
553
|
-
}
|
|
554
|
-
};
|
|
555
|
-
|
|
556
|
-
// src/providers/gemini.ts
|
|
557
|
-
var BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
|
|
558
|
-
function extractTextContent2(content) {
|
|
559
|
-
if (typeof content === "string") {
|
|
560
|
-
return content;
|
|
561
|
-
}
|
|
562
|
-
if (Array.isArray(content)) {
|
|
563
|
-
return content.filter(
|
|
564
|
-
(item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
|
|
565
|
-
).map((item) => item.text).join("");
|
|
566
|
-
}
|
|
567
|
-
return "";
|
|
568
|
-
}
|
|
569
|
-
var GeminiProvider = class extends BaseProvider {
|
|
570
|
-
name = "gemini";
|
|
571
|
-
apiKey;
|
|
572
|
-
baseUrl;
|
|
573
|
-
constructor(config) {
|
|
574
|
-
super();
|
|
575
|
-
if (typeof config === "string") {
|
|
576
|
-
this.apiKey = config;
|
|
577
|
-
this.baseUrl = BASE_URL;
|
|
578
|
-
} else {
|
|
579
|
-
this.apiKey = config.apiKey;
|
|
580
|
-
this.baseUrl = config.baseUrl ?? BASE_URL;
|
|
581
|
-
}
|
|
582
|
-
}
|
|
583
|
-
/**
|
|
584
|
-
* 发送聊天请求(非流式)
|
|
585
|
-
*/
|
|
586
|
-
async chat(options) {
|
|
587
|
-
const {
|
|
588
|
-
model,
|
|
589
|
-
messages,
|
|
590
|
-
temperature = 0.7,
|
|
591
|
-
maxTokens,
|
|
592
|
-
reasoning
|
|
593
|
-
} = options;
|
|
594
|
-
const body = {
|
|
595
|
-
model,
|
|
596
|
-
messages,
|
|
597
|
-
temperature,
|
|
598
|
-
stream: false
|
|
599
|
-
};
|
|
600
|
-
if (maxTokens) {
|
|
601
|
-
body.max_tokens = maxTokens;
|
|
602
|
-
}
|
|
603
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
604
|
-
body.reasoning_effort = reasoning.effort;
|
|
605
|
-
}
|
|
606
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
607
|
-
method: "POST",
|
|
608
|
-
headers: {
|
|
609
|
-
"Content-Type": "application/json",
|
|
610
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
611
|
-
},
|
|
612
|
-
body: JSON.stringify(body)
|
|
613
|
-
});
|
|
614
|
-
if (!response.ok) {
|
|
615
|
-
const error = await response.text();
|
|
616
|
-
throw new Error(`Gemini API error: ${response.status} ${error}`);
|
|
617
|
-
}
|
|
618
|
-
const result = await response.json();
|
|
619
|
-
const choice = result.choices?.[0];
|
|
620
|
-
if (!choice) {
|
|
621
|
-
throw new Error("No response from model");
|
|
622
|
-
}
|
|
623
|
-
const msg = choice.message;
|
|
624
|
-
const reasoningContent = msg?.reasoning_content ?? null;
|
|
625
|
-
return {
|
|
626
|
-
content: extractTextContent2(msg?.content),
|
|
627
|
-
reasoning: reasoningContent ? extractTextContent2(reasoningContent) : null,
|
|
628
|
-
model: result.model ?? model,
|
|
629
|
-
usage: {
|
|
630
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
631
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
632
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
633
|
-
},
|
|
634
|
-
finishReason: choice.finish_reason ?? null
|
|
635
|
-
};
|
|
636
|
-
}
|
|
637
|
-
/**
|
|
638
|
-
* 发送流式聊天请求
|
|
639
|
-
*/
|
|
640
|
-
async *chatStream(options) {
|
|
641
|
-
const {
|
|
642
|
-
model,
|
|
643
|
-
messages,
|
|
644
|
-
temperature = 0.7,
|
|
645
|
-
maxTokens,
|
|
646
|
-
reasoning
|
|
647
|
-
} = options;
|
|
648
|
-
const body = {
|
|
649
|
-
model,
|
|
650
|
-
messages,
|
|
651
|
-
temperature,
|
|
652
|
-
stream: true
|
|
653
|
-
};
|
|
654
|
-
if (maxTokens) {
|
|
655
|
-
body.max_tokens = maxTokens;
|
|
656
|
-
}
|
|
657
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
658
|
-
body.reasoning_effort = reasoning.effort;
|
|
659
|
-
}
|
|
660
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
661
|
-
method: "POST",
|
|
662
|
-
headers: {
|
|
663
|
-
"Content-Type": "application/json",
|
|
664
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
665
|
-
},
|
|
666
|
-
body: JSON.stringify(body)
|
|
667
|
-
});
|
|
668
|
-
if (!response.ok) {
|
|
669
|
-
const error = await response.text();
|
|
670
|
-
throw new Error(`Gemini API error: ${response.status} ${error}`);
|
|
671
|
-
}
|
|
672
|
-
const reader = response.body?.getReader();
|
|
673
|
-
if (!reader) {
|
|
674
|
-
throw new Error("No response body");
|
|
675
|
-
}
|
|
676
|
-
const decoder = new TextDecoder();
|
|
677
|
-
let buffer = "";
|
|
678
|
-
try {
|
|
679
|
-
while (true) {
|
|
680
|
-
const { done, value } = await reader.read();
|
|
681
|
-
if (done) break;
|
|
682
|
-
buffer += decoder.decode(value, { stream: true });
|
|
683
|
-
const lines = buffer.split("\n");
|
|
684
|
-
buffer = lines.pop() ?? "";
|
|
685
|
-
for (const line of lines) {
|
|
686
|
-
const trimmed = line.trim();
|
|
687
|
-
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
688
|
-
if (!trimmed.startsWith("data: ")) continue;
|
|
689
|
-
try {
|
|
690
|
-
const data = JSON.parse(trimmed.slice(6));
|
|
691
|
-
const delta = data.choices?.[0]?.delta;
|
|
692
|
-
if (!delta) continue;
|
|
693
|
-
const thought = delta.reasoning_content ?? delta.thoughts;
|
|
694
|
-
if (thought) {
|
|
695
|
-
yield {
|
|
696
|
-
type: "reasoning",
|
|
697
|
-
text: extractTextContent2(thought)
|
|
698
|
-
};
|
|
699
|
-
}
|
|
700
|
-
if (delta.content) {
|
|
701
|
-
yield {
|
|
702
|
-
type: "content",
|
|
703
|
-
text: extractTextContent2(delta.content)
|
|
704
|
-
};
|
|
705
|
-
}
|
|
706
|
-
} catch {
|
|
707
|
-
}
|
|
708
|
-
}
|
|
709
|
-
}
|
|
710
|
-
} finally {
|
|
711
|
-
reader.releaseLock();
|
|
712
|
-
}
|
|
713
|
-
}
|
|
714
|
-
};
|
|
715
|
-
|
|
716
|
-
// src/providers/groq.ts
|
|
717
|
-
var BASE_URL2 = "https://api.groq.com/openai/v1";
|
|
718
|
-
function extractTextContent3(content) {
|
|
719
|
-
if (typeof content === "string") {
|
|
720
|
-
return content;
|
|
721
|
-
}
|
|
722
|
-
if (Array.isArray(content)) {
|
|
723
|
-
return content.filter(
|
|
724
|
-
(item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
|
|
725
|
-
).map((item) => item.text).join("");
|
|
726
|
-
}
|
|
727
|
-
return "";
|
|
728
|
-
}
|
|
729
|
-
var GroqProvider = class extends BaseProvider {
|
|
730
|
-
name = "groq";
|
|
731
|
-
apiKey;
|
|
732
|
-
baseUrl;
|
|
733
|
-
constructor(config) {
|
|
734
|
-
super();
|
|
735
|
-
if (typeof config === "string") {
|
|
736
|
-
this.apiKey = config;
|
|
737
|
-
this.baseUrl = BASE_URL2;
|
|
738
|
-
} else {
|
|
739
|
-
this.apiKey = config.apiKey;
|
|
740
|
-
this.baseUrl = config.baseUrl ?? BASE_URL2;
|
|
741
|
-
}
|
|
742
|
-
}
|
|
743
|
-
/**
|
|
744
|
-
* 发送聊天请求(非流式)
|
|
745
|
-
*/
|
|
746
|
-
async chat(options) {
|
|
747
|
-
const { model, messages, temperature = 1, maxTokens, reasoning } = options;
|
|
748
|
-
const body = {
|
|
749
|
-
model,
|
|
750
|
-
messages,
|
|
751
|
-
temperature,
|
|
752
|
-
stream: false,
|
|
753
|
-
top_p: 1
|
|
754
|
-
};
|
|
755
|
-
if (maxTokens) {
|
|
756
|
-
body.max_completion_tokens = maxTokens;
|
|
757
|
-
}
|
|
758
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
759
|
-
body.reasoning_format = "parsed";
|
|
760
|
-
} else if (reasoning?.effort === "off") {
|
|
761
|
-
body.include_reasoning = false;
|
|
762
|
-
}
|
|
763
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
764
|
-
method: "POST",
|
|
765
|
-
headers: {
|
|
766
|
-
"Content-Type": "application/json",
|
|
767
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
768
|
-
},
|
|
769
|
-
body: JSON.stringify(body)
|
|
770
|
-
});
|
|
771
|
-
if (!response.ok) {
|
|
772
|
-
const error = await response.text();
|
|
773
|
-
throw new Error(`Groq API error: ${response.status} ${error}`);
|
|
774
|
-
}
|
|
775
|
-
const result = await response.json();
|
|
776
|
-
const choice = result.choices?.[0];
|
|
777
|
-
if (!choice) {
|
|
778
|
-
throw new Error("No response from model");
|
|
779
|
-
}
|
|
780
|
-
const msg = choice.message;
|
|
781
|
-
const reasoningContent = msg?.reasoning_content ?? msg?.reasoning ?? null;
|
|
782
|
-
return {
|
|
783
|
-
content: extractTextContent3(msg?.content),
|
|
784
|
-
reasoning: reasoningContent ? extractTextContent3(reasoningContent) : null,
|
|
785
|
-
model: result.model ?? model,
|
|
786
|
-
usage: {
|
|
787
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
788
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
789
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
790
|
-
},
|
|
791
|
-
finishReason: choice.finish_reason ?? null
|
|
792
|
-
};
|
|
793
|
-
}
|
|
794
|
-
/**
|
|
795
|
-
* 发送流式聊天请求
|
|
796
|
-
*/
|
|
797
|
-
async *chatStream(options) {
|
|
798
|
-
const { model, messages, temperature = 1, maxTokens, reasoning } = options;
|
|
799
|
-
const body = {
|
|
800
|
-
model,
|
|
801
|
-
messages,
|
|
802
|
-
temperature,
|
|
803
|
-
stream: true,
|
|
804
|
-
top_p: 1
|
|
805
|
-
};
|
|
806
|
-
if (maxTokens) {
|
|
807
|
-
body.max_completion_tokens = maxTokens;
|
|
808
|
-
}
|
|
809
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
810
|
-
body.reasoning_format = "parsed";
|
|
811
|
-
} else if (reasoning?.effort === "off") {
|
|
812
|
-
body.include_reasoning = false;
|
|
813
|
-
}
|
|
814
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
815
|
-
method: "POST",
|
|
816
|
-
headers: {
|
|
817
|
-
"Content-Type": "application/json",
|
|
818
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
819
|
-
},
|
|
820
|
-
body: JSON.stringify(body)
|
|
821
|
-
});
|
|
822
|
-
if (!response.ok) {
|
|
823
|
-
const error = await response.text();
|
|
824
|
-
throw new Error(`Groq API error: ${response.status} ${error}`);
|
|
825
|
-
}
|
|
826
|
-
const reader = response.body?.getReader();
|
|
827
|
-
if (!reader) {
|
|
828
|
-
throw new Error("No response body");
|
|
829
|
-
}
|
|
830
|
-
const decoder = new TextDecoder();
|
|
831
|
-
let buffer = "";
|
|
832
|
-
try {
|
|
833
|
-
while (true) {
|
|
834
|
-
const { done, value } = await reader.read();
|
|
835
|
-
if (done) break;
|
|
836
|
-
buffer += decoder.decode(value, { stream: true });
|
|
837
|
-
const lines = buffer.split("\n");
|
|
838
|
-
buffer = lines.pop() ?? "";
|
|
839
|
-
for (const line of lines) {
|
|
840
|
-
const trimmed = line.trim();
|
|
841
|
-
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
842
|
-
if (!trimmed.startsWith("data: ")) continue;
|
|
843
|
-
try {
|
|
844
|
-
const data = JSON.parse(trimmed.slice(6));
|
|
845
|
-
const delta = data.choices?.[0]?.delta;
|
|
846
|
-
if (!delta) continue;
|
|
847
|
-
const reasoningContent = delta.reasoning_content ?? delta.reasoning;
|
|
848
|
-
if (reasoningContent) {
|
|
849
|
-
yield {
|
|
850
|
-
type: "reasoning",
|
|
851
|
-
text: extractTextContent3(reasoningContent)
|
|
852
|
-
};
|
|
853
|
-
}
|
|
854
|
-
if (delta.content) {
|
|
855
|
-
yield {
|
|
856
|
-
type: "content",
|
|
857
|
-
text: extractTextContent3(delta.content)
|
|
858
|
-
};
|
|
859
|
-
}
|
|
860
|
-
} catch {
|
|
861
|
-
}
|
|
862
|
-
}
|
|
863
|
-
}
|
|
864
|
-
} finally {
|
|
865
|
-
reader.releaseLock();
|
|
866
|
-
}
|
|
2621
|
+
async askWithScenario(model, question, scenario = "simple", options) {
|
|
2622
|
+
const recommendedConfig = ModelDetection.getRecommendedConfig(
|
|
2623
|
+
model,
|
|
2624
|
+
scenario
|
|
2625
|
+
);
|
|
2626
|
+
return this.ask(model, question, {
|
|
2627
|
+
...recommendedConfig,
|
|
2628
|
+
...options
|
|
2629
|
+
});
|
|
867
2630
|
}
|
|
868
2631
|
};
|
|
869
2632
|
|
|
870
|
-
// src/providers/
|
|
871
|
-
|
|
872
|
-
|
|
873
|
-
|
|
874
|
-
|
|
875
|
-
|
|
876
|
-
|
|
877
|
-
|
|
878
|
-
|
|
879
|
-
|
|
880
|
-
}
|
|
881
|
-
return "";
|
|
882
|
-
}
|
|
883
|
-
var HuggingFaceProvider = class extends BaseProvider {
|
|
884
|
-
name = "huggingface";
|
|
885
|
-
apiKey;
|
|
2633
|
+
// src/providers/__index__.ts
|
|
2634
|
+
init_types();
|
|
2635
|
+
|
|
2636
|
+
// src/providers/openrouter.ts
|
|
2637
|
+
init_http_provider_client();
|
|
2638
|
+
init_stream_processor();
|
|
2639
|
+
var OpenRouterProvider = class extends BaseProvider {
|
|
2640
|
+
name = "openrouter";
|
|
2641
|
+
adapter;
|
|
2642
|
+
client;
|
|
886
2643
|
baseUrl;
|
|
887
|
-
|
|
2644
|
+
apiKey;
|
|
2645
|
+
constructor(apiKey, baseUrl) {
|
|
888
2646
|
super();
|
|
889
|
-
|
|
890
|
-
|
|
891
|
-
|
|
892
|
-
|
|
893
|
-
|
|
894
|
-
|
|
895
|
-
}
|
|
2647
|
+
this.apiKey = apiKey;
|
|
2648
|
+
this.adapter = new OpenRouterAdapter();
|
|
2649
|
+
this.baseUrl = baseUrl ?? this.adapter.defaultBaseUrl;
|
|
2650
|
+
this.client = new HttpProviderClient({
|
|
2651
|
+
apiKey,
|
|
2652
|
+
baseUrl: this.baseUrl
|
|
2653
|
+
});
|
|
896
2654
|
}
|
|
897
2655
|
/**
|
|
898
2656
|
* 发送聊天请求(非流式)
|
|
899
|
-
*
|
|
900
|
-
* reasoning 参数说明:
|
|
901
|
-
* - HuggingFace 是模型聚合平台,thinking 支持取决于具体模型
|
|
902
|
-
* - 如果模型支持,会返回 reasoning_content
|
|
903
2657
|
*/
|
|
904
2658
|
async chat(options) {
|
|
905
|
-
const
|
|
906
|
-
const
|
|
907
|
-
|
|
908
|
-
|
|
909
|
-
|
|
910
|
-
stream: false
|
|
911
|
-
};
|
|
912
|
-
if (maxTokens) {
|
|
913
|
-
body.max_tokens = maxTokens;
|
|
914
|
-
}
|
|
915
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
916
|
-
body.reasoning_effort = reasoning.effort;
|
|
917
|
-
}
|
|
918
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
919
|
-
method: "POST",
|
|
920
|
-
headers: {
|
|
921
|
-
"Content-Type": "application/json",
|
|
922
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
923
|
-
},
|
|
924
|
-
body: JSON.stringify(body)
|
|
925
|
-
});
|
|
926
|
-
if (!response.ok) {
|
|
927
|
-
const error = await response.text();
|
|
928
|
-
throw new Error(`HuggingFace API error: ${response.status} ${error}`);
|
|
929
|
-
}
|
|
930
|
-
const result = await response.json();
|
|
931
|
-
const choice = result.choices?.[0];
|
|
932
|
-
if (!choice) {
|
|
933
|
-
throw new Error("No response from model");
|
|
934
|
-
}
|
|
935
|
-
const msg = choice.message;
|
|
936
|
-
const reasoningContent = msg?.reasoning_content ?? null;
|
|
937
|
-
return {
|
|
938
|
-
content: extractTextContent4(msg?.content),
|
|
939
|
-
reasoning: reasoningContent ? extractTextContent4(reasoningContent) : null,
|
|
940
|
-
model: result.model ?? model,
|
|
941
|
-
usage: {
|
|
942
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
943
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
944
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
945
|
-
},
|
|
946
|
-
finishReason: choice.finish_reason ?? null
|
|
947
|
-
};
|
|
2659
|
+
const body = this.adapter.buildChatRequest(options, false);
|
|
2660
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2661
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2662
|
+
const response = await this.client.chat(endpointPath, body);
|
|
2663
|
+
return this.adapter.parseChatResponse(response, options.model);
|
|
948
2664
|
}
|
|
949
2665
|
/**
|
|
950
2666
|
* 发送流式聊天请求
|
|
951
2667
|
*/
|
|
952
2668
|
async *chatStream(options) {
|
|
953
|
-
const
|
|
954
|
-
const
|
|
955
|
-
|
|
956
|
-
|
|
957
|
-
|
|
958
|
-
|
|
959
|
-
|
|
960
|
-
|
|
961
|
-
|
|
962
|
-
|
|
963
|
-
|
|
964
|
-
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
|
|
2669
|
+
const body = this.adapter.buildChatRequest(options, true);
|
|
2670
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2671
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2672
|
+
const response = await this.client.chatStream(endpointPath, body);
|
|
2673
|
+
yield* StreamProcessor.processStream(
|
|
2674
|
+
response,
|
|
2675
|
+
(delta) => this.adapter.extractStreamChunk(delta)
|
|
2676
|
+
);
|
|
2677
|
+
}
|
|
2678
|
+
/**
|
|
2679
|
+
* 获取可用模型列表
|
|
2680
|
+
* 注意:此方法直接调用 OpenRouter API,不使用适配器
|
|
2681
|
+
*/
|
|
2682
|
+
async listModels() {
|
|
2683
|
+
const response = await fetch(`${this.baseUrl}/models`, {
|
|
968
2684
|
headers: {
|
|
969
|
-
|
|
970
|
-
|
|
971
|
-
}
|
|
972
|
-
body: JSON.stringify(body)
|
|
2685
|
+
Authorization: `Bearer ${this.apiKey}`,
|
|
2686
|
+
"Content-Type": "application/json"
|
|
2687
|
+
}
|
|
973
2688
|
});
|
|
974
2689
|
if (!response.ok) {
|
|
975
|
-
|
|
976
|
-
|
|
977
|
-
|
|
978
|
-
const reader = response.body?.getReader();
|
|
979
|
-
if (!reader) {
|
|
980
|
-
throw new Error("No response body");
|
|
981
|
-
}
|
|
982
|
-
const decoder = new TextDecoder();
|
|
983
|
-
let buffer = "";
|
|
984
|
-
try {
|
|
985
|
-
while (true) {
|
|
986
|
-
const { done, value } = await reader.read();
|
|
987
|
-
if (done) break;
|
|
988
|
-
buffer += decoder.decode(value, { stream: true });
|
|
989
|
-
const lines = buffer.split("\n");
|
|
990
|
-
buffer = lines.pop() ?? "";
|
|
991
|
-
for (const line of lines) {
|
|
992
|
-
const trimmed = line.trim();
|
|
993
|
-
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
994
|
-
if (!trimmed.startsWith("data: ")) continue;
|
|
995
|
-
try {
|
|
996
|
-
const data = JSON.parse(trimmed.slice(6));
|
|
997
|
-
const delta = data.choices?.[0]?.delta;
|
|
998
|
-
if (!delta) continue;
|
|
999
|
-
if (delta.reasoning_content) {
|
|
1000
|
-
yield {
|
|
1001
|
-
type: "reasoning",
|
|
1002
|
-
text: extractTextContent4(delta.reasoning_content)
|
|
1003
|
-
};
|
|
1004
|
-
}
|
|
1005
|
-
if (delta.content) {
|
|
1006
|
-
yield {
|
|
1007
|
-
type: "content",
|
|
1008
|
-
text: extractTextContent4(delta.content)
|
|
1009
|
-
};
|
|
1010
|
-
}
|
|
1011
|
-
} catch {
|
|
1012
|
-
}
|
|
1013
|
-
}
|
|
1014
|
-
}
|
|
1015
|
-
} finally {
|
|
1016
|
-
reader.releaseLock();
|
|
2690
|
+
throw new Error(
|
|
2691
|
+
`Failed to fetch models: ${response.status} ${response.statusText}`
|
|
2692
|
+
);
|
|
1017
2693
|
}
|
|
2694
|
+
const result = await response.json();
|
|
2695
|
+
return (result.data ?? []).map((m) => ({
|
|
2696
|
+
id: m.id,
|
|
2697
|
+
canonicalSlug: m.canonical_slug ?? m.id,
|
|
2698
|
+
name: m.name,
|
|
2699
|
+
description: m.description ?? "",
|
|
2700
|
+
created: m.created ?? 0,
|
|
2701
|
+
pricing: {
|
|
2702
|
+
prompt: m.pricing?.prompt ?? "0",
|
|
2703
|
+
completion: m.pricing?.completion ?? "0",
|
|
2704
|
+
request: m.pricing?.request ?? "0",
|
|
2705
|
+
image: m.pricing?.image ?? "0"
|
|
2706
|
+
},
|
|
2707
|
+
contextLength: m.context_length ?? 0,
|
|
2708
|
+
architecture: {
|
|
2709
|
+
modality: m.architecture?.modality ?? "",
|
|
2710
|
+
inputModalities: m.architecture?.input_modalities ?? [],
|
|
2711
|
+
outputModalities: m.architecture?.output_modalities ?? [],
|
|
2712
|
+
tokenizer: m.architecture?.tokenizer ?? "",
|
|
2713
|
+
instructType: m.architecture?.instruct_type ?? ""
|
|
2714
|
+
},
|
|
2715
|
+
supportedParameters: m.supported_parameters ?? []
|
|
2716
|
+
}));
|
|
1018
2717
|
}
|
|
1019
2718
|
};
|
|
1020
2719
|
|
|
1021
2720
|
// src/providers/modelscope.ts
|
|
1022
|
-
|
|
1023
|
-
|
|
1024
|
-
if (typeof content === "string") {
|
|
1025
|
-
return content;
|
|
1026
|
-
}
|
|
1027
|
-
if (Array.isArray(content)) {
|
|
1028
|
-
return content.filter(
|
|
1029
|
-
(item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
|
|
1030
|
-
).map((item) => item.text).join("");
|
|
1031
|
-
}
|
|
1032
|
-
return "";
|
|
1033
|
-
}
|
|
2721
|
+
init_http_provider_client();
|
|
2722
|
+
init_stream_processor();
|
|
1034
2723
|
var ModelScopeProvider = class extends BaseProvider {
|
|
1035
2724
|
name = "modelscope";
|
|
1036
|
-
|
|
2725
|
+
adapter;
|
|
2726
|
+
client;
|
|
1037
2727
|
baseUrl;
|
|
1038
2728
|
constructor(config) {
|
|
1039
2729
|
super();
|
|
2730
|
+
this.adapter = new ModelScopeAdapter();
|
|
1040
2731
|
if (typeof config === "string") {
|
|
1041
|
-
this.
|
|
1042
|
-
this.
|
|
2732
|
+
this.baseUrl = this.adapter.defaultBaseUrl;
|
|
2733
|
+
this.client = new HttpProviderClient({
|
|
2734
|
+
apiKey: config,
|
|
2735
|
+
baseUrl: this.baseUrl
|
|
2736
|
+
});
|
|
1043
2737
|
} else {
|
|
1044
|
-
this.
|
|
1045
|
-
this.
|
|
2738
|
+
this.baseUrl = config.baseUrl ?? this.adapter.defaultBaseUrl;
|
|
2739
|
+
this.client = new HttpProviderClient({
|
|
2740
|
+
apiKey: config.apiKey,
|
|
2741
|
+
baseUrl: this.baseUrl
|
|
2742
|
+
});
|
|
1046
2743
|
}
|
|
1047
2744
|
}
|
|
1048
2745
|
/**
|
|
1049
2746
|
* 发送聊天请求(非流式)
|
|
1050
2747
|
*/
|
|
1051
2748
|
async chat(options) {
|
|
1052
|
-
const
|
|
1053
|
-
|
|
1054
|
-
|
|
1055
|
-
|
|
1056
|
-
|
|
1057
|
-
reasoning
|
|
1058
|
-
} = options;
|
|
1059
|
-
const body = {
|
|
1060
|
-
model,
|
|
1061
|
-
messages,
|
|
1062
|
-
temperature,
|
|
1063
|
-
stream: false
|
|
1064
|
-
};
|
|
1065
|
-
if (maxTokens) {
|
|
1066
|
-
body.max_tokens = maxTokens;
|
|
1067
|
-
}
|
|
1068
|
-
if (reasoning?.effort) {
|
|
1069
|
-
if (reasoning.effort === "off") {
|
|
1070
|
-
body.enable_thinking = false;
|
|
1071
|
-
} else {
|
|
1072
|
-
body.enable_thinking = true;
|
|
1073
|
-
}
|
|
1074
|
-
}
|
|
1075
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
1076
|
-
method: "POST",
|
|
1077
|
-
headers: {
|
|
1078
|
-
"Content-Type": "application/json",
|
|
1079
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
1080
|
-
},
|
|
1081
|
-
body: JSON.stringify(body)
|
|
1082
|
-
});
|
|
1083
|
-
if (!response.ok) {
|
|
1084
|
-
const error = await response.text();
|
|
1085
|
-
throw new Error(`ModelScope API error: ${response.status} ${error}`);
|
|
1086
|
-
}
|
|
1087
|
-
const result = await response.json();
|
|
1088
|
-
const choice = result.choices?.[0];
|
|
1089
|
-
if (!choice) {
|
|
1090
|
-
throw new Error("No response from model");
|
|
1091
|
-
}
|
|
1092
|
-
const msg = choice.message;
|
|
1093
|
-
const reasoningContent = msg?.reasoning_content ?? null;
|
|
1094
|
-
return {
|
|
1095
|
-
content: extractTextContent5(msg?.content),
|
|
1096
|
-
reasoning: reasoningContent ? extractTextContent5(reasoningContent) : null,
|
|
1097
|
-
model: result.model ?? model,
|
|
1098
|
-
usage: {
|
|
1099
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
1100
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
1101
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
1102
|
-
},
|
|
1103
|
-
finishReason: choice.finish_reason ?? null
|
|
1104
|
-
};
|
|
2749
|
+
const body = this.adapter.buildChatRequest(options, false);
|
|
2750
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2751
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2752
|
+
const response = await this.client.chat(endpointPath, body);
|
|
2753
|
+
return this.adapter.parseChatResponse(response, options.model);
|
|
1105
2754
|
}
|
|
1106
2755
|
/**
|
|
1107
2756
|
* 发送流式聊天请求
|
|
1108
2757
|
*/
|
|
1109
2758
|
async *chatStream(options) {
|
|
1110
|
-
const
|
|
1111
|
-
|
|
1112
|
-
|
|
1113
|
-
|
|
1114
|
-
|
|
1115
|
-
|
|
1116
|
-
|
|
1117
|
-
|
|
1118
|
-
model,
|
|
1119
|
-
messages,
|
|
1120
|
-
temperature,
|
|
1121
|
-
stream: true
|
|
1122
|
-
};
|
|
1123
|
-
if (maxTokens) {
|
|
1124
|
-
body.max_tokens = maxTokens;
|
|
1125
|
-
}
|
|
1126
|
-
if (reasoning?.effort) {
|
|
1127
|
-
if (reasoning.effort === "off") {
|
|
1128
|
-
body.enable_thinking = false;
|
|
1129
|
-
} else {
|
|
1130
|
-
body.enable_thinking = true;
|
|
1131
|
-
}
|
|
1132
|
-
}
|
|
1133
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
1134
|
-
method: "POST",
|
|
1135
|
-
headers: {
|
|
1136
|
-
"Content-Type": "application/json",
|
|
1137
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
1138
|
-
},
|
|
1139
|
-
body: JSON.stringify(body)
|
|
1140
|
-
});
|
|
1141
|
-
if (!response.ok) {
|
|
1142
|
-
const error = await response.text();
|
|
1143
|
-
throw new Error(`ModelScope API error: ${response.status} ${error}`);
|
|
1144
|
-
}
|
|
1145
|
-
const reader = response.body?.getReader();
|
|
1146
|
-
if (!reader) {
|
|
1147
|
-
throw new Error("No response body");
|
|
1148
|
-
}
|
|
1149
|
-
const decoder = new TextDecoder();
|
|
1150
|
-
let buffer = "";
|
|
1151
|
-
try {
|
|
1152
|
-
while (true) {
|
|
1153
|
-
const { done, value } = await reader.read();
|
|
1154
|
-
if (done) break;
|
|
1155
|
-
buffer += decoder.decode(value, { stream: true });
|
|
1156
|
-
const lines = buffer.split("\n");
|
|
1157
|
-
buffer = lines.pop() ?? "";
|
|
1158
|
-
for (const line of lines) {
|
|
1159
|
-
const trimmed = line.trim();
|
|
1160
|
-
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
1161
|
-
if (!trimmed.startsWith("data: ")) continue;
|
|
1162
|
-
try {
|
|
1163
|
-
const data = JSON.parse(trimmed.slice(6));
|
|
1164
|
-
const delta = data.choices?.[0]?.delta;
|
|
1165
|
-
if (!delta) continue;
|
|
1166
|
-
if (delta.reasoning_content) {
|
|
1167
|
-
yield {
|
|
1168
|
-
type: "reasoning",
|
|
1169
|
-
text: extractTextContent5(delta.reasoning_content)
|
|
1170
|
-
};
|
|
1171
|
-
}
|
|
1172
|
-
if (delta.content) {
|
|
1173
|
-
yield {
|
|
1174
|
-
type: "content",
|
|
1175
|
-
text: extractTextContent5(delta.content)
|
|
1176
|
-
};
|
|
1177
|
-
}
|
|
1178
|
-
} catch {
|
|
1179
|
-
}
|
|
1180
|
-
}
|
|
1181
|
-
}
|
|
1182
|
-
} finally {
|
|
1183
|
-
reader.releaseLock();
|
|
1184
|
-
}
|
|
2759
|
+
const body = this.adapter.buildChatRequest(options, true);
|
|
2760
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2761
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2762
|
+
const response = await this.client.chatStream(endpointPath, body);
|
|
2763
|
+
yield* StreamProcessor.processStream(
|
|
2764
|
+
response,
|
|
2765
|
+
(delta) => this.adapter.extractStreamChunk(delta)
|
|
2766
|
+
);
|
|
1185
2767
|
}
|
|
1186
2768
|
};
|
|
1187
2769
|
|
|
- // src/providers/
-
-
-
-
-
-
- return content.filter(
- (item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
- ).map((item) => item.text).join("");
- }
- return "";
- }
- var DeepSeekProvider = class extends BaseProvider {
- name = "deepseek";
- apiKey;
+ // src/providers/huggingface.ts
+ init_http_provider_client();
+ init_stream_processor();
+ var HuggingFaceProvider = class extends BaseProvider {
+ name = "huggingface";
+ adapter;
+ client;
  baseUrl;
  constructor(config) {
  super();
+ this.adapter = new HuggingFaceAdapter();
  if (typeof config === "string") {
- this.
- this.
+ this.baseUrl = this.adapter.defaultBaseUrl;
+ this.client = new HttpProviderClient({
+ apiKey: config,
+ baseUrl: this.baseUrl
+ });
  } else {
- this.
- this.
+ this.baseUrl = config.baseUrl ?? this.adapter.defaultBaseUrl;
+ this.client = new HttpProviderClient({
+ apiKey: config.apiKey,
+ baseUrl: this.baseUrl
+ });
  }
  }
  /**
  * 发送聊天请求(非流式)
  *
  * reasoning 参数说明:
- * -
- * -
+ * - HuggingFace 是模型聚合平台,thinking 支持取决于具体模型
+ * - 如果模型支持,会返回 reasoning_content
  */
  async chat(options) {
- const
-
-
-
-
- reasoning
- } = options;
- const body = {
- model,
- messages,
- temperature,
- stream: false
- };
- if (maxTokens) {
- body.max_tokens = maxTokens;
- }
- if (reasoning?.effort && reasoning.effort !== "off") {
- body.thinking = { type: "enabled" };
- }
- const response = await fetch(`${this.baseUrl}/chat/completions`, {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${this.apiKey}`
- },
- body: JSON.stringify(body)
- });
- if (!response.ok) {
- const error = await response.text();
- throw new Error(`DeepSeek API error: ${response.status} ${error}`);
- }
- const result = await response.json();
- const choice = result.choices?.[0];
- if (!choice) {
- throw new Error("No response from model");
- }
- const msg = choice.message;
- const reasoningContent = msg?.reasoning_content ?? null;
- return {
- content: extractTextContent6(msg?.content),
- reasoning: reasoningContent ? extractTextContent6(reasoningContent) : null,
- model: result.model ?? model,
- usage: {
- promptTokens: result.usage?.prompt_tokens ?? 0,
- completionTokens: result.usage?.completion_tokens ?? 0,
- totalTokens: result.usage?.total_tokens ?? 0
- },
- finishReason: choice.finish_reason ?? null
- };
+ const body = this.adapter.buildChatRequest(options, false);
+ const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
+ const endpointPath = endpoint.replace(this.baseUrl, "");
+ const response = await this.client.chat(endpointPath, body);
+ return this.adapter.parseChatResponse(response, options.model);
  }
  /**
  * 发送流式聊天请求
  */
  async *chatStream(options) {
- const
-
-
-
-
-
-
-
- model,
- messages,
- temperature,
- stream: true
- };
- if (maxTokens) {
- body.max_tokens = maxTokens;
- }
- if (reasoning?.effort && reasoning.effort !== "off") {
- body.thinking = { type: "enabled" };
- }
- const response = await fetch(`${this.baseUrl}/chat/completions`, {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
- Authorization: `Bearer ${this.apiKey}`
- },
- body: JSON.stringify(body)
- });
- if (!response.ok) {
- const error = await response.text();
- throw new Error(`DeepSeek API error: ${response.status} ${error}`);
- }
- const reader = response.body?.getReader();
- if (!reader) {
- throw new Error("No response body");
- }
- const decoder = new TextDecoder();
- let buffer = "";
- try {
- while (true) {
- const { done, value } = await reader.read();
- if (done) break;
- buffer += decoder.decode(value, { stream: true });
- const lines = buffer.split("\n");
- buffer = lines.pop() ?? "";
- for (const line of lines) {
- const trimmed = line.trim();
- if (!trimmed || trimmed === "data: [DONE]") continue;
- if (!trimmed.startsWith("data: ")) continue;
- try {
- const data = JSON.parse(trimmed.slice(6));
- const delta = data.choices?.[0]?.delta;
- if (!delta) continue;
- if (delta.reasoning_content) {
- yield {
- type: "reasoning",
- text: extractTextContent6(delta.reasoning_content)
- };
- }
- if (delta.content) {
- yield {
- type: "content",
- text: extractTextContent6(delta.content)
- };
- }
- } catch {
- }
- }
- }
- } finally {
- reader.releaseLock();
- }
+ const body = this.adapter.buildChatRequest(options, true);
+ const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
+ const endpointPath = endpoint.replace(this.baseUrl, "");
+ const response = await this.client.chatStream(endpointPath, body);
+ yield* StreamProcessor.processStream(
+ response,
+ (delta) => this.adapter.extractStreamChunk(delta)
+ );
  }
  };
|
-
// src/providers/
|
|
1352
|
-
|
|
1353
|
-
|
|
1354
|
-
|
|
1355
|
-
|
|
1356
|
-
|
|
1357
|
-
|
|
1358
|
-
|
|
1359
|
-
|
|
1360
|
-
|
|
1361
|
-
|
|
1362
|
-
|
|
1363
|
-
|
|
1364
|
-
|
|
1365
|
-
|
|
1366
|
-
|
|
1367
|
-
|
|
1368
|
-
|
|
1369
|
-
|
|
1370
|
-
|
|
1371
|
-
|
|
1372
|
-
|
|
1373
|
-
|
|
1374
|
-
|
|
1375
|
-
const thinking = thinkingMatch[1].split("\n").map((line) => line.replace(/^>\s?/, "")).join("\n").trim();
|
|
1376
|
-
const cleanContent = content.replace(thinkingMatch[0], "").trim();
|
|
1377
|
-
return { thinking, content: cleanContent };
|
|
1378
|
-
}
|
|
1379
|
-
return { thinking: "", content };
|
|
1380
|
-
}
|
|
1381
|
-
function buildExtraBody(reasoning) {
|
|
1382
|
-
if (!reasoning || reasoning.effort === "off") {
|
|
1383
|
-
return void 0;
|
|
2824
|
+
// src/providers/groq.ts
|
|
2825
|
+
init_http_provider_client();
|
|
2826
|
+
init_stream_processor();
|
|
2827
|
+
var GroqProvider = class extends BaseProvider {
|
|
2828
|
+
name = "groq";
|
|
2829
|
+
adapter;
|
|
2830
|
+
client;
|
|
2831
|
+
baseUrl;
|
|
2832
|
+
constructor(config) {
|
|
2833
|
+
super();
|
|
2834
|
+
this.adapter = new GroqAdapter();
|
|
2835
|
+
if (typeof config === "string") {
|
|
2836
|
+
this.baseUrl = this.adapter.defaultBaseUrl;
|
|
2837
|
+
this.client = new HttpProviderClient({
|
|
2838
|
+
apiKey: config,
|
|
2839
|
+
baseUrl: this.baseUrl
|
|
2840
|
+
});
|
|
2841
|
+
} else {
|
|
2842
|
+
this.baseUrl = config.baseUrl ?? this.adapter.defaultBaseUrl;
|
|
2843
|
+
this.client = new HttpProviderClient({
|
|
2844
|
+
apiKey: config.apiKey,
|
|
2845
|
+
baseUrl: this.baseUrl
|
|
2846
|
+
});
|
|
2847
|
+
}
|
|
1384
2848
|
}
|
|
1385
|
-
|
|
1386
|
-
|
|
1387
|
-
|
|
2849
|
+
/**
|
|
2850
|
+
* 发送聊天请求(非流式)
|
|
2851
|
+
*/
|
|
2852
|
+
async chat(options) {
|
|
2853
|
+
const body = this.adapter.buildChatRequest(options, false);
|
|
2854
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2855
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2856
|
+
const response = await this.client.chat(endpointPath, body);
|
|
2857
|
+
return this.adapter.parseChatResponse(response, options.model);
|
|
1388
2858
|
}
|
|
1389
|
-
|
|
1390
|
-
|
|
1391
|
-
|
|
1392
|
-
|
|
2859
|
+
/**
|
|
2860
|
+
* 发送流式聊天请求
|
|
2861
|
+
*/
|
|
2862
|
+
async *chatStream(options) {
|
|
2863
|
+
const body = this.adapter.buildChatRequest(options, true);
|
|
2864
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2865
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2866
|
+
const response = await this.client.chatStream(endpointPath, body);
|
|
2867
|
+
yield* StreamProcessor.processStream(
|
|
2868
|
+
response,
|
|
2869
|
+
(delta) => this.adapter.extractStreamChunk(delta)
|
|
2870
|
+
);
|
|
1393
2871
|
}
|
|
1394
|
-
|
|
1395
|
-
|
|
1396
|
-
|
|
1397
|
-
|
|
1398
|
-
|
|
2872
|
+
};
|
|
2873
|
+
|
|
2874
|
+
// src/providers/gemini.ts
|
|
2875
|
+
init_http_provider_client();
|
|
2876
|
+
init_stream_processor();
|
|
2877
|
+
var GeminiProvider = class extends BaseProvider {
|
|
2878
|
+
name = "gemini";
|
|
2879
|
+
adapter;
|
|
2880
|
+
client;
|
|
1399
2881
|
baseUrl;
|
|
1400
2882
|
constructor(config) {
|
|
1401
2883
|
super();
|
|
2884
|
+
this.adapter = new GeminiAdapter();
|
|
1402
2885
|
if (typeof config === "string") {
|
|
1403
|
-
this.
|
|
1404
|
-
this.
|
|
2886
|
+
this.baseUrl = this.adapter.defaultBaseUrl;
|
|
2887
|
+
this.client = new HttpProviderClient({
|
|
2888
|
+
apiKey: config,
|
|
2889
|
+
baseUrl: this.baseUrl
|
|
2890
|
+
});
|
|
1405
2891
|
} else {
|
|
1406
|
-
this.
|
|
1407
|
-
this.
|
|
2892
|
+
this.baseUrl = config.baseUrl ?? this.adapter.defaultBaseUrl;
|
|
2893
|
+
this.client = new HttpProviderClient({
|
|
2894
|
+
apiKey: config.apiKey,
|
|
2895
|
+
baseUrl: this.baseUrl
|
|
2896
|
+
});
|
|
1408
2897
|
}
|
|
1409
2898
|
}
|
|
1410
2899
|
/**
|
|
1411
2900
|
* 发送聊天请求(非流式)
|
|
1412
2901
|
*/
|
|
1413
2902
|
async chat(options) {
|
|
1414
|
-
const
|
|
1415
|
-
const
|
|
1416
|
-
|
|
1417
|
-
|
|
1418
|
-
|
|
1419
|
-
stream: false
|
|
1420
|
-
};
|
|
1421
|
-
if (maxTokens) {
|
|
1422
|
-
body.max_tokens = maxTokens;
|
|
1423
|
-
}
|
|
1424
|
-
const extraBody = buildExtraBody(reasoning);
|
|
1425
|
-
if (extraBody) {
|
|
1426
|
-
Object.assign(body, extraBody);
|
|
1427
|
-
}
|
|
1428
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
1429
|
-
method: "POST",
|
|
1430
|
-
headers: {
|
|
1431
|
-
"Content-Type": "application/json",
|
|
1432
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
1433
|
-
},
|
|
1434
|
-
body: JSON.stringify(body)
|
|
1435
|
-
});
|
|
1436
|
-
if (!response.ok) {
|
|
1437
|
-
const error = await response.text();
|
|
1438
|
-
throw new Error(`Poe API error: ${response.status} ${error}`);
|
|
1439
|
-
}
|
|
1440
|
-
const result = await response.json();
|
|
1441
|
-
const choice = result.choices?.[0];
|
|
1442
|
-
if (!choice) {
|
|
1443
|
-
throw new Error("No response from model");
|
|
1444
|
-
}
|
|
1445
|
-
const msg = choice.message;
|
|
1446
|
-
let reasoningContent = msg?.reasoning_content ?? null;
|
|
1447
|
-
let contentText = extractTextContent7(msg?.content);
|
|
1448
|
-
if (!reasoningContent && contentText) {
|
|
1449
|
-
const extracted = extractThinkingFromContent(contentText);
|
|
1450
|
-
if (extracted.thinking) {
|
|
1451
|
-
reasoningContent = extracted.thinking;
|
|
1452
|
-
contentText = extracted.content;
|
|
1453
|
-
}
|
|
1454
|
-
}
|
|
1455
|
-
return {
|
|
1456
|
-
content: contentText,
|
|
1457
|
-
reasoning: reasoningContent ? extractTextContent7(reasoningContent) : null,
|
|
1458
|
-
model: result.model ?? model,
|
|
1459
|
-
usage: {
|
|
1460
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
1461
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
1462
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
1463
|
-
},
|
|
1464
|
-
finishReason: choice.finish_reason ?? null
|
|
1465
|
-
};
|
|
2903
|
+
const body = this.adapter.buildChatRequest(options, false);
|
|
2904
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2905
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2906
|
+
const response = await this.client.chat(endpointPath, body);
|
|
2907
|
+
return this.adapter.parseChatResponse(response, options.model);
|
|
1466
2908
|
}
|
|
1467
2909
|
/**
|
|
1468
2910
|
* 发送流式聊天请求
|
|
1469
2911
|
*/
|
|
1470
2912
|
async *chatStream(options) {
|
|
1471
|
-
const
|
|
1472
|
-
const
|
|
1473
|
-
|
|
1474
|
-
|
|
1475
|
-
|
|
1476
|
-
|
|
1477
|
-
|
|
1478
|
-
|
|
1479
|
-
|
|
2913
|
+
const body = this.adapter.buildChatRequest(options, true);
|
|
2914
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2915
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2916
|
+
const response = await this.client.chatStream(endpointPath, body);
|
|
2917
|
+
yield* StreamProcessor.processStream(
|
|
2918
|
+
response,
|
|
2919
|
+
(delta) => this.adapter.extractStreamChunk(delta)
|
|
2920
|
+
);
|
|
2921
|
+
}
|
|
2922
|
+
};
|
|
+
+ // src/providers/deepseek.ts
+ init_http_provider_client();
+ init_stream_processor();
+
+ // src/providers/poe.ts
+ init_http_provider_client();
+ init_stream_processor();
+
+ // src/providers/nova.ts
+ init_http_provider_client();
+ init_stream_processor();
+
+ // src/utils/index.ts
+ init_stream_processor();
+ init_request_builder();
+
+ // src/client/index.ts
+ init_types2();
+ init_http_provider_client();
+
+
// src/fluent/errors.ts
|
|
2945
|
+
var ConfigurationError = class _ConfigurationError extends Error {
|
|
2946
|
+
constructor(message) {
|
|
2947
|
+
super(message);
|
|
2948
|
+
this.name = "ConfigurationError";
|
|
2949
|
+
Object.setPrototypeOf(this, _ConfigurationError.prototype);
|
|
2950
|
+
}
|
|
2951
|
+
};
|
|
2952
|
+
var ValidationError = class _ValidationError extends Error {
|
|
2953
|
+
constructor(message) {
|
|
2954
|
+
super(message);
|
|
2955
|
+
this.name = "ValidationError";
|
|
2956
|
+
Object.setPrototypeOf(this, _ValidationError.prototype);
|
|
2957
|
+
}
|
|
2958
|
+
};
|
|
2959
|
+
|
|
2960
|
+
// src/fluent/builder.ts
|
|
2961
|
+
var OiiaiBuilderImpl = class _OiiaiBuilderImpl {
|
|
2962
|
+
/** 内部配置状态 */
|
|
2963
|
+
config = {};
|
|
2964
|
+
/**
|
|
2965
|
+
* 创建构建器实例
|
|
2966
|
+
* @param initialConfig - 可选的初始配置
|
|
2967
|
+
*/
|
|
2968
|
+
constructor(initialConfig) {
|
|
2969
|
+
if (initialConfig) {
|
|
2970
|
+
this.config = { ...initialConfig };
|
|
1480
2971
|
}
|
|
1481
|
-
|
|
1482
|
-
|
|
1483
|
-
|
|
2972
|
+
}
|
|
2973
|
+
/**
|
|
2974
|
+
* 选择服务提供商
|
|
2975
|
+
* @param provider - Provider 类型
|
|
2976
|
+
* @returns this 支持链式调用
|
|
2977
|
+
*/
|
|
2978
|
+
use(provider) {
|
|
2979
|
+
if (!ProviderRegistry.hasAdapter(provider)) {
|
|
2980
|
+
const supported = ProviderRegistry.listSupported();
|
|
2981
|
+
throw new ValidationError(
|
|
2982
|
+
`\u4E0D\u652F\u6301\u7684 Provider: ${provider}\uFF0C\u652F\u6301\u7684 Provider: ${supported.join(", ")}`
|
|
2983
|
+
);
|
|
2984
|
+
}
|
|
2985
|
+
this.config.provider = provider;
|
|
2986
|
+
return this;
|
|
2987
|
+
}
|
|
2988
|
+
/**
|
|
2989
|
+
* 指定模型
|
|
2990
|
+
* @param modelId - 模型 ID
|
|
2991
|
+
* @returns this 支持链式调用
|
|
2992
|
+
*/
|
|
2993
|
+
model(modelId) {
|
|
2994
|
+
this.config.model = modelId;
|
|
2995
|
+
return this;
|
|
2996
|
+
}
|
|
2997
|
+
/**
|
|
2998
|
+
* 设置系统提示词
|
|
2999
|
+
* @param prompt - 系统提示词
|
|
3000
|
+
* @returns this 支持链式调用
|
|
3001
|
+
*/
|
|
3002
|
+
system(prompt) {
|
|
3003
|
+
this.config.system = prompt;
|
|
3004
|
+
return this;
|
|
3005
|
+
}
|
|
3006
|
+
/**
|
|
3007
|
+
* 设置温度参数
|
|
3008
|
+
* @param value - 温度值 (0-2)
|
|
3009
|
+
* @returns this 支持链式调用
|
|
3010
|
+
*/
|
|
3011
|
+
temperature(value) {
|
|
3012
|
+
if (value < 0 || value > 2) {
|
|
3013
|
+
throw new ValidationError("temperature \u5FC5\u987B\u5728 0-2 \u4E4B\u95F4");
|
|
1484
3014
|
}
|
|
1485
|
-
|
|
1486
|
-
|
|
1487
|
-
|
|
1488
|
-
|
|
1489
|
-
|
|
1490
|
-
|
|
1491
|
-
|
|
3015
|
+
this.config.temperature = value;
|
|
3016
|
+
return this;
|
|
3017
|
+
}
|
|
3018
|
+
/**
|
|
3019
|
+
* 设置最大输出 token 数
|
|
3020
|
+
* @param value - token 数量
|
|
3021
|
+
* @returns this 支持链式调用
|
|
3022
|
+
*/
|
|
3023
|
+
maxTokens(value) {
|
|
3024
|
+
this.config.maxTokens = value;
|
|
3025
|
+
return this;
|
|
3026
|
+
}
|
|
3027
|
+
/**
|
|
3028
|
+
* 配置思考/推理模式
|
|
3029
|
+
* @param config - 推理配置
|
|
3030
|
+
* @returns this 支持链式调用
|
|
3031
|
+
*/
|
|
3032
|
+
reasoning(config) {
|
|
3033
|
+
this.config.reasoning = config;
|
|
3034
|
+
return this;
|
|
3035
|
+
}
|
|
3036
|
+
/**
|
|
3037
|
+
* 设置 API Key
|
|
3038
|
+
* @param apiKey - API Key
|
|
3039
|
+
* @returns this 支持链式调用
|
|
3040
|
+
*/
|
|
3041
|
+
key(apiKey) {
|
|
3042
|
+
this.config.apiKey = apiKey;
|
|
3043
|
+
return this;
|
|
3044
|
+
}
|
|
3045
|
+
/**
|
|
3046
|
+
* 设置基础 URL
|
|
3047
|
+
* @param url - 基础 URL
|
|
3048
|
+
* @returns this 支持链式调用
|
|
3049
|
+
*/
|
|
3050
|
+
baseUrl(url) {
|
|
3051
|
+
this.config.baseUrl = url;
|
|
3052
|
+
return this;
|
|
3053
|
+
}
|
|
3054
|
+
/**
|
|
3055
|
+
* 标记为流式请求
|
|
3056
|
+
* 调用后 ask() 将返回 AsyncGenerator
|
|
3057
|
+
* @returns StreamBuilder 类型
|
|
3058
|
+
*/
|
|
3059
|
+
stream() {
|
|
3060
|
+
this.config.isStream = true;
|
|
3061
|
+
return this;
|
|
3062
|
+
}
|
|
3063
|
+
ask(question) {
|
|
3064
|
+
this.validateConfig();
|
|
3065
|
+
const adapter = ProviderRegistry.getAdapter(this.config.provider);
|
|
3066
|
+
const client = adapter.createClient({
|
|
3067
|
+
apiKey: this.config.apiKey,
|
|
3068
|
+
baseUrl: this.config.baseUrl ?? adapter.defaultBaseUrl
|
|
1492
3069
|
});
|
|
1493
|
-
|
|
1494
|
-
|
|
1495
|
-
|
|
3070
|
+
const messages = [];
|
|
3071
|
+
if (this.config.system) {
|
|
3072
|
+
messages.push({ role: "system", content: this.config.system });
|
|
1496
3073
|
}
|
|
1497
|
-
|
|
1498
|
-
|
|
1499
|
-
|
|
3074
|
+
messages.push({ role: "user", content: question });
|
|
3075
|
+
const chatOptions = {
|
|
3076
|
+
model: this.config.model,
|
|
3077
|
+
messages,
|
|
3078
|
+
temperature: this.config.temperature,
|
|
3079
|
+
maxTokens: this.config.maxTokens,
|
|
3080
|
+
reasoning: this.config.reasoning
|
|
3081
|
+
};
|
|
3082
|
+
if (this.config.isStream) {
|
|
3083
|
+
return this.executeStreamRequest(adapter, client, chatOptions);
|
|
1500
3084
|
}
|
|
1501
|
-
|
|
1502
|
-
|
|
1503
|
-
|
|
1504
|
-
|
|
1505
|
-
|
|
1506
|
-
|
|
1507
|
-
|
|
1508
|
-
|
|
1509
|
-
|
|
1510
|
-
|
|
1511
|
-
|
|
1512
|
-
|
|
1513
|
-
|
|
1514
|
-
|
|
1515
|
-
|
|
1516
|
-
|
|
1517
|
-
|
|
1518
|
-
|
|
1519
|
-
|
|
1520
|
-
|
|
1521
|
-
|
|
1522
|
-
|
|
1523
|
-
|
|
1524
|
-
|
|
1525
|
-
|
|
1526
|
-
|
|
1527
|
-
|
|
1528
|
-
|
|
1529
|
-
|
|
1530
|
-
|
|
1531
|
-
|
|
1532
|
-
|
|
1533
|
-
|
|
1534
|
-
|
|
1535
|
-
|
|
1536
|
-
|
|
1537
|
-
|
|
1538
|
-
|
|
1539
|
-
|
|
1540
|
-
|
|
1541
|
-
|
|
1542
|
-
|
|
1543
|
-
|
|
1544
|
-
|
|
1545
|
-
|
|
1546
|
-
|
|
1547
|
-
|
|
1548
|
-
|
|
1549
|
-
|
|
1550
|
-
|
|
1551
|
-
|
|
1552
|
-
|
|
1553
|
-
}
|
|
1554
|
-
}
|
|
1555
|
-
break;
|
|
1556
|
-
} else if (thinkingMode === "think_tag") {
|
|
1557
|
-
const endIdx = contentBuffer.indexOf("</think>");
|
|
1558
|
-
if (endIdx !== -1) {
|
|
1559
|
-
yield { type: "reasoning", text: contentBuffer.slice(0, endIdx) };
|
|
1560
|
-
contentBuffer = contentBuffer.slice(endIdx + 8);
|
|
1561
|
-
thinkingMode = "none";
|
|
1562
|
-
continue;
|
|
1563
|
-
}
|
|
1564
|
-
if (contentBuffer.length > 8) {
|
|
1565
|
-
yield { type: "reasoning", text: contentBuffer.slice(0, -8) };
|
|
1566
|
-
contentBuffer = contentBuffer.slice(-8);
|
|
1567
|
-
}
|
|
1568
|
-
break;
|
|
1569
|
-
} else if (thinkingMode === "markdown_thinking") {
|
|
1570
|
-
if (contentBuffer.startsWith(">")) {
|
|
1571
|
-
thinkingMode = "markdown_quote";
|
|
1572
|
-
continue;
|
|
1573
|
-
}
|
|
1574
|
-
if (contentBuffer.length > 0 && !contentBuffer.startsWith(">")) {
|
|
1575
|
-
thinkingMode = "none";
|
|
1576
|
-
continue;
|
|
1577
|
-
}
|
|
1578
|
-
break;
|
|
1579
|
-
} else if (thinkingMode === "markdown_quote") {
|
|
1580
|
-
const newlineIdx = contentBuffer.indexOf("\n");
|
|
1581
|
-
if (newlineIdx !== -1) {
|
|
1582
|
-
const quoteLine = contentBuffer.slice(0, newlineIdx);
|
|
1583
|
-
contentBuffer = contentBuffer.slice(newlineIdx + 1);
|
|
1584
|
-
if (quoteLine.startsWith(">")) {
|
|
1585
|
-
const thinkText = quoteLine.replace(/^>\s?/, "");
|
|
1586
|
-
yield { type: "reasoning", text: thinkText + "\n" };
|
|
1587
|
-
continue;
|
|
1588
|
-
}
|
|
1589
|
-
if (quoteLine.trim() === "") {
|
|
1590
|
-
yield { type: "reasoning", text: "\n" };
|
|
1591
|
-
continue;
|
|
1592
|
-
}
|
|
1593
|
-
thinkingMode = "none";
|
|
1594
|
-
yield { type: "content", text: quoteLine + "\n" };
|
|
1595
|
-
continue;
|
|
1596
|
-
}
|
|
1597
|
-
break;
|
|
1598
|
-
}
|
|
1599
|
-
}
|
|
1600
|
-
}
|
|
1601
|
-
} catch {
|
|
1602
|
-
}
|
|
1603
|
-
}
|
|
1604
|
-
}
|
|
1605
|
-
if (contentBuffer.length > 0) {
|
|
1606
|
-
if (thinkingMode === "think_tag" || thinkingMode === "markdown_quote") {
|
|
1607
|
-
yield { type: "reasoning", text: contentBuffer };
|
|
1608
|
-
} else {
|
|
1609
|
-
yield { type: "content", text: contentBuffer };
|
|
1610
|
-
}
|
|
1611
|
-
}
|
|
1612
|
-
} finally {
|
|
1613
|
-
reader.releaseLock();
|
|
3085
|
+
return this.executeNonStreamRequest(adapter, client, chatOptions);
|
|
3086
|
+
}
|
|
3087
|
+
/**
|
|
3088
|
+
* 执行非流式请求
|
|
3089
|
+
* @param adapter - Provider 适配器
|
|
3090
|
+
* @param client - Provider 客户端
|
|
3091
|
+
* @param chatOptions - 聊天选项
|
|
3092
|
+
* @returns 响应内容
|
|
3093
|
+
*/
|
|
3094
|
+
async executeNonStreamRequest(adapter, client, chatOptions) {
|
|
3095
|
+
const baseUrl = this.config.baseUrl ?? adapter.defaultBaseUrl;
|
|
3096
|
+
const endpoint = adapter.getEndpointUrl(baseUrl);
|
|
3097
|
+
const endpointPath = endpoint.replace(baseUrl, "");
|
|
3098
|
+
const body = adapter.buildChatRequest(chatOptions, false);
|
|
3099
|
+
const response = await client.chat(endpointPath, body);
|
|
3100
|
+
const result = adapter.parseChatResponse(response, this.config.model);
|
|
3101
|
+
return result.content;
|
|
3102
|
+
}
|
|
3103
|
+
/**
|
|
3104
|
+
* 执行流式请求
|
|
3105
|
+
* @param adapter - Provider 适配器
|
|
3106
|
+
* @param client - Provider 客户端
|
|
3107
|
+
* @param chatOptions - 聊天选项
|
|
3108
|
+
* @returns 流式数据块生成器
|
|
3109
|
+
*/
|
|
3110
|
+
async *executeStreamRequest(adapter, client, chatOptions) {
|
|
3111
|
+
const baseUrl = this.config.baseUrl ?? adapter.defaultBaseUrl;
|
|
3112
|
+
const endpoint = adapter.getEndpointUrl(baseUrl);
|
|
3113
|
+
const endpointPath = endpoint.replace(baseUrl, "");
|
|
3114
|
+
const body = adapter.buildChatRequest(chatOptions, true);
|
|
3115
|
+
const response = await client.chatStream(endpointPath, body);
|
|
3116
|
+
const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
|
|
3117
|
+
yield* StreamProcessor2.processStream(
|
|
3118
|
+
response,
|
|
3119
|
+
(delta) => adapter.extractStreamChunk(delta)
|
|
3120
|
+
);
|
|
3121
|
+
}
|
|
3122
|
+
/**
|
|
3123
|
+
* 验证配置是否完整
|
|
3124
|
+
* @throws ConfigurationError 如果缺少必需参数
|
|
3125
|
+
*/
|
|
3126
|
+
validateConfig() {
|
|
3127
|
+
if (!this.config.provider) {
|
|
3128
|
+
throw new ConfigurationError("\u8BF7\u5148\u8C03\u7528 use(provider) \u9009\u62E9\u670D\u52A1\u63D0\u4F9B\u5546");
|
|
3129
|
+
}
|
|
3130
|
+
if (!this.config.model) {
|
|
3131
|
+
throw new ConfigurationError("\u8BF7\u5148\u8C03\u7528 model(modelId) \u6307\u5B9A\u6A21\u578B");
|
|
3132
|
+
}
|
|
3133
|
+
if (!this.config.apiKey) {
|
|
3134
|
+
throw new ConfigurationError(
|
|
3135
|
+
"\u8BF7\u5148\u914D\u7F6E API Key\uFF1A\u8C03\u7528 key(apiKey) \u6216\u901A\u8FC7\u9884\u8BBE\u5B9E\u4F8B\u914D\u7F6E"
|
|
3136
|
+
);
|
|
1614
3137
|
}
|
|
1615
3138
|
}
|
|
1616
|
-
|
|
1617
|
-
|
|
1618
|
-
|
|
1619
|
-
|
|
1620
|
-
|
|
1621
|
-
|
|
1622
|
-
return content;
|
|
3139
|
+
/**
|
|
3140
|
+
* 获取当前配置(用于测试)
|
|
3141
|
+
* @returns 当前配置的副本
|
|
3142
|
+
*/
|
|
3143
|
+
getConfig() {
|
|
3144
|
+
return { ...this.config };
|
|
1623
3145
|
}
|
|
1624
|
-
|
|
1625
|
-
|
|
1626
|
-
|
|
1627
|
-
|
|
3146
|
+
/**
|
|
3147
|
+
* 克隆构建器(用于创建新实例)
|
|
3148
|
+
* @returns 新的构建器实例
|
|
3149
|
+
*/
|
|
3150
|
+
clone() {
|
|
3151
|
+
return new _OiiaiBuilderImpl({ ...this.config });
|
|
1628
3152
|
}
|
|
1629
|
-
|
|
3153
|
+
};
|
|
3154
|
+
function createBuilder(initialConfig) {
|
|
3155
|
+
return new OiiaiBuilderImpl(initialConfig);
|
|
1630
3156
|
}
|
|
1631
|
-
var
|
|
1632
|
-
|
|
1633
|
-
|
|
1634
|
-
|
|
1635
|
-
|
|
1636
|
-
|
|
1637
|
-
if (typeof config === "string") {
|
|
1638
|
-
this.apiKey = config;
|
|
1639
|
-
this.baseUrl = BASE_URL7;
|
|
1640
|
-
} else {
|
|
1641
|
-
this.apiKey = config.apiKey;
|
|
1642
|
-
this.baseUrl = config.baseUrl ?? BASE_URL7;
|
|
3157
|
+
var oiiai = new Proxy({}, {
|
|
3158
|
+
get(_target, prop) {
|
|
3159
|
+
const builder = new OiiaiBuilderImpl();
|
|
3160
|
+
const value = builder[prop];
|
|
3161
|
+
if (typeof value === "function") {
|
|
3162
|
+
return value.bind(builder);
|
|
1643
3163
|
}
|
|
3164
|
+
return value;
|
|
1644
3165
|
}
|
|
3166
|
+
});
|
|
3167
|
+
|
|
3168
|
+
// src/fluent/preset-provider.ts
|
|
3169
|
+
var PresetProviderImpl = class _PresetProviderImpl {
|
|
3170
|
+
/** Provider 名称 */
|
|
3171
|
+
name;
|
|
3172
|
+
/** 内部配置状态 */
|
|
3173
|
+
config = {};
|
|
3174
|
+
/** 环境变量名称映射 */
|
|
3175
|
+
static ENV_KEY_MAP = {
|
|
3176
|
+
deepseek: "DEEPSEEK_API_KEY",
|
|
3177
|
+
openrouter: "OPENROUTER_API_KEY",
|
|
3178
|
+
gemini: "GEMINI_API_KEY",
|
|
3179
|
+
groq: "GROQ_API_KEY",
|
|
3180
|
+
huggingface: "HUGGINGFACE_API_KEY",
|
|
3181
|
+
modelscope: "MODELSCOPE_API_KEY",
|
|
3182
|
+
poe: "POE_API_KEY",
|
|
3183
|
+
nova: "NOVA_API_KEY"
|
|
3184
|
+
};
|
|
1645
3185
|
/**
|
|
1646
|
-
*
|
|
1647
|
-
*
|
|
1648
|
-
* 注意:
|
|
1649
|
-
* - Nova API 的 temperature 范围是 0-1(不是 0-2)
|
|
1650
|
-
* - Nova 2 Lite 支持 extended thinking (reasoningConfig)
|
|
1651
|
-
* - effort 映射为 maxReasoningEffort
|
|
3186
|
+
* 创建预设实例
|
|
3187
|
+
* @param providerType - Provider 类型
|
|
1652
3188
|
*/
|
|
1653
|
-
|
|
1654
|
-
|
|
1655
|
-
|
|
1656
|
-
|
|
1657
|
-
|
|
1658
|
-
|
|
1659
|
-
stream: false
|
|
1660
|
-
};
|
|
1661
|
-
if (maxTokens) {
|
|
1662
|
-
body.max_tokens = maxTokens;
|
|
3189
|
+
constructor(providerType) {
|
|
3190
|
+
if (!ProviderRegistry.hasAdapter(providerType)) {
|
|
3191
|
+
const supported = ProviderRegistry.listSupported();
|
|
3192
|
+
throw new ValidationError(
|
|
3193
|
+
`\u4E0D\u652F\u6301\u7684 Provider: ${providerType}\uFF0C\u652F\u6301\u7684 Provider: ${supported.join(", ")}`
|
|
3194
|
+
);
|
|
1663
3195
|
}
|
|
1664
|
-
|
|
1665
|
-
|
|
1666
|
-
|
|
1667
|
-
|
|
1668
|
-
|
|
1669
|
-
|
|
3196
|
+
this.name = providerType;
|
|
3197
|
+
}
|
|
3198
|
+
/**
|
|
3199
|
+
* 配置 API Key 和其他选项
|
|
3200
|
+
* @param options - 配置选项
|
|
3201
|
+
* @returns this 支持链式调用
|
|
3202
|
+
*/
|
|
3203
|
+
configure(options) {
|
|
3204
|
+
this.config.apiKey = options.apiKey;
|
|
3205
|
+
if (options.baseUrl) {
|
|
3206
|
+
this.config.baseUrl = options.baseUrl;
|
|
1670
3207
|
}
|
|
1671
|
-
|
|
1672
|
-
|
|
1673
|
-
|
|
1674
|
-
|
|
1675
|
-
|
|
1676
|
-
|
|
1677
|
-
|
|
1678
|
-
|
|
1679
|
-
|
|
1680
|
-
|
|
1681
|
-
|
|
3208
|
+
return this;
|
|
3209
|
+
}
|
|
3210
|
+
/**
|
|
3211
|
+
* 从环境变量读取配置
|
|
3212
|
+
* 环境变量名格式: {PROVIDER}_API_KEY (如 DEEPSEEK_API_KEY)
|
|
3213
|
+
* @returns this 支持链式调用
|
|
3214
|
+
*/
|
|
3215
|
+
fromEnv() {
|
|
3216
|
+
const envKey = _PresetProviderImpl.ENV_KEY_MAP[this.name];
|
|
3217
|
+
const apiKey = process.env[envKey];
|
|
3218
|
+
if (!apiKey) {
|
|
3219
|
+
throw new ConfigurationError(`\u73AF\u5883\u53D8\u91CF ${envKey} \u672A\u8BBE\u7F6E`);
|
|
1682
3220
|
}
|
|
1683
|
-
|
|
1684
|
-
|
|
1685
|
-
|
|
1686
|
-
|
|
3221
|
+
this.config.apiKey = apiKey;
|
|
3222
|
+
return this;
|
|
3223
|
+
}
|
|
3224
|
+
/**
|
|
3225
|
+
* 简单问答(非流式)
|
|
3226
|
+
* @param model - 模型 ID
|
|
3227
|
+
* @param question - 问题
|
|
3228
|
+
* @param options - 可选配置
|
|
3229
|
+
* @returns 响应内容
|
|
3230
|
+
*/
|
|
3231
|
+
async ask(model, question, options) {
|
|
3232
|
+
this.validateApiKey();
|
|
3233
|
+
const builder = this.createConfiguredBuilder(model, options);
|
|
3234
|
+
const result = builder.ask(question);
|
|
3235
|
+
return result;
|
|
3236
|
+
}
|
|
3237
|
+
/**
|
|
3238
|
+
* 流式问答
|
|
3239
|
+
* @param model - 模型 ID
|
|
3240
|
+
* @param question - 问题
|
|
3241
|
+
* @param options - 可选配置
|
|
3242
|
+
* @returns 流式数据块生成器
|
|
3243
|
+
*/
|
|
3244
|
+
async *stream(model, question, options) {
|
|
3245
|
+
this.validateApiKey();
|
|
3246
|
+
const builder = this.createConfiguredBuilder(model, options);
|
|
3247
|
+
const streamBuilder = builder.stream();
|
|
3248
|
+
yield* streamBuilder.ask(question);
|
|
3249
|
+
}
|
|
3250
|
+
/**
|
|
3251
|
+
* 带回调的流式问答
|
|
3252
|
+
* @param model - 模型 ID
|
|
3253
|
+
* @param question - 问题
|
|
3254
|
+
* @param callbacks - 回调函数
|
|
3255
|
+
* @returns Promise,完成时 resolve
|
|
3256
|
+
*/
|
|
3257
|
+
async streamWithCallbacks(model, question, callbacks) {
|
|
3258
|
+
this.validateApiKey();
|
|
3259
|
+
let reasoningContent = "";
|
|
3260
|
+
let contentText = "";
|
|
3261
|
+
const builder = this.createConfiguredBuilder(model);
|
|
3262
|
+
const streamBuilder = builder.stream();
|
|
3263
|
+
for await (const chunk of streamBuilder.ask(question)) {
|
|
3264
|
+
if (chunk.type === "reasoning") {
|
|
3265
|
+
reasoningContent += chunk.text;
|
|
3266
|
+
callbacks.onReasoning?.(chunk.text);
|
|
3267
|
+
} else if (chunk.type === "content") {
|
|
3268
|
+
contentText += chunk.text;
|
|
3269
|
+
callbacks.onContent?.(chunk.text);
|
|
3270
|
+
}
|
|
1687
3271
|
}
|
|
1688
|
-
|
|
1689
|
-
|
|
1690
|
-
|
|
1691
|
-
|
|
1692
|
-
reasoning: reasoningContent ? extractTextContent8(reasoningContent) : null,
|
|
1693
|
-
model: result.model ?? model,
|
|
1694
|
-
usage: {
|
|
1695
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
1696
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
1697
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
1698
|
-
},
|
|
1699
|
-
finishReason: choice.finish_reason ?? null
|
|
1700
|
-
};
|
|
3272
|
+
callbacks.onDone?.({
|
|
3273
|
+
reasoning: reasoningContent,
|
|
3274
|
+
content: contentText
|
|
3275
|
+
});
|
|
1701
3276
|
}
|
|
1702
3277
|
/**
|
|
1703
|
-
*
|
|
3278
|
+
* 获取构建器(预配置 provider 和 model)
|
|
3279
|
+
* @param model - 模型 ID
|
|
3280
|
+
* @returns 预配置的构建器
|
|
1704
3281
|
*/
|
|
1705
|
-
|
|
1706
|
-
|
|
1707
|
-
const
|
|
3282
|
+
builder(model) {
|
|
3283
|
+
this.validateApiKey();
|
|
3284
|
+
const adapter = ProviderRegistry.getAdapter(this.name);
|
|
3285
|
+
const builder = new OiiaiBuilderImpl({
|
|
3286
|
+
provider: this.name,
|
|
1708
3287
|
model,
|
|
1709
|
-
|
|
1710
|
-
|
|
1711
|
-
|
|
1712
|
-
|
|
1713
|
-
|
|
1714
|
-
|
|
1715
|
-
|
|
1716
|
-
|
|
1717
|
-
|
|
1718
|
-
|
|
1719
|
-
|
|
1720
|
-
|
|
3288
|
+
apiKey: this.config.apiKey,
|
|
3289
|
+
baseUrl: this.config.baseUrl ?? adapter.defaultBaseUrl
|
|
3290
|
+
});
|
|
3291
|
+
return builder;
|
|
3292
|
+
}
|
|
3293
|
+
/**
|
|
3294
|
+
* 创建多轮对话会话
|
|
3295
|
+
* @param model - 模型 ID
|
|
3296
|
+
* @param options - 会话配置
|
|
3297
|
+
* @returns 对话会话实例
|
|
3298
|
+
*/
|
|
3299
|
+
chat(model, options) {
|
|
3300
|
+
this.validateApiKey();
|
|
3301
|
+
const { ChatSessionImpl: ChatSessionImpl2 } = (init_chat_session(), __toCommonJS(chat_session_exports));
|
|
3302
|
+
return new ChatSessionImpl2(this, model, options);
|
|
3303
|
+
}
|
|
3304
|
+
/**
|
|
3305
|
+
* 验证 API Key 是否已配置
|
|
3306
|
+
* @throws ConfigurationError 如果未配置 API Key
|
|
3307
|
+
*/
|
|
3308
|
+
validateApiKey() {
|
|
3309
|
+
if (!this.config.apiKey) {
|
|
3310
|
+
throw new ConfigurationError(
|
|
3311
|
+
`\u8BF7\u5148\u914D\u7F6E API Key\uFF1A\u8C03\u7528 configure({ apiKey: 'xxx' }) \u6216 fromEnv()`
|
|
3312
|
+
);
|
|
1721
3313
|
}
|
|
1722
|
-
|
|
1723
|
-
|
|
1724
|
-
|
|
1725
|
-
|
|
1726
|
-
|
|
1727
|
-
|
|
1728
|
-
|
|
3314
|
+
}
|
|
3315
|
+
/**
|
|
3316
|
+
* 创建已配置的构建器
|
|
3317
|
+
* @param model - 模型 ID
|
|
3318
|
+
* @param options - 可选配置
|
|
3319
|
+
* @returns 配置好的构建器
|
|
3320
|
+
*/
|
|
3321
|
+
createConfiguredBuilder(model, options) {
|
|
3322
|
+
const adapter = ProviderRegistry.getAdapter(this.name);
|
|
3323
|
+
const builder = new OiiaiBuilderImpl({
|
|
3324
|
+
provider: this.name,
|
|
3325
|
+
model,
|
|
3326
|
+
apiKey: this.config.apiKey,
|
|
3327
|
+
baseUrl: this.config.baseUrl ?? adapter.defaultBaseUrl
|
|
1729
3328
|
});
|
|
1730
|
-
if (
|
|
1731
|
-
|
|
1732
|
-
throw new Error(`Nova API error: ${response.status} ${error}`);
|
|
3329
|
+
if (options?.system) {
|
|
3330
|
+
builder.system(options.system);
|
|
1733
3331
|
}
|
|
1734
|
-
|
|
1735
|
-
|
|
1736
|
-
throw new Error("No response body");
|
|
3332
|
+
if (options?.temperature !== void 0) {
|
|
3333
|
+
builder.temperature(options.temperature);
|
|
1737
3334
|
}
|
|
1738
|
-
|
|
1739
|
-
|
|
1740
|
-
|
|
1741
|
-
|
|
1742
|
-
|
|
1743
|
-
if (done) break;
|
|
1744
|
-
buffer += decoder.decode(value, { stream: true });
|
|
1745
|
-
const lines = buffer.split("\n");
|
|
1746
|
-
buffer = lines.pop() ?? "";
|
|
1747
|
-
for (const line of lines) {
|
|
1748
|
-
const trimmed = line.trim();
|
|
1749
|
-
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
1750
|
-
if (!trimmed.startsWith("data: ")) continue;
|
|
1751
|
-
try {
|
|
1752
|
-
const data = JSON.parse(trimmed.slice(6));
|
|
1753
|
-
const delta = data.choices?.[0]?.delta;
|
|
1754
|
-
if (!delta) continue;
|
|
1755
|
-
if (delta.reasoning_content) {
|
|
1756
|
-
yield {
|
|
1757
|
-
type: "reasoning",
|
|
1758
|
-
text: extractTextContent8(delta.reasoning_content)
|
|
1759
|
-
};
|
|
1760
|
-
}
|
|
1761
|
-
if (delta.content) {
|
|
1762
|
-
yield {
|
|
1763
|
-
type: "content",
|
|
1764
|
-
text: extractTextContent8(delta.content)
|
|
1765
|
-
};
|
|
1766
|
-
}
|
|
1767
|
-
} catch {
|
|
1768
|
-
}
|
|
1769
|
-
}
|
|
1770
|
-
}
|
|
1771
|
-
} finally {
|
|
1772
|
-
reader.releaseLock();
|
|
3335
|
+
if (options?.maxTokens !== void 0) {
|
|
3336
|
+
builder.maxTokens(options.maxTokens);
|
|
3337
|
+
}
|
|
3338
|
+
if (options?.reasoning) {
|
|
3339
|
+
builder.reasoning(options.reasoning);
|
|
1773
3340
|
}
|
|
3341
|
+
return builder;
|
|
1774
3342
|
}
|
|
1775
|
-
|
|
1776
|
-
|
|
1777
|
-
|
|
1778
|
-
|
|
1779
|
-
|
|
1780
|
-
|
|
1781
|
-
case "openrouter":
|
|
1782
|
-
return new OpenRouterProvider(apiKey);
|
|
1783
|
-
case "gemini":
|
|
1784
|
-
return new GeminiProvider(baseUrl ? { apiKey, baseUrl } : apiKey);
|
|
1785
|
-
case "groq":
|
|
1786
|
-
return new GroqProvider(baseUrl ? { apiKey, baseUrl } : apiKey);
|
|
1787
|
-
case "huggingface":
|
|
1788
|
-
return new HuggingFaceProvider(baseUrl ? { apiKey, baseUrl } : apiKey);
|
|
1789
|
-
case "modelscope":
|
|
1790
|
-
return new ModelScopeProvider(baseUrl ? { apiKey, baseUrl } : apiKey);
|
|
1791
|
-
case "deepseek":
|
|
1792
|
-
return new DeepSeekProvider(baseUrl ? { apiKey, baseUrl } : apiKey);
|
|
1793
|
-
case "poe":
|
|
1794
|
-
return new PoeProvider(baseUrl ? { apiKey, baseUrl } : apiKey);
|
|
1795
|
-
case "nova":
|
|
1796
|
-
return new NovaProvider(baseUrl ? { apiKey, baseUrl } : apiKey);
|
|
1797
|
-
default:
|
|
1798
|
-
throw new Error(`Unknown provider: ${provider}`);
|
|
3343
|
+
/**
|
|
3344
|
+
* 获取当前配置(用于测试)
|
|
3345
|
+
* @returns 当前配置的副本
|
|
3346
|
+
*/
|
|
3347
|
+
getConfig() {
|
|
3348
|
+
return { ...this.config };
|
|
1799
3349
|
}
|
|
1800
|
-
}
|
|
1801
|
-
var ai = {
|
|
1802
|
-
openrouter: (apiKey, baseUrl) => createProvider({ provider: "openrouter", apiKey, baseUrl }),
|
|
1803
|
-
gemini: (apiKey, baseUrl) => createProvider({ provider: "gemini", apiKey, baseUrl }),
|
|
1804
|
-
groq: (apiKey, baseUrl) => createProvider({ provider: "groq", apiKey, baseUrl }),
|
|
1805
|
-
huggingface: (apiKey, baseUrl) => createProvider({ provider: "huggingface", apiKey, baseUrl }),
|
|
1806
|
-
modelscope: (apiKey, baseUrl) => createProvider({ provider: "modelscope", apiKey, baseUrl }),
|
|
1807
|
-
deepseek: (apiKey, baseUrl) => createProvider({ provider: "deepseek", apiKey, baseUrl }),
|
|
1808
|
-
poe: (apiKey, baseUrl) => createProvider({ provider: "poe", apiKey, baseUrl }),
|
|
1809
|
-
nova: (apiKey, baseUrl) => createProvider({ provider: "nova", apiKey, baseUrl })
|
|
1810
3350
|
};
|
|
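The removed lines above show the 0.1.x ai factory object; the export list below still includes ai and createProvider alongside the new fluent API. Assuming their call shape is unchanged from what the removed code shows, classic usage looks like this (key and model id are placeholders):

```ts
import { ai } from "@weisiren000/oiiai";

// Assumes the 0.1.x factory call shape still holds in 0.2.0.
const provider = ai.deepseek(process.env.DEEPSEEK_API_KEY!);
const res = await provider.chat({
  model: "deepseek-chat",
  messages: [{ role: "user", content: "Hello" }],
});
console.log(res.content, res.usage.totalTokens);
```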
+
+ // src/fluent/index.ts
+ init_chat_session();
+
+ // src/fluent/preset-instances.ts
+ var deepseek = new PresetProviderImpl("deepseek");
+ var openrouter = new PresetProviderImpl("openrouter");
+ var gemini = new PresetProviderImpl("gemini");
+ var groq = new PresetProviderImpl("groq");
+ var huggingface = new PresetProviderImpl(
+ "huggingface"
+ );
+ var modelscope = new PresetProviderImpl("modelscope");
+ var poe = new PresetProviderImpl("poe");
+ var nova = new PresetProviderImpl("nova");
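A hedged sketch of the preset instances defined just above, using only methods from PresetProviderImpl (fromEnv, configure, ask, streamWithCallbacks); model ids are placeholders:

```ts
import { deepseek, groq } from "@weisiren000/oiiai";

// Key read from DEEPSEEK_API_KEY (see ENV_KEY_MAP), one-off question.
const text = await deepseek.fromEnv().ask("deepseek-chat", "Say hi");

// Explicit key plus streaming callbacks.
await groq.configure({ apiKey: process.env.GROQ_API_KEY! }).streamWithCallbacks(
  "some-groq-model",
  "Why is the sky blue?",
  {
    onReasoning: (t) => process.stdout.write(t),
    onContent: (t) => process.stdout.write(t),
    onDone: ({ reasoning, content }) =>
      console.log(`\ndone: ${reasoning.length} reasoning chars, ${content.length} content chars`),
  }
);
```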
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
+ APIError,
+ BaseAdapter,
  BaseProvider,
+ CONFIG_DEFAULTS,
+ ConfigManager,
+ ConfigValidator,
+ ConfigurationError,
+ DeepSeekAdapter,
  EFFORT_TOKEN_MAP,
+ FluentValidationError,
+ GeminiAdapter,
  GeminiProvider,
+ GroqAdapter,
  GroqProvider,
+ HttpProviderClient,
+ HuggingFaceAdapter,
  HuggingFaceProvider,
+ ModelScopeAdapter,
  ModelScopeProvider,
+ NetworkError,
+ NovaAdapter,
+ OpenRouterAdapter,
  OpenRouterProvider,
+ PoeAdapter,
+ ProviderError,
+ ProviderRegistry,
+ RegistryError,
+ RequestBuilder,
+ StreamProcessor,
+ TimeoutError,
+ VALID_PROVIDERS,
  ai,
-
+ createBuilder,
+ createBuiltInAdapters,
+ createProvider,
+ deepseek,
+ gemini,
+ groq,
+ huggingface,
+ modelscope,
+ nova,
+ oiiai,
+ openrouter,
+ poe
  });
  //# sourceMappingURL=index.js.map