@weisiren000/oiiai 0.1.4 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +359 -27
- package/dist/index.d.mts +1647 -17
- package/dist/index.d.ts +1647 -17
- package/dist/index.js +2898 -1308
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2886 -1307
- package/dist/index.mjs.map +1 -1
- package/package.json +7 -2
package/dist/index.mjs
CHANGED
|
@@ -1,5 +1,2212 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
1
|
+
// --- esbuild runtime helpers (auto-generated bundler prelude) ---
// These implement lazy module initialization and CommonJS interop for the
// bundled ESM output below. Do not hand-edit: semantics are relied on by
// every init_* module wrapper in this file.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Shim for dynamic `require` in an ESM context: resolves to the real
// `require` when one exists; otherwise property access is proxied and a
// descriptive error is thrown on first actual call.
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
  get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
}) : x)(function(x) {
  if (typeof require !== "undefined") return require.apply(this, arguments);
  throw Error('Dynamic require of "' + x + '" is not supported');
});
// Wraps a module body so it runs at most once: the first __init() call
// invokes the single function stored in `fn`, caches its result in `res`,
// and clears `fn` (set to 0) so later calls just return the cached value.
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
};
// Defines lazy, enumerable getters on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as live getters, skipping keys
// already present on `to` and the optional `except` key; preserves the
// source property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Re-packages an ES module namespace object as a CommonJS-style exports
// object (adds the `__esModule` marker).
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
27
|
+
|
|
28
|
+
// src/providers/__types__.ts
// Default thinking-token budget per reasoning-effort level.
// Note: `off` maps to 0, which is falsy — callers that gate on
// `EFFORT_TOKEN_MAP[effort]` (see RequestBuilder.buildOpenRouterReasoning)
// therefore skip assigning a budget for `off`.
var EFFORT_TOKEN_MAP;
var init_types = __esm({
  "src/providers/__types__.ts"() {
    "use strict";
    EFFORT_TOKEN_MAP = {
      off: 0,
      low: 1024,
      medium: 4096,
      high: 16384
    };
  }
});
|
|
41
|
+
|
|
42
|
+
// src/utils/request-builder.ts
// Static helpers that translate a provider-agnostic options object into the
// request-body fragments each upstream API expects.
var RequestBuilder;
var init_request_builder = __esm({
  "src/utils/request-builder.ts"() {
    "use strict";
    init_types();
    RequestBuilder = class {
      /**
       * Build the base parameters of a chat request in the standard
       * OpenAI-compatible body format.
       *
       * @param options - chat options ({ model, messages, temperature?, maxTokens? })
       * @param stream - whether this is a streaming request
       * @returns the request-body object
       *
       * @example
       * ```ts
       * const body = RequestBuilder.buildChatBody({
       *   model: 'gpt-4',
       *   messages: [{ role: 'user', content: 'Hello' }],
       *   temperature: 0.7
       * });
       * ```
       */
      static buildChatBody(options, stream = false) {
        const { model, messages, temperature = 0.7, maxTokens } = options;
        const body = {
          model,
          messages,
          temperature,
          stream
        };
        // max_tokens is only sent when explicitly provided.
        if (maxTokens !== void 0) {
          body.max_tokens = maxTokens;
        }
        return body;
      }
      /**
       * Build the OpenRouter-format `reasoning` parameter.
       *
       * @param config - reasoning config ({ effort?, budgetTokens?, exclude? })
       * @returns the OpenRouter `reasoning` object, or undefined when
       *          reasoning is absent or explicitly turned off
       *
       * @example
       * ```ts
       * const reasoning = RequestBuilder.buildOpenRouterReasoning({ effort: 'high' });
       * // => { effort: 'high', max_tokens: 16384 }
       * ```
       */
      static buildOpenRouterReasoning(config) {
        if (!config) return void 0;
        if (config.effort === "off") return void 0;
        const param = {};
        if (config.effort) {
          param.effort = config.effort;
        }
        // Explicit budget wins; otherwise derive one from the effort level.
        if (config.budgetTokens !== void 0) {
          param.max_tokens = config.budgetTokens;
        } else if (config.effort && EFFORT_TOKEN_MAP[config.effort]) {
          param.max_tokens = EFFORT_TOKEN_MAP[config.effort];
        }
        if (config.exclude !== void 0) {
          param.exclude = config.exclude;
        }
        // Return undefined rather than an empty object so callers can omit
        // the `reasoning` key entirely.
        return Object.keys(param).length > 0 ? param : void 0;
      }
      /**
       * Build the Gemini-format reasoning parameter. Gemini 2.5+ models use
       * `reasoning_effort` to control thinking.
       *
       * @param config - reasoning config
       * @returns Gemini-format parameter object (empty when off/unset)
       *
       * @example
       * ```ts
       * const params = RequestBuilder.buildGeminiReasoning({ effort: 'high' });
       * // => { reasoning_effort: 'high' }
       * ```
       */
      static buildGeminiReasoning(config) {
        if (!config || !config.effort || config.effort === "off") {
          return {};
        }
        return {
          reasoning_effort: config.effort
        };
      }
      /**
       * Build the Groq-format reasoning parameter. Groq uses
       * `reasoning_format` to control reasoning output; `effort: 'off'`
       * maps to `include_reasoning: false` instead.
       *
       * @param config - reasoning config
       * @returns Groq-format parameter object
       *
       * @example
       * ```ts
       * const params = RequestBuilder.buildGroqReasoning({ effort: 'high' });
       * // => { reasoning_format: 'parsed' }
       * ```
       */
      static buildGroqReasoning(config) {
        if (!config) {
          return {};
        }
        if (config.effort === "off") {
          return { include_reasoning: false };
        }
        if (config.effort) {
          return { reasoning_format: "parsed" };
        }
        return {};
      }
      /**
       * Build the DeepSeek-format reasoning parameter. DeepSeek enables
       * thinking mode via a `thinking` object; the effort level itself is
       * not forwarded (only on/off).
       *
       * @param config - reasoning config
       * @returns DeepSeek-format parameter object
       */
      static buildDeepSeekReasoning(config) {
        if (!config || !config.effort || config.effort === "off") {
          return {};
        }
        return {
          thinking: { type: "enabled" }
        };
      }
      /**
       * Build the Nova-format reasoning parameter. Nova controls extended
       * thinking through `reasoningConfig`.
       *
       * @param config - reasoning config
       * @returns Nova-format parameter object
       */
      static buildNovaReasoning(config) {
        if (!config || !config.effort || config.effort === "off") {
          return {};
        }
        return {
          reasoningConfig: {
            type: "enabled",
            maxReasoningEffort: config.effort
          }
        };
      }
      /**
       * Build the HTTP request headers (JSON content type + Bearer auth).
       * Additional headers are spread last, so they can override the
       * defaults — presumably intentional; confirm before relying on it.
       *
       * @param apiKey - API key
       * @param additionalHeaders - extra headers to merge in
       * @returns the headers object
       *
       * @example
       * ```ts
       * const headers = RequestBuilder.buildHeaders('sk-xxx', {
       *   'X-Custom-Header': 'value'
       * });
       * ```
       */
      static buildHeaders(apiKey, additionalHeaders) {
        return {
          "Content-Type": "application/json",
          Authorization: `Bearer ${apiKey}`,
          ...additionalHeaders
        };
      }
    };
  }
});
|
|
211
|
+
|
|
212
|
+
// src/client/types.ts
// Error hierarchy for provider calls. ProviderError is the base; APIError,
// NetworkError and TimeoutError specialize it with fixed `code` values
// ("API_ERROR" / "NETWORK_ERROR" / "TIMEOUT_ERROR") that callers can
// branch on.
var ProviderError, APIError, NetworkError, TimeoutError;
var init_types2 = __esm({
  "src/client/types.ts"() {
    "use strict";
    // Base error: carries a machine-readable code, the provider name
    // (hostname), and an optional underlying cause.
    ProviderError = class extends Error {
      constructor(message, code, provider, cause) {
        super(message);
        this.code = code;
        this.provider = provider;
        this.cause = cause;
        this.name = "ProviderError";
      }
    };
    // Raised for non-2xx HTTP responses; keeps the status code and the raw
    // response body (when it could be read) for diagnostics.
    APIError = class extends ProviderError {
      constructor(message, provider, statusCode, responseBody) {
        super(message, "API_ERROR", provider);
        this.statusCode = statusCode;
        this.responseBody = responseBody;
        this.name = "APIError";
      }
    };
    // Raised when the request could not reach the provider at all.
    NetworkError = class extends ProviderError {
      constructor(message, provider, cause) {
        super(message, "NETWORK_ERROR", provider, cause);
        this.name = "NetworkError";
      }
    };
    // Raised when the request was aborted by the client-side timeout;
    // `timeoutMs` records the limit that was exceeded.
    TimeoutError = class extends ProviderError {
      constructor(message, provider, timeoutMs) {
        super(message, "TIMEOUT_ERROR", provider);
        this.timeoutMs = timeoutMs;
        this.name = "TimeoutError";
      }
    };
  }
});
|
|
249
|
+
|
|
250
|
+
// src/client/http-provider-client.ts
// Thin fetch-based HTTP client shared by all provider adapters. User-facing
// error messages are Chinese (stored as \uXXXX escapes) — they are runtime
// strings and must not be altered.
var http_provider_client_exports = {};
__export(http_provider_client_exports, {
  HttpProviderClient: () => HttpProviderClient
});
var DEFAULT_TIMEOUT, HttpProviderClient;
var init_http_provider_client = __esm({
  "src/client/http-provider-client.ts"() {
    "use strict";
    init_request_builder();
    init_types2();
    DEFAULT_TIMEOUT = 3e4; // 30 seconds
    HttpProviderClient = class {
      config;
      /**
       * Create an HTTP provider-client instance.
       *
       * @param config - client config ({ apiKey, baseUrl, timeout?, headers? })
       *
       * @example
       * ```ts
       * const client = new HttpProviderClient({
       *   apiKey: 'sk-xxx',
       *   baseUrl: 'https://api.openai.com/v1',
       *   timeout: 60000
       * });
       * ```
       */
      constructor(config) {
        this.config = {
          ...config,
          timeout: config.timeout ?? DEFAULT_TIMEOUT
        };
      }
      /**
       * Get the provider name, inferred from the baseUrl hostname; falls
       * back to "unknown" when baseUrl is not a parseable URL.
       */
      getProviderName() {
        try {
          const url = new URL(this.config.baseUrl);
          return url.hostname;
        } catch {
          return "unknown";
        }
      }
      /**
       * Build the full request URL: strips one trailing slash from baseUrl
       * and ensures the endpoint starts with exactly one slash.
       */
      buildUrl(endpoint) {
        const baseUrl = this.config.baseUrl.replace(/\/$/, "");
        const path = endpoint.startsWith("/") ? endpoint : `/${endpoint}`;
        return `${baseUrl}${path}`;
      }
      /**
       * Build the request headers (delegates to RequestBuilder).
       */
      buildHeaders() {
        return RequestBuilder.buildHeaders(this.config.apiKey, this.config.headers);
      }
      /**
       * Create an AbortController that auto-aborts after the configured
       * timeout; callers must clearTimeout(timeoutId) once the request
       * settles.
       */
      createAbortController() {
        const controller = new AbortController();
        const timeoutId = setTimeout(() => {
          controller.abort();
        }, this.config.timeout ?? DEFAULT_TIMEOUT);
        return { controller, timeoutId };
      }
      /**
       * Convert a non-2xx HTTP response into an APIError with a
       * status-specific (Chinese) message. Always throws.
       */
      async handleErrorResponse(response) {
        const provider = this.getProviderName();
        let responseBody;
        try {
          responseBody = await response.text();
        } catch {
          // Body unreadable — best-effort only; proceed without it.
        }
        let message;
        switch (response.status) {
          case 400:
            message = "\u8BF7\u6C42\u53C2\u6570\u9519\u8BEF";
            break;
          case 401:
            message = "API \u5BC6\u94A5\u65E0\u6548\u6216\u5DF2\u8FC7\u671F";
            break;
          case 403:
            message = "\u6CA1\u6709\u6743\u9650\u8BBF\u95EE\u6B64\u8D44\u6E90";
            break;
          case 404:
            message = "\u8BF7\u6C42\u7684\u8D44\u6E90\u4E0D\u5B58\u5728";
            break;
          case 429:
            message = "\u8BF7\u6C42\u8FC7\u4E8E\u9891\u7E41\uFF0C\u8BF7\u7A0D\u540E\u91CD\u8BD5";
            break;
          case 500:
            message = "\u670D\u52A1\u5668\u5185\u90E8\u9519\u8BEF";
            break;
          case 502:
            message = "\u7F51\u5173\u9519\u8BEF";
            break;
          case 503:
            message = "\u670D\u52A1\u6682\u65F6\u4E0D\u53EF\u7528";
            break;
          default:
            message = `HTTP \u9519\u8BEF: ${response.status} ${response.statusText}`;
        }
        throw new APIError(message, provider, response.status, responseBody);
      }
      /**
       * Send a (non-streaming) chat request and return the parsed JSON.
       * Errors are normalized: APIError passes through, AbortError becomes
       * TimeoutError, TypeError becomes NetworkError, anything else is
       * wrapped in NetworkError.
       *
       * @param endpoint - API endpoint path
       * @param body - request body (JSON-serialized)
       * @returns the parsed response data
       */
      async chat(endpoint, body) {
        const url = this.buildUrl(endpoint);
        const headers = this.buildHeaders();
        const { controller, timeoutId } = this.createAbortController();
        const provider = this.getProviderName();
        try {
          const response = await fetch(url, {
            method: "POST",
            headers,
            body: JSON.stringify(body),
            signal: controller.signal
          });
          clearTimeout(timeoutId);
          if (!response.ok) {
            await this.handleErrorResponse(response);
          }
          const data = await response.json();
          return data;
        } catch (error) {
          clearTimeout(timeoutId);
          if (error instanceof APIError) {
            throw error;
          }
          if (error instanceof Error && error.name === "AbortError") {
            throw new TimeoutError(
              `\u8BF7\u6C42\u8D85\u65F6\uFF08${this.config.timeout}ms\uFF09`,
              provider,
              this.config.timeout ?? DEFAULT_TIMEOUT
            );
          }
          // fetch signals network failures as TypeError.
          if (error instanceof TypeError) {
            throw new NetworkError("\u7F51\u7EDC\u8FDE\u63A5\u5931\u8D25\uFF0C\u8BF7\u68C0\u67E5\u7F51\u7EDC\u8BBE\u7F6E", provider, error);
          }
          throw new NetworkError(
            error instanceof Error ? error.message : "\u672A\u77E5\u9519\u8BEF",
            provider,
            error instanceof Error ? error : void 0
          );
        }
      }
      /**
       * Send a streaming chat request and return the raw fetch Response for
       * the caller to consume. Same error normalization as chat(). Note the
       * timeout is cleared once headers arrive — it does not bound how long
       * the stream body may take.
       *
       * @param endpoint - API endpoint path
       * @param body - request body
       * @returns the fetch Response object
       */
      async chatStream(endpoint, body) {
        const url = this.buildUrl(endpoint);
        const headers = this.buildHeaders();
        const { controller, timeoutId } = this.createAbortController();
        const provider = this.getProviderName();
        try {
          const response = await fetch(url, {
            method: "POST",
            headers,
            body: JSON.stringify(body),
            signal: controller.signal
          });
          clearTimeout(timeoutId);
          if (!response.ok) {
            await this.handleErrorResponse(response);
          }
          return response;
        } catch (error) {
          clearTimeout(timeoutId);
          if (error instanceof APIError) {
            throw error;
          }
          if (error instanceof Error && error.name === "AbortError") {
            throw new TimeoutError(
              `\u8BF7\u6C42\u8D85\u65F6\uFF08${this.config.timeout}ms\uFF09`,
              provider,
              this.config.timeout ?? DEFAULT_TIMEOUT
            );
          }
          if (error instanceof TypeError) {
            throw new NetworkError("\u7F51\u7EDC\u8FDE\u63A5\u5931\u8D25\uFF0C\u8BF7\u68C0\u67E5\u7F51\u7EDC\u8BBE\u7F6E", provider, error);
          }
          throw new NetworkError(
            error instanceof Error ? error.message : "\u672A\u77E5\u9519\u8BEF",
            provider,
            error instanceof Error ? error : void 0
          );
        }
      }
    };
  }
});
|
|
456
|
+
|
|
457
|
+
// src/utils/stream-processor.ts
// Utilities for consuming SSE (Server-Sent Events) streaming responses and
// normalizing message content into plain text.
var stream_processor_exports = {};
__export(stream_processor_exports, {
  StreamProcessor: () => StreamProcessor
});
var StreamProcessor;
var init_stream_processor = __esm({
  "src/utils/stream-processor.ts"() {
    "use strict";
    StreamProcessor = class _StreamProcessor {
      /**
       * Extract plain text from a response `content` value. Supports a bare
       * string or an array of { type: 'text', text } parts; any other shape
       * yields "".
       *
       * @param content - response content (string, array, or other)
       * @returns the extracted text
       *
       * @example
       * ```ts
       * // string form
       * StreamProcessor.extractTextContent('Hello') // => 'Hello'
       *
       * // array form
       * StreamProcessor.extractTextContent([
       *   { type: 'text', text: 'Hello' },
       *   { type: 'text', text: ' World' }
       * ]) // => 'Hello World'
       * ```
       */
      static extractTextContent(content) {
        if (typeof content === "string") {
          return content;
        }
        if (Array.isArray(content)) {
          return content.filter(
            (item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
          ).map((item) => item.text).join("");
        }
        return "";
      }
      /**
       * Parse one SSE data line. Returns the parsed JSON payload, or null
       * for blank lines, the "[DONE]" sentinel, non-data lines, and invalid
       * JSON. Note: only the `"data: "` prefix (with a space) is accepted.
       *
       * @param line - an SSE line such as "data: {...}"
       * @returns parsed JSON object or null
       *
       * @example
       * ```ts
       * StreamProcessor.parseSSELine('data: {"content": "Hello"}')
       * // => { content: 'Hello' }
       *
       * StreamProcessor.parseSSELine('data: [DONE]')
       * // => null
       * ```
       */
      static parseSSELine(line) {
        const trimmed = line.trim();
        if (!trimmed || trimmed === "data: [DONE]") {
          return null;
        }
        if (!trimmed.startsWith("data: ")) {
          return null;
        }
        try {
          const jsonStr = trimmed.slice(6); // strip "data: "
          return JSON.parse(jsonStr);
        } catch {
          return null;
        }
      }
      /**
       * Consume an SSE streaming response, yielding a StreamChunk for each
       * delta the extractor recognizes. Buffers partial lines across reads
       * and always releases the reader lock on exit.
       *
       * @param response - fetch Response object
       * @param deltaExtractor - maps a delta object to a StreamChunk (or null to skip)
       * @returns AsyncGenerator<StreamChunk>
       * @throws Error when the response has no body
       *
       * @example
       * ```ts
       * const response = await fetch(url, { ... });
       * const extractor = (delta) => {
       *   if (delta.content) {
       *     return { type: 'content', text: delta.content };
       *   }
       *   return null;
       * };
       *
       * for await (const chunk of StreamProcessor.processStream(response, extractor)) {
       *   console.log(chunk.type, chunk.text);
       * }
       * ```
       */
      static async *processStream(response, deltaExtractor) {
        const reader = response.body?.getReader();
        if (!reader) {
          throw new Error("No response body");
        }
        const decoder = new TextDecoder();
        let buffer = "";
        try {
          while (true) {
            const { done, value } = await reader.read();
            if (done) break;
            buffer += decoder.decode(value, { stream: true });
            const lines = buffer.split("\n");
            // Keep the trailing partial line in the buffer for the next read.
            buffer = lines.pop() ?? "";
            for (const line of lines) {
              const data = _StreamProcessor.parseSSELine(line);
              if (!data) continue;
              const choices = data.choices;
              const delta = choices?.[0]?.delta;
              if (!delta) continue;
              const chunk = deltaExtractor(delta);
              if (chunk) {
                yield chunk;
              }
            }
          }
        } finally {
          reader.releaseLock();
        }
      }
      /**
       * Create the default delta extractor: reasoning text is taken from
       * reasoning_content / reasoning / thoughts (first present wins) and
       * takes priority over regular content.
       *
       * @returns a DeltaExtractor function
       */
      static createDefaultExtractor() {
        return (delta) => {
          const reasoningContent = delta.reasoning_content ?? delta.reasoning ?? delta.thoughts;
          if (reasoningContent) {
            return {
              type: "reasoning",
              text: _StreamProcessor.extractTextContent(reasoningContent)
            };
          }
          if (delta.content) {
            return {
              type: "content",
              text: _StreamProcessor.extractTextContent(delta.content)
            };
          }
          return null;
        };
      }
    };
  }
});
|
|
607
|
+
|
|
608
|
+
// src/fluent/chat-session.ts
// Stateful multi-turn chat session. History is kept client-side and folded
// into the system prompt on each call (the underlying preset.ask/stream
// APIs appear to be single-message — TODO confirm against preset docs).
var chat_session_exports = {};
__export(chat_session_exports, {
  ChatSessionImpl: () => ChatSessionImpl
});
var ChatSessionImpl;
var init_chat_session = __esm({
  "src/fluent/chat-session.ts"() {
    "use strict";
    ChatSessionImpl = class {
      /** Preset instance reference */
      preset;
      /** Model ID */
      model;
      /** Session options */
      options;
      /** Conversation history (includes the system message, when set) */
      history = [];
      /**
       * Create a chat session.
       * @param preset - preset instance
       * @param model - model ID
       * @param options - session options ({ system?, temperature?, maxTokens?, reasoning? })
       */
      constructor(preset, model, options) {
        this.preset = preset;
        this.model = model;
        this.options = options ?? {};
        if (this.options.system) {
          this.history.push({
            role: "system",
            content: this.options.system
          });
        }
      }
      /**
       * Send a message and await the full response (non-streaming).
       * On failure the user message is rolled back off the history.
       * @param message - user message
       * @returns the assistant response content
       */
      async send(message) {
        this.history.push({
          role: "user",
          content: message
        });
        try {
          const response = await this.preset.ask(this.model, message, {
            system: this.buildSystemContext(),
            temperature: this.options.temperature,
            maxTokens: this.options.maxTokens,
            reasoning: this.options.reasoning
          });
          this.history.push({
            role: "assistant",
            content: response
          });
          return response;
        } catch (error) {
          // Roll back the just-pushed user message so history stays consistent.
          this.history.pop();
          throw error;
        }
      }
      /**
       * Send a message and yield streaming chunks. Content chunks are
       * accumulated and recorded as the assistant turn once the stream
       * ends; on mid-stream failure the user message is rolled back (any
       * partially received content is discarded).
       * @param message - user message
       * @returns stream-chunk generator
       */
      async *sendStream(message) {
        this.history.push({
          role: "user",
          content: message
        });
        let responseContent = "";
        try {
          const stream = this.preset.stream(this.model, message, {
            system: this.buildSystemContext(),
            temperature: this.options.temperature,
            maxTokens: this.options.maxTokens,
            reasoning: this.options.reasoning
          });
          for await (const chunk of stream) {
            if (chunk.type === "content") {
              responseContent += chunk.text;
            }
            yield chunk;
          }
          this.history.push({
            role: "assistant",
            content: responseContent
          });
        } catch (error) {
          this.history.pop();
          throw error;
        }
      }
      /**
       * Get the conversation history.
       * @returns a shallow copy of the messages in send order
       */
      getHistory() {
        return [...this.history];
      }
      /**
       * Clear the conversation history (re-seeding the system message,
       * when one was configured).
       */
      clearHistory() {
        this.history = [];
        if (this.options.system) {
          this.history.push({
            role: "system",
            content: this.options.system
          });
        }
      }
      /**
       * Build the system context: folds prior conversation turns into the
       * system prompt. Excludes the system message itself and the last
       * history entry (the message currently being sent).
       * @returns the system context string (may be undefined when there is
       *          no history and no configured system prompt)
       */
      buildSystemContext() {
        const conversationHistory = this.history.filter(
          (msg, index) => msg.role !== "system" && index < this.history.length - 1
        );
        if (conversationHistory.length === 0) {
          return this.options.system;
        }
        // Labels are Chinese runtime strings: \u7528\u6237 = "user",
        // \u52A9\u624B = "assistant".
        const historyContext = conversationHistory.map((msg) => `${msg.role === "user" ? "\u7528\u6237" : "\u52A9\u624B"}: ${msg.content}`).join("\n");
        const baseSystem = this.options.system ?? "";
        return `${baseSystem}

\u4EE5\u4E0B\u662F\u4E4B\u524D\u7684\u5BF9\u8BDD\u5386\u53F2\uFF1A
${historyContext}`.trim();
      }
    };
  }
});
|
|
744
|
+
|
|
745
|
+
// src/adapters/types.ts
// Base class for provider adapters. Provides OpenAI-compatible defaults;
// concrete adapters override the reasoning/request hooks. Helpers are
// pulled in lazily via (init_*(), __toCommonJS(...)) to avoid a circular
// dependency at module-evaluation time.
var BaseAdapter = class {
  /**
   * Create a provider client.
   * Default implementation: imports HttpProviderClient at call time to
   * avoid a circular dependency.
   */
  createClient(config) {
    const { HttpProviderClient: HttpProviderClient2 } = (init_http_provider_client(), __toCommonJS(http_provider_client_exports));
    return new HttpProviderClient2(config);
  }
  /**
   * Build the chat request body.
   * Default implementation: OpenAI-compatible format, with any
   * reasoning params merged in at the top level.
   */
  buildChatRequest(options, stream = false) {
    const {
      model,
      messages,
      temperature = 0.7,
      maxTokens,
      reasoning
    } = options;
    const body = {
      model,
      messages,
      temperature,
      stream
    };
    if (maxTokens !== void 0) {
      body.max_tokens = maxTokens;
    }
    const reasoningParams = this.buildReasoningParams(reasoning);
    Object.assign(body, reasoningParams);
    return body;
  }
  /**
   * Build reasoning params.
   * Default implementation: empty object; subclasses should override.
   */
  buildReasoningParams(_config) {
    return {};
  }
  /**
   * Parse a chat response.
   * Default implementation: OpenAI-compatible format; tolerates both
   * snake_case and camelCase usage/finish-reason fields.
   * @throws Error when the response has no choices
   */
  parseChatResponse(response, model) {
    const choices = response.choices;
    const choice = choices?.[0];
    if (!choice) {
      throw new Error("No response from model");
    }
    const msg = choice.message;
    const reasoningContent = msg?.reasoning_content ?? msg?.reasoning ?? null;
    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
    const usage = response.usage;
    return {
      content: StreamProcessor2.extractTextContent(msg?.content),
      reasoning: reasoningContent ? StreamProcessor2.extractTextContent(reasoningContent) : null,
      model: response.model ?? model,
      usage: {
        promptTokens: usage?.prompt_tokens ?? usage?.promptTokens ?? 0,
        completionTokens: usage?.completion_tokens ?? usage?.completionTokens ?? 0,
        totalTokens: usage?.total_tokens ?? usage?.totalTokens ?? 0
      },
      finishReason: choice.finish_reason ?? choice.finishReason ?? null
    };
  }
  /**
   * Extract a StreamChunk from a delta.
   * Default implementation: supports reasoning_content / reasoning /
   * thoughts (reasoning takes priority) and content fields.
   */
  extractStreamChunk(delta) {
    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
    const reasoningContent = delta.reasoning_content ?? delta.reasoning ?? delta.thoughts;
    if (reasoningContent) {
      return {
        type: "reasoning",
        text: StreamProcessor2.extractTextContent(reasoningContent)
      };
    }
    if (delta.content) {
      return {
        type: "content",
        text: StreamProcessor2.extractTextContent(delta.content)
      };
    }
    return null;
  }
  /**
   * Get the API endpoint URL.
   * Default implementation: the /chat/completions endpoint.
   */
  getEndpointUrl(baseUrl) {
    return `${baseUrl}/chat/completions`;
  }
};
|
|
842
|
+
|
|
843
|
+
// src/adapters/openrouter-adapter.ts
// OpenRouter adapter: OpenAI-compatible body, but reasoning goes in a
// nested `reasoning` object instead of top-level fields.
init_request_builder();
var DEFAULT_BASE_URL = "https://openrouter.ai/api/v1";
var OpenRouterAdapter = class extends BaseAdapter {
  name = "openrouter";
  defaultBaseUrl = DEFAULT_BASE_URL;
  /**
   * Build the chat request body. OpenRouter uses the OpenAI-compatible
   * format with a provider-specific `reasoning` parameter, attached only
   * when non-empty.
   */
  buildChatRequest(options, stream = false) {
    const {
      model,
      messages,
      temperature = 0.7,
      maxTokens,
      reasoning
    } = options;
    const body = {
      model,
      messages,
      temperature,
      stream
    };
    if (maxTokens !== void 0) {
      body.max_tokens = maxTokens;
    }
    const reasoningParam = this.buildReasoningParams(reasoning);
    if (reasoningParam && Object.keys(reasoningParam).length > 0) {
      body.reasoning = reasoningParam;
    }
    return body;
  }
  /**
   * Build the OpenRouter-format reasoning parameter.
   *
   * OpenRouter reasoning parameter shape:
   * {
   *   effort: 'low' | 'medium' | 'high',
   *   max_tokens: number,
   *   exclude: boolean
   * }
   */
  buildReasoningParams(config) {
    return RequestBuilder.buildOpenRouterReasoning(config) ?? {};
  }
  /**
   * Get the API endpoint URL.
   * OpenRouter uses the standard /chat/completions endpoint.
   */
  getEndpointUrl(baseUrl) {
    return `${baseUrl}/chat/completions`;
  }
};
|
|
897
|
+
|
|
898
|
+
// src/adapters/gemini-adapter.ts
// Gemini adapter: talks to Google's OpenAI-compatibility endpoint;
// reasoning_effort is placed directly in the request body.
init_request_builder();
var DEFAULT_BASE_URL2 = "https://generativelanguage.googleapis.com/v1beta/openai";
var GeminiAdapter = class extends BaseAdapter {
  name = "gemini";
  defaultBaseUrl = DEFAULT_BASE_URL2;
  /**
   * Build the chat request body. Gemini uses the OpenAI-compatible
   * format; reasoning params are merged at the top level.
   */
  buildChatRequest(options, stream = false) {
    const {
      model,
      messages,
      temperature = 0.7,
      maxTokens,
      reasoning
    } = options;
    const body = {
      model,
      messages,
      temperature,
      stream
    };
    if (maxTokens !== void 0) {
      body.max_tokens = maxTokens;
    }
    const reasoningParams = this.buildReasoningParams(reasoning);
    Object.assign(body, reasoningParams);
    return body;
  }
  /**
   * Build the Gemini-format reasoning parameter.
   *
   * Gemini 2.5+ models use reasoning_effort:
   * - 'low': fast thinking
   * - 'medium': balanced
   * - 'high': deep thinking
   */
  buildReasoningParams(config) {
    return RequestBuilder.buildGeminiReasoning(config);
  }
  /**
   * Extract a StreamChunk from a delta. Unlike the base class, Gemini
   * deltas are read from reasoning_content or thoughts only (no plain
   * `reasoning` field).
   */
  extractStreamChunk(delta) {
    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
    const reasoningContent = delta.reasoning_content ?? delta.thoughts;
    if (reasoningContent) {
      return {
        type: "reasoning",
        text: StreamProcessor2.extractTextContent(reasoningContent)
      };
    }
    if (delta.content) {
      return {
        type: "content",
        text: StreamProcessor2.extractTextContent(delta.content)
      };
    }
    return null;
  }
  /**
   * Get the API endpoint URL.
   */
  getEndpointUrl(baseUrl) {
    return `${baseUrl}/chat/completions`;
  }
};
|
|
968
|
+
|
|
969
|
+
// src/adapters/groq-adapter.ts
init_request_builder();
var DEFAULT_BASE_URL3 = "https://api.groq.com/openai/v1";
var GroqAdapter = class extends BaseAdapter {
  name = "groq";
  defaultBaseUrl = DEFAULT_BASE_URL3;
  /**
   * Build the chat request body.
   * Groq is OpenAI compatible, but takes max_completion_tokens instead of
   * max_tokens and pins top_p to 1.
   */
  buildChatRequest(options, stream = false) {
    const { model, messages, temperature = 1, maxTokens, reasoning } = options;
    const body = { model, messages, temperature, stream, top_p: 1 };
    if (maxTokens !== void 0) {
      body.max_completion_tokens = maxTokens;
    }
    Object.assign(body, this.buildReasoningParams(reasoning));
    return body;
  }
  /**
   * Build Groq reasoning parameters.
   *
   * Groq uses reasoning_format:
   * - 'raw': raw format
   * - 'parsed': parsed format (recommended)
   *
   * Note: include_reasoning and reasoning_format are mutually exclusive.
   */
  buildReasoningParams(config) {
    return RequestBuilder.buildGroqReasoning(config);
  }
  /**
   * Convert a streaming delta into a StreamChunk.
   * Groq emits thinking text under reasoning_content or reasoning.
   */
  extractStreamChunk(delta) {
    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
    const thinking = delta.reasoning_content ?? delta.reasoning;
    if (thinking) {
      return { type: "reasoning", text: StreamProcessor2.extractTextContent(thinking) };
    }
    if (delta.content) {
      return { type: "content", text: StreamProcessor2.extractTextContent(delta.content) };
    }
    return null;
  }
  /** Resolve the chat-completions endpoint for a given base URL. */
  getEndpointUrl(baseUrl) {
    return `${baseUrl}/chat/completions`;
  }
};
|
|
1035
|
+
|
|
1036
|
+
// src/adapters/huggingface-adapter.ts
var DEFAULT_BASE_URL4 = "https://router.huggingface.co/v1";
var HuggingFaceAdapter = class extends BaseAdapter {
  name = "huggingface";
  defaultBaseUrl = DEFAULT_BASE_URL4;
  /** Build the chat request body (standard OpenAI-compatible shape). */
  buildChatRequest(options, stream = false) {
    const { model, messages, temperature = 0.7, maxTokens, reasoning } = options;
    const body = { model, messages, temperature, stream };
    if (maxTokens !== void 0) {
      body.max_tokens = maxTokens;
    }
    Object.assign(body, this.buildReasoningParams(reasoning));
    return body;
  }
  /**
   * Build reasoning parameters.
   * HuggingFace forwards reasoning_effort to models that support it; an
   * absent or 'off' effort produces no parameter at all.
   */
  buildReasoningParams(config) {
    const effort = config?.effort;
    if (!effort || effort === "off") {
      return {};
    }
    return { reasoning_effort: effort };
  }
  /** Convert a streaming delta into a StreamChunk. */
  extractStreamChunk(delta) {
    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
    if (delta.reasoning_content) {
      return { type: "reasoning", text: StreamProcessor2.extractTextContent(delta.reasoning_content) };
    }
    if (delta.content) {
      return { type: "content", text: StreamProcessor2.extractTextContent(delta.content) };
    }
    return null;
  }
  /** Resolve the chat-completions endpoint for a given base URL. */
  getEndpointUrl(baseUrl) {
    return `${baseUrl}/chat/completions`;
  }
};
|
|
1104
|
+
|
|
1105
|
+
// src/adapters/modelscope-adapter.ts
var DEFAULT_BASE_URL5 = "https://api-inference.modelscope.cn/v1";
var ModelScopeAdapter = class extends BaseAdapter {
  name = "modelscope";
  defaultBaseUrl = DEFAULT_BASE_URL5;
  /** Build the chat request body (OpenAI-compatible shape). */
  buildChatRequest(options, stream = false) {
    const { model, messages, temperature = 0.7, maxTokens, reasoning } = options;
    const body = { model, messages, temperature, stream };
    if (maxTokens !== void 0) {
      body.max_tokens = maxTokens;
    }
    Object.assign(body, this.buildReasoningParams(reasoning));
    return body;
  }
  /**
   * Build reasoning parameters.
   * ModelScope toggles its thinking mode through enable_thinking: any
   * effort other than 'off' enables it; no effort leaves it unset.
   */
  buildReasoningParams(config) {
    const effort = config?.effort;
    if (!effort) {
      return {};
    }
    return { enable_thinking: effort !== "off" };
  }
  /** Convert a streaming delta into a StreamChunk. */
  extractStreamChunk(delta) {
    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
    if (delta.reasoning_content) {
      return { type: "reasoning", text: StreamProcessor2.extractTextContent(delta.reasoning_content) };
    }
    if (delta.content) {
      return { type: "content", text: StreamProcessor2.extractTextContent(delta.content) };
    }
    return null;
  }
  /** Resolve the chat-completions endpoint for a given base URL. */
  getEndpointUrl(baseUrl) {
    return `${baseUrl}/chat/completions`;
  }
};
|
|
1174
|
+
|
|
1175
|
+
// src/adapters/deepseek-adapter.ts
init_request_builder();
var DEFAULT_BASE_URL6 = "https://api.deepseek.com";
var DeepSeekAdapter = class extends BaseAdapter {
  name = "deepseek";
  defaultBaseUrl = DEFAULT_BASE_URL6;
  /** Build the chat request body (OpenAI-compatible shape). */
  buildChatRequest(options, stream = false) {
    const { model, messages, temperature = 0.7, maxTokens, reasoning } = options;
    const body = { model, messages, temperature, stream };
    if (maxTokens !== void 0) {
      body.max_tokens = maxTokens;
    }
    Object.assign(body, this.buildReasoningParams(reasoning));
    return body;
  }
  /** DeepSeek enables its thinking mode through the `thinking` parameter. */
  buildReasoningParams(config) {
    return RequestBuilder.buildDeepSeekReasoning(config);
  }
  /**
   * Convert a streaming delta into a StreamChunk.
   * DeepSeek R1 streams its thinking under reasoning_content.
   */
  extractStreamChunk(delta) {
    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
    if (delta.reasoning_content) {
      return { type: "reasoning", text: StreamProcessor2.extractTextContent(delta.reasoning_content) };
    }
    if (delta.content) {
      return { type: "content", text: StreamProcessor2.extractTextContent(delta.content) };
    }
    return null;
  }
  /** Resolve the chat-completions endpoint for a given base URL. */
  getEndpointUrl(baseUrl) {
    return `${baseUrl}/chat/completions`;
  }
};
|
|
1240
|
+
|
|
1241
|
+
// src/adapters/poe-adapter.ts
init_types();
// Default Poe OpenAI-compatible API root.
var DEFAULT_BASE_URL7 = "https://api.poe.com/v1";
|
|
1244
|
+
/**
 * Split a Poe message body into its thinking and answer parts.
 *
 * Two embedded-thinking formats are recognized, in priority order:
 *   1. an inline `<think>…</think>` block;
 *   2. a leading "*Thinking...*" header followed by markdown blockquote
 *      lines (`> …`).
 * When neither format is present, the original text is returned with an
 * empty `thinking` string.
 */
function extractThinkingFromContent(content) {
  // Format 1: explicit <think> tag.
  const tagMatch = content.match(/<think>([\s\S]*?)<\/think>/);
  if (tagMatch) {
    return {
      thinking: tagMatch[1].trim(),
      content: content.replace(/<think>[\s\S]*?<\/think>/, "").trim()
    };
  }
  // Format 2: "*Thinking...*" header with a blockquote body.
  const quoteMatch = content.match(
    /^\*Thinking\.{0,3}\*\s*\n((?:>.*(?:\n|$))+)/
  );
  if (quoteMatch) {
    const stripped = quoteMatch[1]
      .split("\n")
      .map((line) => line.replace(/^>\s?/, ""))
      .join("\n")
      .trim();
    return {
      thinking: stripped,
      content: content.replace(quoteMatch[0], "").trim()
    };
  }
  return { thinking: "", content };
}
|
|
1261
|
+
var PoeAdapter = class extends BaseAdapter {
  name = "poe";
  defaultBaseUrl = DEFAULT_BASE_URL7;
  /**
   * Build the chat request body.
   * Poe is OpenAI compatible; reasoning controls are merged into the body.
   */
  buildChatRequest(options, stream = false) {
    const { model, messages, temperature = 0.7, maxTokens, reasoning } = options;
    const body = { model, messages, temperature, stream };
    if (maxTokens !== void 0) {
      body.max_tokens = maxTokens;
    }
    Object.assign(body, this.buildReasoningParams(reasoning));
    return body;
  }
  /**
   * Build Poe reasoning parameters (reasoning_effort + thinking_budget).
   * An explicit budgetTokens wins; otherwise the effort level is mapped
   * through EFFORT_TOKEN_MAP when a mapping exists.
   */
  buildReasoningParams(config) {
    if (!config || config.effort === "off") {
      return {};
    }
    const { effort, budgetTokens } = config;
    const params = {};
    if (effort) {
      params.reasoning_effort = effort;
    }
    if (budgetTokens !== void 0) {
      params.thinking_budget = budgetTokens;
    } else if (effort && EFFORT_TOKEN_MAP[effort]) {
      params.thinking_budget = EFFORT_TOKEN_MAP[effort];
    }
    return params;
  }
  /**
   * Parse a non-streaming chat response.
   * Poe may return reasoning_content directly, or embed the thinking inside
   * the content text (recovered via extractThinkingFromContent).
   */
  parseChatResponse(response, model) {
    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
    const choice = response.choices?.[0];
    if (!choice) {
      throw new Error("No response from model");
    }
    const msg = choice.message;
    let reasoningContent = msg?.reasoning_content ?? null;
    let contentText = StreamProcessor2.extractTextContent(msg?.content);
    if (!reasoningContent && contentText) {
      const extracted = extractThinkingFromContent(contentText);
      if (extracted.thinking) {
        reasoningContent = extracted.thinking;
        contentText = extracted.content;
      }
    }
    const usage = response.usage;
    return {
      content: contentText,
      reasoning: reasoningContent ? StreamProcessor2.extractTextContent(reasoningContent) : null,
      model: response.model ?? model,
      usage: {
        promptTokens: usage?.prompt_tokens ?? usage?.promptTokens ?? 0,
        completionTokens: usage?.completion_tokens ?? usage?.completionTokens ?? 0,
        totalTokens: usage?.total_tokens ?? usage?.totalTokens ?? 0
      },
      finishReason: choice.finish_reason ?? choice.finishReason ?? null
    };
  }
  /**
   * Convert a streaming delta into a StreamChunk.
   * Streaming thinking arrives under reasoning_content when present.
   */
  extractStreamChunk(delta) {
    const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
    if (delta.reasoning_content) {
      return { type: "reasoning", text: StreamProcessor2.extractTextContent(delta.reasoning_content) };
    }
    if (delta.content) {
      return { type: "content", text: StreamProcessor2.extractTextContent(delta.content) };
    }
    return null;
  }
  /** Resolve the chat-completions endpoint for a given base URL. */
  getEndpointUrl(baseUrl) {
    return `${baseUrl}/chat/completions`;
  }
};
|
|
1369
|
+
|
|
1370
|
+
// src/adapters/nova-adapter.ts
|
|
1371
|
+
init_request_builder();
|
|
1372
|
+
var DEFAULT_BASE_URL8 = "https://api.nova.amazon.com/v1";
|
|
1373
|
+
var NovaAdapter = class extends BaseAdapter {
|
|
1374
|
+
name = "nova";
|
|
1375
|
+
defaultBaseUrl = DEFAULT_BASE_URL8;
|
|
1376
|
+
/**
|
|
1377
|
+
* 构建聊天请求体
|
|
1378
|
+
* Nova 使用 OpenAI 兼容格式
|
|
1379
|
+
*/
|
|
1380
|
+
buildChatRequest(options, stream = false) {
|
|
1381
|
+
const {
|
|
1382
|
+
model,
|
|
1383
|
+
messages,
|
|
1384
|
+
temperature = 0.7,
|
|
1385
|
+
maxTokens,
|
|
1386
|
+
reasoning
|
|
1387
|
+
} = options;
|
|
1388
|
+
const body = {
|
|
1389
|
+
model,
|
|
1390
|
+
messages,
|
|
1391
|
+
temperature,
|
|
1392
|
+
stream
|
|
1393
|
+
};
|
|
1394
|
+
if (maxTokens !== void 0) {
|
|
1395
|
+
body.max_tokens = maxTokens;
|
|
1396
|
+
}
|
|
1397
|
+
const reasoningParams = this.buildReasoningParams(reasoning);
|
|
1398
|
+
Object.assign(body, reasoningParams);
|
|
1399
|
+
return body;
|
|
1400
|
+
}
|
|
1401
|
+
/**
|
|
1402
|
+
* 构建 Nova 格式的 reasoning 参数
|
|
1403
|
+
* Nova 使用 reasoningConfig 控制 extended thinking
|
|
1404
|
+
*/
|
|
1405
|
+
buildReasoningParams(config) {
|
|
1406
|
+
return RequestBuilder.buildNovaReasoning(config);
|
|
1407
|
+
}
|
|
1408
|
+
/**
|
|
1409
|
+
* 从 delta 中提取 StreamChunk
|
|
1410
|
+
* Nova 返回 reasoning_content 作为思考过程
|
|
1411
|
+
*/
|
|
1412
|
+
extractStreamChunk(delta) {
|
|
1413
|
+
const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
|
|
1414
|
+
if (delta.reasoning_content) {
|
|
1415
|
+
return {
|
|
1416
|
+
type: "reasoning",
|
|
1417
|
+
text: StreamProcessor2.extractTextContent(delta.reasoning_content)
|
|
1418
|
+
};
|
|
1419
|
+
}
|
|
1420
|
+
if (delta.content) {
|
|
1421
|
+
return {
|
|
1422
|
+
type: "content",
|
|
1423
|
+
text: StreamProcessor2.extractTextContent(delta.content)
|
|
1424
|
+
};
|
|
1425
|
+
}
|
|
1426
|
+
return null;
|
|
1427
|
+
}
|
|
1428
|
+
/**
|
|
1429
|
+
* 获取 API 端点 URL
|
|
1430
|
+
*/
|
|
1431
|
+
getEndpointUrl(baseUrl) {
|
|
1432
|
+
return `${baseUrl}/chat/completions`;
|
|
1433
|
+
}
|
|
1434
|
+
};
|
|
1435
|
+
|
|
1436
|
+
// src/adapters/index.ts
/**
 * Instantiate every built-in provider adapter, keyed by provider name.
 * Insertion order matches the original registration order.
 */
function createBuiltInAdapters() {
  return /* @__PURE__ */ new Map([
    ["openrouter", new OpenRouterAdapter()],
    ["gemini", new GeminiAdapter()],
    ["groq", new GroqAdapter()],
    ["huggingface", new HuggingFaceAdapter()],
    ["modelscope", new ModelScopeAdapter()],
    ["deepseek", new DeepSeekAdapter()],
    ["poe", new PoeAdapter()],
    ["nova", new NovaAdapter()]
  ]);
}
|
|
1449
|
+
|
|
1450
|
+
// src/registry/provider-registry.ts
/**
 * Error raised by ProviderRegistry operations.
 * Carries the offending provider name (when known) and a machine-readable
 * code; defaults to "REGISTRY_ERROR".
 */
var RegistryError = class extends Error {
  constructor(message, provider, code = "REGISTRY_ERROR") {
    super(message);
    this.name = "RegistryError";
    this.code = code;
    this.provider = provider;
  }
};
|
|
1459
|
+
var ProviderRegistry = class {
  /** Registered adapters, keyed by provider name. */
  static adapters = /* @__PURE__ */ new Map();
  /** Whether the built-in adapters have been installed. */
  static initialized = false;
  /**
   * Register a provider adapter.
   *
   * @param adapter - Adapter instance to register.
   * @throws RegistryError if the adapter is null or lacks a name.
   */
  static register(adapter) {
    if (!adapter) {
      throw new RegistryError("\u9002\u914D\u5668\u4E0D\u80FD\u4E3A\u7A7A", void 0, "INVALID_ADAPTER");
    }
    if (!adapter.name) {
      throw new RegistryError(
        "\u9002\u914D\u5668\u5FC5\u987B\u6709 name \u5C5E\u6027",
        void 0,
        "INVALID_ADAPTER"
      );
    }
    this.adapters.set(adapter.name, adapter);
  }
  /**
   * Look up a provider adapter.
   *
   * @param type - Provider type name.
   * @returns The registered adapter instance.
   * @throws RegistryError if the provider is not registered.
   */
  static getAdapter(type) {
    this.initializeBuiltIn();
    const adapter = this.adapters.get(type);
    if (!adapter) {
      const supported = this.listSupported();
      throw new RegistryError(
        `Provider "${type}" \u672A\u6CE8\u518C\u3002\u53EF\u7528\u7684 Provider: ${supported.join(", ")}`,
        type,
        "PROVIDER_NOT_FOUND"
      );
    }
    return adapter;
  }
  /**
   * Whether a provider is registered (built-ins are installed lazily first).
   *
   * @param type - Provider type name.
   */
  static hasAdapter(type) {
    this.initializeBuiltIn();
    return this.adapters.has(type);
  }
  /**
   * List all registered provider types.
   *
   * @returns Array of provider type names.
   */
  static listSupported() {
    this.initializeBuiltIn();
    return Array.from(this.adapters.keys());
  }
  /**
   * Load and register providers from a registry configuration object.
   * Entries with an `adapter` module path are require()d and validated;
   * entries without one must already be registered.
   *
   * @param config - Registry configuration ({ providers: { name: { adapter?, config? } } }).
   * @throws RegistryError if the config is invalid, an adapter fails to load,
   *   or a configured provider is unknown.
   */
  static loadFromConfig(config) {
    if (!config || !config.providers) {
      throw new RegistryError(
        "\u914D\u7F6E\u65E0\u6548\uFF1A\u7F3A\u5C11 providers \u5B57\u6BB5",
        void 0,
        "INVALID_CONFIG"
      );
    }
    this.initializeBuiltIn();
    for (const [providerName, providerConfig] of Object.entries(
      config.providers
    )) {
      if (providerConfig.adapter) {
        try {
          const CustomAdapter = __require(providerConfig.adapter);
          // Support both `module.exports = Adapter` and `export default Adapter`.
          const AdapterClass = CustomAdapter.default || CustomAdapter;
          const adapter = new AdapterClass();
          if (typeof adapter.name !== "string" || typeof adapter.createClient !== "function") {
            throw new RegistryError(
              `\u81EA\u5B9A\u4E49\u9002\u914D\u5668 "${providerConfig.adapter}" \u672A\u6B63\u786E\u5B9E\u73B0 ProviderAdapter \u63A5\u53E3`,
              providerName,
              "INVALID_ADAPTER"
            );
          }
          this.adapters.set(providerName, adapter);
        } catch (error) {
          if (error instanceof RegistryError) {
            throw error;
          }
          // Fix: preserve the underlying require/constructor failure as the
          // error cause instead of discarding it, so callers can diagnose
          // the real problem (previously the original error was lost).
          const loadError = new RegistryError(
            `\u52A0\u8F7D\u81EA\u5B9A\u4E49\u9002\u914D\u5668\u5931\u8D25: ${providerConfig.adapter}`,
            providerName,
            "ADAPTER_LOAD_ERROR"
          );
          loadError.cause = error;
          throw loadError;
        }
      } else if (!this.adapters.has(providerName)) {
        throw new RegistryError(
          `Provider "${providerName}" \u672A\u6CE8\u518C\u4E14\u672A\u6307\u5B9A\u81EA\u5B9A\u4E49\u9002\u914D\u5668`,
          providerName,
          "PROVIDER_NOT_FOUND"
        );
      }
    }
  }
  /**
   * Install the built-in adapters. Invoked lazily on first use; calling it
   * again is a no-op, and user-registered adapters are never overwritten.
   */
  static initializeBuiltIn() {
    if (this.initialized) {
      return;
    }
    const builtInAdapters = createBuiltInAdapters();
    for (const [type, adapter] of builtInAdapters) {
      if (!this.adapters.has(type)) {
        this.adapters.set(type, adapter);
      }
    }
    this.initialized = true;
  }
  /**
   * Reset the registry (primarily for tests): clears all adapters and the
   * initialization flag.
   */
  static reset() {
    this.adapters.clear();
    this.initialized = false;
  }
  /**
   * Number of registered adapters (primarily for tests).
   */
  static get size() {
    this.initializeBuiltIn();
    return this.adapters.size;
  }
};
|
|
1653
|
+
|
|
1654
|
+
// src/config/types.ts
/** Library-wide configuration defaults. */
var CONFIG_DEFAULTS = {
  /** Default request timeout in milliseconds (30 s). */
  timeout: 30000,
  /** Default retry count. */
  retries: 3,
  /** Default feature switches. */
  features: {
    streaming: true,
    reasoning: false
  }
};
|
|
1666
|
+
/** Provider types recognized by the config layer, in registration order. */
var VALID_PROVIDERS = [
  "openrouter", "gemini", "groq", "huggingface",
  "modelscope", "deepseek", "poe", "nova"
];
|
|
1676
|
+
|
|
1677
|
+
// src/utils/config-validator.ts
/** Provider types accepted by ConfigValidator (mirrors config/types.ts). */
var VALID_PROVIDERS2 = [
  "openrouter", "gemini", "groq", "huggingface",
  "modelscope", "deepseek", "poe", "nova"
];
|
|
1688
|
+
var ConfigValidator = class _ConfigValidator {
|
|
1689
|
+
/**
|
|
1690
|
+
* 验证 Provider 配置
|
|
1691
|
+
*
|
|
1692
|
+
* @param config - 要验证的配置对象
|
|
1693
|
+
* @returns 验证结果
|
|
1694
|
+
*
|
|
1695
|
+
* @example
|
|
1696
|
+
* ```ts
|
|
1697
|
+
* const result = ConfigValidator.validate({
|
|
1698
|
+
* provider: 'openrouter',
|
|
1699
|
+
* credentials: { apiKey: 'sk-xxx' }
|
|
1700
|
+
* });
|
|
1701
|
+
*
|
|
1702
|
+
* if (!result.valid) {
|
|
1703
|
+
* console.error(result.errors);
|
|
1704
|
+
* }
|
|
1705
|
+
* ```
|
|
1706
|
+
*/
|
|
1707
|
+
static validate(config) {
|
|
1708
|
+
const errors = [];
|
|
1709
|
+
if (!config || typeof config !== "object") {
|
|
1710
|
+
return {
|
|
1711
|
+
valid: false,
|
|
1712
|
+
errors: [
|
|
1713
|
+
{
|
|
1714
|
+
field: "",
|
|
1715
|
+
message: "\u914D\u7F6E\u5FC5\u987B\u662F\u4E00\u4E2A\u5BF9\u8C61",
|
|
1716
|
+
code: "INVALID_CONFIG_TYPE"
|
|
1717
|
+
}
|
|
1718
|
+
]
|
|
1719
|
+
};
|
|
1720
|
+
}
|
|
1721
|
+
const cfg = config;
|
|
1722
|
+
if (!cfg.provider) {
|
|
1723
|
+
errors.push({
|
|
1724
|
+
field: "provider",
|
|
1725
|
+
message: "provider \u5B57\u6BB5\u662F\u5FC5\u586B\u7684",
|
|
1726
|
+
code: "MISSING_PROVIDER"
|
|
1727
|
+
});
|
|
1728
|
+
} else if (typeof cfg.provider !== "string") {
|
|
1729
|
+
errors.push({
|
|
1730
|
+
field: "provider",
|
|
1731
|
+
message: "provider \u5FC5\u987B\u662F\u5B57\u7B26\u4E32",
|
|
1732
|
+
code: "INVALID_PROVIDER_TYPE"
|
|
1733
|
+
});
|
|
1734
|
+
} else if (!VALID_PROVIDERS2.includes(cfg.provider)) {
|
|
1735
|
+
errors.push({
|
|
1736
|
+
field: "provider",
|
|
1737
|
+
message: `\u65E0\u6548\u7684 provider: ${cfg.provider}\uFF0C\u6709\u6548\u503C\u4E3A: ${VALID_PROVIDERS2.join(", ")}`,
|
|
1738
|
+
code: "INVALID_PROVIDER"
|
|
1739
|
+
});
|
|
1740
|
+
}
|
|
1741
|
+
if (!cfg.credentials) {
|
|
1742
|
+
errors.push({
|
|
1743
|
+
field: "credentials",
|
|
1744
|
+
message: "credentials \u5B57\u6BB5\u662F\u5FC5\u586B\u7684",
|
|
1745
|
+
code: "MISSING_CREDENTIALS"
|
|
1746
|
+
});
|
|
1747
|
+
} else if (typeof cfg.credentials !== "object") {
|
|
1748
|
+
errors.push({
|
|
1749
|
+
field: "credentials",
|
|
1750
|
+
message: "credentials \u5FC5\u987B\u662F\u4E00\u4E2A\u5BF9\u8C61",
|
|
1751
|
+
code: "INVALID_CREDENTIALS_TYPE"
|
|
1752
|
+
});
|
|
1753
|
+
} else {
|
|
1754
|
+
const creds = cfg.credentials;
|
|
1755
|
+
if (!creds.apiKey) {
|
|
1756
|
+
errors.push({
|
|
1757
|
+
field: "credentials.apiKey",
|
|
1758
|
+
message: "apiKey \u5B57\u6BB5\u662F\u5FC5\u586B\u7684",
|
|
1759
|
+
code: "MISSING_API_KEY"
|
|
1760
|
+
});
|
|
1761
|
+
} else if (typeof creds.apiKey !== "string") {
|
|
1762
|
+
errors.push({
|
|
1763
|
+
field: "credentials.apiKey",
|
|
1764
|
+
message: "apiKey \u5FC5\u987B\u662F\u5B57\u7B26\u4E32",
|
|
1765
|
+
code: "INVALID_API_KEY_TYPE"
|
|
1766
|
+
});
|
|
1767
|
+
} else if (creds.apiKey.trim() === "") {
|
|
1768
|
+
errors.push({
|
|
1769
|
+
field: "credentials.apiKey",
|
|
1770
|
+
message: "apiKey \u4E0D\u80FD\u4E3A\u7A7A",
|
|
1771
|
+
code: "EMPTY_API_KEY"
|
|
1772
|
+
});
|
|
1773
|
+
}
|
|
1774
|
+
if (creds.baseUrl !== void 0) {
|
|
1775
|
+
const urlResult = _ConfigValidator.validateUrl(creds.baseUrl);
|
|
1776
|
+
if (!urlResult.valid) {
|
|
1777
|
+
errors.push(
|
|
1778
|
+
...urlResult.errors.map((e) => ({
|
|
1779
|
+
...e,
|
|
1780
|
+
field: "credentials.baseUrl"
|
|
1781
|
+
}))
|
|
1782
|
+
);
|
|
1783
|
+
}
|
|
1784
|
+
}
|
|
1785
|
+
}
|
|
1786
|
+
if (cfg.options !== void 0) {
|
|
1787
|
+
if (typeof cfg.options !== "object") {
|
|
1788
|
+
errors.push({
|
|
1789
|
+
field: "options",
|
|
1790
|
+
message: "options \u5FC5\u987B\u662F\u4E00\u4E2A\u5BF9\u8C61",
|
|
1791
|
+
code: "INVALID_OPTIONS_TYPE"
|
|
1792
|
+
});
|
|
1793
|
+
} else {
|
|
1794
|
+
const opts = cfg.options;
|
|
1795
|
+
if (opts.timeout !== void 0) {
|
|
1796
|
+
if (typeof opts.timeout !== "number" || opts.timeout <= 0) {
|
|
1797
|
+
errors.push({
|
|
1798
|
+
field: "options.timeout",
|
|
1799
|
+
message: "timeout \u5FC5\u987B\u662F\u6B63\u6570",
|
|
1800
|
+
code: "INVALID_TIMEOUT"
|
|
1801
|
+
});
|
|
1802
|
+
}
|
|
1803
|
+
}
|
|
1804
|
+
if (opts.retries !== void 0) {
|
|
1805
|
+
if (typeof opts.retries !== "number" || opts.retries < 0 || !Number.isInteger(opts.retries)) {
|
|
1806
|
+
errors.push({
|
|
1807
|
+
field: "options.retries",
|
|
1808
|
+
message: "retries \u5FC5\u987B\u662F\u975E\u8D1F\u6574\u6570",
|
|
1809
|
+
code: "INVALID_RETRIES"
|
|
1810
|
+
});
|
|
1811
|
+
}
|
|
1812
|
+
}
|
|
1813
|
+
}
|
|
1814
|
+
}
|
|
1815
|
+
return {
|
|
1816
|
+
valid: errors.length === 0,
|
|
1817
|
+
errors
|
|
1818
|
+
};
|
|
1819
|
+
}
|
|
1820
|
+
/**
|
|
1821
|
+
* 验证 API Key 格式
|
|
1822
|
+
* 不同 Provider 可能有不同的 API Key 格式要求
|
|
1823
|
+
*
|
|
1824
|
+
* @param apiKey - API 密钥
|
|
1825
|
+
* @param provider - Provider 类型
|
|
1826
|
+
* @returns 验证结果
|
|
1827
|
+
*/
|
|
1828
|
+
static validateApiKey(apiKey, provider) {
|
|
1829
|
+
const errors = [];
|
|
1830
|
+
if (!apiKey || typeof apiKey !== "string") {
|
|
1831
|
+
errors.push({
|
|
1832
|
+
field: "apiKey",
|
|
1833
|
+
message: "apiKey \u5FC5\u987B\u662F\u975E\u7A7A\u5B57\u7B26\u4E32",
|
|
1834
|
+
code: "INVALID_API_KEY"
|
|
1835
|
+
});
|
|
1836
|
+
return { valid: false, errors };
|
|
1837
|
+
}
|
|
1838
|
+
const trimmed = apiKey.trim();
|
|
1839
|
+
if (trimmed === "") {
|
|
1840
|
+
errors.push({
|
|
1841
|
+
field: "apiKey",
|
|
1842
|
+
message: "apiKey \u4E0D\u80FD\u4E3A\u7A7A",
|
|
1843
|
+
code: "EMPTY_API_KEY"
|
|
1844
|
+
});
|
|
1845
|
+
return { valid: false, errors };
|
|
1846
|
+
}
|
|
1847
|
+
switch (provider) {
|
|
1848
|
+
case "openrouter":
|
|
1849
|
+
if (!trimmed.startsWith("sk-")) {
|
|
1850
|
+
errors.push({
|
|
1851
|
+
field: "apiKey",
|
|
1852
|
+
message: "OpenRouter API Key \u5E94\u4EE5 sk- \u5F00\u5934",
|
|
1853
|
+
code: "INVALID_API_KEY_FORMAT"
|
|
1854
|
+
});
|
|
1855
|
+
}
|
|
1856
|
+
break;
|
|
1857
|
+
case "gemini":
|
|
1858
|
+
if (!trimmed.startsWith("AI")) {
|
|
1859
|
+
errors.push({
|
|
1860
|
+
field: "apiKey",
|
|
1861
|
+
message: "Gemini API Key \u683C\u5F0F\u53EF\u80FD\u4E0D\u6B63\u786E",
|
|
1862
|
+
code: "INVALID_API_KEY_FORMAT"
|
|
1863
|
+
});
|
|
1864
|
+
}
|
|
1865
|
+
break;
|
|
1866
|
+
// 其他 Provider 暂不做特定格式验证
|
|
1867
|
+
default:
|
|
1868
|
+
break;
|
|
1869
|
+
}
|
|
1870
|
+
return {
|
|
1871
|
+
valid: errors.length === 0,
|
|
1872
|
+
errors
|
|
1873
|
+
};
|
|
1874
|
+
}
|
|
1875
|
+
/**
|
|
1876
|
+
* 验证 URL 格式
|
|
1877
|
+
*
|
|
1878
|
+
* @param url - 要验证的 URL
|
|
1879
|
+
* @returns 验证结果
|
|
1880
|
+
*/
|
|
1881
|
+
static validateUrl(url) {
|
|
1882
|
+
const errors = [];
|
|
1883
|
+
if (typeof url !== "string") {
|
|
1884
|
+
errors.push({
|
|
1885
|
+
field: "url",
|
|
1886
|
+
message: "URL \u5FC5\u987B\u662F\u5B57\u7B26\u4E32",
|
|
1887
|
+
code: "INVALID_URL_TYPE"
|
|
1888
|
+
});
|
|
1889
|
+
return { valid: false, errors };
|
|
1890
|
+
}
|
|
1891
|
+
const trimmed = url.trim();
|
|
1892
|
+
if (trimmed === "") {
|
|
1893
|
+
errors.push({
|
|
1894
|
+
field: "url",
|
|
1895
|
+
message: "URL \u4E0D\u80FD\u4E3A\u7A7A",
|
|
1896
|
+
code: "EMPTY_URL"
|
|
1897
|
+
});
|
|
1898
|
+
return { valid: false, errors };
|
|
1899
|
+
}
|
|
1900
|
+
try {
|
|
1901
|
+
const parsed = new URL(trimmed);
|
|
1902
|
+
if (!["http:", "https:"].includes(parsed.protocol)) {
|
|
1903
|
+
errors.push({
|
|
1904
|
+
field: "url",
|
|
1905
|
+
message: "URL \u5FC5\u987B\u4F7F\u7528 http \u6216 https \u534F\u8BAE",
|
|
1906
|
+
code: "INVALID_URL_PROTOCOL"
|
|
1907
|
+
});
|
|
1908
|
+
}
|
|
1909
|
+
} catch {
|
|
1910
|
+
errors.push({
|
|
1911
|
+
field: "url",
|
|
1912
|
+
message: "URL \u683C\u5F0F\u65E0\u6548",
|
|
1913
|
+
code: "INVALID_URL_FORMAT"
|
|
1914
|
+
});
|
|
1915
|
+
}
|
|
1916
|
+
return {
|
|
1917
|
+
valid: errors.length === 0,
|
|
1918
|
+
errors
|
|
1919
|
+
};
|
|
1920
|
+
}
|
|
1921
|
+
};
|
|
1922
|
+
|
|
1923
|
+
// src/config/config-manager.ts
|
|
1924
|
+
var ConfigManager = class _ConfigManager {
|
|
1925
|
+
/**
|
|
1926
|
+
* 验证配置
|
|
1927
|
+
* 检查配置是否符合 UnifiedProviderConfig 格式要求
|
|
1928
|
+
*
|
|
1929
|
+
* @param config - 要验证的配置对象
|
|
1930
|
+
* @returns 验证结果
|
|
1931
|
+
*
|
|
1932
|
+
* @example
|
|
1933
|
+
* ```ts
|
|
1934
|
+
* const result = ConfigManager.validate({
|
|
1935
|
+
* provider: 'openrouter',
|
|
1936
|
+
* credentials: { apiKey: 'sk-xxx' }
|
|
1937
|
+
* });
|
|
1938
|
+
*
|
|
1939
|
+
* if (!result.valid) {
|
|
1940
|
+
* console.error(result.errors);
|
|
1941
|
+
* }
|
|
1942
|
+
* ```
|
|
1943
|
+
*/
|
|
1944
|
+
static validate(config) {
|
|
1945
|
+
return ConfigValidator.validate(config);
|
|
1946
|
+
}
|
|
1947
|
+
/**
|
|
1948
|
+
* 应用默认值
|
|
1949
|
+
* 为缺失的可选字段填充默认值
|
|
1950
|
+
*
|
|
1951
|
+
* @param config - 部分配置对象
|
|
1952
|
+
* @returns 填充默认值后的完整配置
|
|
1953
|
+
*
|
|
1954
|
+
* @example
|
|
1955
|
+
* ```ts
|
|
1956
|
+
* const fullConfig = ConfigManager.applyDefaults({
|
|
1957
|
+
* provider: 'openrouter',
|
|
1958
|
+
* credentials: { apiKey: 'sk-xxx' }
|
|
1959
|
+
* });
|
|
1960
|
+
* // fullConfig.options.timeout === 30000
|
|
1961
|
+
* // fullConfig.options.retries === 3
|
|
1962
|
+
* ```
|
|
1963
|
+
*/
|
|
1964
|
+
static applyDefaults(config) {
|
|
1965
|
+
if (!config.provider || !config.credentials?.apiKey) {
|
|
1966
|
+
throw new Error("\u914D\u7F6E\u7F3A\u5C11\u5FC5\u586B\u5B57\u6BB5: provider \u548C credentials.apiKey");
|
|
1967
|
+
}
|
|
1968
|
+
return {
|
|
1969
|
+
provider: config.provider,
|
|
1970
|
+
adapter: config.adapter,
|
|
1971
|
+
credentials: {
|
|
1972
|
+
apiKey: config.credentials.apiKey,
|
|
1973
|
+
baseUrl: config.credentials.baseUrl
|
|
1974
|
+
},
|
|
1975
|
+
options: {
|
|
1976
|
+
timeout: config.options?.timeout ?? CONFIG_DEFAULTS.timeout,
|
|
1977
|
+
retries: config.options?.retries ?? CONFIG_DEFAULTS.retries,
|
|
1978
|
+
headers: config.options?.headers ?? {}
|
|
1979
|
+
},
|
|
1980
|
+
features: {
|
|
1981
|
+
streaming: config.features?.streaming ?? CONFIG_DEFAULTS.features.streaming,
|
|
1982
|
+
reasoning: config.features?.reasoning ?? CONFIG_DEFAULTS.features.reasoning
|
|
1983
|
+
}
|
|
1984
|
+
};
|
|
1985
|
+
}
|
|
1986
|
+
/**
|
|
1987
|
+
* 合并环境变量
|
|
1988
|
+
* 将 ${ENV_VAR} 格式的占位符替换为实际环境变量值
|
|
1989
|
+
*
|
|
1990
|
+
* @param config - 包含环境变量占位符的配置
|
|
1991
|
+
* @returns 替换后的配置
|
|
1992
|
+
*
|
|
1993
|
+
* @example
|
|
1994
|
+
* ```ts
|
|
1995
|
+
* // 假设 process.env.OPENROUTER_API_KEY = 'sk-xxx'
|
|
1996
|
+
* const config = ConfigManager.mergeWithEnv({
|
|
1997
|
+
* provider: 'openrouter',
|
|
1998
|
+
* credentials: { apiKey: '${OPENROUTER_API_KEY}' }
|
|
1999
|
+
* });
|
|
2000
|
+
* // config.credentials.apiKey === 'sk-xxx'
|
|
2001
|
+
* ```
|
|
2002
|
+
*/
|
|
2003
|
+
static mergeWithEnv(config) {
|
|
2004
|
+
const result = JSON.parse(JSON.stringify(config));
|
|
2005
|
+
const replaceEnvVars = (obj) => {
|
|
2006
|
+
for (const key of Object.keys(obj)) {
|
|
2007
|
+
const value = obj[key];
|
|
2008
|
+
if (typeof value === "string") {
|
|
2009
|
+
obj[key] = _ConfigManager.replaceEnvPlaceholders(value);
|
|
2010
|
+
} else if (value && typeof value === "object" && !Array.isArray(value)) {
|
|
2011
|
+
replaceEnvVars(value);
|
|
2012
|
+
}
|
|
2013
|
+
}
|
|
2014
|
+
};
|
|
2015
|
+
replaceEnvVars(result);
|
|
2016
|
+
return result;
|
|
2017
|
+
}
|
|
2018
|
+
/**
|
|
2019
|
+
* 替换字符串中的环境变量占位符
|
|
2020
|
+
* 支持 ${ENV_VAR} 格式
|
|
2021
|
+
*
|
|
2022
|
+
* @param str - 包含占位符的字符串
|
|
2023
|
+
* @returns 替换后的字符串
|
|
2024
|
+
*/
|
|
2025
|
+
static replaceEnvPlaceholders(str) {
|
|
2026
|
+
const envVarPattern = /\$\{([^}]+)\}/g;
|
|
2027
|
+
return str.replace(envVarPattern, (match, envVarName) => {
|
|
2028
|
+
const envValue = process.env[envVarName];
|
|
2029
|
+
return envValue !== void 0 ? envValue : match;
|
|
2030
|
+
});
|
|
2031
|
+
}
|
|
2032
|
+
/**
|
|
2033
|
+
* 从旧格式配置转换为新格式
|
|
2034
|
+
* 保持向后兼容性
|
|
2035
|
+
*
|
|
2036
|
+
* @param config - 旧格式的 Provider 配置
|
|
2037
|
+
* @returns 新格式的统一配置
|
|
2038
|
+
*
|
|
2039
|
+
* @example
|
|
2040
|
+
* ```ts
|
|
2041
|
+
* const newConfig = ConfigManager.fromLegacyConfig({
|
|
2042
|
+
* provider: 'openrouter',
|
|
2043
|
+
* apiKey: 'sk-xxx',
|
|
2044
|
+
* baseUrl: 'https://api.example.com'
|
|
2045
|
+
* });
|
|
2046
|
+
* // newConfig.credentials.apiKey === 'sk-xxx'
|
|
2047
|
+
* // newConfig.credentials.baseUrl === 'https://api.example.com'
|
|
2048
|
+
* ```
|
|
2049
|
+
*/
|
|
2050
|
+
static fromLegacyConfig(config) {
|
|
2051
|
+
if (!config.provider) {
|
|
2052
|
+
throw new Error("\u65E7\u683C\u5F0F\u914D\u7F6E\u7F3A\u5C11 provider \u5B57\u6BB5");
|
|
2053
|
+
}
|
|
2054
|
+
if (!config.apiKey) {
|
|
2055
|
+
throw new Error("\u65E7\u683C\u5F0F\u914D\u7F6E\u7F3A\u5C11 apiKey \u5B57\u6BB5");
|
|
2056
|
+
}
|
|
2057
|
+
if (!VALID_PROVIDERS.includes(config.provider)) {
|
|
2058
|
+
throw new Error(
|
|
2059
|
+
`\u65E0\u6548\u7684 provider: ${config.provider}\uFF0C\u6709\u6548\u503C\u4E3A: ${VALID_PROVIDERS.join(", ")}`
|
|
2060
|
+
);
|
|
2061
|
+
}
|
|
2062
|
+
return _ConfigManager.applyDefaults({
|
|
2063
|
+
provider: config.provider,
|
|
2064
|
+
credentials: {
|
|
2065
|
+
apiKey: config.apiKey,
|
|
2066
|
+
baseUrl: config.baseUrl
|
|
2067
|
+
}
|
|
2068
|
+
});
|
|
2069
|
+
}
|
|
2070
|
+
/**
|
|
2071
|
+
* 检查配置是否为旧格式
|
|
2072
|
+
*
|
|
2073
|
+
* @param config - 要检查的配置对象
|
|
2074
|
+
* @returns 是否为旧格式
|
|
2075
|
+
*/
|
|
2076
|
+
static isLegacyConfig(config) {
|
|
2077
|
+
if (!config || typeof config !== "object") {
|
|
2078
|
+
return false;
|
|
2079
|
+
}
|
|
2080
|
+
const cfg = config;
|
|
2081
|
+
return typeof cfg.provider === "string" && typeof cfg.apiKey === "string" && cfg.credentials === void 0;
|
|
2082
|
+
}
|
|
2083
|
+
/**
|
|
2084
|
+
* 智能转换配置
|
|
2085
|
+
* 自动检测配置格式并转换为统一格式
|
|
2086
|
+
*
|
|
2087
|
+
* @param config - 任意格式的配置
|
|
2088
|
+
* @returns 统一格式的配置
|
|
2089
|
+
*/
|
|
2090
|
+
static normalize(config) {
|
|
2091
|
+
if (_ConfigManager.isLegacyConfig(config)) {
|
|
2092
|
+
return _ConfigManager.fromLegacyConfig(config);
|
|
2093
|
+
}
|
|
2094
|
+
return _ConfigManager.applyDefaults(
|
|
2095
|
+
config
|
|
2096
|
+
);
|
|
2097
|
+
}
|
|
2098
|
+
/**
|
|
2099
|
+
* 获取指定 Provider 的默认基础 URL
|
|
2100
|
+
*
|
|
2101
|
+
* @param provider - Provider 类型
|
|
2102
|
+
* @returns 默认基础 URL
|
|
2103
|
+
*/
|
|
2104
|
+
static getDefaultBaseUrl(provider) {
|
|
2105
|
+
const defaultUrls = {
|
|
2106
|
+
openrouter: "https://openrouter.ai/api/v1",
|
|
2107
|
+
gemini: "https://generativelanguage.googleapis.com/v1beta",
|
|
2108
|
+
groq: "https://api.groq.com/openai/v1",
|
|
2109
|
+
huggingface: "https://api-inference.huggingface.co",
|
|
2110
|
+
modelscope: "https://dashscope.aliyuncs.com/compatible-mode/v1",
|
|
2111
|
+
deepseek: "https://api.deepseek.com/v1",
|
|
2112
|
+
poe: "https://api.poe.com/bot",
|
|
2113
|
+
nova: "https://bedrock-runtime.us-east-1.amazonaws.com"
|
|
2114
|
+
};
|
|
2115
|
+
return defaultUrls[provider];
|
|
2116
|
+
}
|
|
2117
|
+
};
|
|
2118
|
+
|
|
2119
|
+
// src/providers/__factory__.ts
|
|
2120
|
+
var AdapterBasedProvider = class {
|
|
2121
|
+
constructor(adapter, apiKey, baseUrl) {
|
|
2122
|
+
this.adapter = adapter;
|
|
2123
|
+
this.apiKey = apiKey;
|
|
2124
|
+
this.baseUrl = baseUrl;
|
|
2125
|
+
this.name = adapter.name;
|
|
2126
|
+
}
|
|
2127
|
+
name;
|
|
2128
|
+
/**
|
|
2129
|
+
* 获取客户端实例
|
|
2130
|
+
*/
|
|
2131
|
+
getClient() {
|
|
2132
|
+
return this.adapter.createClient({
|
|
2133
|
+
apiKey: this.apiKey,
|
|
2134
|
+
baseUrl: this.baseUrl ?? this.adapter.defaultBaseUrl
|
|
2135
|
+
});
|
|
2136
|
+
}
|
|
2137
|
+
/**
|
|
2138
|
+
* 发送聊天请求(非流式)
|
|
2139
|
+
*/
|
|
2140
|
+
async chat(options) {
|
|
2141
|
+
const client = this.getClient();
|
|
2142
|
+
const baseUrl = this.baseUrl ?? this.adapter.defaultBaseUrl;
|
|
2143
|
+
const endpoint = this.adapter.getEndpointUrl(baseUrl);
|
|
2144
|
+
const endpointPath = endpoint.replace(baseUrl, "");
|
|
2145
|
+
const body = this.adapter.buildChatRequest(options, false);
|
|
2146
|
+
const response = await client.chat(endpointPath, body);
|
|
2147
|
+
return this.adapter.parseChatResponse(response, options.model);
|
|
2148
|
+
}
|
|
2149
|
+
/**
|
|
2150
|
+
* 发送流式聊天请求
|
|
2151
|
+
*/
|
|
2152
|
+
async *chatStream(options) {
|
|
2153
|
+
const client = this.getClient();
|
|
2154
|
+
const baseUrl = this.baseUrl ?? this.adapter.defaultBaseUrl;
|
|
2155
|
+
const endpoint = this.adapter.getEndpointUrl(baseUrl);
|
|
2156
|
+
const endpointPath = endpoint.replace(baseUrl, "");
|
|
2157
|
+
const body = this.adapter.buildChatRequest(options, true);
|
|
2158
|
+
const response = await client.chatStream(endpointPath, body);
|
|
2159
|
+
const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
|
|
2160
|
+
yield* StreamProcessor2.processStream(
|
|
2161
|
+
response,
|
|
2162
|
+
(delta) => this.adapter.extractStreamChunk(delta)
|
|
2163
|
+
);
|
|
2164
|
+
}
|
|
2165
|
+
/**
|
|
2166
|
+
* 简单对话:单轮问答
|
|
2167
|
+
*/
|
|
2168
|
+
async ask(model, question, options) {
|
|
2169
|
+
const result = await this.chat({
|
|
2170
|
+
model,
|
|
2171
|
+
messages: [{ role: "user", content: question }],
|
|
2172
|
+
...options
|
|
2173
|
+
});
|
|
2174
|
+
return result.content;
|
|
2175
|
+
}
|
|
2176
|
+
/**
|
|
2177
|
+
* 带系统提示的对话
|
|
2178
|
+
*/
|
|
2179
|
+
async askWithSystem(model, systemPrompt, userMessage, options) {
|
|
2180
|
+
const result = await this.chat({
|
|
2181
|
+
model,
|
|
2182
|
+
messages: [
|
|
2183
|
+
{ role: "system", content: systemPrompt },
|
|
2184
|
+
{ role: "user", content: userMessage }
|
|
2185
|
+
],
|
|
2186
|
+
...options
|
|
2187
|
+
});
|
|
2188
|
+
return result.content;
|
|
2189
|
+
}
|
|
2190
|
+
};
|
|
2191
|
+
function createProvider(config) {
|
|
2192
|
+
const unifiedConfig = ConfigManager.fromLegacyConfig(config);
|
|
2193
|
+
const adapter = ProviderRegistry.getAdapter(unifiedConfig.provider);
|
|
2194
|
+
return new AdapterBasedProvider(
|
|
2195
|
+
adapter,
|
|
2196
|
+
unifiedConfig.credentials.apiKey,
|
|
2197
|
+
unifiedConfig.credentials.baseUrl
|
|
2198
|
+
);
|
|
2199
|
+
}
|
|
2200
|
+
var ai = {
|
|
2201
|
+
openrouter: (apiKey, baseUrl) => createProvider({ provider: "openrouter", apiKey, baseUrl }),
|
|
2202
|
+
gemini: (apiKey, baseUrl) => createProvider({ provider: "gemini", apiKey, baseUrl }),
|
|
2203
|
+
groq: (apiKey, baseUrl) => createProvider({ provider: "groq", apiKey, baseUrl }),
|
|
2204
|
+
huggingface: (apiKey, baseUrl) => createProvider({ provider: "huggingface", apiKey, baseUrl }),
|
|
2205
|
+
modelscope: (apiKey, baseUrl) => createProvider({ provider: "modelscope", apiKey, baseUrl }),
|
|
2206
|
+
deepseek: (apiKey, baseUrl) => createProvider({ provider: "deepseek", apiKey, baseUrl }),
|
|
2207
|
+
poe: (apiKey, baseUrl) => createProvider({ provider: "poe", apiKey, baseUrl }),
|
|
2208
|
+
nova: (apiKey, baseUrl) => createProvider({ provider: "nova", apiKey, baseUrl })
|
|
2209
|
+
};
|
|
3
2210
|
|
|
4
2211
|
// src/providers/__model-detection__.ts
|
|
5
2212
|
var THINKING_MODEL_PATTERNS = [
|
|
@@ -301,7 +2508,11 @@ var BaseProvider = class {
|
|
|
301
2508
|
* 3. 如果 content 为空,智能降级(提取结论或返回 reasoning)
|
|
302
2509
|
*/
|
|
303
2510
|
async ask(model, question, options) {
|
|
304
|
-
const {
|
|
2511
|
+
const {
|
|
2512
|
+
fallback,
|
|
2513
|
+
autoAdjust = this.autoAdjustEnabled,
|
|
2514
|
+
...chatOptions
|
|
2515
|
+
} = options ?? {};
|
|
305
2516
|
let finalOptions = {
|
|
306
2517
|
model,
|
|
307
2518
|
messages: [{ role: "user", content: question }],
|
|
@@ -327,7 +2538,11 @@ var BaseProvider = class {
|
|
|
327
2538
|
* 3. 如果 content 为空,智能降级(提取结论或返回 reasoning)
|
|
328
2539
|
*/
|
|
329
2540
|
async askWithSystem(model, systemPrompt, userMessage, options) {
|
|
330
|
-
const {
|
|
2541
|
+
const {
|
|
2542
|
+
fallback,
|
|
2543
|
+
autoAdjust = this.autoAdjustEnabled,
|
|
2544
|
+
...chatOptions
|
|
2545
|
+
} = options ?? {};
|
|
331
2546
|
let finalOptions = {
|
|
332
2547
|
model,
|
|
333
2548
|
messages: [
|
|
@@ -359,1430 +2574,794 @@ var BaseProvider = class {
|
|
|
359
2574
|
* - 'fast': 快速回答(关闭思考)
|
|
360
2575
|
*/
|
|
361
2576
|
async askWithScenario(model, question, scenario = "simple", options) {
|
|
362
|
-
const recommendedConfig = ModelDetection.getRecommendedConfig(
|
|
363
|
-
return this.ask(model, question, {
|
|
364
|
-
...recommendedConfig,
|
|
365
|
-
...options
|
|
366
|
-
});
|
|
367
|
-
}
|
|
368
|
-
};
|
|
369
|
-
|
|
370
|
-
// src/providers/__types__.ts
|
|
371
|
-
var EFFORT_TOKEN_MAP = {
|
|
372
|
-
off: 0,
|
|
373
|
-
low: 1024,
|
|
374
|
-
medium: 4096,
|
|
375
|
-
high: 16384
|
|
376
|
-
};
|
|
377
|
-
|
|
378
|
-
// src/providers/openrouter.ts
|
|
379
|
-
function extractTextContent(content) {
|
|
380
|
-
if (typeof content === "string") {
|
|
381
|
-
return content;
|
|
382
|
-
}
|
|
383
|
-
if (Array.isArray(content)) {
|
|
384
|
-
return content.filter(
|
|
385
|
-
(item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
|
|
386
|
-
).map((item) => item.text).join("");
|
|
387
|
-
}
|
|
388
|
-
return "";
|
|
389
|
-
}
|
|
390
|
-
function buildReasoningParam(config) {
|
|
391
|
-
if (!config) return void 0;
|
|
392
|
-
if (config.effort === "off") return void 0;
|
|
393
|
-
const param = {};
|
|
394
|
-
if (config.effort) {
|
|
395
|
-
param.effort = config.effort;
|
|
396
|
-
}
|
|
397
|
-
if (config.budgetTokens !== void 0) {
|
|
398
|
-
param.max_tokens = config.budgetTokens;
|
|
399
|
-
} else if (config.effort && EFFORT_TOKEN_MAP[config.effort]) {
|
|
400
|
-
param.max_tokens = EFFORT_TOKEN_MAP[config.effort];
|
|
401
|
-
}
|
|
402
|
-
if (config.exclude !== void 0) {
|
|
403
|
-
param.exclude = config.exclude;
|
|
404
|
-
}
|
|
405
|
-
return Object.keys(param).length > 0 ? param : void 0;
|
|
406
|
-
}
|
|
407
|
-
var OpenRouterProvider = class extends BaseProvider {
|
|
408
|
-
name = "openrouter";
|
|
409
|
-
client;
|
|
410
|
-
constructor(apiKey) {
|
|
411
|
-
super();
|
|
412
|
-
this.client = new OpenRouter({ apiKey });
|
|
413
|
-
}
|
|
414
|
-
/**
|
|
415
|
-
* 发送聊天请求(非流式)
|
|
416
|
-
*/
|
|
417
|
-
async chat(options) {
|
|
418
|
-
const {
|
|
419
|
-
model,
|
|
420
|
-
messages,
|
|
421
|
-
temperature = 0.7,
|
|
422
|
-
maxTokens,
|
|
423
|
-
reasoning
|
|
424
|
-
} = options;
|
|
425
|
-
const reasoningParam = buildReasoningParam(reasoning);
|
|
426
|
-
const requestParams = {
|
|
427
|
-
model,
|
|
428
|
-
messages,
|
|
429
|
-
temperature,
|
|
430
|
-
maxTokens,
|
|
431
|
-
stream: false
|
|
432
|
-
};
|
|
433
|
-
if (reasoningParam) {
|
|
434
|
-
requestParams.reasoning = reasoningParam;
|
|
435
|
-
}
|
|
436
|
-
const result = await this.client.chat.send(requestParams);
|
|
437
|
-
const choice = result.choices[0];
|
|
438
|
-
if (!choice) {
|
|
439
|
-
throw new Error("No response from model");
|
|
440
|
-
}
|
|
441
|
-
const msg = choice.message;
|
|
442
|
-
const reasoningContent = msg.reasoning_content ?? msg.reasoning ?? null;
|
|
443
|
-
return {
|
|
444
|
-
content: extractTextContent(msg.content),
|
|
445
|
-
reasoning: reasoningContent ? extractTextContent(reasoningContent) : null,
|
|
446
|
-
model: result.model,
|
|
447
|
-
usage: {
|
|
448
|
-
promptTokens: result.usage?.promptTokens ?? 0,
|
|
449
|
-
completionTokens: result.usage?.completionTokens ?? 0,
|
|
450
|
-
totalTokens: result.usage?.totalTokens ?? 0
|
|
451
|
-
},
|
|
452
|
-
finishReason: choice.finishReason
|
|
453
|
-
};
|
|
454
|
-
}
|
|
455
|
-
/**
|
|
456
|
-
* 发送流式聊天请求
|
|
457
|
-
*/
|
|
458
|
-
async *chatStream(options) {
|
|
459
|
-
const {
|
|
2577
|
+
const recommendedConfig = ModelDetection.getRecommendedConfig(
|
|
460
2578
|
model,
|
|
461
|
-
|
|
462
|
-
temperature = 0.7,
|
|
463
|
-
maxTokens,
|
|
464
|
-
reasoning
|
|
465
|
-
} = options;
|
|
466
|
-
const reasoningParam = buildReasoningParam(reasoning);
|
|
467
|
-
const requestParams = {
|
|
468
|
-
model,
|
|
469
|
-
messages,
|
|
470
|
-
temperature,
|
|
471
|
-
maxTokens,
|
|
472
|
-
stream: true
|
|
473
|
-
};
|
|
474
|
-
if (reasoningParam) {
|
|
475
|
-
requestParams.reasoning = reasoningParam;
|
|
476
|
-
}
|
|
477
|
-
const stream = await this.client.chat.send(
|
|
478
|
-
requestParams
|
|
2579
|
+
scenario
|
|
479
2580
|
);
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
if (reasoningContent) {
|
|
485
|
-
yield { type: "reasoning", text: extractTextContent(reasoningContent) };
|
|
486
|
-
}
|
|
487
|
-
if (delta.content) {
|
|
488
|
-
yield { type: "content", text: extractTextContent(delta.content) };
|
|
489
|
-
}
|
|
490
|
-
}
|
|
491
|
-
}
|
|
492
|
-
/**
|
|
493
|
-
* 获取可用模型列表
|
|
494
|
-
*/
|
|
495
|
-
async listModels() {
|
|
496
|
-
const result = await this.client.models.list();
|
|
497
|
-
return (result.data ?? []).map((m) => ({
|
|
498
|
-
id: m.id,
|
|
499
|
-
canonicalSlug: m.canonical_slug ?? m.id,
|
|
500
|
-
name: m.name,
|
|
501
|
-
description: m.description ?? "",
|
|
502
|
-
created: m.created ?? 0,
|
|
503
|
-
pricing: {
|
|
504
|
-
prompt: m.pricing?.prompt ?? "0",
|
|
505
|
-
completion: m.pricing?.completion ?? "0",
|
|
506
|
-
request: m.pricing?.request ?? "0",
|
|
507
|
-
image: m.pricing?.image ?? "0"
|
|
508
|
-
},
|
|
509
|
-
contextLength: m.context_length ?? 0,
|
|
510
|
-
architecture: {
|
|
511
|
-
modality: m.architecture?.modality ?? "",
|
|
512
|
-
inputModalities: m.architecture?.input_modalities ?? [],
|
|
513
|
-
outputModalities: m.architecture?.output_modalities ?? [],
|
|
514
|
-
tokenizer: m.architecture?.tokenizer ?? "",
|
|
515
|
-
instructType: m.architecture?.instruct_type ?? ""
|
|
516
|
-
},
|
|
517
|
-
supportedParameters: m.supported_parameters ?? []
|
|
518
|
-
}));
|
|
519
|
-
}
|
|
520
|
-
};
|
|
521
|
-
|
|
522
|
-
// src/providers/gemini.ts
|
|
523
|
-
var BASE_URL = "https://generativelanguage.googleapis.com/v1beta/openai";
|
|
524
|
-
function extractTextContent2(content) {
|
|
525
|
-
if (typeof content === "string") {
|
|
526
|
-
return content;
|
|
527
|
-
}
|
|
528
|
-
if (Array.isArray(content)) {
|
|
529
|
-
return content.filter(
|
|
530
|
-
(item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
|
|
531
|
-
).map((item) => item.text).join("");
|
|
532
|
-
}
|
|
533
|
-
return "";
|
|
534
|
-
}
|
|
535
|
-
var GeminiProvider = class extends BaseProvider {
|
|
536
|
-
name = "gemini";
|
|
537
|
-
apiKey;
|
|
538
|
-
baseUrl;
|
|
539
|
-
constructor(config) {
|
|
540
|
-
super();
|
|
541
|
-
if (typeof config === "string") {
|
|
542
|
-
this.apiKey = config;
|
|
543
|
-
this.baseUrl = BASE_URL;
|
|
544
|
-
} else {
|
|
545
|
-
this.apiKey = config.apiKey;
|
|
546
|
-
this.baseUrl = config.baseUrl ?? BASE_URL;
|
|
547
|
-
}
|
|
548
|
-
}
|
|
549
|
-
/**
|
|
550
|
-
* 发送聊天请求(非流式)
|
|
551
|
-
*/
|
|
552
|
-
async chat(options) {
|
|
553
|
-
const {
|
|
554
|
-
model,
|
|
555
|
-
messages,
|
|
556
|
-
temperature = 0.7,
|
|
557
|
-
maxTokens,
|
|
558
|
-
reasoning
|
|
559
|
-
} = options;
|
|
560
|
-
const body = {
|
|
561
|
-
model,
|
|
562
|
-
messages,
|
|
563
|
-
temperature,
|
|
564
|
-
stream: false
|
|
565
|
-
};
|
|
566
|
-
if (maxTokens) {
|
|
567
|
-
body.max_tokens = maxTokens;
|
|
568
|
-
}
|
|
569
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
570
|
-
body.reasoning_effort = reasoning.effort;
|
|
571
|
-
}
|
|
572
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
573
|
-
method: "POST",
|
|
574
|
-
headers: {
|
|
575
|
-
"Content-Type": "application/json",
|
|
576
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
577
|
-
},
|
|
578
|
-
body: JSON.stringify(body)
|
|
579
|
-
});
|
|
580
|
-
if (!response.ok) {
|
|
581
|
-
const error = await response.text();
|
|
582
|
-
throw new Error(`Gemini API error: ${response.status} ${error}`);
|
|
583
|
-
}
|
|
584
|
-
const result = await response.json();
|
|
585
|
-
const choice = result.choices?.[0];
|
|
586
|
-
if (!choice) {
|
|
587
|
-
throw new Error("No response from model");
|
|
588
|
-
}
|
|
589
|
-
const msg = choice.message;
|
|
590
|
-
const reasoningContent = msg?.reasoning_content ?? null;
|
|
591
|
-
return {
|
|
592
|
-
content: extractTextContent2(msg?.content),
|
|
593
|
-
reasoning: reasoningContent ? extractTextContent2(reasoningContent) : null,
|
|
594
|
-
model: result.model ?? model,
|
|
595
|
-
usage: {
|
|
596
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
597
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
598
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
599
|
-
},
|
|
600
|
-
finishReason: choice.finish_reason ?? null
|
|
601
|
-
};
|
|
602
|
-
}
|
|
603
|
-
/**
|
|
604
|
-
* 发送流式聊天请求
|
|
605
|
-
*/
|
|
606
|
-
async *chatStream(options) {
|
|
607
|
-
const {
|
|
608
|
-
model,
|
|
609
|
-
messages,
|
|
610
|
-
temperature = 0.7,
|
|
611
|
-
maxTokens,
|
|
612
|
-
reasoning
|
|
613
|
-
} = options;
|
|
614
|
-
const body = {
|
|
615
|
-
model,
|
|
616
|
-
messages,
|
|
617
|
-
temperature,
|
|
618
|
-
stream: true
|
|
619
|
-
};
|
|
620
|
-
if (maxTokens) {
|
|
621
|
-
body.max_tokens = maxTokens;
|
|
622
|
-
}
|
|
623
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
624
|
-
body.reasoning_effort = reasoning.effort;
|
|
625
|
-
}
|
|
626
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
627
|
-
method: "POST",
|
|
628
|
-
headers: {
|
|
629
|
-
"Content-Type": "application/json",
|
|
630
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
631
|
-
},
|
|
632
|
-
body: JSON.stringify(body)
|
|
633
|
-
});
|
|
634
|
-
if (!response.ok) {
|
|
635
|
-
const error = await response.text();
|
|
636
|
-
throw new Error(`Gemini API error: ${response.status} ${error}`);
|
|
637
|
-
}
|
|
638
|
-
const reader = response.body?.getReader();
|
|
639
|
-
if (!reader) {
|
|
640
|
-
throw new Error("No response body");
|
|
641
|
-
}
|
|
642
|
-
const decoder = new TextDecoder();
|
|
643
|
-
let buffer = "";
|
|
644
|
-
try {
|
|
645
|
-
while (true) {
|
|
646
|
-
const { done, value } = await reader.read();
|
|
647
|
-
if (done) break;
|
|
648
|
-
buffer += decoder.decode(value, { stream: true });
|
|
649
|
-
const lines = buffer.split("\n");
|
|
650
|
-
buffer = lines.pop() ?? "";
|
|
651
|
-
for (const line of lines) {
|
|
652
|
-
const trimmed = line.trim();
|
|
653
|
-
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
654
|
-
if (!trimmed.startsWith("data: ")) continue;
|
|
655
|
-
try {
|
|
656
|
-
const data = JSON.parse(trimmed.slice(6));
|
|
657
|
-
const delta = data.choices?.[0]?.delta;
|
|
658
|
-
if (!delta) continue;
|
|
659
|
-
const thought = delta.reasoning_content ?? delta.thoughts;
|
|
660
|
-
if (thought) {
|
|
661
|
-
yield {
|
|
662
|
-
type: "reasoning",
|
|
663
|
-
text: extractTextContent2(thought)
|
|
664
|
-
};
|
|
665
|
-
}
|
|
666
|
-
if (delta.content) {
|
|
667
|
-
yield {
|
|
668
|
-
type: "content",
|
|
669
|
-
text: extractTextContent2(delta.content)
|
|
670
|
-
};
|
|
671
|
-
}
|
|
672
|
-
} catch {
|
|
673
|
-
}
|
|
674
|
-
}
|
|
675
|
-
}
|
|
676
|
-
} finally {
|
|
677
|
-
reader.releaseLock();
|
|
678
|
-
}
|
|
679
|
-
}
|
|
680
|
-
};
|
|
681
|
-
|
|
682
|
-
// src/providers/groq.ts
|
|
683
|
-
var BASE_URL2 = "https://api.groq.com/openai/v1";
|
|
684
|
-
function extractTextContent3(content) {
|
|
685
|
-
if (typeof content === "string") {
|
|
686
|
-
return content;
|
|
687
|
-
}
|
|
688
|
-
if (Array.isArray(content)) {
|
|
689
|
-
return content.filter(
|
|
690
|
-
(item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
|
|
691
|
-
).map((item) => item.text).join("");
|
|
692
|
-
}
|
|
693
|
-
return "";
|
|
694
|
-
}
|
|
695
|
-
var GroqProvider = class extends BaseProvider {
|
|
696
|
-
name = "groq";
|
|
697
|
-
apiKey;
|
|
698
|
-
baseUrl;
|
|
699
|
-
constructor(config) {
|
|
700
|
-
super();
|
|
701
|
-
if (typeof config === "string") {
|
|
702
|
-
this.apiKey = config;
|
|
703
|
-
this.baseUrl = BASE_URL2;
|
|
704
|
-
} else {
|
|
705
|
-
this.apiKey = config.apiKey;
|
|
706
|
-
this.baseUrl = config.baseUrl ?? BASE_URL2;
|
|
707
|
-
}
|
|
708
|
-
}
|
|
709
|
-
/**
|
|
710
|
-
* 发送聊天请求(非流式)
|
|
711
|
-
*/
|
|
712
|
-
async chat(options) {
|
|
713
|
-
const { model, messages, temperature = 1, maxTokens, reasoning } = options;
|
|
714
|
-
const body = {
|
|
715
|
-
model,
|
|
716
|
-
messages,
|
|
717
|
-
temperature,
|
|
718
|
-
stream: false,
|
|
719
|
-
top_p: 1
|
|
720
|
-
};
|
|
721
|
-
if (maxTokens) {
|
|
722
|
-
body.max_completion_tokens = maxTokens;
|
|
723
|
-
}
|
|
724
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
725
|
-
body.reasoning_format = "parsed";
|
|
726
|
-
} else if (reasoning?.effort === "off") {
|
|
727
|
-
body.include_reasoning = false;
|
|
728
|
-
}
|
|
729
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
730
|
-
method: "POST",
|
|
731
|
-
headers: {
|
|
732
|
-
"Content-Type": "application/json",
|
|
733
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
734
|
-
},
|
|
735
|
-
body: JSON.stringify(body)
|
|
736
|
-
});
|
|
737
|
-
if (!response.ok) {
|
|
738
|
-
const error = await response.text();
|
|
739
|
-
throw new Error(`Groq API error: ${response.status} ${error}`);
|
|
740
|
-
}
|
|
741
|
-
const result = await response.json();
|
|
742
|
-
const choice = result.choices?.[0];
|
|
743
|
-
if (!choice) {
|
|
744
|
-
throw new Error("No response from model");
|
|
745
|
-
}
|
|
746
|
-
const msg = choice.message;
|
|
747
|
-
const reasoningContent = msg?.reasoning_content ?? msg?.reasoning ?? null;
|
|
748
|
-
return {
|
|
749
|
-
content: extractTextContent3(msg?.content),
|
|
750
|
-
reasoning: reasoningContent ? extractTextContent3(reasoningContent) : null,
|
|
751
|
-
model: result.model ?? model,
|
|
752
|
-
usage: {
|
|
753
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
754
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
755
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
756
|
-
},
|
|
757
|
-
finishReason: choice.finish_reason ?? null
|
|
758
|
-
};
|
|
759
|
-
}
|
|
760
|
-
/**
|
|
761
|
-
* 发送流式聊天请求
|
|
762
|
-
*/
|
|
763
|
-
async *chatStream(options) {
|
|
764
|
-
const { model, messages, temperature = 1, maxTokens, reasoning } = options;
|
|
765
|
-
const body = {
|
|
766
|
-
model,
|
|
767
|
-
messages,
|
|
768
|
-
temperature,
|
|
769
|
-
stream: true,
|
|
770
|
-
top_p: 1
|
|
771
|
-
};
|
|
772
|
-
if (maxTokens) {
|
|
773
|
-
body.max_completion_tokens = maxTokens;
|
|
774
|
-
}
|
|
775
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
776
|
-
body.reasoning_format = "parsed";
|
|
777
|
-
} else if (reasoning?.effort === "off") {
|
|
778
|
-
body.include_reasoning = false;
|
|
779
|
-
}
|
|
780
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
781
|
-
method: "POST",
|
|
782
|
-
headers: {
|
|
783
|
-
"Content-Type": "application/json",
|
|
784
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
785
|
-
},
|
|
786
|
-
body: JSON.stringify(body)
|
|
787
|
-
});
|
|
788
|
-
if (!response.ok) {
|
|
789
|
-
const error = await response.text();
|
|
790
|
-
throw new Error(`Groq API error: ${response.status} ${error}`);
|
|
791
|
-
}
|
|
792
|
-
const reader = response.body?.getReader();
|
|
793
|
-
if (!reader) {
|
|
794
|
-
throw new Error("No response body");
|
|
795
|
-
}
|
|
796
|
-
const decoder = new TextDecoder();
|
|
797
|
-
let buffer = "";
|
|
798
|
-
try {
|
|
799
|
-
while (true) {
|
|
800
|
-
const { done, value } = await reader.read();
|
|
801
|
-
if (done) break;
|
|
802
|
-
buffer += decoder.decode(value, { stream: true });
|
|
803
|
-
const lines = buffer.split("\n");
|
|
804
|
-
buffer = lines.pop() ?? "";
|
|
805
|
-
for (const line of lines) {
|
|
806
|
-
const trimmed = line.trim();
|
|
807
|
-
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
808
|
-
if (!trimmed.startsWith("data: ")) continue;
|
|
809
|
-
try {
|
|
810
|
-
const data = JSON.parse(trimmed.slice(6));
|
|
811
|
-
const delta = data.choices?.[0]?.delta;
|
|
812
|
-
if (!delta) continue;
|
|
813
|
-
const reasoningContent = delta.reasoning_content ?? delta.reasoning;
|
|
814
|
-
if (reasoningContent) {
|
|
815
|
-
yield {
|
|
816
|
-
type: "reasoning",
|
|
817
|
-
text: extractTextContent3(reasoningContent)
|
|
818
|
-
};
|
|
819
|
-
}
|
|
820
|
-
if (delta.content) {
|
|
821
|
-
yield {
|
|
822
|
-
type: "content",
|
|
823
|
-
text: extractTextContent3(delta.content)
|
|
824
|
-
};
|
|
825
|
-
}
|
|
826
|
-
} catch {
|
|
827
|
-
}
|
|
828
|
-
}
|
|
829
|
-
}
|
|
830
|
-
} finally {
|
|
831
|
-
reader.releaseLock();
|
|
832
|
-
}
|
|
2581
|
+
return this.ask(model, question, {
|
|
2582
|
+
...recommendedConfig,
|
|
2583
|
+
...options
|
|
2584
|
+
});
|
|
833
2585
|
}
|
|
834
2586
|
};
|
|
835
2587
|
|
|
836
|
-
// src/providers/
|
|
837
|
-
|
|
838
|
-
|
|
839
|
-
|
|
840
|
-
|
|
841
|
-
|
|
842
|
-
|
|
843
|
-
|
|
844
|
-
|
|
845
|
-
|
|
846
|
-
}
|
|
847
|
-
return "";
|
|
848
|
-
}
|
|
849
|
-
var HuggingFaceProvider = class extends BaseProvider {
|
|
850
|
-
name = "huggingface";
|
|
851
|
-
apiKey;
|
|
2588
|
+
// src/providers/__index__.ts
|
|
2589
|
+
init_types();
|
|
2590
|
+
|
|
2591
|
+
// src/providers/openrouter.ts
|
|
2592
|
+
init_http_provider_client();
|
|
2593
|
+
init_stream_processor();
|
|
2594
|
+
var OpenRouterProvider = class extends BaseProvider {
|
|
2595
|
+
name = "openrouter";
|
|
2596
|
+
adapter;
|
|
2597
|
+
client;
|
|
852
2598
|
baseUrl;
|
|
853
|
-
|
|
2599
|
+
apiKey;
|
|
2600
|
+
constructor(apiKey, baseUrl) {
|
|
854
2601
|
super();
|
|
855
|
-
|
|
856
|
-
|
|
857
|
-
|
|
858
|
-
|
|
859
|
-
|
|
860
|
-
|
|
861
|
-
}
|
|
2602
|
+
this.apiKey = apiKey;
|
|
2603
|
+
this.adapter = new OpenRouterAdapter();
|
|
2604
|
+
this.baseUrl = baseUrl ?? this.adapter.defaultBaseUrl;
|
|
2605
|
+
this.client = new HttpProviderClient({
|
|
2606
|
+
apiKey,
|
|
2607
|
+
baseUrl: this.baseUrl
|
|
2608
|
+
});
|
|
862
2609
|
}
|
|
863
2610
|
/**
|
|
864
2611
|
* 发送聊天请求(非流式)
|
|
865
|
-
*
|
|
866
|
-
* reasoning 参数说明:
|
|
867
|
-
* - HuggingFace 是模型聚合平台,thinking 支持取决于具体模型
|
|
868
|
-
* - 如果模型支持,会返回 reasoning_content
|
|
869
2612
|
*/
|
|
870
2613
|
async chat(options) {
|
|
871
|
-
const
|
|
872
|
-
const
|
|
873
|
-
|
|
874
|
-
|
|
875
|
-
|
|
876
|
-
stream: false
|
|
877
|
-
};
|
|
878
|
-
if (maxTokens) {
|
|
879
|
-
body.max_tokens = maxTokens;
|
|
880
|
-
}
|
|
881
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
882
|
-
body.reasoning_effort = reasoning.effort;
|
|
883
|
-
}
|
|
884
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
885
|
-
method: "POST",
|
|
886
|
-
headers: {
|
|
887
|
-
"Content-Type": "application/json",
|
|
888
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
889
|
-
},
|
|
890
|
-
body: JSON.stringify(body)
|
|
891
|
-
});
|
|
892
|
-
if (!response.ok) {
|
|
893
|
-
const error = await response.text();
|
|
894
|
-
throw new Error(`HuggingFace API error: ${response.status} ${error}`);
|
|
895
|
-
}
|
|
896
|
-
const result = await response.json();
|
|
897
|
-
const choice = result.choices?.[0];
|
|
898
|
-
if (!choice) {
|
|
899
|
-
throw new Error("No response from model");
|
|
900
|
-
}
|
|
901
|
-
const msg = choice.message;
|
|
902
|
-
const reasoningContent = msg?.reasoning_content ?? null;
|
|
903
|
-
return {
|
|
904
|
-
content: extractTextContent4(msg?.content),
|
|
905
|
-
reasoning: reasoningContent ? extractTextContent4(reasoningContent) : null,
|
|
906
|
-
model: result.model ?? model,
|
|
907
|
-
usage: {
|
|
908
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
909
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
910
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
911
|
-
},
|
|
912
|
-
finishReason: choice.finish_reason ?? null
|
|
913
|
-
};
|
|
2614
|
+
const body = this.adapter.buildChatRequest(options, false);
|
|
2615
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2616
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2617
|
+
const response = await this.client.chat(endpointPath, body);
|
|
2618
|
+
return this.adapter.parseChatResponse(response, options.model);
|
|
914
2619
|
}
|
|
915
2620
|
/**
|
|
916
2621
|
* 发送流式聊天请求
|
|
917
2622
|
*/
|
|
918
2623
|
async *chatStream(options) {
|
|
919
|
-
const
|
|
920
|
-
const
|
|
921
|
-
|
|
922
|
-
|
|
923
|
-
|
|
924
|
-
|
|
925
|
-
|
|
926
|
-
|
|
927
|
-
|
|
928
|
-
|
|
929
|
-
|
|
930
|
-
|
|
931
|
-
|
|
932
|
-
|
|
933
|
-
|
|
2624
|
+
const body = this.adapter.buildChatRequest(options, true);
|
|
2625
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2626
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2627
|
+
const response = await this.client.chatStream(endpointPath, body);
|
|
2628
|
+
yield* StreamProcessor.processStream(
|
|
2629
|
+
response,
|
|
2630
|
+
(delta) => this.adapter.extractStreamChunk(delta)
|
|
2631
|
+
);
|
|
2632
|
+
}
|
|
2633
|
+
/**
|
|
2634
|
+
* 获取可用模型列表
|
|
2635
|
+
* 注意:此方法直接调用 OpenRouter API,不使用适配器
|
|
2636
|
+
*/
|
|
2637
|
+
async listModels() {
|
|
2638
|
+
const response = await fetch(`${this.baseUrl}/models`, {
|
|
934
2639
|
headers: {
|
|
935
|
-
|
|
936
|
-
|
|
937
|
-
}
|
|
938
|
-
body: JSON.stringify(body)
|
|
2640
|
+
Authorization: `Bearer ${this.apiKey}`,
|
|
2641
|
+
"Content-Type": "application/json"
|
|
2642
|
+
}
|
|
939
2643
|
});
|
|
940
2644
|
if (!response.ok) {
|
|
941
|
-
|
|
942
|
-
|
|
943
|
-
|
|
944
|
-
const reader = response.body?.getReader();
|
|
945
|
-
if (!reader) {
|
|
946
|
-
throw new Error("No response body");
|
|
947
|
-
}
|
|
948
|
-
const decoder = new TextDecoder();
|
|
949
|
-
let buffer = "";
|
|
950
|
-
try {
|
|
951
|
-
while (true) {
|
|
952
|
-
const { done, value } = await reader.read();
|
|
953
|
-
if (done) break;
|
|
954
|
-
buffer += decoder.decode(value, { stream: true });
|
|
955
|
-
const lines = buffer.split("\n");
|
|
956
|
-
buffer = lines.pop() ?? "";
|
|
957
|
-
for (const line of lines) {
|
|
958
|
-
const trimmed = line.trim();
|
|
959
|
-
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
960
|
-
if (!trimmed.startsWith("data: ")) continue;
|
|
961
|
-
try {
|
|
962
|
-
const data = JSON.parse(trimmed.slice(6));
|
|
963
|
-
const delta = data.choices?.[0]?.delta;
|
|
964
|
-
if (!delta) continue;
|
|
965
|
-
if (delta.reasoning_content) {
|
|
966
|
-
yield {
|
|
967
|
-
type: "reasoning",
|
|
968
|
-
text: extractTextContent4(delta.reasoning_content)
|
|
969
|
-
};
|
|
970
|
-
}
|
|
971
|
-
if (delta.content) {
|
|
972
|
-
yield {
|
|
973
|
-
type: "content",
|
|
974
|
-
text: extractTextContent4(delta.content)
|
|
975
|
-
};
|
|
976
|
-
}
|
|
977
|
-
} catch {
|
|
978
|
-
}
|
|
979
|
-
}
|
|
980
|
-
}
|
|
981
|
-
} finally {
|
|
982
|
-
reader.releaseLock();
|
|
2645
|
+
throw new Error(
|
|
2646
|
+
`Failed to fetch models: ${response.status} ${response.statusText}`
|
|
2647
|
+
);
|
|
983
2648
|
}
|
|
2649
|
+
const result = await response.json();
|
|
2650
|
+
return (result.data ?? []).map((m) => ({
|
|
2651
|
+
id: m.id,
|
|
2652
|
+
canonicalSlug: m.canonical_slug ?? m.id,
|
|
2653
|
+
name: m.name,
|
|
2654
|
+
description: m.description ?? "",
|
|
2655
|
+
created: m.created ?? 0,
|
|
2656
|
+
pricing: {
|
|
2657
|
+
prompt: m.pricing?.prompt ?? "0",
|
|
2658
|
+
completion: m.pricing?.completion ?? "0",
|
|
2659
|
+
request: m.pricing?.request ?? "0",
|
|
2660
|
+
image: m.pricing?.image ?? "0"
|
|
2661
|
+
},
|
|
2662
|
+
contextLength: m.context_length ?? 0,
|
|
2663
|
+
architecture: {
|
|
2664
|
+
modality: m.architecture?.modality ?? "",
|
|
2665
|
+
inputModalities: m.architecture?.input_modalities ?? [],
|
|
2666
|
+
outputModalities: m.architecture?.output_modalities ?? [],
|
|
2667
|
+
tokenizer: m.architecture?.tokenizer ?? "",
|
|
2668
|
+
instructType: m.architecture?.instruct_type ?? ""
|
|
2669
|
+
},
|
|
2670
|
+
supportedParameters: m.supported_parameters ?? []
|
|
2671
|
+
}));
|
|
984
2672
|
}
|
|
985
2673
|
};
|
|
986
2674
|
|
|
987
2675
|
// src/providers/modelscope.ts
|
|
988
|
-
|
|
989
|
-
|
|
990
|
-
if (typeof content === "string") {
|
|
991
|
-
return content;
|
|
992
|
-
}
|
|
993
|
-
if (Array.isArray(content)) {
|
|
994
|
-
return content.filter(
|
|
995
|
-
(item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
|
|
996
|
-
).map((item) => item.text).join("");
|
|
997
|
-
}
|
|
998
|
-
return "";
|
|
999
|
-
}
|
|
2676
|
+
init_http_provider_client();
|
|
2677
|
+
init_stream_processor();
|
|
1000
2678
|
var ModelScopeProvider = class extends BaseProvider {
|
|
1001
2679
|
name = "modelscope";
|
|
1002
|
-
|
|
2680
|
+
adapter;
|
|
2681
|
+
client;
|
|
1003
2682
|
baseUrl;
|
|
1004
2683
|
constructor(config) {
|
|
1005
2684
|
super();
|
|
2685
|
+
this.adapter = new ModelScopeAdapter();
|
|
1006
2686
|
if (typeof config === "string") {
|
|
1007
|
-
this.
|
|
1008
|
-
this.
|
|
2687
|
+
this.baseUrl = this.adapter.defaultBaseUrl;
|
|
2688
|
+
this.client = new HttpProviderClient({
|
|
2689
|
+
apiKey: config,
|
|
2690
|
+
baseUrl: this.baseUrl
|
|
2691
|
+
});
|
|
1009
2692
|
} else {
|
|
1010
|
-
this.
|
|
1011
|
-
this.
|
|
2693
|
+
this.baseUrl = config.baseUrl ?? this.adapter.defaultBaseUrl;
|
|
2694
|
+
this.client = new HttpProviderClient({
|
|
2695
|
+
apiKey: config.apiKey,
|
|
2696
|
+
baseUrl: this.baseUrl
|
|
2697
|
+
});
|
|
1012
2698
|
}
|
|
1013
2699
|
}
|
|
1014
2700
|
/**
|
|
1015
2701
|
* 发送聊天请求(非流式)
|
|
1016
2702
|
*/
|
|
1017
2703
|
async chat(options) {
|
|
1018
|
-
const
|
|
1019
|
-
|
|
1020
|
-
|
|
1021
|
-
|
|
1022
|
-
|
|
1023
|
-
reasoning
|
|
1024
|
-
} = options;
|
|
1025
|
-
const body = {
|
|
1026
|
-
model,
|
|
1027
|
-
messages,
|
|
1028
|
-
temperature,
|
|
1029
|
-
stream: false
|
|
1030
|
-
};
|
|
1031
|
-
if (maxTokens) {
|
|
1032
|
-
body.max_tokens = maxTokens;
|
|
1033
|
-
}
|
|
1034
|
-
if (reasoning?.effort) {
|
|
1035
|
-
if (reasoning.effort === "off") {
|
|
1036
|
-
body.enable_thinking = false;
|
|
1037
|
-
} else {
|
|
1038
|
-
body.enable_thinking = true;
|
|
1039
|
-
}
|
|
1040
|
-
}
|
|
1041
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
1042
|
-
method: "POST",
|
|
1043
|
-
headers: {
|
|
1044
|
-
"Content-Type": "application/json",
|
|
1045
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
1046
|
-
},
|
|
1047
|
-
body: JSON.stringify(body)
|
|
1048
|
-
});
|
|
1049
|
-
if (!response.ok) {
|
|
1050
|
-
const error = await response.text();
|
|
1051
|
-
throw new Error(`ModelScope API error: ${response.status} ${error}`);
|
|
1052
|
-
}
|
|
1053
|
-
const result = await response.json();
|
|
1054
|
-
const choice = result.choices?.[0];
|
|
1055
|
-
if (!choice) {
|
|
1056
|
-
throw new Error("No response from model");
|
|
1057
|
-
}
|
|
1058
|
-
const msg = choice.message;
|
|
1059
|
-
const reasoningContent = msg?.reasoning_content ?? null;
|
|
1060
|
-
return {
|
|
1061
|
-
content: extractTextContent5(msg?.content),
|
|
1062
|
-
reasoning: reasoningContent ? extractTextContent5(reasoningContent) : null,
|
|
1063
|
-
model: result.model ?? model,
|
|
1064
|
-
usage: {
|
|
1065
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
1066
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
1067
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
1068
|
-
},
|
|
1069
|
-
finishReason: choice.finish_reason ?? null
|
|
1070
|
-
};
|
|
2704
|
+
const body = this.adapter.buildChatRequest(options, false);
|
|
2705
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2706
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2707
|
+
const response = await this.client.chat(endpointPath, body);
|
|
2708
|
+
return this.adapter.parseChatResponse(response, options.model);
|
|
1071
2709
|
}
|
|
1072
2710
|
/**
|
|
1073
2711
|
* 发送流式聊天请求
|
|
1074
2712
|
*/
|
|
1075
2713
|
async *chatStream(options) {
|
|
1076
|
-
const
|
|
1077
|
-
|
|
1078
|
-
|
|
1079
|
-
|
|
1080
|
-
|
|
1081
|
-
|
|
1082
|
-
|
|
1083
|
-
|
|
1084
|
-
model,
|
|
1085
|
-
messages,
|
|
1086
|
-
temperature,
|
|
1087
|
-
stream: true
|
|
1088
|
-
};
|
|
1089
|
-
if (maxTokens) {
|
|
1090
|
-
body.max_tokens = maxTokens;
|
|
1091
|
-
}
|
|
1092
|
-
if (reasoning?.effort) {
|
|
1093
|
-
if (reasoning.effort === "off") {
|
|
1094
|
-
body.enable_thinking = false;
|
|
1095
|
-
} else {
|
|
1096
|
-
body.enable_thinking = true;
|
|
1097
|
-
}
|
|
1098
|
-
}
|
|
1099
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
1100
|
-
method: "POST",
|
|
1101
|
-
headers: {
|
|
1102
|
-
"Content-Type": "application/json",
|
|
1103
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
1104
|
-
},
|
|
1105
|
-
body: JSON.stringify(body)
|
|
1106
|
-
});
|
|
1107
|
-
if (!response.ok) {
|
|
1108
|
-
const error = await response.text();
|
|
1109
|
-
throw new Error(`ModelScope API error: ${response.status} ${error}`);
|
|
1110
|
-
}
|
|
1111
|
-
const reader = response.body?.getReader();
|
|
1112
|
-
if (!reader) {
|
|
1113
|
-
throw new Error("No response body");
|
|
1114
|
-
}
|
|
1115
|
-
const decoder = new TextDecoder();
|
|
1116
|
-
let buffer = "";
|
|
1117
|
-
try {
|
|
1118
|
-
while (true) {
|
|
1119
|
-
const { done, value } = await reader.read();
|
|
1120
|
-
if (done) break;
|
|
1121
|
-
buffer += decoder.decode(value, { stream: true });
|
|
1122
|
-
const lines = buffer.split("\n");
|
|
1123
|
-
buffer = lines.pop() ?? "";
|
|
1124
|
-
for (const line of lines) {
|
|
1125
|
-
const trimmed = line.trim();
|
|
1126
|
-
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
1127
|
-
if (!trimmed.startsWith("data: ")) continue;
|
|
1128
|
-
try {
|
|
1129
|
-
const data = JSON.parse(trimmed.slice(6));
|
|
1130
|
-
const delta = data.choices?.[0]?.delta;
|
|
1131
|
-
if (!delta) continue;
|
|
1132
|
-
if (delta.reasoning_content) {
|
|
1133
|
-
yield {
|
|
1134
|
-
type: "reasoning",
|
|
1135
|
-
text: extractTextContent5(delta.reasoning_content)
|
|
1136
|
-
};
|
|
1137
|
-
}
|
|
1138
|
-
if (delta.content) {
|
|
1139
|
-
yield {
|
|
1140
|
-
type: "content",
|
|
1141
|
-
text: extractTextContent5(delta.content)
|
|
1142
|
-
};
|
|
1143
|
-
}
|
|
1144
|
-
} catch {
|
|
1145
|
-
}
|
|
1146
|
-
}
|
|
1147
|
-
}
|
|
1148
|
-
} finally {
|
|
1149
|
-
reader.releaseLock();
|
|
1150
|
-
}
|
|
2714
|
+
const body = this.adapter.buildChatRequest(options, true);
|
|
2715
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2716
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2717
|
+
const response = await this.client.chatStream(endpointPath, body);
|
|
2718
|
+
yield* StreamProcessor.processStream(
|
|
2719
|
+
response,
|
|
2720
|
+
(delta) => this.adapter.extractStreamChunk(delta)
|
|
2721
|
+
);
|
|
1151
2722
|
}
|
|
1152
2723
|
};
|
|
1153
2724
|
|
|
1154
|
-
// src/providers/
|
|
1155
|
-
|
|
1156
|
-
|
|
1157
|
-
|
|
1158
|
-
|
|
1159
|
-
|
|
1160
|
-
|
|
1161
|
-
return content.filter(
|
|
1162
|
-
(item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
|
|
1163
|
-
).map((item) => item.text).join("");
|
|
1164
|
-
}
|
|
1165
|
-
return "";
|
|
1166
|
-
}
|
|
1167
|
-
var DeepSeekProvider = class extends BaseProvider {
|
|
1168
|
-
name = "deepseek";
|
|
1169
|
-
apiKey;
|
|
2725
|
+
// src/providers/huggingface.ts
|
|
2726
|
+
init_http_provider_client();
|
|
2727
|
+
init_stream_processor();
|
|
2728
|
+
var HuggingFaceProvider = class extends BaseProvider {
|
|
2729
|
+
name = "huggingface";
|
|
2730
|
+
adapter;
|
|
2731
|
+
client;
|
|
1170
2732
|
baseUrl;
|
|
1171
2733
|
constructor(config) {
|
|
1172
2734
|
super();
|
|
2735
|
+
this.adapter = new HuggingFaceAdapter();
|
|
1173
2736
|
if (typeof config === "string") {
|
|
1174
|
-
this.
|
|
1175
|
-
this.
|
|
2737
|
+
this.baseUrl = this.adapter.defaultBaseUrl;
|
|
2738
|
+
this.client = new HttpProviderClient({
|
|
2739
|
+
apiKey: config,
|
|
2740
|
+
baseUrl: this.baseUrl
|
|
2741
|
+
});
|
|
1176
2742
|
} else {
|
|
1177
|
-
this.
|
|
1178
|
-
this.
|
|
2743
|
+
this.baseUrl = config.baseUrl ?? this.adapter.defaultBaseUrl;
|
|
2744
|
+
this.client = new HttpProviderClient({
|
|
2745
|
+
apiKey: config.apiKey,
|
|
2746
|
+
baseUrl: this.baseUrl
|
|
2747
|
+
});
|
|
1179
2748
|
}
|
|
1180
2749
|
}
|
|
1181
2750
|
/**
|
|
1182
2751
|
* 发送聊天请求(非流式)
|
|
1183
2752
|
*
|
|
1184
2753
|
* reasoning 参数说明:
|
|
1185
|
-
* -
|
|
1186
|
-
* -
|
|
2754
|
+
* - HuggingFace 是模型聚合平台,thinking 支持取决于具体模型
|
|
2755
|
+
* - 如果模型支持,会返回 reasoning_content
|
|
1187
2756
|
*/
|
|
1188
2757
|
async chat(options) {
|
|
1189
|
-
const
|
|
1190
|
-
|
|
1191
|
-
|
|
1192
|
-
|
|
1193
|
-
|
|
1194
|
-
reasoning
|
|
1195
|
-
} = options;
|
|
1196
|
-
const body = {
|
|
1197
|
-
model,
|
|
1198
|
-
messages,
|
|
1199
|
-
temperature,
|
|
1200
|
-
stream: false
|
|
1201
|
-
};
|
|
1202
|
-
if (maxTokens) {
|
|
1203
|
-
body.max_tokens = maxTokens;
|
|
1204
|
-
}
|
|
1205
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
1206
|
-
body.thinking = { type: "enabled" };
|
|
1207
|
-
}
|
|
1208
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
1209
|
-
method: "POST",
|
|
1210
|
-
headers: {
|
|
1211
|
-
"Content-Type": "application/json",
|
|
1212
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
1213
|
-
},
|
|
1214
|
-
body: JSON.stringify(body)
|
|
1215
|
-
});
|
|
1216
|
-
if (!response.ok) {
|
|
1217
|
-
const error = await response.text();
|
|
1218
|
-
throw new Error(`DeepSeek API error: ${response.status} ${error}`);
|
|
1219
|
-
}
|
|
1220
|
-
const result = await response.json();
|
|
1221
|
-
const choice = result.choices?.[0];
|
|
1222
|
-
if (!choice) {
|
|
1223
|
-
throw new Error("No response from model");
|
|
1224
|
-
}
|
|
1225
|
-
const msg = choice.message;
|
|
1226
|
-
const reasoningContent = msg?.reasoning_content ?? null;
|
|
1227
|
-
return {
|
|
1228
|
-
content: extractTextContent6(msg?.content),
|
|
1229
|
-
reasoning: reasoningContent ? extractTextContent6(reasoningContent) : null,
|
|
1230
|
-
model: result.model ?? model,
|
|
1231
|
-
usage: {
|
|
1232
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
1233
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
1234
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
1235
|
-
},
|
|
1236
|
-
finishReason: choice.finish_reason ?? null
|
|
1237
|
-
};
|
|
2758
|
+
const body = this.adapter.buildChatRequest(options, false);
|
|
2759
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2760
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2761
|
+
const response = await this.client.chat(endpointPath, body);
|
|
2762
|
+
return this.adapter.parseChatResponse(response, options.model);
|
|
1238
2763
|
}
|
|
1239
2764
|
/**
|
|
1240
2765
|
* 发送流式聊天请求
|
|
1241
2766
|
*/
|
|
1242
2767
|
async *chatStream(options) {
|
|
1243
|
-
const
|
|
1244
|
-
|
|
1245
|
-
|
|
1246
|
-
|
|
1247
|
-
|
|
1248
|
-
|
|
1249
|
-
|
|
1250
|
-
|
|
1251
|
-
model,
|
|
1252
|
-
messages,
|
|
1253
|
-
temperature,
|
|
1254
|
-
stream: true
|
|
1255
|
-
};
|
|
1256
|
-
if (maxTokens) {
|
|
1257
|
-
body.max_tokens = maxTokens;
|
|
1258
|
-
}
|
|
1259
|
-
if (reasoning?.effort && reasoning.effort !== "off") {
|
|
1260
|
-
body.thinking = { type: "enabled" };
|
|
1261
|
-
}
|
|
1262
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
1263
|
-
method: "POST",
|
|
1264
|
-
headers: {
|
|
1265
|
-
"Content-Type": "application/json",
|
|
1266
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
1267
|
-
},
|
|
1268
|
-
body: JSON.stringify(body)
|
|
1269
|
-
});
|
|
1270
|
-
if (!response.ok) {
|
|
1271
|
-
const error = await response.text();
|
|
1272
|
-
throw new Error(`DeepSeek API error: ${response.status} ${error}`);
|
|
1273
|
-
}
|
|
1274
|
-
const reader = response.body?.getReader();
|
|
1275
|
-
if (!reader) {
|
|
1276
|
-
throw new Error("No response body");
|
|
1277
|
-
}
|
|
1278
|
-
const decoder = new TextDecoder();
|
|
1279
|
-
let buffer = "";
|
|
1280
|
-
try {
|
|
1281
|
-
while (true) {
|
|
1282
|
-
const { done, value } = await reader.read();
|
|
1283
|
-
if (done) break;
|
|
1284
|
-
buffer += decoder.decode(value, { stream: true });
|
|
1285
|
-
const lines = buffer.split("\n");
|
|
1286
|
-
buffer = lines.pop() ?? "";
|
|
1287
|
-
for (const line of lines) {
|
|
1288
|
-
const trimmed = line.trim();
|
|
1289
|
-
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
1290
|
-
if (!trimmed.startsWith("data: ")) continue;
|
|
1291
|
-
try {
|
|
1292
|
-
const data = JSON.parse(trimmed.slice(6));
|
|
1293
|
-
const delta = data.choices?.[0]?.delta;
|
|
1294
|
-
if (!delta) continue;
|
|
1295
|
-
if (delta.reasoning_content) {
|
|
1296
|
-
yield {
|
|
1297
|
-
type: "reasoning",
|
|
1298
|
-
text: extractTextContent6(delta.reasoning_content)
|
|
1299
|
-
};
|
|
1300
|
-
}
|
|
1301
|
-
if (delta.content) {
|
|
1302
|
-
yield {
|
|
1303
|
-
type: "content",
|
|
1304
|
-
text: extractTextContent6(delta.content)
|
|
1305
|
-
};
|
|
1306
|
-
}
|
|
1307
|
-
} catch {
|
|
1308
|
-
}
|
|
1309
|
-
}
|
|
1310
|
-
}
|
|
1311
|
-
} finally {
|
|
1312
|
-
reader.releaseLock();
|
|
1313
|
-
}
|
|
1314
|
-
}
|
|
1315
|
-
};
|
|
1316
|
-
|
|
1317
|
-
// src/providers/poe.ts
|
|
1318
|
-
var BASE_URL6 = "https://api.poe.com/v1";
|
|
1319
|
-
function extractTextContent7(content) {
|
|
1320
|
-
if (typeof content === "string") {
|
|
1321
|
-
return content;
|
|
1322
|
-
}
|
|
1323
|
-
if (Array.isArray(content)) {
|
|
1324
|
-
return content.filter(
|
|
1325
|
-
(item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
|
|
1326
|
-
).map((item) => item.text).join("");
|
|
1327
|
-
}
|
|
1328
|
-
return "";
|
|
1329
|
-
}
|
|
1330
|
-
function extractThinkingFromContent(content) {
|
|
1331
|
-
const thinkMatch = content.match(/<think>([\s\S]*?)<\/think>/);
|
|
1332
|
-
if (thinkMatch) {
|
|
1333
|
-
const thinking = thinkMatch[1].trim();
|
|
1334
|
-
const cleanContent = content.replace(/<think>[\s\S]*?<\/think>/, "").trim();
|
|
1335
|
-
return { thinking, content: cleanContent };
|
|
1336
|
-
}
|
|
1337
|
-
const thinkingMatch = content.match(
|
|
1338
|
-
/^\*Thinking\.{0,3}\*\s*\n((?:>.*(?:\n|$))+)/
|
|
1339
|
-
);
|
|
1340
|
-
if (thinkingMatch) {
|
|
1341
|
-
const thinking = thinkingMatch[1].split("\n").map((line) => line.replace(/^>\s?/, "")).join("\n").trim();
|
|
1342
|
-
const cleanContent = content.replace(thinkingMatch[0], "").trim();
|
|
1343
|
-
return { thinking, content: cleanContent };
|
|
1344
|
-
}
|
|
1345
|
-
return { thinking: "", content };
|
|
1346
|
-
}
|
|
1347
|
-
function buildExtraBody(reasoning) {
|
|
1348
|
-
if (!reasoning || reasoning.effort === "off") {
|
|
1349
|
-
return void 0;
|
|
1350
|
-
}
|
|
1351
|
-
const extraBody = {};
|
|
1352
|
-
if (reasoning.effort) {
|
|
1353
|
-
extraBody.reasoning_effort = reasoning.effort;
|
|
1354
|
-
}
|
|
1355
|
-
if (reasoning.budgetTokens !== void 0) {
|
|
1356
|
-
extraBody.thinking_budget = reasoning.budgetTokens;
|
|
1357
|
-
} else if (reasoning.effort && EFFORT_TOKEN_MAP[reasoning.effort]) {
|
|
1358
|
-
extraBody.thinking_budget = EFFORT_TOKEN_MAP[reasoning.effort];
|
|
2768
|
+
const body = this.adapter.buildChatRequest(options, true);
|
|
2769
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2770
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2771
|
+
const response = await this.client.chatStream(endpointPath, body);
|
|
2772
|
+
yield* StreamProcessor.processStream(
|
|
2773
|
+
response,
|
|
2774
|
+
(delta) => this.adapter.extractStreamChunk(delta)
|
|
2775
|
+
);
|
|
1359
2776
|
}
|
|
1360
|
-
|
|
1361
|
-
|
|
1362
|
-
|
|
1363
|
-
|
|
1364
|
-
|
|
2777
|
+
};
|
|
2778
|
+
|
|
2779
|
+
// src/providers/groq.ts
|
|
2780
|
+
init_http_provider_client();
|
|
2781
|
+
init_stream_processor();
|
|
2782
|
+
var GroqProvider = class extends BaseProvider {
|
|
2783
|
+
name = "groq";
|
|
2784
|
+
adapter;
|
|
2785
|
+
client;
|
|
1365
2786
|
baseUrl;
|
|
1366
2787
|
constructor(config) {
|
|
1367
2788
|
super();
|
|
2789
|
+
this.adapter = new GroqAdapter();
|
|
1368
2790
|
if (typeof config === "string") {
|
|
1369
|
-
this.
|
|
1370
|
-
this.
|
|
2791
|
+
this.baseUrl = this.adapter.defaultBaseUrl;
|
|
2792
|
+
this.client = new HttpProviderClient({
|
|
2793
|
+
apiKey: config,
|
|
2794
|
+
baseUrl: this.baseUrl
|
|
2795
|
+
});
|
|
1371
2796
|
} else {
|
|
1372
|
-
this.
|
|
1373
|
-
this.
|
|
2797
|
+
this.baseUrl = config.baseUrl ?? this.adapter.defaultBaseUrl;
|
|
2798
|
+
this.client = new HttpProviderClient({
|
|
2799
|
+
apiKey: config.apiKey,
|
|
2800
|
+
baseUrl: this.baseUrl
|
|
2801
|
+
});
|
|
1374
2802
|
}
|
|
1375
2803
|
}
|
|
1376
2804
|
/**
|
|
1377
2805
|
* 发送聊天请求(非流式)
|
|
1378
2806
|
*/
|
|
1379
2807
|
async chat(options) {
|
|
1380
|
-
const
|
|
1381
|
-
const
|
|
1382
|
-
|
|
1383
|
-
|
|
1384
|
-
|
|
1385
|
-
stream: false
|
|
1386
|
-
};
|
|
1387
|
-
if (maxTokens) {
|
|
1388
|
-
body.max_tokens = maxTokens;
|
|
1389
|
-
}
|
|
1390
|
-
const extraBody = buildExtraBody(reasoning);
|
|
1391
|
-
if (extraBody) {
|
|
1392
|
-
Object.assign(body, extraBody);
|
|
1393
|
-
}
|
|
1394
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
1395
|
-
method: "POST",
|
|
1396
|
-
headers: {
|
|
1397
|
-
"Content-Type": "application/json",
|
|
1398
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
1399
|
-
},
|
|
1400
|
-
body: JSON.stringify(body)
|
|
1401
|
-
});
|
|
1402
|
-
if (!response.ok) {
|
|
1403
|
-
const error = await response.text();
|
|
1404
|
-
throw new Error(`Poe API error: ${response.status} ${error}`);
|
|
1405
|
-
}
|
|
1406
|
-
const result = await response.json();
|
|
1407
|
-
const choice = result.choices?.[0];
|
|
1408
|
-
if (!choice) {
|
|
1409
|
-
throw new Error("No response from model");
|
|
1410
|
-
}
|
|
1411
|
-
const msg = choice.message;
|
|
1412
|
-
let reasoningContent = msg?.reasoning_content ?? null;
|
|
1413
|
-
let contentText = extractTextContent7(msg?.content);
|
|
1414
|
-
if (!reasoningContent && contentText) {
|
|
1415
|
-
const extracted = extractThinkingFromContent(contentText);
|
|
1416
|
-
if (extracted.thinking) {
|
|
1417
|
-
reasoningContent = extracted.thinking;
|
|
1418
|
-
contentText = extracted.content;
|
|
1419
|
-
}
|
|
1420
|
-
}
|
|
1421
|
-
return {
|
|
1422
|
-
content: contentText,
|
|
1423
|
-
reasoning: reasoningContent ? extractTextContent7(reasoningContent) : null,
|
|
1424
|
-
model: result.model ?? model,
|
|
1425
|
-
usage: {
|
|
1426
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
1427
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
1428
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
1429
|
-
},
|
|
1430
|
-
finishReason: choice.finish_reason ?? null
|
|
1431
|
-
};
|
|
2808
|
+
const body = this.adapter.buildChatRequest(options, false);
|
|
2809
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2810
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2811
|
+
const response = await this.client.chat(endpointPath, body);
|
|
2812
|
+
return this.adapter.parseChatResponse(response, options.model);
|
|
1432
2813
|
}
|
|
1433
2814
|
/**
|
|
1434
2815
|
* 发送流式聊天请求
|
|
1435
2816
|
*/
|
|
1436
2817
|
async *chatStream(options) {
|
|
1437
|
-
const
|
|
1438
|
-
const
|
|
1439
|
-
|
|
1440
|
-
|
|
1441
|
-
|
|
1442
|
-
|
|
1443
|
-
|
|
1444
|
-
|
|
1445
|
-
body.max_tokens = maxTokens;
|
|
1446
|
-
}
|
|
1447
|
-
const extraBody = buildExtraBody(reasoning);
|
|
1448
|
-
if (extraBody) {
|
|
1449
|
-
Object.assign(body, extraBody);
|
|
1450
|
-
}
|
|
1451
|
-
const response = await fetch(`${this.baseUrl}/chat/completions`, {
|
|
1452
|
-
method: "POST",
|
|
1453
|
-
headers: {
|
|
1454
|
-
"Content-Type": "application/json",
|
|
1455
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
1456
|
-
},
|
|
1457
|
-
body: JSON.stringify(body)
|
|
1458
|
-
});
|
|
1459
|
-
if (!response.ok) {
|
|
1460
|
-
const error = await response.text();
|
|
1461
|
-
throw new Error(`Poe API error: ${response.status} ${error}`);
|
|
1462
|
-
}
|
|
1463
|
-
const reader = response.body?.getReader();
|
|
1464
|
-
if (!reader) {
|
|
1465
|
-
throw new Error("No response body");
|
|
1466
|
-
}
|
|
1467
|
-
const decoder = new TextDecoder();
|
|
1468
|
-
let buffer = "";
|
|
1469
|
-
let thinkingMode = "none";
|
|
1470
|
-
let contentBuffer = "";
|
|
1471
|
-
try {
|
|
1472
|
-
while (true) {
|
|
1473
|
-
const { done, value } = await reader.read();
|
|
1474
|
-
if (done) break;
|
|
1475
|
-
buffer += decoder.decode(value, { stream: true });
|
|
1476
|
-
const lines = buffer.split("\n");
|
|
1477
|
-
buffer = lines.pop() ?? "";
|
|
1478
|
-
for (const line of lines) {
|
|
1479
|
-
const trimmed = line.trim();
|
|
1480
|
-
if (!trimmed || trimmed === "data: [DONE]") continue;
|
|
1481
|
-
if (!trimmed.startsWith("data: ")) continue;
|
|
1482
|
-
try {
|
|
1483
|
-
const data = JSON.parse(trimmed.slice(6));
|
|
1484
|
-
const delta = data.choices?.[0]?.delta;
|
|
1485
|
-
if (!delta) continue;
|
|
1486
|
-
if (delta.reasoning_content) {
|
|
1487
|
-
yield {
|
|
1488
|
-
type: "reasoning",
|
|
1489
|
-
text: extractTextContent7(delta.reasoning_content)
|
|
1490
|
-
};
|
|
1491
|
-
continue;
|
|
1492
|
-
}
|
|
1493
|
-
if (delta.content) {
|
|
1494
|
-
const text = extractTextContent7(delta.content);
|
|
1495
|
-
contentBuffer += text;
|
|
1496
|
-
while (true) {
|
|
1497
|
-
if (thinkingMode === "none") {
|
|
1498
|
-
const thinkStart = contentBuffer.indexOf("<think>");
|
|
1499
|
-
if (thinkStart !== -1) {
|
|
1500
|
-
if (thinkStart > 0) {
|
|
1501
|
-
yield { type: "content", text: contentBuffer.slice(0, thinkStart) };
|
|
1502
|
-
}
|
|
1503
|
-
contentBuffer = contentBuffer.slice(thinkStart + 7);
|
|
1504
|
-
thinkingMode = "think_tag";
|
|
1505
|
-
continue;
|
|
1506
|
-
}
|
|
1507
|
-
const thinkingMatch = contentBuffer.match(/^\*Thinking\.{0,3}\*\s*\n/);
|
|
1508
|
-
if (thinkingMatch) {
|
|
1509
|
-
contentBuffer = contentBuffer.slice(thinkingMatch[0].length);
|
|
1510
|
-
thinkingMode = "markdown_thinking";
|
|
1511
|
-
continue;
|
|
1512
|
-
}
|
|
1513
|
-
if (contentBuffer.length > 0) {
|
|
1514
|
-
const keepLen = Math.min(15, contentBuffer.length);
|
|
1515
|
-
const output = contentBuffer.slice(0, -keepLen) || "";
|
|
1516
|
-
if (output) {
|
|
1517
|
-
yield { type: "content", text: output };
|
|
1518
|
-
contentBuffer = contentBuffer.slice(-keepLen);
|
|
1519
|
-
}
|
|
1520
|
-
}
|
|
1521
|
-
break;
|
|
1522
|
-
} else if (thinkingMode === "think_tag") {
|
|
1523
|
-
const endIdx = contentBuffer.indexOf("</think>");
|
|
1524
|
-
if (endIdx !== -1) {
|
|
1525
|
-
yield { type: "reasoning", text: contentBuffer.slice(0, endIdx) };
|
|
1526
|
-
contentBuffer = contentBuffer.slice(endIdx + 8);
|
|
1527
|
-
thinkingMode = "none";
|
|
1528
|
-
continue;
|
|
1529
|
-
}
|
|
1530
|
-
if (contentBuffer.length > 8) {
|
|
1531
|
-
yield { type: "reasoning", text: contentBuffer.slice(0, -8) };
|
|
1532
|
-
contentBuffer = contentBuffer.slice(-8);
|
|
1533
|
-
}
|
|
1534
|
-
break;
|
|
1535
|
-
} else if (thinkingMode === "markdown_thinking") {
|
|
1536
|
-
if (contentBuffer.startsWith(">")) {
|
|
1537
|
-
thinkingMode = "markdown_quote";
|
|
1538
|
-
continue;
|
|
1539
|
-
}
|
|
1540
|
-
if (contentBuffer.length > 0 && !contentBuffer.startsWith(">")) {
|
|
1541
|
-
thinkingMode = "none";
|
|
1542
|
-
continue;
|
|
1543
|
-
}
|
|
1544
|
-
break;
|
|
1545
|
-
} else if (thinkingMode === "markdown_quote") {
|
|
1546
|
-
const newlineIdx = contentBuffer.indexOf("\n");
|
|
1547
|
-
if (newlineIdx !== -1) {
|
|
1548
|
-
const quoteLine = contentBuffer.slice(0, newlineIdx);
|
|
1549
|
-
contentBuffer = contentBuffer.slice(newlineIdx + 1);
|
|
1550
|
-
if (quoteLine.startsWith(">")) {
|
|
1551
|
-
const thinkText = quoteLine.replace(/^>\s?/, "");
|
|
1552
|
-
yield { type: "reasoning", text: thinkText + "\n" };
|
|
1553
|
-
continue;
|
|
1554
|
-
}
|
|
1555
|
-
if (quoteLine.trim() === "") {
|
|
1556
|
-
yield { type: "reasoning", text: "\n" };
|
|
1557
|
-
continue;
|
|
1558
|
-
}
|
|
1559
|
-
thinkingMode = "none";
|
|
1560
|
-
yield { type: "content", text: quoteLine + "\n" };
|
|
1561
|
-
continue;
|
|
1562
|
-
}
|
|
1563
|
-
break;
|
|
1564
|
-
}
|
|
1565
|
-
}
|
|
1566
|
-
}
|
|
1567
|
-
} catch {
|
|
1568
|
-
}
|
|
1569
|
-
}
|
|
1570
|
-
}
|
|
1571
|
-
if (contentBuffer.length > 0) {
|
|
1572
|
-
if (thinkingMode === "think_tag" || thinkingMode === "markdown_quote") {
|
|
1573
|
-
yield { type: "reasoning", text: contentBuffer };
|
|
1574
|
-
} else {
|
|
1575
|
-
yield { type: "content", text: contentBuffer };
|
|
1576
|
-
}
|
|
1577
|
-
}
|
|
1578
|
-
} finally {
|
|
1579
|
-
reader.releaseLock();
|
|
1580
|
-
}
|
|
2818
|
+
const body = this.adapter.buildChatRequest(options, true);
|
|
2819
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2820
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2821
|
+
const response = await this.client.chatStream(endpointPath, body);
|
|
2822
|
+
yield* StreamProcessor.processStream(
|
|
2823
|
+
response,
|
|
2824
|
+
(delta) => this.adapter.extractStreamChunk(delta)
|
|
2825
|
+
);
|
|
1581
2826
|
}
|
|
1582
2827
|
};
|
|
1583
2828
|
|
|
1584
|
-
// src/providers/
|
|
1585
|
-
|
|
1586
|
-
|
|
1587
|
-
|
|
1588
|
-
|
|
1589
|
-
|
|
1590
|
-
|
|
1591
|
-
return content.filter(
|
|
1592
|
-
(item) => typeof item === "object" && item !== null && item.type === "text" && typeof item.text === "string"
|
|
1593
|
-
).map((item) => item.text).join("");
|
|
1594
|
-
}
|
|
1595
|
-
return "";
|
|
1596
|
-
}
|
|
1597
|
-
var NovaProvider = class extends BaseProvider {
|
|
1598
|
-
name = "nova";
|
|
1599
|
-
apiKey;
|
|
2829
|
+
// src/providers/gemini.ts
|
|
2830
|
+
init_http_provider_client();
|
|
2831
|
+
init_stream_processor();
|
|
2832
|
+
var GeminiProvider = class extends BaseProvider {
|
|
2833
|
+
name = "gemini";
|
|
2834
|
+
adapter;
|
|
2835
|
+
client;
|
|
1600
2836
|
baseUrl;
|
|
1601
2837
|
constructor(config) {
|
|
1602
2838
|
super();
|
|
2839
|
+
this.adapter = new GeminiAdapter();
|
|
1603
2840
|
if (typeof config === "string") {
|
|
1604
|
-
this.
|
|
1605
|
-
this.
|
|
2841
|
+
this.baseUrl = this.adapter.defaultBaseUrl;
|
|
2842
|
+
this.client = new HttpProviderClient({
|
|
2843
|
+
apiKey: config,
|
|
2844
|
+
baseUrl: this.baseUrl
|
|
2845
|
+
});
|
|
1606
2846
|
} else {
|
|
1607
|
-
this.
|
|
1608
|
-
this.
|
|
2847
|
+
this.baseUrl = config.baseUrl ?? this.adapter.defaultBaseUrl;
|
|
2848
|
+
this.client = new HttpProviderClient({
|
|
2849
|
+
apiKey: config.apiKey,
|
|
2850
|
+
baseUrl: this.baseUrl
|
|
2851
|
+
});
|
|
1609
2852
|
}
|
|
1610
2853
|
}
|
|
1611
2854
|
/**
|
|
1612
2855
|
* 发送聊天请求(非流式)
|
|
1613
|
-
*
|
|
1614
|
-
* 注意:
|
|
1615
|
-
* - Nova API 的 temperature 范围是 0-1(不是 0-2)
|
|
1616
|
-
* - Nova 2 Lite 支持 extended thinking (reasoningConfig)
|
|
1617
|
-
* - effort 映射为 maxReasoningEffort
|
|
1618
2856
|
*/
|
|
1619
2857
|
async chat(options) {
|
|
1620
|
-
const
|
|
1621
|
-
const
|
|
1622
|
-
|
|
2858
|
+
const body = this.adapter.buildChatRequest(options, false);
|
|
2859
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2860
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2861
|
+
const response = await this.client.chat(endpointPath, body);
|
|
2862
|
+
return this.adapter.parseChatResponse(response, options.model);
|
|
2863
|
+
}
|
|
2864
|
+
/**
|
|
2865
|
+
* 发送流式聊天请求
|
|
2866
|
+
*/
|
|
2867
|
+
async *chatStream(options) {
|
|
2868
|
+
const body = this.adapter.buildChatRequest(options, true);
|
|
2869
|
+
const endpoint = this.adapter.getEndpointUrl(this.baseUrl);
|
|
2870
|
+
const endpointPath = endpoint.replace(this.baseUrl, "");
|
|
2871
|
+
const response = await this.client.chatStream(endpointPath, body);
|
|
2872
|
+
yield* StreamProcessor.processStream(
|
|
2873
|
+
response,
|
|
2874
|
+
(delta) => this.adapter.extractStreamChunk(delta)
|
|
2875
|
+
);
|
|
2876
|
+
}
|
|
2877
|
+
};
|
|
2878
|
+
|
|
2879
|
+
// src/providers/deepseek.ts
|
|
2880
|
+
init_http_provider_client();
|
|
2881
|
+
init_stream_processor();
|
|
2882
|
+
|
|
2883
|
+
// src/providers/poe.ts
|
|
2884
|
+
init_http_provider_client();
|
|
2885
|
+
init_stream_processor();
|
|
2886
|
+
|
|
2887
|
+
// src/providers/nova.ts
|
|
2888
|
+
init_http_provider_client();
|
|
2889
|
+
init_stream_processor();
|
|
2890
|
+
|
|
2891
|
+
// src/utils/index.ts
|
|
2892
|
+
init_stream_processor();
|
|
2893
|
+
init_request_builder();
|
|
2894
|
+
|
|
2895
|
+
// src/client/index.ts
|
|
2896
|
+
init_types2();
|
|
2897
|
+
init_http_provider_client();
|
|
2898
|
+
|
|
2899
|
+
// src/fluent/errors.ts
|
|
2900
|
+
var ConfigurationError = class _ConfigurationError extends Error {
|
|
2901
|
+
constructor(message) {
|
|
2902
|
+
super(message);
|
|
2903
|
+
this.name = "ConfigurationError";
|
|
2904
|
+
Object.setPrototypeOf(this, _ConfigurationError.prototype);
|
|
2905
|
+
}
|
|
2906
|
+
};
|
|
2907
|
+
var ValidationError = class _ValidationError extends Error {
|
|
2908
|
+
constructor(message) {
|
|
2909
|
+
super(message);
|
|
2910
|
+
this.name = "ValidationError";
|
|
2911
|
+
Object.setPrototypeOf(this, _ValidationError.prototype);
|
|
2912
|
+
}
|
|
2913
|
+
};
|
|
2914
|
+
|
|
2915
|
+
// src/fluent/builder.ts
|
|
2916
|
+
var OiiaiBuilderImpl = class _OiiaiBuilderImpl {
|
|
2917
|
+
/** 内部配置状态 */
|
|
2918
|
+
config = {};
|
|
2919
|
+
/**
|
|
2920
|
+
* 创建构建器实例
|
|
2921
|
+
* @param initialConfig - 可选的初始配置
|
|
2922
|
+
*/
|
|
2923
|
+
constructor(initialConfig) {
|
|
2924
|
+
if (initialConfig) {
|
|
2925
|
+
this.config = { ...initialConfig };
|
|
2926
|
+
}
|
|
2927
|
+
}
|
|
2928
|
+
/**
|
|
2929
|
+
* 选择服务提供商
|
|
2930
|
+
* @param provider - Provider 类型
|
|
2931
|
+
* @returns this 支持链式调用
|
|
2932
|
+
*/
|
|
2933
|
+
use(provider) {
|
|
2934
|
+
if (!ProviderRegistry.hasAdapter(provider)) {
|
|
2935
|
+
const supported = ProviderRegistry.listSupported();
|
|
2936
|
+
throw new ValidationError(
|
|
2937
|
+
`\u4E0D\u652F\u6301\u7684 Provider: ${provider}\uFF0C\u652F\u6301\u7684 Provider: ${supported.join(", ")}`
|
|
2938
|
+
);
|
|
2939
|
+
}
|
|
2940
|
+
this.config.provider = provider;
|
|
2941
|
+
return this;
|
|
2942
|
+
}
|
|
2943
|
+
/**
|
|
2944
|
+
* 指定模型
|
|
2945
|
+
* @param modelId - 模型 ID
|
|
2946
|
+
* @returns this 支持链式调用
|
|
2947
|
+
*/
|
|
2948
|
+
model(modelId) {
|
|
2949
|
+
this.config.model = modelId;
|
|
2950
|
+
return this;
|
|
2951
|
+
}
|
|
2952
|
+
/**
|
|
2953
|
+
* 设置系统提示词
|
|
2954
|
+
* @param prompt - 系统提示词
|
|
2955
|
+
* @returns this 支持链式调用
|
|
2956
|
+
*/
|
|
2957
|
+
system(prompt) {
|
|
2958
|
+
this.config.system = prompt;
|
|
2959
|
+
return this;
|
|
2960
|
+
}
|
|
2961
|
+
/**
|
|
2962
|
+
* 设置温度参数
|
|
2963
|
+
* @param value - 温度值 (0-2)
|
|
2964
|
+
* @returns this 支持链式调用
|
|
2965
|
+
*/
|
|
2966
|
+
temperature(value) {
|
|
2967
|
+
if (value < 0 || value > 2) {
|
|
2968
|
+
throw new ValidationError("temperature \u5FC5\u987B\u5728 0-2 \u4E4B\u95F4");
|
|
2969
|
+
}
|
|
2970
|
+
this.config.temperature = value;
|
|
2971
|
+
return this;
|
|
2972
|
+
}
|
|
2973
|
+
/**
|
|
2974
|
+
* 设置最大输出 token 数
|
|
2975
|
+
* @param value - token 数量
|
|
2976
|
+
* @returns this 支持链式调用
|
|
2977
|
+
*/
|
|
2978
|
+
maxTokens(value) {
|
|
2979
|
+
this.config.maxTokens = value;
|
|
2980
|
+
return this;
|
|
2981
|
+
}
|
|
2982
|
+
/**
|
|
2983
|
+
* 配置思考/推理模式
|
|
2984
|
+
* @param config - 推理配置
|
|
2985
|
+
* @returns this 支持链式调用
|
|
2986
|
+
*/
|
|
2987
|
+
reasoning(config) {
|
|
2988
|
+
this.config.reasoning = config;
|
|
2989
|
+
return this;
|
|
2990
|
+
}
|
|
2991
|
+
/**
|
|
2992
|
+
* 设置 API Key
|
|
2993
|
+
* @param apiKey - API Key
|
|
2994
|
+
* @returns this 支持链式调用
|
|
2995
|
+
*/
|
|
2996
|
+
key(apiKey) {
|
|
2997
|
+
this.config.apiKey = apiKey;
|
|
2998
|
+
return this;
|
|
2999
|
+
}
|
|
3000
|
+
/**
|
|
3001
|
+
* 设置基础 URL
|
|
3002
|
+
* @param url - 基础 URL
|
|
3003
|
+
* @returns this 支持链式调用
|
|
3004
|
+
*/
|
|
3005
|
+
baseUrl(url) {
|
|
3006
|
+
this.config.baseUrl = url;
|
|
3007
|
+
return this;
|
|
3008
|
+
}
|
|
3009
|
+
/**
|
|
3010
|
+
* 标记为流式请求
|
|
3011
|
+
* 调用后 ask() 将返回 AsyncGenerator
|
|
3012
|
+
* @returns StreamBuilder 类型
|
|
3013
|
+
*/
|
|
3014
|
+
stream() {
|
|
3015
|
+
this.config.isStream = true;
|
|
3016
|
+
return this;
|
|
3017
|
+
}
|
|
3018
|
+
ask(question) {
|
|
3019
|
+
this.validateConfig();
|
|
3020
|
+
const adapter = ProviderRegistry.getAdapter(this.config.provider);
|
|
3021
|
+
const client = adapter.createClient({
|
|
3022
|
+
apiKey: this.config.apiKey,
|
|
3023
|
+
baseUrl: this.config.baseUrl ?? adapter.defaultBaseUrl
|
|
3024
|
+
});
|
|
3025
|
+
const messages = [];
|
|
3026
|
+
if (this.config.system) {
|
|
3027
|
+
messages.push({ role: "system", content: this.config.system });
|
|
3028
|
+
}
|
|
3029
|
+
messages.push({ role: "user", content: question });
|
|
3030
|
+
const chatOptions = {
|
|
3031
|
+
model: this.config.model,
|
|
1623
3032
|
messages,
|
|
1624
|
-
temperature,
|
|
1625
|
-
|
|
3033
|
+
temperature: this.config.temperature,
|
|
3034
|
+
maxTokens: this.config.maxTokens,
|
|
3035
|
+
reasoning: this.config.reasoning
|
|
1626
3036
|
};
|
|
1627
|
-
if (
|
|
1628
|
-
|
|
3037
|
+
if (this.config.isStream) {
|
|
3038
|
+
return this.executeStreamRequest(adapter, client, chatOptions);
|
|
1629
3039
|
}
|
|
1630
|
-
|
|
1631
|
-
|
|
1632
|
-
|
|
1633
|
-
|
|
1634
|
-
|
|
1635
|
-
|
|
3040
|
+
return this.executeNonStreamRequest(adapter, client, chatOptions);
|
|
3041
|
+
}
|
|
3042
|
+
/**
|
|
3043
|
+
* 执行非流式请求
|
|
3044
|
+
* @param adapter - Provider 适配器
|
|
3045
|
+
* @param client - Provider 客户端
|
|
3046
|
+
* @param chatOptions - 聊天选项
|
|
3047
|
+
* @returns 响应内容
|
|
3048
|
+
*/
|
|
3049
|
+
async executeNonStreamRequest(adapter, client, chatOptions) {
|
|
3050
|
+
const baseUrl = this.config.baseUrl ?? adapter.defaultBaseUrl;
|
|
3051
|
+
const endpoint = adapter.getEndpointUrl(baseUrl);
|
|
3052
|
+
const endpointPath = endpoint.replace(baseUrl, "");
|
|
3053
|
+
const body = adapter.buildChatRequest(chatOptions, false);
|
|
3054
|
+
const response = await client.chat(endpointPath, body);
|
|
3055
|
+
const result = adapter.parseChatResponse(response, this.config.model);
|
|
3056
|
+
return result.content;
|
|
3057
|
+
}
|
|
3058
|
+
/**
|
|
3059
|
+
* 执行流式请求
|
|
3060
|
+
* @param adapter - Provider 适配器
|
|
3061
|
+
* @param client - Provider 客户端
|
|
3062
|
+
* @param chatOptions - 聊天选项
|
|
3063
|
+
* @returns 流式数据块生成器
|
|
3064
|
+
*/
|
|
3065
|
+
async *executeStreamRequest(adapter, client, chatOptions) {
|
|
3066
|
+
const baseUrl = this.config.baseUrl ?? adapter.defaultBaseUrl;
|
|
3067
|
+
const endpoint = adapter.getEndpointUrl(baseUrl);
|
|
3068
|
+
const endpointPath = endpoint.replace(baseUrl, "");
|
|
3069
|
+
const body = adapter.buildChatRequest(chatOptions, true);
|
|
3070
|
+
const response = await client.chatStream(endpointPath, body);
|
|
3071
|
+
const { StreamProcessor: StreamProcessor2 } = (init_stream_processor(), __toCommonJS(stream_processor_exports));
|
|
3072
|
+
yield* StreamProcessor2.processStream(
|
|
3073
|
+
response,
|
|
3074
|
+
(delta) => adapter.extractStreamChunk(delta)
|
|
3075
|
+
);
|
|
3076
|
+
}
|
|
3077
|
+
/**
|
|
3078
|
+
* 验证配置是否完整
|
|
3079
|
+
* @throws ConfigurationError 如果缺少必需参数
|
|
3080
|
+
*/
|
|
3081
|
+
validateConfig() {
|
|
3082
|
+
if (!this.config.provider) {
|
|
3083
|
+
throw new ConfigurationError("\u8BF7\u5148\u8C03\u7528 use(provider) \u9009\u62E9\u670D\u52A1\u63D0\u4F9B\u5546");
|
|
1636
3084
|
}
|
|
1637
|
-
|
|
1638
|
-
|
|
1639
|
-
headers: {
|
|
1640
|
-
"Content-Type": "application/json",
|
|
1641
|
-
Authorization: `Bearer ${this.apiKey}`
|
|
1642
|
-
},
|
|
1643
|
-
body: JSON.stringify(body)
|
|
1644
|
-
});
|
|
1645
|
-
if (!response.ok) {
|
|
1646
|
-
const error = await response.text();
|
|
1647
|
-
throw new Error(`Nova API error: ${response.status} ${error}`);
|
|
3085
|
+
if (!this.config.model) {
|
|
3086
|
+
throw new ConfigurationError("\u8BF7\u5148\u8C03\u7528 model(modelId) \u6307\u5B9A\u6A21\u578B");
|
|
1648
3087
|
}
|
|
1649
|
-
|
|
1650
|
-
|
|
1651
|
-
|
|
1652
|
-
|
|
3088
|
+
if (!this.config.apiKey) {
|
|
3089
|
+
throw new ConfigurationError(
|
|
3090
|
+
"\u8BF7\u5148\u914D\u7F6E API Key\uFF1A\u8C03\u7528 key(apiKey) \u6216\u901A\u8FC7\u9884\u8BBE\u5B9E\u4F8B\u914D\u7F6E"
|
|
3091
|
+
);
|
|
1653
3092
|
}
|
|
1654
|
-
const msg = choice.message;
|
|
1655
|
-
const reasoningContent = msg?.reasoning_content ?? null;
|
|
1656
|
-
return {
|
|
1657
|
-
content: extractTextContent8(msg?.content),
|
|
1658
|
-
reasoning: reasoningContent ? extractTextContent8(reasoningContent) : null,
|
|
1659
|
-
model: result.model ?? model,
|
|
1660
|
-
usage: {
|
|
1661
|
-
promptTokens: result.usage?.prompt_tokens ?? 0,
|
|
1662
|
-
completionTokens: result.usage?.completion_tokens ?? 0,
|
|
1663
|
-
totalTokens: result.usage?.total_tokens ?? 0
|
|
1664
|
-
},
|
|
1665
|
-
finishReason: choice.finish_reason ?? null
|
|
1666
|
-
};
|
|
1667
3093
|
}
|
|
1668
3094
|
/**
|
|
1669
|
-
*
|
|
3095
|
+
* 获取当前配置(用于测试)
|
|
3096
|
+
* @returns 当前配置的副本
|
|
1670
3097
|
*/
|
|
1671
|
-
|
|
1672
|
-
|
|
1673
|
-
|
|
1674
|
-
|
|
1675
|
-
|
|
1676
|
-
|
|
1677
|
-
|
|
1678
|
-
|
|
1679
|
-
|
|
1680
|
-
|
|
3098
|
+
getConfig() {
|
|
3099
|
+
return { ...this.config };
|
|
3100
|
+
}
|
|
3101
|
+
/**
|
|
3102
|
+
* 克隆构建器(用于创建新实例)
|
|
3103
|
+
* @returns 新的构建器实例
|
|
3104
|
+
*/
|
|
3105
|
+
clone() {
|
|
3106
|
+
return new _OiiaiBuilderImpl({ ...this.config });
|
|
3107
|
+
}
|
|
3108
|
+
};
|
|
3109
|
+
function createBuilder(initialConfig) {
|
|
3110
|
+
return new OiiaiBuilderImpl(initialConfig);
|
|
3111
|
+
}
|
|
3112
|
+
var oiiai = new Proxy({}, {
|
|
3113
|
+
get(_target, prop) {
|
|
3114
|
+
const builder = new OiiaiBuilderImpl();
|
|
3115
|
+
const value = builder[prop];
|
|
3116
|
+
if (typeof value === "function") {
|
|
3117
|
+
return value.bind(builder);
|
|
1681
3118
|
}
|
|
1682
|
-
|
|
1683
|
-
|
|
1684
|
-
|
|
1685
|
-
|
|
1686
|
-
|
|
3119
|
+
return value;
|
|
3120
|
+
}
|
|
3121
|
+
});
|
|
3122
|
+
|
|
3123
|
+
// src/fluent/preset-provider.ts
|
|
3124
|
+
var PresetProviderImpl = class _PresetProviderImpl {
|
|
3125
|
+
/** Provider 名称 */
|
|
3126
|
+
name;
|
|
3127
|
+
/** 内部配置状态 */
|
|
3128
|
+
config = {};
|
|
3129
|
+
/** 环境变量名称映射 */
|
|
3130
|
+
static ENV_KEY_MAP = {
|
|
3131
|
+
deepseek: "DEEPSEEK_API_KEY",
|
|
3132
|
+
openrouter: "OPENROUTER_API_KEY",
|
|
3133
|
+
gemini: "GEMINI_API_KEY",
|
|
3134
|
+
groq: "GROQ_API_KEY",
|
|
3135
|
+
huggingface: "HUGGINGFACE_API_KEY",
|
|
3136
|
+
modelscope: "MODELSCOPE_API_KEY",
|
|
3137
|
+
poe: "POE_API_KEY",
|
|
3138
|
+
nova: "NOVA_API_KEY"
|
|
3139
|
+
};
|
|
3140
|
+
/**
|
|
3141
|
+
* 创建预设实例
|
|
3142
|
+
* @param providerType - Provider 类型
|
|
3143
|
+
*/
|
|
3144
|
+
constructor(providerType) {
|
|
3145
|
+
if (!ProviderRegistry.hasAdapter(providerType)) {
|
|
3146
|
+
const supported = ProviderRegistry.listSupported();
|
|
3147
|
+
throw new ValidationError(
|
|
3148
|
+
`\u4E0D\u652F\u6301\u7684 Provider: ${providerType}\uFF0C\u652F\u6301\u7684 Provider: ${supported.join(", ")}`
|
|
3149
|
+
);
|
|
1687
3150
|
}
|
|
1688
|
-
|
|
1689
|
-
|
|
1690
|
-
|
|
1691
|
-
|
|
1692
|
-
|
|
1693
|
-
|
|
1694
|
-
|
|
1695
|
-
|
|
1696
|
-
|
|
1697
|
-
|
|
1698
|
-
|
|
3151
|
+
this.name = providerType;
|
|
3152
|
+
}
|
|
3153
|
+
/**
|
|
3154
|
+
* 配置 API Key 和其他选项
|
|
3155
|
+
* @param options - 配置选项
|
|
3156
|
+
* @returns this 支持链式调用
|
|
3157
|
+
*/
|
|
3158
|
+
configure(options) {
|
|
3159
|
+
this.config.apiKey = options.apiKey;
|
|
3160
|
+
if (options.baseUrl) {
|
|
3161
|
+
this.config.baseUrl = options.baseUrl;
|
|
1699
3162
|
}
|
|
1700
|
-
|
|
1701
|
-
|
|
1702
|
-
|
|
3163
|
+
return this;
|
|
3164
|
+
}
|
|
3165
|
+
/**
|
|
3166
|
+
* 从环境变量读取配置
|
|
3167
|
+
* 环境变量名格式: {PROVIDER}_API_KEY (如 DEEPSEEK_API_KEY)
|
|
3168
|
+
* @returns this 支持链式调用
|
|
3169
|
+
*/
|
|
3170
|
+
fromEnv() {
|
|
3171
|
+
const envKey = _PresetProviderImpl.ENV_KEY_MAP[this.name];
|
|
3172
|
+
const apiKey = process.env[envKey];
|
|
3173
|
+
if (!apiKey) {
|
|
3174
|
+
throw new ConfigurationError(`\u73AF\u5883\u53D8\u91CF ${envKey} \u672A\u8BBE\u7F6E`);
|
|
1703
3175
|
}
|
|
1704
|
-
|
|
1705
|
-
|
|
1706
|
-
|
|
1707
|
-
|
|
1708
|
-
|
|
1709
|
-
|
|
1710
|
-
|
|
1711
|
-
|
|
1712
|
-
|
|
1713
|
-
|
|
1714
|
-
|
|
1715
|
-
|
|
1716
|
-
|
|
1717
|
-
|
|
1718
|
-
|
|
1719
|
-
|
|
1720
|
-
|
|
1721
|
-
|
|
1722
|
-
|
|
1723
|
-
|
|
1724
|
-
|
|
1725
|
-
|
|
1726
|
-
|
|
1727
|
-
|
|
1728
|
-
|
|
1729
|
-
|
|
1730
|
-
|
|
1731
|
-
|
|
1732
|
-
|
|
1733
|
-
|
|
1734
|
-
|
|
1735
|
-
|
|
3176
|
+
this.config.apiKey = apiKey;
|
|
3177
|
+
return this;
|
|
3178
|
+
}
|
|
3179
|
+
/**
|
|
3180
|
+
* 简单问答(非流式)
|
|
3181
|
+
* @param model - 模型 ID
|
|
3182
|
+
* @param question - 问题
|
|
3183
|
+
* @param options - 可选配置
|
|
3184
|
+
* @returns 响应内容
|
|
3185
|
+
*/
|
|
3186
|
+
async ask(model, question, options) {
|
|
3187
|
+
this.validateApiKey();
|
|
3188
|
+
const builder = this.createConfiguredBuilder(model, options);
|
|
3189
|
+
const result = builder.ask(question);
|
|
3190
|
+
return result;
|
|
3191
|
+
}
|
|
3192
|
+
/**
|
|
3193
|
+
* 流式问答
|
|
3194
|
+
* @param model - 模型 ID
|
|
3195
|
+
* @param question - 问题
|
|
3196
|
+
* @param options - 可选配置
|
|
3197
|
+
* @returns 流式数据块生成器
|
|
3198
|
+
*/
|
|
3199
|
+
async *stream(model, question, options) {
|
|
3200
|
+
this.validateApiKey();
|
|
3201
|
+
const builder = this.createConfiguredBuilder(model, options);
|
|
3202
|
+
const streamBuilder = builder.stream();
|
|
3203
|
+
yield* streamBuilder.ask(question);
|
|
3204
|
+
}
|
|
3205
|
+
/**
|
|
3206
|
+
* 带回调的流式问答
|
|
3207
|
+
* @param model - 模型 ID
|
|
3208
|
+
* @param question - 问题
|
|
3209
|
+
* @param callbacks - 回调函数
|
|
3210
|
+
* @returns Promise,完成时 resolve
|
|
3211
|
+
*/
|
|
3212
|
+
async streamWithCallbacks(model, question, callbacks) {
|
|
3213
|
+
this.validateApiKey();
|
|
3214
|
+
let reasoningContent = "";
|
|
3215
|
+
let contentText = "";
|
|
3216
|
+
const builder = this.createConfiguredBuilder(model);
|
|
3217
|
+
const streamBuilder = builder.stream();
|
|
3218
|
+
for await (const chunk of streamBuilder.ask(question)) {
|
|
3219
|
+
if (chunk.type === "reasoning") {
|
|
3220
|
+
reasoningContent += chunk.text;
|
|
3221
|
+
callbacks.onReasoning?.(chunk.text);
|
|
3222
|
+
} else if (chunk.type === "content") {
|
|
3223
|
+
contentText += chunk.text;
|
|
3224
|
+
callbacks.onContent?.(chunk.text);
|
|
1736
3225
|
}
|
|
1737
|
-
} finally {
|
|
1738
|
-
reader.releaseLock();
|
|
1739
3226
|
}
|
|
3227
|
+
callbacks.onDone?.({
|
|
3228
|
+
reasoning: reasoningContent,
|
|
3229
|
+
content: contentText
|
|
3230
|
+
});
|
|
1740
3231
|
}
|
|
1741
|
-
|
|
1742
|
-
|
|
1743
|
-
|
|
1744
|
-
|
|
1745
|
-
|
|
1746
|
-
|
|
1747
|
-
|
|
1748
|
-
|
|
1749
|
-
|
|
1750
|
-
|
|
1751
|
-
|
|
1752
|
-
|
|
1753
|
-
|
|
1754
|
-
|
|
1755
|
-
|
|
1756
|
-
|
|
1757
|
-
|
|
1758
|
-
|
|
1759
|
-
|
|
1760
|
-
|
|
1761
|
-
|
|
1762
|
-
|
|
1763
|
-
|
|
1764
|
-
|
|
3232
|
+
/**
|
|
3233
|
+
* 获取构建器(预配置 provider 和 model)
|
|
3234
|
+
* @param model - 模型 ID
|
|
3235
|
+
* @returns 预配置的构建器
|
|
3236
|
+
*/
|
|
3237
|
+
builder(model) {
|
|
3238
|
+
this.validateApiKey();
|
|
3239
|
+
const adapter = ProviderRegistry.getAdapter(this.name);
|
|
3240
|
+
const builder = new OiiaiBuilderImpl({
|
|
3241
|
+
provider: this.name,
|
|
3242
|
+
model,
|
|
3243
|
+
apiKey: this.config.apiKey,
|
|
3244
|
+
baseUrl: this.config.baseUrl ?? adapter.defaultBaseUrl
|
|
3245
|
+
});
|
|
3246
|
+
return builder;
|
|
3247
|
+
}
|
|
3248
|
+
/**
|
|
3249
|
+
* 创建多轮对话会话
|
|
3250
|
+
* @param model - 模型 ID
|
|
3251
|
+
* @param options - 会话配置
|
|
3252
|
+
* @returns 对话会话实例
|
|
3253
|
+
*/
|
|
3254
|
+
chat(model, options) {
|
|
3255
|
+
this.validateApiKey();
|
|
3256
|
+
const { ChatSessionImpl: ChatSessionImpl2 } = (init_chat_session(), __toCommonJS(chat_session_exports));
|
|
3257
|
+
return new ChatSessionImpl2(this, model, options);
|
|
3258
|
+
}
|
|
3259
|
+
/**
|
|
3260
|
+
* 验证 API Key 是否已配置
|
|
3261
|
+
* @throws ConfigurationError 如果未配置 API Key
|
|
3262
|
+
*/
|
|
3263
|
+
validateApiKey() {
|
|
3264
|
+
if (!this.config.apiKey) {
|
|
3265
|
+
throw new ConfigurationError(
|
|
3266
|
+
`\u8BF7\u5148\u914D\u7F6E API Key\uFF1A\u8C03\u7528 configure({ apiKey: 'xxx' }) \u6216 fromEnv()`
|
|
3267
|
+
);
|
|
3268
|
+
}
|
|
3269
|
+
}
|
|
3270
|
+
/**
|
|
3271
|
+
* 创建已配置的构建器
|
|
3272
|
+
* @param model - 模型 ID
|
|
3273
|
+
* @param options - 可选配置
|
|
3274
|
+
* @returns 配置好的构建器
|
|
3275
|
+
*/
|
|
3276
|
+
createConfiguredBuilder(model, options) {
|
|
3277
|
+
const adapter = ProviderRegistry.getAdapter(this.name);
|
|
3278
|
+
const builder = new OiiaiBuilderImpl({
|
|
3279
|
+
provider: this.name,
|
|
3280
|
+
model,
|
|
3281
|
+
apiKey: this.config.apiKey,
|
|
3282
|
+
baseUrl: this.config.baseUrl ?? adapter.defaultBaseUrl
|
|
3283
|
+
});
|
|
3284
|
+
if (options?.system) {
|
|
3285
|
+
builder.system(options.system);
|
|
3286
|
+
}
|
|
3287
|
+
if (options?.temperature !== void 0) {
|
|
3288
|
+
builder.temperature(options.temperature);
|
|
3289
|
+
}
|
|
3290
|
+
if (options?.maxTokens !== void 0) {
|
|
3291
|
+
builder.maxTokens(options.maxTokens);
|
|
3292
|
+
}
|
|
3293
|
+
if (options?.reasoning) {
|
|
3294
|
+
builder.reasoning(options.reasoning);
|
|
3295
|
+
}
|
|
3296
|
+
return builder;
|
|
3297
|
+
}
|
|
3298
|
+
/**
|
|
3299
|
+
* 获取当前配置(用于测试)
|
|
3300
|
+
* @returns 当前配置的副本
|
|
3301
|
+
*/
|
|
3302
|
+
getConfig() {
|
|
3303
|
+
return { ...this.config };
|
|
1765
3304
|
}
|
|
1766
|
-
}
|
|
1767
|
-
var ai = {
|
|
1768
|
-
openrouter: (apiKey, baseUrl) => createProvider({ provider: "openrouter", apiKey, baseUrl }),
|
|
1769
|
-
gemini: (apiKey, baseUrl) => createProvider({ provider: "gemini", apiKey, baseUrl }),
|
|
1770
|
-
groq: (apiKey, baseUrl) => createProvider({ provider: "groq", apiKey, baseUrl }),
|
|
1771
|
-
huggingface: (apiKey, baseUrl) => createProvider({ provider: "huggingface", apiKey, baseUrl }),
|
|
1772
|
-
modelscope: (apiKey, baseUrl) => createProvider({ provider: "modelscope", apiKey, baseUrl }),
|
|
1773
|
-
deepseek: (apiKey, baseUrl) => createProvider({ provider: "deepseek", apiKey, baseUrl }),
|
|
1774
|
-
poe: (apiKey, baseUrl) => createProvider({ provider: "poe", apiKey, baseUrl }),
|
|
1775
|
-
nova: (apiKey, baseUrl) => createProvider({ provider: "nova", apiKey, baseUrl })
|
|
1776
3305
|
};
|
|
3306
|
+
|
|
3307
|
+
// src/fluent/index.ts
|
|
3308
|
+
init_chat_session();
|
|
3309
|
+
|
|
3310
|
+
// src/fluent/preset-instances.ts
|
|
3311
|
+
var deepseek = new PresetProviderImpl("deepseek");
|
|
3312
|
+
var openrouter = new PresetProviderImpl("openrouter");
|
|
3313
|
+
var gemini = new PresetProviderImpl("gemini");
|
|
3314
|
+
var groq = new PresetProviderImpl("groq");
|
|
3315
|
+
var huggingface = new PresetProviderImpl(
|
|
3316
|
+
"huggingface"
|
|
3317
|
+
);
|
|
3318
|
+
var modelscope = new PresetProviderImpl("modelscope");
|
|
3319
|
+
var poe = new PresetProviderImpl("poe");
|
|
3320
|
+
var nova = new PresetProviderImpl("nova");
|
|
1777
3321
|
export {
|
|
3322
|
+
APIError,
|
|
3323
|
+
BaseAdapter,
|
|
1778
3324
|
BaseProvider,
|
|
3325
|
+
CONFIG_DEFAULTS,
|
|
3326
|
+
ConfigManager,
|
|
3327
|
+
ConfigValidator,
|
|
3328
|
+
ConfigurationError,
|
|
3329
|
+
DeepSeekAdapter,
|
|
1779
3330
|
EFFORT_TOKEN_MAP,
|
|
3331
|
+
ValidationError as FluentValidationError,
|
|
3332
|
+
GeminiAdapter,
|
|
1780
3333
|
GeminiProvider,
|
|
3334
|
+
GroqAdapter,
|
|
1781
3335
|
GroqProvider,
|
|
3336
|
+
HttpProviderClient,
|
|
3337
|
+
HuggingFaceAdapter,
|
|
1782
3338
|
HuggingFaceProvider,
|
|
3339
|
+
ModelScopeAdapter,
|
|
1783
3340
|
ModelScopeProvider,
|
|
3341
|
+
NetworkError,
|
|
3342
|
+
NovaAdapter,
|
|
3343
|
+
OpenRouterAdapter,
|
|
1784
3344
|
OpenRouterProvider,
|
|
3345
|
+
PoeAdapter,
|
|
3346
|
+
ProviderError,
|
|
3347
|
+
ProviderRegistry,
|
|
3348
|
+
RegistryError,
|
|
3349
|
+
RequestBuilder,
|
|
3350
|
+
StreamProcessor,
|
|
3351
|
+
TimeoutError,
|
|
3352
|
+
VALID_PROVIDERS,
|
|
1785
3353
|
ai,
|
|
1786
|
-
|
|
3354
|
+
createBuilder,
|
|
3355
|
+
createBuiltInAdapters,
|
|
3356
|
+
createProvider,
|
|
3357
|
+
deepseek,
|
|
3358
|
+
gemini,
|
|
3359
|
+
groq,
|
|
3360
|
+
huggingface,
|
|
3361
|
+
modelscope,
|
|
3362
|
+
nova,
|
|
3363
|
+
oiiai,
|
|
3364
|
+
openrouter,
|
|
3365
|
+
poe
|
|
1787
3366
|
};
|
|
1788
3367
|
//# sourceMappingURL=index.mjs.map
|