sdkwork-browser-agent 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. package/README.md +228 -0
  2. package/README.zh.md +228 -0
  3. package/dist/agent-Bpxmkz8W.d.ts +197 -0
  4. package/dist/agent-kexkkI13.d.cts +197 -0
  5. package/dist/browser/agent-Bpxmkz8W.d.ts +197 -0
  6. package/dist/browser/chunk-7W2JJCSS.js +276 -0
  7. package/dist/browser/chunk-7W2JJCSS.js.map +1 -0
  8. package/dist/browser/chunk-BHRFRGR7.js +144 -0
  9. package/dist/browser/chunk-BHRFRGR7.js.map +1 -0
  10. package/dist/browser/chunk-CLP6UNSV.js +285 -0
  11. package/dist/browser/chunk-CLP6UNSV.js.map +1 -0
  12. package/dist/browser/chunk-HXLRBB7S.js +1569 -0
  13. package/dist/browser/chunk-HXLRBB7S.js.map +1 -0
  14. package/dist/browser/chunk-VJEFLRZT.js +1720 -0
  15. package/dist/browser/chunk-VJEFLRZT.js.map +1 -0
  16. package/dist/browser/index.d.ts +842 -0
  17. package/dist/browser/index.js +3293 -0
  18. package/dist/browser/index.js.map +1 -0
  19. package/dist/browser/llm/index.d.ts +235 -0
  20. package/dist/browser/llm/index.js +29 -0
  21. package/dist/browser/llm/index.js.map +1 -0
  22. package/dist/browser/mcp/index.d.ts +63 -0
  23. package/dist/browser/mcp/index.js +9 -0
  24. package/dist/browser/mcp/index.js.map +1 -0
  25. package/dist/browser/provider-Dna36xA-.d.ts +105 -0
  26. package/dist/browser/skills/index.d.ts +401 -0
  27. package/dist/browser/skills/index.js +31 -0
  28. package/dist/browser/skills/index.js.map +1 -0
  29. package/dist/browser/storage/index.d.ts +64 -0
  30. package/dist/browser/storage/index.js +15 -0
  31. package/dist/browser/storage/index.js.map +1 -0
  32. package/dist/browser/tools/index.d.ts +45 -0
  33. package/dist/browser/tools/index.js +15 -0
  34. package/dist/browser/tools/index.js.map +1 -0
  35. package/dist/browser/types-CG5I-byI.d.ts +30 -0
  36. package/dist/chunk-56J3IBXZ.js +144 -0
  37. package/dist/chunk-56J3IBXZ.js.map +1 -0
  38. package/dist/chunk-5XTVS5MB.js +1720 -0
  39. package/dist/chunk-5XTVS5MB.js.map +1 -0
  40. package/dist/chunk-6AYIRBGI.js +166 -0
  41. package/dist/chunk-6AYIRBGI.js.map +1 -0
  42. package/dist/chunk-C2EYJHXW.cjs +276 -0
  43. package/dist/chunk-C2EYJHXW.cjs.map +1 -0
  44. package/dist/chunk-HOZQ445W.cjs +166 -0
  45. package/dist/chunk-HOZQ445W.cjs.map +1 -0
  46. package/dist/chunk-KZNZ6CGD.cjs +144 -0
  47. package/dist/chunk-KZNZ6CGD.cjs.map +1 -0
  48. package/dist/chunk-XFMT5ZA4.js +276 -0
  49. package/dist/chunk-XFMT5ZA4.js.map +1 -0
  50. package/dist/chunk-XPGICLEJ.cjs +1720 -0
  51. package/dist/chunk-XPGICLEJ.cjs.map +1 -0
  52. package/dist/index.cjs +1311 -0
  53. package/dist/index.cjs.map +1 -0
  54. package/dist/index.d.cts +395 -0
  55. package/dist/index.d.ts +395 -0
  56. package/dist/index.js +1311 -0
  57. package/dist/index.js.map +1 -0
  58. package/dist/llm/index.cjs +29 -0
  59. package/dist/llm/index.cjs.map +1 -0
  60. package/dist/llm/index.d.cts +235 -0
  61. package/dist/llm/index.d.ts +235 -0
  62. package/dist/llm/index.js +29 -0
  63. package/dist/llm/index.js.map +1 -0
  64. package/dist/mcp/index.cjs +9 -0
  65. package/dist/mcp/index.cjs.map +1 -0
  66. package/dist/mcp/index.d.cts +63 -0
  67. package/dist/mcp/index.d.ts +63 -0
  68. package/dist/mcp/index.js +9 -0
  69. package/dist/mcp/index.js.map +1 -0
  70. package/dist/node/agent-Bpxmkz8W.d.ts +197 -0
  71. package/dist/node/agent-kexkkI13.d.cts +197 -0
  72. package/dist/node/chunk-7W2JJCSS.js +276 -0
  73. package/dist/node/chunk-7W2JJCSS.js.map +1 -0
  74. package/dist/node/chunk-BHRFRGR7.js +144 -0
  75. package/dist/node/chunk-BHRFRGR7.js.map +1 -0
  76. package/dist/node/chunk-CLP6UNSV.js +285 -0
  77. package/dist/node/chunk-CLP6UNSV.js.map +1 -0
  78. package/dist/node/chunk-HXLRBB7S.js +1569 -0
  79. package/dist/node/chunk-HXLRBB7S.js.map +1 -0
  80. package/dist/node/chunk-IYG37UN3.cjs +144 -0
  81. package/dist/node/chunk-IYG37UN3.cjs.map +1 -0
  82. package/dist/node/chunk-JF33ZOMB.cjs +285 -0
  83. package/dist/node/chunk-JF33ZOMB.cjs.map +1 -0
  84. package/dist/node/chunk-KXXS33G3.cjs +276 -0
  85. package/dist/node/chunk-KXXS33G3.cjs.map +1 -0
  86. package/dist/node/chunk-MTFOABGC.cjs +1720 -0
  87. package/dist/node/chunk-MTFOABGC.cjs.map +1 -0
  88. package/dist/node/chunk-VJEFLRZT.js +1720 -0
  89. package/dist/node/chunk-VJEFLRZT.js.map +1 -0
  90. package/dist/node/chunk-YDHQCPSN.cjs +1569 -0
  91. package/dist/node/chunk-YDHQCPSN.cjs.map +1 -0
  92. package/dist/node/index.cjs +3293 -0
  93. package/dist/node/index.cjs.map +1 -0
  94. package/dist/node/index.d.cts +842 -0
  95. package/dist/node/index.d.ts +842 -0
  96. package/dist/node/index.js +3293 -0
  97. package/dist/node/index.js.map +1 -0
  98. package/dist/node/llm/index.cjs +29 -0
  99. package/dist/node/llm/index.cjs.map +1 -0
  100. package/dist/node/llm/index.d.cts +235 -0
  101. package/dist/node/llm/index.d.ts +235 -0
  102. package/dist/node/llm/index.js +29 -0
  103. package/dist/node/llm/index.js.map +1 -0
  104. package/dist/node/mcp/index.cjs +9 -0
  105. package/dist/node/mcp/index.cjs.map +1 -0
  106. package/dist/node/mcp/index.d.cts +63 -0
  107. package/dist/node/mcp/index.d.ts +63 -0
  108. package/dist/node/mcp/index.js +9 -0
  109. package/dist/node/mcp/index.js.map +1 -0
  110. package/dist/node/provider-Dna36xA-.d.cts +105 -0
  111. package/dist/node/provider-Dna36xA-.d.ts +105 -0
  112. package/dist/node/skills/index.cjs +31 -0
  113. package/dist/node/skills/index.cjs.map +1 -0
  114. package/dist/node/skills/index.d.cts +401 -0
  115. package/dist/node/skills/index.d.ts +401 -0
  116. package/dist/node/skills/index.js +31 -0
  117. package/dist/node/skills/index.js.map +1 -0
  118. package/dist/node/storage/index.cjs +15 -0
  119. package/dist/node/storage/index.cjs.map +1 -0
  120. package/dist/node/storage/index.d.cts +64 -0
  121. package/dist/node/storage/index.d.ts +64 -0
  122. package/dist/node/storage/index.js +15 -0
  123. package/dist/node/storage/index.js.map +1 -0
  124. package/dist/node/tools/index.cjs +15 -0
  125. package/dist/node/tools/index.cjs.map +1 -0
  126. package/dist/node/tools/index.d.cts +45 -0
  127. package/dist/node/tools/index.d.ts +45 -0
  128. package/dist/node/tools/index.js +15 -0
  129. package/dist/node/tools/index.js.map +1 -0
  130. package/dist/node/types-CG5I-byI.d.cts +30 -0
  131. package/dist/node/types-CG5I-byI.d.ts +30 -0
  132. package/dist/provider-Dna36xA-.d.cts +105 -0
  133. package/dist/provider-Dna36xA-.d.ts +105 -0
  134. package/dist/skills/index.cjs +15 -0
  135. package/dist/skills/index.cjs.map +1 -0
  136. package/dist/skills/index.d.cts +43 -0
  137. package/dist/skills/index.d.ts +43 -0
  138. package/dist/skills/index.js +15 -0
  139. package/dist/skills/index.js.map +1 -0
  140. package/dist/tools/index.cjs +15 -0
  141. package/dist/tools/index.cjs.map +1 -0
  142. package/dist/tools/index.d.cts +45 -0
  143. package/dist/tools/index.d.ts +45 -0
  144. package/dist/tools/index.js +15 -0
  145. package/dist/tools/index.js.map +1 -0
  146. package/package.json +150 -0
package/dist/chunk-5XTVS5MB.js
@@ -0,0 +1,1720 @@
+ // src/llm/provider.ts
+ var LLMProviderRegistry = class {
+ providers = /* @__PURE__ */ new Map();
+ instances = /* @__PURE__ */ new Map();
+ register(name, factory) {
+ this.providers.set(name, factory);
+ }
+ create(name, config) {
+ const factory = this.providers.get(name);
+ if (!factory) {
+ throw new Error(`Unknown LLM provider: ${name}`);
+ }
+ const instance = factory(config);
+ this.instances.set(name, instance);
+ return instance;
+ }
+ get(name) {
+ return this.instances.get(name);
+ }
+ listProviders() {
+ return Array.from(this.providers.keys());
+ }
+ isRegistered(name) {
+ return this.providers.has(name);
+ }
+ };
+ var globalProviderRegistry = new LLMProviderRegistry();
+ var LLMManager = class {
+ constructor(registry = globalProviderRegistry) {
+ this.registry = registry;
+ }
+ providers = /* @__PURE__ */ new Map();
+ defaultProvider;
+ addProvider(name, config) {
+ const provider = this.registry.create(name, config);
+ this.providers.set(name, provider);
+ return provider;
+ }
+ setDefaultProvider(name) {
+ if (!this.providers.has(name)) {
+ throw new Error(`Provider '${name}' not found. Add it first.`);
+ }
+ this.defaultProvider = name;
+ }
+ getProvider(name) {
+ const providerName = name || this.defaultProvider;
+ if (!providerName) {
+ throw new Error("No provider specified and no default provider set");
+ }
+ const provider = this.providers.get(providerName);
+ if (!provider) {
+ throw new Error(`Provider '${providerName}' not found`);
+ }
+ return provider;
+ }
+ async complete(request, providerName) {
+ const provider = this.getProvider(providerName);
+ return provider.complete(request);
+ }
+ async *stream(request, providerName) {
+ const provider = this.getProvider(providerName);
+ yield* provider.stream(request);
+ }
+ listProviders() {
+ return Array.from(this.providers.keys());
+ }
+ };
+
69
+ // src/llm/providers/openai.ts
70
+ var OpenAIProvider = class {
71
+ name = "openai";
72
+ supportedModels = [
+ // GPT-5.2 series (latest flagship models - 2025)
+ "gpt-5.2",
+ "gpt-5.2-2025-01-28",
+ "gpt-5.2-pro",
+ "gpt-5.2-pro-2025-01-28",
+ "gpt-5.2-codex",
+ "gpt-5.2-codex-2025-01-28",
+ // GPT-5.1 series
+ "gpt-5.1",
+ "gpt-5.1-2025-01-28",
+ "gpt-5.1-codex",
+ "gpt-5.1-codex-2025-01-28",
+ "gpt-5.1-codex-max",
+ // GPT-5 series
+ "gpt-5",
+ "gpt-5-2025-01-28",
+ "gpt-5-pro",
+ // GPT-5 Mini/Nano (lightweight)
+ "gpt-5-mini",
+ "gpt-5-mini-2025-01-28",
+ "gpt-5-nano",
+ "gpt-5-nano-2025-01-28",
+ // GPT-4.1 series
+ "gpt-4.1",
+ "gpt-4.1-2025-01-28",
+ "gpt-4.1-mini",
+ "gpt-4.1-nano",
+ // GPT-4o series (legacy, still supported)
+ "gpt-4o",
+ "gpt-4o-2024-11-20",
+ "gpt-4o-2024-08-06",
+ "gpt-4o-mini",
+ "gpt-4o-mini-2024-07-18",
+ // o series (reasoning models)
+ "o3",
+ "o3-2025-01-28",
+ "o3-pro",
+ "o4-mini",
+ "o4-mini-2025-01-28",
+ "o1",
+ "o1-2024-12-17",
+ "o1-pro",
+ // Open-source models
+ "gpt-oss-120b",
+ "gpt-oss-20b",
+ // Specialized models
+ "o3-deep-research",
+ "o4-mini-deep-research",
+ "computer-use-preview"
+ ];
123
+ _apiKey;
124
+ _baseUrl;
125
+ _defaultParams;
126
+ _timeout;
127
+ _organization;
128
+ _project;
129
+ constructor(config) {
130
+ this._apiKey = config.apiKey || "";
131
+ this._baseUrl = config.baseUrl || "https://api.openai.com/v1";
132
+ this._defaultParams = config.defaultParams || {};
133
+ this._timeout = config.timeout || 6e4;
134
+ this._organization = config.organization;
135
+ this._project = config.project;
136
+ }
137
+ validateConfig() {
138
+ return !!this._apiKey;
139
+ }
140
+ async makeRequest(endpoint, body) {
141
+ const headers = {
142
+ "Content-Type": "application/json",
143
+ Authorization: `Bearer ${this._apiKey}`
144
+ };
145
+ if (this._organization) {
146
+ headers["OpenAI-Organization"] = this._organization;
147
+ }
148
+ if (this._project) {
149
+ headers["OpenAI-Project"] = this._project;
150
+ }
151
+ const controller = new AbortController();
152
+ const timeoutId = setTimeout(() => controller.abort(), this._timeout);
153
+ try {
154
+ const response = await fetch(`${this._baseUrl}${endpoint}`, {
155
+ method: "POST",
156
+ headers,
157
+ body: JSON.stringify(body),
158
+ signal: controller.signal
159
+ });
160
+ clearTimeout(timeoutId);
161
+ if (!response.ok) {
162
+ const error = await response.text();
163
+ throw new Error(`OpenAI API error: ${response.status} - ${error}`);
164
+ }
165
+ return response;
166
+ } catch (error) {
167
+ clearTimeout(timeoutId);
168
+ throw error;
169
+ }
170
+ }
171
+ async complete(request) {
172
+ const body = this.buildRequestBody(request);
173
+ const response = await this.makeRequest("/chat/completions", body);
174
+ const data = await response.json();
175
+ return this.parseResponse(data);
176
+ }
177
+ async *stream(request) {
178
+ const body = this.buildRequestBody(request, true);
179
+ const response = await this.makeRequest("/chat/completions", body);
180
+ const reader = response.body?.getReader();
181
+ if (!reader) {
182
+ throw new Error("No response body");
183
+ }
184
+ const decoder = new TextDecoder();
185
+ let buffer = "";
186
+ try {
187
+ while (true) {
188
+ const { done, value } = await reader.read();
189
+ if (done) break;
190
+ buffer += decoder.decode(value, { stream: true });
191
+ const lines = buffer.split("\n");
192
+ buffer = lines.pop() || "";
193
+ for (const line of lines) {
194
+ if (line.startsWith("data: ")) {
195
+ const data = line.slice(6);
196
+ if (data === "[DONE]") return;
197
+ try {
198
+ const chunk = JSON.parse(data);
199
+ yield this.parseStreamChunk(chunk);
200
+ } catch {
201
+ }
202
+ }
203
+ }
204
+ }
205
+ } finally {
206
+ reader.releaseLock();
207
+ }
208
+ }
209
+ buildRequestBody(request, stream = false) {
210
+ return {
211
+ model: request.model || this._defaultParams.model || "gpt-5.2",
212
+ messages: this.formatMessages(request.messages),
213
+ temperature: request.temperature ?? this._defaultParams.temperature ?? 0.7,
214
+ max_tokens: request.max_tokens ?? this._defaultParams.max_tokens,
215
+ top_p: request.top_p ?? this._defaultParams.top_p ?? 1,
216
+ stream,
217
+ tools: request.tools,
218
+ tool_choice: request.tool_choice
219
+ };
220
+ }
221
+ formatMessages(messages) {
222
+ return messages.map((msg) => {
223
+ const formatted = {
224
+ role: msg.role,
225
+ content: msg.content
226
+ };
227
+ if (msg.name) formatted.name = msg.name;
228
+ if (msg.tool_calls) formatted.tool_calls = msg.tool_calls;
229
+ if (msg.tool_call_id) formatted.tool_call_id = msg.tool_call_id;
230
+ return formatted;
231
+ });
232
+ }
233
+ parseResponse(data) {
234
+ const choice = data.choices[0];
235
+ const message = choice.message;
236
+ return {
237
+ id: data.id,
238
+ model: data.model,
239
+ content: message.content || "",
240
+ role: "assistant",
241
+ tool_calls: message.tool_calls,
242
+ usage: data.usage,
243
+ finish_reason: choice.finish_reason
244
+ };
245
+ }
246
+ parseStreamChunk(data) {
247
+ const choice = data.choices[0];
248
+ const delta = choice.delta;
249
+ return {
250
+ id: data.id,
251
+ model: data.model,
252
+ delta: {
253
+ content: delta.content,
254
+ role: delta.role,
255
+ tool_calls: delta.tool_calls
256
+ },
257
+ finish_reason: choice.finish_reason
258
+ };
259
+ }
260
+ };
261
+ globalProviderRegistry.register(
262
+ "openai",
263
+ (config) => new OpenAIProvider(config)
264
+ );
265
+
266
+ // src/llm/providers/anthropic.ts
267
+ var AnthropicProvider = class {
268
+ name = "anthropic";
269
+ supportedModels = [
+ // Claude 4.5 series (latest - 2025)
+ "claude-sonnet-4-5",
+ "claude-sonnet-4-5-20250929",
+ "claude-sonnet-4-5-latest",
+ "claude-haiku-4-5",
+ "claude-haiku-4-5-20251001",
+ "claude-haiku-4-5-latest",
+ "claude-opus-4-5",
+ "claude-opus-4-5-20251101",
+ "claude-opus-4-5-latest",
+ // Claude 4.1/4.0 series
+ "claude-opus-4-1",
+ "claude-opus-4-1-20250805",
+ "claude-opus-4-1-latest",
+ "claude-sonnet-4",
+ "claude-sonnet-4-20250514",
+ "claude-sonnet-4-0",
+ "claude-sonnet-4-latest",
+ // Claude 3.7 series
+ "claude-3-7-sonnet",
+ "claude-3-7-sonnet-20250219",
+ "claude-3-7-sonnet-latest",
+ // Claude 3.5 series (legacy)
+ "claude-3-5-sonnet",
+ "claude-3-5-sonnet-20241022",
+ "claude-3-5-sonnet-20240620",
+ "claude-3-5-haiku",
+ "claude-3-5-haiku-20241022",
+ // Claude 3 series (legacy)
+ "claude-3-opus",
+ "claude-3-opus-20240229",
+ "claude-3-opus-4",
+ "claude-3-opus-4-20250514",
+ "claude-3-sonnet",
+ "claude-3-sonnet-20240229",
+ "claude-3-haiku",
+ "claude-3-haiku-20240307"
+ ];
308
+ _apiKey;
309
+ _baseUrl;
310
+ _defaultParams;
311
+ _timeout;
312
+ _anthropicVersion;
313
+ constructor(config) {
314
+ this._apiKey = config.apiKey || "";
315
+ this._baseUrl = config.baseUrl || "https://api.anthropic.com/v1";
316
+ this._defaultParams = config.defaultParams || {};
317
+ this._timeout = config.timeout || 6e4;
318
+ this._anthropicVersion = config.anthropicVersion || "2023-06-01";
319
+ }
320
+ validateConfig() {
321
+ return !!this._apiKey;
322
+ }
323
+ async makeRequest(endpoint, body) {
324
+ const controller = new AbortController();
325
+ const timeoutId = setTimeout(() => controller.abort(), this._timeout);
326
+ try {
327
+ const response = await fetch(`${this._baseUrl}${endpoint}`, {
328
+ method: "POST",
329
+ headers: {
330
+ "Content-Type": "application/json",
331
+ "X-Api-Key": this._apiKey,
332
+ "Anthropic-Version": this._anthropicVersion
333
+ },
334
+ body: JSON.stringify(body),
335
+ signal: controller.signal
336
+ });
337
+ clearTimeout(timeoutId);
338
+ if (!response.ok) {
339
+ const error = await response.text();
340
+ throw new Error(`Anthropic API error: ${response.status} - ${error}`);
341
+ }
342
+ return response;
343
+ } catch (error) {
344
+ clearTimeout(timeoutId);
345
+ throw error;
346
+ }
347
+ }
348
+ async complete(request) {
349
+ const body = this.buildRequestBody(request);
350
+ const response = await this.makeRequest("/messages", body);
351
+ const data = await response.json();
352
+ return this.parseResponse(data);
353
+ }
354
+ async *stream(request) {
355
+ const body = this.buildRequestBody(request, true);
356
+ const response = await this.makeRequest("/messages", body);
357
+ const reader = response.body?.getReader();
358
+ if (!reader) {
359
+ throw new Error("No response body");
360
+ }
361
+ const decoder = new TextDecoder();
362
+ let buffer = "";
363
+ try {
364
+ while (true) {
365
+ const { done, value } = await reader.read();
366
+ if (done) break;
367
+ buffer += decoder.decode(value, { stream: true });
368
+ const lines = buffer.split("\n");
369
+ buffer = lines.pop() || "";
370
+ for (const line of lines) {
371
+ if (line.startsWith("data: ")) {
372
+ const data = line.slice(6);
373
+ if (data === "[DONE]") return;
374
+ try {
375
+ const event = JSON.parse(data);
376
+ if (event.type === "content_block_delta") {
377
+ yield this.parseStreamChunk(event);
378
+ }
379
+ } catch {
380
+ }
381
+ }
382
+ }
383
+ }
384
+ } finally {
385
+ reader.releaseLock();
386
+ }
387
+ }
388
+ buildRequestBody(request, stream = false) {
389
+ const systemMessage = request.messages.find((m) => m.role === "system");
390
+ const messages = request.messages.filter((m) => m.role !== "system");
391
+ const body = {
392
+ model: request.model || this._defaultParams.model || "claude-sonnet-4-5-latest",
393
+ messages: messages.map((msg) => ({
394
+ role: msg.role === "assistant" ? "assistant" : "user",
395
+ content: msg.content
396
+ })),
397
+ max_tokens: request.max_tokens ?? this._defaultParams.max_tokens ?? 4096,
398
+ stream
399
+ };
400
+ if (systemMessage) {
401
+ body.system = systemMessage.content;
402
+ }
403
+ if (request.temperature !== void 0) {
404
+ body.temperature = request.temperature;
405
+ }
406
+ if (request.tools) {
407
+ body.tools = request.tools.map((tool) => ({
408
+ name: tool.function.name,
409
+ description: tool.function.description,
410
+ input_schema: tool.function.parameters
411
+ }));
412
+ }
413
+ return body;
414
+ }
415
+ parseResponse(data) {
416
+ const content = data.content || [];
417
+ const textContent = content.find((c) => c.type === "text");
418
+ return {
419
+ id: data.id,
420
+ model: data.model,
421
+ content: textContent?.text || "",
422
+ role: "assistant",
423
+ usage: {
424
+ prompt_tokens: data.usage?.input_tokens || 0,
425
+ completion_tokens: data.usage?.output_tokens || 0,
426
+ total_tokens: (data.usage?.input_tokens || 0) + (data.usage?.output_tokens || 0)
427
+ },
428
+ finish_reason: "stop"
429
+ };
430
+ }
431
+ parseStreamChunk(event) {
432
+ return {
433
+ id: event.id,
434
+ model: event.model,
435
+ delta: {
436
+ content: event.delta?.text
437
+ }
438
+ };
439
+ }
440
+ };
441
+ globalProviderRegistry.register(
442
+ "anthropic",
443
+ (config) => new AnthropicProvider(config)
444
+ );
445
+
446
+ // src/llm/providers/gemini.ts
447
+ var GeminiProvider = class {
448
+ name = "gemini";
449
+ supportedModels = [
+ // Gemini 3.0 series (latest - 2025)
+ "gemini-3.0-pro",
+ "gemini-3.0-pro-001",
+ "gemini-3.0-pro-latest",
+ "gemini-3.0-pro-exp",
+ "gemini-3.0-flash",
+ "gemini-3.0-flash-001",
+ "gemini-3.0-flash-latest",
+ "gemini-3.0-flash-exp",
+ "gemini-3.0-ultra",
+ "gemini-3.0-ultra-001",
+ "gemini-3.0-ultra-latest",
+ // Gemini 2.5 series
+ "gemini-2.5-pro",
+ "gemini-2.5-pro-001",
+ "gemini-2.5-pro-latest",
+ "gemini-2.5-pro-exp",
+ "gemini-2.5-flash",
+ "gemini-2.5-flash-001",
+ "gemini-2.5-flash-latest",
+ // Gemini 2.0 series
+ "gemini-2.0-flash",
+ "gemini-2.0-flash-001",
+ "gemini-2.0-flash-latest",
+ "gemini-2.0-flash-exp",
+ "gemini-2.0-flash-lite",
+ "gemini-2.0-flash-thinking-exp",
+ "gemini-2.0-flash-thinking-exp-01-21",
+ "gemini-2.0-pro",
+ "gemini-2.0-pro-001",
+ "gemini-2.0-pro-latest",
+ "gemini-2.0-pro-exp",
+ "gemini-2.0-pro-exp-02-05",
+ "gemini-2.0-ultra",
+ "gemini-2.0-ultra-001",
+ "gemini-2.0-ultra-latest",
+ // Gemini 1.5 series (legacy)
+ "gemini-1.5-flash",
+ "gemini-1.5-flash-002",
+ "gemini-1.5-flash-8b",
+ "gemini-1.5-flash-8b-latest",
+ "gemini-1.5-flash-8b-001",
+ "gemini-1.5-flash-latest",
+ "gemini-1.5-pro",
+ "gemini-1.5-pro-002",
+ "gemini-1.5-pro-latest",
+ "gemini-1.5-ultra",
+ // Gemini 1.0 series (legacy)
+ "gemini-1.0-pro",
+ "gemini-1.0-pro-002",
+ "gemini-1.0-pro-vision-latest",
+ "gemini-1.0-pro-vision"
+ ];
503
+ _apiKey;
504
+ _baseUrl;
505
+ _defaultParams;
506
+ _timeout;
507
+ _apiVersion;
508
+ constructor(config) {
509
+ this._apiKey = config.apiKey || "";
510
+ this._baseUrl = config.baseUrl || "https://generativelanguage.googleapis.com";
511
+ this._defaultParams = config.defaultParams || {};
512
+ this._timeout = config.timeout || 6e4;
513
+ this._apiVersion = config.apiVersion || "v1beta";
514
+ }
515
+ validateConfig() {
516
+ return !!this._apiKey;
517
+ }
518
+ async makeRequest(endpoint, body) {
519
+ const controller = new AbortController();
520
+ const timeoutId = setTimeout(() => controller.abort(), this._timeout);
521
+ try {
522
+ const response = await fetch(
523
+ `${this._baseUrl}/${this._apiVersion}/${endpoint}?key=${this._apiKey}`,
524
+ {
525
+ method: "POST",
526
+ headers: {
527
+ "Content-Type": "application/json"
528
+ },
529
+ body: JSON.stringify(body),
530
+ signal: controller.signal
531
+ }
532
+ );
533
+ clearTimeout(timeoutId);
534
+ if (!response.ok) {
535
+ const error = await response.text();
536
+ throw new Error(`Gemini API error: ${response.status} - ${error}`);
537
+ }
538
+ return response;
539
+ } catch (error) {
540
+ clearTimeout(timeoutId);
541
+ throw error;
542
+ }
543
+ }
544
+ async complete(request) {
545
+ const model = request.model || this._defaultParams.model || "gemini-3.0-flash";
546
+ const body = this.buildRequestBody(request);
547
+ const response = await this.makeRequest(`models/${model}:generateContent`, body);
548
+ const data = await response.json();
549
+ return this.parseResponse(data, model);
550
+ }
551
+ async *stream(request) {
552
+ const model = request.model || this._defaultParams.model || "gemini-3.0-flash";
553
+ const body = this.buildRequestBody(request);
554
+ const response = await this.makeRequest(`models/${model}:streamGenerateContent`, body);
555
+ const reader = response.body?.getReader();
556
+ if (!reader) {
557
+ throw new Error("No response body");
558
+ }
559
+ const decoder = new TextDecoder();
560
+ let buffer = "";
561
+ try {
562
+ while (true) {
563
+ const { done, value } = await reader.read();
564
+ if (done) break;
565
+ buffer += decoder.decode(value, { stream: true });
566
+ const lines = buffer.split("\n");
567
+ buffer = lines.pop() || "";
568
+ for (const line of lines) {
569
+ if (line.trim()) {
570
+ try {
571
+ const event = JSON.parse(line);
572
+ yield this.parseStreamChunk(event, model);
573
+ } catch {
574
+ }
575
+ }
576
+ }
577
+ }
578
+ } finally {
579
+ reader.releaseLock();
580
+ }
581
+ }
582
+ buildRequestBody(request) {
583
+ const systemMessage = request.messages.find((m) => m.role === "system");
584
+ const messages = request.messages.filter((m) => m.role !== "system");
585
+ const contents = messages.map((msg) => ({
586
+ role: msg.role === "assistant" ? "model" : "user",
587
+ parts: [{ text: msg.content }]
588
+ }));
589
+ const body = {
590
+ contents,
591
+ generationConfig: {
592
+ temperature: request.temperature ?? this._defaultParams.temperature ?? 0.7,
593
+ maxOutputTokens: request.max_tokens ?? this._defaultParams.max_tokens ?? 2048,
594
+ topP: request.top_p ?? this._defaultParams.top_p ?? 0.95
595
+ }
596
+ };
597
+ if (systemMessage) {
598
+ body.systemInstruction = {
599
+ parts: [{ text: systemMessage.content }]
600
+ };
601
+ }
602
+ if (request.tools) {
603
+ body.tools = [
604
+ {
605
+ functionDeclarations: request.tools.map((tool) => ({
606
+ name: tool.function.name,
607
+ description: tool.function.description,
608
+ parameters: tool.function.parameters
609
+ }))
610
+ }
611
+ ];
612
+ }
613
+ return body;
614
+ }
615
+ parseResponse(data, model) {
616
+ const candidates = data.candidates || [];
617
+ const firstCandidate = candidates[0];
618
+ const content = firstCandidate?.content;
619
+ const parts = content?.parts || [];
620
+ const textPart = parts.find((p) => p.text);
621
+ return {
622
+ id: data.id,
623
+ model,
624
+ content: textPart?.text || "",
625
+ role: "assistant",
626
+ usage: {
627
+ prompt_tokens: data.usageMetadata?.promptTokenCount || 0,
628
+ completion_tokens: data.usageMetadata?.candidatesTokenCount || 0,
629
+ total_tokens: data.usageMetadata?.totalTokenCount || 0
630
+ },
631
+ finish_reason: firstCandidate?.finishReason === "STOP" ? "stop" : "length"
632
+ };
633
+ }
634
+ parseStreamChunk(event, model) {
635
+ const candidates = event.candidates || [];
636
+ const firstCandidate = candidates[0];
637
+ const content = firstCandidate?.content;
638
+ const parts = content?.parts || [];
639
+ const textPart = parts.find((p) => p.text);
640
+ return {
641
+ id: event.id,
642
+ model,
643
+ delta: {
644
+ content: textPart?.text
645
+ },
646
+ finish_reason: firstCandidate?.finishReason === "STOP" ? "stop" : void 0
647
+ };
648
+ }
649
+ };
650
+ globalProviderRegistry.register(
651
+ "gemini",
652
+ (config) => new GeminiProvider(config)
653
+ );
654
+
655
+ // src/llm/providers/moonshot.ts
656
+ var MoonshotProvider = class {
657
+ name = "moonshot";
658
+ supportedModels = [
+ // Kimi K2.5 series (latest flagship)
+ "kimi-k2.5",
+ "kimi-k2.5-20250128",
+ "kimi-k2.5-latest",
+ // Kimi K2 series
+ "kimi-k2",
+ "kimi-k2-20250128",
+ "kimi-k2-latest",
+ // Kimi K1.5 series
+ "kimi-k1.5",
+ "kimi-k1.5-20250128",
+ "kimi-k1.5-latest",
+ // Kimi K1 series
+ "kimi-k1",
+ "kimi-k1-20250128",
+ "kimi-k1-latest",
+ // Kimi standard series
+ "kimi-latest",
+ "kimi-2025-01-28",
+ // Context-length variants
+ "moonshot-v1-128k",
+ "moonshot-v1-32k",
+ "moonshot-v1-8k"
+ ];
683
+ _apiKey;
684
+ _baseUrl;
685
+ _defaultParams;
686
+ _timeout;
687
+ constructor(config) {
688
+ this._apiKey = config.apiKey || "";
689
+ this._baseUrl = config.baseUrl || "https://api.moonshot.cn/v1";
690
+ this._defaultParams = config.defaultParams || {};
691
+ this._timeout = config.timeout || 6e4;
692
+ }
693
+ validateConfig() {
694
+ return !!this._apiKey;
695
+ }
696
+ async makeRequest(endpoint, body) {
697
+ const controller = new AbortController();
698
+ const timeoutId = setTimeout(() => controller.abort(), this._timeout);
699
+ try {
700
+ const response = await fetch(`${this._baseUrl}${endpoint}`, {
701
+ method: "POST",
702
+ headers: {
703
+ "Content-Type": "application/json",
704
+ Authorization: `Bearer ${this._apiKey}`
705
+ },
706
+ body: JSON.stringify(body),
707
+ signal: controller.signal
708
+ });
709
+ clearTimeout(timeoutId);
710
+ if (!response.ok) {
711
+ const error = await response.text();
712
+ throw new Error(`Moonshot API error: ${response.status} - ${error}`);
713
+ }
714
+ return response;
715
+ } catch (error) {
716
+ clearTimeout(timeoutId);
717
+ throw error;
718
+ }
719
+ }
720
+ async complete(request) {
721
+ const body = this.buildRequestBody(request);
722
+ const response = await this.makeRequest("/chat/completions", body);
723
+ const data = await response.json();
724
+ return this.parseResponse(data);
725
+ }
726
+ async *stream(request) {
727
+ const body = this.buildRequestBody(request, true);
728
+ const response = await this.makeRequest("/chat/completions", body);
729
+ const reader = response.body?.getReader();
730
+ if (!reader) {
731
+ throw new Error("No response body");
732
+ }
733
+ const decoder = new TextDecoder();
734
+ let buffer = "";
735
+ try {
736
+ while (true) {
737
+ const { done, value } = await reader.read();
738
+ if (done) break;
739
+ buffer += decoder.decode(value, { stream: true });
740
+ const lines = buffer.split("\n");
741
+ buffer = lines.pop() || "";
742
+ for (const line of lines) {
743
+ if (line.startsWith("data: ")) {
744
+ const data = line.slice(6);
745
+ if (data === "[DONE]") return;
746
+ try {
747
+ const chunk = JSON.parse(data);
748
+ yield this.parseStreamChunk(chunk);
749
+ } catch {
750
+ }
751
+ }
752
+ }
753
+ }
754
+ } finally {
755
+ reader.releaseLock();
756
+ }
757
+ }
758
+ buildRequestBody(request, stream = false) {
759
+ return {
760
+ model: request.model || this._defaultParams.model || "kimi-k2.5",
761
+ messages: request.messages,
762
+ temperature: request.temperature ?? this._defaultParams.temperature ?? 0.7,
763
+ max_tokens: request.max_tokens ?? this._defaultParams.max_tokens ?? 4096,
764
+ top_p: request.top_p ?? this._defaultParams.top_p ?? 1,
765
+ stream
766
+ };
767
+ }
768
+ parseResponse(data) {
769
+ const choice = data.choices[0];
770
+ const message = choice.message;
771
+ return {
772
+ id: data.id,
773
+ model: data.model,
774
+ content: message.content || "",
775
+ role: "assistant",
776
+ usage: data.usage,
777
+ finish_reason: choice.finish_reason
778
+ };
779
+ }
780
+ parseStreamChunk(data) {
781
+ const choice = data.choices[0];
782
+ const delta = choice.delta;
783
+ return {
784
+ id: data.id,
785
+ model: data.model,
786
+ delta: {
787
+ content: delta.content,
788
+ role: delta.role
789
+ },
790
+ finish_reason: choice.finish_reason
791
+ };
792
+ }
793
+ };
794
+ globalProviderRegistry.register(
795
+ "moonshot",
796
+ (config) => new MoonshotProvider(config)
797
+ );
798
+
799
+ // src/llm/providers/minimax.ts
800
+ var MiniMaxProvider = class {
801
+ name = "minimax";
802
+ supportedModels = [
+ // MiniMax-Text-01 series (latest)
+ "MiniMax-Text-01",
+ "MiniMax-Text-01-20250128",
+ "MiniMax-Text-01-latest",
+ // MiniMax-01 series
+ "MiniMax-01",
+ "MiniMax-01-20250128",
+ "MiniMax-01-latest",
+ // abab6.5 series
+ "abab6.5",
+ "abab6.5-20250128",
+ "abab6.5-latest",
+ "abab6.5s",
+ "abab6.5s-20250128",
+ // abab6 series
+ "abab6",
+ "abab6-20250128",
+ "abab6-latest",
+ // abab5.5 series
+ "abab5.5",
+ "abab5.5-20250128",
+ "abab5.5-latest",
+ "abab5.5s",
+ "abab5.5s-20250128",
+ // abab5 series
+ "abab5",
+ "abab5-20250128"
+ ];
831
+ _apiKey;
832
+ _groupId;
833
+ _baseUrl;
834
+ _defaultParams;
835
+ _timeout;
836
+ constructor(config) {
837
+ this._apiKey = config.apiKey || "";
838
+ this._groupId = config.groupId || "";
839
+ this._baseUrl = config.baseUrl || "https://api.minimaxi.com/v1";
840
+ this._defaultParams = config.defaultParams || {};
841
+ this._timeout = config.timeout || 6e4;
842
+ }
843
+ validateConfig() {
844
+ return !!this._apiKey && !!this._groupId;
845
+ }
846
+ async makeRequest(endpoint, body) {
847
+ const controller = new AbortController();
848
+ const timeoutId = setTimeout(() => controller.abort(), this._timeout);
849
+ try {
850
+ const response = await fetch(`${this._baseUrl}${endpoint}`, {
851
+ method: "POST",
852
+ headers: {
853
+ "Content-Type": "application/json",
854
+ Authorization: `Bearer ${this._apiKey}`
855
+ },
856
+ body: JSON.stringify(body),
857
+ signal: controller.signal
858
+ });
859
+ clearTimeout(timeoutId);
860
+ if (!response.ok) {
861
+ const error = await response.text();
862
+ throw new Error(`MiniMax API error: ${response.status} - ${error}`);
863
+ }
864
+ return response;
865
+ } catch (error) {
866
+ clearTimeout(timeoutId);
867
+ throw error;
868
+ }
869
+ }
870
+ async complete(request) {
871
+ const body = this.buildRequestBody(request);
872
+ const response = await this.makeRequest("/text/chatcompletion_v2", body);
873
+ const data = await response.json();
874
+ return this.parseResponse(data);
875
+ }
876
+ async *stream(request) {
877
+ const body = this.buildRequestBody(request, true);
878
+ const response = await this.makeRequest("/text/chatcompletion_v2", body);
879
+ const reader = response.body?.getReader();
880
+ if (!reader) {
881
+ throw new Error("No response body");
882
+ }
883
+ const decoder = new TextDecoder();
884
+ let buffer = "";
885
+ try {
886
+ while (true) {
887
+ const { done, value } = await reader.read();
888
+ if (done) break;
889
+ buffer += decoder.decode(value, { stream: true });
890
+ const lines = buffer.split("\n");
891
+ buffer = lines.pop() || "";
892
+ for (const line of lines) {
893
+ if (line.startsWith("data: ")) {
894
+ const data = line.slice(6);
895
+ if (data === "[DONE]") return;
896
+ try {
897
+ const chunk = JSON.parse(data);
898
+ yield this.parseStreamChunk(chunk);
899
+ } catch {
900
+ }
901
+ }
902
+ }
903
+ }
904
+ } finally {
905
+ reader.releaseLock();
906
+ }
907
+ }
908
+ buildRequestBody(request, stream = false) {
909
+ return {
910
+ model: request.model || this._defaultParams.model || "MiniMax-Text-01",
911
+ messages: request.messages,
912
+ temperature: request.temperature ?? this._defaultParams.temperature ?? 0.7,
913
+ max_tokens: request.max_tokens ?? this._defaultParams.max_tokens ?? 4096,
914
+ top_p: request.top_p ?? this._defaultParams.top_p ?? 1,
915
+ stream
916
+ };
917
+ }
918
+ parseResponse(data) {
919
+ const choice = data.choices[0];
920
+ const message = choice.message;
921
+ return {
922
+ id: data.id,
923
+ model: data.model,
924
+ content: message.content || "",
925
+ role: "assistant",
926
+ usage: data.usage,
927
+ finish_reason: choice.finish_reason
928
+ };
929
+ }
930
+ parseStreamChunk(data) {
931
+ const choice = data.choices[0];
932
+ const delta = choice.delta;
933
+ return {
934
+ id: data.id,
935
+ model: data.model,
936
+ delta: {
937
+ content: delta.content,
938
+ role: delta.role
939
+ },
940
+ finish_reason: choice.finish_reason
941
+ };
942
+ }
943
+ };
944
+ globalProviderRegistry.register(
945
+ "minimax",
946
+ (config) => new MiniMaxProvider(config)
947
+ );
948
+
949
+ // src/llm/providers/zhipu.ts
950
+ var ZhipuProvider = class {
951
+ name = "zhipu";
952
+ supportedModels = [
+ // GLM-4.5 series (latest)
+ "glm-4.5",
+ "glm-4.5-20250128",
+ "glm-4.5-latest",
+ // GLM-4 series
+ "glm-4",
+ "glm-4-20250128",
+ "glm-4-latest",
+ "glm-4-plus",
+ "glm-4-plus-20250128",
+ "glm-4-flash",
+ "glm-4-flash-20250128",
+ "glm-4-air",
+ "glm-4-air-20250128",
+ "glm-4-airx",
+ "glm-4-airx-20250128",
+ "glm-4-long",
+ "glm-4-long-20250128",
+ "glm-4-alltools",
+ "glm-4-alltools-20250128",
+ // GLM-3 series
+ "glm-3-turbo",
+ "glm-3-turbo-20250128",
+ "glm-3-turbo-latest",
+ // ChatGLM3 series
+ "chatglm3-6b",
+ "chatglm3-6b-32k",
+ // Code models
+ "codegeex-4",
+ "codegeex-4-20250128",
+ // Vision models
+ "glm-4v",
+ "glm-4v-20250128",
+ "glm-4v-plus",
+ "glm-4v-plus-20250128",
+ // Embedding models
+ "embedding-3",
+ "embedding-2"
+ ];
992
+ _apiKey;
993
+ _baseUrl;
994
+ _defaultParams;
995
+ _timeout;
996
+ constructor(config) {
997
+ this._apiKey = config.apiKey || "";
998
+ this._baseUrl = config.baseUrl || "https://open.bigmodel.cn/api/paas/v4";
999
+ this._defaultParams = config.defaultParams || {};
1000
+ this._timeout = config.timeout || 6e4;
1001
+ }
1002
+ validateConfig() {
1003
+ return !!this._apiKey;
1004
+ }
1005
+ async makeRequest(endpoint, body) {
1006
+ const controller = new AbortController();
1007
+ const timeoutId = setTimeout(() => controller.abort(), this._timeout);
1008
+ try {
1009
+ const response = await fetch(`${this._baseUrl}${endpoint}`, {
1010
+ method: "POST",
1011
+ headers: {
1012
+ "Content-Type": "application/json",
1013
+ Authorization: `Bearer ${this._apiKey}`
1014
+ },
1015
+ body: JSON.stringify(body),
1016
+ signal: controller.signal
1017
+ });
1018
+ clearTimeout(timeoutId);
1019
+ if (!response.ok) {
1020
+ const error = await response.text();
1021
+ throw new Error(`Zhipu API error: ${response.status} - ${error}`);
1022
+ }
1023
+ return response;
1024
+ } catch (error) {
1025
+ clearTimeout(timeoutId);
1026
+ throw error;
1027
+ }
1028
+ }
1029
+ async complete(request) {
1030
+ const body = this.buildRequestBody(request);
1031
+ const response = await this.makeRequest("/chat/completions", body);
1032
+ const data = await response.json();
1033
+ return this.parseResponse(data);
1034
+ }
1035
+ async *stream(request) {
1036
+ const body = this.buildRequestBody(request, true);
1037
+ const response = await this.makeRequest("/chat/completions", body);
1038
+ const reader = response.body?.getReader();
1039
+ if (!reader) {
1040
+ throw new Error("No response body");
1041
+ }
1042
+ const decoder = new TextDecoder();
1043
+ let buffer = "";
1044
+ try {
1045
+ while (true) {
1046
+ const { done, value } = await reader.read();
1047
+ if (done) break;
1048
+ buffer += decoder.decode(value, { stream: true });
1049
+ const lines = buffer.split("\n");
1050
+ buffer = lines.pop() || "";
1051
+ for (const line of lines) {
1052
+ if (line.startsWith("data: ")) {
1053
+ const data = line.slice(6);
1054
+ if (data === "[DONE]") return;
1055
+ try {
1056
+ const chunk = JSON.parse(data);
1057
+ yield this.parseStreamChunk(chunk);
1058
+ } catch {
1059
+ }
1060
+ }
1061
+ }
1062
+ }
1063
+ } finally {
1064
+ reader.releaseLock();
1065
+ }
1066
+ }
1067
+ buildRequestBody(request, stream = false) {
1068
+ return {
1069
+ model: request.model || this._defaultParams.model || "glm-4.5",
1070
+ messages: request.messages,
1071
+ temperature: request.temperature ?? this._defaultParams.temperature ?? 0.7,
1072
+ max_tokens: request.max_tokens ?? this._defaultParams.max_tokens ?? 4096,
1073
+ top_p: request.top_p ?? this._defaultParams.top_p ?? 1,
1074
+ stream
1075
+ };
1076
+ }
1077
+ parseResponse(data) {
1078
+ const choice = data.choices[0];
1079
+ const message = choice.message;
1080
+ return {
1081
+ id: data.id,
1082
+ model: data.model,
1083
+ content: message.content || "",
1084
+ role: "assistant",
1085
+ usage: data.usage,
1086
+ finish_reason: choice.finish_reason
1087
+ };
1088
+ }
1089
+ parseStreamChunk(data) {
1090
+ const choice = data.choices[0];
1091
+ const delta = choice.delta;
1092
+ return {
1093
+ id: data.id,
1094
+ model: data.model,
1095
+ delta: {
1096
+ content: delta.content,
1097
+ role: delta.role
1098
+ },
1099
+ finish_reason: choice.finish_reason
1100
+ };
1101
+ }
1102
+ };
1103
+ globalProviderRegistry.register(
1104
+ "zhipu",
1105
+ (config) => new ZhipuProvider(config)
1106
+ );
1107
+
1108
+ // src/llm/providers/qwen.ts
1109
+ var QwenProvider = class {
1110
+ name = "qwen";
1111
+ supportedModels = [
+ // Qwen3 Max series (latest flagship)
+ "qwen3-max",
+ "qwen3-max-2026-01-23",
+ "qwen3-max-2025-09-23",
+ "qwen3-max-latest",
+ "qwen3-max-preview",
+ // Qwen3 Plus series
+ "qwen3-plus",
+ "qwen3-plus-2025-12-01",
+ "qwen3-plus-2025-09-11",
+ "qwen3-plus-2025-07-28",
+ "qwen3-plus-2025-07-14",
+ "qwen3-plus-2025-04-28",
+ "qwen3-plus-latest",
+ // Qwen3 Flash series
+ "qwen3-flash",
+ "qwen3-flash-2025-07-28",
+ "qwen3-flash-latest",
+ // Qwen3 Omni series (multimodal)
+ "qwen3-omni-flash",
+ "qwen3-omni-flash-2025-12-01",
+ "qwen3-omni-flash-2025-09-15",
+ "qwen3-omni-flash-realtime",
+ "qwen3-omni-flash-realtime-2025-12-01",
+ "qwen3-omni-flash-realtime-2025-09-15",
+ // Qwen3 VL series (vision)
+ "qwen3-vl-plus",
+ "qwen3-vl-plus-2025-12-19",
+ "qwen3-vl-plus-2025-09-23",
+ "qwen3-vl-flash",
+ "qwen3-vl-flash-2026-01-22",
+ "qwen3-vl-flash-2025-10-15",
+ // Qwen Max series (legacy)
+ "qwen-max",
+ "qwen-max-2025-01-25",
+ "qwen-max-2024-09-19",
+ "qwen-max-latest",
+ // Qwen Plus series (legacy)
+ "qwen-plus",
+ "qwen-plus-2025-01-25",
+ "qwen-plus-2025-01-12",
+ "qwen-plus-2024-12-20",
+ "qwen-plus-2024-11-27",
+ "qwen-plus-2024-11-25",
+ "qwen-plus-2024-09-19",
+ "qwen-plus-2024-08-06",
+ "qwen-plus-2024-07-23",
+ "qwen-plus-latest",
+ // Qwen Turbo series (legacy)
+ "qwen-turbo",
+ "qwen-turbo-2025-04-28",
+ "qwen-turbo-2025-02-11",
+ "qwen-turbo-2024-11-01",
+ "qwen-turbo-2024-09-19",
+ "qwen-turbo-2024-06-24",
+ "qwen-turbo-latest",
+ // Qwen Long series (long context)
+ "qwen-long",
+ "qwen-long-2025-01-25",
+ "qwen-long-latest",
+ // QwQ reasoning models
+ "qwq-plus",
+ "qwq-plus-2025-05-15",
+ "qwq-plus-2025-03-05",
+ "qwq-plus-latest",
+ // QVQ visual reasoning
+ "qvq-max",
+ "qvq-max-2025-05-15",
+ "qvq-max-2025-03-25",
+ "qvq-plus",
+ "qvq-plus-2025-05-15",
+ // Qwen VL series (vision, legacy)
+ "qwen-vl-max",
+ "qwen-vl-max-2025-08-13",
+ "qwen-vl-max-2025-04-08",
+ "qwen-vl-max-2025-04-02",
+ "qwen-vl-max-2025-01-25",
+ "qwen-vl-max-2024-12-30",
+ "qwen-vl-max-2024-11-19",
+ "qwen-vl-max-2024-10-30",
+ "qwen-vl-max-2024-08-09",
+ "qwen-vl-max-latest",
+ "qwen-vl-plus",
+ "qwen-vl-plus-2025-08-15",
+ "qwen-vl-plus-2025-07-10",
+ "qwen-vl-plus-2025-05-07",
+ "qwen-vl-plus-2025-01-25",
+ "qwen-vl-plus-2025-01-02",
+ "qwen-vl-plus-2024-08-09",
+ "qwen-vl-plus-latest",
+ // Qwen OCR
+ "qwen-vl-ocr",
+ "qwen-vl-ocr-2025-11-20",
+ "qwen-vl-ocr-2025-08-28",
+ "qwen-vl-ocr-2025-04-13",
+ "qwen-vl-ocr-2024-10-28",
+ "qwen-vl-ocr-latest",
+ // Qwen Audio
+ "qwen-audio-turbo",
+ "qwen-audio-turbo-2024-12-04",
+ "qwen-audio-turbo-2024-08-07",
+ "qwen-audio-turbo-latest",
+ // Embedding models
+ "text-embedding-v3",
+ "text-embedding-v2",
+ "text-embedding-v1"
+ ];
1219
+ _apiKey;
1220
+ _baseUrl;
1221
+ _defaultParams;
1222
+ _timeout;
1223
+ constructor(config) {
1224
+ this._apiKey = config.apiKey || "";
1225
+ this._baseUrl = config.baseUrl || "https://dashscope.aliyuncs.com/api/v1";
1226
+ this._defaultParams = config.defaultParams || {};
1227
+ this._timeout = config.timeout || 6e4;
1228
+ }
1229
+ validateConfig() {
1230
+ return !!this._apiKey;
1231
+ }
1232
+ async makeRequest(endpoint, body) {
1233
+ const controller = new AbortController();
1234
+ const timeoutId = setTimeout(() => controller.abort(), this._timeout);
1235
+ try {
1236
+ const response = await fetch(`${this._baseUrl}${endpoint}`, {
1237
+ method: "POST",
1238
+ headers: {
1239
+ "Content-Type": "application/json",
1240
+ Authorization: `Bearer ${this._apiKey}`
1241
+ },
1242
+ body: JSON.stringify(body),
1243
+ signal: controller.signal
1244
+ });
1245
+ clearTimeout(timeoutId);
1246
+ if (!response.ok) {
1247
+ const error = await response.text();
1248
+ throw new Error(`Qwen API error: ${response.status} - ${error}`);
1249
+ }
1250
+ return response;
1251
+ } catch (error) {
1252
+ clearTimeout(timeoutId);
1253
+ throw error;
1254
+ }
1255
+ }
1256
+ async complete(request) {
1257
+ const body = this.buildRequestBody(request);
1258
+ const response = await this.makeRequest("/services/aigc/text-generation/generation", body);
1259
+ const data = await response.json();
1260
+ return this.parseResponse(data);
1261
+ }
1262
+ async *stream(request) {
1263
+ const body = this.buildRequestBody(request, true);
1264
+ const response = await this.makeRequest("/services/aigc/text-generation/generation", body);
1265
+ const reader = response.body?.getReader();
1266
+ if (!reader) {
1267
+ throw new Error("No response body");
1268
+ }
1269
+ const decoder = new TextDecoder();
1270
+ let buffer = "";
1271
+ try {
1272
+ while (true) {
1273
+ const { done, value } = await reader.read();
1274
+ if (done) break;
1275
+ buffer += decoder.decode(value, { stream: true });
1276
+ const lines = buffer.split("\n");
1277
+ buffer = lines.pop() || "";
1278
+ for (const line of lines) {
1279
+ if (line.startsWith("data: ")) {
1280
+ const data = line.slice(6);
1281
+ if (data === "[DONE]") return;
1282
+ try {
1283
+ const chunk = JSON.parse(data);
1284
+ yield this.parseStreamChunk(chunk);
1285
+ } catch {
1286
+ }
1287
+ }
1288
+ }
1289
+ }
1290
+ } finally {
1291
+ reader.releaseLock();
1292
+ }
1293
+ }
1294
+ buildRequestBody(request, _stream = false) {
1295
+ return {
1296
+ model: request.model || this._defaultParams.model || "qwen3-max",
1297
+ input: {
1298
+ messages: request.messages
1299
+ },
1300
+ parameters: {
1301
+ temperature: request.temperature ?? this._defaultParams.temperature ?? 0.7,
1302
+ max_tokens: request.max_tokens ?? this._defaultParams.max_tokens ?? 4096,
1303
+ top_p: request.top_p ?? this._defaultParams.top_p ?? 1,
1304
+ result_format: "message"
1305
+ }
1306
+ };
1307
+ }
1308
+ parseResponse(data) {
1309
+ const output = data.output;
1310
+ const choice = output.choices[0];
1311
+ const message = choice.message;
1312
+ const usage = data.usage;
1313
+ return {
1314
+ id: data.request_id,
1315
+ model: data.model,
1316
+ content: message.content || "",
1317
+ role: "assistant",
1318
+ usage: {
1319
+ prompt_tokens: usage?.input_tokens || 0,
1320
+ completion_tokens: usage?.output_tokens || 0,
1321
+ total_tokens: (usage?.input_tokens || 0) + (usage?.output_tokens || 0)
1322
+ },
1323
+ finish_reason: choice.finish_reason
1324
+ };
1325
+ }
1326
+ parseStreamChunk(data) {
1327
+ const output = data.output;
1328
+ const choice = output.choices[0];
1329
+ const message = choice.message;
1330
+ return {
1331
+ id: data.request_id,
1332
+ model: data.model,
1333
+ delta: {
1334
+ content: message.content,
1335
+ role: message.role
1336
+ },
1337
+ finish_reason: choice.finish_reason
1338
+ };
1339
+ }
1340
+ };
1341
+ globalProviderRegistry.register(
1342
+ "qwen",
1343
+ (config) => new QwenProvider(config)
1344
+ );
1345
+
1346
+ // src/llm/providers/deepseek.ts
1347
+ var DeepSeekProvider = class {
1348
+ name = "deepseek";
1349
+ supportedModels = [
+ // DeepSeek-V3.2 series (latest)
+ "deepseek-v3.2",
+ "deepseek-v3.2-20251201",
+ "deepseek-v3.2-latest",
+ "deepseek-v3.2-exp",
+ "deepseek-v3.2-exp-20250929",
+ // DeepSeek-V3.1 series
+ "deepseek-v3.1",
+ "deepseek-v3.1-20250922",
+ "deepseek-v3.1-20250821",
+ "deepseek-v3.1-latest",
+ // DeepSeek-V3 series
+ "deepseek-v3",
+ "deepseek-v3-20250325",
+ "deepseek-v3-20241226",
+ "deepseek-v3-latest",
+ // DeepSeek-R1 series (reasoning models)
+ "deepseek-r1",
+ "deepseek-r1-20250528",
+ "deepseek-r1-20250120",
+ "deepseek-r1-latest",
+ // DeepSeek-R1-Lite series
+ "deepseek-r1-lite",
+ "deepseek-r1-lite-20241120",
+ "deepseek-r1-lite-latest",
+ // DeepSeek-V2.5 series
+ "deepseek-v2.5",
+ "deepseek-v2.5-1210",
+ "deepseek-v2.5-20240905",
+ "deepseek-v2.5-latest",
+ // DeepSeek-V2 series
+ "deepseek-v2",
+ "deepseek-v2-20250128",
+ "deepseek-v2-20240802",
+ "deepseek-v2-latest",
+ // DeepSeek-Coder-V2 series
+ "deepseek-coder-v2",
+ "deepseek-coder-v2-20250128",
+ "deepseek-coder-v2-20240725",
+ "deepseek-coder-v2-latest",
+ // DeepSeek-Coder series
+ "deepseek-coder",
+ "deepseek-coder-20250128",
+ "deepseek-coder-6.7b",
+ "deepseek-coder-33b",
+ "deepseek-coder-latest",
+ // Aliases (API-compatible)
+ "deepseek-chat",
+ "deepseek-reasoner"
+ ];
1400
+ _apiKey;
1401
+ _baseUrl;
1402
+ _defaultParams;
1403
+ _timeout;
1404
+ constructor(config) {
1405
+ this._apiKey = config.apiKey || "";
1406
+ this._baseUrl = config.baseUrl || "https://api.deepseek.com";
1407
+ this._defaultParams = config.defaultParams || {};
1408
+ this._timeout = config.timeout || 6e4;
1409
+ }
1410
+ validateConfig() {
1411
+ return !!this._apiKey;
1412
+ }
1413
+ async makeRequest(endpoint, body) {
1414
+ const controller = new AbortController();
1415
+ const timeoutId = setTimeout(() => controller.abort(), this._timeout);
1416
+ try {
1417
+ const response = await fetch(`${this._baseUrl}${endpoint}`, {
1418
+ method: "POST",
1419
+ headers: {
1420
+ "Content-Type": "application/json",
1421
+ Authorization: `Bearer ${this._apiKey}`
1422
+ },
1423
+ body: JSON.stringify(body),
1424
+ signal: controller.signal
1425
+ });
1426
+ clearTimeout(timeoutId);
1427
+ if (!response.ok) {
1428
+ const error = await response.text();
1429
+ throw new Error(`DeepSeek API error: ${response.status} - ${error}`);
1430
+ }
1431
+ return response;
1432
+ } catch (error) {
1433
+ clearTimeout(timeoutId);
1434
+ throw error;
1435
+ }
1436
+ }
1437
+ async complete(request) {
1438
+ const body = this.buildRequestBody(request);
1439
+ const response = await this.makeRequest("/chat/completions", body);
1440
+ const data = await response.json();
1441
+ return this.parseResponse(data);
1442
+ }
1443
+ async *stream(request) {
1444
+ const body = this.buildRequestBody(request, true);
1445
+ const response = await this.makeRequest("/chat/completions", body);
1446
+ const reader = response.body?.getReader();
1447
+ if (!reader) {
1448
+ throw new Error("No response body");
1449
+ }
1450
+ const decoder = new TextDecoder();
1451
+ let buffer = "";
1452
+ try {
1453
+ while (true) {
1454
+ const { done, value } = await reader.read();
1455
+ if (done) break;
1456
+ buffer += decoder.decode(value, { stream: true });
1457
+ const lines = buffer.split("\n");
1458
+ buffer = lines.pop() || "";
1459
+ for (const line of lines) {
1460
+ if (line.startsWith("data: ")) {
1461
+ const data = line.slice(6);
1462
+ if (data === "[DONE]") return;
1463
+ try {
1464
+ const chunk = JSON.parse(data);
1465
+ yield this.parseStreamChunk(chunk);
1466
+ } catch {
1467
+ }
1468
+ }
1469
+ }
1470
+ }
1471
+ } finally {
1472
+ reader.releaseLock();
1473
+ }
1474
+ }
1475
+ buildRequestBody(request, stream = false) {
1476
+ return {
1477
+ model: request.model || this._defaultParams.model || "deepseek-v3.2",
1478
+ messages: request.messages,
1479
+ temperature: request.temperature ?? this._defaultParams.temperature ?? 0.7,
1480
+ max_tokens: request.max_tokens ?? this._defaultParams.max_tokens ?? 4096,
1481
+ top_p: request.top_p ?? this._defaultParams.top_p ?? 1,
1482
+ stream
1483
+ };
1484
+ }
1485
+ parseResponse(data) {
1486
+ const choice = data.choices[0];
1487
+ const message = choice.message;
1488
+ return {
1489
+ id: data.id,
1490
+ model: data.model,
1491
+ content: message.content || "",
1492
+ role: "assistant",
1493
+ usage: data.usage,
1494
+ finish_reason: choice.finish_reason
1495
+ };
1496
+ }
1497
+ parseStreamChunk(data) {
1498
+ const choice = data.choices[0];
1499
+ const delta = choice.delta;
1500
+ return {
1501
+ id: data.id,
1502
+ model: data.model,
1503
+ delta: {
1504
+ content: delta.content,
1505
+ role: delta.role
1506
+ },
1507
+ finish_reason: choice.finish_reason
1508
+ };
1509
+ }
1510
+ };
1511
+ globalProviderRegistry.register(
1512
+ "deepseek",
1513
+ (config) => new DeepSeekProvider(config)
1514
+ );
1515
+
1516
+ // src/llm/providers/doubao.ts
1517
+ var DoubaoProvider = class {
1518
+ name = "doubao";
1519
+ supportedModels = [
+ // Doubao 1.8 series (latest)
+ "doubao-1.8-pro-32k",
+ "doubao-1.8-pro-32k-20250128",
+ "doubao-1.8-pro-32k-latest",
+ "doubao-1.8-pro-256k",
+ "doubao-1.8-pro-256k-20250128",
+ "doubao-1.8-pro-256k-latest",
+ "doubao-1.8-lite-32k",
+ "doubao-1.8-lite-32k-20250128",
+ "doubao-1.8-lite-32k-latest",
+ "doubao-1.8-vision-pro-32k",
+ "doubao-1.8-vision-pro-32k-20250128",
+ "doubao-1.8-vision-pro-32k-latest",
+ // Doubao 1.5 series
+ "doubao-1.5-pro-32k",
+ "doubao-1.5-pro-32k-20250128",
+ "doubao-1.5-pro-32k-latest",
+ "doubao-1.5-pro-256k",
+ "doubao-1.5-pro-256k-20250128",
+ "doubao-1.5-pro-256k-latest",
+ "doubao-1.5-lite-32k",
+ "doubao-1.5-lite-32k-20250128",
+ "doubao-1.5-lite-32k-latest",
+ "doubao-1.5-vision-pro-32k",
+ "doubao-1.5-vision-pro-32k-20250128",
+ "doubao-1.5-vision-pro-32k-latest",
+ // Doubao Pro series
+ "doubao-pro-4k",
+ "doubao-pro-4k-20250128",
+ "doubao-pro-4k-latest",
+ "doubao-pro-32k",
+ "doubao-pro-32k-20250128",
+ "doubao-pro-32k-latest",
+ "doubao-pro-128k",
+ "doubao-pro-128k-20250128",
+ "doubao-pro-128k-latest",
+ "doubao-pro-256k",
+ "doubao-pro-256k-20250128",
+ "doubao-pro-256k-latest",
+ // Doubao Lite series
+ "doubao-lite-4k",
+ "doubao-lite-4k-20250128",
+ "doubao-lite-4k-latest",
+ "doubao-lite-32k",
+ "doubao-lite-32k-20250128",
+ "doubao-lite-32k-latest",
+ "doubao-lite-128k",
+ "doubao-lite-128k-20250128",
+ "doubao-lite-128k-latest",
+ // Doubao Vision series
+ "doubao-vision-lite-32k",
+ "doubao-vision-lite-32k-20250128",
+ "doubao-vision-lite-32k-latest",
+ "doubao-vision-pro-32k",
+ "doubao-vision-pro-32k-20250128",
+ "doubao-vision-pro-32k-latest",
+ // Role-play series
+ "doubao-character-pro-32k",
+ "doubao-character-pro-32k-20250128",
+ "doubao-character-pro-32k-latest",
+ "doubao-character-lite-32k",
+ "doubao-character-lite-32k-20250128",
+ "doubao-character-lite-32k-latest",
+ // Speech models
+ "doubao-asr",
+ "doubao-tts",
+ // Embedding models
+ "doubao-embedding",
+ "doubao-embedding-large"
+ ];
1590
+ _apiKey;
1591
+ _baseUrl;
1592
+ _defaultParams;
1593
+ _timeout;
1594
+ constructor(config) {
1595
+ this._apiKey = config.apiKey || "";
1596
+ this._baseUrl = config.baseUrl || "https://ark.cn-beijing.volces.com/api/v3";
1597
+ this._defaultParams = config.defaultParams || {};
1598
+ this._timeout = config.timeout || 6e4;
1599
+ }
1600
+ validateConfig() {
1601
+ return !!this._apiKey;
1602
+ }
1603
+ async makeRequest(endpoint, body) {
1604
+ const controller = new AbortController();
1605
+ const timeoutId = setTimeout(() => controller.abort(), this._timeout);
1606
+ try {
1607
+ const response = await fetch(`${this._baseUrl}${endpoint}`, {
1608
+ method: "POST",
1609
+ headers: {
1610
+ "Content-Type": "application/json",
1611
+ Authorization: `Bearer ${this._apiKey}`
1612
+ },
1613
+ body: JSON.stringify(body),
1614
+ signal: controller.signal
1615
+ });
1616
+ clearTimeout(timeoutId);
1617
+ if (!response.ok) {
1618
+ const error = await response.text();
1619
+ throw new Error(`Doubao API error: ${response.status} - ${error}`);
1620
+ }
1621
+ return response;
1622
+ } catch (error) {
1623
+ clearTimeout(timeoutId);
1624
+ throw error;
1625
+ }
1626
+ }
1627
+ async complete(request) {
1628
+ const body = this.buildRequestBody(request);
1629
+ const response = await this.makeRequest("/chat/completions", body);
1630
+ const data = await response.json();
1631
+ return this.parseResponse(data);
1632
+ }
1633
+ async *stream(request) {
1634
+ const body = this.buildRequestBody(request, true);
1635
+ const response = await this.makeRequest("/chat/completions", body);
1636
+ const reader = response.body?.getReader();
1637
+ if (!reader) {
1638
+ throw new Error("No response body");
1639
+ }
1640
+ const decoder = new TextDecoder();
1641
+ let buffer = "";
1642
+ try {
1643
+ while (true) {
1644
+ const { done, value } = await reader.read();
1645
+ if (done) break;
1646
+ buffer += decoder.decode(value, { stream: true });
1647
+ const lines = buffer.split("\n");
1648
+ buffer = lines.pop() || "";
1649
+ for (const line of lines) {
1650
+ if (line.startsWith("data: ")) {
1651
+ const data = line.slice(6);
1652
+ if (data === "[DONE]") return;
1653
+ try {
1654
+ const chunk = JSON.parse(data);
1655
+ yield this.parseStreamChunk(chunk);
1656
+ } catch {
1657
+ }
1658
+ }
1659
+ }
1660
+ }
1661
+ } finally {
1662
+ reader.releaseLock();
1663
+ }
1664
+ }
1665
+ buildRequestBody(request, stream = false) {
1666
+ return {
1667
+ model: request.model || this._defaultParams.model || "doubao-1.8-pro-32k",
1668
+ messages: request.messages,
1669
+ temperature: request.temperature ?? this._defaultParams.temperature ?? 0.7,
1670
+ max_tokens: request.max_tokens ?? this._defaultParams.max_tokens ?? 4096,
1671
+ top_p: request.top_p ?? this._defaultParams.top_p ?? 1,
1672
+ stream
1673
+ };
1674
+ }
1675
+ parseResponse(data) {
1676
+ const choice = data.choices[0];
1677
+ const message = choice.message;
1678
+ return {
1679
+ id: data.id,
1680
+ model: data.model,
1681
+ content: message.content || "",
1682
+ role: "assistant",
1683
+ usage: data.usage,
1684
+ finish_reason: choice.finish_reason
1685
+ };
1686
+ }
1687
+ parseStreamChunk(data) {
1688
+ const choice = data.choices[0];
1689
+ const delta = choice.delta;
1690
+ return {
1691
+ id: data.id,
1692
+ model: data.model,
1693
+ delta: {
1694
+ content: delta.content,
1695
+ role: delta.role
1696
+ },
1697
+ finish_reason: choice.finish_reason
1698
+ };
1699
+ }
1700
+ };
1701
+ globalProviderRegistry.register(
1702
+ "doubao",
1703
+ (config) => new DoubaoProvider(config)
1704
+ );
1705
+
1706
+ export {
+ LLMProviderRegistry,
+ globalProviderRegistry,
+ LLMManager,
+ OpenAIProvider,
+ AnthropicProvider,
+ GeminiProvider,
+ MoonshotProvider,
+ MiniMaxProvider,
+ ZhipuProvider,
+ QwenProvider,
+ DeepSeekProvider,
+ DoubaoProvider
+ };
+ //# sourceMappingURL=chunk-5XTVS5MB.js.map
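For orientation, below is a minimal usage sketch of the API this chunk exports, written against only the classes and methods visible in the diff above (LLMManager, addProvider, setDefaultProvider, complete, stream). The "sdkwork-browser-agent/llm" import specifier, the choice of the "openai" provider, and the environment-variable API key are illustrative assumptions, not documented package behavior.

// Hypothetical usage sketch; import path and API key source are assumptions.
import { LLMManager } from "sdkwork-browser-agent/llm";

const llm = new LLMManager(); // uses globalProviderRegistry, where "openai" etc. were registered above

// addProvider() looks up the registered factory and instantiates the provider with this config.
llm.addProvider("openai", { apiKey: process.env.OPENAI_API_KEY });
llm.setDefaultProvider("openai");

// complete() resolves the default provider and POSTs to its /chat/completions endpoint.
const reply = await llm.complete({
  model: "gpt-4o-mini",
  messages: [{ role: "user", content: "Summarize the current page." }],
});
console.log(reply.content);

// stream() yields parsed SSE chunks; delta.content carries the incremental text.
for await (const chunk of llm.stream({
  messages: [{ role: "user", content: "Write a haiku about browsers." }],
})) {
  if (chunk.delta.content) process.stdout.write(chunk.delta.content);
}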