@nuvin/nuvin-core 1.0.0 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -14,7 +14,6 @@ var AgentEventTypes = {
14
14
  AssistantChunk: "assistant_chunk",
15
15
  AssistantMessage: "assistant_message",
16
16
  StreamFinish: "stream_finish",
17
- MemoryAppended: "memory_appended",
18
17
  Done: "done",
19
18
  Error: "error",
20
19
  MCPStderr: "mcp_stderr",
@@ -161,19 +160,22 @@ var AgentOrchestrator = class {
161
160
  const todoTools = ["todo_write", "todo_read"];
162
161
  return readOnlyTools.includes(toolName) || todoTools.includes(toolName);
163
162
  }
164
- async handleToolDenial(denialMessage, conversationId, messageId, accumulatedMessages, turnHistory, originalToolCalls, assistantContent) {
165
- accumulatedMessages.push({
166
- role: "assistant",
167
- content: assistantContent ?? null,
168
- tool_calls: originalToolCalls
169
- });
170
- turnHistory.push({
163
+ async handleToolDenial(denialMessage, conversationId, messageId, accumulatedMessages, turnHistory, originalToolCalls, assistantContent, usage) {
164
+ const assistantMsg = {
171
165
  id: this.deps.ids.uuid(),
172
166
  role: "assistant",
173
167
  content: assistantContent ?? null,
174
168
  timestamp: this.deps.clock.iso(),
169
+ tool_calls: originalToolCalls,
170
+ usage
171
+ };
172
+ accumulatedMessages.push({
173
+ role: "assistant",
174
+ content: assistantContent ?? null,
175
175
  tool_calls: originalToolCalls
176
176
  });
177
+ turnHistory.push(assistantMsg);
178
+ const toolResultMsgs = [];
177
179
  for (const toolCall of originalToolCalls) {
178
180
  const toolDenialResult = "Tool execution denied by user";
179
181
  accumulatedMessages.push({
@@ -182,23 +184,27 @@ var AgentOrchestrator = class {
182
184
  tool_call_id: toolCall.id,
183
185
  name: toolCall.function.name
184
186
  });
185
- turnHistory.push({
187
+ const toolMsg = {
186
188
  id: toolCall.id,
187
189
  role: "tool",
188
190
  content: toolDenialResult,
189
191
  timestamp: this.deps.clock.iso(),
190
192
  tool_call_id: toolCall.id,
191
193
  name: toolCall.function.name
192
- });
194
+ };
195
+ turnHistory.push(toolMsg);
196
+ toolResultMsgs.push(toolMsg);
193
197
  }
198
+ await this.deps.memory.append(conversationId, [assistantMsg, ...toolResultMsgs]);
194
199
  await this.deps.events?.emit({
195
200
  type: AgentEventTypes.AssistantMessage,
196
201
  conversationId,
197
202
  messageId,
198
- content: denialMessage
203
+ content: denialMessage,
204
+ usage: void 0
199
205
  });
200
206
  }
201
- async processToolApproval(toolCalls, conversationId, messageId, accumulatedMessages, turnHistory, assistantContent) {
207
+ async processToolApproval(toolCalls, conversationId, messageId, accumulatedMessages, turnHistory, assistantContent, usage) {
202
208
  if (this.cfg.requireToolApproval === false) {
203
209
  return { approvedCalls: toolCalls, wasDenied: false };
204
210
  }
@@ -220,7 +226,8 @@ var AgentOrchestrator = class {
220
226
  accumulatedMessages,
221
227
  turnHistory,
222
228
  toolCalls,
223
- assistantContent
229
+ assistantContent,
230
+ usage
224
231
  );
225
232
  return { approvedCalls: [], wasDenied: true, denialMessage };
226
233
  }
@@ -263,6 +270,7 @@ var AgentOrchestrator = class {
263
270
  userDisplay = resolveDisplayText(normalized.text, attachments, normalized.displayText);
264
271
  const userTimestamp = this.deps.clock.iso();
265
272
  userMessages = [{ id: this.deps.ids.uuid(), role: "user", content: userContent, timestamp: userTimestamp }];
273
+ await this.deps.memory.append(convo, userMessages);
266
274
  }
267
275
  if (opts.signal?.aborted) throw new Error("Aborted");
268
276
  const toolDefs = this.deps.tools.getToolDefinitions(this.cfg.enabledTools ?? []);
@@ -293,8 +301,10 @@ var AgentOrchestrator = class {
293
301
  let result;
294
302
  let toolApprovalDenied = false;
295
303
  let denialMessage = "";
304
+ let finalResponseSaved = false;
296
305
  try {
297
306
  if (opts.stream && typeof this.deps.llm.streamCompletion === "function") {
307
+ let isFirstChunk = true;
298
308
  result = await this.deps.llm.streamCompletion(
299
309
  params,
300
310
  {
@@ -303,7 +313,8 @@ var AgentOrchestrator = class {
303
313
  streamedAssistantContent += delta;
304
314
  } catch {
305
315
  }
306
- const cleanDelta = delta.replace(/^\n+/, "");
316
+ const cleanDelta = isFirstChunk ? delta.replace(/^\n+/, "") : delta;
317
+ isFirstChunk = false;
307
318
  const chunkEvent = {
308
319
  type: AgentEventTypes.AssistantChunk,
309
320
  conversationId: convo,
@@ -329,6 +340,28 @@ var AgentOrchestrator = class {
329
340
  } else {
330
341
  result = await this.deps.llm.generateCompletion(params, opts.signal);
331
342
  }
343
+ if (!result.tool_calls?.length && result.content && !finalResponseSaved) {
344
+ const content2 = opts.stream ? streamedAssistantContent : result.content;
345
+ const assistantMsg = {
346
+ id: msgId,
347
+ role: "assistant",
348
+ content: content2,
349
+ timestamp: this.deps.clock.iso(),
350
+ usage: result.usage
351
+ };
352
+ await this.deps.memory.append(convo, [assistantMsg]);
353
+ finalResponseSaved = true;
354
+ if (content2.trim()) {
355
+ const messageEvent = {
356
+ type: AgentEventTypes.AssistantMessage,
357
+ conversationId: convo,
358
+ messageId: msgId,
359
+ content: content2,
360
+ ...result.usage && { usage: result.usage }
361
+ };
362
+ await this.deps.events?.emit(messageEvent);
363
+ }
364
+ }
332
365
  while (result.tool_calls?.length) {
333
366
  if (result.content?.trim()) {
334
367
  const messageEvent = {
@@ -345,7 +378,8 @@ var AgentOrchestrator = class {
345
378
  type: AgentEventTypes.ToolCalls,
346
379
  conversationId: convo,
347
380
  messageId: msgId,
348
- toolCalls: result.tool_calls
381
+ toolCalls: result.tool_calls,
382
+ usage: result.usage
349
383
  });
350
384
  const approvalResult = await this.processToolApproval(
351
385
  result.tool_calls,
@@ -353,7 +387,8 @@ var AgentOrchestrator = class {
353
387
  msgId,
354
388
  accumulatedMessages,
355
389
  turnHistory,
356
- result.content
390
+ result.content,
391
+ result.usage
357
392
  );
358
393
  if (approvalResult.wasDenied) {
359
394
  denialMessage = approvalResult.denialMessage || "";
@@ -375,18 +410,18 @@ var AgentOrchestrator = class {
375
410
  opts.signal
376
411
  );
377
412
  allToolResults.push(...toolResults);
378
- accumulatedMessages.push({ role: "assistant", content: result.content ?? null, tool_calls: approvedCalls });
379
- turnHistory.push({
413
+ const assistantMsg = {
380
414
  id: this.deps.ids.uuid(),
381
415
  role: "assistant",
382
416
  content: result.content ?? null,
383
417
  timestamp: this.deps.clock.iso(),
384
- tool_calls: approvedCalls
385
- });
418
+ tool_calls: approvedCalls,
419
+ usage: result.usage
420
+ };
421
+ const toolResultMsgs = [];
386
422
  for (const tr of toolResults) {
387
423
  const contentStr = tr.status === "error" ? String(tr.result) : typeof tr.result === "string" ? tr.result : JSON.stringify(tr.result);
388
- accumulatedMessages.push({ role: "tool", content: contentStr, tool_call_id: tr.id, name: tr.name });
389
- turnHistory.push({
424
+ toolResultMsgs.push({
390
425
  id: tr.id,
391
426
  role: "tool",
392
427
  content: contentStr,
@@ -401,8 +436,16 @@ var AgentOrchestrator = class {
401
436
  result: tr
402
437
  });
403
438
  }
439
+ await this.deps.memory.append(convo, [assistantMsg, ...toolResultMsgs]);
440
+ accumulatedMessages.push({ role: "assistant", content: result.content ?? null, tool_calls: approvedCalls });
441
+ for (const tr of toolResults) {
442
+ const contentStr = tr.status === "error" ? String(tr.result) : typeof tr.result === "string" ? tr.result : JSON.stringify(tr.result);
443
+ accumulatedMessages.push({ role: "tool", content: contentStr, tool_call_id: tr.id, name: tr.name });
444
+ }
404
445
  if (opts.signal?.aborted) throw new Error("Aborted");
446
+ streamedAssistantContent = "";
405
447
  if (opts.stream && typeof this.deps.llm.streamCompletion === "function") {
448
+ let isFirstChunk = true;
406
449
  result = await this.deps.llm.streamCompletion(
407
450
  { ...params, messages: accumulatedMessages },
408
451
  {
@@ -411,7 +454,8 @@ var AgentOrchestrator = class {
411
454
  streamedAssistantContent += delta;
412
455
  } catch {
413
456
  }
414
- const cleanDelta = delta.replace(/^\n+/, "");
457
+ const cleanDelta = isFirstChunk ? delta.replace(/^\n+/, "") : delta;
458
+ isFirstChunk = false;
415
459
  const chunkEvent = {
416
460
  type: AgentEventTypes.AssistantChunk,
417
461
  conversationId: convo,
@@ -437,16 +481,32 @@ var AgentOrchestrator = class {
437
481
  } else {
438
482
  result = await this.deps.llm.generateCompletion({ ...params, messages: accumulatedMessages }, opts.signal);
439
483
  }
484
+ if (!result.tool_calls?.length && result.content && !finalResponseSaved) {
485
+ const content2 = opts.stream ? streamedAssistantContent : result.content;
486
+ const assistantMsg2 = {
487
+ id: msgId,
488
+ role: "assistant",
489
+ content: content2,
490
+ timestamp: this.deps.clock.iso(),
491
+ usage: result.usage
492
+ };
493
+ await this.deps.memory.append(convo, [assistantMsg2]);
494
+ finalResponseSaved = true;
495
+ if (content2.trim()) {
496
+ const messageEvent = {
497
+ type: AgentEventTypes.AssistantMessage,
498
+ conversationId: convo,
499
+ messageId: msgId,
500
+ content: content2,
501
+ ...result.usage && { usage: result.usage }
502
+ };
503
+ await this.deps.events?.emit(messageEvent);
504
+ }
505
+ }
440
506
  }
441
507
  const t1 = this.deps.clock.now();
442
508
  const timestamp = this.deps.clock.iso();
443
- const newHistory = [];
444
- for (const m of userMessages) newHistory.push(m);
445
- for (const m of turnHistory) newHistory.push(m);
446
- if (!toolApprovalDenied) {
447
- newHistory.push({ id: msgId, role: "assistant", content: result.content, timestamp });
448
- }
449
- const shouldEmitFinalMessage = result.content?.trim() && !toolApprovalDenied;
509
+ const shouldEmitFinalMessage = result.content?.trim() && !toolApprovalDenied && !finalResponseSaved;
450
510
  if (shouldEmitFinalMessage) {
451
511
  const messageEvent = {
452
512
  type: AgentEventTypes.AssistantMessage,
@@ -475,8 +535,6 @@ var AgentOrchestrator = class {
475
535
  toolCalls: allToolResults.length
476
536
  }
477
537
  };
478
- await this.deps.memory.append(convo, newHistory);
479
- await this.deps.events?.emit({ type: AgentEventTypes.MemoryAppended, conversationId: convo, delta: newHistory });
480
538
  await this.deps.events?.emit({
481
539
  type: AgentEventTypes.Done,
482
540
  conversationId: convo,
@@ -486,26 +544,6 @@ var AgentOrchestrator = class {
486
544
  });
487
545
  return resp;
488
546
  } catch (err2) {
489
- try {
490
- const partial = [];
491
- for (const m of userMessages) partial.push(m);
492
- for (const m of turnHistory) partial.push(m);
493
- const partialAssistant = (streamedAssistantContent || "").trim();
494
- const assistantAlreadyRecorded = turnHistory.some((m) => m.role === "assistant");
495
- if (partialAssistant && !assistantAlreadyRecorded) {
496
- partial.push({
497
- id: this.deps.ids.uuid(),
498
- role: "assistant",
499
- content: partialAssistant,
500
- timestamp: this.deps.clock.iso()
501
- });
502
- }
503
- if (partial.length) {
504
- await this.deps.memory.append(convo, partial);
505
- await this.deps.events?.emit({ type: AgentEventTypes.MemoryAppended, conversationId: convo, delta: partial });
506
- }
507
- } catch {
508
- }
509
547
  throw err2;
510
548
  }
511
549
  }
@@ -1251,9 +1289,8 @@ var SystemClock = class {
1251
1289
 
1252
1290
  // cost.ts
1253
1291
  var SimpleCost = class {
1254
- // Demo cost: $0.00 (undefined) to avoid implying pricing; could be extended
1255
- estimate(_model, _usage) {
1256
- return void 0;
1292
+ estimate(_model, usage) {
1293
+ return usage?.cost;
1257
1294
  }
1258
1295
  };
1259
1296
 
@@ -1843,7 +1880,10 @@ var FileReadTool = class {
1843
1880
  parameters = {
1844
1881
  type: "object",
1845
1882
  properties: {
1846
- description: { type: "string", description: 'Explanation of what file is being read and why (e.g., "Read package.json to check dependencies")' },
1883
+ description: {
1884
+ type: "string",
1885
+ description: 'Explanation of what file is being read and why (e.g., "Read package.json to check dependencies")'
1886
+ },
1847
1887
  path: { type: "string", description: "Read contents of this file" },
1848
1888
  lineStart: { type: "integer", minimum: 1, description: "Start reading from this line number (1-based)" },
1849
1889
  lineEnd: { type: "integer", minimum: 1, description: "Stop reading at this line number (inclusive)" }
@@ -1884,7 +1924,7 @@ var FileReadTool = class {
1884
1924
  const [lo, hi] = a <= b ? [a, b] : [b, a];
1885
1925
  const numberedLines = lines.slice(lo - 1, hi).map((line, index) => {
1886
1926
  const lineNum = lo + index;
1887
- return `${lineNum}:${line}`;
1927
+ return `${lineNum}\u2502${line}`;
1888
1928
  });
1889
1929
  const slice = numberedLines.join("\n");
1890
1930
  return ok(slice, {
@@ -3643,59 +3683,6 @@ var AgentFilePersistence = class {
3643
3683
  }
3644
3684
  };
3645
3685
 
3646
- // llm-providers/llm-echo.ts
3647
- var EchoLLM = class {
3648
- async generateCompletion(params) {
3649
- const last = [...params.messages].reverse().find((m) => m.role === "user");
3650
- const content = String(last?.content ?? "");
3651
- const toolCalls = [];
3652
- if (content.startsWith("!reverse ") && this.hasTool(params, "reverse_text")) {
3653
- const text = content.slice("!reverse ".length);
3654
- toolCalls.push({
3655
- id: this.id("rev"),
3656
- type: "function",
3657
- function: { name: "reverse_text", arguments: JSON.stringify({ text }) }
3658
- });
3659
- } else if (content.startsWith("!wc ") && this.hasTool(params, "word_count")) {
3660
- const text = content.slice("!wc ".length);
3661
- toolCalls.push({
3662
- id: this.id("wc"),
3663
- type: "function",
3664
- function: { name: "word_count", arguments: JSON.stringify({ text }) }
3665
- });
3666
- } else if (content.startsWith("!todo ") && this.hasTool(params, "todo_write")) {
3667
- const payload = content.slice("!todo ".length);
3668
- let todos;
3669
- try {
3670
- todos = JSON.parse(payload);
3671
- } catch {
3672
- todos = [];
3673
- }
3674
- toolCalls.push({
3675
- id: this.id("todo"),
3676
- type: "function",
3677
- function: { name: "todo_write", arguments: JSON.stringify({ todos }) }
3678
- });
3679
- }
3680
- const sawTool = params.messages.some((m) => m.role === "tool");
3681
- if (sawTool) {
3682
- const lastTool = [...params.messages].reverse().find((m) => m.role === "tool");
3683
- const toolText = typeof lastTool?.content === "string" ? lastTool.content : "";
3684
- return { content: `Tool result: ${toolText}` };
3685
- }
3686
- if (toolCalls.length > 0) {
3687
- return { content: "", tool_calls: toolCalls };
3688
- }
3689
- return { content: `Echo: ${content}` };
3690
- }
3691
- hasTool(params, name) {
3692
- return !!params.tools?.some((t) => t.function.name === name);
3693
- }
3694
- id(prefix) {
3695
- return `${prefix}_${Math.random().toString(36).slice(2, 10)}`;
3696
- }
3697
- };
3698
-
3699
3686
  // llm-providers/llm-utils.ts
3700
3687
  function mergeChoices(choices) {
3701
3688
  const contentParts = [];
@@ -3794,9 +3781,7 @@ var BaseLLM = class {
3794
3781
  }
3795
3782
  }
3796
3783
  }
3797
- const userAssistantMessages = messages.filter(
3798
- (msg) => msg.role === "user" || msg.role === "assistant"
3799
- );
3784
+ const userAssistantMessages = messages.filter((msg) => msg.role === "user" || msg.role === "assistant");
3800
3785
  const lastTwoIndices = [];
3801
3786
  if (userAssistantMessages.length >= 2) {
3802
3787
  for (let i = userAssistantMessages.length - 2; i < userAssistantMessages.length; i++) {
@@ -3839,7 +3824,8 @@ var BaseLLM = class {
3839
3824
  ...enhancedParams.usage && { usage: enhancedParams.usage }
3840
3825
  };
3841
3826
  if (enhancedParams.tools && enhancedParams.tools.length > 0) body.tools = enhancedParams.tools;
3842
- if (enhancedParams.tool_choice && enhancedParams.tools && enhancedParams.tools.length > 0) body.tool_choice = enhancedParams.tool_choice;
3827
+ if (enhancedParams.tool_choice && enhancedParams.tools && enhancedParams.tools.length > 0)
3828
+ body.tool_choice = enhancedParams.tool_choice;
3843
3829
  const res = await this.getTransport().postJson("/chat/completions", body, void 0, signal);
3844
3830
  if (!res.ok) {
3845
3831
  const text = await res.text();
@@ -3864,7 +3850,8 @@ var BaseLLM = class {
3864
3850
  ...enhancedParams.usage && { usage: enhancedParams.usage }
3865
3851
  };
3866
3852
  if (enhancedParams.tools && enhancedParams.tools.length > 0) body.tools = enhancedParams.tools;
3867
- if (enhancedParams.tool_choice && enhancedParams.tools && enhancedParams.tools.length > 0) body.tool_choice = enhancedParams.tool_choice;
3853
+ if (enhancedParams.tool_choice && enhancedParams.tools && enhancedParams.tools.length > 0)
3854
+ body.tool_choice = enhancedParams.tool_choice;
3868
3855
  const res = await this.getTransport().postStream(
3869
3856
  "/chat/completions",
3870
3857
  body,
@@ -3883,6 +3870,7 @@ var BaseLLM = class {
3883
3870
  let content = "";
3884
3871
  const mergedToolCalls = [];
3885
3872
  let usage;
3873
+ let lastFinishReason;
3886
3874
  const flushEvent = (rawEvent) => {
3887
3875
  const lines = rawEvent.split("\n");
3888
3876
  const dataLines = [];
@@ -3895,11 +3883,15 @@ var BaseLLM = class {
3895
3883
  try {
3896
3884
  const evt = JSON.parse(dataStr);
3897
3885
  const choices = Array.isArray(evt.choices) ? evt.choices : [];
3898
- const finishReason = choices.find((ch) => ch.finish_reason)?.finish_reason;
3899
- if (evt.usage) {
3900
- usage = normalizeUsage(evt.usage);
3901
- if (finishReason && handlers.onStreamFinish) {
3902
- handlers.onStreamFinish(finishReason, usage);
3886
+ const finishReason = choices.find((ch) => ch.finish_reason && ch.finish_reason !== null)?.finish_reason;
3887
+ if (finishReason) {
3888
+ lastFinishReason = finishReason;
3889
+ }
3890
+ const usageData = evt.usage || choices[0]?.usage;
3891
+ if (usageData) {
3892
+ usage = normalizeUsage(usageData);
3893
+ if (lastFinishReason && handlers.onStreamFinish) {
3894
+ handlers.onStreamFinish(lastFinishReason, usage);
3903
3895
  } else {
3904
3896
  handlers.onChunk?.("", usage);
3905
3897
  }
@@ -4447,15 +4439,17 @@ var GithubAuthTransport = class {
4447
4439
  }
4448
4440
  };
4449
4441
 
4450
- // transports/openrouter-transport.ts
4451
- var OpenRouterAuthTransport = class {
4442
+ // transports/base-bearer-auth-transport.ts
4443
+ var BaseBearerAuthTransport = class {
4452
4444
  inner;
4453
4445
  apiKey;
4454
4446
  baseUrl;
4455
- constructor(inner, apiKey, baseUrl) {
4447
+ version;
4448
+ constructor(inner, apiKey, baseUrl, version) {
4456
4449
  this.inner = inner;
4457
4450
  this.apiKey = apiKey;
4458
- this.baseUrl = baseUrl ?? "https://openrouter.ai/api/v1";
4451
+ this.baseUrl = baseUrl ?? this.getDefaultBaseUrl();
4452
+ this.version = version;
4459
4453
  }
4460
4454
  buildFullUrl(path9) {
4461
4455
  if (path9.startsWith("/")) {
@@ -4468,45 +4462,10 @@ var OpenRouterAuthTransport = class {
4468
4462
  throw new Error("API key missing");
4469
4463
  }
4470
4464
  const base = headers ? { ...headers } : {};
4471
- base["Authorization"] = `Bearer ${this.apiKey}`;
4472
- return base;
4473
- }
4474
- async get(url, headers, signal) {
4475
- const fullUrl = this.buildFullUrl(url);
4476
- return this.inner.get(fullUrl, this.makeAuthHeaders(headers), signal);
4477
- }
4478
- async postJson(url, body, headers, signal) {
4479
- const fullUrl = this.buildFullUrl(url);
4480
- return this.inner.postJson(fullUrl, body, this.makeAuthHeaders(headers), signal);
4481
- }
4482
- async postStream(url, body, headers, signal) {
4483
- const fullUrl = this.buildFullUrl(url);
4484
- return this.inner.postStream(fullUrl, body, this.makeAuthHeaders(headers), signal);
4485
- }
4486
- };
4487
-
4488
- // transports/deepinfra-transport.ts
4489
- var DeepInfraAuthTransport = class {
4490
- inner;
4491
- apiKey;
4492
- baseUrl;
4493
- constructor(inner, apiKey, baseUrl) {
4494
- this.inner = inner;
4495
- this.apiKey = apiKey;
4496
- this.baseUrl = baseUrl ?? "https://api.deepinfra.com/v1/openai";
4497
- }
4498
- buildFullUrl(path9) {
4499
- if (path9.startsWith("/")) {
4500
- return `${this.baseUrl}${path9}`;
4501
- }
4502
- return path9;
4503
- }
4504
- makeAuthHeaders(headers) {
4505
- if (!this.apiKey || this.apiKey.trim() === "") {
4506
- throw new Error("API key missing");
4465
+ base.Authorization = `Bearer ${this.apiKey}`;
4466
+ if (!base["User-Agent"] && this.version) {
4467
+ base["User-Agent"] = `nuvin-cli/${this.version}`;
4507
4468
  }
4508
- const base = headers ? { ...headers } : {};
4509
- base["Authorization"] = `Bearer ${this.apiKey}`;
4510
4469
  return base;
4511
4470
  }
4512
4471
  async get(url, headers, signal) {
@@ -4523,142 +4482,22 @@ var DeepInfraAuthTransport = class {
4523
4482
  }
4524
4483
  };
4525
4484
 
4526
- // transports/zai-transport.ts
4527
- var ZAIAuthTransport = class {
4528
- inner;
4529
- apiKey;
4530
- baseUrl;
4531
- constructor(inner, apiKey, baseUrl) {
4532
- this.inner = inner;
4533
- this.apiKey = apiKey;
4534
- this.baseUrl = baseUrl ?? "https://api.z.ai/api/coding/paas/v4";
4535
- }
4536
- buildFullUrl(path9) {
4537
- if (path9.startsWith("/")) {
4538
- return `${this.baseUrl}${path9}`;
4539
- }
4540
- return path9;
4541
- }
4542
- makeAuthHeaders(headers) {
4543
- if (!this.apiKey || this.apiKey.trim() === "") {
4544
- throw new Error("API key missing");
4545
- }
4546
- const base = headers ? { ...headers } : {};
4547
- base.Authorization = `Bearer ${this.apiKey}`;
4548
- return base;
4549
- }
4550
- async get(url, headers, signal) {
4551
- const fullUrl = this.buildFullUrl(url);
4552
- return this.inner.get(fullUrl, this.makeAuthHeaders(headers), signal);
4485
+ // transports/simple-bearer-transport.ts
4486
+ var SimpleBearerAuthTransport = class extends BaseBearerAuthTransport {
4487
+ defaultUrl;
4488
+ constructor(inner, defaultBaseUrl, apiKey, baseUrl, version) {
4489
+ super(inner, apiKey, baseUrl ?? defaultBaseUrl, version);
4490
+ this.defaultUrl = defaultBaseUrl;
4553
4491
  }
4554
- async postJson(url, body, headers, signal) {
4555
- const fullUrl = this.buildFullUrl(url);
4556
- return this.inner.postJson(fullUrl, body, this.makeAuthHeaders(headers), signal);
4557
- }
4558
- async postStream(url, body, headers, signal) {
4559
- const fullUrl = this.buildFullUrl(url);
4560
- return this.inner.postStream(fullUrl, body, this.makeAuthHeaders(headers), signal);
4492
+ getDefaultBaseUrl() {
4493
+ return this.defaultUrl;
4561
4494
  }
4562
4495
  };
4563
4496
 
4564
- // transports/anthropic-transport.ts
4565
- var AnthropicAuthTransport = class {
4566
- apiKey;
4567
- oauth;
4568
- baseUrl;
4569
- inner;
4570
- constructor(inner, opts = {}) {
4571
- this.inner = inner;
4572
- this.apiKey = opts.apiKey;
4573
- this.oauth = opts.oauth;
4574
- this.baseUrl = opts.baseUrl || "https://api.anthropic.com";
4575
- }
4576
- async refreshOAuth(signal) {
4577
- if (!this.oauth?.refresh) return;
4578
- if (this.oauth.expires > Date.now()) return;
4579
- try {
4580
- const response = await fetch("https://console.anthropic.com/v1/oauth/token", {
4581
- method: "POST",
4582
- headers: {
4583
- "Content-Type": "application/json"
4584
- },
4585
- body: JSON.stringify({
4586
- grant_type: "refresh_token",
4587
- refresh_token: this.oauth.refresh,
4588
- client_id: "9d1c250a-e61b-44d9-88ed-5944d1962f5e"
4589
- }),
4590
- signal
4591
- });
4592
- if (!response.ok) {
4593
- throw new Error(`OAuth token refresh failed: ${response.status} ${response.statusText}`);
4594
- }
4595
- const data = await response.json();
4596
- this.oauth = {
4597
- type: "oauth",
4598
- access: data.access_token,
4599
- refresh: data.refresh_token || this.oauth.refresh,
4600
- expires: Date.now() + data.expires_in * 1e3
4601
- };
4602
- } catch (error) {
4603
- throw new Error(`Failed to refresh OAuth token: ${error instanceof Error ? error.message : String(error)}`);
4604
- }
4605
- }
4606
- buildFullUrl(path9) {
4607
- if (path9.startsWith("/")) {
4608
- return `${this.baseUrl}${path9}`;
4609
- }
4610
- return path9;
4611
- }
4612
- getAuthHeaders() {
4613
- const headers = {};
4614
- if (this.oauth) {
4615
- headers.authorization = `Bearer ${this.oauth.access}`;
4616
- } else if (this.apiKey) {
4617
- headers["x-api-key"] = this.apiKey;
4618
- } else {
4619
- throw new Error("No authentication credentials provided");
4620
- }
4621
- headers["anthropic-version"] = "2023-06-01";
4622
- headers["anthropic-beta"] = [
4623
- "claude-code-20250219",
4624
- "interleaved-thinking-2025-05-14",
4625
- "fine-grained-tool-streaming-2025-05-14"
4626
- ].join(",");
4627
- return headers;
4628
- }
4629
- async get(url, headers, signal) {
4630
- if (this.oauth) {
4631
- await this.refreshOAuth(signal);
4632
- }
4633
- const fullUrl = this.buildFullUrl(url);
4634
- const authHeaders = this.getAuthHeaders();
4635
- const mergedHeaders = { ...authHeaders, ...headers };
4636
- return this.inner.get(fullUrl, mergedHeaders, signal);
4637
- }
4638
- async postJson(url, body, headers, signal) {
4639
- if (this.oauth) {
4640
- await this.refreshOAuth(signal);
4641
- }
4642
- const fullUrl = this.buildFullUrl(url);
4643
- const authHeaders = this.getAuthHeaders();
4644
- const mergedHeaders = { ...authHeaders, ...headers };
4645
- return this.inner.postJson(fullUrl, body, mergedHeaders, signal);
4646
- }
4647
- async postStream(url, body, headers, signal) {
4648
- if (this.oauth) {
4649
- await this.refreshOAuth(signal);
4650
- }
4651
- const fullUrl = this.buildFullUrl(url);
4652
- const authHeaders = this.getAuthHeaders();
4653
- const mergedHeaders = { ...authHeaders, ...headers };
4654
- return this.inner.postStream(fullUrl, body, mergedHeaders, signal);
4655
- }
4656
- // Method to update OAuth credentials (useful for token refresh from config)
4657
- updateCredentials(opts) {
4658
- if (opts.apiKey) this.apiKey = opts.apiKey;
4659
- if (opts.oauth) this.oauth = opts.oauth;
4660
- }
4661
- };
4497
+ // transports/transport-factory.ts
4498
+ function createTransport(inner, defaultBaseUrl, apiKey, baseUrl, version) {
4499
+ return new SimpleBearerAuthTransport(inner, defaultBaseUrl, apiKey, baseUrl, version);
4500
+ }
4662
4501
 
4663
4502
  // llm-providers/llm-github.ts
4664
4503
  var GithubLLM = class extends BaseLLM {
@@ -4685,392 +4524,6 @@ var GithubLLM = class extends BaseLLM {
4685
4524
  }
4686
4525
  };
4687
4526
 
4688
// llm-providers/llm-openrouter.ts
// OpenRouter-backed LLM (OpenAI-compatible API). When `includeUsage` is
// enabled (the default) every completion request asks the API to report
// token usage via `usage: { include: true }`.
var OpenRouterLLM = class extends BaseLLM {
  includeUsage;
  opts;
  constructor(opts = {}) {
    const { enablePromptCaching = true, includeUsage = true, ...restOpts } = opts;
    super("https://openrouter.ai/api/v1", { enablePromptCaching });
    this.includeUsage = includeUsage;
    this.opts = restOpts;
  }
  createTransport() {
    const inner = new FetchTransport({ persistFile: this.opts.httpLogFile });
    return new OpenRouterAuthTransport(inner, this.opts.apiKey, this.apiUrl);
  }
  // Inject `usage: { include: true }` unless the caller already set a
  // usage option or usage reporting is disabled.
  withUsage(params) {
    if (!this.includeUsage || params.usage) return params;
    return { ...params, usage: { include: true } };
  }
  async generateCompletion(params, signal) {
    return super.generateCompletion(this.withUsage(params), signal);
  }
  async streamCompletion(params, handlers, signal) {
    return super.streamCompletion(this.withUsage(params), handlers, signal);
  }
  // List the models exposed by the OpenRouter /models endpoint.
  async getModels(signal) {
    const res = await this.createTransport().get("/models", void 0, signal);
    if (!res.ok) {
      const text = await res.text();
      throw new Error(`Failed to fetch models: ${res.status} ${text}`);
    }
    const data = await res.json();
    return data.data;
  }
};
4727
-
4728
// llm-providers/llm-deepinfra.ts
// DeepInfra provider (OpenAI-compatible API). Falls back to the
// DEEPINFRA_API_KEY environment variable when no API key is supplied.
var DeepInfraLLM = class extends BaseLLM {
  opts;
  constructor(opts = {}) {
    super(opts.apiUrl || "https://api.deepinfra.com/v1/openai");
    this.opts = opts;
  }
  createTransport() {
    const apiKey = this.opts.apiKey || process.env.DEEPINFRA_API_KEY;
    const inner = new FetchTransport({ persistFile: this.opts.httpLogFile });
    return new DeepInfraAuthTransport(inner, apiKey, this.opts.apiUrl);
  }
  // List the models exposed by the provider's /models endpoint.
  async getModels(signal) {
    const res = await this.createTransport().get("/models", void 0, signal);
    if (!res.ok) {
      const text = await res.text();
      throw new Error(`Failed to fetch models: ${res.status} ${text}`);
    }
    const data = await res.json();
    return data.data;
  }
};
4750
-
4751
// llm-providers/llm-zai.ts
// Z.AI coding-endpoint provider (OpenAI-compatible API).
var ZaiLLM = class extends BaseLLM {
  opts;
  constructor(opts = {}) {
    super("https://api.z.ai/api/coding/paas/v4");
    this.opts = opts;
  }
  createTransport() {
    const transportConfig = {
      persistFile: this.opts.httpLogFile,
      logLevel: "INFO",
      enableConsoleLog: false,
      maxFileSize: 5 * 1024 * 1024, // rotate the HTTP log file after 5MB
      captureResponseBody: true
    };
    const inner = new FetchTransport(transportConfig);
    return new ZAIAuthTransport(inner, this.opts.apiKey, this.apiUrl);
  }
};
4771
-
4772
// llm-providers/llm-anthropic.ts
// Direct Anthropic Messages API client. Unlike the other providers this
// class does not extend BaseLLM: it translates the OpenAI-style chat
// params used elsewhere in this file into Anthropic's /v1/messages
// request format, and maps responses/stream events back.
var AnthropicLLM = class {
  // Lazily-created transport; see getTransport().
  transport = null;
  opts;
  apiUrl;
  constructor(opts = {}) {
    this.opts = opts;
    this.apiUrl = opts.apiUrl || "https://api.anthropic.com";
  }
  // Lazily build (once) a transport that signs requests with either an
  // API key or OAuth credentials.
  getTransport() {
    if (!this.transport) {
      const base = new FetchTransport({
        persistFile: this.opts.httpLogFile
      });
      this.transport = new AnthropicAuthTransport(base, {
        apiKey: this.opts.apiKey,
        oauth: this.opts.oauth,
        baseUrl: this.apiUrl
      });
    }
    return this.transport;
  }
  // Convert an OpenAI-style message list into Anthropic's shape:
  //  - system messages are collected and hoisted into `result.system`;
  //  - tool-role messages become `tool_result` blocks on a user message,
  //    appended to the previous user message when its content is a block
  //    array (Anthropic requires tool results inside user turns);
  //  - `data:` image URLs become base64 image blocks (other URL forms
  //    are silently dropped);
  //  - assistant tool_calls become `tool_use` blocks;
  //  - a message whose only block is text collapses to a plain string.
  transformToAnthropicMessages(messages) {
    const systemMessages = [];
    const anthropicMessages = [];
    for (const msg of messages) {
      if (msg.role === "system") {
        // System content may be a string or an array of text parts.
        if (typeof msg.content === "string") {
          systemMessages.push(msg.content);
        } else if (Array.isArray(msg.content)) {
          for (const part of msg.content) {
            if (part.type === "text") {
              systemMessages.push(part.text);
            }
          }
        }
        continue;
      }
      if (msg.role === "tool") {
        // Merge into the trailing user message when possible, otherwise
        // start a new user message holding the tool_result block.
        const lastMsg = anthropicMessages[anthropicMessages.length - 1];
        if (lastMsg && lastMsg.role === "user" && Array.isArray(lastMsg.content)) {
          lastMsg.content.push({
            type: "tool_result",
            tool_use_id: msg.tool_call_id || "",
            content: typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content)
          });
        } else {
          anthropicMessages.push({
            role: "user",
            content: [
              {
                type: "tool_result",
                tool_use_id: msg.tool_call_id || "",
                content: typeof msg.content === "string" ? msg.content : JSON.stringify(msg.content)
              }
            ]
          });
        }
        continue;
      }
      // user/assistant messages: build a content-block array.
      const content = [];
      if (typeof msg.content === "string" && msg.content) {
        content.push({ type: "text", text: msg.content });
      } else if (Array.isArray(msg.content)) {
        for (const part of msg.content) {
          if (part.type === "text") {
            content.push({ type: "text", text: part.text });
          } else if (part.type === "image_url") {
            const url = part.image_url.url;
            // Only inline data: URLs are supported; decode the media
            // type and base64 payload from the URL itself.
            if (url.startsWith("data:")) {
              const match = url.match(/^data:([^;]+);base64,(.+)$/);
              if (match) {
                content.push({
                  type: "image",
                  source: {
                    type: "base64",
                    media_type: match[1],
                    data: match[2]
                  }
                });
              }
            }
          }
        }
      }
      if (msg.tool_calls) {
        // Assistant tool invocations map to tool_use blocks; arguments
        // arrive as a JSON string and Anthropic wants a parsed object.
        for (const toolCall of msg.tool_calls) {
          content.push({
            type: "tool_use",
            id: toolCall.id,
            name: toolCall.function.name,
            input: JSON.parse(toolCall.function.arguments || "{}")
          });
        }
      }
      if (content.length > 0) {
        anthropicMessages.push({
          role: msg.role === "assistant" ? "assistant" : "user",
          // Single text block collapses to a bare string.
          content: content.length === 1 && content[0].type === "text" ? content[0].text : content
        });
      }
    }
    const result = {
      messages: anthropicMessages
    };
    if (systemMessages.length > 0) {
      // NOTE(review): a hard-coded Claude Code identity preamble is
      // prepended before the caller's system prompt — presumably required
      // by the claude-code beta/OAuth flow configured in the transport;
      // confirm before changing.
      result.system = [
        {
          type: "text",
          text: "You are Claude Code, Anthropic's official CLI for Claude."
        },
        { type: "text", text: systemMessages.join("\n\n") }
      ];
    }
    return result;
  }
  // OpenAI function-tool definitions -> Anthropic tool definitions.
  // Returns undefined when there are no tools.
  transformTools(tools) {
    if (!tools || tools.length === 0) return void 0;
    return tools.map((tool) => ({
      name: tool.function.name,
      description: tool.function.description,
      input_schema: tool.function.parameters
    }));
  }
  // OpenAI tool_choice -> Anthropic tool_choice. "none" maps to
  // undefined (omit the field); anything unrecognized falls back to auto.
  transformToolChoice(toolChoice) {
    if (!toolChoice || toolChoice === "auto") {
      return { type: "auto" };
    }
    if (toolChoice === "none") {
      return void 0;
    }
    if (typeof toolChoice === "object" && toolChoice.type === "function") {
      return { type: "tool", name: toolChoice.function.name };
    }
    return { type: "auto" };
  }
  // Non-streaming Anthropic response -> OpenAI-style completion result:
  // text blocks concatenate into `content`, tool_use blocks become
  // tool_calls (arguments re-serialized to JSON), and usage is mapped to
  // prompt/completion/total token counts.
  transformResponse(response) {
    let content = "";
    const tool_calls = [];
    for (const block of response.content) {
      if (block.type === "text") {
        content += block.text;
      } else if (block.type === "tool_use") {
        tool_calls.push({
          id: block.id,
          type: "function",
          function: {
            name: block.name,
            arguments: JSON.stringify(block.input)
          }
        });
      }
    }
    const usage = {
      prompt_tokens: response.usage.input_tokens,
      completion_tokens: response.usage.output_tokens,
      total_tokens: response.usage.input_tokens + response.usage.output_tokens
    };
    return {
      content,
      ...tool_calls.length > 0 ? { tool_calls } : {},
      usage
    };
  }
  // One-shot (non-streaming) completion via POST /v1/messages.
  // Throws with the response body text on a non-OK status.
  async generateCompletion(params, signal) {
    const { system, messages } = this.transformToAnthropicMessages(params.messages);
    const tools = this.transformTools(params.tools);
    const tool_choice = tools ? this.transformToolChoice(params.tool_choice) : void 0;
    const body = {
      model: params.model,
      messages,
      max_tokens: params.maxTokens ?? 10240,
      temperature: params.temperature,
      stream: false,
      // ...(params.topP !== undefined && { top_p: params.topP }),
      ...system && { system },
      ...tools && { tools },
      ...tool_choice && { tool_choice }
    };
    const res = await this.getTransport().postJson("/v1/messages", body, void 0, signal);
    if (!res.ok) {
      const text = await res.text();
      throw new Error(text || `Anthropic API error ${res.status}`);
    }
    const data = await res.json();
    return this.transformResponse(data);
  }
  // Streaming completion via SSE. Parses Anthropic's event stream
  // (message_start, content_block_start/delta, message_delta,
  // message_stop), invoking handlers.onChunk for text deltas,
  // handlers.onToolCallDelta for tool-argument deltas, and
  // handlers.onStreamFinish at message_stop. Returns the fully
  // accumulated completion.
  async streamCompletion(params, handlers = {}, signal) {
    const { system, messages } = this.transformToAnthropicMessages(params.messages);
    const tools = this.transformTools(params.tools);
    const tool_choice = tools ? this.transformToolChoice(params.tool_choice) : void 0;
    const body = {
      model: params.model,
      messages,
      max_tokens: params.maxTokens ?? 10240,
      temperature: params.temperature,
      stream: true,
      // ...(params.topP !== undefined && { top_p: params.topP }),
      ...system && { system },
      ...tools && { tools },
      ...tool_choice && { tool_choice }
    };
    const res = await this.getTransport().postStream("/v1/messages", body, { Accept: "text/event-stream" }, signal);
    if (!res.ok) {
      const text = await res.text();
      throw new Error(text || `Anthropic stream error ${res.status}`);
    }
    const reader = res.body?.getReader();
    if (!reader) return { content: "" };
    const decoder = new TextDecoder("utf-8");
    let buffer = "";
    let content = "";
    // In-progress tool calls, keyed by the event's content-block index.
    const toolCalls = /* @__PURE__ */ new Map();
    let usage;
    // input_tokens only arrives on message_start; stash it for the final
    // usage object assembled from message_delta.
    let inputTokens = 0;
    let stopReason = null;
    // Parse one SSE `data:` payload (a JSON event) and update state.
    const processEvent = (eventData) => {
      try {
        const event = JSON.parse(eventData);
        if (event.type === "message_start") {
          if (event.message?.usage) {
            inputTokens = event.message.usage.input_tokens || 0;
          }
        } else if (event.type === "content_block_start") {
          const block = event.content_block;
          if (block?.type === "tool_use" && "id" in block && "name" in block) {
            toolCalls.set(event.index, {
              id: String(block.id),
              type: "function",
              function: {
                name: String(block.name) || "",
                arguments: ""
              }
            });
          }
        } else if (event.type === "content_block_delta") {
          if (event.delta.type === "text_delta") {
            const textDelta = event.delta.text;
            if (textDelta) {
              content += textDelta;
              handlers.onChunk?.(textDelta);
            }
          } else if (event.delta.type === "input_json_delta") {
            // Tool arguments stream as partial JSON appended per delta.
            const toolCall = toolCalls.get(event.index);
            if (toolCall) {
              toolCall.function.arguments += event.delta.partial_json;
              handlers.onToolCallDelta?.(toolCall);
            }
          }
        } else if (event.type === "message_delta") {
          if (event.delta.stop_reason) {
            stopReason = event.delta.stop_reason;
          }
          if (event.usage) {
            usage = {
              prompt_tokens: inputTokens,
              completion_tokens: event.usage.output_tokens,
              total_tokens: inputTokens + event.usage.output_tokens
            };
          }
        } else if (event.type === "message_stop") {
          if (handlers.onStreamFinish) {
            handlers.onStreamFinish(stopReason || void 0, usage);
          }
        }
      } catch (err2) {
        // Malformed / non-JSON SSE payloads are deliberately ignored.
      }
    };
    // Read the byte stream, splitting on newlines; the trailing partial
    // line is carried over in `buffer` between reads.
    while (true) {
      const { value, done } = await reader.read();
      if (done) break;
      const chunk = decoder.decode(value, { stream: true });
      buffer += chunk;
      const lines = buffer.split("\n");
      buffer = lines.pop() || "";
      for (const line of lines) {
        if (line.startsWith("data: ")) {
          const data = line.slice(6).trim();
          if (data && data !== "[DONE]") {
            processEvent(data);
          }
        }
      }
    }
    // Flush any final event left in the buffer after the stream ends.
    if (buffer.trim()) {
      const line = buffer.trim();
      if (line.startsWith("data: ")) {
        const data = line.slice(6).trim();
        if (data && data !== "[DONE]") {
          processEvent(data);
        }
      }
    }
    const tool_calls = toolCalls.size > 0 ? Array.from(toolCalls.values()) : void 0;
    return {
      content,
      ...tool_calls && { tool_calls },
      ...usage && { usage }
    };
  }
};
5073
-
5074
4527
  // llm-providers/llm-anthropic-aisdk.ts
5075
4528
  import { createAnthropic } from "@ai-sdk/anthropic";
5076
4529
  import {
@@ -5515,6 +4968,171 @@ var AnthropicAISDKLLM = class {
5515
4968
  }
5516
4969
  };
5517
4970
 
4971
// llm-providers/llm-provider-config.json
// Bundled default provider registry (inlined JSON). Consumed by the
// factory code below: `createLLM` uses each entry's feature flags as
// defaults when the caller does not override them —
//   promptCaching: default for enablePromptCaching,
//   getModels:     whether a model-listing endpoint is available,
//   includeUsage:  request per-completion usage accounting.
// All entries are OpenAI-compatible ("openai-compat") endpoints.
var llm_provider_config_default = {
  providers: [
    {
      name: "deepinfra",
      type: "openai-compat",
      baseUrl: "https://api.deepinfra.com/v1/openai",
      features: {
        promptCaching: false,
        getModels: true
      }
    },
    {
      name: "openrouter",
      type: "openai-compat",
      baseUrl: "https://openrouter.ai/api/v1",
      features: {
        promptCaching: true,
        getModels: true,
        includeUsage: true
      }
    },
    {
      name: "zai",
      type: "openai-compat",
      baseUrl: "https://api.z.ai/api/coding/paas/v4",
      features: {
        promptCaching: false,
        getModels: false
      }
    },
    {
      name: "moonshot",
      type: "openai-compat",
      baseUrl: "https://api.moonshot.ai/v1",
      features: {
        promptCaching: false,
        getModels: true
      }
    }
  ]
};
5013
+
5014
// llm-providers/llm-factory.ts
var providers = llm_provider_config_default.providers;
// Config-driven OpenAI-compatible provider. One class serves every
// entry in the provider registry; per-provider behavior comes from the
// constructor arguments (base URL, model config, feature flags).
var GenericLLM = class extends BaseLLM {
  opts;
  includeUsage;
  // false = no model listing; array = static list; string = custom
  // models endpoint path; anything else = default "/models" endpoint.
  modelConfig;
  constructor(baseUrl, modelConfig, opts = {}) {
    const { enablePromptCaching = false, includeUsage = false, ...restOpts } = opts;
    // An explicit opts.apiUrl overrides the registry's base URL.
    super(opts.apiUrl || baseUrl, { enablePromptCaching });
    this.includeUsage = includeUsage;
    this.modelConfig = modelConfig;
    this.opts = restOpts;
  }
  createTransport() {
    const base = new FetchTransport({
      persistFile: this.opts.httpLogFile,
      logLevel: "INFO",
      enableConsoleLog: false,
      maxFileSize: 5 * 1024 * 1024, // rotate the HTTP log file after 5MB
      captureResponseBody: true
    });
    // Module-level factory (transport-factory.ts), not this method.
    return createTransport(base, this.apiUrl, this.opts.apiKey, this.opts.apiUrl, this.opts.version);
  }
  // Shared fetch path for model listing; throws with status and body
  // text on a non-OK response. (Previously duplicated for the custom
  // endpoint and default "/models" branches.)
  async fetchModelList(path, signal) {
    const transport = this.createTransport();
    const res = await transport.get(path, void 0, signal);
    if (!res.ok) {
      const text = await res.text();
      throw new Error(`Failed to fetch models: ${res.status} ${text}`);
    }
    const data = await res.json();
    return data.data;
  }
  async getModels(signal) {
    if (this.modelConfig === false) {
      throw new Error("Provider does not support getModels");
    }
    // Static list from config: normalize bare strings to { id }.
    if (Array.isArray(this.modelConfig)) {
      return this.modelConfig.map((m) => typeof m === "string" ? { id: m } : m);
    }
    // Custom endpoint path when configured as a string, else the
    // conventional "/models" endpoint.
    const path = typeof this.modelConfig === "string" ? this.modelConfig : "/models";
    return this.fetchModelList(path, signal);
  }
  // Inject `usage: { include: true }` when usage reporting is enabled
  // and the caller did not set a usage option themselves.
  withUsage(params) {
    if (this.includeUsage && !params.usage) {
      return { ...params, usage: { include: true } };
    }
    return params;
  }
  async generateCompletion(params, signal) {
    return super.generateCompletion(this.withUsage(params), signal);
  }
  async streamCompletion(params, handlers, signal) {
    return super.streamCompletion(this.withUsage(params), handlers, signal);
  }
};
5078
// Resolve a provider entry's model configuration: an explicit `models`
// field (even `false`) wins; otherwise fall back to the
// `features.getModels` flag, defaulting to false (no model listing).
function normalizeModelConfig(config) {
  const { models, features } = config;
  return models === void 0 ? (features.getModels ?? false) : models;
}
5084
// Overlay user-supplied provider configs on the bundled defaults.
// Entries are keyed case-insensitively by name; custom entries missing a
// baseUrl are skipped, and a custom entry overriding a known provider
// keeps that provider's default feature flags.
function mergeProviders(customProviders) {
  const merged = new Map(
    providers.map((provider) => [provider.name.toLowerCase(), provider])
  );
  for (const [name, custom] of Object.entries(customProviders ?? {})) {
    if (!custom.baseUrl) continue;
    const key = name.toLowerCase();
    const existing = merged.get(key);
    merged.set(key, {
      name,
      type: custom.type ?? "openai-compat",
      baseUrl: custom.baseUrl,
      models: custom.models ?? false,
      features: existing?.features ?? {
        promptCaching: false,
        getModels: custom.models !== false,
        includeUsage: false
      }
    });
  }
  return [...merged.values()];
}
5111
// Instantiate a GenericLLM for a named provider (case-insensitive).
// Explicit options win over the provider's configured feature defaults.
// Throws, listing the available names, when the provider is unknown.
function createLLM(providerName, options = {}, customProviders) {
  const allProviders = mergeProviders(customProviders);
  const wanted = providerName.toLowerCase();
  const config = allProviders.find((p) => p.name.toLowerCase() === wanted);
  if (!config) {
    throw new Error(`Unknown LLM provider: ${providerName}. Available: ${allProviders.map((p) => p.name).join(", ")}`);
  }
  return new GenericLLM(config.baseUrl, normalizeModelConfig(config), {
    ...options,
    enablePromptCaching: options.enablePromptCaching ?? config.features.promptCaching,
    includeUsage: options.includeUsage ?? config.features.includeUsage
  });
}
5124
// Names of every provider after merging custom configs over defaults.
function getAvailableProviders(customProviders) {
  return mergeProviders(customProviders).map((p) => p.name);
}
5128
// Whether the named provider can list models: false for unknown
// providers and for providers whose model config normalizes to false.
function supportsGetModels(providerName, customProviders) {
  const wanted = providerName.toLowerCase();
  const config = mergeProviders(customProviders).find(
    (p) => p.name.toLowerCase() === wanted
  );
  return config ? normalizeModelConfig(config) !== false : false;
}
5135
+
5518
5136
  // mcp/mcp-client.ts
5519
5137
  import { Client } from "@modelcontextprotocol/sdk/client/index.js";
5520
5138
  import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
@@ -5815,19 +5433,16 @@ export {
5815
5433
  AgentOrchestrator,
5816
5434
  AgentRegistry,
5817
5435
  AnthropicAISDKLLM,
5818
- AnthropicLLM,
5819
5436
  BashTool,
5820
5437
  CompositeToolPort,
5821
5438
  ConversationContext,
5822
5439
  ConversationStore,
5823
5440
  CoreMCPClient,
5824
- DeepInfraLLM,
5825
5441
  DefaultDelegationPolicy,
5826
5442
  DefaultDelegationResultFormatter,
5827
5443
  DefaultDelegationService,
5828
5444
  DefaultSpecialistAgentFactory,
5829
5445
  DelegationServiceFactory,
5830
- EchoLLM,
5831
5446
  GithubLLM,
5832
5447
  InMemoryMemory,
5833
5448
  InMemoryMetadata,
@@ -5837,7 +5452,6 @@ export {
5837
5452
  MCPToolPort,
5838
5453
  MemoryPortMetadataAdapter,
5839
5454
  NoopReminders,
5840
- OpenRouterLLM,
5841
5455
  PersistedMemory,
5842
5456
  PersistingConsoleEventPort,
5843
5457
  RuntimeEnv,
@@ -5846,15 +5460,17 @@ export {
5846
5460
  SimpleId,
5847
5461
  SystemClock,
5848
5462
  ToolRegistry,
5849
- ZaiLLM,
5850
5463
  buildAgentCreationPrompt,
5851
5464
  buildInjectedSystem,
5852
5465
  canonicalizeTerminalPaste,
5466
+ createLLM,
5853
5467
  generateFolderTree,
5468
+ getAvailableProviders,
5854
5469
  loadMCPConfig,
5855
5470
  normalizeNewlines,
5856
5471
  renderTemplate,
5857
5472
  resolveBackspaces,
5858
5473
  resolveCarriageReturns,
5859
- stripAnsiAndControls
5474
+ stripAnsiAndControls,
5475
+ supportsGetModels
5860
5476
  };