@nuvin/nuvin-core 1.0.1 → 1.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/VERSION CHANGED
@@ -1,4 +1,4 @@
  {
- "version": "1.0.1",
- "commit": "d5c55a1"
+ "version": "1.1.1",
+ "commit": "6744c77"
  }
package/dist/index.d.ts CHANGED
@@ -290,6 +290,7 @@ type Message = {
  tool_calls?: ToolCall[];
  tool_call_id?: string;
  name?: string;
+ usage?: UsageData;
  };
  type MessageResponse = {
  id: string;
@@ -348,15 +349,29 @@ type ToolDefinition = {
  type ToolInvocation = {
  id: string;
  name: string;
- parameters: Record<string, any>;
+ parameters: Record<string, unknown>;
  };
+ declare enum ErrorReason {
+ Aborted = "aborted",
+ Denied = "denied",
+ Timeout = "timeout",
+ NotFound = "not_found",
+ PermissionDenied = "permission_denied",
+ InvalidInput = "invalid_input",
+ NetworkError = "network_error",
+ RateLimit = "rate_limit",
+ ToolNotFound = "tool_not_found",
+ Unknown = "unknown"
+ }
  type ToolExecutionResult = {
  id: string;
  name: string;
  status: 'success' | 'error';
  type: 'text' | 'json';
  result: string | object;
- metadata?: Record<string, unknown>;
+ metadata?: Record<string, unknown> & {
+ errorReason?: ErrorReason;
+ };
  durationMs?: number;
  };
  interface ToolPort {
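
The new ErrorReason enum gives tool failures a machine-readable cause alongside the free-form result text. A minimal consumer sketch, assuming a ToolExecutionResult obtained from a tool port; the isRetryable helper is hypothetical, not part of the package:

    import { ErrorReason, type ToolExecutionResult } from '@nuvin/nuvin-core';

    // Hypothetical helper: classify a failed tool call using the structured
    // errorReason that 1.1.x attaches to ToolExecutionResult metadata.
    function isRetryable(result: ToolExecutionResult): boolean {
      if (result.status !== 'error') return false;
      switch (result.metadata?.errorReason) {
        case ErrorReason.Timeout:
        case ErrorReason.NetworkError:
        case ErrorReason.RateLimit:
          return true;  // likely transient
        default:
          return false; // aborted, denied, not_found, invalid_input, unknown, ...
      }
    }
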
@@ -439,7 +454,6 @@ declare const AgentEventTypes: {
  readonly AssistantChunk: "assistant_chunk";
  readonly AssistantMessage: "assistant_message";
  readonly StreamFinish: "stream_finish";
- readonly MemoryAppended: "memory_appended";
  readonly Done: "done";
  readonly Error: "error";
  readonly MCPStderr: "mcp_stderr";
@@ -461,6 +475,7 @@ type AgentEvent = {
  conversationId: string;
  messageId: string;
  toolCalls: ToolCall[];
+ usage?: UsageData;
  } | {
  type: typeof AgentEventTypes.ToolApprovalRequired;
  conversationId: string;
@@ -497,10 +512,6 @@ type AgentEvent = {
  messageId: string;
  finishReason?: string;
  usage?: UsageData;
- } | {
- type: typeof AgentEventTypes.MemoryAppended;
- conversationId: string;
- delta: Message[];
  } | {
  type: typeof AgentEventTypes.Done;
  conversationId: string;
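
Usage can now arrive on the ToolCalls event as well as StreamFinish, and the MemoryAppended event is gone because the orchestrator now appends to memory itself (see the index.js changes below). An illustrative consumer sketch; onAgentEvent is a made-up handler name:

    import { AgentEventTypes, type AgentEvent } from '@nuvin/nuvin-core';

    // Illustrative handler for 1.1.x: read per-turn usage from tool_calls events
    // and drop any handling of the removed memory_appended event.
    function onAgentEvent(event: AgentEvent): void {
      if (event.type === AgentEventTypes.ToolCalls && event.usage) {
        // The UsageData shape is provider-dependent; forward it as-is.
        console.log('usage for tool-call turn', event.messageId, event.usage);
      }
    }
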
@@ -1088,12 +1099,6 @@ declare class BashTool implements FunctionTool<BashParams, ToolExecutionContext>
  private shellArgs;
  }
 
- declare class EchoLLM implements LLMPort {
- generateCompletion(params: CompletionParams): Promise<CompletionResult>;
- private hasTool;
- private id;
- }
-
  type LogLevel = 'DEBUG' | 'INFO' | 'WARN' | 'ERROR';
  type LogFormat = 'json' | 'structured';
  type PersistOptions = {
@@ -1226,16 +1231,27 @@ declare class AnthropicAISDKLLM {
  }, signal?: AbortSignal): Promise<CompletionResult>;
  }
 
+ type ModelConfig = false | true | string | string[] | Array<{
+ id: string;
+ name?: string;
+ [key: string]: unknown;
+ }>;
  interface LLMOptions {
  apiKey?: string;
  apiUrl?: string;
  httpLogFile?: string;
  enablePromptCaching?: boolean;
  includeUsage?: boolean;
+ version?: string;
+ }
+ interface CustomProviderDefinition {
+ type?: 'openai-compat' | 'anthropic';
+ baseUrl?: string;
+ models?: ModelConfig;
  }
- declare function createLLM(providerName: string, options?: LLMOptions): LLMPort;
- declare function getAvailableProviders(): string[];
- declare function supportsGetModels(providerName: string): boolean;
+ declare function createLLM(providerName: string, options?: LLMOptions, customProviders?: Record<string, CustomProviderDefinition>): LLMPort;
+ declare function getAvailableProviders(customProviders?: Record<string, CustomProviderDefinition>): string[];
+ declare function supportsGetModels(providerName: string, customProviders?: Record<string, CustomProviderDefinition>): boolean;
 
  type MCPHttpOptions = {
  type: 'http';
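
createLLM, getAvailableProviders, and supportsGetModels now take an optional map of custom provider definitions that is merged with the built-in provider list. A hedged sketch of the intended call shape; the provider name, URL, and model ids are invented for illustration:

    import { createLLM, getAvailableProviders, supportsGetModels } from '@nuvin/nuvin-core';

    // Hypothetical OpenAI-compatible gateway; nothing here is a real endpoint.
    const customProviders = {
      'my-gateway': {
        type: 'openai-compat' as const,
        baseUrl: 'https://llm.example.internal/v1',
        // ModelConfig: false disables getModels, a string is fetched as a models
        // endpoint path, and an array is returned as a static model list.
        models: ['small-model', 'large-model'],
      },
    };

    getAvailableProviders(customProviders);           // built-in providers plus 'my-gateway'
    supportsGetModels('my-gateway', customProviders); // true, since models is not false
    const llm = createLLM('my-gateway', { apiKey: process.env.MY_GATEWAY_KEY }, customProviders);
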
@@ -1326,4 +1342,4 @@ declare function resolveBackspaces(s: string): string;
  declare function stripAnsiAndControls(s: string): string;
  declare function canonicalizeTerminalPaste(raw: string): string;
 
- export { AGENT_CREATOR_SYSTEM_PROMPT, type AgentAwareToolPort, type AgentCatalog, type AgentConfig, type AgentEvent, AgentEventTypes, AgentFilePersistence, AgentManager, AgentManagerCommandRunner, AgentOrchestrator, AgentRegistry, type AgentTemplate, AnthropicAISDKLLM, type AssignParams, BashTool, CompositeToolPort, type Conversation, ConversationContext, type ConversationMetadata, type ConversationSnapshot, ConversationStore, CoreMCPClient, DefaultDelegationPolicy, DefaultDelegationResultFormatter, DefaultDelegationService, DefaultSpecialistAgentFactory, type DelegationService, type DelegationServiceConfig, DelegationServiceFactory, EchoLLM, type FolderTreeOptions, GithubLLM, InMemoryMemory, InMemoryMetadata, JsonFileMemoryPersistence, type LLMConfig, LLMError, type LLMFactory, type LLMOptions, type LLMPort, LLMResolver, type MCPConfig, type MCPServerConfig, MCPToolPort, type MemoryPort, MemoryPortMetadataAdapter, type Message, type MessageContent, type MessageContentPart, type MetadataPort, NoopReminders, type OrchestratorAwareToolPort, PersistedMemory, PersistingConsoleEventPort, RuntimeEnv, type SendMessageOptions, SimpleContextBuilder, SimpleCost, SimpleId, type SpecialistAgentConfig, type SpecialistAgentResult, SystemClock, type ToolApprovalDecision, type ToolCall, type ToolExecutionResult, type ToolPort, ToolRegistry, type UserAttachment, type UserMessagePayload, buildAgentCreationPrompt, buildInjectedSystem, canonicalizeTerminalPaste, createLLM, generateFolderTree, getAvailableProviders, loadMCPConfig, normalizeNewlines, renderTemplate, resolveBackspaces, resolveCarriageReturns, stripAnsiAndControls, supportsGetModels };
+ export { AGENT_CREATOR_SYSTEM_PROMPT, type AgentAwareToolPort, type AgentCatalog, type AgentConfig, type AgentEvent, AgentEventTypes, AgentFilePersistence, AgentManager, AgentManagerCommandRunner, AgentOrchestrator, AgentRegistry, type AgentTemplate, AnthropicAISDKLLM, type AssignParams, BashTool, CompositeToolPort, type Conversation, ConversationContext, type ConversationMetadata, type ConversationSnapshot, ConversationStore, CoreMCPClient, DefaultDelegationPolicy, DefaultDelegationResultFormatter, DefaultDelegationService, DefaultSpecialistAgentFactory, type DelegationService, type DelegationServiceConfig, DelegationServiceFactory, ErrorReason, type FolderTreeOptions, GithubLLM, InMemoryMemory, InMemoryMetadata, JsonFileMemoryPersistence, type LLMConfig, LLMError, type LLMFactory, type LLMOptions, type LLMPort, LLMResolver, type MCPConfig, type MCPServerConfig, MCPToolPort, type MemoryPort, MemoryPortMetadataAdapter, type Message, type MessageContent, type MessageContentPart, type MetadataPort, NoopReminders, type OrchestratorAwareToolPort, PersistedMemory, PersistingConsoleEventPort, RuntimeEnv, type SendMessageOptions, SimpleContextBuilder, SimpleCost, SimpleId, type SpecialistAgentConfig, type SpecialistAgentResult, SystemClock, type ToolApprovalDecision, type ToolCall, type ToolExecutionResult, type ToolPort, ToolRegistry, type UserAttachment, type UserMessagePayload, buildAgentCreationPrompt, buildInjectedSystem, canonicalizeTerminalPaste, createLLM, generateFolderTree, getAvailableProviders, loadMCPConfig, normalizeNewlines, renderTemplate, resolveBackspaces, resolveCarriageReturns, stripAnsiAndControls, supportsGetModels };
package/dist/index.js CHANGED
@@ -1,4 +1,17 @@
  // ports.ts
+ var ErrorReason = /* @__PURE__ */ ((ErrorReason2) => {
+ ErrorReason2["Aborted"] = "aborted";
+ ErrorReason2["Denied"] = "denied";
+ ErrorReason2["Timeout"] = "timeout";
+ ErrorReason2["NotFound"] = "not_found";
+ ErrorReason2["PermissionDenied"] = "permission_denied";
+ ErrorReason2["InvalidInput"] = "invalid_input";
+ ErrorReason2["NetworkError"] = "network_error";
+ ErrorReason2["RateLimit"] = "rate_limit";
+ ErrorReason2["ToolNotFound"] = "tool_not_found";
+ ErrorReason2["Unknown"] = "unknown";
+ return ErrorReason2;
+ })(ErrorReason || {});
  var MessageRoles = {
  System: "system",
  User: "user",
@@ -14,7 +27,6 @@ var AgentEventTypes = {
  AssistantChunk: "assistant_chunk",
  AssistantMessage: "assistant_message",
  StreamFinish: "stream_finish",
- MemoryAppended: "memory_appended",
  Done: "done",
  Error: "error",
  MCPStderr: "mcp_stderr",
@@ -161,19 +173,22 @@ var AgentOrchestrator = class {
  const todoTools = ["todo_write", "todo_read"];
  return readOnlyTools.includes(toolName) || todoTools.includes(toolName);
  }
- async handleToolDenial(denialMessage, conversationId, messageId, accumulatedMessages, turnHistory, originalToolCalls, assistantContent) {
- accumulatedMessages.push({
- role: "assistant",
- content: assistantContent ?? null,
- tool_calls: originalToolCalls
- });
- turnHistory.push({
+ async handleToolDenial(denialMessage, conversationId, messageId, accumulatedMessages, turnHistory, originalToolCalls, assistantContent, usage) {
+ const assistantMsg = {
  id: this.deps.ids.uuid(),
  role: "assistant",
  content: assistantContent ?? null,
  timestamp: this.deps.clock.iso(),
+ tool_calls: originalToolCalls,
+ usage
+ };
+ accumulatedMessages.push({
+ role: "assistant",
+ content: assistantContent ?? null,
  tool_calls: originalToolCalls
  });
+ turnHistory.push(assistantMsg);
+ const toolResultMsgs = [];
  for (const toolCall of originalToolCalls) {
  const toolDenialResult = "Tool execution denied by user";
  accumulatedMessages.push({
@@ -182,23 +197,27 @@ var AgentOrchestrator = class {
  tool_call_id: toolCall.id,
  name: toolCall.function.name
  });
- turnHistory.push({
+ const toolMsg = {
  id: toolCall.id,
  role: "tool",
  content: toolDenialResult,
  timestamp: this.deps.clock.iso(),
  tool_call_id: toolCall.id,
  name: toolCall.function.name
- });
+ };
+ turnHistory.push(toolMsg);
+ toolResultMsgs.push(toolMsg);
  }
+ await this.deps.memory.append(conversationId, [assistantMsg, ...toolResultMsgs]);
  await this.deps.events?.emit({
  type: AgentEventTypes.AssistantMessage,
  conversationId,
  messageId,
- content: denialMessage
+ content: denialMessage,
+ usage: void 0
  });
  }
- async processToolApproval(toolCalls, conversationId, messageId, accumulatedMessages, turnHistory, assistantContent) {
+ async processToolApproval(toolCalls, conversationId, messageId, accumulatedMessages, turnHistory, assistantContent, usage) {
  if (this.cfg.requireToolApproval === false) {
  return { approvedCalls: toolCalls, wasDenied: false };
  }
@@ -220,7 +239,8 @@ var AgentOrchestrator = class {
  accumulatedMessages,
  turnHistory,
  toolCalls,
- assistantContent
+ assistantContent,
+ usage
  );
  return { approvedCalls: [], wasDenied: true, denialMessage };
  }
@@ -263,6 +283,7 @@ var AgentOrchestrator = class {
  userDisplay = resolveDisplayText(normalized.text, attachments, normalized.displayText);
  const userTimestamp = this.deps.clock.iso();
  userMessages = [{ id: this.deps.ids.uuid(), role: "user", content: userContent, timestamp: userTimestamp }];
+ await this.deps.memory.append(convo, userMessages);
  }
  if (opts.signal?.aborted) throw new Error("Aborted");
  const toolDefs = this.deps.tools.getToolDefinitions(this.cfg.enabledTools ?? []);
@@ -293,8 +314,10 @@ var AgentOrchestrator = class {
  let result;
  let toolApprovalDenied = false;
  let denialMessage = "";
+ let finalResponseSaved = false;
  try {
  if (opts.stream && typeof this.deps.llm.streamCompletion === "function") {
+ let isFirstChunk = true;
  result = await this.deps.llm.streamCompletion(
  params,
  {
@@ -303,7 +326,8 @@ var AgentOrchestrator = class {
  streamedAssistantContent += delta;
  } catch {
  }
- const cleanDelta = delta.replace(/^\n+/, "");
+ const cleanDelta = isFirstChunk ? delta.replace(/^\n+/, "") : delta;
+ isFirstChunk = false;
  const chunkEvent = {
  type: AgentEventTypes.AssistantChunk,
  conversationId: convo,
@@ -329,6 +353,28 @@ var AgentOrchestrator = class {
  } else {
  result = await this.deps.llm.generateCompletion(params, opts.signal);
  }
+ if (!result.tool_calls?.length && result.content && !finalResponseSaved) {
+ const content2 = opts.stream ? streamedAssistantContent : result.content;
+ const assistantMsg = {
+ id: msgId,
+ role: "assistant",
+ content: content2,
+ timestamp: this.deps.clock.iso(),
+ usage: result.usage
+ };
+ await this.deps.memory.append(convo, [assistantMsg]);
+ finalResponseSaved = true;
+ if (content2.trim()) {
+ const messageEvent = {
+ type: AgentEventTypes.AssistantMessage,
+ conversationId: convo,
+ messageId: msgId,
+ content: content2,
+ ...result.usage && { usage: result.usage }
+ };
+ await this.deps.events?.emit(messageEvent);
+ }
+ }
  while (result.tool_calls?.length) {
  if (result.content?.trim()) {
  const messageEvent = {
@@ -345,7 +391,8 @@ var AgentOrchestrator = class {
  type: AgentEventTypes.ToolCalls,
  conversationId: convo,
  messageId: msgId,
- toolCalls: result.tool_calls
+ toolCalls: result.tool_calls,
+ usage: result.usage
  });
  const approvalResult = await this.processToolApproval(
  result.tool_calls,
@@ -353,7 +400,8 @@ var AgentOrchestrator = class {
  msgId,
  accumulatedMessages,
  turnHistory,
- result.content
+ result.content,
+ result.usage
  );
  if (approvalResult.wasDenied) {
  denialMessage = approvalResult.denialMessage || "";
@@ -375,18 +423,18 @@ var AgentOrchestrator = class {
  opts.signal
  );
  allToolResults.push(...toolResults);
- accumulatedMessages.push({ role: "assistant", content: result.content ?? null, tool_calls: approvedCalls });
- turnHistory.push({
+ const assistantMsg = {
  id: this.deps.ids.uuid(),
  role: "assistant",
  content: result.content ?? null,
  timestamp: this.deps.clock.iso(),
- tool_calls: approvedCalls
- });
+ tool_calls: approvedCalls,
+ usage: result.usage
+ };
+ const toolResultMsgs = [];
  for (const tr of toolResults) {
  const contentStr = tr.status === "error" ? String(tr.result) : typeof tr.result === "string" ? tr.result : JSON.stringify(tr.result);
- accumulatedMessages.push({ role: "tool", content: contentStr, tool_call_id: tr.id, name: tr.name });
- turnHistory.push({
+ toolResultMsgs.push({
  id: tr.id,
  role: "tool",
  content: contentStr,
@@ -401,8 +449,16 @@ var AgentOrchestrator = class {
  result: tr
  });
  }
+ await this.deps.memory.append(convo, [assistantMsg, ...toolResultMsgs]);
+ accumulatedMessages.push({ role: "assistant", content: result.content ?? null, tool_calls: approvedCalls });
+ for (const tr of toolResults) {
+ const contentStr = tr.status === "error" ? String(tr.result) : typeof tr.result === "string" ? tr.result : JSON.stringify(tr.result);
+ accumulatedMessages.push({ role: "tool", content: contentStr, tool_call_id: tr.id, name: tr.name });
+ }
  if (opts.signal?.aborted) throw new Error("Aborted");
+ streamedAssistantContent = "";
  if (opts.stream && typeof this.deps.llm.streamCompletion === "function") {
+ let isFirstChunk = true;
  result = await this.deps.llm.streamCompletion(
  { ...params, messages: accumulatedMessages },
  {
@@ -411,7 +467,8 @@ var AgentOrchestrator = class {
  streamedAssistantContent += delta;
  } catch {
  }
- const cleanDelta = delta.replace(/^\n+/, "");
+ const cleanDelta = isFirstChunk ? delta.replace(/^\n+/, "") : delta;
+ isFirstChunk = false;
  const chunkEvent = {
  type: AgentEventTypes.AssistantChunk,
  conversationId: convo,
@@ -437,16 +494,32 @@ var AgentOrchestrator = class {
  } else {
  result = await this.deps.llm.generateCompletion({ ...params, messages: accumulatedMessages }, opts.signal);
  }
+ if (!result.tool_calls?.length && result.content && !finalResponseSaved) {
+ const content2 = opts.stream ? streamedAssistantContent : result.content;
+ const assistantMsg2 = {
+ id: msgId,
+ role: "assistant",
+ content: content2,
+ timestamp: this.deps.clock.iso(),
+ usage: result.usage
+ };
+ await this.deps.memory.append(convo, [assistantMsg2]);
+ finalResponseSaved = true;
+ if (content2.trim()) {
+ const messageEvent = {
+ type: AgentEventTypes.AssistantMessage,
+ conversationId: convo,
+ messageId: msgId,
+ content: content2,
+ ...result.usage && { usage: result.usage }
+ };
+ await this.deps.events?.emit(messageEvent);
+ }
+ }
  }
  const t1 = this.deps.clock.now();
  const timestamp = this.deps.clock.iso();
- const newHistory = [];
- for (const m of userMessages) newHistory.push(m);
- for (const m of turnHistory) newHistory.push(m);
- if (!toolApprovalDenied) {
- newHistory.push({ id: msgId, role: "assistant", content: result.content, timestamp });
- }
- const shouldEmitFinalMessage = result.content?.trim() && !toolApprovalDenied;
+ const shouldEmitFinalMessage = result.content?.trim() && !toolApprovalDenied && !finalResponseSaved;
  if (shouldEmitFinalMessage) {
  const messageEvent = {
  type: AgentEventTypes.AssistantMessage,
@@ -475,8 +548,6 @@ var AgentOrchestrator = class {
  toolCalls: allToolResults.length
  }
  };
- await this.deps.memory.append(convo, newHistory);
- await this.deps.events?.emit({ type: AgentEventTypes.MemoryAppended, conversationId: convo, delta: newHistory });
  await this.deps.events?.emit({
  type: AgentEventTypes.Done,
  conversationId: convo,
@@ -486,26 +557,6 @@ var AgentOrchestrator = class {
  });
  return resp;
  } catch (err2) {
- try {
- const partial = [];
- for (const m of userMessages) partial.push(m);
- for (const m of turnHistory) partial.push(m);
- const partialAssistant = (streamedAssistantContent || "").trim();
- const assistantAlreadyRecorded = turnHistory.some((m) => m.role === "assistant");
- if (partialAssistant && !assistantAlreadyRecorded) {
- partial.push({
- id: this.deps.ids.uuid(),
- role: "assistant",
- content: partialAssistant,
- timestamp: this.deps.clock.iso()
- });
- }
- if (partial.length) {
- await this.deps.memory.append(convo, partial);
- await this.deps.events?.emit({ type: AgentEventTypes.MemoryAppended, conversationId: convo, delta: partial });
- }
- } catch {
- }
  throw err2;
  }
  }
@@ -1822,8 +1873,9 @@ import * as path3 from "path";
  function ok(result, metadata) {
  return { status: "success", type: "text", result, metadata };
  }
- function err(result, metadata) {
- return { status: "error", type: "text", result, metadata };
+ function err(result, metadata, errorReason) {
+ const finalMetadata = errorReason ? { ...metadata, errorReason } : metadata;
+ return { status: "error", type: "text", result, metadata: finalMetadata };
  }
 
  // tools/FileReadTool.ts
@@ -1842,7 +1894,10 @@ var FileReadTool = class {
  parameters = {
  type: "object",
  properties: {
- description: { type: "string", description: 'Explanation of what file is being read and why (e.g., "Read package.json to check dependencies")' },
+ description: {
+ type: "string",
+ description: 'Explanation of what file is being read and why (e.g., "Read package.json to check dependencies")'
+ },
  path: { type: "string", description: "Read contents of this file" },
  lineStart: { type: "integer", minimum: 1, description: "Start reading from this line number (1-based)" },
  lineEnd: { type: "integer", minimum: 1, description: "Stop reading at this line number (inclusive)" }
@@ -1883,7 +1938,7 @@ var FileReadTool = class {
  const [lo, hi] = a <= b ? [a, b] : [b, a];
  const numberedLines = lines.slice(lo - 1, hi).map((line, index) => {
  const lineNum = lo + index;
- return `${lineNum}:${line}`;
+ return `${lineNum}\u2502${line}`;
  });
  const slice = numberedLines.join("\n");
  return ok(slice, {
@@ -2247,15 +2302,15 @@ var BashTool = class {
  return await this.execOnce(p, ctx?.signal);
  } catch (e) {
  if (e instanceof Error && e.name === "AbortError") {
- return err("Command execution aborted by user");
+ return err("Command execution aborted by user", void 0, "aborted" /* Aborted */);
  }
  const message = e instanceof Error ? e.message : String(e);
- return err(message);
+ return err(message, void 0, "unknown" /* Unknown */);
  }
  }
  async execOnce(p, signal) {
  if (signal?.aborted) {
- return err("Command execution aborted by user");
+ return err("Command execution aborted by user", void 0, "aborted" /* Aborted */);
  }
  const { cmd, cwd = process.cwd(), timeoutMs = DEFAULTS.timeoutMs } = p;
  const maxOutputBytes = DEFAULTS.maxOutputBytes;
@@ -2273,7 +2328,7 @@ var BashTool = class {
  });
  } catch (error) {
  if (error instanceof Error && "code" in error && error.code === "ENOENT") {
- return err(`Shell not found: ${executable}`);
+ return err(`Shell not found: ${executable}`, void 0, "not_found" /* NotFound */);
  }
  throw error;
  }
@@ -2283,8 +2338,10 @@ var BashTool = class {
  }
  });
  let timer = null;
+ let timedOut = false;
  const deadline = new Promise((_, rej) => {
  timer = setTimeout(() => {
+ timedOut = true;
  try {
  child.kill("SIGKILL");
  } catch {
@@ -2342,10 +2399,17 @@ var BashTool = class {
  const partialOutput = output ? `
  Output before abort:
  ${output}` : "";
- return err(`Command execution aborted by user${partialOutput}`);
+ return err(`Command execution aborted by user${partialOutput}`, { cwd }, "aborted" /* Aborted */);
  }
  if (code !== 0) {
- return err(output, { code, signal: exitSignal, cwd });
+ const metadata = { code, signal: exitSignal, cwd };
+ if (output.toLowerCase().includes("permission denied")) {
+ return err(output, { ...metadata, errorReason: "permission_denied" /* PermissionDenied */ });
+ }
+ if (output.toLowerCase().includes("command not found") || output.toLowerCase().includes("not found")) {
+ return err(output, { ...metadata, errorReason: "not_found" /* NotFound */ });
+ }
+ return err(output, metadata);
  }
  return ok(output, { code, signal: exitSignal, cwd });
  } catch (e) {
@@ -2358,9 +2422,12 @@ ${output}` : "";
  const partialOutput = output ? `
  Output before abort:
  ${output}` : "";
- return err(`Command execution aborted by user${partialOutput}`);
+ return err(`Command execution aborted by user${partialOutput}`, { cwd }, "aborted" /* Aborted */);
  }
- return err(message);
+ if (timedOut) {
+ return err(message, { cwd }, "timeout" /* Timeout */);
+ }
+ return err(message, { cwd }, "unknown" /* Unknown */);
  }
  }
  defaultShell() {
@@ -3382,6 +3449,7 @@ var ToolRegistry = class {
  status: "error",
  type: "text",
  result: "Tool execution aborted by user",
+ metadata: { errorReason: "aborted" /* Aborted */ },
  durationMs: 0
  });
  }
@@ -3397,6 +3465,7 @@ var ToolRegistry = class {
  status: "error",
  type: "text",
  result: "Tool execution aborted by user",
+ metadata: { errorReason: "aborted" /* Aborted */ },
  durationMs: 0
  };
  }
@@ -3410,6 +3479,7 @@ var ToolRegistry = class {
  status: "error",
  type: "text",
  result: `Tool '${c.name}' not found`,
+ metadata: { errorReason: "tool_not_found" /* ToolNotFound */ },
  durationMs: durationMs2
  };
  }
@@ -3642,59 +3712,6 @@ var AgentFilePersistence = class {
  }
  };
 
- // llm-providers/llm-echo.ts
- var EchoLLM = class {
- async generateCompletion(params) {
- const last = [...params.messages].reverse().find((m) => m.role === "user");
- const content = String(last?.content ?? "");
- const toolCalls = [];
- if (content.startsWith("!reverse ") && this.hasTool(params, "reverse_text")) {
- const text = content.slice("!reverse ".length);
- toolCalls.push({
- id: this.id("rev"),
- type: "function",
- function: { name: "reverse_text", arguments: JSON.stringify({ text }) }
- });
- } else if (content.startsWith("!wc ") && this.hasTool(params, "word_count")) {
- const text = content.slice("!wc ".length);
- toolCalls.push({
- id: this.id("wc"),
- type: "function",
- function: { name: "word_count", arguments: JSON.stringify({ text }) }
- });
- } else if (content.startsWith("!todo ") && this.hasTool(params, "todo_write")) {
- const payload = content.slice("!todo ".length);
- let todos;
- try {
- todos = JSON.parse(payload);
- } catch {
- todos = [];
- }
- toolCalls.push({
- id: this.id("todo"),
- type: "function",
- function: { name: "todo_write", arguments: JSON.stringify({ todos }) }
- });
- }
- const sawTool = params.messages.some((m) => m.role === "tool");
- if (sawTool) {
- const lastTool = [...params.messages].reverse().find((m) => m.role === "tool");
- const toolText = typeof lastTool?.content === "string" ? lastTool.content : "";
- return { content: `Tool result: ${toolText}` };
- }
- if (toolCalls.length > 0) {
- return { content: "", tool_calls: toolCalls };
- }
- return { content: `Echo: ${content}` };
- }
- hasTool(params, name) {
- return !!params.tools?.some((t) => t.function.name === name);
- }
- id(prefix) {
- return `${prefix}_${Math.random().toString(36).slice(2, 10)}`;
- }
- };
-
  // llm-providers/llm-utils.ts
  function mergeChoices(choices) {
  const contentParts = [];
@@ -4456,10 +4473,12 @@ var BaseBearerAuthTransport = class {
  inner;
  apiKey;
  baseUrl;
- constructor(inner, apiKey, baseUrl) {
+ version;
+ constructor(inner, apiKey, baseUrl, version) {
  this.inner = inner;
  this.apiKey = apiKey;
  this.baseUrl = baseUrl ?? this.getDefaultBaseUrl();
+ this.version = version;
  }
  buildFullUrl(path9) {
  if (path9.startsWith("/")) {
@@ -4473,6 +4492,9 @@ var BaseBearerAuthTransport = class {
  }
  const base = headers ? { ...headers } : {};
  base.Authorization = `Bearer ${this.apiKey}`;
+ if (!base["User-Agent"] && this.version) {
+ base["User-Agent"] = `nuvin-cli/${this.version}`;
+ }
  return base;
  }
  async get(url, headers, signal) {
@@ -4492,8 +4514,8 @@ var BaseBearerAuthTransport = class {
  // transports/simple-bearer-transport.ts
  var SimpleBearerAuthTransport = class extends BaseBearerAuthTransport {
  defaultUrl;
- constructor(inner, defaultBaseUrl, apiKey, baseUrl) {
- super(inner, apiKey, baseUrl ?? defaultBaseUrl);
+ constructor(inner, defaultBaseUrl, apiKey, baseUrl, version) {
+ super(inner, apiKey, baseUrl ?? defaultBaseUrl, version);
  this.defaultUrl = defaultBaseUrl;
  }
  getDefaultBaseUrl() {
@@ -4502,8 +4524,8 @@ var SimpleBearerAuthTransport = class extends BaseBearerAuthTransport {
  };
 
  // transports/transport-factory.ts
- function createTransport(_name, inner, defaultBaseUrl, apiKey, baseUrl) {
- return new SimpleBearerAuthTransport(inner, defaultBaseUrl, apiKey, baseUrl);
+ function createTransport(inner, defaultBaseUrl, apiKey, baseUrl, version) {
+ return new SimpleBearerAuthTransport(inner, defaultBaseUrl, apiKey, baseUrl, version);
  }
 
  // llm-providers/llm-github.ts
@@ -4980,9 +5002,8 @@ var llm_provider_config_default = {
  providers: [
  {
  name: "deepinfra",
- className: "DeepInfraLLM",
+ type: "openai-compat",
  baseUrl: "https://api.deepinfra.com/v1/openai",
- transportName: "deepinfra",
  features: {
  promptCaching: false,
  getModels: true
@@ -4990,9 +5011,8 @@ var llm_provider_config_default = {
  },
  {
  name: "openrouter",
- className: "OpenRouterLLM",
+ type: "openai-compat",
  baseUrl: "https://openrouter.ai/api/v1",
- transportName: "openrouter",
  features: {
  promptCaching: true,
  getModels: true,
@@ -5001,9 +5021,8 @@ var llm_provider_config_default = {
  },
  {
  name: "zai",
- className: "ZaiLLM",
+ type: "openai-compat",
  baseUrl: "https://api.z.ai/api/coding/paas/v4",
- transportName: "zai",
  features: {
  promptCaching: false,
  getModels: false
@@ -5011,9 +5030,8 @@ var llm_provider_config_default = {
  },
  {
  name: "moonshot",
- className: "MoonshotLLM",
+ type: "openai-compat",
  baseUrl: "https://api.moonshot.ai/v1",
- transportName: "moonshot",
  features: {
  promptCaching: false,
  getModels: true
@@ -5026,15 +5044,13 @@ var llm_provider_config_default = {
  var providers = llm_provider_config_default.providers;
  var GenericLLM = class extends BaseLLM {
  opts;
- transportName;
  includeUsage;
- supportsModels;
- constructor(transportName, baseUrl, supportsModels, opts = {}) {
+ modelConfig;
+ constructor(baseUrl, modelConfig, opts = {}) {
  const { enablePromptCaching = false, includeUsage = false, ...restOpts } = opts;
  super(opts.apiUrl || baseUrl, { enablePromptCaching });
- this.transportName = transportName;
  this.includeUsage = includeUsage;
- this.supportsModels = supportsModels;
+ this.modelConfig = modelConfig;
  this.opts = restOpts;
  }
  createTransport() {
@@ -5045,11 +5061,24 @@ var GenericLLM = class extends BaseLLM {
  maxFileSize: 5 * 1024 * 1024,
  captureResponseBody: true
  });
- return createTransport(this.transportName, base, this.apiUrl, this.opts.apiKey, this.opts.apiUrl);
+ return createTransport(base, this.apiUrl, this.opts.apiKey, this.opts.apiUrl, this.opts.version);
  }
  async getModels(signal) {
- if (!this.supportsModels) {
- throw new Error(`Provider ${this.transportName} does not support getModels`);
+ if (this.modelConfig === false) {
+ throw new Error("Provider does not support getModels");
+ }
+ if (Array.isArray(this.modelConfig)) {
+ return this.modelConfig.map((m) => typeof m === "string" ? { id: m } : m);
+ }
+ if (typeof this.modelConfig === "string") {
+ const transport2 = this.createTransport();
+ const res2 = await transport2.get(this.modelConfig, void 0, signal);
+ if (!res2.ok) {
+ const text = await res2.text();
+ throw new Error(`Failed to fetch models: ${res2.status} ${text}`);
+ }
+ const data2 = await res2.json();
+ return data2.data;
  }
  const transport = this.createTransport();
  const res = await transport.get("/models", void 0, signal);
@@ -5075,23 +5104,62 @@ var GenericLLM = class extends BaseLLM {
  return super.streamCompletion(enhancedParams, handlers, signal);
  }
  };
- function createLLM(providerName, options = {}) {
- const config = providers.find((p) => p.name.toLowerCase() === providerName.toLowerCase());
+ function normalizeModelConfig(config) {
+ if (config.models !== void 0) {
+ return config.models;
+ }
+ return config.features.getModels ?? false;
+ }
+ function mergeProviders(customProviders) {
+ const merged = /* @__PURE__ */ new Map();
+ for (const provider of providers) {
+ merged.set(provider.name.toLowerCase(), provider);
+ }
+ if (customProviders) {
+ for (const [name, custom] of Object.entries(customProviders)) {
+ if (!custom.baseUrl) {
+ continue;
+ }
+ const existing = merged.get(name.toLowerCase());
+ const providerConfig = {
+ name,
+ type: custom.type ?? "openai-compat",
+ baseUrl: custom.baseUrl,
+ models: custom.models ?? false,
+ features: existing?.features ?? {
+ promptCaching: false,
+ getModels: custom.models !== false,
+ includeUsage: false
+ }
+ };
+ merged.set(name.toLowerCase(), providerConfig);
+ }
+ }
+ return Array.from(merged.values());
+ }
+ function createLLM(providerName, options = {}, customProviders) {
+ const allProviders = mergeProviders(customProviders);
+ const config = allProviders.find((p) => p.name.toLowerCase() === providerName.toLowerCase());
  if (!config) {
- throw new Error(`Unknown LLM provider: ${providerName}. Available: ${providers.map((p) => p.name).join(", ")}`);
+ throw new Error(`Unknown LLM provider: ${providerName}. Available: ${allProviders.map((p) => p.name).join(", ")}`);
  }
- return new GenericLLM(config.transportName, config.baseUrl, config.features.getModels ?? false, {
+ const modelConfig = normalizeModelConfig(config);
+ return new GenericLLM(config.baseUrl, modelConfig, {
  ...options,
  enablePromptCaching: options.enablePromptCaching ?? config.features.promptCaching,
  includeUsage: options.includeUsage ?? config.features.includeUsage
  });
  }
- function getAvailableProviders() {
- return providers.map((p) => p.name);
+ function getAvailableProviders(customProviders) {
+ const allProviders = mergeProviders(customProviders);
+ return allProviders.map((p) => p.name);
  }
- function supportsGetModels(providerName) {
- const config = providers.find((p) => p.name.toLowerCase() === providerName.toLowerCase());
- return config?.features.getModels ?? false;
+ function supportsGetModels(providerName, customProviders) {
+ const allProviders = mergeProviders(customProviders);
+ const config = allProviders.find((p) => p.name.toLowerCase() === providerName.toLowerCase());
+ if (!config) return false;
+ const modelConfig = normalizeModelConfig(config);
+ return modelConfig !== false;
  }
 
  // mcp/mcp-client.ts
@@ -5404,7 +5472,7 @@ export {
  DefaultDelegationService,
  DefaultSpecialistAgentFactory,
  DelegationServiceFactory,
- EchoLLM,
+ ErrorReason,
  GithubLLM,
  InMemoryMemory,
  InMemoryMetadata,
package/package.json CHANGED
@@ -1,11 +1,17 @@
  {
  "name": "@nuvin/nuvin-core",
- "version": "1.0.1",
+ "version": "1.1.1",
  "description": "",
  "private": false,
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "type": "module",
+ "author": "Marsch Huynh <marsch.huynh@gmail.com>",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/marschhuynh/nuvin-cli.git",
+ "directory": "packages/nuvin-core"
+ },
  "files": [
  "dist"
  ],
@@ -19,7 +25,6 @@
  "access": "public"
  },
  "keywords": [],
- "author": "Marsch Huynh <marsch.huynh@gmail.com>",
  "license": "Apache-2.0",
  "dependencies": {
  "@ai-sdk/anthropic": "^2.0.30",