@xalia/agent 0.6.3 → 0.6.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. package/dist/agent/src/agent/agent.js +12 -11
  2. package/dist/agent/src/agent/dummyLLM.js +24 -15
  3. package/dist/agent/src/agent/llm.js +0 -22
  4. package/dist/agent/src/agent/mcpServerManager.js +73 -15
  5. package/dist/agent/src/agent/openAI.js +32 -0
  6. package/dist/agent/src/agent/openAILLM.js +25 -1
  7. package/dist/agent/src/agent/openAILLMStreaming.js +8 -3
  8. package/dist/agent/src/agent/sudoMcpServerManager.js +22 -9
  9. package/dist/agent/src/chat/client/chatClient.js +2 -1
  10. package/dist/agent/src/chat/client/sessionClient.js +28 -3
  11. package/dist/agent/src/chat/data/dbSessionFileModels.js +10 -0
  12. package/dist/agent/src/chat/protocol/messages.js +1 -0
  13. package/dist/agent/src/chat/server/chatContextManager.js +4 -4
  14. package/dist/agent/src/chat/server/conversation.js +1 -1
  15. package/dist/agent/src/chat/server/imageGeneratorTools.js +7 -4
  16. package/dist/agent/src/chat/server/openSession.js +85 -12
  17. package/dist/agent/src/chat/server/sessionFileManager.js +17 -6
  18. package/dist/agent/src/chat/server/sessionRegistry.js +1 -1
  19. package/dist/agent/src/chat/server/tools.js +58 -10
  20. package/dist/agent/src/test/agent.test.js +26 -2
  21. package/dist/agent/src/test/chatContextManager.test.js +5 -5
  22. package/dist/agent/src/test/mcpServerManager.test.js +5 -1
  23. package/dist/agent/src/test/testTools.js +5 -2
  24. package/dist/agent/src/tool/chatMain.js +23 -3
  25. package/dist/agent/src/tool/files.js +0 -27
  26. package/package.json +3 -3
  27. package/scripts/test_chat +3 -1
  28. package/src/agent/agent.ts +53 -47
  29. package/src/agent/agentUtils.ts +7 -7
  30. package/src/agent/compressingContextManager.ts +4 -9
  31. package/src/agent/context.ts +28 -37
  32. package/src/agent/dummyLLM.ts +38 -28
  33. package/src/agent/iAgentEventHandler.ts +6 -9
  34. package/src/agent/imageGenLLM.ts +11 -5
  35. package/src/agent/llm.ts +41 -106
  36. package/src/agent/mcpServerManager.ts +145 -29
  37. package/src/agent/openAI.ts +123 -0
  38. package/src/agent/openAILLM.ts +52 -5
  39. package/src/agent/openAILLMStreaming.ts +36 -32
  40. package/src/agent/repeatLLM.ts +5 -6
  41. package/src/agent/sudoMcpServerManager.ts +48 -16
  42. package/src/agent/tools.ts +3 -5
  43. package/src/chat/client/chatClient.ts +3 -1
  44. package/src/chat/client/sessionClient.ts +47 -7
  45. package/src/chat/data/dataModels.ts +3 -3
  46. package/src/chat/data/dbSessionFileModels.ts +22 -0
  47. package/src/chat/protocol/messages.ts +39 -13
  48. package/src/chat/server/chatContextManager.ts +20 -24
  49. package/src/chat/server/conversation.ts +10 -10
  50. package/src/chat/server/imageGeneratorTools.ts +18 -9
  51. package/src/chat/server/openSession.ts +111 -22
  52. package/src/chat/server/sessionFileManager.ts +33 -10
  53. package/src/chat/server/sessionRegistry.ts +1 -1
  54. package/src/chat/server/tools.ts +77 -18
  55. package/src/chat/utils/approvalManager.ts +2 -2
  56. package/src/test/agent.test.ts +56 -31
  57. package/src/test/approvalManager.test.ts +2 -2
  58. package/src/test/chatContextManager.test.ts +11 -14
  59. package/src/test/compressingContextManager.test.ts +3 -3
  60. package/src/test/context.test.ts +3 -3
  61. package/src/test/conversation.test.ts +7 -7
  62. package/src/test/dbSessionMessages.test.ts +3 -3
  63. package/src/test/mcpServerManager.test.ts +10 -1
  64. package/src/test/testTools.ts +44 -33
  65. package/src/tool/agentChat.ts +10 -8
  66. package/src/tool/agentMain.ts +2 -2
  67. package/src/tool/chatMain.ts +38 -6
  68. package/src/tool/commandPrompt.ts +2 -4
  69. package/src/tool/files.ts +0 -34
  70. package/test_data/dummyllm_script_image_gen.json +27 -17
  71. package/test_data/dummyllm_script_invoke_image_gen_tool.json +9 -2
  72. package/test_data/dummyllm_script_render_tool.json +29 -0
  73. package/test_data/dummyllm_script_test_auto_approve.json +81 -0
  74. package/test_data/dummyllm_script_test_simplecalc_addition.json +29 -0
@@ -0,0 +1,123 @@
+ import { OpenAI } from "openai";
+
+ /**
+  *
+  * Extension to the OpenAI messages to support custom elements added by other
+  * providers. These represent the data returned by from provider APIs, which
+  * may not match our internal structures.
+  *
+  */
+
+ export type ChatCompletionContentPartImage =
+   OpenAI.Chat.Completions.ChatCompletionContentPartImage;
+
+ export type ChatCompletionMessageToolCall =
+   OpenAI.Chat.Completions.ChatCompletionMessageToolCall;
+
+ // Extend the ChatCompletionMessage type with an `images` value, compatible
+ // with google/gemini-2.5-flash-image-preview.
+ export interface ChatCompletionMessage
+   extends OpenAI.Chat.Completions.ChatCompletionMessage {
+   images?: ChatCompletionContentPartImage[];
+ }
+
+ // Extend ChatCompletionChoice with messages containing images
+ export interface ChatCompletionChoice
+   extends OpenAI.Chat.Completions.ChatCompletion.Choice {
+   message: ChatCompletionMessage;
+ }
+
+ // Extends ChatCompletion with Choices containing images
+ export interface ChatCompletion extends OpenAI.Chat.Completions.ChatCompletion {
+   choices: Array<ChatCompletionChoice>;
+ }
+
+ // TODO: Do we need these? They are input types, generally for convenience.
+ // Instead, we could just make sure our internal types are compatible with the
+ // OpenAI definition.
+
+ // Expose for conveinence for now
+
+ export type ChatCompletionAssistantMessageParam =
+   OpenAI.ChatCompletionAssistantMessageParam;
+
+ export type ChatCompletionContentPart = OpenAI.ChatCompletionContentPart;
+
+ export type ChatCompletionUserMessageParam =
+   OpenAI.ChatCompletionUserMessageParam;
+
+ // export type ToolMessageParam = OpenAI.ChatCompletionToolMessageParam;
+
+ // openrouter reasoning types
+
+ export type ReasoningEffort = {
+   effort?: OpenAI.ReasoningEffort;
+   max_tokens?: never;
+ };
+
+ export type ReasoningMaxTokens = { effort?: never; max_tokens?: number };
+
+ export type ReasoningExclude = { exclude?: boolean; enabled?: never };
+
+ export type ReasoningEnabled = {
+   exclude?: never;
+   enabled?: boolean;
+ };
+
+ export type Reasoning = (ReasoningEffort | ReasoningMaxTokens) &
+   (ReasoningExclude | ReasoningEnabled);
+
+ export type ReasoningDetails = {
+   type: "reasoning.text" | "<unknown>";
+   text?: string;
+   signature?: string;
+   format?: string;
+   index?: number;
+ };
+
+ /**
+  * A (openrouter-specific) stream chunk possibly containing reasoning tokens.
+  */
+ export type ChatCompletionChunkChoiceDeltaWithReasoning =
+   OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta & {
+     reasoning?: string;
+     reasoning_details?: ReasoningDetails[];
+   };
+
+ // /**
+ //  * A chat completion message with extra reasoning tokens.
+ //  */
+ // export interface ChatCompletionMessageWithReasoning
+ //   extends OpenAI.Chat.Completions.ChatCompletionMessage {
+ //   reasoning?: string;
+ // }
+
+ // Util function to extract reasoning tokens
+
+ export function choiceDeltaExtractReasoning(
+   delta: ChatCompletionChunkChoiceDeltaWithReasoning
+ ): string | undefined {
+   if (delta.reasoning) {
+     return delta.reasoning;
+   }
+
+   if (delta.reasoning_details) {
+     let reasoning = "";
+     for (const details of delta.reasoning_details) {
+       if (details.type !== "reasoning.text") {
+         throw new Error(`unexpected details.type: ${details.type}`);
+       }
+       if (details.text) {
+         if (typeof details.text !== "string") {
+           throw new Error(
+             `unexpected typeof details.text: ${typeof details.text}`
+           );
+         }
+         reasoning += details.text;
+       }
+     }
+     return reasoning;
+   }
+
+   return undefined;
+ }
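
For orientation, a minimal sketch of how the reasoning-extraction helper added in this new module might be exercised. The delta literal below is hypothetical; only the types and behaviour shown in the hunk above are assumed.

  import {
    ChatCompletionChunkChoiceDeltaWithReasoning,
    choiceDeltaExtractReasoning,
  } from "./openAI";

  // Hypothetical streamed delta carrying OpenRouter-style reasoning_details.
  const delta: ChatCompletionChunkChoiceDeltaWithReasoning = {
    reasoning_details: [
      { type: "reasoning.text", text: "Check the units first. " },
      { type: "reasoning.text", text: "Then convert to Celsius." },
    ],
  };

  // Concatenates the reasoning.text entries into a single string; a delta with
  // a plain `reasoning` field would be returned as-is instead.
  const reasoning = choiceDeltaExtractReasoning(delta);
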
@@ -1,5 +1,51 @@
- import { ILLM, XALIA_APP_HEADER } from "./llm";
+ import { strict as assert } from "assert";
+
+ import {
+   ILLM,
+   MessageParam,
+   ToolDescriptor,
+   XALIA_APP_HEADER,
+   Completion,
+   Choice,
+   MessageToolCall,
+   Message,
+ } from "./llm";
  import { OpenAI } from "openai";
+ import {
+   ChatCompletion,
+   ChatCompletionChoice,
+   ChatCompletionMessage,
+   ChatCompletionMessageToolCall,
+ } from "./openAI";
+
+ function toolCallFromOpenAi(
+   toolCall: ChatCompletionMessageToolCall
+ ): MessageToolCall {
+   assert(toolCall.type === "function");
+   return toolCall;
+ }
+
+ function messageFromOpenAi(message: ChatCompletionMessage): Message {
+   return {
+     ...message,
+     tool_calls: message.tool_calls?.map(toolCallFromOpenAi),
+   };
+ }
+
+ function choicesFromOpenAI(choice: ChatCompletionChoice): Choice {
+   return {
+     ...choice,
+     message: messageFromOpenAi(choice.message),
+     // tool_calls: choice.tool_calls?.map(toolCallFromOpenAi),
+   };
+ }
+
+ function completionFromOpenAI(completion: ChatCompletion): Completion {
+   return {
+     ...completion,
+     choices: completion.choices.map(choicesFromOpenAI),
+   };
+ }

  export class OpenAILLM implements ILLM {
    private readonly openai: OpenAI;
@@ -28,10 +74,10 @@ export class OpenAILLM implements ILLM {
  }

  public async getConversationResponse(
-   messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[],
-   tools?: OpenAI.Chat.Completions.ChatCompletionTool[],
+   messages: MessageParam[],
+   tools?: ToolDescriptor[],
    onMessage?: (msg: string, end: boolean) => Promise<void>
- ): Promise<OpenAI.Chat.Completions.ChatCompletion> {
+ ): Promise<Completion> {
    const completion = await this.openai.chat.completions.create({
      model: this.model,
      messages,
@@ -43,6 +89,7 @@ export class OpenAILLM implements ILLM {
        await onMessage(message.content, true);
      }
    }
-   return completion;
+
+   return completionFromOpenAI(completion);
  }
}
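
Taken together, these hunks mean getConversationResponse now returns the provider-neutral Completion from ./llm rather than the raw OpenAI type. A rough usage sketch, assuming an OpenAILLM instance named llm and that MessageParam accepts a plain user message (neither is shown in this diff); temperatureTool is the ToolDescriptor defined later in this diff.

  // Hypothetical caller of the converted, provider-neutral result.
  const completion = await llm.getConversationResponse(
    [{ role: "user", content: "What is the temperature in Paris?" }],
    [temperatureTool]
  );

  for (const toolCall of completion.choices[0].message.tool_calls ?? []) {
    // toolCallFromOpenAi has already asserted type === "function" on each call.
    console.log(toolCall.function.name, toolCall.function.arguments);
  }
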
@@ -5,20 +5,28 @@ import { getLogger } from "@xalia/xmcp/sdk";

  import {
    ILLM,
-   ChatCompletionChunkChoiceDeltaWithReasoning,
-   ChatCompletionMessageWithReasoning,
-   Reasoning,
-   choiceDeltaExtractReasoning,
+   Message,
    XALIA_APP_HEADER,
+   MessageToolCall,
+   Choice,
+   Completion,
+   MessageParam,
+   ToolDescriptor,
  } from "./llm";

+ import {
+   Reasoning,
+   ChatCompletionChunkChoiceDeltaWithReasoning,
+   choiceDeltaExtractReasoning,
+ } from "./openAI";
+
  const logger = getLogger();

  function initialToolCallFunction(
    deltaFn:
      | OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta.ToolCall.Function
      | undefined
- ): OpenAI.Chat.Completions.ChatCompletionMessageToolCall.Function {
+ ): OpenAI.Chat.Completions.ChatCompletionMessageFunctionToolCall.Function {
    // export interface ChatCompletionChunk.Choice.Delta.ToolCall.Function {
    //   arguments?: string;
    //   name?: string;
@@ -38,7 +46,7 @@ function initialToolCallFunction(
  }

  function updateToolCallFunction(
-   existingFn: OpenAI.Chat.Completions.ChatCompletionMessageToolCall.Function,
+   existingFn: OpenAI.Chat.Completions.ChatCompletionMessageFunctionToolCall.Function, // eslint-disable-line
    deltaFn: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta.ToolCall.Function // eslint-disable-line
  ) {
    // export interface ChatCompletionChunk.Choice.Delta.ToolCall.Function {
@@ -65,7 +73,7 @@ function updateToolCallFunction(

  function initialToolCall(
    delta: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta.ToolCall
- ): OpenAI.Chat.Completions.ChatCompletionMessageToolCall {
+ ): MessageToolCall {
    return {
      id: delta.id || "",
      function: initialToolCallFunction(delta.function),
@@ -74,7 +82,7 @@ function initialToolCall(
  }

  function updateToolCall(
-   existing: OpenAI.Chat.Completions.ChatCompletionMessageToolCall,
+   existing: MessageToolCall,
    delta: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta.ToolCall
  ) {
    // export interface ChatCompletionChunk.Choice.Delta.ToolCall {
@@ -110,11 +118,9 @@ function updateToolCall(
  }

  function updateToolCalls(
-   toolCalls:
-     | OpenAI.Chat.Completions.ChatCompletionMessageToolCall[]
-     | undefined,
+   toolCalls: MessageToolCall[] | undefined,
    deltaToolCall: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta.ToolCall // eslint-disable-line
- ): OpenAI.Chat.Completions.ChatCompletionMessageToolCall[] {
+ ): MessageToolCall[] {
    // export interface ChatCompletionChunk.Choice.Delta.ToolCall {
    //   index: number;
    //   id?: string;
@@ -149,7 +155,7 @@ function updateToolCalls(

  function initializeCompletionMessage(
    delta: OpenAI.Chat.Completions.ChatCompletionChunk.Choice.Delta
- ): OpenAI.Chat.Completions.ChatCompletionMessage {
+ ): Message {
    assert(delta.role === undefined || delta.role == "assistant");
    // eslint-disable-next-line @typescript-eslint/no-deprecated
    assert(!delta.function_call);
@@ -174,9 +180,7 @@ function initializeCompletionMessage(
    //   tool_calls?: Array<ChatCompletionMessageToolCall>;
    // }

-   let toolCalls:
-     | OpenAI.Chat.Completions.ChatCompletionMessageToolCall[]
-     | undefined = undefined;
+   let toolCalls: MessageToolCall[] | undefined = undefined;
    if (delta.tool_calls) {
      for (const t of delta.tool_calls) {
        toolCalls = updateToolCalls(toolCalls, t);
@@ -194,10 +198,7 @@ function initializeCompletionMessage(
    };
  }

- function updateReasoning(
-   message: ChatCompletionMessageWithReasoning,
-   reasoning: string
- ) {
+ function updateReasoning(message: Message, reasoning: string) {
    if (!message.reasoning) {
      message.reasoning = reasoning;
    } else {
@@ -206,7 +207,7 @@ function updateReasoning(
  }

  function updateCompletionMessage(
-   message: ChatCompletionMessageWithReasoning,
+   message: Message,
    delta: ChatCompletionChunkChoiceDeltaWithReasoning
  ) {
    // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
@@ -280,7 +281,7 @@ function updateCompletionMessage(

  function initializeCompletionChoice(
    chunkChoice: OpenAI.Chat.Completions.ChatCompletionChunk.Choice
- ): { choice: OpenAI.Chat.Completions.ChatCompletion.Choice; done: boolean } {
+ ): { choice: Choice; done: boolean } {
    // export interface ChatCompletionChunk.Choice {
    //   delta: Choice.Delta;
    //   finish_reason:
@@ -313,7 +314,7 @@ function initializeCompletionChoice(
  }

  function updateCompletionChoice(
-   completionChoice: OpenAI.Chat.Completions.ChatCompletion.Choice,
+   completionChoice: Choice,
    chunkChoice: OpenAI.Chat.Completions.ChatCompletionChunk.Choice
  ): boolean {
    // export interface ChatCompletionChunk.Choice {
@@ -352,7 +353,7 @@ function updateCompletionChoice(

  function initializeCompletionChoices(
    chunkChoices: OpenAI.Chat.Completions.ChatCompletionChunk.Choice[]
- ): { choices: OpenAI.Chat.Completions.ChatCompletion.Choice[]; done: boolean } {
+ ): { choices: Choice[]; done: boolean } {
    // Technically, one choice could be done and the other still have some
    // content to stream. We keep it simple for now and allow zero or one
    // choice per chunk, which allows us to mark everything as done if any
@@ -361,7 +362,7 @@ function initializeCompletionChoices(
    assert(chunkChoices.length < 2);

    let msgDone = false;
-   const choices: OpenAI.Chat.Completions.ChatCompletion.Choice[] = [];
+   const choices: Choice[] = [];
    for (const chunkChoice of chunkChoices) {
      const { choice, done } = initializeCompletionChoice(chunkChoice);
      if (done) {
@@ -374,7 +375,7 @@ function initializeCompletionChoices(
  }

  function updateCompletionChoices(
-   completionChoices: OpenAI.Chat.Completions.ChatCompletion.Choice[],
+   completionChoices: Choice[],
    chunkChoices: OpenAI.Chat.Completions.ChatCompletionChunk.Choice[]
  ): boolean {
    // Technically, one choice could be done and the other still have some
@@ -402,7 +403,7 @@ function updateCompletionChoices(

  export function initializeCompletion(
    chunk: OpenAI.Chat.Completions.ChatCompletionChunk
- ): { initMessage: OpenAI.Chat.Completions.ChatCompletion; done: boolean } {
+ ): { initMessage: Completion; done: boolean } {
    // export interface ChatCompletionChunk {
    //   id: string;
    //   choices: Array<ChatCompletionChunk.Choice>;
@@ -436,6 +437,7 @@ export function initializeCompletion(
      model: chunk.model,
      object: "chat.completion",
      service_tier: chunk.service_tier,
+     // eslint-disable-next-line @typescript-eslint/no-deprecated
      system_fingerprint: chunk.system_fingerprint,
      usage: chunk.usage ?? undefined,
    },
@@ -444,7 +446,7 @@ export function initializeCompletion(
  }

  export function updateCompletion(
-   completion: OpenAI.Chat.Completions.ChatCompletion,
+   completion: Completion,
    chunk: OpenAI.Chat.Completions.ChatCompletionChunk
  ): boolean {
    // export interface ChatCompletionChunk {
@@ -474,7 +476,9 @@ export function updateCompletion(
    assert(completion.id === chunk.id);
    assert(completion.model === chunk.model);
    completion.service_tier = completion.service_tier || chunk.service_tier;
+   // eslint-disable-next-line @typescript-eslint/no-deprecated
    completion.system_fingerprint =
+     // eslint-disable-next-line @typescript-eslint/no-deprecated
      completion.system_fingerprint || chunk.system_fingerprint;
    completion.usage = completion.usage || chunk.usage || undefined;

@@ -508,11 +512,11 @@ export class OpenAILLMStreaming implements ILLM {
  }

  public async getConversationResponse(
-   messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[],
-   tools?: OpenAI.Chat.Completions.ChatCompletionTool[],
+   messages: MessageParam[],
+   tools?: ToolDescriptor[],
    onMessage?: (msg: string, end: boolean) => Promise<void>,
    onReasoning?: (reasoning: string) => Promise<void>
- ): Promise<OpenAI.Chat.Completions.ChatCompletion> {
+ ): Promise<Completion> {
    const reasoning: Reasoning = {
      effort: "medium",
      enabled: true,
@@ -533,7 +537,7 @@ export class OpenAILLMStreaming implements ILLM {
      throw new Error("not a stream");
    }

-   let aggregatedMessage: OpenAI.Chat.Completions.ChatCompletion | undefined;
+   let aggregatedMessage: Completion | undefined;

    for await (const chunk of chunks) {
      logger.debug(`[stream] chunk: ${JSON.stringify(chunk)}`);
@@ -1,5 +1,4 @@
- import { ILLM } from "./llm";
- import { OpenAI } from "openai";
+ import { Choice, Completion, ILLM, MessageParam, ToolDescriptor } from "./llm";
  import { strict as assert } from "assert";

  export class RepeatLLM implements ILLM {
@@ -14,14 +13,14 @@ export class RepeatLLM implements ILLM {
  }

  public async getConversationResponse(
-   _messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[],
-   _tools?: OpenAI.Chat.Completions.ChatCompletionTool[],
+   _messages: MessageParam[],
+   _tools?: ToolDescriptor[],
    onMessage?: (msg: string, msgEnd: boolean) => Promise<void>
- ): Promise<OpenAI.Chat.Completions.ChatCompletion> {
+ ): Promise<Completion> {
    await new Promise((r) => setTimeout(r, 1000));

    const content = `Message number ${String(this.idx++)}`;
-   const response: OpenAI.Chat.Completions.ChatCompletion.Choice = {
+   const response: Choice = {
      finish_reason: "stop",
      index: 0,
      logprobs: null,
@@ -15,6 +15,11 @@ import {
  } from "@xalia/xmcp/sdk";
  import { Client as McpClient } from "@modelcontextprotocol/sdk/client/index.js";

+ const DEVELOPMENT: boolean = process.env.DEVELOPMENT === "1";
+
+ const DEV_MCP_SERVER_URL: string =
+   process.env.DEV_MCP_SERVER_URL || "http://localhost:8000/mcp";
+
  const logger = getLogger();

  export const LOCAL_SERVER_URL: string = "http://localhost:5001";
@@ -66,7 +71,7 @@ export class SkillManager extends McpServerManager implements ISkillManager {
  private constructor(
    private apiClient: ApiClient,
    private serverBriefs: SanitizedServerBrief[],
-   private serverBriefsMap: { [serverName: string]: SanitizedServerBrief },
+   private serverBriefsMap: Record<string, SanitizedServerBrief>,
    private toolCache: Map<string, Tool[]>,
    private openUrl: (
      url: string,
@@ -101,6 +106,23 @@ export class SkillManager extends McpServerManager implements ISkillManager {
    // Fetch server list
    const servers = await apiClient.listServers();

+   if (DEVELOPMENT) {
+     servers.push(
+       new McpServerBrief(
+         "local_dev",
+         "Local Dev Server",
+         "A local mcp server using streamable-HTTP for development use. " +
+           `URL: ${DEV_MCP_SERVER_URL}`,
+         false,
+         {},
+         "",
+         null,
+         null,
+         DEV_MCP_SERVER_URL
+       )
+     );
+   }
+
    const [mcpServers, mcpServersMap] = buildServersList(servers);
    return new SkillManager(
      apiClient,
@@ -206,22 +228,32 @@ export class SkillManager extends McpServerManager implements ISkillManager {
    serverName: string,
    enableAll: boolean
  ): Promise<void> {
-   const tools = await this.getServerTools(serverName);
-   const originalName = this.serverBriefsMap[serverName].originalName;
-   const mcpserver = await this.apiClient.getDetails(originalName, "run");
-   const client = new McpClient({
-     name: "@xalia/agent",
-     version: "1.0.0",
-   });
-   await connectServer(
-     client,
-     this.apiClient,
-     mcpserver,
-     this.openUrl,
-     this.authorized_url
-   );
+   const brief = this.serverBriefsMap[serverName];
+   if (brief.url_override) {
+     // Assume no api key for now
+     await this.addMcpServerWithStreamableHTTPUrl(
+       serverName,
+       brief.url_override
+     );
+   } else {
+     const tools = await this.getServerTools(serverName);
+     const originalName = this.serverBriefsMap[serverName].originalName;
+     const mcpserver = await this.apiClient.getDetails(originalName, "run");
+     const client = new McpClient({
+       name: "@xalia/agent",
+       version: "1.0.0",
+     });
+     await connectServer(
+       client,
+       this.apiClient,
+       mcpserver,
+       this.openUrl,
+       this.authorized_url
+     );
+
+     await this.addMcpServerWithClient(client, serverName, tools);
+   }

-   await this.addMcpServerWithClient(client, serverName, tools);
    if (enableAll) {
      this.enableAllTools(serverName);
    }
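
A brief, hypothetical illustration of the development hook these hunks add; the place where SkillManager is constructed is not part of this diff, and only the constants and branch shown above are assumed.

  // Hypothetical: these would be set in the environment before the agent
  // process starts, since the constants above are evaluated at module load.
  //   DEVELOPMENT=1
  //   DEV_MCP_SERVER_URL=http://localhost:8000/mcp
  //
  // With DEVELOPMENT=1 the listed servers gain a "local_dev" brief whose
  // url_override points at DEV_MCP_SERVER_URL; enabling that server takes the
  // new url_override branch and connects via addMcpServerWithStreamableHTTPUrl
  // rather than resolving connection details through the ApiClient.
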
@@ -1,6 +1,6 @@
- import OpenAI from "openai";
+ import { MessageToolCall, ToolDescriptor } from "./llm";

- export const temperatureTool: OpenAI.ChatCompletionTool = {
+ export const temperatureTool: ToolDescriptor = {
    type: "function",
    function: {
      name: "getCurrentTemperature",
@@ -51,9 +51,7 @@ export const toolCallbacks: {
    },
  };

- export function displayToolCall(
-   toolCall: OpenAI.ChatCompletionMessageToolCall
- ): void {
+ export function displayToolCall(toolCall: MessageToolCall): void {
    console.log(`AGENT: Tool Call: ${toolCall.function.name}`);
    console.log(` Args: ${toolCall.function.arguments}`);
  }
@@ -518,7 +518,8 @@ export class ChatClient implements ITeamManager {
  createNewAgent(
    agentName: string,
    templateName?: string,
-   teamUuid?: string
+   teamUuid?: string,
+   model?: string
  ): void {
    if (this.closed) {
      throw new Error("ChatClient is closed");
@@ -529,6 +530,7 @@ export class ChatClient implements ITeamManager {
        title: agentName,
        template_name: templateName,
        team_uuid: teamUuid,
+       model,
      });
    } catch (error) {
      this.eventHandler.onError(String(error));
@@ -1,4 +1,4 @@
- import { Tool } from "@modelcontextprotocol/sdk/types.js";
+ import { Tool, Resource } from "@modelcontextprotocol/sdk/types.js";
  import { strict as assert } from "assert";
  import { v4 as uuidv4 } from "uuid";

@@ -10,9 +10,13 @@ import {
  } from "@xalia/xmcp/sdk";

  import { ISkillManager } from "../../agent/sudoMcpServerManager";
- import { McpServerInfo, McpServerInfoRW } from "../../agent/mcpServerManager";
+ import {
+   McpServerInfo,
+   McpServerInfoRW,
+   ResourceContent,
+ } from "../../agent/mcpServerManager";
  import { IConversation } from "../../agent/agent";
- import { ChatCompletionMessageParam } from "../../agent/llm";
+ import { MessageParam } from "../../agent/llm";

  import {
    ClientSessionMessage,
@@ -135,14 +139,15 @@ class RemoteSudoMcpServerManager implements ISkillManager {
  onMcpServerAdded(
    mcpServerName: string,
    tools: Tool[],
-   enabled_tools: string[]
+   enabled_tools: string[],
+   resources: Resource[]
  ) {
    logger.debug(
      `[onMcpServerAdded]: ${mcpServerName}, tools: ${JSON.stringify(tools)}` +
        `, enabled: ${JSON.stringify(enabled_tools)}`
    );

-   const mcpServerInfo = new McpServerInfoRW(tools);
+   const mcpServerInfo = new McpServerInfoRW(mcpServerName, tools, resources);
    for (const tool of enabled_tools) {
      mcpServerInfo.enableTool(tool);
    }
@@ -258,7 +263,7 @@ export class SessionClient implements ISessionMessageSender, IConversation {
    return this.smsm;
  }

- public getConversation(): ChatCompletionMessageParam[] {
+ public getConversation(): MessageParam[] {
    throw new Error("unimpl: getConversation");
  }

@@ -304,6 +309,37 @@ export class SessionClient implements ISessionMessageSender, IConversation {
    });
  }

+ addMcpServerFromUrl(server_name: string, url: string) {
+   this.sendSessionMessage({
+     type: "add_mcp_server_from_url",
+     server_name,
+     url,
+   });
+ }
+
+ async getMcpResource(
+   server_name: string,
+   uri: string
+ ): Promise<ResourceContent[]> {
+   const client_message_id = uuidv4();
+   const resourceP = this.responseHandler.waitForResponse(client_message_id);
+   this.sender.send({
+     type: "get_mcp_resource",
+     session_id: this.sessionUUID,
+     client_message_id,
+     server_name,
+     uri,
+   });
+
+   const resourceMsg = await resourceP;
+   if (resourceMsg.type !== "mcp_resource") {
+     throw new Error(
+       `unexpected response to resource req: ${JSON.stringify(resourceMsg)}`
+     );
+   }
+   return resourceMsg.contents;
+ }
+
  userMessage(msg?: string, imageB64?: string): void {
    assert(msg || imageB64, "Either message or image must be provided");

@@ -450,7 +486,8 @@ export class SessionClient implements ISessionMessageSender, IConversation {
        this.smsm.onMcpServerAdded(
          message.server_name,
          message.tools,
-         message.enabled_tools
+         message.enabled_tools,
+         message.resources
        );
        break;
      case "mcp_server_removed":
@@ -482,6 +519,9 @@ export class SessionClient implements ISessionMessageSender, IConversation {
      case "user_removed":
        this.participants.delete(message.user_uuid);
        break;
+
+     // Responses to requests which can be awaited
+     case "mcp_resource":
      case "session_shared":
        this.responseHandler.onMessage(message);
        break;
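
Finally, a hedged sketch of the new client-side resource round trip: getMcpResource sends a get_mcp_resource session message and resolves when the matching mcp_resource response is routed back through the responseHandler. The helper, server name, and URI below are placeholders; an already-connected SessionClient is assumed.

  // Hypothetical helper; assumes an already-connected SessionClient instance.
  async function readDevResource(client: SessionClient): Promise<void> {
    // Register an MCP server by URL; note that no acknowledgement is awaited
    // for this message in the code added by this diff.
    client.addMcpServerFromUrl("local_dev", "http://localhost:8000/mcp");

    // Request a resource and await the matching "mcp_resource" response.
    const contents = await client.getMcpResource("local_dev", "file:///example.txt");
    for (const content of contents) {
      // ResourceContent comes from mcpServerManager; its exact shape is not
      // shown in this diff.
      console.log(content);
    }
  }
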