@xalia/agent 0.5.3 → 0.5.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/dist/agent/src/agent/agent.js +16 -9
  2. package/dist/agent/src/agent/agentUtils.js +24 -4
  3. package/dist/agent/src/agent/mcpServerManager.js +19 -9
  4. package/dist/agent/src/agent/openAILLM.js +3 -1
  5. package/dist/agent/src/agent/openAILLMStreaming.js +24 -25
  6. package/dist/agent/src/agent/repeatLLM.js +43 -0
  7. package/dist/agent/src/agent/sudoMcpServerManager.js +12 -6
  8. package/dist/agent/src/chat/client.js +259 -36
  9. package/dist/agent/src/chat/conversationManager.js +243 -24
  10. package/dist/agent/src/chat/db.js +24 -1
  11. package/dist/agent/src/chat/frontendClient.js +74 -0
  12. package/dist/agent/src/chat/server.js +3 -3
  13. package/dist/agent/src/test/db.test.js +25 -2
  14. package/dist/agent/src/test/openaiStreaming.test.js +133 -0
  15. package/dist/agent/src/test/prompt.test.js +2 -2
  16. package/dist/agent/src/test/sudoMcpServerManager.test.js +1 -1
  17. package/dist/agent/src/tool/agentChat.js +7 -197
  18. package/dist/agent/src/tool/chatMain.js +18 -23
  19. package/dist/agent/src/tool/commandPrompt.js +248 -0
  20. package/dist/agent/src/tool/prompt.js +27 -31
  21. package/package.json +1 -1
  22. package/scripts/test_chat +17 -1
  23. package/src/agent/agent.ts +34 -11
  24. package/src/agent/agentUtils.ts +52 -3
  25. package/src/agent/mcpServerManager.ts +43 -13
  26. package/src/agent/openAILLM.ts +3 -1
  27. package/src/agent/openAILLMStreaming.ts +28 -27
  28. package/src/agent/repeatLLM.ts +51 -0
  29. package/src/agent/sudoMcpServerManager.ts +41 -12
  30. package/src/chat/client.ts +353 -40
  31. package/src/chat/conversationManager.ts +345 -33
  32. package/src/chat/db.ts +28 -2
  33. package/src/chat/frontendClient.ts +123 -0
  34. package/src/chat/messages.ts +146 -2
  35. package/src/chat/server.ts +3 -3
  36. package/src/test/db.test.ts +35 -2
  37. package/src/test/openaiStreaming.test.ts +142 -0
  38. package/src/test/prompt.test.ts +1 -1
  39. package/src/test/sudoMcpServerManager.test.ts +1 -1
  40. package/src/tool/agentChat.ts +13 -211
  41. package/src/tool/chatMain.ts +28 -43
  42. package/src/tool/commandPrompt.ts +252 -0
  43. package/src/tool/prompt.ts +33 -32
@@ -54,17 +54,31 @@ export class McpServerInfo {
     return this.tools;
   }
 
-  public getTool(toolName: string): Tool {
+  public getTool(toolName: string): Tool | undefined {
     return this.toolsMap[toolName];
   }
 }
 
+/**
+ * Instance of McpServerInfo which supports setting tool state. Intended for
+ * IMcpServerManager implementations, not for client code.
+ */
+export class McpServerInfoRW extends McpServerInfo {
+  public enableTool(toolName: string) {
+    this.enabledToolsMap[toolName] = true;
+  }
+
+  public disableTool(toolName: string) {
+    delete this.enabledToolsMap[toolName];
+  }
+}
+
 /**
  * The internal class holds server info and allows it to be updated. Managed
  * by McpServerManager. Do not access these methods except via the
  * McpServerManager.
  */
-class McpServerInfoInternal extends McpServerInfo {
+class McpServerInfoInternal extends McpServerInfoRW {
   private readonly client: McpClient;
   private readonly callbacks: McpCallbacks;
 
@@ -114,25 +128,37 @@ class McpServerInfoInternal extends McpServerInfo {
     await this.client.close();
   }
 
-  public enableTool(toolName: string) {
-    this.enabledToolsMap[toolName] = true;
-  }
-
-  public disableTool(toolName: string) {
-    delete this.enabledToolsMap[toolName];
-  }
-
   public getCallback(toolName: string) {
     return this.callbacks[toolName];
   }
 }
 
+/**
+ * The client's interface to a manager which has mcp servers assigned to it,
+ * and can then query them for their tools, enable/disable specific tools and
+ * remove servers completely.
+ *
+ * This interface says nothing about communication with a specific agent. It
+ * only defines the client-facing interactions.
+ */
+export interface IMcpServerManager {
+  hasMcpServer(mcpServerName: string): boolean;
+  getMcpServerNames(): string[];
+  getMcpServer(mcpServerName: string): McpServerInfo;
+
+  removeMcpServer(mcpServerName: string): Promise<void>;
+  enableAllTools(mcpServerName: string): void;
+  disableAllTools(mcpServerName: string): void;
+  enableTool(mcpServerName: string, toolName: string): void;
+  disableTool(mcpServerName: string, toolName: string): void;
+}
+
 /**
  * Manage a set of MCP servers, where the tools for each server have an
  * 'enabled' flag. Tools are disabled by default. The set of enabled tools
  * over all servers is exposed as a single list of OpenAI functions.
  */
-export class McpServerManager {
+export class McpServerManager implements IMcpServerManager {
   private mcpServers: { [serverName: string]: McpServerInfoInternal } = {};
   private enabledToolsDirty: boolean = true;
   private enabledOpenAITools: ChatCompletionTool[] = [];
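
The hunks above (apparently from mcpServerManager.ts) introduce the client-facing IMcpServerManager interface and change McpServerInfo.getTool to return Tool | undefined. As a rough illustration of how calling code might drive the interface: a sketch only, with a hypothetical server name and tool names, and an import path assumed to mirror the source layout.

    import { IMcpServerManager } from "./mcpServerManager";

    // Enable two tools on a named server, guarding on existence first.
    // "github" and the tool names below are placeholders, not part of the package.
    function enableSearchTools(manager: IMcpServerManager): void {
      if (!manager.hasMcpServer("github")) {
        return;
      }
      manager.enableTool("github", "search_repositories");
      manager.enableTool("github", "get_file_contents");

      // getTool now returns `Tool | undefined`, so the missing case must be
      // handled explicitly.
      const tool = manager.getMcpServer("github").getTool("search_repositories");
      if (!tool) {
        throw new Error("tool not found");
      }
    }
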
@@ -148,6 +174,10 @@ export class McpServerManager {
     this.mcpServers = {};
   }
 
+  public hasMcpServer(mcpServerName: string): boolean {
+    return !!this.mcpServers[mcpServerName];
+  }
+
   public getMcpServerNames(): string[] {
     return Object.keys(this.mcpServers);
   }
@@ -208,7 +238,7 @@ export class McpServerManager {
     }
   }
 
-  public async removeMcpServer(mcpServerName: string) {
+  public async removeMcpServer(mcpServerName: string): Promise<void> {
     const server = this.getMcpServerInternal(mcpServerName);
     delete this.mcpServers[mcpServerName];
     await server.shutdown();
@@ -287,7 +317,7 @@ export class McpServerManager {
   public getMcpServerSettings(): McpServerSettings {
     const config: McpServerSettings = {};
 
-    // NOTE: on load, entires of the form:
+    // NOTE: on load, entries of the form:
     //
     // <server>: []
     //
@@ -1,3 +1,4 @@
+import { DEFAULT_LLM_MODEL, XALIA_APP_HEADER } from "./agentUtils";
 import { ILLM } from "./llm";
 import { OpenAI } from "openai";
 
@@ -14,8 +15,9 @@ export class OpenAILLM implements ILLM {
       apiKey,
       baseURL: apiUrl,
       dangerouslyAllowBrowser: true,
+      defaultHeaders: XALIA_APP_HEADER,
     });
-    this.model = model || "gpt-4o-mini";
+    this.model = model || DEFAULT_LLM_MODEL;
   }
 
   public setModel(model: string) {
@@ -2,6 +2,7 @@ import { getLogger } from "@xalia/xmcp/sdk";
 import { ILLM } from "./llm";
 import { OpenAI } from "openai";
 import { strict as assert } from "assert";
+import { DEFAULT_LLM_MODEL, XALIA_APP_HEADER } from "./agentUtils";
 
 const logger = getLogger();
 
@@ -265,7 +266,8 @@ function initializeCompletionChoice(
   return {
     choice: {
       message,
-      finish_reason: chunkChoice.finish_reason || "stop",
+      // We use `null` to signal that `finish_reason` is unset
+      finish_reason: chunkChoice.finish_reason || (null as unknown as "stop"),
       index: chunkChoice.index,
       logprobs: chunkChoice.logprobs || null,
     },
@@ -300,6 +302,10 @@ function updateCompletionChoice(
   assert(completionChoice.index === chunkChoice.index);
   updateCompletionMessage(completionChoice.message, chunkChoice.delta);
   if (chunkChoice.finish_reason) {
+    assert(
+      completionChoice.finish_reason === null,
+      `finish_reason already set: (${completionChoice.finish_reason})`
+    );
     completionChoice.finish_reason = chunkChoice.finish_reason;
     return true;
   }
@@ -313,7 +319,7 @@ function initializeCompletionChoices(
   // content to stream. We keep it simple for now and assume only single
   // choices, which allows us to mark everything as done if any choice we hit is
   // done.
-  assert(chunkChoices.length === 1);
+  assert(chunkChoices.length < 2);
 
   let msgDone = false;
   const choices: OpenAI.Chat.Completions.ChatCompletion.Choice[] = [];
@@ -354,7 +360,7 @@ function updateCompletionChoices(
   return msgDone;
 }
 
-function initializeCompletion(
+export function initializeCompletion(
   chunk: OpenAI.Chat.Completions.ChatCompletionChunk
 ): { initMessage: OpenAI.Chat.Completions.ChatCompletion; done: boolean } {
   // export interface ChatCompletionChunk {
@@ -397,7 +403,7 @@ function initializeCompletion(
   };
 }
 
-function updateCompletion(
+export function updateCompletion(
   completion: OpenAI.Chat.Completions.ChatCompletion,
   chunk: OpenAI.Chat.Completions.ChatCompletionChunk
 ): boolean {
@@ -448,8 +454,9 @@ export class OpenAILLMStreaming implements ILLM {
       apiKey,
       baseURL: apiUrl,
       dangerouslyAllowBrowser: true,
+      defaultHeaders: XALIA_APP_HEADER,
     });
-    this.model = model || "gpt-4o-mini";
+    this.model = model || DEFAULT_LLM_MODEL;
   }
 
   public setModel(model: string) {
@@ -482,46 +489,40 @@ export class OpenAILLMStreaming implements ILLM {
     }
 
     let aggregatedMessage: OpenAI.Chat.Completions.ChatCompletion | undefined;
-    let done = false;
-
     for await (const chunk of chunks) {
       logger.debug(`[stream] chunk: ${JSON.stringify(chunk)}`);
-      assert(!done);
 
       if (chunk.object !== "chat.completion.chunk") {
        // logger.warn("[stream]: unexpected message");
         continue;
       }
 
-      done = (() => {
-        if (!aggregatedMessage) {
-          logger.debug(`[stream] first}`);
-          const { initMessage, done } = initializeCompletion(chunk);
-          aggregatedMessage = initMessage;
-          return done;
-        } else {
-          return updateCompletion(aggregatedMessage, chunk);
-        }
-      })();
+      if (!aggregatedMessage) {
+        logger.debug(`[stream] first}`);
+        const { initMessage } = initializeCompletion(chunk);
+        aggregatedMessage = initMessage;
+      } else {
+        updateCompletion(aggregatedMessage, chunk);
+      }
 
       if (onMessage) {
-        // Inform the call of a message fragment. Note that even if there is
-        // no content, we must call `onMessage` once `done` is true.
-
-        const delta = chunk.choices[0].delta;
-        if (delta.content) {
-          await onMessage(delta.content, done);
-        } else if (done) {
-          await onMessage("", true);
+        // Inform the call of a message fragment if it contains any text.
+
+        const delta = chunk.choices[0]?.delta;
+        if (delta?.content) {
+          await onMessage(delta.content, false);
         }
       }
     }
 
+    if (onMessage) {
+      await onMessage("", true);
+    }
+
     logger.debug(
       `[stream] final message: ${JSON.stringify(aggregatedMessage)}`
     );
 
-    assert(done);
     assert(aggregatedMessage);
     return aggregatedMessage;
   }
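
With these streaming changes (apparently in openAILLMStreaming.ts), onMessage is now invoked with msgEnd === false for each non-empty content fragment, followed by a single final onMessage("", true) call once the stream ends. Since initializeCompletion and updateCompletion are now exported, chunk aggregation can also be exercised directly, for example from tests. A minimal sketch, assuming the same relative import path as the source above; the chunk array stands in for a real stream.

    import { OpenAI } from "openai";
    import { initializeCompletion, updateCompletion } from "./openAILLMStreaming";

    // Fold a sequence of streaming chunks into a single ChatCompletion.
    function aggregateChunks(
      chunks: OpenAI.Chat.Completions.ChatCompletionChunk[]
    ): OpenAI.Chat.Completions.ChatCompletion | undefined {
      let completion: OpenAI.Chat.Completions.ChatCompletion | undefined;
      for (const chunk of chunks) {
        if (!completion) {
          // The first chunk establishes the message skeleton; finish_reason
          // starts out as null until a later chunk supplies one.
          completion = initializeCompletion(chunk).initMessage;
        } else {
          updateCompletion(completion, chunk);
        }
      }
      return completion;
    }
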
@@ -0,0 +1,51 @@
+import { ILLM } from "./llm";
+import { OpenAI } from "openai";
+import { strict as assert } from "assert";
+
+export class RepeatLLM implements ILLM {
+  private idx: number = 0;
+
+  public getModel(): string {
+    return "repeat";
+  }
+
+  public getUrl(): string {
+    throw "cannot get url for RepeatLLM";
+  }
+
+  public async getConversationResponse(
+    _messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[],
+    _tools?: OpenAI.Chat.Completions.ChatCompletionTool[],
+    onMessage?: (msg: string, msgEnd: boolean) => Promise<void>
+  ): Promise<OpenAI.Chat.Completions.ChatCompletion> {
+    await new Promise((r) => setTimeout(r, 0));
+
+    const content = `Message number ${this.idx++}`;
+    const response: OpenAI.Chat.Completions.ChatCompletion.Choice = {
+      finish_reason: "stop",
+      index: 0,
+      logprobs: null,
+      message: {
+        content,
+        refusal: null,
+        role: "assistant",
+      },
+    };
+
+    if (onMessage) {
+      onMessage(content, true);
+    }
+
+    return {
+      id: "" + this.idx,
+      choices: [response],
+      created: Date.now(),
+      model: "dummyLlmModel",
+      object: "chat.completion",
+    };
+  }
+
+  public setModel(_model: string): void {
+    assert(false, "unexpected call to setModel");
+  }
+}
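
The new RepeatLLM (repeatLLM.ts) appears to be a stub ILLM that answers every conversation with "Message number <n>", presumably for tests and local development. A usage sketch under that assumption, with the import path mirroring the source layout and the message content chosen arbitrarily:

    import { RepeatLLM } from "./repeatLLM";

    async function demo(): Promise<void> {
      const llm = new RepeatLLM();
      const completion = await llm.getConversationResponse(
        [{ role: "user", content: "ignored" }],
        undefined,
        async (msg, msgEnd) => {
          // Called once per response with the full content and msgEnd === true.
          console.log(msg, msgEnd);
        }
      );
      console.log(completion.choices[0].message.content); // "Message number 0"
    }
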
@@ -1,5 +1,9 @@
 import { Tool } from "@modelcontextprotocol/sdk/types.js";
-import { McpServerManager, McpServerSettings } from "./mcpServerManager";
+import {
+  IMcpServerManager,
+  McpServerManager,
+  McpServerSettings,
+} from "./mcpServerManager";
 import {
   ApiClient,
   McpServerBrief,
@@ -35,11 +39,29 @@ class SanitizedServerBrief extends McpServerBrief {
   }
 }
 
+/**
+ * A manager which can expose a set of mcp servers and add them to an
+ * McpServerManager.
+ */
+export interface ISkillManager {
+  getMcpServerManager(): IMcpServerManager;
+  getServerBriefs(): McpServerBrief[];
+
+  /**
+   * In some cases (requests to a remote SkillManager), the promise does not
+   * correspond to the server being added. Hence the caller cannot be sure when
+   * it is safe to run the enable-all logic. `enableAll` solves this by having
+   * the SkillManager (local or remote) run the enableAll logic as the server
+   * is enabled.
+   */
+  addMcpServer(serverName: string, enableAll: boolean): Promise<void>;
+}
+
 /**
  * Manages access to the catalogue of servers hosted by sudomcp. Supports
  * adding these servers to McpServerManager.
  */
-export class SkillManager {
+export class SkillManager implements ISkillManager {
   private constructor(
     private mcpServerManager: McpServerManager,
     private apiClient: ApiClient,
@@ -109,7 +131,7 @@ export class SkillManager {
     // Concurrently establish all server connections
     const addServer = async (serverName: string) => {
       logger.debug(`restoring ${serverName} ...`);
-      return this.addMcpServer(serverName);
+      return this.addMcpServer(serverName, false);
     };
     await Promise.all(Object.entries(mcpConfig).map((e) => addServer(e[0])));
 
@@ -144,6 +166,10 @@ export class SkillManager {
     this.toolCache = {};
   }
 
+  public hasServer(serverName: string): boolean {
+    return !!this.serverBriefsMap[serverName];
+  }
+
   public getServerBriefs(): McpServerBrief[] {
     return this.serverBriefs;
   }
@@ -171,11 +197,13 @@ export class SkillManager {
   }
 
   /**
-   * Add a server to the `McpServerManager`, using `ApiClient`
-   * to produce the transport. Validates the server's config
-   * schema, if applicable.
+   * Add a server to the `McpServerManager`, using `ApiClient` to produce the
+   * transport. Validates the server's config schema, if applicable.
    */
-  public async addMcpServer(serverName: string): Promise<void> {
+  public async addMcpServer(
+    serverName: string,
+    enableAll: boolean
+  ): Promise<void> {
     const tools = await this.getServerTools(serverName);
     const originalName = this.serverBriefsMap[serverName].originalName;
     const mcpserver = await this.apiClient.getDetails(originalName, "run");
@@ -190,11 +218,12 @@ export class SkillManager {
       this.openUrl,
       this.authorized_url
     );
-    await this.mcpServerManager.addMcpServerWithClient(
-      client,
-      serverName,
-      tools
-    );
+
+    const msm = this.mcpServerManager;
+    await msm.addMcpServerWithClient(client, serverName, tools);
+    if (enableAll) {
+      msm.enableAllTools(serverName);
+    }
   }
 
   public getOriginalName(serverName: string): string {
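
The final hunks (apparently from sudoMcpServerManager.ts) thread the new enableAll flag through SkillManager.addMcpServer, so a local or remote skill manager can enable all of a server's tools as part of the add instead of leaving the caller to guess when the add has completed. A sketch of a call site, using a placeholder server name and an import path assumed to mirror the source:

    import { ISkillManager } from "./sudoMcpServerManager";

    async function addWithAllTools(skills: ISkillManager): Promise<void> {
      // Add the catalogue server and let the manager enable all of its tools.
      await skills.addMcpServer("example_server", true);

      // Servers added with enableAll === false keep their tools disabled
      // until they are enabled explicitly via the IMcpServerManager.
      console.log(skills.getMcpServerManager().getMcpServerNames());
    }
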