@xalia/agent 0.6.8 → 0.6.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63) hide show
  1. package/.env.development +1 -0
  2. package/dist/agent/src/agent/agent.js +100 -77
  3. package/dist/agent/src/agent/agentUtils.js +21 -16
  4. package/dist/agent/src/agent/compressingContextManager.js +10 -14
  5. package/dist/agent/src/agent/context.js +101 -127
  6. package/dist/agent/src/agent/contextWithWorkspace.js +133 -0
  7. package/dist/agent/src/agent/imageGenLLM.js +0 -6
  8. package/dist/agent/src/agent/imageGenerator.js +2 -10
  9. package/dist/agent/src/agent/openAILLMStreaming.js +5 -2
  10. package/dist/agent/src/agent/sudoMcpServerManager.js +21 -9
  11. package/dist/agent/src/chat/client/chatClient.js +35 -2
  12. package/dist/agent/src/chat/client/connection.js +6 -1
  13. package/dist/agent/src/chat/client/sessionClient.js +0 -7
  14. package/dist/agent/src/chat/data/dbSessionMessages.js +11 -0
  15. package/dist/agent/src/chat/protocol/messages.js +4 -0
  16. package/dist/agent/src/chat/server/chatContextManager.js +149 -139
  17. package/dist/agent/src/chat/server/imageGeneratorTools.js +19 -8
  18. package/dist/agent/src/chat/server/openAIRouterLLM.js +114 -0
  19. package/dist/agent/src/chat/server/openSession.js +57 -58
  20. package/dist/agent/src/chat/server/server.js +6 -2
  21. package/dist/agent/src/chat/server/sessionRegistry.js +65 -6
  22. package/dist/agent/src/chat/server/sessionRegistry.test.js +1 -1
  23. package/dist/agent/src/chat/server/tools.js +52 -17
  24. package/dist/agent/src/test/chatContextManager.test.js +31 -29
  25. package/dist/agent/src/test/clientServerConnection.test.js +1 -2
  26. package/dist/agent/src/test/compressingContextManager.test.js +22 -36
  27. package/dist/agent/src/test/context.test.js +55 -17
  28. package/dist/agent/src/test/contextTestTools.js +87 -0
  29. package/dist/agent/src/tool/chatMain.js +22 -8
  30. package/package.json +1 -1
  31. package/scripts/test_chat +3 -0
  32. package/src/agent/agent.ts +170 -125
  33. package/src/agent/agentUtils.ts +31 -20
  34. package/src/agent/compressingContextManager.ts +13 -44
  35. package/src/agent/context.ts +165 -159
  36. package/src/agent/contextWithWorkspace.ts +162 -0
  37. package/src/agent/imageGenLLM.ts +0 -8
  38. package/src/agent/imageGenerator.ts +3 -18
  39. package/src/agent/openAILLMStreaming.ts +20 -3
  40. package/src/agent/sudoMcpServerManager.ts +41 -20
  41. package/src/chat/client/chatClient.ts +47 -3
  42. package/src/chat/client/connection.ts +11 -1
  43. package/src/chat/client/sessionClient.ts +0 -8
  44. package/src/chat/data/dataModels.ts +6 -0
  45. package/src/chat/data/dbSessionMessages.ts +34 -0
  46. package/src/chat/protocol/messages.ts +35 -8
  47. package/src/chat/server/chatContextManager.ts +210 -197
  48. package/src/chat/server/connectionManager.ts +1 -1
  49. package/src/chat/server/imageGeneratorTools.ts +31 -18
  50. package/src/chat/server/openAIRouterLLM.ts +171 -0
  51. package/src/chat/server/openSession.ts +87 -100
  52. package/src/chat/server/server.ts +6 -2
  53. package/src/chat/server/sessionFileManager.ts +5 -5
  54. package/src/chat/server/sessionRegistry.test.ts +0 -1
  55. package/src/chat/server/sessionRegistry.ts +100 -4
  56. package/src/chat/server/tools.ts +73 -35
  57. package/src/test/agent.test.ts +8 -7
  58. package/src/test/chatContextManager.test.ts +42 -37
  59. package/src/test/clientServerConnection.test.ts +0 -2
  60. package/src/test/compressingContextManager.test.ts +29 -34
  61. package/src/test/context.test.ts +59 -15
  62. package/src/test/contextTestTools.ts +95 -0
  63. package/src/tool/chatMain.ts +26 -12
@@ -0,0 +1,171 @@
1
+ import { OpenAI } from "openai";
2
+
3
+ import { getLogger } from "@xalia/xmcp/sdk";
4
+ import { utils } from "@xalia/xmcp/tool";
5
+
6
+ import { OpenAILLMStreaming } from "../../agent/openAILLMStreaming";
7
+ import {
8
+ ILLM,
9
+ MessageParam,
10
+ ToolDescriptor,
11
+ Completion,
12
+ XALIA_APP_HEADER,
13
+ } from "../../agent/llm";
14
+
15
+ const logger = getLogger();
16
+
17
// TODO: May be better to allow the calling code to determine this.

// Default base URL for each supported LLM provider. Used as the fallback
// when the LLM_PROVIDER_URLS environment variable does not supply a map.
const DEFAULT_PROVIDER_URLS: Record<string, string | undefined> = {
  openrouter: "https://openrouter.ai/api/v1",
  openai: "https://api.openai.com/v1",
  anthropic: "https://api.anthropic.com/v1",
  together: "https://api.together.xyz/v1",
};
25
+
26
// Default mapping from model name to provider name. Used as the fallback
// when the LLM_MODEL_MAP environment variable does not supply a map.
// Values must be keys of the provider-URL map above.
const DEFAULT_MODEL_MAP: Record<string, string | undefined> = {
  "gpt-4o-mini": "openai",
  "gpt-4o": "openai",
  "openai/gpt-4o-mini": "openrouter",
  "openai/gpt-4o": "openrouter",
  "google/gemini-2.5-flash": "openrouter",
  "google/gemini-2.5-pro": "openrouter",
  "google/gemini-2.5-flash-image-preview": "openrouter",
  "anthropic/claude-3.7-sonnet": "openrouter",
  "anthropic/claude-sonnet-4": "openrouter",
  "anthropic/claude-sonnet-4.5": "openrouter",
  "claude-3-7-sonnet-20250219": "anthropic",
  "arcee-ai/AFM-4.5B-Preview": "together",
  "meta-llama/Llama-3.3-70B-Instruct-Turbo-Free": "together",
};
41
+
42
// The three routing maps resolved from the environment (or defaults),
// bundled together so they can be cached as a unit.
type CachedMaps = {
  // model name -> provider name
  modelToProvider: Record<string, string | undefined>;
  // provider name -> base URL
  providerUrls: Record<string, string | undefined>;
  // provider name -> API key
  providerApiKeys: Record<string, string | undefined>;
};

// Lazily populated by getMaps() on first use; undefined until then.
let cached_maps: CachedMaps | undefined = undefined;
49
+
50
+ // TODO: Move this to the CLI command args?
51
+
52
+ function decodeOrUseDefault(
53
+ stringOrFile: string | undefined,
54
+ defaultValue: Record<string, string | undefined>
55
+ ): Record<string, string | undefined> {
56
+ const value =
57
+ utils.loadContentOrFileOrUndefined<Record<string, string | undefined>>(
58
+ stringOrFile
59
+ );
60
+ if (value) {
61
+ return value;
62
+ }
63
+ return defaultValue;
64
+ }
65
+
66
+ function getMaps(): CachedMaps {
67
+ // global cached_maps;
68
+ if (!cached_maps) {
69
+ const providerApiKeys = utils.loadContentOrFileOrUndefined<
70
+ Record<string, string | undefined>
71
+ >(process.env.LLM_API_KEY_MAP);
72
+ if (!providerApiKeys) {
73
+ throw new Error("LLM_API_KEY_MAP not not given");
74
+ }
75
+ const providerUrls = decodeOrUseDefault(
76
+ process.env.LLM_PROVIDER_URLS,
77
+ DEFAULT_PROVIDER_URLS
78
+ );
79
+ const modelToProvider = decodeOrUseDefault(
80
+ process.env.LLM_MODEL_MAP,
81
+ DEFAULT_MODEL_MAP
82
+ );
83
+ cached_maps = {
84
+ modelToProvider,
85
+ providerUrls,
86
+ providerApiKeys,
87
+ };
88
+ }
89
+
90
+ return cached_maps;
91
+ }
92
+
93
+ export function getOpenAIClientParams(model: string): {
94
+ apiKey: string | undefined;
95
+ baseURL: string;
96
+ } {
97
+ const maps = getMaps();
98
+ const provider = maps.modelToProvider[model];
99
+ if (!provider) {
100
+ throw new Error(`no provider registered for model ${model}`);
101
+ }
102
+ const baseURL = maps.providerUrls[provider];
103
+ if (!baseURL) {
104
+ throw new Error(`no provider url for provider ${provider}`);
105
+ }
106
+ const apiKey = maps.providerApiKeys[provider];
107
+ if (!apiKey) {
108
+ logger.warn(`No apiKey registered for provider ${provider}`);
109
+ }
110
+
111
+ return { apiKey, baseURL };
112
+ }
113
+
114
+ export function getOpenAIClient(model: string): OpenAI {
115
+ const { apiKey, baseURL } = getOpenAIClientParams(model);
116
+ return new OpenAI({
117
+ apiKey,
118
+ baseURL,
119
+ dangerouslyAllowBrowser: true,
120
+ defaultHeaders: XALIA_APP_HEADER,
121
+ });
122
+ }
123
+
124
+ /**
125
+ * An ILLM implementation that accepts maps:
126
+ *
127
+ * model-name => prover-name
128
+ * provider-name => url
129
+ * provider-name => api-key
130
+ *
131
+ * Requests are dynamically routed to the appropriate provider based on the
132
+ * currently active model.
133
+ */
134
+ export class OpenAIRouterLLM implements ILLM {
135
+ private model: string;
136
+ private openai: OpenAI;
137
+
138
+ constructor(model: string) {
139
+ this.model = model;
140
+ this.openai = getOpenAIClient(model);
141
+ }
142
+
143
+ public setModel(model: string) {
144
+ this.model = model;
145
+ this.openai = getOpenAIClient(model);
146
+ }
147
+
148
+ public getModel(): string {
149
+ return this.model;
150
+ }
151
+
152
+ public getUrl(): string {
153
+ return this.openai.baseURL;
154
+ }
155
+
156
+ public async getConversationResponse(
157
+ messages: MessageParam[],
158
+ tools?: ToolDescriptor[],
159
+ onMessage?: (msg: string, end: boolean) => Promise<void>,
160
+ onReasoning?: (reasoning: string) => Promise<void>
161
+ ): Promise<Completion> {
162
+ return OpenAILLMStreaming.makeRequest(
163
+ this.openai,
164
+ this.model,
165
+ messages,
166
+ tools,
167
+ onMessage,
168
+ onReasoning
169
+ );
170
+ }
171
+ }
@@ -4,22 +4,24 @@ import { Tool } from "@modelcontextprotocol/sdk/types.js";
4
4
 
5
5
  import {
6
6
  AgentPreferences,
7
+ AgentProfile,
7
8
  Configuration,
8
9
  getLogger,
9
10
  McpServerSettings,
10
11
  SavedAgentProfile,
11
12
  } from "@xalia/xmcp/sdk";
12
13
 
13
- import { Agent, createUserMessage } from "../../agent/agent";
14
+ import { AgentEx, createUserMessage } from "../../agent/agent";
14
15
  import {
15
16
  AssistantMessageParam,
17
+ ILLM,
16
18
  MessageToolCall,
17
19
  ToolMessageParam,
18
20
  } from "../../agent/llm";
19
21
  import { SkillManager } from "../../agent/sudoMcpServerManager";
20
22
  import { McpServerInfo } from "../../agent/mcpServerManager";
21
23
  import { IAgentEventHandler } from "../../agent/iAgentEventHandler";
22
- import { createAgentWithoutSkills } from "../../agent/agentUtils";
24
+ import { createSpecializedLLM } from "../../agent/agentUtils";
23
25
  import { IPlatform } from "../../agent/iplatform";
24
26
 
25
27
  import type {
@@ -44,7 +46,6 @@ import type {
44
46
  ClientShareSession,
45
47
  ClientSessionMessageBase,
46
48
  ServerAgentPaused,
47
- ClientAddMcpServerFromUrl,
48
49
  ClientGetMcpResource,
49
50
  } from "../protocol/messages";
50
51
  import { AsyncQueue } from "../utils/asyncQueue";
@@ -70,7 +71,11 @@ import {
70
71
  } from "../utils/approvalManager";
71
72
  import { ChatErrorMessage, ChatFatalError } from "../protocol/errors";
72
73
  import { addDefaultChatTools } from "./tools";
73
- import { ChatContextManager, ICheckpointWriter } from "./chatContextManager";
74
+ import {
75
+ ChatContextManager,
76
+ ChatContextTransaction,
77
+ ICheckpointWriter,
78
+ } from "./chatContextManager";
74
79
  import {
75
80
  llmUserMessageToUserMessageData,
76
81
  MESSAGE_INDEX_START_VALUE,
@@ -88,6 +93,7 @@ import { SessionFileEntry } from "../data/dbSessionFileModels";
88
93
  import { ApiKeyManager } from "../data/apiKeyManager";
89
94
  import { DbSessionMessages } from "../data/dbSessionMessages";
90
95
  import { ISessionMessageSender } from "./openSessionMessageSender";
96
+ import { OpenAIRouterLLM } from "./openAIRouterLLM";
91
97
 
92
98
  /**
93
99
  * The model to use when the AgentProfile does not specify one.
@@ -208,13 +214,13 @@ export class ChatSessionAgentEventHandler implements IAgentEventHandler {
208
214
  private readonly sessionUUID: string,
209
215
  private readonly sender: ISessionMessageSender<ServerToClient>,
210
216
  private readonly approvalManager: ToolApprovalManager,
211
- private readonly contextManager: ChatContextManager
217
+ private readonly contextTx: ChatContextTransaction
212
218
  ) {}
213
219
 
214
220
  onCompletion(result: AssistantMessageParam): void {
215
221
  logger.debug(`[OpenSession.onCompletion] : ${JSON.stringify(result)}`);
216
222
  // Nothing to broadcast. Caller will receive this via onAgentMessage.
217
- this.contextManager.processAgentResponse(result);
223
+ this.contextTx.processAgentResponse(result);
218
224
  }
219
225
 
220
226
  onImage(image: OpenAI.Chat.Completions.ChatCompletionContentPartImage): void {
@@ -224,7 +230,7 @@ export class ChatSessionAgentEventHandler implements IAgentEventHandler {
224
230
 
225
231
  onToolCallResult(result: ToolMessageParam): void {
226
232
  logger.debug(`[onToolCallResult] : ${JSON.stringify(result)}`);
227
- const toolCallMessage = this.contextManager.processToolCallResult(result);
233
+ const toolCallMessage = this.contextTx.processToolCallResult(result);
228
234
  this.sender.broadcast(toolCallMessage);
229
235
  }
230
236
 
@@ -273,7 +279,7 @@ export class ChatSessionAgentEventHandler implements IAgentEventHandler {
273
279
  );
274
280
 
275
281
  // Inform the contextManager and broadcast the ServerAgentMessageChunk
276
- const agentMsgChunk = this.contextManager.processAgentMessage(msg, end);
282
+ const agentMsgChunk = this.contextTx.processAgentMessageChunk(msg, end);
277
283
  this.sender.broadcast(agentMsgChunk);
278
284
  return Promise.resolve();
279
285
  }
@@ -305,7 +311,7 @@ export class ChatSessionAgentEventHandler implements IAgentEventHandler {
305
311
  */
306
312
  export class OpenSession implements ISessionFileManagerEventHandler {
307
313
  private readonly db: Database;
308
- private /* readonly */ agent: Agent;
314
+ private /* readonly */ agent: AgentEx;
309
315
  private readonly sessionUUID: string;
310
316
  private readonly teamUUID: string | undefined;
311
317
  private readonly userUUID: string;
@@ -328,7 +334,7 @@ export class OpenSession implements ISessionFileManagerEventHandler {
328
334
 
329
335
  private constructor(
330
336
  db: Database,
331
- agent: Agent,
337
+ agent: AgentEx,
332
338
  sessionData: SessionData,
333
339
  savedAgentProfile: SavedAgentProfile,
334
340
  isPersisted: boolean,
@@ -381,7 +387,6 @@ export class OpenSession implements ISessionFileManagerEventHandler {
381
387
  ownerData: UserData,
382
388
  ownerApiKey: string,
383
389
  sessionCheckpoint: SessionCheckpoint | undefined,
384
- llmUrl: string,
385
390
  xmcpUrl: string,
386
391
  connectionManager: IUserConnectionManager<ServerToClient>
387
392
  ): Promise<OpenSession> {
@@ -400,6 +405,8 @@ export class OpenSession implements ISessionFileManagerEventHandler {
400
405
  sender,
401
406
  new DbAgentPreferencesWriter(db)
402
407
  );
408
+ const checkpointWriter = new DBCheckpointWriter(db, sessionId);
409
+
403
410
  const { agent, skillManager, contextManager } = await createContextAndAgent(
404
411
  sessionId,
405
412
  savedAgentProfile.profile.system_prompt,
@@ -409,12 +416,10 @@ export class OpenSession implements ISessionFileManagerEventHandler {
409
416
  sessionCheckpoint,
410
417
  ownerData,
411
418
  ownerApiKey,
412
- llmUrl,
413
419
  xmcpUrl,
414
420
  fileManager,
415
- sender,
416
421
  platform,
417
- toolApprovalManager
422
+ checkpointWriter
418
423
  );
419
424
 
420
425
  const openSession = new OpenSession(
@@ -440,7 +445,6 @@ export class OpenSession implements ISessionFileManagerEventHandler {
440
445
  static async initWithEmptySession(
441
446
  db: Database,
442
447
  sessionData: SessionData,
443
- llmUrl: string,
444
448
  xmcpUrl: string,
445
449
  connectionManager: IUserConnectionManager<ServerToClient>
446
450
  ): Promise<OpenSession> {
@@ -481,7 +485,6 @@ export class OpenSession implements ISessionFileManagerEventHandler {
481
485
  ownerData,
482
486
  ownerApiKey,
483
487
  sessionCheckpoint,
484
- llmUrl,
485
488
  xmcpUrl,
486
489
  connectionManager
487
490
  );
@@ -490,7 +493,6 @@ export class OpenSession implements ISessionFileManagerEventHandler {
490
493
  static async initWithExistingSession(
491
494
  db: Database,
492
495
  sessionId: string,
493
- llmUrl: string,
494
496
  xmcpUrl: string,
495
497
  connectionManager: IUserConnectionManager<ServerToClient>
496
498
  ): Promise<OpenSession> {
@@ -514,7 +516,6 @@ export class OpenSession implements ISessionFileManagerEventHandler {
514
516
  ownerData,
515
517
  ownerApiKey,
516
518
  sessionCheckpoint,
517
- llmUrl,
518
519
  xmcpUrl,
519
520
  connectionManager
520
521
  );
@@ -590,7 +591,7 @@ export class OpenSession implements ISessionFileManagerEventHandler {
590
591
  this.sendMcpSettings(connectionId);
591
592
 
592
593
  // add system prompt and model
593
- const agentProfile = this.agent.getAgentProfile();
594
+ const agentProfile = this.getAgentProfile();
594
595
  connMgr.sendToConnection(connectionId, {
595
596
  type: "system_prompt_updated",
596
597
  system_prompt: agentProfile.system_prompt,
@@ -879,9 +880,6 @@ export class OpenSession implements ISessionFileManagerEventHandler {
879
880
  case "share_session":
880
881
  await this.handleShareSession(msg, queuedMessage.from);
881
882
  break;
882
- case "add_mcp_server_from_url":
883
- void this.handleAddMcpServerFromUrl(msg, queuedMessage.from);
884
- break;
885
883
  case "get_mcp_resource":
886
884
  void this.handleGetMcpResource(msg, queuedMessage.from);
887
885
  break;
@@ -928,34 +926,6 @@ export class OpenSession implements ISessionFileManagerEventHandler {
928
926
  });
929
927
  }
930
928
 
931
- private async handleAddMcpServerFromUrl(
932
- msg: ClientAddMcpServerFromUrl,
933
- from: string
934
- ): Promise<void> {
935
- const skillManager = this.skillManager;
936
- try {
937
- const serverName = msg.server_name;
938
-
939
- await skillManager.addMcpServerWithStreamableHTTPUrl(serverName, msg.url);
940
- this.skillManager.enableAllTools(serverName);
941
-
942
- const server = skillManager.getMcpServer(serverName);
943
- const tools = server.getTools();
944
- const resources = server.getResources();
945
- const enabled_tools = Array.from(server.getEnabledTools().keys());
946
- this.sender.broadcast({
947
- type: "mcp_server_added",
948
- server_name: serverName,
949
- tools,
950
- resources,
951
- enabled_tools,
952
- session_id: this.sessionUUID,
953
- });
954
- } catch (err: unknown) {
955
- this.handleError(err, from);
956
- }
957
- }
958
-
959
929
  private async handleGetMcpResource(
960
930
  msg: ClientGetMcpResource,
961
931
  from: string
@@ -1001,26 +971,39 @@ export class OpenSession implements ISessionFileManagerEventHandler {
1001
971
  * `processUserMessage` logic when agent is paused. Trigger the context,
1002
972
  * add the user messages and then extract the new DB messages.
1003
973
  */
1004
- private processUserMessagePaused(
974
+ private async processUserMessagePaused(
1005
975
  msgs: ServerUserMessage[]
1006
- ): SessionMessage[] {
1007
- const { llmUserMessages } = this.contextManager.startAgentResponse(msgs);
1008
-
1009
- // Just send the user LLM messages direct to the ContextManager, so they
1010
- // are available to the LLM once it is restarted.
976
+ ): Promise<SessionMessage[]> {
977
+ // Just send the user LLM user messages direct to the ContextManager, so
978
+ // they are available to the LLM once it is restarted.
1011
979
 
1012
- this.contextManager.addMessages(llmUserMessages);
1013
- return this.contextManager.endAgentResponse();
980
+ const { contextTx } = await this.contextManager.startAgentResponse(msgs);
981
+ return this.contextManager.endAgentResponse(contextTx);
1014
982
  }
1015
983
 
1016
984
  private async processUserMessagesActive(
1017
985
  msgs: ServerUserMessage[]
1018
986
  ): Promise<SessionMessage[]> {
1019
- const { llmUserMessages, agentFirstChunk } =
1020
- this.contextManager.startAgentResponse(msgs);
987
+ // TODO: create the contextTx and store all new messages on this. Event
988
+ // handler should accept the contextTx and forward messages to it, as well
989
+ // as sending the updates.
990
+
991
+ // All accumulated messages (DB, Protocol and LLM) should be on the
992
+ // specialized contextTx.
993
+
994
+ const { contextTx, agentFirstChunk } =
995
+ await this.contextManager.startAgentResponse(msgs);
1021
996
  this.sender.broadcast(agentFirstChunk);
997
+
998
+ const eventHandler = new ChatSessionAgentEventHandler(
999
+ this.sessionUUID,
1000
+ this.sender,
1001
+ this.approvalManager,
1002
+ contextTx
1003
+ );
1004
+
1022
1005
  try {
1023
- await this.agent.userMessagesRaw(llmUserMessages);
1006
+ await this.agent.userMessagesRaw(contextTx, eventHandler);
1024
1007
  } catch (e) {
1025
1008
  logger.warn(
1026
1009
  `[OpenSession.processUserMessages] agent error: ${String(e)}`
@@ -1029,16 +1012,18 @@ export class OpenSession implements ISessionFileManagerEventHandler {
1029
1012
  // Errors during agent replies must be turned into messages.
1030
1013
 
1031
1014
  const errMsg = `error from LLM: ${String(e)}`;
1032
- this.contextManager.revertAgentResponse(errMsg);
1015
+ contextTx.revertAgentResponse(errMsg);
1016
+ // TODO: This will prevent the user messages from being saved in the
1017
+ // DB. Should we keep them?
1033
1018
  throw new Error(errMsg);
1034
1019
  }
1035
- return this.contextManager.endAgentResponse();
1020
+ return this.contextManager.endAgentResponse(contextTx);
1036
1021
  }
1037
1022
 
1038
1023
  private async processUserMessages(msgs: ServerUserMessage[]): Promise<void> {
1039
1024
  try {
1040
1025
  const newSessionMessages = this.agentPaused
1041
- ? this.processUserMessagePaused(msgs)
1026
+ ? await this.processUserMessagePaused(msgs)
1042
1027
  : await this.processUserMessagesActive(msgs);
1043
1028
 
1044
1029
  logger.debug(
@@ -1328,7 +1313,7 @@ export class OpenSession implements ISessionFileManagerEventHandler {
1328
1313
  private async handleSetSystemPrompt(
1329
1314
  system_prompt: string
1330
1315
  ): Promise<ServerSystemPromptUpdated> {
1331
- this.agent.setSystemPrompt(system_prompt);
1316
+ this.contextManager.setAgentPrompt(system_prompt);
1332
1317
  await this.updateAgentProfile();
1333
1318
  return {
1334
1319
  type: "system_prompt_updated",
@@ -1338,7 +1323,7 @@ export class OpenSession implements ISessionFileManagerEventHandler {
1338
1323
  }
1339
1324
 
1340
1325
  private async handleSetModel(model: string): Promise<ServerModelUpdated> {
1341
- this.agent.setModel(model);
1326
+ this.agent.llm.setModel(model);
1342
1327
  await this.updateAgentProfile();
1343
1328
  return { type: "model_updated", model, session_id: this.sessionUUID };
1344
1329
  }
@@ -1444,8 +1429,16 @@ export class OpenSession implements ISessionFileManagerEventHandler {
1444
1429
  return tool;
1445
1430
  }
1446
1431
 
1432
+ private getAgentProfile(): AgentProfile {
1433
+ return new AgentProfile(
1434
+ this.agent.llm.getModel(),
1435
+ this.contextManager.getAgentPrompt(),
1436
+ this.skillManager.getMcpServerSettings()
1437
+ );
1438
+ }
1439
+
1447
1440
  private async updateAgentProfile(): Promise<void> {
1448
- const profile = this.agent.getAgentProfile();
1441
+ const profile = this.getAgentProfile();
1449
1442
  logger.debug(
1450
1443
  `[updateAgentProfile]: uuid: ${this.agentProfileUUID} profile: ` +
1451
1444
  JSON.stringify(profile)
@@ -1642,28 +1635,35 @@ async function createContextAndAgent(
1642
1635
  sessionCheckpoint: SessionCheckpoint | undefined,
1643
1636
  ownerData: UserData,
1644
1637
  ownerApiKey: string,
1645
- llmUrl: string,
1646
1638
  xmcpUrl: string,
1647
1639
  fileManager: ChatSessionFileManager,
1648
- sender: ISessionMessageSender<ServerToClient>,
1649
1640
  platform: IPlatform,
1650
- approvalManager: ToolApprovalManager
1641
+ checkpointWriter: ICheckpointWriter
1651
1642
  ): Promise<{
1652
- agent: Agent;
1643
+ agent: AgentEx;
1653
1644
  skillManager: SkillManager;
1654
1645
  contextManager: ChatContextManager;
1655
1646
  }> {
1647
+ // Fn to create the llm. One invocation for the compression context, one
1648
+ // for the Agent.
1649
+ const createLLM = async (): Promise<ILLM> => {
1650
+ let llm = await createSpecializedLLM(model, platform);
1651
+ if (!llm) {
1652
+ llm = new OpenAIRouterLLM(model);
1653
+ }
1654
+ assert(llm);
1655
+ return llm;
1656
+ };
1657
+
1656
1658
  const contextManager = new ChatContextManager(
1657
1659
  systemPrompt,
1658
1660
  sessionMessages,
1659
1661
  sessionUUID,
1660
1662
  ownerData.uuid,
1661
1663
  sessionCheckpoint,
1662
- llmUrl,
1663
- model,
1664
- ownerApiKey,
1665
- undefined as unknown as DBCheckpointWriter, // TODO
1666
- fileManager
1664
+ checkpointWriter,
1665
+ fileManager,
1666
+ await createLLM()
1667
1667
  );
1668
1668
  if (workspace) {
1669
1669
  contextManager.setWorkspace(
@@ -1671,32 +1671,19 @@ async function createContextAndAgent(
1671
1671
  );
1672
1672
  }
1673
1673
 
1674
- const eventHandler = new ChatSessionAgentEventHandler(
1675
- sessionUUID,
1676
- sender,
1677
- approvalManager,
1678
- contextManager
1679
- );
1680
-
1681
1674
  const xmcpConfig = Configuration.new(ownerApiKey, xmcpUrl, false);
1682
- const [agent, skillManager] = await createAgentWithoutSkills(
1683
- llmUrl,
1684
- model,
1685
- eventHandler,
1686
- platform,
1687
- contextManager,
1688
- ownerApiKey,
1689
- xmcpConfig,
1690
- undefined,
1691
- true
1692
- );
1693
- await addDefaultChatTools(
1694
- agent,
1695
- ownerData.timezone,
1696
- fileManager,
1697
- llmUrl,
1698
- ownerApiKey
1675
+ const skillManager = await SkillManager.initialize(
1676
+ (url: string, authResultP: Promise<boolean>, displayName: string) => {
1677
+ platform.openUrl(url, authResultP, displayName);
1678
+ },
1679
+ xmcpConfig.backend_url,
1680
+ xmcpConfig.api_key,
1681
+ undefined /* authorizedUrl */
1699
1682
  );
1683
+ const llm = await createLLM();
1684
+ const agent = new AgentEx(skillManager, llm);
1685
+
1686
+ await addDefaultChatTools(agent, ownerData.timezone, platform, fileManager);
1700
1687
 
1701
1688
  return { agent, skillManager, contextManager };
1702
1689
  }
@@ -13,7 +13,12 @@ import { ChatFatalError } from "../protocol/errors";
13
13
  import { ServerToClient } from "../protocol/messages";
14
14
  import { SessionRegistry } from "./sessionRegistry";
15
15
 
16
+ const DEVELOPMENT: boolean = process.env.DEVELOPMENT === "1";
17
+
16
18
  dotenv.config();
19
+ if (DEVELOPMENT) {
20
+ dotenv.config({ path: ".env.development" });
21
+ }
17
22
 
18
23
  const logger = getLogger();
19
24
 
@@ -59,7 +64,6 @@ export async function runServer(
59
64
  port: number,
60
65
  supabaseUrl: string,
61
66
  supabaseKey: string,
62
- llmUrl: string,
63
67
  xmcpUrl: string
64
68
  ): Promise<ws.Server> {
65
69
  return new Promise((r, _e) => {
@@ -68,7 +72,7 @@ export async function runServer(
68
72
  const createSessionRegistry = (
69
73
  connManager: IUserConnectionManager<ServerToClient>
70
74
  ) => {
71
- return new SessionRegistry(db, connManager, llmUrl, xmcpUrl);
75
+ return new SessionRegistry(db, connManager, xmcpUrl);
72
76
  };
73
77
  const connectionManager = ConnectionManager.init(createSessionRegistry);
74
78
 
@@ -2,7 +2,7 @@ import { v4 as uuidv4 } from "uuid";
2
2
  import { strict as assert } from "assert";
3
3
 
4
4
  import {
5
- Agent,
5
+ AgentEx,
6
6
  IAgentToolProvider,
7
7
  ToolCallResult,
8
8
  ToolHandler,
@@ -359,7 +359,7 @@ export function fileManagerTool(
359
359
 
360
360
  const parseName = makeParseArgsFn(["name"] as const);
361
361
  const getFileContentFn: ToolHandler = async (
362
- _agent: Agent,
362
+ _agent: AgentEx,
363
363
  args: unknown
364
364
  ): Promise<ToolCallResult> => {
365
365
  const { name } = parseName(args);
@@ -376,7 +376,7 @@ export function fileManagerTool(
376
376
  const putArgs = ["name", "summary", "data_url"] as const;
377
377
  const parseNameSummaryDataUrl = makeParseArgsFn(putArgs);
378
378
  const putFileContentFn: ToolHandler = async (
379
- _: Agent,
379
+ _: AgentEx,
380
380
  args: unknown
381
381
  ): Promise<ToolCallResultWithFileRef> => {
382
382
  const parsed = parseNameSummaryDataUrl(args);
@@ -399,14 +399,14 @@ export function fileManagerTool(
399
399
  // Allows LLM to write data (as a data-url) to the session file manager. The
400
400
  // data is replaced by a session file url after being saved.
401
401
 
402
- const deleteFileFn: ToolHandler = async (_: Agent, args: unknown) => {
402
+ const deleteFileFn: ToolHandler = async (_: AgentEx, args: unknown) => {
403
403
  const { name } = parseName(args);
404
404
  await fileManager.deleteFile(name);
405
405
  return { response: "" };
406
406
  };
407
407
 
408
408
  return {
409
- setup: (agent: Agent) => {
409
+ setup: (agent: AgentEx) => {
410
410
  agent.addAgentTool(GET_FILE_CONTENT_TOOL, getFileContentFn);
411
411
  agent.addAgentTool(PUT_FILE_CONTENT_TOOL, putFileContentFn);
412
412
  agent.addAgentTool(DELETE_FILE_CONTENT_TOOL, deleteFileFn);
@@ -49,7 +49,6 @@ describe("SessionRegistry", () => {
49
49
  sessionRegistry = new SessionRegistry(
50
50
  mockDatabase.mock,
51
51
  mockUserConnectionManager.mock,
52
- "http://llm-api.test",
53
52
  "http://xmcp-api.test"
54
53
  );
55
54
  });