@townco/agent 0.1.20 → 0.1.21
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/dist/acp-server/adapter.js +77 -73
- package/dist/acp-server/http.js +233 -10
- package/dist/index.js +5 -11
- package/dist/runner/agent-runner.d.ts +16 -2
- package/dist/runner/agent-runner.js +4 -4
- package/dist/runner/langchain/index.d.ts +4 -4
- package/dist/runner/langchain/index.js +32 -10
- package/dist/test-script.js +12 -12
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/index.ts +7 -11
- package/package.json +5 -5
package/dist/acp-server/adapter.js
CHANGED

@@ -1,77 +1,81 @@
 import * as acp from "@agentclientprotocol/sdk";
 /** Adapts an Agent to speak the ACP protocol */
 export class AgentAcpAdapter {
-    [old lines 4-76 removed; their content is not shown in this diff view]
+    connection;
+    sessions;
+    agent;
+    constructor(agent, connection) {
+        this.connection = connection;
+        this.sessions = new Map();
+        this.agent = agent;
+    }
+    async initialize(_params) {
+        return {
+            protocolVersion: acp.PROTOCOL_VERSION,
+            agentCapabilities: {
+                loadSession: false,
+            },
+        };
+    }
+    async newSession(_params) {
+        const sessionId = Math.random().toString(36).substring(2);
+        this.sessions.set(sessionId, {
+            pendingPrompt: null,
+            messages: [],
+        });
+        return {
+            sessionId,
+        };
+    }
+    async authenticate(_params) {
+        // No auth needed - return empty response
+        return {};
+    }
+    async setSessionMode(_params) {
+        // Session mode changes are no-op for us (not related to coding)
+        return {};
+    }
+    async prompt(params) {
+        let session = this.sessions.get(params.sessionId);
+        // If session not found (e.g., after server restart), create a new one
+        if (!session) {
+            console.log(`Session ${params.sessionId} not found, creating new session`);
+            session = {
+                pendingPrompt: null,
+                messages: [],
+            };
+            this.sessions.set(params.sessionId, session);
+        }
+        session.pendingPrompt?.abort();
+        session.pendingPrompt = new AbortController();
+        // Generate a unique messageId for this assistant response
+        const messageId = Math.random().toString(36).substring(2);
+        try {
+            for await (const msg of this.agent.invoke({
+                prompt: params.prompt,
+                sessionId: params.sessionId,
+                messageId,
+            })) {
+                // The agent may emit extended types (like tool_output) that aren't in ACP SDK yet
+                // The http transport will handle routing these appropriately
+                this.connection.sessionUpdate({
+                    sessionId: params.sessionId,
+                    update: msg,
+                });
+            }
+        }
+        catch (err) {
+            if (session.pendingPrompt.signal.aborted) {
+                return { stopReason: "cancelled" };
+            }
+            throw err;
+        }
+        session.pendingPrompt = null;
+        return {
+            stopReason: "end_turn",
+        };
+    }
+    async cancel(params) {
+        this.sessions.get(params.sessionId)?.pendingPrompt?.abort();
+    }
 }
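
For orientation, a minimal sketch (not part of the diff) of the runner contract this adapter drives: prompt() consumes agent.invoke({ prompt, sessionId, messageId }) as an async generator of session updates, forwards each one through connection.sessionUpdate(), and returns a stop reason. The runner below is hypothetical; the agent_message_chunk update shape follows the ACP SDK's session update types and should be treated as an assumption here.

// Hypothetical AgentRunner used only to illustrate the invoke() generator contract.
const echoRunner = {
  definition: { systemPrompt: null, model: "example-model", tools: [], mcps: [] },
  async *invoke(req) {
    for (const block of req.prompt) {
      if (block.type === "text") {
        yield {
          sessionUpdate: "agent_message_chunk",
          content: { type: "text", text: `echo: ${block.text}` },
          _meta: { messageId: req.messageId },
        };
      }
    }
    return { stopReason: "end_turn" };
  },
};
// new acp.AgentSideConnection((conn) => new AgentAcpAdapter(echoRunner, conn), bridge)
// then wires this runner to a client, as in http.js below.
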
package/dist/acp-server/http.js
CHANGED

@@ -1,4 +1,5 @@
 import { createHash } from "node:crypto";
+import { gzipSync } from "node:zlib";
 import * as acp from "@agentclientprotocol/sdk";
 import { PGlite } from "@electric-sql/pglite";
 import { Hono } from "hono";
@@ -8,6 +9,37 @@ import { makeRunnerFromDefinition } from "../runner";
 import { createLogger } from "../utils/logger.js";
 import { AgentAcpAdapter } from "./adapter";
 const logger = createLogger("agent");
+/**
+ * Compress a payload using gzip if it's too large for PostgreSQL NOTIFY
+ * Returns an object with the payload and metadata about compression
+ */
+function compressIfNeeded(rawMsg) {
+    const jsonStr = JSON.stringify(rawMsg);
+    const originalSize = jsonStr.length;
+    // If it fits without compression, send as-is
+    if (originalSize <= 7500) {
+        return {
+            payload: jsonStr,
+            isCompressed: false,
+            originalSize,
+            compressedSize: originalSize,
+        };
+    }
+    // Compress and encode as base64
+    const compressed = gzipSync(jsonStr);
+    const base64 = compressed.toString("base64");
+    // Wrap in a compression envelope
+    const envelope = JSON.stringify({
+        _compressed: true,
+        data: base64,
+    });
+    return {
+        payload: envelope,
+        isCompressed: true,
+        originalSize,
+        compressedSize: envelope.length,
+    };
+}
 // Use PGlite in-memory database for LISTEN/NOTIFY
 const pg = new PGlite();
 // Helper to create safe channel names from untrusted IDs
@@ -22,6 +54,8 @@ export function makeHttpTransport(agent) {
     const agentRunner = "definition" in agent ? agent : makeRunnerFromDefinition(agent);
     new acp.AgentSideConnection((conn) => new AgentAcpAdapter(agentRunner, conn), bridge);
     const app = new Hono();
+    // Track active SSE streams by sessionId for direct output delivery
+    const sseStreams = new Map();
     const decoder = new TextDecoder();
     const encoder = new TextEncoder();
     (async () => {
@@ -63,20 +97,150 @@ export function makeHttpTransport(agent) {
                rawMsg.id != null) {
                // This is a response to a request - send to response-specific channel
                const channel = safeChannelName("response", rawMsg.id);
-               const payload =
-               [old line 67 not shown in this diff view]
+               const { payload, isCompressed, originalSize, compressedSize } = compressIfNeeded(rawMsg);
+               if (isCompressed) {
+                   logger.info("Compressed response payload", {
+                       requestId: rawMsg.id,
+                       originalSize,
+                       compressedSize,
+                       compressionRatio: ((1 - compressedSize / originalSize) * 100).toFixed(1) + "%",
+                   });
+               }
+               // Escape single quotes for PostgreSQL
+               const escapedPayload = payload.replace(/'/g, "''");
+               // Check if even compressed payload is too large
+               if (compressedSize > 7500) {
+                   logger.error("Response payload too large even after compression", {
+                       requestId: rawMsg.id,
+                       originalSize,
+                       compressedSize,
+                   });
+                   // Send error response
+                   const errorResponse = {
+                       jsonrpc: "2.0",
+                       id: rawMsg.id,
+                       error: {
+                           code: -32603,
+                           message: "Response payload too large even after compression",
+                           data: {
+                               originalSize,
+                               compressedSize,
+                           },
+                       },
+                   };
+                   const errorPayload = JSON.stringify(errorResponse).replace(/'/g, "''");
+                   await pg.query(`NOTIFY ${channel}, '${errorPayload}'`);
+                   continue;
+               }
+               try {
+                   await pg.query(`NOTIFY ${channel}, '${escapedPayload}'`);
+               }
+               catch (error) {
+                   logger.error("Failed to send response", {
+                       error,
+                       requestId: rawMsg.id,
+                       originalSize,
+                       compressedSize,
+                   });
+                   // For responses, we still need to send something to unblock the client
+                   const errorResponse = {
+                       jsonrpc: "2.0",
+                       id: rawMsg.id,
+                       error: {
+                           code: -32603,
+                           message: "Failed to send response",
+                           data: {
+                               originalSize,
+                               compressedSize,
+                               error: error instanceof Error ? error.message : String(error),
+                           },
+                       },
+                   };
+                   const errorPayload = JSON.stringify(errorResponse).replace(/'/g, "''");
+                   await pg.query(`NOTIFY ${channel}, '${errorPayload}'`);
+               }
            }
            else if ("params" in rawMsg &&
                rawMsg.params != null &&
                typeof rawMsg.params === "object" &&
                "sessionId" in rawMsg.params &&
                typeof rawMsg.params.sessionId === "string") {
-               // Other messages (notifications, requests from agent) go to
-               // session-specific channel
                const sessionId = rawMsg.params.sessionId;
+               const messageType = "method" in rawMsg && typeof rawMsg.method === "string"
+                   ? rawMsg.method
+                   : undefined;
+               // Check if this is a tool_output update - send directly via SSE
+               if (messageType === "session/update" &&
+                   "params" in rawMsg &&
+                   rawMsg.params != null &&
+                   typeof rawMsg.params === "object" &&
+                   "update" in rawMsg.params &&
+                   rawMsg.params.update != null &&
+                   typeof rawMsg.params.update === "object" &&
+                   "sessionUpdate" in rawMsg.params.update &&
+                   rawMsg.params.update.sessionUpdate === "tool_output") {
+                   // Send tool output directly via SSE, bypassing PostgreSQL NOTIFY
+                   const stream = sseStreams.get(sessionId);
+                   if (stream) {
+                       try {
+                           await stream.writeSSE({
+                               event: "message",
+                               data: JSON.stringify(rawMsg),
+                           });
+                           logger.debug("Sent tool output", {
+                               sessionId,
+                               payloadSize: JSON.stringify(rawMsg).length,
+                           });
+                       }
+                       catch (error) {
+                           logger.error("Failed to send tool output", {
+                               error,
+                               sessionId,
+                           });
+                       }
+                   }
+                   else {
+                       logger.warn("No SSE stream found for tool output", { sessionId });
+                   }
+                   continue;
+               }
+               // Other messages (notifications, requests from agent) go to
+               // session-specific channel via PostgreSQL NOTIFY
                const channel = safeChannelName("notifications", sessionId);
-               const payload =
-               [old line 79 not shown in this diff view]
+               const { payload, isCompressed, originalSize, compressedSize } = compressIfNeeded(rawMsg);
+               if (isCompressed) {
+                   logger.info("Compressed notification payload", {
+                       sessionId,
+                       messageType,
+                       originalSize,
+                       compressedSize,
+                       compressionRatio: ((1 - compressedSize / originalSize) * 100).toFixed(1) + "%",
+                   });
+               }
+               // Escape single quotes for PostgreSQL
+               const escapedPayload = payload.replace(/'/g, "''");
+               // Check if even compressed payload is too large
+               if (compressedSize > 7500) {
+                   logger.error("Notification payload too large even after compression, skipping", {
+                       sessionId,
+                       messageType,
+                       originalSize,
+                       compressedSize,
+                   });
+                   continue;
+               }
+               try {
+                   await pg.query(`NOTIFY ${channel}, '${escapedPayload}'`);
+               }
+               catch (error) {
+                   logger.error("Failed to send notification", {
+                       error,
+                       sessionId,
+                       messageType,
+                       originalSize,
+                       compressedSize,
+                   });
+               }
            }
            else {
                logger.warn("Message without sessionId, cannot route", {
@@ -101,6 +265,8 @@ export function makeHttpTransport(agent) {
        }
        logger.debug("GET /events - SSE connection opened", { sessionId });
        return streamSSE(c, async (stream) => {
+           // Register this stream for direct tool output delivery
+           sseStreams.set(sessionId, stream);
            await stream.writeSSE({ event: "ping", data: "{}" });
            const hb = setInterval(() => {
                // Heartbeat to keep proxies from terminating idle connections
@@ -108,7 +274,31 @@ export function makeHttpTransport(agent) {
            }, 1000);
            const channel = safeChannelName("notifications", sessionId);
            const unsub = await pg.listen(channel, async (payload) => {
-               [old line 111 not shown in this diff view]
+               let json = JSON.parse(payload);
+               // Check if the message is compressed
+               if (json &&
+                   typeof json === "object" &&
+                   "_compressed" in json &&
+                   json._compressed === true &&
+                   "data" in json &&
+                   typeof json.data === "string") {
+                   // This is a compressed message - decompress it
+                   try {
+                       const { gunzipSync } = await import("node:zlib");
+                       const compressed = Buffer.from(json.data, "base64");
+                       const decompressed = gunzipSync(compressed);
+                       json = JSON.parse(decompressed.toString());
+                       logger.trace("Decompressed SSE message", { sessionId, channel });
+                   }
+                   catch (error) {
+                       logger.error("Failed to decompress message", {
+                           error,
+                           sessionId,
+                           channel,
+                       });
+                       return;
+                   }
+               }
                logger.trace("Sending SSE message", { sessionId, channel });
                await stream.writeSSE({
                    event: "message",
@@ -120,6 +310,7 @@ export function makeHttpTransport(agent) {
                logger.debug("GET /events - SSE connection closed", { sessionId });
                clearInterval(hb);
                unsub();
+               sseStreams.delete(sessionId);
            });
            // Keep the connection open indefinitely
            await stream.sleep(1000 * 60 * 60 * 24);
@@ -152,8 +343,41 @@ export function makeHttpTransport(agent) {
        const responsePromise = new Promise((resolve) => {
            responseResolver = resolve;
        });
-       const unsub = await pg.listen(responseChannel, (payload) => {
-       [old line 156 not shown in this diff view]
+       const unsub = await pg.listen(responseChannel, async (payload) => {
+           let rawResponse = JSON.parse(payload);
+           // Check if the response is compressed
+           if (rawResponse &&
+               typeof rawResponse === "object" &&
+               "_compressed" in rawResponse &&
+               rawResponse._compressed === true &&
+               "data" in rawResponse &&
+               typeof rawResponse.data === "string") {
+               // This is a compressed response - decompress it
+               try {
+                   const { gunzipSync } = await import("node:zlib");
+                   const compressed = Buffer.from(rawResponse.data, "base64");
+                   const decompressed = gunzipSync(compressed);
+                   rawResponse = JSON.parse(decompressed.toString());
+                   logger.trace("Decompressed RPC response", { id });
+               }
+               catch (error) {
+                   logger.error("Failed to decompress response", {
+                       error,
+                       requestId: id,
+                   });
+                   rawResponse = {
+                       jsonrpc: "2.0",
+                       id,
+                       error: {
+                           code: -32603,
+                           message: "Failed to decompress response",
+                           data: {
+                               error: error instanceof Error ? error.message : String(error),
+                           },
+                       },
+                   };
+               }
+           }
            responseResolver(rawResponse);
        });
        // Write NDJSON line into the ACP inbound stream
@@ -184,7 +408,6 @@ export function makeHttpTransport(agent) {
        const writer = inbound.writable.getWriter();
        await writer.write(encoder.encode(`${JSON.stringify(body)}\n`));
        writer.releaseLock();
-       logger.debug("POST /rpc - Notification sent", { method });
        return c.json({
            success: true,
            message: "Notification sent to agent",
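
The change above works around PostgreSQL's NOTIFY payload limit (8000 bytes by default, which PGlite inherits): payloads over 7500 bytes are gzipped, base64-encoded, and wrapped in a { _compressed: true, data } envelope, which the LISTEN side unwraps before forwarding over SSE. A standalone sketch of that round trip (illustrative, not part of the package; the function names are hypothetical):

import { gzipSync, gunzipSync } from "node:zlib";

// Mirrors compressIfNeeded(): wrap large JSON payloads in a gzip+base64 envelope.
function encodeForNotify(msg: unknown, limit = 7500): string {
  const jsonStr = JSON.stringify(msg);
  if (jsonStr.length <= limit) return jsonStr;
  return JSON.stringify({ _compressed: true, data: gzipSync(jsonStr).toString("base64") });
}

// Mirrors the listeners: detect the envelope and inflate it back to the original message.
function decodeFromNotify(payload: string): unknown {
  const parsed = JSON.parse(payload);
  if (parsed && typeof parsed === "object" && parsed._compressed === true && typeof parsed.data === "string") {
    return JSON.parse(gunzipSync(Buffer.from(parsed.data, "base64")).toString());
  }
  return parsed;
}

// Round trip: a large message survives the envelope unchanged.
const big = { jsonrpc: "2.0", id: 1, result: { text: "x".repeat(20000) } };
console.log(JSON.stringify(decodeFromNotify(encodeForNotify(big))) === JSON.stringify(big)); // true
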
package/dist/index.js
CHANGED

@@ -1,15 +1,9 @@
+import { readFileSync } from "node:fs";
+import { join } from "node:path";
 import { makeHttpTransport, makeStdioTransport } from "./acp-server";
-[old lines 2-4 not shown in this diff view]
-    tools: [
-        "todo_write",
-        "get_weather",
-        "web_search",
-        { type: "filesystem", working_directory: "/Users/michael/code/town" },
-    ],
-    mcps: [],
-};
+// Load agent definition from shared JSON file at repo root
+const configPath = join(import.meta.dir, "../../agent.json");
+const exampleAgent = JSON.parse(readFileSync(configPath, "utf-8"));
 // Parse transport type from command line argument
 const transport = process.argv[2] || "stdio";
 if (transport === "http") {
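
dist/index.js now reads its agent definition from agent.json at the repo root instead of an inline object. Based on the inline definition removed above and the zAgentRunnerParams schema later in this diff (systemPrompt, model, tools, mcps), the file would look roughly like the hypothetical example below; the placeholder values are assumptions, not taken from the package.

{
  "systemPrompt": null,
  "model": "<model id>",
  "tools": [
    "todo_write",
    "get_weather",
    "web_search",
    { "type": "filesystem", "working_directory": "<project root>" }
  ],
  "mcps": []
}
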
package/dist/runner/agent-runner.d.ts
CHANGED

@@ -23,9 +23,23 @@ export declare const zAgentRunnerParams: z.ZodObject<{
 }, z.core.$strip>]>>>;
 }, z.core.$strip>;
 export type CreateAgentRunnerParams = z.infer<typeof zAgentRunnerParams>;
-export type InvokeRequest = Omit<PromptRequest, "_meta" […]
+export type InvokeRequest = Omit<PromptRequest, "_meta"> & {
+    messageId: string;
+};
+export type ExtendedSessionUpdate = SessionNotification["update"] | {
+    sessionUpdate: "tool_output";
+    toolCallId: string;
+    content?: Array<{
+        type: string;
+        [key: string]: unknown;
+    }>;
+    rawOutput?: Record<string, unknown>;
+    _meta?: {
+        messageId?: string;
+    };
+};
 /** Describes an object that can run an agent definition */
 export interface AgentRunner {
     definition: CreateAgentRunnerParams;
-    invoke(req: InvokeRequest): AsyncGenerator< […]
+    invoke(req: InvokeRequest): AsyncGenerator<ExtendedSessionUpdate, PromptResponse, undefined>;
 }
package/dist/runner/agent-runner.js
CHANGED

@@ -2,8 +2,8 @@ import { z } from "zod";
 import { McpConfigSchema } from "../definition";
 import { zToolType } from "./tools";
 export const zAgentRunnerParams = z.object({
-    [old lines 5-8 not shown in this diff view]
+    systemPrompt: z.string().nullable(),
+    model: z.string(),
+    tools: z.array(zToolType).optional(),
+    mcps: z.array(McpConfigSchema).optional(),
 });
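
A minimal sketch of validating a loaded definition against a schema shaped like the one above (illustrative only: the stand-in below loosens the tools/mcps element types, which the real zToolType and McpConfigSchema constrain further):

import { readFileSync } from "node:fs";
import { z } from "zod";

// Stand-in for zAgentRunnerParams, matching the fields shown in this diff.
const runnerParams = z.object({
  systemPrompt: z.string().nullable(),
  model: z.string(),
  tools: z.array(z.unknown()).optional(),
  mcps: z.array(z.unknown()).optional(),
});

// Throws if agent.json is missing a required field such as model.
const definition = runnerParams.parse(JSON.parse(readFileSync("agent.json", "utf-8")));
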
package/dist/runner/langchain/index.d.ts
CHANGED

@@ -1,7 +1,7 @@
-import type { PromptResponse […]
+import type { PromptResponse } from "@agentclientprotocol/sdk";
 import { type DynamicStructuredTool, type Tool } from "langchain";
-import type { AgentRunner, CreateAgentRunnerParams, InvokeRequest } from "../agent-runner";
-import type { BuiltInToolType } from "../tools";
+import type { AgentRunner, CreateAgentRunnerParams, ExtendedSessionUpdate, InvokeRequest } from "../agent-runner";
+import type { BuiltInToolType } from "../tools.js";
 type LangchainTool = DynamicStructuredTool | Tool;
 /** Lazily-loaded langchain tools */
 type LazyLangchainTool = MakeLazy<LangchainTool>;
@@ -11,6 +11,6 @@ export declare const TOOL_REGISTRY: Record<BuiltInToolType, LangchainTool | Lazy
 export declare class LangchainAgent implements AgentRunner {
     definition: CreateAgentRunnerParams;
     constructor(params: CreateAgentRunnerParams);
-    invoke(req: InvokeRequest): AsyncGenerator< […]
+    invoke(req: InvokeRequest): AsyncGenerator<ExtendedSessionUpdate, PromptResponse, undefined>;
 }
 export {};
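
LangchainAgent.invoke() is now typed to yield ExtendedSessionUpdate, so in addition to standard ACP updates it can yield the new "tool_output" variant. An example value of that variant (illustrative; the ids and the text field inside content are hypothetical, permitted by the index signature in the type):

const toolOutputUpdate = {
  sessionUpdate: "tool_output" as const,
  toolCallId: "call_123",              // hypothetical id
  content: [{ type: "content", text: "tool result text" }],
  rawOutput: { content: "tool result text" },
  _meta: { messageId: "msg_456" },     // hypothetical id
};
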
package/dist/runner/langchain/index.js
CHANGED

@@ -1,10 +1,12 @@
 import { MultiServerMCPClient } from "@langchain/mcp-adapters";
 import { AIMessageChunk, createAgent, ToolMessage, tool, } from "langchain";
 import { z } from "zod";
-import { […]
-import { […]
+import { createLogger } from "../../utils/logger.js";
+import { loadCustomToolModule, } from "../tool-loader.js";
+import { makeFilesystemTools } from "./tools/filesystem.js";
 import { todoItemSchema, todoWrite } from "./tools/todo";
 import { makeWebSearchTool } from "./tools/web_search";
+const logger = createLogger("agent-runner");
 const getWeather = tool(({ city }) => `It's always sunny in ${city}!`, {
     name: "get_weather",
     description: "Get the weather for a given city",
@@ -109,14 +111,13 @@ export class LangchainAgent {
            agentConfig.systemPrompt = this.definition.systemPrompt;
        }
        const agent = createAgent(agentConfig);
-       const […]
-       [old lines 113-118 not shown in this diff view]
-       }, {
+       const messages = req.prompt
+           .filter((promptMsg) => promptMsg.type === "text")
+           .map((promptMsg) => ({
+           type: "human",
+           content: promptMsg.text,
+       }));
+       const stream = agent.stream({ messages }, {
            streamMode: ["updates", "messages"],
        });
        for await (const [streamMode, chunk] of await stream) {
@@ -133,6 +134,15 @@ export class LangchainAgent {
                throw new Error(`Unhandled updates message chunk types: ${JSON.stringify(updatesMessages)}`);
            }
            for (const msg of updatesMessages) {
+               // Extract token usage metadata if available
+               const tokenUsage = msg.usage_metadata
+                   ? {
+                       inputTokens: msg.usage_metadata.input_tokens,
+                       outputTokens: msg.usage_metadata.output_tokens,
+                       totalTokens: msg.usage_metadata.total_tokens,
+                   }
+                   : undefined;
+               logger.debug("Token usage:", tokenUsage);
                for (const toolCall of msg.tool_calls ?? []) {
                    if (toolCall.id == null) {
                        throw new Error(`Tool call is missing id: ${JSON.stringify(toolCall)}`);
@@ -168,11 +178,15 @@ export class LangchainAgent {
                        kind: "other",
                        status: "pending",
                        rawInput: toolCall.args,
+                       ...(tokenUsage ? { tokenUsage } : {}),
+                       _meta: { messageId: req.messageId },
                    };
                    yield {
                        sessionUpdate: "tool_call_update",
                        toolCallId: toolCall.id,
                        status: "in_progress",
+                       ...(tokenUsage ? { tokenUsage } : {}),
+                       _meta: { messageId: req.messageId },
                    };
                }
            }
@@ -221,10 +235,17 @@ export class LangchainAgent {
                    // Skip tool_call_update for todo_write tools
                    continue;
                }
+               // Send status update (metadata only, no content)
                yield {
                    sessionUpdate: "tool_call_update",
                    toolCallId: aiMessage.tool_call_id,
                    status: "completed",
+                   _meta: { messageId: req.messageId },
+               };
+               // Send tool output separately (via direct SSE, bypassing PostgreSQL NOTIFY)
+               yield {
+                   sessionUpdate: "tool_output",
+                   toolCallId: aiMessage.tool_call_id,
                    content: [
                        {
                            type: "content",
@@ -235,6 +256,7 @@ export class LangchainAgent {
                        },
                    ],
                    rawOutput: { content: aiMessage.content },
+                   _meta: { messageId: req.messageId },
                };
            }
            else {