@mariozechner/pi-agent-core 0.30.2 → 0.31.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/README.md +297 -126
  2. package/dist/agent-loop.d.ts +21 -0
  3. package/dist/agent-loop.d.ts.map +1 -0
  4. package/dist/agent-loop.js +294 -0
  5. package/dist/agent-loop.js.map +1 -0
  6. package/dist/agent.d.ts +43 -29
  7. package/dist/agent.d.ts.map +1 -1
  8. package/dist/agent.js +83 -148
  9. package/dist/agent.js.map +1 -1
  10. package/dist/index.d.ts +4 -3
  11. package/dist/index.d.ts.map +1 -1
  12. package/dist/index.js +7 -3
  13. package/dist/index.js.map +1 -1
  14. package/dist/proxy.d.ts +85 -0
  15. package/dist/proxy.d.ts.map +1 -0
  16. package/dist/proxy.js +269 -0
  17. package/dist/proxy.js.map +1 -0
  18. package/dist/types.d.ts +88 -29
  19. package/dist/types.d.ts.map +1 -1
  20. package/dist/types.js.map +1 -1
  21. package/package.json +3 -3
  22. package/dist/transports/AppTransport.d.ts +0 -28
  23. package/dist/transports/AppTransport.d.ts.map +0 -1
  24. package/dist/transports/AppTransport.js +0 -330
  25. package/dist/transports/AppTransport.js.map +0 -1
  26. package/dist/transports/ProviderTransport.d.ts +0 -29
  27. package/dist/transports/ProviderTransport.d.ts.map +0 -1
  28. package/dist/transports/ProviderTransport.js +0 -54
  29. package/dist/transports/ProviderTransport.js.map +0 -1
  30. package/dist/transports/index.d.ts +0 -5
  31. package/dist/transports/index.d.ts.map +0 -1
  32. package/dist/transports/index.js +0 -3
  33. package/dist/transports/index.js.map +0 -1
  34. package/dist/transports/proxy-types.d.ts +0 -53
  35. package/dist/transports/proxy-types.d.ts.map +0 -1
  36. package/dist/transports/proxy-types.js +0 -2
  37. package/dist/transports/proxy-types.js.map +0 -1
  38. package/dist/transports/types.d.ts +0 -25
  39. package/dist/transports/types.d.ts.map +0 -1
  40. package/dist/transports/types.js +0 -2
  41. package/dist/transports/types.js.map +0 -1
@@ -0,0 +1,294 @@
1
+ /**
2
+ * Agent loop that works with AgentMessage throughout.
3
+ * Transforms to Message[] only at the LLM call boundary.
4
+ */
5
+ import { EventStream, streamSimple, validateToolArguments, } from "@mariozechner/pi-ai";
6
/**
 * Start an agent loop with one or more new prompt messages.
 * The prompts are appended to the context and message events are emitted for each,
 * after which the shared loop takes over. Returns the event stream immediately;
 * the loop itself runs asynchronously.
 */
export function agentLoop(prompts, context, config, signal, streamFn) {
    const events = createAgentStream();
    void (async () => {
        // Every message produced by this run, starting with the prompts themselves.
        const produced = prompts.slice();
        // Work on a shallow copy so the caller's context object is not mutated
        // when prompts are appended.
        const workingContext = {
            ...context,
            messages: context.messages.concat(prompts),
        };
        events.push({ type: "agent_start" });
        events.push({ type: "turn_start" });
        prompts.forEach((prompt) => {
            events.push({ type: "message_start", message: prompt });
            events.push({ type: "message_end", message: prompt });
        });
        await runLoop(workingContext, produced, config, signal, events, streamFn);
    })();
    return events;
}
28
/**
 * Continue an agent loop from the current context without adding a new message.
 * Used for retries - context already has user message or tool results.
 *
 * **Important:** The last message in context must convert to a `user` or `toolResult` message
 * via `convertToLlm`. If it doesn't, the LLM provider will reject the request.
 * This cannot be validated here since `convertToLlm` is only called once per turn.
 */
export function agentLoopContinue(context, config, signal, streamFn) {
    const { messages } = context;
    if (messages.length === 0) {
        throw new Error("Cannot continue: no messages in context");
    }
    // Continuing directly after an assistant message would produce two
    // consecutive assistant turns, which providers reject.
    const lastRole = messages[messages.length - 1].role;
    if (lastRole === "assistant") {
        throw new Error("Cannot continue from message role: assistant");
    }
    const events = createAgentStream();
    void (async () => {
        events.push({ type: "agent_start" });
        events.push({ type: "turn_start" });
        // No new prompts here: start with an empty produced-messages list and a
        // shallow copy of the context.
        await runLoop({ ...context }, [], config, signal, events, streamFn);
    })();
    return events;
}
53
// Build the EventStream shared by both loop entry points: it completes on the
// "agent_end" event and resolves with that event's accumulated messages.
function createAgentStream() {
    const isFinal = (event) => event.type === "agent_end";
    const finalResult = (event) => (event.type === "agent_end" ? event.messages : []);
    return new EventStream(isFinal, finalResult);
}
56
/**
 * Main loop logic shared by agentLoop and agentLoopContinue.
 *
 * Each iteration is one "turn": inject any queued user messages, stream one
 * assistant response, execute its tool calls (if any), then emit turn_end.
 * The loop keeps running while the assistant requests tools or new messages
 * have been queued. It terminates by pushing agent_end and ending the stream,
 * either normally or early when the assistant stops with error/aborted.
 *
 * @param currentContext context whose messages array is mutated in place
 * @param newMessages    accumulator for every message produced by this run;
 *                       becomes the stream's final result
 */
async function runLoop(currentContext, newMessages, config, signal, stream, streamFn) {
    let hasMoreToolCalls = true;
    let firstTurn = true;
    // getQueuedMessages is optional; default to an empty queue.
    let queuedMessages = (await config.getQueuedMessages?.()) || [];
    // Queued messages discovered mid-tool-execution (user interrupted tools);
    // they take precedence over a fresh getQueuedMessages poll below.
    let queuedAfterTools = null;
    while (hasMoreToolCalls || queuedMessages.length > 0) {
        // The callers already emitted turn_start for the first turn.
        if (!firstTurn) {
            stream.push({ type: "turn_start" });
        }
        else {
            firstTurn = false;
        }
        // Process queued messages (inject before next assistant response)
        if (queuedMessages.length > 0) {
            for (const message of queuedMessages) {
                stream.push({ type: "message_start", message });
                stream.push({ type: "message_end", message });
                currentContext.messages.push(message);
                newMessages.push(message);
            }
            queuedMessages = [];
        }
        // Stream assistant response
        const message = await streamAssistantResponse(currentContext, config, signal, stream, streamFn);
        newMessages.push(message);
        // Error/abort ends the whole run immediately: emit a tool-less turn_end
        // followed by agent_end, and resolve the stream.
        if (message.stopReason === "error" || message.stopReason === "aborted") {
            stream.push({ type: "turn_end", message, toolResults: [] });
            stream.push({ type: "agent_end", messages: newMessages });
            stream.end(newMessages);
            return;
        }
        // Check for tool calls
        const toolCalls = message.content.filter((c) => c.type === "toolCall");
        hasMoreToolCalls = toolCalls.length > 0;
        const toolResults = [];
        if (hasMoreToolCalls) {
            const toolExecution = await executeToolCalls(currentContext.tools, message, signal, stream, config.getQueuedMessages);
            toolResults.push(...toolExecution.toolResults);
            // Messages queued while tools ran are carried into the next iteration.
            queuedAfterTools = toolExecution.queuedMessages ?? null;
            for (const result of toolResults) {
                currentContext.messages.push(result);
                newMessages.push(result);
            }
        }
        stream.push({ type: "turn_end", message, toolResults });
        // Get queued messages after turn completes
        if (queuedAfterTools && queuedAfterTools.length > 0) {
            queuedMessages = queuedAfterTools;
            queuedAfterTools = null;
        }
        else {
            queuedMessages = (await config.getQueuedMessages?.()) || [];
        }
    }
    stream.push({ type: "agent_end", messages: newMessages });
    stream.end(newMessages);
}
116
/**
 * Stream an assistant response from the LLM.
 * This is where AgentMessage[] gets transformed to Message[] for the LLM.
 *
 * Emits message_start when streaming begins, message_update for each
 * incremental provider event, and message_end with the final message.
 * The in-flight partial message is kept as the last entry of
 * context.messages and swapped for the final message when the provider
 * reports done/error.
 *
 * @returns the final AssistantMessage (its stopReason tells the caller
 *          whether the turn ended normally, with an error, or aborted)
 */
async function streamAssistantResponse(context, config, signal, stream, streamFn) {
    // Apply context transform if configured (AgentMessage[] → AgentMessage[])
    let messages = context.messages;
    if (config.transformContext) {
        messages = await config.transformContext(messages, signal);
    }
    // Convert to LLM-compatible messages (AgentMessage[] → Message[])
    const llmMessages = await config.convertToLlm(messages);
    // Build LLM context
    const llmContext = {
        systemPrompt: context.systemPrompt,
        messages: llmMessages,
        tools: context.tools,
    };
    const streamFunction = streamFn || streamSimple;
    // Resolve API key (important for expiring tokens); fall back to the static key.
    const resolvedApiKey = (config.getApiKey ? await config.getApiKey(config.model.provider) : undefined) || config.apiKey;
    const response = await streamFunction(config.model, llmContext, {
        ...config,
        apiKey: resolvedApiKey,
        signal,
    });
    let partialMessage = null;
    let addedPartial = false;
    for await (const event of response) {
        switch (event.type) {
            case "start":
                // First event: put the partial message into the context so the
                // in-flight message is always visible as the last entry.
                partialMessage = event.partial;
                context.messages.push(partialMessage);
                addedPartial = true;
                stream.push({ type: "message_start", message: { ...partialMessage } });
                break;
            case "text_start":
            case "text_delta":
            case "text_end":
            case "thinking_start":
            case "thinking_delta":
            case "thinking_end":
            case "toolcall_start":
            case "toolcall_delta":
            case "toolcall_end":
                // Incremental update: replace the trailing partial in place and
                // forward the raw provider event to listeners.
                if (partialMessage) {
                    partialMessage = event.partial;
                    context.messages[context.messages.length - 1] = partialMessage;
                    stream.push({
                        type: "message_update",
                        assistantMessageEvent: event,
                        message: { ...partialMessage },
                    });
                }
                break;
            case "done":
            case "error": {
                const finalMessage = await response.result();
                if (addedPartial) {
                    // Swap the in-flight partial for the final message.
                    context.messages[context.messages.length - 1] = finalMessage;
                }
                else {
                    // No "start" event arrived (e.g. immediate error): add the
                    // message now and emit the message_start that was never sent.
                    context.messages.push(finalMessage);
                    stream.push({ type: "message_start", message: { ...finalMessage } });
                }
                stream.push({ type: "message_end", message: finalMessage });
                return finalMessage;
            }
        }
    }
    // Fallback: the provider stream ended without a done/error event.
    // NOTE(review): this message is neither stored in context.messages nor
    // emitted via message_end — confirm whether providers can end this way.
    return await response.result();
}
190
/**
 * Execute tool calls from an assistant message.
 *
 * For each toolCall content block: emits tool_execution_start, runs the tool
 * (forwarding streamed partial output as tool_execution_update), emits
 * tool_execution_end, then wraps the outcome in a toolResult message that is
 * also emitted as message_start/message_end. Tool failures never throw out of
 * this function — they become error results instead.
 *
 * If getQueuedMessages reports pending messages after a tool finishes, the
 * remaining tool calls are skipped (see skipToolCall) so the queued messages
 * can be injected before the next assistant turn.
 *
 * @returns the tool result messages plus any queued messages discovered
 */
async function executeToolCalls(tools, assistantMessage, signal, stream, getQueuedMessages) {
    const toolCalls = assistantMessage.content.filter((c) => c.type === "toolCall");
    const results = [];
    let queuedMessages;
    for (let index = 0; index < toolCalls.length; index++) {
        const toolCall = toolCalls[index];
        // Look up the tool by name; a missing tool becomes an error result below.
        const tool = tools?.find((t) => t.name === toolCall.name);
        stream.push({
            type: "tool_execution_start",
            toolCallId: toolCall.id,
            toolName: toolCall.name,
            args: toolCall.arguments,
        });
        let result;
        let isError = false;
        try {
            if (!tool)
                throw new Error(`Tool ${toolCall.name} not found`);
            // Validate arguments against the tool's schema before executing.
            const validatedArgs = validateToolArguments(tool, toolCall);
            result = await tool.execute(toolCall.id, validatedArgs, signal, (partialResult) => {
                // Forward incremental tool output to listeners.
                stream.push({
                    type: "tool_execution_update",
                    toolCallId: toolCall.id,
                    toolName: toolCall.name,
                    args: toolCall.arguments,
                    partialResult,
                });
            });
        }
        catch (e) {
            // Any failure (missing tool, validation, execution) becomes an error result.
            result = {
                content: [{ type: "text", text: e instanceof Error ? e.message : String(e) }],
                details: {},
            };
            isError = true;
        }
        stream.push({
            type: "tool_execution_end",
            toolCallId: toolCall.id,
            toolName: toolCall.name,
            result,
            isError,
        });
        const toolResultMessage = {
            role: "toolResult",
            toolCallId: toolCall.id,
            toolName: toolCall.name,
            content: result.content,
            details: result.details,
            isError,
            timestamp: Date.now(),
        };
        results.push(toolResultMessage);
        stream.push({ type: "message_start", message: toolResultMessage });
        stream.push({ type: "message_end", message: toolResultMessage });
        // Check for queued messages - skip remaining tools if user interrupted
        if (getQueuedMessages) {
            const queued = await getQueuedMessages();
            if (queued.length > 0) {
                queuedMessages = queued;
                const remainingCalls = toolCalls.slice(index + 1);
                for (const skipped of remainingCalls) {
                    results.push(skipToolCall(skipped, stream));
                }
                break;
            }
        }
    }
    return { toolResults: results, queuedMessages };
}
263
+ function skipToolCall(toolCall, stream) {
264
+ const result = {
265
+ content: [{ type: "text", text: "Skipped due to queued user message." }],
266
+ details: {},
267
+ };
268
+ stream.push({
269
+ type: "tool_execution_start",
270
+ toolCallId: toolCall.id,
271
+ toolName: toolCall.name,
272
+ args: toolCall.arguments,
273
+ });
274
+ stream.push({
275
+ type: "tool_execution_end",
276
+ toolCallId: toolCall.id,
277
+ toolName: toolCall.name,
278
+ result,
279
+ isError: true,
280
+ });
281
+ const toolResultMessage = {
282
+ role: "toolResult",
283
+ toolCallId: toolCall.id,
284
+ toolName: toolCall.name,
285
+ content: result.content,
286
+ details: {},
287
+ isError: true,
288
+ timestamp: Date.now(),
289
+ };
290
+ stream.push({ type: "message_start", message: toolResultMessage });
291
+ stream.push({ type: "message_end", message: toolResultMessage });
292
+ return toolResultMessage;
293
+ }
294
+ //# sourceMappingURL=agent-loop.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"agent-loop.js","sourceRoot":"","sources":["../src/agent-loop.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAGN,WAAW,EACX,YAAY,EAEZ,qBAAqB,GACrB,MAAM,qBAAqB,CAAC;AAW7B;;;GAGG;AACH,MAAM,UAAU,SAAS,CACxB,OAAuB,EACvB,OAAqB,EACrB,MAAuB,EACvB,MAAoB,EACpB,QAAmB,EACuB;IAC1C,MAAM,MAAM,GAAG,iBAAiB,EAAE,CAAC;IAEnC,CAAC,KAAK,IAAI,EAAE,CAAC;QACZ,MAAM,WAAW,GAAmB,CAAC,GAAG,OAAO,CAAC,CAAC;QACjD,MAAM,cAAc,GAAiB;YACpC,GAAG,OAAO;YACV,QAAQ,EAAE,CAAC,GAAG,OAAO,CAAC,QAAQ,EAAE,GAAG,OAAO,CAAC;SAC3C,CAAC;QAEF,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,aAAa,EAAE,CAAC,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,CAAC,CAAC;QACpC,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE,CAAC;YAC9B,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;YACxD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,aAAa,EAAE,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;QACvD,CAAC;QAED,MAAM,OAAO,CAAC,cAAc,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;IAAA,CAC7E,CAAC,EAAE,CAAC;IAEL,OAAO,MAAM,CAAC;AAAA,CACd;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,iBAAiB,CAChC,OAAqB,EACrB,MAAuB,EACvB,MAAoB,EACpB,QAAmB,EACuB;IAC1C,IAAI,OAAO,CAAC,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACnC,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;IAC5D,CAAC;IAED,IAAI,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;QACxE,MAAM,IAAI,KAAK,CAAC,8CAA8C,CAAC,CAAC;IACjE,CAAC;IAED,MAAM,MAAM,GAAG,iBAAiB,EAAE,CAAC;IAEnC,CAAC,KAAK,IAAI,EAAE,CAAC;QACZ,MAAM,WAAW,GAAmB,EAAE,CAAC;QACvC,MAAM,cAAc,GAAiB,EAAE,GAAG,OAAO,EAAE,CAAC;QAEpD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,aAAa,EAAE,CAAC,CAAC;QACrC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,CAAC,CAAC;QAEpC,MAAM,OAAO,CAAC,cAAc,EAAE,WAAW,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;IAAA,CAC7E,CAAC,EAAE,CAAC;IAEL,OAAO,MAAM,CAAC;AAAA,CACd;AAED,SAAS,iBAAiB,GAA4C;IACrE,OAAO,IAAI,WAAW,CACrB,CAAC,KAAiB,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,KAAK,WAAW,EACjD,CAAC,KAAiB,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,IAAI,KAAK,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE,CAAC,CACzE,CAAC;AAAA,CACF;AAED;;G
AEG;AACH,KAAK,UAAU,OAAO,CACrB,cAA4B,EAC5B,WAA2B,EAC3B,MAAuB,EACvB,MAA+B,EAC/B,MAA+C,EAC/C,QAAmB,EACH;IAChB,IAAI,gBAAgB,GAAG,IAAI,CAAC;IAC5B,IAAI,SAAS,GAAG,IAAI,CAAC;IACrB,IAAI,cAAc,GAAmB,CAAC,MAAM,MAAM,CAAC,iBAAiB,EAAE,EAAE,CAAC,IAAI,EAAE,CAAC;IAChF,IAAI,gBAAgB,GAA0B,IAAI,CAAC;IAEnD,OAAO,gBAAgB,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACtD,IAAI,CAAC,SAAS,EAAE,CAAC;YAChB,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,YAAY,EAAE,CAAC,CAAC;QACrC,CAAC;aAAM,CAAC;YACP,SAAS,GAAG,KAAK,CAAC;QACnB,CAAC;QAED,kEAAkE;QAClE,IAAI,cAAc,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC/B,KAAK,MAAM,OAAO,IAAI,cAAc,EAAE,CAAC;gBACtC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,OAAO,EAAE,CAAC,CAAC;gBAChD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,aAAa,EAAE,OAAO,EAAE,CAAC,CAAC;gBAC9C,cAAc,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;gBACtC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;YAC3B,CAAC;YACD,cAAc,GAAG,EAAE,CAAC;QACrB,CAAC;QAED,4BAA4B;QAC5B,MAAM,OAAO,GAAG,MAAM,uBAAuB,CAAC,cAAc,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;QAChG,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAE1B,IAAI,OAAO,CAAC,UAAU,KAAK,OAAO,IAAI,OAAO,CAAC,UAAU,KAAK,SAAS,EAAE,CAAC;YACxE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,UAAU,EAAE,OAAO,EAAE,WAAW,EAAE,EAAE,EAAE,CAAC,CAAC;YAC5D,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC;YAC1D,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;YACxB,OAAO;QACR,CAAC;QAED,uBAAuB;QACvB,MAAM,SAAS,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,CAAC;QACvE,gBAAgB,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,CAAC;QAExC,MAAM,WAAW,GAAwB,EAAE,CAAC;QAC5C,IAAI,gBAAgB,EAAE,CAAC;YACtB,MAAM,aAAa,GAAG,MAAM,gBAAgB,CAC3C,cAAc,CAAC,KAAK,EACpB,OAAO,EACP,MAAM,EACN,MAAM,EACN,MAAM,CAAC,iBAAiB,CACxB,CAAC;YACF,WAAW,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,WAAW,CAAC,CAAC;YAC/C,gBAAgB,GAAG,aAAa,CAAC,cAAc,IAAI,IAAI,CAAC;YAExD,KAAK,MAAM,MAAM,IAAI,WAAW,EAAE,CAAC;gBAClC,cAAc,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;gBACrC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC1B,CAAC;QACF,CAAC;QAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,UAAU,EAAE,OAAO,EAAE,WAAW,EA
AE,CAAC,CAAC;QAExD,2CAA2C;QAC3C,IAAI,gBAAgB,IAAI,gBAAgB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACrD,cAAc,GAAG,gBAAgB,CAAC;YAClC,gBAAgB,GAAG,IAAI,CAAC;QACzB,CAAC;aAAM,CAAC;YACP,cAAc,GAAG,CAAC,MAAM,MAAM,CAAC,iBAAiB,EAAE,EAAE,CAAC,IAAI,EAAE,CAAC;QAC7D,CAAC;IACF,CAAC;IAED,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC;IAC1D,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;AAAA,CACxB;AAED;;;GAGG;AACH,KAAK,UAAU,uBAAuB,CACrC,OAAqB,EACrB,MAAuB,EACvB,MAA+B,EAC/B,MAA+C,EAC/C,QAAmB,EACS;IAC5B,4EAA0E;IAC1E,IAAI,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;IAChC,IAAI,MAAM,CAAC,gBAAgB,EAAE,CAAC;QAC7B,QAAQ,GAAG,MAAM,MAAM,CAAC,gBAAgB,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;IAC5D,CAAC;IAED,oEAAkE;IAClE,MAAM,WAAW,GAAG,MAAM,MAAM,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;IAExD,oBAAoB;IACpB,MAAM,UAAU,GAAY;QAC3B,YAAY,EAAE,OAAO,CAAC,YAAY;QAClC,QAAQ,EAAE,WAAW;QACrB,KAAK,EAAE,OAAO,CAAC,KAAK;KACpB,CAAC;IAEF,MAAM,cAAc,GAAG,QAAQ,IAAI,YAAY,CAAC;IAEhD,kDAAkD;IAClD,MAAM,cAAc,GACnB,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,MAAM,CAAC,SAAS,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,IAAI,MAAM,CAAC,MAAM,CAAC;IAEjG,MAAM,QAAQ,GAAG,MAAM,cAAc,CAAC,MAAM,CAAC,KAAK,EAAE,UAAU,EAAE;QAC/D,GAAG,MAAM;QACT,MAAM,EAAE,cAAc;QACtB,MAAM;KACN,CAAC,CAAC;IAEH,IAAI,cAAc,GAA4B,IAAI,CAAC;IACnD,IAAI,YAAY,GAAG,KAAK,CAAC;IAEzB,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,QAAQ,EAAE,CAAC;QACpC,QAAQ,KAAK,CAAC,IAAI,EAAE,CAAC;YACpB,KAAK,OAAO;gBACX,cAAc,GAAG,KAAK,CAAC,OAAO,CAAC;gBAC/B,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;gBACtC,YAAY,GAAG,IAAI,CAAC;gBACpB,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,OAAO,EAAE,EAAE,GAAG,cAAc,EAAE,EAAE,CAAC,CAAC;gBACvE,MAAM;YAEP,KAAK,YAAY,CAAC;YAClB,KAAK,YAAY,CAAC;YAClB,KAAK,UAAU,CAAC;YAChB,KAAK,gBAAgB,CAAC;YACtB,KAAK,gBAAgB,CAAC;YACtB,KAAK,cAAc,CAAC;YACpB,KAAK,gBAAgB,CAAC;YACtB,KAAK,gBAAgB,CAAC;YACtB,KAAK,cAAc;gBAClB,IAAI,cAAc,EAAE,CAAC;oBACpB,cAAc,GAAG,KAAK,CAAC,OAAO,CAAC;oBAC/B,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,cAAc,CAAC;oBAC/D,MAAM,CAAC,IAAI,CAAC;wBACX,IAAI,EAAE,gBAAgB;wBACtB,qBAAqB,EAAE,KAA
K;wBAC5B,OAAO,EAAE,EAAE,GAAG,cAAc,EAAE;qBAC9B,CAAC,CAAC;gBACJ,CAAC;gBACD,MAAM;YAEP,KAAK,MAAM,CAAC;YACZ,KAAK,OAAO,EAAE,CAAC;gBACd,MAAM,YAAY,GAAG,MAAM,QAAQ,CAAC,MAAM,EAAE,CAAC;gBAC7C,IAAI,YAAY,EAAE,CAAC;oBAClB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,YAAY,CAAC;gBAC9D,CAAC;qBAAM,CAAC;oBACP,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBACrC,CAAC;gBACD,IAAI,CAAC,YAAY,EAAE,CAAC;oBACnB,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,OAAO,EAAE,EAAE,GAAG,YAAY,EAAE,EAAE,CAAC,CAAC;gBACtE,CAAC;gBACD,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,aAAa,EAAE,OAAO,EAAE,YAAY,EAAE,CAAC,CAAC;gBAC5D,OAAO,YAAY,CAAC;YACrB,CAAC;QACF,CAAC;IACF,CAAC;IAED,OAAO,MAAM,QAAQ,CAAC,MAAM,EAAE,CAAC;AAAA,CAC/B;AAED;;GAEG;AACH,KAAK,UAAU,gBAAgB,CAC9B,KAAmC,EACnC,gBAAkC,EAClC,MAA+B,EAC/B,MAA+C,EAC/C,iBAAwD,EACyB;IACjF,MAAM,SAAS,GAAG,gBAAgB,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,UAAU,CAAC,CAAC;IAChF,MAAM,OAAO,GAAwB,EAAE,CAAC;IACxC,IAAI,cAA0C,CAAC;IAE/C,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,SAAS,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE,CAAC;QACvD,MAAM,QAAQ,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC;QAClC,MAAM,IAAI,GAAG,KAAK,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,IAAI,CAAC,CAAC;QAE1D,MAAM,CAAC,IAAI,CAAC;YACX,IAAI,EAAE,sBAAsB;YAC5B,UAAU,EAAE,QAAQ,CAAC,EAAE;YACvB,QAAQ,EAAE,QAAQ,CAAC,IAAI;YACvB,IAAI,EAAE,QAAQ,CAAC,SAAS;SACxB,CAAC,CAAC;QAEH,IAAI,MAA4B,CAAC;QACjC,IAAI,OAAO,GAAG,KAAK,CAAC;QAEpB,IAAI,CAAC;YACJ,IAAI,CAAC,IAAI;gBAAE,MAAM,IAAI,KAAK,CAAC,QAAQ,QAAQ,CAAC,IAAI,YAAY,CAAC,CAAC;YAE9D,MAAM,aAAa,GAAG,qBAAqB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;YAE5D,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,aAAa,EAAE,MAAM,EAAE,CAAC,aAAa,EAAE,EAAE,CAAC;gBAClF,MAAM,CAAC,IAAI,CAAC;oBACX,IAAI,EAAE,uBAAuB;oBAC7B,UAAU,EAAE,QAAQ,CAAC,EAAE;oBACvB,QAAQ,EAAE,QAAQ,CAAC,IAAI;oBACvB,IAAI,EAAE,QAAQ,CAAC,SAAS;oBACxB,aAAa;iBACb,CAAC,CAAC;YAAA,CACH,CAAC,CAAC;QACJ,CAAC;QAAC,OAAO,CAAC,EAAE,CAAC;YACZ,MAAM,GAAG;gBACR,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,YAAY,KAAK,CAAC,CAAC,
CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC;gBAC7E,OAAO,EAAE,EAAE;aACX,CAAC;YACF,OAAO,GAAG,IAAI,CAAC;QAChB,CAAC;QAED,MAAM,CAAC,IAAI,CAAC;YACX,IAAI,EAAE,oBAAoB;YAC1B,UAAU,EAAE,QAAQ,CAAC,EAAE;YACvB,QAAQ,EAAE,QAAQ,CAAC,IAAI;YACvB,MAAM;YACN,OAAO;SACP,CAAC,CAAC;QAEH,MAAM,iBAAiB,GAAsB;YAC5C,IAAI,EAAE,YAAY;YAClB,UAAU,EAAE,QAAQ,CAAC,EAAE;YACvB,QAAQ,EAAE,QAAQ,CAAC,IAAI;YACvB,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,OAAO;YACP,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB,CAAC;QAEF,OAAO,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAChC,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,OAAO,EAAE,iBAAiB,EAAE,CAAC,CAAC;QACnE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,aAAa,EAAE,OAAO,EAAE,iBAAiB,EAAE,CAAC,CAAC;QAEjE,uEAAuE;QACvE,IAAI,iBAAiB,EAAE,CAAC;YACvB,MAAM,MAAM,GAAG,MAAM,iBAAiB,EAAE,CAAC;YACzC,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACvB,cAAc,GAAG,MAAM,CAAC;gBACxB,MAAM,cAAc,GAAG,SAAS,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;gBAClD,KAAK,MAAM,OAAO,IAAI,cAAc,EAAE,CAAC;oBACtC,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC,CAAC;gBAC7C,CAAC;gBACD,MAAM;YACP,CAAC;QACF,CAAC;IACF,CAAC;IAED,OAAO,EAAE,WAAW,EAAE,OAAO,EAAE,cAAc,EAAE,CAAC;AAAA,CAChD;AAED,SAAS,YAAY,CACpB,QAA4E,EAC5E,MAA+C,EAC3B;IACpB,MAAM,MAAM,GAAyB;QACpC,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,qCAAqC,EAAE,CAAC;QACxE,OAAO,EAAE,EAAE;KACX,CAAC;IAEF,MAAM,CAAC,IAAI,CAAC;QACX,IAAI,EAAE,sBAAsB;QAC5B,UAAU,EAAE,QAAQ,CAAC,EAAE;QACvB,QAAQ,EAAE,QAAQ,CAAC,IAAI;QACvB,IAAI,EAAE,QAAQ,CAAC,SAAS;KACxB,CAAC,CAAC;IACH,MAAM,CAAC,IAAI,CAAC;QACX,IAAI,EAAE,oBAAoB;QAC1B,UAAU,EAAE,QAAQ,CAAC,EAAE;QACvB,QAAQ,EAAE,QAAQ,CAAC,IAAI;QACvB,MAAM;QACN,OAAO,EAAE,IAAI;KACb,CAAC,CAAC;IAEH,MAAM,iBAAiB,GAAsB;QAC5C,IAAI,EAAE,YAAY;QAClB,UAAU,EAAE,QAAQ,CAAC,EAAE;QACvB,QAAQ,EAAE,QAAQ,CAAC,IAAI;QACvB,OAAO,EAAE,MAAM,CAAC,OAAO;QACvB,OAAO,EAAE,EAAE;QACX,OAAO,EAAE,IAAI;QACb,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;KACrB,CAAC;IAEF,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,eAAe,EAAE,OAAO,EAAE,iBAAiB,EAAE,CAAC,CAAC;IACnE,MAAM,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,aAAa,EAAE,OAA
O,EAAE,iBAAiB,EAAE,CAAC,CAAC;IAEjE,OAAO,iBAAiB,CAAC;AAAA,CACzB","sourcesContent":["/**\n * Agent loop that works with AgentMessage throughout.\n * Transforms to Message[] only at the LLM call boundary.\n */\n\nimport {\n\ttype AssistantMessage,\n\ttype Context,\n\tEventStream,\n\tstreamSimple,\n\ttype ToolResultMessage,\n\tvalidateToolArguments,\n} from \"@mariozechner/pi-ai\";\nimport type {\n\tAgentContext,\n\tAgentEvent,\n\tAgentLoopConfig,\n\tAgentMessage,\n\tAgentTool,\n\tAgentToolResult,\n\tStreamFn,\n} from \"./types.js\";\n\n/**\n * Start an agent loop with a new prompt message.\n * The prompt is added to the context and events are emitted for it.\n */\nexport function agentLoop(\n\tprompts: AgentMessage[],\n\tcontext: AgentContext,\n\tconfig: AgentLoopConfig,\n\tsignal?: AbortSignal,\n\tstreamFn?: StreamFn,\n): EventStream<AgentEvent, AgentMessage[]> {\n\tconst stream = createAgentStream();\n\n\t(async () => {\n\t\tconst newMessages: AgentMessage[] = [...prompts];\n\t\tconst currentContext: AgentContext = {\n\t\t\t...context,\n\t\t\tmessages: [...context.messages, ...prompts],\n\t\t};\n\n\t\tstream.push({ type: \"agent_start\" });\n\t\tstream.push({ type: \"turn_start\" });\n\t\tfor (const prompt of prompts) {\n\t\t\tstream.push({ type: \"message_start\", message: prompt });\n\t\t\tstream.push({ type: \"message_end\", message: prompt });\n\t\t}\n\n\t\tawait runLoop(currentContext, newMessages, config, signal, stream, streamFn);\n\t})();\n\n\treturn stream;\n}\n\n/**\n * Continue an agent loop from the current context without adding a new message.\n * Used for retries - context already has user message or tool results.\n *\n * **Important:** The last message in context must convert to a `user` or `toolResult` message\n * via `convertToLlm`. 
If it doesn't, the LLM provider will reject the request.\n * This cannot be validated here since `convertToLlm` is only called once per turn.\n */\nexport function agentLoopContinue(\n\tcontext: AgentContext,\n\tconfig: AgentLoopConfig,\n\tsignal?: AbortSignal,\n\tstreamFn?: StreamFn,\n): EventStream<AgentEvent, AgentMessage[]> {\n\tif (context.messages.length === 0) {\n\t\tthrow new Error(\"Cannot continue: no messages in context\");\n\t}\n\n\tif (context.messages[context.messages.length - 1].role === \"assistant\") {\n\t\tthrow new Error(\"Cannot continue from message role: assistant\");\n\t}\n\n\tconst stream = createAgentStream();\n\n\t(async () => {\n\t\tconst newMessages: AgentMessage[] = [];\n\t\tconst currentContext: AgentContext = { ...context };\n\n\t\tstream.push({ type: \"agent_start\" });\n\t\tstream.push({ type: \"turn_start\" });\n\n\t\tawait runLoop(currentContext, newMessages, config, signal, stream, streamFn);\n\t})();\n\n\treturn stream;\n}\n\nfunction createAgentStream(): EventStream<AgentEvent, AgentMessage[]> {\n\treturn new EventStream<AgentEvent, AgentMessage[]>(\n\t\t(event: AgentEvent) => event.type === \"agent_end\",\n\t\t(event: AgentEvent) => (event.type === \"agent_end\" ? 
event.messages : []),\n\t);\n}\n\n/**\n * Main loop logic shared by agentLoop and agentLoopContinue.\n */\nasync function runLoop(\n\tcurrentContext: AgentContext,\n\tnewMessages: AgentMessage[],\n\tconfig: AgentLoopConfig,\n\tsignal: AbortSignal | undefined,\n\tstream: EventStream<AgentEvent, AgentMessage[]>,\n\tstreamFn?: StreamFn,\n): Promise<void> {\n\tlet hasMoreToolCalls = true;\n\tlet firstTurn = true;\n\tlet queuedMessages: AgentMessage[] = (await config.getQueuedMessages?.()) || [];\n\tlet queuedAfterTools: AgentMessage[] | null = null;\n\n\twhile (hasMoreToolCalls || queuedMessages.length > 0) {\n\t\tif (!firstTurn) {\n\t\t\tstream.push({ type: \"turn_start\" });\n\t\t} else {\n\t\t\tfirstTurn = false;\n\t\t}\n\n\t\t// Process queued messages (inject before next assistant response)\n\t\tif (queuedMessages.length > 0) {\n\t\t\tfor (const message of queuedMessages) {\n\t\t\t\tstream.push({ type: \"message_start\", message });\n\t\t\t\tstream.push({ type: \"message_end\", message });\n\t\t\t\tcurrentContext.messages.push(message);\n\t\t\t\tnewMessages.push(message);\n\t\t\t}\n\t\t\tqueuedMessages = [];\n\t\t}\n\n\t\t// Stream assistant response\n\t\tconst message = await streamAssistantResponse(currentContext, config, signal, stream, streamFn);\n\t\tnewMessages.push(message);\n\n\t\tif (message.stopReason === \"error\" || message.stopReason === \"aborted\") {\n\t\t\tstream.push({ type: \"turn_end\", message, toolResults: [] });\n\t\t\tstream.push({ type: \"agent_end\", messages: newMessages });\n\t\t\tstream.end(newMessages);\n\t\t\treturn;\n\t\t}\n\n\t\t// Check for tool calls\n\t\tconst toolCalls = message.content.filter((c) => c.type === \"toolCall\");\n\t\thasMoreToolCalls = toolCalls.length > 0;\n\n\t\tconst toolResults: ToolResultMessage[] = [];\n\t\tif (hasMoreToolCalls) {\n\t\t\tconst toolExecution = await 
executeToolCalls(\n\t\t\t\tcurrentContext.tools,\n\t\t\t\tmessage,\n\t\t\t\tsignal,\n\t\t\t\tstream,\n\t\t\t\tconfig.getQueuedMessages,\n\t\t\t);\n\t\t\ttoolResults.push(...toolExecution.toolResults);\n\t\t\tqueuedAfterTools = toolExecution.queuedMessages ?? null;\n\n\t\t\tfor (const result of toolResults) {\n\t\t\t\tcurrentContext.messages.push(result);\n\t\t\t\tnewMessages.push(result);\n\t\t\t}\n\t\t}\n\n\t\tstream.push({ type: \"turn_end\", message, toolResults });\n\n\t\t// Get queued messages after turn completes\n\t\tif (queuedAfterTools && queuedAfterTools.length > 0) {\n\t\t\tqueuedMessages = queuedAfterTools;\n\t\t\tqueuedAfterTools = null;\n\t\t} else {\n\t\t\tqueuedMessages = (await config.getQueuedMessages?.()) || [];\n\t\t}\n\t}\n\n\tstream.push({ type: \"agent_end\", messages: newMessages });\n\tstream.end(newMessages);\n}\n\n/**\n * Stream an assistant response from the LLM.\n * This is where AgentMessage[] gets transformed to Message[] for the LLM.\n */\nasync function streamAssistantResponse(\n\tcontext: AgentContext,\n\tconfig: AgentLoopConfig,\n\tsignal: AbortSignal | undefined,\n\tstream: EventStream<AgentEvent, AgentMessage[]>,\n\tstreamFn?: StreamFn,\n): Promise<AssistantMessage> {\n\t// Apply context transform if configured (AgentMessage[] → AgentMessage[])\n\tlet messages = context.messages;\n\tif (config.transformContext) {\n\t\tmessages = await config.transformContext(messages, signal);\n\t}\n\n\t// Convert to LLM-compatible messages (AgentMessage[] → Message[])\n\tconst llmMessages = await config.convertToLlm(messages);\n\n\t// Build LLM context\n\tconst llmContext: Context = {\n\t\tsystemPrompt: context.systemPrompt,\n\t\tmessages: llmMessages,\n\t\ttools: context.tools,\n\t};\n\n\tconst streamFunction = streamFn || streamSimple;\n\n\t// Resolve API key (important for expiring tokens)\n\tconst resolvedApiKey =\n\t\t(config.getApiKey ? 
await config.getApiKey(config.model.provider) : undefined) || config.apiKey;\n\n\tconst response = await streamFunction(config.model, llmContext, {\n\t\t...config,\n\t\tapiKey: resolvedApiKey,\n\t\tsignal,\n\t});\n\n\tlet partialMessage: AssistantMessage | null = null;\n\tlet addedPartial = false;\n\n\tfor await (const event of response) {\n\t\tswitch (event.type) {\n\t\t\tcase \"start\":\n\t\t\t\tpartialMessage = event.partial;\n\t\t\t\tcontext.messages.push(partialMessage);\n\t\t\t\taddedPartial = true;\n\t\t\t\tstream.push({ type: \"message_start\", message: { ...partialMessage } });\n\t\t\t\tbreak;\n\n\t\t\tcase \"text_start\":\n\t\t\tcase \"text_delta\":\n\t\t\tcase \"text_end\":\n\t\t\tcase \"thinking_start\":\n\t\t\tcase \"thinking_delta\":\n\t\t\tcase \"thinking_end\":\n\t\t\tcase \"toolcall_start\":\n\t\t\tcase \"toolcall_delta\":\n\t\t\tcase \"toolcall_end\":\n\t\t\t\tif (partialMessage) {\n\t\t\t\t\tpartialMessage = event.partial;\n\t\t\t\t\tcontext.messages[context.messages.length - 1] = partialMessage;\n\t\t\t\t\tstream.push({\n\t\t\t\t\t\ttype: \"message_update\",\n\t\t\t\t\t\tassistantMessageEvent: event,\n\t\t\t\t\t\tmessage: { ...partialMessage },\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tbreak;\n\n\t\t\tcase \"done\":\n\t\t\tcase \"error\": {\n\t\t\t\tconst finalMessage = await response.result();\n\t\t\t\tif (addedPartial) {\n\t\t\t\t\tcontext.messages[context.messages.length - 1] = finalMessage;\n\t\t\t\t} else {\n\t\t\t\t\tcontext.messages.push(finalMessage);\n\t\t\t\t}\n\t\t\t\tif (!addedPartial) {\n\t\t\t\t\tstream.push({ type: \"message_start\", message: { ...finalMessage } });\n\t\t\t\t}\n\t\t\t\tstream.push({ type: \"message_end\", message: finalMessage });\n\t\t\t\treturn finalMessage;\n\t\t\t}\n\t\t}\n\t}\n\n\treturn await response.result();\n}\n\n/**\n * Execute tool calls from an assistant message.\n */\nasync function executeToolCalls(\n\ttools: AgentTool<any>[] | undefined,\n\tassistantMessage: AssistantMessage,\n\tsignal: AbortSignal | 
undefined,\n\tstream: EventStream<AgentEvent, AgentMessage[]>,\n\tgetQueuedMessages?: AgentLoopConfig[\"getQueuedMessages\"],\n): Promise<{ toolResults: ToolResultMessage[]; queuedMessages?: AgentMessage[] }> {\n\tconst toolCalls = assistantMessage.content.filter((c) => c.type === \"toolCall\");\n\tconst results: ToolResultMessage[] = [];\n\tlet queuedMessages: AgentMessage[] | undefined;\n\n\tfor (let index = 0; index < toolCalls.length; index++) {\n\t\tconst toolCall = toolCalls[index];\n\t\tconst tool = tools?.find((t) => t.name === toolCall.name);\n\n\t\tstream.push({\n\t\t\ttype: \"tool_execution_start\",\n\t\t\ttoolCallId: toolCall.id,\n\t\t\ttoolName: toolCall.name,\n\t\t\targs: toolCall.arguments,\n\t\t});\n\n\t\tlet result: AgentToolResult<any>;\n\t\tlet isError = false;\n\n\t\ttry {\n\t\t\tif (!tool) throw new Error(`Tool ${toolCall.name} not found`);\n\n\t\t\tconst validatedArgs = validateToolArguments(tool, toolCall);\n\n\t\t\tresult = await tool.execute(toolCall.id, validatedArgs, signal, (partialResult) => {\n\t\t\t\tstream.push({\n\t\t\t\t\ttype: \"tool_execution_update\",\n\t\t\t\t\ttoolCallId: toolCall.id,\n\t\t\t\t\ttoolName: toolCall.name,\n\t\t\t\t\targs: toolCall.arguments,\n\t\t\t\t\tpartialResult,\n\t\t\t\t});\n\t\t\t});\n\t\t} catch (e) {\n\t\t\tresult = {\n\t\t\t\tcontent: [{ type: \"text\", text: e instanceof Error ? 
e.message : String(e) }],\n\t\t\t\tdetails: {},\n\t\t\t};\n\t\t\tisError = true;\n\t\t}\n\n\t\tstream.push({\n\t\t\ttype: \"tool_execution_end\",\n\t\t\ttoolCallId: toolCall.id,\n\t\t\ttoolName: toolCall.name,\n\t\t\tresult,\n\t\t\tisError,\n\t\t});\n\n\t\tconst toolResultMessage: ToolResultMessage = {\n\t\t\trole: \"toolResult\",\n\t\t\ttoolCallId: toolCall.id,\n\t\t\ttoolName: toolCall.name,\n\t\t\tcontent: result.content,\n\t\t\tdetails: result.details,\n\t\t\tisError,\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\tresults.push(toolResultMessage);\n\t\tstream.push({ type: \"message_start\", message: toolResultMessage });\n\t\tstream.push({ type: \"message_end\", message: toolResultMessage });\n\n\t\t// Check for queued messages - skip remaining tools if user interrupted\n\t\tif (getQueuedMessages) {\n\t\t\tconst queued = await getQueuedMessages();\n\t\t\tif (queued.length > 0) {\n\t\t\t\tqueuedMessages = queued;\n\t\t\t\tconst remainingCalls = toolCalls.slice(index + 1);\n\t\t\t\tfor (const skipped of remainingCalls) {\n\t\t\t\t\tresults.push(skipToolCall(skipped, stream));\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\t}\n\n\treturn { toolResults: results, queuedMessages };\n}\n\nfunction skipToolCall(\n\ttoolCall: Extract<AssistantMessage[\"content\"][number], { type: \"toolCall\" }>,\n\tstream: EventStream<AgentEvent, AgentMessage[]>,\n): ToolResultMessage {\n\tconst result: AgentToolResult<any> = {\n\t\tcontent: [{ type: \"text\", text: \"Skipped due to queued user message.\" }],\n\t\tdetails: {},\n\t};\n\n\tstream.push({\n\t\ttype: \"tool_execution_start\",\n\t\ttoolCallId: toolCall.id,\n\t\ttoolName: toolCall.name,\n\t\targs: toolCall.arguments,\n\t});\n\tstream.push({\n\t\ttype: \"tool_execution_end\",\n\t\ttoolCallId: toolCall.id,\n\t\ttoolName: toolCall.name,\n\t\tresult,\n\t\tisError: true,\n\t});\n\n\tconst toolResultMessage: ToolResultMessage = {\n\t\trole: \"toolResult\",\n\t\ttoolCallId: toolCall.id,\n\t\ttoolName: toolCall.name,\n\t\tcontent: 
result.content,\n\t\tdetails: {},\n\t\tisError: true,\n\t\ttimestamp: Date.now(),\n\t};\n\n\tstream.push({ type: \"message_start\", message: toolResultMessage });\n\tstream.push({ type: \"message_end\", message: toolResultMessage });\n\n\treturn toolResultMessage;\n}\n"]}
package/dist/agent.d.ts CHANGED
@@ -1,56 +1,70 @@
1
- import type { Message } from "@mariozechner/pi-ai";
2
- import type { AgentTransport } from "./transports/types.js";
3
- import type { AgentEvent, AgentState, AppMessage, Attachment, ThinkingLevel } from "./types.js";
1
+ /**
2
+ * Agent class that uses the agent-loop directly.
3
+ * No transport abstraction - calls streamSimple via the loop.
4
+ */
5
+ import { type ImageContent, type Message, type Model } from "@mariozechner/pi-ai";
6
+ import type { AgentEvent, AgentMessage, AgentState, AgentTool, StreamFn, ThinkingLevel } from "./types.js";
4
7
  export interface AgentOptions {
5
8
  initialState?: Partial<AgentState>;
6
- transport: AgentTransport;
7
- messageTransformer?: (messages: AppMessage[]) => Message[] | Promise<Message[]>;
9
+ /**
10
+ * Converts AgentMessage[] to LLM-compatible Message[] before each LLM call.
11
+ * Default filters to user/assistant/toolResult and converts attachments.
12
+ */
13
+ convertToLlm?: (messages: AgentMessage[]) => Message[] | Promise<Message[]>;
14
+ /**
15
+ * Optional transform applied to context before convertToLlm.
16
+ * Use for context pruning, injecting external context, etc.
17
+ */
18
+ transformContext?: (messages: AgentMessage[], signal?: AbortSignal) => Promise<AgentMessage[]>;
19
+ /**
20
+ * Queue mode: "all" = send all queued messages at once, "one-at-a-time" = one per turn
21
+ */
8
22
  queueMode?: "all" | "one-at-a-time";
23
+ /**
24
+ * Custom stream function (for proxy backends, etc.). Default uses streamSimple.
25
+ */
26
+ streamFn?: StreamFn;
27
+ /**
28
+ * Resolves an API key dynamically for each LLM call.
29
+ * Useful for expiring tokens (e.g., GitHub Copilot OAuth).
30
+ */
31
+ getApiKey?: (provider: string) => Promise<string | undefined> | string | undefined;
9
32
  }
10
33
  export declare class Agent {
11
34
  private _state;
12
35
  private listeners;
13
36
  private abortController?;
14
- private transport;
15
- private messageTransformer;
37
+ private convertToLlm;
38
+ private transformContext?;
16
39
  private messageQueue;
17
40
  private queueMode;
41
+ streamFn: StreamFn;
42
+ getApiKey?: (provider: string) => Promise<string | undefined> | string | undefined;
18
43
  private runningPrompt?;
19
44
  private resolveRunningPrompt?;
20
- constructor(opts: AgentOptions);
45
+ constructor(opts?: AgentOptions);
21
46
  get state(): AgentState;
22
47
  subscribe(fn: (e: AgentEvent) => void): () => void;
23
48
  setSystemPrompt(v: string): void;
24
- setModel(m: typeof this._state.model): void;
49
+ setModel(m: Model<any>): void;
25
50
  setThinkingLevel(l: ThinkingLevel): void;
26
51
  setQueueMode(mode: "all" | "one-at-a-time"): void;
27
52
  getQueueMode(): "all" | "one-at-a-time";
28
- setTools(t: typeof this._state.tools): void;
29
- replaceMessages(ms: AppMessage[]): void;
30
- appendMessage(m: AppMessage): void;
31
- queueMessage(m: AppMessage): Promise<void>;
53
+ setTools(t: AgentTool<any>[]): void;
54
+ replaceMessages(ms: AgentMessage[]): void;
55
+ appendMessage(m: AgentMessage): void;
56
+ queueMessage(m: AgentMessage): void;
32
57
  clearMessageQueue(): void;
33
58
  clearMessages(): void;
34
59
  abort(): void;
35
- /**
36
- * Returns a promise that resolves when the current prompt completes.
37
- * Returns immediately resolved promise if no prompt is running.
38
- */
39
60
  waitForIdle(): Promise<void>;
40
- /**
41
- * Clear all messages and state. Call abort() first if a prompt is in flight.
42
- */
43
61
  reset(): void;
44
- prompt(input: string, attachments?: Attachment[]): Promise<void>;
45
- /**
46
- * Continue from the current context without adding a new user message.
47
- * Used for retry after overflow recovery when context already has user message or tool results.
48
- */
62
+ /** Send a prompt with an AgentMessage */
63
+ prompt(message: AgentMessage | AgentMessage[]): Promise<void>;
64
+ prompt(input: string, images?: ImageContent[]): Promise<void>;
65
+ /** Continue from current context (for retry after overflow) */
49
66
  continue(): Promise<void>;
50
- private _runAgentLoop;
51
- private _runAgentLoopContinue;
52
- private _prepareRun;
53
- private _processEvents;
67
+ private _runLoop;
54
68
  private emit;
55
69
  }
56
70
  //# sourceMappingURL=agent.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"agent.d.ts","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAgB,OAAO,EAA+C,MAAM,qBAAqB,CAAC;AAE9G,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,uBAAuB,CAAC;AAC5D,OAAO,KAAK,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM,YAAY,CAAC;AAiDhG,MAAM,WAAW,YAAY;IAC5B,YAAY,CAAC,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;IACnC,SAAS,EAAE,cAAc,CAAC;IAE1B,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,UAAU,EAAE,KAAK,OAAO,EAAE,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC;IAEhF,SAAS,CAAC,EAAE,KAAK,GAAG,eAAe,CAAC;CACpC;AAED,qBAAa,KAAK;IACjB,OAAO,CAAC,MAAM,CAUZ;IACF,OAAO,CAAC,SAAS,CAAsC;IACvD,OAAO,CAAC,eAAe,CAAC,CAAkB;IAC1C,OAAO,CAAC,SAAS,CAAiB;IAClC,OAAO,CAAC,kBAAkB,CAA6D;IACvF,OAAO,CAAC,YAAY,CAAwC;IAC5D,OAAO,CAAC,SAAS,CAA0B;IAC3C,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,oBAAoB,CAAC,CAAa;IAE1C,YAAY,IAAI,EAAE,YAAY,EAK7B;IAED,IAAI,KAAK,IAAI,UAAU,CAEtB;IAED,SAAS,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,UAAU,KAAK,IAAI,GAAG,MAAM,IAAI,CAGjD;IAGD,eAAe,CAAC,CAAC,EAAE,MAAM,QAExB;IAED,QAAQ,CAAC,CAAC,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,QAEnC;IAED,gBAAgB,CAAC,CAAC,EAAE,aAAa,QAEhC;IAED,YAAY,CAAC,IAAI,EAAE,KAAK,GAAG,eAAe,QAEzC;IAED,YAAY,IAAI,KAAK,GAAG,eAAe,CAEtC;IAED,QAAQ,CAAC,CAAC,EAAE,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,QAEnC;IAED,eAAe,CAAC,EAAE,EAAE,UAAU,EAAE,QAE/B;IAED,aAAa,CAAC,CAAC,EAAE,UAAU,QAE1B;IAEK,YAAY,CAAC,CAAC,EAAE,UAAU,iBAO/B;IAED,iBAAiB,SAEhB;IAED,aAAa,SAEZ;IAED,KAAK,SAEJ;IAED;;;OAGG;IACH,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC,CAE3B;IAED;;OAEG;IACH,KAAK,SAOJ;IAEK,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,WAAW,CAAC,EAAE,UAAU,EAAE,iBA8BrD;IAED;;;OAGG;IACG,QAAQ,kBAYb;YAKa,aAAa;YAWb,qBAAqB;YAWrB,WAAW;YAmDX,cAAc;IAsG5B,OAAO,CAAC,IAAI;CAKZ","sourcesContent":["import type { ImageContent, Message, QueuedMessage, ReasoningEffort, TextContent } from \"@mariozechner/pi-ai\";\nimport { getModel } from \"@mariozechner/pi-ai\";\nimport type { AgentTransport } from \"./transports/types.js\";\nimport type { AgentEvent, AgentState, AppMessage, Attachment, ThinkingLevel } from \"./types.js\";\n\n/**\n * Default 
message transformer: Keep only LLM-compatible messages, strip app-specific fields.\n * Converts attachments to proper content blocks (images → ImageContent, documents → TextContent).\n */\nfunction defaultMessageTransformer(messages: AppMessage[]): Message[] {\n\treturn messages\n\t\t.filter((m) => {\n\t\t\t// Only keep standard LLM message roles\n\t\t\treturn m.role === \"user\" || m.role === \"assistant\" || m.role === \"toolResult\";\n\t\t})\n\t\t.map((m) => {\n\t\t\tif (m.role === \"user\") {\n\t\t\t\tconst { attachments, ...rest } = m as any;\n\n\t\t\t\t// If no attachments, return as-is\n\t\t\t\tif (!attachments || attachments.length === 0) {\n\t\t\t\t\treturn rest as Message;\n\t\t\t\t}\n\n\t\t\t\t// Convert attachments to content blocks\n\t\t\t\tconst content = Array.isArray(rest.content) ? [...rest.content] : [{ type: \"text\", text: rest.content }];\n\n\t\t\t\tfor (const attachment of attachments as Attachment[]) {\n\t\t\t\t\t// Add image blocks for image attachments\n\t\t\t\t\tif (attachment.type === \"image\") {\n\t\t\t\t\t\tcontent.push({\n\t\t\t\t\t\t\ttype: \"image\",\n\t\t\t\t\t\t\tdata: attachment.content,\n\t\t\t\t\t\t\tmimeType: attachment.mimeType,\n\t\t\t\t\t\t} as ImageContent);\n\t\t\t\t\t}\n\t\t\t\t\t// Add text blocks for documents with extracted text\n\t\t\t\t\telse if (attachment.type === \"document\" && attachment.extractedText) {\n\t\t\t\t\t\tcontent.push({\n\t\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\t\ttext: `\\n\\n[Document: ${attachment.fileName}]\\n${attachment.extractedText}`,\n\t\t\t\t\t\t\tisDocument: true,\n\t\t\t\t\t\t} as TextContent);\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\treturn { ...rest, content } as Message;\n\t\t\t}\n\t\t\treturn m as Message;\n\t\t});\n}\n\nexport interface AgentOptions {\n\tinitialState?: Partial<AgentState>;\n\ttransport: AgentTransport;\n\t// Transform app messages to LLM-compatible messages before sending to transport\n\tmessageTransformer?: (messages: AppMessage[]) => Message[] | 
Promise<Message[]>;\n\t// Queue mode: \"all\" = send all queued messages at once, \"one-at-a-time\" = send one queued message per turn\n\tqueueMode?: \"all\" | \"one-at-a-time\";\n}\n\nexport class Agent {\n\tprivate _state: AgentState = {\n\t\tsystemPrompt: \"\",\n\t\tmodel: getModel(\"google\", \"gemini-2.5-flash-lite-preview-06-17\"),\n\t\tthinkingLevel: \"off\",\n\t\ttools: [],\n\t\tmessages: [],\n\t\tisStreaming: false,\n\t\tstreamMessage: null,\n\t\tpendingToolCalls: new Set<string>(),\n\t\terror: undefined,\n\t};\n\tprivate listeners = new Set<(e: AgentEvent) => void>();\n\tprivate abortController?: AbortController;\n\tprivate transport: AgentTransport;\n\tprivate messageTransformer: (messages: AppMessage[]) => Message[] | Promise<Message[]>;\n\tprivate messageQueue: Array<QueuedMessage<AppMessage>> = [];\n\tprivate queueMode: \"all\" | \"one-at-a-time\";\n\tprivate runningPrompt?: Promise<void>;\n\tprivate resolveRunningPrompt?: () => void;\n\n\tconstructor(opts: AgentOptions) {\n\t\tthis._state = { ...this._state, ...opts.initialState };\n\t\tthis.transport = opts.transport;\n\t\tthis.messageTransformer = opts.messageTransformer || defaultMessageTransformer;\n\t\tthis.queueMode = opts.queueMode || \"one-at-a-time\";\n\t}\n\n\tget state(): AgentState {\n\t\treturn this._state;\n\t}\n\n\tsubscribe(fn: (e: AgentEvent) => void): () => void {\n\t\tthis.listeners.add(fn);\n\t\treturn () => this.listeners.delete(fn);\n\t}\n\n\t// State mutators - update internal state without emitting events\n\tsetSystemPrompt(v: string) {\n\t\tthis._state.systemPrompt = v;\n\t}\n\n\tsetModel(m: typeof this._state.model) {\n\t\tthis._state.model = m;\n\t}\n\n\tsetThinkingLevel(l: ThinkingLevel) {\n\t\tthis._state.thinkingLevel = l;\n\t}\n\n\tsetQueueMode(mode: \"all\" | \"one-at-a-time\") {\n\t\tthis.queueMode = mode;\n\t}\n\n\tgetQueueMode(): \"all\" | \"one-at-a-time\" {\n\t\treturn this.queueMode;\n\t}\n\n\tsetTools(t: typeof this._state.tools) {\n\t\tthis._state.tools = 
t;\n\t}\n\n\treplaceMessages(ms: AppMessage[]) {\n\t\tthis._state.messages = ms.slice();\n\t}\n\n\tappendMessage(m: AppMessage) {\n\t\tthis._state.messages = [...this._state.messages, m];\n\t}\n\n\tasync queueMessage(m: AppMessage) {\n\t\t// Transform message and queue it for injection at next turn\n\t\tconst transformed = await this.messageTransformer([m]);\n\t\tthis.messageQueue.push({\n\t\t\toriginal: m,\n\t\t\tllm: transformed[0], // undefined if filtered out\n\t\t});\n\t}\n\n\tclearMessageQueue() {\n\t\tthis.messageQueue = [];\n\t}\n\n\tclearMessages() {\n\t\tthis._state.messages = [];\n\t}\n\n\tabort() {\n\t\tthis.abortController?.abort();\n\t}\n\n\t/**\n\t * Returns a promise that resolves when the current prompt completes.\n\t * Returns immediately resolved promise if no prompt is running.\n\t */\n\twaitForIdle(): Promise<void> {\n\t\treturn this.runningPrompt ?? Promise.resolve();\n\t}\n\n\t/**\n\t * Clear all messages and state. Call abort() first if a prompt is in flight.\n\t */\n\treset() {\n\t\tthis._state.messages = [];\n\t\tthis._state.isStreaming = false;\n\t\tthis._state.streamMessage = null;\n\t\tthis._state.pendingToolCalls = new Set<string>();\n\t\tthis._state.error = undefined;\n\t\tthis.messageQueue = [];\n\t}\n\n\tasync prompt(input: string, attachments?: Attachment[]) {\n\t\tconst model = this._state.model;\n\t\tif (!model) {\n\t\t\tthrow new Error(\"No model configured\");\n\t\t}\n\n\t\t// Build user message with attachments\n\t\tconst content: Array<TextContent | ImageContent> = [{ type: \"text\", text: input }];\n\t\tif (attachments?.length) {\n\t\t\tfor (const a of attachments) {\n\t\t\t\tif (a.type === \"image\") {\n\t\t\t\t\tcontent.push({ type: \"image\", data: a.content, mimeType: a.mimeType });\n\t\t\t\t} else if (a.type === \"document\" && a.extractedText) {\n\t\t\t\t\tcontent.push({\n\t\t\t\t\t\ttype: \"text\",\n\t\t\t\t\t\ttext: `\\n\\n[Document: ${a.fileName}]\\n${a.extractedText}`,\n\t\t\t\t\t\tisDocument: true,\n\t\t\t\t\t} as 
TextContent);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\tconst userMessage: AppMessage = {\n\t\t\trole: \"user\",\n\t\t\tcontent,\n\t\t\tattachments: attachments?.length ? attachments : undefined,\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\tawait this._runAgentLoop(userMessage);\n\t}\n\n\t/**\n\t * Continue from the current context without adding a new user message.\n\t * Used for retry after overflow recovery when context already has user message or tool results.\n\t */\n\tasync continue() {\n\t\tconst messages = this._state.messages;\n\t\tif (messages.length === 0) {\n\t\t\tthrow new Error(\"No messages to continue from\");\n\t\t}\n\n\t\tconst lastMessage = messages[messages.length - 1];\n\t\tif (lastMessage.role !== \"user\" && lastMessage.role !== \"toolResult\") {\n\t\t\tthrow new Error(`Cannot continue from message role: ${lastMessage.role}`);\n\t\t}\n\n\t\tawait this._runAgentLoopContinue();\n\t}\n\n\t/**\n\t * Internal: Run the agent loop with a new user message.\n\t */\n\tprivate async _runAgentLoop(userMessage: AppMessage) {\n\t\tconst { llmMessages, cfg } = await this._prepareRun();\n\n\t\tconst events = this.transport.run(llmMessages, userMessage as Message, cfg, this.abortController!.signal);\n\n\t\tawait this._processEvents(events);\n\t}\n\n\t/**\n\t * Internal: Continue the agent loop from current context.\n\t */\n\tprivate async _runAgentLoopContinue() {\n\t\tconst { llmMessages, cfg } = await this._prepareRun();\n\n\t\tconst events = this.transport.continue(llmMessages, cfg, this.abortController!.signal);\n\n\t\tawait this._processEvents(events);\n\t}\n\n\t/**\n\t * Prepare for running the agent loop.\n\t */\n\tprivate async _prepareRun() {\n\t\tconst model = this._state.model;\n\t\tif (!model) {\n\t\t\tthrow new Error(\"No model configured\");\n\t\t}\n\n\t\tthis.runningPrompt = new Promise<void>((resolve) => {\n\t\t\tthis.resolveRunningPrompt = resolve;\n\t\t});\n\n\t\tthis.abortController = new AbortController();\n\t\tthis._state.isStreaming = 
true;\n\t\tthis._state.streamMessage = null;\n\t\tthis._state.error = undefined;\n\n\t\tconst reasoning: ReasoningEffort | undefined =\n\t\t\tthis._state.thinkingLevel === \"off\"\n\t\t\t\t? undefined\n\t\t\t\t: this._state.thinkingLevel === \"minimal\"\n\t\t\t\t\t? \"low\"\n\t\t\t\t\t: this._state.thinkingLevel;\n\n\t\tconst cfg = {\n\t\t\tsystemPrompt: this._state.systemPrompt,\n\t\t\ttools: this._state.tools,\n\t\t\tmodel,\n\t\t\treasoning,\n\t\t\tgetQueuedMessages: async <T>() => {\n\t\t\t\tif (this.queueMode === \"one-at-a-time\") {\n\t\t\t\t\tif (this.messageQueue.length > 0) {\n\t\t\t\t\t\tconst first = this.messageQueue[0];\n\t\t\t\t\t\tthis.messageQueue = this.messageQueue.slice(1);\n\t\t\t\t\t\treturn [first] as QueuedMessage<T>[];\n\t\t\t\t\t}\n\t\t\t\t\treturn [];\n\t\t\t\t} else {\n\t\t\t\t\tconst queued = this.messageQueue.slice();\n\t\t\t\t\tthis.messageQueue = [];\n\t\t\t\t\treturn queued as QueuedMessage<T>[];\n\t\t\t\t}\n\t\t\t},\n\t\t};\n\n\t\tconst llmMessages = await this.messageTransformer(this._state.messages);\n\n\t\treturn { llmMessages, cfg, model };\n\t}\n\n\t/**\n\t * Process events from the transport.\n\t */\n\tprivate async _processEvents(events: AsyncIterable<AgentEvent>) {\n\t\tconst model = this._state.model!;\n\t\tconst generatedMessages: AppMessage[] = [];\n\t\tlet partial: AppMessage | null = null;\n\n\t\ttry {\n\t\t\tfor await (const ev of events) {\n\t\t\t\tswitch (ev.type) {\n\t\t\t\t\tcase \"message_start\": {\n\t\t\t\t\t\tpartial = ev.message as AppMessage;\n\t\t\t\t\t\tthis._state.streamMessage = ev.message as Message;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"message_update\": {\n\t\t\t\t\t\tpartial = ev.message as AppMessage;\n\t\t\t\t\t\tthis._state.streamMessage = ev.message as Message;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"message_end\": {\n\t\t\t\t\t\tpartial = null;\n\t\t\t\t\t\tthis._state.streamMessage = null;\n\t\t\t\t\t\tthis.appendMessage(ev.message as 
AppMessage);\n\t\t\t\t\t\tgeneratedMessages.push(ev.message as AppMessage);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"tool_execution_start\": {\n\t\t\t\t\t\tconst s = new Set(this._state.pendingToolCalls);\n\t\t\t\t\t\ts.add(ev.toolCallId);\n\t\t\t\t\t\tthis._state.pendingToolCalls = s;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"tool_execution_end\": {\n\t\t\t\t\t\tconst s = new Set(this._state.pendingToolCalls);\n\t\t\t\t\t\ts.delete(ev.toolCallId);\n\t\t\t\t\t\tthis._state.pendingToolCalls = s;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"turn_end\": {\n\t\t\t\t\t\tif (ev.message.role === \"assistant\" && ev.message.errorMessage) {\n\t\t\t\t\t\t\tthis._state.error = ev.message.errorMessage;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tcase \"agent_end\": {\n\t\t\t\t\t\tthis._state.streamMessage = null;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\tthis.emit(ev as AgentEvent);\n\t\t\t}\n\n\t\t\t// Handle any remaining partial message\n\t\t\tif (partial && partial.role === \"assistant\" && partial.content.length > 0) {\n\t\t\t\tconst onlyEmpty = !partial.content.some(\n\t\t\t\t\t(c) =>\n\t\t\t\t\t\t(c.type === \"thinking\" && c.thinking.trim().length > 0) ||\n\t\t\t\t\t\t(c.type === \"text\" && c.text.trim().length > 0) ||\n\t\t\t\t\t\t(c.type === \"toolCall\" && c.name.trim().length > 0),\n\t\t\t\t);\n\t\t\t\tif (!onlyEmpty) {\n\t\t\t\t\tthis.appendMessage(partial as AppMessage);\n\t\t\t\t\tgeneratedMessages.push(partial as AppMessage);\n\t\t\t\t} else {\n\t\t\t\t\tif (this.abortController?.signal.aborted) {\n\t\t\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (err: any) {\n\t\t\tconst msg: Message = {\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: [{ type: \"text\", text: \"\" }],\n\t\t\t\tapi: model.api,\n\t\t\t\tprovider: model.provider,\n\t\t\t\tmodel: model.id,\n\t\t\t\tusage: {\n\t\t\t\t\tinput: 0,\n\t\t\t\t\toutput: 0,\n\t\t\t\t\tcacheRead: 
0,\n\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\ttotalTokens: 0,\n\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t\t},\n\t\t\t\tstopReason: this.abortController?.signal.aborted ? \"aborted\" : \"error\",\n\t\t\t\terrorMessage: err?.message || String(err),\n\t\t\t\ttimestamp: Date.now(),\n\t\t\t};\n\t\t\tthis.appendMessage(msg as AppMessage);\n\t\t\tgeneratedMessages.push(msg as AppMessage);\n\t\t\tthis._state.error = err?.message || String(err);\n\t\t} finally {\n\t\t\tthis._state.isStreaming = false;\n\t\t\tthis._state.streamMessage = null;\n\t\t\tthis._state.pendingToolCalls = new Set<string>();\n\t\t\tthis.abortController = undefined;\n\t\t\tthis.resolveRunningPrompt?.();\n\t\t\tthis.runningPrompt = undefined;\n\t\t\tthis.resolveRunningPrompt = undefined;\n\t\t}\n\t}\n\n\tprivate emit(e: AgentEvent) {\n\t\tfor (const listener of this.listeners) {\n\t\t\tlistener(e);\n\t\t}\n\t}\n}\n"]}
1
+ {"version":3,"file":"agent.d.ts","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAEN,KAAK,YAAY,EACjB,KAAK,OAAO,EACZ,KAAK,KAAK,EAIV,MAAM,qBAAqB,CAAC;AAE7B,OAAO,KAAK,EAEX,UAAU,EAEV,YAAY,EACZ,UAAU,EACV,SAAS,EACT,QAAQ,EACR,aAAa,EACb,MAAM,YAAY,CAAC;AASpB,MAAM,WAAW,YAAY;IAC5B,YAAY,CAAC,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC;IAEnC;;;OAGG;IACH,YAAY,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,KAAK,OAAO,EAAE,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC;IAE5E;;;OAGG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,EAAE,MAAM,CAAC,EAAE,WAAW,KAAK,OAAO,CAAC,YAAY,EAAE,CAAC,CAAC;IAE/F;;OAEG;IACH,SAAS,CAAC,EAAE,KAAK,GAAG,eAAe,CAAC;IAEpC;;OAEG;IACH,QAAQ,CAAC,EAAE,QAAQ,CAAC;IAEpB;;;OAGG;IACH,SAAS,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,MAAM,GAAG,SAAS,CAAC;CACnF;AAED,qBAAa,KAAK;IACjB,OAAO,CAAC,MAAM,CAUZ;IAEF,OAAO,CAAC,SAAS,CAAsC;IACvD,OAAO,CAAC,eAAe,CAAC,CAAkB;IAC1C,OAAO,CAAC,YAAY,CAA+D;IACnF,OAAO,CAAC,gBAAgB,CAAC,CAA8E;IACvG,OAAO,CAAC,YAAY,CAAsB;IAC1C,OAAO,CAAC,SAAS,CAA0B;IACpC,QAAQ,EAAE,QAAQ,CAAC;IACnB,SAAS,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,MAAM,GAAG,SAAS,CAAC;IAC1F,OAAO,CAAC,aAAa,CAAC,CAAgB;IACtC,OAAO,CAAC,oBAAoB,CAAC,CAAa;IAE1C,YAAY,IAAI,GAAE,YAAiB,EAOlC;IAED,IAAI,KAAK,IAAI,UAAU,CAEtB;IAED,SAAS,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,UAAU,KAAK,IAAI,GAAG,MAAM,IAAI,CAGjD;IAGD,eAAe,CAAC,CAAC,EAAE,MAAM,QAExB;IAED,QAAQ,CAAC,CAAC,EAAE,KAAK,CAAC,GAAG,CAAC,QAErB;IAED,gBAAgB,CAAC,CAAC,EAAE,aAAa,QAEhC;IAED,YAAY,CAAC,IAAI,EAAE,KAAK,GAAG,eAAe,QAEzC;IAED,YAAY,IAAI,KAAK,GAAG,eAAe,CAEtC;IAED,QAAQ,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,CAAC,EAAE,QAE3B;IAED,eAAe,CAAC,EAAE,EAAE,YAAY,EAAE,QAEjC;IAED,aAAa,CAAC,CAAC,EAAE,YAAY,QAE5B;IAED,YAAY,CAAC,CAAC,EAAE,YAAY,QAE3B;IAED,iBAAiB,SAEhB;IAED,aAAa,SAEZ;IAED,KAAK,SAEJ;IAED,WAAW,IAAI,OAAO,CAAC,IAAI,CAAC,CAE3B;IAED,KAAK,SAOJ;IAED,yCAAyC;IACnC,MAAM,CAAC,OAAO,EAAE,YAAY,GAAG,YAAY,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;IAC9D,MAAM,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,YAAY,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,C
AAC;IA4BpE,+DAA+D;IACzD,QAAQ,kBAUb;YAOa,QAAQ;IA0JtB,OAAO,CAAC,IAAI;CAKZ","sourcesContent":["/**\n * Agent class that uses the agent-loop directly.\n * No transport abstraction - calls streamSimple via the loop.\n */\n\nimport {\n\tgetModel,\n\ttype ImageContent,\n\ttype Message,\n\ttype Model,\n\ttype ReasoningEffort,\n\tstreamSimple,\n\ttype TextContent,\n} from \"@mariozechner/pi-ai\";\nimport { agentLoop, agentLoopContinue } from \"./agent-loop.js\";\nimport type {\n\tAgentContext,\n\tAgentEvent,\n\tAgentLoopConfig,\n\tAgentMessage,\n\tAgentState,\n\tAgentTool,\n\tStreamFn,\n\tThinkingLevel,\n} from \"./types.js\";\n\n/**\n * Default convertToLlm: Keep only LLM-compatible messages, convert attachments.\n */\nfunction defaultConvertToLlm(messages: AgentMessage[]): Message[] {\n\treturn messages.filter((m) => m.role === \"user\" || m.role === \"assistant\" || m.role === \"toolResult\");\n}\n\nexport interface AgentOptions {\n\tinitialState?: Partial<AgentState>;\n\n\t/**\n\t * Converts AgentMessage[] to LLM-compatible Message[] before each LLM call.\n\t * Default filters to user/assistant/toolResult and converts attachments.\n\t */\n\tconvertToLlm?: (messages: AgentMessage[]) => Message[] | Promise<Message[]>;\n\n\t/**\n\t * Optional transform applied to context before convertToLlm.\n\t * Use for context pruning, injecting external context, etc.\n\t */\n\ttransformContext?: (messages: AgentMessage[], signal?: AbortSignal) => Promise<AgentMessage[]>;\n\n\t/**\n\t * Queue mode: \"all\" = send all queued messages at once, \"one-at-a-time\" = one per turn\n\t */\n\tqueueMode?: \"all\" | \"one-at-a-time\";\n\n\t/**\n\t * Custom stream function (for proxy backends, etc.). 
Default uses streamSimple.\n\t */\n\tstreamFn?: StreamFn;\n\n\t/**\n\t * Resolves an API key dynamically for each LLM call.\n\t * Useful for expiring tokens (e.g., GitHub Copilot OAuth).\n\t */\n\tgetApiKey?: (provider: string) => Promise<string | undefined> | string | undefined;\n}\n\nexport class Agent {\n\tprivate _state: AgentState = {\n\t\tsystemPrompt: \"\",\n\t\tmodel: getModel(\"google\", \"gemini-2.5-flash-lite-preview-06-17\"),\n\t\tthinkingLevel: \"off\",\n\t\ttools: [],\n\t\tmessages: [],\n\t\tisStreaming: false,\n\t\tstreamMessage: null,\n\t\tpendingToolCalls: new Set<string>(),\n\t\terror: undefined,\n\t};\n\n\tprivate listeners = new Set<(e: AgentEvent) => void>();\n\tprivate abortController?: AbortController;\n\tprivate convertToLlm: (messages: AgentMessage[]) => Message[] | Promise<Message[]>;\n\tprivate transformContext?: (messages: AgentMessage[], signal?: AbortSignal) => Promise<AgentMessage[]>;\n\tprivate messageQueue: AgentMessage[] = [];\n\tprivate queueMode: \"all\" | \"one-at-a-time\";\n\tpublic streamFn: StreamFn;\n\tpublic getApiKey?: (provider: string) => Promise<string | undefined> | string | undefined;\n\tprivate runningPrompt?: Promise<void>;\n\tprivate resolveRunningPrompt?: () => void;\n\n\tconstructor(opts: AgentOptions = {}) {\n\t\tthis._state = { ...this._state, ...opts.initialState };\n\t\tthis.convertToLlm = opts.convertToLlm || defaultConvertToLlm;\n\t\tthis.transformContext = opts.transformContext;\n\t\tthis.queueMode = opts.queueMode || \"one-at-a-time\";\n\t\tthis.streamFn = opts.streamFn || streamSimple;\n\t\tthis.getApiKey = opts.getApiKey;\n\t}\n\n\tget state(): AgentState {\n\t\treturn this._state;\n\t}\n\n\tsubscribe(fn: (e: AgentEvent) => void): () => void {\n\t\tthis.listeners.add(fn);\n\t\treturn () => this.listeners.delete(fn);\n\t}\n\n\t// State mutators\n\tsetSystemPrompt(v: string) {\n\t\tthis._state.systemPrompt = v;\n\t}\n\n\tsetModel(m: Model<any>) {\n\t\tthis._state.model = m;\n\t}\n\n\tsetThinkingLevel(l: 
ThinkingLevel) {\n\t\tthis._state.thinkingLevel = l;\n\t}\n\n\tsetQueueMode(mode: \"all\" | \"one-at-a-time\") {\n\t\tthis.queueMode = mode;\n\t}\n\n\tgetQueueMode(): \"all\" | \"one-at-a-time\" {\n\t\treturn this.queueMode;\n\t}\n\n\tsetTools(t: AgentTool<any>[]) {\n\t\tthis._state.tools = t;\n\t}\n\n\treplaceMessages(ms: AgentMessage[]) {\n\t\tthis._state.messages = ms.slice();\n\t}\n\n\tappendMessage(m: AgentMessage) {\n\t\tthis._state.messages = [...this._state.messages, m];\n\t}\n\n\tqueueMessage(m: AgentMessage) {\n\t\tthis.messageQueue.push(m);\n\t}\n\n\tclearMessageQueue() {\n\t\tthis.messageQueue = [];\n\t}\n\n\tclearMessages() {\n\t\tthis._state.messages = [];\n\t}\n\n\tabort() {\n\t\tthis.abortController?.abort();\n\t}\n\n\twaitForIdle(): Promise<void> {\n\t\treturn this.runningPrompt ?? Promise.resolve();\n\t}\n\n\treset() {\n\t\tthis._state.messages = [];\n\t\tthis._state.isStreaming = false;\n\t\tthis._state.streamMessage = null;\n\t\tthis._state.pendingToolCalls = new Set<string>();\n\t\tthis._state.error = undefined;\n\t\tthis.messageQueue = [];\n\t}\n\n\t/** Send a prompt with an AgentMessage */\n\tasync prompt(message: AgentMessage | AgentMessage[]): Promise<void>;\n\tasync prompt(input: string, images?: ImageContent[]): Promise<void>;\n\tasync prompt(input: string | AgentMessage | AgentMessage[], images?: ImageContent[]) {\n\t\tconst model = this._state.model;\n\t\tif (!model) throw new Error(\"No model configured\");\n\n\t\tlet msgs: AgentMessage[];\n\n\t\tif (Array.isArray(input)) {\n\t\t\tmsgs = input;\n\t\t} else if (typeof input === \"string\") {\n\t\t\tconst content: Array<TextContent | ImageContent> = [{ type: \"text\", text: input }];\n\t\t\tif (images && images.length > 0) {\n\t\t\t\tcontent.push(...images);\n\t\t\t}\n\t\t\tmsgs = [\n\t\t\t\t{\n\t\t\t\t\trole: \"user\",\n\t\t\t\t\tcontent,\n\t\t\t\t\ttimestamp: Date.now(),\n\t\t\t\t},\n\t\t\t];\n\t\t} else {\n\t\t\tmsgs = [input];\n\t\t}\n\n\t\tawait this._runLoop(msgs);\n\t}\n\n\t/** 
Continue from current context (for retry after overflow) */\n\tasync continue() {\n\t\tconst messages = this._state.messages;\n\t\tif (messages.length === 0) {\n\t\t\tthrow new Error(\"No messages to continue from\");\n\t\t}\n\t\tif (messages[messages.length - 1].role === \"assistant\") {\n\t\t\tthrow new Error(\"Cannot continue from message role: assistant\");\n\t\t}\n\n\t\tawait this._runLoop(undefined);\n\t}\n\n\t/**\n\t * Run the agent loop.\n\t * If messages are provided, starts a new conversation turn with those messages.\n\t * Otherwise, continues from existing context.\n\t */\n\tprivate async _runLoop(messages?: AgentMessage[]) {\n\t\tconst model = this._state.model;\n\t\tif (!model) throw new Error(\"No model configured\");\n\n\t\tthis.runningPrompt = new Promise<void>((resolve) => {\n\t\t\tthis.resolveRunningPrompt = resolve;\n\t\t});\n\n\t\tthis.abortController = new AbortController();\n\t\tthis._state.isStreaming = true;\n\t\tthis._state.streamMessage = null;\n\t\tthis._state.error = undefined;\n\n\t\tconst reasoning: ReasoningEffort | undefined =\n\t\t\tthis._state.thinkingLevel === \"off\"\n\t\t\t\t? undefined\n\t\t\t\t: this._state.thinkingLevel === \"minimal\"\n\t\t\t\t\t? 
\"low\"\n\t\t\t\t\t: (this._state.thinkingLevel as ReasoningEffort);\n\n\t\tconst context: AgentContext = {\n\t\t\tsystemPrompt: this._state.systemPrompt,\n\t\t\tmessages: this._state.messages.slice(),\n\t\t\ttools: this._state.tools,\n\t\t};\n\n\t\tconst config: AgentLoopConfig = {\n\t\t\tmodel,\n\t\t\treasoning,\n\t\t\tconvertToLlm: this.convertToLlm,\n\t\t\ttransformContext: this.transformContext,\n\t\t\tgetApiKey: this.getApiKey,\n\t\t\tgetQueuedMessages: async () => {\n\t\t\t\tif (this.queueMode === \"one-at-a-time\") {\n\t\t\t\t\tif (this.messageQueue.length > 0) {\n\t\t\t\t\t\tconst first = this.messageQueue[0];\n\t\t\t\t\t\tthis.messageQueue = this.messageQueue.slice(1);\n\t\t\t\t\t\treturn [first];\n\t\t\t\t\t}\n\t\t\t\t\treturn [];\n\t\t\t\t} else {\n\t\t\t\t\tconst queued = this.messageQueue.slice();\n\t\t\t\t\tthis.messageQueue = [];\n\t\t\t\t\treturn queued;\n\t\t\t\t}\n\t\t\t},\n\t\t};\n\n\t\tlet partial: AgentMessage | null = null;\n\n\t\ttry {\n\t\t\tconst stream = messages\n\t\t\t\t? 
agentLoop(messages, context, config, this.abortController.signal, this.streamFn)\n\t\t\t\t: agentLoopContinue(context, config, this.abortController.signal, this.streamFn);\n\n\t\t\tfor await (const event of stream) {\n\t\t\t\t// Update internal state based on events\n\t\t\t\tswitch (event.type) {\n\t\t\t\t\tcase \"message_start\":\n\t\t\t\t\t\tpartial = event.message;\n\t\t\t\t\t\tthis._state.streamMessage = event.message;\n\t\t\t\t\t\tbreak;\n\n\t\t\t\t\tcase \"message_update\":\n\t\t\t\t\t\tpartial = event.message;\n\t\t\t\t\t\tthis._state.streamMessage = event.message;\n\t\t\t\t\t\tbreak;\n\n\t\t\t\t\tcase \"message_end\":\n\t\t\t\t\t\tpartial = null;\n\t\t\t\t\t\tthis._state.streamMessage = null;\n\t\t\t\t\t\tthis.appendMessage(event.message);\n\t\t\t\t\t\tbreak;\n\n\t\t\t\t\tcase \"tool_execution_start\": {\n\t\t\t\t\t\tconst s = new Set(this._state.pendingToolCalls);\n\t\t\t\t\t\ts.add(event.toolCallId);\n\t\t\t\t\t\tthis._state.pendingToolCalls = s;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\n\t\t\t\t\tcase \"tool_execution_end\": {\n\t\t\t\t\t\tconst s = new Set(this._state.pendingToolCalls);\n\t\t\t\t\t\ts.delete(event.toolCallId);\n\t\t\t\t\t\tthis._state.pendingToolCalls = s;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\n\t\t\t\t\tcase \"turn_end\":\n\t\t\t\t\t\tif (event.message.role === \"assistant\" && (event.message as any).errorMessage) {\n\t\t\t\t\t\t\tthis._state.error = (event.message as any).errorMessage;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\n\t\t\t\t\tcase \"agent_end\":\n\t\t\t\t\t\tthis._state.isStreaming = false;\n\t\t\t\t\t\tthis._state.streamMessage = null;\n\t\t\t\t\t\tbreak;\n\t\t\t\t}\n\n\t\t\t\t// Emit to listeners\n\t\t\t\tthis.emit(event);\n\t\t\t}\n\n\t\t\t// Handle any remaining partial message\n\t\t\tif (partial && partial.role === \"assistant\" && partial.content.length > 0) {\n\t\t\t\tconst onlyEmpty = !partial.content.some(\n\t\t\t\t\t(c) =>\n\t\t\t\t\t\t(c.type === \"thinking\" && c.thinking.trim().length > 0) ||\n\t\t\t\t\t\t(c.type === 
\"text\" && c.text.trim().length > 0) ||\n\t\t\t\t\t\t(c.type === \"toolCall\" && c.name.trim().length > 0),\n\t\t\t\t);\n\t\t\t\tif (!onlyEmpty) {\n\t\t\t\t\tthis.appendMessage(partial);\n\t\t\t\t} else {\n\t\t\t\t\tif (this.abortController?.signal.aborted) {\n\t\t\t\t\t\tthrow new Error(\"Request was aborted\");\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (err: any) {\n\t\t\tconst errorMsg: AgentMessage = {\n\t\t\t\trole: \"assistant\",\n\t\t\t\tcontent: [{ type: \"text\", text: \"\" }],\n\t\t\t\tapi: model.api,\n\t\t\t\tprovider: model.provider,\n\t\t\t\tmodel: model.id,\n\t\t\t\tusage: {\n\t\t\t\t\tinput: 0,\n\t\t\t\t\toutput: 0,\n\t\t\t\t\tcacheRead: 0,\n\t\t\t\t\tcacheWrite: 0,\n\t\t\t\t\ttotalTokens: 0,\n\t\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t\t},\n\t\t\t\tstopReason: this.abortController?.signal.aborted ? \"aborted\" : \"error\",\n\t\t\t\terrorMessage: err?.message || String(err),\n\t\t\t\ttimestamp: Date.now(),\n\t\t\t} as AgentMessage;\n\n\t\t\tthis.appendMessage(errorMsg);\n\t\t\tthis._state.error = err?.message || String(err);\n\t\t\tthis.emit({ type: \"agent_end\", messages: [errorMsg] });\n\t\t} finally {\n\t\t\tthis._state.isStreaming = false;\n\t\t\tthis._state.streamMessage = null;\n\t\t\tthis._state.pendingToolCalls = new Set<string>();\n\t\t\tthis.abortController = undefined;\n\t\t\tthis.resolveRunningPrompt?.();\n\t\t\tthis.runningPrompt = undefined;\n\t\t\tthis.resolveRunningPrompt = undefined;\n\t\t}\n\t}\n\n\tprivate emit(e: AgentEvent) {\n\t\tfor (const listener of this.listeners) {\n\t\t\tlistener(e);\n\t\t}\n\t}\n}\n"]}