@mariozechner/pi-agent-core 0.30.2 → 0.31.0

Files changed (41)
  1. package/README.md +297 -126
  2. package/dist/agent-loop.d.ts +21 -0
  3. package/dist/agent-loop.d.ts.map +1 -0
  4. package/dist/agent-loop.js +294 -0
  5. package/dist/agent-loop.js.map +1 -0
  6. package/dist/agent.d.ts +43 -29
  7. package/dist/agent.d.ts.map +1 -1
  8. package/dist/agent.js +83 -148
  9. package/dist/agent.js.map +1 -1
  10. package/dist/index.d.ts +4 -3
  11. package/dist/index.d.ts.map +1 -1
  12. package/dist/index.js +7 -3
  13. package/dist/index.js.map +1 -1
  14. package/dist/proxy.d.ts +85 -0
  15. package/dist/proxy.d.ts.map +1 -0
  16. package/dist/proxy.js +269 -0
  17. package/dist/proxy.js.map +1 -0
  18. package/dist/types.d.ts +88 -29
  19. package/dist/types.d.ts.map +1 -1
  20. package/dist/types.js.map +1 -1
  21. package/package.json +3 -3
  22. package/dist/transports/AppTransport.d.ts +0 -28
  23. package/dist/transports/AppTransport.d.ts.map +0 -1
  24. package/dist/transports/AppTransport.js +0 -330
  25. package/dist/transports/AppTransport.js.map +0 -1
  26. package/dist/transports/ProviderTransport.d.ts +0 -29
  27. package/dist/transports/ProviderTransport.d.ts.map +0 -1
  28. package/dist/transports/ProviderTransport.js +0 -54
  29. package/dist/transports/ProviderTransport.js.map +0 -1
  30. package/dist/transports/index.d.ts +0 -5
  31. package/dist/transports/index.d.ts.map +0 -1
  32. package/dist/transports/index.js +0 -3
  33. package/dist/transports/index.js.map +0 -1
  34. package/dist/transports/proxy-types.d.ts +0 -53
  35. package/dist/transports/proxy-types.d.ts.map +0 -1
  36. package/dist/transports/proxy-types.js +0 -2
  37. package/dist/transports/proxy-types.js.map +0 -1
  38. package/dist/transports/types.d.ts +0 -25
  39. package/dist/transports/types.d.ts.map +0 -1
  40. package/dist/transports/types.js +0 -2
  41. package/dist/transports/types.js.map +0 -1
package/dist/proxy.js ADDED
@@ -0,0 +1,269 @@
1
+ /**
2
+ * Proxy stream function for apps that route LLM calls through a server.
3
+ * The server manages auth and proxies requests to LLM providers.
4
+ */
5
+ import { EventStream, } from "@mariozechner/pi-ai";
6
+ // Internal import for JSON parsing utility
7
+ import { parseStreamingJson } from "@mariozechner/pi-ai/dist/utils/json-parse.js";
8
+ // Create stream class matching ProxyMessageEventStream
9
+ class ProxyMessageEventStream extends EventStream {
10
+ constructor() {
11
+ super((event) => event.type === "done" || event.type === "error", (event) => {
12
+ if (event.type === "done")
13
+ return event.message;
14
+ if (event.type === "error")
15
+ return event.error;
16
+ throw new Error("Unexpected event type");
17
+ });
18
+ }
19
+ }
20
+ /**
21
+ * Stream function that proxies through a server instead of calling LLM providers directly.
22
+ * The server strips the partial field from delta events to reduce bandwidth.
23
+ * We reconstruct the partial message client-side.
24
+ *
25
+ * Use this as the `streamFn` option when creating an Agent that needs to go through a proxy.
26
+ *
27
+ * @example
28
+ * ```typescript
29
+ * const agent = new Agent({
30
+ * streamFn: (model, context, options) =>
31
+ * streamProxy(model, context, {
32
+ * ...options,
33
+ * authToken: await getAuthToken(),
34
+ * proxyUrl: "https://genai.example.com",
35
+ * }),
36
+ * });
37
+ * ```
38
+ */
39
+ export function streamProxy(model, context, options) {
40
+ const stream = new ProxyMessageEventStream();
41
+ (async () => {
42
+ // Initialize the partial message that we'll build up from events
43
+ const partial = {
44
+ role: "assistant",
45
+ stopReason: "stop",
46
+ content: [],
47
+ api: model.api,
48
+ provider: model.provider,
49
+ model: model.id,
50
+ usage: {
51
+ input: 0,
52
+ output: 0,
53
+ cacheRead: 0,
54
+ cacheWrite: 0,
55
+ totalTokens: 0,
56
+ cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
57
+ },
58
+ timestamp: Date.now(),
59
+ };
60
+ let reader;
61
+ const abortHandler = () => {
62
+ if (reader) {
63
+ reader.cancel("Request aborted by user").catch(() => { });
64
+ }
65
+ };
66
+ if (options.signal) {
67
+ options.signal.addEventListener("abort", abortHandler);
68
+ }
69
+ try {
70
+ const response = await fetch(`${options.proxyUrl}/api/stream`, {
71
+ method: "POST",
72
+ headers: {
73
+ Authorization: `Bearer ${options.authToken}`,
74
+ "Content-Type": "application/json",
75
+ },
76
+ body: JSON.stringify({
77
+ model,
78
+ context,
79
+ options: {
80
+ temperature: options.temperature,
81
+ maxTokens: options.maxTokens,
82
+ reasoning: options.reasoning,
83
+ },
84
+ }),
85
+ signal: options.signal,
86
+ });
87
+ if (!response.ok) {
88
+ let errorMessage = `Proxy error: ${response.status} ${response.statusText}`;
89
+ try {
90
+ const errorData = (await response.json());
91
+ if (errorData.error) {
92
+ errorMessage = `Proxy error: ${errorData.error}`;
93
+ }
94
+ }
95
+ catch {
96
+ // Couldn't parse error response
97
+ }
98
+ throw new Error(errorMessage);
99
+ }
100
+ reader = response.body.getReader();
101
+ const decoder = new TextDecoder();
102
+ let buffer = "";
103
+ while (true) {
104
+ const { done, value } = await reader.read();
105
+ if (done)
106
+ break;
107
+ if (options.signal?.aborted) {
108
+ throw new Error("Request aborted by user");
109
+ }
110
+ buffer += decoder.decode(value, { stream: true });
111
+ const lines = buffer.split("\n");
112
+ buffer = lines.pop() || "";
113
+ for (const line of lines) {
114
+ if (line.startsWith("data: ")) {
115
+ const data = line.slice(6).trim();
116
+ if (data) {
117
+ const proxyEvent = JSON.parse(data);
118
+ const event = processProxyEvent(proxyEvent, partial);
119
+ if (event) {
120
+ stream.push(event);
121
+ }
122
+ }
123
+ }
124
+ }
125
+ }
126
+ if (options.signal?.aborted) {
127
+ throw new Error("Request aborted by user");
128
+ }
129
+ stream.end();
130
+ }
131
+ catch (error) {
132
+ const errorMessage = error instanceof Error ? error.message : String(error);
133
+ const reason = options.signal?.aborted ? "aborted" : "error";
134
+ partial.stopReason = reason;
135
+ partial.errorMessage = errorMessage;
136
+ stream.push({
137
+ type: "error",
138
+ reason,
139
+ error: partial,
140
+ });
141
+ stream.end();
142
+ }
143
+ finally {
144
+ if (options.signal) {
145
+ options.signal.removeEventListener("abort", abortHandler);
146
+ }
147
+ }
148
+ })();
149
+ return stream;
150
+ }
151
+ /**
152
+ * Process a proxy event and update the partial message.
153
+ */
154
+ function processProxyEvent(proxyEvent, partial) {
155
+ switch (proxyEvent.type) {
156
+ case "start":
157
+ return { type: "start", partial };
158
+ case "text_start":
159
+ partial.content[proxyEvent.contentIndex] = { type: "text", text: "" };
160
+ return { type: "text_start", contentIndex: proxyEvent.contentIndex, partial };
161
+ case "text_delta": {
162
+ const content = partial.content[proxyEvent.contentIndex];
163
+ if (content?.type === "text") {
164
+ content.text += proxyEvent.delta;
165
+ return {
166
+ type: "text_delta",
167
+ contentIndex: proxyEvent.contentIndex,
168
+ delta: proxyEvent.delta,
169
+ partial,
170
+ };
171
+ }
172
+ throw new Error("Received text_delta for non-text content");
173
+ }
174
+ case "text_end": {
175
+ const content = partial.content[proxyEvent.contentIndex];
176
+ if (content?.type === "text") {
177
+ content.textSignature = proxyEvent.contentSignature;
178
+ return {
179
+ type: "text_end",
180
+ contentIndex: proxyEvent.contentIndex,
181
+ content: content.text,
182
+ partial,
183
+ };
184
+ }
185
+ throw new Error("Received text_end for non-text content");
186
+ }
187
+ case "thinking_start":
188
+ partial.content[proxyEvent.contentIndex] = { type: "thinking", thinking: "" };
189
+ return { type: "thinking_start", contentIndex: proxyEvent.contentIndex, partial };
190
+ case "thinking_delta": {
191
+ const content = partial.content[proxyEvent.contentIndex];
192
+ if (content?.type === "thinking") {
193
+ content.thinking += proxyEvent.delta;
194
+ return {
195
+ type: "thinking_delta",
196
+ contentIndex: proxyEvent.contentIndex,
197
+ delta: proxyEvent.delta,
198
+ partial,
199
+ };
200
+ }
201
+ throw new Error("Received thinking_delta for non-thinking content");
202
+ }
203
+ case "thinking_end": {
204
+ const content = partial.content[proxyEvent.contentIndex];
205
+ if (content?.type === "thinking") {
206
+ content.thinkingSignature = proxyEvent.contentSignature;
207
+ return {
208
+ type: "thinking_end",
209
+ contentIndex: proxyEvent.contentIndex,
210
+ content: content.thinking,
211
+ partial,
212
+ };
213
+ }
214
+ throw new Error("Received thinking_end for non-thinking content");
215
+ }
216
+ case "toolcall_start":
217
+ partial.content[proxyEvent.contentIndex] = {
218
+ type: "toolCall",
219
+ id: proxyEvent.id,
220
+ name: proxyEvent.toolName,
221
+ arguments: {},
222
+ partialJson: "",
223
+ };
224
+ return { type: "toolcall_start", contentIndex: proxyEvent.contentIndex, partial };
225
+ case "toolcall_delta": {
226
+ const content = partial.content[proxyEvent.contentIndex];
227
+ if (content?.type === "toolCall") {
228
+ content.partialJson += proxyEvent.delta;
229
+ content.arguments = parseStreamingJson(content.partialJson) || {};
230
+ partial.content[proxyEvent.contentIndex] = { ...content }; // Trigger reactivity
231
+ return {
232
+ type: "toolcall_delta",
233
+ contentIndex: proxyEvent.contentIndex,
234
+ delta: proxyEvent.delta,
235
+ partial,
236
+ };
237
+ }
238
+ throw new Error("Received toolcall_delta for non-toolCall content");
239
+ }
240
+ case "toolcall_end": {
241
+ const content = partial.content[proxyEvent.contentIndex];
242
+ if (content?.type === "toolCall") {
243
+ delete content.partialJson;
244
+ return {
245
+ type: "toolcall_end",
246
+ contentIndex: proxyEvent.contentIndex,
247
+ toolCall: content,
248
+ partial,
249
+ };
250
+ }
251
+ return undefined;
252
+ }
253
+ case "done":
254
+ partial.stopReason = proxyEvent.reason;
255
+ partial.usage = proxyEvent.usage;
256
+ return { type: "done", reason: proxyEvent.reason, message: partial };
257
+ case "error":
258
+ partial.stopReason = proxyEvent.reason;
259
+ partial.errorMessage = proxyEvent.errorMessage;
260
+ partial.usage = proxyEvent.usage;
261
+ return { type: "error", reason: proxyEvent.reason, error: partial };
262
+ default: {
263
+ const _exhaustiveCheck = proxyEvent;
264
+ console.warn(`Unhandled proxy event type: ${proxyEvent.type}`);
265
+ return undefined;
266
+ }
267
+ }
268
+ }
269
+ //# sourceMappingURL=proxy.js.map
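The client above consumes a plain SSE body: each `data:` line carries one JSON-encoded `ProxyAssistantMessageEvent` with the bandwidth-heavy `partial` field already stripped, and the partial assistant message is rebuilt locally. A minimal server-side sketch of that wire format follows, assuming `ProxyAssistantMessageEvent` is re-exported from the package root; the handler itself is illustrative and not part of this package.

```typescript
// Hypothetical proxy endpoint sketch: only the SSE framing that the streamProxy
// client expects from POST /api/stream is shown.
import type { ServerResponse } from "node:http";
import type { ProxyAssistantMessageEvent } from "@mariozechner/pi-agent-core";

function writeProxyEvent(res: ServerResponse, event: ProxyAssistantMessageEvent): void {
	// The client splits the body on "\n" and JSON.parses everything after "data: ".
	res.write(`data: ${JSON.stringify(event)}\n\n`);
}

// A text-only turn could be relayed as:
//   writeProxyEvent(res, { type: "start" });
//   writeProxyEvent(res, { type: "text_start", contentIndex: 0 });
//   writeProxyEvent(res, { type: "text_delta", contentIndex: 0, delta: "Hello" });
//   writeProxyEvent(res, { type: "text_end", contentIndex: 0 });
//   writeProxyEvent(res, { type: "done", reason: "stop", usage: providerUsage });
```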
package/dist/proxy.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"file":"proxy.js","sourceRoot":"","sources":["../src/proxy.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAIN,WAAW,GAKX,MAAM,qBAAqB,CAAC;AAC7B,2CAA2C;AAC3C,OAAO,EAAE,kBAAkB,EAAE,MAAM,8CAA8C,CAAC;AAElF,uDAAuD;AACvD,MAAM,uBAAwB,SAAQ,WAAoD;IACzF,cAAc;QACb,KAAK,CACJ,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,KAAK,MAAM,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAC1D,CAAC,KAAK,EAAE,EAAE,CAAC;YACV,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM;gBAAE,OAAO,KAAK,CAAC,OAAO,CAAC;YAChD,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO;gBAAE,OAAO,KAAK,CAAC,KAAK,CAAC;YAC/C,MAAM,IAAI,KAAK,CAAC,uBAAuB,CAAC,CAAC;QAAA,CACzC,CACD,CAAC;IAAA,CACF;CACD;AAmCD;;;;;;;;;;;;;;;;;;GAkBG;AACH,MAAM,UAAU,WAAW,CAAC,KAAiB,EAAE,OAAgB,EAAE,OAA2B,EAA2B;IACtH,MAAM,MAAM,GAAG,IAAI,uBAAuB,EAAE,CAAC;IAE7C,CAAC,KAAK,IAAI,EAAE,CAAC;QACZ,iEAAiE;QACjE,MAAM,OAAO,GAAqB;YACjC,IAAI,EAAE,WAAW;YACjB,UAAU,EAAE,MAAM;YAClB,OAAO,EAAE,EAAE;YACX,GAAG,EAAE,KAAK,CAAC,GAAG;YACd,QAAQ,EAAE,KAAK,CAAC,QAAQ;YACxB,KAAK,EAAE,KAAK,CAAC,EAAE;YACf,KAAK,EAAE;gBACN,KAAK,EAAE,CAAC;gBACR,MAAM,EAAE,CAAC;gBACT,SAAS,EAAE,CAAC;gBACZ,UAAU,EAAE,CAAC;gBACb,WAAW,EAAE,CAAC;gBACd,IAAI,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,CAAC,EAAE;aACpE;YACD,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB,CAAC;QAEF,IAAI,MAA2D,CAAC;QAEhE,MAAM,YAAY,GAAG,GAAG,EAAE,CAAC;YAC1B,IAAI,MAAM,EAAE,CAAC;gBACZ,MAAM,CAAC,MAAM,CAAC,yBAAyB,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,EAAC,CAAC,CAAC,CAAC;YAC1D,CAAC;QAAA,CACD,CAAC;QAEF,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;YACpB,OAAO,CAAC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;QACxD,CAAC;QAED,IAAI,CAAC;YACJ,MAAM,QAAQ,GAAG,MAAM,KAAK,CAAC,GAAG,OAAO,CAAC,QAAQ,aAAa,EAAE;gBAC9D,MAAM,EAAE,MAAM;gBACd,OAAO,EAAE;oBACR,aAAa,EAAE,UAAU,OAAO,CAAC,SAAS,EAAE;oBAC5C,cAAc,EAAE,kBAAkB;iBAClC;gBACD,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC;oBACpB,KAAK;oBACL,OAAO;oBACP,OAAO,EAAE;wBACR,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,SAAS,EAAE,OAAO,CAAC,SAAS;wBAC5B,SAAS,EAAE,OAAO,CAAC,SAAS;qBAC5B;iBACD,CAAC;gBACF,MAAM,EAAE,OAAO,CAAC,MAAM;aACtB,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;gBAClB,IAAI,YAAY,GAAG,gBAAgB,QAAQ,CAAC,MAAM,IAAI,QAAQ,CAAC,UAAU,EAAE,CAAC;gBAC5E,IAAI,CAAC;oBACJ,MAAM,SAAS,GAAG,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAuB,CAAC;oBAChE,IAAI,SAAS,CAAC,KAAK,EAAE,CAAC;wBACrB,YAAY,GAAG,gBAAgB,SAAS,CAAC,KAAK,EAAE,CAAC;oBAClD,CAAC;gBACF,CAAC;gBAAC,MAAM,CAAC;oBACR,gCAAgC;gBACjC,CAAC;gBACD,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC;YAC/B,CAAC;YAED,MAAM,GAAG,QAAQ,CAAC,IAAK,CAAC,SAAS,EAAE,CAAC;YACpC,MAAM,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAClC,IAAI,MAAM,GAAG,EAAE,CAAC;YAEhB,OAAO,IAAI,EAAE,CAAC;gBACb,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;gBAC5C,IAAI,IAAI;oBAAE,MAAM;gBAEhB,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE,CAAC;oBAC7B,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;gBAC5C,CAAC;gBAED,MAAM,IAAI,OAAO,CAAC,MAAM,CAAC,KAAK,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC;gBAClD,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBACjC,MAAM,GAAG,KAAK,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC;gBAE3B,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;oBAC1B,IAAI,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;wBAC/B,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC;wBAClC,IAAI,IAAI,EAAE,CAAC;4BACV,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAA+B,CAAC;4BAClE,MAAM,KAAK,GAAG,iBAAiB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;4BACrD,IAAI,KAAK,EAAE,CAAC;gCACX,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;4BACpB,CAAC;wBACF,CAAC;oBACF,CAAC;gBACF,CAAC;YACF,CAAC;YAED,IAAI,OAAO,CAAC,MAAM,EAAE,OAAO,EAAE,CAAC;gBAC7B,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;YAC5C,CAAC;YAED,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YAChB,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAA
C,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;YAC5E,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC;YAC7D,OAAO,CAAC,UAAU,GAAG,MAAM,CAAC;YAC5B,OAAO,CAAC,YAAY,GAAG,YAAY,CAAC;YACpC,MAAM,CAAC,IAAI,CAAC;gBACX,IAAI,EAAE,OAAO;gBACb,MAAM;gBACN,KAAK,EAAE,OAAO;aACd,CAAC,CAAC;YACH,MAAM,CAAC,GAAG,EAAE,CAAC;QACd,CAAC;gBAAS,CAAC;YACV,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;gBACpB,OAAO,CAAC,MAAM,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;YAC3D,CAAC;QACF,CAAC;IAAA,CACD,CAAC,EAAE,CAAC;IAEL,OAAO,MAAM,CAAC;AAAA,CACd;AAED;;GAEG;AACH,SAAS,iBAAiB,CACzB,UAAsC,EACtC,OAAyB,EACW;IACpC,QAAQ,UAAU,CAAC,IAAI,EAAE,CAAC;QACzB,KAAK,OAAO;YACX,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC;QAEnC,KAAK,YAAY;YAChB,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,EAAE,EAAE,CAAC;YACtE,OAAO,EAAE,IAAI,EAAE,YAAY,EAAE,YAAY,EAAE,UAAU,CAAC,YAAY,EAAE,OAAO,EAAE,CAAC;QAE/E,KAAK,YAAY,EAAE,CAAC;YACnB,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC,CAAC;YACzD,IAAI,OAAO,EAAE,IAAI,KAAK,MAAM,EAAE,CAAC;gBAC9B,OAAO,CAAC,IAAI,IAAI,UAAU,CAAC,KAAK,CAAC;gBACjC,OAAO;oBACN,IAAI,EAAE,YAAY;oBAClB,YAAY,EAAE,UAAU,CAAC,YAAY;oBACrC,KAAK,EAAE,UAAU,CAAC,KAAK;oBACvB,OAAO;iBACP,CAAC;YACH,CAAC;YACD,MAAM,IAAI,KAAK,CAAC,0CAA0C,CAAC,CAAC;QAC7D,CAAC;QAED,KAAK,UAAU,EAAE,CAAC;YACjB,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC,CAAC;YACzD,IAAI,OAAO,EAAE,IAAI,KAAK,MAAM,EAAE,CAAC;gBAC9B,OAAO,CAAC,aAAa,GAAG,UAAU,CAAC,gBAAgB,CAAC;gBACpD,OAAO;oBACN,IAAI,EAAE,UAAU;oBAChB,YAAY,EAAE,UAAU,CAAC,YAAY;oBACrC,OAAO,EAAE,OAAO,CAAC,IAAI;oBACrB,OAAO;iBACP,CAAC;YACH,CAAC;YACD,MAAM,IAAI,KAAK,CAAC,wCAAwC,CAAC,CAAC;QAC3D,CAAC;QAED,KAAK,gBAAgB;YACpB,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC,GAAG,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,EAAE,EAAE,CAAC;YAC9E,OAAO,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,CAAC,YAAY,EAAE,OAAO,EAAE,CAAC;QAEnF,KAAK,gBAAgB,EAAE,CAAC;YACvB,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC,CAAC;YACzD,IAAI,OAAO,EAAE,IAAI,KAAK,UAAU,EAAE,CAAC;gBAClC,OAAO,CAAC,QAAQ,IAAI,UAAU,CAAC,KAAK,CAAC;gBACrC,OAAO;oBACN,IAAI,EAAE,gBAAgB;oBACtB,YAAY,EAAE,UAAU,CAAC,YAAY;oBACrC,KAAK,EAAE,UAAU,CAAC,KAAK;oBACvB,OAAO;iBACP,CAAC;YACH,CAAC;YACD,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACrE,CAAC;QAED,KAAK,cAAc,EAAE,CAAC;YACrB,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC,CAAC;YACzD,IAAI,OAAO,EAAE,IAAI,KAAK,UAAU,EAAE,CAAC;gBAClC,OAAO,CAAC,iBAAiB,GAAG,UAAU,CAAC,gBAAgB,CAAC;gBACxD,OAAO;oBACN,IAAI,EAAE,cAAc;oBACpB,YAAY,EAAE,UAAU,CAAC,YAAY;oBACrC,OAAO,EAAE,OAAO,CAAC,QAAQ;oBACzB,OAAO;iBACP,CAAC;YACH,CAAC;YACD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;QACnE,CAAC;QAED,KAAK,gBAAgB;YACpB,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC,GAAG;gBAC1C,IAAI,EAAE,UAAU;gBAChB,EAAE,EAAE,UAAU,CAAC,EAAE;gBACjB,IAAI,EAAE,UAAU,CAAC,QAAQ;gBACzB,SAAS,EAAE,EAAE;gBACb,WAAW,EAAE,EAAE;aAC0C,CAAC;YAC3D,OAAO,EAAE,IAAI,EAAE,gBAAgB,EAAE,YAAY,EAAE,UAAU,CAAC,YAAY,EAAE,OAAO,EAAE,CAAC;QAEnF,KAAK,gBAAgB,EAAE,CAAC;YACvB,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC,CAAC;YACzD,IAAI,OAAO,EAAE,IAAI,KAAK,UAAU,EAAE,CAAC;gBACjC,OAAe,CAAC,WAAW,IAAI,UAAU,CAAC,KAAK,CAAC;gBACjD,OAAO,CAAC,SAAS,GAAG,kBAAkB,CAAE,OAAe,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC;gBAC3E,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC,GAAG,EAAE,GAAG,OAAO,EAAE,CAAC,CAAC,qBAAqB;gBAChF,OAAO;oBACN,IAAI,EAAE,gBAAgB;oBACtB,YAAY,EAAE,UAAU,CAAC,YAAY;oBACrC,KAAK,EAAE,UAAU,CAAC,KAAK;oBACvB,OAAO;iBACP,CAAC;YACH,CAAC;YACD,MAAM,IAAI,KAAK,CAAC,kDAAkD,CAAC,CAAC;QACrE,CAAC;QAED,KAAK,cAAc,EAAE,CAAC;YACrB,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC,CAAC;YACzD,IAAI,OAAO,EAAE,IAAI,KAAK,UAAU,EAAE,CAAC;gBAClC,OA
AQ,OAAe,CAAC,WAAW,CAAC;gBACpC,OAAO;oBACN,IAAI,EAAE,cAAc;oBACpB,YAAY,EAAE,UAAU,CAAC,YAAY;oBACrC,QAAQ,EAAE,OAAO;oBACjB,OAAO;iBACP,CAAC;YACH,CAAC;YACD,OAAO,SAAS,CAAC;QAClB,CAAC;QAED,KAAK,MAAM;YACV,OAAO,CAAC,UAAU,GAAG,UAAU,CAAC,MAAM,CAAC;YACvC,OAAO,CAAC,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC;YACjC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC;QAEtE,KAAK,OAAO;YACX,OAAO,CAAC,UAAU,GAAG,UAAU,CAAC,MAAM,CAAC;YACvC,OAAO,CAAC,YAAY,GAAG,UAAU,CAAC,YAAY,CAAC;YAC/C,OAAO,CAAC,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC;YACjC,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,UAAU,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,CAAC;QAErE,SAAS,CAAC;YACT,MAAM,gBAAgB,GAAU,UAAU,CAAC;YAC3C,OAAO,CAAC,IAAI,CAAC,+BAAgC,UAAkB,CAAC,IAAI,EAAE,CAAC,CAAC;YACxE,OAAO,SAAS,CAAC;QAClB,CAAC;IACF,CAAC;AAAA,CACD","sourcesContent":["/**\n * Proxy stream function for apps that route LLM calls through a server.\n * The server manages auth and proxies requests to LLM providers.\n */\n\nimport {\n\ttype AssistantMessage,\n\ttype AssistantMessageEvent,\n\ttype Context,\n\tEventStream,\n\ttype Model,\n\ttype SimpleStreamOptions,\n\ttype StopReason,\n\ttype ToolCall,\n} from \"@mariozechner/pi-ai\";\n// Internal import for JSON parsing utility\nimport { parseStreamingJson } from \"@mariozechner/pi-ai/dist/utils/json-parse.js\";\n\n// Create stream class matching ProxyMessageEventStream\nclass ProxyMessageEventStream extends EventStream<AssistantMessageEvent, AssistantMessage> {\n\tconstructor() {\n\t\tsuper(\n\t\t\t(event) => event.type === \"done\" || event.type === \"error\",\n\t\t\t(event) => {\n\t\t\t\tif (event.type === \"done\") return event.message;\n\t\t\t\tif (event.type === \"error\") return event.error;\n\t\t\t\tthrow new Error(\"Unexpected event type\");\n\t\t\t},\n\t\t);\n\t}\n}\n\n/**\n * Proxy event types - server sends these with partial field stripped to reduce bandwidth.\n */\nexport type ProxyAssistantMessageEvent =\n\t| { type: \"start\" }\n\t| { type: \"text_start\"; contentIndex: number }\n\t| { type: \"text_delta\"; contentIndex: number; delta: string }\n\t| { type: \"text_end\"; contentIndex: number; contentSignature?: string }\n\t| { type: \"thinking_start\"; contentIndex: number }\n\t| { type: \"thinking_delta\"; contentIndex: number; delta: string }\n\t| { type: \"thinking_end\"; contentIndex: number; contentSignature?: string }\n\t| { type: \"toolcall_start\"; contentIndex: number; id: string; toolName: string }\n\t| { type: \"toolcall_delta\"; contentIndex: number; delta: string }\n\t| { type: \"toolcall_end\"; contentIndex: number }\n\t| {\n\t\t\ttype: \"done\";\n\t\t\treason: Extract<StopReason, \"stop\" | \"length\" | \"toolUse\">;\n\t\t\tusage: AssistantMessage[\"usage\"];\n\t }\n\t| {\n\t\t\ttype: \"error\";\n\t\t\treason: Extract<StopReason, \"aborted\" | \"error\">;\n\t\t\terrorMessage?: string;\n\t\t\tusage: AssistantMessage[\"usage\"];\n\t };\n\nexport interface ProxyStreamOptions extends SimpleStreamOptions {\n\t/** Auth token for the proxy server */\n\tauthToken: string;\n\t/** Proxy server URL (e.g., \"https://genai.example.com\") */\n\tproxyUrl: string;\n}\n\n/**\n * Stream function that proxies through a server instead of calling LLM providers directly.\n * The server strips the partial field from delta events to reduce bandwidth.\n * We reconstruct the partial message client-side.\n *\n * Use this as the `streamFn` option when creating an Agent that needs to go through a proxy.\n *\n * @example\n * ```typescript\n * const agent = new Agent({\n * streamFn: (model, context, options) =>\n * 
streamProxy(model, context, {\n * ...options,\n * authToken: await getAuthToken(),\n * proxyUrl: \"https://genai.example.com\",\n * }),\n * });\n * ```\n */\nexport function streamProxy(model: Model<any>, context: Context, options: ProxyStreamOptions): ProxyMessageEventStream {\n\tconst stream = new ProxyMessageEventStream();\n\n\t(async () => {\n\t\t// Initialize the partial message that we'll build up from events\n\t\tconst partial: AssistantMessage = {\n\t\t\trole: \"assistant\",\n\t\t\tstopReason: \"stop\",\n\t\t\tcontent: [],\n\t\t\tapi: model.api,\n\t\t\tprovider: model.provider,\n\t\t\tmodel: model.id,\n\t\t\tusage: {\n\t\t\t\tinput: 0,\n\t\t\t\toutput: 0,\n\t\t\t\tcacheRead: 0,\n\t\t\t\tcacheWrite: 0,\n\t\t\t\ttotalTokens: 0,\n\t\t\t\tcost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },\n\t\t\t},\n\t\t\ttimestamp: Date.now(),\n\t\t};\n\n\t\tlet reader: ReadableStreamDefaultReader<Uint8Array> | undefined;\n\n\t\tconst abortHandler = () => {\n\t\t\tif (reader) {\n\t\t\t\treader.cancel(\"Request aborted by user\").catch(() => {});\n\t\t\t}\n\t\t};\n\n\t\tif (options.signal) {\n\t\t\toptions.signal.addEventListener(\"abort\", abortHandler);\n\t\t}\n\n\t\ttry {\n\t\t\tconst response = await fetch(`${options.proxyUrl}/api/stream`, {\n\t\t\t\tmethod: \"POST\",\n\t\t\t\theaders: {\n\t\t\t\t\tAuthorization: `Bearer ${options.authToken}`,\n\t\t\t\t\t\"Content-Type\": \"application/json\",\n\t\t\t\t},\n\t\t\t\tbody: JSON.stringify({\n\t\t\t\t\tmodel,\n\t\t\t\t\tcontext,\n\t\t\t\t\toptions: {\n\t\t\t\t\t\ttemperature: options.temperature,\n\t\t\t\t\t\tmaxTokens: options.maxTokens,\n\t\t\t\t\t\treasoning: options.reasoning,\n\t\t\t\t\t},\n\t\t\t\t}),\n\t\t\t\tsignal: options.signal,\n\t\t\t});\n\n\t\t\tif (!response.ok) {\n\t\t\t\tlet errorMessage = `Proxy error: ${response.status} ${response.statusText}`;\n\t\t\t\ttry {\n\t\t\t\t\tconst errorData = (await response.json()) as { error?: string };\n\t\t\t\t\tif (errorData.error) {\n\t\t\t\t\t\terrorMessage = `Proxy error: ${errorData.error}`;\n\t\t\t\t\t}\n\t\t\t\t} catch {\n\t\t\t\t\t// Couldn't parse error response\n\t\t\t\t}\n\t\t\t\tthrow new Error(errorMessage);\n\t\t\t}\n\n\t\t\treader = response.body!.getReader();\n\t\t\tconst decoder = new TextDecoder();\n\t\t\tlet buffer = \"\";\n\n\t\t\twhile (true) {\n\t\t\t\tconst { done, value } = await reader.read();\n\t\t\t\tif (done) break;\n\n\t\t\t\tif (options.signal?.aborted) {\n\t\t\t\t\tthrow new Error(\"Request aborted by user\");\n\t\t\t\t}\n\n\t\t\t\tbuffer += decoder.decode(value, { stream: true });\n\t\t\t\tconst lines = buffer.split(\"\\n\");\n\t\t\t\tbuffer = lines.pop() || \"\";\n\n\t\t\t\tfor (const line of lines) {\n\t\t\t\t\tif (line.startsWith(\"data: \")) {\n\t\t\t\t\t\tconst data = line.slice(6).trim();\n\t\t\t\t\t\tif (data) {\n\t\t\t\t\t\t\tconst proxyEvent = JSON.parse(data) as ProxyAssistantMessageEvent;\n\t\t\t\t\t\t\tconst event = processProxyEvent(proxyEvent, partial);\n\t\t\t\t\t\t\tif (event) {\n\t\t\t\t\t\t\t\tstream.push(event);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (options.signal?.aborted) {\n\t\t\t\tthrow new Error(\"Request aborted by user\");\n\t\t\t}\n\n\t\t\tstream.end();\n\t\t} catch (error) {\n\t\t\tconst errorMessage = error instanceof Error ? error.message : String(error);\n\t\t\tconst reason = options.signal?.aborted ? 
\"aborted\" : \"error\";\n\t\t\tpartial.stopReason = reason;\n\t\t\tpartial.errorMessage = errorMessage;\n\t\t\tstream.push({\n\t\t\t\ttype: \"error\",\n\t\t\t\treason,\n\t\t\t\terror: partial,\n\t\t\t});\n\t\t\tstream.end();\n\t\t} finally {\n\t\t\tif (options.signal) {\n\t\t\t\toptions.signal.removeEventListener(\"abort\", abortHandler);\n\t\t\t}\n\t\t}\n\t})();\n\n\treturn stream;\n}\n\n/**\n * Process a proxy event and update the partial message.\n */\nfunction processProxyEvent(\n\tproxyEvent: ProxyAssistantMessageEvent,\n\tpartial: AssistantMessage,\n): AssistantMessageEvent | undefined {\n\tswitch (proxyEvent.type) {\n\t\tcase \"start\":\n\t\t\treturn { type: \"start\", partial };\n\n\t\tcase \"text_start\":\n\t\t\tpartial.content[proxyEvent.contentIndex] = { type: \"text\", text: \"\" };\n\t\t\treturn { type: \"text_start\", contentIndex: proxyEvent.contentIndex, partial };\n\n\t\tcase \"text_delta\": {\n\t\t\tconst content = partial.content[proxyEvent.contentIndex];\n\t\t\tif (content?.type === \"text\") {\n\t\t\t\tcontent.text += proxyEvent.delta;\n\t\t\t\treturn {\n\t\t\t\t\ttype: \"text_delta\",\n\t\t\t\t\tcontentIndex: proxyEvent.contentIndex,\n\t\t\t\t\tdelta: proxyEvent.delta,\n\t\t\t\t\tpartial,\n\t\t\t\t};\n\t\t\t}\n\t\t\tthrow new Error(\"Received text_delta for non-text content\");\n\t\t}\n\n\t\tcase \"text_end\": {\n\t\t\tconst content = partial.content[proxyEvent.contentIndex];\n\t\t\tif (content?.type === \"text\") {\n\t\t\t\tcontent.textSignature = proxyEvent.contentSignature;\n\t\t\t\treturn {\n\t\t\t\t\ttype: \"text_end\",\n\t\t\t\t\tcontentIndex: proxyEvent.contentIndex,\n\t\t\t\t\tcontent: content.text,\n\t\t\t\t\tpartial,\n\t\t\t\t};\n\t\t\t}\n\t\t\tthrow new Error(\"Received text_end for non-text content\");\n\t\t}\n\n\t\tcase \"thinking_start\":\n\t\t\tpartial.content[proxyEvent.contentIndex] = { type: \"thinking\", thinking: \"\" };\n\t\t\treturn { type: \"thinking_start\", contentIndex: proxyEvent.contentIndex, partial };\n\n\t\tcase \"thinking_delta\": {\n\t\t\tconst content = partial.content[proxyEvent.contentIndex];\n\t\t\tif (content?.type === \"thinking\") {\n\t\t\t\tcontent.thinking += proxyEvent.delta;\n\t\t\t\treturn {\n\t\t\t\t\ttype: \"thinking_delta\",\n\t\t\t\t\tcontentIndex: proxyEvent.contentIndex,\n\t\t\t\t\tdelta: proxyEvent.delta,\n\t\t\t\t\tpartial,\n\t\t\t\t};\n\t\t\t}\n\t\t\tthrow new Error(\"Received thinking_delta for non-thinking content\");\n\t\t}\n\n\t\tcase \"thinking_end\": {\n\t\t\tconst content = partial.content[proxyEvent.contentIndex];\n\t\t\tif (content?.type === \"thinking\") {\n\t\t\t\tcontent.thinkingSignature = proxyEvent.contentSignature;\n\t\t\t\treturn {\n\t\t\t\t\ttype: \"thinking_end\",\n\t\t\t\t\tcontentIndex: proxyEvent.contentIndex,\n\t\t\t\t\tcontent: content.thinking,\n\t\t\t\t\tpartial,\n\t\t\t\t};\n\t\t\t}\n\t\t\tthrow new Error(\"Received thinking_end for non-thinking content\");\n\t\t}\n\n\t\tcase \"toolcall_start\":\n\t\t\tpartial.content[proxyEvent.contentIndex] = {\n\t\t\t\ttype: \"toolCall\",\n\t\t\t\tid: proxyEvent.id,\n\t\t\t\tname: proxyEvent.toolName,\n\t\t\t\targuments: {},\n\t\t\t\tpartialJson: \"\",\n\t\t\t} satisfies ToolCall & { partialJson: string } as ToolCall;\n\t\t\treturn { type: \"toolcall_start\", contentIndex: proxyEvent.contentIndex, partial };\n\n\t\tcase \"toolcall_delta\": {\n\t\t\tconst content = partial.content[proxyEvent.contentIndex];\n\t\t\tif (content?.type === \"toolCall\") {\n\t\t\t\t(content as any).partialJson += proxyEvent.delta;\n\t\t\t\tcontent.arguments = 
parseStreamingJson((content as any).partialJson) || {};\n\t\t\t\tpartial.content[proxyEvent.contentIndex] = { ...content }; // Trigger reactivity\n\t\t\t\treturn {\n\t\t\t\t\ttype: \"toolcall_delta\",\n\t\t\t\t\tcontentIndex: proxyEvent.contentIndex,\n\t\t\t\t\tdelta: proxyEvent.delta,\n\t\t\t\t\tpartial,\n\t\t\t\t};\n\t\t\t}\n\t\t\tthrow new Error(\"Received toolcall_delta for non-toolCall content\");\n\t\t}\n\n\t\tcase \"toolcall_end\": {\n\t\t\tconst content = partial.content[proxyEvent.contentIndex];\n\t\t\tif (content?.type === \"toolCall\") {\n\t\t\t\tdelete (content as any).partialJson;\n\t\t\t\treturn {\n\t\t\t\t\ttype: \"toolcall_end\",\n\t\t\t\t\tcontentIndex: proxyEvent.contentIndex,\n\t\t\t\t\ttoolCall: content,\n\t\t\t\t\tpartial,\n\t\t\t\t};\n\t\t\t}\n\t\t\treturn undefined;\n\t\t}\n\n\t\tcase \"done\":\n\t\t\tpartial.stopReason = proxyEvent.reason;\n\t\t\tpartial.usage = proxyEvent.usage;\n\t\t\treturn { type: \"done\", reason: proxyEvent.reason, message: partial };\n\n\t\tcase \"error\":\n\t\t\tpartial.stopReason = proxyEvent.reason;\n\t\t\tpartial.errorMessage = proxyEvent.errorMessage;\n\t\t\tpartial.usage = proxyEvent.usage;\n\t\t\treturn { type: \"error\", reason: proxyEvent.reason, error: partial };\n\n\t\tdefault: {\n\t\t\tconst _exhaustiveCheck: never = proxyEvent;\n\t\t\tconsole.warn(`Unhandled proxy event type: ${(proxyEvent as any).type}`);\n\t\t\treturn undefined;\n\t\t}\n\t}\n}\n"]}
package/dist/types.d.ts CHANGED
@@ -1,29 +1,74 @@
1
- import type { AgentTool, AssistantMessage, AssistantMessageEvent, Message, Model, ToolResultMessage, UserMessage } from "@mariozechner/pi-ai";
1
+ import type { AssistantMessageEvent, ImageContent, Message, Model, SimpleStreamOptions, streamSimple, TextContent, Tool, ToolResultMessage } from "@mariozechner/pi-ai";
2
+ import type { Static, TSchema } from "@sinclair/typebox";
3
+ /** Stream function - can return sync or Promise for async config lookup */
4
+ export type StreamFn = (...args: Parameters<typeof streamSimple>) => ReturnType<typeof streamSimple> | Promise<ReturnType<typeof streamSimple>>;
2
5
  /**
3
- * Attachment type definition.
4
- * Processing is done by consumers (e.g., document extraction in web-ui).
6
+ * Configuration for the agent loop.
5
7
  */
6
- export interface Attachment {
7
- id: string;
8
- type: "image" | "document";
9
- fileName: string;
10
- mimeType: string;
11
- size: number;
12
- content: string;
13
- extractedText?: string;
14
- preview?: string;
8
+ export interface AgentLoopConfig extends SimpleStreamOptions {
9
+ model: Model<any>;
10
+ /**
11
+ * Converts AgentMessage[] to LLM-compatible Message[] before each LLM call.
12
+ *
13
+ * Each AgentMessage must be converted to a UserMessage, AssistantMessage, or ToolResultMessage
14
+ * that the LLM can understand. AgentMessages that cannot be converted (e.g., UI-only notifications,
15
+ * status messages) should be filtered out.
16
+ *
17
+ * @example
18
+ * ```typescript
19
+ * convertToLlm: (messages) => messages.flatMap(m => {
20
+ * if (m.role === "hookMessage") {
21
+ * // Convert custom message to user message
22
+ * return [{ role: "user", content: m.content, timestamp: m.timestamp }];
23
+ * }
24
+ * if (m.role === "notification") {
25
+ * // Filter out UI-only messages
26
+ * return [];
27
+ * }
28
+ * // Pass through standard LLM messages
29
+ * return [m];
30
+ * })
31
+ * ```
32
+ */
33
+ convertToLlm: (messages: AgentMessage[]) => Message[] | Promise<Message[]>;
34
+ /**
35
+ * Optional transform applied to the context before `convertToLlm`.
36
+ *
37
+ * Use this for operations that work at the AgentMessage level:
38
+ * - Context window management (pruning old messages)
39
+ * - Injecting context from external sources
40
+ *
41
+ * @example
42
+ * ```typescript
43
+ * transformContext: async (messages) => {
44
+ * if (estimateTokens(messages) > MAX_TOKENS) {
45
+ * return pruneOldMessages(messages);
46
+ * }
47
+ * return messages;
48
+ * }
49
+ * ```
50
+ */
51
+ transformContext?: (messages: AgentMessage[], signal?: AbortSignal) => Promise<AgentMessage[]>;
52
+ /**
53
+ * Resolves an API key dynamically for each LLM call.
54
+ *
55
+ * Useful for short-lived OAuth tokens (e.g., GitHub Copilot) that may expire
56
+ * during long-running tool execution phases.
57
+ */
58
+ getApiKey?: (provider: string) => Promise<string | undefined> | string | undefined;
59
+ /**
60
+ * Returns queued messages to inject into the conversation.
61
+ *
62
+ * Called after each turn to check for user interruptions or injected messages.
63
+ * If messages are returned, they're added to the context before the next LLM call.
64
+ */
65
+ getQueuedMessages?: () => Promise<AgentMessage[]>;
15
66
  }
16
67
  /**
17
68
  * Thinking/reasoning level for models that support it.
18
69
  * Note: "xhigh" is only supported by OpenAI gpt-5.1-codex-max, gpt-5.2, and gpt-5.2-codex models.
19
70
  */
20
71
  export type ThinkingLevel = "off" | "minimal" | "low" | "medium" | "high" | "xhigh";
21
- /**
22
- * User message with optional attachments.
23
- */
24
- export type UserMessageWithAttachments = UserMessage & {
25
- attachments?: Attachment[];
26
- };
27
72
  /**
28
73
  * Extensible interface for custom app messages.
29
74
  * Apps can extend via declaration merging:
@@ -31,21 +76,21 @@ export type UserMessageWithAttachments = UserMessage & {
31
76
  * @example
32
77
  * ```typescript
33
78
  * declare module "@mariozechner/agent" {
34
- * interface CustomMessages {
79
+ * interface CustomAgentMessages {
35
80
  * artifact: ArtifactMessage;
36
81
  * notification: NotificationMessage;
37
82
  * }
38
83
  * }
39
84
  * ```
40
85
  */
41
- export interface CustomMessages {
86
+ export interface CustomAgentMessages {
42
87
  }
43
88
  /**
44
- * AppMessage: Union of LLM messages + attachments + custom messages.
89
+ * AgentMessage: Union of LLM messages + custom messages.
45
90
  * This abstraction allows apps to add custom message types while maintaining
46
91
  * type safety and compatibility with the base LLM messages.
47
92
  */
48
- export type AppMessage = AssistantMessage | UserMessageWithAttachments | Message | CustomMessages[keyof CustomMessages];
93
+ export type AgentMessage = Message | CustomAgentMessages[keyof CustomAgentMessages];
49
94
  /**
50
95
  * Agent state containing all configuration and conversation data.
51
96
  */
@@ -54,12 +99,26 @@ export interface AgentState {
54
99
  model: Model<any>;
55
100
  thinkingLevel: ThinkingLevel;
56
101
  tools: AgentTool<any>[];
57
- messages: AppMessage[];
102
+ messages: AgentMessage[];
58
103
  isStreaming: boolean;
59
- streamMessage: Message | null;
104
+ streamMessage: AgentMessage | null;
60
105
  pendingToolCalls: Set<string>;
61
106
  error?: string;
62
107
  }
108
+ export interface AgentToolResult<T> {
109
+ content: (TextContent | ImageContent)[];
110
+ details: T;
111
+ }
112
+ export type AgentToolUpdateCallback<T = any> = (partialResult: AgentToolResult<T>) => void;
113
+ export interface AgentTool<TParameters extends TSchema = TSchema, TDetails = any> extends Tool<TParameters> {
114
+ label: string;
115
+ execute: (toolCallId: string, params: Static<TParameters>, signal?: AbortSignal, onUpdate?: AgentToolUpdateCallback<TDetails>) => Promise<AgentToolResult<TDetails>>;
116
+ }
117
+ export interface AgentContext {
118
+ systemPrompt: string;
119
+ messages: AgentMessage[];
120
+ tools?: AgentTool<any>[];
121
+ }
63
122
  /**
64
123
  * Events emitted by the Agent for UI updates.
65
124
  * These events provide fine-grained lifecycle information for messages, turns, and tool executions.
@@ -68,23 +127,23 @@ export type AgentEvent = {
68
127
  type: "agent_start";
69
128
  } | {
70
129
  type: "agent_end";
71
- messages: AppMessage[];
130
+ messages: AgentMessage[];
72
131
  } | {
73
132
  type: "turn_start";
74
133
  } | {
75
134
  type: "turn_end";
76
- message: AppMessage;
135
+ message: AgentMessage;
77
136
  toolResults: ToolResultMessage[];
78
137
  } | {
79
138
  type: "message_start";
80
- message: AppMessage;
139
+ message: AgentMessage;
81
140
  } | {
82
141
  type: "message_update";
83
- message: AppMessage;
142
+ message: AgentMessage;
84
143
  assistantMessageEvent: AssistantMessageEvent;
85
144
  } | {
86
145
  type: "message_end";
87
- message: AppMessage;
146
+ message: AgentMessage;
88
147
  } | {
89
148
  type: "tool_execution_start";
90
149
  toolCallId: string;
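The `CustomAgentMessages` and `convertToLlm` docstrings above combine as in the following sketch. The `notification` shape is invented for illustration, the `declare module` specifier is copied from the docstring, and the root re-export of `AgentMessage` is an assumption.

```typescript
// App-level sketch: register a UI-only "notification" message via declaration
// merging, then filter it out in convertToLlm so only LLM-compatible messages
// (user/assistant/toolResult) reach the provider.
import type { AgentMessage } from "@mariozechner/pi-agent-core";
import type { Message } from "@mariozechner/pi-ai";

interface NotificationMessage {
	role: "notification";
	text: string;
	timestamp: number;
}

declare module "@mariozechner/agent" {
	interface CustomAgentMessages {
		notification: NotificationMessage;
	}
}

const convertToLlm = (messages: AgentMessage[]): Message[] =>
	messages.flatMap((m) => ((m as { role: string }).role === "notification" ? [] : [m as Message]));
```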
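The new `AgentTool` contract (a `label` plus an `execute` that can stream partial `AgentToolResult`s through `onUpdate`) can be driven as sketched below; `runTool` and its logging are illustrative, and the root re-exports are again assumed.

```typescript
// Runs a tool call and surfaces streaming updates: onUpdate receives partial
// AgentToolResult values while the tool executes; the awaited value is the
// final result (content blocks for the LLM, details for UI display).
import type { AgentTool, AgentToolResult } from "@mariozechner/pi-agent-core";

async function runTool(tool: AgentTool, toolCallId: string, args: unknown, signal?: AbortSignal) {
	const onUpdate = (partial: AgentToolResult<any>) => {
		console.log(`[${tool.label}] update:`, partial.details);
	};
	const result = await tool.execute(toolCallId, args as any, signal, onUpdate);
	return result.content; // (TextContent | ImageContent)[]
}
```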
package/dist/types.d.ts.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACX,SAAS,EACT,gBAAgB,EAChB,qBAAqB,EACrB,OAAO,EACP,KAAK,EACL,iBAAiB,EACjB,WAAW,EACX,MAAM,qBAAqB,CAAC;AAE7B;;;GAGG;AACH,MAAM,WAAW,UAAU;IAC1B,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,OAAO,GAAG,UAAU,CAAC;IAC3B,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,OAAO,CAAC,EAAE,MAAM,CAAC;CACjB;AAED;;;GAGG;AACH,MAAM,MAAM,aAAa,GAAG,KAAK,GAAG,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;AAEpF;;GAEG;AACH,MAAM,MAAM,0BAA0B,GAAG,WAAW,GAAG;IAAE,WAAW,CAAC,EAAE,UAAU,EAAE,CAAA;CAAE,CAAC;AAEtF;;;;;;;;;;;;;GAaG;AACH,MAAM,WAAW,cAAc;CAE9B;AAED;;;;GAIG;AACH,MAAM,MAAM,UAAU,GACnB,gBAAgB,GAChB,0BAA0B,GAC1B,OAAO,GACP,cAAc,CAAC,MAAM,cAAc,CAAC,CAAC;AAExC;;GAEG;AACH,MAAM,WAAW,UAAU;IAC1B,YAAY,EAAE,MAAM,CAAC;IACrB,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC;IAClB,aAAa,EAAE,aAAa,CAAC;IAC7B,KAAK,EAAE,SAAS,CAAC,GAAG,CAAC,EAAE,CAAC;IACxB,QAAQ,EAAE,UAAU,EAAE,CAAC;IACvB,WAAW,EAAE,OAAO,CAAC;IACrB,aAAa,EAAE,OAAO,GAAG,IAAI,CAAC;IAC9B,gBAAgB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IAC9B,KAAK,CAAC,EAAE,MAAM,CAAC;CACf;AAED;;;GAGG;AACH,MAAM,MAAM,UAAU,GAEnB;IAAE,IAAI,EAAE,aAAa,CAAA;CAAE,GACvB;IAAE,IAAI,EAAE,WAAW,CAAC;IAAC,QAAQ,EAAE,UAAU,EAAE,CAAA;CAAE,GAE7C;IAAE,IAAI,EAAE,YAAY,CAAA;CAAE,GACtB;IAAE,IAAI,EAAE,UAAU,CAAC;IAAC,OAAO,EAAE,UAAU,CAAC;IAAC,WAAW,EAAE,iBAAiB,EAAE,CAAA;CAAE,GAE3E;IAAE,IAAI,EAAE,eAAe,CAAC;IAAC,OAAO,EAAE,UAAU,CAAA;CAAE,GAE9C;IAAE,IAAI,EAAE,gBAAgB,CAAC;IAAC,OAAO,EAAE,UAAU,CAAC;IAAC,qBAAqB,EAAE,qBAAqB,CAAA;CAAE,GAC7F;IAAE,IAAI,EAAE,aAAa,CAAC;IAAC,OAAO,EAAE,UAAU,CAAA;CAAE,GAE5C;IAAE,IAAI,EAAE,sBAAsB,CAAC;IAAC,UAAU,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,GAAG,CAAA;CAAE,GACjF;IAAE,IAAI,EAAE,uBAAuB,CAAC;IAAC,UAAU,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,GAAG,CAAC;IAAC,aAAa,EAAE,GAAG,CAAA;CAAE,GACtG;IAAE,IAAI,EAAE,oBAAoB,CAAC;IAAC,UAAU,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,GAAG,CAAC;IAAC,OAAO,EAAE,OAAO,CAAA;CAAE,CAAC","sourcesContent":["import type {\n\tAgentTool,\n\tAssistantMessage,\n\tAssistantMessageEvent,\n\tMessage,\n\tModel,\n\tToolResultMessage,\n\tUserMessage,\n} from \"@mariozechner/pi-ai\";\n\n/**\n * Attachment type definition.\n * Processing is done by consumers (e.g., document extraction in web-ui).\n */\nexport interface Attachment {\n\tid: string;\n\ttype: \"image\" | \"document\";\n\tfileName: string;\n\tmimeType: string;\n\tsize: number;\n\tcontent: string; // base64 encoded (without data URL prefix)\n\textractedText?: string; // For documents\n\tpreview?: string; // base64 image preview\n}\n\n/**\n * Thinking/reasoning level for models that support it.\n * Note: \"xhigh\" is only supported by OpenAI gpt-5.1-codex-max, gpt-5.2, and gpt-5.2-codex models.\n */\nexport type ThinkingLevel = \"off\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\n/**\n * User message with optional attachments.\n */\nexport type UserMessageWithAttachments = UserMessage & { attachments?: Attachment[] };\n\n/**\n * Extensible interface for custom app messages.\n * Apps can extend via declaration merging:\n *\n * @example\n * ```typescript\n * declare module \"@mariozechner/agent\" {\n * interface CustomMessages {\n * artifact: ArtifactMessage;\n * notification: NotificationMessage;\n * }\n * }\n * ```\n */\nexport interface CustomMessages {\n\t// Empty by default - apps extend via declaration merging\n}\n\n/**\n * AppMessage: Union of LLM messages + attachments + custom messages.\n * This 
abstraction allows apps to add custom message types while maintaining\n * type safety and compatibility with the base LLM messages.\n */\nexport type AppMessage =\n\t| AssistantMessage\n\t| UserMessageWithAttachments\n\t| Message // Includes ToolResultMessage\n\t| CustomMessages[keyof CustomMessages];\n\n/**\n * Agent state containing all configuration and conversation data.\n */\nexport interface AgentState {\n\tsystemPrompt: string;\n\tmodel: Model<any>;\n\tthinkingLevel: ThinkingLevel;\n\ttools: AgentTool<any>[];\n\tmessages: AppMessage[]; // Can include attachments + custom message types\n\tisStreaming: boolean;\n\tstreamMessage: Message | null;\n\tpendingToolCalls: Set<string>;\n\terror?: string;\n}\n\n/**\n * Events emitted by the Agent for UI updates.\n * These events provide fine-grained lifecycle information for messages, turns, and tool executions.\n */\nexport type AgentEvent =\n\t// Agent lifecycle\n\t| { type: \"agent_start\" }\n\t| { type: \"agent_end\"; messages: AppMessage[] }\n\t// Turn lifecycle - a turn is one assistant response + any tool calls/results\n\t| { type: \"turn_start\" }\n\t| { type: \"turn_end\"; message: AppMessage; toolResults: ToolResultMessage[] }\n\t// Message lifecycle - emitted for user, assistant, and toolResult messages\n\t| { type: \"message_start\"; message: AppMessage }\n\t// Only emitted for assistant messages during streaming\n\t| { type: \"message_update\"; message: AppMessage; assistantMessageEvent: AssistantMessageEvent }\n\t| { type: \"message_end\"; message: AppMessage }\n\t// Tool execution lifecycle\n\t| { type: \"tool_execution_start\"; toolCallId: string; toolName: string; args: any }\n\t| { type: \"tool_execution_update\"; toolCallId: string; toolName: string; args: any; partialResult: any }\n\t| { type: \"tool_execution_end\"; toolCallId: string; toolName: string; result: any; isError: boolean };\n"]}
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACX,qBAAqB,EACrB,YAAY,EACZ,OAAO,EACP,KAAK,EACL,mBAAmB,EACnB,YAAY,EACZ,WAAW,EACX,IAAI,EACJ,iBAAiB,EACjB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,KAAK,EAAE,MAAM,EAAE,OAAO,EAAE,MAAM,mBAAmB,CAAC;AAEzD,2EAA2E;AAC3E,MAAM,MAAM,QAAQ,GAAG,CACtB,GAAG,IAAI,EAAE,UAAU,CAAC,OAAO,YAAY,CAAC,KACpC,UAAU,CAAC,OAAO,YAAY,CAAC,GAAG,OAAO,CAAC,UAAU,CAAC,OAAO,YAAY,CAAC,CAAC,CAAC;AAEhF;;GAEG;AACH,MAAM,WAAW,eAAgB,SAAQ,mBAAmB;IAC3D,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC;IAElB;;;;;;;;;;;;;;;;;;;;;;OAsBG;IACH,YAAY,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,KAAK,OAAO,EAAE,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC;IAE3E;;;;;;;;;;;;;;;;OAgBG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,EAAE,MAAM,CAAC,EAAE,WAAW,KAAK,OAAO,CAAC,YAAY,EAAE,CAAC,CAAC;IAE/F;;;;;OAKG;IACH,SAAS,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,GAAG,MAAM,GAAG,SAAS,CAAC;IAEnF;;;;;OAKG;IACH,iBAAiB,CAAC,EAAE,MAAM,OAAO,CAAC,YAAY,EAAE,CAAC,CAAC;CAClD;AAED;;;GAGG;AACH,MAAM,MAAM,aAAa,GAAG,KAAK,GAAG,SAAS,GAAG,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,OAAO,CAAC;AAEpF;;;;;;;;;;;;;GAaG;AACH,MAAM,WAAW,mBAAmB;CAEnC;AAED;;;;GAIG;AACH,MAAM,MAAM,YAAY,GAAG,OAAO,GAAG,mBAAmB,CAAC,MAAM,mBAAmB,CAAC,CAAC;AAEpF;;GAEG;AACH,MAAM,WAAW,UAAU;IAC1B,YAAY,EAAE,MAAM,CAAC;IACrB,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC;IAClB,aAAa,EAAE,aAAa,CAAC;IAC7B,KAAK,EAAE,SAAS,CAAC,GAAG,CAAC,EAAE,CAAC;IACxB,QAAQ,EAAE,YAAY,EAAE,CAAC;IACzB,WAAW,EAAE,OAAO,CAAC;IACrB,aAAa,EAAE,YAAY,GAAG,IAAI,CAAC;IACnC,gBAAgB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IAC9B,KAAK,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,eAAe,CAAC,CAAC;IAEjC,OAAO,EAAE,CAAC,WAAW,GAAG,YAAY,CAAC,EAAE,CAAC;IAExC,OAAO,EAAE,CAAC,CAAC;CACX;AAGD,MAAM,MAAM,uBAAuB,CAAC,CAAC,GAAG,GAAG,IAAI,CAAC,aAAa,EAAE,eAAe,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC;AAG3F,MAAM,WAAW,SAAS,CAAC,WAAW,SAAS,OAAO,GAAG,OAAO,EAAE,QAAQ,GAAG,GAAG,CAAE,SAAQ,IAAI,CAAC,WAAW,CAAC;IAE1G,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,CACR,UAAU,EAAE,MAAM,EAClB,MAAM,EAAE,MAAM,CAAC,WAAW,CAAC,EAC3B,MAAM,CAAC,EAAE,WAAW,EACpB,QAAQ,CAAC,EAAE,uBAAuB,CAAC,QAAQ,CAAC,KACxC,OAAO,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC;CACxC;AAGD,MAAM,WAAW,YAAY;IAC5B,YAAY,EAAE,MAAM,CAAC;IACrB,QAAQ,EAAE,YAAY,EAAE,CAAC;IACzB,KAAK,CAAC,EAAE,SAAS,CAAC,GAAG,CAAC,EAAE,CAAC;CACzB;AAED;;;GAGG;AACH,MAAM,MAAM,UAAU,GAEnB;IAAE,IAAI,EAAE,aAAa,CAAA;CAAE,GACvB;IAAE,IAAI,EAAE,WAAW,CAAC;IAAC,QAAQ,EAAE,YAAY,EAAE,CAAA;CAAE,GAE/C;IAAE,IAAI,EAAE,YAAY,CAAA;CAAE,GACtB;IAAE,IAAI,EAAE,UAAU,CAAC;IAAC,OAAO,EAAE,YAAY,CAAC;IAAC,WAAW,EAAE,iBAAiB,EAAE,CAAA;CAAE,GAE7E;IAAE,IAAI,EAAE,eAAe,CAAC;IAAC,OAAO,EAAE,YAAY,CAAA;CAAE,GAEhD;IAAE,IAAI,EAAE,gBAAgB,CAAC;IAAC,OAAO,EAAE,YAAY,CAAC;IAAC,qBAAqB,EAAE,qBAAqB,CAAA;CAAE,GAC/F;IAAE,IAAI,EAAE,aAAa,CAAC;IAAC,OAAO,EAAE,YAAY,CAAA;CAAE,GAE9C;IAAE,IAAI,EAAE,sBAAsB,CAAC;IAAC,UAAU,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,GAAG,CAAA;CAAE,GACjF;IAAE,IAAI,EAAE,uBAAuB,CAAC;IAAC,UAAU,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,GAAG,CAAC;IAAC,aAAa,EAAE,GAAG,CAAA;CAAE,GACtG;IAAE,IAAI,EAAE,oBAAoB,CAAC;IAAC,UAAU,EAAE,MAAM,CAAC;IAAC,QAAQ,EAAE,MAAM,CAAC;IAAC,MAAM,EAAE,GAAG,CAAC;IAAC,OAAO,EAAE,OAAO,CAAA;CAAE,CAAC","sourcesContent":["import type {\n\tAssistantMessageEvent,\n\tImageContent,\n\tMessage,\n\tModel,\n\tSimpleStreamOptions,\n\tstreamSimple,\n\tTextContent,\n\tTool,\n\tToolResultMessage,\n} from \"@mariozechner/pi-ai\";\nimport type { Static, TSchema } from \"@sinclair/typebox\";\n\n/** Stream function - can return sync or Promise for async config lookup */\nexport type StreamFn = (\n\t...args: Parameters<typeof streamSimple>\n) => 
ReturnType<typeof streamSimple> | Promise<ReturnType<typeof streamSimple>>;\n\n/**\n * Configuration for the agent loop.\n */\nexport interface AgentLoopConfig extends SimpleStreamOptions {\n\tmodel: Model<any>;\n\n\t/**\n\t * Converts AgentMessage[] to LLM-compatible Message[] before each LLM call.\n\t *\n\t * Each AgentMessage must be converted to a UserMessage, AssistantMessage, or ToolResultMessage\n\t * that the LLM can understand. AgentMessages that cannot be converted (e.g., UI-only notifications,\n\t * status messages) should be filtered out.\n\t *\n\t * @example\n\t * ```typescript\n\t * convertToLlm: (messages) => messages.flatMap(m => {\n\t * if (m.role === \"hookMessage\") {\n\t * // Convert custom message to user message\n\t * return [{ role: \"user\", content: m.content, timestamp: m.timestamp }];\n\t * }\n\t * if (m.role === \"notification\") {\n\t * // Filter out UI-only messages\n\t * return [];\n\t * }\n\t * // Pass through standard LLM messages\n\t * return [m];\n\t * })\n\t * ```\n\t */\n\tconvertToLlm: (messages: AgentMessage[]) => Message[] | Promise<Message[]>;\n\n\t/**\n\t * Optional transform applied to the context before `convertToLlm`.\n\t *\n\t * Use this for operations that work at the AgentMessage level:\n\t * - Context window management (pruning old messages)\n\t * - Injecting context from external sources\n\t *\n\t * @example\n\t * ```typescript\n\t * transformContext: async (messages) => {\n\t * if (estimateTokens(messages) > MAX_TOKENS) {\n\t * return pruneOldMessages(messages);\n\t * }\n\t * return messages;\n\t * }\n\t * ```\n\t */\n\ttransformContext?: (messages: AgentMessage[], signal?: AbortSignal) => Promise<AgentMessage[]>;\n\n\t/**\n\t * Resolves an API key dynamically for each LLM call.\n\t *\n\t * Useful for short-lived OAuth tokens (e.g., GitHub Copilot) that may expire\n\t * during long-running tool execution phases.\n\t */\n\tgetApiKey?: (provider: string) => Promise<string | undefined> | string | undefined;\n\n\t/**\n\t * Returns queued messages to inject into the conversation.\n\t *\n\t * Called after each turn to check for user interruptions or injected messages.\n\t * If messages are returned, they're added to the context before the next LLM call.\n\t */\n\tgetQueuedMessages?: () => Promise<AgentMessage[]>;\n}\n\n/**\n * Thinking/reasoning level for models that support it.\n * Note: \"xhigh\" is only supported by OpenAI gpt-5.1-codex-max, gpt-5.2, and gpt-5.2-codex models.\n */\nexport type ThinkingLevel = \"off\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\n/**\n * Extensible interface for custom app messages.\n * Apps can extend via declaration merging:\n *\n * @example\n * ```typescript\n * declare module \"@mariozechner/agent\" {\n * interface CustomAgentMessages {\n * artifact: ArtifactMessage;\n * notification: NotificationMessage;\n * }\n * }\n * ```\n */\nexport interface CustomAgentMessages {\n\t// Empty by default - apps extend via declaration merging\n}\n\n/**\n * AgentMessage: Union of LLM messages + custom messages.\n * This abstraction allows apps to add custom message types while maintaining\n * type safety and compatibility with the base LLM messages.\n */\nexport type AgentMessage = Message | CustomAgentMessages[keyof CustomAgentMessages];\n\n/**\n * Agent state containing all configuration and conversation data.\n */\nexport interface AgentState {\n\tsystemPrompt: string;\n\tmodel: Model<any>;\n\tthinkingLevel: ThinkingLevel;\n\ttools: AgentTool<any>[];\n\tmessages: AgentMessage[]; // Can include 
attachments + custom message types\n\tisStreaming: boolean;\n\tstreamMessage: AgentMessage | null;\n\tpendingToolCalls: Set<string>;\n\terror?: string;\n}\n\nexport interface AgentToolResult<T> {\n\t// Content blocks supporting text and images\n\tcontent: (TextContent | ImageContent)[];\n\t// Details to be displayed in a UI or logged\n\tdetails: T;\n}\n\n// Callback for streaming tool execution updates\nexport type AgentToolUpdateCallback<T = any> = (partialResult: AgentToolResult<T>) => void;\n\n// AgentTool extends Tool but adds the execute function\nexport interface AgentTool<TParameters extends TSchema = TSchema, TDetails = any> extends Tool<TParameters> {\n\t// A human-readable label for the tool to be displayed in UI\n\tlabel: string;\n\texecute: (\n\t\ttoolCallId: string,\n\t\tparams: Static<TParameters>,\n\t\tsignal?: AbortSignal,\n\t\tonUpdate?: AgentToolUpdateCallback<TDetails>,\n\t) => Promise<AgentToolResult<TDetails>>;\n}\n\n// AgentContext is like Context but uses AgentTool\nexport interface AgentContext {\n\tsystemPrompt: string;\n\tmessages: AgentMessage[];\n\ttools?: AgentTool<any>[];\n}\n\n/**\n * Events emitted by the Agent for UI updates.\n * These events provide fine-grained lifecycle information for messages, turns, and tool executions.\n */\nexport type AgentEvent =\n\t// Agent lifecycle\n\t| { type: \"agent_start\" }\n\t| { type: \"agent_end\"; messages: AgentMessage[] }\n\t// Turn lifecycle - a turn is one assistant response + any tool calls/results\n\t| { type: \"turn_start\" }\n\t| { type: \"turn_end\"; message: AgentMessage; toolResults: ToolResultMessage[] }\n\t// Message lifecycle - emitted for user, assistant, and toolResult messages\n\t| { type: \"message_start\"; message: AgentMessage }\n\t// Only emitted for assistant messages during streaming\n\t| { type: \"message_update\"; message: AgentMessage; assistantMessageEvent: AssistantMessageEvent }\n\t| { type: \"message_end\"; message: AgentMessage }\n\t// Tool execution lifecycle\n\t| { type: \"tool_execution_start\"; toolCallId: string; toolName: string; args: any }\n\t| { type: \"tool_execution_update\"; toolCallId: string; toolName: string; args: any; partialResult: any }\n\t| { type: \"tool_execution_end\"; toolCallId: string; toolName: string; result: any; isError: boolean };\n"]}
package/dist/types.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"","sourcesContent":["import type {\n\tAgentTool,\n\tAssistantMessage,\n\tAssistantMessageEvent,\n\tMessage,\n\tModel,\n\tToolResultMessage,\n\tUserMessage,\n} from \"@mariozechner/pi-ai\";\n\n/**\n * Attachment type definition.\n * Processing is done by consumers (e.g., document extraction in web-ui).\n */\nexport interface Attachment {\n\tid: string;\n\ttype: \"image\" | \"document\";\n\tfileName: string;\n\tmimeType: string;\n\tsize: number;\n\tcontent: string; // base64 encoded (without data URL prefix)\n\textractedText?: string; // For documents\n\tpreview?: string; // base64 image preview\n}\n\n/**\n * Thinking/reasoning level for models that support it.\n * Note: \"xhigh\" is only supported by OpenAI gpt-5.1-codex-max, gpt-5.2, and gpt-5.2-codex models.\n */\nexport type ThinkingLevel = \"off\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\n/**\n * User message with optional attachments.\n */\nexport type UserMessageWithAttachments = UserMessage & { attachments?: Attachment[] };\n\n/**\n * Extensible interface for custom app messages.\n * Apps can extend via declaration merging:\n *\n * @example\n * ```typescript\n * declare module \"@mariozechner/agent\" {\n * interface CustomMessages {\n * artifact: ArtifactMessage;\n * notification: NotificationMessage;\n * }\n * }\n * ```\n */\nexport interface CustomMessages {\n\t// Empty by default - apps extend via declaration merging\n}\n\n/**\n * AppMessage: Union of LLM messages + attachments + custom messages.\n * This abstraction allows apps to add custom message types while maintaining\n * type safety and compatibility with the base LLM messages.\n */\nexport type AppMessage =\n\t| AssistantMessage\n\t| UserMessageWithAttachments\n\t| Message // Includes ToolResultMessage\n\t| CustomMessages[keyof CustomMessages];\n\n/**\n * Agent state containing all configuration and conversation data.\n */\nexport interface AgentState {\n\tsystemPrompt: string;\n\tmodel: Model<any>;\n\tthinkingLevel: ThinkingLevel;\n\ttools: AgentTool<any>[];\n\tmessages: AppMessage[]; // Can include attachments + custom message types\n\tisStreaming: boolean;\n\tstreamMessage: Message | null;\n\tpendingToolCalls: Set<string>;\n\terror?: string;\n}\n\n/**\n * Events emitted by the Agent for UI updates.\n * These events provide fine-grained lifecycle information for messages, turns, and tool executions.\n */\nexport type AgentEvent =\n\t// Agent lifecycle\n\t| { type: \"agent_start\" }\n\t| { type: \"agent_end\"; messages: AppMessage[] }\n\t// Turn lifecycle - a turn is one assistant response + any tool calls/results\n\t| { type: \"turn_start\" }\n\t| { type: \"turn_end\"; message: AppMessage; toolResults: ToolResultMessage[] }\n\t// Message lifecycle - emitted for user, assistant, and toolResult messages\n\t| { type: \"message_start\"; message: AppMessage }\n\t// Only emitted for assistant messages during streaming\n\t| { type: \"message_update\"; message: AppMessage; assistantMessageEvent: AssistantMessageEvent }\n\t| { type: \"message_end\"; message: AppMessage }\n\t// Tool execution lifecycle\n\t| { type: \"tool_execution_start\"; toolCallId: string; toolName: string; args: any }\n\t| { type: \"tool_execution_update\"; toolCallId: string; toolName: string; args: any; partialResult: any }\n\t| { type: \"tool_execution_end\"; toolCallId: string; toolName: string; result: any; isError: boolean };\n"]}
1
+ {"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"","sourcesContent":["import type {\n\tAssistantMessageEvent,\n\tImageContent,\n\tMessage,\n\tModel,\n\tSimpleStreamOptions,\n\tstreamSimple,\n\tTextContent,\n\tTool,\n\tToolResultMessage,\n} from \"@mariozechner/pi-ai\";\nimport type { Static, TSchema } from \"@sinclair/typebox\";\n\n/** Stream function - can return sync or Promise for async config lookup */\nexport type StreamFn = (\n\t...args: Parameters<typeof streamSimple>\n) => ReturnType<typeof streamSimple> | Promise<ReturnType<typeof streamSimple>>;\n\n/**\n * Configuration for the agent loop.\n */\nexport interface AgentLoopConfig extends SimpleStreamOptions {\n\tmodel: Model<any>;\n\n\t/**\n\t * Converts AgentMessage[] to LLM-compatible Message[] before each LLM call.\n\t *\n\t * Each AgentMessage must be converted to a UserMessage, AssistantMessage, or ToolResultMessage\n\t * that the LLM can understand. AgentMessages that cannot be converted (e.g., UI-only notifications,\n\t * status messages) should be filtered out.\n\t *\n\t * @example\n\t * ```typescript\n\t * convertToLlm: (messages) => messages.flatMap(m => {\n\t * if (m.role === \"hookMessage\") {\n\t * // Convert custom message to user message\n\t * return [{ role: \"user\", content: m.content, timestamp: m.timestamp }];\n\t * }\n\t * if (m.role === \"notification\") {\n\t * // Filter out UI-only messages\n\t * return [];\n\t * }\n\t * // Pass through standard LLM messages\n\t * return [m];\n\t * })\n\t * ```\n\t */\n\tconvertToLlm: (messages: AgentMessage[]) => Message[] | Promise<Message[]>;\n\n\t/**\n\t * Optional transform applied to the context before `convertToLlm`.\n\t *\n\t * Use this for operations that work at the AgentMessage level:\n\t * - Context window management (pruning old messages)\n\t * - Injecting context from external sources\n\t *\n\t * @example\n\t * ```typescript\n\t * transformContext: async (messages) => {\n\t * if (estimateTokens(messages) > MAX_TOKENS) {\n\t * return pruneOldMessages(messages);\n\t * }\n\t * return messages;\n\t * }\n\t * ```\n\t */\n\ttransformContext?: (messages: AgentMessage[], signal?: AbortSignal) => Promise<AgentMessage[]>;\n\n\t/**\n\t * Resolves an API key dynamically for each LLM call.\n\t *\n\t * Useful for short-lived OAuth tokens (e.g., GitHub Copilot) that may expire\n\t * during long-running tool execution phases.\n\t */\n\tgetApiKey?: (provider: string) => Promise<string | undefined> | string | undefined;\n\n\t/**\n\t * Returns queued messages to inject into the conversation.\n\t *\n\t * Called after each turn to check for user interruptions or injected messages.\n\t * If messages are returned, they're added to the context before the next LLM call.\n\t */\n\tgetQueuedMessages?: () => Promise<AgentMessage[]>;\n}\n\n/**\n * Thinking/reasoning level for models that support it.\n * Note: \"xhigh\" is only supported by OpenAI gpt-5.1-codex-max, gpt-5.2, and gpt-5.2-codex models.\n */\nexport type ThinkingLevel = \"off\" | \"minimal\" | \"low\" | \"medium\" | \"high\" | \"xhigh\";\n\n/**\n * Extensible interface for custom app messages.\n * Apps can extend via declaration merging:\n *\n * @example\n * ```typescript\n * declare module \"@mariozechner/agent\" {\n * interface CustomAgentMessages {\n * artifact: ArtifactMessage;\n * notification: NotificationMessage;\n * }\n * }\n * ```\n */\nexport interface CustomAgentMessages {\n\t// Empty by default - apps extend via declaration merging\n}\n\n/**\n * AgentMessage: 
Union of LLM messages + custom messages.\n * This abstraction allows apps to add custom message types while maintaining\n * type safety and compatibility with the base LLM messages.\n */\nexport type AgentMessage = Message | CustomAgentMessages[keyof CustomAgentMessages];\n\n/**\n * Agent state containing all configuration and conversation data.\n */\nexport interface AgentState {\n\tsystemPrompt: string;\n\tmodel: Model<any>;\n\tthinkingLevel: ThinkingLevel;\n\ttools: AgentTool<any>[];\n\tmessages: AgentMessage[]; // Can include attachments + custom message types\n\tisStreaming: boolean;\n\tstreamMessage: AgentMessage | null;\n\tpendingToolCalls: Set<string>;\n\terror?: string;\n}\n\nexport interface AgentToolResult<T> {\n\t// Content blocks supporting text and images\n\tcontent: (TextContent | ImageContent)[];\n\t// Details to be displayed in a UI or logged\n\tdetails: T;\n}\n\n// Callback for streaming tool execution updates\nexport type AgentToolUpdateCallback<T = any> = (partialResult: AgentToolResult<T>) => void;\n\n// AgentTool extends Tool but adds the execute function\nexport interface AgentTool<TParameters extends TSchema = TSchema, TDetails = any> extends Tool<TParameters> {\n\t// A human-readable label for the tool to be displayed in UI\n\tlabel: string;\n\texecute: (\n\t\ttoolCallId: string,\n\t\tparams: Static<TParameters>,\n\t\tsignal?: AbortSignal,\n\t\tonUpdate?: AgentToolUpdateCallback<TDetails>,\n\t) => Promise<AgentToolResult<TDetails>>;\n}\n\n// AgentContext is like Context but uses AgentTool\nexport interface AgentContext {\n\tsystemPrompt: string;\n\tmessages: AgentMessage[];\n\ttools?: AgentTool<any>[];\n}\n\n/**\n * Events emitted by the Agent for UI updates.\n * These events provide fine-grained lifecycle information for messages, turns, and tool executions.\n */\nexport type AgentEvent =\n\t// Agent lifecycle\n\t| { type: \"agent_start\" }\n\t| { type: \"agent_end\"; messages: AgentMessage[] }\n\t// Turn lifecycle - a turn is one assistant response + any tool calls/results\n\t| { type: \"turn_start\" }\n\t| { type: \"turn_end\"; message: AgentMessage; toolResults: ToolResultMessage[] }\n\t// Message lifecycle - emitted for user, assistant, and toolResult messages\n\t| { type: \"message_start\"; message: AgentMessage }\n\t// Only emitted for assistant messages during streaming\n\t| { type: \"message_update\"; message: AgentMessage; assistantMessageEvent: AssistantMessageEvent }\n\t| { type: \"message_end\"; message: AgentMessage }\n\t// Tool execution lifecycle\n\t| { type: \"tool_execution_start\"; toolCallId: string; toolName: string; args: any }\n\t| { type: \"tool_execution_update\"; toolCallId: string; toolName: string; args: any; partialResult: any }\n\t| { type: \"tool_execution_end\"; toolCallId: string; toolName: string; result: any; isError: boolean };\n"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@mariozechner/pi-agent-core",
3
- "version": "0.30.2",
3
+ "version": "0.31.0",
4
4
  "description": "General-purpose agent with transport abstraction, state management, and attachment support",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",
@@ -17,8 +17,8 @@
17
17
  "prepublishOnly": "npm run clean && npm run build"
18
18
  },
19
19
  "dependencies": {
20
- "@mariozechner/pi-ai": "^0.30.2",
21
- "@mariozechner/pi-tui": "^0.30.2"
20
+ "@mariozechner/pi-ai": "^0.31.0",
21
+ "@mariozechner/pi-tui": "^0.31.0"
22
22
  },
23
23
  "keywords": [
24
24
  "ai",
package/dist/transports/AppTransport.d.ts DELETED
@@ -1,28 +0,0 @@
1
- import type { Message } from "@mariozechner/pi-ai";
2
- import type { AgentRunConfig, AgentTransport } from "./types.js";
3
- export interface AppTransportOptions {
4
- /**
5
- * Proxy server URL. The server manages user accounts and proxies requests to LLM providers.
6
- * Example: "https://genai.mariozechner.at"
7
- */
8
- proxyUrl: string;
9
- /**
10
- * Function to retrieve auth token for the proxy server.
11
- * The token is used for user authentication and authorization.
12
- */
13
- getAuthToken: () => Promise<string> | string;
14
- }
15
- /**
16
- * Transport that uses an app server with user authentication tokens.
17
- * The server manages user accounts and proxies requests to LLM providers.
18
- */
19
- export declare class AppTransport implements AgentTransport {
20
- private options;
21
- constructor(options: AppTransportOptions);
22
- private getStreamFn;
23
- private buildContext;
24
- private buildLoopConfig;
25
- run(messages: Message[], userMessage: Message, cfg: AgentRunConfig, signal?: AbortSignal): AsyncGenerator<import("@mariozechner/pi-ai").AgentEvent, void, unknown>;
26
- continue(messages: Message[], cfg: AgentRunConfig, signal?: AbortSignal): AsyncGenerator<import("@mariozechner/pi-ai").AgentEvent, void, unknown>;
27
- }
28
- //# sourceMappingURL=AppTransport.d.ts.map
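The deleted `AppTransport` covered the same app-server scenario that `streamProxy` now handles. A hedged migration sketch, assuming the `Agent` constructor accepts the `streamFn` option shown in the proxy.js docstring and that `getAuthToken()` is whatever token source the old `AppTransportOptions.getAuthToken` pointed at:

```typescript
// 0.30.x (removed): const transport = new AppTransport({ proxyUrl, getAuthToken });
// 0.31.0: route the same proxy URL and token through streamProxy as the agent's streamFn.
import { Agent, streamProxy } from "@mariozechner/pi-agent-core";

declare function getAuthToken(): Promise<string>; // existing token source from the app

const agent = new Agent({
	streamFn: async (model, context, options) =>
		streamProxy(model, context, {
			...options,
			authToken: await getAuthToken(),
			proxyUrl: "https://genai.example.com",
		}),
});
```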