@radaros/core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/dist/index.d.ts +887 -0
  2. package/dist/index.js +3462 -0
  3. package/package.json +64 -0
  4. package/src/agent/agent.ts +314 -0
  5. package/src/agent/llm-loop.ts +263 -0
  6. package/src/agent/run-context.ts +35 -0
  7. package/src/agent/types.ts +77 -0
  8. package/src/events/event-bus.ts +45 -0
  9. package/src/events/types.ts +16 -0
  10. package/src/guardrails/types.ts +5 -0
  11. package/src/hooks/types.ts +6 -0
  12. package/src/index.ts +111 -0
  13. package/src/knowledge/knowledge-base.ts +146 -0
  14. package/src/logger/logger.ts +232 -0
  15. package/src/memory/memory.ts +87 -0
  16. package/src/memory/types.ts +13 -0
  17. package/src/models/provider.ts +22 -0
  18. package/src/models/providers/anthropic.ts +330 -0
  19. package/src/models/providers/google.ts +361 -0
  20. package/src/models/providers/ollama.ts +211 -0
  21. package/src/models/providers/openai.ts +323 -0
  22. package/src/models/registry.ts +90 -0
  23. package/src/models/types.ts +112 -0
  24. package/src/session/session-manager.ts +75 -0
  25. package/src/session/types.ts +10 -0
  26. package/src/storage/driver.ts +10 -0
  27. package/src/storage/in-memory.ts +44 -0
  28. package/src/storage/mongodb.ts +70 -0
  29. package/src/storage/postgres.ts +81 -0
  30. package/src/storage/sqlite.ts +81 -0
  31. package/src/team/modes.ts +1 -0
  32. package/src/team/team.ts +323 -0
  33. package/src/team/types.ts +26 -0
  34. package/src/tools/define-tool.ts +20 -0
  35. package/src/tools/tool-executor.ts +131 -0
  36. package/src/tools/types.ts +27 -0
  37. package/src/vector/base.ts +44 -0
  38. package/src/vector/embeddings/google.ts +64 -0
  39. package/src/vector/embeddings/openai.ts +66 -0
  40. package/src/vector/in-memory.ts +115 -0
  41. package/src/vector/mongodb.ts +241 -0
  42. package/src/vector/pgvector.ts +169 -0
  43. package/src/vector/qdrant.ts +203 -0
  44. package/src/vector/types.ts +55 -0
  45. package/src/workflow/step-runner.ts +303 -0
  46. package/src/workflow/types.ts +55 -0
  47. package/src/workflow/workflow.ts +68 -0
  48. package/tsconfig.json +8 -0
package/dist/index.js ADDED
@@ -0,0 +1,3462 @@
1
+ // src/agent/agent.ts
2
+ import { v4 as uuidv42 } from "uuid";
3
+
4
+ // src/events/event-bus.ts
5
+ import { EventEmitter } from "events";
6
// src/events/event-bus.ts
// Thin, chainable wrapper around Node's EventEmitter with a raised listener cap.
var EventBus = class {
  emitter = new EventEmitter();
  constructor() {
    // Agents, teams and workflows may all subscribe to one shared bus; the
    // default cap of 10 listeners is too low, so raise it to avoid warnings.
    this.emitter.setMaxListeners(100);
  }
  /** Subscribe a handler to `event`. Returns `this` for chaining. */
  on(event, handler) {
    this.emitter.addListener(event, handler);
    return this;
  }
  /** Subscribe a handler for a single delivery. Returns `this` for chaining. */
  once(event, handler) {
    this.emitter.once(event, handler);
    return this;
  }
  /** Remove a previously registered handler. Returns `this` for chaining. */
  off(event, handler) {
    this.emitter.removeListener(event, handler);
    return this;
  }
  /** Emit synchronously; returns true when at least one listener was invoked. */
  emit(event, data) {
    return this.emitter.emit(event, data);
  }
  /** Drop every listener for `event` (or for all events when omitted). */
  removeAllListeners(event) {
    this.emitter.removeAllListeners(event);
    return this;
  }
};
31
+
32
+ // src/storage/in-memory.ts
33
// src/storage/in-memory.ts
// Volatile key/value storage backed by a Map. Values are stored JSON-serialized,
// so every read hands back a fresh deep copy rather than a shared reference.
var InMemoryStorage = class {
  store = /* @__PURE__ */ new Map();
  /** Compose the flat map key as "<namespace>:<key>". */
  makeKey(namespace, key) {
    return [namespace, key].join(":");
  }
  /** Fetch and deserialize a value; null when the key is absent. */
  async get(namespace, key) {
    const serialized = this.store.get(this.makeKey(namespace, key));
    return serialized === undefined ? null : JSON.parse(serialized);
  }
  /** Serialize and store a value under namespace:key. */
  async set(namespace, key, value) {
    this.store.set(this.makeKey(namespace, key), JSON.stringify(value));
  }
  /** Remove a single entry (no-op when absent). */
  async delete(namespace, key) {
    this.store.delete(this.makeKey(namespace, key));
  }
  /**
   * Enumerate entries of a namespace, optionally filtered by key prefix.
   * Returned `key` values have the namespace prefix stripped.
   */
  async list(namespace, prefix) {
    const wanted = `${namespace}:${prefix || ""}`;
    const matches = [];
    for (const [flatKey, serialized] of this.store) {
      if (!flatKey.startsWith(wanted)) continue;
      matches.push({
        key: flatKey.slice(namespace.length + 1),
        value: JSON.parse(serialized)
      });
    }
    return matches;
  }
  /** Drop all stored data. */
  async close() {
    this.store.clear();
  }
};
64
+
65
+ // src/session/session-manager.ts
66
// src/session/session-manager.ts
var NAMESPACE = "sessions";
// Persists chat sessions (message history + arbitrary state) via a storage driver.
var SessionManager = class {
  constructor(storage) {
    this.storage = storage;
  }
  /**
   * Load a session by id, creating and persisting an empty one when absent.
   * `userId` is only applied to newly created sessions.
   */
  async getOrCreate(sessionId, userId) {
    const found = await this.storage.get(NAMESPACE, sessionId);
    if (found) return found;
    const fresh = {
      sessionId,
      userId,
      messages: [],
      state: {},
      createdAt: new Date(),
      updatedAt: new Date()
    };
    await this.storage.set(NAMESPACE, sessionId, fresh);
    return fresh;
  }
  /** Append a single message to the session history. */
  async appendMessage(sessionId, msg) {
    await this.appendMessages(sessionId, [msg]);
  }
  /** Append several messages, bump updatedAt, and persist. */
  async appendMessages(sessionId, msgs) {
    const session = await this.getOrCreate(sessionId);
    for (const msg of msgs) {
      session.messages.push(msg);
    }
    session.updatedAt = new Date();
    await this.storage.set(NAMESPACE, sessionId, session);
  }
  /**
   * Return the session's messages; a positive `limit` keeps only the most
   * recent ones. Unknown sessions yield an empty array.
   */
  async getHistory(sessionId, limit) {
    const session = await this.storage.get(NAMESPACE, sessionId);
    if (!session) return [];
    if (limit && limit > 0) {
      return session.messages.slice(-limit);
    }
    return session.messages;
  }
  /** Shallow-merge `patch` into the session state and persist. */
  async updateState(sessionId, patch) {
    const session = await this.getOrCreate(sessionId);
    Object.assign(session.state, patch);
    session.updatedAt = new Date();
    await this.storage.set(NAMESPACE, sessionId, session);
  }
  /** Read the session state bag; {} for unknown sessions. */
  async getState(sessionId) {
    return (await this.storage.get(NAMESPACE, sessionId))?.state ?? {};
  }
  /** Remove the session entirely. */
  async deleteSession(sessionId) {
    await this.storage.delete(NAMESPACE, sessionId);
  }
};
119
+
120
+ // src/tools/tool-executor.ts
121
// src/tools/tool-executor.ts
import { createRequire } from "module";
var _require = createRequire(import.meta.url);
// Validates and runs model-requested tool calls with bounded parallelism.
var ToolExecutor = class {
  tools;
  concurrency;
  /**
   * @param tools - tool objects, keyed internally by their `name`
   * @param concurrency - max tool calls executed in parallel per batch
   */
  constructor(tools, concurrency = 5) {
    this.tools = new Map();
    for (const tool of tools) {
      this.tools.set(tool.name, tool);
    }
    this.concurrency = concurrency;
  }
  /**
   * Execute every call, batching by `concurrency`. Rejected executions are
   * converted into error-shaped results so one failure never aborts the batch.
   */
  async executeAll(toolCalls, ctx) {
    const results = [];
    for (let offset = 0; offset < toolCalls.length; offset += this.concurrency) {
      const batch = toolCalls.slice(offset, offset + this.concurrency);
      const settledBatch = await Promise.allSettled(
        batch.map((call) => this.executeSingle(call, ctx))
      );
      settledBatch.forEach((settled, idx) => {
        if (settled.status === "fulfilled") {
          results.push(settled.value);
          return;
        }
        const call = batch[idx];
        const message = settled.reason?.message ?? "Unknown error";
        results.push({
          toolCallId: call.id,
          toolName: call.name,
          result: `Error: ${message}`,
          error: message
        });
      });
    }
    return results;
  }
  /**
   * Run one tool call: resolve the tool, emit `tool.call`, validate arguments
   * against the tool's schema, execute, then emit `tool.result`.
   */
  async executeSingle(toolCall, ctx) {
    const tool = this.tools.get(toolCall.name);
    if (!tool) {
      return {
        toolCallId: toolCall.id,
        toolName: toolCall.name,
        result: `Error: Tool "${toolCall.name}" not found`,
        error: `Tool "${toolCall.name}" not found`
      };
    }
    ctx.eventBus.emit("tool.call", {
      runId: ctx.runId,
      toolName: toolCall.name,
      args: toolCall.arguments
    });
    const parsed = tool.parameters.safeParse(toolCall.arguments);
    if (!parsed.success) {
      const errMsg = `Invalid arguments: ${parsed.error.message}`;
      ctx.eventBus.emit("tool.result", {
        runId: ctx.runId,
        toolName: toolCall.name,
        result: errMsg
      });
      return {
        toolCallId: toolCall.id,
        toolName: toolCall.name,
        result: errMsg,
        error: errMsg
      };
    }
    const rawResult = await tool.execute(parsed.data, ctx);
    const resultContent = typeof rawResult === "string" ? rawResult : rawResult.content;
    ctx.eventBus.emit("tool.result", {
      runId: ctx.runId,
      toolName: toolCall.name,
      result: resultContent
    });
    return {
      toolCallId: toolCall.id,
      toolName: toolCall.name,
      result: rawResult
    };
  }
  /** Convert each registered tool's zod schema to a JSON-schema definition. */
  getToolDefinitions() {
    const { zodToJsonSchema } = _require("zod-to-json-schema");
    return [...this.tools.values()].map((tool) => ({
      name: tool.name,
      description: tool.description,
      parameters: zodToJsonSchema(tool.parameters, { target: "openApi3" })
    }));
  }
};
214
+
215
+ // src/logger/logger.ts
216
// src/logger/logger.ts
// Numeric severity ranking; a message is printed when its level ranks at or
// above the logger's configured threshold ("silent" suppresses everything).
var LEVEL_ORDER = {
  debug: 0,
  info: 1,
  warn: 2,
  error: 3,
  silent: 4
};
// ANSI escape sequences used for terminal styling.
var C = {
  reset: "\x1B[0m",
  bold: "\x1B[1m",
  dim: "\x1B[2m",
  italic: "\x1B[3m",
  black: "\x1B[30m",
  red: "\x1B[31m",
  green: "\x1B[32m",
  yellow: "\x1B[33m",
  blue: "\x1B[34m",
  magenta: "\x1B[35m",
  cyan: "\x1B[36m",
  white: "\x1B[37m",
  bgBlack: "\x1B[40m",
  bgRed: "\x1B[41m",
  bgGreen: "\x1B[42m",
  bgYellow: "\x1B[43m",
  bgBlue: "\x1B[44m",
  bgMagenta: "\x1B[45m",
  bgCyan: "\x1B[46m",
  gray: "\x1B[90m",
  brightGreen: "\x1B[92m",
  brightYellow: "\x1B[93m",
  brightBlue: "\x1B[94m",
  brightMagenta: "\x1B[95m",
  brightCyan: "\x1B[96m"
};
// Strip every ANSI styling sequence from a string (used to measure the
// visible width of an already-colored label).
function noColor(str) {
  return str.replace(/\x1b\[[0-9;]*m/g, "");
}
// Console logger with leveled output plus boxed "agent run" formatting helpers.
var Logger = class {
  level;
  color;
  prefix;
  /**
   * @param config.level - minimum level to print (default "info")
   * @param config.color - force colors on/off; defaults to TTY detection
   * @param config.prefix - tag shown in brackets on each line (default "radaros")
   */
  constructor(config = {}) {
    this.level = config.level ?? "info";
    // BUGFIX: the previous default was `process.stdout.isTTY !== false`,
    // which is always true because isTTY is either `true` or `undefined` —
    // ANSI escapes leaked into piped/redirected output. Default to color
    // only when stdout is genuinely a TTY.
    this.color = config.color ?? process.stdout.isTTY === true;
    this.prefix = config.prefix ?? "radaros";
  }
  /** Wrap `text` in the given ANSI code when colors are enabled. */
  c(code, text) {
    return this.color ? `${code}${text}${C.reset}` : text;
  }
  /** True when `level` meets or exceeds the configured threshold. */
  shouldLog(level) {
    return LEVEL_ORDER[level] >= LEVEL_ORDER[this.level];
  }
  /** Three-letter colored tag for a level ("" for unknown levels). */
  tag(level) {
    switch (level) {
      case "debug":
        return this.c(C.gray, "DBG");
      case "info":
        return this.c(C.brightCyan, "INF");
      case "warn":
        return this.c(C.brightYellow, "WRN");
      case "error":
        return this.c(C.red, "ERR");
      default:
        return "";
    }
  }
  /** Current wall-clock time as dimmed "HH:MM:SS.mmm" (UTC, from ISO string). */
  timestamp() {
    const now = /* @__PURE__ */ new Date();
    const ts = now.toISOString().slice(11, 23);
    return this.c(C.dim, ts);
  }
  /** Core sink: assemble "time tag [prefix] msg k=v ..." and print it. */
  log(level, msg, data) {
    if (!this.shouldLog(level)) return;
    const parts = [
      this.timestamp(),
      this.tag(level),
      this.c(C.dim, `[${this.prefix}]`),
      msg
    ];
    if (data && Object.keys(data).length > 0) {
      const formatted = Object.entries(data).map(([k, v]) => `${this.c(C.dim, k + "=")}${this.formatValue(v)}`).join(" ");
      parts.push(formatted);
    }
    console.log(parts.join(" "));
  }
  /** Color a structured-data value by type (numbers green, strings yellow...). */
  formatValue(v) {
    if (typeof v === "number") return this.c(C.brightGreen, String(v));
    if (typeof v === "string") return this.c(C.yellow, `"${v}"`);
    if (typeof v === "boolean") return this.c(C.magenta, String(v));
    return String(v);
  }
  debug(msg, data) {
    this.log("debug", msg, data);
  }
  info(msg, data) {
    this.log("info", msg, data);
  }
  warn(msg, data) {
    this.log("warn", msg, data);
  }
  error(msg, data) {
    this.log("error", msg, data);
  }
  // ── Formatted agent output helpers ────────────────────────────────────
  boxWidth = 80;
  /** Left border character for box-drawing lines. */
  pipe() {
    return this.c(C.brightCyan, "\u2502");
  }
  /** Print "│ Label: value", continuing multi-line values aligned under the label. */
  printBoxLine(label, value, labelColor = C.dim, valueColor = C.white) {
    const lines = value.split("\n");
    const prefix = `${this.pipe()} ${this.c(labelColor, label)}`;
    console.log(`${prefix}${this.c(valueColor, lines[0])}`);
    // Pad by the *visible* label width so continuation lines line up.
    const pad = " ".repeat(noColor(label).length);
    for (let i = 1; i < lines.length; i++) {
      console.log(`${this.pipe()} ${pad}${this.c(valueColor, lines[i])}`);
    }
  }
  /** Open the run box: "┌─ Agent: name ─────" plus the input line. */
  agentStart(agentName, input) {
    if (!this.shouldLog("info")) return;
    const title = ` Agent: ${agentName} `;
    const lineLen = Math.max(0, this.boxWidth - title.length - 2);
    console.log("");
    console.log(
      this.c(C.bold + C.brightCyan, "\u250C\u2500") + this.c(C.bold + C.brightCyan, title) + this.c(C.dim, "\u2500".repeat(lineLen))
    );
    this.printBoxLine("Input: ", input);
    console.log(this.pipe());
  }
  /** Debug-level line for an outgoing tool invocation (args truncated at 200 chars). */
  toolCall(toolName, args) {
    if (!this.shouldLog("debug")) return;
    const argsStr = JSON.stringify(args, null, 2);
    console.log(
      `${this.pipe()} ${this.c(C.brightMagenta, "\u26A1")} ${this.c(C.magenta, toolName)}`
    );
    if (argsStr !== "{}" && argsStr !== "[]") {
      const truncated = argsStr.length > 200 ? argsStr.slice(0, 200) + "\u2026" : argsStr;
      for (const line of truncated.split("\n")) {
        console.log(`${this.pipe()} ${this.c(C.dim, line)}`);
      }
    }
  }
  /** Debug-level line for a tool's result (truncated at 300 chars). */
  toolResult(toolName, result) {
    if (!this.shouldLog("debug")) return;
    const truncated = result.length > 300 ? result.slice(0, 300) + "\u2026" : result;
    console.log(
      `${this.pipe()} ${this.c(C.green, "\u2713")} ${this.c(C.dim, toolName + " \u2192")}`
    );
    for (const line of truncated.split("\n")) {
      console.log(`${this.pipe()} ${this.c(C.gray, line)}`);
    }
    console.log(this.pipe());
  }
  /** Close the run box: output, token usage (↑prompt ↓completion Σtotal), duration. */
  agentEnd(agentName, output, usage, durationMs) {
    if (!this.shouldLog("info")) return;
    console.log(this.pipe());
    this.printBoxLine("Output: ", output);
    console.log(this.pipe());
    const tokensLine = this.c(C.dim, "Tokens: ") + this.c(C.brightGreen, `\u2191 ${usage.promptTokens}`) + this.c(C.dim, " ") + this.c(C.brightGreen, `\u2193 ${usage.completionTokens}`) + this.c(C.dim, " ") + this.c(C.bold + C.brightGreen, `\u03A3 ${usage.totalTokens}`);
    const duration = this.c(C.dim, "Duration: ") + this.c(C.yellow, this.formatDuration(durationMs));
    console.log(`${this.pipe()} ${tokensLine}`);
    console.log(`${this.pipe()} ${duration}`);
    console.log(
      this.c(C.bold + C.brightCyan, "\u2514") + this.c(C.dim, "\u2500".repeat(this.boxWidth - 1))
    );
  }
  /** Full-width horizontal rule. */
  separator() {
    if (!this.shouldLog("info")) return;
    console.log(this.c(C.dim, "\u2500".repeat(this.boxWidth)));
  }
  /** Render a millisecond count as "123ms" or "1.2s". */
  formatDuration(ms) {
    if (ms < 1e3) return `${ms}ms`;
    const secs = (ms / 1e3).toFixed(1);
    return `${secs}s`;
  }
};
391
+
392
+ // src/agent/llm-loop.ts
393
+ import { createRequire as createRequire2 } from "module";
394
+
395
+ // src/models/types.ts
396
// src/models/types.ts
/**
 * Flatten message content into plain text.
 *
 * Accepts null/undefined (returns ""), a plain string (returned unchanged), or
 * a multimodal part array, from which only `text` parts are concatenated.
 *
 * BUGFIX: the original checked `content === null` only, so callers passing
 * `undefined` (e.g. a message with no `content` property) crashed on
 * `undefined.filter`. The loose `== null` check covers both null and undefined.
 */
function getTextContent(content) {
  if (content == null) return "";
  if (typeof content === "string") return content;
  return content.filter((p) => p.type === "text").map((p) => p.text).join("");
}
/** True when content is a multimodal part array rather than a plain string. */
function isMultiModal(content) {
  return Array.isArray(content);
}
404
+
405
+ // src/agent/llm-loop.ts
406
// src/agent/llm-loop.ts
// Lazy CJS require used only to load the optional zod-to-json-schema converter.
var _require2 = createRequire2(import.meta.url);
// Drives the model <-> tool loop: call the provider, execute any requested
// tools, feed results back, and repeat until a final answer (or the
// roundtrip budget is exhausted).
var LLMLoop = class {
  provider;        // model provider with generate()/stream()
  toolExecutor;    // ToolExecutor or null when the agent has no tools
  maxToolRoundtrips;
  temperature;
  maxTokens;
  structuredOutput; // optional zod schema for parsing the final answer
  logger;
  constructor(provider, toolExecutor, options) {
    this.provider = provider;
    this.toolExecutor = toolExecutor;
    this.maxToolRoundtrips = options.maxToolRoundtrips;
    this.temperature = options.temperature;
    this.maxTokens = options.maxTokens;
    this.structuredOutput = options.structuredOutput;
    this.logger = options.logger;
  }
  /**
   * Non-streaming loop. Returns { text, toolCalls, usage } with token usage
   * summed across every provider roundtrip; when `structuredOutput` is set,
   * also attempts to parse the final text into `output.structured`
   * (parse failures are deliberately swallowed — best effort).
   */
  async run(messages, ctx, apiKey) {
    const allToolCalls = [];
    let totalPromptTokens = 0;
    let totalCompletionTokens = 0;
    const currentMessages = [...messages];
    const toolDefs = this.toolExecutor?.getToolDefinitions() ?? [];
    // <= so the budget counts tool roundtrips, allowing one final plain reply.
    for (let roundtrip = 0; roundtrip <= this.maxToolRoundtrips; roundtrip++) {
      // Rebuild the per-call config each iteration; only defined options are set.
      const modelConfig = {};
      if (apiKey) modelConfig.apiKey = apiKey;
      if (this.temperature !== void 0)
        modelConfig.temperature = this.temperature;
      if (this.maxTokens !== void 0) modelConfig.maxTokens = this.maxTokens;
      if (toolDefs.length > 0) modelConfig.tools = toolDefs;
      if (this.structuredOutput) {
        modelConfig.responseFormat = {
          type: "json_schema",
          schema: this.zodToJsonSchema(this.structuredOutput),
          name: "structured_response"
        };
      }
      const response = await this.provider.generate(
        currentMessages,
        modelConfig
      );
      totalPromptTokens += response.usage.promptTokens;
      totalCompletionTokens += response.usage.completionTokens;
      currentMessages.push(response.message);
      // Terminal case: the model answered without (usable) tool calls.
      if (response.finishReason !== "tool_calls" || !response.message.toolCalls?.length || !this.toolExecutor) {
        const text2 = getTextContent(response.message.content);
        const output = {
          text: text2,
          toolCalls: allToolCalls,
          usage: {
            promptTokens: totalPromptTokens,
            completionTokens: totalCompletionTokens,
            totalTokens: totalPromptTokens + totalCompletionTokens
          }
        };
        if (this.structuredOutput && text2) {
          try {
            const jsonStr = this.extractJson(text2);
            const parsed = JSON.parse(jsonStr);
            output.structured = this.structuredOutput.parse(parsed);
          } catch {
            // Best-effort: a malformed JSON answer still returns plain text.
          }
        }
        return output;
      }
      // Execute the requested tools and append their results as "tool" messages.
      const toolResults = await this.toolExecutor.executeAll(
        response.message.toolCalls,
        ctx
      );
      allToolCalls.push(...toolResults);
      for (const result of toolResults) {
        const content = typeof result.result === "string" ? result.result : result.result.content;
        this.logger?.toolCall(result.toolName, {});
        this.logger?.toolResult(result.toolName, typeof content === "string" ? content : JSON.stringify(content));
        currentMessages.push({
          role: "tool",
          content,
          toolCallId: result.toolCallId,
          name: result.toolName
        });
      }
    }
    // Roundtrip budget exhausted: fall back to the most recent assistant text.
    // NOTE(review): reverse() mutates currentMessages in place; harmless here
    // because the local array is discarded right after, but worth confirming
    // if this code is ever refactored to reuse currentMessages.
    const lastAssistantMsg = currentMessages.reverse().find((m) => m.role === "assistant");
    const text = getTextContent(lastAssistantMsg?.content ?? null);
    return {
      text,
      toolCalls: allToolCalls,
      usage: {
        promptTokens: totalPromptTokens,
        completionTokens: totalCompletionTokens,
        totalTokens: totalPromptTokens + totalCompletionTokens
      }
    };
  }
  /**
   * Streaming loop. Yields provider chunks verbatim while accumulating text
   * and any incrementally-delivered tool calls; when a roundtrip finishes
   * with tool calls, executes them and starts another provider stream.
   * Emits "run.stream.chunk" on the event bus for each text chunk.
   */
  async *stream(messages, ctx, apiKey) {
    const currentMessages = [...messages];
    const toolDefs = this.toolExecutor?.getToolDefinitions() ?? [];
    for (let roundtrip = 0; roundtrip <= this.maxToolRoundtrips; roundtrip++) {
      const modelConfig = {};
      if (apiKey) modelConfig.apiKey = apiKey;
      if (this.temperature !== void 0)
        modelConfig.temperature = this.temperature;
      if (this.maxTokens !== void 0) modelConfig.maxTokens = this.maxTokens;
      if (toolDefs.length > 0) modelConfig.tools = toolDefs;
      let fullText = "";
      // Tool calls are assembled from start + delta chunks keyed by call id.
      const pendingToolCalls = [];
      let finishReason = "stop";
      const streamGen = this.provider.stream(currentMessages, modelConfig);
      for await (const chunk of streamGen) {
        yield chunk;
        if (chunk.type === "text") {
          fullText += chunk.text;
          ctx.eventBus.emit("run.stream.chunk", {
            runId: ctx.runId,
            chunk: chunk.text
          });
        } else if (chunk.type === "tool_call_start") {
          pendingToolCalls.push({
            id: chunk.toolCall.id,
            name: chunk.toolCall.name,
            args: ""
          });
        } else if (chunk.type === "tool_call_delta") {
          const tc = pendingToolCalls.find(
            (t) => t.id === chunk.toolCallId
          );
          if (tc) {
            tc.args += chunk.argumentsDelta;
          }
        } else if (chunk.type === "finish") {
          finishReason = chunk.finishReason;
        }
      }
      // Done unless the model explicitly asked for tools we can run.
      if (finishReason !== "tool_calls" || pendingToolCalls.length === 0 || !this.toolExecutor) {
        return;
      }
      // Record the assistant turn (with parsed tool arguments), run the tools,
      // append their results, then loop for the next provider stream.
      // NOTE(review): JSON.parse here will throw on truncated argument deltas;
      // presumably providers always deliver complete JSON — confirm.
      const assistantMsg = {
        role: "assistant",
        content: fullText || null,
        toolCalls: pendingToolCalls.map((tc) => ({
          id: tc.id,
          name: tc.name,
          arguments: JSON.parse(tc.args || "{}")
        }))
      };
      currentMessages.push(assistantMsg);
      const toolResults = await this.toolExecutor.executeAll(
        assistantMsg.toolCalls,
        ctx
      );
      for (const result of toolResults) {
        const content = typeof result.result === "string" ? result.result : result.result.content;
        currentMessages.push({
          role: "tool",
          content,
          toolCallId: result.toolCallId,
          name: result.toolName
        });
      }
    }
  }
  /**
   * Pull a JSON payload out of model text: prefer a fenced ```json block,
   * else the outermost {...} span, else the trimmed text as-is.
   */
  extractJson(text) {
    const fenceMatch = text.match(/```(?:json)?\s*\n?([\s\S]*?)```/);
    if (fenceMatch) return fenceMatch[1].trim();
    const braceStart = text.indexOf("{");
    const braceEnd = text.lastIndexOf("}");
    if (braceStart !== -1 && braceEnd > braceStart) {
      return text.slice(braceStart, braceEnd + 1);
    }
    return text.trim();
  }
  /**
   * Convert a zod schema to JSON schema via the optional zod-to-json-schema
   * package; returns {} when the package is unavailable or conversion fails.
   */
  zodToJsonSchema(schema) {
    try {
      const { zodToJsonSchema } = _require2("zod-to-json-schema");
      return zodToJsonSchema(schema, { target: "openApi3" });
    } catch {
      return {};
    }
  }
};
587
+
588
+ // src/agent/run-context.ts
589
+ import { v4 as uuidv4 } from "uuid";
590
// src/agent/run-context.ts
// Per-run execution context handed to tools, hooks and guardrails; carries the
// run/session identity, the shared event bus, and a session-scoped state bag.
var RunContext = class {
  runId;
  sessionId;
  userId;
  metadata;
  eventBus;
  sessionState;
  constructor(opts) {
    const { runId, sessionId, userId, metadata, eventBus, sessionState } = opts;
    // A fresh UUID is minted only when the caller did not supply a runId.
    this.runId = runId ?? uuidv4();
    this.sessionId = sessionId;
    this.userId = userId;
    this.metadata = metadata ?? {};
    this.eventBus = eventBus;
    this.sessionState = sessionState ?? {};
  }
  /** Read a value from the session-scoped state bag. */
  getState(key) {
    return this.sessionState[key];
  }
  /** Write a value into the session-scoped state bag. */
  setState(key, value) {
    this.sessionState[key] = value;
  }
};
612
+
613
+ // src/agent/agent.ts
614
// src/agent/agent.ts
// High-level agent: wires model, tools, session persistence, memory, hooks,
// guardrails and logging around an LLMLoop, exposing run() and stream().
var Agent = class {
  name;
  eventBus;
  instructions;
  config;
  sessionManager;
  llmLoop;
  logger;
  /** Tools from config ([] when none were configured). */
  get tools() {
    return this.config.tools ?? [];
  }
  get modelId() {
    return this.config.model.modelId;
  }
  get providerId() {
    return this.config.model.providerId;
  }
  get hasStructuredOutput() {
    return !!this.config.structuredOutput;
  }
  constructor(config) {
    this.config = config;
    this.name = config.name;
    this.instructions = config.instructions;
    // Fall back to a private bus / in-memory storage when none are injected.
    this.eventBus = config.eventBus ?? new EventBus();
    const storage = config.storage ?? new InMemoryStorage();
    this.sessionManager = new SessionManager(storage);
    this.logger = new Logger({
      level: config.logLevel ?? "silent",
      prefix: config.name
    });
    const toolExecutor = config.tools && config.tools.length > 0 ? new ToolExecutor(config.tools) : null;
    // NOTE: no maxTokens option is forwarded here, so LLMLoop.maxTokens
    // remains undefined regardless of agent configuration.
    this.llmLoop = new LLMLoop(config.model, toolExecutor, {
      maxToolRoundtrips: config.maxToolRoundtrips ?? 10,
      temperature: config.temperature,
      structuredOutput: config.structuredOutput,
      logger: this.logger
    });
  }
  /**
   * Execute one non-streaming run: hooks.beforeRun -> input guardrails ->
   * LLM/tool loop -> output guardrails -> persist history/state/memory ->
   * hooks.afterRun. Emits run.start / run.complete / run.error. Rethrows any
   * failure after hooks.onError and the run.error event.
   */
  async run(input, opts) {
    const startTime = Date.now();
    // Per-call overrides win over agent-level config; otherwise mint a new id.
    const sessionId = opts?.sessionId ?? this.config.sessionId ?? uuidv42();
    const userId = opts?.userId ?? this.config.userId;
    // Multimodal input is flattened to its text parts for logging and history.
    const inputText = typeof input === "string" ? input : getTextContent(input);
    const session = await this.sessionManager.getOrCreate(sessionId, userId);
    const ctx = new RunContext({
      sessionId,
      userId,
      metadata: opts?.metadata ?? {},
      eventBus: this.eventBus,
      // Copy session state so guardrail/hook failures don't corrupt the
      // persisted state; changes are written back only on success.
      sessionState: { ...session.state }
    });
    this.logger.agentStart(this.name, inputText);
    this.eventBus.emit("run.start", {
      runId: ctx.runId,
      agentName: this.name,
      input: inputText
    });
    try {
      if (this.config.hooks?.beforeRun) {
        await this.config.hooks.beforeRun(ctx);
      }
      // Input guardrails see the raw (possibly multimodal) input.
      if (this.config.guardrails?.input) {
        for (const guardrail of this.config.guardrails.input) {
          const result = await guardrail.validate(input, ctx);
          if (!result.pass) {
            throw new Error(
              `Input guardrail "${guardrail.name}" blocked: ${result.reason}`
            );
          }
        }
      }
      const messages = await this.buildMessages(input, sessionId, ctx);
      const output = await this.llmLoop.run(messages, ctx, opts?.apiKey);
      output.durationMs = Date.now() - startTime;
      // Output guardrails see the whole output object (text, toolCalls, usage).
      if (this.config.guardrails?.output) {
        for (const guardrail of this.config.guardrails.output) {
          const result = await guardrail.validate(output, ctx);
          if (!result.pass) {
            throw new Error(
              `Output guardrail "${guardrail.name}" blocked: ${result.reason}`
            );
          }
        }
      }
      // Persist only the flattened text of this exchange, not tool messages.
      await this.sessionManager.appendMessages(sessionId, [
        { role: "user", content: inputText },
        { role: "assistant", content: output.text }
      ]);
      await this.sessionManager.updateState(sessionId, ctx.sessionState);
      if (this.config.memory) {
        await this.config.memory.addMessages(sessionId, [
          { role: "user", content: inputText },
          { role: "assistant", content: output.text }
        ]);
      }
      if (this.config.hooks?.afterRun) {
        await this.config.hooks.afterRun(ctx, output);
      }
      this.logger.agentEnd(this.name, output.text, output.usage, output.durationMs);
      this.eventBus.emit("run.complete", {
        runId: ctx.runId,
        output
      });
      return output;
    } catch (error) {
      const err = error instanceof Error ? error : new Error(String(error));
      this.logger.error(`Run failed: ${err.message}`);
      if (this.config.hooks?.onError) {
        await this.config.hooks.onError(ctx, err);
      }
      this.eventBus.emit("run.error", {
        runId: ctx.runId,
        error: err
      });
      throw err;
    }
  }
  /**
   * Streaming variant of run(): yields LLMLoop chunks as they arrive.
   * History/state/memory are persisted (and run.complete emitted, with zeroed
   * usage since streaming does not aggregate token counts) only when the
   * stream finished without error. Note: output guardrails are not applied
   * on the streaming path.
   */
  async *stream(input, opts) {
    const sessionId = opts?.sessionId ?? this.config.sessionId ?? uuidv42();
    const userId = opts?.userId ?? this.config.userId;
    const inputText = typeof input === "string" ? input : getTextContent(input);
    const session = await this.sessionManager.getOrCreate(sessionId, userId);
    const ctx = new RunContext({
      sessionId,
      userId,
      metadata: opts?.metadata ?? {},
      eventBus: this.eventBus,
      sessionState: { ...session.state }
    });
    this.eventBus.emit("run.start", {
      runId: ctx.runId,
      agentName: this.name,
      input: inputText
    });
    let fullText = "";
    // Set only after the stream loop completes; gates the persistence step.
    let streamOk = false;
    try {
      if (this.config.hooks?.beforeRun) {
        await this.config.hooks.beforeRun(ctx);
      }
      if (this.config.guardrails?.input) {
        for (const guardrail of this.config.guardrails.input) {
          const result = await guardrail.validate(input, ctx);
          if (!result.pass) {
            throw new Error(
              `Input guardrail "${guardrail.name}" blocked: ${result.reason}`
            );
          }
        }
      }
      const messages = await this.buildMessages(input, sessionId, ctx);
      for await (const chunk of this.llmLoop.stream(messages, ctx, opts?.apiKey)) {
        if (chunk.type === "text") {
          fullText += chunk.text;
        }
        yield chunk;
      }
      streamOk = true;
    } catch (error) {
      const err = error instanceof Error ? error : new Error(String(error));
      if (this.config.hooks?.onError) {
        await this.config.hooks.onError(ctx, err);
      }
      this.eventBus.emit("run.error", {
        runId: ctx.runId,
        error: err
      });
      throw err;
    } finally {
      if (streamOk) {
        await this.sessionManager.appendMessages(sessionId, [
          { role: "user", content: inputText },
          { role: "assistant", content: fullText }
        ]);
        await this.sessionManager.updateState(sessionId, ctx.sessionState);
        if (this.config.memory) {
          await this.config.memory.addMessages(sessionId, [
            { role: "user", content: inputText },
            { role: "assistant", content: fullText }
          ]);
        }
        this.eventBus.emit("run.complete", {
          runId: ctx.runId,
          output: {
            text: fullText,
            toolCalls: [],
            usage: { promptTokens: 0, completionTokens: 0, totalTokens: 0 }
          }
        });
      }
    }
  }
  /**
   * Assemble the message list for the model: optional system prompt
   * (instructions + memory context), recent session history (unless
   * addHistoryToMessages is false), then the new user input.
   */
  async buildMessages(input, sessionId, ctx) {
    const messages = [];
    let systemContent = "";
    // Instructions may be a static string or a per-run function of ctx.
    if (this.config.instructions) {
      systemContent = typeof this.config.instructions === "function" ? this.config.instructions(ctx) : this.config.instructions;
    }
    // Memory context is appended to (or becomes) the system prompt.
    if (this.config.memory) {
      const memoryContext = await this.config.memory.getContextString(
        sessionId
      );
      if (memoryContext) {
        systemContent = systemContent ? `${systemContent}

${memoryContext}` : memoryContext;
      }
    }
    if (systemContent) {
      messages.push({ role: "system", content: systemContent });
    }
    if (this.config.addHistoryToMessages !== false) {
      // numHistoryRuns counts exchanges; each run stores 2 messages.
      const limit = this.config.numHistoryRuns ? this.config.numHistoryRuns * 2 : 20;
      const history = await this.sessionManager.getHistory(sessionId, limit);
      if (history.length > 0) {
        this.logger.info(`Loaded ${history.length} history messages for session ${sessionId}`);
        // Annotate the system prompt so the model treats history as context.
        if (messages.length > 0 && messages[0].role === "system") {
          messages[0] = {
            ...messages[0],
            content: `${getTextContent(messages[0].content)}

This is a multi-turn conversation. The previous messages in this session are included below. Use them to maintain context and answer questions about prior exchanges.`
          };
        }
      }
      messages.push(...history);
    }
    messages.push({ role: "user", content: input });
    this.logger.info(`Sending ${messages.length} messages to LLM: ${messages.map((m) => `[${m.role}: ${typeof m.content === "string" ? m.content.slice(0, 40) : "(multimodal)"}]`).join(", ")}`);
    return messages;
  }
};
847
+
848
+ // src/team/team.ts
849
+ import { v4 as uuidv43 } from "uuid";
850
+
851
+ // src/team/types.ts
852
// src/team/types.ts
// String enum of team orchestration modes (values are the wire strings used in
// TeamConfig.mode). Originally a transpiled TypeScript enum IIFE; a plain
// object literal yields the identical { key: "value" } shape because string
// enums produce no reverse mappings.
var TeamMode = {
  Coordinate: "coordinate",
  Route: "route",
  Broadcast: "broadcast",
  Collaborate: "collaborate"
};
859
+
860
+ // src/team/team.ts
861
// src/team/team.ts
/**
 * A multi-agent team. Runs an input through its member agents according to
 * the configured mode:
 *  - coordinate (default): a coordinator LLM plans subtasks, delegates them
 *    sequentially, then synthesizes a final answer;
 *  - route: a coordinator LLM picks exactly one member for the whole input;
 *  - broadcast: every member handles the input in parallel, then synthesize;
 *  - collaborate: members iterate in rounds until the LLM declares consensus.
 * Emits run.start / run.complete / run.error and team.delegate on the bus.
 */
var Team = class {
  name;
  eventBus;
  config;
  constructor(config) {
    this.config = config;
    this.name = config.name;
    this.eventBus = config.eventBus ?? new EventBus();
  }
  /**
   * Executes the team on `input` and returns { text, toolCalls, usage }.
   * Errors are normalized to Error instances, reported via run.error, and
   * rethrown.
   */
  async run(input, opts) {
    const ctx = new RunContext({
      sessionId: opts?.sessionId ?? uuidv43(),
      userId: opts?.userId,
      metadata: opts?.metadata ?? {},
      eventBus: this.eventBus,
      sessionState: { ...this.config.sessionState ?? {} }
    });
    this.eventBus.emit("run.start", {
      runId: ctx.runId,
      agentName: this.name,
      input
    });
    try {
      let output;
      switch (this.config.mode) {
        case "route" /* Route */:
          output = await this.runRouteMode(input, ctx);
          break;
        case "broadcast" /* Broadcast */:
          output = await this.runBroadcastMode(input, ctx);
          break;
        case "collaborate" /* Collaborate */:
          output = await this.runCollaborateMode(input, ctx);
          break;
        case "coordinate" /* Coordinate */:
        default:
          output = await this.runCoordinateMode(input, ctx);
          break;
      }
      this.eventBus.emit("run.complete", { runId: ctx.runId, output });
      return output;
    } catch (error) {
      const err = error instanceof Error ? error : new Error(String(error));
      this.eventBus.emit("run.error", { runId: ctx.runId, error: err });
      throw err;
    }
  }
  /**
   * Pseudo-streaming: runs the team to completion, then yields the whole text
   * followed by a finish chunk. No incremental tokens are produced.
   */
  async *stream(input, opts) {
    const result = await this.run(input, opts);
    yield { type: "text", text: result.text };
    yield { type: "finish", finishReason: "stop", usage: result.usage };
  }
  /**
   * Coordinate mode: ask the model for a JSON delegation plan, run each
   * delegated task sequentially on its member, then synthesize the member
   * outputs into a single answer. Reported usage covers only the synthesis
   * call.
   */
  async runCoordinateMode(input, ctx) {
    const memberDescriptions = this.buildMemberDescriptions();
    const planPrompt = this.buildCoordinatorPrompt(
      input,
      memberDescriptions,
      "coordinate"
    );
    const planResponse = await this.config.model.generate([
      { role: "system", content: planPrompt },
      { role: "user", content: input }
    ]);
    const delegations = this.parseDelegationPlan(
      getTextContent(planResponse.message.content)
    );
    const memberOutputs = [];
    for (const delegation of delegations) {
      const member = this.findMember(delegation.memberId);
      // Silently skip delegations to unknown member names.
      if (!member) continue;
      this.eventBus.emit("team.delegate", {
        runId: ctx.runId,
        memberId: delegation.memberId,
        task: delegation.task
      });
      const output = await member.run(delegation.task, {
        sessionId: ctx.sessionId
      });
      memberOutputs.push({ memberId: delegation.memberId, output });
    }
    const synthesisPrompt = this.buildSynthesisPrompt(
      input,
      memberOutputs
    );
    const synthesisResponse = await this.config.model.generate([
      { role: "user", content: synthesisPrompt }
    ]);
    return {
      text: getTextContent(synthesisResponse.message.content),
      toolCalls: memberOutputs.flatMap((o) => o.output.toolCalls),
      usage: synthesisResponse.usage
    };
  }
  /**
   * Route mode: ask the model for a single member name and forward the raw
   * input to that member. If the name does not match any member
   * (case-insensitive), returns an explanatory error text instead of
   * throwing.
   */
  async runRouteMode(input, ctx) {
    const memberDescriptions = this.buildMemberDescriptions();
    const routePrompt = this.buildCoordinatorPrompt(
      input,
      memberDescriptions,
      "route"
    );
    const routeResponse = await this.config.model.generate([
      { role: "system", content: routePrompt },
      { role: "user", content: input }
    ]);
    const selectedName = getTextContent(routeResponse.message.content).trim();
    const member = this.findMember(selectedName);
    if (!member) {
      return {
        text: `Could not route to member "${selectedName}". Available: ${this.config.members.map((m) => m.name).join(", ")}`,
        toolCalls: [],
        usage: routeResponse.usage
      };
    }
    this.eventBus.emit("team.delegate", {
      runId: ctx.runId,
      memberId: member.name,
      task: input
    });
    return member.run(input, { sessionId: ctx.sessionId });
  }
  /**
   * Broadcast mode: every member receives the same input concurrently; the
   * model then synthesizes all responses. Usage covers only the synthesis
   * call.
   */
  async runBroadcastMode(input, ctx) {
    for (const member of this.config.members) {
      this.eventBus.emit("team.delegate", {
        runId: ctx.runId,
        memberId: member.name,
        task: input
      });
    }
    const outputs = await Promise.all(
      this.config.members.map(
        (member) => member.run(input, { sessionId: ctx.sessionId })
      )
    );
    const memberOutputs = this.config.members.map((member, i) => ({
      memberId: member.name,
      output: outputs[i]
    }));
    const synthesisPrompt = this.buildSynthesisPrompt(input, memberOutputs);
    const synthesisResponse = await this.config.model.generate([
      { role: "user", content: synthesisPrompt }
    ]);
    return {
      text: getTextContent(synthesisResponse.message.content),
      toolCalls: outputs.flatMap((o) => o.toolCalls),
      usage: synthesisResponse.usage
    };
  }
  /**
   * Collaborate mode: up to `maxRounds` (default 3) rounds where all members
   * answer the current input in parallel and the model judges consensus.
   * "CONSENSUS: ..." ends the loop with a final answer; "FOLLOW_UP: ..."
   * (or any other reply) becomes the next round's input.
   *
   * Fix: the no-consensus fallback previously synthesized from an EMPTY
   * member-output list and returned `toolCalls: []`, discarding all work done
   * during the rounds. We now keep the last round's outputs and synthesize
   * from those.
   */
  async runCollaborateMode(input, ctx) {
    const maxRounds = this.config.maxRounds ?? 3;
    let currentInput = input;
    let finalOutput = null;
    // Most recent round's raw outputs and labeled outputs, for the fallback.
    let lastOutputs = [];
    let lastMemberOutputs = [];
    for (let round = 0; round < maxRounds; round++) {
      for (const member of this.config.members) {
        this.eventBus.emit("team.delegate", {
          runId: ctx.runId,
          memberId: member.name,
          task: currentInput
        });
      }
      const outputs = await Promise.all(
        this.config.members.map(
          (member) => member.run(currentInput, { sessionId: ctx.sessionId })
        )
      );
      const memberOutputs = this.config.members.map((member, i) => ({
        memberId: member.name,
        output: outputs[i]
      }));
      lastOutputs = outputs;
      lastMemberOutputs = memberOutputs;
      const consensusPrompt = `Given the following responses to "${input}", determine if there is consensus. If yes, synthesize a final answer. If not, provide a follow-up question.

${memberOutputs.map((o) => `${o.memberId}: ${o.output.text}`).join("\n\n")}

Respond with either "CONSENSUS: <final answer>" or "FOLLOW_UP: <question>"`;
      const consensusResponse = await this.config.model.generate([
        { role: "user", content: consensusPrompt }
      ]);
      const responseText = getTextContent(consensusResponse.message.content);
      if (responseText.startsWith("CONSENSUS:")) {
        finalOutput = {
          text: responseText.slice("CONSENSUS:".length).trim(),
          toolCalls: outputs.flatMap((o) => o.toolCalls),
          usage: consensusResponse.usage
        };
        break;
      }
      currentInput = responseText.startsWith("FOLLOW_UP:") ? responseText.slice("FOLLOW_UP:".length).trim() : responseText;
    }
    if (!finalOutput) {
      // No consensus within maxRounds: synthesize from the final round's
      // member responses rather than from nothing.
      const lastSynthesis = this.buildSynthesisPrompt(input, lastMemberOutputs);
      const response = await this.config.model.generate([
        { role: "user", content: lastSynthesis }
      ]);
      finalOutput = {
        text: getTextContent(response.message.content),
        toolCalls: lastOutputs.flatMap((o) => o.toolCalls),
        usage: response.usage
      };
    }
    return finalOutput;
  }
  // One "- name: description" line per member, fed to coordinator prompts.
  buildMemberDescriptions() {
    return this.config.members.map((member) => {
      const desc = typeof member.instructions === "function" ? "(dynamic instructions)" : member.instructions ?? "General-purpose agent";
      return `- ${member.name}: ${desc}`;
    }).join("\n");
  }
  // System prompt for the coordinator model; "route" asks for one member
  // name, anything else asks for a JSON delegation plan.
  buildCoordinatorPrompt(input, memberDescriptions, mode) {
    if (mode === "route") {
      return `You are a team coordinator. Based on the user's request, select the single most appropriate team member to handle it. Available members:
${memberDescriptions}

Respond with ONLY the member name, nothing else.`;
    }
    return `You are a team coordinator. Break down the user's request into subtasks and delegate to appropriate team members. Available members:
${memberDescriptions}

Respond with a JSON array of delegations: [{"memberId": "name", "task": "specific task"}]
${this.config.instructions ? `
Additional instructions: ${this.config.instructions}` : ""}`;
  }
  // Prompt asking the model to merge member responses into one answer.
  buildSynthesisPrompt(originalInput, memberOutputs) {
    const outputsText = memberOutputs.map((o) => `### ${o.memberId}
${o.output.text}`).join("\n\n");
    return `Original request: ${originalInput}

Team member responses:
${outputsText}

Synthesize these responses into a single coherent answer.`;
  }
  /**
   * Extracts the first [...] JSON array from the coordinator reply. On parse
   * failure, falls back to delegating the raw content to every member.
   */
  parseDelegationPlan(content) {
    try {
      const jsonMatch = content.match(/\[[\s\S]*\]/);
      if (jsonMatch) {
        return JSON.parse(jsonMatch[0]);
      }
    } catch {
      // Malformed JSON: intentionally ignored; fall through to broadcast.
    }
    return this.config.members.map((m) => ({
      memberId: m.name,
      task: content
    }));
  }
  // Case-insensitive member lookup by name.
  findMember(name) {
    return this.config.members.find(
      (m) => m.name.toLowerCase() === name.toLowerCase()
    );
  }
};
1110
+
1111
+ // src/workflow/workflow.ts
1112
+ import { v4 as uuidv44 } from "uuid";
1113
+
1114
+ // src/workflow/step-runner.ts
1115
// True when the step object carries an `agent` property (agent step shape).
function isAgentStep(step) {
  return Reflect.has(step, "agent");
}
1118
// True when the step object carries a `run` property (function step shape).
function isFunctionStep(step) {
  return Reflect.has(step, "run");
}
1121
// True when the step object carries a `condition` property (condition step).
function isConditionStep(step) {
  return Reflect.has(step, "condition");
}
1124
// True when the step object carries a `parallel` property (parallel step).
function isParallelStep(step) {
  return Reflect.has(step, "parallel");
}
1127
// Executes workflow steps (agent, function, condition, parallel), threading a
// state object through the sequence and applying an optional retry policy to
// agent/function steps. Emits "workflow.step" lifecycle events on ctx.eventBus.
var StepRunner = class {
  // Optional retry policy; fields read below are maxRetries and backoffMs.
  retryPolicy;
  constructor(retryPolicy) {
    this.retryPolicy = retryPolicy;
  }
  // Runs `steps` in order, feeding each step the state returned by the
  // previous one. Returns the final state plus the flat list of StepResults.
  async executeSteps(steps, state, ctx) {
    let currentState = { ...state };
    const allResults = [];
    for (const step of steps) {
      const { state: newState, results } = await this.executeStep(
        step,
        currentState,
        ctx
      );
      currentState = newState;
      allResults.push(...results);
    }
    return { state: currentState, results: allResults };
  }
  // Dispatches one step by its shape (marker property). Check order matters
  // only if a step object carried multiple marker properties; an unknown
  // shape is a no-op that passes state through unchanged.
  async executeStep(step, state, ctx) {
    if (isConditionStep(step)) {
      return this.executeConditionStep(step, state, ctx);
    }
    if (isParallelStep(step)) {
      return this.executeParallelStep(step, state, ctx);
    }
    if (isAgentStep(step)) {
      return this.executeAgentStep(step, state, ctx);
    }
    if (isFunctionStep(step)) {
      return this.executeFunctionStep(step, state, ctx);
    }
    return { state, results: [] };
  }
  // Agent step: derives the agent input via step.inputFrom(state) (or a JSON
  // dump of the whole state), runs the agent, and records its text output in
  // state under the key `<stepName>_output`.
  async executeAgentStep(step, state, ctx) {
    const startTime = Date.now();
    ctx.eventBus.emit("workflow.step", {
      runId: ctx.runId,
      stepName: step.name,
      status: "start"
    });
    const execute = async () => {
      const input = step.inputFrom ? step.inputFrom(state) : JSON.stringify(state);
      const output = await step.agent.run(input, {
        sessionId: ctx.sessionId
      });
      const newState = {
        ...state,
        [`${step.name}_output`]: output.text
      };
      // Deliberate in-place mutation: the same `state` object captured by the
      // closure is also the one returned below, so retries and callers see
      // the update.
      Object.assign(state, newState);
      return {
        stepName: step.name,
        status: "done",
        durationMs: Date.now() - startTime
      };
    };
    const result = await this.withRetry(step.name, execute, ctx);
    return { state, results: [result] };
  }
  // Function step: runs step.run(state, ctx) and merges the returned patch
  // into state in place.
  async executeFunctionStep(step, state, ctx) {
    const startTime = Date.now();
    ctx.eventBus.emit("workflow.step", {
      runId: ctx.runId,
      stepName: step.name,
      status: "start"
    });
    const execute = async () => {
      const patch = await step.run(state, ctx);
      // In-place merge so the patch survives retries and is visible to the
      // caller through the returned state reference.
      Object.assign(state, patch);
      return {
        stepName: step.name,
        status: "done",
        durationMs: Date.now() - startTime
      };
    };
    const result = await this.withRetry(step.name, execute, ctx);
    return { state, results: [result] };
  }
  // Condition step: when step.condition(state) is truthy, runs the nested
  // steps and prepends a "done" result for the condition itself; otherwise
  // reports the condition step as "skipped" and leaves state untouched.
  async executeConditionStep(step, state, ctx) {
    const startTime = Date.now();
    ctx.eventBus.emit("workflow.step", {
      runId: ctx.runId,
      stepName: step.name,
      status: "start"
    });
    if (step.condition(state)) {
      const { state: newState, results } = await this.executeSteps(
        step.steps,
        state,
        ctx
      );
      ctx.eventBus.emit("workflow.step", {
        runId: ctx.runId,
        stepName: step.name,
        status: "done"
      });
      return {
        state: newState,
        results: [
          {
            stepName: step.name,
            status: "done",
            durationMs: Date.now() - startTime
          },
          ...results
        ]
      };
    }
    ctx.eventBus.emit("workflow.step", {
      runId: ctx.runId,
      stepName: step.name,
      status: "done"
    });
    return {
      state,
      results: [
        {
          stepName: step.name,
          status: "skipped",
          durationMs: Date.now() - startTime
        }
      ]
    };
  }
  // Parallel step: runs each child step concurrently on its own shallow copy
  // of state, then shallow-merges the resulting states in array order (later
  // children win on key conflicts). A rejected child becomes an "error"
  // result attributed to the parallel step's own name; the rest still merge.
  async executeParallelStep(step, state, ctx) {
    const startTime = Date.now();
    ctx.eventBus.emit("workflow.step", {
      runId: ctx.runId,
      stepName: step.name,
      status: "start"
    });
    const settled = await Promise.allSettled(
      step.parallel.map((s) => this.executeStep(s, { ...state }, ctx))
    );
    const allResults = [];
    let mergedState = { ...state };
    for (const result of settled) {
      if (result.status === "fulfilled") {
        Object.assign(mergedState, result.value.state);
        allResults.push(...result.value.results);
      } else {
        allResults.push({
          stepName: step.name,
          status: "error",
          error: result.reason?.message ?? "Unknown error",
          durationMs: Date.now() - startTime
        });
      }
    }
    ctx.eventBus.emit("workflow.step", {
      runId: ctx.runId,
      stepName: step.name,
      status: "done"
    });
    return {
      state: mergedState,
      results: [
        {
          stepName: step.name,
          status: "done",
          durationMs: Date.now() - startTime
        },
        ...allResults
      ]
    };
  }
  // Runs fn with up to maxRetries retries and exponential backoff
  // (backoffMs * 2^attempt). Never throws: on final failure it emits an
  // "error" event and returns an error StepResult (with durationMs 0) so the
  // workflow keeps going. Note each successful attempt also emits a second
  // "done" event on top of the one the executor's caller may emit.
  async withRetry(stepName, fn, ctx) {
    const maxRetries = this.retryPolicy?.maxRetries ?? 0;
    const backoffMs = this.retryPolicy?.backoffMs ?? 1e3;
    for (let attempt = 0; attempt <= maxRetries; attempt++) {
      try {
        const result = await fn();
        ctx.eventBus.emit("workflow.step", {
          runId: ctx.runId,
          stepName,
          status: "done"
        });
        return result;
      } catch (error) {
        if (attempt === maxRetries) {
          const err = error instanceof Error ? error : new Error(String(error));
          ctx.eventBus.emit("workflow.step", {
            runId: ctx.runId,
            stepName,
            status: "error"
          });
          return {
            stepName,
            status: "error",
            error: err.message,
            durationMs: 0
          };
        }
        await new Promise(
          (resolve) => setTimeout(resolve, backoffMs * Math.pow(2, attempt))
        );
      }
    }
    // Unreachable in practice (the loop always returns); kept as a safeguard.
    return { stepName, status: "error", error: "Exhausted retries", durationMs: 0 };
  }
};
1329
+
1330
+ // src/workflow/workflow.ts
1331
// src/workflow/workflow.ts
/**
 * A named workflow: runs its configured steps through a StepRunner, starting
 * from `config.initialState`, and reports lifecycle events ("run.start",
 * "run.complete", "run.error") on the event bus. `run` resolves to
 * { state, stepResults }.
 */
var Workflow = class {
  name;
  eventBus;
  config;
  stepRunner;
  constructor(config) {
    this.config = config;
    this.name = config.name;
    this.eventBus = config.eventBus ?? new EventBus();
    this.stepRunner = new StepRunner(config.retryPolicy);
  }
  /**
   * Executes all steps against a shallow copy of the initial state.
   * Failures are normalized to Error, announced via "run.error", and
   * rethrown.
   */
  async run(opts) {
    const runCtx = new RunContext({
      sessionId: opts?.sessionId ?? uuidv44(),
      userId: opts?.userId,
      eventBus: this.eventBus,
      sessionState: {}
    });
    this.eventBus.emit("run.start", {
      runId: runCtx.runId,
      agentName: `workflow:${this.name}`,
      input: JSON.stringify(this.config.initialState)
    });
    try {
      const outcome = await this.stepRunner.executeSteps(
        this.config.steps,
        { ...this.config.initialState },
        runCtx
      );
      // The completion event reuses the agent-run output shape; a workflow
      // has no token usage or tool calls of its own.
      this.eventBus.emit("run.complete", {
        runId: runCtx.runId,
        output: {
          text: JSON.stringify(outcome.state),
          toolCalls: [],
          usage: { promptTokens: 0, completionTokens: 0, totalTokens: 0 }
        }
      });
      return { state: outcome.state, stepResults: outcome.results };
    } catch (cause) {
      const err = cause instanceof Error ? cause : new Error(String(cause));
      this.eventBus.emit("run.error", { runId: runCtx.runId, error: err });
      throw err;
    }
  }
};
1380
+
1381
+ // src/models/providers/openai.ts
1382
+ import { createRequire as createRequire3 } from "module";
1383
+ var _require3 = createRequire3(import.meta.url);
1384
// Model provider backed by the `openai` npm package (loaded lazily via
// createRequire so the dependency stays optional). Translates the framework's
// message/tool/response shapes to and from the Chat Completions API.
var OpenAIProvider = class {
  providerId = "openai";
  modelId;
  client;
  OpenAICtor;
  baseURL;
  // Per-API-key client cache for request-scoped keys (see getClient).
  clientCache = /* @__PURE__ */ new Map();
  constructor(modelId, config) {
    this.modelId = modelId;
    this.baseURL = config?.baseURL;
    try {
      const mod = _require3("openai");
      this.OpenAICtor = mod.default ?? mod;
      const key = config?.apiKey ?? process.env.OPENAI_API_KEY;
      // Only pre-build a client when a key is available; otherwise defer to
      // getClient, which can use a per-request key.
      if (key) {
        this.client = new this.OpenAICtor({ apiKey: key, baseURL: config?.baseURL });
      }
    } catch {
      // NOTE(review): this catch also masks errors thrown by the client
      // constructor, reporting them as a missing package — confirm intended.
      throw new Error(
        "openai package is required for OpenAIProvider. Install it: npm install openai"
      );
    }
  }
  // Resolves a client: explicit per-request key (cached per key) > client
  // built in the constructor > lazily from OPENAI_API_KEY. Throws when no
  // key can be found.
  getClient(apiKey) {
    if (apiKey) {
      let cached = this.clientCache.get(apiKey);
      if (!cached) {
        cached = new this.OpenAICtor({ apiKey, baseURL: this.baseURL });
        this.clientCache.set(apiKey, cached);
      }
      return cached;
    }
    if (this.client) return this.client;
    const envKey = process.env.OPENAI_API_KEY;
    if (envKey) {
      this.client = new this.OpenAICtor({ apiKey: envKey, baseURL: this.baseURL });
      return this.client;
    }
    throw new Error("No OpenAI API key provided. Pass it via the x-openai-api-key header, apiKey in request body, or set OPENAI_API_KEY env var.");
  }
  // One-shot completion. Options are mapped to Chat Completions params
  // (maxTokens -> max_tokens, topP -> top_p, etc.); unset options are omitted
  // so the API defaults apply.
  async generate(messages, options) {
    const params = {
      model: this.modelId,
      messages: this.toOpenAIMessages(messages)
    };
    if (options?.temperature !== void 0)
      params.temperature = options.temperature;
    if (options?.maxTokens !== void 0)
      params.max_tokens = options.maxTokens;
    if (options?.topP !== void 0) params.top_p = options.topP;
    if (options?.stop) params.stop = options.stop;
    this.applyResponseFormat(params, options);
    if (options?.tools?.length) {
      params.tools = this.toOpenAITools(options.tools);
    }
    const client = this.getClient(options?.apiKey);
    const response = await client.chat.completions.create(params);
    return this.normalizeResponse(response);
  }
  // Streaming completion. Text deltas are yielded directly; tool-call deltas
  // are accumulated per choice index so fragmented arguments can be stitched
  // together and attributed to the right call id.
  async *stream(messages, options) {
    const params = {
      model: this.modelId,
      messages: this.toOpenAIMessages(messages),
      stream: true
    };
    if (options?.temperature !== void 0)
      params.temperature = options.temperature;
    if (options?.maxTokens !== void 0)
      params.max_tokens = options.maxTokens;
    if (options?.topP !== void 0) params.top_p = options.topP;
    if (options?.stop) params.stop = options.stop;
    this.applyResponseFormat(params, options);
    if (options?.tools?.length) {
      params.tools = this.toOpenAITools(options.tools);
    }
    const client = this.getClient(options?.apiKey);
    const stream = await client.chat.completions.create(params);
    // index -> { id, name, args } for tool calls still being streamed.
    const activeToolCalls = /* @__PURE__ */ new Map();
    for await (const chunk of stream) {
      const choice = chunk.choices?.[0];
      if (!choice) continue;
      const delta = choice.delta;
      if (delta?.content) {
        yield { type: "text", text: delta.content };
      }
      if (delta?.tool_calls) {
        for (const tc of delta.tool_calls) {
          const idx = tc.index ?? 0;
          // A chunk with an id opens a new tool call at this index.
          if (tc.id) {
            activeToolCalls.set(idx, {
              id: tc.id,
              name: tc.function?.name ?? "",
              args: tc.function?.arguments ?? ""
            });
            yield {
              type: "tool_call_start",
              toolCall: {
                id: tc.id,
                name: tc.function?.name ?? ""
              }
            };
          } else if (tc.function?.arguments) {
            // Continuation chunk: append argument fragments to the open call.
            const existing = activeToolCalls.get(idx);
            if (existing) {
              existing.args += tc.function.arguments;
              yield {
                type: "tool_call_delta",
                toolCallId: existing.id,
                argumentsDelta: tc.function.arguments
              };
            }
          }
          // Some chunks carry the function name separately from the id.
          if (tc.function?.name && !tc.id) {
            const existing = activeToolCalls.get(idx);
            if (existing) {
              existing.name = tc.function.name;
            }
          }
        }
      }
      if (choice.finish_reason) {
        // NOTE(review): activeToolCalls is never cleared, so a stream that
        // produced more than one finish_reason chunk would re-emit
        // tool_call_end events — confirm single-choice streams only.
        for (const [, tc] of activeToolCalls) {
          yield { type: "tool_call_end", toolCallId: tc.id };
        }
        yield {
          type: "finish",
          finishReason: choice.finish_reason === "tool_calls" ? "tool_calls" : choice.finish_reason,
          // NOTE(review): chat-completions streams only include usage when
          // requested via stream_options.include_usage, which is not set
          // here, so this usage is presumably always undefined — confirm
          // against the SDK/API version in use.
          usage: chunk.usage ? {
            promptTokens: chunk.usage.prompt_tokens ?? 0,
            completionTokens: chunk.usage.completion_tokens ?? 0,
            totalTokens: chunk.usage.total_tokens ?? 0
          } : void 0
        };
      }
    }
  }
  // Maps the provider-agnostic responseFormat option onto OpenAI's
  // response_format param: "json" -> json_object mode, "text" -> API default
  // (no param), an object -> strict json_schema mode.
  applyResponseFormat(params, options) {
    if (!options?.responseFormat) return;
    if (options.responseFormat === "json") {
      params.response_format = { type: "json_object" };
    } else if (options.responseFormat === "text") {
    } else if (typeof options.responseFormat === "object") {
      params.response_format = {
        type: "json_schema",
        json_schema: {
          name: options.responseFormat.name ?? "response",
          schema: options.responseFormat.schema,
          strict: true
        }
      };
    }
  }
  // Converts framework messages to Chat Completions messages: assistant
  // tool calls become tool_calls entries with stringified arguments, tool
  // results become role:"tool" messages, and multimodal content is expanded
  // part by part.
  toOpenAIMessages(messages) {
    return messages.map((msg) => {
      if (msg.role === "assistant" && msg.toolCalls?.length) {
        return {
          role: "assistant",
          content: getTextContent(msg.content),
          tool_calls: msg.toolCalls.map((tc) => ({
            id: tc.id,
            type: "function",
            function: {
              name: tc.name,
              arguments: JSON.stringify(tc.arguments)
            }
          }))
        };
      }
      if (msg.role === "tool") {
        return {
          role: "tool",
          tool_call_id: msg.toolCallId,
          content: getTextContent(msg.content)
        };
      }
      if (isMultiModal(msg.content)) {
        return {
          role: msg.role,
          content: msg.content.map((part) => this.partToOpenAI(part))
        };
      }
      return {
        role: msg.role,
        content: msg.content ?? ""
      };
    });
  }
  // Converts one multimodal part to an OpenAI content part. Images accept
  // either an http(s) URL or raw base64 (wrapped in a data: URL); audio is
  // passed as input_audio; other files degrade to a text placeholder.
  partToOpenAI(part) {
    switch (part.type) {
      case "text":
        return { type: "text", text: part.text };
      case "image": {
        const isUrl = part.data.startsWith("http://") || part.data.startsWith("https://");
        return {
          type: "image_url",
          image_url: {
            url: isUrl ? part.data : `data:${part.mimeType ?? "image/png"};base64,${part.data}`
          }
        };
      }
      case "audio":
        return {
          type: "input_audio",
          input_audio: {
            data: part.data,
            format: part.mimeType?.split("/")[1] ?? "mp3"
          }
        };
      case "file":
        return {
          type: "text",
          text: `[File: ${part.filename ?? "attachment"} (${part.mimeType})]`
        };
    }
  }
  // Framework tool definitions -> OpenAI function-tool definitions.
  toOpenAITools(tools) {
    return tools.map((t) => ({
      type: "function",
      function: {
        name: t.name,
        description: t.description,
        parameters: t.parameters
      }
    }));
  }
  // Normalizes a non-streaming response: parses tool-call argument JSON
  // (empty string -> {}), zero-fills usage, and maps finish_reason onto the
  // framework's finishReason values. Only the first choice is used.
  normalizeResponse(response) {
    const choice = response.choices[0];
    const msg = choice.message;
    const toolCalls = (msg.tool_calls ?? []).map((tc) => ({
      id: tc.id,
      name: tc.function.name,
      arguments: JSON.parse(tc.function.arguments || "{}")
    }));
    const usage = {
      promptTokens: response.usage?.prompt_tokens ?? 0,
      completionTokens: response.usage?.completion_tokens ?? 0,
      totalTokens: response.usage?.total_tokens ?? 0
    };
    let finishReason = "stop";
    if (choice.finish_reason === "tool_calls") finishReason = "tool_calls";
    else if (choice.finish_reason === "length") finishReason = "length";
    else if (choice.finish_reason === "content_filter")
      finishReason = "content_filter";
    return {
      message: {
        role: "assistant",
        content: msg.content ?? null,
        toolCalls: toolCalls.length > 0 ? toolCalls : void 0
      },
      usage,
      finishReason,
      raw: response
    };
  }
};
1639
+
1640
+ // src/models/providers/anthropic.ts
1641
+ import { createRequire as createRequire4 } from "module";
1642
+ var _require4 = createRequire4(import.meta.url);
1643
// Model provider backed by `@anthropic-ai/sdk` (loaded lazily via
// createRequire so the dependency stays optional). Translates the
// framework's message/tool/response shapes to and from the Messages API.
var AnthropicProvider = class {
  providerId = "anthropic";
  modelId;
  client;
  AnthropicCtor;
  // Per-API-key client cache for request-scoped keys (see getClient).
  clientCache = /* @__PURE__ */ new Map();
  constructor(modelId, config) {
    this.modelId = modelId;
    try {
      const mod = _require4("@anthropic-ai/sdk");
      this.AnthropicCtor = mod.default ?? mod;
      const key = config?.apiKey ?? process.env.ANTHROPIC_API_KEY;
      // Only pre-build a client when a key is available; otherwise defer to
      // getClient, which can use a per-request key.
      if (key) {
        this.client = new this.AnthropicCtor({ apiKey: key });
      }
    } catch {
      throw new Error(
        "@anthropic-ai/sdk is required for AnthropicProvider. Install it: npm install @anthropic-ai/sdk"
      );
    }
  }
  // Resolves a client: explicit per-request key (cached per key) > client
  // built in the constructor > lazily from ANTHROPIC_API_KEY. Throws when no
  // key can be found.
  getClient(apiKey) {
    if (apiKey) {
      let cached = this.clientCache.get(apiKey);
      if (!cached) {
        cached = new this.AnthropicCtor({ apiKey });
        this.clientCache.set(apiKey, cached);
      }
      return cached;
    }
    if (this.client) return this.client;
    const envKey = process.env.ANTHROPIC_API_KEY;
    if (envKey) {
      this.client = new this.AnthropicCtor({ apiKey: envKey });
      return this.client;
    }
    throw new Error("No Anthropic API key provided. Pass it via the x-anthropic-api-key header, apiKey in request body, or set ANTHROPIC_API_KEY env var.");
  }
  // One-shot completion. The Messages API requires max_tokens, so a default
  // of 4096 is applied when the caller does not set maxTokens.
  async generate(messages, options) {
    const { systemMsg, anthropicMessages } = this.toAnthropicMessages(messages);
    const params = {
      model: this.modelId,
      messages: anthropicMessages,
      max_tokens: options?.maxTokens ?? 4096
    };
    if (systemMsg) params.system = systemMsg;
    if (options?.temperature !== void 0)
      params.temperature = options.temperature;
    if (options?.topP !== void 0) params.top_p = options.topP;
    if (options?.stop) params.stop_sequences = options.stop;
    if (options?.tools?.length) {
      params.tools = this.toAnthropicTools(options.tools);
    }
    const client = this.getClient(options?.apiKey);
    const response = await client.messages.create(params);
    return this.normalizeResponse(response);
  }
  // Streaming completion. Maps Anthropic stream events onto the framework's
  // chunk types; tool-use argument deltas are attributed to the most recent
  // tool_use content block.
  //
  // Fix: the message_start handler used to read event.message?.usage into an
  // empty if-body, discarding the input-token count, so the finish chunk
  // always reported promptTokens: 0 and totalTokens equal to output tokens
  // only. The prompt tokens are now captured at message_start and folded
  // into the usage emitted with the finish chunk.
  async *stream(messages, options) {
    const { systemMsg, anthropicMessages } = this.toAnthropicMessages(messages);
    const params = {
      model: this.modelId,
      messages: anthropicMessages,
      max_tokens: options?.maxTokens ?? 4096,
      stream: true
    };
    if (systemMsg) params.system = systemMsg;
    if (options?.temperature !== void 0)
      params.temperature = options.temperature;
    if (options?.topP !== void 0) params.top_p = options.topP;
    if (options?.stop) params.stop_sequences = options.stop;
    if (options?.tools?.length) {
      params.tools = this.toAnthropicTools(options.tools);
    }
    const client = this.getClient(options?.apiKey);
    const stream = await client.messages.create(params);
    let currentToolId = "";
    // Input-token count reported once in message_start, reused at finish.
    let promptTokens = 0;
    for await (const event of stream) {
      switch (event.type) {
        case "message_start": {
          if (event.message?.usage) {
            promptTokens = event.message.usage.input_tokens ?? 0;
          }
          break;
        }
        case "content_block_start": {
          if (event.content_block?.type === "tool_use") {
            currentToolId = event.content_block.id;
            yield {
              type: "tool_call_start",
              toolCall: {
                id: event.content_block.id,
                name: event.content_block.name
              }
            };
          }
          break;
        }
        case "content_block_delta": {
          if (event.delta?.type === "text_delta") {
            yield { type: "text", text: event.delta.text };
          } else if (event.delta?.type === "input_json_delta") {
            yield {
              type: "tool_call_delta",
              toolCallId: currentToolId,
              argumentsDelta: event.delta.partial_json
            };
          }
          break;
        }
        case "content_block_stop": {
          if (currentToolId) {
            yield { type: "tool_call_end", toolCallId: currentToolId };
            currentToolId = "";
          }
          break;
        }
        case "message_delta": {
          const completionTokens = event.usage?.output_tokens ?? 0;
          const usage = event.usage ? {
            promptTokens,
            completionTokens,
            totalTokens: promptTokens + completionTokens
          } : void 0;
          // Map Anthropic stop reasons onto the framework's finish reasons.
          let finishReason = event.delta?.stop_reason ?? "stop";
          if (finishReason === "tool_use") finishReason = "tool_calls";
          if (finishReason === "end_turn") finishReason = "stop";
          yield { type: "finish", finishReason, usage };
          break;
        }
      }
    }
  }
  // Splits framework messages into an optional system string plus Messages
  // API entries. System messages are lifted out (the last one wins); tool
  // results are sent as user messages containing tool_result blocks, as the
  // Messages API requires.
  toAnthropicMessages(messages) {
    let systemMsg;
    const anthropicMessages = [];
    for (const msg of messages) {
      if (msg.role === "system") {
        systemMsg = getTextContent(msg.content) || void 0;
        continue;
      }
      if (msg.role === "user") {
        if (isMultiModal(msg.content)) {
          anthropicMessages.push({
            role: "user",
            content: msg.content.map((p) => this.partToAnthropic(p))
          });
        } else {
          anthropicMessages.push({
            role: "user",
            content: [{ type: "text", text: msg.content ?? "" }]
          });
        }
        continue;
      }
      if (msg.role === "assistant") {
        const content = [];
        // NOTE(review): assistant content is pushed as-is into a text block;
        // presumably assistant messages are always plain strings here (a
        // multimodal array would need getTextContent) — confirm upstream.
        if (msg.content) {
          content.push({ type: "text", text: msg.content });
        }
        if (msg.toolCalls) {
          for (const tc of msg.toolCalls) {
            content.push({
              type: "tool_use",
              id: tc.id,
              name: tc.name,
              input: tc.arguments
            });
          }
        }
        anthropicMessages.push({
          role: "assistant",
          // The API rejects empty content arrays, so fall back to empty text.
          content: content.length > 0 ? content : [{ type: "text", text: "" }]
        });
        continue;
      }
      if (msg.role === "tool") {
        anthropicMessages.push({
          role: "user",
          content: [
            {
              type: "tool_result",
              tool_use_id: msg.toolCallId,
              content: msg.content ?? ""
            }
          ]
        });
        continue;
      }
    }
    return { systemMsg, anthropicMessages };
  }
  // Converts one multimodal part to an Anthropic content block. Images take
  // a URL or base64 source; PDFs become document blocks; audio and other
  // files degrade to text placeholders (the Messages API has no audio input).
  partToAnthropic(part) {
    switch (part.type) {
      case "text":
        return { type: "text", text: part.text };
      case "image": {
        const isUrl = part.data.startsWith("http://") || part.data.startsWith("https://");
        if (isUrl) {
          return { type: "image", source: { type: "url", url: part.data } };
        }
        return {
          type: "image",
          source: {
            type: "base64",
            media_type: part.mimeType ?? "image/png",
            data: part.data
          }
        };
      }
      case "audio":
        return {
          type: "text",
          text: `[Audio content: ${part.mimeType ?? "audio"}]`
        };
      case "file":
        if (part.mimeType === "application/pdf") {
          return {
            type: "document",
            source: { type: "base64", media_type: "application/pdf", data: part.data }
          };
        }
        return {
          type: "text",
          text: `[File: ${part.filename ?? "attachment"} (${part.mimeType})]`
        };
    }
  }
  // Framework tool definitions -> Anthropic tool definitions.
  toAnthropicTools(tools) {
    return tools.map((t) => ({
      name: t.name,
      description: t.description,
      input_schema: t.parameters
    }));
  }
  // Normalizes a non-streaming response: concatenates text blocks, collects
  // tool_use blocks as tool calls, sums input/output tokens, and maps
  // stop_reason onto the framework's finishReason values.
  normalizeResponse(response) {
    const toolCalls = [];
    let textContent = "";
    for (const block of response.content ?? []) {
      if (block.type === "text") {
        textContent += block.text;
      } else if (block.type === "tool_use") {
        toolCalls.push({
          id: block.id,
          name: block.name,
          arguments: block.input ?? {}
        });
      }
    }
    const usage = {
      promptTokens: response.usage?.input_tokens ?? 0,
      completionTokens: response.usage?.output_tokens ?? 0,
      totalTokens: (response.usage?.input_tokens ?? 0) + (response.usage?.output_tokens ?? 0)
    };
    let finishReason = "stop";
    if (response.stop_reason === "tool_use") finishReason = "tool_calls";
    else if (response.stop_reason === "max_tokens") finishReason = "length";
    return {
      message: {
        role: "assistant",
        content: textContent || null,
        toolCalls: toolCalls.length > 0 ? toolCalls : void 0
      },
      usage,
      finishReason,
      raw: response
    };
  }
};
1909
+
1910
+ // src/models/providers/google.ts
1911
+ import { createRequire as createRequire5 } from "module";
1912
+ var _require5 = createRequire5(import.meta.url);
1913
+ var GoogleProvider = class {
1914
+ providerId = "google";
1915
+ modelId;
1916
+ ai;
1917
+ GoogleGenAICtor;
1918
+ clientCache = /* @__PURE__ */ new Map();
1919
+ constructor(modelId, config) {
1920
+ this.modelId = modelId;
1921
+ try {
1922
+ const { GoogleGenAI } = _require5("@google/genai");
1923
+ this.GoogleGenAICtor = GoogleGenAI;
1924
+ const key = config?.apiKey ?? process.env.GOOGLE_API_KEY;
1925
+ if (key) {
1926
+ this.ai = new GoogleGenAI({ apiKey: key });
1927
+ }
1928
+ } catch {
1929
+ throw new Error(
1930
+ "@google/genai is required for GoogleProvider. Install it: npm install @google/genai"
1931
+ );
1932
+ }
1933
+ }
1934
+ getClient(apiKey) {
1935
+ if (apiKey) {
1936
+ let cached = this.clientCache.get(apiKey);
1937
+ if (!cached) {
1938
+ cached = new this.GoogleGenAICtor({ apiKey });
1939
+ this.clientCache.set(apiKey, cached);
1940
+ }
1941
+ return cached;
1942
+ }
1943
+ if (this.ai) return this.ai;
1944
+ const envKey = process.env.GOOGLE_API_KEY;
1945
+ if (envKey) {
1946
+ this.ai = new this.GoogleGenAICtor({ apiKey: envKey });
1947
+ return this.ai;
1948
+ }
1949
+ throw new Error("No Google API key provided. Pass it via the x-google-api-key header, apiKey in request body, or set GOOGLE_API_KEY env var.");
1950
+ }
1951
  /**
   * Single-shot (non-streaming) generation through the Google GenAI SDK.
   * Translates shared GenerateOptions into Gemini config, attaches tool
   * declarations, and normalizes the SDK response.
   */
  async generate(messages, options) {
    const { systemInstruction, contents } = this.toGoogleMessages(messages);
    const config = {};
    if (options?.temperature !== void 0)
      config.temperature = options.temperature;
    if (options?.maxTokens !== void 0)
      config.maxOutputTokens = options.maxTokens;
    if (options?.topP !== void 0) config.topP = options.topP;
    if (options?.stop) config.stopSequences = options.stop;
    if (options?.responseFormat) {
      // Any responseFormat forces JSON output; the json_schema variant also
      // installs a Gemini-sanitized response schema.
      config.responseMimeType = "application/json";
      const rf = options.responseFormat;
      if (typeof rf === "object" && rf !== null && "type" in rf && rf.type === "json_schema" && "schema" in rf && rf.schema) {
        config.responseSchema = this.cleanJsonSchema(rf.schema);
      }
    }
    const params = {
      model: this.modelId,
      contents,
      config
    };
    // NOTE(review): systemInstruction is attached at the top level of params;
    // recent @google/genai versions expect it inside `config` — verify
    // against the SDK version this package pins.
    if (systemInstruction) params.systemInstruction = systemInstruction;
    if (options?.tools?.length) {
      params.tools = [
        {
          functionDeclarations: this.toGoogleTools(options.tools)
        }
      ];
    }
    const client = this.getClient(options?.apiKey);
    const response = await client.models.generateContent(params);
    return this.normalizeResponse(response);
  }
1984
  /**
   * Streaming generation through the Google GenAI SDK. Yields text deltas as
   * they arrive; Gemini delivers each function call whole (and without an
   * id), so every call is emitted as an immediate start/delta/end triple
   * with a locally generated `google_tc_<n>` id. A chunk whose candidate
   * carries a finishReason produces a `finish` event with mapped reason and
   * usage (when reported).
   * NOTE(review): unlike generate(), this path does not apply
   * options.responseFormat — confirm whether that omission is intentional.
   */
  async *stream(messages, options) {
    const { systemInstruction, contents } = this.toGoogleMessages(messages);
    const config = {};
    if (options?.temperature !== void 0)
      config.temperature = options.temperature;
    if (options?.maxTokens !== void 0)
      config.maxOutputTokens = options.maxTokens;
    if (options?.topP !== void 0) config.topP = options.topP;
    if (options?.stop) config.stopSequences = options.stop;
    const params = {
      model: this.modelId,
      contents,
      config
    };
    // NOTE(review): top-level systemInstruction — see generate().
    if (systemInstruction) params.systemInstruction = systemInstruction;
    if (options?.tools?.length) {
      params.tools = [
        {
          functionDeclarations: this.toGoogleTools(options.tools)
        }
      ];
    }
    const client = this.getClient(options?.apiKey);
    const streamResult = await client.models.generateContentStream(params);
    let toolCallCounter = 0;
    for await (const chunk of streamResult) {
      // Only the first candidate is surfaced.
      const candidate = chunk.candidates?.[0];
      if (!candidate?.content?.parts) continue;
      for (const part of candidate.content.parts) {
        if (part.text) {
          yield { type: "text", text: part.text };
        }
        if (part.functionCall) {
          const id = `google_tc_${toolCallCounter++}`;
          yield {
            type: "tool_call_start",
            toolCall: { id, name: part.functionCall.name }
          };
          // Arguments arrive complete, so the whole JSON is one delta.
          yield {
            type: "tool_call_delta",
            toolCallId: id,
            argumentsDelta: JSON.stringify(part.functionCall.args ?? {})
          };
          yield { type: "tool_call_end", toolCallId: id };
        }
      }
      if (candidate.finishReason) {
        // Map Gemini finish reasons to the shared vocabulary; any chunk that
        // contains function calls always finishes as "tool_calls".
        let finishReason = "stop";
        if (candidate.finishReason === "STOP" || candidate.finishReason === "END_TURN") {
          finishReason = "stop";
        } else if (candidate.finishReason === "MAX_TOKENS") {
          finishReason = "length";
        } else if (candidate.finishReason === "SAFETY") {
          finishReason = "content_filter";
        }
        const hasToolCalls = candidate.content?.parts?.some(
          (p) => p.functionCall
        );
        if (hasToolCalls) finishReason = "tool_calls";
        yield {
          type: "finish",
          finishReason,
          usage: chunk.usageMetadata ? {
            promptTokens: chunk.usageMetadata.promptTokenCount ?? 0,
            completionTokens: chunk.usageMetadata.candidatesTokenCount ?? 0,
            totalTokens: chunk.usageMetadata.totalTokenCount ?? 0
          } : void 0
        };
      }
    }
  }
2055
  /**
   * Split shared-format messages into Gemini's { systemInstruction, contents }.
   * - "system": captured as systemInstruction (a later system message
   *   overwrites an earlier one; system messages never enter contents).
   * - "user": role "user" with multimodal or plain-text parts.
   * - "assistant": role "model" carrying text and/or functionCall parts; an
   *   empty text part is inserted when neither exists, since a content entry
   *   needs at least one part.
   * - "tool": role "function" with a functionResponse part.
   */
  toGoogleMessages(messages) {
    let systemInstruction;
    const contents = [];
    for (const msg of messages) {
      if (msg.role === "system") {
        systemInstruction = getTextContent(msg.content) || void 0;
        continue;
      }
      if (msg.role === "user") {
        if (isMultiModal(msg.content)) {
          contents.push({
            role: "user",
            parts: msg.content.map((p) => this.partToGoogle(p))
          });
        } else {
          contents.push({
            role: "user",
            parts: [{ text: msg.content ?? "" }]
          });
        }
        continue;
      }
      if (msg.role === "assistant") {
        const parts = [];
        if (msg.content) {
          parts.push({ text: msg.content });
        }
        if (msg.toolCalls) {
          for (const tc of msg.toolCalls) {
            parts.push({
              functionCall: { name: tc.name, args: tc.arguments }
            });
          }
        }
        if (parts.length === 0) {
          parts.push({ text: "" });
        }
        contents.push({ role: "model", parts });
        continue;
      }
      if (msg.role === "tool") {
        contents.push({
          role: "function",
          parts: [
            {
              functionResponse: {
                name: msg.name ?? "unknown",
                // Tool output is wrapped in { result } per function-calling
                // response conventions.
                response: { result: msg.content ?? "" }
              }
            }
          ]
        });
        continue;
      }
    }
    return { systemInstruction, contents };
  }
2112
+ partToGoogle(part) {
2113
+ switch (part.type) {
2114
+ case "text":
2115
+ return { text: part.text };
2116
+ case "image":
2117
+ case "audio":
2118
+ case "file": {
2119
+ const isUrl = part.data.startsWith("http://") || part.data.startsWith("https://");
2120
+ if (isUrl) {
2121
+ return { fileData: { fileUri: part.data, mimeType: part.mimeType ?? (part.type === "image" ? "image/png" : "application/octet-stream") } };
2122
+ }
2123
+ return {
2124
+ inlineData: {
2125
+ data: part.data,
2126
+ mimeType: part.mimeType ?? (part.type === "image" ? "image/png" : part.type === "audio" ? "audio/mp3" : "application/octet-stream")
2127
+ }
2128
+ };
2129
+ }
2130
+ }
2131
+ }
2132
+ toGoogleTools(tools) {
2133
+ return tools.map((t) => ({
2134
+ name: t.name,
2135
+ description: t.description,
2136
+ parameters: t.parameters
2137
+ }));
2138
+ }
2139
  /**
   * Recursively strip JSON-Schema keywords that Gemini's responseSchema does
   * not accept ($schema, $ref, additionalProperties), descending into
   * `properties` values and `items`. Returns a cleaned shallow copy at each
   * level; the input schema object itself is not mutated.
   */
  cleanJsonSchema(schema) {
    const cleaned = { ...schema };
    delete cleaned["$schema"];
    delete cleaned["$ref"];
    delete cleaned["additionalProperties"];
    if (cleaned.properties && typeof cleaned.properties === "object") {
      const props = {};
      for (const [key, val] of Object.entries(cleaned.properties)) {
        // Non-object property values (e.g. booleans) are kept as-is.
        props[key] = typeof val === "object" && val ? this.cleanJsonSchema(val) : val;
      }
      cleaned.properties = props;
    }
    if (cleaned.items && typeof cleaned.items === "object") {
      cleaned.items = this.cleanJsonSchema(cleaned.items);
    }
    return cleaned;
  }
2156
  /**
   * Convert a Gemini response into the provider-neutral shape. Only the
   * first candidate is considered. Gemini assigns no tool-call ids, so
   * synthetic `google_tc_<i>` ids are generated. Finish-reason precedence:
   * tool calls present > MAX_TOKENS > SAFETY > default "stop".
   */
  normalizeResponse(response) {
    const candidate = response.candidates?.[0];
    const parts = candidate?.content?.parts ?? [];
    let textContent = "";
    const toolCalls = [];
    let toolCallCounter = 0;
    for (const part of parts) {
      if (part.text) {
        textContent += part.text;
      }
      if (part.functionCall) {
        toolCalls.push({
          id: `google_tc_${toolCallCounter++}`,
          name: part.functionCall.name,
          arguments: part.functionCall.args ?? {}
        });
      }
    }
    const usage = {
      promptTokens: response.usageMetadata?.promptTokenCount ?? 0,
      completionTokens: response.usageMetadata?.candidatesTokenCount ?? 0,
      totalTokens: response.usageMetadata?.totalTokenCount ?? 0
    };
    let finishReason = "stop";
    if (toolCalls.length > 0) finishReason = "tool_calls";
    else if (candidate?.finishReason === "MAX_TOKENS")
      finishReason = "length";
    else if (candidate?.finishReason === "SAFETY")
      finishReason = "content_filter";
    return {
      message: {
        role: "assistant",
        // Empty text collapses to null so callers can test `content == null`.
        content: textContent || null,
        toolCalls: toolCalls.length > 0 ? toolCalls : void 0
      },
      usage,
      finishReason,
      raw: response
    };
  }
2196
+ };
2197
+
2198
+ // src/models/providers/ollama.ts
2199
+ import { createRequire as createRequire6 } from "module";
2200
+ var _require6 = createRequire6(import.meta.url);
2201
+ var OllamaProvider = class {
2202
+ providerId = "ollama";
2203
+ modelId;
2204
+ client;
2205
+ constructor(modelId, config) {
2206
+ this.modelId = modelId;
2207
+ try {
2208
+ const { Ollama } = _require6("ollama");
2209
+ this.client = new Ollama({
2210
+ host: config?.host ?? "http://localhost:11434"
2211
+ });
2212
+ } catch {
2213
+ throw new Error(
2214
+ "ollama package is required for OllamaProvider. Install it: npm install ollama"
2215
+ );
2216
+ }
2217
+ }
2218
  /**
   * Single-shot chat completion against the Ollama daemon. Shared options
   * map onto Ollama's nested `options` object (maxTokens -> num_predict);
   * responseFormat === "json" enables Ollama's JSON mode.
   */
  async generate(messages, options) {
    const params = {
      model: this.modelId,
      messages: this.toOllamaMessages(messages),
      stream: false
    };
    const ollamaOptions = {};
    if (options?.temperature !== void 0)
      ollamaOptions.temperature = options.temperature;
    if (options?.topP !== void 0) ollamaOptions.top_p = options.topP;
    if (options?.stop) ollamaOptions.stop = options.stop;
    if (options?.maxTokens !== void 0)
      ollamaOptions.num_predict = options.maxTokens;
    // Attach `options` only when at least one knob was actually set.
    if (Object.keys(ollamaOptions).length > 0) params.options = ollamaOptions;
    if (options?.tools?.length) {
      params.tools = this.toOllamaTools(options.tools);
    }
    if (options?.responseFormat === "json") {
      params.format = "json";
    }
    const response = await this.client.chat(params);
    return this.normalizeResponse(response);
  }
2241
  /**
   * Streaming chat completion. Text deltas are yielded as they arrive;
   * Ollama delivers tool calls whole (and without ids), so each becomes an
   * immediate start/delta/end triple with a locally generated
   * `ollama_tc_<n>` id. The final chunk (done === true) yields a `finish`
   * event with token usage.
   */
  async *stream(messages, options) {
    const params = {
      model: this.modelId,
      messages: this.toOllamaMessages(messages),
      stream: true
    };
    const ollamaOptions = {};
    if (options?.temperature !== void 0)
      ollamaOptions.temperature = options.temperature;
    if (options?.topP !== void 0) ollamaOptions.top_p = options.topP;
    if (options?.stop) ollamaOptions.stop = options.stop;
    if (options?.maxTokens !== void 0)
      ollamaOptions.num_predict = options.maxTokens;
    if (Object.keys(ollamaOptions).length > 0) params.options = ollamaOptions;
    if (options?.tools?.length) {
      params.tools = this.toOllamaTools(options.tools);
    }
    if (options?.responseFormat === "json") {
      params.format = "json";
    }
    const stream = await this.client.chat(params);
    let toolCallCounter = 0;
    for await (const chunk of stream) {
      if (chunk.message?.content) {
        yield { type: "text", text: chunk.message.content };
      }
      if (chunk.message?.tool_calls) {
        for (const tc of chunk.message.tool_calls) {
          const id = `ollama_tc_${toolCallCounter++}`;
          yield {
            type: "tool_call_start",
            toolCall: {
              id,
              name: tc.function?.name ?? ""
            }
          };
          // Arguments arrive complete, so the whole JSON is one delta.
          yield {
            type: "tool_call_delta",
            toolCallId: id,
            argumentsDelta: JSON.stringify(tc.function?.arguments ?? {})
          };
          yield { type: "tool_call_end", toolCallId: id };
        }
      }
      if (chunk.done) {
        // NOTE(review): this only inspects tool_calls carried on the FINAL
        // chunk; calls streamed on earlier chunks still finish as "stop".
        const hasToolCalls = chunk.message?.tool_calls?.length > 0;
        yield {
          type: "finish",
          finishReason: hasToolCalls ? "tool_calls" : "stop",
          usage: {
            promptTokens: chunk.prompt_eval_count ?? 0,
            completionTokens: chunk.eval_count ?? 0,
            totalTokens: (chunk.prompt_eval_count ?? 0) + (chunk.eval_count ?? 0)
          }
        };
      }
    }
  }
2299
+ toOllamaMessages(messages) {
2300
+ return messages.map((msg) => {
2301
+ if (msg.role === "assistant" && msg.toolCalls?.length) {
2302
+ return {
2303
+ role: "assistant",
2304
+ content: msg.content ?? "",
2305
+ tool_calls: msg.toolCalls.map((tc) => ({
2306
+ function: {
2307
+ name: tc.name,
2308
+ arguments: tc.arguments
2309
+ }
2310
+ }))
2311
+ };
2312
+ }
2313
+ if (msg.role === "tool") {
2314
+ return {
2315
+ role: "tool",
2316
+ content: msg.content ?? ""
2317
+ };
2318
+ }
2319
+ return {
2320
+ role: msg.role,
2321
+ content: msg.content ?? ""
2322
+ };
2323
+ });
2324
+ }
2325
+ toOllamaTools(tools) {
2326
+ return tools.map((t) => ({
2327
+ type: "function",
2328
+ function: {
2329
+ name: t.name,
2330
+ description: t.description,
2331
+ parameters: t.parameters
2332
+ }
2333
+ }));
2334
+ }
2335
  /**
   * Convert an Ollama chat response into the provider-neutral shape.
   * Ollama assigns no tool-call ids, so synthetic `ollama_tc_<i>` ids are
   * generated; it also reports no explicit finish reason, so one is
   * inferred from the presence of tool calls.
   */
  normalizeResponse(response) {
    const toolCalls = (response.message?.tool_calls ?? []).map(
      (tc, i) => ({
        id: `ollama_tc_${i}`,
        name: tc.function?.name ?? "",
        arguments: tc.function?.arguments ?? {}
      })
    );
    const usage = {
      promptTokens: response.prompt_eval_count ?? 0,
      completionTokens: response.eval_count ?? 0,
      totalTokens: (response.prompt_eval_count ?? 0) + (response.eval_count ?? 0)
    };
    const hasToolCalls = toolCalls.length > 0;
    return {
      message: {
        role: "assistant",
        content: response.message?.content ?? null,
        toolCalls: hasToolCalls ? toolCalls : void 0
      },
      usage,
      finishReason: hasToolCalls ? "tool_calls" : "stop",
      raw: response
    };
  }
2360
+ };
2361
+
2362
+ // src/models/registry.ts
2363
var ModelRegistry = class {
  // providerId -> factory(modelId, config) producing a provider instance.
  factories = /* @__PURE__ */ new Map();
  /** Register (or replace) the factory for a provider id. */
  register(providerId, factory) {
    this.factories.set(providerId, factory);
  }
  /**
   * Build a provider instance for the given provider/model pair.
   * @throws {Error} When no factory has been registered for providerId.
   */
  resolve(providerId, modelId, config) {
    const make = this.factories.get(providerId);
    if (make === void 0) {
      throw new Error(
        `Unknown provider "${providerId}". Register it first with registry.register().`
      );
    }
    return make(modelId, config);
  }
  /** Whether a factory exists for providerId. */
  has(providerId) {
    return this.factories.has(providerId);
  }
};
2381
// Shared default registry, pre-wired with the four built-in providers.
var registry = new ModelRegistry();
registry.register(
  "openai",
  (modelId, config) => new OpenAIProvider(modelId, config)
);
registry.register(
  "anthropic",
  (modelId, config) => new AnthropicProvider(modelId, config)
);
registry.register(
  "google",
  (modelId, config) => new GoogleProvider(modelId, config)
);
registry.register(
  "ollama",
  (modelId, config) => new OllamaProvider(modelId, config)
);
// Convenience factories that resolve models against the shared registry.
function openai(modelId, config) {
  return registry.resolve("openai", modelId, config);
}
function anthropic(modelId, config) {
  return registry.resolve("anthropic", modelId, config);
}
function google(modelId, config) {
  return registry.resolve("google", modelId, config);
}
function ollama(modelId, config) {
  return registry.resolve("ollama", modelId, config);
}
2410
+
2411
+ // src/tools/define-tool.ts
2412
// Build a tool record from a config object, copying only the four fields
// the tool contract defines so later mutation of (or extra keys on) the
// caller's config object cannot leak into the tool.
function defineTool(config) {
  const { name, description, parameters, execute } = config;
  return { name, description, parameters, execute };
}
2420
+
2421
+ // src/storage/sqlite.ts
2422
+ import { createRequire as createRequire7 } from "module";
2423
+ var _require7 = createRequire7(import.meta.url);
2424
var SqliteStorage = class {
  db;
  /**
   * Namespaced key/value storage backed by a local SQLite file. Values are
   * JSON-serialized into a TEXT column.
   * @param {string} dbPath - Database file path (better-sqlite3 semantics).
   * @throws {Error} When the optional better-sqlite3 dependency is absent.
   */
  constructor(dbPath) {
    let Database;
    try {
      // Guard ONLY the module load: previously db open + schema creation sat
      // in this try, so their errors (bad path, corrupt file, SQL error)
      // were misreported as "install better-sqlite3".
      Database = _require7("better-sqlite3");
    } catch {
      throw new Error(
        "better-sqlite3 is required for SqliteStorage. Install it: npm install better-sqlite3"
      );
    }
    this.db = new Database(dbPath);
    this.db.pragma("journal_mode = WAL");
    this.db.exec(`
      CREATE TABLE IF NOT EXISTS kv_store (
        namespace TEXT NOT NULL,
        key TEXT NOT NULL,
        value TEXT NOT NULL,
        updated_at TEXT DEFAULT (datetime('now')),
        PRIMARY KEY (namespace, key)
      )
    `);
  }
  /** @returns {Promise<unknown|null>} Parsed stored value, or null when absent. */
  async get(namespace, key) {
    const row = this.db.prepare("SELECT value FROM kv_store WHERE namespace = ? AND key = ?").get(namespace, key);
    if (!row) return null;
    return JSON.parse(row.value);
  }
  /** Insert or overwrite the value for (namespace, key). */
  async set(namespace, key, value) {
    this.db.prepare(
      `INSERT INTO kv_store (namespace, key, value, updated_at)
       VALUES (?, ?, ?, datetime('now'))
       ON CONFLICT(namespace, key)
       DO UPDATE SET value = excluded.value, updated_at = datetime('now')`
    ).run(namespace, key, JSON.stringify(value));
  }
  /** Remove the entry for (namespace, key); no-op when absent. */
  async delete(namespace, key) {
    this.db.prepare("DELETE FROM kv_store WHERE namespace = ? AND key = ?").run(namespace, key);
  }
  /**
   * List entries in a namespace, optionally restricted to keys starting
   * with `prefix`. NOTE(review): the prefix is used as a LIKE pattern, so
   * % and _ inside it act as wildcards — confirm callers never pass them.
   */
  async list(namespace, prefix) {
    const rows = prefix ? this.db.prepare(
      "SELECT key, value FROM kv_store WHERE namespace = ? AND key LIKE ?"
    ).all(namespace, `${prefix}%`) : this.db.prepare("SELECT key, value FROM kv_store WHERE namespace = ?").all(namespace);
    return rows.map((row) => ({
      key: row.key,
      value: JSON.parse(row.value)
    }));
  }
  /** Close the database handle. */
  async close() {
    this.db.close();
  }
};
2475
+
2476
+ // src/storage/postgres.ts
2477
+ import { createRequire as createRequire8 } from "module";
2478
+ var _require8 = createRequire8(import.meta.url);
2479
var PostgresStorage = class {
  pool;
  /**
   * Namespaced key/value storage backed by a PostgreSQL table with JSONB
   * values.
   * @param {string} connectionString - Standard pg connection string.
   * @throws {Error} When the optional pg dependency is absent.
   */
  constructor(connectionString) {
    let Pool;
    try {
      // Guard ONLY the module load so pool-construction errors surface
      // as themselves instead of "install pg".
      ({ Pool } = _require8("pg"));
    } catch {
      throw new Error(
        "pg is required for PostgresStorage. Install it: npm install pg"
      );
    }
    this.pool = new Pool({ connectionString });
  }
  /** Create the backing table if missing. Call once before first use. */
  async initialize() {
    await this.pool.query(`
      CREATE TABLE IF NOT EXISTS kv_store (
        namespace TEXT NOT NULL,
        key TEXT NOT NULL,
        value JSONB NOT NULL,
        updated_at TIMESTAMPTZ DEFAULT NOW(),
        PRIMARY KEY (namespace, key)
      )
    `);
  }
  /** @returns {Promise<unknown|null>} Stored value, or null when absent. */
  async get(namespace, key) {
    const result = await this.pool.query(
      "SELECT value FROM kv_store WHERE namespace = $1 AND key = $2",
      [namespace, key]
    );
    if (result.rows.length === 0) return null;
    // JSONB columns come back already parsed by pg.
    return result.rows[0].value;
  }
  /** Insert or overwrite the value for (namespace, key). */
  async set(namespace, key, value) {
    await this.pool.query(
      `INSERT INTO kv_store (namespace, key, value, updated_at)
       VALUES ($1, $2, $3, NOW())
       ON CONFLICT (namespace, key)
       DO UPDATE SET value = EXCLUDED.value, updated_at = NOW()`,
      [namespace, key, JSON.stringify(value)]
    );
  }
  /** Remove the entry for (namespace, key); no-op when absent. */
  async delete(namespace, key) {
    await this.pool.query(
      "DELETE FROM kv_store WHERE namespace = $1 AND key = $2",
      [namespace, key]
    );
  }
  /**
   * List entries in a namespace, optionally restricted to keys starting
   * with `prefix`. NOTE(review): the prefix is used as a LIKE pattern, so
   * % and _ inside it act as wildcards — confirm callers never pass them.
   */
  async list(namespace, prefix) {
    const result = prefix ? await this.pool.query(
      "SELECT key, value FROM kv_store WHERE namespace = $1 AND key LIKE $2",
      [namespace, `${prefix}%`]
    ) : await this.pool.query(
      "SELECT key, value FROM kv_store WHERE namespace = $1",
      [namespace]
    );
    return result.rows.map((row) => ({
      key: row.key,
      value: row.value
    }));
  }
  /** Drain and close the connection pool. */
  async close() {
    await this.pool.end();
  }
};
2542
+
2543
+ // src/storage/mongodb.ts
2544
+ import { createRequire as createRequire9 } from "module";
2545
+ var _require9 = createRequire9(import.meta.url);
2546
var MongoDBStorage = class {
  /**
   * Namespaced key/value storage backed by a MongoDB collection.
   * @param {string} uri - MongoDB connection URI.
   * @param {string} [dbName="radaros"]
   * @param {string} [collectionName="kv_store"]
   * @throws {Error} When the optional mongodb dependency is absent.
   */
  constructor(uri, dbName = "radaros", collectionName = "kv_store") {
    this.uri = uri;
    this.dbName = dbName;
    this.collectionName = collectionName;
    let MongoClient;
    try {
      // Guard ONLY the module load: an invalid URI should surface as the
      // driver's own error, not as "install mongodb".
      ({ MongoClient } = _require9("mongodb"));
    } catch {
      throw new Error(
        "mongodb is required for MongoDBStorage. Install it: npm install mongodb"
      );
    }
    this.client = new MongoClient(uri);
  }
  client;
  db;
  collection;
  /** Connect and ensure the unique (namespace, key) index. Call once before use. */
  async initialize() {
    await this.client.connect();
    this.db = this.client.db(this.dbName);
    this.collection = this.db.collection(this.collectionName);
    await this.collection.createIndex(
      { namespace: 1, key: 1 },
      { unique: true }
    );
  }
  /** @returns {Promise<unknown|null>} Stored value, or null when absent. */
  async get(namespace, key) {
    const doc = await this.collection.findOne({ namespace, key });
    if (!doc) return null;
    return doc.value;
  }
  /** Insert or overwrite the value for (namespace, key). */
  async set(namespace, key, value) {
    await this.collection.updateOne(
      { namespace, key },
      { $set: { value, updatedAt: /* @__PURE__ */ new Date() } },
      { upsert: true }
    );
  }
  /** Remove the entry for (namespace, key); no-op when absent. */
  async delete(namespace, key) {
    await this.collection.deleteOne({ namespace, key });
  }
  /**
   * List entries in a namespace, optionally restricted to keys that start
   * with `prefix` (matched literally).
   */
  async list(namespace, prefix) {
    const filter = { namespace };
    if (prefix) {
      // Escape regex metacharacters: previously the raw prefix went into
      // $regex, so a prefix like "user.1" also matched "userX1" and
      // characters like "(" produced invalid patterns.
      const literal = prefix.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      filter.key = { $regex: `^${literal}` };
    }
    const docs = await this.collection.find(filter).toArray();
    return docs.map((doc) => ({ key: doc.key, value: doc.value }));
  }
  /** Close the underlying MongoClient. */
  async close() {
    await this.client.close();
  }
};
2599
+
2600
+ // src/vector/base.ts
2601
var BaseVectorStore = class {
  // Holds the optional EmbeddingProvider shared by all concrete stores.
  constructor(embedder) {
    this.embedder = embedder;
  }
  /**
   * Return the document's own embedding when present, otherwise embed its
   * content via the configured provider.
   * @throws {Error} When no embedding exists and no provider is configured.
   */
  async ensureEmbedding(doc) {
    const existing = doc.embedding;
    if (existing) return existing;
    if (!this.embedder) {
      throw new Error(
        "No embedding provided on document and no EmbeddingProvider configured"
      );
    }
    return this.embedder.embed(doc.content);
  }
  /**
   * Accept a precomputed query vector as-is, or embed a string query via
   * the configured provider.
   * @throws {Error} When given a string but no provider is configured.
   */
  async ensureQueryVector(query) {
    if (Array.isArray(query)) return query;
    if (!this.embedder) {
      throw new Error(
        "String query requires an EmbeddingProvider to be configured"
      );
    }
    return this.embedder.embed(query);
  }
};
2624
+
2625
+ // src/vector/in-memory.ts
2626
var InMemoryVectorStore = class extends BaseVectorStore {
  // collection name -> (doc id -> { id, content, embedding, metadata })
  collections = /* @__PURE__ */ new Map();
  constructor(embedder) {
    super(embedder);
  }
  // Nothing to set up for the in-memory backend.
  async initialize() {
  }
  // Get (lazily creating) the Map backing a named collection.
  getCol(collection) {
    let col = this.collections.get(collection);
    if (!col) {
      col = /* @__PURE__ */ new Map();
      this.collections.set(collection, col);
    }
    return col;
  }
  // Insert or replace a document, embedding its content when needed.
  async upsert(collection, doc) {
    const embedding = await this.ensureEmbedding(doc);
    this.getCol(collection).set(doc.id, {
      id: doc.id,
      content: doc.content,
      embedding,
      metadata: doc.metadata ?? {}
    });
  }
  // Sequential upsert of many documents (embeddings computed one at a time).
  async upsertBatch(collection, docs) {
    for (const doc of docs) {
      await this.upsert(collection, doc);
    }
  }
  /**
   * Brute-force cosine-similarity search over the collection. Applies the
   * optional minimum-score cutoff and exact-match metadata filter, then
   * returns the topK hits sorted by descending score.
   */
  async search(collection, query, options) {
    const vec = await this.ensureQueryVector(query);
    const topK = options?.topK ?? 10;
    const col = this.getCol(collection);
    const scored = [];
    for (const doc of col.values()) {
      const score = this.cosineSimilarity(vec, doc.embedding);
      if (options?.minScore != null && score < options.minScore) continue;
      if (options?.filter) {
        // Every filter key must match the doc's metadata by strict equality.
        let match = true;
        for (const [k, v] of Object.entries(options.filter)) {
          if (doc.metadata[k] !== v) {
            match = false;
            break;
          }
        }
        if (!match) continue;
      }
      scored.push({
        id: doc.id,
        content: doc.content,
        score,
        metadata: doc.metadata
      });
    }
    scored.sort((a, b) => b.score - a.score);
    return scored.slice(0, topK);
  }
  // Cosine similarity of two vectors; returns 0 when either norm is zero.
  cosineSimilarity(a, b) {
    let dot = 0;
    let normA = 0;
    let normB = 0;
    for (let i = 0; i < a.length; i++) {
      dot += a[i] * b[i];
      normA += a[i] * a[i];
      normB += b[i] * b[i];
    }
    const denom = Math.sqrt(normA) * Math.sqrt(normB);
    return denom === 0 ? 0 : dot / denom;
  }
  // Remove one document; no-op when absent.
  async delete(collection, id) {
    this.getCol(collection).delete(id);
  }
  // Fetch a document (without its embedding), or null when absent.
  async get(collection, id) {
    const doc = this.getCol(collection).get(id);
    if (!doc) return null;
    return { id: doc.id, content: doc.content, metadata: doc.metadata };
  }
  // Drop an entire collection; no-op when absent.
  async dropCollection(collection) {
    this.collections.delete(collection);
  }
  // Release all collections.
  async close() {
    this.collections.clear();
  }
};
2710
+
2711
+ // src/vector/pgvector.ts
2712
+ import { createRequire as createRequire10 } from "module";
2713
+ var _require10 = createRequire10(import.meta.url);
2714
var PgVectorStore = class extends BaseVectorStore {
  pool;
  dimensions;
  // Collections whose table/index were already created this session.
  initializedCollections = /* @__PURE__ */ new Set();
  /**
   * pgvector-backed store; one table per collection.
   * @param {{ connectionString: string, dimensions?: number }} config
   * @param {object} [embedder] - Optional EmbeddingProvider (see BaseVectorStore).
   * @throws {Error} When the optional pg dependency is absent.
   */
  constructor(config, embedder) {
    super(embedder);
    this.dimensions = config.dimensions ?? embedder?.dimensions ?? 1536;
    let Pool;
    try {
      // Guard ONLY the module load so pool-construction errors surface as-is.
      ({ Pool } = _require10("pg"));
    } catch {
      throw new Error(
        "pg is required for PgVectorStore. Install it: npm install pg"
      );
    }
    this.pool = new Pool({ connectionString: config.connectionString });
  }
  /** Enable the pgvector extension. Call once before first use. */
  async initialize() {
    await this.pool.query("CREATE EXTENSION IF NOT EXISTS vector");
  }
  /**
   * Lazily create the per-collection table and (best-effort) ivfflat index.
   * Index creation failures are deliberately swallowed: sequential scans
   * still work without the index.
   */
  async ensureCollection(collection) {
    if (this.initializedCollections.has(collection)) return;
    const table = this.sanitize(collection);
    await this.pool.query(`
      CREATE TABLE IF NOT EXISTS ${table} (
        id TEXT PRIMARY KEY,
        content TEXT NOT NULL,
        embedding vector(${this.dimensions}),
        metadata JSONB DEFAULT '{}'::jsonb,
        created_at TIMESTAMPTZ DEFAULT NOW()
      )
    `);
    await this.pool.query(`
      CREATE INDEX IF NOT EXISTS ${table}_embedding_idx
      ON ${table} USING ivfflat (embedding vector_cosine_ops)
      WITH (lists = 100)
    `).catch(() => {
    });
    this.initializedCollections.add(collection);
  }
  // Restrict collection names to a safe identifier alphabet; this is the
  // sole defense against SQL injection since table names are interpolated.
  sanitize(name) {
    return name.replace(/[^a-zA-Z0-9_]/g, "_");
  }
  // Render a JS number[] as a pgvector literal, e.g. "[1,2,3]".
  toSql(vec) {
    return `[${vec.join(",")}]`;
  }
  /** Insert or replace one document, embedding its content when needed. */
  async upsert(collection, doc) {
    await this.ensureCollection(collection);
    const embedding = await this.ensureEmbedding(doc);
    const table = this.sanitize(collection);
    await this.pool.query(
      `INSERT INTO ${table} (id, content, embedding, metadata)
       VALUES ($1, $2, $3::vector, $4::jsonb)
       ON CONFLICT (id)
       DO UPDATE SET content = EXCLUDED.content,
                     embedding = EXCLUDED.embedding,
                     metadata = EXCLUDED.metadata`,
      [doc.id, doc.content, this.toSql(embedding), JSON.stringify(doc.metadata ?? {})]
    );
  }
  /** Sequential upsert of many documents. */
  async upsertBatch(collection, docs) {
    for (const doc of docs) {
      await this.upsert(collection, doc);
    }
  }
  /**
   * Cosine-similarity search (score = 1 - cosine distance), optionally
   * filtered by exact metadata matches and a minimum score.
   */
  async search(collection, query, options) {
    await this.ensureCollection(collection);
    const vec = await this.ensureQueryVector(query);
    const topK = options?.topK ?? 10;
    const table = this.sanitize(collection);
    let filterClause = "";
    const params = [this.toSql(vec), topK];
    if (options?.filter) {
      const conditions = Object.entries(options.filter).map(([k, v], i) => {
        // `metadata->>'k'` yields the JSON value as bare text, so string
        // filter values must be bound unquoted: the old JSON.stringify(v)
        // produced "\"foo\"" which never matched the text `foo`. Numbers
        // and booleans stringify to the same text PostgreSQL renders.
        params.push(typeof v === "string" ? v : JSON.stringify(v));
        return `metadata->>'${k.replace(/'/g, "''")}' = $${i + 3}`;
      });
      if (conditions.length > 0) {
        filterClause = `WHERE ${conditions.join(" AND ")}`;
      }
    }
    const result = await this.pool.query(
      `SELECT id, content, metadata,
              1 - (embedding <=> $1::vector) AS score
       FROM ${table}
       ${filterClause}
       ORDER BY embedding <=> $1::vector
       LIMIT $2`,
      params
    );
    let rows = result.rows;
    if (options?.minScore != null) {
      rows = rows.filter((r) => r.score >= options.minScore);
    }
    return rows.map((r) => ({
      id: r.id,
      content: r.content,
      score: r.score,
      metadata: r.metadata
    }));
  }
  /** Remove one document; no-op when absent. */
  async delete(collection, id) {
    await this.ensureCollection(collection);
    const table = this.sanitize(collection);
    await this.pool.query(`DELETE FROM ${table} WHERE id = $1`, [id]);
  }
  /** Fetch a document (without its embedding), or null when absent. */
  async get(collection, id) {
    await this.ensureCollection(collection);
    const table = this.sanitize(collection);
    const result = await this.pool.query(
      `SELECT id, content, metadata FROM ${table} WHERE id = $1`,
      [id]
    );
    if (result.rows.length === 0) return null;
    const row = result.rows[0];
    return { id: row.id, content: row.content, metadata: row.metadata };
  }
  /** Drop the backing table and forget the cached initialization. */
  async dropCollection(collection) {
    const table = this.sanitize(collection);
    await this.pool.query(`DROP TABLE IF EXISTS ${table}`);
    this.initializedCollections.delete(collection);
  }
  /** Drain and close the pool. */
  async close() {
    await this.pool.end();
  }
};
2839
+
2840
+ // src/vector/qdrant.ts
2841
+ import { createRequire as createRequire11 } from "module";
2842
+ import { createHash } from "crypto";
2843
+ var _require11 = createRequire11(import.meta.url);
2844
// Canonical 8-4-4-4-12 hex UUID shape (case-insensitive).
var UUID_RE = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
2845
// Derive a deterministic UUID-shaped id from an arbitrary string: hash it
// with MD5, then assemble the 8-4-4-4-12 groups, stamping the version
// nibble to "4" and the variant nibble to "8" (v4-style layout).
function stringToUUID(str) {
  const digest = createHash("md5").update(str).digest("hex");
  const groups = [
    digest.slice(0, 8),
    digest.slice(8, 12),
    `4${digest.slice(13, 16)}`,
    `8${digest.slice(17, 20)}`,
    digest.slice(20, 32)
  ];
  return groups.join("-");
}
2855
// Qdrant accepts only unsigned integers or UUIDs as point ids: numeric
// strings pass through as numbers, UUIDs as-is, and anything else is
// hashed into a deterministic UUID.
function toQdrantId(id) {
  if (/^\d+$/.test(id)) return Number(id);
  if (UUID_RE.test(id)) return id;
  return stringToUUID(id);
}
2860
// Vector store backed by a Qdrant server (@qdrant/js-client-rest).
// Qdrant only accepts unsigned integers or UUIDs as point ids, so every
// document id is mapped through toQdrantId() and the caller's original id
// is kept in the payload under `_originalId` so reads can restore it.
var QdrantVectorStore = class extends BaseVectorStore {
  // Qdrant REST client instance.
  client;
  // Vector size used when lazily creating missing collections.
  dimensions;
  // Collections already verified to exist in this process; avoids one
  // getCollection round-trip per operation.
  initializedCollections = /* @__PURE__ */ new Set();
  // config: { url?, apiKey?, dimensions?, checkCompatibility? };
  // embedder: optional embedding provider used by BaseVectorStore helpers.
  constructor(config = {}, embedder) {
    super(embedder);
    this.dimensions = config.dimensions ?? embedder?.dimensions ?? 1536;
    try {
      const { QdrantClient } = _require11("@qdrant/js-client-rest");
      this.client = new QdrantClient({
        url: config.url ?? "http://localhost:6333",
        apiKey: config.apiKey,
        checkCompatibility: config.checkCompatibility ?? false
      });
    } catch {
      // The client is an optional peer dependency; fail with an
      // actionable install hint instead of a raw require error.
      throw new Error(
        "@qdrant/js-client-rest is required for QdrantVectorStore. Install it: npm install @qdrant/js-client-rest"
      );
    }
  }
  // Nothing to do up front: collections are created lazily on first use.
  async initialize() {
  }
  // Creates the collection (cosine distance, this.dimensions vector size)
  // if the server does not already have it, then caches that fact.
  // NOTE(review): any getCollection failure (including transient network
  // errors) is treated as "collection missing" — confirm intended.
  async ensureCollection(collection) {
    if (this.initializedCollections.has(collection)) return;
    try {
      await this.client.getCollection(collection);
    } catch {
      await this.client.createCollection(collection, {
        vectors: {
          size: this.dimensions,
          distance: "Cosine"
        }
      });
    }
    this.initializedCollections.add(collection);
  }
  // Inserts or replaces a single document. The embedding is produced via
  // ensureEmbedding() when the doc does not already carry one; `wait:
  // true` makes the write synchronous on the server.
  async upsert(collection, doc) {
    await this.ensureCollection(collection);
    const embedding = await this.ensureEmbedding(doc);
    await this.client.upsert(collection, {
      wait: true,
      points: [
        {
          id: toQdrantId(doc.id),
          vector: embedding,
          payload: {
            _originalId: doc.id,
            content: doc.content,
            ...doc.metadata ?? {}
          }
        }
      ]
    });
  }
  // Batch variant of upsert(): embeddings computed in parallel, all
  // points written in one request.
  async upsertBatch(collection, docs) {
    await this.ensureCollection(collection);
    const points = await Promise.all(
      docs.map(async (doc) => {
        const embedding = await this.ensureEmbedding(doc);
        return {
          id: toQdrantId(doc.id),
          vector: embedding,
          payload: {
            _originalId: doc.id,
            content: doc.content,
            ...doc.metadata ?? {}
          }
        };
      })
    );
    await this.client.upsert(collection, { wait: true, points });
  }
  // Similarity search. `query` may be a string (embedded via
  // ensureQueryVector) or a raw vector. options: { topK?, filter?,
  // minScore? }; each filter entry becomes a Qdrant `must` match clause.
  async search(collection, query, options) {
    await this.ensureCollection(collection);
    const vec = await this.ensureQueryVector(query);
    const topK = options?.topK ?? 10;
    const searchParams = {
      vector: vec,
      limit: topK,
      with_payload: true
    };
    if (options?.filter) {
      searchParams.filter = {
        must: Object.entries(options.filter).map(([key, value]) => ({
          key,
          match: { value }
        }))
      };
    }
    if (options?.minScore != null) {
      searchParams.score_threshold = options.minScore;
    }
    const results = await this.client.search(collection, searchParams);
    // Restore the caller-facing id and strip internal payload fields;
    // everything else in the payload is surfaced as metadata.
    return results.map((r) => {
      const { _originalId, content, ...rest } = r.payload ?? {};
      return {
        id: _originalId ?? String(r.id),
        content: content ?? "",
        score: r.score,
        metadata: rest
      };
    });
  }
  // Removes a single point by (mapped) id.
  async delete(collection, id) {
    await this.ensureCollection(collection);
    await this.client.delete(collection, {
      wait: true,
      points: [toQdrantId(id)]
    });
  }
  // Fetches one document by id; resolves to null when the point does not
  // exist or the retrieve call fails for any reason.
  async get(collection, id) {
    await this.ensureCollection(collection);
    try {
      const results = await this.client.retrieve(collection, {
        ids: [toQdrantId(id)],
        with_payload: true
      });
      if (!results || results.length === 0) return null;
      const point = results[0];
      const { _originalId, content, ...rest } = point.payload ?? {};
      return {
        id: _originalId ?? String(point.id),
        content: content ?? "",
        metadata: rest
      };
    } catch {
      return null;
    }
  }
  // Best-effort delete of the whole collection; clears the local
  // existence cache so a later write recreates it.
  async dropCollection(collection) {
    try {
      await this.client.deleteCollection(collection);
    } catch {
    }
    this.initializedCollections.delete(collection);
  }
  // The REST client holds no persistent connection; nothing to release.
  async close() {
  }
};
2999
+
3000
+ // src/vector/mongodb.ts
3001
+ import { createRequire as createRequire12 } from "module";
3002
+ var _require12 = createRequire12(import.meta.url);
3003
// Vector store backed by MongoDB. Prefers Atlas `$vectorSearch` when the
// configured search index works; transparently falls back to an in-app
// cosine-similarity scan for plain (non-Atlas) MongoDB deployments.
var MongoDBVectorStore = class extends BaseVectorStore {
  client;
  // Database handle; assigned in initialize() — methods that touch it
  // assume initialize() has been awaited first.
  db;
  // Atlas Vector Search index name used by $vectorSearch.
  indexName;
  dbName;
  // Tri-state capability cache: null = unknown (probe on first search),
  // true = Atlas search works, false = use the local scan fallback.
  useAtlas = null;
  // config: { uri, dbName?, indexName? }; embedder is optional.
  constructor(config, embedder) {
    super(embedder);
    this.indexName = config.indexName ?? "vector_index";
    this.dbName = config.dbName ?? "radaros_vectors";
    try {
      const { MongoClient } = _require12("mongodb");
      this.client = new MongoClient(config.uri);
    } catch {
      // mongodb is an optional peer dependency; fail with install hint.
      throw new Error(
        "mongodb is required for MongoDBVectorStore. Install it: npm install mongodb"
      );
    }
  }
  // Connects and binds the target database. Must run before any other
  // method that uses `this.db`.
  async initialize() {
    await this.client.connect();
    this.db = this.client.db(this.dbName);
  }
  // Shorthand for a collection handle on the bound database.
  col(collection) {
    return this.db.collection(collection);
  }
  // Insert-or-replace one document keyed by `_id = doc.id`.
  async upsert(collection, doc) {
    const embedding = await this.ensureEmbedding(doc);
    await this.col(collection).updateOne(
      { _id: doc.id },
      {
        $set: {
          content: doc.content,
          embedding,
          metadata: doc.metadata ?? {},
          updatedAt: /* @__PURE__ */ new Date()
        }
      },
      { upsert: true }
    );
  }
  // Batch upsert via a single bulkWrite; embeddings computed in parallel.
  async upsertBatch(collection, docs) {
    const ops = await Promise.all(
      docs.map(async (doc) => {
        const embedding = await this.ensureEmbedding(doc);
        return {
          updateOne: {
            filter: { _id: doc.id },
            update: {
              $set: {
                content: doc.content,
                embedding,
                metadata: doc.metadata ?? {},
                updatedAt: /* @__PURE__ */ new Date()
              }
            },
            upsert: true
          }
        };
      })
    );
    if (ops.length > 0) {
      await this.col(collection).bulkWrite(ops);
    }
  }
  // Similarity search. The first call probes Atlas $vectorSearch and the
  // outcome (success/failure) is cached in `useAtlas` for later calls.
  async search(collection, query, options) {
    const vec = await this.ensureQueryVector(query);
    if (this.useAtlas === true) {
      return this.atlasSearch(collection, vec, options);
    }
    if (this.useAtlas === false) {
      return this.localSearch(collection, vec, options);
    }
    try {
      const results = await this.atlasSearch(collection, vec, options);
      this.useAtlas = true;
      return results;
    } catch {
      this.useAtlas = false;
      return this.localSearch(collection, vec, options);
    }
  }
  // Atlas Vector Search pipeline: $vectorSearch (numCandidates = 10x
  // topK), score surfaced via $meta, optional minScore $match, then a
  // projection down to the fields the result shape needs.
  async atlasSearch(collection, vec, options) {
    const topK = options?.topK ?? 10;
    const pipeline = [
      {
        $vectorSearch: {
          index: this.indexName,
          path: "embedding",
          queryVector: vec,
          numCandidates: topK * 10,
          limit: topK,
          ...options?.filter ? { filter: this.buildFilter(options.filter) } : {}
        }
      },
      {
        $addFields: {
          score: { $meta: "vectorSearchScore" }
        }
      }
    ];
    if (options?.minScore != null) {
      pipeline.push({ $match: { score: { $gte: options.minScore } } });
    }
    pipeline.push({
      $project: { _id: 1, content: 1, score: 1, metadata: 1 }
    });
    const results = await this.col(collection).aggregate(pipeline).toArray();
    return results.map((r) => ({
      id: String(r._id),
      content: r.content ?? "",
      score: r.score,
      metadata: r.metadata
    }));
  }
  // Fallback: fetch candidate docs (metadata filter applied server-side)
  // and rank them in-process by cosine similarity. Scans the whole
  // filtered collection — fine for small/dev datasets, not large ones.
  async localSearch(collection, vec, options) {
    const topK = options?.topK ?? 10;
    const filter = {};
    if (options?.filter) {
      for (const [k, v] of Object.entries(options.filter)) {
        filter[`metadata.${k}`] = v;
      }
    }
    const docs = await this.col(collection).find(filter, { projection: { _id: 1, content: 1, embedding: 1, metadata: 1 } }).toArray();
    const scored = [];
    for (const doc of docs) {
      // Documents without embeddings cannot be ranked; skip them.
      if (!doc.embedding) continue;
      const score = cosine(vec, doc.embedding);
      if (options?.minScore != null && score < options.minScore) continue;
      scored.push({
        id: String(doc._id),
        content: doc.content ?? "",
        score,
        metadata: doc.metadata
      });
    }
    scored.sort((a, b) => b.score - a.score);
    return scored.slice(0, topK);
  }
  // Maps a flat filter object onto `metadata.<key>` equality conditions.
  buildFilter(filter) {
    const conditions = {};
    for (const [key, value] of Object.entries(filter)) {
      conditions[`metadata.${key}`] = value;
    }
    return conditions;
  }
  async delete(collection, id) {
    await this.col(collection).deleteOne({ _id: id });
  }
  // Fetch one document by id; null when absent.
  async get(collection, id) {
    const doc = await this.col(collection).findOne({ _id: id });
    if (!doc) return null;
    return {
      id: String(doc._id),
      content: doc.content,
      metadata: doc.metadata
    };
  }
  // Best-effort drop; the driver throws when the collection is missing.
  async dropCollection(collection) {
    try {
      await this.col(collection).drop();
    } catch {
    }
  }
  async close() {
    await this.client.close();
  }
};
3171
/**
 * Cosine similarity between two equal-length numeric vectors.
 * Returns 0 when either vector has zero magnitude (instead of NaN).
 * @param {number[]} a
 * @param {number[]} b
 * @returns {number} similarity in [-1, 1]
 */
function cosine(a, b) {
  let dot = 0;
  let magA = 0;
  let magB = 0;
  for (const [i, x] of a.entries()) {
    const y = b[i];
    dot += x * y;
    magA += x * x;
    magB += y * y;
  }
  const denom = Math.sqrt(magA) * Math.sqrt(magB);
  if (denom === 0) return 0;
  return dot / denom;
}
3183
+
3184
+ // src/vector/embeddings/openai.ts
3185
+ import { createRequire as createRequire13 } from "module";
3186
+ var _require13 = createRequire13(import.meta.url);
3187
// Default output dimensions for known OpenAI embedding models.
var MODEL_DIMENSIONS = {
  "text-embedding-3-small": 1536,
  "text-embedding-3-large": 3072,
  "text-embedding-ada-002": 1536
};
// Embedder backed by the OpenAI embeddings API. The `dimensions` request
// parameter is only sent when the caller overrides the model's default.
var OpenAIEmbedding = class {
  dimensions;
  client;
  model;
  // config: { model?, dimensions?, apiKey?, baseURL? }
  constructor(config = {}) {
    this.model = config.model ?? "text-embedding-3-small";
    this.dimensions = config.dimensions ?? MODEL_DIMENSIONS[this.model] ?? 1536;
    try {
      const mod = _require13("openai");
      const OpenAI = mod.default ?? mod;
      this.client = new OpenAI({
        apiKey: config.apiKey ?? process.env.OPENAI_API_KEY,
        baseURL: config.baseURL
      });
    } catch {
      // openai is an optional peer dependency; surface an install hint.
      throw new Error(
        "openai package is required for OpenAIEmbedding. Install it: npm install openai"
      );
    }
  }
  // Builds the request body; `dimensions` is included only when the
  // configured size differs from the model default.
  #request(input) {
    const body = { model: this.model, input };
    if (this.dimensions !== MODEL_DIMENSIONS[this.model]) {
      body.dimensions = this.dimensions;
    }
    return body;
  }
  // Embed a single string; returns one vector.
  async embed(text) {
    const response = await this.client.embeddings.create(this.#request(text));
    return response.data[0].embedding;
  }
  // Embed many strings in one request; vectors are returned in input
  // order (the API's `index` field is used to re-sort).
  async embedBatch(texts) {
    const response = await this.client.embeddings.create(this.#request(texts));
    const ordered = [...response.data].sort((a, b) => a.index - b.index);
    return ordered.map((d) => d.embedding);
  }
};
3229
+
3230
+ // src/vector/embeddings/google.ts
3231
+ import { createRequire as createRequire14 } from "module";
3232
+ var _require14 = createRequire14(import.meta.url);
3233
// Default output dimensions for known Google embedding models.
var MODEL_DIMENSIONS2 = {
  "text-embedding-004": 768,
  "embedding-001": 768
};
// Embedder backed by Google GenAI. `outputDimensionality` is only sent
// when the caller overrides the model's default dimension.
var GoogleEmbedding = class {
  dimensions;
  ai;
  model;
  // config: { model?, dimensions?, apiKey? }
  constructor(config = {}) {
    this.model = config.model ?? "text-embedding-004";
    this.dimensions = config.dimensions ?? MODEL_DIMENSIONS2[this.model] ?? 768;
    try {
      const { GoogleGenAI } = _require14("@google/genai");
      this.ai = new GoogleGenAI({
        apiKey: config.apiKey ?? process.env.GOOGLE_API_KEY
      });
    } catch {
      // @google/genai is an optional peer dependency; give install hint.
      throw new Error(
        "@google/genai is required for GoogleEmbedding. Install it: npm install @google/genai"
      );
    }
  }
  // Request payload for one embedContent call; adds the dimensionality
  // override only when it differs from the model default.
  #params(text) {
    const params = { model: this.model, contents: text };
    if (this.dimensions !== MODEL_DIMENSIONS2[this.model]) {
      params.config = { outputDimensionality: this.dimensions };
    }
    return params;
  }
  // Embed a single string; returns one vector.
  async embed(text) {
    const result = await this.ai.models.embedContent(this.#params(text));
    return result.embeddings[0].values;
  }
  // Embed many strings: one request per text, issued in parallel;
  // Promise.all preserves input order.
  async embedBatch(texts) {
    const results = await Promise.all(
      texts.map((text) => this.ai.models.embedContent(this.#params(text)))
    );
    return results.map((r) => r.embeddings[0].values);
  }
};
3276
+
3277
+ // src/knowledge/knowledge-base.ts
3278
+ import { z } from "zod";
3279
// A named document collection on top of a pluggable VectorStore.
// Initialization is lazy: every public method ensures the underlying
// store is initialized before use. `asTool()` adapts the KB for Agents.
var KnowledgeBase = class {
  name;
  // Collection name in the underlying store (derived from `name` when
  // not given explicitly).
  collection;
  store;
  initialized = false;
  /**
   * @param config.name        Human-readable knowledge-base name.
   * @param config.vectorStore Backing VectorStore implementation.
   * @param config.collection  Optional explicit collection name; defaults
   *                           to the lowercased name with whitespace
   *                           replaced by underscores.
   */
  constructor(config) {
    this.name = config.name;
    this.store = config.vectorStore;
    this.collection = config.collection ?? config.name.toLowerCase().replace(/\s+/g, "_");
  }
  // Idempotent: initializes the underlying store exactly once.
  async initialize() {
    if (this.initialized) return;
    await this.store.initialize();
    this.initialized = true;
  }
  // Add or replace a single document.
  async add(doc) {
    await this.ensureInit();
    await this.store.upsert(this.collection, doc);
  }
  // Add or replace many documents in one batch.
  async addDocuments(docs) {
    await this.ensureInit();
    await this.store.upsertBatch(this.collection, docs);
  }
  // Similarity search; options pass through to the store.
  async search(query, options) {
    await this.ensureInit();
    return this.store.search(this.collection, query, options);
  }
  async get(id) {
    await this.ensureInit();
    return this.store.get(this.collection, id);
  }
  async delete(id) {
    await this.ensureInit();
    await this.store.delete(this.collection, id);
  }
  // Drops the whole collection from the store.
  async clear() {
    // Fix: initialize the store first, consistent with every other
    // method — stores like MongoDBVectorStore must connect before any
    // collection operation, so clear() on a fresh KB used to crash.
    await this.ensureInit();
    await this.store.dropCollection(this.collection);
  }
  async close() {
    await this.store.close();
  }
  /**
   * Returns a ToolDef that an Agent can use to search this knowledge base.
   * Plug the result directly into `Agent({ tools: [kb.asTool()] })`.
   */
  asTool(config = {}) {
    const topK = config.topK ?? 5;
    const minScore = config.minScore;
    const filter = config.filter;
    const toolName = config.toolName ?? `search_${this.collection}`;
    const description = config.description ?? `Search the "${this.name}" knowledge base for relevant information. Use this before answering questions related to ${this.name}.`;
    const formatResults = config.formatResults ?? defaultFormatResults;
    const kb = this;
    return {
      name: toolName,
      description,
      parameters: z.object({
        query: z.string().describe("Search query to find relevant documents")
      }),
      execute: async (args) => {
        const results = await kb.search(args.query, {
          topK,
          minScore,
          filter
        });
        if (results.length === 0) {
          return "No relevant documents found in the knowledge base.";
        }
        return formatResults(results);
      }
    };
  }
  // Internal lazy-init guard used by every public method.
  async ensureInit() {
    if (!this.initialized) await this.initialize();
  }
};
3355
/**
 * Default renderer for knowledge-base search results: a numbered list
 * where each entry shows the similarity score, flattened metadata
 * key/value pairs, and the document text.
 * @param {Array<{content: string, score: number, metadata?: object}>} results
 * @returns {string} human-readable summary for the LLM tool output
 */
function defaultFormatResults(results) {
  const rendered = results.map((result, index) => {
    const metaPairs = Object.entries(result.metadata ?? {}).filter(([, value]) => value !== void 0).map(([key, value]) => `${key}: ${value}`);
    const suffix = metaPairs.length > 0 ? ` | ${metaPairs.join(", ")}` : "";
    return `[${index + 1}] (score: ${result.score.toFixed(3)}${suffix})\n${result.content}`;
  });
  return `Found ${results.length} relevant document(s):\n\n${rendered.join("\n\n")}`;
}
3366
+
3367
+ // src/memory/memory.ts
3368
// Storage namespaces for conversation memory.
var SHORT_TERM_NS = "memory:short";
var LONG_TERM_NS = "memory:long";
// Conversation memory: a bounded short-term message window per session,
// optionally spilling evicted messages into long-term text summaries.
var Memory = class {
  storage;
  // Maximum messages retained in the short-term window per session.
  maxShortTermMessages;
  // When true, evicted messages are condensed into long-term summaries.
  enableLongTerm;
  // config: { storage?, maxShortTermMessages?, enableLongTerm? }
  constructor(config) {
    this.storage = config?.storage ?? new InMemoryStorage();
    this.maxShortTermMessages = config?.maxShortTermMessages ?? 50;
    this.enableLongTerm = config?.enableLongTerm ?? false;
  }
  // Appends messages to the session window; when the window overflows,
  // the oldest messages are evicted (and summarized if long-term memory
  // is enabled) before the trimmed window is persisted.
  async addMessages(sessionId, messages) {
    const prior = await this.storage.get(SHORT_TERM_NS, sessionId) ?? [];
    const combined = prior.concat(messages);
    const excess = combined.length - this.maxShortTermMessages;
    let kept = combined;
    if (excess > 0) {
      const evicted = combined.slice(0, excess);
      kept = combined.slice(excess);
      if (this.enableLongTerm && evicted.length > 0) {
        await this.summarizeAndStore(sessionId, evicted);
      }
    }
    await this.storage.set(SHORT_TERM_NS, sessionId, kept);
  }
  // Current short-term window for the session (empty when unknown).
  async getMessages(sessionId) {
    const stored = await this.storage.get(SHORT_TERM_NS, sessionId);
    return stored ?? [];
  }
  // All long-term summary strings for the session; empty unless
  // long-term memory is enabled.
  async getSummaries(sessionId) {
    if (!this.enableLongTerm) return [];
    const entries = await this.storage.list(LONG_TERM_NS, sessionId);
    return entries.map((entry) => entry.value.summary);
  }
  // Renders the summaries as a "Previous context:" block, or "" when
  // there is nothing to show.
  async getContextString(sessionId) {
    const summaries = await this.getSummaries(sessionId);
    if (summaries.length === 0) return "";
    return `Previous context:\n${summaries.join("\n")}`;
  }
  // Condenses evicted messages into a single truncated "role: text"
  // summary entry keyed by session and timestamp.
  async summarizeAndStore(sessionId, messages) {
    const parts = [];
    for (const message of messages) {
      if (!message.content) continue;
      parts.push(`${message.role}: ${getTextContent(message.content)}`);
    }
    if (parts.length === 0) return;
    const key = `${sessionId}:${Date.now()}`;
    const entry = {
      key,
      summary: parts.join(" | ").slice(0, 500),
      createdAt: /* @__PURE__ */ new Date()
    };
    await this.storage.set(LONG_TERM_NS, key, entry);
  }
  // Clears the short-term window only; long-term summaries persist.
  async clear(sessionId) {
    await this.storage.delete(SHORT_TERM_NS, sessionId);
  }
};
3425
+ export {
3426
+ Agent,
3427
+ AnthropicProvider,
3428
+ BaseVectorStore,
3429
+ EventBus,
3430
+ GoogleEmbedding,
3431
+ GoogleProvider,
3432
+ InMemoryStorage,
3433
+ InMemoryVectorStore,
3434
+ KnowledgeBase,
3435
+ LLMLoop,
3436
+ Logger,
3437
+ Memory,
3438
+ ModelRegistry,
3439
+ MongoDBStorage,
3440
+ MongoDBVectorStore,
3441
+ OllamaProvider,
3442
+ OpenAIEmbedding,
3443
+ OpenAIProvider,
3444
+ PgVectorStore,
3445
+ PostgresStorage,
3446
+ QdrantVectorStore,
3447
+ RunContext,
3448
+ SessionManager,
3449
+ SqliteStorage,
3450
+ Team,
3451
+ TeamMode,
3452
+ ToolExecutor,
3453
+ Workflow,
3454
+ anthropic,
3455
+ defineTool,
3456
+ getTextContent,
3457
+ google,
3458
+ isMultiModal,
3459
+ ollama,
3460
+ openai,
3461
+ registry
3462
+ };