@radaros/core 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/dist/index.d.ts +887 -0
  2. package/dist/index.js +3462 -0
  3. package/package.json +64 -0
  4. package/src/agent/agent.ts +314 -0
  5. package/src/agent/llm-loop.ts +263 -0
  6. package/src/agent/run-context.ts +35 -0
  7. package/src/agent/types.ts +77 -0
  8. package/src/events/event-bus.ts +45 -0
  9. package/src/events/types.ts +16 -0
  10. package/src/guardrails/types.ts +5 -0
  11. package/src/hooks/types.ts +6 -0
  12. package/src/index.ts +111 -0
  13. package/src/knowledge/knowledge-base.ts +146 -0
  14. package/src/logger/logger.ts +232 -0
  15. package/src/memory/memory.ts +87 -0
  16. package/src/memory/types.ts +13 -0
  17. package/src/models/provider.ts +22 -0
  18. package/src/models/providers/anthropic.ts +330 -0
  19. package/src/models/providers/google.ts +361 -0
  20. package/src/models/providers/ollama.ts +211 -0
  21. package/src/models/providers/openai.ts +323 -0
  22. package/src/models/registry.ts +90 -0
  23. package/src/models/types.ts +112 -0
  24. package/src/session/session-manager.ts +75 -0
  25. package/src/session/types.ts +10 -0
  26. package/src/storage/driver.ts +10 -0
  27. package/src/storage/in-memory.ts +44 -0
  28. package/src/storage/mongodb.ts +70 -0
  29. package/src/storage/postgres.ts +81 -0
  30. package/src/storage/sqlite.ts +81 -0
  31. package/src/team/modes.ts +1 -0
  32. package/src/team/team.ts +323 -0
  33. package/src/team/types.ts +26 -0
  34. package/src/tools/define-tool.ts +20 -0
  35. package/src/tools/tool-executor.ts +131 -0
  36. package/src/tools/types.ts +27 -0
  37. package/src/vector/base.ts +44 -0
  38. package/src/vector/embeddings/google.ts +64 -0
  39. package/src/vector/embeddings/openai.ts +66 -0
  40. package/src/vector/in-memory.ts +115 -0
  41. package/src/vector/mongodb.ts +241 -0
  42. package/src/vector/pgvector.ts +169 -0
  43. package/src/vector/qdrant.ts +203 -0
  44. package/src/vector/types.ts +55 -0
  45. package/src/workflow/step-runner.ts +303 -0
  46. package/src/workflow/types.ts +55 -0
  47. package/src/workflow/workflow.ts +68 -0
  48. package/tsconfig.json +8 -0
package/package.json ADDED
@@ -0,0 +1,64 @@
1
+ {
2
+ "name": "@radaros/core",
3
+ "version": "0.1.0",
4
+ "type": "module",
5
+ "main": "./dist/index.js",
6
+ "types": "./dist/index.d.ts",
7
+ "exports": {
8
+ ".": {
9
+ "import": "./dist/index.js",
10
+ "types": "./dist/index.d.ts"
11
+ }
12
+ },
13
+ "scripts": {
14
+ "build": "tsup src/index.ts --format esm --dts --clean",
15
+ "dev": "tsup src/index.ts --format esm --dts --watch"
16
+ },
17
+ "dependencies": {
18
+ "uuid": "^11.0.0",
19
+ "zod": "^3.23.0",
20
+ "zod-to-json-schema": "^3.24.0"
21
+ },
22
+ "devDependencies": {
23
+ "@types/node": "^25.3.1",
24
+ "@types/uuid": "^10.0.0",
25
+ "tsup": "^8.0.0",
26
+ "typescript": "^5.6.0"
27
+ },
28
+ "peerDependencies": {
29
+ "@anthropic-ai/sdk": "^0.30.0",
30
+ "@google/genai": "^0.10.0",
31
+ "@qdrant/js-client-rest": "^1.0.0",
32
+ "better-sqlite3": "^11.0.0",
33
+ "mongodb": "^6.0.0",
34
+ "ollama": "^0.5.0",
35
+ "openai": "^4.0.0 || ^5.0.0 || ^6.0.0",
36
+ "pg": "^8.0.0"
37
+ },
38
+ "peerDependenciesMeta": {
39
+ "openai": {
40
+ "optional": true
41
+ },
42
+ "@anthropic-ai/sdk": {
43
+ "optional": true
44
+ },
45
+ "@google/genai": {
46
+ "optional": true
47
+ },
48
+ "ollama": {
49
+ "optional": true
50
+ },
51
+ "better-sqlite3": {
52
+ "optional": true
53
+ },
54
+ "pg": {
55
+ "optional": true
56
+ },
57
+ "mongodb": {
58
+ "optional": true
59
+ },
60
+ "@qdrant/js-client-rest": {
61
+ "optional": true
62
+ }
63
+ }
64
+ }
@@ -0,0 +1,314 @@
1
+ import { v4 as uuidv4 } from "uuid";
2
+ import { EventBus } from "../events/event-bus.js";
3
+ import { InMemoryStorage } from "../storage/in-memory.js";
4
+ import { SessionManager } from "../session/session-manager.js";
5
+ import { ToolExecutor } from "../tools/tool-executor.js";
6
+ import { Logger } from "../logger/logger.js";
7
+ import { LLMLoop } from "./llm-loop.js";
8
+ import { RunContext } from "./run-context.js";
9
+ import { getTextContent, type ChatMessage, type MessageContent, type StreamChunk } from "../models/types.js";
10
+ import type {
11
+ AgentConfig,
12
+ RunOpts,
13
+ RunOutput,
14
+ } from "./types.js";
15
+
16
+ export class Agent {
17
+ readonly name: string;
18
+ readonly eventBus: EventBus;
19
+ readonly instructions?: string | ((ctx: RunContext) => string);
20
+
21
+ private config: AgentConfig;
22
+ private sessionManager: SessionManager;
23
+ private llmLoop: LLMLoop;
24
+ private logger: Logger;
25
+
26
+ get tools() {
27
+ return this.config.tools ?? [];
28
+ }
29
+
30
+ get modelId(): string {
31
+ return this.config.model.modelId;
32
+ }
33
+
34
+ get providerId(): string {
35
+ return this.config.model.providerId;
36
+ }
37
+
38
+ get hasStructuredOutput(): boolean {
39
+ return !!this.config.structuredOutput;
40
+ }
41
+
42
+ constructor(config: AgentConfig) {
43
+ this.config = config;
44
+ this.name = config.name;
45
+ this.instructions = config.instructions;
46
+ this.eventBus = config.eventBus ?? new EventBus();
47
+
48
+ const storage = config.storage ?? new InMemoryStorage();
49
+ this.sessionManager = new SessionManager(storage);
50
+
51
+ this.logger = new Logger({
52
+ level: config.logLevel ?? "silent",
53
+ prefix: config.name,
54
+ });
55
+
56
+ const toolExecutor =
57
+ config.tools && config.tools.length > 0
58
+ ? new ToolExecutor(config.tools)
59
+ : null;
60
+
61
+ this.llmLoop = new LLMLoop(config.model, toolExecutor, {
62
+ maxToolRoundtrips: config.maxToolRoundtrips ?? 10,
63
+ temperature: config.temperature,
64
+ structuredOutput: config.structuredOutput,
65
+ logger: this.logger,
66
+ });
67
+ }
68
+
69
+ async run(input: MessageContent, opts?: RunOpts): Promise<RunOutput> {
70
+ const startTime = Date.now();
71
+ const sessionId = opts?.sessionId ?? this.config.sessionId ?? uuidv4();
72
+ const userId = opts?.userId ?? this.config.userId;
73
+ const inputText = typeof input === "string" ? input : getTextContent(input);
74
+
75
+ const session = await this.sessionManager.getOrCreate(sessionId, userId);
76
+
77
+ const ctx = new RunContext({
78
+ sessionId,
79
+ userId,
80
+ metadata: opts?.metadata ?? {},
81
+ eventBus: this.eventBus,
82
+ sessionState: { ...session.state },
83
+ });
84
+
85
+ this.logger.agentStart(this.name, inputText);
86
+
87
+ this.eventBus.emit("run.start", {
88
+ runId: ctx.runId,
89
+ agentName: this.name,
90
+ input: inputText,
91
+ });
92
+
93
+ try {
94
+ if (this.config.hooks?.beforeRun) {
95
+ await this.config.hooks.beforeRun(ctx);
96
+ }
97
+
98
+ if (this.config.guardrails?.input) {
99
+ for (const guardrail of this.config.guardrails.input) {
100
+ const result = await guardrail.validate(input, ctx);
101
+ if (!result.pass) {
102
+ throw new Error(
103
+ `Input guardrail "${guardrail.name}" blocked: ${result.reason}`
104
+ );
105
+ }
106
+ }
107
+ }
108
+
109
+ const messages = await this.buildMessages(input, sessionId, ctx);
110
+ const output = await this.llmLoop.run(messages, ctx, opts?.apiKey);
111
+
112
+ output.durationMs = Date.now() - startTime;
113
+
114
+ if (this.config.guardrails?.output) {
115
+ for (const guardrail of this.config.guardrails.output) {
116
+ const result = await guardrail.validate(output, ctx);
117
+ if (!result.pass) {
118
+ throw new Error(
119
+ `Output guardrail "${guardrail.name}" blocked: ${result.reason}`
120
+ );
121
+ }
122
+ }
123
+ }
124
+
125
+ await this.sessionManager.appendMessages(sessionId, [
126
+ { role: "user", content: inputText },
127
+ { role: "assistant", content: output.text },
128
+ ]);
129
+ await this.sessionManager.updateState(sessionId, ctx.sessionState);
130
+
131
+ if (this.config.memory) {
132
+ await this.config.memory.addMessages(sessionId, [
133
+ { role: "user", content: inputText },
134
+ { role: "assistant", content: output.text },
135
+ ]);
136
+ }
137
+
138
+ if (this.config.hooks?.afterRun) {
139
+ await this.config.hooks.afterRun(ctx, output);
140
+ }
141
+
142
+ this.logger.agentEnd(this.name, output.text, output.usage, output.durationMs);
143
+
144
+ this.eventBus.emit("run.complete", {
145
+ runId: ctx.runId,
146
+ output,
147
+ });
148
+
149
+ return output;
150
+ } catch (error) {
151
+ const err = error instanceof Error ? error : new Error(String(error));
152
+
153
+ this.logger.error(`Run failed: ${err.message}`);
154
+
155
+ if (this.config.hooks?.onError) {
156
+ await this.config.hooks.onError(ctx, err);
157
+ }
158
+
159
+ this.eventBus.emit("run.error", {
160
+ runId: ctx.runId,
161
+ error: err,
162
+ });
163
+
164
+ throw err;
165
+ }
166
+ }
167
+
168
+ async *stream(
169
+ input: MessageContent,
170
+ opts?: RunOpts
171
+ ): AsyncGenerator<StreamChunk> {
172
+ const sessionId = opts?.sessionId ?? this.config.sessionId ?? uuidv4();
173
+ const userId = opts?.userId ?? this.config.userId;
174
+ const inputText = typeof input === "string" ? input : getTextContent(input);
175
+
176
+ const session = await this.sessionManager.getOrCreate(sessionId, userId);
177
+
178
+ const ctx = new RunContext({
179
+ sessionId,
180
+ userId,
181
+ metadata: opts?.metadata ?? {},
182
+ eventBus: this.eventBus,
183
+ sessionState: { ...session.state },
184
+ });
185
+
186
+ this.eventBus.emit("run.start", {
187
+ runId: ctx.runId,
188
+ agentName: this.name,
189
+ input: inputText,
190
+ });
191
+
192
+ let fullText = "";
193
+ let streamOk = false;
194
+
195
+ try {
196
+ if (this.config.hooks?.beforeRun) {
197
+ await this.config.hooks.beforeRun(ctx);
198
+ }
199
+
200
+ if (this.config.guardrails?.input) {
201
+ for (const guardrail of this.config.guardrails.input) {
202
+ const result = await guardrail.validate(input, ctx);
203
+ if (!result.pass) {
204
+ throw new Error(
205
+ `Input guardrail "${guardrail.name}" blocked: ${result.reason}`
206
+ );
207
+ }
208
+ }
209
+ }
210
+
211
+ const messages = await this.buildMessages(input, sessionId, ctx);
212
+
213
+ for await (const chunk of this.llmLoop.stream(messages, ctx, opts?.apiKey)) {
214
+ if (chunk.type === "text") {
215
+ fullText += chunk.text;
216
+ }
217
+ yield chunk;
218
+ }
219
+
220
+ streamOk = true;
221
+ } catch (error) {
222
+ const err = error instanceof Error ? error : new Error(String(error));
223
+
224
+ if (this.config.hooks?.onError) {
225
+ await this.config.hooks.onError(ctx, err);
226
+ }
227
+
228
+ this.eventBus.emit("run.error", {
229
+ runId: ctx.runId,
230
+ error: err,
231
+ });
232
+
233
+ throw err;
234
+ } finally {
235
+ if (streamOk) {
236
+ await this.sessionManager.appendMessages(sessionId, [
237
+ { role: "user", content: inputText },
238
+ { role: "assistant", content: fullText },
239
+ ]);
240
+ await this.sessionManager.updateState(sessionId, ctx.sessionState);
241
+
242
+ if (this.config.memory) {
243
+ await this.config.memory.addMessages(sessionId, [
244
+ { role: "user", content: inputText },
245
+ { role: "assistant", content: fullText },
246
+ ]);
247
+ }
248
+
249
+ this.eventBus.emit("run.complete", {
250
+ runId: ctx.runId,
251
+ output: {
252
+ text: fullText,
253
+ toolCalls: [],
254
+ usage: { promptTokens: 0, completionTokens: 0, totalTokens: 0 },
255
+ },
256
+ });
257
+ }
258
+ }
259
+ }
260
+
261
+ private async buildMessages(
262
+ input: MessageContent,
263
+ sessionId: string,
264
+ ctx: RunContext
265
+ ): Promise<ChatMessage[]> {
266
+ const messages: ChatMessage[] = [];
267
+
268
+ let systemContent = "";
269
+ if (this.config.instructions) {
270
+ systemContent =
271
+ typeof this.config.instructions === "function"
272
+ ? this.config.instructions(ctx)
273
+ : this.config.instructions;
274
+ }
275
+
276
+ if (this.config.memory) {
277
+ const memoryContext = await this.config.memory.getContextString(
278
+ sessionId
279
+ );
280
+ if (memoryContext) {
281
+ systemContent = systemContent
282
+ ? `${systemContent}\n\n${memoryContext}`
283
+ : memoryContext;
284
+ }
285
+ }
286
+
287
+ if (systemContent) {
288
+ messages.push({ role: "system", content: systemContent });
289
+ }
290
+
291
+ if (this.config.addHistoryToMessages !== false) {
292
+ const limit = this.config.numHistoryRuns
293
+ ? this.config.numHistoryRuns * 2
294
+ : 20;
295
+ const history = await this.sessionManager.getHistory(sessionId, limit);
296
+ if (history.length > 0) {
297
+ this.logger.info(`Loaded ${history.length} history messages for session ${sessionId}`);
298
+ if (messages.length > 0 && messages[0].role === "system") {
299
+ messages[0] = {
300
+ ...messages[0],
301
+ content: `${getTextContent(messages[0].content)}\n\nThis is a multi-turn conversation. The previous messages in this session are included below. Use them to maintain context and answer questions about prior exchanges.`,
302
+ };
303
+ }
304
+ }
305
+ messages.push(...history);
306
+ }
307
+
308
+ messages.push({ role: "user", content: input });
309
+
310
+ this.logger.info(`Sending ${messages.length} messages to LLM: ${messages.map(m => `[${m.role}: ${typeof m.content === 'string' ? m.content.slice(0, 40) : '(multimodal)'}]`).join(', ')}`);
311
+
312
+ return messages;
313
+ }
314
+ }
@@ -0,0 +1,263 @@
1
+ import { createRequire } from "node:module";
2
+ import type { z } from "zod";
3
+ import type { ModelProvider } from "../models/provider.js";
4
+ import {
5
+ getTextContent,
6
+ type ChatMessage,
7
+ type ModelConfig,
8
+ type StreamChunk,
9
+ type ToolDefinition,
10
+ } from "../models/types.js";
11
+ import type { ToolExecutor } from "../tools/tool-executor.js";
12
+ import type { RunContext } from "./run-context.js";
13
+ import type { RunOutput } from "./types.js";
14
+ import type { ToolCallResult } from "../tools/types.js";
15
+ import type { Logger } from "../logger/logger.js";
16
+
17
+ const _require = createRequire(import.meta.url);
18
+
19
+ export class LLMLoop {
20
+ private provider: ModelProvider;
21
+ private toolExecutor: ToolExecutor | null;
22
+ private maxToolRoundtrips: number;
23
+ private temperature?: number;
24
+ private maxTokens?: number;
25
+ private structuredOutput?: z.ZodSchema;
26
+ private logger?: Logger;
27
+
28
+ constructor(
29
+ provider: ModelProvider,
30
+ toolExecutor: ToolExecutor | null,
31
+ options: {
32
+ maxToolRoundtrips: number;
33
+ temperature?: number;
34
+ maxTokens?: number;
35
+ structuredOutput?: z.ZodSchema;
36
+ logger?: Logger;
37
+ }
38
+ ) {
39
+ this.provider = provider;
40
+ this.toolExecutor = toolExecutor;
41
+ this.maxToolRoundtrips = options.maxToolRoundtrips;
42
+ this.temperature = options.temperature;
43
+ this.maxTokens = options.maxTokens;
44
+ this.structuredOutput = options.structuredOutput;
45
+ this.logger = options.logger;
46
+ }
47
+
48
+ async run(messages: ChatMessage[], ctx: RunContext, apiKey?: string): Promise<RunOutput> {
49
+ const allToolCalls: ToolCallResult[] = [];
50
+ let totalPromptTokens = 0;
51
+ let totalCompletionTokens = 0;
52
+ const currentMessages = [...messages];
53
+ const toolDefs = this.toolExecutor?.getToolDefinitions() ?? [];
54
+
55
+ for (let roundtrip = 0; roundtrip <= this.maxToolRoundtrips; roundtrip++) {
56
+ const modelConfig: ModelConfig & { tools?: ToolDefinition[] } = {};
57
+ if (apiKey) modelConfig.apiKey = apiKey;
58
+ if (this.temperature !== undefined)
59
+ modelConfig.temperature = this.temperature;
60
+ if (this.maxTokens !== undefined) modelConfig.maxTokens = this.maxTokens;
61
+ if (toolDefs.length > 0) modelConfig.tools = toolDefs;
62
+
63
+ if (this.structuredOutput) {
64
+ modelConfig.responseFormat = {
65
+ type: "json_schema",
66
+ schema: this.zodToJsonSchema(this.structuredOutput),
67
+ name: "structured_response",
68
+ };
69
+ }
70
+
71
+ const response = await this.provider.generate(
72
+ currentMessages,
73
+ modelConfig
74
+ );
75
+
76
+ totalPromptTokens += response.usage.promptTokens;
77
+ totalCompletionTokens += response.usage.completionTokens;
78
+
79
+ currentMessages.push(response.message);
80
+
81
+ if (
82
+ response.finishReason !== "tool_calls" ||
83
+ !response.message.toolCalls?.length ||
84
+ !this.toolExecutor
85
+ ) {
86
+ const text = getTextContent(response.message.content);
87
+
88
+ const output: RunOutput = {
89
+ text,
90
+ toolCalls: allToolCalls,
91
+ usage: {
92
+ promptTokens: totalPromptTokens,
93
+ completionTokens: totalCompletionTokens,
94
+ totalTokens: totalPromptTokens + totalCompletionTokens,
95
+ },
96
+ };
97
+
98
+ if (this.structuredOutput && text) {
99
+ try {
100
+ const jsonStr = this.extractJson(text);
101
+ const parsed = JSON.parse(jsonStr);
102
+ output.structured = this.structuredOutput.parse(parsed);
103
+ } catch {
104
+ // structured parsing failed, raw text is still available
105
+ }
106
+ }
107
+
108
+ return output;
109
+ }
110
+
111
+ const toolResults = await this.toolExecutor.executeAll(
112
+ response.message.toolCalls,
113
+ ctx
114
+ );
115
+
116
+ allToolCalls.push(...toolResults);
117
+
118
+ for (const result of toolResults) {
119
+ const content =
120
+ typeof result.result === "string"
121
+ ? result.result
122
+ : result.result.content;
123
+
124
+ this.logger?.toolCall(result.toolName, {});
125
+ this.logger?.toolResult(result.toolName, typeof content === "string" ? content : JSON.stringify(content));
126
+
127
+ currentMessages.push({
128
+ role: "tool",
129
+ content,
130
+ toolCallId: result.toolCallId,
131
+ name: result.toolName,
132
+ });
133
+ }
134
+ }
135
+
136
+ const lastAssistantMsg = currentMessages
137
+ .reverse()
138
+ .find((m) => m.role === "assistant");
139
+
140
+ const text = getTextContent(lastAssistantMsg?.content ?? null);
141
+
142
+ return {
143
+ text,
144
+ toolCalls: allToolCalls,
145
+ usage: {
146
+ promptTokens: totalPromptTokens,
147
+ completionTokens: totalCompletionTokens,
148
+ totalTokens: totalPromptTokens + totalCompletionTokens,
149
+ },
150
+ };
151
+ }
152
+
153
+ async *stream(
154
+ messages: ChatMessage[],
155
+ ctx: RunContext,
156
+ apiKey?: string
157
+ ): AsyncGenerator<StreamChunk> {
158
+ const currentMessages = [...messages];
159
+ const toolDefs = this.toolExecutor?.getToolDefinitions() ?? [];
160
+
161
+ for (let roundtrip = 0; roundtrip <= this.maxToolRoundtrips; roundtrip++) {
162
+ const modelConfig: ModelConfig & { tools?: ToolDefinition[] } = {};
163
+ if (apiKey) modelConfig.apiKey = apiKey;
164
+ if (this.temperature !== undefined)
165
+ modelConfig.temperature = this.temperature;
166
+ if (this.maxTokens !== undefined) modelConfig.maxTokens = this.maxTokens;
167
+ if (toolDefs.length > 0) modelConfig.tools = toolDefs;
168
+
169
+ let fullText = "";
170
+ const pendingToolCalls: Array<{
171
+ id: string;
172
+ name: string;
173
+ args: string;
174
+ }> = [];
175
+ let finishReason = "stop";
176
+
177
+ const streamGen = this.provider.stream(currentMessages, modelConfig);
178
+
179
+ for await (const chunk of streamGen) {
180
+ yield chunk;
181
+
182
+ if (chunk.type === "text") {
183
+ fullText += chunk.text;
184
+ ctx.eventBus.emit("run.stream.chunk", {
185
+ runId: ctx.runId,
186
+ chunk: chunk.text,
187
+ });
188
+ } else if (chunk.type === "tool_call_start") {
189
+ pendingToolCalls.push({
190
+ id: chunk.toolCall.id,
191
+ name: chunk.toolCall.name,
192
+ args: "",
193
+ });
194
+ } else if (chunk.type === "tool_call_delta") {
195
+ const tc = pendingToolCalls.find(
196
+ (t) => t.id === chunk.toolCallId
197
+ );
198
+ if (tc) {
199
+ tc.args += chunk.argumentsDelta;
200
+ }
201
+ } else if (chunk.type === "finish") {
202
+ finishReason = chunk.finishReason;
203
+ }
204
+ }
205
+
206
+ if (finishReason !== "tool_calls" || pendingToolCalls.length === 0 || !this.toolExecutor) {
207
+ return;
208
+ }
209
+
210
+ const assistantMsg: ChatMessage = {
211
+ role: "assistant",
212
+ content: fullText || null,
213
+ toolCalls: pendingToolCalls.map((tc) => ({
214
+ id: tc.id,
215
+ name: tc.name,
216
+ arguments: JSON.parse(tc.args || "{}"),
217
+ })),
218
+ };
219
+ currentMessages.push(assistantMsg);
220
+
221
+ const toolResults = await this.toolExecutor.executeAll(
222
+ assistantMsg.toolCalls!,
223
+ ctx
224
+ );
225
+
226
+ for (const result of toolResults) {
227
+ const content =
228
+ typeof result.result === "string"
229
+ ? result.result
230
+ : result.result.content;
231
+
232
+ currentMessages.push({
233
+ role: "tool",
234
+ content,
235
+ toolCallId: result.toolCallId,
236
+ name: result.toolName,
237
+ });
238
+ }
239
+ }
240
+ }
241
+
242
+ private extractJson(text: string): string {
243
+ const fenceMatch = text.match(/```(?:json)?\s*\n?([\s\S]*?)```/);
244
+ if (fenceMatch) return fenceMatch[1].trim();
245
+
246
+ const braceStart = text.indexOf("{");
247
+ const braceEnd = text.lastIndexOf("}");
248
+ if (braceStart !== -1 && braceEnd > braceStart) {
249
+ return text.slice(braceStart, braceEnd + 1);
250
+ }
251
+
252
+ return text.trim();
253
+ }
254
+
255
+ private zodToJsonSchema(schema: z.ZodSchema): Record<string, unknown> {
256
+ try {
257
+ const { zodToJsonSchema } = _require("zod-to-json-schema");
258
+ return zodToJsonSchema(schema, { target: "openApi3" }) as Record<string, unknown>;
259
+ } catch {
260
+ return {};
261
+ }
262
+ }
263
+ }
@@ -0,0 +1,35 @@
1
+ import { v4 as uuidv4 } from "uuid";
2
+ import type { EventBus } from "../events/event-bus.js";
3
+
4
+ export class RunContext {
5
+ readonly runId: string;
6
+ readonly sessionId: string;
7
+ readonly userId?: string;
8
+ readonly metadata: Record<string, unknown>;
9
+ readonly eventBus: EventBus;
10
+ sessionState: Record<string, unknown>;
11
+
12
+ constructor(opts: {
13
+ sessionId: string;
14
+ userId?: string;
15
+ metadata?: Record<string, unknown>;
16
+ eventBus: EventBus;
17
+ sessionState?: Record<string, unknown>;
18
+ runId?: string;
19
+ }) {
20
+ this.runId = opts.runId ?? uuidv4();
21
+ this.sessionId = opts.sessionId;
22
+ this.userId = opts.userId;
23
+ this.metadata = opts.metadata ?? {};
24
+ this.eventBus = opts.eventBus;
25
+ this.sessionState = opts.sessionState ?? {};
26
+ }
27
+
28
+ getState<T>(key: string): T | undefined {
29
+ return this.sessionState[key] as T | undefined;
30
+ }
31
+
32
+ setState(key: string, value: unknown): void {
33
+ this.sessionState[key] = value;
34
+ }
35
+ }