@cogitator-ai/core 0.10.0 → 0.11.0

This diff shows the published contents of these package versions as they appear in the public registry and is provided for informational purposes only.
Files changed (41)
  1. package/dist/agent.d.ts +52 -2
  2. package/dist/agent.d.ts.map +1 -1
  3. package/dist/agent.js +52 -2
  4. package/dist/agent.js.map +1 -1
  5. package/dist/cogitator/index.d.ts +6 -0
  6. package/dist/cogitator/index.d.ts.map +1 -0
  7. package/dist/cogitator/index.js +6 -0
  8. package/dist/cogitator/index.js.map +1 -0
  9. package/dist/cogitator/initializers.d.ts +54 -0
  10. package/dist/cogitator/initializers.d.ts.map +1 -0
  11. package/dist/cogitator/initializers.js +126 -0
  12. package/dist/cogitator/initializers.js.map +1 -0
  13. package/dist/cogitator/message-builder.d.ts +10 -0
  14. package/dist/cogitator/message-builder.d.ts.map +1 -0
  15. package/dist/cogitator/message-builder.js +72 -0
  16. package/dist/cogitator/message-builder.js.map +1 -0
  17. package/dist/cogitator/span-factory.d.ts +4 -0
  18. package/dist/cogitator/span-factory.d.ts.map +1 -0
  19. package/dist/cogitator/span-factory.js +27 -0
  20. package/dist/cogitator/span-factory.js.map +1 -0
  21. package/dist/cogitator/streaming.d.ts +16 -0
  22. package/dist/cogitator/streaming.d.ts.map +1 -0
  23. package/dist/cogitator/streaming.js +52 -0
  24. package/dist/cogitator/streaming.js.map +1 -0
  25. package/dist/cogitator/tool-executor.d.ts +7 -0
  26. package/dist/cogitator/tool-executor.d.ts.map +1 -0
  27. package/dist/cogitator/tool-executor.js +146 -0
  28. package/dist/cogitator/tool-executor.js.map +1 -0
  29. package/dist/cogitator.d.ts +142 -82
  30. package/dist/cogitator.d.ts.map +1 -1
  31. package/dist/cogitator.js +205 -513
  32. package/dist/cogitator.js.map +1 -1
  33. package/dist/registry.d.ts +64 -2
  34. package/dist/registry.d.ts.map +1 -1
  35. package/dist/registry.js +64 -2
  36. package/dist/registry.js.map +1 -1
  37. package/dist/tool.d.ts +48 -5
  38. package/dist/tool.d.ts.map +1 -1
  39. package/dist/tool.js +48 -5
  40. package/dist/tool.js.map +1 -1
  41. package/package.json +3 -3
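The bulk of this release is an internal refactor: the monolithic `dist/cogitator.js` is split into focused modules under `dist/cogitator/` (initializers, message-builder, span-factory, streaming, tool-executor), and the `Cogitator` class gains JSDoc with usage examples. The public API is unchanged; for orientation, here is the basic-usage example from the new JSDoc (a sketch copied from the `@example` block in the diff below, not additional API):

```ts
// Basic usage, as documented in the 0.11.0 JSDoc of @cogitator-ai/core.
import { Cogitator, Agent } from '@cogitator-ai/core';

const cog = new Cogitator({
  llm: { defaultProvider: 'anthropic' },
});

const agent = new Agent({
  name: 'assistant',
  model: 'anthropic/claude-sonnet-4-20250514',
  instructions: 'You are a helpful assistant.',
});

const result = await cog.run(agent, { input: 'Hello!' });
console.log(result.output);

await cog.close();
```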
package/dist/cogitator.js CHANGED
@@ -1,231 +1,120 @@
1
- /**
2
- * Cogitator - Main runtime class
3
- */
4
1
  import { nanoid } from 'nanoid';
5
- import { InMemoryAdapter, RedisAdapter, PostgresAdapter, ContextBuilder, countMessageTokens, countMessagesTokens, } from '@cogitator-ai/memory';
6
2
  import { getPrice } from '@cogitator-ai/models';
7
3
  import { ToolRegistry } from './registry';
8
4
  import { createLLMBackend, parseModel } from './llm/index';
9
5
  import { getLogger } from './logger';
10
- import { ReflectionEngine, InMemoryInsightStore } from './reflection/index';
11
- import { ConstitutionalAI } from './constitutional/index';
12
- import { CostAwareRouter } from './cost-routing/index';
6
+ import { initializeMemory, initializeSandbox, initializeReflection, initializeGuardrails, initializeCostRouting, cleanupState, } from './cogitator/initializers';
7
+ import { buildInitialMessages, saveEntry, enrichMessagesWithInsights, addContextToMessages, } from './cogitator/message-builder';
8
+ import { createSpan, getTextContent } from './cogitator/span-factory';
9
+ import { executeTool, createToolMessage } from './cogitator/tool-executor';
10
+ import { streamChat } from './cogitator/streaming';
11
+ /**
12
+ * Main runtime for executing AI agents.
13
+ *
14
+ * Cogitator orchestrates agent execution with support for:
15
+ * - Multiple LLM providers (OpenAI, Anthropic, Ollama, Google, Azure, etc.)
16
+ * - Memory persistence (Redis, PostgreSQL, in-memory)
17
+ * - Sandboxed tool execution (Docker, WASM)
18
+ * - Reflection and learning from past runs
19
+ * - Constitutional AI guardrails
20
+ * - Cost-aware model routing
21
+ *
22
+ * @example Basic usage
23
+ * ```ts
24
+ * import { Cogitator, Agent } from '@cogitator-ai/core';
25
+ *
26
+ * const cog = new Cogitator({
27
+ * llm: { defaultProvider: 'anthropic' },
28
+ * });
29
+ *
30
+ * const agent = new Agent({
31
+ * name: 'assistant',
32
+ * model: 'anthropic/claude-sonnet-4-20250514',
33
+ * instructions: 'You are a helpful assistant.',
34
+ * });
35
+ *
36
+ * const result = await cog.run(agent, { input: 'Hello!' });
37
+ * console.log(result.output);
38
+ *
39
+ * await cog.close();
40
+ * ```
41
+ *
42
+ * @example With memory and streaming
43
+ * ```ts
44
+ * const cog = new Cogitator({
45
+ * memory: {
46
+ * adapter: 'redis',
47
+ * redis: { url: 'redis://localhost:6379' },
48
+ * },
49
+ * });
50
+ *
51
+ * const result = await cog.run(agent, {
52
+ * input: 'Remember my name is Alice',
53
+ * threadId: 'conversation-123',
54
+ * stream: true,
55
+ * onToken: (token) => process.stdout.write(token),
56
+ * });
57
+ * ```
58
+ */
13
59
  export class Cogitator {
14
60
  config;
15
61
  backends = new Map();
62
+ /** Global tool registry shared across all runs */
16
63
  tools = new ToolRegistry();
17
- memoryAdapter;
18
- contextBuilder;
19
- memoryInitialized = false;
20
- sandboxManager;
21
- sandboxInitialized = false;
22
- reflectionEngine;
23
- insightStore;
24
- reflectionInitialized = false;
25
- constitutionalAI;
26
- guardrailsInitialized = false;
27
- costRouter;
28
- costRoutingInitialized = false;
29
- constructor(config = {}) {
30
- this.config = config;
31
- }
32
- /**
33
- * Initialize memory adapter and context builder (lazy, on first run)
34
- */
35
- async initializeMemory() {
36
- if (this.memoryInitialized || !this.config.memory?.adapter)
37
- return;
38
- const provider = this.config.memory.adapter;
39
- let adapter;
40
- if (provider === 'memory') {
41
- adapter = new InMemoryAdapter({
42
- provider: 'memory',
43
- ...this.config.memory.inMemory,
44
- });
45
- }
46
- else if (provider === 'redis') {
47
- const url = this.config.memory.redis?.url;
48
- if (!url) {
49
- getLogger().warn('Redis adapter requires url in config');
50
- return;
51
- }
52
- adapter = new RedisAdapter({
53
- provider: 'redis',
54
- url,
55
- ...this.config.memory.redis,
56
- });
57
- }
58
- else if (provider === 'postgres') {
59
- const connectionString = this.config.memory.postgres?.connectionString;
60
- if (!connectionString) {
61
- getLogger().warn('Postgres adapter requires connectionString in config');
62
- return;
63
- }
64
- adapter = new PostgresAdapter({
65
- provider: 'postgres',
66
- connectionString,
67
- ...this.config.memory.postgres,
68
- });
69
- }
70
- else {
71
- getLogger().warn(`Unknown memory provider: ${provider}`);
72
- return;
73
- }
74
- const result = await adapter.connect();
75
- if (!result.success) {
76
- getLogger().warn('Memory adapter connection failed', { error: result.error });
77
- return;
78
- }
79
- this.memoryAdapter = adapter;
80
- if (this.config.memory.contextBuilder) {
81
- const deps = {
82
- memoryAdapter: this.memoryAdapter,
83
- };
84
- const contextConfig = {
85
- maxTokens: this.config.memory.contextBuilder.maxTokens ?? 4000,
86
- strategy: this.config.memory.contextBuilder.strategy ?? 'recent',
87
- ...this.config.memory.contextBuilder,
88
- };
89
- this.contextBuilder = new ContextBuilder(contextConfig, deps);
90
- }
91
- this.memoryInitialized = true;
92
- }
93
- /**
94
- * Initialize sandbox manager (lazy, on first sandboxed tool execution)
95
- */
96
- async initializeSandbox() {
97
- if (this.sandboxInitialized)
98
- return;
99
- try {
100
- const { SandboxManager } = await import('@cogitator-ai/sandbox');
101
- this.sandboxManager = new SandboxManager(this.config.sandbox);
102
- await this.sandboxManager.initialize();
103
- this.sandboxInitialized = true;
104
- }
105
- catch {
106
- this.sandboxInitialized = true;
107
- }
108
- }
109
- /**
110
- * Initialize reflection engine (lazy, on first run with reflection enabled)
111
- */
112
- async initializeReflection(agent) {
113
- if (this.reflectionInitialized || !this.config.reflection?.enabled)
114
- return;
115
- const backend = this.getBackend(this.config.reflection.reflectionModel ?? agent.model);
116
- this.insightStore = new InMemoryInsightStore();
117
- this.reflectionEngine = new ReflectionEngine({
118
- llm: backend,
119
- insightStore: this.insightStore,
120
- config: this.config.reflection,
121
- });
122
- this.reflectionInitialized = true;
123
- }
124
- /**
125
- * Initialize constitutional AI guardrails (lazy, on first run with guardrails enabled)
126
- */
127
- initializeGuardrails(agent) {
128
- if (this.guardrailsInitialized || !this.config.guardrails?.enabled)
129
- return;
130
- const backend = this.getBackend(this.config.guardrails.model ?? agent.model);
131
- this.constitutionalAI = new ConstitutionalAI({
132
- llm: backend,
133
- constitution: this.config.guardrails.constitution,
134
- config: this.config.guardrails,
135
- });
136
- this.guardrailsInitialized = true;
137
- }
138
- /**
139
- * Initialize cost-aware routing (lazy, on first run with cost routing enabled)
140
- */
141
- initializeCostRouting() {
142
- if (this.costRoutingInitialized || !this.config.costRouting?.enabled)
143
- return;
144
- this.costRouter = new CostAwareRouter({ config: this.config.costRouting });
145
- this.costRoutingInitialized = true;
146
- }
147
- /**
148
- * Build initial messages for a run, loading history if memory is enabled
149
- */
150
- async buildInitialMessages(agent, options, threadId) {
151
- if (!this.memoryAdapter || options.useMemory === false) {
152
- return [
153
- { role: 'system', content: agent.instructions },
154
- { role: 'user', content: options.input },
155
- ];
156
- }
157
- const threadResult = await this.memoryAdapter.getThread(threadId);
158
- if (!threadResult.success || !threadResult.data) {
159
- await this.memoryAdapter.createThread(agent.id, { agentId: agent.id }, threadId);
160
- }
161
- if (this.contextBuilder && options.loadHistory !== false) {
162
- const ctx = await this.contextBuilder.build({
163
- threadId,
164
- agentId: agent.id,
165
- systemPrompt: agent.instructions,
166
- });
167
- return [...ctx.messages, { role: 'user', content: options.input }];
168
- }
169
- if (options.loadHistory !== false) {
170
- const entries = await this.memoryAdapter.getEntries({ threadId, limit: 20 });
171
- const messages = [{ role: 'system', content: agent.instructions }];
172
- if (entries.success) {
173
- messages.push(...entries.data.map((e) => e.message));
174
- }
175
- messages.push({ role: 'user', content: options.input });
176
- return messages;
177
- }
178
- return [
179
- { role: 'system', content: agent.instructions },
180
- { role: 'user', content: options.input },
181
- ];
182
- }
183
- /**
184
- * Save a message entry to memory (non-blocking, won't crash on failure)
185
- */
186
- async saveEntry(threadId, agentId, message, toolCalls, toolResults, onError) {
187
- if (!this.memoryAdapter)
188
- return;
189
- try {
190
- const threadResult = await this.memoryAdapter.getThread(threadId);
191
- if (!threadResult.success || !threadResult.data) {
192
- await this.memoryAdapter.createThread(agentId, { agentId }, threadId);
193
- }
194
- await this.memoryAdapter.addEntry({
195
- threadId,
196
- message,
197
- toolCalls,
198
- toolResults,
199
- tokenCount: countMessageTokens(message),
200
- });
201
- }
202
- catch (err) {
203
- const error = err instanceof Error ? err : new Error(String(err));
204
- getLogger().warn('Failed to save memory entry', { error: error.message });
205
- onError?.(error, 'save');
206
- }
207
- }
64
+ state = {
65
+ memoryInitialized: false,
66
+ sandboxInitialized: false,
67
+ reflectionInitialized: false,
68
+ guardrailsInitialized: false,
69
+ costRoutingInitialized: false,
70
+ };
208
71
  /**
209
- * Create a span with proper IDs and emit callback
72
+ * Create a new Cogitator runtime.
73
+ *
74
+ * @param config - Runtime configuration
75
+ * @param config.llm - LLM provider settings (API keys, base URLs)
76
+ * @param config.memory - Memory adapter configuration
77
+ * @param config.sandbox - Sandbox execution settings
78
+ * @param config.reflection - Reflection engine settings
79
+ * @param config.guardrails - Constitutional AI settings
80
+ * @param config.costRouting - Cost-aware routing settings
210
81
  */
211
- createSpan(name, traceId, parentId, startTime, endTime, attributes, status = 'ok', kind = 'internal', onSpan) {
212
- const span = {
213
- id: `span_${nanoid(12)}`,
214
- traceId,
215
- parentId,
216
- name,
217
- kind,
218
- status,
219
- startTime,
220
- endTime,
221
- duration: endTime - startTime,
222
- attributes,
223
- };
224
- onSpan?.(span);
225
- return span;
82
+ constructor(config = {}) {
83
+ this.config = config;
226
84
  }
227
85
  /**
228
- * Run an agent with the given input
86
+ * Run an agent with the given input.
87
+ *
88
+ * Executes the agent's task, handling LLM calls, tool execution,
89
+ * memory persistence, and observability callbacks.
90
+ *
91
+ * @param agent - Agent to execute
92
+ * @param options - Run configuration
93
+ * @param options.input - User input/prompt for the agent
94
+ * @param options.threadId - Thread ID for memory persistence
95
+ * @param options.context - Additional context to include in system prompt
96
+ * @param options.stream - Enable streaming responses
97
+ * @param options.onToken - Callback for each streamed token
98
+ * @param options.onToolCall - Callback when a tool is called
99
+ * @param options.onToolResult - Callback when a tool returns a result
100
+ * @param options.onSpan - Callback for observability spans
101
+ * @param options.timeout - Override agent timeout
102
+ * @returns Run result with output, usage stats, and trace
103
+ *
104
+ * @example
105
+ * ```ts
106
+ * const result = await cog.run(agent, {
107
+ * input: 'Search for TypeScript tutorials',
108
+ * threadId: 'session-123',
109
+ * stream: true,
110
+ * onToken: (token) => process.stdout.write(token),
111
+ * onToolCall: (call) => console.log('Tool:', call.name),
112
+ * });
113
+ *
114
+ * console.log('Output:', result.output);
115
+ * console.log('Tokens:', result.usage.totalTokens);
116
+ * console.log('Cost:', result.usage.cost);
117
+ * ```
229
118
  */
230
119
  async run(agent, options) {
231
120
  const runId = `run_${nanoid(12)}`;
@@ -244,48 +133,36 @@ export class Cogitator {
244
133
  options.onRunStart?.({ runId, agentId: agent.id, input: options.input, threadId });
245
134
  const rootSpanId = `span_${nanoid(12)}`;
246
135
  try {
247
- if (this.config.memory?.adapter && !this.memoryInitialized) {
248
- await this.initializeMemory();
249
- }
250
- if (this.config.reflection?.enabled && !this.reflectionInitialized) {
251
- await this.initializeReflection(agent);
252
- }
253
- if (this.config.guardrails?.enabled && !this.guardrailsInitialized) {
254
- this.initializeGuardrails(agent);
255
- }
256
- if (this.config.costRouting?.enabled && !this.costRoutingInitialized) {
257
- this.initializeCostRouting();
258
- }
136
+ await this.initializeAll(agent);
259
137
  const registry = new ToolRegistry();
260
138
  if (agent.tools && agent.tools.length > 0) {
261
139
  registry.registerMany(agent.tools);
262
140
  }
263
141
  let effectiveModel = agent.model;
264
- if (this.costRouter && this.config.costRouting?.autoSelectModel) {
265
- const recommendation = await this.costRouter.recommendModel(options.input);
142
+ if (this.state.costRouter && this.config.costRouting?.autoSelectModel) {
143
+ const recommendation = await this.state.costRouter.recommendModel(options.input);
266
144
  effectiveModel = `${recommendation.provider}/${recommendation.modelId}`;
267
- const budgetCheck = this.costRouter.checkBudget(recommendation.estimatedCost);
145
+ const budgetCheck = this.state.costRouter.checkBudget(recommendation.estimatedCost);
268
146
  if (!budgetCheck.allowed) {
269
147
  throw new Error(`Budget exceeded: ${budgetCheck.reason}`);
270
148
  }
271
149
  }
272
150
  const backend = this.getBackend(effectiveModel, agent.config.provider);
273
151
  const model = agent.config.provider ? effectiveModel : parseModel(effectiveModel).model;
274
- const messages = await this.buildInitialMessages(agent, options, threadId);
275
- if (this.constitutionalAI && this.config.guardrails?.filterInput) {
276
- const inputResult = await this.constitutionalAI.filterInput(options.input);
152
+ const messages = await buildInitialMessages(agent, options, threadId, this.state.memoryAdapter, this.state.contextBuilder);
153
+ if (this.state.constitutionalAI && this.config.guardrails?.filterInput) {
154
+ const inputResult = await this.state.constitutionalAI.filterInput(options.input);
277
155
  if (!inputResult.allowed) {
278
156
  throw new Error(`Input blocked: ${inputResult.blockedReason ?? 'Policy violation'}`);
279
157
  }
280
158
  }
281
- if (options.context && messages.length > 0 && messages[0].role === 'system') {
282
- const contextStr = Object.entries(options.context)
283
- .map(([k, v]) => `${k}: ${JSON.stringify(v)}`)
284
- .join('\n');
285
- messages[0].content += `\n\nContext:\n${contextStr}`;
159
+ if (options.context) {
160
+ addContextToMessages(messages, options.context);
286
161
  }
287
- if (this.memoryAdapter && options.saveHistory !== false && options.useMemory !== false) {
288
- await this.saveEntry(threadId, agent.id, { role: 'user', content: options.input }, undefined, undefined, options.onMemoryError);
162
+ if (this.state.memoryAdapter &&
163
+ options.saveHistory !== false &&
164
+ options.useMemory !== false) {
165
+ await saveEntry(threadId, agent.id, { role: 'user', content: options.input }, this.state.memoryAdapter, undefined, undefined, options.onMemoryError);
289
166
  }
290
167
  const allToolCalls = [];
291
168
  let totalInputTokens = 0;
@@ -304,11 +181,8 @@ export class Cogitator {
304
181
  previousActions: [],
305
182
  availableTools: registry.getNames(),
306
183
  };
307
- if (this.reflectionEngine && this.config.reflection?.enabled) {
308
- const insights = await this.reflectionEngine.getRelevantInsights(agentContext);
309
- if (insights.length > 0 && messages.length > 0 && messages[0].role === 'system') {
310
- messages[0].content += `\n\nPast learnings that may help:\n${insights.map((i) => `- ${i.content}`).join('\n')}`;
311
- }
184
+ if (this.state.reflectionEngine && this.config.reflection?.enabled) {
185
+ await enrichMessagesWithInsights(messages, this.state.reflectionEngine, agentContext);
312
186
  }
313
187
  while (iterations < maxIterations) {
314
188
  if (abortController.signal.aborted) {
@@ -320,7 +194,7 @@ export class Cogitator {
320
194
  const llmSpanStart = Date.now();
321
195
  let response;
322
196
  if (options.stream && options.onToken) {
323
- response = await this.streamChat(backend, model, messages, registry, agent, options.onToken);
197
+ response = await streamChat(backend, model, messages, registry, agent, options.onToken);
324
198
  }
325
199
  else {
326
200
  response = await backend.chat({
@@ -333,7 +207,7 @@ export class Cogitator {
333
207
  stop: agent.config.stopSequences,
334
208
  });
335
209
  }
336
- const llmSpan = this.createSpan('llm.chat', traceId, rootSpanId, llmSpanStart, Date.now(), {
210
+ const llmSpan = createSpan('llm.chat', traceId, rootSpanId, llmSpanStart, Date.now(), {
337
211
  'llm.model': model,
338
212
  'llm.iteration': iterations,
339
213
  'llm.input_tokens': response.usage.inputTokens,
@@ -344,8 +218,8 @@ export class Cogitator {
344
218
  totalInputTokens += response.usage.inputTokens;
345
219
  totalOutputTokens += response.usage.outputTokens;
346
220
  let outputContent = response.content;
347
- if (this.constitutionalAI && this.config.guardrails?.filterOutput) {
348
- const outputResult = await this.constitutionalAI.filterOutput(outputContent, messages);
221
+ if (this.state.constitutionalAI && this.config.guardrails?.filterOutput) {
222
+ const outputResult = await this.state.constitutionalAI.filterOutput(outputContent, messages);
349
223
  if (!outputResult.allowed) {
350
224
  if (outputResult.suggestedRevision) {
351
225
  outputContent = outputResult.suggestedRevision;
@@ -360,8 +234,10 @@ export class Cogitator {
360
234
  content: outputContent,
361
235
  };
362
236
  messages.push(assistantMessage);
363
- if (this.memoryAdapter && options.saveHistory !== false && options.useMemory !== false) {
364
- await this.saveEntry(threadId, agent.id, assistantMessage, response.toolCalls, undefined, options.onMemoryError);
237
+ if (this.state.memoryAdapter &&
238
+ options.saveHistory !== false &&
239
+ options.useMemory !== false) {
240
+ await saveEntry(threadId, agent.id, assistantMessage, this.state.memoryAdapter, response.toolCalls, undefined, options.onMemoryError);
365
241
  }
366
242
  if (response.finishReason === 'tool_calls' && response.toolCalls) {
367
243
  const toolCalls = response.toolCalls;
@@ -371,7 +247,7 @@ export class Cogitator {
371
247
  }
372
248
  const executeToolCall = async (toolCall) => {
373
249
  const toolSpanStart = Date.now();
374
- const result = await this.executeTool(registry, toolCall, runId, agent.id, abortController.signal);
250
+ const result = await executeTool(registry, toolCall, runId, agent.id, this.state.sandboxManager, this.state.constitutionalAI, !!this.config.guardrails?.filterToolCalls, () => initializeSandbox(this.config, this.state), abortController.signal);
375
251
  const toolSpanEnd = Date.now();
376
252
  return { toolCall, result, toolSpanStart, toolSpanEnd };
377
253
  };
@@ -385,7 +261,7 @@ export class Cogitator {
385
261
  return results;
386
262
  })();
387
263
  for (const { toolCall, result, toolSpanStart, toolSpanEnd } of toolResults) {
388
- const toolSpan = this.createSpan(`tool.${toolCall.name}`, traceId, rootSpanId, toolSpanStart, toolSpanEnd, {
264
+ const toolSpan = createSpan(`tool.${toolCall.name}`, traceId, rootSpanId, toolSpanStart, toolSpanEnd, {
389
265
  'tool.name': toolCall.name,
390
266
  'tool.call_id': toolCall.id,
391
267
  'tool.arguments': JSON.stringify(toolCall.arguments),
@@ -394,17 +270,12 @@ export class Cogitator {
394
270
  }, result.error ? 'error' : 'ok', 'internal', options.onSpan);
395
271
  spans.push(toolSpan);
396
272
  options.onToolResult?.(result);
397
- const toolMessage = {
398
- role: 'tool',
399
- content: JSON.stringify(result.result),
400
- toolCallId: toolCall.id,
401
- name: toolCall.name,
402
- };
273
+ const toolMessage = createToolMessage(toolCall, result);
403
274
  messages.push(toolMessage);
404
- if (this.memoryAdapter &&
275
+ if (this.state.memoryAdapter &&
405
276
  options.saveHistory !== false &&
406
277
  options.useMemory !== false) {
407
- await this.saveEntry(threadId, agent.id, toolMessage, undefined, [result], options.onMemoryError);
278
+ await saveEntry(threadId, agent.id, toolMessage, this.state.memoryAdapter, undefined, [result], options.onMemoryError);
408
279
  }
409
280
  const action = {
410
281
  type: 'tool_call',
@@ -415,11 +286,11 @@ export class Cogitator {
415
286
  duration: toolSpanEnd - toolSpanStart,
416
287
  };
417
288
  allActions.push(action);
418
- if (this.reflectionEngine &&
289
+ if (this.state.reflectionEngine &&
419
290
  this.config.reflection?.enabled &&
420
291
  this.config.reflection.reflectAfterToolCall) {
421
292
  try {
422
- const reflectionResult = await this.reflectionEngine.reflectOnToolCall(action, agentContext);
293
+ const reflectionResult = await this.state.reflectionEngine.reflectOnToolCall(action, agentContext);
423
294
  allReflections.push(reflectionResult.reflection);
424
295
  if (reflectionResult.shouldAdjustStrategy && reflectionResult.suggestedAction) {
425
296
  messages.push({
@@ -444,14 +315,12 @@ export class Cogitator {
444
315
  }
445
316
  const endTime = Date.now();
446
317
  const lastAssistantMessage = messages.filter((m) => m.role === 'assistant').pop();
447
- const finalOutput = lastAssistantMessage
448
- ? this.getTextContent(lastAssistantMessage.content)
449
- : '';
450
- if (this.reflectionEngine &&
318
+ const finalOutput = lastAssistantMessage ? getTextContent(lastAssistantMessage.content) : '';
319
+ if (this.state.reflectionEngine &&
451
320
  this.config.reflection?.enabled &&
452
321
  this.config.reflection.reflectAtEnd) {
453
322
  try {
454
- const runReflection = await this.reflectionEngine.reflectOnRun(agentContext, allActions, finalOutput, true);
323
+ const runReflection = await this.state.reflectionEngine.reflectOnRun(agentContext, allActions, finalOutput, true);
455
324
  allReflections.push(runReflection.reflection);
456
325
  }
457
326
  catch (reflectionError) {
@@ -460,7 +329,7 @@ export class Cogitator {
460
329
  });
461
330
  }
462
331
  }
463
- const rootSpan = this.createSpan('agent.run', traceId, undefined, startTime, endTime, {
332
+ const rootSpan = createSpan('agent.run', traceId, undefined, startTime, endTime, {
464
333
  'agent.id': agent.id,
465
334
  'agent.name': agent.name,
466
335
  'agent.model': agent.model,
@@ -473,8 +342,8 @@ export class Cogitator {
473
342
  }, 'ok', 'server', options.onSpan);
474
343
  spans.unshift(rootSpan);
475
344
  const runCost = this.calculateCost(effectiveModel, totalInputTokens, totalOutputTokens);
476
- if (this.costRouter) {
477
- this.costRouter.recordCost({
345
+ if (this.state.costRouter) {
346
+ this.state.costRouter.recordCost({
478
347
  runId,
479
348
  agentId: agent.id,
480
349
  threadId,
@@ -504,8 +373,8 @@ export class Cogitator {
504
373
  spans,
505
374
  },
506
375
  reflections: allReflections.length > 0 ? allReflections : undefined,
507
- reflectionSummary: this.reflectionEngine
508
- ? await this.reflectionEngine.getSummary(agent.id)
376
+ reflectionSummary: this.state.reflectionEngine
377
+ ? await this.state.reflectionEngine.getSummary(agent.id)
509
378
  : undefined,
510
379
  };
511
380
  options.onRunComplete?.(result);
@@ -513,7 +382,7 @@ export class Cogitator {
513
382
  }
514
383
  catch (error) {
515
384
  const endTime = Date.now();
516
- const errorSpan = this.createSpan('agent.run', traceId, undefined, startTime, endTime, {
385
+ const errorSpan = createSpan('agent.run', traceId, undefined, startTime, endTime, {
517
386
  'agent.id': agent.id,
518
387
  'agent.name': agent.name,
519
388
  'agent.model': agent.model,
@@ -530,205 +399,20 @@ export class Cogitator {
530
399
  }
531
400
  }
532
401
  }
533
- /**
534
- * Stream chat with token callback
535
- */
536
- async streamChat(backend, model, messages, registry, agent, onToken) {
537
- let content = '';
538
- let toolCalls;
539
- let finishReason = 'stop';
540
- let inputTokens = 0;
541
- let outputTokens = 0;
542
- let hasUsageFromStream = false;
543
- const stream = backend.chatStream({
544
- model,
545
- messages,
546
- tools: registry.getSchemas(),
547
- temperature: agent.config.temperature,
548
- topP: agent.config.topP,
549
- maxTokens: agent.config.maxTokens,
550
- stop: agent.config.stopSequences,
551
- });
552
- for await (const chunk of stream) {
553
- if (chunk.delta.content) {
554
- content += chunk.delta.content;
555
- onToken(chunk.delta.content);
556
- }
557
- if (chunk.delta.toolCalls) {
558
- toolCalls = chunk.delta.toolCalls;
559
- }
560
- if (chunk.finishReason) {
561
- finishReason = chunk.finishReason;
562
- }
563
- if (chunk.usage) {
564
- inputTokens = chunk.usage.inputTokens;
565
- outputTokens = chunk.usage.outputTokens;
566
- hasUsageFromStream = true;
567
- }
568
- }
569
- if (!hasUsageFromStream) {
570
- inputTokens = countMessagesTokens(messages);
571
- outputTokens = Math.ceil(content.length / 4);
572
- }
573
- return {
574
- id: `stream_${nanoid(8)}`,
575
- content,
576
- toolCalls,
577
- finishReason,
578
- usage: {
579
- inputTokens,
580
- outputTokens,
581
- totalTokens: inputTokens + outputTokens,
582
- },
583
- };
584
- }
585
- /**
586
- * Execute a tool
587
- */
588
- async executeTool(registry, toolCall, runId, agentId, signal) {
589
- const tool = registry.get(toolCall.name);
590
- if (!tool) {
591
- return {
592
- callId: toolCall.id,
593
- name: toolCall.name,
594
- result: null,
595
- error: `Tool not found: ${toolCall.name}`,
596
- };
402
+ async initializeAll(agent) {
403
+ if (this.config.memory?.adapter && !this.state.memoryInitialized) {
404
+ await initializeMemory(this.config, this.state);
597
405
  }
598
- const parseResult = tool.parameters.safeParse(toolCall.arguments);
599
- if (!parseResult.success) {
600
- return {
601
- callId: toolCall.id,
602
- name: toolCall.name,
603
- result: null,
604
- error: `Invalid arguments: ${parseResult.error.message}`,
605
- };
606
- }
607
- if (this.constitutionalAI && this.config.guardrails?.filterToolCalls) {
608
- const context = {
609
- agentId,
610
- runId,
611
- signal: signal ?? new AbortController().signal,
612
- };
613
- const guardResult = await this.constitutionalAI.guardTool(tool, toolCall.arguments, context);
614
- if (!guardResult.approved) {
615
- return {
616
- callId: toolCall.id,
617
- name: toolCall.name,
618
- result: null,
619
- error: `Tool blocked: ${guardResult.reason ?? 'Policy violation'}`,
620
- };
621
- }
406
+ if (this.config.reflection?.enabled && !this.state.reflectionInitialized) {
407
+ await initializeReflection(this.config, this.state, agent, (model) => this.getBackend(model));
622
408
  }
623
- if (tool.sandbox?.type === 'docker' || tool.sandbox?.type === 'wasm') {
624
- return this.executeInSandbox(tool, toolCall, runId, agentId);
409
+ if (this.config.guardrails?.enabled && !this.state.guardrailsInitialized) {
410
+ initializeGuardrails(this.config, this.state, agent, (model) => this.getBackend(model));
625
411
  }
626
- const context = {
627
- agentId,
628
- runId,
629
- signal: signal ?? new AbortController().signal,
630
- };
631
- try {
632
- const result = await tool.execute(parseResult.data, context);
633
- return {
634
- callId: toolCall.id,
635
- name: toolCall.name,
636
- result,
637
- };
638
- }
639
- catch (error) {
640
- return {
641
- callId: toolCall.id,
642
- name: toolCall.name,
643
- result: null,
644
- error: error instanceof Error ? error.message : String(error),
645
- };
412
+ if (this.config.costRouting?.enabled && !this.state.costRoutingInitialized) {
413
+ initializeCostRouting(this.config, this.state);
646
414
  }
647
415
  }
648
- /**
649
- * Execute a tool in sandbox (Docker or WASM)
650
- */
651
- async executeInSandbox(tool, toolCall, runId, agentId) {
652
- await this.initializeSandbox();
653
- if (!this.sandboxManager) {
654
- getLogger().warn('Sandbox unavailable, executing natively', { tool: tool.name });
655
- const context = {
656
- agentId,
657
- runId,
658
- signal: new AbortController().signal,
659
- };
660
- try {
661
- const result = await tool.execute(toolCall.arguments, context);
662
- return { callId: toolCall.id, name: toolCall.name, result };
663
- }
664
- catch (error) {
665
- return {
666
- callId: toolCall.id,
667
- name: toolCall.name,
668
- result: null,
669
- error: error instanceof Error ? error.message : String(error),
670
- };
671
- }
672
- }
673
- const args = toolCall.arguments;
674
- const sandboxConfig = tool.sandbox;
675
- const isWasm = sandboxConfig.type === 'wasm';
676
- const request = isWasm
677
- ? {
678
- command: [],
679
- stdin: JSON.stringify(args),
680
- timeout: tool.timeout,
681
- }
682
- : {
683
- command: ['sh', '-c', String(args.command ?? '')],
684
- cwd: args.cwd,
685
- env: args.env,
686
- timeout: tool.timeout,
687
- };
688
- const result = await this.sandboxManager.execute(request, sandboxConfig);
689
- if (!result.success) {
690
- return {
691
- callId: toolCall.id,
692
- name: toolCall.name,
693
- result: null,
694
- error: result.error,
695
- };
696
- }
697
- if (isWasm) {
698
- try {
699
- const parsed = JSON.parse(result.data.stdout);
700
- return {
701
- callId: toolCall.id,
702
- name: toolCall.name,
703
- result: parsed,
704
- };
705
- }
706
- catch {
707
- return {
708
- callId: toolCall.id,
709
- name: toolCall.name,
710
- result: result.data.stdout,
711
- };
712
- }
713
- }
714
- return {
715
- callId: toolCall.id,
716
- name: toolCall.name,
717
- result: {
718
- stdout: result.data.stdout,
719
- stderr: result.data.stderr,
720
- exitCode: result.data.exitCode,
721
- timedOut: result.data.timedOut,
722
- duration: result.data.duration,
723
- command: args.command,
724
- },
725
- };
726
- }
727
- /**
728
- * Get or create an LLM backend
729
- * @param modelString - Model string (e.g., "x-ai/grok-4.1-fast")
730
- * @param explicitProvider - Explicit provider override (e.g., 'openai' for OpenRouter)
731
- */
732
416
  getBackend(modelString, explicitProvider) {
733
417
  const { provider: parsedProvider } = parseModel(modelString);
734
418
  const actualProvider = (explicitProvider ??
@@ -742,9 +426,6 @@ export class Cogitator {
742
426
  }
743
427
  return backend;
744
428
  }
745
- /**
746
- * Calculate cost based on model and tokens using dynamic model registry
747
- */
748
429
  calculateCost(model, inputTokens, outputTokens) {
749
430
  const { model: modelName } = parseModel(model);
750
431
  const price = getPrice(modelName);
@@ -754,76 +435,87 @@ export class Cogitator {
754
435
  return (inputTokens * price.input + outputTokens * price.output) / 1_000_000;
755
436
  }
756
437
  /**
757
- * Get accumulated insights for an agent
438
+ * Get accumulated insights from reflection for an agent.
439
+ *
440
+ * Insights are learnings derived from past runs that can help
441
+ * improve future agent performance.
442
+ *
443
+ * @param agentId - ID of the agent to get insights for
444
+ * @returns Array of insights, empty if reflection is not enabled
758
445
  */
759
446
  async getInsights(agentId) {
760
- if (!this.insightStore)
447
+ if (!this.state.insightStore)
761
448
  return [];
762
- return this.insightStore.getAll(agentId);
449
+ return this.state.insightStore.getAll(agentId);
763
450
  }
764
451
  /**
765
- * Get reflection summary for an agent
452
+ * Get reflection summary for an agent.
453
+ *
454
+ * Summary includes statistics about total runs, successful tool calls,
455
+ * common patterns, and accumulated learnings.
456
+ *
457
+ * @param agentId - ID of the agent to get summary for
458
+ * @returns Reflection summary, null if reflection is not enabled
766
459
  */
767
460
  async getReflectionSummary(agentId) {
768
- if (!this.reflectionEngine)
461
+ if (!this.state.reflectionEngine)
769
462
  return null;
770
- return this.reflectionEngine.getSummary(agentId);
463
+ return this.state.reflectionEngine.getSummary(agentId);
771
464
  }
772
465
  /**
773
- * Get the constitutional AI guardrails instance
466
+ * Get the constitutional AI guardrails instance.
467
+ *
468
+ * @returns ConstitutionalAI instance, undefined if guardrails not enabled
774
469
  */
775
470
  getGuardrails() {
776
- return this.constitutionalAI;
471
+ return this.state.constitutionalAI;
777
472
  }
778
473
  /**
779
- * Set the constitution for guardrails
474
+ * Set or update the constitution for guardrails.
475
+ *
476
+ * The constitution defines principles and rules that the agent
477
+ * must follow, filtering both input and output.
478
+ *
479
+ * @param constitution - New constitution to apply
780
480
  */
781
481
  setConstitution(constitution) {
782
- this.constitutionalAI?.setConstitution(constitution);
482
+ this.state.constitutionalAI?.setConstitution(constitution);
783
483
  }
784
484
  /**
785
- * Get cost-aware routing summary
485
+ * Get cost tracking summary across all runs.
486
+ *
487
+ * @returns Cost summary with total spent, runs count, and per-model breakdown
786
488
  */
787
489
  getCostSummary() {
788
- return this.costRouter?.getCostSummary();
490
+ return this.state.costRouter?.getCostSummary();
789
491
  }
790
492
  /**
791
- * Get the cost-aware router instance
493
+ * Get the cost-aware router instance for advanced cost management.
494
+ *
495
+ * @returns CostAwareRouter instance, undefined if cost routing not enabled
792
496
  */
793
497
  getCostRouter() {
794
- return this.costRouter;
795
- }
796
- getTextContent(content) {
797
- if (typeof content === 'string') {
798
- return content;
799
- }
800
- return content
801
- .filter((part) => part.type === 'text')
802
- .map((part) => part.text)
803
- .join(' ');
498
+ return this.state.costRouter;
804
499
  }
805
500
  /**
806
- * Close all connections
501
+ * Close all connections and release resources.
502
+ *
503
+ * Should be called when done using the Cogitator instance to properly
504
+ * disconnect from memory adapters, shut down sandbox containers, and
505
+ * clean up internal state.
506
+ *
507
+ * @example
508
+ * ```ts
509
+ * const cog = new Cogitator({ ... });
510
+ * try {
511
+ * await cog.run(agent, { input: 'Hello' });
512
+ * } finally {
513
+ * await cog.close();
514
+ * }
515
+ * ```
807
516
  */
808
517
  async close() {
809
- if (this.memoryAdapter) {
810
- await this.memoryAdapter.disconnect();
811
- this.memoryAdapter = undefined;
812
- this.contextBuilder = undefined;
813
- this.memoryInitialized = false;
814
- }
815
- if (this.sandboxManager) {
816
- await this.sandboxManager.shutdown();
817
- this.sandboxManager = undefined;
818
- this.sandboxInitialized = false;
819
- }
820
- this.reflectionEngine = undefined;
821
- this.insightStore = undefined;
822
- this.reflectionInitialized = false;
823
- this.constitutionalAI = undefined;
824
- this.guardrailsInitialized = false;
825
- this.costRouter = undefined;
826
- this.costRoutingInitialized = false;
518
+ await cleanupState(this.state);
827
519
  this.backends.clear();
828
520
  }
829
521
  }
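The refactored `run()` path and the new `close()` delegation to `cleanupState()` are exercised end to end by the memory and streaming example documented in the JSDoc above. Below is a sketch assembled from those `@example` blocks; the agent definition is reused from the basic example, and the Redis URL and threadId values are illustrative.

```ts
import { Cogitator, Agent } from '@cogitator-ai/core';

// Assembled from the "@example With memory and streaming" and close()
// JSDoc blocks added in 0.11.0; redis URL and threadId are placeholders.
const cog = new Cogitator({
  memory: {
    adapter: 'redis',
    redis: { url: 'redis://localhost:6379' },
  },
});

const agent = new Agent({
  name: 'assistant',
  model: 'anthropic/claude-sonnet-4-20250514',
  instructions: 'You are a helpful assistant.',
});

try {
  const result = await cog.run(agent, {
    input: 'Remember my name is Alice',
    threadId: 'conversation-123',
    stream: true,
    onToken: (token: string) => process.stdout.write(token),
  });
  console.log(result.output);
} finally {
  // close() now delegates teardown to cleanupState() and clears cached backends.
  await cog.close();
}
```

Wrapping `run()` in try/finally follows the pattern in the new `close()` JSDoc, so the memory adapter is disconnected and any sandbox containers are shut down even if the run throws.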