@node-llm/orm 0.5.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -72,13 +72,13 @@ interface PrismaModel<T = Record<string, unknown>> {
72
72
  findUnique(args: { where: { id: string } }): Promise<T | null>;
73
73
  }
74
74
 
75
- type AgentClass<T extends Agent = Agent> = (new (
76
- overrides?: Partial<AgentConfig & ChatOptions>
75
+ type AgentClass<T extends Agent<any, any> = Agent<any, any>> = (new (
76
+ overrides?: Partial<AgentConfig<any> & ChatOptions>
77
77
  ) => T) & {
78
78
  name: string;
79
79
  model?: string;
80
- instructions?: string;
81
- tools?: unknown[];
80
+ instructions?: unknown;
81
+ tools?: unknown;
82
82
  };
83
83
 
84
84
  /**
@@ -87,6 +87,7 @@ type AgentClass<T extends Agent = Agent> = (new (
87
87
  * Follows "Code Wins" sovereignty:
88
88
  * - Model, Tools, Instructions come from the Agent class (code)
89
89
  * - Message history comes from the database
90
+ * - Metadata from DB is injected as 'inputs' for dynamic resolution
90
91
  *
91
92
  * @example
92
93
  * ```typescript
@@ -102,7 +103,10 @@ type AgentClass<T extends Agent = Agent> = (new (
102
103
  * const result = await session.ask("Hello");
103
104
  * ```
104
105
  */
105
- export class AgentSession<T extends Agent = Agent> {
106
+ export class AgentSession<
107
+ I extends Record<string, any> = Record<string, any>,
108
+ T extends Agent<I, any> = Agent<I, any>
109
+ > {
106
110
  private currentMessageId: string | null = null;
107
111
  private tableNames: Required<TableNames>;
108
112
  private debug: boolean;
@@ -114,7 +118,8 @@ export class AgentSession<T extends Agent = Agent> {
114
118
  private record: AgentSessionRecord,
115
119
  tableNames?: TableNames,
116
120
  private agent: T = new AgentClass({
117
- llm
121
+ llm,
122
+ inputs: record.metadata as I
118
123
  }),
119
124
  debug: boolean = false
120
125
  ) {
@@ -126,6 +131,8 @@ export class AgentSession<T extends Agent = Agent> {
126
131
  toolCall: tableNames?.toolCall || "llmToolCall",
127
132
  request: tableNames?.request || "llmRequest"
128
133
  };
134
+
135
+ this.registerHooks();
129
136
  }
130
137
 
131
138
  private log(...args: any[]) {
@@ -150,8 +157,8 @@ export class AgentSession<T extends Agent = Agent> {
150
157
  }
151
158
 
152
159
  /** Session metadata */
153
- get metadata(): Record<string, unknown> | null | undefined {
154
- return this.record.metadata;
160
+ get metadata(): I | null | undefined {
161
+ return this.record.metadata as I;
155
162
  }
156
163
 
157
164
  /** Agent class name */
@@ -181,15 +188,70 @@ export class AgentSession<T extends Agent = Agent> {
181
188
  return getTable(this.prisma, name) as unknown as PrismaModel<R>;
182
189
  }
183
190
 
191
+ /**
192
+ * Register persistence hooks on the agent.
193
+ */
194
+ private registerHooks() {
195
+ this.agent.onToolCallStart(async (toolCall) => {
196
+ if (!this.currentMessageId) return;
197
+ const model = this.getModel(this.tableNames.toolCall);
198
+ await model.create({
199
+ data: {
200
+ messageId: this.currentMessageId,
201
+ toolCallId: toolCall.id,
202
+ name: toolCall.function.name,
203
+ arguments: toolCall.function.arguments
204
+ }
205
+ });
206
+ });
207
+
208
+ this.agent.onToolCallEnd(async (toolCall, result) => {
209
+ if (!this.currentMessageId) return;
210
+ const model = this.getModel(this.tableNames.toolCall);
211
+ try {
212
+ await model.update({
213
+ where: {
214
+ messageId_toolCallId: {
215
+ messageId: this.currentMessageId,
216
+ toolCallId: toolCall.id
217
+ }
218
+ } as any,
219
+ data: {
220
+ result: typeof result === "string" ? result : JSON.stringify(result)
221
+ }
222
+ });
223
+ } catch (e) {
224
+ this.log(`Failed to update tool call result: ${e}`);
225
+ }
226
+ });
227
+
228
+ this.agent.afterResponse(async (response) => {
229
+ const model = this.getModel(this.tableNames.request);
230
+ await model.create({
231
+ data: {
232
+ chatId: this.chatId,
233
+ messageId: this.currentMessageId,
234
+ provider: response.provider || "unknown",
235
+ model: response.model || "unknown",
236
+ statusCode: 200,
237
+ duration: 0,
238
+ inputTokens: response.usage?.input_tokens || 0,
239
+ outputTokens: response.usage?.output_tokens || 0,
240
+ cost: response.usage?.cost || 0
241
+ }
242
+ });
243
+ });
244
+ }
245
+
184
246
  /**
185
247
  * Send a message and persist the conversation.
186
248
  */
187
- async ask(input: string, options: AskOptions = {}): Promise<MessageRecord> {
249
+ async ask(message: string, options: AskOptions & { inputs?: I } = {}): Promise<MessageRecord> {
188
250
  const model = this.getModel<MessageRecord>(this.tableNames.message);
189
251
 
190
252
  // Persist user message
191
253
  await model.create({
192
- data: { chatId: this.chatId, role: "user", content: input }
254
+ data: { chatId: this.chatId, role: "user", content: message }
193
255
  });
194
256
 
195
257
  // Create placeholder for assistant message
@@ -200,8 +262,11 @@ export class AgentSession<T extends Agent = Agent> {
200
262
  this.currentMessageId = assistantMessage.id;
201
263
 
202
264
  try {
265
+ // Merge turn-level inputs with session metadata
266
+ const inputs = { ...(this.record.metadata as I), ...options.inputs };
267
+
203
268
  // Get response from agent (uses code-defined config + injected history)
204
- const response = await this.agent.ask(input, options);
269
+ const response = await this.agent.ask(message, { ...options, inputs });
205
270
 
206
271
  // Update assistant message with response
207
272
  return await model.update({
@@ -229,14 +294,14 @@ export class AgentSession<T extends Agent = Agent> {
229
294
  * Stream a response and persist the conversation.
230
295
  */
231
296
  async *askStream(
232
- input: string,
233
- options: AskOptions = {}
297
+ message: string,
298
+ options: AskOptions & { inputs?: I } = {}
234
299
  ): AsyncGenerator<ChatChunk, MessageRecord, undefined> {
235
300
  const model = this.getModel<MessageRecord>(this.tableNames.message);
236
301
 
237
302
  // Persist user message
238
303
  await model.create({
239
- data: { chatId: this.chatId, role: "user", content: input }
304
+ data: { chatId: this.chatId, role: "user", content: message }
240
305
  });
241
306
 
242
307
  // Create placeholder for assistant message
@@ -247,7 +312,9 @@ export class AgentSession<T extends Agent = Agent> {
247
312
  this.currentMessageId = assistantMessage.id;
248
313
 
249
314
  try {
250
- const stream = this.agent.stream(input, options);
315
+ // Merge turn-level inputs with session metadata
316
+ const inputs = { ...(this.record.metadata as I), ...options.inputs };
317
+ const stream = this.agent.stream(message, { ...options, inputs });
251
318
 
252
319
  let fullContent = "";
253
320
  let lastChunk: ChatChunk | null = null;
@@ -278,6 +345,44 @@ export class AgentSession<T extends Agent = Agent> {
278
345
  }
279
346
  }
280
347
 
348
+ /**
349
+ * Returns a usage summary for this chat session.
350
+ */
351
+ async stats(): Promise<Usage> {
352
+ const requestModel = getTable(this.prisma, this.tableNames.request);
353
+ const aggregate = await (requestModel as any).aggregate({
354
+ where: { chatId: this.chatId },
355
+ _sum: {
356
+ inputTokens: true,
357
+ outputTokens: true,
358
+ cost: true
359
+ }
360
+ });
361
+
362
+ return {
363
+ input_tokens: Number(aggregate._sum.inputTokens || 0),
364
+ output_tokens: Number(aggregate._sum.outputTokens || 0),
365
+ total_tokens: Number((aggregate._sum.inputTokens || 0) + (aggregate._sum.outputTokens || 0)),
366
+ cost: Number(aggregate._sum.cost || 0)
367
+ };
368
+ }
369
+
370
+ /**
371
+ * Add a tool to the session (turn-level).
372
+ */
373
+ withTool(tool: any): this {
374
+ this.agent.use(tool);
375
+ return this;
376
+ }
377
+
378
+ /**
379
+ * Add instructions to the session (turn-level).
380
+ */
381
+ withInstructions(instructions: string, options?: { replace?: boolean }): this {
382
+ this.agent.withInstructions(instructions, options);
383
+ return this;
384
+ }
385
+
281
386
  /**
282
387
  * Returns the current full message history for this session.
283
388
  */
@@ -297,26 +402,50 @@ export class AgentSession<T extends Agent = Agent> {
297
402
  await chatTable.delete({ where: { id: this.chatId } });
298
403
  // AgentSession record is deleted via Cascade from LlmChat
299
404
  }
405
+
406
+ /**
407
+ * Update session metadata and re-resolve agent configuration.
408
+ */
409
+ async updateMetadata(metadata: Partial<I>): Promise<void> {
410
+ const sessionTable = this.getModel<AgentSessionRecord>(this.tableNames.agentSession);
411
+ const newMetadata = { ...(this.record.metadata as I), ...metadata };
412
+
413
+ await sessionTable.update({
414
+ where: { id: this.id },
415
+ data: { metadata: newMetadata as any }
416
+ });
417
+
418
+ this.record.metadata = newMetadata as any;
419
+
420
+ // Apply changes to the underlying agent immediately
421
+ // resolveLazyConfig is private, so we need a cast or make it protected.
422
+ // Given we are in the same package, we can cast.
423
+ (this.agent as any).resolveLazyConfig(newMetadata);
424
+ }
300
425
  }
301
426
 
302
427
  /**
303
428
  * Options for creating a new agent session.
304
429
  */
305
- export interface CreateAgentSessionOptions {
306
- metadata?: Record<string, unknown>;
430
+ export interface CreateAgentSessionOptions<I = any> {
431
+ metadata?: I;
307
432
  tableNames?: TableNames;
308
433
  debug?: boolean;
434
+ model?: string;
435
+ provider?: string;
436
+ instructions?: string;
437
+ maxToolCalls?: number;
309
438
  }
310
439
 
311
440
  /**
312
441
  * Creates a new agent session and its persistent chat record.
313
442
  */
314
- export async function createAgentSession<T extends Agent>(
443
+ export async function createAgentSession<I extends Record<string, any>, T extends Agent<I, any>>(
315
444
  prisma: any,
316
445
  llm: NodeLLMCore,
317
446
  AgentClass: AgentClass<T>,
318
- options: CreateAgentSessionOptions = {}
319
- ): Promise<AgentSession<T>> {
447
+ options: CreateAgentSessionOptions<I> = {}
448
+ ): Promise<AgentSession<I, T>> {
320
449
  const tableNames = {
321
450
  agentSession: options.tableNames?.agentSession || "llmAgentSession",
322
451
  chat: options.tableNames?.chat || "llmChat",
@@ -331,9 +460,11 @@ export async function createAgentSession<T extends Agent>(
331
460
  const chatTable = getTable(prisma, tableNames.chat);
332
461
  const chatRecord = (await chatTable.create({
333
462
  data: {
334
- model: AgentClass.model || null,
335
- provider: null,
336
- instructions: AgentClass.instructions || null,
463
+ model: options.model || AgentClass.model || null,
464
+ provider: options.provider || null,
465
+ instructions:
466
+ options.instructions ||
467
+ (typeof AgentClass.instructions === "string" ? AgentClass.instructions : null),
337
468
  metadata: null // Runtime metadata goes in Chat, session context in AgentSession
338
469
  }
339
470
  })) as unknown as { id: string };
@@ -344,17 +475,27 @@ export async function createAgentSession<T extends Agent>(
344
475
  data: {
345
476
  agentClass: AgentClass.name,
346
477
  chatId: chatRecord.id,
347
- metadata: options.metadata || null
478
+ metadata: (options.metadata as any) || null
348
479
  }
349
480
  })) as unknown as AgentSessionRecord;
350
481
 
351
- return new AgentSession(
482
+ // 3. Instantiate Agent with overrides
483
+ const agent = new AgentClass({
484
+ llm,
485
+ inputs: sessionRecord.metadata as I,
486
+ model: options.model,
487
+ provider: options.provider,
488
+ instructions: options.instructions,
489
+ maxToolCalls: options.maxToolCalls
490
+ });
491
+
492
+ return new AgentSession<I, T>(
352
493
  prisma,
353
494
  llm,
354
495
  AgentClass,
355
496
  sessionRecord,
356
497
  options.tableNames,
357
- undefined,
498
+ agent,
358
499
  options.debug
359
500
  );
360
501
  }
@@ -370,13 +511,13 @@ export interface LoadAgentSessionOptions {
370
511
  /**
371
512
  * Loads an existing agent session and re-instantiates the agent with history.
372
513
  */
373
- export async function loadAgentSession<T extends Agent>(
514
+ export async function loadAgentSession<I extends Record<string, any>, T extends Agent<I, any>>(
374
515
  prisma: any,
375
516
  llm: NodeLLMCore,
376
517
  AgentClass: AgentClass<T>,
377
518
  sessionId: string,
378
519
  options: LoadAgentSessionOptions = {}
379
- ): Promise<AgentSession<T> | null> {
520
+ ): Promise<AgentSession<I, T> | null> {
380
521
  const tableNames = {
381
522
  agentSession: options.tableNames?.agentSession || "llmAgentSession",
382
523
  chat: options.tableNames?.chat || "llmChat",
@@ -417,14 +558,16 @@ export async function loadAgentSession<T extends Agent>(
417
558
  content: m.content || ""
418
559
  }));
419
560
 
420
- // 4. Instantiate agent with injected history and LLM
561
+ // 4. Instantiate agent with injected history, LLM, AND metadata (as inputs)
421
562
  // "Code Wins" - model, tools, instructions come from AgentClass
563
+ // Metadata from DB handles the lazy resolution of behavior
422
564
  const agent = new AgentClass({
423
565
  llm,
424
- messages: history
566
+ messages: history,
567
+ inputs: sessionRecord.metadata as I
425
568
  }) as T;
426
569
 
427
- return new AgentSession(
570
+ return new AgentSession<I, T>(
428
571
  prisma,
429
572
  llm,
430
573
  AgentClass,
@@ -180,10 +180,10 @@ export class Chat extends BaseChat {
180
180
  /**
181
181
  * Send a message and persist the conversation.
182
182
  */
183
- async ask(input: string, options: AskOptions = {}): Promise<MessageRecord> {
183
+ async ask(message: string, options: AskOptions = {}): Promise<MessageRecord> {
184
184
  const messageModel = this.tables.message;
185
185
  const userMessage = await (this.prisma as any)[messageModel].create({
186
- data: { chatId: this.id, role: "user", content: input }
186
+ data: { chatId: this.id, role: "user", content: message }
187
187
  });
188
188
 
189
189
  const assistantMessage = await (this.prisma as any)[messageModel].create({
@@ -202,7 +202,7 @@ export class Chat extends BaseChat {
202
202
  }));
203
203
 
204
204
  const coreChat = await this.prepareCoreChat(history, assistantMessage!.id);
205
- const response = await coreChat.ask(input, options);
205
+ const response = await coreChat.ask(message, options);
206
206
 
207
207
  return await (this.prisma as any)[messageModel].update({
208
208
  where: { id: assistantMessage!.id },
@@ -231,12 +231,12 @@ export class Chat extends BaseChat {
231
231
  * Yields ChatChunk objects for full visibility of thinking, content, and tools.
232
232
  */
233
233
  async *askStream(
234
- input: string,
234
+ message: string,
235
235
  options: AskOptions = {}
236
236
  ): AsyncGenerator<ChatChunk, MessageRecord, undefined> {
237
237
  const messageModel = this.tables.message;
238
238
  const userMessage = await (this.prisma as any)[messageModel].create({
239
- data: { chatId: this.id, role: "user", content: input }
239
+ data: { chatId: this.id, role: "user", content: message }
240
240
  });
241
241
 
242
242
  const assistantMessage = await (this.prisma as any)[messageModel].create({
@@ -255,7 +255,7 @@ export class Chat extends BaseChat {
255
255
  }));
256
256
 
257
257
  const coreChat = await this.prepareCoreChat(history, assistantMessage!.id);
258
- const stream = coreChat.stream(input, options);
258
+ const stream = coreChat.stream(message, options);
259
259
 
260
260
  let fullContent = "";
261
261
  let metadata: any = {};
@@ -1,6 +1,6 @@
1
1
  import { describe, it, expect, vi, beforeEach } from "vitest";
2
2
  import { Agent, Tool, NodeLLM } from "@node-llm/core";
3
- import { createAgentSession, loadAgentSession } from "../src/adapters/prisma/AgentSession";
3
+ import { createAgentSession, loadAgentSession } from "../src/adapters/prisma/AgentSession.js";
4
4
 
5
5
  // --- Mocks ---
6
6
 
@@ -12,7 +12,8 @@ const mockPrisma = {
12
12
  },
13
13
  llmAgentSession: {
14
14
  create: vi.fn(),
15
- findUnique: vi.fn()
15
+ findUnique: vi.fn(),
16
+ update: vi.fn()
16
17
  },
17
18
  llmMessage: {
18
19
  create: vi.fn(),
@@ -42,7 +43,8 @@ const createMockChat = () => {
42
43
  onToolCallStart: vi.fn().mockReturnThis(),
43
44
  onToolCallEnd: vi.fn().mockReturnThis(),
44
45
  onToolCallError: vi.fn().mockReturnThis(),
45
- onEndMessage: vi.fn().mockReturnThis()
46
+ onEndMessage: vi.fn().mockReturnThis(),
47
+ afterResponse: vi.fn().mockReturnThis()
46
48
  };
47
49
  return mockChat;
48
50
  };
@@ -201,4 +203,130 @@ describe("AgentSession", () => {
201
203
  );
202
204
  });
203
205
  });
206
+
207
+ describe("Lazy Evaluation & Metadata", () => {
208
+ interface TestInputs {
209
+ userName: string;
210
+ }
211
+
212
+ class LazyTestAgent extends Agent<TestInputs> {
213
+ static model = "gpt-4-lazy";
214
+ static instructions = (i: TestInputs) => `Hello ${i.userName}`;
215
+ }
216
+
217
+ it("injects metadata as inputs for lazy resolution during load", async () => {
218
+ mockPrisma.llmAgentSession.findUnique.mockResolvedValue({
219
+ id: "session-123",
220
+ chatId: "chat-123",
221
+ agentClass: "LazyTestAgent",
222
+ metadata: { userName: "Alice" }
223
+ });
224
+ mockPrisma.llmMessage.findMany.mockResolvedValue([]);
225
+
226
+ const session = await loadAgentSession(
227
+ mockPrisma as any,
228
+ mockLlm,
229
+ LazyTestAgent as any,
230
+ "session-123"
231
+ );
232
+
233
+ // Extract the underlying agent's chat instance
234
+ const mockChat = (session as any).agent.chat;
235
+ expect(mockChat.withInstructions).toHaveBeenCalledWith("Hello Alice", { replace: true });
236
+ });
237
+
238
+ it("merges turn-level inputs with session metadata during ask()", async () => {
239
+ mockPrisma.llmAgentSession.findUnique.mockResolvedValue({
240
+ id: "session-123",
241
+ chatId: "chat-123",
242
+ agentClass: "LazyTestAgent",
243
+ metadata: { userName: "Bob" }
244
+ });
245
+ mockPrisma.llmMessage.findMany.mockResolvedValue([]);
246
+ mockPrisma.llmMessage.create.mockResolvedValue({ id: "msg" });
247
+ mockPrisma.llmMessage.update.mockResolvedValue({ id: "msg" });
248
+
249
+ const session = (await loadAgentSession(
250
+ mockPrisma as any,
251
+ mockLlm,
252
+ LazyTestAgent as any,
253
+ "session-123"
254
+ ))!;
255
+
256
+ // Mock the instructions resolver again to prove turn-level override
257
+ LazyTestAgent.instructions = (i: any) => `Hi ${i.userName}, turn: ${i.turn}`;
258
+
259
+ await session.ask("Hello", { inputs: { turn: "1" } } as any);
260
+
261
+ const mockChat = (session as any).agent.chat;
262
+ expect(mockChat.ask).toHaveBeenCalledWith(
263
+ "Hello",
264
+ expect.objectContaining({
265
+ inputs: expect.objectContaining({
266
+ userName: "Bob",
267
+ turn: "1"
268
+ })
269
+ })
270
+ );
271
+ });
272
+ });
273
+
274
+ describe("Delegation & Metadata", () => {
275
+ it("delegates withTool to the underlying agent", async () => {
276
+ mockPrisma.llmAgentSession.findUnique.mockResolvedValue({
277
+ agentClass: "TestAgent",
278
+ metadata: {}
279
+ });
280
+ mockPrisma.llmMessage.findMany.mockResolvedValue([]);
281
+
282
+ const session = (await loadAgentSession(mockPrisma as any, mockLlm, TestAgent, "123"))!;
283
+ session.withTool({ name: "extra-tool" });
284
+
285
+ expect((session as any).agent.chat.withTools).toHaveBeenCalledWith(
286
+ [{ name: "extra-tool" }],
287
+ undefined
288
+ );
289
+ });
290
+
291
+ it("updates metadata and re-resolves lazy config", async () => {
292
+ class LazyAgent extends Agent<{ color: string }> {
293
+ static model = "mock-model";
294
+ static instructions = (i: any) => `Color is ${i.color}`;
295
+ }
296
+
297
+ mockPrisma.llmAgentSession.findUnique.mockResolvedValue({
298
+ id: "123",
299
+ agentClass: "LazyAgent",
300
+ metadata: { color: "red" }
301
+ });
302
+ mockPrisma.llmMessage.findMany.mockResolvedValue([]);
303
+ mockPrisma.llmAgentSession.update = vi.fn().mockResolvedValue({});
304
+
305
+ const session = (await loadAgentSession(
306
+ mockPrisma as any,
307
+ mockLlm,
308
+ LazyAgent as any,
309
+ "123"
310
+ ))!;
311
+
312
+ // Initial resolution
313
+ expect((session as any).agent.chat.withInstructions).toHaveBeenCalledWith("Color is red", {
314
+ replace: true
315
+ });
316
+
317
+ await session.updateMetadata({ color: "blue" });
318
+
319
+ // Verify DB update
320
+ expect(mockPrisma.llmAgentSession.update).toHaveBeenCalledWith(
321
+ expect.objectContaining({
322
+ data: { metadata: { color: "blue" } }
323
+ })
324
+ );
325
+
326
+ // Verify re-resolution
327
+ expect((session as any).agent.chat.withInstructions).toHaveBeenCalledWith("Color is blue", {
328
+ replace: true
329
+ });
330
+ });
331
+ });
204
332
  });
@@ -43,6 +43,7 @@ const mockChat = {
43
43
  onToolCallEnd: vi.fn().mockReturnThis(),
44
44
  onToolCallError: vi.fn().mockReturnThis(),
45
45
  onEndMessage: vi.fn().mockReturnThis(),
46
+ afterResponse: vi.fn().mockReturnThis(),
46
47
  ask: vi.fn(),
47
48
  messages: [],
48
49
  modelId: "agent-model"