@node-llm/orm 0.4.0 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +54 -2
- package/README.md +94 -8
- package/bin/cli.js +40 -6
- package/dist/adapters/prisma/AgentSession.d.ts +171 -0
- package/dist/adapters/prisma/AgentSession.d.ts.map +1 -0
- package/dist/adapters/prisma/AgentSession.js +408 -0
- package/dist/adapters/prisma/Chat.d.ts +2 -2
- package/dist/adapters/prisma/Chat.d.ts.map +1 -1
- package/dist/adapters/prisma/Chat.js +6 -6
- package/dist/adapters/prisma/index.d.ts +25 -2
- package/dist/adapters/prisma/index.d.ts.map +1 -1
- package/dist/adapters/prisma/index.js +25 -1
- package/dist/index.d.ts +21 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +21 -1
- package/migrations/README.md +53 -0
- package/migrations/add_agent_session.sql +44 -0
- package/migrations/add_thinking_support.sql +34 -0
- package/package.json +6 -2
- package/schema.prisma +50 -33
- package/src/adapters/prisma/AgentSession.ts +601 -0
- package/src/adapters/prisma/Chat.ts +6 -6
- package/src/adapters/prisma/index.ts +33 -2
- package/src/index.ts +21 -1
- package/test/AgentSession.test.ts +332 -0
- package/test/CodeWins.test.ts +117 -0
- package/test/docs/prisma-docs.test.ts +221 -0
- package/test/docs/readme-exports.test.ts +62 -0
- package/tsconfig.tsbuildinfo +1 -1
|
@@ -0,0 +1,601 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ChatOptions,
|
|
3
|
+
AskOptions,
|
|
4
|
+
NodeLLMCore,
|
|
5
|
+
Agent,
|
|
6
|
+
AgentConfig,
|
|
7
|
+
Message,
|
|
8
|
+
ChatChunk,
|
|
9
|
+
Usage
|
|
10
|
+
} from "@node-llm/core";
|
|
11
|
+
|
|
12
|
+
/**
 * Alias for a dynamically-indexed Prisma Client.
 *
 * Deliberately `any`: the generated PrismaClient type has no index
 * signature, so model delegates cannot be looked up by a runtime string
 * (e.g. `prisma["llmChat"]`) without loosening the type. All access goes
 * through `getTable`, which validates the name at runtime and throws a
 * descriptive error when the model does not exist.
 */
type GenericPrismaClient = any;
|
|
18
|
+
|
|
19
|
+
/**
 * Record structure for the LLM Agent Session table.
 *
 * Mirrors one row of the agent-session table (default Prisma model name
 * "llmAgentSession"). The session links an Agent class (by name) to a
 * persisted chat, and carries JSON context that is injected into the
 * Agent as `inputs` whenever the session is (re)instantiated.
 */
export interface AgentSessionRecord {
  id: string;
  /** Constructor name of the Agent subclass this session was created for. */
  agentClass: string;
  /** Foreign key to the chat row holding this session's message history. */
  chatId: string;
  /** Arbitrary JSON session context; null when none was provided. */
  metadata?: Record<string, unknown> | null;
  createdAt: Date;
  updatedAt: Date;
}
|
|
30
|
+
|
|
31
|
+
/**
 * Record structure for the LLM Message table.
 *
 * One persisted chat message. Assistant rows are first created as
 * placeholders with `content: null` and filled in once the model response
 * (or the accumulated stream) arrives; the token/thinking/model fields are
 * only populated on completed assistant rows.
 */
export interface MessageRecord {
  id: string;
  /** Owning chat row. */
  chatId: string;
  /** Stored as a plain string; this adapter writes "user" and "assistant". */
  role: string;
  /** Message text; null while an assistant placeholder is pending. */
  content: string | null;
  /** JSON-serialized provider response metadata (`response.meta`). */
  contentRaw?: string | null;
  /** Extended-thinking text, when the provider returned any. */
  thinkingText?: string | null;
  /** Provider signature for the thinking block, when present. */
  thinkingSignature?: string | null;
  thinkingTokens?: number | null;
  inputTokens?: number | null;
  outputTokens?: number | null;
  /** Model that produced an assistant message, when known. */
  modelId?: string | null;
  provider?: string | null;
  createdAt: Date;
}
|
|
49
|
+
|
|
50
|
+
/**
 * Table name customization.
 *
 * Optional overrides for the Prisma model property names this adapter
 * accesses on the client. Any key left unset falls back to the
 * "llm*"-prefixed default (e.g. `chat` -> "llmChat",
 * `agentSession` -> "llmAgentSession").
 */
export interface TableNames {
  agentSession?: string;
  chat?: string;
  message?: string;
  toolCall?: string;
  request?: string;
}
|
|
60
|
+
|
|
61
|
+
/**
 * Internal interface for dynamic Prisma model access.
 *
 * Minimal structural subset of a Prisma model delegate — only the
 * operations this adapter actually calls. Real delegates accept richer
 * arguments (e.g. composite unique keys, which callers pass via `as any`);
 * values are cast to this shape through getTable/getModel.
 *
 * @typeParam T Row shape returned by the delegate (defaults to a loose record).
 */
interface PrismaModel<T = Record<string, unknown>> {
  /** Inserts one row and resolves to the created record. */
  create(args: { data: Record<string, unknown> }): Promise<T>;
  /** Updates a single row addressed by primary key. */
  update(args: { where: { id: string }; data: Record<string, unknown> }): Promise<T>;
  /** Deletes a single row addressed by primary key. */
  delete(args: { where: { id: string } }): Promise<void>;
  /** Lists rows matching a filter, optionally ordered. */
  findMany(args: {
    where: Record<string, unknown>;
    orderBy?: Record<string, string>;
  }): Promise<T[]>;
  /** Fetches a row by primary key, or null when absent. */
  findUnique(args: { where: { id: string } }): Promise<T | null>;
}
|
|
74
|
+
|
|
75
|
+
/**
 * Constructor type for Agent subclasses accepted by this adapter.
 *
 * Combines the instance side (a constructor taking optional config/chat
 * overrides) with the static side this module reads: `name` is persisted
 * on the session row and checked on load ("Code Wins" class-mismatch
 * guard), while `model`/`instructions` seed the chat record when a new
 * session is created. `tools` is declared for completeness but is not
 * read here.
 */
type AgentClass<T extends Agent<any, any> = Agent<any, any>> = (new (
  overrides?: Partial<AgentConfig<any> & ChatOptions>
) => T) & {
  name: string;
  model?: string;
  instructions?: unknown;
  tools?: unknown;
};
|
|
83
|
+
|
|
84
|
+
/**
|
|
85
|
+
* AgentSession - Wraps an Agent instance with persistence capabilities.
|
|
86
|
+
*
|
|
87
|
+
* Follows "Code Wins" sovereignty:
|
|
88
|
+
* - Model, Tools, Instructions come from the Agent class (code)
|
|
89
|
+
* - Message history comes from the database
|
|
90
|
+
* - Metadata from DB is injected as 'inputs' for dynamic resolution
|
|
91
|
+
*
|
|
92
|
+
* @example
|
|
93
|
+
* ```typescript
|
|
94
|
+
* // Create a new session
|
|
95
|
+
* const session = await createAgentSession(prisma, llm, SupportAgent, {
|
|
96
|
+
* metadata: { userId: "123" }
|
|
97
|
+
* });
|
|
98
|
+
*
|
|
99
|
+
* // Resume a session
|
|
100
|
+
* const session = await loadAgentSession(prisma, llm, SupportAgent, "sess_abc");
|
|
101
|
+
*
|
|
102
|
+
* // Agent behavior is always defined in code
|
|
103
|
+
* const result = await session.ask("Hello");
|
|
104
|
+
* ```
|
|
105
|
+
*/
|
|
106
|
+
export class AgentSession<
|
|
107
|
+
I extends Record<string, any> = Record<string, any>,
|
|
108
|
+
T extends Agent<I, any> = Agent<I, any>
|
|
109
|
+
> {
|
|
110
|
+
private currentMessageId: string | null = null;
|
|
111
|
+
private tableNames: Required<TableNames>;
|
|
112
|
+
private debug: boolean;
|
|
113
|
+
|
|
114
|
+
constructor(
|
|
115
|
+
private prisma: any,
|
|
116
|
+
private llm: NodeLLMCore,
|
|
117
|
+
private AgentClass: AgentClass<T>,
|
|
118
|
+
private record: AgentSessionRecord,
|
|
119
|
+
tableNames?: TableNames,
|
|
120
|
+
private agent: T = new AgentClass({
|
|
121
|
+
llm,
|
|
122
|
+
inputs: record.metadata as I
|
|
123
|
+
}),
|
|
124
|
+
debug: boolean = false
|
|
125
|
+
) {
|
|
126
|
+
this.debug = debug;
|
|
127
|
+
this.tableNames = {
|
|
128
|
+
agentSession: tableNames?.agentSession || "llmAgentSession",
|
|
129
|
+
chat: tableNames?.chat || "llmChat",
|
|
130
|
+
message: tableNames?.message || "llmMessage",
|
|
131
|
+
toolCall: tableNames?.toolCall || "llmToolCall",
|
|
132
|
+
request: tableNames?.request || "llmRequest"
|
|
133
|
+
};
|
|
134
|
+
|
|
135
|
+
this.registerHooks();
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
private log(...args: any[]) {
|
|
139
|
+
if (this.debug) {
|
|
140
|
+
console.log(`[@node-llm/orm]`, ...args);
|
|
141
|
+
}
|
|
142
|
+
}
|
|
143
|
+
|
|
144
|
+
/** Agent instance (for direct access if needed) */
|
|
145
|
+
get instance(): T {
|
|
146
|
+
return this.agent;
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
/** Session ID for persistence */
|
|
150
|
+
get id(): string {
|
|
151
|
+
return this.record.id;
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
/** Underlying chat ID */
|
|
155
|
+
get chatId(): string {
|
|
156
|
+
return this.record.chatId;
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
/** Session metadata */
|
|
160
|
+
get metadata(): I | null | undefined {
|
|
161
|
+
return this.record.metadata as I;
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
/** Agent class name */
|
|
165
|
+
get agentClass(): string {
|
|
166
|
+
return this.record.agentClass;
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
/** Model ID used by the agent */
|
|
170
|
+
get modelId(): string {
|
|
171
|
+
return this.agent.modelId;
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
/** Cumulative usage for this session (from agent memory) */
|
|
175
|
+
get totalUsage(): Usage {
|
|
176
|
+
return this.agent.totalUsage;
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
/** Current in-memory message history */
|
|
180
|
+
get history(): readonly Message[] {
|
|
181
|
+
return this.agent.history;
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
/**
|
|
185
|
+
* Helper to get a typed Prisma model by its dynamic name.
|
|
186
|
+
*/
|
|
187
|
+
private getModel<R = Record<string, unknown>>(name: string): PrismaModel<R> {
|
|
188
|
+
return getTable(this.prisma, name) as unknown as PrismaModel<R>;
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
/**
|
|
192
|
+
* Register persistence hooks on the agent.
|
|
193
|
+
*/
|
|
194
|
+
private registerHooks() {
|
|
195
|
+
this.agent.onToolCallStart(async (toolCall) => {
|
|
196
|
+
if (!this.currentMessageId) return;
|
|
197
|
+
const model = this.getModel(this.tableNames.toolCall);
|
|
198
|
+
await model.create({
|
|
199
|
+
data: {
|
|
200
|
+
messageId: this.currentMessageId,
|
|
201
|
+
toolCallId: toolCall.id,
|
|
202
|
+
name: toolCall.function.name,
|
|
203
|
+
arguments: toolCall.function.arguments
|
|
204
|
+
}
|
|
205
|
+
});
|
|
206
|
+
});
|
|
207
|
+
|
|
208
|
+
this.agent.onToolCallEnd(async (toolCall, result) => {
|
|
209
|
+
if (!this.currentMessageId) return;
|
|
210
|
+
const model = this.getModel(this.tableNames.toolCall);
|
|
211
|
+
try {
|
|
212
|
+
await model.update({
|
|
213
|
+
where: {
|
|
214
|
+
messageId_toolCallId: {
|
|
215
|
+
messageId: this.currentMessageId,
|
|
216
|
+
toolCallId: toolCall.id
|
|
217
|
+
}
|
|
218
|
+
} as any,
|
|
219
|
+
data: {
|
|
220
|
+
result: typeof result === "string" ? result : JSON.stringify(result)
|
|
221
|
+
}
|
|
222
|
+
});
|
|
223
|
+
} catch (e) {
|
|
224
|
+
this.log(`Failed to update tool call result: ${e}`);
|
|
225
|
+
}
|
|
226
|
+
});
|
|
227
|
+
|
|
228
|
+
this.agent.afterResponse(async (response) => {
|
|
229
|
+
const model = this.getModel(this.tableNames.request);
|
|
230
|
+
await model.create({
|
|
231
|
+
data: {
|
|
232
|
+
chatId: this.chatId,
|
|
233
|
+
messageId: this.currentMessageId,
|
|
234
|
+
provider: response.provider || "unknown",
|
|
235
|
+
model: response.model || "unknown",
|
|
236
|
+
statusCode: 200,
|
|
237
|
+
duration: 0,
|
|
238
|
+
inputTokens: response.usage?.input_tokens || 0,
|
|
239
|
+
outputTokens: response.usage?.output_tokens || 0,
|
|
240
|
+
cost: response.usage?.cost || 0
|
|
241
|
+
}
|
|
242
|
+
});
|
|
243
|
+
});
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
/**
|
|
247
|
+
* Send a message and persist the conversation.
|
|
248
|
+
*/
|
|
249
|
+
async ask(message: string, options: AskOptions & { inputs?: I } = {}): Promise<MessageRecord> {
|
|
250
|
+
const model = this.getModel<MessageRecord>(this.tableNames.message);
|
|
251
|
+
|
|
252
|
+
// Persist user message
|
|
253
|
+
await model.create({
|
|
254
|
+
data: { chatId: this.chatId, role: "user", content: message }
|
|
255
|
+
});
|
|
256
|
+
|
|
257
|
+
// Create placeholder for assistant message
|
|
258
|
+
const assistantMessage = await model.create({
|
|
259
|
+
data: { chatId: this.chatId, role: "assistant", content: null }
|
|
260
|
+
});
|
|
261
|
+
|
|
262
|
+
this.currentMessageId = assistantMessage.id;
|
|
263
|
+
|
|
264
|
+
try {
|
|
265
|
+
// Merge turn-level inputs with session metadata
|
|
266
|
+
const inputs = { ...(this.record.metadata as I), ...options.inputs };
|
|
267
|
+
|
|
268
|
+
// Get response from agent (uses code-defined config + injected history)
|
|
269
|
+
const response = await this.agent.ask(message, { ...options, inputs });
|
|
270
|
+
|
|
271
|
+
// Update assistant message with response
|
|
272
|
+
return await model.update({
|
|
273
|
+
where: { id: assistantMessage.id },
|
|
274
|
+
data: {
|
|
275
|
+
content: response.content,
|
|
276
|
+
contentRaw: JSON.stringify(response.meta),
|
|
277
|
+
inputTokens: response.usage?.input_tokens || 0,
|
|
278
|
+
outputTokens: response.usage?.output_tokens || 0,
|
|
279
|
+
thinkingText: response.thinking?.text || null,
|
|
280
|
+
thinkingSignature: response.thinking?.signature || null,
|
|
281
|
+
thinkingTokens: response.thinking?.tokens || null,
|
|
282
|
+
modelId: response.model || null,
|
|
283
|
+
provider: response.provider || null
|
|
284
|
+
}
|
|
285
|
+
});
|
|
286
|
+
} catch (error) {
|
|
287
|
+
// Clean up placeholder on error
|
|
288
|
+
await model.delete({ where: { id: assistantMessage.id } });
|
|
289
|
+
throw error;
|
|
290
|
+
}
|
|
291
|
+
}
|
|
292
|
+
|
|
293
|
+
/**
|
|
294
|
+
* Stream a response and persist the conversation.
|
|
295
|
+
*/
|
|
296
|
+
async *askStream(
|
|
297
|
+
message: string,
|
|
298
|
+
options: AskOptions & { inputs?: I } = {}
|
|
299
|
+
): AsyncGenerator<ChatChunk, MessageRecord, undefined> {
|
|
300
|
+
const model = this.getModel<MessageRecord>(this.tableNames.message);
|
|
301
|
+
|
|
302
|
+
// Persist user message
|
|
303
|
+
await model.create({
|
|
304
|
+
data: { chatId: this.chatId, role: "user", content: message }
|
|
305
|
+
});
|
|
306
|
+
|
|
307
|
+
// Create placeholder for assistant message
|
|
308
|
+
const assistantMessage = await model.create({
|
|
309
|
+
data: { chatId: this.chatId, role: "assistant", content: null }
|
|
310
|
+
});
|
|
311
|
+
|
|
312
|
+
this.currentMessageId = assistantMessage.id;
|
|
313
|
+
|
|
314
|
+
try {
|
|
315
|
+
// Merge turn-level inputs with session metadata
|
|
316
|
+
const inputs = { ...(this.record.metadata as I), ...options.inputs };
|
|
317
|
+
const stream = this.agent.stream(message, { ...options, inputs });
|
|
318
|
+
|
|
319
|
+
let fullContent = "";
|
|
320
|
+
let lastChunk: ChatChunk | null = null;
|
|
321
|
+
|
|
322
|
+
for await (const chunk of stream) {
|
|
323
|
+
fullContent += chunk.content;
|
|
324
|
+
lastChunk = chunk;
|
|
325
|
+
yield chunk;
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
// Final update with accumulated result
|
|
329
|
+
return await model.update({
|
|
330
|
+
where: { id: assistantMessage.id },
|
|
331
|
+
data: {
|
|
332
|
+
content: fullContent,
|
|
333
|
+
inputTokens: lastChunk?.usage?.input_tokens || 0,
|
|
334
|
+
outputTokens: lastChunk?.usage?.output_tokens || 0,
|
|
335
|
+
thinkingText: lastChunk?.thinking?.text || null,
|
|
336
|
+
thinkingSignature: lastChunk?.thinking?.signature || null,
|
|
337
|
+
thinkingTokens: lastChunk?.thinking?.tokens || null,
|
|
338
|
+
modelId: (lastChunk?.metadata?.model as string) || null,
|
|
339
|
+
provider: (lastChunk?.metadata?.provider as string) || null
|
|
340
|
+
}
|
|
341
|
+
});
|
|
342
|
+
} catch (error) {
|
|
343
|
+
await model.delete({ where: { id: assistantMessage.id } });
|
|
344
|
+
throw error;
|
|
345
|
+
}
|
|
346
|
+
}
|
|
347
|
+
|
|
348
|
+
/**
|
|
349
|
+
* Returns a usage summary for this chat session.
|
|
350
|
+
*/
|
|
351
|
+
async stats(): Promise<Usage> {
|
|
352
|
+
const requestModel = getTable(this.prisma, this.tableNames.request);
|
|
353
|
+
const aggregate = await (requestModel as any).aggregate({
|
|
354
|
+
where: { chatId: this.chatId },
|
|
355
|
+
_sum: {
|
|
356
|
+
inputTokens: true,
|
|
357
|
+
outputTokens: true,
|
|
358
|
+
cost: true
|
|
359
|
+
}
|
|
360
|
+
});
|
|
361
|
+
|
|
362
|
+
return {
|
|
363
|
+
input_tokens: Number(aggregate._sum.inputTokens || 0),
|
|
364
|
+
output_tokens: Number(aggregate._sum.outputTokens || 0),
|
|
365
|
+
total_tokens: Number((aggregate._sum.inputTokens || 0) + (aggregate._sum.outputTokens || 0)),
|
|
366
|
+
cost: Number(aggregate._sum.cost || 0)
|
|
367
|
+
};
|
|
368
|
+
}
|
|
369
|
+
|
|
370
|
+
/**
|
|
371
|
+
* Add a tool to the session (turn-level).
|
|
372
|
+
*/
|
|
373
|
+
withTool(tool: any): this {
|
|
374
|
+
this.agent.use(tool);
|
|
375
|
+
return this;
|
|
376
|
+
}
|
|
377
|
+
|
|
378
|
+
/**
|
|
379
|
+
* Add instructions to the session (turn-level).
|
|
380
|
+
*/
|
|
381
|
+
withInstructions(instructions: string, options?: { replace?: boolean }): this {
|
|
382
|
+
this.agent.withInstructions(instructions, options);
|
|
383
|
+
return this;
|
|
384
|
+
}
|
|
385
|
+
|
|
386
|
+
/**
|
|
387
|
+
* Returns the current full message history for this session.
|
|
388
|
+
*/
|
|
389
|
+
async messages(): Promise<MessageRecord[]> {
|
|
390
|
+
const model = this.getModel<MessageRecord>(this.tableNames.message);
|
|
391
|
+
return await model.findMany({
|
|
392
|
+
where: { chatId: this.chatId },
|
|
393
|
+
orderBy: { createdAt: "asc" }
|
|
394
|
+
});
|
|
395
|
+
}
|
|
396
|
+
|
|
397
|
+
/**
|
|
398
|
+
* Delete the entire session and its history.
|
|
399
|
+
*/
|
|
400
|
+
async delete(): Promise<void> {
|
|
401
|
+
const chatTable = this.getModel(this.tableNames.chat);
|
|
402
|
+
await chatTable.delete({ where: { id: this.chatId } });
|
|
403
|
+
// AgentSession record is deleted via Cascade from LlmChat
|
|
404
|
+
}
|
|
405
|
+
|
|
406
|
+
/**
|
|
407
|
+
* Update session metadata and re-resolve agent configuration.
|
|
408
|
+
*/
|
|
409
|
+
async updateMetadata(metadata: Partial<I>): Promise<void> {
|
|
410
|
+
const sessionTable = this.getModel<AgentSessionRecord>(this.tableNames.agentSession);
|
|
411
|
+
const newMetadata = { ...(this.record.metadata as I), ...metadata };
|
|
412
|
+
|
|
413
|
+
await sessionTable.update({
|
|
414
|
+
where: { id: this.id },
|
|
415
|
+
data: { metadata: newMetadata as any }
|
|
416
|
+
});
|
|
417
|
+
|
|
418
|
+
this.record.metadata = newMetadata as any;
|
|
419
|
+
|
|
420
|
+
// Apply changes to the underlying agent immediately
|
|
421
|
+
// resolveLazyConfig is private, so we need a cast or make it protected.
|
|
422
|
+
// Given we are in the same package, we can cast.
|
|
423
|
+
(this.agent as any).resolveLazyConfig(newMetadata);
|
|
424
|
+
}
|
|
425
|
+
}
|
|
426
|
+
|
|
427
|
+
/**
 * Options for creating a new agent session.
 */
export interface CreateAgentSessionOptions<I = any> {
  /** Session context persisted on the session row and injected into the Agent as inputs. */
  metadata?: I;
  /** Overrides for the Prisma model names (defaults are "llm*"-prefixed). */
  tableNames?: TableNames;
  /** Enables console logging of persistence operations. */
  debug?: boolean;
  /** Overrides the AgentClass's static model for this session. */
  model?: string;
  /** Provider override passed through to the chat record and agent. */
  provider?: string;
  /** Overrides the AgentClass's static instructions for this session. */
  instructions?: string;
  /** Agent tool-loop limit override. */
  maxToolCalls?: number;
}
|
|
439
|
+
|
|
440
|
+
/**
|
|
441
|
+
* Creates a new agent session and its persistent chat record.
|
|
442
|
+
*/
|
|
443
|
+
export async function createAgentSession<I extends Record<string, any>, T extends Agent<I, any>>(
|
|
444
|
+
prisma: any,
|
|
445
|
+
llm: NodeLLMCore,
|
|
446
|
+
AgentClass: AgentClass<T>,
|
|
447
|
+
options: CreateAgentSessionOptions<I> = {}
|
|
448
|
+
): Promise<AgentSession<I, T>> {
|
|
449
|
+
const tableNames = {
|
|
450
|
+
agentSession: options.tableNames?.agentSession || "llmAgentSession",
|
|
451
|
+
chat: options.tableNames?.chat || "llmChat",
|
|
452
|
+
message: options.tableNames?.message || "llmMessage"
|
|
453
|
+
};
|
|
454
|
+
|
|
455
|
+
if (options.debug) {
|
|
456
|
+
console.log(`[@node-llm/orm] createAgentSession: agentClass=${AgentClass.name}`);
|
|
457
|
+
}
|
|
458
|
+
|
|
459
|
+
// 1. Create underlying LlmChat record
|
|
460
|
+
const chatTable = getTable(prisma, tableNames.chat);
|
|
461
|
+
const chatRecord = (await chatTable.create({
|
|
462
|
+
data: {
|
|
463
|
+
model: options.model || AgentClass.model || null,
|
|
464
|
+
provider: options.provider || null,
|
|
465
|
+
instructions:
|
|
466
|
+
options.instructions ||
|
|
467
|
+
(typeof AgentClass.instructions === "string" ? AgentClass.instructions : null),
|
|
468
|
+
metadata: null // Runtime metadata goes in Chat, session context in AgentSession
|
|
469
|
+
}
|
|
470
|
+
})) as unknown as { id: string };
|
|
471
|
+
|
|
472
|
+
// 2. Create AgentSession record
|
|
473
|
+
const sessionTable = getTable(prisma, tableNames.agentSession);
|
|
474
|
+
const sessionRecord = (await sessionTable.create({
|
|
475
|
+
data: {
|
|
476
|
+
agentClass: AgentClass.name,
|
|
477
|
+
chatId: chatRecord.id,
|
|
478
|
+
metadata: (options.metadata as any) || null
|
|
479
|
+
}
|
|
480
|
+
})) as unknown as AgentSessionRecord;
|
|
481
|
+
|
|
482
|
+
// 3. Instantiate Agent with overrides
|
|
483
|
+
const agent = new AgentClass({
|
|
484
|
+
llm,
|
|
485
|
+
inputs: sessionRecord.metadata as I,
|
|
486
|
+
model: options.model,
|
|
487
|
+
provider: options.provider,
|
|
488
|
+
instructions: options.instructions,
|
|
489
|
+
maxToolCalls: options.maxToolCalls
|
|
490
|
+
});
|
|
491
|
+
|
|
492
|
+
return new AgentSession<I, T>(
|
|
493
|
+
prisma,
|
|
494
|
+
llm,
|
|
495
|
+
AgentClass,
|
|
496
|
+
sessionRecord,
|
|
497
|
+
options.tableNames,
|
|
498
|
+
agent,
|
|
499
|
+
options.debug
|
|
500
|
+
);
|
|
501
|
+
}
|
|
502
|
+
|
|
503
|
+
/**
 * Options for loading an existing agent session.
 */
export interface LoadAgentSessionOptions {
  /** Overrides for the Prisma model names (defaults are "llm*"-prefixed). */
  tableNames?: TableNames;
  /** Enables console logging of persistence operations. */
  debug?: boolean;
}
|
|
510
|
+
|
|
511
|
+
/**
|
|
512
|
+
* Loads an existing agent session and re-instantiates the agent with history.
|
|
513
|
+
*/
|
|
514
|
+
export async function loadAgentSession<I extends Record<string, any>, T extends Agent<I, any>>(
|
|
515
|
+
prisma: any,
|
|
516
|
+
llm: NodeLLMCore,
|
|
517
|
+
AgentClass: AgentClass<T>,
|
|
518
|
+
sessionId: string,
|
|
519
|
+
options: LoadAgentSessionOptions = {}
|
|
520
|
+
): Promise<AgentSession<I, T> | null> {
|
|
521
|
+
const tableNames = {
|
|
522
|
+
agentSession: options.tableNames?.agentSession || "llmAgentSession",
|
|
523
|
+
chat: options.tableNames?.chat || "llmChat",
|
|
524
|
+
message: options.tableNames?.message || "llmMessage"
|
|
525
|
+
};
|
|
526
|
+
|
|
527
|
+
if (options.debug) {
|
|
528
|
+
console.log(`[@node-llm/orm] loadAgentSession: id=${sessionId}`);
|
|
529
|
+
}
|
|
530
|
+
|
|
531
|
+
// 1. Find session record
|
|
532
|
+
const sessionTable = getTable(prisma, tableNames.agentSession);
|
|
533
|
+
const sessionRecord = (await sessionTable.findUnique({
|
|
534
|
+
where: { id: sessionId }
|
|
535
|
+
})) as unknown as AgentSessionRecord | null;
|
|
536
|
+
|
|
537
|
+
if (!sessionRecord) {
|
|
538
|
+
return null;
|
|
539
|
+
}
|
|
540
|
+
|
|
541
|
+
// 1.5. Validate Agent Class (Code Wins Sovereignty)
|
|
542
|
+
if (sessionRecord.agentClass !== AgentClass.name) {
|
|
543
|
+
throw new Error(
|
|
544
|
+
`Agent class mismatch: Session "${sessionId}" was created for "${sessionRecord.agentClass}", but is being loaded with "${AgentClass.name}".`
|
|
545
|
+
);
|
|
546
|
+
}
|
|
547
|
+
|
|
548
|
+
// 2. Load message history
|
|
549
|
+
const messageTable = getTable(prisma, tableNames.message);
|
|
550
|
+
const messages = (await messageTable.findMany({
|
|
551
|
+
where: { chatId: sessionRecord.chatId },
|
|
552
|
+
orderBy: { createdAt: "asc" }
|
|
553
|
+
})) as unknown as MessageRecord[];
|
|
554
|
+
|
|
555
|
+
// 3. Convert DB messages to NodeLLM Message format
|
|
556
|
+
const history: Message[] = messages.map((m) => ({
|
|
557
|
+
role: m.role as "user" | "assistant" | "system",
|
|
558
|
+
content: m.content || ""
|
|
559
|
+
}));
|
|
560
|
+
|
|
561
|
+
// 4. Instantiate agent with injected history, LLM, AND metadata (as inputs)
|
|
562
|
+
// "Code Wins" - model, tools, instructions come from AgentClass
|
|
563
|
+
// Metadata from DB handles the lazy resolution of behavior
|
|
564
|
+
const agent = new AgentClass({
|
|
565
|
+
llm,
|
|
566
|
+
messages: history,
|
|
567
|
+
inputs: sessionRecord.metadata as I
|
|
568
|
+
}) as T;
|
|
569
|
+
|
|
570
|
+
return new AgentSession<I, T>(
|
|
571
|
+
prisma,
|
|
572
|
+
llm,
|
|
573
|
+
AgentClass,
|
|
574
|
+
sessionRecord,
|
|
575
|
+
options.tableNames,
|
|
576
|
+
agent,
|
|
577
|
+
options.debug
|
|
578
|
+
);
|
|
579
|
+
}
|
|
580
|
+
|
|
581
|
+
/**
|
|
582
|
+
* Dynamic helper to access Prisma models by name.
|
|
583
|
+
* Handles both case-sensitive and case-insensitive lookups for flexibility.
|
|
584
|
+
*/
|
|
585
|
+
function getTable(prisma: GenericPrismaClient, tableName: string): PrismaModel {
|
|
586
|
+
const p = prisma as unknown as Record<string, PrismaModel>;
|
|
587
|
+
|
|
588
|
+
// 1. Direct match
|
|
589
|
+
const table = p[tableName];
|
|
590
|
+
if (table) return table;
|
|
591
|
+
|
|
592
|
+
// 2. Case-insensitive match
|
|
593
|
+
const keys = Object.keys(prisma).filter((k) => !k.startsWith("$") && !k.startsWith("_"));
|
|
594
|
+
const match = keys.find((k) => k.toLowerCase() === tableName.toLowerCase());
|
|
595
|
+
|
|
596
|
+
if (match && p[match]) return p[match];
|
|
597
|
+
|
|
598
|
+
throw new Error(
|
|
599
|
+
`[@node-llm/orm] Prisma table "${tableName}" not found. Available tables: ${keys.join(", ")}`
|
|
600
|
+
);
|
|
601
|
+
}
|
|
@@ -180,10 +180,10 @@ export class Chat extends BaseChat {
|
|
|
180
180
|
/**
|
|
181
181
|
* Send a message and persist the conversation.
|
|
182
182
|
*/
|
|
183
|
-
async ask(
|
|
183
|
+
async ask(message: string, options: AskOptions = {}): Promise<MessageRecord> {
|
|
184
184
|
const messageModel = this.tables.message;
|
|
185
185
|
const userMessage = await (this.prisma as any)[messageModel].create({
|
|
186
|
-
data: { chatId: this.id, role: "user", content:
|
|
186
|
+
data: { chatId: this.id, role: "user", content: message }
|
|
187
187
|
});
|
|
188
188
|
|
|
189
189
|
const assistantMessage = await (this.prisma as any)[messageModel].create({
|
|
@@ -202,7 +202,7 @@ export class Chat extends BaseChat {
|
|
|
202
202
|
}));
|
|
203
203
|
|
|
204
204
|
const coreChat = await this.prepareCoreChat(history, assistantMessage!.id);
|
|
205
|
-
const response = await coreChat.ask(
|
|
205
|
+
const response = await coreChat.ask(message, options);
|
|
206
206
|
|
|
207
207
|
return await (this.prisma as any)[messageModel].update({
|
|
208
208
|
where: { id: assistantMessage!.id },
|
|
@@ -231,12 +231,12 @@ export class Chat extends BaseChat {
|
|
|
231
231
|
* Yields ChatChunk objects for full visibility of thinking, content, and tools.
|
|
232
232
|
*/
|
|
233
233
|
async *askStream(
|
|
234
|
-
|
|
234
|
+
message: string,
|
|
235
235
|
options: AskOptions = {}
|
|
236
236
|
): AsyncGenerator<ChatChunk, MessageRecord, undefined> {
|
|
237
237
|
const messageModel = this.tables.message;
|
|
238
238
|
const userMessage = await (this.prisma as any)[messageModel].create({
|
|
239
|
-
data: { chatId: this.id, role: "user", content:
|
|
239
|
+
data: { chatId: this.id, role: "user", content: message }
|
|
240
240
|
});
|
|
241
241
|
|
|
242
242
|
const assistantMessage = await (this.prisma as any)[messageModel].create({
|
|
@@ -255,7 +255,7 @@ export class Chat extends BaseChat {
|
|
|
255
255
|
}));
|
|
256
256
|
|
|
257
257
|
const coreChat = await this.prepareCoreChat(history, assistantMessage!.id);
|
|
258
|
-
const stream = coreChat.stream(
|
|
258
|
+
const stream = coreChat.stream(message, options);
|
|
259
259
|
|
|
260
260
|
let fullContent = "";
|
|
261
261
|
let metadata: any = {};
|
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
* Prisma adapter for NodeLLM ORM.
|
|
5
5
|
* Provides automatic persistence of chats, messages, tool calls, and API requests.
|
|
6
6
|
*
|
|
7
|
-
* @example
|
|
7
|
+
* @example Chat API (low-level)
|
|
8
8
|
* ```typescript
|
|
9
9
|
* import { PrismaClient } from '@prisma/client';
|
|
10
10
|
* import { createLLM } from '@node-llm/core';
|
|
@@ -21,7 +21,38 @@
|
|
|
21
21
|
* const response = await chat.ask('Hello!');
|
|
22
22
|
* console.log(response.content);
|
|
23
23
|
* ```
|
|
24
|
+
*
|
|
25
|
+
* @example AgentSession API (recommended for agents)
|
|
26
|
+
* ```typescript
|
|
27
|
+
* import { Agent } from '@node-llm/core';
|
|
28
|
+
* import { createAgentSession, loadAgentSession } from '@node-llm/orm/prisma';
|
|
29
|
+
*
|
|
30
|
+
* class SupportAgent extends Agent {
|
|
31
|
+
* static model = 'gpt-4.1';
|
|
32
|
+
* static instructions = 'You are a helpful support agent.';
|
|
33
|
+
* }
|
|
34
|
+
*
|
|
35
|
+
* // Create new session
|
|
36
|
+
* const session = await createAgentSession(prisma, llm, SupportAgent, {
|
|
37
|
+
* metadata: { userId: 'user_123' }
|
|
38
|
+
* });
|
|
39
|
+
* await session.ask('Hello!');
|
|
40
|
+
*
|
|
41
|
+
* // Resume later (Code Wins - model/tools from class, history from DB)
|
|
42
|
+
* const session = await loadAgentSession(prisma, llm, SupportAgent, sessionId);
|
|
43
|
+
* await session.ask('Continue our conversation');
|
|
44
|
+
* ```
|
|
24
45
|
*/
|
|
25
46
|
|
|
47
|
+
// Chat API
|
|
26
48
|
export { Chat, createChat, loadChat } from "./Chat.js";
|
|
27
|
-
export type { ChatRecord, MessageRecord, ChatOptions
|
|
49
|
+
export type { ChatRecord, MessageRecord, ChatOptions } from "./Chat.js";
|
|
50
|
+
|
|
51
|
+
// AgentSession API
|
|
52
|
+
export { AgentSession, createAgentSession, loadAgentSession } from "./AgentSession.js";
|
|
53
|
+
export type {
|
|
54
|
+
AgentSessionRecord,
|
|
55
|
+
CreateAgentSessionOptions,
|
|
56
|
+
LoadAgentSessionOptions,
|
|
57
|
+
TableNames // Export from AgentSession which includes agentSession key
|
|
58
|
+
} from "./AgentSession.js";
|