@node-llm/orm 0.3.0 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,284 @@
1
/**
 * AgentSession - Wraps an Agent instance with persistence capabilities.
 *
 * Follows "Code Wins" sovereignty:
 * - Model, Tools, Instructions come from the Agent class (code)
 * - Message history comes from the database
 *
 * @example
 * ```typescript
 * // Create a new session
 * const session = await createAgentSession(prisma, llm, SupportAgent, {
 *   metadata: { userId: "123" }
 * });
 *
 * // Resume a session
 * const session = await loadAgentSession(prisma, llm, SupportAgent, "sess_abc");
 *
 * // Agent behavior is always defined in code
 * const result = await session.ask("Hello");
 * ```
 */
export class AgentSession {
    prisma;
    llm;
    AgentClass;
    record;
    agent;
    // ID of the assistant placeholder row for the in-flight turn (null when idle).
    currentMessageId = null;
    tableNames;
    debug;
    /**
     * @param prisma - Prisma client (or compatible) instance.
     * @param llm - NodeLLM core instance injected into the agent.
     * @param AgentClass - Agent class; model/tools/instructions come from code.
     * @param record - Persisted AgentSession row (id, chatId, metadata, agentClass).
     * @param tableNames - Optional overrides for the Prisma model names.
     * @param agent - Pre-built agent instance (used when resuming with history).
     * @param debug - Enables verbose console logging.
     */
    constructor(prisma, llm, AgentClass, record, tableNames, agent = new AgentClass({
        llm
    }), debug = false) {
        this.prisma = prisma;
        this.llm = llm;
        this.AgentClass = AgentClass;
        this.record = record;
        this.agent = agent;
        this.debug = debug;
        this.tableNames = {
            agentSession: tableNames?.agentSession || "llmAgentSession",
            chat: tableNames?.chat || "llmChat",
            message: tableNames?.message || "llmMessage",
            toolCall: tableNames?.toolCall || "llmToolCall",
            request: tableNames?.request || "llmRequest"
        };
    }
    /** Logs only when debug mode is enabled. */
    log(...args) {
        if (this.debug) {
            console.log(`[@node-llm/orm]`, ...args);
        }
    }
    /** Agent instance (for direct access if needed) */
    get instance() {
        return this.agent;
    }
    /** Session ID for persistence */
    get id() {
        return this.record.id;
    }
    /** Underlying chat ID */
    get chatId() {
        return this.record.chatId;
    }
    /** Session metadata */
    get metadata() {
        return this.record.metadata;
    }
    /** Agent class name */
    get agentClass() {
        return this.record.agentClass;
    }
    /** Model ID used by the agent */
    get modelId() {
        return this.agent.modelId;
    }
    /** Cumulative usage for this session (from agent memory) */
    get totalUsage() {
        return this.agent.totalUsage;
    }
    /** Current in-memory message history */
    get history() {
        return this.agent.history;
    }
    /**
     * Helper to get a typed Prisma model by its dynamic name.
     */
    getModel(name) {
        return getTable(this.prisma, name);
    }
    /**
     * Persists the user message and creates the assistant placeholder row.
     * @returns The placeholder assistant message record.
     */
    async #beginTurn(model, input) {
        await model.create({
            data: { chatId: this.chatId, role: "user", content: input }
        });
        const assistantMessage = await model.create({
            data: { chatId: this.chatId, role: "assistant", content: null }
        });
        this.currentMessageId = assistantMessage.id;
        return assistantMessage;
    }
    /**
     * Best-effort removal of the assistant placeholder after a failure.
     * Never throws, so the original provider error is not masked by cleanup issues.
     */
    async #discardPlaceholder(model, id) {
        try {
            await model.delete({ where: { id } });
        }
        catch (cleanupError) {
            this.log("failed to delete placeholder message", cleanupError);
        }
    }
    /**
     * Send a message and persist the conversation.
     */
    async ask(input, options = {}) {
        const model = this.getModel(this.tableNames.message);
        const assistantMessage = await this.#beginTurn(model, input);
        try {
            // Get response from agent (uses code-defined config + injected history)
            const response = await this.agent.ask(input, options);
            // Update assistant message with response
            return await model.update({
                where: { id: assistantMessage.id },
                data: {
                    content: response.content,
                    contentRaw: JSON.stringify(response.meta),
                    inputTokens: response.usage?.input_tokens || 0,
                    outputTokens: response.usage?.output_tokens || 0,
                    thinkingText: response.thinking?.text || null,
                    thinkingSignature: response.thinking?.signature || null,
                    thinkingTokens: response.thinking?.tokens || null,
                    modelId: response.model || null,
                    provider: response.provider || null
                }
            });
        }
        catch (error) {
            // Clean up placeholder on error (best-effort), then rethrow the cause.
            await this.#discardPlaceholder(model, assistantMessage.id);
            throw error;
        }
    }
    /**
     * Stream a response and persist the conversation.
     */
    async *askStream(input, options = {}) {
        const model = this.getModel(this.tableNames.message);
        const assistantMessage = await this.#beginTurn(model, input);
        try {
            const stream = this.agent.stream(input, options);
            let fullContent = "";
            let lastChunk = null;
            for await (const chunk of stream) {
                // Some chunks (e.g. usage- or thinking-only) may carry no text;
                // coalesce to "" so we never append the string "undefined".
                fullContent += chunk.content ?? "";
                lastChunk = chunk;
                yield chunk;
            }
            // Final update with accumulated result
            return await model.update({
                where: { id: assistantMessage.id },
                data: {
                    content: fullContent,
                    inputTokens: lastChunk?.usage?.input_tokens || 0,
                    outputTokens: lastChunk?.usage?.output_tokens || 0,
                    thinkingText: lastChunk?.thinking?.text || null,
                    thinkingSignature: lastChunk?.thinking?.signature || null,
                    thinkingTokens: lastChunk?.thinking?.tokens || null,
                    modelId: lastChunk?.metadata?.model || null,
                    provider: lastChunk?.metadata?.provider || null
                }
            });
        }
        catch (error) {
            await this.#discardPlaceholder(model, assistantMessage.id);
            throw error;
        }
    }
    /**
     * Returns the current full message history for this session.
     */
    async messages() {
        const model = this.getModel(this.tableNames.message);
        return await model.findMany({
            where: { chatId: this.chatId },
            orderBy: { createdAt: "asc" }
        });
    }
    /**
     * Delete the entire session and its history.
     */
    async delete() {
        const chatTable = this.getModel(this.tableNames.chat);
        await chatTable.delete({ where: { id: this.chatId } });
        // AgentSession record is deleted via Cascade from LlmChat
    }
}
192
/**
 * Creates a new agent session and its persistent chat record.
 */
export async function createAgentSession(prisma, llm, AgentClass, options = {}) {
    const resolvedTables = {
        agentSession: options.tableNames?.agentSession || "llmAgentSession",
        chat: options.tableNames?.chat || "llmChat",
        message: options.tableNames?.message || "llmMessage"
    };
    if (options.debug) {
        console.log(`[@node-llm/orm] createAgentSession: agentClass=${AgentClass.name}`);
    }
    // Step 1: the underlying LlmChat row that will own the message history.
    const chatRow = await getTable(prisma, resolvedTables.chat).create({
        data: {
            model: AgentClass.model || null,
            provider: null,
            instructions: AgentClass.instructions || null,
            metadata: null // Runtime metadata goes in Chat, session context in AgentSession
        }
    });
    // Step 2: the AgentSession row linking the agent class to that chat.
    const sessionRow = await getTable(prisma, resolvedTables.agentSession).create({
        data: {
            agentClass: AgentClass.name,
            chatId: chatRow.id,
            metadata: options.metadata || null
        }
    });
    return new AgentSession(prisma, llm, AgentClass, sessionRow, options.tableNames, undefined, options.debug);
}
225
/**
 * Loads an existing agent session and re-instantiates the agent with history.
 */
export async function loadAgentSession(prisma, llm, AgentClass, sessionId, options = {}) {
    const resolvedTables = {
        agentSession: options.tableNames?.agentSession || "llmAgentSession",
        chat: options.tableNames?.chat || "llmChat",
        message: options.tableNames?.message || "llmMessage"
    };
    if (options.debug) {
        console.log(`[@node-llm/orm] loadAgentSession: id=${sessionId}`);
    }
    // Look up the session row; absence is an expected outcome, not an error.
    const sessionRow = await getTable(prisma, resolvedTables.agentSession).findUnique({
        where: { id: sessionId }
    });
    if (!sessionRow) {
        return null;
    }
    // "Code Wins" sovereignty: refuse to attach a different agent class.
    if (sessionRow.agentClass !== AgentClass.name) {
        throw new Error(`Agent class mismatch: Session "${sessionId}" was created for "${sessionRow.agentClass}", but is being loaded with "${AgentClass.name}".`);
    }
    // Load the persisted message history in chronological order.
    const rows = await getTable(prisma, resolvedTables.message).findMany({
        where: { chatId: sessionRow.chatId },
        orderBy: { createdAt: "asc" }
    });
    // DB rows -> NodeLLM Message format (null content becomes "").
    const history = rows.map(({ role, content }) => ({ role, content: content || "" }));
    // Model, tools and instructions come from AgentClass; only history is injected.
    const agent = new AgentClass({
        llm,
        messages: history
    });
    return new AgentSession(prisma, llm, AgentClass, sessionRow, options.tableNames, agent, options.debug);
}
268
/**
 * Dynamic helper to access Prisma models by name.
 * Handles both case-sensitive and case-insensitive lookups for flexibility.
 */
function getTable(prisma, tableName) {
    // 1. Exact property-name match.
    const direct = prisma[tableName];
    if (direct) {
        return direct;
    }
    // 2. Case-insensitive scan over the client's model properties
    //    ($-prefixed and _-prefixed keys are Prisma internals).
    const modelKeys = Object.keys(prisma).filter((key) => !key.startsWith("$") && !key.startsWith("_"));
    const hit = modelKeys.find((key) => key.toLowerCase() === tableName.toLowerCase());
    if (hit && prisma[hit]) {
        return prisma[hit];
    }
    throw new Error(`[@node-llm/orm] Prisma table "${tableName}" not found. Available tables: ${modelKeys.join(", ")}`);
}
@@ -58,13 +58,14 @@ export declare class Chat extends BaseChat {
58
58
  /**
59
59
  * Convenience method to create a new chat session.
60
60
  */
61
- export declare function createChat<T = Record<string, any>>(prisma: PrismaClient, llm: NodeLLMCore, options?: ChatOptions & {
61
+ export declare function createChat<T = Record<string, any>>(prisma: any, llm: NodeLLMCore, options?: ChatOptions & {
62
62
  tableNames?: TableNames;
63
63
  } & T): Promise<Chat>;
64
64
  /**
65
65
  * Convenience method to load an existing chat session.
66
66
  */
67
- export declare function loadChat(prisma: PrismaClient, llm: NodeLLMCore, chatId: string, options?: ChatOptions & {
67
+ export declare function loadChat(prisma: any, llm: NodeLLMCore, chatId: string, options?: ChatOptions & {
68
68
  tableNames?: TableNames;
69
+ debug?: boolean;
69
70
  }): Promise<Chat | null>;
70
71
  //# sourceMappingURL=Chat.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../../src/adapters/prisma/Chat.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AACnD,OAAO,KAAK,EAAE,WAAW,EAAE,SAAS,EAAE,UAAU,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAChF,OAAO,EAAE,QAAQ,EAAE,KAAK,UAAU,EAAE,KAAK,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhF,OAAO,EAAE,KAAK,UAAU,EAAE,KAAK,WAAW,EAAE,CAAC;AAE7C,MAAM,WAAW,aAAa;IAC5B,EAAE,EAAE,MAAM,CAAC;IACX,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1B,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IACzB,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,iBAAiB,EAAE,MAAM,GAAG,IAAI,CAAC;IACjC,cAAc,EAAE,MAAM,GAAG,IAAI,CAAC;IAC9B,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,SAAS,EAAE,IAAI,CAAC;CACjB;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;GAEG;AACH,qBAAa,IAAK,SAAQ,QAAQ;IAK9B,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,GAAG;IALb,OAAO,CAAC,MAAM,CAAuB;IACrC,OAAO,CAAC,iBAAiB,CAAoD;gBAGnE,MAAM,EAAE,YAAY,EACpB,GAAG,EAAE,WAAW,EACxB,MAAM,EAAE,UAAU,EAClB,OAAO,GAAE,WAAgB,EACzB,UAAU,GAAE,UAAe;IAgB7B;;OAEG;YACW,eAAe;IAoH7B;;OAEG;IACG,GAAG,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,GAAE,UAAe,GAAG,OAAO,CAAC,aAAa,CAAC;IA8C1E;;;OAGG;IACI,SAAS,CACd,KAAK,EAAE,MAAM,EACb,OAAO,GAAE,UAAe,GACvB,cAAc,CAAC,SAAS,EAAE,aAAa,EAAE,SAAS,CAAC;IAmEtD;;OAEG;IACG,QAAQ,IAAI,OAAO,CAAC,aAAa,EAAE,CAAC;IAQ1C;;OAEG;IACG,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC;CAkB9B;AAED;;GAEG;AACH,wBAAsB,UAAU,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EACtD,MAAM,EAAE,YAAY,EACpB,GAAG,EAAE,WAAW,EAChB,OAAO,GAAE,WAAW,GAAG;IAAE,UAAU,CAAC,EAAE,UAAU,CAAA;CAAE,GAAG,CAAa,GACjE,OAAO,CAAC,IAAI,CAAC,CA0Bf;AAED;;GAEG;AACH,wBAAsB,QAAQ,CAC5B,MAAM,EAAE,YAAY,EACpB,GAAG,EAAE,WAAW,EAChB,MAAM,EAAE,MAAM,EACd,OAAO,GAAE,WAAW,GAAG;IAAE,UAAU,CAAC,EAAE,UAAU,CAAA;CAAO,GACtD,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,CAQtB"}
1
+ {"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../../src/adapters/prisma/Chat.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AACnD,OAAO,KAAK,EAAE,WAAW,EAAE,SAAS,EAAE,UAAU,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAChF,OAAO,EAAE,QAAQ,EAAE,KAAK,UAAU,EAAE,KAAK,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhF,OAAO,EAAE,KAAK,UAAU,EAAE,KAAK,WAAW,EAAE,CAAC;AAE7C,MAAM,WAAW,aAAa;IAC5B,EAAE,EAAE,MAAM,CAAC;IACX,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1B,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IACzB,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,iBAAiB,EAAE,MAAM,GAAG,IAAI,CAAC;IACjC,cAAc,EAAE,MAAM,GAAG,IAAI,CAAC;IAC9B,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,SAAS,EAAE,IAAI,CAAC;CACjB;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;GAEG;AACH,qBAAa,IAAK,SAAQ,QAAQ;IAK9B,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,GAAG;IALb,OAAO,CAAC,MAAM,CAAuB;IACrC,OAAO,CAAC,iBAAiB,CAAoD;gBAGnE,MAAM,EAAE,YAAY,EACpB,GAAG,EAAE,WAAW,EACxB,MAAM,EAAE,UAAU,EAClB,OAAO,GAAE,WAAgB,EACzB,UAAU,GAAE,UAAe;IAgB7B;;OAEG;YACW,eAAe;IAqH7B;;OAEG;IACG,GAAG,CAAC,KAAK,EAAE,MAAM,EAAE,OAAO,GAAE,UAAe,GAAG,OAAO,CAAC,aAAa,CAAC;IA8C1E;;;OAGG;IACI,SAAS,CACd,KAAK,EAAE,MAAM,EACb,OAAO,GAAE,UAAe,GACvB,cAAc,CAAC,SAAS,EAAE,aAAa,EAAE,SAAS,CAAC;IAmEtD;;OAEG;IACG,QAAQ,IAAI,OAAO,CAAC,aAAa,EAAE,CAAC;IAQ1C;;OAEG;IACG,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC;CAkB9B;AAuBD;;GAEG;AACH,wBAAsB,UAAU,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EACtD,MAAM,EAAE,GAAG,EACX,GAAG,EAAE,WAAW,EAChB,OAAO,GAAE,WAAW,GAAG;IAAE,UAAU,CAAC,EAAE,UAAU,CAAA;CAAE,GAAG,CAAa,GACjE,OAAO,CAAC,IAAI,CAAC,CA2Cf;AAED;;GAEG;AACH,wBAAsB,QAAQ,CAC5B,MAAM,EAAE,GAAG,EACX,GAAG,EAAE,WAAW,EAChB,MAAM,EAAE,MAAM,EACd,OAAO,GAAE,WAAW,GAAG;IAAE,UAAU,CAAC,EAAE,UAAU,CAAC;IAAC,KAAK,CAAC,EAAE,OAAO,CAAA;CAAO,GACvE,OAAO,CAAC,IAAI,G
AAG,IAAI,CAAC,CAiBtB"}
@@ -31,7 +31,8 @@ export class Chat extends BaseChat {
31
31
  const llmInstance = provider ? this.llm.withProvider(provider) : this.llm;
32
32
  const coreChat = llmInstance.chat(model || undefined, {
33
33
  messages: history,
34
- ...this.localOptions
34
+ ...this.localOptions,
35
+ middlewares: this.customMiddlewares
35
36
  });
36
37
  // Register tools
37
38
  if (this.customTools.length > 0) {
@@ -265,14 +266,37 @@ export class Chat extends BaseChat {
265
266
  };
266
267
  }
267
268
  }
269
/**
 * Helper to find the correct table property in the prisma client.
 * Prisma usually camelCases model names (e.g., AssistantChat -> assistantChat),
 * but mapping can vary based on configuration.
 */
function getTable(prisma, tableName) {
    // Direct hit on the exact property name.
    const exact = prisma[tableName];
    if (exact) {
        return exact;
    }
    // Fall back to a case-insensitive scan of the client's model keys
    // ($-prefixed and _-prefixed keys are Prisma internals).
    const modelKeys = Object.keys(prisma).filter((key) => !key.startsWith("$") && !key.startsWith("_"));
    const hit = modelKeys.find((key) => key.toLowerCase() === tableName.toLowerCase());
    if (hit) {
        return prisma[hit];
    }
    // A dedicated model-name lookup could go here if the property-name
    // mapping ever diverges; for now, fail loudly with the available keys.
    throw new Error(`[@node-llm/orm] Prisma table "${tableName}" not found. Available tables: ${modelKeys.join(", ")}`);
}
268
286
  /**
269
287
  * Convenience method to create a new chat session.
270
288
  */
271
289
  export async function createChat(prisma, llm, options = {}) {
272
290
  const chatTable = options.tableNames?.chat || "llmChat";
273
291
  // Extract known options so we don't double-pass them or pass them incorrectly
274
- const { model, provider, instructions, metadata, tableNames: _tableNames, debug: _debug, persistence: _persistence, ...extras } = options;
275
- const record = await prisma[chatTable].create({
292
+ // runtime options should NOT be persisted to DB
293
+ const { model, provider, instructions, metadata, tableNames: _tableNames, debug: _debug, persistence: _persistence, middlewares: _middlewares, maxToolCalls: _maxToolCalls, thinking: _thinking, temperature: _temperature, maxTokens: _maxTokens, headers: _headers, requestTimeout: _requestTimeout, params: _params, ...extras } = options;
294
+ if (options.debug) {
295
+ const keys = Object.keys(prisma).filter((k) => !k.startsWith("$") && !k.startsWith("_"));
296
+ console.log(`[@node-llm/orm] createChat: table=${chatTable}, availableTables=${keys.join(", ")}`);
297
+ }
298
+ const table = getTable(prisma, chatTable);
299
+ const record = await table.create({
276
300
  data: {
277
301
  model,
278
302
  provider,
@@ -288,10 +312,16 @@ export async function createChat(prisma, llm, options = {}) {
288
312
  */
289
313
export async function loadChat(prisma, llm, chatId, options = {}) {
    const chatTableName = options.tableNames?.chat || "llmChat";
    if (options.debug) {
        const modelKeys = Object.keys(prisma).filter((key) => !key.startsWith("$") && !key.startsWith("_"));
        console.log(`[@node-llm/orm] loadChat: table=${chatTableName}, availableTables=${modelKeys.join(", ")}`);
    }
    // Resolve the chat table dynamically (supports custom table names).
    const table = getTable(prisma, chatTableName);
    const record = await table.findUnique({
        where: { id: chatId }
    });
    if (!record) {
        return null;
    }
    // Reconstruct chat with options from DB or manual overrides if needed
    return new Chat(prisma, llm, record, options, options.tableNames);
}
@@ -4,7 +4,7 @@
4
4
  * Prisma adapter for NodeLLM ORM.
5
5
  * Provides automatic persistence of chats, messages, tool calls, and API requests.
6
6
  *
7
- * @example
7
+ * @example Chat API (low-level)
8
8
  * ```typescript
9
9
  * import { PrismaClient } from '@prisma/client';
10
10
  * import { createLLM } from '@node-llm/core';
@@ -21,7 +21,30 @@
21
21
  * const response = await chat.ask('Hello!');
22
22
  * console.log(response.content);
23
23
  * ```
24
+ *
25
+ * @example AgentSession API (recommended for agents)
26
+ * ```typescript
27
+ * import { Agent } from '@node-llm/core';
28
+ * import { createAgentSession, loadAgentSession } from '@node-llm/orm/prisma';
29
+ *
30
+ * class SupportAgent extends Agent {
31
+ * static model = 'gpt-4.1';
32
+ * static instructions = 'You are a helpful support agent.';
33
+ * }
34
+ *
35
+ * // Create new session
36
+ * const session = await createAgentSession(prisma, llm, SupportAgent, {
37
+ * metadata: { userId: 'user_123' }
38
+ * });
39
+ * await session.ask('Hello!');
40
+ *
41
+ * // Resume later (Code Wins - model/tools from class, history from DB)
42
+ * const session = await loadAgentSession(prisma, llm, SupportAgent, sessionId);
43
+ * await session.ask('Continue our conversation');
44
+ * ```
24
45
  */
25
46
  export { Chat, createChat, loadChat } from "./Chat.js";
26
- export type { ChatRecord, MessageRecord, ChatOptions, TableNames } from "./Chat.js";
47
+ export type { ChatRecord, MessageRecord, ChatOptions } from "./Chat.js";
48
+ export { AgentSession, createAgentSession, loadAgentSession } from "./AgentSession.js";
49
+ export type { AgentSessionRecord, CreateAgentSessionOptions, LoadAgentSessionOptions, TableNames } from "./AgentSession.js";
27
50
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/adapters/prisma/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AAEH,OAAO,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAC;AACvD,YAAY,EAAE,UAAU,EAAE,aAAa,EAAE,WAAW,EAAE,UAAU,EAAE,MAAM,WAAW,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/adapters/prisma/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4CG;AAGH,OAAO,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAC;AACvD,YAAY,EAAE,UAAU,EAAE,aAAa,EAAE,WAAW,EAAE,MAAM,WAAW,CAAC;AAGxE,OAAO,EAAE,YAAY,EAAE,kBAAkB,EAAE,gBAAgB,EAAE,MAAM,mBAAmB,CAAC;AACvF,YAAY,EACV,kBAAkB,EAClB,yBAAyB,EACzB,uBAAuB,EACvB,UAAU,EACX,MAAM,mBAAmB,CAAC"}
@@ -4,7 +4,7 @@
4
4
  * Prisma adapter for NodeLLM ORM.
5
5
  * Provides automatic persistence of chats, messages, tool calls, and API requests.
6
6
  *
7
- * @example
7
+ * @example Chat API (low-level)
8
8
  * ```typescript
9
9
  * import { PrismaClient } from '@prisma/client';
10
10
  * import { createLLM } from '@node-llm/core';
@@ -21,5 +21,29 @@
21
21
  * const response = await chat.ask('Hello!');
22
22
  * console.log(response.content);
23
23
  * ```
24
+ *
25
+ * @example AgentSession API (recommended for agents)
26
+ * ```typescript
27
+ * import { Agent } from '@node-llm/core';
28
+ * import { createAgentSession, loadAgentSession } from '@node-llm/orm/prisma';
29
+ *
30
+ * class SupportAgent extends Agent {
31
+ * static model = 'gpt-4.1';
32
+ * static instructions = 'You are a helpful support agent.';
33
+ * }
34
+ *
35
+ * // Create new session
36
+ * const session = await createAgentSession(prisma, llm, SupportAgent, {
37
+ * metadata: { userId: 'user_123' }
38
+ * });
39
+ * await session.ask('Hello!');
40
+ *
41
+ * // Resume later (Code Wins - model/tools from class, history from DB)
42
+ * const session = await loadAgentSession(prisma, llm, SupportAgent, sessionId);
43
+ * await session.ask('Continue our conversation');
44
+ * ```
24
45
  */
46
+ // Chat API
25
47
  export { Chat, createChat, loadChat } from "./Chat.js";
48
+ // AgentSession API
49
+ export { AgentSession, createAgentSession, loadAgentSession } from "./AgentSession.js";
package/dist/index.d.ts CHANGED
@@ -23,13 +23,33 @@
23
23
  * await chat.ask('Hello!');
24
24
  * ```
25
25
  *
26
+ * ## Agent Sessions (Recommended for Agents)
27
+ *
28
+ * ```typescript
29
+ * import { Agent } from '@node-llm/core';
30
+ * import { createAgentSession, loadAgentSession } from '@node-llm/orm/prisma';
31
+ *
32
+ * class SupportAgent extends Agent {
33
+ * static model = 'gpt-4.1';
34
+ * static instructions = 'You are a helpful support agent.';
35
+ * }
36
+ *
37
+ * // Create and persist
38
+ * const session = await createAgentSession(prisma, llm, SupportAgent);
39
+ * await session.ask('Hello!');
40
+ *
41
+ * // Resume later (Code Wins - model/tools from class, history from DB)
42
+ * const session = await loadAgentSession(prisma, llm, SupportAgent, sessionId);
43
+ * ```
44
+ *
26
45
  * ## Adapters
27
46
  *
28
47
  * - `@node-llm/orm/prisma` - Prisma adapter (recommended)
29
48
  *
30
49
  * ## Schema
31
50
  *
32
- * The ORM tracks four core entities:
51
+ * The ORM tracks five core entities:
52
+ * - **AgentSession** - Links Agent class to persistent Chat (v0.5.0+)
33
53
  * - **Chat** - Session container (model, provider, instructions)
34
54
  * - **Message** - User/Assistant conversation history
35
55
  * - **ToolCall** - Tool executions (name, arguments, results)
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAoCG;AAGH,cAAc,4BAA4B,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAwDG;AAGH,cAAc,4BAA4B,CAAC"}
package/dist/index.js CHANGED
@@ -23,13 +23,33 @@
23
23
  * await chat.ask('Hello!');
24
24
  * ```
25
25
  *
26
+ * ## Agent Sessions (Recommended for Agents)
27
+ *
28
+ * ```typescript
29
+ * import { Agent } from '@node-llm/core';
30
+ * import { createAgentSession, loadAgentSession } from '@node-llm/orm/prisma';
31
+ *
32
+ * class SupportAgent extends Agent {
33
+ * static model = 'gpt-4.1';
34
+ * static instructions = 'You are a helpful support agent.';
35
+ * }
36
+ *
37
+ * // Create and persist
38
+ * const session = await createAgentSession(prisma, llm, SupportAgent);
39
+ * await session.ask('Hello!');
40
+ *
41
+ * // Resume later (Code Wins - model/tools from class, history from DB)
42
+ * const session = await loadAgentSession(prisma, llm, SupportAgent, sessionId);
43
+ * ```
44
+ *
26
45
  * ## Adapters
27
46
  *
28
47
  * - `@node-llm/orm/prisma` - Prisma adapter (recommended)
29
48
  *
30
49
  * ## Schema
31
50
  *
32
- * The ORM tracks four core entities:
51
+ * The ORM tracks five core entities:
52
+ * - **AgentSession** - Links Agent class to persistent Chat (v0.5.0+)
33
53
  * - **Chat** - Session container (model, provider, instructions)
34
54
  * - **Message** - User/Assistant conversation history
35
55
  * - **ToolCall** - Tool executions (name, arguments, results)
@@ -0,0 +1,53 @@
1
+ # @node-llm/orm Migrations
2
+
3
+ Reference SQL migrations for upgrading your database schema.
4
+
5
+ ## Who Needs These?
6
+
7
+ | User Type | Action |
8
+ | ----------------- | ------------------------------------------------------------------ |
9
+ | **New user** | ❌ Skip these. Run `npx @node-llm/orm init` → full schema included |
10
+ | **Existing user** | ✅ Use these to upgrade without losing data |
11
+
12
+ > **Note:** These migrations are **idempotent** — safe to run multiple times. They use `IF NOT EXISTS` and conditional checks, so running them on a fresh database won't cause errors.
13
+
14
+ ## Available Migrations
15
+
16
+ | File | Version | Description |
17
+ | -------------------------- | ------- | --------------------------------------------------- |
18
+ | `add_thinking_support.sql` | v0.2.0+ | Extended Thinking columns (Claude 3.7, DeepSeek R1) |
19
+ | `add_agent_session.sql` | v0.5.0+ | AgentSession for persistent agent conversations |
20
+
21
+ ## How to Use
22
+
23
+ ### Option 1: Copy and Apply
24
+
25
+ ```bash
26
+ # Create migration folder
27
+ mkdir -p prisma/migrations/$(date +%Y%m%d%H%M%S)_add_agent_session
28
+
29
+ # Copy the SQL
30
+ cp node_modules/@node-llm/orm/migrations/add_agent_session.sql \
31
+ prisma/migrations/$(date +%Y%m%d%H%M%S)_add_agent_session/migration.sql
32
+
33
+ # Mark as applied
34
+ npx prisma migrate resolve --applied $(date +%Y%m%d%H%M%S)_add_agent_session
35
+ ```
36
+
37
+ ### Option 2: Let Prisma Generate
38
+
39
+ 1. Update your `schema.prisma` with the new models from `@node-llm/orm/schema.prisma`
40
+ 2. Run: `npx prisma migrate dev --name add_agent_session`
41
+
42
+ ## Custom Table Names
43
+
44
+ If you're using custom table names (e.g., `AssistantMessage` instead of `LlmMessage`),
45
+ edit the SQL file to match your table names before applying.
46
+
47
+ ## Documentation
48
+
49
+ See the full [Migration Guide](https://node-llm.eshaiju.com/orm/migrations) for:
50
+
51
+ - Baseline migrations
52
+ - Production deployment
53
+ - Renaming columns safely
@@ -0,0 +1,44 @@
1
-- Migration: Add AgentSession support
-- Version: @node-llm/orm v0.5.0+
--
-- This migration adds the LlmAgentSession table for persistent agent conversations.
-- Run this if you're upgrading from a previous version of @node-llm/orm.
--
-- Usage:
--   1. Copy this file to your prisma/migrations/<timestamp>_add_agent_session/ folder
--   2. Run: npx prisma migrate resolve --applied <timestamp>_add_agent_session
--   Or simply run: npx prisma migrate dev --name add_agent_session
--
-- NOTE(review): the DO $$ guard and pg_constraint lookup below are
-- PostgreSQL-specific; other databases need an equivalent idempotency check.

-- Create the AgentSession table
CREATE TABLE IF NOT EXISTS "LlmAgentSession" (
    "id" TEXT NOT NULL,
    "agentClass" TEXT NOT NULL,
    "chatId" TEXT NOT NULL,
    "metadata" JSONB,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    -- "updatedAt" has no default: Prisma Client sets it on every write (@updatedAt)
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "LlmAgentSession_pkey" PRIMARY KEY ("id")
);

-- Create unique constraint on chatId (1:1 with LlmChat)
CREATE UNIQUE INDEX IF NOT EXISTS "LlmAgentSession_chatId_key" ON "LlmAgentSession"("chatId");

-- Create indexes for common queries
CREATE INDEX IF NOT EXISTS "LlmAgentSession_agentClass_idx" ON "LlmAgentSession"("agentClass");
CREATE INDEX IF NOT EXISTS "LlmAgentSession_createdAt_idx" ON "LlmAgentSession"("createdAt");

-- Add foreign key constraint (idempotent - skips if already exists)
DO $$
BEGIN
  IF NOT EXISTS (
    SELECT 1 FROM pg_constraint WHERE conname = 'LlmAgentSession_chatId_fkey'
  ) THEN
    ALTER TABLE "LlmAgentSession"
    ADD CONSTRAINT "LlmAgentSession_chatId_fkey"
    FOREIGN KEY ("chatId")
    REFERENCES "LlmChat"("id")
    ON DELETE CASCADE
    ON UPDATE CASCADE;
  END IF;
END $$;
@@ -0,0 +1,34 @@
1
-- Migration: Add Extended Thinking support
-- Version: @node-llm/orm v0.2.0+
--
-- This migration adds columns for Extended Thinking (Claude 3.7+, DeepSeek R1).
-- Run this if you're upgrading from a previous version of @node-llm/orm.
--
-- Usage:
--   1. Copy this file to your prisma/migrations/<timestamp>_add_thinking_support/ folder
--   2. Run: npx prisma migrate resolve --applied <timestamp>_add_thinking_support
--   Or simply run: npx prisma migrate dev --name add_thinking_support
--
-- Note: Adjust table names if using custom names (e.g., AssistantMessage instead of LlmMessage)
-- Note: This migration is idempotent - safe to run multiple times.
-- NOTE(review): the DO $$ guard and information_schema check are
-- PostgreSQL-specific; other databases need an equivalent idempotency check.

-- AlterTable: Convert metadata to native JSONB (idempotent - skips if already JSONB)
DO $$
BEGIN
  IF EXISTS (
    SELECT 1 FROM information_schema.columns
    WHERE table_name = 'LlmChat' AND column_name = 'metadata' AND data_type != 'jsonb'
  ) THEN
    ALTER TABLE "LlmChat" ALTER COLUMN "metadata" TYPE JSONB USING metadata::JSONB;
  END IF;
END $$;

-- AlterTable: Add thinking columns to Message
ALTER TABLE "LlmMessage"
ADD COLUMN IF NOT EXISTS "thinkingText" TEXT,
ADD COLUMN IF NOT EXISTS "thinkingSignature" TEXT,
ADD COLUMN IF NOT EXISTS "thinkingTokens" INTEGER;

-- AlterTable: Add thought signature to ToolCall
ALTER TABLE "LlmToolCall"
ADD COLUMN IF NOT EXISTS "thoughtSignature" TEXT;