@node-llm/orm 0.4.0 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +54 -2
- package/README.md +94 -8
- package/bin/cli.js +40 -6
- package/dist/adapters/prisma/AgentSession.d.ts +171 -0
- package/dist/adapters/prisma/AgentSession.d.ts.map +1 -0
- package/dist/adapters/prisma/AgentSession.js +408 -0
- package/dist/adapters/prisma/Chat.d.ts +2 -2
- package/dist/adapters/prisma/Chat.d.ts.map +1 -1
- package/dist/adapters/prisma/Chat.js +6 -6
- package/dist/adapters/prisma/index.d.ts +25 -2
- package/dist/adapters/prisma/index.d.ts.map +1 -1
- package/dist/adapters/prisma/index.js +25 -1
- package/dist/index.d.ts +21 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +21 -1
- package/migrations/README.md +53 -0
- package/migrations/add_agent_session.sql +44 -0
- package/migrations/add_thinking_support.sql +34 -0
- package/package.json +6 -2
- package/schema.prisma +50 -33
- package/src/adapters/prisma/AgentSession.ts +601 -0
- package/src/adapters/prisma/Chat.ts +6 -6
- package/src/adapters/prisma/index.ts +33 -2
- package/src/index.ts +21 -1
- package/test/AgentSession.test.ts +332 -0
- package/test/CodeWins.test.ts +117 -0
- package/test/docs/prisma-docs.test.ts +221 -0
- package/test/docs/readme-exports.test.ts +62 -0
- package/tsconfig.tsbuildinfo +1 -1
|
@@ -0,0 +1,408 @@
|
|
|
1
|
+
/**
 * AgentSession - Wraps an Agent instance with persistence capabilities.
 *
 * Follows "Code Wins" sovereignty:
 * - Model, Tools, Instructions come from the Agent class (code)
 * - Message history comes from the database
 * - Metadata from DB is injected as 'inputs' for dynamic resolution
 *
 * @example
 * ```typescript
 * // Create a new session
 * const session = await createAgentSession(prisma, llm, SupportAgent, {
 *   metadata: { userId: "123" }
 * });
 *
 * // Resume a session
 * const session = await loadAgentSession(prisma, llm, SupportAgent, "sess_abc");
 *
 * // Agent behavior is always defined in code
 * const result = await session.ask("Hello");
 * ```
 */
export class AgentSession {
  prisma;
  llm;
  AgentClass;
  record;
  agent;
  // ID of the in-flight assistant message row. Persistence hooks attach
  // tool-call and request rows to it; it is reset to null when a turn ends
  // so a late-firing hook can never write against a finished message.
  currentMessageId = null;
  tableNames;
  debug;

  /**
   * @param prisma      Prisma client instance.
   * @param llm         NodeLLM client passed through to the agent.
   * @param AgentClass  Agent class ("Code Wins": behavior lives here).
   * @param record      Persisted AgentSession row (id, chatId, metadata, agentClass).
   * @param tableNames  Optional overrides for the Prisma model names.
   * @param agent       Pre-built agent instance; defaults to a fresh AgentClass
   *                    constructed with the session metadata as inputs.
   * @param debug       When true, logs persistence activity to the console.
   */
  constructor(prisma, llm, AgentClass, record, tableNames, agent = new AgentClass({
    llm,
    inputs: record.metadata
  }), debug = false) {
    this.prisma = prisma;
    this.llm = llm;
    this.AgentClass = AgentClass;
    this.record = record;
    this.agent = agent;
    this.debug = debug;
    this.tableNames = {
      agentSession: tableNames?.agentSession || "llmAgentSession",
      chat: tableNames?.chat || "llmChat",
      message: tableNames?.message || "llmMessage",
      toolCall: tableNames?.toolCall || "llmToolCall",
      request: tableNames?.request || "llmRequest"
    };
    this.registerHooks();
  }

  /** Debug logger; no-op unless constructed with debug=true. */
  log(...args) {
    if (this.debug) {
      console.log(`[@node-llm/orm]`, ...args);
    }
  }

  /** Agent instance (for direct access if needed) */
  get instance() {
    return this.agent;
  }

  /** Session ID for persistence */
  get id() {
    return this.record.id;
  }

  /** Underlying chat ID */
  get chatId() {
    return this.record.chatId;
  }

  /** Session metadata */
  get metadata() {
    return this.record.metadata;
  }

  /** Agent class name */
  get agentClass() {
    return this.record.agentClass;
  }

  /** Model ID used by the agent */
  get modelId() {
    return this.agent.modelId;
  }

  /** Cumulative usage for this session (from agent memory) */
  get totalUsage() {
    return this.agent.totalUsage;
  }

  /** Current in-memory message history */
  get history() {
    return this.agent.history;
  }

  /**
   * Helper to get a typed Prisma model by its dynamic name.
   */
  getModel(name) {
    return getTable(this.prisma, name);
  }

  /**
   * Register persistence hooks on the agent.
   *
   * - onToolCallStart: inserts a tool-call row attached to the current
   *   assistant message.
   * - onToolCallEnd: stores the tool result on that row (best-effort; a
   *   failed update is logged, not thrown).
   * - afterResponse: records one request row (provider/model/usage) per
   *   LLM round-trip.
   */
  registerHooks() {
    this.agent.onToolCallStart(async (toolCall) => {
      if (!this.currentMessageId)
        return;
      const model = this.getModel(this.tableNames.toolCall);
      await model.create({
        data: {
          messageId: this.currentMessageId,
          toolCallId: toolCall.id,
          name: toolCall.function.name,
          arguments: toolCall.function.arguments
        }
      });
    });
    this.agent.onToolCallEnd(async (toolCall, result) => {
      if (!this.currentMessageId)
        return;
      const model = this.getModel(this.tableNames.toolCall);
      try {
        await model.update({
          where: {
            messageId_toolCallId: {
              messageId: this.currentMessageId,
              toolCallId: toolCall.id
            }
          },
          data: {
            // Tool results may be any JSON-serializable value; store strings as-is.
            result: typeof result === "string" ? result : JSON.stringify(result)
          }
        });
      }
      catch (e) {
        // Best-effort: a missing/failed row must not abort the turn.
        this.log(`Failed to update tool call result: ${e}`);
      }
    });
    this.agent.afterResponse(async (response) => {
      const model = this.getModel(this.tableNames.request);
      await model.create({
        data: {
          chatId: this.chatId,
          messageId: this.currentMessageId,
          provider: response.provider || "unknown",
          model: response.model || "unknown",
          statusCode: 200,
          duration: 0,
          inputTokens: response.usage?.input_tokens || 0,
          outputTokens: response.usage?.output_tokens || 0,
          cost: response.usage?.cost || 0
        }
      });
    });
  }

  /**
   * Send a message and persist the conversation.
   *
   * Persists the user message, creates an assistant placeholder row (so
   * hooks have a messageId to attach to), runs the agent, then fills the
   * placeholder with content, usage, and thinking data.
   *
   * @param message Plain-text user message.
   * @param options Turn-level AskOptions; `options.inputs` is merged over
   *                the session metadata for dynamic resolution.
   * @returns The updated assistant message record.
   * @throws  Re-throws any agent error after deleting the placeholder row.
   */
  async ask(message, options = {}) {
    const model = this.getModel(this.tableNames.message);
    // Persist user message
    await model.create({
      data: { chatId: this.chatId, role: "user", content: message }
    });
    // Create placeholder for assistant message
    const assistantMessage = await model.create({
      data: { chatId: this.chatId, role: "assistant", content: null }
    });
    this.currentMessageId = assistantMessage.id;
    try {
      // Merge turn-level inputs with session metadata
      const inputs = { ...this.record.metadata, ...options.inputs };
      // Get response from agent (uses code-defined config + injected history)
      const response = await this.agent.ask(message, { ...options, inputs });
      // Update assistant message with response
      return await model.update({
        where: { id: assistantMessage.id },
        data: {
          content: response.content,
          contentRaw: JSON.stringify(response.meta),
          inputTokens: response.usage?.input_tokens || 0,
          outputTokens: response.usage?.output_tokens || 0,
          thinkingText: response.thinking?.text || null,
          thinkingSignature: response.thinking?.signature || null,
          thinkingTokens: response.thinking?.tokens || null,
          modelId: response.model || null,
          provider: response.provider || null
        }
      });
    }
    catch (error) {
      // Clean up placeholder on error
      await model.delete({ where: { id: assistantMessage.id } });
      throw error;
    }
    finally {
      // Turn is over: stop hooks from attaching to this message.
      this.currentMessageId = null;
    }
  }

  /**
   * Stream a response and persist the conversation.
   *
   * Same persistence flow as ask(), but yields each ChatChunk as it
   * arrives and writes the accumulated result once the stream ends.
   *
   * @param message Plain-text user message.
   * @param options Turn-level options; `options.inputs` is merged over
   *                the session metadata.
   * @yields  ChatChunk objects from the agent stream.
   * @returns The updated assistant message record (generator return value).
   * @throws  Re-throws stream errors after deleting the placeholder row.
   */
  async *askStream(message, options = {}) {
    const model = this.getModel(this.tableNames.message);
    // Persist user message
    await model.create({
      data: { chatId: this.chatId, role: "user", content: message }
    });
    // Create placeholder for assistant message
    const assistantMessage = await model.create({
      data: { chatId: this.chatId, role: "assistant", content: null }
    });
    this.currentMessageId = assistantMessage.id;
    try {
      // Merge turn-level inputs with session metadata
      const inputs = { ...this.record.metadata, ...options.inputs };
      const stream = this.agent.stream(message, { ...options, inputs });
      let fullContent = "";
      let lastChunk = null;
      for await (const chunk of stream) {
        // Guard: chunks without textual content must not append "undefined".
        fullContent += chunk.content ?? "";
        lastChunk = chunk;
        yield chunk;
      }
      // Final update with accumulated result
      return await model.update({
        where: { id: assistantMessage.id },
        data: {
          content: fullContent,
          inputTokens: lastChunk?.usage?.input_tokens || 0,
          outputTokens: lastChunk?.usage?.output_tokens || 0,
          thinkingText: lastChunk?.thinking?.text || null,
          thinkingSignature: lastChunk?.thinking?.signature || null,
          thinkingTokens: lastChunk?.thinking?.tokens || null,
          modelId: lastChunk?.metadata?.model || null,
          provider: lastChunk?.metadata?.provider || null
        }
      });
    }
    catch (error) {
      await model.delete({ where: { id: assistantMessage.id } });
      throw error;
    }
    finally {
      // Turn is over: stop hooks from attaching to this message.
      this.currentMessageId = null;
    }
  }

  /**
   * Returns a usage summary for this chat session.
   *
   * Aggregates the persisted request rows, so it reflects all turns ever
   * stored for this chat, not just the in-memory agent history.
   */
  async stats() {
    const requestModel = getTable(this.prisma, this.tableNames.request);
    const aggregate = await requestModel.aggregate({
      where: { chatId: this.chatId },
      _sum: {
        inputTokens: true,
        outputTokens: true,
        cost: true
      }
    });
    // Convert each operand BEFORE adding: Prisma may surface sums as
    // BigInt/Decimal, and adding raw values can concatenate or throw.
    const inputTokens = Number(aggregate._sum.inputTokens || 0);
    const outputTokens = Number(aggregate._sum.outputTokens || 0);
    return {
      input_tokens: inputTokens,
      output_tokens: outputTokens,
      total_tokens: inputTokens + outputTokens,
      cost: Number(aggregate._sum.cost || 0)
    };
  }

  /**
   * Add a tool to the session (turn-level).
   * @returns this, for chaining.
   */
  withTool(tool) {
    this.agent.use(tool);
    return this;
  }

  /**
   * Add instructions to the session (turn-level).
   * @returns this, for chaining.
   */
  withInstructions(instructions, options) {
    this.agent.withInstructions(instructions, options);
    return this;
  }

  /**
   * Returns the current full message history for this session.
   */
  async messages() {
    const model = this.getModel(this.tableNames.message);
    return await model.findMany({
      where: { chatId: this.chatId },
      orderBy: { createdAt: "asc" }
    });
  }

  /**
   * Delete the entire session and its history.
   */
  async delete() {
    const chatTable = this.getModel(this.tableNames.chat);
    await chatTable.delete({ where: { id: this.chatId } });
    // AgentSession record is deleted via Cascade from LlmChat
  }

  /**
   * Update session metadata and re-resolve agent configuration.
   *
   * Merges the given object over the stored metadata, persists it, and
   * immediately re-applies it to the live agent.
   */
  async updateMetadata(metadata) {
    const sessionTable = this.getModel(this.tableNames.agentSession);
    const newMetadata = { ...this.record.metadata, ...metadata };
    await sessionTable.update({
      where: { id: this.id },
      data: { metadata: newMetadata }
    });
    this.record.metadata = newMetadata;
    // Apply changes to the underlying agent immediately
    // resolveLazyConfig is private, so we need a cast or make it protected.
    // Given we are in the same package, we can cast.
    this.agent.resolveLazyConfig(newMetadata);
  }
}
|
304
|
+
/**
 * Creates a new agent session and its persistent chat record.
 *
 * Inserts an LlmChat row, then an AgentSession row pointing at it, then
 * builds the agent from AgentClass with the stored metadata as inputs.
 *
 * @param prisma     Prisma client instance.
 * @param llm        NodeLLM client for the agent.
 * @param AgentClass Agent class providing model/instructions defaults.
 * @param options    Optional model/provider/instructions overrides,
 *                   session metadata, table-name overrides, debug flag.
 * @returns A ready-to-use AgentSession wrapping the new records.
 */
export async function createAgentSession(prisma, llm, AgentClass, options = {}) {
  const resolvedTables = {
    agentSession: options.tableNames?.agentSession || "llmAgentSession",
    chat: options.tableNames?.chat || "llmChat",
    message: options.tableNames?.message || "llmMessage"
  };
  if (options.debug) {
    console.log(`[@node-llm/orm] createAgentSession: agentClass=${AgentClass.name}`);
  }

  // Step 1: the underlying LlmChat row.
  const classInstructions =
    typeof AgentClass.instructions === "string" ? AgentClass.instructions : null;
  const chatRow = await getTable(prisma, resolvedTables.chat).create({
    data: {
      model: options.model || AgentClass.model || null,
      provider: options.provider || null,
      instructions: options.instructions || classInstructions,
      metadata: null // Runtime metadata goes in Chat, session context in AgentSession
    }
  });

  // Step 2: the AgentSession row linking the class name to that chat.
  const sessionRow = await getTable(prisma, resolvedTables.agentSession).create({
    data: {
      agentClass: AgentClass.name,
      chatId: chatRow.id,
      metadata: options.metadata || null
    }
  });

  // Step 3: instantiate the agent with any turn-level overrides applied.
  const agentInstance = new AgentClass({
    llm,
    inputs: sessionRow.metadata,
    model: options.model,
    provider: options.provider,
    instructions: options.instructions,
    maxToolCalls: options.maxToolCalls
  });

  return new AgentSession(prisma, llm, AgentClass, sessionRow, options.tableNames, agentInstance, options.debug);
}
|
|
347
|
+
/**
 * Loads an existing agent session and re-instantiates the agent with history.
 *
 * "Code Wins": model, tools, and instructions always come from AgentClass;
 * only the message history and metadata are restored from the database.
 *
 * @param prisma     Prisma client instance.
 * @param llm        NodeLLM client for the agent.
 * @param AgentClass Agent class the session was originally created for.
 * @param sessionId  ID of the persisted AgentSession row.
 * @param options    Optional table-name overrides and debug flag.
 * @returns The rehydrated AgentSession, or null when no row matches.
 * @throws  Error when the stored agentClass differs from AgentClass.name.
 */
export async function loadAgentSession(prisma, llm, AgentClass, sessionId, options = {}) {
  const resolvedTables = {
    agentSession: options.tableNames?.agentSession || "llmAgentSession",
    chat: options.tableNames?.chat || "llmChat",
    message: options.tableNames?.message || "llmMessage"
  };
  if (options.debug) {
    console.log(`[@node-llm/orm] loadAgentSession: id=${sessionId}`);
  }

  // Step 1: locate the session row; absent rows yield null, not an error.
  const sessionRow = await getTable(prisma, resolvedTables.agentSession).findUnique({
    where: { id: sessionId }
  });
  if (!sessionRow) {
    return null;
  }

  // Step 1.5: enforce "Code Wins" sovereignty — refuse a class mismatch.
  if (sessionRow.agentClass !== AgentClass.name) {
    throw new Error(`Agent class mismatch: Session "${sessionId}" was created for "${sessionRow.agentClass}", but is being loaded with "${AgentClass.name}".`);
  }

  // Step 2: pull the persisted conversation, oldest first.
  const storedMessages = await getTable(prisma, resolvedTables.message).findMany({
    where: { chatId: sessionRow.chatId },
    orderBy: { createdAt: "asc" }
  });

  // Step 3: project DB rows into the NodeLLM Message shape.
  const restoredHistory = storedMessages.map((row) => ({
    role: row.role,
    content: row.content || ""
  }));

  // Step 4: rebuild the agent — behavior from code, history and metadata
  // (as lazy-resolution inputs) from the database.
  const agentInstance = new AgentClass({
    llm,
    messages: restoredHistory,
    inputs: sessionRow.metadata
  });

  return new AgentSession(prisma, llm, AgentClass, sessionRow, options.tableNames, agentInstance, options.debug);
}
|
|
392
|
+
/**
 * Dynamic helper to access Prisma models by name.
 * Handles both case-sensitive and case-insensitive lookups for flexibility.
 *
 * @param prisma    Prisma client (or any object exposing model delegates).
 * @param tableName Model name to resolve, e.g. "llmChat".
 * @returns The model delegate for tableName.
 * @throws  Error listing the available model keys when nothing matches.
 */
function getTable(prisma, tableName) {
  const client = prisma;

  // Fast path: the exact key exists.
  if (client[tableName]) {
    return client[tableName];
  }

  // Slow path: scan public keys (Prisma internals start with "$" or "_")
  // for a case-insensitive match.
  const publicKeys = [];
  const wanted = tableName.toLowerCase();
  for (const key of Object.keys(prisma)) {
    if (key.startsWith("$") || key.startsWith("_")) {
      continue;
    }
    publicKeys.push(key);
  }
  for (const key of publicKeys) {
    if (key.toLowerCase() === wanted && client[key]) {
      return client[key];
    }
  }

  throw new Error(`[@node-llm/orm] Prisma table "${tableName}" not found. Available tables: ${publicKeys.join(", ")}`);
}
|
|
@@ -40,12 +40,12 @@ export declare class Chat extends BaseChat {
|
|
|
40
40
|
/**
|
|
41
41
|
* Send a message and persist the conversation.
|
|
42
42
|
*/
|
|
43
|
-
ask(
|
|
43
|
+
ask(message: string, options?: AskOptions): Promise<MessageRecord>;
|
|
44
44
|
/**
|
|
45
45
|
* Stream a response and persist the conversation.
|
|
46
46
|
* Yields ChatChunk objects for full visibility of thinking, content, and tools.
|
|
47
47
|
*/
|
|
48
|
-
askStream(
|
|
48
|
+
askStream(message: string, options?: AskOptions): AsyncGenerator<ChatChunk, MessageRecord, undefined>;
|
|
49
49
|
/**
|
|
50
50
|
* Get all messages for this chat.
|
|
51
51
|
*/
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../../src/adapters/prisma/Chat.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AACnD,OAAO,KAAK,EAAE,WAAW,EAAE,SAAS,EAAE,UAAU,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAChF,OAAO,EAAE,QAAQ,EAAE,KAAK,UAAU,EAAE,KAAK,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhF,OAAO,EAAE,KAAK,UAAU,EAAE,KAAK,WAAW,EAAE,CAAC;AAE7C,MAAM,WAAW,aAAa;IAC5B,EAAE,EAAE,MAAM,CAAC;IACX,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1B,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IACzB,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,iBAAiB,EAAE,MAAM,GAAG,IAAI,CAAC;IACjC,cAAc,EAAE,MAAM,GAAG,IAAI,CAAC;IAC9B,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,SAAS,EAAE,IAAI,CAAC;CACjB;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;GAEG;AACH,qBAAa,IAAK,SAAQ,QAAQ;IAK9B,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,GAAG;IALb,OAAO,CAAC,MAAM,CAAuB;IACrC,OAAO,CAAC,iBAAiB,CAAoD;gBAGnE,MAAM,EAAE,YAAY,EACpB,GAAG,EAAE,WAAW,EACxB,MAAM,EAAE,UAAU,EAClB,OAAO,GAAE,WAAgB,EACzB,UAAU,GAAE,UAAe;IAgB7B;;OAEG;YACW,eAAe;IAqH7B;;OAEG;IACG,GAAG,CAAC,
|
|
1
|
+
{"version":3,"file":"Chat.d.ts","sourceRoot":"","sources":["../../../src/adapters/prisma/Chat.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AACnD,OAAO,KAAK,EAAE,WAAW,EAAE,SAAS,EAAE,UAAU,EAAE,KAAK,EAAE,MAAM,gBAAgB,CAAC;AAChF,OAAO,EAAE,QAAQ,EAAE,KAAK,UAAU,EAAE,KAAK,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhF,OAAO,EAAE,KAAK,UAAU,EAAE,KAAK,WAAW,EAAE,CAAC;AAE7C,MAAM,WAAW,aAAa;IAC5B,EAAE,EAAE,MAAM,CAAC;IACX,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;IAC1B,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IACzB,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;IAC5B,iBAAiB,EAAE,MAAM,GAAG,IAAI,CAAC;IACjC,cAAc,EAAE,MAAM,GAAG,IAAI,CAAC;IAC9B,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;IACvB,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,SAAS,EAAE,IAAI,CAAC;CACjB;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;GAEG;AACH,qBAAa,IAAK,SAAQ,QAAQ;IAK9B,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,GAAG;IALb,OAAO,CAAC,MAAM,CAAuB;IACrC,OAAO,CAAC,iBAAiB,CAAoD;gBAGnE,MAAM,EAAE,YAAY,EACpB,GAAG,EAAE,WAAW,EACxB,MAAM,EAAE,UAAU,EAClB,OAAO,GAAE,WAAgB,EACzB,UAAU,GAAE,UAAe;IAgB7B;;OAEG;YACW,eAAe;IAqH7B;;OAEG;IACG,GAAG,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,GAAE,UAAe,GAAG,OAAO,CAAC,aAAa,CAAC;IA8C5E;;;OAGG;IACI,SAAS,CACd,OAAO,EAAE,MAAM,EACf,OAAO,GAAE,UAAe,GACvB,cAAc,CAAC,SAAS,EAAE,aAAa,EAAE,SAAS,CAAC;IAmEtD;;OAEG;IACG,QAAQ,IAAI,OAAO,CAAC,aAAa,EAAE,CAAC;IAQ1C;;OAEG;IACG,KAAK,IAAI,OAAO,CAAC,KAAK,CAAC;CAkB9B;AAuBD;;GAEG;AACH,wBAAsB,UAAU,CAAC,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EACtD,MAAM,EAAE,GAAG,EACX,GAAG,EAAE,WAAW,EAChB,OAAO,GAAE,WAAW,GAAG;IAAE,UAAU,CAAC,EAAE,UAAU,CAAA;CAAE,GAAG,CAAa,GACjE,OAAO,CAAC,IAAI,CAAC,CA2Cf;AAED;;GAEG;AACH,wBAAsB,QAAQ,CAC5B,MAAM,EAAE,GAAG,EACX,GAAG,EAAE,WAAW,EAChB,MAAM,EAAE,MAAM,EACd,OAAO,GAAE,WAAW,GAAG;IAAE,UAAU,CAAC,EAAE,UAAU,CAAC;IAAC,KAAK,CAAC,EAAE,OAAO,CAAA;CAAO,GACvE,OAAO,CAAC,IAAI,GAA
G,IAAI,CAAC,CAiBtB"}
|
|
@@ -131,10 +131,10 @@ export class Chat extends BaseChat {
|
|
|
131
131
|
/**
|
|
132
132
|
* Send a message and persist the conversation.
|
|
133
133
|
*/
|
|
134
|
-
async ask(
|
|
134
|
+
async ask(message, options = {}) {
|
|
135
135
|
const messageModel = this.tables.message;
|
|
136
136
|
const userMessage = await this.prisma[messageModel].create({
|
|
137
|
-
data: { chatId: this.id, role: "user", content:
|
|
137
|
+
data: { chatId: this.id, role: "user", content: message }
|
|
138
138
|
});
|
|
139
139
|
const assistantMessage = await this.prisma[messageModel].create({
|
|
140
140
|
data: { chatId: this.id, role: "assistant", content: null }
|
|
@@ -149,7 +149,7 @@ export class Chat extends BaseChat {
|
|
|
149
149
|
content: m.content || ""
|
|
150
150
|
}));
|
|
151
151
|
const coreChat = await this.prepareCoreChat(history, assistantMessage.id);
|
|
152
|
-
const response = await coreChat.ask(
|
|
152
|
+
const response = await coreChat.ask(message, options);
|
|
153
153
|
return await this.prisma[messageModel].update({
|
|
154
154
|
where: { id: assistantMessage.id },
|
|
155
155
|
data: {
|
|
@@ -176,10 +176,10 @@ export class Chat extends BaseChat {
|
|
|
176
176
|
* Stream a response and persist the conversation.
|
|
177
177
|
* Yields ChatChunk objects for full visibility of thinking, content, and tools.
|
|
178
178
|
*/
|
|
179
|
-
async *askStream(
|
|
179
|
+
async *askStream(message, options = {}) {
|
|
180
180
|
const messageModel = this.tables.message;
|
|
181
181
|
const userMessage = await this.prisma[messageModel].create({
|
|
182
|
-
data: { chatId: this.id, role: "user", content:
|
|
182
|
+
data: { chatId: this.id, role: "user", content: message }
|
|
183
183
|
});
|
|
184
184
|
const assistantMessage = await this.prisma[messageModel].create({
|
|
185
185
|
data: { chatId: this.id, role: "assistant", content: null }
|
|
@@ -194,7 +194,7 @@ export class Chat extends BaseChat {
|
|
|
194
194
|
content: m.content || ""
|
|
195
195
|
}));
|
|
196
196
|
const coreChat = await this.prepareCoreChat(history, assistantMessage.id);
|
|
197
|
-
const stream = coreChat.stream(
|
|
197
|
+
const stream = coreChat.stream(message, options);
|
|
198
198
|
let fullContent = "";
|
|
199
199
|
let metadata = {};
|
|
200
200
|
for await (const chunk of stream) {
|
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
* Prisma adapter for NodeLLM ORM.
|
|
5
5
|
* Provides automatic persistence of chats, messages, tool calls, and API requests.
|
|
6
6
|
*
|
|
7
|
-
* @example
|
|
7
|
+
* @example Chat API (low-level)
|
|
8
8
|
* ```typescript
|
|
9
9
|
* import { PrismaClient } from '@prisma/client';
|
|
10
10
|
* import { createLLM } from '@node-llm/core';
|
|
@@ -21,7 +21,30 @@
|
|
|
21
21
|
* const response = await chat.ask('Hello!');
|
|
22
22
|
* console.log(response.content);
|
|
23
23
|
* ```
|
|
24
|
+
*
|
|
25
|
+
* @example AgentSession API (recommended for agents)
|
|
26
|
+
* ```typescript
|
|
27
|
+
* import { Agent } from '@node-llm/core';
|
|
28
|
+
* import { createAgentSession, loadAgentSession } from '@node-llm/orm/prisma';
|
|
29
|
+
*
|
|
30
|
+
* class SupportAgent extends Agent {
|
|
31
|
+
* static model = 'gpt-4.1';
|
|
32
|
+
* static instructions = 'You are a helpful support agent.';
|
|
33
|
+
* }
|
|
34
|
+
*
|
|
35
|
+
* // Create new session
|
|
36
|
+
* const session = await createAgentSession(prisma, llm, SupportAgent, {
|
|
37
|
+
* metadata: { userId: 'user_123' }
|
|
38
|
+
* });
|
|
39
|
+
* await session.ask('Hello!');
|
|
40
|
+
*
|
|
41
|
+
* // Resume later (Code Wins - model/tools from class, history from DB)
|
|
42
|
+
* const session = await loadAgentSession(prisma, llm, SupportAgent, sessionId);
|
|
43
|
+
* await session.ask('Continue our conversation');
|
|
44
|
+
* ```
|
|
24
45
|
*/
|
|
25
46
|
export { Chat, createChat, loadChat } from "./Chat.js";
|
|
26
|
-
export type { ChatRecord, MessageRecord, ChatOptions
|
|
47
|
+
export type { ChatRecord, MessageRecord, ChatOptions } from "./Chat.js";
|
|
48
|
+
export { AgentSession, createAgentSession, loadAgentSession } from "./AgentSession.js";
|
|
49
|
+
export type { AgentSessionRecord, CreateAgentSessionOptions, LoadAgentSessionOptions, TableNames } from "./AgentSession.js";
|
|
27
50
|
//# sourceMappingURL=index.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/adapters/prisma/index.ts"],"names":[],"mappings":"AAAA
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/adapters/prisma/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4CG;AAGH,OAAO,EAAE,IAAI,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAC;AACvD,YAAY,EAAE,UAAU,EAAE,aAAa,EAAE,WAAW,EAAE,MAAM,WAAW,CAAC;AAGxE,OAAO,EAAE,YAAY,EAAE,kBAAkB,EAAE,gBAAgB,EAAE,MAAM,mBAAmB,CAAC;AACvF,YAAY,EACV,kBAAkB,EAClB,yBAAyB,EACzB,uBAAuB,EACvB,UAAU,EACX,MAAM,mBAAmB,CAAC"}
|
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
* Prisma adapter for NodeLLM ORM.
|
|
5
5
|
* Provides automatic persistence of chats, messages, tool calls, and API requests.
|
|
6
6
|
*
|
|
7
|
-
* @example
|
|
7
|
+
* @example Chat API (low-level)
|
|
8
8
|
* ```typescript
|
|
9
9
|
* import { PrismaClient } from '@prisma/client';
|
|
10
10
|
* import { createLLM } from '@node-llm/core';
|
|
@@ -21,5 +21,29 @@
|
|
|
21
21
|
* const response = await chat.ask('Hello!');
|
|
22
22
|
* console.log(response.content);
|
|
23
23
|
* ```
|
|
24
|
+
*
|
|
25
|
+
* @example AgentSession API (recommended for agents)
|
|
26
|
+
* ```typescript
|
|
27
|
+
* import { Agent } from '@node-llm/core';
|
|
28
|
+
* import { createAgentSession, loadAgentSession } from '@node-llm/orm/prisma';
|
|
29
|
+
*
|
|
30
|
+
* class SupportAgent extends Agent {
|
|
31
|
+
* static model = 'gpt-4.1';
|
|
32
|
+
* static instructions = 'You are a helpful support agent.';
|
|
33
|
+
* }
|
|
34
|
+
*
|
|
35
|
+
* // Create new session
|
|
36
|
+
* const session = await createAgentSession(prisma, llm, SupportAgent, {
|
|
37
|
+
* metadata: { userId: 'user_123' }
|
|
38
|
+
* });
|
|
39
|
+
* await session.ask('Hello!');
|
|
40
|
+
*
|
|
41
|
+
* // Resume later (Code Wins - model/tools from class, history from DB)
|
|
42
|
+
* const session = await loadAgentSession(prisma, llm, SupportAgent, sessionId);
|
|
43
|
+
* await session.ask('Continue our conversation');
|
|
44
|
+
* ```
|
|
24
45
|
*/
|
|
46
|
+
// Chat API
|
|
25
47
|
export { Chat, createChat, loadChat } from "./Chat.js";
|
|
48
|
+
// AgentSession API
|
|
49
|
+
export { AgentSession, createAgentSession, loadAgentSession } from "./AgentSession.js";
|
package/dist/index.d.ts
CHANGED
|
@@ -23,13 +23,33 @@
|
|
|
23
23
|
* await chat.ask('Hello!');
|
|
24
24
|
* ```
|
|
25
25
|
*
|
|
26
|
+
* ## Agent Sessions (Recommended for Agents)
|
|
27
|
+
*
|
|
28
|
+
* ```typescript
|
|
29
|
+
* import { Agent } from '@node-llm/core';
|
|
30
|
+
* import { createAgentSession, loadAgentSession } from '@node-llm/orm/prisma';
|
|
31
|
+
*
|
|
32
|
+
* class SupportAgent extends Agent {
|
|
33
|
+
* static model = 'gpt-4.1';
|
|
34
|
+
* static instructions = 'You are a helpful support agent.';
|
|
35
|
+
* }
|
|
36
|
+
*
|
|
37
|
+
* // Create and persist
|
|
38
|
+
* const session = await createAgentSession(prisma, llm, SupportAgent);
|
|
39
|
+
* await session.ask('Hello!');
|
|
40
|
+
*
|
|
41
|
+
* // Resume later (Code Wins - model/tools from class, history from DB)
|
|
42
|
+
* const session = await loadAgentSession(prisma, llm, SupportAgent, sessionId);
|
|
43
|
+
* ```
|
|
44
|
+
*
|
|
26
45
|
* ## Adapters
|
|
27
46
|
*
|
|
28
47
|
* - `@node-llm/orm/prisma` - Prisma adapter (recommended)
|
|
29
48
|
*
|
|
30
49
|
* ## Schema
|
|
31
50
|
*
|
|
32
|
-
* The ORM tracks
|
|
51
|
+
* The ORM tracks five core entities:
|
|
52
|
+
* - **AgentSession** - Links Agent class to persistent Chat (v0.5.0+)
|
|
33
53
|
* - **Chat** - Session container (model, provider, instructions)
|
|
34
54
|
* - **Message** - User/Assistant conversation history
|
|
35
55
|
* - **ToolCall** - Tool executions (name, arguments, results)
|
package/dist/index.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAwDG;AAGH,cAAc,4BAA4B,CAAC"}
|
package/dist/index.js
CHANGED
|
@@ -23,13 +23,33 @@
|
|
|
23
23
|
* await chat.ask('Hello!');
|
|
24
24
|
* ```
|
|
25
25
|
*
|
|
26
|
+
* ## Agent Sessions (Recommended for Agents)
|
|
27
|
+
*
|
|
28
|
+
* ```typescript
|
|
29
|
+
* import { Agent } from '@node-llm/core';
|
|
30
|
+
* import { createAgentSession, loadAgentSession } from '@node-llm/orm/prisma';
|
|
31
|
+
*
|
|
32
|
+
* class SupportAgent extends Agent {
|
|
33
|
+
* static model = 'gpt-4.1';
|
|
34
|
+
* static instructions = 'You are a helpful support agent.';
|
|
35
|
+
* }
|
|
36
|
+
*
|
|
37
|
+
* // Create and persist
|
|
38
|
+
* const session = await createAgentSession(prisma, llm, SupportAgent);
|
|
39
|
+
* await session.ask('Hello!');
|
|
40
|
+
*
|
|
41
|
+
* // Resume later (Code Wins - model/tools from class, history from DB)
|
|
42
|
+
* const session = await loadAgentSession(prisma, llm, SupportAgent, sessionId);
|
|
43
|
+
* ```
|
|
44
|
+
*
|
|
26
45
|
* ## Adapters
|
|
27
46
|
*
|
|
28
47
|
* - `@node-llm/orm/prisma` - Prisma adapter (recommended)
|
|
29
48
|
*
|
|
30
49
|
* ## Schema
|
|
31
50
|
*
|
|
32
|
-
* The ORM tracks
|
|
51
|
+
* The ORM tracks five core entities:
|
|
52
|
+
* - **AgentSession** - Links Agent class to persistent Chat (v0.5.0+)
|
|
33
53
|
* - **Chat** - Session container (model, provider, instructions)
|
|
34
54
|
* - **Message** - User/Assistant conversation history
|
|
35
55
|
* - **ToolCall** - Tool executions (name, arguments, results)
|