@firtoz/chat-agent 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +443 -0
- package/package.json +74 -0
- package/src/chat-agent-base.ts +1128 -0
- package/src/chat-agent-drizzle.ts +227 -0
- package/src/chat-agent-sql.ts +199 -0
- package/src/chat-messages.ts +472 -0
- package/src/db/index.ts +21 -0
- package/src/db/schema.ts +47 -0
- package/src/index.ts +99 -0
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
import { and, asc, desc, eq, lt } from "drizzle-orm";
|
|
2
|
+
import { migrate } from "drizzle-orm/durable-sqlite/migrator";
|
|
3
|
+
import { createDb, type Database } from "./db/index";
|
|
4
|
+
import {
|
|
5
|
+
messagesTable,
|
|
6
|
+
type NewMessage,
|
|
7
|
+
streamChunksTable,
|
|
8
|
+
streamMetadataTable,
|
|
9
|
+
} from "./db/schema";
|
|
10
|
+
import type { ChatMessage } from "./chat-messages";
|
|
11
|
+
import { ChatAgentBase } from "./chat-agent-base";
|
|
12
|
+
import migrations from "../drizzle/migrations.js";
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* ChatAgent implementation using Drizzle ORM
|
|
16
|
+
*
|
|
17
|
+
* Uses Drizzle's type-safe query builder for database operations.
|
|
18
|
+
*/
|
|
19
|
+
export class DrizzleChatAgent<
|
|
20
|
+
Env extends Cloudflare.Env & {
|
|
21
|
+
OPENROUTER_API_KEY: string;
|
|
22
|
+
} = Cloudflare.Env & { OPENROUTER_API_KEY: string },
|
|
23
|
+
> extends ChatAgentBase<Env> {
|
|
24
|
+
private db!: Database;
|
|
25
|
+
|
|
26
|
+
// ============================================================================
|
|
27
|
+
// Database Implementation - Drizzle ORM
|
|
28
|
+
// ============================================================================
|
|
29
|
+
|
|
30
|
+
protected dbInitialize(): void {
|
|
31
|
+
// Initialize Drizzle DB
|
|
32
|
+
this.db = createDb(this.ctx.storage);
|
|
33
|
+
|
|
34
|
+
// Run migrations
|
|
35
|
+
migrate(this.db, migrations);
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
protected dbLoadMessages(): ChatMessage[] {
|
|
39
|
+
const rows = this.db
|
|
40
|
+
.select()
|
|
41
|
+
.from(messagesTable)
|
|
42
|
+
.orderBy(asc(messagesTable.createdAt))
|
|
43
|
+
.all();
|
|
44
|
+
|
|
45
|
+
return rows
|
|
46
|
+
.map((row) => {
|
|
47
|
+
try {
|
|
48
|
+
return JSON.parse(row.messageJson) as ChatMessage;
|
|
49
|
+
} catch (err) {
|
|
50
|
+
console.error(`Failed to parse message ${row.id}:`, err);
|
|
51
|
+
return null;
|
|
52
|
+
}
|
|
53
|
+
})
|
|
54
|
+
.filter((msg): msg is ChatMessage => msg !== null);
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
protected dbSaveMessage(msg: ChatMessage): void {
|
|
58
|
+
const newMsg: NewMessage = {
|
|
59
|
+
id: msg.id,
|
|
60
|
+
role: msg.role,
|
|
61
|
+
messageJson: JSON.stringify(msg),
|
|
62
|
+
createdAt: new Date(msg.createdAt),
|
|
63
|
+
};
|
|
64
|
+
|
|
65
|
+
this.db
|
|
66
|
+
.insert(messagesTable)
|
|
67
|
+
.values(newMsg)
|
|
68
|
+
.onConflictDoUpdate({
|
|
69
|
+
target: messagesTable.id,
|
|
70
|
+
set: { messageJson: newMsg.messageJson },
|
|
71
|
+
})
|
|
72
|
+
.run();
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
protected dbClearAll(): void {
|
|
76
|
+
this.db.delete(messagesTable).run();
|
|
77
|
+
this.db.delete(streamChunksTable).run();
|
|
78
|
+
this.db.delete(streamMetadataTable).run();
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
protected dbFindActiveStream(): {
|
|
82
|
+
id: string;
|
|
83
|
+
messageId: string;
|
|
84
|
+
createdAt: Date;
|
|
85
|
+
} | null {
|
|
86
|
+
const activeStreams = this.db
|
|
87
|
+
.select()
|
|
88
|
+
.from(streamMetadataTable)
|
|
89
|
+
.where(eq(streamMetadataTable.status, "streaming"))
|
|
90
|
+
.orderBy(asc(streamMetadataTable.createdAt))
|
|
91
|
+
.limit(1)
|
|
92
|
+
.all();
|
|
93
|
+
|
|
94
|
+
if (activeStreams.length === 0) {
|
|
95
|
+
return null;
|
|
96
|
+
}
|
|
97
|
+
|
|
98
|
+
const stream = activeStreams[0];
|
|
99
|
+
return {
|
|
100
|
+
id: stream.id,
|
|
101
|
+
messageId: stream.messageId,
|
|
102
|
+
createdAt: stream.createdAt,
|
|
103
|
+
};
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
protected dbDeleteStreamWithChunks(streamId: string): void {
|
|
107
|
+
this.db
|
|
108
|
+
.delete(streamChunksTable)
|
|
109
|
+
.where(eq(streamChunksTable.streamId, streamId))
|
|
110
|
+
.run();
|
|
111
|
+
this.db
|
|
112
|
+
.delete(streamMetadataTable)
|
|
113
|
+
.where(eq(streamMetadataTable.id, streamId))
|
|
114
|
+
.run();
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
protected dbInsertStreamMetadata(streamId: string, messageId: string): void {
|
|
118
|
+
this.db
|
|
119
|
+
.insert(streamMetadataTable)
|
|
120
|
+
.values({
|
|
121
|
+
id: streamId,
|
|
122
|
+
messageId,
|
|
123
|
+
status: "streaming",
|
|
124
|
+
createdAt: new Date(),
|
|
125
|
+
})
|
|
126
|
+
.run();
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
protected dbUpdateStreamStatus(
|
|
130
|
+
streamId: string,
|
|
131
|
+
status: "completed" | "error",
|
|
132
|
+
): void {
|
|
133
|
+
this.db
|
|
134
|
+
.update(streamMetadataTable)
|
|
135
|
+
.set({ status, completedAt: new Date() })
|
|
136
|
+
.where(eq(streamMetadataTable.id, streamId))
|
|
137
|
+
.run();
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
protected dbDeleteOldCompletedStreams(cutoffMs: number): void {
|
|
141
|
+
const cutoff = new Date(cutoffMs);
|
|
142
|
+
|
|
143
|
+
// Delete old stream chunks for completed streams
|
|
144
|
+
const oldStreams = this.db
|
|
145
|
+
.select({ id: streamMetadataTable.id })
|
|
146
|
+
.from(streamMetadataTable)
|
|
147
|
+
.where(
|
|
148
|
+
and(
|
|
149
|
+
eq(streamMetadataTable.status, "completed"),
|
|
150
|
+
lt(streamMetadataTable.completedAt, cutoff),
|
|
151
|
+
),
|
|
152
|
+
)
|
|
153
|
+
.all();
|
|
154
|
+
|
|
155
|
+
for (const stream of oldStreams) {
|
|
156
|
+
this.db
|
|
157
|
+
.delete(streamChunksTable)
|
|
158
|
+
.where(eq(streamChunksTable.streamId, stream.id))
|
|
159
|
+
.run();
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
// Delete old stream metadata
|
|
163
|
+
this.db
|
|
164
|
+
.delete(streamMetadataTable)
|
|
165
|
+
.where(
|
|
166
|
+
and(
|
|
167
|
+
eq(streamMetadataTable.status, "completed"),
|
|
168
|
+
lt(streamMetadataTable.completedAt, cutoff),
|
|
169
|
+
),
|
|
170
|
+
)
|
|
171
|
+
.run();
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
protected dbFindMaxChunkIndex(streamId: string): number | null {
|
|
175
|
+
const lastChunk = this.db
|
|
176
|
+
.select({ maxIndex: streamChunksTable.chunkIndex })
|
|
177
|
+
.from(streamChunksTable)
|
|
178
|
+
.where(eq(streamChunksTable.streamId, streamId))
|
|
179
|
+
.orderBy(asc(streamChunksTable.chunkIndex))
|
|
180
|
+
.limit(1)
|
|
181
|
+
.all();
|
|
182
|
+
|
|
183
|
+
const firstChunk = lastChunk[0];
|
|
184
|
+
return firstChunk?.maxIndex != null ? firstChunk.maxIndex : null;
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
protected dbInsertChunks(
|
|
188
|
+
chunks: Array<{
|
|
189
|
+
id: string;
|
|
190
|
+
streamId: string;
|
|
191
|
+
content: string;
|
|
192
|
+
chunkIndex: number;
|
|
193
|
+
}>,
|
|
194
|
+
): void {
|
|
195
|
+
const now = new Date();
|
|
196
|
+
for (const chunk of chunks) {
|
|
197
|
+
this.db
|
|
198
|
+
.insert(streamChunksTable)
|
|
199
|
+
.values({
|
|
200
|
+
id: chunk.id,
|
|
201
|
+
streamId: chunk.streamId,
|
|
202
|
+
content: chunk.content,
|
|
203
|
+
chunkIndex: chunk.chunkIndex,
|
|
204
|
+
createdAt: now,
|
|
205
|
+
})
|
|
206
|
+
.run();
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
protected dbGetChunks(streamId: string): string[] {
|
|
211
|
+
const rows = this.db
|
|
212
|
+
.select()
|
|
213
|
+
.from(streamChunksTable)
|
|
214
|
+
.where(eq(streamChunksTable.streamId, streamId))
|
|
215
|
+
.orderBy(asc(streamChunksTable.chunkIndex))
|
|
216
|
+
.all();
|
|
217
|
+
|
|
218
|
+
return rows.map((r) => r.content);
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
protected dbDeleteChunks(streamId: string): void {
|
|
222
|
+
this.db
|
|
223
|
+
.delete(streamChunksTable)
|
|
224
|
+
.where(eq(streamChunksTable.streamId, streamId))
|
|
225
|
+
.run();
|
|
226
|
+
}
|
|
227
|
+
}
|
|
@@ -0,0 +1,199 @@
|
|
|
1
|
+
import type { ChatMessage } from "./chat-messages";
|
|
2
|
+
import { ChatAgentBase } from "./chat-agent-base";
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* ChatAgent implementation using raw SQL (like @cloudflare/ai-chat)
|
|
6
|
+
*
|
|
7
|
+
* Uses Agent's built-in `this.sql` template tag for database operations.
|
|
8
|
+
*/
|
|
9
|
+
export class SqlChatAgent<
|
|
10
|
+
Env extends Cloudflare.Env & {
|
|
11
|
+
OPENROUTER_API_KEY: string;
|
|
12
|
+
} = Cloudflare.Env & { OPENROUTER_API_KEY: string },
|
|
13
|
+
> extends ChatAgentBase<Env> {
|
|
14
|
+
// ============================================================================
|
|
15
|
+
// Database Implementation - Raw SQL
|
|
16
|
+
// ============================================================================
|
|
17
|
+
|
|
18
|
+
protected dbInitialize(): void {
|
|
19
|
+
// Create tables for chat messages and resumable streaming
|
|
20
|
+
// Based on @cloudflare/ai-chat pattern from reference
|
|
21
|
+
this.sql`create table if not exists cf_ai_chat_agent_messages (
|
|
22
|
+
id text primary key,
|
|
23
|
+
message text not null,
|
|
24
|
+
created_at datetime default current_timestamp
|
|
25
|
+
)`;
|
|
26
|
+
|
|
27
|
+
this.sql`create table if not exists cf_ai_chat_stream_chunks (
|
|
28
|
+
id text primary key,
|
|
29
|
+
stream_id text not null,
|
|
30
|
+
body text not null,
|
|
31
|
+
chunk_index integer not null,
|
|
32
|
+
created_at integer not null
|
|
33
|
+
)`;
|
|
34
|
+
|
|
35
|
+
this.sql`create table if not exists cf_ai_chat_stream_metadata (
|
|
36
|
+
id text primary key,
|
|
37
|
+
request_id text not null,
|
|
38
|
+
status text not null,
|
|
39
|
+
created_at integer not null,
|
|
40
|
+
completed_at integer
|
|
41
|
+
)`;
|
|
42
|
+
|
|
43
|
+
this.sql`create index if not exists idx_stream_chunks_stream_id
|
|
44
|
+
on cf_ai_chat_stream_chunks(stream_id, chunk_index)`;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
protected dbLoadMessages(): ChatMessage[] {
|
|
48
|
+
const rows =
|
|
49
|
+
(this
|
|
50
|
+
.sql`select * from cf_ai_chat_agent_messages order by created_at` as Array<{
|
|
51
|
+
id: string;
|
|
52
|
+
message: string;
|
|
53
|
+
}>) || [];
|
|
54
|
+
|
|
55
|
+
return rows
|
|
56
|
+
.map((row) => {
|
|
57
|
+
try {
|
|
58
|
+
return JSON.parse(row.message) as ChatMessage;
|
|
59
|
+
} catch (err) {
|
|
60
|
+
console.error(`Failed to parse message ${row.id}:`, err);
|
|
61
|
+
return null;
|
|
62
|
+
}
|
|
63
|
+
})
|
|
64
|
+
.filter((msg): msg is ChatMessage => msg !== null);
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
protected dbSaveMessage(msg: ChatMessage): void {
|
|
68
|
+
const messageJson = JSON.stringify(msg);
|
|
69
|
+
this.sql`
|
|
70
|
+
insert into cf_ai_chat_agent_messages (id, message)
|
|
71
|
+
values (${msg.id}, ${messageJson})
|
|
72
|
+
on conflict(id) do update set message = excluded.message
|
|
73
|
+
`;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
protected dbClearAll(): void {
|
|
77
|
+
this.sql`delete from cf_ai_chat_agent_messages`;
|
|
78
|
+
this.sql`delete from cf_ai_chat_stream_chunks`;
|
|
79
|
+
this.sql`delete from cf_ai_chat_stream_metadata`;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
protected dbFindActiveStream(): {
|
|
83
|
+
id: string;
|
|
84
|
+
messageId: string;
|
|
85
|
+
createdAt: Date;
|
|
86
|
+
} | null {
|
|
87
|
+
const activeStreams = this.sql`
|
|
88
|
+
select * from cf_ai_chat_stream_metadata
|
|
89
|
+
where status = 'streaming'
|
|
90
|
+
order by created_at desc
|
|
91
|
+
limit 1
|
|
92
|
+
` as Array<{
|
|
93
|
+
id: string;
|
|
94
|
+
request_id: string;
|
|
95
|
+
status: string;
|
|
96
|
+
created_at: number;
|
|
97
|
+
completed_at: number | null;
|
|
98
|
+
}>;
|
|
99
|
+
|
|
100
|
+
if (!activeStreams || activeStreams.length === 0) {
|
|
101
|
+
return null;
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
const stream = activeStreams[0];
|
|
105
|
+
return {
|
|
106
|
+
id: stream.id,
|
|
107
|
+
messageId: stream.request_id,
|
|
108
|
+
createdAt: new Date(stream.created_at),
|
|
109
|
+
};
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
protected dbDeleteStreamWithChunks(streamId: string): void {
|
|
113
|
+
this
|
|
114
|
+
.sql`delete from cf_ai_chat_stream_chunks where stream_id = ${streamId}`;
|
|
115
|
+
this.sql`delete from cf_ai_chat_stream_metadata where id = ${streamId}`;
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
protected dbInsertStreamMetadata(streamId: string, messageId: string): void {
|
|
119
|
+
const now = Date.now();
|
|
120
|
+
this.sql`
|
|
121
|
+
insert into cf_ai_chat_stream_metadata (id, request_id, status, created_at)
|
|
122
|
+
values (${streamId}, ${messageId}, 'streaming', ${now})
|
|
123
|
+
`;
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
protected dbUpdateStreamStatus(
|
|
127
|
+
streamId: string,
|
|
128
|
+
status: "completed" | "error",
|
|
129
|
+
): void {
|
|
130
|
+
const now = Date.now();
|
|
131
|
+
this.sql`
|
|
132
|
+
update cf_ai_chat_stream_metadata
|
|
133
|
+
set status = ${status}, completed_at = ${now}
|
|
134
|
+
where id = ${streamId}
|
|
135
|
+
`;
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
protected dbDeleteOldCompletedStreams(cutoffMs: number): void {
|
|
139
|
+
// Delete old stream chunks first
|
|
140
|
+
this.sql`
|
|
141
|
+
delete from cf_ai_chat_stream_chunks
|
|
142
|
+
where stream_id in (
|
|
143
|
+
select id from cf_ai_chat_stream_metadata
|
|
144
|
+
where status = 'completed' and completed_at < ${cutoffMs}
|
|
145
|
+
)
|
|
146
|
+
`;
|
|
147
|
+
// Then delete the metadata
|
|
148
|
+
this.sql`
|
|
149
|
+
delete from cf_ai_chat_stream_metadata
|
|
150
|
+
where status = 'completed' and completed_at < ${cutoffMs}
|
|
151
|
+
`;
|
|
152
|
+
}
|
|
153
|
+
|
|
154
|
+
protected dbFindMaxChunkIndex(streamId: string): number | null {
|
|
155
|
+
const result = this.sql`
|
|
156
|
+
select max(chunk_index) as max_index
|
|
157
|
+
from cf_ai_chat_stream_chunks
|
|
158
|
+
where stream_id = ${streamId}
|
|
159
|
+
` as Array<{ max_index: number | null }>;
|
|
160
|
+
|
|
161
|
+
if (!result || result.length === 0 || result[0].max_index == null) {
|
|
162
|
+
return null;
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
return result[0].max_index;
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
protected dbInsertChunks(
|
|
169
|
+
chunks: Array<{
|
|
170
|
+
id: string;
|
|
171
|
+
streamId: string;
|
|
172
|
+
content: string;
|
|
173
|
+
chunkIndex: number;
|
|
174
|
+
}>,
|
|
175
|
+
): void {
|
|
176
|
+
const now = Date.now();
|
|
177
|
+
for (const chunk of chunks) {
|
|
178
|
+
this.sql`
|
|
179
|
+
insert into cf_ai_chat_stream_chunks (id, stream_id, body, chunk_index, created_at)
|
|
180
|
+
values (${chunk.id}, ${chunk.streamId}, ${chunk.content}, ${chunk.chunkIndex}, ${now})
|
|
181
|
+
`;
|
|
182
|
+
}
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
protected dbGetChunks(streamId: string): string[] {
|
|
186
|
+
const rows = this.sql`
|
|
187
|
+
select body from cf_ai_chat_stream_chunks
|
|
188
|
+
where stream_id = ${streamId}
|
|
189
|
+
order by chunk_index asc
|
|
190
|
+
` as Array<{ body: string }>;
|
|
191
|
+
|
|
192
|
+
return (rows || []).map((r) => r.body);
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
protected dbDeleteChunks(streamId: string): void {
|
|
196
|
+
this
|
|
197
|
+
.sql`delete from cf_ai_chat_stream_chunks where stream_id = ${streamId}`;
|
|
198
|
+
}
|
|
199
|
+
}
|