agentgui 1.0.380 → 1.0.382
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/acp-queries.js +162 -0
- package/database.js +179 -16
- package/lib/agent-descriptors.js +332 -0
- package/package.json +1 -1
- package/server.js +6 -22
package/acp-queries.js
ADDED
|
@@ -0,0 +1,162 @@
|
|
|
1
|
+
import { randomUUID } from 'crypto';

// Generate a prefixed, human-scannable internal id: "<prefix>-<epoch-ms>-<random>".
// Not cryptographically secure; used only for internal row ids (uuid() covers
// externally visible ids). slice(2, 11) replaces the deprecated substr(2, 9)
// and yields the same up-to-9 base-36 characters.
const gid = (p) => `${p}-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;

// RFC 4122 v4 UUID for externally visible ids (threads, runs, checkpoints).
const uuid = () => randomUUID();

// Millisecond epoch timestamp -> ISO-8601 string.
const iso = (t) => new Date(t).toISOString();

// Serialize a value to JSON.
const j = (o) => JSON.stringify(o);

// Parse JSON defensively: malformed input yields an empty object instead of throwing.
const jp = (s) => { try { return JSON.parse(s); } catch { return {}; } };
|
|
7
|
+
|
|
8
|
+
/**
 * Build the ACP (Agent Client Protocol) query layer over the app database.
 *
 * Threads are backed by the existing `conversations` table; runs are split
 * between `sessions` (execution record) and `run_metadata` (ACP bookkeeping).
 * Timestamps are stored as millisecond epochs and exposed as ISO-8601 strings.
 *
 * @param {object} db - database handle exposing `.transaction(fn)` (better-sqlite3 style — TODO confirm driver)
 * @param {Function} prep - statement preparer: prep(sql) returns an object with .run/.get/.all
 * @returns {object} bag of thread / checkpoint / run query methods
 */
export function createACPQueries(db, prep) {
  return {
    // Create a new thread: a `conversations` row with agentId 'unknown',
    // no title, and status 'idle'.
    createThread(metadata = {}) {
      const id = uuid(), now = Date.now();
      prep('INSERT INTO conversations (id, agentId, title, created_at, updated_at, status, metadata) VALUES (?, ?, ?, ?, ?, ?, ?)').run(id, 'unknown', null, now, now, 'idle', j(metadata));
      return { thread_id: id, created_at: iso(now), updated_at: iso(now), metadata, status: 'idle' };
    },
    // Fetch a thread by id; returns null when missing.
    getThread(tid) {
      const r = prep('SELECT * FROM conversations WHERE id = ?').get(tid);
      if (!r) return null;
      return { thread_id: r.id, created_at: iso(r.created_at), updated_at: iso(r.updated_at), metadata: jp(r.metadata), status: r.status || 'idle' };
    },
    // Partially update a thread's metadata and/or status; throws if the thread
    // does not exist. `!== undefined` (not truthiness) lets callers set falsy values.
    patchThread(tid, upd) {
      const t = this.getThread(tid);
      if (!t) throw new Error('Thread not found');
      const now = Date.now(), meta = upd.metadata !== undefined ? upd.metadata : t.metadata, stat = upd.status !== undefined ? upd.status : t.status;
      prep('UPDATE conversations SET metadata = ?, status = ?, updated_at = ? WHERE id = ?').run(j(meta), stat, now, tid);
      return { thread_id: tid, created_at: t.created_at, updated_at: iso(now), metadata: meta, status: stat };
    },
    // Delete a thread and every dependent row in one transaction.
    // NOTE(review): the pending-run guard runs OUTSIDE the transaction, so a run
    // created between the check and the delete would still be removed — confirm
    // this check-then-act window is acceptable.
    deleteThread(tid) {
      const pr = prep('SELECT COUNT(*) as count FROM run_metadata WHERE thread_id = ? AND status = ?').get(tid, 'pending');
      if (pr && pr.count > 0) throw new Error('Cannot delete thread with pending runs');
      db.transaction(() => {
        prep('DELETE FROM thread_states WHERE thread_id = ?').run(tid);
        prep('DELETE FROM checkpoints WHERE thread_id = ?').run(tid);
        prep('DELETE FROM run_metadata WHERE thread_id = ?').run(tid);
        prep('DELETE FROM sessions WHERE conversationId = ?').run(tid);
        prep('DELETE FROM messages WHERE conversationId = ?').run(tid);
        prep('DELETE FROM chunks WHERE conversationId = ?').run(tid);
        prep('DELETE FROM events WHERE conversationId = ?').run(tid);
        prep('DELETE FROM conversations WHERE id = ?').run(tid);
      })();
      return true;
    },
    // Persist a snapshot of thread state (serialized to JSON), optionally tied
    // to a checkpoint id.
    saveThreadState(tid, cid, sd) {
      const id = gid('state'), now = Date.now();
      prep('INSERT INTO thread_states (id, thread_id, checkpoint_id, state_data, created_at) VALUES (?, ?, ?, ?, ?)').run(id, tid, cid, j(sd), now);
      return { id, thread_id: tid, checkpoint_id: cid, created_at: iso(now) };
    },
    // Latest saved state for a thread — scoped to a checkpoint when `cid` is given.
    getThreadState(tid, cid = null) {
      const r = cid ? prep('SELECT * FROM thread_states WHERE thread_id = ? AND checkpoint_id = ? ORDER BY created_at DESC LIMIT 1').get(tid, cid) : prep('SELECT * FROM thread_states WHERE thread_id = ? ORDER BY created_at DESC LIMIT 1').get(tid);
      if (!r) return null;
      const sd = jp(r.state_data);
      return { checkpoint: { checkpoint_id: r.checkpoint_id }, values: sd.values || {}, messages: sd.messages || [], metadata: sd.metadata || {} };
    },
    // Paginated state history, newest first.
    getThreadHistory(tid, lim = 50, off = 0) {
      const tot = prep('SELECT COUNT(*) as count FROM thread_states WHERE thread_id = ?').get(tid).count;
      const rows = prep('SELECT * FROM thread_states WHERE thread_id = ? ORDER BY created_at DESC LIMIT ? OFFSET ?').all(tid, lim, off);
      const states = rows.map(r => { const sd = jp(r.state_data); return { checkpoint: { checkpoint_id: r.checkpoint_id }, values: sd.values || {}, messages: sd.messages || [], metadata: sd.metadata || {} }; });
      return { states, total: tot, limit: lim, offset: off, hasMore: off + lim < tot };
    },
    // Fork a thread: copies the conversation row (title suffixed " (copy)"),
    // its checkpoints, states, and messages — all stamped with a fresh timestamp.
    // NOTE(review): copied thread_states keep the ORIGINAL checkpoint_id values,
    // which point at the source thread's checkpoints, not the freshly created
    // copies — confirm this cross-thread reference is intended.
    copyThread(stid) {
      const st = this.getThread(stid);
      if (!st) throw new Error('Source thread not found');
      const ntid = uuid(), now = Date.now();
      db.transaction(() => {
        prep('INSERT INTO conversations (id, agentId, title, created_at, updated_at, status, metadata, workingDirectory) SELECT ?, agentId, title || \' (copy)\', ?, ?, status, metadata, workingDirectory FROM conversations WHERE id = ?').run(ntid, now, now, stid);
        const cps = prep('SELECT * FROM checkpoints WHERE thread_id = ? ORDER BY sequence ASC').all(stid);
        cps.forEach(cp => prep('INSERT INTO checkpoints (id, thread_id, checkpoint_name, sequence, created_at) VALUES (?, ?, ?, ?, ?)').run(uuid(), ntid, cp.checkpoint_name, cp.sequence, now));
        const sts = prep('SELECT * FROM thread_states WHERE thread_id = ? ORDER BY created_at ASC').all(stid);
        sts.forEach(s => prep('INSERT INTO thread_states (id, thread_id, checkpoint_id, state_data, created_at) VALUES (?, ?, ?, ?, ?)').run(gid('state'), ntid, s.checkpoint_id, s.state_data, now));
        const msgs = prep('SELECT * FROM messages WHERE conversationId = ? ORDER BY created_at ASC').all(stid);
        msgs.forEach(m => prep('INSERT INTO messages (id, conversationId, role, content, created_at) VALUES (?, ?, ?, ?, ?)').run(gid('msg'), ntid, m.role, m.content, now));
      })();
      return this.getThread(ntid);
    },
    // Create a checkpoint with the next sequence number for the thread
    // (starts at 0 when none exist).
    createCheckpoint(tid, name = null) {
      const id = uuid(), now = Date.now();
      const ms = prep('SELECT MAX(sequence) as max FROM checkpoints WHERE thread_id = ?').get(tid);
      const seq = (ms?.max ?? -1) + 1;
      prep('INSERT INTO checkpoints (id, thread_id, checkpoint_name, sequence, created_at) VALUES (?, ?, ?, ?, ?)').run(id, tid, name, seq, now);
      return { checkpoint_id: id, thread_id: tid, checkpoint_name: name, sequence: seq, created_at: iso(now) };
    },
    // Fetch a checkpoint by id; null when missing.
    getCheckpoint(cid) {
      const r = prep('SELECT * FROM checkpoints WHERE id = ?').get(cid);
      if (!r) return null;
      return { checkpoint_id: r.id, thread_id: r.thread_id, checkpoint_name: r.checkpoint_name, sequence: r.sequence, created_at: iso(r.created_at) };
    },
    // Paginated checkpoint listing, highest sequence first.
    listCheckpoints(tid, lim = 50, off = 0) {
      const tot = prep('SELECT COUNT(*) as count FROM checkpoints WHERE thread_id = ?').get(tid).count;
      const rows = prep('SELECT * FROM checkpoints WHERE thread_id = ? ORDER BY sequence DESC LIMIT ? OFFSET ?').all(tid, lim, off);
      const cps = rows.map(r => ({ checkpoint_id: r.id, thread_id: r.thread_id, checkpoint_name: r.checkpoint_name, sequence: r.sequence, created_at: iso(r.created_at) }));
      return { checkpoints: cps, total: tot, limit: lim, offset: off, hasMore: off + lim < tot };
    },
    // Create a run. When no thread id is supplied, an auto-created "Stateless Run"
    // conversation (atid) satisfies the sessions FK, while run_metadata.thread_id
    // and the returned thread_id keep the caller's original tid (null when
    // stateless). NOTE(review): confirm this sessions/run_metadata asymmetry is
    // intended — stateless conversations are never reachable via thread queries.
    createRun(aid, tid = null, inp = null, cfg = null, wh = null) {
      const rid = uuid(), now = Date.now(), mid = gid('runmeta');
      let atid = tid;
      if (!tid) {
        atid = uuid();
        prep('INSERT INTO conversations (id, agentId, title, created_at, updated_at, status, metadata) VALUES (?, ?, ?, ?, ?, ?, ?)').run(atid, aid, 'Stateless Run', now, now, 'idle', '{"stateless":true}');
      }
      prep('INSERT INTO sessions (id, conversationId, status, started_at, completed_at, response, error) VALUES (?, ?, ?, ?, ?, ?, ?)').run(rid, atid, 'pending', now, null, null, null);
      prep('INSERT INTO run_metadata (id, run_id, thread_id, agent_id, status, input, config, webhook_url, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)').run(mid, rid, tid, aid, 'pending', inp ? j(inp) : null, cfg ? j(cfg) : null, wh, now, now);
      return { run_id: rid, thread_id: tid, agent_id: aid, status: 'pending', created_at: iso(now), updated_at: iso(now) };
    },
    // Fetch a run's ACP view from run_metadata; null when missing.
    getRun(rid) {
      const r = prep('SELECT * FROM run_metadata WHERE run_id = ?').get(rid);
      if (!r) return null;
      return { run_id: r.run_id, thread_id: r.thread_id, agent_id: r.agent_id, status: r.status, created_at: iso(r.created_at), updated_at: iso(r.updated_at) };
    },
    // Mirror a status change into both run_metadata and sessions rows.
    updateRunStatus(rid, stat) {
      const now = Date.now();
      prep('UPDATE run_metadata SET status = ?, updated_at = ? WHERE run_id = ?').run(stat, now, rid);
      prep('UPDATE sessions SET status = ? WHERE id = ?').run(stat, rid);
      return this.getRun(rid);
    },
    // Cancel a run unless it already reached a terminal status.
    cancelRun(rid) {
      const r = this.getRun(rid);
      if (!r) throw new Error('Run not found');
      if (['success', 'error', 'cancelled'].includes(r.status)) throw new Error('Run already completed or cancelled');
      return this.updateRunStatus(rid, 'cancelled');
    },
    // Delete a run and its streamed output/events in one transaction.
    deleteRun(rid) {
      db.transaction(() => {
        prep('DELETE FROM chunks WHERE sessionId = ?').run(rid);
        prep('DELETE FROM events WHERE sessionId = ?').run(rid);
        prep('DELETE FROM run_metadata WHERE run_id = ?').run(rid);
        prep('DELETE FROM sessions WHERE id = ?').run(rid);
      })();
      return true;
    },
    // Paginated run listing for a thread, newest first.
    getThreadRuns(tid, lim = 50, off = 0) {
      const tot = prep('SELECT COUNT(*) as count FROM run_metadata WHERE thread_id = ?').get(tid).count;
      const rows = prep('SELECT * FROM run_metadata WHERE thread_id = ? ORDER BY created_at DESC LIMIT ? OFFSET ?').all(tid, lim, off);
      const runs = rows.map(r => ({ run_id: r.run_id, thread_id: r.thread_id, agent_id: r.agent_id, status: r.status, created_at: iso(r.created_at), updated_at: iso(r.updated_at) }));
      return { runs, total: tot, limit: lim, offset: off, hasMore: off + lim < tot };
    },
    // Search threads by status, creation date range, and metadata key/value pairs.
    // Metadata matching does a LIKE over the serialized JSON, so it only matches
    // string-valued keys and treats %/_ inside values as wildcards — approximate
    // matching, not exact. All values are bound parameters (no SQL injection).
    searchThreads(flt = {}) {
      const { metadata, status, dateRange, limit = 50, offset = 0 } = flt;
      let wh = "status != 'deleted'", prm = [];
      if (status) { wh += ' AND status = ?'; prm.push(status); }
      if (dateRange?.start) { wh += ' AND created_at >= ?'; prm.push(new Date(dateRange.start).getTime()); }
      if (dateRange?.end) { wh += ' AND created_at <= ?'; prm.push(new Date(dateRange.end).getTime()); }
      if (metadata) { for (const [k, v] of Object.entries(metadata)) { wh += ' AND metadata LIKE ?'; prm.push(`%"${k}":"${v}"%`); } }
      const tot = prep(`SELECT COUNT(*) as count FROM conversations WHERE ${wh}`).get(...prm).count;
      const rows = prep(`SELECT * FROM conversations WHERE ${wh} ORDER BY updated_at DESC LIMIT ? OFFSET ?`).all(...prm, limit, offset);
      const ths = rows.map(r => ({ thread_id: r.id, created_at: iso(r.created_at), updated_at: iso(r.updated_at), metadata: jp(r.metadata), status: r.status || 'idle' }));
      return { threads: ths, total: tot, limit, offset, hasMore: offset + limit < tot };
    },
    // Placeholder: agent search is not backed by the database yet.
    searchAgents(flt = {}) {
      return [];
    },
    // Search runs by agent, thread, and/or status; paginated, newest first.
    searchRuns(flt = {}) {
      const { agent_id, thread_id, status, limit = 50, offset = 0 } = flt;
      let wh = '1=1', prm = [];
      if (agent_id) { wh += ' AND agent_id = ?'; prm.push(agent_id); }
      if (thread_id) { wh += ' AND thread_id = ?'; prm.push(thread_id); }
      if (status) { wh += ' AND status = ?'; prm.push(status); }
      const tot = prep(`SELECT COUNT(*) as count FROM run_metadata WHERE ${wh}`).get(...prm).count;
      const rows = prep(`SELECT * FROM run_metadata WHERE ${wh} ORDER BY created_at DESC LIMIT ? OFFSET ?`).all(...prm, limit, offset);
      const runs = rows.map(r => ({ run_id: r.run_id, thread_id: r.thread_id, agent_id: r.agent_id, status: r.status, created_at: iso(r.created_at), updated_at: iso(r.updated_at) }));
      return { runs, total: tot, limit, offset, hasMore: offset + limit < tot };
    }
  };
}
|
package/database.js
CHANGED
|
@@ -2,6 +2,7 @@ import fs from 'fs';
|
|
|
2
2
|
import path from 'path';
|
|
3
3
|
import os from 'os';
|
|
4
4
|
import { createRequire } from 'module';
|
|
5
|
+
import { createACPQueries } from './acp-queries.js';
|
|
5
6
|
|
|
6
7
|
const require = createRequire(import.meta.url);
|
|
7
8
|
|
|
@@ -226,8 +227,102 @@ function migrateFromJson() {
|
|
|
226
227
|
}
|
|
227
228
|
}
|
|
228
229
|
|
|
230
|
+
/**
 * Idempotent startup migration adding the ACP (Agent Client Protocol) schema:
 * thread_states, checkpoints, and run_metadata tables, extra columns on
 * conversations/sessions, and supporting indexes.
 *
 * Safe to call on every startup: all DDL uses IF NOT EXISTS and column
 * additions are guarded by PRAGMA table_info checks. Runs inside a single
 * transaction; failures are logged and swallowed so a migration error does
 * not prevent the app from starting.
 *
 * NOTE(review): a second ACP migration block further down this file recreates
 * the same tables with a different run_metadata/thread_states shape; because
 * of IF NOT EXISTS, whichever runs first (this one) defines the real schema.
 */
function migrateToACP() {
  try {
    const migrate = db.transaction(() => {
      // Create new tables for ACP support
      db.exec(`
        CREATE TABLE IF NOT EXISTS thread_states (
          id TEXT PRIMARY KEY,
          thread_id TEXT NOT NULL,
          checkpoint_id TEXT,
          state_data TEXT NOT NULL,
          created_at INTEGER NOT NULL,
          FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE,
          FOREIGN KEY (checkpoint_id) REFERENCES checkpoints(id) ON DELETE SET NULL
        )
      `);

      db.exec(`
        CREATE TABLE IF NOT EXISTS checkpoints (
          id TEXT PRIMARY KEY,
          thread_id TEXT NOT NULL,
          checkpoint_name TEXT NOT NULL,
          sequence INTEGER NOT NULL,
          created_at INTEGER NOT NULL,
          FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
        )
      `);

      db.exec(`
        CREATE TABLE IF NOT EXISTS run_metadata (
          id TEXT PRIMARY KEY,
          run_id TEXT NOT NULL UNIQUE,
          thread_id TEXT,
          agent_id TEXT NOT NULL,
          status TEXT NOT NULL DEFAULT 'pending',
          input TEXT,
          config TEXT,
          webhook_url TEXT,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL,
          FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
        )
      `);

      // Add new columns to existing tables
      const convCols = db.prepare("PRAGMA table_info(conversations)").all();
      const convColNames = convCols.map(c => c.name);

      if (!convColNames.includes('metadata')) {
        db.exec('ALTER TABLE conversations ADD COLUMN metadata TEXT');
      }

      const sessCols = db.prepare("PRAGMA table_info(sessions)").all();
      const sessColNames = sessCols.map(c => c.name);

      // Column name -> SQLite type; each added only when missing, since
      // ALTER TABLE ADD COLUMN throws on duplicates.
      const sessionCols = {
        run_id: 'TEXT',
        input: 'TEXT',
        config: 'TEXT',
        interrupt: 'TEXT'
      };

      for (const [colName, colType] of Object.entries(sessionCols)) {
        if (!sessColNames.includes(colName)) {
          db.exec(`ALTER TABLE sessions ADD COLUMN ${colName} ${colType}`);
        }
      }

      // Create indexes
      db.exec(`
        CREATE INDEX IF NOT EXISTS idx_thread_states_thread ON thread_states(thread_id);
        CREATE INDEX IF NOT EXISTS idx_thread_states_checkpoint ON thread_states(checkpoint_id);
        CREATE INDEX IF NOT EXISTS idx_thread_states_created ON thread_states(created_at);

        CREATE INDEX IF NOT EXISTS idx_checkpoints_thread ON checkpoints(thread_id);
        CREATE INDEX IF NOT EXISTS idx_checkpoints_sequence ON checkpoints(thread_id, sequence);
        CREATE UNIQUE INDEX IF NOT EXISTS idx_checkpoints_unique_seq ON checkpoints(thread_id, sequence);

        CREATE INDEX IF NOT EXISTS idx_run_metadata_run_id ON run_metadata(run_id);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_thread ON run_metadata(thread_id);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_status ON run_metadata(status);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_agent ON run_metadata(agent_id);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_created ON run_metadata(created_at);

        CREATE INDEX IF NOT EXISTS idx_sessions_run_id ON sessions(run_id);
      `);
    });

    migrate();
  } catch (err) {
    console.error('[Migration] ACP schema migration error:', err.message);
  }
}
|
|
322
|
+
|
|
229
323
|
initSchema();
|
|
230
324
|
migrateFromJson();
|
|
325
|
+
migrateToACP();
|
|
231
326
|
|
|
232
327
|
// Migration: Add imported conversation columns if they don't exist
|
|
233
328
|
try {
|
|
@@ -272,25 +367,90 @@ try {
|
|
|
272
367
|
console.error('[Migration] Error:', err.message);
|
|
273
368
|
}
|
|
274
369
|
|
|
275
|
-
// Migration: Add resume capability columns
|
|
370
|
+
// Migration: Add resume capability columns (disabled - incomplete migration)
// This migration block was incomplete and has been removed

// ============ ACP SCHEMA MIGRATION ============
// NOTE(review): this block largely duplicates migrateToACP() above, but with a
// DIFFERENT run_metadata shape (run_id as PRIMARY KEY, no id column) and
// thread_states/checkpoints nullability. Because every statement uses
// IF NOT EXISTS / column-existence guards, this is effectively a no-op when
// migrateToACP() has already created the tables — whichever runs first wins.
// Consider consolidating the two migrations.
try {
  console.log('[Migration] Running ACP schema migration...');

  // Add metadata column to conversations if not exists
  const convColsACP = db.prepare("PRAGMA table_info(conversations)").all().map(c => c.name);
  if (!convColsACP.includes('metadata')) {
    db.exec('ALTER TABLE conversations ADD COLUMN metadata TEXT DEFAULT "{}"');
    console.log('[Migration] Added metadata column to conversations');
  }

  // Add run_id, input, config, interrupt to sessions if not exists
  const sessColsACP = db.prepare("PRAGMA table_info(sessions)").all().map(c => c.name);
  if (!sessColsACP.includes('run_id')) {
    db.exec('ALTER TABLE sessions ADD COLUMN run_id TEXT');
    console.log('[Migration] Added run_id column to sessions');
  }
  if (!sessColsACP.includes('input')) {
    db.exec('ALTER TABLE sessions ADD COLUMN input TEXT');
    console.log('[Migration] Added input column to sessions');
  }
  if (!sessColsACP.includes('config')) {
    db.exec('ALTER TABLE sessions ADD COLUMN config TEXT');
    console.log('[Migration] Added config column to sessions');
  }
  if (!sessColsACP.includes('interrupt')) {
    db.exec('ALTER TABLE sessions ADD COLUMN interrupt TEXT');
    console.log('[Migration] Added interrupt column to sessions');
  }

  // Create ACP tables
  db.exec(`
    CREATE TABLE IF NOT EXISTS thread_states (
      id TEXT PRIMARY KEY,
      thread_id TEXT NOT NULL,
      checkpoint_id TEXT NOT NULL,
      state_data TEXT NOT NULL,
      created_at INTEGER NOT NULL,
      FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
    );

    CREATE INDEX IF NOT EXISTS idx_thread_states_thread ON thread_states(thread_id);
    CREATE INDEX IF NOT EXISTS idx_thread_states_checkpoint ON thread_states(checkpoint_id);
    CREATE INDEX IF NOT EXISTS idx_thread_states_created ON thread_states(created_at);

    CREATE TABLE IF NOT EXISTS checkpoints (
      id TEXT PRIMARY KEY,
      thread_id TEXT NOT NULL,
      checkpoint_name TEXT,
      sequence INTEGER NOT NULL,
      created_at INTEGER NOT NULL,
      FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
    );

    CREATE INDEX IF NOT EXISTS idx_checkpoints_thread ON checkpoints(thread_id);
    CREATE INDEX IF NOT EXISTS idx_checkpoints_sequence ON checkpoints(thread_id, sequence);
    CREATE UNIQUE INDEX IF NOT EXISTS idx_checkpoints_unique ON checkpoints(thread_id, sequence);

    CREATE TABLE IF NOT EXISTS run_metadata (
      run_id TEXT PRIMARY KEY,
      thread_id TEXT,
      agent_id TEXT NOT NULL,
      status TEXT NOT NULL,
      input TEXT,
      config TEXT,
      webhook_url TEXT,
      created_at INTEGER NOT NULL,
      updated_at INTEGER NOT NULL,
      FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE,
      FOREIGN KEY (run_id) REFERENCES sessions(id) ON DELETE CASCADE
    );

    CREATE INDEX IF NOT EXISTS idx_run_metadata_thread ON run_metadata(thread_id);
    CREATE INDEX IF NOT EXISTS idx_run_metadata_agent ON run_metadata(agent_id);
    CREATE INDEX IF NOT EXISTS idx_run_metadata_status ON run_metadata(status);
    CREATE INDEX IF NOT EXISTS idx_run_metadata_created ON run_metadata(created_at);
  `);

  console.log('[Migration] ACP schema migration complete');
} catch (err) {
  console.error('[Migration] ACP schema migration error:', err.message);
}
|
|
295
455
|
|
|
296
456
|
// Migration: Backfill messages for conversations imported without message content
|
|
@@ -1300,7 +1460,10 @@ export const queries = {
|
|
|
1300
1460
|
markDownloadPaused(downloadId, errorMessage) {
|
|
1301
1461
|
const stmt = prep('UPDATE SET status = ?, error_message = ?, lastAttempt = ? WHERE id = ?');
|
|
1302
1462
|
stmt.run('paused', errorMessage, Date.now(), downloadId);
|
|
1303
|
-
}
|
|
1463
|
+
},
|
|
1464
|
+
|
|
1465
|
+
// ============ ACP-COMPATIBLE QUERIES ============
|
|
1466
|
+
...createACPQueries(db, prep)
|
|
1304
1467
|
};
|
|
1305
1468
|
|
|
1306
1469
|
export default { queries };
|
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
// Module-level cache of ACP agent descriptors, keyed by agent id.
// Populated by initializeDescriptors(); read by getAgentDescriptor()/getAllDescriptors().
const agentDescriptorCache = new Map();

/**
 * ACP descriptor for the Claude Code agent.
 *
 * @param {{ name: string, path: string }} agent - discovered agent record; only name/path are read
 * @returns {object} descriptor with `metadata` (identity + description) and
 *   `specs` (capability flags plus JSON schemas for input/output/state/config)
 */
function generateClaudeCodeDescriptor(agent) {
  return {
    metadata: {
      ref: {
        name: agent.name,
        version: '1.0.0',
        url: agent.path
      },
      description: 'Claude Code is an AI coding agent that can read, write, and execute code with streaming output support. It provides comprehensive code editing, file management, and terminal execution capabilities.'
    },
    specs: {
      // Feature flags: threads and custom streaming only; no interrupts/callbacks.
      capabilities: {
        threads: true,
        interrupts: false,
        callbacks: false,
        streaming: {
          values: false,
          custom: true
        }
      },
      // JSON schema for run input.
      input: {
        type: 'object',
        properties: {
          content: {
            type: 'string',
            description: 'The user prompt or instruction to send to the agent'
          },
          model: {
            type: 'string',
            description: 'Optional model identifier to use for this run'
          }
        },
        required: ['content']
      },
      // JSON schema for run output.
      output: {
        type: 'object',
        properties: {
          result: {
            type: 'string',
            description: 'The final response or result from the agent'
          },
          events: {
            type: 'array',
            description: 'Stream of execution events (tool calls, outputs, etc.)',
            items: { type: 'object' }
          }
        }
      },
      // Schema of each custom streaming frame.
      custom_streaming_update: {
        type: 'object',
        properties: {
          type: {
            type: 'string',
            enum: ['text', 'tool_use', 'tool_result', 'error']
          },
          data: { type: 'object' }
        }
      },
      // Schema of persisted thread state.
      thread_state: {
        type: 'object',
        description: 'Conversation history with messages and session state',
        properties: {
          messages: {
            type: 'array',
            items: { type: 'object' }
          },
          sessionId: { type: 'string' }
        }
      },
      // Schema of per-run configuration.
      config: {
        type: 'object',
        properties: {
          workingDirectory: {
            type: 'string',
            description: 'Working directory for file operations'
          },
          model: {
            type: 'string',
            description: 'Default model to use'
          }
        }
      }
    }
  };
}
|
|
88
|
+
|
|
89
|
+
/**
 * ACP descriptor for the Gemini CLI agent. Same overall shape as the
 * Claude Code descriptor, with an untyped streaming frame (no enum).
 *
 * @param {{ name: string, path: string }} agent - discovered agent record; only name/path are read
 * @returns {object} descriptor with `metadata` and `specs`
 */
function generateGeminiDescriptor(agent) {
  return {
    metadata: {
      ref: {
        name: agent.name,
        version: '1.0.0',
        url: agent.path
      },
      description: 'Gemini CLI is Google AI coding agent with streaming support, code execution, and file management capabilities.'
    },
    specs: {
      // Feature flags: threads and custom streaming only.
      capabilities: {
        threads: true,
        interrupts: false,
        callbacks: false,
        streaming: {
          values: false,
          custom: true
        }
      },
      // JSON schema for run input.
      input: {
        type: 'object',
        properties: {
          content: {
            type: 'string',
            description: 'The user prompt or instruction to send to the agent'
          },
          model: {
            type: 'string',
            description: 'Optional model identifier to use for this run'
          }
        },
        required: ['content']
      },
      // JSON schema for run output.
      output: {
        type: 'object',
        properties: {
          result: {
            type: 'string',
            description: 'The final response or result from the agent'
          },
          events: {
            type: 'array',
            description: 'Stream of execution events',
            items: { type: 'object' }
          }
        }
      },
      // Streaming frame schema (type is free-form here, unlike Claude Code's enum).
      custom_streaming_update: {
        type: 'object',
        properties: {
          type: { type: 'string' },
          data: { type: 'object' }
        }
      },
      // Persisted thread state schema.
      thread_state: {
        type: 'object',
        description: 'Conversation history and session state',
        properties: {
          messages: {
            type: 'array',
            items: { type: 'object' }
          }
        }
      },
      // Per-run configuration schema.
      config: {
        type: 'object',
        properties: {
          workingDirectory: {
            type: 'string',
            description: 'Working directory for file operations'
          },
          model: {
            type: 'string',
            description: 'Model identifier'
          }
        }
      }
    }
  };
}
|
|
170
|
+
|
|
171
|
+
/**
 * ACP descriptor for the OpenCode agent. Same shape as the other
 * descriptors, with terser (undocumented) schema leaves.
 *
 * @param {{ name: string, path: string }} agent - discovered agent record; only name/path are read
 * @returns {object} descriptor with `metadata` and `specs`
 */
function generateOpenCodeDescriptor(agent) {
  return {
    metadata: {
      ref: {
        name: agent.name,
        version: '1.0.0',
        url: agent.path
      },
      description: 'OpenCode is a multi-provider AI coding agent with streaming support and comprehensive code manipulation capabilities.'
    },
    specs: {
      // Feature flags: threads and custom streaming only.
      capabilities: {
        threads: true,
        interrupts: false,
        callbacks: false,
        streaming: {
          values: false,
          custom: true
        }
      },
      // JSON schema for run input.
      input: {
        type: 'object',
        properties: {
          content: {
            type: 'string',
            description: 'The user prompt or instruction'
          },
          model: {
            type: 'string',
            description: 'Model identifier'
          }
        },
        required: ['content']
      },
      // JSON schema for run output.
      output: {
        type: 'object',
        properties: {
          result: { type: 'string' },
          events: {
            type: 'array',
            items: { type: 'object' }
          }
        }
      },
      // Streaming frame schema.
      custom_streaming_update: {
        type: 'object',
        properties: {
          type: { type: 'string' },
          data: { type: 'object' }
        }
      },
      // Persisted thread state schema.
      thread_state: {
        type: 'object',
        properties: {
          messages: {
            type: 'array',
            items: { type: 'object' }
          }
        }
      },
      // Per-run configuration schema.
      config: {
        type: 'object',
        properties: {
          workingDirectory: { type: 'string' },
          model: { type: 'string' }
        }
      }
    }
  };
}
|
|
241
|
+
|
|
242
|
+
/**
 * Fallback ACP descriptor for agents without a hand-written profile.
 * Mirrors the shape of the specialised descriptors but advertises only the
 * minimal prompt-in / result-out contract.
 *
 * @param {{ name: string, path: string }} agent - discovered agent record; only name/path are read
 * @returns {object} descriptor with `metadata` and `specs`
 */
function generateGenericDescriptor(agent) {
  // Identity block, with a templated one-line description.
  const metadata = {
    ref: {
      name: agent.name,
      version: '1.0.0',
      url: agent.path
    },
    description: `${agent.name} is an AI coding agent with basic streaming and execution capabilities.`
  };

  // Feature flags: threads and custom streaming only.
  const capabilities = {
    threads: true,
    interrupts: false,
    callbacks: false,
    streaming: {
      values: false,
      custom: true
    }
  };

  // Minimal input schema: a single required prompt string.
  const input = {
    type: 'object',
    properties: {
      content: {
        type: 'string',
        description: 'User prompt or instruction'
      }
    },
    required: ['content']
  };

  return {
    metadata,
    specs: {
      capabilities,
      input,
      // Output is just the final result string.
      output: {
        type: 'object',
        properties: {
          result: { type: 'string' }
        }
      },
      // Streaming frame schema.
      custom_streaming_update: {
        type: 'object',
        properties: {
          type: { type: 'string' },
          data: { type: 'object' }
        }
      },
      // Persisted thread state schema.
      thread_state: {
        type: 'object',
        properties: {
          messages: {
            type: 'array',
            items: { type: 'object' }
          }
        }
      },
      // Per-run configuration schema.
      config: {
        type: 'object',
        properties: {
          workingDirectory: { type: 'string' }
        }
      }
    }
  };
}
|
|
303
|
+
|
|
304
|
+
/**
 * Pick the descriptor generator that matches an agent's id.
 * Unknown ids fall back to the generic descriptor.
 *
 * @param {{ id: string }} agent - discovered agent record
 * @returns {object} the generated ACP descriptor
 */
function generateAgentDescriptor(agent) {
  const generators = {
    'claude-code': generateClaudeCodeDescriptor,
    gemini: generateGeminiDescriptor,
    opencode: generateOpenCodeDescriptor
  };
  // Object.hasOwn guards against inherited keys so unknown ids always
  // take the generic path, exactly like the default of a switch.
  const generate = Object.hasOwn(generators, agent.id)
    ? generators[agent.id]
    : generateGenericDescriptor;
  return generate(agent);
}
|
|
316
|
+
|
|
317
|
+
/**
 * Rebuild the descriptor cache from the discovered agent list.
 * Clears any previous entries first, so removed agents drop out.
 *
 * @param {Array<{ id: string }>} agents - discovered agent records
 * @returns {number} number of descriptors now cached
 */
export function initializeDescriptors(agents) {
  agentDescriptorCache.clear();
  agents.forEach((agent) => {
    agentDescriptorCache.set(agent.id, generateAgentDescriptor(agent));
  });
  return agentDescriptorCache.size;
}
|
|
325
|
+
|
|
326
|
+
/**
 * Look up a cached descriptor by agent id.
 *
 * @param {string} agentId
 * @returns {object|null} the descriptor, or null when the id is not cached
 */
export function getAgentDescriptor(agentId) {
  return agentDescriptorCache.get(agentId) || null;
}
|
|
329
|
+
|
|
330
|
+
/**
 * Snapshot of every cached descriptor as a plain object keyed by agent id.
 *
 * @returns {Object<string, object>}
 */
export function getAllDescriptors() {
  return Object.fromEntries(agentDescriptorCache);
}
|
package/package.json
CHANGED
package/server.js
CHANGED
|
@@ -14,6 +14,7 @@ import Busboy from 'busboy';
|
|
|
14
14
|
import fsbrowse from 'fsbrowse';
|
|
15
15
|
import { queries } from './database.js';
|
|
16
16
|
import { runClaudeWithStreaming } from './lib/claude-runner.js';
|
|
17
|
+
import { initializeDescriptors, getAgentDescriptor } from './lib/agent-descriptors.js';
|
|
17
18
|
|
|
18
19
|
const ttsTextAccumulators = new Map();
|
|
19
20
|
|
|
@@ -338,6 +339,7 @@ function discoverAgents() {
|
|
|
338
339
|
}
|
|
339
340
|
|
|
340
341
|
const discoveredAgents = discoverAgents();
|
|
342
|
+
initializeDescriptors(discoveredAgents);
|
|
341
343
|
|
|
342
344
|
const modelCache = new Map();
|
|
343
345
|
|
|
@@ -1884,32 +1886,14 @@ const server = http.createServer(async (req, res) => {
|
|
|
1884
1886
|
const agentDescriptorMatch = pathOnly.match(/^\/api\/agents\/([^/]+)\/descriptor$/);
|
|
1885
1887
|
if (agentDescriptorMatch && req.method === 'GET') {
|
|
1886
1888
|
const agentId = agentDescriptorMatch[1];
|
|
1887
|
-
const
|
|
1888
|
-
|
|
1889
|
-
if (!
|
|
1889
|
+
const descriptor = getAgentDescriptor(agentId);
|
|
1890
|
+
|
|
1891
|
+
if (!descriptor) {
|
|
1890
1892
|
sendJSON(req, res, 404, { error: 'Agent not found' });
|
|
1891
1893
|
return;
|
|
1892
1894
|
}
|
|
1893
1895
|
|
|
1894
|
-
sendJSON(req, res, 200,
|
|
1895
|
-
agentId: agent.id,
|
|
1896
|
-
agentName: agent.name,
|
|
1897
|
-
protocol: agent.protocol || 'direct',
|
|
1898
|
-
capabilities: {
|
|
1899
|
-
streaming: true,
|
|
1900
|
-
cancel: true,
|
|
1901
|
-
resume: agent.protocol === 'direct',
|
|
1902
|
-
stateful: true
|
|
1903
|
-
},
|
|
1904
|
-
inputSchema: {
|
|
1905
|
-
type: 'object',
|
|
1906
|
-
properties: {
|
|
1907
|
-
content: { type: 'string', description: 'The prompt to send to the agent' }
|
|
1908
|
-
},
|
|
1909
|
-
required: ['content']
|
|
1910
|
-
},
|
|
1911
|
-
stateFormat: 'opaque'
|
|
1912
|
-
});
|
|
1896
|
+
sendJSON(req, res, 200, descriptor);
|
|
1913
1897
|
return;
|
|
1914
1898
|
}
|
|
1915
1899
|
|