agentgui 1.0.380 → 1.0.381
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/acp-queries.js +550 -0
- package/database.js +179 -16
- package/lib/agent-descriptors.js +332 -0
- package/package.json +1 -1
- package/server.js +6 -22
package/acp-queries.js
ADDED
|
@@ -0,0 +1,550 @@
|
|
|
1
|
+
// ACP-Compatible Data Layer
// Provides query functions that return ACP v0.2.3 compatible data structures

import { randomUUID } from 'crypto';

// Generate a prefixed, roughly time-ordered ID for internal rows (thread
// states, copied messages). Not a UUID: uniqueness relies on the millisecond
// timestamp plus a 9-character base-36 random suffix.
function generateId(prefix) {
  // slice(2, 11) replaces the deprecated substr(2, 9); both take the same
  // nine characters following the leading "0.".
  return `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`;
}

// Generate an RFC 4122 UUID for externally visible IDs (threads, runs,
// checkpoints).
function generateUUID() {
  return randomUUID();
}

// Convert an epoch-millisecond timestamp to an ISO-8601 string.
function toISOString(timestamp) {
  return new Date(timestamp).toISOString();
}

// Parse a JSON column value, returning `fallback` for null/empty/invalid
// input instead of throwing (stored metadata can be absent or corrupt).
function parseJSON(text, fallback) {
  if (!text) return fallback;
  try {
    return JSON.parse(text);
  } catch (e) {
    return fallback;
  }
}

// Map a `conversations` row to the ACP thread shape.
function rowToThread(row) {
  return {
    thread_id: row.id,
    created_at: toISOString(row.created_at),
    updated_at: toISOString(row.updated_at),
    metadata: parseJSON(row.metadata, {}),
    status: row.status || 'idle'
  };
}

// Map a `thread_states` row to the ACP state shape.
function rowToState(row) {
  const stateData = parseJSON(row.state_data, {});
  return {
    checkpoint: { checkpoint_id: row.checkpoint_id },
    values: stateData.values || {},
    messages: stateData.messages || [],
    metadata: stateData.metadata || {}
  };
}

// Map a `checkpoints` row to the ACP checkpoint shape.
function rowToCheckpoint(row) {
  return {
    checkpoint_id: row.id,
    thread_id: row.thread_id,
    checkpoint_name: row.checkpoint_name,
    sequence: row.sequence,
    created_at: toISOString(row.created_at)
  };
}

// Map a `run_metadata` row to the ACP run shape.
function rowToRun(row) {
  return {
    run_id: row.run_id,
    thread_id: row.thread_id,
    agent_id: row.agent_id,
    status: row.status,
    created_at: toISOString(row.created_at),
    updated_at: toISOString(row.updated_at)
  };
}

/**
 * Build the ACP-compatible query layer on top of the local SQLite schema.
 *
 * @param {object} db - Database handle; only db.transaction(fn) is used here.
 * @param {Function} prep - Statement preparer: prep(sql) -> { run, get, all }.
 * @returns {object} Methods implementing thread/checkpoint/run CRUD and search.
 */
export function createACPQueries(db, prep) {
  return {
    // ============ THREAD CRUD ============

    /**
     * Create a new thread (a row in `conversations`).
     * @param {object} [metadata] - Arbitrary JSON-serializable metadata.
     * @returns {object} The created ACP thread.
     */
    createThread(metadata = {}) {
      const threadId = generateUUID();
      const now = Date.now();

      // agentId is not known at creation time; 'unknown' is the placeholder.
      prep(
        `INSERT INTO conversations (id, agentId, title, created_at, updated_at, status, metadata)
         VALUES (?, ?, ?, ?, ?, ?, ?)`
      ).run(threadId, 'unknown', null, now, now, 'idle', JSON.stringify(metadata));

      return {
        thread_id: threadId,
        created_at: toISOString(now),
        updated_at: toISOString(now),
        metadata,
        status: 'idle'
      };
    },

    /** Fetch a thread by id. @returns {object|null} */
    getThread(threadId) {
      const row = prep('SELECT * FROM conversations WHERE id = ?').get(threadId);
      return row ? rowToThread(row) : null;
    },

    /**
     * Partially update a thread's metadata and/or status.
     * @throws {Error} If the thread does not exist.
     */
    patchThread(threadId, updates) {
      const thread = this.getThread(threadId);
      if (!thread) {
        throw new Error('Thread not found');
      }

      const now = Date.now();
      const newMetadata = updates.metadata !== undefined ? updates.metadata : thread.metadata;
      const newStatus = updates.status !== undefined ? updates.status : thread.status;

      prep(
        `UPDATE conversations SET metadata = ?, status = ?, updated_at = ? WHERE id = ?`
      ).run(JSON.stringify(newMetadata), newStatus, now, threadId);

      return {
        thread_id: threadId,
        created_at: thread.created_at,
        updated_at: toISOString(now),
        metadata: newMetadata,
        status: newStatus
      };
    },

    /**
     * Delete a thread and all dependent rows.
     * @throws {Error} If the thread still has pending runs.
     */
    deleteThread(threadId) {
      const pendingRuns = prep(
        `SELECT COUNT(*) as count FROM run_metadata WHERE thread_id = ? AND status = 'pending'`
      ).get(threadId);

      if (pendingRuns && pendingRuns.count > 0) {
        throw new Error('Cannot delete thread with pending runs');
      }

      // Delete children before the parent row, all in one transaction.
      const deleteStmt = db.transaction(() => {
        prep('DELETE FROM thread_states WHERE thread_id = ?').run(threadId);
        prep('DELETE FROM checkpoints WHERE thread_id = ?').run(threadId);
        prep('DELETE FROM run_metadata WHERE thread_id = ?').run(threadId);
        prep('DELETE FROM sessions WHERE conversationId = ?').run(threadId);
        prep('DELETE FROM messages WHERE conversationId = ?').run(threadId);
        prep('DELETE FROM chunks WHERE conversationId = ?').run(threadId);
        prep('DELETE FROM events WHERE conversationId = ?').run(threadId);
        prep('DELETE FROM conversations WHERE id = ?').run(threadId);
      });

      deleteStmt();
      return true;
    },

    // ============ THREAD STATE MANAGEMENT ============

    /** Persist a state snapshot for a thread, optionally tied to a checkpoint. */
    saveThreadState(threadId, checkpointId, stateData) {
      const id = generateId('state');
      const now = Date.now();

      prep(
        `INSERT INTO thread_states (id, thread_id, checkpoint_id, state_data, created_at)
         VALUES (?, ?, ?, ?, ?)`
      ).run(id, threadId, checkpointId, JSON.stringify(stateData), now);

      return {
        id,
        thread_id: threadId,
        checkpoint_id: checkpointId,
        created_at: toISOString(now)
      };
    },

    /**
     * Get the latest state for a thread, or the latest state for a specific
     * checkpoint when checkpointId is given.
     * @returns {object|null}
     */
    getThreadState(threadId, checkpointId = null) {
      let row;
      if (checkpointId) {
        row = prep(
          `SELECT * FROM thread_states WHERE thread_id = ? AND checkpoint_id = ? ORDER BY created_at DESC LIMIT 1`
        ).get(threadId, checkpointId);
      } else {
        row = prep(
          `SELECT * FROM thread_states WHERE thread_id = ? ORDER BY created_at DESC LIMIT 1`
        ).get(threadId);
      }

      return row ? rowToState(row) : null;
    },

    /** Page through a thread's state snapshots, newest first. */
    getThreadHistory(threadId, limit = 50, offset = 0) {
      const total = prep('SELECT COUNT(*) as count FROM thread_states WHERE thread_id = ?')
        .get(threadId).count;

      const rows = prep(
        `SELECT * FROM thread_states WHERE thread_id = ? ORDER BY created_at DESC LIMIT ? OFFSET ?`
      ).all(threadId, limit, offset);

      return {
        states: rows.map(rowToState),
        total,
        limit,
        offset,
        hasMore: offset + limit < total
      };
    },

    /**
     * Duplicate a thread, including its checkpoints, states, and messages.
     * The copy gets a fresh id and " (copy)" appended to its title.
     * @throws {Error} If the source thread does not exist.
     */
    copyThread(sourceThreadId) {
      const sourceThread = this.getThread(sourceThreadId);
      if (!sourceThread) {
        throw new Error('Source thread not found');
      }

      const newThreadId = generateUUID();
      const now = Date.now();

      const copyStmt = db.transaction(() => {
        // Copy the conversation row itself.
        prep(
          `INSERT INTO conversations (id, agentId, title, created_at, updated_at, status, metadata, workingDirectory)
           SELECT ?, agentId, title || ' (copy)', ?, ?, status, metadata, workingDirectory
           FROM conversations WHERE id = ?`
        ).run(newThreadId, now, now, sourceThreadId);

        // Copy checkpoints (fresh ids, same names and sequence numbers).
        const checkpoints = prep('SELECT * FROM checkpoints WHERE thread_id = ? ORDER BY sequence ASC').all(sourceThreadId);
        for (const checkpoint of checkpoints) {
          prep(
            `INSERT INTO checkpoints (id, thread_id, checkpoint_name, sequence, created_at)
             VALUES (?, ?, ?, ?, ?)`
          ).run(generateUUID(), newThreadId, checkpoint.checkpoint_name, checkpoint.sequence, now);
        }

        // Copy thread states. NOTE(review): checkpoint_id still references the
        // SOURCE thread's checkpoints (original behavior preserved) — the
        // copied checkpoints above get fresh ids that are never linked here.
        const states = prep('SELECT * FROM thread_states WHERE thread_id = ? ORDER BY created_at ASC').all(sourceThreadId);
        for (const state of states) {
          prep(
            `INSERT INTO thread_states (id, thread_id, checkpoint_id, state_data, created_at)
             VALUES (?, ?, ?, ?, ?)`
          ).run(generateId('state'), newThreadId, state.checkpoint_id, state.state_data, now);
        }

        // Copy messages.
        const messages = prep('SELECT * FROM messages WHERE conversationId = ? ORDER BY created_at ASC').all(sourceThreadId);
        for (const msg of messages) {
          prep(
            `INSERT INTO messages (id, conversationId, role, content, created_at)
             VALUES (?, ?, ?, ?, ?)`
          ).run(generateId('msg'), newThreadId, msg.role, msg.content, now);
        }
      });

      copyStmt();
      return this.getThread(newThreadId);
    },

    // ============ CHECKPOINT FUNCTIONS ============

    /** Create a checkpoint with the next sequence number for the thread. */
    createCheckpoint(threadId, checkpointName = null) {
      const id = generateUUID();
      const now = Date.now();

      // Next sequence number; MAX(sequence) is NULL for an empty thread.
      const maxSeq = prep('SELECT MAX(sequence) as max FROM checkpoints WHERE thread_id = ?').get(threadId);
      const sequence = (maxSeq?.max ?? -1) + 1;

      prep(
        `INSERT INTO checkpoints (id, thread_id, checkpoint_name, sequence, created_at)
         VALUES (?, ?, ?, ?, ?)`
      ).run(id, threadId, checkpointName, sequence, now);

      return {
        checkpoint_id: id,
        thread_id: threadId,
        checkpoint_name: checkpointName,
        sequence,
        created_at: toISOString(now)
      };
    },

    /** Fetch a checkpoint by id. @returns {object|null} */
    getCheckpoint(checkpointId) {
      const row = prep('SELECT * FROM checkpoints WHERE id = ?').get(checkpointId);
      return row ? rowToCheckpoint(row) : null;
    },

    /** Page through a thread's checkpoints, highest sequence first. */
    listCheckpoints(threadId, limit = 50, offset = 0) {
      const total = prep('SELECT COUNT(*) as count FROM checkpoints WHERE thread_id = ?')
        .get(threadId).count;

      const rows = prep(
        `SELECT * FROM checkpoints WHERE thread_id = ? ORDER BY sequence DESC LIMIT ? OFFSET ?`
      ).all(threadId, limit, offset);

      return {
        checkpoints: rows.map(rowToCheckpoint),
        total,
        limit,
        offset,
        hasMore: offset + limit < total
      };
    },

    // ============ RUN MANAGEMENT ============

    /**
     * Create a run: a `sessions` row (sharing the run's id) plus a
     * `run_metadata` row. Stateless runs (no thread) store the placeholder
     * conversation id 'stateless' on the session row, but a NULL thread_id
     * in run_metadata.
     */
    createRun(agentId, threadId = null, input = null, config = null, webhookUrl = null) {
      const runId = generateUUID();
      const now = Date.now();

      prep(
        `INSERT INTO sessions (id, conversationId, status, started_at, completed_at, response, error)
         VALUES (?, ?, ?, ?, ?, ?, ?)`
      ).run(runId, threadId || 'stateless', 'pending', now, null, null, null);

      prep(
        `INSERT INTO run_metadata (run_id, thread_id, agent_id, status, input, config, webhook_url, created_at, updated_at)
         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`
      ).run(
        runId,
        threadId,
        agentId,
        'pending',
        input ? JSON.stringify(input) : null,
        config ? JSON.stringify(config) : null,
        webhookUrl,
        now,
        now
      );

      return {
        run_id: runId,
        thread_id: threadId,
        agent_id: agentId,
        status: 'pending',
        created_at: toISOString(now),
        updated_at: toISOString(now)
      };
    },

    /** Fetch a run by id. @returns {object|null} */
    getRun(runId) {
      const row = prep('SELECT * FROM run_metadata WHERE run_id = ?').get(runId);
      return row ? rowToRun(row) : null;
    },

    /** Set a run's status (mirrored onto its session row). */
    updateRunStatus(runId, status) {
      const now = Date.now();

      prep(
        `UPDATE run_metadata SET status = ?, updated_at = ? WHERE run_id = ?`
      ).run(status, now, runId);

      // Also update the session (it shares the run's id).
      prep('UPDATE sessions SET status = ? WHERE id = ?').run(status, runId);

      return this.getRun(runId);
    },

    /**
     * Cancel an in-flight run.
     * @throws {Error} If the run is missing or already finished/cancelled.
     */
    cancelRun(runId) {
      const run = this.getRun(runId);
      if (!run) {
        throw new Error('Run not found');
      }

      if (['success', 'error', 'cancelled'].includes(run.status)) {
        throw new Error('Run already completed or cancelled');
      }

      return this.updateRunStatus(runId, 'cancelled');
    },

    /** Delete a run and its session, chunks, and events. */
    deleteRun(runId) {
      const deleteStmt = db.transaction(() => {
        prep('DELETE FROM chunks WHERE sessionId = ?').run(runId);
        prep('DELETE FROM events WHERE sessionId = ?').run(runId);
        prep('DELETE FROM run_metadata WHERE run_id = ?').run(runId);
        prep('DELETE FROM sessions WHERE id = ?').run(runId);
      });

      deleteStmt();
      return true;
    },

    /** Page through a thread's runs, newest first. */
    getThreadRuns(threadId, limit = 50, offset = 0) {
      const total = prep('SELECT COUNT(*) as count FROM run_metadata WHERE thread_id = ?')
        .get(threadId).count;

      const rows = prep(
        `SELECT * FROM run_metadata WHERE thread_id = ? ORDER BY created_at DESC LIMIT ? OFFSET ?`
      ).all(threadId, limit, offset);

      return {
        runs: rows.map(rowToRun),
        total,
        limit,
        offset,
        hasMore: offset + limit < total
      };
    },

    // ============ SEARCH FUNCTIONS ============

    /**
     * Search threads by status, creation-date range, and/or metadata
     * key/value pairs. Metadata matching is a substring match against the
     * stored JSON text, so it only matches exact top-level values (no
     * nested paths), and assumes the compact JSON.stringify spacing used
     * by this module's writers.
     */
    searchThreads(filters = {}) {
      const { metadata, status, dateRange, limit = 50, offset = 0 } = filters;

      let whereClause = "status != 'deleted'";
      const params = [];

      if (status) {
        whereClause += ' AND status = ?';
        params.push(status);
      }

      if (dateRange?.start) {
        whereClause += ' AND created_at >= ?';
        params.push(new Date(dateRange.start).getTime());
      }

      if (dateRange?.end) {
        whereClause += ' AND created_at <= ?';
        params.push(new Date(dateRange.end).getTime());
      }

      if (metadata) {
        for (const [key, value] of Object.entries(metadata)) {
          // JSON.stringify renders the value exactly as it was written to
          // the column, so non-string values (numbers, booleans) match too;
          // the previous `"${value}"` interpolation only matched strings.
          whereClause += ` AND metadata LIKE ?`;
          params.push(`%${JSON.stringify(key)}:${JSON.stringify(value)}%`);
        }
      }

      const total = prep(`SELECT COUNT(*) as count FROM conversations WHERE ${whereClause}`)
        .get(...params).count;

      const rows = prep(
        `SELECT * FROM conversations WHERE ${whereClause} ORDER BY updated_at DESC LIMIT ? OFFSET ?`
      ).all(...params, limit, offset);

      return {
        threads: rows.map(rowToThread),
        total,
        limit,
        offset,
        hasMore: offset + limit < total
      };
    },

    /**
     * Search agents. Agents are discovered dynamically elsewhere, so there
     * is nothing to query here yet; always returns [].
     */
    searchAgents(filters = {}) {
      return [];
    },

    /** Search runs by agent, thread, and/or status, newest first. */
    searchRuns(filters = {}) {
      const { agent_id, thread_id, status, limit = 50, offset = 0 } = filters;

      let whereClause = '1=1';
      const params = [];

      if (agent_id) {
        whereClause += ' AND agent_id = ?';
        params.push(agent_id);
      }

      if (thread_id) {
        whereClause += ' AND thread_id = ?';
        params.push(thread_id);
      }

      if (status) {
        whereClause += ' AND status = ?';
        params.push(status);
      }

      const total = prep(`SELECT COUNT(*) as count FROM run_metadata WHERE ${whereClause}`)
        .get(...params).count;

      const rows = prep(
        `SELECT * FROM run_metadata WHERE ${whereClause} ORDER BY created_at DESC LIMIT ? OFFSET ?`
      ).all(...params, limit, offset);

      return {
        runs: rows.map(rowToRun),
        total,
        limit,
        offset,
        hasMore: offset + limit < total
      };
    }
  };
}
|
package/database.js
CHANGED
|
@@ -2,6 +2,7 @@ import fs from 'fs';
|
|
|
2
2
|
import path from 'path';
|
|
3
3
|
import os from 'os';
|
|
4
4
|
import { createRequire } from 'module';
|
|
5
|
+
import { createACPQueries } from './acp-queries.js';
|
|
5
6
|
|
|
6
7
|
const require = createRequire(import.meta.url);
|
|
7
8
|
|
|
@@ -226,8 +227,102 @@ function migrateFromJson() {
|
|
|
226
227
|
}
|
|
227
228
|
}
|
|
228
229
|
|
|
230
|
+
function migrateToACP() {
  // Create the ACP (thread/checkpoint/run) schema. Idempotent: every
  // statement is guarded by IF NOT EXISTS or a PRAGMA column probe, so
  // re-running against an up-to-date database is a no-op. Errors are logged
  // rather than thrown so a migration failure does not prevent startup.
  try {
    const migrate = db.transaction(() => {
      // Thread state snapshots, optionally tied to a checkpoint.
      db.exec(`
        CREATE TABLE IF NOT EXISTS thread_states (
          id TEXT PRIMARY KEY,
          thread_id TEXT NOT NULL,
          checkpoint_id TEXT,
          state_data TEXT NOT NULL,
          created_at INTEGER NOT NULL,
          FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE,
          FOREIGN KEY (checkpoint_id) REFERENCES checkpoints(id) ON DELETE SET NULL
        )
      `);

      // checkpoint_name is nullable: createCheckpoint() defaults the name to
      // null, which the previous NOT NULL constraint here would have rejected.
      db.exec(`
        CREATE TABLE IF NOT EXISTS checkpoints (
          id TEXT PRIMARY KEY,
          thread_id TEXT NOT NULL,
          checkpoint_name TEXT,
          sequence INTEGER NOT NULL,
          created_at INTEGER NOT NULL,
          FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
        )
      `);

      // run_id is the primary key: createRun() never supplies a separate
      // surrogate id (the previous `id TEXT PRIMARY KEY` column was always
      // left NULL), and the companion ACP migration later in this file also
      // declares run_id as the PK.
      db.exec(`
        CREATE TABLE IF NOT EXISTS run_metadata (
          run_id TEXT PRIMARY KEY,
          thread_id TEXT,
          agent_id TEXT NOT NULL,
          status TEXT NOT NULL DEFAULT 'pending',
          input TEXT,
          config TEXT,
          webhook_url TEXT,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL,
          FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
        )
      `);

      // Add new columns to existing tables. ALTER TABLE has no IF NOT
      // EXISTS, so probe PRAGMA table_info first.
      const convCols = db.prepare("PRAGMA table_info(conversations)").all();
      const convColNames = convCols.map(c => c.name);

      if (!convColNames.includes('metadata')) {
        db.exec('ALTER TABLE conversations ADD COLUMN metadata TEXT');
      }

      const sessCols = db.prepare("PRAGMA table_info(sessions)").all();
      const sessColNames = sessCols.map(c => c.name);

      const sessionCols = {
        run_id: 'TEXT',
        input: 'TEXT',
        config: 'TEXT',
        interrupt: 'TEXT'
      };

      for (const [colName, colType] of Object.entries(sessionCols)) {
        if (!sessColNames.includes(colName)) {
          db.exec(`ALTER TABLE sessions ADD COLUMN ${colName} ${colType}`);
        }
      }

      // Indexes for the lookup patterns used by the ACP query layer.
      db.exec(`
        CREATE INDEX IF NOT EXISTS idx_thread_states_thread ON thread_states(thread_id);
        CREATE INDEX IF NOT EXISTS idx_thread_states_checkpoint ON thread_states(checkpoint_id);
        CREATE INDEX IF NOT EXISTS idx_thread_states_created ON thread_states(created_at);

        CREATE INDEX IF NOT EXISTS idx_checkpoints_thread ON checkpoints(thread_id);
        CREATE INDEX IF NOT EXISTS idx_checkpoints_sequence ON checkpoints(thread_id, sequence);
        CREATE UNIQUE INDEX IF NOT EXISTS idx_checkpoints_unique_seq ON checkpoints(thread_id, sequence);

        CREATE INDEX IF NOT EXISTS idx_run_metadata_run_id ON run_metadata(run_id);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_thread ON run_metadata(thread_id);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_status ON run_metadata(status);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_agent ON run_metadata(agent_id);
        CREATE INDEX IF NOT EXISTS idx_run_metadata_created ON run_metadata(created_at);

        CREATE INDEX IF NOT EXISTS idx_sessions_run_id ON sessions(run_id);
      `);
    });

    migrate();
  } catch (err) {
    console.error('[Migration] ACP schema migration error:', err.message);
  }
}
|
|
322
|
+
|
|
229
323
|
initSchema();
|
|
230
324
|
migrateFromJson();
|
|
325
|
+
migrateToACP();
|
|
231
326
|
|
|
232
327
|
// Migration: Add imported conversation columns if they don't exist
|
|
233
328
|
try {
|
|
@@ -272,25 +367,90 @@ try {
|
|
|
272
367
|
console.error('[Migration] Error:', err.message);
|
|
273
368
|
}
|
|
274
369
|
|
|
275
|
-
// Migration: Add resume capability columns
|
|
370
|
+
// Migration: Add resume capability columns (disabled - incomplete migration)
|
|
371
|
+
// This migration block was incomplete and has been removed
|
|
372
|
+
|
|
373
|
+
// ============ ACP SCHEMA MIGRATION ============
// NOTE(review): this block largely duplicates migrateToACP() above. It is
// kept as a safety net, but its DDL must stay aligned with migrateToACP's:
// CREATE TABLE IF NOT EXISTS silently keeps whichever definition ran first,
// so any divergence between the two makes the final schema order-dependent.
try {
  console.log('[Migration] Running ACP schema migration...');

  // Add metadata column to conversations if not exists.
  const convColsACP = db.prepare("PRAGMA table_info(conversations)").all().map(c => c.name);
  if (!convColsACP.includes('metadata')) {
    db.exec('ALTER TABLE conversations ADD COLUMN metadata TEXT DEFAULT "{}"');
    console.log('[Migration] Added metadata column to conversations');
  }

  // Add run_id, input, config, interrupt to sessions if not exists.
  const sessColsACP = db.prepare("PRAGMA table_info(sessions)").all().map(c => c.name);
  for (const col of ['run_id', 'input', 'config', 'interrupt']) {
    if (!sessColsACP.includes(col)) {
      db.exec(`ALTER TABLE sessions ADD COLUMN ${col} TEXT`);
      console.log(`[Migration] Added ${col} column to sessions`);
    }
  }

  // Create ACP tables. checkpoint_id is nullable (it was NOT NULL here but
  // nullable in migrateToACP — aligned to nullable, since thread states may
  // be saved without a checkpoint).
  db.exec(`
    CREATE TABLE IF NOT EXISTS thread_states (
      id TEXT PRIMARY KEY,
      thread_id TEXT NOT NULL,
      checkpoint_id TEXT,
      state_data TEXT NOT NULL,
      created_at INTEGER NOT NULL,
      FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
    );

    CREATE INDEX IF NOT EXISTS idx_thread_states_thread ON thread_states(thread_id);
    CREATE INDEX IF NOT EXISTS idx_thread_states_checkpoint ON thread_states(checkpoint_id);
    CREATE INDEX IF NOT EXISTS idx_thread_states_created ON thread_states(created_at);

    CREATE TABLE IF NOT EXISTS checkpoints (
      id TEXT PRIMARY KEY,
      thread_id TEXT NOT NULL,
      checkpoint_name TEXT,
      sequence INTEGER NOT NULL,
      created_at INTEGER NOT NULL,
      FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE
    );

    CREATE INDEX IF NOT EXISTS idx_checkpoints_thread ON checkpoints(thread_id);
    CREATE INDEX IF NOT EXISTS idx_checkpoints_sequence ON checkpoints(thread_id, sequence);
    CREATE UNIQUE INDEX IF NOT EXISTS idx_checkpoints_unique ON checkpoints(thread_id, sequence);

    CREATE TABLE IF NOT EXISTS run_metadata (
      run_id TEXT PRIMARY KEY,
      thread_id TEXT,
      agent_id TEXT NOT NULL,
      status TEXT NOT NULL,
      input TEXT,
      config TEXT,
      webhook_url TEXT,
      created_at INTEGER NOT NULL,
      updated_at INTEGER NOT NULL,
      FOREIGN KEY (thread_id) REFERENCES conversations(id) ON DELETE CASCADE,
      FOREIGN KEY (run_id) REFERENCES sessions(id) ON DELETE CASCADE
    );

    CREATE INDEX IF NOT EXISTS idx_run_metadata_thread ON run_metadata(thread_id);
    CREATE INDEX IF NOT EXISTS idx_run_metadata_agent ON run_metadata(agent_id);
    CREATE INDEX IF NOT EXISTS idx_run_metadata_status ON run_metadata(status);
    CREATE INDEX IF NOT EXISTS idx_run_metadata_created ON run_metadata(created_at);
  `);

  console.log('[Migration] ACP schema migration complete');
} catch (err) {
  console.error('[Migration] ACP schema migration error:', err.message);
}
|
|
295
455
|
|
|
296
456
|
// Migration: Backfill messages for conversations imported without message content
|
|
@@ -1300,7 +1460,10 @@ export const queries = {
|
|
|
1300
1460
|
markDownloadPaused(downloadId, errorMessage) {
|
|
1301
1461
|
const stmt = prep('UPDATE SET status = ?, error_message = ?, lastAttempt = ? WHERE id = ?');
|
|
1302
1462
|
stmt.run('paused', errorMessage, Date.now(), downloadId);
|
|
1303
|
-
}
|
|
1463
|
+
},
|
|
1464
|
+
|
|
1465
|
+
// ============ ACP-COMPATIBLE QUERIES ============
|
|
1466
|
+
...createACPQueries(db, prep)
|
|
1304
1467
|
};
|
|
1305
1468
|
|
|
1306
1469
|
export default { queries };
|
|
@@ -0,0 +1,332 @@
|
|
|
1
|
+
// Module-level cache for generated agent descriptors.
const agentDescriptorCache = new Map();

// Build the ACP agent descriptor for the Claude Code agent.
// Pure: derives the ref from the given agent record and returns a fresh
// descriptor object on every call.
function generateClaudeCodeDescriptor(agent) {
  const metadata = {
    ref: {
      name: agent.name,
      version: '1.0.0',
      url: agent.path
    },
    description: 'Claude Code is an AI coding agent that can read, write, and execute code with streaming output support. It provides comprehensive code editing, file management, and terminal execution capabilities.'
  };

  const capabilities = {
    threads: true,
    interrupts: false,
    callbacks: false,
    streaming: {
      values: false,
      custom: true
    }
  };

  const inputSchema = {
    type: 'object',
    properties: {
      content: {
        type: 'string',
        description: 'The user prompt or instruction to send to the agent'
      },
      model: {
        type: 'string',
        description: 'Optional model identifier to use for this run'
      }
    },
    required: ['content']
  };

  const outputSchema = {
    type: 'object',
    properties: {
      result: {
        type: 'string',
        description: 'The final response or result from the agent'
      },
      events: {
        type: 'array',
        description: 'Stream of execution events (tool calls, outputs, etc.)',
        items: { type: 'object' }
      }
    }
  };

  const streamingUpdateSchema = {
    type: 'object',
    properties: {
      type: {
        type: 'string',
        enum: ['text', 'tool_use', 'tool_result', 'error']
      },
      data: { type: 'object' }
    }
  };

  const threadStateSchema = {
    type: 'object',
    description: 'Conversation history with messages and session state',
    properties: {
      messages: {
        type: 'array',
        items: { type: 'object' }
      },
      sessionId: { type: 'string' }
    }
  };

  const configSchema = {
    type: 'object',
    properties: {
      workingDirectory: {
        type: 'string',
        description: 'Working directory for file operations'
      },
      model: {
        type: 'string',
        description: 'Default model to use'
      }
    }
  };

  return {
    metadata,
    specs: {
      capabilities,
      input: inputSchema,
      output: outputSchema,
      custom_streaming_update: streamingUpdateSchema,
      thread_state: threadStateSchema,
      config: configSchema
    }
  };
}
|
|
88
|
+
|
|
89
|
+
// Build the ACP descriptor advertised for the Gemini CLI agent.
// `agent` supplies the ref fields (`name`, `path`); everything else is static.
// Returns the same { metadata, specs } shape as the other per-agent generators.
function generateGeminiDescriptor(agent) {
  // Small helpers for the repeated JSON-schema fragments below.
  const str = (description) => ({ type: 'string', description });
  const objectArray = { type: 'array', items: { type: 'object' } };

  const capabilities = {
    threads: true,
    interrupts: false,
    callbacks: false,
    streaming: {
      values: false,
      custom: true
    }
  };

  const input = {
    type: 'object',
    properties: {
      content: str('The user prompt or instruction to send to the agent'),
      model: str('Optional model identifier to use for this run')
    },
    required: ['content']
  };

  const output = {
    type: 'object',
    properties: {
      result: str('The final response or result from the agent'),
      events: {
        type: 'array',
        description: 'Stream of execution events',
        items: { type: 'object' }
      }
    }
  };

  return {
    metadata: {
      ref: {
        name: agent.name,
        version: '1.0.0',
        url: agent.path
      },
      description: 'Gemini CLI is Google AI coding agent with streaming support, code execution, and file management capabilities.'
    },
    specs: {
      capabilities,
      input,
      output,
      custom_streaming_update: {
        type: 'object',
        properties: {
          type: { type: 'string' },
          data: { type: 'object' }
        }
      },
      thread_state: {
        type: 'object',
        description: 'Conversation history and session state',
        properties: {
          messages: objectArray
        }
      },
      config: {
        type: 'object',
        properties: {
          workingDirectory: str('Working directory for file operations'),
          model: str('Model identifier')
        }
      }
    }
  };
}
|
|
170
|
+
|
|
171
|
+
// Build the ACP descriptor advertised for the OpenCode agent.
// `agent` supplies the ref fields (`name`, `path`); the schema portions are static.
function generateOpenCodeDescriptor(agent) {
  const stringSchema = { type: 'string' };
  const objectSchema = { type: 'object' };
  const messageList = { type: 'array', items: { type: 'object' } };

  const metadata = {
    ref: {
      name: agent.name,
      version: '1.0.0',
      url: agent.path
    },
    description: 'OpenCode is a multi-provider AI coding agent with streaming support and comprehensive code manipulation capabilities.'
  };

  const specs = {
    capabilities: {
      threads: true,
      interrupts: false,
      callbacks: false,
      streaming: {
        values: false,
        custom: true
      }
    },
    input: {
      type: 'object',
      properties: {
        content: { type: 'string', description: 'The user prompt or instruction' },
        model: { type: 'string', description: 'Model identifier' }
      },
      required: ['content']
    },
    output: {
      type: 'object',
      properties: {
        result: stringSchema,
        events: messageList
      }
    },
    custom_streaming_update: {
      type: 'object',
      properties: {
        type: stringSchema,
        data: objectSchema
      }
    },
    thread_state: {
      type: 'object',
      properties: {
        messages: messageList
      }
    },
    config: {
      type: 'object',
      properties: {
        workingDirectory: stringSchema,
        model: stringSchema
      }
    }
  };

  return { metadata, specs };
}
|
|
241
|
+
|
|
242
|
+
// Fallback ACP descriptor for agents without a dedicated generator.
// The description embeds the agent's name; the schema is a minimal subset
// of what the specialized generators advertise.
function generateGenericDescriptor(agent) {
  const ref = {
    name: agent.name,
    version: '1.0.0',
    url: agent.path
  };

  return {
    metadata: {
      ref,
      description: `${agent.name} is an AI coding agent with basic streaming and execution capabilities.`
    },
    specs: {
      capabilities: {
        threads: true,
        interrupts: false,
        callbacks: false,
        streaming: {
          values: false,
          custom: true
        }
      },
      input: {
        type: 'object',
        properties: {
          content: {
            type: 'string',
            description: 'User prompt or instruction'
          }
        },
        required: ['content']
      },
      output: {
        type: 'object',
        properties: {
          result: { type: 'string' }
        }
      },
      custom_streaming_update: {
        type: 'object',
        properties: {
          type: { type: 'string' },
          data: { type: 'object' }
        }
      },
      thread_state: {
        type: 'object',
        properties: {
          messages: {
            type: 'array',
            items: { type: 'object' }
          }
        }
      },
      config: {
        type: 'object',
        properties: {
          workingDirectory: { type: 'string' }
        }
      }
    }
  };
}
|
|
303
|
+
|
|
304
|
+
// Dispatch to the per-agent descriptor generator by agent id,
// falling back to the generic descriptor for unknown agents.
// A Map avoids prototype-key pitfalls of a plain-object lookup.
function generateAgentDescriptor(agent) {
  const generators = new Map([
    ['claude-code', generateClaudeCodeDescriptor],
    ['gemini', generateGeminiDescriptor],
    ['opencode', generateOpenCodeDescriptor]
  ]);
  const generate = generators.get(agent.id) ?? generateGenericDescriptor;
  return generate(agent);
}
|
|
316
|
+
|
|
317
|
+
/**
 * Rebuild the module-level descriptor cache from the discovered agents.
 * Clears any previous entries, generates a descriptor per agent keyed by
 * its id, and returns the number of cached descriptors.
 * @param {Array<{id: string, name: string, path: string}>} agents
 * @returns {number} count of descriptors now in the cache
 */
export function initializeDescriptors(agents) {
  agentDescriptorCache.clear();
  agents.forEach((agent) => {
    agentDescriptorCache.set(agent.id, generateAgentDescriptor(agent));
  });
  return agentDescriptorCache.size;
}
|
|
325
|
+
|
|
326
|
+
/**
 * Look up a cached agent descriptor by id.
 * @param {string} agentId
 * @returns {object|null} the descriptor, or null when the id is unknown
 */
export function getAgentDescriptor(agentId) {
  const descriptor = agentDescriptorCache.get(agentId);
  if (descriptor) {
    return descriptor;
  }
  return null;
}
|
|
329
|
+
|
|
330
|
+
/**
 * Snapshot the descriptor cache as a plain object keyed by agent id.
 * @returns {Record<string, object>} agentId -> descriptor
 */
export function getAllDescriptors() {
  const snapshot = {};
  for (const [agentId, descriptor] of agentDescriptorCache) {
    snapshot[agentId] = descriptor;
  }
  return snapshot;
}
|
package/package.json
CHANGED
package/server.js
CHANGED
|
@@ -14,6 +14,7 @@ import Busboy from 'busboy';
|
|
|
14
14
|
import fsbrowse from 'fsbrowse';
|
|
15
15
|
import { queries } from './database.js';
|
|
16
16
|
import { runClaudeWithStreaming } from './lib/claude-runner.js';
|
|
17
|
+
import { initializeDescriptors, getAgentDescriptor } from './lib/agent-descriptors.js';
|
|
17
18
|
|
|
18
19
|
const ttsTextAccumulators = new Map();
|
|
19
20
|
|
|
@@ -338,6 +339,7 @@ function discoverAgents() {
|
|
|
338
339
|
}
|
|
339
340
|
|
|
340
341
|
const discoveredAgents = discoverAgents();
|
|
342
|
+
initializeDescriptors(discoveredAgents);
|
|
341
343
|
|
|
342
344
|
const modelCache = new Map();
|
|
343
345
|
|
|
@@ -1884,32 +1886,14 @@ const server = http.createServer(async (req, res) => {
|
|
|
1884
1886
|
const agentDescriptorMatch = pathOnly.match(/^\/api\/agents\/([^/]+)\/descriptor$/);
|
|
1885
1887
|
if (agentDescriptorMatch && req.method === 'GET') {
|
|
1886
1888
|
const agentId = agentDescriptorMatch[1];
|
|
1887
|
-
const
|
|
1888
|
-
|
|
1889
|
-
if (!
|
|
1889
|
+
const descriptor = getAgentDescriptor(agentId);
|
|
1890
|
+
|
|
1891
|
+
if (!descriptor) {
|
|
1890
1892
|
sendJSON(req, res, 404, { error: 'Agent not found' });
|
|
1891
1893
|
return;
|
|
1892
1894
|
}
|
|
1893
1895
|
|
|
1894
|
-
sendJSON(req, res, 200,
|
|
1895
|
-
agentId: agent.id,
|
|
1896
|
-
agentName: agent.name,
|
|
1897
|
-
protocol: agent.protocol || 'direct',
|
|
1898
|
-
capabilities: {
|
|
1899
|
-
streaming: true,
|
|
1900
|
-
cancel: true,
|
|
1901
|
-
resume: agent.protocol === 'direct',
|
|
1902
|
-
stateful: true
|
|
1903
|
-
},
|
|
1904
|
-
inputSchema: {
|
|
1905
|
-
type: 'object',
|
|
1906
|
-
properties: {
|
|
1907
|
-
content: { type: 'string', description: 'The prompt to send to the agent' }
|
|
1908
|
-
},
|
|
1909
|
-
required: ['content']
|
|
1910
|
-
},
|
|
1911
|
-
stateFormat: 'opaque'
|
|
1912
|
-
});
|
|
1896
|
+
sendJSON(req, res, 200, descriptor);
|
|
1913
1897
|
return;
|
|
1914
1898
|
}
|
|
1915
1899
|
|