agentgui 1.0.385 → 1.0.387
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.prd +255 -0
- package/lib/sse-stream.js +125 -0
- package/package.json +1 -1
- package/server.js +431 -65
- package/test-acp-endpoints.js +119 -0
- package/test-cancel.mjs +185 -0
package/.prd
ADDED
|
@@ -0,0 +1,255 @@
|
|
|
1
|
+
# AgentGUI ACP Compliance PRD
|
|
2
|
+
|
|
3
|
+
## Overview
|
|
4
|
+
Transform AgentGUI into a fully ACP (Agent Connect Protocol) v0.2.3 compliant server while fixing UI consistency issues and optimizing WebSocket usage.
|
|
5
|
+
|
|
6
|
+
**Current Status**: ~30% ACP compliant (basic conversation/message CRUD exists)
|
|
7
|
+
**Target**: 100% ACP compliant with all endpoints, thread management, stateless runs, and run control
|
|
8
|
+
|
|
9
|
+
**Note on "Slash Commands"**: ACP spec contains no slash command concept. This is purely a client-side UI feature outside ACP scope. If user wants slash commands implemented, that would be a separate UI enhancement task.
|
|
10
|
+
|
|
11
|
+
---
|
|
12
|
+
|
|
13
|
+
## Dependency Graph & Execution Waves
|
|
14
|
+
|
|
15
|
+
### WAVE 3: Streaming & Run Control (2 items - after Wave 2)
|
|
16
|
+
|
|
17
|
+
**3.1** SSE (Server-Sent Events) Streaming
|
|
18
|
+
- BLOCKS: 2.1, 2.2, 2.3
|
|
19
|
+
- BLOCKED_BY: 4.1
|
|
20
|
+
- Implement SSE endpoint format (Content-Type: text/event-stream)
|
|
21
|
+
- Stream run outputs as ACP `RunOutputStream` format
|
|
22
|
+
- Support both `ValueRunResultUpdate` and `CustomRunResultUpdate` modes
|
|
23
|
+
- Event types: data, error, done
|
|
24
|
+
- Keep-alive pings every 15 seconds
|
|
25
|
+
- Handle client disconnect gracefully
|
|
26
|
+
- Convert existing chunk/event stream to SSE format
|
|
27
|
+
- Parallel SSE + WebSocket support (both work simultaneously)
|
|
28
|
+
|
|
29
|
+
**3.2** Run Cancellation & Control
|
|
30
|
+
- BLOCKS: 1.1, 1.2
|
|
31
|
+
- BLOCKED_BY: 4.1
|
|
32
|
+
- Implement run status state machine: pending → active → completed/error/cancelled
|
|
33
|
+
- Cancel endpoint kills agent process (SIGTERM then SIGKILL)
|
|
34
|
+
- Update run status to 'cancelled' in database
|
|
35
|
+
- Broadcast cancellation via WebSocket
|
|
36
|
+
- Clean up active execution tracking
|
|
37
|
+
- Return 409 if run already completed/cancelled
|
|
38
|
+
- Wait endpoint implements long-polling (30s timeout, return current status)
|
|
39
|
+
- Handle graceful degradation if agent doesn't support cancellation
|
|
40
|
+
|
|
41
|
+
### WAVE 4: UI Fixes & Optimization (3 items - after Wave 3)
|
|
42
|
+
|
|
43
|
+
**4.1** Thread Sidebar UI Consistency
|
|
44
|
+
- BLOCKS: 2.1, 2.2, 3.1
|
|
45
|
+
- BLOCKED_BY: nothing
|
|
46
|
+
- Audit conversation list rendering: verify agent display matches conversation.agentId
|
|
47
|
+
- Ensure model selection persists when loading existing conversation
|
|
48
|
+
- On conversation resume: restore last-used agent and model to UI selectors
|
|
49
|
+
- Fix any duplicate agent/model displays in sidebar or header
|
|
50
|
+
- Test: create conversation with agent A, reload page, verify agent A shown
|
|
51
|
+
- Test: switch to agent B mid-conversation, reload, verify agent B shown
|
|
52
|
+
- Store agent/model in conversation record, use as source of truth
|
|
53
|
+
|
|
54
|
+
**4.2** WebSocket Usage Optimization
|
|
55
|
+
- BLOCKS: 3.1
|
|
56
|
+
- BLOCKED_BY: nothing
|
|
57
|
+
- Audit all broadcastSync calls: identify high-frequency low-value messages
|
|
58
|
+
- Batch streaming_progress events (max 10 events per 100ms window)
|
|
59
|
+
- Only broadcast to subscribed clients (per sessionId or conversationId)
|
|
60
|
+
- Compress large payloads before WebSocket send
|
|
61
|
+
- Add message priority: high (errors, completion), normal (progress), low (status)
|
|
62
|
+
- Rate limit per client: max 100 msg/sec
|
|
63
|
+
- Implement message deduplication for identical consecutive events
|
|
64
|
+
- Monitor: track bytes sent per client, log if >1MB/sec sustained
|
|
65
|
+
|
|
66
|
+
**4.3** Consolidate Duplicate Displays
|
|
67
|
+
- BLOCKS: 4.1
|
|
68
|
+
- BLOCKED_BY: nothing
|
|
69
|
+
- Identify all places where agent/model info is displayed
|
|
70
|
+
- Remove duplicate displays: keep one authoritative location per UI section
|
|
71
|
+
- Sidebar: show agent name only (remove if duplicated elsewhere)
|
|
72
|
+
- Header/toolbar: show model + agent if conversation active
|
|
73
|
+
- Message bubbles: show agent avatar/name per message only if multi-agent conversation
|
|
74
|
+
- Test: verify no redundant agent/model text after changes
|
|
75
|
+
|
|
76
|
+
---
|
|
77
|
+
|
|
78
|
+
## Additional Enhancements (Non-blocking)
|
|
79
|
+
|
|
80
|
+
### NICE-TO-HAVE 1: Webhook Callbacks
|
|
81
|
+
- Implement webhook support for run status changes
|
|
82
|
+
- POST to webhook URL when run status changes (pending → active → completed)
|
|
83
|
+
- Retry logic: 3 attempts with exponential backoff
|
|
84
|
+
- Store webhook config in run_metadata table
|
|
85
|
+
- Validate webhook URL format on run creation
|
|
86
|
+
|
|
87
|
+
### NICE-TO-HAVE 2: Run Interrupts
|
|
88
|
+
- Support interrupt mechanism for agents that implement it
|
|
89
|
+
- Interrupt types: user feedback request, tool approval, configuration needed
|
|
90
|
+
- Store interrupt state in sessions table
|
|
91
|
+
- API endpoints: GET /runs/{id}/interrupts, POST /runs/{id}/resume with interrupt response
|
|
92
|
+
- UI: show interrupt prompt, collect user input, resume run
|
|
93
|
+
|
|
94
|
+
### NICE-TO-HAVE 3: Enhanced Search & Filtering
|
|
95
|
+
- Full-text search on thread content (messages, agent responses)
|
|
96
|
+
- Filter by agent type, date range, status, metadata fields
|
|
97
|
+
- Search history: recent searches saved per user
|
|
98
|
+
- Autocomplete for search filters
|
|
99
|
+
- Export search results as JSON
|
|
100
|
+
|
|
101
|
+
### NICE-TO-HAVE 4: Thread Templates
|
|
102
|
+
- Save thread configuration as template
|
|
103
|
+
- Templates include: agent, model, initial prompt, working directory
|
|
104
|
+
- Clone thread from template
|
|
105
|
+
- Share templates between users (if multi-user support added)
|
|
106
|
+
|
|
107
|
+
---
|
|
108
|
+
|
|
109
|
+
## Testing Requirements (Per Item)
|
|
110
|
+
|
|
111
|
+
Each implementation item must include:
|
|
112
|
+
1. Execute in plugin:gm:dev: create test run for every endpoint/function
|
|
113
|
+
2. Success paths: valid inputs, expected outputs verified
|
|
114
|
+
3. Error paths: invalid inputs, 404s, 409s, 422s verified
|
|
115
|
+
4. Edge cases: empty results, large payloads, concurrent requests
|
|
116
|
+
5. Integration tests: end-to-end flow (create thread → run → stream → cancel)
|
|
117
|
+
6. Database verification: inspect tables after operations, verify foreign keys
|
|
118
|
+
7. WebSocket verification: subscribe, receive events, verify payload format
|
|
119
|
+
8. SSE verification: curl endpoint, verify event-stream format
|
|
120
|
+
|
|
121
|
+
---
|
|
122
|
+
|
|
123
|
+
## Acceptance Criteria (All Must Pass)
|
|
124
|
+
|
|
125
|
+
### Core ACP Compliance
|
|
126
|
+
- [ ] All 23 ACP endpoints implemented and tested
|
|
127
|
+
- [ ] All ACP data models match spec (Thread, ThreadState, Run, Agent, etc.)
|
|
128
|
+
- [ ] Error responses follow ACP format (ErrorResponse schema)
|
|
129
|
+
- [ ] SSE streaming works with curl: `curl -N /threads/{id}/runs/stream`
|
|
130
|
+
- [ ] Stateless runs work without thread context
|
|
131
|
+
- [ ] Run cancellation kills agent process within 5 seconds
|
|
132
|
+
- [ ] Thread copy duplicates all states and checkpoints
|
|
133
|
+
- [ ] Agent descriptors return valid JSON matching AgentACPDescriptor schema
|
|
134
|
+
|
|
135
|
+
### Database Integrity
|
|
136
|
+
- [ ] No orphaned records after thread/run deletion
|
|
137
|
+
- [ ] Foreign key constraints enforced
|
|
138
|
+
- [ ] Thread status correctly reflects run states
|
|
139
|
+
- [ ] Checkpoint sequences monotonically increase
|
|
140
|
+
- [ ] WAL mode enabled, queries under 100ms for typical operations
|
|
141
|
+
|
|
142
|
+
### UI Consistency
|
|
143
|
+
- [ ] Sidebar shows correct agent for each conversation
|
|
144
|
+
- [ ] Model selection persists after page reload
|
|
145
|
+
- [ ] No duplicate agent/model displays found
|
|
146
|
+
- [ ] Agent/model changes reflected in database immediately
|
|
147
|
+
|
|
148
|
+
### WebSocket Optimization
|
|
149
|
+
- [ ] Streaming progress events batched (max 10/100ms)
|
|
150
|
+
- [ ] Only subscribed clients receive messages
|
|
151
|
+
- [ ] No client exceeds 1MB/sec sustained WebSocket traffic
|
|
152
|
+
- [ ] Message deduplication prevents identical consecutive events
|
|
153
|
+
|
|
154
|
+
### Integration & E2E
|
|
155
|
+
- [ ] Full flow: create thread → start run → stream events → cancel → verify cancelled
|
|
156
|
+
- [ ] Stateless run: create run → stream → complete → verify output
|
|
157
|
+
- [ ] Thread search: create 10 threads → search by metadata → verify correct results
|
|
158
|
+
- [ ] Agent search: search by capability "streaming" → verify all streaming agents returned
|
|
159
|
+
- [ ] Thread copy: create thread with 5 runs → copy → verify new thread has all history
|
|
160
|
+
- [ ] Concurrent runs blocked: start run on thread → start second run → verify 409 conflict
|
|
161
|
+
|
|
162
|
+
---
|
|
163
|
+
|
|
164
|
+
## Migration Strategy
|
|
165
|
+
|
|
166
|
+
### Backward Compatibility
|
|
167
|
+
- Existing conversations map to threads (1:1)
|
|
168
|
+
- Existing sessions map to thread runs
|
|
169
|
+
- `/api/conversations/*` endpoints remain functional (alias to `/threads/*`)
|
|
170
|
+
- Old WebSocket message formats supported alongside new ACP formats
|
|
171
|
+
- No breaking changes to current client code
|
|
172
|
+
|
|
173
|
+
### Rollout Plan
|
|
174
|
+
1. Deploy database schema changes (additive only, no drops)
|
|
175
|
+
2. Deploy new ACP endpoints alongside existing endpoints
|
|
176
|
+
3. Update client to use ACP endpoints where beneficial
|
|
177
|
+
4. Deprecation notice for old endpoints (6 month window)
|
|
178
|
+
5. Remove old endpoints after deprecation period
|
|
179
|
+
|
|
180
|
+
---
|
|
181
|
+
|
|
182
|
+
## Out of Scope
|
|
183
|
+
|
|
184
|
+
- Multi-user authentication/authorization
|
|
185
|
+
- Slash command implementation (not in ACP spec, pure client feature)
|
|
186
|
+
- Agent marketplace or discovery service
|
|
187
|
+
- Real-time collaboration on threads
|
|
188
|
+
- Thread branching/forking (beyond simple copy)
|
|
189
|
+
- Custom agent development framework
|
|
190
|
+
- Billing/metering for agent usage
|
|
191
|
+
|
|
192
|
+
---
|
|
193
|
+
|
|
194
|
+
## Technical Notes
|
|
195
|
+
|
|
196
|
+
### ACP Terminology Mapping
|
|
197
|
+
- AgentGUI "conversations" = ACP "threads"
|
|
198
|
+
- AgentGUI "sessions" = ACP "runs" (stateful, on a thread)
|
|
199
|
+
- AgentGUI "chunks/events" = ACP "run output stream"
|
|
200
|
+
- AgentGUI "claudeSessionId" = ACP checkpoint ID concept
|
|
201
|
+
|
|
202
|
+
### Known Gotchas
|
|
203
|
+
- ACP requires UUID format for thread_id, run_id, agent_id (current AgentGUI uses strings)
|
|
204
|
+
- SSE requires newline-delimited format, different from current JSON streaming
|
|
205
|
+
- Run cancellation must handle agents that don't support it gracefully
|
|
206
|
+
- Thread status "idle" means no pending runs; must validate on run creation
|
|
207
|
+
- Webhook URLs must be validated to prevent SSRF attacks
|
|
208
|
+
|
|
209
|
+
### Performance Targets
|
|
210
|
+
- Thread search: <200ms for 10,000 threads
|
|
211
|
+
- Run creation: <50ms (background processing)
|
|
212
|
+
- SSE streaming: <10ms latency per event
|
|
213
|
+
- WebSocket batch: <100ms accumulation window
|
|
214
|
+
- Database writes: <20ms per transaction
|
|
215
|
+
|
|
216
|
+
---
|
|
217
|
+
|
|
218
|
+
## Dependencies
|
|
219
|
+
|
|
220
|
+
**External**:
|
|
221
|
+
- None (all features implemented with existing dependencies)
|
|
222
|
+
|
|
223
|
+
**Internal**:
|
|
224
|
+
- database.js (extended with new tables/queries)
|
|
225
|
+
- server.js (new route handlers)
|
|
226
|
+
- lib/claude-runner.js (run cancellation support)
|
|
227
|
+
- static/js/client.js (UI consistency fixes)
|
|
228
|
+
- static/js/conversations.js (agent/model persistence)
|
|
229
|
+
- static/js/websocket-manager.js (optimization)
|
|
230
|
+
|
|
231
|
+
**Configuration**:
|
|
232
|
+
- No new env vars required
|
|
233
|
+
- Existing BASE_URL, PORT, STARTUP_CWD remain unchanged
|
|
234
|
+
|
|
235
|
+
---
|
|
236
|
+
|
|
237
|
+
## Success Metrics
|
|
238
|
+
|
|
239
|
+
- ACP compliance score: ~30% → 100%
|
|
240
|
+
- API endpoint coverage: 20 → 43 endpoints
|
|
241
|
+
- WebSocket bandwidth: ≥50% reduction in bytes/sec per client
|
|
242
|
+
- UI consistency issues: 4 identified → 0 remaining
|
|
243
|
+
- Database tables: 5 → 8 (conversations, messages, sessions, events, chunks, thread_states, checkpoints, run_metadata)
|
|
244
|
+
- Test coverage: endpoint tests for all 43 routes, integration tests for all critical flows
|
|
245
|
+
|
|
246
|
+
---
|
|
247
|
+
|
|
248
|
+
## Timeline Estimate
|
|
249
|
+
|
|
250
|
+
- Wave 1 (Foundation): 3 parallel tasks = 1 completion cycle
|
|
251
|
+
- Wave 2 (Core APIs): 3 parallel tasks = 1 completion cycle
|
|
252
|
+
- Wave 3 (Streaming): 2 tasks = 1 completion cycle
|
|
253
|
+
- Wave 4 (UI Fixes): 3 tasks = 1 completion cycle
|
|
254
|
+
|
|
255
|
+
**Total**: 4 completion cycles (waves executed sequentially, items within wave executed in parallel with max 3 concurrent subagents per wave)
|
|
@@ -0,0 +1,125 @@
|
|
|
1
|
+
import crypto from 'crypto';
|
|
2
|
+
|
|
3
|
+
/**
 * Formats one Server-Sent Events frame.
 *
 * @param {string} [eventType] - Optional SSE event name ("event:" line).
 * @param {*} [data] - Payload; strings pass through, anything else is JSON-stringified.
 * @returns {string} A complete SSE frame terminated by a blank line.
 */
export function formatSSEEvent(eventType, data) {
  const lines = [];
  if (eventType) {
    lines.push(`event: ${eventType}`);
  }
  if (data !== undefined && data !== null) {
    const payload = typeof data === 'string' ? data : JSON.stringify(data);
    // The SSE wire format requires every line of a multi-line payload to
    // carry its own "data:" prefix; a raw newline inside a single data
    // line would terminate the frame early on the client.
    for (const line of payload.split('\n')) {
      lines.push(`data: ${line}`);
    }
  }
  // Blank line terminates the frame.
  lines.push('');
  return lines.join('\n') + '\n';
}
|
|
15
|
+
|
|
16
|
+
/**
 * Wraps a streaming block in the ACP CustomRunResultUpdate envelope.
 *
 * @param {string} sessionId - Session identifier, forwarded as the ACP run_id.
 * @param {object} block - Raw streaming block to forward as the update payload.
 * @param {string} [runStatus='active'] - Current run status to report.
 * @returns {{id: string, event: string, data: object}} SSE-ready event object.
 */
export function convertToACPRunOutputStream(sessionId, block, runStatus = 'active') {
  const payload = {
    type: 'custom',
    run_id: sessionId,
    status: runStatus,
    update: block
  };
  return {
    id: crypto.randomUUID(),
    event: 'agent_event',
    data: payload
  };
}
|
|
29
|
+
|
|
30
|
+
/**
 * Builds an ACP error event for a failed run.
 *
 * @param {string} runId - Run the error belongs to.
 * @param {string} errorMessage - Human-readable error description.
 * @param {string} [errorCode='execution_error'] - Machine-readable error code.
 * @returns {{id: string, event: string, data: object}} SSE-ready error event.
 */
export function createErrorEvent(runId, errorMessage, errorCode = 'execution_error') {
  const body = {
    type: 'error',
    run_id: runId,
    error: errorMessage,
    code: errorCode,
    status: 'error'
  };
  return {
    id: crypto.randomUUID(),
    event: 'agent_event',
    data: body
  };
}
|
|
44
|
+
|
|
45
|
+
/**
 * Builds the terminal ACP result event for a successfully completed run.
 *
 * @param {string} runId - Run that completed.
 * @param {object} [values={}] - Final output values of the run.
 * @param {object} [metadata={}] - Additional metadata (e.g. timestamps).
 * @returns {{id: string, event: string, data: object}} SSE-ready completion event.
 */
export function createCompletionEvent(runId, values = {}, metadata = {}) {
  const body = {
    type: 'result',
    run_id: runId,
    status: 'completed',
    values,
    metadata
  };
  return {
    id: crypto.randomUUID(),
    event: 'agent_event',
    data: body
  };
}
|
|
59
|
+
|
|
60
|
+
/**
 * Returns the SSE comment frame used as a keep-alive heartbeat.
 * Lines starting with ":" are ignored by EventSource clients.
 *
 * @returns {string} The heartbeat frame.
 */
export function createKeepAlive() {
  const comment = ': ping';
  return `${comment}\n\n`;
}
|
|
63
|
+
|
|
64
|
+
/**
 * Manages one Server-Sent Events response for a single run.
 *
 * Writes the SSE headers, emits a comment heartbeat every 15 seconds so
 * intermediaries do not drop the idle connection, and tears the stream
 * down when either side closes. All writes become no-ops once closed.
 */
export class SSEStreamManager {
  /**
   * @param {object} res - Node HTTP response the events are written to.
   * @param {string} runId - Run whose output this stream carries.
   */
  constructor(res, runId) {
    this.res = res;
    this.runId = runId;
    this.keepAliveInterval = null; // heartbeat timer handle
    this.closed = false;           // once true, every write is suppressed
  }

  /** Sends the SSE headers and arms the heartbeat plus close handling. */
  start() {
    const headers = {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
      'Connection': 'keep-alive',
      'X-Accel-Buffering': 'no' // keep nginx from buffering the stream
    };
    this.res.writeHead(200, headers);

    this.keepAliveInterval = setInterval(() => {
      if (this.closed) return;
      this.writeRaw(createKeepAlive());
    }, 15000);

    // A client disconnect must release the timer and end the response.
    this.res.on('close', () => this.cleanup());
  }

  /** Writes raw text to the response unless the stream is already closed. */
  writeRaw(text) {
    if (this.closed) return;
    this.res.write(text);
  }

  /** Emits one ACP progress update for the given streaming block. */
  sendProgress(block, runStatus = 'active') {
    const event = convertToACPRunOutputStream(this.runId, block, runStatus);
    this.writeRaw(formatSSEEvent('message', event.data));
  }

  /** Emits an ACP error event for this run. */
  sendError(errorMessage, errorCode = 'execution_error') {
    const event = createErrorEvent(this.runId, errorMessage, errorCode);
    this.writeRaw(formatSSEEvent('error', event.data));
  }

  /** Emits the terminal "done" event carrying final values and metadata. */
  sendComplete(values = {}, metadata = {}) {
    const event = createCompletionEvent(this.runId, values, metadata);
    this.writeRaw(formatSSEEvent('done', event.data));
  }

  /** Stops the heartbeat, marks the stream closed, and ends the response. */
  cleanup() {
    if (this.keepAliveInterval) {
      clearInterval(this.keepAliveInterval);
      this.keepAliveInterval = null;
    }
    this.closed = true;
    if (!this.res.writableEnded) {
      this.res.end();
    }
  }
}
|
package/package.json
CHANGED
package/server.js
CHANGED
|
@@ -12,10 +12,10 @@ import { OAuth2Client } from 'google-auth-library';
|
|
|
12
12
|
import express from 'express';
|
|
13
13
|
import Busboy from 'busboy';
|
|
14
14
|
import fsbrowse from 'fsbrowse';
|
|
15
|
-
import { queries
|
|
16
|
-
import { createACPQueries } from './acp-queries.js';
|
|
15
|
+
import { queries } from './database.js';
|
|
17
16
|
import { runClaudeWithStreaming } from './lib/claude-runner.js';
|
|
18
17
|
import { initializeDescriptors, getAgentDescriptor } from './lib/agent-descriptors.js';
|
|
18
|
+
import { SSEStreamManager } from './lib/sse-stream.js';
|
|
19
19
|
|
|
20
20
|
const ttsTextAccumulators = new Map();
|
|
21
21
|
|
|
@@ -215,6 +215,8 @@ const activeExecutions = new Map();
|
|
|
215
215
|
const activeScripts = new Map();
|
|
216
216
|
const messageQueues = new Map();
|
|
217
217
|
const rateLimitState = new Map();
|
|
218
|
+
const activeProcessesByRunId = new Map();
|
|
219
|
+
const acpQueries = queries;
|
|
218
220
|
const STUCK_AGENT_THRESHOLD_MS = 600000;
|
|
219
221
|
const NO_PID_GRACE_PERIOD_MS = 60000;
|
|
220
222
|
const DEFAULT_RATE_LIMIT_COOLDOWN_MS = 60000;
|
|
@@ -341,8 +343,6 @@ function discoverAgents() {
|
|
|
341
343
|
|
|
342
344
|
const discoveredAgents = discoverAgents();
|
|
343
345
|
initializeDescriptors(discoveredAgents);
|
|
344
|
-
const acpQueries = createACPQueries(db, prepare);
|
|
345
|
-
acpQueries.getAgentDescriptor = getAgentDescriptor;
|
|
346
346
|
|
|
347
347
|
const modelCache = new Map();
|
|
348
348
|
|
|
@@ -1772,7 +1772,7 @@ const server = http.createServer(async (req, res) => {
|
|
|
1772
1772
|
|
|
1773
1773
|
if (pathOnly === '/api/agents/search' && req.method === 'POST') {
|
|
1774
1774
|
const body = await parseBody(req);
|
|
1775
|
-
const result =
|
|
1775
|
+
const result = queries.searchAgents(discoveredAgents, body);
|
|
1776
1776
|
sendJSON(req, res, 200, result);
|
|
1777
1777
|
return;
|
|
1778
1778
|
}
|
|
@@ -1906,14 +1906,14 @@ const server = http.createServer(async (req, res) => {
|
|
|
1906
1906
|
sendJSON(req, res, 404, { error: 'Agent not found' });
|
|
1907
1907
|
return;
|
|
1908
1908
|
}
|
|
1909
|
-
const run =
|
|
1909
|
+
const run = queries.createRun(agent_id, null, input, config, webhook_url);
|
|
1910
1910
|
sendJSON(req, res, 201, run);
|
|
1911
1911
|
return;
|
|
1912
1912
|
}
|
|
1913
1913
|
|
|
1914
1914
|
if (pathOnly === '/api/runs/search' && req.method === 'POST') {
|
|
1915
1915
|
const body = await parseBody(req);
|
|
1916
|
-
const result =
|
|
1916
|
+
const result = queries.searchRuns(body);
|
|
1917
1917
|
sendJSON(req, res, 200, result);
|
|
1918
1918
|
return;
|
|
1919
1919
|
}
|
|
@@ -1930,27 +1930,42 @@ const server = http.createServer(async (req, res) => {
|
|
|
1930
1930
|
sendJSON(req, res, 404, { error: 'Agent not found' });
|
|
1931
1931
|
return;
|
|
1932
1932
|
}
|
|
1933
|
-
const run =
|
|
1934
|
-
res
|
|
1935
|
-
|
|
1936
|
-
|
|
1937
|
-
|
|
1938
|
-
});
|
|
1939
|
-
res.write('data: ' + JSON.stringify({ type: 'run_created', run_id: run.run_id }) + '\n\n');
|
|
1933
|
+
const run = queries.createRun(agent_id, null, input, config);
|
|
1934
|
+
const sseManager = new SSEStreamManager(res, run.run_id);
|
|
1935
|
+
sseManager.start();
|
|
1936
|
+
sseManager.sendProgress({ type: 'run_created', run_id: run.run_id });
|
|
1937
|
+
|
|
1940
1938
|
const eventHandler = (eventData) => {
|
|
1941
1939
|
if (eventData.sessionId === run.run_id || eventData.conversationId === run.thread_id) {
|
|
1942
|
-
|
|
1940
|
+
if (eventData.type === 'streaming_progress' && eventData.block) {
|
|
1941
|
+
sseManager.sendProgress(eventData.block);
|
|
1942
|
+
} else if (eventData.type === 'streaming_error') {
|
|
1943
|
+
sseManager.sendError(eventData.error || 'Execution error');
|
|
1944
|
+
} else if (eventData.type === 'streaming_complete') {
|
|
1945
|
+
sseManager.sendComplete({ eventCount: eventData.eventCount }, { timestamp: eventData.timestamp });
|
|
1946
|
+
sseManager.cleanup();
|
|
1947
|
+
}
|
|
1943
1948
|
}
|
|
1944
1949
|
};
|
|
1945
|
-
|
|
1946
|
-
|
|
1947
|
-
|
|
1948
|
-
|
|
1949
|
-
|
|
1950
|
+
|
|
1951
|
+
sseStreamHandlers.set(run.run_id, eventHandler);
|
|
1952
|
+
req.on('close', () => {
|
|
1953
|
+
sseStreamHandlers.delete(run.run_id);
|
|
1954
|
+
sseManager.cleanup();
|
|
1955
|
+
});
|
|
1956
|
+
|
|
1957
|
+
const statelessThreadId = queries.getRun(run.run_id)?.thread_id;
|
|
1950
1958
|
if (statelessThreadId) {
|
|
1951
1959
|
const conv = queries.getConversation(statelessThreadId);
|
|
1952
1960
|
if (conv && input?.content) {
|
|
1953
|
-
|
|
1961
|
+
const session = queries.createSession(statelessThreadId);
|
|
1962
|
+
acpQueries.updateRunStatus(run.run_id, 'active');
|
|
1963
|
+
activeExecutions.set(statelessThreadId, { pid: null, startTime: Date.now(), sessionId: session.id, lastActivity: Date.now() });
|
|
1964
|
+
activeProcessesByRunId.set(run.run_id, { threadId: statelessThreadId, sessionId: session.id });
|
|
1965
|
+
queries.setIsStreaming(statelessThreadId, true);
|
|
1966
|
+
processMessageWithStreaming(statelessThreadId, null, session.id, input.content, agent_id, config?.model || null)
|
|
1967
|
+
.then(() => { acpQueries.updateRunStatus(run.run_id, 'success'); activeProcessesByRunId.delete(run.run_id); })
|
|
1968
|
+
.catch((err) => { acpQueries.updateRunStatus(run.run_id, 'error'); activeProcessesByRunId.delete(run.run_id); sseManager.sendError(err.message); sseManager.cleanup(); });
|
|
1954
1969
|
}
|
|
1955
1970
|
}
|
|
1956
1971
|
return;
|
|
@@ -1968,15 +1983,15 @@ const server = http.createServer(async (req, res) => {
|
|
|
1968
1983
|
sendJSON(req, res, 404, { error: 'Agent not found' });
|
|
1969
1984
|
return;
|
|
1970
1985
|
}
|
|
1971
|
-
const run =
|
|
1972
|
-
const statelessThreadId =
|
|
1986
|
+
const run = queries.createRun(agent_id, null, input, config);
|
|
1987
|
+
const statelessThreadId = queries.getRun(run.run_id)?.thread_id;
|
|
1973
1988
|
if (statelessThreadId && input?.content) {
|
|
1974
1989
|
try {
|
|
1975
1990
|
await runClaudeWithStreaming(agent_id, statelessThreadId, input.content, config?.model || null);
|
|
1976
|
-
const finalRun =
|
|
1991
|
+
const finalRun = queries.getRun(run.run_id);
|
|
1977
1992
|
sendJSON(req, res, 200, finalRun);
|
|
1978
1993
|
} catch (err) {
|
|
1979
|
-
|
|
1994
|
+
queries.updateRunStatus(run.run_id, 'error');
|
|
1980
1995
|
sendJSON(req, res, 500, { error: err.message });
|
|
1981
1996
|
}
|
|
1982
1997
|
} else {
|
|
@@ -1990,7 +2005,7 @@ const server = http.createServer(async (req, res) => {
|
|
|
1990
2005
|
const runId = oldRunByIdMatch1[1];
|
|
1991
2006
|
|
|
1992
2007
|
if (req.method === 'GET') {
|
|
1993
|
-
const run =
|
|
2008
|
+
const run = queries.getRun(runId);
|
|
1994
2009
|
if (!run) {
|
|
1995
2010
|
sendJSON(req, res, 404, { error: 'Run not found' });
|
|
1996
2011
|
return;
|
|
@@ -2001,7 +2016,7 @@ const server = http.createServer(async (req, res) => {
|
|
|
2001
2016
|
|
|
2002
2017
|
if (req.method === 'POST') {
|
|
2003
2018
|
const body = await parseBody(req);
|
|
2004
|
-
const run =
|
|
2019
|
+
const run = queries.getRun(runId);
|
|
2005
2020
|
if (!run) {
|
|
2006
2021
|
sendJSON(req, res, 404, { error: 'Run not found' });
|
|
2007
2022
|
return;
|
|
@@ -2019,7 +2034,7 @@ const server = http.createServer(async (req, res) => {
|
|
|
2019
2034
|
|
|
2020
2035
|
if (req.method === 'DELETE') {
|
|
2021
2036
|
try {
|
|
2022
|
-
|
|
2037
|
+
queries.deleteRun(runId);
|
|
2023
2038
|
res.writeHead(204);
|
|
2024
2039
|
res.end();
|
|
2025
2040
|
} catch (err) {
|
|
@@ -2032,17 +2047,22 @@ const server = http.createServer(async (req, res) => {
|
|
|
2032
2047
|
const runWaitMatch = pathOnly.match(/^\/api\/runs\/([^/]+)\/wait$/);
|
|
2033
2048
|
if (runWaitMatch && req.method === 'GET') {
|
|
2034
2049
|
const runId = runWaitMatch[1];
|
|
2035
|
-
const run =
|
|
2050
|
+
const run = queries.getRun(runId);
|
|
2036
2051
|
if (!run) {
|
|
2037
2052
|
sendJSON(req, res, 404, { error: 'Run not found' });
|
|
2038
2053
|
return;
|
|
2039
2054
|
}
|
|
2040
2055
|
const startTime = Date.now();
|
|
2041
2056
|
const pollInterval = setInterval(() => {
|
|
2042
|
-
const currentRun =
|
|
2043
|
-
|
|
2057
|
+
const currentRun = queries.getRun(runId);
|
|
2058
|
+
const elapsed = Date.now() - startTime;
|
|
2059
|
+
const done = currentRun && ['success', 'error', 'cancelled'].includes(currentRun.status);
|
|
2060
|
+
if (done) {
|
|
2044
2061
|
clearInterval(pollInterval);
|
|
2045
|
-
sendJSON(req, res, 200, currentRun
|
|
2062
|
+
sendJSON(req, res, 200, currentRun);
|
|
2063
|
+
} else if (elapsed > 30000) {
|
|
2064
|
+
clearInterval(pollInterval);
|
|
2065
|
+
sendJSON(req, res, 408, { error: 'Run still pending after 30s', run_id: runId, status: currentRun?.status || run.status });
|
|
2046
2066
|
}
|
|
2047
2067
|
}, 500);
|
|
2048
2068
|
req.on('close', () => clearInterval(pollInterval));
|
|
@@ -2052,26 +2072,34 @@ const server = http.createServer(async (req, res) => {
|
|
|
2052
2072
|
const runStreamMatch = pathOnly.match(/^\/api\/runs\/([^/]+)\/stream$/);
|
|
2053
2073
|
if (runStreamMatch && req.method === 'GET') {
|
|
2054
2074
|
const runId = runStreamMatch[1];
|
|
2055
|
-
const run =
|
|
2075
|
+
const run = queries.getRun(runId);
|
|
2056
2076
|
if (!run) {
|
|
2057
2077
|
sendJSON(req, res, 404, { error: 'Run not found' });
|
|
2058
2078
|
return;
|
|
2059
2079
|
}
|
|
2060
|
-
|
|
2061
|
-
|
|
2062
|
-
|
|
2063
|
-
|
|
2064
|
-
|
|
2065
|
-
res.write('data: ' + JSON.stringify({ type: 'joined', run_id: runId }) + '\n\n');
|
|
2080
|
+
|
|
2081
|
+
const sseManager = new SSEStreamManager(res, runId);
|
|
2082
|
+
sseManager.start();
|
|
2083
|
+
sseManager.sendProgress({ type: 'joined', run_id: runId });
|
|
2084
|
+
|
|
2066
2085
|
const eventHandler = (eventData) => {
|
|
2067
2086
|
if (eventData.sessionId === runId || eventData.conversationId === run.thread_id) {
|
|
2068
|
-
|
|
2087
|
+
if (eventData.type === 'streaming_progress' && eventData.block) {
|
|
2088
|
+
sseManager.sendProgress(eventData.block);
|
|
2089
|
+
} else if (eventData.type === 'streaming_error') {
|
|
2090
|
+
sseManager.sendError(eventData.error || 'Execution error');
|
|
2091
|
+
} else if (eventData.type === 'streaming_complete') {
|
|
2092
|
+
sseManager.sendComplete({ eventCount: eventData.eventCount }, { timestamp: eventData.timestamp });
|
|
2093
|
+
sseManager.cleanup();
|
|
2094
|
+
}
|
|
2069
2095
|
}
|
|
2070
2096
|
};
|
|
2071
|
-
|
|
2072
|
-
|
|
2073
|
-
|
|
2074
|
-
|
|
2097
|
+
|
|
2098
|
+
sseStreamHandlers.set(runId, eventHandler);
|
|
2099
|
+
req.on('close', () => {
|
|
2100
|
+
sseStreamHandlers.delete(runId);
|
|
2101
|
+
sseManager.cleanup();
|
|
2102
|
+
});
|
|
2075
2103
|
return;
|
|
2076
2104
|
}
|
|
2077
2105
|
|
|
@@ -2079,17 +2107,144 @@ const server = http.createServer(async (req, res) => {
|
|
|
2079
2107
|
if (oldRunCancelMatch1 && req.method === 'POST') {
|
|
2080
2108
|
const runId = oldRunCancelMatch1[1];
|
|
2081
2109
|
try {
|
|
2082
|
-
const run =
|
|
2083
|
-
|
|
2084
|
-
|
|
2085
|
-
|
|
2110
|
+
const run = queries.getRun(runId);
|
|
2111
|
+
if (!run) {
|
|
2112
|
+
sendJSON(req, res, 404, { error: 'Run not found' });
|
|
2113
|
+
return;
|
|
2114
|
+
}
|
|
2115
|
+
|
|
2116
|
+
if (['success', 'error', 'cancelled'].includes(run.status)) {
|
|
2117
|
+
sendJSON(req, res, 409, { error: 'Run already completed or cancelled' });
|
|
2118
|
+
return;
|
|
2119
|
+
}
|
|
2120
|
+
|
|
2121
|
+
const cancelledRun = queries.cancelRun(runId);
|
|
2122
|
+
|
|
2123
|
+
const threadId = run.thread_id;
|
|
2124
|
+
if (threadId) {
|
|
2125
|
+
const execution = activeExecutions.get(threadId);
|
|
2126
|
+
if (execution?.pid) {
|
|
2127
|
+
try {
|
|
2128
|
+
process.kill(-execution.pid, 'SIGTERM');
|
|
2129
|
+
} catch {
|
|
2130
|
+
try {
|
|
2131
|
+
process.kill(execution.pid, 'SIGTERM');
|
|
2132
|
+
} catch (e) {
|
|
2133
|
+
console.error(`[cancel] Failed to SIGTERM PID ${execution.pid}:`, e.message);
|
|
2134
|
+
}
|
|
2135
|
+
}
|
|
2136
|
+
|
|
2137
|
+
setTimeout(() => {
|
|
2138
|
+
try {
|
|
2139
|
+
process.kill(-execution.pid, 'SIGKILL');
|
|
2140
|
+
} catch {
|
|
2141
|
+
try {
|
|
2142
|
+
process.kill(execution.pid, 'SIGKILL');
|
|
2143
|
+
} catch (e) {}
|
|
2144
|
+
}
|
|
2145
|
+
}, 3000);
|
|
2146
|
+
}
|
|
2147
|
+
|
|
2148
|
+
if (execution?.sessionId) {
|
|
2149
|
+
queries.updateSession(execution.sessionId, {
|
|
2150
|
+
status: 'error',
|
|
2151
|
+
error: 'Cancelled by user',
|
|
2152
|
+
completed_at: Date.now()
|
|
2153
|
+
});
|
|
2154
|
+
}
|
|
2155
|
+
|
|
2156
|
+
activeExecutions.delete(threadId);
|
|
2157
|
+
queries.setIsStreaming(threadId, false);
|
|
2158
|
+
|
|
2159
|
+
broadcastSync({
|
|
2160
|
+
type: 'streaming_cancelled',
|
|
2161
|
+
sessionId: execution?.sessionId || runId,
|
|
2162
|
+
conversationId: threadId,
|
|
2163
|
+
runId: runId,
|
|
2164
|
+
timestamp: Date.now()
|
|
2165
|
+
});
|
|
2166
|
+
}
|
|
2167
|
+
|
|
2168
|
+
sendJSON(req, res, 200, cancelledRun);
|
|
2169
|
+
} catch (err) {
|
|
2170
|
+
if (err.message === 'Run not found') {
|
|
2171
|
+
sendJSON(req, res, 404, { error: err.message });
|
|
2172
|
+
} else if (err.message.includes('already completed')) {
|
|
2173
|
+
sendJSON(req, res, 409, { error: err.message });
|
|
2174
|
+
} else {
|
|
2175
|
+
sendJSON(req, res, 500, { error: err.message });
|
|
2176
|
+
}
|
|
2177
|
+
}
|
|
2178
|
+
return;
|
|
2179
|
+
}
|
|
2180
|
+
|
|
2181
|
+
const threadRunCancelMatch = pathOnly.match(/^\/api\/threads\/([^/]+)\/runs\/([^/]+)\/cancel$/);
|
|
2182
|
+
if (threadRunCancelMatch && req.method === 'POST') {
|
|
2183
|
+
const threadId = threadRunCancelMatch[1];
|
|
2184
|
+
const runId = threadRunCancelMatch[2];
|
|
2185
|
+
|
|
2186
|
+
try {
|
|
2187
|
+
const run = queries.getRun(runId);
|
|
2188
|
+
if (!run) {
|
|
2189
|
+
sendJSON(req, res, 404, { error: 'Run not found' });
|
|
2190
|
+
return;
|
|
2191
|
+
}
|
|
2192
|
+
|
|
2193
|
+
if (run.thread_id !== threadId) {
|
|
2194
|
+
sendJSON(req, res, 400, { error: 'Run does not belong to specified thread' });
|
|
2195
|
+
return;
|
|
2196
|
+
}
|
|
2197
|
+
|
|
2198
|
+
if (['success', 'error', 'cancelled'].includes(run.status)) {
|
|
2199
|
+
sendJSON(req, res, 409, { error: 'Run already completed or cancelled' });
|
|
2200
|
+
return;
|
|
2201
|
+
}
|
|
2202
|
+
|
|
2203
|
+
const cancelledRun = queries.cancelRun(runId);
|
|
2204
|
+
|
|
2205
|
+
const execution = activeExecutions.get(threadId);
|
|
2206
|
+
if (execution?.pid) {
|
|
2207
|
+
try {
|
|
2208
|
+
process.kill(-execution.pid, 'SIGTERM');
|
|
2209
|
+
} catch {
|
|
2210
|
+
try {
|
|
2211
|
+
process.kill(execution.pid, 'SIGTERM');
|
|
2212
|
+
} catch (e) {
|
|
2213
|
+
console.error(`[cancel] Failed to SIGTERM PID ${execution.pid}:`, e.message);
|
|
2214
|
+
}
|
|
2215
|
+
}
|
|
2216
|
+
|
|
2086
2217
|
setTimeout(() => {
|
|
2087
|
-
|
|
2088
|
-
|
|
2218
|
+
try {
|
|
2219
|
+
process.kill(-execution.pid, 'SIGKILL');
|
|
2220
|
+
} catch {
|
|
2221
|
+
try {
|
|
2222
|
+
process.kill(execution.pid, 'SIGKILL');
|
|
2223
|
+
} catch (e) {}
|
|
2089
2224
|
}
|
|
2090
|
-
},
|
|
2225
|
+
}, 3000);
|
|
2091
2226
|
}
|
|
2092
|
-
|
|
2227
|
+
|
|
2228
|
+
if (execution?.sessionId) {
|
|
2229
|
+
queries.updateSession(execution.sessionId, {
|
|
2230
|
+
status: 'error',
|
|
2231
|
+
error: 'Cancelled by user',
|
|
2232
|
+
completed_at: Date.now()
|
|
2233
|
+
});
|
|
2234
|
+
}
|
|
2235
|
+
|
|
2236
|
+
activeExecutions.delete(threadId);
|
|
2237
|
+
queries.setIsStreaming(threadId, false);
|
|
2238
|
+
|
|
2239
|
+
broadcastSync({
|
|
2240
|
+
type: 'streaming_cancelled',
|
|
2241
|
+
sessionId: execution?.sessionId || runId,
|
|
2242
|
+
conversationId: threadId,
|
|
2243
|
+
runId: runId,
|
|
2244
|
+
timestamp: Date.now()
|
|
2245
|
+
});
|
|
2246
|
+
|
|
2247
|
+
sendJSON(req, res, 200, cancelledRun);
|
|
2093
2248
|
} catch (err) {
|
|
2094
2249
|
if (err.message === 'Run not found') {
|
|
2095
2250
|
sendJSON(req, res, 404, { error: err.message });
|
|
@@ -2102,6 +2257,34 @@ const server = http.createServer(async (req, res) => {
|
|
|
2102
2257
|
return;
|
|
2103
2258
|
}
|
|
2104
2259
|
|
|
2260
|
+
const threadRunWaitMatch = pathOnly.match(/^\/api\/threads\/([^/]+)\/runs\/([^/]+)\/wait$/);
|
|
2261
|
+
if (threadRunWaitMatch && req.method === 'GET') {
|
|
2262
|
+
const threadId = threadRunWaitMatch[1];
|
|
2263
|
+
const runId = threadRunWaitMatch[2];
|
|
2264
|
+
|
|
2265
|
+
const run = queries.getRun(runId);
|
|
2266
|
+
if (!run) {
|
|
2267
|
+
sendJSON(req, res, 404, { error: 'Run not found' });
|
|
2268
|
+
return;
|
|
2269
|
+
}
|
|
2270
|
+
|
|
2271
|
+
if (run.thread_id !== threadId) {
|
|
2272
|
+
sendJSON(req, res, 400, { error: 'Run does not belong to specified thread' });
|
|
2273
|
+
return;
|
|
2274
|
+
}
|
|
2275
|
+
|
|
2276
|
+
const startTime = Date.now();
|
|
2277
|
+
const pollInterval = setInterval(() => {
|
|
2278
|
+
const currentRun = queries.getRun(runId);
|
|
2279
|
+
if (!currentRun || ['success', 'error', 'cancelled'].includes(currentRun.status) || (Date.now() - startTime) > 30000) {
|
|
2280
|
+
clearInterval(pollInterval);
|
|
2281
|
+
sendJSON(req, res, 200, currentRun || run);
|
|
2282
|
+
}
|
|
2283
|
+
}, 500);
|
|
2284
|
+
req.on('close', () => clearInterval(pollInterval));
|
|
2285
|
+
return;
|
|
2286
|
+
}
|
|
2287
|
+
|
|
2105
2288
|
if (pathOnly === '/api/gemini-oauth/start' && req.method === 'POST') {
|
|
2106
2289
|
try {
|
|
2107
2290
|
const result = await startGeminiOAuth(req);
|
|
@@ -2608,6 +2791,7 @@ const server = http.createServer(async (req, res) => {
|
|
|
2608
2791
|
|
|
2609
2792
|
// POST /threads - Create empty thread
|
|
2610
2793
|
if (pathOnly === '/api/threads' && req.method === 'POST') {
|
|
2794
|
+
console.log('[ACP] POST /api/threads HIT');
|
|
2611
2795
|
try {
|
|
2612
2796
|
const body = await parseBody(req);
|
|
2613
2797
|
const metadata = body.metadata || {};
|
|
@@ -2744,6 +2928,179 @@ const server = http.createServer(async (req, res) => {
|
|
|
2744
2928
|
return;
|
|
2745
2929
|
}
|
|
2746
2930
|
|
|
2931
|
+
// POST /threads/{thread_id}/runs/stream - Create run on thread and stream output
|
|
2932
|
+
const threadRunsStreamMatch = pathOnly.match(/^\/api\/threads\/([a-f0-9-]{36})\/runs\/stream$/);
|
|
2933
|
+
if (threadRunsStreamMatch && req.method === 'POST') {
|
|
2934
|
+
const threadId = threadRunsStreamMatch[1];
|
|
2935
|
+
try {
|
|
2936
|
+
const body = await parseBody(req);
|
|
2937
|
+
const { agent_id, input, config } = body;
|
|
2938
|
+
|
|
2939
|
+
const thread = queries.getThread(threadId);
|
|
2940
|
+
if (!thread) {
|
|
2941
|
+
sendJSON(req, res, 404, { error: 'Thread not found', type: 'not_found' });
|
|
2942
|
+
return;
|
|
2943
|
+
}
|
|
2944
|
+
|
|
2945
|
+
if (thread.status !== 'idle') {
|
|
2946
|
+
sendJSON(req, res, 409, { error: 'Thread has pending runs', type: 'conflict' });
|
|
2947
|
+
return;
|
|
2948
|
+
}
|
|
2949
|
+
|
|
2950
|
+
const agent = discoveredAgents.find(a => a.id === agent_id);
|
|
2951
|
+
if (!agent) {
|
|
2952
|
+
sendJSON(req, res, 404, { error: 'Agent not found', type: 'not_found' });
|
|
2953
|
+
return;
|
|
2954
|
+
}
|
|
2955
|
+
|
|
2956
|
+
const run = queries.createRun(agent_id, threadId, input, config);
|
|
2957
|
+
const sseManager = new SSEStreamManager(res, run.run_id);
|
|
2958
|
+
sseManager.start();
|
|
2959
|
+
sseManager.sendProgress({ type: 'run_created', run_id: run.run_id, thread_id: threadId });
|
|
2960
|
+
|
|
2961
|
+
const eventHandler = (eventData) => {
|
|
2962
|
+
if (eventData.sessionId === run.run_id || eventData.conversationId === threadId) {
|
|
2963
|
+
if (eventData.type === 'streaming_progress' && eventData.block) {
|
|
2964
|
+
sseManager.sendProgress(eventData.block);
|
|
2965
|
+
} else if (eventData.type === 'streaming_error') {
|
|
2966
|
+
sseManager.sendError(eventData.error || 'Execution error');
|
|
2967
|
+
} else if (eventData.type === 'streaming_complete') {
|
|
2968
|
+
sseManager.sendComplete({ eventCount: eventData.eventCount }, { timestamp: eventData.timestamp });
|
|
2969
|
+
sseManager.cleanup();
|
|
2970
|
+
}
|
|
2971
|
+
}
|
|
2972
|
+
};
|
|
2973
|
+
|
|
2974
|
+
sseStreamHandlers.set(run.run_id, eventHandler);
|
|
2975
|
+
req.on('close', () => {
|
|
2976
|
+
sseStreamHandlers.delete(run.run_id);
|
|
2977
|
+
sseManager.cleanup();
|
|
2978
|
+
});
|
|
2979
|
+
|
|
2980
|
+
const conv = queries.getConversation(threadId);
|
|
2981
|
+
if (conv && input?.content) {
|
|
2982
|
+
const session = queries.createSession(threadId);
|
|
2983
|
+
queries.updateRunStatus(run.run_id, 'active');
|
|
2984
|
+
activeExecutions.set(threadId, { pid: null, startTime: Date.now(), sessionId: session.id, lastActivity: Date.now() });
|
|
2985
|
+
activeProcessesByRunId.set(run.run_id, { threadId, sessionId: session.id });
|
|
2986
|
+
queries.setIsStreaming(threadId, true);
|
|
2987
|
+
processMessageWithStreaming(threadId, null, session.id, input.content, agent_id, config?.model || null)
|
|
2988
|
+
.then(() => { queries.updateRunStatus(run.run_id, 'success'); activeProcessesByRunId.delete(run.run_id); })
|
|
2989
|
+
.catch((err) => { queries.updateRunStatus(run.run_id, 'error'); activeProcessesByRunId.delete(run.run_id); sseManager.sendError(err.message); sseManager.cleanup(); });
|
|
2990
|
+
}
|
|
2991
|
+
} catch (err) {
|
|
2992
|
+
sendJSON(req, res, 422, { error: err.message, type: 'validation_error' });
|
|
2993
|
+
}
|
|
2994
|
+
return;
|
|
2995
|
+
}
|
|
2996
|
+
|
|
2997
|
+
// GET /threads/{thread_id}/runs/{run_id}/stream - Stream output from run on thread
|
|
2998
|
+
const threadRunStreamMatch = pathOnly.match(/^\/api\/threads\/([a-f0-9-]{36})\/runs\/([a-f0-9-]{36})\/stream$/);
|
|
2999
|
+
if (threadRunStreamMatch && req.method === 'GET') {
|
|
3000
|
+
const threadId = threadRunStreamMatch[1];
|
|
3001
|
+
const runId = threadRunStreamMatch[2];
|
|
3002
|
+
|
|
3003
|
+
const thread = queries.getThread(threadId);
|
|
3004
|
+
if (!thread) {
|
|
3005
|
+
sendJSON(req, res, 404, { error: 'Thread not found', type: 'not_found' });
|
|
3006
|
+
return;
|
|
3007
|
+
}
|
|
3008
|
+
|
|
3009
|
+
const run = queries.getRun(runId);
|
|
3010
|
+
if (!run || run.thread_id !== threadId) {
|
|
3011
|
+
sendJSON(req, res, 404, { error: 'Run not found on thread', type: 'not_found' });
|
|
3012
|
+
return;
|
|
3013
|
+
}
|
|
3014
|
+
|
|
3015
|
+
const sseManager = new SSEStreamManager(res, runId);
|
|
3016
|
+
sseManager.start();
|
|
3017
|
+
sseManager.sendProgress({ type: 'joined', run_id: runId, thread_id: threadId });
|
|
3018
|
+
|
|
3019
|
+
const eventHandler = (eventData) => {
|
|
3020
|
+
if (eventData.sessionId === runId || eventData.conversationId === threadId) {
|
|
3021
|
+
if (eventData.type === 'streaming_progress' && eventData.block) {
|
|
3022
|
+
sseManager.sendProgress(eventData.block);
|
|
3023
|
+
} else if (eventData.type === 'streaming_error') {
|
|
3024
|
+
sseManager.sendError(eventData.error || 'Execution error');
|
|
3025
|
+
} else if (eventData.type === 'streaming_complete') {
|
|
3026
|
+
sseManager.sendComplete({ eventCount: eventData.eventCount }, { timestamp: eventData.timestamp });
|
|
3027
|
+
sseManager.cleanup();
|
|
3028
|
+
}
|
|
3029
|
+
}
|
|
3030
|
+
};
|
|
3031
|
+
|
|
3032
|
+
sseStreamHandlers.set(runId, eventHandler);
|
|
3033
|
+
req.on('close', () => {
|
|
3034
|
+
sseStreamHandlers.delete(runId);
|
|
3035
|
+
sseManager.cleanup();
|
|
3036
|
+
});
|
|
3037
|
+
return;
|
|
3038
|
+
}
|
|
3039
|
+
|
|
3040
|
+
// POST /threads/{thread_id}/runs/{run_id}/cancel - Cancel a run on a thread
|
|
3041
|
+
const threadRunCancelMatch = pathOnly.match(/^\/api\/threads\/([a-f0-9-]{36})\/runs\/([a-f0-9-]{36})\/cancel$/);
|
|
3042
|
+
if (threadRunCancelMatch && req.method === 'POST') {
|
|
3043
|
+
const threadId = threadRunCancelMatch[1];
|
|
3044
|
+
const runId = threadRunCancelMatch[2];
|
|
3045
|
+
try {
|
|
3046
|
+
const run = queries.getRun(runId);
|
|
3047
|
+
if (!run || run.thread_id !== threadId) {
|
|
3048
|
+
sendJSON(req, res, 404, { error: 'Run not found on thread', type: 'not_found' });
|
|
3049
|
+
return;
|
|
3050
|
+
}
|
|
3051
|
+
if (['success', 'error', 'cancelled'].includes(run.status)) {
|
|
3052
|
+
sendJSON(req, res, 409, { error: 'Run already completed or cancelled', type: 'conflict' });
|
|
3053
|
+
return;
|
|
3054
|
+
}
|
|
3055
|
+
const cancelledRun = queries.cancelRun(runId);
|
|
3056
|
+
const execution = activeExecutions.get(threadId);
|
|
3057
|
+
if (execution?.pid) {
|
|
3058
|
+
try { process.kill(-execution.pid, 'SIGTERM'); } catch { try { process.kill(execution.pid, 'SIGTERM'); } catch (e) {} }
|
|
3059
|
+
setTimeout(() => {
|
|
3060
|
+
try { process.kill(-execution.pid, 'SIGKILL'); } catch { try { process.kill(execution.pid, 'SIGKILL'); } catch (e) {} }
|
|
3061
|
+
}, 3000);
|
|
3062
|
+
}
|
|
3063
|
+
if (execution?.sessionId) {
|
|
3064
|
+
queries.updateSession(execution.sessionId, { status: 'error', error: 'Cancelled by user', completed_at: Date.now() });
|
|
3065
|
+
}
|
|
3066
|
+
activeExecutions.delete(threadId);
|
|
3067
|
+
activeProcessesByRunId.delete(runId);
|
|
3068
|
+
queries.setIsStreaming(threadId, false);
|
|
3069
|
+
broadcastSync({ type: 'run_cancelled', runId, threadId, sessionId: execution?.sessionId, timestamp: Date.now() });
|
|
3070
|
+
sendJSON(req, res, 200, cancelledRun);
|
|
3071
|
+
} catch (err) {
|
|
3072
|
+
sendJSON(req, res, 500, { error: err.message, type: 'internal_error' });
|
|
3073
|
+
}
|
|
3074
|
+
return;
|
|
3075
|
+
}
|
|
3076
|
+
|
|
3077
|
+
// GET /threads/{thread_id}/runs/{run_id}/wait - Long-poll for run completion on thread
|
|
3078
|
+
const threadRunWaitMatch = pathOnly.match(/^\/api\/threads\/([a-f0-9-]{36})\/runs\/([a-f0-9-]{36})\/wait$/);
|
|
3079
|
+
if (threadRunWaitMatch && req.method === 'GET') {
|
|
3080
|
+
const threadId = threadRunWaitMatch[1];
|
|
3081
|
+
const runId = threadRunWaitMatch[2];
|
|
3082
|
+
const run = queries.getRun(runId);
|
|
3083
|
+
if (!run || run.thread_id !== threadId) {
|
|
3084
|
+
sendJSON(req, res, 404, { error: 'Run not found on thread', type: 'not_found' });
|
|
3085
|
+
return;
|
|
3086
|
+
}
|
|
3087
|
+
const startTime = Date.now();
|
|
3088
|
+
const pollInterval = setInterval(() => {
|
|
3089
|
+
const currentRun = queries.getRun(runId);
|
|
3090
|
+
const elapsed = Date.now() - startTime;
|
|
3091
|
+
const done = currentRun && ['success', 'error', 'cancelled'].includes(currentRun.status);
|
|
3092
|
+
if (done) {
|
|
3093
|
+
clearInterval(pollInterval);
|
|
3094
|
+
sendJSON(req, res, 200, currentRun);
|
|
3095
|
+
} else if (elapsed > 30000) {
|
|
3096
|
+
clearInterval(pollInterval);
|
|
3097
|
+
sendJSON(req, res, 408, { error: 'Run still pending after 30s', run_id: runId, status: currentRun?.status || run.status });
|
|
3098
|
+
}
|
|
3099
|
+
}, 500);
|
|
3100
|
+
req.on('close', () => clearInterval(pollInterval));
|
|
3101
|
+
return;
|
|
3102
|
+
}
|
|
3103
|
+
|
|
2747
3104
|
if (routePath.startsWith('/api/image/')) {
|
|
2748
3105
|
const imagePath = routePath.slice('/api/image/'.length);
|
|
2749
3106
|
const decodedPath = decodeURIComponent(imagePath);
|
|
@@ -3318,6 +3675,7 @@ const wss = new WebSocketServer({
|
|
|
3318
3675
|
const hotReloadClients = [];
|
|
3319
3676
|
const syncClients = new Set();
|
|
3320
3677
|
const subscriptionIndex = new Map();
|
|
3678
|
+
const sseStreamHandlers = new Map();
|
|
3321
3679
|
|
|
3322
3680
|
wss.on('connection', (ws, req) => {
|
|
3323
3681
|
// req.url in WebSocket is just the path (e.g., '/gm/sync'), not a full URL
|
|
@@ -3491,25 +3849,33 @@ function sendToClient(ws, data) {
|
|
|
3491
3849
|
}
|
|
3492
3850
|
|
|
3493
3851
|
function broadcastSync(event) {
|
|
3494
|
-
if (syncClients.size === 0) return;
|
|
3495
3852
|
const data = JSON.stringify(event);
|
|
3496
3853
|
const isBroadcast = BROADCAST_TYPES.has(event.type);
|
|
3497
3854
|
|
|
3498
|
-
|
|
3499
|
-
|
|
3500
|
-
|
|
3855
|
+
// Send to WebSocket clients
|
|
3856
|
+
if (syncClients.size > 0) {
|
|
3857
|
+
if (isBroadcast) {
|
|
3858
|
+
for (const ws of syncClients) sendToClient(ws, data);
|
|
3859
|
+
} else {
|
|
3860
|
+
const targets = new Set();
|
|
3861
|
+
if (event.sessionId) {
|
|
3862
|
+
const subs = subscriptionIndex.get(event.sessionId);
|
|
3863
|
+
if (subs) for (const ws of subs) targets.add(ws);
|
|
3864
|
+
}
|
|
3865
|
+
if (event.conversationId) {
|
|
3866
|
+
const subs = subscriptionIndex.get(`conv-${event.conversationId}`);
|
|
3867
|
+
if (subs) for (const ws of subs) targets.add(ws);
|
|
3868
|
+
}
|
|
3869
|
+
for (const ws of targets) sendToClient(ws, data);
|
|
3870
|
+
}
|
|
3501
3871
|
}
|
|
3502
3872
|
|
|
3503
|
-
|
|
3504
|
-
if (
|
|
3505
|
-
const
|
|
3506
|
-
|
|
3507
|
-
|
|
3508
|
-
if (event.conversationId) {
|
|
3509
|
-
const subs = subscriptionIndex.get(`conv-${event.conversationId}`);
|
|
3510
|
-
if (subs) for (const ws of subs) targets.add(ws);
|
|
3873
|
+
// Send to SSE handlers
|
|
3874
|
+
if (sseStreamHandlers.size > 0) {
|
|
3875
|
+
for (const [runId, handler] of sseStreamHandlers.entries()) {
|
|
3876
|
+
handler(event);
|
|
3877
|
+
}
|
|
3511
3878
|
}
|
|
3512
|
-
for (const ws of targets) sendToClient(ws, data);
|
|
3513
3879
|
}
|
|
3514
3880
|
|
|
3515
3881
|
// Heartbeat interval to detect stale connections
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
const http = require('http');
|
|
4
|
+
|
|
5
|
+
const BASE_URL = '/gm';
|
|
6
|
+
const PORT = 3000;
|
|
7
|
+
|
|
8
|
+
function makeRequest(method, path, body = null) {
|
|
9
|
+
return new Promise((resolve, reject) => {
|
|
10
|
+
const options = {
|
|
11
|
+
hostname: 'localhost',
|
|
12
|
+
port: PORT,
|
|
13
|
+
path: BASE_URL + path,
|
|
14
|
+
method: method,
|
|
15
|
+
headers: body ? {
|
|
16
|
+
'Content-Type': 'application/json',
|
|
17
|
+
'Content-Length': Buffer.byteLength(JSON.stringify(body))
|
|
18
|
+
} : {}
|
|
19
|
+
};
|
|
20
|
+
|
|
21
|
+
const req = http.request(options, (res) => {
|
|
22
|
+
let data = '';
|
|
23
|
+
res.on('data', (chunk) => { data += chunk; });
|
|
24
|
+
res.on('end', () => {
|
|
25
|
+
try {
|
|
26
|
+
resolve({ status: res.statusCode, data: data ? JSON.parse(data) : null, raw: data });
|
|
27
|
+
} catch {
|
|
28
|
+
resolve({ status: res.statusCode, data: null, raw: data });
|
|
29
|
+
}
|
|
30
|
+
});
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
req.on('error', reject);
|
|
34
|
+
|
|
35
|
+
if (body) {
|
|
36
|
+
req.write(JSON.stringify(body));
|
|
37
|
+
}
|
|
38
|
+
req.end();
|
|
39
|
+
});
|
|
40
|
+
}
|
|
41
|
+
|
|
42
|
+
async function runTests() {
|
|
43
|
+
console.log('Testing ACP Agents & Stateless Runs Endpoints\n');
|
|
44
|
+
|
|
45
|
+
const tests = [
|
|
46
|
+
{
|
|
47
|
+
name: 'POST /api/agents/search - empty search',
|
|
48
|
+
test: async () => {
|
|
49
|
+
const res = await makeRequest('POST', '/api/agents/search', {});
|
|
50
|
+
return res.status === 200 && res.data.agents !== undefined;
|
|
51
|
+
}
|
|
52
|
+
},
|
|
53
|
+
{
|
|
54
|
+
name: 'POST /api/agents/search - search by name',
|
|
55
|
+
test: async () => {
|
|
56
|
+
const res = await makeRequest('POST', '/api/agents/search', { name: 'Claude' });
|
|
57
|
+
return res.status === 200 && Array.isArray(res.data.agents);
|
|
58
|
+
}
|
|
59
|
+
},
|
|
60
|
+
{
|
|
61
|
+
name: 'GET /api/agents/claude-code',
|
|
62
|
+
test: async () => {
|
|
63
|
+
const res = await makeRequest('GET', '/api/agents/claude-code');
|
|
64
|
+
return res.status === 200 || res.status === 404;
|
|
65
|
+
}
|
|
66
|
+
},
|
|
67
|
+
{
|
|
68
|
+
name: 'GET /api/agents/claude-code/descriptor',
|
|
69
|
+
test: async () => {
|
|
70
|
+
const res = await makeRequest('GET', '/api/agents/claude-code/descriptor');
|
|
71
|
+
return (res.status === 200 && res.data.metadata && res.data.specs) || res.status === 404;
|
|
72
|
+
}
|
|
73
|
+
},
|
|
74
|
+
{
|
|
75
|
+
name: 'POST /api/runs/search',
|
|
76
|
+
test: async () => {
|
|
77
|
+
const res = await makeRequest('POST', '/api/runs/search', {});
|
|
78
|
+
return res.status === 200 && res.data.runs !== undefined;
|
|
79
|
+
}
|
|
80
|
+
},
|
|
81
|
+
{
|
|
82
|
+
name: 'POST /api/runs - missing agent_id',
|
|
83
|
+
test: async () => {
|
|
84
|
+
const res = await makeRequest('POST', '/api/runs', {});
|
|
85
|
+
return res.status === 422;
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
];
|
|
89
|
+
|
|
90
|
+
let passed = 0;
|
|
91
|
+
let failed = 0;
|
|
92
|
+
|
|
93
|
+
for (const t of tests) {
|
|
94
|
+
try {
|
|
95
|
+
const success = await t.test();
|
|
96
|
+
if (success) {
|
|
97
|
+
console.log(`✓ ${t.name}`);
|
|
98
|
+
passed++;
|
|
99
|
+
} else {
|
|
100
|
+
console.log(`✗ ${t.name}`);
|
|
101
|
+
failed++;
|
|
102
|
+
}
|
|
103
|
+
} catch (err) {
|
|
104
|
+
console.log(`✗ ${t.name} - ${err.message}`);
|
|
105
|
+
failed++;
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
console.log(`\nResults: ${passed} passed, ${failed} failed`);
|
|
110
|
+
process.exit(failed > 0 ? 1 : 0);
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
http.get(`http://localhost:${PORT}${BASE_URL}/`, (res) => {
|
|
114
|
+
console.log('Server is running\n');
|
|
115
|
+
runTests();
|
|
116
|
+
}).on('error', () => {
|
|
117
|
+
console.log('Server is not running. Please start with: npm run dev');
|
|
118
|
+
process.exit(1);
|
|
119
|
+
});
|
package/test-cancel.mjs
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
1
|
+
// Integration test for run cancellation and control
|
|
2
|
+
import http from 'http';
|
|
3
|
+
import { randomUUID } from 'crypto';
|
|
4
|
+
import Database from 'better-sqlite3';
|
|
5
|
+
import path from 'path';
|
|
6
|
+
import os from 'os';
|
|
7
|
+
import { createACPQueries } from './acp-queries.js';
|
|
8
|
+
|
|
9
|
+
const dbPath = path.join(os.homedir(), '.gmgui', 'data.db');
|
|
10
|
+
const db = new Database(dbPath);
|
|
11
|
+
const prep = (sql) => db.prepare(sql);
|
|
12
|
+
const acpQueries = createACPQueries(db, prep);
|
|
13
|
+
|
|
14
|
+
const BASE_URL = 'http://localhost:3000/gm';
|
|
15
|
+
const testResults = {
|
|
16
|
+
passed: [],
|
|
17
|
+
failed: []
|
|
18
|
+
};
|
|
19
|
+
|
|
20
|
+
function testPass(name) {
|
|
21
|
+
testResults.passed.push(name);
|
|
22
|
+
console.log(`✓ ${name}`);
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
function testFail(name, error) {
|
|
26
|
+
testResults.failed.push({ name, error });
|
|
27
|
+
console.log(`✗ ${name}: ${error}`);
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
async function makeRequest(method, path, body = null) {
|
|
31
|
+
return new Promise((resolve, reject) => {
|
|
32
|
+
const fullPath = `/gm${path}`;
|
|
33
|
+
const options = {
|
|
34
|
+
method,
|
|
35
|
+
hostname: 'localhost',
|
|
36
|
+
port: 3000,
|
|
37
|
+
path: fullPath,
|
|
38
|
+
headers: {
|
|
39
|
+
'Content-Type': 'application/json'
|
|
40
|
+
}
|
|
41
|
+
};
|
|
42
|
+
|
|
43
|
+
const req = http.request(options, (res) => {
|
|
44
|
+
let data = '';
|
|
45
|
+
res.on('data', chunk => data += chunk);
|
|
46
|
+
res.on('end', () => {
|
|
47
|
+
try {
|
|
48
|
+
const parsed = data ? JSON.parse(data) : null;
|
|
49
|
+
resolve({ status: res.statusCode, data: parsed, headers: res.headers });
|
|
50
|
+
} catch {
|
|
51
|
+
resolve({ status: res.statusCode, data: data, headers: res.headers });
|
|
52
|
+
}
|
|
53
|
+
});
|
|
54
|
+
});
|
|
55
|
+
|
|
56
|
+
req.on('error', reject);
|
|
57
|
+
if (body) req.write(JSON.stringify(body));
|
|
58
|
+
req.end();
|
|
59
|
+
});
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
async function runTests() {
|
|
63
|
+
console.log('=== RUNNING INTEGRATION TESTS ===\n');
|
|
64
|
+
|
|
65
|
+
try {
|
|
66
|
+
// Test 1: Create a thread
|
|
67
|
+
console.log('[Test 1] Creating thread...');
|
|
68
|
+
const threadResp = await makeRequest('POST', '/api/threads', {});
|
|
69
|
+
if ((threadResp.status === 200 || threadResp.status === 201) && threadResp.data.thread_id) {
|
|
70
|
+
testPass('Thread creation');
|
|
71
|
+
} else {
|
|
72
|
+
testFail('Thread creation', `Status ${threadResp.status}`);
|
|
73
|
+
return;
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
const threadId = threadResp.data.thread_id;
|
|
77
|
+
|
|
78
|
+
// Test 2: Create a run (stateless, without thread)
|
|
79
|
+
console.log('[Test 2] Creating stateless run...');
|
|
80
|
+
const runResp = await makeRequest('POST', '/api/runs', {
|
|
81
|
+
agent_id: 'claude-code',
|
|
82
|
+
input: 'test input'
|
|
83
|
+
});
|
|
84
|
+
if (runResp.status === 200 && runResp.data.run_id) {
|
|
85
|
+
testPass('Stateless run creation');
|
|
86
|
+
} else {
|
|
87
|
+
testFail('Stateless run creation', `Status ${runResp.status}`);
|
|
88
|
+
return;
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
const runId = runResp.data.run_id;
|
|
92
|
+
|
|
93
|
+
// Test 3: Verify run status is pending
|
|
94
|
+
console.log('[Test 3] Verifying run status...');
|
|
95
|
+
const run = acpQueries.getRun(runId);
|
|
96
|
+
if (run && run.status === 'pending') {
|
|
97
|
+
testPass('Run status is pending');
|
|
98
|
+
} else {
|
|
99
|
+
testFail('Run status is pending', `Status is ${run?.status}`);
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
// Test 4: Cancel the run using /api/runs/{run_id}/cancel
|
|
103
|
+
console.log('[Test 4] Cancelling run via /api/runs/{run_id}/cancel...');
|
|
104
|
+
const cancelResp = await makeRequest('POST', `/api/runs/${runId}/cancel`);
|
|
105
|
+
if (cancelResp.status === 200 && cancelResp.data.status === 'cancelled') {
|
|
106
|
+
testPass('Run cancellation via /api/runs');
|
|
107
|
+
} else {
|
|
108
|
+
testFail('Run cancellation via /api/runs', `Status ${cancelResp.status}, run status ${cancelResp.data?.status}`);
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
// Test 5: Verify run status is cancelled in database
|
|
112
|
+
console.log('[Test 5] Verifying cancelled status in DB...');
|
|
113
|
+
const cancelledRun = acpQueries.getRun(runId);
|
|
114
|
+
if (cancelledRun && cancelledRun.status === 'cancelled') {
|
|
115
|
+
testPass('Cancelled status persisted in database');
|
|
116
|
+
} else {
|
|
117
|
+
testFail('Cancelled status persisted in database', `Status is ${cancelledRun?.status}`);
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
// Test 6: Try to cancel again - should get 409 conflict
|
|
121
|
+
console.log('[Test 6] Testing 409 conflict on re-cancel...');
|
|
122
|
+
const recancel = await makeRequest('POST', `/api/runs/${runId}/cancel`);
|
|
123
|
+
if (recancel.status === 409) {
|
|
124
|
+
testPass('409 conflict on already-cancelled run');
|
|
125
|
+
} else {
|
|
126
|
+
testFail('409 conflict on already-cancelled run', `Got status ${recancel.status}`);
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
// Test 7: Test wait endpoint with already-completed run
|
|
130
|
+
console.log('[Test 7] Testing wait endpoint with completed run...');
|
|
131
|
+
const waitStart = Date.now();
|
|
132
|
+
const waitResp = await makeRequest('GET', `/api/runs/${runId}/wait`);
|
|
133
|
+
const waitDuration = Date.now() - waitStart;
|
|
134
|
+
if (waitResp.status === 200 && waitDuration < 5000) {
|
|
135
|
+
testPass('Wait endpoint returns immediately for completed run');
|
|
136
|
+
} else {
|
|
137
|
+
testFail('Wait endpoint returns immediately for completed run', `Took ${waitDuration}ms`);
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
// Test 8: Test cancellation of non-existent run
|
|
141
|
+
console.log('[Test 8] Testing 404 on non-existent run...');
|
|
142
|
+
const fakeRunId = randomUUID();
|
|
143
|
+
const notFound = await makeRequest('POST', `/api/runs/${fakeRunId}/cancel`);
|
|
144
|
+
if (notFound.status === 404) {
|
|
145
|
+
testPass('404 on non-existent run');
|
|
146
|
+
} else {
|
|
147
|
+
testFail('404 on non-existent run', `Got status ${notFound.status}`);
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
// Cleanup
|
|
151
|
+
console.log('\n[Cleanup] Deleting test thread...');
|
|
152
|
+
try {
|
|
153
|
+
acpQueries.deleteThread(threadId);
|
|
154
|
+
console.log('Cleanup complete');
|
|
155
|
+
} catch (e) {
|
|
156
|
+
console.log('Cleanup warning:', e.message);
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
} catch (error) {
|
|
160
|
+
console.error('Test suite error:', error);
|
|
161
|
+
testFail('Test suite execution', error.message);
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
db.close();
|
|
165
|
+
|
|
166
|
+
// Summary
|
|
167
|
+
console.log('\n=== TEST SUMMARY ===');
|
|
168
|
+
console.log(`Passed: ${testResults.passed.length}`);
|
|
169
|
+
console.log(`Failed: ${testResults.failed.length}`);
|
|
170
|
+
if (testResults.failed.length > 0) {
|
|
171
|
+
console.log('\nFailed tests:');
|
|
172
|
+
testResults.failed.forEach(f => console.log(` - ${f.name}: ${f.error}`));
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
return testResults.passed.length > 0 && testResults.failed.length === 0;
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
// Run the tests
|
|
179
|
+
runTests().then(success => {
|
|
180
|
+
console.log(`\n${success ? '✓ ALL TESTS PASSED' : '✗ SOME TESTS FAILED'}`);
|
|
181
|
+
process.exit(success ? 0 : 1);
|
|
182
|
+
}).catch(err => {
|
|
183
|
+
console.error('Fatal test error:', err);
|
|
184
|
+
process.exit(1);
|
|
185
|
+
});
|