@anastops/mcp-server 0.1.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/formatters.d.ts.map +1 -1
- package/dist/formatters.js +12 -3
- package/dist/formatters.js.map +1 -1
- package/dist/handlers/agent-handlers.d.ts +8 -0
- package/dist/handlers/agent-handlers.d.ts.map +1 -0
- package/dist/handlers/agent-handlers.js +184 -0
- package/dist/handlers/agent-handlers.js.map +1 -0
- package/dist/handlers/artifact-handlers.d.ts +8 -0
- package/dist/handlers/artifact-handlers.d.ts.map +1 -0
- package/dist/handlers/artifact-handlers.js +122 -0
- package/dist/handlers/artifact-handlers.js.map +1 -0
- package/dist/handlers/cost-handlers.d.ts +8 -0
- package/dist/handlers/cost-handlers.d.ts.map +1 -0
- package/dist/handlers/cost-handlers.js +140 -0
- package/dist/handlers/cost-handlers.js.map +1 -0
- package/dist/handlers/handlers.agent.d.ts +10 -0
- package/dist/handlers/handlers.agent.d.ts.map +1 -0
- package/dist/handlers/handlers.agent.js +99 -0
- package/dist/handlers/handlers.agent.js.map +1 -0
- package/dist/handlers/handlers.base.d.ts +83 -0
- package/dist/handlers/handlers.base.d.ts.map +1 -0
- package/dist/handlers/handlers.base.js +351 -0
- package/dist/handlers/handlers.base.js.map +1 -0
- package/dist/handlers/handlers.lock.d.ts +8 -0
- package/dist/handlers/handlers.lock.d.ts.map +1 -0
- package/dist/handlers/handlers.lock.js +111 -0
- package/dist/handlers/handlers.lock.js.map +1 -0
- package/dist/handlers/handlers.memory.d.ts +11 -0
- package/dist/handlers/handlers.memory.d.ts.map +1 -0
- package/dist/handlers/handlers.memory.js +122 -0
- package/dist/handlers/handlers.memory.js.map +1 -0
- package/dist/handlers/handlers.monitoring.d.ts +8 -0
- package/dist/handlers/handlers.monitoring.d.ts.map +1 -0
- package/dist/handlers/handlers.monitoring.js +99 -0
- package/dist/handlers/handlers.monitoring.js.map +1 -0
- package/dist/handlers/handlers.orchestration.d.ts +9 -0
- package/dist/handlers/handlers.orchestration.d.ts.map +1 -0
- package/dist/handlers/handlers.orchestration.js +128 -0
- package/dist/handlers/handlers.orchestration.js.map +1 -0
- package/dist/handlers/handlers.session.d.ts +18 -0
- package/dist/handlers/handlers.session.d.ts.map +1 -0
- package/dist/handlers/handlers.session.js +286 -0
- package/dist/handlers/handlers.session.js.map +1 -0
- package/dist/handlers/handlers.task.d.ts +15 -0
- package/dist/handlers/handlers.task.d.ts.map +1 -0
- package/dist/handlers/handlers.task.js +762 -0
- package/dist/handlers/handlers.task.js.map +1 -0
- package/dist/handlers/handlers.utility.d.ts +10 -0
- package/dist/handlers/handlers.utility.d.ts.map +1 -0
- package/dist/handlers/handlers.utility.js +59 -0
- package/dist/handlers/handlers.utility.js.map +1 -0
- package/dist/handlers/index.d.ts +18 -0
- package/dist/handlers/index.d.ts.map +1 -0
- package/dist/handlers/index.js +209 -0
- package/dist/handlers/index.js.map +1 -0
- package/dist/handlers/lock-handlers.d.ts +8 -0
- package/dist/handlers/lock-handlers.d.ts.map +1 -0
- package/dist/handlers/lock-handlers.js +154 -0
- package/dist/handlers/lock-handlers.js.map +1 -0
- package/dist/handlers/memory-handlers.d.ts +8 -0
- package/dist/handlers/memory-handlers.d.ts.map +1 -0
- package/dist/handlers/memory-handlers.js +76 -0
- package/dist/handlers/memory-handlers.js.map +1 -0
- package/dist/handlers/orchestration-handlers.d.ts +8 -0
- package/dist/handlers/orchestration-handlers.d.ts.map +1 -0
- package/dist/handlers/orchestration-handlers.js +113 -0
- package/dist/handlers/orchestration-handlers.js.map +1 -0
- package/dist/handlers/session-handlers.d.ts +8 -0
- package/dist/handlers/session-handlers.d.ts.map +1 -0
- package/dist/handlers/session-handlers.js +558 -0
- package/dist/handlers/session-handlers.js.map +1 -0
- package/dist/handlers/task-handlers.d.ts +8 -0
- package/dist/handlers/task-handlers.d.ts.map +1 -0
- package/dist/handlers/task-handlers.js +677 -0
- package/dist/handlers/task-handlers.js.map +1 -0
- package/dist/handlers/tool-definitions.d.ts +2626 -0
- package/dist/handlers/tool-definitions.d.ts.map +1 -0
- package/dist/handlers/tool-definitions.js +641 -0
- package/dist/handlers/tool-definitions.js.map +1 -0
- package/dist/handlers/types.d.ts +90 -0
- package/dist/handlers/types.d.ts.map +1 -0
- package/dist/handlers/types.js +5 -0
- package/dist/handlers/types.js.map +1 -0
- package/dist/handlers/utility-handlers.d.ts +8 -0
- package/dist/handlers/utility-handlers.d.ts.map +1 -0
- package/dist/handlers/utility-handlers.js +113 -0
- package/dist/handlers/utility-handlers.js.map +1 -0
- package/dist/handlers/utils.d.ts +30 -0
- package/dist/handlers/utils.d.ts.map +1 -0
- package/dist/handlers/utils.js +95 -0
- package/dist/handlers/utils.js.map +1 -0
- package/dist/handlers.d.ts +17 -2260
- package/dist/handlers.d.ts.map +1 -1
- package/dist/handlers.js +17 -1836
- package/dist/handlers.js.map +1 -1
- package/dist/index.js +41 -7
- package/dist/index.js.map +1 -1
- package/dist/persistence.d.ts +18 -1
- package/dist/persistence.d.ts.map +1 -1
- package/dist/persistence.js +159 -99
- package/dist/persistence.js.map +1 -1
- package/dist/schemas.d.ts +299 -0
- package/dist/schemas.d.ts.map +1 -0
- package/dist/schemas.js +334 -0
- package/dist/schemas.js.map +1 -0
- package/package.json +11 -8
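
The bulk of this release is a restructuring: dist/handlers.js drops roughly 1,800 lines and the tool handlers move into per-domain modules under dist/handlers/ with a shared dist/handlers/index.js. As a rough illustration of how split handler modules like the one diffed below are typically wired to MCP tool names (a hypothetical sketch, not the package's actual index.js; only the handleTask* exports and task_* tool names are taken from the diff):

// Hypothetical dispatch sketch - NOT the package's dist/handlers/index.js.
// Handler and tool names come from the handlers.task.js diff below;
// the registry shape itself is an assumption.
import {
    handleTaskCreate,
    handleTaskQueue,
    handleTaskStatus,
    handleTaskExecute,
    handleTaskList,
} from './handlers.task.js';

const taskToolHandlers = {
    task_create: handleTaskCreate,
    task_queue: handleTaskQueue,
    task_status: handleTaskStatus,
    task_execute: handleTaskExecute,
    task_list: handleTaskList,
};

export function dispatchTaskTool(toolName, args) {
    const handler = taskToolHandlers[toolName];
    if (handler === undefined) {
        throw new Error(`Unknown tool: ${toolName}`);
    }
    return handler(args);
}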
package/dist/handlers/handlers.task.js
@@ -0,0 +1,762 @@
+/**
+ * MCP Tool Handlers - Task Management
+ * Handles: task_create, task_queue, task_status, task_complete, task_list, task_execute, task_cancel, task_retry, task_batch_create, task_batch_execute
+ */
+import { AdapterRegistry } from '@anastops/adapters';
+import { SessionManager, IntelligentRouter, addBreadcrumb, captureError, ProviderFailoverService, AllProvidersFailedError, } from '@anastops/core';
+import { nanoid } from 'nanoid';
+import { getPersistence } from '../persistence.js';
+import { safePersist, tasks, getTask } from './handlers.base.js';
+// Shared instances
+const router = new IntelligentRouter();
+const registry = AdapterRegistry.getInstance();
+const sessionManager = new SessionManager();
+const failoverService = new ProviderFailoverService(router);
+/**
+ * Process the task queue for a session.
+ * Starts queued tasks up to the concurrency limit if auto_execute is enabled.
+ * Returns the number of tasks started.
+ */
+function processTaskQueue(sessionId) {
+    // Check if session exists and has auto_execute enabled
+    if (!sessionManager.exists(sessionId)) {
+        return Promise.resolve(0);
+    }
+    const session = sessionManager.getSession(sessionId);
+    if (session.queue_config.auto_execute !== true) {
+        return Promise.resolve(0);
+    }
+    // Get all tasks for the session
+    const sessionTasks = [];
+    for (const [, task] of tasks.entries()) {
+        if (task.session_id === sessionId) {
+            sessionTasks.push(task);
+        }
+    }
+    // Count running tasks
+    const runningCount = sessionTasks.filter((t) => t.status === 'running').length;
+    const availableSlots = Math.max(0, session.queue_config.concurrency - runningCount);
+    if (availableSlots === 0) {
+        return Promise.resolve(0);
+    }
+    // Get queued tasks that have met dependencies
+    const taskMap = new Map(sessionTasks.map((t) => [t.id, t]));
+    const queuedTasks = sessionTasks
+        .filter((t) => t.status === 'queued')
+        .filter((task) => {
+            // Check if all dependencies are completed
+            if (task.dependencies.length === 0)
+                return true;
+            return task.dependencies.every((depId) => {
+                const depTask = taskMap.get(depId);
+                return depTask !== undefined && depTask.status === 'completed';
+            });
+        })
+        .sort((a, b) => {
+            // Sort by priority (higher first), then by created_at (earlier first)
+            if (a.priority !== b.priority)
+                return b.priority - a.priority;
+            return a.created_at.getTime() - b.created_at.getTime();
+        })
+        .slice(0, availableSlots);
+    // Start tasks (fire and forget)
+    let startedCount = 0;
+    for (const task of queuedTasks) {
+        // Execute task in background - don't await
+        void executeTaskInBackground(task.id);
+        startedCount++;
+    }
+    return Promise.resolve(startedCount);
+}
+/**
+ * Execute a task in the background.
+ * Used by queue processing to start tasks without blocking.
+ */
+async function executeTaskInBackground(taskId) {
+    try {
+        await handleTaskExecuteInternal(taskId);
+    }
+    catch {
+        // Error is already handled and persisted in handleTaskExecuteInternal
+    }
+}
+/**
+ * Internal task execution logic.
+ * Shared between handleTaskExecute and background execution.
+ */
+async function handleTaskExecuteInternal(taskId) {
+    const task = await getTask(taskId);
+    if (task === null) {
+        throw new Error(`Task not found: ${taskId}`);
+    }
+    if (task.status !== 'pending' && task.status !== 'queued') {
+        return {
+            task_id: taskId,
+            status: task.status,
+            error: `Task cannot be executed - current status is '${task.status}'`,
+        };
+    }
+    addBreadcrumb('Executing task', { task_id: taskId, provider: task.provider }, 'info', 'task');
+    // Update task status to running
+    task.status = 'running';
+    task.started_at = new Date();
+    tasks.set(taskId, task);
+    safePersist(getPersistence().saveTask(task));
+    try {
+        const adapter = registry.get(task.provider);
+        if (adapter === undefined) {
+            throw new Error(`Provider adapter not found: ${task.provider}`);
+        }
+        const workingDir = process.env['ANASTOPS_WORKSPACE'] ?? process.cwd();
+        const taskInput = task.input;
+        const executeRequest = {
+            model: task.model,
+            prompt: taskInput?.prompt ?? task.description,
+            working_dir: workingDir,
+        };
+        if (taskInput?.agent !== undefined && taskInput.agent !== '') {
+            executeRequest.agent = taskInput.agent;
+        }
+        if (taskInput?.skills !== undefined && taskInput.skills.length > 0) {
+            executeRequest.skills = taskInput.skills;
+        }
+        // Execute with streaming logs support
+        const executeOptions = { workingDir };
+        // Add streaming logs callback
+        executeOptions.onProgress = (output) => {
+            // Update task logs in memory and persist
+            task.logs = output;
+            tasks.set(taskId, task);
+            safePersist(getPersistence().saveTask(task));
+        };
+        executeOptions.progressInterval = 1000; // Update logs every 1 second
+        const response = await adapter.execute(executeRequest, executeOptions);
+        task.status = 'completed';
+        task.completed_at = new Date();
+        task.token_usage = response.usage ?? task.token_usage;
+        task.output = {
+            content: response.content,
+            artifacts: [],
+            files_modified: [],
+            metadata: { usage: response.usage },
+        };
+        tasks.set(taskId, task);
+        safePersist(getPersistence().saveTask(task));
+        addBreadcrumb('Task completed', {
+            task_id: taskId,
+            tokens_used: response.usage?.total_tokens ?? 0,
+        }, 'info', 'task');
+        if (sessionManager.exists(task.session_id)) {
+            const session = sessionManager.getSession(task.session_id);
+            session.metadata = session.metadata ?? {
+                total_tokens: 0,
+                total_cost: 0,
+                agents_used: [],
+                files_affected: [],
+                tasks_completed: 0,
+                tasks_failed: 0,
+            };
+            session.metadata.tasks_completed = (session.metadata.tasks_completed ?? 0) + 1;
+            session.metadata.total_tokens =
+                (session.metadata.total_tokens ?? 0) + (response.usage?.total_tokens ?? 0);
+            session.metadata.total_cost =
+                (session.metadata.total_cost ?? 0) + (response.usage?.cost ?? 0);
+            session.updated_at = new Date();
+            safePersist(getPersistence().saveSession(session));
+        }
+        // Process queue to start next tasks
+        void processTaskQueue(task.session_id);
+        return {
+            task_id: taskId,
+            status: 'completed',
+            result: response,
+        };
+    }
+    catch (error) {
+        task.status = 'failed';
+        task.completed_at = new Date();
+        task.error = error instanceof Error ? error.message : String(error);
+        tasks.set(taskId, task);
+        safePersist(getPersistence().saveTask(task));
+        addBreadcrumb('Task failed', {
+            task_id: taskId,
+            error: task.error,
+        }, 'error', 'task');
+        captureError(error, {
+            task_id: taskId,
+            provider: task.provider,
+            type: task.type,
+        });
+        if (sessionManager.exists(task.session_id)) {
+            const session = sessionManager.getSession(task.session_id);
+            session.metadata = session.metadata ?? {
+                total_tokens: 0,
+                total_cost: 0,
+                agents_used: [],
+                files_affected: [],
+                tasks_completed: 0,
+                tasks_failed: 0,
+            };
+            session.metadata.tasks_failed = (session.metadata.tasks_failed ?? 0) + 1;
+            session.updated_at = new Date();
+            safePersist(getPersistence().saveSession(session));
+        }
+        // Process queue to start next tasks even after failure
+        void processTaskQueue(task.session_id);
+        throw error;
+    }
+}
+export function handleTaskCreate(args) {
+    const taskId = nanoid(21);
+    const now = new Date();
+    const taskType = args['type'] ?? 'other';
+    const sessionId = args['session_id'];
+    const description = args['description'];
+    addBreadcrumb('Creating task', { session_id: sessionId, type: taskType, description }, 'info', 'task');
+    // Store agent and skills in task input for execution
+    const taskInput = {
+        prompt: args['prompt'],
+        context_files: args['context_files'] ?? [],
+    };
+    if (args['agent'] !== undefined) {
+        taskInput.agent = args['agent'];
+    }
+    if (args['skills'] !== undefined) {
+        taskInput.skills = args['skills'];
+    }
+    const task = {
+        id: taskId,
+        session_id: sessionId,
+        agent_id: null,
+        type: taskType,
+        status: 'pending',
+        description,
+        input: taskInput,
+        output: null,
+        error: null,
+        complexity_score: 0,
+        routing_tier: 3,
+        provider: 'claude',
+        model: 'claude-sonnet',
+        token_usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0, cost: 0 },
+        created_at: now,
+        started_at: null,
+        completed_at: null,
+        dependencies: [],
+        priority: 5,
+        retry_count: 0,
+        max_retries: 3,
+        failover_history: [],
+        failover_enabled: args['disable_failover'] !== true,
+    };
+    // Route the task with optional overrides
+    const routingOverride = {};
+    if (args['force_provider'] !== undefined) {
+        routingOverride.force_provider = args['force_provider'];
+    }
+    if (args['force_tier'] !== undefined) {
+        routingOverride.force_tier = args['force_tier'];
+    }
+    const routing = router.route(task, routingOverride);
+    task.routing_tier = routing.tier;
+    task.provider = routing.provider;
+    task.model = routing.model;
+    task.complexity_score = routing.complexity_score;
+    addBreadcrumb('Task routed', { task_id: taskId, provider: routing.provider, tier: routing.tier }, 'info', 'task');
+    tasks.set(taskId, task);
+    // Persist to MongoDB
+    safePersist(getPersistence().saveTask(task));
+    const response = {
+        task_id: task.id,
+        type: task.type,
+        routing: { tier: routing.tier, provider: routing.provider, model: routing.model },
+    };
+    if (taskInput.agent !== undefined && taskInput.agent !== '') {
+        response.agent = taskInput.agent;
+    }
+    return Promise.resolve(response);
+}
+export async function handleTaskQueue(args) {
+    const task = await getTask(args['task_id']);
+    if (task === null)
+        throw new Error('Task not found');
+    task.status = 'queued';
+    tasks.set(task.id, task);
+    safePersist(getPersistence().saveTask(task));
+    // Trigger queue processing to potentially start this task
+    const tasksStarted = await processTaskQueue(task.session_id);
+    return {
+        task_id: task.id,
+        status: task.status,
+        queue_processed: tasksStarted > 0,
+        tasks_started: tasksStarted,
+    };
+}
+export async function handleTaskStatus(args) {
+    const task = await getTask(args['task_id']);
+    if (task === null)
+        throw new Error('Task not found');
+    return task;
+}
+export async function handleTaskComplete(args) {
+    const task = await getTask(args['task_id']);
+    if (task === null)
+        throw new Error('Task not found');
+    task.status = 'completed';
+    task.completed_at = new Date();
+    task.output = {
+        content: args['content'],
+        artifacts: args['artifacts'] ?? [],
+        files_modified: [],
+        metadata: {},
+    };
+    tasks.set(task.id, task);
+    // Persist to MongoDB
+    safePersist(getPersistence().saveTask(task));
+    return { task_id: task.id, status: task.status };
+}
+export async function handleTaskList(args) {
+    // Get from in-memory cache first
+    const inMemoryTasks = [];
+    for (const [, task] of tasks.entries()) {
+        if (args['session_id'] === undefined || task.session_id === args['session_id']) {
+            inMemoryTasks.push(task);
+        }
+    }
+    // Also get from MongoDB for persistence
+    const persistedTasks = await getPersistence().listTasks({
+        ...(args['session_id'] !== undefined && { session_id: args['session_id'] }),
+    });
+    // Merge: in-memory takes precedence
+    const inMemoryIds = new Set(inMemoryTasks.map((t) => t.id));
+    const sessionTasks = [...inMemoryTasks, ...persistedTasks.filter((t) => !inMemoryIds.has(t.id))];
+    return {
+        count: sessionTasks.length,
+        tasks: sessionTasks.map((t) => ({
+            id: t.id,
+            type: t.type,
+            status: t.status,
+            description: t.description.slice(0, 50),
+        })),
+    };
+}
+export async function handleTaskExecute(args) {
+    const taskId = args['task_id'];
+    const wait = args['wait'] ?? true;
+    const task = await getTask(taskId);
+    if (task === null) {
+        throw new Error(`Task not found: ${taskId}`);
+    }
+    if (task.status !== 'pending' && task.status !== 'queued') {
+        return {
+            task_id: taskId,
+            status: task.status,
+            error: `Task cannot be executed - current status is '${task.status}'`,
+        };
+    }
+    addBreadcrumb('Executing task', { task_id: taskId, provider: task.provider, failover_enabled: task.failover_enabled }, 'info', 'task');
+    // Update task status to running
+    task.status = 'running';
+    task.started_at = new Date();
+    tasks.set(taskId, task);
+    // Persist running status
+    safePersist(getPersistence().saveTask(task));
+    try {
+        // Use workspace root from environment or cwd
+        const workingDir = process.env['ANASTOPS_WORKSPACE'] ?? process.cwd();
+        // Build request with optional agent and skills
+        const taskInput = task.input;
+        const executeRequest = {
+            model: task.model,
+            prompt: taskInput?.prompt ?? task.description,
+            working_dir: workingDir,
+        };
+        // Get adapter for the task's provider
+        const adapter = registry.get(task.provider);
+        if (adapter === undefined) {
+            throw new Error(`Provider adapter not found: ${task.provider}`);
+        }
+        // Execute with streaming logs support (direct execution, no failover)
+        const executeOptions = {
+            workingDir,
+            onProgress: (output) => {
+                // Update task logs in memory and persist
+                task.logs = output;
+                tasks.set(taskId, task);
+                safePersist(getPersistence().saveTask(task));
+            },
+            progressInterval: 1000, // Update logs every 1 second
+        };
+        // Type assertion needed due to pnpm workspace linking not picking up updated types
+        const response = await adapter.execute(executeRequest, executeOptions);
+        // Update task with result
+        task.status = 'completed';
+        task.completed_at = new Date();
+        task.token_usage = response.usage ?? task.token_usage;
+        task.output = {
+            content: response.content,
+            artifacts: [],
+            files_modified: [],
+            metadata: {
+                usage: response.usage,
+            },
+        };
+        tasks.set(taskId, task);
+        // Persist completed task
+        safePersist(getPersistence().saveTask(task));
+        addBreadcrumb('Task completed', {
+            task_id: taskId,
+            tokens_used: response.usage?.total_tokens ?? 0,
+        }, 'info', 'task');
+        // Update session metadata via sessionManager
+        if (sessionManager.exists(task.session_id)) {
+            const session = sessionManager.getSession(task.session_id);
+            session.metadata = session.metadata ?? {
+                total_tokens: 0,
+                total_cost: 0,
+                agents_used: [],
+                files_affected: [],
+                tasks_completed: 0,
+                tasks_failed: 0,
+            };
+            session.metadata.tasks_completed = (session.metadata.tasks_completed ?? 0) + 1;
+            session.metadata.total_tokens =
+                (session.metadata.total_tokens ?? 0) + (response.usage?.total_tokens ?? 0);
+            session.metadata.total_cost =
+                (session.metadata.total_cost ?? 0) + (response.usage?.cost ?? 0);
+            session.updated_at = new Date();
+            safePersist(getPersistence().saveSession(session));
+        }
+        // Process queue to start next tasks after completion
+        void processTaskQueue(task.session_id);
+        return {
+            task_id: taskId,
+            status: 'completed',
+            result: response,
+            waited: wait,
+        };
+    }
+    catch (error) {
+        const failoverHistory = [];
+        task.status = 'failed';
+        task.completed_at = new Date();
+        task.error = error instanceof Error ? error.message : String(error);
+        task.failover_history = failoverHistory;
+        tasks.set(taskId, task);
+        // Persist failed task
+        safePersist(getPersistence().saveTask(task));
+        addBreadcrumb('Task failed', {
+            task_id: taskId,
+            error: task.error,
+            failover_attempts: failoverHistory.length,
+        }, 'error', 'task');
+        // Capture task execution error
+        captureError(error, {
+            task_id: taskId,
+            provider: task.provider,
+            type: task.type,
+            failover_attempts: failoverHistory.length,
+        });
+        // Update session metadata for failure via sessionManager
+        if (sessionManager.exists(task.session_id)) {
+            const session = sessionManager.getSession(task.session_id);
+            session.metadata = session.metadata ?? {
+                total_tokens: 0,
+                total_cost: 0,
+                agents_used: [],
+                files_affected: [],
+                tasks_completed: 0,
+                tasks_failed: 0,
+            };
+            session.metadata.tasks_failed = (session.metadata.tasks_failed ?? 0) + 1;
+            session.updated_at = new Date();
+            safePersist(getPersistence().saveSession(session));
+        }
+        // Process queue to start next tasks even after failure
+        void processTaskQueue(task.session_id);
+        throw error;
+    }
+}
+export async function handleTaskCancel(args) {
+    const taskId = args['task_id'];
+    const task = await getTask(taskId);
+    if (task === null) {
+        throw new Error(`Task not found: ${taskId}`);
+    }
+    // Check if task is cancelable (pending, queued, or running)
+    const cancelableStatuses = ['pending', 'queued', 'running'];
+    if (!cancelableStatuses.includes(task.status)) {
+        return {
+            task_id: taskId,
+            status: task.status,
+            cancelled: false,
+            error: `Task cannot be cancelled - current status is '${task.status}'`,
+        };
+    }
+    // Cancel the task
+    task.status = 'cancelled';
+    task.completed_at = new Date();
+    tasks.set(taskId, task);
+    // Persist to MongoDB
+    safePersist(getPersistence().saveTask(task));
+    return {
+        task_id: taskId,
+        status: task.status,
+        cancelled: true,
+    };
+}
+export async function handleTaskRetry(args) {
+    const taskId = args['task_id'];
+    const task = await getTask(taskId);
+    if (task === null) {
+        throw new Error(`Task not found: ${taskId}`);
+    }
+    // Check if task is in failed status
+    if (task.status !== 'failed') {
+        return {
+            task_id: taskId,
+            status: task.status,
+            retried: false,
+            error: `Task cannot be retried - current status is '${task.status}' (must be 'failed')`,
+        };
+    }
+    // Check if max retries exceeded
+    if (task.retry_count >= task.max_retries) {
+        return {
+            task_id: taskId,
+            status: task.status,
+            retried: false,
+            error: `Task has exceeded max retries (${task.max_retries})`,
+            retry_count: task.retry_count,
+        };
+    }
+    // Retry the task
+    task.status = 'pending';
+    task.retry_count += 1;
+    task.error = null;
+    task.started_at = null;
+    task.completed_at = null;
+    tasks.set(taskId, task);
+    // Persist to MongoDB
+    safePersist(getPersistence().saveTask(task));
+    return {
+        task_id: taskId,
+        status: task.status,
+        retried: true,
+        retry_count: task.retry_count,
+        max_retries: task.max_retries,
+    };
+}
+export function handleTaskBatchCreate(args) {
+    const sessionId = args['session_id'];
+    const taskDefs = args['tasks'];
+    const createdTasks = [];
+    const now = new Date();
+    for (const def of taskDefs) {
+        const taskId = nanoid(21);
+        const taskType = def.type ?? 'other';
+        const taskInput = {
+            prompt: def.prompt,
+            context_files: def.context_files ?? [],
+        };
+        if (def.agent !== undefined) {
+            taskInput.agent = def.agent;
+        }
+        if (def.skills !== undefined) {
+            taskInput.skills = def.skills;
+        }
+        const provider = def.force_provider ?? 'claude';
+        const model = 'claude-sonnet';
+        const tier = def.force_tier ?? 3;
+        const task = {
+            id: taskId,
+            session_id: sessionId,
+            agent_id: null,
+            type: taskType,
+            status: 'pending',
+            description: def.description,
+            input: taskInput,
+            output: null,
+            error: null,
+            complexity_score: 50,
+            routing_tier: tier,
+            provider: provider,
+            model: model,
+            token_usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0, cost: 0 },
+            created_at: now,
+            started_at: null,
+            completed_at: null,
+            dependencies: [],
+            priority: 5,
+            retry_count: 0,
+            max_retries: 3,
+            failover_history: [],
+            failover_enabled: true, // Failover enabled by default for batch tasks
+        };
+        tasks.set(taskId, task);
+        safePersist(getPersistence().saveTask(task));
+        const createdTask = {
+            task_id: taskId,
+            type: task.type,
+            routing: { tier, provider, model },
+        };
+        if (taskInput.agent !== undefined) {
+            createdTask.agent = taskInput.agent;
+        }
+        createdTasks.push(createdTask);
+    }
+    return Promise.resolve({
+        created: createdTasks.length,
+        task_ids: createdTasks.map((t) => t.task_id),
+        tasks: createdTasks,
+    });
+}
+export async function handleTaskBatchExecute(args) {
+    const taskIds = args['task_ids'];
+    const parallel = args['parallel'] ?? true;
+    const wait = args['wait'] ?? true;
+    const executeOne = async (taskId) => {
+        const task = await getTask(taskId);
+        if (task === null) {
+            return { task_id: taskId, status: 'not_found', error: `Task not found: ${taskId}` };
+        }
+        if (task.status !== 'pending' && task.status !== 'queued') {
+            return {
+                task_id: taskId,
+                status: task.status,
+                error: `Task cannot be executed - current status is '${task.status}'`,
+            };
+        }
+        task.status = 'running';
+        task.started_at = new Date();
+        tasks.set(taskId, task);
+        safePersist(getPersistence().saveTask(task));
+        try {
+            const workingDir = process.env['ANASTOPS_WORKSPACE'] ?? process.cwd();
+            const taskInput = task.input;
+            const executeRequest = {
+                model: task.model,
+                prompt: taskInput?.prompt ?? task.description,
+                working_dir: workingDir,
+            };
+            // Execute with failover support
+            const failoverResult = await failoverService.executeWithFailover(task, executeRequest, registry, { workingDir });
+            const response = failoverResult.response;
+            task.status = 'completed';
+            task.completed_at = new Date();
+            task.provider = failoverResult.successful_provider;
+            task.routing_tier = failoverResult.successful_tier;
+            task.failover_history = failoverResult.attempt_history;
+            task.token_usage = response.usage ?? task.token_usage;
+            task.output = {
+                content: response.content,
+                artifacts: [],
+                files_modified: [],
+                metadata: {
+                    usage: response.usage,
+                    failover_used: failoverResult.failover_used,
+                    failover_attempts: failoverResult.failover_attempts,
+                },
+            };
+            tasks.set(taskId, task);
+            safePersist(getPersistence().saveTask(task));
+            if (sessionManager.exists(task.session_id)) {
+                const session = sessionManager.getSession(task.session_id);
+                session.metadata = session.metadata ?? {
+                    total_tokens: 0,
+                    total_cost: 0,
+                    agents_used: [],
+                    files_affected: [],
+                    tasks_completed: 0,
+                    tasks_failed: 0,
+                };
+                session.metadata.tasks_completed = (session.metadata.tasks_completed ?? 0) + 1;
+                session.metadata.total_tokens =
+                    (session.metadata.total_tokens ?? 0) + (response.usage?.total_tokens ?? 0);
+                session.metadata.total_cost =
+                    (session.metadata.total_cost ?? 0) + (response.usage?.cost ?? 0);
+                session.updated_at = new Date();
+                safePersist(getPersistence().saveSession(session));
+            }
+            const batchResult = {
+                task_id: taskId,
+                status: 'completed',
+                result: { content: response.content, usage: response.usage },
+            };
+            if (failoverResult.failover_used) {
+                batchResult.failover = {
+                    used: true,
+                    attempts: failoverResult.failover_attempts,
+                    final_provider: failoverResult.successful_provider,
+                    final_tier: failoverResult.successful_tier,
+                };
+            }
+            return batchResult;
+        }
+        catch (error) {
+            // Get failover history from error if available
+            const failoverHistory = error instanceof AllProvidersFailedError ? error.failoverHistory : [];
+            task.status = 'failed';
+            task.completed_at = new Date();
+            task.error = error instanceof Error ? error.message : String(error);
+            task.failover_history = failoverHistory;
+            tasks.set(taskId, task);
+            safePersist(getPersistence().saveTask(task));
+            if (sessionManager.exists(task.session_id)) {
+                const session = sessionManager.getSession(task.session_id);
+                session.metadata = session.metadata ?? {
+                    total_tokens: 0,
+                    total_cost: 0,
+                    agents_used: [],
+                    files_affected: [],
+                    tasks_completed: 0,
+                    tasks_failed: 0,
+                };
+                session.metadata.tasks_failed = (session.metadata.tasks_failed ?? 0) + 1;
+                session.updated_at = new Date();
+                safePersist(getPersistence().saveSession(session));
+            }
+            return {
+                task_id: taskId,
+                status: 'failed',
+                error: error instanceof Error ? error.message : String(error),
+            };
+        }
+    };
+    let results;
+    if (parallel) {
+        results = await Promise.all(taskIds.map(executeOne));
+    }
+    else {
+        results = [];
+        for (const taskId of taskIds) {
+            results.push(await executeOne(taskId));
+        }
+    }
+    const failoverUsedCount = results.filter((r) => r.failover?.used).length;
+    const summary = {
+        completed: results.filter((r) => r.status === 'completed').length,
+        failed: results.filter((r) => r.status === 'failed').length,
+        skipped: results.filter((r) => r.status !== 'completed' && r.status !== 'failed').length,
+        failover_used: failoverUsedCount,
+    };
+    // Process queues for all sessions involved
+    const sessionIds = new Set();
+    for (const taskId of taskIds) {
+        const task = await getTask(taskId);
+        if (task !== null) {
+            sessionIds.add(task.session_id);
+        }
+    }
+    for (const sessionId of sessionIds) {
+        void processTaskQueue(sessionId);
+    }
+    return {
+        executed: results.length,
+        parallel,
+        waited: wait,
+        results,
+        summary,
+    };
+}
+//# sourceMappingURL=handlers.task.js.map
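
Taken together, the new task handlers implement a create → queue → execute → status lifecycle, with queued work picked up by processTaskQueue when a session's queue_config.auto_execute is enabled. A minimal driver sketch, assuming the handlers are called directly with the same plain argument objects the MCP tool layer passes (the import path and session id are illustrative, not part of the package):

// Illustrative lifecycle driver - not part of the published package.
// Argument keys (session_id, description, prompt, task_id) mirror the
// args['...'] lookups in the diff above; the values are made up.
import {
    handleTaskCreate,
    handleTaskQueue,
    handleTaskStatus,
} from './dist/handlers/handlers.task.js';

const created = await handleTaskCreate({
    session_id: 'sess_demo',                 // hypothetical session id
    description: 'Summarize the changelog',
    prompt: 'Summarize CHANGELOG.md in five bullet points',
    // 'type' is optional and defaults to 'other'; agent, skills, force_provider,
    // force_tier, and disable_failover may also be passed, per handleTaskCreate above.
});
console.log(created.routing); // { tier, provider, model } chosen by IntelligentRouter

// Queue the task; if the session's queue_config.auto_execute is true,
// processTaskQueue starts it in the background up to the concurrency limit.
await handleTaskQueue({ task_id: created.task_id });

// Poll until the background execution finishes; streamed logs land in task.logs.
let task = await handleTaskStatus({ task_id: created.task_id });
while (task.status === 'queued' || task.status === 'running') {
    await new Promise((resolve) => setTimeout(resolve, 2000));
    task = await handleTaskStatus({ task_id: created.task_id });
}
console.log(task.status, task.output?.content ?? task.error);
// With auto_execute off, call handleTaskExecute({ task_id: created.task_id }) instead.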