@vibescope/mcp-server 0.4.5 → 0.4.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +84 -84
- package/README.md +194 -194
- package/dist/api-client/project.d.ts +1 -0
- package/dist/api-client.d.ts +1 -0
- package/dist/cli-init.js +21 -21
- package/dist/cli.js +26 -26
- package/dist/handlers/chat.d.ts +2 -0
- package/dist/handlers/chat.js +25 -0
- package/dist/handlers/discovery.js +12 -0
- package/dist/handlers/project.js +4 -2
- package/dist/handlers/tool-docs.js +1203 -1137
- package/dist/index.js +73 -73
- package/dist/templates/agent-guidelines.d.ts +1 -1
- package/dist/templates/agent-guidelines.js +205 -187
- package/dist/templates/help-content.js +1621 -1621
- package/dist/tools/bodies-of-work.js +6 -6
- package/dist/tools/chat.d.ts +1 -0
- package/dist/tools/chat.js +24 -0
- package/dist/tools/cloud-agents.js +22 -22
- package/dist/tools/features.d.ts +13 -0
- package/dist/tools/features.js +151 -0
- package/dist/tools/index.d.ts +3 -1
- package/dist/tools/index.js +4 -1
- package/dist/tools/milestones.js +2 -2
- package/dist/tools/project.js +4 -0
- package/dist/tools/requests.js +1 -1
- package/dist/tools/session.js +11 -11
- package/dist/tools/sprints.js +9 -9
- package/dist/tools/tasks.js +35 -35
- package/dist/tools/worktrees.js +14 -14
- package/dist/tools.d.ts +2 -0
- package/dist/tools.js +3602 -0
- package/dist/utils.js +11 -11
- package/docs/TOOLS.md +2663 -2559
- package/package.json +53 -53
- package/scripts/generate-docs.ts +212 -212
- package/scripts/version-bump.ts +203 -203
- package/src/api-client/blockers.ts +86 -86
- package/src/api-client/bodies-of-work.ts +194 -194
- package/src/api-client/chat.ts +50 -50
- package/src/api-client/connectors.ts +152 -152
- package/src/api-client/cost.ts +185 -185
- package/src/api-client/decisions.ts +87 -87
- package/src/api-client/deployment.ts +313 -313
- package/src/api-client/discovery.ts +81 -81
- package/src/api-client/fallback.ts +52 -52
- package/src/api-client/file-checkouts.ts +115 -115
- package/src/api-client/findings.ts +100 -100
- package/src/api-client/git-issues.ts +88 -88
- package/src/api-client/ideas.ts +112 -112
- package/src/api-client/index.ts +592 -592
- package/src/api-client/milestones.ts +83 -83
- package/src/api-client/organizations.ts +185 -185
- package/src/api-client/progress.ts +94 -94
- package/src/api-client/project.ts +180 -179
- package/src/api-client/requests.ts +54 -54
- package/src/api-client/session.ts +220 -220
- package/src/api-client/sprints.ts +227 -227
- package/src/api-client/subtasks.ts +57 -57
- package/src/api-client/tasks.ts +450 -450
- package/src/api-client/types.ts +32 -32
- package/src/api-client/validation.ts +60 -60
- package/src/api-client/worktrees.ts +53 -53
- package/src/api-client.test.ts +847 -847
- package/src/api-client.ts +2707 -2706
- package/src/cli-init.ts +557 -557
- package/src/cli.test.ts +284 -284
- package/src/cli.ts +204 -204
- package/src/handlers/__test-setup__.ts +240 -240
- package/src/handlers/__test-utils__.ts +89 -89
- package/src/handlers/blockers.test.ts +468 -468
- package/src/handlers/blockers.ts +172 -172
- package/src/handlers/bodies-of-work.test.ts +704 -704
- package/src/handlers/bodies-of-work.ts +526 -526
- package/src/handlers/chat.test.ts +185 -185
- package/src/handlers/chat.ts +101 -69
- package/src/handlers/cloud-agents.test.ts +438 -438
- package/src/handlers/cloud-agents.ts +156 -156
- package/src/handlers/connectors.test.ts +834 -834
- package/src/handlers/connectors.ts +229 -229
- package/src/handlers/cost.test.ts +462 -462
- package/src/handlers/cost.ts +285 -285
- package/src/handlers/decisions.test.ts +382 -382
- package/src/handlers/decisions.ts +153 -153
- package/src/handlers/deployment.test.ts +551 -551
- package/src/handlers/deployment.ts +570 -570
- package/src/handlers/discovery.test.ts +206 -206
- package/src/handlers/discovery.ts +427 -415
- package/src/handlers/fallback.test.ts +537 -537
- package/src/handlers/fallback.ts +194 -194
- package/src/handlers/file-checkouts.test.ts +750 -750
- package/src/handlers/file-checkouts.ts +185 -185
- package/src/handlers/findings.test.ts +633 -633
- package/src/handlers/findings.ts +239 -239
- package/src/handlers/git-issues.test.ts +631 -631
- package/src/handlers/git-issues.ts +136 -136
- package/src/handlers/ideas.test.ts +644 -644
- package/src/handlers/ideas.ts +207 -207
- package/src/handlers/index.ts +93 -93
- package/src/handlers/milestones.test.ts +475 -475
- package/src/handlers/milestones.ts +180 -180
- package/src/handlers/organizations.test.ts +826 -826
- package/src/handlers/organizations.ts +315 -315
- package/src/handlers/progress.test.ts +269 -269
- package/src/handlers/progress.ts +77 -77
- package/src/handlers/project.test.ts +546 -546
- package/src/handlers/project.ts +242 -239
- package/src/handlers/requests.test.ts +303 -303
- package/src/handlers/requests.ts +99 -99
- package/src/handlers/roles.test.ts +305 -305
- package/src/handlers/roles.ts +219 -219
- package/src/handlers/session.test.ts +998 -998
- package/src/handlers/session.ts +1105 -1105
- package/src/handlers/sprints.test.ts +732 -732
- package/src/handlers/sprints.ts +537 -537
- package/src/handlers/tasks.test.ts +931 -931
- package/src/handlers/tasks.ts +1133 -1133
- package/src/handlers/tool-categories.test.ts +66 -66
- package/src/handlers/tool-docs.test.ts +511 -511
- package/src/handlers/tool-docs.ts +1571 -1499
- package/src/handlers/types.test.ts +259 -259
- package/src/handlers/types.ts +176 -176
- package/src/handlers/validation.test.ts +582 -582
- package/src/handlers/validation.ts +164 -164
- package/src/handlers/version.ts +63 -63
- package/src/index.test.ts +674 -674
- package/src/index.ts +807 -807
- package/src/setup.test.ts +233 -233
- package/src/setup.ts +404 -404
- package/src/templates/agent-guidelines.ts +233 -215
- package/src/templates/help-content.ts +1751 -1751
- package/src/token-tracking.test.ts +463 -463
- package/src/token-tracking.ts +167 -167
- package/src/tools/blockers.ts +122 -122
- package/src/tools/bodies-of-work.ts +283 -283
- package/src/tools/chat.ts +72 -46
- package/src/tools/cloud-agents.ts +101 -101
- package/src/tools/connectors.ts +191 -191
- package/src/tools/cost.ts +111 -111
- package/src/tools/decisions.ts +111 -111
- package/src/tools/deployment.ts +455 -455
- package/src/tools/discovery.ts +76 -76
- package/src/tools/fallback.ts +111 -111
- package/src/tools/features.ts +154 -0
- package/src/tools/file-checkouts.ts +145 -145
- package/src/tools/findings.ts +101 -101
- package/src/tools/git-issues.ts +130 -130
- package/src/tools/ideas.ts +162 -162
- package/src/tools/index.ts +141 -137
- package/src/tools/milestones.ts +118 -118
- package/src/tools/organizations.ts +224 -224
- package/src/tools/progress.ts +73 -73
- package/src/tools/project.ts +206 -202
- package/src/tools/requests.ts +68 -68
- package/src/tools/roles.ts +112 -112
- package/src/tools/session.ts +181 -181
- package/src/tools/sprints.ts +298 -298
- package/src/tools/tasks.ts +550 -550
- package/src/tools/tools.test.ts +222 -222
- package/src/tools/types.ts +9 -9
- package/src/tools/validation.ts +75 -75
- package/src/tools/version.ts +34 -34
- package/src/tools/worktrees.ts +66 -66
- package/src/tools.test.ts +416 -416
- package/src/utils.test.ts +1014 -1014
- package/src/utils.ts +586 -586
- package/src/validators.test.ts +223 -223
- package/src/validators.ts +249 -249
- package/src/version.ts +109 -109
- package/tsconfig.json +16 -16
- package/vitest.config.ts +14 -14
package/src/handlers/tasks.ts
CHANGED
|
@@ -1,1133 +1,1133 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Task Handlers (Migrated to API Client)
|
|
3
|
-
*
|
|
4
|
-
* Handles task CRUD and management:
|
|
5
|
-
* - get_task (single task by ID)
|
|
6
|
-
* - search_tasks (text search)
|
|
7
|
-
* - get_tasks_by_priority (priority filter)
|
|
8
|
-
* - get_recent_tasks (by date)
|
|
9
|
-
* - get_task_stats (aggregate counts)
|
|
10
|
-
* - get_next_task
|
|
11
|
-
* - add_task
|
|
12
|
-
* - update_task
|
|
13
|
-
* - complete_task
|
|
14
|
-
* - delete_task
|
|
15
|
-
* - release_task
|
|
16
|
-
* - cancel_task
|
|
17
|
-
* - add_task_reference
|
|
18
|
-
* - remove_task_reference
|
|
19
|
-
* - batch_update_tasks
|
|
20
|
-
* - batch_complete_tasks
|
|
21
|
-
* - add_subtask
|
|
22
|
-
* - get_subtasks
|
|
23
|
-
*/
|
|
24
|
-
|
|
25
|
-
import os from 'os';
|
|
26
|
-
import type { Handler, HandlerRegistry } from './types.js';
|
|
27
|
-
import {
|
|
28
|
-
parseArgs,
|
|
29
|
-
uuidValidator,
|
|
30
|
-
taskStatusValidator,
|
|
31
|
-
priorityValidator,
|
|
32
|
-
progressValidator,
|
|
33
|
-
minutesValidator,
|
|
34
|
-
createEnumValidator,
|
|
35
|
-
ValidationError,
|
|
36
|
-
} from '../validators.js';
|
|
37
|
-
import { getApiClient } from '../api-client.js';
|
|
38
|
-
import { capPagination, PAGINATION_LIMITS } from '../utils.js';
|
|
39
|
-
import { autoPostActivity } from './chat.js';
|
|
40
|
-
|
|
41
|
-
// Auto-detect machine hostname for worktree tracking
|
|
42
|
-
const MACHINE_HOSTNAME = os.hostname();
|
|
43
|
-
|
|
44
|
-
// Valid task types
|
|
45
|
-
const VALID_TASK_TYPES = [
|
|
46
|
-
'frontend', 'backend', 'database', 'feature', 'bugfix',
|
|
47
|
-
'design', 'mcp', 'testing', 'docs', 'infra', 'other'
|
|
48
|
-
] as const;
|
|
49
|
-
|
|
50
|
-
// ============================================================================
|
|
51
|
-
// Argument Schemas
|
|
52
|
-
// ============================================================================
|
|
53
|
-
|
|
54
|
-
const getNextTaskSchema = {
|
|
55
|
-
project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
56
|
-
};
|
|
57
|
-
|
|
58
|
-
const addTaskSchema = {
|
|
59
|
-
project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
60
|
-
title: { type: 'string' as const, required: true as const },
|
|
61
|
-
description: { type: 'string' as const },
|
|
62
|
-
priority: { type: 'number' as const, default: 3, validate: priorityValidator },
|
|
63
|
-
estimated_minutes: { type: 'number' as const, validate: minutesValidator },
|
|
64
|
-
blocking: { type: 'boolean' as const, default: false },
|
|
65
|
-
task_type: { type: 'string' as const, validate: createEnumValidator(VALID_TASK_TYPES) },
|
|
66
|
-
};
|
|
67
|
-
|
|
68
|
-
const updateTaskSchema = {
|
|
69
|
-
task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
70
|
-
title: { type: 'string' as const },
|
|
71
|
-
description: { type: 'string' as const },
|
|
72
|
-
priority: { type: 'number' as const, validate: priorityValidator },
|
|
73
|
-
status: { type: 'string' as const, validate: taskStatusValidator },
|
|
74
|
-
progress_percentage: { type: 'number' as const, validate: progressValidator },
|
|
75
|
-
progress_note: { type: 'string' as const },
|
|
76
|
-
estimated_minutes: { type: 'number' as const, validate: minutesValidator },
|
|
77
|
-
git_branch: { type: 'string' as const },
|
|
78
|
-
worktree_path: { type: 'string' as const },
|
|
79
|
-
task_type: { type: 'string' as const, validate: createEnumValidator(VALID_TASK_TYPES) },
|
|
80
|
-
skip_worktree_requirement: { type: 'boolean' as const, default: false },
|
|
81
|
-
session_id: { type: 'string' as const },
|
|
82
|
-
};
|
|
83
|
-
|
|
84
|
-
const completeTaskSchema = {
|
|
85
|
-
task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
86
|
-
summary: { type: 'string' as const },
|
|
87
|
-
session_id: { type: 'string' as const },
|
|
88
|
-
};
|
|
89
|
-
|
|
90
|
-
const deleteTaskSchema = {
|
|
91
|
-
task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
92
|
-
};
|
|
93
|
-
|
|
94
|
-
const releaseTaskSchema = {
|
|
95
|
-
task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
96
|
-
reason: { type: 'string' as const },
|
|
97
|
-
};
|
|
98
|
-
|
|
99
|
-
// Valid reasons for task cancellation
|
|
100
|
-
const VALID_CANCELLED_REASONS = [
|
|
101
|
-
'pr_closed', 'superseded', 'user_cancelled', 'validation_failed', 'obsolete', 'blocked'
|
|
102
|
-
] as const;
|
|
103
|
-
|
|
104
|
-
const cancelTaskSchema = {
|
|
105
|
-
task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
106
|
-
cancelled_reason: { type: 'string' as const, validate: createEnumValidator(VALID_CANCELLED_REASONS) },
|
|
107
|
-
cancellation_note: { type: 'string' as const },
|
|
108
|
-
};
|
|
109
|
-
|
|
110
|
-
const addTaskReferenceSchema = {
|
|
111
|
-
task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
112
|
-
url: { type: 'string' as const, required: true as const },
|
|
113
|
-
label: { type: 'string' as const },
|
|
114
|
-
};
|
|
115
|
-
|
|
116
|
-
const removeTaskReferenceSchema = {
|
|
117
|
-
task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
118
|
-
url: { type: 'string' as const, required: true as const },
|
|
119
|
-
};
|
|
120
|
-
|
|
121
|
-
const batchUpdateTasksSchema = {
|
|
122
|
-
updates: { type: 'array' as const, required: true as const },
|
|
123
|
-
};
|
|
124
|
-
|
|
125
|
-
const batchCompleteTasksSchema = {
|
|
126
|
-
completions: { type: 'array' as const, required: true as const },
|
|
127
|
-
};
|
|
128
|
-
|
|
129
|
-
const addSubtaskSchema = {
|
|
130
|
-
parent_task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
131
|
-
title: { type: 'string' as const, required: true as const },
|
|
132
|
-
description: { type: 'string' as const },
|
|
133
|
-
priority: { type: 'number' as const, validate: priorityValidator },
|
|
134
|
-
estimated_minutes: { type: 'number' as const, validate: minutesValidator },
|
|
135
|
-
};
|
|
136
|
-
|
|
137
|
-
const getSubtasksSchema = {
|
|
138
|
-
parent_task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
139
|
-
status: { type: 'string' as const, validate: taskStatusValidator },
|
|
140
|
-
limit: { type: 'number' as const, default: 20 },
|
|
141
|
-
offset: { type: 'number' as const, default: 0 },
|
|
142
|
-
};
|
|
143
|
-
|
|
144
|
-
// ============================================================================
|
|
145
|
-
// New Targeted Task Query Schemas
|
|
146
|
-
// ============================================================================
|
|
147
|
-
|
|
148
|
-
const getTaskSchema = {
|
|
149
|
-
task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
150
|
-
include_subtasks: { type: 'boolean' as const, default: false },
|
|
151
|
-
include_milestones: { type: 'boolean' as const, default: false },
|
|
152
|
-
};
|
|
153
|
-
|
|
154
|
-
const searchTasksSchema = {
|
|
155
|
-
project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
156
|
-
query: { type: 'string' as const, required: true as const },
|
|
157
|
-
status: { type: 'array' as const },
|
|
158
|
-
limit: { type: 'number' as const, default: 10 },
|
|
159
|
-
offset: { type: 'number' as const, default: 0 },
|
|
160
|
-
};
|
|
161
|
-
|
|
162
|
-
const getTasksByPrioritySchema = {
|
|
163
|
-
project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
164
|
-
priority: { type: 'number' as const, validate: priorityValidator },
|
|
165
|
-
priority_max: { type: 'number' as const, validate: priorityValidator },
|
|
166
|
-
status: { type: 'string' as const, validate: taskStatusValidator },
|
|
167
|
-
limit: { type: 'number' as const, default: 10 },
|
|
168
|
-
offset: { type: 'number' as const, default: 0 },
|
|
169
|
-
};
|
|
170
|
-
|
|
171
|
-
const getRecentTasksSchema = {
|
|
172
|
-
project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
173
|
-
order: { type: 'string' as const, validate: createEnumValidator(['newest', 'oldest']) },
|
|
174
|
-
status: { type: 'string' as const, validate: taskStatusValidator },
|
|
175
|
-
limit: { type: 'number' as const, default: 10 },
|
|
176
|
-
offset: { type: 'number' as const, default: 0 },
|
|
177
|
-
};
|
|
178
|
-
|
|
179
|
-
const getTaskStatsSchema = {
|
|
180
|
-
project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
181
|
-
};
|
|
182
|
-
|
|
183
|
-
// ============================================================================
|
|
184
|
-
// Git workflow helpers (used by complete_task response)
|
|
185
|
-
// ============================================================================
|
|
186
|
-
|
|
187
|
-
interface GitWorkflowConfig {
|
|
188
|
-
git_workflow: string;
|
|
189
|
-
git_main_branch: string;
|
|
190
|
-
git_develop_branch?: string | null;
|
|
191
|
-
git_auto_branch?: boolean;
|
|
192
|
-
}
|
|
193
|
-
|
|
194
|
-
interface GitCompleteInstructions {
|
|
195
|
-
steps: string[];
|
|
196
|
-
pr_suggestion?: {
|
|
197
|
-
title: string;
|
|
198
|
-
body_template: string;
|
|
199
|
-
};
|
|
200
|
-
next_step: string;
|
|
201
|
-
}
|
|
202
|
-
|
|
203
|
-
interface GitMergeInstructions {
|
|
204
|
-
target_branch: string;
|
|
205
|
-
feature_branch: string;
|
|
206
|
-
steps: string[];
|
|
207
|
-
cleanup: string[];
|
|
208
|
-
note: string;
|
|
209
|
-
}
|
|
210
|
-
|
|
211
|
-
function getTaskCompleteGitInstructions(
|
|
212
|
-
gitWorkflow: string,
|
|
213
|
-
gitMainBranch: string,
|
|
214
|
-
gitDevelopBranch: string | undefined,
|
|
215
|
-
taskBranch: string | undefined,
|
|
216
|
-
taskTitle: string,
|
|
217
|
-
taskId: string
|
|
218
|
-
): GitCompleteInstructions | undefined {
|
|
219
|
-
if (gitWorkflow === 'none') {
|
|
220
|
-
return undefined;
|
|
221
|
-
}
|
|
222
|
-
|
|
223
|
-
if (gitWorkflow === 'trunk-based') {
|
|
224
|
-
return {
|
|
225
|
-
steps: [`git add .`, `git commit -m "feat: ${taskTitle}"`, `git push origin ${gitMainBranch}`],
|
|
226
|
-
next_step: 'Changes committed directly to main branch.',
|
|
227
|
-
};
|
|
228
|
-
}
|
|
229
|
-
|
|
230
|
-
if (!taskBranch) {
|
|
231
|
-
return {
|
|
232
|
-
steps: ['No branch was tracked for this task.'],
|
|
233
|
-
next_step: 'Consider creating a branch for future tasks using the git_branch parameter.',
|
|
234
|
-
};
|
|
235
|
-
}
|
|
236
|
-
|
|
237
|
-
// github-flow or git-flow
|
|
238
|
-
return {
|
|
239
|
-
steps: [`git add .`, `git commit -m "feat: ${taskTitle}"`, `git push -u origin ${taskBranch}`],
|
|
240
|
-
pr_suggestion: {
|
|
241
|
-
title: taskTitle,
|
|
242
|
-
body_template: `## Summary\n[Describe what was implemented]\n\n## Task Reference\nVibescope Task: ${taskId}\n\n## Testing\n- [ ] Tests pass\n- [ ] Manual testing done\n\n## Checklist\n- [ ] Code follows project conventions\n- [ ] No unnecessary changes included`,
|
|
243
|
-
},
|
|
244
|
-
next_step: 'Create PR and add link via add_task_reference. Merge happens AFTER validation approval.',
|
|
245
|
-
};
|
|
246
|
-
}
|
|
247
|
-
|
|
248
|
-
export function getValidationApprovedGitInstructions(
|
|
249
|
-
config: GitWorkflowConfig,
|
|
250
|
-
taskBranch: string | undefined
|
|
251
|
-
): GitMergeInstructions | undefined {
|
|
252
|
-
const { git_workflow, git_main_branch, git_develop_branch } = config;
|
|
253
|
-
|
|
254
|
-
if (git_workflow === 'none' || git_workflow === 'trunk-based' || !taskBranch) {
|
|
255
|
-
return undefined;
|
|
256
|
-
}
|
|
257
|
-
|
|
258
|
-
const targetBranch = git_workflow === 'git-flow' ? (git_develop_branch || 'develop') : git_main_branch;
|
|
259
|
-
|
|
260
|
-
return {
|
|
261
|
-
target_branch: targetBranch,
|
|
262
|
-
feature_branch: taskBranch,
|
|
263
|
-
steps: [
|
|
264
|
-
'Option 1: Merge via GitHub/GitLab PR UI (recommended)',
|
|
265
|
-
`Option 2: Command line merge:`,
|
|
266
|
-
` git checkout ${targetBranch}`,
|
|
267
|
-
` git pull origin ${targetBranch}`,
|
|
268
|
-
` git merge ${taskBranch}`,
|
|
269
|
-
` git push origin ${targetBranch}`,
|
|
270
|
-
],
|
|
271
|
-
cleanup: [`git branch -d ${taskBranch}`, `git push origin --delete ${taskBranch}`],
|
|
272
|
-
note: 'Validation approved - safe to merge. Clean up branch after successful merge.',
|
|
273
|
-
};
|
|
274
|
-
}
|
|
275
|
-
|
|
276
|
-
// ============================================================================
|
|
277
|
-
// Task Handlers - Using API Client
|
|
278
|
-
// ============================================================================
|
|
279
|
-
|
|
280
|
-
export const getNextTask: Handler = async (args, ctx) => {
|
|
281
|
-
const { project_id } = parseArgs(args, getNextTaskSchema);
|
|
282
|
-
|
|
283
|
-
const api = getApiClient();
|
|
284
|
-
const response = await api.getNextTask(project_id, ctx.session.currentSessionId || undefined);
|
|
285
|
-
|
|
286
|
-
if (!response.ok) {
|
|
287
|
-
return { result: { error: response.error || 'Failed to get next task' }, isError: true };
|
|
288
|
-
}
|
|
289
|
-
|
|
290
|
-
const data = response.data;
|
|
291
|
-
if (!data) {
|
|
292
|
-
return { result: { task: null, message: 'No response from server' } };
|
|
293
|
-
}
|
|
294
|
-
|
|
295
|
-
// Map API response to handler response format
|
|
296
|
-
const result: Record<string, unknown> = {};
|
|
297
|
-
|
|
298
|
-
if (data.task) {
|
|
299
|
-
result.task = data.task;
|
|
300
|
-
} else {
|
|
301
|
-
result.task = null;
|
|
302
|
-
// Add IDLE_GUIDANCE when no tasks are available
|
|
303
|
-
result.IDLE_GUIDANCE = {
|
|
304
|
-
message: 'No tasks available. Follow these steps:',
|
|
305
|
-
steps: [
|
|
306
|
-
'1. Call signal_idle() to update dashboard immediately - shows you are available',
|
|
307
|
-
'2. Start a fallback_activity (code_review, security_review, test_coverage, etc.)',
|
|
308
|
-
'3. Never ask "what should I do?" - be autonomous',
|
|
309
|
-
],
|
|
310
|
-
autonomy_rules: [
|
|
311
|
-
'Never ask "should I continue?" → Just continue',
|
|
312
|
-
'Never say "let me know what to do" → Use fallback activities',
|
|
313
|
-
'When context grows large: /clear → start_work_session (don\'t ask, just do it)',
|
|
314
|
-
],
|
|
315
|
-
next_action: `signal_idle() then start_fallback_activity(project_id: "${project_id}", activity: "code_review")`,
|
|
316
|
-
};
|
|
317
|
-
}
|
|
318
|
-
|
|
319
|
-
if (data.blocking_task) result.blocking_task = true;
|
|
320
|
-
if (data.deployment_blocks_tasks) {
|
|
321
|
-
result.deployment_blocks_tasks = true;
|
|
322
|
-
result.deployment = data.deployment;
|
|
323
|
-
result.action = data.action;
|
|
324
|
-
}
|
|
325
|
-
if (data.awaiting_validation) {
|
|
326
|
-
result.awaiting_validation = data.awaiting_validation;
|
|
327
|
-
result.validation_priority = data.validation_priority;
|
|
328
|
-
result.suggested_activity = data.suggested_activity;
|
|
329
|
-
}
|
|
330
|
-
if (data.all_claimed) result.all_claimed = true;
|
|
331
|
-
if (data.is_subtask) result.is_subtask = true;
|
|
332
|
-
if (data.suggested_activity) result.suggested_activity = data.suggested_activity;
|
|
333
|
-
if (data.directive) result.directive = data.directive;
|
|
334
|
-
if (data.message) result.message = data.message;
|
|
335
|
-
|
|
336
|
-
return { result };
|
|
337
|
-
};
|
|
338
|
-
|
|
339
|
-
export const addTask: Handler = async (args, ctx) => {
|
|
340
|
-
const { project_id, title, description, priority, estimated_minutes, blocking, task_type } = parseArgs(args, addTaskSchema);
|
|
341
|
-
|
|
342
|
-
const api = getApiClient();
|
|
343
|
-
const response = await api.createTask(project_id, {
|
|
344
|
-
title,
|
|
345
|
-
description,
|
|
346
|
-
priority,
|
|
347
|
-
estimated_minutes,
|
|
348
|
-
blocking,
|
|
349
|
-
session_id: ctx.session.currentSessionId || undefined,
|
|
350
|
-
task_type,
|
|
351
|
-
});
|
|
352
|
-
|
|
353
|
-
if (!response.ok) {
|
|
354
|
-
return { result: { error: response.error || 'Failed to add task' }, isError: true };
|
|
355
|
-
}
|
|
356
|
-
|
|
357
|
-
const data = response.data;
|
|
358
|
-
const result: Record<string, unknown> = {
|
|
359
|
-
success: true,
|
|
360
|
-
task_id: data?.task_id,
|
|
361
|
-
title,
|
|
362
|
-
};
|
|
363
|
-
|
|
364
|
-
if (data?.blocking) {
|
|
365
|
-
result.blocking = true;
|
|
366
|
-
result.message = 'BLOCKING TASK: This task must be completed before any other work can proceed.';
|
|
367
|
-
}
|
|
368
|
-
|
|
369
|
-
return { result };
|
|
370
|
-
};
|
|
371
|
-
|
|
372
|
-
export const updateTask: Handler = async (args, ctx) => {
|
|
373
|
-
const { task_id, title, description, priority, status, progress_percentage, progress_note, estimated_minutes, git_branch, worktree_path, task_type, skip_worktree_requirement, session_id: explicit_session_id } = parseArgs(args, updateTaskSchema);
|
|
374
|
-
const updates = { title, description, priority, status, progress_percentage, estimated_minutes, git_branch, worktree_path, task_type };
|
|
375
|
-
|
|
376
|
-
// Enforce worktree creation: require git_branch when marking task as in_progress
|
|
377
|
-
// This ensures multi-agent collaboration works properly with isolated worktrees
|
|
378
|
-
if (status === 'in_progress' && !git_branch && !skip_worktree_requirement) {
|
|
379
|
-
return {
|
|
380
|
-
result: {
|
|
381
|
-
error: 'worktree_required',
|
|
382
|
-
message: 'git_branch is required when marking a task as in_progress. Create a worktree first and provide the branch name.',
|
|
383
|
-
hint: 'Create a worktree with: git worktree add ../PROJECT-task-TASKID -b feature/TASKID-description BASE_BRANCH, then call update_task with both status and git_branch parameters.',
|
|
384
|
-
worktree_example: {
|
|
385
|
-
command: `git worktree add ../worktree-${task_id.substring(0, 8)} -b feature/${task_id.substring(0, 8)}-task develop`,
|
|
386
|
-
then: `update_task(task_id: "${task_id}", status: "in_progress", git_branch: "feature/${task_id.substring(0, 8)}-task")`,
|
|
387
|
-
},
|
|
388
|
-
skip_option: 'If this project does not use git branching (trunk-based or no git workflow), pass skip_worktree_requirement: true',
|
|
389
|
-
},
|
|
390
|
-
};
|
|
391
|
-
}
|
|
392
|
-
|
|
393
|
-
const api = getApiClient();
|
|
394
|
-
const response = await api.updateTask(task_id, {
|
|
395
|
-
...updates,
|
|
396
|
-
progress_note,
|
|
397
|
-
session_id: explicit_session_id || ctx.session.currentSessionId || undefined,
|
|
398
|
-
});
|
|
399
|
-
|
|
400
|
-
if (!response.ok) {
|
|
401
|
-
// Check for specific error types
|
|
402
|
-
if (response.error?.includes('agent_task_limit') || response.error?.includes('already has a task')) {
|
|
403
|
-
return {
|
|
404
|
-
result: {
|
|
405
|
-
error: 'agent_task_limit',
|
|
406
|
-
message: response.error,
|
|
407
|
-
},
|
|
408
|
-
};
|
|
409
|
-
}
|
|
410
|
-
if (response.error?.includes('task_claimed') || response.error?.includes('task_already_claimed') || response.error?.includes('being worked on') || response.error?.includes('already being worked on')) {
|
|
411
|
-
const data = response.data as { claimed_by?: string; claimed_session_id?: string; message?: string } | undefined;
|
|
412
|
-
return {
|
|
413
|
-
result: {
|
|
414
|
-
error: 'task_already_claimed',
|
|
415
|
-
message: data?.message || response.error || 'Task is already claimed by another agent',
|
|
416
|
-
claimed_by: data?.claimed_by,
|
|
417
|
-
claimed_session_id: data?.claimed_session_id,
|
|
418
|
-
suggestion: 'Use get_next_task() to get a different available task, or wait for the claiming agent to finish.',
|
|
419
|
-
},
|
|
420
|
-
};
|
|
421
|
-
}
|
|
422
|
-
if (response.error?.includes('invalid_status_transition')) {
|
|
423
|
-
return {
|
|
424
|
-
result: {
|
|
425
|
-
error: 'invalid_status_transition',
|
|
426
|
-
message: response.error,
|
|
427
|
-
},
|
|
428
|
-
};
|
|
429
|
-
}
|
|
430
|
-
if (response.error?.includes('branch_conflict')) {
|
|
431
|
-
return {
|
|
432
|
-
result: {
|
|
433
|
-
error: 'branch_conflict',
|
|
434
|
-
message: response.error,
|
|
435
|
-
conflicting_task_id: (response.data as { conflicting_task_id?: string })?.conflicting_task_id,
|
|
436
|
-
conflicting_task_title: (response.data as { conflicting_task_title?: string })?.conflicting_task_title,
|
|
437
|
-
},
|
|
438
|
-
};
|
|
439
|
-
}
|
|
440
|
-
return { result: { error: response.error || 'Failed to update task' }, isError: true };
|
|
441
|
-
}
|
|
442
|
-
|
|
443
|
-
// Build result - include git workflow info when transitioning to in_progress
|
|
444
|
-
const data = response.data;
|
|
445
|
-
const result: Record<string, unknown> = { success: true, task_id };
|
|
446
|
-
|
|
447
|
-
if (data?.git_workflow) {
|
|
448
|
-
result.git_workflow = data.git_workflow;
|
|
449
|
-
}
|
|
450
|
-
if (data?.worktree_setup) {
|
|
451
|
-
result.worktree_setup = data.worktree_setup;
|
|
452
|
-
}
|
|
453
|
-
if (data?.next_step) {
|
|
454
|
-
result.next_step = data.next_step;
|
|
455
|
-
}
|
|
456
|
-
|
|
457
|
-
// Add test reminder when starting work on a task
|
|
458
|
-
if (status === 'in_progress') {
|
|
459
|
-
result.test_reminder = {
|
|
460
|
-
message: 'Remember to write tests for this task before marking it complete.',
|
|
461
|
-
minimum_expectation: 'Basic tests that validate the task requirements are met',
|
|
462
|
-
ideal: 'Tests that also cover edge cases and error handling',
|
|
463
|
-
test_patterns: ['*.test.ts', '*.spec.ts', '*.test.js', '*.spec.js', '__tests__/*'],
|
|
464
|
-
note: 'Validators will check for test file changes during review. Documentation-only or config changes may not require tests.',
|
|
465
|
-
};
|
|
466
|
-
|
|
467
|
-
// Add comprehensive WORKTREE RULES for branching workflows
|
|
468
|
-
// This reminds agents of the critical workflow order
|
|
469
|
-
result.WORKTREE_RULES = {
|
|
470
|
-
mandatory: true,
|
|
471
|
-
rules: [
|
|
472
|
-
'1. Create worktree BEFORE any file edits - reading is fine, editing requires worktree first',
|
|
473
|
-
'2. Naming: ../PROJECT-PERSONA-short-desc (max 24 chars for description)',
|
|
474
|
-
'3. Command: git worktree add ../PROJECT-PERSONA-desc -b feature/TASKID-desc BASE_BRANCH',
|
|
475
|
-
'4. Report location: heartbeat(current_worktree_path: "...")',
|
|
476
|
-
'5. Store path: update_task(task_id, worktree_path: "...")',
|
|
477
|
-
'6. REBASE before PR: git fetch origin && git rebase origin/BASE_BRANCH && git push --force-with-lease',
|
|
478
|
-
],
|
|
479
|
-
rebase_before_pr: {
|
|
480
|
-
mandatory: true,
|
|
481
|
-
why: 'Without rebasing, your branch may contain old versions of files that other agents modified. When merged, your old version overwrites their changes.',
|
|
482
|
-
commands: [
|
|
483
|
-
'git fetch origin',
|
|
484
|
-
'git rebase origin/develop # or origin/main for github-flow',
|
|
485
|
-
'git push --force-with-lease',
|
|
486
|
-
],
|
|
487
|
-
},
|
|
488
|
-
wrong_order: {
|
|
489
|
-
violation: 'Edit file → stash → create worktree → pop → commit',
|
|
490
|
-
why: 'Even if you eventually use a worktree, editing before creating one is a violation',
|
|
491
|
-
},
|
|
492
|
-
right_order: {
|
|
493
|
-
correct: 'Read to understand → create worktree → cd into it → THEN edit',
|
|
494
|
-
why: 'Worktrees must exist BEFORE any file modifications',
|
|
495
|
-
},
|
|
496
|
-
};
|
|
497
|
-
|
|
498
|
-
// Add HOTFIX_WORKFLOW guidance when branch name indicates hotfix
|
|
499
|
-
if (git_branch && git_branch.includes('hotfix/')) {
|
|
500
|
-
result.HOTFIX_WORKFLOW = {
|
|
501
|
-
message: 'HOTFIX detected - special workflow applies:',
|
|
502
|
-
steps: [
|
|
503
|
-
'1. Create worktree from MAIN (not develop): git worktree add ../PROJECT-PERSONA-hotfix-desc -b hotfix/TASKID-desc main',
|
|
504
|
-
'2. Work in worktree and make your fix',
|
|
505
|
-
'3. Commit: git add -A && git commit -m "fix: description"',
|
|
506
|
-
'4. Push: git push -u origin hotfix/TASKID-desc',
|
|
507
|
-
'5. Create PR targeting MAIN: gh pr create --base main --title "fix: ..." --body "Hotfix for production"',
|
|
508
|
-
'6. Remove worktree immediately after PR',
|
|
509
|
-
],
|
|
510
|
-
important: 'Hotfixes go to MAIN, not develop. They are later merged to develop separately.',
|
|
511
|
-
worktree_required: true,
|
|
512
|
-
};
|
|
513
|
-
}
|
|
514
|
-
|
|
515
|
-
// Guidance for when investigation reveals fix already exists
|
|
516
|
-
result.FIX_ALREADY_EXISTS_GUIDANCE = {
|
|
517
|
-
message: 'If investigation reveals the fix already exists but needs deployment:',
|
|
518
|
-
steps: [
|
|
519
|
-
'1. Add finding: add_finding(project_id, title: "Fix exists, awaits deployment", category: "other", severity: "info", description: "...", related_task_id: task_id)',
|
|
520
|
-
'2. Complete task: complete_task(task_id, summary: "Fix already exists in codebase (PR #{pr_number}). Needs deployment.")',
|
|
521
|
-
'3. Check deployment: check_deployment_status(project_id)',
|
|
522
|
-
'4. Request deployment if not pending: request_deployment(project_id, notes: "Includes fix for [issue]")',
|
|
523
|
-
],
|
|
524
|
-
rationale: 'This prevents tasks from being blocked waiting for deployment when the actual work is done.',
|
|
525
|
-
};
|
|
526
|
-
}
|
|
527
|
-
|
|
528
|
-
return { result };
|
|
529
|
-
};
|
|
530
|
-
|
|
531
|
-
export const completeTask: Handler = async (args, ctx) => {
|
|
532
|
-
const { task_id, summary, session_id: explicit_session_id } = parseArgs(args, completeTaskSchema);
|
|
533
|
-
|
|
534
|
-
const api = getApiClient();
|
|
535
|
-
const response = await api.completeTask(task_id, {
|
|
536
|
-
summary,
|
|
537
|
-
session_id: explicit_session_id || ctx.session.currentSessionId || undefined,
|
|
538
|
-
});
|
|
539
|
-
|
|
540
|
-
if (!response.ok) {
|
|
541
|
-
return { result: { error: response.error || 'Failed to complete task' }, isError: true };
|
|
542
|
-
}
|
|
543
|
-
|
|
544
|
-
const data = response.data;
|
|
545
|
-
if (!data) {
|
|
546
|
-
return { result: { error: 'No response data from complete task' }, isError: true };
|
|
547
|
-
}
|
|
548
|
-
|
|
549
|
-
// Build result matching expected format
|
|
550
|
-
const result: Record<string, unknown> = {
|
|
551
|
-
success: true,
|
|
552
|
-
directive: data.directive,
|
|
553
|
-
auto_continue: data.auto_continue,
|
|
554
|
-
completed_task_id: data.completed_task_id,
|
|
555
|
-
next_task: data.next_task,
|
|
556
|
-
};
|
|
557
|
-
|
|
558
|
-
if (data.context) {
|
|
559
|
-
result.context = data.context;
|
|
560
|
-
}
|
|
561
|
-
|
|
562
|
-
// Pass through warnings (e.g., missing git_branch)
|
|
563
|
-
if (data.warnings) {
|
|
564
|
-
result.warnings = data.warnings;
|
|
565
|
-
}
|
|
566
|
-
|
|
567
|
-
// Git workflow instructions are already in API response but we need to fetch
|
|
568
|
-
// task details if we want to include them (API should return these)
|
|
569
|
-
result.next_action = data.next_action;
|
|
570
|
-
|
|
571
|
-
// Add mandatory action reminders for complete_task
|
|
572
|
-
result.MANDATORY_ACTIONS = {
|
|
573
|
-
message: 'Before marking task complete, ensure you have done the following:',
|
|
574
|
-
checklist: [
|
|
575
|
-
'If you made code changes: Commit and push all changes to your branch',
|
|
576
|
-
'REBASE before PR: git fetch origin && git rebase origin/BASE_BRANCH && git push --force-with-lease',
|
|
577
|
-
'If project uses PR workflow: Create PR targeting correct branch (develop for git-flow, main for github-flow)',
|
|
578
|
-
'If using worktree: Remove worktree IMMEDIATELY after PR is created',
|
|
579
|
-
],
|
|
580
|
-
sequence: 'Commit → Rebase → Push → PR created → complete_task() → remove worktree → next task',
|
|
581
|
-
important: 'DO NOT wait for PR review/merge - validation handles that. Complete task immediately after PR.',
|
|
582
|
-
rebase_warning: 'Always rebase before creating PR to avoid overwriting other agents\' work.',
|
|
583
|
-
};
|
|
584
|
-
|
|
585
|
-
// Add worktree cleanup reminder if worktree was used
|
|
586
|
-
if (data.context?.worktree_path) {
|
|
587
|
-
result.worktree_cleanup = {
|
|
588
|
-
required: true,
|
|
589
|
-
path: data.context.worktree_path,
|
|
590
|
-
command: `git worktree remove ${data.context.worktree_path}`,
|
|
591
|
-
timing: 'Remove immediately after PR is created and complete_task is called',
|
|
592
|
-
};
|
|
593
|
-
}
|
|
594
|
-
|
|
595
|
-
// Auto-post completion activity to project chat
|
|
596
|
-
if (ctx.session.currentProjectId) {
|
|
597
|
-
const persona = ctx.session.currentPersona || 'Agent';
|
|
598
|
-
const summaryText = summary ? `: ${summary}` : '';
|
|
599
|
-
void autoPostActivity(
|
|
600
|
-
ctx.session.currentProjectId,
|
|
601
|
-
`✅ **${persona}** completed a task${summaryText}`,
|
|
602
|
-
ctx.session.currentSessionId || undefined
|
|
603
|
-
);
|
|
604
|
-
}
|
|
605
|
-
|
|
606
|
-
return { result };
|
|
607
|
-
};
|
|
608
|
-
|
|
609
|
-
export const deleteTask: Handler = async (args, ctx) => {
|
|
610
|
-
const { task_id } = parseArgs(args, deleteTaskSchema);
|
|
611
|
-
|
|
612
|
-
const api = getApiClient();
|
|
613
|
-
const response = await api.deleteTask(task_id);
|
|
614
|
-
|
|
615
|
-
if (!response.ok) {
|
|
616
|
-
return { result: { error: response.error || 'Failed to delete task' }, isError: true };
|
|
617
|
-
}
|
|
618
|
-
|
|
619
|
-
return { result: { success: true, deleted_id: task_id } };
|
|
620
|
-
};
|
|
621
|
-
|
|
622
|
-
/**
|
|
623
|
-
* Release a task back to pending status.
|
|
624
|
-
* Use when an agent needs to give up a claimed task (context limits, conflicts, user request).
|
|
625
|
-
*/
|
|
626
|
-
export const releaseTask: Handler = async (args, ctx) => {
|
|
627
|
-
const { task_id, reason } = parseArgs(args, releaseTaskSchema);
|
|
628
|
-
|
|
629
|
-
const api = getApiClient();
|
|
630
|
-
const response = await api.releaseTask(task_id, {
|
|
631
|
-
reason,
|
|
632
|
-
session_id: ctx.session.currentSessionId || undefined,
|
|
633
|
-
});
|
|
634
|
-
|
|
635
|
-
if (!response.ok) {
|
|
636
|
-
return { result: { error: response.error || 'Failed to release task' }, isError: true };
|
|
637
|
-
}
|
|
638
|
-
|
|
639
|
-
return {
|
|
640
|
-
result: {
|
|
641
|
-
success: true,
|
|
642
|
-
task_id,
|
|
643
|
-
message: response.data?.message || 'Task released and returned to pending status',
|
|
644
|
-
reason: reason || null,
|
|
645
|
-
hint: 'The task is now available for other agents to claim. Call get_next_task() to get a new task.',
|
|
646
|
-
},
|
|
647
|
-
};
|
|
648
|
-
};
|
|
649
|
-
|
|
650
|
-
export const cancelTask: Handler = async (args, ctx) => {
|
|
651
|
-
const { task_id, cancelled_reason, cancellation_note } = parseArgs(args, cancelTaskSchema);
|
|
652
|
-
|
|
653
|
-
const api = getApiClient();
|
|
654
|
-
// Cast cancelled_reason to the expected union type - validation already ensures it's valid
|
|
655
|
-
const response = await api.cancelTask(task_id, {
|
|
656
|
-
cancelled_reason: cancelled_reason as 'pr_closed' | 'superseded' | 'user_cancelled' | 'validation_failed' | 'obsolete' | 'blocked' | undefined,
|
|
657
|
-
cancellation_note,
|
|
658
|
-
session_id: ctx.session.currentSessionId || undefined,
|
|
659
|
-
});
|
|
660
|
-
|
|
661
|
-
if (!response.ok) {
|
|
662
|
-
return { result: { error: response.error || 'Failed to cancel task' }, isError: true };
|
|
663
|
-
}
|
|
664
|
-
|
|
665
|
-
return {
|
|
666
|
-
result: {
|
|
667
|
-
success: true,
|
|
668
|
-
task_id,
|
|
669
|
-
cancelled_reason: cancelled_reason || null,
|
|
670
|
-
message: response.data?.message || `Task cancelled${cancelled_reason ? ` (${cancelled_reason})` : ''}`,
|
|
671
|
-
},
|
|
672
|
-
};
|
|
673
|
-
};
|
|
674
|
-
|
|
675
|
-
export const addTaskReference: Handler = async (args, ctx) => {
|
|
676
|
-
const { task_id, url, label } = parseArgs(args, addTaskReferenceSchema);
|
|
677
|
-
|
|
678
|
-
const api = getApiClient();
|
|
679
|
-
const response = await api.addTaskReference(task_id, url, label);
|
|
680
|
-
|
|
681
|
-
if (!response.ok) {
|
|
682
|
-
if (response.error?.includes('already exists')) {
|
|
683
|
-
return { result: { success: false, error: 'Reference with this URL already exists' } };
|
|
684
|
-
}
|
|
685
|
-
return { result: { error: response.error || 'Failed to add reference' }, isError: true };
|
|
686
|
-
}
|
|
687
|
-
|
|
688
|
-
return {
|
|
689
|
-
result: {
|
|
690
|
-
success: true,
|
|
691
|
-
reference: response.data?.reference,
|
|
692
|
-
},
|
|
693
|
-
};
|
|
694
|
-
};
|
|
695
|
-
|
|
696
|
-
export const removeTaskReference: Handler = async (args, ctx) => {
|
|
697
|
-
const { task_id, url } = parseArgs(args, removeTaskReferenceSchema);
|
|
698
|
-
|
|
699
|
-
const api = getApiClient();
|
|
700
|
-
const response = await api.removeTaskReference(task_id, url);
|
|
701
|
-
|
|
702
|
-
if (!response.ok) {
|
|
703
|
-
if (response.error?.includes('not found')) {
|
|
704
|
-
return { result: { success: false, error: 'Reference with this URL not found' } };
|
|
705
|
-
}
|
|
706
|
-
return { result: { error: response.error || 'Failed to remove reference' }, isError: true };
|
|
707
|
-
}
|
|
708
|
-
|
|
709
|
-
return { result: { success: true } };
|
|
710
|
-
};
|
|
711
|
-
|
|
712
|
-
export const batchUpdateTasks: Handler = async (args, ctx) => {
|
|
713
|
-
const { updates } = parseArgs(args, batchUpdateTasksSchema);
|
|
714
|
-
|
|
715
|
-
const typedUpdates = updates as Array<{
|
|
716
|
-
task_id: string;
|
|
717
|
-
status?: string;
|
|
718
|
-
progress_percentage?: number;
|
|
719
|
-
progress_note?: string;
|
|
720
|
-
priority?: number;
|
|
721
|
-
}>;
|
|
722
|
-
|
|
723
|
-
if (!Array.isArray(typedUpdates) || typedUpdates.length === 0) {
|
|
724
|
-
throw new ValidationError('updates must be a non-empty array', {
|
|
725
|
-
field: 'updates',
|
|
726
|
-
hint: 'Provide an array of task updates with at least one item',
|
|
727
|
-
});
|
|
728
|
-
}
|
|
729
|
-
|
|
730
|
-
if (typedUpdates.length > 50) {
|
|
731
|
-
throw new ValidationError('Too many updates. Maximum is 50 per batch.', {
|
|
732
|
-
field: 'updates',
|
|
733
|
-
hint: 'Split your updates into smaller batches',
|
|
734
|
-
});
|
|
735
|
-
}
|
|
736
|
-
|
|
737
|
-
// Individual item validation happens at API level
|
|
738
|
-
const api = getApiClient();
|
|
739
|
-
const response = await api.batchUpdateTasks(typedUpdates);
|
|
740
|
-
|
|
741
|
-
if (!response.ok) {
|
|
742
|
-
return { result: { error: response.error || 'Failed to batch update tasks' }, isError: true };
|
|
743
|
-
}
|
|
744
|
-
|
|
745
|
-
return {
|
|
746
|
-
result: {
|
|
747
|
-
success: response.data?.success || false,
|
|
748
|
-
total: typedUpdates.length,
|
|
749
|
-
succeeded: response.data?.updated_count || 0,
|
|
750
|
-
},
|
|
751
|
-
};
|
|
752
|
-
};
|
|
753
|
-
|
|
754
|
-
export const batchCompleteTasks: Handler = async (args, ctx) => {
|
|
755
|
-
const { completions } = parseArgs(args, batchCompleteTasksSchema);
|
|
756
|
-
|
|
757
|
-
const typedCompletions = completions as Array<{
|
|
758
|
-
task_id: string;
|
|
759
|
-
summary?: string;
|
|
760
|
-
}>;
|
|
761
|
-
|
|
762
|
-
if (!Array.isArray(typedCompletions) || typedCompletions.length === 0) {
|
|
763
|
-
throw new ValidationError('completions must be a non-empty array', {
|
|
764
|
-
field: 'completions',
|
|
765
|
-
hint: 'Provide an array of task completions with at least one item',
|
|
766
|
-
});
|
|
767
|
-
}
|
|
768
|
-
|
|
769
|
-
if (typedCompletions.length > 50) {
|
|
770
|
-
throw new ValidationError('Too many completions. Maximum is 50 per batch.', {
|
|
771
|
-
field: 'completions',
|
|
772
|
-
hint: 'Split your completions into smaller batches',
|
|
773
|
-
});
|
|
774
|
-
}
|
|
775
|
-
|
|
776
|
-
// Individual item validation happens at API level
|
|
777
|
-
|
|
778
|
-
const api = getApiClient();
|
|
779
|
-
const response = await api.batchCompleteTasks(typedCompletions);
|
|
780
|
-
|
|
781
|
-
if (!response.ok) {
|
|
782
|
-
return { result: { error: response.error || 'Failed to batch complete tasks' }, isError: true };
|
|
783
|
-
}
|
|
784
|
-
|
|
785
|
-
const data = response.data;
|
|
786
|
-
return {
|
|
787
|
-
result: {
|
|
788
|
-
success: data?.success || false,
|
|
789
|
-
total: typedCompletions.length,
|
|
790
|
-
succeeded: data?.completed_count || 0,
|
|
791
|
-
failed: typedCompletions.length - (data?.completed_count || 0),
|
|
792
|
-
next_task: data?.next_task,
|
|
793
|
-
},
|
|
794
|
-
};
|
|
795
|
-
};
|
|
796
|
-
|
|
797
|
-
// ============================================================================
|
|
798
|
-
// Subtask Handlers
|
|
799
|
-
// ============================================================================
|
|
800
|
-
|
|
801
|
-
export const addSubtask: Handler = async (args, ctx) => {
|
|
802
|
-
const { parent_task_id, title, description, priority, estimated_minutes } = parseArgs(args, addSubtaskSchema);
|
|
803
|
-
|
|
804
|
-
const api = getApiClient();
|
|
805
|
-
const response = await api.addSubtask(parent_task_id, {
|
|
806
|
-
title,
|
|
807
|
-
description,
|
|
808
|
-
priority,
|
|
809
|
-
estimated_minutes,
|
|
810
|
-
}, ctx.session.currentSessionId || undefined);
|
|
811
|
-
|
|
812
|
-
if (!response.ok) {
|
|
813
|
-
if (response.error?.includes('Cannot create subtask of a subtask')) {
|
|
814
|
-
return {
|
|
815
|
-
result: {
|
|
816
|
-
success: false,
|
|
817
|
-
error: 'Cannot create subtask of a subtask',
|
|
818
|
-
hint: 'Subtasks cannot have their own subtasks. Add this task to the parent task instead.',
|
|
819
|
-
},
|
|
820
|
-
};
|
|
821
|
-
}
|
|
822
|
-
return { result: { error: response.error || 'Failed to add subtask' }, isError: true };
|
|
823
|
-
}
|
|
824
|
-
|
|
825
|
-
return {
|
|
826
|
-
result: {
|
|
827
|
-
success: true,
|
|
828
|
-
subtask_id: response.data?.subtask_id,
|
|
829
|
-
parent_task_id: response.data?.parent_task_id,
|
|
830
|
-
},
|
|
831
|
-
};
|
|
832
|
-
};
|
|
833
|
-
|
|
834
|
-
export const getSubtasks: Handler = async (args, ctx) => {
|
|
835
|
-
const { parent_task_id, status } = parseArgs(args, getSubtasksSchema);
|
|
836
|
-
|
|
837
|
-
const api = getApiClient();
|
|
838
|
-
const response = await api.getSubtasks(parent_task_id, status);
|
|
839
|
-
|
|
840
|
-
if (!response.ok) {
|
|
841
|
-
return { result: { error: response.error || 'Failed to fetch subtasks' }, isError: true };
|
|
842
|
-
}
|
|
843
|
-
|
|
844
|
-
return {
|
|
845
|
-
result: {
|
|
846
|
-
subtasks: response.data?.subtasks || [],
|
|
847
|
-
stats: response.data?.stats || {
|
|
848
|
-
total: 0,
|
|
849
|
-
completed: 0,
|
|
850
|
-
progress_percentage: 0,
|
|
851
|
-
},
|
|
852
|
-
},
|
|
853
|
-
};
|
|
854
|
-
};
|
|
855
|
-
|
|
856
|
-
// ============================================================================
|
|
857
|
-
// New Targeted Task Query Handlers
|
|
858
|
-
// ============================================================================
|
|
859
|
-
|
|
860
|
-
/**
|
|
861
|
-
* Get a single task by ID with optional subtasks and milestones
|
|
862
|
-
*/
|
|
863
|
-
export const getTask: Handler = async (args, ctx) => {
|
|
864
|
-
const { task_id, include_subtasks, include_milestones } = parseArgs(args, getTaskSchema);
|
|
865
|
-
|
|
866
|
-
const api = getApiClient();
|
|
867
|
-
const response = await api.getTaskById(task_id, {
|
|
868
|
-
include_subtasks,
|
|
869
|
-
include_milestones,
|
|
870
|
-
});
|
|
871
|
-
|
|
872
|
-
if (!response.ok) {
|
|
873
|
-
return { result: { error: response.error || 'Failed to fetch task' }, isError: true };
|
|
874
|
-
}
|
|
875
|
-
|
|
876
|
-
const result: Record<string, unknown> = {
|
|
877
|
-
task: response.data?.task,
|
|
878
|
-
};
|
|
879
|
-
|
|
880
|
-
if (include_subtasks && response.data?.subtasks) {
|
|
881
|
-
result.subtasks = response.data.subtasks;
|
|
882
|
-
}
|
|
883
|
-
|
|
884
|
-
if (include_milestones && response.data?.milestones) {
|
|
885
|
-
result.milestones = response.data.milestones;
|
|
886
|
-
}
|
|
887
|
-
|
|
888
|
-
return { result };
|
|
889
|
-
};
|
|
890
|
-
|
|
891
|
-
/**
|
|
892
|
-
* Search tasks by text query with pagination
|
|
893
|
-
*/
|
|
894
|
-
export const searchTasks: Handler = async (args, ctx) => {
|
|
895
|
-
const { project_id, query, status, limit, offset } = parseArgs(args, searchTasksSchema);
|
|
896
|
-
|
|
897
|
-
// Validate query length
|
|
898
|
-
if (query.length < 2) {
|
|
899
|
-
return {
|
|
900
|
-
result: {
|
|
901
|
-
error: 'query_too_short',
|
|
902
|
-
message: 'Search query must be at least 2 characters',
|
|
903
|
-
},
|
|
904
|
-
};
|
|
905
|
-
}
|
|
906
|
-
|
|
907
|
-
// Cap pagination to safe values
|
|
908
|
-
const { cappedLimit, safeOffset } = capPagination(limit ?? 10, offset, PAGINATION_LIMITS.TASK_LIMIT);
|
|
909
|
-
|
|
910
|
-
const api = getApiClient();
|
|
911
|
-
const response = await api.searchTasks(project_id, {
|
|
912
|
-
query,
|
|
913
|
-
status: status as string[] | undefined,
|
|
914
|
-
limit: cappedLimit,
|
|
915
|
-
offset: safeOffset,
|
|
916
|
-
});
|
|
917
|
-
|
|
918
|
-
if (!response.ok) {
|
|
919
|
-
return { result: { error: response.error || 'Failed to search tasks' }, isError: true };
|
|
920
|
-
}
|
|
921
|
-
|
|
922
|
-
const tasks = response.data?.tasks || [];
|
|
923
|
-
const totalMatches = response.data?.total_matches || 0;
|
|
924
|
-
|
|
925
|
-
return {
|
|
926
|
-
result: {
|
|
927
|
-
tasks,
|
|
928
|
-
total_matches: totalMatches,
|
|
929
|
-
has_more: safeOffset + tasks.length < totalMatches,
|
|
930
|
-
offset: safeOffset,
|
|
931
|
-
limit: cappedLimit,
|
|
932
|
-
},
|
|
933
|
-
};
|
|
934
|
-
};
|
|
935
|
-
|
|
936
|
-
/**
|
|
937
|
-
* Get tasks filtered by priority with pagination
|
|
938
|
-
*/
|
|
939
|
-
export const getTasksByPriority: Handler = async (args, ctx) => {
|
|
940
|
-
const { project_id, priority, priority_max, status, limit, offset } = parseArgs(args, getTasksByPrioritySchema);
|
|
941
|
-
|
|
942
|
-
// Cap pagination to safe values
|
|
943
|
-
const { cappedLimit, safeOffset } = capPagination(limit ?? 10, offset, PAGINATION_LIMITS.TASK_LIMIT);
|
|
944
|
-
|
|
945
|
-
const api = getApiClient();
|
|
946
|
-
const response = await api.getTasksByPriority(project_id, {
|
|
947
|
-
priority,
|
|
948
|
-
priority_max,
|
|
949
|
-
status,
|
|
950
|
-
limit: cappedLimit,
|
|
951
|
-
offset: safeOffset,
|
|
952
|
-
});
|
|
953
|
-
|
|
954
|
-
if (!response.ok) {
|
|
955
|
-
return { result: { error: response.error || 'Failed to fetch tasks by priority' }, isError: true };
|
|
956
|
-
}
|
|
957
|
-
|
|
958
|
-
const tasks = response.data?.tasks || [];
|
|
959
|
-
const totalCount = response.data?.total_count || 0;
|
|
960
|
-
|
|
961
|
-
return {
|
|
962
|
-
result: {
|
|
963
|
-
tasks,
|
|
964
|
-
total_count: totalCount,
|
|
965
|
-
has_more: safeOffset + tasks.length < totalCount,
|
|
966
|
-
offset: safeOffset,
|
|
967
|
-
limit: cappedLimit,
|
|
968
|
-
},
|
|
969
|
-
};
|
|
970
|
-
};
|
|
971
|
-
|
|
972
|
-
/**
|
|
973
|
-
* Get recent tasks (newest or oldest) with pagination
|
|
974
|
-
*/
|
|
975
|
-
export const getRecentTasks: Handler = async (args, ctx) => {
|
|
976
|
-
const { project_id, order, status, limit, offset } = parseArgs(args, getRecentTasksSchema);
|
|
977
|
-
|
|
978
|
-
// Cap pagination to safe values
|
|
979
|
-
const { cappedLimit, safeOffset } = capPagination(limit ?? 10, offset, PAGINATION_LIMITS.TASK_LIMIT);
|
|
980
|
-
|
|
981
|
-
const api = getApiClient();
|
|
982
|
-
const response = await api.getRecentTasks(project_id, {
|
|
983
|
-
order: order as 'newest' | 'oldest' | undefined,
|
|
984
|
-
status,
|
|
985
|
-
limit: cappedLimit,
|
|
986
|
-
offset: safeOffset,
|
|
987
|
-
});
|
|
988
|
-
|
|
989
|
-
if (!response.ok) {
|
|
990
|
-
return { result: { error: response.error || 'Failed to fetch recent tasks' }, isError: true };
|
|
991
|
-
}
|
|
992
|
-
|
|
993
|
-
const tasks = response.data?.tasks || [];
|
|
994
|
-
const totalCount = response.data?.total_count || 0;
|
|
995
|
-
|
|
996
|
-
return {
|
|
997
|
-
result: {
|
|
998
|
-
tasks,
|
|
999
|
-
total_count: totalCount,
|
|
1000
|
-
has_more: safeOffset + tasks.length < totalCount,
|
|
1001
|
-
offset: safeOffset,
|
|
1002
|
-
limit: cappedLimit,
|
|
1003
|
-
},
|
|
1004
|
-
};
|
|
1005
|
-
};
|
|
1006
|
-
|
|
1007
|
-
/**
|
|
1008
|
-
* Get task statistics for a project (aggregate counts only, minimal tokens)
|
|
1009
|
-
*/
|
|
1010
|
-
export const getTaskStats: Handler = async (args, ctx) => {
|
|
1011
|
-
const { project_id } = parseArgs(args, getTaskStatsSchema);
|
|
1012
|
-
|
|
1013
|
-
const api = getApiClient();
|
|
1014
|
-
const response = await api.getTaskStats(project_id);
|
|
1015
|
-
|
|
1016
|
-
if (!response.ok) {
|
|
1017
|
-
return { result: { error: response.error || 'Failed to fetch task stats' }, isError: true };
|
|
1018
|
-
}
|
|
1019
|
-
|
|
1020
|
-
return {
|
|
1021
|
-
result: {
|
|
1022
|
-
total: response.data?.total || 0,
|
|
1023
|
-
by_status: response.data?.by_status || {
|
|
1024
|
-
backlog: 0,
|
|
1025
|
-
pending: 0,
|
|
1026
|
-
in_progress: 0,
|
|
1027
|
-
completed: 0,
|
|
1028
|
-
cancelled: 0,
|
|
1029
|
-
},
|
|
1030
|
-
by_priority: response.data?.by_priority || { 1: 0, 2: 0, 3: 0, 4: 0, 5: 0 },
|
|
1031
|
-
awaiting_validation: response.data?.awaiting_validation || 0,
|
|
1032
|
-
oldest_pending_days: response.data?.oldest_pending_days ?? null,
|
|
1033
|
-
},
|
|
1034
|
-
};
|
|
1035
|
-
};
|
|
1036
|
-
|
|
1037
|
-
// ============================================================================
|
|
1038
|
-
// Worktree Cleanup Handlers
|
|
1039
|
-
// ============================================================================
|
|
1040
|
-
|
|
1041
|
-
const getStaleWorktreesSchema = {
|
|
1042
|
-
project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
1043
|
-
hostname: { type: 'string' as const }, // Machine hostname to filter worktrees
|
|
1044
|
-
limit: { type: 'number' as const, default: 20 },
|
|
1045
|
-
offset: { type: 'number' as const, default: 0 },
|
|
1046
|
-
};
|
|
1047
|
-
|
|
1048
|
-
const clearWorktreePathSchema = {
|
|
1049
|
-
task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
1050
|
-
};
|
|
1051
|
-
|
|
1052
|
-
export const getStaleWorktrees: Handler = async (args, ctx) => {
|
|
1053
|
-
const { project_id, hostname: providedHostname, limit, offset } = parseArgs(args, getStaleWorktreesSchema);
|
|
1054
|
-
|
|
1055
|
-
// Use auto-detected hostname if not provided - filters to only worktrees on THIS machine
|
|
1056
|
-
const hostname = providedHostname || MACHINE_HOSTNAME;
|
|
1057
|
-
|
|
1058
|
-
// Cap pagination to safe values
|
|
1059
|
-
const { cappedLimit, safeOffset } = capPagination(limit, offset, PAGINATION_LIMITS.DEFAULT_MAX_LIMIT);
|
|
1060
|
-
|
|
1061
|
-
const api = getApiClient();
|
|
1062
|
-
const response = await api.getStaleWorktrees(project_id, { hostname, limit: cappedLimit, offset: safeOffset });
|
|
1063
|
-
|
|
1064
|
-
if (!response.ok) {
|
|
1065
|
-
return { result: { error: response.error || 'Failed to get stale worktrees' }, isError: true };
|
|
1066
|
-
}
|
|
1067
|
-
|
|
1068
|
-
const data = response.data;
|
|
1069
|
-
return {
|
|
1070
|
-
result: {
|
|
1071
|
-
project_id: data?.project_id,
|
|
1072
|
-
project_name: data?.project_name,
|
|
1073
|
-
hostname_filter: data?.hostname_filter,
|
|
1074
|
-
stale_worktrees: data?.stale_worktrees || [],
|
|
1075
|
-
count: data?.count || 0,
|
|
1076
|
-
local_count: data?.local_count || 0,
|
|
1077
|
-
remote_count: data?.remote_count || 0,
|
|
1078
|
-
total_count: data?.total_count || 0,
|
|
1079
|
-
has_more: data?.has_more || false,
|
|
1080
|
-
cleanup_instructions: data?.cleanup_instructions,
|
|
1081
|
-
remote_worktree_note: data?.remote_worktree_note,
|
|
1082
|
-
},
|
|
1083
|
-
};
|
|
1084
|
-
};
|
|
1085
|
-
|
|
1086
|
-
export const clearWorktreePath: Handler = async (args, ctx) => {
|
|
1087
|
-
const { task_id } = parseArgs(args, clearWorktreePathSchema);
|
|
1088
|
-
|
|
1089
|
-
const api = getApiClient();
|
|
1090
|
-
const response = await api.clearWorktreePath(task_id);
|
|
1091
|
-
|
|
1092
|
-
if (!response.ok) {
|
|
1093
|
-
return { result: { error: response.error || 'Failed to clear worktree path' }, isError: true };
|
|
1094
|
-
}
|
|
1095
|
-
|
|
1096
|
-
return {
|
|
1097
|
-
result: {
|
|
1098
|
-
success: true,
|
|
1099
|
-
task_id,
|
|
1100
|
-
message: 'Worktree path cleared. The worktree can now be safely removed if not already done.',
|
|
1101
|
-
},
|
|
1102
|
-
};
|
|
1103
|
-
};
|
|
1104
|
-
|
|
1105
|
-
/**
|
|
1106
|
-
* Task handlers registry
|
|
1107
|
-
*/
|
|
1108
|
-
export const taskHandlers: HandlerRegistry = {
|
|
1109
|
-
// Targeted task query endpoints (token-efficient)
|
|
1110
|
-
get_task: getTask,
|
|
1111
|
-
search_tasks: searchTasks,
|
|
1112
|
-
get_tasks_by_priority: getTasksByPriority,
|
|
1113
|
-
get_recent_tasks: getRecentTasks,
|
|
1114
|
-
get_task_stats: getTaskStats,
|
|
1115
|
-
// Core task operations
|
|
1116
|
-
get_next_task: getNextTask,
|
|
1117
|
-
add_task: addTask,
|
|
1118
|
-
update_task: updateTask,
|
|
1119
|
-
complete_task: completeTask,
|
|
1120
|
-
delete_task: deleteTask,
|
|
1121
|
-
release_task: releaseTask,
|
|
1122
|
-
cancel_task: cancelTask,
|
|
1123
|
-
add_task_reference: addTaskReference,
|
|
1124
|
-
remove_task_reference: removeTaskReference,
|
|
1125
|
-
batch_update_tasks: batchUpdateTasks,
|
|
1126
|
-
batch_complete_tasks: batchCompleteTasks,
|
|
1127
|
-
// Subtask handlers
|
|
1128
|
-
add_subtask: addSubtask,
|
|
1129
|
-
get_subtasks: getSubtasks,
|
|
1130
|
-
// Worktree cleanup handlers
|
|
1131
|
-
get_stale_worktrees: getStaleWorktrees,
|
|
1132
|
-
clear_worktree_path: clearWorktreePath,
|
|
1133
|
-
};
|
|
1
|
+
/**
|
|
2
|
+
* Task Handlers (Migrated to API Client)
|
|
3
|
+
*
|
|
4
|
+
* Handles task CRUD and management:
|
|
5
|
+
* - get_task (single task by ID)
|
|
6
|
+
* - search_tasks (text search)
|
|
7
|
+
* - get_tasks_by_priority (priority filter)
|
|
8
|
+
* - get_recent_tasks (by date)
|
|
9
|
+
* - get_task_stats (aggregate counts)
|
|
10
|
+
* - get_next_task
|
|
11
|
+
* - add_task
|
|
12
|
+
* - update_task
|
|
13
|
+
* - complete_task
|
|
14
|
+
* - delete_task
|
|
15
|
+
* - release_task
|
|
16
|
+
* - cancel_task
|
|
17
|
+
* - add_task_reference
|
|
18
|
+
* - remove_task_reference
|
|
19
|
+
* - batch_update_tasks
|
|
20
|
+
* - batch_complete_tasks
|
|
21
|
+
* - add_subtask
|
|
22
|
+
* - get_subtasks
|
|
23
|
+
*/
|
|
24
|
+
|
|
25
|
+
import os from 'os';
|
|
26
|
+
import type { Handler, HandlerRegistry } from './types.js';
|
|
27
|
+
import {
|
|
28
|
+
parseArgs,
|
|
29
|
+
uuidValidator,
|
|
30
|
+
taskStatusValidator,
|
|
31
|
+
priorityValidator,
|
|
32
|
+
progressValidator,
|
|
33
|
+
minutesValidator,
|
|
34
|
+
createEnumValidator,
|
|
35
|
+
ValidationError,
|
|
36
|
+
} from '../validators.js';
|
|
37
|
+
import { getApiClient } from '../api-client.js';
|
|
38
|
+
import { capPagination, PAGINATION_LIMITS } from '../utils.js';
|
|
39
|
+
import { autoPostActivity } from './chat.js';
|
|
40
|
+
|
|
41
|
+
// Auto-detect machine hostname for worktree tracking
|
|
42
|
+
const MACHINE_HOSTNAME = os.hostname();
|
|
43
|
+
|
|
44
|
+
// Valid task types
|
|
45
|
+
const VALID_TASK_TYPES = [
|
|
46
|
+
'frontend', 'backend', 'database', 'feature', 'bugfix',
|
|
47
|
+
'design', 'mcp', 'testing', 'docs', 'infra', 'other'
|
|
48
|
+
] as const;
|
|
49
|
+
|
|
50
|
+
// ============================================================================
|
|
51
|
+
// Argument Schemas
|
|
52
|
+
// ============================================================================
|
|
53
|
+
|
|
54
|
+
const getNextTaskSchema = {
|
|
55
|
+
project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
56
|
+
};
|
|
57
|
+
|
|
58
|
+
const addTaskSchema = {
|
|
59
|
+
project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
60
|
+
title: { type: 'string' as const, required: true as const },
|
|
61
|
+
description: { type: 'string' as const },
|
|
62
|
+
priority: { type: 'number' as const, default: 3, validate: priorityValidator },
|
|
63
|
+
estimated_minutes: { type: 'number' as const, validate: minutesValidator },
|
|
64
|
+
blocking: { type: 'boolean' as const, default: false },
|
|
65
|
+
task_type: { type: 'string' as const, validate: createEnumValidator(VALID_TASK_TYPES) },
|
|
66
|
+
};
|
|
67
|
+
|
|
68
|
+
const updateTaskSchema = {
|
|
69
|
+
task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
|
|
70
|
+
title: { type: 'string' as const },
|
|
71
|
+
description: { type: 'string' as const },
|
|
72
|
+
priority: { type: 'number' as const, validate: priorityValidator },
|
|
73
|
+
status: { type: 'string' as const, validate: taskStatusValidator },
|
|
74
|
+
progress_percentage: { type: 'number' as const, validate: progressValidator },
|
|
75
|
+
progress_note: { type: 'string' as const },
|
|
76
|
+
estimated_minutes: { type: 'number' as const, validate: minutesValidator },
|
|
77
|
+
git_branch: { type: 'string' as const },
|
|
78
|
+
worktree_path: { type: 'string' as const },
|
|
79
|
+
task_type: { type: 'string' as const, validate: createEnumValidator(VALID_TASK_TYPES) },
|
|
80
|
+
skip_worktree_requirement: { type: 'boolean' as const, default: false },
|
|
81
|
+
session_id: { type: 'string' as const },
|
|
82
|
+
};
|
|
83
|
+
|
|
84
|
+
// Schema for complete_task: marks a task finished, with an optional summary
// and an optional explicit session override.
const completeTaskSchema = {
  task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  summary: { type: 'string' as const },
  session_id: { type: 'string' as const },
};

// Schema for delete_task: permanent removal, task_id only.
const deleteTaskSchema = {
  task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
};

// Schema for release_task: returns a claimed task to the pending pool,
// optionally recording why it was given up.
const releaseTaskSchema = {
  task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  reason: { type: 'string' as const },
};

// Valid reasons for task cancellation
const VALID_CANCELLED_REASONS = [
  'pr_closed', 'superseded', 'user_cancelled', 'validation_failed', 'obsolete', 'blocked'
] as const;

// Schema for cancel_task: cancellation with an optional enumerated reason
// (validated against VALID_CANCELLED_REASONS) and a free-text note.
const cancelTaskSchema = {
  task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  cancelled_reason: { type: 'string' as const, validate: createEnumValidator(VALID_CANCELLED_REASONS) },
  cancellation_note: { type: 'string' as const },
};

// Schema for add_task_reference: attach an external URL (e.g. a PR link),
// optionally labelled, to a task.
const addTaskReferenceSchema = {
  task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  url: { type: 'string' as const, required: true as const },
  label: { type: 'string' as const },
};

// Schema for remove_task_reference: detach a previously attached URL.
const removeTaskReferenceSchema = {
  task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  url: { type: 'string' as const, required: true as const },
};

// Schema for batch_update_tasks: the array's item shape is narrowed in the
// handler; per-item validation happens at the API level.
const batchUpdateTasksSchema = {
  updates: { type: 'array' as const, required: true as const },
};

// Schema for batch_complete_tasks: same pattern — item validation is deferred
// to the API.
const batchCompleteTasksSchema = {
  completions: { type: 'array' as const, required: true as const },
};

// Schema for add_subtask: creates a child task under an existing parent task.
const addSubtaskSchema = {
  parent_task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  title: { type: 'string' as const, required: true as const },
  description: { type: 'string' as const },
  priority: { type: 'number' as const, validate: priorityValidator },
  estimated_minutes: { type: 'number' as const, validate: minutesValidator },
};

// Schema for get_subtasks: lists children of a parent, optionally filtered by status.
// NOTE(review): limit/offset are declared with defaults here but the getSubtasks
// handler does not forward them to the API client — confirm whether pagination
// is intended to be server-side.
const getSubtasksSchema = {
  parent_task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  status: { type: 'string' as const, validate: taskStatusValidator },
  limit: { type: 'number' as const, default: 20 },
  offset: { type: 'number' as const, default: 0 },
};
|
|
143
|
+
|
|
144
|
+
// ============================================================================
// New Targeted Task Query Schemas
// ============================================================================

// Schema for get_task: fetch one task, optionally expanding its subtasks
// and/or milestone associations.
const getTaskSchema = {
  task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  include_subtasks: { type: 'boolean' as const, default: false },
  include_milestones: { type: 'boolean' as const, default: false },
};

// Schema for search_tasks: free-text search within a project, with an
// optional status filter (array of statuses) and pagination.
const searchTasksSchema = {
  project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  query: { type: 'string' as const, required: true as const },
  status: { type: 'array' as const },
  limit: { type: 'number' as const, default: 10 },
  offset: { type: 'number' as const, default: 0 },
};

// Schema for get_tasks_by_priority: optional priority and priority_max
// filters (exact interpretation is applied server-side), plus status filter
// and pagination.
const getTasksByPrioritySchema = {
  project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  priority: { type: 'number' as const, validate: priorityValidator },
  priority_max: { type: 'number' as const, validate: priorityValidator },
  status: { type: 'string' as const, validate: taskStatusValidator },
  limit: { type: 'number' as const, default: 10 },
  offset: { type: 'number' as const, default: 0 },
};

// Schema for get_recent_tasks: recency-ordered listing, 'newest' or 'oldest'
// first, with optional status filter and pagination.
const getRecentTasksSchema = {
  project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  order: { type: 'string' as const, validate: createEnumValidator(['newest', 'oldest']) },
  status: { type: 'string' as const, validate: taskStatusValidator },
  limit: { type: 'number' as const, default: 10 },
  offset: { type: 'number' as const, default: 0 },
};

// Schema for get_task_stats: aggregate task counts for a whole project.
const getTaskStatsSchema = {
  project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
};
|
|
182
|
+
|
|
183
|
+
// ============================================================================
|
|
184
|
+
// Git workflow helpers (used by complete_task response)
|
|
185
|
+
// ============================================================================
|
|
186
|
+
|
|
187
|
+
interface GitWorkflowConfig {
|
|
188
|
+
git_workflow: string;
|
|
189
|
+
git_main_branch: string;
|
|
190
|
+
git_develop_branch?: string | null;
|
|
191
|
+
git_auto_branch?: boolean;
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
interface GitCompleteInstructions {
|
|
195
|
+
steps: string[];
|
|
196
|
+
pr_suggestion?: {
|
|
197
|
+
title: string;
|
|
198
|
+
body_template: string;
|
|
199
|
+
};
|
|
200
|
+
next_step: string;
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
interface GitMergeInstructions {
|
|
204
|
+
target_branch: string;
|
|
205
|
+
feature_branch: string;
|
|
206
|
+
steps: string[];
|
|
207
|
+
cleanup: string[];
|
|
208
|
+
note: string;
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
function getTaskCompleteGitInstructions(
|
|
212
|
+
gitWorkflow: string,
|
|
213
|
+
gitMainBranch: string,
|
|
214
|
+
gitDevelopBranch: string | undefined,
|
|
215
|
+
taskBranch: string | undefined,
|
|
216
|
+
taskTitle: string,
|
|
217
|
+
taskId: string
|
|
218
|
+
): GitCompleteInstructions | undefined {
|
|
219
|
+
if (gitWorkflow === 'none') {
|
|
220
|
+
return undefined;
|
|
221
|
+
}
|
|
222
|
+
|
|
223
|
+
if (gitWorkflow === 'trunk-based') {
|
|
224
|
+
return {
|
|
225
|
+
steps: [`git add .`, `git commit -m "feat: ${taskTitle}"`, `git push origin ${gitMainBranch}`],
|
|
226
|
+
next_step: 'Changes committed directly to main branch.',
|
|
227
|
+
};
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
if (!taskBranch) {
|
|
231
|
+
return {
|
|
232
|
+
steps: ['No branch was tracked for this task.'],
|
|
233
|
+
next_step: 'Consider creating a branch for future tasks using the git_branch parameter.',
|
|
234
|
+
};
|
|
235
|
+
}
|
|
236
|
+
|
|
237
|
+
// github-flow or git-flow
|
|
238
|
+
return {
|
|
239
|
+
steps: [`git add .`, `git commit -m "feat: ${taskTitle}"`, `git push -u origin ${taskBranch}`],
|
|
240
|
+
pr_suggestion: {
|
|
241
|
+
title: taskTitle,
|
|
242
|
+
body_template: `## Summary\n[Describe what was implemented]\n\n## Task Reference\nVibescope Task: ${taskId}\n\n## Testing\n- [ ] Tests pass\n- [ ] Manual testing done\n\n## Checklist\n- [ ] Code follows project conventions\n- [ ] No unnecessary changes included`,
|
|
243
|
+
},
|
|
244
|
+
next_step: 'Create PR and add link via add_task_reference. Merge happens AFTER validation approval.',
|
|
245
|
+
};
|
|
246
|
+
}
|
|
247
|
+
|
|
248
|
+
export function getValidationApprovedGitInstructions(
|
|
249
|
+
config: GitWorkflowConfig,
|
|
250
|
+
taskBranch: string | undefined
|
|
251
|
+
): GitMergeInstructions | undefined {
|
|
252
|
+
const { git_workflow, git_main_branch, git_develop_branch } = config;
|
|
253
|
+
|
|
254
|
+
if (git_workflow === 'none' || git_workflow === 'trunk-based' || !taskBranch) {
|
|
255
|
+
return undefined;
|
|
256
|
+
}
|
|
257
|
+
|
|
258
|
+
const targetBranch = git_workflow === 'git-flow' ? (git_develop_branch || 'develop') : git_main_branch;
|
|
259
|
+
|
|
260
|
+
return {
|
|
261
|
+
target_branch: targetBranch,
|
|
262
|
+
feature_branch: taskBranch,
|
|
263
|
+
steps: [
|
|
264
|
+
'Option 1: Merge via GitHub/GitLab PR UI (recommended)',
|
|
265
|
+
`Option 2: Command line merge:`,
|
|
266
|
+
` git checkout ${targetBranch}`,
|
|
267
|
+
` git pull origin ${targetBranch}`,
|
|
268
|
+
` git merge ${taskBranch}`,
|
|
269
|
+
` git push origin ${targetBranch}`,
|
|
270
|
+
],
|
|
271
|
+
cleanup: [`git branch -d ${taskBranch}`, `git push origin --delete ${taskBranch}`],
|
|
272
|
+
note: 'Validation approved - safe to merge. Clean up branch after successful merge.',
|
|
273
|
+
};
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
// ============================================================================
|
|
277
|
+
// Task Handlers - Using API Client
|
|
278
|
+
// ============================================================================
|
|
279
|
+
|
|
280
|
+
/**
 * Fetch the next recommended task for a project and reshape the API payload
 * into the handler result. When no task is available, injects IDLE_GUIDANCE
 * directing agents toward autonomous fallback activities.
 */
export const getNextTask: Handler = async (args, ctx) => {
  const { project_id } = parseArgs(args, getNextTaskSchema);

  const api = getApiClient();
  // Forward the current session id (if any) so the server can account for claims.
  const response = await api.getNextTask(project_id, ctx.session.currentSessionId || undefined);

  if (!response.ok) {
    return { result: { error: response.error || 'Failed to get next task' }, isError: true };
  }

  const data = response.data;
  if (!data) {
    // Defensive: ok response with no body — report a null task rather than erroring.
    return { result: { task: null, message: 'No response from server' } };
  }

  // Map API response to handler response format
  const result: Record<string, unknown> = {};

  if (data.task) {
    result.task = data.task;
  } else {
    result.task = null;
    // Add IDLE_GUIDANCE when no tasks are available
    result.IDLE_GUIDANCE = {
      message: 'No tasks available. Follow these steps:',
      steps: [
        '1. Call signal_idle() to update dashboard immediately - shows you are available',
        '2. Start a fallback_activity (code_review, security_review, test_coverage, etc.)',
        '3. Never ask "what should I do?" - be autonomous',
      ],
      autonomy_rules: [
        'Never ask "should I continue?" → Just continue',
        'Never say "let me know what to do" → Use fallback activities',
        'When context grows large: /clear → start_work_session (don\'t ask, just do it)',
      ],
      next_action: `signal_idle() then start_fallback_activity(project_id: "${project_id}", activity: "code_review")`,
    };
  }

  // Forward optional flags/payloads only when present, so the result stays lean.
  if (data.blocking_task) result.blocking_task = true;
  if (data.deployment_blocks_tasks) {
    // A pending deployment gates further task work; pass through its details.
    result.deployment_blocks_tasks = true;
    result.deployment = data.deployment;
    result.action = data.action;
  }
  if (data.awaiting_validation) {
    // Tasks are waiting on validation; surface the suggested activity instead.
    result.awaiting_validation = data.awaiting_validation;
    result.validation_priority = data.validation_priority;
    result.suggested_activity = data.suggested_activity;
  }
  if (data.all_claimed) result.all_claimed = true;
  if (data.is_subtask) result.is_subtask = true;
  if (data.suggested_activity) result.suggested_activity = data.suggested_activity;
  if (data.directive) result.directive = data.directive;
  if (data.message) result.message = data.message;

  return { result };
};
|
|
338
|
+
|
|
339
|
+
export const addTask: Handler = async (args, ctx) => {
|
|
340
|
+
const { project_id, title, description, priority, estimated_minutes, blocking, task_type } = parseArgs(args, addTaskSchema);
|
|
341
|
+
|
|
342
|
+
const api = getApiClient();
|
|
343
|
+
const response = await api.createTask(project_id, {
|
|
344
|
+
title,
|
|
345
|
+
description,
|
|
346
|
+
priority,
|
|
347
|
+
estimated_minutes,
|
|
348
|
+
blocking,
|
|
349
|
+
session_id: ctx.session.currentSessionId || undefined,
|
|
350
|
+
task_type,
|
|
351
|
+
});
|
|
352
|
+
|
|
353
|
+
if (!response.ok) {
|
|
354
|
+
return { result: { error: response.error || 'Failed to add task' }, isError: true };
|
|
355
|
+
}
|
|
356
|
+
|
|
357
|
+
const data = response.data;
|
|
358
|
+
const result: Record<string, unknown> = {
|
|
359
|
+
success: true,
|
|
360
|
+
task_id: data?.task_id,
|
|
361
|
+
title,
|
|
362
|
+
};
|
|
363
|
+
|
|
364
|
+
if (data?.blocking) {
|
|
365
|
+
result.blocking = true;
|
|
366
|
+
result.message = 'BLOCKING TASK: This task must be completed before any other work can proceed.';
|
|
367
|
+
}
|
|
368
|
+
|
|
369
|
+
return { result };
|
|
370
|
+
};
|
|
371
|
+
|
|
372
|
+
/**
 * Update a task's fields and/or status.
 *
 * Enforces the worktree rule (git_branch required when moving to
 * in_progress, unless skip_worktree_requirement is set), translates known
 * API error strings into structured error results, and — on a successful
 * transition to in_progress — attaches testing/worktree/hotfix guidance
 * objects for the agent.
 */
export const updateTask: Handler = async (args, ctx) => {
  const { task_id, title, description, priority, status, progress_percentage, progress_note, estimated_minutes, git_branch, worktree_path, task_type, skip_worktree_requirement, session_id: explicit_session_id } = parseArgs(args, updateTaskSchema);
  // Fields forwarded as-is; progress_note and session_id are added separately below.
  const updates = { title, description, priority, status, progress_percentage, estimated_minutes, git_branch, worktree_path, task_type };

  // Enforce worktree creation: require git_branch when marking task as in_progress
  // This ensures multi-agent collaboration works properly with isolated worktrees
  if (status === 'in_progress' && !git_branch && !skip_worktree_requirement) {
    return {
      result: {
        error: 'worktree_required',
        message: 'git_branch is required when marking a task as in_progress. Create a worktree first and provide the branch name.',
        hint: 'Create a worktree with: git worktree add ../PROJECT-task-TASKID -b feature/TASKID-description BASE_BRANCH, then call update_task with both status and git_branch parameters.',
        worktree_example: {
          // First 8 chars of the task id keep the example branch name short.
          command: `git worktree add ../worktree-${task_id.substring(0, 8)} -b feature/${task_id.substring(0, 8)}-task develop`,
          then: `update_task(task_id: "${task_id}", status: "in_progress", git_branch: "feature/${task_id.substring(0, 8)}-task")`,
        },
        skip_option: 'If this project does not use git branching (trunk-based or no git workflow), pass skip_worktree_requirement: true',
      },
    };
  }

  const api = getApiClient();
  const response = await api.updateTask(task_id, {
    ...updates,
    progress_note,
    // An explicitly supplied session id wins over the ambient session.
    session_id: explicit_session_id || ctx.session.currentSessionId || undefined,
  });

  if (!response.ok) {
    // Check for specific error types
    // (matched by substring because the API reports errors as free-form strings)
    if (response.error?.includes('agent_task_limit') || response.error?.includes('already has a task')) {
      return {
        result: {
          error: 'agent_task_limit',
          message: response.error,
        },
      };
    }
    if (response.error?.includes('task_claimed') || response.error?.includes('task_already_claimed') || response.error?.includes('being worked on') || response.error?.includes('already being worked on')) {
      // The claim-conflict payload may identify the claiming agent/session.
      const data = response.data as { claimed_by?: string; claimed_session_id?: string; message?: string } | undefined;
      return {
        result: {
          error: 'task_already_claimed',
          message: data?.message || response.error || 'Task is already claimed by another agent',
          claimed_by: data?.claimed_by,
          claimed_session_id: data?.claimed_session_id,
          suggestion: 'Use get_next_task() to get a different available task, or wait for the claiming agent to finish.',
        },
      };
    }
    if (response.error?.includes('invalid_status_transition')) {
      return {
        result: {
          error: 'invalid_status_transition',
          message: response.error,
        },
      };
    }
    if (response.error?.includes('branch_conflict')) {
      // Another task already owns this git branch; surface which one.
      return {
        result: {
          error: 'branch_conflict',
          message: response.error,
          conflicting_task_id: (response.data as { conflicting_task_id?: string })?.conflicting_task_id,
          conflicting_task_title: (response.data as { conflicting_task_title?: string })?.conflicting_task_title,
        },
      };
    }
    // Unrecognised errors fall through as a generic error result.
    return { result: { error: response.error || 'Failed to update task' }, isError: true };
  }

  // Build result - include git workflow info when transitioning to in_progress
  const data = response.data;
  const result: Record<string, unknown> = { success: true, task_id };

  if (data?.git_workflow) {
    result.git_workflow = data.git_workflow;
  }
  if (data?.worktree_setup) {
    result.worktree_setup = data.worktree_setup;
  }
  if (data?.next_step) {
    result.next_step = data.next_step;
  }

  // Add test reminder when starting work on a task
  if (status === 'in_progress') {
    result.test_reminder = {
      message: 'Remember to write tests for this task before marking it complete.',
      minimum_expectation: 'Basic tests that validate the task requirements are met',
      ideal: 'Tests that also cover edge cases and error handling',
      test_patterns: ['*.test.ts', '*.spec.ts', '*.test.js', '*.spec.js', '__tests__/*'],
      note: 'Validators will check for test file changes during review. Documentation-only or config changes may not require tests.',
    };

    // Add comprehensive WORKTREE RULES for branching workflows
    // This reminds agents of the critical workflow order
    result.WORKTREE_RULES = {
      mandatory: true,
      rules: [
        '1. Create worktree BEFORE any file edits - reading is fine, editing requires worktree first',
        '2. Naming: ../PROJECT-PERSONA-short-desc (max 24 chars for description)',
        '3. Command: git worktree add ../PROJECT-PERSONA-desc -b feature/TASKID-desc BASE_BRANCH',
        '4. Report location: heartbeat(current_worktree_path: "...")',
        '5. Store path: update_task(task_id, worktree_path: "...")',
        '6. REBASE before PR: git fetch origin && git rebase origin/BASE_BRANCH && git push --force-with-lease',
      ],
      rebase_before_pr: {
        mandatory: true,
        why: 'Without rebasing, your branch may contain old versions of files that other agents modified. When merged, your old version overwrites their changes.',
        commands: [
          'git fetch origin',
          'git rebase origin/develop # or origin/main for github-flow',
          'git push --force-with-lease',
        ],
      },
      wrong_order: {
        violation: 'Edit file → stash → create worktree → pop → commit',
        why: 'Even if you eventually use a worktree, editing before creating one is a violation',
      },
      right_order: {
        correct: 'Read to understand → create worktree → cd into it → THEN edit',
        why: 'Worktrees must exist BEFORE any file modifications',
      },
    };

    // Add HOTFIX_WORKFLOW guidance when branch name indicates hotfix
    if (git_branch && git_branch.includes('hotfix/')) {
      result.HOTFIX_WORKFLOW = {
        message: 'HOTFIX detected - special workflow applies:',
        steps: [
          '1. Create worktree from MAIN (not develop): git worktree add ../PROJECT-PERSONA-hotfix-desc -b hotfix/TASKID-desc main',
          '2. Work in worktree and make your fix',
          '3. Commit: git add -A && git commit -m "fix: description"',
          '4. Push: git push -u origin hotfix/TASKID-desc',
          '5. Create PR targeting MAIN: gh pr create --base main --title "fix: ..." --body "Hotfix for production"',
          '6. Remove worktree immediately after PR',
        ],
        important: 'Hotfixes go to MAIN, not develop. They are later merged to develop separately.',
        worktree_required: true,
      };
    }

    // Guidance for when investigation reveals fix already exists
    result.FIX_ALREADY_EXISTS_GUIDANCE = {
      message: 'If investigation reveals the fix already exists but needs deployment:',
      steps: [
        '1. Add finding: add_finding(project_id, title: "Fix exists, awaits deployment", category: "other", severity: "info", description: "...", related_task_id: task_id)',
        '2. Complete task: complete_task(task_id, summary: "Fix already exists in codebase (PR #{pr_number}). Needs deployment.")',
        '3. Check deployment: check_deployment_status(project_id)',
        '4. Request deployment if not pending: request_deployment(project_id, notes: "Includes fix for [issue]")',
      ],
      rationale: 'This prevents tasks from being blocked waiting for deployment when the actual work is done.',
    };
  }

  return { result };
};
|
|
530
|
+
|
|
531
|
+
/**
 * Mark a task as completed.
 *
 * Forwards the completion to the API, then decorates the response with
 * MANDATORY_ACTIONS (commit/rebase/PR checklist), a worktree cleanup
 * reminder when the task used one, and fire-and-forgets a completion
 * message into the project chat.
 */
export const completeTask: Handler = async (args, ctx) => {
  const { task_id, summary, session_id: explicit_session_id } = parseArgs(args, completeTaskSchema);

  const api = getApiClient();
  const response = await api.completeTask(task_id, {
    summary,
    // An explicitly supplied session id wins over the ambient session.
    session_id: explicit_session_id || ctx.session.currentSessionId || undefined,
  });

  if (!response.ok) {
    return { result: { error: response.error || 'Failed to complete task' }, isError: true };
  }

  const data = response.data;
  if (!data) {
    return { result: { error: 'No response data from complete task' }, isError: true };
  }

  // Build result matching expected format
  const result: Record<string, unknown> = {
    success: true,
    directive: data.directive,
    auto_continue: data.auto_continue,
    completed_task_id: data.completed_task_id,
    next_task: data.next_task,
  };

  if (data.context) {
    result.context = data.context;
  }

  // Pass through warnings (e.g., missing git_branch)
  if (data.warnings) {
    result.warnings = data.warnings;
  }

  // Git workflow instructions are already in API response but we need to fetch
  // task details if we want to include them (API should return these)
  result.next_action = data.next_action;

  // Add mandatory action reminders for complete_task
  result.MANDATORY_ACTIONS = {
    message: 'Before marking task complete, ensure you have done the following:',
    checklist: [
      'If you made code changes: Commit and push all changes to your branch',
      'REBASE before PR: git fetch origin && git rebase origin/BASE_BRANCH && git push --force-with-lease',
      'If project uses PR workflow: Create PR targeting correct branch (develop for git-flow, main for github-flow)',
      'If using worktree: Remove worktree IMMEDIATELY after PR is created',
    ],
    sequence: 'Commit → Rebase → Push → PR created → complete_task() → remove worktree → next task',
    important: 'DO NOT wait for PR review/merge - validation handles that. Complete task immediately after PR.',
    rebase_warning: 'Always rebase before creating PR to avoid overwriting other agents\' work.',
  };

  // Add worktree cleanup reminder if worktree was used
  if (data.context?.worktree_path) {
    result.worktree_cleanup = {
      required: true,
      path: data.context.worktree_path,
      command: `git worktree remove ${data.context.worktree_path}`,
      timing: 'Remove immediately after PR is created and complete_task is called',
    };
  }

  // Auto-post completion activity to project chat
  // (fire-and-forget: 'void' signals the promise is intentionally not awaited)
  if (ctx.session.currentProjectId) {
    const persona = ctx.session.currentPersona || 'Agent';
    const summaryText = summary ? `: ${summary}` : '';
    void autoPostActivity(
      ctx.session.currentProjectId,
      `✅ **${persona}** completed a task${summaryText}`,
      ctx.session.currentSessionId || undefined
    );
  }

  return { result };
};
|
|
608
|
+
|
|
609
|
+
export const deleteTask: Handler = async (args, ctx) => {
|
|
610
|
+
const { task_id } = parseArgs(args, deleteTaskSchema);
|
|
611
|
+
|
|
612
|
+
const api = getApiClient();
|
|
613
|
+
const response = await api.deleteTask(task_id);
|
|
614
|
+
|
|
615
|
+
if (!response.ok) {
|
|
616
|
+
return { result: { error: response.error || 'Failed to delete task' }, isError: true };
|
|
617
|
+
}
|
|
618
|
+
|
|
619
|
+
return { result: { success: true, deleted_id: task_id } };
|
|
620
|
+
};
|
|
621
|
+
|
|
622
|
+
/**
|
|
623
|
+
* Release a task back to pending status.
|
|
624
|
+
* Use when an agent needs to give up a claimed task (context limits, conflicts, user request).
|
|
625
|
+
*/
|
|
626
|
+
export const releaseTask: Handler = async (args, ctx) => {
|
|
627
|
+
const { task_id, reason } = parseArgs(args, releaseTaskSchema);
|
|
628
|
+
|
|
629
|
+
const api = getApiClient();
|
|
630
|
+
const response = await api.releaseTask(task_id, {
|
|
631
|
+
reason,
|
|
632
|
+
session_id: ctx.session.currentSessionId || undefined,
|
|
633
|
+
});
|
|
634
|
+
|
|
635
|
+
if (!response.ok) {
|
|
636
|
+
return { result: { error: response.error || 'Failed to release task' }, isError: true };
|
|
637
|
+
}
|
|
638
|
+
|
|
639
|
+
return {
|
|
640
|
+
result: {
|
|
641
|
+
success: true,
|
|
642
|
+
task_id,
|
|
643
|
+
message: response.data?.message || 'Task released and returned to pending status',
|
|
644
|
+
reason: reason || null,
|
|
645
|
+
hint: 'The task is now available for other agents to claim. Call get_next_task() to get a new task.',
|
|
646
|
+
},
|
|
647
|
+
};
|
|
648
|
+
};
|
|
649
|
+
|
|
650
|
+
export const cancelTask: Handler = async (args, ctx) => {
|
|
651
|
+
const { task_id, cancelled_reason, cancellation_note } = parseArgs(args, cancelTaskSchema);
|
|
652
|
+
|
|
653
|
+
const api = getApiClient();
|
|
654
|
+
// Cast cancelled_reason to the expected union type - validation already ensures it's valid
|
|
655
|
+
const response = await api.cancelTask(task_id, {
|
|
656
|
+
cancelled_reason: cancelled_reason as 'pr_closed' | 'superseded' | 'user_cancelled' | 'validation_failed' | 'obsolete' | 'blocked' | undefined,
|
|
657
|
+
cancellation_note,
|
|
658
|
+
session_id: ctx.session.currentSessionId || undefined,
|
|
659
|
+
});
|
|
660
|
+
|
|
661
|
+
if (!response.ok) {
|
|
662
|
+
return { result: { error: response.error || 'Failed to cancel task' }, isError: true };
|
|
663
|
+
}
|
|
664
|
+
|
|
665
|
+
return {
|
|
666
|
+
result: {
|
|
667
|
+
success: true,
|
|
668
|
+
task_id,
|
|
669
|
+
cancelled_reason: cancelled_reason || null,
|
|
670
|
+
message: response.data?.message || `Task cancelled${cancelled_reason ? ` (${cancelled_reason})` : ''}`,
|
|
671
|
+
},
|
|
672
|
+
};
|
|
673
|
+
};
|
|
674
|
+
|
|
675
|
+
export const addTaskReference: Handler = async (args, ctx) => {
|
|
676
|
+
const { task_id, url, label } = parseArgs(args, addTaskReferenceSchema);
|
|
677
|
+
|
|
678
|
+
const api = getApiClient();
|
|
679
|
+
const response = await api.addTaskReference(task_id, url, label);
|
|
680
|
+
|
|
681
|
+
if (!response.ok) {
|
|
682
|
+
if (response.error?.includes('already exists')) {
|
|
683
|
+
return { result: { success: false, error: 'Reference with this URL already exists' } };
|
|
684
|
+
}
|
|
685
|
+
return { result: { error: response.error || 'Failed to add reference' }, isError: true };
|
|
686
|
+
}
|
|
687
|
+
|
|
688
|
+
return {
|
|
689
|
+
result: {
|
|
690
|
+
success: true,
|
|
691
|
+
reference: response.data?.reference,
|
|
692
|
+
},
|
|
693
|
+
};
|
|
694
|
+
};
|
|
695
|
+
|
|
696
|
+
export const removeTaskReference: Handler = async (args, ctx) => {
|
|
697
|
+
const { task_id, url } = parseArgs(args, removeTaskReferenceSchema);
|
|
698
|
+
|
|
699
|
+
const api = getApiClient();
|
|
700
|
+
const response = await api.removeTaskReference(task_id, url);
|
|
701
|
+
|
|
702
|
+
if (!response.ok) {
|
|
703
|
+
if (response.error?.includes('not found')) {
|
|
704
|
+
return { result: { success: false, error: 'Reference with this URL not found' } };
|
|
705
|
+
}
|
|
706
|
+
return { result: { error: response.error || 'Failed to remove reference' }, isError: true };
|
|
707
|
+
}
|
|
708
|
+
|
|
709
|
+
return { result: { success: true } };
|
|
710
|
+
};
|
|
711
|
+
|
|
712
|
+
export const batchUpdateTasks: Handler = async (args, ctx) => {
|
|
713
|
+
const { updates } = parseArgs(args, batchUpdateTasksSchema);
|
|
714
|
+
|
|
715
|
+
const typedUpdates = updates as Array<{
|
|
716
|
+
task_id: string;
|
|
717
|
+
status?: string;
|
|
718
|
+
progress_percentage?: number;
|
|
719
|
+
progress_note?: string;
|
|
720
|
+
priority?: number;
|
|
721
|
+
}>;
|
|
722
|
+
|
|
723
|
+
if (!Array.isArray(typedUpdates) || typedUpdates.length === 0) {
|
|
724
|
+
throw new ValidationError('updates must be a non-empty array', {
|
|
725
|
+
field: 'updates',
|
|
726
|
+
hint: 'Provide an array of task updates with at least one item',
|
|
727
|
+
});
|
|
728
|
+
}
|
|
729
|
+
|
|
730
|
+
if (typedUpdates.length > 50) {
|
|
731
|
+
throw new ValidationError('Too many updates. Maximum is 50 per batch.', {
|
|
732
|
+
field: 'updates',
|
|
733
|
+
hint: 'Split your updates into smaller batches',
|
|
734
|
+
});
|
|
735
|
+
}
|
|
736
|
+
|
|
737
|
+
// Individual item validation happens at API level
|
|
738
|
+
const api = getApiClient();
|
|
739
|
+
const response = await api.batchUpdateTasks(typedUpdates);
|
|
740
|
+
|
|
741
|
+
if (!response.ok) {
|
|
742
|
+
return { result: { error: response.error || 'Failed to batch update tasks' }, isError: true };
|
|
743
|
+
}
|
|
744
|
+
|
|
745
|
+
return {
|
|
746
|
+
result: {
|
|
747
|
+
success: response.data?.success || false,
|
|
748
|
+
total: typedUpdates.length,
|
|
749
|
+
succeeded: response.data?.updated_count || 0,
|
|
750
|
+
},
|
|
751
|
+
};
|
|
752
|
+
};
|
|
753
|
+
|
|
754
|
+
export const batchCompleteTasks: Handler = async (args, ctx) => {
|
|
755
|
+
const { completions } = parseArgs(args, batchCompleteTasksSchema);
|
|
756
|
+
|
|
757
|
+
const typedCompletions = completions as Array<{
|
|
758
|
+
task_id: string;
|
|
759
|
+
summary?: string;
|
|
760
|
+
}>;
|
|
761
|
+
|
|
762
|
+
if (!Array.isArray(typedCompletions) || typedCompletions.length === 0) {
|
|
763
|
+
throw new ValidationError('completions must be a non-empty array', {
|
|
764
|
+
field: 'completions',
|
|
765
|
+
hint: 'Provide an array of task completions with at least one item',
|
|
766
|
+
});
|
|
767
|
+
}
|
|
768
|
+
|
|
769
|
+
if (typedCompletions.length > 50) {
|
|
770
|
+
throw new ValidationError('Too many completions. Maximum is 50 per batch.', {
|
|
771
|
+
field: 'completions',
|
|
772
|
+
hint: 'Split your completions into smaller batches',
|
|
773
|
+
});
|
|
774
|
+
}
|
|
775
|
+
|
|
776
|
+
// Individual item validation happens at API level
|
|
777
|
+
|
|
778
|
+
const api = getApiClient();
|
|
779
|
+
const response = await api.batchCompleteTasks(typedCompletions);
|
|
780
|
+
|
|
781
|
+
if (!response.ok) {
|
|
782
|
+
return { result: { error: response.error || 'Failed to batch complete tasks' }, isError: true };
|
|
783
|
+
}
|
|
784
|
+
|
|
785
|
+
const data = response.data;
|
|
786
|
+
return {
|
|
787
|
+
result: {
|
|
788
|
+
success: data?.success || false,
|
|
789
|
+
total: typedCompletions.length,
|
|
790
|
+
succeeded: data?.completed_count || 0,
|
|
791
|
+
failed: typedCompletions.length - (data?.completed_count || 0),
|
|
792
|
+
next_task: data?.next_task,
|
|
793
|
+
},
|
|
794
|
+
};
|
|
795
|
+
};
|
|
796
|
+
|
|
797
|
+
// ============================================================================
|
|
798
|
+
// Subtask Handlers
|
|
799
|
+
// ============================================================================
|
|
800
|
+
|
|
801
|
+
export const addSubtask: Handler = async (args, ctx) => {
|
|
802
|
+
const { parent_task_id, title, description, priority, estimated_minutes } = parseArgs(args, addSubtaskSchema);
|
|
803
|
+
|
|
804
|
+
const api = getApiClient();
|
|
805
|
+
const response = await api.addSubtask(parent_task_id, {
|
|
806
|
+
title,
|
|
807
|
+
description,
|
|
808
|
+
priority,
|
|
809
|
+
estimated_minutes,
|
|
810
|
+
}, ctx.session.currentSessionId || undefined);
|
|
811
|
+
|
|
812
|
+
if (!response.ok) {
|
|
813
|
+
if (response.error?.includes('Cannot create subtask of a subtask')) {
|
|
814
|
+
return {
|
|
815
|
+
result: {
|
|
816
|
+
success: false,
|
|
817
|
+
error: 'Cannot create subtask of a subtask',
|
|
818
|
+
hint: 'Subtasks cannot have their own subtasks. Add this task to the parent task instead.',
|
|
819
|
+
},
|
|
820
|
+
};
|
|
821
|
+
}
|
|
822
|
+
return { result: { error: response.error || 'Failed to add subtask' }, isError: true };
|
|
823
|
+
}
|
|
824
|
+
|
|
825
|
+
return {
|
|
826
|
+
result: {
|
|
827
|
+
success: true,
|
|
828
|
+
subtask_id: response.data?.subtask_id,
|
|
829
|
+
parent_task_id: response.data?.parent_task_id,
|
|
830
|
+
},
|
|
831
|
+
};
|
|
832
|
+
};
|
|
833
|
+
|
|
834
|
+
export const getSubtasks: Handler = async (args, ctx) => {
|
|
835
|
+
const { parent_task_id, status } = parseArgs(args, getSubtasksSchema);
|
|
836
|
+
|
|
837
|
+
const api = getApiClient();
|
|
838
|
+
const response = await api.getSubtasks(parent_task_id, status);
|
|
839
|
+
|
|
840
|
+
if (!response.ok) {
|
|
841
|
+
return { result: { error: response.error || 'Failed to fetch subtasks' }, isError: true };
|
|
842
|
+
}
|
|
843
|
+
|
|
844
|
+
return {
|
|
845
|
+
result: {
|
|
846
|
+
subtasks: response.data?.subtasks || [],
|
|
847
|
+
stats: response.data?.stats || {
|
|
848
|
+
total: 0,
|
|
849
|
+
completed: 0,
|
|
850
|
+
progress_percentage: 0,
|
|
851
|
+
},
|
|
852
|
+
},
|
|
853
|
+
};
|
|
854
|
+
};
|
|
855
|
+
|
|
856
|
+
// ============================================================================
|
|
857
|
+
// New Targeted Task Query Handlers
|
|
858
|
+
// ============================================================================
|
|
859
|
+
|
|
860
|
+
/**
|
|
861
|
+
* Get a single task by ID with optional subtasks and milestones
|
|
862
|
+
*/
|
|
863
|
+
export const getTask: Handler = async (args, ctx) => {
|
|
864
|
+
const { task_id, include_subtasks, include_milestones } = parseArgs(args, getTaskSchema);
|
|
865
|
+
|
|
866
|
+
const api = getApiClient();
|
|
867
|
+
const response = await api.getTaskById(task_id, {
|
|
868
|
+
include_subtasks,
|
|
869
|
+
include_milestones,
|
|
870
|
+
});
|
|
871
|
+
|
|
872
|
+
if (!response.ok) {
|
|
873
|
+
return { result: { error: response.error || 'Failed to fetch task' }, isError: true };
|
|
874
|
+
}
|
|
875
|
+
|
|
876
|
+
const result: Record<string, unknown> = {
|
|
877
|
+
task: response.data?.task,
|
|
878
|
+
};
|
|
879
|
+
|
|
880
|
+
if (include_subtasks && response.data?.subtasks) {
|
|
881
|
+
result.subtasks = response.data.subtasks;
|
|
882
|
+
}
|
|
883
|
+
|
|
884
|
+
if (include_milestones && response.data?.milestones) {
|
|
885
|
+
result.milestones = response.data.milestones;
|
|
886
|
+
}
|
|
887
|
+
|
|
888
|
+
return { result };
|
|
889
|
+
};
|
|
890
|
+
|
|
891
|
+
/**
|
|
892
|
+
* Search tasks by text query with pagination
|
|
893
|
+
*/
|
|
894
|
+
export const searchTasks: Handler = async (args, ctx) => {
|
|
895
|
+
const { project_id, query, status, limit, offset } = parseArgs(args, searchTasksSchema);
|
|
896
|
+
|
|
897
|
+
// Validate query length
|
|
898
|
+
if (query.length < 2) {
|
|
899
|
+
return {
|
|
900
|
+
result: {
|
|
901
|
+
error: 'query_too_short',
|
|
902
|
+
message: 'Search query must be at least 2 characters',
|
|
903
|
+
},
|
|
904
|
+
};
|
|
905
|
+
}
|
|
906
|
+
|
|
907
|
+
// Cap pagination to safe values
|
|
908
|
+
const { cappedLimit, safeOffset } = capPagination(limit ?? 10, offset, PAGINATION_LIMITS.TASK_LIMIT);
|
|
909
|
+
|
|
910
|
+
const api = getApiClient();
|
|
911
|
+
const response = await api.searchTasks(project_id, {
|
|
912
|
+
query,
|
|
913
|
+
status: status as string[] | undefined,
|
|
914
|
+
limit: cappedLimit,
|
|
915
|
+
offset: safeOffset,
|
|
916
|
+
});
|
|
917
|
+
|
|
918
|
+
if (!response.ok) {
|
|
919
|
+
return { result: { error: response.error || 'Failed to search tasks' }, isError: true };
|
|
920
|
+
}
|
|
921
|
+
|
|
922
|
+
const tasks = response.data?.tasks || [];
|
|
923
|
+
const totalMatches = response.data?.total_matches || 0;
|
|
924
|
+
|
|
925
|
+
return {
|
|
926
|
+
result: {
|
|
927
|
+
tasks,
|
|
928
|
+
total_matches: totalMatches,
|
|
929
|
+
has_more: safeOffset + tasks.length < totalMatches,
|
|
930
|
+
offset: safeOffset,
|
|
931
|
+
limit: cappedLimit,
|
|
932
|
+
},
|
|
933
|
+
};
|
|
934
|
+
};
|
|
935
|
+
|
|
936
|
+
/**
|
|
937
|
+
* Get tasks filtered by priority with pagination
|
|
938
|
+
*/
|
|
939
|
+
export const getTasksByPriority: Handler = async (args, ctx) => {
|
|
940
|
+
const { project_id, priority, priority_max, status, limit, offset } = parseArgs(args, getTasksByPrioritySchema);
|
|
941
|
+
|
|
942
|
+
// Cap pagination to safe values
|
|
943
|
+
const { cappedLimit, safeOffset } = capPagination(limit ?? 10, offset, PAGINATION_LIMITS.TASK_LIMIT);
|
|
944
|
+
|
|
945
|
+
const api = getApiClient();
|
|
946
|
+
const response = await api.getTasksByPriority(project_id, {
|
|
947
|
+
priority,
|
|
948
|
+
priority_max,
|
|
949
|
+
status,
|
|
950
|
+
limit: cappedLimit,
|
|
951
|
+
offset: safeOffset,
|
|
952
|
+
});
|
|
953
|
+
|
|
954
|
+
if (!response.ok) {
|
|
955
|
+
return { result: { error: response.error || 'Failed to fetch tasks by priority' }, isError: true };
|
|
956
|
+
}
|
|
957
|
+
|
|
958
|
+
const tasks = response.data?.tasks || [];
|
|
959
|
+
const totalCount = response.data?.total_count || 0;
|
|
960
|
+
|
|
961
|
+
return {
|
|
962
|
+
result: {
|
|
963
|
+
tasks,
|
|
964
|
+
total_count: totalCount,
|
|
965
|
+
has_more: safeOffset + tasks.length < totalCount,
|
|
966
|
+
offset: safeOffset,
|
|
967
|
+
limit: cappedLimit,
|
|
968
|
+
},
|
|
969
|
+
};
|
|
970
|
+
};
|
|
971
|
+
|
|
972
|
+
/**
|
|
973
|
+
* Get recent tasks (newest or oldest) with pagination
|
|
974
|
+
*/
|
|
975
|
+
export const getRecentTasks: Handler = async (args, ctx) => {
|
|
976
|
+
const { project_id, order, status, limit, offset } = parseArgs(args, getRecentTasksSchema);
|
|
977
|
+
|
|
978
|
+
// Cap pagination to safe values
|
|
979
|
+
const { cappedLimit, safeOffset } = capPagination(limit ?? 10, offset, PAGINATION_LIMITS.TASK_LIMIT);
|
|
980
|
+
|
|
981
|
+
const api = getApiClient();
|
|
982
|
+
const response = await api.getRecentTasks(project_id, {
|
|
983
|
+
order: order as 'newest' | 'oldest' | undefined,
|
|
984
|
+
status,
|
|
985
|
+
limit: cappedLimit,
|
|
986
|
+
offset: safeOffset,
|
|
987
|
+
});
|
|
988
|
+
|
|
989
|
+
if (!response.ok) {
|
|
990
|
+
return { result: { error: response.error || 'Failed to fetch recent tasks' }, isError: true };
|
|
991
|
+
}
|
|
992
|
+
|
|
993
|
+
const tasks = response.data?.tasks || [];
|
|
994
|
+
const totalCount = response.data?.total_count || 0;
|
|
995
|
+
|
|
996
|
+
return {
|
|
997
|
+
result: {
|
|
998
|
+
tasks,
|
|
999
|
+
total_count: totalCount,
|
|
1000
|
+
has_more: safeOffset + tasks.length < totalCount,
|
|
1001
|
+
offset: safeOffset,
|
|
1002
|
+
limit: cappedLimit,
|
|
1003
|
+
},
|
|
1004
|
+
};
|
|
1005
|
+
};
|
|
1006
|
+
|
|
1007
|
+
/**
|
|
1008
|
+
* Get task statistics for a project (aggregate counts only, minimal tokens)
|
|
1009
|
+
*/
|
|
1010
|
+
export const getTaskStats: Handler = async (args, ctx) => {
|
|
1011
|
+
const { project_id } = parseArgs(args, getTaskStatsSchema);
|
|
1012
|
+
|
|
1013
|
+
const api = getApiClient();
|
|
1014
|
+
const response = await api.getTaskStats(project_id);
|
|
1015
|
+
|
|
1016
|
+
if (!response.ok) {
|
|
1017
|
+
return { result: { error: response.error || 'Failed to fetch task stats' }, isError: true };
|
|
1018
|
+
}
|
|
1019
|
+
|
|
1020
|
+
return {
|
|
1021
|
+
result: {
|
|
1022
|
+
total: response.data?.total || 0,
|
|
1023
|
+
by_status: response.data?.by_status || {
|
|
1024
|
+
backlog: 0,
|
|
1025
|
+
pending: 0,
|
|
1026
|
+
in_progress: 0,
|
|
1027
|
+
completed: 0,
|
|
1028
|
+
cancelled: 0,
|
|
1029
|
+
},
|
|
1030
|
+
by_priority: response.data?.by_priority || { 1: 0, 2: 0, 3: 0, 4: 0, 5: 0 },
|
|
1031
|
+
awaiting_validation: response.data?.awaiting_validation || 0,
|
|
1032
|
+
oldest_pending_days: response.data?.oldest_pending_days ?? null,
|
|
1033
|
+
},
|
|
1034
|
+
};
|
|
1035
|
+
};
|
|
1036
|
+
|
|
1037
|
+
// ============================================================================
|
|
1038
|
+
// Worktree Cleanup Handlers
|
|
1039
|
+
// ============================================================================
|
|
1040
|
+
|
|
1041
|
+
// Argument schema for get_stale_worktrees: project scope plus an optional
// machine-hostname filter and pagination (defaults: limit 20, offset 0).
const getStaleWorktreesSchema = {
  project_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
  hostname: { type: 'string' as const }, // Machine hostname to filter worktrees
  limit: { type: 'number' as const, default: 20 },
  offset: { type: 'number' as const, default: 0 },
};
|
|
1047
|
+
|
|
1048
|
+
// Argument schema for clear_worktree_path: the task whose recorded
// worktree path should be cleared.
const clearWorktreePathSchema = {
  task_id: { type: 'string' as const, required: true as const, validate: uuidValidator },
};
|
|
1051
|
+
|
|
1052
|
+
export const getStaleWorktrees: Handler = async (args, ctx) => {
|
|
1053
|
+
const { project_id, hostname: providedHostname, limit, offset } = parseArgs(args, getStaleWorktreesSchema);
|
|
1054
|
+
|
|
1055
|
+
// Use auto-detected hostname if not provided - filters to only worktrees on THIS machine
|
|
1056
|
+
const hostname = providedHostname || MACHINE_HOSTNAME;
|
|
1057
|
+
|
|
1058
|
+
// Cap pagination to safe values
|
|
1059
|
+
const { cappedLimit, safeOffset } = capPagination(limit, offset, PAGINATION_LIMITS.DEFAULT_MAX_LIMIT);
|
|
1060
|
+
|
|
1061
|
+
const api = getApiClient();
|
|
1062
|
+
const response = await api.getStaleWorktrees(project_id, { hostname, limit: cappedLimit, offset: safeOffset });
|
|
1063
|
+
|
|
1064
|
+
if (!response.ok) {
|
|
1065
|
+
return { result: { error: response.error || 'Failed to get stale worktrees' }, isError: true };
|
|
1066
|
+
}
|
|
1067
|
+
|
|
1068
|
+
const data = response.data;
|
|
1069
|
+
return {
|
|
1070
|
+
result: {
|
|
1071
|
+
project_id: data?.project_id,
|
|
1072
|
+
project_name: data?.project_name,
|
|
1073
|
+
hostname_filter: data?.hostname_filter,
|
|
1074
|
+
stale_worktrees: data?.stale_worktrees || [],
|
|
1075
|
+
count: data?.count || 0,
|
|
1076
|
+
local_count: data?.local_count || 0,
|
|
1077
|
+
remote_count: data?.remote_count || 0,
|
|
1078
|
+
total_count: data?.total_count || 0,
|
|
1079
|
+
has_more: data?.has_more || false,
|
|
1080
|
+
cleanup_instructions: data?.cleanup_instructions,
|
|
1081
|
+
remote_worktree_note: data?.remote_worktree_note,
|
|
1082
|
+
},
|
|
1083
|
+
};
|
|
1084
|
+
};
|
|
1085
|
+
|
|
1086
|
+
export const clearWorktreePath: Handler = async (args, ctx) => {
|
|
1087
|
+
const { task_id } = parseArgs(args, clearWorktreePathSchema);
|
|
1088
|
+
|
|
1089
|
+
const api = getApiClient();
|
|
1090
|
+
const response = await api.clearWorktreePath(task_id);
|
|
1091
|
+
|
|
1092
|
+
if (!response.ok) {
|
|
1093
|
+
return { result: { error: response.error || 'Failed to clear worktree path' }, isError: true };
|
|
1094
|
+
}
|
|
1095
|
+
|
|
1096
|
+
return {
|
|
1097
|
+
result: {
|
|
1098
|
+
success: true,
|
|
1099
|
+
task_id,
|
|
1100
|
+
message: 'Worktree path cleared. The worktree can now be safely removed if not already done.',
|
|
1101
|
+
},
|
|
1102
|
+
};
|
|
1103
|
+
};
|
|
1104
|
+
|
|
1105
|
+
/**
 * Task handlers registry — maps tool names to their handler functions.
 * Grouped by concern: targeted token-efficient queries, core task
 * operations, subtask handlers, and worktree cleanup handlers.
 */
export const taskHandlers: HandlerRegistry = {
  // Targeted task query endpoints (token-efficient)
  get_task: getTask,
  search_tasks: searchTasks,
  get_tasks_by_priority: getTasksByPriority,
  get_recent_tasks: getRecentTasks,
  get_task_stats: getTaskStats,
  // Core task operations
  get_next_task: getNextTask,
  add_task: addTask,
  update_task: updateTask,
  complete_task: completeTask,
  delete_task: deleteTask,
  release_task: releaseTask,
  cancel_task: cancelTask,
  add_task_reference: addTaskReference,
  remove_task_reference: removeTaskReference,
  batch_update_tasks: batchUpdateTasks,
  batch_complete_tasks: batchCompleteTasks,
  // Subtask handlers
  add_subtask: addSubtask,
  get_subtasks: getSubtasks,
  // Worktree cleanup handlers
  get_stale_worktrees: getStaleWorktrees,
  clear_worktree_path: clearWorktreePath,
};
|