internaltool-mcp 1.6.35 → 1.6.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +1004 -115
- package/package.json +1 -1
package/index.js
CHANGED
|
@@ -227,21 +227,299 @@ function registerTaskTools(server, { isAdmin, scopedProjectId }) {
|
|
|
227
227
|
async ({ taskId }) => call(() => api.get(`/api/tasks/${taskId}`))
|
|
228
228
|
)
|
|
229
229
|
|
|
230
|
+
// ── search_tasks ──────────────────────────────────────────────────────────────
|
|
231
|
+
server.tool(
|
|
232
|
+
'search_tasks',
|
|
233
|
+
`Search and filter tasks within a project.
|
|
234
|
+
|
|
235
|
+
Use this to find tasks by keyword, type, status, or assignee. Much faster than listing all tasks.
|
|
236
|
+
|
|
237
|
+
Examples:
|
|
238
|
+
- Find all bugfix tasks: search_tasks(projectId, taskType="bugfix")
|
|
239
|
+
- Find tasks about "auth": search_tasks(projectId, query="auth")
|
|
240
|
+
- Find your in-progress tasks: search_tasks(projectId, column="in_progress", assignee=myUserId)
|
|
241
|
+
- Find high priority todo items: search_tasks(projectId, column="todo", priority="high,critical")
|
|
242
|
+
- Find tasks in review: search_tasks(projectId, column="in_review")
|
|
243
|
+
|
|
244
|
+
Returns tasks with key, title, column, assignees, priority, taskType, and branch info.`,
|
|
245
|
+
{
|
|
246
|
+
projectId: z.string().describe("Project's MongoDB ObjectId"),
|
|
247
|
+
query: z.string().optional().describe('Free-text search across title, description, README, and key (e.g. "auth", "TASK-042", "payment webhook")'),
|
|
248
|
+
column: z.string().optional().describe('Filter by column — comma-separated: "todo,in_progress" or single "done"'),
|
|
249
|
+
taskType: z.string().optional().describe('Filter by task type — comma-separated: "bugfix,feature" or single "migration"'),
|
|
250
|
+
priority: z.string().optional().describe('Filter by priority — comma-separated: "high,critical"'),
|
|
251
|
+
assignee: z.string().optional().describe('Filter by assignee user ID — returns only tasks assigned to this user'),
|
|
252
|
+
limit: z.number().optional().default(20).describe('Max results to return (default 20, max 100)'),
|
|
253
|
+
},
|
|
254
|
+
async ({ projectId, query, column, taskType, priority, assignee, limit = 20 }) => {
|
|
255
|
+
if (scopedProjectId && projectId !== scopedProjectId) {
|
|
256
|
+
return errorText(`Access denied: session is scoped to project ${scopedProjectId}`)
|
|
257
|
+
}
|
|
258
|
+
const params = new URLSearchParams()
|
|
259
|
+
if (query) params.set('q', query)
|
|
260
|
+
if (column) params.set('column', column)
|
|
261
|
+
if (taskType) params.set('taskType', taskType)
|
|
262
|
+
if (priority) params.set('priority', priority)
|
|
263
|
+
if (assignee) params.set('assignee', assignee)
|
|
264
|
+
if (limit) params.set('limit', String(limit))
|
|
265
|
+
|
|
266
|
+
const res = await api.get(`/api/projects/${projectId}/tasks/search?${params.toString()}`)
|
|
267
|
+
if (!res?.success) return errorText(res?.message || 'Search failed')
|
|
268
|
+
|
|
269
|
+
const tasks = res.data.tasks || []
|
|
270
|
+
if (!tasks.length) return text({ found: 0, message: 'No tasks match your search criteria.' })
|
|
271
|
+
|
|
272
|
+
return text({
|
|
273
|
+
found: tasks.length,
|
|
274
|
+
tasks: tasks.map(t => ({
|
|
275
|
+
taskId: t._id,
|
|
276
|
+
key: t.key,
|
|
277
|
+
title: t.title,
|
|
278
|
+
column: t.column,
|
|
279
|
+
priority: t.priority,
|
|
280
|
+
taskType: t.taskType || null,
|
|
281
|
+
assignees: (t.assignees || []).map(a => ({ id: a._id, name: a.name || a.email })),
|
|
282
|
+
branch: t.github?.headBranch || null,
|
|
283
|
+
prNumber: t.github?.prNumber || null,
|
|
284
|
+
hasPR: !!(t.github?.prNumber),
|
|
285
|
+
updatedAt: t.updatedAt,
|
|
286
|
+
})),
|
|
287
|
+
tip: tasks.length === limit ? `Results capped at ${limit}. Use more specific filters or increase limit to see more.` : null,
|
|
288
|
+
})
|
|
289
|
+
}
|
|
290
|
+
)
|
|
291
|
+
|
|
292
|
+
// ── plan_task_from_codebase ───────────────────────────────────────────────────
|
|
293
|
+
server.tool(
|
|
294
|
+
'plan_task_from_codebase',
|
|
295
|
+
`Specialized task-creation agent — analyzes the codebase and creates a fully structured, kickoff-ready task.
|
|
296
|
+
|
|
297
|
+
Use this instead of create_task when you need to implement something and want the task to
|
|
298
|
+
contain a real implementation plan based on the actual codebase, not a generic description.
|
|
299
|
+
|
|
300
|
+
## MANDATORY protocol — follow every step in order
|
|
301
|
+
|
|
302
|
+
### Step 1 — Duplicate check (always first)
|
|
303
|
+
Call search_tasks(projectId, query="<keywords from the request>").
|
|
304
|
+
If a similar open task already exists → return it with kickoff instructions instead of creating a duplicate.
|
|
305
|
+
|
|
306
|
+
### Step 2 — Codebase analysis (READ the code, do not guess)
|
|
307
|
+
Using your native Read / Grep / Glob tools:
|
|
308
|
+
|
|
309
|
+
a) **Stack detection** — read package.json / go.mod / requirements.txt / Cargo.toml.
|
|
310
|
+
Identify: language, framework, major libraries, test runner.
|
|
311
|
+
|
|
312
|
+
b) **Entry point mapping** — find where the relevant feature area lives:
|
|
313
|
+
- For backend: grep for existing route patterns (router.post, app.get, @Controller, etc.)
|
|
314
|
+
- For frontend: grep for existing component patterns, hooks, state management
|
|
315
|
+
- For DB: find schema/model files
|
|
316
|
+
|
|
317
|
+
c) **Pattern extraction** — read 2-3 existing files similar to what you'll build.
|
|
318
|
+
Note: naming conventions, folder structure, how services/routes/components are wired.
|
|
319
|
+
|
|
320
|
+
d) **Impact analysis** — identify every file that needs to change:
|
|
321
|
+
- Files to CREATE (new route, new component, new model, new test)
|
|
322
|
+
- Files to MODIFY (existing router index, existing schema, existing types)
|
|
323
|
+
|
|
324
|
+
e) **Dependency order** — which files must be built first (schema before service, service before route, etc.)
|
|
325
|
+
|
|
326
|
+
### Step 3 — Write the implementation plan
|
|
327
|
+
Using what you found in Step 2, build:
|
|
328
|
+
- ## Goal — one sentence
|
|
329
|
+
- ## Stack — language/framework detected
|
|
330
|
+
- ## Technical approach — how it fits into the existing code (name actual files and functions)
|
|
331
|
+
- ## Files to create — path + what it does
|
|
332
|
+
- ## Files to modify — path + what changes
|
|
333
|
+
- ## Subtasks — ordered implementation steps (schema → service → route → test → UI)
|
|
334
|
+
- ## Acceptance criteria — what done looks like
|
|
335
|
+
|
|
336
|
+
### Step 4 — Determine task metadata
|
|
337
|
+
- taskType: feature / bugfix / migration / integration / ui / backend / security / refactor
|
|
338
|
+
- priority: low / medium / high / critical (use "high" if the request sounds important)
|
|
339
|
+
- suggestedFiles: the exact file paths from your Step 2d impact analysis
|
|
340
|
+
|
|
341
|
+
### Step 5 — Create the task
|
|
342
|
+
Call create_task with:
|
|
343
|
+
- projectId (from this call)
|
|
344
|
+
- title: action-oriented, concise (verb + noun, e.g. "Add rate limiting to /api/auth/login")
|
|
345
|
+
- description: one paragraph summary
|
|
346
|
+
- readmeMarkdown: the full plan from Step 3
|
|
347
|
+
- taskType, priority, column="todo"
|
|
348
|
+
- subtasks: the ordered list from Step 3 (each step = one subtask)
|
|
349
|
+
- suggestedFiles: from Step 2d
|
|
350
|
+
|
|
351
|
+
After create_task succeeds, immediately call:
|
|
352
|
+
kickoff_task(taskId=<returned id>, confirmed=true, agentRole="builder", files=<suggestedFiles>)
|
|
353
|
+
|
|
354
|
+
## What makes a high-quality task
|
|
355
|
+
- readmeMarkdown references REAL file paths found by grepping the codebase
|
|
356
|
+
- subtasks are ordered (schema first, then service, then route, then test, then UI)
|
|
357
|
+
- suggestedFiles lists every file that will be touched — no omissions
|
|
358
|
+
- title is specific ("Add email verification to /api/auth/register") not generic ("Add email feature")
|
|
359
|
+
|
|
360
|
+
Do NOT skip the codebase analysis. Do NOT create the task before reading the code.
|
|
361
|
+
Do NOT ask the developer to describe the codebase — read it yourself.`,
|
|
362
|
+
{
|
|
363
|
+
projectId: z.string().describe("InternalTool project's MongoDB ObjectId — from the project's task board URL or CLAUDE.md"),
|
|
364
|
+
request: z.string().describe('What the developer wants to build — the raw natural language request (e.g. "add rate limiting to the login endpoint", "fix the pagination bug on users list")'),
|
|
365
|
+
priority: z.enum(['low', 'medium', 'high', 'critical']).optional().default('medium')
|
|
366
|
+
.describe('Task priority — default medium, use high/critical for urgent work'),
|
|
367
|
+
},
|
|
368
|
+
async ({ projectId, request, priority = 'medium' }) => {
|
|
369
|
+
if (scopedProjectId && projectId !== scopedProjectId) {
|
|
370
|
+
return errorText(`Access denied: session is scoped to project ${scopedProjectId}`)
|
|
371
|
+
}
|
|
372
|
+
|
|
373
|
+
// Fetch project context so the agent knows the project name and any existing conventions
|
|
374
|
+
let projectContext = null
|
|
375
|
+
try {
|
|
376
|
+
const projRes = await api.get(`/api/projects/${projectId}`)
|
|
377
|
+
if (projRes?.success) {
|
|
378
|
+
const p = projRes.data.project
|
|
379
|
+
projectContext = {
|
|
380
|
+
name: p.name,
|
|
381
|
+
taskCount: (projRes.data.tasks || []).length,
|
|
382
|
+
githubRepo: p.github?.repoUrl || null,
|
|
383
|
+
}
|
|
384
|
+
}
|
|
385
|
+
} catch { /* non-fatal */ }
|
|
386
|
+
|
|
387
|
+
// Check for duplicate tasks first
|
|
388
|
+
let duplicateCheck = null
|
|
389
|
+
try {
|
|
390
|
+
const keywords = request.split(' ').filter(w => w.length > 4).slice(0, 3).join(' ')
|
|
391
|
+
const searchRes = await api.get(
|
|
392
|
+
`/api/projects/${projectId}/tasks/search?q=${encodeURIComponent(keywords)}&limit=3`
|
|
393
|
+
)
|
|
394
|
+
if (searchRes?.success && searchRes.data?.tasks?.length > 0) {
|
|
395
|
+
duplicateCheck = searchRes.data.tasks.map(t => ({
|
|
396
|
+
taskId: t._id,
|
|
397
|
+
key: t.key,
|
|
398
|
+
title: t.title,
|
|
399
|
+
column: t.column,
|
|
400
|
+
taskType: t.taskType,
|
|
401
|
+
}))
|
|
402
|
+
}
|
|
403
|
+
} catch { /* non-fatal */ }
|
|
404
|
+
|
|
405
|
+
if (duplicateCheck?.length > 0) {
|
|
406
|
+
return text({
|
|
407
|
+
duplicateWarning: true,
|
|
408
|
+
message: `Found ${duplicateCheck.length} potentially similar task(s). Review before creating a duplicate.`,
|
|
409
|
+
similarTasks: duplicateCheck,
|
|
410
|
+
instruction: [
|
|
411
|
+
'If one of these IS the same task: call kickoff_task on it instead of creating a new one.',
|
|
412
|
+
'If none match: proceed with the analysis protocol below.',
|
|
413
|
+
],
|
|
414
|
+
analysisProtocol: {
|
|
415
|
+
step1: 'Read package.json / go.mod / requirements.txt to identify the stack',
|
|
416
|
+
step2: 'Grep for existing patterns relevant to the request',
|
|
417
|
+
step3: 'Identify files to create and modify',
|
|
418
|
+
step4: `Call create_task(projectId="${projectId}", title="...", readmeMarkdown="...", taskType="...", priority="${priority}", column="todo", subtasks=[...], suggestedFiles=[...])`,
|
|
419
|
+
step5: 'Call kickoff_task(taskId=<id>, confirmed=true, agentRole="builder", files=[...suggestedFiles])',
|
|
420
|
+
},
|
|
421
|
+
projectContext,
|
|
422
|
+
request,
|
|
423
|
+
projectId,
|
|
424
|
+
priority,
|
|
425
|
+
})
|
|
426
|
+
}
|
|
427
|
+
|
|
428
|
+
// No duplicate — return the analysis brief
|
|
429
|
+
return text({
|
|
430
|
+
projectId,
|
|
431
|
+
request,
|
|
432
|
+
priority,
|
|
433
|
+
projectContext,
|
|
434
|
+
analysisProtocol: {
|
|
435
|
+
overview: 'Follow these steps IN ORDER before calling create_task',
|
|
436
|
+
step1_stack: 'Read package.json / go.mod / requirements.txt / Cargo.toml — detect language, framework, test runner',
|
|
437
|
+
step2_entrypoint: 'Grep for the relevant area: routes, controllers, components, models — find where similar features live',
|
|
438
|
+
step3_patterns: 'Read 2-3 existing similar files — note naming conventions, folder structure, wiring patterns',
|
|
439
|
+
step4_impact: 'List every file to CREATE and every file to MODIFY — be exhaustive',
|
|
440
|
+
step5_create: `Call create_task(projectId="${projectId}", title="<action-verb noun>", readmeMarkdown="<full plan>", taskType="<type>", priority="${priority}", column="todo", subtasks=[{title: "step1"}, ...], suggestedFiles=["path/to/file1", ...])`,
|
|
441
|
+
step6_kickoff: 'Call kickoff_task(taskId=<returned id>, confirmed=true, agentRole="builder", files=[...suggestedFiles])',
|
|
442
|
+
},
|
|
443
|
+
reminder: 'Do NOT call create_task before completing steps 1-4. The readmeMarkdown must reference real file paths from your analysis.',
|
|
444
|
+
})
|
|
445
|
+
}
|
|
446
|
+
)
|
|
447
|
+
|
|
230
448
|
server.tool(
|
|
231
449
|
'create_task',
|
|
232
|
-
|
|
450
|
+
`Create a new task in a project — use this as the final step after analysing the codebase.
|
|
451
|
+
|
|
452
|
+
## Optimal flow when a developer asks to implement a feature:
|
|
453
|
+
|
|
454
|
+
1. **Read the codebase first** (use your native Read/Grep/Glob tools — do NOT ask the developer to describe the code):
|
|
455
|
+
- Find the entry point: package.json / go.mod / requirements.txt → detect stack
|
|
456
|
+
- Find existing patterns relevant to the feature (e.g. grep for "router.post" to see how routes work)
|
|
457
|
+
- Identify which files will need to be created or modified
|
|
458
|
+
|
|
459
|
+
2. **Write the implementation plan** in readmeMarkdown:
|
|
460
|
+
- ## Goal — one sentence of what this builds
|
|
461
|
+
- ## Technical approach — how it fits into the existing code (name actual files and functions)
|
|
462
|
+
- ## Files to create — list each new file and what it does
|
|
463
|
+
- ## Files to modify — list each existing file and what changes
|
|
464
|
+
- ## Subtasks — ordered implementation steps (these become the task checklist)
|
|
465
|
+
|
|
466
|
+
3. **Call create_task** with the completed plan — the task is immediately kickoff-ready.
|
|
467
|
+
|
|
468
|
+
## What makes a good task
|
|
469
|
+
- readmeMarkdown has enough detail that an agent can implement without asking questions
|
|
470
|
+
- subtasks are ordered (schema first → service → route → test → frontend)
|
|
471
|
+
- taskType is set so kickoff_task routes correctly (feature/bugfix/migration/etc.)
|
|
472
|
+
- suggestedFiles tells the builder exactly which files to claim at kickoff
|
|
473
|
+
|
|
474
|
+
Always prefer column="todo" so the task is visibly ready to start.`,
|
|
233
475
|
{
|
|
234
476
|
projectId: z.string().describe("Project's MongoDB ObjectId"),
|
|
235
|
-
title: z.string().describe('Task title'),
|
|
236
|
-
description: z.string().optional().describe('
|
|
237
|
-
readmeMarkdown: z.string().optional().describe('
|
|
477
|
+
title: z.string().describe('Task title — concise, action-oriented (e.g. "Add email channel to sendNotification")'),
|
|
478
|
+
description: z.string().optional().describe('One-sentence summary of what this task does and why'),
|
|
479
|
+
readmeMarkdown: z.string().optional().describe('Full markdown implementation plan — see tool description for the recommended structure'),
|
|
238
480
|
priority: z.enum(['low', 'medium', 'high', 'critical']).optional(),
|
|
239
|
-
column: z.enum(['backlog', 'todo', 'in_progress', 'in_review', 'done']).optional(),
|
|
481
|
+
column: z.enum(['backlog', 'todo', 'in_progress', 'in_review', 'done']).optional().describe('Use "todo" for features ready to start'),
|
|
240
482
|
assignees: z.array(z.string()).optional().describe('User IDs to assign'),
|
|
483
|
+
taskType: z.enum(['migration', 'integration', 'bugfix', 'ui', 'backend', 'security', 'refactor', 'feature']).optional()
|
|
484
|
+
.describe('Task type — drives agent routing at kickoff (scout-first, coordinator vs single builder, etc.)'),
|
|
485
|
+
subtasks: z.array(z.object({ title: z.string() })).optional()
|
|
486
|
+
.describe('Ordered implementation checklist — shown in the task UI and read by agents at kickoff. Order matters: schema → model → service → route → test → frontend.'),
|
|
487
|
+
suggestedFiles: z.array(z.string()).optional()
|
|
488
|
+
.describe('Files the builder will claim at kickoff (e.g. ["server/routes/tasks.js", "server/models/Task.js"]). Included in the task README automatically so the builder knows what to pass to kickoff_task files=[...].'),
|
|
241
489
|
},
|
|
242
|
-
async ({ projectId, ...taskData }) => {
|
|
490
|
+
async ({ projectId, suggestedFiles, ...taskData }) => {
|
|
243
491
|
try { assertProjectScope(projectId) } catch (e) { return errorText(e.message) }
|
|
244
|
-
|
|
492
|
+
|
|
493
|
+
// Append suggested files section to the README so the builder sees them at kickoff
|
|
494
|
+
if (suggestedFiles?.length > 0 && taskData.readmeMarkdown) {
|
|
495
|
+
taskData.readmeMarkdown = [
|
|
496
|
+
taskData.readmeMarkdown.trimEnd(),
|
|
497
|
+
'',
|
|
498
|
+
'## Files to claim at kickoff',
|
|
499
|
+
'```',
|
|
500
|
+
...suggestedFiles.map(f => f),
|
|
501
|
+
'```',
|
|
502
|
+
`Call \`kickoff_task(confirmed=true, agentRole="builder", files=[${suggestedFiles.map(f => `"${f}"`).join(', ')}])\` to atomically claim ownership and start.`,
|
|
503
|
+
].join('\n')
|
|
504
|
+
}
|
|
505
|
+
|
|
506
|
+
const res = await api.post(`/api/projects/${projectId}/tasks`, taskData)
|
|
507
|
+
if (!res?.success) return errorText(res?.message || 'Failed to create task')
|
|
508
|
+
|
|
509
|
+
const task = res.data?.task
|
|
510
|
+
return text({
|
|
511
|
+
created: true,
|
|
512
|
+
taskId: task?._id,
|
|
513
|
+
taskKey: task?.key,
|
|
514
|
+
title: task?.title,
|
|
515
|
+
column: task?.column,
|
|
516
|
+
taskType: task?.taskType || null,
|
|
517
|
+
subtasks: (task?.subtasks || []).length,
|
|
518
|
+
kickoff: task?._id
|
|
519
|
+
? `kickoff_task(taskId="${task._id}", confirmed=false) ← read plan first\nkickoff_task(taskId="${task._id}", confirmed=true, agentRole="builder", files=[...]) ← start building`
|
|
520
|
+
: null,
|
|
521
|
+
message: `Task ${task?.key} created and ready to kick off.`,
|
|
522
|
+
})
|
|
245
523
|
}
|
|
246
524
|
)
|
|
247
525
|
|
|
@@ -602,6 +880,65 @@ Set confirmed=false first to read everything, then confirmed=true to execute.`,
|
|
|
602
880
|
}
|
|
603
881
|
)
|
|
604
882
|
|
|
883
|
+
// ── add_blocker ───────────────────────────────────────────────────────────────
|
|
884
|
+
server.tool(
|
|
885
|
+
'add_blocker',
|
|
886
|
+
`Mark a task as blocked by another task that must complete first.
|
|
887
|
+
|
|
888
|
+
Use this when you discover that TASK-A cannot start or cannot be merged until TASK-B is done.
|
|
889
|
+
Examples: auth middleware must ship before the protected routes task starts;
|
|
890
|
+
DB schema migration must merge before the service layer task.
|
|
891
|
+
|
|
892
|
+
The blocked task will show a warning in kickoff_task and be deprioritized in what_should_i_work_on.`,
|
|
893
|
+
{
|
|
894
|
+
taskId: z.string().describe("Task that is blocked — the one that cannot start yet"),
|
|
895
|
+
blockerTaskId: z.string().describe("Task that must complete first — the blocker"),
|
|
896
|
+
reason: z.string().optional().describe('Why this dependency exists — shown to developers'),
|
|
897
|
+
},
|
|
898
|
+
async ({ taskId, blockerTaskId, reason }) => {
|
|
899
|
+
const res = await api.post(`/api/tasks/${taskId}/blockers`, { blockerTaskId })
|
|
900
|
+
if (!res?.success) return errorText(res?.message || 'Could not add blocker')
|
|
901
|
+
// Optionally add a comment explaining the dependency
|
|
902
|
+
if (reason) {
|
|
903
|
+
await api.post(`/api/tasks/${taskId}/comments`, {
|
|
904
|
+
body: `🔒 **Blocked by task \`${blockerTaskId}\`**: ${reason}`,
|
|
905
|
+
}).catch(() => {})
|
|
906
|
+
}
|
|
907
|
+
return text({
|
|
908
|
+
blocked: true,
|
|
909
|
+
taskId,
|
|
910
|
+
blockerTaskId,
|
|
911
|
+
reason: reason || null,
|
|
912
|
+
message: `Task is now blocked. It will not be recommended until the blocker is resolved.`,
|
|
913
|
+
nextStep: `When the blocker task is done, call remove_blocker with taskId="${taskId}" and blockerTaskId="${blockerTaskId}".`,
|
|
914
|
+
})
|
|
915
|
+
}
|
|
916
|
+
)
|
|
917
|
+
|
|
918
|
+
// ── remove_blocker ────────────────────────────────────────────────────────────
|
|
919
|
+
server.tool(
|
|
920
|
+
'remove_blocker',
|
|
921
|
+
`Remove a blocker from a task, marking it as unblocked and ready to start.
|
|
922
|
+
|
|
923
|
+
Call this when the blocking task has been merged or completed.
|
|
924
|
+
After removing the blocker, the task will appear in what_should_i_work_on recommendations again.`,
|
|
925
|
+
{
|
|
926
|
+
taskId: z.string().describe("Task that was blocked"),
|
|
927
|
+
blockerTaskId: z.string().describe("The blocker task that is now resolved"),
|
|
928
|
+
},
|
|
929
|
+
async ({ taskId, blockerTaskId }) => {
|
|
930
|
+
const res = await api.delete(`/api/tasks/${taskId}/blockers/${blockerTaskId}`)
|
|
931
|
+
if (!res?.success) return errorText(res?.message || 'Could not remove blocker')
|
|
932
|
+
return text({
|
|
933
|
+
unblocked: true,
|
|
934
|
+
taskId,
|
|
935
|
+
blockerTaskId,
|
|
936
|
+
message: `Blocker removed. Task is now unblocked.`,
|
|
937
|
+
nextStep: `Call kickoff_task with taskId="${taskId}" to start this task.`,
|
|
938
|
+
})
|
|
939
|
+
}
|
|
940
|
+
)
|
|
941
|
+
|
|
605
942
|
// ── claim_files ──────────────────────────────────────────────────────────────
|
|
606
943
|
server.tool(
|
|
607
944
|
'claim_files',
|
|
@@ -828,32 +1165,51 @@ Call confirmed=false to preview the decomposition, confirmed=true to save it.`,
|
|
|
828
1165
|
})
|
|
829
1166
|
}
|
|
830
1167
|
|
|
831
|
-
// Save decomposition to task
|
|
1168
|
+
// Save decomposition JSON to parent task
|
|
832
1169
|
const decompositionJson = JSON.stringify(executionPlan, null, 2)
|
|
833
1170
|
try {
|
|
834
1171
|
await api.patch(`/api/tasks/${taskId}`, { decomposition: decompositionJson })
|
|
835
|
-
} catch { /* non-fatal — decomposition is returned regardless */ }
|
|
836
|
-
|
|
837
|
-
// Create subtasks on the board
|
|
838
|
-
const currentSubtasks = task.subtasks || []
|
|
839
|
-
const newSubtasks = [
|
|
840
|
-
...currentSubtasks,
|
|
841
|
-
...subtaskPlan.map((s, i) => ({
|
|
842
|
-
title: `[${s.role.toUpperCase()}] ${s.title}`,
|
|
843
|
-
done: false,
|
|
844
|
-
order: currentSubtasks.length + i,
|
|
845
|
-
})),
|
|
846
|
-
]
|
|
847
|
-
try {
|
|
848
|
-
await api.patch(`/api/tasks/${taskId}`, { subtasks: newSubtasks })
|
|
849
1172
|
} catch { /* non-fatal */ }
|
|
850
1173
|
|
|
1174
|
+
// Create real child tasks for each subtask in the plan
|
|
1175
|
+
const projectId = task.project?._id || task.project
|
|
1176
|
+
const createdTasks = []
|
|
1177
|
+
for (const s of subtaskPlan) {
|
|
1178
|
+
try {
|
|
1179
|
+
const childRes = await api.post(`/api/projects/${projectId}/tasks`, {
|
|
1180
|
+
title: `[${s.role.toUpperCase()}] ${s.title}`,
|
|
1181
|
+
description: s.description,
|
|
1182
|
+
readmeMarkdown: [
|
|
1183
|
+
`## Role: ${s.role}`,
|
|
1184
|
+
`## Description\n${s.description}`,
|
|
1185
|
+
s.files?.length ? `## Files to claim at kickoff\n${s.files.map(f => `- \`${f}\``).join('\n')}` : '',
|
|
1186
|
+
s.dependsOn?.length ? `## Depends on\n${s.dependsOn.map(d => `- ${d}`).join('\n')}` : '',
|
|
1187
|
+
].filter(Boolean).join('\n\n'),
|
|
1188
|
+
column: 'todo',
|
|
1189
|
+
priority: task.priority || 'medium',
|
|
1190
|
+
taskType: s.role === 'reviewer' ? 'feature' : (task.taskType || 'feature'),
|
|
1191
|
+
parentTask: taskId,
|
|
1192
|
+
suggestedFiles: s.files || [],
|
|
1193
|
+
})
|
|
1194
|
+
if (childRes?.success) {
|
|
1195
|
+
createdTasks.push({
|
|
1196
|
+
taskId: childRes.data?.task?._id,
|
|
1197
|
+
taskKey: childRes.data?.task?.key,
|
|
1198
|
+
title: childRes.data?.task?.title,
|
|
1199
|
+
role: s.role,
|
|
1200
|
+
files: s.files,
|
|
1201
|
+
})
|
|
1202
|
+
}
|
|
1203
|
+
} catch { /* non-fatal — continue creating remaining tasks */ }
|
|
1204
|
+
}
|
|
1205
|
+
|
|
851
1206
|
return text({
|
|
852
1207
|
decomposed: true,
|
|
853
1208
|
taskKey: task.key,
|
|
854
1209
|
executionPlan,
|
|
855
|
-
|
|
856
|
-
|
|
1210
|
+
childTasksCreated: createdTasks.length,
|
|
1211
|
+
childTasks: createdTasks,
|
|
1212
|
+
message: `Decomposition saved. ${createdTasks.length} child task(s) created on the board (${subtaskPlan.length - createdTasks.length} failed).`,
|
|
857
1213
|
nextStep: parallelGroups.length > 0
|
|
858
1214
|
? `⚡ COORDINATOR: Call get_parallel_kickoffs with taskId="${taskId}" NOW. It writes Cursor Background Agent files for each parallel builder automatically. Then tell the user to open Background Agents panel (⌘⇧J) and click Start. DO NOT implement code yourself.`
|
|
859
1215
|
: `Call get_parallel_kickoffs with taskId="${taskId}". It writes a Cursor Background Agent file for the builder. Tell the user to open Background Agents panel (⌘⇧J) and start it.`,
|
|
@@ -982,10 +1338,57 @@ Returns:
|
|
|
982
1338
|
message: 'GitHub is still computing mergeability. Wait a few seconds and call check_merge_conflicts again.',
|
|
983
1339
|
})
|
|
984
1340
|
|
|
985
|
-
if (!d.hasConflict && d.behindBy === 0)
|
|
986
|
-
|
|
987
|
-
|
|
988
|
-
|
|
1341
|
+
if (!d.hasConflict && d.behindBy === 0) {
|
|
1342
|
+
// Branch is clean — remove conflict-resolver.mdc if it was written during a previous kickoff.
|
|
1343
|
+
// Leaving it on disk keeps injecting "BRANCH CONFLICT ACTIVE" into every Cursor prompt forever.
|
|
1344
|
+
let cleanedRuleFile = false
|
|
1345
|
+
try {
|
|
1346
|
+
const root = findRepoRoot(process.cwd())
|
|
1347
|
+
if (root) {
|
|
1348
|
+
const ruleFile = join(root, '.cursor', 'rules', 'conflict-resolver.mdc')
|
|
1349
|
+
if (existsSync(ruleFile)) {
|
|
1350
|
+
unlinkSync(ruleFile)
|
|
1351
|
+
cleanedRuleFile = true
|
|
1352
|
+
}
|
|
1353
|
+
}
|
|
1354
|
+
} catch { /* non-fatal — never block on cleanup */ }
|
|
1355
|
+
return text({
|
|
1356
|
+
status: 'clean',
|
|
1357
|
+
message: `PR is clean — no conflicts, not behind ${d.base}. Safe to merge.`,
|
|
1358
|
+
...(cleanedRuleFile ? { cleaned: 'conflict-resolver.mdc removed from .cursor/rules/ — Cursor will no longer inject conflict warnings.' } : {}),
|
|
1359
|
+
})
|
|
1360
|
+
}
|
|
1361
|
+
|
|
1362
|
+
// Diff-based fallback: when conflicting files exist but no task owns them via claimedFiles,
|
|
1363
|
+
// scan the git tree to find branches that touch the same files without having claimed them.
|
|
1364
|
+
// This catches real overlaps when developers skip claim_files or edit files directly.
|
|
1365
|
+
let uncoveredConflicts = []
|
|
1366
|
+
if (d.hasConflict && d.conflictingFiles?.length > 0) {
|
|
1367
|
+
try {
|
|
1368
|
+
const taskRes = await api.get(`/api/tasks/${taskId}`)
|
|
1369
|
+
const projectId = taskRes?.data?.task?.project
|
|
1370
|
+
if (projectId) {
|
|
1371
|
+
const treeRes = await api.get(`/api/projects/${projectId}/github/git-tree`).catch(() => null)
|
|
1372
|
+
if (treeRes?.success) {
|
|
1373
|
+
const coveredFiles = new Set((d.conflictingTasks || []).flatMap(t => t.claimedFiles || []))
|
|
1374
|
+
const branches = treeRes.data.branches || []
|
|
1375
|
+
for (const file of d.conflictingFiles) {
|
|
1376
|
+
if (coveredFiles.has(file)) continue // already identified via claimedFiles
|
|
1377
|
+
const matches = branches
|
|
1378
|
+
.filter(b => String(b.taskId) !== taskId && (b.claimedFiles || []).includes(file))
|
|
1379
|
+
.map(b => ({ taskKey: b.taskKey, branch: b.headBranch, assignees: (b.assignees || []).map(a => a.name).join(', ') }))
|
|
1380
|
+
uncoveredConflicts.push({
|
|
1381
|
+
file,
|
|
1382
|
+
likelyCausedBy: matches.length > 0 ? matches : null,
|
|
1383
|
+
hint: matches.length === 0
|
|
1384
|
+
? `No task claims this file. Check: git log origin/${d.base} -- ${file} --oneline -5`
|
|
1385
|
+
: null,
|
|
1386
|
+
})
|
|
1387
|
+
}
|
|
1388
|
+
}
|
|
1389
|
+
}
|
|
1390
|
+
} catch { /* non-fatal */ }
|
|
1391
|
+
}
|
|
989
1392
|
|
|
990
1393
|
return text({
|
|
991
1394
|
status: d.hasConflict ? 'conflict' : 'behind',
|
|
@@ -995,10 +1398,11 @@ Returns:
|
|
|
995
1398
|
headBranch: d.headBranch,
|
|
996
1399
|
conflictingFiles: d.conflictingFiles,
|
|
997
1400
|
conflictingTasks: d.conflictingTasks,
|
|
1401
|
+
...(uncoveredConflicts.length > 0 ? { uncoveredConflicts } : {}),
|
|
998
1402
|
coordinationNote: d.coordinationNote,
|
|
999
1403
|
resolution: {
|
|
1000
1404
|
steps: d.resolution,
|
|
1001
|
-
note: d.hasConflict && d.conflictingTasks?.length > 0
|
|
1405
|
+
note: d.hasConflict && (d.conflictingTasks?.length > 0 || uncoveredConflicts.length > 0)
|
|
1002
1406
|
? `⚠️ COORDINATE FIRST: Contact the assignees of the conflicting tasks before resolving. Resolving without coordination may overwrite their work.`
|
|
1003
1407
|
: `Rebase your branch on ${d.base} to resolve.`,
|
|
1004
1408
|
},
|
|
@@ -1058,6 +1462,27 @@ the Edit tool to resolve the conflict markers, then continue the rebase with Bas
|
|
|
1058
1462
|
} catch { /* non-fatal */ }
|
|
1059
1463
|
}
|
|
1060
1464
|
|
|
1465
|
+
// Fallback: no task owns this file via claimedFiles — conflict came from a direct commit to the
|
|
1466
|
+
// base branch. Fetch the 5 most recent commits touching this file so the agent can read
|
|
1467
|
+
// what changed and why, instead of getting a generic "check git log" message.
|
|
1468
|
+
let recentMainCommits = null
|
|
1469
|
+
if (!otherTask && task.project) {
|
|
1470
|
+
try {
|
|
1471
|
+
const base = conflictData.base || 'main'
|
|
1472
|
+
const commitsRes = await api.get(
|
|
1473
|
+
`/api/projects/${task.project}/github/commits?sha=${base}&per_page=5&path=${encodeURIComponent(filePath)}`
|
|
1474
|
+
)
|
|
1475
|
+
if (commitsRes?.success && commitsRes.data?.commits?.length > 0) {
|
|
1476
|
+
recentMainCommits = commitsRes.data.commits.slice(0, 5).map(c => ({
|
|
1477
|
+
sha: c.sha?.slice(0, 7),
|
|
1478
|
+
message: c.commit?.message?.split('\n')[0],
|
|
1479
|
+
author: c.commit?.author?.name,
|
|
1480
|
+
date: c.commit?.author?.date,
|
|
1481
|
+
}))
|
|
1482
|
+
}
|
|
1483
|
+
} catch { /* non-fatal */ }
|
|
1484
|
+
}
|
|
1485
|
+
|
|
1061
1486
|
const base = conflictData.base || 'main'
|
|
1062
1487
|
|
|
1063
1488
|
return text({
|
|
@@ -1069,7 +1494,11 @@ the Edit tool to resolve the conflict markers, then continue the rebase with Bas
|
|
|
1069
1494
|
scoutReport: task.scoutReport || null,
|
|
1070
1495
|
},
|
|
1071
1496
|
otherTask: otherTask || {
|
|
1072
|
-
note: '
|
|
1497
|
+
note: 'No task claims this file — conflict came from a direct commit to ' + base + '.',
|
|
1498
|
+
...(recentMainCommits
|
|
1499
|
+
? { recentCommits: recentMainCommits, hint: 'Read recentCommits above to understand what changed in ' + base + ' before deciding what to keep.' }
|
|
1500
|
+
: { hint: 'Run: git log origin/' + base + ' -- ' + filePath + ' --oneline -5' }
|
|
1501
|
+
),
|
|
1073
1502
|
},
|
|
1074
1503
|
conflictMarkers: {
|
|
1075
1504
|
ours: '<<<<<<< HEAD ← YOUR changes (this task)',
|
|
@@ -1231,7 +1660,7 @@ Returns systemPrompt ready to use as a Claude system prompt.`,
|
|
|
1231
1660
|
const ctx = res.data
|
|
1232
1661
|
|
|
1233
1662
|
const effectiveRole = ctx.role
|
|
1234
|
-
const roleRules = effectiveRole
|
|
1663
|
+
const roleRules = getRoleRules(effectiveRole, ctx.claimedFiles || [])
|
|
1235
1664
|
|
|
1236
1665
|
// Compose the full system prompt
|
|
1237
1666
|
const parts = []
|
|
@@ -2192,6 +2621,24 @@ Call this when the developer says "generate standup", "what did I do yesterday",
|
|
|
2192
2621
|
const yesterday = new Date(Date.now() - 24 * 60 * 60 * 1000)
|
|
2193
2622
|
const PRIORITY_ORDER = { critical: 0, high: 1, medium: 2, low: 3 }
|
|
2194
2623
|
|
|
2624
|
+
// 0. Real git commits since midnight (local repo)
|
|
2625
|
+
let gitCommitsToday = []
|
|
2626
|
+
try {
|
|
2627
|
+
const midnight = new Date()
|
|
2628
|
+
midnight.setHours(0, 0, 0, 0)
|
|
2629
|
+
const out = execSync(
|
|
2630
|
+
`git log --since="${midnight.toISOString()}" --format="%h|||%s|||%an|||%D" --all 2>/dev/null`,
|
|
2631
|
+
{ cwd: process.cwd(), encoding: 'utf8', timeout: 3000 }
|
|
2632
|
+
)
|
|
2633
|
+
gitCommitsToday = out.trim().split('\n').filter(Boolean).map(line => {
|
|
2634
|
+
const [sha, msg, author, refs] = line.split('|||')
|
|
2635
|
+
return { sha: sha?.slice(0, 7), message: msg?.trim(), author: author?.trim(), refs: refs?.trim() || null }
|
|
2636
|
+
})
|
|
2637
|
+
} catch { /* non-fatal — local git not available */ }
|
|
2638
|
+
|
|
2639
|
+
// 0b. PRs opened or merged today (across my tasks)
|
|
2640
|
+
// We'll collect these from task activity below
|
|
2641
|
+
|
|
2195
2642
|
// 1. Who am I?
|
|
2196
2643
|
const meRes = await api.get('/api/auth/me')
|
|
2197
2644
|
const me = meRes?.data?.user || {}
|
|
@@ -2306,10 +2753,14 @@ Call this when the developer says "generate standup", "what did I do yesterday",
|
|
|
2306
2753
|
const todayFocus = Object.entries(byAssignee)
|
|
2307
2754
|
.map(([person, tasks]) => ` ${person}: ${tasks.map(t => `${t.key} (${t.priority})`).join(', ')}`)
|
|
2308
2755
|
.join('\n')
|
|
2756
|
+
const gitCommitsSummary = gitCommitsToday.length > 0
|
|
2757
|
+
? gitCommitsToday.slice(0, 5).map(c => ` \`${c.sha}\` ${c.message}`).join('\n')
|
|
2758
|
+
: null
|
|
2309
2759
|
const copyPaste = [
|
|
2310
2760
|
`**Yesterday:** ${yesterdayItems.length
|
|
2311
2761
|
? yesterdayItems.map(i => `${i.key} — ${i.activity.slice(0,2).join('; ')}`).join('. ')
|
|
2312
2762
|
: 'No recorded activity in last 24 h.'}`,
|
|
2763
|
+
gitCommitsSummary ? `**Commits today:**\n${gitCommitsSummary}` : null,
|
|
2313
2764
|
`**Today:**\n${todayFocus || ' No active tasks.'}`,
|
|
2314
2765
|
blockers.length ? `**Blockers:** ${blockers.join(' | ')}` : '**Blockers:** None.',
|
|
2315
2766
|
risks.length ? `**Risks:** ${risks.join(' | ')}` : null,
|
|
@@ -2317,6 +2768,7 @@ Call this when the developer says "generate standup", "what did I do yesterday",
|
|
|
2317
2768
|
|
|
2318
2769
|
return text({
|
|
2319
2770
|
me: { name: me.name, email: me.email, role: me.role },
|
|
2771
|
+
gitCommitsToday: gitCommitsToday.length ? gitCommitsToday : '(no local git commits found since midnight)',
|
|
2320
2772
|
yesterday: yesterdayItems.length ? yesterdayItems : '(no activity logged in last 24 h)',
|
|
2321
2773
|
today: {
|
|
2322
2774
|
byAssignee: Object.fromEntries(
|
|
@@ -2349,80 +2801,150 @@ Call this when the developer says "generate standup", "what did I do yesterday",
|
|
|
2349
2801
|
'what_should_i_work_on',
|
|
2350
2802
|
`Recommend the single most important task to work on right now.
|
|
2351
2803
|
|
|
2352
|
-
Scores all assigned tasks by: priority, urgency (PR needs fix), column state, branch readiness
|
|
2353
|
-
|
|
2804
|
+
Scores all assigned tasks by: priority, urgency (PR needs fix), column state, branch readiness,
|
|
2805
|
+
files you recently edited (git log), and PRs waiting for your review.
|
|
2806
|
+
|
|
2807
|
+
Also surfaces:
|
|
2808
|
+
- Pending approval reviews assigned to you
|
|
2809
|
+
- Tasks you can unblock (blockers that are now done)
|
|
2354
2810
|
|
|
2355
2811
|
Use this when the developer says "what should I do next", "I'm free", or "what's my priority".`,
|
|
2356
2812
|
{},
|
|
2357
2813
|
async () => {
|
|
2358
|
-
|
|
2359
|
-
|
|
2360
|
-
|
|
2814
|
+
// Fetch my tasks + identity in parallel
|
|
2815
|
+
const [tasksRes, meRes] = await Promise.all([
|
|
2816
|
+
api.get('/api/users/me/tasks'),
|
|
2817
|
+
api.get('/api/auth/me'),
|
|
2818
|
+
])
|
|
2819
|
+
if (!tasksRes?.success) return errorText('Could not fetch tasks')
|
|
2820
|
+
const tasks = (tasksRes.data.tasks || []).filter(t => t.column !== 'done')
|
|
2821
|
+
const meId = meRes?.data?.user?._id || ''
|
|
2822
|
+
|
|
2823
|
+
// Recently edited files from local git (last 2 days) — boosts tasks claiming those files
|
|
2824
|
+
let recentFiles = new Set()
|
|
2825
|
+
try {
|
|
2826
|
+
const out = execSync(
|
|
2827
|
+
'git log --since=2.days.ago --name-only --format="" --diff-filter=ACM 2>/dev/null',
|
|
2828
|
+
{ cwd: process.cwd(), encoding: 'utf8', timeout: 3000 }
|
|
2829
|
+
)
|
|
2830
|
+
out.split('\n').map(s => s.trim()).filter(Boolean).forEach(f => recentFiles.add(f))
|
|
2831
|
+
} catch { /* non-fatal — local git may not be available */ }
|
|
2361
2832
|
|
|
2362
|
-
|
|
2833
|
+
// Pending approval reviews assigned to me (across all projects)
|
|
2834
|
+
let pendingReviews = []
|
|
2835
|
+
try {
|
|
2836
|
+
const projectsRes = await api.get('/api/projects')
|
|
2837
|
+
const projects = projectsRes?.data?.projects || []
|
|
2838
|
+
const boards = await Promise.all(
|
|
2839
|
+
projects.map(p => api.get(`/api/projects/${p._id}`).catch(() => null))
|
|
2840
|
+
)
|
|
2841
|
+
for (const r of boards.filter(Boolean)) {
|
|
2842
|
+
for (const t of (r?.data?.project?.tasks || [])) {
|
|
2843
|
+
const apv = (t.approvals || []).find(a => a.state === 'pending')
|
|
2844
|
+
if (!apv) continue
|
|
2845
|
+
const rid = apv?.reviewer?._id || apv?.reviewer
|
|
2846
|
+
if (!meId || !rid || String(rid) !== String(meId)) continue
|
|
2847
|
+
const waitMs = apv.requestedAt ? Date.now() - new Date(apv.requestedAt).getTime() : 0
|
|
2848
|
+
pendingReviews.push({
|
|
2849
|
+
taskId: t._id,
|
|
2850
|
+
key: t.key,
|
|
2851
|
+
title: t.title,
|
|
2852
|
+
waitingHours: Math.round(waitMs / 3600000 * 10) / 10,
|
|
2853
|
+
submittedBy: apv?.requestedBy?.name || apv?.requestedBy?.email || 'unknown',
|
|
2854
|
+
})
|
|
2855
|
+
}
|
|
2856
|
+
}
|
|
2857
|
+
pendingReviews.sort((a, b) => b.waitingHours - a.waitingHours)
|
|
2858
|
+
} catch { /* non-fatal */ }
|
|
2859
|
+
|
|
2860
|
+
if (!tasks.length && !pendingReviews.length) {
|
|
2861
|
+
return text({ recommendation: 'No open tasks or pending reviews. Ask your project lead for work.' })
|
|
2862
|
+
}
|
|
2363
2863
|
|
|
2364
2864
|
function scoreTask(t) {
|
|
2365
2865
|
const PRIORITY = { critical: 100, high: 60, medium: 30, low: 10 }
|
|
2366
2866
|
let score = PRIORITY[t.priority] ?? 0
|
|
2367
2867
|
if (t.github?.changesRequestedAt) score += 80 // PR needs fix — most urgent
|
|
2368
|
-
if (t.column === 'in_progress') score += 25 // already in flight
|
|
2369
|
-
if (t.column === 'in_review') score += 10 // waiting on reviewer
|
|
2868
|
+
if (t.column === 'in_progress') score += 25 // already in flight
|
|
2869
|
+
if (t.column === 'in_review') score += 10 // waiting on reviewer
|
|
2370
2870
|
if (t.column === 'todo') score += 15 // ready to start
|
|
2371
2871
|
if (t.column === 'backlog') score -= 10 // not yet planned
|
|
2372
2872
|
if (t.parkNote?.parkedAt) score -= 20 // intentionally paused
|
|
2373
|
-
if (t.
|
|
2873
|
+
if (t.blockedBy?.some(b => typeof b === 'object' ? b.column !== 'done' : false)) score -= 50
|
|
2874
|
+
if (t.github?.headBranch) score += 5
|
|
2875
|
+
// Boost tasks overlapping files you've been editing recently
|
|
2876
|
+
if (recentFiles.size > 0 && (t.claimedFiles || []).some(f => recentFiles.has(f))) score += 20
|
|
2374
2877
|
return score
|
|
2375
2878
|
}
|
|
2376
2879
|
|
|
2377
|
-
const scored = tasks
|
|
2378
|
-
.map(t => ({ ...t, _score: scoreTask(t) }))
|
|
2379
|
-
|
|
2380
|
-
|
|
2381
|
-
|
|
2382
|
-
|
|
2383
|
-
|
|
2384
|
-
|
|
2385
|
-
|
|
2386
|
-
|
|
2387
|
-
|
|
2388
|
-
|
|
2389
|
-
|
|
2390
|
-
|
|
2391
|
-
|
|
2392
|
-
|
|
2393
|
-
|
|
2394
|
-
|
|
2395
|
-
|
|
2396
|
-
|
|
2397
|
-
|
|
2398
|
-
|
|
2399
|
-
|
|
2400
|
-
|
|
2401
|
-
|
|
2402
|
-
|
|
2403
|
-
|
|
2404
|
-
|
|
2405
|
-
|
|
2406
|
-
|
|
2407
|
-
|
|
2408
|
-
|
|
2409
|
-
|
|
2410
|
-
|
|
2411
|
-
|
|
2412
|
-
|
|
2413
|
-
|
|
2414
|
-
|
|
2415
|
-
|
|
2416
|
-
|
|
2417
|
-
|
|
2418
|
-
|
|
2419
|
-
|
|
2420
|
-
|
|
2421
|
-
|
|
2880
|
+
const scored = tasks.length
|
|
2881
|
+
? tasks.map(t => ({ ...t, _score: scoreTask(t) })).sort((a, b) => b._score - a._score)
|
|
2882
|
+
: []
|
|
2883
|
+
|
|
2884
|
+
// If pending reviews are waiting and outrank the top task, surface review first
|
|
2885
|
+
const longestWaitingReview = pendingReviews[0]
|
|
2886
|
+
const topTask = scored[0]
|
|
2887
|
+
|
|
2888
|
+
let recommendation, isReview = false
|
|
2889
|
+
if (longestWaitingReview && (!topTask || longestWaitingReview.waitingHours > 4)) {
|
|
2890
|
+
isReview = true
|
|
2891
|
+
recommendation = {
|
|
2892
|
+
type: 'review',
|
|
2893
|
+
taskId: longestWaitingReview.taskId,
|
|
2894
|
+
key: longestWaitingReview.key,
|
|
2895
|
+
title: longestWaitingReview.title,
|
|
2896
|
+
reason: `Approval review waiting ${longestWaitingReview.waitingHours}h — ${longestWaitingReview.submittedBy} is blocked on you.`,
|
|
2897
|
+
nextStep: `Call get_review_bundle with taskId="${longestWaitingReview.taskId}" to start the review.`,
|
|
2898
|
+
}
|
|
2899
|
+
} else if (topTask) {
|
|
2900
|
+
let reason, nextStep
|
|
2901
|
+
if (topTask.github?.changesRequestedAt) {
|
|
2902
|
+
reason = `PR has changes requested — this blocks the merge and the reviewer is waiting.`
|
|
2903
|
+
nextStep = `Call fix_pr_feedback with taskAId="${topTask._id}" to get the full fix plan.`
|
|
2904
|
+
} else if (topTask.column === 'in_progress' && topTask.github?.headBranch) {
|
|
2905
|
+
const fileHint = recentFiles.size > 0 && (topTask.claimedFiles || []).some(f => recentFiles.has(f))
|
|
2906
|
+
? ' (matches your recent git activity)'
|
|
2907
|
+
: ''
|
|
2908
|
+
reason = `Already in progress on branch ${topTask.github.headBranch} — keep the momentum going.${fileHint}`
|
|
2909
|
+
nextStep = `Continue coding on ${topTask.github.headBranch}. When commits are pushed, call raise_pr.`
|
|
2910
|
+
} else if (topTask.column === 'in_progress' && !topTask.github?.headBranch) {
|
|
2911
|
+
reason = `In progress but no branch yet — needs a branch to start committing.`
|
|
2912
|
+
nextStep = `Call create_branch with taskId="${topTask._id}" and projectId="${topTask.project?._id || topTask.project}".`
|
|
2913
|
+
} else if (topTask.column === 'todo') {
|
|
2914
|
+
reason = `Highest priority unstarted task — ready to kick off.`
|
|
2915
|
+
nextStep = `Call kickoff_task with taskId="${topTask._id}" to read the plan and move to in_progress.`
|
|
2916
|
+
} else {
|
|
2917
|
+
reason = `Highest priority available task.`
|
|
2918
|
+
nextStep = `Call get_task_context with taskId="${topTask._id}" for full details.`
|
|
2919
|
+
}
|
|
2920
|
+
recommendation = {
|
|
2921
|
+
type: 'task',
|
|
2922
|
+
taskId: topTask._id,
|
|
2923
|
+
key: topTask.key,
|
|
2924
|
+
title: topTask.title,
|
|
2925
|
+
priority: topTask.priority,
|
|
2926
|
+
column: topTask.column,
|
|
2927
|
+
project: topTask.project?.name || topTask.project,
|
|
2422
2928
|
reason,
|
|
2423
2929
|
nextStep,
|
|
2424
|
-
}
|
|
2425
|
-
|
|
2930
|
+
}
|
|
2931
|
+
}
|
|
2932
|
+
|
|
2933
|
+
return text({
|
|
2934
|
+
recommendation,
|
|
2935
|
+
queue: scored.slice(0, 3).map((t, i) => ({
|
|
2936
|
+
rank: i + 1,
|
|
2937
|
+
key: t.key,
|
|
2938
|
+
title: t.title,
|
|
2939
|
+
priority: t.priority,
|
|
2940
|
+
column: t.column,
|
|
2941
|
+
score: t._score,
|
|
2942
|
+
branch: t.github?.headBranch || null,
|
|
2943
|
+
needsFix: !!t.github?.changesRequestedAt,
|
|
2944
|
+
recentFileMatch: recentFiles.size > 0 && (t.claimedFiles || []).some(f => recentFiles.has(f)),
|
|
2945
|
+
})),
|
|
2946
|
+
pendingReviews: pendingReviews.length > 0 ? pendingReviews : null,
|
|
2947
|
+
recentFilesDetected: recentFiles.size,
|
|
2426
2948
|
})
|
|
2427
2949
|
}
|
|
2428
2950
|
)
|
|
@@ -2556,6 +3078,28 @@ Use this when the developer or team lead asks "how's the board?", "any blockers?
|
|
|
2556
3078
|
suggestion: `Use update_task to add a readmeMarkdown implementation plan`,
|
|
2557
3079
|
})
|
|
2558
3080
|
}
|
|
3081
|
+
|
|
3082
|
+
// Stale PR — open 5+ days with no approval or changes-requested (forgotten in review queue)
|
|
3083
|
+
if (
|
|
3084
|
+
t.column === 'in_review' &&
|
|
3085
|
+
t.github?.prNumber &&
|
|
3086
|
+
ageDays > 5 &&
|
|
3087
|
+
!t.github?.changesRequestedAt // already flagged above if changes were requested
|
|
3088
|
+
) {
|
|
3089
|
+
flags.push({
|
|
3090
|
+
severity: 'warning',
|
|
3091
|
+
task: t.key,
|
|
3092
|
+
title: t.title,
|
|
3093
|
+
project: board.name,
|
|
3094
|
+
assignee: assignees,
|
|
3095
|
+
issue: `PR #${t.github.prNumber} has been open ${Math.floor(ageDays)} days with no reviewer action — likely forgotten`,
|
|
3096
|
+
suggestion: t.github?.prUrl
|
|
3097
|
+
? `Check PR at ${t.github.prUrl} — ping the reviewer or close if abandoned`
|
|
3098
|
+
: `Check GitHub for PR #${t.github.prNumber} — ping the reviewer`,
|
|
3099
|
+
prNumber: t.github.prNumber,
|
|
3100
|
+
prUrl: t.github.prUrl || null,
|
|
3101
|
+
})
|
|
3102
|
+
}
|
|
2559
3103
|
}
|
|
2560
3104
|
}
|
|
2561
3105
|
|
|
@@ -2582,21 +3126,138 @@ Use this when the developer or team lead asks "how's the board?", "any blockers?
|
|
|
2582
3126
|
|
|
2583
3127
|
server.tool(
|
|
2584
3128
|
'end_of_day',
|
|
2585
|
-
`End-of-day wrap-up
|
|
2586
|
-
|
|
2587
|
-
|
|
3129
|
+
`End-of-day wrap-up — call with confirmed=false first to get a checklist, then confirmed=true to park tasks.
|
|
3130
|
+
|
|
3131
|
+
Phase 1 (confirmed=false — default):
|
|
3132
|
+
- Scans local git for uncommitted changes and unpushed commits
|
|
3133
|
+
- Fetches your in_progress tasks
|
|
3134
|
+
- Detects stale PRs (open 5+ days)
|
|
3135
|
+
- Returns a full action checklist so nothing is left behind
|
|
3136
|
+
|
|
3137
|
+
Phase 2 (confirmed=true):
|
|
3138
|
+
- Parks each in_progress task with your notes
|
|
3139
|
+
- Releases file claims on parked tasks
|
|
2588
3140
|
|
|
2589
|
-
After this tool runs, post a short comment on each parked task via add_task_comment summarising the day.
|
|
2590
3141
|
Use this when the developer says "wrap up", "end of day", or "I'm done for today".`,
|
|
2591
3142
|
{
|
|
3143
|
+
confirmed: z.boolean().optional().default(false).describe('Set true to actually park the tasks after reviewing the checklist'),
|
|
2592
3144
|
taskNotes: z.array(z.object({
|
|
2593
3145
|
taskId: z.string().describe("Task's MongoDB ObjectId"),
|
|
2594
3146
|
summary: z.string().describe('What was done today — be specific, include file names'),
|
|
2595
3147
|
remaining: z.string().describe('What is left to do next session'),
|
|
2596
|
-
blockers: z.string().optional().describe('Anything blocking'),
|
|
2597
|
-
})).describe('Park notes for each in_progress task.
|
|
3148
|
+
blockers: z.string().optional().describe('Anything blocking progress'),
|
|
3149
|
+
})).optional().describe('Park notes for each in_progress task. Required when confirmed=true.'),
|
|
2598
3150
|
},
|
|
2599
|
-
async ({ taskNotes }) => {
|
|
3151
|
+
async ({ confirmed = false, taskNotes = [] }) => {
|
|
3152
|
+
// Always fetch in-progress tasks and git state
|
|
3153
|
+
const myTasksRes = await api.get('/api/users/me/tasks')
|
|
3154
|
+
const inProgress = (myTasksRes?.data?.tasks || []).filter(t => t.column === 'in_progress')
|
|
3155
|
+
|
|
3156
|
+
// Local git state
|
|
3157
|
+
let gitState = { uncommitted: [], unpushedBranches: [], currentBranch: null }
|
|
3158
|
+
try {
|
|
3159
|
+
const cwd = process.cwd()
|
|
3160
|
+
const repoRoot = findRepoRoot(cwd)
|
|
3161
|
+
if (repoRoot) {
|
|
3162
|
+
// Uncommitted changes
|
|
3163
|
+
const statusOut = execSync('git status --short 2>/dev/null', { cwd: repoRoot, encoding: 'utf8', timeout: 3000 })
|
|
3164
|
+
gitState.uncommitted = statusOut.trim().split('\n').filter(Boolean)
|
|
3165
|
+
// Current branch
|
|
3166
|
+
try {
|
|
3167
|
+
gitState.currentBranch = execSync('git rev-parse --abbrev-ref HEAD 2>/dev/null', { cwd: repoRoot, encoding: 'utf8', timeout: 2000 }).trim()
|
|
3168
|
+
} catch { /* non-fatal */ }
|
|
3169
|
+
// Unpushed commits on current branch
|
|
3170
|
+
if (gitState.currentBranch && gitState.currentBranch !== 'HEAD') {
|
|
3171
|
+
try {
|
|
3172
|
+
const unpushed = execSync(
|
|
3173
|
+
`git log origin/${gitState.currentBranch}..HEAD --format="%h %s" 2>/dev/null || git log HEAD -3 --format="%h %s"`,
|
|
3174
|
+
{ cwd: repoRoot, encoding: 'utf8', timeout: 3000 }
|
|
3175
|
+
)
|
|
3176
|
+
gitState.unpushedBranch = unpushed.trim().split('\n').filter(Boolean)
|
|
3177
|
+
} catch { /* non-fatal */ }
|
|
3178
|
+
}
|
|
3179
|
+
}
|
|
3180
|
+
} catch { /* non-fatal */ }
|
|
3181
|
+
|
|
3182
|
+
// Stale PRs (in_review 5+ days)
|
|
3183
|
+
const now = Date.now()
|
|
3184
|
+
const stalePRs = inProgress
|
|
3185
|
+
.concat((myTasksRes?.data?.tasks || []).filter(t => t.column === 'in_review'))
|
|
3186
|
+
.filter(t => {
|
|
3187
|
+
if (!t.github?.prNumber) return false
|
|
3188
|
+
const ageDays = (now - new Date(t.updatedAt).getTime()) / (1000 * 60 * 60 * 24)
|
|
3189
|
+
return ageDays > 5
|
|
3190
|
+
})
|
|
3191
|
+
.map(t => ({ key: t.key, title: t.title, prNumber: t.github.prNumber, prUrl: t.github.prUrl || null }))
|
|
3192
|
+
|
|
3193
|
+
if (!confirmed) {
|
|
3194
|
+
// Phase 1: checklist
|
|
3195
|
+
const checklist = []
|
|
3196
|
+
if (gitState.uncommitted.length > 0) {
|
|
3197
|
+
checklist.push({
|
|
3198
|
+
urgent: true,
|
|
3199
|
+
action: 'Commit uncommitted changes',
|
|
3200
|
+
detail: `${gitState.uncommitted.length} file(s) modified but not committed`,
|
|
3201
|
+
files: gitState.uncommitted.slice(0, 10),
|
|
3202
|
+
cmd: `git add -p && git commit -m "wip: end of day checkpoint"`,
|
|
3203
|
+
})
|
|
3204
|
+
}
|
|
3205
|
+
if (gitState.unpushedBranch?.length > 0) {
|
|
3206
|
+
checklist.push({
|
|
3207
|
+
urgent: true,
|
|
3208
|
+
action: 'Push unpushed commits',
|
|
3209
|
+
detail: `${gitState.unpushedBranch.length} commit(s) not pushed to remote`,
|
|
3210
|
+
commits: gitState.unpushedBranch,
|
|
3211
|
+
cmd: `git push origin ${gitState.currentBranch}`,
|
|
3212
|
+
})
|
|
3213
|
+
}
|
|
3214
|
+
if (inProgress.length > 0) {
|
|
3215
|
+
checklist.push({
|
|
3216
|
+
urgent: false,
|
|
3217
|
+
action: 'Park in-progress tasks with notes',
|
|
3218
|
+
detail: `${inProgress.length} task(s) need a park note so work can resume cleanly`,
|
|
3219
|
+
tasks: inProgress.map(t => ({ taskId: t._id, key: t.key, title: t.title, branch: t.github?.headBranch || null })),
|
|
3220
|
+
nextStep: `Fill taskNotes for each task above, then call end_of_day with confirmed=true`,
|
|
3221
|
+
})
|
|
3222
|
+
}
|
|
3223
|
+
if (stalePRs.length > 0) {
|
|
3224
|
+
checklist.push({
|
|
3225
|
+
urgent: false,
|
|
3226
|
+
action: 'Follow up on stale PRs',
|
|
3227
|
+
detail: `${stalePRs.length} PR(s) open 5+ days with no reviewer action`,
|
|
3228
|
+
prs: stalePRs,
|
|
3229
|
+
})
|
|
3230
|
+
}
|
|
3231
|
+
|
|
3232
|
+
return text({
|
|
3233
|
+
checklist,
|
|
3234
|
+
gitState: {
|
|
3235
|
+
currentBranch: gitState.currentBranch,
|
|
3236
|
+
uncommitted: gitState.uncommitted.length,
|
|
3237
|
+
unpushedCommits: gitState.unpushedBranch?.length || 0,
|
|
3238
|
+
},
|
|
3239
|
+
inProgressTasks: inProgress.map(t => ({ taskId: t._id, key: t.key, title: t.title })),
|
|
3240
|
+
stalePRs: stalePRs.length ? stalePRs : null,
|
|
3241
|
+
message: checklist.length === 0
|
|
3242
|
+
? '✅ All clear — no uncommitted work, no unpushed commits, no in-progress tasks. Good day!'
|
|
3243
|
+
: `${checklist.filter(c => c.urgent).length} urgent item(s) need attention before logging off.`,
|
|
3244
|
+
nextStep: inProgress.length > 0
|
|
3245
|
+
? `Fill taskNotes for your in-progress tasks then call end_of_day with confirmed=true and taskNotes=[...].`
|
|
3246
|
+
: checklist.some(c => c.urgent)
|
|
3247
|
+
? `Handle the urgent items above (commit/push), then you are done.`
|
|
3248
|
+
: `Nothing to park. You are good to go.`,
|
|
3249
|
+
})
|
|
3250
|
+
}
|
|
3251
|
+
|
|
3252
|
+
// Phase 2: park tasks
|
|
3253
|
+
if (!taskNotes.length && inProgress.length > 0) {
|
|
3254
|
+
return text({
|
|
3255
|
+
blocked: true,
|
|
3256
|
+
message: `confirmed=true requires taskNotes for each in-progress task. Call end_of_day with confirmed=false first to get the task list.`,
|
|
3257
|
+
inProgressTasks: inProgress.map(t => ({ taskId: t._id, key: t.key, title: t.title })),
|
|
3258
|
+
})
|
|
3259
|
+
}
|
|
3260
|
+
|
|
2600
3261
|
const results = []
|
|
2601
3262
|
for (const note of taskNotes) {
|
|
2602
3263
|
try {
|
|
@@ -2614,7 +3275,15 @@ Use this when the developer says "wrap up", "end of day", or "I'm done for today
|
|
|
2614
3275
|
results.push({ taskId: note.taskId, success: false, message: e.message })
|
|
2615
3276
|
}
|
|
2616
3277
|
}
|
|
2617
|
-
|
|
3278
|
+
|
|
3279
|
+
return text({
|
|
3280
|
+
wrapped: results,
|
|
3281
|
+
stalePRs: stalePRs.length ? stalePRs : null,
|
|
3282
|
+
gitReminder: gitState.uncommitted.length > 0 || gitState.unpushedBranch?.length > 0
|
|
3283
|
+
? `⚠️ You still have ${gitState.uncommitted.length} uncommitted file(s) and ${gitState.unpushedBranch?.length || 0} unpushed commit(s). Push before logging off!`
|
|
3284
|
+
: null,
|
|
3285
|
+
message: `${results.filter(r => r.success).length}/${results.length} task(s) parked successfully.`,
|
|
3286
|
+
})
|
|
2618
3287
|
}
|
|
2619
3288
|
)
|
|
2620
3289
|
|
|
@@ -2643,13 +3312,36 @@ Use this when a developer says "start task", "brief me on", or "what do I need t
|
|
|
2643
3312
|
repoPath: z.string().optional().describe('Absolute path to the local git repo (defaults to MCP process working directory). Used to write cursor rules file.'),
|
|
2644
3313
|
agentRole: z.enum(['builder', 'reviewer', 'scout', 'coordinator']).optional()
|
|
2645
3314
|
.describe('Set the agent role for this task session. Role-specific behavioral constraints are injected into cursor rules. builder=implements code, scout=reads/analyzes only, reviewer=reviews PRs only, coordinator=decomposes work.'),
|
|
3315
|
+
files: z.array(z.string()).optional()
|
|
3316
|
+
.describe('REQUIRED for agentRole=builder. Files this builder will exclusively edit (e.g. ["server/routes/tasks.js", "client/src/App.jsx"]). Ownership is claimed atomically during kickoff — kickoff is blocked until files are provided. Read the implementation plan first (confirmed=false), identify your files, then call confirmed=true with this list.'),
|
|
2646
3317
|
},
|
|
2647
|
-
async ({ taskId, confirmed = false, repoPath, agentRole }) => {
|
|
3318
|
+
async ({ taskId, confirmed = false, repoPath, agentRole, files }) => {
|
|
2648
3319
|
trackTaskActivity(taskId, 'kickoff_task')
|
|
2649
3320
|
const taskRes = await api.get(`/api/tasks/${taskId}`)
|
|
2650
3321
|
if (!taskRes?.success) return errorText('Task not found')
|
|
2651
3322
|
const task = taskRes.data.task
|
|
2652
3323
|
|
|
3324
|
+
// Warn if task has unresolved blockers
|
|
3325
|
+
const activeBlockers = (task.blockedBy || []).filter(b => {
|
|
3326
|
+
if (typeof b === 'object' && b !== null && b.column !== undefined) return b.column !== 'done'
|
|
3327
|
+
return false // if not populated, can't check — don't block
|
|
3328
|
+
})
|
|
3329
|
+
if (activeBlockers.length > 0) {
|
|
3330
|
+
return text({
|
|
3331
|
+
blocked: true,
|
|
3332
|
+
reason: 'This task has unresolved dependencies that must complete first.',
|
|
3333
|
+
blockers: activeBlockers.map(b => ({
|
|
3334
|
+
taskKey: b.key || b._id,
|
|
3335
|
+
title: b.title || '(unknown)',
|
|
3336
|
+
column: b.column || 'unknown',
|
|
3337
|
+
})),
|
|
3338
|
+
message: `⛔ Cannot kick off ${task.key} — ${activeBlockers.length} blocker(s) must complete first.`,
|
|
3339
|
+
nextStep: activeBlockers.length === 1
|
|
3340
|
+
? `Work on ${activeBlockers[0].key || 'the blocker task'} first. Call remove_blocker when it's done.`
|
|
3341
|
+
: `Complete the blockers listed above. Call remove_blocker for each one when done.`,
|
|
3342
|
+
})
|
|
3343
|
+
}
|
|
3344
|
+
|
|
2653
3345
|
// Include developer name in branch so it's clear who created it
|
|
2654
3346
|
let devSlug = ''
|
|
2655
3347
|
try {
|
|
@@ -2970,6 +3662,59 @@ Use this when a developer says "start task", "brief me on", or "what do I need t
|
|
|
2970
3662
|
})
|
|
2971
3663
|
}
|
|
2972
3664
|
|
|
3665
|
+
// ── Builder file ownership gate ───────────────────────────────────────────
|
|
3666
|
+
// Structural equivalent of BridgeSwarm's file-lock: a builder cannot complete
|
|
3667
|
+
// kickoff without declaring which files it owns. This prevents two agents editing
|
|
3668
|
+
// the same file because both kickoffs either succeed (different files) or one is
|
|
3669
|
+
// blocked (conflict detected before a single line of code is written).
|
|
3670
|
+
if (agentRole === 'builder') {
|
|
3671
|
+
if (!files?.length) {
|
|
3672
|
+
// Hard block — return plan context so agent can identify files, then retry
|
|
3673
|
+
return text({
|
|
3674
|
+
requiresFileClaim: true,
|
|
3675
|
+
taskKey: task.key,
|
|
3676
|
+
title: task.title,
|
|
3677
|
+
implementationPlan: task.readmeMarkdown || '(no plan — write one in the Plan tab first)',
|
|
3678
|
+
scoutReport: task.scoutReport || null,
|
|
3679
|
+
subtasks: (task.subtasks || []).map(s => ({ title: s.title, done: s.done })),
|
|
3680
|
+
message: [
|
|
3681
|
+
`⛔ Builder kickoff blocked — file ownership required.`,
|
|
3682
|
+
``,
|
|
3683
|
+
`Read the implementation plan above and identify EVERY file you will create or modify.`,
|
|
3684
|
+
`Then call kickoff_task again with confirmed=true, agentRole="builder", and files=[...].`,
|
|
3685
|
+
``,
|
|
3686
|
+
`This is enforced structurally: kickoff cannot complete until files are declared.`,
|
|
3687
|
+
`It prevents two agents from editing the same file simultaneously.`,
|
|
3688
|
+
].join('\n'),
|
|
3689
|
+
nextStep: `kickoff_task(taskId="${taskId}", confirmed=true, agentRole="builder", files=["path/to/file1", "path/to/file2"])`,
|
|
3690
|
+
})
|
|
3691
|
+
}
|
|
3692
|
+
|
|
3693
|
+
// Files provided — attempt atomic claim before proceeding with kickoff
|
|
3694
|
+
const claimRes = await api.post(`/api/tasks/${taskId}/files/claim`, { files }).catch(() => null)
|
|
3695
|
+
if (!claimRes?.success) {
|
|
3696
|
+
if (claimRes?.conflicts) {
|
|
3697
|
+
return text({
|
|
3698
|
+
blocked: true,
|
|
3699
|
+
reason: `File ownership conflict — another in-progress task already owns one or more of your files.`,
|
|
3700
|
+
conflicts: claimRes.conflicts,
|
|
3701
|
+
message: [
|
|
3702
|
+
`⛔ Builder kickoff blocked — file conflict detected.`,
|
|
3703
|
+
``,
|
|
3704
|
+
`The files listed in conflicts[] are already claimed by another task.`,
|
|
3705
|
+
`Options:`,
|
|
3706
|
+
`1. Coordinate with the other task's assignee and wait for them to release the files.`,
|
|
3707
|
+
`2. Remove the conflicting files from your list if you don't actually need them.`,
|
|
3708
|
+
`3. Call release_files on the other task if it is no longer active (coordinator only).`,
|
|
3709
|
+
].join('\n'),
|
|
3710
|
+
nextStep: `Resolve the conflict, then retry: kickoff_task(taskId="${taskId}", confirmed=true, agentRole="builder", files=[...updated list...])`,
|
|
3711
|
+
})
|
|
3712
|
+
}
|
|
3713
|
+
return errorText(claimRes?.message || 'Could not claim files — retry kickoff_task with confirmed=true and files=[...]')
|
|
3714
|
+
}
|
|
3715
|
+
// Files claimed successfully — kickoff continues below with ownership established
|
|
3716
|
+
}
|
|
3717
|
+
|
|
2973
3718
|
// ── #1 Preflight: check dirty tree before writing cursor rules / moving task ──
|
|
2974
3719
|
{
|
|
2975
3720
|
const pCwd = repoPath || process.cwd()
|
|
@@ -3104,7 +3849,9 @@ Use this when a developer says "start task", "brief me on", or "what do I need t
|
|
|
3104
3849
|
const baseRules = hasCursorRules ? task.cursorRules : ''
|
|
3105
3850
|
const mergedRules = [baseRules, typeExtraRules].filter(Boolean).join('\n\n') || '(Follow role constraints above.)'
|
|
3106
3851
|
if (hasCursorRules || agentRole || typeExtraRules) {
|
|
3107
|
-
|
|
3852
|
+
// Pass claimedFiles so the builder cursor rule lists exactly which files are owned
|
|
3853
|
+
const effectiveClaimedFiles = files?.length ? files : (task.claimedFiles || [])
|
|
3854
|
+
cursorRulesFile = writeCursorRulesFile(task.key, mergedRules, repoPath, agentRole || null, effectiveClaimedFiles)
|
|
3108
3855
|
}
|
|
3109
3856
|
|
|
3110
3857
|
// Dynamically generate .cursor/agents, .cursor/skills, .cursor/commands
|
|
@@ -3187,7 +3934,7 @@ After \`request_human_input\`: STOP, show the question in chat, wait for reply,
|
|
|
3187
3934
|
active: true,
|
|
3188
3935
|
agentRole: agentRole || null,
|
|
3189
3936
|
rules: task.cursorRules || null,
|
|
3190
|
-
roleRules: agentRole ?
|
|
3937
|
+
roleRules: agentRole ? getRoleRules(agentRole, task.claimedFiles || []) : null,
|
|
3191
3938
|
instruction: agentRole
|
|
3192
3939
|
? `⚠️ AGENT ROLE: ${agentRole.toUpperCase()} — Follow the role behavioral constraints injected into the cursor rules file. These override default behavior.`
|
|
3193
3940
|
: '⚠️ CURSOR RULES ACTIVE — You MUST follow every rule in the "rules" field for the entire duration of this task.',
|
|
@@ -3363,6 +4110,52 @@ function registerIssueTools(server) {
|
|
|
3363
4110
|
}
|
|
3364
4111
|
|
|
3365
4112
|
function registerApprovalTools(server) {
|
|
4113
|
+
// ── report_test_results ───────────────────────────────────────────────────────
|
|
4114
|
+
server.tool(
|
|
4115
|
+
'report_test_results',
|
|
4116
|
+
`Save a test run result to the task — call this after running the test suite.
|
|
4117
|
+
|
|
4118
|
+
REQUIRED WORKFLOW:
|
|
4119
|
+
1. Run tests using your shell or the run-tests skill
|
|
4120
|
+
2. Parse the output to extract pass/fail counts
|
|
4121
|
+
3. Call this tool to attach the results to the task
|
|
4122
|
+
|
|
4123
|
+
The latest test run is visible in the UI and blocks submit_task_for_approval if failing.
|
|
4124
|
+
Calling this with status="passing" is required before submitting for approval.
|
|
4125
|
+
|
|
4126
|
+
How to determine status:
|
|
4127
|
+
- "passing" → all tests pass (failed === 0)
|
|
4128
|
+
- "partial" → some pass, some fail — needs investigation
|
|
4129
|
+
- "failing" → no tests pass or test runner itself crashed`,
|
|
4130
|
+
{
|
|
4131
|
+
taskId: z.string().describe("Task's MongoDB ObjectId"),
|
|
4132
|
+
status: z.enum(['passing', 'failing', 'partial']).describe('Overall test run outcome'),
|
|
4133
|
+
total: z.number().optional().default(0).describe('Total number of tests run'),
|
|
4134
|
+
passed: z.number().optional().default(0).describe('Tests that passed'),
|
|
4135
|
+
failed: z.number().optional().default(0).describe('Tests that failed'),
|
|
4136
|
+
skipped: z.number().optional().default(0).describe('Tests that were skipped'),
|
|
4137
|
+
summary: z.string().describe('One-line summary, e.g. "42 passed, 2 failed in 3.4s"'),
|
|
4138
|
+
rawOutput: z.string().optional().default('').describe('Raw test output (truncated to 3000 chars) for reviewer context'),
|
|
4139
|
+
},
|
|
4140
|
+
// Persist a test-run record on the task, then tell the agent what to do next.
// The newest saved run is what the UI surfaces and what gates
// submit_task_for_approval, so a failing report must be followed by a
// passing one before submission.
async ({ taskId, status, total = 0, passed = 0, failed = 0, skipped = 0, summary, rawOutput = '' }) => {
  // Record the attempt up front so activity is tracked even if the save fails.
  trackTaskActivity(taskId, 'report_test_results', { summary: `Tests: ${status} (${passed}/${total})` })

  const payload = {
    status, total, passed, failed, skipped,
    summary,
    raw: rawOutput.slice(0, 3000), // cap raw output so the stored record stays small
  }
  const saveRes = await api.post(`/api/tasks/${taskId}/test-runs`, payload)
  if (!saveRes?.success) return errorText(saveRes?.message || 'Could not save test results')

  // passing → ✅, partial → ⚠️, anything else (failing) → ❌
  const statusIcon = { passing: '✅', partial: '⚠️' }[status] ?? '❌'

  let nextStep
  if (status === 'passing') {
    nextStep = `Tests passing. You can now call submit_task_for_approval or raise_pr.`
  } else {
    nextStep = `${failed} test(s) failing. Fix them and call report_test_results again before submitting for approval.`
  }

  return text({
    saved: true, status, summary, passed, failed, total, skipped,
    message: `${statusIcon} Test results saved to task.`,
    nextStep,
  })
}
|
|
4157
|
+
)
|
|
4158
|
+
|
|
3366
4159
|
server.tool(
|
|
3367
4160
|
'submit_task_for_approval',
|
|
3368
4161
|
'Create and submit a new approval request on a task. Each request has its own title, plan/readme, and reviewer. Only one request can be pending at a time.',
|
|
@@ -3372,8 +4165,28 @@ function registerApprovalTools(server) {
|
|
|
3372
4165
|
readme: z.string().describe('The plan/markdown describing what you want to do and why (min 80 chars)'),
|
|
3373
4166
|
reviewerId: z.string().describe('User ID of the reviewer'),
|
|
3374
4167
|
},
|
|
3375
|
-
async ({ taskId, title, readme, reviewerId }) =>
|
|
3376
|
-
|
|
4168
|
+
// Create the approval request, but first gate on the task's newest test run:
// a failing run blocks submission with an actionable explanation.
// NOTE: if the task fetch itself errors, the gate is skipped (fail-open) —
// the check is advisory and must not make submission impossible during
// transient API failures.
async ({ taskId, title, readme, reviewerId }) => {
  const taskRes = await api.get(`/api/tasks/${taskId}`)
  const newestRun = taskRes?.success ? taskRes.data?.task?.testRuns?.[0] : undefined

  if (newestRun?.status === 'failing') {
    const { status, summary, failed, total, runAt } = newestRun
    return text({
      blocked: true,
      reason: 'Latest test run is failing — fix tests before submitting for approval.',
      testRun: { status, summary, failed, total, runAt },
      nextStep: `Fix the failing tests, then call report_test_results with status="passing" before retrying submit_task_for_approval.`,
    })
  }

  return call(() => api.post(`/api/tasks/${taskId}/approvals`, { title, readme, reviewerId }))
}
|
|
3377
4190
|
)
|
|
3378
4191
|
|
|
3379
4192
|
server.tool(
|
|
@@ -3642,26 +4455,30 @@ function findRepoRoot(startPath) {
|
|
|
3642
4455
|
// Prepended to cursor rules when an agentRole is set. Defines what the agent
|
|
3643
4456
|
// CAN and CANNOT do for the duration of the task session.
|
|
3644
4457
|
const ROLE_RULES = {
|
|
3645
|
-
builder: `## Agent Role: BUILDER
|
|
4458
|
+
builder: (claimedFiles = []) => `## Agent Role: BUILDER
|
|
3646
4459
|
|
|
3647
4460
|
You are a BUILDER agent. Your behavioral constraints for this session:
|
|
3648
4461
|
|
|
4462
|
+
**YOUR CLAIMED FILES (exclusive ownership — enforced at kickoff):**
|
|
4463
|
+
${claimedFiles.length > 0
|
|
4464
|
+
? claimedFiles.map(f => `- ${f}`).join('\n')
|
|
4465
|
+
: '- (no files claimed yet — call claim_files before editing anything)'}
|
|
4466
|
+
|
|
3649
4467
|
**ALLOWED:**
|
|
3650
|
-
- Write, modify, and delete code files
|
|
3651
|
-
- Create new files required by the implementation plan
|
|
4468
|
+
- Write, modify, and delete code files listed above under YOUR CLAIMED FILES
|
|
4469
|
+
- Create new files required by the implementation plan (call claim_files to add them first)
|
|
3652
4470
|
- Commit and push changes on the task branch
|
|
3653
4471
|
- Run tests and fix failures
|
|
3654
|
-
- Claim file ownership before editing (use claim_files MCP tool)
|
|
3655
4472
|
|
|
3656
4473
|
**NOT ALLOWED:**
|
|
3657
|
-
-
|
|
4474
|
+
- Edit ANY file not listed in YOUR CLAIMED FILES above without first calling claim_files
|
|
4475
|
+
- Modify files owned by another in-progress task
|
|
3658
4476
|
- Make architectural decisions not in the implementation plan — flag them instead
|
|
3659
|
-
- Edit files outside the task scope without explicit approval
|
|
3660
4477
|
- Merge to main/master/dev directly
|
|
3661
4478
|
|
|
3662
4479
|
**WORK STYLE:**
|
|
3663
|
-
-
|
|
3664
|
-
-
|
|
4480
|
+
- YOUR CLAIMED FILES list is your contract — stay within it
|
|
4481
|
+
- If you discover you need an additional file mid-session, call claim_files first, then edit
|
|
3665
4482
|
- Follow the spec precisely — don't add unrequested features
|
|
3666
4483
|
- Commit atomically with conventional commit format (feat/fix/refactor)`,
|
|
3667
4484
|
|
|
@@ -4008,16 +4825,24 @@ function detectTaskType(task) {
|
|
|
4008
4825
|
}
|
|
4009
4826
|
|
|
4010
4827
|
/** Write task-specific cursor rules to .cursor/rules/<taskKey>.mdc in the local repo root.
|
|
4011
|
-
* When role is provided, role-specific behavioral constraints are prepended.
|
|
4012
|
-
|
|
4828
|
+
* When role is provided, role-specific behavioral constraints are prepended.
|
|
4829
|
+
* claimedFiles is passed through to the builder role template so Cursor injects
|
|
4830
|
+
* the exact file ownership list into every prompt. */
|
|
4831
|
+
/**
 * Resolve the role-specific rule text for a task session.
 *
 * ROLE_RULES entries may be plain strings or factory functions that take the
 * task's claimed-file list (e.g. the builder role injects file ownership).
 *
 * @param {string|null|undefined} role - Agent role key in ROLE_RULES (e.g. 'builder').
 * @param {string[]} [claimedFiles] - Claimed files, forwarded to function-valued entries.
 * @returns {string|null} Rules markdown for the role, or null when role is unset/unknown.
 */
function getRoleRules(role, claimedFiles = []) {
  // Own-property check: a plain `ROLE_RULES[role]` would also resolve
  // prototype-chain keys (e.g. role === 'toString') and then invoke the
  // inherited function as a rule factory, injecting garbage into the rules.
  if (!role || !Object.hasOwn(ROLE_RULES, role)) return null
  const entry = ROLE_RULES[role]
  // Preserve prior behavior: a falsy entry (e.g. empty string) yields null.
  if (!entry) return null
  return typeof entry === 'function' ? entry(claimedFiles) : entry
}
|
|
4836
|
+
|
|
4837
|
+
function writeCursorRulesFile(taskKey, rulesMarkdown, startPath, role = null, claimedFiles = []) {
|
|
4013
4838
|
try {
|
|
4014
4839
|
const repoRoot = findRepoRoot(startPath)
|
|
4015
4840
|
if (!repoRoot) return null
|
|
4016
4841
|
const rulesDir = join(repoRoot, '.cursor', 'rules')
|
|
4017
4842
|
mkdirSync(rulesDir, { recursive: true })
|
|
4018
4843
|
const filePath = join(rulesDir, `${taskKey.toLowerCase()}.mdc`)
|
|
4019
|
-
const roleSection = role
|
|
4020
|
-
? `${
|
|
4844
|
+
const roleSection = getRoleRules(role, claimedFiles)
|
|
4845
|
+
? `${getRoleRules(role, claimedFiles)}\n\n---\n\n## Task-Specific Rules\n\n`
|
|
4021
4846
|
: ''
|
|
4022
4847
|
const content = `---\ndescription: Task-specific rules for ${taskKey}${role ? ` (role: ${role})` : ''} — auto-generated by InternalTool MCP. Do not edit manually.\nalwaysApply: true\n---\n\n${roleSection}${rulesMarkdown}\n`
|
|
4023
4848
|
writeFileSync(filePath, content, 'utf8')
|
|
@@ -5498,13 +6323,46 @@ Set confirmed=false first to preview the full PR content, then confirmed=true to
|
|
|
5498
6323
|
}
|
|
5499
6324
|
|
|
5500
6325
|
const prTitle = `[${task.key}] ${task.title}`
|
|
6326
|
+
|
|
6327
|
+
// Subtask checklist with completion state
|
|
6328
|
+
const subtaskLines = (task.subtasks || []).map(s =>
|
|
6329
|
+
`- [${s.done ? 'x' : ' '}] ${s.title}`
|
|
6330
|
+
)
|
|
6331
|
+
|
|
6332
|
+
// Recent commits for this branch
|
|
6333
|
+
let commitLines = []
|
|
6334
|
+
try {
|
|
6335
|
+
const commitsRes = await api.get(
|
|
6336
|
+
`/api/projects/${projectId}/github/commits?sha=${encodeURIComponent(headBranch)}&per_page=10`
|
|
6337
|
+
)
|
|
6338
|
+
if (commitsRes?.success && commitsRes.data?.commits?.length) {
|
|
6339
|
+
commitLines = commitsRes.data.commits.slice(0, 10).map(c => {
|
|
6340
|
+
const msg = c.commit?.message?.split('\n')[0] || ''
|
|
6341
|
+
const sha = c.sha?.slice(0, 7) || ''
|
|
6342
|
+
return `- \`${sha}\` ${msg}`
|
|
6343
|
+
})
|
|
6344
|
+
}
|
|
6345
|
+
} catch { /* non-fatal */ }
|
|
6346
|
+
|
|
6347
|
+
// Latest test run
|
|
6348
|
+
const latestTest = task.testRuns?.[0]
|
|
6349
|
+
const testLine = latestTest
|
|
6350
|
+
? latestTest.status === 'passing'
|
|
6351
|
+
? `✅ Tests passing — ${latestTest.summary}`
|
|
6352
|
+
: latestTest.status === 'partial'
|
|
6353
|
+
? `⚠️ Tests partial — ${latestTest.summary}`
|
|
6354
|
+
: `❌ Tests failing — ${latestTest.summary}`
|
|
6355
|
+
: null
|
|
6356
|
+
|
|
5501
6357
|
const bodyParts = [
|
|
5502
6358
|
`## ${task.key}: ${task.title}`,
|
|
5503
6359
|
'',
|
|
5504
|
-
task.description ? `###
|
|
5505
|
-
task.readmeMarkdown ? `### Implementation
|
|
6360
|
+
task.description?.trim() ? `### What & Why\n${task.description.trim()}` : null,
|
|
6361
|
+
task.readmeMarkdown?.trim() ? `### Implementation Plan\n${task.readmeMarkdown.trim()}` : null,
|
|
6362
|
+
subtaskLines.length > 0 ? `### Checklist\n${subtaskLines.join('\n')}` : null,
|
|
6363
|
+
commitLines.length > 0 ? `### Commits\n${commitLines.join('\n')}` : null,
|
|
6364
|
+
testLine ? `### Test Results\n${testLine}` : null,
|
|
5506
6365
|
task.parkNote?.summary ? `### Work Summary\n${task.parkNote.summary}` : null,
|
|
5507
|
-
task.parkNote?.remaining ? `### What Remains\n${task.parkNote.remaining}` : null,
|
|
5508
6366
|
additionalNotes ? `### Additional Notes\n${additionalNotes}` : null,
|
|
5509
6367
|
'',
|
|
5510
6368
|
'---',
|
|
@@ -6431,6 +7289,27 @@ Use this tool between those two to understand the merge context.`,
|
|
|
6431
7289
|
: null
|
|
6432
7290
|
const isStale = hoursSinceCommit !== null && hoursSinceCommit >= 48
|
|
6433
7291
|
|
|
7292
|
+
// CI status — fetch check runs for the PR if one exists
|
|
7293
|
+
let ciCheck = null
|
|
7294
|
+
if (branch.prNumber) {
|
|
7295
|
+
try {
|
|
7296
|
+
const prRes = await api.get(`/api/projects/${projectId}/github/pull-requests/${branch.prNumber}`)
|
|
7297
|
+
if (prRes?.success && prRes.data?.checks) {
|
|
7298
|
+
const runs = Array.isArray(prRes.data.checks.check_runs) ? prRes.data.checks.check_runs
|
|
7299
|
+
: Array.isArray(prRes.data.checks) ? prRes.data.checks
|
|
7300
|
+
: []
|
|
7301
|
+
const total = runs.length
|
|
7302
|
+
const failed = runs.filter(r => r.conclusion === 'failure' || r.conclusion === 'timed_out').length
|
|
7303
|
+
const pending = runs.filter(r => !r.conclusion).length
|
|
7304
|
+
const ciStatus = total === 0 ? 'no_checks'
|
|
7305
|
+
: failed > 0 ? 'failing'
|
|
7306
|
+
: pending > 0 ? 'pending'
|
|
7307
|
+
: 'passing'
|
|
7308
|
+
ciCheck = { status: ciStatus, total, failed, pending, passed: total - failed - pending }
|
|
7309
|
+
}
|
|
7310
|
+
} catch { /* non-fatal — CI is optional */ }
|
|
7311
|
+
}
|
|
7312
|
+
|
|
6434
7313
|
// Build checklist
|
|
6435
7314
|
const checklist = [
|
|
6436
7315
|
{
|
|
@@ -6485,6 +7364,16 @@ Use this tool between those two to understand the merge context.`,
|
|
|
6485
7364
|
: 'No commit data available',
|
|
6486
7365
|
blocking: false,
|
|
6487
7366
|
},
|
|
7367
|
+
...(ciCheck && ciCheck.status !== 'no_checks' ? [{
|
|
7368
|
+
item: 'CI checks passing',
|
|
7369
|
+
passed: ciCheck.status === 'passing',
|
|
7370
|
+
detail: ciCheck.status === 'passing'
|
|
7371
|
+
? `All ${ciCheck.total} CI check${ciCheck.total !== 1 ? 's' : ''} passing`
|
|
7372
|
+
: ciCheck.status === 'failing'
|
|
7373
|
+
? `${ciCheck.failed} of ${ciCheck.total} CI check${ciCheck.total !== 1 ? 's' : ''} failing — fix before requesting review`
|
|
7374
|
+
: `${ciCheck.pending} CI check${ciCheck.pending !== 1 ? 's' : ''} still running — wait before reviewing`,
|
|
7375
|
+
blocking: ciCheck.status === 'failing',
|
|
7376
|
+
}] : []),
|
|
6488
7377
|
]
|
|
6489
7378
|
|
|
6490
7379
|
const blockingIssues = checklist.filter(c => !c.passed && c.blocking)
|