internaltool-mcp 1.6.35 → 1.6.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +1192 -116
- package/package.json +1 -1
package/index.js
CHANGED
|
@@ -18,7 +18,7 @@
|
|
|
18
18
|
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'
|
|
19
19
|
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'
|
|
20
20
|
import { execSync } from 'child_process'
|
|
21
|
-
import { mkdirSync, writeFileSync, unlinkSync, existsSync, readdirSync, statSync } from 'fs'
|
|
21
|
+
import { mkdirSync, writeFileSync, readFileSync, unlinkSync, existsSync, readdirSync, statSync } from 'fs'
|
|
22
22
|
import { join } from 'path'
|
|
23
23
|
import { z } from 'zod'
|
|
24
24
|
import { api, login, configure } from './api-client.js'
|
|
@@ -83,6 +83,180 @@ async function assertAdmin() {
|
|
|
83
83
|
}
|
|
84
84
|
}
|
|
85
85
|
|
|
86
|
+
// ── Codebase analysis helpers (used by plan_task_from_codebase) ───────────────
|
|
87
|
+
|
|
88
|
+
/**
 * Detects the tech stack of the project rooted at `cwd` by reading manifest
 * files (package.json, requirements.txt / pyproject.toml, go.mod, Gemfile,
 * Cargo.toml, pom.xml / build.gradle(.kts), composer.json).
 *
 * Best-effort: any read/parse error is swallowed and whatever was detected
 * so far is returned.
 *
 * @param {string} cwd - Directory to inspect (usually the repo root).
 * @returns {{language: ?string, framework: ?string, testRunner: ?string,
 *           packageManager: ?string, extra: string[]}} Detected stack info;
 *           fields stay null / empty when nothing could be determined.
 */
function detectStack(cwd) {
  const stack = { language: null, framework: null, testRunner: null, packageManager: null, extra: [] }
  try {
    // Node.js / JS / TS
    const pkgPath = join(cwd, 'package.json')
    if (existsSync(pkgPath)) {
      const pkg = JSON.parse(readFileSync(pkgPath, 'utf8'))
      const deps = { ...pkg.dependencies, ...pkg.devDependencies }
      // A tsconfig.json or a tsc build script marks a TypeScript project
      stack.language = pkg.scripts?.build?.includes('tsc') || existsSync(join(cwd, 'tsconfig.json')) ? 'typescript' : 'javascript'
      stack.packageManager = existsSync(join(cwd, 'pnpm-lock.yaml')) ? 'pnpm' : existsSync(join(cwd, 'yarn.lock')) ? 'yarn' : 'npm'
      // Framework detection — first match wins, meta-frameworks checked first
      if (deps.next) stack.framework = 'nextjs'
      else if (deps.nuxt) stack.framework = 'nuxt'
      else if (deps.react) stack.framework = 'react'
      else if (deps.vue) stack.framework = 'vue'
      else if (deps.svelte) stack.framework = 'svelte'
      else if (deps.express) stack.framework = 'express'
      else if (deps.fastify) stack.framework = 'fastify'
      else if (deps.nestjs || deps['@nestjs/core']) stack.framework = 'nestjs'
      else if (deps.hono) stack.framework = 'hono'
      // Test runner
      if (deps.jest || deps['@jest/core']) stack.testRunner = 'jest'
      else if (deps.vitest) stack.testRunner = 'vitest'
      else if (deps.mocha) stack.testRunner = 'mocha'
      else if (deps.playwright || deps['@playwright/test']) stack.testRunner = 'playwright'
      // ORM / DB / API extras (non-exclusive)
      if (deps.mongoose) stack.extra.push('mongoose')
      if (deps.prisma) stack.extra.push('prisma')
      // FIX: the Drizzle ORM is published as "drizzle-orm", not "drizzle" —
      // the old check never matched a real install; keep both for safety.
      if (deps.drizzle || deps['drizzle-orm']) stack.extra.push('drizzle')
      if (deps.typeorm) stack.extra.push('typeorm')
      if (deps.sequelize) stack.extra.push('sequelize')
      if (deps['@supabase/supabase-js']) stack.extra.push('supabase')
      if (deps.trpc || deps['@trpc/server']) stack.extra.push('trpc')
      return stack
    }
    // Python
    const reqPath = join(cwd, 'requirements.txt')
    const pyprojPath = join(cwd, 'pyproject.toml')
    if (existsSync(reqPath) || existsSync(pyprojPath)) {
      stack.language = 'python'
      // requirements.txt takes precedence when both manifests exist
      const content = existsSync(reqPath) ? readFileSync(reqPath, 'utf8') : readFileSync(pyprojPath, 'utf8')
      if (content.includes('django')) stack.framework = 'django'
      else if (content.includes('fastapi')) stack.framework = 'fastapi'
      else if (content.includes('flask')) stack.framework = 'flask'
      else if (content.includes('starlette')) stack.framework = 'starlette'
      if (content.includes('pytest')) stack.testRunner = 'pytest'
      if (content.includes('sqlalchemy')) stack.extra.push('sqlalchemy')
      if (content.includes('alembic')) stack.extra.push('alembic')
      return stack
    }
    // Go
    if (existsSync(join(cwd, 'go.mod'))) {
      stack.language = 'go'
      const gomod = readFileSync(join(cwd, 'go.mod'), 'utf8')
      if (gomod.includes('gin-gonic/gin')) stack.framework = 'gin'
      else if (gomod.includes('go-chi/chi')) stack.framework = 'chi'
      else if (gomod.includes('labstack/echo')) stack.framework = 'echo'
      else if (gomod.includes('gofiber/fiber')) stack.framework = 'fiber'
      stack.testRunner = 'go test'
      return stack
    }
    // Ruby
    if (existsSync(join(cwd, 'Gemfile'))) {
      stack.language = 'ruby'
      const gemfile = readFileSync(join(cwd, 'Gemfile'), 'utf8')
      if (gemfile.includes('rails')) stack.framework = 'rails'
      else if (gemfile.includes('sinatra')) stack.framework = 'sinatra'
      if (gemfile.includes('rspec')) stack.testRunner = 'rspec'
      return stack
    }
    // Rust
    if (existsSync(join(cwd, 'Cargo.toml'))) {
      stack.language = 'rust'
      const cargo = readFileSync(join(cwd, 'Cargo.toml'), 'utf8')
      if (cargo.includes('actix-web')) stack.framework = 'actix-web'
      else if (cargo.includes('axum')) stack.framework = 'axum'
      else if (cargo.includes('warp')) stack.framework = 'warp'
      stack.testRunner = 'cargo test'
      return stack
    }
    // Java / Kotlin
    // FIX: also match build.gradle.kts — previously a Kotlin-DSL-only project
    // (build.gradle.kts without build.gradle) never entered this branch, so
    // the 'kotlin' result below was unreachable.
    if (existsSync(join(cwd, 'pom.xml')) || existsSync(join(cwd, 'build.gradle')) || existsSync(join(cwd, 'build.gradle.kts'))) {
      stack.language = existsSync(join(cwd, 'build.gradle.kts')) ? 'kotlin' : 'java'
      // NOTE(review): framework/test runner are assumed here, not detected
      // from the manifest — TODO confirm this default is acceptable.
      stack.framework = 'spring-boot'
      stack.testRunner = 'junit'
      return stack
    }
    // PHP
    if (existsSync(join(cwd, 'composer.json'))) {
      stack.language = 'php'
      const composer = JSON.parse(readFileSync(join(cwd, 'composer.json'), 'utf8'))
      const req = { ...composer.require, ...composer['require-dev'] }
      if (req['laravel/framework']) stack.framework = 'laravel'
      else if (req['symfony/symfony'] || req['symfony/framework-bundle']) stack.framework = 'symfony'
      return stack
    }
  } catch { /* non-fatal — return whatever was detected so far */ }
  return stack
}
|
|
191
|
+
|
|
192
|
+
/**
 * Returns a shallow directory tree (default depth 2) so the agent knows the
 * folder layout.
 *
 * Skips well-known build/dependency folders (node_modules, .git, vendor,
 * __pycache__, dist, build, .next, …) and every dot-prefixed entry.
 * Unreadable directories yield an empty child list instead of throwing.
 *
 * @param {string} cwd - Root directory to walk.
 * @param {number} [maxDepth=2] - Maximum recursion depth (0 = top level only).
 * @returns {Array<{name: string, children?: Array}>} Tree nodes; directory
 *          names carry a trailing '/', empty directories omit `children`.
 */
function getDirTree(cwd, maxDepth = 2) {
  const SKIP = new Set(['node_modules', '.git', 'vendor', '__pycache__', 'dist', 'build', '.next', '.turbo', 'coverage', '.cache', 'target', 'venv', '.venv'])
  const scan = (dir, depth) => {
    if (depth > maxDepth) return []
    let dirents
    try {
      dirents = readdirSync(dir, { withFileTypes: true })
    } catch {
      // Permission errors etc. — treat as an empty directory
      return []
    }
    const nodes = []
    for (const entry of dirents) {
      if (SKIP.has(entry.name) || entry.name.startsWith('.')) continue
      if (entry.isDirectory()) {
        const kids = scan(join(dir, entry.name), depth + 1)
        nodes.push({ name: entry.name + '/', children: kids.length ? kids : undefined })
      } else {
        nodes.push({ name: entry.name })
      }
    }
    return nodes
  }
  return scan(cwd, 0)
}
|
|
214
|
+
|
|
215
|
+
/**
 * Based on the detected stack, greps the working tree for the most likely
 * entry-point files (routes, models, schemas, components).
 *
 * @param {string} cwd - Directory to search (passed to grep as its cwd).
 * @param {{language: ?string}} stack - Output of detectStack; only `language`
 *        is read. TypeScript reuses the JavaScript patterns.
 * @returns {Object<string, string[]>} Map of category → relative file paths
 *          (max 8 per category); categories with no matches are omitted.
 */
function findEntryPoints(cwd, stack) {
  const results = {}
  // Grep BRE alternation patterns per language. (Removed the dead
  // `typescript: true` marker key that the loop below used to skip over.)
  const greps = {
    javascript: {
      routes: ['router\\.', 'app\\.get\\|app\\.post\\|app\\.put\\|fastify\\.', 'Route path='],
      models: ['mongoose\\.Schema\\|new Schema\\|@Entity\\|@Table'],
      components: ['export default function\\|export const.*=.*=>\\|React\\.FC'],
    },
    python: {
      routes: ['@app\\.route\\|@router\\.\\|path(\\|urlpatterns\\|@api_view'],
      models: ['class.*Model\\|db\\.Model\\|Base\\)\\|models\\.Model'],
      schemas: ['class.*Schema\\|serializers\\.\\|Pydantic\\|BaseModel'],
    },
    go: {
      // NOTE(review): 'router\.\.' matches "router" + any two chars — looks
      // like a typo for 'router\.'; kept as-is to preserve behavior. Confirm.
      routes: ['router\\.\\.\\|http\\.HandleFunc\\|r\\.GET\\|r\\.POST\\|e\\.GET'],
      models: ['type.*struct\\|gorm\\.Model'],
    },
    ruby: {
      routes: ['resources \\|get \'\\|post \'\\|Rails\\.application\\.routes'],
      models: ['ApplicationRecord\\|ActiveRecord::Base\\|belongs_to\\|has_many'],
    },
  }

  // TypeScript projects share the JavaScript grep patterns
  const lang = stack.language === 'typescript' ? 'javascript' : stack.language
  const patterns = greps[lang] || {}

  for (const [type, patternList] of Object.entries(patterns)) {
    try {
      const pat = Array.isArray(patternList) ? patternList.join('\\|') : patternList
      // NOTE(review): requires a POSIX shell with grep/head on PATH — on
      // other platforms execSync throws and the category is silently skipped.
      const out = execSync(
        `grep -rl "${pat}" . --include="*.js" --include="*.ts" --include="*.jsx" --include="*.tsx" --include="*.py" --include="*.go" --include="*.rb" --include="*.java" --include="*.kt" --include="*.rs" --include="*.php" 2>/dev/null | grep -v node_modules | grep -v ".git" | head -8`,
        { cwd, encoding: 'utf8', timeout: 4000 }
      )
      const files = out.trim().split('\n').filter(Boolean)
      if (files.length) results[type] = files
    } catch { /* non-fatal — grep exits non-zero when nothing matches */ }
  }
  return results
}
|
|
259
|
+
|
|
86
260
|
// ── Tool registration functions ───────────────────────────────────────────────
|
|
87
261
|
|
|
88
262
|
function registerAuthTools(server) {
|
|
@@ -227,21 +401,300 @@ function registerTaskTools(server, { isAdmin, scopedProjectId }) {
|
|
|
227
401
|
async ({ taskId }) => call(() => api.get(`/api/tasks/${taskId}`))
|
|
228
402
|
)
|
|
229
403
|
|
|
404
|
+
// ── search_tasks ──────────────────────────────────────────────────────────────
|
|
405
|
+
server.tool(
|
|
406
|
+
'search_tasks',
|
|
407
|
+
`Search and filter tasks within a project.
|
|
408
|
+
|
|
409
|
+
Use this to find tasks by keyword, type, status, or assignee. Much faster than listing all tasks.
|
|
410
|
+
|
|
411
|
+
Examples:
|
|
412
|
+
- Find all bugfix tasks: search_tasks(projectId, taskType="bugfix")
|
|
413
|
+
- Find tasks about "auth": search_tasks(projectId, query="auth")
|
|
414
|
+
- Find your in-progress tasks: search_tasks(projectId, column="in_progress", assignee=myUserId)
|
|
415
|
+
- Find high priority todo items: search_tasks(projectId, column="todo", priority="high,critical")
|
|
416
|
+
- Find tasks in review: search_tasks(projectId, column="in_review")
|
|
417
|
+
|
|
418
|
+
Returns tasks with key, title, column, assignees, priority, taskType, and branch info.`,
|
|
419
|
+
{
|
|
420
|
+
projectId: z.string().describe("Project's MongoDB ObjectId"),
|
|
421
|
+
query: z.string().optional().describe('Free-text search across title, description, README, and key (e.g. "auth", "TASK-042", "payment webhook")'),
|
|
422
|
+
column: z.string().optional().describe('Filter by column — comma-separated: "todo,in_progress" or single "done"'),
|
|
423
|
+
taskType: z.string().optional().describe('Filter by task type — comma-separated: "bugfix,feature" or single "migration"'),
|
|
424
|
+
priority: z.string().optional().describe('Filter by priority — comma-separated: "high,critical"'),
|
|
425
|
+
assignee: z.string().optional().describe('Filter by assignee user ID — returns only tasks assigned to this user'),
|
|
426
|
+
limit: z.number().optional().default(20).describe('Max results to return (default 20, max 100)'),
|
|
427
|
+
},
|
|
428
|
+
async ({ projectId, query, column, taskType, priority, assignee, limit = 20 }) => {
|
|
429
|
+
if (scopedProjectId && projectId !== scopedProjectId) {
|
|
430
|
+
return errorText(`Access denied: session is scoped to project ${scopedProjectId}`)
|
|
431
|
+
}
|
|
432
|
+
const params = new URLSearchParams()
|
|
433
|
+
if (query) params.set('q', query)
|
|
434
|
+
if (column) params.set('column', column)
|
|
435
|
+
if (taskType) params.set('taskType', taskType)
|
|
436
|
+
if (priority) params.set('priority', priority)
|
|
437
|
+
if (assignee) params.set('assignee', assignee)
|
|
438
|
+
if (limit) params.set('limit', String(limit))
|
|
439
|
+
|
|
440
|
+
const res = await api.get(`/api/projects/${projectId}/tasks/search?${params.toString()}`)
|
|
441
|
+
if (!res?.success) return errorText(res?.message || 'Search failed')
|
|
442
|
+
|
|
443
|
+
const tasks = res.data.tasks || []
|
|
444
|
+
if (!tasks.length) return text({ found: 0, message: 'No tasks match your search criteria.' })
|
|
445
|
+
|
|
446
|
+
return text({
|
|
447
|
+
found: tasks.length,
|
|
448
|
+
tasks: tasks.map(t => ({
|
|
449
|
+
taskId: t._id,
|
|
450
|
+
key: t.key,
|
|
451
|
+
title: t.title,
|
|
452
|
+
column: t.column,
|
|
453
|
+
priority: t.priority,
|
|
454
|
+
taskType: t.taskType || null,
|
|
455
|
+
assignees: (t.assignees || []).map(a => ({ id: a._id, name: a.name || a.email })),
|
|
456
|
+
branch: t.github?.headBranch || null,
|
|
457
|
+
prNumber: t.github?.prNumber || null,
|
|
458
|
+
hasPR: !!(t.github?.prNumber),
|
|
459
|
+
updatedAt: t.updatedAt,
|
|
460
|
+
})),
|
|
461
|
+
tip: tasks.length === limit ? `Results capped at ${limit}. Use more specific filters or increase limit to see more.` : null,
|
|
462
|
+
})
|
|
463
|
+
}
|
|
464
|
+
)
|
|
465
|
+
|
|
466
|
+
// ── plan_task_from_codebase ───────────────────────────────────────────────────
|
|
467
|
+
server.tool(
|
|
468
|
+
'plan_task_from_codebase',
|
|
469
|
+
`Specialized task-creation agent — analyzes the codebase and creates a fully structured, kickoff-ready task.
|
|
470
|
+
|
|
471
|
+
Use this instead of create_task when you need to implement something and want the task to
|
|
472
|
+
contain a real implementation plan based on the actual codebase, not a generic description.
|
|
473
|
+
|
|
474
|
+
## MANDATORY protocol — follow every step in order
|
|
475
|
+
|
|
476
|
+
### Step 1 — Duplicate check (always first)
|
|
477
|
+
Call search_tasks(projectId, query="<keywords from the request>").
|
|
478
|
+
If a similar open task already exists → return it with kickoff instructions instead of creating a duplicate.
|
|
479
|
+
|
|
480
|
+
### Step 2 — Codebase analysis (READ the code, do not guess)
|
|
481
|
+
Using your native Read / Grep / Glob tools:
|
|
482
|
+
|
|
483
|
+
a) **Stack detection** — read package.json / go.mod / requirements.txt / Cargo.toml.
|
|
484
|
+
Identify: language, framework, major libraries, test runner.
|
|
485
|
+
|
|
486
|
+
b) **Entry point mapping** — find where the relevant feature area lives:
|
|
487
|
+
- For backend: grep for existing route patterns (router.post, app.get, @Controller, etc.)
|
|
488
|
+
- For frontend: grep for existing component patterns, hooks, state management
|
|
489
|
+
- For DB: find schema/model files
|
|
490
|
+
|
|
491
|
+
c) **Pattern extraction** — read 2-3 existing files similar to what you'll build.
|
|
492
|
+
Note: naming conventions, folder structure, how services/routes/components are wired.
|
|
493
|
+
|
|
494
|
+
d) **Impact analysis** — identify every file that needs to change:
|
|
495
|
+
- Files to CREATE (new route, new component, new model, new test)
|
|
496
|
+
- Files to MODIFY (existing router index, existing schema, existing types)
|
|
497
|
+
|
|
498
|
+
e) **Dependency order** — which files must be built first (schema before service, service before route, etc.)
|
|
499
|
+
|
|
500
|
+
### Step 3 — Write the implementation plan
|
|
501
|
+
Using what you found in Step 2, build:
|
|
502
|
+
- ## Goal — one sentence
|
|
503
|
+
- ## Stack — language/framework detected
|
|
504
|
+
- ## Technical approach — how it fits into the existing code (name actual files and functions)
|
|
505
|
+
- ## Files to create — path + what it does
|
|
506
|
+
- ## Files to modify — path + what changes
|
|
507
|
+
- ## Subtasks — ordered implementation steps (schema → service → route → test → UI)
|
|
508
|
+
- ## Acceptance criteria — what done looks like
|
|
509
|
+
|
|
510
|
+
### Step 4 — Determine task metadata
|
|
511
|
+
- taskType: feature / bugfix / migration / integration / ui / backend / security / refactor
|
|
512
|
+
- priority: low / medium / high / critical (use "high" if the request sounds important)
|
|
513
|
+
- suggestedFiles: the exact file paths from your Step 2d impact analysis
|
|
514
|
+
|
|
515
|
+
### Step 5 — Create the task
|
|
516
|
+
Call create_task with:
|
|
517
|
+
- projectId (from this call)
|
|
518
|
+
- title: action-oriented, concise (verb + noun, e.g. "Add rate limiting to /api/auth/login")
|
|
519
|
+
- description: one paragraph summary
|
|
520
|
+
- readmeMarkdown: the full plan from Step 3
|
|
521
|
+
- taskType, priority, column="todo"
|
|
522
|
+
- subtasks: the ordered list from Step 3 (each step = one subtask)
|
|
523
|
+
- suggestedFiles: from Step 2d
|
|
524
|
+
|
|
525
|
+
After create_task succeeds, immediately call:
|
|
526
|
+
kickoff_task(taskId=<returned id>, confirmed=true, agentRole="builder", files=<suggestedFiles>)
|
|
527
|
+
|
|
528
|
+
## What makes a high-quality task
|
|
529
|
+
- readmeMarkdown references REAL file paths found by grepping the codebase
|
|
530
|
+
- subtasks are ordered (schema first, then service, then route, then test, then UI)
|
|
531
|
+
- suggestedFiles lists every file that will be touched — no omissions
|
|
532
|
+
- title is specific ("Add email verification to /api/auth/register") not generic ("Add email feature")
|
|
533
|
+
|
|
534
|
+
Do NOT skip the codebase analysis. Do NOT create the task before reading the code.
|
|
535
|
+
Do NOT ask the developer to describe the codebase — read it yourself.`,
|
|
536
|
+
{
|
|
537
|
+
projectId: z.string().describe("InternalTool project's MongoDB ObjectId — from the project's task board URL or CLAUDE.md"),
|
|
538
|
+
request: z.string().describe('What the developer wants to build — the raw natural language request (e.g. "add rate limiting to the login endpoint", "fix the pagination bug on users list")'),
|
|
539
|
+
priority: z.enum(['low', 'medium', 'high', 'critical']).optional().default('medium')
|
|
540
|
+
.describe('Task priority — default medium, use high/critical for urgent work'),
|
|
541
|
+
},
|
|
542
|
+
async ({ projectId, request, priority = 'medium' }) => {
|
|
543
|
+
if (scopedProjectId && projectId !== scopedProjectId) {
|
|
544
|
+
return errorText(`Access denied: session is scoped to project ${scopedProjectId}`)
|
|
545
|
+
}
|
|
546
|
+
|
|
547
|
+
// ── 1. Run codebase analysis using local filesystem access ────────────────
|
|
548
|
+
const cwd = process.cwd()
|
|
549
|
+
const stack = detectStack(cwd)
|
|
550
|
+
const dirTree = getDirTree(cwd, 2)
|
|
551
|
+
const entryPoints = findEntryPoints(cwd, stack)
|
|
552
|
+
|
|
553
|
+
// ── 2. Fetch project context ──────────────────────────────────────────────
|
|
554
|
+
let projectContext = null
|
|
555
|
+
try {
|
|
556
|
+
const projRes = await api.get(`/api/projects/${projectId}`)
|
|
557
|
+
if (projRes?.success) {
|
|
558
|
+
const p = projRes.data.project
|
|
559
|
+
projectContext = {
|
|
560
|
+
name: p.name,
|
|
561
|
+
taskCount: (projRes.data.tasks || []).length,
|
|
562
|
+
githubRepo: p.github?.repoUrl || null,
|
|
563
|
+
}
|
|
564
|
+
}
|
|
565
|
+
} catch { /* non-fatal */ }
|
|
566
|
+
|
|
567
|
+
// ── 3. Duplicate check ────────────────────────────────────────────────────
|
|
568
|
+
let similarTasks = null
|
|
569
|
+
try {
|
|
570
|
+
const keywords = request.split(' ').filter(w => w.length > 4).slice(0, 3).join(' ')
|
|
571
|
+
const searchRes = await api.get(
|
|
572
|
+
`/api/projects/${projectId}/tasks/search?q=${encodeURIComponent(keywords)}&limit=3`
|
|
573
|
+
)
|
|
574
|
+
if (searchRes?.success && searchRes.data?.tasks?.length > 0) {
|
|
575
|
+
similarTasks = searchRes.data.tasks.map(t => ({
|
|
576
|
+
taskId: t._id, key: t.key, title: t.title, column: t.column, taskType: t.taskType,
|
|
577
|
+
}))
|
|
578
|
+
}
|
|
579
|
+
} catch { /* non-fatal */ }
|
|
580
|
+
|
|
581
|
+
// ── 4. Return codebase intelligence + tight instructions ──────────────────
|
|
582
|
+
const hasEntryPoints = Object.keys(entryPoints).length > 0
|
|
583
|
+
const stackSummary = [
|
|
584
|
+
stack.language, stack.framework, stack.testRunner, ...(stack.extra || [])
|
|
585
|
+
].filter(Boolean).join(', ') || 'unknown (no manifest found at cwd)'
|
|
586
|
+
|
|
587
|
+
return text({
|
|
588
|
+
// Real codebase data — use these directly, do NOT re-read what's already here
|
|
589
|
+
codebaseIntelligence: {
|
|
590
|
+
cwd,
|
|
591
|
+
stack: { ...stack, summary: stackSummary },
|
|
592
|
+
dirTree,
|
|
593
|
+
entryPoints: hasEntryPoints
|
|
594
|
+
? entryPoints
|
|
595
|
+
: { note: 'No entry points found at cwd — check that MCP server is running from the project root' },
|
|
596
|
+
},
|
|
597
|
+
|
|
598
|
+
// Duplicate guard
|
|
599
|
+
...(similarTasks?.length > 0 && {
|
|
600
|
+
duplicateWarning: true,
|
|
601
|
+
similarTasks,
|
|
602
|
+
duplicateInstruction: 'Check the list above. If one matches → call kickoff_task on it. If none match → proceed.',
|
|
603
|
+
}),
|
|
604
|
+
|
|
605
|
+
// What to do now — use codebaseIntelligence above to skip re-reading the filesystem
|
|
606
|
+
nextSteps: [
|
|
607
|
+
'Use dirTree + entryPoints above to identify which files need changing — you already have the map.',
|
|
608
|
+
`Read 2-3 files from entryPoints.routes / entryPoints.models / entryPoints.components to extract naming conventions and wiring patterns.`,
|
|
609
|
+
'Draft readmeMarkdown with: ## Goal, ## Stack, ## Technical approach (name real files), ## Files to create, ## Files to modify, ## Subtasks (ordered), ## Acceptance criteria.',
|
|
610
|
+
`Call create_task(projectId="${projectId}", title="<verb + noun>", readmeMarkdown="<plan>", taskType="<feature|bugfix|...>", priority="${priority}", column="todo", subtasks=[...], suggestedFiles=[...])`,
|
|
611
|
+
'Immediately after: call kickoff_task(taskId=<returned id>, confirmed=true, agentRole="builder", files=[...suggestedFiles])',
|
|
612
|
+
],
|
|
613
|
+
|
|
614
|
+
// Context
|
|
615
|
+
request,
|
|
616
|
+
projectId,
|
|
617
|
+
priority,
|
|
618
|
+
projectContext,
|
|
619
|
+
})
|
|
620
|
+
}
|
|
621
|
+
)
|
|
622
|
+
|
|
230
623
|
server.tool(
|
|
231
624
|
'create_task',
|
|
232
|
-
|
|
625
|
+
`Create a new task in a project — use this as the final step after analysing the codebase.
|
|
626
|
+
|
|
627
|
+
## Optimal flow when a developer asks to implement a feature:
|
|
628
|
+
|
|
629
|
+
1. **Read the codebase first** (use your native Read/Grep/Glob tools — do NOT ask the developer to describe the code):
|
|
630
|
+
- Find the entry point: package.json / go.mod / requirements.txt → detect stack
|
|
631
|
+
- Find existing patterns relevant to the feature (e.g. grep for "router.post" to see how routes work)
|
|
632
|
+
- Identify which files will need to be created or modified
|
|
633
|
+
|
|
634
|
+
2. **Write the implementation plan** in readmeMarkdown:
|
|
635
|
+
- ## Goal — one sentence of what this builds
|
|
636
|
+
- ## Technical approach — how it fits into the existing code (name actual files and functions)
|
|
637
|
+
- ## Files to create — list each new file and what it does
|
|
638
|
+
- ## Files to modify — list each existing file and what changes
|
|
639
|
+
- ## Subtasks — ordered implementation steps (these become the task checklist)
|
|
640
|
+
|
|
641
|
+
3. **Call create_task** with the completed plan — the task is immediately kickoff-ready.
|
|
642
|
+
|
|
643
|
+
## What makes a good task
|
|
644
|
+
- readmeMarkdown has enough detail that an agent can implement without asking questions
|
|
645
|
+
- subtasks are ordered (schema first → service → route → test → frontend)
|
|
646
|
+
- taskType is set so kickoff_task routes correctly (feature/bugfix/migration/etc.)
|
|
647
|
+
- suggestedFiles tells the builder exactly which files to claim at kickoff
|
|
648
|
+
|
|
649
|
+
Always prefer column="todo" so the task is visibly ready to start.`,
|
|
233
650
|
{
|
|
234
651
|
projectId: z.string().describe("Project's MongoDB ObjectId"),
|
|
235
|
-
title: z.string().describe('Task title'),
|
|
236
|
-
description: z.string().optional().describe('
|
|
237
|
-
readmeMarkdown: z.string().optional().describe('
|
|
652
|
+
title: z.string().describe('Task title — concise, action-oriented (e.g. "Add email channel to sendNotification")'),
|
|
653
|
+
description: z.string().optional().describe('One-sentence summary of what this task does and why'),
|
|
654
|
+
readmeMarkdown: z.string().optional().describe('Full markdown implementation plan — see tool description for the recommended structure'),
|
|
238
655
|
priority: z.enum(['low', 'medium', 'high', 'critical']).optional(),
|
|
239
|
-
column: z.enum(['backlog', 'todo', 'in_progress', 'in_review', 'done']).optional(),
|
|
656
|
+
column: z.enum(['backlog', 'todo', 'in_progress', 'in_review', 'done']).optional().describe('Use "todo" for features ready to start'),
|
|
240
657
|
assignees: z.array(z.string()).optional().describe('User IDs to assign'),
|
|
658
|
+
taskType: z.enum(['migration', 'integration', 'bugfix', 'ui', 'backend', 'security', 'refactor', 'feature']).optional()
|
|
659
|
+
.describe('Task type — drives agent routing at kickoff (scout-first, coordinator vs single builder, etc.)'),
|
|
660
|
+
subtasks: z.array(z.object({ title: z.string() })).optional()
|
|
661
|
+
.describe('Ordered implementation checklist — shown in the task UI and read by agents at kickoff. Order matters: schema → model → service → route → test → frontend.'),
|
|
662
|
+
suggestedFiles: z.array(z.string()).optional()
|
|
663
|
+
.describe('Files the builder will claim at kickoff (e.g. ["server/routes/tasks.js", "server/models/Task.js"]). Included in the task README automatically so the builder knows what to pass to kickoff_task files=[...].'),
|
|
241
664
|
},
|
|
242
|
-
async ({ projectId, ...taskData }) => {
|
|
665
|
+
async ({ projectId, suggestedFiles, ...taskData }) => {
|
|
243
666
|
try { assertProjectScope(projectId) } catch (e) { return errorText(e.message) }
|
|
244
|
-
|
|
667
|
+
|
|
668
|
+
// Append suggested files section to the README so the builder sees them at kickoff
|
|
669
|
+
if (suggestedFiles?.length > 0 && taskData.readmeMarkdown) {
|
|
670
|
+
taskData.readmeMarkdown = [
|
|
671
|
+
taskData.readmeMarkdown.trimEnd(),
|
|
672
|
+
'',
|
|
673
|
+
'## Files to claim at kickoff',
|
|
674
|
+
'```',
|
|
675
|
+
...suggestedFiles.map(f => f),
|
|
676
|
+
'```',
|
|
677
|
+
`Call \`kickoff_task(confirmed=true, agentRole="builder", files=[${suggestedFiles.map(f => `"${f}"`).join(', ')}])\` to atomically claim ownership and start.`,
|
|
678
|
+
].join('\n')
|
|
679
|
+
}
|
|
680
|
+
|
|
681
|
+
const res = await api.post(`/api/projects/${projectId}/tasks`, taskData)
|
|
682
|
+
if (!res?.success) return errorText(res?.message || 'Failed to create task')
|
|
683
|
+
|
|
684
|
+
const task = res.data?.task
|
|
685
|
+
return text({
|
|
686
|
+
created: true,
|
|
687
|
+
taskId: task?._id,
|
|
688
|
+
taskKey: task?.key,
|
|
689
|
+
title: task?.title,
|
|
690
|
+
column: task?.column,
|
|
691
|
+
taskType: task?.taskType || null,
|
|
692
|
+
subtasks: (task?.subtasks || []).length,
|
|
693
|
+
kickoff: task?._id
|
|
694
|
+
? `kickoff_task(taskId="${task._id}", confirmed=false) ← read plan first\nkickoff_task(taskId="${task._id}", confirmed=true, agentRole="builder", files=[...]) ← start building`
|
|
695
|
+
: null,
|
|
696
|
+
message: `Task ${task?.key} created and ready to kick off.`,
|
|
697
|
+
})
|
|
245
698
|
}
|
|
246
699
|
)
|
|
247
700
|
|
|
@@ -402,6 +855,9 @@ Set confirmed=false first to preview, then confirmed=true to execute everything.
|
|
|
402
855
|
agentWorkspace: { clearedAt: new Date().toISOString() }
|
|
403
856
|
}).catch(() => {/* non-fatal */})
|
|
404
857
|
|
|
858
|
+
// Remove .internaltool-active-task so the Claude Code hook blocks edits until next kickoff
|
|
859
|
+
try { unlinkSync(join(process.cwd(), '.internaltool-active-task')) } catch { /* non-fatal */ }
|
|
860
|
+
|
|
405
861
|
// ── #9 Capture last commit for handoff metadata ───────────────────────
|
|
406
862
|
const lastCommit = getLastCommitMeta(repoRoot)
|
|
407
863
|
|
|
@@ -602,6 +1058,65 @@ Set confirmed=false first to read everything, then confirmed=true to execute.`,
|
|
|
602
1058
|
}
|
|
603
1059
|
)
|
|
604
1060
|
|
|
1061
|
+
// ── add_blocker ───────────────────────────────────────────────────────────────
// Registers the `add_blocker` MCP tool: records a task-on-task dependency via
// POST /api/tasks/:id/blockers and, when a reason is given, leaves a
// best-effort explanatory comment on the blocked task.
server.tool(
  'add_blocker',
  `Mark a task as blocked by another task that must complete first.

Use this when you discover that TASK-A cannot start or cannot be merged until TASK-B is done.
Examples: auth middleware must ship before the protected routes task starts;
DB schema migration must merge before the service layer task.

The blocked task will show a warning in kickoff_task and be deprioritized in what_should_i_work_on.`,
  {
    taskId: z.string().describe("Task that is blocked — the one that cannot start yet"),
    blockerTaskId: z.string().describe("Task that must complete first — the blocker"),
    reason: z.string().optional().describe('Why this dependency exists — shown to developers'),
  },
  async ({ taskId, blockerTaskId, reason }) => {
    const res = await api.post(`/api/tasks/${taskId}/blockers`, { blockerTaskId })
    if (!res?.success) return errorText(res?.message || 'Could not add blocker')
    // Optionally add a comment explaining the dependency
    if (reason) {
      // Best-effort: a failed comment post must not fail the blocker itself
      await api.post(`/api/tasks/${taskId}/comments`, {
        body: `🔒 **Blocked by task \`${blockerTaskId}\`**: ${reason}`,
      }).catch(() => {})
    }
    return text({
      blocked: true,
      taskId,
      blockerTaskId,
      reason: reason || null,
      message: `Task is now blocked. It will not be recommended until the blocker is resolved.`,
      nextStep: `When the blocker task is done, call remove_blocker with taskId="${taskId}" and blockerTaskId="${blockerTaskId}".`,
    })
  }
)
|
|
1095
|
+
|
|
1096
|
+
// ── remove_blocker ────────────────────────────────────────────────────────────
// Registers the `remove_blocker` MCP tool: deletes a previously recorded
// blocker via DELETE /api/tasks/:id/blockers/:blockerId.
server.tool(
  'remove_blocker',
  `Remove a blocker from a task, marking it as unblocked and ready to start.

Call this when the blocking task has been merged or completed.
After removing the blocker, the task will appear in what_should_i_work_on recommendations again.`,
  {
    taskId: z.string().describe("Task that was blocked"),
    blockerTaskId: z.string().describe("The blocker task that is now resolved"),
  },
  async ({ taskId, blockerTaskId }) => {
    const res = await api.delete(`/api/tasks/${taskId}/blockers/${blockerTaskId}`)
    if (!res?.success) return errorText(res?.message || 'Could not remove blocker')
    return text({
      unblocked: true,
      taskId,
      blockerTaskId,
      message: `Blocker removed. Task is now unblocked.`,
      nextStep: `Call kickoff_task with taskId="${taskId}" to start this task.`,
    })
  }
)
|
|
1119
|
+
|
|
605
1120
|
// ── claim_files ──────────────────────────────────────────────────────────────
|
|
606
1121
|
server.tool(
|
|
607
1122
|
'claim_files',
|
|
@@ -828,32 +1343,51 @@ Call confirmed=false to preview the decomposition, confirmed=true to save it.`,
|
|
|
828
1343
|
})
|
|
829
1344
|
}
|
|
830
1345
|
|
|
831
|
-
// Save decomposition to task
|
|
1346
|
+
// Save decomposition JSON to parent task
|
|
832
1347
|
const decompositionJson = JSON.stringify(executionPlan, null, 2)
|
|
833
1348
|
try {
|
|
834
1349
|
await api.patch(`/api/tasks/${taskId}`, { decomposition: decompositionJson })
|
|
835
|
-
} catch { /* non-fatal — decomposition is returned regardless */ }
|
|
836
|
-
|
|
837
|
-
// Create subtasks on the board
|
|
838
|
-
const currentSubtasks = task.subtasks || []
|
|
839
|
-
const newSubtasks = [
|
|
840
|
-
...currentSubtasks,
|
|
841
|
-
...subtaskPlan.map((s, i) => ({
|
|
842
|
-
title: `[${s.role.toUpperCase()}] ${s.title}`,
|
|
843
|
-
done: false,
|
|
844
|
-
order: currentSubtasks.length + i,
|
|
845
|
-
})),
|
|
846
|
-
]
|
|
847
|
-
try {
|
|
848
|
-
await api.patch(`/api/tasks/${taskId}`, { subtasks: newSubtasks })
|
|
849
1350
|
} catch { /* non-fatal */ }
|
|
850
1351
|
|
|
1352
|
+
// Create real child tasks for each subtask in the plan
|
|
1353
|
+
const projectId = task.project?._id || task.project
|
|
1354
|
+
const createdTasks = []
|
|
1355
|
+
for (const s of subtaskPlan) {
|
|
1356
|
+
try {
|
|
1357
|
+
const childRes = await api.post(`/api/projects/${projectId}/tasks`, {
|
|
1358
|
+
title: `[${s.role.toUpperCase()}] ${s.title}`,
|
|
1359
|
+
description: s.description,
|
|
1360
|
+
readmeMarkdown: [
|
|
1361
|
+
`## Role: ${s.role}`,
|
|
1362
|
+
`## Description\n${s.description}`,
|
|
1363
|
+
s.files?.length ? `## Files to claim at kickoff\n${s.files.map(f => `- \`${f}\``).join('\n')}` : '',
|
|
1364
|
+
s.dependsOn?.length ? `## Depends on\n${s.dependsOn.map(d => `- ${d}`).join('\n')}` : '',
|
|
1365
|
+
].filter(Boolean).join('\n\n'),
|
|
1366
|
+
column: 'todo',
|
|
1367
|
+
priority: task.priority || 'medium',
|
|
1368
|
+
taskType: s.role === 'reviewer' ? 'feature' : (task.taskType || 'feature'),
|
|
1369
|
+
parentTask: taskId,
|
|
1370
|
+
suggestedFiles: s.files || [],
|
|
1371
|
+
})
|
|
1372
|
+
if (childRes?.success) {
|
|
1373
|
+
createdTasks.push({
|
|
1374
|
+
taskId: childRes.data?.task?._id,
|
|
1375
|
+
taskKey: childRes.data?.task?.key,
|
|
1376
|
+
title: childRes.data?.task?.title,
|
|
1377
|
+
role: s.role,
|
|
1378
|
+
files: s.files,
|
|
1379
|
+
})
|
|
1380
|
+
}
|
|
1381
|
+
} catch { /* non-fatal — continue creating remaining tasks */ }
|
|
1382
|
+
}
|
|
1383
|
+
|
|
851
1384
|
return text({
|
|
852
1385
|
decomposed: true,
|
|
853
1386
|
taskKey: task.key,
|
|
854
1387
|
executionPlan,
|
|
855
|
-
|
|
856
|
-
|
|
1388
|
+
childTasksCreated: createdTasks.length,
|
|
1389
|
+
childTasks: createdTasks,
|
|
1390
|
+
message: `Decomposition saved. ${createdTasks.length} child task(s) created on the board (${subtaskPlan.length - createdTasks.length} failed).`,
|
|
857
1391
|
nextStep: parallelGroups.length > 0
|
|
858
1392
|
? `⚡ COORDINATOR: Call get_parallel_kickoffs with taskId="${taskId}" NOW. It writes Cursor Background Agent files for each parallel builder automatically. Then tell the user to open Background Agents panel (⌘⇧J) and click Start. DO NOT implement code yourself.`
|
|
859
1393
|
: `Call get_parallel_kickoffs with taskId="${taskId}". It writes a Cursor Background Agent file for the builder. Tell the user to open Background Agents panel (⌘⇧J) and start it.`,
|
|
@@ -982,10 +1516,57 @@ Returns:
|
|
|
982
1516
|
message: 'GitHub is still computing mergeability. Wait a few seconds and call check_merge_conflicts again.',
|
|
983
1517
|
})
|
|
984
1518
|
|
|
985
|
-
if (!d.hasConflict && d.behindBy === 0)
|
|
986
|
-
|
|
987
|
-
|
|
988
|
-
|
|
1519
|
+
if (!d.hasConflict && d.behindBy === 0) {
|
|
1520
|
+
// Branch is clean — remove conflict-resolver.mdc if it was written during a previous kickoff.
|
|
1521
|
+
// Leaving it on disk keeps injecting "BRANCH CONFLICT ACTIVE" into every Cursor prompt forever.
|
|
1522
|
+
let cleanedRuleFile = false
|
|
1523
|
+
try {
|
|
1524
|
+
const root = findRepoRoot(process.cwd())
|
|
1525
|
+
if (root) {
|
|
1526
|
+
const ruleFile = join(root, '.cursor', 'rules', 'conflict-resolver.mdc')
|
|
1527
|
+
if (existsSync(ruleFile)) {
|
|
1528
|
+
unlinkSync(ruleFile)
|
|
1529
|
+
cleanedRuleFile = true
|
|
1530
|
+
}
|
|
1531
|
+
}
|
|
1532
|
+
} catch { /* non-fatal — never block on cleanup */ }
|
|
1533
|
+
return text({
|
|
1534
|
+
status: 'clean',
|
|
1535
|
+
message: `PR is clean — no conflicts, not behind ${d.base}. Safe to merge.`,
|
|
1536
|
+
...(cleanedRuleFile ? { cleaned: 'conflict-resolver.mdc removed from .cursor/rules/ — Cursor will no longer inject conflict warnings.' } : {}),
|
|
1537
|
+
})
|
|
1538
|
+
}
|
|
1539
|
+
|
|
1540
|
+
// Diff-based fallback: when conflicting files exist but no task owns them via claimedFiles,
|
|
1541
|
+
// scan the git tree to find branches that touch the same files without having claimed them.
|
|
1542
|
+
// This catches real overlaps when developers skip claim_files or edit files directly.
|
|
1543
|
+
let uncoveredConflicts = []
|
|
1544
|
+
if (d.hasConflict && d.conflictingFiles?.length > 0) {
|
|
1545
|
+
try {
|
|
1546
|
+
const taskRes = await api.get(`/api/tasks/${taskId}`)
|
|
1547
|
+
const projectId = taskRes?.data?.task?.project
|
|
1548
|
+
if (projectId) {
|
|
1549
|
+
const treeRes = await api.get(`/api/projects/${projectId}/github/git-tree`).catch(() => null)
|
|
1550
|
+
if (treeRes?.success) {
|
|
1551
|
+
const coveredFiles = new Set((d.conflictingTasks || []).flatMap(t => t.claimedFiles || []))
|
|
1552
|
+
const branches = treeRes.data.branches || []
|
|
1553
|
+
for (const file of d.conflictingFiles) {
|
|
1554
|
+
if (coveredFiles.has(file)) continue // already identified via claimedFiles
|
|
1555
|
+
const matches = branches
|
|
1556
|
+
.filter(b => String(b.taskId) !== taskId && (b.claimedFiles || []).includes(file))
|
|
1557
|
+
.map(b => ({ taskKey: b.taskKey, branch: b.headBranch, assignees: (b.assignees || []).map(a => a.name).join(', ') }))
|
|
1558
|
+
uncoveredConflicts.push({
|
|
1559
|
+
file,
|
|
1560
|
+
likelyCausedBy: matches.length > 0 ? matches : null,
|
|
1561
|
+
hint: matches.length === 0
|
|
1562
|
+
? `No task claims this file. Check: git log origin/${d.base} -- ${file} --oneline -5`
|
|
1563
|
+
: null,
|
|
1564
|
+
})
|
|
1565
|
+
}
|
|
1566
|
+
}
|
|
1567
|
+
}
|
|
1568
|
+
} catch { /* non-fatal */ }
|
|
1569
|
+
}
|
|
989
1570
|
|
|
990
1571
|
return text({
|
|
991
1572
|
status: d.hasConflict ? 'conflict' : 'behind',
|
|
@@ -995,10 +1576,11 @@ Returns:
|
|
|
995
1576
|
headBranch: d.headBranch,
|
|
996
1577
|
conflictingFiles: d.conflictingFiles,
|
|
997
1578
|
conflictingTasks: d.conflictingTasks,
|
|
1579
|
+
...(uncoveredConflicts.length > 0 ? { uncoveredConflicts } : {}),
|
|
998
1580
|
coordinationNote: d.coordinationNote,
|
|
999
1581
|
resolution: {
|
|
1000
1582
|
steps: d.resolution,
|
|
1001
|
-
note: d.hasConflict && d.conflictingTasks?.length > 0
|
|
1583
|
+
note: d.hasConflict && (d.conflictingTasks?.length > 0 || uncoveredConflicts.length > 0)
|
|
1002
1584
|
? `⚠️ COORDINATE FIRST: Contact the assignees of the conflicting tasks before resolving. Resolving without coordination may overwrite their work.`
|
|
1003
1585
|
: `Rebase your branch on ${d.base} to resolve.`,
|
|
1004
1586
|
},
|
|
@@ -1058,6 +1640,27 @@ the Edit tool to resolve the conflict markers, then continue the rebase with Bas
|
|
|
1058
1640
|
} catch { /* non-fatal */ }
|
|
1059
1641
|
}
|
|
1060
1642
|
|
|
1643
|
+
// Fallback: no task owns this file via claimedFiles — conflict came from a direct commit to the
|
|
1644
|
+
// base branch. Fetch the 5 most recent commits touching this file so the agent can read
|
|
1645
|
+
// what changed and why, instead of getting a generic "check git log" message.
|
|
1646
|
+
let recentMainCommits = null
|
|
1647
|
+
if (!otherTask && task.project) {
|
|
1648
|
+
try {
|
|
1649
|
+
const base = conflictData.base || 'main'
|
|
1650
|
+
const commitsRes = await api.get(
|
|
1651
|
+
`/api/projects/${task.project}/github/commits?sha=${base}&per_page=5&path=${encodeURIComponent(filePath)}`
|
|
1652
|
+
)
|
|
1653
|
+
if (commitsRes?.success && commitsRes.data?.commits?.length > 0) {
|
|
1654
|
+
recentMainCommits = commitsRes.data.commits.slice(0, 5).map(c => ({
|
|
1655
|
+
sha: c.sha?.slice(0, 7),
|
|
1656
|
+
message: c.commit?.message?.split('\n')[0],
|
|
1657
|
+
author: c.commit?.author?.name,
|
|
1658
|
+
date: c.commit?.author?.date,
|
|
1659
|
+
}))
|
|
1660
|
+
}
|
|
1661
|
+
} catch { /* non-fatal */ }
|
|
1662
|
+
}
|
|
1663
|
+
|
|
1061
1664
|
const base = conflictData.base || 'main'
|
|
1062
1665
|
|
|
1063
1666
|
return text({
|
|
@@ -1069,7 +1672,11 @@ the Edit tool to resolve the conflict markers, then continue the rebase with Bas
|
|
|
1069
1672
|
scoutReport: task.scoutReport || null,
|
|
1070
1673
|
},
|
|
1071
1674
|
otherTask: otherTask || {
|
|
1072
|
-
note: '
|
|
1675
|
+
note: 'No task claims this file — conflict came from a direct commit to ' + base + '.',
|
|
1676
|
+
...(recentMainCommits
|
|
1677
|
+
? { recentCommits: recentMainCommits, hint: 'Read recentCommits above to understand what changed in ' + base + ' before deciding what to keep.' }
|
|
1678
|
+
: { hint: 'Run: git log origin/' + base + ' -- ' + filePath + ' --oneline -5' }
|
|
1679
|
+
),
|
|
1073
1680
|
},
|
|
1074
1681
|
conflictMarkers: {
|
|
1075
1682
|
ours: '<<<<<<< HEAD ← YOUR changes (this task)',
|
|
@@ -1231,7 +1838,7 @@ Returns systemPrompt ready to use as a Claude system prompt.`,
|
|
|
1231
1838
|
const ctx = res.data
|
|
1232
1839
|
|
|
1233
1840
|
const effectiveRole = ctx.role
|
|
1234
|
-
const roleRules = effectiveRole
|
|
1841
|
+
const roleRules = getRoleRules(effectiveRole, ctx.claimedFiles || [])
|
|
1235
1842
|
|
|
1236
1843
|
// Compose the full system prompt
|
|
1237
1844
|
const parts = []
|
|
@@ -2192,6 +2799,24 @@ Call this when the developer says "generate standup", "what did I do yesterday",
|
|
|
2192
2799
|
const yesterday = new Date(Date.now() - 24 * 60 * 60 * 1000)
|
|
2193
2800
|
const PRIORITY_ORDER = { critical: 0, high: 1, medium: 2, low: 3 }
|
|
2194
2801
|
|
|
2802
|
+
// 0. Real git commits since midnight (local repo)
|
|
2803
|
+
let gitCommitsToday = []
|
|
2804
|
+
try {
|
|
2805
|
+
const midnight = new Date()
|
|
2806
|
+
midnight.setHours(0, 0, 0, 0)
|
|
2807
|
+
const out = execSync(
|
|
2808
|
+
`git log --since="${midnight.toISOString()}" --format="%h|||%s|||%an|||%D" --all 2>/dev/null`,
|
|
2809
|
+
{ cwd: process.cwd(), encoding: 'utf8', timeout: 3000 }
|
|
2810
|
+
)
|
|
2811
|
+
gitCommitsToday = out.trim().split('\n').filter(Boolean).map(line => {
|
|
2812
|
+
const [sha, msg, author, refs] = line.split('|||')
|
|
2813
|
+
return { sha: sha?.slice(0, 7), message: msg?.trim(), author: author?.trim(), refs: refs?.trim() || null }
|
|
2814
|
+
})
|
|
2815
|
+
} catch { /* non-fatal — local git not available */ }
|
|
2816
|
+
|
|
2817
|
+
// 0b. PRs opened or merged today (across my tasks)
|
|
2818
|
+
// We'll collect these from task activity below
|
|
2819
|
+
|
|
2195
2820
|
// 1. Who am I?
|
|
2196
2821
|
const meRes = await api.get('/api/auth/me')
|
|
2197
2822
|
const me = meRes?.data?.user || {}
|
|
@@ -2306,10 +2931,14 @@ Call this when the developer says "generate standup", "what did I do yesterday",
|
|
|
2306
2931
|
const todayFocus = Object.entries(byAssignee)
|
|
2307
2932
|
.map(([person, tasks]) => ` ${person}: ${tasks.map(t => `${t.key} (${t.priority})`).join(', ')}`)
|
|
2308
2933
|
.join('\n')
|
|
2934
|
+
const gitCommitsSummary = gitCommitsToday.length > 0
|
|
2935
|
+
? gitCommitsToday.slice(0, 5).map(c => ` \`${c.sha}\` ${c.message}`).join('\n')
|
|
2936
|
+
: null
|
|
2309
2937
|
const copyPaste = [
|
|
2310
2938
|
`**Yesterday:** ${yesterdayItems.length
|
|
2311
2939
|
? yesterdayItems.map(i => `${i.key} — ${i.activity.slice(0,2).join('; ')}`).join('. ')
|
|
2312
2940
|
: 'No recorded activity in last 24 h.'}`,
|
|
2941
|
+
gitCommitsSummary ? `**Commits today:**\n${gitCommitsSummary}` : null,
|
|
2313
2942
|
`**Today:**\n${todayFocus || ' No active tasks.'}`,
|
|
2314
2943
|
blockers.length ? `**Blockers:** ${blockers.join(' | ')}` : '**Blockers:** None.',
|
|
2315
2944
|
risks.length ? `**Risks:** ${risks.join(' | ')}` : null,
|
|
@@ -2317,6 +2946,7 @@ Call this when the developer says "generate standup", "what did I do yesterday",
|
|
|
2317
2946
|
|
|
2318
2947
|
return text({
|
|
2319
2948
|
me: { name: me.name, email: me.email, role: me.role },
|
|
2949
|
+
gitCommitsToday: gitCommitsToday.length ? gitCommitsToday : '(no local git commits found since midnight)',
|
|
2320
2950
|
yesterday: yesterdayItems.length ? yesterdayItems : '(no activity logged in last 24 h)',
|
|
2321
2951
|
today: {
|
|
2322
2952
|
byAssignee: Object.fromEntries(
|
|
@@ -2349,80 +2979,150 @@ Call this when the developer says "generate standup", "what did I do yesterday",
|
|
|
2349
2979
|
'what_should_i_work_on',
|
|
2350
2980
|
`Recommend the single most important task to work on right now.
|
|
2351
2981
|
|
|
2352
|
-
Scores all assigned tasks by: priority, urgency (PR needs fix), column state, branch readiness
|
|
2353
|
-
|
|
2982
|
+
Scores all assigned tasks by: priority, urgency (PR needs fix), column state, branch readiness,
|
|
2983
|
+
files you recently edited (git log), and PRs waiting for your review.
|
|
2984
|
+
|
|
2985
|
+
Also surfaces:
|
|
2986
|
+
- Pending approval reviews assigned to you
|
|
2987
|
+
- Tasks you can unblock (blockers that are now done)
|
|
2354
2988
|
|
|
2355
2989
|
Use this when the developer says "what should I do next", "I'm free", or "what's my priority".`,
|
|
2356
2990
|
{},
|
|
2357
2991
|
async () => {
|
|
2358
|
-
|
|
2359
|
-
|
|
2360
|
-
|
|
2992
|
+
// Fetch my tasks + identity in parallel
|
|
2993
|
+
const [tasksRes, meRes] = await Promise.all([
|
|
2994
|
+
api.get('/api/users/me/tasks'),
|
|
2995
|
+
api.get('/api/auth/me'),
|
|
2996
|
+
])
|
|
2997
|
+
if (!tasksRes?.success) return errorText('Could not fetch tasks')
|
|
2998
|
+
const tasks = (tasksRes.data.tasks || []).filter(t => t.column !== 'done')
|
|
2999
|
+
const meId = meRes?.data?.user?._id || ''
|
|
2361
3000
|
|
|
2362
|
-
|
|
3001
|
+
// Recently edited files from local git (last 2 days) — boosts tasks claiming those files
|
|
3002
|
+
let recentFiles = new Set()
|
|
3003
|
+
try {
|
|
3004
|
+
const out = execSync(
|
|
3005
|
+
'git log --since=2.days.ago --name-only --format="" --diff-filter=ACM 2>/dev/null',
|
|
3006
|
+
{ cwd: process.cwd(), encoding: 'utf8', timeout: 3000 }
|
|
3007
|
+
)
|
|
3008
|
+
out.split('\n').map(s => s.trim()).filter(Boolean).forEach(f => recentFiles.add(f))
|
|
3009
|
+
} catch { /* non-fatal — local git may not be available */ }
|
|
3010
|
+
|
|
3011
|
+
// Pending approval reviews assigned to me (across all projects)
|
|
3012
|
+
let pendingReviews = []
|
|
3013
|
+
try {
|
|
3014
|
+
const projectsRes = await api.get('/api/projects')
|
|
3015
|
+
const projects = projectsRes?.data?.projects || []
|
|
3016
|
+
const boards = await Promise.all(
|
|
3017
|
+
projects.map(p => api.get(`/api/projects/${p._id}`).catch(() => null))
|
|
3018
|
+
)
|
|
3019
|
+
for (const r of boards.filter(Boolean)) {
|
|
3020
|
+
for (const t of (r?.data?.project?.tasks || [])) {
|
|
3021
|
+
const apv = (t.approvals || []).find(a => a.state === 'pending')
|
|
3022
|
+
if (!apv) continue
|
|
3023
|
+
const rid = apv?.reviewer?._id || apv?.reviewer
|
|
3024
|
+
if (!meId || !rid || String(rid) !== String(meId)) continue
|
|
3025
|
+
const waitMs = apv.requestedAt ? Date.now() - new Date(apv.requestedAt).getTime() : 0
|
|
3026
|
+
pendingReviews.push({
|
|
3027
|
+
taskId: t._id,
|
|
3028
|
+
key: t.key,
|
|
3029
|
+
title: t.title,
|
|
3030
|
+
waitingHours: Math.round(waitMs / 3600000 * 10) / 10,
|
|
3031
|
+
submittedBy: apv?.requestedBy?.name || apv?.requestedBy?.email || 'unknown',
|
|
3032
|
+
})
|
|
3033
|
+
}
|
|
3034
|
+
}
|
|
3035
|
+
pendingReviews.sort((a, b) => b.waitingHours - a.waitingHours)
|
|
3036
|
+
} catch { /* non-fatal */ }
|
|
3037
|
+
|
|
3038
|
+
if (!tasks.length && !pendingReviews.length) {
|
|
3039
|
+
return text({ recommendation: 'No open tasks or pending reviews. Ask your project lead for work.' })
|
|
3040
|
+
}
|
|
2363
3041
|
|
|
2364
3042
|
function scoreTask(t) {
|
|
2365
3043
|
const PRIORITY = { critical: 100, high: 60, medium: 30, low: 10 }
|
|
2366
3044
|
let score = PRIORITY[t.priority] ?? 0
|
|
2367
3045
|
if (t.github?.changesRequestedAt) score += 80 // PR needs fix — most urgent
|
|
2368
|
-
if (t.column === 'in_progress') score += 25 // already in flight
|
|
2369
|
-
if (t.column === 'in_review') score += 10 // waiting on reviewer
|
|
3046
|
+
if (t.column === 'in_progress') score += 25 // already in flight
|
|
3047
|
+
if (t.column === 'in_review') score += 10 // waiting on reviewer
|
|
2370
3048
|
if (t.column === 'todo') score += 15 // ready to start
|
|
2371
3049
|
if (t.column === 'backlog') score -= 10 // not yet planned
|
|
2372
3050
|
if (t.parkNote?.parkedAt) score -= 20 // intentionally paused
|
|
2373
|
-
if (t.
|
|
3051
|
+
if (t.blockedBy?.some(b => typeof b === 'object' ? b.column !== 'done' : false)) score -= 50
|
|
3052
|
+
if (t.github?.headBranch) score += 5
|
|
3053
|
+
// Boost tasks overlapping files you've been editing recently
|
|
3054
|
+
if (recentFiles.size > 0 && (t.claimedFiles || []).some(f => recentFiles.has(f))) score += 20
|
|
2374
3055
|
return score
|
|
2375
3056
|
}
|
|
2376
3057
|
|
|
2377
|
-
const scored = tasks
|
|
2378
|
-
.map(t => ({ ...t, _score: scoreTask(t) }))
|
|
2379
|
-
|
|
2380
|
-
|
|
2381
|
-
|
|
2382
|
-
|
|
2383
|
-
|
|
2384
|
-
|
|
2385
|
-
|
|
2386
|
-
|
|
2387
|
-
|
|
2388
|
-
|
|
2389
|
-
|
|
2390
|
-
|
|
2391
|
-
|
|
2392
|
-
|
|
2393
|
-
|
|
2394
|
-
|
|
2395
|
-
|
|
2396
|
-
|
|
2397
|
-
|
|
2398
|
-
|
|
2399
|
-
|
|
2400
|
-
|
|
2401
|
-
|
|
2402
|
-
|
|
2403
|
-
|
|
2404
|
-
|
|
2405
|
-
|
|
2406
|
-
|
|
2407
|
-
|
|
2408
|
-
|
|
2409
|
-
|
|
2410
|
-
|
|
2411
|
-
|
|
2412
|
-
|
|
2413
|
-
|
|
2414
|
-
|
|
2415
|
-
|
|
2416
|
-
|
|
2417
|
-
|
|
2418
|
-
|
|
2419
|
-
|
|
2420
|
-
|
|
2421
|
-
|
|
3058
|
+
const scored = tasks.length
|
|
3059
|
+
? tasks.map(t => ({ ...t, _score: scoreTask(t) })).sort((a, b) => b._score - a._score)
|
|
3060
|
+
: []
|
|
3061
|
+
|
|
3062
|
+
// If pending reviews are waiting and outrank the top task, surface review first
|
|
3063
|
+
const longestWaitingReview = pendingReviews[0]
|
|
3064
|
+
const topTask = scored[0]
|
|
3065
|
+
|
|
3066
|
+
let recommendation, isReview = false
|
|
3067
|
+
if (longestWaitingReview && (!topTask || longestWaitingReview.waitingHours > 4)) {
|
|
3068
|
+
isReview = true
|
|
3069
|
+
recommendation = {
|
|
3070
|
+
type: 'review',
|
|
3071
|
+
taskId: longestWaitingReview.taskId,
|
|
3072
|
+
key: longestWaitingReview.key,
|
|
3073
|
+
title: longestWaitingReview.title,
|
|
3074
|
+
reason: `Approval review waiting ${longestWaitingReview.waitingHours}h — ${longestWaitingReview.submittedBy} is blocked on you.`,
|
|
3075
|
+
nextStep: `Call get_review_bundle with taskId="${longestWaitingReview.taskId}" to start the review.`,
|
|
3076
|
+
}
|
|
3077
|
+
} else if (topTask) {
|
|
3078
|
+
let reason, nextStep
|
|
3079
|
+
if (topTask.github?.changesRequestedAt) {
|
|
3080
|
+
reason = `PR has changes requested — this blocks the merge and the reviewer is waiting.`
|
|
3081
|
+
nextStep = `Call fix_pr_feedback with taskAId="${topTask._id}" to get the full fix plan.`
|
|
3082
|
+
} else if (topTask.column === 'in_progress' && topTask.github?.headBranch) {
|
|
3083
|
+
const fileHint = recentFiles.size > 0 && (topTask.claimedFiles || []).some(f => recentFiles.has(f))
|
|
3084
|
+
? ' (matches your recent git activity)'
|
|
3085
|
+
: ''
|
|
3086
|
+
reason = `Already in progress on branch ${topTask.github.headBranch} — keep the momentum going.${fileHint}`
|
|
3087
|
+
nextStep = `Continue coding on ${topTask.github.headBranch}. When commits are pushed, call raise_pr.`
|
|
3088
|
+
} else if (topTask.column === 'in_progress' && !topTask.github?.headBranch) {
|
|
3089
|
+
reason = `In progress but no branch yet — needs a branch to start committing.`
|
|
3090
|
+
nextStep = `Call create_branch with taskId="${topTask._id}" and projectId="${topTask.project?._id || topTask.project}".`
|
|
3091
|
+
} else if (topTask.column === 'todo') {
|
|
3092
|
+
reason = `Highest priority unstarted task — ready to kick off.`
|
|
3093
|
+
nextStep = `Call kickoff_task with taskId="${topTask._id}" to read the plan and move to in_progress.`
|
|
3094
|
+
} else {
|
|
3095
|
+
reason = `Highest priority available task.`
|
|
3096
|
+
nextStep = `Call get_task_context with taskId="${topTask._id}" for full details.`
|
|
3097
|
+
}
|
|
3098
|
+
recommendation = {
|
|
3099
|
+
type: 'task',
|
|
3100
|
+
taskId: topTask._id,
|
|
3101
|
+
key: topTask.key,
|
|
3102
|
+
title: topTask.title,
|
|
3103
|
+
priority: topTask.priority,
|
|
3104
|
+
column: topTask.column,
|
|
3105
|
+
project: topTask.project?.name || topTask.project,
|
|
2422
3106
|
reason,
|
|
2423
3107
|
nextStep,
|
|
2424
|
-
}
|
|
2425
|
-
|
|
3108
|
+
}
|
|
3109
|
+
}
|
|
3110
|
+
|
|
3111
|
+
return text({
|
|
3112
|
+
recommendation,
|
|
3113
|
+
queue: scored.slice(0, 3).map((t, i) => ({
|
|
3114
|
+
rank: i + 1,
|
|
3115
|
+
key: t.key,
|
|
3116
|
+
title: t.title,
|
|
3117
|
+
priority: t.priority,
|
|
3118
|
+
column: t.column,
|
|
3119
|
+
score: t._score,
|
|
3120
|
+
branch: t.github?.headBranch || null,
|
|
3121
|
+
needsFix: !!t.github?.changesRequestedAt,
|
|
3122
|
+
recentFileMatch: recentFiles.size > 0 && (t.claimedFiles || []).some(f => recentFiles.has(f)),
|
|
3123
|
+
})),
|
|
3124
|
+
pendingReviews: pendingReviews.length > 0 ? pendingReviews : null,
|
|
3125
|
+
recentFilesDetected: recentFiles.size,
|
|
2426
3126
|
})
|
|
2427
3127
|
}
|
|
2428
3128
|
)
|
|
@@ -2556,6 +3256,28 @@ Use this when the developer or team lead asks "how's the board?", "any blockers?
|
|
|
2556
3256
|
suggestion: `Use update_task to add a readmeMarkdown implementation plan`,
|
|
2557
3257
|
})
|
|
2558
3258
|
}
|
|
3259
|
+
|
|
3260
|
+
// Stale PR — open 5+ days with no approval or changes-requested (forgotten in review queue)
|
|
3261
|
+
if (
|
|
3262
|
+
t.column === 'in_review' &&
|
|
3263
|
+
t.github?.prNumber &&
|
|
3264
|
+
ageDays > 5 &&
|
|
3265
|
+
!t.github?.changesRequestedAt // already flagged above if changes were requested
|
|
3266
|
+
) {
|
|
3267
|
+
flags.push({
|
|
3268
|
+
severity: 'warning',
|
|
3269
|
+
task: t.key,
|
|
3270
|
+
title: t.title,
|
|
3271
|
+
project: board.name,
|
|
3272
|
+
assignee: assignees,
|
|
3273
|
+
issue: `PR #${t.github.prNumber} has been open ${Math.floor(ageDays)} days with no reviewer action — likely forgotten`,
|
|
3274
|
+
suggestion: t.github?.prUrl
|
|
3275
|
+
? `Check PR at ${t.github.prUrl} — ping the reviewer or close if abandoned`
|
|
3276
|
+
: `Check GitHub for PR #${t.github.prNumber} — ping the reviewer`,
|
|
3277
|
+
prNumber: t.github.prNumber,
|
|
3278
|
+
prUrl: t.github.prUrl || null,
|
|
3279
|
+
})
|
|
3280
|
+
}
|
|
2559
3281
|
}
|
|
2560
3282
|
}
|
|
2561
3283
|
|
|
@@ -2582,21 +3304,138 @@ Use this when the developer or team lead asks "how's the board?", "any blockers?
|
|
|
2582
3304
|
|
|
2583
3305
|
server.tool(
|
|
2584
3306
|
'end_of_day',
|
|
2585
|
-
`End-of-day wrap-up
|
|
2586
|
-
|
|
2587
|
-
|
|
3307
|
+
`End-of-day wrap-up — call with confirmed=false first to get a checklist, then confirmed=true to park tasks.
|
|
3308
|
+
|
|
3309
|
+
Phase 1 (confirmed=false — default):
|
|
3310
|
+
- Scans local git for uncommitted changes and unpushed commits
|
|
3311
|
+
- Fetches your in_progress tasks
|
|
3312
|
+
- Detects stale PRs (open 5+ days)
|
|
3313
|
+
- Returns a full action checklist so nothing is left behind
|
|
3314
|
+
|
|
3315
|
+
Phase 2 (confirmed=true):
|
|
3316
|
+
- Parks each in_progress task with your notes
|
|
3317
|
+
- Releases file claims on parked tasks
|
|
2588
3318
|
|
|
2589
|
-
After this tool runs, post a short comment on each parked task via add_task_comment summarising the day.
|
|
2590
3319
|
Use this when the developer says "wrap up", "end of day", or "I'm done for today".`,
|
|
2591
3320
|
{
|
|
3321
|
+
confirmed: z.boolean().optional().default(false).describe('Set true to actually park the tasks after reviewing the checklist'),
|
|
2592
3322
|
taskNotes: z.array(z.object({
|
|
2593
3323
|
taskId: z.string().describe("Task's MongoDB ObjectId"),
|
|
2594
3324
|
summary: z.string().describe('What was done today — be specific, include file names'),
|
|
2595
3325
|
remaining: z.string().describe('What is left to do next session'),
|
|
2596
|
-
blockers: z.string().optional().describe('Anything blocking'),
|
|
2597
|
-
})).describe('Park notes for each in_progress task.
|
|
3326
|
+
blockers: z.string().optional().describe('Anything blocking progress'),
|
|
3327
|
+
})).optional().describe('Park notes for each in_progress task. Required when confirmed=true.'),
|
|
2598
3328
|
},
|
|
2599
|
-
async ({ taskNotes }) => {
|
|
3329
|
+
async ({ confirmed = false, taskNotes = [] }) => {
|
|
3330
|
+
// Always fetch in-progress tasks and git state
|
|
3331
|
+
const myTasksRes = await api.get('/api/users/me/tasks')
|
|
3332
|
+
const inProgress = (myTasksRes?.data?.tasks || []).filter(t => t.column === 'in_progress')
|
|
3333
|
+
|
|
3334
|
+
// Local git state
|
|
3335
|
+
let gitState = { uncommitted: [], unpushedBranches: [], currentBranch: null }
|
|
3336
|
+
try {
|
|
3337
|
+
const cwd = process.cwd()
|
|
3338
|
+
const repoRoot = findRepoRoot(cwd)
|
|
3339
|
+
if (repoRoot) {
|
|
3340
|
+
// Uncommitted changes
|
|
3341
|
+
const statusOut = execSync('git status --short 2>/dev/null', { cwd: repoRoot, encoding: 'utf8', timeout: 3000 })
|
|
3342
|
+
gitState.uncommitted = statusOut.trim().split('\n').filter(Boolean)
|
|
3343
|
+
// Current branch
|
|
3344
|
+
try {
|
|
3345
|
+
gitState.currentBranch = execSync('git rev-parse --abbrev-ref HEAD 2>/dev/null', { cwd: repoRoot, encoding: 'utf8', timeout: 2000 }).trim()
|
|
3346
|
+
} catch { /* non-fatal */ }
|
|
3347
|
+
// Unpushed commits on current branch
|
|
3348
|
+
if (gitState.currentBranch && gitState.currentBranch !== 'HEAD') {
|
|
3349
|
+
try {
|
|
3350
|
+
const unpushed = execSync(
|
|
3351
|
+
`git log origin/${gitState.currentBranch}..HEAD --format="%h %s" 2>/dev/null || git log HEAD -3 --format="%h %s"`,
|
|
3352
|
+
{ cwd: repoRoot, encoding: 'utf8', timeout: 3000 }
|
|
3353
|
+
)
|
|
3354
|
+
gitState.unpushedBranch = unpushed.trim().split('\n').filter(Boolean)
|
|
3355
|
+
} catch { /* non-fatal */ }
|
|
3356
|
+
}
|
|
3357
|
+
}
|
|
3358
|
+
} catch { /* non-fatal */ }
|
|
3359
|
+
|
|
3360
|
+
// Stale PRs (in_review 5+ days)
|
|
3361
|
+
const now = Date.now()
|
|
3362
|
+
const stalePRs = inProgress
|
|
3363
|
+
.concat((myTasksRes?.data?.tasks || []).filter(t => t.column === 'in_review'))
|
|
3364
|
+
.filter(t => {
|
|
3365
|
+
if (!t.github?.prNumber) return false
|
|
3366
|
+
const ageDays = (now - new Date(t.updatedAt).getTime()) / (1000 * 60 * 60 * 24)
|
|
3367
|
+
return ageDays > 5
|
|
3368
|
+
})
|
|
3369
|
+
.map(t => ({ key: t.key, title: t.title, prNumber: t.github.prNumber, prUrl: t.github.prUrl || null }))
|
|
3370
|
+
|
|
3371
|
+
if (!confirmed) {
|
|
3372
|
+
// Phase 1: checklist
|
|
3373
|
+
const checklist = []
|
|
3374
|
+
if (gitState.uncommitted.length > 0) {
|
|
3375
|
+
checklist.push({
|
|
3376
|
+
urgent: true,
|
|
3377
|
+
action: 'Commit uncommitted changes',
|
|
3378
|
+
detail: `${gitState.uncommitted.length} file(s) modified but not committed`,
|
|
3379
|
+
files: gitState.uncommitted.slice(0, 10),
|
|
3380
|
+
cmd: `git add -p && git commit -m "wip: end of day checkpoint"`,
|
|
3381
|
+
})
|
|
3382
|
+
}
|
|
3383
|
+
if (gitState.unpushedBranch?.length > 0) {
|
|
3384
|
+
checklist.push({
|
|
3385
|
+
urgent: true,
|
|
3386
|
+
action: 'Push unpushed commits',
|
|
3387
|
+
detail: `${gitState.unpushedBranch.length} commit(s) not pushed to remote`,
|
|
3388
|
+
commits: gitState.unpushedBranch,
|
|
3389
|
+
cmd: `git push origin ${gitState.currentBranch}`,
|
|
3390
|
+
})
|
|
3391
|
+
}
|
|
3392
|
+
if (inProgress.length > 0) {
|
|
3393
|
+
checklist.push({
|
|
3394
|
+
urgent: false,
|
|
3395
|
+
action: 'Park in-progress tasks with notes',
|
|
3396
|
+
detail: `${inProgress.length} task(s) need a park note so work can resume cleanly`,
|
|
3397
|
+
tasks: inProgress.map(t => ({ taskId: t._id, key: t.key, title: t.title, branch: t.github?.headBranch || null })),
|
|
3398
|
+
nextStep: `Fill taskNotes for each task above, then call end_of_day with confirmed=true`,
|
|
3399
|
+
})
|
|
3400
|
+
}
|
|
3401
|
+
if (stalePRs.length > 0) {
|
|
3402
|
+
checklist.push({
|
|
3403
|
+
urgent: false,
|
|
3404
|
+
action: 'Follow up on stale PRs',
|
|
3405
|
+
detail: `${stalePRs.length} PR(s) open 5+ days with no reviewer action`,
|
|
3406
|
+
prs: stalePRs,
|
|
3407
|
+
})
|
|
3408
|
+
}
|
|
3409
|
+
|
|
3410
|
+
return text({
|
|
3411
|
+
checklist,
|
|
3412
|
+
gitState: {
|
|
3413
|
+
currentBranch: gitState.currentBranch,
|
|
3414
|
+
uncommitted: gitState.uncommitted.length,
|
|
3415
|
+
unpushedCommits: gitState.unpushedBranch?.length || 0,
|
|
3416
|
+
},
|
|
3417
|
+
inProgressTasks: inProgress.map(t => ({ taskId: t._id, key: t.key, title: t.title })),
|
|
3418
|
+
stalePRs: stalePRs.length ? stalePRs : null,
|
|
3419
|
+
message: checklist.length === 0
|
|
3420
|
+
? '✅ All clear — no uncommitted work, no unpushed commits, no in-progress tasks. Good day!'
|
|
3421
|
+
: `${checklist.filter(c => c.urgent).length} urgent item(s) need attention before logging off.`,
|
|
3422
|
+
nextStep: inProgress.length > 0
|
|
3423
|
+
? `Fill taskNotes for your in-progress tasks then call end_of_day with confirmed=true and taskNotes=[...].`
|
|
3424
|
+
: checklist.some(c => c.urgent)
|
|
3425
|
+
? `Handle the urgent items above (commit/push), then you are done.`
|
|
3426
|
+
: `Nothing to park. You are good to go.`,
|
|
3427
|
+
})
|
|
3428
|
+
}
|
|
3429
|
+
|
|
3430
|
+
// Phase 2: park tasks
|
|
3431
|
+
if (!taskNotes.length && inProgress.length > 0) {
|
|
3432
|
+
return text({
|
|
3433
|
+
blocked: true,
|
|
3434
|
+
message: `confirmed=true requires taskNotes for each in-progress task. Call end_of_day with confirmed=false first to get the task list.`,
|
|
3435
|
+
inProgressTasks: inProgress.map(t => ({ taskId: t._id, key: t.key, title: t.title })),
|
|
3436
|
+
})
|
|
3437
|
+
}
|
|
3438
|
+
|
|
2600
3439
|
const results = []
|
|
2601
3440
|
for (const note of taskNotes) {
|
|
2602
3441
|
try {
|
|
@@ -2614,7 +3453,15 @@ Use this when the developer says "wrap up", "end of day", or "I'm done for today
|
|
|
2614
3453
|
results.push({ taskId: note.taskId, success: false, message: e.message })
|
|
2615
3454
|
}
|
|
2616
3455
|
}
|
|
2617
|
-
|
|
3456
|
+
|
|
3457
|
+
return text({
|
|
3458
|
+
wrapped: results,
|
|
3459
|
+
stalePRs: stalePRs.length ? stalePRs : null,
|
|
3460
|
+
gitReminder: gitState.uncommitted.length > 0 || gitState.unpushedBranch?.length > 0
|
|
3461
|
+
? `⚠️ You still have ${gitState.uncommitted.length} uncommitted file(s) and ${gitState.unpushedBranch?.length || 0} unpushed commit(s). Push before logging off!`
|
|
3462
|
+
: null,
|
|
3463
|
+
message: `${results.filter(r => r.success).length}/${results.length} task(s) parked successfully.`,
|
|
3464
|
+
})
|
|
2618
3465
|
}
|
|
2619
3466
|
)
|
|
2620
3467
|
|
|
@@ -2643,13 +3490,36 @@ Use this when a developer says "start task", "brief me on", or "what do I need t
|
|
|
2643
3490
|
repoPath: z.string().optional().describe('Absolute path to the local git repo (defaults to MCP process working directory). Used to write cursor rules file.'),
|
|
2644
3491
|
agentRole: z.enum(['builder', 'reviewer', 'scout', 'coordinator']).optional()
|
|
2645
3492
|
.describe('Set the agent role for this task session. Role-specific behavioral constraints are injected into cursor rules. builder=implements code, scout=reads/analyzes only, reviewer=reviews PRs only, coordinator=decomposes work.'),
|
|
3493
|
+
files: z.array(z.string()).optional()
|
|
3494
|
+
.describe('REQUIRED for agentRole=builder. Files this builder will exclusively edit (e.g. ["server/routes/tasks.js", "client/src/App.jsx"]). Ownership is claimed atomically during kickoff — kickoff is blocked until files are provided. Read the implementation plan first (confirmed=false), identify your files, then call confirmed=true with this list.'),
|
|
2646
3495
|
},
|
|
2647
|
-
async ({ taskId, confirmed = false, repoPath, agentRole }) => {
|
|
3496
|
+
async ({ taskId, confirmed = false, repoPath, agentRole, files }) => {
|
|
2648
3497
|
trackTaskActivity(taskId, 'kickoff_task')
|
|
2649
3498
|
const taskRes = await api.get(`/api/tasks/${taskId}`)
|
|
2650
3499
|
if (!taskRes?.success) return errorText('Task not found')
|
|
2651
3500
|
const task = taskRes.data.task
|
|
2652
3501
|
|
|
3502
|
+
// Warn if task has unresolved blockers
|
|
3503
|
+
const activeBlockers = (task.blockedBy || []).filter(b => {
|
|
3504
|
+
if (typeof b === 'object' && b !== null && b.column !== undefined) return b.column !== 'done'
|
|
3505
|
+
return false // if not populated, can't check — don't block
|
|
3506
|
+
})
|
|
3507
|
+
if (activeBlockers.length > 0) {
|
|
3508
|
+
return text({
|
|
3509
|
+
blocked: true,
|
|
3510
|
+
reason: 'This task has unresolved dependencies that must complete first.',
|
|
3511
|
+
blockers: activeBlockers.map(b => ({
|
|
3512
|
+
taskKey: b.key || b._id,
|
|
3513
|
+
title: b.title || '(unknown)',
|
|
3514
|
+
column: b.column || 'unknown',
|
|
3515
|
+
})),
|
|
3516
|
+
message: `⛔ Cannot kick off ${task.key} — ${activeBlockers.length} blocker(s) must complete first.`,
|
|
3517
|
+
nextStep: activeBlockers.length === 1
|
|
3518
|
+
? `Work on ${activeBlockers[0].key || 'the blocker task'} first. Call remove_blocker when it's done.`
|
|
3519
|
+
: `Complete the blockers listed above. Call remove_blocker for each one when done.`,
|
|
3520
|
+
})
|
|
3521
|
+
}
|
|
3522
|
+
|
|
2653
3523
|
// Include developer name in branch so it's clear who created it
|
|
2654
3524
|
let devSlug = ''
|
|
2655
3525
|
try {
|
|
@@ -2970,6 +3840,59 @@ Use this when a developer says "start task", "brief me on", or "what do I need t
|
|
|
2970
3840
|
})
|
|
2971
3841
|
}
|
|
2972
3842
|
|
|
3843
|
+
// ── Builder file ownership gate ───────────────────────────────────────────
|
|
3844
|
+
// Structural equivalent of BridgeSwarm's file-lock: a builder cannot complete
|
|
3845
|
+
// kickoff without declaring which files it owns. This prevents two agents editing
|
|
3846
|
+
// the same file because both kickoffs either succeed (different files) or one is
|
|
3847
|
+
// blocked (conflict detected before a single line of code is written).
|
|
3848
|
+
if (agentRole === 'builder') {
|
|
3849
|
+
if (!files?.length) {
|
|
3850
|
+
// Hard block — return plan context so agent can identify files, then retry
|
|
3851
|
+
return text({
|
|
3852
|
+
requiresFileClaim: true,
|
|
3853
|
+
taskKey: task.key,
|
|
3854
|
+
title: task.title,
|
|
3855
|
+
implementationPlan: task.readmeMarkdown || '(no plan — write one in the Plan tab first)',
|
|
3856
|
+
scoutReport: task.scoutReport || null,
|
|
3857
|
+
subtasks: (task.subtasks || []).map(s => ({ title: s.title, done: s.done })),
|
|
3858
|
+
message: [
|
|
3859
|
+
`⛔ Builder kickoff blocked — file ownership required.`,
|
|
3860
|
+
``,
|
|
3861
|
+
`Read the implementation plan above and identify EVERY file you will create or modify.`,
|
|
3862
|
+
`Then call kickoff_task again with confirmed=true, agentRole="builder", and files=[...].`,
|
|
3863
|
+
``,
|
|
3864
|
+
`This is enforced structurally: kickoff cannot complete until files are declared.`,
|
|
3865
|
+
`It prevents two agents from editing the same file simultaneously.`,
|
|
3866
|
+
].join('\n'),
|
|
3867
|
+
nextStep: `kickoff_task(taskId="${taskId}", confirmed=true, agentRole="builder", files=["path/to/file1", "path/to/file2"])`,
|
|
3868
|
+
})
|
|
3869
|
+
}
|
|
3870
|
+
|
|
3871
|
+
// Files provided — attempt atomic claim before proceeding with kickoff
|
|
3872
|
+
const claimRes = await api.post(`/api/tasks/${taskId}/files/claim`, { files }).catch(() => null)
|
|
3873
|
+
if (!claimRes?.success) {
|
|
3874
|
+
if (claimRes?.conflicts) {
|
|
3875
|
+
return text({
|
|
3876
|
+
blocked: true,
|
|
3877
|
+
reason: `File ownership conflict — another in-progress task already owns one or more of your files.`,
|
|
3878
|
+
conflicts: claimRes.conflicts,
|
|
3879
|
+
message: [
|
|
3880
|
+
`⛔ Builder kickoff blocked — file conflict detected.`,
|
|
3881
|
+
``,
|
|
3882
|
+
`The files listed in conflicts[] are already claimed by another task.`,
|
|
3883
|
+
`Options:`,
|
|
3884
|
+
`1. Coordinate with the other task's assignee and wait for them to release the files.`,
|
|
3885
|
+
`2. Remove the conflicting files from your list if you don't actually need them.`,
|
|
3886
|
+
`3. Call release_files on the other task if it is no longer active (coordinator only).`,
|
|
3887
|
+
].join('\n'),
|
|
3888
|
+
nextStep: `Resolve the conflict, then retry: kickoff_task(taskId="${taskId}", confirmed=true, agentRole="builder", files=[...updated list...])`,
|
|
3889
|
+
})
|
|
3890
|
+
}
|
|
3891
|
+
return errorText(claimRes?.message || 'Could not claim files — retry kickoff_task with confirmed=true and files=[...]')
|
|
3892
|
+
}
|
|
3893
|
+
// Files claimed successfully — kickoff continues below with ownership established
|
|
3894
|
+
}
|
|
3895
|
+
|
|
2973
3896
|
// ── #1 Preflight: check dirty tree before writing cursor rules / moving task ──
|
|
2974
3897
|
{
|
|
2975
3898
|
const pCwd = repoPath || process.cwd()
|
|
@@ -3104,7 +4027,9 @@ Use this when a developer says "start task", "brief me on", or "what do I need t
|
|
|
3104
4027
|
const baseRules = hasCursorRules ? task.cursorRules : ''
|
|
3105
4028
|
const mergedRules = [baseRules, typeExtraRules].filter(Boolean).join('\n\n') || '(Follow role constraints above.)'
|
|
3106
4029
|
if (hasCursorRules || agentRole || typeExtraRules) {
|
|
3107
|
-
|
|
4030
|
+
// Pass claimedFiles so the builder cursor rule lists exactly which files are owned
|
|
4031
|
+
const effectiveClaimedFiles = files?.length ? files : (task.claimedFiles || [])
|
|
4032
|
+
cursorRulesFile = writeCursorRulesFile(task.key, mergedRules, repoPath, agentRole || null, effectiveClaimedFiles)
|
|
3108
4033
|
}
|
|
3109
4034
|
|
|
3110
4035
|
// Dynamically generate .cursor/agents, .cursor/skills, .cursor/commands
|
|
@@ -3166,6 +4091,15 @@ After \`request_human_input\`: STOP, show the question in chat, wait for reply,
|
|
|
3166
4091
|
}
|
|
3167
4092
|
api.patch(`/api/tasks/${taskId}`, workspacePatch).catch(() => {/* non-fatal */})
|
|
3168
4093
|
|
|
4094
|
+
// Write .internaltool-active-task so the Claude Code hook knows a task is active
|
|
4095
|
+
try {
|
|
4096
|
+
writeFileSync(
|
|
4097
|
+
join(process.cwd(), '.internaltool-active-task'),
|
|
4098
|
+
JSON.stringify({ taskId, taskKey: task.key, title: task.title, agentRole: agentRole || null, kickedOffAt: new Date().toISOString() }, null, 2),
|
|
4099
|
+
'utf8'
|
|
4100
|
+
)
|
|
4101
|
+
} catch { /* non-fatal — hook gracefully degrades if file can't be written */ }
|
|
4102
|
+
|
|
3169
4103
|
// Auto-scan workspace so the Workspace tab in the UI is fresh immediately after kickoff.
|
|
3170
4104
|
if (kickoffProject) {
|
|
3171
4105
|
runWorkspaceScan(taskId, task, kickoffProject, repoPath).catch(() => {/* non-fatal */})
|
|
@@ -3187,7 +4121,7 @@ After \`request_human_input\`: STOP, show the question in chat, wait for reply,
|
|
|
3187
4121
|
active: true,
|
|
3188
4122
|
agentRole: agentRole || null,
|
|
3189
4123
|
rules: task.cursorRules || null,
|
|
3190
|
-
roleRules: agentRole ?
|
|
4124
|
+
roleRules: agentRole ? getRoleRules(agentRole, task.claimedFiles || []) : null,
|
|
3191
4125
|
instruction: agentRole
|
|
3192
4126
|
? `⚠️ AGENT ROLE: ${agentRole.toUpperCase()} — Follow the role behavioral constraints injected into the cursor rules file. These override default behavior.`
|
|
3193
4127
|
: '⚠️ CURSOR RULES ACTIVE — You MUST follow every rule in the "rules" field for the entire duration of this task.',
|
|
@@ -3363,6 +4297,52 @@ function registerIssueTools(server) {
|
|
|
3363
4297
|
}
|
|
3364
4298
|
|
|
3365
4299
|
function registerApprovalTools(server) {
|
|
4300
|
+
// ── report_test_results ───────────────────────────────────────────────────────
|
|
4301
|
+
server.tool(
|
|
4302
|
+
'report_test_results',
|
|
4303
|
+
`Save a test run result to the task — call this after running the test suite.
|
|
4304
|
+
|
|
4305
|
+
REQUIRED WORKFLOW:
|
|
4306
|
+
1. Run tests using your shell or the run-tests skill
|
|
4307
|
+
2. Parse the output to extract pass/fail counts
|
|
4308
|
+
3. Call this tool to attach the results to the task
|
|
4309
|
+
|
|
4310
|
+
The latest test run is visible in the UI and blocks submit_task_for_approval if failing.
|
|
4311
|
+
Calling this with status="passing" is required before submitting for approval.
|
|
4312
|
+
|
|
4313
|
+
How to determine status:
|
|
4314
|
+
- "passing" → all tests pass (failed === 0)
|
|
4315
|
+
- "partial" → some pass, some fail — needs investigation
|
|
4316
|
+
- "failing" → no tests pass or test runner itself crashed`,
|
|
4317
|
+
{
|
|
4318
|
+
taskId: z.string().describe("Task's MongoDB ObjectId"),
|
|
4319
|
+
status: z.enum(['passing', 'failing', 'partial']).describe('Overall test run outcome'),
|
|
4320
|
+
total: z.number().optional().default(0).describe('Total number of tests run'),
|
|
4321
|
+
passed: z.number().optional().default(0).describe('Tests that passed'),
|
|
4322
|
+
failed: z.number().optional().default(0).describe('Tests that failed'),
|
|
4323
|
+
skipped: z.number().optional().default(0).describe('Tests that were skipped'),
|
|
4324
|
+
summary: z.string().describe('One-line summary, e.g. "42 passed, 2 failed in 3.4s"'),
|
|
4325
|
+
rawOutput: z.string().optional().default('').describe('Raw test output (truncated to 3000 chars) for reviewer context'),
|
|
4326
|
+
},
|
|
4327
|
+
async ({ taskId, status, total = 0, passed = 0, failed = 0, skipped = 0, summary, rawOutput = '' }) => {
|
|
4328
|
+
trackTaskActivity(taskId, 'report_test_results', { summary: `Tests: ${status} (${passed}/${total})` })
|
|
4329
|
+
const res = await api.post(`/api/tasks/${taskId}/test-runs`, {
|
|
4330
|
+
status, total, passed, failed, skipped,
|
|
4331
|
+
summary,
|
|
4332
|
+
raw: rawOutput.slice(0, 3000),
|
|
4333
|
+
})
|
|
4334
|
+
if (!res?.success) return errorText(res?.message || 'Could not save test results')
|
|
4335
|
+
const icon = status === 'passing' ? '✅' : status === 'partial' ? '⚠️' : '❌'
|
|
4336
|
+
return text({
|
|
4337
|
+
saved: true, status, summary, passed, failed, total, skipped,
|
|
4338
|
+
message: `${icon} Test results saved to task.`,
|
|
4339
|
+
nextStep: status === 'passing'
|
|
4340
|
+
? `Tests passing. You can now call submit_task_for_approval or raise_pr.`
|
|
4341
|
+
: `${failed} test(s) failing. Fix them and call report_test_results again before submitting for approval.`,
|
|
4342
|
+
})
|
|
4343
|
+
}
|
|
4344
|
+
)
|
|
4345
|
+
|
|
3366
4346
|
server.tool(
|
|
3367
4347
|
'submit_task_for_approval',
|
|
3368
4348
|
'Create and submit a new approval request on a task. Each request has its own title, plan/readme, and reviewer. Only one request can be pending at a time.',
|
|
@@ -3372,8 +4352,28 @@ function registerApprovalTools(server) {
|
|
|
3372
4352
|
readme: z.string().describe('The plan/markdown describing what you want to do and why (min 80 chars)'),
|
|
3373
4353
|
reviewerId: z.string().describe('User ID of the reviewer'),
|
|
3374
4354
|
},
|
|
3375
|
-
async ({ taskId, title, readme, reviewerId }) =>
|
|
3376
|
-
|
|
4355
|
+
async ({ taskId, title, readme, reviewerId }) => {
|
|
4356
|
+
// Gate: block if latest test run is failing
|
|
4357
|
+
const taskRes = await api.get(`/api/tasks/${taskId}`)
|
|
4358
|
+
if (taskRes?.success) {
|
|
4359
|
+
const latestRun = taskRes.data?.task?.testRuns?.[0]
|
|
4360
|
+
if (latestRun && latestRun.status === 'failing') {
|
|
4361
|
+
return text({
|
|
4362
|
+
blocked: true,
|
|
4363
|
+
reason: 'Latest test run is failing — fix tests before submitting for approval.',
|
|
4364
|
+
testRun: {
|
|
4365
|
+
status: latestRun.status,
|
|
4366
|
+
summary: latestRun.summary,
|
|
4367
|
+
failed: latestRun.failed,
|
|
4368
|
+
total: latestRun.total,
|
|
4369
|
+
runAt: latestRun.runAt,
|
|
4370
|
+
},
|
|
4371
|
+
nextStep: `Fix the failing tests, then call report_test_results with status="passing" before retrying submit_task_for_approval.`,
|
|
4372
|
+
})
|
|
4373
|
+
}
|
|
4374
|
+
}
|
|
4375
|
+
return call(() => api.post(`/api/tasks/${taskId}/approvals`, { title, readme, reviewerId }))
|
|
4376
|
+
}
|
|
3377
4377
|
)
|
|
3378
4378
|
|
|
3379
4379
|
server.tool(
|
|
@@ -3642,26 +4642,30 @@ function findRepoRoot(startPath) {
|
|
|
3642
4642
|
// Prepended to cursor rules when an agentRole is set. Defines what the agent
|
|
3643
4643
|
// CAN and CANNOT do for the duration of the task session.
|
|
3644
4644
|
const ROLE_RULES = {
|
|
3645
|
-
builder: `## Agent Role: BUILDER
|
|
4645
|
+
builder: (claimedFiles = []) => `## Agent Role: BUILDER
|
|
3646
4646
|
|
|
3647
4647
|
You are a BUILDER agent. Your behavioral constraints for this session:
|
|
3648
4648
|
|
|
4649
|
+
**YOUR CLAIMED FILES (exclusive ownership — enforced at kickoff):**
|
|
4650
|
+
${claimedFiles.length > 0
|
|
4651
|
+
? claimedFiles.map(f => `- ${f}`).join('\n')
|
|
4652
|
+
: '- (no files claimed yet — call claim_files before editing anything)'}
|
|
4653
|
+
|
|
3649
4654
|
**ALLOWED:**
|
|
3650
|
-
- Write, modify, and delete code files
|
|
3651
|
-
- Create new files required by the implementation plan
|
|
4655
|
+
- Write, modify, and delete code files listed above under YOUR CLAIMED FILES
|
|
4656
|
+
- Create new files required by the implementation plan (call claim_files to add them first)
|
|
3652
4657
|
- Commit and push changes on the task branch
|
|
3653
4658
|
- Run tests and fix failures
|
|
3654
|
-
- Claim file ownership before editing (use claim_files MCP tool)
|
|
3655
4659
|
|
|
3656
4660
|
**NOT ALLOWED:**
|
|
3657
|
-
-
|
|
4661
|
+
- Edit ANY file not listed in YOUR CLAIMED FILES above without first calling claim_files
|
|
4662
|
+
- Modify files owned by another in-progress task
|
|
3658
4663
|
- Make architectural decisions not in the implementation plan — flag them instead
|
|
3659
|
-
- Edit files outside the task scope without explicit approval
|
|
3660
4664
|
- Merge to main/master/dev directly
|
|
3661
4665
|
|
|
3662
4666
|
**WORK STYLE:**
|
|
3663
|
-
-
|
|
3664
|
-
-
|
|
4667
|
+
- YOUR CLAIMED FILES list is your contract — stay within it
|
|
4668
|
+
- If you discover you need an additional file mid-session, call claim_files first, then edit
|
|
3665
4669
|
- Follow the spec precisely — don't add unrequested features
|
|
3666
4670
|
- Commit atomically with conventional commit format (feat/fix/refactor)`,
|
|
3667
4671
|
|
|
@@ -4008,16 +5012,24 @@ function detectTaskType(task) {
|
|
|
4008
5012
|
}
|
|
4009
5013
|
|
|
4010
5014
|
/** Write task-specific cursor rules to .cursor/rules/<taskKey>.mdc in the local repo root.
|
|
4011
|
-
* When role is provided, role-specific behavioral constraints are prepended.
|
|
4012
|
-
|
|
5015
|
+
* When role is provided, role-specific behavioral constraints are prepended.
|
|
5016
|
+
* claimedFiles is passed through to the builder role template so Cursor injects
|
|
5017
|
+
* the exact file ownership list into every prompt. */
|
|
5018
|
+
function getRoleRules(role, claimedFiles = []) {
|
|
5019
|
+
if (!role || !ROLE_RULES[role]) return null
|
|
5020
|
+
const entry = ROLE_RULES[role]
|
|
5021
|
+
return typeof entry === 'function' ? entry(claimedFiles) : entry
|
|
5022
|
+
}
|
|
5023
|
+
|
|
5024
|
+
function writeCursorRulesFile(taskKey, rulesMarkdown, startPath, role = null, claimedFiles = []) {
|
|
4013
5025
|
try {
|
|
4014
5026
|
const repoRoot = findRepoRoot(startPath)
|
|
4015
5027
|
if (!repoRoot) return null
|
|
4016
5028
|
const rulesDir = join(repoRoot, '.cursor', 'rules')
|
|
4017
5029
|
mkdirSync(rulesDir, { recursive: true })
|
|
4018
5030
|
const filePath = join(rulesDir, `${taskKey.toLowerCase()}.mdc`)
|
|
4019
|
-
const roleSection = role
|
|
4020
|
-
? `${
|
|
5031
|
+
const roleSection = getRoleRules(role, claimedFiles)
|
|
5032
|
+
? `${getRoleRules(role, claimedFiles)}\n\n---\n\n## Task-Specific Rules\n\n`
|
|
4021
5033
|
: ''
|
|
4022
5034
|
const content = `---\ndescription: Task-specific rules for ${taskKey}${role ? ` (role: ${role})` : ''} — auto-generated by InternalTool MCP. Do not edit manually.\nalwaysApply: true\n---\n\n${roleSection}${rulesMarkdown}\n`
|
|
4023
5035
|
writeFileSync(filePath, content, 'utf8')
|
|
@@ -5498,13 +6510,46 @@ Set confirmed=false first to preview the full PR content, then confirmed=true to
|
|
|
5498
6510
|
}
|
|
5499
6511
|
|
|
5500
6512
|
const prTitle = `[${task.key}] ${task.title}`
|
|
6513
|
+
|
|
6514
|
+
// Subtask checklist with completion state
|
|
6515
|
+
const subtaskLines = (task.subtasks || []).map(s =>
|
|
6516
|
+
`- [${s.done ? 'x' : ' '}] ${s.title}`
|
|
6517
|
+
)
|
|
6518
|
+
|
|
6519
|
+
// Recent commits for this branch
|
|
6520
|
+
let commitLines = []
|
|
6521
|
+
try {
|
|
6522
|
+
const commitsRes = await api.get(
|
|
6523
|
+
`/api/projects/${projectId}/github/commits?sha=${encodeURIComponent(headBranch)}&per_page=10`
|
|
6524
|
+
)
|
|
6525
|
+
if (commitsRes?.success && commitsRes.data?.commits?.length) {
|
|
6526
|
+
commitLines = commitsRes.data.commits.slice(0, 10).map(c => {
|
|
6527
|
+
const msg = c.commit?.message?.split('\n')[0] || ''
|
|
6528
|
+
const sha = c.sha?.slice(0, 7) || ''
|
|
6529
|
+
return `- \`${sha}\` ${msg}`
|
|
6530
|
+
})
|
|
6531
|
+
}
|
|
6532
|
+
} catch { /* non-fatal */ }
|
|
6533
|
+
|
|
6534
|
+
// Latest test run
|
|
6535
|
+
const latestTest = task.testRuns?.[0]
|
|
6536
|
+
const testLine = latestTest
|
|
6537
|
+
? latestTest.status === 'passing'
|
|
6538
|
+
? `✅ Tests passing — ${latestTest.summary}`
|
|
6539
|
+
: latestTest.status === 'partial'
|
|
6540
|
+
? `⚠️ Tests partial — ${latestTest.summary}`
|
|
6541
|
+
: `❌ Tests failing — ${latestTest.summary}`
|
|
6542
|
+
: null
|
|
6543
|
+
|
|
5501
6544
|
const bodyParts = [
|
|
5502
6545
|
`## ${task.key}: ${task.title}`,
|
|
5503
6546
|
'',
|
|
5504
|
-
task.description ? `###
|
|
5505
|
-
task.readmeMarkdown ? `### Implementation
|
|
6547
|
+
task.description?.trim() ? `### What & Why\n${task.description.trim()}` : null,
|
|
6548
|
+
task.readmeMarkdown?.trim() ? `### Implementation Plan\n${task.readmeMarkdown.trim()}` : null,
|
|
6549
|
+
subtaskLines.length > 0 ? `### Checklist\n${subtaskLines.join('\n')}` : null,
|
|
6550
|
+
commitLines.length > 0 ? `### Commits\n${commitLines.join('\n')}` : null,
|
|
6551
|
+
testLine ? `### Test Results\n${testLine}` : null,
|
|
5506
6552
|
task.parkNote?.summary ? `### Work Summary\n${task.parkNote.summary}` : null,
|
|
5507
|
-
task.parkNote?.remaining ? `### What Remains\n${task.parkNote.remaining}` : null,
|
|
5508
6553
|
additionalNotes ? `### Additional Notes\n${additionalNotes}` : null,
|
|
5509
6554
|
'',
|
|
5510
6555
|
'---',
|
|
@@ -6431,6 +7476,27 @@ Use this tool between those two to understand the merge context.`,
|
|
|
6431
7476
|
: null
|
|
6432
7477
|
const isStale = hoursSinceCommit !== null && hoursSinceCommit >= 48
|
|
6433
7478
|
|
|
7479
|
+
// CI status — fetch check runs for the PR if one exists
|
|
7480
|
+
let ciCheck = null
|
|
7481
|
+
if (branch.prNumber) {
|
|
7482
|
+
try {
|
|
7483
|
+
const prRes = await api.get(`/api/projects/${projectId}/github/pull-requests/${branch.prNumber}`)
|
|
7484
|
+
if (prRes?.success && prRes.data?.checks) {
|
|
7485
|
+
const runs = Array.isArray(prRes.data.checks.check_runs) ? prRes.data.checks.check_runs
|
|
7486
|
+
: Array.isArray(prRes.data.checks) ? prRes.data.checks
|
|
7487
|
+
: []
|
|
7488
|
+
const total = runs.length
|
|
7489
|
+
const failed = runs.filter(r => r.conclusion === 'failure' || r.conclusion === 'timed_out').length
|
|
7490
|
+
const pending = runs.filter(r => !r.conclusion).length
|
|
7491
|
+
const ciStatus = total === 0 ? 'no_checks'
|
|
7492
|
+
: failed > 0 ? 'failing'
|
|
7493
|
+
: pending > 0 ? 'pending'
|
|
7494
|
+
: 'passing'
|
|
7495
|
+
ciCheck = { status: ciStatus, total, failed, pending, passed: total - failed - pending }
|
|
7496
|
+
}
|
|
7497
|
+
} catch { /* non-fatal — CI is optional */ }
|
|
7498
|
+
}
|
|
7499
|
+
|
|
6434
7500
|
// Build checklist
|
|
6435
7501
|
const checklist = [
|
|
6436
7502
|
{
|
|
@@ -6485,6 +7551,16 @@ Use this tool between those two to understand the merge context.`,
|
|
|
6485
7551
|
: 'No commit data available',
|
|
6486
7552
|
blocking: false,
|
|
6487
7553
|
},
|
|
7554
|
+
...(ciCheck && ciCheck.status !== 'no_checks' ? [{
|
|
7555
|
+
item: 'CI checks passing',
|
|
7556
|
+
passed: ciCheck.status === 'passing',
|
|
7557
|
+
detail: ciCheck.status === 'passing'
|
|
7558
|
+
? `All ${ciCheck.total} CI check${ciCheck.total !== 1 ? 's' : ''} passing`
|
|
7559
|
+
: ciCheck.status === 'failing'
|
|
7560
|
+
? `${ciCheck.failed} of ${ciCheck.total} CI check${ciCheck.total !== 1 ? 's' : ''} failing — fix before requesting review`
|
|
7561
|
+
: `${ciCheck.pending} CI check${ciCheck.pending !== 1 ? 's' : ''} still running — wait before reviewing`,
|
|
7562
|
+
blocking: ciCheck.status === 'failing',
|
|
7563
|
+
}] : []),
|
|
6488
7564
|
]
|
|
6489
7565
|
|
|
6490
7566
|
const blockingIssues = checklist.filter(c => !c.passed && c.blocking)
|